hexsha (stringlengths 40–40) | size (int64 2–1.01M) | content (stringlengths 2–1.01M) | avg_line_length (float64 1.5–100) | max_line_length (int64 2–1k) | alphanum_fraction (float64 0.25–1)
---|---|---|---|---|---|
18a6b12cf06ffbc877411cfa4a4097b14b2f0796 | 201 | class AddComplianceControlIdToComplianceControlBlocks < ActiveRecord::Migration
def change
add_reference :compliance_control_blocks, :compliance_control, index: true, foreign_key: true
end
end
| 33.5 | 97 | 0.835821 |
e9779a251f99cfdc20556bff3502d22a6a1de78f | 580 | name 'docker'
maintainer 'Sous Chefs'
maintainer_email '[email protected]'
license 'Apache-2.0'
description 'Provides docker_service, docker_image, and docker_container resources'
version '10.1.5'
source_url 'https://github.com/sous-chefs/docker'
issues_url 'https://github.com/sous-chefs/docker/issues'
chef_version '>= 16.0'
supports 'amazon'
supports 'centos'
supports 'scientific'
supports 'oracle'
supports 'debian'
supports 'fedora'
supports 'redhat'
supports 'ubuntu'
gem 'docker-api', '>= 1.34', '< 3'
| 27.619048 | 89 | 0.674138 |
b94105d482e2abdac5215917f880310be4ec9e52 | 4,494 | # Copyright © 2011-2016 MUSC Foundation for Research Development
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class Dashboard::SubsidiesController < Dashboard::BaseController
respond_to :json, :js, :html
def new
@subsidy = PendingSubsidy.new(sub_service_request_id: params[:sub_service_request_id])
@header_text = t(:subsidies)[:new]
@admin = params[:admin] == 'true'
@path = dashboard_subsidies_path
@action = 'new'
@subsidy.percent_subsidy = @subsidy.default_percentage
end
def create
@subsidy = PendingSubsidy.new(subsidy_params)
admin_param = params[:admin] == 'true'
if admin_param && (subsidy_params[:percent_subsidy] != 0)
@subsidy.save(validate: false)
perform_subsidy_creation(admin_param)
else
if @subsidy.valid?
@subsidy.save
perform_subsidy_creation
else
@errors = @subsidy.errors
end
end
end
def edit
@subsidy = PendingSubsidy.find(params[:id])
@header_text = t(:subsidies)[:edit]
@admin = params[:admin] == 'true'
@path = dashboard_subsidy_path(@subsidy)
@action = 'edit'
end
def update
@subsidy = PendingSubsidy.find(params[:id])
@sub_service_request = @subsidy.sub_service_request
admin_param = params[:admin] == 'true'
if admin_param && (subsidy_params[:percent_subsidy] != 0)
@subsidy.assign_attributes(subsidy_params)
@subsidy.save(validate: false)
perform_subsidy_update(admin_param)
else
if @subsidy.update_attributes(subsidy_params)
perform_subsidy_update
else
@errors = @subsidy.errors
@subsidy.reload
end
end
end
def destroy
@subsidy = Subsidy.find(params[:id])
@sub_service_request = @subsidy.sub_service_request
if @subsidy.destroy
@admin = true
flash[:alert] = t(:subsidies)[:destroyed]
end
end
def approve
subsidy = PendingSubsidy.find(params[:id])
subsidy = subsidy.grant_approval(current_user)
@sub_service_request = subsidy.sub_service_request.reload
@admin = true
flash[:success] = t(:subsidies)[:approved]
end
private
def subsidy_params
@subsidy_params ||= begin
temp = params.require(:pending_subsidy).permit(:sub_service_request_id,
:overridden,
:status,
:percent_subsidy)
if temp[:percent_subsidy].present?
temp[:percent_subsidy] = temp[:percent_subsidy].gsub(/[^\d^\.]/, '').to_f / 100
end
temp
end
end
def perform_subsidy_creation(admin_param=false)
@sub_service_request = @subsidy.sub_service_request
@admin = admin_param
flash[:success] = t(:dashboard)[:subsidies][:created]
unless @admin
redirect_to dashboard_sub_service_request_path(@sub_service_request, format: :js)
end
end
def perform_subsidy_update(admin_param=false)
@admin = admin_param
flash[:success] = t(:dashboard)[:subsidies][:updated]
unless @admin
redirect_to dashboard_sub_service_request_path(@sub_service_request, format: :js)
end
end
end
| 35.666667 | 145 | 0.720961 |
bb0774608fb303cb215f20355e6a1960d4544842 | 2,298 | module JSONQueryHelpers
extend ActiveSupport::Concern
OPERATORS = {
"in" => "in",
"any" => "any",
"lt" => "<",
"lteq" => "<=",
"gt" => ">",
"gteq" => ">="
}.freeze
FIELD_TYPES = %w[numeric date].freeze
class_methods do
def json_has_value(keys, value, json_column_name)
# Adapted from:
# https://stackoverflow.com/questions/33432421/sqlite-json1-example-for-json-extract-set
# http://guides.rubyonrails.org/active_record_postgresql.html#json
key, field_type, operator = parse_keys(keys)
json_column = "\"#{table_name}\".\"#{json_column_name}\""
sql = if operator == "in" then "#{json_column} #>> :key IN (:value)"
elsif operator == "any" then "(#{json_column} #>> :key)::JSONB ?| array[:value]"
elsif value.nil? then "#{json_column} #>> :key IS NULL"
elsif operator.in?(%w[lt lteq gt gteq])
field_type ||= "numeric"
operator = OPERATORS.fetch(operator)
"(#{json_column}#>>:key)::#{field_type} #{operator} :value::#{field_type}"
else
"#{json_column} #>> :key = :value"
end
# From: https://www.postgresql.org/docs/current/static/functions-json.html
# '{"a":[1,2,3],"b":[4,5,6]}'::json#>>'{a,2}' => 3
# Note that the column is already jsonb so no need to cast
value = value.is_a?(Array) ? value.map(&:to_s) : value.to_s
where(sql, key: "{#{key}}", value: value)
end
def json_has_values(hash, json_column)
flattened_hash = flatten_hash(hash)
scope = all
flattened_hash.each do |keys, v|
scope = scope.json_has_value(keys, v, json_column)
end
scope
end
private
# Adapted from:
# https://stackoverflow.com/a/9648410
def flatten_hash(hash, keys = [])
return { keys => hash } unless hash.is_a?(Hash)
hash.inject({}) { |h, v| h.merge! flatten_hash(v[-1], keys + [v[0]]) }
end
def parse_keys(keys)
key = [keys].flatten.join(",")
key_parts = key.split(".")
operator = key_parts.pop if key_parts.last.in?(OPERATORS.keys)
field_type = key_parts.pop if key_parts.last.in?(FIELD_TYPES)
key = key_parts.join(".")
[key, field_type, operator]
end
end
end
| 31.054054 | 94 | 0.579199 |
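A minimal usage sketch for the JSONQueryHelpers concern above. The Event model and its jsonb :properties column are hypothetical stand-ins introduced for illustration; they are not part of the original module.
# Hypothetical ActiveRecord model backed by a Postgres table with a jsonb :properties column.
class Event < ActiveRecord::Base
  include JSONQueryHelpers
end
# Exact match on a nested key: "events"."properties" #>> '{payload,status}' = 'ok'
Event.json_has_value([:payload, :status], "ok", :properties)
# Nested hash form with a casted numeric comparison; the "numeric.gteq" suffix is
# stripped by parse_keys and mapped to ">=" through OPERATORS.
Event.json_has_values({ payload: { "amount.numeric.gteq" => 100 } }, :properties)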
6115ebce9a9b3e6a755d439e23eb0b431d391007 | 5,430 | # A multithreaded Amazon SQS queue processor that supports throttling and batched operations.
#
# This is an intentionally minimal and SQS-specific implementation to meet the
# needs of queueing high scale database writes in SQS, for which throttled and
# batched processing is essential.
#
# Why a custom solution? For our general asynchronous job queueing needs, ActiveJob
# and Shoryuken could be good choices, but neither of these satisfy our
# particular requirements for high scale processing:
# - The existing ActiveJob worker interface does not support batching.
# - Shoryuken's custom API supports batching but not throttling, and the
# implementation has some concerning details (e.g. busy waiting) for high scale
# usage. Given the complexity of the code base, we decided not to try to fork it
# to add the needed features.
require 'aws-sdk'
require 'logger'
require 'thread'
require_relative 'messages_handler'
require_relative 'metrics'
require_relative 'queue_processor_config'
require_relative 'rate_limiter'
module SQS
# A class for processing an SQS queue using a pool of worker threads, each of which
# does long polling against the queue.
class QueueProcessor
attr_reader :state, :logger, :config, :metrics, :handler
# @param [SQS::QueueProcessorConfig] config
# @param [SQS::Metrics] metrics
def initialize(config, metrics)
raise ArgumentError if config.nil? || metrics.nil?
@state = :initialized
@config = config
@metrics = metrics
@logger = config.logger
@handler = config.handler
end
# Begin processing messages. Must be called exactly once after initialize and before
# stop is called. This does not block and returns immediately after starting the worker
# threads, so be sure to keep the process running.
def start
assert_state :initialized, "Can't run in state #{@state}, must be :initialized"
@state = :running
# Kill the process if a thread dies with an unhandled exception. We attempt to catch all possible
# exceptions, so this shouldn't happen.
Thread.abort_on_exception = true
@worker_threads = []
logger.info "Polling on #{@config.queue_url}"
([email protected]_workers_per_processor).each do |i|
worker_thread = Thread.new do
Thread.current[:name] = "SQS worker #{i}"
rate_limiter = RateLimiter.new(@config)
poller = Aws::SQS::QueuePoller.new(@config.queue_url)
# Break out of the polling loop when we leave the running state.
poller.before_request do |_stats|
throw :stop_polling if @state != :running
end
# Wrap the polling loop in an outer exception handler and while loop,
# to prevent an unanticipated exception from terminating the thread.
while @state == :running
begin
# Long-poll for messages and handle them until we're told to stop.
poller.poll(max_number_of_messages: 10, wait_time_seconds: 10, visibility_timeout: 5) do |sqs_messages|
batch_failed = false
messages = sqs_messages.map {|sqs_message|
SQS::Message.new(sqs_message.body)
}
batch_size = sqs_messages.size
start_time_sec = Time.now.to_f
begin
# Use with_connection to return the connection to the pool when the operation is done.
ActiveRecord::Base.connection_pool.with_connection do |_conn|
@handler.handle(messages)
end
@metrics.successes.increment(batch_size)
rescue Exception => exception
@logger.warn "Failed on batch of size #{batch_size}"
@metrics.failures.increment(batch_size)
batch_failed = true
@logger.warn exception.message
end
# Sleep for a bit to make sure we stay below the configured maximum rate. Note that we
# pause even if the handler failed because we don't want to exceed the configured rate
# even when failures are occurring.
delay = rate_limiter.inter_batch_delay(
batch_size: batch_size,
elapsed_time_sec: Time.now.to_f - start_time_sec
)
sleep(delay) if delay > 0
# Tell SQS to resend the batch if it failed.
throw :skip_delete if batch_failed
end
rescue => exception
@logger.warn exception
end
end # while
end
@worker_threads << worker_thread
end
end
# Request each of the worker threads to stop and block until they terminate (which could take
# up to max_wait_time seconds). Requires that run has been called.
def stop
assert_state :running, "Can't stop in state #{@state}, must be :running"
@state = :stopping
@worker_threads.each(&:join)
@state = :stopped
end
def assert_workers_alive_for_test
@worker_threads.each do |thread|
raise "Unexpected thread death" unless thread.alive?
end
end
# Asserts that the current state is `desired_state`.
def assert_state(desired_state, error_message)
raise error_message unless @state == desired_state
end
end
end
| 41.136364 | 117 | 0.650829 |
2831177376c236e4845638f56a0ff147a6038360 | 3,759 | require 'test_helper'
class ImportToolsImportTest < ActiveSupport::TestCase
test '#new tries to lock the import' do
ImportTools::Import.any_instance.stubs(:create_db)
ImportTools::Import.any_instance.stubs(:use_import_db=)
ImportTools::Import.any_instance.expects(:lock_import)
ImportTools::Import.new
end
test '#new raises an exception if the lock fails' do
ImportTools::Import.any_instance.expects(:create_db).never
ImportTools::RedisHandler.any_instance.stubs(:lock).returns(false)
assert_raise ImportTools::AlreadyRunningImportError do
ImportTools::Import.new
end
end
test '#new creates and seeds a DB with the given name' do
ImportTools::Import.any_instance.stubs(:lock_import)
ImportTools::Import.any_instance.expects(:create_db)
ImportTools::Import.any_instance.stubs(:use_import_db=)
ImportTools::Import.new
end
test '.started_at returns a Time instance created from the import token (timestamp)' do
import_starting_time = Time.new(2014, 1, 1)
token = import_starting_time.to_i
import = ImportTools::Import.new(token)
assert_equal import_starting_time, import.started_at
end
test '.increase_total_jobs_count calls redis to increase the number of jobs' do
token = 123
ImportTools::RedisHandler.any_instance.expects(:increase_property)
import = ImportTools::Import.new(token)
import.increase_total_jobs_count
end
test '.increase_completed_jobs_count sets the completed property for the import' do
ImportTools::RedisHandler.any_instance.expects(:increase_property_and_compare).returns(true)
import = ImportTools::Import.new(123)
import.increase_completed_jobs_count
assert import.completed?
end
test '.stop(true) drops the old backup db, renames the old to backup, and the new to old' do
import = ImportTools::Import.new(123)
test_db = Rails.configuration.database_configuration[Rails.env]['database']
import_db = "import_db_#{import.token}"
ImportTools::PostgresHandler.any_instance.expects(:drop_database).with("#{test_db}_backup")
ImportTools::PostgresHandler.any_instance.expects(:rename_database).with(test_db, "#{test_db}_backup")
ImportTools::PostgresHandler.any_instance.expects(:rename_database).with(import_db, test_db)
import.stop
end
test '.stop(true) unlocks the import' do
import = ImportTools::Import.new(123)
ImportTools::PostgresHandler.stubs(:new).returns(stub_everything)
ImportTools::RedisHandler.any_instance.expects(:unlock)
import.stop
end
test '.stop(true) adds the import to the done imports' do
import = ImportTools::Import.new(123)
ImportTools::PostgresHandler.stubs(:new).returns(stub_everything)
ImportTools::RedisHandler.any_instance.expects(:add_to_previous_imports).with(123)
import.stop
end
test '.stop(false) unlocks the import, but does not swap the databases' do
import = ImportTools::Import.new(123)
ImportTools::PostgresHandler.any_instance.expects(:drop_database).never
ImportTools::PostgresHandler.any_instance.expects(:rename_database).never
ImportTools::PostgresHandler.stubs(:new).returns(stub_everything)
ImportTools::RedisHandler.any_instance.expects(:unlock)
import.stop
end
test '.stop(false) adds the import to the done imports, but does not
swap the databases' do
import = ImportTools::Import.new(123)
ImportTools::PostgresHandler.any_instance.expects(:drop_database).never
ImportTools::PostgresHandler.any_instance.expects(:rename_database).never
ImportTools::PostgresHandler.stubs(:new).returns(stub_everything)
ImportTools::RedisHandler.any_instance.expects(:add_to_previous_imports).with(123)
import.stop
end
end
| 34.172727 | 106 | 0.762703 |
08c28d983614c3578af4a708b606a57ca255f604 | 43 | module MirrorGithub
VERSION = "0.0.3"
end | 14.333333 | 19 | 0.72093 |
e9c6fe3dad152e33c9c876db66662818923cf0e4 | 275 | # frozen_string_literal: true
class CreateTasks < ActiveRecord::Migration[5.2]
def change
create_table :tasks do |t|
t.string :title
t.references :user, foreign_key: true
t.references :project, foreign_key: true
t.timestamps
end
end
end
| 19.642857 | 48 | 0.683636 |
261625a7e1ca2b0626b646a30d072ddf86d18b10 | 840 | # Encoding: UTF-8
#
# Cookbook Name:: shipyard
# Spec:: recipes/default
#
# Copyright (C) 2014, Jonathan Hartman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
describe 'shipyard::default' do
let(:overrides) { {} }
let(:runner) { ChefSpec::Runner.new }
let(:chef_run) { runner.converge(described_recipe) }
end
| 30 | 74 | 0.738095 |
5dbc4f237a9832d187e4d068d345d26c4776ec99 | 13,184 | # frozen_string_literal: true
require "rails_helper"
RSpec.describe OrganizationWebhook, type: :model do
let(:organization) { classroom_org }
let(:client) { classroom_teacher.github_client }
subject do
organization_webhook = create(:organization_webhook, github_organization_id: organization.github_id)
organization_webhook.organizations << organization
organization_webhook
end
it { should have_many(:organizations) }
it { should have_many(:users).through(:organizations) }
it { should validate_uniqueness_of(:github_id).allow_nil }
it { should validate_presence_of(:github_organization_id) }
it { should validate_uniqueness_of(:github_organization_id) }
describe "#admin_org_hook_scoped_github_client" do
context "token is present" do
before do
allow(subject).to receive_message_chain(:users, :first, :token) { "token" }
end
it "returns a Octokit::Client" do
expect(subject.admin_org_hook_scoped_github_client).to be_a(Octokit::Client)
end
end
context "token is nil" do
before do
allow(subject).to receive_message_chain(:users, :first) { nil }
end
it "raises a NoValidTokenError" do
expect { subject.admin_org_hook_scoped_github_client }.to raise_error(described_class::NoValidTokenError)
end
end
end
describe "#ensure_webhook_is_active!", :vcr do
context "client is nil" do
before do
expect(subject)
.to receive(:admin_org_hook_scoped_github_client)
.and_return(client)
end
context "github_id is not present" do
context "there isn't an org hook on github" do
before do
expect(subject).to receive(:github_id).and_return(nil).twice
expect(subject).to receive(:retrieve_org_hook_id!).and_return(nil)
end
it "invokes create_org_hook!" do
expect(subject).to receive(:create_org_hook!).and_return(true)
subject.ensure_webhook_is_active!
end
it "returns true" do
expect(subject).to receive(:create_org_hook!).and_return(true)
expect(subject.ensure_webhook_is_active!).to be_truthy
end
end
context "hook exists on github" do
before do
expect(subject).to receive(:github_id).and_return(nil, true)
expect(subject).to receive_message_chain(:github_org_hook, :active?) { true }
end
it "invokes retrieve_org_hook_id!" do
expect(subject).to receive(:retrieve_org_hook_id!).and_return(true)
subject.ensure_webhook_is_active!
end
it "returns true" do
expect(subject).to receive(:retrieve_org_hook_id!).and_return(true)
expect(subject.ensure_webhook_is_active!).to be_truthy
end
end
end
context "github_org_hook is not found" do
before do
expect(subject).to receive(:github_id).and_return(true).twice
expect(subject).to receive_message_chain(:github_org_hook, :active?) { nil }
end
it "invokes create_org_hook!" do
expect(subject).to receive(:create_org_hook!).and_return(true)
subject.ensure_webhook_is_active!
end
it "returns true" do
expect(subject).to receive(:create_org_hook!).and_return(true)
expect(subject.ensure_webhook_is_active!).to be_truthy
end
end
context "github_org_hook was NotFound" do
before do
expect(subject).to receive(:github_id).and_return(true).twice
expect(subject).to receive_message_chain(:github_org_hook, :active?) { nil }
end
it "invokes create_org_hook!" do
expect(subject).to receive(:create_org_hook!).and_return(true)
subject.ensure_webhook_is_active!
end
it "returns true" do
expect(subject).to receive(:create_org_hook!).and_return(true)
expect(subject.ensure_webhook_is_active!).to be_truthy
end
end
context "github_org_hook is not active" do
before do
expect(subject).to receive(:github_id).and_return(true).twice
expect(subject).to receive_message_chain(:github_org_hook, :active?) { false }
end
it "invokes activate_org_hook!" do
expect(subject).to receive(:activate_org_hook).and_return(true)
subject.ensure_webhook_is_active!
end
it "returns true" do
expect(subject).to receive(:activate_org_hook).and_return(true)
expect(subject.ensure_webhook_is_active!).to be_truthy
end
end
context "github_org_hook is active" do
before do
expect(subject).to receive(:github_id).and_return(true).twice
expect(subject).to receive_message_chain(:github_org_hook, :active?) { true }
end
it "does not invoke create_org_hook!" do
expect(subject).to_not receive(:create_org_hook!)
subject.ensure_webhook_is_active!
end
it "returns true" do
expect(subject.ensure_webhook_is_active!).to be_truthy
end
end
end
context "client is present" do
context "github_id is not present" do
context "there isn't an org hook on github" do
before do
expect(subject).to receive(:github_id).and_return(nil).twice
expect(subject).to receive(:retrieve_org_hook_id!).and_return(nil)
end
it "invokes create_org_hook!" do
expect(subject).to receive(:create_org_hook!).and_return(true)
subject.ensure_webhook_is_active!(client: client)
end
it "returns true" do
expect(subject).to receive(:create_org_hook!).and_return(true)
expect(subject.ensure_webhook_is_active!(client: client)).to be_truthy
end
end
context "hook exists on github" do
before do
expect(subject).to receive(:github_id).and_return(nil, true)
expect(subject).to receive_message_chain(:github_org_hook, :active?) { true }
end
it "invokes retrieve_org_hook_id!" do
expect(subject).to receive(:retrieve_org_hook_id!).and_return(0)
subject.ensure_webhook_is_active!(client: client)
end
it "returns true" do
expect(subject).to receive(:retrieve_org_hook_id!).and_return(0)
expect(subject.ensure_webhook_is_active!(client: client)).to be_truthy
end
end
end
context "github_org_hook was NotFound" do
before do
expect(subject).to receive(:github_id).and_return(true).twice
expect(subject).to receive_message_chain(:github_org_hook, :active?) { nil }
end
it "invokes create_org_hook!" do
expect(subject).to receive(:create_org_hook!).and_return(true)
subject.ensure_webhook_is_active!(client: client)
end
it "returns true" do
expect(subject).to receive(:create_org_hook!).and_return(true)
expect(subject.ensure_webhook_is_active!(client: client)).to be_truthy
end
end
context "github_org_hook is not active" do
before do
expect(subject).to receive(:github_id).and_return(true).twice
expect(subject).to receive_message_chain(:github_org_hook, :active?) { false }
end
it "invokes activate_org_hook!" do
expect(subject).to receive(:activate_org_hook).and_return(true)
subject.ensure_webhook_is_active!(client: client)
end
it "returns true" do
expect(subject).to receive(:activate_org_hook).and_return(true)
expect(subject.ensure_webhook_is_active!(client: client)).to be_truthy
end
end
context "github_org_hook is active" do
before do
expect(subject).to receive(:github_id).and_return(true).twice
expect(subject).to receive_message_chain(:github_org_hook, :active?) { true }
end
it "does not invoke create_org_hook!" do
expect(subject).to_not receive(:create_org_hook!)
subject.ensure_webhook_is_active!(client: client)
end
it "returns true" do
expect(subject.ensure_webhook_is_active!(client: client)).to be_truthy
end
end
end
end
describe "#retrieve_org_hook_id!", :vcr do
context "org hook exists" do
before do
expect_any_instance_of(GitHubOrganization)
.to receive(:organization_webhooks)
.and_return([double("Classroom webhook", id: 0)])
end
context "saves successfully" do
it "returns the expected id" do
expect(subject.send(:retrieve_org_hook_id!, client)).to eq(0)
end
it "saves the new id" do
subject.send(:retrieve_org_hook_id!, client)
expect(subject.reload.github_id).to eq(0)
end
end
context "raises a ActiveRecord::RecordInvalid" do
before do
expect(subject).to receive(:save!).and_raise(ActiveRecord::RecordInvalid)
end
it "returns nil" do
expect(subject.send(:retrieve_org_hook_id!, client)).to be_nil
end
end
end
context "org hook does not exist" do
before do
expect_any_instance_of(GitHubOrganization)
.to receive(:organization_webhooks)
.and_return([])
end
it "retruns nil" do
expect(subject.send(:retrieve_org_hook_id!, client)).to be_nil
end
end
context "raises a GitHub::Error" do
before do
expect_any_instance_of(GitHubOrganization)
.to receive(:organization_webhooks)
.and_raise(GitHub::Error)
end
it "returns nil" do
expect(subject.send(:retrieve_org_hook_id!, client)).to be_nil
end
end
end
describe "#create_org_hook!", :vcr do
context "GitHub::Error is raised" do
before do
expect_any_instance_of(GitHubOrganization)
.to receive(:create_organization_webhook)
.and_raise(GitHub::Error)
end
it "raises a GitHub::Error" do
expect { subject.create_org_hook!(client) }
.to raise_error(GitHub::Error)
end
end
context "ActiveRecord::RecordInvalid is raised" do
before do
expect_any_instance_of(GitHubOrganization)
.to receive_message_chain(:create_organization_webhook, :id) { 0 }
expect(subject)
.to receive(:save!)
.and_raise(ActiveRecord::RecordInvalid)
end
it "raises a ActiveRecord::RecordInvalid" do
expect { subject.create_org_hook!(client) }
.to raise_error(ActiveRecord::RecordInvalid)
end
end
context "org hook is successfully created" do
before do
expect_any_instance_of(GitHubOrganization)
.to receive_message_chain(:create_organization_webhook, :id) { 0 }
end
it "returns true" do
expect(subject.create_org_hook!(client)).to be_truthy
end
it "saves the new id" do
subject.create_org_hook!(client)
expect(subject.github_id).to eq(0)
end
end
end
describe "#activate_org_hook!", :vcr do
context "GitHub::Error is raised" do
before do
expect_any_instance_of(GitHubOrganization)
.to receive(:activate_organization_webhook)
.and_raise(GitHub::Error)
end
it "raises a GitHub::Error" do
expect { subject.activate_org_hook(client) }
.to raise_error(GitHub::Error)
end
end
context "org hook is successfully activated" do
before do
expect_any_instance_of(GitHubOrganization)
.to receive_message_chain(:activate_organization_webhook) { 0 }
end
it "returns true" do
expect(subject.activate_org_hook(client)).to be_truthy
end
end
end
describe "#users_with_admin_org_hook_scope" do
context "user with admin_org hook scope doesn't exist" do
before do
User.any_instance.stub(:github_client_scopes)
.and_return([])
end
it "returns an empty list" do
expect(subject.send(:users_with_admin_org_hook_scope)).to be_empty
end
end
context "user with admin_org hook scope exists" do
before do
User.any_instance.stub(:github_client_scopes)
.and_return(["admin:org_hook"])
end
it "returns a list with the user" do
expect(subject.send(:users_with_admin_org_hook_scope)).to_not be_empty
end
end
end
describe "#webhook_url" do
context "webhook_url_prefix is present" do
it "returns a valid webhook_url" do
expect(subject.send(:webhook_url)).to be_truthy
end
end
context "webhook_url_prefix is blank" do
before do
stub_const("ENV", {})
end
it "returns a valid webhook_url" do
expect { subject.send(:webhook_url) }
.to raise_error(RuntimeError, described_class::WEBHOOK_URL_DEVELOPMENT_ERROR)
end
end
end
end
| 31.922518 | 113 | 0.647148 |
33eece9196ce13533bdc8f01815de41615773a54 | 2,241 | require 'spec_helper'
describe Campaign do
let(:campaign) {
campaign = FactoryGirl.create(:campaign)
}
describe "#queued!" do
it "should notify admin" do
campaign.update_attribute(:aasm_state, 'queueing')
campaign.should_receive(:notify_admin_of_state_change){ true }
campaign.queued!
end
end
describe "#send_preview" do
before(:each) do
campaign.preview_recipients = '[email protected], [email protected]'
end
it 'should not change state' do
expect {
campaign.send_preview
}.to_not change{ campaign.state }
end
it "should prefix the subject line" do
campaign.send_preview
email = ActionMailer::Base.deliveries.last
email.subject.should == "[PREVIEW] #{campaign.subject}"
end
end
describe '#send_campaign!' do
before(:each) do
campaign.aasm_state = 'queued'
campaign.save!
end
it "transitions from queued to sending" do
campaign.send_campaign!
campaign.sending?.should be_true
end
context 'with 1000 queued mails' do
before do
queued_mails = 'Array'
queued_mails.stub(:count).and_return(1000)
campaign.stub(:queued_mails).and_return(queued_mails)
end
it "enqueues 10 campaign worker jobs" do
CampaignWorker.jobs.should be_empty
campaign.send_campaign!
CampaignWorker.jobs.size.should == 10
end
end
context 'with 100 queued mails' do
before do
queued_mails = 'Array'
queued_mails.stub(:count).and_return(100)
campaign.stub(:queued_mails).and_return(queued_mails)
end
it "enqueues 1 campaign worker jobs" do
CampaignWorker.jobs.should be_empty
campaign.send_campaign!
CampaignWorker.jobs.size.should == 1
end
end
context 'with 5 queued mails' do
before do
queued_mails = 'Array'
queued_mails.stub(:count).and_return(5)
campaign.stub(:queued_mails).and_return(queued_mails)
end
it "enqueues 1 campaign worker jobs" do
CampaignWorker.jobs.should be_empty
campaign.send_campaign!
CampaignWorker.jobs.size.should == 1
end
end
end
end
| 22.867347 | 70 | 0.651049 |
111776c16763acce37675b307539e4a8954befb2 | 402 | FactoryGirl.define do
factory :user do
password 'secret'
sequence(:email) { |n| "name_#{n}@example.com" }
factory :random_user do
sequence(:email) { |n| "#{Faker::Internet.user_name(nil, %w(._-))}_#{n}@example.com" }
sequence(:image_url) { |n| "https://randomuser.me/api/portraits/#{rand(2).zero? ? 'men' : 'women'}/#{n % 200}.jpg" }
has_random_dates
end
end
end
| 30.923077 | 122 | 0.609453 |
b9a01360c7c5de0b641b25594492831a6b1a3f31 | 27 | module TaskTypesHelper
end
| 9 | 22 | 0.888889 |
0392e43895fea69a97962abc7e23dc99b65d998b | 3,453 | require 'tempfile'
require 'rubygems'
require 'rubygems/remote_fetcher'
##
# A fake Gem::RemoteFetcher for use in tests or to avoid real live HTTP
# requests when testing code that uses RubyGems.
#
# Example:
#
# @fetcher = Gem::FakeFetcher.new
# @fetcher.data['http://gems.example.com/yaml'] = source_index.to_yaml
# Gem::RemoteFetcher.fetcher = @fetcher
#
# # invoke RubyGems code
#
# paths = @fetcher.paths
# assert_equal 'http://gems.example.com/yaml', paths.shift
# assert paths.empty?, paths.join(', ')
#
# See RubyGems' tests for more examples of FakeFetcher.
class Gem::FakeFetcher
attr_reader :data
attr_reader :last_request
attr_accessor :paths
def initialize
@data = {}
@paths = []
end
def find_data(path)
path = path.to_s
@paths << path
raise ArgumentError, 'need full URI' unless path =~ %r'^https?://'
unless @data.key? path then
raise Gem::RemoteFetcher::FetchError.new("no data for #{path}", path)
end
@data[path]
end
def fetch_path path, mtime = nil
data = find_data(path)
if data.respond_to?(:call) then
data.call
else
if path.to_s =~ /gz$/ and not data.nil? and not data.empty? then
data = Gem.gunzip data
end
data
end
end
# Thanks, FakeWeb!
def open_uri_or_path(path)
data = find_data(path)
body, code, msg = data
response = Net::HTTPResponse.send(:response_class, code.to_s).new("1.0", code.to_s, msg)
response.instance_variable_set(:@body, body)
response.instance_variable_set(:@read, true)
response
end
def request(uri, request_class, last_modified = nil)
data = find_data(uri)
body, code, msg = data
@last_request = request_class.new uri.request_uri
yield @last_request if block_given?
response = Net::HTTPResponse.send(:response_class, code.to_s).new("1.0", code.to_s, msg)
response.instance_variable_set(:@body, body)
response.instance_variable_set(:@read, true)
response
end
def fetch_size(path)
path = path.to_s
@paths << path
raise ArgumentError, 'need full URI' unless path =~ %r'^http://'
unless @data.key? path then
raise Gem::RemoteFetcher::FetchError.new("no data for #{path}", path)
end
data = @data[path]
data.respond_to?(:call) ? data.call : data.length
end
def download spec, source_uri, install_dir = Gem.dir
name = spec.file_name
path = File.join(install_dir, 'cache', name)
Gem.ensure_gem_subdirectories install_dir
if source_uri =~ /^http/ then
File.open(path, "wb") do |f|
f.write fetch_path(File.join(source_uri, "gems", name))
end
else
FileUtils.cp source_uri, path
end
path
end
end
# :stopdoc:
class Gem::RemoteFetcher
def self.fetcher=(fetcher)
@fetcher = fetcher
end
end
# :startdoc:
##
# A StringIO duck-typed class that uses Tempfile instead of String as the
# backing store.
#--
# This class was added to flush out problems in Rubinius' IO implementation.
class TempIO
@@count = 0
def initialize(string = '')
@tempfile = Tempfile.new "TempIO-#{@@count += 1}"
@tempfile.binmode
@tempfile.write string
@tempfile.rewind
end
def method_missing(meth, *args, &block)
@tempfile.send(meth, *args, &block)
end
def respond_to?(meth)
@tempfile.respond_to? meth
end
def string
@tempfile.flush
Gem.read_binary @tempfile.path
end
end
| 21.447205 | 92 | 0.663191 |
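A brief usage sketch for the TempIO class defined above; it behaves like a StringIO but keeps its data in a Tempfile.
io = TempIO.new("hello world")
io.rewind                 # delegated to the underlying Tempfile via method_missing
puts io.read              # => "hello world"
puts io.string            # flushes and re-reads the backing file => "hello world"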
f70f8c7bd74f2032a44749f730f699b7a1ba6c37 | 1,159 | module RelaxDB
class Server
def initialize(host, port)
@host = host
@port = port
end
def delete(uri)
request(Net::HTTP::Delete.new(uri))
end
def get(uri)
request(Net::HTTP::Get.new(uri))
end
def put(uri, json)
req = Net::HTTP::Put.new(uri)
req["content-type"] = "application/json"
req.body = json
request(req)
end
def post(uri, json)
req = Net::HTTP::Post.new(uri)
req["content-type"] = "application/json"
req.body = json
request(req)
end
def request(req)
res = Net::HTTP.start(@host, @port) {|http|
http.request(req)
}
if (not res.kind_of?(Net::HTTPSuccess))
handle_error(req, res)
end
res
end
def to_s
"http://#{@host}:#{@port}/"
end
private
def handle_error(req, res)
msg = "#{res.code}:#{res.message}\nMETHOD:#{req.method}\nURI:#{req.path}\n#{res.body}"
begin
klass = RelaxDB.const_get("HTTP_#{res.code}")
e = klass.new(msg)
rescue
e = RuntimeError.new(msg)
end
raise e
end
end
end | 19 | 92 | 0.534944 |
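A hedged usage sketch for the RelaxDB::Server wrapper above. The CouchDB host, port, database and document names are illustrative assumptions, not values taken from the original code.
require 'net/http'
require 'json'
server = RelaxDB::Server.new("localhost", 5984)
server.put("/example_db", "")                                 # create a database
server.put("/example_db/doc1", { "name" => "demo" }.to_json)  # store a document
puts server.get("/example_db/doc1").body                      # fetch it back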
e26a73ad0abc11f59cceb6d6d67f12cc6c7e8421 | 1,462 | Pod::Spec.new do |s|
s.name = 'RHScroll'
s.version = '1.0'
s.summary = 'With RHScroll you have a horizontal ScrollView with built in animations and notifications for tap and longPress gestures.'
s.swift_version = '4.2'
s.description = <<-DESC
RHScroll is a ScrollView similar to several famous apps out on the App Store. It creates in a simple way a horizontal ScrollView which can be used for several different purposes. You're able to choose the size of the ScrollView, size of the items, the data and the positioning of everything. It has built in animations for tap and longPress.
DESC
s.homepage = 'https://github.com/rashwanlazkani/RHScroll'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'rashwanlazkani' => '[email protected]' }
s.source = { :git => 'https://github.com/rashwanlazkani/RHScroll.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '10.0'
s.source_files = 'RHScroll/Classes/*.swift'
# s.resource_bundles = {
# 'RHScroll' => ['RHScroll/Assets/*.png']
# }
# s.public_header_files = 'Pod/Classes/**/*.h'
s.frameworks = 'UIKit'
# s.dependency 'AFNetworking', '~> 2.3'
end
| 47.16129 | 346 | 0.627223 |
79ebf7e1340369be0528cf65853da48b1570cbe3 | 1,320 | module Patriot
module Tool
module PatriotCommands
# manage plugins
module Plugin
Patriot::Tool::PatriotCommand.class_eval do
desc 'plugin [options] install <path to plugin>',
'manage plugins'
method_option :force,
:aliases => '-f',
:type => :boolean,
:desc => 'force operation'
method_option :unpack,
:type => :boolean,
:desc => 'unpack gem into plugin dir'
def plugin(sub_cmd, *plugin)
opts = symbolize_options(options)
conf = {:ignore_plugin => true}
conf[:path] = opts[:config] if opts.has_key?(:config)
config = load_config(conf)
controller = Patriot::Controller::PackageController.new(config)
plugins = []
if plugin.nil? || plugin.empty?
plugins = config.get(Patriot::Util::Config::PLUGIN_KEY, plugin)
else
plugins = plugin
end
sub_cmd = sub_cmd.to_sym
if sub_cmd == :install
plugins.each{|name| controller.install_plugin(name, opts) }
else
help("plugin")
end
end
end
end
end
end
end
| 31.428571 | 77 | 0.50303 |
bb015747649a7f4ab22df179a52f2ca69a6b4d5f | 10,655 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe ApprovalRules::ParamsFilteringService do
let(:service) { described_class.new(merge_request, user, params) }
let(:project_member) { create(:user) }
let(:outsider) { create(:user) }
let(:accessible_group) { create(:group, :private) }
let(:inaccessible_group) { create(:group, :private) }
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
describe '#execute' do
before do
project.add_maintainer(user)
project.add_reporter(project_member)
accessible_group.add_developer(user)
end
shared_examples_for(:assigning_users_and_groups) do
before do
allow(Ability).to receive(:allowed?).and_call_original
allow(Ability)
.to receive(:allowed?)
.with(user, :update_approvers, merge_request)
.and_return(can_update_approvers?)
end
context 'user can update approvers' do
let(:can_update_approvers?) { true }
it 'only assigns eligible users and groups' do
params = service.execute
rule1 = params[:approval_rules_attributes].first
expect(rule1[:user_ids]).to contain_exactly(project_member.id)
rule2 = params[:approval_rules_attributes].last
expected_group_ids = expected_groups.map(&:id)
expect(rule2[:user_ids]).to be_empty
expect(rule2[:group_ids]).to contain_exactly(*expected_group_ids)
end
end
context 'user cannot update approvers' do
let(:can_update_approvers?) { false }
it 'deletes the approval_rules_attributes from params' do
expect(service.execute).not_to have_key(:approval_rules_attributes)
end
end
end
context 'create' do
let(:merge_request) { build(:merge_request, target_project: project, source_project: project) }
let(:params) do
{
title: 'Awesome merge_request',
description: 'please fix',
source_branch: 'feature',
target_branch: 'master',
force_remove_source_branch: '1',
approval_rules_attributes: approval_rules_attributes
}
end
it_behaves_like :assigning_users_and_groups do
let(:approval_rules_attributes) do
[
{ name: 'foo', user_ids: [project_member.id, outsider.id] },
{ name: 'bar', user_ids: [outsider.id], group_ids: [accessible_group.id, inaccessible_group.id] }
]
end
let(:expected_groups) { [accessible_group] }
end
context 'inapplicable user defined rules' do
let!(:source_rule) { create(:approval_project_rule, project: project) }
let!(:another_source_rule) { create(:approval_project_rule, project: project) }
let(:protected_branch) { create(:protected_branch, project: project, name: 'stable-*') }
let(:approval_rules_attributes) do
[
{ name: another_source_rule.name, approval_project_rule_id: another_source_rule.id, user_ids: [project_member.id, outsider.id] }
]
end
before do
source_rule.update!(protected_branches: [protected_branch])
end
context 'when multiple_approval_rules feature is available' do
before do
stub_licensed_features(multiple_approval_rules: true)
end
it 'adds inapplicable user defined rules' do
params = service.execute
approval_rules_attrs = params[:approval_rules_attributes]
aggregate_failures do
expect(approval_rules_attrs.size).to eq(2)
expect(approval_rules_attrs.first).to include(
name: another_source_rule.name,
approval_project_rule_id: another_source_rule.id
)
expect(approval_rules_attrs.last).to include(
name: source_rule.name,
approval_project_rule_id: source_rule.id,
user_ids: source_rule.user_ids,
group_ids: source_rule.group_ids,
approvals_required: source_rule.approvals_required,
rule_type: source_rule.rule_type
)
end
end
end
context 'when multiple_approval_rules feature is not available' do
before do
stub_licensed_features(multiple_approval_rules: false)
end
it 'does not add inapplicable user defined rules' do
params = service.execute
approval_rules_attrs = params[:approval_rules_attributes]
aggregate_failures do
expect(approval_rules_attrs.size).to eq(1)
expect(approval_rules_attrs.first).to include(
name: another_source_rule.name,
approval_project_rule_id: another_source_rule.id
)
end
end
end
end
context 'any approver rule' do
let(:can_update_approvers?) { true }
let(:approval_rules_attributes) do
[
{ user_ids: [], group_ids: [] }
]
end
it 'sets rule type for the rules attributes' do
params = service.execute
rule = params[:approval_rules_attributes].first
expect(rule[:rule_type]).to eq(:any_approver)
expect(rule[:name]).to eq('All Members')
end
end
# A test case for https://gitlab.com/gitlab-org/gitlab/-/issues/208978#note_353379792
# Approval project rules with any_approver type have groups, but they shouldn't
context 'any approver rule from a project rule' do
let(:can_update_approvers?) { true }
let(:approval_rules_attributes) do
[
{ user_ids: [""], group_ids: [""], approval_project_rule_id: approval_rule.id }
]
end
context 'and the project rule has hidden groups' do
let(:approval_rule) do
create(:approval_project_rule, project: project, rule_type: :any_approver).tap do |rule|
rule.groups << create(:group, :private)
end
end
it 'sets rule type for the rules attributes' do
params = service.execute
rule = params[:approval_rules_attributes].first
expect(rule[:rule_type]).to eq(:any_approver)
expect(rule[:name]).to eq('All Members')
end
end
end
end
context 'update' do
let(:merge_request) { create(:merge_request, target_project: project, source_project: project) }
let(:existing_private_group) { create(:group, :private) }
let!(:rule1) { create(:approval_merge_request_rule, merge_request: merge_request, users: [create(:user)]) }
let!(:rule2) { create(:approval_merge_request_rule, merge_request: merge_request, groups: [existing_private_group]) }
it_behaves_like :assigning_users_and_groups do
let(:params) do
{
approval_rules_attributes: [
{ id: rule1.id, name: 'foo', user_ids: [project_member.id, outsider.id] },
{ id: rule2.id, name: 'bar', user_ids: [outsider.id], group_ids: [accessible_group.id, inaccessible_group.id] }
]
}
end
let(:expected_groups) { [accessible_group, existing_private_group] }
end
context 'inapplicable user defined rules' do
let!(:source_rule) { create(:approval_project_rule, project: project) }
let(:protected_branch) { create(:protected_branch, project: project, name: 'stable-*') }
let(:approval_rules_attrs) { service.execute[:approval_rules_attributes] }
let(:params) do
{
approval_rules_attributes: [
{ id: rule1.id, name: 'foo', user_ids: [project_member.id, outsider.id] }
]
}
end
before do
source_rule.update!(protected_branches: [protected_branch])
end
it 'does not add inapplicable user defined rules' do
aggregate_failures do
expect(approval_rules_attrs.size).to eq(1)
expect(approval_rules_attrs.first[:name]).to eq('foo')
end
end
context 'when reset_approval_rules_to_defaults is true' do
let(:params) do
{
approval_rules_attributes: [
{ id: rule1.id, name: 'foo', user_ids: [project_member.id, outsider.id] }
],
reset_approval_rules_to_defaults: true
}
end
context 'when multiple_approval_rules feature is available' do
before do
stub_licensed_features(multiple_approval_rules: true)
end
it 'adds inapplicable user defined rules' do
aggregate_failures do
expect(approval_rules_attrs.size).to eq(2)
expect(approval_rules_attrs.first).to include(
id: rule1.id,
name: 'foo'
)
expect(approval_rules_attrs.last).to include(
name: source_rule.name,
approval_project_rule_id: source_rule.id,
user_ids: source_rule.user_ids,
group_ids: source_rule.group_ids,
approvals_required: source_rule.approvals_required,
rule_type: source_rule.rule_type
)
end
end
end
context 'when multiple_approval_rules feature is not available' do
before do
stub_licensed_features(multiple_approval_rules: false)
end
it 'does not add inapplicable user defined rules' do
aggregate_failures do
expect(approval_rules_attrs.size).to eq(1)
expect(approval_rules_attrs.first).to include(
id: rule1.id,
name: 'foo'
)
end
end
end
end
end
context 'with remove_hidden_groups being true' do
it_behaves_like :assigning_users_and_groups do
let(:params) do
{
approval_rules_attributes: [
{ id: rule1.id, name: 'foo', user_ids: [project_member.id, outsider.id] },
{ id: rule2.id, name: 'bar', user_ids: [outsider.id], group_ids: [accessible_group.id, inaccessible_group.id], remove_hidden_groups: true }
]
}
end
let(:expected_groups) { [accessible_group] }
end
end
end
end
end
| 34.820261 | 155 | 0.601971 |
e8fc64374ad97e7dd8eef6f2276b4236cb17a1ad | 58 | class Income < ApplicationRecord
belongs_to :user
end
| 14.5 | 32 | 0.775862 |
798dc2ef920d4e18d38be594748846b5aa520e3f | 4,356 | #
# Cookbook Name:: nginx
# Recipe:: source
#
# Author:: Adam Jacob (<[email protected]>)
# Author:: Joshua Timberman (<[email protected]>)
#
# Copyright 2009-2011, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe "build-essential"
packages = value_for_platform(
["centos","redhat","fedora"] => {'default' => ['pcre-devel', 'openssl-devel']},
"default" => ['libpcre3', 'libpcre3-dev', 'libssl-dev']
)
packages.each do |devpkg|
package devpkg
end
nginx_version = node[:nginx][:version]
src_url = node[:nginx][:url]
node.set[:nginx][:install_path] = "/opt/nginx-#{nginx_version}"
node.set[:nginx][:src_binary] = "#{node[:nginx][:install_path]}/sbin/nginx"
node.set[:nginx][:daemon_disable] = true
node.set[:nginx][:configure_flags] = [
"--prefix=#{node[:nginx][:install_path]}",
"--conf-path=#{node[:nginx][:dir]}/nginx.conf",
"--with-http_ssl_module",
"--with-http_gzip_static_module"
]
configure_flags = node[:nginx][:configure_flags].join(" ")
remote_file "#{Chef::Config[:file_cache_path]}/nginx-#{nginx_version}.tar.gz" do
source src_url
action :create_if_missing
end
bash "compile_nginx_source" do
cwd Chef::Config[:file_cache_path]
code <<-EOH
tar zxf nginx-#{nginx_version}.tar.gz
cd nginx-#{nginx_version} && ./configure #{configure_flags}
make && make install
EOH
creates node[:nginx][:src_binary]
notifies :restart, "service[nginx]"
end
user node[:nginx][:user] do
system true
shell "/bin/false"
home "/var/www"
end
directory node[:nginx][:log_dir] do
mode 0755
owner node[:nginx][:user]
action :create
end
directory node[:nginx][:dir] do
owner "root"
group "root"
mode "0755"
end
case node[:nginx][:init_style]
when "runit"
include_recipe "runit"
runit_service "nginx"
service "nginx" do
supports :status => true, :restart => true, :reload => true
reload_command "[[ -f #{node[:nginx][:pid]} ]] && kill -HUP `cat #{node[:nginx][:pid]}` || true"
end
when "bluepill"
include_recipe "bluepill"
template "#{node['bluepill']['conf_dir']}/nginx.pill" do
source "nginx.pill.erb"
mode 0644
variables(
:working_dir => node[:nginx][:install_path],
:src_binary => node[:nginx][:src_binary],
:nginx_dir => node[:nginx][:dir],
:log_dir => node[:nginx][:log_dir],
:pid => node[:nginx][:pid]
)
end
bluepill_service "nginx" do
action [ :enable, :load ]
end
service "nginx" do
supports :status => true, :restart => true, :reload => true
reload_command "[[ -f #{node[:nginx][:pid]} ]] && kill -HUP `cat #{node[:nginx][:pid]}` || true"
action :nothing
end
else
#install init db script
template "/etc/init.d/nginx" do
source "nginx.init.erb"
owner "root"
group "root"
mode "0755"
end
#install sysconfig file (not really needed but standard)
template "/etc/sysconfig/nginx" do
source "nginx.sysconfig.erb"
owner "root"
group "root"
mode "0644"
end
#register service
service "nginx" do
supports :status => true, :restart => true, :reload => true
action :enable
end
end
%w{ sites-available sites-enabled conf.d }.each do |dir|
directory "#{node[:nginx][:dir]}/#{dir}" do
owner "root"
group "root"
mode "0755"
end
end
%w{nxensite nxdissite}.each do |nxscript|
template "/usr/sbin/#{nxscript}" do
source "#{nxscript}.erb"
mode "0755"
owner "root"
group "root"
end
end
template "nginx.conf" do
path "#{node[:nginx][:dir]}/nginx.conf"
source "nginx.conf.erb"
owner "root"
group "root"
mode "0644"
notifies :reload, resources(:service => "nginx"), :immediately
end
cookbook_file "#{node[:nginx][:dir]}/mime.types" do
source "mime.types"
owner "root"
group "root"
mode "0644"
notifies :reload, resources(:service => "nginx"), :immediately
end
| 25.034483 | 100 | 0.662075 |
2641285656011ff9af2477e89f44fec34f9bd2bb | 441 | module WelcomeBot
class Contributors
include Aws::Record
string_attr :username, hash_key: true
string_attr :interactions
end
class Reporters
include Aws::Record
string_attr :username, hash_key: true
string_attr :interactions
end
class Messages
include Aws::Record
string_attr :org, hash_key: true
string_attr :pr_message
string_attr :issue_message
end
end
| 18.375 | 45 | 0.678005 |
395d055fa5931de68676adcb7126f31018b92745 | 1,575 | RepMine::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Configure static asset server for tests with Cache-Control for performance
config.serve_static_assets = true
config.static_cache_control = "public, max-age=3600"
# Log error messages when you accidentally call methods on nil
config.whiny_nils = true
# Show full error reports and disable caching
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Raise exceptions instead of rendering exception templates
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment
config.action_controller.allow_forgery_protection = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Raise exception on mass assignment protection for Active Record models
config.active_record.mass_assignment_sanitizer = :strict
# Print deprecation notices to the stderr
config.active_support.deprecation = :stderr
config.i18n.enforce_available_locales = true
end
| 39.375 | 84 | 0.786032 |
e895d30c7399e33b4899d094e2d0dcc09c4991b3 | 3,584 | describe TreeBuilderComplianceHistory do
context 'TreeBuilderComplianceHistory' do
before do
role = MiqUserRole.find_by(:name => "EvmRole-operator")
@group = FactoryBot.create(:miq_group, :miq_user_role => role, :description => "Compliance History Group")
login_as FactoryBot.create(:user, :userid => 'comliance_history__wilma', :miq_groups => [@group])
compliance_detail_one = FactoryBot.create(:compliance_detail,
:miq_policy_id => 1234,
:condition_desc => "I am first condition")
compliance_detail_two = FactoryBot.create(:compliance_detail,
:miq_policy_id => 1234,
:condition_desc => "I am second condition")
compliance_detail_negative = FactoryBot.create(:compliance_detail,
:miq_policy_id => 1111,
:miq_policy_result => false,
:condition_result => false)
compliance_details = [compliance_detail_one, compliance_detail_two, compliance_detail_negative]
empty_compliance = FactoryBot.create(:compliance)
compliance = FactoryBot.create(:compliance, :compliance_details => compliance_details)
root = FactoryBot.create(:host, :compliances => [empty_compliance, compliance])
@ch_tree = TreeBuilderComplianceHistory.new(:ch_tree, {}, true, :root => root)
end
it 'is not lazy' do
tree_options = @ch_tree.send(:tree_init_options)
expect(tree_options[:lazy]).not_to be_truthy
end
it 'returns Compliance as root kids' do
kids = @ch_tree.send(:x_get_tree_roots, false)
kids.each do |kid|
expect(kid).to be_a_kind_of(Compliance)
end
end
it 'returns correctly ComplianceDetail nodes' do
parent = @ch_tree.send(:x_get_tree_roots, false).find { |x| x.compliance_details.present? }
kids = @ch_tree.send(:x_get_compliance_kids, parent, false)
expect(@ch_tree.send(:x_get_compliance_kids, parent, true)).to eq(2)
kids.each do |kid|
expect(kid).to be_a_kind_of(ComplianceDetail)
end
end
it 'returns empty node' do
parents = @ch_tree.send(:x_get_tree_roots, false)
parent = parents.find { |x| x.compliance_details == [] }
kid = @ch_tree.send(:x_get_compliance_kids, parent, false).first
expect(kid).to eq(:id => "#{parent.id}-nopol",
:text => "No Compliance Policies Found",
:selectable => false,
:icon => "fa fa-ban",
:tip => nil)
expect(kid).to be_a_kind_of(Hash)
expect(@ch_tree.send(:x_get_tree_custom_kids, kid, true)).to eq(0)
end
it 'returns Policy with multiple Conditions' do
grandparents = @ch_tree.send(:x_get_tree_roots, false)
grandparent = grandparents.find { |x| x.compliance_details.present? }
grandparent_id = "cm-#{grandparent.id}"
parents = @ch_tree.send(:x_get_compliance_kids, grandparent, false)
parent = parents.find { |x| x.miq_policy_id == 1234 }
kids = @ch_tree.send(:x_get_compliance_detail_kids, parent, false, [grandparent_id])
expect(@ch_tree.send(:x_get_compliance_detail_kids, parent, true, [grandparent_id])).to eq(2)
kids.each do |kid|
expect(kid).to be_a_kind_of(Hash)
end
end
end
end
| 53.492537 | 112 | 0.605469 |
33ac526db37944357e32d040782412d56a29cc84 | 942 | class HomeController < ApplicationController
load_and_authorize_resource
def index
total_var
if @total_answered.zero?
@question = Question.where(:id=>1).first
@users_question = UsersQuestion.new
b = @users_question.users_questions_answers.build
c=b.users_questions_answers_sub_questions.build
b.users_questions_answers_custom_answers.build
d=c.users_questions_answers_sub_questions_sub_answers.build
d.users_questions_answers_sub_questions_sub_answers_custom_answers.build
render template: 'questions/show'
else
if current_user.sex
female_only=false
else
female_only=[true,false]
end
@question_categories=QuestionCategory.where(:female_only=>female_only,:enable=>true)
@next_question_id=get_next_question_id
end
end
def no_auth; end
end
| 30.387097 | 94 | 0.680467 |
bf588a9c31c169cf6c5348b376ae73b5bec66db8 | 808 | RSpec.describe ToughLove do
it "has a version number" do
expect(ToughLove::VERSION).not_to be nil
end
it 'exists' do
expect(ToughLove).to_not be nil
end
it "returns some tough love" do
list = [ "Is that the best you can do?",
"Keep it up! If you can...",
"Have I told you how good you look today? Because you don't.",
"You can't and you won't.",
"Do your best. It still won't be enough.",
"Believe in yourself. Someone should.",
"If it was easy, everyone would do it. So they do.",
"Shine bright like a diamond! Right now you're still coal.",
"Dangggggg. You suck.",
"You can do better.",
"This is SO not cool.",
"You are amazing. But, seriously, try harder."]
expect(list).to include(ToughLove.inspire)
end
end
| 31.076923 | 68 | 0.627475 |
3974599b18a2a1f2ea7232a64745634941236574 | 268 | module AllSeeingPi
class Camera
attr_reader :script
def initialize
path = '../../../capture.sh'
@script = AllSeeingPi.config[:capture_script] || File.expand_path(path, __FILE__)
end
def capture
`#{script}`.strip
end
end
end
| 17.866667 | 87 | 0.626866 |
d5c8370e3e24128bc8770bd48115c9f229311c48 | 314 | # frozen_string_literal: true
module Stupidedi
module Versions
module ThirtyTen
module SyntaxNotes
P = Common::SyntaxNotes::P
R = Common::SyntaxNotes::R
E = Common::SyntaxNotes::E
C = Common::SyntaxNotes::C
L = Common::SyntaxNotes::L
end
end
end
end
| 20.933333 | 34 | 0.61465 |
f89e362b95748a13f324b8499ebc9097ae0f9d8e | 2,230 | class Goocanvas < Formula
desc "Canvas widget for GTK+ using the Cairo 2D library for drawing"
homepage "https://wiki.gnome.org/Projects/GooCanvas"
url "https://download.gnome.org/sources/goocanvas/2.0/goocanvas-2.0.4.tar.xz"
sha256 "c728e2b7d4425ae81b54e1e07a3d3c8a4bd6377a63cffa43006045bceaa92e90"
revision 2
bottle do
sha256 arm64_monterey: "82c8618cacc3ede0528d7b5642df008d85004ffcb2dca6161854191fc5f7305f"
sha256 arm64_big_sur: "afc6329ef248fba21b033b7f9e409112260e28f0c9964e5748df4bed40a0cdae"
sha256 monterey: "54771e536701697bd1e1225d41c2ac1cb74ab05fc5b7e0500699acb469e7b65d"
sha256 big_sur: "31471c7264bf173c9f82ba40daec0555403f9007cc8046d7bee5b2406bfeedae"
sha256 catalina: "ff71ce064b86b1e8973ee5c6aaebdbba6a1159614f5c425d83cc3fb6b00e8b97"
sha256 mojave: "b9d36364339793b428077bbc7735981f8cd33e681971653806dc574236382778"
sha256 high_sierra: "6822fe0a452809ce94bc1fd70fb32b024ad52702a56878db381b7dad2e05aa28"
sha256 sierra: "44b1bd9f058cd4fe112cd1022a0ad2daa93c7f849257ae57bc6d10f9c33e57de"
sha256 cellar: :any_skip_relocation, x86_64_linux: "df27306b70d7a3b4ff8ab489bf4d3fe4cf9c2e9d6e49589095b1c6b2c5926ca0"
end
depends_on "gobject-introspection" => :build
depends_on "pkg-config" => :build
depends_on "cairo"
depends_on "glib"
depends_on "gtk+3"
# Fix -flat_namespace being used on Big Sur and later.
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/03cf8088210822aa2c1ab544ed58ea04c897d9c4/libtool/configure-pre-0.4.2.418-big_sur.diff"
sha256 "83af02f2aa2b746bb7225872cab29a253264be49db0ecebb12f841562d9a2923"
end
def install
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}",
"--enable-introspection=yes",
"--disable-gtk-doc-html"
system "make", "install"
end
end
| 54.390244 | 154 | 0.673094 |
ff26531b7a2b80aff86b3c61471c2b1373855894 | 528 | cask "electrumsv" do
version "1.3.5"
sha256 "036c7b64667529c7e1d2fe2e9220eb6a6e059e4c80884eebbec623b6d31228fe"
# s3.us-east-2.amazonaws.com/electrumsv-downloads/ was verified as official when first introduced to the cask
url "https://s3.us-east-2.amazonaws.com/electrumsv-downloads/releases/#{version}/ElectrumSV-#{version}.dmg"
appcast "https://github.com/electrumsv/electrumsv/releases.atom"
name "ElectrumSV"
desc "Desktop wallet for Bitcoin SV"
homepage "https://electrumsv.io/"
app "ElectrumSV.app"
end
| 37.714286 | 111 | 0.772727 |
1c18311752510471c336de7460a568a3f861ef1f | 5,326 | # A connection pool allowing multi-threaded access to a pool of connections.
# This is the default connection pool used by Sequel.
class Sequel::ThreadedConnectionPool < Sequel::ConnectionPool
# The maximum number of connections this pool will create (per shard/server
# if sharding).
attr_reader :max_size
# An array of connections that are available for use by the pool.
attr_reader :available_connections
# A hash with thread keys and connection values for currently allocated
# connections.
attr_reader :allocated
  # The following additional options are respected:
  # * :max_connections - The maximum number of connections the connection pool
  #   will open (default 4)
  # * :pool_sleep_time - The amount of time in seconds to sleep before attempting
  #   to acquire a connection again (default 0.001)
  # * :pool_timeout - The number of seconds to wait to acquire a connection
  #   before raising a Sequel::PoolTimeout (default 5)
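  #
  # A minimal usage sketch (illustrative only; in normal use Sequel::Database
  # constructs the pool and supplies the connection-creating block):
  #
  #   pool = Sequel::ThreadedConnectionPool.new(:max_connections => 10,
  #                                             :pool_timeout => 2) { make_conn }
  #   pool.hold { |conn| conn.execute('SELECT 1') }
  #
  # Here `make_conn` stands in for whatever code opens a raw database connection.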
def initialize(opts = {}, &block)
super
@max_size = Integer(opts[:max_connections] || 4)
raise(Sequel::Error, ':max_connections must be positive') if @max_size < 1
@mutex = Mutex.new
@available_connections = []
@allocated = {}
@timeout = Integer(opts[:pool_timeout] || 5)
@sleep_time = Float(opts[:pool_sleep_time] || 0.001)
end
  # The total number of connections opened for the given server; this should
  # equal available_connections.length + allocated.length.
def size
@allocated.length + @available_connections.length
end
# Removes all connections currently available on all servers, optionally
# yielding each connection to the given block. This method has the effect of
# disconnecting from the database, assuming that no connections are currently
# being used.
#
# Once a connection is requested using #hold, the connection pool
# creates new connections to the database.
def disconnect(opts={}, &block)
block ||= @disconnection_proc
sync do
@available_connections.each{|conn| block.call(conn)} if block
@available_connections.clear
end
end
# Chooses the first available connection to the given server, or if none are
# available, creates a new connection. Passes the connection to the supplied
# block:
#
# pool.hold {|conn| conn.execute('DROP TABLE posts')}
#
# Pool#hold is re-entrant, meaning it can be called recursively in
# the same thread without blocking.
#
# If no connection is immediately available and the pool is already using the maximum
# number of connections, Pool#hold will block until a connection
# is available or the timeout expires. If the timeout expires before a
# connection can be acquired, a Sequel::PoolTimeout is
# raised.
def hold(server=nil)
t = Thread.current
if conn = owned_connection(t)
return yield(conn)
end
begin
unless conn = acquire(t)
time = Time.now
timeout = time + @timeout
sleep_time = @sleep_time
sleep sleep_time
until conn = acquire(t)
raise(::Sequel::PoolTimeout) if Time.now > timeout
sleep sleep_time
end
end
yield conn
rescue Sequel::DatabaseDisconnectError
oconn = conn
conn = nil
@disconnection_proc.call(oconn) if @disconnection_proc && oconn
@allocated.delete(t)
raise
ensure
sync{release(t)} if conn
end
end
private
# Assigns a connection to the supplied thread for the given server, if one
# is available. The calling code should NOT already have the mutex when
# calling this.
def acquire(thread)
sync do
if conn = available
@allocated[thread] = conn
end
end
end
# Returns an available connection to the given server. If no connection is
# available, tries to create a new connection. The calling code should already
# have the mutex before calling this.
def available
@available_connections.pop || make_new(DEFAULT_SERVER)
end
# Alias the default make_new method, so subclasses can call it directly.
alias default_make_new make_new
# Creates a new connection to the given server if the size of the pool for
# the server is less than the maximum size of the pool. The calling code
# should already have the mutex before calling this.
def make_new(server)
if (n = size) >= @max_size
@allocated.keys.each{|t| release(t) unless t.alive?}
n = nil
end
super if (n || size) < @max_size
end
# Returns the connection owned by the supplied thread for the given server,
# if any. The calling code should NOT already have the mutex before calling this.
def owned_connection(thread)
sync{@allocated[thread]}
end
# Releases the connection assigned to the supplied thread and server. If the
# server or connection given is scheduled for disconnection, remove the
# connection instead of releasing it back to the pool.
# The calling code should already have the mutex before calling this.
def release(thread)
@available_connections << @allocated.delete(thread)
end
# Yield to the block while inside the mutex. The calling code should NOT
# already have the mutex before calling this.
def sync
@mutex.synchronize{yield}
end
CONNECTION_POOL_MAP[[false, false]] = self
end
| 35.039474 | 87 | 0.706722 |
790e6ce8cd8db9eb5d6e4a1180a58b7a12f8fd5a | 6,135 | # frozen_string_literal: true
require "spec_helper"
RSpec.describe UuidStringList do
let(:connection) { described_class.connection }
let(:schema_cache) { connection.schema_cache }
let(:table_name) { described_class.table_name }
describe ".primary_key" do
subject { described_class.primary_key }
it { is_expected.to eq("id") }
end
describe ".create" do
let(:some_string) { "a" }
subject { described_class.create!(some_string: some_string) }
context "when partition key in list" do
its(:id) { is_expected.to be_a_uuid }
its(:some_string) { is_expected.to eq(some_string) }
end
context "when partition key outside list" do
let(:some_string) { "e" }
it "raises error" do
expect { subject }.to raise_error(ActiveRecord::StatementInvalid, /PG::CheckViolation/)
end
end
end
describe ".partitions" do
subject { described_class.partitions }
context "when query successful" do
it { is_expected.to contain_exactly("#{table_name}_a", "#{table_name}_b") }
end
context "when an error occurs" do
before { allow(PgParty.cache).to receive(:fetch_partitions).and_raise("boom") }
it { is_expected.to eq([]) }
end
end
describe ".create_partition" do
let(:values) { ["e", "f"] }
let(:child_table_name) { "#{table_name}_c" }
subject(:create_partition) { described_class.create_partition(values: values, name: child_table_name) }
subject(:partitions) { described_class.partitions }
subject(:child_table_exists) { schema_cache.data_source_exists?(child_table_name) }
before do
schema_cache.clear!
described_class.partitions
end
after { connection.drop_table(child_table_name) if child_table_exists }
context "when values do not overlap" do
it "returns table name and adds it to partition list" do
expect(create_partition).to eq(child_table_name)
expect(partitions).to contain_exactly(
"#{table_name}_a",
"#{table_name}_b",
"#{table_name}_c"
)
end
end
context "when name not provided" do
let(:child_table_name) { create_partition }
subject(:create_partition) { described_class.create_partition(values: values) }
it "returns table name and adds it to partition list" do
expect(create_partition).to match(/^#{table_name}_\w{7}$/)
expect(partitions).to contain_exactly(
"#{table_name}_a",
"#{table_name}_b",
child_table_name,
)
end
end
context "when values overlap" do
let(:values) { ["b", "c"] }
it "raises error and cleans up intermediate table" do
expect { create_partition }.to raise_error(ActiveRecord::StatementInvalid, /PG::InvalidObjectDefinition/)
expect(child_table_exists).to eq(false)
end
end
end
describe ".in_partition" do
let(:child_table_name) { "#{table_name}_a" }
subject { described_class.in_partition(child_table_name) }
its(:table_name) { is_expected.to eq(child_table_name) }
its(:name) { is_expected.to eq(described_class.name) }
its(:new) { is_expected.to be_an_instance_of(described_class) }
its(:allocate) { is_expected.to be_an_instance_of(described_class) }
describe "query methods" do
let!(:record_one) { described_class.create!(some_string: "a") }
let!(:record_two) { described_class.create!(some_string: "b") }
let!(:record_three) { described_class.create!(some_string: "d") }
describe ".all" do
subject { described_class.in_partition(child_table_name).all }
it { is_expected.to contain_exactly(record_one, record_two) }
end
describe ".where" do
subject { described_class.in_partition(child_table_name).where(id: record_one.id) }
it { is_expected.to contain_exactly(record_one) }
end
end
end
describe ".partition_key_in" do
let(:values) { ["a", "b"] }
let!(:record_one) { described_class.create!(some_string: "a") }
let!(:record_two) { described_class.create!(some_string: "b") }
let!(:record_three) { described_class.create!(some_string: "d") }
subject { described_class.partition_key_in(values) }
context "when spanning a single partition" do
it { is_expected.to contain_exactly(record_one, record_two) }
end
context "when spanning multiple partitions" do
let(:values) { ["a", "b", "c", "d"] }
it { is_expected.to contain_exactly(record_one, record_two, record_three) }
end
context "when chaining methods" do
subject { described_class.partition_key_in(values).where(some_string: "a") }
it { is_expected.to contain_exactly(record_one) }
end
end
describe ".partition_key_eq" do
let(:partition_key) { "a" }
let!(:record_one) { described_class.create!(some_string: "a") }
let!(:record_two) { described_class.create!(some_string: "c") }
subject { described_class.partition_key_eq(partition_key) }
context "when partition key in first partition" do
it { is_expected.to contain_exactly(record_one) }
end
context "when partition key in second partition" do
let(:partition_key) { "c" }
it { is_expected.to contain_exactly(record_two) }
end
context "when chaining methods" do
subject do
described_class
.in_partition("#{table_name}_b")
.unscoped
.partition_key_eq(partition_key)
end
it { is_expected.to be_empty }
end
context "when table is aliased" do
subject do
described_class
.select("*")
.from(described_class.arel_table.alias)
.partition_key_eq(partition_key)
end
it { is_expected.to contain_exactly(record_one) }
end
context "when table alias not resolvable" do
subject do
described_class
.select("*")
.from("garbage")
.partition_key_eq(partition_key)
end
it { expect { subject }.to raise_error("could not find arel table in current scope") }
end
end
end
| 29.354067 | 113 | 0.659332 |
085631f315b0bbae432d98bb9d1ed68c7668623e | 172 | require 'spec_helper'
describe Nostalgia::Simulations do
it "should respond to simulate" do
Nostalgia::Simulations.respond_to?(:simulate).must_equal true
end
end
| 19.111111 | 65 | 0.773256 |
e88be5e3364669d1702ff085e7c494174e4f124c | 16,285 | require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper'))
RSpec.describe HTTParty::ConnectionAdapter do
describe "initialization" do
let(:uri) { URI 'http://www.google.com' }
it "takes a URI as input" do
HTTParty::ConnectionAdapter.new(uri)
end
it "raises an ArgumentError if the uri is nil" do
expect { HTTParty::ConnectionAdapter.new(nil) }.to raise_error ArgumentError
end
it "raises an ArgumentError if the uri is a String" do
expect { HTTParty::ConnectionAdapter.new('http://www.google.com') }.to raise_error ArgumentError
end
it "sets the uri" do
adapter = HTTParty::ConnectionAdapter.new(uri)
expect(adapter.uri).to be uri
end
it "also accepts an optional options hash" do
HTTParty::ConnectionAdapter.new(uri, {})
end
it "sets the options" do
options = {foo: :bar}
adapter = HTTParty::ConnectionAdapter.new(uri, options)
expect(adapter.options.keys).to include(:verify, :verify_peer, :foo)
end
end
describe ".call" do
let(:uri) { URI 'http://www.google.com' }
let(:options) { { foo: :bar } }
it "generates an HTTParty::ConnectionAdapter instance with the given uri and options" do
expect(HTTParty::ConnectionAdapter).to receive(:new).with(uri, options).and_return(double(connection: nil))
HTTParty::ConnectionAdapter.call(uri, options)
end
it "calls #connection on the connection adapter" do
adapter = double('Adapter')
connection = double('Connection')
expect(adapter).to receive(:connection).and_return(connection)
allow(HTTParty::ConnectionAdapter).to receive_messages(new: adapter)
expect(HTTParty::ConnectionAdapter.call(uri, options)).to be connection
end
end
describe '#connection' do
let(:uri) { URI 'http://www.google.com' }
let(:options) { Hash.new }
let(:adapter) { HTTParty::ConnectionAdapter.new(uri, options) }
describe "the resulting connection" do
subject { adapter.connection }
it { is_expected.to be_an_instance_of Net::HTTP }
context "using port 80" do
let(:uri) { URI 'http://foobar.com' }
it { is_expected.not_to use_ssl }
end
context "when dealing with ssl" do
let(:uri) { URI 'https://foobar.com' }
context "uses the system cert_store, by default" do
let!(:system_cert_store) do
system_cert_store = double('default_cert_store')
expect(system_cert_store).to receive(:set_default_paths)
expect(OpenSSL::X509::Store).to receive(:new).and_return(system_cert_store)
system_cert_store
end
it { is_expected.to use_cert_store(system_cert_store) }
end
context "should use the specified cert store, when one is given" do
let(:custom_cert_store) { double('custom_cert_store') }
let(:options) { {cert_store: custom_cert_store} }
it { is_expected.to use_cert_store(custom_cert_store) }
end
context "using port 443 for ssl" do
let(:uri) { URI 'https://api.foo.com/v1:443' }
it { is_expected.to use_ssl }
end
context "https scheme with default port" do
it { is_expected.to use_ssl }
end
context "https scheme with non-standard port" do
let(:uri) { URI 'https://foobar.com:123456' }
it { is_expected.to use_ssl }
end
context "when ssl version is set" do
let(:options) { {ssl_version: :TLSv1} }
it "sets ssl version" do
expect(subject.ssl_version).to eq(:TLSv1)
end
end if RUBY_VERSION > '1.9'
end
context "when dealing with IPv6" do
let(:uri) { URI 'http://[fd00::1]' }
it "strips brackets from the address" do
expect(subject.address).to eq('fd00::1')
end
end
context "specifying ciphers" do
let(:options) { {ciphers: 'RC4-SHA' } }
it "should set the ciphers on the connection" do
expect(subject.ciphers).to eq('RC4-SHA')
end
end if RUBY_VERSION > '1.9'
context "when timeout is not set" do
it "doesn't set the timeout" do
http = double(
"http",
:null_object => true,
:use_ssl= => false,
:use_ssl? => false
)
expect(http).not_to receive(:open_timeout=)
expect(http).not_to receive(:read_timeout=)
allow(Net::HTTP).to receive_messages(new: http)
adapter.connection
end
end
context "when setting timeout" do
context "to 5 seconds" do
let(:options) { {timeout: 5} }
describe '#open_timeout' do
subject { super().open_timeout }
it { is_expected.to eq(5) }
end
describe '#read_timeout' do
subject { super().read_timeout }
it { is_expected.to eq(5) }
end
end
context "and timeout is a string" do
let(:options) { {timeout: "five seconds"} }
it "doesn't set the timeout" do
http = double(
"http",
:null_object => true,
:use_ssl= => false,
:use_ssl? => false
)
expect(http).not_to receive(:open_timeout=)
expect(http).not_to receive(:read_timeout=)
allow(Net::HTTP).to receive_messages(new: http)
adapter.connection
end
end
end
context "when timeout is not set and read_timeout is set to 6 seconds" do
let(:options) { {read_timeout: 6} }
describe '#read_timeout' do
subject { super().read_timeout }
it { is_expected.to eq(6) }
end
it "should not set the open_timeout" do
http = double(
"http",
:null_object => true,
:use_ssl= => false,
:use_ssl? => false,
:read_timeout= => 0
)
expect(http).not_to receive(:open_timeout=)
allow(Net::HTTP).to receive_messages(new: http)
adapter.connection
end
end
context "when timeout is set and read_timeout is set to 6 seconds" do
let(:options) { {timeout: 5, read_timeout: 6} }
describe '#open_timeout' do
subject { super().open_timeout }
it { is_expected.to eq(5) }
end
describe '#read_timeout' do
subject { super().read_timeout }
it { is_expected.to eq(6) }
end
it "should override the timeout option" do
http = double(
"http",
:null_object => true,
:use_ssl= => false,
:use_ssl? => false,
:read_timeout= => 0,
:open_timeout= => 0
)
expect(http).to receive(:open_timeout=)
expect(http).to receive(:read_timeout=).twice
allow(Net::HTTP).to receive_messages(new: http)
adapter.connection
end
end
context "when timeout is not set and open_timeout is set to 7 seconds" do
let(:options) { {open_timeout: 7} }
describe '#open_timeout' do
subject { super().open_timeout }
it { is_expected.to eq(7) }
end
it "should not set the read_timeout" do
http = double(
"http",
:null_object => true,
:use_ssl= => false,
:use_ssl? => false,
:open_timeout= => 0
)
expect(http).not_to receive(:read_timeout=)
allow(Net::HTTP).to receive_messages(new: http)
adapter.connection
end
end
context "when timeout is set and open_timeout is set to 7 seconds" do
let(:options) { {timeout: 5, open_timeout: 7} }
describe '#open_timeout' do
subject { super().open_timeout }
it { is_expected.to eq(7) }
end
describe '#read_timeout' do
subject { super().read_timeout }
it { is_expected.to eq(5) }
end
it "should override the timeout option" do
http = double(
"http",
:null_object => true,
:use_ssl= => false,
:use_ssl? => false,
:read_timeout= => 0,
:open_timeout= => 0
)
expect(http).to receive(:open_timeout=).twice
expect(http).to receive(:read_timeout=)
allow(Net::HTTP).to receive_messages(new: http)
adapter.connection
end
end
context "when debug_output" do
let(:http) { Net::HTTP.new(uri) }
before do
allow(Net::HTTP).to receive_messages(new: http)
end
context "is set to $stderr" do
let(:options) { {debug_output: $stderr} }
it "has debug output set" do
expect(http).to receive(:set_debug_output).with($stderr)
adapter.connection
end
end
context "is not provided" do
it "does not set_debug_output" do
expect(http).not_to receive(:set_debug_output)
adapter.connection
end
end
end
context 'when providing proxy address and port' do
let(:options) { {http_proxyaddr: '1.2.3.4', http_proxyport: 8080} }
it { is_expected.to be_a_proxy }
describe '#proxy_address' do
subject { super().proxy_address }
it { is_expected.to eq('1.2.3.4') }
end
describe '#proxy_port' do
subject { super().proxy_port }
it { is_expected.to eq(8080) }
end
context 'as well as proxy user and password' do
let(:options) do
{http_proxyaddr: '1.2.3.4', http_proxyport: 8080,
http_proxyuser: 'user', http_proxypass: 'pass'}
end
describe '#proxy_user' do
subject { super().proxy_user }
it { is_expected.to eq('user') }
end
describe '#proxy_pass' do
subject { super().proxy_pass }
it { is_expected.to eq('pass') }
end
end
end
context 'when providing nil as proxy address' do
let(:uri) { URI 'http://noproxytest.com' }
let(:options) { {http_proxyaddr: nil} }
it { is_expected.not_to be_a_proxy }
it "does pass nil proxy parameters to the connection, this forces to not use a proxy" do
http = Net::HTTP.new("noproxytest.com")
expect(Net::HTTP).to receive(:new).once.with("noproxytest.com", 80, nil, nil, nil, nil).and_return(http)
adapter.connection
end
end
context 'when not providing a proxy address' do
let(:uri) { URI 'http://proxytest.com' }
it "does not pass any proxy parameters to the connection" do
http = Net::HTTP.new("proxytest.com")
expect(Net::HTTP).to receive(:new).once.with("proxytest.com", 80).and_return(http)
adapter.connection
end
end
context 'when providing a local bind address and port' do
let(:options) { {local_host: "127.0.0.1", local_port: 12345 } }
describe '#local_host' do
subject { super().local_host }
it { is_expected.to eq('127.0.0.1') }
end
describe '#local_port' do
subject { super().local_port }
it { is_expected.to eq(12345) }
end
end if RUBY_VERSION >= '2.0'
context "when providing PEM certificates" do
let(:pem) { :pem_contents }
let(:options) { {pem: pem, pem_password: "password"} }
context "when scheme is https" do
let(:uri) { URI 'https://google.com' }
let(:cert) { double("OpenSSL::X509::Certificate") }
let(:key) { double("OpenSSL::PKey::RSA") }
before do
expect(OpenSSL::X509::Certificate).to receive(:new).with(pem).and_return(cert)
expect(OpenSSL::PKey::RSA).to receive(:new).with(pem, "password").and_return(key)
end
it "uses the provided PEM certificate" do
expect(subject.cert).to eq(cert)
expect(subject.key).to eq(key)
end
it "will verify the certificate" do
expect(subject.verify_mode).to eq(OpenSSL::SSL::VERIFY_PEER)
end
context "when options include verify=false" do
let(:options) { {pem: pem, pem_password: "password", verify: false} }
it "should not verify the certificate" do
expect(subject.verify_mode).to eq(OpenSSL::SSL::VERIFY_NONE)
end
end
context "when options include verify_peer=false" do
let(:options) { {pem: pem, pem_password: "password", verify_peer: false} }
it "should not verify the certificate" do
expect(subject.verify_mode).to eq(OpenSSL::SSL::VERIFY_NONE)
end
end
end
context "when scheme is not https" do
let(:uri) { URI 'http://google.com' }
let(:http) { Net::HTTP.new(uri) }
before do
allow(Net::HTTP).to receive_messages(new: http)
expect(OpenSSL::X509::Certificate).not_to receive(:new).with(pem)
expect(OpenSSL::PKey::RSA).not_to receive(:new).with(pem, "password")
expect(http).not_to receive(:cert=)
expect(http).not_to receive(:key=)
end
it "has no PEM certificate " do
expect(subject.cert).to be_nil
expect(subject.key).to be_nil
end
end
end
context "when providing PKCS12 certificates" do
let(:p12) { :p12_contents }
let(:options) { {p12: p12, p12_password: "password"} }
context "when scheme is https" do
let(:uri) { URI 'https://google.com' }
let(:pkcs12) { double("OpenSSL::PKCS12", certificate: cert, key: key) }
let(:cert) { double("OpenSSL::X509::Certificate") }
let(:key) { double("OpenSSL::PKey::RSA") }
before do
expect(OpenSSL::PKCS12).to receive(:new).with(p12, "password").and_return(pkcs12)
end
it "uses the provided P12 certificate " do
expect(subject.cert).to eq(cert)
expect(subject.key).to eq(key)
end
it "will verify the certificate" do
expect(subject.verify_mode).to eq(OpenSSL::SSL::VERIFY_PEER)
end
context "when options include verify=false" do
let(:options) { {p12: p12, p12_password: "password", verify: false} }
it "should not verify the certificate" do
expect(subject.verify_mode).to eq(OpenSSL::SSL::VERIFY_NONE)
end
end
context "when options include verify_peer=false" do
let(:options) { {p12: p12, p12_password: "password", verify_peer: false} }
it "should not verify the certificate" do
expect(subject.verify_mode).to eq(OpenSSL::SSL::VERIFY_NONE)
end
end
end
context "when scheme is not https" do
let(:uri) { URI 'http://google.com' }
let(:http) { Net::HTTP.new(uri) }
before do
allow(Net::HTTP).to receive_messages(new: http)
expect(OpenSSL::PKCS12).not_to receive(:new).with(p12, "password")
expect(http).not_to receive(:cert=)
expect(http).not_to receive(:key=)
end
it "has no PKCS12 certificate " do
expect(subject.cert).to be_nil
expect(subject.key).to be_nil
end
end
end
context "when uri port is not defined" do
context "falls back to 80 port on http" do
let(:uri) { URI 'http://foobar.com' }
before { allow(uri).to receive(:port).and_return(nil) }
it { expect(subject.port).to be 80 }
end
context "falls back to 443 port on https" do
let(:uri) { URI 'https://foobar.com' }
before { allow(uri).to receive(:port).and_return(nil) }
it { expect(subject.port).to be 443 }
end
end
end
end
end
| 32.635271 | 114 | 0.566779 |
e9efec1f68debc8023a3ecdd60ded1604566afe3 | 82,391 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::SecretsManager
module Types
# @note When making an API call, you may pass CancelRotateSecretRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# }
#
# @!attribute [rw] secret_id
# The ARN or name of the secret.
#
# For an ARN, we recommend that you specify a complete ARN rather than
# a partial ARN.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/CancelRotateSecretRequest AWS API Documentation
#
class CancelRotateSecretRequest < Struct.new(
:secret_id)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the secret.
# @return [String]
#
# @!attribute [rw] name
# The name of the secret.
# @return [String]
#
# @!attribute [rw] version_id
# The unique identifier of the version of the secret created during
# the rotation. This version might not be complete, and should be
# evaluated for possible deletion. We recommend that you remove the
# `VersionStage` value `AWSPENDING` from this version so that Secrets
# Manager can delete it. Failing to clean up a cancelled rotation can
# block you from starting future rotations.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/CancelRotateSecretResponse AWS API Documentation
#
class CancelRotateSecretResponse < Struct.new(
:arn,
:name,
:version_id)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass CreateSecretRequest
# data as a hash:
#
# {
# name: "NameType", # required
# client_request_token: "ClientRequestTokenType",
# description: "DescriptionType",
# kms_key_id: "KmsKeyIdType",
# secret_binary: "data",
# secret_string: "SecretStringType",
# tags: [
# {
# key: "TagKeyType",
# value: "TagValueType",
# },
# ],
# add_replica_regions: [
# {
# region: "RegionType",
# kms_key_id: "KmsKeyIdType",
# },
# ],
# force_overwrite_replica_secret: false,
# }
#
# @!attribute [rw] name
# The name of the new secret.
#
# The secret name can contain ASCII letters, numbers, and the
# following characters: /\_+=.@-
#
# Do not end your secret name with a hyphen followed by six
# characters. If you do so, you risk confusion and unexpected results
# when searching for a secret by partial ARN. Secrets Manager
# automatically adds a hyphen and six random characters after the
# secret name at the end of the ARN.
# @return [String]
#
# @!attribute [rw] client_request_token
# If you include `SecretString` or `SecretBinary`, then Secrets
# Manager creates an initial version for the secret, and this
# parameter specifies the unique identifier for the new version.
#
# <note markdown="1"> If you use the Amazon Web Services CLI or one of the Amazon Web
# Services SDKs to call this operation, then you can leave this
# parameter empty. The CLI or SDK generates a random UUID for you and
# includes it as the value for this parameter in the request. If you
# don't use the SDK and instead generate a raw HTTP request to the
# Secrets Manager service endpoint, then you must generate a
# `ClientRequestToken` yourself for the new version and include the
# value in the request.
#
# </note>
#
# This value helps ensure idempotency. Secrets Manager uses this value
# to prevent the accidental creation of duplicate versions if there
# are failures and retries during a rotation. We recommend that you
# generate a [UUID-type][1] value to ensure uniqueness of your
# versions within the specified secret.
#
# * If the `ClientRequestToken` value isn't already associated with a
# version of the secret then a new version of the secret is created.
#
# * If a version with this value already exists and the version
# `SecretString` and `SecretBinary` values are the same as those in
# the request, then the request is ignored.
#
# * If a version with this value already exists and that version's
# `SecretString` and `SecretBinary` values are different from those
# in the request, then the request fails because you cannot modify
# an existing version. Instead, use PutSecretValue to create a new
# version.
#
# This value becomes the `VersionId` of the new version.
#
# **A suitable default value is auto-generated.** You should normally
# not need to pass this option.
#
#
#
# [1]: https://wikipedia.org/wiki/Universally_unique_identifier
# @return [String]
#
# @!attribute [rw] description
# The description of the secret.
# @return [String]
#
# @!attribute [rw] kms_key_id
# The ARN, key ID, or alias of the KMS key that Secrets Manager uses
# to encrypt the secret value in the secret.
#
# To use a KMS key in a different account, use the key ARN or the
# alias ARN.
#
# If you don't specify this value, then Secrets Manager uses the key
# `aws/secretsmanager`. If that key doesn't yet exist, then Secrets
# Manager creates it for you automatically the first time it encrypts
# the secret value.
#
# If the secret is in a different Amazon Web Services account from the
# credentials calling the API, then you can't use
# `aws/secretsmanager` to encrypt the secret, and you must create and
# use a customer managed KMS key.
# @return [String]
#
# @!attribute [rw] secret_binary
# The binary data to encrypt and store in the new version of the
# secret. We recommend that you store your binary data in a file and
# then pass the contents of the file as a parameter.
#
# Either `SecretString` or `SecretBinary` must have a value, but not
# both.
#
# This parameter is not available in the Secrets Manager console.
# @return [String]
#
# @!attribute [rw] secret_string
# The text data to encrypt and store in this new version of the
# secret. We recommend you use a JSON structure of key/value pairs for
# your secret value.
#
# Either `SecretString` or `SecretBinary` must have a value, but not
# both.
#
# If you create a secret by using the Secrets Manager console then
# Secrets Manager puts the protected secret text in only the
# `SecretString` parameter. The Secrets Manager console stores the
# information as a JSON structure of key/value pairs that a Lambda
# rotation function can parse.
# @return [String]
#
# @!attribute [rw] tags
# A list of tags to attach to the secret. Each tag is a key and value
# pair of strings in a JSON text string, for example:
#
# `[\{"Key":"CostCenter","Value":"12345"\},\{"Key":"environment","Value":"production"\}]`
#
# Secrets Manager tag key names are case sensitive. A tag with the key
# "ABC" is a different tag from one with key "abc".
#
# If you check tags in permissions policies as part of your security
# strategy, then adding or removing a tag can change permissions. If
# the completion of this operation would result in you losing your
# permissions for this secret, then Secrets Manager blocks the
# operation and returns an `Access Denied` error. For more
# information, see [Control access to secrets using tags][1] and
# [Limit access to identities with tags that match secrets' tags][2].
#
# For information about how to format a JSON parameter for the various
# command line tool environments, see [Using JSON for Parameters][3].
# If your command-line tool or SDK requires quotation marks around the
# parameter, you should use single quotes to avoid confusion with the
# double quotes required in the JSON text.
#
# The following restrictions apply to tags:
#
# * Maximum number of tags per secret: 50
#
# * Maximum key length: 127 Unicode characters in UTF-8
#
# * Maximum value length: 255 Unicode characters in UTF-8
#
# * Tag keys and values are case sensitive.
#
# * Do not use the `aws:` prefix in your tag names or values because
# Amazon Web Services reserves it for Amazon Web Services use. You
# can't edit or delete tag names or values with this prefix. Tags
# with this prefix do not count against your tags per secret limit.
#
# * If you use your tagging schema across multiple services and
# resources, other services might have restrictions on allowed
# characters. Generally allowed characters: letters, spaces, and
# numbers representable in UTF-8, plus the following special
# characters: + - = . \_ : / @.
#
#
#
# [1]: https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access_examples.html#tag-secrets-abac
# [2]: https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access_examples.html#auth-and-access_tags2
# [3]: https://docs.aws.amazon.com/cli/latest/userguide/cli-using-param.html#cli-using-param-json
# @return [Array<Types::Tag>]
#
# @!attribute [rw] add_replica_regions
# A list of Regions and KMS keys to replicate secrets.
# @return [Array<Types::ReplicaRegionType>]
#
# @!attribute [rw] force_overwrite_replica_secret
# Specifies whether to overwrite a secret with the same name in the
# destination Region.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/CreateSecretRequest AWS API Documentation
#
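    # A minimal usage sketch (illustrative, not part of the generated API): it
    # assumes a configured Aws::SecretsManager::Client named `client`, which
    # builds this request struct from a plain hash:
    #
    #   client.create_secret(
    #     name: "prod/my-app/db-credentials",
    #     secret_string: '{"username":"admin","password":"EXAMPLE"}'
    #   )
    #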
class CreateSecretRequest < Struct.new(
:name,
:client_request_token,
:description,
:kms_key_id,
:secret_binary,
:secret_string,
:tags,
:add_replica_regions,
:force_overwrite_replica_secret)
SENSITIVE = [:secret_binary, :secret_string]
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the new secret. The ARN includes the name of the secret
# followed by six random characters. This ensures that if you create a
# new secret with the same name as a deleted secret, then users with
# access to the old secret don't get access to the new secret because
# the ARNs are different.
# @return [String]
#
# @!attribute [rw] name
# The name of the new secret.
# @return [String]
#
# @!attribute [rw] version_id
# The unique identifier associated with the version of the new secret.
# @return [String]
#
# @!attribute [rw] replication_status
# A list of the replicas of this secret and their status:
#
# * `Failed`, which indicates that the replica was not created.
#
# * `InProgress`, which indicates that Secrets Manager is in the
# process of creating the replica.
#
# * `InSync`, which indicates that the replica was created.
# @return [Array<Types::ReplicationStatusType>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/CreateSecretResponse AWS API Documentation
#
class CreateSecretResponse < Struct.new(
:arn,
:name,
:version_id,
:replication_status)
SENSITIVE = []
include Aws::Structure
end
# Secrets Manager can't decrypt the protected secret text using the
# provided KMS key.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/DecryptionFailure AWS API Documentation
#
class DecryptionFailure < Struct.new(
:message)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass DeleteResourcePolicyRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# }
#
# @!attribute [rw] secret_id
# The ARN or name of the secret to delete the attached resource-based
# policy for.
#
# For an ARN, we recommend that you specify a complete ARN rather than
# a partial ARN.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/DeleteResourcePolicyRequest AWS API Documentation
#
class DeleteResourcePolicyRequest < Struct.new(
:secret_id)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the secret that the resource-based policy was deleted
# for.
# @return [String]
#
# @!attribute [rw] name
# The name of the secret that the resource-based policy was deleted
# for.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/DeleteResourcePolicyResponse AWS API Documentation
#
class DeleteResourcePolicyResponse < Struct.new(
:arn,
:name)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass DeleteSecretRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# recovery_window_in_days: 1,
# force_delete_without_recovery: false,
# }
#
# @!attribute [rw] secret_id
# The ARN or name of the secret to delete.
#
# For an ARN, we recommend that you specify a complete ARN rather than
# a partial ARN.
# @return [String]
#
# @!attribute [rw] recovery_window_in_days
# The number of days from 7 to 30 that Secrets Manager waits before
# permanently deleting the secret. You can't use both this parameter
# and `ForceDeleteWithoutRecovery` in the same call. If you don't use
# either, then Secrets Manager defaults to a 30 day recovery window.
# @return [Integer]
#
# @!attribute [rw] force_delete_without_recovery
# Specifies whether to delete the secret without any recovery window.
# You can't use both this parameter and `RecoveryWindowInDays` in the
# same call. If you don't use either, then Secrets Manager defaults
# to a 30 day recovery window.
#
# Secrets Manager performs the actual deletion with an asynchronous
# background process, so there might be a short delay before the
# secret is permanently deleted. If you delete a secret and then
# immediately create a secret with the same name, use appropriate back
# off and retry logic.
#
# Use this parameter with caution. This parameter causes the operation
# to skip the normal recovery window before the permanent deletion
# that Secrets Manager would normally impose with the
# `RecoveryWindowInDays` parameter. If you delete a secret with the
    #   `ForceDeleteWithoutRecovery` parameter, then you have no opportunity
# to recover the secret. You lose the secret permanently.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/DeleteSecretRequest AWS API Documentation
#
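    # A minimal usage sketch (illustrative, not part of the generated API): it
    # assumes a configured Aws::SecretsManager::Client named `client` and uses
    # the recovery window described above:
    #
    #   client.delete_secret(
    #     secret_id: "prod/my-app/db-credentials",
    #     recovery_window_in_days: 7
    #   )
    #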
class DeleteSecretRequest < Struct.new(
:secret_id,
:recovery_window_in_days,
:force_delete_without_recovery)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the secret.
# @return [String]
#
# @!attribute [rw] name
# The name of the secret.
# @return [String]
#
# @!attribute [rw] deletion_date
    #   The date and time after which Secrets Manager can permanently delete
    #   this secret, and it can no longer be restored.
# This value is the date and time of the delete request plus the
# number of days in `RecoveryWindowInDays`.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/DeleteSecretResponse AWS API Documentation
#
class DeleteSecretResponse < Struct.new(
:arn,
:name,
:deletion_date)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass DescribeSecretRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# }
#
# @!attribute [rw] secret_id
# The ARN or name of the secret.
#
# For an ARN, we recommend that you specify a complete ARN rather than
# a partial ARN.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/DescribeSecretRequest AWS API Documentation
#
class DescribeSecretRequest < Struct.new(
:secret_id)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the secret.
# @return [String]
#
# @!attribute [rw] name
# The name of the secret.
# @return [String]
#
# @!attribute [rw] description
# The description of the secret.
# @return [String]
#
# @!attribute [rw] kms_key_id
# The ARN of the KMS key that Secrets Manager uses to encrypt the
# secret value. If the secret is encrypted with the Amazon Web
# Services managed key `aws/secretsmanager`, this field is omitted.
# @return [String]
#
# @!attribute [rw] rotation_enabled
# Specifies whether automatic rotation is turned on for this secret.
#
# To turn on rotation, use RotateSecret. To turn off rotation, use
# CancelRotateSecret.
# @return [Boolean]
#
# @!attribute [rw] rotation_lambda_arn
# The ARN of the Lambda function that Secrets Manager invokes to
# rotate the secret.
# @return [String]
#
# @!attribute [rw] rotation_rules
# The rotation schedule and Lambda function for this secret. If the
# secret previously had rotation turned on, but it is now turned off,
# this field shows the previous rotation schedule and rotation
# function. If the secret never had rotation turned on, this field is
# omitted.
# @return [Types::RotationRulesType]
#
# @!attribute [rw] last_rotated_date
# The last date and time that Secrets Manager rotated the secret. If
# the secret isn't configured for rotation, Secrets Manager returns
# null.
# @return [Time]
#
# @!attribute [rw] last_changed_date
# The last date and time that this secret was modified in any way.
# @return [Time]
#
# @!attribute [rw] last_accessed_date
# The last date that the secret value was retrieved. This value does
# not include the time. This field is omitted if the secret has never
# been retrieved.
# @return [Time]
#
# @!attribute [rw] deleted_date
# The date the secret is scheduled for deletion. If it is not
# scheduled for deletion, this field is omitted. When you delete a
# secret, Secrets Manager requires a recovery window of at least 7
# days before deleting the secret. Some time after the deleted date,
# Secrets Manager deletes the secret, including all of its versions.
#
# If a secret is scheduled for deletion, then its details, including
# the encrypted secret value, is not accessible. To cancel a scheduled
# deletion and restore access to the secret, use RestoreSecret.
# @return [Time]
#
# @!attribute [rw] tags
# The list of tags attached to the secret. To add tags to a secret,
# use TagResource. To remove tags, use UntagResource.
# @return [Array<Types::Tag>]
#
# @!attribute [rw] version_ids_to_stages
# A list of the versions of the secret that have staging labels
# attached. Versions that don't have staging labels are considered
# deprecated and Secrets Manager can delete them.
#
# Secrets Manager uses staging labels to indicate the status of a
# secret version during rotation. The three staging labels for
# rotation are:
#
# * `AWSCURRENT`, which indicates the current version of the secret.
#
# * `AWSPENDING`, which indicates the version of the secret that
# contains new secret information that will become the next current
# version when rotation finishes.
#
# During rotation, Secrets Manager creates an `AWSPENDING` version
# ID before creating the new secret version. To check if a secret
# version exists, call GetSecretValue.
#
# * `AWSPREVIOUS`, which indicates the previous current version of the
# secret. You can use this as the *last known good* version.
#
# For more information about rotation and staging labels, see [How
# rotation works][1].
#
#
#
# [1]: https://docs.aws.amazon.com/secretsmanager/latest/userguide/rotate-secrets_how.html
# @return [Hash<String,Array<String>>]
#
# @!attribute [rw] owning_service
# The name of the service that created this secret.
# @return [String]
#
# @!attribute [rw] created_date
# The date the secret was created.
# @return [Time]
#
# @!attribute [rw] primary_region
# The Region the secret is in. If a secret is replicated to other
# Regions, the replicas are listed in `ReplicationStatus`.
# @return [String]
#
# @!attribute [rw] replication_status
# A list of the replicas of this secret and their status:
#
# * `Failed`, which indicates that the replica was not created.
#
# * `InProgress`, which indicates that Secrets Manager is in the
# process of creating the replica.
#
# * `InSync`, which indicates that the replica was created.
# @return [Array<Types::ReplicationStatusType>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/DescribeSecretResponse AWS API Documentation
#
class DescribeSecretResponse < Struct.new(
:arn,
:name,
:description,
:kms_key_id,
:rotation_enabled,
:rotation_lambda_arn,
:rotation_rules,
:last_rotated_date,
:last_changed_date,
:last_accessed_date,
:deleted_date,
:tags,
:version_ids_to_stages,
:owning_service,
:created_date,
:primary_region,
:replication_status)
SENSITIVE = []
include Aws::Structure
end
# Secrets Manager can't encrypt the protected secret text using the
# provided KMS key. Check that the KMS key is available, enabled, and
# not in an invalid state. For more information, see [Key state: Effect
# on your KMS key][1].
#
#
#
# [1]: https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/EncryptionFailure AWS API Documentation
#
class EncryptionFailure < Struct.new(
:message)
SENSITIVE = []
include Aws::Structure
end
# Allows you to add filters when you use the search function in Secrets
# Manager. For more information, see [Find secrets in Secrets
# Manager][1].
#
#
#
# [1]: https://docs.aws.amazon.com/secretsmanager/latest/userguide/manage_search-secret.html
#
# @note When making an API call, you may pass Filter
# data as a hash:
#
# {
# key: "description", # accepts description, name, tag-key, tag-value, primary-region, all
# values: ["FilterValueStringType"],
# }
#
# @!attribute [rw] key
# The following are keys you can use:
#
# * **description**\: Prefix match, not case-sensitive.
#
# * **name**\: Prefix match, case-sensitive.
#
# * **tag-key**\: Prefix match, case-sensitive.
#
# * **tag-value**\: Prefix match, case-sensitive.
#
# * **primary-region**\: Prefix match, case-sensitive.
#
# * **all**\: Breaks the filter value string into words and then
# searches all attributes for matches. Not case-sensitive.
# @return [String]
#
# @!attribute [rw] values
# The keyword to filter for.
#
# You can prefix your search value with an exclamation mark (`!`) in
# order to perform negation filters.
# @return [Array<String>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/Filter AWS API Documentation
#
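    # A minimal usage sketch (illustrative, not part of the generated API): it
    # assumes a configured Aws::SecretsManager::Client named `client` and
    # filters the secret list by name prefix:
    #
    #   client.list_secrets(
    #     filters: [{ key: "name", values: ["prod/"] }]
    #   )
    #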
class Filter < Struct.new(
:key,
:values)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass GetRandomPasswordRequest
# data as a hash:
#
# {
# password_length: 1,
# exclude_characters: "ExcludeCharactersType",
# exclude_numbers: false,
# exclude_punctuation: false,
# exclude_uppercase: false,
# exclude_lowercase: false,
# include_space: false,
# require_each_included_type: false,
# }
#
# @!attribute [rw] password_length
# The length of the password. If you don't include this parameter,
# the default length is 32 characters.
# @return [Integer]
#
# @!attribute [rw] exclude_characters
# A string of the characters that you don't want in the password.
# @return [String]
#
# @!attribute [rw] exclude_numbers
# Specifies whether to exclude numbers from the password. If you
# don't include this switch, the password can contain numbers.
# @return [Boolean]
#
# @!attribute [rw] exclude_punctuation
# Specifies whether to exclude the following punctuation characters
# from the password: `` ! " # $ % & ' ( ) * + , - . / : ; < = > ? @ [
# \ ] ^ _ ` \{ | \} ~ ``. If you don't include this switch, the
# password can contain punctuation.
# @return [Boolean]
#
# @!attribute [rw] exclude_uppercase
# Specifies whether to exclude uppercase letters from the password. If
# you don't include this switch, the password can contain uppercase
# letters.
# @return [Boolean]
#
# @!attribute [rw] exclude_lowercase
# Specifies whether to exclude lowercase letters from the password. If
# you don't include this switch, the password can contain lowercase
# letters.
# @return [Boolean]
#
# @!attribute [rw] include_space
# Specifies whether to include the space character. If you include
# this switch, the password can contain space characters.
# @return [Boolean]
#
# @!attribute [rw] require_each_included_type
# Specifies whether to include at least one upper and lowercase
# letter, one number, and one punctuation. If you don't include this
# switch, the password contains at least one of every character type.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/GetRandomPasswordRequest AWS API Documentation
#
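    # A minimal usage sketch (illustrative, not part of the generated API): it
    # assumes a configured Aws::SecretsManager::Client named `client`:
    #
    #   resp = client.get_random_password(password_length: 20,
    #                                      exclude_punctuation: true)
    #   resp.random_password # => a 20-character password without punctuation
    #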
class GetRandomPasswordRequest < Struct.new(
:password_length,
:exclude_characters,
:exclude_numbers,
:exclude_punctuation,
:exclude_uppercase,
:exclude_lowercase,
:include_space,
:require_each_included_type)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] random_password
# A string with the password.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/GetRandomPasswordResponse AWS API Documentation
#
class GetRandomPasswordResponse < Struct.new(
:random_password)
SENSITIVE = [:random_password]
include Aws::Structure
end
# @note When making an API call, you may pass GetResourcePolicyRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# }
#
# @!attribute [rw] secret_id
# The ARN or name of the secret to retrieve the attached
# resource-based policy for.
#
# For an ARN, we recommend that you specify a complete ARN rather than
# a partial ARN.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/GetResourcePolicyRequest AWS API Documentation
#
class GetResourcePolicyRequest < Struct.new(
:secret_id)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the secret that the resource-based policy was retrieved
# for.
# @return [String]
#
# @!attribute [rw] name
# The name of the secret that the resource-based policy was retrieved
# for.
# @return [String]
#
# @!attribute [rw] resource_policy
# A JSON-formatted string that contains the permissions policy
# attached to the secret. For more information about permissions
# policies, see [Authentication and access control for Secrets
# Manager][1].
#
#
#
# [1]: https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/GetResourcePolicyResponse AWS API Documentation
#
class GetResourcePolicyResponse < Struct.new(
:arn,
:name,
:resource_policy)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass GetSecretValueRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# version_id: "SecretVersionIdType",
# version_stage: "SecretVersionStageType",
# }
#
# @!attribute [rw] secret_id
# The ARN or name of the secret to retrieve.
#
# For an ARN, we recommend that you specify a complete ARN rather than
# a partial ARN.
# @return [String]
#
# @!attribute [rw] version_id
# The unique identifier of the version of the secret to retrieve. If
# you include both this parameter and `VersionStage`, the two
# parameters must refer to the same secret version. If you don't
# specify either a `VersionStage` or `VersionId`, then Secrets Manager
# returns the `AWSCURRENT` version.
#
# This value is typically a [UUID-type][1] value with 32 hexadecimal
# digits.
#
#
#
# [1]: https://wikipedia.org/wiki/Universally_unique_identifier
# @return [String]
#
# @!attribute [rw] version_stage
# The staging label of the version of the secret to retrieve.
#
# Secrets Manager uses staging labels to keep track of different
# versions during the rotation process. If you include both this
# parameter and `VersionId`, the two parameters must refer to the same
# secret version. If you don't specify either a `VersionStage` or
# `VersionId`, Secrets Manager returns the `AWSCURRENT` version.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/GetSecretValueRequest AWS API Documentation
#
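    # A minimal usage sketch (illustrative, not part of the generated API): it
    # assumes a configured Aws::SecretsManager::Client named `client`; omitting
    # `version_id` and `version_stage` returns the `AWSCURRENT` version:
    #
    #   resp = client.get_secret_value(secret_id: "prod/my-app/db-credentials")
    #   resp.secret_string # => the decrypted secret value
    #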
class GetSecretValueRequest < Struct.new(
:secret_id,
:version_id,
:version_stage)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the secret.
# @return [String]
#
# @!attribute [rw] name
# The friendly name of the secret.
# @return [String]
#
# @!attribute [rw] version_id
# The unique identifier of this version of the secret.
# @return [String]
#
# @!attribute [rw] secret_binary
# The decrypted secret value, if the secret value was originally
# provided as binary data in the form of a byte array. The response
# parameter represents the binary data as a [base64-encoded][1]
# string.
#
# If the secret was created by using the Secrets Manager console, or
# if the secret value was originally provided as a string, then this
# field is omitted. The secret value appears in `SecretString`
# instead.
#
#
#
# [1]: https://tools.ietf.org/html/rfc4648#section-4
# @return [String]
#
# @!attribute [rw] secret_string
# The decrypted secret value, if the secret value was originally
# provided as a string or through the Secrets Manager console.
#
# If this secret was created by using the console, then Secrets
# Manager stores the information as a JSON structure of key/value
# pairs.
# @return [String]
#
# @!attribute [rw] version_stages
# A list of all of the staging labels currently attached to this
# version of the secret.
# @return [Array<String>]
#
# @!attribute [rw] created_date
# The date and time that this version of the secret was created. If
# you don't specify which version in `VersionId` or `VersionStage`,
# then Secrets Manager uses the `AWSCURRENT` version.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/GetSecretValueResponse AWS API Documentation
#
class GetSecretValueResponse < Struct.new(
:arn,
:name,
:version_id,
:secret_binary,
:secret_string,
:version_stages,
:created_date)
SENSITIVE = [:secret_binary, :secret_string]
include Aws::Structure
end
# An error occurred on the server side.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/InternalServiceError AWS API Documentation
#
class InternalServiceError < Struct.new(
:message)
SENSITIVE = []
include Aws::Structure
end
# The `NextToken` value is invalid.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/InvalidNextTokenException AWS API Documentation
#
class InvalidNextTokenException < Struct.new(
:message)
SENSITIVE = []
include Aws::Structure
end
    # The parameter name or value is invalid.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/InvalidParameterException AWS API Documentation
#
class InvalidParameterException < Struct.new(
:message)
SENSITIVE = []
include Aws::Structure
end
# A parameter value is not valid for the current state of the resource.
#
# Possible causes:
#
# * The secret is scheduled for deletion.
#
# * You tried to enable rotation on a secret that doesn't already have
# a Lambda function ARN configured and you didn't include such an ARN
# as a parameter in this call.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/InvalidRequestException AWS API Documentation
#
class InvalidRequestException < Struct.new(
:message)
SENSITIVE = []
include Aws::Structure
end
# The request failed because it would exceed one of the Secrets Manager
# quotas.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/LimitExceededException AWS API Documentation
#
class LimitExceededException < Struct.new(
:message)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass ListSecretVersionIdsRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# max_results: 1,
# next_token: "NextTokenType",
# include_deprecated: false,
# }
#
# @!attribute [rw] secret_id
# The ARN or name of the secret whose versions you want to list.
#
# For an ARN, we recommend that you specify a complete ARN rather than
# a partial ARN.
# @return [String]
#
# @!attribute [rw] max_results
# The number of results to include in the response.
#
# If there are more results available, in the response, Secrets
# Manager includes `NextToken`. To get the next results, call
# `ListSecretVersionIds` again with the value from `NextToken`.
# @return [Integer]
#
# @!attribute [rw] next_token
# A token that indicates where the output should continue from, if a
# previous call did not show all results. To get the next results,
# call `ListSecretVersionIds` again with this value.
# @return [String]
#
# @!attribute [rw] include_deprecated
# Specifies whether to include versions of secrets that don't have
# any staging labels attached to them. Versions without staging labels
# are considered deprecated and are subject to deletion by Secrets
# Manager.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/ListSecretVersionIdsRequest AWS API Documentation
#
class ListSecretVersionIdsRequest < Struct.new(
:secret_id,
:max_results,
:next_token,
:include_deprecated)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] versions
# A list of the versions of the secret.
# @return [Array<Types::SecretVersionsListEntry>]
#
# @!attribute [rw] next_token
# Secrets Manager includes this value if there's more output
# available than what is included in the current response. This can
# occur even when the response includes no values at all, such as when
# you ask for a filtered view of a long list. To get the next results,
# call `ListSecretVersionIds` again with this value.
# @return [String]
#
# @!attribute [rw] arn
# The ARN of the secret.
# @return [String]
#
# @!attribute [rw] name
# The name of the secret.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/ListSecretVersionIdsResponse AWS API Documentation
#
class ListSecretVersionIdsResponse < Struct.new(
:versions,
:next_token,
:arn,
:name)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass ListSecretsRequest
# data as a hash:
#
# {
# max_results: 1,
# next_token: "NextTokenType",
# filters: [
# {
# key: "description", # accepts description, name, tag-key, tag-value, primary-region, all
# values: ["FilterValueStringType"],
# },
# ],
# sort_order: "asc", # accepts asc, desc
# }
#
# @!attribute [rw] max_results
# The number of results to include in the response.
#
# If there are more results available, in the response, Secrets
# Manager includes `NextToken`. To get the next results, call
# `ListSecrets` again with the value from `NextToken`.
# @return [Integer]
#
# @!attribute [rw] next_token
# A token that indicates where the output should continue from, if a
# previous call did not show all results. To get the next results,
# call `ListSecrets` again with this value.
# @return [String]
#
# @!attribute [rw] filters
# The filters to apply to the list of secrets.
# @return [Array<Types::Filter>]
#
# @!attribute [rw] sort_order
# Lists secrets in the requested order.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/ListSecretsRequest AWS API Documentation
#
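    # A hedged pagination sketch (assumes an `Aws::SecretsManager::Client`
    # named `client`; the page size is arbitrary): `next_token` from each
    # response feeds the next request until it comes back nil.
    #
    #   names = []
    #   resp = client.list_secrets(max_results: 20)
    #   loop do
    #     names.concat(resp.secret_list.map(&:name))
    #     break unless resp.next_token
    #     resp = client.list_secrets(max_results: 20, next_token: resp.next_token)
    #   end
    #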
class ListSecretsRequest < Struct.new(
:max_results,
:next_token,
:filters,
:sort_order)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] secret_list
# A list of the secrets in the account.
# @return [Array<Types::SecretListEntry>]
#
# @!attribute [rw] next_token
# Secrets Manager includes this value if there's more output
# available than what is included in the current response. This can
# occur even when the response includes no values at all, such as when
# you ask for a filtered view of a long list. To get the next results,
# call `ListSecrets` again with this value.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/ListSecretsResponse AWS API Documentation
#
class ListSecretsResponse < Struct.new(
:secret_list,
:next_token)
SENSITIVE = []
include Aws::Structure
end
# The resource policy has syntax errors.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/MalformedPolicyDocumentException AWS API Documentation
#
class MalformedPolicyDocumentException < Struct.new(
:message)
SENSITIVE = []
include Aws::Structure
end
# The request failed because you did not complete all the prerequisite
# steps.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/PreconditionNotMetException AWS API Documentation
#
class PreconditionNotMetException < Struct.new(
:message)
SENSITIVE = []
include Aws::Structure
end
# The `BlockPublicPolicy` parameter is set to true, and the resource
# policy did not prevent broad access to the secret.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/PublicPolicyException AWS API Documentation
#
class PublicPolicyException < Struct.new(
:message)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass PutResourcePolicyRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# resource_policy: "NonEmptyResourcePolicyType", # required
# block_public_policy: false,
# }
#
# @!attribute [rw] secret_id
# The ARN or name of the secret to attach the resource-based policy.
#
# For an ARN, we recommend that you specify a complete ARN rather than
# a partial ARN.
# @return [String]
#
# @!attribute [rw] resource_policy
# A JSON-formatted string for an Amazon Web Services resource-based
# policy. For example policies, see [Permissions policy examples][1].
#
#
#
# [1]: https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access_examples.html
# @return [String]
#
# @!attribute [rw] block_public_policy
# Specifies whether to block resource-based policies that allow broad
# access to the secret. By default, Secrets Manager blocks policies
# that allow broad access, for example those that use a wildcard for
# the principal.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/PutResourcePolicyRequest AWS API Documentation
#
class PutResourcePolicyRequest < Struct.new(
:secret_id,
:resource_policy,
:block_public_policy)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the secret.
# @return [String]
#
# @!attribute [rw] name
# The name of the secret.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/PutResourcePolicyResponse AWS API Documentation
#
class PutResourcePolicyResponse < Struct.new(
:arn,
:name)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass PutSecretValueRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# client_request_token: "ClientRequestTokenType",
# secret_binary: "data",
# secret_string: "SecretStringType",
# version_stages: ["SecretVersionStageType"],
# }
#
# @!attribute [rw] secret_id
# The ARN or name of the secret to add a new version to.
#
# For an ARN, we recommend that you specify a complete ARN rather than
# a partial ARN.
#
# If the secret doesn't already exist, use `CreateSecret` instead.
# @return [String]
#
# @!attribute [rw] client_request_token
# A unique identifier for the new version of the secret.
#
# <note markdown="1"> If you use the Amazon Web Services CLI or one of the Amazon Web
# Services SDKs to call this operation, then you can leave this
# parameter empty because they generate a random UUID for you. If you
# don't use the SDK and instead generate a raw HTTP request to the
# Secrets Manager service endpoint, then you must generate a
# `ClientRequestToken` yourself for new versions and include that
# value in the request.
#
# </note>
#
# This value helps ensure idempotency. Secrets Manager uses this value
# to prevent the accidental creation of duplicate versions if there
# are failures and retries during the Lambda rotation function
# processing. We recommend that you generate a [UUID-type][1] value to
# ensure uniqueness within the specified secret.
#
# * If the `ClientRequestToken` value isn't already associated with a
# version of the secret then a new version of the secret is created.
#
# * If a version with this value already exists and that version's
# `SecretString` or `SecretBinary` values are the same as those in
# the request then the request is ignored. The operation is
# idempotent.
#
    # * If a version with this value already exists and the `SecretString`
    #   and `SecretBinary` values are different from those in the request,
    #   then the request fails because you can't modify a
# secret version. You can only create new versions to store new
# secret values.
#
# This value becomes the `VersionId` of the new version.
#
# **A suitable default value is auto-generated.** You should normally
# not need to pass this option.
#
#
#
# [1]: https://wikipedia.org/wiki/Universally_unique_identifier
# @return [String]
#
# @!attribute [rw] secret_binary
# The binary data to encrypt and store in the new version of the
# secret. To use this parameter in the command-line tools, we
# recommend that you store your binary data in a file and then pass
# the contents of the file as a parameter.
#
# You must include `SecretBinary` or `SecretString`, but not both.
#
# You can't access this value from the Secrets Manager console.
# @return [String]
#
# @!attribute [rw] secret_string
# The text to encrypt and store in the new version of the secret.
#
# You must include `SecretBinary` or `SecretString`, but not both.
#
# We recommend you create the secret string as JSON key/value pairs,
# as shown in the example.
# @return [String]
#
# @!attribute [rw] version_stages
# A list of staging labels to attach to this version of the secret.
# Secrets Manager uses staging labels to track versions of a secret
# through the rotation process.
#
# If you specify a staging label that's already associated with a
# different version of the same secret, then Secrets Manager removes
# the label from the other version and attaches it to this version. If
# you specify `AWSCURRENT`, and it is already attached to another
# version, then Secrets Manager also moves the staging label
# `AWSPREVIOUS` to the version that `AWSCURRENT` was removed from.
#
# If you don't include `VersionStages`, then Secrets Manager
# automatically moves the staging label `AWSCURRENT` to this version.
# @return [Array<String>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/PutSecretValueRequest AWS API Documentation
#
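    # A hedged usage sketch (assumes an `Aws::SecretsManager::Client` named
    # `client`; the secret name and JSON payload are placeholders): reusing
    # the same `client_request_token` on a retry keeps the call idempotent.
    #
    #   require 'json'
    #   require 'securerandom'
    #
    #   token = SecureRandom.uuid
    #   client.put_secret_value(
    #     secret_id: "my/app/credentials",
    #     secret_string: { "user" => "app", "password" => "s3cret" }.to_json,
    #     client_request_token: token
    #   )
    #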
class PutSecretValueRequest < Struct.new(
:secret_id,
:client_request_token,
:secret_binary,
:secret_string,
:version_stages)
SENSITIVE = [:secret_binary, :secret_string]
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the secret.
# @return [String]
#
# @!attribute [rw] name
# The name of the secret.
# @return [String]
#
# @!attribute [rw] version_id
# The unique identifier of the version of the secret.
# @return [String]
#
# @!attribute [rw] version_stages
# The list of staging labels that are currently attached to this
# version of the secret. Secrets Manager uses staging labels to track
# a version as it progresses through the secret rotation process.
# @return [Array<String>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/PutSecretValueResponse AWS API Documentation
#
class PutSecretValueResponse < Struct.new(
:arn,
:name,
:version_id,
:version_stages)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass RemoveRegionsFromReplicationRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# remove_replica_regions: ["RegionType"], # required
# }
#
# @!attribute [rw] secret_id
# The ARN or name of the secret.
# @return [String]
#
# @!attribute [rw] remove_replica_regions
# The Regions of the replicas to remove.
# @return [Array<String>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/RemoveRegionsFromReplicationRequest AWS API Documentation
#
class RemoveRegionsFromReplicationRequest < Struct.new(
:secret_id,
:remove_replica_regions)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the primary secret.
# @return [String]
#
# @!attribute [rw] replication_status
# The status of replicas for this secret after you remove Regions.
# @return [Array<Types::ReplicationStatusType>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/RemoveRegionsFromReplicationResponse AWS API Documentation
#
class RemoveRegionsFromReplicationResponse < Struct.new(
:arn,
:replication_status)
SENSITIVE = []
include Aws::Structure
end
# A custom type that specifies a `Region` and the `KmsKeyId` for a
# replica secret.
#
# @note When making an API call, you may pass ReplicaRegionType
# data as a hash:
#
# {
# region: "RegionType",
# kms_key_id: "KmsKeyIdType",
# }
#
# @!attribute [rw] region
# A Region code. For a list of Region codes, see [Name and code of
# Regions][1].
#
#
#
# [1]: https://docs.aws.amazon.com/general/latest/gr/rande.html#regional-endpoints
# @return [String]
#
# @!attribute [rw] kms_key_id
# The ARN, key ID, or alias of the KMS key to encrypt the secret. If
# you don't include this field, Secrets Manager uses
# `aws/secretsmanager`.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/ReplicaRegionType AWS API Documentation
#
class ReplicaRegionType < Struct.new(
:region,
:kms_key_id)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass ReplicateSecretToRegionsRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# add_replica_regions: [ # required
# {
# region: "RegionType",
# kms_key_id: "KmsKeyIdType",
# },
# ],
# force_overwrite_replica_secret: false,
# }
#
# @!attribute [rw] secret_id
# The ARN or name of the secret to replicate.
# @return [String]
#
# @!attribute [rw] add_replica_regions
# A list of Regions in which to replicate the secret.
# @return [Array<Types::ReplicaRegionType>]
#
# @!attribute [rw] force_overwrite_replica_secret
# Specifies whether to overwrite a secret with the same name in the
# destination Region.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/ReplicateSecretToRegionsRequest AWS API Documentation
#
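    # A hedged usage sketch (assumes an `Aws::SecretsManager::Client` named
    # `client`; the secret name and Region are illustrative): each entry in
    # `add_replica_regions` follows `ReplicaRegionType`, and `kms_key_id` may
    # be omitted to use `aws/secretsmanager` in the destination Region.
    #
    #   client.replicate_secret_to_regions(
    #     secret_id: "my/app/credentials",
    #     add_replica_regions: [{ region: "eu-west-1" }]
    #   )
    #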
class ReplicateSecretToRegionsRequest < Struct.new(
:secret_id,
:add_replica_regions,
:force_overwrite_replica_secret)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the primary secret.
# @return [String]
#
# @!attribute [rw] replication_status
# The status of replication.
# @return [Array<Types::ReplicationStatusType>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/ReplicateSecretToRegionsResponse AWS API Documentation
#
class ReplicateSecretToRegionsResponse < Struct.new(
:arn,
:replication_status)
SENSITIVE = []
include Aws::Structure
end
# A replication object consisting of a `RegionReplicationStatus` object
    # that includes a Region, KMSKeyId, status, and status message.
#
# @!attribute [rw] region
# The Region where replication occurs.
# @return [String]
#
# @!attribute [rw] kms_key_id
# Can be an `ARN`, `Key ID`, or `Alias`.
# @return [String]
#
# @!attribute [rw] status
# The status can be `InProgress`, `Failed`, or `InSync`.
# @return [String]
#
# @!attribute [rw] status_message
# Status message such as "*Secret with this name already exists in
# this region*".
# @return [String]
#
# @!attribute [rw] last_accessed_date
# The date that you last accessed the secret in the Region.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/ReplicationStatusType AWS API Documentation
#
class ReplicationStatusType < Struct.new(
:region,
:kms_key_id,
:status,
:status_message,
:last_accessed_date)
SENSITIVE = []
include Aws::Structure
end
# A resource with the ID you requested already exists.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/ResourceExistsException AWS API Documentation
#
class ResourceExistsException < Struct.new(
:message)
SENSITIVE = []
include Aws::Structure
end
# Secrets Manager can't find the resource that you asked for.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/ResourceNotFoundException AWS API Documentation
#
class ResourceNotFoundException < Struct.new(
:message)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass RestoreSecretRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# }
#
# @!attribute [rw] secret_id
# The ARN or name of the secret to restore.
#
# For an ARN, we recommend that you specify a complete ARN rather than
# a partial ARN.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/RestoreSecretRequest AWS API Documentation
#
class RestoreSecretRequest < Struct.new(
:secret_id)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the secret that was restored.
# @return [String]
#
# @!attribute [rw] name
# The name of the secret that was restored.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/RestoreSecretResponse AWS API Documentation
#
class RestoreSecretResponse < Struct.new(
:arn,
:name)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass RotateSecretRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# client_request_token: "ClientRequestTokenType",
# rotation_lambda_arn: "RotationLambdaARNType",
# rotation_rules: {
# automatically_after_days: 1,
# },
# }
#
# @!attribute [rw] secret_id
# The ARN or name of the secret to rotate.
#
# For an ARN, we recommend that you specify a complete ARN rather than
# a partial ARN.
# @return [String]
#
# @!attribute [rw] client_request_token
# A unique identifier for the new version of the secret that helps
# ensure idempotency. Secrets Manager uses this value to prevent the
# accidental creation of duplicate versions if there are failures and
# retries during rotation. This value becomes the `VersionId` of the
# new version.
#
# If you use the Amazon Web Services CLI or one of the Amazon Web
    #   Services SDKs to call this operation, then you can leave this
# parameter empty. The CLI or SDK generates a random UUID for you and
# includes that in the request for this parameter. If you don't use
# the SDK and instead generate a raw HTTP request to the Secrets
# Manager service endpoint, then you must generate a
# `ClientRequestToken` yourself for new versions and include that
# value in the request.
#
# You only need to specify this value if you implement your own retry
# logic and you want to ensure that Secrets Manager doesn't attempt
# to create a secret version twice. We recommend that you generate a
# [UUID-type][1] value to ensure uniqueness within the specified
# secret.
#
# **A suitable default value is auto-generated.** You should normally
# not need to pass this option.
#
#
#
# [1]: https://wikipedia.org/wiki/Universally_unique_identifier
# @return [String]
#
# @!attribute [rw] rotation_lambda_arn
# The ARN of the Lambda rotation function that can rotate the secret.
# @return [String]
#
# @!attribute [rw] rotation_rules
# A structure that defines the rotation configuration for this secret.
# @return [Types::RotationRulesType]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/RotateSecretRequest AWS API Documentation
#
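    # A hedged usage sketch (assumes an `Aws::SecretsManager::Client` named
    # `client` and an existing rotation Lambda; the ARN and interval are
    # placeholders): the `rotation_rules` hash maps onto `RotationRulesType`.
    #
    #   client.rotate_secret(
    #     secret_id: "my/app/credentials",
    #     rotation_lambda_arn: "arn:aws:lambda:us-east-1:123456789012:function:my-rotator",
    #     rotation_rules: { automatically_after_days: 30 }
    #   )
    #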
class RotateSecretRequest < Struct.new(
:secret_id,
:client_request_token,
:rotation_lambda_arn,
:rotation_rules)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the secret.
# @return [String]
#
# @!attribute [rw] name
# The name of the secret.
# @return [String]
#
# @!attribute [rw] version_id
# The ID of the new version of the secret.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/RotateSecretResponse AWS API Documentation
#
class RotateSecretResponse < Struct.new(
:arn,
:name,
:version_id)
SENSITIVE = []
include Aws::Structure
end
# A structure that defines the rotation configuration for the secret.
#
# @note When making an API call, you may pass RotationRulesType
# data as a hash:
#
# {
# automatically_after_days: 1,
# }
#
# @!attribute [rw] automatically_after_days
# Specifies the number of days between automatic scheduled rotations
# of the secret.
#
# Secrets Manager schedules the next rotation when the previous one is
# complete. Secrets Manager schedules the date by adding the rotation
# interval (number of days) to the actual date of the last rotation.
# The service chooses the hour within that 24-hour date window
# randomly. The minute is also chosen somewhat randomly, but weighted
# towards the top of the hour and influenced by a variety of factors
# that help distribute load.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/RotationRulesType AWS API Documentation
#
class RotationRulesType < Struct.new(
:automatically_after_days)
SENSITIVE = []
include Aws::Structure
end
# A structure that contains the details about a secret. It does not
# include the encrypted `SecretString` and `SecretBinary` values. To get
# those values, use the GetSecretValue operation.
#
# @!attribute [rw] arn
# The Amazon Resource Name (ARN) of the secret.
# @return [String]
#
# @!attribute [rw] name
# The friendly name of the secret. You can use forward slashes in the
# name to represent a path hierarchy. For example,
# `/prod/databases/dbserver1` could represent the secret for a server
# named `dbserver1` in the folder `databases` in the folder `prod`.
# @return [String]
#
# @!attribute [rw] description
# The user-provided description of the secret.
# @return [String]
#
# @!attribute [rw] kms_key_id
# The ARN of the KMS key that Secrets Manager uses to encrypt the
# secret value. If the secret is encrypted with the Amazon Web
# Services managed key `aws/secretsmanager`, this field is omitted.
# @return [String]
#
# @!attribute [rw] rotation_enabled
# Indicates whether automatic, scheduled rotation is enabled for this
# secret.
# @return [Boolean]
#
# @!attribute [rw] rotation_lambda_arn
# The ARN of an Amazon Web Services Lambda function invoked by Secrets
# Manager to rotate and expire the secret either automatically per the
# schedule or manually by a call to RotateSecret.
# @return [String]
#
# @!attribute [rw] rotation_rules
# A structure that defines the rotation configuration for the secret.
# @return [Types::RotationRulesType]
#
# @!attribute [rw] last_rotated_date
# The most recent date and time that the Secrets Manager rotation
# process was successfully completed. This value is null if the secret
# hasn't ever rotated.
# @return [Time]
#
# @!attribute [rw] last_changed_date
# The last date and time that this secret was modified in any way.
# @return [Time]
#
# @!attribute [rw] last_accessed_date
# The last date that this secret was accessed. This value is truncated
# to midnight of the date and therefore shows only the date, not the
# time.
# @return [Time]
#
# @!attribute [rw] deleted_date
# The date and time the deletion of the secret occurred. Not present
# on active secrets. The secret can be recovered until the number of
# days in the recovery window has passed, as specified in the
# `RecoveryWindowInDays` parameter of the DeleteSecret operation.
# @return [Time]
#
# @!attribute [rw] tags
# The list of user-defined tags associated with the secret. To add
# tags to a secret, use TagResource. To remove tags, use
# UntagResource.
# @return [Array<Types::Tag>]
#
# @!attribute [rw] secret_versions_to_stages
# A list of all of the currently assigned `SecretVersionStage` staging
# labels and the `SecretVersionId` attached to each one. Staging
# labels are used to keep track of the different versions during the
# rotation process.
#
# <note markdown="1"> A version that does not have any `SecretVersionStage` is considered
# deprecated and subject to deletion. Such versions are not included
# in this list.
#
# </note>
# @return [Hash<String,Array<String>>]
#
# @!attribute [rw] owning_service
# Returns the name of the service that created the secret.
# @return [String]
#
# @!attribute [rw] created_date
# The date and time when a secret was created.
# @return [Time]
#
# @!attribute [rw] primary_region
# The Region where Secrets Manager originated the secret.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/SecretListEntry AWS API Documentation
#
class SecretListEntry < Struct.new(
:arn,
:name,
:description,
:kms_key_id,
:rotation_enabled,
:rotation_lambda_arn,
:rotation_rules,
:last_rotated_date,
:last_changed_date,
:last_accessed_date,
:deleted_date,
:tags,
:secret_versions_to_stages,
:owning_service,
:created_date,
:primary_region)
SENSITIVE = []
include Aws::Structure
end
# A structure that contains information about one version of a secret.
#
# @!attribute [rw] version_id
# The unique version identifier of this version of the secret.
# @return [String]
#
# @!attribute [rw] version_stages
# An array of staging labels that are currently associated with this
# version of the secret.
# @return [Array<String>]
#
# @!attribute [rw] last_accessed_date
# The date that this version of the secret was last accessed. Note
# that the resolution of this field is at the date level and does not
# include the time.
# @return [Time]
#
# @!attribute [rw] created_date
# The date and time this version of the secret was created.
# @return [Time]
#
# @!attribute [rw] kms_key_ids
# The KMS keys used to encrypt the secret version.
# @return [Array<String>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/SecretVersionsListEntry AWS API Documentation
#
class SecretVersionsListEntry < Struct.new(
:version_id,
:version_stages,
:last_accessed_date,
:created_date,
:kms_key_ids)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass StopReplicationToReplicaRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# }
#
# @!attribute [rw] secret_id
# The ARN of the primary secret.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/StopReplicationToReplicaRequest AWS API Documentation
#
class StopReplicationToReplicaRequest < Struct.new(
:secret_id)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the promoted secret. The ARN is the same as the original
# primary secret except the Region is changed.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/StopReplicationToReplicaResponse AWS API Documentation
#
class StopReplicationToReplicaResponse < Struct.new(
:arn)
SENSITIVE = []
include Aws::Structure
end
# A structure that contains information about a tag.
#
# @note When making an API call, you may pass Tag
# data as a hash:
#
# {
# key: "TagKeyType",
# value: "TagValueType",
# }
#
# @!attribute [rw] key
# The key identifier, or name, of the tag.
# @return [String]
#
# @!attribute [rw] value
# The string value associated with the key of the tag.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/Tag AWS API Documentation
#
class Tag < Struct.new(
:key,
:value)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass TagResourceRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# tags: [ # required
# {
# key: "TagKeyType",
# value: "TagValueType",
# },
# ],
# }
#
# @!attribute [rw] secret_id
# The identifier for the secret to attach tags to. You can specify
# either the Amazon Resource Name (ARN) or the friendly name of the
# secret.
#
# For an ARN, we recommend that you specify a complete ARN rather than
# a partial ARN.
# @return [String]
#
# @!attribute [rw] tags
# The tags to attach to the secret as a JSON text string argument.
# Each element in the list consists of a `Key` and a `Value`.
#
# For storing multiple values, we recommend that you use a JSON text
# string argument and specify key/value pairs. For more information,
# see [Specifying parameter values for the Amazon Web Services CLI][1]
# in the Amazon Web Services CLI User Guide.
#
#
#
# [1]: https://docs.aws.amazon.com/cli/latest/userguide/cli-usage-parameters.html
# @return [Array<Types::Tag>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/TagResourceRequest AWS API Documentation
#
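    # A hedged usage sketch (assumes an `Aws::SecretsManager::Client` named
    # `client`; the secret name and tag pair are illustrative only):
    #
    #   client.tag_resource(
    #     secret_id: "my/app/credentials",
    #     tags: [{ key: "environment", value: "production" }]
    #   )
    #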
class TagResourceRequest < Struct.new(
:secret_id,
:tags)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass UntagResourceRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# tag_keys: ["TagKeyType"], # required
# }
#
# @!attribute [rw] secret_id
# The ARN or name of the secret.
#
# For an ARN, we recommend that you specify a complete ARN rather than
# a partial ARN.
# @return [String]
#
# @!attribute [rw] tag_keys
# A list of tag key names to remove from the secret. You don't
# specify the value. Both the key and its associated value are
# removed.
#
# This parameter requires a JSON text string argument.
#
# For storing multiple values, we recommend that you use a JSON text
# string argument and specify key/value pairs. For more information,
# see [Specifying parameter values for the Amazon Web Services CLI][1]
# in the Amazon Web Services CLI User Guide.
#
#
#
# [1]: https://docs.aws.amazon.com/cli/latest/userguide/cli-usage-parameters.html
# @return [Array<String>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/UntagResourceRequest AWS API Documentation
#
class UntagResourceRequest < Struct.new(
:secret_id,
:tag_keys)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass UpdateSecretRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# client_request_token: "ClientRequestTokenType",
# description: "DescriptionType",
# kms_key_id: "KmsKeyIdType",
# secret_binary: "data",
# secret_string: "SecretStringType",
# }
#
# @!attribute [rw] secret_id
# The ARN or name of the secret.
#
# For an ARN, we recommend that you specify a complete ARN rather than
# a partial ARN.
# @return [String]
#
# @!attribute [rw] client_request_token
# If you include `SecretString` or `SecretBinary`, then Secrets
# Manager creates a new version for the secret, and this parameter
# specifies the unique identifier for the new version.
#
# <note markdown="1"> If you use the Amazon Web Services CLI or one of the Amazon Web
# Services SDKs to call this operation, then you can leave this
# parameter empty. The CLI or SDK generates a random UUID for you and
# includes it as the value for this parameter in the request. If you
# don't use the SDK and instead generate a raw HTTP request to the
# Secrets Manager service endpoint, then you must generate a
# `ClientRequestToken` yourself for the new version and include the
# value in the request.
#
# </note>
#
# This value becomes the `VersionId` of the new version.
#
# **A suitable default value is auto-generated.** You should normally
# not need to pass this option.
# @return [String]
#
# @!attribute [rw] description
# The description of the secret.
# @return [String]
#
# @!attribute [rw] kms_key_id
# The ARN, key ID, or alias of the KMS key that Secrets Manager uses
    #   to encrypt new secret versions as well as any existing versions with
    #   the staging labels `AWSCURRENT`, `AWSPENDING`, or `AWSPREVIOUS`. For
# more information about versions and staging labels, see [Concepts:
# Version][1].
#
# You can only use the Amazon Web Services managed key
# `aws/secretsmanager` if you call this operation using credentials
# from the same Amazon Web Services account that owns the secret. If
# the secret is in a different account, then you must use a customer
# managed key and provide the ARN of that KMS key in this field. The
# user making the call must have permissions to both the secret and
# the KMS key in their respective accounts.
#
#
#
# [1]: https://docs.aws.amazon.com/secretsmanager/latest/userguide/getting-started.html#term_version
# @return [String]
#
# @!attribute [rw] secret_binary
# The binary data to encrypt and store in the new version of the
# secret. We recommend that you store your binary data in a file and
# then pass the contents of the file as a parameter.
#
# Either `SecretBinary` or `SecretString` must have a value, but not
# both.
#
# You can't access this parameter in the Secrets Manager console.
# @return [String]
#
# @!attribute [rw] secret_string
# The text data to encrypt and store in the new version of the secret.
# We recommend you use a JSON structure of key/value pairs for your
# secret value.
#
# Either `SecretBinary` or `SecretString` must have a value, but not
# both.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/UpdateSecretRequest AWS API Documentation
#
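    # A hedged usage sketch (assumes an `Aws::SecretsManager::Client` named
    # `client`; the secret name, description, and key alias are placeholders):
    # only the attributes passed here are changed.
    #
    #   client.update_secret(
    #     secret_id: "my/app/credentials",
    #     description: "Rotated database credentials",
    #     kms_key_id: "alias/my-app-key"
    #   )
    #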
class UpdateSecretRequest < Struct.new(
:secret_id,
:client_request_token,
:description,
:kms_key_id,
:secret_binary,
:secret_string)
SENSITIVE = [:secret_binary, :secret_string]
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the secret that was updated.
# @return [String]
#
# @!attribute [rw] name
# The name of the secret that was updated.
# @return [String]
#
# @!attribute [rw] version_id
# If Secrets Manager created a new version of the secret during this
# operation, then `VersionId` contains the unique identifier of the
# new version.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/UpdateSecretResponse AWS API Documentation
#
class UpdateSecretResponse < Struct.new(
:arn,
:name,
:version_id)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass UpdateSecretVersionStageRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType", # required
# version_stage: "SecretVersionStageType", # required
# remove_from_version_id: "SecretVersionIdType",
# move_to_version_id: "SecretVersionIdType",
# }
#
# @!attribute [rw] secret_id
# The ARN or the name of the secret with the version and staging
    #   labels to modify.
#
# For an ARN, we recommend that you specify a complete ARN rather than
# a partial ARN.
# @return [String]
#
# @!attribute [rw] version_stage
# The staging label to add to this version.
# @return [String]
#
# @!attribute [rw] remove_from_version_id
# The ID of the version that the staging label is to be removed from.
# If the staging label you are trying to attach to one version is
# already attached to a different version, then you must include this
# parameter and specify the version that the label is to be removed
# from. If the label is attached and you either do not specify this
# parameter, or the version ID does not match, then the operation
# fails.
# @return [String]
#
# @!attribute [rw] move_to_version_id
# The ID of the version to add the staging label to. To remove a label
    #   from a version, do not specify this parameter.
#
# If the staging label is already attached to a different version of
# the secret, then you must also specify the `RemoveFromVersionId`
# parameter.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/UpdateSecretVersionStageRequest AWS API Documentation
#
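    # A hedged usage sketch (assumes an `Aws::SecretsManager::Client` named
    # `client` and two known version IDs, both placeholders): this moves
    # `AWSCURRENT` from the old version to the new one in a single call.
    #
    #   client.update_secret_version_stage(
    #     secret_id: "my/app/credentials",
    #     version_stage: "AWSCURRENT",
    #     move_to_version_id: new_version_id,
    #     remove_from_version_id: old_version_id
    #   )
    #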
class UpdateSecretVersionStageRequest < Struct.new(
:secret_id,
:version_stage,
:remove_from_version_id,
:move_to_version_id)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] arn
# The ARN of the secret that was updated.
# @return [String]
#
# @!attribute [rw] name
# The name of the secret that was updated.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/UpdateSecretVersionStageResponse AWS API Documentation
#
class UpdateSecretVersionStageResponse < Struct.new(
:arn,
:name)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass ValidateResourcePolicyRequest
# data as a hash:
#
# {
# secret_id: "SecretIdType",
# resource_policy: "NonEmptyResourcePolicyType", # required
# }
#
# @!attribute [rw] secret_id
# This field is reserved for internal use.
# @return [String]
#
# @!attribute [rw] resource_policy
# A JSON-formatted string that contains an Amazon Web Services
# resource-based policy. The policy in the string identifies who can
# access or manage this secret and its versions. For example policies,
# see [Permissions policy examples][1].
#
#
#
# [1]: https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access_examples.html
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/ValidateResourcePolicyRequest AWS API Documentation
#
class ValidateResourcePolicyRequest < Struct.new(
:secret_id,
:resource_policy)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] policy_validation_passed
# True if your policy passes validation, otherwise false.
# @return [Boolean]
#
# @!attribute [rw] validation_errors
# Validation errors if your policy didn't pass validation.
# @return [Array<Types::ValidationErrorsEntry>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/ValidateResourcePolicyResponse AWS API Documentation
#
class ValidateResourcePolicyResponse < Struct.new(
:policy_validation_passed,
:validation_errors)
SENSITIVE = []
include Aws::Structure
end
# Displays errors that occurred during validation of the resource
# policy.
#
# @!attribute [rw] check_name
# Checks the name of the policy.
# @return [String]
#
# @!attribute [rw] error_message
# Displays error messages if validation encounters problems during
# validation of the resource policy.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/secretsmanager-2017-10-17/ValidationErrorsEntry AWS API Documentation
#
class ValidationErrorsEntry < Struct.new(
:check_name,
:error_message)
SENSITIVE = []
include Aws::Structure
end
end
end
| 35.6671 | 134 | 0.635494 |
7937191c596ec66a1f3db07faa69ad861e780d0d | 2,961 | # frozen_string_literal: true
require 'test_helper'
module Shipit
module Api
class HooksControllerTest < ActionController::TestCase
setup do
authenticate!
@stack = shipit_stacks(:shipit)
end
test "the route has priority over stacks one" do
assert_recognizes({ controller: 'shipit/api/hooks', action: 'show', id: '42' }, '/api/hooks/42')
end
test "#index without a stack_id returns the list of global hooks" do
hook = Hook.global.first
get :index
assert_response :ok
assert_json '0.id', hook.id
assert_json '0.delivery_url', hook.delivery_url
assert_json '0.content_type', hook.content_type
assert_no_json '0.stack'
end
test "#index with a stack_id returns the list of scoped hooks" do
hook = Hook.scoped_to(@stack).first
get :index, params: { stack_id: @stack.to_param }
assert_response :ok
assert_json '0.id', hook.id
assert_json '0.delivery_url', hook.delivery_url
assert_json '0.content_type', hook.content_type
assert_json '0.stack.id', @stack.id
end
test "#show returns the hooks" do
hook = Hook.scoped_to(@stack).first
get :show, params: { stack_id: @stack.to_param, id: hook.id }
assert_response :ok
assert_json 'id', hook.id
assert_json 'delivery_url', hook.delivery_url
assert_json 'content_type', hook.content_type
assert_json 'stack.id', @stack.id
end
test "#create adds a new hook" do
assert_difference -> { Hook.count }, 1 do
post :create, params: { delivery_url: 'https://example.com/hook', events: %w(deploy rollback) }
end
hook = Hook.last
assert_json 'delivery_url', 'https://example.com/hook'
assert_json 'url', "http://shipit.com/api/hooks/#{hook.id}"
assert_json 'id', hook.id
end
      test "#create does not allow setting protected attributes" do
post :create, params: {
delivery_url: 'https://example.com/hook',
events: %w(deploy rollback),
created_at: 2.months.ago.to_s(:db),
}
        assert Hook.last.created_at > 2.seconds.ago
end
test "#create returns validation errors" do
post :create, params: { delivery_url: '../etc/passwd', events: %w(deploy) }
assert_response :unprocessable_entity
assert_json 'errors', 'delivery_url' => ['is not a valid URL']
end
test "#update changes an existing hook" do
hook = Hook.global.first
patch :update, params: { id: hook.id, delivery_url: 'https://shipit.com/' }
assert_response :ok
assert_json 'delivery_url', 'https://shipit.com/'
end
test "#destroy removes an existing hook" do
hook = Hook.global.first
delete :destroy, params: { id: hook.id }
assert_response :no_content
end
end
end
end
| 32.9 | 105 | 0.620061 |
ed112ba9345d1cd5758f76316f6b9db48eb931b7 | 66 | require 'rails_helper'
RSpec.describe Field, type: :model do
end
| 13.2 | 37 | 0.772727 |
03432a13a57c41990fda0230e60ca8aa7b83630d | 182 | class AddLatitudeAndLongitudeToListing < ActiveRecord::Migration[5.1]
def change
add_column :listings, :latitude, :float
add_column :listings, :longitude, :float
end
end
| 26 | 69 | 0.758242 |
2657119e2a2332aca80d0e3fcdf8df2e8a24da4a | 1,330 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core/handler/reverse_https'
require 'msf/base/sessions/meterpreter_options'
require 'msf/base/sessions/mettle_config'
require 'msf/base/sessions/meterpreter_ppc64le_linux'
module MetasploitModule
CachedSize = 1236760
include Msf::Payload::Single
include Msf::Sessions::MeterpreterOptions
include Msf::Sessions::MettleConfig
def initialize(info = {})
super(
update_info(
info,
'Name' => 'Linux Meterpreter, Reverse HTTPS Inline',
'Description' => 'Run the Meterpreter / Mettle server payload (stageless)',
'Author' => [
'Adam Cammack <adam_cammack[at]rapid7.com>',
'Brent Cook <brent_cook[at]rapid7.com>',
'timwr'
],
'Platform' => 'linux',
'Arch' => ARCH_PPC64LE,
'License' => MSF_LICENSE,
'Handler' => Msf::Handler::ReverseHttps,
'Session' => Msf::Sessions::Meterpreter_ppc64le_Linux
)
)
end
def generate
opts = {
scheme: 'https',
stageless: true
}
MetasploitPayloads::Mettle.new('powerpc64le-linux-musl', generate_config(opts)).to_binary :exec
end
end
| 28.297872 | 99 | 0.633835 |
28f8b07934f520cedde4e6020724ed54d1a40552 | 195 | require 'Jekyll'
puts "this is a test!!!!!!"
Jekyll.logger.info("THIS IS A TEST!!!!!!!")
Jekyll::Hooks.register :site, :post_write do |site|
loop do
puts 'test!!!'
sleep 100
end
end | 17.727273 | 51 | 0.625641 |
281024f8ba86075e1a218afdfa993c64e6e527bf | 926 | class User < ApplicationRecord
# Include default devise modules. Others available are:
# :confirmable, :lockable, :timeoutable, :trackable and :omniauthable
devise :database_authenticatable, :registerable,
:recoverable, :rememberable, :validatable,
:omniauthable, :omniauth_providers => [:facebook]
has_many :scripts
has_many :departments, through: :scripts
def self.new_with_session(params, session)
super.tap do |user|
if data = session["devise.facebook_data"] && session["devise.facebook_data"]["extra"]["raw_info"]
user.email = data["email"] if user.email.blank?
end
end
end
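  # A hedged usage sketch (the controller action and redirects are assumptions,
  # not part of this model): a Devise OmniauthCallbacksController would
  # typically resolve the user with `from_omniauth` like this.
  #
  #   def facebook
  #     @user = User.from_omniauth(request.env["omniauth.auth"])
  #     if @user.persisted?
  #       sign_in_and_redirect @user, event: :authentication
  #     else
  #       session["devise.facebook_data"] = request.env["omniauth.auth"]
  #       redirect_to new_user_registration_url
  #     end
  #   end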
def self.from_omniauth(auth)
where(provider: auth.provider, uid: auth.uid).first_or_create do |user|
user.email = auth.info.email
user.password = Devise.friendly_token[0,20]
user.name = auth.info.name
user.image = auth.info.image
end
end
end
| 33.071429 | 103 | 0.690065 |
21d6fcb79d6a67236388ff10ddfb15bb3b944e81 | 1,617 | require 'test_helper'
require 'unit/response_stubs/list_payment_methods_stubs'
class ListPaymentMethodsTest < Test::Unit::TestCase
include ListPaymentMethodsStubs
def setup
@environment = Spreedly::Environment.new("key", "secret")
end
def test_successful_list_payment_methods
list = list_using(successful_list_payment_methods_response)
assert_kind_of(Array, list)
assert_equal 5, list.size
assert_kind_of Spreedly::CreditCard, list.first
assert_kind_of Spreedly::CreditCard, list[1]
assert_kind_of Spreedly::ThirdPartyToken, list[2]
assert_kind_of Spreedly::Sprel, list[3]
assert_kind_of Spreedly::BankAccount, list[4]
assert_equal 'Perrin Aybara', list.first.full_name
assert_equal 'Mat Cauthon', list[1].full_name
assert_equal 'test_vault:4111111111111111', list[2].third_party_token
assert_equal 'retained', list[3].storage_state
assert_equal '4321', list[4].account_number_display_digits
end
def test_request_url
assert_request_url 'https://core.spreedly.com/v1/payment_methods.xml' do
@environment.list_payment_methods
end
assert_request_url 'https://core.spreedly.com/v1/payment_methods.xml?since_token=SomeToken' do
@environment.list_payment_methods("SomeToken")
end
end
private
def list_using(response)
@environment.stubs(:raw_ssl_request).returns(response)
@environment.list_payment_methods
end
def assert_request_url(expected_url)
actual_url = get_request_url(successful_list_payment_methods_response) do
yield
end
assert_equal expected_url, actual_url
end
end
| 29.4 | 98 | 0.770563 |
1ce2061ac6b0990ca7cfa12322bdbc1a25929747 | 1,219 | #
# Be sure to run `pod lib lint DroidTimeSelection.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'DroidTimeSelection'
s.version = '1.0.0'
s.summary = 'The Android Time Selector for iOS.'
s.description = <<-DESC
As someone who used Android for a long time, I found I really missed selecting time using the Android selector. So, I brought it to iOS and mixed it with the manual selection of the UIDatePicker, native to iOS. Now we can enjoy the best of both worlds :D
DESC
s.homepage = 'https://github.com/DDraiman1990/DroidTimeSelection'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'DDraiman1990' => '[email protected]' }
s.source = { :git => 'https://github.com/DDraiman1990/DroidTimeSelection.git', :tag => s.version.to_s }
s.ios.deployment_target = '11.0'
s.source_files = 'DroidTimeSelection/Classes/**/*'
s.swift_version = '5.0'
s.frameworks = 'UIKit'
end
| 40.633333 | 256 | 0.654635 |
39695a615c98fbd94f4c7687a3a212d70452c6d8 | 712 | require 'rails_helper'
RSpec.describe SlackEvent::TeamJoinJob, type: :job do
let(:options) {{
event: {
user: {
id: "user123",
name: "johndoe",
real_name: "John Doe",
is_bot: false,
},
}
}}
describe "#perform" do
it "does not welcome bots to the team" do
options[:event][:user][:is_bot] = true
expect(subject).not_to receive(:welcome_user_to_team)
subject.perform(options)
end
it "welcomes the user to the team" do
expect(subject).to receive(:welcome_user_to_team).with(user: options[:event][:user])
subject.perform(options)
end
pending "should have better tests for the Slack::Web::Client"
end
end
| 23.733333 | 90 | 0.623596 |
878a238f8d13098f50e6208684aef8e46a8e4074 | 2,537 | # frozen_string_literal: true
require "abstract_unit"
module ActionController
class Base
include ActionController::Testing
end
end
class InfoControllerTest < ActionController::TestCase
tests Rails::InfoController
def setup
Rails.application.routes.draw do
get "/rails/info/properties" => "rails/info#properties"
get "/rails/info/routes" => "rails/info#routes"
end
@routes = Rails.application.routes
Rails::InfoController.include(@routes.url_helpers)
@request.env["REMOTE_ADDR"] = "127.0.0.1"
end
test "info controller does not allow remote requests" do
@request.env["REMOTE_ADDR"] = "example.org"
get :properties
assert_response :forbidden
end
test "info controller renders an error message when request was forbidden" do
@request.env["REMOTE_ADDR"] = "example.org"
get :properties
assert_select "p"
end
test "info controller allows requests when all requests are considered local" do
get :properties
assert_response :success
end
test "info controller allows local requests" do
get :properties
assert_response :success
end
test "info controller renders a table with properties" do
get :properties
assert_select "table"
end
test "info controller renders with routes" do
get :routes
assert_response :success
end
test "info controller returns exact matches" do
exact_count = -> { JSON(response.body)["exact"].size }
get :routes, params: { path: "rails/info/route" }
assert exact_count.call == 0, "should not match incomplete routes"
get :routes, params: { path: "rails/info/routes" }
assert exact_count.call == 1, "should match complete routes"
get :routes, params: { path: "rails/info/routes.html" }
assert exact_count.call == 1, "should match complete routes with optional parts"
end
test "info controller returns fuzzy matches" do
fuzzy_count = -> { JSON(response.body)["fuzzy"].size }
get :routes, params: { path: "rails/info" }
assert fuzzy_count.call == 2, "should match incomplete routes"
get :routes, params: { path: "rails/info/routes" }
assert fuzzy_count.call == 1, "should match complete routes"
get :routes, params: { path: "rails/info/routes.html" }
assert fuzzy_count.call == 0, "should match optional parts of route literally"
end
test "internal routes do not have a default params[:internal] value" do
get :properties
assert_response :success
assert_nil @controller.params[:internal]
end
end
| 28.188889 | 84 | 0.703981 |
5d43ee4db2a0b322fcfd52e64380df3ec0ca0887 | 15,142 | # frozen_string_literal: true
module ThemeCheck
# A node from the Liquid AST, the result of parsing a liquid file.
class LiquidNode < Node
attr_reader :value, :parent, :theme_file
def initialize(value, parent, theme_file)
raise ArgumentError, "Expected a Liquid AST Node" if value.is_a?(LiquidNode)
@value = value
@parent = parent
@theme_file = theme_file
@tag_markup = nil
@line_number_offset = 0
end
# Array of children nodes.
def children
@children ||= begin
nodes =
if comment?
[]
elsif defined?(@value.class::ParseTreeVisitor)
@value.class::ParseTreeVisitor.new(@value, {}).children
elsif @value.respond_to?(:nodelist)
Array(@value.nodelist)
else
[]
end
# Work around a bug in Liquid::Variable::ParseTreeVisitor that doesn't return
# the args in a hash as children nodes.
nodes = nodes.flat_map do |node|
case node
when Hash
node.values
else
node
end
end
nodes.map { |node| LiquidNode.new(node, self, @theme_file) }
end
end
# The original source code of the node. Doesn't contain wrapping braces.
def markup
if tag?
tag_markup
elsif literal?
value.to_s
elsif @value.instance_variable_defined?(:@markup)
@value.instance_variable_get(:@markup)
end
end
# The original source code of the node. Does contain wrapping braces.
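    #
    # An illustrative reading of the code below (not taken from upstream docs):
    # for the `if` node parsed from `{% if a %}hi{% endif %}`, `outer_markup`
    # is the full `{% if a %}hi{% endif %}`, while `inner_markup` is just `hi`.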
def outer_markup
if literal?
markup
elsif variable_lookup?
''
elsif variable?
start_token + markup + end_token
elsif tag? && block?
start_index = block_start_start_index
end_index = block_start_end_index
end_index += inner_markup.size
end_index = find_block_delimiter(end_index)&.end(0)
source[start_index...end_index]
elsif tag?
source[block_start_start_index...block_start_end_index]
else
inner_markup
end
end
def inner_markup
return '' unless block?
@inner_markup ||= source[block_start_end_index...block_end_start_index]
end
def inner_json
return nil unless schema?
@inner_json ||= JSON.parse(inner_markup)
rescue JSON::ParserError
# Handled by ValidSchema
@inner_json = nil
end
def markup=(markup)
if @value.instance_variable_defined?(:@markup)
@value.instance_variable_set(:@markup, markup)
end
end
# Most nodes have a line number, but it's not guaranteed.
def line_number
if tag? && @value.respond_to?(:line_number)
markup # initialize the line_number_offset
@value.line_number - @line_number_offset
elsif @value.respond_to?(:line_number)
@value.line_number
end
end
def start_index
position.start_index
end
def start_row
position.start_row
end
def start_column
position.start_column
end
def end_index
position.end_index
end
def end_row
position.end_row
end
def end_column
position.end_column
end
# Literals are hard-coded values in the liquid file.
def literal?
@value.is_a?(String) || @value.is_a?(Integer)
end
# A {% tag %} node?
def tag?
@value.is_a?(Liquid::Tag)
end
def variable?
@value.is_a?(Liquid::Variable)
end
def assigned_or_echoed_variable?
variable? && start_token == ""
end
def variable_lookup?
@value.is_a?(Liquid::VariableLookup)
end
# A {% comment %} block node?
def comment?
@value.is_a?(Liquid::Comment)
end
# Top level node of every liquid_file.
def document?
@value.is_a?(Liquid::Document)
end
alias_method :root?, :document?
# A {% tag %}...{% endtag %} node?
def block_tag?
@value.is_a?(Liquid::Block)
end
# The body of blocks
def block_body?
@value.is_a?(Liquid::BlockBody)
end
    # A block type of node?
def block?
block_tag? || block_body? || document?
end
def schema?
@value.is_a?(ThemeCheck::Tags::Schema)
end
# The `:under_score_name` of this type of node. Used to dispatch to the `on_<type_name>`
# and `after_<type_name>` check methods.
def type_name
@type_name ||= StringHelpers.underscore(StringHelpers.demodulize(@value.class.name)).to_sym
end
def source
theme_file&.source
end
def block_start_markup
source[block_start_start_index...block_start_end_index]
end
def block_start_start_index
@block_start_start_index ||= if inside_liquid_tag?
backtrack_on_whitespace(source, start_index, /[ \t]/)
elsif tag?
backtrack_on_whitespace(source, start_index) - start_token.length
else
position.start_index - start_token.length
end
end
def block_start_end_index
@block_start_end_index ||= position.end_index + end_token.size
end
def block_end_markup
source[block_end_start_index...block_end_end_index]
end
def block_end_start_index
return block_start_end_index unless tag? && block?
@block_end_start_index ||= block_end_match&.begin(0) || block_start_end_index
end
def block_end_end_index
return block_end_start_index unless tag? && block?
@block_end_end_index ||= block_end_match&.end(0) || block_start_end_index
end
def outer_markup_start_index
outer_markup_position.start_index
end
def outer_markup_end_index
outer_markup_position.end_index
end
def outer_markup_start_row
outer_markup_position.start_row
end
def outer_markup_start_column
outer_markup_position.start_column
end
def outer_markup_end_row
outer_markup_position.end_row
end
def outer_markup_end_column
outer_markup_position.end_column
end
def inner_markup_start_index
inner_markup_position.start_index
end
def inner_markup_end_index
inner_markup_position.end_index
end
def inner_markup_start_row
inner_markup_position.start_row
end
def inner_markup_start_column
inner_markup_position.start_column
end
def inner_markup_end_row
inner_markup_position.end_row
end
def inner_markup_end_column
inner_markup_position.end_column
end
WHITESPACE = /\s/
# Is this node inside a `{% liquid ... %}` block?
def inside_liquid_tag?
# What we're doing here is starting at the start of the tag and
# backtrack on all the whitespace until we land on something. If
# that something is {% or %-, then we can safely assume that
# we're inside a full tag and not a liquid tag.
@inside_liquid_tag ||= if tag? && start_index && source
i = 1
i += 1 while source[start_index - i] =~ WHITESPACE && i < start_index
first_two_backtracked_characters = source[(start_index - i - 1)..(start_index - i)]
first_two_backtracked_characters != "{%" && first_two_backtracked_characters != "%-"
else
false
end
end
# Is this node inside a tag or variable that starts by removing whitespace. i.e. {%- or {{-
def whitespace_trimmed_start?
@whitespace_trimmed_start ||= if start_index && source && !inside_liquid_tag?
i = 1
i += 1 while source[start_index - i] =~ WHITESPACE && i < start_index
source[start_index - i] == "-"
else
false
end
end
    # Is this node inside a tag or variable that ends by removing whitespace. i.e. -%} or -}}
def whitespace_trimmed_end?
@whitespace_trimmed_end ||= if end_index && source && !inside_liquid_tag?
i = 0
i += 1 while source[end_index + i] =~ WHITESPACE && i < source.size
source[end_index + i] == "-"
else
false
end
end
def start_token
if inside_liquid_tag?
""
elsif variable? && source[start_index - 3..start_index - 1] == "{{-"
"{{-"
elsif variable? && source[start_index - 2..start_index - 1] == "{{"
"{{"
elsif tag? && whitespace_trimmed_start?
"{%-"
elsif tag?
"{%"
else
""
end
end
def end_token
if inside_liquid_tag? && source[end_index] == "\n"
"\n"
elsif inside_liquid_tag?
""
elsif variable? && source[end_index...end_index + 3] == "-}}"
"-}}"
elsif variable? && source[end_index...end_index + 2] == "}}"
"}}"
elsif tag? && whitespace_trimmed_end?
"-%}"
elsif tag?
"%}"
else # this could happen because we're in an assign statement (variable)
""
end
end
private
def position
@position ||= Position.new(
markup,
theme_file&.source,
line_number_1_indexed: line_number
)
end
def outer_markup_position
@outer_markup_position ||= StrictPosition.new(
outer_markup,
source,
block_start_start_index,
)
end
def inner_markup_position
@inner_markup_position ||= StrictPosition.new(
inner_markup,
source,
block_start_end_index,
)
end
# Here we're hacking around a glorious bug in Liquid that makes it so the
# line_number and markup of a tag is wrong if there's whitespace
# between the tag_name and the markup of the tag.
#
# {%
# render
# 'foo'
# %}
#
# Returns a raw value of "render 'foo'\n".
# The "\n " between render and 'foo' got replaced by a single space.
#
# And the line number is the one of 'foo'\n%}. Yay!
#
# This breaks any kind of position logic we have since that string
# does not exist in the theme_file.
def tag_markup
return @tag_markup if @tag_markup
l = 1
scanner = StringScanner.new(source)
scanner.scan_until(/\n/) while l < @value.line_number && (l += 1)
start = scanner.charpos
tag_name = @value.tag_name
tag_markup = @value.instance_variable_get('@markup')
# This is tricky, if the tag_markup is empty, then the tag could
# either start on a previous line, or the tag could start on the
# same line.
#
# Consider this:
# 1 {%
# 2 comment
# 3 %}{% endcomment %}{%comment%}
#
# Both comments would markup == "" AND line_number == 3
#
# There's no way to determine which one is the correct one, but
# we'll try our best to at least give you one.
#
# To screw with you even more, the name of the tag could be
# outside of a tag on the same line :) But I won't do anything
# about that (yet?).
#
# {% comment
# %}comment{% endcomment %}
if tag_markup.empty?
eol = source.index("\n", start) || source.size
# OK here I'm trying one of two things. Either tag_start is on
# the same line OR tag_start is on a previous line. The line
# number would be at the end of the whitespace after tag_name.
unless (tag_start = source.index(tag_name, start)) && tag_start < eol
tag_start = start
tag_start -= 1 while source[tag_start - 1] =~ WHITESPACE
tag_start -= @value.tag_name.size
# keep track of the error in line_number
@line_number_offset = source[tag_start...start].count("\n")
end
tag_end = tag_start + tag_name.size
tag_end += 1 while source[tag_end] =~ WHITESPACE
# return the real raw content
@tag_markup = source[tag_start...tag_end]
return @tag_markup
# See https://github.com/Shopify/theme-check/pull/423/files#r701936559 for a detailed explanation
# of why we're doing the check below.
#
# TL;DR it's because line_numbers are not enough to accurately
# determine the position of the raw markup and because that
# markup could be present on the same line outside of a Tag. e.g.
#
# uhoh {% if uhoh %}
elsif (match = /#{tag_name} +#{Regexp.escape(tag_markup)}/.match(source, start))
return @tag_markup = match[0]
end
# find the markup
markup_start = source.index(tag_markup, start)
markup_end = markup_start + tag_markup.size
# go back until you find the tag_name
tag_start = markup_start
tag_start -= 1 while source[tag_start - 1] =~ WHITESPACE
tag_start -= tag_name.size
# keep track of the error in line_number
@line_number_offset = source[tag_start...markup_start].count("\n")
# return the real raw content
@tag_markup = source[tag_start...markup_end]
end
# Returns the index of the leftmost consecutive whitespace
# starting from start going backwards.
#
# e.g. backtrack_on_whitespace("01 45", 4) would return 2.
# e.g. backtrack_on_whitespace("{% render %}", 5) would return 2.
def backtrack_on_whitespace(string, start, whitespace = WHITESPACE)
i = start
i -= 1 while string[i - 1] =~ whitespace && i > 0
i
end
def find_block_delimiter(start_index)
return nil unless tag? && block?
tag_start, tag_end = if inside_liquid_tag?
[
/^\s*#{@value.tag_name}\s*/,
/^\s*end#{@value.tag_name}\s*/,
]
else
[
/#{Liquid::TagStart}-?\s*#{@value.tag_name}/mi,
/#{Liquid::TagStart}-?\s*end#{@value.tag_name}\s*-?#{Liquid::TagEnd}/mi,
]
end
# This little algorithm below finds the _correct_ block delimiter
# (endif, endcase, endcomment) for the current tag. What do I
# mean by correct? It means the one you'd expect: we can't do the
# naive regex find, since you can have nested ifs, fors, etc.
#
# It works by having a stack, pushing onto the stack when we
# open a tag of our type_name. And popping when we find a
# closing tag of our type_name.
#
# When the stack is empty, we return the end tag match.
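#
# Worked example (assumed source, for illustration only):
#
#   {% if a %}{% if b %}{% endif %}{% endif %}
#
# Scanning from just after the outer "{% if a %}" with stack = ["open"]:
# the inner "{% if b %}" starts before the first "{% endif %}", so we
# push then pop (the stack stays at one element) and continue scanning
# after that endif; on the next pass only the second "{% endif %}"
# matches, the pop empties the stack, and that match is returned as the
# outer tag's delimiter.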
index = start_index
stack = []
stack.push("open")
loop do
tag_start_match = tag_start.match(source, index)
tag_end_match = tag_end.match(source, index)
return nil unless tag_end_match
# We have found a tag_start and it appeared _before_ the
# tag_end that we found, thus we push it onto the stack.
if tag_start_match && tag_start_match.end(0) < tag_end_match.end(0)
stack.push("open")
end
# We have found a tag_end, therefore we pop
stack.pop
# Nothing left on the stack, we're done.
break tag_end_match if stack.empty?
# We keep looking from the end of the end tag we just found.
index = tag_end_match.end(0)
end
end
def block_end_match
@block_end_match ||= find_block_delimiter(block_start_end_index)
end
end
end
| 28.302804 | 103 | 0.621186 |
623c7cfb63fc9145b2c8f7787359d3bf614e1af2 | 7,211 | require 'vanagon/platform/dsl'
describe 'Vanagon::Platform::DSL' do
let (:deb_platform_block) { "platform 'debian-test-fixture' do |plat| end" }
let (:el_5_platform_block) { "platform 'el-5-fixture' do |plat| end" }
let (:el_6_platform_block) { "platform 'el-6-fixture' do |plat| end" }
let (:sles_platform_block) { "platform 'sles-test-fixture' do |plat| end" }
let (:cicso_wrlinux_platform_block) { "platform 'cisco-wrlinux-fixture' do |plat| end" }
let (:solaris_10_platform_block) { "platform 'solaris-10-fixture' do |plat| end" }
let (:solaris_11_platform_block) { "platform 'solaris-11-fixture' do |plat| end" }
let(:apt_definition) { "http://builds.delivery.puppetlabs.net/puppet-agent/0.2.1/repo_configs/deb/pl-puppet-agent-0.2.1-wheezy" }
let(:apt_definition_deb) { "http://builds.delivery.puppetlabs.net/puppet-agent/0.2.1/repo_configs/deb/pl-puppet-agent-0.2.1-wheezy.deb" }
let(:apt_definition_gpg) { "http://pl-build-tools.delivery.puppetlabs.net/debian/keyring.gpg" }
let(:el_definition) { "http://builds.delivery.puppetlabs.net/puppet-agent/0.2.1/repo_configs/rpm/pl-puppet-agent-0.2.1-el-7-x86_64" }
let(:el_definition_rpm) { "http://builds.delivery.puppetlabs.net/puppet-agent/0.2.1/repo_configs/rpm/pl-puppet-agent-0.2.1-release.rpm" }
let(:sles_definition) { "http://builds.delivery.puppetlabs.net/puppet-agent/0.2.2/repo_configs/rpm/pl-puppet-agent-0.2.2-sles-12-x86_64" }
let(:sles_definition_rpm) { "http://builds.delivery.puppetlabs.net/puppet-agent/0.2.1/repo_configs/rpm/pl-puppet-agent-0.2.1-release-sles.rpm" }
let(:cisco_wrlinux_definition) { "http://builds.delivery.puppetlabs.net/puppet-agent/0.2.1/repo_configs/rpm/pl-puppet-agent-0.2.1-cisco-wrlinux-5-x86_64.repo" }
let(:hex_value) { "906264d248061b0edb1a576cc9c8f6c7" }
before :each do
# suppress `#warn` output during tests
allow_any_instance_of(Vanagon::Platform::DSL).to receive(:warn)
end
# These apt_repo, yum_repo, and zypper_repo methods are all deprecated.
describe '#apt_repo' do
it "grabs the file and adds .list to it" do
plat = Vanagon::Platform::DSL.new('debian-test-fixture')
expect(SecureRandom).to receive(:hex).and_return(hex_value)
plat.instance_eval(deb_platform_block)
plat.apt_repo(apt_definition)
expect(plat._platform.provisioning).to include("curl -o '/etc/apt/sources.list.d/#{hex_value}-pl-puppet-agent-0.2.1-wheezy.list' '#{apt_definition}'")
end
it "installs a deb when given a deb" do
plat = Vanagon::Platform::DSL.new('debian-test-fixture')
plat.instance_eval(deb_platform_block)
plat.apt_repo(apt_definition_deb)
expect(plat._platform.provisioning).to include("curl -o local.deb '#{apt_definition_deb}' && dpkg -i local.deb; rm -f local.deb")
end
it "installs a gpg key if given one" do
plat = Vanagon::Platform::DSL.new('debian-test-fixture')
expect(SecureRandom).to receive(:hex).and_return(hex_value).twice
plat.instance_eval(deb_platform_block)
plat.apt_repo(apt_definition, apt_definition_gpg)
expect(plat._platform.provisioning).to include("curl -o '/etc/apt/trusted.gpg.d/#{hex_value}-keyring.gpg' '#{apt_definition_gpg}'")
end
end
describe '#yum_repo' do
it "grabs the file and adds .repo to it" do
plat = Vanagon::Platform::DSL.new('el-5-fixture')
expect(SecureRandom).to receive(:hex).and_return(hex_value)
plat.instance_eval(el_5_platform_block)
plat.yum_repo(el_definition)
expect(plat._platform.provisioning).to include("curl -o '/etc/yum.repos.d/#{hex_value}-pl-puppet-agent-0.2.1-el-7-x86_64.repo' '#{el_definition}'")
end
# This test currently covers wrlinux 5 and 7
it "downloads the repo file to the correct yum location for wrlinux" do
plat = Vanagon::Platform::DSL.new('cisco-wrlinux-fixture')
expect(SecureRandom).to receive(:hex).and_return(hex_value)
plat.instance_eval(cicso_wrlinux_platform_block)
plat.yum_repo(cisco_wrlinux_definition)
expect(plat._platform.provisioning).to include("curl -o '/etc/yum/repos.d/#{hex_value}-pl-puppet-agent-0.2.1-cisco-wrlinux-5-x86_64.repo' '#{cisco_wrlinux_definition}'")
end
describe "installs a rpm when given a rpm" do
it 'uses rpm everywhere' do
plat = Vanagon::Platform::DSL.new('el-5-fixture')
plat.instance_eval(el_5_platform_block)
plat.yum_repo(el_definition_rpm)
expect(plat._platform.provisioning).to include("rpm -q curl > /dev/null || yum -y install curl")
expect(plat._platform.provisioning).to include("curl -o local.rpm '#{el_definition_rpm}'; rpm -Uvh local.rpm; rm -f local.rpm")
end
end
end
describe '#zypper_repo' do
it "grabs the file and adds .repo to it" do
plat = Vanagon::Platform::DSL.new('sles-test-fixture')
plat.instance_eval(sles_platform_block)
plat.zypper_repo(sles_definition)
expect(plat._platform.provisioning).to include("yes | zypper -n --no-gpg-checks ar -t YUM --repo '#{sles_definition}'")
end
it "installs a sles rpm when given a rpm" do
plat = Vanagon::Platform::DSL.new('sles-test-fixture')
plat.instance_eval(sles_platform_block)
plat.zypper_repo(sles_definition_rpm)
expect(plat._platform.provisioning).to include("curl -o local.rpm '#{sles_definition_rpm}'; rpm -Uvh local.rpm; rm -f local.rpm")
end
end
describe '#add_build_repository' do
it 'hands off to the platform specific method if defined' do
plat = Vanagon::Platform::DSL.new('solaris-test-fixture')
plat.instance_eval(solaris_11_platform_block)
plat.add_build_repository("http://solaris-repo.puppetlabs.com", "puppetlabs.com")
expect(plat._platform.provisioning).to include("pkg set-publisher -G '*' -g http://solaris-repo.puppetlabs.com puppetlabs.com")
end
it 'raises an error if the platform does not define "add_repository"' do
plat = Vanagon::Platform::DSL.new('solaris-test-fixture')
plat.instance_eval(solaris_10_platform_block)
expect {plat.add_build_repository("anything")}.to raise_error(Vanagon::Error, /Adding a build repository not defined/)
end
end
describe '#abs_resource_name' do
it 'sets the instance variable on platform' do
plat = Vanagon::Platform::DSL.new('solaris-test-fixture')
plat.instance_eval(solaris_10_platform_block)
plat.abs_resource_name 'solaris-10-x86_64'
expect(plat._platform.abs_resource_name).to eq('solaris-10-x86_64')
end
end
describe '#vmpooler_template' do
it 'sets the instance variable on platform' do
plat = Vanagon::Platform::DSL.new('solaris-test-fixture')
plat.instance_eval(solaris_10_platform_block)
plat.vmpooler_template 'solaris-10-x86_64'
expect(plat._platform.vmpooler_template).to eq('solaris-10-x86_64')
end
it 'is called by vcloud_name as a deprecation' do
plat = Vanagon::Platform::DSL.new('solaris-test-fixture')
plat.instance_eval(solaris_10_platform_block)
plat.vcloud_name 'solaris-11-x86_64'
expect(plat._platform.vmpooler_template).to eq('solaris-11-x86_64')
end
end
end
| 51.877698 | 175 | 0.712939 |
2136f06a13b657b5d2293b1f1bc2e58ccfcfa3dd | 2,397 | # File: config.rb
require 'yaml'
require_relative 'file-action'
module Cukedep # Module used as a namespace
FileMetaData = Struct.new(:name)
Config = Struct.new(
:feature_encoding, # The character encoding of feature files
:proj_dir, # The directory of the cucumber project
:feature2id, # Meta-data about the feature => feature id report
:id2feature, # Meta-data about the feature id => feature report
:graph_file, # Meta-data about the dependency graph file
:rake_file, # Name of the output rake file
:cucumber_args, # Command-line syntax to use for the cucumber application
# File actions triggered at Cucumber invocation events
:before_all_f_actions,
:before_each_f_actions,
:after_each_f_actions,
:after_all_f_actions
)
# Re-open the class for further customisation
# Configuration object for the Cukedep application.
class Config
# Factory method. Build a config object with default settings.
def self.default
instance = Config.new(
'UTF-8',
nil,
FileMetaData.new('feature2id.csv'),
FileMetaData.new('feature2id.csv'),
FileMetaData.new('dependencies.dot'),
'cukedep.rake',
[]
)
file_action_attrs.each do |attr|
instance[attr] = empty_action_triplet
end
return instance
end
# Read the YAML file with the specified name from the current working directory.
# If that file does not exist, then return an instance with default values.
def self.load_cfg(filename)
# TODO: validation
instance = File.exist?(filename) ? YAML.load_file(filename) : default
return instance
end
# Save the Config object to a YAML file.
def write(filename)
File.open(filename, 'w') { |f| YAML.dump(self, f) }
end
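# Typical round-trip, as a sketch (the file name below is illustrative):
#
#   cfg = Cukedep::Config.load_cfg('cukedep.yml') # defaults if the file is absent
#   cfg.rake_file = 'cukedep.rake'
#   cfg.write('cukedep.yml')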
# Purpose: get the list of attributes referencing
# a file action triplet.
def self.file_action_attrs
return %I[
before_all_f_actions
before_each_f_actions
after_each_f_actions
after_all_f_actions
]
end
# Return Hash config for a no-op action triplet.
def self.empty_action_triplet
{
save_patterns: [],
save_subdir: '',
delete_patterns: [],
delete_subdir: '',
copy_patterns: [],
copy_subdir: ''
}
end
end # class
end # module
# End of file
| 27.872093 | 80 | 0.657906 |
38a471670d195bcfa706580e402594d8c371938f | 557 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe AcaEntities::Operations::Encryption::Decrypt do
subject { described_class.new.call(input) }
context 'When any type of value is passed' do
let(:encrypted_value) {AcaEntities::Operations::Encryption::Encrypt.new.call({ value: "Hello World" }).value! }
let(:input) { { value: encrypted_value } }
it 'should return success with decrypted value' do
expect(subject).to be_a Dry::Monads::Result::Success
expect(subject.value!).to eq "Hello World"
end
end
end | 30.944444 | 115 | 0.716338 |
6175392cff7ef879203a2fd6c34d40c90d06832a | 1,486 | # frozen_string_literal: true
if RUBY_ENGINE == "jruby"
require "active_record/connection_adapters/jdbcpostgresql_adapter"
else
require "pg"
end
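# A minimal database.yml sketch that routes through this adapter; the
# host and database names below are placeholders, not taken from this file:
#
#   development:
#     adapter: postgis
#     host: localhost
#     database: my_app_development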
module ActiveRecord # :nodoc:
module ConnectionHandling # :nodoc:
if RUBY_ENGINE == "jruby"
def postgis_connection(config)
config[:adapter_class] = ConnectionAdapters::PostGISAdapter
postgresql_connection(config)
end
alias_method :jdbcpostgis_connection, :postgis_connection
else
# Based on the default <tt>postgresql_connection</tt> definition from ActiveRecord.
# https://github.com/rails/rails/blob/master/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
# FULL REPLACEMENT because we need to create a different class.
def postgis_connection(config)
conn_params = config.symbolize_keys.compact
# Map ActiveRecords param names to PGs.
conn_params[:user] = conn_params.delete(:username) if conn_params[:username]
conn_params[:dbname] = conn_params.delete(:database) if conn_params[:database]
# Forward only valid config params to PG.connect
valid_conn_param_keys = PG::Connection.conndefaults_hash.keys + [:requiressl]
conn_params.slice!(*valid_conn_param_keys)
ConnectionAdapters::PostGISAdapter.new(
ConnectionAdapters::PostGISAdapter.new_client(conn_params),
logger,
conn_params,
config
)
end
end
end
end
| 31.617021 | 123 | 0.711306 |
6157279351d241afd36e456d702de0bc61f35c46 | 4,039 | # frozen_string_literal: true
require 'rails_helper'
include Warden::Test::Helpers
# NOTE: If you generated more than one work, you have to set "js: true"
RSpec.feature 'Create a DenverMultimedia' do
let(:user) { User.new(email: '[email protected]') { |u| u.save(validate: false) } }
let(:admin_set_id) { AdminSet.find_or_create_default_admin_set_id }
let(:permission_template) { Hyrax::PermissionTemplate.find_or_create_by!(source_id: admin_set_id) }
let(:workflow) { Sipity::Workflow.create!(active: true, name: 'test-workflow', permission_template: permission_template) }
let(:work_type) { "denver_multimedia" }
let(:new_work_path) { "concern/#{work_type.to_s.pluralize}/new" }
before do
# Create a single action that can be taken
Sipity::WorkflowAction.create!(name: 'submit', workflow: workflow)
# Grant the user access to deposit into the admin set.
Hyrax::PermissionTemplateAccess.create!(
permission_template_id: permission_template.id,
agent_type: 'user',
agent_id: user.user_key,
access: 'deposit'
)
login_as user
visit new_work_path
end
it 'renders the new Denver Work page' do
expect(page).to have_content "Add New Denver Multimedia"
end
it 'adds files to work' do
click_link "Files"
expect(page).to have_content "Add files"
expect(page).to have_content "Add folder"
within('span#addfiles') do
attach_file("files[]", Rails.root.join('spec', 'fixtures', 'hyrax', 'image.jp2'), visible: false)
attach_file("files[]", Rails.root.join('spec', 'fixtures', 'hyrax', 'jp2_fits.xml'), visible: false)
end
end
it 'applies work visibility' do
find('body').click
choose("#{work_type}_visibility_open")
expect(page).to have_content('Please note, making something visible to the world (i.e. marking this as Public) may be viewed as publishing which could impact your ability to')
end
it 'saves the work' do
click_link "Descriptions"
fill_in("#{work_type}_title", with: 'My Test Work')
select('Organisational', from: "#{work_type}_creator__creator_name_type")
fill_in("#{work_type}_creator__creator_organization_name", with: 'Ubiquity Press')
check('agreement')
click_on('Save')
expect(page).to have_content('My Test Work')
expect(page).to have_content('Public')
expect(page).to have_content("Your files are being processed by Hyku in the background.")
end
context "when rendering the form" do
before do
click_on "Additional fields"
end
it "renders all simple worktype fields" do
worktype_simple_fields = %w[title alt_title resource_type institution abstract keyword subject
library_of_congress_classification publisher place_of_publication
event_title event_location license rights_holder
rights_statement medium duration language add_info]
worktype_simple_fields.each do |field|
expect(page).to have_field("#{work_type}_#{field}")
end
end
it "renders complex name fields" do
worktype_complex_name_fields = %w[creator contributor]
worktype_complex_name_fields.each do |field|
expect(page).to have_field("#{work_type}_#{field}__#{field}_family_name")
expect(page).to have_field("#{work_type}_#{field}__#{field}_given_name")
end
end
it "renders complex identifier fields" do
worktype_complex_fields = %w[alternate_identifier related_identifier]
worktype_complex_fields.each do |field|
expect(page).to have_field("#{work_type}_#{field}__#{field}")
end
end
it "renders all date fields" do
worktype_date_fields = %w[date_published]
worktype_date_fields.each do |field|
expect(page).to have_field("#{work_type}_#{field}__#{field}_year")
expect(page).to have_field("#{work_type}_#{field}__#{field}_month")
expect(page).to have_field("#{work_type}_#{field}__#{field}_day")
end
end
end
end
| 39.598039 | 179 | 0.693488 |
01e4f269ddc58c372b80a63496cc6b21efa712e8 | 3,048 | #
# Copyright:: 2020, Chef Software, Inc.
# Author:: Tim Smith (<[email protected]>)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module RuboCop
module Cop
module Chef
module ChefModernize
# There is no need to check if the chef_gem resource supports compile_time as Chef Infra Client 12.1 and later support the compile_time property.
#
# # bad
# chef_gem 'ultradns-sdk' do
# compile_time true if Chef::Resource::ChefGem.method_defined?(:compile_time)
# action :nothing
# end
#
# chef_gem 'ultradns-sdk' do
# compile_time true if Chef::Resource::ChefGem.instance_methods(false).include?(:compile_time)
# action :nothing
# end
#
# chef_gem 'ultradns-sdk' do
# compile_time true if respond_to?(:compile_time)
# action :nothing
# end
#
# # good
# chef_gem 'ultradns-sdk' do
# compile_time true
# action :nothing
# end
#
class RespondToCompileTime < Cop
extend TargetChefVersion
minimum_target_chef_version '12.1'
MSG = 'There is no need to check if the chef_gem resource supports compile_time as Chef Infra Client 12.1 and later support the compile_time property.'.freeze
def_node_matcher :compile_time_method_defined?, <<-PATTERN
(if
{
(send
(const
(const
(const nil? :Chef) :Resource) :ChefGem) :method_defined?
(sym :compile_time))
(send
(send
(const
(const
(const nil? :Chef) :Resource) :ChefGem) :instance_methods
(false)) :include?
(sym :compile_time))
(send nil? :respond_to?
(sym :compile_time))
}
(send nil? :compile_time
$(_)) nil?)
PATTERN
def on_if(node)
compile_time_method_defined?(node) do
add_offense(node, location: :expression, message: MSG, severity: :refactor)
end
end
def autocorrect(node)
lambda do |corrector|
compile_time_method_defined?(node) do |val|
corrector.replace(node.loc.expression, "compile_time #{val.source}")
end
end
end
end
end
end
end
end
| 32.425532 | 168 | 0.567913 |
187297bb0a1305dec92d1835a2cc2ebad58c0b93 | 3,476 | require 'browsermob/proxy'
require 'capybara/chromedriver/logger'
require 'capybara/cucumber'
require 'nokogiri'
require 'ptools'
require 'plek'
require 'selenium-webdriver'
require 'uri'
require 'webdrivers'
# Set up environment
case ENV["ENVIRONMENT"]
when "test"
ENV["GOVUK_APP_DOMAIN"] ||= "test.publishing.service.gov.uk"
ENV["GOVUK_WEBSITE_ROOT"] ||= "https://www.test.publishing.service.gov.uk"
when "integration"
ENV["GOVUK_APP_DOMAIN"] ||= "integration.publishing.service.gov.uk"
ENV["GOVUK_WEBSITE_ROOT"] ||= "https://www.integration.publishing.service.gov.uk"
when "staging", "staging_aws"
ENV["GOVUK_APP_DOMAIN"] ||= "staging.publishing.service.gov.uk"
ENV["GOVUK_WEBSITE_ROOT"] ||= "https://www.staging.publishing.service.gov.uk"
when "production", "production_aws"
ENV["GOVUK_APP_DOMAIN"] ||= "publishing.service.gov.uk"
ENV["GOVUK_WEBSITE_ROOT"] ||= "https://www.gov.uk"
else
raise "ENVIRONMENT should be one of test, integration, staging, staging_aws, production or production_aws"
end
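# Example invocation (environment and token values are illustrative):
#   ENVIRONMENT=integration RATE_LIMIT_TOKEN=abc123 bundle exec cucumber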
# Set up basic URLs
ENV["GOVUK_DRAFT_WEBSITE_ROOT"] ||= Plek.new.external_url_for("draft-origin")
Capybara.app_host = ENV["GOVUK_WEBSITE_ROOT"]
# Set up proxy server (used to manipulate HTTP headers etc since Selenium doesn't
# support this) on a random port between 3222 and 3229
proxy_port = (3222..3229).to_a.sample
server = BrowserMob::Proxy::Server.new(
"./bin/browserup-proxy",
port: proxy_port,
log: ENV.fetch("ENABLE_BROWSERMOB_LOGS", false),
timeout: 20
)
server.start
proxy = server.create_proxy
# Make the proxy available to the tests
$proxy = proxy
# Add request headers
if ENV["RATE_LIMIT_TOKEN"]
proxy.header({ "Rate-Limit-Token" => ENV["RATE_LIMIT_TOKEN"] })
end
if ENV["AUTH_USERNAME"] && ENV["AUTH_PASSWORD"]
proxy.basic_authentication(
URI.parse(ENV["GOVUK_WEBSITE_ROOT"]).host,
ENV["AUTH_USERNAME"],
ENV["AUTH_PASSWORD"],
)
end
# Blacklist YouTube to prevent cross-site errors
proxy.blacklist(/^https:\/\/www\.youtube\.com/i, 200)
proxy.blacklist(/^https:\/\/s\.ytimg\.com/i, 200)
# To avoid sending events to Google Analytics
proxy.blacklist(/^https:\/\/www\.google\-analytics\.com/i, 200)
# Licensify admin doesn't have favicon.ico so block requests to prevent errors
proxy.blacklist(/^https:\/\/licensify-admin(.*)\.publishing\.service\.gov\.uk\/favicon\.ico$/i, 200)
# Use Chrome in headless mode
Capybara.register_driver :headless_chrome do |app|
capabilities = Selenium::WebDriver::Remote::Capabilities.chrome(
acceptInsecureCerts: true,
loggingPrefs: { browser: "ALL" },
proxy: { type: :manual, ssl: "#{proxy.host}:#{proxy.port}" }
)
options = Selenium::WebDriver::Chrome::Options.new
options.add_argument("--headless")
options.add_argument("--disable-gpu")
options.add_argument("--disable-xss-auditor")
options.add_argument("--user-agent=Smokey\ Test\ \/\ Ruby")
options.add_argument("--no-sandbox") if ENV.key?("NO_SANDBOX")
Capybara::Selenium::Driver.new(
app,
browser: :chrome,
options: options,
desired_capabilities: capabilities
)
end
Capybara.default_driver = :headless_chrome
Capybara.javascript_driver = :headless_chrome
# Only raise for severe JavaScript errors and filter our 404s and CORS messages
Capybara::Chromedriver::Logger.raise_js_errors = true
Capybara::Chromedriver::Logger.filter_levels = %i(debug info warning)
Capybara::Chromedriver::Logger.filters = [
/Failed to load resource/i,
/The target origin provided/i,
]
| 33.423077 | 108 | 0.736479 |
4af79395ac2efcfb2441275dc9020a7817bd753d | 17,585 | module Facebooker
module Rails
# ActionMailer-like module for publishing Facebook messages
#
# To use, create a subclass and define methods
# Each method should start by calling send_as to specify the type of message
# Valid options are :email, :notification, :user_action, :profile, and :ref
#
#
# Below is an example of each type
#
# class TestPublisher < Facebooker::Rails::Publisher
# # The new message templates are supported as well
# # First, create a method that contains your templates:
# # You may include multiple one line story templates and short story templates
# # but only one full story template
# # Your most specific template should be first
# #
# # Before using, you must register your template by calling register. For this example
# # You would call TestPublisher.register_publish_action
# # Registering the template will store the template id returned from Facebook in the
# # facebook_templates table that is created when you create your first publisher
# def publish_action_template
# one_line_story_template "{*actor*} did stuff with {*friend*}"
# one_line_story_template "{*actor*} did stuff"
# short_story_template "{*actor*} has a title {*friend*}", render(:partial=>"short_body")
# short_story_template "{*actor*} has a title", render(:partial=>"short_body")
# full_story_template "{*actor*} has a title {*friend*}", render(:partial=>"full_body")
# action_links action_link("My text {*template_var*}","{*link_url*}")
# end
#
# # To send a registered template, you need to create a method to set the data
# # The publisher will look up the template id from the facebook_templates table
# def publish_action(f)
# send_as :user_action
# from f
# story_size SHORT # or ONE_LINE or FULL
# data :friend=>"Mike"
# end
#
#
# # Provide a from user to send a general notification
# # if from is nil, this will send an announcement
# def notification(to,f)
# send_as :notification
# recipients to
# from f
# fbml "Not"
# end
#
# def email(to,f)
# send_as :email
# recipients to
# from f
# title "Email"
# fbml 'text'
# text fbml
# end
# # This will render the profile in /users/profile.erb
# # it will set @user to user_to_update in the template
# # The mobile profile will be rendered from the app/views/test_publisher/_mobile.erb
# # template
# def profile_update(user_to_update,user_with_session_to_use)
# send_as :profile
# from user_with_session_to_use
# to user_to_update
# profile render(:action=>"/users/profile",:assigns=>{:user=>user_to_update})
# profile_action "A string"
# mobile_profile render(:partial=>"mobile",:assigns=>{:user=>user_to_update})
# end
#
# # Update the given handle ref with the content from a
# # template
# def ref_update(user)
# send_as :ref
# from user
# fbml render(:action=>"/users/profile",:assigns=>{:user=>user_to_update})
# handle "a_ref_handle"
# end
#
#
# To send a message, use ActionMailer like semantics
# TestPublisher.deliver_action(@user)
#
# For testing, you may want to create an instance of the underlying message without sending it
# TestPublisher.create_action(@user)
# will create and return an instance of Facebooker::Feeds::Action
#
# Publisher makes many helpers available, including the linking and asset helpers
class Publisher
#story sizes from the Facebooker API
ONE_LINE=1
SHORT=2
FULL=4
def initialize
@controller = PublisherController.new
end
# use facebook options everywhere
def request_comes_from_facebook?
true
end
class FacebookTemplate < ::ActiveRecord::Base
cattr_accessor :template_cache
self.template_cache = {}
def self.inspect(*args)
"FacebookTemplate"
end
def template_changed?(hash)
if respond_to?(:content_hash)
content_hash != hash
else
false
end
end
class << self
def register(klass,method)
publisher = setup_publisher(klass,method)
template_id = Facebooker::Session.create.register_template_bundle(publisher.one_line_story_templates,publisher.short_story_templates,publisher.full_story_template,publisher.action_links)
template = find_or_initialize_by_template_name(template_name(klass,method))
template.bundle_id = template_id
template.content_hash = hashed_content(klass,method) if template.respond_to?(:content_hash)
template.save!
cache(klass,method,template)
template
end
def for_class_and_method(klass,method)
find_cached(klass,method)
end
def bundle_id_for_class_and_method(klass,method)
for_class_and_method(klass,method).bundle_id
end
def cache(klass,method,template)
template_cache[template_name(klass,method)] = template
end
def clear_cache!
self.template_cache = {}
end
def find_cached(klass,method)
template_cache[template_name(klass,method)] || find_in_db(klass,method)
end
def find_in_db(klass,method)
template = find_by_template_name(template_name(klass,method))
if template and template.template_changed?(hashed_content(klass,method))
template.destroy
template = nil
end
if template.nil?
template = register(klass,method)
end
template
end
def setup_publisher(klass,method)
publisher = klass.new
publisher.send method + '_template'
publisher
end
def hashed_content(klass, method)
publisher = setup_publisher(klass,method)
# sort the Hash elements (in the short_story and full_story) before generating MD5
Digest::MD5.hexdigest [publisher.one_line_story_templates,
(publisher.short_story_templates and publisher.short_story_templates.collect{|ss| ss.to_a.sort_by{|e| e[0]}}),
(publisher.full_story_template and publisher.full_story_template.to_a.sort_by{|e| e[0]})
].to_json
end
def template_name(klass,method)
"#{klass.name}::#{method}"
end
end
end
class_inheritable_accessor :master_helper_module
attr_accessor :one_line_story_templates, :short_story_templates, :action_links
cattr_accessor :skip_registry
self.skip_registry = false
class InvalidSender < StandardError; end
class UnknownBodyType < StandardError; end
class UnspecifiedBodyType < StandardError; end
class Email
attr_accessor :title
attr_accessor :text
attr_accessor :fbml
end
class Notification
attr_accessor :fbml
end
class Profile
attr_accessor :profile
attr_accessor :profile_action
attr_accessor :mobile_profile
attr_accessor :profile_main
end
class Ref
attr_accessor :handle
attr_accessor :fbml
end
class UserAction
attr_accessor :data
attr_accessor :target_ids
attr_accessor :body_general
attr_accessor :template_id
attr_accessor :template_name
attr_accessor :story_size
def target_ids=(val)
@target_ids = val.is_a?(Array) ? val.join(",") : val
end
def data_hash
default_data = story_size.nil? ? {} : {:story_size=>story_size}
default_data.merge(data||{})
end
end
cattr_accessor :ignore_errors
attr_accessor :_body
def recipients(*args)
if args.size==0
@recipients
else
@recipients=args.first
end
end
def from(*args)
if args.size==0
@from
else
@from=args.first
end
end
def send_as(option)
self._body=case option
when :action
Facebooker::Feed::Action.new
when :story
Facebooker::Feed::Story.new
when :templatized_action
Facebooker::Feed::TemplatizedAction.new
when :notification
Notification.new
when :email
Email.new
when :profile
Profile.new
when :ref
Ref.new
when :user_action
UserAction.new
else
raise UnknownBodyType.new("Unknown type to publish")
end
end
def full_story_template(title=nil,body=nil,params={})
if title.nil?
@full_story_template
else
@full_story_template=params.merge(:template_title=>title, :template_body=>body)
end
end
def one_line_story_template(str)
@one_line_story_templates ||= []
@one_line_story_templates << str
end
def short_story_template(title,body,params={})
@short_story_templates ||= []
@short_story_templates << params.merge(:template_title=>title, :template_body=>body)
end
def action_links(*links)
if links.blank?
@action_links
else
@action_links = links
end
end
def method_missing(name,*args)
if args.size==1 and self._body.respond_to?("#{name}=")
self._body.send("#{name}=",*args)
elsif self._body.respond_to?(name)
self._body.send(name,*args)
else
super
end
end
def image(src,target)
{:src=>image_path(src),:href=> target.respond_to?(:to_str) ? target : url_for(target)}
end
def action_link(text,target)
{:text=>text, :href=>target}
end
def requires_from_user?(from,body)
! (announcement_notification?(from,body) or ref_update?(body) or profile_update?(body))
end
def profile_update?(body)
body.is_a?(Profile)
end
def ref_update?(body)
body.is_a?(Ref)
end
def announcement_notification?(from,body)
from.nil? and body.is_a?(Notification)
end
def send_message(method)
@recipients = @recipients.is_a?(Array) ? @recipients : [@recipients]
if from.nil? and @recipients.size==1 and requires_from_user?(from,_body)
@from = @recipients.first
end
# notifications can
# omit the from address
raise InvalidSender.new("Sender must be a Facebooker::User") unless from.is_a?(Facebooker::User) || !requires_from_user?(from,_body)
case _body
when Facebooker::Feed::TemplatizedAction,Facebooker::Feed::Action
from.publish_action(_body)
when Facebooker::Feed::Story
@recipients.each {|r| r.publish_story(_body)}
when Notification
(from.nil? ? Facebooker::Session.create : from.session).send_notification(@recipients,_body.fbml)
when Email
from.session.send_email(@recipients, _body.title, _body.text, _body.fbml)
when Profile
# If recipient and from aren't the same person, create a new user object using the
# userid from recipient and the session from from
@from = Facebooker::User.new(Facebooker::User.cast_to_facebook_id(@recipients.first),Facebooker::Session.create)
@from.set_profile_fbml(_body.profile, _body.mobile_profile, _body.profile_action, _body.profile_main)
when Ref
Facebooker::Session.create.server_cache.set_ref_handle(_body.handle,_body.fbml)
when UserAction
@from.session.publish_user_action(_body.template_id,_body.data_hash,_body.target_ids,_body.body_general)
else
raise UnspecifiedBodyType.new("You must specify a valid send_as")
end
end
# nodoc
# needed for actionview
def logger
RAILS_DEFAULT_LOGGER
end
# nodoc
# delegate to action view. Set up assigns and render
def render(opts)
opts = opts.dup
body = opts.delete(:assigns) || {}
initialize_template_class(body.dup.merge(:controller=>self)).render(opts)
end
def initialize_template_class(assigns)
template_root = "#{RAILS_ROOT}/app/views"
controller_root = File.join(template_root,self.class.controller_path)
#only do this on Rails 2.1
if ActionController::Base.respond_to?(:append_view_path)
# only add the view path once
ActionController::Base.append_view_path(controller_root) unless ActionController::Base.view_paths.include?(controller_root)
end
returning ActionView::Base.new([template_root,controller_root], assigns, self) do |template|
template.controller=self
template.extend(self.class.master_helper_module)
def template.request_comes_from_facebook?
true
end
end
end
self.master_helper_module = Module.new
self.master_helper_module.module_eval do
# url_helper delegates to @controller,
# so we need to define that in the template
# we make it point to the publisher
include ActionView::Helpers::UrlHelper
include ActionView::Helpers::TextHelper
include ActionView::Helpers::TagHelper
include ActionView::Helpers::FormHelper
include ActionView::Helpers::FormTagHelper
include ActionView::Helpers::AssetTagHelper
include Facebooker::Rails::Helpers
#define this for the publisher views
def protect_against_forgery?
@paf ||= ActionController::Base.new.send(:protect_against_forgery?)
end
# url_for calls in publishers tend to want full paths
def url_for(options = {})
super(options.kind_of?(Hash) ? {:only_path => false}.update(options) : options)
end
end
ActionController::Routing::Routes.named_routes.install(self.master_helper_module)
include self.master_helper_module
class <<self
def register_all_templates
all_templates = instance_methods.grep(/_template$/) - %w(short_story_template full_story_template one_line_story_template)
all_templates.each do |template|
template_name=template.sub(/_template$/,"")
puts "Registering #{template_name}"
send("register_"+template_name)
end
end
def method_missing(name,*args)
should_send = false
method = ''
if md = /^create_(.*)$/.match(name.to_s)
method = md[1]
elsif md = /^deliver_(.*)$/.match(name.to_s)
method = md[1]
should_send = true
elsif md = /^register_(.*)$/.match(name.to_s)
return FacebookTemplate.register(self, md[1])
else
super
end
#now create the item
(publisher=new).send(method,*args)
case publisher._body
when UserAction
publisher._body.template_name = method
publisher._body.template_id = FacebookTemplate.bundle_id_for_class_and_method(self,method)
end
should_send ? publisher.send_message(method) : publisher._body
end
def default_url_options
{:host => Facebooker.canvas_server_base + Facebooker.facebook_path_prefix}
end
def controller_path
self.to_s.underscore
end
def helper(*args)
args.each do |arg|
case arg
when Symbol,String
add_template_helper("#{arg.to_s.classify}Helper".constantize)
when Module
add_template_helper(arg)
end
end
end
def add_template_helper(helper_module) #:nodoc:
master_helper_module.send :include,helper_module
include master_helper_module
end
def inherited(child)
super
child.master_helper_module=Module.new
child.master_helper_module.__send__(:include,self.master_helper_module)
child.send(:include, child.master_helper_module)
FacebookTemplate.clear_cache!
end
end
class PublisherController
include Facebooker::Rails::Publisher.master_helper_module
include ActionController::UrlWriter
def self.default_url_options(*args)
Facebooker::Rails::Publisher.default_url_options(*args)
end
end
end
end
end
| 34.821782 | 198 | 0.603867 |
33361d5fbcd83d42ab97f11c63d906ffc1bcab9e | 17 | steam_game 'l4d2' | 17 | 17 | 0.823529 |
18b391cc562a4fb37c87e83effb51a083e3b16ac | 41,720 | require 'cocoapods-core/specification/dsl/attribute_support'
require 'cocoapods-core/specification/dsl/attribute'
require 'cocoapods-core/specification/dsl/platform_proxy'
module Pod
class Specification
#- NOTE ------------------------------------------------------------------#
# The names of the methods defined in this file and the order of the methods are
# relevant for the documentation generated on the
# CocoaPods/cocoapods.github.com repository.
#-------------------------------------------------------------------------#
# A specification describes a version of Pod library. It includes details
# about where the source should be fetched from, what files to use, the
# build settings to apply, and other general metadata such as its name,
# version, and description.
#
# ---
#
# A stub specification file can be generated by the [pod spec
# create](http://guides.cocoapods.org/terminal/commands.html#pod_spec_create) command.
#
# ---
#
# The specification DSL provides great flexibility and dynamism. Moreover,
# the DSL adopts the
# [convention over configuration](http://en.wikipedia.org/wiki/Convention_over_configuration)
# and thus it can be very simple:
#
# Pod::Spec.new do |spec|
# spec.name = 'Reachability'
# spec.version = '3.1.0'
# spec.license = { :type => 'BSD' }
# spec.homepage = 'https://github.com/tonymillion/Reachability'
# spec.authors = { 'Tony Million' => '[email protected]' }
# spec.summary = 'ARC and GCD Compatible Reachability Class for iOS and OS X.'
# spec.source = { :git => 'https://github.com/tonymillion/Reachability.git', :tag => 'v3.1.0' }
# spec.source_files = 'Reachability.{h,m}'
# spec.framework = 'SystemConfiguration'
# end
#
module DSL
extend Pod::Specification::DSL::AttributeSupport
# Deprecations must be required after include AttributeSupport
require 'cocoapods-core/specification/dsl/deprecations'
#-----------------------------------------------------------------------#
# @!group Root specification
#
# A ‘root’ specification stores the information about the specific
# version of a library.
#
# The attributes in this group can only be written to on the ‘root’
# specification, **not** on the ‘sub-specifications’.
#
# ---
#
# The attributes listed in this group are the only ones which are
# required by a podspec.
#
# The attributes of the other groups are offered to refine the podspec
# and follow a convention over configuration approach. A root
# specification can describe these attributes either directly or
# through ‘[sub-specifications](#subspec)’.
#-----------------------------------------------------------------------#
# @!method name=(name)
#
# The name of the Pod.
#
# @example
#
# spec.name = 'AFNetworking'
#
# @param [String] name
# the name of the pod.
#
attribute :name,
:required => true,
:inherited => false,
:multi_platform => false
#------------------#
# @!method version=(version)
#
# The version of the Pod. CocoaPods follows
# [semantic versioning](http://semver.org).
#
# @example
#
# spec.version = '0.0.1'
#
# @param [String] version
# the version of the Pod.
#
root_attribute :version,
:required => true
#-----------------------------------------------------------------------#
# @!method cocoapods_version=(cocoapods_version)
#
# The version of CocoaPods that the specification supports.
#
# @example
#
# spec.cocoapods_version = '>= 0.36'
#
# @param [String] cocoapods_version
# the CocoaPods version that the specification supports.
# CocoaPods follows [semantic versioning](http://semver.org).
#
root_attribute :cocoapods_version
#------------------#
# @!method authors=(authors)
#
# The name and email addresses of the library maintainers, not the
# Podspec maintainer.
#
# @example
#
# spec.author = 'Darth Vader'
#
# @example
#
# spec.authors = 'Darth Vader', 'Wookiee'
#
# @example
#
# spec.authors = { 'Darth Vader' => '[email protected]',
# 'Wookiee' => '[email protected]' }
#
# @param [String, Hash{String=>String}] authors
# the list of the authors of the library and their emails.
#
root_attribute :authors,
:types => [String, Array, Hash],
:container => Hash,
:required => true,
:singularize => true
#------------------#
# @!method social_media_url=(social_media_url)
#
# The URL for the social media contact of the Pod; CocoaPods web
# services can use this.
#
# For example, the @CocoaPodsFeed notifications will include the
# Twitter handle (shortening the description) if the URL is relative to
# Twitter. This does **not** necessarily have to be a Twitter URL, but
# only those are included in the Twitter @CocoaPodsFeed notifications.
#
# @example
#
# spec.social_media_url = 'https://twitter.com/cocoapods'
#
# @example
#
# spec.social_media_url = 'https://groups.google.com/forum/#!forum/cocoapods'
#
# @param [String] social_media_url
# the social media URL.
#
root_attribute :social_media_url
#------------------#
# The keys accepted by the license attribute.
#
LICENSE_KEYS = [:type, :file, :text].freeze
# @!method license=(license)
#
# The license of the Pod.
#
# ---
#
# Unless the source contains a file named `LICENSE.*` or `LICENCE.*`,
# the path of the license file **or** the integral text of the notice
# commonly used for the license type must be specified.
# If a license file is specified, it either must be without a file
# extensions or be one of `txt`, `md`, or `markdown`.
#
# This information is used by CocoaPods to generate acknowledgement
# files (markdown and plist) which can be used in the acknowledgements
# section of the final application.
#
# @example
#
# spec.license = 'MIT'
#
# @example
#
# spec.license = { :type => 'MIT', :file => 'MIT-LICENSE.txt' }
#
# @example
#
# spec.license = { :type => 'MIT', :text => <<-LICENSE
# Copyright 2012
# Permission is granted to...
# LICENSE
# }
#
# @param [String] license
# The type of the license
#
# @overload license=(license)
# @param [String, Hash{Symbol=>String}] license
# @option license [String] :type license type
# @option license [String] :file file containing full license text. Supports txt, md, and markdown
# @option license [String] :text full license text
#
root_attribute :license,
:container => Hash,
:keys => LICENSE_KEYS,
:required => true
#------------------#
# @!method homepage=(homepage)
#
# The URL of the homepage of the Pod.
#
# @example
#
# spec.homepage = 'http://www.example.com'
#
# @param [String] homepage
# the URL of the homepage of the Pod.
#
root_attribute :homepage,
:required => true
#------------------#
# The keys accepted by the hash of the source attribute.
#
SOURCE_KEYS = {
:git => [:tag, :branch, :commit, :submodules].freeze,
:svn => [:folder, :tag, :revision].freeze,
:hg => [:revision].freeze,
:http => [:flatten, :type, :sha256, :sha1].freeze,
:path => nil,
}.freeze
# @!method source=(source)
#
# The location from where the library should be retrieved.
#
# @example Specifying a Git source with a tag. This is how most OSS Podspecs work.
#
# spec.source = { :git => 'https://github.com/AFNetworking/AFNetworking.git',
# :tag => spec.version.to_s }
#
# @example Using a tag prefixed with 'v' and submodules.
#
# spec.source = { :git => 'https://github.com/typhoon-framework/Typhoon.git',
# :tag => "v#{spec.version}", :submodules => true }
#
# @example Using Subversion with a tag.
#
# spec.source = { :svn => 'http://svn.code.sf.net/p/polyclipping/code', :tag => '4.8.8' }
#
# @example Using Mercurial with the same revision as the spec's semantic version string.
#
# spec.source = { :hg => 'https://bitbucket.org/dcutting/hyperbek', :revision => "#{s.version}" }
#
# @example Using HTTP to download a compressed file of the code. It supports zip, tgz, bz2, txz and tar.
#
# spec.source = { :http => 'http://dev.wechatapp.com/download/sdk/WeChat_SDK_iOS_en.zip' }
#
# @example Using HTTP to download a file using a hash to verify the download. It supports sha1 and sha256.
#
# spec.source = { :http => 'http://dev.wechatapp.com/download/sdk/WeChat_SDK_iOS_en.zip',
# :sha1 => '7e21857fe11a511f472cfd7cfa2d979bd7ab7d96' }
#
#
# @overload source=(git)
# @param [Hash] git
# @option git [String] :git git source URI
# @option git [String] :tag version tag
# @option git [Bool] :submodules Whether to checkout submodules
# @option git [String] :branch branch name
# @option git [String] :commit commit hash
#
# @overload source=(svn)
# @param [Hash] svn
# @option svn [String] :svn svn source URI
# @option svn [String] :tag version tag
# @option svn [String] :folder folder
# @option svn [String] :revision revision
#
# @overload source=(hg)
# @param [Hash] hg
# @option hg [String] :hg mercurial source URI
# @option hg [String] :revision revision
#
# @overload source=(http)
# @param [Hash] http
# @option http [String] :http compressed source URL
# @option http [String] :type file type. Supports zip, tgz, bz2, txz and tar
# @option http [String] :sha1 SHA hash. Supports SHA1 and SHA256
#
# @overload source=(path)
# @param [Hash] path
# @option path [String] :path local source path
#
root_attribute :source,
:container => Hash,
:keys => SOURCE_KEYS,
:required => true
#------------------#
# @!method summary=(summary)
#
# A short (maximum 140 characters) description of the Pod.
#
# ---
#
# The description should be short, yet informative. It represents the
# tag line of the Pod and there is no need to specify that a Pod is a
# library (they always are).
#
# The summary is expected to be properly capitalised and containing the
# correct punctuation.
#
# @example
#
# spec.summary = 'Computes the meaning of life.'
#
# @param [String] summary
# A short description of the Pod.
#
root_attribute :summary,
:required => true
#------------------#
# @!method description=(description)
#
# A description of the Pod more detailed than the summary.
#
# @example
#
# spec.description = <<-DESC
# Computes the meaning of life.
# Features:
# 1. Is self aware
# ...
# 42. Likes candies.
# DESC
#
# @param [String] description
# A longer description of the Pod.
#
root_attribute :description
#------------------#
# @!method screenshots=(screenshots)
#
# A list of URLs to images showcasing the Pod. Intended for UI oriented
# libraries. CocoaPods recommends the usage of the `gif` format.
#
# @example
#
# spec.screenshot = 'http://dl.dropbox.com/u/378729/MBProgressHUD/1.png'
#
# @example
#
# spec.screenshots = [ 'http://dl.dropbox.com/u/378729/MBProgressHUD/1.png',
# 'http://dl.dropbox.com/u/378729/MBProgressHUD/2.png' ]
#
# @param [String] screenshots
# An URL for the screenshot of the Pod.
#
root_attribute :screenshots,
:singularize => true,
:container => Array
#------------------#
# @!method documentation_url=(documentation_url)
#
# An optional URL for the documentation of the Pod which will be honoured by
# CocoaPods web properties. Leaving it blank will default to a CocoaDocs
# generated URL for your library.
#
# @example
#
# spec.documentation_url = 'http://www.example.com/docs.html'
#
# @param [String] documentation_url
# The link of the web documentation of the Pod.
#
root_attribute :documentation_url
#------------------#
# @!method prepare_command=(command)
#
# A bash script that will be executed after the Pod is downloaded. This
# command can be used to create, delete and modify any file downloaded
# and will be run before any paths for other file attributes of the
# specification are collected.
#
# This command is executed before the Pod is cleaned and before the
# Pods project is created. The working directory is the root of the
# Pod.
#
# If the pod is installed with the `:path` option this command will not
# be executed.
#
# @example
#
# spec.prepare_command = 'ruby build_files.rb'
#
# @example
#
# spec.prepare_command = <<-CMD
# sed -i 's/MyNameSpacedHeader/Header/g' ./**/*.h
# sed -i 's/MyNameOtherSpacedHeader/OtherHeader/g' ./**/*.h
# CMD
#
# @param [String] command
# the prepare command of the pod.
#
root_attribute :prepare_command
#------------------#
# @!method deprecated=(flag)
#
# Whether the library has been deprecated.
#
# @example
#
# spec.deprecated = true
#
# @param [Bool] flag
# whether the library has been deprecated.
#
root_attribute :deprecated,
:types => [TrueClass, FalseClass],
:default_value => false
# @!method deprecated_in_favor_of=(deprecated_in_favor_of)
#
# The name of the Pod that this one has been deprecated in favor of.
#
# @example
#
# spec.deprecated_in_favor_of = 'NewMoreAwesomePod'
#
# @param [String] deprecated_in_favor_of
# the name of the Pod that this one has been deprecated in
# favor of.
#
root_attribute :deprecated_in_favor_of
#-----------------------------------------------------------------------#
#-----------------------------------------------------------------------#
# @!group Build settings
#
# In this group are listed the attributes related to the configuration
# of the build environment that should be used to build the library.
#
# If not defined in a subspec the attributes of this group inherit the
# value of the parent.
#-----------------------------------------------------------------------#
# @todo This currently is not used in the Ruby DSL.
#
attribute :dependencies,
:container => Hash,
:inherited => true
# Any dependency on other Pods or on a ‘sub-specification’.
#
# ---
#
# Dependencies can specify versions requirements. The use of the optimistic
# version indicator `~>` is recommended because it provides good
# control over the version without being too restrictive. For example,
# `~> 1.0.1` is equivalent to `>= 1.0.1` combined with `< 1.1`. Similarly,
# `~> 1.0` will match `1.0`, `1.0.1`, `1.1`, but will not upgrade to `2.0`.
#
# Pods with overly restrictive dependencies limit their compatibility with
# other Pods.
#
# @example
# spec.dependency 'AFNetworking', '~> 1.0'
#
# @example
# spec.dependency 'RestKit/CoreData', '~> 0.20.0'
#
# @example
# spec.ios.dependency 'MBProgressHUD', '~> 0.5'
#
def dependency(*args)
name, *version_requirements = args
if name == self.name
raise Informative, "A specification can't require itself as a " \
'subspec'
end
if @parent
composed_name = ''
@parent.name.split('/').each do |component|
composed_name << component
if name == composed_name
raise Informative, "A subspec can't require one of its " \
'parents specifications'
else
composed_name << '/'
end
end
end
unless version_requirements.all? { |req| req.is_a?(String) }
raise Informative, 'Unsupported version requirements'
end
attributes_hash['dependencies'] ||= {}
attributes_hash['dependencies'][name] = version_requirements
end
#------------------#
# @!method requires_arc=(flag)
#
# `requires_arc` allows you to specify which source_files use ARC.
# This can either be the files which support ARC, or true to indicate
# all of the source_files use ARC.
#
# Files which do not use ARC will have the `-fno-objc-arc` compiler
# flag.
#
# The default value of this attribute is `true`.
#
# @example
#
# spec.requires_arc = false
#
# @example
#
# spec.requires_arc = 'Classes/Arc'
#
# @example
#
# spec.requires_arc = ['Classes/*ARC.m', 'Classes/ARC.mm']
#
# @param [Bool, String, Array<String>] flag
# whether the source files require ARC.
#
attribute :requires_arc,
:types => [TrueClass, FalseClass, String, Array],
:file_patterns => true,
:default_value => true,
:inherited => true
#------------------#
# @!method frameworks=(*frameworks)
#
# A list of system frameworks that the user’s target needs to link
# against.
#
# @example
#
# spec.ios.framework = 'CFNetwork'
#
# @example
#
# spec.frameworks = 'QuartzCore', 'CoreData'
#
# @param [String, Array<String>] frameworks
# A list of framework names.
#
attribute :frameworks,
:container => Array,
:singularize => true,
:inherited => true
#------------------#
# @!method weak_frameworks=(*frameworks)
#
# A list of frameworks that the user’s target needs to **weakly** link
# against.
#
# @example
#
# spec.weak_framework = 'Twitter'
#
# @param [String, Array<String>] weak_frameworks
# A list of frameworks names.
#
attribute :weak_frameworks,
:container => Array,
:singularize => true,
:inherited => true
#------------------#
# @!method libraries=(*libraries)
#
# A list of system libraries that the user’s target (application) needs to
# link against.
#
# @example
#
# spec.ios.library = 'xml2'
#
# @example
#
# spec.libraries = 'xml2', 'z'
#
# @param [String, Array<String>] libraries
# A list of library names.
#
attribute :libraries,
:container => Array,
:singularize => true,
:inherited => true
#------------------#
# @!method compiler_flags=(flags)
#
# A list of flags which should be passed to the compiler.
#
# @example
#
# spec.compiler_flags = '-DOS_OBJECT_USE_OBJC=0', '-Wno-format'
#
# @param [String, Array<String>] flags
# A list of flags.
#
attribute :compiler_flags,
:container => Array,
:singularize => true,
:inherited => true
#------------------#
# @!method pod_target_xcconfig=(value)
#
# Any flag to add to the final __private__ pod target xcconfig file.
#
# @example
#
# spec.pod_target_xcconfig = { 'OTHER_LDFLAGS' => '-lObjC' }
#
# @param [Hash{String => String}] value
# Key-value pairs representing build settings.
#
attribute :pod_target_xcconfig,
:container => Hash,
:inherited => true
# @!method user_target_xcconfig=(value)
#
# Specifies flags to add to the final aggregate target xcconfig file,
# which propagates non-overridden and inheriting build settings to
# the integrated user targets.
#
# ---
#
# This attribute is __not recommended__ as Pods should not pollute the
# build settings of the user project and this can cause conflicts.
#
# Multiple definitions for build settings that take multiple values
# will be merged. The user is warned on conflicting definitions for
# custom build settings and build settings that take only one value.
#
# Typically clang compiler flags or precompiler macro definitions go
# in here if they are required when importing the pod in the user
# target. Note that this influences not only the compiler view of the
# public interface of your pod, but also all other integrated pods
# alongside to yours. You should always prefer [`pod_target_xcconfig`](
# http://guides.cocoapods.org/syntax/podspec.html#pod_target_xcconfig),
# which can contain the same settings, but only influence the
# toolchain when compiling your pod target.
#
# @example
#
# spec.user_target_xcconfig = { 'MY_SUBSPEC' => 'YES' }
#
# @param [Hash{String => String}] value
# Key-value pairs representing build settings.
#
attribute :user_target_xcconfig,
:container => Hash,
:inherited => true
#------------------#
# @!method prefix_header_contents=(content)
#
# Any content to inject in the prefix header of the pod project.
#
# ---
#
# This attribute is __not recommended__ as Pods should not pollute the
# prefix header of other libraries or of the user project.
#
# @example
#
# spec.prefix_header_contents = '#import <UIKit/UIKit.h>'
#
# @example
#
# spec.prefix_header_contents = '#import <UIKit/UIKit.h>', '#import <Foundation/Foundation.h>'
#
# @param [String] content
# The contents of the prefix header.
#
attribute :prefix_header_contents,
:types => [Array, String],
:inherited => true
#------------------#
# @!method prefix_header_file=(path)
#
# A path to a prefix header file to inject in the prefix header of the
# pod project.
#
# ---
#
# This attribute is __not recommended__ as Pods should not pollute the
# prefix header of other libraries or of the user project.
#
# @example
#
# spec.prefix_header_file = 'iphone/include/prefix.pch'
#
# @param [String] path
# The path to the prefix header file.
#
attribute :prefix_header_file,
:inherited => true
#------------------#
# @!method module_name=(name)
#
# The name to use for the framework / clang module which
# will be generated for this specification instead of the
# default (header_dir if set, otherwise the specification
# name).
#
# @example
#
# spec.module_name = 'Three20'
#
# @param [String] name
# the module name.
#
root_attribute :module_name
#------------------#
# @!method header_dir=(dir)
#
#  The directory in which to store the header files so they don't break
# includes.
#
# @example
#
# spec.header_dir = 'Three20Core'
#
# @param [String] dir
# the headers directory.
#
attribute :header_dir,
:inherited => true
#------------------#
# @!method header_mappings_dir=(dir)
#
#  A directory from which to preserve the folder structure for the
#  header files. If not provided, the header files are flattened.
#
# @example
#
# spec.header_mappings_dir = 'src/include'
#
# @param [String] dir
#    the directory from which to preserve the headers namespacing.
#
attribute :header_mappings_dir,
:inherited => true
#-----------------------------------------------------------------------#
# @!group File patterns
#
# Podspecs should be located at the **root** of the repository, and paths
# to files should be specified **relative** to the root of the repository
# as well. File patterns do not support traversing the parent directory ( `..` ).
# File patterns may contain the following wildcard patterns:
#
# ---
#
# ### Pattern: *
#
# Matches any file. Can be restricted by other values in the glob.
#
# * `*` will match all files
# * `c*` will match all files beginning with `c`
# * `*c` will match all files ending with `c`
# * `*c*` will match all files that have `c` in them (including at the
# beginning or end)
#
# Equivalent to `/.*/x` in regexp.
#
# **Note** this will not match Unix-like hidden files (dotfiles). In
# order to include those in the match results, you must use something
# like `{*,.*}`.
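#
#  For example, a hypothetical pattern such as `"Resources/{*,.*}"` would
#  also match hidden files under `Resources`, whereas `"Resources/*"`
#  would not.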
#
# ---
#
# ### Pattern: **
#
# Matches directories recursively.
#
# ---
#
# ### Pattern: ?
#
# Matches any one character. Equivalent to `/.{1}/` in regexp.
#
# ---
#
# ### Pattern: [set]
#
# Matches any one character in set.
#
# Behaves exactly like character sets in Regexp, including set negation
# (`[^a-z]`).
#
# ---
#
# ### Pattern: {p,q}
#
# Matches either literal `p` or literal `q`.
#
# Matching literals may be more than one character in length. More than
# two literals may be specified.
#
# Equivalent to pattern alternation in regexp.
#
# ---
#
# ### Pattern: \
#
# Escapes the next meta-character.
#
# ---
#
# ### Examples
#
# Consider these to be evaluated in the source root of
# [JSONKit](https://github.com/johnezang/JSONKit).
#
# "JSONKit.?" #=> ["JSONKit.h", "JSONKit.m"]
# "*.[a-z][a-z]" #=> ["CHANGELOG.md", "README.md"]
# "*.[^m]*" #=> ["JSONKit.h"]
# "*.{h,m}" #=> ["JSONKit.h", "JSONKit.m"]
# "*" #=> ["CHANGELOG.md", "JSONKit.h", "JSONKit.m", "README.md"]
#-----------------------------------------------------------------------#
# @!method source_files=(source_files)
#
# The source files of the Pod.
#
# @example
#
# spec.source_files = 'Classes/**/*.{h,m}'
#
# @example
#
# spec.source_files = 'Classes/**/*.{h,m}', 'More_Classes/**/*.{h,m}'
#
# @param [String, Array<String>] source_files
# the source files of the Pod.
#
attribute :source_files,
:container => Array,
:file_patterns => true
#------------------#
# @!method public_header_files=(public_header_files)
#
# A list of file patterns that should be used as public headers.
#
# ---
#
# These are the headers that will be exposed to the user’s project and
# from which documentation will be generated. If no public headers are
# specified then **all** the headers in source_files are considered public.
#
# @example
#
# spec.public_header_files = 'Headers/Public/*.h'
#
# @param [String, Array<String>] public_header_files
# the public headers of the Pod.
#
attribute :public_header_files,
:container => Array,
:file_patterns => true
#------------------#
# @!method private_header_files=(private_header_files)
#
# A list of file patterns that should be used to mark private headers.
#
# ---
#
# These patterns are matched against the public headers (or all the
# headers if no public headers have been specified) to exclude those
# headers which should not be exposed to the user project and which
# should not be used to generate the documentation.
#
# @example
#
# spec.private_header_files = 'Headers/Private/*.h'
#
# @param [String, Array<String>] private_header_files
# the private headers of the Pod.
#
attribute :private_header_files,
:container => Array,
:file_patterns => true
#------------------#
# @!method vendored_frameworks=(*frameworks)
#
# The paths of the framework bundles that come shipped with the Pod.
#
# @example
#
# spec.ios.vendored_frameworks = 'Frameworks/MyFramework.framework'
#
# @example
#
# spec.vendored_frameworks = 'MyFramework.framework', 'TheirFramework.framework'
#
# @param [String, Array<String>] vendored_frameworks
#    A list of framework bundle paths.
#
attribute :vendored_frameworks,
:container => Array,
:file_patterns => true,
:singularize => true
#------------------#
#  @!method vendored_libraries=(*libraries)
#
# The paths of the libraries that come shipped with the Pod.
#
# @example
#
# spec.ios.vendored_library = 'Libraries/libProj4.a'
#
# @example
#
# spec.vendored_libraries = 'libProj4.a', 'libJavaScriptCore.a'
#
# @param [String, Array<String>] vendored_libraries
# A list of library paths.
#
attribute :vendored_libraries,
:container => Array,
:file_patterns => true,
:singularize => true
#------------------#
#  @!method resource_bundles=(resource_bundles)
#
#  This attribute allows you to define the name and the file of the resource
# bundles which should be built for the Pod. They are specified as a
# hash where the keys represent the name of the bundles and the values
# the file patterns that they should include.
#
#  We strongly **recommend** that library developers adopt resource
#  bundles, as there can be name collisions when using the resources
# attribute.
#
# The names of the bundles should at least include the name of the Pod
# to minimise the chance of name collisions.
#
#  To provide different resources per platform, namespaced bundles *must*
# be used.
#
# @example
#
# spec.ios.resource_bundle = { 'MapBox' => 'MapView/Map/Resources/*.png' }
#
# @example
#
# spec.resource_bundles = {
# 'MapBox' => ['MapView/Map/Resources/*.png'],
# 'OtherResources' => ['MapView/Map/OtherResources/*.png']
# }
#
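#  @example  Hypothetical per-platform bundles, namespaced to avoid collisions.
#
#    spec.ios.resource_bundle = { 'MapBox-iOS' => 'MapView/Map/Resources/*.png' }
#    spec.osx.resource_bundle = { 'MapBox-OSX' => 'MapView/Map/Resources/*.png' }
#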
# @param [Hash{String=>String}] resource_bundles
# A hash where the keys are the names of the resource bundles
# and the values are their relative file patterns.
#
attribute :resource_bundles,
:types => [String, Array],
:container => Hash,
:file_patterns => true,
:singularize => true
#------------------#
# @!method resources=(resources)
#
# A list of resources that should be copied into the target bundle.
#
#  We strongly **recommend** that library developers adopt [resource
#  bundles](http://guides.cocoapods.org/syntax/podspec.html#resource_bundles),
#  as there can be name collisions when using the resources attribute.
# Moreover, resources specified with this attribute are copied
# directly to the client target and therefore they are not
# optimised by Xcode.
#
# @example
#
# spec.resource = 'Resources/HockeySDK.bundle'
#
# @example
#
# spec.resources = ['Images/*.png', 'Sounds/*']
#
# @param [String, Array<String>] resources
# The resources shipped with the Pod.
#
attribute :resources,
:container => Array,
:file_patterns => true,
:singularize => true
#------------------#
# @!method exclude_files=(exclude_files)
#
# A list of file patterns that should be excluded from the other
# file patterns.
#
# @example
#
# spec.ios.exclude_files = 'Classes/osx'
#
# @example
#
# spec.exclude_files = 'Classes/**/unused.{h,m}'
#
# @param [String, Array<String>] exclude_files
# the file patterns that the Pod should ignore.
#
attribute :exclude_files,
:container => Array,
:file_patterns => true
#------------------#
# @!method preserve_paths=(preserve_paths)
#
# Any file that should **not** be removed after being downloaded.
#
# ---
#
# By default, CocoaPods removes all files that are not matched by any
#  of the other file patterns.
#
# @example
#
# spec.preserve_path = 'IMPORTANT.txt'
#
# @example
#
# spec.preserve_paths = 'Frameworks/*.framework'
#
# @param [String, Array<String>] preserve_paths
#    the paths that should not be cleaned.
#
attribute :preserve_paths,
:container => Array,
:file_patterns => true,
:singularize => true
#------------------#
# @!method module_map=(module_map)
#
# The module map file that should be used when this pod is integrated as
# a framework.
#
# ---
#
# By default, CocoaPods creates a module map file based upon the public
# headers in a specification.
#
# @example
#
# spec.module_map = 'source/module.modulemap'
#
# @param [String] module_map
# the path to the module map file that should be used.
#
attribute :module_map,
:root_only => true
#-----------------------------------------------------------------------#
# @!group Subspecs
#
# A library can specify a dependency on either another library, a
# subspec of another library, or a subspec of itself.
#-----------------------------------------------------------------------#
# Represents specification for a module of the library.
#
# ---
#
#  Subspecs participate in a dual hierarchy.
#
# On one side, a specification automatically inherits as a dependency all
#  its children ‘sub-specifications’ (unless a default subspec is
# specified).
#
# On the other side, a ‘sub-specification’ inherits the value of the
# attributes of the parents so common values for attributes can be
# specified in the ancestors.
#
#  Although it sounds complicated, in practice it means that subspecs in
# general do what you would expect:
#
# pod 'ShareKit', '2.0'
#
# Installs ShareKit with all the sharers like `ShareKit/Evernote`,
# `ShareKit/Facebook`, etc, as they are defined as subspecs.
#
# pod 'ShareKit/Twitter', '2.0'
# pod 'ShareKit/Pinboard', '2.0'
#
# Installs ShareKit with only the source files for `ShareKit/Twitter`,
# `ShareKit/Pinboard`. Note that, in this case, the ‘sub-specifications’
# to compile need the source files, the dependencies, and the other
# attributes defined by the root specification. CocoaPods is smart enough
# to handle any issues arising from duplicate attributes.
#
# @example Subspecs with different source files.
#
# subspec 'Twitter' do |sp|
# sp.source_files = 'Classes/Twitter'
# end
#
# subspec 'Pinboard' do |sp|
# sp.source_files = 'Classes/Pinboard'
# end
#
# @example Subspecs referencing dependencies to other subspecs.
#
# Pod::Spec.new do |s|
# s.name = 'RestKit'
#
# s.subspec 'Core' do |cs|
# cs.dependency 'RestKit/ObjectMapping'
# cs.dependency 'RestKit/Network'
# cs.dependency 'RestKit/CoreData'
# end
#
# s.subspec 'ObjectMapping' do |os|
# end
# end
#
# @example Nested subspecs.
#
# Pod::Spec.new do |s|
# s.name = 'Root'
#
# s.subspec 'Level_1' do |sp|
# sp.subspec 'Level_2' do |ssp|
# end
# end
# end
#
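#  A consumer would then reference a nested subspec with a slash-separated
#  name; a hypothetical Podfile entry for the nested example above:
#
#      pod 'Root/Level_1/Level_2'
#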
def subspec(name, &block)
subspec = Specification.new(self, name, &block)
@subspecs << subspec
subspec
end
#------------------#
# @!method default_subspecs=(subspec_array)
#
#  An array of subspec names that should be used as the preferred
#  dependencies. If not specified, a specification requires all its subspecs as
# dependencies.
#
# ---
#
#  A Pod should make the full library available by default. Users can
#  fine-tune their dependencies, and exclude unneeded subspecs, once
# their requirements are known. Therefore, this attribute is rarely
# needed. It is intended to be used to select a default if there are
# ‘sub-specifications’ which provide alternative incompatible
# implementations, or to exclude modules rarely needed (especially if
# they trigger dependencies on other libraries).
#
# @example
#
# spec.default_subspec = 'Core'
#
#  @example
#
#    spec.default_subspecs = 'Core', 'UI'
#
# @param [Array<String>] subspec_names
# An array of subspec names that should be inherited as
# dependency.
#
attribute :default_subspecs,
:container => Array,
:singularize => true,
:multi_platform => false,
:root_only => true
end
end
end
| 33.163752 | 114 | 0.518337 |
390d60967e0bc4ca5a63f7f650248efe5285d374 | 143 | class AddCheckedOutToCarts < ActiveRecord::Migration[6.0]
def change
add_column :carts, :checked_out, :boolean, default: false
end
end
| 23.833333 | 61 | 0.755245 |
182f1e17b9a0d11c0634ea274780c2b2141bc8d1 | 581 | # rubocop:disable all
class AddCiToProject < ActiveRecord::Migration[4.2]
def change
add_column :projects, :ci_id, :integer
add_column :projects, :builds_enabled, :boolean, default: true, null: false
add_column :projects, :shared_runners_enabled, :boolean, default: true, null: false
add_column :projects, :runners_token, :string
add_column :projects, :build_coverage_regex, :string
add_column :projects, :build_allow_git_fetch, :boolean, default: true, null: false
add_column :projects, :build_timeout, :integer, default: 3600, null: false
end
end
| 44.692308 | 87 | 0.748709 |
614fdc11e39cfeb836d73a0d0216c2c37f6325c1 | 3,065 | TCPDFFontDescriptor.define('veramobi') do |font|
font[:type]='TrueTypeUnicode';
font[:name]='BitstreamVeraSansMono-BoldOb';
font[:desc]={'Ascent'=>928,'Descent'=>-236,'CapHeight'=>928,'Flags'=>97,'FontBBox'=>'[-73 -236 681 928]','ItalicAngle'=>-11,'StemV'=>120,'MissingWidth'=>602}
font[:up]=-108;
font[:ut]=120;
font[:cw]={
270=>602, 32=>602, 33=>602, 34=>602, 35=>602, 36=>602, 37=>602, 38=>602, 39=>602, 40=>602, 41=>602, 42=>602, 43=>602, 44=>602, 45=>602, 46=>602,
47=>602, 48=>602, 49=>602, 50=>602, 51=>602, 52=>602, 53=>602, 54=>602, 55=>602, 56=>602, 57=>602, 58=>602, 59=>602, 60=>602, 61=>602, 62=>602,
63=>602, 64=>602, 65=>602, 66=>602, 67=>602, 68=>602, 69=>602, 70=>602, 71=>602, 72=>602, 73=>602, 74=>602, 75=>602, 76=>602, 77=>602, 78=>602,
79=>602, 80=>602, 81=>602, 82=>602, 83=>602, 84=>602, 85=>602, 86=>602, 87=>602, 88=>602, 89=>602, 90=>602, 91=>602, 92=>602, 93=>602, 94=>602,
95=>602, 96=>602, 97=>602, 98=>602, 99=>602, 100=>602, 101=>602, 102=>602, 103=>602, 104=>602, 105=>602, 106=>602, 107=>602, 108=>602, 109=>602, 110=>602,
111=>602, 112=>602, 113=>602, 114=>602, 115=>602, 116=>602, 117=>602, 118=>602, 119=>602, 120=>602, 121=>602, 122=>602, 123=>602, 124=>602, 125=>602, 126=>602,
8364=>602, 8218=>602, 402=>602, 8222=>602, 8230=>602, 8224=>602, 8225=>602, 710=>602, 8240=>602, 352=>602, 8249=>602, 338=>602, 381=>602, 8216=>602, 8217=>602, 8220=>602,
8221=>602, 8226=>602, 8211=>602, 8212=>602, 732=>602, 8482=>602, 353=>602, 8250=>602, 339=>602, 382=>602, 376=>602, 160=>602, 161=>602, 162=>602, 163=>602, 164=>602,
165=>602, 166=>602, 167=>602, 168=>602, 169=>602, 170=>602, 171=>602, 172=>602, 173=>602, 174=>602, 175=>602, 176=>602, 177=>602, 178=>602, 179=>602, 180=>602,
181=>602, 182=>602, 183=>602, 184=>602, 185=>602, 186=>602, 187=>602, 188=>602, 189=>602, 190=>602, 191=>602, 192=>602, 193=>602, 194=>602, 195=>602, 196=>602,
197=>602, 198=>602, 199=>602, 200=>602, 201=>602, 202=>602, 203=>602, 204=>602, 205=>602, 206=>602, 207=>602, 208=>602, 209=>602, 210=>602, 211=>602, 212=>602,
213=>602, 214=>602, 215=>602, 216=>602, 217=>602, 218=>602, 219=>602, 220=>602, 221=>602, 222=>602, 223=>602, 224=>602, 225=>602, 226=>602, 227=>602, 228=>602,
229=>602, 230=>602, 231=>602, 232=>602, 233=>602, 234=>602, 235=>602, 236=>602, 237=>602, 238=>602, 239=>602, 240=>602, 241=>602, 242=>602, 243=>602, 244=>602,
245=>602, 246=>602, 247=>602, 248=>602, 249=>602, 250=>602, 251=>602, 252=>602, 253=>602, 254=>602, 255=>602, 8800=>602, 8734=>602, 8804=>602, 8805=>602, 8706=>602,
8721=>602, 8719=>602, 960=>602, 8747=>602, 937=>602, 8730=>602, 8776=>602, 8710=>602, 9674=>602, 8725=>602, 64257=>602, 64258=>602, 305=>602, 728=>602, 729=>602, 730=>602,
733=>602, 731=>602, 711=>602, 321=>602, 322=>602, 8722=>602, 286=>602, 287=>602, 304=>602, 350=>602, 351=>602, 262=>602, 263=>602, 268=>602, 269=>602, 273=>602,
8729=>602}
font[:enc]='';
font[:diff]='';
font[:file]='veramobi.z';
font[:ctg]='veramobi.ctg.z';
font[:originalsize]=55032;
end
| 98.870968 | 174 | 0.58956 |
181da1bb59403e5d1628b2ddb94a1f6f8f68c666 | 145 | class AddPercentagesToReportReports < ActiveRecord::Migration[4.2]
def change
add_column :report_reports, :percent_type, :string
end
end
| 24.166667 | 66 | 0.786207 |
5d6246371e02f82bc622121c1642697401be7b9a | 79 | class Country < ApplicationRecord
validates :code, :name, presence: true
end
| 19.75 | 40 | 0.772152 |
b9908f4d26a278af421f81afa60c494f54c8ada3 | 4,084 | require 'rails_helper'
RSpec.describe 'DeviseRegistrationNews', type: :system do
let(:user) { create(:user, :non_activate) }
let(:activate_user) { create(:user) }
  # Submits the form with valid information
def submit_with_information(email: '[email protected]', password: 'password', password_confirmation: 'password')
fill_in 'メールアドレス(例:[email protected])', with: email
fill_in 'パスワード(6文字以上)', with: password
fill_in 'パスワード(再入力)', with: password_confirmation
find('.form-submit').click
end
  # Parameters with invalid values for each field
describe '/users/sign_up layout' do
context 'ログアウト状態でアクセスした場合' do
      # Navigate to the registration page
before do
visit new_user_registration_path
end
it '登録画面は表示される' do
expect(page).to have_current_path new_user_registration_path,
ignore_query: true
end
describe 'ページ内リンクの表示確認' do
it 'ログインページへのリンク' do
expect(page).to have_link 'アカウントをお持ちの方はこちら',
href: new_user_session_path
end
it '認証メール再送信ページへのリンク' do
expect(page).to have_link '再度認証メールを送信する方はこちら',
href: new_user_confirmation_path
end
it '凍結解除メール再送信ページへのリンク' do
expect(page).to have_link '再度凍結解除メールを送信する方はこちら',
href: new_user_unlock_path
end
end
context '無効なパラメータを送信した場合' do
it '同じ画面が表示' do
submit_with_information(email: 'invalid@example', password: 'foo', password_confirmation: 'bar')
expect(title).to eq full_title('アカウント作成')
end
it 'エラーメッセージが表示' do
submit_with_information(email: 'invalid@example', password: 'foo', password_confirmation: 'bar')
expect(page).to have_selector '.alert-danger'
end
describe '各項目が無効な値の場合ユーザーは登録されない' do
it '全項目が無効' do
expect {
submit_with_information(email: 'invalid@example', password: 'foo', password_confirmation: 'bar')
}.to change(User, :count).by(0)
end
it 'メールアドレスが無効' do
expect {
submit_with_information(email: 'invalid@example')
}.to change(User, :count).by(0)
end
it 'パスワードが無効' do
expect {
submit_with_information(password: 'foo')
}.to change(User, :count).by(0)
end
it '確認用パスワードが無効' do
expect {
submit_with_information(password_confirmation: 'foo')
}.to change(User, :count).by(0)
end
it 'パスワードと確認用パスワードの不一致は無効' do
expect {
submit_with_information(password_confirmation: 'mismatch_password_confirmation')
}.to change(User, :count).by(0)
end
end
end
context '有効なパラメータを送信した場合' do
it 'アカウント登録される' do
expect {
submit_with_information
}.to change(User, :count).by(1)
end
describe 'アカウント登録に付随する処理' do
it 'メールが送信される' do
expect {
submit_with_information
}.to change { ActionMailer::Base.deliveries.size }.by(1)
end
it 'ホーム画面に移動' do
submit_with_information
expect(page).to have_current_path root_path, ignore_query: true
end
it 'アカウント有効化メール送信メッセージが表示' do
submit_with_information
expect(page).to have_selector '.alert-notice'
end
end
end
end
context 'ログイン状態でアクセスした場合' do
      # Navigate to the registration page after the user logs in
before do
sign_in activate_user
visit new_user_registration_path
end
it '登録画面は表示されない' do
expect(page).to have_no_current_path new_user_registration_path,
ignore_query: true
end
it '自動でホームへ移動' do
expect(page).to have_current_path root_path, ignore_query: true
end
it '警告メッセージが表示' do
expect(page).to have_selector '.alert-alert'
end
end
end
end
| 29.381295 | 114 | 0.579824 |
085a668c325beb14c4fcd1f793d4fec505f03fa6 | 541 | require 'rexml/document'
require File.dirname(__FILE__) + '/../../../spec_helper'
describe "REXML::Element#comments" do
before :each do
@e = REXML::Element.new "root"
@c1 = REXML::Comment.new "this is a comment"
@c2 = REXML::Comment.new "this is another comment"
@e << @c1
@e << @c2
end
it "returns the array of comments" do
@e.comments.should == [@c1, @c2]
end
it "returns a frozen object" do
comment = REXML::Comment.new "The insertion should fail"
@e.comments.frozen?.should == true
end
end
| 24.590909 | 60 | 0.639556 |
7963ac388f82e11fb4faac0de4f1ba8286383de0 | 4,631 | require "test_helper"
describe Vanity do
describe "#configuration" do
it "returns the same configuration" do
assert_same Vanity.configuration, Vanity.configuration
end
it "returns nil if if skipping bang" do
Vanity.configuration = nil
assert_nil Vanity.configuration(false)
end
end
describe "#configure!" do
it "returns a configuration" do
assert_kind_of Vanity::Configuration, Vanity.configure!
end
it "returns a new configuration" do
refute_same Vanity.configure!, Vanity.configure!
end
end
describe "#reset!" do
it "creates a new configuration" do
original_configuration = Vanity.configuration
refute_same original_configuration, Vanity.reset!
end
end
describe "#configure" do
it "configures via a block" do
Vanity.configure do |config|
config.collecting = false
end
assert !Vanity.configuration.collecting
end
end
describe "#context" do
it "returns the context" do
Vanity.context = Object.new
assert_same Vanity.context, Vanity.context
end
end
describe "#connection" do
it "returns the same connection" do
assert_same Vanity.connection, Vanity.connection
end
it "returns nil if if skipping bang" do
Vanity.connection = nil
assert_nil Vanity.connection(false)
end
end
describe "#connect!" do
it "returns a connection" do
assert_kind_of Vanity::Connection, Vanity.connect!
end
it "returns a new connection" do
refute_same Vanity.connect!, Vanity.connect!
end
describe "deprecated settings" do
before do
FakeFS.activate!
end
after do
FakeFS.deactivate!
FakeFS::FileSystem.clear
end
it "uses legacy connection key" do
connection_config = VanityTestHelpers::VANITY_CONFIGS["vanity.yml.redis"]
FileUtils.mkpath "./config"
File.open("./config/vanity.yml", "w") do |f|
f.write(connection_config)
end
Vanity::Connection.expects(:new).with("redis://:[email protected]:6380/15")
Vanity.disconnect!
Vanity.connect!
end
it "uses redis.yml" do
FileUtils.mkpath "./config"
File.open("./config/redis.yml", "w") do |f|
f.write VanityTestHelpers::VANITY_CONFIGS["redis.yml.url"]
end
Vanity::Connection.expects(:new).with("localhost:6379/15")
Vanity.disconnect!
Vanity.connect!
end
it "uses legacy collecting key" do
connection_config = VanityTestHelpers::VANITY_CONFIGS["vanity.yml.redis"]
FileUtils.mkpath "./config"
File.open("./config/vanity.yml", "w") do |f|
f.write(connection_config)
end
out, _err = capture_io do
Vanity.reset!
Vanity.configuration.logger = Logger.new($stdout)
Vanity.disconnect!
Vanity::Connection.stubs(:new)
Vanity.connect!
end
assert_equal false, Vanity.configuration.collecting
assert_match(/Deprecated/, out)
end
end
end
describe "#disconnect!" do
it "sets the connection to nil" do
Vanity.disconnect!
assert_nil Vanity.connection(false)
end
it "handles nil connections" do
Vanity.connection = nil
assert_nil Vanity.disconnect!
end
end
describe "#reconnect!" do
it "reconnects with the same configuration" do
Vanity.disconnect!
original_specification = Vanity.connection.specification
Vanity.reconnect!
assert_equal original_specification, Vanity.connection.specification
end
it "creates a new connection" do
original_configuration = Vanity.connection
refute_same original_configuration, Vanity.reconnect!
end
end
describe "#playground" do
it "returns the same playground" do
assert_same Vanity.playground, Vanity.playground
end
it "returns nil if if skipping bang" do
Vanity.playground = nil
assert_nil Vanity.playground(false)
end
end
describe "#load!" do
it "returns a playground" do
assert_kind_of Vanity::Playground, Vanity.load!
end
it "returns a new playground" do
refute_same Vanity.load!, Vanity.load!
end
end
describe "#unload!" do
it "sets the playground to nil" do
Vanity.unload!
assert_nil Vanity.playground(false)
end
end
describe "#reload!" do
it "creates a new playground" do
original_configuration = Vanity.playground
refute_same original_configuration, Vanity.reload!
end
end
end
| 24.897849 | 83 | 0.657957 |
ed20b240cce5d90fbaab822f95947d352b78272f | 111 | # frozen_string_literal: true
class GroupVariableSerializer < BaseSerializer
entity GroupVariableEntity
end
| 18.5 | 46 | 0.855856 |
79f1d1dec5a389829b93a9b67c7546dc896d1215 | 3,603 | require "forwardable"
module Searchkick
class Results
include Enumerable
extend Forwardable
attr_reader :klass, :response, :options
def_delegators :results, :each, :any?, :empty?, :size, :length, :slice, :[], :to_ary
def initialize(klass, response, options = {})
@klass = klass
@response = response
@options = options
end
def results
@results ||= begin
if options[:load]
# results can have different types
results = {}
hits.group_by{|hit, i| hit["_type"] }.each do |type, grouped_hits|
records = type.camelize.constantize
if options[:includes]
records = records.includes(options[:includes])
end
results[type] =
if records.respond_to?(:primary_key) and records.primary_key
# ActiveRecord
records.where(records.primary_key => grouped_hits.map{|hit| hit["_id"] }).to_a
elsif records.respond_to?(:all) and records.all.respond_to?(:for_ids)
# Mongoid 2
records.all.for_ids(grouped_hits.map{|hit| hit["_id"] }).to_a
else
# Mongoid 3+
records.queryable.for_ids(grouped_hits.map{|hit| hit["_id"] }).to_a
end
end
# sort
hits.map do |hit|
results[hit["_type"]].find{|r| r.id.to_s == hit["_id"].to_s }
end.compact
else
hits.map do |hit|
result =
if hit["_source"]
hit.except("_source").merge(hit["_source"])
else
hit.except("fields").merge(hit["fields"])
end
result["id"] ||= result["_id"] # needed for legacy reasons
Hashie::Mash.new(result)
end
end
end
end
def suggestions
if response["suggest"]
response["suggest"].values.flat_map{|v| v.first["options"] }.sort_by{|o| -o["score"] }.map{|o| o["text"] }.uniq
else
raise "Pass `suggest: true` to the search method for suggestions"
end
end
def each_with_hit(&block)
results.zip(hits).each(&block)
end
def with_details
each_with_hit.map do |model, hit|
details = {}
if hit["highlight"]
details[:highlight] = Hash[ hit["highlight"].map{|k, v| [(options[:json] ? k : k.sub(/\.analyzed\z/, "")).to_sym, v.first] } ]
end
[model, details]
end
end
def facets
response["facets"]
end
def model_name
klass.model_name
end
def entry_name
model_name.human.downcase
end
def total_count
response["hits"]["total"]
end
alias_method :total_entries, :total_count
def current_page
options[:page]
end
def per_page
options[:per_page]
end
alias_method :limit_value, :per_page
def padding
options[:padding]
end
def total_pages
(total_count / per_page.to_f).ceil
end
alias_method :num_pages, :total_pages
def offset_value
(current_page - 1) * per_page + padding
end
alias_method :offset, :offset_value
def previous_page
current_page > 1 ? (current_page - 1) : nil
end
alias_method :prev_page, :previous_page
def next_page
current_page < total_pages ? (current_page + 1) : nil
end
def first_page?
previous_page.nil?
end
def last_page?
next_page.nil?
end
protected
def hits
@response["hits"]["hits"]
end
end
end
| 24.344595 | 136 | 0.561477 |
18c560d0d8fbc31adaa4ded391e68fea6c648727 | 510 | # frozen_string_literal: true
module Leftovers
class ConfigLoader
class Suggester
def initialize(words)
@words = words
@did_you_mean = ::DidYouMean::SpellChecker.new(dictionary: words) if defined?(::DidYouMean)
end
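      # Returns spell-check suggestions for the given word, falling back to
      # the full word list when nothing is close (or when DidYouMean is not
      # available). A hypothetical usage sketch:
      #
      #   Suggester.new(%w[dynamic keep test]).suggest('dynamicc') # => ["dynamic"]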
def suggest(word)
suggestions = did_you_mean.correct(word) if did_you_mean
suggestions = words if !suggestions || suggestions.empty?
suggestions
end
private
attr_reader :words, :did_you_mean
end
end
end
| 22.173913 | 99 | 0.662745 |
e99370b2a1c6bba323a736d5b6e30a563de71d21 | 17,468 | require 'spec_helper'
require 'actions/build_create'
require 'isolation_segment_assign'
require 'isolation_segment_unassign'
module VCAP::CloudController
RSpec.describe BuildCreate do
subject(:action) do
BuildCreate.new(
user_audit_info: user_audit_info,
memory_limit_calculator: memory_limit_calculator,
disk_limit_calculator: disk_limit_calculator,
environment_presenter: environment_builder
)
end
let(:memory_limit_calculator) { double(:memory_limit_calculator) }
let(:disk_limit_calculator) { double(:disk_limit_calculator) }
let(:environment_builder) { double(:environment_builder) }
let(:user_audit_info) { UserAuditInfo.new(user_email: '[email protected]', user_guid: '1234', user_name: 'charles') }
let(:staging_message) { BuildCreateMessage.new(request) }
let(:request) do
{
staging_memory_in_mb: staging_memory_in_mb,
staging_disk_in_mb: staging_disk_in_mb,
lifecycle: request_lifecycle,
}.deep_stringify_keys
end
let(:request_lifecycle) do
{
type: 'buildpack',
data: lifecycle_data
}
end
let(:metadata) do
{
labels: {
release: 'stable',
'seriouseats.com/potato' => 'mashed'
},
annotations: {
anno: 'tations'
}
}
end
let(:lifecycle) { BuildpackLifecycle.new(package, staging_message) }
let(:package) { PackageModel.make(app: app, state: PackageModel::READY_STATE) }
let(:space) { Space.make }
let(:org) { space.organization }
let(:app) { AppModel.make(space: space) }
let!(:process) { ProcessModel.make(app: app, memory: 8192, disk_quota: 512) }
let(:buildpack_git_url) { 'http://example.com/repo.git' }
let(:stack) { Stack.default }
let(:lifecycle_data) do
{
stack: stack.name,
buildpacks: [buildpack_git_url]
}
end
let(:stagers) { instance_double(Stagers) }
let(:stager) { instance_double(Diego::Stager) }
let(:calculated_mem_limit) { 32 }
let(:calculated_staging_disk_in_mb) { 64 }
let(:staging_memory_in_mb) { 1024 }
let(:staging_disk_in_mb) { 2048 }
let(:environment_variables) { 'random string' }
before do
allow(CloudController::DependencyLocator.instance).to receive(:stagers).and_return(stagers)
allow(stagers).to receive(:stager_for_build).and_return(stager)
allow(stager).to receive(:stage)
allow(memory_limit_calculator).to receive(:get_limit).with(staging_memory_in_mb, space, org).and_return(calculated_mem_limit)
allow(disk_limit_calculator).to receive(:get_limit).with(staging_disk_in_mb).and_return(calculated_staging_disk_in_mb)
allow(environment_builder).to receive(:build).and_return(environment_variables)
end
describe '#create_and_stage' do
context 'creating a build' do
it 'creates a build' do
build = nil
expect {
build = action.create_and_stage(package: package, lifecycle: lifecycle, metadata: metadata)
}.to change { BuildModel.count }.by(1)
expect(build.state).to eq(BuildModel::STAGING_STATE)
expect(build.app_guid).to eq(app.guid)
expect(build.package_guid).to eq(package.guid)
expect(build.lifecycle_data.to_hash).to eq(lifecycle_data)
expect(build.created_by_user_guid).to eq('1234')
expect(build.created_by_user_name).to eq('charles')
expect(build.created_by_user_email).to eq('[email protected]')
expect(build.labels.map { |label| { prefix: label.key_prefix, key: label.key_name, value: label.value } }).
to match_array([{ prefix: nil, key: 'release', value: 'stable' },
{ prefix: 'seriouseats.com', key: 'potato', value: 'mashed' },
])
expect(build.annotations.map { |a| { key: a.key, value: a.value } }).
to match_array([{ key: 'anno', value: 'tations' }])
end
it 'creates an app usage event for STAGING_STARTED' do
build = nil
expect {
build = action.create_and_stage(package: package, lifecycle: lifecycle, metadata: metadata)
}.to change {
AppUsageEvent.count
}.by(1)
event = AppUsageEvent.last
expect(event).to_not be_nil
expect(event.state).to eq('STAGING_STARTED')
expect(event.previous_state).to eq('STAGING')
expect(event.instance_count).to eq(1)
expect(event.previous_instance_count).to eq(1)
expect(event.memory_in_mb_per_instance).to eq(BuildModel::STAGING_MEMORY)
expect(event.previous_memory_in_mb_per_instance).to eq(BuildModel::STAGING_MEMORY)
expect(event.org_guid).to eq(build.app.space.organization.guid)
expect(event.space_guid).to eq(build.app.space.guid)
expect(event.parent_app_guid).to eq(build.app.guid)
expect(event.parent_app_name).to eq(build.app.name)
expect(event.package_guid).to eq(build.package_guid)
expect(event.app_name).to eq('')
expect(event.app_guid).to eq('')
expect(event.package_state).to eq('READY')
expect(event.previous_package_state).to eq('READY')
expect(event.buildpack_guid).to eq(nil)
expect(event.buildpack_name).to eq(buildpack_git_url)
end
it 'creates a build audit event' do
build = action.create_and_stage(package: package, lifecycle: lifecycle, metadata: metadata)
event = Event.last
expect(event.type).to eq('audit.app.build.create')
expect(event.actor).to eq('1234')
expect(event.actor_type).to eq('user')
expect(event.actor_name).to eq('[email protected]')
expect(event.actor_username).to eq('charles')
expect(event.actee).to eq(app.guid)
expect(event.actee_type).to eq('app')
expect(event.actee_name).to eq(app.name)
expect(event.timestamp).to be
expect(event.space_guid).to eq(app.space_guid)
expect(event.organization_guid).to eq(app.space.organization.guid)
expect(event.metadata).to eq({
'build_guid' => build.guid,
'package_guid' => package.guid,
})
end
it 'does not create a droplet audit event' do
expect {
action.create_and_stage(package: package, lifecycle: lifecycle)
}.to_not change {
Event.where(type: 'audit.app.droplet.create').count
}
end
end
describe 'creating a stage request' do
it 'initiates a staging request' do
build = action.create_and_stage(package: package, lifecycle: lifecycle)
expect(stager).to have_received(:stage) do |staging_details|
expect(staging_details.package).to eq(package)
expect(staging_details.staging_guid).to eq(build.guid)
expect(staging_details.staging_memory_in_mb).to eq(calculated_mem_limit)
expect(staging_details.staging_disk_in_mb).to eq(calculated_staging_disk_in_mb)
expect(staging_details.environment_variables).to eq(environment_variables)
expect(staging_details.lifecycle).to eq(lifecycle)
expect(staging_details.isolation_segment).to be_nil
expect(build.labels.size).to eq(0)
expect(build.annotations.size).to eq(0)
end
end
context 'when staging memory is not specified in the message' do
let(:staging_memory_in_mb) { nil }
it 'uses the app web process memory for staging memory' do
expect(memory_limit_calculator).to receive(:get_limit).with(process.memory, space, org)
action.create_and_stage(package: package, lifecycle: lifecycle)
end
end
context 'when staging disk is not specified in the message' do
let(:staging_disk_in_mb) { nil }
it 'uses the app web process disk for staging disk' do
expect(disk_limit_calculator).to receive(:get_limit).with(process.disk_quota)
action.create_and_stage(package: package, lifecycle: lifecycle)
end
end
describe 'isolation segments' do
let(:assigner) { VCAP::CloudController::IsolationSegmentAssign.new }
let(:isolation_segment_model) { VCAP::CloudController::IsolationSegmentModel.make }
let(:isolation_segment_model_2) { VCAP::CloudController::IsolationSegmentModel.make }
let(:shared_isolation_segment) {
VCAP::CloudController::IsolationSegmentModel.first(guid: VCAP::CloudController::IsolationSegmentModel::SHARED_ISOLATION_SEGMENT_GUID)
}
context 'when the org has a default' do
context 'and the default is the shared isolation segments' do
before do
assigner.assign(shared_isolation_segment, [org])
end
it 'does not set an isolation segment' do
action.create_and_stage(package: package, lifecycle: lifecycle)
expect(stager).to have_received(:stage) do |staging_details|
expect(staging_details.isolation_segment).to be_nil
end
end
end
context 'and the default is not the shared isolation segment' do
before do
assigner.assign(isolation_segment_model, [org])
org.update(default_isolation_segment_model: isolation_segment_model)
end
it 'sets the isolation segment' do
action.create_and_stage(package: package, lifecycle: lifecycle)
expect(stager).to have_received(:stage) do |staging_details|
expect(staging_details.isolation_segment).to eq(isolation_segment_model.name)
end
end
context 'and the space from that org has an isolation segment' do
context 'and the isolation segment is the shared isolation segment' do
before do
assigner.assign(shared_isolation_segment, [org])
space.isolation_segment_model = shared_isolation_segment
space.save
space.reload
end
it 'does not set the isolation segment' do
action.create_and_stage(package: package, lifecycle: lifecycle)
expect(stager).to have_received(:stage) do |staging_details|
expect(staging_details.isolation_segment).to be_nil
end
end
end
context 'and the isolation segment is not the shared or the default' do
before do
assigner.assign(isolation_segment_model_2, [org])
space.isolation_segment_model = isolation_segment_model_2
space.save
end
it 'sets the IS from the space' do
action.create_and_stage(package: package, lifecycle: lifecycle)
expect(stager).to have_received(:stage) do |staging_details|
expect(staging_details.isolation_segment).to eq(isolation_segment_model_2.name)
end
end
end
end
end
end
context 'when the org does not have a default' do
context 'and the space from that org has an isolation segment' do
context 'and the isolation segment is not the shared isolation segment' do
before do
assigner.assign(isolation_segment_model, [org])
space.isolation_segment_model = isolation_segment_model
space.save
end
it 'sets the isolation segment' do
action.create_and_stage(package: package, lifecycle: lifecycle)
expect(stager).to have_received(:stage) do |staging_details|
expect(staging_details.isolation_segment).to eq(isolation_segment_model.name)
end
end
end
end
end
end
end
context 'when the package is not ready' do
let(:package) { PackageModel.make(app: app, state: PackageModel::PENDING_STATE) }
it 'raises an InvalidPackage exception' do
expect {
action.create_and_stage(package: package, lifecycle: lifecycle)
}.to raise_error(BuildCreate::InvalidPackage, /not ready/)
end
end
context 'when any buildpack is disabled' do
let!(:disabled_buildpack) { Buildpack.make(enabled: false, name: 'disabled-buildpack') }
let!(:enabled_buildpack) { Buildpack.make(name: 'enabled-buildpack') }
let!(:lifecycle_data_model) do
VCAP::CloudController::BuildpackLifecycleDataModel.make(
app: app,
buildpacks: [disabled_buildpack.name, enabled_buildpack.name, 'http://custom-buildpack.example'],
stack: stack.name
)
end
let(:lifecycle_data) do
{
stack: stack.name,
buildpacks: [disabled_buildpack.name, enabled_buildpack.name, 'http://custom-buildpack.example']
}
end
it 'raises an exception' do
expect {
action.create_and_stage(package: package, lifecycle: lifecycle)
}.to raise_error(CloudController::Errors::ApiError, /'#{disabled_buildpack.name}' are disabled/)
end
end
context 'when there is already a staging in progress for the app' do
it 'raises a StagingInProgress exception' do
BuildModel.make(state: BuildModel::STAGING_STATE, app: app)
expect {
action.create_and_stage(package: package, lifecycle: lifecycle)
}.to raise_error(BuildCreate::StagingInProgress)
end
end
describe 'using custom buildpacks' do
let!(:app) { AppModel.make(space: space) }
context 'when custom buildpacks are disabled' do
before { TestConfig.override(disable_custom_buildpacks: true) }
context 'when the custom buildpack is inherited from the app' do
let(:request_lifecycle) do
{}
end
before do
app.update(buildpack_lifecycle_data: BuildpackLifecycleDataModel.create(
buildpacks: ['http://example.com/repo.git'],
stack: Stack.make.name,
))
end
it 'raises an exception' do
expect {
action.create_and_stage(package: package, lifecycle: lifecycle)
}.to raise_error(CloudController::Errors::ApiError, /Custom buildpacks are disabled/)
end
it 'does not create any DB records' do
expect {
action.create_and_stage(package: package, lifecycle: lifecycle) rescue nil
}.not_to change { [BuildModel.count, BuildpackLifecycleDataModel.count, AppUsageEvent.count, Event.count] }
end
end
context 'when the custom buildpack is set on the build' do
let(:lifecycle_data) do
{
stack: stack.name,
buildpacks: ['http://example.com/repo.git'],
}
end
it 'raises an exception' do
expect {
action.create_and_stage(package: package, lifecycle: lifecycle)
}.to raise_error(CloudController::Errors::ApiError, /Custom buildpacks are disabled/)
end
it 'does not create any DB records' do
expect {
action.create_and_stage(package: package, lifecycle: lifecycle) rescue nil
}.not_to change { [BuildModel.count, BuildpackLifecycleDataModel.count, AppUsageEvent.count, Event.count] }
end
end
end
context 'when custom buildpacks are enabled' do
context 'when the custom buildpack is inherited from the app' do
let!(:app_lifecycle_data_model) do
BuildpackLifecycleDataModel.make(
buildpacks: ['http://example.com/repo.git'],
app: app
)
end
let(:lifecycle_data) do
{
stack: stack.name,
buildpacks: nil,
}
end
it 'successfully creates a build' do
expect {
action.create_and_stage(package: package, lifecycle: lifecycle)
}.to change { BuildModel.count }.by(1)
end
end
context 'when the custom buildpack is set on the build' do
let(:lifecycle_data) do
{
stack: stack.name,
buildpacks: ['http://example.com/repo.git'],
}
end
it 'successfully creates a build' do
expect {
action.create_and_stage(package: package, lifecycle: lifecycle)
}.to change { BuildModel.count }.by(1)
end
end
end
end
end
end
end
| 40.623256 | 145 | 0.606251 |
1c85d6b0d66f8640bc9135792b306019d7635347 | 609 | module Cryptoexchange::Exchanges
module Cezex
module Services
class Pairs < Cryptoexchange::Services::Pairs
PAIRS_URL = "#{Cryptoexchange::Exchanges::Cezex::Market::API_URL}/markets"
def fetch
output = super
adapt(output)
end
def adapt(output)
output.map do |market|
Cryptoexchange::Models::MarketPair.new({
base: market['name'].split("/")[0],
target: market['name'].split("/")[1],
market: Cezex::Market::NAME
})
end
end
end
end
end
end
| 24.36 | 82 | 0.535304 |
7a53436ae5adab53a22fce2c014f08e2feb87c09 | 9,868 | require 'test_helper'
WickedPdf.config = { :exe_path => ENV['WKHTMLTOPDF_BIN'] }
HTML_DOCUMENT = '<html><body>Hello World</body></html>'.freeze
# Provide a public accessor to the normally-private parse_options function.
# Also, smash the returned array of options into a single string for
# convenience in testing below.
class WickedPdf
undef :binary_version
undef :binary_version=
attr_accessor :binary_version
def get_parsed_options(opts)
parse_options(opts).join(' ')
end
def get_valid_option(name)
valid_option(name)
end
end
class WickedPdfTest < ActiveSupport::TestCase
def setup
@wp = WickedPdf.new
end
test 'should generate PDF from html document' do
pdf = @wp.pdf_from_string HTML_DOCUMENT
assert pdf.start_with?('%PDF-1.4')
assert pdf.rstrip.end_with?('%%EOF')
assert pdf.length > 100
end
test 'should generate PDF from html document with long lines' do
document_with_long_line_file = File.new('test/fixtures/document_with_long_line.html', 'r')
pdf = @wp.pdf_from_string(document_with_long_line_file.read)
assert pdf.start_with?('%PDF-1.4')
assert pdf.rstrip.end_with?('%%EOF')
assert pdf.length > 100
end
test 'should generate PDF from html existing HTML file without converting it to string' do
filepath = File.join(Dir.pwd, 'test/fixtures/document_with_long_line.html')
pdf = @wp.pdf_from_html_file(filepath)
assert pdf.start_with?('%PDF-1.4')
assert pdf.rstrip.end_with?('%%EOF')
assert pdf.length > 100
end
test 'should raise exception when no path to wkhtmltopdf' do
assert_raise RuntimeError do
WickedPdf.new ' '
end
end
test 'should raise exception when wkhtmltopdf path is wrong' do
assert_raise RuntimeError do
WickedPdf.new '/i/do/not/exist/notwkhtmltopdf'
end
end
test 'should raise exception when wkhtmltopdf is not executable' do
begin
tmp = Tempfile.new('wkhtmltopdf')
fp = tmp.path
File.chmod 0o000, fp
assert_raise RuntimeError do
WickedPdf.new fp
end
ensure
tmp.delete
end
end
test 'should raise exception when pdf generation fails' do
begin
tmp = Tempfile.new('wkhtmltopdf')
fp = tmp.path
File.chmod 0o777, fp
wp = WickedPdf.new fp
assert_raise RuntimeError do
wp.pdf_from_string HTML_DOCUMENT
end
ensure
tmp.delete
end
end
test 'should parse header and footer options' do
[:header, :footer].each do |hf|
[:center, :font_name, :left, :right].each do |o|
assert_equal "--#{hf}-#{o.to_s.tr('_', '-')} header_footer",
@wp.get_parsed_options(hf => { o => 'header_footer' }).strip
end
[:font_size, :spacing].each do |o|
assert_equal "--#{hf}-#{o.to_s.tr('_', '-')} 12",
@wp.get_parsed_options(hf => { o => '12' }).strip
end
assert_equal "--#{hf}-line",
@wp.get_parsed_options(hf => { :line => true }).strip
assert_equal "--#{hf}-html http://www.abc.com",
@wp.get_parsed_options(hf => { :html => { :url => 'http://www.abc.com' } }).strip
end
end
test 'should parse toc options' do
toc_option = @wp.get_valid_option('toc')
[:font_name, :header_text].each do |o|
assert_equal "#{toc_option} --toc-#{o.to_s.tr('_', '-')} toc",
@wp.get_parsed_options(:toc => { o => 'toc' }).strip
end
[
:depth, :header_fs, :l1_font_size, :l2_font_size, :l3_font_size, :l4_font_size,
:l5_font_size, :l6_font_size, :l7_font_size, :l1_indentation, :l2_indentation,
:l3_indentation, :l4_indentation, :l5_indentation, :l6_indentation, :l7_indentation
].each do |o|
assert_equal "#{toc_option} --toc-#{o.to_s.tr('_', '-')} 5",
@wp.get_parsed_options(:toc => { o => 5 }).strip
end
[:no_dots, :disable_links, :disable_back_links].each do |o|
assert_equal "#{toc_option} --toc-#{o.to_s.tr('_', '-')}",
@wp.get_parsed_options(:toc => { o => true }).strip
end
end
test 'should parse outline options' do
assert_equal '--outline', @wp.get_parsed_options(:outline => { :outline => true }).strip
assert_equal '--outline-depth 5', @wp.get_parsed_options(:outline => { :outline_depth => 5 }).strip
end
test 'should parse no_images option' do
assert_equal '--no-images', @wp.get_parsed_options(:no_images => true).strip
assert_equal '--images', @wp.get_parsed_options(:images => true).strip
end
test 'should parse margins options' do
[:top, :bottom, :left, :right].each do |o|
assert_equal "--margin-#{o} 12", @wp.get_parsed_options(:margin => { o => '12' }).strip
end
end
test 'should parse cover' do
cover_option = @wp.get_valid_option('cover')
pathname = Rails.root.join('app', 'views', 'pdf', 'file.html')
assert_equal "#{cover_option} http://example.org", @wp.get_parsed_options(:cover => 'http://example.org').strip, 'URL'
assert_equal "#{cover_option} #{pathname}", @wp.get_parsed_options(:cover => pathname).strip, 'Pathname'
assert_match %r{#{cover_option} .+wicked_cover_pdf.+\.html}, @wp.get_parsed_options(:cover => '<html><body>HELLO</body></html>').strip, 'HTML'
end
test 'should parse other options' do
[
:orientation, :page_size, :proxy, :username, :password, :dpi,
:encoding, :user_style_sheet
].each do |o|
assert_equal "--#{o.to_s.tr('_', '-')} opts", @wp.get_parsed_options(o => 'opts').strip
end
[:cookie, :post].each do |o|
assert_equal "--#{o.to_s.tr('_', '-')} name value", @wp.get_parsed_options(o => 'name value').strip
nv_formatter = proc { |number| "--#{o.to_s.tr('_', '-')} par#{number} val#{number}" }
assert_equal "#{nv_formatter.call(1)} #{nv_formatter.call(2)}", @wp.get_parsed_options(o => ['par1 val1', 'par2 val2']).strip
end
[:redirect_delay, :zoom, :page_offset].each do |o|
assert_equal "--#{o.to_s.tr('_', '-')} 5", @wp.get_parsed_options(o => 5).strip
end
[
:book, :default_header, :disable_javascript, :grayscale, :lowquality,
:enable_plugins, :disable_internal_links, :disable_external_links,
:print_media_type, :disable_smart_shrinking, :use_xserver, :no_background
].each do |o|
assert_equal "--#{o.to_s.tr('_', '-')}", @wp.get_parsed_options(o => true).strip
end
end
test 'should extract old wkhtmltopdf version' do
version_info_sample = "Name:\n wkhtmltopdf 0.9.9\n\nLicense:\n Copyright (C) 2008,2009 Wkhtmltopdf Authors.\n\n\n\n License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>.\n This is free software: you are free to change and redistribute it. There is NO\n WARRANTY, to the extent permitted by law.\n\nAuthors:\n Written by Jakob Truelsen. Patches by Mrio Silva, Benoit Garret and Emmanuel\n Bouthenot.\n"
assert_equal WickedPdf::DEFAULT_BINARY_VERSION, @wp.send(:parse_version, version_info_sample)
end
test 'should extract new wkhtmltopdf version' do
version_info_sample = "Name:\n wkhtmltopdf 0.11.0 rc2\n\nLicense:\n Copyright (C) 2010 wkhtmltopdf/wkhtmltoimage Authors.\n\n\n\n License LGPLv3+: GNU Lesser General Public License version 3 or later\n <http://gnu.org/licenses/lgpl.html>. This is free software: you are free to\n change and redistribute it. There is NO WARRANTY, to the extent permitted by\n law.\n\nAuthors:\n Written by Jan Habermann, Christian Sciberras and Jakob Truelsen. Patches by\n Mehdi Abbad, Lyes Amazouz, Pascal Bach, Emmanuel Bouthenot, Benoit Garret and\n Mario Silva."
assert_equal Gem::Version.new('0.11.0'), @wp.send(:parse_version, version_info_sample)
end
test 'should extract wkhtmltopdf version with nondigit symbols' do
version_info_sample = "Name:\n wkhtmltopdf 0.10.4b\n\nLicense:\n Copyright (C) 2008,2009 Wkhtmltopdf Authors.\n\n\n\n License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>.\n This is free software: you are free to change and redistribute it. There is NO\n WARRANTY, to the extent permitted by law.\n\nAuthors:\n Written by Jakob Truelsen. Patches by Mrio Silva, Benoit Garret and Emmanuel\n Bouthenot.\n"
assert_equal Gem::Version.new('0.10.4b'), @wp.send(:parse_version, version_info_sample)
end
test 'should fallback to default version on parse error' do
assert_equal WickedPdf::DEFAULT_BINARY_VERSION, @wp.send(:parse_version, '')
end
test 'should set version on initialize' do
assert_not_equal @wp.send(:binary_version), ''
end
test 'should not use double dash options for version without dashes' do
@wp.binary_version = WickedPdf::BINARY_VERSION_WITHOUT_DASHES
%w[toc cover].each do |name|
assert_equal @wp.get_valid_option(name), name
end
end
test 'should use double dash options for version with dashes' do
@wp.binary_version = Gem::Version.new('0.11.0')
%w[toc cover].each do |name|
assert_equal @wp.get_valid_option(name), "--#{name}"
end
end
test '-- options should not be given after object' do
options = { :header => { :center => 3 }, :cover => 'http://example.org', :disable_javascript => true }
cover_option = @wp.get_valid_option('cover')
assert_equal @wp.get_parsed_options(options), "--disable-javascript --header-center 3 #{cover_option} http://example.org"
end
test 'should output progress when creating pdfs on compatible hosts' do
wp = WickedPdf.new
output = []
options = { :progress => proc { |o| output << o } }
wp.pdf_from_string HTML_DOCUMENT, options
if RbConfig::CONFIG['target_os'] =~ /mswin|mingw/
assert_empty output
else
assert(output.collect { |l| !l.match(/Loading/).nil? }.include?(true)) # should output something like "Loading pages (1/5)"
end
end
end
| 40.946058 | 563 | 0.67045 |
7927072f41dac67f67a319a9df7c5dcb36221b32 | 1,787 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rake db:seed (or created alongside the db with db:setup).
#
# Examples:
#
# cities = City.create([{ name: 'Chicago' }, { name: 'Copenhagen' }])
# Mayor.create(name: 'Emanuel', city: cities.first)
Post.create(title: 'título del post seed 2',
body: 'Cuerpo del post, debe tener muchos 2',
slug: 'esta va a ser la url 2',
category: 'Desarrollo 2',
meta_description: 'La descripción del meta 2',
meta_title: 'Etiqueta title 2',
published: true
)
Post.create(title: 'título del post seed 3',
body: 'Cuerpo del post, debe tener muchos 3',
slug: 'esta va a ser la url 3',
category: 'Desarrollo 3',
meta_description: 'La descripción del meta 3',
meta_title: 'Etiqueta title 3',
published: true
)
Post.create(title: 'título del post seed 4',
body: 'Cuerpo del post, debe tener muchos 4',
slug: 'esta va a ser la url 4',
category: 'Desarrollo 4',
meta_description: 'La descripción del meta 4',
meta_title: 'Etiqueta title 4',
published: true
)
Post.create(title: 'título del post seed 5',
body: 'Cuerpo del post, debe tener muchos 5',
slug: 'esta va a ser la url 5',
category: 'Desarrollo 5',
meta_description: 'La descripción del meta 5',
meta_title: 'Etiqueta title 5',
published: true
)
Post.create(title: 'título del post seed 6',
body: 'Cuerpo del post, debe tener muchos 6',
slug: 'esta va a ser la url 6',
category: 'Desarrollo 6',
meta_description: 'La descripción del meta 6',
meta_title: 'Etiqueta title 6',
published: true
) | 34.365385 | 103 | 0.636821 |
26c89bd5e370cd2ba1ed218844b32f105d1c5d04 | 1,505 | cask 'wkhtmltopdf' do
version '0.12.3'
if Hardware::CPU.is_32_bit?
sha256 '6e4613c060eb9e5eb0bd05b0ccd85d09086ef7a1156300e53a9dfba7969b6fc0'
# download.gna.org/wkhtmltopdf was verified as official when first introduced to the cask
url "http://download.gna.org/wkhtmltopdf/#{version.major_minor}/#{version}/wkhtmltox-#{version}_osx-carbon-i386.pkg"
pkg "wkhtmltox-#{version}_osx-carbon-i386.pkg"
else
sha256 'c8aa0f9456444847d08c6c772e2e0e4244813e6a2911eba6c940439df9abd0f0'
# download.gna.org/wkhtmltopdf was verified as official when first introduced to the cask
url "http://download.gna.org/wkhtmltopdf/#{version.major_minor}/#{version}/wkhtmltox-#{version}_osx-cocoa-x86-64.pkg"
pkg "wkhtmltox-#{version}_osx-cocoa-x86-64.pkg"
end
name 'wkhtmltopdf'
homepage 'http://wkhtmltopdf.org/'
license :gpl
depends_on macos: '>= :snow_leopard'
uninstall pkgutil: 'org.wkhtmltopdf.wkhtmltox',
delete: [
'/usr/local/include/wkhtmltox',
'/usr/local/lib/libwkhtmltox.dylib',
"/usr/local/lib/libwkhtmltox.#{version.major}.dylib",
"/usr/local/lib/libwkhtmltox.#{version.major_minor}.dylib",
"/usr/local/lib/libwkhtmltox.#{version.sub(%r{-.*$}, '')}.dylib",
'/usr/local/bin/wkhtmltoimage',
'/usr/local/bin/wkhtmltopdf',
]
caveats do
files_in_usr_local
end
end
| 40.675676 | 121 | 0.648505 |
6179879e6194014e7d58a861bca83da4c69ecc08 | 1,924 | # frozen_string_literal: true
require 'administrate/base_dashboard'
class CategoryDashboard < Administrate::BaseDashboard
# ATTRIBUTE_TYPES
# a hash that describes the type of each of the model's fields.
#
# Each different type represents an Administrate::Field object,
# which determines how the attribute is displayed
# on pages throughout the dashboard.
ATTRIBUTE_TYPES = {
id: Field::String.with_options(searchable: false),
name: Field::String,
description: Field::Text,
concepts: HasManySortedField.with_options(order: :name),
created_at: Field::DateTime.with_options(timezone: 'GB'),
updated_at: Field::DateTime.with_options(timezone: 'GB'),
parent: BreadcrumbField.with_options(class_name: 'Category'),
children: Field::HasMany.with_options(class_name: 'Category'),
child_categories: Field::Number,
position: Field::Number,
versions: Field::HasMany.with_options(class_name: 'PaperTrail::Version')
}.freeze
# COLLECTION_ATTRIBUTES
# an array of attributes that will be displayed on the model's index page.
#
# By default, it's limited to four items to reduce clutter on index pages.
# Feel free to add, remove, or rearrange items.
COLLECTION_ATTRIBUTES = %i[
name
description
concepts
child_categories
].freeze
# SHOW_PAGE_ATTRIBUTES
# an array of attributes that will be displayed on the model's show page.
SHOW_PAGE_ATTRIBUTES = %i[
id
name
description
parent
children
concepts
created_at
updated_at
].freeze
# FORM_ATTRIBUTES
# an array of attributes that will be displayed
# on the model's form (`new` and `edit`) pages.
FORM_ATTRIBUTES = %i[
name
description
concepts
].freeze
# Overwrite this method to customize how categories are displayed
# across all pages of the admin dashboard.
#
def display_resource(category)
category.name
end
end
| 28.716418 | 76 | 0.722973 |
26f0fb15d6cfea19c6e8935c54a78eb43810c9ae | 5,255 | require "rails_helper"
RSpec.describe UserMailer, type: :mailer do
describe "submission_notification" do
let!(:organization) { FactoryBot.create(:organization) }
let(:user) { FactoryBot.create(:user, organization: organization) }
let(:form) { FactoryBot.create(:form, organization: organization, user: user)}
let!(:submission) { FactoryBot.create(:submission, form: form) }
let(:mail) { UserMailer.submission_notification(submission_id: submission.id, emails: [user.email]) }
it "renders the headers" do
expect(mail.subject).to eq("New Submission to #{submission.form.name}")
expect(mail.to).to eq([user.email])
expect(mail.from).to eq([ENV.fetch("TOUCHPOINTS_EMAIL_SENDER")])
end
it "renders the body" do
expect(mail.body.encoded).to match("Touchpoints.gov Response Notification")
expect(mail.body.encoded).to match("New feedback has been submitted to your form, #{submission.form.name}.")
end
end
describe "submission_digest" do
let!(:organization) { FactoryBot.create(:organization) }
let(:user) { FactoryBot.create(:user, organization: organization) }
let(:form) { FactoryBot.create(:form, organization: organization, user: user)}
let!(:submission) { FactoryBot.create(:submission, form: form) }
let(:begin_day) { 1.day.ago }
let(:mail) { UserMailer.submissions_digest(form.id, begin_day) }
before do
ENV["ENABLE_EMAIL_NOTIFICATIONS"] = "true"
end
it "renders the headers" do
expect(mail.subject).to eq("New Submissions to #{form.name} since #{begin_day}")
expect(mail.to).to eq(form.notification_emails.split)
expect(mail.from).to eq([ENV.fetch("TOUCHPOINTS_EMAIL_SENDER")])
end
it "renders the body" do
expect(mail.body.encoded).to match("Notification of feedback received since #{ @begin_day }")
expect(mail.body.encoded).to match("1 feedback responses have been submitted to your form, #{ form.name }, since #{begin_day}")
end
end
describe "account_deactivation_scheduled_notification" do
let!(:organization) { FactoryBot.create(:organization) }
let(:user) { FactoryBot.create(:user, organization: organization) }
let(:active_days) { 14 }
let(:mail) { UserMailer.account_deactivation_scheduled_notification(user.email, active_days) }
before do
ENV["ENABLE_EMAIL_NOTIFICATIONS"] = "true"
end
it "renders the headers" do
expect(mail.subject).to eq("Your account is scheduled to be deactivated in #{active_days} days due to inactivity")
expect(mail.to).to eq([user.email])
expect(mail.from).to eq([ENV.fetch("TOUCHPOINTS_EMAIL_SENDER")])
end
it "renders the body" do
expect(mail.body.encoded).to match("Account deactivation scheduled in #{ active_days } days.")
expect(mail.body.encoded).to match("Your account is scheduled to be deactivated in #{ active_days } days due to inactivity.")
end
end
describe "admin_summary" do
let(:mail) { UserMailer.admin_summary }
it "renders the headers" do
expect(mail.subject).to eq("Admin summary")
expect(mail.to).to eq(ENV.fetch("TOUCHPOINTS_ADMIN_EMAILS").split(","))
expect(mail.from).to eq([ENV.fetch("TOUCHPOINTS_EMAIL_SENDER")])
end
it "renders the body" do
expect(mail.body.encoded).to match("Hi")
end
end
describe "webmaster_summary" do
let(:mail) { UserMailer.webmaster_summary }
it "renders the headers" do
expect(mail.subject).to eq("Webmaster summary")
expect(mail.to).to eq(ENV.fetch("TOUCHPOINTS_ADMIN_EMAILS").split(","))
expect(mail.from).to eq([ENV.fetch("TOUCHPOINTS_EMAIL_SENDER")])
end
it "renders the body" do
expect(mail.body.encoded).to match("Hi")
end
end
describe "new_user_notification" do
let(:user) { FactoryBot.create(:user) }
let(:mail) { UserMailer.new_user_notification(user) }
it "renders the headers" do
expect(mail.subject).to eq("New user account created")
expect(mail.to).to eq([UserMailer.touchpoints_team])
expect(mail.from).to eq([ENV.fetch("TOUCHPOINTS_EMAIL_SENDER")])
end
it "renders the body" do
expect(mail.body.encoded).to match("New user account created")
end
end
describe "org_user_notification" do
let(:user) { FactoryBot.create(:user) }
let(:mail) { UserMailer.org_user_notification(user,user) }
it "renders the headers" do
expect(mail.subject).to eq("New user added to organization")
expect(mail.to).to eq([user.email])
expect(mail.from).to eq([ENV.fetch("TOUCHPOINTS_EMAIL_SENDER")])
end
it "renders the body" do
expect(mail.body.encoded).to match("New user added to organization")
end
end
describe "no_org_notification" do
let(:user) { FactoryBot.create(:user) }
let(:mail) { UserMailer.no_org_notification(user) }
it "renders the headers" do
expect(mail.subject).to eq("New user account creation failed")
expect(mail.to).to eq([UserMailer.touchpoints_support])
expect(mail.from).to eq([ENV.fetch("TOUCHPOINTS_EMAIL_SENDER")])
end
it "renders the body" do
expect(mail.body.encoded).to match("New user account creation failed")
end
end
end
| 35.993151 | 133 | 0.688677 |
f79ffc967ccd1c5cbe87511d779483fff3aaeff3 | 143 | class Note < RedisOrm::Base
property :body, :string, :default => "made by redis_orm"
belongs_to :user, :as => :owner, :index => true
end | 28.6 | 58 | 0.657343 |
6aaf7ed46a6ab4f5e7563d2b192bcbd15d8e3f74 | 561 | # class Interview
class Interview < JsonSerializer
ANSWERS = %w(open accepted rejected).freeze
RESULTS = %w(open accepted rejected wait).freeze
attribute :date, Date
attribute :comments, String, default: ''
attribute :invited, Boolean, default: false
attribute :answer, String, default: 'open'
attribute :result, String, default: 'open'
attribute :reason, String, default: ''
def open?
result == 'open'
end
def accepted?
result == 'accepted'
end
def rejected?
result == 'rejected'
end
end
| 21.576923 | 50 | 0.654189 |
610e924ad20d8151a5df6a623d2bc21d38c1d6f7 | 787 | require 'term_canvas'
field = TermCanvas::Canvas.new(x: 0, y: 0, w: TermCanvas.width, h: TermCanvas.height)
text_pos = {y: 0, x: 0}
loop do
key = TermCanvas.gets
case key
when ?k
text_pos[:y] -= 1
when ?j
text_pos[:y] += 1
when ?h
text_pos[:x] -= 2
when ?l
text_pos[:x] += 2
when ?q
break
end
field.clear
field.rect(
TermCanvas::Rect.new(
x: field.centerx, y: field.centery, width: 2, height: 1, background_color: {r: 200, b: 200, g: 800}
).offset(x: -TermCanvas.width / 2, y: 0)
)
field.text(
TermCanvas::Text.new(
x: text_pos[:x], y: text_pos[:y], body: "test",
background_color: {r: 800, g: 800, b: 800}, foreground_color: {r: 200, g: 200, b: 200}
)
)
field.update
sleep 0.05
end
TermCanvas.close
| 21.27027 | 105 | 0.595934 |
e9664c56f72336efbba8d213e8bb2f4f0efaa846 | 7,467 | module Pod
class Source
    # The Aggregate manages a directory of source repositories.
#
class Aggregate
      # @return [Pathname] the directory where the repositories are stored.
#
attr_reader :repos_dir
# @param [Pathname] repos_dir @see repos_dir.
#
def initialize(repos_dir)
@repos_dir = repos_dir
end
# @return [Array<Source>] all the sources.
#
def all
@sources ||= dirs.map { |repo| Source.new(repo) }.sort_by(&:name)
end
# @return [Array<String>] the names of all the pods available.
#
def all_pods
all.map(&:pods).flatten.uniq
end
# @return [Array<Set>] the sets for all the pods available.
#
      # @note Implementation detail: The sources don't cache their values
      #   because they might change in response to an update. Therefore, to
      #   prevent slowness, this method caches the values before processing
      #   them.
#
def all_sets
pods_by_source = {}
all.each do |source|
pods_by_source[source] = source.pods
end
sources = pods_by_source.keys
pods = pods_by_source.values.flatten.uniq
pods.map do |pod|
pod_sources = sources.select{ |s| pods_by_source[s].include?(pod) }.compact
Specification::Set.new(pod, pod_sources)
end
end
# @return [Array<Pathname>] the directories where the sources are stored.
#
      # @note If the repos dir doesn't exist this will return an empty array.
#
def dirs
if repos_dir.exist?
repos_dir.children.select(&:directory?)
else
[]
end
end
# Returns a set configured with the source which contains the highest
# version in the aggregate.
#
# @param [String] name
# The name of the Pod.
#
# @return [Set] The most representative set for the Pod with the given
# name.
#
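      # @example (illustrative only; repo names and versions are hypothetical)
      #
      #   # If repo "master" carries AFNetworking 2.0.0 and repo "fork" only
      #   # 1.3.1, the returned set is configured against "master".
      #   set = aggregate.represenative_set('AFNetworking')
      #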
def represenative_set(name)
representative_source = nil
highest_version = nil
all.each do |source|
source_versions = source.versions(name)
if source_versions
source_version = source_versions.first
if highest_version.nil? || (highest_version < source_version)
highest_version = source_version
representative_source = source
end
end
end
Specification::Set.new(name, representative_source)
end
public
# @!group Search
#-----------------------------------------------------------------------#
      # @return [Set, nil] a set for a given dependency including all the
      #   {Source} that contain the Pod. If no sources containing the Pod are
      #   found, it returns nil.
#
# @see Source#search
#
def search(dependency)
sources = all.select { |s| !s.search(dependency).nil? }
Specification::Set.new(dependency.root_name, sources) unless sources.empty?
end
# @return [Array<Set>] the sets that contain the search term.
#
# @raise If no source including the set can be found.
#
      # @todo The raising should be done by clients, not by this method.
#
# @see Source#search_by_name
#
def search_by_name(query, full_text_search = false)
pods_by_source = {}
result = []
all.each { |s| pods_by_source[s] = s.search_by_name(query, full_text_search).map(&:name) }
root_spec_names = pods_by_source.values.flatten.uniq
root_spec_names.each do |pod|
sources = []
pods_by_source.each{ |source, pods| sources << source if pods.include?(pod) }
result << Specification::Set.new(pod, sources)
end
if result.empty?
extra = ", author, summary, or description" if full_text_search
raise(Informative, "Unable to find a pod with name" \
"#{extra} matching `#{query}'")
end
result
end
public
# @!group Search Index
#-----------------------------------------------------------------------#
# Generates from scratch the search data for all the sources of the
# aggregate. This operation can take a considerable amount of time
# (seconds) as it needs to evaluate the most representative podspec
# for each Pod.
#
# @return [Hash{String=>Hash}] The search data of every set grouped by
# name.
#
def generate_search_index
result = {}
all_sets.each do |set|
result[set.name] = search_data_from_set(set)
end
result
end
      # Updates the given search data in place with the information stored in
      # all the sources. The update skips Pods whose version in the search
      # data already matches the highest version known to the aggregate. This
      # can lead to updates in podspecs being skipped until a new version is
      # released.
#
# @note This procedure is considerably faster as it only needs to
# load the most representative spec of the new or updated Pods.
#
# @return [Hash{String=>Hash}] The search data of every set grouped by
# name.
#
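      # @example (hypothetical data, shown only to illustrate the skip rule)
      #
      #   # An entry whose stored 'version' already equals the highest version
      #   # known to the aggregate is left untouched; missing or outdated
      #   # entries are regenerated from the most representative spec.
      #   aggregate.update_search_index('ARAnalytics' => { 'version' => '1.1.0' })
      #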
def update_search_index(search_data)
enumerated_names = []
all_sets.each do |set|
enumerated_names << set.name
set_data = search_data[set.name]
has_data = set_data && set_data['version']
needs_update = !has_data || Version.new(set_data['version']) < set.required_version
if needs_update
search_data[set.name] = search_data_from_set(set)
end
end
stored_names = search_data.keys
        deleted_names = stored_names - enumerated_names
        deleted_names.each do |name|
search_data.delete(name)
end
search_data
end
private
# @!group Private helpers
#-----------------------------------------------------------------------#
# Returns the search related information from the most representative
# specification of the set following keys:
#
# - version
# - summary
# - description
# - authors
#
# @param [Set] set
# The set for which the information is needed.
#
      # @note If the specification can't be loaded, an empty hash is returned
      #   and a warning is printed.
      #
      # @note For compatibility with non-Ruby clients, strings are used
      #   instead of symbols for the keys.
#
# @return [Hash{String=>String}] A hash with the search information.
#
def search_data_from_set(set)
result = {}
spec = set.specification
result['version'] = spec.version.to_s
result['summary'] = spec.summary
result['description'] = spec.description
result['authors'] = spec.authors.keys.sort * ', '
result
rescue
CoreUI.warn "Skipping `#{set.name}` because the podspec contains errors."
result
end
#-----------------------------------------------------------------------#
end
end
end
| 32.465217 | 98 | 0.566493 |
bbb25723af3a402443c3952f9f68504418324b78 | 5,310 | # frozen_string_literal: true
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
require "webmock/rspec"
require "dotenv/load"
require "hcloud"
Dir[File.join(__dir__, "support/**/*.rb")].sort.each { |f| require f }
Dir[File.join(__dir__, "factories/**/*.rb")].sort.each { |f| require f }
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# This option will default to `:apply_to_host_groups` in RSpec 4 (and will
# have no way to turn it off -- the option exists only for backwards
# compatibility in RSpec 3). It causes shared context metadata to be
# inherited by the metadata hash of host groups and examples, rather than
# triggering implicit auto-inclusion in groups with matching metadata.
config.shared_context_metadata_behavior = :apply_to_host_groups
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
# # This allows you to limit a spec run to individual examples or groups
# # you care about by tagging them with `:focus` metadata. When nothing
# # is tagged with `:focus`, all examples get run. RSpec also provides
# # aliases for `it`, `describe`, and `context` that include `:focus`
# # metadata: `fit`, `fdescribe` and `fcontext`, respectively.
# config.filter_run_when_matching :focus
#
# # Allows RSpec to persist some state between runs in order to support
# # the `--only-failures` and `--next-failure` CLI options. We recommend
# # you configure your source control system to ignore this file.
config.example_status_persistence_file_path = "spec/examples.txt"
#
# # Limits the available syntax to the non-monkey patched syntax that is
# # recommended. For more details, see:
# # - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# # - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
# config.disable_monkey_patching!
#
# # This setting enables warnings. It's recommended, but in some cases may
# # be too noisy due to issues in dependencies.
# config.warnings = true
#
# # Many RSpec users commonly either run the entire suite or an individual
# # file, and it's useful to allow more verbose output when running an
# # individual spec file.
# if config.files_to_run.one?
# # Use the documentation formatter for detailed output,
# # unless a formatter has already been configured
# # (e.g. via a command-line flag).
# config.default_formatter = "doc"
# end
#
# # Print the 10 slowest examples and example groups at the
# # end of the spec run, to help surface which specs are running
# # particularly slow.
# config.profile_examples = 10
#
# # Run specs in random order to surface order dependencies. If you find an
# # order dependency and want to debug it, you can fix the order by providing
# # the seed, which is printed after each run.
# # --seed 1234
config.order = :random
#
# # Seed global randomization in this process using the `--seed` CLI option.
# # Setting this allows you to use `--seed` to deterministically reproduce
# # test failures related to randomization by passing the same `--seed` value
# # as the one that triggered the failure.
# Kernel.srand config.seed
end
| 47.837838 | 96 | 0.713936 |
bf2af0c96adaf300b4598111e2f9eab4740a206b | 332 | package "collectd"
service "collectd" do
supports :restart => true, :status => true
end
%w(collectd collection thresholds).each do |file|
template "/etc/collectd/#{file}.conf" do
source "#{file}.conf.erb"
owner "root"
group "root"
mode "644"
notifies :restart, resources(:service => "collectd")
end
end
| 19.529412 | 56 | 0.659639 |
0829d5d438c0acb59f38619df12fd9e807dc772a | 1,035 | require File.dirname(__FILE__) + '/../../../spec_helper'
has_tty? do # needed for CI until we figure out a better way
require 'readline'
describe "Readline::HISTORY.[]=" do
before(:each) do
Readline::HISTORY.push("1", "2", "3")
end
after(:each) do
Readline::HISTORY.pop
Readline::HISTORY.pop
Readline::HISTORY.pop
end
it "returns the new value for the passed index" do
(Readline::HISTORY[1] = "second test").should == "second test"
end
it "raises an IndexError when there is no item at the passed positive index" do
lambda { Readline::HISTORY[10] = "test" }.should raise_error(IndexError)
end
it "sets the item at the given index" do
Readline::HISTORY[0] = "test"
Readline::HISTORY[0].should == "test"
Readline::HISTORY[1] = "second test"
Readline::HISTORY[1].should == "second test"
end
it "raises an IndexError when there is no item at the passed negative index" do
      lambda { Readline::HISTORY[-10] = "test" }.should raise_error(IndexError)
end
end
end
| 27.236842 | 81 | 0.671498 |
8766ef9aec1519227882ea406213c9281c584bf3 | 464 | # encoding: utf-8
module RuboCop
# A basic wrapper around Parser's tokens.
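  #
  # A usage sketch (the token tuple is hypothetical but follows the
  # `[type, [text, range]]` shape that `.from_parser_token` destructures):
  #
  #   token = RuboCop::Token.from_parser_token([:tIDENTIFIER, ['foo', range]])
  #   token.type # => :tIDENTIFIER
  #   token.text # => 'foo'
  #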
class Token
attr_reader :pos, :type, :text
def self.from_parser_token(parser_token)
type, details = *parser_token
text, range = *details
new(range, type, text)
end
def initialize(pos, type, text)
@pos, @type, @text = pos, type, text
end
def to_s
"[[#{@pos.line}, #{@pos.column}], #{@type}, #{@text.inspect}]"
end
end
end
| 20.173913 | 68 | 0.601293 |
ac7b3a9b2a69463c3f49800d7a52c8a6d33bdf8e | 3,149 | #
# Copyright 2015-2016, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/provider'
require 'poise'
require 'poise_ruby/resources/ruby_gem'
require 'poise_ruby/resources/ruby_runtime'
module PoiseRuby
module RubyProviders
class Base < Chef::Provider
include Poise(inversion: :ruby_runtime)
# Set default inversion options.
#
# @api private
def self.default_inversion_options(node, new_resource)
super.merge({
bundler_version: new_resource.bundler_version,
version: new_resource.version,
})
end
# The `install` action for the `ruby_runtime` resource.
#
# @return [void]
def action_install
notifying_block do
install_ruby
install_bundler
end
end
# The `uninstall` action for the `ruby_runtime` resource.
#
# @return [void]
def action_uninstall
notifying_block do
uninstall_ruby
end
end
# The path to the `ruby` binary.
#
# @abstract
# @return [String]
def ruby_binary
raise NotImplementedError
end
# Output property for environment variables.
#
# @return [Hash<String, String>]
def ruby_environment
# No environment variables needed. Rejoice.
{}
end
      # The path to the `gem` binary. The default implementation looks
      # relative to the `ruby` binary.
#
# @return [String]
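      #
      # @example (illustrative paths only; actual locations depend on the runtime)
      #   # ruby_binary => "/opt/rubies/2.1/bin/ruby2.1"
      #   # gem_binary  => "/opt/rubies/2.1/bin/gem2.1"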
def gem_binary
dir, base = ::File.split(ruby_binary)
# If this ruby is called something weird, bail out.
raise NotImplementedError unless base.start_with?('ruby')
# Allow for names like "ruby2.0" -> "gem2.0".
::File.join(dir, base.sub(/^ruby/, 'gem'))
end
private
# Install the Ruby runtime. Must be implemented by subclass.
#
# @abstract
# @return [void]
def install_ruby
end
# Uninstall the Ruby runtime. Must be implemented by subclass.
#
# @abstract
# @return [void]
def uninstall_ruby
end
      # Install Bundler into the Ruby runtime.
#
# @return [void]
def install_bundler
# Captured because #options conflicts with Chef::Resource::Package#options.
bundler_version = options[:bundler_version]
return unless bundler_version
ruby_gem 'bundler' do
action :upgrade if bundler_version == true
parent_ruby new_resource
version bundler_version if bundler_version.is_a?(String)
end
end
end
end
end
| 26.686441 | 83 | 0.63544 |
edae29eddcc3143f15f259f66aff368d719c6a4a | 273 | class AjoutListeMotCacher < ActiveRecord::Migration
def self.up
add_column :grilles, :listes_mots, :string
add_column :grilles, :mot_cache, :integer
end
def self.down
remove_column :grilles, :listes_mots
remove_column :grilles, :mot_cache
end
end
| 21 | 51 | 0.736264 |
28c825f1835f366071f763adea03446e294e7d3e | 917 | # frozen_string_literal: true
require_relative "filename_helpers"
module Delfos
module MethodTrace
module CodeLocation
class CallSite
include FilenameHelpers
attr_reader :line_number, :container_method, :called_method
def initialize(file:, line_number:, container_method:, called_method:)
@file = file
@line_number = line_number.to_i
@container_method = container_method
@called_method = called_method
end
def container_method_path
container_method.raw_path
end
def called_method_path
called_method.raw_path
end
def summary
{
call_site: "#{file}:#{line_number}",
container_method: container_method.summary,
called_method: called_method.summary,
}
end
end
end
end
end
| 23.512821 | 78 | 0.609597 |
39cef4b3575fd6cf2a20bf3434e46ea7ead7f294 | 822 | # frozen_string_literal: true
require "rails_helper"
RSpec.describe "RenewingRegistrations", type: :request do
let(:registration) { create(:registration) }
describe "/bo/registrations/:reg_identifier" do
context "when a valid user is signed in" do
let(:user) { create(:user) }
before(:each) do
sign_in(user)
end
it "renders the index template and returns a 200 response" do
get "/bo/registrations/#{registration.reg_identifier}"
expect(response).to render_template(:show)
expect(response).to have_http_status(200)
end
context "when no matching registration exists" do
it "redirects to the dashboard" do
get "/bo/registrations/foo"
expect(response).to redirect_to(bo_path)
end
end
end
end
end
| 26.516129 | 67 | 0.660584 |
e9860b80f73348fc09973911c99904c8a4ddf758 | 3,601 | require 'spec_helper'
describe 'elasticsearch::template', :type => 'define' do
on_supported_os(
:hardwaremodels => ['x86_64'],
:supported_os => [
{
'operatingsystem' => 'CentOS',
'operatingsystemrelease' => ['6']
}
]
).each do |os, facts|
context "on #{os}" do
let(:facts) { facts.merge(
:scenario => '',
:common => ''
) }
let(:title) { 'foo' }
let(:pre_condition) do
'class { "elasticsearch" : }'
end
describe 'parameter validation' do
%i[api_ca_file api_ca_path].each do |param|
let :params do
{
:ensure => 'present',
:content => '{}',
param => 'foo/cert'
}
end
it 'validates cert paths' do
is_expected.to compile.and_raise_error(/expects a/)
end
end
describe 'missing parent class' do
let(:pre_condition) {}
it { should_not compile }
end
end
describe 'template from source' do
let :params do
{
:ensure => 'present',
:source => 'puppet:///path/to/foo.json',
:api_protocol => 'https',
:api_host => '127.0.0.1',
:api_port => 9201,
:api_timeout => 11,
:api_basic_auth_username => 'elastic',
:api_basic_auth_password => 'password',
:validate_tls => false
}
end
it { should contain_elasticsearch__template('foo') }
it do
should contain_es_instance_conn_validator('foo-template')
.that_comes_before('Elasticsearch_template[foo]')
end
it 'passes through parameters' do
should contain_elasticsearch_template('foo').with(
:ensure => 'present',
:source => 'puppet:///path/to/foo.json',
:protocol => 'https',
:host => '127.0.0.1',
:port => 9201,
:timeout => 11,
:username => 'elastic',
:password => 'password',
:validate_tls => false
)
end
end
describe 'class parameter inheritance' do
let :params do
{
:ensure => 'present',
:content => '{}'
}
end
let(:pre_condition) do
<<-EOS
class { 'elasticsearch' :
api_protocol => 'https',
api_host => '127.0.0.1',
api_port => 9201,
api_timeout => 11,
api_basic_auth_username => 'elastic',
api_basic_auth_password => 'password',
api_ca_file => '/foo/bar.pem',
api_ca_path => '/foo/',
validate_tls => false,
}
EOS
end
it do
should contain_elasticsearch_template('foo').with(
:ensure => 'present',
:content => '{}',
:protocol => 'https',
:host => '127.0.0.1',
:port => 9201,
:timeout => 11,
:username => 'elastic',
:password => 'password',
:ca_file => '/foo/bar.pem',
:ca_path => '/foo/',
:validate_tls => false
)
end
end
describe 'template deletion' do
let :params do
{
:ensure => 'absent'
}
end
it 'removes templates' do
should contain_elasticsearch_template('foo').with(:ensure => 'absent')
end
end
end
end
end
| 26.873134 | 80 | 0.465426 |
e2e836433c96a776b6ad4e60f394b18ff90a4f37 | 2,201 | # -*- encoding : us-ascii -*-
# frozen_string_literal: true
require 'minitest/autorun'
require 'rack/contrib/enforce_valid_encoding'
if "a string".respond_to?(:valid_encoding?)
require 'rack/mock'
require 'rack/contrib/enforce_valid_encoding'
VALID_PATH = "h%C3%A4ll%C3%B2"
INVALID_PATH = "/%D1%A1%D4%F1%D7%A2%B2%E1%D3%C3%BB%A7%C3%FB"
describe "Rack::EnforceValidEncoding" do
before do
@app = Rack::Lint.new(
Rack::EnforceValidEncoding.new(
lambda do |env|
[200, {'Content-Type'=>'text/plain'}, ['Hello World']]
end
)
)
end
describe "contstant assertions" do
it "INVALID_PATH should not be a valid UTF-8 string when decoded" do
_(Rack::Utils.unescape(INVALID_PATH).valid_encoding?).must_equal false
end
it "VALID_PATH should be valid when decoded" do
_(Rack::Utils.unescape(VALID_PATH).valid_encoding?).must_equal true
end
end
it "should accept a request with a correctly encoded path" do
response = Rack::MockRequest.new(@app).get(VALID_PATH)
_(response.body).must_equal("Hello World")
_(response.status).must_equal(200)
end
it "should reject a request with a poorly encoded path" do
response = Rack::MockRequest.new(@app).get(INVALID_PATH)
_(response.status).must_equal(400)
end
it "should accept a request with a correctly encoded query string" do
response = Rack::MockRequest.new(@app).get('/', 'QUERY_STRING' => VALID_PATH)
_(response.body).must_equal("Hello World")
_(response.status).must_equal(200)
end
it "should reject a request with a poorly encoded query string" do
response = Rack::MockRequest.new(@app).get('/', 'QUERY_STRING' => INVALID_PATH)
_(response.status).must_equal(400)
end
it "should reject a request containing malformed multibyte characters" do
response = Rack::MockRequest.new(@app).get('/', 'QUERY_STRING' => Rack::Utils.unescape(INVALID_PATH, Encoding::ASCII_8BIT))
_(response.status).must_equal(400)
end
end
else
STDERR.puts "WARN: Skipping Rack::EnforceValidEncoding tests (String#valid_encoding? not available)"
end
| 33.861538 | 129 | 0.679691 |
2866613d9db455fab8b4792aeb7b6853804e0eb4 | 725 |
Pod::Spec.new do |s|
s.platform = :ios
s.ios.deployment_target = '10.0'
s.name = "PodLocation"
s.version = "1.0.15"
s.summary = "PodLocation classes"
s.description = <<-DESC
A bunch of classes i have found useful for PodLocation
DESC
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = { "Etienne Goulet-Lang" => "" }
s.source = { :git => "https://github.com/egouletlang/PodLocation.git", :tag => "#{s.version}" }
s.homepage = "https://github.com/egouletlang/PodLocation"
s.source_files = "PodLocation", "PodLocation/**/*.{h,m}", "PodLocation/**/*.{swift}"
s.exclude_files = "Classes/Exclude"
s.dependency 'BaseUtils'
end
| 26.851852 | 103 | 0.588966 |
e8f527ee19dff075122ab78bf4282801c4e098e7 | 1,582 | #
# Be sure to run `pod lib lint A_Category.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'A_Category'
s.version = '0.4.0'
s.summary = 'A short description of A_Category.'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
TODO: Add long description of the pod here.
DESC
s.homepage = 'https://github.com/weixin1/A_Category'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'Terrence' => '[email protected]' }
s.source = { :git => 'https://github.com/weixin1/A_Category.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '8.0'
s.source_files = 'A_Category/Classes/**/*.{h,m}'
# s.resource_bundles = {
# 'A_Category' => ['A_Category/Assets/*.png']
# }
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
s.dependency "CTMediator"
end
| 36.790698 | 102 | 0.6378 |
f78f79a8bf5b2ba4c6a53bbf6404de6e73c54d00 | 267 | class CreateCharacter < ActiveRecord::Migration[6.0]
def change
create_table :characters do |t|
t.string :name
t.string :dynasty
t.boolean :marriage_status
t.string :culture
t.string :religion
t.string :sex
end
end
end
| 20.538462 | 52 | 0.64794 |
ac200fb86f9e801248283a9bd92b782abbce92f5 | 926 | KepplerBlog::Engine.routes.draw do
mount Ckeditor::Engine => '/ckeditor'
scope :admin, :blog do
resources :posts do
get 'find/subcategories', action: :subcategories_of_cagegory, on: :collection
get '(page/:page)', action: :index, on: :collection, as: 'search'
delete '/destroy_multiple', action: :destroy_multiple, on: :collection, as: :destroy_multiple
end
resources :categories do
get '(page/:page)', action: :index, on: :collection, as: 'search'
delete '/destroy_multiple', action: :destroy_multiple, on: :collection, as: :destroy_multiple
end
end
scope :blog do
get '(page/:page)', to: 'blog#index', as: :blog_listing
get ":type/:permalink", to: 'blog#filter', as: :filter
get "category/:category/subcategory/:subcategory", to: 'blog#filter_subcategory', as: :filter_subcategory
get '/:permalink', to: 'blog#show', as: :blog_show_post
end
end
| 35.615385 | 109 | 0.674946 |
ac18e8fca0cb580b344a84ea9b059fdf8913815e | 5,108 | test_name "Setup environment"
require 'json'
require 'open-uri'
step 'Configure paths' do
add_aio_defaults_on(hosts)
add_puppet_paths_on(hosts)
end
step "Install git and tar"
PACKAGES = {
:redhat => ['git'],
:debian => [
['git', 'git-core'],
],
:solaris_10 => [
'coreutils',
'git',
'gtar',
],
:windows => ['git'],
:sles => ['git'],
}
# We need to be able to override which tar we use on solaris; we call it later
# when we unpack the puppet-runtime archive.
tar = 'tar'
agents.each do |host|
case host['platform']
when /solaris/
tar = 'gtar'
if host['platform'] =~ /11/
# The file allows us to non-interactively install these packages with
# pkgutil on solaris 11. Solaris 10 does this as a part of the
# `install_packages_on` method in beaker. Since we install packages for
# solaris 11 using pkg by default, we can't use that method for sol11.
# We have to override it so that we can get git from opencws, as it has
# the updated ssl certs we need to access github repos.
create_remote_file host, "/root/shutupsolaris", <<-END
mail=
# Overwrite already installed instances
instance=overwrite
# Do not bother checking for partially installed packages
partial=nocheck
# Do not bother checking the runlevel
runlevel=nocheck
# Do not bother checking package dependencies (We take care of this)
idepend=nocheck
rdepend=nocheck
# DO check for available free space and abort if there isn't enough
space=quit
# Do not check for setuid files.
setuid=nocheck
# Do not check if files conflict with other packages
conflict=nocheck
# We have no action scripts. Do not check for them.
action=nocheck
# Install to the default base directory.
basedir=default
END
on host, 'pkgadd -d http://get.opencsw.org/now -a /root/shutupsolaris -n all'
on host, '/opt/csw/bin/pkgutil -U all'
on host, '/opt/csw/bin/pkgutil -y -i git'
on host, '/opt/csw/bin/pkgutil -y -i gtar'
end
host.add_env_var('PATH', '/opt/csw/bin')
end
end
install_packages_on(agents, PACKAGES, :check_if_exists => true)
step "Unpack puppet-runtime" do
need_to_run = false
agents.each do |host|
# we only need to unpack the runtime if the host doesn't already have runtime
    # and if it's not an existing container
need_to_run ||= (!host['has_runtime'] && !host['use_existing_container'])
end
if need_to_run
dev_builds_url = ENV['DEV_BUILDS_URL'] || 'http://builds.delivery.puppetlabs.net'
branch = ENV['RUNTIME_BRANCH'] || 'master'
# We want to grab whatever tag has been promoted most recently into the branch
# of puppet-agent that corresponds to whatever component we're working on.
# This will allow us to get the latest runtime package that has passed tests.
runtime_json = "https://raw.githubusercontent.com/puppetlabs/puppet-agent/#{branch}/configs/components/puppet-runtime.json"
runtime_tag = JSON.load(open(runtime_json))['version']
runtime_url = "#{dev_builds_url}/puppet-runtime/#{runtime_tag}/artifacts/"
runtime_prefix = "agent-runtime-#{branch}-#{runtime_tag}."
runtime_suffix = ".tar.gz"
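    # For illustration only (the tag below is made up): with branch 'master',
    # runtime_tag '201903190727.1' and an el-7-x86_64 host, the loop below
    # would fetch agent-runtime-master-201903190727.1.el-7-x86_64.tar.gz from
    # the artifacts URL built above.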
agents.each do |host|
next if host['has_runtime'] || host['use_existing_container']
platform_tag = host['packaging_platform']
if platform_tag =~ /windows/
# the windows version is hard coded to `2012r2`. Unfortunately,
# `host['packaging_platform']` is hard coded to `2012`, so we have to add
# the `r2` on our own.
platform, version, arch = platform_tag.split('-')
platform_tag = "#{platform}-#{version}r2-#{arch}"
end
tarball_name = runtime_prefix + platform_tag + runtime_suffix
on host, "curl -Of #{runtime_url}#{tarball_name}"
case host['platform']
when /windows/
on host, "gunzip -c #{tarball_name} | tar -k -C /cygdrive/c/ -xf -"
if arch == 'x64'
program_files = 'ProgramFiles64Folder'
else
program_files = 'ProgramFilesFolder'
end
if branch == '5.5.x'
bindir = "/cygdrive/c/#{program_files}/PuppetLabs/Puppet/sys/ruby/bin"
else
bindir = "/cygdrive/c/#{program_files}/PuppetLabs/Puppet/puppet/bin"
end
on host, "chmod 755 #{bindir}/*"
# Because the runtime archive for windows gets installed in a non-standard
# directory (ProgramFiles64Folder), we need to add it to the path here
# rather than rely on `host['privatebindir']` like we can for other
# platforms
host.add_env_var('PATH', bindir)
when /osx/
on host, "tar -xzf #{tarball_name}"
on host, "for d in opt var private; do rsync -ka \"${d}/\" \"/${d}/\"; done"
else
on host, "gunzip -c #{tarball_name} | #{tar} -k -C / -xf -"
end
end
end
end
step "Install bundler" do
# Only configure gem mirror after Ruby has been installed, but before any gems are installed.
configure_gem_mirror(agents)
agents.each do |host|
on host, "#{gem_command(host)} install bundler --no-ri --no-rdoc"
end
end
| 33.827815 | 127 | 0.667189 |
f8ba77e2bbaa46c8c0ce1472b91bcb2590569d9e | 179 | # frozen_string_literal: true
class AddSearchOnlyToAccounts < ActiveRecord::Migration[5.2]
def change
add_column :accounts, :search_only, :boolean, default: false
end
end
| 25.571429 | 64 | 0.776536 |
91f4441eab5cdbd9d25285c94893b85b14c301bf | 4,549 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# A collection of Rake tasks to facilitate importing data from your models into Elasticsearch.
#
# Add this e.g. into the `lib/tasks/elasticsearch.rake` file in your Rails application:
#
# require 'elasticsearch/rails/tasks/import'
#
# To import the records from your `Article` model, run:
#
# $ bundle exec rake environment elasticsearch:import:model CLASS='MyModel'
#
# Run this command to display usage instructions:
#
# $ bundle exec rake -D elasticsearch
#
STDOUT.sync = true
STDERR.sync = true
begin; require 'ansi/progressbar'; rescue LoadError; end
namespace :elasticsearch do
task :import => 'import:model'
namespace :import do
import_model_desc = <<-DESC.gsub(/ /, '')
Import data from your model (pass name as CLASS environment variable).
$ rake environment elasticsearch:import:model CLASS='MyModel'
Force rebuilding the index (delete and create):
$ rake environment elasticsearch:import:model CLASS='Article' FORCE=y
Customize the batch size:
$ rake environment elasticsearch:import:model CLASS='Article' BATCH=100
Set target index name:
$ rake environment elasticsearch:import:model CLASS='Article' INDEX='articles-new'
Pass an ActiveRecord scope to limit the imported records:
$ rake environment elasticsearch:import:model CLASS='Article' SCOPE='published'
DESC
desc import_model_desc
task :model do
if ENV['CLASS'].to_s == ''
puts '='*90, 'USAGE', '='*90, import_model_desc, ""
exit(1)
end
klass = eval(ENV['CLASS'].to_s)
total = klass.count rescue nil
pbar = ANSI::Progressbar.new(klass.to_s, total) rescue nil
pbar.__send__ :show if pbar
unless ENV['DEBUG']
begin
klass.__elasticsearch__.client.transport.logger.level = Logger::WARN
rescue NoMethodError; end
begin
klass.__elasticsearch__.client.transport.tracer.level = Logger::WARN
rescue NoMethodError; end
end
total_errors = klass.__elasticsearch__.import force: ENV.fetch('FORCE', false),
batch_size: ENV.fetch('BATCH', 1000).to_i,
index: ENV.fetch('INDEX', nil),
type: ENV.fetch('TYPE', nil),
scope: ENV.fetch('SCOPE', nil) do |response|
pbar.inc response['items'].size if pbar
STDERR.flush
STDOUT.flush
end
pbar.finish if pbar
puts "[IMPORT] #{total_errors} errors occurred" unless total_errors.zero?
puts '[IMPORT] Done'
end
desc <<-DESC.gsub(/ /, '')
Import all indices from `app/models` (or use DIR environment variable).
$ rake environment elasticsearch:import:all DIR=app/models
DESC
task :all do
dir = ENV['DIR'].to_s != '' ? ENV['DIR'] : Rails.root.join("app/models")
puts "[IMPORT] Loading models from: #{dir}"
Dir.glob(File.join("#{dir}/**/*.rb")).each do |path|
model_filename = path[/#{Regexp.escape(dir.to_s)}\/([^\.]+).rb/, 1]
next if model_filename.match(/^concerns\//i) # Skip concerns/ folder
begin
klass = model_filename.camelize.constantize
rescue NameError
          require(path) ? retry : raise(RuntimeError, "Cannot load class '#{model_filename.camelize}'")
end
# Skip if the class doesn't have Elasticsearch integration
next unless klass.respond_to?(:__elasticsearch__)
puts "[IMPORT] Processing model: #{klass}..."
ENV['CLASS'] = klass.to_s
Rake::Task["elasticsearch:import:model"].invoke
Rake::Task["elasticsearch:import:model"].reenable
puts
end
end
end
end
| 34.992308 | 94 | 0.650473 |
18ad44cb3b772fd3b393fe42b04f0a69a45b920d | 659 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
describe "Kernel#untrusted?" do
it "returns the untrusted status of an object" do
o = mock('o')
o.untrusted?.should == false
o.untrust
o.untrusted?.should == true
end
it "has no effect on immediate values" do
a = nil
b = true
c = false
a.untrust
b.untrust
c.untrust
a.untrusted?.should == false
b.untrusted?.should == false
c.untrusted?.should == false
end
it "has no effect on immediate values" do
d = 1
lambda { d.untrust }.should raise_error(RuntimeError)
end
end
| 22.724138 | 58 | 0.649469 |
38d436f5bdfc0b62037ef08ac41b5aed024a3846 | 1,904 | # The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
config.cache_classes = false
# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
# Configure public file server for tests with Cache-Control for performance.
config.public_file_server.enabled = true
config.public_file_server.headers = {
'Cache-Control' => "public, max-age=#{1.hour.to_i}"
}
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
config.cache_store = :null_store
# Raise exceptions instead of rendering exception templates.
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment.
config.action_controller.allow_forgery_protection = false
# Store uploaded files on the local file system in a temporary directory.
config.active_storage.service = :test
config.action_mailer.perform_caching = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
# Raises error for missing translations.
config.action_view.raise_on_missing_translations = true
end
| 38.857143 | 85 | 0.77416 |
26e17a7553ef616d1fb4aa06ddfac1ad45803758 | 1,614 | class Explore::ProjectsController < Explore::ApplicationController
include FilterProjects
def index
@projects = ProjectsFinder.new.execute(current_user)
@tags = @projects.tags_on(:tags)
@projects = @projects.tagged_with(params[:tag]) if params[:tag].present?
@projects = @projects.where(visibility_level: params[:visibility_level]) if params[:visibility_level].present?
@projects = filter_projects(@projects)
@projects = @projects.sort(@sort = params[:sort])
@projects = @projects.includes(:namespace).page(params[:page])
respond_to do |format|
format.html
format.json do
render json: {
html: view_to_html_string("dashboard/projects/_projects", locals: { projects: @projects })
}
end
end
end
def trending
@projects = filter_projects(Project.trending)
@projects = @projects.sort(@sort = params[:sort])
@projects = @projects.page(params[:page])
respond_to do |format|
format.html
format.json do
render json: {
html: view_to_html_string("dashboard/projects/_projects", locals: { projects: @projects })
}
end
end
end
def starred
@projects = ProjectsFinder.new.execute(current_user)
@projects = filter_projects(@projects)
@projects = @projects.reorder('star_count DESC')
@projects = @projects.page(params[:page])
respond_to do |format|
format.html
format.json do
render json: {
html: view_to_html_string("dashboard/projects/_projects", locals: { projects: @projects })
}
end
end
end
end
| 29.888889 | 114 | 0.66171 |
01cbf50fde6ed63b462fd0582753706a50078622 | 666 | Pod::Spec.new do |s|
s.name = "MaterialCatalog"
s.version = "95.0.0"
s.summary = "Helper Objective-C classes for the MDC catalog."
s.description = "This spec is made for use in the MDC Catalog."
s.homepage = "https://github.com/material-components/material-components-ios"
s.license = "Apache 2.0"
s.authors = { 'Apple platform engineering at Google' => '[email protected]' }
s.source = { :git => "https://github.com/material-components/material-components-ios.git", :tag => "v#{s.version}" }
s.source_files = "*.{h,m}"
s.public_header_files = "*.h"
s.dependency "MaterialComponents/Themes"
end
| 44.4 | 124 | 0.648649 |
bf00922ed63cc119da7867e815a3546d45838277 | 1,183 | require 'test_helper'
class UsersEditTest < ActionDispatch::IntegrationTest
def setup
@user = users(:michael)
end
test "unsuccessful edit" do
log_in_as(@user)
get edit_user_path(@user)
assert_template 'users/edit'
patch user_path(@user), params: { user: { name: "",
email: "foo@invalid",
password: "foo",
password_confirmation: "bar" } }
assert_template 'users/edit'
end
test "successful edit with friendly forwarding" do
get edit_user_path(@user)
log_in_as(@user)
assert_redirected_to edit_user_url(@user)
name = "Foo Bar"
email = "[email protected]"
    patch user_path(@user), params: { user: { name: name,
                                              email: email,
                                              password: "",
                                              password_confirmation: "" } }
assert_not flash.empty?
assert_redirected_to @user
@user.reload
assert_equal name, @user.name
assert_equal email, @user.email
end
end
| 32.861111 | 78 | 0.509721 |
ff27f135746f3e0cec96129b83d9f69324912df0 | 4,401 | require "active_support/core_ext/integer/time"
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV["RAILS_SERVE_STATIC_FILES"].present?
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Include generic and useful information about system operation, but avoid logging too much
# information to avoid inadvertent exposure of personally identifiable information (PII).
config.log_level = :info
# Prepend all log lines with the following tags.
config.log_tags = [:request_id]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "authenticating_proxy_production"
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Log disallowed deprecations.
config.active_support.disallowed_deprecation = :log
# Tell Active Support which deprecation messages to disallow.
config.active_support.disallowed_deprecation_warnings = []
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require "syslog/logger"
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Inserts middleware to perform automatic connection switching.
# The `database_selector` hash is used to pass options to the DatabaseSelector
# middleware. The `delay` is used to determine how long to wait after a write
# to send a subsequent read to the primary.
#
# The `database_resolver` class is used by the middleware to determine which
# database is appropriate to use based on the time delay.
#
# The `database_resolver_context` class is used by the middleware to set
# timestamps for the last write to the primary. The resolver uses the context
# class timestamps to determine how long to wait before reading from the
# replica.
#
# By default Rails will store a last write timestamp in the session. The
# DatabaseSelector middleware is designed as such you can define your own
# strategy for connection switching and pass that into the middleware through
# these configuration options.
# config.active_record.database_selector = { delay: 2.seconds }
# config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
# config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
| 44.908163 | 114 | 0.76778 |