hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
3965f0efc201adc20c5e79e4ae3911858debf47e | 1,273 |
describe 'tungsten::service' do
let(:chef_run) {
  # Converge the recipe on CentOS 6.5 with the tungsten user/home overridden
  # so the rendered init script targets user 'bob'.
  # NOTE(review): node.set is deprecated in Chef >= 13 (use node.normal or
  # ChefSpec attribute overrides) — confirm the pinned Chef/ChefSpec version
  # before changing.
  ChefSpec::SoloRunner.new(platform: 'centos', version: '6.5') do |node|
    node.set['tungsten']['systemUser'] = 'bob'
    node.set['tungsten']['homeDir'] = '/home/bob'
  end.converge(described_recipe)
}
it 'should create an init.d script' do
  # The SysV init script must be root-owned and executable.
  expect(chef_run).to create_template('/etc/init.d/tungsten')
    .with(owner: 'root', mode: 0755)
end
it 'should allow the init.d script to start tungsten' do
  # FIX: the original pattern /start\(\) \{{\n|\s}*.../ contained an unescaped
  # "{...|...}" whose "|" acted as a top-level alternation, so the matcher never
  # actually required "start() {" before the su command. A character class
  # expresses the intended "any run of whitespace/newlines".
  expect(chef_run).to render_file('/etc/init.d/tungsten').with_content(/start\(\) \{[\n\s]*su - bob -c \/home\/bob\/tungsten\/cluster-home\/bin\/startall &/)
end
it 'should allow the init.d script to stop tungsten' do
  # FIX: same regex defect as the start-script example — the unescaped
  # "{\n|\s}*" introduced a top-level alternation; use [\n\s]* instead so the
  # expectation really anchors on "stop() {".
  expect(chef_run).to render_file('/etc/init.d/tungsten').with_content(/stop\(\) \{[\n\s]*su - bob -c \/home\/bob\/tungsten\/cluster-home\/bin\/stopall &/)
end
it 'should enable the script' do
  # Registers the service with chkconfig and enables it for runlevels 2-5.
  enable_command = 'chkconfig --add tungsten && chkconfig --level 2345 tungsten on'
  expect(chef_run).to run_execute(enable_command)
end
it 'should define a tungsten service resource' do
  # The service resource is declared but not actioned directly; it supports
  # the full status/start/stop/restart set.
  service = chef_run.service('tungsten')
  expect(service).to do_nothing
  expect(service.supports).to eq(status: true, start: true, stop: true, restart: true)
end
end | 35.361111 | 160 | 0.660644 |
d5f2f4b4f4b0483c1a9f739b2d3315d68b151604 | 2,282 | class K3d < Formula
desc "Little helper to run Rancher Lab's k3s in Docker"
homepage "https://k3d.io"
url "https://github.com/rancher/k3d/archive/v4.4.3.tar.gz"
sha256 "b7ff3d5fac9d0bc6c58c3e5abdb5b2f38bf63a7a6bf0c3872e64f63879f4c160"
license "MIT"

# Track upstream GitHub release tags of the form v1.2.3.
livecheck do
  url :stable
  regex(/^v?(\d+(?:\.\d+)+)$/i)
end

# Pre-built bottles; :any_skip_relocation means they are installed as-is
# without path relocation.
bottle do
  sha256 cellar: :any_skip_relocation, arm64_big_sur: "79d780f452c7646fd7e902e340d5ed2e5bf17f9c8e563635a1db916b6d49d06a"
  sha256 cellar: :any_skip_relocation, big_sur:       "9dedc3c56e23abeaa53b22746ec07318a43e06134aad8fc66f814ec5d56adcde"
  sha256 cellar: :any_skip_relocation, catalina:      "602ac41c99582edc3484333d2996802dec63fc1b27c8394df05777d5d209524a"
  sha256 cellar: :any_skip_relocation, mojave:        "c5cd1b6c5ae0c752328954b70c7ac2209149b5a90f01d7a533940aff3c8fe8ba"
  sha256 cellar: :any_skip_relocation, x86_64_linux:  "27d70ff6c10eac561cee6c1d320b0922f2da55cdf068eb5e9fc4312da4cb886a"
end

# Go is only needed to build the binary, not at runtime.
depends_on "go" => :build
def install
  # Strip symbols (-s -w) and stamp the k3d/k3s versions into the binary.
  ldflags = "-s -w -X github.com/rancher/k3d/v#{version.major}/version.Version=v#{version}" \
            " -X github.com/rancher/k3d/v#{version.major}/version.K3sVersion=latest"
  system "go", "build", "-mod", "vendor", "-ldflags", ldflags, "-trimpath", "-o", bin/"k3d"

  # Generate and install shell completions straight from the built binary.
  {
    "bash" => bash_completion/"k3d",
    "zsh"  => zsh_completion/"_k3d",
    "fish" => fish_completion/"k3d.fish",
  }.each do |shell, completion_path|
    completion_path.write Utils.safe_popen_read("#{bin}/k3d", "completion", shell)
  end
end
# Smoke test: check the stamped version string, then look up a cluster that
# cannot exist and confirm the error message.
test do
  assert_match "k3d version v#{version}\nk3s version latest (default)", shell_output("#{bin}/k3d --version")
  # Either docker is not present or it is, where the command will fail in the first case.
  # In any case I wouldn't expect a cluster with name 6d6de430dbd8080d690758a4b5d57c86 to be present
  # (which is the md5sum of 'homebrew-failing-test')
  output = shell_output("#{bin}/k3d cluster get 6d6de430dbd8080d690758a4b5d57c86 2>&1", 1).split("\n").pop
  assert_match "No nodes found for given cluster", output
end
end
| 43.884615 | 122 | 0.718668 |
bb575c117dcbaf3069f2af888756dfc0f35d2cef | 710 | # frozen_string_literal: true
require "spec_helper"
RSpec.describe "Run command with arguments" do
  before(:all) { setup_environment }
  after(:all) { command_helpers_teardown }

  describe "help command" do
    it "outputs the name, description, version number and usage for the git-cmd command" do
      run_system_call "#{git_cmd_path} help"

      # `include` with multiple arguments requires every value to be present.
      expect(last_command.output).to include(
        "git-cmd – Git Commander allows running custom git commands from a centralized location",
        GitCommander::VERSION,
        "git-cmd command [command options] [arguments...]"
      )
    end
  end
end
| 27.307692 | 108 | 0.711268 |
6127416a57d63aade4b89fb2f541518592f6f5fe | 16,717 | require "pact_broker/pacts/verifiable_pact_messages"
require "pact_broker/pacts/verifiable_pact"
require "pact_broker/pacts/selectors"
module PactBroker
module Pacts
describe VerifiablePactMessages do
# Baseline collaborator setup; individual contexts override these lets to
# exercise specific message permutations.
let(:pending_provider_tags) { [] }
let(:non_pending_provider_tags) { [] }
let(:provider_branch) { nil }
let(:pending) { false }
let(:wip) { false }
let(:selectors) { Selectors.new }
let(:pact_version_url) { "http://pact" }
# Verified double standing in for the pact selected for verification.
let(:verifiable_pact) do
  double(VerifiablePact,
    consumer: double("consumer", main_branch: "main"),
    consumer_name: "Foo",
    consumer_version_number: "123",
    provider_name: "Bar",
    pending_provider_tags: pending_provider_tags,
    non_pending_provider_tags: non_pending_provider_tags,
    pending?: pending,
    wip?: wip,
    selectors: selectors,
    provider_branch: provider_branch
  )
end
let(:consumer_version) { double("version", number: "1234", order: 1) }
let(:environment) { instance_double("PactBroker::Deployments::Environment", name: "test", production?: false) }
let(:test_environment) { environment }
let(:prod_environment) { instance_double("PactBroker::Deployments::Environment", name: "prod", production?: true) }
subject { VerifiablePactMessages.new(verifiable_pact, pact_version_url) }
describe "#inclusion_reason" do
context "when there is one selector" do
let(:selectors) { Selectors.create_for_overall_latest.resolve(consumer_version) }
its(:inclusion_reason) { is_expected.to include "The pact at http://pact is being verified because the pact content belongs to the consumer version matching the following criterion:" }
end
context "when there is more than one selector" do
let(:selectors) { Selectors.create_for_latest_of_each_branch(%w[main feat-x]).resolve(consumer_version) }
its(:inclusion_reason) { is_expected.to include "The pact at http://pact is being verified because the pact content belongs to the consumer versions matching the following criteria:" }
end
context "when there are no head consumer tags" do
let(:selectors) { Selectors.create_for_overall_latest.resolve(consumer_version) }
its(:inclusion_reason) { is_expected.to include "latest version of a consumer that has a pact with Bar (1234)" }
end
context "when there is 1 head consumer tags" do
let(:selectors) { Selectors.create_for_latest_of_each_tag(%w[dev]).resolve(consumer_version) }
its(:inclusion_reason) { is_expected.to include "latest version tagged 'dev' (1234)" }
its(:pact_description) { is_expected.to eq "Pact between Foo and Bar, consumer version 123, latest with tag dev"}
end
context "when there are branches" do
let(:selectors) { Selectors.create_for_latest_of_each_branch(%w[main feat-x]).resolve(consumer_version) }
its(:inclusion_reason) { is_expected.to include "latest version from branch 'feat-x' (1234)" }
its(:inclusion_reason) { is_expected.to include "latest version from branch 'main' (1234)" }
its(:pact_description) { is_expected.to eq "Pact between Foo and Bar, consumer version 123, latest from branch main, latest from branch feat-x"}
end
context "when there are branches and tags" do
let(:selectors) { Selectors.new([Selector.latest_for_branch("main"), Selector.latest_for_tag("prod")]).resolve(consumer_version) }
its(:inclusion_reason) { is_expected.to include "latest version from branch 'main' (1234)" }
its(:inclusion_reason) { is_expected.to include "latest version tagged 'prod' (1234)" }
end
context "when there are 2 head consumer tags" do
let(:selectors) { Selectors.create_for_latest_of_each_tag(%w[dev prod]).resolve(consumer_version) }
its(:inclusion_reason) { is_expected.to include "latest version tagged 'dev' (1234)" }
its(:inclusion_reason) { is_expected.to include "latest version tagged 'prod' (1234)" }
end
context "when the pact was selected by the fallback tag" do
let(:selectors) { Selectors.new(Selector.latest_for_tag_with_fallback("feat-x", "master").resolve_for_fallback(consumer_version)) }
its(:inclusion_reason) { is_expected.to include "latest version tagged 'master' (fallback tag used as no pact was found with tag 'feat-x') (1234)" }
end
context "when the pact was selected by the fallback tag" do
let(:selectors) { Selectors.new(Selector.latest_for_branch_with_fallback("feat-x", "master").resolve_for_fallback(consumer_version)) }
its(:inclusion_reason) { is_expected.to include "latest version from branch 'master' (fallback branch used as no pact was found from branch 'feat-x') (1234)" }
end
context "when the pact was for the main branch" do
let(:selectors) { Selectors.new(Selector.for_main_branch.resolve(consumer_version)) }
context "when the main branch is called main" do
its(:inclusion_reason) { is_expected.to include "latest version from the main branch 'main' (1234)" }
end
end
context "when the pact is a WIP pact for the specified provider tags" do
let(:selectors) { Selectors.create_for_latest_of_each_tag(%w[feat-x]).resolve(consumer_version) }
let(:wip) { true }
let(:pending) { true }
let(:pending_provider_tags) { %w[dev] }
its(:inclusion_reason) { is_expected.to include "The pact at http://pact is being verified because it is a 'work in progress' pact (ie. it is the pact for the latest version of Foo tagged with 'feat-x' and it has not yet been successfully verified by a version of Bar with tag 'dev' when the pact's application version was explicitly specified in the consumer version selectors). Read more at https://docs.pact.io/go/wip"}
context "when the pact is a WIP pact for a consumer branch" do
let(:selectors) { Selectors.create_for_latest_of_each_branch(%w[feat-x feat-y]).resolve(consumer_version) }
its(:inclusion_reason) { is_expected.to include "The pact at http://pact is being verified because it is a 'work in progress' pact (ie. it is the pact for the latest versions of Foo from branches 'feat-x' and 'feat-y' (both have the same content)"}
end
# FIX: typo in the example description ("consumer rags" -> "consumer tags").
context "when the pact is a WIP pact for a consumer branch and consumer tags" do
  let(:selectors) { Selectors.create_for_latest_of_each_branch(%w[feat-x feat-y]).resolve(consumer_version) + Selectors.create_for_latest_of_each_tag(%w[feat-z feat-w]).resolve(consumer_version) }
  its(:inclusion_reason) { is_expected.to include "it is the pact for the latest versions of Foo from branches 'feat-x' and 'feat-y' and tagged with 'feat-z' and 'feat-w' (all have the same content)"}
end
end
context "when the pact is one of all versions for a tag" do
let(:selectors) { Selectors.create_for_all_of_each_tag(%w[prod]).resolve(consumer_version) }
its(:inclusion_reason) { is_expected.to include "all consumer versions tagged 'prod' (1234)"}
end
context "when the pact is one of all versions for a tag and consumer" do
let(:selectors) { Selectors.new(Selector.all_for_tag_and_consumer("prod", "Foo")).resolve(consumer_version) }
its(:inclusion_reason) { is_expected.to include "all Foo versions tagged 'prod' (1234)"}
end
context "when the pact is the latest version for a tag and consumer" do
let(:selectors) { Selectors.new(Selector.latest_for_tag_and_consumer("prod", "Foo")).resolve(consumer_version) }
its(:inclusion_reason) { is_expected.to include "latest version of Foo tagged 'prod' (1234)"}
end
context "when the pact is the latest version for a branch and consumer" do
let(:selectors) { Selectors.new(Selector.latest_for_branch_and_consumer("main", "Foo")).resolve(consumer_version) }
its(:inclusion_reason) { is_expected.to include "latest version of Foo from branch 'main' (1234)"}
end
context "when the pact is the latest version for a consumer" do
let(:selectors) { Selectors.new(Selector.latest_for_consumer("Foo")).resolve(consumer_version) }
its(:inclusion_reason) { is_expected.to include "latest version of Foo that has a pact with Bar (1234)"}
end
context "when the consumer version is currently deployed to a single environment" do
let(:selectors) { Selectors.new(Selector.for_currently_deployed("test")).resolve_for_environment(consumer_version, environment) }
its(:inclusion_reason) { is_expected.to include "consumer version(s) currently deployed to test (1234)"}
end
context "when the consumer version is released and supported in a single environment" do
let(:selectors) { Selectors.new(Selector.for_currently_supported("test")).resolve_for_environment(consumer_version, environment) }
its(:inclusion_reason) { is_expected.to include "consumer version(s) released and supported in test (1234)"}
end
context "when the consumer version is currently released/deployed in single environment" do
let(:selectors) { Selectors.new(Selector.for_environment("test")).resolve_for_environment(consumer_version, environment) }
its(:inclusion_reason) { is_expected.to include "a consumer version in environment test (1234)"}
end
# FIX: typo in the example description ("verison" -> "version").
context "when the version of a specific consumer is currently released/deployed in single environment" do
  let(:selectors) { Selectors.new(Selector.for_environment_and_consumer("test", "Foo")).resolve_for_environment(consumer_version, environment) }
  its(:inclusion_reason) { is_expected.to include "Foo version in environment test (1234)"}
end
context "when the consumer version is currently deployed to a multiple environments" do
let(:selectors) do
Selectors.new(
Selector.for_currently_deployed("dev").resolve_for_environment(consumer_version, double("environment", name: "dev", production?: false)),
Selector.for_currently_deployed("test").resolve_for_environment(consumer_version, test_environment),
Selector.for_currently_deployed("prod").resolve_for_environment(consumer_version, prod_environment)
)
end
its(:inclusion_reason) { is_expected.to include "consumer version(s) currently deployed to dev (1234), test (1234) and prod (1234)"}
end
context "when the currently deployed consumer version is for a consumer" do
let(:selectors) do
Selectors.new(
Selector.for_currently_deployed_and_environment_and_consumer("test", "Foo").resolve_for_environment(consumer_version, test_environment),
Selector.for_currently_deployed_and_environment_and_consumer("prod", "Foo").resolve_for_environment(consumer_version, prod_environment),
Selector.for_currently_deployed_and_environment_and_consumer("test", "Bar").resolve_for_environment(consumer_version, test_environment),
Selector.for_currently_deployed("test").resolve_for_environment(consumer_version, test_environment),
)
end
its(:inclusion_reason) { is_expected.to include "version(s) of Foo currently deployed to test (1234) and prod (1234)"}
its(:inclusion_reason) { is_expected.to include "version(s) of Bar currently deployed to test (1234)"}
its(:inclusion_reason) { is_expected.to include "consumer version(s) currently deployed to test (1234)"}
end
end
describe "#pending_reason" do
context "when the pact is not pending" do
context "when there are no non_pending_provider_tags or a provider branch" do
its(:pending_reason) { is_expected.to include "This pact has previously been successfully verified by Bar. If this verification fails, it will fail the build." }
end
context "when there is 1 non_pending_provider_tag" do
let(:non_pending_provider_tags) { %w[dev] }
its(:pending_reason) { is_expected.to include "This pact has previously been successfully verified by a version of Bar with tag 'dev'. If this verification fails, it will fail the build."}
end
context "when there is a provider branch" do
let(:provider_branch) { "main" }
let(:non_pending_provider_tags) { %w[dev] }
# uses branch in preference as that's what the WIP pacts logic does
its(:pending_reason) { is_expected.to include "This pact has previously been successfully verified by a version of Bar from branch 'main'. If this verification fails, it will fail the build."}
end
end
context "when the pact is pending but not wip" do
let(:pending) { true }
context "when there are no non_pending_provider_tags or a provider_branch" do
its(:pending_reason) { is_expected.to include "This pact is in pending state for this version of Bar because a successful verification result for Bar has not yet been published. If this verification fails, it will not cause the overall build to fail." }
end
context "when there is a provider_branch" do
let(:provider_branch) { "main" }
its(:pending_reason) { is_expected.to include "This pact is in pending state for this version of Bar because a successful verification result for a version of Bar from branch 'main' has not yet been published. If this verification fails, it will not cause the overall build to fail." }
end
context "when there is 1 pending_provider_tag" do
let(:pending_provider_tags) { %w[dev] }
its(:pending_reason) { is_expected.to include "This pact is in pending state for this version of Bar because a successful verification result for a version of Bar with tag 'dev' has not yet been published. If this verification fails, it will not cause the overall build to fail." }
end
context "when there are 2 pending_provider_tags" do
  let(:pending_provider_tags) { %w[dev feat-x] }
  # NOTE(review): the expected copy "a versions of Bar with tag 'dev' and
  # 'feat-x'" is ungrammatical, but it asserts the string produced by the
  # production code — fix the wording there and update this expectation
  # together, not here alone.
  its(:pending_reason) { is_expected.to include "This pact is in pending state for this version of Bar because a successful verification result for a versions of Bar with tag 'dev' and 'feat-x' has not yet been published. If this verification fails, it will not cause the overall build to fail." }
end
context "when there are 3 pending_provider_tags" do
let(:pending_provider_tags) { %w[dev feat-x feat-y] }
its(:pending_reason) { is_expected.to include "'dev', 'feat-x' and 'feat-y'" }
end
end
context "when the pact is pending and is wip" do
let(:pending) { true }
let(:wip) { true }
context "when there are no non_pending_provider_tags or a provider_branch" do
its(:pending_reason) { is_expected.to include "This pact is in pending state for this version of Bar because it was included as a 'work in progress' pact. If this verification fails, it will not cause the overall build to fail." }
end
context "when there is a provider_branch" do
let(:provider_branch) { "main" }
its(:pending_reason) { is_expected.to include "This pact is in pending state for this version of Bar because it was included as a 'work in progress' pact. If this verification fails, it will not cause the overall build to fail." }
end
context "when there is 1 pending_provider_tag" do
let(:pending_provider_tags) { %w[dev] }
its(:pending_reason) { is_expected.to include "This pact is in pending state for this version of Bar because it was included as a 'work in progress' pact. If this verification fails, it will not cause the overall build to fail." }
end
context "when there are 2 pending_provider_tags" do
let(:pending_provider_tags) { %w[dev feat-x] }
its(:pending_reason) { is_expected.to include "This pact is in pending state for this version of Bar because it was included as a 'work in progress' pact. If this verification fails, it will not cause the overall build to fail." }
end
end
end
end
end
end
| 59.703571 | 432 | 0.687085 |
bb68484f43728e9298f2d3fb3ba60e3fe28c5def | 3,404 | class Job
module SdmExtension
# Stage a new SDM job: write args.txt and any uploaded species data into a
# temp directory, zip it into the dropbox for the chosen analysis server, and
# schedule a progress check.
def process_new_sdm
  Rails.logger.info("I'm about to process sdm job #{self.id}")
  # sdm colour: #31B369
  # The coordinates may be stored as a proper coordinate string or as four
  # simple values supplied by simple users; strip CR/CRLF either way.
  # NOTE(review): removed an unused `user = User.find_by_email(self.email)`
  # lookup whose result was never used.
  coordinates = self.parameters[:coords].gsub(/\r\n?/, "")
  # Build the args.txt consumed by the compiled sdm job (continuation lines
  # are flush-left so no stray whitespace enters the file).
  outstring =
    "\"coords\",\"#{coordinates}\",\n\
\"jobid\",\"#{self.id}\",\n\
\"jobdescription\",\"#{self.job_description}\",\n\
\"jobtitle\",\"#{self.job_name}\",\n\
\"jobsubmitter\",\"#{self.email}\",\n\
\"species\",\"#{self.parameters['species'].gsub(/[\n\r]+/,",")}\"\n"
  # First, create the directory structure and input files.
  tmpdir = "/tmp/#{self.id}"
  begin
    FileUtils.mkdir_p("#{tmpdir}/input/data/gbif")
    File.open("#{tmpdir}/input/args.txt", "w") { |f| f.write(outstring) }
    unless self.infile.nil?
      # Write the uploaded species zip, extract its contents flat (-j), then
      # remove the zip itself.
      File.open("#{tmpdir}/input/data/gbif/#{self.inname}", "w") { |f| f.write(self.infile.read.force_encoding('UTF-8')) }
      `cd #{tmpdir}/input/data/gbif/ && unzip -j #{self.inname}`
      File.unlink("#{tmpdir}/input/data/gbif/#{self.inname}")
    end
  rescue StandardError => e
    # FIX: was a bare `rescue` that discarded the error; log it before failing.
    Rails.logger.error("sdm job #{self.id} file preparation failed: #{e.message}")
    self.jobfail("Could not write files for sdm job ", "files not written")
    return
  end
  # Pick the analysis server for this job type.
  choice = Account.hostcheck(self.type)
  # Then create a zipfile and put it in the dropbox dir for pickup.
  dropdir = "#{Vibrant::Application.config.dropbox}/sdm"
  begin
    `cd /tmp && zip -r #{self.id}.#{choice}.zip #{self.id}`
    FileUtils.mv "/tmp/#{self.id}.#{choice}.zip", "#{dropdir}"
    system("chmod 777 #{dropdir}/#{self.id}.#{choice}.zip")
    FileUtils.rm_r tmpdir
  rescue StandardError => e
    Rails.logger.error("sdm job #{self.id} zip preparation failed: #{e.message}")
    self.jobfail("Could not prepare zip for sdm job ", "zip creation failure")
    return
  end
  # NOTE(review): update_attributes is deprecated/removed in Rails >= 6.1;
  # switch to update once the Rails version is confirmed.
  self.update_attributes(:status => 'in progress')
  CheckingWorker.perform_at(15.minutes.from_now, self.id)
end
# Poll the dropbox for a result zip: *.fail.zip marks server-side failure,
# *.done.zip marks success (attach output, mail the user); otherwise re-poll
# in 15 minutes.
def check_progress_sdm
  dropdir = "#{Vibrant::Application.config.dropbox}/sdm"
  # FIX: File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
  if File.exist?("#{dropdir}/#{self.id}.fail.zip")
    self.jobfail("Server says no for ", "server reports job failed")
    self.outfile = File.open("#{dropdir}/#{self.id}.fail.zip")
    self.save
    File.unlink("#{dropdir}/#{self.id}.fail.zip")
    return
  elsif File.exist?("#{dropdir}/#{self.id}.done.zip")
    begin
      FileUtils.mv "#{dropdir}/#{self.id}.done.zip", "#{dropdir}/#{self.id}.zip"
      self.outfile = File.open("#{dropdir}/#{self.id}.zip")
      self.status = 'finished'
      self.save
      # Notify the submitter of success.
      user = User.find_by_email(self.email)
      UserMailer.job_ready(user, self).deliver
      File.unlink("#{dropdir}/#{self.id}.zip")
    rescue StandardError => e
      # FIX: was a bare `rescue` that discarded the error; log it before failing.
      Rails.logger.error("sdm job #{self.id} outfile handling failed: #{e.message}")
      self.jobfail("Zip handling failed for ", "could not process outfile")
      return
    end
  else
    # Still running — check again later.
    CheckingWorker.perform_at(15.minutes.from_now, self.id)
  end
end
end
end
| 37 | 124 | 0.592244 |
bb608f16f7a90500919e0bbdffb1682767796830 | 776 | class Ecm::Youtube::Backend::CategoriesController < Itsf::Backend::Resource::BaseController
def self.resource_class
  # Model class managed by this backend controller.
  #
  # Set the resource class here.
  #
  # Default: Ecm::Youtube::Category
  #
  Ecm::Youtube::Category
end

private

def collection_scope
  # Customize the collection scope here for collection retrieval (i.e. for the
  # index action).
  #
  # Example: current_user.posts.includes(:comments)
  #
  # Default: resource_class
  #
  resource_class
end

def permitted_params
  # Set the allowed (strong) params for your create and update methods.
  #
  # Example: params
  #            .require(:category)
  #            .permit(:title, :body)
  #
  params
    .require(:category)
    .permit(:identifier)
end
end
| 22.171429 | 91 | 0.635309 |
ede25467bc5f2dcf1b155cfef1879820d1e0b54d | 2,434 | require 'csv'
namespace :reports do
  namespace :shop do
    # Following task will generate daily report with terminated benefit applications
    # RAILS_ENV=production bundle exec rake reports:shop:benefit_application_terminated_list['termination_date']
    # RAILS_ENV=production bundle exec rake reports:shop:benefit_application_terminated_list['02/01/2017']
    desc "Report of Benefit Applications Terminated"
    task :benefit_application_terminated_list, [:termination_date] => :environment do |task, args|
      include Config::AcaHelper

      # Parse the m/d/Y argument into the report's termination-date window.
      window_date = Date.strptime(args[:termination_date], "%m/%d/%Y")
      valid_states = [:terminated, :termination_pending]
      # Sponsorships with at least one benefit application terminated on the window date.
      terminated_sponsorships = BenefitSponsors::BenefitSponsorships::BenefitSponsorship.where({
        "benefit_applications" => {
          "$elemMatch" => {
            "terminated_on" => window_date,
            "aasm_state" => {"$in" => valid_states}
          }
        }
      })

      processed_count = 0
      file_name = fetch_file_format('benefit_application_terminated_list', 'BENEFITAPPLICATIONTERMINATEDLIST')
      field_names = [ "FEIN", "Legal Name", "DBA", "AASM State", "Plan Year Effective Date", "OE Close Date", "Termination reason", "Termination Kind"]

      CSV.open(file_name, "w") do |csv|
        csv << field_names
        terminated_sponsorships.each do |terminated_sponsorship|
          begin
            employer_profile = terminated_sponsorship.profile
            benefit_applications = terminated_sponsorship.benefit_applications.benefit_terminate_on(window_date)
            benefit_applications.each do |benefit_application|
              csv << [
                employer_profile.fein,
                employer_profile.legal_name,
                employer_profile.dba,
                benefit_application.aasm_state,
                benefit_application.start_on.to_date.to_s,
                benefit_application.open_enrollment_end_on.to_date.to_s,
                benefit_application.termination_reason,
                benefit_application.termination_kind
              ]
            end
          rescue StandardError => e
            # FIX: was `rescue Exception` (which also swallows signals and
            # SystemExit) and the message string was built but never emitted.
            puts "Exception #{e}"
          end
          processed_count += 1
        end
      end
      puts "For #{window_date}, #{processed_count} benefit application terminations output to file: #{file_name}"
    end
  end
end
| 42.701754 | 154 | 0.643796 |
1acf3220a06ea741ed81c7a808f3657b440206b5 | 1,788 | require File.join(File.dirname(__FILE__), 'hao_de_generator')
module Merb::Generators
class HaoDeLayout < HaoDeGenerator
  # Messages queued during generation and echoed back in #after_generation.
  # NOTE(review): a class variable (@@) is shared across the whole inheritance
  # tree; a class-instance variable would be safer if subclasses appear.
  @@after_generation_messages = []

  def self.source_root
    File.join(File.dirname(__FILE__), 'templates', 'layout')
  end

  desc <<-DESC
    Generates a hao de layout.
  DESC

  # Copy the application layout template into the target app.
  template :layout do |template|
    template.source = "application.html.haml"
    template.destination = "app/views/layout/application.html.haml"
  end

  # Copy the menu partial and perform the related one-off setup.
  template :menu do |template|
    template.source = "_menu.html.haml"
    template.destination = "app/views/layout/_menu.html.haml"
    # presumably defined on HaoDeGenerator — confirm create_menu_items_helper
    # exists in the parent generator.
    create_menu_items_helper
    use_haml_template_engine
  end
# Rewrites config/init.rb so the project uses haml instead of erb as the
# Merb template engine (no-op if haml is already configured or init.rb is
# missing).
def use_haml_template_engine
  # use_template_engine :haml
  init_path = Merb.root + "/config/init.rb"
  # FIX: File.exists? was deprecated and removed in Ruby 3.2.
  return unless File.exist?(init_path)

  content = File.read(init_path)
  if content !~ /use_template_engine\s+:haml/ && content =~ /use_template_engine\s+:erb/
    content.gsub!(/use_template_engine\s+:erb/, 'use_template_engine :haml')
    File.open(init_path, 'wb') { |file| file.write(content) }
    add_message "now uses haml as a template engine"
  end
end
# Print any messages queued via #add_message once generation has finished.
def after_generation
  # Idiom: guard on empty? instead of comparing size > 0.
  return if @@after_generation_messages.empty?

  @@after_generation_messages.each do |mess|
    STDOUT << message(mess)
  end
end
# Queue a message to be shown after generation completes.
def add_message(mess)
  @@after_generation_messages.push(mess)
end
  # Chain the companion generators so layout generation also produces the
  # matching javascript and stylesheet assets.
  invoke :hao_de_javascripts do |generator|
    generator.new(destination_root, options)
  end

  invoke :hao_de_stylesheet do |generator|
    generator.new(destination_root, options)
  end
end

# Register the generator under the :hao_de_layout name.
add :hao_de_layout, HaoDeLayout
end | 27.9375 | 106 | 0.646532 |
ffe9f76ff3d51fe4d10b965956097cd9b29fbfdc | 8,575 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::IAM
class RolePolicy
extend Aws::Deprecations
# @overload def initialize(role_name, name, options = {})
# @param [String] role_name
# @param [String] name
# @option options [Client] :client
# @overload def initialize(options = {})
# @option options [required, String] :role_name
# @option options [required, String] :name
# @option options [Client] :client
def initialize(*args)
  # Identifiers may be given positionally or via the trailing options hash
  # (see the @overload docs above); the hash is dup'ed before mutation.
  options = Hash === args.last ? args.pop.dup : {}
  @role_name = extract_role_name(args, options)
  @name = extract_name(args, options)
  # Optional pre-fetched data; otherwise lazily loaded on first #data access.
  @data = options.delete(:data)
  @client = options.delete(:client) || Client.new(options)
  @waiter_block_warned = false
end
# @!group Read-Only Attributes
# @return [String]
def role_name
@role_name
end
# @return [String]
def name
@name
end
alias :policy_name :name
# The policy document.
#
# IAM stores policies in JSON format. However, resources that were
# created using AWS CloudFormation templates can be formatted in YAML.
# AWS CloudFormation always converts a YAML policy to JSON format before
# submitting it to IAM.
# @return [String]
def policy_document
data[:policy_document]
end
# @!endgroup
# @return [Client]
def client
@client
end
# Loads, or reloads {#data} for the current {RolePolicy}.
# Returns `self` making it possible to chain methods.
#
# role_policy.reload.data
#
# @return [self]
def load
resp = @client.get_role_policy(
role_name: @role_name,
policy_name: @name
)
@data = resp.data
self
end
alias :reload :load
# @return [Types::GetRolePolicyResponse]
# Returns the data for this {RolePolicy}. Calls
# {Client#get_role_policy} if {#data_loaded?} is `false`.
def data
load unless @data
@data
end
# @return [Boolean]
# Returns `true` if this resource is loaded. Accessing attributes or
# {#data} on an unloaded resource will trigger a call to {#load}.
def data_loaded?
!!@data
end
# @deprecated Use [Aws::IAM::Client] #wait_until instead
#
# Waiter polls an API operation until a resource enters a desired
# state.
#
# @note The waiting operation is performed on a copy. The original resource remains unchanged
#
# ## Basic Usage
#
# Waiter will polls until it is successful, it fails by
# entering a terminal state, or until a maximum number of attempts
# are made.
#
# # polls in a loop until condition is true
# resource.wait_until(options) {|resource| condition}
#
# ## Example
#
# instance.wait_until(max_attempts:10, delay:5) {|instance| instance.state.name == 'running' }
#
# ## Configuration
#
# You can configure the maximum number of polling attempts, and the
# delay (in seconds) between each polling attempt. The waiting condition is set
# by passing a block to {#wait_until}:
#
# # poll for ~25 seconds
# resource.wait_until(max_attempts:5,delay:5) {|resource|...}
#
# ## Callbacks
#
# You can be notified before each polling attempt and before each
# delay. If you throw `:success` or `:failure` from these callbacks,
# it will terminate the waiter.
#
# started_at = Time.now
# # poll for 1 hour, instead of a number of attempts
# proc = Proc.new do |attempts, response|
# throw :failure if Time.now - started_at > 3600
# end
#
# # disable max attempts
# instance.wait_until(before_wait:proc, max_attempts:nil) {...}
#
# ## Handling Errors
#
# When a waiter is successful, it returns the Resource. When a waiter
# fails, it raises an error.
#
# begin
# resource.wait_until(...)
# rescue Aws::Waiters::Errors::WaiterFailed
# # resource did not enter the desired state in time
# end
#
#
# @yield param [Resource] resource to be used in the waiting condition
#
# @raise [Aws::Waiters::Errors::FailureStateError] Raised when the waiter terminates
# because the waiter has entered a state that it will not transition
# out of, preventing success.
#
# yet successful.
#
# @raise [Aws::Waiters::Errors::UnexpectedError] Raised when an error is encountered
# while polling for a resource that is not expected.
#
# @raise [NotImplementedError] Raised when the resource does not
#
# @option options [Integer] :max_attempts (10) Maximum number of
# attempts
# @option options [Integer] :delay (10) Delay between each
# attempt in seconds
# @option options [Proc] :before_attempt (nil) Callback
# invoked before each attempt
# @option options [Proc] :before_wait (nil) Callback
# invoked before each wait
# @return [Resource] if the waiter was successful
def wait_until(options = {}, &block)
  # Operate on a copy so the original resource is never mutated by polling.
  self_copy = self.dup
  attempts = 0
  options[:max_attempts] = 10 unless options.key?(:max_attempts)
  options[:delay] ||= 10
  # The poller reloads the copy and re-evaluates the caller's condition on
  # each attempt, signalling the generic Waiter via :success / :retry.
  options[:poller] = Proc.new do
    attempts += 1
    if block.call(self_copy)
      [:success, self_copy]
    else
      # Skip the reload on the final attempt; no further check will use it.
      self_copy.reload unless attempts == options[:max_attempts]
      :retry
    end
  end
  Aws::Waiters::Waiter.new(options).wait({})
end
# @!group Actions
# @example Request syntax with placeholder values
#
# role_policy.delete()
# @param [Hash] options ({})
# @return [EmptyStructure]
def delete(options = {})
options = options.merge(
role_name: @role_name,
policy_name: @name
)
resp = @client.delete_role_policy(options)
resp.data
end
# @example Request syntax with placeholder values
#
# role_policy.put({
# policy_document: "policyDocumentType", # required
# })
# @param [Hash] options ({})
# @option options [required, String] :policy_document
# The policy document.
#
# You must provide policies in JSON format in IAM. However, for AWS
# CloudFormation templates formatted in YAML, you can provide the policy
# in JSON or YAML format. AWS CloudFormation always converts a YAML
# policy to JSON format before submitting it to IAM.
#
# The [regex pattern][1] used to validate this parameter is a string of
# characters consisting of the following:
#
# * Any printable ASCII character ranging from the space character
# (`\u0020`) through the end of the ASCII character range
#
# * The printable characters in the Basic Latin and Latin-1 Supplement
# character set (through `\u00FF`)
#
# * The special characters tab (`\u0009`), line feed (`\u000A`), and
# carriage return (`\u000D`)
#
#
#
# [1]: http://wikipedia.org/wiki/regex
# @return [EmptyStructure]
def put(options = {})
  # Identifier params always win over caller-supplied values.
  request_params = { role_name: @role_name, policy_name: @name }
  @client.put_role_policy(options.merge(request_params)).data
end
# @!group Associations
# @return [Role]
def role
  # The parent role shares this resource's client connection.
  Role.new(name: @role_name, client: @client)
end
# @deprecated
# @api private
def identifiers
  # Hash of this resource's identifying attributes.
  { role_name: @role_name, name: @name }
end
deprecated(:identifiers)
private
# Resolves the role name from positional args or the :role_name option
# (which is consumed from the options hash when used).
def extract_role_name(args, options)
  role_name = args[0] || options.delete(:role_name)
  return role_name if role_name.is_a?(String)
  raise ArgumentError, "missing required option :role_name" if role_name.nil?
  raise ArgumentError, "expected :role_name to be a String, got #{role_name.class}"
end
# Resolves the policy name from positional args or the :name option
# (which is consumed from the options hash when used).
def extract_name(args, options)
  name = args[1] || options.delete(:name)
  return name if name.is_a?(String)
  raise ArgumentError, "missing required option :name" if name.nil?
  raise ArgumentError, "expected :name to be a String, got #{name.class}"
end
# Paged collection of {RolePolicy} resources returned by batch operations.
class Collection < Aws::Resources::Collection; end
end
end
| 29.568966 | 102 | 0.620758 |
b9f58f62fe5c92db9b87acee3fd210d56a6b13bf | 187 | def compose(f,g)
lambda {|x| f[g[x]]}
end
s = compose(Math.method(:sin), Math.method(:cos))
p s[0.5] # => 0.769196354841008
# verify
p Math.sin(Math.cos(0.5)) # => 0.769196354841008
| 20.777778 | 49 | 0.631016 |
ab7e334f08c85d2decbf364b395fa8a403178f4c | 476 | # frozen_string_literal: true
# Force the test environment before anything else loads.
ENV["RAILS_ENV"] = "test"
# pry-byebug is a debugging convenience only; continue silently when it
# is not installed (e.g. on CI).
begin
  require "pry-byebug"
rescue LoadError
end
# Absolute path to the project root, for specs that need to read files.
PROJECT_ROOT = File.expand_path("../", __dir__)
RSpec.configure do |config|
  config.mock_with :rspec do |mocks|
    # Raise when a stubbed method does not exist on the real object.
    mocks.verify_partial_doubles = true
  end
  # Remember pass/fail state between runs so --only-failures works.
  config.example_status_persistence_file_path = "tmp/rspec_examples.txt"
  # Run only examples tagged :focus when any exist; otherwise run all.
  config.filter_run :focus
  config.run_all_when_everything_filtered = true
  # Randomise example order; the printed seed lets failures be replayed.
  config.order = :random
  Kernel.srand config.seed
end
| 19.833333 | 72 | 0.762605 |
acb38508fc06ea77198f29c05a6c8cee6fbc4a17 | 248 | require 'spec_helper'
describe Rgentpl::Application do
  describe '#exec' do
    it 'executes application' do
      # Clear CLI arguments so stray rake-task arguments do not leak into
      # the application under test. Mutate ARGV in place: reassigning the
      # constant (ARGV = []) triggers an "already initialized constant"
      # warning while having the same effect.
      ARGV.replace([])
      expect { Rgentpl::Application.exec }.to output(/commands:/).to_stdout
    end
  end
end
| 19.076923 | 75 | 0.66129 |
abfb18b194504ddac926d36c823fde350690f55e | 1,182 | # frozen_string_literal: true
require 'rails_helper'
describe 'hunters/:id/hunter_backstories/new' do
let(:user) { create :user }
let(:hunter) { create :hunter, user: user, playbook: playbook }
let(:playbook) { create :playbook }
before :each do
sign_in user
end
subject { visit "/hunters/#{hunter.id}/hunter_backstories/new".dup }
it 'creates a hunter backstory' do
subject
expect(page).to have_content 'New Hunter'
expect(page).to have_select('hunter_backstory[hunter_id]',
selected: hunter.name)
expect(page).to have_select('hunter_backstory[playbook_id]',
selected: playbook.name)
fill_in 'hunter_backstory[choices]',
with: '[{ "name": "Fate", "choices": ["Some weirdo told you"]}]'
click_button 'Create Hunter backstory'
expect(page).to have_content('Hunter backstory was successfully created.')
expect(HunterBackstory.last).to have_attributes(
hunter_id: hunter.id,
playbook_id: playbook.id
)
end
it 'back leaves the page' do
subject
click_link 'Back'
expect(page).to have_content 'Hunter Backstories'
end
end
| 30.307692 | 78 | 0.662437 |
bb11dd85909950344f6b278632f53623b9148a8f | 1,242 | # frozen_string_literal: true
require_dependency "wf/application_controller"
module Wf
  # CRUD controller for workflow Form records.
  class FormsController < ApplicationController
    breadcrumb "Forms", :forms_path

    # GET /forms -- newest first, paginated.
    def index
      @forms = Wf::Form.order("id DESC").page(params[:page])
    end

    # GET /forms/new
    def new
      @form = Wf::Form.new
    end

    # GET /forms/:id/edit
    def edit
      @form = Wf::Form.find(params[:id])
    end

    # GET /forms/:id
    def show
      @form = Wf::Form.find(params[:id])
    end

    # DELETE /forms/:id -- supports both full-page and AJAX requests.
    def destroy
      @form = Wf::Form.find(params[:id])
      @form.destroy
      respond_to do |format|
        format.html { redirect_to forms_path, notice: "form was successfully deleted." }
        format.js { render js: "window.location.reload();" }
      end
    end

    # PATCH/PUT /forms/:id -- re-renders the edit form on validation failure.
    def update
      @form = Wf::Form.find(params[:id])
      if @form.update(form_params)
        redirect_to form_path(@form), notice: "form was successfully updated."
      else
        render :edit
      end
    end

    # POST /forms -- re-renders the new form on validation failure.
    def create
      @form = Wf::Form.new(form_params)
      if @form.save
        redirect_to forms_path, notice: "form was successfully created."
      else
        render :new
      end
    end

    private

    # Strong parameters: only name and description may be mass-assigned.
    def form_params
      params.fetch(:form, {}).permit(:name, :description)
    end
  end
end
| 20.7 | 88 | 0.60467 |
e91ddee17632c94eebcdf2d9c799820889a572e8 | 2,191 | # frozen_string_literal: true
Gem::Specification.new do |s|
s.name = "rubygems-update"
s.version = "3.1.0.pre1"
s.authors = ["Jim Weirich", "Chad Fowler", "Eric Hodel", "Luis Lavena", "Aaron Patterson", "Samuel Giddins", "André Arko", "Evan Phoenix", "Hiroshi SHIBATA"]
s.email = ["", "", "[email protected]", "[email protected]", "[email protected]", "[email protected]", "[email protected]", "[email protected]", "[email protected]"]
s.summary = "RubyGems is a package management framework for Ruby."
s.description = "A package (also known as a library) contains a set of functionality
that can be invoked by a Ruby program, such as reading and parsing an XML file. We call
these packages 'gems' and RubyGems is a tool to install, create, manage and load these
packages in your Ruby environment. RubyGems is also a client for RubyGems.org, a public
repository of Gems that allows you to publish a Gem that can be shared and used by other
developers. See our guide on publishing a Gem at guides.rubygems.org"
s.homepage = "https://rubygems.org"
s.licenses = ["Ruby", "MIT"]
s.files = File.read('Manifest.txt').split
s.executables = ["update_rubygems"]
s.require_paths = ["hide_lib_for_update"]
s.rdoc_options = ["--main", "README.md", "--title=RubyGems Update Documentation"]
s.extra_rdoc_files = [
"History.txt", "LICENSE.txt", "MAINTAINERS.txt",
"MIT.txt", "Manifest.txt", "README.md",
"UPGRADING.md", "POLICIES.md", "CODE_OF_CONDUCT.md",
"CONTRIBUTING.md", "bundler/CHANGELOG.md", "bundler/CODE_OF_CONDUCT.md",
"bundler/LICENSE.md", "bundler/README.md",
"hide_lib_for_update/note.txt", *Dir["bundler/man/*.1"]
]
s.required_ruby_version = Gem::Requirement.new(">= 2.3.0")
s.required_rubygems_version = Gem::Requirement.new(">= 0")
s.specification_version = 4
s.add_development_dependency(%q<builder>, ["~> 3.0"])
s.add_development_dependency(%q<rdoc>, ["~> 6.0"])
s.add_development_dependency(%q<rake>, ["~> 12.0"])
s.add_development_dependency(%q<minitest>, ["~> 5.0"])
s.add_development_dependency(%q<simplecov>, ["< 0.18.0"])
s.add_development_dependency(%q<rubocop>, ["~> 0.74.0"])
end
| 49.795455 | 179 | 0.697398 |
1ded1a611a2f7cde668da5e2a89bf20efb6014a5 | 1,234 | # == Schema Information
#
# Table name: transactions
#
# id :integer not null, primary key
# description :text
# created_at :datetime not null
# updated_at :datetime not null
# audited :boolean
# type :string default("Transaction"), not null
# person_id :integer
# checkout_method_id :integer
# gross :decimal(, )
# items :decimal(, )
# fee :decimal(, ) default(0.0)
# processing_fee :decimal(, ) default(0.0)
# merch_fee :decimal(, ) default(0.0)
# liability :decimal(, ) default(0.0)
#
# Indexes
#
# index_transactions_on_checkout_method_id (checkout_method_id)
# index_transactions_on_fees (fee)
# index_transactions_on_gross (gross)
# index_transactions_on_items (items)
# index_transactions_on_liability (liability)
# index_transactions_on_merch_fee (merch_fee)
# index_transactions_on_person_id (person_id)
# index_transactions_on_processing_fee (processing_fee)
#
# Transaction recorded for a credit-card payment processed through Stripe.
# Single-table-inheritance subclass of Transaction::Order (see the schema
# annotation above); it adds no behaviour of its own yet.
class Transaction::Payment::StripeCreditCard < Transaction::Order
end
| 36.294118 | 72 | 0.589951 |
28ae704ee0f7ed79c50a274ef3ccc8f7b07e52c9 | 9,660 | require "cgi"
# We abuse Homebrew's download strategies considerably here.
# * Our downloader instances only invoke the fetch and
# clear_cache methods, ignoring stage
# * Our overridden fetch methods are expected to return
# a value: the successfully downloaded file.
module Hbc
# Base class for all Cask download strategies. Subclasses are expected to
# implement #fetch (returning the downloaded file), #cached_location and
# #clear_cache; #stage is deliberately unused here (see note above).
class AbstractDownloadStrategy
  attr_reader :cask, :name, :url, :uri_object, :version

  # @param cask [Hbc::Cask] the cask being downloaded
  # @param command [Class] system-command runner (injectable for tests)
  def initialize(cask, command = SystemCommand)
    @cask = cask
    @command = command
    # TODO: this excess of attributes is a function of integrating
    # with Homebrew's classes. Later we should be able to remove
    # these in favor of @cask
    @name = cask.token
    @url = cask.url.to_s
    @uri_object = cask.url
    @version = cask.version
  end

  # All download strategies are expected to implement these methods
  def fetch; end
  def cached_location; end
  def clear_cache; end
end
# Shared behaviour for version-control-system based downloads.
class HbVCSDownloadStrategy < AbstractDownloadStrategy
  # Ref kinds that may appear on a cask URL, checked in this order.
  REF_TYPES = [:branch, :revision, :revisions, :tag].freeze

  def initialize(cask, command = SystemCommand)
    super
    @ref_type, @ref = extract_ref
    @clone = Hbc.cache.join(cache_filename)
  end

  # Returns [ref_type, ref_value]; both nil when the URL carries no ref.
  def extract_ref
    key = REF_TYPES.find do |type|
      uri_object.respond_to?(type) && uri_object.send(type)
    end
    [key, key ? uri_object.send(key) : nil]
  end

  def cache_filename
    "#{name}--#{cache_tag}"
  end

  # Subclasses override with a VCS-specific tag (e.g. "svn").
  def cache_tag
    "__UNKNOWN__"
  end

  def cached_location
    @clone
  end

  def clear_cache
    cached_location.rmtree if cached_location.exist?
  end
end
class CurlDownloadStrategy < AbstractDownloadStrategy
# TODO: should be part of url object
def mirrors
@mirrors ||= []
end
def tarball_path
@tarball_path ||= Hbc.cache.join("#{name}--#{version}#{ext}")
end
def temporary_path
@temporary_path ||= tarball_path.sub(/$/, ".incomplete")
end
def cached_location
tarball_path
end
def clear_cache
[cached_location, temporary_path].each do |path|
next unless path.exist?
begin
LockFile.new(path.basename).with_lock do
path.unlink
end
rescue OperationInProgressError
raise CurlDownloadStrategyError, "#{path} is in use by another process"
end
end
end
def downloaded_size
temporary_path.size? || 0
end
def _fetch
odebug "Calling curl with args #{cask_curl_args}"
curl(*cask_curl_args)
end
def fetch
ohai "Downloading #{@url}"
if tarball_path.exist?
puts "Already downloaded: #{tarball_path}"
else
had_incomplete_download = temporary_path.exist?
begin
LockFile.new(temporary_path.basename).with_lock do
_fetch
end
rescue ErrorDuringExecution
# 33 == range not supported
# try wiping the incomplete download and retrying once
if $CHILD_STATUS.exitstatus == 33 && had_incomplete_download
ohai "Trying a full download"
temporary_path.unlink
had_incomplete_download = false
retry
end
msg = @url
msg.concat("\nThe incomplete download is cached at #{temporary_path}") if temporary_path.exist?
raise CurlDownloadStrategyError, msg
end
ignore_interrupts { temporary_path.rename(tarball_path) }
end
tarball_path
rescue CurlDownloadStrategyError
raise if mirrors.empty?
puts "Trying a mirror..."
@url = mirrors.shift
retry
end
private
def cask_curl_args
default_curl_args.tap do |args|
args.concat(user_agent_args)
args.concat(cookies_args)
args.concat(referer_args)
end
end
def default_curl_args
[url, "-C", downloaded_size, "-o", temporary_path]
end
def user_agent_args
if uri_object.user_agent
["-A", uri_object.user_agent]
else
[]
end
end
def cookies_args
if uri_object.cookies
[
"-b",
# sort_by is for predictability between Ruby versions
uri_object
.cookies
.sort_by(&:to_s)
.map { |key, value| "#{CGI.escape(key.to_s)}=#{CGI.escape(value.to_s)}" }
.join(";"),
]
else
[]
end
end
def referer_args
if uri_object.referer
["-e", uri_object.referer]
else
[]
end
end
def ext
Pathname.new(@url).extname
end
end
# Download strategy for URLs that must be requested via HTTP POST.
class CurlPostDownloadStrategy < CurlDownloadStrategy
  # Builds the full curl argument list for a POST download.
  #
  # Bug fix: the previous implementation called +super+ but discarded its
  # return value and rebuilt the list from +default_curl_args+, silently
  # dropping the user-agent, cookie and referer arguments contributed by
  # the parent class.
  #
  # @return [Array] curl arguments including the POST payload
  def cask_curl_args
    super.concat(post_args)
  end

  # Arguments encoding the POST payload; when the cask supplies no data
  # hash, fall back to an empty POST request.
  def post_args
    if uri_object.data
      # sort_by is for predictability between Ruby versions
      uri_object
        .data
        .sort_by(&:to_s)
        .map { |key, value| ["-d", "#{CGI.escape(key.to_s)}=#{CGI.escape(value.to_s)}"] }
        .flatten
    else
      ["-X", "POST"]
    end
  end
end
class SubversionDownloadStrategy < HbVCSDownloadStrategy
def cache_tag
# TODO: pass versions as symbols, support :head here
(version == "head") ? "svn-HEAD" : "svn"
end
def repo_valid?
(@clone/".svn").directory?
end
def repo_url
`svn info '#{@clone}' 2>/dev/null`.strip[/^URL: (.+)$/, 1]
end
# super does not provide checks for already-existing downloads
def fetch
if tarball_path.exist?
puts "Already downloaded: #{tarball_path}"
else
@url = @url.sub(/^svn\+/, "") if @url =~ %r{^svn\+http://}
ohai "Checking out #{@url}"
clear_cache unless @url.chomp("/") == repo_url || quiet_system("svn", "switch", @url, @clone)
if @clone.exist? && !repo_valid?
puts "Removing invalid SVN repo from cache"
clear_cache
end
case @ref_type
when :revision
fetch_repo @clone, @url, @ref
when :revisions
# nil is OK for main_revision, as fetch_repo will then get latest
main_revision = @ref[:trunk]
fetch_repo @clone, @url, main_revision, true
fetch_externals do |external_name, external_url|
fetch_repo @clone + external_name, external_url, @ref[external_name], true
end
else
fetch_repo @clone, @url
end
compress
end
tarball_path
end
# This primary reason for redefining this method is the trust_cert
# option, controllable from the Cask definition. We also force
# consistent timestamps. The rest of this method is similar to
# Homebrew's, but translated to local idiom.
def fetch_repo(target, url, revision = uri_object.revision, ignore_externals = false)
# Use "svn up" when the repository already exists locally.
# This saves on bandwidth and will have a similar effect to verifying the
# cache as it will make any changes to get the right revision.
svncommand = target.directory? ? "up" : "checkout"
args = [svncommand]
# SVN shipped with XCode 3.1.4 can't force a checkout.
args << "--force" unless MacOS.version == :leopard
# make timestamps consistent for checksumming
args.concat(%w[--config-option config:miscellany:use-commit-times=yes])
if uri_object.trust_cert
args << "--trust-server-cert"
args << "--non-interactive"
end
args << url unless target.directory?
args << target
args << "-r" << revision if revision
args << "--ignore-externals" if ignore_externals
@command.run!("/usr/bin/svn",
args: args,
print_stderr: false)
end
def tarball_path
@tarball_path ||= cached_location.dirname.join(cached_location.basename.to_s + "-#{@cask.version}.tar")
end
def shell_quote(str)
# Oh god escaping shell args.
# See http://notetoself.vrensk.com/2008/08/escaping-single-quotes-in-ruby-harder-than-expected/
str.gsub(/\\|'/) { |c| "\\#{c}" }
end
def fetch_externals
`svn propget svn:externals '#{shell_quote(@url)}'`.chomp.each_line do |line|
name, url = line.split(/\s+/)
yield name, url
end
end
private
# TODO/UPDATE: the tar approach explained below is fragile
# against challenges such as case-sensitive filesystems,
# and must be re-implemented.
#
# Seems nutty: we "download" the contents into a tape archive.
# Why?
# * A single file is tractable to the rest of the Cask toolchain,
# * An alternative would be to create a Directory container type.
# However, some type of file-serialization trick would still be
# needed in order to enable calculating a single checksum over
# a directory. So, in that alternative implementation, the
# special cases would propagate outside this class, including
# the use of tar or equivalent.
# * SubversionDownloadStrategy.cached_location is not versioned
# * tarball_path provides a needed return value for our overridden
# fetch method.
# * We can also take this private opportunity to strip files from
# the download which are protocol-specific.
def compress
Dir.chdir(cached_location) do
@command.run!("/usr/bin/tar",
args: ['-s/^\.//', "--exclude", ".svn", "-cf", Pathname.new(tarball_path), "--", "."],
print_stderr: false)
end
clear_cache
end
end
end
| 28.495575 | 116 | 0.614907 |
ed51dd176e18ed890c54d94d9d06d44264034700 | 3,707 | class Pgloader < Formula
desc "Data loading tool for PostgreSQL"
homepage "https://github.com/dimitri/pgloader"
url "https://github.com/dimitri/pgloader/releases/download/v3.6.2/pgloader-bundle-3.6.2.tgz"
sha256 "e35b8c2d3f28f3c497f7e0508281772705940b7ae789fa91f77c86c0afe116cb"
license "PostgreSQL"
revision 1
head "https://github.com/dimitri/pgloader.git"
bottle do
cellar :any_skip_relocation
sha256 "64cc2495286edb61be4a85b7a6eaaf96d94565a6392a2077c26bc6a4ec05ccfc" => :catalina
sha256 "8fc8d50d06ebaf09fe7a6759d93daaa26caabbbe67421a85c00ecb74c6474ba6" => :mojave
sha256 "8ac23995514d59190c2c9a93037beaea82ce336f3f9dec79c7530226b1ba1af7" => :high_sierra
end
depends_on "buildapp" => :build
depends_on "freetds"
depends_on "[email protected]"
depends_on "postgresql"
depends_on "sbcl"
# From https://github.com/dimitri/pgloader/issues/1218
# Fixes: "Compilation failed: Constant NIL conflicts with its asserted type FUNCTION."
patch :DATA
def install
system "make"
bin.install "bin/pgloader"
end
def launch_postgres(socket_dir)
require "timeout"
socket_dir = Pathname.new(socket_dir)
mkdir_p socket_dir
postgres_command = [
"postgres",
"--listen_addresses=",
"--unix_socket_directories=#{socket_dir}",
]
IO.popen(postgres_command * " ") do |postgres|
ohai postgres_command * " "
# Postgres won't create the socket until it's ready for connections, but
# if it fails to start, we'll be waiting for the socket forever. So we
# time out quickly; this is simpler than mucking with child process
# signals.
Timeout.timeout(5) { sleep 0.2 while socket_dir.children.empty? }
yield
ensure
Process.kill(:TERM, postgres.pid)
end
end
test do
return if ENV["CI"]
# Remove any Postgres environment variables that might prevent us from
# isolating this disposable copy of Postgres.
ENV.reject! { |key, _| key.start_with?("PG") }
ENV["PGDATA"] = testpath/"data"
ENV["PGHOST"] = testpath/"socket"
ENV["PGDATABASE"] = "brew"
(testpath/"test.load").write <<~EOS
LOAD CSV
FROM inline (code, country)
INTO postgresql:///#{ENV["PGDATABASE"]}?tablename=csv
WITH fields terminated by ','
BEFORE LOAD DO
$$ CREATE TABLE csv (code char(2), country text); $$;
GB,United Kingdom
US,United States
CA,Canada
US,United States
GB,United Kingdom
CA,Canada
EOS
system "initdb"
launch_postgres(ENV["PGHOST"]) do
system "createdb"
system "#{bin}/pgloader", testpath/"test.load"
output = shell_output("psql -Atc 'SELECT COUNT(*) FROM csv'")
assert_equal "6", output.lines.last.chomp
end
end
end
__END__
--- a/local-projects/cl-csv/parser.lisp
+++ b/local-projects/cl-csv/parser.lisp
@@ -31,12 +31,12 @@ See: csv-reader "))
(ignore-errors (format s "~S" (string (buffer o))))))
(defclass read-dispatch-table-entry ()
- ((delimiter :type (vector (or boolean character))
+ ((delimiter :type (or (vector (or boolean character)) null)
:accessor delimiter :initarg :delimiter :initform nil)
(didx :type fixnum :initform -1 :accessor didx :initarg :didx)
(dlen :type fixnum :initform 0 :accessor dlen :initarg :dlen)
(dlen-1 :type fixnum :initform -1 :accessor dlen-1 :initarg :dlen-1)
- (dispatch :type function :initform nil :accessor dispatch :initarg :dispatch)
+ (dispatch :type (or function null) :initform nil :accessor dispatch :initarg :dispatch)
)
(:documentation "When a certain delimiter is matched it will call a certain function
T matches anything
| 32.80531 | 94 | 0.68492 |
799d801a2e185a6aa8bf4d75d16d806a045122bf | 156 | require 'rails_helper'
module B1Admin
  # Placeholder spec for the Permission model; no examples written yet.
  RSpec.describe Permission, :type => :model do
    pending "add some examples to (or delete) #{__FILE__}"
  end
end
| 19.5 | 58 | 0.717949 |
622f2e286d8bf7e27078839368ef4437235d3e42 | 2,076 | module Redistat
# A single statistics event: a scope/label/date key plus a stats hash.
# Saving an event updates the Summary aggregates and may optionally store
# the raw event in Redis when the :store_event option is enabled.
class Event
  include Database
  include Options

  attr_reader :id
  attr_reader :key
  attr_accessor :stats
  attr_accessor :meta

  # Defaults merged with caller options by Options#parse_options.
  def default_options
    { :depth => :hour,
      :store_event => false,
      :connection_ref => nil,
      :enable_grouping => true,
      :label_indexing => true }
  end

  # @param scope [String] namespace the event is recorded under
  # @param label [String, nil] event label (becomes part of the key)
  # @param date [Time, String, nil] event timestamp (Key applies defaults)
  # @param stats [Hash] metric name => value pairs to aggregate
  # @param opts [Hash] behaviour options, see #default_options
  # @param meta [Hash] arbitrary metadata stored with the raw event
  # @param is_new [Boolean] false when re-hydrating a persisted event
  def initialize(scope, label = nil, date = nil, stats = {}, opts = {}, meta = {}, is_new = true)
    parse_options(opts)
    @key = Key.new(scope, label, date, @options)
    @stats = stats ||= {}
    @meta = meta ||= {}
    @new = is_new
  end

  # True until #save has run (false for events loaded via .find).
  def new?
    @new
  end

  # The accessors below delegate to the underlying Key object.
  def date
    @key.date
  end

  def date=(input)
    @key.date = input
  end

  def scope
    @key.scope
  end

  def scope=(input)
    @key.scope = input
  end

  def label
    @key.label
  end

  def label_hash
    @key.label_hash
  end

  def label=(input)
    @key.label = input
  end

  # Reserves the next sequential event id for this scope in Redis.
  def next_id
    db.incr("#{self.scope}#{KEY_NEXT_ID}")
  end

  # Persists the event: updates the Summary aggregates and, when the
  # :store_event option is set, also stores the raw event hash and indexes
  # its id. Returns false if the event was already saved.
  def save
    return false if !self.new?
    Summary.update_all(@key, @stats, depth_limit, @options)
    if @options[:store_event]
      @id = self.next_id
      db.hmset("#{self.scope}#{KEY_EVENT}#{@id}",
               "scope", self.scope,
               "label", self.label,
               "date", self.date.to_time.to_s,
               "stats", self.stats.to_json,
               "meta", self.meta.to_json,
               "options", self.options.to_json)
      db.sadd("#{self.scope}#{KEY_EVENT_IDS}", @id)
    end
    @new = false
    self
  end

  # Aggregation depth, defaulting to the key's own depth when unset.
  def depth_limit
    @options[:depth] ||= @key.depth
  end

  # Builds and immediately saves an event.
  def self.create(*args)
    self.new(*args).save
  end

  # Loads a stored event by scope and id; nil when it does not exist.
  def self.find(scope, id)
    event = db.hgetall "#{scope}#{KEY_EVENT}#{id}"
    return nil if event.size == 0
    self.new( event["scope"], event["label"], event["date"], JSON.parse(event["stats"]),
              JSON.parse(event["options"]), JSON.parse(event["meta"]), false )
  end
end
end
| 20.969697 | 99 | 0.535164 |
f876ef07aa79e8c4f8d189a8d1a92e1bbb917122 | 861 | require File.expand_path("../../Abstract/abstract-php-extension", __FILE__)
# Homebrew-PHP formula building the imagick PECL extension against PHP 7.1.
class Php71Imagick < AbstractPhp71Extension
  init

  desc "Provides a wrapper to the ImageMagick library."
  homepage "https://pecl.php.net/package/imagick"
  url "https://pecl.php.net/get/imagick-3.4.3.tgz"
  sha256 "1f3c5b5eeaa02800ad22f506cd100e8889a66b2ec937e192eaaa30d74562567c"
  head "https://github.com/mkoppanen/imagick.git"
  revision 7

  depends_on "pkg-config" => :build
  depends_on "imagemagick6"

  def install
    # Release tarballs nest the sources in a versioned directory;
    # HEAD checkouts are already at the repo root.
    Dir.chdir "imagick-#{version}" unless build.head?
    safe_phpize
    system "./configure", "--prefix=#{prefix}",
                          phpconfig,
                          "--with-imagick=#{Formula["imagemagick6"].opt_prefix}"
    system "make"
    prefix.install "modules/imagick.so"
    # Only write an ini snippet when the user asked for one.
    write_config_file if build.with? "config-file"
  end
end
| 30.75 | 80 | 0.689895 |
7a1adedefb3bbe2837d46cf8c249020b10e54e38 | 1,150 | # encoding: utf-8
require 'test_helper'
#require "webmock/test_unit"
class TestCampaignStats < Test::Unit::TestCase
context "Yandex Campaign Stats" do
setup do
set_sandbox_access
# webmock campaign stats
stub_request(:post, "https://api-sandbox.direct.yandex.ru/json-api/v4/").
with( :body => "{\"method\":\"GetSummaryStat\",\"locale\":\"uk\",\"login\":\"\",\"application_id\":\"\",\"token\":\"\",\"param\":{\"CampaignIDS\":[123451],\"StartDate\":\"2008-11-13\",\"EndDate\":\"2008-11-15\"}}",
:headers => {'Accept'=>'*/*', 'Content-Type'=>'application/json', 'User-Agent'=>'Ruby'}).
to_return(:status => 200, :body => load_fixture("yandex_campaign_stats.json"))
end
context "find" do
setup do
@campaign_stats = YandexApiDirect::CampaignStats.find campaign_ids: [123451], start_date: Date.new(2008,11,13), end_date: Date.new(2008,11,15)
end
should "have right attributes" do
assert_equal @campaign_stats.first.campaign_id, 123451
end
should "be for 3 days" do
assert_equal @campaign_stats.size, 3
end
end
end
end
| 31.944444 | 222 | 0.626087 |
5d383264d317c9ca58bad81733b2380f1a441d80 | 1,573 | =begin
#Selling Partner API for Merchant Fulfillment
#The Selling Partner API for Merchant Fulfillment helps you build applications that let sellers purchase shipping for non-Prime and Prime orders using Amazon’s Buy Shipping Services.
OpenAPI spec version: v0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.26
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for AmzSpApi::MerchantFulfillmentApiModel::FileContents
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'FileContents' do
before do
# run before each test
@instance = AmzSpApi::MerchantFulfillmentApiModel::FileContents.new
end
after do
# run after each test
end
describe 'test an instance of FileContents' do
it 'should create an instance of FileContents' do
expect(@instance).to be_instance_of(AmzSpApi::MerchantFulfillmentApiModel::FileContents)
end
end
describe 'test attribute "contents"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "file_type"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "checksum"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 29.12963 | 182 | 0.750795 |
28c0b6dfe7300d43a9e575073cf6453643da654f | 37 | module ArJdbc
# Gem version string (NOTE(review): the 51.x series presumably tracks
# Rails 5.1 -- confirm against the gem's release notes).
VERSION = '51.1'
end
| 9.25 | 18 | 0.675676 |
33fc830fbb130c5f1082246b2dc30e2b17461b3d | 2,523 | require File.expand_path('../boot', __FILE__)
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "sprockets/railtie"
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require *Rails.groups(:assets => %w(development test))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module Checklisthub
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable the asset pipeline
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
config.generators do |g|
g.template_engine :haml
g.test_framework :rspec, :fixture => true, :views => false
g.stylesheets false
g.fixture_replacement :factory_girl, :dir => 'spec/factories'
end
config.action_view.field_error_proc = Proc.new { |html_tag, instance| %Q(<div class="error">#{html_tag}</div>).html_safe }
end
end
| 40.693548 | 126 | 0.718985 |
e81cb380d618d52ad1f5f094b7fb1537b3752c06 | 151 | # Be sure to restart your server when you modify this file.
# Store session data in a cookie under this app-specific key.
Rails.application.config.session_store :cookie_store, key: '_scientificprotocols_session'
| 37.75 | 89 | 0.821192 |
ff51c2cae98b1f2cee90632d991df240858d58c5 | 6,667 | require 'rbconfig'
# ruby 1.8.7 doesn't define RUBY_ENGINE
ruby_engine = defined?(RUBY_ENGINE) ? RUBY_ENGINE : 'ruby'
ruby_version = RbConfig::CONFIG["ruby_version"]
path = File.expand_path('..', __FILE__)
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/concurrent-ruby-1.1.7/lib/concurrent-ruby"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/i18n-1.8.5/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/minitest-5.14.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/thread_safe-0.3.6/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/tzinfo-1.2.7/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/zeitwerk-2.4.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/activesupport-6.0.3.4/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/ast-2.4.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/bindata-2.4.8/lib"
$:.unshift "#{path}/"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/extensions/universal-darwin-19/2.6.0/byebug-11.1.3"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/byebug-11.1.3/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/extensions/universal-darwin-19/2.6.0/json-2.3.1"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/json-2.3.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/docile-1.3.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/simplecov-html-0.12.3/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/simplecov-0.19.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/codecov-0.2.11/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/coderay-1.1.3/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/colorize-0.8.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/highline-2.0.3/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/commander-4.5.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/connection_pool-2.2.3/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/diff-lcs-1.4.4/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/extensions/universal-darwin-19/2.6.0/unf_ext-0.0.7.7"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/unf_ext-0.0.7.7/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/unf-0.1.4/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/domain_name-0.5.20190701/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/elftools-1.1.3/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/extensions/universal-darwin-19/2.6.0/hpricot-0.8.6"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/hpricot-0.8.6/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/http-cookie-1.0.3/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/mime-types-data-3.2020.0512/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/mime-types-3.3.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/net-http-digest_auth-1.4.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/net-http-persistent-4.0.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/mini_portile2-2.4.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/extensions/universal-darwin-19/2.6.0/nokogiri-1.10.10"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/nokogiri-1.10.10/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/ntlm-http-0.1.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/webrobots-0.1.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/mechanize-2.7.6/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/method_source-1.0.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/mustache-1.1.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/parallel-1.19.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/parallel_tests-3.3.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/parser-2.7.2.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rainbow-3.0.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/sorbet-runtime-0.5.5942/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/parlour-4.0.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/patchelf-1.3.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/plist-3.5.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/pry-0.13.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/extensions/universal-darwin-19/2.6.0/rdiscount-2.2.0.2"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rdiscount-2.2.0.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/regexp_parser-1.8.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rexml-3.2.4/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/ronn-0.7.3/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rspec-support-3.9.3/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rspec-core-3.9.3/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rspec-expectations-3.9.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rspec-mocks-3.9.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rspec-3.9.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rspec-its-1.3.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rspec-retry-0.6.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rspec-wait-0.0.9/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rubocop-ast-0.7.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/ruby-progressbar-1.10.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/unicode-display_width-1.7.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rubocop-0.92.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rubocop-performance-1.8.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rubocop-rspec-1.43.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/ruby-macho-2.2.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/sorbet-static-0.5.5942-universal-darwin-19/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/sorbet-0.5.5942/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/thor-1.0.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/spoom-1.0.4/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/tapioca-0.4.7/lib"
| 79.369048 | 109 | 0.668517 |
389fe2a50b2d8626124bda169eeff1ef20072457 | 43,245 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::Transfer
  # @api private
  module ClientApi
    include Seahorse::Model
    # Seahorse shape declarations for the AWS Transfer Family service
    # model ("transfer", API version 2018-11-05 — see the api.metadata
    # block below).  This file is generated from the service's API
    # definition; do not edit by hand.
    #
    # Each constant is a named shape: StructureShape (request/response/
    # error structures), StringShape / IntegerShape / TimestampShape
    # (scalar members), or ListShape (arrays).  Members and element
    # types are wired onto these shapes further down in this module.
    AccessDeniedException = Shapes::StructureShape.new(name: 'AccessDeniedException')
    AddressAllocationId = Shapes::StringShape.new(name: 'AddressAllocationId')
    AddressAllocationIds = Shapes::ListShape.new(name: 'AddressAllocationIds')
    Arn = Shapes::StringShape.new(name: 'Arn')
    Certificate = Shapes::StringShape.new(name: 'Certificate')
    ConflictException = Shapes::StructureShape.new(name: 'ConflictException')
    CreateServerRequest = Shapes::StructureShape.new(name: 'CreateServerRequest')
    CreateServerResponse = Shapes::StructureShape.new(name: 'CreateServerResponse')
    CreateUserRequest = Shapes::StructureShape.new(name: 'CreateUserRequest')
    CreateUserResponse = Shapes::StructureShape.new(name: 'CreateUserResponse')
    DateImported = Shapes::TimestampShape.new(name: 'DateImported')
    DeleteServerRequest = Shapes::StructureShape.new(name: 'DeleteServerRequest')
    DeleteSshPublicKeyRequest = Shapes::StructureShape.new(name: 'DeleteSshPublicKeyRequest')
    DeleteUserRequest = Shapes::StructureShape.new(name: 'DeleteUserRequest')
    DescribeServerRequest = Shapes::StructureShape.new(name: 'DescribeServerRequest')
    DescribeServerResponse = Shapes::StructureShape.new(name: 'DescribeServerResponse')
    DescribeUserRequest = Shapes::StructureShape.new(name: 'DescribeUserRequest')
    DescribeUserResponse = Shapes::StructureShape.new(name: 'DescribeUserResponse')
    DescribedServer = Shapes::StructureShape.new(name: 'DescribedServer')
    DescribedUser = Shapes::StructureShape.new(name: 'DescribedUser')
    EndpointDetails = Shapes::StructureShape.new(name: 'EndpointDetails')
    EndpointType = Shapes::StringShape.new(name: 'EndpointType')
    HomeDirectory = Shapes::StringShape.new(name: 'HomeDirectory')
    HomeDirectoryMapEntry = Shapes::StructureShape.new(name: 'HomeDirectoryMapEntry')
    HomeDirectoryMappings = Shapes::ListShape.new(name: 'HomeDirectoryMappings')
    HomeDirectoryType = Shapes::StringShape.new(name: 'HomeDirectoryType')
    HostKey = Shapes::StringShape.new(name: 'HostKey')
    HostKeyFingerprint = Shapes::StringShape.new(name: 'HostKeyFingerprint')
    IdentityProviderDetails = Shapes::StructureShape.new(name: 'IdentityProviderDetails')
    IdentityProviderType = Shapes::StringShape.new(name: 'IdentityProviderType')
    ImportSshPublicKeyRequest = Shapes::StructureShape.new(name: 'ImportSshPublicKeyRequest')
    ImportSshPublicKeyResponse = Shapes::StructureShape.new(name: 'ImportSshPublicKeyResponse')
    InternalServiceError = Shapes::StructureShape.new(name: 'InternalServiceError')
    InvalidNextTokenException = Shapes::StructureShape.new(name: 'InvalidNextTokenException')
    InvalidRequestException = Shapes::StructureShape.new(name: 'InvalidRequestException')
    ListServersRequest = Shapes::StructureShape.new(name: 'ListServersRequest')
    ListServersResponse = Shapes::StructureShape.new(name: 'ListServersResponse')
    ListTagsForResourceRequest = Shapes::StructureShape.new(name: 'ListTagsForResourceRequest')
    ListTagsForResourceResponse = Shapes::StructureShape.new(name: 'ListTagsForResourceResponse')
    ListUsersRequest = Shapes::StructureShape.new(name: 'ListUsersRequest')
    ListUsersResponse = Shapes::StructureShape.new(name: 'ListUsersResponse')
    ListedServer = Shapes::StructureShape.new(name: 'ListedServer')
    ListedServers = Shapes::ListShape.new(name: 'ListedServers')
    ListedUser = Shapes::StructureShape.new(name: 'ListedUser')
    ListedUsers = Shapes::ListShape.new(name: 'ListedUsers')
    MapEntry = Shapes::StringShape.new(name: 'MapEntry')
    MapTarget = Shapes::StringShape.new(name: 'MapTarget')
    MaxResults = Shapes::IntegerShape.new(name: 'MaxResults')
    Message = Shapes::StringShape.new(name: 'Message')
    NextToken = Shapes::StringShape.new(name: 'NextToken')
    NullableRole = Shapes::StringShape.new(name: 'NullableRole')
    Policy = Shapes::StringShape.new(name: 'Policy')
    Protocol = Shapes::StringShape.new(name: 'Protocol')
    Protocols = Shapes::ListShape.new(name: 'Protocols')
    Resource = Shapes::StringShape.new(name: 'Resource')
    ResourceExistsException = Shapes::StructureShape.new(name: 'ResourceExistsException')
    ResourceNotFoundException = Shapes::StructureShape.new(name: 'ResourceNotFoundException')
    ResourceType = Shapes::StringShape.new(name: 'ResourceType')
    Response = Shapes::StringShape.new(name: 'Response')
    RetryAfterSeconds = Shapes::StringShape.new(name: 'RetryAfterSeconds')
    Role = Shapes::StringShape.new(name: 'Role')
    ServerId = Shapes::StringShape.new(name: 'ServerId')
    ServiceErrorMessage = Shapes::StringShape.new(name: 'ServiceErrorMessage')
    ServiceUnavailableException = Shapes::StructureShape.new(name: 'ServiceUnavailableException')
    SourceIp = Shapes::StringShape.new(name: 'SourceIp')
    SshPublicKey = Shapes::StructureShape.new(name: 'SshPublicKey')
    SshPublicKeyBody = Shapes::StringShape.new(name: 'SshPublicKeyBody')
    SshPublicKeyCount = Shapes::IntegerShape.new(name: 'SshPublicKeyCount')
    SshPublicKeyId = Shapes::StringShape.new(name: 'SshPublicKeyId')
    SshPublicKeys = Shapes::ListShape.new(name: 'SshPublicKeys')
    StartServerRequest = Shapes::StructureShape.new(name: 'StartServerRequest')
    State = Shapes::StringShape.new(name: 'State')
    StatusCode = Shapes::IntegerShape.new(name: 'StatusCode')
    StopServerRequest = Shapes::StructureShape.new(name: 'StopServerRequest')
    SubnetId = Shapes::StringShape.new(name: 'SubnetId')
    SubnetIds = Shapes::ListShape.new(name: 'SubnetIds')
    Tag = Shapes::StructureShape.new(name: 'Tag')
    TagKey = Shapes::StringShape.new(name: 'TagKey')
    TagKeys = Shapes::ListShape.new(name: 'TagKeys')
    TagResourceRequest = Shapes::StructureShape.new(name: 'TagResourceRequest')
    TagValue = Shapes::StringShape.new(name: 'TagValue')
    Tags = Shapes::ListShape.new(name: 'Tags')
    TestIdentityProviderRequest = Shapes::StructureShape.new(name: 'TestIdentityProviderRequest')
    TestIdentityProviderResponse = Shapes::StructureShape.new(name: 'TestIdentityProviderResponse')
    ThrottlingException = Shapes::StructureShape.new(name: 'ThrottlingException')
    UntagResourceRequest = Shapes::StructureShape.new(name: 'UntagResourceRequest')
    UpdateServerRequest = Shapes::StructureShape.new(name: 'UpdateServerRequest')
    UpdateServerResponse = Shapes::StructureShape.new(name: 'UpdateServerResponse')
    UpdateUserRequest = Shapes::StructureShape.new(name: 'UpdateUserRequest')
    UpdateUserResponse = Shapes::StructureShape.new(name: 'UpdateUserResponse')
    Url = Shapes::StringShape.new(name: 'Url')
    UserCount = Shapes::IntegerShape.new(name: 'UserCount')
    UserName = Shapes::StringShape.new(name: 'UserName')
    UserPassword = Shapes::StringShape.new(name: 'UserPassword')
    VpcEndpointId = Shapes::StringShape.new(name: 'VpcEndpointId')
    VpcId = Shapes::StringShape.new(name: 'VpcId')
AccessDeniedException.add_member(:message, Shapes::ShapeRef.new(shape: ServiceErrorMessage, location_name: "Message"))
AccessDeniedException.struct_class = Types::AccessDeniedException
AddressAllocationIds.member = Shapes::ShapeRef.new(shape: AddressAllocationId)
ConflictException.add_member(:message, Shapes::ShapeRef.new(shape: Message, required: true, location_name: "Message"))
ConflictException.struct_class = Types::ConflictException
CreateServerRequest.add_member(:certificate, Shapes::ShapeRef.new(shape: Certificate, location_name: "Certificate"))
CreateServerRequest.add_member(:endpoint_details, Shapes::ShapeRef.new(shape: EndpointDetails, location_name: "EndpointDetails"))
CreateServerRequest.add_member(:endpoint_type, Shapes::ShapeRef.new(shape: EndpointType, location_name: "EndpointType"))
CreateServerRequest.add_member(:host_key, Shapes::ShapeRef.new(shape: HostKey, location_name: "HostKey"))
CreateServerRequest.add_member(:identity_provider_details, Shapes::ShapeRef.new(shape: IdentityProviderDetails, location_name: "IdentityProviderDetails"))
CreateServerRequest.add_member(:identity_provider_type, Shapes::ShapeRef.new(shape: IdentityProviderType, location_name: "IdentityProviderType"))
CreateServerRequest.add_member(:logging_role, Shapes::ShapeRef.new(shape: Role, location_name: "LoggingRole"))
CreateServerRequest.add_member(:protocols, Shapes::ShapeRef.new(shape: Protocols, location_name: "Protocols"))
CreateServerRequest.add_member(:tags, Shapes::ShapeRef.new(shape: Tags, location_name: "Tags"))
CreateServerRequest.struct_class = Types::CreateServerRequest
CreateServerResponse.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
CreateServerResponse.struct_class = Types::CreateServerResponse
CreateUserRequest.add_member(:home_directory, Shapes::ShapeRef.new(shape: HomeDirectory, location_name: "HomeDirectory"))
CreateUserRequest.add_member(:home_directory_type, Shapes::ShapeRef.new(shape: HomeDirectoryType, location_name: "HomeDirectoryType"))
CreateUserRequest.add_member(:home_directory_mappings, Shapes::ShapeRef.new(shape: HomeDirectoryMappings, location_name: "HomeDirectoryMappings"))
CreateUserRequest.add_member(:policy, Shapes::ShapeRef.new(shape: Policy, location_name: "Policy"))
CreateUserRequest.add_member(:role, Shapes::ShapeRef.new(shape: Role, required: true, location_name: "Role"))
CreateUserRequest.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
CreateUserRequest.add_member(:ssh_public_key_body, Shapes::ShapeRef.new(shape: SshPublicKeyBody, location_name: "SshPublicKeyBody"))
CreateUserRequest.add_member(:tags, Shapes::ShapeRef.new(shape: Tags, location_name: "Tags"))
CreateUserRequest.add_member(:user_name, Shapes::ShapeRef.new(shape: UserName, required: true, location_name: "UserName"))
CreateUserRequest.struct_class = Types::CreateUserRequest
CreateUserResponse.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
CreateUserResponse.add_member(:user_name, Shapes::ShapeRef.new(shape: UserName, required: true, location_name: "UserName"))
CreateUserResponse.struct_class = Types::CreateUserResponse
DeleteServerRequest.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
DeleteServerRequest.struct_class = Types::DeleteServerRequest
DeleteSshPublicKeyRequest.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
DeleteSshPublicKeyRequest.add_member(:ssh_public_key_id, Shapes::ShapeRef.new(shape: SshPublicKeyId, required: true, location_name: "SshPublicKeyId"))
DeleteSshPublicKeyRequest.add_member(:user_name, Shapes::ShapeRef.new(shape: UserName, required: true, location_name: "UserName"))
DeleteSshPublicKeyRequest.struct_class = Types::DeleteSshPublicKeyRequest
DeleteUserRequest.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
DeleteUserRequest.add_member(:user_name, Shapes::ShapeRef.new(shape: UserName, required: true, location_name: "UserName"))
DeleteUserRequest.struct_class = Types::DeleteUserRequest
DescribeServerRequest.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
DescribeServerRequest.struct_class = Types::DescribeServerRequest
DescribeServerResponse.add_member(:server, Shapes::ShapeRef.new(shape: DescribedServer, required: true, location_name: "Server"))
DescribeServerResponse.struct_class = Types::DescribeServerResponse
DescribeUserRequest.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
DescribeUserRequest.add_member(:user_name, Shapes::ShapeRef.new(shape: UserName, required: true, location_name: "UserName"))
DescribeUserRequest.struct_class = Types::DescribeUserRequest
DescribeUserResponse.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
DescribeUserResponse.add_member(:user, Shapes::ShapeRef.new(shape: DescribedUser, required: true, location_name: "User"))
DescribeUserResponse.struct_class = Types::DescribeUserResponse
DescribedServer.add_member(:arn, Shapes::ShapeRef.new(shape: Arn, required: true, location_name: "Arn"))
DescribedServer.add_member(:certificate, Shapes::ShapeRef.new(shape: Certificate, location_name: "Certificate"))
DescribedServer.add_member(:endpoint_details, Shapes::ShapeRef.new(shape: EndpointDetails, location_name: "EndpointDetails"))
DescribedServer.add_member(:endpoint_type, Shapes::ShapeRef.new(shape: EndpointType, location_name: "EndpointType"))
DescribedServer.add_member(:host_key_fingerprint, Shapes::ShapeRef.new(shape: HostKeyFingerprint, location_name: "HostKeyFingerprint"))
DescribedServer.add_member(:identity_provider_details, Shapes::ShapeRef.new(shape: IdentityProviderDetails, location_name: "IdentityProviderDetails"))
DescribedServer.add_member(:identity_provider_type, Shapes::ShapeRef.new(shape: IdentityProviderType, location_name: "IdentityProviderType"))
DescribedServer.add_member(:logging_role, Shapes::ShapeRef.new(shape: Role, location_name: "LoggingRole"))
DescribedServer.add_member(:protocols, Shapes::ShapeRef.new(shape: Protocols, location_name: "Protocols"))
DescribedServer.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, location_name: "ServerId"))
DescribedServer.add_member(:state, Shapes::ShapeRef.new(shape: State, location_name: "State"))
DescribedServer.add_member(:tags, Shapes::ShapeRef.new(shape: Tags, location_name: "Tags"))
DescribedServer.add_member(:user_count, Shapes::ShapeRef.new(shape: UserCount, location_name: "UserCount"))
DescribedServer.struct_class = Types::DescribedServer
DescribedUser.add_member(:arn, Shapes::ShapeRef.new(shape: Arn, required: true, location_name: "Arn"))
DescribedUser.add_member(:home_directory, Shapes::ShapeRef.new(shape: HomeDirectory, location_name: "HomeDirectory"))
DescribedUser.add_member(:home_directory_mappings, Shapes::ShapeRef.new(shape: HomeDirectoryMappings, location_name: "HomeDirectoryMappings"))
DescribedUser.add_member(:home_directory_type, Shapes::ShapeRef.new(shape: HomeDirectoryType, location_name: "HomeDirectoryType"))
DescribedUser.add_member(:policy, Shapes::ShapeRef.new(shape: Policy, location_name: "Policy"))
DescribedUser.add_member(:role, Shapes::ShapeRef.new(shape: Role, location_name: "Role"))
DescribedUser.add_member(:ssh_public_keys, Shapes::ShapeRef.new(shape: SshPublicKeys, location_name: "SshPublicKeys"))
DescribedUser.add_member(:tags, Shapes::ShapeRef.new(shape: Tags, location_name: "Tags"))
DescribedUser.add_member(:user_name, Shapes::ShapeRef.new(shape: UserName, location_name: "UserName"))
DescribedUser.struct_class = Types::DescribedUser
EndpointDetails.add_member(:address_allocation_ids, Shapes::ShapeRef.new(shape: AddressAllocationIds, location_name: "AddressAllocationIds"))
EndpointDetails.add_member(:subnet_ids, Shapes::ShapeRef.new(shape: SubnetIds, location_name: "SubnetIds"))
EndpointDetails.add_member(:vpc_endpoint_id, Shapes::ShapeRef.new(shape: VpcEndpointId, location_name: "VpcEndpointId"))
EndpointDetails.add_member(:vpc_id, Shapes::ShapeRef.new(shape: VpcId, location_name: "VpcId"))
EndpointDetails.struct_class = Types::EndpointDetails
HomeDirectoryMapEntry.add_member(:entry, Shapes::ShapeRef.new(shape: MapEntry, required: true, location_name: "Entry"))
HomeDirectoryMapEntry.add_member(:target, Shapes::ShapeRef.new(shape: MapTarget, required: true, location_name: "Target"))
HomeDirectoryMapEntry.struct_class = Types::HomeDirectoryMapEntry
HomeDirectoryMappings.member = Shapes::ShapeRef.new(shape: HomeDirectoryMapEntry)
IdentityProviderDetails.add_member(:url, Shapes::ShapeRef.new(shape: Url, location_name: "Url"))
IdentityProviderDetails.add_member(:invocation_role, Shapes::ShapeRef.new(shape: Role, location_name: "InvocationRole"))
IdentityProviderDetails.struct_class = Types::IdentityProviderDetails
ImportSshPublicKeyRequest.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
ImportSshPublicKeyRequest.add_member(:ssh_public_key_body, Shapes::ShapeRef.new(shape: SshPublicKeyBody, required: true, location_name: "SshPublicKeyBody"))
ImportSshPublicKeyRequest.add_member(:user_name, Shapes::ShapeRef.new(shape: UserName, required: true, location_name: "UserName"))
ImportSshPublicKeyRequest.struct_class = Types::ImportSshPublicKeyRequest
ImportSshPublicKeyResponse.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
ImportSshPublicKeyResponse.add_member(:ssh_public_key_id, Shapes::ShapeRef.new(shape: SshPublicKeyId, required: true, location_name: "SshPublicKeyId"))
ImportSshPublicKeyResponse.add_member(:user_name, Shapes::ShapeRef.new(shape: UserName, required: true, location_name: "UserName"))
ImportSshPublicKeyResponse.struct_class = Types::ImportSshPublicKeyResponse
InternalServiceError.add_member(:message, Shapes::ShapeRef.new(shape: Message, required: true, location_name: "Message"))
InternalServiceError.struct_class = Types::InternalServiceError
InvalidNextTokenException.add_member(:message, Shapes::ShapeRef.new(shape: Message, required: true, location_name: "Message"))
InvalidNextTokenException.struct_class = Types::InvalidNextTokenException
InvalidRequestException.add_member(:message, Shapes::ShapeRef.new(shape: Message, required: true, location_name: "Message"))
InvalidRequestException.struct_class = Types::InvalidRequestException
ListServersRequest.add_member(:max_results, Shapes::ShapeRef.new(shape: MaxResults, location_name: "MaxResults"))
ListServersRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
ListServersRequest.struct_class = Types::ListServersRequest
ListServersResponse.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
ListServersResponse.add_member(:servers, Shapes::ShapeRef.new(shape: ListedServers, required: true, location_name: "Servers"))
ListServersResponse.struct_class = Types::ListServersResponse
ListTagsForResourceRequest.add_member(:arn, Shapes::ShapeRef.new(shape: Arn, required: true, location_name: "Arn"))
ListTagsForResourceRequest.add_member(:max_results, Shapes::ShapeRef.new(shape: MaxResults, location_name: "MaxResults"))
ListTagsForResourceRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
ListTagsForResourceRequest.struct_class = Types::ListTagsForResourceRequest
ListTagsForResourceResponse.add_member(:arn, Shapes::ShapeRef.new(shape: Arn, location_name: "Arn"))
ListTagsForResourceResponse.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
ListTagsForResourceResponse.add_member(:tags, Shapes::ShapeRef.new(shape: Tags, location_name: "Tags"))
ListTagsForResourceResponse.struct_class = Types::ListTagsForResourceResponse
ListUsersRequest.add_member(:max_results, Shapes::ShapeRef.new(shape: MaxResults, location_name: "MaxResults"))
ListUsersRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
ListUsersRequest.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
ListUsersRequest.struct_class = Types::ListUsersRequest
ListUsersResponse.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
ListUsersResponse.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
ListUsersResponse.add_member(:users, Shapes::ShapeRef.new(shape: ListedUsers, required: true, location_name: "Users"))
ListUsersResponse.struct_class = Types::ListUsersResponse
ListedServer.add_member(:arn, Shapes::ShapeRef.new(shape: Arn, required: true, location_name: "Arn"))
ListedServer.add_member(:identity_provider_type, Shapes::ShapeRef.new(shape: IdentityProviderType, location_name: "IdentityProviderType"))
ListedServer.add_member(:endpoint_type, Shapes::ShapeRef.new(shape: EndpointType, location_name: "EndpointType"))
ListedServer.add_member(:logging_role, Shapes::ShapeRef.new(shape: Role, location_name: "LoggingRole"))
ListedServer.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, location_name: "ServerId"))
ListedServer.add_member(:state, Shapes::ShapeRef.new(shape: State, location_name: "State"))
ListedServer.add_member(:user_count, Shapes::ShapeRef.new(shape: UserCount, location_name: "UserCount"))
ListedServer.struct_class = Types::ListedServer
ListedServers.member = Shapes::ShapeRef.new(shape: ListedServer)
ListedUser.add_member(:arn, Shapes::ShapeRef.new(shape: Arn, required: true, location_name: "Arn"))
ListedUser.add_member(:home_directory, Shapes::ShapeRef.new(shape: HomeDirectory, location_name: "HomeDirectory"))
ListedUser.add_member(:home_directory_type, Shapes::ShapeRef.new(shape: HomeDirectoryType, location_name: "HomeDirectoryType"))
ListedUser.add_member(:role, Shapes::ShapeRef.new(shape: Role, location_name: "Role"))
ListedUser.add_member(:ssh_public_key_count, Shapes::ShapeRef.new(shape: SshPublicKeyCount, location_name: "SshPublicKeyCount"))
ListedUser.add_member(:user_name, Shapes::ShapeRef.new(shape: UserName, location_name: "UserName"))
ListedUser.struct_class = Types::ListedUser
ListedUsers.member = Shapes::ShapeRef.new(shape: ListedUser)
Protocols.member = Shapes::ShapeRef.new(shape: Protocol)
ResourceExistsException.add_member(:message, Shapes::ShapeRef.new(shape: Message, required: true, location_name: "Message"))
ResourceExistsException.add_member(:resource, Shapes::ShapeRef.new(shape: Resource, required: true, location_name: "Resource"))
ResourceExistsException.add_member(:resource_type, Shapes::ShapeRef.new(shape: ResourceType, required: true, location_name: "ResourceType"))
ResourceExistsException.struct_class = Types::ResourceExistsException
ResourceNotFoundException.add_member(:message, Shapes::ShapeRef.new(shape: Message, required: true, location_name: "Message"))
ResourceNotFoundException.add_member(:resource, Shapes::ShapeRef.new(shape: Resource, required: true, location_name: "Resource"))
ResourceNotFoundException.add_member(:resource_type, Shapes::ShapeRef.new(shape: ResourceType, required: true, location_name: "ResourceType"))
ResourceNotFoundException.struct_class = Types::ResourceNotFoundException
ServiceUnavailableException.add_member(:message, Shapes::ShapeRef.new(shape: ServiceErrorMessage, location_name: "Message"))
ServiceUnavailableException.struct_class = Types::ServiceUnavailableException
SshPublicKey.add_member(:date_imported, Shapes::ShapeRef.new(shape: DateImported, required: true, location_name: "DateImported"))
SshPublicKey.add_member(:ssh_public_key_body, Shapes::ShapeRef.new(shape: SshPublicKeyBody, required: true, location_name: "SshPublicKeyBody"))
SshPublicKey.add_member(:ssh_public_key_id, Shapes::ShapeRef.new(shape: SshPublicKeyId, required: true, location_name: "SshPublicKeyId"))
SshPublicKey.struct_class = Types::SshPublicKey
SshPublicKeys.member = Shapes::ShapeRef.new(shape: SshPublicKey)
StartServerRequest.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
StartServerRequest.struct_class = Types::StartServerRequest
StopServerRequest.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
StopServerRequest.struct_class = Types::StopServerRequest
SubnetIds.member = Shapes::ShapeRef.new(shape: SubnetId)
Tag.add_member(:key, Shapes::ShapeRef.new(shape: TagKey, required: true, location_name: "Key"))
Tag.add_member(:value, Shapes::ShapeRef.new(shape: TagValue, required: true, location_name: "Value"))
Tag.struct_class = Types::Tag
TagKeys.member = Shapes::ShapeRef.new(shape: TagKey)
TagResourceRequest.add_member(:arn, Shapes::ShapeRef.new(shape: Arn, required: true, location_name: "Arn"))
TagResourceRequest.add_member(:tags, Shapes::ShapeRef.new(shape: Tags, required: true, location_name: "Tags"))
TagResourceRequest.struct_class = Types::TagResourceRequest
Tags.member = Shapes::ShapeRef.new(shape: Tag)
TestIdentityProviderRequest.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
TestIdentityProviderRequest.add_member(:server_protocol, Shapes::ShapeRef.new(shape: Protocol, location_name: "ServerProtocol"))
TestIdentityProviderRequest.add_member(:source_ip, Shapes::ShapeRef.new(shape: SourceIp, location_name: "SourceIp"))
TestIdentityProviderRequest.add_member(:user_name, Shapes::ShapeRef.new(shape: UserName, required: true, location_name: "UserName"))
TestIdentityProviderRequest.add_member(:user_password, Shapes::ShapeRef.new(shape: UserPassword, location_name: "UserPassword"))
TestIdentityProviderRequest.struct_class = Types::TestIdentityProviderRequest
TestIdentityProviderResponse.add_member(:response, Shapes::ShapeRef.new(shape: Response, location_name: "Response"))
TestIdentityProviderResponse.add_member(:status_code, Shapes::ShapeRef.new(shape: StatusCode, required: true, location_name: "StatusCode"))
TestIdentityProviderResponse.add_member(:message, Shapes::ShapeRef.new(shape: Message, location_name: "Message"))
TestIdentityProviderResponse.add_member(:url, Shapes::ShapeRef.new(shape: Url, required: true, location_name: "Url"))
TestIdentityProviderResponse.struct_class = Types::TestIdentityProviderResponse
ThrottlingException.add_member(:retry_after_seconds, Shapes::ShapeRef.new(shape: RetryAfterSeconds, location_name: "RetryAfterSeconds"))
ThrottlingException.struct_class = Types::ThrottlingException
UntagResourceRequest.add_member(:arn, Shapes::ShapeRef.new(shape: Arn, required: true, location_name: "Arn"))
UntagResourceRequest.add_member(:tag_keys, Shapes::ShapeRef.new(shape: TagKeys, required: true, location_name: "TagKeys"))
UntagResourceRequest.struct_class = Types::UntagResourceRequest
UpdateServerRequest.add_member(:certificate, Shapes::ShapeRef.new(shape: Certificate, location_name: "Certificate"))
UpdateServerRequest.add_member(:endpoint_details, Shapes::ShapeRef.new(shape: EndpointDetails, location_name: "EndpointDetails"))
UpdateServerRequest.add_member(:endpoint_type, Shapes::ShapeRef.new(shape: EndpointType, location_name: "EndpointType"))
UpdateServerRequest.add_member(:host_key, Shapes::ShapeRef.new(shape: HostKey, location_name: "HostKey"))
UpdateServerRequest.add_member(:identity_provider_details, Shapes::ShapeRef.new(shape: IdentityProviderDetails, location_name: "IdentityProviderDetails"))
UpdateServerRequest.add_member(:logging_role, Shapes::ShapeRef.new(shape: NullableRole, location_name: "LoggingRole"))
UpdateServerRequest.add_member(:protocols, Shapes::ShapeRef.new(shape: Protocols, location_name: "Protocols"))
UpdateServerRequest.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
UpdateServerRequest.struct_class = Types::UpdateServerRequest
UpdateServerResponse.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
UpdateServerResponse.struct_class = Types::UpdateServerResponse
UpdateUserRequest.add_member(:home_directory, Shapes::ShapeRef.new(shape: HomeDirectory, location_name: "HomeDirectory"))
UpdateUserRequest.add_member(:home_directory_type, Shapes::ShapeRef.new(shape: HomeDirectoryType, location_name: "HomeDirectoryType"))
UpdateUserRequest.add_member(:home_directory_mappings, Shapes::ShapeRef.new(shape: HomeDirectoryMappings, location_name: "HomeDirectoryMappings"))
UpdateUserRequest.add_member(:policy, Shapes::ShapeRef.new(shape: Policy, location_name: "Policy"))
UpdateUserRequest.add_member(:role, Shapes::ShapeRef.new(shape: Role, location_name: "Role"))
UpdateUserRequest.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
UpdateUserRequest.add_member(:user_name, Shapes::ShapeRef.new(shape: UserName, required: true, location_name: "UserName"))
UpdateUserRequest.struct_class = Types::UpdateUserRequest
UpdateUserResponse.add_member(:server_id, Shapes::ShapeRef.new(shape: ServerId, required: true, location_name: "ServerId"))
UpdateUserResponse.add_member(:user_name, Shapes::ShapeRef.new(shape: UserName, required: true, location_name: "UserName"))
UpdateUserResponse.struct_class = Types::UpdateUserResponse
    # @api private
    # Assembles the service API model consumed by Aws::Transfer::Client:
    # service metadata (JSON 1.1 protocol, SigV4 signing with signing
    # name "transfer") plus one Operation per client method.  Every
    # operation is a POST to "/" — routing is via the X-Amz-Target header
    # derived from targetPrefix, per the AWS JSON protocol.  The `errors`
    # list on each operation names the modeled exceptions the client can
    # raise for it.  Generated code — do not edit by hand.
    # NOTE(review): this definition continues beyond the visible chunk
    # (the list_servers pager and remaining operations are truncated here).
    API = Seahorse::Model::Api.new.tap do |api|
      api.version = "2018-11-05"
      api.metadata = {
        "apiVersion" => "2018-11-05",
        "endpointPrefix" => "transfer",
        "jsonVersion" => "1.1",
        "protocol" => "json",
        "serviceAbbreviation" => "AWS Transfer",
        "serviceFullName" => "AWS Transfer Family",
        "serviceId" => "Transfer",
        "signatureVersion" => "v4",
        "signingName" => "transfer",
        "targetPrefix" => "TransferService",
        "uid" => "transfer-2018-11-05",
      }
      api.add_operation(:create_server, Seahorse::Model::Operation.new.tap do |o|
        o.name = "CreateServer"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: CreateServerRequest)
        o.output = Shapes::ShapeRef.new(shape: CreateServerResponse)
        o.errors << Shapes::ShapeRef.new(shape: AccessDeniedException)
        o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
        o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
        o.errors << Shapes::ShapeRef.new(shape: ResourceExistsException)
        o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
      end)
      api.add_operation(:create_user, Seahorse::Model::Operation.new.tap do |o|
        o.name = "CreateUser"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: CreateUserRequest)
        o.output = Shapes::ShapeRef.new(shape: CreateUserResponse)
        o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
        o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
        o.errors << Shapes::ShapeRef.new(shape: ResourceExistsException)
        o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
      end)
      # Delete operations return no data: output is an anonymous empty
      # structure bound to Aws::EmptyStructure.
      api.add_operation(:delete_server, Seahorse::Model::Operation.new.tap do |o|
        o.name = "DeleteServer"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: DeleteServerRequest)
        o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
        o.errors << Shapes::ShapeRef.new(shape: AccessDeniedException)
        o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
        o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
        o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
      end)
      api.add_operation(:delete_ssh_public_key, Seahorse::Model::Operation.new.tap do |o|
        o.name = "DeleteSshPublicKey"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: DeleteSshPublicKeyRequest)
        o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
        o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
        o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
        o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
        o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
      end)
      api.add_operation(:delete_user, Seahorse::Model::Operation.new.tap do |o|
        o.name = "DeleteUser"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: DeleteUserRequest)
        o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
        o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
        o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
        o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
      end)
      api.add_operation(:describe_server, Seahorse::Model::Operation.new.tap do |o|
        o.name = "DescribeServer"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: DescribeServerRequest)
        o.output = Shapes::ShapeRef.new(shape: DescribeServerResponse)
        o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
        o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
        o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
      end)
      api.add_operation(:describe_user, Seahorse::Model::Operation.new.tap do |o|
        o.name = "DescribeUser"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: DescribeUserRequest)
        o.output = Shapes::ShapeRef.new(shape: DescribeUserResponse)
        o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
        o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
        o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
      end)
      api.add_operation(:import_ssh_public_key, Seahorse::Model::Operation.new.tap do |o|
        o.name = "ImportSshPublicKey"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: ImportSshPublicKeyRequest)
        o.output = Shapes::ShapeRef.new(shape: ImportSshPublicKeyResponse)
        o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
        o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
        o.errors << Shapes::ShapeRef.new(shape: ResourceExistsException)
        o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
        o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
      end)
      api.add_operation(:list_servers, Seahorse::Model::Operation.new.tap do |o|
        o.name = "ListServers"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: ListServersRequest)
        o.output = Shapes::ShapeRef.new(shape: ListServersResponse)
        o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
        o.errors << Shapes::ShapeRef.new(shape: InvalidNextTokenException)
        o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
        # Pager config enables auto-pagination on list_servers.
        # (Definition continues past this chunk.)
        o[:pager] = Aws::Pager.new(
          limit_key: "max_results",
tokens: {
"next_token" => "next_token"
}
)
end)
api.add_operation(:list_tags_for_resource, Seahorse::Model::Operation.new.tap do |o|
o.name = "ListTagsForResource"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: ListTagsForResourceRequest)
o.output = Shapes::ShapeRef.new(shape: ListTagsForResourceResponse)
o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
o.errors << Shapes::ShapeRef.new(shape: InvalidNextTokenException)
o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
o[:pager] = Aws::Pager.new(
limit_key: "max_results",
tokens: {
"next_token" => "next_token"
}
)
end)
api.add_operation(:list_users, Seahorse::Model::Operation.new.tap do |o|
o.name = "ListUsers"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: ListUsersRequest)
o.output = Shapes::ShapeRef.new(shape: ListUsersResponse)
o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
o.errors << Shapes::ShapeRef.new(shape: InvalidNextTokenException)
o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
o[:pager] = Aws::Pager.new(
limit_key: "max_results",
tokens: {
"next_token" => "next_token"
}
)
end)
api.add_operation(:start_server, Seahorse::Model::Operation.new.tap do |o|
o.name = "StartServer"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: StartServerRequest)
o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
end)
api.add_operation(:stop_server, Seahorse::Model::Operation.new.tap do |o|
o.name = "StopServer"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: StopServerRequest)
o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
end)
api.add_operation(:tag_resource, Seahorse::Model::Operation.new.tap do |o|
o.name = "TagResource"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: TagResourceRequest)
o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
end)
api.add_operation(:test_identity_provider, Seahorse::Model::Operation.new.tap do |o|
o.name = "TestIdentityProvider"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: TestIdentityProviderRequest)
o.output = Shapes::ShapeRef.new(shape: TestIdentityProviderResponse)
o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
end)
api.add_operation(:untag_resource, Seahorse::Model::Operation.new.tap do |o|
o.name = "UntagResource"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: UntagResourceRequest)
o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
end)
api.add_operation(:update_server, Seahorse::Model::Operation.new.tap do |o|
o.name = "UpdateServer"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: UpdateServerRequest)
o.output = Shapes::ShapeRef.new(shape: UpdateServerResponse)
o.errors << Shapes::ShapeRef.new(shape: AccessDeniedException)
o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
o.errors << Shapes::ShapeRef.new(shape: ConflictException)
o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
o.errors << Shapes::ShapeRef.new(shape: ResourceExistsException)
o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
end)
api.add_operation(:update_user, Seahorse::Model::Operation.new.tap do |o|
o.name = "UpdateUser"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: UpdateUserRequest)
o.output = Shapes::ShapeRef.new(shape: UpdateUserResponse)
o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
o.errors << Shapes::ShapeRef.new(shape: InternalServiceError)
o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
end)
end
end
end
| 66.942724 | 160 | 0.749751 |
# Refresh worker for the embedded Ansible automation provider.
class ManageIQ::Providers::EmbeddedAnsible::AutomationManager::RefreshWorker < MiqEmsRefreshWorker
  require_nested :Runner

  class << self
    # The EMS class this worker refreshes: the enclosing AutomationManager namespace.
    def ems_class
      parent
    end

    # Key used to look up this worker's configuration in the settings tree.
    def settings_name
      :ems_refresh_worker_embedded_ansible_automation
    end
  end
end
| 20.916667 | 98 | 0.808765 |
1d649bb3a60d5f4b9c4eeee5d16165db2f441609 | 5,208 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
# NOTE(fix): the Report mixin must be included inside the class body; a
# top-level `include` would mix Msf::Auxiliary::Report into Object rather
# than into this post module.
class MetasploitModule < Msf::Post
  include Msf::Auxiliary::Report

  def initialize(info = {})
    super(update_info(info,
      'Name'         => 'Multi Recon Local Exploit Suggester',
      'Description'  => %q{
        This module suggests local meterpreter exploits that can be used. The
        exploits are suggested based on the architecture and platform that
        the user has a shell opened as well as the available exploits in
        meterpreter.
        It's important to note that not all local exploits will be fired.
        Exploits are chosen based on these conditions: session type,
        platform, architecture, and required default options.
      },
      'License'      => MSF_LICENSE,
      'Author'       => [ 'sinn3r', 'Mo' ],
      'Platform'     => all_platforms,
      'SessionTypes' => [ 'meterpreter', 'shell' ]
    ))

    register_options([
      Msf::OptInt.new('SESSION', [ true, "The session to run this module on." ]),
      Msf::OptBool.new('SHOWDESCRIPTION', [true, "Displays a detailed description for the available exploits", false])
    ], Msf::Post)
  end

  # Returns the lowercased real names of every platform known to the framework.
  def all_platforms
    Msf::Module::Platform.subclasses.collect { |c| c.realname.downcase }
  end

  # True when the candidate exploit's (target) architecture list includes the
  # architecture of the current session.
  def is_module_arch?(mod)
    mod_arch = mod.target.arch || mod.arch
    mod_arch.include?(session.arch)
  end

  # True when every required option without a default value has been supplied
  # in the candidate module's datastore.
  def is_module_options_ready?(mod)
    mod.options.each_pair do |option_name, option|
      if option.required && option.default.nil? && mod.datastore[option_name].blank?
        return false
      end
    end
    true
  end

  # True when the candidate exploit supports the platform of the current session.
  def is_module_platform?(mod)
    platform_obj = nil
    begin
      platform_obj = Msf::Module::Platform.find_platform(session.platform)
    rescue ArgumentError => e
      # When not found, find_platform raises an ArgumentError
      elog("#{e.class} #{e.message}\n#{e.backtrace * "\n"}")
      return false
    end

    module_platforms = mod.target.platform ? mod.target.platform.platforms : mod.platform.platforms
    module_platforms.include?(platform_obj)
  end

  # True when the candidate exploit declares compatibility with this session.
  def is_session_type_compat?(mod)
    mod.session_compatible?(session.sid)
  end

  # Copy this module's datastore into the candidate and point the candidate
  # at our session if it has no SESSION of its own.
  def set_module_options(mod)
    self.datastore.each_pair do |k, v|
      mod.datastore[k] = v
    end

    if !mod.datastore['SESSION'] && session.present?
      mod.datastore['SESSION'] = session.sid
    end
  end

  # A candidate is kept only when it is a local exploit with a #check method
  # that matches the session type, platform, and architecture, and has all of
  # its required options populated.
  def is_module_wanted?(mod)
    (
      mod.kind_of?(Msf::Exploit::Local) &&
      mod.respond_to?(:check) &&
      is_session_type_compat?(mod) &&
      is_module_platform?(mod) &&
      is_module_arch?(mod) &&
      is_module_options_ready?(mod)
    )
  end

  # Collects the applicable local exploits into @local_exploits before #run.
  def setup
    print_status "Collecting local exploits for #{session.session_type}..."

    @local_exploits = []

    framework.exploits.each do |name, obj|
      mod = framework.exploits.create(name)
      next unless mod
      set_module_options(mod)
      @local_exploits << mod if is_module_wanted?(mod)
    end
  end

  # Prints a summary (or, when VERBOSE, the full list) of the checks to run.
  def show_found_exploits
    if datastore['VERBOSE']
      print_status("The following #{@local_exploits.length} exploit checks are being tried:")
    else
      print_status("#{@local_exploits.length} exploit checks are being tried...")
    end

    @local_exploits.each do |x|
      vprint_status(x.fullname)
    end
  end

  # Runs each collected exploit's #check, reports the interesting ones, and
  # records the results as a note on the session host.
  def run
    if @local_exploits.empty?
      print_error "No suggestions available."
      return
    end

    show_found_exploits
    results = []

    @local_exploits.each do |m|
      begin
        checkcode = m.check
        # See def is_check_interesting?
        if is_check_interesting?(checkcode)
          # Prints the full name and the checkcode message for the exploit
          print_good("#{m.fullname}: #{checkcode.second}")
          results << [m.fullname, checkcode.second]
          # If the datastore option is true, a detailed description will show
          if datastore['SHOWDESCRIPTION']
            # Formatting for the description text
            Rex::Text.wordwrap(Rex::Text.compress(m.description), 2, 70).split(/\n/).each do |line|
              print_line line
            end
          end
        else
          vprint_status("#{m.fullname}: #{checkcode.second}")
        end
      rescue Rex::Post::Meterpreter::RequestError => e
        # Creates a log record in framework.log
        elog("#{e.class} #{e.message}\n#{e.backtrace * "\n"}")
        vprint_error("#{e.class} #{m.shortname} failed to run: #{e.message}")
      end
    end

    report_note(
      :host => rhost,
      :type => "local.suggested_exploits",
      :data => results
    )
  end

  # Only Vulnerable / Appears / Detected check results are worth reporting.
  def is_check_interesting?(checkcode)
    [
      Msf::Exploit::CheckCode::Vulnerable,
      Msf::Exploit::CheckCode::Appears,
      Msf::Exploit::CheckCode::Detected
    ].include?(checkcode)
  end

  # Prefix all console output with the session host for easier correlation.
  def print_status(msg = '')
    super("#{session.session_host} - #{msg}")
  end

  def print_good(msg = '')
    super("#{session.session_host} - #{msg}")
  end

  def print_error(msg = '')
    super("#{session.session_host} - #{msg}")
  end
end
| 27.410526 | 118 | 0.637289 |
bfee8a7f544b125b243d84304e7521ed17514e23 | 889 | # -*- coding: utf-8 -*-
# Make lib/ loadable so the gem's version constant can be read below.
lib_dir = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib_dir) unless $LOAD_PATH.include?(lib_dir)
require 'marauder/marauder'

Gem::Specification.new do |spec|
  # Gem identity and description.
  spec.name = 'prism-marauder'
  spec.version = Marauder::VERSION
  spec.summary = 'service locator based on prism'
  spec.description = 'Command line tool to find services in Prism based on simple queries'

  # Authorship and project links.
  spec.authors = ['Sébastien Cevey', 'Simon Hildrew']
  spec.email = '[email protected]'
  spec.homepage = 'https://github.com/guardian/prism/tree/master/marauder#readme'
  spec.license = 'GPL'

  # Package only tracked files under bin/ and lib/; every bin/ file is an executable.
  spec.files = `git ls-files`.split($/).grep(%r{(bin|lib)/})
  spec.executables = spec.files.grep(%r{^bin/}).map { |f| File.basename(f) }
  spec.require_paths = ['lib']

  # Runtime dependencies.
  spec.add_runtime_dependency 'commander'
  spec.add_runtime_dependency 'net-ssh'
  spec.add_runtime_dependency 'httparty'
end
| 37.041667 | 89 | 0.652418 |
1c36cbe7afa4c76ac098835422779d0c16a305fe | 545 | cask 'editready' do
version '2.5.2'
sha256 'd1d7ee821943f9faf1a09ed6e2477848e76f4a452a9759ef48e946cc6535572b'
url "https://www.divergentmedia.com/fileRepository/EditReady%20#{version}.dmg"
appcast 'https://www.divergentmedia.com/autoupdater/editready/2_x'
name 'Divergent Media EditReady'
homepage 'https://www.divergentmedia.com/editready'
app 'EditReady.app'
zap trash: [
'~/Library/Application Support/EditReady',
'~/Library/Preferences/com.divergentmedia.EditReady.plist',
]
end
| 32.058824 | 80 | 0.717431 |
bbb83dd833ca2a42e022d1c388a4c16410ca2ec3 | 1,482 | require "#{File.dirname(__FILE__)}/../spec_helper"
describe Bonsai::Generate do
describe "generator" do
before :all do
@path = File.expand_path("spec/tmp-generator")
FileUtils.rm_rf @path
Bonsai::Generate.new(@path)
end
it "should create base directories" do
%w(content content/index templates public public/css public/js).each do |dir|
File.directory?("#{@path}/#{dir}").should be_true
end
end
it "should copy the htaccess file to public/.htaccess" do
File.exists?("#{@path}/public/.htaccess").should be_true
end
it "should copy the base.scss file" do
File.exists?("#{@path}/public/css/base.scss").should be_true
end
it "should copy an index page" do
File.exists?("#{@path}/content/index/default.yml").should be_true
end
it "should copy a default template" do
File.exists?("#{@path}/templates/default.liquid").should be_true
end
it "should create a robots.txt" do
File.exists?("#{@path}/public/robots.txt").should be_true
end
it "should create a site.yml" do
File.exists?("#{@path}/site.yml").should be_true
end
it "should generate 22 files" do
Dir.glob("#{@path}/**/*", File::FNM_DOTMATCH).select{|f| File.file?(f) }.size.should == 22
end
it "should generate 14 directories" do
Dir.glob("#{@path}/**/*").select{|f| File.directory?(f) }.size.should == 14
end
end
end | 30.875 | 96 | 0.622132 |
87498a75c3fe4403e50379b1aae5a56f5dfc60b9 | 838 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'icomoon_as_well/version'
Gem::Specification.new do |gem|
  # Gem identity.
  gem.name = "icomoon_as_well"
  gem.version = IcomoonAsWell::VERSION
  gem.authors = ["Alex"]
  gem.email = ["[email protected]"]
  gem.summary = "Icomoon unpacker"
  gem.description = "Icomoon unpacker in a best way"
  gem.homepage = "https://github.com/babakhanov1"
  gem.license = "MIT"

  # Package everything under lib/ and expose the CLI from bin/.
  gem.files = Dir['lib/**/*']
  gem.bindir = "bin"
  gem.executables = ["icomoon_as_well"]
  gem.require_paths = ["lib"]

  # Dependencies.
  gem.add_development_dependency "bundler", "~> 1.10"
  gem.add_development_dependency "rake", "~> 10.0"
  gem.add_dependency "rubyzip", "~> 1.1"
end
| 32.230769 | 57 | 0.642005 |
912b453ef5d5ecee3f36a1e4882086ee338ce275 | 2,130 | require 'test_helper'
require_relative '../lib/sandi_meter/method_arguments_counter'
# Specs for the argument counter: each example asserts [argument_count, line]
# for a parsed method call. `load_args_block` is expected to come from
# test_helper and to return the parsed args_add_block sexp for the snippet.
describe SandiMeter::MethodArgumentsCounter do
  # NOTE(review): test_loader is never referenced by any example below; it may
  # still be used by the load_args_block helper via the example context —
  # confirm against test_helper before removing.
  let(:test_loader) { SandiMeter::ArgsLoader.new }
  let(:analyzer) { SandiMeter::MethodArgumentsCounter.new }

  context 'when variable/method arguments' do
    let(:args_add_block_1) { load_args_block('blah arg1, arg2') }
    let(:args_add_block_2) { load_args_block('blah(arg1, arg2)') }

    it 'counts arguments' do
      expect(analyzer.count(args_add_block_1)).to eq([2, 1])
      expect(analyzer.count(args_add_block_2)).to eq([2, 1])
    end
  end

  context 'when hash arguments' do
    let(:args_add_block_1) { load_args_block('blah k: :v') }
    let(:args_add_block_2) { load_args_block('blah(k: :v)') }
    let(:args_add_block_3) { load_args_block('blah k1: :v1, k2: :v2') }
    let(:args_add_block_4) { load_args_block('blah(k1: :v1, k2: :v2)') }

    it 'counts arguments' do
      expect(analyzer.count(args_add_block_1)).to eq([1, 1])
      expect(analyzer.count(args_add_block_2)).to eq([1, 1])
      expect(analyzer.count(args_add_block_3)).to eq([2, 1])
      expect(analyzer.count(args_add_block_4)).to eq([2, 1])
    end
  end

  context 'when variable/method with hash' do
    let(:code_1) { load_args_block('blah arg_1, arg_2, k: :v') }
    let(:code_2) { load_args_block('blah(arg_1, arg_2, k: :v)') }
    let(:code_3) { load_args_block('blah arg_1, arg_2, k1: :v1, k2: :v2') }
    let(:code_4) { load_args_block('blah(arg_1, arg_2, k1: :v1, k2: :v2)') }

    it 'counts arguments' do
      expect(analyzer.count(code_1)).to eq([3, 1])
      expect(analyzer.count(code_2)).to eq([3, 1])
    end

    # Fixed typo in the example description ("argumets" -> "arguments").
    it 'counts hash keys as arguments' do
      expect(analyzer.count(code_3)).to eq([4, 1])
      expect(analyzer.count(code_4)).to eq([4, 1])
    end
  end

  context 'when argument with default value' do
    let(:code_1) { load_args_block('blah arg_1 = "blah"') }
    let(:code_2) { load_args_block('blah(arg_1 = "blah")') }

    it 'counts arguments' do
      expect(analyzer.count(code_1)).to eq([1, 1])
      expect(analyzer.count(code_2)).to eq([1, 1])
    end
  end
end
| 35.5 | 76 | 0.660563 |
# Chef cookbook metadata for the tftp cookbook.
name "tftp"
maintainer "Opscode, Inc."
maintainer_email "[email protected]"
license "Apache 2.0"
description "Installs/Configures tftpd"
long_description File.read(File.join(File.dirname(__FILE__), 'README.md'))
version "1.2.0"

# Platforms this cookbook is known to support.
%w(debian ubuntu fedora scientific centos redhat).each { |os| supports os }

# Tunable node attributes.
attribute "tftp/username",
  display_name: "tftp user",
  default: "tftp"

attribute "tftp/directory",
  display_name: "tftp directory",
  description: "Directory to be shared via tftp.",
  default: "/var/lib/tftpboot"

attribute "tftp/address",
  display_name: "tftp address",
  default: "0.0.0.0:69"

attribute "tftp/tftp_options",
  display_name: "tftp tftp_options",
  default: "--secure"

attribute "tftp/options",
  display_name: "tftp options",
  default: "-s"
| 25.787879 | 72 | 0.6651 |
ab243beea0fdd971e53806e3257219da7e51f6ec | 1,221 | cask "graphicconverter" do
  # NOTE(review): the hexsha/size tokens fused onto the line above look like
  # dataset-extraction residue, not cask source — strip them before use.
  # version is "app_version,build_number"; both halves feed the url below.
  version "11.6.1,5515"
  sha256 "39b9b6c882c8df0fb0ca0e8108a400e8e58088c3c3b9bc5236c7ca30ce195320"
  url "https://www.lemkesoft.info/files/graphicconverter/gc#{version.major}_build#{version.csv.second}.zip",
      verified: "lemkesoft.info/"
  name "GraphicConverter"
  desc "For browsing, enhancing and converting images"
  homepage "https://www.lemkesoft.de/en/products/graphicconverter/"
  # TODO: Return to using the `Sparkle` strategy once all `item`s are passed
  # into the `strategy` and we can omit items using the `beta` channel.
  livecheck do
    url "https://www.lemkesoft.info/sparkle/graphicconverter/graphicconverter#{version.major}.xml"
    # Captures "Version X.Y.Z Build N" from the appcast titles.
    regex(/<title>Version\s+(\d+(?:\.\d+)+)\s+Build\s+(\d+)(?:<|\s+\()/i)
    strategy :page_match do |page, regex|
      # Re-join version and build with a comma to match the cask's version format.
      page.scan(regex).map { |match| "#{match[0]},#{match[1]}" }
    end
  end
  auto_updates true
  app "GraphicConverter #{version.major}.app"
  # Leftovers removed on `brew uninstall --zap`.
  zap trash: [
    "/Users/Shared/Library/Application Support/GraphicConverter",
    "~/Library/Application Support/GraphicConverter",
    "~/Library/Caches/com.lemkesoft.graphicconverter*",
    "~/Library/Group Containers/*.com.lemkesoft.graphicconverter*.group",
  ]
end
| 38.15625 | 108 | 0.714169 |
389607134493f5aed696e5b5cafcda9837fb8b1e | 6,449 | require 'pathname'
# Puppet type wrapping the DSC xExchMailboxServer resource. This file is
# auto-generated from the MOF schema (see @doc below); edit the generator,
# not this file.
Puppet::Type.newtype(:dsc_xexchmailboxserver) do
  require Pathname.new(__FILE__).dirname + '../../' + 'puppet/type/base_dsc'
  require Pathname.new(__FILE__).dirname + '../../puppet_x/puppetlabs/dsc_type_helpers'

  @doc = %q{
    The DSC xExchMailboxServer resource type.
    Automatically generated from
    'xExchange/DSCResources/MSFT_xExchMailboxServer/MSFT_xExchMailboxServer.schema.mof'

    To learn more about PowerShell Desired State Configuration, please
    visit https://technet.microsoft.com/en-us/library/dn249912.aspx.

    For more information about built-in DSC Resources, please visit
    https://technet.microsoft.com/en-us/library/dn249921.aspx.

    For more information about xDsc Resources, please visit
    https://github.com/PowerShell/DscResources.
  }

  # dsc_identity is the key attribute of the underlying DSC resource.
  validate do
      fail('dsc_identity is a required attribute') if self[:dsc_identity].nil?
    end

  # Metadata consumed by the base_dsc provider to build the DSC invocation.
  def dscmeta_resource_friendly_name; 'xExchMailboxServer' end
  def dscmeta_resource_name; 'MSFT_xExchMailboxServer' end
  def dscmeta_module_name; 'xExchange' end
  def dscmeta_module_version; '1.6.0.0' end

  newparam(:name, :namevar => true ) do
  end

  # DSC resources only support existence checks; :present is the default.
  ensurable do
    newvalue(:exists?) { provider.exists? }
    newvalue(:present) { provider.create }
    defaultto { :present }
  end

  # Name:         Identity
  # Type:         string
  # IsMandatory:  True
  # Values:       None
  newparam(:dsc_identity) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "Identity - The Identity parameter specifies the Mailbox server that you want to modify."
    isrequired
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end

  # Name:         Credential
  # Type:         MSFT_Credential
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_credential) do
    def mof_type; 'MSFT_Credential' end
    def mof_is_embedded?; true end
    desc "Credential - Credentials used to establish a remote Powershell session to Exchange"
    validate do |value|
      unless value.kind_of?(Hash)
        fail("Invalid value '#{value}'. Should be a hash")
      end
      PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("Credential", value)
    end
  end

  # Name:         DomainController
  # Type:         string
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_domaincontroller) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "DomainController - The DomainController parameter specifies the fully qualified domain name (FQDN) of the domain controller that writes this configuration change to Active Directory."
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end

  # Name:         DatabaseCopyActivationDisabledAndMoveNow
  # Type:         boolean
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_databasecopyactivationdisabledandmovenow) do
    def mof_type; 'boolean' end
    def mof_is_embedded?; false end
    desc "DatabaseCopyActivationDisabledAndMoveNow - The DatabaseCopyActivationDisabledAndMoveNow parameter specifies whether to prevent databases from being mounted on this Mailbox server if there are other healthy copies of the databases on other Mailbox servers. It will also immediately move any mounted databases on the server to other servers if copies exist and are healthy."
    validate do |value|
    end
    newvalues(true, false)
    # DSC expects string booleans; munge normalizes true/false/'true'/'false'.
    munge do |value|
      PuppetX::Dsc::TypeHelpers.munge_boolean(value.to_s)
    end
  end

  # Name:         DatabaseCopyAutoActivationPolicy
  # Type:         string
  # IsMandatory:  False
  # Values:       ["Blocked", "IntrasiteOnly", "Unrestricted"]
  newparam(:dsc_databasecopyautoactivationpolicy) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "DatabaseCopyAutoActivationPolicy - The DatabaseCopyAutoActivationPolicy parameter specifies the type of automatic activation available for mailbox database copies on the specified Mailbox server. Valid values are Blocked, IntrasiteOnly, and Unrestricted. Valid values are Blocked, IntrasiteOnly, Unrestricted."
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
      # Accepts each enum value in its canonical and all-lowercase forms.
      unless ['Blocked', 'blocked', 'IntrasiteOnly', 'intrasiteonly', 'Unrestricted', 'unrestricted'].include?(value)
        fail("Invalid value '#{value}'. Valid values are Blocked, IntrasiteOnly, Unrestricted")
      end
    end
  end

  # Name:         MaximumActiveDatabases
  # Type:         string
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_maximumactivedatabases) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "MaximumActiveDatabases - The MaximumActiveDatabases parameter specifies the number of databases that can be mounted on this Mailbox server. This parameter accepts numeric values."
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end

  # Name:         MaximumPreferredActiveDatabases
  # Type:         string
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_maximumpreferredactivedatabases) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "MaximumPreferredActiveDatabases - The MaximumPreferredActiveDatabases parameter specifies a preferred maximum number of databases that a server should have. This value is different from the actual maximum, which is configured using the MaximumActiveDatabases parameter. The value of MaximumPreferredActiveDatabases is only honored during best copy and server selection, database and server switchovers, and when rebalancing the DAG."
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end

  # Chains a reboot relationship onto the dependencies computed by base_dsc.
  def builddepends
    pending_relations = super()
    PuppetX::Dsc::TypeHelpers.ensure_reboot_relationship(self, pending_relations)
  end
end
# PowerShell provider for the type above, inheriting the generic base_dsc
# powershell provider. Requires WMF/PowerShell 5.0+ and defaults on Windows.
Puppet::Type.type(:dsc_xexchmailboxserver).provide :powershell, :parent => Puppet::Type.type(:base_dsc).provider(:powershell) do
  confine :true => (Gem::Version.new(Facter.value(:powershell_version)) >= Gem::Version.new('5.0.10240.16384'))
  defaultfor :operatingsystem => :windows

  mk_resource_methods
end
| 39.084848 | 443 | 0.717476 |
914ae645b9f74abf2fab972a8adbb5c032ca8d35 | 102 | class Product < ApplicationRecord
  # NOTE(review): the hexsha/size tokens fused onto the line above look like
  # dataset-extraction residue, not model source — strip them before use.
  # A product appears on many line items; its orders are reached through them.
  has_many :line_items
  has_many :orders, through: :line_items
end
| 20.4 | 40 | 0.794118 |
01085ec5aa89762e15bd645ea4c91fc816b86c0b | 1,049 | # frozen_string_literal: true
require 'text'
module OrgSelection
  # Converts an Org model into a plain hash for Org search/selection,
  # merging in any scheme-specific identifiers attached to the Org.
  class OrgToHashService
    class << self
      # Builds a hash representation of +org+. For example an Org with
      # id 123, name "Foo (foo.org)" and a ROR identifier becomes:
      #   { id: 123, name: "Foo (foo.org)", sort_name: "Foo",
      #     ror: "http://ror.org/123" }
      # Returns an empty hash when no org is given.
      def to_hash(org:)
        return {} unless org.present?

        base = {
          id: org.id,
          name: org.name,
          sort_name: OrgSelection::SearchService.name_without_alias(name: org.name)
        }
        # Fold each identifier in, keyed by its downcased scheme name.
        org.identifiers.each_with_object(base) do |identifier, acc|
          scheme = identifier.identifier_scheme
          next unless scheme.present?

          acc[scheme.name.downcase.to_sym] = identifier.value
        end
      end
    end
  end
end
| 26.897436 | 83 | 0.575786 |
87573529f182789276a9c1c3b760dd70865a4889 | 1,533 | # -*- encoding: utf-8 -*-
# stub: ipaddr 1.2.2 ruby lib
# Auto-generated RubyGems stub specification for the installed ipaddr gem;
# not meant to be edited by hand.
Gem::Specification.new do |s|
  s.name = "ipaddr".freeze
  s.version = "1.2.2"

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Akinori MUSHA".freeze, "Hajimu UMEMOTO".freeze]
  s.bindir = "exe".freeze
  s.date = "2020-04-01"
  s.description = "IPAddr provides a set of methods to manipulate an IP address.\nBoth IPv4 and IPv6 are supported.\n".freeze
  s.email = ["[email protected]".freeze, "[email protected]".freeze]
  s.files = ["ipaddr.rb".freeze]
  s.homepage = "https://github.com/ruby/ipaddr".freeze
  s.licenses = ["BSD-2-Clause".freeze]
  s.rubygems_version = "3.0.3".freeze
  s.summary = "A class to manipulate an IP address in ruby".freeze

  # Guarded dependency declarations for compatibility with very old RubyGems
  # versions (pre-1.2 lacked add_development_dependency semantics).
  if s.respond_to? :specification_version then
    s.specification_version = 4

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<bundler>.freeze, ["~> 1.15"])
      s.add_development_dependency(%q<rake>.freeze, ["~> 10.0"])
      s.add_development_dependency(%q<test-unit>.freeze, [">= 0"])
    else
      s.add_dependency(%q<bundler>.freeze, ["~> 1.15"])
      s.add_dependency(%q<rake>.freeze, ["~> 10.0"])
      s.add_dependency(%q<test-unit>.freeze, [">= 0"])
    end
  else
    s.add_dependency(%q<bundler>.freeze, ["~> 1.15"])
    s.add_dependency(%q<rake>.freeze, ["~> 10.0"])
    s.add_dependency(%q<test-unit>.freeze, [">= 0"])
  end
end
| 39.307692 | 125 | 0.654925 |
1d52f77622243f7bdded00d0d4f401e7dde2e77d | 16,405 | require "active_record"
db_namespace = namespace :db do
desc "Set the environment value for the database"
task "environment:set" => [:environment, :load_config] do
ActiveRecord::InternalMetadata.create_table
ActiveRecord::InternalMetadata[:environment] = ActiveRecord::Migrator.current_environment
end
task check_protected_environments: [:environment, :load_config] do
ActiveRecord::Tasks::DatabaseTasks.check_protected_environments!
end
task :load_config do
ActiveRecord::Base.configurations = ActiveRecord::Tasks::DatabaseTasks.database_configuration || {}
ActiveRecord::Migrator.migrations_paths = ActiveRecord::Tasks::DatabaseTasks.migrations_paths
end
namespace :create do
task all: :load_config do
ActiveRecord::Tasks::DatabaseTasks.create_all
end
end
desc "Creates the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:create:all to create all databases in the config). Without RAILS_ENV or when RAILS_ENV is development, it defaults to creating the development and test databases."
task create: [:load_config] do
ActiveRecord::Tasks::DatabaseTasks.create_current
end
namespace :drop do
task all: [:load_config, :check_protected_environments] do
ActiveRecord::Tasks::DatabaseTasks.drop_all
end
end
desc "Drops the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:drop:all to drop all databases in the config). Without RAILS_ENV or when RAILS_ENV is development, it defaults to dropping the development and test databases."
task drop: [:load_config, :check_protected_environments] do
db_namespace["drop:_unsafe"].invoke
end
task "drop:_unsafe" => [:load_config] do
ActiveRecord::Tasks::DatabaseTasks.drop_current
end
namespace :purge do
task all: [:load_config, :check_protected_environments] do
ActiveRecord::Tasks::DatabaseTasks.purge_all
end
end
# desc "Empty the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:purge:all to purge all databases in the config). Without RAILS_ENV it defaults to purging the development and test databases."
task purge: [:load_config, :check_protected_environments] do
ActiveRecord::Tasks::DatabaseTasks.purge_current
end
desc "Migrate the database (options: VERSION=x, VERBOSE=false, SCOPE=blog)."
task migrate: [:environment, :load_config] do
ActiveRecord::Tasks::DatabaseTasks.migrate
db_namespace["_dump"].invoke
end
# IMPORTANT: This task won't dump the schema if ActiveRecord::Base.dump_schema_after_migration is set to false
task :_dump do
if ActiveRecord::Base.dump_schema_after_migration
case ActiveRecord::Base.schema_format
when :ruby then db_namespace["schema:dump"].invoke
when :sql then db_namespace["structure:dump"].invoke
else
raise "unknown schema format #{ActiveRecord::Base.schema_format}"
end
end
# Allow this task to be called as many times as required. An example is the
# migrate:redo task, which calls other two internally that depend on this one.
db_namespace["_dump"].reenable
end
namespace :migrate do
# desc 'Rollbacks the database one migration and re migrate up (options: STEP=x, VERSION=x).'
task redo: [:environment, :load_config] do
# With VERSION set, redo exactly that migration; otherwise roll back one
# step (honouring STEP via the rollback task) and migrate up again.
if ENV["VERSION"]
db_namespace["migrate:down"].invoke
db_namespace["migrate:up"].invoke
else
db_namespace["rollback"].invoke
db_namespace["migrate"].invoke
end
end
# desc 'Resets your database using your migrations for the current environment'
task reset: ["db:drop", "db:create", "db:migrate"]
# desc 'Runs the "up" for a given migration VERSION.'
task up: [:environment, :load_config] do
version = ENV["VERSION"] ? ENV["VERSION"].to_i : nil
raise "VERSION is required" unless version
ActiveRecord::Migrator.run(:up, ActiveRecord::Tasks::DatabaseTasks.migrations_paths, version)
db_namespace["_dump"].invoke
end
# desc 'Runs the "down" for a given migration VERSION.'
task down: [:environment, :load_config] do
version = ENV["VERSION"] ? ENV["VERSION"].to_i : nil
raise "VERSION is required - To go down one migration, run db:rollback" unless version
ActiveRecord::Migrator.run(:down, ActiveRecord::Tasks::DatabaseTasks.migrations_paths, version)
db_namespace["_dump"].invoke
end
desc "Display status of migrations"
task status: [:environment, :load_config] do
unless ActiveRecord::SchemaMigration.table_exists?
abort "Schema migrations table does not exist yet."
end
# Versions already recorded in the schema_migrations table.
db_list = ActiveRecord::SchemaMigration.normalized_versions
# Migration files on disk; each entry becomes [status, version, name].
# Matching versions are deleted from db_list as they are seen.
file_list =
ActiveRecord::Tasks::DatabaseTasks.migrations_paths.flat_map do |path|
Dir.foreach(path).map do |file|
next unless ActiveRecord::Migrator.match_to_migration_filename?(file)
version, name, scope = ActiveRecord::Migrator.parse_migration_filename(file)
version = ActiveRecord::SchemaMigration.normalize_migration_number(version)
status = db_list.delete(version) ? "up" : "down"
[status, version, (name + scope).humanize]
end.compact
end
# Anything left in db_list ran in the database but has no file on disk.
db_list.map! do |version|
["up", version, "********** NO FILE **********"]
end
# output
puts "\ndatabase: #{ActiveRecord::Base.connection_config[:database]}\n\n"
puts "#{'Status'.center(8)} #{'Migration ID'.ljust(14)} Migration Name"
puts "-" * 50
(db_list + file_list).sort_by { |_, version, _| version }.each do |status, version, name|
puts "#{status.center(8)} #{version.ljust(14)} #{name}"
end
puts
end
end
desc "Rolls the schema back to the previous version (specify steps w/ STEP=n)."
task rollback: [:environment, :load_config] do
step = ENV["STEP"] ? ENV["STEP"].to_i : 1
ActiveRecord::Migrator.rollback(ActiveRecord::Tasks::DatabaseTasks.migrations_paths, step)
db_namespace["_dump"].invoke
end
# desc 'Pushes the schema to the next version (specify steps w/ STEP=n).'
task forward: [:environment, :load_config] do
step = ENV["STEP"] ? ENV["STEP"].to_i : 1
ActiveRecord::Migrator.forward(ActiveRecord::Tasks::DatabaseTasks.migrations_paths, step)
db_namespace["_dump"].invoke
end
# desc 'Drops and recreates the database from db/schema.rb for the current environment and loads the seeds.'
task reset: [ "db:drop", "db:setup" ]
# desc "Retrieves the charset for the current environment's database"
task charset: [:environment, :load_config] do
puts ActiveRecord::Tasks::DatabaseTasks.charset_current
end
# desc "Retrieves the collation for the current environment's database"
task collation: [:environment, :load_config] do
begin
puts ActiveRecord::Tasks::DatabaseTasks.collation_current
rescue NoMethodError
# Not every database adapter implements collation lookup.
$stderr.puts "Sorry, your database adapter is not supported yet. Feel free to submit a patch."
end
end
desc "Retrieves the current schema version number"
task version: [:environment, :load_config] do
puts "Current version: #{ActiveRecord::Migrator.current_version}"
end
# desc "Raises an error if there are pending migrations"
task abort_if_pending_migrations: [:environment, :load_config] do
pending_migrations = ActiveRecord::Migrator.open(ActiveRecord::Tasks::DatabaseTasks.migrations_paths).pending_migrations
if pending_migrations.any?
puts "You have #{pending_migrations.size} pending #{pending_migrations.size > 1 ? 'migrations:' : 'migration:'}"
pending_migrations.each do |pending_migration|
puts " %4d %s" % [pending_migration.version, pending_migration.name]
end
abort %{Run `rails db:migrate` to update your database then try again.}
end
end
desc "Creates the database, loads the schema, and initializes with the seed data (use db:reset to also drop the database first)"
task setup: ["db:schema:load_if_ruby", "db:structure:load_if_sql", :seed]
desc "Loads the seed data from db/seeds.rb"
task :seed do
# Refuse to seed a database that is not fully migrated.
db_namespace["abort_if_pending_migrations"].invoke
ActiveRecord::Tasks::DatabaseTasks.load_seed
end
namespace :fixtures do
desc "Loads fixtures into the current environment's database. Load specific fixtures using FIXTURES=x,y. Load from subdirectory in test/fixtures using FIXTURES_DIR=z. Specify an alternative path (eg. spec/fixtures) using FIXTURES_PATH=spec/fixtures."
task load: [:environment, :load_config] do
require "active_record/fixtures"
base_dir = ActiveRecord::Tasks::DatabaseTasks.fixtures_path
fixtures_dir = if ENV["FIXTURES_DIR"]
File.join base_dir, ENV["FIXTURES_DIR"]
else
base_dir
end
# Either the explicit FIXTURES list or every .yml file under the dir,
# stripped of the directory prefix and the ".yml" extension.
fixture_files = if ENV["FIXTURES"]
ENV["FIXTURES"].split(",")
else
# The use of String#[] here is to support namespaced fixtures.
Dir["#{fixtures_dir}/**/*.yml"].map {|f| f[(fixtures_dir.size + 1)..-5] }
end
ActiveRecord::FixtureSet.create_fixtures(fixtures_dir, fixture_files)
end
# desc "Search for a fixture given a LABEL or ID. Specify an alternative path (eg. spec/fixtures) using FIXTURES_PATH=spec/fixtures."
task identify: [:environment, :load_config] do
require "active_record/fixtures"
label, id = ENV["LABEL"], ENV["ID"]
raise "LABEL or ID required" if label.blank? && id.blank?
puts %Q(The fixture ID for "#{label}" is #{ActiveRecord::FixtureSet.identify(label)}.) if label
base_dir = ActiveRecord::Tasks::DatabaseTasks.fixtures_path
# Scan every fixture file for entries whose label or derived ID matches.
Dir["#{base_dir}/**/*.yml"].each do |file|
if data = YAML::load(ERB.new(IO.read(file)).result)
data.each_key do |key|
key_id = ActiveRecord::FixtureSet.identify(key)
if key == label || key_id == id.to_i
puts "#{file}: #{key} (#{key_id})"
end
end
end
end
end
end
namespace :schema do
desc "Creates a db/schema.rb file that is portable against any DB supported by Active Record"
task dump: [:environment, :load_config] do
require "active_record/schema_dumper"
filename = ENV["SCHEMA"] || File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "schema.rb")
File.open(filename, "w:utf-8") do |file|
ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, file)
end
# Re-enable so chained callers (e.g. test:clone_schema) can dump again.
db_namespace["schema:dump"].reenable
end
desc "Loads a schema.rb file into the database"
task load: [:environment, :load_config, :check_protected_environments] do
ActiveRecord::Tasks::DatabaseTasks.load_schema_current(:ruby, ENV["SCHEMA"])
end
task load_if_ruby: ["db:create", :environment] do
db_namespace["schema:load"].invoke if ActiveRecord::Base.schema_format == :ruby
end
namespace :cache do
desc "Creates a db/schema_cache.dump file."
task dump: [:environment, :load_config] do
con = ActiveRecord::Base.connection
filename = File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "schema_cache.dump")
# Rebuild the cache from scratch, then marshal it to disk.
# NOTE(review): Kernel#open with a Marshal payload is fine here because
# the filename is locally constructed, not user input.
con.schema_cache.clear!
con.data_sources.each { |table| con.schema_cache.add(table) }
open(filename, "wb") { |f| f.write(Marshal.dump(con.schema_cache)) }
end
desc "Clears a db/schema_cache.dump file."
task clear: [:environment, :load_config] do
filename = File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "schema_cache.dump")
rm_f filename, verbose: false
end
end
end
namespace :structure do
desc "Dumps the database structure to db/structure.sql. Specify another file with SCHEMA=db/my_structure.sql"
task dump: [:environment, :load_config] do
filename = ENV["SCHEMA"] || File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "structure.sql")
current_config = ActiveRecord::Tasks::DatabaseTasks.current_config
ActiveRecord::Tasks::DatabaseTasks.structure_dump(current_config, filename)
# Append the schema_migrations rows so the dump also records which
# migrations have already been run.
if ActiveRecord::Base.connection.supports_migrations? &&
ActiveRecord::SchemaMigration.table_exists?
File.open(filename, "a") do |f|
f.puts ActiveRecord::Base.connection.dump_schema_information
f.print "\n"
end
end
db_namespace["structure:dump"].reenable
end
desc "Recreates the databases from the structure.sql file"
task load: [:environment, :load_config, :check_protected_environments] do
ActiveRecord::Tasks::DatabaseTasks.load_schema_current(:sql, ENV["SCHEMA"])
end
task load_if_sql: ["db:create", :environment] do
db_namespace["structure:load"].invoke if ActiveRecord::Base.schema_format == :sql
end
end
namespace :test do
# Warns when a db:test:* task is invoked explicitly; the Rails test helper
# maintains the test schema automatically nowadays.
task :deprecated do
Rake.application.top_level_tasks.grep(/^db:test:/).each do |task|
$stderr.puts "WARNING: #{task} is deprecated. The Rails test helper now maintains " \
"your test schema automatically, see the release notes for details."
end
end
# desc "Recreate the test database from the current schema"
task load: %w(db:test:purge) do
case ActiveRecord::Base.schema_format
when :ruby
db_namespace["test:load_schema"].invoke
when :sql
db_namespace["test:load_structure"].invoke
end
end
# desc "Recreate the test database from an existent schema.rb file"
task load_schema: %w(db:test:purge) do
begin
should_reconnect = ActiveRecord::Base.connection_pool.active_connection?
# Load quietly into the "test" configuration...
ActiveRecord::Schema.verbose = false
ActiveRecord::Tasks::DatabaseTasks.load_schema ActiveRecord::Base.configurations["test"], :ruby, ENV["SCHEMA"]
ensure
# ...then restore the connection for the original environment.
if should_reconnect
ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations[ActiveRecord::Tasks::DatabaseTasks.env])
end
end
end
# desc "Recreate the test database from an existent structure.sql file"
task load_structure: %w(db:test:purge) do
ActiveRecord::Tasks::DatabaseTasks.load_schema ActiveRecord::Base.configurations["test"], :sql, ENV["SCHEMA"]
end
# desc "Recreate the test database from a fresh schema"
task clone: %w(db:test:deprecated environment) do
case ActiveRecord::Base.schema_format
when :ruby
db_namespace["test:clone_schema"].invoke
when :sql
db_namespace["test:clone_structure"].invoke
end
end
# desc "Recreate the test database from a fresh schema.rb file"
task clone_schema: %w(db:test:deprecated db:schema:dump db:test:load_schema)
# desc "Recreate the test database from a fresh structure.sql file"
task clone_structure: %w(db:test:deprecated db:structure:dump db:test:load_structure)
# desc "Empty the test database"
task purge: %w(environment load_config check_protected_environments) do
ActiveRecord::Tasks::DatabaseTasks.purge ActiveRecord::Base.configurations["test"]
end
# desc 'Load the test schema'
task prepare: %w(environment load_config) do
unless ActiveRecord::Base.configurations.blank?
db_namespace["test:load"].invoke
end
end
end
end
namespace :railties do
namespace :install do
# desc "Copies missing migrations from Railties (e.g. engines). You can specify Railties to use with FROM=railtie1,railtie2"
task migrations: :'db:load_config' do
# FROM limits which railties are copied; default is all of them.
to_load = ENV["FROM"].blank? ? :all : ENV["FROM"].split(",").map(&:strip)
railties = {}
# Collect the db/migrate path of each requested railtie/engine.
Rails.application.migration_railties.each do |railtie|
next unless to_load == :all || to_load.include?(railtie.railtie_name)
if railtie.respond_to?(:paths) && (path = railtie.paths["db/migrate"].first)
railties[railtie.railtie_name] = path
end
end
# Progress callbacks invoked per migration during the copy.
on_skip = Proc.new do |name, migration|
puts "NOTE: Migration #{migration.basename} from #{name} has been skipped. Migration with the same name already exists."
end
on_copy = Proc.new do |name, migration|
puts "Copied migration #{migration.basename} from #{name}"
end
ActiveRecord::Migration.copy(ActiveRecord::Tasks::DatabaseTasks.migrations_paths.first, railties,
on_skip: on_skip, on_copy: on_copy)
end
end
end
| 40.208333 | 267 | 0.695215 |
393a8cd23cc55d1d6b29dad24066b2cc0bb2fdac | 989 | Rails3017::Application.configure do
# Rails 3-era development defaults: code reloading on every request,
# verbose errors, caching and mail-delivery errors disabled.
# Settings specified here will take precedence over those in config/application.rb
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the webserver when you make code changes.
config.cache_classes = false
# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true
# Show full error reports and disable caching
config.consider_all_requests_local = true
config.action_view.debug_rjs = true
config.action_controller.perform_caching = false
# Don't care if the mailer can't send
config.action_mailer.raise_delivery_errors = false
# Print deprecation notices to the Rails logger
config.active_support.deprecation = :log
# Only use best-standards-support built into browsers
config.action_dispatch.best_standards_support = :builtin
end
| 36.62963 | 84 | 0.773509 |
4aa627776cd0752514d5e2b78767e18056203887 | 976 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Application class for the ILike app. All settings are framework defaults;
# the commented examples below show the usual customization points.
module ILike
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
end
end
| 40.666667 | 99 | 0.722336 |
e9077ff53b73d5539cc3ae93edce7c968e22e4f7 | 1,036 | # Copyright (c) 2015 Vault12, Inc.
# MIT License https://opensource.org/licenses/MIT
require 'test_helper'
class VerifySessionTest < ActionDispatch::IntegrationTest
# Exercises the /start_session handshake: only a raw 32-byte client token
# is accepted; every other payload must be rejected.
test 'start session token flow' do
# No token at all => bad request.
post '/start_session'
_fail_response :bad_request # wrong token
# Raw 32 random bytes => accepted.
@client_token = RbNaCl::Random.random_bytes 32
_post '/start_session', @client_token
_success_response
# Wrong length (31 bytes) => rejected.
@client_token = RbNaCl::Random.random_bytes 31
_post '/start_session', @client_token
_fail_response :unauthorized # wrong token
# Base64-encoded token instead of raw bytes => rejected.
@client_token = RbNaCl::Random.random_bytes(32).to_b64
_post '/start_session', @client_token
_fail_response :unauthorized
# Arbitrary string => rejected.
_post '/start_session', 'hello vault12'
_fail_response :unauthorized
# A fresh valid token still works after the failed attempts above.
@client_token = RbNaCl::Random.random_bytes 32
_post '/start_session', @client_token
_success_response
lines = _check_body response.body
pkey = lines[0].from_b64 # first line is token, second difficulty
assert_not_empty pkey
assert_not_empty lines[1]
end
end
| 29.6 | 69 | 0.740347 |
335730971e340f393e00700328f08ac266db2cec | 820 | #
# Cookbook:: pozoledf-sample-app
# Spec:: default
#
# Copyright:: 2021, The Authors, All Rights Reserved.
require 'spec_helper'
# Smoke test: the default recipe must converge without raising on each
# supported platform simulated by ChefSpec/Fauxhai.
describe 'pozoledf-sample-app::default' do
context 'When all attributes are default, on Ubuntu 20.04' do
# for a complete list of available platforms and versions see:
# https://github.com/chefspec/fauxhai/blob/master/PLATFORMS.md
platform 'ubuntu', '20.04'
it 'converges successfully' do
expect { chef_run }.to_not raise_error
end
end
context 'When all attributes are default, on CentOS 8' do
# for a complete list of available platforms and versions see:
# https://github.com/chefspec/fauxhai/blob/master/PLATFORMS.md
platform 'centos', '8'
it 'converges successfully' do
expect { chef_run }.to_not raise_error
end
end
end
| 27.333333 | 66 | 0.708537 |
61eaa00b9af2f72e6b8fbca6f7ed414628efd596 | 3,014 | require "rails_helper"
# Specs for CheckboxesAnswerPresenter: #response filtering and the
# #to_param hash (response, skipped, concatenated_response, selected_answers).
RSpec.describe CheckboxesAnswerPresenter do
describe "#response" do
# Fixed description: the example removes the blank string "", so the
# presenter filters blanks, not just nils.
it "returns all non-blank values as an array" do
step = build(:checkbox_answers, response: ["Yes", "No", "Morning break", ""])
presenter = described_class.new(step)
expect(presenter.response).to eq(["Yes", "No", "Morning break"])
end
end
describe "#to_param" do
it "returns a hash of options" do
step = build(:checkbox_answers,
response: ["Yes", "No"],
skipped: false,
further_information: {
yes_further_information: "More yes info",
no_further_information: "More no info"
})
presenter = described_class.new(step)
# Each selected answer is expanded into machine/human values plus the
# matching *_further_information entry.
expect(presenter.to_param).to eql({
response: ["Yes", "No"],
skipped: false,
concatenated_response: "Yes, No",
selected_answers: [
{
machine_value: :yes,
human_value: "Yes",
further_information: "More yes info"
},
{
machine_value: :no,
human_value: "No",
further_information: "More no info"
}
]
})
end
context "when there is no response" do
it "sets the response to an empty array" do
step = build(:checkbox_answers, response: [])
presenter = described_class.new(step)
expect(presenter.to_param).to include({response: []})
end
# Fixed description: this example asserts selected_answers, not
# concatenated_response.
it "sets the selected_answers to an empty array" do
step = build(:checkbox_answers, response: [])
presenter = described_class.new(step)
expect(presenter.to_param).to include({selected_answers: []})
end
it "sets the concatenated_response to nil" do
step = build(:checkbox_answers, response: [])
presenter = described_class.new(step)
expect(presenter.to_param).to include({concatenated_response: nil})
end
end
context "when the answer is skipped" do
it "sets the skipped value to true" do
step = build(:checkbox_answers, skipped: true)
presenter = described_class.new(step)
expect(presenter.to_param).to include({skipped: true})
end
end
context "when the option includes special characters" do
it "the further_information is correctly returned" do
step = build(:checkbox_answers,
response: ["Other, please specify"],
further_information: {"other_please_specify_further_information": "Sinks and stuff"})
presenter = described_class.new(step)
expect(presenter.to_param).to eql({
response: ["Other, please specify"],
skipped: false,
concatenated_response: "Other, please specify",
selected_answers: [
{
machine_value: :other_please_specify,
human_value: "Other, please specify",
further_information: "Sinks and stuff"
}
]
})
end
end
end
end
| 32.408602 | 95 | 0.604512 |
03b5e9da3c958a25bfdbc44b9c813b7ca640bdd3 | 119 | class AddCountryToUsers < ActiveRecord::Migration[5.1]
# Reversible migration step: adds a nullable string column `country`
# to the users table.
def change
add_column :users, :country, :string
end
end
| 19.833333 | 54 | 0.739496 |
ab4290b43105e5517f719b17bd58583fa7c13d9d | 1,347 | require 'spec_helper'
describe "Identifiers" do
# Regenerate the tags file for the scratch directory so the Vim plugin
# under test can resolve symbols through ctags.
def generate_tags
system('ctags -R .')
end
# Ask the plugin for the declared type ("struct", "enum", "fn", ...) of
# +symbol+, optionally disambiguated by a namespaced +full_path+.
def symbol_type(symbol, full_path = '')
symbol_data = "{'symbol': '#{symbol}', 'full_path': '#{full_path}'}"
vim.command("echo rustbucket#identifier#New(#{symbol_data}).Type()")
end
specify "resolving a basic symbol using tags" do
edit_file 'test.rs', <<~EOF
struct TestStruct {
foo: Bar,
}
enum TestEnum {
Foo,
Bar,
}
impl TestStruct {
fn test_fn_1() { }
pub fn test_fn_2(&self) { }
}
impl TestEnum {
pub(crate) fn test_fn_1(&self) { }
}
fn main() {
TestStruct::test_fn_1()
}
EOF
generate_tags
expect(symbol_type("TestStruct")).to eq 'struct'
expect(symbol_type("TestEnum")).to eq 'enum'
expect(symbol_type("test_fn_1")).to eq 'fn'
expect(symbol_type("test_fn_2")).to eq 'fn'
end
specify "resolving a symbol based on namespace" do
edit_file 'test.rs', <<~EOF
mod ns1 {
struct TestType { foo: Bar }
}
mod ns2 {
enum TestType { foo: Bar }
}
EOF
generate_tags
# Same symbol name in two modules: full_path must disambiguate.
expect(symbol_type("TestType", "ns1::TestType")).to eq 'struct'
expect(symbol_type("TestType", "ns2::TestType")).to eq 'enum'
end
end
33f7c37b6ac66152cf71dd7f9d4ff4e73f0f3b50 | 1,691 | require "formula_installer"
require "development_tools"
require "messages"
module Homebrew
module_function
# Reinstalls formula +f+ using the options it was previously built with
# (merged with flags from the current command line). The existing keg is
# stashed at backup_path first; on any failure the stash is restored, on
# success it is deleted. Pass build_from_source: true to force a source build.
def reinstall_formula(f, build_from_source: false)
if f.opt_prefix.directory?
keg = Keg.new(f.opt_prefix.resolved_path)
keg_had_linked_opt = true
keg_was_linked = keg.linked?
# Unlink and move the current keg aside so it can be rolled back.
backup keg
end
# Merge command-line flags with the options recorded at install time,
# keeping only options the formula still defines.
build_options = BuildOptions.new(Options.create(ARGV.flags_only), f.options)
options = build_options.used_options
options |= f.build.used_options
options &= f.options
fi = FormulaInstaller.new(f)
fi.options = options
fi.invalid_option_names = build_options.invalid_option_names
fi.build_bottle = ARGV.build_bottle? || (!f.bottled? && f.build.bottle?)
fi.interactive = ARGV.interactive?
fi.git = ARGV.git?
# Preserve the keg's previous linked/unlinked state.
fi.link_keg ||= keg_was_linked if keg_had_linked_opt
fi.build_from_source = true if build_from_source
fi.prelude
oh1 "Reinstalling #{Formatter.identifier(f.full_name)} #{options.to_a.join " "}"
fi.install
fi.finish
rescue FormulaInstallationAlreadyAttemptedError
# This run already attempted to install f; treat as a no-op.
nil
rescue Exception # rubocop:disable Lint/RescueException
# Restore the stashed keg on *any* error (including interrupts), re-raise.
ignore_interrupts { restore_backup(keg, keg_was_linked) }
raise
else
# Success: the stashed copy of the old keg is no longer needed.
backup_path(keg).rmtree if backup_path(keg).exist?
end
# Unlinks +keg+ and renames it to backup_path(keg) so a failed reinstall
# can be rolled back by restore_backup.
def backup(keg)
keg.unlink
keg.rename backup_path(keg)
end
# Moves a stashed keg back into place after a failed reinstall, relinking
# it if it was linked beforehand. No-op when no backup directory exists.
def restore_backup(keg, keg_was_linked)
path = backup_path(keg)
return unless path.directory?
# Remove any partially-installed keg before renaming the backup back.
Pathname.new(keg).rmtree if keg.exist?
path.rename keg
keg.link if keg_was_linked
end
# Location where the existing keg is stashed while a reinstall is in
# flight; the sibling backup/restore_backup helpers rename the keg to
# and from this path.
def backup_path(path)
  Pathname.new(path.to_s + ".reinstall")
end
end
| 26.421875 | 84 | 0.694855 |
1823f8a3ec82af6dca311a3ee6cbdd2ca54a4fc1 | 3,144 | class Qt5Base < Formula
desc "Qt5 Core Libraries"
homepage "http://qt-project.org/"
url "http://download.qt.io/official_releases/qt/5.10/5.10.0/submodules/qtbase-everywhere-src-5.10.0.tar.xz"
sha256 "fd5578cd320a13617c12cf2b19439386b203d6d45548e855f94e07be9829f762"
keg_only "Qt5 very picky about install locations, so keep it isolated"
# depends_on :xcode => :build if OS.mac?
# depends_on "pkg-config" => :build
unless OS.mac?
# depends_on "icu4c"
# depends_on "bxcppdev/bxtap/fontconfig"
# depends_on "freetype"
# depends_on "zlib"
end
conflicts_with "qt5", :because => "Core homebrew ships a complete Qt5 install"
# try submodules as resources
resource "qtsvg" do
url "http://download.qt.io/official_releases/qt/5.10/5.10.0/submodules/qtsvg-everywhere-src-5.10.0.tar.xz"
sha256 "4a2aa7cae70a3156846655422b9ed884d8b08b3707b95858e49c7cf9afe5e7b0"
end
def install
# Base configure flags: accept the open-source license, use bundled
# image/font/pcre libraries, skip tests/examples, and disable AVX for
# wider CPU compatibility.
args = %W[
-verbose
-prefix #{prefix}
-release
-opensource -confirm-license
-system-zlib
-qt-libpng
-qt-libjpeg
-qt-freetype
-qt-pcre
-nomake tests
-nomake examples
-pkg-config
-no-avx
-no-avx2
-c++std c++14
]
if OS.linux?
# Minimizes X11 dependencies
# See
# https://github.com/Linuxbrew/homebrew-core/pull/1062
args << "-qt-xcb"
# Ensure GUI can display fonts, fontconfig option
# must be used with system-freetype. Dependence on
# brewed fontconfig on Linux should pull both in
args << "-fontconfig"
args << "-system-freetype"
# Need to use -R as qt5 seemingly ignores LDFLAGS, and doesn't
# use -L paths provided by pkg-config. Configure can have odd
# effects depending on what system provides.
# Qt5 is keg-only, so add its own libdir
args << "-R#{lib}"
# If we depend on anything from brew, then need the core path
args << "-R#{HOMEBREW_PREFIX}/lib"
# If we end up depending on any keg_only Formulae, add extra
# -R lines for each of them below here.
end
system "./configure", *args
# Cannot parallelize build on OSX
system "make"
system "make", "install"
# Build and install the qtsvg submodule against the freshly installed qtbase.
resource("qtsvg").stage do
system "#{bin}/qmake"
system "make", "-j#{ENV.make_jobs}", "install"
end
end
# User-facing note printed after install; the heredoc body is the message.
def caveats; <<-EOS
We agreed to the Qt opensource license for you.
If this is unacceptable you should uninstall.
EOS
end
test do
# Build a minimal console app with qmake to prove the installed
# toolchain and core libraries work end-to-end.
(testpath/"hello.pro").write <<-EOS
QT += core
QT -= gui
TARGET = hello
CONFIG += console
CONFIG -= app_bundle
TEMPLATE = app
SOURCES += main.cpp
EOS
(testpath/"main.cpp").write <<-EOS
#include <QCoreApplication>
#include <QDebug>
int main(int argc, char *argv[])
{
QCoreApplication a(argc, argv);
qDebug() << "Hello World!";
return 0;
}
EOS
system bin/"qmake", testpath/"hello.pro"
system "make"
assert File.exist?("hello")
assert File.exist?("main.o")
system "./hello"
end
end
| 27.33913 | 114 | 0.624046 |
e8fe297bcdf4bbf1b887475d6fb6f5851b61628a | 607 | cask 'idocument-plus' do
version '2.7'
sha256 'a3ec0e2ada87b7795f1d7bc1b12610a800c2ce6b4442d07e7e24703c8f34a1e2'
# amazonaws.com/IcyBlaze-iDocument2 was verified as official when first introduced to the cask
url "https://s3.amazonaws.com/IcyBlaze-iDocument2/Download/idocument_plus_#{version}.dmg"
appcast 'http://www.icyblaze.com/idocument/changelogs/idocumentplus_release.html',
checkpoint: '816e6021878b62ab2c0e0f2f808255a08ae4b2dd1e4da7cfdd62170e7c7a1cde'
name 'iDocument Plus'
homepage 'http://www.icyblaze.com/idocument/'
license :commercial
app 'iDocument Plus.app'
end
| 40.466667 | 96 | 0.797364 |
e28424bec9fac479b95dc11446f79a0ec38b23a7 | 4,543 | require "mega_lotto"
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
# Standard `rspec --init` boilerplate; only the two blocks below are active,
# everything inside =begin/=end is suggested configuration left disabled.
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
=begin
# These two settings work together to allow you to limit a spec run
# to individual examples or groups you care about by tagging them with
# `:focus` metadata. When nothing is tagged with `:focus`, all examples
# get run.
config.filter_run :focus
config.run_all_when_everything_filtered = true
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = "spec/examples.txt"
# Limits the available syntax to the non-monkey patched syntax that is
# recommended. For more details, see:
# - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
# - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://myronmars.to/n/dev-blog/2014/05/notable-changes-in-rspec-3#new__config_option_to_disable_rspeccore_monkey_patching
config.disable_monkey_patching!
# This setting enables warnings. It's recommended, but in some cases may
# be too noisy due to issues in dependencies.
config.warnings = true
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = 'doc'
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
=end
end
abec26ff9f51f415e498c60402020aba71396e0c | 140 | require_relative '../../spec_helper'
require 'date'
describe "DateTime._strptime" do
it "needs to be reviewed for spec completeness"
end
| 20 | 49 | 0.757143 |
f7b5f234065def72d1b5747a4776acd0eac2f12f | 437 | class TaxonNameClassification::Icvcn::Valid::Accepted < TaxonNameClassification::Icvcn::Valid
NOMEN_URI='http://purl.obolibrary.org/obo/NOMEN_0000127'.freeze
def self.disjoint_taxon_name_classes
self.parent.disjoint_taxon_name_classes +
self.collect_descendants_and_itself_to_s(TaxonNameClassification::Icvcn::Valid::Unaccepted)
end
def self.gbif_status
'valid'
end
def self.assignable
true
end
end | 24.277778 | 99 | 0.778032 |
5d2c2fb3bac7887c197dc8291be64d8371111f6c | 1,936 | # frozen_string_literal: true
module JekyllAdmin
module FileHelper
# The file the user requested in the URL
def requested_file
find_by_path(path)
end
# The file ultimately written to disk
# This may be the requested file, or in the case of a rename will be read
# from the new path that was actually written to disk
def written_file
find_by_path(write_path)
end
# Write a file to disk with the given content
def write_file(path, content)
Jekyll.logger.debug "WRITING:", path
path = sanitized_path(path)
FileUtils.mkdir_p File.dirname(path)
File.open(path, "wb") do |file|
file.write(content)
end
# we should fully process in dev mode for tests to pass
if ENV["RACK_ENV"] == "production"
site.read
else
site.process
end
end
# Delete the file at the given path
def delete_file(path)
Jekyll.logger.debug "DELETING:", path
FileUtils.rm_f sanitized_path(path)
site.process
end
def delete_file_without_process(path)
Jekyll.logger.debug "DELETING:", path
FileUtils.rm_f sanitized_path(path)
end
private
def ensure_requested_file
ensure_file(requested_file)
end
def ensure_written_file
ensure_file(written_file)
end
def find_by_path(path)
files = case namespace
when "collections"
collection.docs
when "data"
DataFile.all
when "drafts"
drafts
when "pages", "static_files"
site.public_send(namespace.to_sym)
else
[]
end
files.find { |f| sanitized_path(f.path) == path }
end
def ensure_file(file)
render_404 if file.nil?
end
def ensure_directory
render_404 unless Dir.exist?(directory_path)
end
end
end
| 24.2 | 77 | 0.614669 |
03fdc78e908f29ad2fc0374712b078c1bab644e4 | 1,696 | class Velero < Formula
desc "Disaster recovery for Kubernetes resources and persistent volumes"
homepage "https://github.com/vmware-tanzu/velero"
url "https://github.com/vmware-tanzu/velero/archive/v1.5.3.tar.gz"
sha256 "f541416805f792c9f6e5ec2620b0c0f5d43d32ee6714f99fb9fadd8ee9072378"
license "Apache-2.0"
bottle do
cellar :any_skip_relocation
sha256 "48533b571d22497df4c10e6ec43a356e060923ebdeebf79d4294bb06ac4dd222" => :big_sur
sha256 "e38bd17473efe454d815194e52ca516ce08ca1623e0d0a2f33a7043ac22f51d1" => :arm64_big_sur
sha256 "e88af008dbe0f1aacd927c9d1d545cb4377b1bbcbdefd8ea7ef60d29eb52fae5" => :catalina
sha256 "d8c95c53579e629b85ad556d49f01fc9b669e97d5a0d08158af20faaf1fe2702" => :mojave
end
depends_on "go" => :build
def install
system "go", "build", *std_go_args, "-installsuffix", "static",
"-ldflags",
"-s -w -X github.com/vmware-tanzu/velero/pkg/buildinfo.Version=v#{version}",
"./cmd/velero"
# Install bash completion
output = Utils.safe_popen_read("#{bin}/velero", "completion", "bash")
(bash_completion/"velero").write output
# Install zsh completion
output = Utils.safe_popen_read("#{bin}/velero", "completion", "zsh")
(zsh_completion/"_velero").write output
end
test do
output = shell_output("#{bin}/velero 2>&1", 1)
assert_match "Velero is a tool for managing disaster recovery", output
assert_match "Version: v#{version}", shell_output("#{bin}/velero version --client-only 2>&1")
system bin/"velero", "client", "config", "set", "TEST=value"
assert_match "value", shell_output("#{bin}/velero client config get 2>&1")
end
end
| 41.365854 | 97 | 0.714623 |
e8f5f8ceaddde3fe4bfb0423ae4d3bdebb5e7b24 | 2,148 | # frozen_string_literal: true
require 'recaptcha/configuration'
require 'uri'
require 'net/http'
if defined?(Rails)
require 'recaptcha/railtie'
else
require 'recaptcha/client_helper'
require 'recaptcha/verify'
end
module Recaptcha
CONFIG = {
'server_url' => 'https://recaptcha.net/recaptcha/api.js',
'verify_url' => 'https://recaptcha.net/recaptcha/api/siteverify'
}.freeze
USE_SSL_BY_DEFAULT = false
HANDLE_TIMEOUTS_GRACEFULLY = true
DEFAULT_TIMEOUT = 3
# Gives access to the current Configuration.
def self.configuration
@configuration ||= Configuration.new
end
# Allows easy setting of multiple configuration options. See Configuration
# for all available options.
#--
# The temp assignment is only used to get a nicer rdoc. Feel free to remove
# this hack.
#++
def self.configure
config = configuration
yield(config)
end
def self.with_configuration(config)
original_config = {}
config.each do |key, value|
original_config[key] = configuration.send(key)
configuration.send("#{key}=", value)
end
yield if block_given?
ensure
original_config.each { |key, value| configuration.send("#{key}=", value) }
end
def self.get(verify_hash, options)
http = if Recaptcha.configuration.proxy
proxy_server = URI.parse(Recaptcha.configuration.proxy)
Net::HTTP::Proxy(proxy_server.host, proxy_server.port, proxy_server.user, proxy_server.password)
else
Net::HTTP
end
query = URI.encode_www_form(verify_hash)
uri = URI.parse(Recaptcha.configuration.verify_url + '?' + query)
http_instance = http.new(uri.host, uri.port)
http_instance.read_timeout = http_instance.open_timeout = options[:timeout] || DEFAULT_TIMEOUT
http_instance.use_ssl = true if uri.port == 443
request = Net::HTTP::Get.new(uri.request_uri)
http_instance.request(request).body
end
def self.i18n(key, default)
if defined?(I18n)
I18n.translate(key, default: default)
else
default
end
end
class RecaptchaError < StandardError
end
class VerifyError < RecaptchaError
end
end
| 25.879518 | 102 | 0.703445 |
21cd71e15f5852c7c3ed4bdb5ff0093bbfde6629 | 2,368 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20160722234053) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
enable_extension "hstore"
create_table "herd_assets", force: :cascade do |t|
t.string "file_name"
t.integer "file_size"
t.string "content_type"
t.string "type"
t.text "meta"
t.integer "parent_asset_id"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "transform_id"
t.integer "assetable_id"
t.string "assetable_type"
t.integer "position"
end
add_index "herd_assets", ["assetable_id"], name: "index_herd_assets_on_assetable_id", using: :btree
add_index "herd_assets", ["assetable_type"], name: "index_herd_assets_on_assetable_type", using: :btree
add_index "herd_assets", ["transform_id"], name: "index_herd_assets_on_transform_id", using: :btree
create_table "herd_pages", force: :cascade do |t|
t.string "path"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "herd_transforms", force: :cascade do |t|
t.string "type"
t.hstore "options"
t.datetime "created_at"
t.datetime "updated_at"
t.string "assetable_type"
t.string "name"
end
add_index "herd_transforms", ["assetable_type"], name: "index_herd_transforms_on_assetable_type", using: :btree
add_index "herd_transforms", ["name"], name: "index_herd_transforms_on_name", using: :btree
create_table "posts", force: :cascade do |t|
t.string "title"
t.text "body"
t.datetime "created_at"
t.datetime "updated_at"
end
end
| 36.430769 | 113 | 0.726351 |
ffb96c7314a0b9bc8fb6814d921e06e810769c70 | 349 | # encoding: utf-8
#
require 'spec_helper'
describe DealCategory do
describe '#attributes' do
it { should be_subclass_of(Category) }
end
describe '#associations' do
it { should have_many(:deals).as_inverse_of(:category).with_dependent(:nullify) }
end
describe '#fabrication' do
it { should have_valid_fabricator }
end
end
| 18.368421 | 85 | 0.716332 |
1cc19dc8dcfd4b069a95c681ffb0c8c91cb4a5ed | 2,597 | $LOAD_PATH << File.join(File.dirname(__FILE__), '..', '..', 'lib')
require 'logger'
require 'benchmark'
require 'message_bus'
require_relative "../helpers"
backends = ENV['MESSAGE_BUS_BACKENDS'].split(",").map(&:to_sym)
channel = "/foo"
iterations = 10_000
results = []
puts "Running publication benchmark with #{iterations} iterations on backends: #{backends.inspect}"
benchmark_publication_only = lambda do |bm, backend|
bus = MessageBus::Instance.new
bus.configure(test_config_for_backend(backend))
bm.report("#{backend} - publication only") do
iterations.times { bus.publish(channel, "Hello world") }
end
bus.reset!
bus.destroy
end
benchmark_subscription_no_trimming = lambda do |bm, backend|
test_title = "#{backend} - subscription no trimming"
bus = MessageBus::Instance.new
bus.configure(test_config_for_backend(backend))
bus.reliable_pub_sub.max_backlog_size = iterations
bus.reliable_pub_sub.max_global_backlog_size = iterations
messages_received = 0
bus.after_fork
bus.subscribe(channel) do |_message|
messages_received += 1
end
bm.report(test_title) do
iterations.times { bus.publish(channel, "Hello world") }
wait_for(60000) { messages_received == iterations }
end
results << "[#{test_title}]: #{iterations} messages sent, #{messages_received} received, rate of #{(messages_received.to_f / iterations.to_f) * 100}%"
bus.reset!
bus.destroy
end
benchmark_subscription_with_trimming = lambda do |bm, backend|
test_title = "#{backend} - subscription with trimming"
bus = MessageBus::Instance.new
bus.configure(test_config_for_backend(backend))
bus.reliable_pub_sub.max_backlog_size = (iterations / 10)
bus.reliable_pub_sub.max_global_backlog_size = (iterations / 10)
messages_received = 0
bus.after_fork
bus.subscribe(channel) do |_message|
messages_received += 1
end
bm.report(test_title) do
iterations.times { bus.publish(channel, "Hello world") }
wait_for(60000) { messages_received == iterations }
end
results << "[#{test_title}]: #{iterations} messages sent, #{messages_received} received, rate of #{(messages_received.to_f / iterations.to_f) * 100}%"
bus.reset!
bus.destroy
end
puts
Benchmark.bm(60) do |bm|
backends.each do |backend|
benchmark_publication_only.call(bm, backend)
end
puts
backends.each do |backend|
benchmark_subscription_no_trimming.call(bm, backend)
end
results << nil
puts
backends.each do |backend|
benchmark_subscription_with_trimming.call(bm, backend)
end
end
puts
results.each do |result|
puts result
end
| 25.213592 | 152 | 0.732769 |
7a1c60827119ed3c1ab43753d8a08265ce25268f | 4,958 | #Processes the Sexp from routes.rb. Stores results in tracker.routes.
#
#Note that it is only interested in determining what methods on which
#controllers are used as routes, not the generated URLs for routes.
class Brakeman::Rails3RoutesProcessor < Brakeman::BaseProcessor
include Brakeman::RouteHelper
attr_reader :map, :nested, :current_controller
def initialize tracker
super
@map = Sexp.new(:lvar, :map)
@nested = nil #used for identifying nested targets
@prefix = [] #Controller name prefix (a module name, usually)
@current_controller = nil
@with_options = nil #For use inside map.with_options
end
def process_routes exp
process exp.dup
end
def process_call exp
case exp[2]
when :resources
process_resources exp
when :resource
process_resource exp
when :root
process_root exp
when :member
process_default exp
when :get, :put, :post, :delete
process_verb exp
when :match
process_match exp
else
exp
end
end
def process_iter exp
case exp[1][2]
when :namespace
process_namespace exp
when :resource
process_resource_block exp
when :resources
process_resources_block exp
when :scope
process_scope_block exp
else
super
end
end
def process_namespace exp
name = exp[1][3][1][1]
block = exp[3]
@prefix << camelize(name)
process block
@prefix.pop
exp
end
def process_root exp
args = exp[3][1..-1]
hash_iterate args[0] do |k, v|
if symbol? k and k[1] == :to and string? v[1]
controller, action = extract_action v[1]
self.current_controller = controller
@tracker.routes[@current_controller] << action.to_sym
break
end
end
exp
end
def process_match exp
args = exp[3][1..-1]
#Check if there is an unrestricted action parameter
action_variable = false
if string? args[0]
matcher = args[0][1]
if matcher == ':controller(/:action(/:id(.:format)))' or
matcher.include? ':controller' and matcher.include? ':action' #Default routes
@tracker.routes[:allow_all_actions] = args[0]
return exp
elsif matcher.include? ':action'
action_variable = true
end
end
if hash? args[-1]
hash_iterate args[-1] do |k, v|
if string? k and string? v
controller, action = extract_action v[1]
self.current_controller = controller
@tracker.routes[@current_controller] << action.to_sym if action
elsif symbol? k and k[1] == :action
@tracker.routes[@current_controller] << v[1].to_sym
action_variable = false
end
end
end
if action_variable
@tracker.routes[@current_controller] = :allow_all_actions
end
exp
end
def process_verb exp
args = exp[3][1..-1]
if symbol? args[0] and not hash? args[1]
@tracker.routes[@current_controller] << args[0][1]
elsif hash? args[1]
hash_iterate args[1] do |k, v|
if symbol? k and k[1] == :to and string? v
controller, action = extract_action v[1]
self.current_controller = controller
@tracker.routes[@current_controller] << action.to_sym
end
end
elsif string? args[0]
route = args[0][1].split "/"
if route.length != 2
@tracker.routes[@current_controller] << route[0].to_sym
else
self.current_controller = route[0]
@tracker.routes[@current_controller] << route[1].to_sym
@current_controller = nil
end
else hash? args[0]
hash_iterate args[0] do |k, v|
if string? v
controller, action = extract_action v[1]
self.current_controller = controller
@tracker.routes[@current_controller] << action.to_sym
break
end
end
end
exp
end
def process_resources exp
if exp[3] and exp[3][2] and exp[3][2][0] == :hash
self.current_controller = exp[3][1][1]
#handle hash
add_resources_routes
elsif exp[3][1..-1].all? { |s| symbol? s }
exp[3][1..-1].each do |s|
self.current_controller = s[1]
add_resources_routes
end
end
exp
end
def process_resource exp
#Does resource even take more than one controller name?
exp[3][1..-1].each do |s|
if symbol? s
self.current_controller = pluralize(s[1].to_s)
add_resource_routes
else
#handle something else, like options
#or something?
end
end
exp
end
def process_resources_block exp
process_resources exp[1]
process exp[3]
exp
end
def process_resource_block exp
process_resource exp[1]
process exp[3]
exp
end
def process_scope_block exp
#How to deal with options?
process exp[3]
exp
end
def extract_action str
str.split "#"
end
end
| 22.953704 | 85 | 0.623235 |
01aec040d202c2f98fee887b3705fa17dd08ef65 | 741 | require 'formula'
class Unrar < Formula
homepage 'http://www.rarlab.com'
url 'http://www.rarlab.com/rar/unrarsrc-5.1.6.tar.gz'
sha1 'af9220d85cb96ce102b7375c1e5e501b64fc89fd'
def install
system "make"
bin.install 'unrar'
end
test do
contentpath = "directory/file.txt"
rarpath = testpath/"archive.rar"
data = 'UmFyIRoHAM+QcwAADQAAAAAAAACaCHQggDIACQAAAAkAAAADtPej1LZwZE' +
'QUMBIApIEAAGRpcmVjdG9yeVxmaWxlLnR4dEhvbWVicmV3CsQ9ewBABwA='
rarpath.write data.unpack('m').first
assert_equal contentpath, `#{bin}/unrar lb #{rarpath}`.strip
assert_equal 0, $?.exitstatus
system "#{bin}/unrar", "x", rarpath, testpath
assert_equal "Homebrew\n", (testpath/contentpath).read
end
end
| 27.444444 | 74 | 0.711201 |
6a79ea96494ab5ad5f89f0ddb8f0597d7fe67536 | 2,221 | module GreenhouseIo
class JobBoard
include HTTParty
include GreenhouseIo::API
attr_accessor :api_token, :organization
base_uri 'https://api.greenhouse.io/v1'
def initialize(api_token = nil, default_options = {})
@api_token = api_token || GreenhouseIo.configuration.api_token
@organization = default_options.delete(:organization) || GreenhouseIo.configuration.organization
end
def offices(options = {})
get_from_job_board_api("/boards/#{ query_organization(options) }/embed/offices")
end
def office(id, options = {})
get_from_job_board_api("/boards/#{ query_organization(options) }/embed/office", query: { id: id })
end
def departments(options = {})
get_from_job_board_api("/boards/#{ query_organization(options) }/embed/departments")
end
def department(id, options = {})
get_from_job_board_api("/boards/#{ query_organization(options) }/embed/department", query: { id: id })
end
def jobs(options = {})
get_from_job_board_api("/boards/#{ query_organization(options) }/embed/jobs", query: { content: options[:content] })
end
def job(id, options = {})
get_from_job_board_api("/boards/#{ query_organization(options) }/embed/job", query: { id: id, questions: options[:questions] })
end
def apply_to_job(job_form_hash)
post_to_job_board_api('/applications', { :body => job_form_hash, :basic_auth => basic_auth })
end
private
def query_organization(options_hash)
org = options_hash[:organization] || @organization
org.nil? ? (raise GreenhouseIo::Error.new("organization can't be blank")) : org
end
def get_from_job_board_api(url, options = {})
response = get_response(url, options)
if response.code == 200
parse_json(response)
else
raise GreenhouseIo::Error.new(response.code)
end
end
def post_to_job_board_api(url, options)
response = post_response(url, options)
if response.code == 200
response.include?("success") ? parse_json(response) : raise(GreenhouseIo::Error.new(response["reason"]))
else
raise GreenhouseIo::Error.new(response.code)
end
end
end
end
| 33.149254 | 133 | 0.676272 |
1d874a61794e9d1cca322d125b713bafa3ceb4d8 | 13,509 | # Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
Devise.setup do |config|
# The secret key used by Devise. Devise uses this key to generate
# random tokens. Changing this key will render invalid all existing
# confirmation, reset password and unlock tokens in the database.
# Devise will use the `secret_key_base` as its `secret_key`
# by default. You can change it below and use your own secret key.
# config.secret_key = '54f09cb0a73f8c8e724523cf40055eb86e65219251b3278f74c2336f67a735d325777824748af6f1552a11e5abd35b4a36891d9bb71a590f1b5eafc306088e8e'
# ==> Mailer Configuration
# Configure the e-mail address which will be shown in Devise::Mailer,
# note that it will be overwritten if you use your own mailer class
# with default "from" parameter.
config.mailer_sender = '[email protected]'
# Configure the class responsible to send e-mails.
# config.mailer = 'Devise::Mailer'
# Configure the parent class responsible to send e-mails.
# config.parent_mailer = 'ActionMailer::Base'
# ==> ORM configuration
# Load and configure the ORM. Supports :active_record (default) and
# :mongoid (bson_ext recommended) by default. Other ORMs may be
# available as additional gems.
require 'devise/orm/active_record'
# ==> Configuration for any authentication mechanism
# Configure which keys are used when authenticating a user. The default is
# just :email. You can configure it to use [:username, :subdomain], so for
# authenticating a user, both parameters are required. Remember that those
# parameters are used only when authenticating and not when retrieving from
# session. If you need permissions, you should implement that in a before filter.
# You can also supply a hash where the value is a boolean determining whether
# or not authentication should be aborted when the value is not present.
# config.authentication_keys = [:email]
# Configure parameters from the request object used for authentication. Each entry
# given should be a request method and it will automatically be passed to the
# find_for_authentication method and considered in your model lookup. For instance,
# if you set :request_keys to [:subdomain], :subdomain will be used on authentication.
# The same considerations mentioned for authentication_keys also apply to request_keys.
# config.request_keys = []
# Configure which authentication keys should be case-insensitive.
# These keys will be downcased upon creating or modifying a user and when used
# to authenticate or find a user. Default is :email.
config.case_insensitive_keys = [:email]
# Configure which authentication keys should have whitespace stripped.
# These keys will have whitespace before and after removed upon creating or
# modifying a user and when used to authenticate or find a user. Default is :email.
config.strip_whitespace_keys = [:email]
# Tell if authentication through request.params is enabled. True by default.
# It can be set to an array that will enable params authentication only for the
# given strategies, for example, `config.params_authenticatable = [:database]` will
# enable it only for database (email + password) authentication.
# config.params_authenticatable = true
# Tell if authentication through HTTP Auth is enabled. False by default.
# It can be set to an array that will enable http authentication only for the
# given strategies, for example, `config.http_authenticatable = [:database]` will
# enable it only for database authentication. The supported strategies are:
# :database = Support basic authentication with authentication key + password
# config.http_authenticatable = false
# If 401 status code should be returned for AJAX requests. True by default.
# config.http_authenticatable_on_xhr = true
# The realm used in Http Basic Authentication. 'Application' by default.
# config.http_authentication_realm = 'Application'
# It will change confirmation, password recovery and other workflows
# to behave the same regardless if the e-mail provided was right or wrong.
# Does not affect registerable.
# config.paranoid = true
# By default Devise will store the user in session. You can skip storage for
# particular strategies by setting this option.
# Notice that if you are skipping storage for all authentication paths, you
# may want to disable generating routes to Devise's sessions controller by
# passing skip: :sessions to `devise_for` in your config/routes.rb
config.skip_session_storage = [:http_auth]
# By default, Devise cleans up the CSRF token on authentication to
# avoid CSRF token fixation attacks. This means that, when using AJAX
# requests for sign in and sign up, you need to get a new CSRF token
# from the server. You can disable this option at your own risk.
# config.clean_up_csrf_token_on_authentication = true
# When false, Devise will not attempt to reload routes on eager load.
# This can reduce the time taken to boot the app but if your application
# requires the Devise mappings to be loaded during boot time the application
# won't boot properly.
# config.reload_routes = true
# ==> Configuration for :database_authenticatable
# For bcrypt, this is the cost for hashing the password and defaults to 11. If
# using other algorithms, it sets how many times you want the password to be hashed.
#
# Limiting the stretches to just one in testing will increase the performance of
# your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
# a value less than 10 in other environments. Note that, for bcrypt (the default
# algorithm), the cost increases exponentially with the number of stretches (e.g.
# a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation).
config.stretches = Rails.env.test? ? 1 : 11
# Set up a pepper to generate the hashed password.
# config.pepper = '37788503797b9f2520bb46c8fcd6f12374afa01d5a8ed82c8a5d982fb66d33bd5b490d1e675c5ea32a3965fec1657bd163f140bcf222b66435092fdc85044596'
# Send a notification email when the user's password is changed
# config.send_password_change_notification = false
# ==> Configuration for :confirmable
# A period that the user is allowed to access the website even without
# confirming their account. For instance, if set to 2.days, the user will be
# able to access the website for two days without confirming their account,
# access will be blocked just in the third day. Default is 0.days, meaning
# the user cannot access the website without confirming their account.
# config.allow_unconfirmed_access_for = 2.days
# A period that the user is allowed to confirm their account before their
# token becomes invalid. For example, if set to 3.days, the user can confirm
# their account within 3 days after the mail was sent, but on the fourth day
# their account can't be confirmed with the token any more.
# Default is nil, meaning there is no restriction on how long a user can take
# before confirming their account.
# config.confirm_within = 3.days
# If true, requires any email changes to be confirmed (exactly the same way as
# initial account confirmation) to be applied. Requires additional unconfirmed_email
# db field (see migrations). Until confirmed, new email is stored in
# unconfirmed_email column, and copied to email column on successful confirmation.
config.reconfirmable = true
# Defines which key will be used when confirming an account
# config.confirmation_keys = [:email]
# ==> Configuration for :rememberable
# The time the user will be remembered without asking for credentials again.
# config.remember_for = 2.weeks
# Invalidates all the remember me tokens when the user signs out.
config.expire_all_remember_me_on_sign_out = true
# If true, extends the user's remember period when remembered via cookie.
# config.extend_remember_period = false
# Options to be passed to the created cookie. For instance, you can set
# secure: true in order to force SSL only cookies.
# config.rememberable_options = {}
# ==> Configuration for :validatable
# Range for password length.
config.password_length = 6..128
# Email regex used to validate email formats. It simply asserts that
# one (and only one) @ exists in the given string. This is mainly
# to give user feedback and not to assert the e-mail validity.
config.email_regexp = /\A[^@\s]+@[^@\s]+\z/
# ==> Configuration for :timeoutable
# The time you want to timeout the user session without activity. After this
# time the user will be asked for credentials again. Default is 30 minutes.
# config.timeout_in = 30.minutes
# ==> Configuration for :lockable
# Defines which strategy will be used to lock an account.
# :failed_attempts = Locks an account after a number of failed attempts to sign in.
# :none = No lock strategy. You should handle locking by yourself.
# config.lock_strategy = :failed_attempts
# Defines which key will be used when locking and unlocking an account
# config.unlock_keys = [:email]
# Defines which strategy will be used to unlock an account.
# :email = Sends an unlock link to the user email
# :time = Re-enables login after a certain amount of time (see :unlock_in below)
# :both = Enables both strategies
# :none = No unlock strategy. You should handle unlocking by yourself.
# config.unlock_strategy = :both
# Number of authentication tries before locking an account if lock_strategy
# is failed attempts.
# config.maximum_attempts = 20
# Time interval to unlock the account if :time is enabled as unlock_strategy.
# config.unlock_in = 1.hour
# Warn on the last attempt before the account is locked.
# config.last_attempt_warning = true
# ==> Configuration for :recoverable
#
# Defines which key will be used when recovering the password for an account
# config.reset_password_keys = [:email]
# Time interval you can reset your password with a reset password key.
# Don't put a too small interval or your users won't have the time to
# change their passwords.
config.reset_password_within = 6.hours
# When set to false, does not sign a user in automatically after their password is
# reset. Defaults to true, so a user is signed in automatically after a reset.
# config.sign_in_after_reset_password = true
# ==> Configuration for :encryptable
# Allow you to use another hashing or encryption algorithm besides bcrypt (default).
# You can use :sha1, :sha512 or algorithms from others authentication tools as
# :clearance_sha1, :authlogic_sha512 (then you should set stretches above to 20
# for default behavior) and :restful_authentication_sha1 (then you should set
# stretches to 10, and copy REST_AUTH_SITE_KEY to pepper).
#
# Require the `devise-encryptable` gem when using anything other than bcrypt
# config.encryptor = :sha512
# ==> Scopes configuration
# Turn scoped views on. Before rendering "sessions/new", it will first check for
# "users/sessions/new". It's turned off by default because it's slower if you
# are using only default views.
# config.scoped_views = false
# Configure the default scope given to Warden. By default it's the first
# devise role declared in your routes (usually :user).
# config.default_scope = :user
# Set this configuration to false if you want /users/sign_out to sign out
# only the current scope. By default, Devise signs out all scopes.
# config.sign_out_all_scopes = true
# ==> Navigation configuration
# Lists the formats that should be treated as navigational. Formats like
# :html, should redirect to the sign in page when the user does not have
# access, but formats like :xml or :json, should return 401.
#
# If you have any extra navigational formats, like :iphone or :mobile, you
# should add them to the navigational formats lists.
#
# The "*/*" below is required to match Internet Explorer requests.
# config.navigational_formats = ['*/*', :html]
# The default HTTP method used to sign out a resource. Default is :delete.
config.sign_out_via = :delete
# ==> OmniAuth
# Add a new OmniAuth provider. Check the wiki for more information on setting
# up on your models and hooks.
# config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo'
# ==> Warden configuration
# If you want to use other strategies, that are not supported by Devise, or
# change the failure app, you can configure them inside the config.warden block.
#
# config.warden do |manager|
# manager.intercept_401 = false
# manager.default_strategies(scope: :user).unshift :some_external_strategy
# end
# ==> Mountable engine configurations
# When using Devise inside an engine, let's call it `MyEngine`, and this engine
# is mountable, there are some extra configurations to be taken into account.
# The following options are available, assuming the engine is mounted as:
#
# mount MyEngine, at: '/my_engine'
#
# The router that invoked `devise_for`, in the example above, would be:
# config.router_name = :my_engine
#
# When using OmniAuth, Devise cannot automatically set OmniAuth path,
# so you need to do it manually. For the users scope, it would be:
# config.omniauth_path_prefix = '/my_engine/users/auth'
end
| 49.123636 | 154 | 0.751129 |
e2769785dfbf77b411bf425f679f89bd81ca2a92 | 543 | # frozen_string_literal: true
require 'json'
require 'rb_json5'
require 'json_refs'
require 'json_schemer'
require_relative 'duh/version'
require_relative 'duh/exceptions'
require_relative 'duh/schema'
require_relative 'duh/loader'
module RgGen
module DUH
extend Core::Plugin
setup_plugin :'rggen-duh' do |plugin|
plugin.register_loader :register_map, :duh, Loader
plugin.files [
'duh/extractor/bit_assignment',
'duh/extractor/simple_extractors',
'duh/extractor/type'
]
end
end
end
| 20.884615 | 56 | 0.714549 |
4ae431b9bb6aa5fc3252ce70cc122a60fd640372 | 145 | module Knock
class Engine < ::Rails::Engine
config.eager_load_paths += Dir["#{config.root}/lib/**/"]
isolate_namespace Knock
end
end
| 20.714286 | 60 | 0.682759 |
2670fafffd313c33b70e3a6ee857e31f4e84d6f6 | 115 | class Api::Communities::Posts::Hot::WeekPolicy < ApplicationPolicy
def index?
!exiled? && !banned?
end
end
| 19.166667 | 66 | 0.695652 |
03a15cc236c9a17893b903915b31a328ea2f8686 | 363 | require "bundler/setup"
require "banacle"
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
| 24.2 | 66 | 0.752066 |
032f82a571f099582e45395c2b15b6cc3db9e33a | 273 | module Opbeat
# @api private
module DataBuilders
class DataBuilder
def initialize config
@config = config
end
attr_reader :config
end
%w{transactions error}.each do |f|
require "opbeat/data_builders/#{f}"
end
end
end
| 16.058824 | 41 | 0.626374 |
289dbb123485069775642da2d421ca15f1ff01f9 | 121 | class AddCreatorToBands < ActiveRecord::Migration
def change
add_reference :bands, :creator, index: true
end
end
| 20.166667 | 49 | 0.760331 |
f844dd0e1580936bec3b84f69c473eb0d1b283a8 | 5,302 | class ErrsController < ApplicationController
include ActionView::Helpers::TextHelper
before_filter :find_app, :except => [:index, :all, :destroy_several, :resolve_several, :unresolve_several, :merge_several, :unmerge_several]
before_filter :find_problem, :except => [:index, :all, :destroy_several, :resolve_several, :unresolve_several, :merge_several, :unmerge_several]
before_filter :find_selected_problems, :only => [:destroy_several, :resolve_several, :unresolve_several, :merge_several, :unmerge_several]
before_filter :set_sorting_params, :only => [:index, :all]
before_filter :set_tracker_params, :only => [:create_issue]
def index
app_scope = current_user.admin? ? App.all : current_user.apps
@problems = Problem.for_apps(app_scope).in_env(params[:environment]).unresolved.ordered_by(@sort, @order)
@selected_problems = params[:problems] || []
respond_to do |format|
format.html do
@problems = @problems.page(params[:page]).per(current_user.per_page)
end
format.atom
end
end
def all
app_scope = current_user.admin? ? App.all : current_user.apps
@problems = Problem.for_apps(app_scope).ordered_by(@sort, @order).page(params[:page]).per(current_user.per_page)
@selected_problems = params[:problems] || []
end
def show
@notices = @problem.notices.reverse_ordered.page(params[:notice]).per(1)
@notice = @notices.first
@comment = Comment.new
if request.headers['X-PJAX']
params["_pjax"] = nil
render :layout => false
end
end
# Files an issue for this problem, either on the current user's own GitHub
# account (tracker=user_github) or on the app's configured issue tracker,
# then redirects back to the problem page. Configuration problems and
# tracker failures are reported via flash[:error].
def create_issue
  # Create an issue on GitHub using user's github token
  if params[:tracker] == 'user_github'
    if !@app.github_repo?
      flash[:error] = "This app doesn't have a GitHub repo set up."
    elsif !current_user.github_account?
      flash[:error] = "You haven't linked your Github account."
    else
      @tracker = GithubIssuesTracker.new(
        :app => @app,
        :login => current_user.github_login,
        :oauth_token => current_user.github_oauth_token
      )
    end
  # Or, create an issue using the App's issue tracker
  elsif @app.issue_tracker_configured?
    @tracker = @app.issue_tracker
  # Otherwise, display error about missing tracker configuration.
  else
    flash[:error] = "This app has no issue tracker setup."
  end
  if flash[:error].blank? && @tracker
    begin
      @tracker.create_issue @problem, current_user
    rescue StandardError => ex
      # Rescue StandardError, not Exception, so signals/SystemExit still
      # propagate; interpolation avoids mutating the message string literal
      # in place as the previous `"..." << ex.message` did.
      Rails.logger.error "Error during issue creation: #{ex.message}"
      flash[:error] = "There was an error during issue creation: #{ex.message}"
    end
  end
  redirect_to app_err_path(@app, @problem)
end
# Clears the stored issue-tracker link from the problem, then returns to it.
def unlink_issue
  @problem.update_attribute :issue_link, nil
  redirect_to app_err_path(@app, @problem)
end
# Marks the problem resolved and returns the user to the referring page;
# without a referer to go back to, fall back to the app's page.
def resolve
  @problem.resolve!
  flash[:success] = 'Great news everyone! The err has been resolved.'
  redirect_to :back
rescue ActionController::RedirectBackError
  redirect_to app_path(@app)
end
# Bulk action: resolves every checked problem (loaded by
# +find_selected_problems+) and reports the count.
def resolve_several
  @selected_problems.each(&:resolve!)
  flash[:success] = "Great news everyone! #{pluralize(@selected_problems.count, 'err has', 'errs have')} been resolved."
  redirect_to :back
end
# Bulk action: reopens every checked problem and reports the count.
def unresolve_several
  @selected_problems.each(&:unresolve!)
  flash[:success] = "#{pluralize(@selected_problems.count, 'err has', 'errs have')} been unresolved."
  redirect_to :back
end
# Bulk action: merges the checked problems into a single one.
# At least two selections are required; otherwise nothing is changed.
def merge_several
  if @selected_problems.length < 2
    flash[:notice] = "You must select at least two errors to merge"
  else
    @merged_problem = Problem.merge!(@selected_problems)
    flash[:notice] = "#{@selected_problems.count} errors have been merged."
  end
  redirect_to :back
end
# Bulk action: splits previously merged problems back into their
# constituent problems and reports how many resulted.
def unmerge_several
  # flat_map is the idiomatic (and single-allocation) form of
  # map(&:...).flatten; each unmerge! yields the problems it produced.
  unmerged = @selected_problems.flat_map(&:unmerge!)
  flash[:success] = "#{pluralize(unmerged.length, 'err has', 'errs have')} been unmerged."
  redirect_to :back
end
# Bulk action: permanently deletes every checked problem.
def destroy_several
  @selected_problems.each(&:destroy)
  flash[:notice] = "#{pluralize(@selected_problems.count, 'err has', 'errs have')} been deleted."
  redirect_to :back
end
protected
# Loads the app for member actions and enforces visibility: non-admins
# must be watching the app, otherwise we raise DocumentNotFound just as
# if the app did not exist.
def find_app
  @app = App.find(params[:app_id])
  # Mongoid Bug: could not chain: current_user.apps.find_by_id!
  # apparently finding by 'watchers.email' and 'id' is broken
  raise(Mongoid::Errors::DocumentNotFound.new(App,@app.id)) unless current_user.admin? || current_user.watching?(@app)
end
# Loads the problem scoped to the already-authorized app (see +find_app+).
def find_problem
  @problem = @app.problems.find(params[:id])
end
# Seeds IssueTracker's URL options from the current request so links
# generated while filing an issue point back at this host/port/scheme.
def set_tracker_params
  IssueTracker.default_url_options[:host] = request.host
  IssueTracker.default_url_options[:port] = request.port
  IssueTracker.default_url_options[:protocol] = request.scheme
end
# Loads the problems checked in a bulk-action form. With nothing selected
# we bounce straight back with a notice instead of running the action.
def find_selected_problems
  err_ids = (params[:problems] || []).compact
  if err_ids.empty?
    flash[:notice] = "You have not selected any errors"
    redirect_to :back
  else
    # Array() guards against Problem.find returning a single document.
    @selected_problems = Array(Problem.find(err_ids))
  end
end
# Whitelists the sort column and direction from the query string so user
# input never reaches the order-by clause unchecked; defaults to newest
# notice first, descending.
def set_sorting_params
  @sort = params[:sort]
  # include? is the canonical name (member? is its alias); %w[] is the
  # preferred literal style for word arrays.
  @sort = "last_notice_at" unless %w[app message last_notice_at last_deploy_at count].include?(@sort)
  @order = params[:order] || "desc"
end
end
| 33.556962 | 146 | 0.680687 |
bf832decf6fd23c609ce7aa138108a33cb05f0a6 | 19,059 | require 'spec_helper'
describe Spree::CheckoutController, type: :controller do
let(:token) { 'some_token' }
let(:user) { stub_model(Spree::LegacyUser) }
let(:order) { FactoryBot.create(:order_with_totals) }
let(:address_params) do
address = FactoryBot.build(:address)
address.attributes.except('created_at', 'updated_at')
end
before do
allow(controller).to receive_messages try_spree_current_user: user
allow(controller).to receive_messages spree_current_user: user
allow(controller).to receive_messages current_order: order
end
context '#edit' do
it 'checks if the user is authorized for :edit' do
expect(controller).to receive(:authorize!).with(:edit, order, token)
request.cookie_jar.signed[:guest_token] = token
spree_get :edit, state: 'address'
end
it 'redirects to the cart path unless checkout_allowed?' do
allow(order).to receive_messages checkout_allowed?: false
spree_get :edit, state: 'delivery'
expect(response).to redirect_to(spree.cart_path)
end
it 'redirects to the cart path if current_order is nil' do
allow(controller).to receive(:current_order).and_return(nil)
spree_get :edit, state: 'delivery'
expect(response).to redirect_to(spree.cart_path)
end
it 'redirects to cart if order is completed' do
allow(order).to receive_messages(completed?: true)
spree_get :edit, state: 'address'
expect(response).to redirect_to(spree.cart_path)
end
# Regression test for #2280
it 'redirects to current step trying to access a future step' do
order.update_column(:state, 'address')
spree_get :edit, state: 'delivery'
expect(response).to redirect_to spree.checkout_state_path('address')
end
context 'when entering the checkout' do
before do
# The first step for checkout controller is address
# Transitioning into this state first is required
order.update_column(:state, 'address')
end
it 'associates the order with a user' do
order.update_column :user_id, nil
expect(order).to receive(:associate_user!).with(user)
spree_get :edit, {}, order_id: 1
end
end
end
context '#update' do
it 'checks if the user is authorized for :edit' do
expect(controller).to receive(:authorize!).with(:edit, order, token)
request.cookie_jar.signed[:guest_token] = token
spree_post :update, state: 'address'
end
context 'save successful' do
def spree_post_address
spree_post :update,
state: 'address',
order: {
bill_address_attributes: address_params,
use_billing: true
}
end
before do
# Must have *a* shipping method and a payment method so updating from address works
allow(order).to receive(:available_shipping_methods).
and_return [stub_model(Spree::ShippingMethod)]
allow(order).to receive(:available_payment_methods).
and_return [stub_model(Spree::PaymentMethod)]
allow(order).to receive(:ensure_available_shipping_rates).
and_return true
order.line_items << FactoryBot.create(:line_item)
end
context 'with the order in the cart state' do
before do
order.update_column(:state, 'cart')
allow(order).to receive_messages user: user
end
it 'assigns order' do
spree_post :update, state: 'address'
expect(assigns[:order]).not_to be_nil
end
it 'advances the state' do
spree_post_address
expect(order.reload.state).to eq('delivery')
end
it 'redirects the next state' do
spree_post_address
expect(response).to redirect_to spree.checkout_state_path('delivery')
end
context 'current_user respond to save address method' do
it 'calls persist order address on user' do
expect(user).to receive(:persist_order_address)
spree_post :update,
state: 'address',
order: {
bill_address_attributes: address_params,
use_billing: true
},
save_user_address: '1'
end
end
context 'current_user doesnt respond to persist_order_address' do
it 'doesnt raise any error' do
expect do
spree_post :update,
state: 'address',
order: {
bill_address_attributes: address_params,
use_billing: true
},
save_user_address: '1'
end.not_to raise_error
end
end
end
context 'with the order in the address state' do
before do
order.update_columns(ship_address_id: create(:address).id, state: 'address')
allow(order).to receive_messages user: user
end
context 'with a billing and shipping address' do
let(:bill_address_params) do
order.bill_address.attributes.except('created_at', 'updated_at')
end
let(:ship_address_params) do
order.ship_address.attributes.except('created_at', 'updated_at')
end
let(:update_params) do
{
state: 'address',
order: {
bill_address_attributes: bill_address_params,
ship_address_attributes: ship_address_params,
use_billing: false
}
}
end
before do
@expected_bill_address_id = order.bill_address.id
@expected_ship_address_id = order.ship_address.id
spree_post :update, update_params
order.reload
end
it 'updates the same billing and shipping address' do
expect(order.bill_address.id).to eq(@expected_bill_address_id)
expect(order.ship_address.id).to eq(@expected_ship_address_id)
end
end
end
context 'when in the confirm state' do
before do
allow(order).to receive_messages confirmation_required?: true
order.update_column(:state, 'confirm')
allow(order).to receive_messages user: user
# An order requires a payment to reach the complete state
# This is because payment_required? is true on the order
create(:payment, amount: order.total, order: order)
order.payments.reload
end
# This inadvertently is a regression test for #2694
it 'redirects to the order view' do
spree_post :update, state: 'confirm'
expect(response).to redirect_to spree.order_path(order)
end
it 'populates the flash message' do
spree_post :update, state: 'confirm'
expect(flash.notice).to eq(Spree.t(:order_processed_successfully))
end
it 'removes completed order from current_order' do
spree_post :update, { state: 'confirm' }, order_id: 'foofah'
expect(assigns(:current_order)).to be_nil
expect(assigns(:order)).to eql controller.current_order
end
end
# Regression test for #4190
context 'state_lock_version' do
let(:post_params) do
{
state: 'address',
order: {
bill_address_attributes: order.bill_address.attributes.except('created_at', 'updated_at'),
state_lock_version: 0,
use_billing: true
}
}
end
context 'correct' do
it 'properly updates and increment version' do
spree_post :update, post_params
expect(order.state_lock_version).to eq 1
end
end
context 'incorrect' do
before do
order.update_columns(state_lock_version: 1, state: 'address')
end
it 'order should receieve ensure_valid_order_version callback' do
expect_any_instance_of(described_class).to receive(:ensure_valid_state_lock_version)
spree_post :update, post_params
end
it 'order should receieve with_lock message' do
expect(order).to receive(:with_lock)
spree_post :update, post_params
end
it 'redirects back to current state' do
spree_post :update, post_params
expect(response).to redirect_to spree.checkout_state_path('address')
expect(flash[:error]).to eq 'The order has already been updated.'
end
end
end
end
context 'save unsuccessful' do
before do
allow(order).to receive_messages user: user
allow(order).to receive_messages update_attributes: false
end
it 'does not assign order' do
spree_post :update, state: 'address'
expect(assigns[:order]).not_to be_nil
end
it 'does not change the order state' do
spree_post :update, state: 'address'
end
it 'renders the edit template' do
spree_post :update, state: 'address'
expect(response).to render_template :edit
end
it 'renders order in payment state when payment fails' do
order.update_column(:state, 'confirm')
allow(controller).to receive(:insufficient_payment?).and_return(true)
spree_post :update, state: 'confirm'
expect(order.state).to eq('payment')
end
end
context 'when current_order is nil' do
before { allow(controller).to receive_messages current_order: nil }
it 'does not change the state if order is completed' do
expect(order).not_to receive(:update_attribute)
spree_post :update, state: 'confirm'
end
it 'redirects to the cart_path' do
spree_post :update, state: 'confirm'
expect(response).to redirect_to spree.cart_path
end
end
context 'Spree::Core::GatewayError' do
before do
allow(order).to receive_messages user: user
allow(order).to receive(:update_attributes).and_raise(Spree::Core::GatewayError.new('Invalid something or other.'))
spree_post :update, state: 'address'
end
it 'renders the edit template and display exception message' do
expect(response).to render_template :edit
expect(flash.now[:error]).to eq(Spree.t(:spree_gateway_error_flash_for_checkout))
expect(assigns(:order).errors[:base]).to include('Invalid something or other.')
end
end
context 'fails to transition from address' do
let(:order) do
FactoryBot.create(:order_with_line_items).tap do |order|
order.next!
expect(order.state).to eq('address')
end
end
before do
allow(controller).to receive_messages current_order: order
allow(controller).to receive_messages check_authorization: true
end
context 'when the country is not a shippable country' do
before do
order.ship_address.tap do |address|
# A different country which is not included in the list of shippable countries
address.country = FactoryBot.create(:country, name: 'Australia')
address.state_name = 'Victoria'
address.save
end
end
it 'due to no available shipping rates for any of the shipments' do
expect(order.shipments.count).to eq(1)
order.shipments.first.shipping_rates.delete_all
spree_put :update, state: order.state, order: {}
expect(flash[:error]).to eq(Spree.t(:items_cannot_be_shipped))
expect(response).to redirect_to(spree.checkout_state_path('address'))
end
end
context 'when the order is invalid' do
before do
allow(order).to receive_messages(update_from_params: true, next: nil)
order.errors.add(:base, 'Base error')
order.errors.add(:adjustments, 'error')
end
it 'due to the order having errors' do
spree_put :update, state: order.state, order: {}
expect(flash[:error]).to eql("Base error\nAdjustments error")
expect(response).to redirect_to(spree.checkout_state_path('address'))
end
end
end
context 'fails to transition from payment to complete' do
let(:order) do
FactoryBot.create(:order_with_line_items).tap do |order|
order.next! until order.state == 'payment'
# So that the confirmation step is skipped and we get straight to the action.
payment_method = FactoryBot.create(:simple_credit_card_payment_method)
payment = FactoryBot.create(:payment, payment_method: payment_method)
order.payments << payment
end
end
before do
allow(controller).to receive_messages current_order: order
allow(controller).to receive_messages check_authorization: true
end
it 'when GatewayError is raised' do
allow_any_instance_of(Spree::Payment).to receive(:process!).and_raise(Spree::Core::GatewayError.new(Spree.t(:payment_processing_failed)))
spree_put :update, state: order.state, order: {}
expect(flash[:error]).to eq(Spree.t(:payment_processing_failed))
end
end
end
context 'When last inventory item has been purchased' do
let(:product) { mock_model(Spree::Product, name: 'Amazing Object') }
let(:variant) { mock_model(Spree::Variant) }
let(:line_item) { mock_model Spree::LineItem, insufficient_stock?: true, amount: 0 }
let(:order) { create(:order_with_line_items) }
before do
allow(order).to receive_messages(insufficient_stock_lines: [line_item], state: 'payment')
configure_spree_preferences do |config|
config.track_inventory_levels = true
end
end
context 'and back orders are not allowed' do
before do
spree_post :update, state: 'payment'
end
it 'redirects to cart' do
expect(response).to redirect_to spree.cart_path
end
it 'sets flash message for no inventory' do
expect(flash[:error]).to eq(
Spree.t(:inventory_error_flash_for_insufficient_quantity, names: "'#{product.name}'")
)
end
end
end
context "order doesn't have a delivery step" do
before do
allow(order).to receive_messages(checkout_steps: ['cart', 'address', 'payment'])
allow(order).to receive_messages state: 'address'
allow(controller).to receive_messages check_authorization: true
end
it "doesn't set shipping address on the order" do
expect(order).not_to receive(:ship_address=)
spree_post :update, state: order.state
end
it "doesn't remove unshippable items before payment" do
expect { spree_post :update, state: 'payment' }.
not_to change(order, :line_items)
end
end
it 'does remove unshippable items before payment' do
allow(order).to receive_messages payment_required?: true
allow(controller).to receive_messages check_authorization: true
expect { spree_post :update, state: 'payment' }.
to change { order.reload.line_items.length }
end
context 'in the payment step' do
let(:order) { OrderWalkthrough.up_to(:payment) }
let(:payment_method_id) { Spree::PaymentMethod.first.id }
before do
expect(order.state).to eq 'payment'
allow(order).to receive_messages user: user
allow(order).to receive_messages confirmation_required?: true
end
it 'does not advance the order extra even when called twice' do
spree_put :update, state: 'payment',
order: { payments_attributes: [{ payment_method_id: payment_method_id }] }
order.reload
expect(order.state).to eq 'confirm'
spree_put :update, state: 'payment',
order: { payments_attributes: [{ payment_method_id: payment_method_id }] }
order.reload
expect(order.state).to eq 'confirm'
end
context 'with store credits payment' do
let(:user) { create(:user) }
let(:credit_amount) { order.total + 1.00 }
let(:put_attrs) do
{
state: 'payment',
apply_store_credit: 'Apply Store Credit',
order: {
payments_attributes: [{ payment_method_id: payment_method_id }]
}
}
end
before do
create(:store_credit_payment_method)
create(:store_credit, user: user, amount: credit_amount)
end
def expect_one_store_credit_payment(order, amount)
expect(order.payments.count).to eq 1
expect(order.payments.first.source).to be_a Spree::StoreCredit
expect(order.payments.first.amount).to eq amount
end
it 'can fully pay with store credits while removing other payment attributes' do
spree_put :update, put_attrs
order.reload
expect(order.state).to eq 'confirm'
expect_one_store_credit_payment(order, order.total)
end
it 'can fully pay with store credits while removing an existing card' do
credit_card = create(:credit_card, user: user, payment_method: Spree::PaymentMethod.first)
put_attrs[:order][:existing_card] = credit_card.id
spree_put :update, put_attrs
order.reload
expect(order.state).to eq 'confirm'
expect_one_store_credit_payment(order, order.total)
end
context 'partial payment' do
let(:credit_amount) { order.total - 1.00 }
it 'returns to payment for partial store credit' do
spree_put :update, put_attrs
order.reload
expect(order.state).to eq 'payment'
expect_one_store_credit_payment(order, credit_amount)
end
end
end
context 'remove store credits payment' do
let(:user) { create(:user) }
let(:credit_amount) { order.total - 1.00 }
let(:put_attrs) do
{
state: 'payment',
remove_store_credit: 'Remove Store Credit',
order: {
payments_attributes: [{ payment_method_id: payment_method_id }]
}
}
end
before do
create(:store_credit_payment_method)
create(:store_credit, user: user, amount: credit_amount)
order.add_store_credit_payments
end
def expect_invalid_store_credit_payment(order)
expect(order.payments.store_credits.with_state(:invalid).count).to eq 1
expect(order.payments.store_credits.with_state(:invalid).first.source).to be_a Spree::StoreCredit
end
it 'can fully pay with store credits while removing other payment attributes' do
spree_put :update, put_attrs
order.reload
expect(order.state).to eq 'payment'
expect_invalid_store_credit_payment(order)
end
end
end
end
| 34.652727 | 145 | 0.633979 |
e9fef2b8f74d77b40c17afc15e98797836770d16 | 874 | # frozen_string_literal: true
module Backmeup
  # Runs a named hook script from the root's bin directory if it exists.
  # Returns true after a successful run, false when the script is absent,
  # and raises when the script is present but not executable or exits
  # non-zero.
  class ScriptIfExist
    # Convenience wrapper: build an instance and run it in one call.
    def self.run(**args)
      new(**args).run
    end

    # env:         hash of environment variables passed to the script
    # root:        object exposing #bin, the directory holding scripts
    # script_name: file name of the script inside root.bin
    def initialize(env:, root:, script_name:)
      @env = env
      @root = root
      @script_name = script_name
    end

    attr_reader :env, :root, :script_name

    # Guard clause replaces the old `... or return false`: `and`/`or` are
    # best avoided for control flow (their low precedence invites bugs).
    def run
      return false unless File.exist?(script_path)
      raise "#{script_name} is not executable" \
        unless File.executable?(script_path)
      run_cmd
      true
    end

    private

    # Absolute path of the script inside the root's bin directory (memoized).
    def script_path
      @script_path ||= File.join(root.bin, script_name)
    end

    # Executes the script with the configured environment and raises if it
    # exits with a non-zero status.
    def run_cmd
      cmd = TTY::Command.new
      result = cmd.run(script_path, env: env)
      status = result.status
      raise "#{script_name} exited with #{status}" unless status.zero?
    end
  end
end
| 20.325581 | 70 | 0.618993 |
d5cfda98857f1b5f1dc45fd0fb5908c676ce66ca | 4,798 | #
# Author:: Stephen Delano (<[email protected]>)
# Copyright:: Copyright (c) 2010 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
# Spec for `knife client bulk delete`. Uses the legacy RSpec stub/should
# syntax; fixtures are stubbed ApiClients, split into regular clients and
# validators (which are protected unless --delete-validators is given).
describe Chef::Knife::ClientBulkDelete do
  # Captured UI streams so examples can assert on printed output.
  let(:stdout_io) { StringIO.new }
  let(:stdout) {stdout_io.string}
  let(:stderr_io) { StringIO.new }
  let(:stderr) { stderr_io.string }
  # The knife command under test, with UI I/O captured and all confirmation
  # prompts answered with knife_confirm.
  let(:knife) {
    k = Chef::Knife::ClientBulkDelete.new
    k.name_args = name_args
    k.config = option_args
    k.ui.stub(:stdout).and_return(stdout_io)
    k.ui.stub(:stderr).and_return(stderr_io)
    k.ui.stub(:confirm).and_return(knife_confirm)
    k.ui.stub(:confirm_without_exit).and_return(knife_confirm)
    k
  }
  let(:name_args) { [ "." ] }
  let(:option_args) { {} }
  let(:knife_confirm) { true }
  # Regular (non-validator) clients, keyed by name, with destroy stubbed.
  let(:nonvalidator_client_names) { %w{tim dan stephen} }
  let(:nonvalidator_clients) {
    clients = Hash.new
    nonvalidator_client_names.each do |client_name|
      client = Chef::ApiClient.new()
      client.name(client_name)
      client.stub(:destroy).and_return(true)
      clients[client_name] = client
    end
    clients
  }
  # Validator clients — these should survive deletion by default.
  let(:validator_client_names) { %w{myorg-validator} }
  let(:validator_clients) {
    clients = Hash.new
    validator_client_names.each do |validator_client_name|
      validator_client = Chef::ApiClient.new()
      validator_client.name(validator_client_name)
      validator_client.stub(:validator).and_return(true)
      validator_client.stub(:destroy).and_return(true)
      clients[validator_client_name] = validator_client
    end
    clients
  }
  let(:client_names) { nonvalidator_client_names + validator_client_names}
  let(:clients) {
    nonvalidator_clients.merge(validator_clients)
  }
  before(:each) do
    Chef::ApiClient.stub(:list).and_return(clients)
  end
  describe "run" do
    # A name-matching regex is mandatory; without one the command exits.
    describe "without a regex" do
      let(:name_args) { [ ] }
      it "should exit if the regex is not provided" do
        lambda { knife.run }.should raise_error(SystemExit)
      end
    end
    describe "with any clients" do
      it "should get the list of the clients" do
        Chef::ApiClient.should_receive(:list)
        knife.run
      end
      it "should print the name of the clients" do
        knife.run
        client_names.each do |client_name|
          stdout.should include(client_name)
        end
      end
      it "should confirm you really want to delete them" do
        knife.ui.should_receive(:confirm)
        knife.run
      end
      # Default behavior: validators are listed as protected and skipped.
      describe "without --delete-validators" do
        it "should mention that validator clients wont be deleted" do
          knife.run
          stdout.should include("Following clients are validators and will not be deleted.")
          info = stdout.index "Following clients are validators and will not be deleted."
          val = stdout.index "myorg-validator"
          (val > info).should be_true
        end
        it "should only delete nonvalidator clients" do
          nonvalidator_clients.each_value do |c|
            c.should_receive(:destroy)
          end
          validator_clients.each_value do |c|
            c.should_not_receive(:destroy)
          end
          knife.run
        end
      end
      # With the flag, validators are deleted too — after a second prompt.
      describe "with --delete-validators" do
        let(:option_args) { {:delete_validators => true} }
        it "should mention that validator clients will be deleted" do
          knife.run
          stdout.should include("The following validators will be deleted")
        end
        it "should confirm twice" do
          knife.ui.should_receive(:confirm).once
          knife.ui.should_receive(:confirm_without_exit).once
          knife.run
        end
        it "should delete all clients" do
          clients.each_value do |c|
            c.should_receive(:destroy)
          end
          knife.run
        end
      end
    end
    # The regex narrows which clients are considered at all.
    describe "with some clients" do
      let(:name_args) { [ "^ti" ] }
      it "should only delete clients that match the regex" do
        clients["tim"].should_receive(:destroy)
        clients["stephen"].should_not_receive(:destroy)
        clients["dan"].should_not_receive(:destroy)
        clients["myorg-validator"].should_not_receive(:destroy)
        knife.run
      end
    end
  end
end
| 28.730539 | 92 | 0.657566 |
7a484bcc6c106b32d619cd3ce5eb17a03d5f2cd0 | 650 | require File.expand_path('../../spec_helper', __FILE__)
describe "A Not node" do
relates "(not true)" do
compile do |g|
f = g.new_label
t = g.new_label
g.push :true
g.git f
g.push :true
g.goto t
f.set!
g.push :false
t.set!
end
end
relates <<-ruby do
a = 1
b = !a
ruby
compile do |g|
invert = g.new_label
done = g.new_label
g.push 1
g.set_local 0
g.pop
g.push_local 0
g.git invert
g.push :true
g.goto done
invert.set!
g.push :false
done.set!
g.set_local 1
end
end
end
| 13.541667 | 55 | 0.504615 |
016a3540f3957ca9ec22b1dd4a319531bd55c347 | 338 | cask "sketch@43" do
version "43,38999"
url "https://download.sketch.com/sketch-#{version.csv.first}-#{version.csv.second}.zip"
name "Sketch"
desc "Digital design and prototyping platform"
homepage "https://www.sketch.com/"
auto_updates true
depends_on macos: ">= :mojave"
app "Sketch.app"
end | 24.142857 | 91 | 0.647929 |
621a42f04e3d0db5a14f569261d987b3ec9fa83e | 337 | class CreateSupportBrbInvoiceSituations < ActiveRecord::Migration[5.2] # :nodoc:
def change
create_table 'extranet.brb_invoice_situations' do |t|
t.string :name
t.boolean :status, default: true
t.timestamps
t.boolean :deleted, default: false
t.datetime :deleted_at, default: nil
end
end
end
| 24.071429 | 80 | 0.688427 |
e8d0cb148c82d100dd84fee1e062d30577b5512c | 3,611 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options)
config.active_storage.service = :local
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "any_movie_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.988372 | 100 | 0.758239 |
4aa80938518a23efb29b3d16af383ff7dcc7bf59 | 1,693 | class Perltidy < Formula
desc "Indents and reformats Perl scripts to make them easier to read"
homepage "https://perltidy.sourceforge.io/"
url "https://downloads.sourceforge.net/project/perltidy/20200907/Perl-Tidy-20200907.tar.gz"
sha256 "72c9324a188ecf7c9cd4ed8b7718be993ad77d4d9bc770b284caa17278467c18"
license "GPL-2.0-or-later"
livecheck do
url :stable
regex(%r{url=.*?/Perl-Tidy[._-]v?(\d+)\.t}i)
end
bottle do
cellar :any_skip_relocation
sha256 "240c8e2c88b1e25dc272ffd842e11954b6d59e2525778473efd182ca79c91700" => :catalina
sha256 "f8d51c96d8669b691b94ea813bb1a4abdae133f1e161f4e72ecfcc8923231244" => :mojave
sha256 "c0c6d266728fabe3991a7ff17d3cc36eac4533f3ce6fa9476347f473592f69da" => :high_sierra
end
def install
ENV.prepend_create_path "PERL5LIB", libexec/"lib/perl5"
system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}",
"INSTALLSITESCRIPT=#{bin}",
"INSTALLSITEMAN1DIR=#{man1}",
"INSTALLSITEMAN3DIR=#{man3}"
system "make"
system "make", "install"
bin.env_script_all_files(libexec/"bin", PERL5LIB: ENV["PERL5LIB"])
end
test do
(testpath/"testfile.pl").write <<~EOS
print "Help Desk -- What Editor do you use?";
chomp($editor = <STDIN>);
if ($editor =~ /emacs/i) {
print "Why aren't you using vi?\n";
} elsif ($editor =~ /vi/i) {
print "Why aren't you using emacs?\n";
} else {
print "I think that's the problem\n";
}
EOS
system bin/"perltidy", testpath/"testfile.pl"
assert_predicate testpath/"testfile.pl.tdy", :exist?
end
end
| 36.021277 | 93 | 0.652688 |
62f144fded50779dacd9cd4d85103a80d9195596 | 753 | class PaperclipOptions < Hash
def self.s3_options
if ENV['S3_ACCESS_KEY_ID'] and ENV['S3_SECRET_ACCESS_KEY'] then
{
:storage => :s3,
:s3_credentials => { :access_key_id => ENV['S3_ACCESS_KEY_ID'], :secret_access_key => ENV['S3_SECRET_ACCESS_KEY'] },
:path => ":class/:attachment/:id/:style.:extension",
:bucket => "bhamruby-#{Rails.env}"
}
else
{}
end
end
def self.default_styles
{ } # Any consistent options can go here
end
def initialize(opts = {}, env = Rails.env)
merge! self.class.s3_options if env.to_sym == :production # not using ActiveSupport::StringInquirer methods in case they pass a symbol
merge! self.class.default_styles
merge! opts
end
end
| 28.961538 | 138 | 0.646746 |
625eb950b0e8e69a109e5634fbfcd7aa3a0f3181 | 19,386 | require 'spec_helper'
module Bosh::Director::DeploymentPlan
describe InstanceSpec do
include Support::StemcellHelpers
subject(:instance_spec) { described_class.create_from_instance_plan(instance_plan) }
let(:links_manager) do
instance_double(Bosh::Director::Links::LinksManager).tap do |double|
allow(double).to receive(:get_links_from_deployment).and_return([])
end
end
let(:job_spec) do
{ 'name' => 'smurf-job', 'release' => 'release', 'templates' => [] }
end
let(:packages) do
{ 'pkg' => { 'name' => 'package', 'version' => '1.0' } }
end
let(:properties) do
{ 'key' => 'value' }
end
let(:links) do
{
'smurf-job' => {
'link_name' => {
'deployment_name' => 'dep1',
'networks' => ['default'],
'properties' => {
'listen_port' => 'Kittens',
},
'address' => 'some.address.com',
'instances' => [
{
'name' => 'provider',
'index' => 0,
'bootstrap' => true,
'id' => '3d46803d-1527-4209-8e1f-822105fece7c',
'az' => 'z1',
'address' => '10.244.0.4',
},
],
'instance_group' => 'smurf-ig',
'default_network' => 'smurf-net',
'domain' => 'smurf.bosh',
'use_short_dns_addresses' => true,
'use_link_dns_names' => true,
'non-whitelisted-key' => 'some_value',
'group_name' => 'link_provider_name-link_provider_type',
},
},
}
end
let(:smurf_job_links) { links['smurf-job'] }
let(:variables_interpolator) { instance_double(Bosh::Director::ConfigServer::VariablesInterpolator) }
let(:lifecycle) { InstanceGroup::DEFAULT_LIFECYCLE_PROFILE }
let(:network_spec) do
{ 'name' => 'default', 'subnets' => [{ 'cloud_properties' => { 'foo' => 'bar' }, 'az' => 'foo-az' }] }
end
let(:network) { DynamicNetwork.parse(network_spec, [AvailabilityZone.new('foo-az', {})], logger) }
let(:instance_group) do
instance_double(
'Bosh::Director::DeploymentPlan::InstanceGroup',
name: 'fake-job',
spec: job_spec,
canonical_name: 'job',
instances: ['instance0'],
default_network: { 'gateway' => 'default' },
vm_type: vm_type,
vm_extensions: [],
stemcell: stemcell,
env: env,
package_spec: packages,
persistent_disk_collection: persistent_disk_collection,
errand?: false,
compilation?: false,
update_spec: {},
properties: properties,
lifecycle: lifecycle,
vm_resources: nil,
vm_strategy: UpdateConfig::VM_STRATEGY_DELETE_CREATE,
)
end
let(:index) { 0 }
let(:instance_state) do
{}
end
let(:desired_variable_set) { instance_double(Bosh::Director::Models::VariableSet) }
let(:instance) do
instance = Instance.create_from_instance_group(
instance_group,
index,
'started',
deployment,
instance_state,
availability_zone,
logger,
variables_interpolator,
)
instance.desired_variable_set = desired_variable_set
instance
end
let(:vm_type) { VmType.new('name' => 'fake-vm-type') }
let(:availability_zone) { Bosh::Director::DeploymentPlan::AvailabilityZone.new('foo-az', 'a' => 'b') }
let(:stemcell) { make_stemcell(name: 'fake-stemcell-name', version: '1.0') }
let(:env) { Env.new('key' => 'value') }
let(:deployment_name) { 'fake-deployment' }
let(:deployment) { Bosh::Director::Models::Deployment.make(name: deployment_name) }
let(:instance_model) { Bosh::Director::Models::Instance.make(deployment: deployment, bootstrap: true, uuid: 'uuid-1') }
let(:instance_plan) do
InstancePlan.new(
existing_instance: nil,
desired_instance: DesiredInstance.new(instance_group),
instance: instance,
variables_interpolator: variables_interpolator,
)
end
let(:persistent_disk_collection) { PersistentDiskCollection.new(logger) }
before do
allow(Bosh::Director::Links::LinksManager).to receive(:new).and_return(links_manager)
persistent_disk_collection.add_by_disk_size(0)
reservation = Bosh::Director::DesiredNetworkReservation.new_dynamic(instance.model, network)
reservation.resolve_ip('192.168.0.10')
instance_plan.network_plans << NetworkPlanner::Plan.new(reservation: reservation)
instance.bind_existing_instance_model(instance_model)
end
describe '#as_apply_spec' do
it 'returns a valid instance apply_spec' do
network_name = network_spec['name']
spec = instance_spec.as_apply_spec
expect(spec['deployment']).to eq('fake-deployment')
expect(spec['name']).to eq('fake-job')
expect(spec['job']).to eq(job_spec)
expect(spec['az']).to eq('foo-az')
expect(spec['index']).to eq(index)
expect(spec['networks']).to include(network_name)
expect(spec['networks'][network_name]).to eq(
'type' => 'dynamic',
'cloud_properties' => network_spec['subnets'].first['cloud_properties'],
'default' => ['gateway'],
)
expect(spec['packages']).to eq(packages)
expect(spec['persistent_disk']).to eq(0)
expect(spec['configuration_hash']).to be_nil
expect(spec['dns_domain_name']).to eq('bosh')
expect(spec['id']).to eq('uuid-1')
end
it 'includes rendered_templates_archive key after rendered templates were archived' do
instance.rendered_templates_archive =
Bosh::Director::Core::Templates::RenderedTemplatesArchive.new('fake-blobstore-id', 'fake-sha1')
expect(instance_spec.as_apply_spec['rendered_templates_archive']).to eq(
'blobstore_id' => 'fake-blobstore-id',
'sha1' => 'fake-sha1',
)
end
it 'does not include rendered_templates_archive key before rendered templates were archived' do
expect(instance_spec.as_apply_spec).to_not have_key('rendered_templates_archive')
end
end
describe '#as_jobless_apply_spec' do
it 'returns a valid instance apply_spec' do
network_name = network_spec['name']
spec = instance_spec.as_jobless_apply_spec
expect(spec['deployment']).to eq('fake-deployment')
expect(spec['name']).to eq('fake-job')
expect(spec['job']).to eq({})
expect(spec['az']).to eq('foo-az')
expect(spec['index']) .to eq(index)
expect(spec['networks']).to include(network_name)
expect(spec['networks'][network_name]).to eq(
'type' => 'dynamic',
'cloud_properties' => network_spec['subnets'].first['cloud_properties'],
'default' => ['gateway'],
)
expect(spec['packages']).to eq(packages)
expect(spec['persistent_disk']).to eq(0)
expect(spec['configuration_hash']).to be_nil
expect(spec['dns_domain_name']).to eq('bosh')
expect(spec['id']).to eq('uuid-1')
end
it 'includes rendered_templates_archive key after rendered templates were archived' do
instance.rendered_templates_archive =
Bosh::Director::Core::Templates::RenderedTemplatesArchive.new('fake-blobstore-id', 'fake-sha1')
expect(instance_spec.as_jobless_apply_spec['rendered_templates_archive']).to eq(
'blobstore_id' => 'fake-blobstore-id',
'sha1' => 'fake-sha1',
)
end
it 'does not include rendered_templates_archive key before rendered templates were archived' do
expect(instance_spec.as_jobless_apply_spec).to_not have_key('rendered_templates_archive')
end
end
describe '#template_spec' do
let(:variables_interpolator) { double(Bosh::Director::ConfigServer::VariablesInterpolator) }
let(:expected_links) do
{
'smurf-job' => {
'link_name' => {
'properties' => {
'listen_port' => 'Kittens',
},
'address' => 'some.address.com',
'instances' => [{
'name' => 'provider',
'index' => 0,
'bootstrap' => true,
'id' => '3d46803d-1527-4209-8e1f-822105fece7c',
'az' => 'z1',
'address' => '10.244.0.4',
}],
'instance_group' => 'smurf-ig',
'default_network' => 'smurf-net',
'deployment_name' => 'dep1',
'domain' => 'smurf.bosh',
'use_short_dns_addresses' => true,
'use_link_dns_names' => true,
'group_name' => 'link_provider_name-link_provider_type',
},
},
}
end
before do
allow(Bosh::Director::ConfigServer::VariablesInterpolator).to receive(:new).and_return(variables_interpolator)
allow(variables_interpolator).to receive(:interpolate_template_spec_properties)
.with(properties, 'fake-deployment', instance.desired_variable_set)
.and_return(properties)
allow(variables_interpolator).to receive(:interpolate_link_spec_properties)
.with(smurf_job_links, instance.desired_variable_set)
.and_return(smurf_job_links)
expect(links_manager).to receive(:get_links_for_instance).and_return(links)
end
context 'links specs whitelisting' do
it 'respects whitelist for links spec' do
expect([instance_spec.as_template_spec['links']]).to include(expected_links)
end
end
context 'properties interpolation' do
let(:properties) do
{
'smurf_1' => '((smurf_placeholder_1))',
'smurf_2' => '((smurf_placeholder_2))',
}
end
let(:first_link) do
{
'deployment_name' => 'dep1',
'instances' => [{ 'name' => 'v1' }],
'networks' => 'foo',
'properties' => { 'smurf' => '((smurf_val1))' },
}
end
let(:second_link) do
{
'deployment_name' => 'dep2',
'instances' => [{ 'name' => 'v2' }],
'networks' => 'foo2',
'properties' => { 'smurf' => '((smurf_val2))' },
}
end
let(:links) do
{
'smurf-job' => {
'link_1' => first_link,
'link_2' => second_link,
},
}
end
let(:resolved_properties) do
{
'smurf_1' => 'lazy smurf',
'smurf_2' => 'happy smurf',
}
end
let(:resolved_first_link) do
{ 'instances' => [{ 'name' => 'v1' }], 'properties' => { 'smurf' => 'strong smurf' } }
end
let(:resolved_second_link) do
{ 'instances' => [{ 'name' => 'v2' }], 'properties' => { 'smurf' => 'sleepy smurf' } }
end
let(:resolved_links) do
{
'smurf-job' => {
'link_1' => resolved_first_link,
'link_2' => resolved_second_link,
},
}
end
let(:resolved_smurf_job_links) { resolved_links['smurf-job'] }
it 'resolves properties and links properties' do
expect(variables_interpolator).to receive(:interpolate_template_spec_properties)
.with(properties, 'fake-deployment', instance.desired_variable_set)
.and_return(resolved_properties)
expect(variables_interpolator).to receive(:interpolate_link_spec_properties)
.with(smurf_job_links, instance.desired_variable_set)
.and_return(resolved_smurf_job_links)
spec = instance_spec.as_template_spec
expect(spec['properties']).to eq(resolved_properties)
expect(spec['links']).to eq(resolved_links)
end
end
context 'when instance_group has a manual network' do
let(:subnet_spec) do
{
'range' => '192.168.0.0/24',
'gateway' => '192.168.0.254',
'cloud_properties' => { 'foo' => 'bar' },
}
end
let(:subnet) { ManualNetworkSubnet.parse(network_spec['name'], subnet_spec, [availability_zone]) }
let(:network) { ManualNetwork.new(network_spec['name'], [subnet], logger) }
it 'returns a valid instance template_spec' do
network_name = network_spec['name']
spec = instance_spec.as_template_spec
expect(spec['deployment']).to eq('fake-deployment')
expect(spec['name']).to eq('fake-job')
expect(spec['job']).to eq(job_spec)
expect(spec['index']).to eq(index)
expect(spec['networks']).to include(network_name)
expect(spec['networks'][network_name]).to include(
'ip' => '192.168.0.10',
'netmask' => '255.255.255.0',
'cloud_properties' => { 'foo' => 'bar' },
'dns_record_name' => '0.smurf-job.default.fake-deployment.bosh',
'gateway' => '192.168.0.254',
)
expect(spec['persistent_disk']).to eq(0)
expect(spec['configuration_hash']).to be_nil
expect(spec['properties']).to eq(properties)
expect(spec['dns_domain_name']).to eq('bosh')
expect(spec['links']).to eq(expected_links)
expect(spec['id']).to eq('uuid-1')
expect(spec['az']).to eq('foo-az')
expect(spec['bootstrap']).to eq(true)
expect(spec['address']).to eq('192.168.0.10')
expect(spec['ip']).to eq('192.168.0.10')
end
end
context 'when instance_group has dynamic network' do
context 'when vm does not have network ip assigned' do
it 'returns a valid instance template_spec' do
network_name = network_spec['name']
spec = instance_spec.as_template_spec
expect(spec['deployment']).to eq('fake-deployment')
expect(spec['name']).to eq('fake-job')
expect(spec['job']).to eq(job_spec)
expect(spec['index']).to eq(index)
expect(spec['networks']).to include(network_name)
expect(spec['networks'][network_name]).to include(
'type' => 'dynamic',
'ip' => '127.0.0.1',
'netmask' => '127.0.0.1',
'gateway' => '127.0.0.1',
'dns_record_name' => '0.smurf-job.default.fake-deployment.bosh',
'cloud_properties' => network_spec['subnets'].first['cloud_properties'],
)
expect(spec['persistent_disk']).to eq(0)
expect(spec['configuration_hash']).to be_nil
expect(spec['properties']).to eq(properties)
expect(spec['dns_domain_name']).to eq('bosh')
expect(spec['links']).to eq(expected_links)
expect(spec['id']).to eq('uuid-1')
expect(spec['az']).to eq('foo-az')
expect(spec['bootstrap']).to eq(true)
expect(spec['address']).to eq('uuid-1.fake-job.default.fake-deployment.bosh')
expect(spec['ip']).to eq(nil)
end
end
context 'when vm has network ip assigned' do
let(:instance_state) do
{
'networks' => {
'default' => {
'type' => 'dynamic',
'ip' => '192.0.2.19',
'netmask' => '255.255.255.0',
'gateway' => '192.0.2.1',
},
},
}
end
it 'returns a valid instance template_spec' do
network_name = network_spec['name']
spec = instance_spec.as_template_spec
expect(spec['deployment']).to eq('fake-deployment')
expect(spec['name']).to eq('fake-job')
expect(spec['job']).to eq(job_spec)
expect(spec['index']).to eq(index)
expect(spec['networks']).to include(network_name)
expect(spec['networks'][network_name]).to include(
'type' => 'dynamic',
'ip' => '192.0.2.19',
'netmask' => '255.255.255.0',
'gateway' => '192.0.2.1',
'dns_record_name' => '0.smurf-job.default.fake-deployment.bosh',
'cloud_properties' => network_spec['subnets'].first['cloud_properties'],
)
expect(spec['persistent_disk']).to eq(0)
expect(spec['configuration_hash']).to be_nil
expect(spec['properties']).to eq(properties)
expect(spec['dns_domain_name']).to eq('bosh')
expect(spec['links']).to eq(expected_links)
expect(spec['id']).to eq('uuid-1')
expect(spec['az']).to eq('foo-az')
expect(spec['bootstrap']).to eq(true)
expect(spec['address']).to eq('uuid-1.fake-job.default.fake-deployment.bosh')
expect(spec['ip']).to eq('192.0.2.19')
end
end
end
end
describe '#full_spec' do
it 'return correct json format' do
expected_spec = {
'deployment' => 'fake-deployment',
'job' => {
'name' => 'smurf-job',
'release' => 'release',
'templates' => [],
},
'index' => 0,
'bootstrap' => true,
'lifecycle' => 'service',
'name' => 'fake-job',
'id' => 'uuid-1',
'az' => 'foo-az',
'networks' => {
'default' => {
'type' => 'dynamic',
'cloud_properties' => { 'foo' => 'bar' },
'default' => ['gateway'],
},
},
'vm_type' => {
'name' => 'fake-vm-type',
'cloud_properties' => {},
},
'vm_resources' => nil,
'stemcell' => {
'name' => 'fake-stemcell-name',
'version' => '1.0',
},
'env' => { 'key' => 'value' },
'packages' => {
'pkg' => {
'name' => 'package',
'version' => '1.0',
},
},
'properties' => { 'key' => 'value' },
'properties_need_filtering' => true,
'dns_domain_name' => 'bosh',
'address' => 'uuid-1.fake-job.default.fake-deployment.bosh',
'update' => {},
'persistent_disk' => 0,
'persistent_disk_type' => {
'name' => String,
'disk_size' => 0,
'cloud_properties' => {},
},
}
expect(instance_spec.full_spec).to match(expected_spec)
end
context 'when CompilationJobs' do
let(:lifecycle) { nil }
context 'lifecycle is not set' do
it "contains 'nil' for 'lifecycle'" do
expect(instance_spec.full_spec['lifecycle']).to be_nil
end
end
end
InstanceGroup::VALID_LIFECYCLE_PROFILES.each do |lifecycle_value|
context "when 'lifecycle' is set to '#{lifecycle_value}'" do
let(:lifecycle) { lifecycle_value }
it "contains '#{lifecycle_value}' for 'lifecycle'" do
expect(instance_spec.full_spec['lifecycle']).to eq(lifecycle_value)
end
end
end
end
end
end
| 36.785579 | 123 | 0.550655 |
626c2e282e326bb0cff1f67724d65a373ed7fc0a | 338 | require 'chefspec'
# ChefSpec regression test: converging the 'expect_exception::no_error'
# recipe on Ubuntu 16.04 must succeed without mapping any file-load error.
describe 'expect_exception::no_error' do
  let(:chef_run) { ChefSpec::ServerRunner.new(platform: 'ubuntu', version: '16.04').converge(described_recipe) }
  it 'does not raise an error' do
    # The error mapper is only invoked when a recipe file fails to load.
    expect(Chef::Formatters::ErrorMapper).to_not receive(:file_load_failed)
    expect { chef_run }.to_not raise_error
  end
end
| 30.727273 | 112 | 0.745562 |
5d23bb1dd1da2832365717acb451e0e7b562cc1f | 848 | lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
# Gem metadata for the fluent-plugin-prometheus-format Fluentd plugin.
# NOTE(review): the author email below looks like a redacted placeholder
# ("[email protected]") -- confirm the intended contact address.
Gem::Specification.new do |spec|
  spec.name          = "fluent-plugin-prometheus-format"
  spec.version       = "2.0.0"
  spec.authors       = ["Sumo Logic"]
  spec.email         = ["[email protected]"]
  spec.summary       = %q{Fluentd plugin for transfer data points to prometheus metrics format.}
  spec.homepage      = "https://github.com/SumoLogic/sumologic-kubernetes-collection"
  spec.license       = "Apache-2.0"
  # Ship only the Ruby sources under lib/; no executables.
  spec.files         = Dir.glob(File.join('lib', '**', '*.rb'))
  spec.executables   = []
  spec.require_paths = ["lib"]
  spec.add_development_dependency "bundler", "~> 2.0"
  spec.add_development_dependency "rake", "~> 13.0"
  spec.add_development_dependency "test-unit", "~> 3.0"
  # Pinned exactly -- bumping fluentd requires releasing a new gem version.
  spec.add_runtime_dependency "fluentd", "= 1.14.5"
end
| 36.869565 | 96 | 0.675708 |
ff27cd7782a50fdf1d279036e3028f16108626bd | 168 | # frozen_string_literal: true
# Adds a nullable string media_id column to twitter_images.
# Reversible: ActiveRecord can invert add_column automatically.
class AddMediaIdToTwitterImages < ActiveRecord::Migration[5.2]
  def change
    add_column :twitter_images, :media_id, :string
  end
end
| 21 | 62 | 0.779762 |
abd9d83532c562442fc3e32750cfd9c86b6d9fca | 306 | class SorceryBruteForceProtection < ActiveRecord::Migration[4.2]
def change
add_column :internal_users, :failed_logins_count, :integer, default: 0
add_column :internal_users, :lock_expires_at, :datetime, default: nil
add_column :internal_users, :unlock_token, :string, default: nil
end
end
| 38.25 | 74 | 0.77451 |
d5e1ee084f42be7a7d0998d99cab850641b9350b | 759 | module Util
module Memoization
def valid_ivar_name(str)
str.match(/[\w_]+/)[0]
end
MEMOIZE_PREFIX = 'memoize_'
def memoize(method_name = nil)
method_name = @@method_added unless method_name
return if method_name.to_s.start_with? MEMOIZE_PREFIX
alias_method "#{MEMOIZE_PREFIX}#{method_name}", method_name
define_method(method_name) do |*args|
ivar_name = "@#{self.class.valid_ivar_name(method_name)}"
if instance_variable_defined?(ivar_name)
instance_variable_get(ivar_name)
else
instance_variable_set(ivar_name, send("#{MEMOIZE_PREFIX}#{method_name}", *args))
end
end
end
def method_added(name)
@@method_added = name
end
end
end
| 26.172414 | 90 | 0.662714 |
f8c1248e6c7213f57e347e7afe660737744af83c | 680 | require 'spec_helper'
# Model spec for ExternalSystem: factory validity, required code attribute,
# i18n-backed name lookup, and uniqueness of code.
describe ExternalSystem do
  it "has a valid factory" do
    expect(FactoryGirl.build(:external_system)).to be_valid
  end
  it "fails without code" do
    expect(FactoryGirl.build(:external_system, code: nil)).not_to be_valid
  end
  # With no translation loaded, I18n returns its "translation missing"
  # sentinel, which pins the lookup key used by #name.
  it "returns untranslated name" do
    external_system = FactoryGirl.build(:external_system)
    expect(external_system.name).to eq "translation missing: en.docdel.code"+
      ".external_system."+external_system.code
  end
  it "code is unique" do
    external_system = FactoryGirl.create(:external_system)
    expect(FactoryGirl.build(:external_system, :code =>
                             external_system.code)).not_to be_valid
  end
end
| 27.2 | 77 | 0.742647 |
bfcd76bd07d7e894ae937a6b9bd7842c84d7829e | 905 | # frozen_string_literal: true
# Mixin for ActiveRecord models with first_name/last_name columns: name
# formatting helpers plus a case-insensitive multi-term name search scope.
module Names
  # AND-combines every whitespace-separated term of +search_string+; each
  # term must match (case-insensitively, via LIKE) at least one of +columns+.
  # +scope+ may be a model class or an existing relation to narrow.
  # CAUTION: column names are interpolated directly into SQL -- +columns+
  # must come from trusted code, never from user input. The term itself is
  # passed as a named bind parameter, so it is safely escaped.
  def self.string_search(scope, search_string, columns)
    model = scope.is_a?(ActiveRecord::Relation) ? scope.model : scope
    query = columns.map { |column| "lower(#{model.table_name}.#{column}) like :term" }.join(' OR ')
    search_string
      .split(/\s+/)
      .select(&:present?)
      .inject(scope || model) { |working_scope, term| working_scope.where(query, term: "%#{term.downcase}%") }
  end
  extend ActiveSupport::Concern
  # Return the user's name.
  def name
    name_parts.compact.join(' ')
  end
  # Return the user's name in last, first format.
  def name_inverted
    [last_name, first_name].compact.join(', ')
  end
  # Ordered name components; nils are dropped by the formatters above.
  def name_parts
    [first_name, last_name]
  end
  included do
    # Model-level search scope; columns default to the two name columns.
    scope :name_search,
          ->(search_string, columns: %w[first_name last_name]) { Names.string_search(self, search_string, columns) }
  end
end
| 26.617647 | 116 | 0.669613 |
1873a3016165257c4b00449854985b810ea6ca1f | 265 | cask :v1 => 'rdio' do
version :latest
sha256 :no_check
url 'https://www.rdio.com/media/static/desktop/mac/Rdio.dmg'
appcast 'http://www.rdio.com/media/static/desktop/mac/appcast.xml'
homepage 'http://www.rdio.com'
license :gratis
app 'Rdio.app'
end
| 22.083333 | 68 | 0.698113 |
edbf99e47cc23a02fc221440cb956aed92a07be4 | 403 | class AppointmentsController < ApplicationController
def index
@appointments = Appointment.order('appt_time ASC')
@appointment = Appointment.new
end
def create
@appointment = Appointment.create(appointment_params)
@appointments = Appointment.order('appt_time ASC')
end
private
def appointment_params
params.require(:appointment).permit(:title, :appt_time)
end
end
| 22.388889 | 59 | 0.744417 |
eda02a2b42c9e59d7db6aba2cc4a1fa787329177 | 35,404 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "gapic/operation"
require "google/longrunning/operations_pb"
module Google
module Cloud
module Firestore
module Admin
module V1
module FirestoreAdmin
# Service that implements Longrunning Operations API.
class Operations
# @private
attr_reader :operations_stub
##
# Configuration for the FirestoreAdmin Operations API.
#
# @yield [config] Configure the Operations client.
# @yieldparam config [Operations::Configuration]
#
# @return [Operations::Configuration]
#
def self.configure
@configure ||= Operations::Configuration.new
yield @configure if block_given?
@configure
end
##
# Configure the FirestoreAdmin Operations instance.
#
# The configuration is set to the derived mode, meaning that values can be changed,
# but structural changes (adding new fields, etc.) are not allowed. Structural changes
# should be made on {Operations.configure}.
#
# @yield [config] Configure the Operations client.
# @yieldparam config [Operations::Configuration]
#
# @return [Operations::Configuration]
#
def configure
yield @config if block_given?
@config
end
##
# Create a new Operations client object.
#
# @yield [config] Configure the Client client.
# @yieldparam config [Operations::Configuration]
#
def initialize
# These require statements are intentionally placed here to initialize
# the gRPC module only when it's required.
# See https://github.com/googleapis/toolkit/issues/446
require "gapic/grpc"
require "google/longrunning/operations_services_pb"
# Create the configuration object
@config = Configuration.new Operations.configure
# Yield the configuration if needed
yield @config if block_given?
# Create credentials
credentials = @config.credentials
credentials ||= Credentials.default scope: @config.scope
if credentials.is_a?(String) || credentials.is_a?(Hash)
credentials = Credentials.new credentials, scope: @config.scope
end
@quota_project_id = @config.quota_project
@quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id
@operations_stub = ::Gapic::ServiceStub.new(
::Google::Longrunning::Operations::Stub,
credentials: credentials,
endpoint: @config.endpoint,
channel_args: @config.channel_args,
interceptors: @config.interceptors
)
end
# Service calls
##
# Lists operations that match the specified filter in the request. If the
# server doesn't support this method, it returns `UNIMPLEMENTED`.
#
# NOTE: the `name` binding allows API services to override the binding
# to use different resource name schemes, such as `users/*/operations`. To
# override the binding, API services can add a binding such as
# `"/v1/{name=users/*}/operations"` to their service configuration.
# For backwards compatibility, the default name includes the operations
# collection id, however overriding users must ensure the name binding
# is the parent resource, without the operations collection id.
#
# @overload list_operations(request, options = nil)
# Pass arguments to `list_operations` via a request object, either of type
# {::Google::Longrunning::ListOperationsRequest} or an equivalent Hash.
#
# @param request [::Google::Longrunning::ListOperationsRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload list_operations(name: nil, filter: nil, page_size: nil, page_token: nil)
# Pass arguments to `list_operations` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# The name of the operation's parent resource.
# @param filter [::String]
# The standard list filter.
# @param page_size [::Integer]
# The standard list page size.
# @param page_token [::String]
# The standard list page token.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::PagedEnumerable<::Gapic::Operation>]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::PagedEnumerable<::Gapic::Operation>]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
def list_operations request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Longrunning::ListOperationsRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.list_operations.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Firestore::Admin::V1::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {
"name" => request.name
}
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.list_operations.timeout,
metadata: metadata,
retry_policy: @config.rpcs.list_operations.retry_policy
options.apply_defaults metadata: @config.metadata,
retry_policy: @config.retry_policy
@operations_stub.call_rpc :list_operations, request, options: options do |response, operation|
wrap_lro_operation = ->(op_response) { ::Gapic::Operation.new op_response, @operations_client }
response = ::Gapic::PagedEnumerable.new @operations_stub, :list_operations, request, response, operation, options, format_resource: wrap_lro_operation
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Gets the latest state of a long-running operation. Clients can use this
# method to poll the operation result at intervals as recommended by the API
# service.
#
# @overload get_operation(request, options = nil)
# Pass arguments to `get_operation` via a request object, either of type
# {::Google::Longrunning::GetOperationRequest} or an equivalent Hash.
#
# @param request [::Google::Longrunning::GetOperationRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload get_operation(name: nil)
# Pass arguments to `get_operation` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# The name of the operation resource.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::Operation]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::Operation]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
def get_operation request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Longrunning::GetOperationRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.get_operation.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Firestore::Admin::V1::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {
"name" => request.name
}
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.get_operation.timeout,
metadata: metadata,
retry_policy: @config.rpcs.get_operation.retry_policy
options.apply_defaults metadata: @config.metadata,
retry_policy: @config.retry_policy
@operations_stub.call_rpc :get_operation, request, options: options do |response, operation|
response = ::Gapic::Operation.new response, @operations_client, options: options
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Deletes a long-running operation. This method indicates that the client is
# no longer interested in the operation result. It does not cancel the
# operation. If the server doesn't support this method, it returns
# `google.rpc.Code.UNIMPLEMENTED`.
#
# @overload delete_operation(request, options = nil)
# Pass arguments to `delete_operation` via a request object, either of type
# {::Google::Longrunning::DeleteOperationRequest} or an equivalent Hash.
#
# @param request [::Google::Longrunning::DeleteOperationRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload delete_operation(name: nil)
# Pass arguments to `delete_operation` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# The name of the operation resource to be deleted.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Protobuf::Empty]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Protobuf::Empty]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# Deletes the named long-running operation on the server. Returns
# Google::Protobuf::Empty on success; transport failures are converted to
# Google::Cloud::Error.
def delete_operation request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Accept either a DeleteOperationRequest or an equivalent Hash.
  request = ::Gapic::Protobuf.coerce request, to: ::Google::Longrunning::DeleteOperationRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.delete_operation.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Firestore::Admin::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Implicit routing header telling the backend which resource is targeted.
  header_params = {
    "name" => request.name
  }
  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  metadata[:"x-goog-request-params"] ||= request_params_header

  # Layer per-RPC defaults, then client-wide defaults, beneath any
  # caller-supplied options (caller values win).
  options.apply_defaults timeout: @config.rpcs.delete_operation.timeout,
                         metadata: metadata,
                         retry_policy: @config.rpcs.delete_operation.retry_policy

  options.apply_defaults metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @operations_stub.call_rpc :delete_operation, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  # Translate transport-level gRPC failures into Google::Cloud errors.
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Requests cancellation of a long-running operation. Cancellation is
# asynchronous and best-effort: the server may still complete the
# operation despite the request, and servers that do not support this
# method return `google.rpc.Code.UNIMPLEMENTED`. Use
# Operations.GetOperation or other methods to check whether the
# cancellation succeeded or whether the operation completed despite
# cancellation. A successfully cancelled operation is not deleted;
# instead, it becomes an operation with an
# {::Google::Longrunning::Operation#error Operation.error} value with a
# {::Google::Rpc::Status#code google.rpc.Status.code} of 1,
# corresponding to `Code.CANCELLED`.
#
# @overload cancel_operation(request, options = nil)
#   Pass arguments to `cancel_operation` via a request object, either of type
#   {::Google::Longrunning::CancelOperationRequest} or an equivalent Hash.
#
#   @param request [::Google::Longrunning::CancelOperationRequest, ::Hash]
#     A request object representing the call parameters. Required. To specify no
#     parameters, or to keep all the default parameter values, pass an empty Hash.
#   @param options [::Gapic::CallOptions, ::Hash]
#     Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload cancel_operation(name: nil)
#   Pass arguments to `cancel_operation` via keyword arguments. Note that at
#   least one keyword argument is required. To specify no parameters, or to keep all
#   the default parameter values, pass an empty Hash as a request object (see above).
#
#   @param name [::String]
#     The name of the operation resource to be cancelled.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Protobuf::Empty]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Protobuf::Empty]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
def cancel_operation request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Longrunning::CancelOperationRequest

  # Normalize caller-supplied options (Hash or nil) into CallOptions.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  rpc_defaults = @config.rpcs.cancel_operation

  # Assemble per-call gRPC headers: client identification, quota project
  # and the implicit routing parameters derived from the request.
  call_metadata = rpc_defaults.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Firestore::Admin::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  routing_header = { "name" => request.name }
                   .map { |field, value| "#{field}=#{value}" }
                   .join("&")
  call_metadata[:"x-goog-request-params"] ||= routing_header

  # Caller options win over per-RPC defaults, which in turn win over
  # client-wide defaults.
  options.apply_defaults timeout: rpc_defaults.timeout,
                         metadata: call_metadata,
                         retry_policy: rpc_defaults.retry_policy
  options.apply_defaults metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @operations_stub.call_rpc :cancel_operation, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Waits until the specified long-running operation is done or a deadline
# is reached, returning the latest state. If the operation is already
# done, that state is returned immediately. A timeout greater than the
# default HTTP/RPC timeout is capped at the HTTP/RPC timeout, and servers
# that do not support this method return `google.rpc.Code.UNIMPLEMENTED`.
# Note that this method is on a best-effort basis. It may return the
# latest state before the specified timeout (including immediately),
# meaning even an immediate response is no guarantee that the operation
# is done.
#
# @overload wait_operation(request, options = nil)
#   Pass arguments to `wait_operation` via a request object, either of type
#   {::Google::Longrunning::WaitOperationRequest} or an equivalent Hash.
#
#   @param request [::Google::Longrunning::WaitOperationRequest, ::Hash]
#     A request object representing the call parameters. Required. To specify no
#     parameters, or to keep all the default parameter values, pass an empty Hash.
#   @param options [::Gapic::CallOptions, ::Hash]
#     Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload wait_operation(name: nil, timeout: nil)
#   Pass arguments to `wait_operation` via keyword arguments. Note that at
#   least one keyword argument is required. To specify no parameters, or to keep all
#   the default parameter values, pass an empty Hash as a request object (see above).
#
#   @param name [::String]
#     The name of the operation resource to wait on.
#   @param timeout [::Google::Protobuf::Duration, ::Hash]
#     The maximum duration to wait before timing out. If left blank, the wait
#     will be at most the time permitted by the underlying HTTP/RPC protocol.
#     If RPC context deadline is also specified, the shorter one will be used.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::Operation]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::Operation]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
def wait_operation request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Longrunning::WaitOperationRequest

  # Normalize caller-supplied options (Hash or nil) into CallOptions.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  rpc_defaults = @config.rpcs.wait_operation

  # Per-call gRPC headers: client identification plus quota project.
  # (wait_operation carries no implicit routing parameters.)
  call_metadata = rpc_defaults.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Firestore::Admin::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Caller options win over per-RPC defaults, which in turn win over
  # client-wide defaults.
  options.apply_defaults timeout: rpc_defaults.timeout,
                         metadata: call_metadata,
                         retry_policy: rpc_defaults.retry_policy
  options.apply_defaults metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @operations_stub.call_rpc :wait_operation, request, options: options do |response, operation|
    # Wrap the raw Longrunning::Operation so callers get polling helpers.
    response = ::Gapic::Operation.new response, @operations_client, options: options
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Configuration class for the Operations API.
#
# This class represents the configuration for Operations,
# providing control over timeouts, retry behavior, logging, transport
# parameters, and other low-level controls. Certain parameters can also be
# applied individually to specific RPCs. See
# {::Google::Longrunning::Operations::Client::Configuration::Rpcs}
# for a list of RPCs that can be configured independently.
#
# Configuration can be applied globally to all clients, or to a single client
# on construction.
#
# # Examples
#
# To modify the global config, setting the timeout for list_operations
# to 20 seconds, and all remaining timeouts to 10 seconds:
#
# ::Google::Longrunning::Operations::Client.configure do |config|
# config.timeout = 10.0
# config.rpcs.list_operations.timeout = 20.0
# end
#
# To apply the above configuration only to a new client:
#
# client = ::Google::Longrunning::Operations::Client.new do |config|
# config.timeout = 10.0
# config.rpcs.list_operations.timeout = 20.0
# end
#
# @!attribute [rw] endpoint
# The hostname or hostname:port of the service endpoint.
# Defaults to `"firestore.googleapis.com"`.
# @return [::String]
# @!attribute [rw] credentials
# Credentials to send with calls. You may provide any of the following types:
# * (`String`) The path to a service account key file in JSON format
# * (`Hash`) A service account key as a Hash
# * (`Google::Auth::Credentials`) A googleauth credentials object
# (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
# * (`Signet::OAuth2::Client`) A signet oauth2 client object
# (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
# * (`GRPC::Core::Channel`) a gRPC channel with included credentials
# * (`GRPC::Core::ChannelCredentials`) a gRPC credentials object
# * (`nil`) indicating no credentials
# @return [::Object]
# @!attribute [rw] scope
# The OAuth scopes
# @return [::Array<::String>]
# @!attribute [rw] lib_name
# The library name as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] lib_version
# The library version as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] channel_args
# Extra parameters passed to the gRPC channel. Note: this is ignored if a
# `GRPC::Core::Channel` object is provided as the credential.
# @return [::Hash]
# @!attribute [rw] interceptors
# An array of interceptors that are run before calls are executed.
# @return [::Array<::GRPC::ClientInterceptor>]
# @!attribute [rw] timeout
# The call timeout in seconds.
# @return [::Numeric]
# @!attribute [rw] metadata
# Additional gRPC headers to be sent with the call.
# @return [::Hash{::Symbol=>::String}]
# @!attribute [rw] retry_policy
# The retry policy. The value is a hash with the following keys:
# * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
# * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
# * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
# * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
# trigger a retry.
# @return [::Hash]
# @!attribute [rw] quota_project
# A separate project against which to charge quota.
# @return [::String]
#
class Configuration
  extend ::Gapic::Config

  # Default service endpoint; override for regional or private endpoints.
  config_attr :endpoint, "firestore.googleapis.com", ::String
  # Credentials may be supplied in several forms; the validator block
  # accepts any of the documented types (plus gRPC channel objects when
  # gRPC is loaded) and rejects everything else at assignment time.
  config_attr :credentials, nil do |value|
    allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
    allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
    allowed.any? { |klass| klass === value }
  end
  config_attr :scope, nil, ::String, ::Array, nil
  config_attr :lib_name, nil, ::String, nil
  config_attr :lib_version, nil, ::String, nil
  # Disable gRPC's own service-config resolution by default; retry/timeout
  # policy is driven by this client configuration instead.
  config_attr(:channel_args, { "grpc.service_config_disable_resolution" => 1 }, ::Hash, nil)
  config_attr :interceptors, nil, ::Array, nil
  config_attr :timeout, nil, ::Numeric, nil
  config_attr :metadata, nil, ::Hash, nil
  config_attr :retry_policy, nil, ::Hash, ::Proc, nil
  config_attr :quota_project, nil, ::String, nil

  # @private
  # Chains this configuration to a parent so unset values fall through.
  def initialize parent_config = nil
    @parent_config = parent_config unless parent_config.nil?
    yield self if block_given?
  end

  ##
  # Configurations for individual RPCs
  # @return [Rpcs]
  #
  def rpcs
    @rpcs ||= begin
      parent_rpcs = nil
      parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config.respond_to?(:rpcs)
      Rpcs.new parent_rpcs
    end
  end

  ##
  # Configuration RPC class for the Operations API.
  #
  # Includes fields providing the configuration for each RPC in this service.
  # Each configuration object is of type `Gapic::Config::Method` and includes
  # the following configuration fields:
  #
  # * `timeout` (*type:* `Numeric`) - The call timeout in seconds
  # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
  # * `retry_policy` (*type:* `Hash`) - The retry policy. The policy fields
  #   include the following keys:
  #     * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
  #     * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
  #     * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
  #     * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
  #       trigger a retry.
  #
  class Rpcs
    ##
    # RPC-specific configuration for `list_operations`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :list_operations
    ##
    # RPC-specific configuration for `get_operation`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :get_operation
    ##
    # RPC-specific configuration for `delete_operation`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :delete_operation
    ##
    # RPC-specific configuration for `cancel_operation`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :cancel_operation
    ##
    # RPC-specific configuration for `wait_operation`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :wait_operation

    # @private
    # Each per-RPC config inherits from the corresponding parent RPC config
    # when one is supplied, so settings cascade from parent configurations.
    def initialize parent_rpcs = nil
      list_operations_config = parent_rpcs.list_operations if parent_rpcs.respond_to? :list_operations
      @list_operations = ::Gapic::Config::Method.new list_operations_config
      get_operation_config = parent_rpcs.get_operation if parent_rpcs.respond_to? :get_operation
      @get_operation = ::Gapic::Config::Method.new get_operation_config
      delete_operation_config = parent_rpcs.delete_operation if parent_rpcs.respond_to? :delete_operation
      @delete_operation = ::Gapic::Config::Method.new delete_operation_config
      cancel_operation_config = parent_rpcs.cancel_operation if parent_rpcs.respond_to? :cancel_operation
      @cancel_operation = ::Gapic::Config::Method.new cancel_operation_config
      wait_operation_config = parent_rpcs.wait_operation if parent_rpcs.respond_to? :wait_operation
      @wait_operation = ::Gapic::Config::Method.new wait_operation_config
      yield self if block_given?
    end
  end
end
end
end
end
end
end
end
end
| 53.805471 | 168 | 0.555728 |
d57544b0f773da4108aea433e00bc894fd0d8c23 | 1,471 | require 'rspec'
require 'bosh/template/test'
require_relative '../spec_helper'
require_relative '../collector_config'
require_relative '../common_config'
require_relative '../elasticsearch_config'
describe 'jaeger-collector' do
job_name = 'jaeger-collector'

# Render the job's BPM config from the release at the repository root, so
# templates are evaluated exactly as they would be at deploy time.
let(:release) { Bosh::Template::Test::ReleaseDir.new(File.join(File.dirname(__FILE__), '../..')) }
let(:job) { release.job(job_name) }
let(:template) { job.template('config/bpm.yml') }
# Default to an Elasticsearch-backed configuration (helper from spec_helper).
let(:config) { es_config }

# Shared examples covering behaviour common to all jaeger jobs.
it_should_behave_like 'a jaeger component', job_name
it_should_behave_like 'a collector', job_name
it_should_behave_like 'an elasticsearch connected component', job_name

it 'uses the correct admin port' do
  args = get_process_from_bpm(YAML::load(template.render(config)), job_name)['args']
  expect(args).to include '--admin-http-port=14270'
end

it 'only allows supported span_storage_types' do
  # Only elasticsearch is valid for the collector; in-process backends
  # (memory, badger) and unknown values must be rejected at render time.
  expect{ template.render({'span_storage_type' => 'elasticsearch', 'es' => { 'server-urls' => ['foo']}}) }.not_to raise_error
  expect{ template.render({'span_storage_type' => 'memory'}) }.to raise_error('memory is not a supported span_storage_type for jaeger-collector')
  expect{ template.render({'span_storage_type' => 'badger'}) }.to raise_error('badger is not a supported span_storage_type for jaeger-collector')
  expect{ template.render({'span_storage_type' => 'foo'}) }.to raise_error('foo is not a supported span_storage_type for jaeger-collector')
end
end | 49.033333 | 147 | 0.737593 |
f7a7d375a6c4b9df20f5a7d13f604c765a4761e4 | 131 | class ChangeNameOnWastePickers < ActiveRecord::Migration
# Adds a NOT NULL constraint to waste_pickers.name.
def change
  # change_column_null is reversible, so rollback restores the nullable
  # column. NOTE(review): this migration will fail if existing rows hold
  # NULL names — backfill first, or pass a default as the 4th argument.
  change_column_null :waste_pickers, :name, false
end
end
| 21.833333 | 56 | 0.801527 |
7945b37ab28ccc1f02504acbdfaa9c443f5b4d4e | 3,387 | module SpreeSitemap::SpreeDefaults
include Spree::Core::Engine.routes.url_helpers
include Spree::BaseHelper # for meta_data

# SitemapGenerator renders route helpers outside a request cycle, so the
# host must be supplied explicitly.
def default_url_options
  { host: SitemapGenerator::Sitemap.default_host }
end

def add_login(options = {})
  add(login_path, options)
end

def add_signup(options = {})
  add(signup_path, options)
end

def add_account(options = {})
  add(account_path, options)
end

def add_password_reset(options = {})
  add(new_spree_user_password_path, options)
end

# Adds the products index plus one entry per active product; the index's
# lastmod is the most recent product update.
def add_products(options = {})
  active_products = Spree::Product.active.uniq

  add(products_path, options.merge(lastmod: active_products.last_updated))
  active_products.each do |product|
    add_product(product, options)
  end
end

# Adds a single product URL. When spree_videos is installed and the
# product has videos, attaches video-sitemap metadata for the first one.
def add_product(product, options = {})
  opts = options.merge(lastmod: product.updated_at)

  if gem_available?('spree_videos') && product.videos.present?
    # TODO: add exclusion list configuration option
    # https://sites.google.com/site/webmasterhelpforum/en/faq-video-sitemaps#multiple-pages
    # don't include all the videos on the page to avoid duplicate title warnings
    primary_video = product.videos.first
    opts.merge!(video: [video_options(primary_video.youtube_ref, product)])
  end

  add(product_path(product), opts)
end

# Adds static CMS pages from whichever CMS extension is installed.
def add_pages(options = {})
  # TODO: this should be refactored to add_pages & add_page
  Spree::Page.active.each do |page|
    add(page.path, options.merge(lastmod: page.updated_at))
  end if gem_available? 'spree_essential_cms'

  Spree::Page.visible.each do |page|
    add(page.slug, options.merge(lastmod: page.updated_at))
  end if gem_available? 'spree_static_content'
end

def add_taxons(options = {})
  Spree::Taxon.roots.each { |taxon| add_taxon(taxon, options) }
end

# Recursively adds a taxon and all of its descendants. Taxons without a
# permalink are skipped, but their children are still visited.
def add_taxon(taxon, options = {})
  add(nested_taxons_path(taxon.permalink), options.merge(lastmod: taxon.products.last_updated)) if taxon.permalink.present?
  taxon.children.each { |child| add_taxon(child, options) }
end

# Truthy when the named gem is installed (returns the Gem::Specification).
# The bare rescue falls back to the deprecated Gem.available? for very old
# RubyGems versions that raise something other than Gem::LoadError.
def gem_available?(name)
  Gem::Specification.find_by_name(name)
rescue Gem::LoadError
  false
rescue
  Gem.available?(name)
end

def main_app
  Rails.application.routes.url_helpers
end

private

##
# Multiple videos of the same ID can exist, but all videos linked in the sitemap should be unique
#
# Required video fields:
# http://www.seomoz.org/blog/video-sitemap-guide-for-vimeo-and-youtube
#
# YouTube thumbnail images:
# http://www.reelseo.com/youtube-thumbnail-image/
#
# NOTE title should match the page title, however the title generation isn't self-contained
# although not a future proof solution, the best (+ easiest) solution is to mimic the title for product pages
# https://github.com/spree/spree/blob/1-3-stable/core/lib/spree/core/controller_helpers/common.rb#L39
# https://github.com/spree/spree/blob/1-3-stable/core/app/controllers/spree/products_controller.rb#L41
#
def video_options(youtube_id, object = false)
  # The rescue-modifiers deliberately swallow failures so a product with
  # missing meta data still yields valid (if sparser) video attributes.
  ({ description: meta_data(object)[:description] } rescue {}).merge(
    ({ title: [Spree::Config[:site_name], object.name].join(' - ') } rescue {})
  ).merge(
    thumbnail_loc: "http://img.youtube.com/vi/#{youtube_id}/0.jpg",
    player_loc: "http://www.youtube.com/v/#{youtube_id}",
    autoplay: 'ap=1'
  )
end
end
| 31.361111 | 125 | 0.709182 |
1a3a9fc15abdc3416a154bd1b52a3725c8f9d772 | 1,751 | module Carto::Limits
# Soft/hard limit predicates for metered services. A "soft" limit means
# the user may exceed the quota; "hard" means requests are blocked. For
# most services the soft flag is resolved through Carto::AccountType, and
# each hard_* predicate is the strict negation of its soft_* counterpart,
# so both are generated from one list. Question-mark-less aliases mirror
# the original hand-written API.
%w[geocoding here_isolines obs_snapshot obs_general mapzen_routing].each do |service|
  soft_query = "soft_#{service}_limit?"
  hard_query = "hard_#{service}_limit?"

  # e.g. soft_geocoding_limit? — asks the account type about this user.
  define_method(soft_query) do
    Carto::AccountType.new.public_send(soft_query, self)
  end
  alias_method soft_query.chomp("?"), soft_query

  # e.g. hard_geocoding_limit? — complement of the soft predicate.
  define_method(hard_query) do
    !public_send(soft_query)
  end
  alias_method hard_query.chomp("?"), hard_query
end

# Twitter is special-cased: the soft flag lives directly on the record
# rather than on the account type, and only the hard predicate is aliased.
def soft_twitter_datasource_limit?
  soft_twitter_datasource_limit == true
end

def hard_twitter_datasource_limit?
  !soft_twitter_datasource_limit?
end
alias_method :hard_twitter_datasource_limit, :hard_twitter_datasource_limit?
end
| 28.704918 | 78 | 0.816105 |
28ee210186a5a8b3563d6cf47ddf29ae341bfc9d | 1,637 | RSpec.describe StackMaster::TemplateCompilers::Cfndsl do
let(:compile_time_parameters) { {'InstanceType' => 't2.medium'} }

# Compiler dependencies (the cfndsl gem) are loaded once for the group.
before(:all) { described_class.require_dependencies }

let(:template_dir) { 'spec/fixtures/templates/rb/cfndsl/' }

describe '.compile' do
  # A plain helper (rather than a `let`) so examples can mutate
  # compile_time_parameters between successive compilations.
  def compile
    described_class.compile(template_dir, template, compile_time_parameters)
  end

  context 'valid cfndsl template' do
    let(:template) { 'sample.rb' }
    let(:valid_compiled_json_path) { 'spec/fixtures/templates/rb/cfndsl/sample.json' }

    it 'produces valid JSON' do
      # Compare parsed structures so key ordering/whitespace don't matter.
      valid_compiled_json = File.read(valid_compiled_json_path)
      expect(JSON.parse(compile)).to eq(JSON.parse(valid_compiled_json))
    end
  end

  context 'with compile time parameters' do
    let(:template) { 'sample-ctp.rb' }
    let(:valid_compiled_json_path) { 'spec/fixtures/templates/rb/cfndsl/sample-ctp.json' }

    it 'produces valid JSON' do
      valid_compiled_json = File.read(valid_compiled_json_path)
      expect(JSON.parse(compile)).to eq(JSON.parse(valid_compiled_json))
    end

    context 'compiling multiple times' do
      let(:compile_time_parameters) { {'InstanceType' => 't2.medium', 'DisableApiTermination' => 'true'} }
      let(:template) { 'sample-ctp-repeated.rb' }

      it 'does not leak compile time params across invocations' do
        # Removing a param before a second compile must actually remove it
        # from the output — i.e. no state cached from the first compile.
        expect {
          compile_time_parameters.delete("DisableApiTermination")
        }.to change { JSON.parse(compile)["Resources"]["MyInstance"]["Properties"]["DisableApiTermination"] }.from('true').to(nil)
      end
    end
  end
end
end
| 36.377778 | 132 | 0.681735 |
1abc7742450c87580951b2bd3cdbf7f83b9231e8 | 2,311 | class Mdk < Formula
desc "GNU MIX development kit"
homepage "https://www.gnu.org/software/mdk/mdk.html"
# NOTE(review): ftpmirror.gnu.org also serves https; consider switching
# the url scheme on the next version bump (brew audit prefers https).
url "http://ftpmirror.gnu.org/mdk/v1.2.8/mdk-1.2.8.tar.gz"
mirror "https://ftp.gnu.org/gnu/mdk/v1.2.8/mdk-1.2.8.tar.gz"
sha256 "7bff1e10b829c6e1f3c278bfecbe82f0f658753ce80ea58b6f71c05f9490b0db"
revision 1

bottle do
  revision 1
  sha256 "5b24eaf48d048cd8d482ec4b28dc093e5e75fe4e3430934d8930b64d003d7b5c" => :yosemite
  sha256 "73dee962b53150de146a017161004995a341a7239554b46a5f895c31aea80755" => :mavericks
  sha256 "76631db9d820f00fa4f807c9316c08b20a54151827f5b40d7b49a5608dbba547" => :mountain_lion
end

# GUI (gmixvm) and scripting dependencies.
depends_on "gtk+"
depends_on "libglade"
depends_on "glib"
depends_on "flex"
depends_on "guile"
depends_on "intltool" => :build
depends_on "pkg-config" => :build

def install
  # Standard autotools flow.
  system "./configure", "--disable-debug",
                        "--disable-dependency-tracking",
                        "--disable-silent-rules",
                        "--prefix=#{prefix}"
  system "make", "install"
end

test do
  # Assemble and run a classic MIXAL "hello world", then compare the
  # emulator's console output byte for byte.
  (testpath/"hello.mixal").write <<-EOS.undent
    * (1)
    * hello.mixal: say "hello world" in MIXAL (2)
    * (3)
    * label ins operand comment (4)
    TERM EQU 19 the MIX console device number (5)
    ORIG 1000 start address (6)
    START OUT MSG(TERM) output data at address MSG (7)
    HLT halt execution (8)
    MSG ALF "MIXAL" (9)
    ALF " HELL" (10)
    ALF "O WOR" (11)
    ALF "LD" (12)
    END START end of the program (13)
  EOS
  system "#{bin}/mixasm", "hello"
  output = `#{bin}/mixvm -r hello`
  expected = <<-EOS.undent
    Program loaded. Start address: 1000
    Running ...
    MIXAL HELLO WORLDXXX
    ... done
  EOS
  # The MIX console pads the output line with trailing blanks; XXX stands
  # in for 53 spaces so the heredoc stays readable.
  expected = expected.gsub("XXX", " " *53)
  assert_equal expected, output
end
end
| 37.274194 | 95 | 0.509303 |
7a98a1735bffd36288388c968a0c250d5ff01e40 | 4,050 | # typed: false
require 'datadog/core'
require 'datadog/tracing/pipeline'
module Datadog
  # Datadog APM tracing public API.
  #
  # The Datadog team ensures that public methods in this module
  # only receive backwards compatible changes, and breaking changes
  # will only occur in new major version releases.
  # @public_api
  module Tracing
    class << self
      # (see Datadog::Tracing::Tracer#trace)
      # @public_api
      def trace(name, continue_from: nil, **span_options, &block)
        tracer.trace(name, continue_from: continue_from, **span_options, &block)
      end

      # (see Datadog::Tracing::Tracer#continue_trace!)
      # @public_api
      def continue_trace!(digest, &block)
        tracer.continue_trace!(digest, &block)
      end

      # The tracer's internal logger instance.
      # All tracing log output is handled by this object.
      #
      # The logger can be configured through {.configure},
      # through {Datadog::Core::Configuration::Settings::DSL::Logger} options.
      #
      # @!attribute [r] logger
      # @public_api
      def logger
        Datadog.logger
      end

      # (see Datadog::Tracing::Tracer#active_trace)
      # @public_api
      def active_trace
        current_tracer = tracer
        return unless current_tracer

        current_tracer.active_trace
      end

      # (see Datadog::Tracing::Tracer#active_span)
      # @public_api
      def active_span
        current_tracer = tracer
        return unless current_tracer

        current_tracer.active_span
      end

      # (see Datadog::Tracing::TraceSegment#keep!)
      # If no trace is active, no action is taken.
      # @public_api
      def keep!
        trace = active_trace
        # Operate on the captured trace: re-reading `active_trace` after the
        # nil check double-evaluates the lookup and can race with trace
        # completion, defeating the guard.
        trace.keep! if trace
      end

      # (see Datadog::Tracing::TraceSegment#reject!)
      # If no trace is active, no action is taken.
      # @public_api
      def reject!
        trace = active_trace
        # See keep!: use the captured trace, not a fresh lookup.
        trace.reject! if trace
      end

      # (see Datadog::Tracing::Tracer#active_correlation)
      # @public_api
      def correlation
        current_tracer = tracer
        return unless current_tracer

        current_tracer.active_correlation
      end

      # Textual representation of {.correlation}, which can be
      # added to individual log lines in order to correlate them with the active
      # trace.
      #
      # Example:
      #
      # ```
      # MyLogger.log("#{Datadog::Tracing.log_correlation}] My message")
      # # dd.env=prod dd.service=auth dd.version=13.8 dd.trace_id=5458478252992251 dd.span_id=7117552347370098 My message
      # ```
      #
      # @return [String] correlation information
      # @public_api
      def log_correlation
        correlation.to_log_format
      end

      # Gracefully shuts down the tracer.
      #
      # The public tracing API will still respond to method calls as usual
      # but might not internally perform the expected internal work after shutdown.
      #
      # This avoids errors being raised across the host application
      # during shutdown while allowing for the graceful decommission of resources.
      #
      # {.shutdown!} cannot be reversed.
      # @public_api
      def shutdown!
        current_tracer = tracer
        return unless current_tracer

        current_tracer.shutdown!
      end

      # (see Datadog::Tracing::Pipeline.before_flush)
      def before_flush(*processors, &processor_block)
        Pipeline.before_flush(*processors, &processor_block)
      end

      # Is the tracer collecting telemetry data in this process?
      # @return [Boolean] `true` if the tracer is collecting data in this process, otherwise `false`.
      def enabled?
        current_tracer = tracer
        return false unless current_tracer

        current_tracer.enabled
      end

      private

      # DEV: components hosts both tracing and profiling inner objects today
      def components
        Datadog.send(:components)
      end

      def tracer
        components.tracer
      end
    end
  end
end
| 28.321678 | 121 | 0.64321 |
081cb2d5632591be2fe2e0f928bcd1c7df620444 | 539 | require "rails_helper"
describe LieutenantAssignmentCollection do
  subject { described_class.new(params) }

  let(:form_answer) { create(:form_answer) }
  # Eagerly created (let!) so the county exists before `subject` is built.
  let!(:ceremonial_county) { create(:ceremonial_county) }

  context "Single assignment" do
    # IDs arrive as strings, mirroring how controller params are posted.
    let(:params) do
      {
        form_answer_ids: form_answer.id.to_s,
        ceremonial_county_id: ceremonial_county.id.to_s
      }
    end

    it "assigns the assessor" do
      subject.save
      expect(form_answer.reload.ceremonial_county).to eq(ceremonial_county)
    end
  end
end
| 24.5 | 75 | 0.706865 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.