hexsha (stringlengths 40 to 40) | size (int64 2 to 1.01M) | content (stringlengths 2 to 1.01M) | avg_line_length (float64 1.5 to 100) | max_line_length (int64 2 to 1k) | alphanum_fraction (float64 0.25 to 1) |
---|---|---|---|---|---|
f735a0dbf783c583629488fa54c4563c2026a359 | 135 | module Ecm
module UserArea
module Backend
class ApplicationController < ActionController::Base
end
end
end
end
| 15 | 58 | 0.696296 |
e9f6852d8bc11cc43414f7266dafdc8d307865fa | 307 | class RemoveStartupLinkFieldsFromUser < ActiveRecord::Migration
def up
remove_columns :users, :startup_link_verifier_id, :startup_verifier_token
end
def down
add_column :users, :startup_verifier_token, :string
add_column :users, :startup_link_verifier_id, :integer, index: true
end
end
| 27.909091 | 77 | 0.785016 |
1d591f36cf5ebd886789ecdbbee2462d0ad8744a | 3,005 | class Cockroach < Formula
desc "Distributed SQL database"
homepage "https://www.cockroachlabs.com"
url "https://binaries.cockroachdb.com/cockroach-v2.0.4.src.tgz"
version "2.0.4"
sha256 "3636017029fccf48b23ee1c45a3412adc36803f53df254035e6e2f82af45fb50"
head "https://github.com/cockroachdb/cockroach.git"
bottle do
cellar :any_skip_relocation
sha256 "c80ad073bd32292b6444204d53d1b63210834201de5d2b87866bd1de82443557" => :high_sierra
sha256 "e9a72d84326dc30475cc1d79423e55c425f82fd9bc1fb2f02b49cee7241c7ed1" => :sierra
sha256 "ca3a5c559c14a895fdb519d1049fd13ffba2a20eb2ed34a3f57abe8ad94a7320" => :el_capitan
end
depends_on "autoconf" => :build
depends_on "cmake" => :build
depends_on "go" => :build
depends_on "xz" => :build
def install
system "make", "install", "prefix=#{prefix}"
end
def caveats; <<~EOS
For local development only, this formula ships a launchd configuration to
start a single-node cluster that stores its data under:
#{var}/cockroach/
Instead of the default port of 8080, the node serves its admin UI at:
#{Formatter.url("http://localhost:26256")}
Do NOT use this cluster to store data you care about; it runs in insecure
mode and may expose data publicly in e.g. a DNS rebinding attack. To run
CockroachDB securely, please see:
#{Formatter.url("https://www.cockroachlabs.com/docs/secure-a-cluster.html")}
EOS
end
plist_options :manual => "cockroach start --insecure"
def plist; <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/cockroach</string>
<string>start</string>
<string>--store=#{var}/cockroach/</string>
<string>--http-port=26256</string>
<string>--insecure</string>
<string>--host=localhost</string>
</array>
<key>WorkingDirectory</key>
<string>#{var}</string>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
</dict>
</plist>
EOS
end
test do
begin
# Redirect stdout and stderr to a file, or else `brew test --verbose`
# will hang forever as it waits for stdout and stderr to close.
system "#{bin}/cockroach start --insecure --background &> start.out"
pipe_output("#{bin}/cockroach sql --insecure", <<~EOS)
CREATE DATABASE bank;
CREATE TABLE bank.accounts (id INT PRIMARY KEY, balance DECIMAL);
INSERT INTO bank.accounts VALUES (1, 1000.50);
EOS
output = pipe_output("#{bin}/cockroach sql --insecure --format=csv",
"SELECT * FROM bank.accounts;")
assert_equal <<~EOS, output
id,balance
1,1000.50
EOS
ensure
system "#{bin}/cockroach", "quit", "--insecure"
end
end
end
| 33.764045 | 106 | 0.661564 |
790a94c14e6c07c3cc4b1629bf972fc59745e985 | 8,708 | =begin
#SendinBlue API
#SendinBlue provide a RESTFul API that can be used with any languages. With this API, you will be able to : - Manage your campaigns and get the statistics - Manage your contacts - Send transactional Emails and SMS - and much more... You can download our wrappers at https://github.com/orgs/sendinblue **Possible responses** | Code | Message | | :-------------: | ------------- | | 200 | OK. Successful Request | | 201 | OK. Successful Creation | | 202 | OK. Request accepted | | 204 | OK. Successful Update/Deletion | | 400 | Error. Bad Request | | 401 | Error. Authentication Needed | | 402 | Error. Not enough credit, plan upgrade needed | | 403 | Error. Permission denied | | 404 | Error. Object does not exist | | 405 | Error. Method not allowed | | 406 | Error. Not Acceptable |
OpenAPI spec version: 3.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.18
=end
require 'date'
module SibApiV3Sdk
class GetTransacSmsReportReports
# Date for which statistics are retrieved
attr_accessor :date
# Number of requests for the date
attr_accessor :requests
# Number of delivered SMS for the date
attr_accessor :delivered
# Number of hardbounces for the date
attr_accessor :hard_bounces
# Number of softbounces for the date
attr_accessor :soft_bounces
# Number of blocked contact for the date
attr_accessor :blocked
# Number of unsubscription for the date
attr_accessor :unsubscribed
# Number of answered SMS for the date
attr_accessor :replied
# Number of accepted for the date
attr_accessor :accepted
# Number of rejected for the date
attr_accessor :rejected
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'date' => :'date',
:'requests' => :'requests',
:'delivered' => :'delivered',
:'hard_bounces' => :'hardBounces',
:'soft_bounces' => :'softBounces',
:'blocked' => :'blocked',
:'unsubscribed' => :'unsubscribed',
:'replied' => :'replied',
:'accepted' => :'accepted',
:'rejected' => :'rejected'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'date' => :'Date',
:'requests' => :'Integer',
:'delivered' => :'Integer',
:'hard_bounces' => :'Integer',
:'soft_bounces' => :'Integer',
:'blocked' => :'Integer',
:'unsubscribed' => :'Integer',
:'replied' => :'Integer',
:'accepted' => :'Integer',
:'rejected' => :'Integer'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'date')
self.date = attributes[:'date']
end
if attributes.has_key?(:'requests')
self.requests = attributes[:'requests']
end
if attributes.has_key?(:'delivered')
self.delivered = attributes[:'delivered']
end
if attributes.has_key?(:'hardBounces')
self.hard_bounces = attributes[:'hardBounces']
end
if attributes.has_key?(:'softBounces')
self.soft_bounces = attributes[:'softBounces']
end
if attributes.has_key?(:'blocked')
self.blocked = attributes[:'blocked']
end
if attributes.has_key?(:'unsubscribed')
self.unsubscribed = attributes[:'unsubscribed']
end
if attributes.has_key?(:'replied')
self.replied = attributes[:'replied']
end
if attributes.has_key?(:'accepted')
self.accepted = attributes[:'accepted']
end
if attributes.has_key?(:'rejected')
self.rejected = attributes[:'rejected']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
    # Check to see if all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
date == o.date &&
requests == o.requests &&
delivered == o.delivered &&
hard_bounces == o.hard_bounces &&
soft_bounces == o.soft_bounces &&
blocked == o.blocked &&
unsubscribed == o.unsubscribed &&
replied == o.replied &&
accepted == o.accepted &&
rejected == o.rejected
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[date, requests, delivered, hard_bounces, soft_bounces, blocked, unsubscribed, replied, accepted, rejected].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = SibApiV3Sdk.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 31.550725 | 839 | 0.610358 |
f86939afdcd0951b47f5505afc2844da180d6fe7 | 943 | class Mosml < Formula
desc "Moscow ML"
homepage "http://mosml.org"
url "https://github.com/kfl/mosml/archive/ver-2.10.1.tar.gz"
sha256 "fed5393668b88d69475b070999b1fd34e902591345de7f09b236824b92e4a78f"
bottle do
sha256 "7a888abd233069f837cf9aba4021baa71387a4b720bc53323d40a963433b566a" => :high_sierra
sha256 "297c05c55f2784f3b934a2fdb3ec2f91d8b11a06453c8649c1f6562cefdc089e" => :sierra
sha256 "5dae62ca2034ba70844d684111cec58561895eac39db3177d439747512206002" => :el_capitan
sha256 "3a0289ba1b1a56cf3c2a598ccbee9b1739c7c35628a173dd00bd2f20fead6703" => :yosemite
sha256 "97ba76cf36e165dc798bdae33fc06c7c5954b1293686f43d2781b3130e75a119" => :mavericks
end
depends_on "gmp"
def install
cd "src" do
system "make", "PREFIX=#{prefix}", "CC=#{ENV.cc}", "world"
system "make", "PREFIX=#{prefix}", "CC=#{ENV.cc}", "install"
end
end
test do
system "#{bin}/mosml", "-P full"
end
end
| 33.678571 | 93 | 0.747614 |
f838a542cff4cd593222650f19207711b8719c0b | 6,121 | #
# shell.rb -
# $Release Version: 0.7 $
# $Revision: 1.9 $
# by Keiju ISHITSUKA([email protected])
#
# --
#
#
#
require "e2mmap"
require "thread" unless defined?(Mutex)
require "forwardable"
require "shell/error"
require "shell/command-processor"
require "shell/process-controller"
class Shell
@RCS_ID='-$Id: shell.rb,v 1.9 2002/03/04 12:01:10 keiju Exp keiju $-'
include Error
extend Exception2MessageMapper
# @cascade = true
# debug: true -> normal debug
# debug: 1 -> eval definition debug
# debug: 2 -> detail inspect debug
@debug = false
@verbose = true
@debug_display_process_id = false
@debug_display_thread_id = true
@debug_output_mutex = Mutex.new
class << Shell
extend Forwardable
attr_accessor :cascade, :debug, :verbose
# alias cascade? cascade
alias debug? debug
alias verbose? verbose
@verbose = true
def debug=(val)
@debug = val
@verbose = val if val
end
def cd(path)
new(path)
end
def default_system_path
if @default_system_path
@default_system_path
else
ENV["PATH"].split(":")
end
end
def default_system_path=(path)
@default_system_path = path
end
def default_record_separator
if @default_record_separator
@default_record_separator
else
$/
end
end
def default_record_separator=(rs)
@default_record_separator = rs
end
# os resource mutex
mutex_methods = ["unlock", "lock", "locked?", "synchronize", "try_lock", "exclusive_unlock"]
for m in mutex_methods
def_delegator("@debug_output_mutex", m, "debug_output_"+m.to_s)
end
end
def initialize(pwd = Dir.pwd, umask = nil)
@cwd = File.expand_path(pwd)
@dir_stack = []
@umask = umask
@system_path = Shell.default_system_path
@record_separator = Shell.default_record_separator
@command_processor = CommandProcessor.new(self)
@process_controller = ProcessController.new(self)
@verbose = Shell.verbose
@debug = Shell.debug
end
attr_reader :system_path
def system_path=(path)
@system_path = path
rehash
end
attr_accessor :umask, :record_separator
attr_accessor :verbose, :debug
def debug=(val)
@debug = val
@verbose = val if val
end
alias verbose? verbose
alias debug? debug
attr_reader :command_processor
attr_reader :process_controller
def expand_path(path)
File.expand_path(path, @cwd)
end
# Most Shell commands are defined via CommandProcessor
#
# Dir related methods
#
# Shell#cwd/dir/getwd/pwd
# Shell#chdir/cd
# Shell#pushdir/pushd
# Shell#popdir/popd
# Shell#mkdir
# Shell#rmdir
attr_reader :cwd
alias dir cwd
alias getwd cwd
alias pwd cwd
attr_reader :dir_stack
alias dirs dir_stack
# If called as iterator, it restores the current directory when the
# block ends.
def chdir(path = nil, verbose = @verbose)
check_point
if iterator?
notify("chdir(with block) #{path}") if verbose
cwd_old = @cwd
begin
chdir(path, nil)
yield
ensure
chdir(cwd_old, nil)
end
else
notify("chdir #{path}") if verbose
path = "~" unless path
@cwd = expand_path(path)
notify "current dir: #{@cwd}"
rehash
Void.new(self)
end
end
alias cd chdir
def pushdir(path = nil, verbose = @verbose)
check_point
if iterator?
notify("pushdir(with block) #{path}") if verbose
pushdir(path, nil)
begin
yield
ensure
popdir
end
elsif path
notify("pushdir #{path}") if verbose
@dir_stack.push @cwd
chdir(path, nil)
notify "dir stack: [#{@dir_stack.join ', '}]"
self
else
notify("pushdir") if verbose
if pop = @dir_stack.pop
@dir_stack.push @cwd
chdir pop
notify "dir stack: [#{@dir_stack.join ', '}]"
self
else
Shell.Fail DirStackEmpty
end
end
Void.new(self)
end
alias pushd pushdir
def popdir
check_point
notify("popdir")
if pop = @dir_stack.pop
chdir pop
notify "dir stack: [#{@dir_stack.join ', '}]"
self
else
Shell.Fail DirStackEmpty
end
Void.new(self)
end
alias popd popdir
#
# process management
#
def jobs
@process_controller.jobs
end
def kill(sig, command)
@process_controller.kill_job(sig, command)
end
#
# command definitions
#
def Shell.def_system_command(command, path = command)
CommandProcessor.def_system_command(command, path)
end
def Shell.undef_system_command(command)
CommandProcessor.undef_system_command(command)
end
def Shell.alias_command(ali, command, *opts, &block)
CommandProcessor.alias_command(ali, command, *opts, &block)
end
def Shell.unalias_command(ali)
CommandProcessor.unalias_command(ali)
end
def Shell.install_system_commands(pre = "sys_")
CommandProcessor.install_system_commands(pre)
end
#
def inspect
if debug.kind_of?(Integer) && debug > 2
super
else
to_s
end
end
def self.notify(*opts, &block)
Shell::debug_output_synchronize do
if opts[-1].kind_of?(String)
yorn = verbose?
else
yorn = opts.pop
end
return unless yorn
if @debug_display_thread_id
if @debug_display_process_id
prefix = "shell(##{Process.pid}:#{Thread.current.to_s.sub("Thread", "Th")}): "
else
prefix = "shell(#{Thread.current.to_s.sub("Thread", "Th")}): "
end
else
prefix = "shell: "
end
_head = true
STDERR.print opts.collect{|mes|
mes = mes.dup
yield mes if iterator?
if _head
_head = false
# "shell" " + mes
prefix + mes
else
" "* prefix.size + mes
end
}.join("\n")+"\n"
end
end
CommandProcessor.initialize
CommandProcessor.run_config
end
| 20.335548 | 97 | 0.618526 |
18750977e5c5f32393ac6a758811760589708b09 | 1,924 | module UserDocumentsController
extend ActiveSupport::Concern
included do
before_action :signed_in_user, :correct_user, except: [:show]
before_action :correct_user_or_shared_page, only: [:show]
before_action :assign_user
before_action :assign_user_document, except: [:new, :create]
end
def new
self.document = user_documents.build
end
def create
self.document = user_documents.build(document_params)
if self.document.save
redirect_to self.document.path
else
flash.now[:error] = self.document.errors.full_messages.first
render 'new'
end
end
def show
end
def edit
end
def update
if self.document.update_attributes(document_params)
redirect_to self.document.path
else
flash.now[:error] = self.document.errors.full_messages.first
render 'edit'
end
end
def destroy
self.document.destroy if self.document
respond_to do |format|
format.html { redirect_to root_url }
format.js { render nothing: true }
end
end
protected
# Getter for @<document type>.
def document
self.instance_variable_get("@#{controller_name.singularize}")
end
# Setter for @<document type>.
def document=(new_document)
self.instance_variable_set("@#{controller_name.singularize}", new_document)
end
# Requires the class to have method @<document type>_params.
def document_params
self.send("#{controller_name.singularize}_params")
end
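  # Illustrative only (not part of the original concern): a hypothetical host
  # controller that satisfies this contract by defining its own
  # <document type>_params method.
  #
  #   class ResumesController < ApplicationController
  #     include UserDocumentsController
  #
  #     private
  #
  #     def resume_params
  #       params.require(:resume).permit(:title, :body)
  #     end
  #   end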
  # Returns all the documents of the type that belong to @user.
def user_documents
@user.send(controller_name)
end
# Before actions
# Finds the document (that belongs to @user)
# by slug in params and assigns it to @<document type>.
def assign_user_document
self.document = user_documents.find_by(slug: params[:slug])
end
end
| 23.753086 | 81 | 0.677235 |
872f769e37338c93b77a4c42e67544265a9bab33 | 324 | # frozen_string_literal: true
class AddLatitudeAndLongitudeToAddressesCities < ActiveRecord::Migration[6.0]
def change
add_column :addresses_cities, :latitude, :float
add_index :addresses_cities, :latitude
add_column :addresses_cities, :longitude, :float
add_index :addresses_cities, :longitude
end
end
| 29.454545 | 77 | 0.783951 |
d51ee8b0b375dd9ca4740b14f27990d3e56f9f4d | 1,501 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Storage::Mgmt::V2018_07_01
module Models
#
# The response from the List Usages operation.
#
class UsageListResult
include MsRestAzure
# @return [Array<Usage>] Gets or sets the list of Storage Resource
# Usages.
attr_accessor :value
#
# Mapper for UsageListResult class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'UsageListResult',
type: {
name: 'Composite',
class_name: 'UsageListResult',
model_properties: {
value: {
client_side_validation: true,
required: false,
serialized_name: 'value',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'UsageElementType',
type: {
name: 'Composite',
class_name: 'Usage'
}
}
}
}
}
}
}
end
end
end
end
| 26.333333 | 72 | 0.49567 |
b935db137d0353ea812342a62e040e1f1c736a85 | 2,183 | module Fog
module Compute
class AWS
class Real
require 'fog/aws/parsers/compute/detach_volume'
# Detach an Amazon EBS volume from a running instance
#
# ==== Parameters
# * volume_id<~String> - Id of amazon EBS volume to associate with instance
# * options<~Hash>:
# * 'Device'<~String> - Specifies how the device is exposed to the instance (e.g. "/dev/sdh")
# * 'Force'<~Boolean> - If true forces detach, can cause data loss/corruption
# * 'InstanceId'<~String> - Id of instance to associate volume with
#
# ==== Returns
# * response<~Excon::Response>:
# * body<~Hash>:
# * 'attachTime'<~Time> - Time of attachment was initiated at
# * 'device'<~String> - Device as it is exposed to the instance
# * 'instanceId'<~String> - Id of instance for volume
# * 'requestId'<~String> - Id of request
# * 'status'<~String> - Status of volume
# * 'volumeId'<~String> - Reference to volume
#
# {Amazon API Reference}[http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-DetachVolume.html]
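        #
        # Illustrative usage sketch (not part of the original file; the
        # credentials and IDs below are placeholders):
        #
        #   compute = Fog::Compute.new(
        #     :provider => 'AWS',
        #     :aws_access_key_id => 'KEY',
        #     :aws_secret_access_key => 'SECRET'
        #   )
        #   compute.detach_volume('vol-12345678', 'InstanceId' => 'i-12345678', 'Force' => true)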
def detach_volume(volume_id, options = {})
request({
'Action' => 'DetachVolume',
'VolumeId' => volume_id,
:idempotent => true,
:parser => Fog::Parsers::Compute::AWS::DetachVolume.new
}.merge!(options))
end
end
class Mock
def detach_volume(volume_id, options = {})
response = Excon::Response.new
response.status = 200
if (volume = self.data[:volumes][volume_id]) && !volume['attachmentSet'].empty?
data = volume['attachmentSet'].pop
volume['status'] = 'available'
response.status = 200
response.body = {
'requestId' => Fog::AWS::Mock.request_id
}.merge!(data)
response
else
raise Fog::Compute::AWS::NotFound.new("The volume '#{volume_id}' does not exist.")
end
end
end
end
end
end
| 35.786885 | 131 | 0.551077 |
7909885f647ca5e7c55635f506951bfaaa113dc0 | 7,480 | module YARD::CodeObjects
# A "namespace" is any object that can store other objects within itself.
# The two main Ruby objects that can act as namespaces are modules
# ({ModuleObject}) and classes ({ClassObject}).
class NamespaceObject < Base
attr_writer :constants, :cvars, :mixins, :child, :meths
attr_writer :class_attributes, :instance_attributes
attr_writer :included_constants, :included_meths
# @return [Array<String>] a list of ordered group names inside the namespace
# @since 0.6.0
attr_accessor :groups
# The list of objects defined in this namespace
# @return [Array<Base>] a list of objects
attr_reader :children
# A hash containing two keys, class and instance, each containing
# the attribute name with a { :read, :write } hash for the read and
# write objects respectively.
#
# @example The attributes of an object
# >> Registry.at('YARD::Docstring').attributes
# => {
# :class => { },
# :instance => {
# :ref_tags => {
# :read => #<yardoc method YARD::Docstring#ref_tags>,
# :write => nil
# },
# :object => {
# :read => #<yardoc method YARD::Docstring#object>,
# :write => #<yardoc method YARD::Docstring#object=>
# },
# ...
# }
# }
# @return [Hash] a list of methods
attr_reader :attributes
# A hash containing two keys, :class and :instance, each containing
# a hash of objects and their alias names.
# @return [Hash] a list of methods
attr_reader :aliases
# Class mixins
# @return [Array<ModuleObject>] a list of mixins
attr_reader :class_mixins
# Instance mixins
# @return [Array<ModuleObject>] a list of mixins
attr_reader :instance_mixins
# Creates a new namespace object inside +namespace+ with +name+.
# @see Base#initialize
def initialize(namespace, name, *args, &block)
@children = CodeObjectList.new(self)
@class_mixins = CodeObjectList.new(self)
@instance_mixins = CodeObjectList.new(self)
@attributes = SymbolHash[:class => SymbolHash.new, :instance => SymbolHash.new]
@aliases = {}
@groups = []
super
end
# Only the class attributes
# @return [Hash] a list of method names and their read/write objects
# @see #attributes
def class_attributes
attributes[:class]
end
# Only the instance attributes
# @return [Hash] a list of method names and their read/write objects
# @see #attributes
def instance_attributes
attributes[:instance]
end
# Looks for a child that matches the attributes specified by +opts+.
#
# @example Finds a child by name and scope
# namespace.child(:name => :to_s, :scope => :instance)
# # => #<yardoc method MyClass#to_s>
# @return [Base, nil] the first matched child object, or nil
def child(opts = {})
if !opts.is_a?(Hash)
children.find {|o| o.name == opts.to_sym }
else
opts = SymbolHash[opts]
children.find do |obj|
opts.each do |meth, value|
break false if !(value.is_a?(Array) ? value.include?(obj[meth]) : obj[meth] == value)
end
end
end
end
# Returns all methods that match the attributes specified by +opts+. If
# no options are provided, returns all methods.
#
# @example Finds all private and protected class methods
# namespace.meths(:visibility => [:private, :protected], :scope => :class)
# # => [#<yardoc method MyClass.privmeth>, #<yardoc method MyClass.protmeth>]
# @option opts [Array<Symbol>, Symbol] :visibility ([:public, :private,
# :protected]) the visibility of the methods to list. Can be an array or
# single value.
# @option opts [Array<Symbol>, Symbol] :scope ([:class, :instance]) the
# scope of the methods to list. Can be an array or single value.
# @option opts [Boolean] :included (true) whether to include mixed in
# methods in the list.
# @return [Array<MethodObject>] a list of method objects
def meths(opts = {})
opts = SymbolHash[
:visibility => [:public, :private, :protected],
:scope => [:class, :instance],
:included => true
].update(opts)
opts[:visibility] = [opts[:visibility]].flatten
opts[:scope] = [opts[:scope]].flatten
ourmeths = children.select do |o|
o.is_a?(MethodObject) &&
opts[:visibility].include?(o.visibility) &&
opts[:scope].include?(o.scope)
end
ourmeths + (opts[:included] ? included_meths(opts) : [])
end
# Returns methods included from any mixins that match the attributes
# specified by +opts+. If no options are specified, returns all included
# methods.
#
# @option opts [Array<Symbol>, Symbol] :visibility ([:public, :private,
# :protected]) the visibility of the methods to list. Can be an array or
# single value.
# @option opts [Array<Symbol>, Symbol] :scope ([:class, :instance]) the
# scope of the methods to list. Can be an array or single value.
# @option opts [Boolean] :included (true) whether to include mixed in
# methods in the list.
# @see #meths
def included_meths(opts = {})
opts = SymbolHash[:scope => [:instance, :class]].update(opts)
[opts[:scope]].flatten.map do |scope|
mixins(scope).inject([]) do |list, mixin|
next list if mixin.is_a?(Proxy)
arr = mixin.meths(opts.merge(:scope => :instance)).reject do |o|
next false if opts[:all]
child(:name => o.name, :scope => scope) || list.find {|o2| o2.name == o.name }
end
arr.map! {|o| ExtendedMethodObject.new(o) } if scope == :class
list + arr
end
end.flatten
end
# Returns all constants in the namespace
#
# @option opts [Boolean] :included (true) whether or not to include
# mixed in constants in list
# @return [Array<ConstantObject>] a list of constant objects
def constants(opts = {})
opts = SymbolHash[:included => true].update(opts)
consts = children.select {|o| o.is_a? ConstantObject }
consts + (opts[:included] ? included_constants : [])
end
# Returns constants included from any mixins
# @return [Array<ConstantObject>] a list of constant objects
def included_constants
instance_mixins.inject([]) do |list, mixin|
if mixin.respond_to? :constants
list += mixin.constants.reject do |o|
child(:name => o.name) || list.find {|o2| o2.name == o.name }
end
else
list
end
end
end
# Returns class variables defined in this namespace.
# @return [Array<ClassVariableObject>] a list of class variable objects
def cvars
children.select {|o| o.is_a? ClassVariableObject }
end
    # Returns mixins for specific scopes. If no scopes are provided, returns all mixins.
# @param [Array<Symbol>] scopes a list of scopes (:class, :instance) to
# return mixins for. If this is empty, all scopes will be returned.
# @return [Array<ModuleObject>] a list of mixins
def mixins(*scopes)
return class_mixins if scopes == [:class]
return instance_mixins if scopes == [:instance]
class_mixins | instance_mixins
end
end
end
| 37.21393 | 97 | 0.617914 |
7adc29bde6f361c4c7b2461e7ef38e419d081e1b | 297 | require 'pry'
require 'active_record'
#uncomment to view SQl command
# ActiveRecord::Base.logger = Logger.new(STDERR)
require './db_config'
require './models/user'
require './models/book'
require './models/comment'
require './models/vote'
require './models/rank'
require './main.rb'
binding.pry | 19.8 | 48 | 0.737374 |
bfd72e32a3acbfcda76c93a79ce1ab3dc17ff3c5 | 459 | #!/usr/bin/env ruby
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
# Simple script to read a JSON document and export it to Facebook Instant
# Article.
#
# Usage:
#
# ./bin/article_json_export_facebook.rb < my_document.json
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
require_relative '../lib/article_json'
puts ArticleJSON::Article.from_json(ARGF.read).to_facebook_instant_article
| 28.6875 | 79 | 0.498911 |
26bff219ea9e38ac2b2bac3b380a3ef6c0d90a7d | 3,668 | # Constructs basic login/submission post requests (assumes protect_from_forgery
# is turned off)
if __FILE__ == $0
require 'net/http' # need net/http for constructing requests
require 'uri' # URI parsing lib
require 'fileutils'
require 'digest/md5' # required for boundary MD5 hashing
def get_file_header(file_name, boundary)
head = "--#{boundary}" + CRLF
head += "Content-Disposition: form-data; name=\"new_files[]\"; filename=\"#{file_name}\"" + CRLF
head += "Content-Type: application/x-ruby" + CRLF + CRLF
return head
end
if ARGV[0].nil?
puts "usage: construct_requests.rb APP_URI [students_logins_file]"
exit 1
end
# Constants:
# - URI's to post to
# - File paths, etc
APP_URI = ARGV[0]
LOGIN_URI = APP_URI + "/"
SUBMISSION_URI = APP_URI + "/main/submissions/file_manager/1"
HOME_REQUESTS = "requests"
POST_DIR = "posts"
LOGIN_DIR = "logins"
SUBMISSION_DIR = "submissions"
BOUNDARY_DIR = "boundaries"
COOKIES_DIR = "cookies"
STUDENTS_LIST_FILE = ARGV[1] || "student_logins.txt"
SUBMISSION_RES_DIR = File.join(File.dirname(__FILE__), "submission_files")
CRLF = "\r\n" # convenience constant
# cleanup from previous runs
if File.exist?(HOME_REQUESTS)
FileUtils.rm_r(HOME_REQUESTS)
end
  # create directory structure
FileUtils.mkdir_p(File.join(HOME_REQUESTS, POST_DIR, LOGIN_DIR))
FileUtils.mkdir_p(File.join(HOME_REQUESTS, POST_DIR, SUBMISSION_DIR))
FileUtils.mkdir_p(File.join(HOME_REQUESTS, COOKIES_DIR))
FileUtils.mkdir_p(File.join(HOME_REQUESTS, BOUNDARY_DIR))
students = File.new(File.join(File.dirname(__FILE__), STUDENTS_LIST_FILE)).readlines
students.each do |login|
# construct some login requests
login = login.strip
login_url = URI.parse(LOGIN_URI)
req = Net::HTTP::Get.new(login_url.path)
res = Net::HTTP.start(login_url.host, login_url.port) {|http|
http.request(req)
}
# get cookie
cookie = res.response['set-cookie']
cookie = cookie[0..(cookie.index(";")-1)]
post_request = "user_login=#{login}&user_password=somepassword"
post_request += "&commit=Log+in" + CRLF
# construct post-body-file for login
File.open(File.join(HOME_REQUESTS, POST_DIR, LOGIN_DIR, login), "w") { |file|
file.write(post_request)
}
# write cookie file for login
File.open(File.join(HOME_REQUESTS, COOKIES_DIR, login), "w") { |file|
file.write(cookie + "\n")
}
# submissions post requests
boundary = Digest::MD5.hexdigest(Time.now.to_f.to_s)
boundary = "---------------------------" + boundary
#boundary = Time.now.to_f.to_s.gsub(/\./, "")
submission_post_request_body = ""
# construct some submission request bodies
Dir.glob(File.join(SUBMISSION_RES_DIR, "*")).each { |file|
file_content = File.open(file).read
submission_post_request_body += get_file_header(URI.encode(File.basename(file)), boundary)
submission_post_request_body += file_content + CRLF + CRLF
}
submission_post_request_body += "--#{boundary}" + CRLF
submission_post_request_body += "Content-Disposition: form-data; name=\"commit\"" + CRLF + CRLF
submission_post_request_body += "Submit" + CRLF
submission_post_request_body += "--#{boundary}--" + CRLF# epilogue
# write submission post request to file
File.open(File.join(HOME_REQUESTS, POST_DIR, SUBMISSION_DIR, login), "w") { |file|
file.write(submission_post_request_body)
}
# write cookie file for login
File.open(File.join(HOME_REQUESTS, BOUNDARY_DIR, login), "w") { |file|
file.write(boundary + "\n")
}
end
end
| 35.960784 | 100 | 0.678844 |
6a94466193af88fbb191a80182bf5130137ed4bc | 126 | Then /^the notices have loaded$/ do
using_wait_time(10) do
expect(page).to have_no_content("Loading notices")
end
end
| 21 | 54 | 0.730159 |
5d6b454c07ede7f6224d2781ff4622c313d83c7e | 2,187 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
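# For example, on a fresh machine the database can be built directly from this
# file (assuming a standard Rails setup):
#   bin/rails db:schema:load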
ActiveRecord::Schema.define(version: 2019_07_23_222409) do
create_table "machines", force: :cascade do |t|
t.string "name"
t.integer "repetitions"
t.integer "sets"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "routines", force: :cascade do |t|
t.integer "user_id"
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["user_id"], name: "index_routines_on_user_id"
end
create_table "users", force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "name", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.string "provider"
t.string "uid"
t.index ["email"], name: "index_users_on_email", unique: true
t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
end
create_table "weights", force: :cascade do |t|
t.integer "routine_id"
t.integer "machine_id"
t.integer "weight"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["machine_id"], name: "index_weights_on_machine_id"
t.index ["routine_id"], name: "index_weights_on_routine_id"
end
end
| 38.368421 | 95 | 0.721994 |
62319c9f444f698837e4c42f16f1dcc1e1a7dda5 | 290 | #!/usr/bin/env ruby
srand(Time.now.to_i + Process.pid + 93) # seed the rand
def rgen()
r = rand(2887453441..2887485696)
  if (r < 2887483392 || r > 2887485695) && (r < 2887458816 || r > 2887459071)
return r
else
rgen()
end
end
ip = rgen()
puts [ip].pack('N').unpack('CCCC').join('.')
| 17.058824 | 72 | 0.627586 |
e2b94f624f77f74511ffa34a806e58b7be16ff09 | 909 |
require 'rails_helper'
describe 'People search' do
let(:user) { create :user_admin }
before :each do
sign_in(user)
end
  it 'searches for a person that exists' do
person1 = create :person
person2 = create :person
visit people_path
expect(page).to have_content person1.name
expect(page).to have_content person2.name
fill_in 'query', with: person1.name
click_on 'btn-search'
expect(page).to have_content person1.name
expect(page).to_not have_content person2.name
end
  it 'searches for a person that does not exist' do
person1 = create :person
person2 = create :person
visit people_path
expect(page).to have_content person1.name
expect(page).to have_content person2.name
    fill_in 'query', with: 'I do not exist'
click_on 'btn-search'
expect(page).to_not have_content person1.name
expect(page).to_not have_content person2.name
end
end | 23.307692 | 49 | 0.708471 |
79b7ce69961868b66ae347cbf765983c5efd93b1 | 1,627 | module DeclarableHelper
def declarable_stw_link(declarable, search, anchor = 'import')
geographical_area = GeographicalArea.find(search.country)
declarable_type = declarable.heading? ? 'heading' : 'commodity'
today = Time.zone.today
stw_options = {
commodity: declarable.code,
originCountry: search.country,
goodsIntent: 'bringGoodsToSell',
userTypeTrader: 'true',
tradeType: 'import',
destinationCountry: 'GB',
importDeclarations: 'yes',
importOrigin: nil,
importDateDay: search.day.presence || today.day,
importDateMonth: search.month.presence || today.month,
importDateYear: search.year.presence || today.year,
}
stw_link = "#{TradeTariffFrontend.single_trade_window_url}?#{CGI.unescape(stw_options.to_query)}"
link_to(
"check how to #{anchor} #{declarable_type} #{declarable.code} from #{geographical_area&.description}.",
stw_link,
target: '_blank',
class: 'govuk-link',
rel: 'noopener',
)
end
  # Supplementary unit measures treat a search with no country specified as applying to the entire world
def supplementary_geographical_area_id(search)
search.country || GeographicalArea::ERGA_OMNES
end
def trading_partner_country_description(geographical_area_id)
if geographical_area_id.present?
GeographicalArea.find(geographical_area_id).description
else
'All countries'
end
end
def declarable_url_json(declarable)
if declarable.heading?
heading_url(declarable, format: :json)
else
commodity_url(declarable, format: :json)
end
end
end
| 31.288462 | 109 | 0.704978 |
abd2bd82ea43a45a004391e53e9e5ced021b6420 | 1,932 | #
# Cookbook:: cerny-loadbalancer
# Recipe:: proxy
#
# Copyright:: 2018, Nathan Cerny
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
unless node['kernel']['release'].end_with?('pve')
include_recipe "#{cookbook_name}::deps"
package 'glb-redirect-iptables-dkms'
end
kernel_module 'fou'
systemd_unit 'glb-redirect.service' do
action [:create, :enable, :start]
content <<-EOU.gsub(/^\s+/, '')
[Unit]
Description=Configure GUE and IPTables Rules for GLB proxy layer.
After=network.target
[Service]
ExecStartPre=/bin/ip fou add port 19523 gue
ExecStartPre=/bin/ip link set up dev tunl0
ExecStart=/bin/ip addr add #{node['glb']['forwarding_table']['binds'].first} dev tunl0
ExecStartPost=/sbin/iptables -t raw -A PREROUTING -p udp -m udp --dport 19523 -j CT --notrack
ExecStartPost=-/sbin/iptables -A INPUT -p udp -m udp --dport 19523 -j GLBREDIRECT
ExecStopPre=/sbin/iptables -t raw -D PREROUTING -p udp -m udp --dport 19523 -j CT --notrack
ExecStopPre=-/sbin/iptables -t raw -D OUTPUT -p udp -m udp --dport 19523 -j CT --notrack
ExecStopPre=-/sbin/iptables -D INPUT -p udp -m udp --dport 19523 -j GLBREDIRECT
ExecStop=/bin/ip addr del #{node['glb']['forwarding_table']['binds'].first} dev tunl0
ExecStopPost=/bin/ip link set down dev tunl0
ExecStopPost=/bin/ip fou del port 19523 gue
RemainAfterExit=true
[Install]
WantedBy=multi-user.target
EOU
end
| 37.153846 | 97 | 0.713251 |
616e086406f03c553247f14f690596a446b5f2a1 | 598 | cask "preform" do
version "3.11.0,691"
sha256 "d69198762918892fca78567cb2b7b085981a97d1550c6081fded7452a18a6cb1"
url "https://s3.amazonaws.com/FormlabsReleases/Release/#{version.before_comma}/PreForm_#{version.before_comma}_release__build_#{version.after_comma}.dmg",
verified: "s3.amazonaws.com/FormlabsReleases/"
appcast "https://macupdater.net/cgi-bin/check_urls/check_url_redirect.cgi?url=https://formlabs.com/download-preform-mac/"
name "PreForm"
desc "3D printing setup, management, and monitoring"
homepage "https://formlabs.com/tools/preform/"
app "PreForm.app"
end
| 42.714286 | 156 | 0.780936 |
62cbee959c4132196089aba20abf0a013e0afbe3 | 319 | class AddDescriptionAndHintToErrorTemplate < ActiveRecord::Migration
def change
add_column :error_templates, :description, :text
add_column :error_templates, :hint, :text
add_column :error_template_attributes, :description, :text
add_column :error_template_attributes, :important, :boolean
end
end
| 31.9 | 68 | 0.786834 |
03373964d725d050a2298f10137880642ad023bc | 329 | cask 'amadeus-pro' do
version :latest
sha256 :no_check
# amazonaws.com/AmadeusPro2 was verified as official when first introduced to the cask
url 'https://s3.amazonaws.com/AmadeusPro2/AmadeusPro.dmg'
name 'Amadeus Pro'
homepage 'http://www.hairersoft.com/pro.html'
license :commercial
app 'Amadeus Pro.app'
end
| 25.307692 | 88 | 0.74772 |
1ae3c0e8ac2571fab5ccb76b2e243147dceb5b32 | 10,559 | # frozen_string_literal: true
require 'spec_helper'
describe Whois::Server do
describe ".load_json" do
it "loads a definition from a JSON file" do
expect(File).to receive(:read).with("tld.json").and_return(<<~JSON)
{
"ae.org": {
"host": "whois.centralnic.com"
},
"ar.com": {
"host": "whois.centralnic.com"
}
}
JSON
with_definitions do
described_class.load_json("tld.json")
expect(described_class.definitions(:tld)).to eq([
["ae.org", "whois.centralnic.com", {}],
["ar.com", "whois.centralnic.com", {}],
])
end
end
it "convert option keys to Symbol" do
expect(File).to receive(:read).with("tld.json").and_return(<<~JSON)
{
"com": {
"host": "whois.crsnic.net",
"adapter": "verisign"
}
}
JSON
with_definitions do
described_class.load_json("tld.json")
expect(described_class.definitions(:tld)).to eq([
["com", "whois.crsnic.net", { adapter: "verisign" }],
])
end
end
end
describe ".definitions" do
it "returns the definitions array for given type" do
with_definitions do
Whois::Server.define(Whois::Server::TYPE_TLD, "foo", "whois.foo")
definition = described_class.definitions(Whois::Server::TYPE_TLD)
expect(definition).to be_a(Array)
expect(definition).to eq([["foo", "whois.foo", {}]])
end
end
it "raises ArgumentError when the type is invalid" do
with_definitions do
expect {
described_class.definitions(:foo)
}.to raise_error(ArgumentError)
end
end
end
describe ".define" do
it "adds a new definition with given arguments" do
with_definitions do
Whois::Server.define(Whois::Server::TYPE_TLD, "foo", "whois.foo")
expect(described_class.definitions(Whois::Server::TYPE_TLD)).to eq([["foo", "whois.foo", {}]])
end
end
it "accepts a hash of options" do
with_definitions do
Whois::Server.define(Whois::Server::TYPE_TLD, "foo", "whois.foo", foo: "bar")
expect(described_class.definitions(Whois::Server::TYPE_TLD)).to eq([["foo", "whois.foo", { foo: "bar" }]])
end
end
end
describe ".factory" do
it "returns an adapter initialized with given arguments" do
server = Whois::Server.factory(:tld, "test", "whois.test")
expect(server.type).to eq(:tld)
expect(server.allocation).to eq("test")
expect(server.host).to eq("whois.test")
expect(server.options).to eq({})
end
it "returns a standard adapter by default" do
server = Whois::Server.factory(:tld, "test", "whois.test")
expect(server).to be_a(Whois::Server::Adapters::Standard)
end
it "accepts an :adapter option as Class and returns an instance of given adapter" do
a = Class.new do
attr_reader :args
def initialize(*args)
@args = args
end
end
server = Whois::Server.factory(:tld, "test", "whois.test", adapter: a)
expect(server).to be_a(a)
expect(server.args).to eq([:tld, "test", "whois.test", {}])
end
it "accepts an :adapter option as Symbol or String, load Class and returns an instance of given adapter" do
server = Whois::Server.factory(:tld, "test", "whois.test", adapter: :none)
expect(server).to be_a(Whois::Server::Adapters::None)
server = Whois::Server.factory(:tld, "test", "whois.test", adapter: "none")
expect(server).to be_a(Whois::Server::Adapters::None)
end
it "deletes the adapter option" do
server = Whois::Server.factory(:tld, "test", "whois.test", adapter: Whois::Server::Adapters::None, foo: "bar")
expect(server.options).to eq({ foo: "bar" })
end
end
describe ".guess" do
it "recognizes tld" do
server = Whois::Server.guess(".com")
expect(server).to be_a(Whois::Server::Adapters::Base)
expect(server.type).to eq(Whois::Server::TYPE_TLD)
end
it "recognizes domain" do
server = Whois::Server.guess("example.com")
expect(server).to be_a(Whois::Server::Adapters::Base)
expect(server.type).to eq(Whois::Server::TYPE_TLD)
end
it "recognizes ipv4" do
server = Whois::Server.guess("127.0.0.1")
expect(server).to be_a(Whois::Server::Adapters::Base)
expect(server.type).to eq(Whois::Server::TYPE_IPV4)
end
it "recognizes ipv6" do
server = Whois::Server.guess("2001:0db8:85a3:0000:0000:8a2e:0370:7334")
expect(server).to be_a(Whois::Server::Adapters::Base)
expect(server.type).to eq(Whois::Server::TYPE_IPV6)
end
it "recognizes ipv6 when zero groups" do
server = Whois::Server.guess("2002::1")
expect(server).to be_a(Whois::Server::Adapters::Base)
expect(server.type).to eq(Whois::Server::TYPE_IPV6)
end
it "recognizes asn16" do
server = Whois::Server.guess("AS23456")
expect(server).to be_a(Whois::Server::Adapters::Base)
expect(server.type).to eq(Whois::Server::TYPE_ASN16)
end
it "recognizes asn32" do
server = Whois::Server.guess("AS131072")
expect(server).to be_a(Whois::Server::Adapters::Base)
expect(server.type).to eq(Whois::Server::TYPE_ASN32)
end
it "recognizes email" do
expect {
Whois::Server.guess("[email protected]")
}.to raise_error(Whois::ServerNotSupported, /email/)
end
it "raises when unrecognized value" do
expect {
Whois::Server.guess("invalid")
}.to raise_error(Whois::ServerNotFound)
end
context "when the input is a tld" do
it "returns a IANA adapter" do
expect(Whois::Server.guess(".com")).to eq(Whois::Server.factory(:tld, ".", "whois.iana.org"))
end
it "returns a IANA adapter when the input is an idn" do
expect(Whois::Server.guess(".xn--fiqs8s")).to eq(Whois::Server.factory(:tld, ".", "whois.iana.org"))
end
end
context "when the input is a domain" do
it "lookups definitions and returns the adapter" do
with_definitions do
Whois::Server.define(:tld, "test", "whois.test")
expect(Whois::Server.guess("example.test")).to eq(Whois::Server.factory(:tld, "test", "whois.test"))
end
end
it "doesn't consider the dot as a regexp pattern" do
with_definitions do
Whois::Server.define(:tld, "no.com", "whois.no.com")
Whois::Server.define(:tld, "com", "whois.com")
expect(Whois::Server.guess("antoniocangiano.com")).to eq(Whois::Server.factory(:tld, "com", "whois.com"))
end
end
it "returns the closer definition" do
with_definitions do
Whois::Server.define(:tld, "com", com = "whois.com")
Whois::Server.define(:tld, "com.foo", comfoo = "whois.com.foo")
Whois::Server.define(:tld, "foo.com", foocom = "whois.foo.com")
expect(Whois::Server.guess("example.com").host).to eq(com)
expect(Whois::Server.guess("example.com.foo").host).to eq(comfoo)
expect(Whois::Server.guess("example.foo.com").host).to eq(foocom)
end
end
end
context "when the input is an asn16" do
it "lookups definitions and returns the adapter" do
with_definitions do
Whois::Server.define(:asn16, "0 65535", "whois.test")
expect(Whois::Server.guess("AS65535")).to eq(Whois::Server.factory(:asn16, "0 65535", "whois.test"))
end
end
it "raises if definition is not found" do
with_definitions do
Whois::Server.define(:asn16, "0 60000", "whois.test")
expect {
Whois::Server.guess("AS65535")
}.to raise_error(Whois::AllocationUnknown)
end
end
end
context "when the input is an asn32" do
it "lookups definitions and returns the adapter" do
with_definitions do
Whois::Server.define(:asn32, "65536 394239", "whois.test")
expect(Whois::Server.guess("AS65536")).to eq(Whois::Server.factory(:asn32, "65536 394239", "whois.test"))
end
end
it "raises if definition is not found" do
with_definitions do
Whois::Server.define(:asn32, "65536 131071", "whois.test")
expect {
Whois::Server.guess("AS200000")
}.to raise_error(Whois::AllocationUnknown)
end
end
end
context "when the input is a ipv4" do
it "lookups definitions and returns the adapter" do
with_definitions do
Whois::Server.define(:ipv4, "192.168.1.0/10", "whois.test")
expect(Whois::Server.find_for_ip("192.168.1.1")).to eq(Whois::Server.factory(:ipv4, "192.168.1.0/10", "whois.test"))
end
end
it "raises if definition is not found" do
with_definitions do
Whois::Server.define(:ipv4, "192.168.1.0/10", "whois.test")
expect {
Whois::Server.guess("192.192.0.1")
}.to raise_error(Whois::AllocationUnknown)
end
end
end
context "when the input is a ipv6" do
it "lookups definitions and returns the adapter" do
with_definitions do
Whois::Server.define(:ipv6, "2001:0200::/23", "whois.test")
expect(Whois::Server.guess("2001:0200::1")).to eq(Whois::Server.factory(:ipv6, "2001:0200::/23", "whois.test"))
end
end
it "raises if definition is not found" do
with_definitions do
Whois::Server.define(:ipv6, "::1", "whois.test")
expect {
Whois::Server.guess("2002:0300::1")
}.to raise_error(Whois::AllocationUnknown)
end
end
it "recognizes ipv4 compatibility mode" do
with_definitions do
Whois::Server.define(:ipv6, "::192.168.1.1", "whois.test")
expect(Whois::Server.guess("::192.168.1.1")).to eq(Whois::Server.factory(:ipv6, "::192.168.1.1", "whois.test"))
end
end
it "rescues IPAddr ArgumentError", issue: "weppos/whois#174" do
with_definitions do
expect {
Whois::Server.guess("f53")
}.to raise_error(Whois::AllocationUnknown)
end
end
end
end
end
| 34.848185 | 126 | 0.592102 |
79cfd6308ff7d15660f8938306055683c98a878f | 1,290 | require 'geocoder'
class MapCluster
attr_reader :clusters, :zoom, :tile_size
def initialize(zoom: 12, tile_size: 256, distance_in_pixels: 60)
@zoom, @tile_size = [zoom, 3].max, tile_size
@distance_in_pixels = distance_in_pixels
@clusters, @coordinates = [], []
end
def add_coordinate(coordinate)
@coordinates << coordinate
add_to_cluster(coordinate)
end
def clear
@coordinates = []
@clusters = []
end
private
def add_to_cluster(coordinate)
selected_cluster, max_distance = nil, nil
@clusters.each do |cluster|
distance = Geocoder::Calculations.distance_between(cluster.center, coordinate.to_coordinates)
if max_distance.nil? || max_distance > distance
max_distance = distance
selected_cluster = cluster
end
end
if selected_cluster && selected_cluster.in_bounds?(coordinate)
selected_cluster.add_coordinate(coordinate)
else
cluster = MapCluster::Cluster.new(zoom: @zoom, tile_size: @tile_size, distance_in_pixels: @distance_in_pixels)
cluster.add_coordinate(coordinate)
@clusters << cluster
end
end
end
require 'map_cluster/tile'
require 'map_cluster/pixel'
require 'map_cluster/coordinate'
require 'map_cluster/calculator'
require 'map_cluster/cluster'
| 26.875 | 116 | 0.720155 |
284e8c0baa2652e9dabe9af528f686df167d4c9d | 386 | # Be sure to restart your server when you modify this file.
# Add new inflection rules using the following format
# (all these examples are active by default):
# ActiveSupport::Inflector.inflections do |inflect|
# inflect.plural /^(ox)$/i, '\1en'
# inflect.singular /^(ox)en/i, '\1'
# inflect.irregular 'person', 'people'
# inflect.uncountable %w( fish sheep )
# end
| 35.090909 | 60 | 0.686528 |
e951cd6a38e37d4aa94a529bfa6c0d3785db5315 | 3,640 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2018_12_01
module Models
#
# Information about a hop between the source and the destination.
#
class ConnectivityHop
include MsRestAzure
# @return [String] The type of the hop.
attr_accessor :type
# @return [String] The ID of the hop.
attr_accessor :id
# @return [String] The IP address of the hop.
attr_accessor :address
# @return [String] The ID of the resource corresponding to this hop.
attr_accessor :resource_id
# @return [Array<String>] List of next hop identifiers.
attr_accessor :next_hop_ids
# @return [Array<ConnectivityIssue>] List of issues.
attr_accessor :issues
#
# Mapper for ConnectivityHop class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'ConnectivityHop',
type: {
name: 'Composite',
class_name: 'ConnectivityHop',
model_properties: {
type: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'id',
type: {
name: 'String'
}
},
address: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'address',
type: {
name: 'String'
}
},
resource_id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'resourceId',
type: {
name: 'String'
}
},
next_hop_ids: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'nextHopIds',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
issues: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'issues',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'ConnectivityIssueElementType',
type: {
name: 'Composite',
class_name: 'ConnectivityIssue'
}
}
}
}
}
}
}
end
end
end
end
| 29.12 | 74 | 0.447802 |
f88fbff5e5b0fa546df048ac0b356670f64053a3 | 13,228 | require_relative 'test_api'
module PaypalService
module TestMerchant
def self.build(api_builder)
PaypalService::Merchant.new(nil, TestLogger.new, TestMerchantActions.new.default_test_actions, api_builder)
end
end
class FakePalMerchant
def initialize
@tokens = {}
@payments_by_order_id = {}
@payments_by_auth_id = {}
@billing_agreements_by_token = {}
end
def save_token(req, payment_action)
token = {
token: SecureRandom.uuid,
payment_action: payment_action,
item_name: req[:item_name],
item_quantity: req[:item_quantity],
item_price: req[:item_price],
order_total: req[:order_total],
receiver_id: req[:receiver_username],
no_shipping: req[:require_shipping_address] ? 0 : 1
}
@tokens[token[:token]] = token
token
end
def get_token(token)
@tokens[token]
end
def create_and_save_payment(token)
if token[:payment_action] == :order
create_and_save_order_payment(token)
else
create_and_save_auth_payment(token)
end
end
def create_and_save_order_payment(token)
payment = {
order_date: Time.now,
payment_status: "pending",
pending_reason: "order",
order_id: SecureRandom.uuid,
order_total: token[:order_total],
receiver_id: token[:receiver_id]
}
@payments_by_order_id[payment[:order_id]] = payment
payment
end
def create_and_save_auth_payment(token)
payment = {
authorization_date: Time.now,
payment_status: "pending",
pending_reason: "authorization",
authorization_id: SecureRandom.uuid,
authorization_total: token[:order_total],
receiver_id: token[:receiver_id]
}
@payments_by_auth_id[payment[:authorization_id]] = payment
payment
end
def create_and_save_billing_agreement(token)
billing_agreement = {
billing_agreement_id: SecureRandom.uuid
}
@billing_agreements_by_token[token[:token]] = billing_agreement
billing_agreement
end
def authorize_payment(order_id, authorization_total)
payment = @payments_by_order_id[order_id]
raise "No order with order id: #{order_id}" if payment.nil?
raise "Cannot authorize more than order_total" if authorization_total.cents > payment[:order_total].cents
raise "Cannot authorize already authorized payment" if payment[:pending_reason] != "order"
auth_id = SecureRandom.uuid
auth_payment = payment.merge({
authorization_date: Time.now,
authorization_total: authorization_total,
authorization_id: auth_id,
payment_status: "pending",
pending_reason: "authorization",
})
@payments_by_order_id[order_id] = auth_payment
@payments_by_auth_id[auth_id] = auth_payment
auth_payment
end
def capture_payment(auth_id, payment_total)
payment = @payments_by_auth_id[auth_id]
raise "No payment for auth id: #{auth_id}" if payment.nil?
raise "Cannot capture more than authorization_total" if payment_total.cents > payment[:authorization_total].cents
payment_id = SecureRandom.uuid
captured_payment = payment.merge({
payment_id: payment_id,
payment_total: payment_total,
fee_total: Money.new((payment_total.cents*0.1).to_i, payment_total.currency.iso_code),
payment_date: Time.now,
payment_status: "completed",
pending_reason: "none"
})
end
def get_payment(auth_or_order_id)
@payments_by_auth_id[auth_or_order_id] || @payments_by_order_id[auth_or_order_id]
end
def get_billing_agreement(token)
@billing_agreements_by_token[token[:token]]
end
def void(auth_or_order_id)
payment = get_payment(auth_or_order_id)
raise "No payment with order or auth id: #{auth_or_order_id}" if payment.nil?
voided_payment = payment.merge({
payment_status: "voided",
pending_reason: "none"
})
@payments_by_order_id[voided_payment[:order_id]] = voided_payment
@payments_by_auth_id[voided_payment[:authorization_id]] = voided_payment unless voided_payment[:authorization_id].nil?
voided_payment
end
end
# rubocop:disable all
class TestMerchantActions
attr_reader :default_test_actions
def initialize
@fake_pal = FakePalMerchant.new
@default_test_actions = build_default_test_actions
end
def build_default_test_actions
identity = -> (val, _) { val }
{
get_express_checkout_details: PaypalAction.def_action(
input_transformer: identity,
wrapper_method_name: :do_nothing,
action_method_name: :wrap,
output_transformer: -> (res, api) {
req = res[:value]
token = @fake_pal.get_token(req[:token])
billing_agreement = @fake_pal.get_billing_agreement(token)
if (!token.nil?)
response = {
token: token[:token],
checkout_status: "not_used_in_tests",
billing_agreement_accepted: !billing_agreement.nil?,
payer: token[:email],
payer_id: "payer_id",
order_total: token[:order_total]
}
if(token[:no_shipping] == 0)
response[:shipping_address_status] = "Confirmed"
response[:shipping_address_city] = "city"
response[:shipping_address_country] = "country"
response[:shipping_address_country_code] = "CC"
response[:shipping_address_name] = "name"
response[:shipping_address_phone] = "123456"
response[:shipping_address_postal_code] = "WX1GQ"
response[:shipping_address_state_or_province] = "state"
response[:shipping_address_street1] = "street1"
response[:shipping_address_street2] = "street2"
end
DataTypes::Merchant.create_get_express_checkout_details_response(response)
else
PaypalService::DataTypes::FailureResponse.call()
end
}
),
set_express_checkout_order: PaypalAction.def_action(
input_transformer: identity,
wrapper_method_name: :do_nothing,
action_method_name: :wrap,
output_transformer: -> (res, api) {
req = res[:value]
token = @fake_pal.save_token(req, :order)
DataTypes::Merchant.create_set_express_checkout_order_response({
token: token[:token],
redirect_url: "https://paypaltest.com/#{token[:token]}",
receiver_username: api.config.subject || api.config.username
})
}
),
set_express_checkout_authorization: PaypalAction.def_action(
input_transformer: identity,
wrapper_method_name: :do_nothing,
action_method_name: :wrap,
output_transformer: -> (res, api) {
req = res[:value]
token = @fake_pal.save_token(req, :authorization)
DataTypes::Merchant.create_set_express_checkout_order_response({
token: token[:token],
redirect_url: "https://paypaltest.com/#{token[:token]}",
receiver_username: api.config.subject || api.config.username
})
}
),
do_express_checkout_payment: PaypalAction.def_action(
input_transformer: identity,
wrapper_method_name: :do_nothing,
action_method_name: :wrap,
output_transformer: -> (res, api) {
req = res[:value]
token = @fake_pal.get_token(req[:token])
if (!token.nil?)
payment = @fake_pal.create_and_save_payment(token)
DataTypes::Merchant.create_do_express_checkout_payment_response(payment)
else
PaypalService::DataTypes::FailureResponse.call()
end
}
),
do_authorization: PaypalAction.def_action(
input_transformer: identity,
wrapper_method_name: :do_nothing,
action_method_name: :wrap,
output_transformer: -> (res, api) {
req = res[:value]
payment = @fake_pal.authorize_payment(req[:order_id], req[:authorization_total])
DataTypes::Merchant.create_do_authorization_response({
authorization_id: payment[:authorization_id],
payment_status: payment[:payment_status],
pending_reason: payment[:pending_reason],
authorization_total: payment[:authorization_total],
authorization_date: payment[:authorization_date],
msg_sub_id: req[:msg_sub_id]
})
}
),
do_capture: PaypalAction.def_action(
input_transformer: identity,
wrapper_method_name: :do_nothing,
action_method_name: :wrap,
output_transformer: -> (res, api) {
req = res[:value]
payment = @fake_pal.capture_payment(req[:authorization_id], req[:payment_total])
DataTypes::Merchant.create_do_full_capture_response(
{
authorization_id: payment[:authorization_id],
payment_id: payment[:payment_id],
payment_status: payment[:payment_status],
pending_reason: payment[:pending_reason],
payment_total: payment[:payment_total],
fee_total: payment[:fee_total],
payment_date: payment[:payment_date]
}
)
}
),
do_void: PaypalAction.def_action(
input_transformer: identity,
wrapper_method_name: :do_nothing,
action_method_name: :wrap,
output_transformer: -> (res, api) {
req = res[:value]
payment = @fake_pal.void(req[:transaction_id])
DataTypes::Merchant.create_do_void_response(
{
voided_id: req[:transaction_id],
msg_sub_id: req[:msg_sub_id]
}
)
}
),
get_transaction_details: PaypalAction.def_action(
input_transformer: identity,
wrapper_method_name: :do_nothing,
action_method_name: :wrap,
output_transformer: -> (res, api) {
req = res[:value]
payment = @fake_pal.get_payment(req[:transaction_id])
DataTypes::Merchant.create_get_transaction_details_response(
{
transaction_id: req[:transaction_id],
payment_status: payment[:payment_status],
pending_reason: payment[:pending_reason],
transaction_total: payment[:order_total]
}
)
}
),
do_reference_transaction: PaypalAction.def_action(
input_transformer: identity,
wrapper_method_name: :do_nothing,
action_method_name: :wrap,
output_transformer: -> (res, api) {
req = res[:value]
DataTypes::Merchant.create_do_reference_transaction_response({
billing_agreement_id: req[:billing_agreement_id],
payment_id: SecureRandom.uuid,
payment_total: req[:payment_total],
payment_date: Time.now,
fee: Money.new((req[:payment_total].cents*0.1).to_i, req[:payment_total].currency.iso_code),
payment_status: "completed",
pending_reason: "none",
username_to: api.config.subject || api.config.username
})
}
),
setup_billing_agreement: PaypalAction.def_action(
input_transformer: identity,
wrapper_method_name: :do_nothing,
action_method_name: :wrap,
output_transformer: -> (res, api) {
token = @fake_pal.save_token({}, :authorization)
DataTypes::Merchant.create_setup_billing_agreement_response(
{
token: token[:token],
redirect_url: "https://paypaltest.com/billing_agreement?token=#{token[:token]}",
username_to: api.config.subject || api.config.username
}
)
}
),
create_billing_agreement: PaypalAction.def_action(
input_transformer: identity,
wrapper_method_name: :do_nothing,
action_method_name: :wrap,
output_transformer: -> (res, api) {
req = res[:value]
token = @fake_pal.get_token(req[:token])
if (!token.nil?)
billing_agreement = @fake_pal.create_and_save_billing_agreement(token)
DataTypes::Merchant.create_create_billing_agreement_response(
billing_agreement_id: billing_agreement[:billing_agreement_id]
)
else
PaypalService::DataTypes::FailureResponse.call()
end
}
)
}
end
end
# rubocop:enable all
end
| 34.71916 | 124 | 0.610145 |
f8e3caa81f0dd649fed33a9f9d9bcbb8a0f1fc7c | 2,312 | class RunCampaignJob < ApplicationJob
queue_as :critical
SLOW_ADJUSTMENT_FACTOR = 0.25
FAST_ADJUSTMENT_FACTOR = 0.9
LOWER_BOUND_CONVERSION_RATE = 0.01
V3_JOB_MINUTES_CADENCE = 10
V2_JOB_MINUTES_CADENCE = 5
# Job that decides which users should be matched for a given campaign at a given point in time.
# This job creates the matches.
def perform(campaign_id)
Rails.logger.info("Run RunCampaignJob for campaign_id #{campaign_id}")
@campaign = Campaign.find(campaign_id)
return unless @campaign.running?
return @campaign.completed! if @campaign.remaining_doses <= 0
return @campaign.completed! if Time.now.utc >= @campaign.ends_at
return unless should_run?
# compute how many more users we need to match
limit = [compute_new_users_to_reach * compute_adjustment_factor, @campaign.email_budget_remaining].min.floor
return if limit <= 0
users = @campaign.reachable_users_query(limit: limit)
# create matches
users.each do |user|
REDIS_LOCK.lock!("create_match_for_user_id_#{user.id}", 2000) do
Match.create(
campaign: @campaign,
vaccination_center: @campaign.vaccination_center,
user: user
)
end
rescue Redlock::LockError
Rails.logger.warn("Could not obtain lock to create match for user_id #{user.id}")
end
end
def compute_new_users_to_reach
return (@campaign.target_matches_count - @campaign.matches.pending.alive.count) if @campaign.matching_algo_v2?
compute_new_users_to_reach_with_v3
end
def compute_new_users_to_reach_with_v3
projected_conversion = if @campaign.matches.count <= 0
1.0 / @campaign.initial_match_count.to_f
else
[@campaign.projected_confirmations / @campaign.matches.count, LOWER_BOUND_CONVERSION_RATE].max
end
(@campaign.available_doses.to_f - @campaign.projected_confirmations) / projected_conversion
end
def compute_adjustment_factor
[[@campaign.matches.confirmed.count / 15.0, SLOW_ADJUSTMENT_FACTOR].max, FAST_ADJUSTMENT_FACTOR].min
end
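# Worked example (illustrative arithmetic only, not from the original source): with 3 confirmed
# matches the factor is min(max(3 / 15.0, 0.25), 0.9) = 0.25; with 20 confirmed matches it is
# min(max(20 / 15.0, 0.25), 0.9) = 0.9.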
def should_run?
now_minute = DateTime.now.minute
return true if @campaign.matching_algo_v3? && (now_minute % V3_JOB_MINUTES_CADENCE == 0)
return true if @campaign.matching_algo_v2? && (now_minute % V2_JOB_MINUTES_CADENCE == 0)
false
end
end
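# Usage sketch (an assumption, not part of the original job): this job is typically
# enqueued per running campaign from a scheduler, e.g.
#
#   Campaign.where(status: :running).find_each do |campaign|
#     RunCampaignJob.perform_later(campaign.id)
#   end
#
# Only RunCampaignJob#perform(campaign_id) above comes from the source; the
# Campaign.where(status: :running) scope is assumed here.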
| 35.030303 | 114 | 0.737024 |
ffddf73ad29686eec3088552fd6d298105460992 | 2,988 | describe Unitpay::Service do
let(:service) { described_class.new('domain', 'public_key', 'secret_key', use_sign) }
let(:sum) { 100 }
let(:account) { 1 }
let(:desc) { 'description' }
let(:valid_signature) { '2053205d12e1b73ca6f42cf1cc9289a05aefd34f6e5ab9cb043e00494ae4b03c' }
let(:use_sign) { true }
describe '#calculate_sign' do
subject { service.send(:calculate_sign, sum, account, desc) }
it { is_expected.to eq(valid_signature) }
end
describe '#valid_sign?' do
subject { service.valid_signature?(signature, sum, account, desc) }
context 'when valid signature' do
let(:signature) { valid_signature }
it { is_expected.to be_truthy }
end
context 'when invalid signature' do
let(:signature) { '1' }
it { is_expected.to be_falsey }
end
end
describe '#valid_action_signature?' do
subject { service.valid_action_signature?(method, params) }
let(:params) do
{
account: 'test',
date: '2015-11-29 12:29:00',
operator: 'mts',
paymentType: 'mc',
projectId: '22760',
phone: '9001234567',
profit: '9.5',
sum: amount,
orderSum: '10.00',
signature: 'df236bfc284beb1a922ceb1f98b4ddb23ac87d5761fcc71acbf09bc06aeca720',
orderCurrency: 'RUB',
unitpayId: '87370'
}
end
let(:method) { 'check' }
context 'when valid' do
let(:amount) { 10 }
it { is_expected.to be_truthy }
end
context 'when invalid' do
let(:amount) { 11 }
it { is_expected.to be_falsey }
end
context 'when params is nil' do
let(:params) { nil }
it { is_expected.to be_falsey }
end
context 'when params signature is nil' do
let(:params) { {} }
it { is_expected.to be_falsey }
end
end
describe '#payment_params' do
let(:options) { {} }
subject { service.payment_params(sum, account, desc, options) }
context 'when simple params' do
it { is_expected.to eq(sum: sum, account: account, desc: desc, signature: valid_signature, currency: 'RUB') }
end
context 'when dont use sign' do
let(:use_sign) { false }
it { is_expected.to eq(sum: sum, account: account, desc: desc, currency: 'RUB') }
end
context 'when set extra params' do
let(:use_sign) { false }
let(:options) { { locale: 'ru' } }
it { is_expected.to eq(sum: sum, account: account, desc: desc, currency: 'RUB', locale: 'ru') }
end
end
describe '#params_for_widget' do
subject { service.params_for_widget(sum, account, desc) }
it { is_expected.to eq(publicKey: 'public_key', sum: sum, account: account, desc: desc, signature: valid_signature, currency: 'RUB') }
end
describe '#payment_url' do
subject { service.payment_url(sum, account, desc) }
it { is_expected.to eq "https://domain/pay/public_key?sum=100&account=1&desc=description¤cy=RUB&signature=#{ valid_signature }" }
end
end
| 26.442478 | 139 | 0.631191 |
b93fc2b5b2c81336382eca27f532a6eba1fed10e | 343 | module Lunar
# @private A helper for producing all the characters of a word.
#
# @example
#
# expected = %w(a ab abr abra abrah abraha abraham)
# FuzzyWord.new("Abraham").partials == expected
# # => true
#
class FuzzyWord < String
def partials
(1..length).map { |i| self[0, i].downcase }
end
end
end
| 21.4375 | 65 | 0.606414 |
18bb44a8b52d33575b415304fd2040b56bebf6ed | 3,960 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options)
config.active_storage.service = :local
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "fit_challenge_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = [I18n.default_locale]
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.684211 | 102 | 0.758838 |
7ab907f2cc0ddc97029f0c12e6811c8f572da000 | 1,636 | class Logstalgia < Formula
desc "Web server access log visualizer with retro style"
homepage "http://logstalgia.io/"
url "https://github.com/acaudwell/Logstalgia/releases/download/logstalgia-1.0.7/logstalgia-1.0.7.tar.gz"
sha256 "5553fd03fb7be564538fe56e871eac6e3caf56f40e8abc4602d2553964f8f0e1"
revision 2
bottle do
rebuild 1
sha256 "d075772d62ab3bfd0816dc0aaa9bfff86277b6d590fabb1e1ede7213f5fc5d80" => :sierra
sha256 "529e9f890e9fe1dda35ed4318499aa02c2254d05c22d9e4d93f3b08f17539fd9" => :el_capitan
sha256 "69f55d7dd17d0f601e6ed520f1715402e209ac9bddfda4acb610ad61b1c2c851" => :yosemite
end
head do
url "https://github.com/acaudwell/Logstalgia.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
end
depends_on "pkg-config" => :build
depends_on "boost" => :build
depends_on "glm" => :build
depends_on "sdl2"
depends_on "sdl2_image"
depends_on "freetype"
depends_on "glew"
depends_on "libpng"
depends_on "pcre"
needs :cxx11
def install
# clang on Mt. Lion will try to build against libstdc++,
# despite -std=gnu++0x
ENV.libcxx
# For non-/usr/local installs
ENV.append "CXXFLAGS", "-I#{HOMEBREW_PREFIX}/include"
# Handle building head.
system "autoreconf", "-f", "-i" if build.head?
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--without-x"
system "make"
system "make", "install"
end
test do
assert_match "Logstalgia v1.", shell_output("#{bin}/logstalgia --help")
end
end
| 28.701754 | 106 | 0.690709 |
bbb9cbc8aea1c808ff0babdc72ed489008d0c475 | 346 | require 'ostruct'
class Hash
def to_ostruct
o = OpenStruct.new(self)
each do |k,v|
o.send(:"#{k}=", v.to_ostruct) if v.respond_to? :to_ostruct
end
o
end
end
class Array
def to_ostruct
map do |item|
if item.respond_to? :to_ostruct
item.to_ostruct
else
item
end
end
end
end
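# Usage sketch (illustrative, not part of the original file):
#
#   config = { server: { host: "localhost", ports: [80, 443] } }.to_ostruct
#   config.server.host  # => "localhost"
#   config.server.ports # => [80, 443]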
| 14.416667 | 65 | 0.589595 |
1878453b4982300d2f0e157510079c01bf9efefb | 1,055 | class Conserver < Formula
desc "Allows multiple users to watch a serial console at the same time"
homepage "https://www.conserver.com/"
url "https://github.com/conserver/conserver/releases/download/v8.2.2/conserver-8.2.2.tar.gz"
sha256 "05ea1693bf92b42ad2f0a9389c60352ccd35c2ea93c8fc8e618d0153362a7d81"
bottle do
sha256 "3332bc506c85754b3b601989941613c83932ea28b4fb8aef87e45dfa576807c2" => :mojave
sha256 "3fca2dc202bf6d68cc8294be8c2703bef813ad6583d51c3d9c4da12dbc975a89" => :high_sierra
sha256 "c3885b3a01be7c4d0a4ed943906c4cb13be485b770271ac94cf9e439fdb9ca9a" => :sierra
sha256 "d9f2d169e4a3adf0e46b7047991dba382147da1bd0923969bd0bec7bf7f14900" => :el_capitan
sha256 "816ecd617b233fb756a0dbe15dfe728f8409f3e7c3c7cf9f3ee6568032fb3051" => :x86_64_linux
end
def install
system "./configure", "--prefix=#{prefix}"
system "make"
system "make", "install"
end
test do
console = fork do
exec bin/"console", "-n", "-p", "8000", "test"
end
sleep 1
Process.kill("TERM", console)
end
end
| 36.37931 | 94 | 0.759242 |
e229e167150bd6861614ce6b14afd5ad2d5597b0 | 118 | class AddLocationToGroups < ActiveRecord::Migration
def change
add_column :groups, :location, :string
end
end
| 19.666667 | 51 | 0.762712 |
61001f07c655b16fcad1c2dd69a89b4a2d0b4c23 | 9,296 | require "pp"
RSpec.describe ValueSemantics do
around do |example|
# this is necessary for mutation testing to work properly
with_constant(:Doggums, dog_class) { example.run }
end
let(:dog_class) do
Class.new do
include ValueSemantics.for_attributes {
name
trained?
}
end
end
describe 'initialization' do
it "supports keyword arguments" do
dog = Doggums.new(name: 'Fido', trained?: true)
expect(dog).to have_attributes(name: 'Fido', trained?: true)
end
it "supports Hash arguments" do
dog = Doggums.new({ name: 'Rufus', trained?: true })
expect(dog).to have_attributes(name: 'Rufus', trained?: true)
end
it "supports any value that responds to #to_h" do
arg = double(to_h: { name: 'Rex', trained?: true })
dog = Doggums.new(arg)
expect(dog).to have_attributes(name: 'Rex', trained?: true)
end
it "does not mutate hash arguments" do
attrs = { name: 'Kipper', trained?: true }
expect { Doggums.new(attrs) }.not_to change { attrs }
end
it "can not be constructed with attributes missing" do
expect { Doggums.new }.to raise_error(
ValueSemantics::MissingAttributes,
"Some attributes required by `Doggums` are missing: `name`, `trained?`",
)
end
it "can not be constructed with undefined attributes" do
expect {
Doggums.new(name: 'Fido', trained?: true, meow: 'cattt', moo: 'cowww')
}.to raise_error(
ValueSemantics::UnrecognizedAttributes,
"`Doggums` does not define attributes: `:meow`, `:moo`",
)
end
it "gives precedence to `UnrecognizedAttributes` over `MissingAttributes`" do
expect { Doggums.new(nayme: 'Fiydo', tentacles: 8) }
.to raise_error(ValueSemantics::UnrecognizedAttributes, /tentacles/)
end
it "can not be constructed with an object that does not respond to #to_h" do
expect { dog_class.new(double) }.to raise_error(TypeError,
<<~END_MESSAGE.strip.split.join(' ')
Can not initialize a `Doggums` with a `RSpec::Mocks::Double` object.
This argument is typically a `Hash` of attributes, but can be any
object that responds to `#to_h`.
END_MESSAGE
)
end
it "does not intercept errors raised from calling #to_h" do
arg = double
allow(arg).to receive(:to_h).and_raise("this implementation sucks")
expect { dog_class.new(arg) }.to raise_error("this implementation sucks")
end
end
describe 'basic usage' do
it "has attr readers and ivars" do
dog = Doggums.new(name: 'Fido', trained?: true)
expect(dog).to have_attributes(name: 'Fido', trained?: true)
expect(dog.instance_variable_get(:@name)).to eq('Fido')
expect(dog.instance_variable_get(:@trained)).to eq(true)
end
it "does not define attr writers" do
dog = Doggums.new(name: 'Fido', trained?: true)
expect{ dog.name = 'Goofy' }.to raise_error(NoMethodError, /name=/)
expect{ dog.trained = false }.to raise_error(NoMethodError, /trained=/)
end
it "has square brackets as a variable attr reader" do
dog = Doggums.new(name: 'Fido', trained?: true)
expect(dog[:name]).to eq('Fido')
expect { dog[:fins] }.to raise_error(
ValueSemantics::UnrecognizedAttributes,
"`Doggums` has no attribute named `:fins`"
)
end
it "can do non-destructive updates" do
sally = Doggums.new(name: 'Sally', trained?: false)
bob = sally.with(name: 'Bob')
expect(bob).to have_attributes(name: 'Bob', trained?: false)
end
it "can be converted to a hash of attributes" do
dog = Doggums.new(name: 'Fido', trained?: false)
expect(dog.to_h).to eq({ name: 'Fido', trained?: false })
end
it "has a human-friendly #inspect string" do
dog = Doggums.new(name: 'Fido', trained?: true)
expect(dog.inspect).to eq('#<Doggums name="Fido" trained?=true>')
end
it "has nice pp output" do
output = StringIO.new
dog = Doggums.new(name: "Fido", trained?: true)
PP.pp(dog, output, 3)
expect(output.string).to eq(<<~END_PP)
#<Doggums
name="Fido"
trained?=true>
END_PP
end
it "has a human-friendly module name" do
mod = Doggums.ancestors[1]
expect(mod.name).to eq("Doggums::ValueSemantics_Attributes")
end
end
describe 'default values' do
let(:cat) do
Class.new do
include ValueSemantics.for_attributes {
name default: 'Kitty'
scratch_proc default: ->{ "scratch" }
born_at default_generator: ->{ Time.now }
}
end
end
it "uses the default if no value is given" do
expect(cat.new.name).to eq('Kitty')
end
it "allows the default to be overriden" do
expect(cat.new(name: 'Tomcat').name).to eq('Tomcat')
end
it "does not override nil" do
expect(cat.new(name: nil).name).to be_nil
end
it "allows procs as default values" do
expect(cat.new.scratch_proc.call).to eq("scratch")
end
it "can generate defaults with a proc" do
expect(cat.new.born_at).to be_a(Time)
end
end
describe 'validation' do
module WingValidator
def self.===(value)
case value
when 'feathery flappers' then true
when 'smooth feet' then false
else fail 'wut?'
end
end
end
class Birb
include ValueSemantics.for_attributes {
wings WingValidator
i Integer
}
end
it "accepts values that pass the validator" do
expect{ Birb.new(wings: 'feathery flappers', i: 0) }.not_to raise_error
end
it "rejects values that fail the validator" do
expect{ Birb.new(wings: 'smooth feet', i: 0.0) }.to raise_error(
ValueSemantics::InvalidValue,
<<~END_ERROR
Some attributes of `Birb` are invalid:
- wings: "smooth feet"
- i: 0.0
END_ERROR
)
end
it "does not validate missing attributes" do
expect{ Birb.new(i: 0) }.to raise_error(ValueSemantics::MissingAttributes)
end
end
describe "equality" do
let(:puppy_class) { Class.new(Doggums) }
let(:dog1) { Doggums.new(name: 'Fido', trained?: true) }
let(:dog2) { Doggums.new(name: 'Fido', trained?: true) }
let(:different) { Doggums.new(name: 'Brutus', trained?: false) }
let(:child) { puppy_class.new(name: 'Fido', trained?: true) }
it "defines loose equality between subclasses with #===" do
expect(dog1).to eq(dog2)
expect(dog1).not_to eq(different)
expect(dog1).not_to eq("hello")
expect(dog1).to eq(child)
expect(child).to eq(dog1)
end
it "defines strict equality with #eql?" do
expect(dog1.eql?(dog2)).to be(true)
expect(dog1.eql?(different)).to be(false)
expect(dog1.eql?(child)).to be(false)
expect(child.eql?(dog1)).to be(false)
end
it "allows objects to be used as keys in Hash objects" do
expect(dog1.hash).to eq(dog2.hash)
expect(dog1.hash).not_to eq(different.hash)
hash_key_test = { dog1 => 'woof', different => 'diff' }.merge(dog2 => 'bark')
expect(hash_key_test).to eq({ dog1 => 'bark', different => 'diff' })
end
it "hashes differently depending on class" do
expect(dog1.hash).not_to eq(child.hash)
end
end
describe 'coercion' do
module Callable
def self.call(x)
"callable: #{x}"
end
end
class CoercionTest
include ValueSemantics.for_attributes {
no_coercion String, default: ""
with_true String, coerce: true, default: ""
with_callable String, coerce: Callable, default: ""
double_it String, coerce: ->(x) { x * 2 }, default: "42"
}
private
def self.coerce_with_true(value)
"class_method: #{value}"
end
def self.coerce_no_coercion(value)
fail "Should never get here"
end
end
it "does not call coercion methods by default" do
subject = CoercionTest.new(no_coercion: 'dinklage')
expect(subject.no_coercion).to eq('dinklage')
end
it "calls a class method when coerce: true" do
subject = CoercionTest.new(with_true: 'peter')
expect(subject.with_true).to eq('class_method: peter')
end
it "calls obj.call when coerce: obj" do
subject = CoercionTest.new(with_callable: 'daenerys')
expect(subject.with_callable).to eq('callable: daenerys')
end
it "coerces default values" do
subject = CoercionTest.new
expect(subject.double_it).to eq('4242')
end
it "performs coercion before validation" do
expect {
CoercionTest.new(double_it: 6)
}.to raise_error(
ValueSemantics::InvalidValue,
<<~END_ERROR
Some attributes of `CoercionTest` are invalid:
- double_it: 12
END_ERROR
)
end
it "provides a class method for coercing hashes into value objects" do
value = CoercionTest.coercer.([['no_coercion', 'wario']])
expect(value.no_coercion).to eq('wario')
end
end
it "has a version number" do
expect(ValueSemantics::VERSION).not_to be_empty
end
end
| 29.141066 | 83 | 0.625323 |
1dbea03d475ce67576b6525581282480cc2c16a9 | 2,242 | require 'sequel'
module Intrigue
module Core
module System
module Database
## no need to namespace?
# database set up
def setup_database
database_config = YAML.load_file("#{$intrigue_basedir}/config/database.yml")
options = {
:max_connections => database_config[$intrigue_environment]["max_connections"] || 20,
:pool_timeout => database_config[$intrigue_environment]["pool_timeout"] || 240
}
#
# Allow the database to be configured via ENV var or our config file, or fall back
# to a sensible default
database_host = ENV["DB_HOST"] || database_config[$intrigue_environment]["host"] || "localhost"
database_port = ENV["DB_PORT"] || database_config[$intrigue_environment]["port"] || 5432
database_user = ENV["DB_USER"] || database_config[$intrigue_environment]["user"] || "intrigue"
database_pass = ENV["DB_PASS"] || database_config[$intrigue_environment]["password"]
database_name = ENV["DB_NAME"] || database_config[$intrigue_environment]["database"] || "intrigue_dev"
database_debug = ENV["DB_DEBUG"] || database_config[$intrigue_environment]["debug"]
if $intrigue_environment != "test"
if database_pass
postgres_connect_string = "postgres://#{database_user}:#{database_pass}@#{database_host}:#{database_port}/#{database_name}"
else # handle the case where we're configured to postgres TRUST auth
postgres_connect_string = "postgres://#{database_user}@#{database_host}:#{database_port}/#{database_name}"
end
#puts "Connecting to Postgres at: #{postgres_connect_string}"
$db = ::Sequel.connect(postgres_connect_string, options)
# Allow data to be stored / queried in JSON format
$db.extension :pg_json
Sequel.extension :pg_json_ops
# Allow datasets to be paginated
$db.extension :pagination
else
sqlite_test_string = "sqlite://#{$intrigue_basedir}/data/test.db"
puts "TEST! Connecting to Sqlite at: #{sqlite_test_string}"
$db = ::Sequel.connect("#{sqlite_test_string}")
end
$db.loggers << ::Logger.new($stdout) if database_debug
Sequel.extension :migration, :core_extensions
::Sequel::Model.plugin :update_or_create
end
end
end
end
end | 36.754098 | 131 | 0.699822 |
3992f0eedb58f38fae9224938a26cc995be9d524 | 256 | # Copyright 2020 Google LLC
#
# Use of this source code is governed by an MIT-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/MIT.
class Post < ActiveRecord::Base
belongs_to :author
has_many :comments
end
| 23.272727 | 53 | 0.753906 |
33ee8a4f10c22290a2985babe9599c86ee0046a4 | 1,024 | require "rubygems"
require "bundler/setup"
require "ruby-transmitsms"
require "test/unit"
require "vcr"
VCR.configure do |c|
c.cassette_library_dir = "fixtures/vcr_cassettes"
c.hook_into :webmock
end
class EmailApiTest < Test::Unit::TestCase
def setup()
@api = EmailApi.new("15ad266c538fb36c4d90f01055aef494", "moose")
end
def test_add_email()
VCR.use_cassette("email_sms_api_add_email") do
response = @api.add_email("[email protected]")
assert response.code == 200
assert response.body["success"] == true
assert response.body["error"]["code"] == "SUCCESS"
assert response.body["error"]["description"] == "OK"
end
end
def test_delete_email()
VCR.use_cassette("email_sms_api_delete_email") do
response = @api.delete_email("[email protected]")
assert response.code == 200
assert response.body["success"] == true
assert response.body["error"]["code"] == "SUCCESS"
assert response.body["error"]["description"] == "OK"
end
end
end
| 24.97561 | 68 | 0.678711 |
e28a1f51a85c6ed68fb270e540d4563a84d3066b | 11,476 | # frozen_string_literal: true
require 'rails_helper'
require_relative '../support/iam_session_helper'
require_relative '../support/matchers/json_schema_matcher'
RSpec.describe 'claims and appeals overview', type: :request do
include JsonSchemaMatchers
before(:all) do
@original_cassette_dir = VCR.configure(&:cassette_library_dir)
VCR.configure { |c| c.cassette_library_dir = 'modules/mobile/spec/support/vcr_cassettes' }
end
after(:all) { VCR.configure { |c| c.cassette_library_dir = @original_cassette_dir } }
describe '#index is polled by an unauthorized user' do
it 'and no user returns a 401 status' do
get '/mobile/v0/claims-and-appeals-overview'
expect(response).to have_http_status(:unauthorized)
end
end
describe 'GET /v0/claims-and-appeals-overview' do
before { iam_sign_in }
let(:params) { { useCache: false } }
describe '#index (all user claims) is polled' do
it 'and a result that matches our schema is successfully returned with the 200 status' do
VCR.use_cassette('claims/claims') do
VCR.use_cassette('appeals/appeals') do
get '/mobile/v0/claims-and-appeals-overview', headers: iam_headers, params: params
expect(response).to have_http_status(:ok)
# check a couple entries to make sure the data is correct
parsed_response_contents = response.parsed_body['data']
expect(parsed_response_contents.length).to eq(10)
expect(response.parsed_body.dig('meta', 'pagination', 'totalPages')).to eq(15)
open_claim = parsed_response_contents.select { |entry| entry['id'] == '600114693' }[0]
closed_claim = parsed_response_contents.select { |entry| entry['id'] == '600106271' }[0]
open_appeal = parsed_response_contents.select { |entry| entry['id'] == '3294289' }[0]
expect(open_claim.dig('attributes', 'completed')).to eq(false)
expect(closed_claim.dig('attributes', 'completed')).to eq(true)
expect(open_appeal.dig('attributes', 'completed')).to eq(false)
expect(open_claim['type']).to eq('claim')
expect(closed_claim['type']).to eq('claim')
expect(open_appeal['type']).to eq('appeal')
expect(open_claim.dig('attributes', 'updatedAt')).to eq('2017-09-28')
expect(closed_claim.dig('attributes', 'updatedAt')).to eq('2017-09-20')
expect(open_appeal.dig('attributes', 'updatedAt')).to eq('2018-01-16')
expect(open_appeal.dig('attributes', 'displayTitle')).to eq('disability compensation appeal')
expect(response.body).to match_json_schema('claims_and_appeals_overview_response')
end
end
end
it 'and invalid headers return a 401 status' do
VCR.use_cassette('claims/claims') do
VCR.use_cassette('appeals/appeals') do
get '/mobile/v0/claims-and-appeals-overview'
expect(response).to have_http_status(:unauthorized)
expect(response.body).to match_json_schema('evss_errors')
end
end
end
end
describe '#index (all user claims) is polled with additional pagination params' do
let(:params) do
{ useCache: false,
startDate: '2017-05-01T07:00:00.000Z',
page: { number: 2, size: 12 } }
end
it 'and the results are for page 2 of 12-item pages, which only has 10 entries' do
VCR.use_cassette('claims/claims') do
VCR.use_cassette('appeals/appeals') do
get '/mobile/v0/claims-and-appeals-overview', headers: iam_headers, params: params
expect(response).to have_http_status(:ok)
# check a couple entries to make sure the data is correct
parsed_response_contents = response.parsed_body['data']
expect(response.parsed_body.dig('links', 'next')).to be(nil)
expect(parsed_response_contents.length).to eq(10)
expect(response.body).to match_json_schema('claims_and_appeals_overview_response')
end
end
end
end
describe '#index (all user claims) is polled requesting only closed claims' do
let(:params) do
{ useCache: false,
startDate: '2017-05-01T07:00:00.000Z',
showCompleted: true }
end
it 'and the results contain only closed records' do
VCR.use_cassette('claims/claims') do
VCR.use_cassette('appeals/appeals') do
get '/mobile/v0/claims-and-appeals-overview', headers: iam_headers, params: params
expect(response).to have_http_status(:ok)
# check a couple entries to make sure the data is correct
parsed_response_contents = response.parsed_body['data']
parsed_response_contents.each do |entry|
expect(entry.dig('attributes', 'completed')).to eq(true)
end
expect(response.body).to match_json_schema('claims_and_appeals_overview_response')
end
end
end
end
describe '#index (all user claims) is polled requesting only open claims' do
let(:params) do
{ useCache: false,
startDate: '2017-05-01T07:00:00.000Z',
showCompleted: false }
end
it 'and the results contain only open records' do
VCR.use_cassette('claims/claims') do
VCR.use_cassette('appeals/appeals') do
get '/mobile/v0/claims-and-appeals-overview', headers: iam_headers, params: params
expect(response).to have_http_status(:ok)
# check a couple entries to make sure the data is correct
parsed_response_contents = response.parsed_body['data']
parsed_response_contents.each do |entry|
expect(entry.dig('attributes', 'completed')).to eq(false)
end
expect(response.body).to match_json_schema('claims_and_appeals_overview_response')
end
end
end
end
describe '#index is polled' do
let(:params) { { useCache: false } }
it 'and claims service fails, but appeals succeeds' do
VCR.use_cassette('claims/claims_with_errors') do
VCR.use_cassette('appeals/appeals') do
get '/mobile/v0/claims-and-appeals-overview', headers: iam_headers, params: params
parsed_response_contents = response.parsed_body['data']
expect(parsed_response_contents[0]['type']).to eq('appeal')
expect(parsed_response_contents.last['type']).to eq('appeal')
expect(response).to have_http_status(:multi_status)
expect(response.parsed_body.dig('meta', 'errors').length).to eq(1)
expect(response.parsed_body.dig('meta', 'errors')[0]['service']).to eq('claims')
open_appeal = parsed_response_contents.select { |entry| entry['id'] == '3294289' }[0]
closed_appeal = parsed_response_contents.select { |entry| entry['id'] == '2348605' }[0]
expect(open_appeal.dig('attributes', 'completed')).to eq(false)
expect(closed_appeal.dig('attributes', 'completed')).to eq(true)
expect(open_appeal['type']).to eq('appeal')
expect(closed_appeal['type']).to eq('appeal')
expect(open_appeal.dig('attributes', 'displayTitle')).to eq('disability compensation appeal')
expect(closed_appeal.dig('attributes', 'displayTitle')).to eq('appeal')
expect(response.body).to match_json_schema('claims_and_appeals_overview_response')
end
end
end
it 'and appeals service fails, but claims succeeds' do
VCR.use_cassette('claims/claims') do
VCR.use_cassette('appeals/server_error') do
get '/mobile/v0/claims-and-appeals-overview', headers: iam_headers, params: params
expect(response).to have_http_status(:multi_status)
parsed_response_contents = response.parsed_body['data']
expect(parsed_response_contents[0]['type']).to eq('claim')
expect(parsed_response_contents.last['type']).to eq('claim')
expect(response.parsed_body.dig('meta', 'errors').length).to eq(1)
expect(response.parsed_body.dig('meta', 'errors')[0]['service']).to eq('appeals')
open_claim = parsed_response_contents.select { |entry| entry['id'] == '600114693' }[0]
closed_claim = parsed_response_contents.select { |entry| entry['id'] == '600106271' }[0]
expect(open_claim.dig('attributes', 'completed')).to eq(false)
expect(closed_claim.dig('attributes', 'completed')).to eq(true)
expect(open_claim['type']).to eq('claim')
expect(closed_claim['type']).to eq('claim')
expect(response.body).to match_json_schema('claims_and_appeals_overview_response')
end
end
end
it 'both fail in upstream service' do
VCR.use_cassette('claims/claims_with_errors') do
VCR.use_cassette('appeals/server_error') do
get '/mobile/v0/claims-and-appeals-overview', headers: iam_headers, params: params
expect(response).to have_http_status(:bad_gateway)
expect(response.parsed_body.dig('meta', 'errors').length).to eq(2)
expect(response.parsed_body.dig('meta', 'errors')[0]['service']).to eq('claims')
expect(response.parsed_body.dig('meta', 'errors')[1]['service']).to eq('appeals')
expect(response.body).to match_json_schema('claims_and_appeals_overview_response')
end
end
end
end
context 'when an internal error occurs getting claims' do
it 'includes appeals but has error details in the meta object for claims' do
allow_any_instance_of(IAMUser).to receive(:loa).and_raise(NoMethodError)
VCR.use_cassette('claims/claims') do
VCR.use_cassette('appeals/appeals') do
get '/mobile/v0/claims-and-appeals-overview', headers: iam_headers, params: params
expect(response.parsed_body['data'].size).to eq(
5
)
expect(response.parsed_body.dig('meta', 'errors').first).to eq(
{ 'service' => 'claims', 'errorDetails' => 'NoMethodError' }
)
end
end
end
end
context 'when there are cached claims and appeals' do
let(:user) { FactoryBot.build(:iam_user) }
let(:params) { { useCache: true } }
before do
iam_sign_in
path = Rails.root.join('modules', 'mobile', 'spec', 'support', 'fixtures', 'claims_and_appeals.json')
data = Mobile::V0::Adapters::ClaimsOverview.new.parse(JSON.parse(File.read(path)))
Mobile::V0::ClaimOverview.set_cached(user, data)
end
it 'retrieves the cached claims and appeals rather than hitting the service' do
expect_any_instance_of(Mobile::V0::Claims::Proxy).not_to receive(:get_claims_and_appeals)
get '/mobile/v0/claims-and-appeals-overview', headers: iam_headers, params: params
expect(response).to have_http_status(:ok)
parsed_response_contents = response.parsed_body['data']
open_claim = parsed_response_contents.select { |entry| entry['id'] == '600114693' }[0]
expect(open_claim.dig('attributes', 'completed')).to eq(false)
expect(open_claim['type']).to eq('claim')
expect(response.body).to match_json_schema('claims_and_appeals_overview_response')
end
end
end
end
| 48.218487 | 109 | 0.64404 |
7ab55e605ac742b0cbc745509c9841ba96a14b6b | 242 | module InputTypes
class CreateMetricInputType < Types::BaseInputObject
description "Attributes for creating a metric and associating it to a user"
argument :name, String, "Name associated with this metric", required: true
end
end
| 34.571429 | 79 | 0.77686 |
6194b755dec32eff8a36776bc0ae939ab9152c62 | 2,103 | # frozen_string_literal: true
module EE
module Types
module QueryType
extend ActiveSupport::Concern
prepended do
field :iteration, ::Types::IterationType,
null: true,
resolve: -> (_obj, args, _ctx) { ::GitlabSchema.find_by_gid(args[:id]) },
description: 'Find an iteration' do
argument :id, ::Types::GlobalIDType[::Iteration],
required: true,
description: 'Find an iteration by its ID'
end
field :vulnerabilities,
::Types::VulnerabilityType.connection_type,
null: true,
description: "Vulnerabilities reported on projects on the current user's instance security dashboard",
resolver: ::Resolvers::VulnerabilitiesResolver
field :vulnerabilities_count_by_day,
::Types::VulnerabilitiesCountByDayType.connection_type,
null: true,
description: "Number of vulnerabilities per day for the projects on the current user's instance security dashboard",
resolver: ::Resolvers::VulnerabilitiesCountPerDayResolver
field :vulnerabilities_count_by_day_and_severity,
::Types::VulnerabilitiesCountByDayAndSeverityType.connection_type,
null: true,
description: "Number of vulnerabilities per severity level, per day, for the projects on the current user's instance security dashboard",
resolver: ::Resolvers::VulnerabilitiesHistoryResolver,
deprecated: { reason: 'Use `vulnerabilitiesCountByDay`', milestone: '13.3' }
field :geo_node, ::Types::Geo::GeoNodeType,
null: true,
resolver: ::Resolvers::Geo::GeoNodeResolver,
description: 'Find a Geo node'
field :instance_security_dashboard, ::Types::InstanceSecurityDashboardType,
null: true,
resolver: ::Resolvers::InstanceSecurityDashboardResolver,
description: 'Fields related to Instance Security Dashboard'
end
end
end
end
| 42.06 | 151 | 0.636234 |
e9e18acddc7b42e0af97bcffedae6ee34139a8cf | 222 | class ChangeOdsaAttemptsCorrectColumnName < ActiveRecord::Migration
def change
rename_column :odsa_exercise_attempts, :correct, :worth_credit
rename_column :odsa_exercise_attempts, :completed, :correct
end
end
| 31.714286 | 67 | 0.815315 |
d585d895425c037f6925d5378bb506c395733ed7 | 111 | class Admin::AdminController < ApplicationController
layout "admin"
before_filter :authenticate_admin!
end
| 22.2 | 52 | 0.81982 |
4aecdb2fd026ced1e43ec510698246f1e4a5952c | 1,783 | # == Schema Information
#
# Table name: litigations
#
# id :bigint not null, primary key
# title :string not null
# slug :string not null
# citation_reference_number :string
# document_type :string
# geography_id :bigint
# summary :text
# at_issue :text
# created_at :datetime not null
# updated_at :datetime not null
# visibility_status :string default("draft")
# created_by_id :bigint
# updated_by_id :bigint
# discarded_at :datetime
# jurisdiction :string
# tsv :tsvector
#
FactoryBot.define do
factory :litigation do
draft
geography { association :geography, created_by: created_by }
jurisdiction { 'Court in Country' }
# laws_sectors { |a| [a.association(:laws_sector)] }
association :created_by, factory: :admin_user
updated_by { created_by }
sequence(:title) { |n| 'Litigation title -' + ('AAAA'..'ZZZZ').to_a[n] }
document_type { 'administrative_case' }
citation_reference_number { 'SFKD777FDK77' }
summary { 'Summary Lorem ipsum dolor dalej nie pamietam' }
at_issue { 'At issue Lorem ipsumumum' }
trait :with_sides do
after(:create) do |l|
create :litigation_side, litigation: l, side_type: 'a'
create :litigation_side, :company, litigation: l, side_type: 'a'
create :litigation_side, :geography, litigation: l, side_type: 'b'
end
end
trait :with_events do
after(:create) do |l|
create_list :litigation_event, 2, eventable: l
end
end
end
end
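# Usage sketch (illustrative, not part of the original factory file):
#
#   FactoryBot.create(:litigation, :with_sides)  # litigation plus three sides
#   FactoryBot.create(:litigation, :with_events) # litigation plus two events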
| 32.418182 | 76 | 0.570387 |
5d7ee13ae98ba72244198969c1545f96cb207a75 | 27,690 | require 'spec_helper'
describe Mongo::URI do
describe '.get' do
let(:uri) { described_class.get(string) }
context 'when the scheme is mongodb://' do
let(:string) do
'mongodb://localhost:27017'
end
it 'returns a Mongo::URI object' do
expect(uri).to be_a(Mongo::URI)
end
end
context 'when the scheme is mongodb+srv://', if: test_connecting_externally? do
let(:string) do
'mongodb+srv://test5.test.build.10gen.cc'
end
it 'returns a Mongo::URI::SRVProtocol object' do
expect(uri).to be_a(Mongo::URI::SRVProtocol)
end
end
context 'when the scheme is invalid' do
let(:string) do
'mongo://localhost:27017'
end
it 'raises an exception' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
end
let(:scheme) { 'mongodb://' }
let(:uri) { described_class.new(string) }
describe 'invalid uris' do
context 'string is not uri' do
let(:string) { 'tyler' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'empty string' do
let(:string) { '' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongo://localhost:27017' do
let(:string) { 'mongo://localhost:27017' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://' do
let(:string) { 'mongodb://' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://localhost::27017' do
let(:string) { 'mongodb://localhost::27017' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://localhost::27017/' do
let(:string) { 'mongodb://localhost::27017/' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://::' do
let(:string) { 'mongodb://::' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://localhost,localhost::' do
let(:string) { 'mongodb://localhost,localhost::' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://localhost::27017,abc' do
let(:string) { 'mongodb://localhost::27017,abc' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://localhost:-1' do
let(:string) { 'mongodb://localhost:-1' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://localhost:0/' do
let(:string) { 'mongodb://localhost:0/' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://localhost:65536' do
let(:string) { 'mongodb://localhost:65536' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://localhost:foo' do
let(:string) { 'mongodb://localhost:foo' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://[::1]:-1' do
let(:string) { 'mongodb://[::1]:-1' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://[::1]:0/' do
let(:string) { 'mongodb://[::1]:0/' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://[::1]:65536' do
let(:string) { 'mongodb://[::1]:65536' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://[::1]:65536/' do
let(:string) { 'mongodb://[::1]:65536/' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://[::1]:foo' do
let(:string) { 'mongodb://[::1]:foo' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://example.com?w=1' do
let(:string) { 'mongodb://example.com?w=1' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://example.com/?w' do
let(:string) { 'mongodb://example.com/?w' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://alice:foo:[email protected]' do
let(:string) { 'mongodb://alice:foo:[email protected]' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://alice@@127.0.0.1' do
let(:string) { 'mongodb://alice@@127.0.0.1' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
context 'mongodb://alice@foo:[email protected]' do
let(:string) { 'mongodb://alice@foo:[email protected]' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
end
describe '#initialize' do
context 'string is not uri' do
let(:string) { 'tyler' }
it 'raises an error' do
expect { uri }.to raise_error(Mongo::Error::InvalidURI)
end
end
end
describe '#servers' do
let(:string) { "#{scheme}#{servers}" }
context 'single server' do
let(:servers) { 'localhost' }
it 'returns an array with the parsed server' do
expect(uri.servers).to eq([servers])
end
end
context 'single server with port' do
let(:servers) { 'localhost:27017' }
it 'returns an array with the parsed server' do
expect(uri.servers).to eq([servers])
end
end
context 'numerical ipv4 server' do
let(:servers) { '127.0.0.1' }
it 'returns an array with the parsed server' do
expect(uri.servers).to eq([servers])
end
end
context 'numerical ipv6 server' do
let(:servers) { '[::1]:27107' }
it 'returns an array with the parsed server' do
expect(uri.servers).to eq([servers])
end
end
context 'unix socket server' do
let(:servers) { '%2Ftmp%2Fmongodb-27017.sock' }
it 'returns an array with the parsed server' do
expect(uri.servers).to eq([URI.unescape(servers)])
end
end
context 'multiple servers' do
let(:servers) { 'localhost,127.0.0.1' }
it 'returns an array with the parsed servers' do
expect(uri.servers).to eq(servers.split(','))
end
end
context 'multiple servers with ports' do
let(:servers) { '127.0.0.1:27107,localhost:27018' }
it 'returns an array with the parsed servers' do
expect(uri.servers).to eq(servers.split(','))
end
end
end
describe '#client_options' do
let(:db) { TEST_DB }
let(:servers) { 'localhost' }
let(:string) { "#{scheme}#{credentials}@#{servers}/#{db}" }
let(:user) { 'tyler' }
let(:password) { 's3kr4t' }
let(:credentials) { "#{user}:#{password}" }
let(:options) do
uri.client_options
end
it 'includes the database in the options' do
expect(options[:database]).to eq(TEST_DB)
end
it 'includes the user in the options' do
expect(options[:user]).to eq(user)
end
it 'includes the password in the options' do
expect(options[:password]).to eq(password)
end
end
describe '#credentials' do
let(:servers) { 'localhost' }
let(:string) { "#{scheme}#{credentials}@#{servers}" }
let(:user) { 'tyler' }
context 'username provided' do
let(:credentials) { "#{user}:" }
it 'returns the username' do
expect(uri.credentials[:user]).to eq(user)
end
end
context 'username and password provided' do
let(:password) { 's3kr4t' }
let(:credentials) { "#{user}:#{password}" }
it 'returns the username' do
expect(uri.credentials[:user]).to eq(user)
end
it 'returns the password' do
expect(uri.credentials[:password]).to eq(password)
end
end
end
describe '#database' do
let(:servers) { 'localhost' }
let(:string) { "#{scheme}#{servers}/#{db}" }
let(:db) { 'auth-db' }
context 'database provided' do
it 'returns the database name' do
expect(uri.database).to eq(db)
end
end
end
describe '#uri_options' do
let(:servers) { 'localhost' }
let(:string) { "#{scheme}#{servers}/?#{options}" }
context 'when no options were provided' do
let(:string) { "#{scheme}#{servers}" }
it 'returns an empty hash' do
expect(uri.uri_options).to be_empty
end
end
context 'write concern options provided' do
context 'numerical w value' do
let(:options) { 'w=1' }
let(:concern) { Mongo::Options::Redacted.new(:w => 1)}
it 'sets the write concern options' do
expect(uri.uri_options[:write]).to eq(concern)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:write]).to eq(concern)
end
end
context 'w=majority' do
let(:options) { 'w=majority' }
let(:concern) { Mongo::Options::Redacted.new(:w => :majority) }
it 'sets the write concern options' do
expect(uri.uri_options[:write]).to eq(concern)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:write]).to eq(concern)
end
end
context 'journal' do
let(:options) { 'journal=true' }
let(:concern) { Mongo::Options::Redacted.new(:j => true) }
it 'sets the write concern options' do
expect(uri.uri_options[:write]).to eq(concern)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:write]).to eq(concern)
end
end
context 'fsync' do
let(:options) { 'fsync=true' }
let(:concern) { Mongo::Options::Redacted.new(:fsync => true) }
it 'sets the write concern options' do
expect(uri.uri_options[:write]).to eq(concern)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:write]).to eq(concern)
end
end
context 'wtimeoutMS' do
let(:timeout) { 1234 }
let(:options) { "w=2&wtimeoutMS=#{timeout}" }
let(:concern) { Mongo::Options::Redacted.new(:w => 2, :timeout => timeout) }
it 'sets the write concern options' do
expect(uri.uri_options[:write]).to eq(concern)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:write]).to eq(concern)
end
end
end
context 'read preference option provided' do
let(:options) { "readPreference=#{mode}" }
context 'primary' do
let(:mode) { 'primary' }
let(:read) { Mongo::Options::Redacted.new(:mode => :primary) }
it 'sets the read preference' do
expect(uri.uri_options[:read]).to eq(read)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:read]).to eq(read)
end
end
context 'primaryPreferred' do
let(:mode) { 'primaryPreferred' }
let(:read) { Mongo::Options::Redacted.new(:mode => :primary_preferred) }
it 'sets the read preference' do
expect(uri.uri_options[:read]).to eq(read)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:read]).to eq(read)
end
end
context 'secondary' do
let(:mode) { 'secondary' }
let(:read) { Mongo::Options::Redacted.new(:mode => :secondary) }
it 'sets the read preference' do
expect(uri.uri_options[:read]).to eq(read)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:read]).to eq(read)
end
end
context 'secondaryPreferred' do
let(:mode) { 'secondaryPreferred' }
let(:read) { Mongo::Options::Redacted.new(:mode => :secondary_preferred) }
it 'sets the read preference' do
expect(uri.uri_options[:read]).to eq(read)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:read]).to eq(read)
end
end
context 'nearest' do
let(:mode) { 'nearest' }
let(:read) { Mongo::Options::Redacted.new(:mode => :nearest) }
it 'sets the read preference' do
expect(uri.uri_options[:read]).to eq(read)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:read]).to eq(read)
end
end
end
context 'read preference tags provided' do
context 'single read preference tag set' do
let(:options) do
'readPreferenceTags=dc:ny,rack:1'
end
let(:read) do
Mongo::Options::Redacted.new(:tag_sets => [{ 'dc' => 'ny', 'rack' => '1' }])
end
it 'sets the read preference tag set' do
expect(uri.uri_options[:read]).to eq(read)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:read]).to eq(read)
end
end
context 'multiple read preference tag sets' do
let(:options) do
'readPreferenceTags=dc:ny&readPreferenceTags=dc:bos'
end
let(:read) do
Mongo::Options::Redacted.new(:tag_sets => [{ 'dc' => 'ny' }, { 'dc' => 'bos' }])
end
it 'sets the read preference tag sets' do
expect(uri.uri_options[:read]).to eq(read)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:read]).to eq(read)
end
end
end
context 'read preference max staleness option provided' do
let(:options) do
'readPreference=Secondary&maxStalenessSeconds=120'
end
let(:read) do
Mongo::Options::Redacted.new(mode: :secondary, :max_staleness => 120)
end
it 'sets the read preference max staleness in seconds' do
expect(uri.uri_options[:read]).to eq(read)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:read]).to eq(read)
end
context 'when the read preference and max staleness combination is invalid' do
context 'when max staleness is combined with read preference mode primary' do
let(:options) do
'readPreference=primary&maxStalenessSeconds=120'
end
it 'raises an exception when read preference is accessed on the client' do
expect {
Mongo::Client.new(string).server_selector
}.to raise_exception(Mongo::Error::InvalidServerPreference)
end
end
context 'when the max staleness value is too small' do
let(:options) do
'readPreference=secondary&maxStalenessSeconds=89'
end
it 'does not raise an exception until the read preference is used' do
expect(Mongo::Client.new(string).read_preference).to eq(BSON::Document.new(mode: :secondary, max_staleness: 89))
end
end
end
end
context 'replica set option provided' do
let(:rs_name) { TEST_SET }
let(:options) { "replicaSet=#{rs_name}" }
it 'sets the replica set option' do
expect(uri.uri_options[:replica_set]).to eq(rs_name)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:replica_set]).to eq(rs_name)
end
end
context 'auth mechanism provided' do
let(:options) { "authMechanism=#{mechanism}" }
context 'plain' do
let(:mechanism) { 'PLAIN' }
let(:expected) { :plain }
it 'sets the auth mechanism to :plain' do
expect(uri.uri_options[:auth_mech]).to eq(expected)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:auth_mech]).to eq(expected)
end
it 'is case-insensitive' do
expect(Mongo::Client.new(string.downcase).options[:auth_mech]).to eq(expected)
end
end
context 'mongodb-cr' do
let(:mechanism) { 'MONGODB-CR' }
let(:expected) { :mongodb_cr }
it 'sets the auth mechanism to :mongodb_cr' do
expect(uri.uri_options[:auth_mech]).to eq(expected)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:auth_mech]).to eq(expected)
end
it 'is case-insensitive' do
expect(Mongo::Client.new(string.downcase).options[:auth_mech]).to eq(expected)
end
end
context 'gssapi' do
let(:mechanism) { 'GSSAPI' }
let(:expected) { :gssapi }
it 'sets the auth mechanism to :gssapi' do
expect(uri.uri_options[:auth_mech]).to eq(expected)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:auth_mech]).to eq(expected)
end
it 'is case-insensitive' do
expect(Mongo::Client.new(string.downcase).options[:auth_mech]).to eq(expected)
end
end
context 'scram-sha-1' do
let(:mechanism) { 'SCRAM-SHA-1' }
let(:expected) { :scram }
it 'sets the auth mechanism to :scram' do
expect(uri.uri_options[:auth_mech]).to eq(expected)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:auth_mech]).to eq(expected)
end
it 'is case-insensitive' do
expect(Mongo::Client.new(string.downcase).options[:auth_mech]).to eq(expected)
end
end
context 'mongodb-x509' do
let(:mechanism) { 'MONGODB-X509' }
let(:expected) { :mongodb_x509 }
it 'sets the auth mechanism to :mongodb_x509' do
expect(uri.uri_options[:auth_mech]).to eq(expected)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:auth_mech]).to eq(expected)
end
it 'is case-insensitive' do
expect(Mongo::Client.new(string.downcase).options[:auth_mech]).to eq(expected)
end
context 'when a username is not provided' do
it 'recognizes the mechanism with no username' do
expect(Mongo::Client.new(string.downcase).options[:auth_mech]).to eq(expected)
expect(Mongo::Client.new(string.downcase).options[:user]).to be_nil
end
end
end
end
context 'auth source provided' do
let(:options) { "authSource=#{source}" }
context 'regular db' do
let(:source) { 'foo' }
it 'sets the auth source to the database' do
expect(uri.uri_options[:auth_source]).to eq(source)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:auth_source]).to eq(source)
end
end
context '$external' do
let(:source) { '$external' }
let(:expected) { :external }
it 'sets the auth source to :external' do
expect(uri.uri_options[:auth_source]).to eq(expected)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:auth_source]).to eq(expected)
end
end
end
context 'auth mechanism properties provided' do
context 'service_name' do
let(:options) do
"authMechanismProperties=SERVICE_NAME:#{service_name}"
end
let(:service_name) { 'foo' }
let(:expected) { Mongo::Options::Redacted.new({ service_name: service_name }) }
it 'sets the auth mechanism properties' do
expect(uri.uri_options[:auth_mech_properties]).to eq(expected)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:auth_mech_properties]).to eq(expected)
end
end
context 'canonicalize_host_name' do
let(:options) do
"authMechanismProperties=CANONICALIZE_HOST_NAME:#{canonicalize_host_name}"
end
let(:canonicalize_host_name) { 'true' }
let(:expected) { Mongo::Options::Redacted.new({ canonicalize_host_name: true }) }
it 'sets the auth mechanism properties' do
expect(uri.uri_options[:auth_mech_properties]).to eq(expected)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:auth_mech_properties]).to eq(expected)
end
end
context 'service_realm' do
let(:options) do
"authMechanismProperties=SERVICE_REALM:#{service_realm}"
end
let(:service_realm) { 'dumdum' }
let(:expected) { Mongo::Options::Redacted.new({ service_realm: service_realm }) }
it 'sets the auth mechanism properties' do
expect(uri.uri_options[:auth_mech_properties]).to eq(expected)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:auth_mech_properties]).to eq(expected)
end
end
context 'multiple properties' do
let(:options) do
"authMechanismProperties=SERVICE_REALM:#{service_realm}," +
"CANONICALIZE_HOST_NAME:#{canonicalize_host_name}," +
"SERVICE_NAME:#{service_name}"
end
let(:service_name) { 'foo' }
let(:canonicalize_host_name) { 'true' }
let(:service_realm) { 'dumdum' }
let(:expected) do
Mongo::Options::Redacted.new({ service_name: service_name,
canonicalize_host_name: true,
service_realm: service_realm })
end
it 'sets the auth mechanism properties' do
expect(uri.uri_options[:auth_mech_properties]).to eq(expected)
end
it 'sets the options on a client created with the uri' do
expect(Mongo::Client.new(string).options[:auth_mech_properties]).to eq(expected)
end
end
end
context 'connectTimeoutMS' do
let(:options) { "connectTimeoutMS=4567" }
      it 'sets the connect timeout' do
expect(uri.uri_options[:connect_timeout]).to eq(4.567)
end
end
context 'socketTimeoutMS' do
let(:options) { "socketTimeoutMS=8910" }
it 'sets the socket timeout' do
expect(uri.uri_options[:socket_timeout]).to eq(8.910)
end
end
context 'when providing serverSelectionTimeoutMS' do
let(:options) { "serverSelectionTimeoutMS=3561" }
      it 'sets the server selection timeout' do
expect(uri.uri_options[:server_selection_timeout]).to eq(3.561)
end
end
context 'when providing localThresholdMS' do
let(:options) { "localThresholdMS=3561" }
      it 'sets the local threshold' do
expect(uri.uri_options[:local_threshold]).to eq(3.561)
end
end
context 'when providing maxPoolSize' do
let(:max_pool_size) { 10 }
let(:options) { "maxPoolSize=#{max_pool_size}" }
it 'sets the max pool size option' do
expect(uri.uri_options[:max_pool_size]).to eq(max_pool_size)
end
end
context 'when providing minPoolSize' do
let(:min_pool_size) { 5 }
let(:options) { "minPoolSize=#{min_pool_size}" }
it 'sets the min pool size option' do
expect(uri.uri_options[:min_pool_size]).to eq(min_pool_size)
end
end
context 'when providing waitQueueTimeoutMS' do
let(:wait_queue_timeout) { 500 }
let(:options) { "waitQueueTimeoutMS=#{wait_queue_timeout}" }
it 'sets the wait queue timeout option' do
expect(uri.uri_options[:wait_queue_timeout]).to eq(0.5)
end
end
context 'ssl' do
let(:options) { "ssl=#{ssl}" }
context 'true' do
let(:ssl) { true }
it 'sets the ssl option to true' do
expect(uri.uri_options[:ssl]).to be true
end
end
context 'false' do
let(:ssl) { false }
it 'sets the ssl option to false' do
expect(uri.uri_options[:ssl]).to be false
end
end
end
context 'grouped and non-grouped options provided' do
let(:options) { 'w=1&ssl=true' }
it 'do not overshadow top level options' do
expect(uri.uri_options).not_to be_empty
end
end
context 'when an invalid option is provided' do
let(:options) { 'invalidOption=10' }
let(:uri_options) do
uri.uri_options
end
it 'does not raise an exception' do
expect(uri_options).to be_empty
end
context 'when an invalid option is combined with valid options' do
let(:options) { 'invalidOption=10&waitQueueTimeoutMS=500&ssl=true' }
it 'does not raise an exception' do
expect(uri_options).not_to be_empty
end
it 'sets the valid options' do
expect(uri_options[:wait_queue_timeout]).to eq(0.5)
expect(uri_options[:ssl]).to be true
end
end
end
context 'when an app name option is provided' do
let(:options) { "appname=uri_test" }
it 'sets the app name on the client' do
expect(Mongo::Client.new(string).options[:app_name]).to eq(:uri_test)
end
end
context 'when a supported compressors option is provided' do
let(:options) { "compressors=zlib" }
it 'sets the compressors as an array on the client' do
expect(Mongo::Client.new(string).options[:compressors]).to eq(['zlib'])
end
end
context 'when a non-supported compressors option is provided' do
let(:options) { "compressors=snoopy" }
let(:client) do
Mongo::Client.new(string)
end
it 'sets no compressors on the client and warns' do
expect(Mongo::Logger.logger).to receive(:warn)
expect(client.options[:compressors]).to be_nil
end
end
context 'when a zlibCompressionLevel option is provided' do
let(:options) { "zlibCompressionLevel=6" }
it 'sets the zlib compression level on the client' do
expect(Mongo::Client.new(string).options[:zlib_compression_level]).to eq(6)
end
end
end
end
| 27.634731 | 124 | 0.596714 |
911bff78b96753a37d2079c3bb7b361cfbcff6e9 | 709 | require 'rails_helper'
RSpec.describe "affiliates/index", :type => :view do
before(:each) do
assign(:affiliates, [
Affiliate.create!(
:name => "Name",
:adv => "MyText",
:display_count => 1,
:view_count => 2
),
Affiliate.create!(
:name => "Name",
:adv => "MyText",
:display_count => 1,
:view_count => 2
)
])
end
it "renders a list of affiliates" do
render
assert_select "tr>td", :text => "Name".to_s, :count => 2
assert_select "tr>td", :text => "MyText".to_s, :count => 2
assert_select "tr>td", :text => 1.to_s, :count => 2
assert_select "tr>td", :text => 2.to_s, :count => 2
end
end
| 24.448276 | 62 | 0.533145 |
9122e121ffe2fbb2b400afe4aa7b47f93e291db2 | 2,858 | module Shoulda # :nodoc:
module Matchers
module ActiveModel # :nodoc:
# Ensures that the attribute can be set on mass update.
#
# it { should_not allow_mass_assignment_of(:password) }
# it { should allow_mass_assignment_of(:first_name) }
#
# In Rails 3.1 you can check role as well:
#
# it { should allow_mass_assignment_of(:first_name).as(:admin) }
#
def allow_mass_assignment_of(value)
AllowMassAssignmentOfMatcher.new(value)
end
class AllowMassAssignmentOfMatcher # :nodoc:
attr_reader :failure_message, :negative_failure_message
def initialize(attribute)
@attribute = attribute.to_s
@options = {}
end
def as(role)
if active_model_less_than_3_1?
raise "You can specify role only in Rails 3.1 or greater"
end
@options[:role] = role
self
end
def matches?(subject)
@subject = subject
if attr_mass_assignable?
if whitelisting?
@negative_failure_message = "#{@attribute} was made accessible"
else
if protected_attributes.empty?
@negative_failure_message = "no attributes were protected"
else
@negative_failure_message = "#{class_name} is protecting " <<
"#{protected_attributes.to_a.to_sentence}, " <<
"but not #{@attribute}."
end
end
true
else
if whitelisting?
@failure_message = "Expected #{@attribute} to be accessible"
else
@failure_message = "Did not expect #{@attribute} to be protected"
end
false
end
end
def description
"allow mass assignment of #{@attribute}"
end
private
def role
@options[:role] || :default
end
def protected_attributes
@protected_attributes ||= (@subject.class.protected_attributes || [])
end
def accessible_attributes
@accessible_attributes ||= (@subject.class.accessible_attributes || [])
end
def whitelisting?
authorizer.kind_of?(::ActiveModel::MassAssignmentSecurity::WhiteList)
end
def attr_mass_assignable?
!authorizer.deny?(@attribute)
end
def authorizer
if active_model_less_than_3_1?
@subject.class.active_authorizer
else
@subject.class.active_authorizer[role]
end
end
def class_name
@subject.class.name
end
def active_model_less_than_3_1?
::ActiveModel::VERSION::STRING.to_f < 3.1
end
end
end
end
end
| 27.480769 | 81 | 0.561582 |
1c47d0efc2c1ff71ff7a5d7e080753dbff29625e | 527 | module Keymaker
class BatchGetNodesRequest < BatchRequest
attr_accessor :node_ids
def initialize(service, node_ids)
self.config = service.config
self.node_ids = node_ids
self.service = service
self.opts = build_job_descriptions_collection
end
def build_job_descriptions_collection
[].tap do |batch_jobs|
node_ids.each_with_index do |node_id, job_id|
batch_jobs << {id: job_id, to: node_uri(node_id), method: "GET"}
end
end
end
end
end
| 21.08 | 74 | 0.669829 |
28a5f9ca06a4a45e9550469c013c9c2c2d02ce3f | 5,734 | #!/usr/bin/perl
###########################################################
# Introducing non-breaking spaces into poetic lines
# for better EPUB formatting.
# An example script.
# NB! It is a sample for the simple cases and it does not
# handle poetry with complex predefined formatting.
#
# Copyright (c) 2011 Anton Bryl
#
##########################################################
use strict;
use warnings;
########################################
# The lists of words in some categories
# may be incomplete.
my $THRESHOLD_LENGTH = 25;
my $determiners = { "a" => 1, "an" => 1, "the" => 1, "this" => 1,
"these" => 1, "those" => 1, "that" => 1 };
my $conjunctions = { "and" => 1, "or" => 1, "but" => 1, "either" => 1,
"neither" => 1, "nor" => 1, "whether" => 1,
"if" => 1, "then" => 1, "else" => 1 };
my $interrogative = { "why" => 1, "who" => 1, "what" => 1, "how" => 1,
"where" => 1, "when" => 1, "whom" => 1,
"whose" => 1};
# the word "not" is not good for processing in this way
my $negation = { "no" => 1 };
my $prepositions = { "at" => 1, "to" => 1, "from" => 1, "in" => 1,
"of" => 1, "through" => 1, "about" => 1, "on" => 1,
"with" => 1, "without" => 1, "until" => 1,
"after" => 1, "before" => 1, "beneath" => 1};
#######################################
if ($#ARGV < 0)
{
print "Usage: perl poemnobr.pl INPUTFULE >OUTPUTFILE\n";
exit;
}
open IN, "$ARGV[0]";
# global array which at each moment hold
# the info on the currently processed line
my @array; # tokens
my @types; # token types
while (<IN>)
{
chomp;
if ($_ =~ /^\s*$/)
{
print "$_\n";
next;
}
my @chararray = split(//,$_);
@array = ();
@types = ();
########################################
# Convert string into an array of tokens
# and an array of token types
#####
my $currentCharType = -1;
my $curStr = "";
for (my $i = 0; $i <= $#chararray; ++$i)
{
my $newCharType = -1;
if ($chararray[$i] =~ /[\s\xa0]/) { $newCharType = 0; } # space
elsif ($chararray[$i] =~ /[,\.\?\-!;:–—\(\)]/) {
$newCharType = 1; # punctuation
}
else { $newCharType = 2; } # a part of a word
if ($newCharType == $currentCharType) { $curStr .= $chararray[$i]; }
else
{
if ($currentCharType != -1) {
push @array, $curStr; push @types, $currentCharType;
}
$curStr = $chararray[$i];
$currentCharType = $newCharType;
}
}
if ($currentCharType != 0) # no spaces in the end
{
push @array, $curStr;
push @types, $currentCharType;
}
############################################
# all initial spaces are non-breaking
## I don’t think I need this bit. Some non-breaking spaces such as names are better determined manually so I don’t want this script to strip them out
if ($types[0] == 0)
{
$array[0] =~ s/\s/\xa0/g;
}
# all non-initial spaces are single;
# no pre-inforced non-breaking spaces
for (my $i = 1; $i <= $#array; ++$i)
{
if ($types[$i] == 0)
{
$array[$i] = " ";
}
}
#####################################################
#####################################################
# Apply rules
#####################################################
# apply rule 1 : no breaks before dashes.
# starting search for dashes-following-spaces from the item [2]
# in the list (initial spaces, if any, are non-breaking already).
for (my $i = 2; $i <= $#array; ++$i)
{
if ($types[$i - 1] != 0) { next; }
if ($array[$i] !~ /^[-–—]+$/) { next; }
if (eligible($i - 1)) { $array[$i - 1] = "\xa0"};
}
#####################################################
# apply rule 2 : join together the portion after the last
# punctuation-followed-by-a-space.
# start searching backward from the last-but-two token
for (my $i = $#array - 1; $i > 0; --$i)
{
if (($types[$i] != 1) || ($types[$i + 1] != 0)) { next; }
$i += 2; # the position of the first non-space token after the punctuation
# now calculate the length of the portion to be concatenated
my $overallLength = 0;
for (my $j = $i; $j <= $#array; ++$j) {
$overallLength += length ($array[$j]);
}
unless ($overallLength > $THRESHOLD_LENGTH)
{
for (my $j = $i; $j <= $#array; ++$j) {
if ($types[$j] == 0) { $array[$j] = "\xa0"; }
}
}
last;
}
#####################################################
# apply rules 3-7 : no breaks after [word-category],
# unless before a punctuation mark.
applyWordSet($determiners);
applyWordSet($negation);
applyWordSet($conjunctions);
applyWordSet($interrogative);
applyWordSet($prepositions);
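  # Illustrative trace (hypothetical input): for the line
  #   "Alas, the night is short"
  # rule 2 finds the last punctuation-plus-space ("," followed by " ") and,
  # because "the night is short" is shorter than $THRESHOLD_LENGTH characters,
  # rewrites every space after it as \xa0; the word-set rules above then
  # mostly find their work already done for short lines like this one.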
###########################################
###########################################
# Output
for (my $i = 0; $i <= $#array; ++$i)
{
$array[$i] =~ s/\xa0/ /g;
print $array[$i];
}
print "\n";
}
close IN;
##############################################
# checks that a new non-breaking space at the
# given position will not result in a too long
# non-breakable sequence
sub eligible
{
my $pos = shift;
my $p = $pos;
my $lengthbefore = 0;
while ($p > 0)
{
--$p;
if ($array[$p] eq " ") { last; }
$lengthbefore += length($array[$p]);
}
$p = $pos;
my $lengthafter = 0;
while ($p < $#array)
{
++$p;
if ($array[$p] eq " ") { last; }
$lengthafter += length($array[$p]);
}
return ($lengthbefore + $lengthafter + 1) <= $THRESHOLD_LENGTH;
}
#########################
sub applyWordSet
{
my $wordset = shift;
for (my $i = 0; $i <= $#array - 2; ++$i)
{
    if ($array[$i + 1] ne " ") { next; } # string compare: the next token must be a plain breakable space
if ($types[$i + 2] == 1) { next; }
if (!exists($wordset->{lc($array[$i])})) { next; }
if (eligible($i + 1)) { $array[$i + 1] = "\xa0"; }
}
} | 26.302752 | 150 | 0.475061 |
0165c305ea34b4dd071d43e6c669fab3db377e57 | 2,141 | require 'helper'
require 'date'
describe Bluecap::Cohort do
before do
Bluecap.redis.flushall
@date = Date.parse('20120701')
@initial_event = 'Sign Up'
@attributes = {:country => 'Australia', :age => 31}
@users = {
:evelyn => Bluecap::Identify.new('Evelyn').handle,
:charlotte => Bluecap::Identify.new('Charlotte').handle
}
@users.values.each do |id|
event = Bluecap::Event.new :id => id,
:name => @initial_event,
:timestamp => @date.to_time.to_i
event.handle
attribute = Bluecap::Attributes.new :id => id,
:attributes => @attributes
attribute.handle
end
end
it 'should find cohort total for date' do
cohort = Bluecap::Cohort.new :initial_event => @initial_event,
:date => @date,
:report_id => 1
cohort.total.should == 2
end
it 'should not include in cohort if initial event did not occur on date' do
sarah = Bluecap::Identify.new('Sarah').handle
event = Bluecap::Event.new :id => sarah,
:name => @initial_event,
:timestamp => (@date + 1).to_time.to_i
event.handle
cohort = Bluecap::Cohort.new :initial_event => @initial_event,
:date => @date,
:report_id => 1
cohort.total.should == 2
end
it 'should allow cohorts to be constructed with attributes of users' do
cohort = Bluecap::Cohort.new :initial_event => @initial_event,
:date => @date,
:attributes => @attributes,
:report_id => 1
cohort.total.should == 2
end
  it 'should not include in cohort if attributes are not matched' do
sarah = Bluecap::Identify.new('Sarah').handle
event = Bluecap::Event.new :id => sarah,
:name => @initial_event,
:timestamp => @date.to_time.to_i
event.handle
attributes = @attributes.clone
attributes[:country] = 'New Zealand'
attribute = Bluecap::Attributes.new :id => sarah,
:attributes => attributes
attribute.handle
cohort = Bluecap::Cohort.new :initial_event => @initial_event,
:date => @date,
:attributes => @attributes,
:report_id => 1
cohort.total.should == 2
end
end
| 27.448718 | 77 | 0.625876 |
4a11988cdee87228c00ed56968bf638bb101cac5 | 252 | class Category < ApplicationRecord
has_many :tasks
has_many :users, through: :tasks
has_many :group_tasks
has_many :groups, through: :group_tasks
has_many :category_users
validates :name, uniqueness: true
validates_presence_of :name
end
| 25.2 | 42 | 0.777778 |
ff2122c10e4ac89b287f1f7512efa8877cddd8f5 | 1,796 | class GitFilterRepo < Formula
desc "Quickly rewrite git repository history"
homepage "https://github.com/newren/git-filter-repo"
url "https://github.com/newren/git-filter-repo/releases/download/v2.24.0/git-filter-repo-2.24.0.tar.xz"
sha256 "92188d3c44b9ff0dd40dfeed72859e0a088f775c12fb24c4e3e27a8064cfcc84"
bottle do
cellar :any_skip_relocation
sha256 "c47ad881f6c545f5ef1ba71b0c847f5a3575a8ff626b76412362813c0fc503a5" => :catalina
sha256 "336c9a9cf76ad1f46f47f74ad4015a905f8690612e2e02072db2300790e89895" => :mojave
sha256 "336c9a9cf76ad1f46f47f74ad4015a905f8690612e2e02072db2300790e89895" => :high_sierra
end
# ignore git dependency audit:
# * Don't use git as a dependency (it's always available)
# But we require Git 2.22.0+
# https://github.com/Homebrew/homebrew-core/pull/46550#issuecomment-563229479
depends_on "git"
# Use any python3 version available
# https://github.com/Homebrew/homebrew-core/pull/46550/files#r363751231
if MacOS.version >= :catalina
uses_from_macos "python3"
else
depends_on "python3"
end
def install
bin.install "git-filter-repo"
man1.install "Documentation/man1/git-filter-repo.1"
end
test do
system "#{bin}/git-filter-repo", "--version"
system "git", "init"
system "git", "config", "user.name", "BrewTestBot"
system "git", "config", "user.email", "[email protected]"
touch "foo"
system "git", "add", "foo"
system "git", "commit", "-m", "foo"
# Use --force to accept non-fresh clone run:
# Aborting: Refusing to overwrite repo history since this does not look like a fresh clone.
# (expected freshly packed repo)
system "#{bin}/git-filter-repo", "--path-rename=foo:bar", "--force"
assert_predicate testpath/"bar", :exist?
end
end
| 35.215686 | 105 | 0.722717 |
017cf5081cdf475dad426821784b2f59e2eca7ef | 479 | cask '[email protected]' do
version '1.1.3'
sha256 'a0a7a242f8299ac4a00af8aa10ccedaf63013c8a068f56eadfb9d730b87155ea'
# releases.hashicorp.com was verified as official when first introduced to the cask
url 'https://releases.hashicorp.com/vault/1.1.3/vault_1.1.3_darwin_amd64.zip'
appcast 'https://github.com/hashicorp/vault/releases.atom'
name 'Vault'
homepage 'https://www.vaultproject.io/'
auto_updates false
conflicts_with formula: 'vault'
binary 'vault'
end
| 29.9375 | 85 | 0.76618 |
f7a2dae253d6fae44cbb428ab1fec575b5b30cbc | 2,203 | class Post
include Mongoid::Document
include Mongoid::Timestamps
field :subject, :type => String
belongs_to :account, index: true, inverse_of: :posts_as_creator
belongs_to :commentable, polymorphic: true, index: true
has_many :subscriptions, :dependent => :destroy
has_many :comments, :dependent => :destroy
has_many :comment_reactions, :dependent => :destroy
after_create do
commentable.subscribers.each { |account| subscriptions.create account: account }
end
def self.commentable_types
%w{Team Activity Mapplication Habit Account Feature Place Photo}
end
def url
if commentable.is_a?(Team)
team = commentable
"#{ENV['BASE_URI']}/a/#{team.gathering.slug}/teams/#{team.id}#post-#{id}"
elsif commentable.is_a?(Activity)
activity = commentable
"#{ENV['BASE_URI']}/a/#{activity.gathering.slug}/activities/#{activity.id}#post-#{id}"
elsif commentable.is_a?(Mapplication)
mapplication = commentable
"#{ENV['BASE_URI']}/a/#{mapplication.gathering.slug}/mapplications/#{mapplication.id}#post-#{id}"
elsif commentable.is_a?(Habit)
habit = commentable
"#{ENV['BASE_URI']}/habits/#{habit.id}#post-#{id}"
elsif commentable.is_a?(Photo)
photo = commentable
"#{ENV['BASE_URI']}/photos/#{photo.id}#post-#{id}"
elsif commentable.is_a?(Account)
account = commentable
"#{ENV['BASE_URI']}/u/#{account.username}#post-#{id}"
elsif commentable.is_a?(Feature)
feature = commentable
"#{ENV['BASE_URI']}/features/#{feature.id}"
elsif commentable.is_a?(Place)
place = commentable
"#{ENV['BASE_URI']}/places/#{place.id}"
end
end
def self.admin_fields
{
:id => {:type => :text, :edit => false},
:subject => :text,
:account_id => :lookup,
:commentable_id => :text,
:commentable_type => :select,
:subscriptions => :collection,
:comments => :collection
}
end
def subscribers
Account.where(:unsubscribed.ne => true).where(:id.in => subscriptions.pluck(:account_id))
end
def emails
subscribers.pluck(:email)
end
end
| 31.028169 | 103 | 0.635497 |
7a2e3505d7a25fa0fef57c785835b8c0e641c904 | 1,966 | RSpec.describe 'group by' do
it 'groups words by length' do
words = ["sue", "alice", "steve", "sally", "adam", "fort", "tops", "dog", "cat"]
grouped = words.group_by do |word|
word.length
end
expected = {3=>["sue", "dog", "cat"], 4=>["adam", "fort", "tops"], 5=>["alice", "steve", "sally"]}
expect(grouped).to eq(expected)
end
it 'group by odd and even' do
numbers = [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
odd_and_even = numbers.group_by do |number|
number % 2
end
expected = {1=>[1, 1, 3, 5, 13, 21, 55], 0=>[2, 8, 34]}
expect(odd_and_even).to eq(expected)
end
it 'group by first letter' do
words = ["ant", "axis", "albatross", "bolt", "badge", "butter", "car", "cdr", "column"]
words_by_first_letter = words.group_by do |wrd|
wrd.chr
end
expected = {"a"=>["ant", "axis", "albatross"], "b"=>["bolt", "badge", "butter"], "c"=>["car", "cdr", "column"]}
expect(words_by_first_letter).to eq(expected)
end
it 'group by uniqueness' do
words = ["one", "two", "one", "TWO", "three", "one", "three", "three", "three"]
grouped = words.group_by do |wrd|
wrd.downcase
end
expected = {"one"=>["one", "one", "one"], "two"=>["two", "TWO"], "three"=>["three", "three", "three", "three"]}
expect(grouped).to eq(expected)
end
it 'grouped by number of zeroes' do
numbers = [1, 3, 500, 200, 4000, 3000, 10000, 90, 20, 500000]
grouped = numbers.group_by do |num|
num.to_s.count("0")
end
expected = {0=>[1, 3], 2=>[500, 200], 3=>[4000, 3000], 4=>[10000], 1=>[90, 20], 5=>[500000]}
expect(grouped).to eq(expected)
end
it 'grouped by order of magnitude' do
numbers = [1, 3, 503, 239, 4938, 3932, 19982, 93, 21, 501787]
grouped = numbers.group_by do |num|
num.to_s.length
end
expected = {1=>[1, 3], 2=>[93, 21], 3=>[503, 239], 4=>[4938, 3932], 5=>[19982], 6=>[501787]}
expect(grouped).to eq(expected)
end
end
| 35.107143 | 115 | 0.563072 |
62936c968e344d2d16d696ad92ad33833f43db7d | 16,427 | module Facebooker
module Rails
# ActionMailer like module for publishing Facbook messages
#
# To use, create a subclass and define methods
# Each method should start by calling send_as to specify the type of message
# Valid options are :email and :notification, :user_action, :profile, :ref
#
#
# Below is an example of each type
#
# class TestPublisher < Facebooker::Rails::Publisher
# # The new message templates are supported as well
# # First, create a method that contains your templates:
# # You may include multiple one line story templates and short story templates
# # but only one full story template
# # Your most specific template should be first
# #
# # Before using, you must register your template by calling register. For this example
# # You would call TestPublisher.register_publish_action
# # Registering the template will store the template id returned from Facebook in the
# # facebook_templates table that is created when you create your first publisher
# def publish_action_template
# one_line_story_template "{*actor*} did stuff with {*friend*}"
# one_line_story_template "{*actor*} did stuff"
# short_story_template "{*actor*} has a title {*friend*}", render(:partial=>"short_body")
# short_story_template "{*actor*} has a title", render(:partial=>"short_body")
# full_story_template "{*actor*} has a title {*friend*}", render(:partial=>"full_body")
# action_links action_link("My text {*template_var*}","{*link_url*}")
# end
#
# # To send a registered template, you need to create a method to set the data
# # The publisher will look up the template id from the facebook_templates table
# def publish_action(f)
# send_as :user_action
# from f
# data :friend=>"Mike"
# end
#
#
# # Provide a from user to send a general notification
# # if from is nil, this will send an announcement
# def notification(to,f)
# send_as :notification
# recipients to
# from f
# fbml "Not"
# end
#
# def email(to,f)
# send_as :email
# recipients to
# from f
# title "Email"
# fbml 'text'
# text fbml
# end
# # This will render the profile in /users/profile.erb
# # it will set @user to user_to_update in the template
# # The mobile profile will be rendered from the app/views/test_publisher/_mobile.erb
# # template
# def profile_update(user_to_update,user_with_session_to_use)
# send_as :profile
# from user_with_session_to_use
# to user_to_update
# profile render(:action=>"/users/profile",:assigns=>{:user=>user_to_update})
# profile_action "A string"
# mobile_profile render(:partial=>"mobile",:assigns=>{:user=>user_to_update})
# end
#
# # Update the given handle ref with the content from a
# # template
# def ref_update(user)
# send_as :ref
# from user
    #    fbml render(:action=>"/users/profile",:assigns=>{:user=>user})
# handle "a_ref_handle"
# end
#
#
# To send a message, use ActionMailer like semantics
# TestPublisher.deliver_action(@user)
#
# For testing, you may want to create an instance of the underlying message without sending it
# TestPublisher.create_action(@user)
    #  will create and return an instance of Facebooker::Feed::Action
#
# Publisher makes many helpers available, including the linking and asset helpers
class Publisher
class FacebookTemplate < ::ActiveRecord::Base
cattr_accessor :template_cache
self.template_cache = {}
def self.inspect(*args)
"FacebookTemplate"
end
def changed?(hash)
if respond_to?(:content_hash)
content_hash != hash
else
false
end
end
class << self
def register(klass,method)
publisher = setup_publisher(klass,method)
template_id = Facebooker::Session.create.register_template_bundle(publisher.one_line_story_templates,publisher.short_story_templates,publisher.full_story_template,publisher.action_links)
template = find_or_initialize_by_template_name(template_name(klass,method))
template.bundle_id = template_id
template.content_hash = hashed_content(klass,method) if template.respond_to?(:content_hash)
template.save!
cache(klass,method,template)
template
end
def for_class_and_method(klass,method)
find_cached(klass,method)
end
def bundle_id_for_class_and_method(klass,method)
for_class_and_method(klass,method).bundle_id
end
def cache(klass,method,template)
template_cache[template_name(klass,method)] = template
end
def clear_cache!
self.template_cache = {}
end
def find_cached(klass,method)
template_cache[template_name(klass,method)] || find_in_db(klass,method)
end
def find_in_db(klass,method)
template = find_by_template_name(template_name(klass,method))
if template and template.changed?(hashed_content(klass,method))
template.destroy
template = nil
end
if template.nil?
template = register(klass,method)
end
template
end
def setup_publisher(klass,method)
publisher = klass.new
publisher.send method + '_template'
publisher
end
def hashed_content(klass, method)
publisher = setup_publisher(klass,method)
Digest::MD5.hexdigest [publisher.one_line_story_templates, publisher.short_story_templates, publisher.full_story_template].to_json
end
def template_name(klass,method)
"#{klass.name}::#{method}"
end
end
end
class_inheritable_accessor :master_helper_module
attr_accessor :one_line_story_templates, :short_story_templates, :action_links
cattr_accessor :skip_registry
self.skip_registry = false
class InvalidSender < StandardError; end
class UnknownBodyType < StandardError; end
class UnspecifiedBodyType < StandardError; end
class Email
attr_accessor :title
attr_accessor :text
attr_accessor :fbml
end
class Notification
attr_accessor :fbml
end
class Profile
attr_accessor :profile
attr_accessor :profile_action
attr_accessor :mobile_profile
attr_accessor :profile_main
end
class Ref
attr_accessor :handle
attr_accessor :fbml
end
class UserAction
attr_accessor :data
attr_accessor :target_ids
attr_accessor :body_general
attr_accessor :template_id
attr_accessor :template_name
def target_ids=(val)
@target_ids = val.is_a?(Array) ? val.join(",") : val
end
end
cattr_accessor :ignore_errors
attr_accessor :_body
def recipients(*args)
if args.size==0
@recipients
else
@recipients=args.first
end
end
def from(*args)
if args.size==0
@from
else
@from=args.first
end
end
def send_as(option)
self._body=case option
when :action
Facebooker::Feed::Action.new
when :story
Facebooker::Feed::Story.new
when :templatized_action
Facebooker::Feed::TemplatizedAction.new
when :notification
Notification.new
when :email
Email.new
when :profile
Profile.new
when :ref
Ref.new
when :user_action
UserAction.new
else
raise UnknownBodyType.new("Unknown type to publish")
end
end
def full_story_template(title=nil,body=nil,params={})
if title.nil?
@full_story_template
else
@full_story_template=params.merge(:template_title=>title, :template_body=>body)
end
end
def one_line_story_template(str)
@one_line_story_templates ||= []
@one_line_story_templates << str
end
def short_story_template(title,body,params={})
@short_story_templates ||= []
@short_story_templates << params.merge(:template_title=>title, :template_body=>body)
end
def action_links(*links)
if links.blank?
@action_links
else
@action_links = links
end
end
def method_missing(name,*args)
if args.size==1 and self._body.respond_to?("#{name}=")
self._body.send("#{name}=",*args)
elsif self._body.respond_to?(name)
self._body.send(name,*args)
else
super
end
end
def image(src,target)
{:src=>image_path(src),:href=> target.respond_to?(:to_str) ? target : url_for(target)}
end
def action_link(text,target)
{:text=>text, :href=>target}
end
def requires_from_user?(from,body)
! (announcement_notification?(from,body) or ref_update?(body) or profile_update?(body))
end
def profile_update?(body)
body.is_a?(Profile)
end
def ref_update?(body)
body.is_a?(Ref)
end
def announcement_notification?(from,body)
from.nil? and body.is_a?(Notification)
end
def send_message(method)
@recipients = @recipients.is_a?(Array) ? @recipients : [@recipients]
if from.nil? and @recipients.size==1 and requires_from_user?(from,_body)
@from = @recipients.first
end
# notifications can
# omit the from address
raise InvalidSender.new("Sender must be a Facebooker::User") unless from.is_a?(Facebooker::User) || !requires_from_user?(from,_body)
case _body
when Facebooker::Feed::TemplatizedAction,Facebooker::Feed::Action
from.publish_action(_body)
when Facebooker::Feed::Story
@recipients.each {|r| r.publish_story(_body)}
when Notification
(from.nil? ? Facebooker::Session.create : from.session).send_notification(@recipients,_body.fbml)
when Email
from.session.send_email(@recipients,
_body.title,
_body.text,
_body.fbml)
when Profile
# If recipient and from aren't the same person, create a new user object using the
# userid from recipient and the session from from
@from = Facebooker::User.new(Facebooker::User.cast_to_facebook_id(@recipients.first),Facebooker::Session.create)
@from.set_profile_fbml(_body.profile, _body.mobile_profile, _body.profile_action, _body.profile_main)
when Ref
Facebooker::Session.create.server_cache.set_ref_handle(_body.handle,_body.fbml)
when UserAction
@from.session.publish_user_action(_body.template_id,_body.data||{},_body.target_ids,_body.body_general)
else
raise UnspecifiedBodyType.new("You must specify a valid send_as")
end
end
# nodoc
# needed for actionview
def logger
RAILS_DEFAULT_LOGGER
end
# nodoc
# delegate to action view. Set up assigns and render
def render(opts)
opts = opts.dup
body = opts.delete(:assigns) || {}
initialize_template_class(body.dup.merge(:controller=>self)).render(opts)
end
def initialize_template_class(assigns)
template_root = "#{RAILS_ROOT}/app/views"
controller_root = File.join(template_root,self.class.controller_path)
#only do this on Rails 2.1
if ActionController::Base.respond_to?(:append_view_path)
# only add the view path once
ActionController::Base.append_view_path(controller_root) unless ActionController::Base.view_paths.include?(controller_root)
end
returning ActionView::Base.new([template_root,controller_root], assigns, self) do |template|
template.controller=self
template.extend(self.class.master_helper_module)
end
end
self.master_helper_module = Module.new
self.master_helper_module.module_eval do
# url_helper delegates to @controller,
# so we need to define that in the template
# we make it point to the publisher
include ActionView::Helpers::UrlHelper
include ActionView::Helpers::TextHelper
include ActionView::Helpers::TagHelper
include ActionView::Helpers::FormHelper
include ActionView::Helpers::FormTagHelper
include ActionView::Helpers::AssetTagHelper
include Facebooker::Rails::Helpers
#define this for the publisher views
def protect_against_forgery?
@paf ||= ActionController::Base.new.send(:protect_against_forgery?)
end
end
ActionController::Routing::Routes.named_routes.install(self.master_helper_module)
include self.master_helper_module
# Publisher is the controller, it should do the rewriting
include ActionController::UrlWriter
class <<self
def register_all_templates
all_templates = instance_methods.grep(/_template$/) - %w(short_story_template full_story_template one_line_story_template)
all_templates.each do |template|
template_name=template.sub(/_template$/,"")
puts "Registering #{template_name}"
send("register_"+template_name)
end
end
def method_missing(name,*args)
should_send = false
method = ''
if md = /^create_(.*)$/.match(name.to_s)
method = md[1]
elsif md = /^deliver_(.*)$/.match(name.to_s)
method = md[1]
should_send = true
elsif md = /^register_(.*)$/.match(name.to_s)
return FacebookTemplate.register(self, md[1])
else
super
end
#now create the item
(publisher=new).send(method,*args)
case publisher._body
when UserAction
publisher._body.template_name = method
publisher._body.template_id = FacebookTemplate.bundle_id_for_class_and_method(self,method)
end
should_send ? publisher.send_message(method) : publisher._body
end
def default_url_options
{:host => Facebooker.canvas_server_base + Facebooker.facebook_path_prefix}
end
def controller_path
self.to_s.underscore
end
def helper(*args)
args.each do |arg|
case arg
when Symbol,String
add_template_helper("#{arg.to_s.classify}Helper".constantize)
when Module
add_template_helper(arg)
end
end
end
def add_template_helper(helper_module) #:nodoc:
master_helper_module.send :include,helper_module
include master_helper_module
end
def inherited(child)
super
child.master_helper_module=Module.new
child.master_helper_module.__send__(:include,self.master_helper_module)
child.send(:include, child.master_helper_module)
FacebookTemplate.clear_cache!
end
end
end
end
end
| 34.510504 | 198 | 0.599683 |
d5a9c174bd808cf90fc6ef79a60aa8f1a4d1fd90 | 109 | require 'spec_helper'
describe InventoryOut do
pending "add some examples to (or delete) #{__FILE__}"
end
| 18.166667 | 56 | 0.761468 |
089e28308e0de8da2187a733dc4b352d57713f99 | 5,166 | Pod::Spec.new do |s|
s.name = 'Firebase'
s.version = '7.5.0'
s.summary = 'Firebase'
s.description = <<-DESC
Simplify your app development, grow your user base, and monetize more effectively with Firebase.
DESC
s.homepage = 'https://firebase.google.com'
s.license = { :type => 'Apache', :file => 'LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-' + s.version.to_s
}
s.preserve_paths = [
"CoreOnly/CHANGELOG.md",
"CoreOnly/NOTICES",
"CoreOnly/README.md"
]
s.social_media_url = 'https://twitter.com/Firebase'
s.ios.deployment_target = '10.0'
s.osx.deployment_target = '10.12'
s.tvos.deployment_target = '10.0'
s.cocoapods_version = '>= 1.4.0'
s.default_subspec = 'Core'
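  # Illustrative consumer usage (assumed, not part of this podspec): an app's
  # Podfile selects individual products through the subspecs declared below, e.g.
  #   pod 'Firebase/Analytics'
  #   pod 'Firebase/Firestore'
  # while a bare `pod 'Firebase'` resolves to the 'Core' subspec set as the default above.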
s.subspec 'Core' do |ss|
ss.ios.deployment_target = '9.0'
ss.osx.deployment_target = '10.12'
ss.tvos.deployment_target = '10.0'
ss.ios.dependency 'FirebaseAnalytics', '7.5.0'
ss.dependency 'Firebase/CoreOnly'
end
s.subspec 'CoreOnly' do |ss|
ss.dependency 'FirebaseCore', '7.5.0'
ss.source_files = 'CoreOnly/Sources/Firebase.h'
ss.preserve_paths = 'CoreOnly/Sources/module.modulemap'
if ENV['FIREBASE_POD_REPO_FOR_DEV_POD'] then
ss.user_target_xcconfig = {
'HEADER_SEARCH_PATHS' => "$(inherited) \"" + ENV['FIREBASE_POD_REPO_FOR_DEV_POD'] + "/CoreOnly/Sources\""
}
else
ss.user_target_xcconfig = {
'HEADER_SEARCH_PATHS' => "$(inherited) ${PODS_ROOT}/Firebase/CoreOnly/Sources"
}
end
ss.ios.deployment_target = '9.0'
ss.osx.deployment_target = '10.12'
ss.tvos.deployment_target = '10.0'
ss.watchos.deployment_target = '6.0'
end
s.subspec 'Analytics' do |ss|
ss.ios.deployment_target = '9.0'
ss.dependency 'Firebase/Core'
end
s.subspec 'ABTesting' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.dependency 'FirebaseABTesting', '~> 7.5.0'
end
s.subspec 'AdMob' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.ios.deployment_target = '9.0'
ss.ios.dependency 'Google-Mobile-Ads-SDK', '~> 7.66'
end
s.subspec 'AppDistribution' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.ios.dependency 'FirebaseAppDistribution', '~> 7.5.0-beta'
end
s.subspec 'Auth' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.dependency 'FirebaseAuth', '~> 7.5.0'
# Standard platforms PLUS watchOS.
ss.ios.deployment_target = '10.0'
ss.osx.deployment_target = '10.12'
ss.tvos.deployment_target = '10.0'
ss.watchos.deployment_target = '6.0'
end
s.subspec 'Crashlytics' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.dependency 'FirebaseCrashlytics', '~> 7.5.0'
# Standard platforms PLUS watchOS.
ss.ios.deployment_target = '10.0'
ss.osx.deployment_target = '10.12'
ss.tvos.deployment_target = '10.0'
ss.watchos.deployment_target = '6.0'
end
s.subspec 'Database' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.dependency 'FirebaseDatabase', '~> 7.5.0'
end
s.subspec 'DynamicLinks' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.ios.dependency 'FirebaseDynamicLinks', '~> 7.5.0'
end
s.subspec 'Firestore' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.dependency 'FirebaseFirestore', '~> 7.5.0'
end
s.subspec 'Functions' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.dependency 'FirebaseFunctions', '~> 7.5.0'
end
s.subspec 'InAppMessaging' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.ios.dependency 'FirebaseInAppMessaging', '~> 7.5.0-beta'
end
s.subspec 'Installations' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.dependency 'FirebaseInstallations', '~> 7.5.0'
end
s.subspec 'Messaging' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.dependency 'FirebaseMessaging', '~> 7.5.0'
# Standard platforms PLUS watchOS.
ss.ios.deployment_target = '10.0'
ss.osx.deployment_target = '10.12'
ss.tvos.deployment_target = '10.0'
ss.watchos.deployment_target = '6.0'
end
s.subspec 'Performance' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.ios.dependency 'FirebasePerformance', '~> 7.5.0'
end
s.subspec 'RemoteConfig' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.dependency 'FirebaseRemoteConfig', '~> 7.5.0'
end
s.subspec 'Storage' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.dependency 'FirebaseStorage', '~> 7.5.0'
# Standard platforms PLUS watchOS.
ss.ios.deployment_target = '10.0'
ss.osx.deployment_target = '10.12'
ss.tvos.deployment_target = '10.0'
ss.watchos.deployment_target = '6.0'
end
s.subspec 'MLCommon' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.ios.dependency 'FirebaseMLCommon', '~> 7.5.0-beta'
end
s.subspec 'MLModelInterpreter' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.ios.dependency 'FirebaseMLModelInterpreter', '~> 7.5.0-beta'
end
s.subspec 'MLVision' do |ss|
ss.dependency 'Firebase/CoreOnly'
ss.ios.dependency 'FirebaseMLVision', '~> 7.5.0-beta'
end
end
| 29.186441 | 113 | 0.65331 |
01d5379e6fc871f5858573a6e735d479266d6e4c | 1,230 | Spree::Order.class_eval do
has_many :fedex_crossborder_order_confirmations, -> { order(created_at: :desc) }
attr_reader :current_fedex_crossborder_confirmation
def current_fedex_crossborder_confirmation
self.fedex_crossborder_order_confirmations.first
end
# Guest checkout doesn't seem to have shipping rates set for some reason.
# Force it to FedEx Crossborder's rate of 0 here if needed.
def ensure_available_shipping_rates
if shipments.any?
shipments.each do |sh|
if sh.shipping_rates.blank? && sh.address.country.iso != 'US'
Spree::ShippingRate.create({
shipment_id: sh.id,
shipping_method_id: Spree::ShippingMethod.find_by(name: 'FedEx CrossBorder').id,
selected: true,
cost: 0
})
end
end
return true
end
if shipments.empty? || shipments.any? { |shipment| shipment.shipping_rates.blank? }
# After this point, order redirects back to 'address' state and asks user to pick a proper address
# Therefore, shipments are not necessary at this point.
shipments.destroy_all
errors.add(:base, Spree.t(:items_cannot_be_shipped)) and return false
end
end
end
| 32.368421 | 104 | 0.687805 |
b9be1a7245daa66dab5b7376d222e6a07c7051bc | 443 | ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
require 'rails/test_help'
require "minitest/reporters"
Minitest::Reporters.use!
class ActiveSupport::TestCase
# Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
fixtures :all
def is_logged_in?
!session[:user_id].nil?
end
# Add more helper methods to be used by all tests here...
include ApplicationHelper
end | 31.642857 | 82 | 0.749436 |
338a5ebd4f26f42965d33cfe6c64538bc65b61ef | 2,431 | require 'spec_helper'
describe "cms_notices", dbscope: :example, type: :feature do
let(:site) { cms_site }
let(:item) { create(:cms_notice, site: site) }
let(:index_path) { cms_notices_path site.host }
let(:new_path) { new_cms_notice_path site.host }
let(:show_path) { cms_notice_path site.host, item }
let(:edit_path) { edit_cms_notice_path site.host, item }
let(:delete_path) { delete_cms_notice_path site.host, item }
let(:copy_path) { copy_cms_notice_path site.host, item }
it "without login" do
visit index_path
expect(current_path).to eq sns_login_path
end
it "without auth" do
login_ss_user
visit index_path
expect(status_code).to eq 403
end
context "with auth" do
before { login_cms_user }
describe "#index" do
it do
visit index_path
expect(status_code).to eq 200
expect(current_path).to eq index_path
end
end
describe "#new" do
it do
visit new_path
within "form#item-form" do
fill_in "item[name]", with: "name-#{unique_id}"
fill_in "item[html]", with: "html-#{unique_id}"
click_button "保存"
end
expect(status_code).to eq 200
expect(current_path).not_to eq new_path
expect(page).not_to have_css("form#item-form")
end
end
describe "#show" do
it do
visit show_path
expect(status_code).to eq 200
expect(current_path).to eq show_path
end
end
describe "#edit" do
it do
visit edit_path
within "form#item-form" do
fill_in "item[name]", with: "name-#{unique_id}"
fill_in "item[html]", with: "html-#{unique_id}"
click_button "保存"
end
expect(status_code).to eq 200
expect(current_path).to eq show_path
expect(page).not_to have_css("form#item-form")
end
end
describe "#delete" do
it do
visit delete_path
within "form" do
click_button "削除"
end
expect(status_code).to eq 200
expect(current_path).to eq index_path
end
end
describe "#copy" do
it do
visit copy_path
within "form#item-form" do
click_button "保存"
end
expect(status_code).to eq 200
expect(current_path).to eq index_path
expect(page).not_to have_css("form#item-form")
end
end
end
end
| 25.589474 | 62 | 0.611682 |
fff7428873066ee6298c127319f6ff5101e2de13 | 2,483 | # meraki
#
# This file was automatically generated by APIMATIC v2.0
# ( https://apimatic.io ).
module Meraki
# AddNetworkSmProfileClarityModel Model.
class AddNetworkSmProfileClarityModel < BaseModel
# The bundle ID of the application, defaults to com.cisco.ciscosecurity.app
# @return [String]
attr_accessor :plugin_bundle_id
# Whether or not to enable browser traffic filtering (one of true, false).
# @return [String]
attr_accessor :filter_browsers
# Whether or not to enable socket traffic filtering (one of true, false).
# @return [String]
attr_accessor :filter_sockets
# The specific VendorConfig to be passed to the filtering framework, as
# JSON. VendorConfig should be an array of objects, as:
# [ { "key": "some_key", type: "some_type", "value": "some_value" }, ... ]
# type is one of manual_string, manual_int, manual_boolean, manual_choice,
# manual_multiselect, manual_list,
# auto_username, auto_email, auto_mac_address, auto_serial_number,
# auto_notes, auto_name
# @return [String]
attr_accessor :vendor_config
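    # Illustrative only (keys follow the shape described above; not part of the
    # generated client): a value for this attribute might be built as
    #   [
    #     { 'key' => 'username', 'type' => 'auto_username' },
    #     { 'key' => 'group',    'type' => 'manual_string', 'value' => 'Sales' }
    #   ].to_json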
# A mapping from model property names to API property names.
def self.names
@_hash = {} if @_hash.nil?
@_hash['plugin_bundle_id'] = 'PluginBundleID'
@_hash['filter_browsers'] = 'FilterBrowsers'
@_hash['filter_sockets'] = 'FilterSockets'
@_hash['vendor_config'] = 'VendorConfig'
@_hash
end
def initialize(plugin_bundle_id = nil,
filter_browsers = nil,
filter_sockets = nil,
vendor_config = nil)
@plugin_bundle_id = plugin_bundle_id
@filter_browsers = filter_browsers
@filter_sockets = filter_sockets
@vendor_config = vendor_config
end
# Creates an instance of the object from a hash.
def self.from_hash(hash)
return nil unless hash
# Extract variables from the hash.
plugin_bundle_id = hash['PluginBundleID']
filter_browsers = hash['FilterBrowsers']
filter_sockets = hash['FilterSockets']
vendor_config = hash['VendorConfig']
# Create object from extracted values.
AddNetworkSmProfileClarityModel.new(plugin_bundle_id,
filter_browsers,
filter_sockets,
vendor_config)
end
end
end
| 35.985507 | 80 | 0.634313 |
acd87ea595485c699e8d46017414a5c3da81c736 | 919 | require 'spec_helper'
# from previous install
describe file('c:/Octopus') do
it { should be_directory }
end
describe file('C:/Program Files/Octopus Deploy/Octopus/Octopus.Server.exe') do
it { should be_file }
end
#describe windows_registry_key('HKEY_LOCAL_MACHINE\Software\Octopus\OctopusServer') do
# it { should exist }
# it { should have_property_value('InstallLocation', :type_string, "C:\\Program Files\\Octopus Deploy\\Octopus\\") }
#end
#describe windows_registry_key('HKEY_LOCAL_MACHINE\Software\Octopus\OctopusServer\OctopusServer') do
# it { should_not exist }
#end
describe file('C:/ProgramData/Octopus/OctopusServer/Instances/OctopusServer.config') do
it { should_not exist }
end
describe service('OctopusDeploy') do
it { should_not be_installed }
end
describe windows_dsc do
it { should be_able_to_get_dsc_configuration }
it { should have_applied_dsc_configuration_successfully }
end
| 27.848485 | 117 | 0.779108 |
1169fce854b2966c9460fd6defa9f27cb5c9d045 | 76 | class Comment < ActiveRecord::Base
belongs_to :user
belongs_to :post
end | 19 | 34 | 0.776316 |
b96a43414984644de4917c856e1646131696a195 | 102 | require "rails_helper"
RSpec.describe ScoreboardsController, type: :controller do
render_views
end
| 17 | 58 | 0.823529 |
33000f6c20e1e5e5c2183204a39c9f8c1d109bdf | 5,975 | # frozen_string_literal: true
module RuboCop
module Cop
module Style
# This cop checks for use of the `File.expand_path` arguments.
# Likewise, it also checks for the `Pathname.new` argument.
#
# Contrastive bad case and good case are alternately shown in
# the following examples.
#
# @example
# # bad
# File.expand_path('..', __FILE__)
#
# # good
# File.expand_path(__dir__)
#
# # bad
# File.expand_path('../..', __FILE__)
#
# # good
# File.expand_path('..', __dir__)
#
# # bad
# File.expand_path('.', __FILE__)
#
# # good
# File.expand_path(__FILE__)
#
# # bad
# Pathname(__FILE__).parent.expand_path
#
# # good
# Pathname(__dir__).expand_path
#
# # bad
# Pathname.new(__FILE__).parent.expand_path
#
# # good
# Pathname.new(__dir__).expand_path
#
class ExpandPathArguments < Cop
include RangeHelp
MSG = 'Use `expand_path(%<new_path>s%<new_default_dir>s)` instead of ' \
'`expand_path(%<current_path>s, __FILE__)`.'.freeze
PATHNAME_MSG = 'Use `Pathname(__dir__).expand_path` instead of ' \
'`Pathname(__FILE__).parent.expand_path`.'.freeze
PATHNAME_NEW_MSG = 'Use `Pathname.new(__dir__).expand_path` ' \
'instead of ' \
'`Pathname.new(__FILE__).parent.expand_path`.'.freeze
def_node_matcher :file_expand_path, <<-PATTERN
(send
(const nil? :File) :expand_path
$_
$_)
PATTERN
def_node_matcher :pathname_parent_expand_path, <<-PATTERN
(send
(send
(send nil? :Pathname
$_) :parent) :expand_path)
PATTERN
def_node_matcher :pathname_new_parent_expand_path, <<-PATTERN
(send
(send
(send
(const nil? :Pathname) :new
$_) :parent) :expand_path)
PATTERN
def on_send(node)
if (captured_values = file_expand_path(node))
current_path, default_dir = captured_values
inspect_offense_for_expand_path(node, current_path, default_dir)
elsif (default_dir = pathname_parent_expand_path(node))
return unless unrecommended_argument?(default_dir)
add_offense(node, message: PATHNAME_MSG)
elsif (default_dir = pathname_new_parent_expand_path(node))
return unless unrecommended_argument?(default_dir)
add_offense(node, message: PATHNAME_NEW_MSG)
end
end
def autocorrect(node)
lambda do |corrector|
if (captured_values = file_expand_path(node))
current_path, default_dir = captured_values
autocorrect_expand_path(corrector, current_path, default_dir)
elsif (default_dir = pathname_parent_expand_path(node)) ||
(default_dir = pathname_new_parent_expand_path(node))
corrector.replace(default_dir.loc.expression, '__dir__')
remove_parent_method(corrector, default_dir)
end
end
end
private
def unrecommended_argument?(default_dir)
default_dir.source == '__FILE__'
end
def inspect_offense_for_expand_path(node, current_path, default_dir)
return unless unrecommended_argument?(default_dir) &&
current_path.str_type?
current_path = strip_surrounded_quotes!(current_path.source)
parent_path = parent_path(current_path)
new_path = parent_path == '' ? '' : "'#{parent_path}', "
new_default_dir = depth(current_path).zero? ? '__FILE__' : '__dir__'
message = format(
MSG,
new_path: new_path,
new_default_dir: new_default_dir,
current_path: "'#{current_path}'"
)
add_offense(node, location: :selector, message: message)
end
def autocorrect_expand_path(corrector, current_path, default_dir)
stripped_current_path = strip_surrounded_quotes!(current_path.source)
case depth(stripped_current_path)
when 0
range = arguments_range(current_path)
corrector.replace(range, '__FILE__')
when 1
range = arguments_range(current_path)
corrector.replace(range, '__dir__')
else
new_path = "'#{parent_path(stripped_current_path)}'"
corrector.replace(current_path.loc.expression, new_path)
corrector.replace(default_dir.loc.expression, '__dir__')
end
end
def strip_surrounded_quotes!(path_string)
path_string.slice!(path_string.length - 1)
path_string.slice!(0)
path_string
end
def depth(current_path)
paths = current_path.split(File::SEPARATOR)
paths.reject { |path| path == '.' }.count
end
def parent_path(current_path)
paths = current_path.split(File::SEPARATOR)
paths.delete('.')
paths.each_with_index do |path, index|
if path == '..'
paths.delete_at(index)
break
end
end
paths.join(File::SEPARATOR)
end
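        # For illustration (hypothetical input): depth('../..') is 2 and
        # parent_path('../..') is '..', so `File.expand_path('../..', __FILE__)`
        # is autocorrected to `File.expand_path('..', __dir__)`.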
def remove_parent_method(corrector, default_dir)
node = default_dir.parent.parent.parent.children.first
corrector.remove(node.loc.dot)
corrector.remove(node.loc.selector)
end
def arguments_range(node)
range_between(node.parent.first_argument.source_range.begin_pos,
node.parent.last_argument.source_range.end_pos)
end
end
end
end
end
| 30.641026 | 80 | 0.577573 |
5ddb20ccd9189b9a56674daa34ebad2458d8241e | 4,910 | # coding: utf-8
module Babushka
class Cmdline
handle('global', "Options that are valid for any handler") {
opt '-v', '--version', "Print the current version"
opt '-h', '--help', "Show this information"
opt '-d', '--debug', "Show more verbose logging, and realtime shell command output"
opt '-s', '--silent', "Only log errors, running silently on success"
opt '--[no-]color',
'--[no-]colour', "Disable color in the output"
}
handle('help', "Print usage information").run {|cmd|
Helpers.print_version :full => true
if cmd.argv.empty?
Helpers.print_usage
Helpers.print_handlers
Helpers.print_notes
elsif (handler = Handler.for(cmd.argv.first)).nil?
LogHelpers.log "#{cmd.argv.first.capitalize}? I have honestly never heard of that."
else
LogHelpers.log "\n#{handler.name} - #{handler.description}"
cmd.parse(&handler.opt_definer)
cmd.print_usage
end
LogHelpers.log "\n"
true
}
handle('version', "Print the current version").run {
Helpers.print_version
true
}
handle('list', "List the available deps") {
opt '-t', '--templates', "List templates instead of deps"
}.run {|cmd|
Base.sources.local_only {
Helpers.generate_list_for(cmd.opts[:templates] ? :templates : :deps, (cmd.argv.first || ""))
}
}
handle('meet', 'The main one: run a dep and all its dependencies.') {
opt '-n', '--dry-run', "Check which deps are met, but don't meet any unmet deps"
opt '-y', '--defaults', "Use dep arguments' default values without prompting"
opt '-u', '--update', "Update sources before loading deps from them"
opt '--show-args', "Show the arguments being passed between deps as they're run"
opt '--profile', "Print a per-line timestamp to the debug log"
opt '--git-fs', "[EXPERIMENTAL] Snapshot the root filesystem in a git repo after meeting deps"
opt '--remote-git-fs', "[EXPERIMENTAL] Snapshot the remote host using --git-fs after remote babushka runs"
}.run {|cmd|
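      # Bare arguments are dep names; "name=value" arguments become dep args.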
dep_names, args = cmd.argv.partition {|arg| arg['='].nil? }
if !(bad_arg = args.detect {|arg| arg[/^\w+=/].nil? }).nil?
LogHelpers.log_error "'#{bad_arg}' looks like a dep argument, but it doesn't make sense."
elsif dep_names.empty?
LogHelpers.log_error "Nothing to do."
else
hashed_args = args.map {|i|
i.split('=', 2)
}.inject({}) {|hsh,i|
hsh[i.first] = i.last
hsh
}
Base.task.process(dep_names, hashed_args, cmd)
end
}
handle('sources', "Manage dep sources") {
opt '-a', '--add NAME URI', "Add the source at URI as NAME"
opt '-u', '--update', "Update all known sources from their remotes"
opt '-l', '--list', "List dep sources that are present locally"
}.run {|cmd|
if cmd.opts.slice(:add, :update, :list).length != 1
LogHelpers.log_error "'sources' requires a single option."
elsif cmd.opts.has_key?(:add)
if cmd.argv.length != 1
LogHelpers.log_error "The -a option requires a URI as its second argument. `babushka sources --help` for more info."
else
begin
Source.new(nil, cmd.opts[:add], cmd.argv.first).add!
rescue SourceError => e
LogHelpers.log_error e.message
end
end
elsif cmd.opts.has_key?(:update)
Base.sources.update!
elsif cmd.opts.has_key?(:list)
Base.sources.list!
end
}
handle('console', "Start an interactive (irb-based) babushka session").run {
exec "irb -r'#{Path.lib / 'babushka'}' --simple-prompt"
}
handle('edit', "Load the file containing the specified dep in $EDITOR").run {|cmd|
if cmd.argv.length != 1
LogHelpers.log_error "'edit' requires a single argument."
else
Base.sources.find_or_suggest(cmd.argv.first) {|dep|
if dep.load_path.nil?
LogHelpers.log_error "Can't edit '#{dep.name}', since it wasn't loaded from a file."
else
file, line = dep.context.source_location
editor_var = ENV['BABUSHKA_EDITOR'] || ENV['VISUAL'] || ENV['EDITOR'] || ShellHelpers.which('subl') || ShellHelpers.which('mate') || ShellHelpers.which('vim') || ShellHelpers.which('vi')
case editor_var
when /^subl/
exec "subl -n '#{file}':#{line}"
when /^mate/
exec "mate -l#{line} '#{file}'"
when /^vim?/, /^nano/, /^pico/, /^emacs/
exec "#{editor_var} +#{line} '#{file}'"
else
exec "#{editor_var} '#{file}'"
end
end
}
end
}
end
end
| 39.28 | 198 | 0.570672 |
f763178c211f2ffe47be02ed9749b2bb84c1b86a | 3,984 | require 'spec_helper'
describe 'collectd::plugin::write_graphite', :type => :class do
let :facts do
{
:osfamily => 'Debian',
:concat_basedir => tmpfilename('collectd-write_graphite'),
:id => 'root',
:kernel => 'Linux',
:path => '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
:collectd_version => '5.0'
}
end
context 'single carbon writer' do
let :params do
{
:carbons => { 'graphite' => {} },
}
end
    it 'Will create the header of /etc/collectd/conf.d/write_graphite-config.conf' do
should contain_concat__fragment('collectd_plugin_write_graphite_conf_header').with({
:content => /<Plugin write_graphite>/,
:target => '/etc/collectd/conf.d/write_graphite-config.conf',
:order => '00'
})
end
    it 'Will create the footer of /etc/collectd/conf.d/write_graphite-config.conf' do
should contain_concat__fragment('collectd_plugin_write_graphite_conf_footer').with({
:content => /<\/Plugin>/,
:target => '/etc/collectd/conf.d/write_graphite-config.conf',
:order => '99'
})
end
it 'includes carbon configuration' do
should contain_concat__fragment('collectd_plugin_write_graphite_conf_graphite_tcp_2003').with({
:content => /<Carbon>/,
:target => '/etc/collectd/conf.d/write_graphite-config.conf',
})
should contain_concat__fragment('collectd_plugin_write_graphite_conf_graphite_tcp_2003').with({
:content => /Host "localhost"/,
})
should contain_concat__fragment('collectd_plugin_write_graphite_conf_graphite_tcp_2003').with({
:content => /Port "2003"/,
})
end
end
context 'multiple carbon writers, collectd <= 5.2' do
let :params do
{
:carbons => {
'graphite_one' => {'graphitehost' => '192.168.1.1', 'graphiteport' => 2004},
'graphite_two' => {'graphitehost' => '192.168.1.2', 'graphiteport' => 2005},
},
}
end
it 'includes graphite_one configuration' do
should contain_concat__fragment('collectd_plugin_write_graphite_conf_graphite_one_tcp_2004').with({
:content => /<Carbon>/,
:target => '/etc/collectd/conf.d/write_graphite-config.conf',
})
should contain_concat__fragment('collectd_plugin_write_graphite_conf_graphite_one_tcp_2004').with({
:content => /Host "192.168.1.1"/,
})
should contain_concat__fragment('collectd_plugin_write_graphite_conf_graphite_one_tcp_2004').with({
:content => /Port "2004"/,
})
end
it 'includes graphite_two configuration' do
should contain_concat__fragment('collectd_plugin_write_graphite_conf_graphite_two_tcp_2005').with({
:content => /<Carbon>/,
:target => '/etc/collectd/conf.d/write_graphite-config.conf',
})
should contain_concat__fragment('collectd_plugin_write_graphite_conf_graphite_two_tcp_2005').with({
:content => /Host "192.168.1.2"/,
})
should contain_concat__fragment('collectd_plugin_write_graphite_conf_graphite_two_tcp_2005').with({
:content => /Port "2005"/,
})
end
end
context 'collectd >= 5.3' do
let :facts do
{
:osfamily => 'Debian',
:concat_basedir => tmpfilename('collectd-write_graphite'),
:id => 'root',
:kernel => 'Linux',
:path => '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
:collectd_version => '5.3'
}
end
let :params do
{
:carbons => { 'graphite' => {} },
}
end
it 'includes <Node "name"> syntax' do
should contain_concat__fragment('collectd_plugin_write_graphite_conf_graphite_tcp_2003').with({
:content => /<Node "graphite">/,
:target => '/etc/collectd/conf.d/write_graphite-config.conf',
})
end
end
end
| 32.655738 | 105 | 0.612952 |
01a2b44ac5a0f3263a137eae0ac5253ad63fddf1 | 156 | require 'test_helper'
module AttachIt
class AttachedFileTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
end
| 15.6 | 50 | 0.685897 |
38b84e0f01238a94dc92bc7406d94d687adc22c9 | 2,252 | require 'spec_helper'
describe 'ensure_not_symlink_target' do
let(:msg) { 'symlink target specified in ensure attr' }
context 'with fix disabled' do
context 'file resource creating a symlink with seperate target attr' do
let(:code) do
<<-END
file { 'foo':
ensure => link,
target => '/foo/bar',
}
END
end
it 'should not detect any problems' do
expect(problems).to have(0).problems
end
end
context 'file resource creating a symlink with target specified in ensure' do
let(:code) do
<<-END
file { 'foo':
ensure => '/foo/bar',
}
END
end
it 'should only detect a single problem' do
expect(problems).to have(1).problem
end
it 'should create a warning' do
expect(problems).to contain_warning(msg).on_line(2).in_column(23)
end
end
end
context 'with fix enabled' do
before do
PuppetLint.configuration.fix = true
end
after do
PuppetLint.configuration.fix = false
end
context 'file resource creating a symlink with seperate target attr' do
let(:code) do
<<-END
file { 'foo':
ensure => link,
target => '/foo/bar',
}
END
end
it 'should not detect any problems' do
expect(problems).to have(0).problems
end
it 'should not modify the manifest' do
expect(manifest).to eq(code)
end
end
context 'file resource creating a symlink with target specified in ensure' do
let(:code) do
<<-END
file { 'foo':
ensure => '/foo/bar',
}
END
end
let(:fixed) do
<<-END
file { 'foo':
ensure => symlink,
target => '/foo/bar',
}
END
end
it 'should only detect a single problem' do
expect(problems).to have(1).problem
end
it 'should fix the problem' do
expect(problems).to contain_fixed(msg).on_line(2).in_column(23)
end
it 'should create a new target param' do
expect(manifest).to eq(fixed)
end
end
end
end
| 22.29703 | 81 | 0.546625 |
87e33a45bd2f8d3090a86fdfed17f60e51dfe24a | 244 | module Tassadar
module MPQ
module CryptBuf
def self.[](index)
crypt_buf[index]
end
      def self.crypt_buf
        @@crypt_buf ||= crypt_buf!
      end
      # Standard MPQ (Storm) encryption table: 0x500 32-bit values generated
      # from the 0x00100001 seed (the original method body was left empty).
      def self.crypt_buf!
        seed = 0x00100001
        buf = Array.new(0x500, 0)
        0x100.times do |index1|
          index2 = index1
          5.times do
            seed = (seed * 125 + 3) % 0x2AAAAB
            high = (seed & 0xFFFF) << 0x10
            seed = (seed * 125 + 3) % 0x2AAAAB
            buf[index2] = high | (seed & 0xFFFF)
            index2 += 0x100
          end
        end
        buf
      end
end
end
end
| 14.352941 | 35 | 0.540984 |
e2861ba8aa360eb690d2bb22ed0d7db21cb720c1 | 1,456 | module SpreeBlog
module Archive
# Public: The main class to be accessed when creating a HTML post archive.
class List
# Gets/Sets the list of years for this archive object.
attr_accessor :years
# Gets/Sets the list of _{Archiveable archiveable}_ objects for this Archive List.
attr_accessor :archiveables
# Create a new {List List} object
#
# archiveables - A collection of _archiveable_ objects. An archiveable is any
# object that responds to published_at.
#
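      # Example (Post is a hypothetical model whose records respond to
      # published_at):
      #
      #   archive = SpreeBlog::Archive::List.new(Post.all)
      #   archive.years # => a list of SpreeBlog::Archive::Year objects
      #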
def initialize(archiveables)
@archiveables = archiveables
ensure_archiveables_are_valid
@years = create_years_from_archiveables
end
# The name of the partial to load for a {List List} object
#
# Returns a String
def to_partial_path
self.class.name.underscore
end
private
def ensure_archiveables_are_valid
return true unless archiveables.detect { |a| !a.respond_to?(:published_at) }
raise SpreeBlog::Archive::ArchiveError,
"Invalid archive object detected. Please make sure your archiveable objects \
respond to :published_at."
end
def create_years_from_archiveables
years_with_archiveables.map { |year, archs| Year.new(year, archs) }
end
def years_with_archiveables
archiveables.chunk { |a| a.published_at.year }
end
end
end
end
| 27.471698 | 88 | 0.659341 |
ace05a53fddcbb5c777d408528dec33cfb23014f | 88 | class FoodAccessPoint < ApplicationRecord
has_many :schedules
belongs_to :user
end
| 14.666667 | 41 | 0.806818 |
26337dfd9015381609e730138cf515979d586c2d | 2,605 | # Copyright 2011-2020, The Trustees of Indiana University and Northwestern
# University. Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# --- END LICENSE_HEADER BLOCK ---
module Avalon
module Batch
class Package
include Enumerable
extend Forwardable
attr_reader :manifest, :collection
def_delegators :@manifest, :each, :dir
def self.locate(root, collection)
Avalon::Batch::Manifest.locate(root).collect { |f| self.new(f, collection) }
end
def initialize(manifest, collection)
@manifest = Avalon::Batch::Manifest.load(manifest, self)
@collection = collection
end
def title
File.basename(@manifest.file)
end
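      # Looks the submitter up by the manifest email: first against the
      # configured Devise authentication key, then by username, then by email.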
def user
@user ||=
User.where(Devise.authentication_keys.first => @manifest.email).first ||
User.where(username: @manifest.email).first ||
User.where(email: @manifest.email).first
@user
end
def file_list
@manifest.collect { |entry| entry.files }.flatten.collect { |f| @manifest.path_to(f[:file]) }
end
def complete?
file_list.all? { |f| FileLocator.new(f).exist? }
end
def each_entry
@manifest.each_with_index do |entry, index|
files = entry.files.dup
files.each { |file| file[:file] = @manifest.path_to(file[:file]) }
yield(entry.fields, files, entry.opts, entry, index)
end
end
def processing?
@manifest.processing?
end
def processed?
@manifest.processed?
end
def valid?
@manifest.each { |entry| entry.valid? }
@manifest.all? { |entry| entry.errors.count == 0 }
end
def process!
@manifest.start!
begin
media_objects = @manifest.entries.collect { |entry| entry.process! }
@manifest.commit!
rescue Exception
@manifest.error!
raise
end
media_objects
end
def errors
Hash[@manifest.collect { |entry| [entry.row,entry.errors] }]
end
end
end
end
| 28.315217 | 101 | 0.624184 |
799bdb8d4245f30bd7822cea1e31adf7306ebfbf | 102 | class AddTrigram < ActiveRecord::Migration[6.1]
def change
enable_extension "pg_trgm"
end
end
| 17 | 47 | 0.745098 |
1c0dbed4fdcdf57af4521317594f696c9f3615f0 | 266 | class FontTelex < Formula
head "https://github.com/google/fonts/raw/master/ofl/telex/Telex-Regular.ttf"
desc "Telex"
homepage "https://www.google.com/fonts/specimen/Telex"
def install
(share/"fonts").install "Telex-Regular.ttf"
end
test do
end
end
| 24.181818 | 79 | 0.718045 |
034f4ab2eaaa75addb0638f2d3e090dbaa3057f7 | 715 | # FIX ERROR: uninitialized constant ApplicationCable::ActionCable
require 'rails_helper'
module Bobot
RSpec.describe CommanderJob, type: :job do
subject(:job) { described_class.perform_later(payload) }
let :payload do
{
'sender' => {
'id' => '2'
},
'recipient' => {
'id' => '3'
},
'timestamp' => 145_776_419_762_7,
'message' => {
'mid' => 'mid.1457764197618:41d102a3e1ae206a38',
'seq' => 73,
'text' => 'Hello, bot!'
}
}
end
it 'queues the job' do
expect { job }.to have_enqueued_job(described_class)
.with(payload)
.on_queue("default")
end
end
end
| 22.34375 | 65 | 0.534266 |
611d080a26f5f2c9e55c84d36f524ea7a05ee3fa | 203 | class CreateIngredientLists < ActiveRecord::Migration[6.1]
def change
create_table :ingredient_lists do |t|
t.integer :quantity
t.boolean :checked
t.timestamps
end
end
end
| 18.454545 | 58 | 0.689655 |
ff24ac27bbdaed0e96902db0cbb713d1235f9af8 | 1,426 | # -*- encoding: utf-8 -*-
# stub: bunny 2.12.0 ruby lib
Gem::Specification.new do |s|
s.name = "bunny".freeze
s.version = "2.12.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Chris Duncan".freeze, "Eric Lindvall".freeze, "Jakub Stastny aka botanicus".freeze, "Michael S. Klishin".freeze, "Stefan Kaes".freeze]
s.date = "2018-09-22"
s.description = "Easy to use, feature complete Ruby client for RabbitMQ 3.3 and later versions.".freeze
s.email = ["[email protected]".freeze]
s.extra_rdoc_files = ["README.md".freeze]
s.files = ["README.md".freeze]
s.homepage = "http://rubybunny.info".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.2".freeze)
s.rubygems_version = "2.7.7".freeze
s.summary = "Popular easy to use Ruby client for RabbitMQ".freeze
s.installed_by_version = "2.7.7" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<amq-protocol>.freeze, [">= 2.3.0", "~> 2.3"])
else
s.add_dependency(%q<amq-protocol>.freeze, [">= 2.3.0", "~> 2.3"])
end
else
s.add_dependency(%q<amq-protocol>.freeze, [">= 2.3.0", "~> 2.3"])
end
end
| 39.611111 | 150 | 0.672511 |
1dfc84664e8c5b46e2ddbe1108db99da27b49844 | 824 | module UserFindables
module ClassMethods
##############################
# Finds the User by the slug #
##############################
def find_by_slug(slug)
self.all.find {|s| s.slug == slug}
end
#################################################
# Test to see if the input email already exists #
#################################################
def find_by_email(record)
self.where('lower(email) = ?', record.downcase).first
end
####################################################
# Test to see if the input username already exists #
####################################################
def find_by_username(record)
self.where('lower(username) = ?', record.downcase).first
end
end # class methods module
end # user findables module
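#
# Usage sketch (assumes the finders are exposed on an ActiveRecord model;
# the include/extend hook is defined outside this file):
#
#   class User < ActiveRecord::Base
#     extend UserFindables::ClassMethods
#   end
#
#   User.find_by_email('[email protected]')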
| 25.75 | 62 | 0.427184 |
7a707e41afb3237501eaa216e431920ded3dded0 | 20,371 | require 'spec_helper'
require 'msf/core/payload_generator'
describe Msf::PayloadGenerator do
include_context 'Msf::Simple::Framework#modules loading'
let(:lhost) { "192.168.172.1"}
let(:lport) { "8443" }
let(:datastore) { { "LHOST" => lhost, "LPORT" => lport } }
let(:add_code) { false }
let(:arch) { "x86" }
let(:badchars) { "\x20\x0D\x0A" }
let(:encoder_reference_name) {
# use encoder_module to ensure it is loaded prior to passing to generator
encoder_module.refname
}
let(:format) { "raw" }
let(:iterations) { 1 }
let(:keep) { false }
let(:nops) { 0 }
let(:payload_reference_name) {
# use payload_module to ensure it is loaded prior to passing to generator
payload_module.refname
}
let(:platform) { "Windows" }
let(:space) { 1073741824 }
let(:stdin) { nil }
let(:template) { File.join(Msf::Config.data_directory, "templates", "template_x86_windows.exe") }
let(:generator_opts) {
{
add_code: add_code,
arch: arch,
badchars: badchars,
encoder: encoder_reference_name,
datastore: datastore,
format: format,
framework: framework,
iterations: iterations,
keep: keep,
nops: nops,
payload: payload_reference_name,
platform: platform,
space: space,
stdin: stdin,
template: template
}
}
let(:payload_module) {
load_and_create_module(
ancestor_reference_names: %w{
stagers/windows/reverse_tcp
stages/windows/meterpreter
},
module_type: 'payload',
reference_name: 'windows/meterpreter/reverse_tcp'
)
}
let(:shellcode) { "\x50\x51\x58\x59" }
let(:encoder_module) {
load_and_create_module(
module_type: 'encoder',
reference_name: 'x86/shikata_ga_nai'
)
}
let(:var_name) { 'buf' }
subject(:payload_generator) {
described_class.new(generator_opts)
}
it { should respond_to :add_code }
it { should respond_to :arch }
it { should respond_to :badchars }
it { should respond_to :cli }
it { should respond_to :encoder }
it { should respond_to :datastore }
it { should respond_to :format }
it { should respond_to :framework }
it { should respond_to :iterations }
it { should respond_to :keep }
it { should respond_to :nops }
it { should respond_to :payload }
it { should respond_to :platform }
it { should respond_to :space }
it { should respond_to :stdin }
it { should respond_to :template }
context 'when creating a new generator' do
subject(:new_payload_generator) { -> { described_class.new(generator_opts) } }
context 'when not given a framework instance' do
let(:generator_opts) {
{
add_code: add_code,
arch: arch,
badchars: badchars,
encoder: encoder_reference_name,
datastore: datastore,
format: format,
iterations: iterations,
keep: keep,
nops: nops,
payload: payload_reference_name,
platform: platform,
space: space,
stdin: stdin,
template: template
}
}
it { should raise_error(KeyError, "key not found: :framework") }
end
context 'when not given a payload' do
let(:payload_reference_name) { nil }
it { should raise_error(ArgumentError, "Invalid Payload Selected") }
end
context 'when given an invalid payload' do
let(:payload_reference_name) { "beos/meterpreter/reverse_gopher" }
it { should raise_error(ArgumentError, "Invalid Payload Selected") }
end
context 'when given a payload through stdin' do
let(:payload_reference_name) { "stdin" }
it { should_not raise_error }
end
context 'when given an invalid format' do
let(:format) { "foobar" }
it { should raise_error(ArgumentError, "Invalid Format Selected") }
end
context 'when given any valid transform format' do
let(:format) { ::Msf::Simple::Buffer.transform_formats.sample }
it { should_not raise_error }
end
context 'when given any valid executable format' do
let(:format) { ::Msf::Util::EXE.to_executable_fmt_formats.sample }
it { should_not raise_error }
end
end
context 'when not given a platform' do
let(:platform) { '' }
context '#platform_list' do
it 'returns an empty PlatformList' do
expect(payload_generator.platform_list.platforms).to be_empty
end
end
context '#choose_platform' do
it 'chooses the platform list for the module' do
expect(payload_generator.choose_platform(payload_module).platforms).to eq [Msf::Module::Platform::Windows]
end
it 'sets the platform attr to the first platform of the module' do
my_generator = payload_generator
my_generator.choose_platform(payload_module)
expect(my_generator.platform).to eq "Windows"
end
end
end
context 'when given an invalid platform' do
let(:platform) { 'foobar' }
context '#platform_list' do
it 'returns an empty PlatformList' do
expect(payload_generator.platform_list.platforms).to be_empty
end
end
context '#choose_platform' do
it 'chooses the platform list for the module' do
expect(payload_generator.choose_platform(payload_module).platforms).to eq [Msf::Module::Platform::Windows]
end
end
end
context 'when given a valid platform' do
context '#platform_list' do
it 'returns a PlatformList containing the Platform class' do
expect(payload_generator.platform_list.platforms.first).to eq Msf::Module::Platform::Windows
end
end
context '#choose_platform' do
context 'when the chosen platform matches the module' do
it 'returns the PlatformList for the selected platform' do
expect(payload_generator.choose_platform(payload_module).platforms).to eq payload_generator.platform_list.platforms
end
end
context 'when the chosen platform and module do not match' do
let(:platform) { "linux" }
it 'returns an empty PlatformList' do
expect(payload_generator.choose_platform(payload_module).platforms).to be_empty
end
end
end
end
context '#choose_arch' do
context 'when no arch is selected' do
let(:arch) { '' }
it 'returns the first arch of the module' do
expect(payload_generator.choose_arch(payload_module)).to eq "x86"
end
it 'sets the arch to match the module' do
my_generator = payload_generator
my_generator.choose_arch(payload_module)
expect(my_generator.arch).to eq "x86"
end
end
context 'when the arch matches the module' do
it 'returns the selected arch' do
expect(payload_generator.choose_arch(payload_module)).to eq arch
end
end
context 'when the arch does not match the module' do
let(:arch) { "mipsle" }
it "returns nil" do
expect(payload_generator.choose_arch(payload_module)).to be_nil
end
end
end
context '#generate_raw_payload' do
context 'when passing a payload through stdin' do
let(:stdin) { "\x90\x90\x90"}
let(:payload_reference_name) { "stdin" }
context 'when no arch has been selected' do
let(:arch) { '' }
it 'raises an IncompatibleArch error' do
expect{payload_generator.generate_raw_payload}.to raise_error(Msf::IncompatibleArch, "You must select an arch for a custom payload")
end
end
context 'when no platform has been selected' do
let(:platform) { '' }
it 'raises an IncompatiblePlatform error' do
expect{payload_generator.generate_raw_payload}.to raise_error(Msf::IncompatiblePlatform, "You must select a platform for a custom payload")
end
end
it 'returns the payload from stdin' do
expect(payload_generator.generate_raw_payload).to eq stdin
end
end
context 'when selecting a metasploit payload' do
context 'when the platform is incompatible with the payload' do
let(:platform) { "linux" }
it 'raises an IncompatiblePlatform error' do
expect{payload_generator.generate_raw_payload}.to raise_error(Msf::IncompatiblePlatform, "The selected platform is incompatible with the payload")
end
end
context 'when the arch is incompatible with the payload' do
let(:arch) { "mipsle" }
it 'raises an IncompatibleArch error' do
expect{payload_generator.generate_raw_payload}.to raise_error(Msf::IncompatibleArch, "The selected arch is incompatible with the payload")
end
end
context 'when one or more datastore options are missing' do
let(:datastore) { {} }
it 'should raise an error' do
expect{payload_generator.generate_raw_payload}.to raise_error(Msf::OptionValidateError)
end
end
it 'returns the raw bytes of the payload' do
expect(payload_generator.generate_raw_payload).to be_present
end
end
end
context '#add_shellcode' do
context 'when add_code is empty' do
it 'returns the original shellcode' do
expect(payload_generator.add_shellcode(shellcode)).to eq shellcode
end
end
context 'when add_code points to a valid file' do
let(:add_code) { File.join(FILE_FIXTURES_PATH, "nop_shellcode.bin")}
context 'but platform is not Windows' do
let(:platform) { "Linux" }
it 'returns the original shellcode' do
expect(payload_generator.add_shellcode(shellcode)).to eq shellcode
end
end
context 'but arch is not x86' do
let(:arch) { "x86_64" }
it 'returns the original shellcode' do
expect(payload_generator.add_shellcode(shellcode)).to eq shellcode
end
end
it 'returns modified shellcode' do
skip "This is a bad test and needs to be refactored"
# The exact length is variable due to random nops inserted into the routine
# It looks like it should always be > 300
# Can't do precise output matching due to this same issue
expect(payload_generator.add_shellcode(shellcode).length).to be > 300
end
end
context 'when add_code points to an invalid file' do
let(:add_code) { "gurfjhfdjhfdsjhfsdvfverf444" }
it 'raises an error' do
expect{payload_generator.add_shellcode(shellcode)}.to raise_error(Errno::ENOENT)
end
end
end
context '#prepend_nops' do
context 'when nops are set to 0' do
let(:nops) { 0 }
before(:each) do
load_and_create_module(
module_type: 'nop',
reference_name: 'x86/opty2'
)
end
it 'returns the unmodified shellcode' do
expect(payload_generator.prepend_nops(shellcode)).to eq shellcode
end
end
context 'when nops are set to more than 0' do
let(:badchars) { '' }
let(:nops) { 20 }
context 'when payload is x86' do
before(:each) do
load_and_create_module(
module_type: 'nop',
reference_name: 'x86/opty2'
)
end
it 'returns shellcode of the correct size' do
final = payload_generator.prepend_nops(shellcode)
expect(final.length).to eq 24
end
it 'puts the nops in front of the original shellcode' do
expect(payload_generator.prepend_nops(shellcode)[20,24]).to eq shellcode
end
end
context 'when payload is Windows x64' do
let(:arch) { 'x86_64' }
let(:payload_module) {
load_and_create_module(
ancestor_reference_names: %w{
stagers/windows/x64/reverse_tcp
stages/windows/x64/meterpreter
},
module_type: 'payload',
reference_name: 'windows/x64/meterpreter/reverse_tcp'
)
}
before(:each) do
load_and_create_module(
module_type: 'nop',
reference_name: 'x64/simple'
)
end
it 'returns shellcode of the correct size' do
final = payload_generator.prepend_nops(shellcode)
expect(final.length).to eq(nops + shellcode.length)
end
it 'puts the nops in front of the original shellcode' do
final = payload_generator.prepend_nops(shellcode)
expect(final[nops, nops + shellcode.length]).to eq shellcode
end
end
end
end
context '#get_encoders' do
let(:encoder_names) { ["Polymorphic XOR Additive Feedback Encoder", "Alpha2 Alphanumeric Mixedcase Encoder" ] }
context 'when an encoder is selected' do
it 'returns an array' do
expect(payload_generator.get_encoders).to be_kind_of Array
end
it 'returns an array with only one element' do
expect(payload_generator.get_encoders.count).to eq 1
end
it 'returns the correct encoder in the array' do
expect(payload_generator.get_encoders.first.name).to eq encoder_names[0]
end
end
context 'when multiple encoders are selected' do
#
# lets
#
let(:encoder_reference_name) {
encoder_reference_names.join(',')
}
let(:encoder_reference_names) {
%w{
x86/shikata_ga_nai
x86/alpha_mixed
}
}
#
# Callbacks
#
before(:each) do
encoder_reference_names.each do |reference_name|
load_and_create_module(
module_type: 'encoder',
reference_name: reference_name
)
end
end
it 'returns an array of the right size' do
expect(payload_generator.get_encoders.count).to eq 2
end
it 'returns each of the selected encoders in the array' do
payload_generator.get_encoders.each do |msf_encoder|
expect(encoder_names).to include msf_encoder.name
end
end
it 'returns the encoders in order of rank high to low' do
expect(payload_generator.get_encoders[0].rank).to be > payload_generator.get_encoders[1].rank
end
end
context 'when no encoder is selected but badchars are present' do
let(:encoder_reference_name) { '' }
it 'returns an array of all encoders with a compatible arch' do
payload_generator.get_encoders.each do |my_encoder|
          expect(my_encoder.arch).to include arch
end
end
end
context 'when no encoder or badchars are selected' do
let(:encoder_reference_name) { '' }
let(:badchars) { '' }
it 'returns an empty array' do
expect(payload_generator.get_encoders).to be_empty
end
end
end
context '#run_encoder' do
it 'should call the encoder a number of times equal to the iterations' do
encoder_module.should_receive(:encode).exactly(iterations).times.and_return(shellcode)
payload_generator.run_encoder(encoder_module, shellcode)
end
context 'when the encoder makes a buffer too large' do
let(:space) { 4 }
it 'should raise an error' do
expect{payload_generator.run_encoder(encoder_module, shellcode)}.to raise_error(Msf::EncoderSpaceViolation, "encoder has made a buffer that is too big")
end
end
end
context '#format_payload' do
context 'when format is js_be' do
let(:format) { "js_be"}
context 'and arch is x86' do
it 'should raise an IncompatibleEndianess error' do
expect{payload_generator.format_payload(shellcode)}.to raise_error(Msf::IncompatibleEndianess, "Big endian format selected for a non big endian payload")
end
end
end
context 'when format is a transform format' do
let(:format) { 'c' }
it 'applies the appropriate transform format' do
::Msf::Simple::Buffer.should_receive(:transform).with(shellcode, format, var_name)
payload_generator.format_payload(shellcode)
end
end
context 'when format is an executable format' do
let(:format) { 'exe' }
it 'applies the appropriate executable format' do
::Msf::Util::EXE.should_receive(:to_executable_fmt).with(framework, arch, kind_of(payload_generator.platform_list.class), shellcode, format, payload_generator.exe_options)
payload_generator.format_payload(shellcode)
end
end
end
context '#generate_java_payload' do
context 'when format is war' do
let(:format) { 'war' }
context 'if the payload is a valid java payload' do
let(:payload_module) {
load_and_create_module(
ancestor_reference_names: %w{
stagers/java/reverse_tcp
stages/java/meterpreter
},
module_type: 'payload',
reference_name: 'java/meterpreter/reverse_tcp'
)
}
it 'calls the generate_war on the payload' do
framework.stub_chain(:payloads, :keys).and_return [payload_reference_name]
framework.stub_chain(:payloads, :create).and_return(payload_module)
payload_module.should_receive(:generate_war).and_call_original
payload_generator.generate_java_payload
end
end
it 'raises an InvalidFormat exception' do
expect{payload_generator.generate_java_payload}.to raise_error(Msf::InvalidFormat)
end
end
context 'when format is raw' do
let(:format) { 'raw' }
context 'if the payload responds to generate_jar' do
let(:payload_module) {
load_and_create_module(
ancestor_reference_names: %w{
stagers/java/reverse_tcp
stages/java/meterpreter
},
module_type: 'payload',
reference_name: 'java/meterpreter/reverse_tcp'
)
}
it 'calls the generate_jar on the payload' do
framework.stub_chain(:payloads, :keys).and_return [payload_reference_name]
framework.stub_chain(:payloads, :create).and_return(payload_module)
payload_module.should_receive(:generate_jar).and_call_original
payload_generator.generate_java_payload
end
end
context 'if the payload does not respond to generate_jar' do
let(:payload_module) {
load_and_create_module(
ancestor_reference_names: %w{
singles/java/jsp_shell_reverse_tcp
},
module_type: 'payload',
reference_name: 'java/jsp_shell_reverse_tcp'
)
}
it 'calls #generate' do
framework.stub_chain(:payloads, :keys).and_return [payload_reference_name]
framework.stub_chain(:payloads, :create).and_return(payload_module)
payload_module.should_receive(:generate).and_call_original
payload_generator.generate_java_payload
end
end
end
context 'when format is a non-java format' do
let(:format) { "exe" }
it 'raises an InvalidFormat exception' do
expect{payload_generator.generate_java_payload}.to raise_error(Msf::InvalidFormat)
end
end
end
context '#generate_payload' do
it 'calls each step of the process' do
payload_generator.should_receive(:generate_raw_payload).and_call_original
payload_generator.should_receive(:add_shellcode).and_call_original
payload_generator.should_receive(:encode_payload).and_call_original
payload_generator.should_receive(:prepend_nops).and_call_original
payload_generator.should_receive(:format_payload).and_call_original
payload_generator.generate_payload
end
context 'when the payload is java' do
let(:payload_module) {
load_and_create_module(
ancestor_reference_names: %w{
stagers/java/reverse_tcp
stages/java/meterpreter
},
module_type: 'payload',
reference_name: 'java/meterpreter/reverse_tcp'
)
}
it 'calls generate_java_payload' do
payload_generator.should_receive(:generate_java_payload).and_call_original
payload_generator.generate_payload
end
end
end
end
| 30.771903 | 179 | 0.648029 |
bb8f556dfb63e7191010f88e300ff5a2d15cf09d | 37 | module Mapas
VERSION = "0.0.1"
end
| 9.25 | 19 | 0.648649 |
87e5750ce6eb5dae81e9bd3b77ad1454947eecdb | 17,608 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe MergeRequests::MergeService do
let_it_be(:user) { create(:user) }
let_it_be(:user2) { create(:user) }
let(:merge_request) { create(:merge_request, :simple, author: user2, assignees: [user2]) }
let(:project) { merge_request.project }
before do
project.add_maintainer(user)
project.add_developer(user2)
end
describe '#execute' do
let(:service) { described_class.new(project, user, merge_params) }
let(:merge_params) do
{ commit_message: 'Awesome message', sha: merge_request.diff_head_sha }
end
context 'valid params' do
before do
allow(service).to receive(:execute_hooks)
expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
perform_enqueued_jobs do
service.execute(merge_request)
end
end
it { expect(merge_request).to be_valid }
it { expect(merge_request).to be_merged }
it 'persists merge_commit_sha and nullifies in_progress_merge_commit_sha' do
expect(merge_request.merge_commit_sha).not_to be_nil
expect(merge_request.in_progress_merge_commit_sha).to be_nil
end
it 'does not update squash_commit_sha if it is not a squash' do
expect(merge_request.squash_commit_sha).to be_nil
end
it 'sends email to user2 about merge of new merge_request' do
email = ActionMailer::Base.deliveries.last
expect(email.to.first).to eq(user2.email)
expect(email.subject).to include(merge_request.title)
end
context 'note creation' do
it 'creates resource state event about merge_request merge' do
event = merge_request.resource_state_events.last
expect(event.state).to eq('merged')
end
end
context 'when squashing' do
let(:merge_params) do
{ commit_message: 'Merge commit message',
squash_commit_message: 'Squash commit message',
sha: merge_request.diff_head_sha }
end
let(:merge_request) do
# A merge request with 5 commits
create(:merge_request, :simple,
author: user2,
assignees: [user2],
squash: true,
source_branch: 'improve/awesome',
target_branch: 'fix')
end
it 'merges the merge request with squashed commits' do
expect(merge_request).to be_merged
merge_commit = merge_request.merge_commit
squash_commit = merge_request.merge_commit.parents.last
expect(merge_commit.message).to eq('Merge commit message')
expect(squash_commit.message).to eq("Squash commit message\n")
end
it 'persists squash_commit_sha' do
squash_commit = merge_request.merge_commit.parents.last
expect(merge_request.squash_commit_sha).to eq(squash_commit.id)
end
end
end
context 'when an invalid sha is passed' do
let(:merge_request) do
create(:merge_request, :simple,
author: user2,
assignees: [user2],
squash: true,
source_branch: 'improve/awesome',
target_branch: 'fix')
end
let(:merge_params) do
{ sha: merge_request.commits.second.sha }
end
it 'does not merge the MR' do
service.execute(merge_request)
expect(merge_request).not_to be_merged
expect(merge_request.merge_error).to match(/Branch has been updated/)
end
end
context 'when the `sha` param is missing' do
let(:merge_params) { {} }
it 'returns the error' do
merge_error = 'Branch has been updated since the merge was requested. '\
'Please review the changes.'
expect { service.execute(merge_request) }
.to change { merge_request.merge_error }
.from(nil).to(merge_error)
end
end
context 'closes related issues' do
before do
allow(project).to receive(:default_branch).and_return(merge_request.target_branch)
end
it 'closes GitLab issue tracker issues' do
issue = create :issue, project: project
commit = instance_double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.current, authored_date: Time.current)
allow(merge_request).to receive(:commits).and_return([commit])
merge_request.cache_merge_request_closes_issues!
service.execute(merge_request)
expect(issue.reload.closed?).to be_truthy
end
context 'with Jira integration' do
include JiraServiceHelper
let(:jira_tracker) { project.create_jira_service }
let(:jira_issue) { ExternalIssue.new('JIRA-123', project) }
let(:commit) { double('commit', safe_message: "Fixes #{jira_issue.to_reference}") }
before do
stub_jira_service_test
project.update!(has_external_issue_tracker: true)
jira_service_settings
stub_jira_urls(jira_issue.id)
allow(merge_request).to receive(:commits).and_return([commit])
end
it 'closes issues on Jira issue tracker' do
jira_issue = ExternalIssue.new('JIRA-123', project)
stub_jira_urls(jira_issue)
commit = double('commit', safe_message: "Fixes #{jira_issue.to_reference}")
allow(merge_request).to receive(:commits).and_return([commit])
expect_any_instance_of(JiraService).to receive(:close_issue).with(merge_request, jira_issue, user).once
service.execute(merge_request)
end
context 'when jira_issue_transition_id is not present' do
before do
allow_any_instance_of(JIRA::Resource::Issue).to receive(:resolution).and_return(nil)
end
it 'does not close issue' do
jira_tracker.update!(jira_issue_transition_id: nil)
expect_any_instance_of(JiraService).not_to receive(:transition_issue)
service.execute(merge_request)
end
end
context 'wrong issue markdown' do
it 'does not close issues on Jira issue tracker' do
jira_issue = ExternalIssue.new('#JIRA-123', project)
stub_jira_urls(jira_issue)
commit = double('commit', safe_message: "Fixes #{jira_issue.to_reference}")
allow(merge_request).to receive(:commits).and_return([commit])
expect_any_instance_of(JiraService).not_to receive(:close_issue)
service.execute(merge_request)
end
end
end
end
context 'closes related todos' do
let(:merge_request) { create(:merge_request, assignees: [user], author: user) }
let(:project) { merge_request.project }
let!(:todo) do
create(:todo, :assigned,
project: project,
author: user,
user: user,
target: merge_request)
end
before do
allow(service).to receive(:execute_hooks)
perform_enqueued_jobs do
service.execute(merge_request)
todo.reload
end
end
it { expect(todo).to be_done }
end
context 'source branch removal' do
context 'when the source branch is protected' do
let(:service) do
described_class.new(project, user, merge_params.merge('should_remove_source_branch' => true))
end
before do
create(:protected_branch, project: project, name: merge_request.source_branch)
end
it 'does not delete the source branch' do
expect(::Branches::DeleteService).not_to receive(:new)
service.execute(merge_request)
end
end
context 'when the source branch is the default branch' do
let(:service) do
described_class.new(project, user, merge_params.merge('should_remove_source_branch' => true))
end
before do
allow(project).to receive(:root_ref?).with(merge_request.source_branch).and_return(true)
end
it 'does not delete the source branch' do
expect(::Branches::DeleteService).not_to receive(:new)
service.execute(merge_request)
end
end
context 'when the source branch can be removed' do
context 'when MR author set the source branch to be removed' do
before do
merge_request.update_attribute(:merge_params, { 'force_remove_source_branch' => '1' })
end
it 'removes the source branch using the author user' do
expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, merge_request.author.id)
service.execute(merge_request)
end
context 'when the merger set the source branch not to be removed' do
let(:service) { described_class.new(project, user, merge_params.merge('should_remove_source_branch' => false)) }
it 'does not delete the source branch' do
expect(::MergeRequests::DeleteSourceBranchWorker).not_to receive(:perform_async)
service.execute(merge_request)
end
end
end
context 'when MR merger set the source branch to be removed' do
let(:service) do
described_class.new(project, user, merge_params.merge('should_remove_source_branch' => true))
end
it 'removes the source branch using the current user' do
expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, user.id)
service.execute(merge_request)
end
end
end
end
context 'error handling' do
before do
allow(Gitlab::AppLogger).to receive(:error)
end
context 'when source is missing' do
it 'logs and saves error' do
allow(merge_request).to receive(:diff_head_sha) { nil }
error_message = 'No source for merge'
service.execute(merge_request)
expect(merge_request.merge_error).to eq(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
end
it 'logs and saves error if there is an exception' do
error_message = 'error message'
allow(service).to receive(:repository).and_raise(error_message)
allow(service).to receive(:execute_hooks)
service.execute(merge_request)
expect(merge_request.merge_error).to eq(described_class::GENERIC_ERROR_MESSAGE)
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
it 'logs and saves error if user is not authorized' do
unauthorized_user = create(:user)
project.add_reporter(unauthorized_user)
service = described_class.new(project, unauthorized_user)
service.execute(merge_request)
expect(merge_request.merge_error)
.to eq('You are not allowed to merge this merge request')
end
it 'logs and saves error if there is an PreReceiveError exception' do
error_message = 'error message'
allow(service).to receive(:repository).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
allow(service).to receive(:execute_hooks)
service.execute(merge_request)
expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
it 'logs and saves error if commit is not created' do
allow_any_instance_of(Repository).to receive(:merge).and_return(false)
allow(service).to receive(:execute_hooks)
service.execute(merge_request)
expect(merge_request).to be_open
expect(merge_request.merge_commit_sha).to be_nil
expect(merge_request.merge_error).to include(described_class::GENERIC_ERROR_MESSAGE)
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(described_class::GENERIC_ERROR_MESSAGE))
end
context 'when squashing is required' do
before do
merge_request.update!(source_branch: 'master', target_branch: 'feature')
merge_request.target_project.project_setting.squash_always!
end
it 'raises an error if squashing is not done' do
error_message = 'requires squashing commits'
service.execute(merge_request)
expect(merge_request).to be_open
expect(merge_request.merge_commit_sha).to be_nil
expect(merge_request.squash_commit_sha).to be_nil
expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
end
context 'when squashing' do
before do
merge_request.update!(source_branch: 'master', target_branch: 'feature')
end
it 'logs and saves error if there is an error when squashing' do
error_message = 'Failed to squash. Should be done manually'
allow_any_instance_of(MergeRequests::SquashService).to receive(:squash!).and_return(nil)
merge_request.update!(squash: true)
service.execute(merge_request)
expect(merge_request).to be_open
expect(merge_request.merge_commit_sha).to be_nil
expect(merge_request.squash_commit_sha).to be_nil
expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
it 'logs and saves error if there is a squash in progress' do
error_message = 'another squash is already in progress'
allow_any_instance_of(MergeRequest).to receive(:squash_in_progress?).and_return(true)
merge_request.update!(squash: true)
service.execute(merge_request)
expect(merge_request).to be_open
expect(merge_request.merge_commit_sha).to be_nil
expect(merge_request.squash_commit_sha).to be_nil
expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
it 'logs and saves error if there is an PreReceiveError exception' do
error_message = 'error message'
allow(service).to receive(:repository).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
allow(service).to receive(:execute_hooks)
merge_request.update!(squash: true)
service.execute(merge_request)
expect(merge_request).to be_open
expect(merge_request.merge_commit_sha).to be_nil
expect(merge_request.squash_commit_sha).to be_nil
expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
context 'when fast-forward merge is not allowed' do
before do
allow_any_instance_of(Repository).to receive(:ancestor?).and_return(nil)
end
%w(semi-linear ff).each do |merge_method|
it "logs and saves error if merge is #{merge_method} only" do
merge_method = 'rebase_merge' if merge_method == 'semi-linear'
merge_request.project.update!(merge_method: merge_method)
error_message = 'Only fast-forward merge is allowed for your project. Please update your source branch'
allow(service).to receive(:execute_hooks)
service.execute(merge_request)
expect(merge_request).to be_open
expect(merge_request.merge_commit_sha).to be_nil
expect(merge_request.squash_commit_sha).to be_nil
expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
end
end
end
context 'when not mergeable' do
let!(:error_message) { 'Merge request is not mergeable' }
context 'with failing CI' do
before do
allow(merge_request).to receive(:mergeable_ci_state?) { false }
end
it 'logs and saves error' do
service.execute(merge_request)
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
end
context 'with unresolved discussions' do
before do
allow(merge_request).to receive(:mergeable_discussions_state?) { false }
end
it 'logs and saves error' do
service.execute(merge_request)
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
context 'when passing `skip_discussions_check: true` as `options` parameter' do
it 'merges the merge request' do
service.execute(merge_request, skip_discussions_check: true)
expect(merge_request).to be_valid
expect(merge_request).to be_merged
end
end
end
end
end
end
end
| 35.861507 | 169 | 0.655043 |
28f6a44d18268585e366dd8f4d2bbeac5964018c | 140 | class RemoveExpirationFromPassenger < ActiveRecord::Migration[5.1]
def change
remove_column :passengers, :expiration, :date
end
end
| 23.333333 | 66 | 0.778571 |
b903228d721183d0678ab69ac372700397dac73c | 617 | module RuGGby
module Action
    # Action invoked when we want to change status
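    #
    # Hypothetical usage, matching the constructor below:
    #
    #   RuGGby::Action::ChangeStatus.new(client, :away, 'back in 5 minutes').run!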
class ChangeStatus < Base
attr_reader :status, :description
def initialize(client, status, description)
@client = client
@block = client.actions[:change_status]
@status = status
@description = description
end
def run!
@client.logger.debug('RuGGby::Action::ChangeStatus')
msg = Packet::Outgoing::ChangeStatus.new(@status, @description)
@client.socket.write(msg)
@block.call(@status, @description) if @block
end
end
end
end
| 19.903226 | 71 | 0.627229 |
3885bf8649714a7aace3dfe199eb1c711c0351a9 | 368 | class ApplicationController < ActionController::Base
protect_from_forgery with: :exception
helper_method :current_user
def current_user
@current_user ||= User.find(session[:user_id]) if session[:user_id]
end
def require_user
redirect_to login_path unless current_user
end
def require_guest
redirect_to home_path if current_user
end
end
| 20.444444 | 71 | 0.774457 |
d5661a6ac0ed4227421369feed8c84c7ed72a57e | 263 | module RSpec
module Proverbs
# In RSpec, notifications are value objects that are passed to formatters
# to provide those formatters with information about a particular event.
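    #
    # A hypothetical failure notification:
    #
    #   Notification.new(example, :failure, "expected true, got false", {})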
Notification = Struct.new(:example, :type, :message, :options)
end
end
| 32.875 | 77 | 0.749049 |
39374a3816e749e3196bafedaa7a496d861ceccf | 3,539 | require_relative '../../spec_helper'
describe "Time#getlocal" do
it "returns a new time which is the local representation of time" do
# Testing with America/Regina here because it doesn't have DST.
with_timezone("CST", -6) do
t = Time.gm(2007, 1, 9, 12, 0, 0)
      t.getlocal.should == Time.local(2007, 1, 9, 6, 0, 0)
end
end
it "returns a Time with UTC offset specified as an Integer number of seconds" do
t = Time.gm(2007, 1, 9, 12, 0, 0).getlocal(3630)
t.should == Time.new(2007, 1, 9, 13, 0, 30, 3630)
t.utc_offset.should == 3630
end
platform_is_not :windows do
it "returns a new time with the correct utc_offset according to the set timezone" do
t = Time.new(2005, 2, 27, 22, 50, 0, -3600)
t.utc_offset.should == -3600
with_timezone("America/New_York") do
t.getlocal.utc_offset.should == -18000
end
end
end
describe "with an argument that responds to #to_int" do
it "coerces using #to_int" do
o = mock('integer')
o.should_receive(:to_int).and_return(3630)
t = Time.gm(2007, 1, 9, 12, 0, 0).getlocal(o)
t.should == Time.new(2007, 1, 9, 13, 0, 30, 3630)
t.utc_offset.should == 3630
end
end
it "returns a Time with a UTC offset of the specified number of Rational seconds" do
t = Time.gm(2007, 1, 9, 12, 0, 0).getlocal(Rational(7201, 2))
t.should == Time.new(2007, 1, 9, 13, 0, Rational(1, 2), Rational(7201, 2))
t.utc_offset.should eql(Rational(7201, 2))
end
describe "with an argument that responds to #to_r" do
it "coerces using #to_r" do
o = mock_numeric('rational')
o.should_receive(:to_r).and_return(Rational(7201, 2))
t = Time.gm(2007, 1, 9, 12, 0, 0).getlocal(o)
t.should == Time.new(2007, 1, 9, 13, 0, Rational(1, 2), Rational(7201, 2))
t.utc_offset.should eql(Rational(7201, 2))
end
end
it "returns a Time with a UTC offset specified as +HH:MM" do
t = Time.gm(2007, 1, 9, 12, 0, 0).getlocal("+01:00")
t.should == Time.new(2007, 1, 9, 13, 0, 0, 3600)
t.utc_offset.should == 3600
end
it "returns a Time with a UTC offset specified as -HH:MM" do
t = Time.gm(2007, 1, 9, 12, 0, 0).getlocal("-01:00")
t.should == Time.new(2007, 1, 9, 11, 0, 0, -3600)
t.utc_offset.should == -3600
end
describe "with an argument that responds to #to_str" do
it "coerces using #to_str" do
o = mock('string')
o.should_receive(:to_str).and_return("+01:00")
t = Time.gm(2007, 1, 9, 12, 0, 0).getlocal(o)
t.should == Time.new(2007, 1, 9, 13, 0, 0, 3600)
t.utc_offset.should == 3600
end
end
it "raises ArgumentError if the String argument is not of the form (+|-)HH:MM" do
t = Time.now
lambda { t.getlocal("3600") }.should raise_error(ArgumentError)
end
it "raises ArgumentError if the String argument is not in an ASCII-compatible encoding" do
t = Time.now
lambda { t.getlocal("-01:00".encode("UTF-16LE")) }.should raise_error(ArgumentError)
end
it "raises ArgumentError if the argument represents a value less than or equal to -86400 seconds" do
t = Time.new
t.getlocal(-86400 + 1).utc_offset.should == (-86400 + 1)
lambda { t.getlocal(-86400) }.should raise_error(ArgumentError)
end
it "raises ArgumentError if the argument represents a value greater than or equal to 86400 seconds" do
t = Time.new
t.getlocal(86400 - 1).utc_offset.should == (86400 - 1)
lambda { t.getlocal(86400) }.should raise_error(ArgumentError)
end
end
| 35.747475 | 104 | 0.642837 |
6a47db989aefea9dc0dff8143ec8b534a7d947b2 | 38,471 | #--
# Copyright 2006 by Chad Fowler, Rich Kilmer, Jim Weirich and others.
# All rights reserved.
# See LICENSE.txt for permissions.
#++
require 'rubygems/version'
require 'rubygems/requirement'
require 'rubygems/platform'
require 'rubygems/maven_gemify'
# :stopdoc:
class Date; end # for ruby_code if date.rb wasn't required
# :startdoc:
##
# The Specification class contains the metadata for a Gem. Typically
# defined in a .gemspec file or a Rakefile, and looks like this:
#
# spec = Gem::Specification.new do |s|
# s.name = 'example'
# s.version = '1.0'
# s.summary = 'Example gem specification'
# ...
# end
#
# For a great way to package gems, use Hoe.
class Gem::Specification
##
# Allows deinstallation of gems with legacy platforms.
attr_accessor :original_platform # :nodoc:
##
  # The version number of a specification that does not specify one
# (i.e. RubyGems 0.7 or earlier).
NONEXISTENT_SPECIFICATION_VERSION = -1
##
# The specification version applied to any new Specification instances
# created. This should be bumped whenever something in the spec format
# changes.
#--
# When updating this number, be sure to also update #to_ruby.
#
# NOTE RubyGems < 1.2 cannot load specification versions > 2.
CURRENT_SPECIFICATION_VERSION = 3
##
# An informal list of changes to the specification. The highest-valued
# key should be equal to the CURRENT_SPECIFICATION_VERSION.
SPECIFICATION_VERSION_HISTORY = {
-1 => ['(RubyGems versions up to and including 0.7 did not have versioned specifications)'],
1 => [
'Deprecated "test_suite_file" in favor of the new, but equivalent, "test_files"',
'"test_file=x" is a shortcut for "test_files=[x]"'
],
2 => [
'Added "required_rubygems_version"',
'Now forward-compatible with future versions',
],
3 => [
'Added Fixnum validation to the specification_version'
]
}
# :stopdoc:
MARSHAL_FIELDS = { -1 => 16, 1 => 16, 2 => 16, 3 => 17 }
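  # TODAY is the most recent local midnight (today's date with the time of
  # day stripped off).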
now = Time.at(Time.now.to_i)
TODAY = now - ((now.to_i + now.gmt_offset) % 86400)
# :startdoc:
##
# Optional block used to gather newly defined instances.
@@gather = nil
##
# List of attribute names: [:name, :version, ...]
@@required_attributes = []
##
# List of _all_ attributes and default values:
#
# [[:name, nil],
# [:bindir, 'bin'],
# ...]
@@attributes = []
@@nil_attributes = []
@@non_nil_attributes = [:@original_platform]
##
# List of array attributes
@@array_attributes = []
##
# Map of attribute names to default values.
@@default_value = {}
##
# Names of all specification attributes
def self.attribute_names
@@attributes.map { |name, default| name }
end
##
# Default values for specification attributes
def self.attribute_defaults
@@attributes.dup
end
##
# The default value for specification attribute +name+
def self.default_value(name)
@@default_value[name]
end
##
# Required specification attributes
def self.required_attributes
@@required_attributes.dup
end
##
# Is +name+ a required attribute?
def self.required_attribute?(name)
@@required_attributes.include? name.to_sym
end
##
# Specification attributes that are arrays (appendable and so-forth)
def self.array_attributes
@@array_attributes.dup
end
##
# Specifies the +name+ and +default+ for a specification attribute, and
# creates a reader and writer method like Module#attr_accessor.
#
# The reader method returns the default if the value hasn't been set.
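  #
  # For example, the bindir attribute (default 'bin', as listed above) is
  # declared with:
  #
  #   attribute :bindir, 'bin'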
def self.attribute(name, default=nil)
ivar_name = "@#{name}".intern
if default.nil? then
@@nil_attributes << ivar_name
else
@@non_nil_attributes << [ivar_name, default]
end
@@attributes << [name, default]
@@default_value[name] = default
attr_accessor(name)
end
##
# Same as :attribute, but ensures that values assigned to the attribute
# are array values by applying :to_a to the value.
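  #
  # For example:
  #
  #   array_attribute :require_paths
  #
  # defines #require_paths and #require_paths=, with the writer wrapping the
  # assigned value in Array().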
def self.array_attribute(name)
@@non_nil_attributes << ["@#{name}".intern, []]
@@array_attributes << name
@@attributes << [name, []]
@@default_value[name] = []
code = %{
def #{name}
@#{name} ||= []
end
def #{name}=(value)
@#{name} = Array(value)
end
}
module_eval code, __FILE__, __LINE__ - 9
end
##
# Same as attribute above, but also records this attribute as mandatory.
def self.required_attribute(*args)
@@required_attributes << args.first
attribute(*args)
end
##
# Sometimes we don't want the world to use a setter method for a
# particular attribute.
#
# +read_only+ makes it private so we can still use it internally.
def self.read_only(*names)
names.each do |name|
private "#{name}="
end
end
# Shortcut for creating several attributes at once (each with a default
# value of +nil+).
def self.attributes(*args)
args.each do |arg|
attribute(arg, nil)
end
end
##
# Some attributes require special behaviour when they are accessed. This
# allows for that.
def self.overwrite_accessor(name, &block)
remove_method name
define_method(name, &block)
end
##
# Defines a _singular_ version of an existing _plural_ attribute (i.e. one
# whose value is expected to be an array). This means just creating a
# helper method that takes a single value and appends it to the array.
# These are created for convenience, so that in a spec, one can write
#
# s.require_path = 'mylib'
#
# instead of:
#
# s.require_paths = ['mylib']
#
# That above convenience is available courtesy of:
#
# attribute_alias_singular :require_path, :require_paths
def self.attribute_alias_singular(singular, plural)
define_method("#{singular}=") { |val|
send("#{plural}=", [val])
}
define_method("#{singular}") {
val = send("#{plural}")
val.nil? ? nil : val.first
}
end
##
# Dump only crucial instance variables.
#--
# MAINTAIN ORDER!
def _dump(limit)
Marshal.dump [
@rubygems_version,
@specification_version,
@name,
@version,
(Time === @date ? @date : (require 'time'; Time.parse(@date.to_s))),
@summary,
@required_ruby_version,
@required_rubygems_version,
@original_platform,
@dependencies,
@rubyforge_project,
@email,
@authors,
@description,
@homepage,
@has_rdoc,
@new_platform,
@licenses
]
end
##
# Load custom marshal format, re-initializing defaults as needed
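  #
  # Together with _dump this lets a specification survive a Marshal round
  # trip, e.g.:
  #
  #   copy = Marshal.load(Marshal.dump(spec))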
def self._load(str)
array = Marshal.load str
spec = Gem::Specification.new
spec.instance_variable_set :@specification_version, array[1]
current_version = CURRENT_SPECIFICATION_VERSION
field_count = if spec.specification_version > current_version then
spec.instance_variable_set :@specification_version,
current_version
MARSHAL_FIELDS[current_version]
else
MARSHAL_FIELDS[spec.specification_version]
end
if array.size < field_count then
raise TypeError, "invalid Gem::Specification format #{array.inspect}"
end
spec.instance_variable_set :@rubygems_version, array[0]
# spec version
spec.instance_variable_set :@name, array[2]
spec.instance_variable_set :@version, array[3]
spec.instance_variable_set :@date, array[4]
spec.instance_variable_set :@summary, array[5]
spec.instance_variable_set :@required_ruby_version, array[6]
spec.instance_variable_set :@required_rubygems_version, array[7]
spec.instance_variable_set :@original_platform, array[8]
spec.instance_variable_set :@dependencies, array[9]
spec.instance_variable_set :@rubyforge_project, array[10]
spec.instance_variable_set :@email, array[11]
spec.instance_variable_set :@authors, array[12]
spec.instance_variable_set :@description, array[13]
spec.instance_variable_set :@homepage, array[14]
spec.instance_variable_set :@has_rdoc, array[15]
spec.instance_variable_set :@new_platform, array[16]
spec.instance_variable_set :@platform, array[16].to_s
    spec.instance_variable_set :@licenses, array[17]
spec.instance_variable_set :@loaded, false
spec
end
##
  # List of dependencies that will automatically be activated at runtime.
def runtime_dependencies
dependencies.select { |d| d.type == :runtime || d.type == nil }
end
##
# List of dependencies that are used for development
def development_dependencies
dependencies.select { |d| d.type == :development }
end
def test_suite_file # :nodoc:
warn 'test_suite_file deprecated, use test_files'
test_files.first
end
def test_suite_file=(val) # :nodoc:
warn 'test_suite_file= deprecated, use test_files='
@test_files = [] unless defined? @test_files
@test_files << val
end
##
# true when this gemspec has been loaded from a specifications directory.
# This attribute is not persisted.
attr_accessor :loaded
##
# Path this gemspec was loaded from. This attribute is not persisted.
attr_accessor :loaded_from
##
# Returns an array with bindir attached to each executable in the
# executables list
def add_bindir(executables)
return nil if executables.nil?
if @bindir then
Array(executables).map { |e| File.join(@bindir, e) }
else
executables
end
rescue
return nil
end
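  # A minimal sketch, assuming the default bindir of 'bin' and a hypothetical
  # executable name:
  #
  #   spec.add_bindir ['example']  #=> ["bin/example"]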
##
# Files in the Gem under one of the require_paths
def lib_files
@files.select do |file|
require_paths.any? do |path|
file.index(path) == 0
end
end
end
##
# True if this gem was loaded from disk
alias :loaded? :loaded
##
# True if this gem has files in test_files
def has_unit_tests?
not test_files.empty?
end
# :stopdoc:
alias has_test_suite? has_unit_tests?
# :startdoc:
##
# Specification constructor. Assigns the default values to the
# attributes and yields itself for further
# initialization. Optionally takes +name+ and +version+.
def initialize name = nil, version = nil
@new_platform = nil
assign_defaults
@loaded = false
@loaded_from = nil
self.name = name if name
self.version = version if version
yield self if block_given?
@@gather.call(self) if @@gather
end
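  # A minimal usage sketch with hypothetical values; the block form below is
  # the usual way gemspecs are written:
  #
  #   Gem::Specification.new 'example', '1.0' do |s|
  #     s.summary = 'An example gem'
  #     s.authors = ['A. Developer']
  #   end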
##
# Duplicates array_attributes from +other_spec+ so state isn't shared.
def initialize_copy(other_spec)
other_ivars = other_spec.instance_variables
other_ivars = other_ivars.map { |ivar| ivar.intern } if # for 1.9
other_ivars.any? { |ivar| String === ivar }
self.class.array_attributes.each do |name|
name = :"@#{name}"
next unless other_ivars.include? name
instance_variable_set name, other_spec.instance_variable_get(name).dup
end
end
##
# Each attribute has a default value (possibly nil). Here, we initialize
  # all attributes to their default value, writing directly to the instance
  # variables rather than through the accessor methods. Furthermore,
# we take a _copy_ of the default so each specification instance has its
# own empty arrays, etc.
def assign_defaults
@@nil_attributes.each do |name|
instance_variable_set name, nil
end
@@non_nil_attributes.each do |name, default|
value = case default
when Time, Numeric, Symbol, true, false, nil then default
else default.dup
end
instance_variable_set name, value
end
# HACK
instance_variable_set :@new_platform, Gem::Platform::RUBY
end
##
# Special loader for YAML files. When a Specification object is loaded
# from a YAML file, it bypasses the normal Ruby object initialization
# routine (#initialize). This method makes up for that and deals with
# gems of different ages.
#
# 'input' can be anything that YAML.load() accepts: String or IO.
def self.from_yaml(input)
input = normalize_yaml_input input
spec = YAML.load input
if spec && spec.class == FalseClass then
raise Gem::EndOfYAMLException
end
unless Gem::Specification === spec then
raise Gem::Exception, "YAML data doesn't evaluate to gem specification"
end
unless (spec.instance_variables.include? '@specification_version' or
spec.instance_variables.include? :@specification_version) and
spec.instance_variable_get :@specification_version
spec.instance_variable_set :@specification_version,
NONEXISTENT_SPECIFICATION_VERSION
end
spec
end
##
# Loads ruby format gemspec from +filename+
def self.load(filename)
gemspec = nil
raise "NESTED Specification.load calls not allowed!" if @@gather
@@gather = proc { |gs| gemspec = gs }
data = File.read filename
eval data, nil, filename
gemspec
ensure
@@gather = nil
end
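  # A minimal usage sketch, assuming a hypothetical file that defines a gem
  # named 'example':
  #
  #   spec = Gem::Specification.load 'example.gemspec'
  #   spec.name  #=> "example"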
##
# Make sure the YAML specification is properly formatted with dashes
def self.normalize_yaml_input(input)
result = input.respond_to?(:read) ? input.read : input
result = "--- " + result unless result =~ /^--- /
result
end
##
# Sets the rubygems_version to the current RubyGems version
def mark_version
@rubygems_version = Gem::VERSION
end
##
# Ignore unknown attributes while loading
def method_missing(sym, *a, &b) # :nodoc:
if @specification_version > CURRENT_SPECIFICATION_VERSION and
sym.to_s =~ /=$/ then
warn "ignoring #{sym} loading #{full_name}" if $DEBUG
else
super
end
end
##
# Adds a development dependency named +gem+ with +requirements+ to this
# Gem. For example:
#
# spec.add_development_dependency 'jabber4r', '> 0.1', '<= 0.5'
#
# Development dependencies aren't installed by default and aren't
# activated when a gem is required.
def add_development_dependency(gem, *requirements)
add_dependency_with_type(gem, :development, *requirements)
end
##
# Adds a runtime dependency named +gem+ with +requirements+ to this Gem.
# For example:
#
# spec.add_runtime_dependency 'jabber4r', '> 0.1', '<= 0.5'
def add_runtime_dependency(gem, *requirements)
add_dependency_with_type(gem, :runtime, *requirements)
end
##
# Adds a runtime dependency
alias add_dependency add_runtime_dependency
##
# Returns the full name (name-version) of this Gem. Platform information
# is included (name-version-platform) if it is specified and not the
# default Ruby platform.
def full_name
if platform == Gem::Platform::RUBY or platform.nil? then
"#{@name}-#{@version}"
else
"#{@name}-#{@version}-#{platform}"
end
end
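  # A minimal sketch with hypothetical values; a pure-Ruby gem omits the
  # platform suffix:
  #
  #   Gem::Specification.new('example', '1.0').full_name  #=> "example-1.0"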
##
# Returns the full name (name-version) of this gemspec using the original
# platform. For use with legacy gems.
def original_name # :nodoc:
if platform == Gem::Platform::RUBY or platform.nil? then
"#{@name}-#{@version}"
else
"#{@name}-#{@version}-#{@original_platform}"
end
end
##
# The full path to the gem (install path + full name).
def full_gem_path
path = File.join installation_path, 'gems', full_name
return path if File.directory? path
File.join installation_path, 'gems', original_name
end
##
# The default (generated) file name of the gem. See also #spec_name.
#
# spec.file_name # => "example-1.0.gem"
def file_name
full_name + '.gem'
end
##
# The directory that this gem was installed into.
def installation_path
unless @loaded_from then
raise Gem::Exception, "spec #{full_name} is not from an installed gem"
end
File.expand_path File.dirname(File.dirname(@loaded_from))
end
##
# Checks if this specification meets the requirement of +dependency+.
def satisfies_requirement?(dependency)
return @name == dependency.name &&
dependency.requirement.satisfied_by?(@version)
end
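  # A minimal sketch, assuming a hypothetical spec for example-1.2:
  #
  #   spec.satisfies_requirement? Gem::Dependency.new('example', '>= 1.0')
  #   #=> true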
##
# Returns an object you can use to sort specifications in #sort_by.
def sort_obj
[@name, @version, @new_platform == Gem::Platform::RUBY ? -1 : 1]
end
##
# The default name of the gemspec. See also #file_name
#
# spec.spec_name # => "example-1.0.gemspec"
def spec_name
full_name + '.gemspec'
end
def <=>(other) # :nodoc:
sort_obj <=> other.sort_obj
end
##
# Tests specs for equality (across all attributes).
def ==(other) # :nodoc:
self.class === other && same_attributes?(other)
end
alias eql? == # :nodoc:
##
# True if this gem has the same attributes as +other+.
def same_attributes?(other)
@@attributes.each do |name, default|
return false unless self.send(name) == other.send(name)
end
true
end
private :same_attributes?
def hash # :nodoc:
@@attributes.inject(0) { |hash_code, (name, default_value)|
n = self.send(name).hash
hash_code + n
}
end
def encode_with coder # :nodoc:
mark_version
attributes = @@attributes.map { |name,| name.to_s }.sort
attributes = attributes - %w[name version platform]
coder.add 'name', @name
coder.add 'version', @version
platform = case @original_platform
when nil, '' then
'ruby'
when String then
@original_platform
else
@original_platform.to_s
end
coder.add 'platform', platform
attributes.each do |name|
coder.add name, instance_variable_get("@#{name}")
end
end
def to_yaml(opts = {}) # :nodoc:
return super if YAML.const_defined?(:ENGINE) && !YAML::ENGINE.syck?
yaml = YAML.quick_emit object_id, opts do |out|
out.map taguri, to_yaml_style do |map|
encode_with map
end
end
end
def init_with coder # :nodoc:
yaml_initialize coder.tag, coder.map
end
def yaml_initialize(tag, vals) # :nodoc:
vals.each do |ivar, val|
instance_variable_set "@#{ivar}", val
end
@original_platform = @platform # for backwards compatibility
self.platform = Gem::Platform.new @platform
end
##
# Returns a Ruby code representation of this specification, such that it
# can be eval'ed and reconstruct the same specification later. Attributes
# that still have their default values are omitted.
def to_ruby
mark_version
result = []
result << "# -*- encoding: utf-8 -*-"
result << nil
result << "Gem::Specification.new do |s|"
result << " s.name = #{ruby_code name}"
result << " s.version = #{ruby_code version}"
unless platform.nil? or platform == Gem::Platform::RUBY then
result << " s.platform = #{ruby_code original_platform}"
end
result << ""
result << " s.required_rubygems_version = #{ruby_code required_rubygems_version} if s.respond_to? :required_rubygems_version="
handled = [
:dependencies,
:name,
:platform,
:required_rubygems_version,
:specification_version,
:version,
]
attributes = @@attributes.sort_by { |attr_name,| attr_name.to_s }
attributes.each do |attr_name, default|
next if handled.include? attr_name
current_value = self.send(attr_name)
if current_value != default or
self.class.required_attribute? attr_name then
result << " s.#{attr_name} = #{ruby_code current_value}"
end
end
result << nil
result << " if s.respond_to? :specification_version then"
result << " current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION"
result << " s.specification_version = #{specification_version}"
result << nil
result << " if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then"
unless dependencies.empty? then
dependencies.each do |dep|
version_reqs_param = dep.requirements_list.inspect
dep.instance_variable_set :@type, :runtime if dep.type.nil? # HACK
result << " s.add_#{dep.type}_dependency(%q<#{dep.name}>, #{version_reqs_param})"
end
end
result << " else"
unless dependencies.empty? then
dependencies.each do |dep|
version_reqs_param = dep.requirements_list.inspect
result << " s.add_dependency(%q<#{dep.name}>, #{version_reqs_param})"
end
end
result << ' end'
result << " else"
dependencies.each do |dep|
version_reqs_param = dep.requirements_list.inspect
result << " s.add_dependency(%q<#{dep.name}>, #{version_reqs_param})"
end
result << " end"
result << "end"
result << nil
result.join "\n"
end
##
# Checks that the specification contains all required fields, and does a
# very basic sanity check.
#
# Raises InvalidSpecificationException if the spec does not pass the
  # checks.
def validate
extend Gem::UserInteraction
normalize
if rubygems_version != Gem::VERSION then
raise Gem::InvalidSpecificationException,
"expected RubyGems version #{Gem::VERSION}, was #{rubygems_version}"
end
@@required_attributes.each do |symbol|
unless self.send symbol then
raise Gem::InvalidSpecificationException,
"missing value for attribute #{symbol}"
end
end
unless String === name then
raise Gem::InvalidSpecificationException,
"invalid value for attribute name: \"#{name.inspect}\""
end
if require_paths.empty? then
raise Gem::InvalidSpecificationException,
'specification must have at least one require_path'
end
@files.delete_if do |file| File.directory? file end
@test_files.delete_if do |file| File.directory? file end
@executables.delete_if do |file|
File.directory? File.join(bindir, file)
end
@extra_rdoc_files.delete_if do |file| File.directory? file end
@extensions.delete_if do |file| File.directory? file end
non_files = files.select do |file|
!File.file? file
end
unless non_files.empty? then
non_files = non_files.map { |file| file.inspect }
raise Gem::InvalidSpecificationException,
"[#{non_files.join ", "}] are not files"
end
unless specification_version.is_a?(Fixnum)
raise Gem::InvalidSpecificationException,
'specification_version must be a Fixnum (did you mean version?)'
end
case platform
when Gem::Platform, Gem::Platform::RUBY then # ok
else
raise Gem::InvalidSpecificationException,
"invalid platform #{platform.inspect}, see Gem::Platform"
end
unless Array === authors and
authors.all? { |author| String === author } then
raise Gem::InvalidSpecificationException,
'authors must be Array of Strings'
end
licenses.each { |license|
if license.length > 64
raise Gem::InvalidSpecificationException,
"each license must be 64 characters or less"
end
}
# reject FIXME and TODO
unless authors.grep(/FIXME|TODO/).empty? then
raise Gem::InvalidSpecificationException,
'"FIXME" or "TODO" is not an author'
end
unless Array(email).grep(/FIXME|TODO/).empty? then
raise Gem::InvalidSpecificationException,
'"FIXME" or "TODO" is not an email address'
end
if description =~ /FIXME|TODO/ then
raise Gem::InvalidSpecificationException,
'"FIXME" or "TODO" is not a description'
end
if summary =~ /FIXME|TODO/ then
raise Gem::InvalidSpecificationException,
'"FIXME" or "TODO" is not a summary'
end
if homepage and not homepage.empty? and
homepage !~ /\A[a-z][a-z\d+.-]*:/i then
raise Gem::InvalidSpecificationException,
"\"#{homepage}\" is not a URI"
end
# Warnings
%w[author description email homepage rubyforge_project summary].each do |attribute|
value = self.send attribute
alert_warning "no #{attribute} specified" if value.nil? or value.empty?
end
if summary and not summary.empty? and description == summary then
alert_warning 'description and summary are identical'
end
alert_warning "deprecated autorequire specified" if autorequire
executables.each do |executable|
executable_path = File.join bindir, executable
shebang = File.read(executable_path, 2) == '#!'
alert_warning "#{executable_path} is missing #! line" unless shebang
end
true
end
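  # A minimal usage sketch; validation is normally run just before packaging:
  #
  #   spec.validate  # raises Gem::InvalidSpecificationException on failure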
##
# Normalize the list of files so that:
# * All file lists have redundancies removed.
# * Files referenced in the extra_rdoc_files are included in the package
# file list.
def normalize
if defined?(@extra_rdoc_files) and @extra_rdoc_files then
@extra_rdoc_files.uniq!
@files ||= []
@files.concat(@extra_rdoc_files)
end
@files.uniq! if @files
end
##
# Return a list of all gems that have a dependency on this gemspec. The
# list is structured with entries that conform to:
#
# [depending_gem, dependency, [list_of_gems_that_satisfy_dependency]]
def dependent_gems
out = []
Gem.source_index.each do |name,gem|
gem.dependencies.each do |dep|
if self.satisfies_requirement?(dep) then
sats = []
find_all_satisfiers(dep) do |sat|
sats << sat
end
out << [gem, dep, sats]
end
end
end
out
end
def to_s # :nodoc:
"#<Gem::Specification name=#{@name} version=#{@version}>"
end
def pretty_print(q) # :nodoc:
q.group 2, 'Gem::Specification.new do |s|', 'end' do
q.breakable
attributes = @@attributes.sort_by { |attr_name,| attr_name.to_s }
attributes.each do |attr_name, default|
current_value = self.send attr_name
if current_value != default or
self.class.required_attribute? attr_name then
q.text "s.#{attr_name} = "
if attr_name == :date then
current_value = current_value.utc
q.text "Time.utc(#{current_value.year}, #{current_value.month}, #{current_value.day})"
else
q.pp current_value
end
q.breakable
end
end
end
end
##
# Adds a dependency on gem +dependency+ with type +type+ that requires
# +requirements+. Valid types are currently <tt>:runtime</tt> and
# <tt>:development</tt>.
def add_dependency_with_type(dependency, type, *requirements)
requirements = if requirements.empty? then
Gem::Requirement.default
else
requirements.flatten
end
unless dependency.respond_to?(:name) &&
dependency.respond_to?(:version_requirements)
dependency = Gem::Dependency.new(dependency, requirements, type)
end
dependencies << dependency
end
private :add_dependency_with_type
##
# Finds all gems that satisfy +dep+
def find_all_satisfiers(dep)
Gem.source_index.each do |_, gem|
yield gem if gem.satisfies_requirement? dep
end
end
private :find_all_satisfiers
##
# Return a string containing a Ruby code representation of the given
# object.
def ruby_code(obj)
case obj
when String then '%q{' + obj + '}'
when Array then obj.inspect
when Gem::Version then obj.to_s.inspect
when Date then '%q{' + obj.strftime('%Y-%m-%d') + '}'
when Time then '%q{' + obj.strftime('%Y-%m-%d') + '}'
when Numeric then obj.inspect
when true, false, nil then obj.inspect
when Gem::Platform then "Gem::Platform.new(#{obj.to_a.inspect})"
when Gem::Requirement then "Gem::Requirement.new(#{obj.to_s.inspect})"
else raise Gem::Exception, "ruby_code case not handled: #{obj.class}"
end
end
private :ruby_code
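  # A minimal sketch of the serialization performed for #to_ruby:
  #
  #   ruby_code 'lib'                    #=> "%q{lib}"
  #   ruby_code Gem::Version.new('1.0')  #=> "\"1.0\""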
# :section: Required gemspec attributes
##
# :attr_accessor: rubygems_version
#
# The version of RubyGems used to create this gem.
#
# Do not set this, it is set automatically when the gem is packaged.
required_attribute :rubygems_version, Gem::VERSION
##
# :attr_accessor: specification_version
#
# The Gem::Specification version of this gemspec.
#
# Do not set this, it is set automatically when the gem is packaged.
required_attribute :specification_version, CURRENT_SPECIFICATION_VERSION
##
# :attr_accessor: name
#
# This gem's name
required_attribute :name
##
# :attr_accessor: version
#
# This gem's version
required_attribute :version
##
# :attr_accessor: date
#
# The date this gem was created
#
# Do not set this, it is set automatically when the gem is packaged.
required_attribute :date, TODAY
##
# :attr_accessor: summary
#
# A short summary of this gem's description. Displayed in `gem list -d`.
#
# The description should be more detailed than the summary. For example,
# you might wish to copy the entire README into the description.
#
# As of RubyGems 1.3.2 newlines are no longer stripped.
required_attribute :summary
##
# :attr_accessor: require_paths
#
# Paths in the gem to add to $LOAD_PATH when this gem is activated.
#
# The default 'lib' is typically sufficient.
required_attribute :require_paths, ['lib']
# :section: Optional gemspec attributes
##
# :attr_accessor: email
#
# A contact email for this gem
#
# If you are providing multiple authors and multiple emails they should be
# in the same order such that:
#
# Hash[*spec.authors.zip(spec.emails).flatten]
#
# Gives a hash of author name to email address.
attribute :email
##
# :attr_accessor: homepage
#
# The URL of this gem's home page
attribute :homepage
##
# :attr_accessor: rubyforge_project
#
  # The rubyforge project this gem lives under; e.g. RubyGems'
# rubyforge_project is "rubygems".
attribute :rubyforge_project
##
# :attr_accessor: description
#
# A long description of this gem
attribute :description
##
# :attr_accessor: autorequire
#
# Autorequire was used by old RubyGems to automatically require a file.
  # It is no longer supported.
attribute :autorequire
##
# :attr_accessor: default_executable
#
# The default executable for this gem.
#
# This is not used.
attribute :default_executable
##
# :attr_accessor: bindir
#
# The path in the gem for executable scripts
attribute :bindir, 'bin'
##
# :attr_accessor: has_rdoc
#
# Deprecated and ignored, defaults to true.
#
# Formerly used to indicate this gem was RDoc-capable.
attribute :has_rdoc, true
##
# True if this gem supports RDoc
alias :has_rdoc? :has_rdoc
##
# :attr_accessor: required_ruby_version
#
# The version of ruby required by this gem
attribute :required_ruby_version, Gem::Requirement.default
##
# :attr_accessor: required_rubygems_version
#
# The RubyGems version required by this gem
attribute :required_rubygems_version, Gem::Requirement.default
##
# :attr_accessor: platform
#
# The platform this gem runs on. See Gem::Platform for details.
#
# Setting this to any value other than Gem::Platform::RUBY or
# Gem::Platform::CURRENT is probably wrong.
attribute :platform, Gem::Platform::RUBY
##
# :attr_accessor: signing_key
#
# The key used to sign this gem. See Gem::Security for details.
attribute :signing_key, nil
##
# :attr_accessor: cert_chain
#
# The certificate chain used to sign this gem. See Gem::Security for
# details.
attribute :cert_chain, []
##
# :attr_accessor: post_install_message
#
# A message that gets displayed after the gem is installed
attribute :post_install_message, nil
##
# :attr_accessor: authors
#
# The list of author names who wrote this gem.
#
# If you are providing multiple authors and multiple emails they should be
# in the same order such that:
#
# Hash[*spec.authors.zip(spec.emails).flatten]
#
# Gives a hash of author name to email address.
array_attribute :authors
##
# :attr_accessor: licenses
#
# The license(s) for the library. Each license must be a short name, no
# more than 64 characters.
array_attribute :licenses
##
# :attr_accessor: files
#
# Files included in this gem. You cannot append to this accessor, you must
# assign to it.
#
# Only add files you can require to this list, not directories, etc.
#
  # Directories are automatically stripped from this list when building a gem;
  # other non-files cause an error.
array_attribute :files
##
# :attr_accessor: test_files
#
# Test files included in this gem. You cannot append to this accessor, you
# must assign to it.
array_attribute :test_files
##
# :attr_accessor: rdoc_options
#
# An ARGV style array of options to RDoc
array_attribute :rdoc_options
##
# :attr_accessor: extra_rdoc_files
#
# Extra files to add to RDoc such as README or doc/examples.txt
array_attribute :extra_rdoc_files
##
# :attr_accessor: executables
#
# Executables included in the gem.
array_attribute :executables
##
# :attr_accessor: extensions
#
# Extensions to build when installing the gem. See
# Gem::Installer#build_extensions for valid values.
array_attribute :extensions
##
# :attr_accessor: requirements
#
  # An array of things required by this gem. Not used by anything
# presently.
array_attribute :requirements
##
# :attr_reader: dependencies
#
# A list of Gem::Dependency objects this gem depends on.
#
# Use #add_dependency or #add_development_dependency to add dependencies to
# a gem.
array_attribute :dependencies
read_only :dependencies
# :section: Aliased gemspec attributes
##
# Singular accessor for #executables
attribute_alias_singular :executable, :executables
##
# Singular accessor for #authors
attribute_alias_singular :author, :authors
##
# Singular accessor for #licenses
attribute_alias_singular :license, :licenses
##
# Singular accessor for #require_paths
attribute_alias_singular :require_path, :require_paths
##
# Singular accessor for #test_files
attribute_alias_singular :test_file, :test_files
##
# has_rdoc is now ignored
overwrite_accessor :has_rdoc do
true
end
##
# has_rdoc is now ignored
overwrite_accessor :has_rdoc= do |value|
@has_rdoc = true
end
overwrite_accessor :version= do |version|
@version = Gem::Version.create(version)
self.required_rubygems_version = '> 1.3.1' if @version.prerelease?
return @version
end
overwrite_accessor :platform do
@new_platform
end
overwrite_accessor :platform= do |platform|
if @original_platform.nil? or
@original_platform == Gem::Platform::RUBY then
@original_platform = platform
end
case platform
when Gem::Platform::CURRENT then
@new_platform = Gem::Platform.local
@original_platform = @new_platform.to_s
when Gem::Platform then
@new_platform = platform
# legacy constants
when nil, Gem::Platform::RUBY then
@new_platform = Gem::Platform::RUBY
when 'mswin32' then # was Gem::Platform::WIN32
@new_platform = Gem::Platform.new 'x86-mswin32'
when 'i586-linux' then # was Gem::Platform::LINUX_586
@new_platform = Gem::Platform.new 'x86-linux'
when 'powerpc-darwin' then # was Gem::Platform::DARWIN
@new_platform = Gem::Platform.new 'ppc-darwin'
else
@new_platform = Gem::Platform.new platform
end
@platform = @new_platform.to_s
@new_platform
end
overwrite_accessor :required_ruby_version= do |value|
@required_ruby_version = Gem::Requirement.create(value)
end
overwrite_accessor :required_rubygems_version= do |value|
@required_rubygems_version = Gem::Requirement.create(value)
end
overwrite_accessor :date= do |date|
# We want to end up with a Time object with one-day resolution.
# This is the cleanest, most-readable, faster-than-using-Date
# way to do it.
case date
when String then
@date = if /\A(\d{4})-(\d{2})-(\d{2})\Z/ =~ date then
Time.local($1.to_i, $2.to_i, $3.to_i)
else
require 'time'
Time.parse date
end
when Time then
@date = Time.local(date.year, date.month, date.day)
when Date then
@date = Time.local(date.year, date.month, date.day)
else
@date = TODAY
end
end
overwrite_accessor :date do
self.date = nil if @date.nil? # HACK Sets the default value for date
@date
end
overwrite_accessor :summary= do |str|
@summary = if str then
str.strip.
gsub(/(\w-)\n[ \t]*(\w)/, '\1\2').
gsub(/\n[ \t]*/, " ")
end
end
overwrite_accessor :description= do |str|
@description = str.to_s
end
overwrite_accessor :default_executable do
begin
if defined?(@default_executable) and @default_executable
result = @default_executable
elsif @executables and @executables.size == 1
result = Array(@executables).first
else
result = nil
end
result
rescue
nil
end
end
overwrite_accessor :test_files do
# Handle the possibility that we have @test_suite_file but not
# @test_files. This will happen when an old gem is loaded via
# YAML.
if defined? @test_suite_file then
@test_files = [@test_suite_file].flatten
@test_suite_file = nil
end
if defined?(@test_files) and @test_files then
@test_files
else
@test_files = []
end
end
overwrite_accessor :files do
# DO NOT CHANGE TO ||= ! This is not a normal accessor. (yes, it sucks)
@files = [@files,
@test_files,
add_bindir(@executables),
@extra_rdoc_files,
@extensions,
].flatten.uniq.compact
end
end
| 25.579122 | 131 | 0.653505 |
38a69a203d797d071bf10f1a030e3116f6399528 | 973 | # frozen_string_literal: true
require "se_providers_api_client/dsl"
module SeProvidersApiClient
module DSL
module Quotes
# GET /api/providers/v1/quotes
# Get quotes. See https://demo.scienceexchange.com/api-docs/providers#/quotes/getProvidersV1Quotes
# @return [Array].
def get_quotes
Resources::Quote.parse(request(:get, "quotes/", nil, nil))
end
# GET /api/providers/v1/quotes/{id}
      # Get a quote. See https://demo.scienceexchange.com/api-docs/providers#/quotes/getProvidersV1QuotesId
# @param [Hash] options The options to get a quote with.
# @option options [String, Fixnum] :id A quote ID.
# @raise [ArgumentError] If the :id is blank
# @return [SeProvidersApiClient::Resources::Quote, nil].
def get_quote(options = {})
id = options[:id] || raise(ArgumentError, "Must supply :id")
Resources::Quote.parse(request(:get, "quotes/#{id}"))
end
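      # A minimal usage sketch, assuming a hypothetical client object that
      # includes this DSL:
      #
      #   client.get_quote(id: 123)  #=> SeProvidersApiClient::Resources::Quote
      #   client.get_quote           # raises ArgumentError, :id is required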
end
end
end
| 34.75 | 108 | 0.663926 |
21beb6f6d6f83ec2938f58e171a4be09ad86725b | 83 | module Bintray
module Error
class NotFound < StandardError
end
end
end
| 11.857143 | 34 | 0.710843 |
1c41e4ef8726382bc0b469cf16d372308545b9f5 | 163 | class UpdateTagsChangeSessionToUser < ActiveRecord::Migration
def change
remove_column :tags, :session_id
add_column :tags, :user_id, :integer
end
end
| 23.285714 | 61 | 0.766871 |
6a9d840831dc11faf092c288c11d930c5f3bde52 | 288 | # frozen_string_literal: true
module Types
class IssueStateEventEnum < BaseEnum
graphql_name 'IssueStateEvent'
description 'Values for issue state events'
value 'REOPEN', 'Reopens the issue', value: 'reopen'
value 'CLOSE', 'Closes the issue', value: 'close'
end
end
| 24 | 56 | 0.722222 |
61cd0e0209f3336de9f9a3ade0f6397db088fe4e | 163 | require 'spec_helper'
describe Tag, type: :model do
it {should validate_presence_of(:name)}
it {should have_many(:stubs)}
it {should have_many(:items)}
end
| 20.375 | 41 | 0.730061 |
917a0951ef82a3c9e362549775bcb5763b8173b9 | 1,106 | require "parallel_tests/gherkin/runner"
module ParallelTests
module Cucumber
class Runner < ParallelTests::Gherkin::Runner
SCENARIOS_RESULTS_BOUNDARY_REGEX = /^(Failing|Flaky) Scenarios:$/
SCENARIO_REGEX = /^cucumber features\/.+:\d+/
class << self
def name
'cucumber'
end
def line_is_result?(line)
super || line =~ SCENARIO_REGEX || line =~ SCENARIOS_RESULTS_BOUNDARY_REGEX
end
def summarize_results(results)
output = []
scenario_groups = results.slice_before(SCENARIOS_RESULTS_BOUNDARY_REGEX).group_by(&:first)
scenario_groups.each do |header, group|
scenarios = group.flatten.grep(SCENARIO_REGEX)
if scenarios.any?
output << ([header] + scenarios).join("\n")
end
end
output << super
output.join("\n\n")
end
def command_with_seed(cmd, seed)
clean = cmd.sub(/\s--order\s+random(:\d+)?\b/, '')
"#{clean} --order random:#{seed}"
end
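        # A minimal sketch with a hypothetical command string; any existing
        # --order random[:seed] flag is replaced with the given seed:
        #
        #   command_with_seed('cucumber features --order random:11', 1234)
        #   #=> "cucumber features --order random:1234"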
end
end
end
end
| 26.333333 | 100 | 0.579566 |
d549781906aed4670ece5bc830f4bc2b81b45736 | 134 | class AddTimeStampToReferredRecords < ActiveRecord::Migration[4.2]
def change
add_timestamps(:spree_referred_records)
end
end
| 22.333333 | 66 | 0.80597 |
bb47b33905103059f84f293887d37324bd1b7e29 | 642 | require 'simplecov'
module SimpleCov::Configuration
def clean_filters
@filters = []
end
end
SimpleCov.configure do
clean_filters
load_adapter 'test_frameworks'
end
ENV['COVERAGE'] && SimpleCov.start do
add_filter '/.rvm/'
end
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'bundler'
Bundler.require(:test)
require 'rspec'
require 'servlao'
# Requires supporting files with custom matchers and macros, etc,
# in ./support/ and its subdirectories.
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each { |f| require f }
RSpec.configure do |_config|
end
| 20.709677 | 71 | 0.732087 |
7a90494e9ec4202bb4a8d875dea862d8c1032d54 | 44 | module Kvm
module Instructions
end
end
| 7.333333 | 21 | 0.75 |