hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
26fca9db697b76aa93cd274e7a12e2e335f92ffb | 634 | module SpecHelpers
def event_hash(options = {})
{
address: options.fetch(:address, ethereum_address),
blockHash: options.fetch(:block_hash, ethereum_txid),
blockNumber: options.fetch(:block_number, rand(1_000_000_000)),
data: options.fetch(:data, SecureRandom.hex),
logIndex: options.fetch(:log_index, rand(1_000)),
subscription: options.fetch(:subscription, SecureRandom.uuid),
topics: options.fetch(:topics, [ethereum_txid]),
transactionHash: options.fetch(:transaction_hash, ethereum_txid),
transactionIndex: options.fetch(:transaction_index, rand(100)),
}
end
end
| 39.625 | 71 | 0.711356 |
38ee75f19304d8d22cbaf8a13fd67aacef184eb1 | 24,629 | # -*- encoding: utf-8; frozen_string_literal: true -*-
#
#--
# This file is part of HexaPDF.
#
# HexaPDF - A Versatile PDF Creation and Manipulation Library For Ruby
# Copyright (C) 2014-2019 Thomas Leitner
#
# HexaPDF is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License version 3 as
# published by the Free Software Foundation with the addition of the
# following permission added to Section 15 as permitted in Section 7(a):
# FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED BY
# THOMAS LEITNER, THOMAS LEITNER DISCLAIMS THE WARRANTY OF NON
# INFRINGEMENT OF THIRD PARTY RIGHTS.
#
# HexaPDF is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with HexaPDF. If not, see <http://www.gnu.org/licenses/>.
#
# The interactive user interfaces in modified source and object code
# versions of HexaPDF must display Appropriate Legal Notices, as required
# under Section 5 of the GNU Affero General Public License version 3.
#
# In accordance with Section 7(b) of the GNU Affero General Public
# License, a covered work must retain the producer line in every PDF that
# is created or manipulated using HexaPDF.
#
# If the GNU Affero General Public License doesn't fit your need,
# commercial licenses are available at <https://gettalong.at/hexapdf/>.
#++
require 'stringio'
require 'hexapdf/error'
require 'hexapdf/content'
require 'hexapdf/configuration'
require 'hexapdf/reference'
require 'hexapdf/object'
require 'hexapdf/stream'
require 'hexapdf/revisions'
require 'hexapdf/type'
require 'hexapdf/task'
require 'hexapdf/encryption'
require 'hexapdf/writer'
require 'hexapdf/importer'
require 'hexapdf/image_loader'
require 'hexapdf/font_loader'
require 'hexapdf/layout'
# == HexaPDF API Documentation
#
# Here are some pointers to more in depth information:
#
# * For information about the command line application, see the HexaPDF::CLI module.
# * HexaPDF::Document provides information about how to work with a PDF file.
# * HexaPDF::Content::Canvas provides the canvas API for drawing/writing on a page or form XObject
module HexaPDF
autoload(:Composer, 'hexapdf/composer')
# Represents one PDF document.
#
# A PDF document consists of (indirect) objects, so the main job of this class is to provide
# methods for working with these objects. However, since a PDF document may also be
# incrementally updated and can therefore contain one or more revisions, there are also methods
# to work with these revisions.
#
# Note: This class provides everything to work on PDF documents on a low-level basis. This means
# that there are no convenience methods for higher PDF functionality whatsoever.
class Document
autoload(:Pages, 'hexapdf/document/pages')
autoload(:Fonts, 'hexapdf/document/fonts')
autoload(:Images, 'hexapdf/document/images')
autoload(:Files, 'hexapdf/document/files')
autoload(:Form, 'hexapdf/document/form')
# :call-seq:
# Document.open(filename, **docargs) -> doc
# Document.open(filename, **docargs) {|doc| block} -> obj
#
# Creates a new PDF Document object for the given file.
#
# Depending on whether a block is provided, the functionality is different:
#
# * If no block is provided, the whole file is instantly read into memory and the PDF Document
# created for it is returned.
#
# * If a block is provided, the file is opened and a PDF Document is created for it. The
# created document is passed as an argument to the block and when the block returns the
# associated file object is closed. The value of the block will be returned.
#
# The block version is useful, for example, when you are dealing with a large file and you
# only need a small portion of it.
#
# The provided keyword arguments (except +io+) are passed on unchanged to Document.new.
def self.open(filename, **kwargs)
if block_given?
File.open(filename, 'rb') do |file|
yield(new(**kwargs, io: file))
end
else
new(**kwargs, io: StringIO.new(File.binread(filename)))
end
end
# The configuration for the document.
attr_reader :config
# The revisions of the document.
attr_reader :revisions
# Creates a new PDF document, either an empty one or one read from the provided +io+.
#
# When an IO object is provided and it contains an encrypted PDF file, it is automatically
# decrypted behind the scenes. The +decryption_opts+ argument has to be set appropriately in
# this case.
#
# Options:
#
# io:: If an IO object is provided, then this document can read PDF objects from this IO
# object, otherwise it can only contain created PDF objects.
#
# decryption_opts:: A hash with options for decrypting the PDF objects loaded from the IO.
#
# config:: A hash with configuration options that is deep-merged into the default
# configuration (see DefaultDocumentConfiguration), meaning that direct sub-hashes
# are merged instead of overwritten.
def initialize(io: nil, decryption_opts: {}, config: {})
@config = Configuration.with_defaults(config)
@version = '1.2'
@revisions = Revisions.from_io(self, io)
@security_handler = if encrypted? && @config['document.auto_decrypt']
Encryption::SecurityHandler.set_up_decryption(self, decryption_opts)
else
nil
end
@listeners = {}
@cache = Hash.new {|h, k| h[k] = {} }
end
# :call-seq:
# doc.object(ref) -> obj or nil
# doc.object(oid) -> obj or nil
#
# Returns the current version of the indirect object for the given exact reference or for the
# given object number.
#
# For references to unknown objects, +nil+ is returned but free objects are represented by a
# PDF Null object, not by +nil+!
#
# See: PDF1.7 s7.3.9
def object(ref)
i = @revisions.size - 1
while i >= 0
return @revisions[i].object(ref) if @revisions[i].object?(ref)
i -= 1
end
nil
end
# Dereferences the given object.
#
# Return the object itself if it is not a reference, or the indirect object specified by the
# reference.
def deref(obj)
obj.kind_of?(Reference) ? object(obj) : obj
end
# :call-seq:
# doc.object?(ref) -> true or false
# doc.object?(oid) -> true or false
#
# Returns +true+ if the the document contains an indirect object for the given exact reference
# or for the given object number.
#
# Even though this method might return +true+ for some references, #object may return +nil+
# because this method takes *all* revisions into account. Also see the discussion on #each for
# more information.
def object?(ref)
@revisions.any? {|rev| rev.object?(ref) }
end
# :call-seq:
# doc.add(obj, revision: :current, **wrap_opts) -> indirect_object
#
# Adds the object to the specified revision of the document and returns the wrapped indirect
# object.
#
# The object can either be a native Ruby object (Hash, Array, Integer, ...) or a
# HexaPDF::Object. If it is not the latter, #wrap is called with the object and the
# additional keyword arguments.
#
# If the +revision+ option is +:current+, the current revision is used. Otherwise +revision+
# should be a revision index.
def add(obj, revision: :current, **wrap_opts)
obj = wrap(obj, wrap_opts) unless obj.kind_of?(HexaPDF::Object)
revision = (revision == :current ? @revisions.current : @revisions.revision(revision))
if revision.nil?
raise ArgumentError, "Invalid revision index specified"
end
if obj.document? && obj.document != self
raise HexaPDF::Error, "Can't add object that is already attached to another document"
end
obj.document = self
if obj.indirect? && (rev_obj = revision.object(obj.oid))
if rev_obj.equal?(obj)
return obj
else
raise HexaPDF::Error, "Can't add object because the specified revision already has " \
"an object with object number #{obj.oid}"
end
end
obj.oid = @revisions.map(&:next_free_oid).max unless obj.indirect?
revision.add(obj)
end
# :call-seq:
# doc.delete(ref, revision: :all)
# doc.delete(oid, revision: :all)
#
# Deletes the indirect object specified by an exact reference or by an object number from the
# document.
#
# Options:
#
# revision:: Specifies from which revisions the object should be deleted:
#
# :all:: Delete the object from all revisions.
# :current:: Delete the object only from the current revision.
#
# mark_as_free:: If +true+, objects are only marked as free objects instead of being actually
# deleted.
def delete(ref, revision: :all, mark_as_free: true)
case revision
when :current
@revisions.current.delete(ref, mark_as_free: mark_as_free)
when :all
@revisions.each {|rev| rev.delete(ref, mark_as_free: mark_as_free) }
else
raise ArgumentError, "Unsupported option revision: #{revision}"
end
end
# :call-seq:
# doc.import(obj) -> imported_object
#
# Imports the given, with a different document associated PDF object and returns the imported
# object.
#
# If the same argument is provided in multiple invocations, the import is done only once and
# the previously imoprted object is returned.
#
# See: Importer
def import(obj)
if !obj.kind_of?(HexaPDF::Object) || !obj.document? || obj.document == self
raise ArgumentError, "Importing only works for PDF objects associated " \
"with another document"
end
HexaPDF::Importer.for(source: obj.document, destination: self).import(obj)
end
# Wraps the given object inside a HexaPDF::Object class which allows one to use
# convenience functions to work with the object.
#
# The +obj+ argument can also be a HexaPDF::Object object so that it can be re-wrapped if
# needed.
#
# The class of the returned object is always a subclass of HexaPDF::Object (or of
# HexaPDF::Stream if a +stream+ is given). Which subclass is used, depends on the values
# of the +type+ and +subtype+ options as well as on the 'object.type_map' and
# 'object.subtype_map' global configuration options:
#
# * First +type+ is used to try to determine the class. If it is already a Class object, it is
# used, otherwise the type is looked up in 'object.type_map'.
#
# * If +subtype+ is provided or can be determined because +obj+ is a hash with a :Subtype or :S
# field, the type and subtype together are used to look up a special subtype class in
# 'object.subtype_map'.
#
# * If there is no valid class after the above steps, HexaPDF::Stream is used if a stream is
# given, HexaPDF::Dictionary if the given objecct is a hash or else HexaPDF::Object is used.
#
# Options:
#
# :type:: (Symbol or Class) The type of a PDF object that should be used for wrapping. This
# could be, for example, :Pages. If a class object is provided, it is used directly
# instead of the type detection system.
#
# :subtype:: (Symbol) The subtype of a PDF object which further qualifies a type. For
# example, image objects in PDF have a type of :XObject and a subtype of :Image.
#
# :oid:: (Integer) The object number that should be set on the wrapped object. Defaults to 0
# or the value of the given object's object number.
#
# :gen:: (Integer) The generation number that should be set on the wrapped object. Defaults to
# 0 or the value of the given object's generation number.
#
# :stream:: (String or StreamData) The stream object which should be set on the wrapped
# object.
def wrap(obj, type: nil, subtype: nil, oid: nil, gen: nil, stream: nil)
data = if obj.kind_of?(HexaPDF::Object)
obj.data
else
HexaPDF::PDFData.new(obj)
end
data.oid = oid if oid
data.gen = gen if gen
data.stream = stream if stream
if type.kind_of?(Class)
klass = type
type = (klass <= HexaPDF::Dictionary ? klass.type : nil)
else
type ||= deref(data.value[:Type]) if data.value.kind_of?(Hash)
klass = GlobalConfiguration.constantize('object.type_map', type) { nil } if type
end
if data.value.kind_of?(Hash)
subtype ||= deref(data.value[:Subtype]) || deref(data.value[:S])
end
if subtype
klass = GlobalConfiguration.constantize('object.subtype_map', type, subtype) { klass }
end
klass ||= if data.stream
HexaPDF::Stream
elsif data.value.kind_of?(Hash)
HexaPDF::Dictionary
else
HexaPDF::Object
end
klass.new(data, document: self)
end
# :call-seq:
# document.unwrap(obj) -> unwrapped_obj
#
# Recursively unwraps the object to get native Ruby objects (i.e. Hash, Array, Integer, ...
# instead of HexaPDF::Reference and HexaPDF::Object).
def unwrap(object, seen = {})
object = deref(object)
object = object.data if object.kind_of?(HexaPDF::Object)
if seen.key?(object)
raise HexaPDF::Error, "Can't unwrap a recursive structure"
end
case object
when Hash
seen[object] = true
object.each_with_object({}) {|(key, val), memo| memo[key] = unwrap(val, seen.dup) }
when Array
seen[object] = true
object.map {|inner_o| unwrap(inner_o, seen.dup) }
when HexaPDF::PDFData
seen[object] = true
unwrap(object.value, seen.dup)
else
object
end
end
# :call-seq:
# doc.each(only_current: true, only_loaded: false) {|obj| block } -> doc
# doc.each(only_current: true, only_loaded: false) {|obj, rev| block } -> doc
# doc.each(only_current: true, only_loaded: false) -> Enumerator
#
# Calls the given block once for every object, or, if +only_loaded+ is +true+, for every loaded
# object in the PDF document. The block may either accept only the object or the object and the
# revision it is in.
#
# By default, only the current version of each object is returned which implies that each
# object number is yielded exactly once. If the +current+ option is +false+, all stored
# objects from newest to oldest are returned, not only the current version of each object.
#
# The +current+ option can make a difference because the document can contain multiple
# revisions:
#
# * Multiple revisions may contain objects with the same object and generation numbers, e.g.
# two (different) objects with oid/gen [3,0].
#
# * Additionally, there may also be objects with the same object number but different
# generation numbers in different revisions, e.g. one object with oid/gen [3,0] and one with
# oid/gen [3,1].
def each(only_current: true, only_loaded: false, &block)
unless block_given?
return to_enum(__method__, only_current: only_current, only_loaded: only_loaded)
end
yield_rev = (block.arity == 2)
oids = {}
@revisions.reverse_each do |rev|
rev.each(only_loaded: only_loaded) do |obj|
next if only_current && oids.include?(obj.oid)
(yield_rev ? yield(obj, rev) : yield(obj))
oids[obj.oid] = true
end
end
self
end
# :call-seq:
# doc.register_listener(name, callable) -> callable
# doc.register_listener(name) {|*args| block} -> block
#
# Registers the given listener for the message +name+.
def register_listener(name, callable = nil, &block)
callable ||= block
(@listeners[name] ||= []) << callable
callable
end
# Dispatches the message +name+ with the given arguments to all registered listeners.
def dispatch_message(name, *args)
@listeners[name]&.each {|obj| obj.call(*args) }
end
# Caches the value or the return value of the given block using the given Object::PDFData and
# key arguments as composite hash key. If a cached value already exists, it is just returned.
#
# This facility can be used to cache expensive operations in PDF objects that are easy to
# compute again.
#
# Use #clear_cache to clear the cache if necessary.
def cache(pdf_data, key, value = nil)
@cache[pdf_data][key] ||= value || yield
end
# Returns +true+ if there is a value cached for the composite key consisting of the given
# +pdf_data+ and +key+ objects.
#
# Also see: #cache
def cached?(pdf_data, key)
@cache.key?(pdf_data) && @cache[pdf_data].key?(key)
end
# Clears all cached data or, if a Object::PDFData object is given, just the cache for this one
# object.
#
# It is *not* recommended to clear the whole cache! Better clear the cache for individual PDF
# objects!
#
# Also see: #cache
def clear_cache(pdf_data = nil)
pdf_data ? @cache[pdf_data].clear : @cache.clear
end
# Returns the Pages object that provides convenience methods for working with pages.
#
# Also see: HexaPDF::Type::PageTreeNode
def pages
@pages ||= Pages.new(self)
end
# Returns the Images object that provides convenience methods for working with images.
def images
@images ||= Images.new(self)
end
# Returns the Files object that provides convenience methods for working with files.
def files
@files ||= Files.new(self)
end
def form
forms ||= Form.new(self)
end
# Returns the Fonts object that provides convenience methods for working with fonts.
def fonts
@fonts ||= Fonts.new(self)
end
# Executes the given task and returns its result.
#
# Tasks provide an extensible way for performing operations on a PDF document without
# cluttering the Document interface.
#
# See Task for more information.
def task(name, **opts, &block)
task = config.constantize('task.map', name) do
raise HexaPDF::Error, "No task named '#{name}' is available"
end
task.call(self, **opts, &block)
end
# Returns the trailer dictionary for the document.
def trailer
@revisions.current.trailer
end
# Returns the document's catalog, the root of the object tree.
def catalog
trailer.catalog
end
# Returns the PDF document's version as string (e.g. '1.4').
#
# This method takes the file header version and the catalog's /Version key into account. If a
# version has been set manually and the catalog's /Version key refers to a later version, the
# later version is used.
#
# See: PDF1.7 s7.2.2
def version
catalog_version = (catalog[:Version] || '1.0').to_s
(@version < catalog_version ? catalog_version : @version)
end
# Sets the version of the PDF document. The argument must be a string in the format 'M.N'
# where M is the major version and N the minor version (e.g. '1.4' or '2.0').
def version=(value)
raise ArgumentError, "PDF version must follow format M.N" unless value.to_s.match?(/\A\d\.\d\z/)
@version = value.to_s
end
# Returns +true+ if the document is encrypted.
def encrypted?
!trailer[:Encrypt].nil?
end
# Encrypts the document.
#
# This is done by setting up a security handler for this purpose and populating the trailer's
# Encrypt dictionary accordingly. The actual encryption, however, is only done when writing the
# document.
#
# The security handler used for encrypting is selected via the +name+ argument. All other
# arguments are passed on the security handler.
#
# If the document should not be encrypted, the +name+ argument has to be set to +nil+. This
# removes the security handler and deletes the trailer's Encrypt dictionary.
#
# See: HexaPDF::Encryption::SecurityHandler#set_up_encryption and
# HexaPDF::Encryption::StandardSecurityHandler::EncryptionOptions for possible encryption
# options.
def encrypt(name: :Standard, **options)
if name.nil?
trailer.delete(:Encrypt)
@security_handler = nil
else
@security_handler = Encryption::SecurityHandler.set_up_encryption(self, name, **options)
end
end
# Returns the security handler that is used for decrypting or encrypting the document, or +nil+
# if none is set.
#
# * If the document was created by reading an existing file and the document was automatically
# decrypted, then this method returns the handler for decrypting.
#
# * Once the #encrypt method is called, the specified security handler for encrypting is
# returned.
def security_handler
@security_handler
end
# Validates all objects, or, if +only_loaded+ is +true+, only loaded objects, with optional
# auto-correction, and returns +true+ if everything is fine.
#
# If a block is given, it is called on validation problems.
#
# See HexaPDF::Object#validate for more information.
def validate(auto_correct: true, only_loaded: false) #:yield: object, msg, correctable
cur_obj = trailer
block = (block_given? ? lambda {|msg, correctable| yield(cur_obj, msg, correctable) } : nil)
result = trailer.validate(auto_correct: auto_correct, &block)
each(only_current: false, only_loaded: only_loaded) do |obj|
cur_obj = obj
result &&= obj.validate(auto_correct: auto_correct, &block)
end
result
end
# :call-seq:
# doc.write(filename, incremental: false, validate: true, update_fields: true, optimize: false)
# doc.write(io, incremental: false, validate: true, update_fields: true, optimize: false)
#
# Writes the document to the given file (in case +io+ is a String) or IO stream.
#
# Before the document is written, it is validated using #validate and an error is raised if the
# document is not valid. However, this step can be skipped if needed.
#
# Options:
#
# incremental::
# Use the incremental writing mode which just adds a new revision to an existing document.
# This is needed, for example, when modifying a signed PDF and the original signature should
# stay valid.
#
# See: PDF1.7 s7.5.6
#
# validate::
# Validates the document and raises an error if an uncorrectable problem is found.
#
# update_fields::
# Updates the /ID field in the trailer dictionary as well as the /ModDate field in the
# trailer's /Info dictionary so that it is clear that the document has been updated.
#
# optimize::
# Optimize the file size by using object and cross-reference streams. This will raise the PDF
# version to at least 1.5.
def write(file_or_io, incremental: false, validate: true, update_fields: true, optimize: false)
dispatch_message(:complete_objects)
if update_fields
trailer.update_id
trailer.info[:ModDate] = Time.now
end
if validate
self.validate(auto_correct: true) do |obj, msg, correctable|
next if correctable
raise HexaPDF::Error, "Validation error for (#{obj.oid},#{obj.gen}): #{msg}"
end
end
if optimize
task(:optimize, object_streams: :generate)
self.version = '1.5' if version < '1.5'
end
dispatch_message(:before_write)
if file_or_io.kind_of?(String)
File.open(file_or_io, 'w+') {|file| Writer.write(self, file, incremental: incremental) }
else
Writer.write(self, file_or_io, incremental: incremental)
end
end
end
end
| 38.066461 | 102 | 0.657152 |
184cec3566abfdc24d79f506f2db3cca15e7974c | 264 | class UserMailer < ActionMailer::Base
default from: "360Fly <[email protected]>"
def signup_email(user)
@user = user
@twitter_message = "#360Fly is changing the game…again!"
mail(:to => user.email, :subject => "Thanks for signing up!")
end
end
| 24 | 65 | 0.67803 |
d5ea01de527761fb956d79727771b8287dd0c1a3 | 3,459 | =begin
#BillForward REST API
#No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require "uri"
module BillForward
class QuotesApi
attr_accessor :api_client
def initialize(api_client = ApiClient.default)
@api_client = api_client
end
# Returns a quote.
# {\"nickname\":\"Returns a quote\",\"request\":\"PriceRequest.html\",\"response\":\"PriceCalculation.html\"}
# @param quote_request A quote request
# @param [Hash] opts the optional parameters
# @return [APIQuotePagedMetadata]
def quote(quote_request, opts = {})
data, _status_code, _headers = quote_with_http_info(quote_request, opts)
return data
end
# Returns a quote.
# {\"nickname\":\"Returns a quote\",\"request\":\"PriceRequest.html\",\"response\":\"PriceCalculation.html\"}
# @param quote_request A quote request
# @param [Hash] opts the optional parameters
# @return [Array<(APIQuotePagedMetadata, Fixnum, Hash)>] APIQuotePagedMetadata data, response status code and response headers
def quote_with_http_info(quote_request, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug "Calling API: QuotesApi.quote ..."
end
# verify the required parameter 'quote_request' is set
fail ArgumentError, "Missing the required parameter 'quote_request' when calling QuotesApi.quote" if quote_request.nil?
# resource path
local_var_path = "/quotes".sub('{format}','json')
# query parameters
query_params = {}
# header parameters
header_params = {}
# HTTP header 'Accept' (if needed)
local_header_accept = ['application/json']
local_header_accept_result = @api_client.select_header_accept(local_header_accept) and header_params['Accept'] = local_header_accept_result
# HTTP header 'Content-Type'
local_header_content_type = ['application/json']
header_params['Content-Type'] = @api_client.select_header_content_type(local_header_content_type)
# form parameters
form_params = {}
# http body (model)
post_body = @api_client.object_to_http_body(quote_request)
auth_names = []
data, status_code, headers = @api_client.call_api(:POST, local_var_path,
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => 'APIQuotePagedMetadata')
if @api_client.config.debugging
@api_client.config.logger.debug "API called: QuotesApi#quote\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
end
end
| 37.597826 | 173 | 0.710899 |
01d0b7bc71a41ca287490f0d01223158b6a23d98 | 97 | module ActiveList
module Definition
class StatusColumn < AttributeColumn
end
end
end
| 13.857143 | 40 | 0.752577 |
ff1066e8f941e0d6365205a4870706358e906706 | 735 | # frozen_string_literal: true
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Friends
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 6.1
# Configuration for the application, engines, and railties goes here.
#
# These settings can be overridden in specific environments using the files
# in config/environments, which are processed later.
#
# config.time_zone = "Central Time (US & Canada)"
# config.eager_load_paths << Rails.root.join("extras")
end
end
| 29.4 | 79 | 0.740136 |
d5bf4c5a901923e7511dabbf2515cd576d07404a | 716 | # frozen_string_literal: true
class RetailChainSummary
  # Builds the summary payload: total number of chains plus, per chain, its
  # name, CNPJ and alphabetically sorted visitor names.
  def self.process
    summaries = retail_chains_with_visitors.map do |chain|
      sorted_names = retail_chains_order(chain)
      retail_chains_attrs(chain, sorted_names)
    end
    { total: retail_chains_count, retail_chain_visitors: summaries }
  end

  # All chains with their visitors eager-loaded.
  def self.retail_chains_with_visitors
    RetailChain.includes(:visitors).all
  end

  # Total number of retail chains.
  def self.retail_chains_count
    RetailChain.count
  end

  # Visitor names of +retail+, sorted alphabetically.
  def self.retail_chains_order(retail)
    retail.visitors.order(:name).pluck(:name)
  end

  # Serializable attributes for a single chain.
  def self.retail_chains_attrs(retail, visitors)
    {
      name: retail.name,
      cnpj: retail.cnpj,
      visitors: visitors
    }
  end
end
| 22.375 | 80 | 0.73743 |
01f2214cc78b790beea436a6700a8a1588b73372 | 724 | # encoding: utf-8
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# /spec/fixtures/responses/whois.nic.coop/coop/property_status_single.expected
#
# and regenerate the tests with the following rake task
#
# $ rake spec:generate
#
require 'spec_helper'
require 'whois/parsers/whois.nic.coop.rb'
# Regression spec: parses a recorded whois.nic.coop response fixture and
# checks the extracted properties against the expected values.
describe Whois::Parsers::WhoisNicCoop, "property_status_single.expected" do
  subject do
    # Build the parser from the raw recorded response body.
    file = fixture("responses", "whois.nic.coop/coop/property_status_single.txt")
    part = Whois::Record::Part.new(body: File.read(file))
    described_class.new(part)
  end
  describe "#status" do
    it do
      expect(subject.status).to eq(["ok"])
    end
  end
end
| 24.133333 | 81 | 0.722376 |
08d92b7f66931d1a7ceff77ba7c08c3f0dc8d394 | 921 | Pod::Spec.new do |s|
s.name = "SimpleIOSViewStackController"
s.version = "0.1"
s.summary = "A view stack based on a UINavigationController which navigates to previously registered UIViewControllers upon receiving NSNotifications."
s.description = "A view stack based on a UINavigationController which navigates to previously registered UIViewControllers upon receiving NSNotifications. See : http://www.garethshapiro.com/item/simple-ios-viewstack for more information and a tutorial."
s.homepage = "http://www.garethshapiro.com/item/simple-ios-viewstack"
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { "Gareth Shapiro" => "[email protected]" }
s.source = { :git => "https://github.com/GarethShapiro/SimpleIOSViewStackController.git", :tag => "0.1" }
s.platform = :ios, '5.0'
s.source_files = '*.{h,m}'
s.requires_arc = true
end
| 65.785714 | 257 | 0.695983 |
6a37bd9513bb4a9994953ec4833919035c5507d3 | 496 | cask '[email protected]+ent' do
version '1.6.7+ent'
sha256 'ca298c862cb0e86cdf8b728d94874b7b673410351037f2473b551fceb33f0d4a'
# releases.hashicorp.com was verified as official when first introduced to the cask
url 'https://releases.hashicorp.com/consul/1.6.7+ent/consul_1.6.7+ent_darwin_amd64.zip'
appcast 'https://github.com/hashicorp/consul/releases.atom'
name 'Consul'
homepage 'https://www.consul.io/'
auto_updates false
conflicts_with formula: 'consul'
binary 'consul'
end
| 31 | 89 | 0.766129 |
ede89333886872801630354c814d53025732cdb7 | 131 | require 'rails_helper'
RSpec.describe ModuleSetting, type: :model do
pending "add some examples to (or delete) #{__FILE__}"
end
| 21.833333 | 56 | 0.755725 |
330f44ac536babd19b121926f2f892a7e7501bf1 | 655 | module WofWof
class Source
include Comparable
attr_reader :prerequisites
attr_reader :name
attr_accessor :path_handler
def initialize(path_handler=nil)
@prerequisites = []
@name = self.class.name.gsub(/^.*::(\w+)$/, '\1')
@path_handler = path_handler
end
def build_nodes(context)
raise NotImplementedError.new
end
def <=>(other)
return -1 if other.prerequisites.include? self.name
return 1 if self.prerequisites.include? other.name
str_cmp = self.name <=> other.name
return str_cmp if str_cmp != 0
return self.object_id <=> other.object_id
end
end
end
| 22.586207 | 57 | 0.651908 |
# Sinatra-style controller providing signup, profile, edit, and delete routes
# for User records. Relies on the `logged_in?` / `current_user` helpers
# defined on ApplicationController, and on `redirect` halting the request.
class UsersController < ApplicationController

  # GET: /users — the user index; anonymous visitors go to the signup form.
  get "/users" do
    if logged_in?
      erb :"/users/index.html"
    else
      redirect "/users/new"
    end
  end

  # GET: /users/new — the signup form; logged-in users go to their profile.
  get "/users/new" do
    if logged_in?
      redirect "/users/#{current_user.id}"
    else
      erb :"/users/new.html"
    end
  end

  # POST: /users — create an account and start a session for the new user.
  post "/users" do
    if params[:username] != "" && params[:email] != "" && params[:password] != ""
      # A taken username or email means this person should log in instead.
      if User.find_by(username: params[:username]) != nil || User.find_by(email: params[:email]) != nil
        redirect "/login"
      end
      user = User.new(username: params[:username], email: params[:email], password: params[:password])
      if user.save
        session[:user_id] = user.id
        redirect "/users/#{user.id}"
      else
        redirect "/users/new"
      end
    else
      redirect "/users/new"
    end
  end

  # GET: /users/5 — show a profile (any logged-in user may view).
  get "/users/:id" do
    if logged_in?
      @user = User.find(params[:id])
      erb :"/users/show.html"
    else
      redirect "/login"
    end
  end

  # GET: /users/5/edit — edit form for a profile.
  get "/users/:id/edit" do
    if logged_in?
      @user = User.find(params[:id])
      erb :"/users/edit.html"
    else
      redirect "/login"
    end
  end

  # PATCH: /users/5 — update username and/or email; owners only.
  patch "/users/:id" do
    # Bug fix: the original compared the numeric session id against a User
    # object (`session[:user_id] == User.find(params[:id])`), which is never
    # equal, so the ownership guard never fired and anyone could edit any
    # profile. Compare ids instead and bounce non-owners away.
    if session[:user_id] != params[:id].to_i
      redirect '/users'
    end
    # Both fields are required on the form; return to the editor if either
    # came through blank.
    if params[:email] == "" || params[:username] == ""
      redirect "/users/#{params[:id]}/edit"
    end
    user = User.find(params[:id])
    user.username = params[:username] if params[:username] != ""
    user.email = params[:email] if params[:email] != ""
    user.save
    redirect "/users/#{params[:id]}"
  end

  # DELETE: /users/5/delete — a user may delete only their own account.
  delete "/users/:id/delete" do
    if logged_in? && session[:user_id] == params[:id].to_i
      User.find(params[:id]).destroy
      # Bug fix: the original wrote `session[:user_id] = {}`; an empty hash is
      # truthy in Ruby, so the just-deleted user still appeared logged in.
      session[:user_id] = nil
      erb :welcome
    else
      redirect "/users/#{params[:id]}"
    end
  end
end
| 20.877358 | 103 | 0.551288 |
require 'test_helper'

# Functional specs for CommentsController#create using minitest-spec syntax.
describe CommentsController do
  describe 'create' do
    it 'must create movie comments' do
      as_logged_in_user do
        movie = FactoryGirl.create :movie
        post :create, { :movie_id => movie.friendly_id, :comment => FactoryGirl.attributes_for(:comment) }
        # A successful create redirects (302) back with a flash notice and the
        # comment attached to the movie.
        response.status.must_equal 302
        flash[:notice].wont_be_nil
        movie.comments.size.must_equal 1
      end
    end

    it 'must redirect and display an error message when submitting an invalid comment' do
      as_logged_in_user do
        movie = FactoryGirl.create :movie
        # :text => nil makes the comment invalid, so nothing should be saved
        # and the user is redirected with an alert instead of a notice.
        post :create, { :movie_id => movie.friendly_id, :comment => FactoryGirl.attributes_for(:comment, :text => nil) }
        response.status.must_equal 302
        flash[:alert].wont_be_nil
        movie.comments.size.must_equal 0
      end
    end
  end
end
| 33.88 | 120 | 0.678867 |
require 'formula'

# Homebrew formula for giblib, a utility library built on top of imlib2.
class Giblib < Formula
  homepage 'http://freshmeat.net/projects/giblib'
  url 'http://linuxbrit.co.uk/downloads/giblib-1.2.4.tar.gz'
  sha1 '342e6f7882c67d2277e1765299e1be5078329ab0'

  depends_on :x11
  # imlib2 is only needed while compiling, not at runtime.
  depends_on 'imlib2' => :build

  def install
    system "./configure", "--prefix=#{prefix}"
    system "make install"
  end

  test do
    # giblib-config ships with the library; a zero exit status from
    # `--version` proves the install is usable.
    system "#{bin}/giblib-config", "--version"
  end
end
| 20.8 | 60 | 0.692308 |
# frozen_string_literal: true

require 'spec_helper'

# Specs for the tenant-scoped Pundit policy around Organization records.
RSpec.describe Maestrano::Connector::Rails::OrganizationPolicy do
  include SharedPunditExample

  # One organization per tenant so the scope example can tell them apart.
  let!(:instance1) { create(:organization, tenant: 'default') }
  let!(:instance2) { create(:organization, tenant: 'production') }

  describe 'scope' do
    it_behaves_like 'a model scoped to the tenant'
  end

  describe 'policy' do
    let!(:user) { create(:user, tenant: 'default') }
    # The policy is built from the user's tenant and the same-tenant record.
    subject { described_class.new(user.tenant, instance1) }

    it { is_expected.to permit_new_and_create_actions }
    it { is_expected.to permit_edit_and_update_actions }
  end
end
| 27.086957 | 66 | 0.730337 |
791f504b36d3d7c4a241d4ac570550562df81bbe | 6,798 | #
# --------------------------------------------------------------------------------------------------------------------
# <copyright company="Aspose Pty Ltd" file="image_load_options.rb">
# Copyright (c) 2003-2021 Aspose Pty Ltd
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# --------------------------------------------------------------------------------------------------------------------
#
require 'date'
module GroupDocsConversionCloud
  # Image document load options
  class ImageLoadOptions
    # Default font for Psd, Emf, Wmf document types. The following font will be used if a font is missing.
    attr_accessor :default_font

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'default_font' => :'DefaultFont'
      }
    end

    # Attribute type mapping.
    def self.swagger_types
      {
        :'default_font' => :'String'
      }
    end

    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)

      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }

      if attributes.key?(:'DefaultFont')
        self.default_font = attributes[:'DefaultFont']
      end
    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properies with the reasons
    def list_invalid_properties
      invalid_properties = []
      return invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      return true
    end

    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(other)
      return true if self.equal?(other)
      self.class == other.class &&
          default_font == other.default_font
    end

    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(other)
      self == other
    end

    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [default_font].hash
    end

    # Downcases first letter.
    # @return downcased string
    def uncap(str)
      str[0, 1].downcase + str[1..-1]
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        pname = uncap(self.class.attribute_map[key]).intern
        value = attributes[pname]
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the the attribute
          # is documented as an array but the input is not
          if value.is_a?(Array)
            self.send("#{key}=", value.map { |v| _deserialize($1, v) })
          end
        elsif !value.nil?
          self.send("#{key}=", _deserialize(type, value))
        end
        # or else data not found in attributes(hash), not an issue as the data can be optional
      end

      self
    end

    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        # Bug fix: the original used Date.parse here, which silently discarded
        # the time-of-day component of timestamp fields. DateTime.parse keeps it.
        DateTime.parse value
      when :Date
        Date.parse value
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else
        # model
        temp_model = GroupDocsConversionCloud.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end

    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 31.618605 | 119 | 0.612827 |
Sequel.migration do
  # Adds lookup indexes on the foreign keys used to join pallet sequences and
  # load-related tables back to their parent records.
  up do
    alter_table(:pallet_sequences) do
      add_index :pallet_id, name: :pseq_pallet_id_idx
    end
    alter_table(:load_containers) do
      add_index :load_id, name: :load_containers_load_id_idx
    end
    alter_table(:load_vehicles) do
      add_index :load_id, name: :load_vehicles_load_id_idx
    end
    alter_table(:load_voyages) do
      add_index :load_id, name: :load_voyages_load_id_idx
    end
  end

  # Mirror image of +up+: drops the same indexes (by name) so the migration
  # reverses cleanly.
  down do
    alter_table(:pallet_sequences) do
      drop_index :pallet_id, name: :pseq_pallet_id_idx
    end
    alter_table(:load_containers) do
      drop_index :load_id, name: :load_containers_load_id_idx
    end
    alter_table(:load_vehicles) do
      drop_index :load_id, name: :load_vehicles_load_id_idx
    end
    alter_table(:load_voyages) do
      drop_index :load_id, name: :load_voyages_load_id_idx
    end
  end
end
| 27.34375 | 61 | 0.722286 |
require 'OSM/StreamParser'

# Callbacks used by the tests to look up node coordinates by name.
#
# Bug fix: the original used plain local variables named `locations` in three
# unrelated scopes (top level, class body, and each method), so
# `self.locations` never memoized anything and returned nil (the result of
# `puts`), and the `node` callback raised NameError. State now lives in a
# class-level instance variable shared through `self.class.locations`.
class OSMTestParserCallbacks < OSM::Callbacks
  @locations = nil

  # Lazily parses the fixture file once and memoizes name => [lat, lon].
  def self.locations
    return @locations if @locations

    # parse the test file, so we can later reference nodes and ways by name in tests
    @locations = {}
    file = 'test/data/test.osm'
    callbacks = OSMTestParserCallbacks.new
    parser = OSM::StreamParser.new(:filename => file, :callbacks => callbacks)
    parser.parse
    puts @locations
    @locations
  end

  def node(node)
    # During parsing @locations is already initialized, so this records into
    # the memoized hash rather than re-triggering a parse.
    self.class.locations[node.name] = [node.lat, node.lon]
  end
end
module PluginScan
  module Software
    # Maps human-readable software product names to internal symbols.
    class NameConverter
      # Lookup table keyed by the lower-cased product name.
      NAME_TO_SYMBOL = {
        'adobe reader'          => :ao_reader,
        'phoscode devalvr'      => :ao_dvr,
        'adobe flash'           => :ao_flash,
        'oracle java'           => :ao_java,
        'apple quicktime'       => :ao_qt,
        'realplayer'            => :ao_rp,
        'adobe shockwave'       => :ao_shock,
        'microsoft silverlight' => :ao_silver,
        'windows media player'  => :ao_wmp,
        'vlc media player'      => :ao_vlc
      }.freeze

      # Returns the symbol for +name+ (matched case-insensitively), or nil
      # when the product is not recognised.
      def as_symbol(name)
        NAME_TO_SYMBOL[name.downcase]
      end
    end
  end
end
| 22.225806 | 38 | 0.493469 |
39c9010fe8a48ef2bb9ea1db4ed0a5d6b037f715 | 2,864 | # encoding: utf-8
# This file is distributed under New Relic's license terms.
# See https://github.com/newrelic/newrelic-ruby-agent/blob/main/LICENSE for complete details.
require File.expand_path(File.join(File.dirname(__FILE__),'..', '..','test_helper'))
require 'new_relic/agent/encoding_normalizer.rb'
# Unit tests for NewRelic::Agent::EncodingNormalizer, which coerces arbitrary
# Ruby objects/strings into UTF-8 (or ISO-8859-1 when UTF-8 is impossible)
# before serialization.
class EncodingNormalizerTest < Minitest::Test
  EncodingNormalizer = NewRelic::Agent::EncodingNormalizer

  def test_normalize_object_converts_symbol_values_to_strings
    result = EncodingNormalizer.normalize_object([:foo, :bar])
    assert_equal(['foo', 'bar'], result)
  end

  def test_normalize_object_converts_symbols_in_hash_to_strings
    result = EncodingNormalizer.normalize_object({:key => :value})
    assert_equal({'key' => 'value'}, result)
  end

  def test_normalize_object_converts_rationals_to_floats
    result = EncodingNormalizer.normalize_object({:key => Rational(3,2)})
    assert_equal({'key' => 1.5}, result)
  end

  def test_normalize_string_returns_input_if_correctly_encoded_utf8
    string = "i want a pony"
    result = EncodingNormalizer.normalize_string(string)
    # Valid UTF-8 passes through without copying.
    assert_same(string, result)
    assert_equal(Encoding.find('UTF-8'), result.encoding)
  end

  def test_normalize_string_returns_munged_copy_if_ascii_8bit
    # All 256 byte values — not valid UTF-8, so a re-tagged copy is expected.
    string = (0..255).to_a.pack("C*")
    result = EncodingNormalizer.normalize_string(string)
    refute_same(string, result)
    assert_equal(Encoding.find('ISO-8859-1'), result.encoding)
    assert_equal(string, result.dup.force_encoding('ASCII-8BIT'))
  end

  def test_normalize_string_returns_munged_copy_if_invalid_utf8
    string = (0..255).to_a.pack("C*").force_encoding('UTF-8')
    result = EncodingNormalizer.normalize_string(string)
    refute_same(result, string)
    assert_equal(Encoding.find('ISO-8859-1'), result.encoding)
    assert_equal(string, result.dup.force_encoding('UTF-8'))
  end

  def test_normalize_string_returns_munged_copy_if_other_convertible_encoding
    string = "i want a pony".encode('UTF-16LE')
    result = EncodingNormalizer.normalize_string(string)
    refute_same(result, string)
    # Convertible encodings are transcoded to UTF-8, round-trippable back.
    assert_equal(Encoding.find('UTF-8'), result.encoding)
    assert_equal(string, result.encode('UTF-16LE'))
  end

  def test_normalize_string_returns_munged_copy_if_other_non_convertible_enocding
    # Attempting to convert from UTF-7 to UTF-8 in Ruby will raise an
    # Encoding::ConverterNotFoundError, which is what we're trying to
    # replicate for this test case.
    # The following UTF-7 string decodes to 'Jyväskylä', a city in Finland
    string = "Jyv+AOQ-skyl+AOQ-".force_encoding("UTF-7")
    assert string.valid_encoding?
    result = EncodingNormalizer.normalize_string(string)
    refute_same(result, string)
    assert_equal(Encoding.find('ISO-8859-1'), result.encoding)
    assert_equal('Jyv+AOQ-skyl+AOQ-'.force_encoding('ISO-8859-1'), result)
  end
end
| 40.914286 | 93 | 0.758729 |
module MetaProject
  module Project
    module Trac
      # A project hosted in a Trac instance backed by Subversion.
      class TracProject < Base
        # @param trac_base_url base URL of the Trac instance (no trailing slash)
        # @param svn_root_url  root URL of the Subversion repository
        # @param svn_path      path of this project inside the repository
        def initialize(trac_base_url, svn_root_url, svn_path)
          @trac_base_url = trac_base_url
          @svn_path = svn_path
          @scm = RSCM::Subversion.new("#{svn_root_url}#{svn_path}", svn_path)
          @tracker = ::MetaProject::Tracker::Trac::TracTracker.new(@trac_base_url)
        end

        # Matches the "Trac X.Y[.Z]" version banner on the Trac front page.
        TRAC_VERSION_PATTERN = /<strong>Trac ([\d\.]+)[^<]*<\/strong>/

        # Builds (and memoizes) a ScmWeb::Browser with URL templates suited to
        # the detected Trac version. The `\#{path}` / `\#{revision}` escapes
        # deliberately keep interpolation markers literal so ScmWeb can fill
        # them in later.
        def scm_web
          unless @scm_web
            # better_open: presumably an open-uri style helper — TODO confirm.
            front_page = better_open(@trac_base_url).read
            if(front_page =~ TRAC_VERSION_PATTERN)
              version = $1
              # If there is no minor version part, add 0
              version = "#{version}.0" if version =~ /^[\d]+\.[\d]+$/
              # Collapse the dots to compare versions numerically (0.9.0 -> 90).
              version = version.gsub(/\./, "").to_i
              if(version >= 90)
                html = "#{@trac_base_url}/browser/#{@svn_path}/\#{path}?rev=\#{revision}"
                raw = "#{@trac_base_url}/browser/#{@svn_path}/\#{path}?rev=\#{revision}&format=txt"
              else
                # Trac releases before 0.9 used /file instead of /browser.
                html = "#{@trac_base_url}/file/#{@svn_path}/\#{path}?rev=\#{revision}"
                raw = "#{@trac_base_url}/file/#{@svn_path}/\#{path}?rev=\#{revision}&format=txt"
              end
              dir = "#{@trac_base_url}/browser/#{@svn_path}/\#{path}"
              history = "#{@trac_base_url}/log/#{@svn_path}/\#{path}"
              diff = "#{@trac_base_url}/changeset/\#{revision}"
              child_dirs_pattern = /title="Browse Directory" href="[^"]+">([^<]+)<\/a>/
              child_files_pattern = /title="View File" href="[^"]+">([^<]+)<\/a>/
              @scm_web = ScmWeb::Browser.new(dir, history, raw, html, diff, child_dirs_pattern, child_files_pattern)
            else
              raise ProjectException.new("Couldn't determine the Trac version. Is the URL '#{@trac_base_url}' correct? I was looking for the regexp /#{TRAC_VERSION_PATTERN.source}/ on the page, but couldn't find it.")
            end
          end
          @scm_web
        end

        def home_page
          "#{@trac_base_url}/wiki"
        end
      end
    end
  end
end
2835960e7e7016faff879ab8fc20420748b70476 | 3,836 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Attempt to read encrypted secrets from `config/secrets.yml.enc`.
# Requires an encryption key in `ENV["RAILS_MASTER_KEY"]` or
# `config/secrets.yml.key`.
config.read_encrypted_secrets = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = true
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "refinerycms_#{Rails.env}"
if config.respond_to?(:action_mailer)
config.action_mailer.perform_caching = false
end
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 40.808511 | 102 | 0.756517 |
217554d2707ee2449fb3ba425a8f67e21f74eea3 | 956 | require 'haml_lint/constants'
require 'haml_lint/exceptions'
require 'haml_lint/configuration'
require 'haml_lint/configuration_loader'
require 'haml_lint/parser'
require 'haml_lint/haml_visitor'
require 'haml_lint/lint'
require 'haml_lint/linter_registry'
require 'haml_lint/ruby_parser'
require 'haml_lint/linter'
require 'haml_lint/logger'
require 'haml_lint/reporter'
require 'haml_lint/report'
require 'haml_lint/runner'
require 'haml_lint/utils'
require 'haml_lint/version'
require 'haml'
# Load all parse tree node classes
require 'haml_lint/tree/node'
require 'haml_lint/node_transformer'
Dir[File.expand_path('haml_lint/tree/*.rb', File.dirname(__FILE__))].each do |file|
require file
end
# Load all linters
Dir[File.expand_path('haml_lint/linter/*.rb', File.dirname(__FILE__))].each do |file|
require file
end
# Load all reporters
Dir[File.expand_path('haml_lint/reporter/*.rb', File.dirname(__FILE__))].each do |file|
require file
end
| 26.555556 | 87 | 0.792887 |
335f298c6345f071e360356b972e3c2b7ba09417 | 14,274 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "helper"
require "gapic/grpc/service_stub"
require "google/cloud/billing/budgets/v1beta1/budget_service_pb"
require "google/cloud/billing/budgets/v1beta1/budget_service_services_pb"
require "google/cloud/billing/budgets/v1beta1/budget_service"
# Generated unit tests for BudgetService::Client. Each RPC is exercised via
# every supported calling convention (hash, named args, protobuf request,
# each with and without call options) against a stubbed transport.
class ::Google::Cloud::Billing::Budgets::V1beta1::BudgetService::ClientTest < Minitest::Test
  # Minimal stand-in for Gapic::ServiceStub: records every RPC invocation and
  # answers with a canned response/operation pair.
  class ClientStub
    attr_accessor :call_rpc_count, :requests

    def initialize response, operation, &block
      @response = response
      @operation = operation
      # Per-test verification hook run on each RPC's name/request/options.
      @block = block
      @call_rpc_count = 0
      @requests = []
    end

    def call_rpc *args, **kwargs
      @call_rpc_count += 1

      @requests << @block&.call(*args, **kwargs)

      yield @response, @operation if block_given?

      @response
    end
  end

  def test_create_budget
    # Create GRPC objects.
    grpc_response = ::Google::Cloud::Billing::Budgets::V1beta1::Budget.new
    grpc_operation = GRPC::ActiveCall::Operation.new nil
    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
    grpc_options = {}

    # Create request parameters for a unary method.
    parent = "hello world"
    budget = {}

    create_budget_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
      assert_equal :create_budget, name
      assert_kind_of ::Google::Cloud::Billing::Budgets::V1beta1::CreateBudgetRequest, request
      assert_equal "hello world", request.parent
      assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Billing::Budgets::V1beta1::Budget), request.budget
      refute_nil options
    end

    Gapic::ServiceStub.stub :new, create_budget_client_stub do
      # Create client
      client = ::Google::Cloud::Billing::Budgets::V1beta1::BudgetService::Client.new do |config|
        config.credentials = grpc_channel
      end

      # Use hash object
      client.create_budget({ parent: parent, budget: budget }) do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use named arguments
      client.create_budget parent: parent, budget: budget do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use protobuf object
      client.create_budget ::Google::Cloud::Billing::Budgets::V1beta1::CreateBudgetRequest.new(parent: parent, budget: budget) do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use hash object with options
      client.create_budget({ parent: parent, budget: budget }, grpc_options) do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use protobuf object with options
      client.create_budget ::Google::Cloud::Billing::Budgets::V1beta1::CreateBudgetRequest.new(parent: parent, budget: budget), grpc_options do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Verify method calls
      assert_equal 5, create_budget_client_stub.call_rpc_count
    end
  end

  def test_update_budget
    # Create GRPC objects.
    grpc_response = ::Google::Cloud::Billing::Budgets::V1beta1::Budget.new
    grpc_operation = GRPC::ActiveCall::Operation.new nil
    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
    grpc_options = {}

    # Create request parameters for a unary method.
    budget = {}
    update_mask = {}

    update_budget_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
      assert_equal :update_budget, name
      assert_kind_of ::Google::Cloud::Billing::Budgets::V1beta1::UpdateBudgetRequest, request
      assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Billing::Budgets::V1beta1::Budget), request.budget
      assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request.update_mask
      refute_nil options
    end

    Gapic::ServiceStub.stub :new, update_budget_client_stub do
      # Create client
      client = ::Google::Cloud::Billing::Budgets::V1beta1::BudgetService::Client.new do |config|
        config.credentials = grpc_channel
      end

      # Use hash object
      client.update_budget({ budget: budget, update_mask: update_mask }) do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use named arguments
      client.update_budget budget: budget, update_mask: update_mask do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use protobuf object
      client.update_budget ::Google::Cloud::Billing::Budgets::V1beta1::UpdateBudgetRequest.new(budget: budget, update_mask: update_mask) do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use hash object with options
      client.update_budget({ budget: budget, update_mask: update_mask }, grpc_options) do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use protobuf object with options
      client.update_budget ::Google::Cloud::Billing::Budgets::V1beta1::UpdateBudgetRequest.new(budget: budget, update_mask: update_mask), grpc_options do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Verify method calls
      assert_equal 5, update_budget_client_stub.call_rpc_count
    end
  end

  def test_get_budget
    # Create GRPC objects.
    grpc_response = ::Google::Cloud::Billing::Budgets::V1beta1::Budget.new
    grpc_operation = GRPC::ActiveCall::Operation.new nil
    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
    grpc_options = {}

    # Create request parameters for a unary method.
    name = "hello world"

    # NOTE: the block parameter `name` (the RPC name) shadows the local
    # `name` request field above; the literal "hello world" is asserted
    # against the request instead.
    get_budget_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
      assert_equal :get_budget, name
      assert_kind_of ::Google::Cloud::Billing::Budgets::V1beta1::GetBudgetRequest, request
      assert_equal "hello world", request.name
      refute_nil options
    end

    Gapic::ServiceStub.stub :new, get_budget_client_stub do
      # Create client
      client = ::Google::Cloud::Billing::Budgets::V1beta1::BudgetService::Client.new do |config|
        config.credentials = grpc_channel
      end

      # Use hash object
      client.get_budget({ name: name }) do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use named arguments
      client.get_budget name: name do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use protobuf object
      client.get_budget ::Google::Cloud::Billing::Budgets::V1beta1::GetBudgetRequest.new(name: name) do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use hash object with options
      client.get_budget({ name: name }, grpc_options) do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use protobuf object with options
      client.get_budget ::Google::Cloud::Billing::Budgets::V1beta1::GetBudgetRequest.new(name: name), grpc_options do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Verify method calls
      assert_equal 5, get_budget_client_stub.call_rpc_count
    end
  end

  def test_list_budgets
    # Create GRPC objects.
    grpc_response = ::Google::Cloud::Billing::Budgets::V1beta1::ListBudgetsResponse.new
    grpc_operation = GRPC::ActiveCall::Operation.new nil
    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
    grpc_options = {}

    # Create request parameters for a unary method.
    parent = "hello world"
    page_size = 42
    page_token = "hello world"

    list_budgets_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
      assert_equal :list_budgets, name
      assert_kind_of ::Google::Cloud::Billing::Budgets::V1beta1::ListBudgetsRequest, request
      assert_equal "hello world", request.parent
      assert_equal 42, request.page_size
      assert_equal "hello world", request.page_token
      refute_nil options
    end

    Gapic::ServiceStub.stub :new, list_budgets_client_stub do
      # Create client
      client = ::Google::Cloud::Billing::Budgets::V1beta1::BudgetService::Client.new do |config|
        config.credentials = grpc_channel
      end

      # Use hash object
      # List RPCs are wrapped in a PagedEnumerable; the raw response is at
      # `response.response`.
      client.list_budgets({ parent: parent, page_size: page_size, page_token: page_token }) do |response, operation|
        assert_kind_of Gapic::PagedEnumerable, response
        assert_equal grpc_response, response.response
        assert_equal grpc_operation, operation
      end

      # Use named arguments
      client.list_budgets parent: parent, page_size: page_size, page_token: page_token do |response, operation|
        assert_kind_of Gapic::PagedEnumerable, response
        assert_equal grpc_response, response.response
        assert_equal grpc_operation, operation
      end

      # Use protobuf object
      client.list_budgets ::Google::Cloud::Billing::Budgets::V1beta1::ListBudgetsRequest.new(parent: parent, page_size: page_size, page_token: page_token) do |response, operation|
        assert_kind_of Gapic::PagedEnumerable, response
        assert_equal grpc_response, response.response
        assert_equal grpc_operation, operation
      end

      # Use hash object with options
      client.list_budgets({ parent: parent, page_size: page_size, page_token: page_token }, grpc_options) do |response, operation|
        assert_kind_of Gapic::PagedEnumerable, response
        assert_equal grpc_response, response.response
        assert_equal grpc_operation, operation
      end

      # Use protobuf object with options
      client.list_budgets ::Google::Cloud::Billing::Budgets::V1beta1::ListBudgetsRequest.new(parent: parent, page_size: page_size, page_token: page_token), grpc_options do |response, operation|
        assert_kind_of Gapic::PagedEnumerable, response
        assert_equal grpc_response, response.response
        assert_equal grpc_operation, operation
      end

      # Verify method calls
      assert_equal 5, list_budgets_client_stub.call_rpc_count
    end
  end

  def test_delete_budget
    # Create GRPC objects.
    grpc_response = ::Google::Protobuf::Empty.new
    grpc_operation = GRPC::ActiveCall::Operation.new nil
    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
    grpc_options = {}

    # Create request parameters for a unary method.
    name = "hello world"

    delete_budget_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
      assert_equal :delete_budget, name
      assert_kind_of ::Google::Cloud::Billing::Budgets::V1beta1::DeleteBudgetRequest, request
      assert_equal "hello world", request.name
      refute_nil options
    end

    Gapic::ServiceStub.stub :new, delete_budget_client_stub do
      # Create client
      client = ::Google::Cloud::Billing::Budgets::V1beta1::BudgetService::Client.new do |config|
        config.credentials = grpc_channel
      end

      # Use hash object
      client.delete_budget({ name: name }) do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use named arguments
      client.delete_budget name: name do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use protobuf object
      client.delete_budget ::Google::Cloud::Billing::Budgets::V1beta1::DeleteBudgetRequest.new(name: name) do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use hash object with options
      client.delete_budget({ name: name }, grpc_options) do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Use protobuf object with options
      client.delete_budget ::Google::Cloud::Billing::Budgets::V1beta1::DeleteBudgetRequest.new(name: name), grpc_options do |response, operation|
        assert_equal grpc_response, response
        assert_equal grpc_operation, operation
      end

      # Verify method calls
      assert_equal 5, delete_budget_client_stub.call_rpc_count
    end
  end

  def test_configure
    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure

    client = block_config = config = nil
    Gapic::ServiceStub.stub :new, nil do
      # NOTE: the block parameter `config` shadows the outer local of the
      # same name; the outer `config` is assigned by `client.configure` below.
      client = ::Google::Cloud::Billing::Budgets::V1beta1::BudgetService::Client.new do |config|
        config.credentials = grpc_channel
      end
    end

    config = client.configure do |c|
      block_config = c
    end

    # configure must yield and return the same Configuration instance.
    assert_same block_config, config
    assert_kind_of ::Google::Cloud::Billing::Budgets::V1beta1::BudgetService::Client::Configuration, config
  end
end
| 38.474394 | 193 | 0.715076 |
bf8047e951573c587f5cf5cba4ae98aae36b8529 | 916 | require 'thor'
require 'npmdc'
module Npmdc
  # Thor-powered command line interface. `check` is the default task, so
  # running the executable with no arguments performs the dependency check.
  # NOTE: Thor's `desc`/`method_option` calls attach to the *next* task
  # definition, so declaration order here is significant.
  class Cli < Thor
    default_task :check

    desc 'check', 'Run check'
    method_option :path, desc: 'Path to package.json config'
    method_option :color, desc: 'Enable color', type: :boolean, default: true
    # NOTE(review): "DEPEPENDENCY_TYPES" looks misspelled, but the constant is
    # declared in Npmdc::Config — any rename must happen there first.
    method_option :types, aliases: [:t],
                          desc: 'Dependency types to check',
                          type: :array,
                          enum: Npmdc::Config::DEPEPENDENCY_TYPES,
                          default: Npmdc::Config::DEPEPENDENCY_TYPES
    method_option :format, aliases: [:f],
                          desc: 'Output format',
                          enum: Npmdc::Formatter::FORMATTERS.keys.map(&:to_s)
    # Run the dependency check with the parsed CLI options.
    def check
      Npmdc.call(options)
    end

    # Map `--version` / `-v` onto the version-printing task below.
    map %w[--version -v] => :__print_version

    desc '--version, -v', 'Print gem version'
    def __print_version
      puts Npmdc::VERSION
    end
  end
end
| 29.548387 | 78 | 0.562227 |
1c79bf8e226b3722a47d35a073bfaedc09bfe884 | 1,585 | module Fog
module Storage
class AWS
class Real
# Change website configuration for an S3 bucket
#
# @param bucket_name [String] name of bucket to modify
# @param suffix [String] suffix to append to requests for the bucket
# @param options [Hash]
# @option options key [String] key to use for 4XX class errors
#
# @see http://docs.amazonwebservices.com/AmazonS3/latest/API/RESTBucketPUTwebsite.html
def put_bucket_website(bucket_name, suffix, options = {})
data =
<<-DATA
<WebsiteConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<IndexDocument>
<Suffix>#{suffix}</Suffix>
</IndexDocument>
DATA
if options[:key]
data <<
<<-DATA
<ErrorDocument>
<Key>#{options[:key]}</Key>
</ErrorDocument>
DATA
end
data << '</WebsiteConfiguration>'
request({
:body => data,
:expects => 200,
:headers => {},
:bucket_name => bucket_name,
:method => 'PUT',
:query => {'website' => nil}
})
end
end
class Mock # :nodoc:all
def put_bucket_website(bucket_name, suffix, options = {})
response = Excon::Response.new
if self.data[:buckets][bucket_name]
response.status = 200
else
response.status = 404
raise(Excon::Errors.status_error({:expects => 200}, response))
end
response
end
end
end
end
end
| 24.384615 | 94 | 0.54511 |
5d5351eb9fe15b0ab965ef54f2ecb7f09a2d7890 | 36,505 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Packages::Package, type: :model do
include SortingHelper
it_behaves_like 'having unique enum values'
describe 'relationships' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:creator) }
it { is_expected.to have_many(:package_files).dependent(:destroy) }
it { is_expected.to have_many(:dependency_links).inverse_of(:package) }
it { is_expected.to have_many(:tags).inverse_of(:package) }
it { is_expected.to have_many(:build_infos).inverse_of(:package) }
it { is_expected.to have_many(:pipelines).through(:build_infos) }
it { is_expected.to have_one(:conan_metadatum).inverse_of(:package) }
it { is_expected.to have_one(:maven_metadatum).inverse_of(:package) }
it { is_expected.to have_one(:debian_publication).inverse_of(:package).class_name('Packages::Debian::Publication') }
it { is_expected.to have_one(:debian_distribution).through(:debian_publication).source(:distribution).inverse_of(:packages).class_name('Packages::Debian::ProjectDistribution') }
it { is_expected.to have_one(:nuget_metadatum).inverse_of(:package) }
it { is_expected.to have_one(:rubygems_metadatum).inverse_of(:package) }
end
describe '.with_debian_codename' do
let_it_be(:publication) { create(:debian_publication) }
subject { described_class.with_debian_codename(publication.distribution.codename).to_a }
it { is_expected.to contain_exactly(publication.package) }
end
describe '.with_composer_target' do
let!(:package1) { create(:composer_package, :with_metadatum, sha: '123') }
let!(:package2) { create(:composer_package, :with_metadatum, sha: '123') }
let!(:package3) { create(:composer_package, :with_metadatum, sha: '234') }
subject { described_class.with_composer_target('123').to_a }
it 'selects packages with the specified sha' do
expect(subject).to include(package1)
expect(subject).to include(package2)
expect(subject).not_to include(package3)
end
end
describe '.sort_by_attribute' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, namespace: group, name: 'project A') }
let!(:package1) { create(:npm_package, project: project, version: '3.1.0', name: "@#{project.root_namespace.path}/foo1") }
let!(:package2) { create(:nuget_package, project: project, version: '2.0.4') }
let(:package3) { create(:maven_package, project: project, version: '1.1.1', name: 'zzz') }
before do
travel_to(1.day.ago) do
package3
end
end
RSpec.shared_examples 'package sorting by attribute' do |order_by|
subject { described_class.where(id: packages.map(&:id)).sort_by_attribute("#{order_by}_#{sort}").to_a }
context "sorting by #{order_by}" do
context 'ascending order' do
let(:sort) { 'asc' }
it { is_expected.to eq packages }
end
context 'descending order' do
let(:sort) { 'desc' }
it { is_expected.to eq packages.reverse }
end
end
end
it_behaves_like 'package sorting by attribute', 'name' do
let(:packages) { [package1, package2, package3] }
end
it_behaves_like 'package sorting by attribute', 'created_at' do
let(:packages) { [package3, package1, package2] }
end
it_behaves_like 'package sorting by attribute', 'version' do
let(:packages) { [package3, package2, package1] }
end
it_behaves_like 'package sorting by attribute', 'type' do
let(:packages) { [package3, package1, package2] }
end
it_behaves_like 'package sorting by attribute', 'project_path' do
let(:another_project) { create(:project, :public, namespace: group, name: 'project B') }
let!(:package4) { create(:npm_package, project: another_project, version: '3.1.0', name: "@#{project.root_namespace.path}/bar") }
let(:packages) { [package1, package2, package3, package4] }
end
end
describe '.for_projects' do
let_it_be(:package1) { create(:maven_package) }
let_it_be(:package2) { create(:maven_package) }
let_it_be(:package3) { create(:maven_package) }
let(:projects) { ::Project.id_in([package1.project_id, package2.project_id]) }
subject { described_class.for_projects(projects.select(:id)) }
it 'returns package1 and package2' do
expect(projects).not_to receive(:any?)
expect(subject).to match_array([package1, package2])
end
end
describe 'validations' do
subject { build(:package) }
it { is_expected.to validate_presence_of(:project) }
it { is_expected.to validate_uniqueness_of(:name).scoped_to(:project_id, :version, :package_type) }
describe '#name' do
it { is_expected.to allow_value("my/domain/com/my-app").for(:name) }
it { is_expected.to allow_value("my.app-11.07.2018").for(:name) }
it { is_expected.not_to allow_value("my(dom$$$ain)com.my-app").for(:name) }
context 'conan package' do
subject { build_stubbed(:conan_package) }
let(:fifty_one_characters) {'f_b' * 17}
it { is_expected.to allow_value('foo+bar').for(:name) }
it { is_expected.to allow_value('foo_bar').for(:name) }
it { is_expected.to allow_value('foo.bar').for(:name) }
it { is_expected.not_to allow_value(fifty_one_characters).for(:name) }
it { is_expected.not_to allow_value('+foobar').for(:name) }
it { is_expected.not_to allow_value('.foobar').for(:name) }
it { is_expected.not_to allow_value('%foo%bar').for(:name) }
end
context 'debian package' do
subject { build(:debian_package) }
it { is_expected.to allow_value('0ad').for(:name) }
it { is_expected.to allow_value('g++').for(:name) }
it { is_expected.not_to allow_value('a_b').for(:name) }
end
context 'debian incoming' do
subject { create(:debian_incoming) }
# Only 'incoming' is accepted
it { is_expected.to allow_value('incoming').for(:name) }
it { is_expected.not_to allow_value('0ad').for(:name) }
it { is_expected.not_to allow_value('g++').for(:name) }
it { is_expected.not_to allow_value('a_b').for(:name) }
end
context 'generic package' do
subject { build_stubbed(:generic_package) }
it { is_expected.to allow_value('123').for(:name) }
it { is_expected.to allow_value('foo').for(:name) }
it { is_expected.to allow_value('foo.bar.baz-2.0-20190901.47283-1').for(:name) }
it { is_expected.not_to allow_value('../../foo').for(:name) }
it { is_expected.not_to allow_value('..\..\foo').for(:name) }
it { is_expected.not_to allow_value('%2f%2e%2e%2f%2essh%2fauthorized_keys').for(:name) }
it { is_expected.not_to allow_value('$foo/bar').for(:name) }
it { is_expected.not_to allow_value('my file name').for(:name) }
it { is_expected.not_to allow_value('!!().for(:name)().for(:name)').for(:name) }
end
context 'helm package' do
subject { build(:helm_package) }
it { is_expected.to allow_value('prometheus').for(:name) }
it { is_expected.to allow_value('rook-ceph').for(:name) }
it { is_expected.not_to allow_value('a+b').for(:name) }
it { is_expected.not_to allow_value('Hé').for(:name) }
end
context 'nuget package' do
subject { build_stubbed(:nuget_package) }
it { is_expected.to allow_value('My.Package').for(:name) }
it { is_expected.to allow_value('My.Package.Mvc').for(:name) }
it { is_expected.to allow_value('MyPackage').for(:name) }
it { is_expected.to allow_value('My.23.Package').for(:name) }
it { is_expected.to allow_value('My23Package').for(:name) }
it { is_expected.to allow_value('runtime.my-test64.runtime.package.Mvc').for(:name) }
it { is_expected.to allow_value('my_package').for(:name) }
it { is_expected.not_to allow_value('My/package').for(:name) }
it { is_expected.not_to allow_value('../../../my_package').for(:name) }
it { is_expected.not_to allow_value('%2e%2e%2fmy_package').for(:name) }
end
context 'npm package' do
subject { build_stubbed(:npm_package) }
it { is_expected.to allow_value("@group-1/package").for(:name) }
it { is_expected.to allow_value("@any-scope/package").for(:name) }
it { is_expected.to allow_value("unscoped-package").for(:name) }
it { is_expected.not_to allow_value("@inv@lid-scope/package").for(:name) }
it { is_expected.not_to allow_value("@scope/../../package").for(:name) }
it { is_expected.not_to allow_value("@scope%2e%2e%fpackage").for(:name) }
it { is_expected.not_to allow_value("@scope/sub/package").for(:name) }
end
end
describe '#version' do
RSpec.shared_examples 'validating version to be SemVer compliant for' do |factory_name|
context "for #{factory_name}" do
subject { build_stubbed(factory_name) }
it { is_expected.to allow_value('1.2.3').for(:version) }
it { is_expected.to allow_value('1.2.3-beta').for(:version) }
it { is_expected.to allow_value('1.2.3-alpha.3').for(:version) }
it { is_expected.not_to allow_value('1').for(:version) }
it { is_expected.not_to allow_value('1.2').for(:version) }
it { is_expected.not_to allow_value('1./2.3').for(:version) }
it { is_expected.not_to allow_value('../../../../../1.2.3').for(:version) }
it { is_expected.not_to allow_value('%2e%2e%2f1.2.3').for(:version) }
end
end
context 'conan package' do
subject { build_stubbed(:conan_package) }
let(:fifty_one_characters) {'1.2' * 17}
it { is_expected.to allow_value('1.2').for(:version) }
it { is_expected.to allow_value('1.2.3-beta').for(:version) }
it { is_expected.to allow_value('1.2.3-pre1+build2').for(:version) }
it { is_expected.not_to allow_value('1').for(:version) }
it { is_expected.not_to allow_value(fifty_one_characters).for(:version) }
it { is_expected.not_to allow_value('1./2.3').for(:version) }
it { is_expected.not_to allow_value('.1.2.3').for(:version) }
it { is_expected.not_to allow_value('+1.2.3').for(:version) }
it { is_expected.not_to allow_value('%2e%2e%2f1.2.3').for(:version) }
end
context 'composer package' do
it_behaves_like 'validating version to be SemVer compliant for', :composer_package
it { is_expected.to allow_value('dev-master').for(:version) }
it { is_expected.to allow_value('2.x-dev').for(:version) }
end
context 'debian package' do
subject { build(:debian_package) }
it { is_expected.to allow_value('2:4.9.5+dfsg-5+deb10u1').for(:version) }
it { is_expected.not_to allow_value('1_0').for(:version) }
end
context 'debian incoming' do
subject { create(:debian_incoming) }
it { is_expected.to allow_value(nil).for(:version) }
it { is_expected.not_to allow_value('2:4.9.5+dfsg-5+deb10u1').for(:version) }
it { is_expected.not_to allow_value('1_0').for(:version) }
end
context 'maven package' do
subject { build_stubbed(:maven_package) }
it { is_expected.to allow_value('0').for(:version) }
it { is_expected.to allow_value('1').for(:version) }
it { is_expected.to allow_value('10').for(:version) }
it { is_expected.to allow_value('1.0').for(:version) }
it { is_expected.to allow_value('1.3.350.v20200505-1744').for(:version) }
it { is_expected.to allow_value('1.1-beta-2').for(:version) }
it { is_expected.to allow_value('1.2-SNAPSHOT').for(:version) }
it { is_expected.to allow_value('12.1.2-2-1').for(:version) }
it { is_expected.to allow_value('1.2.3..beta').for(:version) }
it { is_expected.to allow_value('1.2.3-beta').for(:version) }
it { is_expected.to allow_value('10.2.3-beta').for(:version) }
it { is_expected.to allow_value('2.0.0.v200706041905-7C78EK9E_EkMNfNOd2d8qq').for(:version) }
it { is_expected.to allow_value('1.2-alpha-1-20050205.060708-1').for(:version) }
it { is_expected.to allow_value('703220b4e2cea9592caeb9f3013f6b1e5335c293').for(:version) }
it { is_expected.to allow_value('RELEASE').for(:version) }
it { is_expected.not_to allow_value('..1.2.3').for(:version) }
it { is_expected.not_to allow_value(' 1.2.3').for(:version) }
it { is_expected.not_to allow_value("1.2.3 \r\t").for(:version) }
it { is_expected.not_to allow_value("\r\t 1.2.3").for(:version) }
it { is_expected.not_to allow_value('1.2.3-4/../../').for(:version) }
it { is_expected.not_to allow_value('1.2.3-4%2e%2e%').for(:version) }
it { is_expected.not_to allow_value('../../../../../1.2.3').for(:version) }
it { is_expected.not_to allow_value('%2e%2e%2f1.2.3').for(:version) }
end
context 'pypi package' do
subject { create(:pypi_package) }
it { is_expected.to allow_value('0.1').for(:version) }
it { is_expected.to allow_value('2.0').for(:version) }
it { is_expected.to allow_value('1.2.0').for(:version) }
it { is_expected.to allow_value('0100!0.0').for(:version) }
it { is_expected.to allow_value('00!1.2').for(:version) }
it { is_expected.to allow_value('1.0a').for(:version) }
it { is_expected.to allow_value('1.0-a').for(:version) }
it { is_expected.to allow_value('1.0.a1').for(:version) }
it { is_expected.to allow_value('1.0a1').for(:version) }
it { is_expected.to allow_value('1.0-a1').for(:version) }
it { is_expected.to allow_value('1.0alpha1').for(:version) }
it { is_expected.to allow_value('1.0b1').for(:version) }
it { is_expected.to allow_value('1.0beta1').for(:version) }
it { is_expected.to allow_value('1.0rc1').for(:version) }
it { is_expected.to allow_value('1.0pre1').for(:version) }
it { is_expected.to allow_value('1.0preview1').for(:version) }
it { is_expected.to allow_value('1.0.dev1').for(:version) }
it { is_expected.to allow_value('1.0.DEV1').for(:version) }
it { is_expected.to allow_value('1.0.post1').for(:version) }
it { is_expected.to allow_value('1.0.rev1').for(:version) }
it { is_expected.to allow_value('1.0.r1').for(:version) }
it { is_expected.to allow_value('1.0c2').for(:version) }
it { is_expected.to allow_value('2012.15').for(:version) }
it { is_expected.to allow_value('1.0+5').for(:version) }
it { is_expected.to allow_value('1.0+abc.5').for(:version) }
it { is_expected.to allow_value('1!1.1').for(:version) }
it { is_expected.to allow_value('1.0c3').for(:version) }
it { is_expected.to allow_value('1.0rc2').for(:version) }
it { is_expected.to allow_value('1.0c1').for(:version) }
it { is_expected.to allow_value('1.0b2-346').for(:version) }
it { is_expected.to allow_value('1.0b2.post345').for(:version) }
it { is_expected.to allow_value('1.0b2.post345.dev456').for(:version) }
it { is_expected.to allow_value('1.2.rev33+123456').for(:version) }
it { is_expected.to allow_value('1.1.dev1').for(:version) }
it { is_expected.to allow_value('1.0b1.dev456').for(:version) }
it { is_expected.to allow_value('1.0a12.dev456').for(:version) }
it { is_expected.to allow_value('1.0b2').for(:version) }
it { is_expected.to allow_value('1.0.dev456').for(:version) }
it { is_expected.to allow_value('1.0c1.dev456').for(:version) }
it { is_expected.to allow_value('1.0.post456').for(:version) }
it { is_expected.to allow_value('1.0.post456.dev34').for(:version) }
it { is_expected.to allow_value('1.2+123abc').for(:version) }
it { is_expected.to allow_value('1.2+abc').for(:version) }
it { is_expected.to allow_value('1.2+abc123').for(:version) }
it { is_expected.to allow_value('1.2+abc123def').for(:version) }
it { is_expected.to allow_value('1.2+1234.abc').for(:version) }
it { is_expected.to allow_value('1.2+123456').for(:version) }
it { is_expected.to allow_value('1.2.r32+123456').for(:version) }
it { is_expected.to allow_value('1!1.2.rev33+123456').for(:version) }
it { is_expected.to allow_value('1.0a12').for(:version) }
it { is_expected.to allow_value('1.2.3-45+abcdefgh').for(:version) }
it { is_expected.to allow_value('v1.2.3').for(:version) }
it { is_expected.not_to allow_value('1.2.3-45-abcdefgh').for(:version) }
it { is_expected.not_to allow_value('..1.2.3').for(:version) }
it { is_expected.not_to allow_value(' 1.2.3').for(:version) }
it { is_expected.not_to allow_value("1.2.3 \r\t").for(:version) }
it { is_expected.not_to allow_value("\r\t 1.2.3").for(:version) }
it { is_expected.not_to allow_value('1./2.3').for(:version) }
it { is_expected.not_to allow_value('1.2.3-4/../../').for(:version) }
it { is_expected.not_to allow_value('1.2.3-4%2e%2e%').for(:version) }
it { is_expected.not_to allow_value('../../../../../1.2.3').for(:version) }
it { is_expected.not_to allow_value('%2e%2e%2f1.2.3').for(:version) }
end
context 'generic package' do
subject { build_stubbed(:generic_package) }
it { is_expected.to validate_presence_of(:version) }
it { is_expected.to allow_value('1.2.3').for(:version) }
it { is_expected.to allow_value('1.3.350').for(:version) }
it { is_expected.to allow_value('1.3.350-20201230123456').for(:version) }
it { is_expected.to allow_value('1.2.3-rc1').for(:version) }
it { is_expected.to allow_value('1.2.3g').for(:version) }
it { is_expected.to allow_value('1.2').for(:version) }
it { is_expected.to allow_value('1.2.bananas').for(:version) }
it { is_expected.to allow_value('v1.2.4-build').for(:version) }
it { is_expected.to allow_value('d50d836eb3de6177ce6c7a5482f27f9c2c84b672').for(:version) }
it { is_expected.to allow_value('this_is_a_string_only').for(:version) }
it { is_expected.not_to allow_value('..1.2.3').for(:version) }
it { is_expected.not_to allow_value(' 1.2.3').for(:version) }
it { is_expected.not_to allow_value("1.2.3 \r\t").for(:version) }
it { is_expected.not_to allow_value("\r\t 1.2.3").for(:version) }
it { is_expected.not_to allow_value('1.2.3-4/../../').for(:version) }
it { is_expected.not_to allow_value('1.2.3-4%2e%2e%').for(:version) }
it { is_expected.not_to allow_value('../../../../../1.2.3').for(:version) }
it { is_expected.not_to allow_value('%2e%2e%2f1.2.3').for(:version) }
it { is_expected.not_to allow_value('').for(:version) }
it { is_expected.not_to allow_value(nil).for(:version) }
end
context 'helm package' do
subject { build_stubbed(:helm_package) }
it { is_expected.not_to allow_value(nil).for(:version) }
it { is_expected.not_to allow_value('').for(:version) }
it { is_expected.to allow_value('v1.2.3').for(:version) }
it { is_expected.not_to allow_value('1.2.3').for(:version) }
end
it_behaves_like 'validating version to be SemVer compliant for', :npm_package
context 'nuget package' do
it_behaves_like 'validating version to be SemVer compliant for', :nuget_package
it { is_expected.to allow_value('1.2.3.4').for(:version) }
end
end
describe '#package_already_taken' do
context 'maven package' do
let!(:package) { create(:maven_package) }
it 'will allow a package of the same name' do
new_package = build(:maven_package, name: package.name)
expect(new_package).to be_valid
end
end
end
context "recipe uniqueness for conan packages" do
let!(:package) { create('conan_package') }
it "will allow a conan package with same project, name, version and package_type" do
new_package = build('conan_package', project: package.project, name: package.name, version: package.version)
new_package.conan_metadatum.package_channel = 'beta'
expect(new_package).to be_valid
end
it "will not allow a conan package with same recipe (name, version, metadatum.package_channel, metadatum.package_username, and package_type)" do
new_package = build('conan_package', project: package.project, name: package.name, version: package.version)
expect(new_package).not_to be_valid
expect(new_package.errors.to_a).to include("Package recipe already exists")
end
end
describe "#unique_debian_package_name" do
let!(:package) { create(:debian_package) }
it "will allow a Debian package with same project, name and version, but different distribution" do
new_package = build(:debian_package, project: package.project, name: package.name, version: package.version)
expect(new_package).to be_valid
end
it "will not allow a Debian package with same project, name, version and distribution" do
new_package = build(:debian_package, project: package.project, name: package.name, version: package.version)
new_package.debian_publication.distribution = package.debian_publication.distribution
expect(new_package).not_to be_valid
expect(new_package.errors.to_a).to include('Debian package already exists in Distribution')
end
it "will allow a Debian package with same project, name, version, but no distribution" do
new_package = build(:debian_package, project: package.project, name: package.name, version: package.version, published_in: nil)
expect(new_package).to be_valid
end
end
Packages::Package.package_types.keys.without('conan', 'debian').each do |pt|
context "project id, name, version and package type uniqueness for package type #{pt}" do
let(:package) { create("#{pt}_package") }
it "will not allow a #{pt} package with same project, name, version and package_type" do
new_package = build("#{pt}_package", project: package.project, name: package.name, version: package.version)
expect(new_package).not_to be_valid
expect(new_package.errors.to_a).to include("Name has already been taken")
end
end
end
end
describe '#destroy' do
let(:package) { create(:npm_package) }
let(:package_file) { package.package_files.first }
let(:project_statistics) { ProjectStatistics.for_project_ids(package.project.id).first }
it 'affects project statistics' do
expect { package.destroy! }
.to change { project_statistics.reload.packages_size }
.from(package_file.size).to(0)
end
end
describe '.by_name_and_file_name' do
let!(:package) { create(:npm_package) }
let!(:package_file) { package.package_files.first }
subject { described_class }
it 'finds a package with correct arguiments' do
expect(subject.by_name_and_file_name(package.name, package_file.file_name)).to eq(package)
end
it 'will raise error if not found' do
expect { subject.by_name_and_file_name('foo', 'foo-5.5.5.tgz') }.to raise_error(ActiveRecord::RecordNotFound)
end
end
context 'version scopes' do
let!(:package1) { create(:npm_package, version: '1.0.0') }
let!(:package2) { create(:npm_package, version: '1.0.1') }
let!(:package3) { create(:npm_package, version: '1.0.1') }
describe '.last_of_each_version' do
subject { described_class.last_of_each_version }
it 'includes only latest package per version' do
is_expected.to include(package1, package3)
is_expected.not_to include(package2)
end
end
describe '.has_version' do
subject { described_class.has_version }
before do
create(:maven_metadatum).package.update!(version: nil)
end
it 'includes only packages with version attribute' do
is_expected.to match_array([package1, package2, package3])
end
end
describe '.with_version' do
subject { described_class.with_version('1.0.1') }
it 'includes only packages with specified version' do
is_expected.to match_array([package2, package3])
end
end
describe '.without_version_like' do
let(:version_pattern) { '%.0.0%' }
subject { described_class.without_version_like(version_pattern) }
it 'includes packages without the version pattern' do
is_expected.to match_array([package2, package3])
end
end
end
context 'conan scopes' do
let!(:package) { create(:conan_package) }
describe '.with_conan_channel' do
subject { described_class.with_conan_channel('stable') }
it 'includes only packages with specified version' do
is_expected.to include(package)
end
end
describe '.with_conan_username' do
subject do
described_class.with_conan_username(
Packages::Conan::Metadatum.package_username_from(full_path: package.project.full_path)
)
end
it 'includes only packages with specified version' do
is_expected.to match_array([package])
end
end
end
describe '.without_nuget_temporary_name' do
let!(:package1) { create(:nuget_package) }
let!(:package2) { create(:nuget_package, name: Packages::Nuget::TEMPORARY_PACKAGE_NAME) }
subject { described_class.without_nuget_temporary_name }
it 'does not include nuget temporary packages' do
expect(subject).to eq([package1])
end
end
describe '.processed' do
let!(:package1) { create(:nuget_package) }
let!(:package2) { create(:npm_package) }
let!(:package3) { create(:nuget_package) }
subject { described_class.processed }
it { is_expected.to match_array([package1, package2, package3]) }
context 'with temporary packages' do
let!(:package1) { create(:nuget_package, name: Packages::Nuget::TEMPORARY_PACKAGE_NAME) }
it { is_expected.to match_array([package2, package3]) }
end
end
describe '.limit_recent' do
let!(:package1) { create(:nuget_package) }
let!(:package2) { create(:nuget_package) }
let!(:package3) { create(:nuget_package) }
subject { described_class.limit_recent(2) }
it { is_expected.to match_array([package3, package2]) }
end
context 'with several packages' do
let_it_be(:package1) { create(:nuget_package, name: 'FooBar') }
let_it_be(:package2) { create(:nuget_package, name: 'foobar') }
let_it_be(:package3) { create(:npm_package) }
let_it_be(:package4) { create(:npm_package) }
describe '.pluck_names' do
subject { described_class.pluck_names }
it { is_expected.to match_array([package1, package2, package3, package4].map(&:name)) }
end
describe '.pluck_versions' do
subject { described_class.pluck_versions }
it { is_expected.to match_array([package1, package2, package3, package4].map(&:version)) }
end
describe '.with_name_like' do
subject { described_class.with_name_like(name_term) }
context 'with downcase name' do
let(:name_term) { 'foobar' }
it { is_expected.to match_array([package1, package2]) }
end
context 'with prefix wildcard' do
let(:name_term) { '%ar' }
it { is_expected.to match_array([package1, package2]) }
end
context 'with suffix wildcard' do
let(:name_term) { 'foo%' }
it { is_expected.to match_array([package1, package2]) }
end
context 'with surrounding wildcards' do
let(:name_term) { '%ooba%' }
it { is_expected.to match_array([package1, package2]) }
end
end
describe '.search_by_name' do
let(:query) { 'oba' }
subject { described_class.search_by_name(query) }
it { is_expected.to match_array([package1, package2]) }
end
describe '.with_normalized_pypi_name' do
let_it_be(:pypi_package) { create(:pypi_package, name: 'Foo.bAr---BAZ_buz') }
subject { described_class.with_normalized_pypi_name('foo-bar-baz-buz') }
it { is_expected.to match_array([pypi_package]) }
end
describe '.displayable' do
let_it_be(:hidden_package) { create(:maven_package, :hidden) }
let_it_be(:processing_package) { create(:maven_package, :processing) }
let_it_be(:error_package) { create(:maven_package, :error) }
subject { described_class.displayable }
it 'does not include non-displayable packages', :aggregate_failures do
is_expected.to include(error_package)
is_expected.not_to include(hidden_package)
is_expected.not_to include(processing_package)
end
end
describe '.with_status' do
let_it_be(:hidden_package) { create(:maven_package, :hidden) }
subject { described_class.with_status(:hidden) }
it 'returns packages with specified status' do
is_expected.to match_array([hidden_package])
end
end
end
describe '.select_distinct_name' do
let_it_be(:nuget_package) { create(:nuget_package) }
let_it_be(:nuget_packages) { create_list(:nuget_package, 3, name: nuget_package.name, project: nuget_package.project) }
let_it_be(:maven_package) { create(:maven_package) }
let_it_be(:maven_packages) { create_list(:maven_package, 3, name: maven_package.name, project: maven_package.project) }
subject { described_class.select_distinct_name }
it 'returns only distinct names' do
packages = subject
expect(packages.size).to eq(2)
expect(packages.pluck(:name)).to match_array([nuget_package.name, maven_package.name])
end
end
describe '.order_by_package_file' do
let_it_be(:project) { create(:project) }
let_it_be(:package1) { create(:maven_package, project: project) }
let_it_be(:package2) { create(:maven_package, project: project) }
it 'orders packages their associated package_file\'s created_at date', :aggregate_failures do
expect(project.packages.order_by_package_file).to match_array([package1, package1, package1, package2, package2, package2])
create(:package_file, :xml, package: package1)
expect(project.packages.order_by_package_file).to match_array([package1, package1, package1, package2, package2, package2, package1])
end
end
describe '#versions' do
let_it_be(:project) { create(:project) }
let_it_be(:package) { create(:maven_package, project: project) }
let_it_be(:package2) { create(:maven_package, project: project) }
let_it_be(:package3) { create(:maven_package, project: project, name: 'foo') }
it 'returns other package versions of the same package name belonging to the project' do
expect(package.versions).to contain_exactly(package2)
end
it 'does not return different packages' do
expect(package.versions).not_to include(package3)
end
end
describe '#pipeline' do
let_it_be_with_refind(:package) { create(:maven_package) }
context 'package without pipeline' do
it 'returns nil if there is no pipeline' do
expect(package.pipeline).to be_nil
end
end
context 'package with pipeline' do
let_it_be(:pipeline) { create(:ci_pipeline) }
before do
package.build_infos.create!(pipeline: pipeline)
end
it 'returns the pipeline' do
expect(package.pipeline).to eq(pipeline)
end
end
end
describe '#tag_names' do
let_it_be(:package) { create(:nuget_package) }
subject { package.tag_names }
it { is_expected.to eq([]) }
context 'with tags' do
let(:tags) { %w(tag1 tag2 tag3) }
before do
tags.each { |t| create(:packages_tag, name: t, package: package) }
end
it { is_expected.to contain_exactly(*tags) }
end
end
describe '#debian_incoming?' do
let(:package) { build(:package) }
subject { package.debian_incoming? }
it { is_expected.to eq(false) }
context 'with debian_incoming' do
let(:package) { create(:debian_incoming) }
it { is_expected.to eq(true) }
end
context 'with debian_package' do
let(:package) { create(:debian_package) }
it { is_expected.to eq(false) }
end
end
describe '#debian_package?' do
let(:package) { build(:package) }
subject { package.debian_package? }
it { is_expected.to eq(false) }
context 'with debian_incoming' do
let(:package) { create(:debian_incoming) }
it { is_expected.to eq(false) }
end
context 'with debian_package' do
let(:package) { create(:debian_package) }
it { is_expected.to eq(true) }
end
end
describe 'plan_limits' do
Packages::Package.package_types.keys.without('composer').each do |pt|
plan_limit_name = if pt == 'generic'
"#{pt}_packages_max_file_size"
else
"#{pt}_max_file_size"
end
context "File size limits for #{pt}" do
let(:package) { create("#{pt}_package") }
it "plan_limits includes column #{plan_limit_name}" do
expect { package.project.actual_limits.send(plan_limit_name) }
.not_to raise_error(NoMethodError)
end
end
end
end
describe '#original_build_info' do
let_it_be_with_refind(:package) { create(:npm_package) }
context 'without build_infos' do
it 'returns nil' do
expect(package.original_build_info).to be_nil
end
end
context 'with build_infos' do
let_it_be(:first_build_info) { create(:package_build_info, :with_pipeline, package: package) }
let_it_be(:second_build_info) { create(:package_build_info, :with_pipeline, package: package) }
it 'returns the first build info' do
expect(package.original_build_info).to eq(first_build_info)
end
end
end
describe '#package_settings' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:package) { create(:maven_package, project: project) }
it 'returns the namespace package_settings' do
expect(package.package_settings).to eq(group.package_settings)
end
end
describe '#sync_maven_metadata' do
let_it_be(:user) { create(:user) }
let_it_be(:package) { create(:maven_package) }
subject { package.sync_maven_metadata(user) }
shared_examples 'not enqueuing a sync worker job' do
it 'does not enqueue a sync worker job' do
expect(::Packages::Maven::Metadata::SyncWorker)
.not_to receive(:perform_async)
subject
end
end
it 'enqueues a sync worker job' do
expect(::Packages::Maven::Metadata::SyncWorker)
.to receive(:perform_async).with(user.id, package.project.id, package.name)
subject
end
context 'with no user' do
let(:user) { nil }
it_behaves_like 'not enqueuing a sync worker job'
end
context 'with a versionless maven package' do
let_it_be(:package) { create(:maven_package, version: nil) }
it_behaves_like 'not enqueuing a sync worker job'
end
context 'with a non maven package' do
let_it_be(:package) { create(:npm_package) }
it_behaves_like 'not enqueuing a sync worker job'
end
end
context 'destroying a composer package' do
let_it_be(:package_name) { 'composer-package-name' }
let_it_be(:json) { { 'name' => package_name } }
let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json } ) }
let!(:package) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
before do
Gitlab::Composer::Cache.new(project: project, name: package_name).execute
package.composer_metadatum.reload
end
it 'schedule the update job' do
expect(::Packages::Composer::CacheUpdateWorker).to receive(:perform_async).with(project.id, package_name, package.composer_metadatum.version_cache_sha)
package.destroy!
end
end
end
| 39.636265 | 181 | 0.655609 |
f8af4426e6d8e55f464aa6a9ea037c09af9c98f1 | 178 | class AddStripeIdToAccounts < ActiveRecord::Migration[5.0]
def change
add_column :accounts, :stripe_id, :string
add_index :accounts, :stripe_id, unique: true
end
end
| 25.428571 | 58 | 0.747191 |
5d49264e2da968198746b361ad0e2e6a5893daae | 608 | class ClearanceCreateUsers < ActiveRecord::Migration
def self.up
create_table(:users) do |t|
t.string :email
t.string :encrypted_password, :limit => 128
t.string :salt, :limit => 128
t.string :token, :limit => 128
t.datetime :token_expires_at
t.boolean :email_confirmed, :default => false, :null => false
t.boolean :admin, :default => false, :null => false
end
add_index :users, [:id, :token]
add_index :users, :email
add_index :users, :token
end
def self.down
drop_table :users
end
end
| 27.636364 | 68 | 0.590461 |
e9cc9d1031fe14f1ca4dfa8b377a756bd296dd93 | 3,794 | require 'virtualbox/com/ffi/load_ffi'
require 'virtualbox/ext/logger'
module VirtualBox
module COM
class FFIInterface < BaseInterface
extend ::FFI::Library
include Logger
# Constant used to initialize the XPCOM C interface
XPCOMC_VERSION = 0x00020000
# VBOXXPCOMC struct. This typically won't be used.
attr_reader :xpcom
# The VirtualBox and Session interfaces, both of which are extremely
# important in interfacing with the VirtualBox API. Once these have been
# initialized, all other parts of the API can be accessed via these
# instances.
attr_reader :virtualbox
attr_reader :session
class << self
# Sets up the FFI interface and also initializes the interface,
# returning an instance of {FFIInterface}.
def create(lib_path=nil)
setup(lib_path)
new
end
# Sets up the FFI interface by specifying the FFI library path
# and attaching the initial function (which can't be done until
# the FFI library is specified).
#
# @param [String] lib_path
def setup(lib_path=nil)
# Setup the path to the C library
lib_path ||= "/Applications/VirtualBox.app/Contents/MacOS/VBoxXPCOMC.dylib"
# Attach to the interface
ffi_lib lib_path
attach_function :VBoxGetXPCOMCFunctions, [:uint], :pointer
end
end
def initialize
super
initialize_com
end
# Initializes the COM interface with XPCOM. This sets up the `virtualbox`,
# `session`, and `xpcom` attributes. This should only be called once.
def initialize_com
# Get the pointer to the XPCOMC struct which contains the functions
# to initialize
xpcom_pointer = self.class.VBoxGetXPCOMCFunctions(XPCOMC_VERSION)
@xpcom = FFI::VBOXXPCOMC.new(xpcom_pointer)
initialize_singletons
end
# Initializes the VirtualBox and Session interfaces. It goes through
# the various directories until it finds a working pair.
def initialize_singletons
interface_dir = File.expand_path(File.join(File.dirname(__FILE__), "interface"))
Dir[File.join(interface_dir, "*")].each do |f|
if File.directory?(f)
return if initialize_for_version(File.basename(f))
end
end
end
# Initializes the FFI interface for a specific version.
def initialize_for_version(version)
logger.debug("FFI init: Trying version #{version}")
# Setup the FFI classes
COM::Util.set_interface_version(version)
virtualbox_klass = COM::Util.versioned_interface(:VirtualBox)
session_klass = COM::Util.versioned_interface(:Session)
# Setup the OUT pointers
virtualbox_ptr = ::FFI::MemoryPointer.new(:pointer)
session_ptr = ::FFI::MemoryPointer.new(:pointer)
# Call the initialization functions
@xpcom[:pfnComInitialize].call(virtualbox_klass::IID_STR, virtualbox_ptr, session_klass::IID_STR, session_ptr)
# Read the pointers from the results and fail if either are null,
# meaning that the initialization was not successful
virtualbox_ptr = virtualbox_ptr.read_pointer
session_ptr = session_ptr.read_pointer
return false if virtualbox_ptr.null? || session_ptr.null?
@virtualbox = virtualbox_klass.new(Implementer::FFI, self, virtualbox_ptr)
@session = session_klass.new(Implementer::FFI, self, session_ptr)
# Make a call to version to verify no exceptions are raised
@virtualbox.implementer.valid? && @session.implementer.valid?
logger.debug(" -- Valid version")
true
end
end
end
end
| 35.457944 | 118 | 0.666315 |
186bbc35d930e7b02fcc251b5104d87c3146b388 | 590 | Pod::Spec.new do |s|
s.name = "CasualUtility"
s.version = "1.0"
s.summary = "Swift tools for common tasks"
s.homepage = "https://github.com/lacyrhoades/CasualUtility"
s.license = { type: 'MIT', file: 'LICENSE' }
s.author = { "Lacy Rhoades" => "[email protected]" }
s.source = { git: "https://github.com/lacyrhoades/CasualUtility.git" }
s.ios.deployment_target = '11.0'
s.requires_arc = true
s.ios.source_files = 'Source/**/*.swift'
s.exclude_files = 'Source/**/*Test.swift'
s.dependency 'Disk'
end
| 39.333333 | 82 | 0.586441 |
183df461318d10517b6fe2328027868a56bb7328 | 3,800 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Attempt to read encrypted secrets from `config/secrets.yml.enc`.
# Requires an encryption key in `ENV["RAILS_MASTER_KEY"]` or
# `config/secrets.yml.key`.
config.read_encrypted_secrets = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
config.cache_store = :redis_store if ENV['REDIS_URL']
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "hn-rails_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.304348 | 102 | 0.756579 |
e8805c8c6c277b73ac2ea9babbf34b546c1d40c5 | 299 | class CreateTestResults < ActiveRecord::Migration[6.0]
def change
create_table :test_results do |t|
t.string :client_ip
t.integer :question_version
t.float :economic
t.float :diplomatic
t.float :civil
t.float :societal
t.timestamps
end
end
end
| 19.933333 | 54 | 0.655518 |
79a65cce1d65722a394b043a40756dcd1d29ee00 | 162 | class AlterPlacesAddUserIdColumn < ActiveRecord::Migration[5.0]
def change
add_column :places, :user_id, :integer
add_index :places, :user_id
end
end
| 23.142857 | 63 | 0.746914 |
7a75e3760b06101e558d08f3b9bd29c26ddbcb2f | 83 | class SinglePlatform::Configuration
attr_accessor :client_id,:secret,:api_key
end | 27.666667 | 43 | 0.843373 |
f89b6714fe22b6acb489b9031d9da32cb730b8ea | 1,214 | # frozen_string_literal: true
module Types
module Iterations
class CadenceType < BaseObject
graphql_name 'IterationCadence'
description 'Represents an iteration cadence'
authorize :read_iteration_cadence
field :id, ::Types::GlobalIDType[::Iterations::Cadence], null: false,
description: 'Global ID of the iteration cadence.'
field :title, GraphQL::STRING_TYPE, null: false,
description: 'Title of the iteration cadence.'
field :duration_in_weeks, GraphQL::INT_TYPE, null: true,
description: 'Duration in weeks of the iterations within this cadence.'
field :iterations_in_advance, GraphQL::INT_TYPE, null: true,
description: 'Future iterations to be created when iteration cadence is set to automatic.'
field :start_date, Types::TimeType, null: true,
description: 'Timestamp of the iteration cadence start date.'
field :automatic, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Whether the iteration cadence should automatically generate future iterations.'
field :active, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Whether the iteration cadence is active.'
end
end
end
| 35.705882 | 101 | 0.712521 |
f747e8f4bffe730dc38e29ea87a9ccf872084889 | 623 | module Xdrgen::AST
module Declarations
class Base < Treetop::Runtime::SyntaxNode
TYPE_NODES = [
Typespecs::Base,
Concerns::NestedDefinition
]
def type
search(type_s) do |node|
TYPE_NODES.any?{|t| node.is_a?(t)}
end
end
private
def search(cur_el, &predicate)
return cur_el if predicate.call(cur_el)
return if cur_el.elements.blank?
cur_el.elements.each do |next_el|
child_result = search(next_el, &predicate)
return child_result if child_result.present?
end
end
end
end
end | 23.074074 | 54 | 0.598716 |
f741401ad3355be7a458265994ca121a00774181 | 360 | # Have Mini Profiler show up on the right
Rack::MiniProfiler.config.position = "right"
# Have Mini Profiler start in hidden mode - display with short cut (defaulted to 'Alt+P')
# Rack::MiniProfiler.config.start_hidden = true
# Don't collect backtraces on SQL queries that take less than 5 ms to execute
# Rack::MiniProfiler.config.backtrace_threshold_ms = 5
| 40 | 89 | 0.772222 |
bfd322bb2c09964709b6292e16a4d2967d3a282d | 593 | # legge un file "merged.txt", seleziona i treni che arrivano il giorno dopo la partenza
# e li ordina per ora di arrivo crescente
dir = File.join(Dir.home, "dev", "ruby", "treni_misc", "liste_treni", "2016-03-04")
lines = File.readlines(File.join(dir, "merged.txt"))
lines.keep_if do |line|
arr = line.split("|")
arr[3]>arr[5]
end
lines.sort!{|a, b| a.split("|")[5] <=> b.split("|")[5]}
File.open(File.join(dir, "trans_day.txt"), 'w'){|f| lines.each{|l| f.write(l)}}
File.open(File.join(dir, "trans_day_(only arrival time).txt"), 'w'){|f| lines.each{|l| f.write(l.split("|")[5]+"\n")}} | 39.533333 | 118 | 0.642496 |
e99d476ea152ea2d290e0e76bdcae53a25116fc5 | 2,574 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ManagedApplications::Mgmt::V2018_06_01
module Models
#
# SKU for the resource.
#
class Sku
include MsRestAzure
# @return [String] The SKU name.
attr_accessor :name
# @return [String] The SKU tier.
attr_accessor :tier
# @return [String] The SKU size.
attr_accessor :size
# @return [String] The SKU family.
attr_accessor :family
# @return [String] The SKU model.
attr_accessor :model
# @return [Integer] The SKU capacity.
attr_accessor :capacity
#
# Mapper for Sku class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'Sku',
type: {
name: 'Composite',
class_name: 'Sku',
model_properties: {
name: {
client_side_validation: true,
required: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
tier: {
client_side_validation: true,
required: false,
serialized_name: 'tier',
type: {
name: 'String'
}
},
size: {
client_side_validation: true,
required: false,
serialized_name: 'size',
type: {
name: 'String'
}
},
family: {
client_side_validation: true,
required: false,
serialized_name: 'family',
type: {
name: 'String'
}
},
model: {
client_side_validation: true,
required: false,
serialized_name: 'model',
type: {
name: 'String'
}
},
capacity: {
client_side_validation: true,
required: false,
serialized_name: 'capacity',
type: {
name: 'Number'
}
}
}
}
}
end
end
end
end
| 25.235294 | 70 | 0.444056 |
0899dad34ca81ab744d4424f080add87c78f0620 | 788 | cask 'termius-beta' do
version '5.4.1'
sha256 '5544d3d79825b59612977295e55d5be58786baafffddae65bd322241d9e26b8c'
# s3.amazonaws.com/termius.desktop.autoupdate/mac was verified as official when first introduced to the cask
url 'https://www.termius.com/beta/download/mac/Termius+Beta.dmg'
name 'Termius Beta'
homepage 'https://www.termius.com/beta-program'
app 'Termius Beta.app'
zap trash: [
'~/.termius',
'~/Library/Application Support/Termius Beta',
'~/Library/Saved Application State/com.termius-beta.mac.savedState',
'/Library/Preferences/com.termius-beta.mac.helper.plist',
'/Library/Preferences/com.termius-beta.mac.plist',
'~/Library/Logs/Termius Beta',
]
end
| 37.52381 | 110 | 0.663706 |
115d0e37552f7a6543482bb2d418b94a00cda074 | 890 | cask 'splice' do
version '1.6.12-201601071747'
sha256 '49052352bb81cbd104b73241401e4f4a6d333749f12db32839c76e36dfc96784'
# amazonaws.com is the official download host per the appcast feed
url "https://s3-us-west-1.amazonaws.com/spliceosx/Splice.app-#{version}.zip"
appcast 'https://splice.com/appcast.xml',
checkpoint: 'aa13ed8622cf3aaa5a58590cc6fb7fa5493b7c2c400a60e07c6bf284124152d1'
name 'Splice'
homepage 'https://splice.com/'
license :gratis
installer script: 'Splice Installer.app/Contents/MacOS/Splice Installer',
args: ['-q'],
sudo: false
uninstall quit: 'com.splice.Splice',
delete: '/Applications/Splice.app'
zap delete: [
'~/Library/Application Support/*Splice*',
'~/Library/Caches/com.splice*',
'~/Library/Preferences/com.splice*',
]
end
| 34.230769 | 88 | 0.660674 |
625d663955d2112b8412ae6ebb6887038eed8527 | 1,349 | # coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "slackdown/version"
Gem::Specification.new do |spec|
spec.name = "slackdown"
spec.version = Slackdown::VERSION
spec.authors = ["Bob Lail"]
spec.email = ["[email protected]"]
spec.summary = %q{Converts Markdown text to Slack's simplified markdown}
spec.description = %q{A converter for Kramdown that converts GitHub-Flavored Markdown to Slack's simplified Markdown}
spec.homepage = "https://github.com/houston/slackdown"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_dependency "kramdown", "~> 2.3"
spec.add_dependency "kramdown-parser-gfm", "~> 1.0", ">= 1.0.1"
spec.add_development_dependency "bundler"
spec.add_development_dependency "rake"
spec.add_development_dependency "minitest", "~> 5.0"
spec.add_development_dependency "minitest-reporters"
spec.add_development_dependency "minitest-reporters-turn_reporter"
spec.add_development_dependency "shoulda-context"
spec.add_development_dependency "pry"
end
| 40.878788 | 121 | 0.693847 |
1d53c32b90b2b187f389fab1943dd6e4aa701490 | 798 | require 'spec_helper'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../dummy/config/environment', __FILE__)
# Prevent database truncation if the environment is production
abort("The Rails environment is running in production mode!") if Rails.env.production?
require 'rspec/rails'
require 'factory_bot_rails'
begin
ActiveRecord::Migration.maintain_test_schema!
rescue ActiveRecord::PendingMigrationError => e
puts e.to_s.strip
exit 1
end
RSpec.configure do |config|
config.use_transactional_fixtures = true
config.infer_base_class_for_anonymous_controllers = false
config.order = "random"
config.infer_spec_type_from_file_location!
# Filter lines from Rails gems in backtraces.
config.filter_rails_from_backtrace!
config.include FactoryBot::Syntax::Methods
end
| 24.181818 | 86 | 0.793233 |
11308542c380b6973e0c1c8699d45d9619c5c404 | 1,642 | # Puma can serve each request in a thread from an internal thread pool.
# The `threads` method setting takes two numbers: a minimum and maximum.
# Any libraries that use thread pools should be configured to match
# the maximum value specified for Puma. Default is set to 5 threads for minimum
# and maximum; this matches the default thread size of Active Record.
#
max_threads_count = ENV.fetch('RAILS_MAX_THREADS') { 5 }
min_threads_count = ENV.fetch('RAILS_MIN_THREADS') { max_threads_count }
threads min_threads_count, max_threads_count
# Specifies the `port` that Puma will listen on to receive requests; default is 3000.
#
port ENV.fetch('PORT') { 3000 }
# Specifies the `environment` that Puma will run in.
#
environment ENV.fetch('RAILS_ENV') { 'development' }
# Specifies the `pidfile` that Puma will use.
pidfile ENV.fetch('PIDFILE') { 'tmp/pids/server.pid' }
# Specifies the number of `workers` to boot in clustered mode.
# Workers are forked web server processes. If using threads and workers together
# the concurrency of the application would be max `threads` * `workers`.
# Workers do not work on JRuby or Windows (both of which do not support
# processes).
#
# workers ENV.fetch("WEB_CONCURRENCY") { 2 }
workers ENV.fetch('WEB_CONCURRENCY') { 1 }
preload_app!
# Use the `preload_app!` method when specifying a `workers` number.
# This directive tells Puma to first boot the application and load code
# before forking the application. This takes advantage of Copy On Write
# process behavior so workers use less memory.
#
# preload_app!
# Allow puma to be restarted by `rails restart` command.
plugin :tmp_restart
| 39.095238 | 85 | 0.758222 |
7ac244d6eaff1f2e508c1584fdaae6a79ed8b6ba | 422 | module ApplicationHelper
################
# 定数宣言
# MeetingType IDs
FREE_MEETING_TYPE_ID = 1
PROJECT_MEETING_TYPE_ID = 2
GROUP_MEETING_TYPE_ID = 3
# ReceiverType IDs
TO_TYPE_ID = 1
CC_TYPE_ID = 2
BCC_TYPE_ID = 3
# グループ会議のメンバー範囲
SELECT_GROUP_ID = 1
PROGENY_GROUP_ID = 2
RELATION_GROUP_ID = 3
# プロジェクト会議のメンバー範囲
SELECT_PROJECT_ID = 1
PROGENY_PROJECT_ID = 2
RELATION_PROJECT_ID = 3
end
| 16.230769 | 29 | 0.701422 |
6238fd39f9d4c0b2fbc6338528db3f5b08f226bb | 416 | # frozen_string_literal: true
require 'test_helper'
class ThreadSafetyTest < ActionDispatch::IntegrationTest
include RouteTranslator::ConfigurationHelper
def setup
setup_config
end
def teardown
teardown_config
end
def test_i18n_locale_thread_safe
config_default_locale_settings 'en'
get '/es/dummy'
assert_equal 'es', @response.body
assert_equal :en, I18n.locale
end
end
| 16.64 | 56 | 0.757212 |
e80a7e0f0cd94d84f928a3e86687d3f794d76747 | 36,564 | require 'rubygems' # Will eventually be removed when this file is deprecated
require 'fileutils'
require 'getoptlong'
require 'net/http'
require 'net/https'
require 'net/ssh'
require 'rhc/vendor/sshkey'
require 'resolv'
require 'uri'
require 'highline/import'
require 'rhc'
require 'rhc/rest'
require 'rhc/helpers'
require 'rhc/config'
require 'rhc/wizard'
require 'rhc/targz'
require 'rhc/json'
module RHC
DEFAULT_MAX_LENGTH = 16
APP_NAME_MAX_LENGTH = 32
MAX_RETRIES = 7
DEFAULT_DELAY = 2.0
API = "1.1.3"
PATTERN_VERSION=/\A\d+\.\d+\.\d+\z/
@read_timeout = 120
@connect_timeout = 20
@mydebug = false
@@api_version = "?.?.?"
# reset lines
# \r moves the cursor to the beginning of line
# ANSI escape code to clear line from cursor to end of line
# "\e" is an alternative to "\033"
# cf. http://en.wikipedia.org/wiki/ANSI_escape_code
CLEAR_LINE = "\r" + "\e[0K"
DEBUG_INGORE_KEYS = {
'result' => nil,
'debug' => nil,
'exit_code' => nil,
'messages' => nil,
'data' => nil,
'api' => nil
}
def self.timeout(*vals)
vals.each do |val|
if val
unless val.to_i > 0
puts 'Timeout must be specified as a number greater than 0'
exit 1
end
@read_timeout = [val.to_i, @read_timeout].max
return @read_timeout
end
end
end
def self.connect_timeout(*vals)
vals.each do |val|
if val
unless val.to_i > 0
puts 'Timeout must be specified as a number greater than 0'
exit 1
end
@connect_timeout = [val.to_i, @connect_timeout].max
return @connect_timeout
end
end
end
def self.debug(bool)
@mydebug = bool
end
def self.update_server_api_v(dict)
if !dict['api'].nil? && (dict['api'] =~ PATTERN_VERSION)
@@api_version = dict['api']
end
end
def self.check_version
if @@api_version =~ PATTERN_VERSION
if API != @@api_version
puts "\nNOTICE: Client API version (#{API}) does not match the server (#{@@api_version}).\nThough requests may succeed, you should consider updating your client tools.\n\n"
end
end
end
def self.delay(time, adj=DEFAULT_DELAY)
(time*=adj).to_int
end
def self.json_encode(data)
RHC::Json.encode(data)
end
def self.json_decode(json)
RHC::Json.decode(json)
end
def self.generate_json(data)
data['api'] = API
json = json_encode(data)
json
end
def self.get_cartridges_list(libra_server, net_http, cart_type="standalone", print_result=nil)
puts "Obtaining list of cartridges (please excuse the delay)..."
data = {'cart_type' => cart_type}
if @mydebug
data[:debug] = true
end
print_post_data(data)
json_data = generate_json(data)
url = URI.parse("https://#{libra_server}/broker/cartlist")
response = http_post(net_http, url, json_data, "none")
unless response.code == '200'
print_response_err(response)
return []
end
begin
json_resp = json_decode(response.body)
rescue RHC::JsonError
exit 1
end
update_server_api_v(json_resp)
if print_result
print_response_success(json_resp)
end
begin
carts = (json_decode(json_resp['data']))['carts']
rescue RHC::JsonError
exit 1
end
carts
end
def self.get_cartridge_listing(carts, sep, libra_server, net_http, cart_type="standalone", print_result=nil)
carts = get_cartridges_list(libra_server, net_http, cart_type, print_result) if carts.nil?
carts.join(sep)
end
# Invalid chars (") ($) (^) (<) (>) (|) (%) (/) (;) (:) (,) (\) (*) (=) (~)
def self.check_rhlogin(rhlogin)
if rhlogin
if rhlogin =~ /["\$\^<>\|%\/;:,\\\*=~]/
puts 'OpenShift login may not contain any of these characters: (\") ($) (^) (<) (>) (|) (%) (/) (;) (:) (,) (\) (*) (=) (~)'
return false
end
else
puts "OpenShift login is required"
return false
end
true
end
def self.check_app(app)
check_field(app, 'application', APP_NAME_MAX_LENGTH)
end
def self.check_namespace(namespace)
check_field(namespace, 'namespace', DEFAULT_MAX_LENGTH)
end
def self.check_key(keyname)
check_field(keyname, 'key name', DEFAULT_MAX_LENGTH, /[^0-9a-zA-Z]/,
'contains invalid characters! Only alpha-numeric characters allowed.')
end
def self.check_field(field, type, max=0, val_regex=/[^0-9a-zA-Z]/,
regex_failed_error='contains non-alphanumeric characters!')
if field
if field =~ val_regex
say "#{type} " + regex_failed_error
return false
end
if max != 0 && field.length > max
say "maximum #{type} size is #{max} characters"
return false
end
else
say "#{type} is required"
return false
end
field
end
def self.print_post_data(h)
if (@mydebug)
puts 'Submitting form:'
h.each do |k,v|
if k.to_s != 'password'
puts "#{k.to_s}: #{v.to_s}"
else
print 'password: '
for i in (1..v.length)
print 'X'
end
puts ''
end
end
end
end
def self.get_user_info(libra_server, rhlogin, password, net_http, print_result, not_found_message=nil)
data = {'rhlogin' => rhlogin}
if @mydebug
data[:debug] = true
end
print_post_data(data)
json_data = generate_json(data)
url = URI.parse("https://#{libra_server}/broker/userinfo")
response = http_post(net_http, url, json_data, password)
unless response.code == '200'
if response.code == '404'
if not_found_message
puts not_found_message
else
puts "A user with rhlogin '#{rhlogin}' does not have a registered domain. Be sure to run 'rhc domain create' before using the other rhc tools."
end
exit 99
elsif response.code == '401'
puts "Invalid user credentials"
exit 97
else
print_response_err(response)
end
exit 1
end
begin
json_resp = json_decode(response.body)
rescue RHC::JsonError
exit 1
end
update_server_api_v(json_resp)
if print_result
print_response_success(json_resp)
end
begin
user_info = json_decode(json_resp['data'].to_s)
rescue RHC::JsonError
exit 1
end
user_info
end
# Public: Get a list of ssh keys
#
# type - The String type RSA or DSS.
# libra_server - The String DNS for the broker
# rhlogin - The String login name
# password - The String password for login
# net_http - The NET::HTTP Object to use
#
# Examples
#
# RHC::get_ssh_keys('openshift.redhat.com',
# '[email protected]',
# 'mypassword',
# RHC::Config.default_proxy)
# # => { "ssh_type" => "ssh-rsa",
# "ssh_key" => "AAAAB3NzaC1yc2EAAAADAQAB....",
# "fingerprint" => "ea:08:e3:c7:e3:c3:8e:6a:66:34:65:e4:56:f4:3e:ff"}
#
# FIXME! Exits on failure! Should return something instead
#
# Returns Hash on success or exits on failure
def self.get_ssh_keys(libra_server, rhlogin, password, net_http)
data = {'rhlogin' => rhlogin, 'action' => 'list-keys'}
if @mydebug
data[:debug] = true
end
print_post_data(data)
json_data = generate_json(data)
url = URI.parse("https://#{libra_server}/broker/ssh_keys")
response = http_post(net_http, url, json_data, password)
unless response.code == '200'
if response.code == '401'
puts "Invalid user credentials"
exit 97
else
print_response_err(response)
end
exit 1
end
begin
json_resp = json_decode(response.body)
rescue RHC::JsonError
exit 1
end
update_server_api_v(json_resp)
begin
ssh_keys = (json_decode(json_resp['data'].to_s))
rescue RHC::JsonError
exit 1
end
# Inject public fingerprint into key.
begin
if ssh_keys['ssh_type'].nil? or ssh_keys['ssh_type'].empty?
ssh_keys['fingerprint'] = nil
else
ssh_keys['fingerprint'] = \
Net::SSH::KeyFactory.load_data_public_key(
"#{ssh_keys['ssh_type']} #{ssh_keys['ssh_key']}").fingerprint
end
rescue NoMethodError
#older net/ssh (mac for example)
tempfile = `mktemp /tmp/openshift.XXXXXXXX`
`echo "#{ssh_keys['ssh_type']} #{ssh_keys['ssh_key']}" > #{tempfile}`
ssh_keys['fingerprint'] = `ssh-keygen -lf #{tempfile}`.split(' ')[1]
rescue Net::SSH::Exception, NotImplementedError, OpenSSL::PKey::PKeyError
# Could be a new unsupported key type or invalid data on the server
ssh_keys['fingerprint'] = 'Key type is not recognized. Please check this key is valid.'
end
if ssh_keys['keys'] && ssh_keys['keys'].kind_of?(Hash)
ssh_keys['keys'].each do |name, keyval|
type = keyval['type']
key = keyval['key']
begin
ssh_keys['keys'][name]['fingerprint'] = \
Net::SSH::KeyFactory.load_data_public_key(
"#{type} #{key}").fingerprint
rescue NoMethodError
#older net/ssh (mac for example)
tempfile = `mktemp /tmp/openshift.XXXXXXXX`
`echo "#{type} #{key}" > #{tempfile}`
ssh_keys['keys'][name]['fingerprint'] = `ssh-keygen -lf #{tempfile}`.split(' ')[1]
rescue NotImplementedError, Net::SSH::Exception, OpenSSL::PKey::PKeyError
# Could be a new unsupported key type or invalid data on the server
ssh_keys['keys'][name]['fingerprint'] = 'Key type is not recognized. Please check this key is valid.'
end
end
end
ssh_keys
end
def self.get_password
password = nil
begin
password = ask_password
rescue Interrupt
puts "\n"
exit 1
end
puts "\n"
password
end
def self.http_post(http, url, json_data, password)
req = http::Post.new(url.path)
puts "Contacting #{url.scheme}://#{url.host}" if @mydebug
req.set_form_data({'json_data' => json_data, 'password' => password})
req['User-Agent'] = RHC::Helpers.user_agent
http = http.new(url.host, url.port)
http.open_timeout = @connect_timeout
http.read_timeout = @read_timeout
if url.scheme == "https"
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
end
begin
response = http.start {|http| http.request(req)}
if response.code == '404' && response.content_type == 'text/html'
# TODO probably want to remove this at some point
puts "!!!! WARNING !!!! WARNING !!!! WARNING !!!!"
puts "RHCloud server not found. You might want to try updating your rhc client tools."
exit 218
end
response
rescue Exception => e
puts "There was a problem communicating with the server. Response message: #{e.message}"
puts "If you were disconnected it is possible the operation finished without being able to report success."
puts "You can use 'rhc domain show' and 'rhc app show --state' to learn about the status of your user and application(s)."
exit 219
end
end
def self.print_response_err(response)
puts "Problem reported from server. Response code was #{response.code}."
if (!@mydebug)
puts "Re-run with -d for more information."
end
exit_code = 1
if response.class.inspect == "Struct::FakeResponse"
print_response_message(response.body)
elsif response.content_type == 'application/json'
begin
json_resp = json_decode(response.body)
exit_code = print_json_body(json_resp)
rescue RHC::JsonError
exit_code = 1
end
elsif @mydebug
puts "HTTP response from server is #{response.body}"
end
exit exit_code.nil? ? 666 : exit_code
end
def self.print_response_messages(json_resp)
messages = json_resp['messages']
print_response_message(messages)
end
def self.print_response_message(message)
if (message && !message.empty?)
puts ''
puts 'MESSAGES:'
puts message
puts ''
end
end
def self.print_response_success(json_resp, print_result=false)
if @mydebug
print "Response from server:"
$stdout.flush
print_json_body(json_resp, print_result)
elsif print_result
print_json_body(json_resp)
else
print_response_messages(json_resp)
end
end
def self.print_json_body(json_resp, print_result=true)
print_response_messages(json_resp)
exit_code = json_resp['exit_code']
if @mydebug
if json_resp['debug']
puts ''
puts 'DEBUG:'
puts json_resp['debug']
puts ''
puts "Exit Code: #{exit_code}"
if (json_resp.length > 3)
json_resp.each do |k,v|
if !DEBUG_INGORE_KEYS.has_key?(k)
puts "#{k.to_s}: #{v.to_s}"
end
end
end
end
if json_resp['api']
puts "API version: #{json_resp['api']}"
end
end
if print_result && json_resp['result']
puts ''
puts 'RESULT:'
puts json_resp['result']
puts ''
end
exit_code
end
#
# Check if host exists
#
def self.hostexist?(host)
RHC::Helpers.host_exists?(host)
end
# Creates a new application on the broker, waits for its DNS entry to
# propagate, and (unless no_git) clones the freshly created git repository.
# Registers at_exit hooks that destroy the app / remove the repo if the
# process later exits with a failure.
#
# Returns a Hash with :app_name, :fqdn, :health_check_path, :git_url,
# :repo_dir and :result.
#
# NOTE(review): error paths rely on print_response_err terminating the
# process; if it ever returns, app_uuid / result / health_check_path would
# be nil below — confirm against print_response_err's implementation.
def self.create_app(libra_server, net_http, user_info, app_name, app_type, rhlogin, password, repo_dir=nil, no_dns=false, no_git=false, is_embedded_jenkins=false, gear_size='small',scale=false)
# Need to have a fake HTTPResponse object for passing to print_response_err
# May already be initialized if called from another piece of code
# FIXME: remove this requirement when refactoring rhc
begin
Struct::FakeResponse
rescue NameError
Struct.new('FakeResponse',:body,:code,:content_type)
end
domains = user_info['user_info']['domains']
if domains.empty?
emessage = "Please create a domain with 'rhc domain create -n <namespace>' before creating applications."
print_response_err(Struct::FakeResponse.new(emessage,403))
end
namespace = domains[0]['namespace']
puts "Creating application: #{app_name} in #{namespace}"
data = {:cartridge => app_type,
:action => 'configure',
:node_profile => gear_size,
:app_name => app_name,
:rhlogin => rhlogin
}
if @mydebug
data[:debug] = true
end
# Need to use the new REST API for scaling apps
# We'll need to then get the new application using the existing
# API in order to access the rest of the logic in this function
if scale
end_point = "https://#{libra_server}/broker/rest/api"
client = RHC::Rest::Client.new(end_point, rhlogin, password)
domain = client.find_domain(user_info['user_info']['domains'][0]['namespace'])
namespace = domain.id
# Catch errors
begin
application = domain.add_application(app_name,{:cartridge => app_type, :scale => true, :gear_profile => gear_size})
# Variables that are needed for the rest of the function
app_uuid = application.uuid
result = "Successfully created application: #{app_name}"
# health check path now returned by the API
health_check_path = application.health_check_path
puts "DEBUG: '#{app_name}' creation returned success." if @mydebug
rescue RHC::Rest::ConnectionException, RHC::Rest::ResourceAccessException => e
print_response_err(Struct::FakeResponse.new(e.message,e.code))
rescue RHC::Rest::ValidationException => e
validation_error_code = (e.code.nil?) ? 406 : e.code
print_response_err(Struct::FakeResponse.new(e.message, validation_error_code))
rescue RHC::Rest::ServerErrorException => e
error_code = (e.code.nil?) ? 500 : e.code
print_response_err(Struct::FakeResponse.new(e.message, error_code))
end
else
# Legacy (non-scaling) path: POST the configure action to the old broker
# endpoint and extract uuid / health check path from the JSON payload.
json_data = generate_json(data)
url = URI.parse("https://#{libra_server}/broker/cartridge")
response = http_post(net_http, url, json_data, password)
if response.code == '200'
json_resp = json_decode(response.body)
print_response_success(json_resp)
json_data = json_decode(json_resp['data'])
health_check_path = json_data['health_check_path']
app_uuid = json_data['uuid']
result = json_resp['result']
puts "DEBUG: '#{app_name}' creation returned success." if @mydebug
else
print_response_err(response)
end
end
#
# At this point, we need to register a handler to guarantee app
# cleanup on any exceptions or calls to exit
#
at_exit do
unless $!.nil? || $!.is_a?(SystemExit) && $!.success?
puts "Cleaning up application"
destroy_app(libra_server, net_http, app_name, rhlogin, password)
end
end
rhc_domain = user_info['user_info']['rhc_domain']
fqdn = "#{app_name}-#{namespace}.#{rhc_domain}"
# NOTE(review): the local variable 'loop' shadows Kernel#loop inside this
# method; it counts DNS retry attempts.
loop = 0
#
# Confirm that the host exists in DNS
#
unless no_dns
puts "Now your new domain name is being propagated worldwide (this might take a minute)..."
# Allow DNS to propogate
sleep 15
# Now start checking for DNS
sleep_time = 2
while loop < MAX_RETRIES && !hostexist?(fqdn)
sleep sleep_time
loop+=1
print CLEAR_LINE + " retry # #{loop} - Waiting for DNS: #{fqdn}"
$stdout.flush
# delay() grows the back-off between attempts.
sleep_time = delay(sleep_time)
end
end
# if we have executed print statements, then move to the next line
if loop > 0
puts
end
# construct the Git URL
git_url = "ssh://#{app_uuid}@#{app_name}-#{namespace}.#{rhc_domain}/~/git/#{app_name}.git/"
# If the hostname couldn't be resolved, print out the git URL
# and exit cleanly. This will help solve issues where DNS times
# out in APAC, etc on resolution.
if loop >= MAX_RETRIES
puts <<WARNING
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
WARNING: We were unable to lookup your hostname (#{fqdn})
in a reasonable amount of time. This can happen periodically and will just
take an extra minute or two to propagate depending on where you are in the
world. Once you are able to access your application in a browser, you can then
clone your git repository.
Application URL: http://#{fqdn}
Git Repository URL: #{git_url}
Git Clone command:
git clone #{git_url} #{repo_dir}
If you can't get your application '#{app_name}' running in the browser, you can
also try destroying and recreating the application as well using:
rhc app destroy -a #{app_name} -l #{rhlogin}
If this doesn't work for you, let us know in the forums or in IRC and we'll
make sure to get you up and running.
Forums: https://openshift.redhat.com/community/forums/openshift
IRC: #openshift (on Freenode)
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
WARNING
exit 0
end
#
# Pull new repo locally
#
unless no_git
puts "Pulling new repo down" if @mydebug
quiet = (@mydebug ? ' ' : '--quiet ')
puts "git clone #{quiet}#{git_url} #{repo_dir}" if @mydebug
git_clone = %x[git clone #{quiet} #{git_url} #{repo_dir} 2>&1]
if $?.exitstatus != 0
# On Windows, distinguish a Winsock-related DNS inconsistency (nslookup
# succeeds but ping fails) from a genuine clone failure.
if RHC::Helpers.windows?
`nslookup #{app_name}-#{namespace}.#{rhc_domain}`
windows_nslookup = $?.exitstatus == 0
`ping #{app_name}-#{namespace}.#{rhc_domain} -n 2`
windows_ping = $?.exitstatus == 0
if windows_nslookup and !windows_ping # this is related to BZ #826769
puts <<WINSOCKISSUE
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
WARNING: We were unable to lookup your hostname (#{fqdn})
in a reasonable amount of time. This can happen periodically and will just
take up to 10 extra minutes to propagate depending on where you are in the
world. This may also be related to an issue with Winsock on Windows [1][2].
We recommend you wait a few minutes then clone your git repository manually.
Git Clone command:
git clone #{git_url} #{repo_dir}
[1] http://support.microsoft.com/kb/299357
[2] http://support.microsoft.com/kb/811259
If this doesn't work for you, let us know in the forums or in IRC and we'll
make sure to get you up and running.
Forums: https://openshift.redhat.com/community/forums/openshift
IRC: #openshift (on Freenode)
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
WINSOCKISSUE
exit 0
end
end
puts "Error in git clone"
puts git_clone
exit 216
end
else
if is_embedded_jenkins
# if this is a jenkins client application to be embedded,
# then print this message only in debug mode
if @mydebug
puts "
Note: There is a git repo for your Jenkins application '#{app_name}'
but it isn't being downloaded as part of this process. In most cases
it isn't needed but you can always clone it later.
"
end
else
puts <<IMPORTANT
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
IMPORTANT: Since the -n flag was specified, no local repo has been created.
This means you can't make changes to your published application until after
you clone the repo yourself. See the git url below for more information.
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
IMPORTANT
end
end
#
# At this point, we need to register a handler to guarantee git
# repo cleanup on any exceptions or calls to exit
#
unless no_git
at_exit do
unless $!.nil? || $!.is_a?(SystemExit) && $!.success?
puts "Cleaning up git repo"
FileUtils.rm_rf repo_dir
end
end
end
return {:app_name => app_name,
:fqdn => fqdn,
:health_check_path => health_check_path,
:git_url => git_url,
:repo_dir => repo_dir,
:result => result
}
end
#
# An application is considered available if the health check URL unambiguously returns a 1 or 0.
# Otherwise, if the root URL for the app successfully returns content it is also considered
# successful. In the future, applications that do not expose a public web interface will need
# a more advanced check mechanism, or the check should not prevent creation.
#
# Returns true when the app responded within MAX_RETRIES probes, false
# otherwise. Any StandardError raised while checking is printed and
# converted into a false return (see the trailing rescue).
def self.check_app_available(net_http, app_name, fqdn, health_check_path, result, git_url, repo_dir, no_git)
# Probe with a linearly growing delay; any? short-circuits on the first
# truthy probe result.
available = MAX_RETRIES.times.any? do |i|
sleep i * DEFAULT_DELAY
puts "Checking if the application is available ##{i+1}"
if health_check_path and !health_check_path.empty?
# Kernel#open with an http:// URL goes through open-uri; any fetch
# error is swallowed and leaves value nil for this attempt.
value = open("http://#{fqdn}/#{health_check_path}").read[0,1] rescue nil
# TODO: I should be typed exception ApplicationHealthFailure
raise "ERROR: The application was unable to start. Please report this issue via the forums or IRC or file a bug through our public bug tracker." if value == '0'
next true if value == '1'
end
# Ambiguous health check: fall back to probing the root URL; a successful
# open is truthy, a failure yields nil (next retry).
open("http://#{fqdn}") rescue nil
end
if available
puts "Application #{app_name} is available at: http://#{fqdn}/"
puts " Git URL: #{git_url}"
if @mydebug
unless no_git
puts "To make changes to '#{app_name}', commit to #{repo_dir}/."
else
puts <<LOOKSGOOD
To make changes to '#{app_name}', you must first clone it with:
git clone #{git_url}
LOOKSGOOD
puts "Then run 'git push' to update your OpenShift space."
end
end
if result && !result.empty?
puts "#{result}"
end
true
else
puts "Application is not available"
false
end
rescue StandardError => e
puts e
false
end
# Asks the broker to deconfigure (destroy) the named application.
# Returns the raw HTTP response object produced by http_post.
def self.destroy_app(libra_server, net_http, app_name, rhlogin, password)
  payload = generate_json(:action => 'deconfigure',
                          :app_name => app_name,
                          :rhlogin => rhlogin)
  broker_url = URI.parse("https://#{libra_server}/broker/cartridge")
  http_post(net_http, broker_url, payload, password)
end
# Sends a control +action+ (start/stop/restart/status/...) for an application
# or an embedded cartridge to the broker and prints the response.
#
# embedded     - when true, use the embed_cartridge endpoint instead.
# framework    - optional cartridge name to include in the request.
# server_alias - optional server alias forwarded to the broker.
# print_result - when true, echo the response's result payload.
#
# Returns the decoded JSON response body as a Hash.
def self.ctl_app(libra_server, net_http, app_name, rhlogin, password, action, embedded=false, framework=nil, server_alias=nil, print_result=true)
  data = {:action => action,
          :app_name => app_name,
          :rhlogin => rhlogin
         }
  data[:server_alias] = server_alias if server_alias
  data[:cartridge] = framework if framework
  data[:debug] = true if @mydebug

  json_data = generate_json(data)
  # Embedded cartridges are controlled through a different broker endpoint.
  endpoint = embedded ? 'embed_cartridge' : 'cartridge'
  url = URI.parse("https://#{libra_server}/broker/#{endpoint}")
  response = http_post(net_http, url, json_data, password)

  if response.code == '200'
    json_resp = json_decode(response.body)
    print_response_success(json_resp, print_result || @mydebug)
    # FIX: return the already-decoded body instead of decoding it twice.
    json_resp
  else
    print_response_err(response)
    json_decode(response.body)
  end
end
# Pulls a snapshot of the application's data down over SSH into +filename+.
# On unix the remote 'snapshot' command's stdout is redirected straight into
# the file by the shell; on Windows Net::SSH streams stdout into the file.
#
# Returns true on success, 1 on failure (after printing recovery advice).
def self.snapshot_create(rhc_domain, namespace, app_name, app_uuid, filename, debug=false)
  # FIX: the destination was the corrupted literal "#(unknown)"; it must
  # interpolate the +filename+ argument (the Windows branch already writes
  # to File.open(filename, 'wb')).
  ssh_cmd = "ssh #{app_uuid}@#{app_name}-#{namespace}.#{rhc_domain} 'snapshot' > #{filename}"
  puts "Pulling down a snapshot to #{filename}..."
  puts ssh_cmd if debug
  puts
  begin
    if ! RHC::Helpers.windows?
      output = `#{ssh_cmd}`
      if $?.exitstatus != 0
        puts output
        puts "Error in trying to save snapshot. You can try to save manually by running:"
        puts
        puts ssh_cmd
        puts
        return 1
      end
    else
      # No shell redirection on Windows: stream the remote command's stdout
      # into the snapshot file; stderr is echoed only in debug mode.
      Net::SSH.start("#{app_name}-#{namespace}.#{rhc_domain}", app_uuid) do |ssh|
        File.open(filename, 'wb') do |file|
          ssh.exec! "snapshot" do |channel, stream, data|
            if stream == :stdout
              file.write(data)
            else
              puts data if debug
            end
          end
        end
      end
    end
  rescue Exception => e
    puts e.backtrace if debug
    puts "Error in trying to save snapshot. You can try to save manually by running:"
    puts
    puts ssh_cmd
    puts
    return 1
  end
  true
end
# Restores application data by piping a previously saved snapshot archive
# (+filename+) to the remote 'restore' command over SSH. When the archive
# contains a git directory, INCLUDE_GIT is passed so the repo is restored too
# (skipped on Windows where the tar inspection helper is unavailable).
#
# Returns true on success, 1 on a restore failure, 255 if the archive is missing.
def self.snapshot_restore(rhc_domain, namespace, app_name, app_uuid, filename, debug=false)
  # FIX: File.exists? was removed in Ruby 3.2; File.exist? is the supported name.
  if File.exist? filename
    include_git = RHC::Helpers.windows? ? false : RHC::TarGz.contains(filename, './*/git')
    # FIX: the piped file was the corrupted literal "#(unknown)"; interpolate
    # the +filename+ argument, matching File.open(filename, 'rb') below.
    ssh_cmd = "cat #{filename} | ssh #{app_uuid}@#{app_name}-#{namespace}.#{rhc_domain} 'restore#{include_git ? ' INCLUDE_GIT' : ''}'"
    puts "Restoring from snapshot #{filename}..."
    puts ssh_cmd if debug
    puts
    begin
      if ! RHC::Helpers.windows?
        output = `#{ssh_cmd}`
        if $?.exitstatus != 0
          puts output
          puts "Error in trying to restore snapshot. You can try to restore manually by running:"
          puts
          puts ssh_cmd
          puts
          return 1
        end
      else
        # No shell pipe on Windows: stream the archive over an SSH channel.
        ssh = Net::SSH.start("#{app_name}-#{namespace}.#{rhc_domain}", app_uuid)
        ssh.open_channel do |channel|
          channel.exec("restore#{include_git ? ' INCLUDE_GIT' : ''}") do |ch, success|
            channel.on_data do |ch, data|
              puts data
            end
            channel.on_extended_data do |ch, type, data|
              puts data
            end
            channel.on_close do |ch|
              puts "Terminating..."
            end
            File.open(filename, 'rb') do |file|
              file.chunk(1024) do |chunk|
                channel.send_data chunk
              end
            end
            channel.eof!
          end
        end
        ssh.loop
      end
    rescue Exception => e
      puts e.backtrace
      puts "Error in trying to restore snapshot. You can try to restore manually by running:"
      puts
      puts ssh_cmd
      puts
      return 1
    end
  else
    puts "Archive not found: #{filename}"
    return 255
  end
  true
end
end
# provide a hook for performing actions before rhc-* commands exit
at_exit {
# ensure client tools are up to date
RHC::check_version
}
#
# Config paths... /etc/openshift/express.conf or $GEM/conf/express.conf -> ~/.openshift/express.conf
#
# semi-private: Just in case we rename again :)
@opts_config_path = nil
@conf_name = 'express.conf'
_linux_cfg = '/etc/openshift/' + @conf_name
_gem_cfg = File.join(File.expand_path(File.dirname(__FILE__) + "/../conf"), @conf_name)
@home_conf = File.expand_path('~/.openshift')
@local_config_path = File.join(@home_conf, @conf_name)
# System-wide config wins over the gem's bundled default.
# NOTE(review): File.exists? was removed in Ruby 3.2; File.exist? is the
# supported name.
@config_path = File.exists?(_linux_cfg) ? _linux_cfg : _gem_cfg
@home_dir=File.expand_path("~")
# NOTE(review): this local appears unused afterwards — possibly dead code.
local_config_path = File.expand_path(@local_config_path)
#
# Check for proxy environment
#
@http = RHC::Config.default_proxy
# Path of the config file currently in effect: a path supplied on the
# command line (@opts_config_path) wins over the per-user config file.
def config_path
  @opts_config_path || @local_config_path
end
# Config object currently in effect: the command-line supplied config
# (@opts_config) wins over the local per-user config.
def config
  @opts_config || @local_config
end
# Prompts for a password on the terminal, echoing '*' for each typed
# character and chomping trailing whitespace from the entered value.
# Returns the entered string.
def ask_password
  ask("Password: ") do |q|
    q.echo = '*'
    q.whitespace = :chomp
  end
end
# Prints guidance on creating/locating SSH keys when the configured key file
# cannot be found. NOTE(review): terminating the process was deliberately
# disabled (see the commented-out 'exit 212' below), so callers continue
# after this warning is printed.
def kfile_not_found
puts <<KFILE_NOT_FOUND
Your SSH keys are created either by running ssh-keygen (password optional)
or by having the 'rhc domain create' command do it for you. If you created
them on your own (or want to use an existing keypair), be sure to paste
your public key into the express console at http://www.openshift.com.
The client tools use the value of 'ssh_key_file' in express.conf to find
your key followed by the defaults of id_rsa[.pub] and then
id_rsa[.pub].
KFILE_NOT_FOUND
#exit 212
end
# Resolves the private SSH key file to use.
#
# Resolution order: the 'ssh_key_file' config value (a bare filename is
# looked up under ~/.ssh, anything else is expanded as a path), falling back
# to ~/.ssh/id_rsa. When check_exists is true, a missing file triggers the
# kfile_not_found guidance message.
#
# Returns the resolved key path String.
def get_kfile(check_exists=true)
  ssh_key_file = get_var('ssh_key_file')
  if ssh_key_file
    if File.basename(ssh_key_file) == ssh_key_file
      # Bare filename: resolve it relative to ~/.ssh
      kfile = "#{ENV['HOME']}/.ssh/#{ssh_key_file}"
    else
      kfile = File.expand_path(ssh_key_file)
    end
  else
    kfile = "#{ENV['HOME']}/.ssh/id_rsa"
  end
  # FIX: File.exists? was removed in Ruby 3.2; use File.exist?.
  if check_exists && !File.exist?(kfile)
    if ssh_key_file
      puts "WARNING: Unable to find '#{kfile}' referenced in express.conf."
      kfile_not_found
    else
      kfile = "#{ENV['HOME']}/.ssh/id_rsa"
      if !File.exist?(kfile)
        puts "WARNING: Unable to find ssh key file."
        kfile_not_found
      end
    end
  end
  return kfile
end
# Derives the public-key path from a private-key path by appending '.pub'.
# When check_exists is true and the file is missing, prints a warning and
# the kfile_not_found guidance. Returns the public key path String.
def get_kpfile(kfile, check_exists=true)
  kpfile = kfile + '.pub'
  # FIX: File.exists? was removed in Ruby 3.2; use File.exist?.
  if check_exists && !File.exist?(kpfile)
    puts "WARNING: Unable to find '#{kpfile}'"
    kfile_not_found
  end
  return kpfile
end
# Add a new namespace to configs: appends a default_rhlogin entry to the
# user's local express.conf unless one is already configured.
# NOTE(review): the uuid parameter is currently unused but kept for
# interface compatibility with existing callers.
def self.add_rhlogin_config(rhlogin, uuid)
  config_path = RHC::Config.local_config_path
  # FIX: block form guarantees the handle is closed even if a write raises
  # (the original opened the file and closed it manually with no ensure).
  File.open(File.expand_path(config_path), 'a') do |f|
    unless RHC::Config['default_rhlogin']
      f.puts("# Default rhlogin to use if none is specified")
      f.puts("default_rhlogin=#{rhlogin}")
      f.puts("")
    end
  end
end
# Public: POSTs an ssh-key management request to the broker and reports the
# outcome on stdout.
#
# url      - The URI::HTTPS endpoint for the key management API
# data     - The Hash payload (rhlogin, key_name, action, ssh, key_type)
# password - The String password for the user
#
# Examples
#
#   handle_key_mgmt_response(
#     URI.parse('https://openshift.redhat.com/broker/ssh_keys'),
#     {
#       :rhlogin=>"[email protected]",
#       :key_name=>"default",
#       :action=>"update-key",
#       :ssh=>"AAAAB3NzaC1yc2EAAAADAQABAAAAgQCrXG5c.....",
#       :key_type=>"ssh-rsa"},
#     'mypass')
#   # => nil
#
# Returns nil on success and the HTTP response object on failure.
def handle_key_mgmt_response(url, data, password)
  RHC::print_post_data(data)
  response = RHC::http_post(RHC::Config.default_proxy, url, RHC::generate_json(data), password)

  if response.code == '200'
    begin
      decoded = RHC::json_decode(response.body)
      RHC::update_server_api_v(decoded)
      RHC::print_response_success(decoded)
      puts "Success"
      return nil
    rescue RHC::JsonError
      # 200 with an unparsable body is still a failure.
      RHC::print_response_err(response)
    end
  else
    RHC::print_response_err(response)
  end

  puts "Failure"
  response
end
# Public: Add or update an ssh key for the user on the broker.
#
# command - The String value 'add' or 'update'
# identifier - The String value to identify the key
# pub_key_file_path - The String file path of the public key, or nil to
#                     generate a fresh keypair via generate_ssh_key_ruby
# rhlogin - The String login to the broker
# password - The String password for the user
#
# Examples
#
#   add_or_update_key('add', 'newkeyname', '~/.ssh/id_rsa.pub',
#                     'mylogin', 'mypass')
#
# Returns nil on success or the HTTP response object on failure
def add_or_update_key(command, identifier, pub_key_file_path, rhlogin, password)
  # Read user public ssh key. A public key file is a single line of the
  # form "<type> <base64-key> [comment]".
  if pub_key_file_path
    if File.readable?(pub_key_file_path)
      begin
        # FIX: block form closes the handle (the original File.open(...).gets
        # leaked it).
        ssh_keyfile_contents = File.open(pub_key_file_path, &:gets).chomp.split(' ')
        ssh_key = ssh_keyfile_contents[1]
        ssh_key_type = ssh_keyfile_contents[0]
      rescue Exception => e
        puts "Invalid public keyfile format! Please specify a valid user public keyfile."
        exit 1
      end
    else
      puts "Unable to read user public keyfile #{pub_key_file_path}"
      exit 1
    end
  else # create key
    key_name = identifier
    puts "Generating ssh key pair for user '#{key_name}' in the dir '#{Dir.pwd}/'"
    # REMOVED in favor of generate_ssh_key_ruby: system("ssh-keygen -t rsa -f '#{key_name}'")
    ssh_pub_key_file = generate_ssh_key_ruby()
    ssh_keyfile_contents = File.open(ssh_pub_key_file, &:gets).chomp.split(' ')
    ssh_key = ssh_keyfile_contents[1]
    ssh_key_type = ssh_keyfile_contents[0]
  end
  data = {}
  data[:rhlogin] = rhlogin
  data[:key_name] = identifier
  data[:ssh] = ssh_key
  data[:key_type] = ssh_key_type
  # FIX: the action was assigned three times; 'update' maps to 'update-key',
  # anything else (normally 'add') falls through to 'add-key' exactly as before.
  data[:action] = (command == 'update') ? 'update-key' : 'add-key'
  url = URI.parse("https://#{RHC::Config['libra_server']}/broker/ssh_keys")
  handle_key_mgmt_response(url, data, password)
end
# Public: Generate an SSH keypair and store it in ~/.ssh/id_rsa[.pub]
#
# type - The String type RSA or DSS.
# bits - The Integer value for number of bits.
# comment - The String comment for the key
#
# Examples
#
#   generate_ssh_key_ruby()
#   # => /home/user/.ssh/id_rsa.pub
#
# Returns nil when a key already exists (it is reused), otherwise the
# public key path as a String.
def generate_ssh_key_ruby(type="RSA", bits = 1024, comment = "OpenShift-Key")
  key = RHC::Vendor::SSHKey.generate(:type => type,
                                     :bits => bits,
                                     :comment => comment)
  ssh_dir = "#{RHC::Config.home_dir}/.ssh"
  # FIX: File.exists? was removed in Ruby 3.2; use File.exist?.
  if File.exist?("#{ssh_dir}/id_rsa")
    puts "SSH key already exists: #{ssh_dir}/id_rsa. Reusing..."
    return nil
  else
    unless File.exist?(ssh_dir)
      FileUtils.mkdir_p(ssh_dir)
      File.chmod(0700, ssh_dir)
    end
    # Private key must not be world-readable.
    File.open("#{ssh_dir}/id_rsa", 'w') {|f| f.write(key.private_key)}
    File.chmod(0600, "#{ssh_dir}/id_rsa")
    File.open("#{ssh_dir}/id_rsa.pub", 'w') {|f| f.write(key.ssh_public_key)}
  end
  "#{ssh_dir}/id_rsa.pub"
end
# Public: Run a command on a remote host over SSH, echoing its output.
#
# host - The String of the remote hostname to ssh to.
# username - The String username of the remote user to ssh as.
# command - The String command to run on the remote host.
#
# Examples
#
#   ssh_ruby('myapp-t.rhcloud.com',
#            '109745632b514e9590aa802ec015b074',
#            'rhcsh tail -f $OPENSHIFT_LOG_DIR/*"')
#   # => true
#
# Returns true on success
def ssh_ruby(host, username, command)
  Net::SSH.start(host, username) do |session|
    session.open_channel do |channel|
      # Request a pty so interactive-style remote commands behave correctly.
      channel.request_pty do |_ch, success|
        puts "pty could not be obtained" unless success
      end
      # Echo everything the remote command writes.
      channel.on_data { |_ch, data| puts data }
      channel.exec command
    end
    session.loop
  end
end
# Public: legacy convenience wrapper for reading a config key via RHC::Config.
def get_var(key)
RHC::Config[key]
end
| 30.318408 | 195 | 0.621431 |
require 'fastlane_core/configuration/config_item'
require 'credentials_manager/appfile_config'
module Supply
# Declares every command-line/environment option understood by `fastlane supply`
# (the Google Play publishing tool) as FastlaneCore::ConfigItem entries.
class Options
# rubocop:disable Metrics/PerceivedComplexity
# Builds (and memoizes) the full list of ConfigItems. Each item wires a CLI
# flag, an environment variable, defaults sourced from the Appfile, and an
# optional verify_block that validates the supplied value.
def self.available_options
@options ||= [
FastlaneCore::ConfigItem.new(key: :package_name,
env_name: "SUPPLY_PACKAGE_NAME",
short_option: "-p",
description: "The package name of the application to use",
code_gen_sensitive: true,
default_value: CredentialsManager::AppfileConfig.try_fetch_value(:package_name),
default_value_dynamic: true),
FastlaneCore::ConfigItem.new(key: :version_name,
env_name: "SUPPLY_VERSION_NAME",
short_option: "-n",
optional: true,
description: "Version name (used when uploading new apks/aabs) - defaults to 'versionName' in build.gradle or AndroidManifest.xml",
default_value: CredentialsManager::AppfileConfig.try_fetch_value(:version_name),
default_value_dynamic: true),
FastlaneCore::ConfigItem.new(key: :version_code,
env_name: "SUPPLY_VERSION_CODE",
short_option: "-C",
optional: true,
type: Integer,
description: "Version code (used when updating rollout or promoting specific versions)",
default_value: CredentialsManager::AppfileConfig.try_fetch_value(:version_code),
default_value_dynamic: true),
FastlaneCore::ConfigItem.new(key: :release_status,
env_name: "SUPPLY_RELEASE_STATUS",
short_option: "-e",
optional: true,
description: "Release status (used when uploading new apks/aabs) - valid values are #{Supply::ReleaseStatus::ALL.join(', ')}",
default_value: Supply::ReleaseStatus::COMPLETED,
default_value_dynamic: true,
verify_block: proc do |value|
# NOTE(review): Supply::RELEASE_STATUS is not referenced anywhere else in
# view; if that constant is undefined, raising this message would itself
# raise NameError. Confirm it exists or switch to Supply::ReleaseStatus::ALL.
UI.user_error!("Value must be one of '#{Supply::RELEASE_STATUS}'") unless Supply::ReleaseStatus::ALL.include?(value)
end),
FastlaneCore::ConfigItem.new(key: :track,
short_option: "-a",
env_name: "SUPPLY_TRACK",
description: "The track of the application to use. The default available tracks are: #{Supply::Tracks::DEFAULTS.join(', ')}",
default_value: Supply::Tracks::DEFAULT,
type: String,
verify_block: proc do |value|
UI.user_error!("'rollout' is no longer a valid track name - please use 'production' instead") if value.casecmp('rollout').zero?
end),
FastlaneCore::ConfigItem.new(key: :rollout,
short_option: "-r",
description: "The percentage of the user fraction when uploading to the rollout track",
optional: true,
verify_block: proc do |value|
min = 0.0
max = 1.0
# NOTE(review): the check allows exactly 1.0 (<= max) while the message
# says "less than" — message and predicate are slightly inconsistent.
UI.user_error!("Invalid value '#{value}', must be greater than #{min} and less than #{max}") unless value.to_f > min && value.to_f <= max
end),
FastlaneCore::ConfigItem.new(key: :metadata_path,
env_name: "SUPPLY_METADATA_PATH",
short_option: "-m",
optional: true,
description: "Path to the directory containing the metadata files",
default_value: (Dir["./fastlane/metadata/android"] + Dir["./metadata"]).first,
default_value_dynamic: true),
FastlaneCore::ConfigItem.new(key: :key,
env_name: "SUPPLY_KEY",
short_option: "-k",
conflicting_options: [:json_key],
deprecated: 'Use `--json_key` instead',
description: "The p12 File used to authenticate with Google",
code_gen_sensitive: true,
default_value: Dir["*.p12"].first || CredentialsManager::AppfileConfig.try_fetch_value(:keyfile),
default_value_dynamic: true,
verify_block: proc do |value|
UI.user_error!("Could not find p12 file at path '#{File.expand_path(value)}'") unless File.exist?(File.expand_path(value))
end),
FastlaneCore::ConfigItem.new(key: :issuer,
env_name: "SUPPLY_ISSUER",
short_option: "-i",
conflicting_options: [:json_key],
deprecated: 'Use `--json_key` instead',
description: "The issuer of the p12 file (email address of the service account)",
code_gen_sensitive: true,
default_value: CredentialsManager::AppfileConfig.try_fetch_value(:issuer),
default_value_dynamic: true,
verify_block: proc do |value|
UI.important("DEPRECATED --issuer OPTION. Use --json_key instead")
end),
FastlaneCore::ConfigItem.new(key: :json_key,
env_name: "SUPPLY_JSON_KEY",
short_option: "-j",
conflicting_options: [:issuer, :key, :json_key_data],
optional: true, # this shouldn't be optional but is until --key and --issuer are completely removed
description: "The path to a file containing service account JSON, used to authenticate with Google",
code_gen_sensitive: true,
default_value: CredentialsManager::AppfileConfig.try_fetch_value(:json_key_file),
default_value_dynamic: true,
verify_block: proc do |value|
UI.user_error!("Could not find service account json file at path '#{File.expand_path(value)}'") unless File.exist?(File.expand_path(value))
UI.user_error!("'#{value}' doesn't seem to be a JSON file") unless FastlaneCore::Helper.json_file?(File.expand_path(value))
end),
FastlaneCore::ConfigItem.new(key: :json_key_data,
env_name: "SUPPLY_JSON_KEY_DATA",
short_option: "-c",
conflicting_options: [:issuer, :key, :json_key],
optional: true,
description: "The raw service account JSON data used to authenticate with Google",
code_gen_sensitive: true,
default_value: CredentialsManager::AppfileConfig.try_fetch_value(:json_key_data_raw),
default_value_dynamic: true,
verify_block: proc do |value|
begin
JSON.parse(value)
rescue JSON::ParserError
UI.user_error!("Could not parse service account json JSON::ParseError")
end
end),
FastlaneCore::ConfigItem.new(key: :apk,
env_name: "SUPPLY_APK",
description: "Path to the APK file to upload",
short_option: "-b",
conflicting_options: [:apk_paths, :aab, :aab_paths],
code_gen_sensitive: true,
default_value: Dir["*.apk"].last || Dir[File.join("app", "build", "outputs", "apk", "app-Release.apk")].last,
default_value_dynamic: true,
optional: true,
verify_block: proc do |value|
UI.user_error!("Could not find apk file at path '#{value}'") unless File.exist?(value)
UI.user_error!("apk file is not an apk") unless value.end_with?('.apk')
end),
FastlaneCore::ConfigItem.new(key: :apk_paths,
env_name: "SUPPLY_APK_PATHS",
conflicting_options: [:apk, :aab, :aab_paths],
code_gen_sensitive: true,
optional: true,
type: Array,
description: "An array of paths to APK files to upload",
short_option: "-u",
verify_block: proc do |value|
UI.user_error!("Could not evaluate array from '#{value}'") unless value.kind_of?(Array)
value.each do |path|
UI.user_error!("Could not find apk file at path '#{path}'") unless File.exist?(path)
UI.user_error!("file at path '#{path}' is not an apk") unless path.end_with?('.apk')
end
end),
FastlaneCore::ConfigItem.new(key: :aab,
env_name: "SUPPLY_AAB",
description: "Path to the AAB file to upload",
short_option: "-f",
conflicting_options: [:apk, :apk_paths, :aab_paths],
code_gen_sensitive: true,
default_value: Dir["*.aab"].last || Dir[File.join("app", "build", "outputs", "bundle", "release", "bundle.aab")].last,
default_value_dynamic: true,
optional: true,
verify_block: proc do |value|
UI.user_error!("Could not find aab file at path '#{value}'") unless File.exist?(value)
UI.user_error!("aab file is not an aab") unless value.end_with?('.aab')
end),
FastlaneCore::ConfigItem.new(key: :aab_paths,
env_name: "SUPPLY_AAB_PATHS",
conflicting_options: [:apk, :apk_paths, :aab],
code_gen_sensitive: true,
optional: true,
type: Array,
description: "An array of paths to AAB files to upload",
short_option: "-z",
verify_block: proc do |value|
UI.user_error!("Could not evaluate array from '#{value}'") unless value.kind_of?(Array)
value.each do |path|
UI.user_error!("Could not find aab file at path '#{path}'") unless File.exist?(path)
UI.user_error!("file at path '#{path}' is not an aab") unless path.end_with?('.aab')
end
end),
FastlaneCore::ConfigItem.new(key: :skip_upload_apk,
env_name: "SUPPLY_SKIP_UPLOAD_APK",
optional: true,
description: "Whether to skip uploading APK",
type: Boolean,
default_value: false),
FastlaneCore::ConfigItem.new(key: :skip_upload_aab,
env_name: "SUPPLY_SKIP_UPLOAD_AAB",
optional: true,
description: "Whether to skip uploading AAB",
type: Boolean,
default_value: false),
FastlaneCore::ConfigItem.new(key: :skip_upload_metadata,
env_name: "SUPPLY_SKIP_UPLOAD_METADATA",
optional: true,
description: "Whether to skip uploading metadata, changelogs not included",
type: Boolean,
default_value: false),
FastlaneCore::ConfigItem.new(key: :skip_upload_changelogs,
env_name: "SUPPLY_SKIP_UPLOAD_CHANGELOGS",
optional: true,
description: "Whether to skip uploading changelogs",
type: Boolean,
default_value: false),
FastlaneCore::ConfigItem.new(key: :skip_upload_images,
env_name: "SUPPLY_SKIP_UPLOAD_IMAGES",
optional: true,
description: "Whether to skip uploading images, screenshots not included",
type: Boolean,
default_value: false),
FastlaneCore::ConfigItem.new(key: :skip_upload_screenshots,
env_name: "SUPPLY_SKIP_UPLOAD_SCREENSHOTS",
optional: true,
description: "Whether to skip uploading SCREENSHOTS",
type: Boolean,
default_value: false),
FastlaneCore::ConfigItem.new(key: :track_promote_to,
env_name: "SUPPLY_TRACK_PROMOTE_TO",
optional: true,
description: "The track to promote to. The default available tracks are: #{Supply::Tracks::DEFAULTS.join(', ')}",
verify_block: proc do |value|
UI.user_error!("'rollout' is no longer a valid track name - please use 'production' instead") if value.casecmp('rollout').zero?
end),
FastlaneCore::ConfigItem.new(key: :validate_only,
env_name: "SUPPLY_VALIDATE_ONLY",
optional: true,
description: "Only validate changes with Google Play rather than actually publish",
type: Boolean,
default_value: false),
FastlaneCore::ConfigItem.new(key: :mapping,
env_name: "SUPPLY_MAPPING",
description: "Path to the mapping file to upload",
short_option: "-d",
conflicting_options: [:mapping_paths],
optional: true,
verify_block: proc do |value|
UI.user_error!("Could not find mapping file at path '#{value}'") unless File.exist?(value)
end),
FastlaneCore::ConfigItem.new(key: :mapping_paths,
env_name: "SUPPLY_MAPPING_PATHS",
conflicting_options: [:mapping],
optional: true,
type: Array,
description: "An array of paths to mapping files to upload",
short_option: "-s",
verify_block: proc do |value|
UI.user_error!("Could not evaluate array from '#{value}'") unless value.kind_of?(Array)
value.each do |path|
UI.user_error!("Could not find mapping file at path '#{path}'") unless File.exist?(path)
end
end),
FastlaneCore::ConfigItem.new(key: :root_url,
env_name: "SUPPLY_ROOT_URL",
description: "Root URL for the Google Play API. The provided URL will be used for API calls in place of https://www.googleapis.com/",
optional: true,
verify_block: proc do |value|
UI.user_error!("Could not parse URL '#{value}'") unless value =~ URI.regexp
end),
FastlaneCore::ConfigItem.new(key: :check_superseded_tracks,
env_name: "SUPPLY_CHECK_SUPERSEDED_TRACKS",
optional: true,
description: "Check the other tracks for superseded versions and disable them",
deprecated: "Google Play does this automatically now",
type: Boolean,
default_value: false),
FastlaneCore::ConfigItem.new(key: :timeout,
env_name: "SUPPLY_TIMEOUT",
optional: true,
description: "Timeout for read, open, and send (in seconds)",
type: Integer,
default_value: 300),
FastlaneCore::ConfigItem.new(key: :deactivate_on_promote,
env_name: "SUPPLY_DEACTIVATE_ON_PROMOTE",
optional: true,
description: "When promoting to a new track, deactivate the binary in the origin track",
deprecated: "Google Play does this automatically now",
type: Boolean,
default_value: true),
FastlaneCore::ConfigItem.new(key: :version_codes_to_retain,
optional: true,
type: Array,
description: "An array of version codes to retain when publishing a new APK",
verify_block: proc do |version_codes|
# NOTE(review): the Array check below runs after map(&:to_i), so a
# non-array would already have raised; also `to_i == 0` misclassifies a
# legitimate version code of 0 and strings like "abc" alike.
version_codes = version_codes.map(&:to_i)
UI.user_error!("Could not evaluate array from '#{version_codes}'") unless version_codes.kind_of?(Array)
version_codes.each do |version_code|
UI.user_error!("Version code '#{version_code}' is not an integer") if version_code == 0
end
end),
FastlaneCore::ConfigItem.new(key: :in_app_update_priority,
env_name: "SUPPLY_IN_APP_UPDATE_PRIORITY",
optional: true,
type: Integer,
description: "In-app update priority for all the newly added apks in the release. Can take values between [0,5]",
verify_block: proc do |in_app_update_priority|
in_app_update_priority = in_app_update_priority.to_i
UI.user_error!("Invalid in_app_update_priority value '#{in_app_update_priority}'. Values must be between [0,5]") unless (0..5).member?(in_app_update_priority)
end),
FastlaneCore::ConfigItem.new(key: :obb_main_references_version,
env_name: "SUPPLY_OBB_MAIN_REFERENCES_VERSION",
description: "References version of 'main' expansion file",
optional: true,
type: Numeric),
FastlaneCore::ConfigItem.new(key: :obb_main_file_size,
# NOTE(review): this env var name contains a space ("...FILE SIZE"), which
# most shells cannot set; it matches the historical upstream value, so it is
# left untouched here — changing it would alter the external interface.
env_name: "SUPPLY_OBB_MAIN_FILE SIZE",
description: "Size of 'main' expansion file in bytes",
optional: true,
type: Numeric),
FastlaneCore::ConfigItem.new(key: :obb_patch_references_version,
env_name: "SUPPLY_OBB_PATCH_REFERENCES_VERSION",
description: "References version of 'patch' expansion file",
optional: true,
type: Numeric),
FastlaneCore::ConfigItem.new(key: :obb_patch_file_size,
# NOTE(review): same space-in-name issue as SUPPLY_OBB_MAIN_FILE SIZE above.
env_name: "SUPPLY_OBB_PATCH_FILE SIZE",
description: "Size of 'patch' expansion file in bytes",
optional: true,
type: Numeric),
FastlaneCore::ConfigItem.new(key: :ack_bundle_installation_warning,
env_name: "ACK_BUNDLE_INSTALLATION_WARNING",
description: "Must be set to true if the bundle installation may trigger a warning on user devices (e.g can only be downloaded over wifi). Typically this is required for bundles over 150MB",
optional: true,
type: Boolean,
default_value: false)
]
end
# rubocop:enable Metrics/PerceivedComplexity
end
end
| 73.652308 | 227 | 0.432886 |
# frozen_string_literal: true
# Copyright (c) 2018 by Jiang Jinyang <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
require 'async'
require 'ciri/utils/logger'
module Ciri
module P2P
# DialScheduler
# establishes outgoing connections
class DialScheduler
  include Utils::Logger

  # @param network_state shared network view: peer store, connection registry
  # @param dialer performs the actual outgoing connection + handshake
  def initialize(network_state, dialer)
    @network_state = network_state
    @dialer = dialer
  end

  # Dial the bootnodes immediately, then keep topping up outgoing
  # connections on a fixed interval.
  def run(task: Async::Task.current)
    dial_bootnodes
    # dial outgoing peers every 15 seconds
    task.reactor.every(15) do
      schedule_dialing_tasks
    end
  end

  private

  def dial_bootnodes
    dial_and_register(@network_state.peer_store.find_bootnodes(@network_state.number_of_attemp_outgoing))
  end

  def schedule_dialing_tasks
    dial_and_register(@network_state.peer_store.find_attempt_peers(@network_state.number_of_attemp_outgoing))
  end

  # Shared by both dialing paths (previously duplicated): connect to each
  # node and register the resulting connection as an outgoing peer.
  def dial_and_register(nodes)
    nodes.each do |node|
      conn, handshake = @dialer.dial(node)
      @network_state.new_peer_connected(conn, handshake, way_for_connection: Peer::OUTGOING)
    end
  end
end
end
end
| 33.26087 | 109 | 0.724183 |
# Homebrew cask for the Leckerli One font from the Google Fonts repository.
cask "font-leckerli-one" do
  # Google Fonts files are unversioned; always install the latest TTF.
  version :latest
  sha256 :no_check

  url "https://github.com/google/fonts/raw/master/ofl/leckerlione/LeckerliOne-Regular.ttf",
      verified: "github.com/google/fonts/"
  name "Leckerli One"
  homepage "https://fonts.google.com/specimen/Leckerli+One"

  font "LeckerliOne-Regular.ttf"
end
| 26.75 | 91 | 0.738318 |
d5fe7d313e1cd4a4d200b2228a6d01919b42f292 | 3,432 | require 'devise/multi_email/parent_model_extensions'
module Devise
  module Models
    # Per-email-record validations, mixed into the email *child* model of the
    # multi-email association: presence, uniqueness and format of +email+.
    module EmailValidatable
      extend ActiveSupport::Concern

      included do
        validates_presence_of :email, if: :email_required?
        # The dirty-tracking guard predicate differs on ActiveRecord 5.1+
        # (#will_save_change_to_email? replaced #email_changed?).
        if Devise.activerecord51?
          validates_uniqueness_of :email, allow_blank: true, case_sensitive: true, if: :will_save_change_to_email?
          validates_format_of :email, with: email_regexp, allow_blank: true, if: :will_save_change_to_email?
        else
          validates_uniqueness_of :email, allow_blank: true, if: :email_changed?
          validates_format_of :email, with: email_regexp, allow_blank: true, if: :email_changed?
        end
      end

      def email_required?
        true
      end

      module ClassMethods
        Devise::Models.config(self, :email_regexp)
      end
    end

    # Replacement for Devise's :validatable when a user has many email
    # records: password validations stay on the parent model, email
    # validations are pushed into the email association (EmailValidatable),
    # and email errors are copied back onto the parent's :email attribute.
    module MultiEmailValidatable
      extend ActiveSupport::Concern

      included do
        include Devise::MultiEmail::ParentModelExtensions

        # Fail fast if the base class lacks any validation macro we rely on.
        assert_validations_api!(self)

        validates_presence_of :email, if: :email_required?
        validates_presence_of :password, if: :password_required?
        validates_confirmation_of :password, if: :password_required?
        validates_length_of :password, within: password_length, allow_blank: true

        after_validation :propagate_email_errors

        multi_email_association.include_module(EmailValidatable)

        # Advertise compatibility with Devise's stock :validatable module.
        devise_modules << :validatable
      end

      # No extra database columns are required on the parent model.
      def self.required_fields(klass)
        []
      end

      protected

      # Same as Devise::Models::Validatable#password_required?
      def password_required?
        !persisted? || !password.nil? || !password_confirmation.nil?
      end

      # Same as Devise::Models::Validatable#email_required?
      def email_required?
        true
      end

      private

      # Move validation errors recorded on the email association onto the
      # parent's :email attribute so forms display them in the usual place.
      def propagate_email_errors
        association_name = self.class.multi_email_association.name
        email_error_key = errors_attribute_names.detect do |key|
          [association_name.to_s, "#{association_name}.email"].include?(key.to_s)
        end

        return unless email_error_key.present?

        # Older Rails Errors objects have no #details; fall back to the raw
        # message list in that case.
        email_errors =
          if errors.respond_to?(:details)
            errors
              .details[email_error_key]
              .map { |e| e[:error] }
              .zip(errors.delete(email_error_key) || [])
          else
            errors.delete(email_error_key)
          end

        email_errors.each do |type, message|
          errors.add(:email, type, message: message)
        end
      end

      # Errors#keys was replaced by #attribute_names in newer Rails; support both.
      def errors_attribute_names
        errors.respond_to?(:attribute_names) ? errors.attribute_names : errors.keys
      end

      module ClassMethods
        # All validations used by this module.
        VALIDATIONS = [:validates_presence_of, :validates_uniqueness_of, :validates_format_of,
                       :validates_confirmation_of, :validates_length_of].freeze

        # Raise unless the host class responds to every validation macro above.
        def assert_validations_api!(base) #:nodoc:
          unavailable_validations = VALIDATIONS.select { |v| !base.respond_to?(v) }

          unless unavailable_validations.empty?
            raise "Could not use :validatable module since #{base} does not respond " <<
                  "to the following methods: #{unavailable_validations.to_sentence}."
          end
        end

        Devise::Models.config(self, :password_length)
      end
    end
  end
end
| 30.371681 | 114 | 0.652972 |
# Legacy (Rails 2-era) project model: associations, URL-slug generation and
# SVN path helpers.
class Project < ActiveRecord::Base
  has_many :rights, :class_name => 'UserProject'
  has_many :users, :through => :rights, :class_name => "User"

  belongs_to :repository
  has_many :events, :order => "created_at DESC"
  has_many :tickets
  has_many :ticket_changes, :through => :tickets
  has_many :milestones
  has_many :releases
  has_many :parts, :order => "name"
  has_and_belongs_to_many :categories
  has_and_belongs_to_many :tags

  before_validation :format_root_path

  # NOTE(review): this aliases #ticket_changes to ActiveRecord's
  # dirty-tracking #changes, clobbering the has_many :ticket_changes
  # association reader declared above — confirm this is intentional.
  alias :ticket_changes :changes

  # Use the URL-safe slug in routes instead of the numeric id.
  def to_param
    self.short_name
  end

  # Legacy callback style (method override rather than a declared callback):
  # derives the short_name slug from the project name before saving.
  def before_save
    if self.name
      result = self.name.downcase
      result.gsub!(/['"]/, '') # replace quotes by nothing
      result.gsub!(/\W/, ' ') # strip all non word chars
      result.gsub!(/\ +/, '-') # replace all white space sections with a dash
      result.gsub!(/(-)$/, '') # trim trailing dashes
      result.gsub!(/^(-)/, '') # trim leading dashes
      self.short_name = result
    end
  end

  # Returns all open projects; the current_user argument is currently unused.
  def Project.show_for_user(current_user)
    Project.find(:all, :conditions => "closed = 'f'")
  end

  # deprecated stuff

  # Strip the project root prefix from an SVN path. Accepts a String or an
  # Array of path segments; nil becomes ''.
  def relativize_svn_path(path)
    path = path.join("/") if (path && (path.is_a? Array))
    path ||= ''
    # Bug fix: the gsub result was previously discarded (a trailing bare
    # `path` returned the input unchanged); return the stripped value.
    path.gsub(/^\/?#{self.root_path}\/?/, '')
  end

  # Prefix a repository-relative path with the project root (if any).
  def absolutize_svn_path(path)
    path = path.join("/") if (path && (path.is_a? Array))
    path ||= ''
    self.root_path.blank? ? path : File.join(self.root_path, path)
  end

  protected

  # Normalize root_path: trim whitespace, drop a leading slash and ensure a
  # trailing slash.
  def format_root_path
    unless self.root_path.blank?
      self.root_path.strip!
      self.root_path.gsub!(/^\//, '')
      self.root_path = self.root_path + '/' unless self.root_path =~ /\/$/
    end
  end
end
| 27.52459 | 82 | 0.623585 |
5de0686f0fd25406aac63b7cefdc6030288bd55b | 6,777 | =begin
#Kubernetes
#No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.2.3
=end
require 'date'
module Kubernetes
  # StatefulSetList is a collection of StatefulSets.
  #
  # NOTE: this class is generated by Swagger Codegen (see the file header);
  # prefer regenerating over hand-editing.
  class V1beta1StatefulSetList
    # APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
    attr_accessor :api_version

    attr_accessor :items

    # Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
    attr_accessor :kind

    attr_accessor :metadata

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'api_version' => :'apiVersion',
        :'items' => :'items',
        :'kind' => :'kind',
        :'metadata' => :'metadata'
      }
    end

    # Attribute type mapping.
    def self.swagger_types
      {
        :'api_version' => :'String',
        :'items' => :'Array<V1beta1StatefulSet>',
        :'kind' => :'String',
        :'metadata' => :'V1ListMeta'
      }
    end

    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)

      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}

      if attributes.has_key?(:'apiVersion')
        self.api_version = attributes[:'apiVersion']
      end

      if attributes.has_key?(:'items')
        if (value = attributes[:'items']).is_a?(Array)
          self.items = value
        end
      end

      if attributes.has_key?(:'kind')
        self.kind = attributes[:'kind']
      end

      if attributes.has_key?(:'metadata')
        self.metadata = attributes[:'metadata']
      end
    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properies with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      if @items.nil?
        invalid_properties.push("invalid value for 'items', items cannot be nil.")
      end

      return invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      return false if @items.nil?
      return true
    end

    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
          api_version == o.api_version &&
          items == o.items &&
          kind == o.kind &&
          metadata == o.metadata
    end

    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end

    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [api_version, items, kind, metadata].hash
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end # or else data not found in attributes(hash), not an issue as the data can be optional
      end

      self
    end

    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        DateTime.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        # truthy string forms accepted by the generator: true/t/yes/y/1
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        temp_model = Kubernetes.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end

    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map{ |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 30.12 | 279 | 0.626826 |
# Homebrew formula for libogg, the Ogg bitstream container library.
class Libogg < Formula
  desc "Ogg Bitstream Library"
  homepage "https://www.xiph.org/ogg/"
  url "http://downloads.xiph.org/releases/ogg/libogg-1.3.2.tar.gz"
  sha256 "e19ee34711d7af328cb26287f4137e70630e7261b17cbe3cd41011d73a654692"

  bottle do
    cellar :any
    sha256 "e6cd3367b66868d926581ddca05967355456bdf885292321a1f3238df6cf3c7f" => :tiger_altivec
    sha256 "771d905c7cd8ec9b28e94ddb751e2af5e86e943ce1e156ead529b0e6f6044914" => :leopard_g3
    sha256 "f5eff6ed4b72ec948f42dee65463623fb4e61094e96abc78af8e1669633f9c4f" => :leopard_altivec
  end

  head do
    url "https://svn.xiph.org/trunk/ogg"

    # HEAD checkouts have no pregenerated configure script, so the autotools
    # chain is needed at build time.
    depends_on "autoconf" => :build
    depends_on "automake" => :build
    depends_on "libtool" => :build
  end

  option :universal

  def install
    ENV.universal_binary if build.universal?
    # Generate ./configure for HEAD builds (release tarballs ship it).
    system "./autogen.sh" if build.head?
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}"
    system "make"
    # Serialize the install step.
    ENV.deparallelize
    system "make", "install"
  end
end
| 29.485714 | 97 | 0.719961 |
# Serializes a staff member together with their associated records
# (orders, projects, settings, notifications and cart).
class StaffSerializer < ApplicationSerializer
  attributes :id, :first_name, :last_name, :email, :phone, :role, :created_at, :updated_at

  has_many :orders
  has_many :staff_projects
  has_many :user_settings
  has_many :notifications
  has_many :projects
  has_one :cart
end
| 25.181818 | 90 | 0.772563 |
33db59124e9a0a9ff03b9886a8ecbb51cf45bd38 | 3,335 | require_relative '../../spec_helper'
require_relative 'fixtures/classes'
# ruby/spec-style examples for Hash#compare_by_identity. `#should` matchers,
# mock() and the HashSpecs fixtures come from the spec harness.
describe "Hash#compare_by_identity" do
  before :each do
    @h = {}
    @idh = {}.compare_by_identity
  end

  it "causes future comparisons on the receiver to be made by identity" do
    @h[[1]] = :a
    @h[[1]].should == :a
    @h.compare_by_identity
    # An equal-but-distinct key no longer matches once identity mode is on.
    @h[[1].dup].should be_nil
  end

  it "rehashes internally so that old keys can be looked up" do
    h = {}
    (1..10).each { |k| h[k] = k }
    o = Object.new
    def o.hash; 123; end
    h[o] = 1
    h.compare_by_identity
    h[o].should == 1
  end

  it "returns self" do
    h = {}
    h[:foo] = :bar
    h.compare_by_identity.should equal h
  end

  it "has no effect on an already compare_by_identity hash" do
    @idh[:foo] = :bar
    @idh.compare_by_identity.should equal @idh
    @idh.compare_by_identity?.should == true
    @idh[:foo].should == :bar
  end

  it "uses the semantics of BasicObject#equal? to determine key identity" do
    # Distinct equal arrays produce two entries; the same symbol overwrites.
    [1].should_not equal([1])
    @idh[[1]] = :c
    @idh[[1]] = :d
    :bar.should equal(:bar)
    @idh[:bar] = :e
    @idh[:bar] = :f
    @idh.values.should == [:c, :d, :f]
  end

  it "uses #equal? semantics, but doesn't actually call #equal? to determine identity" do
    obj = mock('equal')
    obj.should_not_receive(:equal?)
    @idh[:foo] = :glark
    @idh[obj] = :a
    @idh[obj].should == :a
  end

  it "does not call #hash on keys" do
    key = HashSpecs::ByIdentityKey.new
    @idh[key] = 1
    @idh[key].should == 1
  end

  it "regards #dup'd objects as having different identities" do
    key = ['foo']
    @idh[key.dup] = :str
    @idh[key].should be_nil
  end

  it "regards #clone'd objects as having different identities" do
    key = ['foo']
    @idh[key.clone] = :str
    @idh[key].should be_nil
  end

  it "regards references to the same object as having the same identity" do
    o = Object.new
    @h[o] = :o
    @h[:a] = :a
    @h[o].should == :o
  end

  it "raises a #{frozen_error_class} on frozen hashes" do
    @h = @h.freeze
    lambda { @h.compare_by_identity }.should raise_error(frozen_error_class)
  end

  # Behaviour confirmed in bug #1871
  it "persists over #dups" do
    @idh['foo'] = :bar
    @idh['foo'] = :glark
    @idh.dup.should == @idh
    @idh.dup.size.should == @idh.size
  end

  it "persists over #clones" do
    @idh['foo'] = :bar
    @idh['foo'] = :glark
    @idh.clone.should == @idh
    @idh.clone.size.should == @idh.size
  end

  it "does not copy string keys" do
    foo = 'foo'
    @idh[foo] = true
    @idh[foo] = true
    @idh.size.should == 1
    @idh.keys.first.should equal foo
  end

  it "gives different identity for string literals" do
    # Each 'foo' literal allocates a new String object, hence two entries.
    @idh['foo'] = 1
    @idh['foo'] = 2
    @idh.values.should == [1, 2]
    @idh.size.should == 2
  end
end
# Examples for the Hash#compare_by_identity? predicate.
describe "Hash#compare_by_identity?" do
  it "returns false by default" do
    h = {}
    h.compare_by_identity?.should be_false
  end

  it "returns true once #compare_by_identity has been invoked on self" do
    h = {}
    h.compare_by_identity
    h.compare_by_identity?.should be_true
  end

  it "returns true when called multiple times on the same ident hash" do
    h = {}
    h.compare_by_identity
    h.compare_by_identity?.should be_true
    h.compare_by_identity?.should be_true
    h.compare_by_identity?.should be_true
  end
end
| 23.992806 | 89 | 0.621289 |
FactoryBot.define do
  sequence(:repeater_name) { |n| "##{n} Repeater" }

  # Binda repeater attached to a selection field setting.
  factory :repeater, class: Binda::Repeater do
    association :field_setting, factory: :selection_setting
  end
end
| 19 | 57 | 0.732057 |
08925a6f4f6779a9fa2d1cded87bf578b03ecabd | 2,319 | #!/usr/bin/env ruby
require 'puppet'
require 'sqlite3'
require 'yaml'
# Open (or create) the local puppet.db SQLite database and ensure the
# schema exists. Returns the open SQLite3::Database handle.
def puppetdb
  connection = SQLite3::Database.open("puppet.db")
  [
    "CREATE TABLE IF NOT EXISTS hosts(id INTEGER PRIMARY KEY, host TEXT)",
    "CREATE TABLE IF NOT EXISTS resources(id INTEGER PRIMARY KEY, name TEXT, type TEXT)",
    "CREATE TABLE IF NOT EXISTS host_resources(host_id, resource_id)",
  ].each { |ddl| connection.execute(ddl) }
  connection
end
# Import one Puppet run report into the database: wipe the host's previous
# resource rows, then record each distinct class / defined-type seen in the
# containment paths of the report's resource statuses.
# NOTE(review): assumes YAML.load_file can materialize Puppet report objects
# because `puppet` is required at the top of the script — confirm.
def parse_report(db, host, report_path)
  puts "host: #{host}"
  host_id = get_host_id(db, host)
  # Replace (rather than append to) this host's previous snapshot.
  db.execute('DELETE FROM host_resources WHERE host_id = ?', host_id)
  seen = Set.new
  report_yaml = YAML.load_file(report_path)
  report_yaml.resource_statuses.each_pair do |resource, status|
    status.containment_path.reject{|path| path.start_with?('Packages::') }.each do |path|
      # Skip the implicit top-level stage and anything already recorded.
      next if path == 'Stage[main]' or seen.include?(path)
      seen.add(path)
      # Split "Type[title]" into its parts; a bare "Name" has no title.
      match = path.match(/(?<type>[^\[]+)\[?(?<title>[^\]]+)?\]?$/)
      # We have a defined type
      if match[:title]
        type = match[:type]
        title = match[:title]
      else
        # Bare names are Puppet classes.
        type = 'Class'
        title = match[:type]
      end
      puts "\t#{type}[#{title}]"
      resource_id = get_resource_id(db, title, type)
      db.execute('INSERT INTO host_resources(host_id, resource_id) VALUES(?, ?)', host_id, resource_id)
    end
  end
end
# Return the row id for +host+ in the hosts table, inserting a new row
# when the host has not been seen before.
def get_host_id(db, host)
  existing_id = db.get_first_value('SELECT id FROM hosts WHERE host = ?', host)
  return existing_id if existing_id

  db.execute('INSERT INTO hosts(host) VALUES(?)', host)
  db.last_insert_row_id
end
# Return the row id for the (name, type) resource pair, inserting a new
# row when this resource has not been recorded yet.
def get_resource_id(db, title, type)
  existing_id = db.get_first_value('SELECT id FROM resources WHERE name = ? and type = ?', title, type)
  return existing_id if existing_id

  db.execute('INSERT INTO resources(name, type) VALUES(?, ?)', title, type)
  db.last_insert_row_id
end
# Entry point: walk every host directory under Puppet's configured report
# directory, pick that host's newest report and import it.
def main
  db = puppetdb
  Puppet.initialize_settings
  reports_dir = Puppet['reportdir']
  Dir.each_child(reports_dir) do |host|
    host_dir = File.join(reports_dir, host)
    next unless File.directory?(host_dir)
    # Take the most recently written YAML report (by mtime).
    most_recent_report = Dir.glob("#{host_dir}/*.yaml").max_by {|f| File.mtime(f)}
    next unless most_recent_report
    puts "processing: #{most_recent_report}"
    parse_report(db, host, most_recent_report)
  end
end
main
| 31.337838 | 105 | 0.671841 |
2821e10ed7cc396391f7797ac943d1c546295cb8 | 1,018 | module RubyChem
# Lewis-structure bookkeeping for the fixed species list (CO2): tallies
# available vs. needed valence electrons and the count that must be shared.
class Lewis
  attr_accessor :structure
  Valence = {"O" => 6, "C" => 4}
  Species = [["C"], ["O", "2"]]

  def initialize
    @species = Species
    determine_configuration
  end

  private

  # Each atom wants a full octet (8 electrons); the deficit between needed
  # and available electrons is the number that must be shared in bonds.
  def determine_configuration
    @valence_electrons_available = valence_electrons_available(@species)
    @valence_electrons_needed = valence_electrons_needed_to_complete_shell(@species) * 8
    @shared_electrons = @valence_electrons_needed - @valence_electrons_available
  end

  # Sum the valence electrons contributed by each entry; an entry's element
  # count determines how many copies of its valence value are added.
  def valence_electrons_available(species)
    contributions = species.flat_map do |entry|
      copies = entry.count > 1 ? entry.count : 1
      Array.new(copies) { Valence[entry.first] }
    end
    contributions.inject { |total, value| total + value }
  end

  # Number of atoms (approximated by entry sizes) needing a filled shell.
  def valence_electrons_needed_to_complete_shell(species)
    species.map(&:count).inject { |total, value| total + value }
  end
end
end
| 24.829268 | 90 | 0.619843 |
87703285c277126a6e5ce5b433add466c7827cbb | 2,302 | require 'rails_helper'
RSpec.describe WelcomesController, type: :controller do
  let(:valid_attributes) {
    {':greeting'=>'Initial Greeting'}
  }

  let(:invalid_attributes) {
    {':greeting' => nil}
  }

  let(:valid_session) { {} }

  before do
    puts '========>'+' '+ 'Run before of FactoryBot for @welcome'
    @authorize_user = FactoryBot.create(:authorize_user)
    @welcome = FactoryBot.create(:welcome)
    @welcome.save
  end

  describe "GET #index success #{200}" do
    it "returns a success response" do
      sign_in(@authorize_user)
      # Bug fix: this example previously asserted on `response` without ever
      # issuing a request.
      get :index, params: {}, session: valid_session
      expect(response).to be_success
    end
  end

  describe "Sign_in GET #show" do
    it "returns a success response" do
      sign_in(@authorize_user)
      welcome = Welcome.create!(greeting: 'Greeting by FactoryBot')
      get :show, params: {id: welcome.to_param}, session: valid_session
      #expect(response).to be_success
      assert_response :success
      #assert_response :redirect #302 Sign_in
    end
  end

  describe "GET #new redirect to Sign_in #302" do
    it "returns a success response" do
      # Not signed in, so Devise redirects to the sign-in page.
      get :new, params: {}, session: valid_session
      assert_response :redirect #302 Sign_in
    end
  end

  describe "POST #create" do
    context "create welcome redirect to sign_in" do
      it "it redirect to users/sign_in" do
        post :create, params: {welcome: { greeting: "Lorem ipsum" }}, session: valid_session
        expect(response).to redirect_to("/users/sign_in")
      end
    end

    it "login create welcome expect success" do
      sign_in(@authorize_user)
      welcome = Welcome.create!(greeting: 'Greeting by FactoryBot')
      # NOTE(review): the POST below is commented out, so this example only
      # checks the idle response — confirm whether it should issue the post.
      #post :create, params: {welcome: { greeting: "Lorem ipsum" }}, session: valid_session
      expect(response).to be_success
    end
  end

  describe "DELETE #destroy" do
    it "can not delete then redirects to users/sign_in" do
      delete :destroy, params: {id: 3333}
      expect(response).to redirect_to("/users/sign_in")
    end
  end

  describe "Login user can delete" do
    it "then returns to welcomes" do
      sign_in(@authorize_user)
      puts '========>'+' '+ @welcome.to_param
      @welcome.reload
      delete :destroy, params: {id: @welcome.to_param}
      expect(response).to redirect_to(welcomes_url)
    end
  end
end
| 26.45977 | 93 | 0.652476 |
267366f06e723dd9d398ceb314549532c4efddbc | 36,872 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::CloudWatch
  # @api private
  # Machine-generated Seahorse API model for Amazon CloudWatch
  # (AWS "query" protocol, API version 2010-08-01). Generated by the
  # aws-sdk-ruby code generator — do not edit by hand.
  module ClientApi
    include Seahorse::Model
    # Shape registry: one Seahorse shape object per wire type used by the
    # CloudWatch API. Declared up front so the member-wiring section below
    # can reference them.
    ActionPrefix = Shapes::StringShape.new(name: 'ActionPrefix')
    ActionsEnabled = Shapes::BooleanShape.new(name: 'ActionsEnabled')
    AlarmArn = Shapes::StringShape.new(name: 'AlarmArn')
    AlarmDescription = Shapes::StringShape.new(name: 'AlarmDescription')
    AlarmHistoryItem = Shapes::StructureShape.new(name: 'AlarmHistoryItem')
    AlarmHistoryItems = Shapes::ListShape.new(name: 'AlarmHistoryItems')
    AlarmName = Shapes::StringShape.new(name: 'AlarmName')
    AlarmNamePrefix = Shapes::StringShape.new(name: 'AlarmNamePrefix')
    AlarmNames = Shapes::ListShape.new(name: 'AlarmNames')
    AwsQueryErrorMessage = Shapes::StringShape.new(name: 'AwsQueryErrorMessage')
    ComparisonOperator = Shapes::StringShape.new(name: 'ComparisonOperator')
    DashboardArn = Shapes::StringShape.new(name: 'DashboardArn')
    DashboardBody = Shapes::StringShape.new(name: 'DashboardBody')
    DashboardEntries = Shapes::ListShape.new(name: 'DashboardEntries')
    DashboardEntry = Shapes::StructureShape.new(name: 'DashboardEntry')
    DashboardErrorMessage = Shapes::StringShape.new(name: 'DashboardErrorMessage')
    DashboardInvalidInputError = Shapes::StructureShape.new(name: 'DashboardInvalidInputError')
    DashboardName = Shapes::StringShape.new(name: 'DashboardName')
    DashboardNamePrefix = Shapes::StringShape.new(name: 'DashboardNamePrefix')
    DashboardNames = Shapes::ListShape.new(name: 'DashboardNames')
    DashboardNotFoundError = Shapes::StructureShape.new(name: 'DashboardNotFoundError')
    DashboardValidationMessage = Shapes::StructureShape.new(name: 'DashboardValidationMessage')
    DashboardValidationMessages = Shapes::ListShape.new(name: 'DashboardValidationMessages')
    DataPath = Shapes::StringShape.new(name: 'DataPath')
    Datapoint = Shapes::StructureShape.new(name: 'Datapoint')
    DatapointValue = Shapes::FloatShape.new(name: 'DatapointValue')
    DatapointValueMap = Shapes::MapShape.new(name: 'DatapointValueMap')
    Datapoints = Shapes::ListShape.new(name: 'Datapoints')
    DeleteAlarmsInput = Shapes::StructureShape.new(name: 'DeleteAlarmsInput')
    DeleteDashboardsInput = Shapes::StructureShape.new(name: 'DeleteDashboardsInput')
    DeleteDashboardsOutput = Shapes::StructureShape.new(name: 'DeleteDashboardsOutput')
    DescribeAlarmHistoryInput = Shapes::StructureShape.new(name: 'DescribeAlarmHistoryInput')
    DescribeAlarmHistoryOutput = Shapes::StructureShape.new(name: 'DescribeAlarmHistoryOutput')
    DescribeAlarmsForMetricInput = Shapes::StructureShape.new(name: 'DescribeAlarmsForMetricInput')
    DescribeAlarmsForMetricOutput = Shapes::StructureShape.new(name: 'DescribeAlarmsForMetricOutput')
    DescribeAlarmsInput = Shapes::StructureShape.new(name: 'DescribeAlarmsInput')
    DescribeAlarmsOutput = Shapes::StructureShape.new(name: 'DescribeAlarmsOutput')
    Dimension = Shapes::StructureShape.new(name: 'Dimension')
    DimensionFilter = Shapes::StructureShape.new(name: 'DimensionFilter')
    DimensionFilters = Shapes::ListShape.new(name: 'DimensionFilters')
    DimensionName = Shapes::StringShape.new(name: 'DimensionName')
    DimensionValue = Shapes::StringShape.new(name: 'DimensionValue')
    Dimensions = Shapes::ListShape.new(name: 'Dimensions')
    DisableAlarmActionsInput = Shapes::StructureShape.new(name: 'DisableAlarmActionsInput')
    EnableAlarmActionsInput = Shapes::StructureShape.new(name: 'EnableAlarmActionsInput')
    ErrorMessage = Shapes::StringShape.new(name: 'ErrorMessage')
    EvaluateLowSampleCountPercentile = Shapes::StringShape.new(name: 'EvaluateLowSampleCountPercentile')
    EvaluationPeriods = Shapes::IntegerShape.new(name: 'EvaluationPeriods')
    ExtendedStatistic = Shapes::StringShape.new(name: 'ExtendedStatistic')
    ExtendedStatistics = Shapes::ListShape.new(name: 'ExtendedStatistics')
    FaultDescription = Shapes::StringShape.new(name: 'FaultDescription')
    GetDashboardInput = Shapes::StructureShape.new(name: 'GetDashboardInput')
    GetDashboardOutput = Shapes::StructureShape.new(name: 'GetDashboardOutput')
    GetMetricStatisticsInput = Shapes::StructureShape.new(name: 'GetMetricStatisticsInput')
    GetMetricStatisticsOutput = Shapes::StructureShape.new(name: 'GetMetricStatisticsOutput')
    HistoryData = Shapes::StringShape.new(name: 'HistoryData')
    HistoryItemType = Shapes::StringShape.new(name: 'HistoryItemType')
    HistorySummary = Shapes::StringShape.new(name: 'HistorySummary')
    InternalServiceFault = Shapes::StructureShape.new(name: 'InternalServiceFault')
    InvalidFormatFault = Shapes::StructureShape.new(name: 'InvalidFormatFault')
    InvalidNextToken = Shapes::StructureShape.new(name: 'InvalidNextToken')
    InvalidParameterCombinationException = Shapes::StructureShape.new(name: 'InvalidParameterCombinationException')
    InvalidParameterValueException = Shapes::StructureShape.new(name: 'InvalidParameterValueException')
    LastModified = Shapes::TimestampShape.new(name: 'LastModified')
    LimitExceededFault = Shapes::StructureShape.new(name: 'LimitExceededFault')
    ListDashboardsInput = Shapes::StructureShape.new(name: 'ListDashboardsInput')
    ListDashboardsOutput = Shapes::StructureShape.new(name: 'ListDashboardsOutput')
    ListMetricsInput = Shapes::StructureShape.new(name: 'ListMetricsInput')
    ListMetricsOutput = Shapes::StructureShape.new(name: 'ListMetricsOutput')
    MaxRecords = Shapes::IntegerShape.new(name: 'MaxRecords')
    Message = Shapes::StringShape.new(name: 'Message')
    Metric = Shapes::StructureShape.new(name: 'Metric')
    MetricAlarm = Shapes::StructureShape.new(name: 'MetricAlarm')
    MetricAlarms = Shapes::ListShape.new(name: 'MetricAlarms')
    MetricData = Shapes::ListShape.new(name: 'MetricData')
    MetricDatum = Shapes::StructureShape.new(name: 'MetricDatum')
    MetricLabel = Shapes::StringShape.new(name: 'MetricLabel')
    MetricName = Shapes::StringShape.new(name: 'MetricName')
    Metrics = Shapes::ListShape.new(name: 'Metrics')
    MissingRequiredParameterException = Shapes::StructureShape.new(name: 'MissingRequiredParameterException')
    Namespace = Shapes::StringShape.new(name: 'Namespace')
    NextToken = Shapes::StringShape.new(name: 'NextToken')
    Period = Shapes::IntegerShape.new(name: 'Period')
    PutDashboardInput = Shapes::StructureShape.new(name: 'PutDashboardInput')
    PutDashboardOutput = Shapes::StructureShape.new(name: 'PutDashboardOutput')
    PutMetricAlarmInput = Shapes::StructureShape.new(name: 'PutMetricAlarmInput')
    PutMetricDataInput = Shapes::StructureShape.new(name: 'PutMetricDataInput')
    ResourceList = Shapes::ListShape.new(name: 'ResourceList')
    ResourceName = Shapes::StringShape.new(name: 'ResourceName')
    ResourceNotFound = Shapes::StructureShape.new(name: 'ResourceNotFound')
    SetAlarmStateInput = Shapes::StructureShape.new(name: 'SetAlarmStateInput')
    Size = Shapes::IntegerShape.new(name: 'Size')
    StandardUnit = Shapes::StringShape.new(name: 'StandardUnit')
    StateReason = Shapes::StringShape.new(name: 'StateReason')
    StateReasonData = Shapes::StringShape.new(name: 'StateReasonData')
    StateValue = Shapes::StringShape.new(name: 'StateValue')
    Statistic = Shapes::StringShape.new(name: 'Statistic')
    StatisticSet = Shapes::StructureShape.new(name: 'StatisticSet')
    Statistics = Shapes::ListShape.new(name: 'Statistics')
    StorageResolution = Shapes::IntegerShape.new(name: 'StorageResolution')
    Threshold = Shapes::FloatShape.new(name: 'Threshold')
    Timestamp = Shapes::TimestampShape.new(name: 'Timestamp')
    TreatMissingData = Shapes::StringShape.new(name: 'TreatMissingData')
    # Composite-shape wiring: attach members (with their wire location
    # names), list/map element types, and the matching Types::* struct
    # class to each structure declared above.
    AlarmHistoryItem.add_member(:alarm_name, Shapes::ShapeRef.new(shape: AlarmName, location_name: "AlarmName"))
    AlarmHistoryItem.add_member(:timestamp, Shapes::ShapeRef.new(shape: Timestamp, location_name: "Timestamp"))
    AlarmHistoryItem.add_member(:history_item_type, Shapes::ShapeRef.new(shape: HistoryItemType, location_name: "HistoryItemType"))
    AlarmHistoryItem.add_member(:history_summary, Shapes::ShapeRef.new(shape: HistorySummary, location_name: "HistorySummary"))
    AlarmHistoryItem.add_member(:history_data, Shapes::ShapeRef.new(shape: HistoryData, location_name: "HistoryData"))
    AlarmHistoryItem.struct_class = Types::AlarmHistoryItem
    AlarmHistoryItems.member = Shapes::ShapeRef.new(shape: AlarmHistoryItem)
    AlarmNames.member = Shapes::ShapeRef.new(shape: AlarmName)
    DashboardEntries.member = Shapes::ShapeRef.new(shape: DashboardEntry)
    DashboardEntry.add_member(:dashboard_name, Shapes::ShapeRef.new(shape: DashboardName, location_name: "DashboardName"))
    DashboardEntry.add_member(:dashboard_arn, Shapes::ShapeRef.new(shape: DashboardArn, location_name: "DashboardArn"))
    DashboardEntry.add_member(:last_modified, Shapes::ShapeRef.new(shape: LastModified, location_name: "LastModified"))
    DashboardEntry.add_member(:size, Shapes::ShapeRef.new(shape: Size, location_name: "Size"))
    DashboardEntry.struct_class = Types::DashboardEntry
    DashboardNames.member = Shapes::ShapeRef.new(shape: DashboardName)
    DashboardValidationMessage.add_member(:data_path, Shapes::ShapeRef.new(shape: DataPath, location_name: "DataPath"))
    DashboardValidationMessage.add_member(:message, Shapes::ShapeRef.new(shape: Message, location_name: "Message"))
    DashboardValidationMessage.struct_class = Types::DashboardValidationMessage
    DashboardValidationMessages.member = Shapes::ShapeRef.new(shape: DashboardValidationMessage)
    Datapoint.add_member(:timestamp, Shapes::ShapeRef.new(shape: Timestamp, location_name: "Timestamp"))
    Datapoint.add_member(:sample_count, Shapes::ShapeRef.new(shape: DatapointValue, location_name: "SampleCount"))
    Datapoint.add_member(:average, Shapes::ShapeRef.new(shape: DatapointValue, location_name: "Average"))
    Datapoint.add_member(:sum, Shapes::ShapeRef.new(shape: DatapointValue, location_name: "Sum"))
    Datapoint.add_member(:minimum, Shapes::ShapeRef.new(shape: DatapointValue, location_name: "Minimum"))
    Datapoint.add_member(:maximum, Shapes::ShapeRef.new(shape: DatapointValue, location_name: "Maximum"))
    Datapoint.add_member(:unit, Shapes::ShapeRef.new(shape: StandardUnit, location_name: "Unit"))
    Datapoint.add_member(:extended_statistics, Shapes::ShapeRef.new(shape: DatapointValueMap, location_name: "ExtendedStatistics"))
    Datapoint.struct_class = Types::Datapoint
    DatapointValueMap.key = Shapes::ShapeRef.new(shape: ExtendedStatistic)
    DatapointValueMap.value = Shapes::ShapeRef.new(shape: DatapointValue)
    Datapoints.member = Shapes::ShapeRef.new(shape: Datapoint)
    DeleteAlarmsInput.add_member(:alarm_names, Shapes::ShapeRef.new(shape: AlarmNames, required: true, location_name: "AlarmNames"))
    DeleteAlarmsInput.struct_class = Types::DeleteAlarmsInput
    DeleteDashboardsInput.add_member(:dashboard_names, Shapes::ShapeRef.new(shape: DashboardNames, location_name: "DashboardNames"))
    DeleteDashboardsInput.struct_class = Types::DeleteDashboardsInput
    DeleteDashboardsOutput.struct_class = Types::DeleteDashboardsOutput
    DescribeAlarmHistoryInput.add_member(:alarm_name, Shapes::ShapeRef.new(shape: AlarmName, location_name: "AlarmName"))
    DescribeAlarmHistoryInput.add_member(:history_item_type, Shapes::ShapeRef.new(shape: HistoryItemType, location_name: "HistoryItemType"))
    DescribeAlarmHistoryInput.add_member(:start_date, Shapes::ShapeRef.new(shape: Timestamp, location_name: "StartDate"))
    DescribeAlarmHistoryInput.add_member(:end_date, Shapes::ShapeRef.new(shape: Timestamp, location_name: "EndDate"))
    DescribeAlarmHistoryInput.add_member(:max_records, Shapes::ShapeRef.new(shape: MaxRecords, location_name: "MaxRecords"))
    DescribeAlarmHistoryInput.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
    DescribeAlarmHistoryInput.struct_class = Types::DescribeAlarmHistoryInput
    DescribeAlarmHistoryOutput.add_member(:alarm_history_items, Shapes::ShapeRef.new(shape: AlarmHistoryItems, location_name: "AlarmHistoryItems"))
    DescribeAlarmHistoryOutput.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
    DescribeAlarmHistoryOutput.struct_class = Types::DescribeAlarmHistoryOutput
    DescribeAlarmsForMetricInput.add_member(:metric_name, Shapes::ShapeRef.new(shape: MetricName, required: true, location_name: "MetricName"))
    DescribeAlarmsForMetricInput.add_member(:namespace, Shapes::ShapeRef.new(shape: Namespace, required: true, location_name: "Namespace"))
    DescribeAlarmsForMetricInput.add_member(:statistic, Shapes::ShapeRef.new(shape: Statistic, location_name: "Statistic"))
    DescribeAlarmsForMetricInput.add_member(:extended_statistic, Shapes::ShapeRef.new(shape: ExtendedStatistic, location_name: "ExtendedStatistic"))
    DescribeAlarmsForMetricInput.add_member(:dimensions, Shapes::ShapeRef.new(shape: Dimensions, location_name: "Dimensions"))
    DescribeAlarmsForMetricInput.add_member(:period, Shapes::ShapeRef.new(shape: Period, location_name: "Period"))
    DescribeAlarmsForMetricInput.add_member(:unit, Shapes::ShapeRef.new(shape: StandardUnit, location_name: "Unit"))
    DescribeAlarmsForMetricInput.struct_class = Types::DescribeAlarmsForMetricInput
    DescribeAlarmsForMetricOutput.add_member(:metric_alarms, Shapes::ShapeRef.new(shape: MetricAlarms, location_name: "MetricAlarms"))
    DescribeAlarmsForMetricOutput.struct_class = Types::DescribeAlarmsForMetricOutput
    DescribeAlarmsInput.add_member(:alarm_names, Shapes::ShapeRef.new(shape: AlarmNames, location_name: "AlarmNames"))
    DescribeAlarmsInput.add_member(:alarm_name_prefix, Shapes::ShapeRef.new(shape: AlarmNamePrefix, location_name: "AlarmNamePrefix"))
    DescribeAlarmsInput.add_member(:state_value, Shapes::ShapeRef.new(shape: StateValue, location_name: "StateValue"))
    DescribeAlarmsInput.add_member(:action_prefix, Shapes::ShapeRef.new(shape: ActionPrefix, location_name: "ActionPrefix"))
    DescribeAlarmsInput.add_member(:max_records, Shapes::ShapeRef.new(shape: MaxRecords, location_name: "MaxRecords"))
    DescribeAlarmsInput.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
    DescribeAlarmsInput.struct_class = Types::DescribeAlarmsInput
    DescribeAlarmsOutput.add_member(:metric_alarms, Shapes::ShapeRef.new(shape: MetricAlarms, location_name: "MetricAlarms"))
    DescribeAlarmsOutput.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
    DescribeAlarmsOutput.struct_class = Types::DescribeAlarmsOutput
    Dimension.add_member(:name, Shapes::ShapeRef.new(shape: DimensionName, required: true, location_name: "Name"))
    Dimension.add_member(:value, Shapes::ShapeRef.new(shape: DimensionValue, required: true, location_name: "Value"))
    Dimension.struct_class = Types::Dimension
    DimensionFilter.add_member(:name, Shapes::ShapeRef.new(shape: DimensionName, required: true, location_name: "Name"))
    DimensionFilter.add_member(:value, Shapes::ShapeRef.new(shape: DimensionValue, location_name: "Value"))
    DimensionFilter.struct_class = Types::DimensionFilter
    DimensionFilters.member = Shapes::ShapeRef.new(shape: DimensionFilter)
    Dimensions.member = Shapes::ShapeRef.new(shape: Dimension)
    DisableAlarmActionsInput.add_member(:alarm_names, Shapes::ShapeRef.new(shape: AlarmNames, required: true, location_name: "AlarmNames"))
    DisableAlarmActionsInput.struct_class = Types::DisableAlarmActionsInput
    EnableAlarmActionsInput.add_member(:alarm_names, Shapes::ShapeRef.new(shape: AlarmNames, required: true, location_name: "AlarmNames"))
    EnableAlarmActionsInput.struct_class = Types::EnableAlarmActionsInput
    ExtendedStatistics.member = Shapes::ShapeRef.new(shape: ExtendedStatistic)
    GetDashboardInput.add_member(:dashboard_name, Shapes::ShapeRef.new(shape: DashboardName, location_name: "DashboardName"))
    GetDashboardInput.struct_class = Types::GetDashboardInput
    GetDashboardOutput.add_member(:dashboard_arn, Shapes::ShapeRef.new(shape: DashboardArn, location_name: "DashboardArn"))
    GetDashboardOutput.add_member(:dashboard_body, Shapes::ShapeRef.new(shape: DashboardBody, location_name: "DashboardBody"))
    GetDashboardOutput.add_member(:dashboard_name, Shapes::ShapeRef.new(shape: DashboardName, location_name: "DashboardName"))
    GetDashboardOutput.struct_class = Types::GetDashboardOutput
    GetMetricStatisticsInput.add_member(:namespace, Shapes::ShapeRef.new(shape: Namespace, required: true, location_name: "Namespace"))
    GetMetricStatisticsInput.add_member(:metric_name, Shapes::ShapeRef.new(shape: MetricName, required: true, location_name: "MetricName"))
    GetMetricStatisticsInput.add_member(:dimensions, Shapes::ShapeRef.new(shape: Dimensions, location_name: "Dimensions"))
    GetMetricStatisticsInput.add_member(:start_time, Shapes::ShapeRef.new(shape: Timestamp, required: true, location_name: "StartTime"))
    GetMetricStatisticsInput.add_member(:end_time, Shapes::ShapeRef.new(shape: Timestamp, required: true, location_name: "EndTime"))
    GetMetricStatisticsInput.add_member(:period, Shapes::ShapeRef.new(shape: Period, required: true, location_name: "Period"))
    GetMetricStatisticsInput.add_member(:statistics, Shapes::ShapeRef.new(shape: Statistics, location_name: "Statistics"))
    GetMetricStatisticsInput.add_member(:extended_statistics, Shapes::ShapeRef.new(shape: ExtendedStatistics, location_name: "ExtendedStatistics"))
    GetMetricStatisticsInput.add_member(:unit, Shapes::ShapeRef.new(shape: StandardUnit, location_name: "Unit"))
    GetMetricStatisticsInput.struct_class = Types::GetMetricStatisticsInput
    GetMetricStatisticsOutput.add_member(:label, Shapes::ShapeRef.new(shape: MetricLabel, location_name: "Label"))
    GetMetricStatisticsOutput.add_member(:datapoints, Shapes::ShapeRef.new(shape: Datapoints, location_name: "Datapoints"))
    GetMetricStatisticsOutput.struct_class = Types::GetMetricStatisticsOutput
    ListDashboardsInput.add_member(:dashboard_name_prefix, Shapes::ShapeRef.new(shape: DashboardNamePrefix, location_name: "DashboardNamePrefix"))
    ListDashboardsInput.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
    ListDashboardsInput.struct_class = Types::ListDashboardsInput
    ListDashboardsOutput.add_member(:dashboard_entries, Shapes::ShapeRef.new(shape: DashboardEntries, location_name: "DashboardEntries"))
    ListDashboardsOutput.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
    ListDashboardsOutput.struct_class = Types::ListDashboardsOutput
    ListMetricsInput.add_member(:namespace, Shapes::ShapeRef.new(shape: Namespace, location_name: "Namespace"))
    ListMetricsInput.add_member(:metric_name, Shapes::ShapeRef.new(shape: MetricName, location_name: "MetricName"))
    ListMetricsInput.add_member(:dimensions, Shapes::ShapeRef.new(shape: DimensionFilters, location_name: "Dimensions"))
    ListMetricsInput.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
    ListMetricsInput.struct_class = Types::ListMetricsInput
    ListMetricsOutput.add_member(:metrics, Shapes::ShapeRef.new(shape: Metrics, location_name: "Metrics"))
    ListMetricsOutput.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
    ListMetricsOutput.struct_class = Types::ListMetricsOutput
    Metric.add_member(:namespace, Shapes::ShapeRef.new(shape: Namespace, location_name: "Namespace"))
    Metric.add_member(:metric_name, Shapes::ShapeRef.new(shape: MetricName, location_name: "MetricName"))
    Metric.add_member(:dimensions, Shapes::ShapeRef.new(shape: Dimensions, location_name: "Dimensions"))
    Metric.struct_class = Types::Metric
    MetricAlarm.add_member(:alarm_name, Shapes::ShapeRef.new(shape: AlarmName, location_name: "AlarmName"))
    MetricAlarm.add_member(:alarm_arn, Shapes::ShapeRef.new(shape: AlarmArn, location_name: "AlarmArn"))
    MetricAlarm.add_member(:alarm_description, Shapes::ShapeRef.new(shape: AlarmDescription, location_name: "AlarmDescription"))
    MetricAlarm.add_member(:alarm_configuration_updated_timestamp, Shapes::ShapeRef.new(shape: Timestamp, location_name: "AlarmConfigurationUpdatedTimestamp"))
    MetricAlarm.add_member(:actions_enabled, Shapes::ShapeRef.new(shape: ActionsEnabled, location_name: "ActionsEnabled"))
    MetricAlarm.add_member(:ok_actions, Shapes::ShapeRef.new(shape: ResourceList, location_name: "OKActions"))
    MetricAlarm.add_member(:alarm_actions, Shapes::ShapeRef.new(shape: ResourceList, location_name: "AlarmActions"))
    MetricAlarm.add_member(:insufficient_data_actions, Shapes::ShapeRef.new(shape: ResourceList, location_name: "InsufficientDataActions"))
    MetricAlarm.add_member(:state_value, Shapes::ShapeRef.new(shape: StateValue, location_name: "StateValue"))
    MetricAlarm.add_member(:state_reason, Shapes::ShapeRef.new(shape: StateReason, location_name: "StateReason"))
    MetricAlarm.add_member(:state_reason_data, Shapes::ShapeRef.new(shape: StateReasonData, location_name: "StateReasonData"))
    MetricAlarm.add_member(:state_updated_timestamp, Shapes::ShapeRef.new(shape: Timestamp, location_name: "StateUpdatedTimestamp"))
    MetricAlarm.add_member(:metric_name, Shapes::ShapeRef.new(shape: MetricName, location_name: "MetricName"))
    MetricAlarm.add_member(:namespace, Shapes::ShapeRef.new(shape: Namespace, location_name: "Namespace"))
    MetricAlarm.add_member(:statistic, Shapes::ShapeRef.new(shape: Statistic, location_name: "Statistic"))
    MetricAlarm.add_member(:extended_statistic, Shapes::ShapeRef.new(shape: ExtendedStatistic, location_name: "ExtendedStatistic"))
    MetricAlarm.add_member(:dimensions, Shapes::ShapeRef.new(shape: Dimensions, location_name: "Dimensions"))
    MetricAlarm.add_member(:period, Shapes::ShapeRef.new(shape: Period, location_name: "Period"))
    MetricAlarm.add_member(:unit, Shapes::ShapeRef.new(shape: StandardUnit, location_name: "Unit"))
    MetricAlarm.add_member(:evaluation_periods, Shapes::ShapeRef.new(shape: EvaluationPeriods, location_name: "EvaluationPeriods"))
    MetricAlarm.add_member(:threshold, Shapes::ShapeRef.new(shape: Threshold, location_name: "Threshold"))
    MetricAlarm.add_member(:comparison_operator, Shapes::ShapeRef.new(shape: ComparisonOperator, location_name: "ComparisonOperator"))
    MetricAlarm.add_member(:treat_missing_data, Shapes::ShapeRef.new(shape: TreatMissingData, location_name: "TreatMissingData"))
    MetricAlarm.add_member(:evaluate_low_sample_count_percentile, Shapes::ShapeRef.new(shape: EvaluateLowSampleCountPercentile, location_name: "EvaluateLowSampleCountPercentile"))
    MetricAlarm.struct_class = Types::MetricAlarm
    MetricAlarms.member = Shapes::ShapeRef.new(shape: MetricAlarm)
    MetricData.member = Shapes::ShapeRef.new(shape: MetricDatum)
    MetricDatum.add_member(:metric_name, Shapes::ShapeRef.new(shape: MetricName, required: true, location_name: "MetricName"))
    MetricDatum.add_member(:dimensions, Shapes::ShapeRef.new(shape: Dimensions, location_name: "Dimensions"))
    MetricDatum.add_member(:timestamp, Shapes::ShapeRef.new(shape: Timestamp, location_name: "Timestamp"))
    MetricDatum.add_member(:value, Shapes::ShapeRef.new(shape: DatapointValue, location_name: "Value"))
    MetricDatum.add_member(:statistic_values, Shapes::ShapeRef.new(shape: StatisticSet, location_name: "StatisticValues"))
    MetricDatum.add_member(:unit, Shapes::ShapeRef.new(shape: StandardUnit, location_name: "Unit"))
    MetricDatum.add_member(:storage_resolution, Shapes::ShapeRef.new(shape: StorageResolution, location_name: "StorageResolution"))
    MetricDatum.struct_class = Types::MetricDatum
    Metrics.member = Shapes::ShapeRef.new(shape: Metric)
    PutDashboardInput.add_member(:dashboard_name, Shapes::ShapeRef.new(shape: DashboardName, location_name: "DashboardName"))
    PutDashboardInput.add_member(:dashboard_body, Shapes::ShapeRef.new(shape: DashboardBody, location_name: "DashboardBody"))
    PutDashboardInput.struct_class = Types::PutDashboardInput
    PutDashboardOutput.add_member(:dashboard_validation_messages, Shapes::ShapeRef.new(shape: DashboardValidationMessages, location_name: "DashboardValidationMessages"))
    PutDashboardOutput.struct_class = Types::PutDashboardOutput
    PutMetricAlarmInput.add_member(:alarm_name, Shapes::ShapeRef.new(shape: AlarmName, required: true, location_name: "AlarmName"))
    PutMetricAlarmInput.add_member(:alarm_description, Shapes::ShapeRef.new(shape: AlarmDescription, location_name: "AlarmDescription"))
    PutMetricAlarmInput.add_member(:actions_enabled, Shapes::ShapeRef.new(shape: ActionsEnabled, location_name: "ActionsEnabled"))
    PutMetricAlarmInput.add_member(:ok_actions, Shapes::ShapeRef.new(shape: ResourceList, location_name: "OKActions"))
    PutMetricAlarmInput.add_member(:alarm_actions, Shapes::ShapeRef.new(shape: ResourceList, location_name: "AlarmActions"))
    PutMetricAlarmInput.add_member(:insufficient_data_actions, Shapes::ShapeRef.new(shape: ResourceList, location_name: "InsufficientDataActions"))
    PutMetricAlarmInput.add_member(:metric_name, Shapes::ShapeRef.new(shape: MetricName, required: true, location_name: "MetricName"))
    PutMetricAlarmInput.add_member(:namespace, Shapes::ShapeRef.new(shape: Namespace, required: true, location_name: "Namespace"))
    PutMetricAlarmInput.add_member(:statistic, Shapes::ShapeRef.new(shape: Statistic, location_name: "Statistic"))
    PutMetricAlarmInput.add_member(:extended_statistic, Shapes::ShapeRef.new(shape: ExtendedStatistic, location_name: "ExtendedStatistic"))
    PutMetricAlarmInput.add_member(:dimensions, Shapes::ShapeRef.new(shape: Dimensions, location_name: "Dimensions"))
    PutMetricAlarmInput.add_member(:period, Shapes::ShapeRef.new(shape: Period, required: true, location_name: "Period"))
    PutMetricAlarmInput.add_member(:unit, Shapes::ShapeRef.new(shape: StandardUnit, location_name: "Unit"))
    PutMetricAlarmInput.add_member(:evaluation_periods, Shapes::ShapeRef.new(shape: EvaluationPeriods, required: true, location_name: "EvaluationPeriods"))
    PutMetricAlarmInput.add_member(:threshold, Shapes::ShapeRef.new(shape: Threshold, required: true, location_name: "Threshold"))
    PutMetricAlarmInput.add_member(:comparison_operator, Shapes::ShapeRef.new(shape: ComparisonOperator, required: true, location_name: "ComparisonOperator"))
    PutMetricAlarmInput.add_member(:treat_missing_data, Shapes::ShapeRef.new(shape: TreatMissingData, location_name: "TreatMissingData"))
    PutMetricAlarmInput.add_member(:evaluate_low_sample_count_percentile, Shapes::ShapeRef.new(shape: EvaluateLowSampleCountPercentile, location_name: "EvaluateLowSampleCountPercentile"))
    PutMetricAlarmInput.struct_class = Types::PutMetricAlarmInput
    PutMetricDataInput.add_member(:namespace, Shapes::ShapeRef.new(shape: Namespace, required: true, location_name: "Namespace"))
    PutMetricDataInput.add_member(:metric_data, Shapes::ShapeRef.new(shape: MetricData, required: true, location_name: "MetricData"))
    PutMetricDataInput.struct_class = Types::PutMetricDataInput
    ResourceList.member = Shapes::ShapeRef.new(shape: ResourceName)
    SetAlarmStateInput.add_member(:alarm_name, Shapes::ShapeRef.new(shape: AlarmName, required: true, location_name: "AlarmName"))
    SetAlarmStateInput.add_member(:state_value, Shapes::ShapeRef.new(shape: StateValue, required: true, location_name: "StateValue"))
    SetAlarmStateInput.add_member(:state_reason, Shapes::ShapeRef.new(shape: StateReason, required: true, location_name: "StateReason"))
    SetAlarmStateInput.add_member(:state_reason_data, Shapes::ShapeRef.new(shape: StateReasonData, location_name: "StateReasonData"))
    SetAlarmStateInput.struct_class = Types::SetAlarmStateInput
    StatisticSet.add_member(:sample_count, Shapes::ShapeRef.new(shape: DatapointValue, required: true, location_name: "SampleCount"))
    StatisticSet.add_member(:sum, Shapes::ShapeRef.new(shape: DatapointValue, required: true, location_name: "Sum"))
    StatisticSet.add_member(:minimum, Shapes::ShapeRef.new(shape: DatapointValue, required: true, location_name: "Minimum"))
    StatisticSet.add_member(:maximum, Shapes::ShapeRef.new(shape: DatapointValue, required: true, location_name: "Maximum"))
    StatisticSet.struct_class = Types::StatisticSet
    Statistics.member = Shapes::ShapeRef.new(shape: Statistic)
    # Service model consumed by Aws::CloudWatch::Client: endpoint/protocol
    # metadata plus one operation entry (HTTP binding, input/output shapes,
    # declared errors, optional pagination config) per CloudWatch action.
    # @api private
    API = Seahorse::Model::Api.new.tap do |api|
      api.version = "2010-08-01"
      api.metadata = {
        "endpointPrefix" => "monitoring",
        "protocol" => "query",
        "serviceFullName" => "Amazon CloudWatch",
        "signatureVersion" => "v4",
        "xmlNamespace" => "http://monitoring.amazonaws.com/doc/2010-08-01/",
      }
      api.add_operation(:delete_alarms, Seahorse::Model::Operation.new.tap do |o|
        o.name = "DeleteAlarms"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: DeleteAlarmsInput)
        o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
        o.errors << Shapes::ShapeRef.new(shape: ResourceNotFound)
      end)
      api.add_operation(:delete_dashboards, Seahorse::Model::Operation.new.tap do |o|
        o.name = "DeleteDashboards"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: DeleteDashboardsInput)
        o.output = Shapes::ShapeRef.new(shape: DeleteDashboardsOutput)
        o.errors << Shapes::ShapeRef.new(shape: InvalidParameterValueException)
        o.errors << Shapes::ShapeRef.new(shape: DashboardNotFoundError)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceFault)
      end)
      api.add_operation(:describe_alarm_history, Seahorse::Model::Operation.new.tap do |o|
        o.name = "DescribeAlarmHistory"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: DescribeAlarmHistoryInput)
        o.output = Shapes::ShapeRef.new(shape: DescribeAlarmHistoryOutput)
        o.errors << Shapes::ShapeRef.new(shape: InvalidNextToken)
        o[:pager] = Aws::Pager.new(
          limit_key: "max_records",
          tokens: {
            "next_token" => "next_token"
          }
        )
      end)
      api.add_operation(:describe_alarms, Seahorse::Model::Operation.new.tap do |o|
        o.name = "DescribeAlarms"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: DescribeAlarmsInput)
        o.output = Shapes::ShapeRef.new(shape: DescribeAlarmsOutput)
        o.errors << Shapes::ShapeRef.new(shape: InvalidNextToken)
        o[:pager] = Aws::Pager.new(
          limit_key: "max_records",
          tokens: {
            "next_token" => "next_token"
          }
        )
      end)
      api.add_operation(:describe_alarms_for_metric, Seahorse::Model::Operation.new.tap do |o|
        o.name = "DescribeAlarmsForMetric"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: DescribeAlarmsForMetricInput)
        o.output = Shapes::ShapeRef.new(shape: DescribeAlarmsForMetricOutput)
      end)
      api.add_operation(:disable_alarm_actions, Seahorse::Model::Operation.new.tap do |o|
        o.name = "DisableAlarmActions"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: DisableAlarmActionsInput)
        o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
      end)
      api.add_operation(:enable_alarm_actions, Seahorse::Model::Operation.new.tap do |o|
        o.name = "EnableAlarmActions"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: EnableAlarmActionsInput)
        o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
      end)
      api.add_operation(:get_dashboard, Seahorse::Model::Operation.new.tap do |o|
        o.name = "GetDashboard"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: GetDashboardInput)
        o.output = Shapes::ShapeRef.new(shape: GetDashboardOutput)
        o.errors << Shapes::ShapeRef.new(shape: InvalidParameterValueException)
        o.errors << Shapes::ShapeRef.new(shape: DashboardNotFoundError)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceFault)
      end)
      api.add_operation(:get_metric_statistics, Seahorse::Model::Operation.new.tap do |o|
        o.name = "GetMetricStatistics"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: GetMetricStatisticsInput)
        o.output = Shapes::ShapeRef.new(shape: GetMetricStatisticsOutput)
        o.errors << Shapes::ShapeRef.new(shape: InvalidParameterValueException)
        o.errors << Shapes::ShapeRef.new(shape: MissingRequiredParameterException)
        o.errors << Shapes::ShapeRef.new(shape: InvalidParameterCombinationException)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceFault)
      end)
      api.add_operation(:list_dashboards, Seahorse::Model::Operation.new.tap do |o|
        o.name = "ListDashboards"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: ListDashboardsInput)
        o.output = Shapes::ShapeRef.new(shape: ListDashboardsOutput)
        o.errors << Shapes::ShapeRef.new(shape: InvalidParameterValueException)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceFault)
      end)
      api.add_operation(:list_metrics, Seahorse::Model::Operation.new.tap do |o|
        o.name = "ListMetrics"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: ListMetricsInput)
        o.output = Shapes::ShapeRef.new(shape: ListMetricsOutput)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceFault)
        o.errors << Shapes::ShapeRef.new(shape: InvalidParameterValueException)
        o[:pager] = Aws::Pager.new(
          tokens: {
            "next_token" => "next_token"
          }
        )
      end)
      api.add_operation(:put_dashboard, Seahorse::Model::Operation.new.tap do |o|
        o.name = "PutDashboard"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: PutDashboardInput)
        o.output = Shapes::ShapeRef.new(shape: PutDashboardOutput)
        o.errors << Shapes::ShapeRef.new(shape: DashboardInvalidInputError)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceFault)
      end)
      api.add_operation(:put_metric_alarm, Seahorse::Model::Operation.new.tap do |o|
        o.name = "PutMetricAlarm"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: PutMetricAlarmInput)
        o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
        o.errors << Shapes::ShapeRef.new(shape: LimitExceededFault)
      end)
      api.add_operation(:put_metric_data, Seahorse::Model::Operation.new.tap do |o|
        o.name = "PutMetricData"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: PutMetricDataInput)
        o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
        o.errors << Shapes::ShapeRef.new(shape: InvalidParameterValueException)
        o.errors << Shapes::ShapeRef.new(shape: MissingRequiredParameterException)
        o.errors << Shapes::ShapeRef.new(shape: InvalidParameterCombinationException)
        o.errors << Shapes::ShapeRef.new(shape: InternalServiceFault)
      end)
      api.add_operation(:set_alarm_state, Seahorse::Model::Operation.new.tap do |o|
        o.name = "SetAlarmState"
        o.http_method = "POST"
        o.http_request_uri = "/"
        o.input = Shapes::ShapeRef.new(shape: SetAlarmStateInput)
        o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
        o.errors << Shapes::ShapeRef.new(shape: ResourceNotFound)
        o.errors << Shapes::ShapeRef.new(shape: InvalidFormatFault)
      end)
    end
  end
end
| 68.535316 | 187 | 0.761391 |
ed1650130c1021e737fcd28a21adf7870bc08012 | 294 | # frozen_string_literal: true
FactoryBot.define do
  # Builds a DataWaitForOpenChannel channel-state message wrapped in
  # FactoryBotWrapper (FactoryBot cannot instantiate the value type directly).
  factory :data_wait_for_open_channel, class: 'FactoryBotWrapper' do
    init_fundee { build(:input_init_fundee).get }
    initialize_with do
      new(Lightning::Channel::Messages::DataWaitForOpenChannel[init_fundee, ''])
    end
  end
end
| 26.727273 | 80 | 0.755102 |
abcc7772bf40e4178c52dddea2079d628a8782ee | 197 | module Poly
# Background job that purges a single trash record.
class CleanTrashJob < ApplicationJob
  queue_as :default

  # Destroys the given trash item. (The original comment claimed the parent
  # record is deleted too; only `trash.destroy` is called here — any cascade
  # happens through the model's own destroy callbacks/associations.)
  #
  # trash - the record to remove.
  def perform(trash)
    trash.destroy
  end
end
end
| 17.909091 | 61 | 0.705584 |
b9dac4267053660a150f2529fd12823772b82fa5 | 608 | module IESD
class DMG
  # {Install OS X.dmg}[rdoc-ref:IESD::DMG::InstallOSX]
  #
  # The installer dmg for OS X Mavericks and later.
  # It contains an {Install OS X.app}[rdoc-ref:IESD::APP::InstallOSX].
  # It's generated by `createinstallmedia`, a tool that ships with the installer app.
  class InstallOSX < HDIUtil::DMG
    # Export to a new DMG.
    #
    # Mounts this image, locates the first .app bundle at the mount point,
    # and delegates the export to the app-level exporter.
    #
    # options - The Dictionary of the export options
    def export options
      show { |mountpoint|
        # Dir[...][0]: assumes exactly one .app at the image root —
        # TODO confirm for images carrying more than one bundle.
        IESD::APP::InstallOSX.new(Dir[File.join(mountpoint, "*.app")][0]).export options
      }
    end
  end
end
end
| 27.636364 | 90 | 0.625 |
acf9d2c20cb65829c960bd408fd02ec581f2ed0f | 1,554 | # frozen_string_literal: true
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Auto-generated by gapic-generator-ruby. DO NOT EDIT!

# Documentation sample: list Vertex AI Metadata artifacts synchronously.
# The region tags below mark the span embedded in the published docs.
# [START aiplatform_v1_generated_MetadataService_ListArtifacts_sync]
require "google/cloud/ai_platform/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::AIPlatform::V1::MetadataService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
# NOTE(review): the request is left empty here; a real call needs its
# required fields (e.g. the parent resource) filled in — confirm.
request = Google::Cloud::AIPlatform::V1::ListArtifactsRequest.new

# Call the list_artifacts method.
result = client.list_artifacts request

# The returned object is of type Gapic::PagedEnumerable. You can
# iterate over all elements by calling #each, and the enumerable
# will lazily make API calls to fetch subsequent pages. Other
# methods are also available for managing paging directly.
result.each do |response|
  # Each element is of type ::Google::Cloud::AIPlatform::V1::Artifact.
  p response
end
# [END aiplatform_v1_generated_MetadataService_ListArtifacts_sync]
| 38.85 | 74 | 0.78314 |
ac11ce6906abedb4b2989b1186c5ebb332d293fb | 1,985 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# Source: google/ads/googleads/v6/services/ad_parameter_service.proto for package 'Google.Ads.GoogleAds.V6.Services'
# Original file comments:
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

require 'grpc'
require 'google/ads/google_ads/v6/services/ad_parameter_service_pb'

module Google
  module Ads
    module GoogleAds
      module V6
        module Services
          module AdParameterService
            # Proto file describing the Ad Parameter service.
            #
            # Service to manage ad parameters.
            class Service

              include GRPC::GenericService

              # Wire (de)serialization hooks gRPC uses on the request/response protos.
              self.marshal_class_method = :encode
              self.unmarshal_class_method = :decode
              # Fully-qualified service name as declared in the .proto file.
              self.service_name = 'google.ads.googleads.v6.services.AdParameterService'

              # Returns the requested ad parameter in full detail.
              rpc :GetAdParameter, ::Google::Ads::GoogleAds::V6::Services::GetAdParameterRequest, ::Google::Ads::GoogleAds::V6::Resources::AdParameter
              # Creates, updates, or removes ad parameters. Operation statuses are
              # returned.
              rpc :MutateAdParameters, ::Google::Ads::GoogleAds::V6::Services::MutateAdParametersRequest, ::Google::Ads::GoogleAds::V6::Services::MutateAdParametersResponse
            end

            # Concrete client stub class derived from the rpc declarations above.
            Stub = Service.rpc_stub_class
          end
        end
      end
    end
  end
end
| 37.45283 | 172 | 0.687657 |
5d6e92a8e87d7f8d25f26c60ab326ea3b04612b9 | 190 | class CreateCourses < ActiveRecord::Migration[5.1]
# Creates the courses table: a display name plus an enrollment counter.
def change
  create_table :courses do |table|
    table.string  :name       # human-readable course title
    table.integer :enrollment # number of enrolled students
    table.timestamps          # created_at / updated_at
  end
end
end
| 17.272727 | 50 | 0.636842 |
5d177b10f6e169b7be487796662dd4fea1ce91a7 | 158 | require File.expand_path('../../../../spec_helper', __FILE__)
# Placeholder spec: an `it` with a description but no block is reported as
# pending / not-yet-implemented by the spec runner.
describe "Gem::Specification#inspect" do
  it "needs to be reviewed for spec completeness"
end
| 26.333333 | 61 | 0.727848 |
08d30c3ec69de32f265cc9149aa30444b3ba3ba6 | 1,036 | ENV.delete_if { |name, _| name.start_with?('GIT') }
require 'bundler'
begin
  # Load only the gem groups the cucumber suite needs.
  Bundler.setup(:default, :xzibit, :test)
rescue Bundler::BundlerError => e
  $stderr.puts e.message
  $stderr.puts 'Run `bundle install` to install missing gems'
  exit e.status_code
end

require 'jeweler'
require 'mocha'
require 'mocha/api'

# Mix the Mocha mocking API into every step-definition World.
World(Mocha::API)

# Mirror Mocha's per-test lifecycle around each scenario.
Before do
  mocha_setup
end

After do
  begin
    mocha_verify
  ensure
    # Always tear down stubs, even when verification fails.
    mocha_teardown
  end
end

require 'output_catcher'
require 'timecop'
require 'active_support'
require 'active_support/core_ext/object/blank'

# Plain assert_* helpers available inside step definitions.
require 'test/unit/assertions'
World(Test::Unit::Assertions)

# Per-scenario scratch directories via TestConstruct.
require 'test_construct'
World(TestConstruct::Helpers)
# Extracts the block parameter name and block body of a
# `<Task>.new(...)? do |param| ... end` stanza from a Rakefile source string.
#
# Returns [block_param, block_body], or nil when no stanza matches.
def yank_task_info(content, task)
  stanza = /#{Regexp.escape(task)}.new(\(.*\))? do \|(.*?)\|(.*?)^end$/m
  match = stanza.match(content)
  [match[2], match[3]] if match
end
# Returns the body of a `group :<name> do ... end` Gemfile stanza, or nil
# when the group is not present.
def yank_group_info(content, group)
  match = /group :#{group} do(.*?)end/m.match(content)
  match && match[1]
end
# Absolute path to the shared test fixtures directory, two levels above
# the directory containing this file.
def fixture_dir
  File.expand_path(File.join('..', '..', 'test', 'fixtures'), File.dirname(__FILE__))
end
# Undo any Timecop time travel after every scenario.
After do
  Timecop.return
end
| 18.5 | 84 | 0.704633 |
1a04d39d784a639ae9a0919bfea6d5e87b3dd2fa | 26,024 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ApiManagement::Mgmt::V2017_03_01
#
# ApiManagement Client
#
class ApiIssueComment
include MsRestAzure
#
# Creates and initializes a new instance of the ApiIssueComment class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [ApiManagementClient] reference to the ApiManagementClient
attr_reader :client
#
# Gets the details of the issue Comment for an API specified by its identifier.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param api_id [String] API identifier. Must be unique in the current API
# Management service instance.
# @param issue_id [String] Issue identifier. Must be unique in the current API
# Management service instance.
# @param comment_id [String] Comment identifier within an Issue. Must be unique
# in the current Issue.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [IssueCommentContract] operation results.
#
def get(resource_group_name, service_name, api_id, issue_id, comment_id, custom_headers:nil)
  # Blocking wrapper around get_async: `value!` waits on the promise and
  # re-raises any request failure.
  response = get_async(resource_group_name, service_name, api_id, issue_id, comment_id, custom_headers:custom_headers).value!
  # Surface only the deserialized IssueCommentContract payload.
  response.body unless response.nil?
end
#
# Gets the details of the issue Comment for an API specified by its identifier.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param api_id [String] API identifier. Must be unique in the current API
# Management service instance.
# @param issue_id [String] Issue identifier. Must be unique in the current API
# Management service instance.
# @param comment_id [String] Comment identifier within an Issue. Must be unique
# in the current Issue.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_with_http_info(resource_group_name, service_name, api_id, issue_id, comment_id, custom_headers:nil)
get_async(resource_group_name, service_name, api_id, issue_id, comment_id, custom_headers:custom_headers).value!
end
#
# Gets the details of the issue Comment for an API specified by its identifier.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param api_id [String] API identifier. Must be unique in the current API
# Management service instance.
# @param issue_id [String] Issue identifier. Must be unique in the current API
# Management service instance.
# @param comment_id [String] Comment identifier within an Issue. Must be unique
# in the current Issue.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_async(resource_group_name, service_name, api_id, issue_id, comment_id, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, "'service_name' should satisfy the constraint - 'MaxLength': '50'" if !service_name.nil? && service_name.length > 50
fail ArgumentError, "'service_name' should satisfy the constraint - 'MinLength': '1'" if !service_name.nil? && service_name.length < 1
fail ArgumentError, "'service_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'" if !service_name.nil? && service_name.match(Regexp.new('^^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$$')).nil?
fail ArgumentError, 'api_id is nil' if api_id.nil?
fail ArgumentError, "'api_id' should satisfy the constraint - 'MaxLength': '256'" if !api_id.nil? && api_id.length > 256
fail ArgumentError, "'api_id' should satisfy the constraint - 'MinLength': '1'" if !api_id.nil? && api_id.length < 1
fail ArgumentError, "'api_id' should satisfy the constraint - 'Pattern': '^[^*#&+:<>?]+$'" if !api_id.nil? && api_id.match(Regexp.new('^^[^*#&+:<>?]+$$')).nil?
fail ArgumentError, 'issue_id is nil' if issue_id.nil?
fail ArgumentError, "'issue_id' should satisfy the constraint - 'MaxLength': '256'" if !issue_id.nil? && issue_id.length > 256
fail ArgumentError, "'issue_id' should satisfy the constraint - 'MinLength': '1'" if !issue_id.nil? && issue_id.length < 1
fail ArgumentError, "'issue_id' should satisfy the constraint - 'Pattern': '^[^*#&+:<>?]+$'" if !issue_id.nil? && issue_id.match(Regexp.new('^^[^*#&+:<>?]+$$')).nil?
fail ArgumentError, 'comment_id is nil' if comment_id.nil?
fail ArgumentError, "'comment_id' should satisfy the constraint - 'MaxLength': '256'" if !comment_id.nil? && comment_id.length > 256
fail ArgumentError, "'comment_id' should satisfy the constraint - 'MinLength': '1'" if !comment_id.nil? && comment_id.length < 1
fail ArgumentError, "'comment_id' should satisfy the constraint - 'Pattern': '^[^*#&+:<>?]+$'" if !comment_id.nil? && comment_id.match(Regexp.new('^^[^*#&+:<>?]+$$')).nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/issues/{issueId}/comments/{commentId}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'serviceName' => service_name,'apiId' => api_id,'issueId' => issue_id,'commentId' => comment_id,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::ApiManagement::Mgmt::V2017_03_01::Models::IssueCommentContract.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Creates a new Comment for the Issue in an API or updates an existing one.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param api_id [String] API identifier. Must be unique in the current API
# Management service instance.
# @param issue_id [String] Issue identifier. Must be unique in the current API
# Management service instance.
# @param comment_id [String] Comment identifier within an Issue. Must be unique
# in the current Issue.
# @param parameters [IssueCommentContract] Create parameters.
# @param if_match [String] ETag of the Issue Entity. ETag should match the
# current entity state from the header response of the GET request or it should
# be * for unconditional update.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [IssueCommentContract] operation results.
#
def create_or_update(resource_group_name, service_name, api_id, issue_id, comment_id, parameters, if_match:nil, custom_headers:nil)
  # Blocking wrapper around create_or_update_async: `value!` waits on the
  # promise and re-raises any request failure.
  response = create_or_update_async(resource_group_name, service_name, api_id, issue_id, comment_id, parameters, if_match:if_match, custom_headers:custom_headers).value!
  # Surface only the deserialized IssueCommentContract payload.
  response.body unless response.nil?
end
#
# Creates a new Comment for the Issue in an API or updates an existing one.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param api_id [String] API identifier. Must be unique in the current API
# Management service instance.
# @param issue_id [String] Issue identifier. Must be unique in the current API
# Management service instance.
# @param comment_id [String] Comment identifier within an Issue. Must be unique
# in the current Issue.
# @param parameters [IssueCommentContract] Create parameters.
# @param if_match [String] ETag of the Issue Entity. ETag should match the
# current entity state from the header response of the GET request or it should
# be * for unconditional update.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def create_or_update_with_http_info(resource_group_name, service_name, api_id, issue_id, comment_id, parameters, if_match:nil, custom_headers:nil)
create_or_update_async(resource_group_name, service_name, api_id, issue_id, comment_id, parameters, if_match:if_match, custom_headers:custom_headers).value!
end
#
# Creates a new Comment for the Issue in an API or updates an existing one.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param api_id [String] API identifier. Must be unique in the current API
# Management service instance.
# @param issue_id [String] Issue identifier. Must be unique in the current API
# Management service instance.
# @param comment_id [String] Comment identifier within an Issue. Must be unique
# in the current Issue.
# @param parameters [IssueCommentContract] Create parameters.
# @param if_match [String] ETag of the Issue Entity. ETag should match the
# current entity state from the header response of the GET request or it should
# be * for unconditional update.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def create_or_update_async(resource_group_name, service_name, api_id, issue_id, comment_id, parameters, if_match:nil, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, "'service_name' should satisfy the constraint - 'MaxLength': '50'" if !service_name.nil? && service_name.length > 50
fail ArgumentError, "'service_name' should satisfy the constraint - 'MinLength': '1'" if !service_name.nil? && service_name.length < 1
fail ArgumentError, "'service_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'" if !service_name.nil? && service_name.match(Regexp.new('^^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$$')).nil?
fail ArgumentError, 'api_id is nil' if api_id.nil?
fail ArgumentError, "'api_id' should satisfy the constraint - 'MaxLength': '256'" if !api_id.nil? && api_id.length > 256
fail ArgumentError, "'api_id' should satisfy the constraint - 'MinLength': '1'" if !api_id.nil? && api_id.length < 1
fail ArgumentError, "'api_id' should satisfy the constraint - 'Pattern': '^[^*#&+:<>?]+$'" if !api_id.nil? && api_id.match(Regexp.new('^^[^*#&+:<>?]+$$')).nil?
fail ArgumentError, 'issue_id is nil' if issue_id.nil?
fail ArgumentError, "'issue_id' should satisfy the constraint - 'MaxLength': '256'" if !issue_id.nil? && issue_id.length > 256
fail ArgumentError, "'issue_id' should satisfy the constraint - 'MinLength': '1'" if !issue_id.nil? && issue_id.length < 1
fail ArgumentError, "'issue_id' should satisfy the constraint - 'Pattern': '^[^*#&+:<>?]+$'" if !issue_id.nil? && issue_id.match(Regexp.new('^^[^*#&+:<>?]+$$')).nil?
fail ArgumentError, 'comment_id is nil' if comment_id.nil?
fail ArgumentError, "'comment_id' should satisfy the constraint - 'MaxLength': '256'" if !comment_id.nil? && comment_id.length > 256
fail ArgumentError, "'comment_id' should satisfy the constraint - 'MinLength': '1'" if !comment_id.nil? && comment_id.length < 1
fail ArgumentError, "'comment_id' should satisfy the constraint - 'Pattern': '^[^*#&+:<>?]+$'" if !comment_id.nil? && comment_id.match(Regexp.new('^^[^*#&+:<>?]+$$')).nil?
fail ArgumentError, 'parameters is nil' if parameters.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['If-Match'] = if_match unless if_match.nil?
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::ApiManagement::Mgmt::V2017_03_01::Models::IssueCommentContract.mapper()
request_content = @client.serialize(request_mapper, parameters)
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/issues/{issueId}/comments/{commentId}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'serviceName' => service_name,'apiId' => api_id,'issueId' => issue_id,'commentId' => comment_id,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:put, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 201 || status_code == 200
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 201
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::ApiManagement::Mgmt::V2017_03_01::Models::IssueCommentContract.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::ApiManagement::Mgmt::V2017_03_01::Models::IssueCommentContract.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Deletes the specified comment from an Issue.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param api_id [String] API identifier. Must be unique in the current API
# Management service instance.
# @param issue_id [String] Issue identifier. Must be unique in the current API
# Management service instance.
# @param comment_id [String] Comment identifier within an Issue. Must be unique
# in the current Issue.
# @param if_match [String] ETag of the Issue Entity. ETag should match the
# current entity state from the header response of the GET request or it should
# be * for unconditional update.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
def delete(resource_group_name, service_name, api_id, issue_id, comment_id, if_match, custom_headers:nil)
  # Blocking wrapper around delete_async; `value!` re-raises any failure.
  response = delete_async(resource_group_name, service_name, api_id, issue_id, comment_id, if_match, custom_headers:custom_headers).value!
  # A successful delete returns 204 No Content, so there is no body to return.
  nil
end
#
# Deletes the specified comment from an Issue.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param api_id [String] API identifier. Must be unique in the current API
# Management service instance.
# @param issue_id [String] Issue identifier. Must be unique in the current API
# Management service instance.
# @param comment_id [String] Comment identifier within an Issue. Must be unique
# in the current Issue.
# @param if_match [String] ETag of the Issue Entity. ETag should match the
# current entity state from the header response of the GET request or it should
# be * for unconditional update.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def delete_with_http_info(resource_group_name, service_name, api_id, issue_id, comment_id, if_match, custom_headers:nil)
delete_async(resource_group_name, service_name, api_id, issue_id, comment_id, if_match, custom_headers:custom_headers).value!
end
#
# Deletes the specified comment from an Issue.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param api_id [String] API identifier. Must be unique in the current API
# Management service instance.
# @param issue_id [String] Issue identifier. Must be unique in the current API
# Management service instance.
# @param comment_id [String] Comment identifier within an Issue. Must be unique
# in the current Issue.
# @param if_match [String] ETag of the Issue Entity. ETag should match the
# current entity state from the header response of the GET request or it should
# be * for unconditional update.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def delete_async(resource_group_name, service_name, api_id, issue_id, comment_id, if_match, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, "'service_name' should satisfy the constraint - 'MaxLength': '50'" if !service_name.nil? && service_name.length > 50
fail ArgumentError, "'service_name' should satisfy the constraint - 'MinLength': '1'" if !service_name.nil? && service_name.length < 1
fail ArgumentError, "'service_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'" if !service_name.nil? && service_name.match(Regexp.new('^^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$$')).nil?
fail ArgumentError, 'api_id is nil' if api_id.nil?
fail ArgumentError, "'api_id' should satisfy the constraint - 'MaxLength': '256'" if !api_id.nil? && api_id.length > 256
fail ArgumentError, "'api_id' should satisfy the constraint - 'MinLength': '1'" if !api_id.nil? && api_id.length < 1
fail ArgumentError, "'api_id' should satisfy the constraint - 'Pattern': '^[^*#&+:<>?]+$'" if !api_id.nil? && api_id.match(Regexp.new('^^[^*#&+:<>?]+$$')).nil?
fail ArgumentError, 'issue_id is nil' if issue_id.nil?
fail ArgumentError, "'issue_id' should satisfy the constraint - 'MaxLength': '256'" if !issue_id.nil? && issue_id.length > 256
fail ArgumentError, "'issue_id' should satisfy the constraint - 'MinLength': '1'" if !issue_id.nil? && issue_id.length < 1
fail ArgumentError, "'issue_id' should satisfy the constraint - 'Pattern': '^[^*#&+:<>?]+$'" if !issue_id.nil? && issue_id.match(Regexp.new('^^[^*#&+:<>?]+$$')).nil?
fail ArgumentError, 'comment_id is nil' if comment_id.nil?
fail ArgumentError, "'comment_id' should satisfy the constraint - 'MaxLength': '256'" if !comment_id.nil? && comment_id.length > 256
fail ArgumentError, "'comment_id' should satisfy the constraint - 'MinLength': '1'" if !comment_id.nil? && comment_id.length < 1
fail ArgumentError, "'comment_id' should satisfy the constraint - 'Pattern': '^[^*#&+:<>?]+$'" if !comment_id.nil? && comment_id.match(Regexp.new('^^[^*#&+:<>?]+$$')).nil?
fail ArgumentError, 'if_match is nil' if if_match.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['If-Match'] = if_match unless if_match.nil?
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/issues/{issueId}/comments/{commentId}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'serviceName' => service_name,'apiId' => api_id,'issueId' => issue_id,'commentId' => comment_id,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:delete, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 204
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
result
end
promise.execute
end
end
end
| 59.415525 | 233 | 0.697971 |
1affeff60ac89cf90e1db639279930a72948c5e1 | 1,128 | cask "macupdater" do
# Version format: marketing_version,build_number (comma-separated).
version "2.0.0,10946"
sha256 "6661a23a00d831acc065fe43bb08471ebb93d9ab8a15c85ab3d84a972842c540"

# Download URL uses only the part of `version` before the comma.
url "https://www.corecode.io/downloads/macupdater_#{version.before_comma}.dmg"
name "MacUpdater"
desc "Track and update to the latest versions of installed software"
homepage "https://www.corecode.io/macupdater/index.html"

# Poll the app's Sparkle appcast for new releases.
livecheck do
  url "https://www.corecode.io/macupdater/macupdater2.xml"
  strategy :sparkle
end

# The app updates itself, so upgrades need not reinstall it.
auto_updates true

app "MacUpdater.app"
# Expose the bundled CLI client on the PATH.
binary "#{appdir}/MacUpdater.app/Contents/Resources/macupdater_client"

uninstall quit: "com.corecode.MacUpdater",
          launchctl: "com.corecode.MacUpdaterLaunchHelper"

zap trash: [
  "~/Library/Application Scripts/com.corecode.MacUpdaterLaunchHelper",
  "~/Library/Application Support/MacUpdater",
  "~/Library/Application Support/MacUpdaterInstallHelper",
  "~/Library/Caches/com.corecode.MacUpdater",
  "~/Library/Containers/com.corecode.MacUpdaterLaunchHelper",
  "~/Library/Cookies/com.corecode.MacUpdater.binarycookies",
  "~/Library/Preferences/com.corecode.MacUpdater.plist",
]
end
| 34.181818 | 80 | 0.751773 |
262cb44cc9216dfd8d8cf10174c4619b4eb08fe1 | 684 |
# CocoaPods spec for the Module B routing-category component.
Pod::Spec.new do |spec|
  spec.name         = 'ModuleBCategory'
  spec.version      = '0.0.1'
  spec.summary      = '业务模块B组件的路由分类组件'
  spec.description  = <<-DESC
TODO: Add long description of the pod here.
  DESC
  spec.homepage     = 'https://github.com/CTPLucky/ModuleBCategory'
  spec.license      = "MIT"
  spec.author       = { 'CTPLucky' => '[email protected]' }
  spec.source       = { :git => '[email protected]:CTPLucky/ModuleBCategory.git', :tag => spec.version.to_s }
  spec.platform     = :ios, "5.0"
  spec.requires_arc = true
  spec.source_files = "ModuleBCategory/Category/**/*.{h,m}"
  spec.dependency 'CTMediator'
end
| 26.307692 | 104 | 0.530702 |
6235e4ada87b5d903651d618091b31e77c82f61b | 587 | MRuby::Build.new do |conf|
if ENV['VisualStudioVersion'] || ENV['VSINSTALLDIR']
  # Building inside a Visual Studio command prompt: use MSVC, and a
  # Windows bison binary for the parser generator.
  toolchain :visualcpp
  conf.yacc do |yacc|
    yacc.command = ENV['YACC'] || 'bison.exe'
    yacc.compile_options = %q[-o "%{outfile}" "%{infile}"]
  end
else
  toolchain :gcc
end

# Standard mruby gembox plus the extra mgems this build needs.
conf.gembox 'default'
conf.gem :mgem => 'json'
conf.gem :mgem => 'dir'
conf.gem :mgem => 'regexp-pcre'
conf.gem :mgem => 'erb'

# UTF-8 strings and 64-bit mrb_int throughout the build.
conf.cc.flags << '-DMRB_UTF8_STRING -DMRB_INT64'

# The build directory must be supplied by the environment.
conf.build_dir = ENV["MRUBY_BUILD_DIR"] || raise("MRUBY_BUILD_DIR undefined!")
end | 23.48 | 81 | 0.601363 |
623c3a85c97be31d0baea731ee1d4099b6574ab7 | 719 | class DeviseCreateAdmins < ActiveRecord::Migration
def change
create_table(:admins) do |t|
t.database_authenticatable :null => false
t.recoverable
t.rememberable
t.trackable
# t.encryptable
# t.confirmable
# t.lockable :lock_strategy => :failed_attempts, :unlock_strategy => :both
# t.token_authenticatable
t.timestamps
end
add_index :admins, :email, :unique => true
add_index :admins, :reset_password_token, :unique => true
# add_index :admins, :confirmation_token, :unique => true
# add_index :admins, :unlock_token, :unique => true
# add_index :admins, :authentication_token, :unique => true
end
end
| 27.653846 | 80 | 0.646732 |
ab05323b27fd6d0ae55b8be1a7a30138bfe868c6 | 535 | class SaunaController < ApplicationController
before_filter :authenticate_user!, :except => [:home, :maintenance, :error]
layout :set_layout, :except => [:home]
layout 'dashboard', :only => [:home, :maintenance, :error]
def home
redirect_to dashboards_path and return if current_user
end
def navbar
render :template => 'shared/_navbar', :layout => false and return
end
private
def set_layout
if request.headers['X-PJAX']
'single'
elsif
'application'
end
end
end
| 19.814815 | 77 | 0.657944 |
e25b14f2397b29f4b9bb565ef12dfe922577d3cf | 1,107 | module Phony
module NationalSplitters
# This is a national splitter for countries
# which have no NDC / Area Code.
#
class None < Default
# Get a splitter. Caches.
#
def self.instance_for(*)
@instance ||= new
end
# On false:
#
# This is a hack to make phony's
# plausible method work even with
# this splitter.
#
# Promise: We will rewrite this soon
# to beautify, but making it work for
# people in production is most important
# right now.
#
# The problem is that the validation looks
# at whether there is a NDC - if it is nil,
# it is not plausible.
# (Does not work with this class, of course
# since using nil is dangerous and breaks
# abstraction)
#
# Note: Decided it stays in. When formatting, it's turned into nil.
#
def split national_number
[nil, false, national_number]
end
# A valid length.
#
def length
0
end
end
end
end | 22.14 | 73 | 0.552846 |
e8b519bb4e4e33a676a43f929ea6749f763c5c9d | 188 | class AddUrlIdentifierToApplication < ActiveRecord::Migration[5.1]
def change
remove_column :applications, :base_url
add_column :applications, :url_identifier, :string
end
end
| 26.857143 | 66 | 0.781915 |
339b2451fb4bd1fb0835e7ec56e9705736a89817 | 794 | cask 'emclient' do
version '7.2.37961'
sha256 '4b38b07cb6b172e92aeb9b91482b560bda1cb8dbaf728440201ce6e584fcea81'
url "https://cdn-dist.emclient.com/dist/v#{version}_Mac/setup.pkg"
appcast 'https://www.emclient.com/release-history?os=mac'
name 'eM Client'
homepage 'https://www.emclient.com/'
auto_updates true
pkg 'setup.pkg'
uninstall pkgutil: 'com.emclient.mail.client.pkg'
zap trash: [
'~/Library/Caches/com.emclient.mail.client',
'~/Library/Preferences/com.emclient.mail.client.plist',
'~/Library/Preferences/com.emclient.mail.repair.plist',
'~/Library/Preferences/com.emclient.mail.shared.plist',
'~/Library/Saved Application State/com.emclient.mail.client.savedState',
]
end
| 33.083333 | 87 | 0.673804 |
aba1a5f73cfa71b26a13ec0654724b7f8b7ae910 | 1,431 | module Campaigns
class ClickStatisticsPresenter < BasePresenter
attr_reader :total_tracker
attr_reader :unique_clicks
delegate :campaign_ids, :start_date, :end_date, to: :shared_context
def present
query
OpenStruct.new(
total_trackers: total_trackers || 0,
unique_clicks: unique_clicks || 0
)
end
private
def query
return if campaign_ids.count.zero?
result = ActiveRecord::Base.connection.execute(sql).first
return unless result
@total_trackers = query_result["total_trackers"].try(:to_i)
@unique_clicks = query_result["unique_clicks"].try(:to_i)
end
def sql
<<-SQL
SELECT
COUNT(tracker_hits.tracker_id) AS total_trackers,
COUNT(DISTINCT tracker_hits.tracker_id) AS unique_clicks
FROM trackers
INNER JOIN campaign_messages_trackers ON campaign_messages_trackers.tracker_id = trackers.id
INNER JOIN campaign_messages ON campaign_messages.id = campaign_messages_trackers.campaign_message_id
LEFT OUTER JOIN tracker_hits ON tracker_hits.tracker_id = trackers.id
WHERE trackers.method != 0
AND trackers.path NOT LIKE '%unsubscribe%'
AND campaign_messages.campaign_id IN (#{campaign_ids.to_csv})
AND tracker_hits.created_at BETWEEN '#{start_date}'::timestamp AND '#{end_date}'::timestamp
SQL
end
end
end
| 29.8125 | 109 | 0.692523 |
03573b274d1685d7a92118aeae16cb05ea5566f3 | 14,776 | require "rails/generators/app_base"
module Rails
module ActionMethods # :nodoc:
attr_reader :options
def initialize(generator)
@generator = generator
@options = generator.options
end
private
%w(template copy_file directory empty_directory inside
empty_directory_with_keep_file create_file chmod shebang).each do |method|
class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{method}(*args, &block)
@generator.send(:#{method}, *args, &block)
end
RUBY
end
# TODO: Remove once this is fully in place
def method_missing(meth, *args, &block)
@generator.send(meth, *args, &block)
end
end
# The application builder allows you to override elements of the application
# generator without being forced to reverse the operations of the default
# generator.
#
# This allows you to override entire operations, like the creation of the
# Gemfile, README, or JavaScript files, without needing to know exactly
# what those operations do so you can create another template action.
class AppBuilder
def rakefile
template "Rakefile"
end
def readme
copy_file "README.md", "README.md"
end
def gemfile
template "Gemfile"
end
def configru
template "config.ru"
end
def gitignore
template "gitignore", ".gitignore"
end
def app
directory "app"
keep_file "app/assets/images"
empty_directory_with_keep_file "app/assets/javascripts/channels" unless options[:skip_action_cable]
keep_file "app/controllers/concerns"
keep_file "app/models/concerns"
end
def bin
directory "bin" do |content|
"#{shebang}\n" + content
end
chmod "bin", 0755 & ~File.umask, verbose: false
end
def config
empty_directory "config"
inside "config" do
template "routes.rb"
template "application.rb"
template "environment.rb"
template "secrets.yml"
template "cable.yml" unless options[:skip_action_cable]
template "puma.rb" unless options[:skip_puma]
template "spring.rb" if spring_install?
directory "environments"
directory "initializers"
directory "locales"
end
end
def config_when_updating
cookie_serializer_config_exist = File.exist?("config/initializers/cookies_serializer.rb")
action_cable_config_exist = File.exist?("config/cable.yml")
rack_cors_config_exist = File.exist?("config/initializers/cors.rb")
config
gsub_file "config/environments/development.rb", /^(\s+)config\.file_watcher/, '\1# config.file_watcher'
unless cookie_serializer_config_exist
gsub_file "config/initializers/cookies_serializer.rb", /json(?!,)/, "marshal"
end
unless action_cable_config_exist
template "config/cable.yml"
end
unless rack_cors_config_exist
remove_file "config/initializers/cors.rb"
end
end
def database_yml
template "config/databases/#{options[:database]}.yml", "config/database.yml"
end
def db
directory "db"
end
def lib
empty_directory "lib"
empty_directory_with_keep_file "lib/tasks"
empty_directory_with_keep_file "lib/assets"
end
def log
empty_directory_with_keep_file "log"
end
def public_directory
directory "public", "public", recursive: false
end
def test
empty_directory_with_keep_file "test/fixtures"
empty_directory_with_keep_file "test/fixtures/files"
empty_directory_with_keep_file "test/controllers"
empty_directory_with_keep_file "test/mailers"
empty_directory_with_keep_file "test/models"
empty_directory_with_keep_file "test/helpers"
empty_directory_with_keep_file "test/integration"
template "test/test_helper.rb"
end
def tmp
empty_directory_with_keep_file "tmp"
empty_directory "tmp/cache"
empty_directory "tmp/cache/assets"
end
def vendor
vendor_javascripts
vendor_stylesheets
end
def vendor_javascripts
unless options[:skip_javascript]
empty_directory_with_keep_file "vendor/assets/javascripts"
end
end
def vendor_stylesheets
empty_directory_with_keep_file "vendor/assets/stylesheets"
end
end
module Generators
# We need to store the RAILS_DEV_PATH in a constant, otherwise the path
# can change in Ruby 1.8.7 when we FileUtils.cd.
RAILS_DEV_PATH = File.expand_path("../../../../../..", File.dirname(__FILE__))
RESERVED_NAMES = %w[application destroy plugin runner test]
class AppGenerator < AppBase # :nodoc:
add_shared_options_for "application"
# Add bin/rails options
class_option :version, type: :boolean, aliases: "-v", group: :rails,
desc: "Show Rails version number and quit"
class_option :api, type: :boolean,
desc: "Preconfigure smaller stack for API only apps"
def initialize(*args)
super
unless app_path
raise Error, "Application name should be provided in arguments. For details run: rails --help"
end
if !options[:skip_active_record] && !DATABASES.include?(options[:database])
raise Error, "Invalid value for --database option. Supported for preconfiguration are: #{DATABASES.join(", ")}."
end
# Force sprockets to be skipped when generating API only apps.
# Can't modify options hash as it's frozen by default.
self.options = options.merge(skip_sprockets: true, skip_javascript: true).freeze if options[:api]
end
public_task :set_default_accessors!
public_task :create_root
def create_root_files
build(:readme)
build(:rakefile)
build(:configru)
build(:gitignore) unless options[:skip_git]
build(:gemfile) unless options[:skip_gemfile]
end
def create_app_files
build(:app)
end
def create_bin_files
build(:bin)
end
def create_config_files
build(:config)
end
def update_config_files
build(:config_when_updating)
end
remove_task :update_config_files
def display_upgrade_guide_info
say "\nAfter this, check Rails upgrade guide at http://guides.rubyonrails.org/upgrading_ruby_on_rails.html for more details about upgrading your app."
end
remove_task :display_upgrade_guide_info
def create_boot_file
template "config/boot.rb"
end
def create_active_record_files
return if options[:skip_active_record]
build(:database_yml)
end
def create_db_files
return if options[:skip_active_record]
build(:db)
end
def create_lib_files
build(:lib)
end
def create_log_files
build(:log)
end
def create_public_files
build(:public_directory)
end
def create_test_files
build(:test) unless options[:skip_test]
end
def create_tmp_files
build(:tmp)
end
def create_vendor_files
build(:vendor)
end
def delete_app_assets_if_api_option
if options[:api]
remove_dir "app/assets"
remove_dir "lib/assets"
remove_dir "tmp/cache/assets"
remove_dir "vendor/assets"
end
end
def delete_app_helpers_if_api_option
if options[:api]
remove_dir "app/helpers"
remove_dir "test/helpers"
end
end
def delete_application_layout_file_if_api_option
if options[:api]
remove_file "app/views/layouts/application.html.erb"
end
end
def delete_public_files_if_api_option
if options[:api]
remove_file "public/404.html"
remove_file "public/422.html"
remove_file "public/500.html"
remove_file "public/apple-touch-icon-precomposed.png"
remove_file "public/apple-touch-icon.png"
remove_file "public/favicon.ico"
end
end
def delete_js_folder_skipping_javascript
if options[:skip_javascript]
remove_dir "app/assets/javascripts"
end
end
def delete_assets_initializer_skipping_sprockets
if options[:skip_sprockets]
remove_file "config/initializers/assets.rb"
end
end
def delete_application_record_skipping_active_record
if options[:skip_active_record]
remove_file "app/models/application_record.rb"
end
end
def delete_action_mailer_files_skipping_action_mailer
if options[:skip_action_mailer]
remove_file "app/mailers/application_mailer.rb"
remove_file "app/views/layouts/mailer.html.erb"
remove_file "app/views/layouts/mailer.text.erb"
remove_dir "app/mailers"
remove_dir "test/mailers"
end
end
def delete_action_cable_files_skipping_action_cable
if options[:skip_action_cable]
remove_file "config/cable.yml"
remove_file "app/assets/javascripts/cable.js"
remove_dir "app/channels"
end
end
def delete_non_api_initializers_if_api_option
if options[:api]
remove_file "config/initializers/cookies_serializer.rb"
end
end
def delete_api_initializers
unless options[:api]
remove_file "config/initializers/cors.rb"
end
end
def finish_template
build(:leftovers)
end
public_task :apply_rails_template, :run_bundle
public_task :generate_spring_binstubs
def run_after_bundle_callbacks
@after_bundle_callbacks.each(&:call)
end
protected
def self.banner
"rails new #{arguments.map(&:usage).join(' ')} [options]"
end
# Define file as an alias to create_file for backwards compatibility.
def file(*args, &block)
create_file(*args, &block)
end
def app_name
@app_name ||= (defined_app_const_base? ? defined_app_name : File.basename(destination_root)).tr('\\', "").tr(". ", "_")
end
def defined_app_name
defined_app_const_base.underscore
end
def defined_app_const_base
Rails.respond_to?(:application) && defined?(Rails::Application) &&
Rails.application.is_a?(Rails::Application) && Rails.application.class.name.sub(/::Application$/, "")
end
alias :defined_app_const_base? :defined_app_const_base
def app_const_base
@app_const_base ||= defined_app_const_base || app_name.gsub(/\W/, "_").squeeze("_").camelize
end
alias :camelized :app_const_base
def app_const
@app_const ||= "#{app_const_base}::Application"
end
def valid_const?
if app_const =~ /^\d/
raise Error, "Invalid application name #{app_name}. Please give a name which does not start with numbers."
elsif RESERVED_NAMES.include?(app_name)
raise Error, "Invalid application name #{app_name}. Please give a " \
"name which does not match one of the reserved rails " \
"words: #{RESERVED_NAMES.join(", ")}"
elsif Object.const_defined?(app_const_base)
raise Error, "Invalid application name #{app_name}, constant #{app_const_base} is already in use. Please choose another application name."
end
end
def app_secret
SecureRandom.hex(64)
end
def mysql_socket
@mysql_socket ||= [
"/tmp/mysql.sock", # default
"/var/run/mysqld/mysqld.sock", # debian/gentoo
"/var/tmp/mysql.sock", # freebsd
"/var/lib/mysql/mysql.sock", # fedora
"/opt/local/lib/mysql/mysql.sock", # fedora
"/opt/local/var/run/mysqld/mysqld.sock", # mac + darwinports + mysql
"/opt/local/var/run/mysql4/mysqld.sock", # mac + darwinports + mysql4
"/opt/local/var/run/mysql5/mysqld.sock", # mac + darwinports + mysql5
"/opt/lampp/var/mysql/mysql.sock" # xampp for linux
].find { |f| File.exist?(f) } unless RbConfig::CONFIG["host_os"] =~ /mswin|mingw/
end
def get_builder_class
defined?(::AppBuilder) ? ::AppBuilder : Rails::AppBuilder
end
end
# This class handles preparation of the arguments before the AppGenerator is
# called. The class provides version or help information if they were
# requested, and also constructs the railsrc file (used for extra configuration
# options).
#
# This class should be called before the AppGenerator is required and started
# since it configures and mutates ARGV correctly.
class ARGVScrubber # :nodoc:
def initialize(argv = ARGV)
@argv = argv
end
def prepare!
handle_version_request!(@argv.first)
handle_invalid_command!(@argv.first, @argv) do
handle_rails_rc!(@argv.drop(1))
end
end
def self.default_rc_file
File.expand_path("~/.railsrc")
end
private
def handle_version_request!(argument)
if ["--version", "-v"].include?(argument)
require "rails/version"
puts "Rails #{Rails::VERSION::STRING}"
exit(0)
end
end
def handle_invalid_command!(argument, argv)
if argument == "new"
yield
else
["--help"] + argv.drop(1)
end
end
def handle_rails_rc!(argv)
if argv.find { |arg| arg == "--no-rc" }
argv.reject { |arg| arg == "--no-rc" }
else
railsrc(argv) { |rc_argv, rc| insert_railsrc_into_argv!(rc_argv, rc) }
end
end
def railsrc(argv)
if (customrc = argv.index { |x| x.include?("--rc=") })
fname = File.expand_path(argv[customrc].gsub(/--rc=/, ""))
yield(argv.take(customrc) + argv.drop(customrc + 1), fname)
else
yield argv, self.class.default_rc_file
end
end
def read_rc_file(railsrc)
extra_args = File.readlines(railsrc).flat_map(&:split)
puts "Using #{extra_args.join(" ")} from #{railsrc}"
extra_args
end
def insert_railsrc_into_argv!(argv, railsrc)
return argv unless File.exist?(railsrc)
extra_args = read_rc_file railsrc
argv.take(1) + extra_args + argv.drop(1)
end
end
end
end
| 29.201581 | 158 | 0.631023 |
4aee433f7be58153eef645704564a6a5a312c341 | 1,271 | class SessionsController < ApplicationController
def new
if logged_in?
redirect_to users_path
end
end
def create
user = User.find_by(username: params[:username])
if user && user.authenticate(params[:password])
session[:user_id] = user.id
redirect_to users_path
else
flash[:message] = "Invalid Credentials Please try again."
render :new
end
end
def omniauth
user = User.find_or_create_by(uid: request.env['omniauth.auth'][:uid], provider: request.env['omniauth.auth'][:provider]) do |a|
a.first_name = request.env['omniauth.auth'][:info][:first_name]
a.last_name = request.env['omniauth.auth'][:info][:last_name]
a.username = request.env['omniauth.auth'][:info][:name]
a.email = request.env['omniauth.auth'][:info][:email]
a.password = SecureRandom.hex(12)
end
if user.valid?
session[:user_id] = user.id
redirect_to users_path
else
redirect_to root_path
end
end
def logout
session.delete :user_id
redirect_to root_path
end
end
| 28.886364 | 146 | 0.560976 |
ed04353290d3b0a2f06c076f95febe8aaf9b7c81 | 2,120 | # frozen_string_literal: true
module WahWah
  module Flac
    # Parses the mandatory STREAMINFO metadata block of a FLAC stream and
    # fills in @sample_rate, @bit_depth, @duration and @bitrate.
    module StreaminfoBlock
      STREAMINFO_BLOCK_SIZE = 34

      # STREAMINFO layout (bits):
      #   16  minimum block size (samples)
      #   16  maximum block size (samples)
      #   24  minimum frame size (bytes), 0 = unknown
      #   24  maximum frame size (bytes), 0 = unknown
      #   20  sample rate in Hz (0 is invalid; capped at 655350 Hz)
      #    3  (channel count) - 1
      #    5  (bits per sample) - 1
      #   36  total inter-channel samples, 0 = unknown
      #  128  MD5 of the unencoded audio
      #
      # Silently returns when +block_data+ is not exactly 34 bytes.
      def parse_streaminfo_block(block_data)
        return unless block_data.size == STREAMINFO_BLOCK_SIZE

        # Skip the block/frame size fields (first 10 bytes), then read the
        # next 64 bits — sample rate through total samples — as a bit string.
        bit_string = block_data.unpack('x10B64').first

        @sample_rate = bit_string[0, 20].to_i(2)
        @bit_depth = bit_string[23, 5].to_i(2) + 1

        sample_count = bit_string[28, 36].to_i(2)
        # A zero sample rate is invalid per spec, so leave duration unset.
        @duration = (sample_count.to_f / @sample_rate).round if @sample_rate > 0
        @bitrate = @sample_rate * @bit_depth / 1000
      end
    end
  end
end
| 40.769231 | 115 | 0.587736 |
acc786d2e942c035a1b81d42f4b0234be898b4ec | 592 | module E20
module Ops
module Middleware
class HostnameMiddleware
def initialize(app, options = {})
@app = app
@options = options
@hostname = options[:hostname] || Hostname.new
end
def call(env)
status, headers, body = @app.call(env)
headers["X-Served-By"] = "#{@hostname.to_s} (#{Process.pid})"
if (logger = @options[:logger])
logger.info "[#{self.class.name}] Running on: #{@hostname}"
end
[status, headers, body]
end
end
end
end
end
| 23.68 | 71 | 0.52027 |
1af35b8bc6493ca2f7b4a5718d6e0399986bd062 | 782 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# NOTE: the commented-out configuration below is misplaced — `config` is only
# available inside the Application class (see Fashion::Application further
# down). Move these lines into that class before enabling them:
#   config.generators do |g|
#     g.assets false
#   end
#   config.assets.paths << Rails.root.join("lib", "videoplayer", "flash")
# Application class for the Fashion app; framework-wide configuration lives
# here (environment files and initializers may override it).
module Fashion
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 6.0

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration can go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded after loading
    # the framework and any gems in your application.
  end
end
| 27.928571 | 82 | 0.749361 |
bf1769ae5eeb7b13f5f7dd6a3ae221c02a9ba1de | 174 | module Pod
module Downloader
# @return [String] Downloader’s version, following
# [semver](http://semver.org).
#
VERSION = '1.3.0'.freeze
end
end
| 19.333333 | 54 | 0.603448 |
bfd55fb537e45d28e88caea3e0613bc5de6a9d81 | 693 | class PagesController < ApplicationController
before_action :authenticate_user!
before_action :set_page, only: [:show, :edit, :update, :destroy]
def index
@pages = Page.all
respond_with(@pages)
end
def show
respond_with(@page)
end
def new
@page = Page.new
respond_with(@page)
end
def edit
end
def create
@page = Page.new(page_params)
@page.save
respond_with(@page)
end
def update
@page.update(page_params)
respond_with(@page)
end
def destroy
@page.destroy
respond_with(@page)
end
private
def set_page
@page = Page.find(params[:id])
end
def page_params
params[:page]
end
end
| 14.744681 | 66 | 0.645022 |
bb8ffb0dca749880dba90afa1c6178c79419ce4a | 649 | require 'spec_helper'
module SoftwareVersion
  describe Version do
    # Verifies that every adjacent pair in the fixture file compares in
    # strictly ascending order via Version#<.
    context "Sort file test" do
      before(:all) do
        @version_array = fixture("windows_application_version_sort.txt").split("\n")
      end
      # NOTE(review): the examples below are generated at class-body load
      # time from this second, duplicate read of the fixture; the before(:all)
      # assignment above is not what drives the loop. The duplication looks
      # accidental — confirm which copy is intended.
      @version_array = fixture("windows_application_version_sort.txt").split("\n")
      @version_array.each_index do |k|
        it "compare #{@version_array[k]} < #{@version_array[k+1]}" do
          # The last line has no successor; blank trailing lines are skipped.
          next if @version_array[k+1].nil? || @version_array[k+1] == ""
          a = Version.new(@version_array[k])
          b = Version.new(@version_array[k+1])
          expect(a < b).to be true
        end
      end
    end
  end
end
| 29.5 | 84 | 0.616333 |
ac83682a1bbd62d5088b93f99b4e62e20877298b | 254 | require "seeds_helper"
# Loads db/seeds/<name>.rb and reports progress on stdout.
def seed(file)
  path = Rails.root.join("db", "seeds", "#{file}.rb")
  load path
  puts "Seeded #{file}"
end

puts "Seeding #{Rails.env} database..."

# Order matters: later seed files reference records created by earlier ones.
%w[developers businesses admins conversations].each { |name| seed(name) }

puts "Seeded database"
| 18.142857 | 51 | 0.69685 |
61ceff5852fc92cacb60e3c245b72fe7f83aa6d4 | 1,708 | #!/usr/bin/env ruby
# (c) 2011 Rdio Inc
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
require 'rubygems'
$LOAD_PATH << '../'
require 'rdio'
require './rdio_consumer_credentials'
# create an instance of the Rdio object with our consumer credentials
rdio = Rdio.new([RDIO_CONSUMER_KEY, RDIO_CONSUMER_SECRET])

# authenticate against the Rdio service using the out-of-band (PIN) OAuth
# flow: the user opens the URL in a browser and pastes the code back here
url = rdio.begin_authentication('oob')
puts 'Go to: ' + url
print 'Then enter the code: '
verifier = gets.strip
rdio.complete_authentication(verifier)

# find out what playlists you created
playlists = rdio.call('getPlaylists')['result']['owned']

# list them, one "shortUrl<TAB>name" pair per line
playlists.each { |playlist| puts "%s\t%s" % [playlist['shortUrl'], playlist['name']] }
| 39.72093 | 86 | 0.763466 |
acb1ee13885c8d13f7a65824b9285cc0c4ca2b11 | 1,079 | module TestHelpers
module ParamsFixtures
def params_fixture
DefaultConfiguration.get.merge({
databases: %w(postgresql mysql redis),
background_jobs: %w(delayed_job sidekiq),
server_type: 'nginx_unicorn',
vm_name: 'testapp',
vm_os: 'ubuntu/trusty64',
server_name: 'localhost',
ruby_install: 'rvm',
ruby_version: '2.1.2',
rails_version: '4',
postgresql_db_name: 'testapp',
postgresql_db_user: 'vagrant',
postgresql_db_password: 'vagrant',
postgresql_orm: 'activerecord',
mysql_db_name: 'testapp',
mysql_db_user: 'vagrant',
mysql_db_password: 'vagrant',
mysql_orm: 'activerecord',
mongodb_db_name: 'testapp',
mongodb_orm: 'mongoid',
redis_orm: 'redis-rb',
delayed_job_app_name: 'testapp-delayed_job',
delayed_job_command: 'bin/delayed_job run',
sidekiq_app_name: 'testapp-sidekiq',
sidekiq_command: 'sidekiq' })
end
end
end
| 33.71875 | 54 | 0.600556 |
ffc3d65bd397547c5b2f5e783c473ddd5a252a15 | 155 | class CreateImports < ActiveRecord::Migration
def change
create_table :imports do |t|
t.integer :imports
t.timestamps
end
end
end
| 15.5 | 45 | 0.677419 |
333180e1c083916f1200cb4472c3e2c55b60aadc | 959 | %w( formatter
railtie
log_subscribers/base
log_subscribers/action_controller
log_subscribers/action_dispatch
log_subscribers/action_mailer
log_subscribers/active_job
middleware/request_logger
middleware/routing_error_catcher
extensions/action_controller_helper
extensions/active_support_subscriber
).each do |fn|
require_relative "log_sanity/#{fn}"
end
# Wire the LogSanity helpers into whichever controller base classes exist
# (API-only apps have ActionController::API instead of Base).
ActionController::Base.include LogSanity::Extensions::ActionControllerHelper
ActionController::API.include LogSanity::Extensions::ActionControllerHelper if defined?(ActionController::API)

# Only patch ActiveSupport::Subscriber on pre-6 Rails (presumably a
# back-port shim — confirm against the extension's contents). Compare
# numerically: the original string comparison (`Rails.version < '6'`)
# misorders once the major version reaches two digits ("10.0" < "6" is true).
if Rails::VERSION::MAJOR < 6
  ActiveSupport::Subscriber.include LogSanity::Extensions::ActiveSupportSubscriber
end
# Per-request field store. Entries live in a thread-local hash so concurrent
# requests never see each other's fields.
module LogSanity
  module_function

  # Replaces the current thread's field hash with a fresh one and returns it.
  def reset_fields
    Thread.current[:logsanity_fields] = {}.with_indifferent_access
  end

  # The current thread's field hash, created lazily on first access.
  def fields
    existing = Thread.current[:logsanity_fields]
    existing || reset_fields
  end

  # Records one field under its (stringified) key.
  def log(key, val)
    fields[key.to_s] = val
  end
end
| 25.918919 | 111 | 0.786236 |
f7202140906d188d76a7127ac817ba03cd6bb73e | 1,480 |
# CocoaPods spec for XSLPhotoBrowser, a Swift photo browser supporting local
# and remote images with fade/zoom transitions (depends on SDWebImage + Hero).
Pod::Spec.new do |s|

  # ―――  Spec Metadata  ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #

  s.name         = "XSLPhotoBrowser"
  s.version      = "0.3.0"
  s.swift_version = "4.0"
  s.summary      = " swift版本的图片浏览器"

  # This description is used to generate tags and improve search results.
  s.description  = <<-DESC
  图片浏览器——打开本地和网络图片,依赖Kingfisher,包含fade和Zoom动画。
                       DESC

  s.homepage     = "https://github.com/peicode/XSLPhotoBrowser"
  # s.screenshots  = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"

  # ―――  Spec License  ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #

  s.license      = "MIT"
  # s.license      = { :type => "MIT", :file => "FILE_LICENSE" }

  # ――― Author Metadata  ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #

  s.author             = { "Pei丶Code" => "[email protected]" }

  # ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #

  # s.platform     = :ios
  s.platform     = :ios, "9.0"

  # ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #

  # Tag must exist in the repo and track s.version.
  s.source       = { :git => "https://github.com/peicode/XSLPhotoBrowser.git", :tag => "#{s.version}" }

  # ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #

  s.source_files  = "XSLPhotoBrowser/Classes/**/*.swift"

  # Static framework is required because SDWebImage 4.x vendors static code.
  s.static_framework = true
  s.dependency "SDWebImage", '~> 4.4.6'
  s.dependency "Hero"

end
| 31.489362 | 103 | 0.440541 |
d507557df6c8124f9a6ad199be3e117dbf005627 | 1,389 | # frozen_string_literal: true
# Derive GraphQL types by convention, linking an {ApplicationRecord}
# subclass to its corresponding {Types::BaseObject} subclass.
# Derive GraphQL types by convention, linking an {ApplicationRecord}
# subclass to its corresponding {Types::BaseObject} subclass.
module DerivedGraphqlTypes
  extend ActiveSupport::Concern

  included do
    # Expose the class-level derivations on instances as well.
    delegate :graphql_node_type, :graphql_node_type_name,
      :graphql_connection_type, :graphql_edge_type,
      to: :class
  end

  class_methods do
    # The connection type to use for this model, derived
    # by default from {.graphql_node_type}.
    #
    # @return [Class, nil]
    def graphql_connection_type
      graphql_node_type&.connection_type
    end

    # The edge type to use for this model, derived
    # by default from {.graphql_node_type}.
    #
    # @return [Class, nil]
    def graphql_edge_type
      graphql_node_type&.edge_type
    end

    # The corresponding object type for this model in the GraphQL API.
    #
    # Derived from {.graphql_node_type_name}.
    #
    # Used to possibly derive {.graphql_connection_type} and {.graphql_edge_type}.
    #
    # NOTE: `||=` never caches a nil result, so a missing type constant is
    # re-looked-up on every call (cheap, but there is no negative caching).
    #
    # @api private
    # @return [Class, nil]
    def graphql_node_type
      @graphql_node_type ||= graphql_node_type_name.safe_constantize
    end

    # Overridable type used to derive {.graphql_node_type}.
    #
    # @api private
    # @return [String]
    def graphql_node_type_name
      @graphql_node_type_name ||= "Types::#{model_name}Type"
    end
  end
end
| 26.711538 | 82 | 0.697624 |
211acb6b33dc8ce6d3fad51d0f3e5e475d429873 | 160 | # frozen_string_literal: true
# Bare top-level Mouse class — intentionally distinct from the Acme::Mouse
# namespace below.
class Mouse; end

module Acme
  module Mouse
    # Mixin that teaches any includer to squeak.
    module AddSqueak
      # The squeak sound, as a string.
      def squeak
        'Squeak!'
      end
    end
  end
end
| 11.428571 | 29 | 0.63125 |
5d8aaf141563b3a5aa1b551a249f7666753e0c54 | 4,491 | class ErlangAT17 < Formula
desc "Programming language for highly scalable real-time systems"
homepage "https://www.erlang.org/"
url "https://github.com/erlang/otp/archive/OTP-17.5.6.9.tar.gz"
sha256 "70d9d0a08969f4c51c78088f8c6b7da22a4806b1fd258a9fff1408f56553f378"
bottle do
cellar :any
# sha256 "c04320e6c42fe2116cd0bd52cfd8d4b4c9de9af874b59a91455184c93935576d" => :mojave
sha256 "de3143035b8e4861f90f3cdd2e6a518d97bc17f7b1087948026b99bc36b781fe" => :high_sierra
sha256 "819a566e39049cb521e3a26f39746258d333acd4ce9bc91eff2dc8969905f2fc" => :sierra
sha256 "e4faf6f98903c5dd7fa4894f7a61f722101119572f6d32ab9000fa47332f148d" => :el_capitan
sha256 "ab5c9f75b67c92c103a7712104edf8dd4f6edb52eda6d0312b50bde0e1f83780" => :yosemite
end
keg_only :versioned_formula
option "without-hipe", "Disable building hipe; fails on various macOS systems"
option "with-native-libs", "Enable native library compilation"
option "with-dirty-schedulers", "Enable experimental dirty schedulers"
option "without-docs", "Do not install documentation"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "openssl"
depends_on "unixodbc" if MacOS.version >= :mavericks
depends_on "wxmac" => :recommended # for GUI apps like observer
depends_on "fop" => :optional # enables building PDF docs
resource "man" do
url "https://www.erlang.org/download/otp_doc_man_17.5.tar.gz"
sha256 "85b1b2a1011fc01af550f1fe9e5a599a4c5f2a35d264d2804af1d05590a857c3"
end
resource "html" do
url "https://www.erlang.org/download/otp_doc_html_17.5.tar.gz"
sha256 "baba1d373c1faacf4a1a6ec1220d57d0cb2b977edb74f32cd58dc786361c6cf5"
end
# Erlang will crash on macOS 10.13 any time the crypto lib is used.
# The Erlang team has an open PR for the patch but it needs to be applied to
# older releases. See https://github.com/erlang/otp/pull/1501 and
# https://bugs.erlang.org/browse/ERL-439 for additional information.
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/8cf3045/erlang%4017/boring-ssl-high-sierra.patch"
sha256 "ec4bbdabdfece3a273210727bc150e0e588479885a141382b4d54221bbec5fc3"
end
# Pointer comparison triggers error with Xcode 9
if DevelopmentTools.clang_build_version >= 900
patch do
url "https://github.com/erlang/otp/commit/a64c4d806fa54848c35632114585ad82b98712e8.diff?full_index=1"
sha256 "3261400f8d7f0dcff3a52821daea3391ebfa01fd859f9f2d9cc5142138e26e15"
end
end
def install
# Unset these so that building wx, kernel, compiler and
# other modules doesn't fail with an unintelligable error.
%w[LIBS FLAGS AFLAGS ZFLAGS].each { |k| ENV.delete("ERL_#{k}") }
ENV["FOP"] = "#{HOMEBREW_PREFIX}/bin/fop" if build.with? "fop"
# Do this if building from a checkout to generate configure
system "./otp_build", "autoconf" if File.exist? "otp_build"
args = %W[
--disable-debug
--disable-silent-rules
--prefix=#{prefix}
--enable-kernel-poll
--enable-threads
--enable-sctp
--enable-dynamic-ssl-lib
--with-ssl=#{Formula["openssl"].opt_prefix}
--enable-shared-zlib
--enable-smp-support
]
args << "--enable-darwin-64bit" if MacOS.prefer_64_bit?
args << "--enable-native-libs" if build.with? "native-libs"
args << "--enable-dirty-schedulers" if build.with? "dirty-schedulers"
args << "--enable-wx" if build.with? "wxmac"
args << "--with-dynamic-trace=dtrace" if MacOS::CLT.installed?
if build.without? "hipe"
# HIPE doesn't strike me as that reliable on macOS
# https://syntatic.wordpress.com/2008/06/12/macports-erlang-bus-error-due-to-mac-os-x-1053-update/
# https://www.erlang.org/pipermail/erlang-patches/2008-September/000293.html
args << "--disable-hipe"
else
args << "--enable-hipe"
end
system "./configure", *args
system "make"
ENV.deparallelize # Install is not thread-safe; can try to create folder twice and fail
system "make", "install"
if build.with? "docs"
(lib/"erlang").install resource("man").files("man")
doc.install resource("html")
end
end
# Post-install note shown to the user (e.g. by `brew info`): where the
# man pages live and how to read them with `erl -man`.
def caveats; <<~EOS
Man pages can be found in:
#{opt_lib}/erlang/man
Access them with `erl -man`, or add this directory to MANPATH.
EOS
end
# Smoke test: boot the VM headlessly, evaluate crypto:start(). (which
# presumably exercises the OpenSSL linkage — the crypto app loads
# OpenSSL-backed code), then shut down cleanly via init:stop.
test do
system "#{bin}/erl", "-noshell", "-eval", "crypto:start().", "-s", "init", "stop"
end
end
| 37.739496 | 117 | 0.716767 |
# Mailer for seller-facing notifications about lot lifecycle events:
# lot sold, lot expired unsold, order created, order delivered.
# Each action sets @user/@lot/@url for the corresponding view template.
class SellerMailer < ApplicationMailer
  # Notifies the seller that their lot was successfully sold.
  #
  # lot - the sold lot; lot.user is the seller being mailed.
  def send_lot_closed_email(lot)
    @user = lot.user
    @lot = lot
    @url = front_lot_url(lot)
    mail(to: @user.email, subject: "Your lot #{@lot.title} was successfully sold")
  end

  # Notifies the seller that the lot's sale window elapsed without a buyer.
  def send_lot_closed_by_timeout_email(lot)
    @user = lot.user
    @lot = lot
    @url = front_lot_url(lot)
    # Subject typo fixed: "wasnt" -> "wasn't".
    mail(to: @user.email, subject: "Your lot #{@lot.title} wasn't sold - sold time is out")
  end

  # Notifies the seller that a buyer created an order for their lot.
  def order_create_email(order)
    @order = order
    @lot = order.lot
    @user = order.lot.user
    @url = front_lot_url(@lot)
    mail(to: @user.email, subject: "Order was created on Your lot #{@lot.title}")
  end

  # Notifies the seller that the order on their lot was delivered.
  def order_delivered_email(order)
    @order = order
    @lot = order.lot
    @user = order.lot.user
    @url = front_lot_url(@lot)
    mail(to: @user.email, subject: "Order on Your lot #{@lot.title} was successfully delivered")
  end

  private

  # Front-end URL of the lot's detail page; was duplicated in every
  # action.  Assumes ENV["front_app_url"] is set — TODO confirm at deploy.
  def front_lot_url(lot)
    "#{ENV['front_app_url']}/lots/#{lot.id}"
  end
end
| 30.058824 | 101 | 0.62818 |
1cc946b26af5f214d1cabcef1f7173607753c6a2 | 3,769 | # -*- encoding: utf-8 -*-
# stub: bootstrap-sass 3.4.1 ruby lib
# RubyGems-generated stub specification for the installed bootstrap-sass
# gem.  Generated code: the repetitive dependency branches below support
# multiple RubyGems generations and should not be hand-edited.
Gem::Specification.new do |s|
s.name = "bootstrap-sass".freeze
s.version = "3.4.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Thomas McDonald".freeze]
s.date = "2019-02-13"
s.email = "[email protected]".freeze
s.homepage = "https://github.com/twbs/bootstrap-sass".freeze
s.licenses = ["MIT".freeze]
s.rubygems_version = "2.7.6.2".freeze
s.summary = "bootstrap-sass is a Sass-powered version of Bootstrap 3, ready to drop right into your Sass powered applications.".freeze
s.installed_by_version = "2.7.6.2" if s.respond_to? :installed_by_version
# Modern RubyGems: declare dependencies with runtime/development distinction.
if s.respond_to? :specification_version then
s.specification_version = 4
# RubyGems >= 1.2 understands add_runtime_dependency/add_development_dependency.
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<sassc>.freeze, [">= 2.0.0"])
s.add_runtime_dependency(%q<autoprefixer-rails>.freeze, [">= 5.2.1"])
s.add_development_dependency(%q<minitest>.freeze, ["~> 5.11"])
s.add_development_dependency(%q<minitest-reporters>.freeze, ["~> 1.3"])
s.add_development_dependency(%q<capybara>.freeze, ["~> 3.6"])
s.add_development_dependency(%q<poltergeist>.freeze, [">= 0"])
s.add_development_dependency(%q<sassc-rails>.freeze, [">= 2.0.0"])
s.add_development_dependency(%q<actionpack>.freeze, [">= 4.1.5"])
s.add_development_dependency(%q<activesupport>.freeze, [">= 4.1.5"])
s.add_development_dependency(%q<json>.freeze, [">= 1.8.1"])
s.add_development_dependency(%q<sprockets-rails>.freeze, [">= 2.1.3"])
s.add_development_dependency(%q<jquery-rails>.freeze, [">= 3.1.0"])
s.add_development_dependency(%q<slim-rails>.freeze, [">= 0"])
s.add_development_dependency(%q<uglifier>.freeze, [">= 0"])
s.add_development_dependency(%q<term-ansicolor>.freeze, [">= 0"])
else
# Older RubyGems (< 1.2): fall back to undifferentiated add_dependency.
s.add_dependency(%q<sassc>.freeze, [">= 2.0.0"])
s.add_dependency(%q<autoprefixer-rails>.freeze, [">= 5.2.1"])
s.add_dependency(%q<minitest>.freeze, ["~> 5.11"])
s.add_dependency(%q<minitest-reporters>.freeze, ["~> 1.3"])
s.add_dependency(%q<capybara>.freeze, ["~> 3.6"])
s.add_dependency(%q<poltergeist>.freeze, [">= 0"])
s.add_dependency(%q<sassc-rails>.freeze, [">= 2.0.0"])
s.add_dependency(%q<actionpack>.freeze, [">= 4.1.5"])
s.add_dependency(%q<activesupport>.freeze, [">= 4.1.5"])
s.add_dependency(%q<json>.freeze, [">= 1.8.1"])
s.add_dependency(%q<sprockets-rails>.freeze, [">= 2.1.3"])
s.add_dependency(%q<jquery-rails>.freeze, [">= 3.1.0"])
s.add_dependency(%q<slim-rails>.freeze, [">= 0"])
s.add_dependency(%q<uglifier>.freeze, [">= 0"])
s.add_dependency(%q<term-ansicolor>.freeze, [">= 0"])
end
else
# Very old RubyGems without specification_version support.
s.add_dependency(%q<sassc>.freeze, [">= 2.0.0"])
s.add_dependency(%q<autoprefixer-rails>.freeze, [">= 5.2.1"])
s.add_dependency(%q<minitest>.freeze, ["~> 5.11"])
s.add_dependency(%q<minitest-reporters>.freeze, ["~> 1.3"])
s.add_dependency(%q<capybara>.freeze, ["~> 3.6"])
s.add_dependency(%q<poltergeist>.freeze, [">= 0"])
s.add_dependency(%q<sassc-rails>.freeze, [">= 2.0.0"])
s.add_dependency(%q<actionpack>.freeze, [">= 4.1.5"])
s.add_dependency(%q<activesupport>.freeze, [">= 4.1.5"])
s.add_dependency(%q<json>.freeze, [">= 1.8.1"])
s.add_dependency(%q<sprockets-rails>.freeze, [">= 2.1.3"])
s.add_dependency(%q<jquery-rails>.freeze, [">= 3.1.0"])
s.add_dependency(%q<slim-rails>.freeze, [">= 0"])
s.add_dependency(%q<uglifier>.freeze, [">= 0"])
s.add_dependency(%q<term-ansicolor>.freeze, [">= 0"])
end
end
| 50.932432 | 136 | 0.636774 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.