hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
6aaf6d510c92315b3ce16f67062e61ab3d074e3d | 16,252 | # frozen_string_literal: true
# A blob is a record that contains the metadata about a file and a key for where that file resides on the service.
# Blobs can be created in two ways:
#
# 1. Ahead of the file being uploaded server-side to the service, via <tt>create_and_upload!</tt>. A rewindable
# <tt>io</tt> with the file contents must be available at the server for this operation.
# 2. Ahead of the file being directly uploaded client-side to the service, via <tt>create_before_direct_upload!</tt>.
#
# The first option doesn't require any client-side JavaScript integration, and can be used by any other back-end
# service that deals with files. The second option is faster, since you're not using your own server as a staging
# point for uploads, and can work with deployments like Heroku that do not provide large amounts of disk space.
#
# Blobs are intended to be immutable in as-so-far as their reference to a specific file goes. You're allowed to
# update a blob's metadata on a subsequent pass, but you should not update the key or change the uploaded file.
# If you need to create a derivative or otherwise change the blob, simply create a new blob and purge the old one.
class ActiveStorage::Blob < ActiveStorage::Record
  # We use constant paths in the following include calls to avoid a gotcha of
  # classic mode: If the parent application defines a top-level Analyzable, for
  # example, and ActiveStorage::Blob::Analyzable is not yet loaded, a bare
  #
  #   include Analyzable
  #
  # would resolve to the top-level one, const_missing would not be triggered,
  # and therefore ActiveStorage::Blob::Analyzable would not be autoloaded.
  #
  # By using qualified names, we ensure const_missing is invoked if needed.
  # Please, note that Ruby 2.5 or newer is required, so Object is not checked
  # when looking up the ancestors of ActiveStorage::Blob.
  #
  # Zeitwerk mode does not have this gotcha. If we ever drop classic mode, this
  # can be simplified, bare constant names would just work.
  include ActiveStorage::Blob::Analyzable
  include ActiveStorage::Blob::Identifiable
  include ActiveStorage::Blob::Representable

  self.table_name = "active_storage_blobs"

  # Minimum number of characters in a generated blob key; see
  # .generate_unique_secure_token below for the rationale.
  MINIMUM_TOKEN_LENGTH = 28

  has_secure_token :key, length: MINIMUM_TOKEN_LENGTH

  # The metadata column is serialized as JSON, with convenience accessors for
  # the analyzed/identified flags stored inside it.
  store :metadata, accessors: [ :analyzed, :identified ], coder: ActiveRecord::Coders::JSON

  # Registry of all configured services keyed by name, plus the class-level
  # default service assigned to newly created blobs.
  class_attribute :services, default: {}
  class_attribute :service, instance_accessor: false

  has_many :attachments

  # Blobs that no attachment currently references.
  scope :unattached, -> { where.missing(:attachments) }

  # New blobs default to the class-level service unless one was given.
  after_initialize do
    self.service_name ||= self.class.service&.name
  end

  # Keep the service's copy of the metadata in sync when the content type changes.
  after_update_commit :update_service_metadata, if: :content_type_previously_changed?

  # Refuse to destroy a blob while attachments still reference it.
  before_destroy(prepend: true) do
    raise ActiveRecord::InvalidForeignKey if attachments.exists?
  end

  validates :service_name, presence: true

  # A present service_name must correspond to one of the configured services.
  validate do
    if service_name_changed? && service_name.present?
      services.fetch(service_name) do
        errors.add(:service_name, :invalid)
      end
    end
  end

  class << self
    # You can use the signed ID of a blob to refer to it on the client side without fear of tampering.
    # This is particularly helpful for direct uploads where the client-side needs to refer to the blob
    # that was created ahead of the upload itself on form submission.
    #
    # The signed ID is also used to create stable URLs for the blob through the BlobsController.
    def find_signed(id, record: nil, purpose: :blob_id)
      super(id, purpose: purpose)
    end

    # Works like +find_signed+, but will raise an +ActiveSupport::MessageVerifier::InvalidSignature+
    # exception if the +signed_id+ has either expired, has a purpose mismatch, is for another record,
    # or has been tampered with. It will also raise an +ActiveRecord::RecordNotFound+ exception if
    # the valid signed id can't find a record.
    def find_signed!(id, record: nil, purpose: :blob_id)
      super(id, purpose: purpose)
    end

    # Builds a blob and immediately uploads the +io+ to the service (deprecated;
    # see +create_and_upload!+ which saves the record before uploading).
    def build_after_upload(io:, filename:, content_type: nil, metadata: nil, service_name: nil, identify: true, record: nil) #:nodoc:
      new(filename: filename, content_type: content_type, metadata: metadata, service_name: service_name).tap do |blob|
        blob.upload(io, identify: identify)
      end
    end
    deprecate :build_after_upload

    # Builds a blob and extracts metadata (checksum, content type, byte size)
    # from +io+ without uploading it yet.
    def build_after_unfurling(key: nil, io:, filename:, content_type: nil, metadata: nil, service_name: nil, identify: true, record: nil) #:nodoc:
      new(key: key, filename: filename, content_type: content_type, metadata: metadata, service_name: service_name).tap do |blob|
        blob.unfurl(io, identify: identify)
      end
    end

    # Like +build_after_unfurling+, but also persists the record.
    def create_after_unfurling!(key: nil, io:, filename:, content_type: nil, metadata: nil, service_name: nil, identify: true, record: nil) #:nodoc:
      build_after_unfurling(key: key, io: io, filename: filename, content_type: content_type, metadata: metadata, service_name: service_name, identify: identify).tap(&:save!)
    end

    # Creates a new blob instance and then uploads the contents of
    # the given <tt>io</tt> to the service. The blob instance is going to
    # be saved before the upload begins to prevent the upload clobbering another due to key collisions.
    # When providing a content type, pass <tt>identify: false</tt> to bypass
    # automatic content type inference.
    def create_and_upload!(key: nil, io:, filename:, content_type: nil, metadata: nil, service_name: nil, identify: true, record: nil)
      create_after_unfurling!(key: key, io: io, filename: filename, content_type: content_type, metadata: metadata, service_name: service_name, identify: identify).tap do |blob|
        blob.upload_without_unfurling(io)
      end
    end

    alias_method :create_after_upload!, :create_and_upload!
    deprecate create_after_upload!: :create_and_upload!

    # Returns a saved blob _without_ uploading a file to the service. This blob will point to a key where there is
    # no file yet. It's intended to be used together with a client-side upload, which will first create the blob
    # in order to produce the signed URL for uploading. This signed URL points to the key generated by the blob.
    # Once the form using the direct upload is submitted, the blob can be associated with the right record using
    # the signed ID.
    def create_before_direct_upload!(key: nil, filename:, byte_size:, checksum:, content_type: nil, metadata: nil, service_name: nil, record: nil)
      create! key: key, filename: filename, byte_size: byte_size, checksum: checksum, content_type: content_type, metadata: metadata, service_name: service_name
    end

    # To prevent problems with case-insensitive filesystems, especially in combination
    # with databases which treat indices as case-sensitive, all blob keys generated are going
    # to only contain the base-36 character alphabet and will therefore be lowercase. To maintain
    # the same or higher amount of entropy as in the base-58 encoding used by `has_secure_token`
    # the number of bytes used is increased to 28 from the standard 24
    def generate_unique_secure_token(length: MINIMUM_TOKEN_LENGTH)
      SecureRandom.base36(length)
    end

    # Customize signed ID purposes for backwards compatibility.
    def combine_signed_id_purposes(purpose) #:nodoc:
      purpose.to_s
    end

    # Customize the default signed ID verifier for backwards compatibility.
    #
    # We override the reader (.signed_id_verifier) instead of just calling the writer (.signed_id_verifier=)
    # to guard against the case where ActiveStorage.verifier isn't yet initialized at load time.
    def signed_id_verifier #:nodoc:
      @signed_id_verifier ||= ActiveStorage.verifier
    end
  end

  # Returns a signed ID for this blob that's suitable for reference on the client-side without fear of tampering.
  def signed_id(purpose: :blob_id, expires_in: nil)
    super
  end

  # Returns the key pointing to the file on the service that's associated with this blob. The key is the
  # secure-token format from Rails in lower case. So it'll look like: xtapjjcjiudrlk3tmwyjgpuobabd.
  # This key is not intended to be revealed directly to the user.
  # Always refer to blobs using the signed_id or a verified form of the key.
  def key
    # We can't wait until the record is first saved to have a key for it
    self[:key] ||= self.class.generate_unique_secure_token(length: MINIMUM_TOKEN_LENGTH)
  end

  # Returns an ActiveStorage::Filename instance of the filename that can be
  # queried for basename, extension, and a sanitized version of the filename
  # that's safe to use in URLs.
  def filename
    ActiveStorage::Filename.new(self[:filename])
  end

  # Returns true if the content_type of this blob is in the image range, like image/png.
  def image?
    content_type.start_with?("image")
  end

  # Returns true if the content_type of this blob is in the audio range, like audio/mpeg.
  def audio?
    content_type.start_with?("audio")
  end

  # Returns true if the content_type of this blob is in the video range, like video/mp4.
  def video?
    content_type.start_with?("video")
  end

  # Returns true if the content_type of this blob is in the text range, like text/plain.
  def text?
    content_type.start_with?("text")
  end

  # Returns the URL of the blob on the service. This returns a permanent URL for public files, and returns a
  # short-lived URL for private files. Private files are signed, and not for public use. Instead,
  # the URL should only be exposed as a redirect from a stable, possibly authenticated URL. Hiding the
  # URL behind a redirect also allows you to change services without updating all URLs.
  def url(expires_in: ActiveStorage.service_urls_expire_in, disposition: :inline, filename: nil, **options)
    service.url key, expires_in: expires_in, filename: ActiveStorage::Filename.wrap(filename || self.filename),
      content_type: content_type_for_serving, disposition: forced_disposition_for_serving || disposition, **options
  end

  alias_method :service_url, :url
  deprecate service_url: :url

  # Returns a URL that can be used to directly upload a file for this blob on the service. This URL is intended to be
  # short-lived for security and only generated on-demand by the client-side JavaScript responsible for doing the uploading.
  def service_url_for_direct_upload(expires_in: ActiveStorage.service_urls_expire_in)
    service.url_for_direct_upload key, expires_in: expires_in, content_type: content_type, content_length: byte_size, checksum: checksum
  end

  # Returns a Hash of headers for +service_url_for_direct_upload+ requests.
  def service_headers_for_direct_upload
    service.headers_for_direct_upload key, filename: filename, content_type: content_type, content_length: byte_size, checksum: checksum
  end

  # Content type actually sent when serving: the configured binary type when the
  # blob's own type is forcibly served as binary.
  def content_type_for_serving #:nodoc:
    forcibly_serve_as_binary? ? ActiveStorage.binary_content_type : content_type
  end

  # Forces :attachment disposition for content types that must not render inline.
  def forced_disposition_for_serving #:nodoc:
    if forcibly_serve_as_binary? || !allowed_inline?
      :attachment
    end
  end

  # Uploads the +io+ to the service on the +key+ for this blob. Blobs are intended to be immutable, so you shouldn't be
  # using this method after a file has already been uploaded to fit with a blob. If you want to create a derivative blob,
  # you should instead simply create a new blob based on the old one.
  #
  # Prior to uploading, we compute the checksum, which is sent to the service for transit integrity validation. If the
  # checksum does not match what the service receives, an exception will be raised. We also measure the size of the +io+
  # and store that in +byte_size+ on the blob record. The content type is automatically extracted from the +io+ unless
  # you specify a +content_type+ and pass +identify+ as false.
  #
  # Normally, you do not have to call this method directly at all. Use the +create_and_upload!+ class method instead.
  # If you do use this method directly, make sure you are using it on a persisted Blob as otherwise another blob's
  # data might get overwritten on the service.
  def upload(io, identify: true)
    unfurl io, identify: identify
    upload_without_unfurling io
  end

  # Extracts checksum, content type (when identifying) and byte size from +io+
  # and stores them on the record, without uploading anything.
  def unfurl(io, identify: true) #:nodoc:
    self.checksum = compute_checksum_in_chunks(io)
    self.content_type = extract_content_type(io) if content_type.nil? || identify
    self.byte_size = io.size
    self.identified = true
  end

  # Uploads +io+ to the service under this blob's key, passing along the
  # checksum and serving metadata. Assumes +unfurl+ already ran.
  def upload_without_unfurling(io) #:nodoc:
    service.upload key, io, checksum: checksum, **service_metadata
  end

  # Downloads the file associated with this blob. If no block is given, the entire file is read into memory and returned.
  # That'll use a lot of RAM for very large files. If a block is given, then the download is streamed and yielded in chunks.
  def download(&block)
    service.download key, &block
  end

  # Downloads the blob to a tempfile on disk. Yields the tempfile.
  #
  # The tempfile's name is prefixed with +ActiveStorage-+ and the blob's ID. Its extension matches that of the blob.
  #
  # By default, the tempfile is created in <tt>Dir.tmpdir</tt>. Pass +tmpdir:+ to create it in a different directory:
  #
  #   blob.open(tmpdir: "/path/to/tmp") do |file|
  #     # ...
  #   end
  #
  # The tempfile is automatically closed and unlinked after the given block is executed.
  #
  # Raises ActiveStorage::IntegrityError if the downloaded data does not match the blob's checksum.
  def open(tmpdir: nil, &block)
    service.open key, checksum: checksum,
      name: [ "ActiveStorage-#{id}-", filename.extension_with_delimiter ], tmpdir: tmpdir, &block
  end

  # Enqueues a mirror job when the configured service supports mirroring.
  def mirror_later #:nodoc:
    ActiveStorage::MirrorJob.perform_later(key, checksum: checksum) if service.respond_to?(:mirror)
  end

  # Deletes the files on the service associated with the blob. This should only be done if the blob is going to be
  # deleted as well or you will essentially have a dead reference. It's recommended to use #purge and #purge_later
  # methods in most circumstances.
  def delete
    service.delete(key)
    service.delete_prefixed("variants/#{key}/") if image?
  end

  # Destroys the blob record and then deletes the file on the service. This is the recommended way to dispose of unwanted
  # blobs. Note, though, that deleting the file off the service will initiate an HTTP connection to the service, which may
  # be slow or prevented, so you should not use this method inside a transaction or in callbacks. Use #purge_later instead.
  def purge
    destroy
    delete if previously_persisted?
  rescue ActiveRecord::InvalidForeignKey
    # Another attachment grabbed the blob concurrently; leave it in place.
  end

  # Enqueues an ActiveStorage::PurgeJob to call #purge. This is the recommended way to purge blobs from a transaction,
  # an Active Record callback, or in any other real-time scenario.
  def purge_later
    ActiveStorage::PurgeJob.perform_later(self)
  end

  # Returns an instance of service, which can be configured globally or per attachment
  def service
    services.fetch(service_name)
  end

  private
    # MD5 digest of +io+, computed in 5 MB chunks to bound memory use.
    # Rewinds +io+ afterwards so it can be read again for the upload.
    def compute_checksum_in_chunks(io)
      Digest::MD5.new.tap do |checksum|
        while chunk = io.read(5.megabytes)
          checksum << chunk
        end
        io.rewind
      end.base64digest
    end

    # Sniffs the content type from the IO's bytes via Marcel, falling back on
    # the filename and the declared content type.
    def extract_content_type(io)
      Marcel::MimeType.for io, name: filename.to_s, declared_type: content_type
    end

    # True when the app configured this content type to always be served as binary.
    def forcibly_serve_as_binary?
      ActiveStorage.content_types_to_serve_as_binary.include?(content_type)
    end

    # True when the app allows this content type to be rendered inline.
    def allowed_inline?
      ActiveStorage.content_types_allowed_inline.include?(content_type)
    end

    def web_image?
      ActiveStorage.web_image_content_types.include?(content_type)
    end

    # Metadata (content type, disposition, filename) forwarded to the service
    # for uploads and metadata updates, honoring the binary/inline rules above.
    def service_metadata
      if forcibly_serve_as_binary?
        { content_type: ActiveStorage.binary_content_type, disposition: :attachment, filename: filename }
      elsif !allowed_inline?
        { content_type: content_type, disposition: :attachment, filename: filename }
      else
        { content_type: content_type }
      end
    end

    # after_update_commit hook: pushes refreshed metadata to the service.
    def update_service_metadata
      service.update_metadata key, **service_metadata if service_metadata.any?
    end
end
# Run any load hooks registered for :active_storage_blob now that the class is defined.
ActiveSupport.run_load_hooks :active_storage_blob, ActiveStorage::Blob
| 45.780282 | 177 | 0.743539 |
1c950b455227b42fd6e0d2ab236a3b7031dfbe88 | 2,668 | require 'rails_helper'
# Specs for NfgUi::UI::Base, the renderer entry point exposing the
# #bootstrap and #nfg component-suite methods.
RSpec.describe NfgUi::UI::Base do
  let(:view_context) { action_controller_base_view_context }
  let(:action_controller_base_view_context) { ActionController::Base.new.view_context }
  let(:base_component) { NfgUi::UI::Base.new(view_context) }
  subject { base_component }
  # NOTE(review): `pending` called at the example-group level — confirm this
  # registers a pending example (rather than being a no-op) in this RSpec version.
  pending 'specs needed'

  describe '#initialize' do
    # The view context handed to the constructor must be stored unchanged.
    subject { base_component.view_context }
    it { is_expected.to eq action_controller_base_view_context }
  end

  describe 'Desired component suite creation methods' do
    subject { base_component }
    it 'responds to #bootstrap method' do
      expect(subject).to respond_to :bootstrap
    end
    it 'responds to #nfg method' do
      expect(subject).to respond_to :nfg
    end
  end

  describe '#bootstrap' do
    subject { base_component.bootstrap(*component_args) }
    let(:body) { 'test' }
    # Args are (component_name, traits, options).
    let(:component_args) { [:button, [], { body: body}] }
    it 'renders a bootstrap component' do
      expect(subject).to eq "<a class=\"btn btn-primary\" href=\"#\">#{body}</a>"
    end
  end

  describe '#nfg' do
    shared_examples 'a rendered nfg button component' do
      it 'renders an nfg button component' do
        expect(subject).to eq "<a class=\"btn btn-primary\" href=\"#\">#{body}</a>"
      end
    end
    subject { base_component.nfg(*component_args) }
    let(:body) { 'test' }
    let(:options) {{ body: body }}
    let(:component_args) { [:button, [], options] }
    it_behaves_like 'a rendered nfg button component'

    # :render_if gates rendering: only an explicit true renders the component.
    context 'when render_if is used' do
      let(:options) {{ body: body, render_if: render_if }}
      context 'when render_if is true' do
        let(:render_if) { true }
        it_behaves_like 'a rendered nfg button component'
      end
      context 'when render_if is false' do
        let(:render_if) { false }
        it 'returns nil' do
          expect(subject).to eq nil
        end
      end
      context 'when render_if is nil' do
        let(:render_if) { nil }
        it 'returns nil' do
          expect(subject).to eq nil
        end
      end
    end

    # :render_unless is the inverse gate: true suppresses rendering.
    # NOTE(review): the inner context descriptions below say "render_if" but
    # the contexts vary :render_unless — the descriptions look mislabeled.
    context 'when render_unless is used' do
      let(:options) {{ body: body, render_unless: render_unless }}
      context 'when render_if is true' do
        let(:render_unless) { true }
        it 'returns nil' do
          expect(subject).to eq nil
        end
      end
      context 'when render_if is false' do
        let(:render_unless) { false }
        it_behaves_like 'a rendered nfg button component'
      end
      context 'when render_if is nil' do
        let(:render_unless) { nil }
        it_behaves_like 'a rendered nfg button component'
      end
    end
  end
end
| 28.382979 | 87 | 0.635307 |
39eab725e0f1435966de0bd88a8a44cd38325fc0 | 1,623 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
# Auto-generated schema snapshot (see header above): edit via migrations,
# not by hand.
ActiveRecord::Schema.define(version: 20150119173200) do
  create_table "categories", force: true do |t|
    t.string "name"
    t.string "desc"
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  create_table "exhibitors", force: true do |t|
    t.string "name"
    t.string "desc"
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  # Join table: exhibitors <-> categories.
  # NOTE(review): no indexes on the foreign-key columns here or below —
  # consider adding them in a migration if lookups are slow.
  create_table "exhibitors_categories", force: true do |t|
    t.integer "exhibitor_id"
    t.integer "category_id"
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  create_table "products", force: true do |t|
    t.string "name"
    t.string "desc"
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  # Join table: products <-> categories.
  create_table "products_categories", force: true do |t|
    t.integer "product_id"
    t.integer "category_id"
    t.datetime "created_at"
    t.datetime "updated_at"
  end
end
| 31.211538 | 86 | 0.720887 |
01147cad525521bf9fc031c3a69037bef05811f2 | 3,402 | require 'optparse'
require 'io/console'
require 'yaml'
module Rmega
  module CLI
    # Shared helpers mixed into the rmega command-line executables: option
    # parsing, configuration-file loading, password prompting and remote
    # storage traversal.
    module Helpers
      # Memoized hash of CLI options, seeded from ~/.rmega (when present) and
      # then overridden by command-line switches.
      def cli_options
        $cli_options ||= read_configuration_file
      end

      # Prompts for a password on STDIN without echoing it; strips the
      # trailing newline. Returns "" when STDIN is at EOF (gets => nil),
      # instead of crashing with NoMethodError.
      def cli_prompt_password
        print("Enter password: ")
        password = STDIN.noecho(&:gets).to_s
        password = password[0..-2] if password.end_with?("\n")
        puts
        return password
      end

      # True when +url+ looks like a MEGA URL.
      def mega_url?(url)
        Nodes::Factory.url?(url)
      end

      # Path of the per-user configuration file.
      def configuration_filepath
        File.expand_path('~/.rmega')
      end

      # Loads ~/.rmega (YAML) and returns it as a symbol-keyed hash.
      # Returns an empty hash when the file does not exist.
      # Note: File.exists? was removed in Ruby 3.2; File.exist? is the
      # supported spelling.
      def read_configuration_file
        return {} unless File.exist?(configuration_filepath)
        opts = YAML.load_file(configuration_filepath)
        opts.keys.each { |k| opts[k.to_sym] = opts.delete(k) } # symbolize_keys!
        return opts
      end

      # Copies the collected CLI options onto Rmega.options and enables
      # debug logging / progress reporting accordingly.
      def apply_cli_options
        cli_options.each do |key, value|
          Rmega.options.__send__("#{key}=", value)
        end
        Rmega.logger.level = ::Logger::DEBUG if cli_options[:debug]
        Rmega.options.show_progress = true
        if Thread.respond_to?(:report_on_exception) and !cli_options[:debug]
          Thread.report_on_exception = false
        end
      end

      # Registers the switches shared by all rmega executables on the given
      # OptionParser instance.
      def apply_opt_parser_options(opts)
        opts.on("-t NUM", "--thread_pool_size", "Number of threads to use [1-8], default and recommended is #{Rmega.options.thread_pool_size}") { |num|
          # Clamp to the supported 1..8 range.
          num = num.to_i
          if num <= 0
            num = 1
          elsif num > 8
            num = 8
          end
          cli_options[:thread_pool_size] = num
        }
        opts.on("--proxy-addr ADDRESS", "Http proxy address") { |value|
          cli_options[:http_proxy_address] = value
        }
        opts.on("--proxy-port PORT", "Http proxy port") { |value|
          cli_options[:http_proxy_port] = value.to_i
        }
        opts.on("-u", "--user USER_EMAIL", "User email address") { |value|
          cli_options[:user] = value
        }
        opts.on("--pass [USER_PASSWORD]", "User password (if omitted will prompt for it)") { |value|
          cli_options[:pass] = value
        }
        opts.on("--debug", "Debug mode") {
          cli_options[:debug] = true
        }
        opts.on("-v", "--version", "Print the version number") {
          puts Rmega::VERSION
          puts Rmega::HOMEPAGE
          exit(0)
        }
      end

      # Walks +node+'s children following the slash-separated +path+
      # (case-insensitively). Returns the matching node, or nil when a
      # component does not match. With opts[:only_folders] set, file nodes
      # are never matched.
      #
      # Fixes over the previous version: the node is nil-guarded (a missing
      # intermediate component now yields nil instead of NoMethodError),
      # opts are propagated through the recursion so :only_folders applies
      # to every path component, the caller's string is no longer mutated
      # (gsub! could also raise on frozen literals), and the inner block
      # parameter no longer shadows the local +n+.
      def traverse_storage(node, path, opts = {})
        return nil if node.nil?
        path = path.gsub(/^\/|\/$/, "")
        curr_part = path.split("/")[0] || ""
        last_part = (path.split("/")[1..-1] || []).join("/")
        if curr_part.empty?
          # An empty path resolves to the node itself, but only for containers.
          if node.type == :root or node.type == :folder
            return node
          else
            return nil
          end
        else
          child = node.folders.find { |folder| folder.name.casecmp(curr_part).zero? }
          child ||= node.files.find { |file| file.name.casecmp(curr_part).zero? } unless opts[:only_folders]
          if last_part.empty?
            return child
          else
            return traverse_storage(child, last_part, opts)
          end
        end
      end

      # Runs the given block with CLI options applied, converting expected
      # failures into friendly messages (full backtrace in debug mode).
      def cli_rescue
        apply_cli_options
        yield
      rescue Interrupt
        puts "\nInterrupted"
      rescue Exception => ex
        if cli_options[:debug]
          raise(ex)
        else
          $stderr.puts "\nERROR: #{ex.message}"
        end
      end
    end
  end
end
| 26.169231 | 151 | 0.546443 |
e20b1248b42475c7b2e47f8a520488d8211f0474 | 1,786 | # Puma can serve each request in a thread from an internal thread pool.
# Puma serves each request from an internal thread pool. `threads` takes a
# minimum and a maximum count; libraries that keep their own thread pools
# (e.g. Active Record, whose pool defaults to 5) should be sized to match
# the maximum configured here.
rails_env   = ENV.fetch("RAILS_ENV") { "development" }
max_threads = ENV.fetch("RAILS_MAX_THREADS") { 5 }
min_threads = ENV.fetch("RAILS_MIN_THREADS") { max_threads }
threads min_threads, max_threads

# In development, give workers a very long timeout so that paused debugger
# sessions do not get the worker reaped.
worker_timeout 3600 if rails_env == "development"

# Port Puma listens on for incoming requests.
port ENV.fetch("PORT") { 4000 }

# Environment Puma runs in.
environment rails_env

# Location of Puma's PID file.
pidfile ENV.fetch("PIDFILE") { "tmp/pids/server.pid" }

# Number of forked worker processes for clustered mode. Total concurrency is
# `threads` * `workers`. Workers are not supported on JRuby or Windows
# (no fork).
#
# workers ENV.fetch("WEB_CONCURRENCY") { 2 }

# When enabling `workers`, preload the app so copy-on-write keeps the
# per-worker memory footprint down.
#
# preload_app!

# Allow puma to be restarted by `bin/rails restart` command.
plugin :tmp_restart
| 40.590909 | 85 | 0.767077 |
4aa15f3bbc20e39088a753bbaa143725808c2a12 | 5,750 | =begin
#Selling Partner API for Merchant Fulfillment
#The Selling Partner API for Merchant Fulfillment helps you build applications that let sellers purchase shipping for non-Prime and Prime orders using Amazon’s Buy Shipping Services.
OpenAPI spec version: v0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.26
=end
require 'date'
module AmzSpApi::MerchantFulfillmentApiModel
  # Response schema.
  #
  # Auto-generated by swagger-codegen (see the file header); prefer
  # regenerating from the OpenAPI spec over hand-editing this class.
  class GetShipmentResponse
    # The payload for the getShipment operation.
    attr_accessor :payload

    # One or more unexpected errors occurred during this operation.
    attr_accessor :errors

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'payload' => :'payload',
        :'errors' => :'errors'
      }
    end

    # Attribute type mapping.
    def self.swagger_types
      {
        :'payload' => :'Shipment',
        :'errors' => :'ErrorList'
      }
    end

    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)

      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }

      if attributes.has_key?(:'payload')
        self.payload = attributes[:'payload']
      end

      if attributes.has_key?(:'errors')
        self.errors = attributes[:'errors']
      end
    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      # No constrained properties on this model, so it is always valid.
      true
    end

    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
        payload == o.payload &&
        errors == o.errors
    end

    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end

    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [payload, errors].hash
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end # or else data not found in attributes(hash), not an issue as the data can be optional
      end
      self
    end

    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        # NOTE(review): DateTime is a legacy Ruby class; this is generated
        # code — change it via the codegen templates if it becomes a problem.
        DateTime.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        # Any other type name is resolved as a sibling generated model class.
        temp_model = AmzSpApi::MerchantFulfillmentApiModel.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end

    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 29.187817 | 182 | 0.625217 |
39891ce01f068a5faaf9116ed9179221134c22e8 | 202 | Deface::Override.new(
  # Swaps Spree's cart container hook for the tshop cart partial.
  :name => "replace_cart_container",
  :virtual_path => "spree/orders/edit",
  :replace => "[data-hook='cart_container']",
  :partial => 'tshop/orders/cart_container',
)
| 28.857143 | 45 | 0.648515 |
6273cafead8e0663d6d7f7251bdc55f62b3c85b8 | 183 | require 'spec_helper'
# NOTE(review): the group describes Elias::Parks, but the example exercises
# Elias::Park.find and asserts an Elias::Park instance — confirm which
# constant is the intended subject. Also uses the legacy `should` syntax.
describe Elias::Parks do
  describe ".find" do
    # Looks a park up by its slug.
    let(:mk) { Elias::Park.find('magic-kingdom') }
    it { mk.should be_an_instance_of Elias::Park }
  end
end
| 18.3 | 50 | 0.677596 |
18b686db8f24f16bb53ffeeae01637c3a9003e44 | 69 | # frozen_string_literal: true
module MyTado
  # Gem version, following Semantic Versioning (MAJOR.MINOR.PATCH).
  VERSION = "0.1.0"
end
| 11.5 | 29 | 0.724638 |
edab6dba081dd5064b5bd6d5ff0ee0fa0016134d | 1,216 | lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'heartcheck/activerecord/version'

# Gem specification for heartcheck-activerecord: a heartcheck plugin that
# verifies ActiveRecord database connectivity.
Gem::Specification.new do |spec|
  spec.name = 'heartcheck-activerecord'
  spec.version = Heartcheck::Activerecord::VERSION
  spec.authors = ['Locaweb']
  spec.email = ['[email protected]']
  spec.summary = 'A activerecord checker'
  spec.description = 'Plugin to check activerecord connection in heartcheck.'
  spec.homepage = 'http://developer.locaweb.com.br'
  spec.license = 'MIT'

  # Ship only the files under lib/ (directories are filtered out).
  spec.files = Dir['lib/**/*'].select { |f| File.file?(f) }
  spec.executables = spec.files.grep(/^bin\//) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(/^spec\//)
  spec.require_paths = ['lib']

  # Runtime dependencies.
  spec.add_runtime_dependency 'net-telnet', '~> 0.1.1'
  spec.add_dependency 'activerecord', '>= 3.2', '< 6.0'
  spec.add_dependency 'heartcheck', '~> 2.0'

  # Development-only tooling.
  spec.add_development_dependency 'pry-nav'
  spec.add_development_dependency 'redcarpet'
  spec.add_development_dependency 'rspec'
  spec.add_development_dependency 'rubocop'
  spec.add_development_dependency 'sqlite3'
  spec.add_development_dependency 'yard'

  spec.required_ruby_version = '>= 2.3'
end
| 35.764706 | 77 | 0.722862 |
e9189b0514148b1d1a0ac2fb2dee595be4a691c4 | 8,538 | # frozen_string_literal: true
require "bundler"
require "bundler/friendly_errors"
require "cgi"
# Specs for Bundler's "friendly errors" layer: the rescue-all wrapper that
# turns exceptions raised inside `Bundler.with_friendly_errors` into
# human-readable messages, exit codes, and GitHub issue-search URLs.
RSpec.describe Bundler, "friendly errors" do
  context "with invalid YAML in .gemrc" do
    before do
      File.open(Gem.configuration.config_file_name, "w") do |f|
        f.write "invalid: yaml: hah"
      end
    end

    after do
      FileUtils.rm(Gem.configuration.config_file_name)
    end

    it "reports a relevant friendly error message" do
      gemfile <<-G
        source "#{file_uri_for(gem_repo1)}"
        gem "rack"
      G

      bundle :install, :env => { "DEBUG" => "true" }

      expect(err).to include("Failed to load #{home(".gemrc")}")
      expect(exitstatus).to eq(0) if exitstatus
    end
  end

  it "calls log_error in case of exception" do
    exception = Exception.new
    expect(Bundler::FriendlyErrors).to receive(:exit_status).with(exception).and_return(1)
    expect do
      Bundler.with_friendly_errors do
        raise exception
      end
    end.to raise_error(SystemExit)
  end

  it "calls exit_status on exception" do
    exception = Exception.new
    expect(Bundler::FriendlyErrors).to receive(:log_error).with(exception)
    expect do
      Bundler.with_friendly_errors do
        raise exception
      end
    end.to raise_error(SystemExit)
  end

  # Each context below exercises the per-exception-class branch of
  # FriendlyErrors.log_error.
  describe "#log_error" do
    shared_examples "Bundler.ui receive error" do |error, message|
      it "" do
        expect(Bundler.ui).to receive(:error).with(message || error.message)
        Bundler::FriendlyErrors.log_error(error)
      end
    end

    shared_examples "Bundler.ui receive trace" do |error|
      it "" do
        expect(Bundler.ui).to receive(:trace).with(error)
        Bundler::FriendlyErrors.log_error(error)
      end
    end

    context "YamlSyntaxError" do
      it_behaves_like "Bundler.ui receive error", Bundler::YamlSyntaxError.new(StandardError.new, "sample_message")

      it "Bundler.ui receive trace" do
        std_error = StandardError.new
        exception = Bundler::YamlSyntaxError.new(std_error, "sample_message")
        expect(Bundler.ui).to receive(:trace).with(std_error)
        Bundler::FriendlyErrors.log_error(exception)
      end
    end

    context "Dsl::DSLError, GemspecError" do
      it_behaves_like "Bundler.ui receive error", Bundler::Dsl::DSLError.new("description", "dsl_path", "backtrace")
      it_behaves_like "Bundler.ui receive error", Bundler::GemspecError.new
    end

    context "GemRequireError" do
      let(:orig_error) { StandardError.new }
      let(:error) { Bundler::GemRequireError.new(orig_error, "sample_message") }

      before do
        allow(orig_error).to receive(:backtrace).and_return([])
      end

      it "Bundler.ui receive error" do
        expect(Bundler.ui).to receive(:error).with(error.message)
        Bundler::FriendlyErrors.log_error(error)
      end

      it "writes to Bundler.ui.trace" do
        expect(Bundler.ui).to receive(:trace).with(orig_error)
        Bundler::FriendlyErrors.log_error(error)
      end
    end

    context "BundlerError" do
      it "Bundler.ui receive error" do
        error = Bundler::BundlerError.new
        expect(Bundler.ui).to receive(:error).with(error.message, :wrap => true)
        Bundler::FriendlyErrors.log_error(error)
      end
      it_behaves_like "Bundler.ui receive trace", Bundler::BundlerError.new
    end

    context "Thor::Error" do
      it_behaves_like "Bundler.ui receive error", Bundler::Thor::Error.new
    end

    context "LoadError" do
      let(:error) { LoadError.new("cannot load such file -- openssl") }

      it "Bundler.ui receive error" do
        expect(Bundler.ui).to receive(:error).with("\nCould not load OpenSSL.")
        Bundler::FriendlyErrors.log_error(error)
      end

      it "Bundler.ui receive warn" do
        expect(Bundler.ui).to receive(:warn).with(any_args, :wrap => true)
        Bundler::FriendlyErrors.log_error(error)
      end

      it "Bundler.ui receive trace" do
        expect(Bundler.ui).to receive(:trace).with(error)
        Bundler::FriendlyErrors.log_error(error)
      end
    end

    context "Interrupt" do
      it "Bundler.ui receive error" do
        expect(Bundler.ui).to receive(:error).with("\nQuitting...")
        Bundler::FriendlyErrors.log_error(Interrupt.new)
      end
      it_behaves_like "Bundler.ui receive trace", Interrupt.new
    end

    context "Gem::InvalidSpecificationException" do
      it "Bundler.ui receive error" do
        error = Gem::InvalidSpecificationException.new
        expect(Bundler.ui).to receive(:error).with(error.message, :wrap => true)
        Bundler::FriendlyErrors.log_error(error)
      end
    end

    context "SystemExit" do
      # Does nothing
    end

    context "Java::JavaLang::OutOfMemoryError" do
      # Stub out the JRuby-only exception hierarchy so the branch can be
      # exercised on MRI. Note: this defines the module globally.
      module Java
        module JavaLang
          class OutOfMemoryError < StandardError; end
        end
      end

      it "Bundler.ui receive error" do
        error = Java::JavaLang::OutOfMemoryError.new
        expect(Bundler.ui).to receive(:error).with(/JVM has run out of memory/)
        Bundler::FriendlyErrors.log_error(error)
      end
    end

    context "unexpected error" do
      it "calls request_issue_report_for with error" do
        error = StandardError.new
        expect(Bundler::FriendlyErrors).to receive(:request_issue_report_for).with(error)
        Bundler::FriendlyErrors.log_error(error)
      end
    end
  end

  describe "#exit_status" do
    it "calls status_code for BundlerError" do
      error = Bundler::BundlerError.new
      expect(error).to receive(:status_code).and_return("sample_status_code")
      expect(Bundler::FriendlyErrors.exit_status(error)).to eq("sample_status_code")
    end

    it "returns 15 for Thor::Error" do
      error = Bundler::Thor::Error.new
      expect(Bundler::FriendlyErrors.exit_status(error)).to eq(15)
    end

    it "calls status for SystemExit" do
      error = SystemExit.new
      expect(error).to receive(:status).and_return("sample_status")
      expect(Bundler::FriendlyErrors.exit_status(error)).to eq("sample_status")
    end

    it "returns 1 in other cases" do
      error = StandardError.new
      expect(Bundler::FriendlyErrors.exit_status(error)).to eq(1)
    end
  end

  describe "#request_issue_report_for" do
    it "calls relevant methods for Bundler.ui" do
      expect(Bundler.ui).to receive(:info)
      expect(Bundler.ui).to receive(:error)
      expect(Bundler.ui).to receive(:warn)
      Bundler::FriendlyErrors.request_issue_report_for(StandardError.new)
    end

    it "includes error class, message and backlog" do
      error = StandardError.new
      allow(Bundler::FriendlyErrors).to receive(:issues_url).and_return("")
      expect(error).to receive(:class).at_least(:once)
      expect(error).to receive(:message).at_least(:once)
      expect(error).to receive(:backtrace).at_least(:once)
      Bundler::FriendlyErrors.request_issue_report_for(error)
    end
  end

  describe "#issues_url" do
    it "generates a search URL for the exception message" do
      exception = Exception.new("Exception message")
      expect(Bundler::FriendlyErrors.issues_url(exception)).to eq("https://github.com/bundler/bundler/search?q=Exception+message&type=Issues")
    end

    it "generates a search URL for only the first line of a multi-line exception message" do
      exception = Exception.new(<<END)
First line of the exception message
Second line of the exception message
END
      expect(Bundler::FriendlyErrors.issues_url(exception)).to eq("https://github.com/bundler/bundler/search?q=First+line+of+the+exception+message&type=Issues")
    end

    it "generates the url without colons" do
      exception = Exception.new(<<END)
Exception ::: with ::: colons :::
END
      issues_url = Bundler::FriendlyErrors.issues_url(exception)
      expect(issues_url).not_to include("%3A")
      expect(issues_url).to eq("https://github.com/bundler/bundler/search?q=#{CGI.escape("Exception with colons ")}&type=Issues")
    end

    it "removes information after - for Errono::EACCES" do
      exception = Exception.new(<<END)
Errno::EACCES: Permission denied @ dir_s_mkdir - /Users/foo/bar/
END
      allow(exception).to receive(:is_a?).with(Errno).and_return(true)
      issues_url = Bundler::FriendlyErrors.issues_url(exception)
      expect(issues_url).not_to include("/Users/foo/bar")
      expect(issues_url).to eq("https://github.com/bundler/bundler/search?q=#{CGI.escape("Errno EACCES Permission denied @ dir_s_mkdir ")}&type=Issues")
    end
  end
end
| 33.351563 | 160 | 0.679784 |
61eb940fa3df9391ea1afeaab970109d354218c6 | 1,006 | =begin
#BrowserUp Proxy
#___ This is the REST API for controlling the BrowserUp Proxy. The BrowserUp Proxy is a swiss army knife for automated testing that captures HTTP traffic in HAR files. It is also useful for Selenium/Cypress tests. ___
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.1.1
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for BrowserupProxy::Har
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe BrowserupProxy::Har do
  let(:instance) { BrowserupProxy::Har.new }

  describe 'test an instance of Har' do
    it 'should create an instance of Har' do
      expect(instance).to be_instance_of(BrowserupProxy::Har)
    end
  end

  describe 'test attribute "log"' do
    # NOTE(review): generated placeholder — no assertion yet, so this
    # example always passes.
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
| 28.742857 | 219 | 0.750497 |
08e37723df0b3004db7f2543247fdec9f279854e | 9,890 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DataFactory::Mgmt::V2018_06_01
module Models
#
# The Azure Data Factory nested object which identifies data within
# different data stores, such as tables, files, folders, and documents.
#
class Dataset

  include MsRestAzure

  # Maps the 'type' discriminator value found in the wire payload to the
  # concrete dataset model class used for polymorphic deserialization.
  @@discriminatorMap = Hash.new
  @@discriminatorMap["GoogleAdWordsObject"] = "GoogleAdWordsObjectDataset"
  @@discriminatorMap["AzureDataExplorerTable"] = "AzureDataExplorerTableDataset"
  @@discriminatorMap["OracleServiceCloudObject"] = "OracleServiceCloudObjectDataset"
  @@discriminatorMap["DynamicsAXResource"] = "DynamicsAXResourceDataset"
  @@discriminatorMap["ResponsysObject"] = "ResponsysObjectDataset"
  @@discriminatorMap["SalesforceMarketingCloudObject"] = "SalesforceMarketingCloudObjectDataset"
  @@discriminatorMap["VerticaTable"] = "VerticaTableDataset"
  @@discriminatorMap["NetezzaTable"] = "NetezzaTableDataset"
  @@discriminatorMap["ZohoObject"] = "ZohoObjectDataset"
  @@discriminatorMap["XeroObject"] = "XeroObjectDataset"
  @@discriminatorMap["SquareObject"] = "SquareObjectDataset"
  @@discriminatorMap["SparkObject"] = "SparkObjectDataset"
  @@discriminatorMap["ShopifyObject"] = "ShopifyObjectDataset"
  @@discriminatorMap["ServiceNowObject"] = "ServiceNowObjectDataset"
  @@discriminatorMap["QuickBooksObject"] = "QuickBooksObjectDataset"
  @@discriminatorMap["PrestoObject"] = "PrestoObjectDataset"
  @@discriminatorMap["PhoenixObject"] = "PhoenixObjectDataset"
  @@discriminatorMap["PaypalObject"] = "PaypalObjectDataset"
  @@discriminatorMap["MarketoObject"] = "MarketoObjectDataset"
  @@discriminatorMap["MariaDBTable"] = "MariaDBTableDataset"
  @@discriminatorMap["MagentoObject"] = "MagentoObjectDataset"
  @@discriminatorMap["JiraObject"] = "JiraObjectDataset"
  @@discriminatorMap["ImpalaObject"] = "ImpalaObjectDataset"
  @@discriminatorMap["HubspotObject"] = "HubspotObjectDataset"
  @@discriminatorMap["HiveObject"] = "HiveObjectDataset"
  @@discriminatorMap["HBaseObject"] = "HBaseObjectDataset"
  @@discriminatorMap["GreenplumTable"] = "GreenplumTableDataset"
  @@discriminatorMap["GoogleBigQueryObject"] = "GoogleBigQueryObjectDataset"
  @@discriminatorMap["EloquaObject"] = "EloquaObjectDataset"
  @@discriminatorMap["DrillTable"] = "DrillTableDataset"
  @@discriminatorMap["CouchbaseTable"] = "CouchbaseTableDataset"
  @@discriminatorMap["ConcurObject"] = "ConcurObjectDataset"
  @@discriminatorMap["AzurePostgreSqlTable"] = "AzurePostgreSqlTableDataset"
  @@discriminatorMap["AmazonMWSObject"] = "AmazonMWSObjectDataset"
  @@discriminatorMap["HttpFile"] = "HttpDataset"
  @@discriminatorMap["AzureSearchIndex"] = "AzureSearchIndexDataset"
  @@discriminatorMap["WebTable"] = "WebTableDataset"
  @@discriminatorMap["SapTableResource"] = "SapTableResourceDataset"
  @@discriminatorMap["RestResource"] = "RestResourceDataset"
  @@discriminatorMap["SqlServerTable"] = "SqlServerTableDataset"
  @@discriminatorMap["SapOpenHubTable"] = "SapOpenHubTableDataset"
  @@discriminatorMap["SapHanaTable"] = "SapHanaTableDataset"
  @@discriminatorMap["SapEccResource"] = "SapEccResourceDataset"
  @@discriminatorMap["SapCloudForCustomerResource"] = "SapCloudForCustomerResourceDataset"
  @@discriminatorMap["SalesforceObject"] = "SalesforceObjectDataset"
  @@discriminatorMap["RelationalTable"] = "RelationalTableDataset"
  @@discriminatorMap["AzureMySqlTable"] = "AzureMySqlTableDataset"
  @@discriminatorMap["OracleTable"] = "OracleTableDataset"
  @@discriminatorMap["ODataResource"] = "ODataResourceDataset"
  @@discriminatorMap["CosmosDbMongoDbApiCollection"] = "CosmosDbMongoDbApiCollectionDataset"
  @@discriminatorMap["MongoDbV2Collection"] = "MongoDbV2CollectionDataset"
  @@discriminatorMap["MongoDbCollection"] = "MongoDbCollectionDataset"
  @@discriminatorMap["FileShare"] = "FileShareDataset"
  @@discriminatorMap["Office365Table"] = "Office365Dataset"
  @@discriminatorMap["AzureBlobFSFile"] = "AzureBlobFSDataset"
  @@discriminatorMap["AzureDataLakeStoreFile"] = "AzureDataLakeStoreDataset"
  @@discriminatorMap["DynamicsEntity"] = "DynamicsEntityDataset"
  @@discriminatorMap["DocumentDbCollection"] = "DocumentDbCollectionDataset"
  @@discriminatorMap["CustomDataset"] = "CustomDataset"
  @@discriminatorMap["CassandraTable"] = "CassandraTableDataset"
  @@discriminatorMap["AzureSqlDWTable"] = "AzureSqlDWTableDataset"
  @@discriminatorMap["AzureSqlTable"] = "AzureSqlTableDataset"
  @@discriminatorMap["AzureTable"] = "AzureTableDataset"
  @@discriminatorMap["AzureBlob"] = "AzureBlobDataset"
  @@discriminatorMap["DelimitedText"] = "DelimitedTextDataset"
  @@discriminatorMap["Parquet"] = "ParquetDataset"
  @@discriminatorMap["AmazonS3Object"] = "AmazonS3Dataset"

  # Sets the polymorphic discriminator for the base class.
  def initialize
    @type = "Dataset"
  end

  attr_accessor :type

  # @return Unmatched properties from the message are deserialized this
  # collection
  attr_accessor :additional_properties

  # @return [String] Dataset description.
  attr_accessor :description

  # @return Columns that define the structure of the dataset. Type: array
  # (or Expression with resultType array), itemType: DatasetDataElement.
  attr_accessor :structure

  # @return Columns that define the physical type schema of the dataset.
  # Type: array (or Expression with resultType array), itemType:
  # DatasetSchemaDataElement.
  attr_accessor :schema

  # @return [LinkedServiceReference] Linked service reference.
  attr_accessor :linked_service_name

  # @return [Hash{String => ParameterSpecification}] Parameters for
  # dataset.
  attr_accessor :parameters

  # @return List of tags that can be used for describing the Dataset.
  attr_accessor :annotations

  # @return [DatasetFolder] The folder that this Dataset is in. If not
  # specified, Dataset will appear at the root level.
  attr_accessor :folder


  #
  # Mapper for Dataset class as Ruby Hash.
  # This will be used for serialization/deserialization.
  #
  def self.mapper()
    {
      client_side_validation: true,
      required: false,
      serialized_name: 'Dataset',
      type: {
        name: 'Composite',
        polymorphic_discriminator: 'type',
        uber_parent: 'Dataset',
        class_name: 'Dataset',
        model_properties: {
          additional_properties: {
            client_side_validation: true,
            required: false,
            type: {
              name: 'Dictionary',
              value: {
                client_side_validation: true,
                required: false,
                serialized_name: 'ObjectElementType',
                type: {
                  name: 'Object'
                }
              }
            }
          },
          description: {
            client_side_validation: true,
            required: false,
            serialized_name: 'description',
            type: {
              name: 'String'
            }
          },
          structure: {
            client_side_validation: true,
            required: false,
            serialized_name: 'structure',
            type: {
              name: 'Object'
            }
          },
          schema: {
            client_side_validation: true,
            required: false,
            serialized_name: 'schema',
            type: {
              name: 'Object'
            }
          },
          linked_service_name: {
            client_side_validation: true,
            required: true,
            serialized_name: 'linkedServiceName',
            default_value: {},
            type: {
              name: 'Composite',
              class_name: 'LinkedServiceReference'
            }
          },
          parameters: {
            client_side_validation: true,
            required: false,
            serialized_name: 'parameters',
            type: {
              name: 'Dictionary',
              value: {
                client_side_validation: true,
                required: false,
                serialized_name: 'ParameterSpecificationElementType',
                type: {
                  name: 'Composite',
                  class_name: 'ParameterSpecification'
                }
              }
            }
          },
          annotations: {
            client_side_validation: true,
            required: false,
            serialized_name: 'annotations',
            type: {
              name: 'Sequence',
              element: {
                client_side_validation: true,
                required: false,
                serialized_name: 'ObjectElementType',
                type: {
                  name: 'Object'
                }
              }
            }
          },
          folder: {
            client_side_validation: true,
            required: false,
            serialized_name: 'folder',
            type: {
              name: 'Composite',
              class_name: 'DatasetFolder'
            }
          }
        }
      }
    }
  end
end
end
end
| 42.085106 | 100 | 0.60364 |
01abf40a9960c7f687f2fe5e6226a93bf3f270d2 | 9,259 | require 'spec_helper'
describe Etsy::User do
  # NOTE(review): all but the first example below are commented out —
  # apparently mid-migration from shoulda-style `should` blocks to RSpec.
  # They are preserved verbatim as a reference for re-enabling.
  context "The User class" do
    it "should be able to find a single user" do
      users = mock_request('/users/littletjane', {}, 'User', 'getUser.single.json')
      expect(Etsy::User.find('littletjane')).to eql(users.first)
    end

    # should "be able to find multiple users" do
    # users = mock_request('/users/littletjane,reagent', {}, 'User', 'getUser.multiple.json')
    # User.find('littletjane', 'reagent').should == users
    # end
    #
    # should "be able to pass options when finding a user" do
    # options = {:limit => 90, :offset => 90}
    # users = mock_request('/users/littletjane', options, 'User', 'getUser.single.json')
    # User.find('littletjane', options).should == users.first
    # end
    #
    # should "be able to find the authenticated user" do
    # options = {:access_token => 'token', :access_secret => 'secret'}
    # users = mock_request('/users/__SELF__', options, 'User', 'getUser.single.json')
    # User.myself('token', 'secret', options).should == users.first
    # end
  end
  #
  # context "An instance of the User class" do
  #
  # context "requested with oauth access token" do
  # setup do
  # options = {:access_token => 'token', :access_secret => 'secret'}
  #
  # data = read_fixture('user/getUser.single.json')
  # response = 'response'
  # response.stubs(:result).with().returns [data]
  # Request.stubs(:get).with('/users/__SELF__', options).returns response
  #
  # @user = User.find('__SELF__', options)
  # end
  #
  # should "persist the token" do
  # @user.token.should == 'token'
  # end
  #
  # should "persist the secret" do
  # @user.secret.should == 'secret'
  # end
  # end
  #
  # context "with public response data" do
  # setup do
  # data = read_fixture('user/getUser.single.json')
  # @user = User.new(data.first)
  # end
  #
  # should "have an ID" do
  # @user.id.should == 5327518
  # end
  #
  # should "have a :username" do
  # @user.username.should == 'littletjane'
  # end
  #
  # should "have a value for :created" do
  # @user.created.should == 1191381578
  # end
  #
  # should "not have an email address" do
  # @user.email.should be_nil
  # end
  # end
  #
  # context "with private response data" do
  # setup do
  # data = read_fixture('user/getUser.single.private.json')
  # @user = User.new(data.first, 'token', 'secret')
  # end
  #
  # should "have an email address" do
  # @user.email.should == '[email protected]'
  # end
  # end
  #
  # context "requested with associated shops" do
  # setup do
  # data = read_fixture('user/getUser.single.withShops.json')
  # @user = User.new(data.first)
  # end
  #
  # should "have shops" do
  # @user.shops.each do |shop|
  # shop.class.should == Shop
  # end
  # end
  #
  # # This assumes for now that a user can have only one shop belonging to them
  # should "return the first shop belonging to the user" do
  # @user.shop.should == @user.shops.first
  # end
  # end
  #
  # context "requested without associated shops" do
  # setup do
  # @data_without_shops = read_fixture('user/getUser.single.json')
  # @data_with_shops = read_fixture('user/getUser.single.withShops.json')
  # @options = {:fields => 'user_id', :includes => 'Shops'}
  #
  # @user_without_shops = User.new(@data_without_shops.first)
  # @user_with_shops = User.new(@data_with_shops.first)
  # end
  #
  # should "make a call to the API to retrieve it if requested" do
  # User.expects(:find).with('littletjane', @options).returns @user_with_shops
  # @user_without_shops.shops
  # end
  #
  # should "not call the api twice" do
  # User.expects(:find).once.with('littletjane', @options).returns @user_with_shops
  # @user_without_shops.shops
  # @user_without_shops.shops
  # end
  #
  # should "return a list of populated shop instances" do
  # User.stubs(:find).with('littletjane', @options).returns @user_with_shops
  # @user_without_shops.shops.first.name.should == 'LittleJane'
  # end
  #
  # should "make the call with authentication if oauth is used" do
  # user = User.new(@data_without_shops.first, 'token', 'secret')
  # oauth = {:access_token => 'token', :access_secret => 'secret'}
  # User.expects(:find).with('littletjane', @options.merge(oauth)).returns @user_with_shops
  # user.shops
  # end
  # end
  #
  # context "requested with an associated profile" do
  # setup do
  # data = read_fixture('user/getUser.single.withProfile.json')
  # @user = User.new(data.first)
  # end
  #
  # should "have a profile" do
  # @user.profile.class.should == Profile
  # end
  # end
  #
  # context "requested without an associated profile" do
  # setup do
  # @data_without_profile = read_fixture('user/getUser.single.json')
  # @data_with_profile = read_fixture('user/getUser.single.withProfile.json')
  # @options = {:fields => 'user_id', :includes => 'Profile'}
  #
  # @user_without_profile = User.new(@data_without_profile.first)
  # @user_with_profile = User.new(@data_with_profile.first)
  # end
  #
  # should "make a call to the API to retrieve it if requested" do
  # User.expects(:find).with('littletjane', @options).returns @user_with_profile
  # @user_without_profile.profile
  # end
  #
  # should "not call the api twice" do
  # User.expects(:find).once.with('littletjane', @options).returns @user_with_profile
  # @user_without_profile.profile
  # @user_without_profile.profile
  # end
  #
  # should "return a populated profile instance" do
  # User.stubs(:find).with('littletjane', @options).returns @user_with_profile
  # @user_without_profile.profile.bio.should == 'I make stuff'
  # end
  #
  # should "make the call with authentication if oauth is used" do
  # user = User.new(@data_without_profile.first, 'token', 'secret')
  # oauth = {:access_token => 'token', :access_secret => 'secret'}
  # User.expects(:find).with('littletjane', @options.merge(oauth)).returns @user_with_profile
  # user.profile
  # end
  # end
  #
  # context "instantiated with oauth token" do
  # setup do
  # @user = User.new(nil, 'token', 'secret')
  # end
  #
  # should "have the token" do
  # @user.token.should == 'token'
  # end
  #
  # should "have the secret" do
  # @user.secret.should == 'secret'
  # end
  #
  # end
  #
  # should "know when the user was created" do
  # user = User.new
  # user.stubs(:created).returns(1)
  #
  # user.created_at.should == Time.at(1)
  # end
  #
  # context "with favorite listings data" do
  # setup do
  # data = read_fixture('user/getUser.single.withProfile.json')
  # @user = User.new(data.first)
  # listing_1 = stub(:listing_id => 1, :user_id => @user.id)
  # listing_2 = stub(:listing_id => 2, :user_id => @user.id)
  # @favorite_listings = [listing_1, listing_2]
  # end
  #
  # should "have all listings" do
  # FavoriteListing.stubs(:find_all_user_favorite_listings).with(@user.id, {:access_token => nil, :access_secret => nil}).returns(@favorite_listings)
  # Listing.stubs(:find).with([1, 2], {:access_token => nil, :access_secret => nil}).returns(['listings'])
  # @user.favorites.should == ['listings']
  # end
  # end
  #
  # context "with bought listings data" do
  # setup do
  # data = read_fixture('user/getUser.single.withProfile.json')
  # @user = User.new(data.first)
  # listing_1 = stub(:listing_id => 1, :user_id => @user.id)
  # listing_2 = stub(:listing_id => 2, :user_id => @user.id)
  # @bought_listings = [listing_1, listing_2]
  # end
  #
  # should "have all listings" do
  # Transaction.stubs(:find_all_by_buyer_id).with(@user.id, {:access_token => nil, :access_secret => nil}).returns(@bought_listings)
  # Listing.stubs(:find).with([1, 2], {:access_token => nil, :access_secret => nil}).returns(['listings'])
  # @user.bought_listings.should == ['listings']
  # end
  # end
  # end
  #
  # should "know the addresses for a user" do
  # user = User.new
  # user.stubs(:username).with().returns('username')
  #
  # Address.stubs(:find).with('username', {}).returns('addresses')
  #
  # user.addresses.should == 'addresses'
  # end
end
| 37.184739 | 157 | 0.567664 |
115f085f8e43eb568cd2d4553d7c7a30e96f13b3 | 6,455 | # -------------------------------------------------------------------------- #
# Copyright 2002-2020, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
require 'opennebula/pool'
module OpenNebula
class System

    #######################################################################
    # Constants and Class attribute accessors
    #######################################################################

    # XML-RPC method names for system-level oned operations.
    SYSTEM_METHODS = {
        :userquotainfo => "userquota.info",
        :userquotaupdate => "userquota.update",
        :groupquotainfo => "groupquota.info",
        :groupquotaupdate => "groupquota.update",
        :version => "system.version",
        :config => "system.config",
        :sql => "system.sql",
        :sqlquery => "system.sqlquery"
    }

    #######################################################################
    # Class constructor
    #######################################################################

    # Constructor
    # @param [Client] client that represents a XML-RPC connection
    def initialize(client)
        @client = client
    end

    #######################################################################
    # XML-RPC Methods
    #######################################################################

    # Executes and replicates SQL commands on OpenNebula DB
    # @param [String] Sql string
    # @param [Boolean] True to replicate command on a federation. To
    # operate on federated tables
    # @return [Integer, OpenNebula::Error] Sql execution result in case
    # of success, Error otherwise
    def sql_command(sql, federate)
        return @client.call(SYSTEM_METHODS[:sql], sql, federate)
    end

    # Executes a SQL query command on OpenNebula DB
    # @param [String] Sql string
    # @return [String, OpenNebula::Error] Sql execution result in XML
    # format in case of success, Error otherwise
    # <QUERY>
    # the query sent to oned
    # </QUERY>
    # <RESULT>
    # <ROW>
    # <column_name>column_value</column_name>
    # ...
    # </ROW>
    # </RESULT>
    def sql_query_command(sql)
        return @client.call(SYSTEM_METHODS[:sqlquery], sql)
    end

    #
    # Gets the oned version
    #
    # @return [String, OpenNebula::Error] the oned version in case
    # of success, Error otherwise
    def get_oned_version()
        # Fix: use the SYSTEM_METHODS lookup table like every other method
        # in this class instead of a hard-coded method name string.
        return @client.call(SYSTEM_METHODS[:version])
    end

    # Returns whether of not the oned version is the same as the OCA version
    #
    # @return [true, false, OpenNebula::Error] true if oned is the same
    # version
    def compatible_version()
        no_revision = VERSION[/^\d+\.\d+\./]
        oned_v = get_oned_version

        if OpenNebula.is_error?(oned_v)
            return oned_v
        end

        return (oned_v =~ /#{no_revision}/) != nil
    end

    # Gets the oned configuration
    #
    # @return [XMLElement, OpenNebula::Error] the oned configuration in case
    # of success, Error otherwise
    def get_configuration()
        rc = @client.call(SYSTEM_METHODS[:config])

        if OpenNebula.is_error?(rc)
            return rc
        end

        config = XMLElement.new
        config.initialize_xml(rc, 'OPENNEBULA_CONFIGURATION')

        return config
    end

    # Gets the default user quota limits
    #
    # @return [XMLElement, OpenNebula::Error] the default user quota in case
    # of success, Error otherwise
    def get_user_quotas()
        rc = @client.call(SYSTEM_METHODS[:userquotainfo])

        if OpenNebula.is_error?(rc)
            return rc
        end

        default_quotas = XMLElement.new
        default_quotas.initialize_xml(rc, 'DEFAULT_USER_QUOTAS')

        return default_quotas
    end

    # Sets the default user quota limits
    # @param quota [String] a template (XML or txt) with the new quota limits
    #
    # @return [nil, OpenNebula::Error] nil in case of success, Error
    # otherwise
    def set_user_quotas(quota)
        return @client.call(SYSTEM_METHODS[:userquotaupdate], quota)
    end

    # Gets the default group quota limits
    #
    # @return [XMLElement, OpenNebula::Error] the default group quota in case
    # of success, Error otherwise
    def get_group_quotas()
        rc = @client.call(SYSTEM_METHODS[:groupquotainfo])

        if OpenNebula.is_error?(rc)
            return rc
        end

        default_quotas = XMLElement.new
        default_quotas.initialize_xml(rc, 'DEFAULT_GROUP_QUOTAS')

        return default_quotas
    end

    # Sets the default group quota limits
    # @param quota [String] a template (XML or txt) with the new quota limits
    #
    # @return [nil, OpenNebula::Error] nil in case of success, Error
    # otherwise
    def set_group_quotas(quota)
        return @client.call(SYSTEM_METHODS[:groupquotaupdate], quota)
    end
end
end
| 37.748538 | 81 | 0.486754 |
7a5d796904fc86402b9ddd339661088ee8afeee2 | 969 | require 'spec_helper_acceptance'
# Ensure IP Forwarding is disabled - Section 3.1.1
# Ensure packet redirect sending is disabled - Section 3.1.2
# Ensure source routed packets are not accepted - Section 3.2.1
# Ensure ICMP redirects are not accepted - Section 3.2.2
describe file('/etc/sysctl.d/99-sysctl.conf') do
  it { should be_symlink }
  it { should be_owned_by 'root' }
  it { should be_grouped_into 'root' }
  # Dots in the sysctl key names are escaped so each pattern matches the
  # literal key rather than treating '.' as a regex wildcard (previously
  # e.g. /net.ipv4.../ could false-match unrelated content).
  its(:content) { should match /net\.ipv4\.ip_forward = 0/ }
  its(:content) { should match /net\.ipv4\.conf\.all\.send_redirects = 0/ }
  its(:content) { should match /net\.ipv4\.conf\.default\.send_redirects = 0/ }
  its(:content) { should match /net\.ipv4\.conf\.all\.accept_source_route = 0/ }
  its(:content) { should match /net\.ipv4\.conf\.default\.accept_source_route = 0/ }
  its(:content) { should match /net\.ipv4\.conf\.all\.accept_redirects = 0/ }
  its(:content) { should match /net\.ipv4\.conf\.default\.accept_redirects = 0/ }
end
620270fe38c07c8a53133b6eeec78d6a756db450 | 2,589 | # frozen_string_literal: true
module SM
module Middleware
module Response
# Faraday response middleware that normalizes Secure Messaging (SM) API
# JSON payloads into a uniform { data:, errors:, metadata: } envelope.
class SMParser < Faraday::Response::Middleware
  # Parses JSON response bodies in place once the request completes.
  def on_complete(env)
    return unless env.response_headers['content-type'] =~ /\bjson/
    env[:body] = parse(env.body) if env.body.present?
  end

  # Normalizes a parsed JSON body: splits out meta fields and errors,
  # then matches the remaining payload against the known SM resource
  # shapes (preferences, triage teams, folders, messages, categories).
  # @param body [Hash] parsed JSON response body
  # @return [Hash] envelope with :data, :errors and :metadata keys
  def parse(body = nil)
    @parsed_json = body
    @meta_attributes = split_meta_fields!
    # Fix: delegate to the dedicated helper so error extraction lives in
    # one place — previously this logic was duplicated inline, leaving
    # #split_errors! as dead code.
    @errors = split_errors!
    data = preferences ||
           parsed_triage ||
           parsed_folders ||
           normalize_message(parsed_messages) ||
           parsed_categories
    @parsed_json = {
      data: data,
      errors: @errors,
      metadata: @meta_attributes
    }
    @parsed_json
  end

  private

  # Preference payloads are identified by their :notify_me / :'0' keys.
  def preferences
    %i[notify_me 0].any? { |k| @parsed_json.key?(k) } ? @parsed_json : nil
  end

  def parsed_folders
    @parsed_json.key?(:system_folder) ? @parsed_json : @parsed_json[:folder]
  end

  def parsed_triage
    @parsed_json.key?(:triage_team_id) ? @parsed_json : @parsed_json[:triage_team]
  end

  def parsed_messages
    @parsed_json.key?(:recipient_id) ? @parsed_json : @parsed_json[:message]
  end

  def parsed_categories
    @parsed_json.key?(:message_category_type) ? @parsed_json : @parsed_json[:message_category_type]
  end

  # Removes the :errors key from the payload, defaulting to an empty hash.
  def split_errors!
    @parsed_json.delete(:errors) || {}
  end

  # Placeholder: SM responses currently carry no metadata.
  def split_meta_fields!
    {}
  end

  # Applies attachment fix-ups to a message payload, or to each element
  # when given a collection of messages.
  def normalize_message(object)
    return object if object.blank?

    if object.is_a?(Array)
      object.map { |a| fix_attachments(a) }
    else
      fix_attachments(object)
    end
  end

  # Flattens nested attachment objects and injects the owning message_id
  # into each attachment entry.
  def fix_attachments(message_json)
    return message_json.except(:attachments) if message_json[:attachments].blank?

    message_id = message_json[:id]
    attachments = Array.wrap(message_json[:attachments])
    # remove the outermost object name for attachment and inject message_id
    attachments = attachments.map do |attachment|
      attachment[:attachment].map { |e| e.merge(message_id: message_id) }
    end.flatten
    message_json.merge(attachments: attachments)
  end
end
end
end
end
Faraday::Response.register_middleware sm_parser: SM::Middleware::Response::SMParser
| 29.420455 | 105 | 0.588258 |
5d9e51066e3cd4fcf6c03bdf394bd80531489980 | 2,470 | require 'mustache'
module Fern
  module Documentation
    # Renders a Markdown document describing one analyzed endpoint:
    # verb/path heading, a parameters table, and optional form and
    # presenter sections.
    class MarkdownGenerator
      # rubocop:disable Metrics/LineLength
      TEMPLATE = %(# {{verb}} {{path}}
_{{controller}}\#{{action}}_
{{doc}}
{{#has_parameters}}
## Parameters
| Name | Type | Array | Required | Min | Max | Values | Default |
| ---- | ---- | ----- | -------- | --- | --- | ------ | ------- |
{{#parameters}}
| {{name}} | `{{ type }}` | {{ array }} | {{ required }} | {{ min }} | {{ max }} | {{ values }} | {{ default }} |
{{/parameters}}
{{/has_parameters}}
{{#has_form}}
## Form
{{#form}}
### Class
`{{klass}}`
{{/form}}
{{#key}}
### Key
`{{key}}`
{{/key}}
{{/has_form}}
{{#presenter}}
## Presenter
### Class
`{{presenter}}`
{{/presenter}}
).freeze
      # rubocop:enable Metrics/LineLength
      # @param analysis [Hash] route analysis (:verb, :path, :controller,
      #   :action, :doc, :params, :form, :presenter)
      def initialize(analysis)
        @analysis = analysis
      end
      # Renders TEMPLATE with the analysis data, then collapses the runs
      # of blank lines left behind by empty Mustache sections.
      def generate
        params = build_params
        Mustache.render(
          TEMPLATE,
          verb: @analysis[:verb],
          path: @analysis[:path],
          controller: @analysis[:controller],
          action: @analysis[:action],
          doc: strip_leading_whitespace(@analysis[:doc]),
          has_parameters: params.present?,
          parameters: params,
          has_form: @analysis[:form].present?,
          form: @analysis[:form],
          presenter: @analysis[:presenter]
        ).gsub(/\n{2,}/, "\n\n")
      end
      private
      # One table-row hash per declared parameter; nil when the route
      # declares no params (so the Parameters section is omitted).
      def build_params
        return if @analysis[:params].nil?
        @analysis[:params].map { |name, config| build_param(name, config) }
      end
      def build_param(name, config)
        constraints = config[:constraints]
        {
          name: name,
          type: config[:type],
          array: check(constraints[:array]),
          required: check(constraints[:required]),
          min: constraints[:min],
          max: constraints[:max],
          values: constraints[:values]&.join(', '),
          default: constraints[:default]
        }
      end
      # Renders a truthy flag as a checkmark cell, falsy as an empty cell.
      def check(val)
        val ? '✓' : ''
      end
      # Strips the common leading indentation (measured from the first
      # non-empty line) off every line of a doc comment.
      # Uses plain Array indexing instead of ActiveSupport's Array#second
      # and builds the result with map/join instead of manual accumulation.
      def strip_leading_whitespace(str)
        return nil if str.nil?
        lines = str.split("\n")
        first_line = lines.first
        first_line = lines[1] if first_line == ''
        whitespace_to_trim = /^\s*/.match(first_line).to_s
        lines.map { |line| line.sub(whitespace_to_trim, '') }.join("\n")
      end
    end
  end
end
| 21.478261 | 113 | 0.528745 |
7a019e7dcf8ababb7523f79570d67e02bc1bd19d | 489 | module Catalogillo
module Api
module V1
class ProductsController < ApplicationController
def index
params[:products].each do |p|
product = Catalogillo::Product.new p
product.index
end
head Sunspot.commit ? :ok : :unprocessable_entity
end
def destroy
head Sunspot.remove(Catalogillo::Product) { with(:id, params[:id])} ? :ok : :unprocessable_entity
end
end
end
end
end | 24.45 | 107 | 0.591002 |
ed2079c65923bc008fba4f01c67d9f9db15edba4 | 1,913 | require 'morpheus/api/api_client'
# REST client for the tenant (account) group endpoints under
# /api/accounts/:account_id/groups.
class Morpheus::AccountGroupsInterface < Morpheus::APIClient
  # GET a single account group by id. Raises when id is blank so a bad
  # caller cannot silently hit the collection URL instead.
  def get(account_id, id, params = {})
    raise "#{self.class}.get() passed a blank id!" if id.to_s == ''
    url = "#{@base_url}/api/accounts/#{account_id}/groups/#{id}"
    headers = { params: params, authorization: "Bearer #{@access_token}" }
    execute(method: :get, url: url, headers: headers)
  end
  # GET the list of groups belonging to an account.
  def list(account_id, params = {})
    url = "#{@base_url}/api/accounts/#{account_id}/groups"
    headers = { params: params, authorization: "Bearer #{@access_token}" }
    execute(method: :get, url: url, headers: headers)
  end
  # POST a new group under the account.
  def create(account_id, payload)
    url = "#{@base_url}/api/accounts/#{account_id}/groups"
    headers = { authorization: "Bearer #{@access_token}", 'Content-Type' => 'application/json' }
    execute(method: :post, url: url, headers: headers, payload: payload.to_json)
  end
  # PUT changes to an existing group.
  def update(account_id, id, payload)
    url = "#{@base_url}/api/accounts/#{account_id}/groups/#{id}"
    headers = { authorization: "Bearer #{@access_token}", 'Content-Type' => 'application/json' }
    execute(method: :put, url: url, headers: headers, payload: payload.to_json)
  end
  # DELETE a group.
  def destroy(account_id, id, params = {})
    url = "#{@base_url}/api/accounts/#{account_id}/groups/#{id}"
    headers = { params: params, authorization: "Bearer #{@access_token}", 'Content-Type' => 'application/json' }
    execute(method: :delete, url: url, headers: headers)
  end
  # PUT the set of zones (clouds) associated with a group.
  def update_zones(account_id, id, payload)
    url = "#{@base_url}/api/accounts/#{account_id}/groups/#{id}/update-zones"
    headers = { authorization: "Bearer #{@access_token}", 'Content-Type' => 'application/json' }
    execute(method: :put, url: url, headers: headers, payload: payload.to_json)
  end
end
| 39.040816 | 118 | 0.652901 |
f841685af65d508a817d57e738484888dabfbc68 | 1,279 | require 'spec_helper'
describe "Authentication" do
  context "on successful sign in" do
    # Replays the recorded GitHub API exchange used for key creation.
    use_vcr_cassette 'Github_Key/_create', :erb => true
    # Stub OmniAuth so the GitHub callback yields a fixed token/login.
    before(:each) do
      OmniAuth.config.mock_auth[:github] = { :credentials => { :token => "65r6w5er1w6er5w65ef1" },
                                             :extra => {
                                               :raw_info => {
                                                 :login => "testuser" } } }
    end
    it "should create a new User record" do
      get "/auth/github"
      follow_redirect!
      User.should have(1).record
    end
    context "when user record already exists" do
      # Skip the SSH-key upload to GitHub so no extra HTTP call is made.
      before(:each) { User.disable_ssh_github_upload = true }
      it "should use the existing record" do
        user = FactoryGirl.create(:user)
        OmniAuth.config.mock_auth[:github][:extra][:raw_info][:login] = user.username
        get "/auth/github"
        follow_redirect!
        # Still exactly one user: the sign-in reused the existing record.
        User.should have(1).record
      end
    end
  end
  context "on unsuccessful sign in" do
    it "does something" do
      OmniAuth.config.mock_auth[:github] = :invalid_credentials
      get "/auth/github"
      follow_redirect!
      response.should redirect_to('/auth/failure?message=invalid_credentials')
    end
  end
end
| 27.804348 | 98 | 0.579359 |
ff786eb3f36ef054451684338f777a7844ef0b60 | 11,306 | module Spree
module ProductScopes
extend ActiveSupport::Concern
included do
cattr_accessor :search_scopes do
[]
end
def self.add_search_scope(name, &block)
singleton_class.send(:define_method, name.to_sym, &block)
search_scopes << name.to_sym
end
def self.simple_scopes
[
:ascend_by_updated_at,
:descend_by_updated_at,
:ascend_by_name,
:descend_by_name
]
end
def self.add_simple_scopes(scopes)
scopes.each do |name|
# We should not define price scopes here, as they require something slightly different
next if name.to_s.include?('master_price')
parts = name.to_s.match(/(.*)_by_(.*)/)
scope(name.to_s, -> { order(Arel.sql("#{Product.quoted_table_name}.#{parts[2]} #{parts[1] == 'ascend' ? 'ASC' : 'DESC'}")) })
end
end
def self.property_conditions(property)
properties = Property.table_name
case property
when String then { "#{properties}.name" => property }
when Property then { "#{properties}.id" => property.id }
else { "#{properties}.id" => property.to_i }
end
end
add_simple_scopes simple_scopes
add_search_scope :ascend_by_master_price do
order("#{price_table_name}.amount ASC")
end
add_search_scope :descend_by_master_price do
order("#{price_table_name}.amount DESC")
end
add_search_scope :price_between do |low, high|
where(Price.table_name => { amount: low..high })
end
add_search_scope :master_price_lte do |price|
where("#{price_table_name}.amount <= ?", price)
end
add_search_scope :master_price_gte do |price|
where("#{price_table_name}.amount >= ?", price)
end
# This scope selects products in taxon AND all its descendants
# If you need products only within one taxon use
#
# Spree::Product.joins(:taxons).where(Taxon.table_name => { id: taxon.id })
#
# If you're using count on the result of this scope, you must use the
# `:distinct` option as well:
#
# Spree::Product.in_taxon(taxon).count(distinct: true)
#
# This is so that the count query is distinct'd:
#
# SELECT COUNT(DISTINCT "spree_products"."id") ...
#
# vs.
#
# SELECT COUNT(*) ...
add_search_scope :in_taxon do |taxon|
includes(:classifications).
where('spree_products_taxons.taxon_id' => taxon.cached_self_and_descendants_ids).
order('spree_products_taxons.position ASC')
end
# This scope selects products in all taxons AND all its descendants
# If you need products only within one taxon use
#
# Spree::Product.taxons_id_eq([x,y])
add_search_scope :in_taxons do |*taxons|
taxons = get_taxons(taxons)
taxons.first ? prepare_taxon_conditions(taxons) : where(nil)
end
add_search_scope :ascend_by_taxons_min_position do |taxon_ids|
joins(:classifications).
where(Classification.table_name => { taxon_id: taxon_ids }).
select(
[
"#{Product.table_name}.*",
"MIN(#{Classification.table_name}.position) AS min_position"
].join(', ')
).
group(:id).
order(min_position: :asc)
end
# a scope that finds all products having property specified by name, object or id
add_search_scope :with_property do |property|
joins(:properties).where(property_conditions(property))
end
# a simple test for product with a certain property-value pairing
# note that it can test for properties with NULL values, but not for absent values
add_search_scope :with_property_value do |property, value|
joins(:properties).
where("#{ProductProperty.table_name}.value = ?", value).
where(property_conditions(property))
end
add_search_scope :with_property_values do |property_filter_param, property_values|
joins(product_properties: :property).
where(Property.table_name => { filter_param: property_filter_param }).
where(ProductProperty.table_name => { filter_param: property_values.map(&:parameterize) })
end
add_search_scope :with_option do |option|
option_types = OptionType.table_name
conditions = case option
when String then { "#{option_types}.name" => option }
when OptionType then { "#{option_types}.id" => option.id }
else { "#{option_types}.id" => option.to_i }
end
joins(:option_types).where(conditions)
end
add_search_scope :with_option_value do |option, value|
option_values = OptionValue.table_name
option_type_id = case option
when String then OptionType.find_by(name: option) || option.to_i
when OptionType then option.id
else option.to_i
end
conditions = "#{option_values}.name = ? AND #{option_values}.option_type_id = ?", value, option_type_id
group('spree_products.id').joins(variants_including_master: :option_values).where(conditions)
end
# Finds all products which have either:
# 1) have an option value with the name matching the one given
# 2) have a product property with a value matching the one given
add_search_scope :with do |value|
includes(variants_including_master: :option_values).
includes(:product_properties).
where("#{OptionValue.table_name}.name = ? OR #{ProductProperty.table_name}.value = ?", value, value)
end
# Finds all products that have a name containing the given words.
add_search_scope :in_name do |words|
like_any([:name], prepare_words(words))
end
# Finds all products that have a name or meta_keywords containing the given words.
add_search_scope :in_name_or_keywords do |words|
like_any([:name, :meta_keywords], prepare_words(words))
end
# Finds all products that have a name, description, meta_description or meta_keywords containing the given keywords.
add_search_scope :in_name_or_description do |words|
like_any([:name, :description, :meta_description, :meta_keywords], prepare_words(words))
end
# Finds all products that have the ids matching the given collection of ids.
# Alternatively, you could use find(collection_of_ids), but that would raise an exception if one product couldn't be found
add_search_scope :with_ids do |*ids|
where(id: ids)
end
# Sorts products from most popular (popularity is extracted from how many
# times use has put product in cart, not completed orders)
#
# there is alternative faster and more elegant solution, it has small drawback though,
# it doesn stack with other scopes :/
#
# joins: "LEFT OUTER JOIN (SELECT line_items.variant_id as vid, COUNT(*) as cnt FROM line_items GROUP BY line_items.variant_id) AS popularity_count ON variants.id = vid",
# order: 'COALESCE(cnt, 0) DESC'
add_search_scope :descend_by_popularity do
joins(:master).
order(%Q{
COALESCE((
SELECT
COUNT(#{LineItem.quoted_table_name}.id)
FROM
#{LineItem.quoted_table_name}
JOIN
#{Variant.quoted_table_name} AS popular_variants
ON
popular_variants.id = #{LineItem.quoted_table_name}.variant_id
WHERE
popular_variants.product_id = #{Product.quoted_table_name}.id
), 0) DESC
})
end
add_search_scope :not_deleted do
where("#{Product.quoted_table_name}.deleted_at IS NULL or #{Product.quoted_table_name}.deleted_at >= ?", Time.zone.now)
end
def self.not_discontinued(only_not_discontinued = true)
if only_not_discontinued != '0' && only_not_discontinued
where("#{Product.quoted_table_name}.discontinue_on IS NULL or #{Product.quoted_table_name}.discontinue_on >= ?", Time.zone.now)
else
all
end
end
search_scopes << :not_discontinued
def self.with_currency(currency)
joins(variants_including_master: :prices).
where(Price.table_name => { currency: currency.upcase }).
where.not(Price.table_name => { amount: nil }).
distinct
end
search_scopes << :with_currency
# Can't use add_search_scope for this as it needs a default argument
def self.available(available_on = nil, currency = nil)
available_on ||= Time.current
scope = not_discontinued.where("#{Product.quoted_table_name}.available_on <= ?", available_on)
unless Spree::Config.show_products_without_price
currency ||= Spree::Config[:currency]
scope = scope.with_currency(currency)
end
scope
end
search_scopes << :available
def self.active(currency = nil)
available(nil, currency)
end
search_scopes << :active
def self.for_filters(currency, taxon = nil)
scope = active(currency)
scope = scope.in_taxon(taxon) if taxon.present?
scope
end
search_scopes << :for_filters
def self.for_user(user = nil)
if user.try(:has_spree_role?, 'admin')
with_deleted
else
not_deleted.not_discontinued.where("#{Product.quoted_table_name}.available_on <= ?", Time.current)
end
end
add_search_scope :taxons_name_eq do |name|
group('spree_products.id').joins(:taxons).where(Taxon.arel_table[:name].eq(name))
end
def self.price_table_name
Price.quoted_table_name
end
private_class_method :price_table_name
# specifically avoid having an order for taxon search (conflicts with main order)
def self.prepare_taxon_conditions(taxons)
ids = taxons.map(&:cached_self_and_descendants_ids).flatten.uniq
joins(:classifications).where(Classification.table_name => { taxon_id: ids })
end
private_class_method :prepare_taxon_conditions
# Produce an array of keywords for use in scopes.
# Always return array with at least an empty string to avoid SQL errors
def self.prepare_words(words)
return [''] if words.blank?
a = words.split(/[,\s]/).map(&:strip)
a.any? ? a : ['']
end
private_class_method :prepare_words
def self.get_taxons(*ids_or_records_or_names)
taxons = Taxon.table_name
ids_or_records_or_names.flatten.map do |t|
case t
when Integer then Taxon.find_by(id: t)
when ApplicationRecord then t
when String
Taxon.find_by(name: t) ||
Taxon.where("#{taxons}.permalink LIKE ? OR #{taxons}.permalink = ?", "%/#{t}/", "#{t}/").first
end
end.compact.flatten.uniq
end
private_class_method :get_taxons
end
end
end
| 36.947712 | 176 | 0.629489 |
e8b68cb5a8f8e064ab739e1ae469a97e6742f550 | 249 | # encoding: utf-8
module Mongoid #:nodoc:
  module Fields #:nodoc:
    module Serializable #:nodoc:
      # Defines the behaviour for time fields: serialization comes from
      # Serializable, time-zone/conversion handling from Timekeeping.
      class Time
        include Serializable
        include Timekeeping
      end
    end
  end
end
| 19.153846 | 46 | 0.64257 |
5dd63acd75ad6d814098e7d16469386a4914cbf4 | 5,316 | require 'spec_helper'
# Parameter-matrix spec for the collectd::plugin::dns Puppet class,
# exercised across every supported OS fact set.
describe 'collectd::plugin::dns', type: :class do
  on_supported_os(test_on).each do |os, facts|
    context "on #{os} " do
      let :facts do
        facts
      end
      context 'with default values for all parameters' do
        it { is_expected.to contain_class('collectd::plugin::dns') }
        it do
          is_expected.to contain_file('dns.load').with(
            'ensure' => 'present'
          )
        end
        # Rendered config must match the checked-in default fixture.
        default_fixture = File.read(fixtures('plugins/dns.conf.default'))
        it { is_expected.to contain_file('dns.load').with_content(default_fixture) }
        it { is_expected.to contain_package('collectd-dns') }
      end
      describe 'with ensure parameter' do
        %w[present absent].each do |value|
          context "set to a valid value of #{value}" do
            let :params do
              { ensure: value }
            end
            it { is_expected.to contain_file('dns.load').with('ensure' => value) }
          end
        end
        context 'set to an invalid value' do
          let :params do
            { ensure: 'invalid' }
          end
          it 'fails' do
            expect do
              is_expected.to contain_class('collectd::plugin::dns')
            end.to raise_error(Puppet::Error, %r{collectd::plugin::dns::ensure is <invalid> and must be either 'present' or 'absent'\.})
          end
        end
      end
      describe 'with ignoresource parameter' do
        context 'set to a valid IP address' do
          let :params do
            { ignoresource: '10.10.10.10' }
          end
          ignoresource_fixture = File.read(fixtures('plugins/dns.conf.ignoresource'))
          it { is_expected.to contain_file('dns.load').with_content(ignoresource_fixture) }
        end
        # Default (undef) must leave IgnoreSource out of the config.
        context 'set to undef' do
          it { is_expected.to contain_file('dns.load').without_content(%r{IgnoreSource\s+10\.10\.10\.10}) }
        end
        context 'set to an invalid value' do
          let :params do
            { ignoresource: 'not_an_ip' }
          end
          it 'fails' do
            expect do
              is_expected.to contain_class('collectd::plugin::dns')
            end.to raise_error(Puppet::Error, %r{collectd::plugin::dns::ignoresource is <not_an_ip> and must be a valid IP address\.})
          end
        end
      end
      describe 'with interface parameter' do
        context 'set to a valid value' do
          let :params do
            { interface: 'eth0' }
          end
          interface_fixture = File.read(fixtures('plugins/dns.conf.interface'))
          it { is_expected.to contain_file('dns.load').with_content(interface_fixture) }
        end
        context 'set to an invalid value (non-string)' do
          let :params do
            { interface: %w[not a string] }
          end
          it 'fails' do
            expect do
              is_expected.to contain_class('collectd::plugin::dns')
            end.to raise_error(Puppet::Error, %r{is not a string})
          end
        end
      end
      describe 'with interval parameter' do
        ['10.0', '3600'].each do |value|
          context "set to a valid numeric of #{value}" do
            let :params do
              { interval: value }
            end
            it { is_expected.to contain_file('dns.load').with_content(%r{\s*Interval\s+#{Regexp.escape(value)}}) }
          end
        end
        context 'set to an invalid value' do
          let :params do
            { interval: 'invalid' }
          end
          it 'fails' do
            expect do
              is_expected.to contain_class('collectd::plugin::dns')
            end.to raise_error(Puppet::Error, %r{Expected first argument to be a Numeric or Array, got String})
          end
        end
      end
      describe 'with selectnumericquerytypes parameter' do
        # Both real booleans and their string forms are accepted.
        ['true', true, 'false', false].each do |value|
          context "set to valid value of #{value}" do
            let :params do
              { selectnumericquerytypes: value }
            end
            it { is_expected.to contain_file('dns.load').with_content(%r{\s*SelectNumericQueryTypes\s+#{Regexp.escape(value.to_s)}}) }
          end
        end
        context 'set to an invalid value (non-boolean and non-stringified boolean)' do
          let :params do
            { selectnumericquerytypes: 'invalid' }
          end
          it 'fails' do
            expect do
              is_expected.to contain_class('collectd::plugin::dns')
            end.to raise_error(Puppet::Error, %r{Unknown type of boolean})
          end
        end
      end
      describe 'with manage_package parameter' do
        ['true', true].each do |value|
          context "set to #{value}" do
            # The package resource must follow the class ensure value.
            %w[present absent].each do |ensure_value|
              context "and ensure set to #{ensure_value}" do
                let :params do
                  {
                    ensure: ensure_value,
                    manage_package: value
                  }
                end
                it do
                  is_expected.to contain_package('collectd-dns').with(
                    'ensure' => ensure_value
                  )
                end
              end
            end
          end
        end
      end
    end
  end
end
| 30.906977 | 136 | 0.541761 |
ff2e6429c97a1ffa0bf76da56b4ea5aecd4a9bd3 | 466 | module Rails3BeforeRender
module BeforeRenderInstance
extend ActiveSupport::Concern
included do
alias_method_chain :render, :before_render_filter
define_callbacks :render
end
def render_with_before_render_filter *opts, &blk
run_callbacks :render, action_name do
render_without_before_render_filter(*opts, &blk)
end
end
end
end
ActionController::Base.send :include, Rails3BeforeRender::BeforeRenderInstance | 24.526316 | 79 | 0.755365 |
614a112ed1b5fc2bebdbfddbb9b2daec367e7b4b | 198 | require "sagrone_scraper/version"
require "sagrone_scraper/agent"
require "sagrone_scraper/base"
require "sagrone_scraper/collection"
module SagroneScraper
  class << self
    # Returns the gem's version string (the VERSION constant defined in
    # sagrone_scraper/version).
    def version
      VERSION
    end
  end
end
| 18 | 36 | 0.813131 |
614236a6a3bb2b6a3d358c7ff213b96593ebcec5 | 232 | # frozen_string_literal: true
require "test_helper"
module Wf
  # Integration-test scaffold for the engine's GuardsController.
  class GuardsControllerTest < ActionDispatch::IntegrationTest
    # Use the engine's route helpers rather than the host application's.
    include Engine.routes.url_helpers
    # test "the truth" do
    #   assert true
    # end
  end
end
| 16.571429 | 62 | 0.724138 |
d5ca49649434fe8c2482f0b16153fa336be52022 | 14,515 | module Bosh::Director
module DeploymentPlan
# Represents a single job instance.
class Instance
include DnsHelper
# @return [DeploymentPlan::Job] Associated job
attr_reader :job
# @return [Integer] Instance index
attr_reader :index
# @return [Models::Instance] Instance model
attr_reader :model
# @return [String] Checksum all of the configuration templates
attr_accessor :configuration_hash
# @return [Hash] A hash of template SHA1 hashes
attr_accessor :template_hashes
# @return [Bosh::Director::Core::Templates::RenderedTemplatesArchive]
attr_accessor :rendered_templates_archive
# @return [Hash<String, NetworkReservation>] network reservations
attr_accessor :network_reservations
# @return [String] job state
attr_accessor :state
# @return [Hash] current state as provided by the BOSH Agent
attr_accessor :current_state
# @return [DeploymentPlan::IdleVm] Associated resource pool VM
attr_reader :idle_vm
# @return [Boolean] true if this instance needs to be recreated
attr_accessor :recreate
# @return [Boolean] true if this instance needs to be restarted
attr_accessor :restart
##
# Creates a new instance specification based on the job and index.
#
# @param [DeploymentPlan::Job] job associated job
# @param [Integer] index index for this instance
def initialize(job, index)
@job = job
@index = index
@model = nil
@configuration_hash = nil
@template_hashes = nil
@idle_vm = nil
@current_state = nil
@network_reservations = {}
@state = job.instance_state(@index)
# Expanding virtual states
case @state
when 'recreate'
@recreate = true
@state = 'started'
when 'restart'
@restart = true
@state = 'started'
end
end
def to_s
"#{@job.name}/#{@index}"
end
# @param [Models::Instance] model Instance DB model
# @return [void]
def use_model(model)
if @model
raise DirectorError, 'Instance model is already bound'
end
@model = model
end
# Looks up a DB model for this instance, creates one if doesn't exist
# yet.
# @return [void]
def bind_model
@model ||= find_or_create_model
end
# Looks up instance model in DB and binds it to this instance spec.
# Instance model is created if it's not found in DB. New idle VM is
# allocated if instance DB record doesn't reference one.
# @return [void]
def bind_unallocated_vm
bind_model
if @model.vm.nil?
allocate_idle_vm
end
end
# Syncs instance state with instance model in DB. This is needed because
# not all instance states are available in the deployment manifest and we
# we cannot really persist this data in the agent state (as VM might be
# stopped or detached).
# @return [void]
def sync_state_with_db
if @model.nil?
raise DirectorError, "Instance `#{self}' model is not bound"
end
if @state
# Deployment plan explicitly sets state for this instance
@model.update(:state => @state)
elsif @model.state
# Instance has its state persisted from the previous deployment
@state = @model.state
else
# Target instance state should either be persisted in DB or provided
# via deployment plan, otherwise something is really wrong
raise InstanceTargetStateUndefined,
"Instance `#{self}' target state cannot be determined"
end
end
##
# Adds a new network to this instance
# @param [String] name network name
# @param [NetworkReservation] reservation
def add_network_reservation(name, reservation)
old_reservation = @network_reservations[name]
if old_reservation
raise NetworkReservationAlreadyExists,
"`#{self}' already has reservation " +
"for network `#{name}', IP #{old_reservation.ip}"
end
@network_reservations[name] = reservation
end
##
# Take any existing valid network reservations
#
# @param [Hash<String, NetworkReservation>] reservations
# @return [void]
def take_network_reservations(reservations)
reservations.each do |name, provided_reservation|
reservation = @network_reservations[name]
reservation.take(provided_reservation) if reservation
end
end
##
# @return [Hash] BOSH network settings used for Agent apply call
def network_settings
default_properties = {}
@job.default_network.each do |key, value|
(default_properties[value] ||= []) << key
end
network_settings = {}
@network_reservations.each do |name, reservation|
network = @job.deployment.network(name)
network_settings[name] = network.network_settings(reservation, default_properties[name])
# Temporary hack for running errands.
# We need to avoid RunErrand task thinking that
# network configuration for errand VM differs
# from network configuration for its Instance.
#
# Obviously this does not account for other changes
# in network configuration that errand job might need.
# (e.g. errand job desires static ip)
if @job.starts_on_deploy?
network_settings[name]['dns_record_name'] = dns_record_name(name)
end
# Somewhat of a hack: for dynamic networks we might know IP address, Netmask & Gateway
# if they're featured in agent state, in that case we put them into network spec to satisfy
# ConfigurationHasher in both agent and director.
if @current_state.is_a?(Hash) &&
@current_state['networks'].is_a?(Hash) &&
@current_state['networks'][name].is_a?(Hash) &&
network_settings[name]['type'] == 'dynamic'
%w(ip netmask gateway).each do |key|
network_settings[name][key] = @current_state['networks'][name][key]
end
end
end
network_settings
end
##
# @return [Integer] persistent disk size
def disk_size
if @model.nil?
current_state['persistent_disk'].to_i
elsif @model.persistent_disk
@model.persistent_disk.size
else
0
end
end
##
# @return [Hash<String, String>] dns record hash of dns name and IP
def dns_record_info
dns_record_info = {}
network_settings.each do |network_name, network|
name = dns_record_name(network_name)
dns_record_info[name] = network['ip']
end
dns_record_info
end
##
# @return [String] dns record name
def dns_record_name(network_name)
[index, job.canonical_name, canonical(network_name), job.deployment.canonical_name, dns_domain_name].join('.')
end
##
# @return [Boolean] returns true if the persistent disk is attached to the
# VM
def disk_currently_attached?
current_state['persistent_disk'].to_i > 0
end
##
# @return [Boolean] returns true if the network configuration changed
def networks_changed?
network_settings != @current_state['networks']
end
##
# @return [Boolean] returns true if the expected resource pool differs
# from the one provided by the VM
def resource_pool_changed?
if @recreate || @job.deployment.recreate
return true
end
if @job.resource_pool.spec != @current_state['resource_pool']
return true
end
# env is not a part of a resource pool spec but rather gets persisted
# in director DB, hence the check below
# NOTE: we only update VMs that have env persisted to avoid recreating
# everything, so if the director gets updated from the version that
# doesn't persist VM env to the version that does, there needs to
# be at least one deployment that recreates all VMs before the following
# code path gets exercised.
if @model && @model.vm && @model.vm.env &&
@job.resource_pool.env != @model.vm.env
return true
end
false
end
##
# @return [Boolean] returns true if the expected configuration hash
# differs from the one provided by the VM
def configuration_changed?
configuration_hash != @current_state['configuration_hash']
end
##
# @return [Boolean] returns true if the expected job configuration differs
# from the one provided by the VM
def job_changed?
job_spec = @job.spec
if job_spec != @current_state['job']
# The agent job spec could be in legacy form. job_spec cannot be,
# though, because we got it from the spec function in job.rb which
# automatically makes it non-legacy.
return job_spec != Job.convert_from_legacy_spec(@current_state['job'])
end
return false
end
##
# @return [Boolean] returns true if the expected packaged of the running
# instance differ from the ones provided by the VM
def packages_changed?
@job.package_spec != @current_state['packages']
end
##
# @return [Boolean] returns true if the expected persistent disk differs
# from the one currently configured on the VM
def persistent_disk_changed?
@job.persistent_disk != disk_size
end
##
# @return [Boolean] returns true if the DNS records configured for the
# instance differ from the ones configured on the DNS server
def dns_changed?
if Config.dns_enabled?
dns_record_info.any? do |name, ip|
Models::Dns::Record.find(:name => name, :type => 'A',
:content => ip).nil?
end
else
false
end
end
##
# Checks if agent view of the instance state is consistent with target
# instance state.
#
# In case the instance current state is 'detached' we should never get to
# this method call.
# @return [Boolean] returns true if the expected job state differs from
# the one provided by the VM
def state_changed?
@state == 'detached' ||
@state == 'started' && @current_state['job_state'] != 'running' ||
@state == 'stopped' && @current_state['job_state'] == 'running'
end
##
# @return [Boolean] returns true if the any of the expected specifications
# differ from the ones provided by the VM
def changed?
!changes.empty?
end
##
# @return [Set<Symbol>] returns a set of all of the specification
# differences
def changes
changes = Set.new
unless @state == 'detached' && @current_state.nil?
changes << :restart if @restart
changes << :resource_pool if resource_pool_changed?
changes << :network if networks_changed?
changes << :packages if packages_changed?
changes << :persistent_disk if persistent_disk_changed?
changes << :configuration if configuration_changed?
changes << :job if job_changed?
changes << :state if state_changed?
changes << :dns if dns_changed?
end
changes
end
##
# Instance spec that's passed to the VM during the BOSH Agent apply call.
# It's what's used for comparing the expected vs the actual state.
#
# @return [Hash<String, Object>] instance spec
def spec
spec = {
'deployment' => @job.deployment.name,
'job' => job.spec,
'index' => index,
'networks' => network_settings,
'resource_pool' => job.resource_pool.spec,
'packages' => job.package_spec,
'persistent_disk' => job.persistent_disk,
'configuration_hash' => configuration_hash,
'properties' => job.properties,
'dns_domain_name' => dns_domain_name
}
if template_hashes
spec['template_hashes'] = template_hashes
end
# Ruby BOSH Agent does not look at 'rendered_templates_archive'
# since it renders job templates and then compares template hashes.
# Go BOSH Agent has no ability to render ERB so pre-rendered templates are provided.
if rendered_templates_archive
spec['rendered_templates_archive'] = rendered_templates_archive.spec
end
spec
end
# Looks up instance model in DB
# @return [Models::Instance]
def find_or_create_model
if @job.deployment.model.nil?
raise DirectorError, 'Deployment model is not bound'
end
conditions = {
deployment_id: @job.deployment.model.id,
job: @job.name,
index: @index
}
Models::Instance.find_or_create(conditions) do |model|
model.state = 'started'
end
end
# Allocates an idle VM in this job resource pool and binds current
# instance to that idle VM.
# @return [void]
def allocate_idle_vm
  resource_pool = @job.resource_pool
  idle_vm = resource_pool.allocate_vm
  network = resource_pool.network
  if idle_vm.vm
    # There's already a resource pool VM that can become our instance,
    # so we can try to reuse its reservation
    instance_reservation = @network_reservations[network.name]
    if instance_reservation
      instance_reservation.take(idle_vm.network_reservation)
    end
  else
    # VM is not created yet: let's just make it reference this instance
    # so later it knows what it needs to become
    idle_vm.bound_instance = self
    # this also means we no longer need previous VM network reservation
    # (instance has its own)
    idle_vm.release_reservation
  end
  # Remember the allocation so later steps can use this VM.
  @idle_vm = idle_vm
end
end
end
end
| 33.677494 | 118 | 0.613641 |
3953dff8a44e2fb65d05051aade8fbe8720e7f0e | 946 | $:.unshift File.expand_path("../lib", __FILE__)
require 'sinatra/asset_pipeline/version'
# Gem metadata for sinatra-asset-pipeline: a Sprockets-backed asset
# pipeline for Sinatra apps (SASS, CoffeeScript, ERB support).
Gem::Specification.new do |gem|
  gem.name    = "sinatra-asset-pipeline"
  gem.version = Sinatra::AssetPipeline::VERSION
  gem.authors = ["Joakim Ekberg"]
  gem.email   = ["[email protected]"]

  gem.description = "An asset pipeline implementation for Sinatra based on Sprockets with support for SASS, CoffeeScript and ERB."
  gem.summary     = "An asset pipeline implementation for Sinatra."
  gem.homepage    = "https://github.com/kalasjocke/sinatra-asset-pipeline"
  gem.license     = "MIT"

  # Only ship the readme and the library code.
  gem.files = Dir["README.md", "lib/**/*.rb"]

  # Runtime dependencies (unversioned, same order as before).
  %w[rake sinatra sass coffee-script sprockets sprockets-sass sprockets-helpers].each do |dep|
    gem.add_dependency dep
  end

  # Test-only dependencies.
  %w[rspec rack-test].each { |dep| gem.add_development_dependency dep }
end
| 36.384615 | 130 | 0.749471 |
91c22e7ad82966856ef4e06072afa003010092c1 | 1,627 | require 'rails_helper'
# Feature spec: an admin can disable individual Git transport protocols
# (HTTP or SSH) from the application settings; project pages must then
# advertise only the clone URL of the protocol that remains enabled.
describe 'Admin disables Git access protocol' do
  include StubENV

  let(:project) { create(:project, :empty_repo) }
  let(:admin) { create(:admin) }

  before do
    # Read application settings from the DB so the admin UI change made in
    # switch_git_protocol takes effect for subsequent requests.
    stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
    sign_in(admin)
  end

  context 'with HTTP disabled' do
    before do
      disable_http_protocol
    end

    it 'shows only SSH url' do
      visit_project

      expect(page).to have_content("git clone #{project.ssh_url_to_repo}")
      expect(page).not_to have_selector('#clone-dropdown')
    end
  end

  context 'with SSH disabled' do
    before do
      disable_ssh_protocol
    end

    it 'shows only HTTP url' do
      visit_project

      expect(page).to have_content("git clone #{project.http_url_to_repo}")
      expect(page).not_to have_selector('#clone-dropdown')
    end
  end

  context 'with nothing disabled' do
    before do
      # An SSH clone URL is only offered when the user has an SSH key.
      create(:personal_key, user: admin)
    end

    it 'shows default SSH url and protocol selection dropdown' do
      visit_project

      expect(page).to have_content("git clone #{project.ssh_url_to_repo}")
      expect(page).to have_selector('#clone-dropdown')
    end
  end

  def visit_project
    visit project_path(project)
  end

  # Options are selected by position in the settings dropdown.
  # NOTE(review): assumed mapping is option 2 => SSH-only (HTTP disabled)
  # and option 3 => HTTP-only (SSH disabled) — confirm against the
  # enabled_git_access_protocol select if its option order changes.
  def disable_http_protocol
    switch_git_protocol(2)
  end

  def disable_ssh_protocol
    switch_git_protocol(3)
  end

  def switch_git_protocol(value)
    visit admin_application_settings_path

    page.within('.as-visibility-access') do
      find('#application_setting_enabled_git_access_protocol').find(:xpath, "option[#{value}]").select_option
      click_on 'Save'
    end
  end
end
| 21.986486 | 109 | 0.696988 |
ab94a2c7ce5f0de941432fddef71e28021769d40 | 1,081 | require 'spec_helper'
# Catalog-compilation specs for the impala::statestore Puppet class, run
# against every OS the module declares support for.
describe 'impala::statestore' do
  context 'supported operating systems' do
    on_supported_os.each do |os, facts|
      # All managed config files live under Impala's conf directory.
      path = '/etc/impala/conf'

      context "on #{os}" do
        let(:facts) do
          facts
        end

        context "without any parameters" do
          let(:params) {{ }}

          it { is_expected.to compile.with_all_deps }

          it { is_expected.to contain_class('impala::statestore') }
          # Enforce install -> config -> service ordering, with the service
          # restarting whenever the config changes.
          it { is_expected.to contain_class('impala::statestore::install').that_comes_before('impala::statestore::config') }
          it { is_expected.to contain_class('impala::statestore::config') }
          it { is_expected.to contain_class('impala::statestore::service').that_subscribes_to('impala::statestore::config') }

          it { should contain_file(path + '/core-site.xml') }
          it { should contain_file(path + '/hdfs-site.xml') }

          it { is_expected.to contain_service('impala-state-store') }
          it { is_expected.to contain_package('impala-state-store') }
        end
      end
    end
  end
end
| 33.78125 | 125 | 0.628122 |
6ac3127b1f69084153cf0bad593badd924d8d1b5 | 167 | class CreateWardVillages < ActiveRecord::Migration[5.1]
# Creates the ward_villages table: one row per village with a name plus
# the standard created_at/updated_at timestamps.
def change
  create_table :ward_villages do |t|
    t.string :name
    t.timestamps
  end
end
end
| 16.7 | 55 | 0.682635 |
21f3dc9674716d9893754671bc4f6f4271d8017b | 1,366 | # coding: utf-8
# Make lib/ loadable so the gem's version constant can be read below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'codebreaker/version'

Gem::Specification.new do |spec|
  spec.name          = "codebreaker"
  spec.version       = Codebreaker::VERSION
  spec.authors       = ["denis"]
  spec.email         = ["[email protected]"]

  # NOTE(review): summary, description and homepage still contain the
  # bundler-generated TODO placeholders; `gem build` will warn/refuse
  # until real values are filled in.
  spec.summary       = %q{TODO: Write a short summary, because Rubygems requires one.}
  spec.description   = %q{TODO: Write a longer description or delete this line.}
  spec.homepage      = "TODO: Put your gem's website or public repo URL here."
  spec.license       = "MIT"

  # Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or
  # delete this section to allow pushing this gem to any host.
  if spec.respond_to?(:metadata)
    spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'"
  else
    raise "RubyGems 2.0 or newer is required to protect against public gem pushes."
  end

  # Package every git-tracked file except the test suites.
  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.9"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec"
end
| 40.176471 | 104 | 0.671303 |
bbad439c3a9e9e558545e916f4b7c46decef48d2 | 171 | # frozen_string_literal: true
module DropboxApi::Errors
  # Raised when a file upload could not be committed on Dropbox's side.
  class UploadWriteFailedError < BasicError
    # Maps the API error's subtype field to the error class that models
    # it: the `reason` subtype carries a WriteError.
    ErrorSubtypes = {
      reason: WriteError
    }.freeze
  end
end
| 17.1 | 43 | 0.730994 |
e2bf1ef608fa55285ffd756a281822b85007353a | 2,977 | require 'rails_helper'
# Controller specs for the admin CRUD around quant/commodity properties.
# Covers: cache invalidation on create, validation failures (missing
# quant, commodity or tooltip), rejection of duplicate (quant, commodity)
# pairs, and plain index/show rendering.
RSpec.describe Admin::QuantCommodityPropertiesController, type: :controller do
  render_views

  let(:user) { FactoryBot.create(:user) }

  before { sign_in user }

  describe 'POST create' do
    let(:quant) { FactoryBot.create(:api_v3_quant) }
    let(:quant_2) { FactoryBot.create(:api_v3_quant) }
    let(:commodity) { FactoryBot.create(:api_v3_commodity) }
    let(:commodity_2) { FactoryBot.create(:api_v3_commodity) }
    let(:tooltip_text) { 'Tooltip text' }

    # Pre-existing record used to exercise the uniqueness check below.
    let!(:quant_commodity_property) {
      FactoryBot.create(
        :api_v3_quant_commodity_property,
        quant_id: quant_2.id,
        commodity_id: commodity_2.id,
        tooltip_text: tooltip_text
      )
    }

    # Same (quant, commodity) pair as the record created above.
    let(:duplicate) {
      FactoryBot.attributes_for(
        :api_v3_quant_commodity_property,
        quant_id: quant_2.id,
        commodity_id: commodity_2.id,
        tooltip_text: tooltip_text
      )
    }

    let(:valid_attributes) {
      FactoryBot.attributes_for(
        :api_v3_quant_commodity_property,
        quant_id: quant.id,
        commodity_id: commodity.id,
        tooltip_text: tooltip_text
      )
    }

    # Each of the three payloads below omits exactly one required field.
    let(:no_quant_provided) {
      FactoryBot.attributes_for(
        :api_v3_quant_commodity_property,
        quant_id: nil,
        commodity_id: commodity.id,
        tooltip_text: tooltip_text
      )
    }

    let(:no_commodity_provided) {
      FactoryBot.attributes_for(
        :api_v3_quant_commodity_property,
        quant_id: quant.id,
        commodity_id: nil,
        tooltip_text: tooltip_text
      )
    }

    let(:no_tooltip_provided) {
      FactoryBot.attributes_for(
        :api_v3_quant_commodity_property,
        quant_id: quant.id,
        commodity_id: commodity.id,
        tooltip_text: nil
      )
    }

    it 'clears cache' do
      expect(controller).to receive(:clear_cache_for_regexp)
      post :create, params: {api_v3_quant_commodity_property: valid_attributes}
    end

    it 'fails if quant is not provided' do
      post :create, params: {api_v3_quant_commodity_property: no_quant_provided}
      expect(response).to render_template(:new)
    end

    it 'fails if commodity is not provided' do
      post :create, params: {api_v3_quant_commodity_property: no_commodity_provided}
      expect(response).to render_template(:new)
    end

    it 'fails if tooltip is not provided' do
      post :create, params: {api_v3_quant_commodity_property: no_tooltip_provided}
      expect(response).to render_template(:new)
    end

    it 'fails if property with commodity and quant are already coupled' do
      post :create, params: {api_v3_quant_commodity_property: duplicate}
      expect(response).to render_template(:new)
    end

    it 'renders index' do
      get :index
      expect(response).to render_template(:index)
    end

    it 'renders show' do
      get :show, params: {id: quant_commodity_property.id}
      expect(response).to render_template(:show)
    end
  end
end
| 27.564815 | 84 | 0.672153 |
acc2710928f8b4d7c7e159e50a1b54421e832e54 | 349 | FactoryBot.define do
# Factory for Resource records: Faker-generated free text, today's date
# for both date fields, and fixed values elsewhere.
factory :resource do
  access_date { Date.today }
  association(:resourcetype)
  description { Faker::Movies::StarWars.quote }
  draft { false }
  # Note: the model attribute really is named `private`.
  private { true }
  publication_date { Date.today }
  status { Faker::Creature::Cat.registry }
  title { "MyString" }
  url { "https://impactoss.org" }
end
end
| 24.928571 | 49 | 0.653295 |
b91eb86cabeb7255ec27b22937b57fe3f45212d0 | 459 | # ActiveSupport dependencies.
# Pull in only the ActiveSupport pieces Flip actually uses.
%w{
  concern
  inflector
  core_ext/hash/reverse_merge
  core_ext/object/blank
}.each { |name| require "active_support/#{name}" }

# Flip files.
%w{
  abstract_strategy
  controller_filters
  cookie_strategy
  database_strategy
  declarable
  declaration_strategy
  definition
  facade
  feature_set
  forbidden
}.each { |name| require "flip/#{name}" }

# Rails integration is only loaded when Rails itself is present.
require "flip/engine" if defined?(Rails)

# Top-level namespace for the Flip feature-flipping library; Facade
# supplies the public class-level API (Flip.on?, etc.).
module Flip
  extend Facade
end
| 16.392857 | 50 | 0.742919 |
ab4cda5acf695b0962fab8c629f48330bf83e0d8 | 156 | class CreateWidgets < ActiveRecord::Migration[5.2]
# Creates the widgets table: a name column plus the standard
# created_at/updated_at timestamps.
def change
  create_table :widgets do |t|
    t.string :name
    t.timestamps
  end
end
end
| 15.6 | 50 | 0.660256 |
e84a3717c5b981ffeb031bd6dff1c250e5da1ed3 | 301 | # frozen_string_literal: true
# Migration creating the join table that links users to the playlists
# they have bookmarked, with lookup indexes on both foreign keys.
class PlaylistBookmark < ActiveRecord::Migration
  def change
    create_table :playlist_bookmarks do |t|
      t.integer :user_id
      t.integer :playlist_id
    end
    add_index :playlist_bookmarks, :user_id
    add_index :playlist_bookmarks, :playlist_id
  end
end
| 23.153846 | 48 | 0.744186 |
ffa3ddd191ad07e931e9d0e7279868ff8af88d81 | 501 | SPEC = Gem::Specification.new do |s|
s.name = "mmmail"
s.version = "1.0.0"
s.date = "2009-02-22"
s.author = "Loren Segal"
s.email = "[email protected]"
s.homepage = "http://github.com/lsegal/mmmail"
s.platform = Gem::Platform::RUBY
s.summary = "Mmmm, a Minimalist mail library for Ruby. Works with SMTP or sendmail."
s.files = Dir.glob("{lib,spec}/**/*") + ['LICENSE', 'README.markdown', 'Rakefile']
s.require_paths = ['lib']
# NOTE(review): `rubyforge_project` and `has_rdoc` are long-deprecated
# Gem::Specification attributes; modern RubyGems ignores or warns on
# them. Safe to drop next time this gemspec is touched.
s.rubyforge_project = 'mmmail'
s.has_rdoc = 'yard'
end | 35.785714 | 86 | 0.646707 |
acf800deec5a179a3f148f8da1f2c54a1ebecf0c | 908 | # -*- encoding: utf-8 -*-
# Make lib/ loadable so the gem's own constants (VERSION etc.) resolve.
$LOAD_PATH.push File.expand_path("lib", File.dirname(__FILE__))
require 'unified_assets'

Gem::Specification.new do |s|
  s.name        = "unified-assets"
  s.version     = UnifiedAssets::VERSION
  s.platform    = Gem::Platform::RUBY
  s.authors     = ["Jake Gordon"]
  s.email       = ["[email protected]"]
  s.homepage    = "https://github.com/jakesgordon/unified-assets"
  s.summary     = UnifiedAssets::SUMMARY
  s.description = UnifiedAssets::DESCRIPTION

  s.add_dependency('rack') # for rake assets:server

  # NOTE(review): `has_rdoc` is deprecated and ignored by modern RubyGems.
  s.has_rdoc         = false
  s.extra_rdoc_files = ["README.md"]
  s.rdoc_options     = ["--charset=UTF-8"]

  # File lists come straight from git.
  s.files         = `git ls-files `.split("\n")
  s.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables   = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ["lib"]
end
| 31.310345 | 86 | 0.622247 |
1a5ac6c613fb562f4dabc9cbd948f3f0ac65da2b | 615 | Pod::Spec.new do |s|
s.name         = "ButtonStyleKit"
s.version      = "1.6.0"
s.summary      = "ButtonStyleKit is helper library for UIButton custom styles."
s.homepage     = "https://github.com/keygx/ButtonStyleKit"
s.license      = { :type => "MIT", :file => "LICENSE" }
s.author       = { "keygx" => "[email protected]" }
s.social_media_url = "http://twitter.com/keygx"
s.platform     = :ios
s.ios.deployment_target = '9.0'
# Fix: the source URL previously pointed at the author's
# GradientCircularProgress repository (copy/paste slip), so `pod install`
# would fetch the wrong project entirely. Point it at this pod's repo,
# matching s.name and s.homepage.
s.source       = { :git => "https://github.com/keygx/ButtonStyleKit.git", :tag => "#{s.version}" }
s.source_files = "ButtonStyleKitSample/ButtonStyleKit/*.{h,swift}"
s.requires_arc = true
end
| 41 | 104 | 0.668293 |
1c7bea0b9b95b682b338ec929ae0f8ba21c6ba30 | 5,522 | require 'json'
require 'uri'
module Agents
  # Builds PhantomJs Cloud API URLs which, when later fetched (e.g. by a
  # Website Agent), return a JavaScript-rendered version of a target page.
  # This agent never performs the HTTP request itself — it only constructs
  # and emits the URL.
  class PhantomJsCloudAgent < Agent
    include ERB::Util
    include FormConfigurable
    include WebRequestConcern

    can_dry_run!

    default_schedule 'every_12h'

    description <<-MD
      This Agent generates [PhantomJs Cloud](https://phantomjscloud.com/) URLs that can be used to render JavaScript-heavy webpages for content extraction.

      URLs generated by this Agent are formulated in accordance with the [PhantomJs Cloud API](https://phantomjscloud.com/docs/index.html).

      The generated URLs can then be supplied to a Website Agent to fetch and parse the content.

      [Sign up](https://dashboard.phantomjscloud.com/dash.html#/signup) to get an api key, and add it in Huginn credentials.

      Please see the [Huginn Wiki for more info](https://github.com/huginn/huginn/wiki/Browser-Emulation-Using-PhantomJS-Cloud).

      Options:

      * `Api key` - PhantomJs Cloud API Key credential stored in Huginn
      * `Url` - The url to render
      * `Mode` - Create a new `clean` event or `merge` old payload with new values (default: `clean`)
      * `Render type` - Render as html, plain text without html tags, or jpg as screenshot of the page (default: `html`)
      * `Output as json` - Return the page conents and metadata as a JSON object (default: `false`)
      * `Ignore images` - Skip loading of inlined images (default: `false`)
      * `Url agent` - A custom User-Agent name (default: `#{default_user_agent}`)
      * `Wait interval` - Milliseconds to delay rendering after the last resource is finished loading.
        This is useful in case there are any AJAX requests or animations that need to finish up.
        This can safely be set to 0 if you know there are no AJAX or animations you need to wait for (default: `1000`ms)

      As this agent only provides a limited subset of the most commonly used options, you can follow [this guide](https://github.com/huginn/huginn/wiki/Browser-Emulation-Using-PhantomJS-Cloud) to make full use of additional options PhantomJsCloud provides.
    MD

    event_description <<-MD
      Events look like this:

          {
            "url": "..."
          }
    MD

    def default_options
      {
        'mode' => 'clean',
        'url' => 'http://xkcd.com',
        'render_type' => 'html',
        'output_as_json' => false,
        'ignore_images' => false,
        'user_agent' => self.class.default_user_agent,
        'wait_interval' => '1000'
      }
    end

    form_configurable :mode, type: :array, values: ['clean', 'merge']
    form_configurable :api_key, roles: :completable
    form_configurable :url
    form_configurable :render_type, type: :array, values: ['html', 'plainText', 'jpg']
    form_configurable :output_as_json, type: :boolean
    form_configurable :ignore_images, type: :boolean
    form_configurable :user_agent, type: :text
    form_configurable :wait_interval

    # Each reader below falls back to the shipped default when the option
    # is blank.
    def mode
      interpolated['mode'].presence || default_options['mode']
    end

    def render_type
      interpolated['render_type'].presence || default_options['render_type']
    end

    def output_as_json
      boolify(interpolated['output_as_json'].presence ||
              default_options['output_as_json'])
    end

    def ignore_images
      boolify(interpolated['ignore_images'].presence ||
              default_options['ignore_images'])
    end

    def user_agent
      interpolated['user_agent'].presence || self.class.default_user_agent
    end

    def wait_interval
      interpolated['wait_interval'].presence || default_options['wait_interval']
    end

    # Assembles the optional requestSettings hash; keys are only included
    # when they differ from PhantomJs Cloud's own defaults.
    def page_request_settings
      prs = {}

      prs[:ignoreImages] = ignore_images if ignore_images
      prs[:userAgent] = user_agent if user_agent.present?

      # NOTE(review): the other keys sent here are camelCase (ignoreImages,
      # userAgent) while this one is snake_case; the PhantomJs Cloud
      # requestSettings documentation spells it `waitInterval` — confirm
      # whether `wait_interval` is actually honored by the API.
      if wait_interval != default_options['wait_interval']
        prs[:wait_interval] = wait_interval
      end

      prs
    end

    # Builds the final GET URL: a JSON pageRequest, URL-encoded into the
    # per-key PhantomJs Cloud endpoint.
    def build_phantom_url(interpolated)
      api_key = interpolated[:api_key]
      page_request_hash = {
        url: interpolated[:url],
        renderType: render_type
      }

      page_request_hash[:outputAsJson] = output_as_json if output_as_json

      page_request_settings_hash = page_request_settings

      if page_request_settings_hash.any?
        page_request_hash[:requestSettings] = page_request_settings_hash
      end

      request = page_request_hash.to_json
      log "Generated request: #{request}"

      encoded = url_encode(request)
      "https://phantomjscloud.com/api/browser/v2/#{api_key}/?request=#{encoded}"
    end

    # Scheduled run: emit one event carrying the generated URL.
    def check
      phantom_url = build_phantom_url(interpolated)

      create_event payload: { 'url' => phantom_url }
    end

    # Incoming events: interpolate options against each event; in 'merge'
    # mode the original payload is carried over into the new event.
    def receive(incoming_events)
      incoming_events.each do |event|
        interpolate_with(event) do
          existing_payload = interpolated['mode'].to_s == 'merge' ? event.payload : {}
          phantom_url = build_phantom_url(interpolated)
          result = { 'url' => phantom_url }
          create_event payload: existing_payload.merge(result)
        end
      end
    end

    # Autocomplete source for the api_key form field: offers the user's
    # stored credentials as liquid `{% credential ... %}` tags.
    def complete_api_key
      user.user_credentials.map { |c| { text: c.credential_name, id: "{% credential #{c.credential_name} %}" } }
    end

    def working?
      !recent_error_logs? || received_event_without_error?
    end

    def validate_options
      # Check for required fields
      errors.add(:base, 'Url is required') unless options['url'].present?

      errors.add(:base, 'API key (credential) is required') unless options['api_key'].present?
    end
  end
end
| 34.08642 | 256 | 0.678377 |
7ab06d216a02d08c1ad96201abba2e645cd21471 | 2,980 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# frozen_string_literal: true
module ElasticAPM
  module Metrics
    # @api private
    #
    # Thread-safe container for a single metric value. A metric may carry
    # tags, and may optionally be reset back to its initial value every
    # time it is collected (delta-style metrics).
    class Metric
      def initialize(key, initial_value: nil, tags: nil, reset_on_collect: false)
        @key = key
        @initial_value = initial_value
        @value = initial_value
        @tags = tags
        @reset_on_collect = reset_on_collect
        @mutex = Mutex.new
      end

      attr_reader :key, :initial_value, :tags, :value

      # Thread-safe writer for the current value.
      def value=(new_value)
        @mutex.synchronize { @value = new_value }
      end

      # Puts the metric back to its initial value.
      def reset!
        self.value = initial_value
      end

      # True when at least one tag is present.
      def tags?
        !!tags&.any?
      end

      def reset_on_collect?
        @reset_on_collect
      end

      # Returns the current value. For reset-on-collect metrics, the value
      # is reset as a side effect, and nil is returned instead of zero so
      # callers can skip empty samples.
      def collect
        @mutex.synchronize do
          sampled = @value

          if reset_on_collect?
            @value = initial_value
            return nil if sampled == 0
          end

          sampled
        end
      end
    end

    # @api private
    #
    # Null-object stand-in for a metric: accepts the full
    # Metric/Counter/Timer API and does nothing.
    class NoopMetric
      %i[value collect reset! tags? reset_on_collect? inc! dec!].each do |name|
        define_method(name) { }
      end

      def value=(_); end

      def update(_, delta: nil); end
    end

    # @api private
    #
    # Incrementable/decrementable counter, starting at zero by default.
    class Counter < Metric
      def initialize(key, initial_value: 0, **args)
        super(key, initial_value: initial_value, **args)
      end

      def inc!
        @mutex.synchronize { @value += 1 }
      end

      def dec!
        @mutex.synchronize { @value -= 1 }
      end
    end

    # @api private
    #
    # Point-in-time value; always starts at zero.
    class Gauge < Metric
      def initialize(key, **args)
        super(key, initial_value: 0, **args)
      end
    end

    # @api private
    #
    # Accumulates a total duration together with the number of samples
    # that produced it.
    class Timer < Metric
      attr_accessor :count

      def initialize(key, **args)
        super(key, initial_value: 0, **args)
        @count = 0
      end

      # Adds +duration+ to the running total and bumps the sample count
      # by +delta+.
      def update(duration, delta: 0)
        @mutex.synchronize do
          @value += duration
          @count += delta
        end
      end

      def reset!
        @mutex.synchronize do
          @value = 0
          @count = 0
        end
      end
    end
  end
end
| 21.134752 | 63 | 0.592282 |
d5ae69e4e8ea75fd1926cf702d2fde6bd8fad248 | 5,590 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress CSS using a preprocessor.
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :local
# Mount Action Cable outside main process or domain.
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "sample_app_production"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
config.action_mailer.raise_delivery_errors = true
config.action_mailer.delivery_method = :smtp
host = 'https://serene-meadow-33935.herokuapp.com'
config.action_mailer.default_url_options = { host: host }
ActionMailer::Base.smtp_settings = {
:address => 'smtp.sendgrid.net',
:port => '587',
:authentication => :plain,
:user_name => ENV['SENDGRID_USERNAME'],
:password => ENV['SENDGRID_PASSWORD'],
:domain => 'heroku.com',
:enable_starttls_auto => true
}
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Store uploaded files on Amazon AWS.
config.active_storage.service = :amazon
# Inserts middleware to perform automatic connection switching.
# The `database_selector` hash is used to pass options to the DatabaseSelector
# middleware. The `delay` is used to determine how long to wait after a write
# to send a subsequent read to the primary.
#
# The `database_resolver` class is used by the middleware to determine which
# database is appropriate to use based on the time delay.
#
# The `database_resolver_context` class is used by the middleware to set
# timestamps for the last write to the primary. The resolver uses the context
# class timestamps to determine how long to wait before reading from the
# replica.
#
# By default Rails will store a last write timestamp in the session. The
# DatabaseSelector middleware is designed as such you can define your own
# strategy for connection switching and pass that into the middleware through
# these configuration options.
# config.active_record.database_selector = { delay: 2.seconds }
# config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
# config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
| 43.671875 | 114 | 0.747048 |
391ff15400cd001b907abcdeffedff18dea2a4a4 | 378 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'active_record/default_values'

# Use a throwaway SQLite database for the suite; the schema and model
# definitions it needs live under spec/support.
ActiveRecord::Base.establish_connection(adapter: 'sqlite3',
  database: File.dirname(__FILE__) + '/active_record-default_values.sqlite3')

load File.dirname(__FILE__) + '/support/schema.rb'
load File.dirname(__FILE__) + '/support/models.rb'
| 42 | 115 | 0.695767 |
1db85baae31c437adf578969a63d5fd15c35c173 | 1,669 | #!/usr/bin/env ruby
require 'ghtorrent'
# One-off GHTorrent maintenance command: walks every repo in MongoDB that
# has a `parent` field (i.e. is a fork) and ensures the relational DB's
# `projects.forked_from` column points at the correct parent project.
class GHTFixForks < GHTorrent::Command

  include GHTorrent::Settings
  include GHTorrent::Retriever
  include GHTorrent::Persister

  def logger
    @ght.logger
  end

  # Lazily connects to MongoDB (the raw entity/event store).
  def persister
    @persister ||= connect(:mongo, settings)
    @persister
  end

  def go
    @ght ||= GHTorrent::Mirror.new(settings)

    col = persister.get_underlying_connection[:repos]

    fixed = tried = all = 0
    # :timeout => false keeps the server-side cursor alive for this
    # potentially very long scan.
    col.find({"parent" => {"$exists" => 1}}, {:timeout => false}) do |cursor|
      cursor.each do |x|
        all += 1
        repo = x['name']
        owner = x['owner']['login']
        parent_owner = x['parent']['owner']['login']
        parent_repo = x['parent']['name']

        begin
          @ght.transaction do
            forked = @ght.ensure_repo(owner, repo)
            parent = @ght.ensure_repo(parent_owner, parent_repo)

            if parent.nil?
              puts("parent repo #{parent_owner}/#{parent_repo} does not exist")
              next
            end

            # Only write when the fork link is missing or wrong.
            if forked[:forked_from].nil? or forked[:forked_from] != parent[:id]
              tried += 1
              @ght.db[:projects].filter(:id => forked[:id]).update(:forked_from => parent[:id])
              fixed += 1
              puts "Added #{owner}/#{repo} as fork of #{parent_owner}/#{parent_repo}"
            else
              puts "Fork #{owner}/#{repo} of #{parent_owner}/#{parent_repo} exists"
            end
          end
        rescue StandardError => e
          puts "Exception: #{e.message}"
        ensure
          # Progress line, printed after every examined repo.
          puts "Fixed #{fixed}/#{tried} (examined: #{all}) forks"
        end
      end
    end
  end
end
GHTFixForks.run
| 26.078125 | 95 | 0.554224 |
f7d4fe88877223fe1e1b43c0db51c585b4aa0d0e | 1,077 | require 'brakeman/checks/base_check'
# Checks if password is stored in controller
# when using http_basic_authenticate_with
#
# Only for Rails >= 3.1
class Brakeman::CheckBasicAuth < Brakeman::BaseCheck
  Brakeman::Checks.add self

  @description = "Checks for the use of http_basic_authenticate_with"

  def run_check
    # http_basic_authenticate_with was only introduced in Rails 3.1.
    return if version_between? "0.0.0", "3.0.99"

    # Controllers that declare http_basic_authenticate_with at class level.
    controllers = tracker.controllers.select do |name, c|
      c[:options][:http_basic_authenticate_with]
    end

    Hash[controllers].each do |name, controller|
      controller[:options][:http_basic_authenticate_with].each do |call|
        # Warn (highest confidence) when the :password option is a
        # hardcoded string literal; one warning per controller is enough.
        if pass = get_password(call) and string? pass
          warn :controller => name,
            :warning_type => "Basic Auth",
            :message => "Basic authentication password stored in source code",
            :code => call,
            :confidence => 0

          break
        end
      end
    end
  end

  # Extracts the :password option from the http_basic_authenticate_with
  # call, or false when the call has no options hash.
  def get_password call
    arg = call.first_arg

    return false if arg.nil? or not hash? arg

    hash_access(arg, :password)
  end
end
| 25.046512 | 80 | 0.657382 |
e926435a78421b96283b7fc62113aad7a2bc48b0 | 1,175 | require 'spec_helper'
# Generator template (ERB): <%= class_name.pluralize %> etc. are filled in
# by the scaffold, producing a JSON API controller spec that checks
# API-token handling and the common response codes for GET #show.
describe <%= class_name.pluralize %>Controller do

  render_views

  describe "GET" do
    before :each do
      # Stub the external authorization service to grant access.
      permit_with 200
      @<%= singular_name %> = create :<%= singular_name %>
      request.headers['HTTP_ACCEPT'] = "application/json"
      request.headers['X-API-Token'] = "totally-fake"
    end

    it "should return JSON" do
      get :show, id: @<%= singular_name %>
      response.content_type.should == "application/json"
    end

    it "should return a 400 if the X-API-Token header is missing" do
      request.headers['X-API-Token'] = nil
      get :show, id: @<%= singular_name %>
      response.status.should == 400
      response.content_type.should == "application/json"
    end

    it "should return a 404 when the user can't be found" do
      get :show, id: -1
      response.status.should == 404
      response.content_type.should == "application/json"
    end

    it "should return a 200 when successful" do
      get :show, id: @<%= singular_name %>
      response.status.should == 200
      response.should render_template(partial: "_<%= singular_name %>", count: 1)
    end
  end
end
| 26.704545 | 81 | 0.621277 |
abddb632ece7e1a89dcdf656b477b3faf35e5121 | 366 | class Hash
# Deletes each of the given keys from the hash and returns the values
# that were removed.
#
# @param *keys The keys to extract and delete.
#
# @return [Array<Object>] the deleted values in argument order; nil for
#   keys that were not present.
#
# @api public
def extract!(*keys)
  keys.map { |key| delete(key) }
end
end
| 22.875 | 74 | 0.672131 |
e83ab5d9a06055b2d79b760d325e098b60aec6fc | 132 | require 'test_helper'
# Placeholder unit test for GrimoireDefinition — only the scaffolded
# example assertion exists (commented out) until real coverage is added.
class GrimoireDefinitionTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end
| 16.5 | 54 | 0.727273 |
bf3fd77797bdd273d07d8ce0f4e63d1632a5b3f9 | 393 | require_relative "locale"
require_relative "direction"
class ABBYY::Cloud
  module Models
    # Translation engine descriptor returned by the ABBYY Cloud API: an
    # engine has a name, the locales it understands, and the language
    # direction pairs it can translate between.
    class Engine < Model
      attribute :name, Types::Strict::String
      attribute :languages, Types::Array.member(Types::Locale)
      attribute :translation_directions, Types::Array.member(Types::Direction)
    end

    # Registers type Types::Engine
    Types.register_type Engine
  end
end
| 24.5625 | 78 | 0.720102 |
388ee049d09f108f6b19247fb0c3ee8405cba314 | 3,911 | # frozen_string_literal: true
# Integration specs for `bundle binstubs` (and the legacy `--binstubs`
# install flag): generated stubs must shadow newer system-wide gems,
# honor custom paths/shebangs, and preserve backwards-compatible
# rewriting behavior.
RSpec.describe "Running bin/* commands" do
  before :each do
    install_gemfile! <<-G
      source "file://#{gem_repo1}"
      gem "rack"
    G
  end

  it "runs the bundled command when in the bundle" do
    bundle! "binstubs rack"

    # A newer system-wide rack must not win over the bundled 1.0.0.
    build_gem "rack", "2.0", :to_system => true do |s|
      s.executables = "rackup"
    end

    gembin "rackup"
    expect(out).to eq("1.0.0")
  end

  it "allows the location of the gem stubs to be specified" do
    bundle! "binstubs rack", :path => "gbin"

    expect(bundled_app("bin")).not_to exist
    expect(bundled_app("gbin/rackup")).to exist

    gembin bundled_app("gbin/rackup")
    expect(out).to eq("1.0.0")
  end

  it "allows absolute paths as a specification of where to install bin stubs" do
    bundle! "binstubs rack", :path => tmp("bin")
    gembin tmp("bin/rackup")
    expect(out).to eq("1.0.0")
  end

  it "uses the default ruby install name when shebang is not specified" do
    bundle! "binstubs rack"
    expect(File.open("bin/rackup").gets).to eq("#!/usr/bin/env #{RbConfig::CONFIG["ruby_install_name"]}\n")
  end

  it "allows the name of the shebang executable to be specified" do
    bundle! "binstubs rack", :shebang => "ruby-foo"
    expect(File.open("bin/rackup").gets).to eq("#!/usr/bin/env ruby-foo\n")
  end

  it "runs the bundled command when out of the bundle" do
    bundle! "binstubs rack"

    build_gem "rack", "2.0", :to_system => true do |s|
      s.executables = "rackup"
    end

    # Stubs must still resolve the bundle from outside the app directory.
    Dir.chdir(tmp) do
      gembin "rackup"
      expect(out).to eq("1.0.0")
    end
  end

  it "works with gems in path" do
    build_lib "rack", :path => lib_path("rack") do |s|
      s.executables = "rackup"
    end

    gemfile <<-G
      gem "rack", :path => "#{lib_path("rack")}"
    G

    bundle! "binstubs rack"

    build_gem "rack", "2.0", :to_system => true do |s|
      s.executables = "rackup"
    end

    gembin "rackup"
    expect(out).to eq("1.0")
  end

  it "creates a bundle binstub" do
    build_gem "bundler", Bundler::VERSION, :to_system => true do |s|
      s.executables = "bundle"
    end

    gemfile <<-G
      source "file://#{gem_repo1}"
      gem "bundler"
    G

    bundle! "binstubs bundler"

    expect(bundled_app("bin/bundle")).to exist
  end

  it "does not generate bin stubs if the option was not specified" do
    bundle! "install"

    expect(bundled_app("bin/rackup")).not_to exist
  end

  it "allows you to stop installing binstubs", :bundler => "< 2" do
    bundle! "install --binstubs bin/"
    bundled_app("bin/rackup").rmtree
    # An empty --binstubs value clears the remembered setting.
    bundle! "install --binstubs \"\""

    expect(bundled_app("bin/rackup")).not_to exist

    bundle! "config bin"
    expect(out).to include("You have not configured a value for `bin`")
  end

  it "remembers that the option was specified", :bundler => "< 2" do
    gemfile <<-G
      source "file://#{gem_repo1}"
      gem "activesupport"
    G

    bundle! :install, forgotten_command_line_options([:binstubs, :bin] => "bin")

    gemfile <<-G
      source "file://#{gem_repo1}"
      gem "activesupport"
      gem "rack"
    G

    # A later plain `install` must keep generating stubs for new gems.
    bundle "install"

    expect(bundled_app("bin/rackup")).to exist
  end

  it "rewrites bins on --binstubs (to maintain backwards compatibility)", :bundler => "< 2" do
    gemfile <<-G
      source "file://#{gem_repo1}"
      gem "rack"
    G

    bundle! :install, forgotten_command_line_options([:binstubs, :bin] => "bin")

    File.open(bundled_app("bin/rackup"), "wb") do |file|
      file.print "OMG"
    end

    bundle "install"

    expect(bundled_app("bin/rackup").read).to_not eq("OMG")
  end

  it "rewrites bins on binstubs (to maintain backwards compatibility)" do
    install_gemfile! <<-G
      source "file://#{gem_repo1}"
      gem "rack"
    G

    create_file("bin/rackup", "OMG")

    bundle! "binstubs rack"

    expect(bundled_app("bin/rackup").read).to_not eq("OMG")
  end
end
| 24.141975 | 107 | 0.623626 |
f7094575ceb6225c232298ee7629fb55904e182f | 924 | #
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html.
# Run `pod lib lint classifyfileselector.podspec' to validate before publishing.
#
# CocoaPods spec for the iOS side of the classifyfileselector Flutter
# plugin (values are mostly the unmodified Flutter plugin template).
Pod::Spec.new do |s|
  s.name             = 'classifyfileselector'
  s.version          = '0.0.1'
  s.summary          = 'A new Flutter plugin.'
  s.description      = <<-DESC
A new Flutter plugin.
                       DESC
  # NOTE(review): homepage/author are still template placeholders.
  s.homepage         = 'http://example.com'
  s.license          = { :file => '../LICENSE' }
  s.author           = { 'Your Company' => '[email protected]' }
  s.source           = { :path => '.' }
  s.source_files     = 'Classes/**/*'
  s.public_header_files = 'Classes/**/*.h'
  s.dependency 'Flutter'
  s.platform = :ios, '8.0'

  # Flutter.framework does not contain a i386 slice. Only x86_64 simulators are supported.
  s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphonesimulator*]' => 'x86_64' }
end
| 38.5 | 104 | 0.601732 |
ac777cc92247aa3ff68f197e23639c073393547e | 558 | Pod::Spec.new do |s|
# CocoaPods spec for Fable, a Swift highlight/focus-guide library.
s.name         = "Fable"
s.version      = "0.0.1"
s.summary      = "An elegant highlight focus guide written in swift"
s.homepage     = "https://github.com/fanglinwei/Fable"
s.license      = { :type => "MIT", :file => "LICENSE" }
s.author       = { "calm" => "[email protected]" }
s.platform     = :ios, "10.0"
# Releases are fetched by git tag matching the version above.
s.source       = { :git => "https://github.com/fanglinwei/Fable.git", :tag => s.version }
s.source_files = "Sources/**/*.swift"
s.requires_arc = true
s.frameworks   = "UIKit", "Foundation"
s.swift_version = "5.0"
end
| 21.461538 | 89 | 0.587814 |
180a850e1b8c207c21ff4027c79773cd292c7b07 | 1,027 | require 'test_helper'
class VerbsControllerTest < ActionController::TestCase
setup do
@verb = verbs(:one)
end
test "should get index" do
get :index
assert_response :success
assert_not_nil assigns(:verbs)
end
test "should get new" do
get :new
assert_response :success
end
test "should create verb" do
assert_difference('Verb.count') do
post :create, :verb => @verb.attributes
end
assert_redirected_to verb_path(assigns(:verb))
end
test "should show verb" do
get :show, :id => @verb.to_param
assert_response :success
end
test "should get edit" do
get :edit, :id => @verb.to_param
assert_response :success
end
test "should update verb" do
put :update, :id => @verb.to_param, :verb => @verb.attributes
assert_redirected_to verb_path(assigns(:verb))
end
test "should destroy verb" do
assert_difference('Verb.count', -1) do
delete :destroy, :id => @verb.to_param
end
assert_redirected_to verbs_path
end
end
| 20.54 | 65 | 0.676728 |
e2e137f4c15a33cab376eaf49adeeac5bc52212a | 993 | # Settings specified here will take precedence over those in config/environment.rb
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true
# Show full error reports and disable caching
config.action_controller.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Disable request forgery protection in test environment
config.action_controller.allow_forgery_protection = false
# Tell ActionMailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
HOST = "localhost"
| 39.72 | 82 | 0.793555 |
bb04f4f710a1b7bb2f2c9e4324eeea76c47e8787 | 942 | require 'platform-api'
# Provides a simple interface to the PlatformApi that is easy to use and test.
class HerokuClient
def create_app(app_json_schema_data)
app_setup.create(app_json_schema_data)
end
def delete_app(app_name)
app.delete(app_name)
end
def source_app_config_vars
config_var.info(source_app)
end
def add_collaborator(app_name, user_params)
collaborator.create(app_name, user_params)
end
def source_app_collaborators
collaborator.list(source_app)
end
def app_setup_status(app_setup_id)
app_setup.info(app_setup_id)
end
private
def app_setup
client.app_setup
end
def app
client.app
end
def collaborator
client.collaborator
end
def config_var
client.config_var
end
def client
@client ||= PlatformAPI.connect(ENV.fetch('HEROKU_API_KEY'))
end
def source_app
@source_app ||= ENV.fetch('SOURCE_APP_FOR_CONFIG_VARS')
end
end
| 17.127273 | 78 | 0.740977 |
629459bcdca771a9de853ac7707a778436f8f695 | 939 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'timesheets/version'
Gem::Specification.new do |spec|
spec.name = "timesheets"
spec.version = Timesheets::VERSION
spec.authors = ["Bradley J. Spaulding"]
spec.email = ["[email protected]"]
spec.summary = %q{A CLI for managing timesheets.}
spec.description = ""
spec.homepage = ""
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency "thor"
spec.add_dependency "terminal-table"
spec.add_development_dependency "bundler", "~> 1.6"
spec.add_development_dependency "rake"
spec.add_development_dependency "simplecov"
end
| 33.535714 | 74 | 0.659212 |
e24d4cbef42d73d330dc0d60448da4187ed35806 | 16,216 | module Hyrax
module CollectionTypes
class PermissionsService
# @api public
#
# Ids of collection types that a user can create or manage
#
# @param roles [String] type of access, Hyrax::CollectionTypeParticipant::MANAGE_ACCESS and/or Hyrax::CollectionTypeParticipant::CREATE_ACCESS
# @param user [User] user (required if ability is nil)
# @param ability [Ability] the ability coming from cancan ability check (default: nil) (required if user is nil)
# @return [Array<String>] ids for collection types for which a user has the specified role
# @note Several checks get the user's groups from the user's ability. The same values can be retrieved directly from a passed in ability.
# If calling from Abilities, pass the ability. If you try to get the ability from the user, you end up in an infinit loop.
def self.collection_type_ids_for_user(roles:, user: nil, ability: nil)
return false unless user.present? || ability.present?
return Hyrax::CollectionType.all.pluck('DISTINCT id') if user_admin?(user, ability)
Hyrax::CollectionTypeParticipant.where(agent_type: Hyrax::CollectionTypeParticipant::USER_TYPE,
agent_id: user_id(user, ability),
access: roles)
.or(
Hyrax::CollectionTypeParticipant.where(agent_type: Hyrax::CollectionTypeParticipant::GROUP_TYPE,
agent_id: user_groups(user, ability),
access: roles)
).pluck('DISTINCT hyrax_collection_type_id')
end
# @api public
#
# Instances of collection types that a user can create or manage
#
# @param roles [String] type of access, Hyrax::CollectionTypeParticipant::MANAGE_ACCESS and/or Hyrax::CollectionTypeParticipant::CREATE_ACCESS
# @param user [User] user (required if ability is nil)
# @param ability [Ability] the ability coming from cancan ability check (default: nil) (required if user is nil)
# @return [Array<Hyrax::CollectionType>] instances of collection types for which a user has the specified role
# @note Several checks get the user's groups from the user's ability. The same values can be retrieved directly from a passed in ability.
# If calling from Abilities, pass the ability. If you try to get the ability from the user, you end up in an infinit loop.
def self.collection_types_for_user(roles:, user: nil, ability: nil)
return false unless user.present? || ability.present?
return Hyrax::CollectionType.all if user_admin?(user, ability)
Hyrax::CollectionType.where(id: collection_type_ids_for_user(user: user, roles: roles, ability: ability))
end
# @api public
#
# Is the user a creator for any collection types?
#
# @param user [User] user (required if ability is nil)
# @param ability [Ability] the ability coming from cancan ability check (default: nil) (required if user is nil)
# @return [Boolean] true if the user has permission to create collections of at least one collection type
# @note Several checks get the user's groups from the user's ability. The same values can be retrieved directly from a passed in ability.
# If calling from Abilities, pass the ability. If you try to get the ability from the user, you end up in an infinit loop.
def self.can_create_any_collection_type?(user: nil, ability: nil)
return false unless user.present? || ability.present?
return true if user_admin?(user, ability)
# both manage and create access can create collections of a type, so no need to include access in the query
return true if Hyrax::CollectionTypeParticipant.where(agent_type: Hyrax::CollectionTypeParticipant::USER_TYPE,
agent_id: user_id(user, ability)).any?
return true if Hyrax::CollectionTypeParticipant.where(agent_type: Hyrax::CollectionTypeParticipant::GROUP_TYPE,
agent_id: user_groups(user, ability)).any?
false
end
# @api public
#
# Is the user a creator for admin sets collection types?
#
# @param user [User] user (required if ability is nil)
# @param ability [Ability] the ability coming from cancan ability check (default: nil) (required if user is nil)
# @return [Boolean] true if the user has permission to create collections of type admin_set
# @note Several checks get the user's groups from the user's ability. The same values can be retrieved directly from a passed in ability.
# If calling from Abilities, pass the ability. If you try to get the ability from the user, you end up in an infinit loop.
def self.can_create_admin_set_collection_type?(user: nil, ability: nil)
return false unless user.present? || ability.present?
return true if user_admin?(user, ability)
# both manage and create access can create collections of a type, so no need to include access in the query
return true if Hyrax::CollectionTypeParticipant.joins(:hyrax_collection_type)
.where(agent_type: Hyrax::CollectionTypeParticipant::USER_TYPE,
agent_id: user_id(user, ability),
hyrax_collection_types: { machine_id: Hyrax::CollectionType::ADMIN_SET_MACHINE_ID }).present?
return true if Hyrax::CollectionTypeParticipant.joins(:hyrax_collection_type)
.where(agent_type: Hyrax::CollectionTypeParticipant::GROUP_TYPE,
agent_id: user_groups(user, ability),
hyrax_collection_types: { machine_id: Hyrax::CollectionType::ADMIN_SET_MACHINE_ID }).present?
false
end
# @api public
#
# Get a list of collection types that a user can create
#
# @param user [User] user (required if ability is nil)
# @param ability [Ability] the ability coming from cancan ability check (default: nil) (required if user is nil)
# @return [Array<Hyrax::CollectionType>] array of collection types the user can create
# @note Several checks get the user's groups from the user's ability. The same values can be retrieved directly from a passed in ability.
# If calling from Abilities, pass the ability. If you try to get the ability from the user, you end up in an infinit loop.
def self.can_create_collection_types(user: nil, ability: nil)
collection_types_for_user(user: user, ability: ability, roles: [Hyrax::CollectionTypeParticipant::MANAGE_ACCESS, Hyrax::CollectionTypeParticipant::CREATE_ACCESS])
end
# @api public
#
# Get a list of collection types that a user can create
#
# @param collection_type [Hyrax::CollectionType] the type of the collection being created
# @param user [User] user (required if ability is nil)
# @param ability [Ability] the ability coming from cancan ability check (default: nil) (required if user is nil)
# @return [Boolean] true if the user has permission to create collections of specified type
# @note Several checks get the user's groups from the user's ability. The same values can be retrieved directly from a passed in ability.
# If calling from Abilities, pass the ability. If you try to get the ability from the user, you end up in an infinit loop.
def self.can_create_collection_of_type?(collection_type:, user: nil, ability: nil)
manage_access_for_collection_type?(user: user, ability: ability, collection_type: collection_type) ||
create_access_for_collection_type?(user: user, ability: ability, collection_type: collection_type)
end
# @api private
#
# Determine if the given user has :manage access for the given collection type
#
# @param collection_type [Hyrax::CollectionType] the collection type we are checking permissions on
# @param user [User] user (required if ability is nil)
# @param ability [Ability] the ability coming from cancan ability check (default: nil) (required if user is nil)
# @return [Boolean] true if the user has permission to manage collections of the specified collection type
# @note Several checks get the user's groups from the user's ability. The same values can be retrieved directly from a passed in ability.
# If calling from Abilities, pass the ability. If you try to get the ability from the user, you end up in an infinit loop.
def self.manage_access_for_collection_type?(collection_type:, user: nil, ability: nil)
access_to_collection_type?(user: user, ability: ability, collection_type: collection_type, access: 'manage')
end
private_class_method :manage_access_for_collection_type?
# @api private
#
# Determine if the given user has :create access for the given collection type
#
# @param collection_type [Hyrax::CollectionType] the collection type we are checking permissions on
# @param user [User] user (required if ability is nil)
# @param ability [Ability] the ability coming from cancan ability check (default: nil) (required if user is nil)
# @return [Boolean] true if the user has permission to create collections of the specified collection type
# @note Several checks get the user's groups from the user's ability. The same values can be retrieved directly from a passed in ability.
# If calling from Abilities, pass the ability. If you try to get the ability from the user, you end up in an infinit loop.
def self.create_access_for_collection_type?(collection_type:, user: nil, ability: nil)
access_to_collection_type?(user: user, ability: ability, collection_type: collection_type, access: 'create')
end
private_class_method :create_access_for_collection_type?
# @api private
#
# Determine if the given user has specified access for the given collection type
#
# @param collection_type [Hyrax::CollectionType] the collection type we are checking permissions on
# @param access [Symbol] the access level to check
# @param user [User] user (required if ability is nil)
# @param ability [Ability] the ability coming from cancan ability check (default: nil) (required if user is nil)
# @return [Boolean] true if the user has permission to create collections of the specified collection type
# @note Several checks get the user's groups from the user's ability. The same values can be retrieved directly from a passed in ability.
# If calling from Abilities, pass the ability. If you try to get the ability from the user, you end up in an infinit loop.
def self.access_to_collection_type?(collection_type:, access:, user: nil, ability: nil) # rubocop:disable Metrics/CyclomaticComplexity
return false unless user.present? || ability.present?
return false unless user && collection_type
return true if ([user_id(user, ability)] & agent_ids_for(collection_type: collection_type, agent_type: 'user', access: access)).present?
return true if (user_groups(user, ability) & agent_ids_for(collection_type: collection_type, agent_type: 'group', access: access)).present?
false
end
private_class_method :access_to_collection_type?
# @api public
#
# What types of collection can the user create or manage
#
# @param user [User] user - The user requesting to create/manage a Collection
# @param roles [String] type of access, Hyrax::CollectionTypeParticipant::MANAGE_ACCESS and/or Hyrax::CollectionTypeParticipant::CREATE_ACCESS
# @return [Array<Hyrax::CollectionType>]
# @note Several checks get the user's groups from the user's ability. The same values can be retrieved directly from a passed in ability.
# If calling from Abilities, pass the ability. If you try to get the ability from the user, you end up in an infinit loop.
def self.agent_ids_for(collection_type:, agent_type:, access:)
Hyrax::CollectionTypeParticipant.where(hyrax_collection_type_id: collection_type.id,
agent_type: agent_type,
access: access).pluck('DISTINCT agent_id')
end
private_class_method :agent_ids_for
# @api public
#
# Get a list of users who should be added as user editors for a new collection of the specified collection type
#
# @param collection_type [Hyrax::CollectionType] the type of the collection being created
# @return [Array<String>] array of user identifiers (typically emails) for users who can edit collections of this type
# @note Several checks get the user's groups from the user's ability. The same values can be retrieved directly from a passed in ability.
# If calling from Abilities, pass the ability. If you try to get the ability from the user, you end up in an infinit loop.
def self.user_edit_grants_for_collection_of_type(collection_type: nil)
return [] unless collection_type
Hyrax::CollectionTypeParticipant.joins(:hyrax_collection_type).where(hyrax_collection_type_id: collection_type.id,
agent_type: Hyrax::CollectionTypeParticipant::USER_TYPE,
access: Hyrax::CollectionTypeParticipant::MANAGE_ACCESS).pluck('DISTINCT agent_id')
end
# @api public
#
# Get a list of group that should be added as group editors for a new collection of the specified collection type
#
# @param collection_type [Hyrax::CollectionType] the type of the collection being created
# @return [Array<String>] array of group identifiers (typically groupname) for groups who can edit collections of this type
# @note Several checks get the user's groups from the user's ability. The same values can be retrieved directly from a passed in ability.
# If calling from Abilities, pass the ability. If you try to get the ability from the user, you end up in an infinit loop.
def self.group_edit_grants_for_collection_of_type(collection_type: nil)
return [] unless collection_type
groups = Hyrax::CollectionTypeParticipant.joins(:hyrax_collection_type).where(hyrax_collection_type_id: collection_type.id,
agent_type: Hyrax::CollectionTypeParticipant::GROUP_TYPE,
access: Hyrax::CollectionTypeParticipant::MANAGE_ACCESS).pluck('DISTINCT agent_id')
groups | ['admin']
end
def self.user_groups(user, ability)
# if called from abilities class, use ability instead of user; otherwise, you end up in an infinite loop
return ability.user_groups if ability.present?
user.ability.user_groups
end
private_class_method :user_groups
def self.user_admin?(user, ability)
# if called from abilities class, use ability instead of user; otherwise, you end up in an infinite loop
return ability.admin? if ability.present?
user.ability.admin?
end
private_class_method :user_groups
def self.user_id(user, ability)
return ability.current_user.user_key if ability.present?
user.user_key
end
private_class_method :user_id
end
end
end
| 69.299145 | 170 | 0.666564 |
bb833390fffe9fdc0db018647c537003bf888b19 | 122 | # Copyright (c) 2020 Jean-Sebastien Gelinas, see LICENSE.txt
# frozen_string_literal: true
require 'tileset_tooling/app'
| 24.4 | 60 | 0.795082 |
33196b1c93afa2220ea1d88d972a6c2abf5a5c40 | 589 | cask "eqmac" do
version "1.0.0"
sha256 "f0cf0831da9fc08f7c2f267b6a691173b32b4aa688625afcc43cade6d5f65a7f"
url "https://github.com/bitgapp/eqMac/releases/download/v#{version}/eqMac.pkg",
verified: "github.com/bitgapp/eqMac/"
name "eqMac"
desc "System-wide audio equalizer"
homepage "https://eqmac.app/"
pkg "eqMac.pkg"
uninstall pkgutil: "com.bitgapp.eqmac.pkg",
quit: "com.bitgapp.eqmac"
zap trash: [
"~/Library/Caches/com.bitgapp.eqmac",
"~/Library/Preferences/com.bitgapp.eqmac.plist",
"~/Library/WebKit/com.bitgapp.eqmac",
]
end
| 26.772727 | 81 | 0.692699 |
d54fe5e4618175c818d04e708d1c3a1d2bdca6ac | 459 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_05_01
module Models
#
# Defines values for IkeIntegrity
#
module IkeIntegrity
MD5 = "MD5"
SHA1 = "SHA1"
SHA256 = "SHA256"
SHA384 = "SHA384"
GCMAES256 = "GCMAES256"
GCMAES128 = "GCMAES128"
end
end
end
| 21.857143 | 70 | 0.651416 |
f863a8bae883d64e4fec3dbc8cb617845ac6ce01 | 432 | require 'bundler/setup'
Bundler.require
ENV['SINATRA_ENV'] ||= "development"
ActiveRecord::Base.establish_connection(
:adapter => "sqlite3",
:database => "db/development.sqlite"
)
require_relative "../app/controllers/application_controller.rb"
Dir[File.join(File.dirname(__FILE__), "../app/models", "*.rb")].each {|f| require f}
Dir[File.join(File.dirname(__FILE__), "../app/controllers", "*.rb")].sort.each {|f| require f}
| 28.8 | 94 | 0.708333 |
384ecac33159baf6005fe2fa902b07fc2c74a394 | 3,656 | controller :domains do
friendly_name "Domains API"
description "This API allows you to create and view domains on server"
authenticator :server
action :create do
title "Create a domain"
description "This action allows you to create domains"
param :name, "Domain name (max 50)", :required => true, :type => String
error 'ValidationError', "The provided data was not sufficient to create domain", :attributes => {:errors => "A hash of error details"}
error 'DomainNameMissing', "Domain name is missing"
error 'InvalidDomainName', "Domain name is invalid"
error 'DomainNameExists', "Domain name already exists"
returns Hash, :structure => :domain
action do
domain = identity.server.domains.find_by_name(params.name)
if domain.nil?
domain = Domain.new
domain.server = identity.server
domain.name = params.name
domain.verification_method = 'DNS'
domain.owner_type = Server
domain.owner_id = identity.server.id
domain.verified_at = Time.now
if domain.save
structure :domain, domain, :return => true
else
error_message = domain.errors.full_messages.first
if error_message == "Name is invalid"
error "InvalidDomainName"
else
error "Unknown Error", error_message
end
end
else
error 'DomainNameExists'
end
end
end
action :query do
title "Query domain"
description "This action allows you to query domain"
param :name, "Domain name (max 50)", :required => true, :type => String
error 'ValidationError', "The provided data was not sufficient to query domain", :attributes => {:errors => "A hash of error details"}
error 'DomainNameMissing', "Domain name is missing"
error 'DomainNotFound', "The domain not found"
returns Hash, :structure => :domain
action do
domain = identity.server.domains.find_by_name(params.name)
if domain.nil?
error 'DomainNotFound'
else
structure :domain, domain, :return => true
end
end
end
action :check do
title "Check domain status"
description "This action allows you to check domain status"
param :name, "Domain name (max 50)", :required => true, :type => String
error 'ValidationError', "The provided data was not sufficient to query domain", :attributes => {:errors => "A hash of error details"}
error 'DomainNameMissing', "Domain name is missing"
error 'DomainNotFound', "The domain not found"
returns Hash, :structure => :domain
action do
domain = identity.server.domains.find_by_name(params.name)
if domain.nil?
error 'DomainNotFound'
else
domain.check_dns(:manual)
structure :domain, domain, :return => true
end
end
end
action :delete do
title "Delete a domain"
description "This action allows you to delete domain"
param :name, "Domain name (max 50)", :required => true, :type => String
error 'ValidationError', "The provided data was not sufficient to query domain", :attributes => {:errors => "A hash of error details"}
error 'DomainNameMissing', "Domain name is missing"
error 'DomainNotFound', "The domain not found"
error 'DomainNotDeleted', "Domain could not be deleted"
returns Hash
action do
domain = identity.server.domains.find_by_name(params.name)
if domain.nil?
error 'DomainNotFound'
elsif domain.destroy
{:message => "Domain deleted successfully"}
else
error 'DomainNotDeleted'
end
end
end
end
| 31.517241 | 139 | 0.657002 |
5dd5941900eee9a2384c0218d6f73c15291c1ba2 | 1,285 | require_relative '../../../spec_helper'
describe Admin::Views::Players::Index do
let(:exposures) { Hash[players: []] }
let(:template) { Hanami::View::Template.new('apps/admin/templates/players/index.html.haml') }
let(:view) { Admin::Views::Players::Index.new(template, exposures) }
it 'exposes #players' do
view.players.must_equal exposures.fetch(:players)
end
describe 'rendering' do
let(:rendered) { view.render }
describe 'when there are no players' do
it 'shows a placeholder message' do
rendered.must_include('There are no players yet.')
end
end
describe 'when there are players' do
let(:player1) { Player.new(id: 1,first_name: 'George', last_name: 'Abitbol', email: '[email protected]') }
let(:player2) { Player.new(id: 2,first_name: 'Noel', last_name: 'Flantier', email: '[email protected]') }
let(:exposures) { Hash[players: [player1, player2]] }
it 'lists them all' do
rendered.scan(/class='player'/).count.must_equal 2
rendered.must_include('George')
rendered.must_include('Noel')
end
it 'hides the placeholder message' do
rendered.wont_include('<p class="placeholder">There are no players yet.</p>')
end
end
end
end
| 33.815789 | 119 | 0.649027 |
e2e399e034c21a4052016f6ca0797a3c7be14900 | 2,633 | class Lmod < Formula
desc "Lua-based environment modules system to modify PATH variable"
homepage "https://lmod.readthedocs.io"
url "https://github.com/TACC/Lmod/archive/8.4.26.tar.gz"
sha256 "7bcd16046439dfe380635ff404af501a436a93146321a535e3014066b83392d3"
license "MIT"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "415721c5b02a9b56d1a9351a399b44f3165fbf364d2fdfe37bb1eb0cf4547553"
sha256 cellar: :any_skip_relocation, big_sur: "3ef922c7bd5b3fbe913032a0f5e6b35619966a6180b767569173e2393b458026"
sha256 cellar: :any_skip_relocation, catalina: "212bd2b071d5523348fecf894d3d65e9eb58b548675ce563f6c97de7e64fad39"
sha256 cellar: :any_skip_relocation, mojave: "1261f624e2b93c027c4f419da1ba35ed0fb5ab5a6f38d9998495b85f89839091"
end
depends_on "luarocks" => :build
depends_on "pkg-config" => :build
depends_on "lua"
resource "luafilesystem" do
url "https://github.com/keplerproject/luafilesystem/archive/v1_8_0.tar.gz"
sha256 "16d17c788b8093f2047325343f5e9b74cccb1ea96001e45914a58bbae8932495"
end
resource "luaposix" do
url "https://github.com/luaposix/luaposix/archive/v35.0.tar.gz"
sha256 "a4edf2f715feff65acb009e8d1689e57ec665eb79bc36a6649fae55eafd56809"
end
def install
luaversion = Formula["lua"].version.major_minor
luapath = libexec/"vendor"
ENV["LUA_PATH"] = "?.lua;" \
"#{luapath}/share/lua/#{luaversion}/?.lua;" \
"#{luapath}/share/lua/#{luaversion}/?/init.lua"
ENV["LUA_CPATH"] = "#{luapath}/lib/lua/#{luaversion}/?.so"
resources.each do |r|
r.stage do
system "luarocks", "make", "--tree=#{luapath}"
end
end
system "./configure", "--with-siteControlPrefix=yes", "--prefix=#{prefix}"
system "make", "install"
end
def caveats
<<~EOS
To use Lmod, you should add the init script to the shell you are using.
For example, the bash setup script is here: #{opt_prefix}/init/profile
and you can source it in your bash setup or link to it.
If you use fish, use #{opt_prefix}/init/fish, such as:
ln -s #{opt_prefix}/init/fish ~/.config/fish/conf.d/00_lmod.fish
EOS
end
test do
sh_init = "#{prefix}/init/sh"
(testpath/"lmodtest.sh").write <<~EOS
#!/bin/sh
source #{sh_init}
module list
EOS
assert_match "No modules loaded", shell_output("sh #{testpath}/lmodtest.sh 2>&1")
system sh_init
output = shell_output("#{prefix}/libexec/spider #{prefix}/modulefiles/Core/")
assert_match "lmod", output
assert_match "settarg", output
end
end
| 34.644737 | 122 | 0.698063 |
e24484faae68742d153b89b98bce270ffd875a34 | 124 | require "test_helper"
class UserStatusTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 15.5 | 46 | 0.709677 |
4a3d49d5ce2c620d66df4ceaf90a3a311f57877c | 2,468 | module WatirPerformance
class Results
WD_ONLY_FINDERS = [:link_text,
:partial_link_text]
WD_FINDERS = [:class_name,
:id,
:name,
:tag_name]
CSS_FINDERS = [:css_id,
:css_class,
:css_tag,
:css_attribute,
:css_value]
XPATH_FINDERS = [:xpath_id,
:xpath_class,
:xpath_tag,
:xpath_attribute,
:xpath_value]
WATIR_FINDERS = [:href,
:text,
:data_text,
:hreflang]
WATIR_REGEX_FINDERS = [:text_regex,
:href_regex,
:data_text_regex,
:hreflang_regex]
def initialize
WD_FINDERS.each do |how|
instance_variable_set("@#{how}_wd_array", [])
instance_variable_set("@#{how}_watir_array", [])
instance_variable_set("@#{how}_regex_array", [])
end
end
WD_FINDERS.each do |how|
define_method("#{how}_watir") do
eval("@#{how}_watir_array").mean
end
define_method("#{how}_wd") do
eval("@#{how}_wd_array").mean
end
define_method("#{how}_regex") do
eval("@#{how}_regex_array").mean
end
define_method("#{how}_wd_watir") do
watir = eval("#{how}_watir")
wd = eval("@#{how}_wd")
wd == 0 ? 0 : 100 * (watir - wd) / wd
end
define_method("#{how}_watir_regex") do
regex = eval("#{how}_regex")
watir = eval("@#{how}_watir")
watir == 0 ? 0 : 100 * (regex - watir) / watir
end
define_method("#{how}_wd=") do |value|
eval("@#{how}_wd_array") << value
end
define_method("#{how}_watir=") do |value|
eval("@#{how}_watir_array") << value
end
define_method("#{how}_regex=") do |value|
eval("@#{how}_regex_array") << value
end
end
def to_s
s = ''
WD_FINDERS.each do |locator|
s << "#{locator} - Watir: #{eval("#{locator}_watir").round(4)}ms; WD: #{eval("#{locator}_wd").round(4)}ms" \
" -- #{eval("#{locator}_wd_watir").round(0)}% performance hit;" \
" Regex: #{eval("#{locator}_regex").round(4)}ms -- #{eval("#{locator}_watir_regex").round(0)}% performance hit;\n"
end
s
end
end
end | 25.708333 | 122 | 0.485413 |
3985a5c938c4b9d1299723cc93a6a7e00e7fddd1 | 255 | require "simplemde/version"
# module Simplemde
module Rails
class Simplemde < Engine
initializer :assets do |config|
Rails.application.config.assets.precompile += %w( simplemde/simplemde.min.js simplemde/simplemde.min.css )
end
end
end
| 23.181818 | 112 | 0.741176 |
62b358f52cd3066ba5af6a6db62a6f4703df6f96 | 857 | # frozen_string_literal: true
FactoryBot.define do
factory :matrix_question, class: 'Rapidfire::Question' do
question_text { 'Question?' }
association :question_group, factory: :question_group
type { 'Rapidfire::Questions::Radio' }
answer_options { "option1\r\noption2\r\noption3" }
validation_rules do
{
presence: 0,
grouped: 1,
grouped_question: 'What is the question?'
}
end
end
factory :matrix_question2, class: 'Rapidfire::Question' do
question_text { 'Question?' }
association :question_group, factory: :question_group
type { 'Rapidfire::Questions::Radio' }
answer_options { "option1\r\noption2\r\noption3" }
validation_rules do
{
presence: 0,
grouped: 1,
grouped_question: 'This question is different'
}
end
end
end
| 26.78125 | 60 | 0.652275 |
edc5ba6f81a52f85f2c6afa779513d8cf0879235 | 1,988 | =begin
#SendinBlue API
#SendinBlue provide a RESTFul API that can be used with any languages. With this API, you will be able to : - Manage your campaigns and get the statistics - Manage your contacts - Send transactional Emails and SMS - and much more... You can download our wrappers at https://github.com/orgs/sendinblue **Possible responses** | Code | Message | | :-------------: | ------------- | | 200 | OK. Successful Request | | 201 | OK. Successful Creation | | 202 | OK. Request accepted | | 204 | OK. Successful Update/Deletion | | 400 | Error. Bad Request | | 401 | Error. Authentication Needed | | 402 | Error. Not enough credit, plan upgrade needed | | 403 | Error. Permission denied | | 404 | Error. Object does not exist | | 405 | Error. Method not allowed | | 406 | Error. Not Acceptable |
OpenAPI spec version: 3.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.18
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for SibApiV3Sdk::AddContactToList
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
# Unit tests for the generated SibApiV3Sdk::AddContactToList model.
describe 'AddContactToList' do
  before do
    # Fresh model instance for every example.
    @instance = SibApiV3Sdk::AddContactToList.new
  end

  describe 'test an instance of AddContactToList' do
    it 'should create an instance of AddContactToList' do
      expect(@instance).to be_instance_of(SibApiV3Sdk::AddContactToList)
    end
  end

  # The generated placeholder examples asserted nothing; exercise the
  # attribute accessors with a simple write/read round trip instead.
  describe 'test attribute "emails"' do
    it 'should work' do
      emails = ['[email protected]']
      @instance.emails = emails
      expect(@instance.emails).to eq(emails)
    end
  end

  describe 'test attribute "ids"' do
    it 'should work' do
      ids = [1, 2, 3]
      @instance.ids = ids
      expect(@instance.ids).to eq(ids)
    end
  end
end
| 41.416667 | 839 | 0.695674 |
03ec223279ed0ff1b053525c7f15f3a41292d109 | 44,475 | # This file has been automatically generated from a template file.
# Please make modifications to `templates/gRPC-C++.podspec.template`
# instead. This file can be regenerated from the template by running
# `tools/buildgen/generate_projects.sh`.
# gRPC C++ CocoaPods podspec
#
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: this spec is auto-generated (see the template note at the top of the
# file); hand edits here will be lost on regeneration — change the template.
Pod::Spec.new do |s|
  s.name = 'gRPC-C++'
  # TODO (mxyan): use version that match gRPC version when pod is stabilized
  # version = '1.16.0-dev'
  version = '0.0.3'
  s.version = version
  s.summary = 'gRPC C++ library'
  s.homepage = 'https://grpc.io'
  s.license = 'Apache License, Version 2.0'
  s.authors = { 'The gRPC contributors' => '[email protected]' }

  # The pod's own version (above) is intentionally decoupled from the gRPC
  # release it wraps; the source tag still tracks the gRPC release.
  grpc_version = '1.16.0-dev'

  s.source = {
    :git => 'https://github.com/grpc/grpc.git',
    :tag => "v#{grpc_version}",
  }

  s.ios.deployment_target = '7.0'
  s.osx.deployment_target = '10.9'
  s.requires_arc = false

  name = 'grpcpp'

  # Use `grpcpp` as framework name so that `#include <grpcpp/xxx.h>` works when built as
  # framework.
  s.module_name = name

  # Add include prefix `grpcpp` so that `#include <grpcpp/xxx.h>` works when built as static
  # library.
  s.header_dir = name

  s.pod_target_xcconfig = {
    'HEADER_SEARCH_PATHS' => '"$(inherited)" "$(PODS_TARGET_SRCROOT)/include"',
    'USER_HEADER_SEARCH_PATHS' => '"$(PODS_TARGET_SRCROOT)"',
    'GCC_PREPROCESSOR_DEFINITIONS' => '"$(inherited)" "COCOAPODS=1" "PB_NO_PACKED_STRUCTS=1"',
    'CLANG_WARN_STRICT_PROTOTYPES' => 'NO',
    'CLANG_WARN_DOCUMENTATION_COMMENTS' => 'NO',
    # If we don't set these two settings, `include/grpc/support/time.h` and
    # `src/core/lib/support/string.h` shadow the system `<time.h>` and `<string.h>`, breaking the
    # build.
    'USE_HEADERMAP' => 'NO',
    'ALWAYS_SEARCH_USER_PATHS' => 'NO',
  }

  s.libraries = 'c++'

  s.default_subspecs = 'Interface', 'Implementation'
  s.header_mappings_dir = 'include/grpcpp'

  # Public C++ API surface: headers only, no implementation sources.
  s.subspec 'Interface' do |ss|
    ss.header_mappings_dir = 'include/grpcpp'

    ss.source_files = 'include/grpcpp/alarm.h',
        'include/grpcpp/channel.h',
        'include/grpcpp/client_context.h',
        'include/grpcpp/completion_queue.h',
        'include/grpcpp/create_channel.h',
        'include/grpcpp/create_channel_posix.h',
        'include/grpcpp/ext/health_check_service_server_builder_option.h',
        'include/grpcpp/generic/async_generic_service.h',
        'include/grpcpp/generic/generic_stub.h',
        'include/grpcpp/grpcpp.h',
        'include/grpcpp/health_check_service_interface.h',
        'include/grpcpp/impl/call.h',
        'include/grpcpp/impl/channel_argument_option.h',
        'include/grpcpp/impl/client_unary_call.h',
        'include/grpcpp/impl/codegen/core_codegen.h',
        'include/grpcpp/impl/grpc_library.h',
        'include/grpcpp/impl/method_handler_impl.h',
        'include/grpcpp/impl/rpc_method.h',
        'include/grpcpp/impl/rpc_service_method.h',
        'include/grpcpp/impl/serialization_traits.h',
        'include/grpcpp/impl/server_builder_option.h',
        'include/grpcpp/impl/server_builder_plugin.h',
        'include/grpcpp/impl/server_initializer.h',
        'include/grpcpp/impl/service_type.h',
        'include/grpcpp/resource_quota.h',
        'include/grpcpp/security/auth_context.h',
        'include/grpcpp/security/auth_metadata_processor.h',
        'include/grpcpp/security/credentials.h',
        'include/grpcpp/security/server_credentials.h',
        'include/grpcpp/server.h',
        'include/grpcpp/server_builder.h',
        'include/grpcpp/server_context.h',
        'include/grpcpp/server_posix.h',
        'include/grpcpp/support/async_stream.h',
        'include/grpcpp/support/async_unary_call.h',
        'include/grpcpp/support/byte_buffer.h',
        'include/grpcpp/support/channel_arguments.h',
        'include/grpcpp/support/client_callback.h',
        'include/grpcpp/support/config.h',
        'include/grpcpp/support/proto_buffer_reader.h',
        'include/grpcpp/support/proto_buffer_writer.h',
        'include/grpcpp/support/slice.h',
        'include/grpcpp/support/status.h',
        'include/grpcpp/support/status_code_enum.h',
        'include/grpcpp/support/string_ref.h',
        'include/grpcpp/support/stub_options.h',
        'include/grpcpp/support/sync_stream.h',
        'include/grpcpp/support/time.h',
        'include/grpcpp/impl/codegen/async_generic_service.h',
        'include/grpcpp/impl/codegen/async_stream.h',
        'include/grpcpp/impl/codegen/async_unary_call.h',
        'include/grpcpp/impl/codegen/byte_buffer.h',
        'include/grpcpp/impl/codegen/call.h',
        'include/grpcpp/impl/codegen/call_hook.h',
        'include/grpcpp/impl/codegen/callback_common.h',
        'include/grpcpp/impl/codegen/channel_interface.h',
        'include/grpcpp/impl/codegen/client_callback.h',
        'include/grpcpp/impl/codegen/client_context.h',
        'include/grpcpp/impl/codegen/client_unary_call.h',
        'include/grpcpp/impl/codegen/completion_queue.h',
        'include/grpcpp/impl/codegen/completion_queue_tag.h',
        'include/grpcpp/impl/codegen/config.h',
        'include/grpcpp/impl/codegen/core_codegen_interface.h',
        'include/grpcpp/impl/codegen/create_auth_context.h',
        'include/grpcpp/impl/codegen/grpc_library.h',
        'include/grpcpp/impl/codegen/metadata_map.h',
        'include/grpcpp/impl/codegen/method_handler_impl.h',
        'include/grpcpp/impl/codegen/rpc_method.h',
        'include/grpcpp/impl/codegen/rpc_service_method.h',
        'include/grpcpp/impl/codegen/security/auth_context.h',
        'include/grpcpp/impl/codegen/serialization_traits.h',
        'include/grpcpp/impl/codegen/server_context.h',
        'include/grpcpp/impl/codegen/server_interface.h',
        'include/grpcpp/impl/codegen/service_type.h',
        'include/grpcpp/impl/codegen/slice.h',
        'include/grpcpp/impl/codegen/status.h',
        'include/grpcpp/impl/codegen/status_code_enum.h',
        'include/grpcpp/impl/codegen/string_ref.h',
        'include/grpcpp/impl/codegen/stub_options.h',
        'include/grpcpp/impl/codegen/sync_stream.h',
        'include/grpcpp/impl/codegen/time.h'
  end

  # Implementation sources plus the internal core/tsi headers they include;
  # pulls in gRPC-Core for the C core and nanopb for the generated .pb files.
  s.subspec 'Implementation' do |ss|
    ss.header_mappings_dir = '.'
    ss.dependency "#{s.name}/Interface", version
    ss.dependency 'gRPC-Core', grpc_version
    ss.dependency 'nanopb', '~> 0.3'

    ss.source_files = 'include/grpcpp/impl/codegen/core_codegen.h',
        'src/cpp/client/secure_credentials.h',
        'src/cpp/common/secure_auth_context.h',
        'src/cpp/server/secure_server_credentials.h',
        'src/cpp/client/create_channel_internal.h',
        'src/cpp/common/channel_filter.h',
        'src/cpp/server/dynamic_thread_pool.h',
        'src/cpp/server/health/default_health_check_service.h',
        'src/cpp/server/health/health.pb.h',
        'src/cpp/server/thread_pool_interface.h',
        'src/cpp/thread_manager/thread_manager.h',
        'src/cpp/client/insecure_credentials.cc',
        'src/cpp/client/secure_credentials.cc',
        'src/cpp/common/auth_property_iterator.cc',
        'src/cpp/common/secure_auth_context.cc',
        'src/cpp/common/secure_channel_arguments.cc',
        'src/cpp/common/secure_create_auth_context.cc',
        'src/cpp/server/insecure_server_credentials.cc',
        'src/cpp/server/secure_server_credentials.cc',
        'src/cpp/client/channel_cc.cc',
        'src/cpp/client/client_context.cc',
        'src/cpp/client/create_channel.cc',
        'src/cpp/client/create_channel_internal.cc',
        'src/cpp/client/create_channel_posix.cc',
        'src/cpp/client/credentials_cc.cc',
        'src/cpp/client/generic_stub.cc',
        'src/cpp/common/alarm.cc',
        'src/cpp/common/callback_common.cc',
        'src/cpp/common/channel_arguments.cc',
        'src/cpp/common/channel_filter.cc',
        'src/cpp/common/completion_queue_cc.cc',
        'src/cpp/common/core_codegen.cc',
        'src/cpp/common/resource_quota_cc.cc',
        'src/cpp/common/rpc_method.cc',
        'src/cpp/common/version_cc.cc',
        'src/cpp/server/async_generic_service.cc',
        'src/cpp/server/channel_argument_option.cc',
        'src/cpp/server/create_default_thread_pool.cc',
        'src/cpp/server/dynamic_thread_pool.cc',
        'src/cpp/server/health/default_health_check_service.cc',
        'src/cpp/server/health/health.pb.c',
        'src/cpp/server/health/health_check_service.cc',
        'src/cpp/server/health/health_check_service_server_builder_option.cc',
        'src/cpp/server/server_builder.cc',
        'src/cpp/server/server_cc.cc',
        'src/cpp/server/server_context.cc',
        'src/cpp/server/server_credentials.cc',
        'src/cpp/server/server_posix.cc',
        'src/cpp/thread_manager/thread_manager.cc',
        'src/cpp/util/byte_buffer_cc.cc',
        'src/cpp/util/status.cc',
        'src/cpp/util/string_ref.cc',
        'src/cpp/util/time_cc.cc',
        'src/cpp/codegen/codegen_init.cc',
        'src/core/lib/gpr/alloc.h',
        'src/core/lib/gpr/arena.h',
        'src/core/lib/gpr/env.h',
        'src/core/lib/gpr/host_port.h',
        'src/core/lib/gpr/mpscq.h',
        'src/core/lib/gpr/murmur_hash.h',
        'src/core/lib/gpr/spinlock.h',
        'src/core/lib/gpr/string.h',
        'src/core/lib/gpr/string_windows.h',
        'src/core/lib/gpr/time_precise.h',
        'src/core/lib/gpr/tls.h',
        'src/core/lib/gpr/tls_gcc.h',
        'src/core/lib/gpr/tls_msvc.h',
        'src/core/lib/gpr/tls_pthread.h',
        'src/core/lib/gpr/tmpfile.h',
        'src/core/lib/gpr/useful.h',
        'src/core/lib/gprpp/abstract.h',
        'src/core/lib/gprpp/atomic.h',
        'src/core/lib/gprpp/atomic_with_atm.h',
        'src/core/lib/gprpp/atomic_with_std.h',
        'src/core/lib/gprpp/fork.h',
        'src/core/lib/gprpp/manual_constructor.h',
        'src/core/lib/gprpp/memory.h',
        'src/core/lib/gprpp/mutex_lock.h',
        'src/core/lib/gprpp/thd.h',
        'src/core/lib/profiling/timers.h',
        'src/core/ext/transport/chttp2/transport/bin_decoder.h',
        'src/core/ext/transport/chttp2/transport/bin_encoder.h',
        'src/core/ext/transport/chttp2/transport/chttp2_transport.h',
        'src/core/ext/transport/chttp2/transport/flow_control.h',
        'src/core/ext/transport/chttp2/transport/frame.h',
        'src/core/ext/transport/chttp2/transport/frame_data.h',
        'src/core/ext/transport/chttp2/transport/frame_goaway.h',
        'src/core/ext/transport/chttp2/transport/frame_ping.h',
        'src/core/ext/transport/chttp2/transport/frame_rst_stream.h',
        'src/core/ext/transport/chttp2/transport/frame_settings.h',
        'src/core/ext/transport/chttp2/transport/frame_window_update.h',
        'src/core/ext/transport/chttp2/transport/hpack_encoder.h',
        'src/core/ext/transport/chttp2/transport/hpack_parser.h',
        'src/core/ext/transport/chttp2/transport/hpack_table.h',
        'src/core/ext/transport/chttp2/transport/http2_settings.h',
        'src/core/ext/transport/chttp2/transport/huffsyms.h',
        'src/core/ext/transport/chttp2/transport/incoming_metadata.h',
        'src/core/ext/transport/chttp2/transport/internal.h',
        'src/core/ext/transport/chttp2/transport/stream_map.h',
        'src/core/ext/transport/chttp2/transport/varint.h',
        'src/core/ext/transport/chttp2/alpn/alpn.h',
        'src/core/ext/filters/http/client/http_client_filter.h',
        'src/core/ext/filters/http/message_compress/message_compress_filter.h',
        'src/core/ext/filters/http/server/http_server_filter.h',
        'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.h',
        'src/core/lib/security/context/security_context.h',
        'src/core/lib/security/credentials/alts/alts_credentials.h',
        'src/core/lib/security/credentials/composite/composite_credentials.h',
        'src/core/lib/security/credentials/credentials.h',
        'src/core/lib/security/credentials/fake/fake_credentials.h',
        'src/core/lib/security/credentials/google_default/google_default_credentials.h',
        'src/core/lib/security/credentials/iam/iam_credentials.h',
        'src/core/lib/security/credentials/jwt/json_token.h',
        'src/core/lib/security/credentials/jwt/jwt_credentials.h',
        'src/core/lib/security/credentials/jwt/jwt_verifier.h',
        'src/core/lib/security/credentials/local/local_credentials.h',
        'src/core/lib/security/credentials/oauth2/oauth2_credentials.h',
        'src/core/lib/security/credentials/plugin/plugin_credentials.h',
        'src/core/lib/security/credentials/ssl/ssl_credentials.h',
        'src/core/lib/security/security_connector/alts_security_connector.h',
        'src/core/lib/security/security_connector/load_system_roots.h',
        'src/core/lib/security/security_connector/load_system_roots_linux.h',
        'src/core/lib/security/security_connector/local_security_connector.h',
        'src/core/lib/security/security_connector/security_connector.h',
        'src/core/lib/security/transport/auth_filters.h',
        'src/core/lib/security/transport/secure_endpoint.h',
        'src/core/lib/security/transport/security_handshaker.h',
        'src/core/lib/security/transport/target_authority_table.h',
        'src/core/lib/security/transport/tsi_error.h',
        'src/core/lib/security/util/json_util.h',
        'src/core/tsi/alts/crypt/gsec.h',
        'src/core/tsi/alts/frame_protector/alts_counter.h',
        'src/core/tsi/alts/frame_protector/alts_crypter.h',
        'src/core/tsi/alts/frame_protector/alts_frame_protector.h',
        'src/core/tsi/alts/frame_protector/alts_record_protocol_crypter_common.h',
        'src/core/tsi/alts/frame_protector/frame_handler.h',
        'src/core/tsi/alts/handshaker/alts_handshaker_client.h',
        'src/core/tsi/alts/handshaker/alts_tsi_event.h',
        'src/core/tsi/alts/handshaker/alts_tsi_handshaker.h',
        'src/core/tsi/alts/handshaker/alts_tsi_handshaker_private.h',
        'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_integrity_only_record_protocol.h',
        'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_privacy_integrity_record_protocol.h',
        'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol.h',
        'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol_common.h',
        'src/core/tsi/alts/zero_copy_frame_protector/alts_iovec_record_protocol.h',
        'src/core/tsi/alts/zero_copy_frame_protector/alts_zero_copy_grpc_protector.h',
        'src/core/lib/security/credentials/alts/check_gcp_environment.h',
        'src/core/lib/security/credentials/alts/grpc_alts_credentials_options.h',
        'src/core/tsi/alts/handshaker/alts_handshaker_service_api.h',
        'src/core/tsi/alts/handshaker/alts_handshaker_service_api_util.h',
        'src/core/tsi/alts/handshaker/alts_tsi_utils.h',
        'src/core/tsi/alts/handshaker/transport_security_common_api.h',
        'src/core/tsi/alts/handshaker/altscontext.pb.h',
        'src/core/tsi/alts/handshaker/handshaker.pb.h',
        'src/core/tsi/alts/handshaker/transport_security_common.pb.h',
        'src/core/tsi/transport_security.h',
        'src/core/tsi/transport_security_interface.h',
        'src/core/ext/transport/chttp2/client/authority.h',
        'src/core/ext/transport/chttp2/client/chttp2_connector.h',
        'src/core/ext/filters/client_channel/backup_poller.h',
        'src/core/ext/filters/client_channel/client_channel.h',
        'src/core/ext/filters/client_channel/client_channel_channelz.h',
        'src/core/ext/filters/client_channel/client_channel_factory.h',
        'src/core/ext/filters/client_channel/connector.h',
        'src/core/ext/filters/client_channel/http_connect_handshaker.h',
        'src/core/ext/filters/client_channel/http_proxy.h',
        'src/core/ext/filters/client_channel/lb_policy.h',
        'src/core/ext/filters/client_channel/lb_policy_factory.h',
        'src/core/ext/filters/client_channel/lb_policy_registry.h',
        'src/core/ext/filters/client_channel/method_params.h',
        'src/core/ext/filters/client_channel/parse_address.h',
        'src/core/ext/filters/client_channel/proxy_mapper.h',
        'src/core/ext/filters/client_channel/proxy_mapper_registry.h',
        'src/core/ext/filters/client_channel/resolver.h',
        'src/core/ext/filters/client_channel/resolver_factory.h',
        'src/core/ext/filters/client_channel/resolver_registry.h',
        'src/core/ext/filters/client_channel/retry_throttle.h',
        'src/core/ext/filters/client_channel/subchannel.h',
        'src/core/ext/filters/client_channel/subchannel_index.h',
        'src/core/ext/filters/client_channel/uri_parser.h',
        'src/core/ext/filters/deadline/deadline_filter.h',
        'src/core/tsi/alts_transport_security.h',
        'src/core/tsi/fake_transport_security.h',
        'src/core/tsi/local_transport_security.h',
        'src/core/tsi/ssl/session_cache/ssl_session.h',
        'src/core/tsi/ssl/session_cache/ssl_session_cache.h',
        'src/core/tsi/ssl_transport_security.h',
        'src/core/tsi/ssl_types.h',
        'src/core/tsi/transport_security_grpc.h',
        'src/core/tsi/grpc_shadow_boringssl.h',
        'src/core/ext/transport/chttp2/server/chttp2_server.h',
        'src/core/ext/transport/inproc/inproc_transport.h',
        'src/core/lib/avl/avl.h',
        'src/core/lib/backoff/backoff.h',
        'src/core/lib/channel/channel_args.h',
        'src/core/lib/channel/channel_stack.h',
        'src/core/lib/channel/channel_stack_builder.h',
        'src/core/lib/channel/channel_trace.h',
        'src/core/lib/channel/channelz.h',
        'src/core/lib/channel/channelz_registry.h',
        'src/core/lib/channel/connected_channel.h',
        'src/core/lib/channel/context.h',
        'src/core/lib/channel/handshaker.h',
        'src/core/lib/channel/handshaker_factory.h',
        'src/core/lib/channel/handshaker_registry.h',
        'src/core/lib/channel/status_util.h',
        'src/core/lib/compression/algorithm_metadata.h',
        'src/core/lib/compression/compression_internal.h',
        'src/core/lib/compression/message_compress.h',
        'src/core/lib/compression/stream_compression.h',
        'src/core/lib/compression/stream_compression_gzip.h',
        'src/core/lib/compression/stream_compression_identity.h',
        'src/core/lib/debug/stats.h',
        'src/core/lib/debug/stats_data.h',
        'src/core/lib/gprpp/debug_location.h',
        'src/core/lib/gprpp/inlined_vector.h',
        'src/core/lib/gprpp/orphanable.h',
        'src/core/lib/gprpp/ref_counted.h',
        'src/core/lib/gprpp/ref_counted_ptr.h',
        'src/core/lib/http/format_request.h',
        'src/core/lib/http/httpcli.h',
        'src/core/lib/http/parser.h',
        'src/core/lib/iomgr/block_annotate.h',
        'src/core/lib/iomgr/buffer_list.h',
        'src/core/lib/iomgr/call_combiner.h',
        'src/core/lib/iomgr/closure.h',
        'src/core/lib/iomgr/combiner.h',
        'src/core/lib/iomgr/endpoint.h',
        'src/core/lib/iomgr/endpoint_pair.h',
        'src/core/lib/iomgr/error.h',
        'src/core/lib/iomgr/error_internal.h',
        'src/core/lib/iomgr/ev_epoll1_linux.h',
        'src/core/lib/iomgr/ev_epollex_linux.h',
        'src/core/lib/iomgr/ev_epollsig_linux.h',
        'src/core/lib/iomgr/ev_poll_posix.h',
        'src/core/lib/iomgr/ev_posix.h',
        'src/core/lib/iomgr/exec_ctx.h',
        'src/core/lib/iomgr/executor.h',
        'src/core/lib/iomgr/gethostname.h',
        'src/core/lib/iomgr/internal_errqueue.h',
        'src/core/lib/iomgr/iocp_windows.h',
        'src/core/lib/iomgr/iomgr.h',
        'src/core/lib/iomgr/iomgr_custom.h',
        'src/core/lib/iomgr/iomgr_internal.h',
        'src/core/lib/iomgr/iomgr_posix.h',
        'src/core/lib/iomgr/is_epollexclusive_available.h',
        'src/core/lib/iomgr/load_file.h',
        'src/core/lib/iomgr/lockfree_event.h',
        'src/core/lib/iomgr/nameser.h',
        'src/core/lib/iomgr/network_status_tracker.h',
        'src/core/lib/iomgr/polling_entity.h',
        'src/core/lib/iomgr/pollset.h',
        'src/core/lib/iomgr/pollset_custom.h',
        'src/core/lib/iomgr/pollset_set.h',
        'src/core/lib/iomgr/pollset_set_custom.h',
        'src/core/lib/iomgr/pollset_set_windows.h',
        'src/core/lib/iomgr/pollset_windows.h',
        'src/core/lib/iomgr/port.h',
        'src/core/lib/iomgr/resolve_address.h',
        'src/core/lib/iomgr/resolve_address_custom.h',
        'src/core/lib/iomgr/resource_quota.h',
        'src/core/lib/iomgr/sockaddr.h',
        'src/core/lib/iomgr/sockaddr_custom.h',
        'src/core/lib/iomgr/sockaddr_posix.h',
        'src/core/lib/iomgr/sockaddr_utils.h',
        'src/core/lib/iomgr/sockaddr_windows.h',
        'src/core/lib/iomgr/socket_factory_posix.h',
        'src/core/lib/iomgr/socket_mutator.h',
        'src/core/lib/iomgr/socket_utils.h',
        'src/core/lib/iomgr/socket_utils_posix.h',
        'src/core/lib/iomgr/socket_windows.h',
        'src/core/lib/iomgr/sys_epoll_wrapper.h',
        'src/core/lib/iomgr/tcp_client.h',
        'src/core/lib/iomgr/tcp_client_posix.h',
        'src/core/lib/iomgr/tcp_custom.h',
        'src/core/lib/iomgr/tcp_posix.h',
        'src/core/lib/iomgr/tcp_server.h',
        'src/core/lib/iomgr/tcp_server_utils_posix.h',
        'src/core/lib/iomgr/tcp_windows.h',
        'src/core/lib/iomgr/time_averaged_stats.h',
        'src/core/lib/iomgr/timer.h',
        'src/core/lib/iomgr/timer_custom.h',
        'src/core/lib/iomgr/timer_heap.h',
        'src/core/lib/iomgr/timer_manager.h',
        'src/core/lib/iomgr/udp_server.h',
        'src/core/lib/iomgr/unix_sockets_posix.h',
        'src/core/lib/iomgr/wakeup_fd_cv.h',
        'src/core/lib/iomgr/wakeup_fd_pipe.h',
        'src/core/lib/iomgr/wakeup_fd_posix.h',
        'src/core/lib/json/json.h',
        'src/core/lib/json/json_common.h',
        'src/core/lib/json/json_reader.h',
        'src/core/lib/json/json_writer.h',
        'src/core/lib/slice/b64.h',
        'src/core/lib/slice/percent_encoding.h',
        'src/core/lib/slice/slice_hash_table.h',
        'src/core/lib/slice/slice_internal.h',
        'src/core/lib/slice/slice_string_helpers.h',
        'src/core/lib/slice/slice_weak_hash_table.h',
        'src/core/lib/surface/api_trace.h',
        'src/core/lib/surface/call.h',
        'src/core/lib/surface/call_test_only.h',
        'src/core/lib/surface/channel.h',
        'src/core/lib/surface/channel_init.h',
        'src/core/lib/surface/channel_stack_type.h',
        'src/core/lib/surface/completion_queue.h',
        'src/core/lib/surface/completion_queue_factory.h',
        'src/core/lib/surface/event_string.h',
        'src/core/lib/surface/init.h',
        'src/core/lib/surface/lame_client.h',
        'src/core/lib/surface/server.h',
        'src/core/lib/surface/validate_metadata.h',
        'src/core/lib/transport/bdp_estimator.h',
        'src/core/lib/transport/byte_stream.h',
        'src/core/lib/transport/connectivity_state.h',
        'src/core/lib/transport/error_utils.h',
        'src/core/lib/transport/http2_errors.h',
        'src/core/lib/transport/metadata.h',
        'src/core/lib/transport/metadata_batch.h',
        'src/core/lib/transport/pid_controller.h',
        'src/core/lib/transport/service_config.h',
        'src/core/lib/transport/static_metadata.h',
        'src/core/lib/transport/status_conversion.h',
        'src/core/lib/transport/status_metadata.h',
        'src/core/lib/transport/timeout_encoding.h',
        'src/core/lib/transport/transport.h',
        'src/core/lib/transport/transport_impl.h',
        'src/core/lib/debug/trace.h',
        'src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.h',
        'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel.h',
        'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.h',
        'src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.h',
        'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/duration.pb.h',
        'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/timestamp.pb.h',
        'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h',
        'src/core/ext/filters/client_channel/resolver/fake/fake_resolver.h',
        'src/core/ext/filters/client_channel/lb_policy/subchannel_list.h',
        'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver.h',
        'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.h',
        'src/core/ext/filters/max_age/max_age_filter.h',
        'src/core/ext/filters/message_size/message_size_filter.h',
        'src/core/ext/filters/http/client_authority_filter.h',
        'src/core/ext/filters/workarounds/workaround_cronet_compression_filter.h',
        'src/core/ext/filters/workarounds/workaround_utils.h'

    # Internal headers: consumable by the pod's own sources but not exposed
    # to clients of the framework.
    ss.private_header_files = 'include/grpcpp/impl/codegen/core_codegen.h',
        'src/cpp/client/secure_credentials.h',
        'src/cpp/common/secure_auth_context.h',
        'src/cpp/server/secure_server_credentials.h',
        'src/cpp/client/create_channel_internal.h',
        'src/cpp/common/channel_filter.h',
        'src/cpp/server/dynamic_thread_pool.h',
        'src/cpp/server/health/default_health_check_service.h',
        'src/cpp/server/health/health.pb.h',
        'src/cpp/server/thread_pool_interface.h',
        'src/cpp/thread_manager/thread_manager.h',
        'src/core/lib/gpr/alloc.h',
        'src/core/lib/gpr/arena.h',
        'src/core/lib/gpr/env.h',
        'src/core/lib/gpr/host_port.h',
        'src/core/lib/gpr/mpscq.h',
        'src/core/lib/gpr/murmur_hash.h',
        'src/core/lib/gpr/spinlock.h',
        'src/core/lib/gpr/string.h',
        'src/core/lib/gpr/string_windows.h',
        'src/core/lib/gpr/time_precise.h',
        'src/core/lib/gpr/tls.h',
        'src/core/lib/gpr/tls_gcc.h',
        'src/core/lib/gpr/tls_msvc.h',
        'src/core/lib/gpr/tls_pthread.h',
        'src/core/lib/gpr/tmpfile.h',
        'src/core/lib/gpr/useful.h',
        'src/core/lib/gprpp/abstract.h',
        'src/core/lib/gprpp/atomic.h',
        'src/core/lib/gprpp/atomic_with_atm.h',
        'src/core/lib/gprpp/atomic_with_std.h',
        'src/core/lib/gprpp/fork.h',
        'src/core/lib/gprpp/manual_constructor.h',
        'src/core/lib/gprpp/memory.h',
        'src/core/lib/gprpp/mutex_lock.h',
        'src/core/lib/gprpp/thd.h',
        'src/core/lib/profiling/timers.h',
        'src/core/lib/avl/avl.h',
        'src/core/lib/backoff/backoff.h',
        'src/core/lib/channel/channel_args.h',
        'src/core/lib/channel/channel_stack.h',
        'src/core/lib/channel/channel_stack_builder.h',
        'src/core/lib/channel/channel_trace.h',
        'src/core/lib/channel/channelz.h',
        'src/core/lib/channel/channelz_registry.h',
        'src/core/lib/channel/connected_channel.h',
        'src/core/lib/channel/context.h',
        'src/core/lib/channel/handshaker.h',
        'src/core/lib/channel/handshaker_factory.h',
        'src/core/lib/channel/handshaker_registry.h',
        'src/core/lib/channel/status_util.h',
        'src/core/lib/compression/algorithm_metadata.h',
        'src/core/lib/compression/compression_internal.h',
        'src/core/lib/compression/message_compress.h',
        'src/core/lib/compression/stream_compression.h',
        'src/core/lib/compression/stream_compression_gzip.h',
        'src/core/lib/compression/stream_compression_identity.h',
        'src/core/lib/debug/stats.h',
        'src/core/lib/debug/stats_data.h',
        'src/core/lib/gprpp/debug_location.h',
        'src/core/lib/gprpp/inlined_vector.h',
        'src/core/lib/gprpp/orphanable.h',
        'src/core/lib/gprpp/ref_counted.h',
        'src/core/lib/gprpp/ref_counted_ptr.h',
        'src/core/lib/http/format_request.h',
        'src/core/lib/http/httpcli.h',
        'src/core/lib/http/parser.h',
        'src/core/lib/iomgr/block_annotate.h',
        'src/core/lib/iomgr/buffer_list.h',
        'src/core/lib/iomgr/call_combiner.h',
        'src/core/lib/iomgr/closure.h',
        'src/core/lib/iomgr/combiner.h',
        'src/core/lib/iomgr/endpoint.h',
        'src/core/lib/iomgr/endpoint_pair.h',
        'src/core/lib/iomgr/error.h',
        'src/core/lib/iomgr/error_internal.h',
        'src/core/lib/iomgr/ev_epoll1_linux.h',
        'src/core/lib/iomgr/ev_epollex_linux.h',
        'src/core/lib/iomgr/ev_epollsig_linux.h',
        'src/core/lib/iomgr/ev_poll_posix.h',
        'src/core/lib/iomgr/ev_posix.h',
        'src/core/lib/iomgr/exec_ctx.h',
        'src/core/lib/iomgr/executor.h',
        'src/core/lib/iomgr/gethostname.h',
        'src/core/lib/iomgr/internal_errqueue.h',
        'src/core/lib/iomgr/iocp_windows.h',
        'src/core/lib/iomgr/iomgr.h',
        'src/core/lib/iomgr/iomgr_custom.h',
        'src/core/lib/iomgr/iomgr_internal.h',
        'src/core/lib/iomgr/iomgr_posix.h',
        'src/core/lib/iomgr/is_epollexclusive_available.h',
        'src/core/lib/iomgr/load_file.h',
        'src/core/lib/iomgr/lockfree_event.h',
        'src/core/lib/iomgr/nameser.h',
        'src/core/lib/iomgr/network_status_tracker.h',
        'src/core/lib/iomgr/polling_entity.h',
        'src/core/lib/iomgr/pollset.h',
        'src/core/lib/iomgr/pollset_custom.h',
        'src/core/lib/iomgr/pollset_set.h',
        'src/core/lib/iomgr/pollset_set_custom.h',
        'src/core/lib/iomgr/pollset_set_windows.h',
        'src/core/lib/iomgr/pollset_windows.h',
        'src/core/lib/iomgr/port.h',
        'src/core/lib/iomgr/resolve_address.h',
        'src/core/lib/iomgr/resolve_address_custom.h',
        'src/core/lib/iomgr/resource_quota.h',
        'src/core/lib/iomgr/sockaddr.h',
        'src/core/lib/iomgr/sockaddr_custom.h',
        'src/core/lib/iomgr/sockaddr_posix.h',
        'src/core/lib/iomgr/sockaddr_utils.h',
        'src/core/lib/iomgr/sockaddr_windows.h',
        'src/core/lib/iomgr/socket_factory_posix.h',
        'src/core/lib/iomgr/socket_mutator.h',
        'src/core/lib/iomgr/socket_utils.h',
        'src/core/lib/iomgr/socket_utils_posix.h',
        'src/core/lib/iomgr/socket_windows.h',
        'src/core/lib/iomgr/sys_epoll_wrapper.h',
        'src/core/lib/iomgr/tcp_client.h',
        'src/core/lib/iomgr/tcp_client_posix.h',
        'src/core/lib/iomgr/tcp_custom.h',
        'src/core/lib/iomgr/tcp_posix.h',
        'src/core/lib/iomgr/tcp_server.h',
        'src/core/lib/iomgr/tcp_server_utils_posix.h',
        'src/core/lib/iomgr/tcp_windows.h',
        'src/core/lib/iomgr/time_averaged_stats.h',
        'src/core/lib/iomgr/timer.h',
        'src/core/lib/iomgr/timer_custom.h',
        'src/core/lib/iomgr/timer_heap.h',
        'src/core/lib/iomgr/timer_manager.h',
        'src/core/lib/iomgr/udp_server.h',
        'src/core/lib/iomgr/unix_sockets_posix.h',
        'src/core/lib/iomgr/wakeup_fd_cv.h',
        'src/core/lib/iomgr/wakeup_fd_pipe.h',
        'src/core/lib/iomgr/wakeup_fd_posix.h',
        'src/core/lib/json/json.h',
        'src/core/lib/json/json_common.h',
        'src/core/lib/json/json_reader.h',
        'src/core/lib/json/json_writer.h',
        'src/core/lib/slice/b64.h',
        'src/core/lib/slice/percent_encoding.h',
        'src/core/lib/slice/slice_hash_table.h',
        'src/core/lib/slice/slice_internal.h',
        'src/core/lib/slice/slice_string_helpers.h',
        'src/core/lib/slice/slice_weak_hash_table.h',
        'src/core/lib/surface/api_trace.h',
        'src/core/lib/surface/call.h',
        'src/core/lib/surface/call_test_only.h',
        'src/core/lib/surface/channel.h',
        'src/core/lib/surface/channel_init.h',
        'src/core/lib/surface/channel_stack_type.h',
        'src/core/lib/surface/completion_queue.h',
        'src/core/lib/surface/completion_queue_factory.h',
        'src/core/lib/surface/event_string.h',
        'src/core/lib/surface/init.h',
        'src/core/lib/surface/lame_client.h',
        'src/core/lib/surface/server.h',
        'src/core/lib/surface/validate_metadata.h',
        'src/core/lib/transport/bdp_estimator.h',
        'src/core/lib/transport/byte_stream.h',
        'src/core/lib/transport/connectivity_state.h',
        'src/core/lib/transport/error_utils.h',
        'src/core/lib/transport/http2_errors.h',
        'src/core/lib/transport/metadata.h',
        'src/core/lib/transport/metadata_batch.h',
        'src/core/lib/transport/pid_controller.h',
        'src/core/lib/transport/service_config.h',
        'src/core/lib/transport/static_metadata.h',
        'src/core/lib/transport/status_conversion.h',
        'src/core/lib/transport/status_metadata.h',
        'src/core/lib/transport/timeout_encoding.h',
        'src/core/lib/transport/transport.h',
        'src/core/lib/transport/transport_impl.h',
        'src/core/lib/debug/trace.h',
        'src/core/ext/transport/inproc/inproc_transport.h'
  end

  # Optional protobuf/codegen integration headers.
  s.subspec 'Protobuf' do |ss|
    ss.header_mappings_dir = 'include/grpcpp'
    ss.dependency "#{s.name}/Interface", version

    # NOTE(review): config_protobuf.h is listed twice below — harmless to
    # CocoaPods, but the generator template could drop the duplicate.
    ss.source_files = 'include/grpcpp/impl/codegen/proto_buffer_reader.h',
        'include/grpcpp/impl/codegen/proto_buffer_writer.h',
        'include/grpcpp/impl/codegen/proto_utils.h',
        'include/grpcpp/impl/codegen/config_protobuf.h',
        'include/grpcpp/impl/codegen/config_protobuf.h'
  end

  # Rewrite quoted nanopb includes ("pb*.h") into angle-bracket
  # <nanopb/pb*.h> form so they resolve against the nanopb pod; the
  # temporary *.grpc_back sed backups are removed afterwards.
  s.prepare_command = <<-END_OF_COMMAND
    find src/cpp/ -type f ! -path '*.grpc_back' -print0 | xargs -0 -L1 sed -E -i'.grpc_back' 's;#include "(pb(_.*)?\\.h)";#include <nanopb/\\1>;g'
    find src/cpp/ -type f -path '*.grpc_back' -print0 | xargs -0 rm
    find src/core/ -type f ! -path '*.grpc_back' -print0 | xargs -0 -L1 sed -E -i'.grpc_back' 's;#include "(pb(_.*)?\\.h)";#include <nanopb/\\1>;g'
    find src/core/ -type f -path '*.grpc_back' -print0 | xargs -0 rm
  END_OF_COMMAND
end
| 63.085106 | 147 | 0.522406 |
fffa082baacfb5780980e42212dc0024ac6a8f9e | 1,227 | # frozen_string_literal: true
module Gitlab
module SlashCommands
module Presenters
class IssueSearch < Presenters::Base
include Presenters::IssueBase
def present
text = if resource.count >= 5
"Here are the first 5 issues I found:"
elsif resource.one?
"Here is the only issue I found:"
else
"Here are the #{resource.count} issues I found:"
end
ephemeral_response(text: text, attachments: attachments)
end
private
def attachments
resource.map do |issue|
url = "[#{issue.to_reference}](#{url_for([namespace, project, issue])})"
{
color: color(issue),
fallback: "#{issue.to_reference} #{issue.title}",
text: "#{url} · #{issue.title} (#{status_text(issue)})",
mrkdwn_in: [
:text
]
}
end
end
def project
@project ||= resource.first.project
end
def namespace
@namespace ||= project.namespace.becomes(Namespace)
end
end
end
end
end
| 24.54 | 84 | 0.502037 |
218165b53d4d8a7e73a2a4262d07a5ac770744c1 | 1,596 | require 'yaml'
# API endpoints for managing a user's web hooks (list, register, remove,
# and test-fire). All actions require an authenticated API key.
class Api::V1::WebHooksController < Api::BaseController
  skip_before_filter :verify_authenticity_token

  before_filter :authenticate_with_api_key
  before_filter :verify_authenticated_user
  before_filter :find_gem_by_name, :except => :index

  # Lists every hook registered by the current user, in the requested format.
  def index
    hooks = current_user.all_hooks

    respond_to do |format|
      format.json { render :json => hooks }
      format.xml  { render :xml => hooks }
      format.yaml { render :text => hooks.to_yaml }
    end
  end

  # Registers a new hook for @url (and @rubygem, when given).
  def create
    hook = current_user.web_hooks.build(:url => @url, :rubygem => @rubygem)

    if hook.save
      render :text => hook.success_message, :status => :created
    else
      render :text => hook.errors.full_messages, :status => :conflict
    end
  end

  # Deletes the hook matching the given gem/URL pair, if the user owns one.
  def remove
    hook = current_user.web_hooks.find_by_rubygem_id_and_url(@rubygem.try(:id), @url)

    if hook.try(:destroy)
      render :text => hook.removed_message
    else
      render :text => "No such webhook exists under your account.",
             :status => :not_found
    end
  end

  # Sends a test payload to @url without persisting the hook.
  def fire
    hook = current_user.web_hooks.new(:url => @url)
    # Fall back to the "gemcutter" gem when no gem name was supplied.
    @rubygem ||= Rubygem.find_by_name("gemcutter")

    if hook.fire(request.host_with_port, @rubygem, @rubygem.versions.most_recent, false)
      render :text => hook.deployed_message(@rubygem)
    else
      render :text => hook.failed_message(@rubygem), :status => :bad_request
    end
  end
end
| 28 | 79 | 0.642857 |
b9ba94f6ce3d52060c24dd0303cdf58ee630c809 | 5,501 | require 'spec_helper'
# ChefSpec coverage for mysql_service_test::single, stepping into the
# mysql_service custom resource on a CentOS node.
# NOTE(review): the description says centos-6.5 but the runner pins 6.7,
# and the cached name says "57" while node['mysql']['version'] is '5.6' —
# confirm which platform/version this suite is actually meant to target.
describe 'mysql_service_test::single on centos-6.5' do
  # Converged runner, built once and memoized across all examples below.
  cached(:centos_6_service_57_single) do
    ChefSpec::SoloRunner.new(
      platform: 'centos',
      version: '6.7',
      step_into: 'mysql_service'
    ) do |node|
      # NOTE(review): node.set is deprecated in newer Chef releases;
      # node.normal / node.override is the modern equivalent — verify
      # against the pinned Chef version before changing.
      node.set['mysql']['version'] = '5.6'
    end.converge('mysql_service_test::single')
  end
  before do
    # Force the redhat (sysvinit) service provider and pretend the system
    # database already exists, so the initialize guards are exercised.
    allow(Chef::Platform::ServiceHelpers).to receive(:service_resource_providers).and_return([:redhat])
    stub_command('/usr/bin/test -f /var/lib/mysql-default/mysql/user.frm').and_return(true)
  end
  # Resource in mysql_service_test::single
  context 'compiling the test recipe' do
    it 'creates mysql_service[default]' do
      expect(centos_6_service_57_single).to create_mysql_service('default')
    end
  end
  # mysql_service resource internal implementation
  context 'stepping into mysql_service[default] resource' do
    it 'installs package[default :create mysql-community-server]' do
      expect(centos_6_service_57_single).to install_package('default :create mysql-community-server')
        .with(package_name: 'mysql-community-server', version: nil)
    end
    it 'stops service[default :create mysqld]' do
      expect(centos_6_service_57_single).to disable_service('default :create mysqld')
      expect(centos_6_service_57_single).to stop_service('default :create mysqld')
    end
    it 'creates group[default :create mysql]' do
      expect(centos_6_service_57_single).to create_group('default :create mysql')
        .with(group_name: 'mysql')
    end
    it 'creates user[default :create mysql]' do
      expect(centos_6_service_57_single).to create_user('default :create mysql')
        .with(username: 'mysql')
    end
    # The stock config files are removed in favor of /etc/mysql-default.
    it 'deletes file[default :create /etc/mysql/my.cnf]' do
      expect(centos_6_service_57_single).to delete_file('default :create /etc/mysql/my.cnf')
        .with(path: '/etc/mysql/my.cnf')
    end
    it 'deletes file[default :create /etc/my.cnf]' do
      expect(centos_6_service_57_single).to delete_file('default :create /etc/my.cnf')
        .with(path: '/etc/my.cnf')
    end
    it 'creates link[default :create /usr/share/my-default.cnf]' do
      expect(centos_6_service_57_single).to create_link('default :create /usr/share/my-default.cnf')
        .with(
          target_file: '/usr/share/my-default.cnf',
          to: '/etc/mysql-default/my.cnf'
        )
    end
    it 'creates directory[default :create /etc/mysql-default]' do
      expect(centos_6_service_57_single).to create_directory('default :create /etc/mysql-default')
        .with(
          path: '/etc/mysql-default',
          owner: 'mysql',
          group: 'mysql',
          mode: '0750',
          recursive: true
        )
    end
    it 'creates directory[default :create /etc/mysql-default/conf.d]' do
      expect(centos_6_service_57_single).to create_directory('default :create /etc/mysql-default/conf.d')
        .with(
          path: '/etc/mysql-default/conf.d',
          owner: 'mysql',
          group: 'mysql',
          mode: '0750',
          recursive: true
        )
    end
    it 'creates directory[default :create /var/run/mysql-default]' do
      expect(centos_6_service_57_single).to create_directory('default :create /var/run/mysql-default')
        .with(
          path: '/var/run/mysql-default',
          owner: 'mysql',
          group: 'mysql',
          mode: '0755',
          recursive: true
        )
    end
    it 'creates directory[default :create /var/log/mysql-default]' do
      expect(centos_6_service_57_single).to create_directory('default :create /var/log/mysql-default')
        .with(
          path: '/var/log/mysql-default',
          owner: 'mysql',
          group: 'mysql',
          mode: '0750',
          recursive: true
        )
    end
    it 'creates directory[default :create /var/lib/mysql-default]' do
      expect(centos_6_service_57_single).to create_directory('default :create /var/lib/mysql-default')
        .with(
          path: '/var/lib/mysql-default',
          owner: 'mysql',
          group: 'mysql',
          mode: '0750',
          recursive: true
        )
    end
    it 'creates template[default :create /etc/mysql-default/my.cnf]' do
      expect(centos_6_service_57_single).to create_template('default :create /etc/mysql-default/my.cnf')
        .with(
          path: '/etc/mysql-default/my.cnf',
          owner: 'mysql',
          group: 'mysql',
          mode: '0600'
        )
    end
    # Because the user.frm stub above reports the database as already
    # initialized, the bootstrap bash resources must NOT run.
    it 'runs bash[default :create initialize mysql database]' do
      expect(centos_6_service_57_single).to_not run_bash('default :create initialize mysql database')
        .with(
          cwd: '/var/lib/mysql-default'
        )
    end
    it 'runs bash[default :create initial records]' do
      expect(centos_6_service_57_single).to_not run_bash('default :create initial records')
    end
    it 'create template[default :start /etc/init.d/mysql-default]' do
      expect(centos_6_service_57_single).to create_template('default :start /etc/init.d/mysql-default')
        .with(
          path: '/etc/init.d/mysql-default',
          source: 'sysvinit/mysqld.erb',
          owner: 'root',
          group: 'root',
          mode: '0755',
          cookbook: 'mysql'
        )
    end
    it 'starts service[default :start mysql-default]' do
      expect(centos_6_service_57_single).to start_service('default :start mysql-default')
        .with(
          service_name: 'mysql-default'
        )
    end
  end
end
| 33.95679 | 105 | 0.640065 |
e21d5948fd9fb07dbb758f26546d1fd189a0d274 | 1,663 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
# Auto-generated snapshot (see header above): change it via migrations,
# never by editing this block directly.
ActiveRecord::Schema.define(version: 2021_02_02_191547) do
  # Posts authored by users. user_id is a plain integer column — no
  # database-level foreign key or index is declared here.
  create_table "posts", force: :cascade do |t|
    t.string "title"
    t.text "body"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.integer "user_id"
  end
  # Devise-style users table (encrypted_password, reset/remember columns)
  # with unique lookups on email, reset token, and username.
  create_table "users", force: :cascade do |t|
    t.string "email", default: "", null: false
    t.string "encrypted_password", default: "", null: false
    t.string "reset_password_token"
    t.datetime "reset_password_sent_at"
    t.datetime "remember_created_at"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.string "name"
    t.string "username"
    t.index ["email"], name: "index_users_on_email", unique: true
    t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
    t.index ["username"], name: "index_users_on_username", unique: true
  end
end
| 42.641026 | 95 | 0.736621 |
1c2c511d5c687b21905d1fe7c417da73ec816e3a | 1,017 | require_relative '../../spec_helper'
require_relative 'shared/extract_range_matched'
require 'strscan'
# Spec for StringScanner#pre_match: the portion of the string before the
# most recent successful match (regexp pre-match semantics).
describe "StringScanner#pre_match" do
  before :each do
    @s = StringScanner.new("This is a test")
  end
  it "returns the pre-match (in the regular expression sense) of the last scan" do
    # nil before any scan has happened.
    @s.pre_match.should == nil
    @s.scan(/\w+\s/)
    @s.pre_match.should == ""
    # getch/get_byte also count as matches, advancing the pre-match.
    @s.getch
    @s.pre_match.should == "This "
    @s.get_byte
    @s.pre_match.should == "This i"
    @s.get_byte
    @s.pre_match.should == "This is"
  end
  it "returns nil if there's no match" do
    @s.scan(/\s+/)
    @s.pre_match.should == nil
  end
  it "is more than just the data from the last match" do
    @s.scan(/\w+/)
    @s.scan_until(/a te/)
    @s.pre_match.should == "This is "
  end
  it "is not changed when the scanner's position changes" do
    @s.scan_until(/\s+/)
    @s.pre_match.should == "This"
    # Moving pos manually must not recompute the last match's pre-match.
    @s.pos -= 1
    @s.pre_match.should == "This"
  end
  it_behaves_like :extract_range_matched, :pre_match
end
| 24.214286 | 82 | 0.639135 |
# Adds the coupons table: a discount code owned by a profile, valid between
# start_date and end_date and limited to max_redemptions uses.
class CreateCoupons < ActiveRecord::Migration[5.0]
  def change
    create_table :coupons do |t|
      t.string  :title
      t.text    :description
      # NOTE(review): "ammount" looks like a typo for :amount; renaming a
      # deployed column needs a follow-up migration, so it is kept as-is.
      t.decimal :ammount
      t.integer :max_redemptions, default: 0
      t.string  :code
      t.string  :state, default: 'available'
      t.date    :start_date
      t.date    :end_date
      t.references :profile, foreign_key: true

      t.timestamps
    end
  end
end
| 22.944444 | 50 | 0.644068 |
# Bare factory for BenefitMarkets::MarketPolicies::MarketPolicy; any
# attributes are supplied per-test via overrides.
FactoryBot.define do
  factory :benefit_markets_market_policies_market_policy,
          class: 'BenefitMarkets::MarketPolicies::MarketPolicy' do
  end
end
| 25.166667 | 114 | 0.81457 |
ab932cd67733eaa134a1091d287d2a5ec34821e8 | 2,504 | #
# Copyright:: Copyright (c) 2012 Opscode, Inc.
# Copyright:: Copyright (c) 2014 GitLab.com
# Copyright:: Copyright (c) 2015 SKIF-Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Bootstrap the Labus config directory at compile time (run_action) so the
# attribute merge below can read /etc/labus/config.rb on the first run.
directory "/etc/labus" do
  owner "root"
  group "root"
  mode "0775"
  action :nothing
end.run_action(:create)
Labus[:node] = node
# Merge operator-provided overrides, if the config file exists.
# Fix: File.exists? is a deprecated alias removed in Ruby 3.2 — use exist?.
if File.exist?("/etc/labus/config.rb")
  Labus.from_file("/etc/labus/config.rb")
end
node.consume_attributes(Labus.generate_config(node['fqdn']))
# salsa: what is this for?
#if File.exists?("/var/opt/labus/bootstrapped")
#  node.set['labus']['bootstrap']['enable'] = false
#end
directory "/var/opt/labus" do
  owner "root"
  group "root"
  mode "0755"
  recursive true
  action :create
end
include_recipe "labus::web-server"
# proceed to Gitlab configuration
#include_recipe "gitlab::default"
# back to Labus specific setup tasks
include_recipe "labus::labus-redmine"
include_recipe "labus::labus-simplesamlphp"
include_recipe "labus::labus-indico"
# salsa: todo
#if node['labus']['labus-ejabber']['enable']
#  include_recipe "gitlab::users"
#  include_recipe "gitlab::gitlab-shell"
#  include_recipe "gitlab::gitlab-rails"
#end
#if node['gitlab']['gitlab-ci']['enable']
#  include_recipe "gitlab::gitlab-ci"
#end
#include_recipe "gitlab::selinux"
#include_recipe "gitlab::cron"
# Create dummy services to receive notifications, in case
# the corresponding service recipe is not loaded below.
[
  "redmine-unicorn",
  "postgresql",
  "php-fpm"
].each do |dummy|
  service dummy do
    supports []
  end
end
#configure gitlab postgresql for labus usage
#????
#include_recipe "labus::postgresql"
# salsa: disable nginx in gitlab
# Configure Services
# Each service is either configured or explicitly disabled, driven by
# node["labus"][<service>]["enable"].
[
  "redmine-unicorn",
  "postgresql",
  "nginx",
  "ldap",
  "php-fpm"
].each do |service|
  if node["labus"][service]["enable"]
    include_recipe "labus::#{service}"
  else
    include_recipe "labus::#{service}_disable"
  end
end
include_recipe "labus::database_migrations"
| 24.31068 | 74 | 0.724042 |
b96794076af756db3e8ae0d334ad0951f6013f17 | 801 | # frozen_string_literal: true
module YARD::CodeObjects
  # Represents an instance method of a module that was mixed into the class
  # scope of another namespace.
  #
  # @see MethodObject
  class ExtendedMethodObject
    # Strip (almost) every inherited method so method_missing below can
    # transparently proxy any call to the wrapped MethodObject. Methods
    # beginning with "__" and #object_id are kept, since Ruby warns when
    # those are undefined.
    instance_methods.each {|m| undef_method(m) unless m =~ /^__/ || m.to_sym == :object_id }
    # @return [Symbol] always +:class+
    def scope; :class end
    # Sets up a delegate for {MethodObject} obj.
    #
    # @param [MethodObject] obj the instance method to treat as a mixed in
    # class method on another namespace.
    def initialize(obj) @del = obj end
    # Sends all methods to the {MethodObject} assigned in {#initialize}
    # @see #initialize
    # @see MethodObject
    def method_missing(sym, *args, &block) @del.__send__(sym, *args, &block) end
  end
end
| 32.04 | 92 | 0.684145 |
# Namespace stub: declares Pvcglue::Deploy; behavior is defined elsewhere.
module Pvcglue
  class Deploy
  end
end
| 8 | 14 | 0.75 |
module Enom
  module Commands
    # CLI command: reports registration availability for a domain name.
    class CheckDomain
      # args:    positional CLI arguments; the first (consumed via shift)
      #          is the domain name to check.
      # options: accepted for interface compatibility; unused here.
      # Prints and returns a human-readable availability message.
      def execute(args, options = {})
        domain_name = args.shift
        availability = Domain.check(domain_name)
        message = "#{domain_name} is #{availability}"
        puts message
        message
      end
    end
  end
end
| 18.857143 | 41 | 0.57197 |
5da01cee954a4d7411d4a0f34e1c5f4967968d1e | 173 | require 'test_helper'
class UsersControllerTest < ActionDispatch::IntegrationTest
  # Smoke test: the signup page (users#new, presumably — confirm in routes)
  # responds with 200.
  test "should get new" do
    get signup_path
    assert_response :success
  end
end
| 15.727273 | 59 | 0.757225 |
1c79b2ddc38df7a5751b5ef5f0514d9206ab70ed | 2,437 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'User follows pipeline suggest nudge spec when feature is enabled', :js do
include CookieHelper
let(:project) { create(:project, :empty_repo) }
let(:user) { project.owner }
describe 'viewing the new blob page' do
before do
sign_in(user)
end
context 'when the page is loaded from the link using the suggest_gitlab_ci_yml param' do
before do
visit namespace_project_new_blob_path(namespace_id: project.namespace, project_id: project, id: 'master', suggest_gitlab_ci_yml: 'true')
end
it 'pre-fills .gitlab-ci.yml for file name' do
file_name = page.find_by_id('file_name')
expect(file_name.value).to have_content('.gitlab-ci.yml')
end
it 'chooses the .gitlab-ci.yml Template Type' do
template_type = page.find(:css, '.template-type-selector .dropdown-toggle-text')
expect(template_type.text).to have_content('.gitlab-ci.yml')
end
it 'displays suggest_gitlab_ci_yml popover' do
page.find(:css, '.gitlab-ci-yml-selector').click
popover_selector = '.suggest-gitlab-ci-yml'
expect(page).to have_css(popover_selector, visible: true)
page.within(popover_selector) do
expect(page).to have_content('1/2: Choose a template')
end
end
it 'sets the commit cookie when the Commit button is clicked' do
click_button 'Commit changes'
expect(get_cookie("suggest_gitlab_ci_yml_commit_#{project.id}")).to be_present
end
end
context 'when the page is visited without the param' do
before do
visit namespace_project_new_blob_path(namespace_id: project.namespace, project_id: project, id: 'master')
end
it 'does not pre-fill .gitlab-ci.yml for file name' do
file_name = page.find_by_id('file_name')
expect(file_name.value).not_to have_content('.gitlab-ci.yml')
end
it 'does not choose the .gitlab-ci.yml Template Type' do
template_type = page.find(:css, '.template-type-selector .dropdown-toggle-text')
expect(template_type.text).to have_content('Select a template type')
end
it 'does not display suggest_gitlab_ci_yml popover' do
popover_selector = '.b-popover.suggest-gitlab-ci-yml'
expect(page).not_to have_css(popover_selector, visible: true)
end
end
end
end
| 31.649351 | 144 | 0.682396 |
# Adds the emails table mapping address strings to users.
class CreateEmails < ActiveRecord::Migration
  def change
    create_table :emails do |t|
      # NOTE(review): user_id is declared as a string, not an integer FK —
      # confirm this is intentional.
      t.string :user_id, null: false
      t.string :email, null: false

      t.timestamps
    end
  end
end
| 19.5 | 44 | 0.676923 |
# Homebrew-cask definition for the Actiontastic app (beta 8 build).
class Actiontastic < Cask
  version 'beta8'
  sha256 'f6536cd0cb477dd3867ccf359469b7a4377fb307a0307b27566c1f4219b79c44'
  url 'https://s3.amazonaws.com/kaboomerang/actiontastic/ActiontasticBeta8.zip'
  homepage 'http://actiontastic.com'
  # The archive unpacks into a versioned folder, hence the nested link path.
  link 'ActiontasticBeta8/Actiontastic.app'
end
| 28.7 | 79 | 0.815331 |
# API-only routing: all endpoints live under /api/v1.
Rails.application.routes.draw do
  # For details on the DSL available within this file, see https://guides.rubyonrails.org/routing.html
  # resources :questions
  # resources :rounds
  # resources :games
  namespace :api do
    namespace :v1 do
      resources :questions
      resources :rounds
      resources :games
      resources :users, only: %i[create index destroy update]

      # Authentication and profile endpoints.
      post '/login', to: 'auth#create'
      post '/new', to: 'users#create'
      get '/profile', to: 'users#profile'
    end
  end
end
| 25.238095 | 102 | 0.650943 |
e2018fbe3cc44edb133d9c9b5257718fcdab83da | 381 | if defined?(Spree::Admin::BaseController)
require File.expand_path('../../base_controller_decorator', __FILE__)
Spree::Admin::BaseController.class_eval do
protected
def model_class
const_name = controller_name.classify
if Spree.const_defined?(const_name)
return "Spree::#{const_name}".constantize
end
nil
end
end
end
| 27.214286 | 71 | 0.674541 |
6a669fa92d207e988f42385231c2bbc195b3d53c | 1,279 | ##########################################################################
# Copyright 2016 ThoughtWorks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
module ApiV3
  module Shared
    module Stages
      module Tasks
        # Serializes/deserializes an Ant build task for the v3 config API.
        class AntTaskRepresenter < BaseTaskRepresenter
          alias_method :task, :represented
          # Maps internal config-error keys to their API field names.
          # Frozen so this shared lookup table cannot be mutated at runtime
          # (it was previously a mutable constant).
          ERROR_KEYS = {
            'buildFile' => 'build_file',
            'onCancelConfig' => 'on_cancel',
            'runIf' => 'run_if'
          }.freeze
          # Blank values are skipped on parse (SkipParseOnBlank), presumably
          # so partial payloads don't blank out existing settings — confirm
          # against BaseTaskRepresenter.
          property :working_directory, skip_parse: SkipParseOnBlank
          property :build_file, skip_parse: SkipParseOnBlank
          property :target, skip_parse: SkipParseOnBlank
        end
      end
    end
  end
end
e2b752cb4997ec383de90d55e686ed3de03c5e5f | 18,501 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2018_07_01
#
# A service client - single point of access to the REST API.
#
class NetworkManagementClient < MsRestAzure::AzureServiceClient
include MsRestAzure
include MsRestAzure::Serialization
# @return [String] the base URI of the service.
attr_accessor :base_url
# @return Credentials needed for the client to connect to Azure.
attr_reader :credentials
# @return [String] The subscription credentials which uniquely identify the
# Microsoft Azure subscription. The subscription ID forms part of the URI
# for every service call.
attr_accessor :subscription_id
# @return [String] Client API version.
attr_reader :api_version
# @return [String] The preferred language for the response.
attr_accessor :accept_language
# @return [Integer] The retry timeout in seconds for Long Running
# Operations. Default value is 30.
attr_accessor :long_running_operation_retry_timeout
# @return [Boolean] Whether a unique x-ms-client-request-id should be
# generated. When set to true a unique x-ms-client-request-id value is
# generated and included in each request. Default is true.
attr_accessor :generate_client_request_id
# @return [AzureFirewalls] azure_firewalls
attr_reader :azure_firewalls
# @return [ApplicationGateways] application_gateways
attr_reader :application_gateways
# @return [ApplicationSecurityGroups] application_security_groups
attr_reader :application_security_groups
# @return [DdosProtectionPlans] ddos_protection_plans
attr_reader :ddos_protection_plans
# @return [AvailableEndpointServices] available_endpoint_services
attr_reader :available_endpoint_services
# @return [ExpressRouteCircuitAuthorizations]
# express_route_circuit_authorizations
attr_reader :express_route_circuit_authorizations
# @return [ExpressRouteCircuitPeerings] express_route_circuit_peerings
attr_reader :express_route_circuit_peerings
# @return [ExpressRouteCircuitConnections]
# express_route_circuit_connections
attr_reader :express_route_circuit_connections
# @return [ExpressRouteCircuits] express_route_circuits
attr_reader :express_route_circuits
# @return [ExpressRouteServiceProviders] express_route_service_providers
attr_reader :express_route_service_providers
# @return [ExpressRouteCrossConnections] express_route_cross_connections
attr_reader :express_route_cross_connections
# @return [ExpressRouteCrossConnectionPeerings]
# express_route_cross_connection_peerings
attr_reader :express_route_cross_connection_peerings
# @return [LoadBalancers] load_balancers
attr_reader :load_balancers
# @return [LoadBalancerBackendAddressPools]
# load_balancer_backend_address_pools
attr_reader :load_balancer_backend_address_pools
# @return [LoadBalancerFrontendIPConfigurations]
# load_balancer_frontend_ipconfigurations
attr_reader :load_balancer_frontend_ipconfigurations
# @return [InboundNatRules] inbound_nat_rules
attr_reader :inbound_nat_rules
# @return [LoadBalancerLoadBalancingRules]
# load_balancer_load_balancing_rules
attr_reader :load_balancer_load_balancing_rules
# @return [LoadBalancerNetworkInterfaces] load_balancer_network_interfaces
attr_reader :load_balancer_network_interfaces
# @return [LoadBalancerProbes] load_balancer_probes
attr_reader :load_balancer_probes
# @return [NetworkInterfaces] network_interfaces
attr_reader :network_interfaces
# @return [NetworkInterfaceIPConfigurations]
# network_interface_ipconfigurations
attr_reader :network_interface_ipconfigurations
# @return [NetworkInterfaceLoadBalancers] network_interface_load_balancers
attr_reader :network_interface_load_balancers
# @return [NetworkSecurityGroups] network_security_groups
attr_reader :network_security_groups
# @return [SecurityRules] security_rules
attr_reader :security_rules
# @return [DefaultSecurityRules] default_security_rules
attr_reader :default_security_rules
# @return [NetworkWatchers] network_watchers
attr_reader :network_watchers
# @return [PacketCaptures] packet_captures
attr_reader :packet_captures
# @return [ConnectionMonitors] connection_monitors
attr_reader :connection_monitors
# @return [Operations] operations
attr_reader :operations
# @return [PublicIPAddresses] public_ipaddresses
attr_reader :public_ipaddresses
# @return [PublicIPPrefixes] public_ipprefixes
attr_reader :public_ipprefixes
# @return [RouteFilters] route_filters
attr_reader :route_filters
# @return [RouteFilterRules] route_filter_rules
attr_reader :route_filter_rules
# @return [RouteTables] route_tables
attr_reader :route_tables
# @return [Routes] routes
attr_reader :routes
# @return [BgpServiceCommunities] bgp_service_communities
attr_reader :bgp_service_communities
# @return [Usages] usages
attr_reader :usages
# @return [VirtualNetworks] virtual_networks
attr_reader :virtual_networks
# @return [Subnets] subnets
attr_reader :subnets
# @return [VirtualNetworkPeerings] virtual_network_peerings
attr_reader :virtual_network_peerings
# @return [VirtualNetworkGateways] virtual_network_gateways
attr_reader :virtual_network_gateways
# @return [VirtualNetworkGatewayConnections]
# virtual_network_gateway_connections
attr_reader :virtual_network_gateway_connections
# @return [LocalNetworkGateways] local_network_gateways
attr_reader :local_network_gateways
# @return [VirtualWANs] virtual_wans
attr_reader :virtual_wans
# @return [VpnSites] vpn_sites
attr_reader :vpn_sites
# @return [VpnSitesConfiguration] vpn_sites_configuration
attr_reader :vpn_sites_configuration
# @return [VirtualHubs] virtual_hubs
attr_reader :virtual_hubs
# @return [HubVirtualNetworkConnections] hub_virtual_network_connections
attr_reader :hub_virtual_network_connections
# @return [VpnGateways] vpn_gateways
attr_reader :vpn_gateways
# @return [VpnConnections] vpn_connections
attr_reader :vpn_connections
# @return [ServiceEndpointPolicies] service_endpoint_policies
attr_reader :service_endpoint_policies
# @return [ServiceEndpointPolicyDefinitions]
# service_endpoint_policy_definitions
attr_reader :service_endpoint_policy_definitions
#
# Creates initializes a new instance of the NetworkManagementClient class.
# @param credentials [MsRest::ServiceClientCredentials] credentials to authorize HTTP requests made by the service client.
# @param base_url [String] the base URI of the service.
# @param options [Array] filters to be applied to the HTTP requests.
#
def initialize(credentials = nil, base_url = nil, options = nil)
super(credentials, options)
@base_url = base_url || 'https://management.azure.com'
fail ArgumentError, 'invalid type of credentials input parameter' unless credentials.is_a?(MsRest::ServiceClientCredentials) unless credentials.nil?
@credentials = credentials
@azure_firewalls = AzureFirewalls.new(self)
@application_gateways = ApplicationGateways.new(self)
@application_security_groups = ApplicationSecurityGroups.new(self)
@ddos_protection_plans = DdosProtectionPlans.new(self)
@available_endpoint_services = AvailableEndpointServices.new(self)
@express_route_circuit_authorizations = ExpressRouteCircuitAuthorizations.new(self)
@express_route_circuit_peerings = ExpressRouteCircuitPeerings.new(self)
@express_route_circuit_connections = ExpressRouteCircuitConnections.new(self)
@express_route_circuits = ExpressRouteCircuits.new(self)
@express_route_service_providers = ExpressRouteServiceProviders.new(self)
@express_route_cross_connections = ExpressRouteCrossConnections.new(self)
@express_route_cross_connection_peerings = ExpressRouteCrossConnectionPeerings.new(self)
@load_balancers = LoadBalancers.new(self)
@load_balancer_backend_address_pools = LoadBalancerBackendAddressPools.new(self)
@load_balancer_frontend_ipconfigurations = LoadBalancerFrontendIPConfigurations.new(self)
@inbound_nat_rules = InboundNatRules.new(self)
@load_balancer_load_balancing_rules = LoadBalancerLoadBalancingRules.new(self)
@load_balancer_network_interfaces = LoadBalancerNetworkInterfaces.new(self)
@load_balancer_probes = LoadBalancerProbes.new(self)
@network_interfaces = NetworkInterfaces.new(self)
@network_interface_ipconfigurations = NetworkInterfaceIPConfigurations.new(self)
@network_interface_load_balancers = NetworkInterfaceLoadBalancers.new(self)
@network_security_groups = NetworkSecurityGroups.new(self)
@security_rules = SecurityRules.new(self)
@default_security_rules = DefaultSecurityRules.new(self)
@network_watchers = NetworkWatchers.new(self)
@packet_captures = PacketCaptures.new(self)
@connection_monitors = ConnectionMonitors.new(self)
@operations = Operations.new(self)
@public_ipaddresses = PublicIPAddresses.new(self)
@public_ipprefixes = PublicIPPrefixes.new(self)
@route_filters = RouteFilters.new(self)
@route_filter_rules = RouteFilterRules.new(self)
@route_tables = RouteTables.new(self)
@routes = Routes.new(self)
@bgp_service_communities = BgpServiceCommunities.new(self)
@usages = Usages.new(self)
@virtual_networks = VirtualNetworks.new(self)
@subnets = Subnets.new(self)
@virtual_network_peerings = VirtualNetworkPeerings.new(self)
@virtual_network_gateways = VirtualNetworkGateways.new(self)
@virtual_network_gateway_connections = VirtualNetworkGatewayConnections.new(self)
@local_network_gateways = LocalNetworkGateways.new(self)
@virtual_wans = VirtualWANs.new(self)
@vpn_sites = VpnSites.new(self)
@vpn_sites_configuration = VpnSitesConfiguration.new(self)
@virtual_hubs = VirtualHubs.new(self)
@hub_virtual_network_connections = HubVirtualNetworkConnections.new(self)
@vpn_gateways = VpnGateways.new(self)
@vpn_connections = VpnConnections.new(self)
@service_endpoint_policies = ServiceEndpointPolicies.new(self)
@service_endpoint_policy_definitions = ServiceEndpointPolicyDefinitions.new(self)
@api_version = '2018-07-01'
@accept_language = 'en-US'
@long_running_operation_retry_timeout = 30
@generate_client_request_id = true
add_telemetry
end
#
# Makes a request and returns the body of the response.
# @param method [Symbol] with any of the following values :get, :put, :post, :patch, :delete.
# @param path [String] the path, relative to {base_url}.
# @param options [Hash{String=>String}] specifying any request options like :body.
# @return [Hash{String=>String}] containing the body of the response.
# Example:
#
# request_content = "{'location':'westus','tags':{'tag1':'val1','tag2':'val2'}}"
# path = "/path"
# options = {
# body: request_content,
# query_params: {'api-version' => '2016-02-01'}
# }
# result = @client.make_request(:put, path, options)
#
def make_request(method, path, options = {})
result = make_request_with_http_info(method, path, options)
result.body unless result.nil?
end
#
# Makes a request and returns the operation response.
# @param method [Symbol] with any of the following values :get, :put, :post, :patch, :delete.
# @param path [String] the path, relative to {base_url}.
# @param options [Hash{String=>String}] specifying any request options like :body.
# @return [MsRestAzure::AzureOperationResponse] Operation response containing the request, response and status.
#
def make_request_with_http_info(method, path, options = {})
result = make_request_async(method, path, options).value!
result.body = result.response.body.to_s.empty? ? nil : JSON.load(result.response.body)
result
end
#
# Makes a request asynchronously.
# @param method [Symbol] with any of the following values :get, :put, :post, :patch, :delete.
# @param path [String] the path, relative to {base_url}.
# @param options [Hash{String=>String}] specifying any request options like :body.
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def make_request_async(method, path, options = {})
fail ArgumentError, 'method is nil' if method.nil?
fail ArgumentError, 'path is nil' if path.nil?
request_url = options[:base_url] || @base_url
if(!options[:headers].nil? && !options[:headers]['Content-Type'].nil?)
@request_headers['Content-Type'] = options[:headers]['Content-Type']
end
request_headers = @request_headers
request_headers.merge!({'accept-language' => @accept_language}) unless @accept_language.nil?
options.merge!({headers: request_headers.merge(options[:headers] || {})})
options.merge!({credentials: @credentials}) unless @credentials.nil?
super(request_url, method, path, options)
end
#
# Checks whether a domain name in the cloudapp.azure.com zone is available for
# use.
#
# @param location [String] The location of the domain name.
# @param domain_name_label [String] The domain name to be verified. It must
# conform to the following regular expression: ^[a-z][a-z0-9-]{1,61}[a-z0-9]$.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [DnsNameAvailabilityResult] operation results.
#
def check_dns_name_availability(location, domain_name_label, custom_headers:nil)
response = check_dns_name_availability_async(location, domain_name_label, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Checks whether a domain name in the cloudapp.azure.com zone is available for
# use.
#
# @param location [String] The location of the domain name.
# @param domain_name_label [String] The domain name to be verified. It must
# conform to the following regular expression: ^[a-z][a-z0-9-]{1,61}[a-z0-9]$.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def check_dns_name_availability_with_http_info(location, domain_name_label, custom_headers:nil)
check_dns_name_availability_async(location, domain_name_label, custom_headers:custom_headers).value!
end
#
# Checks whether a domain name in the cloudapp.azure.com zone is available for
# use.
#
# @param location [String] The location of the domain name.
# @param domain_name_label [String] The domain name to be verified. It must
# conform to the following regular expression: ^[a-z][a-z0-9-]{1,61}[a-z0-9]$.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def check_dns_name_availability_async(location, domain_name_label, custom_headers:nil)
fail ArgumentError, 'location is nil' if location.nil?
fail ArgumentError, 'domain_name_label is nil' if domain_name_label.nil?
fail ArgumentError, 'api_version is nil' if api_version.nil?
fail ArgumentError, 'subscription_id is nil' if subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = accept_language unless accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/CheckDnsNameAvailability'
request_url = @base_url || self.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'location' => location,'subscriptionId' => subscription_id},
query_params: {'domainNameLabel' => domain_name_label,'api-version' => api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = self.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_07_01::Models::DnsNameAvailabilityResult.mapper()
result.body = self.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
private
#
# Adds telemetry information.
#
def add_telemetry
sdk_information = 'azure_mgmt_network'
sdk_information = "#{sdk_information}/0.20.0"
add_user_agent_information(sdk_information)
end
end
end
| 41.389262 | 154 | 0.738014 |
91709405ab6b7c7479e56680c4daadb9dbe6f5d1 | 2,558 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2019_11_01
module Models
#
# Country details.
#
class AvailableProvidersListCountry
include MsRestAzure
# @return [String] The country name.
attr_accessor :country_name
# @return [Array<String>] A list of Internet service providers.
attr_accessor :providers
# @return [Array<AvailableProvidersListState>] List of available states
# in the country.
attr_accessor :states
#
# Mapper for AvailableProvidersListCountry class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'AvailableProvidersListCountry',
type: {
name: 'Composite',
class_name: 'AvailableProvidersListCountry',
model_properties: {
country_name: {
client_side_validation: true,
required: false,
serialized_name: 'countryName',
type: {
name: 'String'
}
},
providers: {
client_side_validation: true,
required: false,
serialized_name: 'providers',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
states: {
client_side_validation: true,
required: false,
serialized_name: 'states',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'AvailableProvidersListStateElementType',
type: {
name: 'Composite',
class_name: 'AvailableProvidersListState'
}
}
}
}
}
}
}
end
end
end
end
| 29.402299 | 80 | 0.481235 |
5dea6dfa5b519795a2efc67346616b07cbede3b3 | 1,440 | # frozen_string_literal: true
module BrInvoicesPdf
module Nfce
module Renderer
module CustomerIdentification
extend Util::BaseRenderer
extend BaseRenderer
module_function
def execute(pdf, data)
box(pdf, [0, pdf.cursor], page_content_width(pdf)) do
customer = data[:customer]
add_customer_identification(pdf, data, identificator(customer[:identification_type],
customer[:identification]))
end
end
def identificator(identification, number)
id = identification
case id
when 'CPF'
"CPF DO CONSUMIDOR: #{format_cpf(number)}"
when 'CNPJ'
"CNPJ DO CONSUMIDOR: #{format_cnpj(number)}"
when 'idEstrangeiro'
"ID. ESTRANGEIRO: #{number}"
else
'CONSUMIDOR NÃO IDENTIFICADO'
end
end
private_class_method :identificator
# :reek:FeatureEnvy
def add_customer_identification(pdf, data, identificator)
address = data[:customer][:address]
pdf.text("Consumidor\n\n", style: :italic)
pdf.text(identificator, align: :center)
pdf.text(format_address(address), align: :center) if address[:streetname]
end
private_class_method :add_customer_identification
end
end
end
end
| 30 | 96 | 0.584722 |
ac2a255029cdc0fdc7bfa0f06ae38b603eda403d | 22,489 | #
# compiler.rb
#
# Copyright (c) 2010 by Daniel Kelley
#
# $Id:$
#
# GMP Forth Compiler
require 'gmpforth/word'
require 'gmpforth/documentation'
require 'pp'
require 'yaml'
require 'tempfile'
class GMPForth::Compiler
attr_accessor :case_sensitive
attr_reader :dict
# Target image is headless (no dictionary entries)
attr_accessor :headless
# Target image is host (can defined words)
attr_accessor :hosted
# Kernel is in Read Only Memory
attr_accessor :rom
# Verbose
attr_accessor :verbose
# Create boot image
attr_accessor :bootimage
# Save temporary files
attr_accessor :savetemp
BL = ' '
LIB_INDEX = 'Library.yaml'
FWD_LIMIT = 1000 # forward reference resolution loop limit
# immediate words
DICT = {
'+loop' => :f_plus_loop,
'."' => :f_dot_quote,
';' => :f_semicolon,
'begin' => :f_begin,
'do' => :f_do,
'does>' => :f_does,
'else' => :f_else,
'if' => :f_if,
'leave' => :f_leave,
'?leave' => :f_question_leave,
'literal' => :f_literal,
'loop' => :f_loop,
'postpone' => :f_postpone,
'recurse' => :f_recurse,
'repeat' => :f_repeat,
's"' => :f_s_quote,
'then' => :f_then,
'until' => :f_until,
'while' => :f_while,
'[' => :f_l_bracket,
']' => :f_r_bracket,
'again' => :f_again,
'?do' => :f_question_do,
'[char]' => :f_bracket_char,
'[\']' => :f_bracket_tick_bracket,
'vocabulary' => :f_vocabulary,
'forth' => :f_forth,
'only' => :f_only,
'definitions' => :f_definitions,
'user' => :f_user,
'constant' => :f_constant,
':' => :f_colon,
'immediate' => :f_immediate,
'compile-only' => :f_compile_only,
'(' => :f_parens,
'\\' => :f_backslash,
'code' => :f_code,
'end-code' => :f_end_code,
';code' => :f_semi_code,
'create' => :f_create,
'variable' => :f_variable,
'include"' => :f_include_quote,
'parameter' => :f_parameter,
'feature' => :f_feature,
'case' => :f_case,
'of' => :f_of,
'endof' => :f_endof,
'endcase' => :f_endcase,
}
CODE_DICT = {
'(' => :f_parens,
'\\' => :f_backslash,
'end-code' => :f_end_code,
}
TMP_BASE = 'gfg'
def initialize(options={})
@dict = {}
@dict_o = []
@state = 0
@code = false
@semicode = false
@latest = nil
@pstack = []
@to_in = 0
@case_sensitive = false
@postpone_fwd = {}
@tokenize = true
@include = [nil] # reserve element for 'also' dir
@alsodir = [] # current dir of scanned file
@libfwd = []
@libmap = []
@entry = nil
@bootimage = false
@verbose = false
@headless = true
@rom = false
@savetemp = false
@hosted = true
@optim_combine_next = false
@macro = {}
@macro_file = {}
@fstack = [] # file stack [file,line_no]
@dependency = []
end
def include(*dir)
dir.each { |d| @include << d }
end
def locate_relative(file)
absfile = file
@include.each do |dir|
absfile = File.expand_path(file, dir)
if File.exist? absfile
return absfile
end
end
raise "could not find #{file}"
end
def locate(file)
if file !~ %r{^/}
file = locate_relative(file)
elsif !File.exist? file
raise "could not find #{file}"
end
file
end
# also search directory of file
def also_search(dir)
if File.directory? dir
@alsodir << dir
@include[0] = dir
else
fatal("#{dir} not found")
end
end
# pop
def also_pop
@alsodir.pop
@include[0] = @alsodir[-1]
end
def fatal(msg=$!)
file,line = @fstack[-1]
$stderr.puts("#{file}:#{line}: #{msg}")
exit 1
end
def scan(*files)
files.each do |file|
_scan(file)
@entry = @latest
resolve_references
end
end
def _scan(file)
path = locate(file)
@dependency << path
also_search(File.dirname(path))
@fstack.push([path,0])
IO.foreach(path) do |line|
@fstack[-1][1] += 1 # incr line#
begin
parse_line(line)
rescue => detail
print detail.backtrace.join("\n")
fatal
end
end
@fstack.pop
also_pop
end
# add macro definition
def macro(name,value=true)
@macro[name] = value
@macro_file[name] = @fstack[-1][0]
end
def parse(string)
string.each_line { |line| parse_line(line) }
end
#
# Just barely enough support to compile forth-wordlist
#
def interpret(token)
case token
when "'"
@pstack.push(word(BL))
when ">body"
@pstack[-1] += "_pfa"
else
@pstack.push(token.to_i)
end
end
def tokenize(line)
@tib = line
@number_tib = line.length
@to_in = 0
while @to_in < @number_tib
token = word(BL)
if token.length == 0
next
end
# macro replacement
macro = @macro[token]
token = macro if !macro.nil? && macro != true
t = canonical(token)
w = @code ? CODE_DICT[t] : DICT[t]
if w.nil?
if @state == 0
interpret(token)
else
if !@dict[t].nil? && !@code
# dict word
@latest.append(t)
elsif token =~ /^-?\d+$/
# single
need('(dolit)')
@latest.append('(dolit)', token.to_i)
elsif token != '.' && token =~ /^-?[\d\.]+$/
# double
n = token.gsub('.', '').to_i
a = double_number(n)
need('(dolit)')
@latest.append('(dolit)', a[1])
@latest.append('(dolit)', a[0])
elsif token =~ /^\$([\da-fA-F]+)$/
# single hex
need('(dolit)')
@latest.append('(dolit)', $1.to_i(16))
elsif token =~ /^\$\w+\$$/
# symbolic reference
need('(dolit)')
@latest.append('(dolit)', token)
elsif token =~ /^\$([\d\.a-fA-F]+)$/
# double hex
n = $1.gsub('.', '').to_i(16)
a = double_number(n)
need('(dolit)')
@latest.append('(dolit)', a[1])
@latest.append('(dolit)', a[0])
else
# forward reference
forward_reference(t)
@latest.append(t)
end
end
else
# perform operation of dict word
send(w)
end
end
end
def library_path(*lib)
lib.each { |dir| library(dir) }
end
# add a library directory
def library(dir)
idxfile = File.expand_path(LIB_INDEX, dir)
if File.exist?(idxfile)
File.open(idxfile, 'r') do |f|
map = YAML::load(f)
# scan map for macro references
m = []
map.each do |k,v|
if v.is_a?(Array)
# macro found; load immediately
raise "oops" if v[0] != "MACRO"
m << k
_scan(v[1])
end
end
# delete macro from map
m.each do |name|
map.delete(name)
end
@libmap << map
end
else
raise "#{idxfile} not found"
end
end
def libfile(token)
@libmap.each do |map|
file = map[token]
return file if !file.nil?
end
raise "Cannot find #{token} in libraries"
end
def forward_reference(token, force = false)
if (@latest.kind != :code && @semicode == false) || force
@libfwd << canonical(token)
end
end
def resolve_references
if @libmap.length > 0
limit = FWD_LIMIT
while @libfwd.length != 0
# pop off a token
token = @libfwd.pop
if @dict[token].nil?
# not resolved - scan
file = libfile(token)
# $stderr.puts "resolving #{token} via #{file}"
_scan(file)
raise "resolution failed for #{token}" if @dict[token].nil?
raise "forward reference resolution stuck" if limit < 0
limit -= 1
yield(token) if block_given?
end
end
end
end
def lineize(line)
t = canonical(line)
w = DICT[t]
if w.nil?
# only used in assembly, so forward referencing not required
@latest.append(line)
else
# perform operation of dict word
send(w)
end
end
def parse_line(line)
line.chomp!
if @tokenize
tokenize(line)
else
lineize(line)
end
end
def word(delim)
delim=delim[0].chr
l = ''
w = ''
t = ''
# Skip leading delimiter if delimiter is whitespace. Not sure how
# to reconcile this with the requirement for WORD to skip leading
# delimiters and have this work for non-whitespace delimiters.
state = delim =~ /\s/ ? :skip : :collect
start = @to_in
(@to_in...@number_tib).each do |n|
c = @tib[n].chr
@to_in += 1
case state
when :skip
# scan forward while the delimiter is seen
if c == delim
l << c
else
w << c
state = :collect
end
when :collect
# accumulate non-delimiter characters until the delimiter
# is seen again, or the end of the string is reached
if c == delim
raise "huh?" if w.length == 0 && delim =~ /\s/
t << c
break
else
w << c
end
end
end
$stderr.puts "#{start} #{@to_in - 1} #{delim.inspect} #{l.inspect} #{w.inspect} #{t.inspect}" if @verbose
w
end
def name
n = word(BL)
raise "no name" if n.length == 0
n
end
#
# Return the canonical form of name
#
def canonical(name)
if @case_sensitive || ((@code || @semicode) && (name =~ /:::$/))
name
else
name.downcase
end
end
def define(kind)
n = canonical(name)
raise "#{n} redefined by #{@dict[n].file}" if !@dict[n].nil?
@latest = GMPForth::Word.new(n, kind)
file,line = @fstack[-1]
@latest.where(file, line, @tib)
@dict[n] = @latest
@dict_o << @latest
@latest
end
# indicate a word is needed
def need(name, force=false)
if @dict[name].nil?
forward_reference(name, force)
end
end
def words
@dict.sort.each { |k,v| puts v.see }
end
def quote(s)
s.gsub('"','\"')
end
def dot
puts 'digraph fc {'
@dict.each do |name,word|
word.children.each do |child|
puts " \"#{quote(name)}\" -> \"#{quote(child)}\";"
end
end
puts '}'
end
def dep
@dict.each do |name,word|
word.children.each do |child|
puts "#{name} #{child}"
end
end
end
# substitute string metavariables
def metasub(s)
s.gsub(/%(\w+)%/) do |match|
send($1)
end
end
# append a string cell, plus additional dummy cells corresponding to the string length
def append_counted_string(s)
@latest.append(metasub(s))
cell_len = cells(1)
cstr_len = s.length + 1 # counted string
num_pad_cells = (cstr_len / cell_len) - 1
if cstr_len % cell_len != 0
num_pad_cells += 1
end
raise "pad request (#{num_pad_cells}) was negative" if num_pad_cells < 0
num_pad_cells.times { @latest.append('<<dummy>>') }
end
#
# Return a library index data structure for all compiled words
#
def library_index
idx = {}
# create indicies
@dict.each do |name,word|
dir = File.dirname(word.file)
if idx[dir].nil?
idx[dir] = {}
end
idx[dir][name] = File.expand_path(word.file)
end
@macro_file.each do |name,file|
dir = File.dirname(file)
if idx[dir].nil?
idx[dir] = {}
end
idx[dir][name] = [ "MACRO", File.expand_path(file) ]
end
idx
end
#
# Make library index files for all compiled words
#
def make_index
# write out library index
library_index.each do |dir,lib|
idxfile = File.expand_path(LIB_INDEX, dir)
File.open(idxfile, 'w') do |f|
f.puts lib.to_yaml
end
end
end
#
# Make dependency file
#
def dependency(depspec)
depfile, target = depspec.split ','
# append local ruby libs
$LOADED_FEATURES.each do |path|
next if path !~ /gmpforth/
@dependency << path
end
# write out dep file
File.open(depfile, 'w') do |f|
f.write "#{target}: "
@dependency[0..-2].each do |path|
f.puts " #{path} \\"
end
f.puts " #{@dependency[-1]}"
end
end
#
# Temporary file
#
def tempfile(ext='')
tf = Tempfile.new(TMP_BASE+ext)
if @savetemp
# yuk - relies on some internal knowledge
ObjectSpace.undefine_finalizer(tf)
end
tf
end
#
#
# Make documentation template files for all compiled words
#
def make_doc_template(dir)
doc = GMPForth::Documentation.new(dir)
doc.generate(@dict)
end
#
# Set optimizations
#
def optimize(level)
case level
when 0
@optim_combine_next = false
when 1
@optim_combine_next = true
end
end
# Create an "automatic" message for generated files
def automatic(pfx='')
return <<EOF
#{pfx}Automatically generated by #{$0}
#{pfx}
#{pfx}Do not edit as changes may be lost
EOF
end
#
# Make C header
#
def macro_header(file)
base = File.basename(file)
guard = '_'
guard << base.gsub('.','_').upcase
guard << '_'
defs = ""
@macro.sort.each do |k,v|
next if v == false
defs << "#define #{k}"
defs << " #{v}" if v != true
defs << "\n"
end
defs << "#define ROM 1\n" if @rom
File.open(file, 'w') do |f|
f.puts <<EOF
/*
#{base}
#{automatic(' ')}
*/
#ifndef #{guard}
#define #{guard}
#{defs}
#endif /* #{guard} */
EOF
end
end
#
# make Ruby constants
#
def constant_module(file)
base = File.basename(file)
modname = base.sub(/\.rb$/,'').upcase
defs = ""
@macro.sort.each do |k,v|
defs << "#{k} = #{v}\n"
end
defs << "ROM = 1\n" if @rom
File.open(file, 'w') do |f|
f.puts <<EOF
#
# #{base}
#
#{automatic('# ')}
module GMPForth::#{modname}
#{defs}
end
EOF
end
end
#
# make GAS assembly header
#
def asm_header(file)
base = File.basename(file)
defs = ""
@macro.sort.each do |k,v|
next if v == false || v.nil?
defs << " .set #{k}"
defs << ", #{v}" if v != true
defs << "\n"
end
defs << " .set ROM, 1\n" if @rom
File.open(file, 'w') do |f|
f.puts <<EOF
/*
* #{base}
*
#{automatic(' * ').chomp}
*
*/
#{defs}
EOF
end
end
#
# Forth compiler words
#
def f_plus_loop
need('(+loop)')
@latest.append('(+loop)')
@latest.resolve_dest
@latest.resolve_orig
end
def f_dot_quote
need('(s")')
need('type')
@latest.append('(s")')
append_counted_string(word('"'))
@latest.append('type')
end
def f_semicolon
need('exit')
@latest.append('exit')
@latest.done
@state = 0
end
def f_begin
@latest.mark_dest
end
def f_do
need('(do)')
@latest.append('(do)')
@latest.mark_orig
@latest.mark_dest
end
def f_does
if @hosted
need('(;code)')
need('(does,)')
@latest.append('(;code)', '(does,)')
end
@latest.set_does
end
def f_else
need('(branch)')
@latest.append('(branch)')
@latest.mark_orig
@latest.cs_swap
@latest.resolve_orig
end
def f_if
need('(0branch)')
@latest.append('(0branch)')
@latest.mark_orig
end
def f_leave
need('(leave)')
@latest.append('(leave)')
end
def f_question_leave
need('(0branch)')
need('(leave)')
f_if
@latest.append('(leave)')
f_then
end
def f_literal
need('(dolit)')
@latest.append('(dolit)', @pstack.pop)
end
def f_loop
need('(loop)')
@latest.append('(loop)')
@latest.resolve_dest
@latest.resolve_orig
end
def postpone_fwdref(name, defn)
if @postpone_fwd[name].nil?
@postpone_fwd[name] = []
end
# $stderr.puts "#{defn.name} - postpone forward reference to #{name}"
@postpone_fwd[name] << [ defn, defn.param.length + 2 ]
end
def postpone_resolve(defn)
if !@postpone_fwd[defn.name].nil?
if defn.immediate
@postpone_fwd[defn.name].each do |target, offset|
# $stderr.puts "#{defn.name} - resolving postpone forward reference to #{target.name}@#{offset}"
raise "expected compile," if target.param[offset] != 'compile,'
target.param[offset] = 'execute'
end
else
# $stderr.puts "#{defn.name} - not immediate, no resolution needed"
end
@postpone_fwd.delete(defn.name)
end
end
def check_unresolved
# any remaining unresolved postpone forward references must be non-immediate
del = []
@postpone_fwd.each_key { |key| del << key if !@dict[key].immediate }
del.each { |key| @postpone_fwd.delete(key) }
# now what's left is unresolved
if @postpone_fwd.length != 0
fwd = @postpone_fwd.keys.join ' '
raise "unresolved postpone forward references: #{fwd}"
end
false
end
def f_postpone
w = canonical(word(BL))
pw = 'compile,'
defn = @dict[w]
if defn.nil?
# forward reference; resolve when word becomes known
postpone_fwdref(w, @latest)
else
pw = 'execute' if defn.immediate
end
need('(dolit)')
need(w)
need(pw)
@latest.append('(dolit)', w, pw)
end
def f_recurse
@latest.append(@latest.name)
end
def f_repeat
f_again
f_then
end
def f_s_quote
need('(s")')
@latest.append('(s")')
append_counted_string(word('"'))
end
def f_then
@latest.resolve_orig
end
def f_until
need('(0branch)')
@latest.append('(0branch)')
@latest.resolve_dest
end
def f_while
f_if
@latest.cs_swap
end
def f_l_bracket
raise "unsupported"
end
def f_r_bracket
raise "unsupported"
end
def f_again
need('(branch)')
@latest.append('(branch)')
@latest.resolve_dest
end
def f_question_do
need('(?do)')
@latest.append('(?do)')
@latest.mark_orig
@latest.mark_dest
end
def f_bracket_char
need('(dolit)')
w = word(BL)
@latest.append('(dolit)', w[0].ord)
end
def f_bracket_tick_bracket
w = canonical(word(BL))
need('(dolit)')
need(w)
@latest.append('(dolit)', w)
end
def f_vocabulary
define(:vocabulary)
@latest.add_param(0) # word list head
@latest.add_param(0) # voc-link
@latest.add_param(0) # pointer to name counted string
@latest.done
end
def f_forth
if @state == 1
@latest.append('forth')
end
end
def f_only
if @state == 1
@latest.append('only')
end
end
def f_definitions
if @state == 1
@latest.append('definitions')
end
end
def f_user
if @state == 0
raise "stack empty" if @pstack.length == 0
need_user if respond_to? :need_user
define(:user)
uoffset = @pstack.pop
if @macro['USER_LAST'].nil?
@macro['USER_LAST'] = "0"
end
ulast = [ uoffset, @macro['USER_LAST'].to_i ].max
@macro['USER_LAST'] = ulast
@latest.add_param(uoffset)
uname = @latest.name.upcase
# FIXME: need to eventually convert all user variables to assembly
# constants, but this is sufficient for trap.S
if uname =~ /^[A-Z_]+$/
@macro['USER_' + uname] = uoffset
end
@latest.done
else
@latest.append('user')
end
end
def f_constant
if @state == 0
raise "stack empty" if @pstack.length == 0
define(:constant)
@latest.add_param(@pstack.pop)
@latest.done
else
@latest.append('constant')
end
end
def f_create
if @state == 0
# no parameter expected
raise "stack not empty" if @pstack.length != 0
define(:variable)
@latest.done
else
@latest.append('create')
end
end
def f_variable
if @state == 0
define(:variable)
@latest.add_param(0)
@latest.done
else
@latest.append('variable')
end
end
def f_colon
define(:colon)
@state = 1
end
def f_immediate
if @state == 0
@latest.immediate = true
postpone_resolve(@latest)
else
@latest.append('immediate')
end
end
def f_compile_only
if @state == 0
@latest.compile_only = true
else
@latest.append('compile-only')
end
end
def f_parens
word(')')
end
def f_backslash
@to_in = @number_tib
end
def f_code
define(:code)
@state = 1
@code = true
@tokenize = respond_to?(:code_tokenized) ? code_tokenized : true
end
def f_end_code
@state = 0
@code = false
@latest.done
@tokenize = true
@semicode = false
end
def f_semi_code
need('(;code)')
@latest.append('(;code)')
@tokenize = respond_to?(:code_tokenized) ? code_tokenized : true
@semicode = true
end
def f_include_quote
raise "interpret only" if @state != 0
file = word('"')
scan(file)
end
def f_parameter
raise "stack empty" if @pstack.length == 0
raise "interpret only" if @state != 0
# because parameters are replaced at the token processing phase
# they need to be strings
macro(name,@pstack.pop.to_s)
end
def f_feature
raise "stack not empty" if @pstack.length != 0
raise "interpret only" if @state != 0
macro(name,true)
end
def f_case
@latest.mark_case
end
def f_of
need('(of)')
@latest.append('(of)')
f_if
end
def f_endof
f_else
end
def f_endcase
while @latest.resolve_endcase? do
f_then
end
end
#
# Stubs
#
def cells(n)
n
end
# Compiler interface to return an array of single precision numbers
# corresponding to the given double number.
def double_number(d)
[d]
end
# compiler interface to return the target name
def target_name
"none"
end
end
| 20.651056 | 109 | 0.545022 |
7a3aeafde032b47d0370a0496818b0e45a15b403 | 7,016 | require 'spec_helper'
require 'puppet_spec/files'
require 'puppet/pops'
require 'puppet/loaders'
require 'puppet_spec/compiler'
describe 'FileBased module loader' do
include PuppetSpec::Files
let(:static_loader) { Puppet::Pops::Loader::StaticLoader.new() }
let(:loaders) { Puppet::Pops::Loaders.new(Puppet::Node::Environment.create(:testing, [])) }
it 'can load a 4x function API ruby function in global name space' do
module_dir = dir_containing('testmodule', {
'lib' => {
'puppet' => {
'functions' => {
'foo4x.rb' => <<-CODE
Puppet::Functions.create_function(:foo4x) do
def foo4x()
'yay'
end
end
CODE
}
}
}
})
module_loader = Puppet::Pops::Loader::ModuleLoaders.module_loader_from(static_loader, loaders, 'testmodule', module_dir)
function = module_loader.load_typed(typed_name(:function, 'foo4x')).value
expect(function.class.name).to eq('foo4x')
expect(function.is_a?(Puppet::Functions::Function)).to eq(true)
end
it 'can load a 4x function API ruby function in qualified name space' do
module_dir = dir_containing('testmodule', {
'lib' => {
'puppet' => {
'functions' => {
'testmodule' => {
'foo4x.rb' => <<-CODE
Puppet::Functions.create_function('testmodule::foo4x') do
def foo4x()
'yay'
end
end
CODE
}
}
}
}})
module_loader = Puppet::Pops::Loader::ModuleLoaders.module_loader_from(static_loader, loaders, 'testmodule', module_dir)
function = module_loader.load_typed(typed_name(:function, 'testmodule::foo4x')).value
expect(function.class.name).to eq('testmodule::foo4x')
expect(function.is_a?(Puppet::Functions::Function)).to eq(true)
end
it 'system loader has itself as private loader' do
module_loader = loaders.puppet_system_loader
expect(module_loader.private_loader).to be(module_loader)
end
it 'makes parent loader win over entries in child' do
module_dir = dir_containing('testmodule', {
'lib' => { 'puppet' => { 'functions' => { 'testmodule' => {
'foo.rb' => <<-CODE
Puppet::Functions.create_function('testmodule::foo') do
def foo()
'yay'
end
end
CODE
}}}}})
module_loader = Puppet::Pops::Loader::ModuleLoaders.module_loader_from(static_loader, loaders, 'testmodule', module_dir)
module_dir2 = dir_containing('testmodule2', {
'lib' => { 'puppet' => { 'functions' => { 'testmodule2' => {
'foo.rb' => <<-CODE
raise "should not get here"
CODE
}}}}})
module_loader2 = Puppet::Pops::Loader::ModuleLoaders::FileBased.new(module_loader, loaders, 'testmodule2', module_dir2, 'test2')
function = module_loader2.load_typed(typed_name(:function, 'testmodule::foo')).value
expect(function.class.name).to eq('testmodule::foo')
expect(function.is_a?(Puppet::Functions::Function)).to eq(true)
end
context 'loading tasks' do
before(:each) do
Puppet[:tasks] = true
Puppet.push_context(:loaders => loaders)
end
after(:each) { Puppet.pop_context }
it 'can load tasks with multiple files' do
module_dir = dir_containing('testmodule', 'tasks' => {'foo.py' => '', 'foo.json' => '{}'})
module_loader = Puppet::Pops::Loader::ModuleLoaders.module_loader_from(static_loader, loaders, 'testmodule', module_dir)
task = module_loader.load_typed(typed_name(:task, 'testmodule::foo')).value
expect(task.name).to eq('testmodule::foo')
expect(File.basename(task.executable)).to eq('foo.py')
end
it 'can load multiple tasks with multiple files' do
module_dir = dir_containing('testmodule', 'tasks' => {'foo.py' => '', 'foo.json' => '{}', 'foobar.py' => '', 'foobar.json' => '{}'})
module_loader = Puppet::Pops::Loader::ModuleLoaders.module_loader_from(static_loader, loaders, 'testmodule', module_dir)
foo_task = module_loader.load_typed(typed_name(:task, 'testmodule::foo')).value
foobar_task = module_loader.load_typed(typed_name(:task, 'testmodule::foobar')).value
expect(foo_task.name).to eq('testmodule::foo')
expect(File.basename(foo_task.executable)).to eq('foo.py')
expect(foobar_task.name).to eq('testmodule::foobar')
expect(File.basename(foobar_task.executable)).to eq('foobar.py')
end
it "won't load tasks with invalid names" do
module_dir = dir_containing('testmodule', 'tasks' => {'a-b.py' => '', 'foo.tar.gz' => ''})
module_loader = Puppet::Pops::Loader::ModuleLoaders.module_loader_from(static_loader, loaders, 'testmodule', module_dir)
tasks = module_loader.discover(:task)
expect(tasks).to be_empty
expect(module_loader.load_typed(typed_name(:task, 'testmodule::foo'))).to be_nil
end
end
def typed_name(type, name)
Puppet::Pops::Loader::TypedName.new(type, name)
end
context 'module function and class using a module type alias' do
include PuppetSpec::Compiler
let(:modules) do
{
'mod' => {
'functions' => {
'afunc.pp' => <<-PUPPET.unindent
function mod::afunc(Mod::Analias $v) {
notice($v)
}
PUPPET
},
'types' => {
'analias.pp' => <<-PUPPET.unindent
type Mod::Analias = Enum[a,b]
PUPPET
},
'manifests' => {
'init.pp' => <<-PUPPET.unindent
class mod(Mod::Analias $v) {
notify { $v: }
}
PUPPET
}
}
}
end
let(:testing_env) do
{
'testing' => {
'modules' => modules
}
}
end
let(:environments_dir) { Puppet[:environmentpath] }
let(:testing_env_dir) do
dir_contained_in(environments_dir, testing_env)
env_dir = File.join(environments_dir, 'testing')
PuppetSpec::Files.record_tmp(env_dir)
env_dir
end
let(:env) { Puppet::Node::Environment.create(:testing, [File.join(testing_env_dir, 'modules')]) }
let(:node) { Puppet::Node.new('test', :environment => env) }
# The call to mod:afunc will load the function, and as a consequence, make an attempt to load
# the parameter type Mod::Analias. That load in turn, will trigger the Runtime3TypeLoader which
# will load the manifests in Mod. The init.pp manifest also references the Mod::Analias parameter
# which results in a recursive call to the same loader. This test asserts that this recursive
# call is handled OK.
# See PUP-7391 for more info.
it 'should handle a recursive load' do
expect(eval_and_collect_notices("mod::afunc('b')", node)).to eql(['b'])
end
end
end
| 35.08 | 138 | 0.607754 |
288ee4d6ac6fada37277875510115a26f3f50686 | 830 | require File.expand_path("../../Abstract/abstract-php-extension", __FILE__)
class Php55Xdebug < AbstractPhp55Extension
init
homepage 'http://xdebug.org'
url 'http://xdebug.org/files/xdebug-2.2.5.tgz'
sha1 '62d388e07a45cab9eee498e7905c92a7e5d023cc'
head 'https://github.com/xdebug/xdebug.git'
def extension_type; "zend_extension"; end
def install
Dir.chdir "xdebug-#{version}" unless build.head?
ENV.universal_binary if build.universal?
safe_phpize
system "./configure", "--prefix=#{prefix}",
phpconfig,
"--disable-debug",
"--disable-dependency-tracking",
"--enable-xdebug"
system "make"
prefix.install "modules/xdebug.so"
write_config_file if build.with? "config-file"
end
end
| 29.642857 | 75 | 0.631325 |
7a1a6da49900a0fd327186ec10479f050291797e | 712 | module ApexCharts
class BaseChart
attr_reader :options, :series
def initialize data, options={}
@series = sanitize_data(data)
@options = Utils::Hash.deep_merge(
build_options(x_sample, options),
Utils::Hash.camelize_keys(
{**@series, chart: {type: chart_type}}.compact
)
)
end
def render
ApexCharts::Renderer.render_default(options)
end
def chart_type
end
protected
def build_options(x_sample, options)
ApexCharts::OptionsBuilder.new(x_sample, options).build_options
end
def x_sample
series[:series][0][:data][0][:x]
end
end
end
| 20.342857 | 69 | 0.58427 |
e2ac44888e815defb02358706146b6d5ffa7c2dd | 16,359 | # frozen_string_literal: true
require 'active_support/concern'
require 'grape/dsl/headers'
module Grape
module DSL
module InsideRoute
extend ActiveSupport::Concern
include Grape::DSL::Settings
include Grape::DSL::Headers
# Denotes a situation where a DSL method has been invoked in a
# filter which it should not yet be available in
class MethodNotYetAvailable < StandardError; end
# @param type [Symbol] The type of filter for which evaluation has been
# completed
# @return [Module] A module containing method overrides suitable for the
# position in the filter evaluation sequence denoted by +type+. This
# defaults to an empty module if no overrides are defined for the given
# filter +type+.
def self.post_filter_methods(type)
@post_filter_modules ||= { before: PostBeforeFilter }
@post_filter_modules[type]
end
# Methods which should not be available in filters until the before filter
# has completed
module PostBeforeFilter
def declared(passed_params, options = {}, declared_params = nil, params_nested_path = [])
options = options.reverse_merge(include_missing: true, include_parent_namespaces: true)
declared_params ||= optioned_declared_params(**options)
if passed_params.is_a?(Array)
declared_array(passed_params, options, declared_params, params_nested_path)
else
declared_hash(passed_params, options, declared_params, params_nested_path)
end
end
private
def declared_array(passed_params, options, declared_params, params_nested_path)
passed_params.map do |passed_param|
declared(passed_param || {}, options, declared_params, params_nested_path)
end
end
# Builds a filtered copy of a Hash payload containing only declared params.
#
# passed_params      - incoming params Hash (same class is used for output)
# options            - :include_missing / :stringify / :include_parent_namespaces
# declared_params    - declaration list; Hash entries declare nested children
# params_nested_path - path of parent keys, used for rename and type lookup
def declared_hash(passed_params, options, declared_params, params_nested_path)
# Map of full nested path -> renamed key. NOTE(review): presumably populated
# by the `as:` param option -- confirm against the params DSL.
renamed_params = route_setting(:renamed_params) || {}
declared_params.each_with_object(passed_params.class.new) do |declared_param, memo|
if declared_param.is_a?(Hash)
# A Hash declaration means this param has declared children; recurse into
# each parent key with an extended nested path.
declared_param.each_pair do |declared_parent_param, declared_children_params|
params_nested_path_dup = params_nested_path.dup
params_nested_path_dup << declared_parent_param.to_s
next unless options[:include_missing] || passed_params.key?(declared_parent_param)
rename_path = params_nested_path + [declared_parent_param.to_s]
renamed_param_name = renamed_params[rename_path]
memo_key = optioned_param_key(renamed_param_name || declared_parent_param, options)
passed_children_params = passed_params[declared_parent_param] || passed_params.class.new
# When no children were actually passed, fall back to an empty container
# of the declared type (see #handle_passed_param).
memo[memo_key] = handle_passed_param(params_nested_path_dup, passed_children_params.any?) do
declared(passed_children_params, options, declared_children_params, params_nested_path_dup)
end
end
else
# If it is not a Hash then it does not have children.
# Find its value or set it to nil.
next unless options[:include_missing] || passed_params.key?(declared_param)
rename_path = params_nested_path + [declared_param.to_s]
renamed_param_name = renamed_params[rename_path]
memo_key = optioned_param_key(renamed_param_name || declared_param, options)
passed_param = passed_params[declared_param]
params_nested_path_dup = params_nested_path.dup
params_nested_path_dup << declared_param.to_s
memo[memo_key] = passed_param || handle_passed_param(params_nested_path_dup) do
passed_param
end
end
end
end
# Resolves the value for a declared param that may not have been passed.
#
# When children were actually passed, the block result is used directly.
# Otherwise, a missing container param falls back to an empty container of
# its declared type (Hash/Array/Set); any other type defers to the block.
def handle_passed_param(params_nested_path, has_passed_children = false, &_block)
  return yield if has_passed_children

  # Reconstruct the rack-style key, e.g. %w[user address] -> "user[address]".
  rack_key = params_nested_path.first
  rack_key += "[#{params_nested_path[1..-1].join('][')}]" if params_nested_path.size > 1

  declared_types = options[:route_options][:params] || {}
  declared_type = declared_types.dig(rack_key, :type)
  has_nested_declarations = declared_types.keys.any? { |k| k != rack_key && k.start_with?(rack_key) }

  if declared_type == 'Hash' && !has_nested_declarations
    {}
  elsif declared_type == 'Array' || (declared_type&.start_with?('[') && !declared_type&.include?(','))
    []
  elsif declared_type == 'Set' || declared_type&.start_with?('#<Set')
    Set.new
  else
    yield
  end
end
# Normalizes a declared param key: String when :stringify is set, Symbol
# otherwise.
def optioned_param_key(declared_param, options)
  return declared_param.to_s if options[:stringify]

  declared_param.to_sym
end
# Resolves the declaration list #declared should filter against.
#
# Raises ArgumentError when nothing was declared at all, since filtering
# would otherwise silently return an empty result.
def optioned_declared_params(**options)
  declared = if options[:include_parent_namespaces]
               # Declarations accumulated across all parent namespaces.
               route_setting(:declared_params)
             else
               # Only the innermost namespace's declarations.
               namespace_stackable(:declared_params).last || []
             end

  declared || raise(ArgumentError, 'Tried to filter for declared parameters but none exist.')
end
end
# A filtering method that will return a hash
# consisting only of keys that have been declared by a
# `params` statement against the current/target endpoint or parent
# namespaces.
#
# @see +PostBeforeFilter#declared+
#
# @param params [Hash] The initial hash to filter. Usually this will just be `params`
# @param options [Hash] Can pass `:include_missing`, `:stringify` and `:include_parent_namespaces`
# options. `:include_parent_namespaces` defaults to true, hence must be set to false if
# you want only to return params declared against the current/target endpoint.
# Pre-validation stub: the real #declared (defined in PostBeforeFilter) is
# only mixed in once the before filter has run, so any earlier call is a
# programming error and raises.
def declared(*)
raise MethodNotYetAvailable, '#declared is not available prior to parameter validation.'
end
# The API version as specified in the URL.
def version
env[Grape::Env::API_VERSION]
end
# Evaluated configuration of the API this endpoint belongs to.
# NOTE(review): options[:for] is presumably the API class supplied at mount
# time -- confirm against the mounting code.
def configuration
options[:for].configuration.evaluate
end
# End the request and display an error to the
# end user with the specified message.
#
# @param message [String] The message to display.
# @param status [Integer] the HTTP Status Code. Defaults to default_error_status, 500 if not set.
# @param additional_headers [Hash] Addtional headers for the response.
# Aborts the request, replying with +message+ and the given +status+.
#
# Falls back to the namespace's :default_error_status when no status is
# given. Additional headers are merged over the current response headers.
# Control is transferred with throw(:error), caught higher up the stack to
# build the error response.
def error!(message, status = nil, additional_headers = nil)
  self.status(status || namespace_inheritable(:default_error_status))

  error_headers = header
  error_headers = error_headers.merge(additional_headers) if additional_headers.present?

  throw :error, message: message, status: self.status, headers: error_headers
end
# Redirect to a new url.
#
# @param url [String] The url to be redirect.
# @param options [Hash] The options used when redirect.
# :permanent, default false.
# :body, default a short message including the URL.
# Issues an HTTP redirect to +url+ and sets a plain-text body.
#
# Status selection:
# * 301 when +permanent+ is requested,
# * 303 for non-GET requests over HTTP/1.1 (tells the client to GET the
#   new location),
# * 302 otherwise (temporary redirect).
# A caller-supplied +body+ overrides the generated message.
def redirect(url, permanent: false, body: nil, **_options)
  code, default_text =
    if permanent
      [301, "This resource has been moved permanently to #{url}."]
    elsif env[Grape::Http::Headers::HTTP_VERSION] == 'HTTP/1.1' && request.request_method.to_s.upcase != Grape::Http::Headers::GET
      [303, "An alternate resource is located at #{url}."]
    else
      [302, "This resource has been moved temporarily to #{url}."]
    end

  status code
  header 'Location', url
  content_type 'text/plain'
  body(body || default_text)
end
# Set or retrieve the HTTP status code.
#
# @param status [Integer] The HTTP Status Code to return for this request.
def status(status = nil)
case status
when Symbol
# Symbols are translated through Rack's table, e.g. :not_found -> 404;
# unknown symbols are rejected up front with a clear error.
raise ArgumentError, "Status code :#{status} is invalid." unless Rack::Utils::SYMBOL_TO_STATUS_CODE.key?(status)
@status = Rack::Utils.status_code(status)
when Integer
@status = status
when nil
# Reader mode: return the explicitly-set status if there is one.
return @status if instance_variable_defined?(:@status) && @status
# Otherwise derive a default from the request method.
case request.request_method.to_s.upcase
when Grape::Http::Headers::POST
# Created.
201
when Grape::Http::Headers::DELETE
# DELETE with a body is 200 OK; without one it is 204 No Content.
if instance_variable_defined?(:@body) && @body.present?
200
else
204
end
else
200
end
else
raise ArgumentError, 'Status code must be Integer or Symbol.'
end
end
# Set response content-type
# With an argument: sets the response Content-Type header.
# Without one: returns the currently set Content-Type, if any.
def content_type(val = nil)
  return header[Grape::Http::Headers::CONTENT_TYPE] unless val

  header(Grape::Http::Headers::CONTENT_TYPE, val)
end
# Set or get a cookie
#
# @example
# cookies[:mycookie] = 'mycookie val'
# cookies['mycookie-string'] = 'mycookie string val'
# cookies[:more] = { value: '123', expires: Time.at(0) }
# cookies.delete :more
#
def cookies
# Lazily-built cookie jar, memoized per endpoint instance.
# NOTE(review): presumably flushed into the response after the route block
# runs -- confirm in Grape::Endpoint.
@cookies ||= Cookies.new
end
# Allows you to define the response body as something other than the
# return value.
#
# @example
# get '/body' do
# body "Body"
# "Not the Body"
# end
#
# GET /body # => "Body"
# Setter/getter for the response body.
#
# * truthy value -> stored as the body (and returned)
# * false        -> explicitly empty body plus 204 No Content
# * nil / no arg -> returns the current body, or nil if none was set
def body(value = nil)
  case value
  when false
    @body = ''
    status 204
  when nil
    instance_variable_defined?(:@body) ? @body : nil
  else
    @body = value
  end
end
# Allows you to explicitly return no content.
#
# @example
# delete :id do
# return_no_content
# "not returned"
# end
#
# DELETE /12 # => 204 No Content, ""
def return_no_content
# 204 plus body(false), which also blanks the body and re-asserts 204
# (see #body), discarding whatever the route block would have returned.
status 204
body false
end
# Deprecated method to send files to the client. Use `sendfile` or `stream`
# Deprecated entry point for sending files: forwards String paths (and the
# no-argument reader form) to #sendfile, anything else to #stream, warning
# in every case.
def file(value = nil)
  case value
  when String
    warn '[DEPRECATION] Use sendfile or stream to send files.'
    sendfile(value)
  when nil
    warn '[DEPRECATION] Use sendfile or stream to send files.'
    sendfile
  else
    warn '[DEPRECATION] Use stream to use a Stream object.'
    stream(value)
  end
end
# Allows you to send a file to the client via sendfile.
#
# @example
# get '/file' do
# sendfile FileStreamer.new(...)
# end
#
# GET /file # => "contents of file"
# Sends the file at the given path via a streaming response.
#
# With no argument, returns the current stream (see #stream). Anything
# other than a String path or nil is rejected.
def sendfile(value = nil)
  case value
  when String
    file_body = Grape::ServeStream::FileBody.new(value)
    @stream = Grape::ServeStream::StreamResponse.new(file_body)
  when nil
    stream
  else
    raise ArgumentError, 'Argument must be a file path'
  end
end
# Allows you to define the response as a streamable object.
#
# If Content-Length and Transfer-Encoding are blank (among other conditions),
# Rack assumes this response can be streamed in chunks.
#
# @example
# get '/stream' do
# stream FileStreamer.new(...)
# end
#
# GET /stream # => "chunked contents of file"
#
# See:
# * https://github.com/rack/rack/blob/99293fa13d86cd48021630fcc4bd5acc9de5bdc3/lib/rack/chunked.rb
# * https://github.com/rack/rack/blob/99293fa13d86cd48021630fcc4bd5acc9de5bdc3/lib/rack/etag.rb
# Sets (or, with no argument, returns) a streamable response object.
#
# Clearing Content-Length and Transfer-Encoding -- and disabling caching,
# which skips ETag generation -- lets Rack deliver the body in chunks.
def stream(value = nil)
  return if value.nil? && @stream.nil?

  header 'Content-Length', nil
  header 'Transfer-Encoding', nil
  header 'Cache-Control', 'no-cache' # Skips ETag generation (reading the response up front)

  case value
  when String
    # A String is treated as a file path.
    @stream = Grape::ServeStream::StreamResponse.new(Grape::ServeStream::FileBody.new(value))
  when nil
    @stream
  else
    raise ArgumentError, 'Stream object must respond to :each.' unless value.respond_to?(:each)

    @stream = Grape::ServeStream::StreamResponse.new(value)
  end
end
# Allows you to make use of Grape Entities by setting
# the response body to the serializable hash of the
# entity provided in the `:with` option. This has the
# added benefit of automatically passing along environment
# and version information to the serialization, making it
# very easy to do conditional exposures. See Entity docs
# for more info.
#
# @example
#
# get '/users/:id' do
# present User.find(params[:id]),
# with: API::Entities::User,
# admin: current_user.admin?
# end
def present(*args)
# Trailing options hash (e.g. with:, root:) is only extracted when more
# than one positional argument was given.
options = args.count > 1 ? args.extract_options! : {}
# A leading Symbol keys the representation into the existing body hash.
key, object = if args.count == 2 && args.first.is_a?(Symbol)
args
else
[nil, args.first]
end
entity_class = entity_class_for_obj(object, options)
root = options.delete(:root)
# Fall back to the raw object when no entity class could be resolved.
representation = if entity_class
entity_representation_for(entity_class, object, options)
else
object
end
representation = { root => representation } if root
if key
# Keyed presentation: merge into whatever body was already built.
representation = (body || {}).merge(key => representation)
elsif entity_class.present? && body
# Unkeyed presentation on top of an existing body: only mergeable
# (hash-like) representations are allowed.
raise ArgumentError, "Representation of type #{representation.class} cannot be merged." unless representation.respond_to?(:merge)
representation = body.merge(representation)
end
body representation
end
# Returns route information for the current request.
#
# @example
#
# desc "Returns the route description."
# get '/' do
# route.description
# end
def route
# :route_info lives in the routing-args hash of the Rack env; presumably
# placed there by the router when the request was matched -- confirm in
# the router.
env[Grape::Env::GRAPE_ROUTING_ARGS][:route_info]
end
# Attempt to locate the Entity class for a given object, if not given
# explicitly. This is done by looking for the presence of Klass::Entity,
# where Klass is the class of the `object` parameter, or one of its
# ancestors.
# @param object [Object] the object to locate the Entity class for
# @param options [Hash]
# @option options :with [Class] the explicit entity class to use
# @return [Class] the located Entity class, or nil if none is found
def entity_class_for_obj(object, options)
entity_class = options.delete(:with)
if entity_class.nil?
# entity class not explicitly defined, auto-detect from relation#klass or first object in the collection
object_class = if object.respond_to?(:klass)
object.klass
else
object.respond_to?(:first) ? object.first.class : object.class
end
# Walk the ancestor chain so a representation registered for a parent
# class (namespace-stackable :representations) is honoured too.
object_class.ancestors.each do |potential|
entity_class ||= (namespace_stackable_with_hash(:representations) || {})[potential]
end
# Last resort: a nested Klass::Entity constant, provided it actually
# behaves like an entity (responds to .represent).
entity_class ||= object_class.const_get(:Entity) if object_class.const_defined?(:Entity) && object_class.const_get(:Entity).respond_to?(:represent)
end
entity_class
end
# @return the representation of the given object as done through
# the given entity_class.
# Represents +object+ through +entity_class+, passing the Rack env (and the
# API version, when one is present in the env) alongside the caller-supplied
# options. Caller options win on key collisions.
def entity_representation_for(entity_class, object, options)
  context = { env: env }
  api_version = env[Grape::Env::API_VERSION]
  context[:version] = api_version if api_version
  entity_class.represent(object, **context.merge(options))
end
end
end
end
| 37.349315 | 157 | 0.604927 |
61f7687a7a3df95fdc6f7217984b344bf3cea345 | 1,749 | # frozen_string_literal: true
require_relative 'boot'
require 'rails'
# Pick the frameworks you want:
require 'active_model/railtie'
require 'active_job/railtie'
require 'active_record/railtie'
# require "active_storage/engine"
require 'action_controller/railtie'
require 'action_mailer/railtie'
# require "action_mailbox/engine"
# require "action_text/engine"
require 'action_view/railtie'
# require "action_cable/engine"
# require "sprockets/railtie"
# require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module LaaCourtDataUi
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
# (Must come before any overrides below.)
config.load_defaults 6.1
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
# Don't generate system test files.
config.generators.system_tests = nil
# best practice not to autoload or eagerload 'lib'
# https://github.com/rails/rails/issues/13142#issuecomment-29744953
#
# config.autoload_paths << Rails.root.join('lib')
# config.eager_load_paths << Rails.root.join('lib')
# Serve error pages through the app's own router instead of the static
# public/ pages.
config.exceptions_app = routes
# Background jobs are processed by Sidekiq.
config.active_job.queue_adapter = :sidekiq
config.x.support_email_address = '[email protected]'
# Feature flag read from the environment; only 'enabled' or 'true' turn it on.
config.x.display_raw_responses = %w[enabled true].include?(ENV['DISPLAY_RAW_RESPONSES'])
# deliver_later mail jobs go to a dedicated :mailers queue.
config.action_mailer.deliver_later_queue_name = :mailers
end
end
| 34.98 | 92 | 0.763865 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.