hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
bf2669016f7cc58e5cde0271a14fe7d0b4690dfe | 3,366 | module Bosh::Director
# Scans a deployment's VMs for problems (missing VMs, unresponsive agents,
# unbound instance VMs) and records findings through the problem register.
class ProblemScanner::VmScanStage
  # How long to wait on an agent RPC before treating the agent as unresponsive.
  AGENT_TIMEOUT_IN_SECONDS = 10

  # Hash of disk cid => [vm cids] collected while scanning; consumed later by
  # the disk scan stage to detect mount mismatches.
  attr_reader :agent_disks

  def initialize(instance_manager, problem_register, cloud, deployment, event_logger, logger)
    @instance_manager = instance_manager
    @problem_register = problem_register
    @cloud = cloud
    @deployment = deployment
    @event_logger = event_logger
    @logger = logger
    @agent_disks = {}
  end

  # Scans the VMs for the given (job, index) pairs, or every VM in the
  # deployment when +vms+ is nil. Scan results are tallied per category and
  # reported through the event logger.
  def scan(vms=nil)
    if vms
      vm_list = []
      vms.each do |job, index|
        instance = @instance_manager.find_by_name(@deployment.name, job, index)
        vm_list << instance.vm
      end
      vms = vm_list
    else
      vms = Models::Vm.eager(:instance).filter(deployment: @deployment).all
    end

    @event_logger.begin_stage("Scanning #{vms.size} VMs", 2)
    results = Hash.new(0)
    lock = Mutex.new

    @event_logger.track_and_log('Checking VM states') do
      ThreadPool.new(max_threads: Config.max_threads).wrap do |pool|
        vms.each do |vm|
          pool.process do
            scan_result = scan_vm(vm)
            # scan_vm runs concurrently across the pool, so the tally hash
            # must be updated under the mutex.
            lock.synchronize { results[scan_result] += 1 }
          end
        end
      end
    end

    @event_logger.track_and_log("#{results[:ok]} OK, " +
      "#{results[:unresponsive]} unresponsive, " +
      "#{results[:missing]} missing, " +
      "#{results[:unbound]} unbound")
  end

  private

  # Probes a single VM's agent and classifies it as :ok, :unbound, :missing
  # or :unresponsive, registering a problem record for each finding.
  def scan_vm(vm)
    agent_options = {
      timeout: AGENT_TIMEOUT_IN_SECONDS,
      retry_methods: {get_state: 0}
    }

    instance, mounted_disk_cid = @problem_register.get_vm_instance_and_disk(vm)

    agent = AgentClient.with_vm(vm, agent_options)
    begin
      state = agent.get_state

      # gather mounted disk info. (used by scan_disk)
      begin
        disk_list = agent.list_disk
        mounted_disk_cid = disk_list.first
      rescue Bosh::Director::RpcTimeout
        mounted_disk_cid = nil
      end
      add_disk_owner(mounted_disk_cid, vm.cid) if mounted_disk_cid

      return :unbound if is_unbound_instance_vm?(vm, instance, state)
      :ok
    rescue Bosh::Director::RpcTimeout
      # We add the disk to avoid a duplicate problem when timeouts fetching agent status (unresponsive_agent and
      # mount_info_mismatch)
      add_disk_owner(mounted_disk_cid, vm.cid) if mounted_disk_cid

      begin
        unless @cloud.has_vm?(vm.cid)
          @logger.info("Missing VM #{vm.cid}")
          @problem_register.problem_found(:missing_vm, vm)
          return :missing
        end
      rescue Bosh::Clouds::NotImplemented
        # CPI cannot answer has_vm?; fall through and report unresponsive.
      end

      @logger.info("Found unresponsive agent #{vm.agent_id}")
      @problem_register.problem_found(:unresponsive_agent, vm)
      :unresponsive
    end
  end

  # Records that +vm_cid+ reports +disk_cid+ as mounted.
  def add_disk_owner(disk_cid, vm_cid)
    @agent_disks[disk_cid] ||= []
    @agent_disks[disk_cid] << vm_cid
  end

  # A VM is "unbound" when its agent reports a job but the director has no
  # instance bound to the VM.
  def is_unbound_instance_vm?(vm, instance, state)
    job = state['job'] ? state['job']['name'] : nil
    index = state['index']

    if job && !instance
      @logger.info("Found unbound VM #{vm.agent_id}")
      @problem_register.problem_found(:unbound_instance_vm, vm,
        job: job, index: index)
      true
    else
      false
    end
  end
end
end
| 29.017241 | 114 | 0.615865 |
625c67886ef416ffb752cfda00f80f227ca2603e | 591 | class Tasks::Sources::NewSourceController < ApplicationController
include TaskControllerConfiguration
# Landing page for the new-source task; render only, nothing to load.
def index
end
# GET /sources/new_source/crossref_preview.json
# GET /sources/new_source/crossref_preview.json
# Builds a Source from a raw citation string via the Serrano (CrossRef)
# vendor wrapper and renders it. Falls back to an empty Source::Bibtex when
# the lookup returns nil, so the show template always has a @source.
def crossref_preview
  if citation_param.blank?
    render json: :invalid_request
  else
    @source = TaxonWorks::Vendor::Serrano.new_from_citation(citation: citation_param)
    @source ||= Source::Bibtex.new
    render '/sources/show'
  end
end
protected
# Returns the required :citation parameter, or nil when it was not supplied.
def citation_param
  params.require(:citation)
rescue ActionController::ParameterMissing
  nil
end
end
| 20.37931 | 87 | 0.71912 |
085e85e0eb492a184ee500c83f6460cfc1f8ee2a | 370 | # frozen_string_literal: true
# Exposes this extension's configuration singleton.
module SolidusTailwindFrontend
  # Container for extension-wide settings, e.g.:
  #
  #   attr_accessor :my_setting
  class Configuration
  end

  # Lazily builds and memoizes the single Configuration instance.
  def self.configuration
    @configuration ||= Configuration.new
  end

  # Yields the memoized configuration so callers can set options in a block.
  def self.configure
    yield configuration
  end

  class << self
    alias config configuration
  end
end
| 16.818182 | 56 | 0.694595 |
7a44156962125b403ccb7ca30794ee59fffbf5cb | 166 | class CreateTags < ActiveRecord::Migration[4.2]
# Creates the tags table with a title, an image reference, and timestamps.
def change
  create_table :tags do |table|
    table.string :title
    table.string :image
    table.timestamps
  end
end
end
| 16.6 | 47 | 0.662651 |
e9f0366901a861355bdaa94e8acb3ed4f9b9bf8c | 3,506 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::NetApp::Mgmt::V2019_07_01
module Models
#
# NetApp account resource
#
class NetAppAccount
include MsRestAzure
# @return [String] Resource location
attr_accessor :location
# @return [String] Resource Id
attr_accessor :id
# @return [String] Resource name
attr_accessor :name
# @return [String] Resource type
attr_accessor :type
# @return Resource tags
attr_accessor :tags
# @return [String] Azure lifecycle management
attr_accessor :provisioning_state
# @return [Array<ActiveDirectory>] Active Directories
attr_accessor :active_directories
#
# Mapper for NetAppAccount class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'netAppAccount',
type: {
name: 'Composite',
class_name: 'NetAppAccount',
model_properties: {
location: {
client_side_validation: true,
required: true,
serialized_name: 'location',
type: {
name: 'String'
}
},
id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'id',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
tags: {
client_side_validation: true,
required: false,
serialized_name: 'tags',
type: {
name: 'Object'
}
},
provisioning_state: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.provisioningState',
type: {
name: 'String'
}
},
active_directories: {
client_side_validation: true,
required: false,
serialized_name: 'properties.activeDirectories',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'ActiveDirectoryElementType',
type: {
name: 'Composite',
class_name: 'ActiveDirectory'
}
}
}
}
}
}
}
end
end
end
end
| 27.825397 | 70 | 0.454079 |
01e85911353980350a757390f4e93450daf479d3 | 402 | # frozen_string_literal: true
FactoryBot.define do
  factory :calendar_protocol, class: "Calendars::Protocol" do
    # Calendars to associate after creation (wired up in the after(:create)
    # hook below rather than as a direct attribute).
    transient do
      calendars { [] }
    end

    sequence(:name) { |i| "Protocol #{i}" }
    kinds { nil }
    # Default the community from the first calendar when calendars were
    # given; otherwise fall back to the global default community.
    community { calendars.first&.community || Defaults.community }

    after(:create) do |protocol, evaluator|
      protocol.calendars = evaluator.calendars
    end
  end
end
| 22.333333 | 66 | 0.664179 |
aba92c10b06588bf71c656e0ca0068eb3b1df884 | 1,675 | ENV['RC_ARCHS'] = '' if RUBY_PLATFORM =~ /darwin/
# :stopdoc:
# mkmf extconf script: locates libcurl and generates the Makefile for the
# typhoeus native extension.
require 'mkmf'

ROOT = File.expand_path(File.join(File.dirname(__FILE__), '..', '..'))

# Allow `--with-curl=/prefix` to point at a custom libcurl install;
# otherwise use the Ruby build's configured lib/include directories.
specified_curl = ARGV[0] =~ /^--with-curl/ ? ARGV[0].split("=")[1] : nil
LIBDIR = specified_curl ? "#{specified_curl}/lib": RbConfig::CONFIG['libdir']
INCLUDEDIR = specified_curl ? "#{specified_curl}/include" : RbConfig::CONFIG['includedir']

if defined?(RUBY_ENGINE) && RUBY_ENGINE == 'macruby'
  $LIBRUBYARG_STATIC.gsub!(/-static/, '')
end

$CFLAGS << " #{ENV["CFLAGS"]}"
# Platform-specific compiler defines.
if RbConfig::CONFIG['target_os'] == 'mingw32'
  $CFLAGS << " -DXP_WIN -DXP_WIN32 -DUSE_INCLUDED_VASPRINTF"
elsif RbConfig::CONFIG['target_os'] == 'solaris2'
  $CFLAGS << " -DUSE_INCLUDED_VASPRINTF"
else
  $CFLAGS << " -g -DXP_UNIX"
end

#$LIBPATH.unshift "/opt/local/lib"
#$LIBPATH.unshift "/usr/local/lib"
$CFLAGS << " -O3 -Wall -Wcast-qual -Wwrite-strings -Wconversion -Wmissing-noreturn -Winline"

# Prefer pkg-config discovery; fall back to searching well-known paths below.
found = pkg_config("libcurl") && have_header("curl/curl.h")

if RbConfig::CONFIG['target_os'] == 'mingw32'
  # Cross-compile for Windows against the bundled curl distribution.
  header = File.join(ROOT, 'cross', 'curl-7.19.4.win32', 'include')
  unless find_header('curl/curl.h', header)
    abort "need libcurl"
  end
elsif !found
  HEADER_DIRS = [
    INCLUDEDIR,
    '/usr/local/include',
    '/usr/include'
  ]
  puts HEADER_DIRS.inspect
  unless find_header('curl/curl.h', *HEADER_DIRS)
    abort "need libcurl"
  end
end

if RbConfig::CONFIG['target_os'] == 'mingw32'
  find_library('curl', 'curl_easy_init',
    File.join(ROOT, 'cross', 'curl-7.19.4.win32', 'bin'))
elsif !found
  find_library('curl', 'curl_easy_init', LIBDIR, '/usr/local/lib', '/usr/lib')
end

create_makefile("typhoeus/native")
| 28.87931 | 92 | 0.670448 |
1889ab8bfc49cfe77b0c1b5b94c7387c10ed2b89 | 401 | require 'spec_helper'
describe API do
  include Rack::Test::Methods

  # Rack::Test entry point: the Rack app under test.
  def app
    API
  end

  describe "get" do
    before do
      get("/")
    end

    it "should return Hello world" do
      expect(last_response.body).to eq("\"Hello world\"")
    end

    it "should return json format" do
      expect(last_response.content_type).to eq("application/json")
    end
  end
end
# NOTE(review): duplicate of the `app` helper already defined inside the
# describe block above; this top-level copy appears unused — confirm and remove.
def app
  API
end
| 13.827586 | 66 | 0.63591 |
e295d009e37bec4f2acbb07dd7c96059841a4444 | 255 | class CreateMicroposts < ActiveRecord::Migration[5.1]
# Creates the microposts table (content + owning user) and a composite index
# for fetching a user's posts in reverse chronological order.
def change
  create_table :microposts do |table|
    table.text :content
    table.references :user, foreign_key: true
    table.timestamps
  end
  add_index :microposts, [:user_id, :created_at]
end
end
| 21.25 | 53 | 0.690196 |
bb8d0e8796b29973c773b2b1f8bb2aa1a8c81a18 | 1,892 | #https://sandbox-api.brewerydb.com/v2/
# Thin client for the BreweryDB sandbox API.
# NOTE: relies on an API_KEY constant defined elsewhere in the app, and on
# open-uri being loaded (URI#read) — confirm both in the including context.
class API
  BEER_URL = 'https://sandbox-api.brewerydb.com/v2/beers/'

  # Fetches the beers payload and maps each styled beer to a flat hash of
  # name/abv/description/style/brewery. Beers without a "style" entry are
  # skipped, matching the previous behavior.
  def fetch_data_beer
    beers = []
    fetch_beer_payload["data"].each do |e|
      next unless e["style"]
      beers << {
        name: e["name"],
        abv: e["abv"],
        description: e["style"]["description"],
        style: e["style"]['shortName'],
        brewery: e["breweries"][0]["nameShortDisplay"]
      }
    end
    beers
  end

  # Fetches the same payload and extracts each beer's first brewery and that
  # brewery's first location, de-duplicated.
  def fetch_data_brewery
    breweries = fetch_beer_payload["data"].map do |e|
      brewery = e["breweries"][0]
      location = brewery["locations"][0]
      {
        name: brewery["nameShortDisplay"],
        website: brewery["website"],
        established: brewery["established"],
        street_address: location["streetAddress"],
        locality: location["locality"],
        region: location["region"],
        zip: location["postalCode"]
      }
    end
    breweries.uniq
  end

  private

  # Shared fetch + parse, previously duplicated in both public methods.
  # Keeps the original URL string (including the double slash) byte-for-byte.
  def fetch_beer_payload
    url = BEER_URL + '/?key=' + API_KEY + "&withBreweries=Y"
    JSON.parse(URI.parse(url).read)
  end
end
# brewery["data"][0]["name"] = beers name
# brewery["data"][0]["style"]["shortName"] = style
# brewery["data"][0]["style"]["description"] = description
# brewery["data"][0]["breweries"][0]["names"] = brewery name
| 37.098039 | 199 | 0.530655 |
abfa742610f490e9a9b3b60a1d58ba9d3139246d | 229 | class ExceptionTestController < ApplicationController
# Triggers an exception via test_method; the unused local is presumably here
# so it appears in the error page's local-variable inspection — confirm
# before removing.
def index
  test = "Test"
  test_method
end
# Raises only for XHR requests, to exercise AJAX error handling.
def xhr
  raise "asda" if request.xhr?
end
# Always raises, giving the rendered error a deeper backtrace frame.
def test_method
  test2 = "Test2"
  raise StandardError
end
end
| 14.3125 | 53 | 0.68559 |
5d944d73c1555796a5ac6cd73b477d17f5d09c24 | 2,126 | # Puma can serve each request in a thread from an internal thread pool.
# The `threads` method setting takes two numbers a minimum and maximum.
# Any libraries that use thread pools should be configured to match
# the maximum value specified for Puma. Default is set to 5 threads for minimum
# and maximum, this matches the default thread size of Active Record.
#
threads_count = ENV.fetch("RAILS_MAX_THREADS") { 5 }.to_i
threads threads_count, threads_count
# Specifies the `port` that Puma will listen on to receive requests, default is 3000.
#
port ENV.fetch("PORT") { 3000 }
# Specifies the `environment` that Puma will run in.
#
environment ENV.fetch("RAILS_ENV") { "development" }
# Specifies the number of `workers` to boot in clustered mode.
# Workers are forked webserver processes. If using threads and workers together
# the concurrency of the application would be max `threads` * `workers`.
# Workers do not work on JRuby or Windows (both of which do not support
# processes).
#
# workers ENV.fetch("WEB_CONCURRENCY") { 2 }
# Use the `preload_app!` method when specifying a `workers` number.
# This directive tells Puma to first boot the application and load code
# before forking the application. This takes advantage of Copy On Write
# process behavior so workers use less memory. If you use this option
# you need to make sure to reconnect any threads in the `on_worker_boot`
# block.
#
preload_app!
# The code in the `on_worker_boot` will be called if you are using
# clustered mode by specifying a number of `workers`. After each worker
# process is booted this block will be run, if you are using `preload_app!`
# option you will want to use this block to reconnect to any threads
# or connections that may have been created at application boot, Ruby
# cannot share connections between processes.
on_worker_boot do
# Worker specific setup for Rails 4.1+
# See: https://devcenter.heroku.com/articles/deploying-rails-applications-with-the-puma-web-server#on-worker-boot
ActiveRecord::Base.establish_connection
end
# Allow puma to be restarted by `rails restart` command.
plugin :tmp_restart
| 43.387755 | 115 | 0.767639 |
6258c6a925297cb65dc67d42f2e0625b4a461654 | 1,202 | class Admin::PlansController < Admin::ApplicationController
before_action :set_conference
before_action :set_plan, only: [:show, :edit, :update, :destroy]
# GET .../plans/new — builds an unsaved plan scoped to the current conference.
def new
  @plan = Plan.new(conference: @conference)
end
# GET .../plans/:id/edit — @plan is loaded by the set_plan before_action.
def edit
end
# POST .../plans — creates a plan under the current conference and returns
# to its edit page on success; re-renders the new form on validation failure.
def create
  @plan = Plan.new(plan_params)
  @plan.conference = @conference

  respond_to do |format|
    if @plan.save
      format.html { redirect_to edit_conference_plan_path(@conference, @plan), notice: 'Plan was successfully created.' }
    else
      format.html { render :new }
    end
  end
end
# PATCH/PUT .../plans/:id — persists edits to an existing plan.
# On validation failure it must re-render the :edit template (the form for an
# existing record); rendering :new here was a copy-paste bug from #create.
def update
  respond_to do |format|
    if @plan.update(plan_params)
      format.html { redirect_to edit_conference_plan_path(@conference, @plan), notice: 'Plan was successfully updated.' }
    else
      format.html { render :edit }
    end
  end
end
private
# Strong parameters for plan create/update.
def plan_params
  params.require(:plan).permit(
    :name,
    :rank,
    :price_text,
    :summary,
    :capacity,
    :number_of_guests,
    :booth_size,
    :words_limit,
  )
end
# Loads the parent conference for every action (before_action).
def set_conference
  @conference = Conference.find(params[:conference_id])
end
# Loads the plan for member actions (before_action on show/edit/update/destroy).
def set_plan
  @plan = Plan.find(params[:id])
end
end
| 20.724138 | 123 | 0.642263 |
ac2605b38fa0cb0e116e02c3f2d92f8d75e9226e | 422 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::RecoveryServicesBackup::Mgmt::V2019_06_15
module Models
#
# Defines values for RetentionScheduleFormat
#
module RetentionScheduleFormat
Invalid = "Invalid"
Daily = "Daily"
Weekly = "Weekly"
end
end
end
| 23.444444 | 70 | 0.706161 |
bf50fdad2d5121b7aeb9a064591013f0070b0ca5 | 603 | Pod::Spec.new do |s|
s.name = 'Nimbus'
s.version = '0.0.8'
s.summary = 'Nimbus is a framework for building cross-platform hybrid applications.'
s.homepage = 'https://github.com/salesforce/nimbus'
s.source = { :git => 'https://github.com/salesforce/nimbus.git', :tag => s.version.to_s }
s.author = { 'Hybrid Platform Team' => '[email protected]' }
s.license = 'BSD-3-Clause'
# Only the Apple-platform Swift sources are shipped in this pod.
s.source_files = 'platforms/apple/Sources/Nimbus/**/*.swift'
s.swift_version = '4.2'
s.ios.deployment_target = '11.0'
end
| 43.071429 | 101 | 0.593698 |
e223009c2d6b42fa292782fb12ecc3eacc27b58c | 14,248 | # frozen_string_literal: true
#-------------------------------------------------------------------------
# # Copyright (c) Microsoft and contributors. All rights reserved.
#
# The MIT License(MIT)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#--------------------------------------------------------------------------
require "azure/core/signed_service"
require "azure/storage/common/core"
require "azure/storage/common/service/storage_service_properties"
require "azure/storage/common/service/storage_service_stats"
module Azure::Storage::Common
module Service
# A base class for StorageService implementations
class StorageService < Azure::Core::SignedService
# @!attribute storage_service_host
# @return [Hash] Get or set the storage service host
attr_accessor :storage_service_host
# Create a new instance of the StorageService
#
# @param signer [Azure::Core::Auth::Signer] An implementation of Signer used for signing requests.
# (optional, Default=Azure::Storage::CommonAuth::SharedKey.new)
# @param account_name [String] The account name (optional, Default=Azure::Storage.storage_account_name)
# @param options [Azure::Storage::CommonConfigurable] the client configuration context
def initialize(signer = nil, account_name = nil, options = {}, &block)
  # An optional block becomes the global request callback (a headers hook).
  StorageService.register_request_callback(&block) if block_given?
  client_config = options[:client]
  # Prefer shared-key auth when an access key is configured...
  signer = signer || Azure::Storage::Common::Core::Auth::SharedKey.new(
    client_config.storage_account_name,
    client_config.storage_access_key) if client_config.storage_access_key
  # ...otherwise fall back to SAS-token auth.
  signer = signer || Azure::Storage::Common::Core::Auth::SharedAccessSignatureSigner.new(
    client_config.storage_account_name,
    client_config.storage_sas_token)
  # Filled in by the concrete service; generate_uri switches between them.
  @storage_service_host = { primary: "", secondary: "" };
  super(signer, account_name, options)
end
def call(method, uri, body = nil, headers = {}, options = {})
super(method, uri, body, StorageService.common_headers(options, body).merge(headers), options)
end
# Public: Get Storage Service properties
#
# See http://msdn.microsoft.com/en-us/library/azure/hh452239
# See http://msdn.microsoft.com/en-us/library/azure/hh452243
#
# ==== Options
#
# * +:timeout+ - Integer. A timeout in seconds.
# * +:request_id+ - String. Provides a client-generated, opaque value with a 1 KB character limit that is recorded
# in the analytics logs when storage analytics logging is enabled.
#
# Returns a Hash with the service properties or nil if the operation failed
def get_service_properties(options = {})
query = {}
StorageService.with_query query, "timeout", options[:timeout].to_s if options[:timeout]
response = call(:get, service_properties_uri(query), nil, {}, options)
Serialization.service_properties_from_xml response.body
end
# Public: Set Storage Service properties
#
# service_properties - An instance of Azure::Storage::CommonService::StorageServiceProperties
#
# See http://msdn.microsoft.com/en-us/library/azure/hh452235
# See http://msdn.microsoft.com/en-us/library/azure/hh452232
#
# ==== Options
#
# * +:timeout+ - Integer. A timeout in seconds.
# * +:request_id+ - String. Provides a client-generated, opaque value with a 1 KB character limit that is recorded
# in the analytics logs when storage analytics logging is enabled.
#
# Returns boolean indicating success.
def set_service_properties(service_properties, options = {})
query = {}
StorageService.with_query query, "timeout", options[:timeout].to_s if options[:timeout]
body = Serialization.service_properties_to_xml service_properties
call(:put, service_properties_uri(query), body, {}, options)
nil
end
# Public: Retrieves statistics related to replication for the service.
# It is only available on the secondary location endpoint when read-access geo-redundant
# replication is enabled for the storage account.
#
# See https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats
# See https://docs.microsoft.com/en-us/rest/api/storageservices/get-queue-service-stats
# See https://docs.microsoft.com/en-us/rest/api/storageservices/get-table-service-stats
#
# ==== Options
#
# * +:timeout+ - Integer. A timeout in seconds.
# * +:request_id+ - String. Provides a client-generated, opaque value with a 1 KB character limit that is recorded
# in the analytics logs when storage analytics logging is enabled.
#
# Returns a Hash with the service statistics or nil if the operation failed
def get_service_stats(options = {})
query = {}
StorageService.with_query query, "timeout", options[:timeout].to_s if options[:timeout]
options.update(
location_mode: LocationMode::SECONDARY_ONLY,
request_location_mode: RequestLocationMode::SECONDARY_ONLY)
response = call(:get, service_stats_uri(query, options), nil, {}, options)
Serialization.service_stats_from_xml response.body
end
# Public: Generate the URI for the service properties
#
# * +:query+ - see Azure::Storage::CommonServices::GetServiceProperties#call documentation.
#
# Returns a URI.
def service_properties_uri(query = {})
query.update(restype: "service", comp: "properties")
generate_uri("", query)
end
# Public: Generate the URI for the service statistics
#
# * +:query+ - see Azure::Storage::CommonServices::GetServiceStats#call documentation.
#
# Returns a URI.
def service_stats_uri(query = {}, options = {})
query.update(restype: "service", comp: "stats")
generate_uri("", query, options)
end
# Overrides the base class implementation to determine the request uri
#
# path - String. the request path
# query - Hash. the query parameters
#
# ==== Options
#
# * +:encode+ - bool. Specifies whether to encode the path.
# * +:location_mode+ - LocationMode. Specifies the location mode used to decide
# which location the request should be sent to.
# * +:request_location_mode+ - RequestLocationMode. Specifies the location used to indicate
# which location the operation (REST API) can be performed against.
# This is determined by the API and cannot be specified by the users.
#
# Returns the uri hash
# Overrides the base class implementation to determine the request uri.
#
# path - String. the request path
# query - Hash. the query parameters
#
# ==== Options
#
# * +:encode+                - bool. Specifies whether to encode the path.
# * +:location_mode+         - LocationMode. Client preference for which
#                              location the request should be sent to.
# * +:request_location_mode+ - RequestLocationMode. Location the operation
#                              (REST API) can be performed against; set by
#                              the API, not by users.
#
# Side effects: sets options[:primary_uri] / options[:secondary_uri] and
# leaves @host pointing at the chosen location's endpoint.
#
# Returns the uri for the chosen location.
def generate_uri(path = "", query = {}, options = {})
  # nil-checks (not ||) are deliberate so an explicit falsy value is honored.
  location_mode =
    if options[:location_mode].nil?
      LocationMode::PRIMARY_ONLY
    else
      options[:location_mode]
    end
  request_location_mode =
    if options[:request_location_mode].nil?
      RequestLocationMode::PRIMARY_ONLY
    else
      # Fix: was a redundant self-assignment (`request_location_mode = ...`)
      # inside the branch.
      options[:request_location_mode]
    end
  location = StorageService.get_location location_mode, request_location_mode

  # Path-style URIs carry the account name in the path instead of the host.
  if self.client.is_a?(Azure::Storage::Common::Client) && self.client.options[:use_path_style_uri]
    account_path = get_account_path location
    path = path.length > 0 ? account_path + "/" + path : account_path
  end

  encode = options[:encode].nil? ? false : options[:encode]
  if encode
    path = CGI.escape(path.encode("UTF-8"))
    # decode the forward slashes to match what the server expects.
    path = path.gsub(/%2F/, "/")
    # decode the backward slashes to match what the server expects.
    path = path.gsub(/%5C/, "/")
    # Re-encode the spaces (encoded as space) to the % encoding.
    path = path.gsub(/\+/, "%20")
  end

  # Build both URIs; super reads @host, so point it at each endpoint in turn.
  # (The previous early @host assignment before this point was dead code —
  # it was always overwritten here before any use.)
  @host = storage_service_host[:primary]
  options[:primary_uri] = super path, query
  @host = storage_service_host[:secondary]
  options[:secondary_uri] = super path, query

  if location == StorageLocation::PRIMARY
    @host = @storage_service_host[:primary]
    options[:primary_uri]
  else
    @host = @storage_service_host[:secondary]
    options[:secondary_uri]
  end
end
# Get account path according to the location settings.
#
# * +:location+ - StorageLocation. Specifies the request location.
#
# Returns the account path
def get_account_path(location)
if location == StorageLocation::PRIMARY
self.client.options[:storage_account_name]
else
self.client.options[:storage_account_name] + "-secondary"
end
end
class << self
# @!attribute user_agent_prefix
# @return [Proc] Get or set the user agent prefix
attr_accessor :user_agent_prefix
# @!attribute request_callback
# @return [Proc] The callback before the request is signed and sent
attr_reader :request_callback
# Registers the callback when sending the request
# The headers in the request can be viewed or changed in the code block
def register_request_callback
@request_callback = Proc.new
end
# Get the request location.
#
# * +:location_mode+ - LocationMode. Specifies the location mode used to decide
# which location the request should be sent to.
# * +:request_location_mode+ - RequestLocationMode. Specifies the location used to indicate
# which location the operation (REST API) can be performed against.
# This is determined by the API and cannot be specified by the users.
#
# Returns the reqeust location
def get_location(location_mode, request_location_mode)
  # Reject combinations where the API's allowed location and the client's
  # configured location can never be satisfied simultaneously.
  if request_location_mode == RequestLocationMode::PRIMARY_ONLY && location_mode == LocationMode::SECONDARY_ONLY
    raise InvalidOptionsError, "This operation can only be executed against the primary storage location."
  end

  if request_location_mode == RequestLocationMode::SECONDARY_ONLY && location_mode == LocationMode::PRIMARY_ONLY
    raise InvalidOptionsError, "This operation can only be executed against the secondary storage location."
  end

  # The API's constraint wins outright when it pins a single location...
  if request_location_mode == RequestLocationMode::PRIMARY_ONLY
    return StorageLocation::PRIMARY
  elsif request_location_mode == RequestLocationMode::SECONDARY_ONLY
    return StorageLocation::SECONDARY
  end

  # ...otherwise the client's first-preference location is used.
  if location_mode == LocationMode::PRIMARY_ONLY || location_mode == LocationMode::PRIMARY_THEN_SECONDARY
    StorageLocation::PRIMARY
  elsif location_mode == LocationMode::SECONDARY_ONLY || location_mode == LocationMode::SECONDARY_THEN_PRIMARY
    StorageLocation::SECONDARY
  end
end
# Adds metadata properties to header hash with required prefix
#
# * +:metadata+ - A Hash of metadata name/value pairs
# * +:headers+ - A Hash of HTTP headers
# Copies metadata name/value pairs into the HTTP headers hash under the
# required "x-ms-meta-" prefix. No-op when metadata is nil.
#
# * +:metadata+ - A Hash of metadata name/value pairs
# * +:headers+  - A Hash of HTTP headers (mutated in place)
def add_metadata_to_headers(metadata, headers)
  return unless metadata
  metadata.each_pair do |name, val|
    headers["x-ms-meta-#{name}"] = val
  end
end
# Adds a value to the Hash object
#
# * +:object+ - A Hash object
# * +:key+ - The key name
# * +:value+ - The value
# Stores value (stringified) under key in the given Hash, skipping falsy
# values entirely.
#
# * +:object+ - A Hash object (mutated in place)
# * +:key+    - The key name
# * +:value+  - The value; ignored when nil/false
def with_value(object, key, value)
  return unless value
  object[key] = value.to_s
end
# Adds a header with the value
#
# * +:headers+ - A Hash of HTTP headers
# * +:name+ - The header name
# * +:value+ - The value
alias with_header with_value
# Adds a query parameter
#
# * +:query+ - A Hash of HTTP query
# * +:name+ - The parameter name
# * +:value+ - The value
alias with_query with_value
# Declares a default hash object for request headers
# Builds the default request-header hash, threading through the optional
# client request id and giving the registered callback a chance to
# inspect/mutate headers before the request is signed.
def common_headers(options = {}, body = nil)
  headers = {}
  headers.merge!("x-ms-client-request-id" => options[:request_id]) if options[:request_id]
  @request_callback.call(headers) if @request_callback
  headers
end
end
end
end
end
| 44.111455 | 135 | 0.625842 |
acef5d2e643943ac3262b43f108032f53ae94dde | 22,373 | # frozen_string_literal: true
module Ci
class Pipeline < ActiveRecord::Base
extend Gitlab::Ci::Model
include HasStatus
include Importable
include AfterCommitQueue
include Presentable
include Gitlab::OptimisticLocking
include Gitlab::Utils::StrongMemoize
include AtomicInternalId
include EnumWithNil
include HasRef
belongs_to :project, inverse_of: :all_pipelines
belongs_to :user
belongs_to :auto_canceled_by, class_name: 'Ci::Pipeline'
belongs_to :pipeline_schedule, class_name: 'Ci::PipelineSchedule'
belongs_to :merge_request, class_name: 'MergeRequest'
has_internal_id :iid, scope: :project, presence: false, init: ->(s) do
s&.project&.all_pipelines&.maximum(:iid) || s&.project&.all_pipelines&.count
end
has_many :stages, -> { order(position: :asc) }, inverse_of: :pipeline
has_many :statuses, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
has_many :processables, -> { processables },
class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
has_many :builds, foreign_key: :commit_id, inverse_of: :pipeline
has_many :trigger_requests, dependent: :destroy, foreign_key: :commit_id # rubocop:disable Cop/ActiveRecordDependent
has_many :variables, class_name: 'Ci::PipelineVariable'
has_many :deployments, through: :builds
has_many :environments, -> { distinct }, through: :deployments
# Merge requests for which the current pipeline is running against
# the merge request's latest commit.
has_many :merge_requests, foreign_key: "head_pipeline_id"
has_many :pending_builds, -> { pending }, foreign_key: :commit_id, class_name: 'Ci::Build'
has_many :retryable_builds, -> { latest.failed_or_canceled.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build'
has_many :cancelable_statuses, -> { cancelable }, foreign_key: :commit_id, class_name: 'CommitStatus'
has_many :manual_actions, -> { latest.manual_actions.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build'
has_many :scheduled_actions, -> { latest.scheduled_actions.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build'
has_many :artifacts, -> { latest.with_artifacts_not_expired.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build'
has_many :auto_canceled_pipelines, class_name: 'Ci::Pipeline', foreign_key: 'auto_canceled_by_id'
has_many :auto_canceled_jobs, class_name: 'CommitStatus', foreign_key: 'auto_canceled_by_id'
accepts_nested_attributes_for :variables, reject_if: :persisted?
delegate :id, to: :project, prefix: true
delegate :full_path, to: :project, prefix: true
validates :sha, presence: { unless: :importing? }
validates :ref, presence: { unless: :importing? }
validates :merge_request, presence: { if: :merge_request? }
validates :merge_request, absence: { unless: :merge_request? }
validates :tag, inclusion: { in: [false], if: :merge_request? }
validates :status, presence: { unless: :importing? }
validate :valid_commit_sha, unless: :importing?
validates :source, exclusion: { in: %w(unknown), unless: :importing? }, on: :create
after_create :keep_around_commits, unless: :importing?
# We use `Ci::PipelineEnums.sources` here so that EE can more easily extend
# this `Hash` with new values.
enum_with_nil source: ::Ci::PipelineEnums.sources
enum_with_nil config_source: ::Ci::PipelineEnums.config_sources
# We use `Ci::PipelineEnums.failure_reasons` here so that EE can more easily
# extend this `Hash` with new values.
enum failure_reason: ::Ci::PipelineEnums.failure_reasons
state_machine :status, initial: :created do
event :enqueue do
transition [:created, :skipped, :scheduled] => :pending
transition [:success, :failed, :canceled] => :running
end
event :run do
transition any - [:running] => :running
end
event :skip do
transition any - [:skipped] => :skipped
end
event :drop do
transition any - [:failed] => :failed
end
event :succeed do
transition any - [:success] => :success
end
event :cancel do
transition any - [:canceled] => :canceled
end
event :block do
transition any - [:manual] => :manual
end
event :delay do
transition any - [:scheduled] => :scheduled
end
# IMPORTANT
# Do not add any operations to this state_machine
# Create a separate worker for each new operation
before_transition [:created, :pending] => :running do |pipeline|
pipeline.started_at = Time.now
end
before_transition any => [:success, :failed, :canceled] do |pipeline|
pipeline.finished_at = Time.now
pipeline.update_duration
end
before_transition any => [:manual] do |pipeline|
pipeline.update_duration
end
before_transition canceled: any - [:canceled] do |pipeline|
pipeline.auto_canceled_by = nil
end
before_transition any => :failed do |pipeline, transition|
transition.args.first.try do |reason|
pipeline.failure_reason = reason
end
end
after_transition [:created, :pending] => :running do |pipeline|
pipeline.run_after_commit { PipelineMetricsWorker.perform_async(pipeline.id) }
end
after_transition any => [:success] do |pipeline|
pipeline.run_after_commit { PipelineMetricsWorker.perform_async(pipeline.id) }
end
after_transition [:created, :pending, :running] => :success do |pipeline|
pipeline.run_after_commit { PipelineSuccessWorker.perform_async(pipeline.id) }
end
after_transition do |pipeline, transition|
next if transition.loopback?
pipeline.run_after_commit do
PipelineHooksWorker.perform_async(pipeline.id)
ExpirePipelineCacheWorker.perform_async(pipeline.id)
end
end
after_transition any => [:success, :failed] do |pipeline|
pipeline.run_after_commit do
PipelineNotificationWorker.perform_async(pipeline.id)
end
end
after_transition any => [:failed] do |pipeline|
next unless pipeline.auto_devops_source?
pipeline.run_after_commit { AutoDevops::DisableWorker.perform_async(pipeline.id) }
end
end
scope :internal, -> { where(source: internal_sources) }
scope :ci_sources, -> { where(config_source: ci_sources_values) }
scope :sort_by_merge_request_pipelines, -> do
sql = 'CASE ci_pipelines.source WHEN (?) THEN 0 ELSE 1 END, ci_pipelines.id DESC'
query = ActiveRecord::Base.send(:sanitize_sql_array, [sql, sources[:merge_request]]) # rubocop:disable GitlabSecurity/PublicSend
order(query)
end
scope :for_user, -> (user) { where(user: user) }
scope :for_merge_request, -> (merge_request, ref, sha) do
##
# We have to filter out unrelated MR pipelines.
# When merge request is empty, it selects general pipelines, such as push sourced pipelines.
# When merge request is matched, it selects MR pipelines.
where(merge_request: [nil, merge_request], ref: ref, sha: sha)
.sort_by_merge_request_pipelines
end
# Returns the pipelines in descending order (= newest first), optionally
# limited to a number of references.
#
# ref - The name (or names) of the branch(es)/tag(s) to limit the list of
# pipelines to.
# limit - This limits a backlog search, default to 100.
def self.newest_first(ref: nil, limit: 100)
relation = order(id: :desc)
relation = relation.where(ref: ref) if ref
if limit
ids = relation.limit(limit).select(:id)
# MySQL does not support limit in subquery
ids = ids.pluck(:id) if Gitlab::Database.mysql?
relation = relation.where(id: ids)
end
relation
end
def self.latest_status(ref = nil)
newest_first(ref: ref).pluck(:status).first
end
def self.latest_successful_for(ref)
newest_first(ref: ref).success.take
end
def self.latest_successful_for_refs(refs)
relation = newest_first(ref: refs).success
relation.each_with_object({}) do |pipeline, hash|
hash[pipeline.ref] ||= pipeline
end
end
# Returns a Hash containing the latest pipeline status for every given
# commit.
#
# The keys of this Hash are the commit SHAs, the values the statuses.
#
# commits - The list of commit SHAs to get the status for.
# ref - The ref to scope the data to (e.g. "master"). If the ref is not
# given we simply get the latest status for the commits, regardless
# of what refs their pipelines belong to.
def self.latest_status_per_commit(commits, ref = nil)
p1 = arel_table
p2 = arel_table.alias
# This LEFT JOIN will filter out all but the newest row for every
# combination of (project_id, sha) or (project_id, sha, ref) if a ref is
# given.
cond = p1[:sha].eq(p2[:sha])
.and(p1[:project_id].eq(p2[:project_id]))
.and(p1[:id].lt(p2[:id]))
cond = cond.and(p1[:ref].eq(p2[:ref])) if ref
join = p1.join(p2, Arel::Nodes::OuterJoin).on(cond)
relation = select(:sha, :status)
.where(sha: commits)
.where(p2[:id].eq(nil))
.joins(join.join_sources)
relation = relation.where(ref: ref) if ref
relation.each_with_object({}) do |row, hash|
hash[row[:sha]] = row[:status]
end
end
def self.latest_successful_ids_per_project
success.group(:project_id).select('max(id) as id')
end
def self.truncate_sha(sha)
sha[0...8]
end
def self.total_duration
where.not(duration: nil).sum(:duration)
end
def self.internal_sources
sources.reject { |source| source == "external" }.values
end
def self.latest_for_merge_request(merge_request, ref, sha)
for_merge_request(merge_request, ref, sha).first
end
def self.ci_sources_values
config_sources.values_at(:repository_source, :auto_devops_source, :unknown_source)
end
def stages_count
statuses.select(:stage).distinct.count
end
def total_size
statuses.count(:id)
end
def stages_names
statuses.order(:stage_idx).distinct
.pluck(:stage, :stage_idx).map(&:first)
end
def legacy_stage(name)
stage = Ci::LegacyStage.new(self, name: name)
stage unless stage.statuses_count.zero?
end
def ref_exists?
project.repository.ref_exists?(git_ref)
rescue Gitlab::Git::Repository::NoRepository
false
end
##
# TODO We do not completely switch to persisted stages because of
# race conditions with setting statuses gitlab-ce#23257.
#
def ordered_stages
return legacy_stages unless complete?
if Feature.enabled?('ci_pipeline_persisted_stages')
stages
else
legacy_stages
end
end
def legacy_stages
# TODO, this needs refactoring, see gitlab-ce#26481.
stages_query = statuses
.group('stage').select(:stage).order('max(stage_idx)')
status_sql = statuses.latest.where('stage=sg.stage').status_sql
warnings_sql = statuses.latest.select('COUNT(*)')
.where('stage=sg.stage').failed_but_allowed.to_sql
stages_with_statuses = CommitStatus.from(stages_query, :sg)
.pluck('sg.stage', status_sql, "(#{warnings_sql})")
stages_with_statuses.map do |stage|
Ci::LegacyStage.new(self, Hash[%i[name status warnings].zip(stage)])
end
end
def valid_commit_sha
if self.sha == Gitlab::Git::BLANK_SHA
self.errors.add(:sha, " cant be 00000000 (branch removal)")
end
end
def git_author_name
strong_memoize(:git_author_name) do
commit.try(:author_name)
end
end
def git_author_email
strong_memoize(:git_author_email) do
commit.try(:author_email)
end
end
def git_commit_message
strong_memoize(:git_commit_message) do
commit.try(:message)
end
end
def git_commit_title
strong_memoize(:git_commit_title) do
commit.try(:title)
end
end
def git_commit_full_title
strong_memoize(:git_commit_full_title) do
commit.try(:full_title)
end
end
def git_commit_description
strong_memoize(:git_commit_description) do
commit.try(:description)
end
end
def short_sha
Ci::Pipeline.truncate_sha(sha)
end
# NOTE: This is loaded lazily and will never be nil, even if the commit
# cannot be found.
#
# Use constructs like: `pipeline.commit.present?`
def commit
@commit ||= Commit.lazy(project, sha)
end
def branch?
super && !merge_request?
end
def stuck?
pending_builds.any?(&:stuck?)
end
def retryable?
retryable_builds.any?
end
def cancelable?
cancelable_statuses.any?
end
def auto_canceled?
canceled? && auto_canceled_by_id?
end
def cancel_running
retry_optimistic_lock(cancelable_statuses) do |cancelable|
cancelable.find_each do |job|
yield(job) if block_given?
job.cancel
end
end
end
def auto_cancel_running(pipeline)
update(auto_canceled_by: pipeline)
cancel_running do |job|
job.auto_canceled_by = pipeline
end
end
# rubocop: disable CodeReuse/ServiceClass
def retry_failed(current_user)
Ci::RetryPipelineService.new(project, current_user)
.execute(self)
end
# rubocop: enable CodeReuse/ServiceClass
def mark_as_processable_after_stage(stage_idx)
builds.skipped.after_stage(stage_idx).find_each(&:process)
end
def latest?
return false unless ref && commit.present?
project.commit(ref) == commit
end
def retried
@retried ||= (statuses.order(id: :desc) - statuses.latest)
end
def coverage
coverage_array = statuses.latest.map(&:coverage).compact
if coverage_array.size >= 1
'%.2f' % (coverage_array.reduce(:+) / coverage_array.size)
end
end
def stage_seeds
return [] unless config_processor
strong_memoize(:stage_seeds) do
seeds = config_processor.stages_attributes.map do |attributes|
Gitlab::Ci::Pipeline::Seed::Stage.new(self, attributes)
end
seeds.select(&:included?)
end
end
def seeds_size
stage_seeds.sum(&:size)
end
def has_kubernetes_active?
project.deployment_platform&.active?
end
def has_warnings?
number_of_warnings.positive?
end
def number_of_warnings
BatchLoader.for(id).batch(default_value: 0) do |pipeline_ids, loader|
::Ci::Build.where(commit_id: pipeline_ids)
.latest
.failed_but_allowed
.group(:commit_id)
.count
.each { |id, amount| loader.call(id, amount) }
end
end
def set_config_source
if ci_yaml_from_repo
self.config_source = :repository_source
elsif implied_ci_yaml_file
self.config_source = :auto_devops_source
end
end
##
# TODO, setting yaml_errors should be moved to the pipeline creation chain.
#
def config_processor
return unless ci_yaml_file
return @config_processor if defined?(@config_processor)
@config_processor ||= begin
::Gitlab::Ci::YamlProcessor.new(ci_yaml_file, { project: project, sha: sha, user: user })
rescue Gitlab::Ci::YamlProcessor::ValidationError => e
self.yaml_errors = e.message
nil
rescue
self.yaml_errors = 'Undefined error'
nil
end
end
def ci_yaml_file_path
return unless repository_source? || unknown_source?
if project.ci_config_path.blank?
'.gitlab-ci.yml'
else
project.ci_config_path
end
end
def ci_yaml_file
return @ci_yaml_file if defined?(@ci_yaml_file)
@ci_yaml_file =
if auto_devops_source?
implied_ci_yaml_file
else
ci_yaml_from_repo
end
if @ci_yaml_file
@ci_yaml_file
else
self.yaml_errors = "Failed to load CI/CD config file for #{sha}"
nil
end
end
def has_yaml_errors?
yaml_errors.present?
end
# Manually set the notes for a Ci::Pipeline
# There is no ActiveRecord relation between Ci::Pipeline and notes
# as they are related to a commit sha. This method helps importing
# them using the +Gitlab::ImportExport::RelationFactory+ class.
def notes=(notes)
notes.each do |note|
note[:id] = nil
note[:commit_id] = sha
note[:noteable_id] = self['id']
note.save!
end
end
def notes
project.notes.for_commit_id(sha)
end
# rubocop: disable CodeReuse/ServiceClass
def process!
Ci::ProcessPipelineService.new(project, user).execute(self)
end
# rubocop: enable CodeReuse/ServiceClass
def update_status
retry_optimistic_lock(self) do
case latest_builds_status.to_s
when 'created' then nil
when 'pending' then enqueue
when 'running' then run
when 'success' then succeed
when 'failed' then drop
when 'canceled' then cancel
when 'skipped' then skip
when 'manual' then block
when 'scheduled' then delay
else
raise HasStatus::UnknownStatusError,
"Unknown status `#{latest_builds_status}`"
end
end
end
def protected_ref?
strong_memoize(:protected_ref) { project.protected_for?(git_ref) }
end
def legacy_trigger
strong_memoize(:legacy_trigger) { trigger_requests.first }
end
def persisted_variables
Gitlab::Ci::Variables::Collection.new.tap do |variables|
break variables unless persisted?
variables.append(key: 'CI_PIPELINE_ID', value: id.to_s)
variables.append(key: 'CI_PIPELINE_URL', value: Gitlab::Routing.url_helpers.project_pipeline_url(project, self))
end
end
def predefined_variables
Gitlab::Ci::Variables::Collection.new.tap do |variables|
variables.append(key: 'CI_PIPELINE_IID', value: iid.to_s)
variables.append(key: 'CI_CONFIG_PATH', value: ci_yaml_file_path)
variables.append(key: 'CI_PIPELINE_SOURCE', value: source.to_s)
variables.append(key: 'CI_COMMIT_MESSAGE', value: git_commit_message.to_s)
variables.append(key: 'CI_COMMIT_TITLE', value: git_commit_full_title.to_s)
variables.append(key: 'CI_COMMIT_DESCRIPTION', value: git_commit_description.to_s)
if merge_request? && merge_request
variables.concat(merge_request.predefined_variables)
end
end
end
def queued_duration
return unless started_at
seconds = (started_at - created_at).to_i
seconds unless seconds.zero?
end
def update_duration
return unless started_at
self.duration = Gitlab::Ci::Pipeline::Duration.from_pipeline(self)
end
def execute_hooks
data = pipeline_data
project.execute_hooks(data, :pipeline_hooks)
project.execute_services(data, :pipeline_hooks)
end
# All the merge requests for which the current pipeline runs/ran against
def all_merge_requests
@all_merge_requests ||=
if merge_request?
project.merge_requests.where(id: merge_request_id)
else
project.merge_requests.where(source_branch: ref)
end
end
def detailed_status(current_user)
Gitlab::Ci::Status::Pipeline::Factory
.new(self, current_user)
.fabricate!
end
def latest_builds_with_artifacts
# We purposely cast the builds to an Array here. Because we always use the
# rows if there are more than 0 this prevents us from having to run two
# queries: one to get the count and one to get the rows.
@latest_builds_with_artifacts ||= builds.latest.with_artifacts_archive.to_a
end
def has_test_reports?
complete? && builds.latest.with_test_reports.any?
end
def test_reports
Gitlab::Ci::Reports::TestReports.new.tap do |test_reports|
builds.latest.with_test_reports.each do |build|
build.collect_test_reports!(test_reports)
end
end
end
def branch_updated?
strong_memoize(:branch_updated) do
push_details.branch_updated?
end
end
def modified_paths
strong_memoize(:modified_paths) do
push_details.modified_paths
end
end
def default_branch?
ref == project.default_branch
end
private
def ci_yaml_from_repo
return unless project
return unless sha
return unless ci_yaml_file_path
project.repository.gitlab_ci_yml_for(sha, ci_yaml_file_path)
rescue GRPC::NotFound, GRPC::Internal
nil
end
def implied_ci_yaml_file
return unless project
if project.auto_devops_enabled?
Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps').content
end
end
def pipeline_data
Gitlab::DataBuilder::Pipeline.build(self)
end
def push_details
strong_memoize(:push_details) do
Gitlab::Git::Push.new(project, before_sha, sha, git_ref)
end
end
def git_ref
if merge_request?
##
# In the future, we're going to change this ref to
# merge request's merged reference, such as "refs/merge-requests/:iid/merge".
# In order to do that, we have to update GitLab-Runner's source pulling
# logic.
# See https://gitlab.com/gitlab-org/gitlab-runner/merge_requests/1092
Gitlab::Git::BRANCH_REF_PREFIX + ref.to_s
else
super
end
end
def latest_builds_status
return 'failed' unless yaml_errors.blank?
statuses.latest.status || 'skipped'
end
def keep_around_commits
return unless project
project.repository.keep_around(self.sha, self.before_sha)
end
end
end
| 29.554822 | 134 | 0.662495 |
87e39cec5bf6035f237ba470ca62a550b9dd5943 | 366 | module ActionView::Helpers
module ChartrHelpers
# easy way to include Chartr assets
def chartr_includes
return "<!--[if IE]>\n" +
javascript_include_tag('chartr/excanvas.js', 'chartr/base64.js') +
"\n<![endif]-->" +
javascript_include_tag("chartr/canvas2image.js", "chartr/canvastext.js", "chartr/flotr.js")
end
end
end
| 30.5 | 99 | 0.653005 |
1deaee0dd35a54b7d0a5982b5c4514230aee486c | 4,593 | require './lib/feed_into'
require 'active_support/core_ext/hash/indifferent_access'
channel = {
name: :blockchain,
sym: :web,
options: {},
regexs: [ [ /https:\/\/raw.githubusercontent.com/ ] ],
download: :general,
mining: :rss_one,
pre: [],
transform: nil,
post: [ :pre_titles ]
}
puts 'CHANNEL:'
feed = FeedInto::Single.new(
options: { channels: [ channel ] }
)
feeds = FeedInto::Group.new(
single: { channels: [ channel ] }
)
tests = {
single: {
string_error: 'test',
string: 'https://raw.githubusercontent.com/a6b8/a6b8/main/docs/feed-into-for-ruby/readme/examples/nft.xml',
cmd_incomplete: {
name: 'test',
url: 'https://raw.githubusercontent.com/a6b8/a6b8/main/docs/feed-into-for-ruby/readme/examples/nft.xml'
},
cmd_complete: {
name: 'test',
url: 'https://raw.githubusercontent.com/a6b8/a6b8/main/docs/feed-into-for-ruby/readme/examples/nft.xml',
category: :crypto
},
cmd_error: {
name: 'test',
url: '//raw.githubusercontent.com/a6b8/a6b8/main/docs/feed-into-for-ruby/readme/examples/nft.xml',
category: :crypto
}
},
group: {
string: [
'https://raw.githubusercontent.com/a6b8/a6b8/main/docs/feed-into-for-ruby/readme/examples/nft.xml',
'https://raw.githubusercontent.com/a6b8/a6b8/main/docs/feed-into-for-ruby/readme/examples/crypto.xml'
],
string_error: [
'https://raw.githubusercontent.com/a6b8/a6b8/main/docs/feed-into-for-ruby/readme/examples/nft.xml',
'//raw.githubusercontent.com/a6b8/a6b8/main/docs/feed-into-for-ruby/readme/examples/crypto.xml'
],
cmds_incomplete: [
{
name: 'test',
url: 'https://raw.githubusercontent.com/a6b8/a6b8/main/docs/feed-into-for-ruby/readme/examples/nft.xml'
},
{
url: 'https://raw.githubusercontent.com/a6b8/a6b8/main/docs/feed-into-for-ruby/readme/examples/crypto.xml',
category: :crypto
}
],
cmds_complete: [
{
name: 'nft',
url: 'https://raw.githubusercontent.com/a6b8/a6b8/main/docs/feed-into-for-ruby/readme/examples/nft.xml',
category: :crypto
},
{
name: 'crypto',
url: 'https://raw.githubusercontent.com/a6b8/a6b8/main/docs/feed-into-for-ruby/readme/examples/crypto.xml',
category: :crypto
}
],
cmds_error: [
{
name: 'nft',
url: 'ht://raw.githubusercontent.com/a6b8/a6b8/main/docs/feed-into-for-ruby/readme/examples/nft.xml',
category: :crypto
},
{
name: 'crypto',
url: '://raw.githubusercontent.com/a6b8/a6b8/main/docs/feed-into-for-ruby/readme/examples/crypto.xml',
category: :crypto
}
]
}
}
res = feed.analyse( item: tests[:single][:string_error] )
puts "- single-error:\t\t\t#{!res[:success]}"
res = feed.analyse( item: tests[:single][:string] )
puts "- single:\t\t\t#{res[:success]}"
res = feed.analyse( item: tests[:single][:cmd_incomplete] )[:result][:items][ 0 ][:title].class.to_s.eql? 'String'
puts "- single incomplete:\t\t#{res}"
res = feed.analyse( item: tests[:single][:cmd_complete] )[:result][:items][ 0 ][:title].class.to_s.eql? 'String'
puts "- single cmd complete:\t\t#{res}"
res = feed.analyse( item: tests[:single][:cmd_error] )
puts "- single cmd error:\t\t#{!res[:success]}"
res = feeds
.analyse( items: tests[:group][:string], silent: true )
.to_h()[:unknown][ 0 ][:result][:items][ 0 ][:title].class.eql? String
puts "- group string:\t\t\t#{res}"
res = feeds
.analyse( items: tests[:group][:string], silent: true )
.merge
.to_h()[:unknown].length == 40
puts "- group string error:\t\t#{res}"
res = feeds
.analyse( items: tests[:group][:cmds_incomplete], silent: true )
.merge
.to_h().keys.length == 2 # [:unknown].length == 40
puts "- group cmds incomplete:\t#{res}"
res = feeds
.analyse( items: tests[:group][:cmds_complete], silent: true )
.merge
.to_h()[:crypto].length == 40
puts "- group cmds complete:\t\t#{res}"
res = feeds
.analyse( items: tests[:group][:cmds_error], silent: true )
.merge
.to_h()[:crypto].length == 0
puts "- group cmds error:\t\t#{res}" | 33.772059 | 123 | 0.573264 |
21f784a1aecfdb8cbcdb4e66b0e67f4e7d4e2936 | 654 | cask 'touchosc-editor' do
version '1.7.0'
sha256 'c23baa9eb9f3c15bb71f83f713a6b2e9aa01cc0ffe0acc15f23a6a8042acb766'
url "http://hexler.net/pub/touchosc/touchosc-editor-#{version}-osx.zip"
name 'TouchOSC Editor'
homepage 'http://hexler.net/software/touchosc'
license :unknown # TODO: change license and remove this comment; ':unknown' is a machine-generated placeholder
app "touchosc-editor-#{version}-osx/TouchOSC Editor.app"
artifact "touchosc-editor-#{version}-osx/layouts", :target => Pathname.new(File.expand_path('~')).join('Library/Application Support/TouchOSCEditor/layouts')
zap :delete => '~/Library/*/*TouchOSCEditor*'
end
| 43.6 | 158 | 0.756881 |
87e7b56d3d5c25323d34ac3c26e06fa58702bba4 | 1,745 | require 'user_agent'
describe "UserAgent: 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)'" do
before do
@useragent = UserAgent.parse("Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")
end
it { expect(@useragent).to be_bot }
end
describe "UserAgent: 'Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)'" do
before do
@useragent = UserAgent.parse("Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)")
end
it { expect(@useragent).to be_bot }
end
describe "UserAgent: 'Twitterbot/1.0'" do
before do
@useragent = UserAgent.parse("Twitterbot/1.0")
end
it { expect(@useragent).to be_bot}
end
describe "UserAgent: Mozilla/5.0 (iPhone; CPU iPhone OS 8_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B411 Safari/600.1.4 (compatible; YandexMobileBot/3.0; +http://yandex.com/bots)" do
before do
@useragent = UserAgent.parse("Mozilla/5.0 (iPhone; CPU iPhone OS 8_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B411 Safari/600.1.4 (compatible; YandexMobileBot/3.0; +http://yandex.com/bots)")
end
it { expect(@useragent).to be_bot}
end
describe "UserAgent: Mozilla/5.0 (Linux; Android 6.0.1; Nexus 5X Build/MMB29P) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.96 Mobile Safari/537.36 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)" do
before do
@useragent = UserAgent.parse("Mozilla/5.0 (Linux; Android 6.0.1; Nexus 5X Build/MMB29P) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.96 Mobile Safari/537.36 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")
end
it { expect(@useragent).to be_bot}
end
| 41.547619 | 234 | 0.711748 |
61fbef0317db942f5ac5a8ccafa9a60da3ad4906 | 472 | require "json"
class RubySnake < Sinatra::Base
def initialize(*)
@@moves = Hash.new(0)
super
end
def move(id)
@@moves[id] = (@@moves[id] + 1) % 4
%w(up left down right)[@@moves[id]]
end
post "/*/start" do
{
name: name(params),
color: "#123123"
}.to_json
end
post "/*/move" do
{move: move(id(params))}.to_json
end
def id(params)
params['splat']
end
def name(params)
id(params).join("-")
end
end
| 14.30303 | 39 | 0.548729 |
1abc2aec9ba80d344d166963c316fcd7abae2ad9 | 519 | require 'rails_helper'
RSpec.feature 'User login', type: :feature do
background do
login_user
visit root_path
end
scenario 'Can see welcome message' do
expect(page).to have_content "Welcome to the Recipe App, #{@user.name}"
end
scenario 'Can see Logout button' do
expect(page).to have_content 'Logout'
end
scenario 'Can see recipes button' do
expect(page).to have_content 'My recipes'
end
scenario 'Can see food button' do
expect(page).to have_content 'My food'
end
end
| 20.76 | 75 | 0.705202 |
e92a76cce9345b87b1c69e1da76c5e655ecbd9aa | 85 | # desc "Explaining what the task does"
# task :carload do
# # Task goes here
# end
| 17 | 38 | 0.670588 |
d57f6ce4f3fe3fca7f3883edc519bfb08192a38b | 234 | class CreateSubscriber < SpreeExtension::Migration[4.2]
def change
create_table :spree_chimpy_subscribers do |t|
t.string :email, null: false
t.boolean :subscribed, default: true
t.timestamps
end
end
end
| 23.4 | 55 | 0.700855 |
9122bfaa0fc5aff5b8a85fe787d4ae8f717739d9 | 8,748 | # -*- encoding: utf-8 -*-
shared_examples_for 'cnab240' do
let(:pagamento) do
Brcobranca::Remessa::Pagamento.new(valor: 199.9,
data_vencimento: Date.today,
nosso_numero: 123,
documento_sacado: '12345678901',
nome_sacado: 'PABLO DIEGO JOSÉ FRANCISCO DE PAULA JUAN NEPOMUCENO MARÍA DE LOS REMEDIOS CIPRIANO DE LA SANTÍSSIMA TRINIDAD RUIZ Y PICASSO',
endereco_sacado: 'RUA RIO GRANDE DO SUL São paulo Minas caçapa da silva junior',
bairro_sacado: 'São josé dos quatro apostolos magros',
cep_sacado: '12345678',
cidade_sacado: 'Santa rita de cássia maria da silva',
uf_sacado: 'SP',
valor_iof: 9.9,
valor_abatimento: 24.35,
documento_avalista: '12345678901',
nome_avalista: 'ISABEL CRISTINA LEOPOLDINA ALGUSTA MIGUELA GABRIELA RAFAELA GONZAGA DE BRAGANÇA E BOURBON',
numero_documento: '00000000123')
end
let(:params) do
p = {
empresa_mae: 'SOCIEDADE BRASILEIRA DE ZOOLOGIA LTDA',
agencia: '1234',
conta_corrente: '12345',
documento_cedente: '12345678901',
convenio: '123456',
sequencial_remessa: '1',
mensagem_1: 'Campo destinado ao preenchimento no momento do pagamento.',
mensagem_2: 'Campo destinado ao preenchimento no momento do pagamento.',
pagamentos: [pagamento]
}
if subject.class == Brcobranca::Remessa::Cnab240::Caixa
p.merge!(versao_aplicativo: '1234',
digito_agencia: '1')
elsif subject.class == Brcobranca::Remessa::Cnab240::BancoBrasil
p.merge!(carteira: '12',
variacao: '123')
end
p
end
let(:objeto) { subject.class.new(params) }
context 'header arquivo' do
it 'header arquivo deve ter 240 posicoes' do
expect(objeto.monta_header_arquivo.size).to eq 240
end
it 'header arquivo deve ter as informacoes nas posicoes corretas' do
header = objeto.monta_header_arquivo
expect(header[0..2]).to eq objeto.cod_banco # cod. do banco
expect(header[17]).to eq '1' # tipo inscricao do cedente
expect(header[18..31]).to eq '00012345678901' # documento do cedente
expect(header[32..51]).to eq objeto.codigo_convenio # informacoes do convenio
expect(header[52..71]).to eq objeto.info_conta # informacoes da conta
expect(header[72..101]).to eq 'SOCIEDADE BRASILEIRA DE ZOOLOG' # razao social do cedente
expect(header[157..162]).to eq '000001' # sequencial de remessa
expect(header[163..165]).to eq objeto.versao_layout_arquivo # versao do layout
end
end
context 'header lote' do
it 'header lote deve ter 240 posicoes' do
expect(objeto.monta_header_lote(1).size).to eq 240
end
it 'header lote deve ter as informacoes nas posicoes corretas' do
header = objeto.monta_header_lote 1
expect(header[0..2]).to eq objeto.cod_banco # cod. do banco
expect(header[3..6]).to eq '0001' # numero do lote
expect(header[13..15]).to eq objeto.versao_layout_lote # versao do layout
expect(header[17]).to eq '1' # tipo inscricao do cedente
expect(header[18..32]).to eq '000012345678901' # documento do cedente
expect(header[33..52]).to eq objeto.convenio_lote # informacoes do convenio
expect(header[53..72]).to eq objeto.info_conta # informacoes da conta
expect(header[73..102]).to eq 'SOCIEDADE BRASILEIRA DE ZOOLOG' # razao social do cedente
expect(header[103..142]).to eq 'Campo destinado ao preenchimento no mome' # 1a mensagem
expect(header[143..182]).to eq 'Campo destinado ao preenchimento no mome' # 2a mensagem
expect(header[183..190]).to eq '00000001' # sequencial de remessa
end
end
context 'segmento P' do
it 'segmento P deve ter 240 posicoes' do
expect(objeto.monta_segmento_p(pagamento, 1, 2).size).to eq 240
end
it 'segmento P deve ter as informacos nas posicoes corretas' do
segmento_p = objeto.monta_segmento_p pagamento, 1, 2
expect(segmento_p[0..2]).to eq objeto.cod_banco # codigo do banco
expect(segmento_p[3..6]).to eq '0001' # numero do lote
expect(segmento_p[8..12]).to eq '00002' # sequencial do registro no lote
expect(segmento_p[17..21]).to eq '01234' # agencia
expect(segmento_p[22]).to eq objeto.digito_agencia.to_s # digito da agencia
expect(segmento_p[23..56]).to eq objeto.complemento_p(pagamento) # complemento do segmento P
expect(segmento_p[62..76]).to eq '000000000000123' # numero do documento
expect(segmento_p[77..84]).to eq Date.today.strftime('%d%m%Y') # data de vencimento
expect(segmento_p[85..99]).to eq '000000000019990' # valor
expect(segmento_p[109..116]).to eq Date.today.strftime('%d%m%Y') # data de emissao
# mora
expect(segmento_p[141]).to eq '0' # codigo do desconto
expect(segmento_p[142..149]).to eq '00000000' # data de desconto
expect(segmento_p[150..164]).to eq ''.rjust(15, '0') # valor do desconto
expect(segmento_p[165..179]).to eq '000000000000990' # valor do IOF
expect(segmento_p[180..194]).to eq '000000000002435' # valor do abatimento
end
end
context 'segmento Q' do
it 'segmento Q deve ter 240 posicoes' do
expect(objeto.monta_segmento_q(pagamento, 1, 3).size).to eq 240
end
it 'segmento Q deve ter as informacoes nas posicoes corretas' do
segmento_q = objeto.monta_segmento_q pagamento, 1, 3
expect(segmento_q[0..2]).to eq objeto.cod_banco # codigo do banco
expect(segmento_q[3..6]).to eq '0001' # numero do lote
expect(segmento_q[8..12]).to eq '00003' # numero do registro no lote
expect(segmento_q[17]).to eq '1' # tipo inscricao sacado
expect(segmento_q[18..32]).to eq '000012345678901' # documento do sacado
expect(segmento_q[33..72]).to eq 'PABLO DIEGO JOSE FRANCISCO DE PAULA JUAN' # nome do sacado
expect(segmento_q[73..112]).to eq 'RUA RIO GRANDE DO SUL Sao paulo Minas ca' # endereco do sacado
expect(segmento_q[113..127]).to eq 'Sao jose dos qu' # bairro do sacado
expect(segmento_q[128..132]).to eq '12345' # CEP do sacado
expect(segmento_q[133..135]).to eq '678' # sufixo CEP do sacado
expect(segmento_q[136..150]).to eq 'Santa rita de c' # cidade do sacado
expect(segmento_q[151..152]).to eq 'SP' # UF do sacado
expect(segmento_q[153]).to eq '1' # tipo inscricao avalista
expect(segmento_q[154..168]).to eq '000012345678901' # documento avalista
expect(segmento_q[169..208]).to eq 'ISABEL CRISTINA LEOPOLDINA ALGUSTA MIGUE' # nome do avalista
end
end
context 'trailer lote' do
it 'trailer lote deve ter 240 posicoes' do
expect(objeto.monta_trailer_lote(1, 4).size).to eq 240
end
it 'trailer lote deve ter as informacoes nas posicoes corretas' do
trailer = objeto.monta_trailer_lote 1, 4
expect(trailer[0..2]).to eq objeto.cod_banco # cod. do banco
expect(trailer[3..6]).to eq '0001' # numero do lote
expect(trailer[17..22]).to eq '000004' # qtde de registros no lote
expect(trailer[23..239]).to eq objeto.complemento_trailer # complemento do registro trailer
end
end
context 'trailer arquivo' do
it 'trailer arquivo deve ter 240 posicoes' do
expect(objeto.monta_trailer_arquivo(1, 6).size).to eq 240
end
it 'trailer arquivo deve ter as informacoes nas posicoes corretas' do
trailer = objeto.monta_trailer_arquivo 1, 6
expect(trailer[0..2]).to eq objeto.cod_banco # cod. do banco
expect(trailer[17..22]).to eq '000001' # qtde de lotes
expect(trailer[23..28]).to eq '000006' # qtde de registros
end
end
context 'monta lote' do
it 'retorno de lote deve ser uma colecao com os registros' do
lote = objeto.monta_lote(1)
expect(lote.is_a?(Array)).to be true
expect(lote.count).to be 4 # header, segmento p, segmento q e trailer
end
it 'contador de registros deve acrescer 1 a cada registro' do
lote = objeto.monta_lote(1)
expect(lote[1][8..12]).to eq '00001' # segmento P
expect(lote[2][8..12]).to eq '00002' # segmento Q
expect(lote[3][17..22]).to eq '000004' # trailer do lote
end
end
context 'gera arquivo' do
it 'deve falhar se o objeto for invalido' do
expect { subject.class.new.gera_arquivo }.to raise_error(Brcobranca::RemessaInvalida)
end
it 'remessa deve conter os registros mais as quebras de linha' do
remessa = objeto.gera_arquivo
expect(remessa.size).to eq 1450
# quebras de linha
expect(remessa[240..241]).to eq "\r\n"
expect(remessa[482..483]).to eq "\r\n"
expect(remessa[724..725]).to eq "\r\n"
expect(remessa[966..967]).to eq "\r\n"
expect(remessa[1208..1209]).to eq "\r\n"
end
end
end
| 44.861538 | 145 | 0.680155 |
1d7d301b85e7feebb237613f81f2b8189f026a9d | 1,421 | describe Unidom::Accession::PostFulfillment, type: :model do
before :each do
end
after :each do
end
context do
model_attributes = {
fulfilled_id: SecureRandom.uuid,
fulfilled_type: 'Unidom::Accession::Fulfilled::Mock',
fulfiller_id: SecureRandom.uuid,
fulfiller_type: 'Unidom::Accession::Fulfiller::Mock'
}
it_behaves_like 'Unidom::Common::Concerns::ModelExtension', model_attributes
it_behaves_like 'scope', :part_time, [
{ attributes_collection: [ model_attributes ], count_diff: 0, args: [ true ] },
{ attributes_collection: [ model_attributes ], count_diff: 1, args: [ false ] },
{ attributes_collection: [ model_attributes.merge(part_time: true) ], count_diff: 1, args: [ true ] },
{ attributes_collection: [ model_attributes.merge(part_time: true) ], count_diff: 0, args: [ false ] } ]
it_behaves_like 'scope', :temporary, [
{ attributes_collection: [ model_attributes ], count_diff: 0, args: [ true ] },
{ attributes_collection: [ model_attributes ], count_diff: 1, args: [ false ] },
{ attributes_collection: [ model_attributes.merge(temporary: true) ], count_diff: 1, args: [ true ] },
{ attributes_collection: [ model_attributes.merge(temporary: true) ], count_diff: 0, args: [ false ] } ]
end
end
| 40.6 | 110 | 0.630542 |
aca06abb1906919cbe664d125e803d327825a5d8 | 5,256 | =begin
PureCloud Platform API
With the PureCloud Platform API, you can control all aspects of your PureCloud environment. With the APIs you can access the system configuration, manage conversations and more.
OpenAPI spec version: v2
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
License: UNLICENSED
https://help.mypurecloud.com/articles/terms-and-conditions/
Terms of Service: https://help.mypurecloud.com/articles/terms-and-conditions/
=end
require 'date'
module PureCloud
class ConversationVideoEventTopicJourneyCustomerSession
attr_accessor :id
attr_accessor :type
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'id' => :'id',
:'type' => :'type'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'id' => :'String',
:'type' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}
if attributes.has_key?(:'id')
self.id = attributes[:'id']
end
if attributes.has_key?(:'type')
self.type = attributes[:'type']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properies with the reasons
def list_invalid_properties
invalid_properties = Array.new
return invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
id == o.id &&
type == o.type
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[id, type].hash
end
# build the object from hash
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
else
#TODO show warning in debug mode
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
else
# data not found in attributes(hash), not an issue as the data can be optional
end
end
self
end
# Deserialize a raw JSON value into the declared swagger type.
# @param type [String] type name from swagger_types (e.g. "String",
#   "Array<Foo>", "Hash<String, Foo>", or a PureCloud model name)
# @param value [Object] raw decoded JSON value
# @return [Object] the coerced value
# NOTE(review): the Regexp `when` branches rely on Regexp#=== matching the
# Symbol produced by type.to_sym (supported since Ruby 1.9) — confirm the
# minimum supported Ruby here.
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
# Accept a generous set of truthy spellings; everything else is false.
if value.to_s =~ /^(true|t|yes|y|1)$/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
# Recursively deserialize each element with the declared inner type.
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+), (?<v_type>.+)>\z/
# Recursively deserialize both keys and values.
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
# Any other name is assumed to be a generated model in the PureCloud
# namespace; build it from the hash payload.
_model = Object.const_get("PureCloud").const_get(type).new
_model.build_from_hash(value)
end
end
# Returns the string representation of the object (its #to_hash rendering).
# @return [String]
def to_s
to_hash.to_s
end
# to_body is an alias for to_hash (kept for backward compatibility).
# @return [Hash] the object in hash form
def to_body
to_hash
end
# Return the object in the form of a hash keyed by JSON attribute name.
# nil-valued attributes are omitted.
# @return [Hash]
def to_hash
  self.class.attribute_map.each_with_object({}) do |(attr, param), result|
    value = send(attr)
    result[param] = _to_hash(value) unless value.nil?
  end
end
# Serialize one value for #to_hash: Arrays are compacted and mapped
# recursively, Hash values are mapped recursively, objects responding to
# #to_hash are converted, anything else passes through unchanged.
def _to_hash(value)
  case value
  when Array
    value.compact.map { |item| _to_hash(item) }
  when Hash
    value.each_with_object({}) { |(k, v), out| out[k] = _to_hash(v) }
  else
    value.respond_to?(:to_hash) ? value.to_hash : value
  end
end
end
end
| 23.154185 | 177 | 0.576294 |
219c087c41de0b7a829a2709c5dfcd3ab74a2986 | 5,775 | module ApplicationHelper::PageLayouts
# Whether the current screen renders the left listnav sidebar.
def layout_uses_listnav?
  # Forms never show the listnav sidebar.
  return false if @in_a_form

  # Layouts that render without the listnav sidebar.
  no_listnav_layouts = %w[
    about
    all_tasks
    chargeback
    configuration
    container_dashboard
    container_topology
    ems_infra_dashboard
    infra_topology
    network_topology
    cloud_topology
    diagnostics
    exception
    miq_ae_automate_button
    miq_ae_customization
    miq_ae_export
    miq_ae_logs
    miq_ae_tools
    miq_policy
    miq_policy_export
    miq_policy_logs
    monitor_alerts_overview
    monitor_alerts_list
    monitor_alerts_most_recent
    my_tasks
    ops
    physical_infra_overview
    physical_infra_topology
    physical_network_port
    pxe
    report
    server_build
    storage
    storage_pod
  ]
  return false if no_listnav_layouts.include?(@layout)

  # Show types that render without the listnav sidebar.
  no_listnav_showtypes = %w[
    ad_hoc_metrics
    consumption
    dashboard
    dialog_provision
    topology
  ]
  return false if no_listnav_showtypes.include?(@showtype)

  return false if dashboard_no_listnav?
  return false if @layout.starts_with?("miq_request")
  return false if controller.action_name.end_with?("tagging_edit")

  true
end
# listnav always implies paging; this only handles the non-listnav case.
# Only a handful of task/request layouts page their lists, and never on a
# `show` action.
def layout_uses_paging?
  return false if params[:action] == 'show'

  paging_layouts = %w[
    all_tasks
    miq_request_ae
    miq_request_host
    miq_request_vm
    my_tasks
  ]
  paging_layouts.include?(@layout)
end
# Whether the current screen renders the top tab bar. Both the layout and
# the showtype must allow tabs.
def layout_uses_tabs?
  # The login flow never shows tabs.
  return false if %w[login authenticate auth_error].include?(controller.action_name)
  layout = case @layout
           when 'container_dashboard', 'dashboard', 'ems_infra_dashboard', 'exception', 'physical_infra_overview',
                'monitor_alerts_list', 'monitor_alerts_most_recent', 'monitor_alerts_overview'
             false
           when 'report'
             # Report editor actions manage their own chrome.
             !%w[new create edit copy update explorer].include?(controller.action_name)
           when 'timeline'
             @in_a_form
           when 'vm'
             controller.action_name != 'edit'
           else
             true
           end
  showtype = case @showtype
             when 'dashboard'
               # A dashboard showtype on a provider dashboard layout
               # ("*_dashboard") shows no tabs.
               [email protected]_s.ends_with?("_dashboard")
             when 'topology'
               false
             else
               true
             end
  layout && showtype
end
# Breadcrumbs are shown everywhere except a few chrome-less layouts.
def layout_uses_breadcrumbs?
  no_breadcrumb_layouts = %w[dashboard exception support configuration]
  !no_breadcrumb_layouts.include?(@layout)
end
# True when the dashboard layout is rendering one of the actions that
# suppresses the listnav sidebar.
def dashboard_no_listnav?
  return false unless @layout == "dashboard"

  %w[auth_error change_tab show].include?(controller.action_name)
end
# Pick the partial for the center area based on listnav requirements.
def center_div_partial
  return "layouts/center_div_with_listnav" if layout_uses_listnav?
  return "layouts/center_div_dashboard_no_listnav" if dashboard_no_listnav?

  "layouts/center_div_no_listnav"
end
# Memoized: whether the current request renders inside an explorer-style
# inner layout. NOTE: ||= re-evaluates on every call while the answer is
# false; harmless, since the inputs are per-request constants.
def inner_layout_present?
  @inner_layout_present ||=
    begin
      ctrl = params[:controller]
      action = params[:action]
      @explorer ||
        action == "explorer" ||
        (ctrl == "chargeback" && action == "chargeback") ||
        (ctrl == "miq_ae_tools" && %w[resolve show].include?(action)) ||
        (ctrl == "miq_policy" && action == "rsop") ||
        %w[utilization planning bottlenecks].include?(ctrl)
    end
end
# Memoized: whether we are on one of the "simulation" screens.
# NOTE: ||= re-evaluates on every call while the answer is false.
def simulate?
  @simulate ||=
    begin
      ctrl = controller.controller_name
      action = controller.action_name
      (ctrl == 'miq_policy' && action == 'rsop') ||
        (ctrl == 'miq_ae_tools' && action == 'resolve') ||
        ctrl == 'planning'
    end
end
# Whether saved-report style paging should be used for the current view.
# NOTE(review): relies on @sb[:pages] and @html being populated by the
# controller and on x_active_tree from the explorer mixin — confirm.
def saved_report_paging?
  # saved report doesn't use miq_report object,
  # need to use a different paging view to page thru a saved report
  @sb[:pages] && @html && %i[reports_tree savedreports_tree cb_reports_tree].include?(x_active_tree)
end
# Whether the advanced-search panel should be shown for the current tree.
# Returns a falsy value (nil) rather than strictly false when x_tree is
# nil — callers use the result only in boolean context.
def show_advanced_search?
  x_tree && ((tree_with_advanced_search? && !@record) || @show_adv_search)
end
# Whether the advanced-search toolbar button should be shown.
# Two ways in: a plain list view (show_list, not reached via a menu click,
# not in a form) of one of the searchable layouts below, or an explorer
# tree that supports advanced search with no record selected.
def show_adv_search?
  # Layouts whose list views support advanced search.
  show_search = %w[
    auth_key_pair_cloud
    availability_zone
    automation_manager
    cloud_network
    cloud_object_store_container
    cloud_object_store_object
    cloud_subnet
    cloud_tenant
    cloud_volume
    cloud_volume_backup
    cloud_volume_snapshot
    cloud_volume_type
    configuration_job
    container
    container_build
    container_group
    container_image
    container_image_registry
    container_node
    container_project
    container_replicator
    container_route
    container_service
    container_template
    ems_cloud
    ems_cluster
    ems_container
    ems_infra
    ems_middleware
    ems_network
    ems_physical_infra
    ems_storage
    flavor
    floating_ip
    generic_object_definition
    host
    host_aggregate
    load_balancer
    middleware_deployment
    middleware_domain
    middleware_server
    miq_template
    network_port
    network_router
    offline
    orchestration_stack
    persistent_volume
    physical_server
    provider_foreman
    resource_pool
    retired
    security_group
    service
    templates
    vm
  ]
  (@lastaction == "show_list" && !session[:menu_click] && show_search.include?(@layout) && !@in_a_form) ||
    (@explorer && x_tree && tree_with_advanced_search? && !@record)
end
attr_reader :big_iframe
# a layout which gives full control over the center, but always provides the navbars and menus - to be overriden per-controller, used by v2v
# @return [nil] nil means "no special full-center layout" for this controller
def layout_full_center
  nil
end
end
| 26.25 | 142 | 0.645022 |
ff7cd9ec23427a8f88fc903f1fdbbf60e07ea6c2 | 145 | require 'test_helper'
module Blogg
  # Unit tests for Blogg::Post. Currently contains only the placeholder
  # generated by the Rails engine scaffold.
  class PostTest < ActiveSupport::TestCase
    # test "the truth" do
    #   assert true
    # end
  end
end
| 14.5 | 42 | 0.662069 |
18dc5d4f73a1f345b07f24fe425cf973c48afe82 | 64 | module Junctions
module Rails
VERSION = '0.0.1'
end
end
| 10.666667 | 21 | 0.65625 |
f7cf2f8b6cdcf302f12fa13ad2d7a7e4e5edd80b | 291 | # frozen_string_literal: true
# Creates the pages table. Pages use UUID primary keys and belong to an
# app (also UUID-keyed); page_order stores the page's position within its
# app — presumably maintained by the application, not the DB. Column order
# below is schema-significant, so it must not be rearranged.
class CreatePages < ActiveRecord::Migration[7.0]
  def change
    create_table :pages, id: :uuid do |t|
      t.string :name
      t.integer :page_order
      # NOT NULL + FK: a page cannot exist without its app.
      t.belongs_to :app, null: false, foreign_key: true, type: :uuid
      t.timestamps
    end
  end
end
| 20.785714 | 68 | 0.666667 |
7a2549451f79687168db41f9b503869bcfcabe78 | 1,577 | class RubricCriteriaController < ApplicationController
before_action :authorize_only_for_admin
# Stream all rubric criteria for the assignment as a CSV attachment.
# Each row is: criterion name, max mark, then every level name, then every
# level description (names and descriptions kept in the same level order).
def download_csv
  @assignment = Assignment.find(params[:assignment_id])
  file_out = MarkusCsv.generate(@assignment.get_criteria(:all, :rubric)) do |criterion|
    level_names = criterion.levels.map(&:name)
    level_descriptions = criterion.levels.map(&:description)
    [criterion.name, criterion.max_mark] + level_names + level_descriptions
  end
  send_data(file_out,
            type: 'text/csv',
            filename: "#{@assignment.short_identifier}_rubric_criteria.csv",
            disposition: 'attachment')
end
# Import rubric criteria from an uploaded CSV file.
# Expects params[:csv_upload][:rubric] (the uploaded file) and
# params[:encoding]. All rows are processed inside a single transaction;
# per-line successes and failures are flashed separately. Always redirects
# back to the criteria index for the assignment.
# NOTE(review): `result` is presumably a hash with :invalid_lines and
# :valid_lines built by MarkusCsv.parse — confirm against that helper.
def csv_upload
  @assignment = Assignment.find(params[:assignment_id])
  encoding = params[:encoding]
  if params[:csv_upload] && params[:csv_upload][:rubric]
    file = params[:csv_upload][:rubric]
    result = RubricCriterion.transaction do
      MarkusCsv.parse(file.read, encoding: encoding) do |row|
        # Skip rows that serialize to nothing (blank lines).
        next if CSV.generate_line(row).strip.empty?
        RubricCriterion.create_or_update_from_csv_row(row, @assignment)
      end
    end
    unless result[:invalid_lines].empty?
      flash_message(:error, result[:invalid_lines])
    end
    unless result[:valid_lines].empty?
      flash_message(:success, result[:valid_lines])
    end
  else
    flash_message(:error, I18n.t('upload_errors.missing_file'))
  end
  redirect_to controller: 'criteria', action: 'index', id: @assignment.id
end
end
| 34.282609 | 89 | 0.680406 |
e2e5c0f996d8ef370a3eeb000ae8b402018f0cf6 | 414 | # frozen_string_literal: true
require 'bundler/setup'
require 'service_actor'
require 'pry'
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = '.rspec_status'
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
| 23 | 66 | 0.758454 |
38fecac1bfe21e394697a474a2d00cd5c7e37200 | 4,319 | # frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
# Class that migrates events for the new push event payloads setup. All
# events are copied to a shadow table, and push events will also have a row
# created in the push_event_payloads table.
class MigrateEventsToPushEventPayloads
class Event < ActiveRecord::Base
self.table_name = 'events'
serialize :data
BLANK_REF = ('0' * 40).freeze
TAG_REF_PREFIX = 'refs/tags/'.freeze
MAX_INDEX = 69
PUSHED = 5
def push_event?
action == PUSHED && data.present?
end
# Title of the last commit in the push: the first line of its message,
# truncated to 70 characters. Returns nil when the push carries no
# commits or the last commit has no message.
def commit_title
  commit = commits.last
  return nil unless commit && commit[:message]

  message = commit[:message]
  newline_index = message.index("\n")
  message = message[0..newline_index] if newline_index
  message.strip.truncate(70)
end
# SHA the ref moved from, or nil for a ref creation (no previous commit).
def commit_from_sha
  create? ? nil : data[:before]
end
# SHA the ref moved to, or nil for a ref removal (no resulting commit).
def commit_to_sha
  remove? ? nil : data[:after]
end
def data
super || {}
end
def commits
data[:commits] || []
end
def commit_count
data[:total_commits_count] || 0
end
def ref
data[:ref]
end
# Ref name with its fully-qualified prefix stripped: "refs/tags/" is 10
# characters; otherwise 11 characters are dropped — presumably the
# "refs/heads/" branch prefix (confirm no other prefixes reach here).
def trimmed_ref_name
  prefix_length = ref_type == :tag ? 10 : 11
  ref[prefix_length..-1]
end
def create?
data[:before] == BLANK_REF
end
def remove?
data[:after] == BLANK_REF
end
def push_action
if create?
:created
elsif remove?
:removed
else
:pushed
end
end
def ref_type
if ref.start_with?(TAG_REF_PREFIX)
:tag
else
:branch
end
end
end
class EventForMigration < ActiveRecord::Base
self.table_name = 'events_for_migration'
end
class PushEventPayload < ActiveRecord::Base
self.table_name = 'push_event_payloads'
enum action: {
created: 0,
removed: 1,
pushed: 2
}
enum ref_type: {
branch: 0,
tag: 1
}
end
# start_id - The start ID of the range of events to process
# end_id - The end ID of the range to process.
def perform(start_id, end_id)
  # Skip entirely when any required table is missing (partially migrated
  # installations) — see #migrate?.
  return unless migrate?
  find_events(start_id, end_id).each { |event| process_event(event) }
end
# Copy one event into the shadow table and, for push events, create its
# push_event_payloads row — both inside a single transaction so a partial
# copy can never be left behind.
def process_event(event)
  ActiveRecord::Base.transaction do
    replicate_event(event)
    create_push_event_payload(event) if event.push_event?
  end
rescue ActiveRecord::InvalidForeignKey => e
  # A foreign key error means the associated event was removed. In this
  # case we'll just skip migrating the event.
  Rails.logger.error("Unable to migrate event #{event.id}: #{e}")
end
def replicate_event(event)
new_attributes = event.attributes
.with_indifferent_access.except(:title, :data)
EventForMigration.create!(new_attributes)
end
def create_push_event_payload(event)
commit_from = pack(event.commit_from_sha)
commit_to = pack(event.commit_to_sha)
PushEventPayload.create!(
event_id: event.id,
commit_count: event.commit_count,
ref_type: event.ref_type,
action: event.push_action,
commit_from: commit_from,
commit_to: commit_to,
ref: event.trimmed_ref_name,
commit_title: event.commit_title
)
end
def find_events(start_id, end_id)
Event
.where('NOT EXISTS (SELECT true FROM events_for_migration WHERE events_for_migration.id = events.id)')
.where(id: start_id..end_id)
end
def migrate?
Event.table_exists? && PushEventPayload.table_exists? &&
EventForMigration.table_exists?
end
# Convert a hex SHA string into its packed binary representation, or nil
# when no SHA is given (ref creations/removals).
def pack(value)
  return unless value

  [value].pack('H*')
end
end
end
end
| 23.994444 | 112 | 0.557768 |
6a752707e474458b34ea0f5f53a16a2e865c7b09 | 4,541 | require 'rails_helper'
# This spec was generated by rspec-rails when you ran the scaffold generator.
# It demonstrates how one might use RSpec to test the controller code that
# was generated by Rails when you ran the scaffold generator.
#
# It assumes that the implementation code is generated by the rails scaffold
# generator. If you are using any extension libraries to generate different
# controller code, this generated spec may or may not pass.
#
# It only uses APIs available in rails and/or rspec-rails. There are a number
# of tools you can use to make these specs even more expressive, but we're
# sticking to rails and rspec-rails APIs to keep things simple and stable.
RSpec.describe "/tickets", type: :request do
# This should return the minimal set of attributes required to create a valid
# Ticket. As you add validations to Ticket, be sure to
# adjust the attributes here as well.
let(:valid_attributes) {
skip("Add a hash of attributes valid for your model")
}
let(:invalid_attributes) {
skip("Add a hash of attributes invalid for your model")
}
# This should return the minimal set of values that should be in the headers
# in order to pass any filters (e.g. authentication) defined in
# TicketsController, or in your router and rack
# middleware. Be sure to keep this updated too.
let(:valid_headers) {
{}
}
describe "GET /index" do
it "renders a successful response" do
Ticket.create! valid_attributes
get tickets_url, headers: valid_headers, as: :json
expect(response).to be_successful
end
end
describe "GET /show" do
it "renders a successful response" do
ticket = Ticket.create! valid_attributes
get ticket_url(ticket), as: :json
expect(response).to be_successful
end
end
describe "POST /create" do
context "with valid parameters" do
it "creates a new Ticket" do
expect {
post tickets_url,
params: { ticket: valid_attributes }, headers: valid_headers, as: :json
}.to change(Ticket, :count).by(1)
end
it "renders a JSON response with the new ticket" do
post tickets_url,
params: { ticket: valid_attributes }, headers: valid_headers, as: :json
expect(response).to have_http_status(:created)
expect(response.content_type).to match(a_string_including("application/json"))
end
end
context "with invalid parameters" do
it "does not create a new Ticket" do
expect {
post tickets_url,
params: { ticket: invalid_attributes }, as: :json
}.to change(Ticket, :count).by(0)
end
it "renders a JSON response with errors for the new ticket" do
post tickets_url,
params: { ticket: invalid_attributes }, headers: valid_headers, as: :json
expect(response).to have_http_status(:unprocessable_entity)
expect(response.content_type).to eq("application/json")
end
end
end
describe "PATCH /update" do
context "with valid parameters" do
let(:new_attributes) {
skip("Add a hash of attributes valid for your model")
}
it "updates the requested ticket" do
ticket = Ticket.create! valid_attributes
patch ticket_url(ticket),
params: { ticket: new_attributes }, headers: valid_headers, as: :json
ticket.reload
skip("Add assertions for updated state")
end
it "renders a JSON response with the ticket" do
ticket = Ticket.create! valid_attributes
patch ticket_url(ticket),
params: { ticket: new_attributes }, headers: valid_headers, as: :json
expect(response).to have_http_status(:ok)
expect(response.content_type).to match(a_string_including("application/json"))
end
end
context "with invalid parameters" do
it "renders a JSON response with errors for the ticket" do
ticket = Ticket.create! valid_attributes
patch ticket_url(ticket),
params: { ticket: invalid_attributes }, headers: valid_headers, as: :json
expect(response).to have_http_status(:unprocessable_entity)
expect(response.content_type).to eq("application/json")
end
end
end
describe "DELETE /destroy" do
it "destroys the requested ticket" do
ticket = Ticket.create! valid_attributes
expect {
delete ticket_url(ticket), headers: valid_headers, as: :json
}.to change(Ticket, :count).by(-1)
end
end
end
| 35.476563 | 87 | 0.677604 |
1a5f8907f18de8c348e792876675f4ca6c0b7816 | 2,250 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
# Run rails dev:cache to toggle caching.
if Rails.root.join('tmp', 'caching-dev.txt').exist?
config.action_controller.perform_caching = true
config.action_controller.enable_fragment_cache_logging = true
config.cache_store = :memory_store
config.public_file_server.headers = {
'Cache-Control' => "public, max-age=#{2.days.to_i}"
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :cloudinary
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.perform_caching = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Highlight code that triggered database queries in logs.
config.active_record.verbose_query_logs = true
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations.
# config.action_view.raise_on_missing_translations = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
# config.file_watcher = ActiveSupport::EventedFileUpdateChecker
end
| 35.714286 | 87 | 0.763556 |
1c3b17b3a728cf7efcc36b8c156ea4b9a414a96b | 2,815 | module MCollective
# Wrapper around systemu that handles executing of system commands
# in a way that makes stdout, stderr and status available. Supports
# timeouts and sets a default sane environment.
#
# s = Shell.new("date", opts)
# s.runcommand
# puts s.stdout
# puts s.stderr
# puts s.status.exitstatus
#
# Options hash can have:
#
# cwd - the working directory the command will be run from
# stdin - a string that will be sent to stdin of the program
# stdout - a variable that will receive stdout, must support <<
# stderr - a variable that will receive stdin, must support <<
# environment - the shell environment, defaults to include LC_ALL=C
# set to nil to clear the environment even of LC_ALL
#
class Shell
attr_reader :environment, :command, :status, :stdout, :stderr, :stdin, :cwd
# Create a shell wrapper for +command+.
#
# Supported options:
#   stdout      - object receiving stdout; must support <<
#   stderr      - object receiving stderr; must support <<
#   stdin       - String piped to the program's stdin
#   cwd         - working directory (must exist); defaults to Dir.tmpdir
#   environment - extra environment variables merged over LC_ALL=C;
#                 pass nil to start from an empty environment
# Unknown options are silently ignored.
def initialize(command, options={})
  @environment = {"LC_ALL" => "C"}
  @command = command
  @status = nil
  @stdout = ""
  @stderr = ""
  @stdin = nil
  @cwd = Dir.tmpdir

  options.each do |name, setting|
    case name.to_s
    when "stdout"
      raise "stdout should support <<" unless setting.respond_to?("<<")
      @stdout = setting
    when "stderr"
      raise "stderr should support <<" unless setting.respond_to?("<<")
      @stderr = setting
    when "stdin"
      raise "stdin should be a String" unless setting.is_a?(String)
      @stdin = setting
    when "cwd"
      raise "Directory #{setting} does not exist" unless File.directory?(setting)
      @cwd = setting
    when "environment"
      @environment = setting.nil? ? {} : @environment.merge(setting.dup)
    end
  end
end
# Actually does the systemu call passing in the correct environment, stdout and stderr
# Stores the result in @status and returns it; output accumulates into
# whatever objects @stdout/@stderr were configured with.
# NOTE(review): `systemu` presumably comes from the systemu gem mixed in
# elsewhere — confirm.
def runcommand
  opts = {"env" => @environment,
          "stdout" => @stdout,
          "stderr" => @stderr,
          "cwd" => @cwd}
  opts["stdin"] = @stdin if @stdin
  # Check if the parent thread is alive. If it should die,
  # and the process spawned by systemu is still alive,
  # fire off a blocking waitpid and wait for the process to
  # finish so that we can avoid zombies.
  thread = Thread.current
  @status = systemu(@command, opts) do |cid|
    begin
      while(thread.alive?)
        sleep 0.1
      end
      # Parent thread ended: reap the child so it does not zombify.
      Process.waitpid(cid) if Process.getpgid(cid)
    rescue SystemExit
    rescue Errno::ECHILD
    rescue Exception => e
      Log.info("Unexpected exception received while waiting for child process: #{e.class}: #{e}")
    end
  end
end
end
end
| 30.934066 | 101 | 0.569805 |
03318a89599084b4c6673b4a0594b07dce4c9938 | 1,837 | # it is war-file
packaging 'war'
# get jruby dependencies
properties( 'jruby.version' => '@project.version@',
'project.build.sourceEncoding' => 'utf-8' )
pom( 'org.jruby:jruby', '${jruby.version}' )
# a gem to be used
gem 'flickraw', '0.9.7'
repository( :url => 'https://otto.takari.io/content/repositories/rubygems/maven/releases',
:id => 'rubygems-releases' )
jruby_plugin :gem, :includeRubygemsInResources => true do
execute_goal :initialize
end
execute 'jrubydir', 'initialize' do |ctx|
require 'jruby/commands'
JRuby::Commands.generate_dir_info( ctx.project.build.directory.to_pathname + '/rubygems' )
end
# ruby-maven will dump an equivalent pom.xml
properties( 'tesla.dump.pom' => 'pom.xml',
'jruby.home' => '../../../../../' )
# start jetty for the tests
plugin( 'org.eclipse.jetty:jetty-maven-plugin', '9.1.3.v20140225',
:path => '/',
:stopPort => 9999,
:stopKey => 'foo' ) do
execute_goal( 'start', :id => 'start jetty', :phase => 'pre-integration-test', :daemon => true )
execute_goal( 'stop', :id => 'stop jetty', :phase => 'post-integration-test' )
end
# download files during the tests
result = nil
execute 'download', :phase => 'integration-test' do
require 'open-uri'
result = open( 'http://localhost:8080' ).string
puts result
end
# verify the downloads
execute 'check download', :phase => :verify do
  # Plain substring checks. The previous version interpolated each expected
  # string into an unescaped regex (so "." matched any character — e.g.
  # "flickraw-0.9.7" would also match "flickraw-0x9y7") and repeated the
  # same unless/raise block three times.
  [ 'hello world:',
    'uri:classloader:/gems/flickraw-0.9.7',
    'snakeyaml-1.13.0' ].each do |expected|
    unless result.include?( expected )
      raise "missed expected string in download: #{expected}"
    end
  end
end
| 30.616667 | 99 | 0.659227 |
b92f5e68676670d002fea076476aa095c869a453 | 15,370 | # encoding: utf-8
require "logstash/namespace"
require "logstash/config/registry"
require "logstash/logging"
require "logstash/util/password"
require "logstash/version"
require "i18n"
# This module is meant as a mixin to classes wishing to be configurable from
# config files
#
# The idea is that you can do this:
#
# class Foo < LogStash::Config
# # Add config file settings
# config "path" => ...
# config "tag" => ...
#
# # Add global flags (becomes --foo-bar)
# flag "bar" => ...
# end
#
# And the config file should let you do:
#
# foo {
# "path" => ...
# "tag" => ...
# }
#
module LogStash::Config::Mixin
attr_accessor :config
attr_accessor :original_params
CONFIGSORT = {
Symbol => 0,
String => 0,
Regexp => 100,
}
# This method is called when someone does 'include LogStash::Config'
def self.included(base)
# Add the DSL methods to the 'base' given.
base.extend(LogStash::Config::Mixin::DSL)
end
# Initialize this plugin instance from the parsed config `params`:
# emits deprecation warnings, fills in declared defaults, validates
# (raising LogStash::ConfigurationError on failure — validation may also
# coerce values in place), and finally exposes each setting as an @ivar.
def config_init(params)
  # Validation will modify the values inside params if necessary.
  # For example: converting a string to a number, etc.

  # Keep a copy of the original config params so that we can later
  # differentiate between explicit configuration and implicit (default)
  # configuration.
  @original_params = params.clone

  # store the plugin type, turns LogStash::Inputs::Base into 'input'
  @plugin_type = self.class.ancestors.find { |a| a.name =~ /::Base$/ }.config_name

  # warn about deprecated variable use
  params.each do |name, value|
    opts = self.class.get_config[name]
    if opts && opts[:deprecated]
      # :deprecated may be true or a String of extra guidance; %PLUGIN%
      # in that String is replaced with this plugin's config name.
      extra = opts[:deprecated].is_a?(String) ? opts[:deprecated] : ""
      extra.gsub!("%PLUGIN%", self.class.config_name)
      @logger.warn("You are using a deprecated config setting " +
                   "#{name.inspect} set in #{self.class.config_name}. " +
                   "Deprecated settings will continue to work, " +
                   "but are scheduled for removal from logstash " +
                   "in the future. #{extra} If you have any questions " +
                   "about this, please visit the #logstash channel " +
                   "on freenode irc.", :name => name, :plugin => self)
    end
  end

  # Set defaults from 'config :foo, :default => somevalue'
  self.class.get_config.each do |name, opts|
    next if params.include?(name.to_s)
    if opts.include?(:default) and (name.is_a?(Symbol) or name.is_a?(String))
      # default values should be cloned if possible
      # cloning prevents instances from sharing (and mutating) a single
      # default object; immutable value types are assigned directly.
      case opts[:default]
      when FalseClass, TrueClass, NilClass, Numeric
        params[name.to_s] = opts[:default]
      else
        params[name.to_s] = opts[:default].clone
      end
    end

    # Allow plugins to override default values of config settings
    if self.class.default?(name)
      params[name.to_s] = self.class.get_default(name)
    end
  end

  if !self.class.validate(params)
    raise LogStash::ConfigurationError,
      I18n.t("logstash.agent.configuration.invalid_plugin_settings")
  end

  # set instance variables like '@foo' for each config value given.
  params.each do |key, value|
    next if key[0, 1] == "@"

    # Set this key as an instance variable only if it doesn't start with an '@'
    @logger.debug("config #{self.class.name}/@#{key} = #{value.inspect}")
    instance_variable_set("@#{key}", value)
  end

  @config = params
end # def config_init
module DSL
attr_accessor :flags
# If name is given, set the name and return it.
# If no name given (nil), return the current name.
def config_name(name=nil)
@config_name = name if !name.nil?
LogStash::Config::Registry.registry[@config_name] = self
return @config_name
end
def plugin_status(status=nil)
milestone(status)
end
def milestone(m=nil)
@milestone = m if !m.nil?
return @milestone
end
# Define a new configuration setting
def config(name, opts={})
@config ||= Hash.new
# TODO(sissel): verify 'name' is of type String, Symbol, or Regexp
name = name.to_s if name.is_a?(Symbol)
@config[name] = opts # ok if this is empty
if name.is_a?(String)
define_method(name) { instance_variable_get("@#{name}") }
define_method("#{name}=") { |v| instance_variable_set("@#{name}", v) }
end
end # def config
def default(name, value)
@defaults ||= {}
@defaults[name.to_s] = value
end
def get_config
return @config
end # def get_config
def get_default(name)
return @defaults && @defaults[name]
end
def default?(name)
return @defaults && @defaults.include?(name)
end
def options(opts)
# add any options from this class
prefix = self.name.split("::").last.downcase
@flags.each do |flag|
flagpart = flag[:args].first.gsub(/^--/,"")
# TODO(sissel): logger things here could help debugging.
opts.on("--#{prefix}-#{flagpart}", *flag[:args][1..-1], &flag[:block])
end
end # def options
# This is called whenever someone subclasses a class that has this mixin.
def inherited(subclass)
# Copy our parent's config to a subclass.
# This method is invoked whenever someone subclasses us, like:
# class Foo < Bar ...
subconfig = Hash.new
if [email protected]?
@config.each do |key, val|
subconfig[key] = val
end
end
subclass.instance_variable_set("@config", subconfig)
@@milestone_notice_given = false
end # def inherited
# Validate the plugin params against this class's declared config.
# Side effects: sets @plugin_name, @plugin_type and @logger for the
# individual validate_check_* helpers. Short-circuits: later checks are
# skipped once one fails (matching the original &&= behavior).
def validate(params)
  @plugin_name = config_name
  @plugin_type = ancestors.find { |a| a.name =~ /::Base$/ }.config_name
  @logger = Cabin::Channel.get(LogStash)

  validate_milestone &&
    validate_check_invalid_parameter_names(params) &&
    validate_check_required_parameter_names(params) &&
    validate_check_parameter_values(params)
end # def validate
def validate_milestone
return true if @@milestone_notice_given
docmsg = "For more information about plugin milestones, see http://logstash.net/docs/#{LOGSTASH_VERSION}/plugin-milestones "
plugin_type = ancestors.find { |a| a.name =~ /::Base$/ }.config_name
case @milestone
when 0,1,2
@logger.warn(I18n.t("logstash.plugin.milestone.#{@milestone}",
:type => plugin_type, :name => @config_name,
:LOGSTASH_VERSION => LOGSTASH_VERSION))
when 3
# No message to log for milestone 3 plugins.
when nil
raise "#{@config_name} must set a milestone. #{docmsg}"
else
raise "#{@config_name} set an invalid plugin status #{@milestone}. Valid values are 0, 1, 2, or 3. #{docmsg}"
end
@@milestone_notice_given = true
return true
end
def validate_check_invalid_parameter_names(params)
invalid_params = params.keys
# Filter out parameters that match regexp keys.
# These are defined in plugins like this:
# config /foo.*/ => ...
@config.each_key do |config_key|
if config_key.is_a?(Regexp)
invalid_params.reject! { |k| k =~ config_key }
elsif config_key.is_a?(String)
invalid_params.reject! { |k| k == config_key }
end
end
if invalid_params.size > 0
invalid_params.each do |name|
@logger.error("Unknown setting '#{name}' for #{@plugin_name}")
end
return false
end # if invalid_params.size > 0
return true
end # def validate_check_invalid_parameter_names
def validate_check_required_parameter_names(params)
is_valid = true
@config.each do |config_key, config|
next unless config[:required]
if config_key.is_a?(Regexp)
next if params.keys.select { |k| k =~ config_key }.length > 0
elsif config_key.is_a?(String)
next if params.keys.member?(config_key)
end
@logger.error(I18n.t("logstash.agent.configuration.setting_missing",
:setting => config_key, :plugin => @plugin_name,
:type => @plugin_type))
is_valid = false
end
return is_valid
end
# Run each param through its declared validator, replacing the raw value
# with the coerced result on success and logging on failure.
# @return [Boolean] true when every parameter validated
def validate_check_parameter_values(params)
  # Filter out parameters that match regexp keys.
  # These are defined in plugins like this:
  #   config /foo.*/ => ...
  is_valid = true
  params.each do |key, value|
    @config.keys.each do |config_key|
      next unless (config_key.is_a?(Regexp) && key =~ config_key) \
        || (config_key.is_a?(String) && key == config_key)
      config_val = @config[config_key][:validate]
      #puts "  Key matches."
      success, result = validate_value(value, config_val)
      if success
        # Accept coerced value if success
        # Used for converting values in the config to proper objects.
        params[key] = result if !result.nil?
      else
        @logger.error(I18n.t("logstash.agent.configuration.setting_invalid",
                             :plugin => @plugin_name, :type => @plugin_type,
                             :setting => key, :value => value.inspect,
                             :value_type => config_val,
                             :note => result))
      end
      #puts "Result: #{key} / #{result.inspect} / #{success}"
      is_valid &&= success
      break # done with this param key
    end # config.each
  end # params.each
  return is_valid
end # def validate_check_parameter_values
# Return the option hash of the first config entry whose key (String or
# Regexp) matches +key+, or nil when nothing matches.
def validator_find(key)
  @config.each do |config_key, config_val|
    matches = (config_key.is_a?(Regexp) && key =~ config_key) ||
              (config_key.is_a?(String) && key == config_key)
    return config_val if matches
  end # @config.each
  nil
end
def validate_value(value, validator)
# Validator comes from the 'config' pieces of plugins.
# They look like this
# config :mykey => lambda do |value| ... end
# (see LogStash::Inputs::File for example)
result = nil
if validator.nil?
return true
elsif validator.is_a?(Array)
value = [*value]
if value.size > 1
return false, "Expected one of #{validator.inspect}, got #{value.inspect}"
end
if !validator.include?(value.first)
return false, "Expected one of #{validator.inspect}, got #{value.inspect}"
end
result = value.first
elsif validator.is_a?(Symbol)
# TODO(sissel): Factor this out into a coersion method?
# TODO(sissel): Document this stuff.
value = hash_or_array(value)
case validator
when :codec
if value.first.is_a?(String)
value = LogStash::Plugin.lookup("codec", value.first).new
return true, value
else
value = value.first
return true, value
end
when :hash
if value.is_a?(Hash)
return true, value
end
if value.size % 2 == 1
return false, "This field must contain an even number of items, got #{value.size}"
end
# Convert the array the config parser produces into a hash.
result = {}
value.each_slice(2) do |key, value|
entry = result[key]
if entry.nil?
result[key] = value
else
if entry.is_a?(Array)
entry << value
else
result[key] = [entry, value]
end
end
end
when :array
result = value
when :string
if value.size > 1 # only one value wanted
return false, "Expected string, got #{value.inspect}"
end
result = value.first
when :number
if value.size > 1 # only one value wanted
return false, "Expected number, got #{value.inspect} (type #{value.class})"
end
v = value.first
case v
when Numeric
result = v
when String
if v.to_s.to_f.to_s != v.to_s \
&& v.to_s.to_i.to_s != v.to_s
return false, "Expected number, got #{v.inspect} (type #{v})"
end
if v.include?(".")
# decimal value, use float.
result = v.to_f
else
result = v.to_i
end
end # case v
when :boolean
if value.size > 1 # only one value wanted
return false, "Expected boolean, got #{value.inspect}"
end
bool_value = value.first
if !!bool_value == bool_value
# is_a does not work for booleans
# we have Boolean and not a string
result = bool_value
else
if bool_value !~ /^(true|false)$/
return false, "Expected boolean 'true' or 'false', got #{bool_value.inspect}"
end
result = (bool_value == "true")
end
when :ipaddr
if value.size > 1 # only one value wanted
return false, "Expected IPaddr, got #{value.inspect}"
end
octets = value.split(".")
if octets.length != 4
return false, "Expected IPaddr, got #{value.inspect}"
end
octets.each do |o|
if o.to_i < 0 or o.to_i > 255
return false, "Expected IPaddr, got #{value.inspect}"
end
end
result = value.first
when :password
if value.size > 1
return false, "Expected password (one value), got #{value.size} values?"
end
result = ::LogStash::Util::Password.new(value.first)
when :path
  if value.size > 1 # Only 1 value wanted
    return false, "Expected path (one value), got #{value.size} values?"
  end
  # Paths must be absolute
  #if !Pathname.new(value.first).absolute?
  #return false, "Require absolute path, got relative path #{value.first}?"
  #end
  # File.exist? replaces File.exists?, which is deprecated and was removed
  # in Ruby 3.2.
  if !File.exist?(value.first) # Check if the file exists
    return false, "File does not exist or cannot be opened #{value.first}"
  end
  result = value.first
end # case validator
else
return false, "Unknown validator #{validator.class}"
end
# Return the validator for later use, like with type coercion.
return true, result
end # def validate_value
# Normalize a config value: Hashes pass through untouched; anything else
# (nil, a scalar, or an array) is coerced into an Array.
def hash_or_array(value)
  return value if value.is_a?(Hash)
  [*value] # splat coerces nil -> [], scalar -> [scalar], array -> itself
end
end # module LogStash::Config::DSL
end # module LogStash::Config
| 33.196544 | 130 | 0.571243 |
1a7e8dd1c0bc4444b802a7e61898d5211164be83 | 106 | require 'range_operators/array_operator_definitions'
require 'range_operators/range_operator_definitions'
| 35.333333 | 52 | 0.90566 |
bf30a826ca77e2afd97d76eb528510537577fccf | 147 | module Homebrew
def __cellar
if ARGV.named.empty?
puts HOMEBREW_CELLAR
else
puts ARGV.formulae.map(&:rack)
end
end
end
| 14.7 | 36 | 0.653061 |
39381697b1fbbc82c6a207eb25b3b9ae83893b0b | 13,868 | # encoding: UTF-8
# Fluentd output plugin that accumulates per-interval statistics
# (sum / max / min / avg) over matching record fields, then re-emits one
# summary record per aggregation key. Supports both explicit key lists
# (*_keys) and regexp field matching (sum/max/min/avg params), optional
# state persistence across restarts (store_file), and zero re-emission
# after activity stops (zero_emit).
class Fluent::StatsOutput < Fluent::Output
  Fluent::Plugin.register_output('stats', self)
  # To support log_level option implemented by Fluentd v0.10.43
  unless method_defined?(:log)
    define_method("log") { $log }
  end
  # Define `router` method of v0.12 to support v0.10 or earlier
  unless method_defined?(:router)
    define_method("router") { Fluent::Engine }
  end
  def initialize
    super
    require 'pathname'
  end
  # Regexps (as strings) selecting record fields to aggregate.
  config_param :sum, :string, :default => nil
  config_param :max, :string, :default => nil
  config_param :min, :string, :default => nil
  config_param :avg, :string, :default => nil
  # Comma-separated explicit field names to aggregate.
  config_param :sum_keys, :string, :default => nil
  config_param :max_keys, :string, :default => nil
  config_param :min_keys, :string, :default => nil
  config_param :avg_keys, :string, :default => nil
  # Suffixes appended to the output field names per statistic.
  config_param :sum_suffix, :string, :default => ""
  config_param :max_suffix, :string, :default => ""
  config_param :min_suffix, :string, :default => ""
  config_param :avg_suffix, :string, :default => ""
  # Flush/emit period in seconds.
  config_param :interval, :time, :default => 5
  # Output tag handling (fixed tag, or prefix/suffix/slice rewriting).
  config_param :tag, :string, :default => nil
  config_param :add_tag_prefix, :string, :default => nil
  config_param :remove_tag_prefix, :string, :default => nil
  config_param :add_tag_suffix, :string, :default => nil
  config_param :remove_tag_suffix, :string, :default => nil
  config_param :remove_tag_slice, :string, :default => nil
  # 'in_tag' aggregates per incoming tag, 'out_tag' per rewritten tag,
  # 'all' into a single bucket (requires `tag`).
  config_param :aggregate, :string, :default => 'in_tag'
  # Path for persisting counters across restarts (Marshal).
  config_param :store_file, :string, :default => nil
  # When true, emit one zeroed record after activity stops.
  config_param :zero_emit, :bool, :default => false
  # Public accessors are exposed for tests and for save/load of state.
  attr_accessor :matches
  attr_accessor :saved_duration
  attr_accessor :saved_at
  attr_accessor :last_checked
  def configure(conf)
    super
    @interval = @interval.to_i
    @sum = Regexp.new(@sum) if @sum
    @max = Regexp.new(@max) if @max
    @min = Regexp.new(@min) if @min
    @avg = Regexp.new(@avg) if @avg
    @sum_keys = @sum_keys ? @sum_keys.split(',') : []
    @max_keys = @max_keys ? @max_keys.split(',') : []
    @min_keys = @min_keys ? @min_keys.split(',') : []
    @avg_keys = @avg_keys ? @avg_keys.split(',') : []
    case @aggregate
    when 'tag' # obsolete
      @aggregate = 'in_tag'
    when 'all'
      raise Fluent::ConfigError, "tag must be specified for aggregate all" if @tag.nil?
    end
    unless ['in_tag', 'out_tag', 'all'].include?(@aggregate)
      raise Fluent::ConfigError, "aggregate allows in_tag/out_tag/all"
    end
    if @tag.nil? and @add_tag_prefix.nil? and @remove_tag_prefix.nil? and @add_tag_suffix.nil? and @remove_tag_suffix.nil? and @remove_tag_slice.nil?
      @add_tag_prefix = 'stats' # not ConfigError for lower version compatibility
    end
    @tag_proc = tag_proc
    @aggregate_proc = aggregate_proc(@tag_proc)
    # @matches maps aggregate_key => {:count, :sum, :max, :min, :avg, :avg_count};
    # it is only mutated/swapped under @mutex (see #emit and #flush_emit).
    @matches = {}
    @mutex = Mutex.new
  end
  # Build the accumulator hash for the next interval. With zero_emit, keys
  # seen in the previous interval are seeded with 0 so one zero record is
  # emitted after activity stops (a bucket with count == 0 is dropped).
  def initial_matches(prev_matches = nil)
    if @zero_emit && prev_matches
      matches = {}
      prev_matches.keys.each do |aggregate_key|
        next unless prev_matches[aggregate_key][:count] > 0 # Prohibit to emit anymore
        matches[aggregate_key] = { :count => 0, :sum => {}, :max => {}, :min => {}, :avg => {}, :avg_count => {} }
        # ToDo: would want default configuration for :max, :min
        prev_matches[aggregate_key][:sum].keys.each {|key| matches[aggregate_key][:sum][key] = 0 }
        prev_matches[aggregate_key][:max].keys.each {|key| matches[aggregate_key][:max][key] = 0 }
        prev_matches[aggregate_key][:min].keys.each {|key| matches[aggregate_key][:min][key] = 0 }
        prev_matches[aggregate_key][:avg].keys.each {|key| matches[aggregate_key][:avg][key] = 0 }
        prev_matches[aggregate_key][:avg_count] ||= {} # for lower version compatibility
        prev_matches[aggregate_key][:avg_count].keys.each {|key| matches[aggregate_key][:avg_count][key] = 0 }
      end
      matches
    else
      {}
    end
  end
  def start
    super
    load_status(@store_file, @interval) if @store_file
    @watcher = Thread.new(&method(:watcher))
  end
  def shutdown
    super
    @watcher.terminate
    @watcher.join
    save_status(@store_file) if @store_file
  end
  # Called when new line comes. This method actually does not emit
  def emit(tag, es, chain)
    # stats
    matches = { :count => 0, :sum => {}, :max => {}, :min => {}, :avg => {}, :avg_count => {} }
    es.each do |time, record|
      record = stringify_keys(record)
      @sum_keys.each do |key|
        next unless record[key] and value = record[key].to_f
        matches[:sum][key] = sum(matches[:sum][key], value)
      end
      @max_keys.each do |key|
        next unless record[key] and value = record[key].to_f
        matches[:max][key] = max(matches[:max][key], value)
      end
      @min_keys.each do |key|
        next unless record[key] and value = record[key].to_f
        matches[:min][key] = min(matches[:min][key], value)
      end
      @avg_keys.each do |key|
        next unless record[key] and value = record[key].to_f
        matches[:avg][key] = sum(matches[:avg][key], value)
        # ignore zero emitted value
        matches[:avg_count][key] = sum(matches[:avg_count][key], 1) unless (@zero_emit and value.zero?)
      end
      # Regexp-based matching over every key of the record.
      record.keys.each do |key|
        key = key.to_s
        value = record[key].to_f
        if @sum and @sum.match(key)
          matches[:sum][key] = sum(matches[:sum][key], value)
        end
        if @max and @max.match(key)
          matches[:max][key] = max(matches[:max][key], value)
        end
        if @min and @min.match(key)
          matches[:min][key] = min(matches[:min][key], value)
        end
        if @avg and @avg.match(key)
          matches[:avg][key] = sum(matches[:avg][key], value) # sum yet
          # ignore zero emitted value
          matches[:avg_count][key] = sum(matches[:avg_count][key], 1) unless (@zero_emit and value.zero?)
        end
      end if @sum || @max || @min || @avg
      matches[:count] += 1
    end
    aggregate_key = @aggregate_proc.call(tag)
    # thread safe merge
    @matches[aggregate_key] ||= { :count => 0, :sum => {}, :max => {}, :min => {}, :avg => {}, :avg_count => {} }
    @matches[aggregate_key][:avg_count] ||= {} # for lower version compatibility
    @mutex.synchronize do
      matches[:sum].keys.each do |key|
        @matches[aggregate_key][:sum][key] = sum(@matches[aggregate_key][:sum][key], matches[:sum][key])
      end
      matches[:max].keys.each do |key|
        @matches[aggregate_key][:max][key] = max(@matches[aggregate_key][:max][key], matches[:max][key])
      end
      matches[:min].keys.each do |key|
        @matches[aggregate_key][:min][key] = min(@matches[aggregate_key][:min][key], matches[:min][key])
      end
      matches[:avg].keys.each do |key|
        @matches[aggregate_key][:avg][key] = sum(@matches[aggregate_key][:avg][key], matches[:avg][key]) # sum yet
        @matches[aggregate_key][:avg_count][key] = sum(@matches[aggregate_key][:avg_count][key], matches[:avg_count][key])
      end
      @matches[aggregate_key][:count] += matches[:count]
    end
    log.trace "out_stats: tag:#{tag} @matches:#{@matches}"
    chain.next
  rescue => e
    log.warn "out_stats: #{e.class} #{e.message} #{e.backtrace.first}"
  end
  # thread callback
  def watcher
    # instance variable, and public accessable, for test
    @last_checked ||= Fluent::Engine.now
    while (sleep 0.5)
      begin
        if Fluent::Engine.now - @last_checked >= @interval
          report_time do
            @last_checked = Fluent::Engine.now
            flush_emit
          end
        end
      rescue => e
        log.warn "out_stats: #{e.class} #{e.message} #{e.backtrace.first}"
      end
    end
  end
  # This method is the real one to emit
  def flush_emit
    time = Fluent::Engine.now
    flushed_matches = {}
    # Swap accumulators atomically so #emit never writes into a bucket that
    # is being flushed.
    @mutex.synchronize do
      flushed_matches, @matches = @matches, initial_matches(@matches)
    end
    log.trace("out_stats: flushed_matches:#{flushed_matches} @matches:#{@matches}") unless flushed_matches.empty?
    flushed_matches.each do |tag, matches|
      case @aggregate
      when 'all'
        emit_tag = @tag
      when 'in_tag'
        emit_tag = @tag_proc.call(tag)
      when 'out_tag'
        emit_tag = tag
      else
      end
      report_time(" emit_tag:#{emit_tag} matches:#{matches}") do
        output = generate_output(matches)
        router.emit(emit_tag, time, output) if output and !output.empty?
      end
    end
  end
  # Turn one accumulator bucket into the emitted record; averages are
  # finalized here by dividing the running sum by the observation count.
  def generate_output(matches)
    return nil if matches.empty?
    output = {}
    matches[:sum].keys.each do |key|
      output[key + @sum_suffix] = matches[:sum][key]
    end
    matches[:max].keys.each do |key|
      output[key + @max_suffix] = matches[:max][key]
    end
    matches[:min].keys.each do |key|
      output[key + @min_suffix] = matches[:min][key]
    end
    matches[:avg_count] ||= {} # for lower version compatibility
    matches[:avg].keys.each do |key|
      output[key + @avg_suffix] = matches[:avg][key]
      count = matches[:avg_count][key].to_f
      output[key + @avg_suffix] /= count unless count.zero?
    end
    output
  end
  # nil-safe accumulators: when the running value is nil (first sample),
  # the other operand wins.
  def sum(a, b)
    [a, b].compact.inject(:+)
  end
  def max(a, b)
    [a, b].compact.max
  end
  def min(a, b)
    [a, b].compact.min
  end
  # Store internal status into a file
  #
  # @param [String] file_path
  def save_status(file_path)
    return unless file_path
    begin
      Pathname.new(file_path).open('wb') do |f|
        @saved_at = Fluent::Engine.now
        @saved_duration = @saved_at - @last_checked
        # The config params are stored alongside the counters so a restart
        # with a different configuration can reject the stale data.
        Marshal.dump({
          :matches => @matches,
          :saved_at => @saved_at,
          :saved_duration => @saved_duration,
          :aggregate => @aggregate,
          :sum => @sum,
          :max => @max,
          :min => @min,
          :avg => @avg,
        }, f)
      end
    rescue => e
      log.warn "out_stats: Can't write store_file #{e.class} #{e.message}"
    end
  end
  # Load internal status from a file
  #
  # @param [String] file_path
  # @param [Integer] interval
  def load_status(file_path, interval)
    return unless (f = Pathname.new(file_path)).exist?
    begin
      # NOTE: the block parameter deliberately shadows the outer Pathname `f`
      # with the opened IO.
      f.open('rb') do |f|
        stored = Marshal.load(f)
        if stored[:aggregate] == @aggregate and
          stored[:sum] == @sum and
          stored[:max] == @max and
          stored[:min] == @min and
          stored[:avg] == @avg
          if !stored[:matches].empty? and !stored[:matches].first[1].has_key?(:max)
            log.warn "out_stats: stored data does not have compatibility with the current version. ignore stored data"
            return
          end
          # Only resume when the process restarted within one interval.
          if Fluent::Engine.now <= stored[:saved_at] + interval
            @matches = stored[:matches]
            @saved_at = stored[:saved_at]
            @saved_duration = stored[:saved_duration]
            # for lower compatibility
            if counts = stored[:counts]
              @matches.keys.each {|tag| @matches[tag][:count] = counts[tag] }
            end
            # skip the saved duration to continue counting
            @last_checked = Fluent::Engine.now - @saved_duration
          else
            log.warn "out_stats: stored data is outdated. ignore stored data"
          end
        else
          log.warn "out_stats: configuration param was changed. ignore stored data"
        end
      end
    rescue => e
      log.warn "out_stats: Can't load store_file #{e.class} #{e.message}"
    end
  end
  private
  # Build a new hash whose keys are the block's transformation of the
  # original keys (values unchanged).
  def transform_keys(hash)
    result = {}
    hash.each_key do |key|
      result[yield(key)] = hash[key]
    end
    result
  end
  def stringify_keys(hash)
    transform_keys(hash) { |key| key.to_s }
  end
  # Returns the proc that maps an incoming tag to its aggregation bucket.
  def aggregate_proc(tag_proc)
    case @aggregate
    when 'all'
      Proc.new {|tag| :all }
    when 'in_tag'
      Proc.new {|tag| tag }
    when 'out_tag'
      Proc.new {|tag| tag_proc.call(tag) }
    end
  end
  # Builds the tag-rewriting proc from the slice/prefix/suffix params.
  # The fixed-@tag fallback in the final else is only reached when `tag`
  # is configured (see #configure).
  def tag_proc
    tag_slice_proc =
      if @remove_tag_slice
        lindex, rindex = @remove_tag_slice.split('..', 2)
        if lindex.nil? or rindex.nil? or lindex !~ /^-?\d+$/ or rindex !~ /^-?\d+$/
          # NOTE(review): this message says "out_grepcounter" — apparently
          # copied from fluent-plugin-grepcounter; text left unchanged.
          raise Fluent::ConfigError, "out_grepcounter: remove_tag_slice must be formatted like [num]..[num]"
        end
        l, r = lindex.to_i, rindex.to_i
        Proc.new {|tag| (tags = tag.split('.')[l..r]).nil? ? "" : tags.join('.') }
      else
        Proc.new {|tag| tag }
      end
    rstrip = Proc.new {|str, substr| str.chomp(substr) }
    lstrip = Proc.new {|str, substr| str.start_with?(substr) ? str[substr.size..-1] : str }
    tag_prefix = "#{rstrip.call(@add_tag_prefix, '.')}." if @add_tag_prefix
    tag_suffix = ".#{lstrip.call(@add_tag_suffix, '.')}" if @add_tag_suffix
    tag_prefix_match = "#{rstrip.call(@remove_tag_prefix, '.')}." if @remove_tag_prefix
    tag_suffix_match = ".#{lstrip.call(@remove_tag_suffix, '.')}" if @remove_tag_suffix
    tag_fixed = @tag if @tag
    if tag_prefix_match and tag_suffix_match
      Proc.new {|tag| "#{tag_prefix}#{rstrip.call(lstrip.call(tag_slice_proc.call(tag), tag_prefix_match), tag_suffix_match)}#{tag_suffix}" }
    elsif tag_prefix_match
      Proc.new {|tag| "#{tag_prefix}#{lstrip.call(tag_slice_proc.call(tag), tag_prefix_match)}#{tag_suffix}" }
    elsif tag_suffix_match
      Proc.new {|tag| "#{tag_prefix}#{rstrip.call(tag_slice_proc.call(tag), tag_suffix_match)}#{tag_suffix}" }
    elsif tag_prefix || @remove_tag_slice || tag_suffix
      Proc.new {|tag| "#{tag_prefix}#{tag_slice_proc.call(tag)}#{tag_suffix}" }
    else
      Proc.new {|tag| tag_fixed }
    end
  end
  # Run the block, log its wall-clock duration at debug level, and return
  # the block's result.
  def report_time(msg = nil, &blk)
    t = Time.now
    output = yield
    log.debug sprintf("out_stats: elapsed_time:%.2f thread_id:%s%s caller:%s", (Time.now - t).to_f, Thread.current.object_id, msg, caller()[0])
    output
  end
end
| 34.583541 | 149 | 0.61285 |
d5ba2079365af6be66c1ad9f5273fcbcf3f0f53f | 1,420 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'i18n/backend/side_by_side'
require 'minitest/autorun'
# Load the fixture locale files and swap in the backend under test.
I18n.load_path = Dir['test/locales/*.yml']
I18n.backend = I18n::Backend::SideBySide.new
# Exercises the SideBySide backend against the fixture locales: lookups of
# classic per-locale keys, nested/side-by-side keys, pluralization, and
# keys present in only one locale.
class Test < Minitest::Test
  # Plain per-locale keys still resolve normally.
  def test_oldschool
    assert_equal('Still here', I18n.t('oldschool', locale: :en))
    assert_equal('Immer noch hier', I18n.t('oldschool', locale: :de))
  end
  # Side-by-side leaf values resolve per requested locale.
  def test_simple
    assert_equal('Hi', I18n.t('foo.bar', locale: :en))
    assert_equal('Hallo', I18n.t('foo.bar', locale: :de))
  end
  # Whole sub-hashes are returned with locale-specific values.
  def test_hash
    assert_equal({ key: 'value' }, I18n.t('foo.hash', locale: :en))
    assert_equal({ key: 'Wert' }, I18n.t('foo.hash', locale: :de))
  end
  # Pluralization (count interpolation) works through the backend.
  def test_count
    assert_equal('none', I18n.t('foo.count', count: 0, locale: :en))
    assert_equal('keine', I18n.t('foo.count', count: 0, locale: :de))
  end
  # A key defined only for :en resolves there and raises for :de.
  def test_partially_missing
    assert_equal('some value', I18n.t('foo.only_en.some_key', locale: :en))
    assert_equal({ some_key: 'some value' }, I18n.t('foo.only_en', locale: :en))
    assert_raises { I18n.t('foo.only_en.some_key', locale: :de, raise: true) }
    assert_raises { I18n.t('foo.only_en', locale: :de, raise: true) }
  end
  # Values without a language split return the same text for every locale.
  def test_without_language
    assert_equal('Without Language', I18n.t('foo.without_language', language: :en))
    assert_equal('Without Language', I18n.t('foo.without_language', language: :de))
  end
end
| 34.634146 | 83 | 0.672535 |
e8e95e98b92a97905f456b88f754c4b36ae92339 | 385 | namespace :event do
desc 'Delete events by subj_id'
task :detete_by_sub_id, [:subj_id] => :environment do |task, args|
logger = Logger.new(STDOUT)
events= Event.where(subj_id: args[:subj_id])
total = events.size
events.find_each do |event|
event.destroy
end
logger.info "[Event Data] Deleted #{total} event with subj_id #{args[:subj_id]}"
end
end
| 25.666667 | 84 | 0.675325 |
d53e136effb2df0532357c0158bc54a522c32a0d | 7,522 | # frozen_string_literal: true
module Ci
  # Builds and persists a CI pipeline by running the ordered chain of steps
  # in SEQUENCE against a Chain::Command describing the trigger context.
  class CreatePipelineService < BaseService
    attr_reader :pipeline, :logger
    CreateError = Class.new(StandardError)
    # Thresholds above which the pipeline logger emits its observations
    # (slow chain steps, oversized pipelines, slow overall creation).
    LOG_MAX_DURATION_THRESHOLD = 3.seconds
    LOG_MAX_PIPELINE_SIZE = 2_000
    LOG_MAX_CREATION_THRESHOLD = 20.seconds
    # Ordered chain of creation steps; each step may populate the pipeline,
    # record errors, or halt the chain. Order is significant.
    SEQUENCE = [Gitlab::Ci::Pipeline::Chain::Build,
                Gitlab::Ci::Pipeline::Chain::Build::Associations,
                Gitlab::Ci::Pipeline::Chain::Validate::Abilities,
                Gitlab::Ci::Pipeline::Chain::Validate::Repository,
                Gitlab::Ci::Pipeline::Chain::Validate::SecurityOrchestrationPolicy,
                Gitlab::Ci::Pipeline::Chain::Skip,
                Gitlab::Ci::Pipeline::Chain::Config::Content,
                Gitlab::Ci::Pipeline::Chain::Config::Process,
                Gitlab::Ci::Pipeline::Chain::Validate::AfterConfig,
                Gitlab::Ci::Pipeline::Chain::RemoveUnwantedChatJobs,
                Gitlab::Ci::Pipeline::Chain::SeedBlock,
                Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules,
                Gitlab::Ci::Pipeline::Chain::Seed,
                Gitlab::Ci::Pipeline::Chain::Limit::Size,
                Gitlab::Ci::Pipeline::Chain::Limit::Deployments,
                Gitlab::Ci::Pipeline::Chain::Validate::External,
                Gitlab::Ci::Pipeline::Chain::Populate,
                Gitlab::Ci::Pipeline::Chain::StopDryRun,
                Gitlab::Ci::Pipeline::Chain::EnsureEnvironments,
                Gitlab::Ci::Pipeline::Chain::EnsureResourceGroups,
                Gitlab::Ci::Pipeline::Chain::Create,
                Gitlab::Ci::Pipeline::Chain::CreateDeployments,
                Gitlab::Ci::Pipeline::Chain::CreateCrossDatabaseAssociations,
                Gitlab::Ci::Pipeline::Chain::Limit::Activity,
                Gitlab::Ci::Pipeline::Chain::Limit::JobActivity,
                Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines,
                Gitlab::Ci::Pipeline::Chain::Metrics,
                Gitlab::Ci::Pipeline::Chain::TemplateUsage,
                Gitlab::Ci::Pipeline::Chain::Pipeline::Process].freeze
    # Create a new pipeline in the specified project.
    #
    # @param [Symbol] source What event (Ci::Pipeline.sources) triggers the pipeline
    #                        creation.
    # @param [Boolean] ignore_skip_ci Whether skipping a pipeline creation when `[skip ci]` comment
    #                                 is present in the commit body
    # @param [Boolean] save_on_errors Whether persisting an invalid pipeline when it encounters an
    #                                 error during creation (e.g. invalid yaml)
    # @param [Ci::TriggerRequest] trigger_request The pipeline trigger triggers the pipeline creation.
    # @param [Ci::PipelineSchedule] schedule The pipeline schedule triggers the pipeline creation.
    # @param [MergeRequest] merge_request The merge request triggers the pipeline creation.
    # @param [ExternalPullRequest] external_pull_request The external pull request triggers the pipeline creation.
    # @param [Ci::Bridge] bridge The bridge job that triggers the downstream pipeline creation.
    # @param [String] content The content of .gitlab-ci.yml to override the default config
    #                         contents (e.g. .gitlab-ci.yml in repostiry). Mainly used for
    #                         generating a dangling pipeline.
    #
    # @return [Ci::Pipeline] The created Ci::Pipeline object.
    # rubocop: disable Metrics/ParameterLists
    def execute(source, ignore_skip_ci: false, save_on_errors: true, trigger_request: nil, schedule: nil, merge_request: nil, external_pull_request: nil, bridge: nil, **options, &block)
      @logger = build_logger
      @pipeline = Ci::Pipeline.new
      command = Gitlab::Ci::Pipeline::Chain::Command.new(
        source: source,
        origin_ref: params[:ref],
        checkout_sha: params[:checkout_sha],
        after_sha: params[:after],
        before_sha: params[:before], # The base SHA of the source branch (i.e merge_request.diff_base_sha).
        source_sha: params[:source_sha], # The HEAD SHA of the source branch (i.e merge_request.diff_head_sha).
        target_sha: params[:target_sha], # The HEAD SHA of the target branch.
        trigger_request: trigger_request,
        schedule: schedule,
        merge_request: merge_request,
        external_pull_request: external_pull_request,
        ignore_skip_ci: ignore_skip_ci,
        save_incompleted: save_on_errors,
        seeds_block: block,
        variables_attributes: params[:variables_attributes],
        project: project,
        current_user: current_user,
        push_options: params[:push_options] || {},
        chat_data: params[:chat_data],
        bridge: bridge,
        logger: @logger,
        **extra_options(**options))
      # Ensure we never persist the pipeline when dry_run: true
      @pipeline.readonly! if command.dry_run?
      Gitlab::Ci::Pipeline::Chain::Sequence
        .new(pipeline, command, SEQUENCE)
        .build!
      if pipeline.persisted?
        Gitlab::EventStore.publish(
          Ci::PipelineCreatedEvent.new(data: { pipeline_id: pipeline.id })
        )
        create_namespace_onboarding_action
      else
        # If pipeline is not persisted, try to recover IID
        pipeline.reset_project_iid
      end
      if error_message = pipeline.full_error_messages.presence || pipeline.failure_reason.presence
        ServiceResponse.error(message: error_message, payload: pipeline)
      else
        ServiceResponse.success(payload: pipeline)
      end
    ensure
      # Always flush the logger's observations, even when the chain raised.
      @logger.commit(pipeline: pipeline, caller: self.class.name)
    end
    # rubocop: enable Metrics/ParameterLists
    # Strict variant of #execute: raises CreateError when the pipeline was
    # not persisted instead of returning an error ServiceResponse.
    def execute!(*args, &block)
      source = args[0]
      params = Hash(args[1])
      execute(source, **params, &block).tap do |response|
        unless response.payload.persisted?
          raise CreateError, pipeline.full_error_messages
        end
      end
    end
    private
    # NOTE(review): `origin_sha` / `origin_ref` are not defined in this file —
    # presumably provided by BaseService or a mixin; confirm before relying on them.
    def commit
      @commit ||= project.commit(origin_sha || origin_ref)
    end
    def sha
      commit.try(:id)
    end
    def create_namespace_onboarding_action
      Namespaces::OnboardingPipelineCreatedWorker.perform_async(project.namespace_id)
    end
    def extra_options(content: nil, dry_run: false)
      { content: content, dry_run: dry_run }
    end
    # Pipeline logger that only emits when a step is slow, the pipeline is
    # unusually large, or total creation time exceeds the threshold.
    def build_logger
      Gitlab::Ci::Pipeline::Logger.new(project: project) do |l|
        l.log_when do |observations|
          observations.any? do |name, values|
            values.any? &&
              name.to_s.end_with?('duration_s') &&
              values.max >= LOG_MAX_DURATION_THRESHOLD
          end
        end
        l.log_when do |observations|
          values = observations['pipeline_size_count']
          next false if values.empty?
          values.max >= LOG_MAX_PIPELINE_SIZE
        end
        l.log_when do |observations|
          values = observations['pipeline_creation_duration_s']
          next false if values.empty?
          values.max >= LOG_MAX_CREATION_THRESHOLD
        end
      end
    end
  end
end
Ci::CreatePipelineService.prepend_mod_with('Ci::CreatePipelineService')
| 42.497175 | 185 | 0.614198 |
7aef80f59db7ee879116c0f901b12c036a879408 | 219 | ENV['SINATRA_ENV'] ||= "development"
require 'bundler/setup'
# Load every gem in the Gemfile's default group plus the group for the
# current Sinatra environment.
Bundler.require(:default, ENV['SINATRA_ENV'])
# Development database lives in a local SQLite file.
configure :development do
  set :database, 'sqlite3:db/database.db'
end
# require_all comes from the require_all gem loaded via Bundler above.
require_all 'models'
require './app'
| 18.25 | 45 | 0.73516 |
4abf69bee4cf1be381ac38437aa134c156aee92b | 1,057 | # -*- ruby -*-
#encoding: utf-8
require 'sequel/model'
require 'thingfish/mixins'
require 'thingfish/metastore/pggraph' unless defined?( Thingfish::Metastore::PgGraph )
### A row representing a relationship between two node objects.
###
class Thingfish::Metastore::PgGraph::Edge < Sequel::Model( :edges )

	# Related resource associations
	many_to_one :node, :key => :id_p

	# Dataset methods
	#
	dataset_module do

		#########
		protected
		#########

		### Returns a Sequel expression suitable for use as the key of a query against
		### the specified property field.
		###
		def prop_expr( field )
			return Sequel.pg_jsonb( :prop ).get_text( field.to_s )
		end

	end

	### Do some initial attribute setup for new objects.
	###
	def initialize( * )
		super
		self[ :prop ] ||= Sequel.pg_jsonb({})
	end

	#########
	protected
	#########

	### Proxy method -- fetch a value from the edge property hash if it exists.
	### Fixed to use #key? so properties whose stored value is +false+ or +nil+
	### are returned instead of erroneously falling through to +super+ (which
	### raised NoMethodError for them).
	###
	def method_missing( sym, *args, &block )
		key = sym.to_s
		return self.prop.key?( key ) ? self.prop[ key ] : super
	end

	### Advertise the dynamic property lookups provided by #method_missing so
	### #respond_to? reflects them (method_missing should always be paired with
	### respond_to_missing?).
	###
	def respond_to_missing?( sym, include_private=false )
		return self.prop.key?( sym.to_s ) || super
	end

end # Thingfish::Metastore::PgGraph::Edge
| 19.943396 | 86 | 0.663198 |
031aa9c77fd7ed44a8a765bdc1c9cd563fc75c41 | 3,583 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "sample_app_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.183908 | 102 | 0.758861 |
627c796a0049305ac7923495ad2de65df9375265 | 1,444 | # frozen_string_literal: true
# This migration comes from active_storage (originally 20170806125915)
# Creates the three Active Storage tables: blobs (file metadata), polymorphic
# attachments (record <-> blob join), and variant records (processed variant
# tracking). Copied from the active_storage engine — applied migrations must
# not be edited.
class CreateActiveStorageTables < ActiveRecord::Migration[5.2]
  def change
    # One row per stored file; `key` is the storage-service object key.
    create_table :active_storage_blobs do |t|
      t.string :key, null: false
      t.string :filename, null: false
      t.string :content_type
      t.text :metadata
      t.string :service_name, null: false
      t.bigint :byte_size, null: false
      t.string :checksum, null: false
      t.datetime :created_at, null: false
      t.index [:key], unique: true
    end
    # Polymorphic join between any record and its blobs, named per attachment.
    create_table :active_storage_attachments do |t|
      t.string :name, null: false
      t.references :record, null: false, polymorphic: true, index: false
      t.references :blob, null: false
      t.datetime :created_at, null: false
      t.index [:record_type, :record_id, :name, :blob_id],
        name: "index_active_storage_attachments_uniqueness",
        unique: true
      t.foreign_key :active_storage_blobs, column: :blob_id
    end
    # One row per generated variant, keyed by the variation digest.
    create_table :active_storage_variant_records do |t|
      t.belongs_to :blob, null: false, index: false
      t.string :variation_digest, null: false
      t.index [:blob_id, :variation_digest],
        name: "index_active_storage_variant_records_uniqueness",
        unique: true
      t.foreign_key :active_storage_blobs, column: :blob_id
    end
  end
end
| 33.581395 | 74 | 0.654432 |
ede52e98ecdba8869063d3376e4c9cc3cddf2565 | 781 | class Locomotive::Translation
include Locomotive::Mongoid::Document
## fields ##
field :key
field :values, type: Hash, default: {}
## associations ##
belongs_to :site, class_name: 'Locomotive::Site'
## validations ##
validates_uniqueness_of :key, scope: :site_id
validates_presence_of :site, :key
## scopes ##
scope :ordered, order_by(key: :asc)
## callbacks ##
before_validation :underscore_key
before_validation :remove_blanks
## methods ##
protected
# Make sure the translation key is underscored
# since it is the unique way to use it in a liquid template.
#
def underscore_key
if self.key
self.key = self.key.permalink(true)
end
end
def remove_blanks
self.values.delete_if { |k,v| v.blank? }
end
end | 19.525 | 62 | 0.683739 |
2108e8a6ef403fa7259b245ca349a47ca44170ac | 136 | # frozen_string_literal: true
# Serialize API responses as plain JSON (no JSON:API envelope).
ActiveModelSerializers.config.adapter = :json
# Emit lowerCamelCase keys for JavaScript clients.
ActiveModelSerializers.config.key_transform = :camel_lower
| 27.2 | 58 | 0.852941 |
1a3732bbdf9c90f112835d1e82adf91954b379da | 1,125 | # frozen_string_literal: true
class MergeRequest::MetricsFinder
  include Gitlab::Allowable

  # @param current_user [User] user whose permissions gate the query
  # @param params [Hash] :target_project plus optional :merged_after /
  #   :merged_before time bounds
  def initialize(current_user, params = {})
    @current_user = current_user
    @params = params
  end

  # Returns MergeRequest::Metrics scoped to the target project, optionally
  # bounded by merged-at time; an empty relation when no project is given
  # or the user may not read its merge requests.
  def execute
    return klass.none if target_project.blank? || user_not_authorized?

    scope = init_collection.by_target_project(target_project)
    scope = scope.merged_after(merged_after) if merged_after
    scope = scope.merged_before(merged_before) if merged_before
    scope
  end

  private

  attr_reader :current_user, :params

  def user_not_authorized?
    !can?(current_user, :read_merge_request, target_project)
  end

  def init_collection
    klass.all
  end

  def klass
    MergeRequest::Metrics
  end

  def target_project
    params[:target_project]
  end

  def merged_after
    params[:merged_after]
  end

  def merged_before
    params[:merged_before]
  end
end
| 17.578125 | 70 | 0.736 |
336470a09f3dcce18ff0679898f0d84451f09bd1 | 1,209 | # frozen_string_literal: true
require_dependency "renalware/hd"
module Renalware
  module HD
    # HD-specific view of a patient (ActiveType wrapper over Renalware::Patient).
    class Patient < ActiveType::Record[Renalware::Patient]
      has_one :hd_profile, class_name: "Profile"
      has_one :hd_preference_set, class_name: "PreferenceSet"
      has_many :hd_sessions, class_name: "Session"
      has_many :prescription_administrations

      # Patients eager-loaded with their active (non-deactivated) HD profile
      # and its hospital unit.
      scope :with_profile, lambda {
        includes(hd_profile: :hospital_unit)
          .joins(<<-SQL.squish)
            LEFT OUTER JOIN hd_profiles ON hd_profiles.patient_id = patients.id
            LEFT OUTER JOIN hospital_units ON hd_profiles.hospital_unit_id = hospital_units.id
          SQL
          .where("hd_profiles.deactivated_at is NULL")
      }

      # True if the patient has ever had an HD modality recorded.
      # Delegates to the memoised check so the two predicates cannot drift.
      def treated?
        has_ever_been_on_hd?
      end

      def has_ever_been_on_hd?
        # `defined?` guard rather than `||=`: a false result is memoised too,
        # so the existence query is not re-executed on every call.
        return @has_ever_been_on_hd if defined?(@has_ever_been_on_hd)

        @has_ever_been_on_hd =
          modality_descriptions.exists?(type: "Renalware::HD::ModalityDescription")
      end

      # True when the patient's current modality (if any) is HD.
      def current_modality_hd?
        return false if current_modality.blank?

        current_modality.description.is_a?(HD::ModalityDescription)
      end

      # ActiveType/delegator hook: this wrapper is its own underlying object.
      def __getobj__
        self
      end
    end
  end
end
| 28.785714 | 92 | 0.685691 |
e821905f31e5ad7f35680b76a3b475a5a9428a7d | 1,249 | require 'cucumber'
require 'flatware/cucumber/formatter'
require 'flatware/cucumber/result'
require 'flatware/cucumber/step_result'
require 'flatware/cucumber/formatters/console'
require 'flatware/cucumber/cli'
module Flatware
  module Cucumber
    # Wraps a parsed ::Cucumber configuration together with the raw CLI args
    # so that work can be fanned out as one Job per feature file.
    class Config
      attr_reader :config, :args

      def initialize(cucumber_config, args)
        @config = cucumber_config
        @args = args
      end

      # First configured features directory (cucumber's default: "features").
      # Consistency fix: use the `config` reader throughout instead of mixing
      # it with direct @config access.
      def feature_dir
        config.feature_dirs.first
      end

      # One Job per feature file, each carrying the original CLI args.
      def jobs
        feature_files.map { |file| Job.new file, args }.to_a
      end

      private

      # Individual feature files, excluding bare directory entries.
      def feature_files
        config.feature_files - config.feature_dirs
      end
    end

    module_function

    # Parses cucumber CLI args (forcing the Flatware formatter onto the run)
    # and returns a Flatware::Cucumber::Config. A dup of the original args is
    # retained for later job construction.
    def configure(args, out_stream = $stdout, error_stream = $stderr)
      raw_args = args.dup
      cli_config = ::Cucumber::Cli::Configuration.new(out_stream, error_stream)
      cli_config.parse! args + %w[--format Flatware::Cucumber::Formatter]
      cucumber_config = ::Cucumber::Configuration.new cli_config
      Config.new cucumber_config, raw_args
    end

    # Runs the given feature files with the given extra CLI options.
    def run(feature_files, options)
      runtime(Array(feature_files) + options).run!
    end

    # Builds a cucumber Runtime from CLI-style args.
    def runtime(args)
      ::Cucumber::Runtime.new(configure(args).config)
    end
  end
end
| 24.019231 | 79 | 0.682146 |
e85b0168d2d606191f6281ed8c6f06de8ddf2a48 | 331 | # frozen_string_literal: true
module Settings
  # Security rules plan export settings
  class PlanPolicy < ApplicationPolicy
    # NOTE: @user is the signed_in_user and @record is an instance of Plan
    # A plan may be viewed by any user it is readable by.
    def show?
      @record.readable_by(@user.id)
    end
    # A plan may be updated only by users with edit rights on it.
    def update?
      @record.editable_by(@user.id)
    end
  end
end
| 19.470588 | 74 | 0.694864 |
1a3804043db1af689a07e60c69d78a68e84fce4f | 16,994 | require 'test/unit'
require 'rubygems'
gem 'activerecord', '>= 3.0.0'
require 'active_record'
require 'ruby-debug'
require File.join(File.dirname(__FILE__), '../lib/has_features')
require File.join(File.dirname(__FILE__), 'schema')
# Exercises list-style featuring behaviour (ordering, moving, featuring /
# unfeaturing, deletion, scoping) for FeaturedMixin scoped by :parent_id.
# Fix: `assert_equal nil, x` is deprecated (and rejected by modern
# test-unit); replaced with `assert_nil`, matching the existing usage
# elsewhere in this class.
class FeaturedTest < Test::Unit::TestCase
  def setup
    setup_db
    (1..4).each { |counter| FeaturedMixin.create! :pos => counter, :parent_id => 5 }
  end

  def teardown
    teardown_db
  end

  def test_reordering
    assert_equal [1, 2, 3, 4], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    FeaturedMixin.find(2).move_lower
    assert_equal [1, 3, 2, 4], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    FeaturedMixin.find(2).move_higher
    assert_equal [1, 2, 3, 4], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    FeaturedMixin.find(1).move_to_bottom
    assert_equal [2, 3, 4, 1], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    FeaturedMixin.find(1).move_to_top
    assert_equal [1, 2, 3, 4], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    FeaturedMixin.find(2).move_to_bottom
    assert_equal [1, 3, 4, 2], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    FeaturedMixin.find(4).move_to_top
    assert_equal [4, 1, 3, 2], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
  end

  def test_move_to_bottom_with_next_to_last_item
    assert_equal [1, 2, 3, 4], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    FeaturedMixin.find(3).move_to_bottom
    assert_equal [1, 2, 4, 3], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
  end

  def test_next_prev
    assert_equal FeaturedMixin.find(2), FeaturedMixin.find(1).lower_item
    assert_nil FeaturedMixin.find(1).higher_item
    assert_equal FeaturedMixin.find(3), FeaturedMixin.find(4).higher_item
    assert_nil FeaturedMixin.find(4).lower_item
  end

  def test_injection
    item = FeaturedMixin.new(:parent_id => 1)
    assert_equal '"mixins"."parent_id" = 1', item.scope_condition
    assert_equal "pos", item.featured_position_column
  end

  # A freshly created record is not featured: no position, not in the list.
  def test_insert
    new = FeaturedMixin.create(:parent_id => 20)
    assert_nil new.pos # was: assert_equal nil, new.pos (deprecated)
    assert !new.first?
    assert !new.last?
  end

  def test_featuring
    new = FeaturedMixin.create(:parent_id => 20)
    new.featured = true
    assert_equal 1, new.pos
    assert new.featured?
    assert new.featured
    assert new.first?
    assert new.last?
    new = FeaturedMixin.create(:parent_id => 20)
    new.featured = true
    assert_equal 2, new.pos
    assert !new.first?
    assert new.last?
    new = FeaturedMixin.create(:parent_id => 20)
    new.featured = true
    assert_equal 3, new.pos
    assert !new.first?
    assert new.last?
    # A different scope starts its own independent list.
    new = FeaturedMixin.create(:parent_id => 0)
    new.featured = true
    assert_equal 1, new.pos
    assert new.first?
    assert new.last?
  end

  def test_unfeaturing
    new = FeaturedMixin.create(:parent_id => 20)
    new.featured = true
    assert_equal 1, new.pos
    new.featured = false
    assert_nil new.pos
  end

  def test_feature_at
    new = FeaturedMixin.create(:parent_id => 20)
    new.featured = true
    assert_equal 1, new.pos
    new = FeaturedMixin.create(:parent_id => 20)
    new.featured = true
    assert_equal 2, new.pos
    new = FeaturedMixin.create(:parent_id => 20)
    new.featured = true
    assert_equal 3, new.pos
    new4 = FeaturedMixin.create(:parent_id => 20)
    new4.featured = true
    assert_equal 4, new4.pos
    new4.feature_at(3)
    assert_equal 3, new4.pos
    new.reload
    assert_equal 4, new.pos
    new.feature_at(2)
    assert_equal 2, new.pos
    new4.reload
    assert_equal 4, new4.pos
    new5 = FeaturedMixin.create(:parent_id => 20)
    new5.featured = true
    assert_equal 5, new5.pos
    new5.feature_at(1)
    assert_equal 1, new5.pos
    new4.reload
    assert_equal 5, new4.pos
  end

  # Deleting an item should close the gap it leaves in the positions.
  def test_delete_middle
    assert_equal [1, 2, 3, 4], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    FeaturedMixin.find(2).destroy
    assert_equal [1, 3, 4], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    assert_equal 1, FeaturedMixin.find(1).pos
    assert_equal 2, FeaturedMixin.find(3).pos
    assert_equal 3, FeaturedMixin.find(4).pos
    FeaturedMixin.find(1).destroy
    assert_equal [3, 4], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    assert_equal 1, FeaturedMixin.find(3).pos
    assert_equal 2, FeaturedMixin.find(4).pos
  end

  def test_with_string_based_scope
    new = FeaturedWithStringScopeMixin.create(:parent_id => 500)
    new.featured = true
    assert_equal 1, new.pos
    assert new.first?
    assert new.last?
  end

  def test_nil_scope
    new1, new2, new3 = FeaturedMixin.create, FeaturedMixin.create, FeaturedMixin.create
    new1.featured = true
    new2.featured = true
    new3.featured = true
    new2.move_higher
    assert_equal [new2, new1, new3], FeaturedMixin.where('parent_id IS NULL').order('pos')
  end

  def test_unfeature_should_then_fail_in_list?
    assert_equal true, FeaturedMixin.find(1).in_list?
    FeaturedMixin.find(1).unfeature
    assert_equal false, FeaturedMixin.find(1).in_list?
  end

  def test_unfeature_should_set_position_to_nil
    assert_equal [1, 2, 3, 4], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    FeaturedMixin.find(2).unfeature
    assert_equal [2, 1, 3, 4], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    assert_equal 1, FeaturedMixin.find(1).pos
    assert_nil FeaturedMixin.find(2).pos # was: assert_equal nil, ... (deprecated)
    assert_equal 2, FeaturedMixin.find(3).pos
    assert_equal 3, FeaturedMixin.find(4).pos
  end

  def test_remove_before_destroy_does_not_shift_lower_items_twice
    assert_equal [1, 2, 3, 4], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    FeaturedMixin.find(2).unfeature
    FeaturedMixin.find(2).destroy
    assert_equal [1, 3, 4], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    assert_equal 1, FeaturedMixin.find(1).pos
    assert_equal 2, FeaturedMixin.find(3).pos
    assert_equal 3, FeaturedMixin.find(4).pos
  end

  def test_before_destroy_callbacks_do_not_update_position_to_nil_before_deleting_the_record
    assert_equal [1, 2, 3, 4], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    # We need to trigger all the before_destroy callbacks without actually
    # destroying the record so we can see the affect the callbacks have on
    # the record.
    list = FeaturedMixin.find(2)
    if list.respond_to?(:run_callbacks)
      list.run_callbacks(:destroy)
    else
      list.send(:callback, :before_destroy)
    end
    assert_equal [1, 2, 3, 4], FeaturedMixin.where(:parent_id => 5).order('pos').map(&:id)
    assert_equal 1, FeaturedMixin.find(1).pos
    # Record 2 keeps its position; record 3 has already been shifted down,
    # so positions 2 are momentarily duplicated — this is the expected state.
    assert_equal 2, FeaturedMixin.find(2).pos
    assert_equal 2, FeaturedMixin.find(3).pos
    assert_equal 3, FeaturedMixin.find(4).pos
  end
end
# Same featuring behaviour as FeaturedTest, but records alternate between
# the FeaturedMixinSub1 / FeaturedMixinSub2 subclasses to verify that the
# feature list is shared across an STI hierarchy.
class FeaturedSubTest < Test::Unit::TestCase
  def setup
    setup_db
    # Odd ids get Sub1, even ids get Sub2 — all within scope parent_id 5000.
    (1..4).each { |i| ((i % 2 == 1) ? FeaturedMixinSub1 : FeaturedMixinSub2).create! :pos => i, :parent_id => 5000 }
  end

  def teardown
    teardown_db
  end

  # Moving items via the base class must reorder across both subclasses.
  def test_reordering
    assert_equal [1, 2, 3, 4], FeaturedMixin.where(:parent_id => 5000).order('pos').map(&:id)
    FeaturedMixin.find(2).move_lower
    assert_equal [1, 3, 2, 4], FeaturedMixin.where(:parent_id => 5000).order('pos').map(&:id)
    FeaturedMixin.find(2).move_higher
    assert_equal [1, 2, 3, 4], FeaturedMixin.where(:parent_id => 5000).order('pos').map(&:id)
    FeaturedMixin.find(1).move_to_bottom
    assert_equal [2, 3, 4, 1], FeaturedMixin.where(:parent_id => 5000).order('pos').map(&:id)
    FeaturedMixin.find(1).move_to_top
    assert_equal [1, 2, 3, 4], FeaturedMixin.where(:parent_id => 5000).order('pos').map(&:id)
    FeaturedMixin.find(2).move_to_bottom
    assert_equal [1, 3, 4, 2], FeaturedMixin.where(:parent_id => 5000).order('pos').map(&:id)
    FeaturedMixin.find(4).move_to_top
    assert_equal [4, 1, 3, 2], FeaturedMixin.where(:parent_id => 5000).order('pos').map(&:id)
  end

  def test_move_to_bottom_with_next_to_last_item
    assert_equal [1, 2, 3, 4], FeaturedMixin.where(:parent_id => 5000).order('pos').map(&:id)
    FeaturedMixin.find(3).move_to_bottom
    assert_equal [1, 2, 4, 3], FeaturedMixin.where(:parent_id => 5000).order('pos').map(&:id)
  end

  def test_next_prev
    assert_equal FeaturedMixin.find(2), FeaturedMixin.find(1).lower_item
    assert_nil FeaturedMixin.find(1).higher_item
    assert_equal FeaturedMixin.find(3), FeaturedMixin.find(4).higher_item
    assert_nil FeaturedMixin.find(4).lower_item
  end

  # String attribute keys must work the same as symbol keys.
  def test_injection
    item = FeaturedMixin.new("parent_id"=>1)
    assert_equal '"mixins"."parent_id" = 1', item.scope_condition
    assert_equal "pos", item.featured_position_column
  end

  # feature_at must behave regardless of which subclass each record uses.
  def test_feature_at
    new = FeaturedMixin.create("parent_id" => 20)
    new.featured = true
    assert_equal 1, new.pos
    new = FeaturedMixinSub1.create("parent_id" => 20)
    new.featured = true
    assert_equal 2, new.pos
    new = FeaturedMixinSub2.create("parent_id" => 20)
    new.featured = true
    assert_equal 3, new.pos
    new4 = FeaturedMixin.create("parent_id" => 20)
    new4.featured = true
    assert_equal 4, new4.pos
    new4.feature_at(3)
    assert_equal 3, new4.pos
    new.reload
    assert_equal 4, new.pos
    new.feature_at(2)
    assert_equal 2, new.pos
    new4.reload
    assert_equal 4, new4.pos
    new5 = FeaturedMixinSub1.create("parent_id" => 20)
    new5.featured = true
    assert_equal 5, new5.pos
    new5.feature_at(1)
    assert_equal 1, new5.pos
    new4.reload
    assert_equal 5, new4.pos
  end

  # Deleting an item should close the gap it leaves in the positions.
  def test_delete_middle
    assert_equal [1, 2, 3, 4], FeaturedMixin.where(:parent_id => 5000).order('pos').map(&:id)
    FeaturedMixin.find(2).destroy
    assert_equal [1, 3, 4], FeaturedMixin.where(:parent_id => 5000).order('pos').map(&:id)
    assert_equal 1, FeaturedMixin.find(1).pos
    assert_equal 2, FeaturedMixin.find(3).pos
    assert_equal 3, FeaturedMixin.find(4).pos
    FeaturedMixin.find(1).destroy
    assert_equal [3, 4], FeaturedMixin.where(:parent_id => 5000).order('pos').map(&:id)
    assert_equal 1, FeaturedMixin.find(3).pos
    assert_equal 2, FeaturedMixin.find(4).pos
  end
end
# Featuring behaviour when the scope is a composite (array) of columns:
# records are scoped by [parent_id, parent_type].
# Fix: `assert_equal nil, x` is deprecated (and rejected by modern
# test-unit); replaced with `assert_nil`.
class ArrayScopeFeaturedTest < Test::Unit::TestCase
  def setup
    setup_db
    (1..4).each { |counter| ArrayScopeFeaturedMixin.create! :pos => counter, :parent_id => 5, :parent_type => 'ParentClass' }
  end

  def teardown
    teardown_db
  end

  def test_reordering
    assert_equal [1, 2, 3, 4], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
    ArrayScopeFeaturedMixin.find(2).move_lower
    assert_equal [1, 3, 2, 4], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
    ArrayScopeFeaturedMixin.find(2).move_higher
    assert_equal [1, 2, 3, 4], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
    ArrayScopeFeaturedMixin.find(1).move_to_bottom
    assert_equal [2, 3, 4, 1], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
    ArrayScopeFeaturedMixin.find(1).move_to_top
    assert_equal [1, 2, 3, 4], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
    ArrayScopeFeaturedMixin.find(2).move_to_bottom
    assert_equal [1, 3, 4, 2], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
    ArrayScopeFeaturedMixin.find(4).move_to_top
    assert_equal [4, 1, 3, 2], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
  end

  def test_move_to_bottom_with_next_to_last_item
    assert_equal [1, 2, 3, 4], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
    ArrayScopeFeaturedMixin.find(3).move_to_bottom
    assert_equal [1, 2, 4, 3], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
  end

  def test_next_prev
    assert_equal ArrayScopeFeaturedMixin.find(2), ArrayScopeFeaturedMixin.find(1).lower_item
    assert_nil ArrayScopeFeaturedMixin.find(1).higher_item
    assert_equal ArrayScopeFeaturedMixin.find(3), ArrayScopeFeaturedMixin.find(4).higher_item
    assert_nil ArrayScopeFeaturedMixin.find(4).lower_item
  end

  # Composite scopes should produce an AND-ed scope condition.
  def test_injection
    item = ArrayScopeFeaturedMixin.new(:parent_id => 1, :parent_type => 'ParentClass')
    assert_equal '"mixins"."parent_id" = 1 AND "mixins"."parent_type" = \'ParentClass\'', item.scope_condition
    assert_equal "pos", item.featured_position_column
  end

  def test_insert
    ArrayScopeFeaturedMixin.destroy_all
    new = ArrayScopeFeaturedMixin.create(:parent_id => 20, :parent_type => 'ParentClass')
    new.featured = true
    assert_equal 1, new.pos
    assert new.first?
    assert new.last?
    new = ArrayScopeFeaturedMixin.create(:parent_id => 20, :parent_type => 'ParentClass')
    new.featured = true
    assert_equal 2, new.pos
    assert !new.first?
    assert new.last?
    new = ArrayScopeFeaturedMixin.create(:parent_id => 20, :parent_type => 'ParentClass')
    new.featured = true
    assert_equal 3, new.pos
    assert !new.first?
    assert new.last?
    # A different composite scope starts its own independent list.
    new = ArrayScopeFeaturedMixin.create(:parent_id => 0, :parent_type => 'ParentClass')
    new.featured = true
    assert_equal 1, new.pos
    assert new.first?
    assert new.last?
  end

  def test_feature_at
    new = ArrayScopeFeaturedMixin.create(:parent_id => 20, :parent_type => 'ParentClass')
    new.featured = true
    assert new.featured?
    assert new.featured
    assert_equal 1, new.pos
    new = ArrayScopeFeaturedMixin.create(:parent_id => 20, :parent_type => 'ParentClass')
    new.featured = true
    assert_equal 2, new.pos
    new = ArrayScopeFeaturedMixin.create(:parent_id => 20, :parent_type => 'ParentClass')
    new.featured = true
    assert_equal 3, new.pos
    new4 = ArrayScopeFeaturedMixin.create(:parent_id => 20, :parent_type => 'ParentClass')
    new4.featured = true
    assert_equal 4, new4.pos
    new4.feature_at(3)
    assert_equal 3, new4.pos
    new.reload
    assert_equal 4, new.pos
    new.feature_at(2)
    assert_equal 2, new.pos
    new4.reload
    assert_equal 4, new4.pos
    new5 = ArrayScopeFeaturedMixin.create(:parent_id => 20, :parent_type => 'ParentClass')
    new5.featured = true
    assert_equal 5, new5.pos
    new5.feature_at(1)
    assert_equal 1, new5.pos
    new4.reload
    assert_equal 5, new4.pos
  end

  # Deleting an item should close the gap it leaves in the positions.
  def test_delete_middle
    assert_equal [1, 2, 3, 4], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
    ArrayScopeFeaturedMixin.find(2).destroy
    assert_equal [1, 3, 4], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
    assert_equal 1, ArrayScopeFeaturedMixin.find(1).pos
    assert_equal 2, ArrayScopeFeaturedMixin.find(3).pos
    assert_equal 3, ArrayScopeFeaturedMixin.find(4).pos
    ArrayScopeFeaturedMixin.find(1).destroy
    assert_equal [3, 4], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
    assert_equal 1, ArrayScopeFeaturedMixin.find(3).pos
    assert_equal 2, ArrayScopeFeaturedMixin.find(4).pos
  end

  def test_unfeature_should_then_fail_in_list?
    assert_equal true, ArrayScopeFeaturedMixin.find(1).in_list?
    ArrayScopeFeaturedMixin.find(1).unfeature
    assert_equal false, ArrayScopeFeaturedMixin.find(1).in_list?
  end

  def test_unfeature_should_set_position_to_nil
    assert_equal [1, 2, 3, 4], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
    ArrayScopeFeaturedMixin.find(2).unfeature
    assert_equal [2, 1, 3, 4], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
    assert_equal 1, ArrayScopeFeaturedMixin.find(1).pos
    assert_nil ArrayScopeFeaturedMixin.find(2).pos # was: assert_equal nil, ... (deprecated)
    assert_equal 2, ArrayScopeFeaturedMixin.find(3).pos
    assert_equal 3, ArrayScopeFeaturedMixin.find(4).pos
  end

  def test_remove_before_destroy_does_not_shift_lower_items_twice
    assert_equal [1, 2, 3, 4], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
    ArrayScopeFeaturedMixin.find(2).unfeature
    ArrayScopeFeaturedMixin.find(2).destroy
    assert_equal [1, 3, 4], ArrayScopeFeaturedMixin.where(:parent_id => 5, :parent_type => 'ParentClass').order('pos').map(&:id)
    assert_equal 1, ArrayScopeFeaturedMixin.find(1).pos
    assert_equal 2, ArrayScopeFeaturedMixin.find(3).pos
    assert_equal 3, ArrayScopeFeaturedMixin.find(4).pos
  end
end
e85751d5c36126b604a2be5633b2136140d24dda | 1,560 | Supply::Application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
# Configure static asset server for tests with Cache-Control for performance.
config.serve_static_assets = true
config.static_cache_control = "public, max-age=3600"
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Raise exceptions instead of rendering exception templates.
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment.
config.action_controller.allow_forgery_protection = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
end
| 42.162162 | 85 | 0.774359 |
870bb157fd84cc3e27035d81dd5c30bed5eebb9e | 2,022 | $:.push File.expand_path('../lib', __FILE__)
# Maintain your gem's version:
require 'voluntary_survey/version'
# Describe your gem and declare its dependencies:
# Gem manifest for the voluntary_survey plugin.
Gem::Specification.new do |s|
  s.name = 'voluntary_survey'
  s.version = VoluntarySurvey::VERSION
  s.authors = ['Mathias Gawlista']
  s.email = ['[email protected]']
  s.homepage = 'http://Volontari.at'
  s.summary = 'Plugin about surveys for crowdsourcing management system voluntary.software.'
  s.description = 'Surveys plugin for #crowdsourcing gem voluntary: http://bit.ly/vsy-0-1-1'
  s.license = 'MIT'
  # Ship the engine directories plus the usual top-level docs.
  s.files = Dir['{app,config,db,lib}/**/*', 'MIT-LICENSE', 'Rakefile', 'README.rdoc']
  # Runtime dependencies: the host crowdsourcing engine and its Ember assets.
  s.add_dependency 'voluntary', '~> 0.6'
  s.add_dependency 'voluntary-ember_js', '~> 0.1.0'
  # group :development
  s.add_development_dependency 'letter_opener', '~> 1.0.0'
  # for tracing AR object instantiation and memory usage per request
  s.add_development_dependency 'oink', '~> 0.10.1'
  # group :development, :test
  s.add_development_dependency 'awesome_print', '~> 1.1.0'
  s.add_development_dependency 'rspec-rails', '~> 2.0'
  # group :test
  s.add_development_dependency 'capybara', '~> 2.4.4'
  s.add_development_dependency 'capybara-webkit', '~> 1.6.0'
  s.add_development_dependency 'cucumber-rails-training-wheels', '~> 1.0.0'
  s.add_development_dependency 'timecop', '~> 0.6.1'
  s.add_development_dependency 'factory_girl_rails', '~> 1.7.0'
  s.add_development_dependency 'fixture_builder', '~> 0.3.3'
  s.add_development_dependency 'selenium-webdriver', '~> 2.22.1'
  s.add_development_dependency 'spork', '~> 1.0rc2'
  s.add_development_dependency 'guard-rspec', '~> 3.0.2'
  s.add_development_dependency 'guard-spork', '~> 1.5.1'
  s.add_development_dependency 'guard-cucumber', '~> 1.4.0'
  s.add_development_dependency 'launchy', '~> 2.1.2'
  # group :cucumber, :test
  s.add_development_dependency 'database_cleaner', '~> 0.7.1'
  s.add_development_dependency 'test-unit', '~> 3.0.9'
end
| 39.647059 | 96 | 0.696835 |
e8a693d0c175ce8b1c5f6d7ea99de42fc90a9770 | 1,483 | require "spec_helper"
require "shamu/attributes"
describe Shamu::Attributes::Validation do
  # Minimal attribute class with a required :name; .name is stubbed so
  # validation error messages have a class name to report.
  let( :klass ) do
    Class.new do
      include Shamu::Attributes
      include Shamu::Attributes::Assignment
      include Shamu::Attributes::Validation

      attribute :name, presence: true

      def self.name
        "Example"
      end
    end
  end

  it "forwards unused options to .validates method" do
    # NOTE(review): assigning TestClass creates a top-level constant that
    # leaks across examples — stub_const would isolate it; verify intent.
    TestClass = Class.new do
      include Shamu::Attributes
      include Shamu::Attributes::Validation
    end

    expect( TestClass ).to receive( :validates ).with( :name, presence: true )

    # Reopening the class triggers the attribute macro; the :on option is
    # consumed by the macro and only the remaining options are forwarded.
    class TestClass
      attribute :name, on: :user, presence: true
    end
  end

  it "doesn't clear errors on call to valid?" do
    instance = klass.new( {} )
    instance.validate

    expect( instance ).not_to be_valid
    # Fixing the attribute does not reset previously recorded errors.
    instance.name = "Something"
    expect( instance ).not_to be_valid
  end

  it "validates methods on validate" do
    instance = klass.new( {} )
    instance.validate

    expect( instance.errors ).to have_key :name
  end

  # Validation runs lazily and only once per instance.
  it "validates on first call to valid?" do
    instance = klass.new( {} )
    expect( instance ).to receive( :validate ).once.and_call_original

    instance.valid?
    instance.valid?
  end

  it "supports shamu validators with simple hash names" do
    nested = Class.new( klass ) do
      attribute :nested, valid: true
    end

    expect( nested.validators ).to include Shamu::Attributes::Validators::ValidValidator
  end
end
615670ec9876c54868f7ebae094092fedeb57cb8 | 501 | module Lobbyist
module V2
class CustomInvitation < Lobbyist::V2::Base
attr_accessor :contact_id, :company_id, :first_name, :last_name, :phone_daytime, :email
attr_accessor :city, :state, :last_service_date, :created_at, :updated_at, :abbreviated_name
attr_accessor :review_id, :review_status
def self.list(company_id, params = {})
create_collection_from_response(get("/v2/companies/#{company_id}/custom_invitations.json", params))
end
end
end
end
| 29.470588 | 107 | 0.716567 |
ffd39f91d6dca3d596cbc7da125b188c90ce0968 | 888 | # frozen_string_literal: true
module ElasticAPM
  RSpec.describe Util do
    describe '.nearest_minute', :mock_time do
      it 'normalizes to nearest minute' do
        travel 125_000 # two minutes five secs
        expect(Util.nearest_minute).to eq Time.utc(1992, 1, 1, 0, 2)
      end
    end

    # Fixed: this group was labelled '.ms' although its example exercises
    # Util.micros; metadata style unified with the other groups.
    describe '.micros', :mock_time do
      it 'returns current µs since unix epoch' do
        expect(Util.micros).to eq 694_224_000_000_000
      end
    end

    describe '.truncate' do
      it 'returns nil on nil' do
        expect(Util.truncate(nil)).to be nil
      end

      it 'return string if shorter than max' do
        expect(Util.truncate('poof')).to eq 'poof'
      end

      # Long input is cut to 1023 chars plus a single ellipsis (1024 total).
      it 'returns a truncated string' do
        result = Util.truncate('X' * 2000)
        expect(result).to match(/\AX{1023}…\z/)
        expect(result.length).to be 1024
      end
    end
  end
end
189dd536fd6237e420cf37cf612719b8fa5acd87 | 8,955 | # frozen_string_literal: true
RSpec.shared_context 'clean-up builder' do
  # Disable every feature registered with RgGen so builder state does not
  # leak into subsequent example groups.
  after(:all) do
    RgGen.disable_all
  end
end
RSpec.shared_context 'configuration common' do
  # Loader stub that accepts any file and injects canned values and/or a
  # data block into the input data instead of reading from disk.
  class ConfigurationDummyLoader < RgGen::Core::Configuration::Loader
    class << self
      # Hash of configuration values fed to input_data.values.
      attr_accessor :values
      # Block used to build input data via build_by_block.
      attr_accessor :data_block
    end

    def support?(_file)
      true
    end

    def load_file(_file, input_data, _valid_value_lists)
      # Both guards rely on &&'s short-circuit as a compact "if present".
      self.class.values.size.positive? &&
        input_data.values(self.class.values)
      self.class.data_block &&
        input_data.__send__(:build_by_block, &self.class.data_block)
    end
  end

  def build_configuration_factory(builder, enable_dummy_loader = true)
    factory = builder.build_factory(:input, :configuration)
    factory.loaders([ConfigurationDummyLoader.new([], {})]) if enable_dummy_loader
    factory
  end

  # Builds a configuration object from keyword +values+ and/or a block.
  # The factory is memoized in a one-slot array so it persists across
  # examples yet can still be dropped via delete_configuration_factory.
  def create_configuration(**values, &data_block)
    ConfigurationDummyLoader.values = values
    ConfigurationDummyLoader.data_block = data_block || proc {}
    @configuration_factory[0] ||= build_configuration_factory(RgGen.builder)
    @configuration_factory[0].create([''])
  end

  def raise_configuration_error(message, position = nil)
    raise_rggen_error(RgGen::Core::Configuration::ConfigurationError, message, position)
  end

  def delete_configuration_factory
    @configuration_factory.clear
  end

  before(:all) do
    @configuration_factory ||= []
  end
end
RSpec.shared_context 'register map common' do
  include_context 'configuration common'

  # Fallback configuration used when an example does not supply one.
  let(:default_configuration) do
    create_configuration
  end

  # Loader stub that accepts any file and builds register-map input data
  # from a block instead of parsing a real file.
  class RegisterMapDummyLoader < RgGen::Core::RegisterMap::Loader
    class << self
      # Block used to build input data via build_by_block.
      attr_accessor :data_block
    end

    def support?(_file)
      true
    end

    def load_file(_file, input_data, _valid_value_lists)
      input_data.__send__(:build_by_block, &self.class.data_block)
    end
  end

  def build_register_map_factory(builder, enable_dummy_loader = true)
    factory = builder.build_factory(:input, :register_map)
    factory.loaders([RegisterMapDummyLoader.new([], {})]) if enable_dummy_loader
    factory
  end

  # Builds a register map described by +data_block+. The factory is
  # memoized in a one-slot array so it persists across examples yet can
  # still be dropped via delete_register_map_factory.
  def create_register_map(configuration = nil, &data_block)
    RegisterMapDummyLoader.data_block = data_block || proc {}
    @register_map_factory[0] ||= build_register_map_factory(RgGen.builder)
    @register_map_factory[0].create(configuration || default_configuration, [''])
  end

  def raise_register_map_error(message = nil, position = nil)
    raise_rggen_error(RgGen::Core::RegisterMap::RegisterMapError, message, position)
  end

  def delete_register_map_factory
    @register_map_factory.clear
  end

  # Matcher asserting the accessibility flags implied by +access+
  # (:read_write, :read_only, :write_only or :reserved).
  def match_access(access)
    have_properties [
      [:readable?, [:read_write, :read_only].include?(access)],
      [:writable?, [:read_write, :write_only].include?(access)],
      [:read_only?, access == :read_only],
      [:write_only?, access == :write_only],
      [:reserved?, access == :reserved]
    ]
  end

  before(:all) do
    @register_map_factory ||= []
  end

  # Specs build partial maps, so child-less components must not raise.
  before(:all) do
    RgGen::Core::RegisterMap::ComponentFactory.disable_no_children_error
  end

  after(:all) do
    RgGen::Core::RegisterMap::ComponentFactory.enable_no_children_error
  end
end
RSpec.shared_context 'sv rtl common' do
  include_context 'configuration common'
  include_context 'register map common'

  def build_sv_rtl_factory(builder)
    builder.build_factory(:output, :sv_rtl)
  end

  # Builds SystemVerilog RTL for a register block described by +data_block+.
  def create_sv_rtl(configuration = nil, &data_block)
    register_map = create_register_map(configuration) do
      register_block(&data_block)
    end
    @sv_rtl_factory[0] ||= build_sv_rtl_factory(RgGen.builder)
    @sv_rtl_factory[0].create(configuration || default_configuration, register_map)
  end

  def delete_sv_rtl_factory
    @sv_rtl_factory.clear
  end

  # The matcher helpers below accept three call shapes:
  #   (layer, handler, attributes), (handler, attributes) or (handler).
  # This normalizes them to a [layer, handler, attributes] triple; it
  # replaces the identical conditional previously duplicated in every helper.
  def parse_matcher_args(args)
    if args.size == 3
      args[0..2]
    elsif args.size == 2
      [nil, *args[0..1]]
    else
      [nil, args[0], {}]
    end
  end

  def have_port(*args, &body)
    layer, handler, attributes = parse_matcher_args(args)
    port = RgGen::SystemVerilog::Common::Utility::DataObject.new(:argument, **attributes, &body)
    have_declaration(layer, :port, port.declaration).and have_identifier(handler, port.identifier)
  end

  def not_have_port(*args, &body)
    layer, handler, attributes = parse_matcher_args(args)
    port = RgGen::SystemVerilog::Common::Utility::DataObject.new(:argument, **attributes, &body)
    not_have_declaration(layer, :port, port.declaration).and not_have_identifier(handler)
  end

  def have_interface_port(*args, &body)
    layer, handler, attributes = parse_matcher_args(args)
    port = RgGen::SystemVerilog::Common::Utility::InterfacePort.new(**attributes, &body)
    have_declaration(layer, :port, port.declaration).and have_identifier(handler, port.identifier)
  end

  def have_variable(*args, &body)
    layer, handler, attributes = parse_matcher_args(args)
    variable = RgGen::SystemVerilog::Common::Utility::DataObject.new(:variable, **attributes, &body)
    have_declaration(layer, :variable, variable.declaration).and have_identifier(handler, variable.identifier)
  end

  def have_interface(*args, &body)
    layer, handler, attributes = parse_matcher_args(args)
    interface = RgGen::SystemVerilog::Common::Utility::InterfaceInstance.new(**attributes, &body)
    have_declaration(layer, :variable, interface.declaration).and have_identifier(handler, interface.identifier)
  end

  def not_have_interface(*args, &body)
    layer, handler, attributes = parse_matcher_args(args)
    interface = RgGen::SystemVerilog::Common::Utility::InterfaceInstance.new(**attributes, &body)
    not_have_declaration(layer, :variable, interface.declaration).and not_have_identifier(handler)
  end

  def have_parameter(*args, &body)
    layer, handler, attributes = parse_matcher_args(args)
    parameter = RgGen::SystemVerilog::Common::Utility::DataObject.new(:parameter, **attributes, &body)
    have_declaration(layer, :parameter, parameter.declaration).and have_identifier(handler, parameter.identifier)
  end

  before(:all) do
    @sv_rtl_factory ||= []
  end
end
RSpec.shared_context 'sv ral common' do
  include_context 'configuration common'
  include_context 'register map common'

  def build_sv_ral_factory(builder)
    builder.build_factory(:output, :sv_ral)
  end

  # Builds SystemVerilog RAL output for a register block described by +data_block+.
  def create_sv_ral(configuration = nil, &data_block)
    register_map = create_register_map(configuration) do
      register_block(&data_block)
    end
    @sv_ral_factory[0] ||= build_sv_ral_factory(RgGen.builder)
    @sv_ral_factory[0].create(configuration || default_configuration, register_map)
  end

  def delete_sv_ral_factory
    @sv_ral_factory.clear
  end

  # Normalizes the (layer, handler, attributes) / (handler, attributes) /
  # (handler) call shapes to a [layer, handler, attributes] triple; replaces
  # the conditional previously duplicated in both matcher helpers below.
  def parse_matcher_args(args)
    if args.size == 3
      args[0..2]
    elsif args.size == 2
      [nil, *args[0..1]]
    else
      [nil, args[0], {}]
    end
  end

  def have_variable(*args, &body)
    layer, handler, attributes = parse_matcher_args(args)
    variable = RgGen::SystemVerilog::Common::Utility::DataObject.new(:variable, **attributes, &body)
    have_declaration(layer, :variable, variable.declaration).and have_identifier(handler, variable.identifier)
  end

  def have_parameter(*args, &body)
    layer, handler, attributes = parse_matcher_args(args)
    parameter = RgGen::SystemVerilog::Common::Utility::DataObject.new(:parameter, **attributes, &body)
    have_declaration(layer, :parameter, parameter.declaration).and have_identifier(handler, parameter.identifier)
  end

  before(:all) do
    @sv_ral_factory ||= []
  end
end
RSpec.shared_context 'markdown common' do
  include_context 'configuration common'
  include_context 'register map common'

  def build_markdown_factory(builder)
    builder.build_factory(:output, :markdown)
  end

  # Builds Markdown output for a register block described by +data_block+.
  # (The parameter was previously misspelled 'configuraiton'; renamed —
  # it is positional, so callers are unaffected.)
  def create_markdown(configuration = nil, &data_block)
    register_map = create_register_map(configuration) do
      register_block(&data_block)
    end
    @markdown_factory[0] ||= build_markdown_factory(RgGen.builder)
    @markdown_factory[0].create(configuration || default_configuration, register_map)
  end

  def delete_markdown_factory
    @markdown_factory.clear
  end

  before(:all) do
    @markdown_factory ||= []
  end
end
| 28.980583 | 113 | 0.686097 |
ed908c577414e6531abd32df8ce7ffeedc71f100 | 8,497 | require File.expand_path( File.join( File.dirname( __FILE__ ), '..', 'spec_helper.rb' ) )
require 'tempfile'
# Specs for WhisperFile: header parsing, archive selection, data access via
# [] (single timestamp, slice and range forms), and file creation/validation.
# NOTE: written against legacy RSpec 2 `should`/`stub` syntax; fixture data
# comes from the EMPTY_WSP hash and clone_whisper_file helper in spec_helper.
describe "GraphiteStorage::Whisper::WhisperFile" do
  # --- Header parsing -------------------------------------------------------
  it "should know its header information" do
    whisper_file = GraphiteStorage::Whisper.open(EMPTY_WSP[:file])
    whisper_file.archive_count.should == EMPTY_WSP[:archives]
    whisper_file.aggregation_method.should == EMPTY_WSP[:aggregation]
    whisper_file.max_retention.should == EMPTY_WSP[:max_retention]
    whisper_file.x_files_factor.should == EMPTY_WSP[:x_files_factor]
  end
  it "should detect whether or not the file exists" do
    no_whisper_file = GraphiteStorage::Whisper.open('this_file_does_not_exist')
    no_whisper_file.exists?.should be_false
    existing_whisper_file = GraphiteStorage::Whisper.open(EMPTY_WSP[:file])
    existing_whisper_file.exists?.should be_true
  end
  # Writable header fields: x_files_factor and aggregation_method.
  # Uses a cloned fixture so the original file is never mutated.
  it "should be able to modify settable header values" do
    begin
      temp_file = clone_whisper_file(EMPTY_WSP[:file])
      whisper_file = GraphiteStorage::Whisper.open(temp_file)
      whisper_file.x_files_factor.should == EMPTY_WSP[:x_files_factor]
      whisper_file.x_files_factor = 0.1
      # Rounding is necessary because 0.1 doesnt store exactly as 0.1
      whisper_file.x_files_factor.round(1).should == 0.1
      whisper_file.aggregation_method.should == 'average'
      whisper_file.aggregation_method = 'sum'
      whisper_file.aggregation_method.should == 'sum'
    ensure
      temp_file.unlink
    end
  end
  it "should load the correct number of archives" do
    whisper_file = GraphiteStorage::Whisper.open(EMPTY_WSP[:file])
    whisper_file.archives.length.should == EMPTY_WSP[:archives]
  end
  it "should report the correct update interval" do
    whisper_file = GraphiteStorage::Whisper.open(EMPTY_WSP[:file])
    mock_archive = double('Archive')
    mock_archive.stub(:interval) { 10 }
    mock_archive.stub(:archive_count) { 1 }
    whisper_file.stub(:archives) { [ mock_archive ] }
    whisper_file.update_interval.should == 10
  end
  it "should return an empty series when no data points exist in the requested range" do
    whisper_file = GraphiteStorage::Whisper.open(EMPTY_WSP[:file])
    max_retention = whisper_file.max_retention
    mock_archive = double('Archive')
    mock_archive.stub(:interval) { 20 }
    mock_archive.stub(:retention) { max_retention }
    mock_archive.should_receive(:point_span).with(0, max_retention) { max_retention }
    mock_archive.should_receive(:read).with(0, max_retention) { [] }
    whisper_file.stub(:archives) { [ mock_archive ] }
    result = whisper_file[0,max_retention]
    result.empty?.should be_true
    result.should == []
  end
  # --- Archive selection ----------------------------------------------------
  # Three mocked archives with increasing retention; each request should be
  # served by the archive whose span best matches the requested range.
  it "should select the archive that matches the width of the requested range and has the best point span" do
    whisper_file = GraphiteStorage::Whisper.open(nil)
    whisper_file.stub(:archive_count) { 3 } # add another archive to the actual file
    whisper_file.stub(:max_retention) { 1200 }
    first_archive = double('Archive0')
    first_archive.stub(:interval) { 60 }
    first_archive.stub(:retention) { 60 }
    first_archive.stub(:point_span) { 60 }
    first_archive.stub(:read) { [0]*60 }
    second_archive = double('Archive1')
    second_archive.stub(:interval) { 240 }
    second_archive.stub(:retention) { 240 }
    second_archive.stub(:point_span) { 240 }
    second_archive.stub(:read) { [0]*240 }
    third_archive = double('Archive2')
    third_archive.stub(:interval) { 1200 }
    third_archive.stub(:retention) { 1200 }
    third_archive.stub(:point_span) { 1200 }
    third_archive.stub(:read) { [0]*1200 }
    whisper_file.stub(:archives) { [ first_archive, second_archive, third_archive ] }
    now = 1200
    whisper_file[now - 60,now].length.should == 60
    whisper_file[now - 240,now].length.should == 240
    whisper_file[now - 1200,now].length.should == 1200
  end
  # --- Indexed access: [ts], [ts,count] slice and [ts..ts] range forms ------
  it "should return a single value when accessed as an array with a single timestamp" do
    whisper_file = GraphiteStorage::Whisper.open(nil)
    whisper_file.stub(:archive_count) { 1 }
    whisper_file.stub(:max_retention) { 60 }
    mock_archive = double('Archive')
    mock_archive.stub(:interval) { 60 }
    mock_archive.stub(:retention) { 60 }
    mock_archive.stub(:point_span) { 60 }
    mock_archive.stub(:read) { [0] }
    whisper_file.stub(:archives) { [ mock_archive ] }
    data_point = whisper_file[0]
    data_point.should == 0
  end
  it "should return a valid time series when accessed as a slice" do
    whisper_file = GraphiteStorage::Whisper.open(nil)
    whisper_file.stub(:archive_count) { 1 }
    whisper_file.stub(:max_retention) { 60 }
    mock_archive = double('Archive')
    mock_archive.stub(:interval) { 60 }
    mock_archive.stub(:retention) { 60 }
    mock_archive.stub(:point_span) { 60 }
    mock_archive.stub(:read) {
      GraphiteStorage::Whisper::Series.new([0]*60, 60, 0, 60)
    }
    whisper_file.stub(:archives) { [ mock_archive ] }
    data_points = whisper_file[0,60]
    data_points.empty?.should be_false
    data_points.should == [0]*60
    data_points.begin.should == 0
    data_points.end.should == 60
  end
  it "should return a valid time series when accessed as a range" do
    whisper_file = GraphiteStorage::Whisper.open(nil)
    whisper_file.stub(:archive_count) { 1 }
    whisper_file.stub(:max_retention) { 60 }
    mock_archive = double('Archive')
    mock_archive.stub(:interval) { 60 }
    mock_archive.stub(:retention) { 60 }
    mock_archive.stub(:point_span) { 60 }
    mock_archive.stub(:read) {
      GraphiteStorage::Whisper::Series.new([0]*60, 60, 0, 60)
    }
    whisper_file.stub(:archives) { [ mock_archive ] }
    data_points = whisper_file[0..60]
    data_points.empty?.should be_false
    data_points.should == [0]*60
    data_points.begin.should == 0
    data_points.end.should == 60
  end
  # --- Creation -------------------------------------------------------------
  # Retention specs are "interval:count" strings, e.g. '10:60' = 60 points at
  # 10-second resolution.
  it "should be able to create a new whisper file with the specified retentions and default settings" do
    temp_file = Tempfile.new('whisper')
    defaults = GraphiteStorage::Whisper::Constants::DEFAULT_WHISPER_OPTIONS
    whisper_file = GraphiteStorage::Whisper::WhisperFile.new(temp_file)
    whisper_file.create!('10:60', '60:20')
    whisper_file = GraphiteStorage::Whisper::WhisperFile.new(temp_file)
    whisper_file.aggregation_method.should == defaults[:aggregation_method]
    whisper_file.archive_count.should == 2
    whisper_file.max_retention.should == 1200
    whisper_file.x_files_factor.should == defaults[:x_files_factor]
  end
  it "should be able to create a new whisper file with the specified retentions and specified settings" do
    temp_file = Tempfile.new('whisper')
    whisper_file = GraphiteStorage::Whisper::WhisperFile.new(temp_file)
    whisper_file.create!('10:60', '60:20', :x_files_factor => 0.0, :aggregation_method => 'last')
    whisper_file = GraphiteStorage::Whisper::WhisperFile.new(temp_file)
    whisper_file.aggregation_method.should == 'last'
    whisper_file.archive_count.should == 2
    whisper_file.max_retention.should == 1200
    whisper_file.x_files_factor.should == 0.0
  end
  # --- Creation validation: malformed retention combinations must raise -----
  it "should reject creation of a whisper file with no archives" do
    temp_file = Tempfile.new('whisper')
    expect { GraphiteStorage::Whisper::WhisperFile.create(temp_file) }.to raise_error(GraphiteStorage::Exceptions::InvalidParameter)
  end
  it "should reject creation of a whisper file with intervals that dont evenly divide" do
    temp_file = Tempfile.new('whisper')
    expect { GraphiteStorage::Whisper::WhisperFile.create(temp_file, '3:10', '5:10') }.to raise_error(GraphiteStorage::Exceptions::InvalidParameter)
  end
  it "should reject creation of a whisper file with retentions that are not ordered smallest retention to largest" do
    temp_file = Tempfile.new('whisper')
    expect { GraphiteStorage::Whisper::WhisperFile.create(temp_file, '10:20', '20:5') }.to raise_error(GraphiteStorage::Exceptions::InvalidParameter)
  end
  it "should reject creation of a whisper file with intervals that are identical" do
    temp_file = Tempfile.new('whisper')
    expect { GraphiteStorage::Whisper::WhisperFile.create(temp_file, '10:20', '10:30') }.to raise_error(GraphiteStorage::Exceptions::InvalidParameter)
  end
  it "should reject creation of a whisper file where time retentions are identical" do
    temp_file = Tempfile.new('whisper')
    expect { GraphiteStorage::Whisper::WhisperFile.create(temp_file, '10:20', '20:10') }.to raise_error(GraphiteStorage::Exceptions::InvalidParameter)
  end
end
| 43.798969 | 150 | 0.722843 |
210b64dacb7a4f503137ee07e9c97702b35bbff7 | 1,842 | module ScoutApm
module LayerConverters
class DatabaseConverter < ConverterBase
def initialize(*)
super
@db_query_metric_set = DbQueryMetricSet.new(context)
end
def register_hooks(walker)
super
return unless scope_layer
walker.on do |layer|
next if skip_layer?(layer)
stat = DbQueryMetricStats.new(
model_name(layer),
operation_name(layer),
scope_layer.legacy_metric_name, # controller_scope
1, # count, this is a single query, so 1
layer.total_call_time,
records_returned(layer)
)
@db_query_metric_set << stat
end
end
def record!
# Everything in the metric set here is from a single transaction, which
# we want to keep track of. (One web call did a User#find 10 times, but
# only due to 1 http request)
@db_query_metric_set.increment_transaction_count!
@store.track_db_query_metrics!(@db_query_metric_set)
nil # not returning anything in the layer results ... not used
end
def skip_layer?(layer)
layer.type != 'ActiveRecord' ||
layer.limited? ||
super
end
private
# If we can't name the model, default to:
DEFAULT_MODEL = "SQL"
# If we can't name the operation, default to:
DEFAULT_OPERATION = "other"
def model_name(layer)
layer.name.to_s.split("/").first || DEFAULT_MODEL
end
def operation_name(layer)
layer.name.to_s.split("/")[1] || DEFAULT_OPERATION
end
def records_returned(layer)
if layer.annotations
layer.annotations.fetch(:record_count, 0)
else
0
end
end
end
end
end
| 25.943662 | 81 | 0.588491 |
1a0fcfc088a28cbf5815540bba8fab12a5096d1c | 40,283 | # Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'faraday'
require 'stringio'
require 'addressable/uri'
require 'signet'
require 'signet/errors'
require 'signet/oauth_2'
require 'jwt'
module Signet
module OAuth2
class Client
OOB_MODES = %w(urn:ietf:wg:oauth:2.0:oob:auto urn:ietf:wg:oauth:2.0:oob oob)
##
# Creates an OAuth 2.0 client.
#
# @param [Hash] options
# The configuration parameters for the client.
# - <code>:authorization_uri</code> -
# The authorization server's HTTP endpoint capable of
# authenticating the end-user and obtaining authorization.
# - <code>:token_credential_uri</code> -
# The authorization server's HTTP endpoint capable of issuing
# tokens and refreshing expired tokens.
# - <code>:client_id</code> -
# A unique identifier issued to the client to identify itself to the
# authorization server.
# - <code>:client_secret</code> -
# A shared symmetric secret issued by the authorization server,
# which is used to authenticate the client.
# - <code>:scope</code> -
# The scope of the access request, expressed either as an Array
# or as a space-delimited String.
# - <code>:state</code> -
# An arbitrary string designed to allow the client to maintain state.
# - <code>:code</code> -
# The authorization code received from the authorization server.
# - <code>:redirect_uri</code> -
# The redirection URI used in the initial request.
# - <code>:username</code> -
# The resource owner's username.
# - <code>:password</code> -
# The resource owner's password.
# - <code>:issuer</code> -
# Issuer ID when using assertion profile
# - <code>:person</code> -
# Target user for assertions
# - <code>:expiry</code> -
# Number of seconds assertions are valid for
# - <code>:signing_key</code> -
# Signing key when using assertion profile
# - <code>:refresh_token</code> -
# The refresh token associated with the access token
# to be refreshed.
# - <code>:access_token</code> -
# The current access token for this client.
# - <code>:id_token</code> -
# The current ID token for this client.
# - <code>:extension_parameters</code> -
# When using an extension grant type, this the set of parameters used
# by that extension.
#
# @example
# client = Signet::OAuth2::Client.new(
# :authorization_uri =>
# 'https://example.server.com/authorization',
# :token_credential_uri =>
# 'https://example.server.com/token',
# :client_id => 'anonymous',
# :client_secret => 'anonymous',
# :scope => 'example',
# :redirect_uri => 'https://example.client.com/oauth'
# )
#
# @see Signet::OAuth2::Client#update!
def initialize(options={})
@authorization_uri = nil
@token_credential_uri = nil
@client_id = nil
@client_secret = nil
@code = nil
@expires_at = nil
@expires_in = nil
@issued_at = nil
@issuer = nil
@password = nil
@principal = nil
@redirect_uri = nil
@scope = nil
@state = nil
@username = nil
self.update!(options)
end
##
# Updates an OAuth 2.0 client.
#
# @param [Hash] options
# The configuration parameters for the client.
# - <code>:authorization_uri</code> -
# The authorization server's HTTP endpoint capable of
# authenticating the end-user and obtaining authorization.
# - <code>:token_credential_uri</code> -
# The authorization server's HTTP endpoint capable of issuing
# tokens and refreshing expired tokens.
# - <code>:client_id</code> -
# A unique identifier issued to the client to identify itself to the
# authorization server.
# - <code>:client_secret</code> -
# A shared symmetric secret issued by the authorization server,
# which is used to authenticate the client.
# - <code>:scope</code> -
# The scope of the access request, expressed either as an Array
# or as a space-delimited String.
# - <code>:state</code> -
# An arbitrary string designed to allow the client to maintain state.
# - <code>:code</code> -
# The authorization code received from the authorization server.
# - <code>:redirect_uri</code> -
# The redirection URI used in the initial request.
# - <code>:username</code> -
# The resource owner's username.
# - <code>:password</code> -
# The resource owner's password.
# - <code>:issuer</code> -
# Issuer ID when using assertion profile
# - <code>:audience</code> -
# Target audience for assertions
# - <code>:person</code> -
# Target user for assertions
# - <code>:expiry</code> -
# Number of seconds assertions are valid for
# - <code>:signing_key</code> -
# Signing key when using assertion profile
# - <code>:refresh_token</code> -
# The refresh token associated with the access token
# to be refreshed.
# - <code>:access_token</code> -
# The current access token for this client.
# - <code>:id_token</code> -
# The current ID token for this client.
# - <code>:extension_parameters</code> -
# When using an extension grant type, this is the set of parameters used
# by that extension.
#
# @example
# client.update!(
# :code => 'i1WsRn1uB1',
# :access_token => 'FJQbwq9',
# :expires_in => 3600
# )
#
# @see Signet::OAuth2::Client#initialize
# @see Signet::OAuth2::Client#update_token!
def update!(options={})
# Normalize all keys to symbols to allow indifferent access.
options = deep_hash_normalize(options)
self.authorization_uri = options[:authorization_uri] if options.has_key?(:authorization_uri)
self.token_credential_uri = options[:token_credential_uri] if options.has_key?(:token_credential_uri)
self.client_id = options[:client_id] if options.has_key?(:client_id)
self.client_secret = options[:client_secret] if options.has_key?(:client_secret)
self.scope = options[:scope] if options.has_key?(:scope)
self.state = options[:state] if options.has_key?(:state)
self.code = options[:code] if options.has_key?(:code)
self.redirect_uri = options[:redirect_uri] if options.has_key?(:redirect_uri)
self.username = options[:username] if options.has_key?(:username)
self.password = options[:password] if options.has_key?(:password)
self.issuer = options[:issuer] if options.has_key?(:issuer)
self.person = options[:person] if options.has_key?(:person)
self.sub = options[:sub] if options.has_key?(:sub)
self.expiry = options[:expiry] || 60
self.audience = options[:audience] if options.has_key?(:audience)
self.signing_key = options[:signing_key] if options.has_key?(:signing_key)
self.extension_parameters = options[:extension_parameters] || {}
self.additional_parameters = options[:additional_parameters] || {}
self.update_token!(options)
return self
end
##
# Updates an OAuth 2.0 client.
#
# @param [Hash] options
# The configuration parameters related to the token.
# - <code>:refresh_token</code> -
# The refresh token associated with the access token
# to be refreshed.
# - <code>:access_token</code> -
# The current access token for this client.
# - <code>:id_token</code> -
# The current ID token for this client.
# - <code>:expires_in</code> -
# The time in seconds until access token expiration.
# - <code>:expires_at</code> -
# The time as an integer number of seconds since the Epoch
# - <code>:issued_at</code> -
# The timestamp that the token was issued at.
#
# @example
# client.update!(
# :refresh_token => 'n4E9O119d',
# :access_token => 'FJQbwq9',
# :expires_in => 3600
# )
#
# @see Signet::OAuth2::Client#initialize
# @see Signet::OAuth2::Client#update!
def update_token!(options={})
# Normalize all keys to symbols to allow indifferent access internally
options = deep_hash_normalize(options)
self.expires_in = options[:expires] if options.has_key?(:expires)
self.expires_in = options[:expires_in] if options.has_key?(:expires_in)
self.expires_at = options[:expires_at] if options.has_key?(:expires_at)
# By default, the token is issued at `Time.now` when `expires_in` is
# set, but this can be used to supply a more precise time.
self.issued_at = options[:issued_at] if options.has_key?(:issued_at)
self.access_token = options[:access_token] if options.has_key?(:access_token)
self.refresh_token = options[:refresh_token] if options.has_key?(:refresh_token)
self.id_token = options[:id_token] if options.has_key?(:id_token)
return self
end
##
# Returns the authorization URI that the user should be redirected to.
#
# @return [Addressable::URI] The authorization URI.
#
# @see Signet::OAuth2.generate_authorization_uri
def authorization_uri(options={})
# Normalize external input
options = deep_hash_normalize(options)
return nil if @authorization_uri == nil
unless options[:response_type]
options[:response_type] = :code
end
unless options[:access_type]
options[:access_type] = :offline
end
options[:client_id] ||= self.client_id
options[:redirect_uri] ||= self.redirect_uri
if options[:prompt] && options[:approval_prompt]
raise ArgumentError, "prompt and approval_prompt are mutually exclusive parameters"
end
if !options[:client_id]
raise ArgumentError, "Missing required client identifier."
end
unless options[:redirect_uri]
raise ArgumentError, "Missing required redirect URI."
end
if !options[:scope] && self.scope
options[:scope] = self.scope.join(' ')
end
options[:state] = self.state unless options[:state]
options.merge!(self.additional_parameters.merge(options[:additional_parameters] || {}))
options.delete(:additional_parameters)
options = Hash[options.map do |key, option|
[key.to_s, option]
end]
uri = Addressable::URI.parse(
::Signet::OAuth2.generate_authorization_uri(
@authorization_uri, options
)
)
if uri.normalized_scheme != 'https'
raise Signet::UnsafeOperationError,
'Authorization endpoint must be protected by TLS.'
end
return uri
end
##
# Sets the authorization URI for this client.
#
# @param [Addressable::URI, Hash, String, #to_str] new_authorization_uri
# The authorization URI.
def authorization_uri=(new_authorization_uri)
@authorization_uri = coerce_uri(new_authorization_uri)
end
##
# Returns the token credential URI for this client.
#
# @return [Addressable::URI] The token credential URI.
def token_credential_uri
return @token_credential_uri
end
##
# Sets the token credential URI for this client.
#
# @param [Addressable::URI, Hash, String, #to_str] new_token_credential_uri
# The token credential URI.
def token_credential_uri=(new_token_credential_uri)
@token_credential_uri = coerce_uri(new_token_credential_uri)
end
# Addressable expects URIs formatted as hashes to come in with symbols as keys.
# Returns nil implicitly for the nil case.
def coerce_uri(incoming_uri)
if incoming_uri.is_a? Hash
Addressable::URI.new(deep_hash_normalize(incoming_uri))
elsif incoming_uri
Addressable::URI.parse(incoming_uri)
end
end
##
# Returns the client identifier for this client.
#
# @return [String] The client identifier.
def client_id
return @client_id
end
##
# Sets the client identifier for this client.
#
# @param [String] new_client_id
# The client identifier.
def client_id=(new_client_id)
@client_id = new_client_id
end
##
# Returns the client secret for this client.
#
# @return [String] The client secret.
def client_secret
return @client_secret
end
##
# Sets the client secret for this client.
#
# @param [String] new_client_secret
# The client secret.
def client_secret=(new_client_secret)
@client_secret = new_client_secret
end
##
# Returns the scope for this client. Scope is a list of access ranges
# defined by the authorization server.
#
# @return [Array] The scope of access the client is requesting.
def scope
return @scope
end
##
# Sets the scope for this client.
#
# @param [Array, String] new_scope
# The scope of access the client is requesting. This may be
# expressed as either an Array of String objects or as a
# space-delimited String.
def scope=(new_scope)
case new_scope
when Array
new_scope.each do |scope|
if scope.include?(' ')
raise ArgumentError,
"Individual scopes cannot contain the space character."
end
end
@scope = new_scope
when String
@scope = new_scope.split(' ')
when nil
@scope = nil
else
raise TypeError, "Expected Array or String, got #{new_scope.class}"
end
end
##
# Returns the client's current state value.
#
# @return [String] The state value.
def state
return @state
end
##
# Sets the client's current state value.
#
# @param [String] new_state
# The state value.
def state=(new_state)
@state = new_state
end
##
# Returns the authorization code issued to this client.
# Used only by the authorization code access grant type.
#
# @return [String] The authorization code.
def code
return @code
end
##
# Sets the authorization code issued to this client.
# Used only by the authorization code access grant type.
#
# @param [String] new_code
# The authorization code.
def code=(new_code)
@code = new_code
end
##
# Returns the redirect URI for this client.
#
# @return [String] The redirect URI.
def redirect_uri
return @redirect_uri
end
##
# Sets the redirect URI for this client.
#
# @param [String] new_redirect_uri
# The redirect URI.
def redirect_uri=(new_redirect_uri)
new_redirect_uri = Addressable::URI.parse(new_redirect_uri)
#TODO - Better solution to allow google postmessage flow. For now, make an exception to the spec.
if new_redirect_uri == nil|| new_redirect_uri.absolute? || uri_is_postmessage?(new_redirect_uri) || uri_is_oob?(new_redirect_uri)
@redirect_uri = new_redirect_uri
else
raise ArgumentError, "Redirect URI must be an absolute URI."
end
end
##
# Returns the username associated with this client.
# Used only by the resource owner password credential access grant type.
#
# @return [String] The username.
def username
return @username
end
##
# Sets the username associated with this client.
# Used only by the resource owner password credential access grant type.
#
# @param [String] new_username
# The username.
def username=(new_username)
@username = new_username
end
##
# Returns the password associated with this client.
# Used only by the resource owner password credential access grant type.
#
# @return [String] The password.
def password
return @password
end
##
# Sets the password associated with this client.
# Used only by the resource owner password credential access grant type.
#
# @param [String] new_password
# The password.
def password=(new_password)
@password = new_password
end
##
# Returns the issuer ID associated with this client.
# Used only by the assertion grant type.
#
# @return [String] Issuer id.
def issuer
return @issuer
end
##
# Sets the issuer ID associated with this client.
# Used only by the assertion grant type.
#
# @param [String] new_issuer
# Issuer ID (typical in email adddress form).
def issuer=(new_issuer)
@issuer = new_issuer
end
##
# Returns the issuer ID associated with this client.
# Used only by the assertion grant type.
#
# @return [String] Target audience ID.
def audience
return @audience
end
##
# Sets the target audience ID when issuing assertions.
# Used only by the assertion grant type.
#
# @param [String] new_audience
# Target audience ID
def audience=(new_audience)
@audience = new_audience
end
##
# Returns the target resource owner for impersonation.
# Used only by the assertion grant type.
#
# @return [String] Target user for impersonation.
def principal
return @principal
end
##
# Sets the target resource owner for impersonation.
# Used only by the assertion grant type.
#
# @param [String] new_person
# Target user for impersonation
def principal=(new_person)
@principal = new_person
end
alias_method :person, :principal
alias_method :person=, :principal=
##
# The target "sub" when issuing assertions.
# Used in some Admin SDK APIs.
#
attr_accessor :sub
##
# Returns the number of seconds assertions are valid for
# Used only by the assertion grant type.
#
# @return [Fixnum] Assertion expiry, in seconds
def expiry
return @expiry
end
##
# Sets the number of seconds assertions are valid for
# Used only by the assertion grant type.
#
# @param [Fixnum, String] new_expiry
# Assertion expiry, in seconds
def expiry=(new_expiry)
@expiry = new_expiry ? new_expiry.to_i : nil
end
##
# Returns the signing key associated with this client.
# Used only by the assertion grant type.
#
# @return [String,OpenSSL::PKey] Signing key
def signing_key
return @signing_key
end
##
# Sets the signing key when issuing assertions.
# Used only by the assertion grant type.
#
# @param [String, OpenSSL::Pkey] new_key
# Signing key. Either private key for RSA or string for HMAC algorithm
def signing_key=(new_key)
@signing_key = new_key
end
##
# Algorithm used for signing JWTs
# @return [String] Signing algorithm
def signing_algorithm
self.signing_key.is_a?(String) ? "HS256" : "RS256"
end
##
# Returns the set of extension parameters used by the client.
# Used only by extension access grant types.
#
# @return [Hash] The extension parameters.
def extension_parameters
return @extension_parameters ||= {}
end
##
# Sets extension parameters used by the client.
# Used only by extension access grant types.
#
# @param [Hash] new_extension_parameters
# The parameters.
def extension_parameters=(new_extension_parameters)
if new_extension_parameters.respond_to?(:to_hash)
@extension_parameters = new_extension_parameters.to_hash
else
raise TypeError,
"Expected Hash, got #{new_extension_parameters.class}."
end
end
##
# Returns the set of additional (non standard) parameters to be used by the client.
#
# @return [Hash] The pass through parameters.
def additional_parameters
return @additional_parameters ||= {}
end
##
# Sets additional (non standard) parameters to be used by the client.
#
# @param [Hash] new_additional_parameters
# The parameters.
def additional_parameters=(new_additional_parameters)
if new_additional_parameters.respond_to?(:to_hash)
@additional_parameters = new_additional_parameters.to_hash
else
raise TypeError,
"Expected Hash, got #{new_additional_parameters.class}."
end
end
##
# Returns the refresh token associated with this client.
#
# @return [String] The refresh token.
def refresh_token
return @refresh_token ||= nil
end
##
# Sets the refresh token associated with this client.
#
# @param [String] new_refresh_token
# The refresh token.
def refresh_token=(new_refresh_token)
@refresh_token = new_refresh_token
end
##
# Returns the access token associated with this client.
#
# @return [String] The access token.
def access_token
return @access_token ||= nil
end
##
# Sets the access token associated with this client.
#
# @param [String] new_access_token
# The access token.
def access_token=(new_access_token)
@access_token = new_access_token
end
##
# Returns the ID token associated with this client.
#
# @return [String] The ID token.
def id_token
return @id_token ||= nil
end
##
# Sets the ID token associated with this client.
#
# @param [String] new_id_token
# The ID token.
def id_token=(new_id_token)
@id_token = new_id_token
end
##
# Returns the decoded ID token associated with this client.
#
# @param [OpenSSL::PKey::RSA, Object] public_key
# The public key to use to verify the ID token. Skips verification if
# omitted.
#
# @return [String] The decoded ID token.
def decoded_id_token(public_key=nil, options = {})
payload, _header = JWT.decode(self.id_token, public_key, !!public_key, options)
if !payload.has_key?('aud')
raise Signet::UnsafeOperationError, 'No ID token audience declared.'
elsif payload['aud'] != self.client_id
raise Signet::UnsafeOperationError,
'ID token audience did not match Client ID.'
end
return payload
end
##
# Returns the lifetime of the access token in seconds.
#
# @return [Fixnum] The access token lifetime.
def expires_in
return @expires_in
end
##
# Sets the lifetime of the access token in seconds. Resets the issued
# timestamp.
#
# @param [String, Fixnum] new_expires_in
# The access token lifetime.
def expires_in=(new_expires_in)
if new_expires_in != nil
@expires_in = new_expires_in.to_i
@issued_at = Time.now
else
@expires_in, @issued_at, @expires_at = nil, nil, nil
end
end
##
# Returns the timestamp the access token was issued at.
#
# @return [Time] The access token issuance time.
def issued_at
return @issued_at
end
##
# Sets the timestamp the access token was issued at.
#
# @param [String,Fixnum,Time] new_issued_at
# The access token issuance time.
def issued_at=(new_issued_at)
@issued_at = normalize_timestamp(new_issued_at)
end
##
# Returns the timestamp the access token will expire at.
#
# @return [Time] The access token lifetime.
def expires_at
if @expires_at
@expires_at
elsif @issued_at && @expires_in
return @issued_at + @expires_in
else
return nil
end
end
##
# Limits the lifetime of the access token as number of seconds since
# the Epoch
# @param [String,Fixnum,Time] new_expires_at
# The access token issuance time.
def expires_at=(new_expires_at)
@expires_at = normalize_timestamp(new_expires_at)
end
##
# Returns true if the access token has expired.
#
# @return [TrueClass, FalseClass]
# The expiration state of the access token.
def expired?
return self.expires_at != nil && Time.now >= self.expires_at
end
##
# Returns true if the access token has expired or expires within
# the next n seconds
#
# @param [Fixnum] sec
# Max number of seconds from now where a token is still considered
# expired.
# @return [TrueClass, FalseClass]
# The expiration state of the access token.
def expires_within?(sec)
return self.expires_at.nil? || Time.now >= (self.expires_at - sec)
end
##
# Removes all credentials from the client.
def clear_credentials!
@access_token = nil
@refresh_token = nil
@id_token = nil
@username = nil
@password = nil
@code = nil
@issued_at = nil
@expires_in = nil
end
##
# Returns the inferred grant type, based on the current state of the
# client object. Returns `"none"` if the client has insufficient
# information to make an in-band authorization request.
#
# @return [String]
# The inferred grant type.
def grant_type
@grant_type ||= nil
if @grant_type
return @grant_type
else
if self.code && self.redirect_uri
'authorization_code'
elsif self.refresh_token
'refresh_token'
elsif self.username && self.password
'password'
elsif self.issuer && self.signing_key
'urn:ietf:params:oauth:grant-type:jwt-bearer'
else
# We don't have sufficient auth information, assume an out-of-band
# authorization arrangement between the client and server, or an
# extension grant type.
nil
end
end
end
##
# Sets the grant type. The four core OAuth 2 grant types are stored as
# plain strings; anything else is treated as an extension grant type and
# parsed as a URI.
#
# @param [String] new_grant_type The grant type to assign.
def grant_type=(new_grant_type)
  core_grant_types = %w[authorization_code refresh_token password client_credentials]
  @grant_type =
    if core_grant_types.include?(new_grant_type)
      new_grant_type
    else
      Addressable::URI.parse(new_grant_type)
    end
end
##
# Builds and signs a JWT assertion from the client's issuer, audience,
# and expiry settings, for use with the JWT bearer grant.
#
# @param [Hash] options
#   - <code>:skew</code> — seconds to back-date the issued-at claim to
#     tolerate clock drift (defaults to 60).
# @return [String] The encoded, signed JWT.
def to_jwt(options={})
  options = deep_hash_normalize(options)
  reference_time = Time.new
  clock_skew = options[:skew] || 60
  claims = {
    "iss" => self.issuer,
    "aud" => self.audience,
    "exp" => (reference_time + self.expiry).to_i,
    "iat" => (reference_time - clock_skew).to_i
  }
  claims['scope'] = self.scope.join(' ') unless self.scope.nil?
  claims['prn'] = self.person unless self.person.nil?
  claims['sub'] = self.sub unless self.sub.nil?
  JWT.encode(claims, self.signing_key, self.signing_algorithm)
end
##
# Serialize the client object to JSON.
#
# @note A serialized client contains sensitive information. Persist or transmit with care.
#
# The splat parameter soaks up the generator-state argument that Ruby's
# JSON library passes when this object is serialized as part of a larger
# structure (`obj.to_json(state)`); a zero-arity `to_json` would raise
# ArgumentError in that case.
#
# @return [String] A serialized JSON representation of the client.
def to_json(*)
  return MultiJson.dump({
    'authorization_uri' => self.authorization_uri ? self.authorization_uri.to_s : nil,
    'token_credential_uri' => self.token_credential_uri ? self.token_credential_uri.to_s : nil,
    'client_id' => self.client_id,
    'client_secret' => self.client_secret,
    'scope' => self.scope,
    'state' => self.state,
    'code' => self.code,
    'redirect_uri' => self.redirect_uri ? self.redirect_uri.to_s : nil,
    'username' => self.username,
    'password' => self.password,
    'issuer' => self.issuer,
    'audience' => self.audience,
    'person' => self.person,
    'expiry' => self.expiry,
    'expires_at' => self.expires_at ? self.expires_at.to_i : nil,
    'signing_key' => self.signing_key,
    'refresh_token' => self.refresh_token,
    'access_token' => self.access_token,
    'id_token' => self.id_token,
    'extension_parameters' => self.extension_parameters
  })
end
##
# Generates the parameter hash for a token-credentials request,
# according to the client's current grant type.
#
# @param [Hash] options
#   The configuration parameters for the request.
#   - <code>:scope</code> — overrides the scope sent to the server.
#   - <code>:additional_parameters</code> — merged over the client's
#     own additional parameters.
#
# @private
# @return [Hash] The token request parameters.
def generate_access_token_request(options={})
  options = deep_hash_normalize(options)
  parameters = {"grant_type" => self.grant_type}
  case self.grant_type
  when 'authorization_code'
    parameters['code'] = self.code
    parameters['redirect_uri'] = self.redirect_uri
  when 'password'
    parameters['username'] = self.username
    parameters['password'] = self.password
  when 'refresh_token'
    parameters['refresh_token'] = self.refresh_token
  when 'urn:ietf:params:oauth:grant-type:jwt-bearer'
    parameters['assertion'] = self.to_jwt(options)
  else
    # A redirect URI without a code means the grant type was intended to
    # be `authorization_code` but the code is missing.
    raise ArgumentError, 'Missing authorization code.' if self.redirect_uri
    parameters.merge!(self.extension_parameters)
  end
  parameters['client_id'] = self.client_id unless self.client_id.nil?
  parameters['client_secret'] = self.client_secret unless self.client_secret.nil?
  parameters['scope'] = options[:scope] if options[:scope]
  extra = self.additional_parameters.merge(options[:additional_parameters] || {})
  extra.each { |key, value| parameters[key.to_s] = value }
  parameters
end
##
# POSTs the token request to the token endpoint and parses the returned
# credentials.
#
# @param [Hash] options
#   - <code>:connection</code> — the HTTP client to use; defaults to
#     Faraday's default connection. A non-Faraday client is assumed to
#     speak the Hurley API.
# @return [Hash] The parsed token credentials on HTTP 200.
# @raise [ArgumentError] When no token endpoint URI is configured.
# @raise [Signet::AuthorizationError] On 4xx auth failures or any other
#   non-200 status.
def fetch_access_token(options={})
  if self.token_credential_uri == nil
    raise ArgumentError, 'Missing token endpoint URI.'
  end
  options = deep_hash_normalize(options)

  client = options[:connection] ||= Faraday.default_connection
  url = Addressable::URI.parse(self.token_credential_uri).normalize.to_s
  parameters = self.generate_access_token_request(options)
  if client.is_a?(Faraday::Connection)
    # Token endpoint requests are form-encoded POSTs.
    response = client.post url,
                           Addressable::URI.form_encode(parameters),
                           { 'Content-Type' => 'application/x-www-form-urlencoded' }
    status = response.status.to_i
    body = response.body
    # NOTE(review): header key case differs from the request above;
    # Faraday's header lookup is case-insensitive, so this still works.
    content_type = response.headers['Content-type']
  else
    # Hurley — different response accessors than Faraday.
    response = client.post url, parameters
    status = response.status_code.to_i
    body = response.body
    content_type = response.header[:content_type]
  end

  if status == 200
    return ::Signet::OAuth2.parse_credentials(body, content_type)
  elsif [400, 401, 403].include?(status)
    message = 'Authorization failed.'
    if body.to_s.strip.length > 0
      message += " Server message:\n#{response.body.to_s.strip}"
    end
    raise ::Signet::AuthorizationError.new(
      message, :response => response
    )
  else
    message = "Unexpected status code: #{response.status}."
    if body.to_s.strip.length > 0
      message += " Server message:\n#{response.body.to_s.strip}"
    end
    raise ::Signet::AuthorizationError.new(
      message, :response => response
    )
  end
end
##
# Fetches token credentials and applies them to this client.
#
# @param [Hash] options Passed through to #fetch_access_token.
# @return [Hash] The token hash returned by the server, if any.
def fetch_access_token!(options={})
  token_hash = self.fetch_access_token(deep_hash_normalize(options))
  if token_hash
    # No-op for grant types other than `authorization_code`.
    # An authorization code is a one-time use token and is immediately
    # revoked after usage.
    self.code = nil
    self.issued_at = Time.now
    self.update_token!(token_hash)
  end
  token_hash
end
##
# Refresh the access token, if possible.
#
# @param [Hash] options Passed through to #fetch_access_token!.
def refresh!(options={})
  fetch_access_token!(deep_hash_normalize(options))
end
##
# Generates an authenticated request for protected resources.
#
# @param [Hash] options
# The configuration parameters for the request.
# - <code>:request</code> -
# A pre-constructed request. An OAuth 2 Authorization header
# will be added to it, as well as an explicit Cache-Control
# `no-store` directive.
# - <code>:method</code> -
# The HTTP method for the request. Defaults to 'GET'.
# - <code>:uri</code> -
# The URI for the request.
# - <code>:headers</code> -
# The HTTP headers for the request.
# - <code>:body</code> -
# The HTTP body for the request.
# - <code>:realm</code> -
# The Authorization realm. See RFC 2617.
# @return [Faraday::Request] The request object.
# Builds a request carrying a Bearer Authorization header and a
# `Cache-Control: no-store` directive. Accepts either a pre-built
# Faraday::Request, a [method, uri, headers, body] tuple, or the
# individual :method/:uri/:headers/:body options.
def generate_authenticated_request(options={})
  options = deep_hash_normalize(options)
  if self.access_token == nil
    raise ArgumentError, 'Missing access token.'
  end
  options = {
    :realm => nil
  }.merge(options)

  if options[:request].kind_of?(Faraday::Request)
    request = options[:request]
  else
    if options[:request].kind_of?(Array)
      method, uri, headers, body = options[:request]
    else
      method = options[:method] || :get
      uri = options[:uri]
      headers = options[:headers] || []
      body = options[:body] || ''
    end
    # Faraday expects headers as an enumerable of pairs.
    headers = headers.to_a if headers.kind_of?(Hash)
    request_components = {
      :method => method,
      :uri => uri,
      :headers => headers,
      :body => body
    }
    # Verify that we have all pieces required to return an HTTP request
    request_components.each do |(key, value)|
      unless value
        raise ArgumentError, "Missing :#{key} parameter."
      end
    end
    method = method.to_s.downcase.to_sym
    # NOTE(review): `method` is already a downcased symbol here, so the
    # second to_s.downcase.to_sym is redundant (but harmless).
    request = options[:connection].build_request(method.to_s.downcase.to_sym) do |req|
      req.url(Addressable::URI.parse(uri).normalize.to_s)
      req.headers = Faraday::Utils::Headers.new(headers)
      req.body = body
    end
  end

  request['Authorization'] = ::Signet::OAuth2.generate_bearer_authorization_header(
    self.access_token,
    options[:realm] ? [['realm', options[:realm]]] : nil
  )
  # Prevent intermediaries from caching a response that carries
  # credentials.
  request['Cache-Control'] = 'no-store'
  return request
end
##
# Transmits a request for a protected resource.
#
# @param [Hash] options
# The configuration parameters for the request.
# - <code>:request</code> -
# A pre-constructed request. An OAuth 2 Authorization header
# will be added to it, as well as an explicit Cache-Control
# `no-store` directive.
# - <code>:method</code> -
# The HTTP method for the request. Defaults to 'GET'.
# - <code>:uri</code> -
# The URI for the request.
# - <code>:headers</code> -
# The HTTP headers for the request.
# - <code>:body</code> -
# The HTTP body for the request.
# - <code>:realm</code> -
# The Authorization realm. See RFC 2617.
# - <code>:connection</code> -
# The HTTP connection to use.
# Must be of type <code>Faraday::Connection</code>.
#
# @example
# # Using Net::HTTP
# response = client.fetch_protected_resource(
# :uri => 'http://www.example.com/protected/resource'
# )
#
# @return [Array] The response object.
# Sends an authenticated request for a protected resource by driving the
# connection's middleware stack directly with the request's env.
# Raises only for 401 responses; all other statuses are returned as-is.
def fetch_protected_resource(options={})
  options = deep_hash_normalize(options)
  options[:connection] ||= Faraday.default_connection
  request = self.generate_authenticated_request(options)
  request_env = request.to_env(options[:connection])
  request_env[:request] ||= request
  response = options[:connection].app.call(request_env)
  if response.status.to_i == 401
    # When accessing a protected resource, we only want to raise an
    # error for 401 responses.
    message = 'Authorization failed.'
    if response.body.to_s.strip.length > 0
      message += " Server message:\n#{response.body.to_s.strip}"
    end
    raise ::Signet::AuthorizationError.new(
      message, :request => request, :response => response
    )
  else
    return response
  end
end
private
##
# Check whether the URI is Google's special "postmessage" redirect
# target (not a spec-valid redirect_uri, but allowed).
# @private
def uri_is_postmessage?(uri)
  'postmessage'.casecmp(uri.to_s).zero?
end
##
# Check whether the URI designates an out-of-band flow (i.e. it matches
# one of the recognized OOB_MODES values).
# @private
def uri_is_oob?(uri)
  OOB_MODES.member?(uri.to_s)
end
# Recursion step for #deep_hash_normalize: nested hashes are normalized
# in turn, every other value passes through untouched.
def recursive_hash_normalize_keys(val)
  val.is_a?(Hash) ? deep_hash_normalize(val) : val
end

# Returns a copy of the given hash with every key (at every nesting
# level) converted to a Symbol for uniform retrieval. A nil input
# yields an empty hash.
def deep_hash_normalize(old_hash)
  normalized = {}
  (old_hash || {}).each do |key, value|
    normalized[key.to_sym] = recursive_hash_normalize_keys(value)
  end
  normalized
end
# Coerces a user-supplied timestamp into a Time instance.
#
# @param [Time, String, Integer, nil] time
#   A Time (returned as-is), a parseable time String, an epoch-seconds
#   integer, or nil.
# @return [Time, nil]
# @raise [RuntimeError] For any other type.
def normalize_timestamp(time)
  case time
  when NilClass
    nil
  when Time
    time
  when String
    Time.parse(time)
  when Integer
    # Fixnum/Bignum were unified into Integer and the old constants were
    # removed in Ruby 3.2, so `when Fixnum, Bignum` would raise NameError
    # on modern Rubies; match the unified class instead.
    Time.at(time)
  else
    fail "Invalid time value #{time}"
  end
end
end
end
end
| 33.879731 | 137 | 0.593575 |
e27eea0a93b7a14c8b2aaedf84e499fe19c197d6 | 3,177 | # encoding: UTF-8
require 'spec_helper'
require 'mongo'
require 'polipus/storage/memory_store'
# Specs for the in-memory page store. NOTE(review): several examples
# depend on state left behind by earlier examples (e.g. the iteration
# and deletion examples assume the google.com page from the first
# example is still stored), so they appear to rely on defined ordering
# and on `Polipus::Storage.memory_store` sharing state across examples
# — confirm before reordering or parallelizing.
describe Polipus::Storage::MemoryStore do
  let(:storage) { Polipus::Storage.memory_store }

  it 'should store a page' do
    p = page_factory 'http://www.google.com', code: 200, body: '<html></html>'
    uuid = storage.add p
    expect(uuid).to eq('ed646a3334ca891fd3467db131372140')
    expect(storage.count).to be 1
    p = storage.get p
    expect(p.url.to_s).to eq('http://www.google.com')
    expect(p.body).to eq('<html></html>')
  end

  it 'should update a page' do
    p = page_factory 'http://www.google.com', code: 301, body: '<html></html>'
    storage.add p
    p = storage.get p
    expect(p.code).to eq(301)
  end

  # Relies on the page stored by the examples above still being present.
  it 'should iterate over stored pages' do
    storage.each do |k, page|
      expect(k).to eq('ed646a3334ca891fd3467db131372140')
      expect(page.url.to_s).to eq('http://www.google.com')
    end
  end

  it 'should delete a page' do
    p = page_factory 'http://www.google.com', code: 301, body: '<html></html>'
    storage.remove p
    expect(storage.get(p)).to be_nil
    expect(storage.count).to be 0
  end

  it 'should store a page removing a query string from the uuid generation' do
    p = page_factory 'http://www.asd.com/?asd=lol', code: 200, body: '<html></html>'
    p_no_query = page_factory 'http://www.asd.com/?asdas=dasda&adsda=1', code: 200, body: '<html></html>'
    storage.include_query_string_in_uuid = false
    storage.add p
    expect(storage.exists?(p_no_query)).to be_truthy
    storage.remove p
  end

  it 'should store a page removing a query string from the uuid generation no ending slash' do
    p = page_factory 'http://www.asd.com?asd=lol', code: 200, body: '<html></html>'
    p_no_query = page_factory 'http://www.asd.com', code: 200, body: '<html></html>'
    storage.include_query_string_in_uuid = false
    storage.add p
    expect(storage.exists?(p_no_query)).to be_truthy
    storage.remove p
  end

  it 'should store a page with user data associated' do
    p = page_factory 'http://www.user.com', code: 200, body: '<html></html>'
    p.user_data.name = 'Test User Data'
    storage.add p
    expect(storage.exists?(p)).to be_truthy
    p = storage.get(p)
    expect(p.user_data.name).to eq('Test User Data')
    storage.remove p
  end

  # NOTE(review): this example shadows `storage` with a mongo_store and
  # references `@mongo`, which is never assigned in this spec file —
  # looks copy-pasted from the MongoStore spec; verify it actually runs
  # against a configured Mongo connection.
  it 'should honor the except parameters' do
    storage = Polipus::Storage.mongo_store(@mongo, '_test_pages', ['body'])
    p = page_factory 'http://www.user-doo.com', code: 200, body: '<html></html>'
    storage.add p
    p = storage.get p
    expect(p.body).to be_empty
    storage.clear
  end

  it 'should return false if a doc not exists' do
    storage.include_query_string_in_uuid = false
    p_other = page_factory 'http://www.asdrrrr.com', code: 200, body: '<html></html>'
    expect(storage.exists?(p_other)).to be_falsey
    storage.add p_other
    expect(storage.exists?(p_other)).to be_truthy
    p_other = page_factory 'http://www.asdrrrr.com?trk=asd-lol', code: 200, body: '<html></html>'
    expect(storage.exists?(p_other)).to be_truthy
    storage.include_query_string_in_uuid = true
    expect(storage.exists?(p_other)).to be_falsey
  end
end
| 36.102273 | 105 | 0.676739 |
7ab13cbcc3378984f4d15cd1c2e92a28121772af | 3,156 | require 'ruby_crypto_etf'
module RubyCryptoETF
  # Specs for the Coin value object: symbol/exchange normalization,
  # BigDecimal coercion of numeric inputs, rejection of negative
  # amounts/values, and hash serialization.
  describe Coin do
    context "initialization" do
      it "should create a new Coin" do
        coin = Coin.new(symbol: 'btc',
                        amount: BigDecimal("0.001"),
                        exchange: 'BiNaNcE',
                        value: BigDecimal("8856.01"))

        expect(coin.symbol).to_not be_nil
        expect(coin.amount).to_not be_nil
        expect(coin.exchange).to_not be_nil
        expect(coin.value).to_not be_nil

        # Symbol is upcased, exchange downcased on construction.
        expect(coin.symbol).to eq('BTC')
        expect(coin.amount).to eq(BigDecimal("0.001"))
        expect(coin.exchange).to eq('binance')
        expect(coin.value).to eq(BigDecimal("8856.01"))
      end
    end

    it "sets default values for initializer" do
      coin = Coin.new
      expect(coin.symbol).to eq("")
      expect(coin.exchange).to eq("")
      expect(coin.amount).to eq(BigDecimal("0"))
      expect(coin.value).to eq(BigDecimal("0"))
    end

    it "will always convert a numeric amount to BigDecimal" do
      coin_amount_float = 1.2345
      coin_value_float = 89714.144
      coin = Coin.new(amount: coin_amount_float, value: coin_value_float)
      expect(coin.amount.class).to eq(BigDecimal)
      expect(coin.amount).to eq(BigDecimal(coin_amount_float.to_s))
      expect(coin.value.class).to eq(BigDecimal)
      expect(coin.value).to eq(BigDecimal(coin_value_float.to_s))
    end

    # Negative deltas that would drive the amount below zero are ignored.
    it "will always add amount as a positive BigDecimal" do
      coin = Coin.new(symbol: 'ETH', amount: 1.23)
      coin.amount += 0.004
      expect(coin.amount).to eq(BigDecimal("1.234"))
      coin.amount += 2.00000
      expect(coin.amount).to eq(BigDecimal("3.234"))
      coin.amount += -4.0
      expect(coin.amount).to eq(BigDecimal("3.234"))
      coin.amount += -3.0
      expect(coin.amount).to eq(BigDecimal("0.234"))
      coin.amount += -3.0
      expect(coin.amount).to eq(BigDecimal("0.234"))
      coin.amount = coin.amount + 1
      expect(coin.amount).to eq(BigDecimal("1.234"))
    end

    it "will set a new Coin initialized with negative amount to zero" do
      coin = Coin.new(symbol: 'XVG', amount: -10.234555, exchange: 'Binance',
                      value: -13151)
      expect(coin.amount).to eq(BigDecimal("0"))
      expect(coin.value).to eq(BigDecimal("0"))
    end

    it "will only set new positive values" do
      coin = Coin.new()
      coin_value = BigDecimal("100")
      coin.value = coin_value
      expect(coin.value).to eq(coin_value)
      coin.value = -1.23145
      expect(coin.value).to eq(coin_value)
      coin.value = 200
      expect(coin.value).to_not eq(coin_value)
      expect(coin.value).to eq(BigDecimal("200"))
      coin.value *= -1
      expect(coin.value).to eq(BigDecimal("200"))
      coin.value *= 10
      expect(coin.value).to eq(BigDecimal("2000"))
    end

    it "should be serialized to a hash" do
      coin = Coin.new(symbol: 'ETH', amount: 1.23, exchange: 'gemini', value: 246)
      coin_hash = { symbol: 'ETH', amount: BigDecimal("1.23"), exchange: 'gemini', value: BigDecimal("246") }
      expect(coin.to_h).to eq(coin_hash)
    end
  end
end
| 29.773585 | 109 | 0.615653 |
d5eb5b42a3a2b2f16c14a767ee2978178b82e91f | 844 | # frozen_string_literal: true
# Exercises Ruby 3.x `case/in` pattern matching (deconstruct_keys)
# against Fear::Struct instances. Requires a Ruby with find-pattern
# syntax support (3.0+).
RSpec.describe Fear::Struct do
  describe "pattern matching" do
    subject do
      case struct
      in Fear::Struct(a: 42)
        "a = 42"
      in Fear::Struct(a: 43, **rest)
        "a = 43, #{rest}"
      in Fear::Struct(a:)
        "a = #{a}"
      end
    end

    let(:struct_class) { described_class.with_attributes(:a, :b) }

    context "when match single value" do
      let(:struct) { struct_class.new(b: 43, a: 42) }

      it { is_expected.to eq("a = 42") }
    end

    context "when match single value and capture the rest" do
      let(:struct) { struct_class.new(b: 42, a: 43) }

      it { is_expected.to eq("a = 43, {:b=>42}") }
    end

    context "when capture a value" do
      let(:struct) { struct_class.new(b: 45, a: 44) }

      it { is_expected.to eq("a = 44") }
    end
  end
end
| 22.810811 | 66 | 0.562796 |
621daead7e4c7a8c0355ac19c4e9c9967234c14a | 924 | # frozen_string_literal: true
require "datadog/statsd" if Rails.env.production?
require "./lib/github_classroom/null_statsd"
module GitHubClassroom
  APP_NAME = ENV["HEROKU_APP_NAME"] || "github-classroom"
  DYNO = ENV["DYNO"] || 1

  # Process-wide, memoized StatsD client: a real Datadog client tagged
  # with the app and dyno in production, a no-op stand-in everywhere
  # else.
  def self.statsd
    @statsd ||=
      if Rails.env.production?
        ::Datadog::Statsd.new("localhost", 8125, tags: ["application:#{APP_NAME}", "dyno_id:#{DYNO}"])
      else
        ::GitHubClassroom::NullStatsD.new
      end
  end
end
# Report combined view + DB time for every controller action to StatsD,
# skipping requests to the Peek performance bar.
ActiveSupport::Notifications.subscribe("process_action.action_controller") do |_name, _start, _finish, _id, payload|
  next if payload[:path].match? %r{\A\/peek/}

  view_time = payload[:view_runtime]
  db_time = payload[:db_runtime]
  # Either runtime can be nil (e.g. no template rendered or no queries
  # executed); respond_to?(:+) filters those out without type checks.
  next unless view_time.respond_to?(:+) && db_time.respond_to?(:+)

  total_time = view_time + db_time
  GitHubClassroom.statsd.timing("request.response_time", total_time)
end
| 30.8 | 116 | 0.678571 |
bfdbe857dd4e3d164ff6bd09e1551b4de6fcaa9e | 13,295 | require 'test_helper'
module Sources
class TwitterTest < ActiveSupport::TestCase
setup do
skip "Twitter credentials are not configured" if !Source::Extractor::Twitter.enabled?
end
context "An extended tweet" do
should "extract the correct image url" do
@site = Source::Extractor.find("https://twitter.com/onsen_musume_jp/status/865534101918330881")
assert_equal(["https://pbs.twimg.com/media/DAL-ntWV0AEbhes.jpg:orig"], @site.image_urls)
end
should "extract all the image urls" do
@site = Source::Extractor.find("https://twitter.com/baalbuddy/status/1455330043828264963")
urls = %w[
https://pbs.twimg.com/media/FDJekEfX0AQZ-Mx.png:orig
https://pbs.twimg.com/media/FDJekEkWYAICHzF.png:orig
https://pbs.twimg.com/media/FDJekEiWEAEsPky.png:orig
https://pbs.twimg.com/media/FDJekEkWEAo4sMQ.png:orig
]
assert_equal(urls, @site.image_urls)
end
end
context "A video" do
should "get the correct urls" do
@site = Source::Extractor.find("https://twitter.com/CincinnatiZoo/status/859073537713328129")
assert_equal(["https://video.twimg.com/ext_tw_video/859073467769126913/pu/vid/1280x720/cPGgVROXHy3yrK6u.mp4"], @site.image_urls)
assert_equal("https://twitter.com/CincinnatiZoo/status/859073537713328129", @site.page_url)
end
should "work when given a video thumbnail" do
# https://twitter.com/Kekeflipnote/status/1241038667898118144
@site = Source::Extractor.find("https://pbs.twimg.com/tweet_video_thumb/ETkN_L3X0AMy1aT.jpg:small")
assert_equal(["https://pbs.twimg.com/tweet_video_thumb/ETkN_L3X0AMy1aT.jpg:orig"], @site.image_urls)
end
should "work when given an external video thumbnail" do
# https://twitter.com/chivedips/status/1243850897056133121
@site = Source::Extractor.find("https://pbs.twimg.com/ext_tw_video_thumb/1243725361986375680/pu/img/JDA7g7lcw7wK-PIv.jpg:small")
assert_equal(["https://pbs.twimg.com/ext_tw_video_thumb/1243725361986375680/pu/img/JDA7g7lcw7wK-PIv.jpg:orig"], @site.image_urls)
end
should "work when given an amplify video thumbnail" do
# https://twitter.com/UNITED_CINEMAS/status/1223138847417978881
@site = Source::Extractor.find("https://pbs.twimg.com/amplify_video_thumb/1215590775364259840/img/lolCkEEioFZTb5dl.jpg:small")
assert_equal(["https://pbs.twimg.com/amplify_video_thumb/1215590775364259840/img/lolCkEEioFZTb5dl.jpg:orig"], @site.image_urls)
end
end
context "An animated gif" do
setup do
@site = Source::Extractor.find("https://twitter.com/i/web/status/1252517866059907073")
end
should "get the image url" do
assert_equal(["https://video.twimg.com/tweet_video/EWHWVrmVcAAp4Vw.mp4"], @site.image_urls)
end
end
context "A twitter summary card from twitter with a :large image" do
setup do
@site = Source::Extractor.find("https://twitter.com/aranobu/status/817736083567820800")
end
should "get the image url" do
assert_equal(["https://pbs.twimg.com/media/C1kt72yVEAEGpOv.jpg:orig"], @site.image_urls)
end
should "get the page url" do
assert_equal("https://twitter.com/aranobu/status/817736083567820800", @site.page_url)
end
end
context "The source site for a restricted twitter" do
setup do
@site = Source::Extractor.find("https://mobile.twitter.com/Strangestone/status/556440271961858051")
end
should "get the urls" do
assert_equal(["https://pbs.twimg.com/media/B7jfc1JCcAEyeJh.png:orig"], @site.image_urls)
assert_equal("https://twitter.com/Strangestone/status/556440271961858051", @site.page_url)
end
end
context "A tweet without any images" do
should "not fail" do
@site = Source::Extractor.find("https://twitter.com/teruyo/status/1058452066060853248")
assert_equal([], @site.image_urls)
assert_nothing_raised { @site.to_h }
end
end
context "The source site for twitter" do
setup do
@site = Source::Extractor.find("https://mobile.twitter.com/nounproject/status/540944400767922176")
end
should "get the main profile url" do
assert_equal("https://twitter.com/nounproject", @site.profile_url)
end
should "get the profile urls" do
assert_includes(@site.profile_urls, "https://twitter.com/nounproject")
assert_includes(@site.profile_urls, "https://twitter.com/intent/user?user_id=88996186")
end
should "get the artist name" do
assert_equal("Noun Project", @site.artist_name)
end
should "get the image urls" do
assert_equal(["https://pbs.twimg.com/media/B4HSEP5CUAA4xyu.png:orig"], @site.image_urls)
end
should "get the page url" do
assert_equal("https://twitter.com/nounproject/status/540944400767922176", @site.page_url)
end
should "get the tags" do
assert_equal([], @site.tags)
end
should "get the artist commentary" do
assert_not_nil(@site.artist_commentary_desc)
end
should "convert a page into a json representation" do
assert_nothing_raised do
@site.to_json
end
end
end
context "The source site for a direct image and a referer" do
setup do
@site = Source::Extractor.find("https://pbs.twimg.com/media/B4HSEP5CUAA4xyu.png:large", "https://twitter.com/nounproject/status/540944400767922176")
end
should "get the source data" do
assert_equal("nounproject", @site.tag_name)
assert_equal("Noun Project", @site.artist_name)
assert_equal(["https://pbs.twimg.com/media/B4HSEP5CUAA4xyu.png:orig"], @site.image_urls)
end
end
context "The source site for a direct image url (pbs.twimg.com/media/*.jpg) without a referer url" do
setup do
@site = Source::Extractor.find("https://pbs.twimg.com/media/B4HSEP5CUAA4xyu.png:large")
end
should "work" do
assert_equal(["https://pbs.twimg.com/media/B4HSEP5CUAA4xyu.png:orig"], @site.image_urls)
assert(@site.artist_name.blank?)
assert(@site.profile_url.blank?)
assert(@site.artists.empty?)
assert(@site.tags.empty?)
assert(@site.artist_commentary_desc.blank?)
assert(@site.dtext_artist_commentary_desc.blank?)
assert_nothing_raised { @site.to_h }
end
end
context "The source site for a direct image url (pbs.twimg.com/media/*?format=jpg&name=*) without a referer url" do
setup do
@site = Source::Extractor.find("https://pbs.twimg.com/media/EBGp2YdUYAA19Uj?format=jpg&name=small")
end
should "work" do
assert_equal(["https://pbs.twimg.com/media/EBGp2YdUYAA19Uj.jpg:orig"], @site.image_urls)
end
should "work for filenames containing dashes" do
@site = Source::Extractor.find("https://pbs.twimg.com/media/EAjc-OWVAAAxAgQ.jpg", "https://twitter.com/asteroid_ill/status/1155420330128625664")
assert_equal(["https://pbs.twimg.com/media/EAjc-OWVAAAxAgQ.jpg:orig"], @site.image_urls)
end
end
context "The source site for a https://twitter.com/i/web/status/:id url" do
setup do
@site = Source::Extractor.find("https://twitter.com/i/web/status/943446161586733056")
end
should "fetch the source data" do
assert_equal("https://twitter.com/motty08111213", @site.profile_url)
end
should "get the page url" do
assert_equal("https://twitter.com/motty08111213/status/943446161586733056", @site.page_url)
end
end
context "A deleted tweet" do
should "still find the artist name" do
@site = Source::Extractor.find("https://twitter.com/masayasuf/status/870734961778630656")
@artist = FactoryBot.create(:artist, name: "masayasuf", url_string: @site.url)
assert_equal("masayasuf", @site.tag_name)
assert_equal("https://twitter.com/masayasuf", @site.profile_url)
assert_equal([@artist], @site.artists)
end
end
context "A tweet" do
setup do
@site = Source::Extractor.find("https://twitter.com/noizave/status/875768175136317440")
end
should "convert urls, hashtags, and mentions to dtext" do
desc = 'test "#foo":[https://twitter.com/hashtag/foo] "#ホワイトデー":[https://twitter.com/hashtag/ホワイトデー] "@noizave":[https://twitter.com/noizave]\'s blah http://www.example.com <>& 😀'
assert_equal(desc, @site.dtext_artist_commentary_desc)
end
should "get the tags" do
tags = [
%w[foo https://twitter.com/hashtag/foo],
%w[ホワイトデー https://twitter.com/hashtag/ホワイトデー]
]
assert_equal(tags, @site.tags)
end
end
context "A profile banner image" do
should "work" do
@site = Source::Extractor.find("https://pbs.twimg.com/profile_banners/1225702850002468864/1588597370/1500x500")
assert_equal([@site.url], @site.image_urls)
assert_nothing_raised { @site.to_h }
end
end
context "A tweet containing non-normalized Unicode text" do
should "be normalized to nfkc" do
site = Source::Extractor.find("https://twitter.com/aprilarcus/status/367557195186970624")
desc1 = "𝖸𝗈 𝐔𝐧𝐢𝐜𝐨𝐝𝐞 𝗅 𝗁𝖾𝗋𝖽 𝕌 𝗅𝗂𝗄𝖾 𝑡𝑦𝑝𝑒𝑓𝑎𝑐𝑒𝑠 𝗌𝗈 𝗐𝖾 𝗉𝗎𝗍 𝗌𝗈𝗆𝖾 𝚌𝚘𝚍𝚎𝚙𝚘𝚒𝚗𝚝𝚜 𝗂𝗇 𝗒𝗈𝗎𝗋 𝔖𝔲𝔭𝔭𝔩𝔢𝔪𝔢𝔫𝔱𝔞𝔯𝔶 𝔚𝔲𝔩𝔱𝔦𝔩𝔦𝔫𝔤𝔳𝔞𝔩 𝔓𝔩𝔞𝔫𝔢 𝗌𝗈 𝗒𝗈𝗎 𝖼𝖺𝗇 𝓮𝓷𝓬𝓸𝓭𝓮 𝕗𝕠𝕟𝕥𝕤 𝗂𝗇 𝗒𝗈𝗎𝗋 𝒇𝒐𝒏𝒕𝒔."
desc2 = "Yo Unicode l herd U like typefaces so we put some codepoints in your Supplementary Wultilingval Plane so you can encode fonts in your fonts."
assert_equal(desc1, site.artist_commentary_desc)
assert_equal(desc2, site.dtext_artist_commentary_desc)
end
should "normalize full-width hashtags" do
site = Source::Extractor.find("https://twitter.com/corpsmanWelt/status/1037724260075069441")
desc1 = %{新しいおともだち\n#けものフレンズ https://t.co/sEAuu16yAQ}
desc2 = %{新しいおともだち\n"#けものフレンズ":[https://twitter.com/hashtag/けものフレンズ]}
assert_equal(desc1, site.artist_commentary_desc)
assert_equal(desc2, site.dtext_artist_commentary_desc)
end
end
context "A twitter post with a pixiv referer" do
should "use the twitter strategy" do
site = Source::Extractor.find("https://twitter.com/Mityubi/status/849630665603665920", "https://www.pixiv.net/member_illust.php?mode=medium&illust_id=56735489")
assert_equal(site.site_name, "Twitter")
assert_equal(["https://pbs.twimg.com/media/C8p-gPhVoAMZupS.png:orig"], site.image_urls)
end
end
context "A tweet from a suspended user" do
should "not fail" do
site = Source::Extractor.find("https://twitter.com/tanso_panz/status/1192429800717029377")
assert_equal(site.site_name, "Twitter")
assert_equal("tanso_panz", site.tag_name)
assert_equal("https://twitter.com/tanso_panz", site.profile_url)
assert_equal([], site.image_urls)
end
end
context "A tweet with hashtags" do
should "ignore common suffixes when translating hashtags" do
as(create(:user)) do
create(:tag, name: "nishizumi_miho", post_count: 1)
create(:wiki_page, title: "nishizumi_miho", other_names: "西住みほ")
end
site = Source::Extractor.find("https://twitter.com/kasaishin100/status/1186658635226607616")
assert_includes(site.tags.map(&:first), "西住みほ生誕祭2019")
assert_includes(site.normalized_tags, "西住みほ")
assert_includes(site.translated_tags.map(&:name), "nishizumi_miho")
end
end
should "Parse Twitter URLs correctly" do
assert(Source::URL.image_url?("https://pbs.twimg.com/media/EBGbJe_U8AA4Ekb.jpg"))
assert(Source::URL.image_url?("https://pbs.twimg.com/media/EBGbJe_U8AA4Ekb.jpg:small"))
assert(Source::URL.image_url?("https://pbs.twimg.com/media/EBGbJe_U8AA4Ekb?format=jpg&name=900x900"))
assert(Source::URL.image_url?("https://pbs.twimg.com/tweet_video_thumb/ETkN_L3X0AMy1aT.jpg"))
assert(Source::URL.image_url?("https://pbs.twimg.com/ext_tw_video_thumb/1243725361986375680/pu/img/JDA7g7lcw7wK-PIv.jpg"))
assert(Source::URL.image_url?("https://pbs.twimg.com/amplify_video_thumb/1215590775364259840/img/lolCkEEioFZTb5dl.jpg"))
assert(Source::URL.page_url?("https://twitter.com/i/web/status/1261877313349640194"))
assert(Source::URL.page_url?("https://twitter.com/BOW999/status/1261877313349640194"))
assert(Source::URL.page_url?("https://twitter.com/BOW999/status/1261877313349640194/photo/1"))
assert(Source::URL.page_url?("https://twitter.com/BOW999/status/1261877313349640194?s=19"))
assert(Source::URL.profile_url?("https://www.twitter.com/irt_5433"))
assert(Source::URL.profile_url?("https://www.twitter.com/irt_5433/likes"))
assert(Source::URL.profile_url?("https://twitter.com/intent/user?user_id=1485229827984531457"))
assert(Source::URL.profile_url?("https://twitter.com/intent/user?screen_name=ryuudog_NFT"))
assert(Source::URL.profile_url?("https://twitter.com/i/user/889592953"))
refute(Source::URL.profile_url?("https://twitter.com/home"))
end
end
end
| 42.206349 | 187 | 0.684167 |
335cbe911b7f42f77ed3eaa6fc9ce0bfad357dc7 | 3,387 |
# Copyright 2018 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file in README.md and
# CONTRIBUTING.md located at the root of this package.
#
# ----------------------------------------------------------------------------
# The following example requires two environment variables to be set:
# * CRED_PATH - the path to a JSON service_account file
# * PROJECT - the name of your GCP project.
#
# For convenience you optionally can add these to your ~/.bash_profile (or the
# respective .profile settings) environment:
#
# export CRED_PATH=/path/to/my/cred.json
# export PROJECT=/path/to/my/cred.json
#
# The following command will run this example:
# CRED_PATH=/path/to/my/cred.json \
# PROJECT='my-test-project'
# chef-client -z --runlist \
# "recipe[gcompute::tests~target_http_proxy]"
#
# ________________________
# Fail fast with a pointer to the docs when required env vars are absent.
raise "Missing parameter 'CRED_PATH'. Please read docs at #{__FILE__}" \
  unless ENV.key?('CRED_PATH')
raise "Missing parameter 'PROJECT'. Please read docs at #{__FILE__}" \
  unless ENV.key?('PROJECT')

# For more information on the gauth_credential parameters and providers please
# refer to its detailed documentation at:
# https://github.com/GoogleCloudPlatform/chef-google-auth
gauth_credential 'mycred' do
  action :serviceaccount
  path ENV['CRED_PATH'] # e.g. '/path/to/my_account.json'
  scopes [
    'https://www.googleapis.com/auth/compute'
  ]
end

# Instance group that will back the load-balanced service.
gcompute_instance_group 'chef-e2e-my-chef-servers' do
  action :create
  zone 'us-central1-a'
  project ENV['PROJECT'] # ex: 'my-test-project'
  credential 'mycred'
end

# Google::Functions must be included at runtime to ensure that the
# gcompute_health_check_ref function can be used in health_check blocks.
::Chef::Resource.send(:include, Google::Functions)

# Backend service pointing at the instance group, with CDN enabled and a
# pre-existing health check referenced by name.
gcompute_backend_service 'chef-e2e-my-app-backend' do
  action :create
  backends [
    { group: 'chef-e2e-my-chef-servers' }
  ]
  enable_cdn true
  health_checks [
    gcompute_health_check_ref('another-hc', ENV['PROJECT']) # ex: 'my-test-project'
  ]
  project ENV['PROJECT'] # ex: 'my-test-project'
  credential 'mycred'
end

# URL map routing all traffic to the backend service by default.
gcompute_url_map 'chef-e2e-my-url-map' do
  action :create
  default_service 'chef-e2e-my-app-backend'
  project ENV['PROJECT'] # ex: 'my-test-project'
  credential 'mycred'
end

# The HTTP proxy under test, fronting the URL map above.
gcompute_target_http_proxy 'chef-e2e-my-http-proxy' do
  action :create
  url_map 'chef-e2e-my-url-map'
  project ENV['PROJECT'] # ex: 'my-test-project'
  credential 'mycred'
end
| 33.87 | 83 | 0.677 |
d514cb71f4887d4f7198f961ff96354b148d6546 | 156 | HUSH_ENCRYPTION_KEY = String(ENV['HUSH_ENCRYPTION_KEY'])
# Fail fast at boot when the key is missing, empty, or whitespace-only.
raise 'You must set environment variable HUSH_ENCRYPTION_KEY' if HUSH_ENCRYPTION_KEY.blank?
| 26 | 63 | 0.826923 |
1aa715ac9cd247e4477806b485e4de7d08c78e85 | 3,344 | # encoding: utf-8
# frozen_string_literal: true
module RuboCop
module Cop
module Performance
# This cop identifies places where `Hash#merge!` can be replaced by
# `Hash#[]=`.
#
# @example
# hash.merge!(a: 1)
# hash.merge!({'key' => 'value'})
# hash.merge!(a: 1, b: 2)
class RedundantMerge < Cop
# Template for the `hash[key] = value` replacement text.
AREF_ASGN = '%s[%s] = %s'.freeze
MSG = 'Use `%s` instead of `%s`.'.freeze
# Node patterns: a `merge!` call with a hash literal argument, a
# modifier-form flow-control node, and an `each_with_object` block
# (used to allow `merge!` on the accumulator when its value is used).
def_node_matcher :redundant_merge, '(send $_ :merge! (hash $...))'
def_node_matcher :modifier_flow_control, '[{if while until} #modifier?]'
def_node_matcher :each_with_object_node, <<-END
(block (send _ :each_with_object _) (args _ $_) ...)
END
# Registers an offense for `merge!` calls whose pairs could be written
# as individual `[]=` assignments, skipping cases where the return
# value is used (except the each_with_object accumulator), where the
# receiver is impure, or where the pair count exceeds the configured max.
def on_send(node)
redundant_merge(node) do |receiver, pairs|
if node.value_used?
parent = node.parent
grandparent = parent.parent if parent.begin_type?
second_arg = each_with_object_node(grandparent || parent)
next if second_arg.nil?
next unless receiver.loc.name.source == second_arg.loc.name.source
end
next if pairs.size > 1 && !receiver.pure?
next if pairs.size > max_key_value_pairs
assignments = to_assignments(receiver, pairs).join('; ')
message = format(MSG, assignments, node.source)
add_offense(node, :expression, message)
end
end
# Rewrites the `merge!` call into one `[]=` assignment per pair,
# re-wrapping modifier conditionals into multi-line form when needed.
def autocorrect(node)
redundant_merge(node) do |receiver, pairs|
lambda do |corrector|
new_source = to_assignments(receiver, pairs).join("\n")
parent = node.parent
if parent && pairs.size > 1
if modifier_flow_control(parent)
new_source = rewrite_with_modifier(node, parent, new_source)
node = parent
else
padding = "\n#{leading_spaces(node)}"
new_source.gsub!(/\n/, padding)
end
end
corrector.replace(node.source_range, new_source)
end
end
end
private
# Builds the `receiver[key] = value` source strings, normalizing bare
# symbol keys (`a: 1`) to `:a` form.
def to_assignments(receiver, pairs)
pairs.map do |pair|
key, value = *pair
key_src = if key.sym_type? && !key.source.start_with?(':')
":#{key.source}"
else
key.source
end
format(AREF_ASGN, receiver.source, key_src, value.source)
end
end
# Converts a modifier conditional (`x.merge!(...) if cond`) into a
# multi-line `if cond ... end` wrapping the generated assignments.
def rewrite_with_modifier(node, parent, new_source)
cond, = *parent
padding = "\n#{(' ' * indent_width) + leading_spaces(node)}"
new_source.gsub!(/\n/, padding)
parent.loc.keyword.source << ' ' << cond.source << padding <<
new_source << "\n" << leading_spaces(node) << 'end'
end
# Leading whitespace of the line containing the node.
def leading_spaces(node)
node.source_range.source_line[/\A\s*/]
end
# Configured indentation width (defaults to 2).
def indent_width
@config.for_cop('IndentationWidth')['Width'] || 2
end
# A conditional/loop node is in modifier form when it has no `end`.
def modifier?(node)
node.loc.respond_to?(:end) && node.loc.end.nil?
end
# Maximum number of key/value pairs to still flag (from cop config).
def max_key_value_pairs
cop_config['MaxKeyValuePairs'].to_i
end
end
end
end
end
| 31.54717 | 80 | 0.532596 |
4a54f9da3c5a53c60b89fa97ee6900694b4b3b28 | 5,138 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_06_01
module Models
#
# Route table resource.
#
# NOTE: this file is generated by AutoRest (see the header above); keep
# manual edits to a minimum — regeneration will overwrite them.
class RouteTable < Resource
include MsRestAzure
# @return [Array<Route>] Collection of routes contained within a route
# table.
attr_accessor :routes
# @return [Array<Subnet>] A collection of references to subnets.
attr_accessor :subnets
# @return [Boolean] Whether to disable the routes learned by BGP on that
# route table. True means disable.
attr_accessor :disable_bgp_route_propagation
# @return [ProvisioningState] The provisioning state of the route table
# resource. Possible values include: 'Succeeded', 'Updating', 'Deleting',
# 'Failed'
attr_accessor :provisioning_state
# @return [String] A unique read-only string that changes whenever the
# resource is updated.
attr_accessor :etag
#
# Mapper for RouteTable class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'RouteTable',
type: {
name: 'Composite',
class_name: 'RouteTable',
model_properties: {
id: {
client_side_validation: true,
required: false,
serialized_name: 'id',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
location: {
client_side_validation: true,
required: false,
serialized_name: 'location',
type: {
name: 'String'
}
},
tags: {
client_side_validation: true,
required: false,
serialized_name: 'tags',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
routes: {
client_side_validation: true,
required: false,
serialized_name: 'properties.routes',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'RouteElementType',
type: {
name: 'Composite',
class_name: 'Route'
}
}
}
},
subnets: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.subnets',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'SubnetElementType',
type: {
name: 'Composite',
class_name: 'Subnet'
}
}
}
},
disable_bgp_route_propagation: {
client_side_validation: true,
required: false,
serialized_name: 'properties.disableBgpRoutePropagation',
type: {
name: 'Boolean'
}
},
provisioning_state: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.provisioningState',
type: {
name: 'String'
}
},
etag: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'etag',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 30.766467 | 79 | 0.43441 |
4ae0176166176b2cdf613694b756093d32629e97 | 5,352 | #!/usr/bin/env ruby
# Copyright (c) 2004-2020 Microchip Technology Inc. and its subsidiaries.
# SPDX-License-Identifier: MIT
require_relative 'libeasy/et'
# Global test setup handle (DUT + PC sides) for a back-to-back 2-port rig.
$ts = get_test_setup("mesa_pc_b2b_2x")
#---------- Configuration -----------------------------------------------------
# Port indices: NPI (injection) port and the port frames are received on.
$idx_npi = 0
$idx_rx = 1
test "conf" do
t_i("NPI port")
# Enable NPI mode on the selected DUT port (read-modify-write of the conf).
conf = $ts.dut.call("mesa_npi_conf_get")
conf["enable"] = true
conf["port_no"] = $ts.dut.port_list[$idx_npi]
$ts.dut.call("mesa_npi_conf_set", conf)
end
#---------- Frame testing -----------------------------------------------------
# Injects one frame of +len+ bytes towards +port+ with the given priority and
# AFI id. When +npi+ is true the frame is sent from the PC through the NPI
# port (with an injection header); otherwise it is injected via the DUT's
# mesa_packet_tx_frame API with a hand-built broadcast frame.
def frame_tx(port, prio, id, len, npi = true)
if (npi)
f1 = cmd_tx_ifh_push({"dst_port": port, "cos": prio, "afi_id": id}) + "eth data pattern cnt #{len - 20 - 18}"
$ts.pc.run("sudo ef name f1 #{f1} tx #{$ts.pc.p[$idx_npi]} name f1")
else
tx_info = $ts.dut.call("mesa_packet_tx_info_init")
tx_info["dst_port_mask"] = (1 << port)
tx_info["cos"] = prio
tx_info["afi_id"] = id
frame = []
# Broadcast DA (6 x 0xff), near-zero SA, then 0xaa payload filler.
for i in 0..(len - 1)
frame[i] = (i < 6 ? 0xff : i < 11 ? 0 : i == 11 ? 1 : 0xaa)
end
$ts.dut.call("mesa_packet_tx_frame", tx_info, frame, len);
end
end
# NOTE(review): empty stub with no callers visible in this file — looks like
# dead code left behind; confirm before removing.
def frame_tx_manual(port, prio, id, len)
end
# Captures 10 frames on port index +idx+ and verifies that consecutive
# frames arrive approximately +usec+ microseconds apart (within +/- 10%).
# Reports per-frame timing via t_i and flags out-of-tolerance gaps or a
# wrong capture count via t_e.
def frame_rx(idx, usec)
cnt = 10
$ts.pc.run("sudo ef -c #{$ts.pc.p[idx]},1,adapter_unsynced,,#{cnt}")
pkts = $ts.pc.get_pcap("#{$ts.pc.p[idx]}.pcap")
if (pkts.size == cnt)
t_i("Logged #{cnt} packets, expect #{usec} usec between each")
t0 = 0
pkts.each_with_index do |p, i|
t = p[:us_rel]
txt = "t: #{t}"
if (i == 0)
t_i(txt)
else
diff = (t - t0)
txt += ", diff: #{diff}"
# Use '||' rather than the low-precedence 'or' keyword for boolean logic.
if (diff < 0.9*usec || diff > 1.1*usec)
t_e(txt)
else
t_i(txt)
end
end
t0 = t
end
else
t_e("Logged #{pkts.size} packets, expected #{cnt}");
end
end
# Verifies that every frame transmitted on +port+ was counted on the
# expected priority queue: total tx count must equal the tx count for +prio+.
def check_cnt(port, prio)
cnt = $ts.dut.call("mesa_port_counters_get", port)
tx_cnt = cnt["rmon"]["tx_etherStatsPkts"]
tx_prio = cnt["prio"][prio]["tx"]
txt = "tx_cnt: #{tx_cnt}, tx_prio[#{prio}]: #{tx_prio}"
if (tx_cnt == tx_prio)
t_i(txt)
else
t_e(txt)
end
end
test "frame-io-afi-v1" do
if (cap_get("AFI_V1") == 0)
break
end
idx = $idx_rx
prio = 3
len = 128
rate = 10 # 10 fps
usec = 100000
port = $ts.dut.port_list[idx]
# Clear counters
$ts.dut.call("mesa_port_counters_clear", port)
t_i("allocate afi injection")
id = $ts.dut.call("mesa_afi_alloc", {"fps": rate})[1]
# Send and hijack frame
# Hijacking via NPI injection does not seem to work, so manual injection is used.
t_i("send frame before hijacking")
frame_tx(port, prio, id, len, false)
$ts.dut.call("mesa_afi_hijack", id)
# Check rate of received frames
frame_rx(idx, usec)
t_i("stop and free afi")
$ts.dut.call("mesa_afi_free", id)
# Check counters
check_cnt(port, prio)
end
test "frame-io-afi-v2-slow" do
if (cap_get("AFI_V2") == 0)
break
end
idx = $idx_rx
prio = 4
len = 256
rate = 36000 # 10 fps
usec = 100000
port = $ts.dut.port_list[idx]
# Clear counters
$ts.dut.call("mesa_port_counters_clear", port)
t_i("allocate afi injection")
conf = {}
conf["port_no"] = port
conf["prio"] = prio
conf["masquerade_port_no"] = 0
id = $ts.dut.call("mesa_afi_slow_inj_alloc", conf)
# Send and hijack frame
t_i("send frame before hijacking")
frame_tx(port, prio, id, len)
$ts.dut.call("mesa_afi_slow_inj_frm_hijack", id)
t_i("start afi")
conf = {}
conf["fph"] = rate
conf["jitter_mode"] = 0
conf["first_frame_urgent"] = false
$ts.dut.call("mesa_afi_slow_inj_start", id, conf)
# Check rate of received frames
frame_rx(idx, usec)
t_i("stop and free afi")
$ts.dut.call("mesa_afi_slow_inj_stop", id)
$ts.dut.call("mesa_afi_slow_inj_free", id)
# Check counters
check_cnt(port, prio)
end
test "frame-io-afi-v2-fast" do
if (cap_get("AFI_V2") == 0)
break
end
idx = $idx_rx
prio = 5
len = (1000/8) # 1000 bits per frame
rate = 1000*1000
usec = (rate / (len * 8))
port = $ts.dut.port_list[idx]
# Clear counters
$ts.dut.call("mesa_port_counters_clear", port)
t_i("allocate afi injection")
conf = {}
conf["port_no"] = port
conf["prio"] = prio
conf["frm_cnt"] = 1
conf["masquerade_port_no"] = 0
id = $ts.dut.call("mesa_afi_fast_inj_alloc", conf)
# Send and hijack frame
t_i("send frame before hijacking")
frame_tx(port, prio, id, len)
conf = {}
conf["frm_size"] = len
$ts.dut.call("mesa_afi_fast_inj_frm_hijack", id, conf)
t_i("start afi")
conf = {}
conf["bps"] = rate
conf["seq_cnt"] = 0 # Forever
act = $ts.dut.call("mesa_afi_fast_inj_start", id, conf)
# Check rate of received frames
frame_rx(idx, usec)
t_i("stop and free afi")
$ts.dut.call("mesa_afi_fast_inj_stop", id)
$ts.dut.call("mesa_afi_fast_inj_free", id)
# Check counters
check_cnt(port, prio)
end
| 25.485714 | 117 | 0.55867 |
26416be624c857a32a372349cfaf591325a26886 | 364 | # frozen_string_literal: true
# This will guess the User class
FactoryBot.define do
factory :user do
name { Faker::Name.name }
# Unique email per built user to avoid uniqueness-validation collisions.
sequence :email do |n|
"person#{n}@example.com"
end
password { 'password' }
password_confirmation { 'password' }
# :admin inherits everything from :user and flips the admin flag.
factory :admin do
admin { true }
end
# Pre-confirmed so specs can sign in without a confirmation step
# (presumably Devise :confirmable — TODO confirm).
confirmed_at { Date.current }
end
end
| 20.222222 | 40 | 0.642857 |
bffd99c9a45eab28ba0fc924f7281218a1b30a16 | 707 | # encoding: utf-8
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# /spec/fixtures/responses/whois.dns.pl/pl/property_expires_on_not_defined.expected
#
# and regenerate the tests with the following rake task
#
# $ rake spec:generate
#
require 'spec_helper'
describe "whois.dns.pl", :aggregate_failures do
# Parser built from the fixture response body for whois.dns.pl.
subject do
file = fixture("responses", "whois.dns.pl/pl/property_expires_on_not_defined.txt")
part = Whois::Record::Part.new(body: File.read(file), host: "whois.dns.pl")
Whois::Parser.parser_for(part)
end
it "matches property_expires_on_not_defined.expected" do
expect(subject.expires_on).to eq(nil)
end
end
| 26.185185 | 86 | 0.739745 |
7917dc218600c1191c85a200c1992f7014c5d7b5 | 6,228 | # $Id$
module Projects
module Api
require File.dirname(__FILE__).chomp("/projects/api") + '/projects/api/API'
require File.dirname(__FILE__).chomp("/projects/api") + '/projects/util/ZohoHTTPClient'
require File.dirname(__FILE__).chomp("/projects/api") + '/projects/parser/TaskParser'
# * TasksAPI is used to:
#
# * Get list of tasks.
#
# * Get list of task for the tasklist.
#
# * Get the details of a task.
#
# * Create a new task.
#
# * Update the details of a task.
#
# * Delete an existing task.
class TasksAPI < API
include Projects::Parser
include Projects::Util
# TaskParser is used to parse the JSON response into respective objects.
# NOTE: all methods in this class use this shared $taskParser. Previously
# some methods referenced an unset @taskParser instance variable, which
# raised NoMethodError on nil at runtime.
$taskParser = Projects::Parser::TaskParser.new
# * Construct a new TasksAPI using User's authToken and portalId.
#
# ==== Parameters
#
# * authToken:: User's authToken.
#
# * portalId:: - User's portalId.
def initialize(authToken,portalId)
super(authToken,portalId)
end
# * Get list of tasks for the project.
#
# ==== Parameters
#
# * projectId:: - ID of the project.
#
# * queryMap:: - This queryMap contains the filters in the form of key-value pair.
#
# ==== Returns
#
# * List of Task object.
def getTasks(projectId, queryMap)
url = getBaseURL+"projects/"+String(projectId)+"/tasks/"
response = ZohoHTTPClient.get(url, getQueryMap(queryMap))
return $taskParser.getTasks(response)
end
# * Get list of tasks for the task list.
#
# ==== Parameters
#
# * projectId:: - ID of the project.
#
# * tasklistId:: - ID of the tasklist.
#
# * queryMap:: - This queryMap contains the filters in the form of key-value pair.
#
# ==== Returns
#
# * List Task object.
def getTasklistTasks(projectId, tasklistId, queryMap)
url = getBaseURL+"projects/"+String(projectId)+"/tasklists/"+String(tasklistId)+"/tasks/"
response = ZohoHTTPClient.get(url, getQueryMap(queryMap))
return $taskParser.getTasks(response)
end
# * Get the details of a task.
#
# ==== Parameters
#
# * projectId:: - ID of the project.
#
# * taskId:: - ID of the task.
#
# ==== Returns
#
# * Task object.
def get(projectId, taskId)
url = getBaseURL+"projects/"+String(projectId)+"/tasks/"+String(taskId)+"/"
response = ZohoHTTPClient.get(url, getQueryMap)
return $taskParser.getTask(response)
end
# * Create a new task for the project.
#
# ==== Parameters
#
# * projectId:: - ID of the project.
#
# * task:: - Task object.
#
# ==== Returns
#
# * Task object.
def create(projectId, task)
url = getBaseURL+"projects/"+String(projectId)+"/tasks/"
response = ZohoHTTPClient.post(url, getQueryMap, task.toParamMAP)
return $taskParser.getTask(response)
end
# * Update the details of a task.
#
# ==== Parameters
#
# * projectId:: - ID of the project.
#
# * task:: - Task object.
#
# ==== Returns
#
# * Task object.
def update(projectId, task)
url = getBaseURL+"projects/"+String(projectId)+"/tasks/"+String(task.getId())+"/"
response = ZohoHTTPClient.post(url, getQueryMap, task.toParamMAP)
return $taskParser.getTask(response)
end
# * Delete an existing task.
#
# ==== Parameters
#
# * projectId:: - ID of the project.
#
# * taskId:: - ID of the task.
#
# ==== Returns
#
# * String object.
def delete(projectId, taskId)
url = getBaseURL+"projects/"+String(projectId)+"/tasks/"+String(taskId)+"/"
response = ZohoHTTPClient.delete(url, getQueryMap)
return $taskParser.getResult(response)
end
# * Get all the subtasks of the given task.
#
# ==== Parameters
#
# * projectId:: - ID of the project.
#
# * taskId:: - ID of the task.
#
# ==== Returns
#
# * List of Task object.
def getSubtasks(projectId, taskId, queryMap)
url = getBaseURL+"projects/"+String(projectId)+"/tasks/"+String(taskId)+"/subtasks/"
response = ZohoHTTPClient.get(url, getQueryMap(queryMap))
return $taskParser.getTasks(response)
end
# * Get all the task comment.
#
# ==== Parameters
#
# * projectId:: - ID of the project.
#
# * taskId:: - ID of the task.
#
# ==== Returns
#
# * List of Comment object.
def getComments(projectId, taskId, queryMap)
url = getBaseURL+"projects/"+String(projectId)+"/tasks/"+String(taskId)+"/comments/"
response = ZohoHTTPClient.get(url, getQueryMap(queryMap))
return $taskParser.getComments(response)
end
# * Add the task comment.
#
# ==== Parameters
#
# * projectId:: - ID of the project.
#
# * taskId:: - ID of the task.
#
# * content:: - Comment of the task
#
# ==== Returns
#
# * Returns the Comment object.
def addComment(projectId, taskId, content)
url = getBaseURL+"projects/"+String(projectId)+"/tasks/"+String(taskId)+"/comments/"
paramMap = Hash.new
paramMap["content"] = content
response = ZohoHTTPClient.post(url, getQueryMap, paramMap)
return $taskParser.getComment(response)
end
# * Delete an existing task comment.
#
# ==== Parameters
#
# * projectId:: - ID of the project.
#
# * taskId:: - ID of the task.
#
# * commentId:: - ID of the task Comment.
#
# ==== Returns
#
# * Returns the success message(Comment Deleted Successfully).
def deleteComment(projectId, taskId, commentId)
url = getBaseURL+"projects/"+String(projectId)+"/tasks/"+String(taskId)+"/comments/"+String(commentId)+"/"
response = ZohoHTTPClient.delete(url, getQueryMap)
return $taskParser.getResult(response)
end
end
end
end
| 24.519685 | 111 | 0.57097 |
26c3af350355e86aa3dd95560606bb4d236074f0 | 368 | require "bundler/setup"
require "gym_schedule"
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
| 24.533333 | 66 | 0.755435 |
018919154d22115bfda0bfd974deb9af0430b418 | 374 | # frozen_string_literal: true
require "rediscratch"
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
| 23.375 | 66 | 0.756684 |
79ae1bb43624c92766c6ef82338465d7abed96bd | 236 | class AddChangedFieldsToVulnerability < ActiveRecord::Migration[5.1]
# Adds an array-of-text `changed_fields` column (default []) to
# vulnerabilities, with a GIN index for array-containment queries.
def change
add_column :vulnerabilities, :changed_fields, :text, array: true, default: []
add_index :vulnerabilities, :changed_fields, using: :gin
end
end
| 33.714286 | 81 | 0.758475 |
ffce610fc10cd725feb56eb591e84777980fd4d5 | 619 | # frozen_string_literal: true
require "fileutils"
module FoxPage
class SiteBuilder
include Builders::Assets
include Builders::FileCopy
include Builders::Models
include Builders::Pages
def self.build(app)
new(app).build
end
attr_reader :app, :output_directory
def initialize(app)
@app = app
@output_directory = app.root.join(OUTPUT_DIRECTORY)
end
def build
puts "==> Building site #{App.config.site&.title}"
FileUtils.mkdir_p output_directory
load_models
build_assets
build_pages
copy_public_files
end
end
end
| 17.685714 | 57 | 0.672052 |
e2c9edd4a4a19981df0a9ef5710361989c79424c | 1,231 | # frozen_string_literal: true
module RuboCop
module Cop
module Style
# Checks for uses of the `then` keyword in multi-line if statements.
#
# @example
# # bad
# # This is considered bad practice.
# if cond then
# end
#
# # good
# # If statements can contain `then` on the same line.
# if cond then a
# elsif cond then b
# end
class MultilineIfThen < Cop
include OnNormalIfUnless
include RangeHelp
# Matches a line ending in `then` (optionally followed by a comment),
# i.e. `then` used in non-modifier position.
NON_MODIFIER_THEN = /then\s*(#.*)?$/.freeze
MSG = 'Do not use `then` for multi-line `%<keyword>s`.'
def on_normal_if_unless(node)
return unless non_modifier_then?(node)
add_offense(node, location: :begin,
message: format(MSG, keyword: node.keyword))
end
# Removes the `then` keyword together with the whitespace before it.
def autocorrect(node)
lambda do |corrector|
corrector.remove(
range_with_surrounding_space(range: node.loc.begin, side: :left)
)
end
end
private
def non_modifier_then?(node)
NON_MODIFIER_THEN.match?(node.loc.begin&.source_line)
end
end
end
end
end
| 24.137255 | 78 | 0.552396 |
39f1d9bed7a05b3ad44413409f0782529f6b7027 | 50 | module RailsParam #:nodoc
VERSION = "1.0.1"
end
| 12.5 | 25 | 0.68 |
62d7dcccf85194d96a94c091c5c845d15c2eac7b | 1,612 | require "capybara/rspec"
require "capybara/rails"
require "capybara/poltergeist"
require "capybara-screenshot/rspec"
Capybara.register_driver :poltergeist do |app|
Capybara::Poltergeist::Driver.new(app, {
:phantomjs_logger => File.open("#{Rails.root}/log/test_phantomjs.log", "a"),
})
end
Capybara.javascript_driver = :poltergeist
# Set a longer timeout for places like TravisCI where things can sometimes be
# slower.
Capybara.default_max_wait_time = 15
# Helpers mixed into feature specs (see RSpec.configure below in this file)
# for waiting on AJAX, DataTables and loading spinners.
module CapybaraFeatureHelpers
# Blocks until jQuery reports no active AJAX requests, bounded by the
# Capybara wait time. (Relies on Timeout being loaded — wait_until below
# requires "timeout" explicitly; presumably loaded by then — TODO confirm.)
def wait_for_ajax
Timeout.timeout(Capybara.default_max_wait_time) do
loop until finished_all_ajax_requests?
end
end
def finished_all_ajax_requests?
page.evaluate_script('jQuery.active').zero?
end
# Fixed sleep; DataTables filtering apparently has no observable signal.
def wait_for_datatables_filter
sleep 1
end
# NOTE(review): `page.should_not` is the deprecated monkey-patched RSpec
# syntax; consider `expect(page).not_to ...`.
def wait_for_loading_spinners
page.should_not have_selector(".loading-overlay .spinner")
page.should_not have_selector(".dataTables_wrapper .blockOverlay")
page.should_not have_selector(".dataTables_wrapper .blockMsg")
end
# Polls the given block every 0.1s until it returns truthy, returning that
# value; raises Timeout::Error after the Capybara wait time.
def wait_until
require "timeout"
Timeout.timeout(Capybara.default_max_wait_time) do
sleep(0.1) until(value = yield) # rubocop:disable Lint/AssignmentInCondition
value
end
end
# Monkey-patches $.ajax in the browser so every request is delayed by
# +delay+ ms — useful for exercising loading states.
def delay_all_ajax_calls(delay = 1500)
page.execute_script <<-eos
$.ajaxOrig = $.ajax;
$.ajax = function() {
var args = arguments;
var self = this;
setTimeout(function() {
$.ajaxOrig.apply(self, args);
}, #{delay});
};
eos
end
end
# Make the helpers above available in all feature specs.
RSpec.configure do |config|
config.include CapybaraFeatureHelpers, :type => :feature
end
| 25.1875 | 82 | 0.716501 |
87987cedc2a2517a239aabe625a89f2c361b29d7 | 340 | class DigestMailer < ApplicationMailer
add_template_helper(UsersHelper)
add_template_helper(PostsHelper)
# TODO: change mail to: @user.email when it's production ready
# Daily digest email for one user; the recipient is currently hardcoded
# (see the TODO above) and @recommended feeds the template with the four
# latest published posts.
def daily_email(user)
@user = user
@recommended = Post.latest(4).published
mail to: "[email protected]", subject: "Stories Daily Digest"
end
end
| 28.333333 | 69 | 0.744118 |
7ad425aca9776deaa989aea5f8487bed7e83a032 | 6,113 | require 'spec_helper'
# Feature spec: buyer checks out with a purchase order; then market managers
# and sellers add items to the resulting order, before and after the
# delivery cutoff time.
describe 'Checking Out', :js do
let!(:user) { create(:user) }
let!(:other_buying_user) { create(:user) }
let!(:buyer) { create(:organization, :single_location, :buyer, users: [user, other_buying_user]) }
let!(:credit_card) { create(:bank_account, :credit_card, bankable: buyer) }
let!(:bank_account) { create(:bank_account, :checking, :verified, bankable: buyer) }
let!(:seller) { create(:user) }
let!(:fulton_farms) { create(:organization, :seller, :single_location, name: 'Fulton St. Farms', users:[seller, create(:user)]) }
let!(:ada_farms){ create(:organization, :seller, :single_location, name: 'Ada Farms', users: [create(:user)]) }
let!(:market_manager) { create(:user) }
# sellers_edit_orders: true is what the "market has seller editing enabled"
# contexts below rely on.
let!(:market) do
create(:market, :with_addresses,
organizations: [buyer, fulton_farms, ada_farms],
managers: [market_manager],
sellers_edit_orders: true)
end
let!(:delivery_schedule) { create(:delivery_schedule, :percent_fee, order_cutoff: 24, fee:nil, market: market, day: 5, require_delivery: false, require_cross_sell_delivery: false, seller_delivery_start: '8:00 AM', seller_delivery_end: '5:00 PM', buyer_pickup_location_id: 0, buyer_pickup_start: '12:00 AM', buyer_pickup_end: '12:00 AM', market_pickup: false) }
let!(:delivery_schedule2) { create(:delivery_schedule, :percent_fee, order_cutoff: 24, fee:nil, market: market, day: 5, require_delivery: false, require_cross_sell_delivery: false, seller_delivery_start: '8:00 AM', seller_delivery_end: '5:00 PM', buyer_pickup_location_id: 0, buyer_pickup_start: '12:00 AM', buyer_pickup_end: '12:00 AM', market_pickup: false) }
# Fulton St. Farms
let!(:bananas) { create(:product, :sellable, name: 'Bananas', organization: fulton_farms) }
let!(:bananas_lot) { create(:lot, product: bananas, quantity: 100) }
let!(:bananas_price_buyer_base) {
create(:price, :past_price, market: market, product: bananas, min_quantity: 1, organization: buyer, sale_price: 0.50)
}
let!(:kale) { create(:product, :sellable, name: 'Kale', organization: fulton_farms) }
let!(:kale_lot) { kale.lots.first.update_attribute(:quantity, 100) }
let!(:kale_price_tier1) {
create(:price, :past_price, market: market, product: kale, min_quantity: 4, sale_price: 2.50)
}
let!(:kale_price_tier2) {
create(:price, :past_price, market: market, product: kale, min_quantity: 6, sale_price: 1.00)
}
# Ada Farms
let!(:potatoes) { create(:product, :sellable, name: 'Potatoes', organization: ada_farms) }
let!(:potatoes_lot) { create(:lot, product: potatoes, quantity: 100) }
let!(:spinach) { create(:product, :sellable, name: 'Spinach', organization: ada_farms) }
def cart_link
Dom::CartLink.first
end
# Rewind time so the delivery cutoff has not yet passed when each spec runs.
before do
Timecop.travel(DateTime.now - delivery_schedule.order_cutoff - 25.hours)
end
after do
Timecop.return
end
# Completes checkout via the purchase-order payment flow (VCR-recorded).
def checkout_with_po
choose 'Pay by Purchase Order'
fill_in 'PO Number', with: '12345'
VCR.use_cassette('place-order-by-purchase-order') do
click_button 'Place Order'
end
end
# NOTE(review): this helper and add_to_order_as_seller differ only in who
# signs in — candidates for extraction into one parameterized helper.
def add_to_order_as_market_manager
sign_in_as(market_manager)
visit admin_order_path(1)
click_button 'Add Items'
within('#supplierCatalog') do
find('.app-product-input', match: :first).set('9') # Kale
expect(page).to have_content('$9.00')
end
click_button 'Add items and Update quantities'
end
def add_to_order_as_seller
sign_in_as(seller)
visit admin_order_path(1)
click_button 'Add Items'
within('#supplierCatalog') do
find('.app-product-input', match: :first).set('9') # Kale
expect(page).to have_content('$9.00')
end
click_button 'Add items and Update quantities'
end
context 'buyer fills their cart' do
before do
switch_to_subdomain(market.subdomain)
sign_in_as(user)
choose_delivery
expect(page).to have_content('Bananas')
Dom::ProductListing.find_by_name('Bananas').set_quantity("1")
expect(page).to have_content('Added to cart!')
expect(page).to_not have_content('Added to cart!')
expect(page).to have_text('Cart 1')
cart_link.node.click
end
context 'then cutoff time passes, and buyer checks out' do
it 'shows them a past cutoff error' do
Timecop.travel((Delivery.last.cutoff_time + 8.minutes).to_s)
checkout_with_po
expect(page).to have_content('Ordering for your selected pickup or delivery date ended')
end
end
context 'then buyer checks out' do
before do
checkout_with_po
expect(page).to have_content('Thank you for your order!')
sign_out
end
it 'permits the market manager to add to the order' do
skip 'Fails intermittently, revisit w/ rails 5 transactional rollbacks in specs'
add_to_order_as_market_manager
expect(page).to have_content('Order successfully updated')
expect(page).to have_content('Kale')
end
context 'then cutoff time passes' do
before do
Timecop.travel((Delivery.last.cutoff_time + 8.minutes).to_s)
end
it 'still permits the market manager to add to the order' do
add_to_order_as_market_manager
expect(page).to have_content('Order successfully updated')
expect(page).to have_content('Kale')
end
end
context 'market has seller editing enabled' do
context 'before cutoff passes' do
it 'seller can add items to order' do
add_to_order_as_seller
expect(page).to have_content('Order successfully updated')
expect(page).to have_content('Kale')
end
end
context 'after cutoff passes' do
before do
Timecop.travel((Delivery.last.cutoff_time + 8.minutes).to_s)
end
it 'seller can still add items to order' do
add_to_order_as_seller
expect(page).to have_content('Order successfully updated')
expect(page).to have_content('Kale')
end
end
end
end
end
end
39471144f16889368e933fc256b289ba009b1a87 | 381 | # frozen_string_literal: true
# Sends broadcast emails.
class BroadcastMailer < ApplicationMailer
# Delivers a broadcast message; @body/@mission/@site_name are exposed to
# the mail template. Subject is prefixed with the site name.
def broadcast(to:, subject:, body:, mission:)
@body = body
@mission = mission
@site_name = site_name
# TODO: We should send a separate email to each recipient
# like we do with an SMS broadcast
mail(to: to, subject: "[#{site_name}] #{subject}")
end
end
| 25.4 | 61 | 0.687664 |
d56e615db1f5f8a661c241b46d3b07a6b9889b45 | 14,383 | # Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# The information about a UserDefinedFunction validation.
class DataIntegration::Models::UserDefinedFunctionValidationSummary
# The total number of validation messages.
# @return [Integer]
attr_accessor :total_message_count
# The total number of validation error messages.
# @return [Integer]
attr_accessor :error_message_count
# The total number of validation warning messages.
# @return [Integer]
attr_accessor :warn_message_count
# The total number of validation information messages.
# @return [Integer]
attr_accessor :info_message_count
# The detailed information of the UserDefinedFunction object validation.
# @return [Hash<String, Array<OCI::DataIntegration::Models::ValidationMessage>>]
attr_accessor :validation_messages
# Objects will use a 36 character key as unique ID. It is system generated and cannot be modified.
# @return [String]
attr_accessor :key
# The type of the object.
# @return [String]
attr_accessor :model_type
# The model version of the object.
# @return [String]
attr_accessor :model_version
# @return [OCI::DataIntegration::Models::ParentReference]
attr_accessor :parent_ref
# Free form text without any restriction on permitted characters. Name can have letters, numbers, and special characters. The value is editable and is restricted to 1000 characters.
# @return [String]
attr_accessor :name
# Detailed description for the object.
# @return [String]
attr_accessor :description
# The version of the object that is used to track changes in the object instance.
# @return [Integer]
attr_accessor :object_version
# The status of an object that can be set to value 1 for shallow references across objects, other values reserved.
# @return [Integer]
attr_accessor :object_status
# Value can only contain upper case letters, underscore, and numbers. It should begin with upper case letter or underscore. The value can be modified.
# @return [String]
attr_accessor :identifier
# @return [OCI::DataIntegration::Models::ObjectMetadata]
attr_accessor :metadata
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'total_message_count': :'totalMessageCount',
'error_message_count': :'errorMessageCount',
'warn_message_count': :'warnMessageCount',
'info_message_count': :'infoMessageCount',
'validation_messages': :'validationMessages',
'key': :'key',
'model_type': :'modelType',
'model_version': :'modelVersion',
'parent_ref': :'parentRef',
'name': :'name',
'description': :'description',
'object_version': :'objectVersion',
'object_status': :'objectStatus',
'identifier': :'identifier',
'metadata': :'metadata'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'total_message_count': :'Integer',
'error_message_count': :'Integer',
'warn_message_count': :'Integer',
'info_message_count': :'Integer',
'validation_messages': :'Hash<String, Array<OCI::DataIntegration::Models::ValidationMessage>>',
'key': :'String',
'model_type': :'String',
'model_version': :'String',
'parent_ref': :'OCI::DataIntegration::Models::ParentReference',
'name': :'String',
'description': :'String',
'object_version': :'Integer',
'object_status': :'Integer',
'identifier': :'String',
'metadata': :'OCI::DataIntegration::Models::ObjectMetadata'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [Integer] :total_message_count The value to assign to the {#total_message_count} property
# @option attributes [Integer] :error_message_count The value to assign to the {#error_message_count} property
# @option attributes [Integer] :warn_message_count The value to assign to the {#warn_message_count} property
# @option attributes [Integer] :info_message_count The value to assign to the {#info_message_count} property
# @option attributes [Hash<String, Array<OCI::DataIntegration::Models::ValidationMessage>>] :validation_messages The value to assign to the {#validation_messages} property
# @option attributes [String] :key The value to assign to the {#key} property
# @option attributes [String] :model_type The value to assign to the {#model_type} property
# @option attributes [String] :model_version The value to assign to the {#model_version} property
# @option attributes [OCI::DataIntegration::Models::ParentReference] :parent_ref The value to assign to the {#parent_ref} property
# @option attributes [String] :name The value to assign to the {#name} property
# @option attributes [String] :description The value to assign to the {#description} property
# @option attributes [Integer] :object_version The value to assign to the {#object_version} property
# @option attributes [Integer] :object_status The value to assign to the {#object_status} property
# @option attributes [String] :identifier The value to assign to the {#identifier} property
# @option attributes [OCI::DataIntegration::Models::ObjectMetadata] :metadata The value to assign to the {#metadata} property
# Builds the model from a caller-supplied attribute hash.
# Each property accepts either the camelCase key (as sent by the OCI REST
# API) or the snake_case Ruby key; supplying both forms of the same key
# raises a RuntimeError. NOTE(review): presence is tested by truthiness,
# not Hash#key?, so an explicit nil/false value is silently ignored.
def initialize(attributes = {})
  return unless attributes.is_a?(Hash)

  # camelCase value is applied first, then the duplicate-key check, then
  # the snake_case value (which therefore wins when both are present).
  self.total_message_count = attributes[:'totalMessageCount'] if attributes[:'totalMessageCount']

  raise 'You cannot provide both :totalMessageCount and :total_message_count' if attributes.key?(:'totalMessageCount') && attributes.key?(:'total_message_count')

  self.total_message_count = attributes[:'total_message_count'] if attributes[:'total_message_count']

  self.error_message_count = attributes[:'errorMessageCount'] if attributes[:'errorMessageCount']

  raise 'You cannot provide both :errorMessageCount and :error_message_count' if attributes.key?(:'errorMessageCount') && attributes.key?(:'error_message_count')

  self.error_message_count = attributes[:'error_message_count'] if attributes[:'error_message_count']

  self.warn_message_count = attributes[:'warnMessageCount'] if attributes[:'warnMessageCount']

  raise 'You cannot provide both :warnMessageCount and :warn_message_count' if attributes.key?(:'warnMessageCount') && attributes.key?(:'warn_message_count')

  self.warn_message_count = attributes[:'warn_message_count'] if attributes[:'warn_message_count']

  self.info_message_count = attributes[:'infoMessageCount'] if attributes[:'infoMessageCount']

  raise 'You cannot provide both :infoMessageCount and :info_message_count' if attributes.key?(:'infoMessageCount') && attributes.key?(:'info_message_count')

  self.info_message_count = attributes[:'info_message_count'] if attributes[:'info_message_count']

  self.validation_messages = attributes[:'validationMessages'] if attributes[:'validationMessages']

  raise 'You cannot provide both :validationMessages and :validation_messages' if attributes.key?(:'validationMessages') && attributes.key?(:'validation_messages')

  self.validation_messages = attributes[:'validation_messages'] if attributes[:'validation_messages']

  # Keys below have no camelCase/snake_case divergence.
  self.key = attributes[:'key'] if attributes[:'key']

  self.model_type = attributes[:'modelType'] if attributes[:'modelType']

  raise 'You cannot provide both :modelType and :model_type' if attributes.key?(:'modelType') && attributes.key?(:'model_type')

  self.model_type = attributes[:'model_type'] if attributes[:'model_type']

  self.model_version = attributes[:'modelVersion'] if attributes[:'modelVersion']

  raise 'You cannot provide both :modelVersion and :model_version' if attributes.key?(:'modelVersion') && attributes.key?(:'model_version')

  self.model_version = attributes[:'model_version'] if attributes[:'model_version']

  self.parent_ref = attributes[:'parentRef'] if attributes[:'parentRef']

  raise 'You cannot provide both :parentRef and :parent_ref' if attributes.key?(:'parentRef') && attributes.key?(:'parent_ref')

  self.parent_ref = attributes[:'parent_ref'] if attributes[:'parent_ref']

  self.name = attributes[:'name'] if attributes[:'name']

  self.description = attributes[:'description'] if attributes[:'description']

  self.object_version = attributes[:'objectVersion'] if attributes[:'objectVersion']

  raise 'You cannot provide both :objectVersion and :object_version' if attributes.key?(:'objectVersion') && attributes.key?(:'object_version')

  self.object_version = attributes[:'object_version'] if attributes[:'object_version']

  self.object_status = attributes[:'objectStatus'] if attributes[:'objectStatus']

  raise 'You cannot provide both :objectStatus and :object_status' if attributes.key?(:'objectStatus') && attributes.key?(:'object_status')

  self.object_status = attributes[:'object_status'] if attributes[:'object_status']

  self.identifier = attributes[:'identifier'] if attributes[:'identifier']

  self.metadata = attributes[:'metadata'] if attributes[:'metadata']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
  # Identity is always equal; different classes are never equal.
  return true if equal?(other)
  return false unless self.class == other.class

  # Compare every model attribute pairwise; all? short-circuits on the
  # first mismatch, just like the original chained && expression.
  %i[total_message_count error_message_count warn_message_count
     info_message_count validation_messages key model_type model_version
     parent_ref name description object_version object_status identifier
     metadata].all? { |attr| public_send(attr) == other.public_send(attr) }
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
  # Delegate to `==` so Hash/Set membership agrees with value equality.
  self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
# Hash code derived from every model attribute, so it is consistent with
# the attribute-wise `==`/`eql?` definitions.
# @return [Fixnum] Hash code
def hash
  state = [total_message_count, error_message_count, warn_message_count,
           info_message_count, validation_messages, key, model_type,
           model_version, parent_ref, name, description, object_version,
           object_status, identifier, metadata]
  state.hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
# Populates this model in place from a wire-format hash, converting each
# value to the Ruby type declared in swagger_types. Keys in the incoming
# hash are looked up via attribute_map (wire name), setters are called via
# the snake_case attribute name.
def build_from_hash(attributes)
  return nil unless attributes.is_a?(Hash)

  self.class.swagger_types.each_pair do |key, type|
    if type =~ /^Array<(.*)>/i
      # check to ensure the input is an array given that the attribute
      # is documented as an array but the input is not
      if attributes[self.class.attribute_map[key]].is_a?(Array)
        # Regexp.last_match(1) is the element type captured above.
        public_method("#{key}=").call(
          attributes[self.class.attribute_map[key]]
            .map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
        )
      end
    elsif !attributes[self.class.attribute_map[key]].nil?
      public_method("#{key}=").call(
        OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
      )
    end
    # or else data not found in attributes(hash), not an issue as the data can be optional
  end

  self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
  # Human-readable representation is simply the hash form.
  to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
# Serializes the model into a hash keyed by wire-format attribute names.
# @return [Hash] Returns the object in the form of hash
def to_hash
  self.class.attribute_map.each_with_object({}) do |(attr, param), result|
    value = public_method(attr).call
    # Skip attributes that were never assigned at all; an attribute that
    # was explicitly set to nil (ivar defined) is still emitted.
    next if value.nil? && !instance_variable_defined?("@#{attr}")

    result[param] = _to_hash(value)
  end
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
# Recursively converts a value to its hash form: arrays are compacted and
# converted element-wise, hashes are converted value-wise, objects that
# expose #to_hash are delegated to, everything else passes through as-is.
# @param [Object] value Any valid value
# @return [Object] the hash-ified value
def _to_hash(value)
  return value.compact.map { |element| _to_hash(element) } if value.is_a?(Array)
  return value.each_with_object({}) { |(k, v), acc| acc[k] = _to_hash(v) } if value.is_a?(Hash)
  return value.to_hash if value.respond_to?(:to_hash)

  value
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 44.119632 | 245 | 0.702009 |
f8f3cd5109e292993e4d4f6e1b1f604d53f7d082 | 1,428 | # frozen_string_literal: true
require_relative 'git_path'
module Travis
module PackerBuild
# Wraps a path inside a git repository at a given ref, with memoized access
# to file contents (#show) and path-filtered file listings (#files).
# `repo` is duck-typed — it must respond to #show, #remotes, #ls_files,
# #checkout and #with_temp_working (presumably a ruby-git style client;
# verify against callers).
class GitPath
  def initialize(repo, path, default_ref = 'HEAD')
    @repo = repo
    @path = path
    @default_ref = default_ref
  end

  attr_reader :repo, :path, :default_ref

  # "<origin url>::<path>" — globally unique identifier for this path.
  def namespaced_path
    "#{origin_url}::#{path}"
  end

  # URL of the remote named 'origin'. NOTE(review): raises NoMethodError on
  # nil if the repo has no 'origin' remote — confirm that is acceptable.
  def origin_url
    remotes.select { |r| r.name == 'origin' }.first.url
  end

  # Contents of +show_path+ at +ref+, memoized per (ref, path) pair.
  def show(ref = default_ref, show_path = path)
    @show_at ||= {}
    unless @show_at.fetch(ref, {}).fetch(show_path, nil)
      @show_at[ref] ||= {}
      @show_at[ref][show_path] = repo.show(ref, show_path)
    end
    @show_at[ref][show_path]
  end

  # GitPath instances for every file under #path at +ref+ whose name also
  # matches +matching+. NOTE(review): `path` is interpolated into the regex
  # unescaped — fine for plain directory names, but regex metacharacters in
  # the path would change the match; consider Regexp.escape.
  def files(matching = /.*/, ref = default_ref)
    files = files_at(ref)
    matching_files = files.map { |p, _| p }.select do |p|
      p =~ /^#{path}/ && p =~ matching
    end
    matching_files.map do |f|
      Travis::PackerBuild::GitPath.new(repo, f, ref)
    end
  end

  private

  def remotes
    @remotes ||= repo.remotes
  end

  # `git ls-files` output at +ref+, memoized per ref; performs a checkout
  # inside a temporary working copy to list the tree.
  def files_at(ref = default_ref)
    @files_at ||= {}
    unless @files_at[ref]
      repo.with_temp_working do
        repo.checkout(ref)
        @files_at[ref] = repo.ls_files('.')
      end
    end
    @files_at[ref]
  end
end
end
end
| 22.666667 | 62 | 0.539216 |
4a3155d8d07239ad5950f385d395a1c2984f27e3 | 1,299 | class PatchBlocks < Patch
# Block table. Row format:
#   [block_code, official_block_code, block_name, *set_codes]
# where entries at index 3.. are the set codes that belong to the block
# (see #block_by_set_code). official_block_code is nil for all rows here.
MagicBlocks = [
  ["KZD", nil, "Khaliz-Dorahn", "KZD", "AFM"],
  ["IMP", nil, "Imperial Legacies", "IMP", "PSA", "TOJ"],
  ["TWR", nil, "A Tourney at Whiterun", "TWR", "PFP"],
  ["GNJ", nil, "Goliaths of Nangjiao", "GNJ", "SUR"],
  ["OPH", nil, "Ophorio", "OPH", "ORP",],
  ["CAC", nil, "Carpe Arcanum", "CAC"],
  ["DYA", nil, "Death of Yakizma", "DYA"],
  ["HI12", nil, "High Noon", "HI12"],
  ["K15", nil, "Kore Set 2015", "K15"],
  ["KLC", nil, "Kaleidoscope", "KLC"],
  ["LNG", nil, "Langor", "LNG"],
  ["MIS", nil, "Mious", "MIS"],
  ["NVA", nil, "Novea", "NVA"],
  ["POA", nil, "Pyramids of Atuum", "POA"],
  ["SOR", nil, "Spark of Revolution", "SOR"],
  ["TGE", nil, "The Golden Era", "TGE"],
  ["TOW", nil, "Tides of War", "TOW"],
  ["XPM", nil, "Xoltan Pre Modern", "XPM"],
  ["ZER", nil, "Zero", "ZER"],
  ["L", nil, "The Land Bundle", "L"],
]
# Returns the MagicBlocks row whose set-code list (entries 3..) contains
# +set_code+, or nil when the code belongs to no known block.
def block_by_set_code(set_code)
  MagicBlocks.detect do |entry|
    entry.drop(3).include?(set_code)
  end
end
# Annotates every set with its block metadata. `each_set` is presumably
# provided by the Patch base class — verify there.
def call
  each_set do |set|
    code, code2, name = block_by_set_code(set["code"])
    # When no block matches, all three are nil and .compact drops every
    # key, leaving the set untouched.
    set.merge!({
      "block_code" => code,
      "official_block_code" => code2,
      "block_name" => name,
    }.compact)
  end
end
end
| 30.209302 | 59 | 0.52271 |
ac7224e4b245df122696bc33445e6fc5a2332f7f | 21,403 | # encoding: utf-8
# This file is distributed under New Relic's license terms.
# See https://github.com/newrelic/rpm/blob/master/LICENSE for complete details.
require 'new_relic/agent/transaction'
require 'new_relic/agent/instrumentation/queue_time'
module NewRelic
module Agent
# @api public
module Instrumentation
# == NewRelic instrumentation for controller actions and tasks
#
# This instrumentation is applied to the action controller to collect
# metrics for every web request.
#
# It can also be used to capture performance information for
# background tasks and other non-web transactions, including
# detailed transaction traces and traced errors.
#
# For details on how to instrument background tasks see
# ClassMethods#add_transaction_tracer and
# #perform_action_with_newrelic_trace
#
# @api public
#
module ControllerInstrumentation
# Real implementation: extends the host class with the tracing class
# methods (newrelic_ignore and friends).
def self.included(clazz) # :nodoc:
  clazz.extend(ClassMethods)
end

# This module is for importing stubs when the agent is disabled
module ClassMethodsShim # :nodoc:
  def newrelic_ignore(*args); end
  def newrelic_ignore_apdex(*args); end
  def newrelic_ignore_enduser(*args); end
end

# No-op stand-ins for the tracing entry points when the agent is disabled;
# the block-taking methods still yield so application code behaves the same.
module Shim # :nodoc:
  def self.included(clazz)
    clazz.extend(ClassMethodsShim)
  end

  def newrelic_notice_error(*args); end
  def new_relic_trace_controller_action(*args); yield; end
  def perform_action_with_newrelic_trace(*args); yield; end
end
module ClassMethods
# Have NewRelic ignore actions in this controller. Specify the actions as hash options
# using :except and :only. If no actions are specified, all actions are ignored.
#
# @api public
#
def newrelic_ignore(specifiers={})
  newrelic_ignore_aspect('do_not_trace', specifiers)
end

# Have NewRelic omit apdex measurements on the given actions. Typically used for
# actions that are not user facing or that skew your overall apdex measurement.
# Accepts :except and :only options, as with #newrelic_ignore.
#
# @api public
#
def newrelic_ignore_apdex(specifiers={})
  newrelic_ignore_aspect('ignore_apdex', specifiers)
end

# @api public
def newrelic_ignore_enduser(specifiers={})
  newrelic_ignore_aspect('ignore_enduser', specifiers)
end

# Shared implementation for the newrelic_ignore* macros: stores either
# `true` (ignore everything) or the :only/:except specifier hash under
# +property+ on the host class.
def newrelic_ignore_aspect(property, specifiers={}) # :nodoc:
  if specifiers.empty?
    self.newrelic_write_attr property, true
  elsif ! (Hash === specifiers)
    ::NewRelic::Agent.logger.error "newrelic_#{property} takes an optional hash with :only and :except lists of actions (illegal argument type '#{specifiers.class}')"
  else
    self.newrelic_write_attr property, specifiers
  end
end
# Should be monkey patched into the controller class implemented
# with the inheritable attribute mechanism.
def newrelic_write_attr(attr_name, value) # :nodoc:
  # Stored as a class-level instance variable (see the note above about
  # the inheritable attribute mechanism).
  instance_variable_set "@#{attr_name}", value
end

def newrelic_read_attr(attr_name) # :nodoc:
  instance_variable_get "@#{attr_name}"
end
# Add transaction tracing to the given method. This will treat
# the given method as a main entrypoint for instrumentation, just
# like controller actions are treated by default. Useful especially
# for background tasks.
#
# Example for background job:
# class Job
# include NewRelic::Agent::Instrumentation::ControllerInstrumentation
# def run(task)
# ...
# end
# # Instrument run so tasks show up under task.name. Note single
# # quoting to defer eval to runtime.
# add_transaction_tracer :run, :name => '#{args[0].name}'
# end
#
# Here's an example of a controller that uses a dispatcher
# action to invoke operations which you want treated as top
# level actions, so they aren't all lumped into the invoker
# action.
#
# MyController < ActionController::Base
# include NewRelic::Agent::Instrumentation::ControllerInstrumentation
# # dispatch the given op to the method given by the service parameter.
# def invoke_operation
# op = params['operation']
# send op
# end
# # Ignore the invoker to avoid double counting
# newrelic_ignore :only => 'invoke_operation'
# # Instrument the operations:
# add_transaction_tracer :print
# add_transaction_tracer :show
# add_transaction_tracer :forward
# end
#
# Here's an example of how to pass contextual information into the transaction
# so it will appear in transaction traces:
#
# class Job
# include NewRelic::Agent::Instrumentation::ControllerInstrumentation
# def process(account)
# ...
# end
# # Include the account name in the transaction details. Note the single
# # quotes to defer eval until call time.
# add_transaction_tracer :process, :params => '{ :account_name => args[0].name }'
# end
#
# See NewRelic::Agent::Instrumentation::ControllerInstrumentation#perform_action_with_newrelic_trace
# for the full list of available options.
#
# @api public
#
def add_transaction_tracer(method, options={})
  # The metric path:
  options[:name] ||= method.to_s

  # create the argument list:
  # Values are re-serialized into Ruby source so that single-quoted
  # option strings (e.g. '#{args[0].name}') are evaluated at call time
  # inside the generated wrapper below.
  options_arg = []
  options.each do |key, value|
    valuestr = case
               when value.is_a?(Symbol)
                 value.inspect
               when key == :params
                 value.to_s
               else
                 %Q["#{value.to_s}"]
               end
    options_arg << %Q[:#{key} => #{valuestr}]
  end

  # Split a trailing ?, ! or = off the method name so it can be re-appended
  # to the generated _with/_without method names.
  traced_method, punctuation = method.to_s.sub(/([?!=])$/, ''), $1
  visibility = NewRelic::Helper.instance_method_visibility self, method

  without_method_name = "#{traced_method.to_s}_without_newrelic_transaction_trace#{punctuation}"
  with_method_name = "#{traced_method.to_s}_with_newrelic_transaction_trace#{punctuation}"

  # Guard against double instrumentation of the same method.
  if NewRelic::Helper.instance_methods_include?(self, with_method_name)
    ::NewRelic::Agent.logger.warn("Transaction tracer already in place for class = #{self.name}, method = #{method.to_s}, skipping")
    return
  end

  # Classic alias-method-chain: define the traced wrapper, then swap the
  # original method behind the _without_ alias.
  class_eval <<-EOC
    def #{traced_method.to_s}_with_newrelic_transaction_trace#{punctuation}(*args, &block)
      perform_action_with_newrelic_trace(#{options_arg.join(',')}) do
        #{traced_method.to_s}_without_newrelic_transaction_trace#{punctuation}(*args, &block)
      end
    end
  EOC

  alias_method without_method_name, method.to_s
  alias_method method.to_s, with_method_name
  # Preserve the original method's visibility on both aliases.
  send visibility, method
  send visibility, with_method_name

  ::NewRelic::Agent.logger.debug("Traced transaction: class = #{self.name}, method = #{method.to_s}, options = #{options.inspect}")
end
end
# Derives transaction metric names of the form "Category/Path" for a
# traced object (an instance, or a Class/Module itself).
class TransactionNamer
  def initialize(traced_obj)
    @traced_obj = traced_obj
    @traced_class_name =
      if traced_obj.is_a?(Class) || traced_obj.is_a?(Module)
        traced_obj.name
      else
        traced_obj.class.name
      end
  end

  # Full metric name, e.g. "Controller/Users/show".
  def name(options = {})
    "#{category_name(options[:category])}/#{path_name(options)}"
  end

  # Metric-name prefix for a transaction type; falls back to the currently
  # running transaction's type when none is given.
  def category_name(type = nil)
    type ||= Transaction.current && Transaction.current.type
    case type
    when :controller, nil then 'Controller'
    when :task            then 'OtherTransaction/Background'
    when :rack            then 'Controller/Rack'
    when :uri             then 'Controller'
    when :sinatra         then 'Controller/Sinatra'
    else type.to_s # for internal use only
    end
  end

  # Path portion of the metric name, resolved in priority order:
  # explicit :path, "<class>/<:name>", #newrelic_metric_path, class name.
  def path_name(options = {})
    chosen = options[:path]
    class_name = options[:class_name] || @traced_class_name
    chosen ||= [class_name, options[:name]].compact.join('/') if options[:name]
    chosen ||= @traced_obj.newrelic_metric_path if @traced_obj.respond_to?(:newrelic_metric_path)
    chosen || class_name
  end
end
# Yield to the given block with NewRelic tracing. Used by
# default instrumentation on controller actions in Rails and Merb.
# But it can also be used in custom instrumentation of controller
# methods and background tasks.
#
# This is the method invoked by instrumentation added by the
# <tt>ClassMethods#add_transaction_tracer</tt>.
#
# Here's a more verbose version of the example shown in
# <tt>ClassMethods#add_transaction_tracer</tt> using this method instead of
# #add_transaction_tracer.
#
# Below is a controller with an +invoke_operation+ action which
# dispatches to more specific operation methods based on a
# parameter (very dangerous, btw!). With this instrumentation,
# the +invoke_operation+ action is ignored but the operation
# methods show up in New Relic as if they were first class controller
# actions
#
# MyController < ActionController::Base
# include NewRelic::Agent::Instrumentation::ControllerInstrumentation
# # dispatch the given op to the method given by the service parameter.
# def invoke_operation
# op = params['operation']
# perform_action_with_newrelic_trace(:name => op) do
# send op, params['message']
# end
# end
# # Ignore the invoker to avoid double counting
# newrelic_ignore :only => 'invoke_operation'
# end
#
#
# When invoking this method explicitly as in the example above, pass in a
# block to measure with some combination of options:
#
# * <tt>:category => :controller</tt> indicates that this is a
# controller action and will appear with all the other actions. This
# is the default.
# * <tt>:category => :task</tt> indicates that this is a
# background task and will show up in New Relic with other background
# tasks instead of in the controllers list
# * <tt>:category => :rack</tt> if you are instrumenting a rack
# middleware call. The <tt>:name</tt> is optional, useful if you
# have more than one potential transaction in the #call.
# * <tt>:category => :uri</tt> indicates that this is a
# web transaction whose name is a normalized URI, where 'normalized'
# means the URI does not have any elements with data in them such
# as in many REST URIs.
# * <tt>:name => action_name</tt> is used to specify the action
# name used as part of the metric name
# * <tt>:params => {...}</tt> to provide information about the context
# of the call, used in transaction trace display, for example:
# <tt>:params => { :account => @account.name, :file => file.name }</tt>
# These are treated similarly to request parameters in web transactions.
#
# Seldomly used options:
#
# * <tt>:force => true</tt> indicates you should capture all
# metrics even if the #newrelic_ignore directive was specified
# * <tt>:class_name => aClass.name</tt> is used to override the name
# of the class when used inside the metric name. Default is the
# current class.
# * <tt>:path => metric_path</tt> is *deprecated* in the public API. It
# allows you to set the entire metric after the category part. Overrides
# all the other options.
# * <tt>:request => Rack::Request#new(env)</tt> is used to pass in a
# request object that may respond to uri and referer.
#
# If a single argument is passed in, it is treated as a metric
# path. This form is deprecated.
#
# @api public
#
def perform_action_with_newrelic_trace(*args, &block)
  request = newrelic_request(args)
  NewRelic::Agent::TransactionState.reset(request)

  # Skip instrumentation based on the value of 'do_not_trace' and if
  # we aren't calling directly with a block.
  if !block_given? && do_not_trace?
    # Also ignore all instrumentation in the call sequence
    NewRelic::Agent.disable_all_tracing do
      return perform_action_without_newrelic_trace(*args)
    end
  end

  control = NewRelic::Control.instance
  # Dev-mode profiling path (see perform_action_with_newrelic_profile).
  return perform_action_with_newrelic_profile(args, &block) if control.profiling?

  # With a block, args are instrumentation options; without, they are the
  # wrapped action's own arguments and must not be consumed here.
  txn = _start_transaction(block_given? ? args : [])
  begin
    options = { :force => txn.force_flag, :transaction => true }
    return yield if !(NewRelic::Agent.is_execution_traced? || options[:force])

    options[:metric] = true if options[:metric].nil?
    options[:deduct_call_time_from_parent] = true if options[:deduct_call_time_from_parent].nil?
    _, expected_scope = NewRelic::Agent::MethodTracer::TraceExecutionScoped.trace_execution_scoped_header(options, txn.start_time.to_f)

    begin
      NewRelic::Agent::BusyCalculator.dispatcher_start txn.start_time
      if block_given?
        yield
      else
        perform_action_without_newrelic_trace(*args)
      end
    rescue => e
      # Record the error on the transaction, then let it propagate.
      txn.notice_error(e)
      raise
    end
  ensure
    # Always finish the transaction, whether the action succeeded or raised:
    # freeze the name, emit scoped/rollup metrics, record apdex, stop.
    end_time = Time.now
    txn.freeze_name
    metric_names = Array(recorded_metrics(txn))
    txn_name = metric_names.shift

    NewRelic::Agent::MethodTracer::TraceExecutionScoped.trace_execution_scoped_footer(txn.start_time.to_f, txn_name, metric_names, expected_scope, options, end_time.to_f)
    NewRelic::Agent::BusyCalculator.dispatcher_finish(end_time)
    txn.record_apdex(end_time) unless ignore_apdex?
    txn = Transaction.stop(txn_name, end_time)

    NewRelic::Agent::TransactionState.get.request_ignore_enduser = true if ignore_enduser?
  end
end
# Metric names to record for +txn+: the transaction's own name first,
# followed by the parser's summary/rollup metrics when the transaction has
# no parent (i.e. it is top-level).
def recorded_metrics(txn)
  parser = NewRelic::MetricParser::MetricParser.for_metric_named(txn.name)
  names = [txn.name]
  names.concat(parser.summary_metrics) unless txn.has_parent?
  names
end
protected
# Extracts the HTTP request object from the instrumentation arguments,
# checking in order: an explicit :request option, a Rack env hash, or the
# host object's own #request (Rails). Returns nil when none applies.
def newrelic_request(args)
  opts = args.first
  # passed as a parameter to add_transaction_tracer
  if opts.respond_to?(:keys) && opts.respond_to?(:[]) && opts[:request]
    opts[:request]
  # in a Rack app
  elsif opts.respond_to?(:keys) && opts.respond_to?(:[]) &&
        opts['rack.version']
    # Fix: wrap the Rack env hash itself (opts), not the argument array.
    # The guard above established that opts is the env ('rack.version'),
    # and Rack::Request expects an env hash.
    Rack::Request.new(opts)
  # in a Rails app
  elsif self.respond_to?(:request)
    self.request
  end
end
# Hook for dispatcher subclasses to report the HTTP response code;
# the base implementation returns nil.
def newrelic_response_code; end

# The request's headers object, or a falsy value when the host has no
# #request or the request exposes no #headers.
def newrelic_request_headers
  return false unless self.respond_to?(:request)

  req = self.request
  req.respond_to?(:headers) && req.headers
end
# overrideable method to determine whether to trace an action
# or not - you may override this in your controller and supply
# your own logic for ignoring transactions.
def do_not_trace?
  _is_filtered?('do_not_trace')
end

# overrideable method to determine whether to trace an action
# for purposes of apdex measurement - you can use this to
# ignore things like api calls or other fast non-user-facing
# actions
def ignore_apdex?
  _is_filtered?('ignore_apdex')
end

# Whether end-user (browser) monitoring should be suppressed for this
# action; set via newrelic_ignore_enduser.
def ignore_enduser?
  _is_filtered?('ignore_enduser')
end
private
# Profile the instrumented call. Dev mode only. Experimental
# - should definitely not be used on production applications
def perform_action_with_newrelic_profile(args)
  txn = _start_transaction(block_given? ? args : [])
  val = nil
  NewRelic::Agent.trace_execution_scoped txn.metric_name do
    # Normal tracing is disabled while RubyProf profiles the action.
    NewRelic::Agent.disable_all_tracing do
      # turn on profiling
      profile = RubyProf.profile do
        if block_given?
          val = yield
        else
          val = perform_action_without_newrelic_trace(*args)
        end
      end
      NewRelic::Agent.instance.transaction_sampler.notice_profile profile
    end
  end
  return val
ensure
  # Unwind the transaction even when the action raises.
  txn.pop
end
# Write a transaction onto a thread local if there isn't already one there.
# If there is one, just update it.
def _start_transaction(args) # :nodoc:
  # If a block was passed in, then the arguments represent options for the instrumentation,
  # not app method arguments.
  options = {}
  if args.any?
    if args.last.is_a?(Hash)
      options = args.pop
    end
    available_params = options[:params] || {}
    # A single leading non-hash argument is treated as the metric path
    # (deprecated calling form).
    options[:name] ||= args.first
  else
    available_params = self.respond_to?(:params) ? self.params : {}
  end

  options[:request] ||= self.request if self.respond_to? :request
  # Apply the host's parameter filtering (e.g. Rails filter_parameters)
  # before attaching params to the transaction.
  options[:filtered_params] = (respond_to? :filter_parameters) ? filter_parameters(available_params) : available_params
  category = options[:category] || :controller
  txn = Transaction.start(category, options)
  txn.name = TransactionNamer.new(self).name(options)
  # Apdex clock starts at the upstream (queue) entry time when detectable.
  txn.apdex_start = _detect_upstream_wait(txn.start_time)
  _record_queue_length
  return txn
end
# Filter out a request if it matches one of our parameters for
# ignoring it - the key is either 'do_not_trace' or 'ignore_apdex'
# Whether the current action is filtered out under +key+ ('do_not_trace'
# or 'ignore_apdex'): false when nothing was configured, true when the
# whole controller is ignored, otherwise the :only/:except lists decide.
def _is_filtered?(key)
  filter_spec = self.class.newrelic_read_attr(key) if self.class.respond_to? :newrelic_read_attr

  case filter_spec
  when nil
    false
  when Hash
    only_actions = Array(filter_spec[:only])
    except_actions = Array(filter_spec[:except])
    only_actions.include?(action_name.to_sym) ||
      (except_actions.any? && !except_actions.include?(action_name.to_sym))
  else
    true
  end
end
# Take a guess at a measure representing the number of requests waiting in mongrel
# or heroku.
def _record_queue_length
  if newrelic_request_headers
    # Heroku exposes the queue depth directly in a request header;
    # otherwise fall back to counting local mongrel workers.
    if queue_depth = newrelic_request_headers['HTTP_X_HEROKU_QUEUE_DEPTH']
      queue_depth = queue_depth.to_i rescue nil
    elsif mongrel = NewRelic::Control.instance.local_env.mongrel
      # Always subtrace 1 for the active mongrel
      queue_depth = [mongrel.workers.list.length.to_i - 1, 0].max rescue nil
    end
    NewRelic::Agent.record_metric('Mongrel/Queue Length', queue_depth) if queue_depth
  end
end
# Return a Time instance representing the upstream start time.
# now is a Time instance to fall back on if no other candidate
# for the start time is found.
def _detect_upstream_wait(now)
  if newrelic_request_headers
    queue_start = QueueTime.parse_frontend_timestamp(newrelic_request_headers, now)
    QueueTime.record_frontend_metrics(queue_start, now) if queue_start
  end
  # Fall back to +now+ when no frontend timestamp was found (queue_start
  # is nil both when headers are absent and when parsing fails).
  queue_start || now
rescue => e
  # Never let queue-time detection break request handling.
  ::NewRelic::Agent.logger.error("Error detecting upstream wait time:", e)
  now
end
end
end
end
end
| 41.802734 | 178 | 0.599402 |
18c9e63c8d4d4943327564b72a931baf3e4aa9f4 | 422 | require 'pp'
# Note: test not needed, simply work up from the generated list of triangular numbers
# True when +i+ is a triangular number (i == n*(n+1)/2 for some n >= 1).
# Uses the inverse formula n = (sqrt(8i+1)-1)/2, rounded, then verifies
# exactly in integer arithmetic — avoiding the original float-fraction
# trick, which also raised Math::DomainError for negative input.
def is_triangular?(i)
  return false if i < 1

  n = ((Math.sqrt(8.0 * i + 1) - 1) / 2).round
  n * (n + 1) / 2 == i
end
# True when +i+ is a pentagonal number: the inverse of P(n) = n(3n-1)/2,
# n = (sqrt(24i+1)+1)/6, must come out to a whole number.
def is_pentagonal?(i)
  candidate = (Math.sqrt(24 * i + 1) + 1) / 6
  (candidate % 1).zero?
end
# True when +i+ is a hexagonal number: the inverse of H(n) = n(2n-1),
# n = (sqrt(8i+1)+1)/4, must come out to a whole number.
def is_hexagonal?(i)
  candidate = (Math.sqrt(8 * i + 1) + 1) / 4
  (candidate % 1).zero?
end
pp (1..200).select{|i| is_hexagonal?(i)}
| 22.210526 | 85 | 0.623223 |
0891ae7c6b261b110d3c0c7ad7af7cc507c8464f | 1,067 | Новая книга Джоан Роулинг "Гарри Поттер и проклятое дитя" стала самой продаваемой в Великобритании, сообщает The Daily Mail.
Роман обогнал по популярности пьесу "Ромео и Джульетта" Уильяма Шекспира.
По информации издания, за первую неделю после выхода восьмой части саги о волшебнике было продано 847,9 тысяч экземпляров.
Книга "Гарри Поттер и проклятое дитя" принесла 8,76 миллиона фунтов стерлингов.
При этом трагедия Шекспира оказалась на втором месте - с 1998 года было продано почти 127,7 тысяч книг издательства Penguin Classics.
Восьмая часть саги о волшебнике из Хогвартса "Гарри Поттер и проклятое дитя" вышла в свет 31 июля.
Действие пьесы разворачивается спустя 19 лет после событий, описанных в книге "Гарри Поттер и дары Смерти".
В тот же день в Лондоне на сцене театра Palace состоялась премьера спектакля по пьесе.
В России англоязычная версия романа поступила на прилавки "Московского дома книги" - единственного официального продавца романа - 5 августа.
Все экземпляры были распроданы за сутки.
На русском новая часть саги выйдет до конца 2016 года.
| 88.916667 | 140 | 0.819119 |
e9e5ca121b4bc95ed18d0a7a3fb756ed84476b54 | 956 |
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "unifonic_sms/version"
# Gem packaging metadata for the unifonic_sms client.
Gem::Specification.new do |spec|
  spec.name          = "unifonic_sms"
  spec.version       = UnifonicSms::VERSION
  spec.required_ruby_version = '>= 2.3.0'
  spec.authors       = ["Assen Deghady"]
  spec.email         = ["[email protected]"]

  spec.summary       = %q{Send SMS messages using Unifonic Api.}
  spec.homepage      = "https://github.com/AssemDeghady/unifonic_sms"
  spec.license       = "MIT"

  # NOTE(review): files only globs lib/**/*.rb, so the exe/ grep below can
  # never match and executables is always empty — confirm whether the gem
  # is meant to ship executables.
  spec.files         = Dir['lib/**/*.rb']
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.16"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec", "~> 3.0"
  spec.add_development_dependency "webmock", '~> 3.1', '>= 3.1.1'
end
| 32.965517 | 74 | 0.643305 |
18f12840b9f4abefd136bbd2c562ac036eaf12f7 | 2,248 | describe ManageIQ::Providers::AzureStack::CloudManager do
# Credentials come from Rails secrets when present; the ALL-CAPS fallbacks
# are the placeholder values recorded in the VCR cassettes.
let(:url)          { "https://#{Rails.application.secrets.azure_stack.try(:[], 'host') || 'AZURE_STACK_HOST'}" }
let(:tenant)       { Rails.application.secrets.azure_stack.try(:[], 'tenant') || 'AZURE_STACK_TENANT' }
let(:userid)       { Rails.application.secrets.azure_stack.try(:[], 'userid') || 'AZURE_STACK_USERID' }
let(:password)     { Rails.application.secrets.azure_stack.try(:[], 'password') || 'AZURE_STACK_PASSWORD' }
let(:subscription) { Rails.application.secrets.azure_stack.try(:[], 'subscription') || 'AZURE_STACK_SUBSCRIPTION' }

# Runs the examples once per supported API version; cassettes are keyed by
# provider class and api_version.
supported_api_versions do |api_version|
  describe '#raw_connect' do
    let(:args) { [url, tenant, userid, password, subscription, :Resources, api_version] }

    it 'when successful' do
      vcr_with_auth("#{described_class.name.underscore}/#{api_version}/raw_connect-success") do
        described_class.raw_connect(*args, :validate => true)
      end
    end

    context 'when bad tenant id' do
      let(:tenant) { 'bad-value' }

      it 'raises MIQ error' do
        VCR.use_cassette("#{described_class.name.underscore}/#{api_version}/raw_connect-bad_tenant") do
          expect { described_class.raw_connect(*args, :validate => true) }.to raise_error(MiqException::MiqInvalidCredentialsError)
        end
      end
    end

    context 'when bad username and password' do
      let(:userid)   { 'bad-value' }
      let(:password) { 'bad-value' }

      it 'raises MIQ error' do
        VCR.use_cassette("#{described_class.name.underscore}/#{api_version}/raw_connect-bad_username_password") do
          expect { described_class.raw_connect(*args, :validate => true) }.to raise_error(MiqException::MiqInvalidCredentialsError)
        end
      end
    end

    context 'when bad subscription' do
      let(:subscription) { 'bad-value' }

      # NOTE(review): this context uses vcr_with_auth while the other
      # failure contexts use VCR.use_cassette — presumably intentional
      # (auth succeeds, subscription lookup fails); verify.
      it 'raises MIQ error' do
        vcr_with_auth("#{described_class.name.underscore}/#{api_version}/raw_connect-bad_subscription") do
          expect { described_class.raw_connect(*args, :validate => true) }.to raise_error(MiqException::MiqInvalidCredentialsError)
        end
      end
    end
  end
end
end
| 46.833333 | 133 | 0.657028 |
1891449a586ae6662fc014c16787ca7f734516fa | 95 | FactoryGirl.define do
# Default user factory with a fixed email/password pair.
factory :user do
  email "[email protected]"
  password "123456"
end
end | 11.875 | 23 | 0.715789 |
18a34b1be87afc26fa2d60ebe31734a12d231f83 | 7,484 |
# Dump a label's generated ZPL to stdout under a readable heading.
def print_zpl_str(name, label)
  buffer = ''
  label.dump_contents buffer
  puts "\n#{name}:\n#{buffer}\n\n"
end
# Builds a fresh demo label: 600x305 dots, print speed 6, density 5, one copy.
def new_label
  Zebra::Zpl::Label.new(
    copies:        1,
    width:         600,
    length:        305,
    print_speed:   6,
    print_density: 5
  )
end
################################################################################
# Text
################################################################################
# Centered greeting plus a thin rounded border drawn around the label
# (5-dot top margin; height label.length-10 leaves a matching bottom margin).
label = new_label
text = Zebra::Zpl::Text.new(
  data:          "Hello, printer!",
  position:      [10, 125],
  font_size:     Zebra::Zpl::FontSize::SIZE_5,
  justification: Zebra::Zpl::Justification::CENTER
)
box = Zebra::Zpl::Graphic.new(
  graphic_type:    'B',
  position:        [10,5],
  graphic_width:   label.width,
  graphic_height:  label.length-10,
  line_thickness:  1,
  rounding_degree: 1
)
label << text
label << box
print_zpl_str('text', label)
################################################################################
# Barcode
################################################################################
# Code 128 (auto subset) barcode with the human-readable text printed under it.
label = new_label
barcode = Zebra::Zpl::Barcode.new(
  data:                      'F112358',
  position:                  [80, 50],
  height:                    150,
  width:                     4,
  print_human_readable_code: true,
  type:                      Zebra::Zpl::BarcodeType::CODE_128_AUTO
)
label << barcode
print_zpl_str('barcode', label)
################################################################################
# QR Code
################################################################################
# QR code at magnification 8 with the highest error-correction level ('H').
label = new_label
qrcode = Zebra::Zpl::Qrcode.new(
  data:             'www.github.com',
  position:         [200, 45],
  scale_factor:     8,
  correction_level: 'H',
)
label << qrcode
print_zpl_str('qrcode', label)
################################################################################
# Data Matrix
################################################################################
# Square (aspect_ratio 1) Data Matrix symbol with 10-dot module height.
label = new_label
datamatrix = Zebra::Zpl::Datamatrix.new(
  data:          'www.github.com',
  position:      [225, 75],
  symbol_height: 10,
  aspect_ratio:  1
)
label << datamatrix
print_zpl_str('datamatrix', label)
################################################################################
# Graphics
################################################################################
# One of each primitive graphic type: B=box, C=circle, D=diagonal line,
# E=ellipse, S=symbol. The second diagonal is a dup of the first with the
# opposite orientation, forming an X.
label = new_label
box = Zebra::Zpl::Graphic.new(
  graphic_type:    'B',
  position:        [10,10],
  graphic_width:   80,
  graphic_height:  80,
  line_thickness:  2,
  rounding_degree: 2
)
circle = Zebra::Zpl::Graphic.new(
  graphic_type:   'C',
  position:       [100,10],
  graphic_width:  80,
  line_thickness: 3
)
diagonal1 = Zebra::Zpl::Graphic.new(
  graphic_type:   'D',
  position:       [190,10],
  graphic_width:  80,
  graphic_height: 80,
  line_thickness: 3,
  orientation:    'R'
)
diagonal2 = diagonal1.dup
diagonal2.orientation = 'L'
ellipse = Zebra::Zpl::Graphic.new(
  graphic_type:   'E',
  position:       [280,10],
  graphic_width:  40,
  graphic_height: 80,
  line_thickness: 3
)
symbol = Zebra::Zpl::Graphic.new(
  graphic_type:  'S',
  symbol_type:   'B',
  position:      [335,10],
  graphic_width: 80,
  graphic_height: 80
)
label << box
label << circle
label << diagonal1
label << diagonal2
label << ellipse
label << symbol
# Shift every element (all were appended above) down-right to center the group.
label.elements.each { |e| e.position = [e.x + 110 , e.y + 90] }
print_zpl_str('graphics', label)
################################################################################
# Images
################################################################################
# Same source image rendered twice: as-is, then with inverted black/white.
label = new_label
image = Zebra::Zpl::Image.new(
  path:     File.expand_path('./images/earth.jpg', File.dirname(__FILE__)),
  position: [145, 0],
  width:    305,
  height:   305
)
label << image
print_zpl_str('image', label)
# inverted image
label = new_label
image = Zebra::Zpl::Image.new(
  path:     File.expand_path('./images/earth.jpg', File.dirname(__FILE__)),
  position: [145, 0],
  width:    305,
  height:   305,
  invert:   true
)
label << image
print_zpl_str('image_inverted', label)
################################################################################
# Image Manipulation
################################################################################
# Pre-processes the source image with MiniMagick before the label renders it:
# flatten onto white, run edge detection in-place, then trim the borders.
label = new_label
image = Zebra::Zpl::Image.new(
  path:            File.expand_path('./images/ruby.png', File.dirname(__FILE__)),
  position:        [0, 0],
  width:           305,
  height:          305,
  black_threshold: 0.65
)
src = image.source
src.background('white').flatten
# perform edge detection on the image
# (reads src.path and writes the converted result back over the same file)
MiniMagick::Tool::Convert.new do |convert|
  convert << src.path
  convert << '-colorspace' << 'gray'
  convert << '-edge' << '4'
  convert << '-negate'
  convert << src.path
end
src.trim
label << image
print_zpl_str('image_manipulation', label)
################################################################################
# Justification
################################################################################
# The word "ZPL" rendered LEFT/CENTER/RIGHT, plus a longer paragraph using
# JUSTIFIED, all inside a thin rounded border.
label = new_label
t1 = Zebra::Zpl::Text.new(
  data:          "ZPL",
  position:      [10, 25],
  font_size:     Zebra::Zpl::FontSize::SIZE_5,
  justification: Zebra::Zpl::Justification::LEFT
)
t2 = Zebra::Zpl::Text.new(
  data:          "ZPL",
  position:      [10, 65],
  font_size:     Zebra::Zpl::FontSize::SIZE_5,
  justification: Zebra::Zpl::Justification::CENTER
)
t3 = Zebra::Zpl::Text.new(
  data:          "ZPL",
  position:      [10, 105],
  font_size:     Zebra::Zpl::FontSize::SIZE_5,
  justification: Zebra::Zpl::Justification::RIGHT
)
t4 = Zebra::Zpl::Text.new(
  data:          "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua",
  position:      [10, 180],
  font_size:     Zebra::Zpl::FontSize::SIZE_4,
  justification: Zebra::Zpl::Justification::JUSTIFIED
)
box = Zebra::Zpl::Graphic.new(
  graphic_type:    'B',
  position:        [10,5],
  graphic_width:   label.width,
  graphic_height:  label.length-10,
  line_thickness:  1,
  rounding_degree: 1
)
label << t1
label << t2
label << t3
label << t4
label << box
print_zpl_str('justification', label)
################################################################################
# Rotation
################################################################################
# One single-line text element per rotation (0/90/180/270 degrees). Note the
# text elements are shifted right BEFORE the border box is appended, so the
# box itself stays at its original position.
label = new_label
t1 = Zebra::Zpl::Text.new(
  data:      "Zero",
  position:  [10, 125],
  font_size: Zebra::Zpl::FontSize::SIZE_5,
  rotation:  Zebra::Zpl::Rotation::NO_ROTATION,
  max_lines: 1
)
t2 = Zebra::Zpl::Text.new(
  data:      "90",
  position:  [100, 125],
  font_size: Zebra::Zpl::FontSize::SIZE_5,
  rotation:  Zebra::Zpl::Rotation::DEGREES_90,
  max_lines: 1
)
t3 = Zebra::Zpl::Text.new(
  data:          "180",
  position:      [175, 125],
  font_size:     Zebra::Zpl::FontSize::SIZE_5,
  rotation:      Zebra::Zpl::Rotation::DEGREES_180,
  justification: Zebra::Zpl::Justification::RIGHT,
  max_lines:     1
)
t4 = Zebra::Zpl::Text.new(
  data:          "270",
  position:      [275, 125],
  font_size:     Zebra::Zpl::FontSize::SIZE_5,
  rotation:      Zebra::Zpl::Rotation::DEGREES_270,
  justification: Zebra::Zpl::Justification::RIGHT,
  max_lines:     1
)
box = Zebra::Zpl::Graphic.new(
  graphic_type:    'B',
  position:        [10,5],
  graphic_width:   label.width,
  graphic_height:  label.length-10,
  line_thickness:  1,
  rounding_degree: 1
)
label << t1
label << t2
label << t3
label << t4
label.elements.each { |e| e.position = [e.x + 150, e.y] }
label << box
print_zpl_str('rotation', label)
| 26.920863 | 139 | 0.514965 |
08bf77940c527ae059849bb9167ad21994a04069 | 634 | # Quick script to get 'interesting' 11-circle patterns.
# for ticker-tape level
# Finds "interesting" 11-circle patterns and prints them in both their
# run-length-key and binary-integer encodings.
require 'HendecagonUtils.rb'
include HendecagonUtils

s1 = getUniqStates(false)

# A configuration is "complex" when:
#   * its run-length key has more than 5 runs (key.length > 5),
#   * exactly 6 of the 11 circles are set (six '1' bits), and
#   * the bit pattern has no circular symmetry.
# String#count replaces the original hand-rolled per-character loop, and
# dec2bin is now evaluated once per key instead of twice.
# NOTE(review): assumes dec2bin is pure/deterministic — confirm in HendecagonUtils.
complex = s1.keys.select do |key|
  next false unless key.length > 5
  bits = dec2bin(s1[key])
  bits.count('1') == 6 && !isCircleSym(bits)
end

puts "Complex Configurations: run-length encoding"
complex.each { |c| puts c }

puts "Complex Configurations: binary integer encoding"
complex.each { |c| puts s1[c] }
| 17.135135 | 55 | 0.637224 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.