hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
613b9cfafc823b18b59e1044cf2345860d77b58e | 927 | require 'json'
package = JSON.parse(File.read(File.join(__dir__, 'package.json')))
# CocoaPods spec for the versioned (ABI41) Expo DocumentPicker module.
# Name/author/license metadata is pulled from the sibling package.json
# (read just above) so the podspec stays in sync with the npm package.
Pod::Spec.new do |s|
s.name = 'ABI41_0_0EXDocumentPicker'
s.version = package['version']
s.summary = package['description']
s.description = package['description']
s.license = package['license']
s.author = package['author']
s.homepage = package['homepage']
s.platform = :ios, '11.0'
s.source = { git: 'https://github.com/expo/expo.git' }
s.dependency 'ABI41_0_0UMCore'
s.dependency 'ABI41_0_0UMFileSystemInterface'
# Ship the prebuilt xcframework (headers only as source files) when:
# sources are not forced for this package, EXPO_USE_SOURCE is unset/0,
# the prebuilt archive exists, and CocoaPods >= 1.10 (xcframework support).
# Otherwise fall back to building from .h/.m sources.
if !$ExpoUseSources&.include?(package['name']) && ENV['EXPO_USE_SOURCE'].to_i == 0 && File.exist?("#{s.name}.xcframework") && Gem::Version.new(Pod::VERSION) >= Gem::Version.new('1.10.0')
s.source_files = "#{s.name}/**/*.h"
s.vendored_frameworks = "#{s.name}.xcframework"
else
s.source_files = "#{s.name}/**/*.{h,m}"
end
end
| 35.653846 | 188 | 0.617044 |
e8e02946becf3f698a90c0d5e1005254e1f3b9d3 | 353 | cask "font-kelly-slab" do
# Rolling release: the TTF is fetched from the google/fonts master branch,
# so there is no stable version number or checksum to pin.
version :latest
sha256 :no_check
# github.com/google/fonts/ was verified as official when first introduced to the cask
url "https://github.com/google/fonts/raw/master/ofl/kellyslab/KellySlab-Regular.ttf"
name "Kelly Slab"
homepage "https://fonts.google.com/specimen/Kelly+Slab"
# Installs the downloaded file as a font artifact.
font "KellySlab-Regular.ttf"
end
| 29.416667 | 87 | 0.753541 |
1c354230100fc0d5e5b2b87ac5574cd53ce98c7f | 2,154 | # Copyright 2007-2014 Greg Hurrell. All rights reserved.
# Licensed under the terms of the BSD 2-clause license.
require 'walrat'
module Walrat
  # Thin wrapper around a MatchData instance.
  #
  # Implements #to_s and #to_str so a wrapper can be compared directly with
  # a String via ==. The wrapped MatchData is reachable through the
  # +match_data+ reader. A clone of the incoming MatchData is stored at
  # construction time, so wrapping the volatile $~ global is safe: later
  # regexp operations cannot mutate what we hold.
  class MatchDataWrapper
    include Walrat::LocationTracking

    attr_reader :match_data

    # Wraps +data+. Raises ArgumentError when +data+ is nil.
    def initialize(data)
      raise ArgumentError, 'nil data' if data.nil?
      self.match_data = data
    end

    # Returning a String makes the wrapper eligible for implicit string
    # conversion, which in turn makes String == wrapper comparisons work.
    def to_str
      to_s
    end

    # Wrapper-to-wrapper comparison delegates to MatchData#==, which only
    # reports equality for the very same match; anything string-convertible
    # is compared textually; everything else is unequal.
    def ==(other)
      case other
      when MatchDataWrapper
        match_data == other.match_data
      else
        other.respond_to?(:to_str) ? to_str == other.to_str : false
      end
    end

    # The entire matched text.
    def to_s
      @match_data[0]
    end

    # NOTE(review): String#jlength is a Ruby 1.8 multibyte helper — kept
    # verbatim for compatibility with the rest of this historical codebase.
    def jlength
      to_s.jlength
    end

  private

    # Stores a defensive copy; falls back to the original object when it
    # cannot be cloned.
    def match_data=(data)
      @match_data = begin
        data.clone
      rescue
        data
      end
    end
  end # class MatchDataWrapper
end # module Walrat
| 32.636364 | 79 | 0.71727 |
3369c23f7725aad93bb6238c8562f5ba6c6cdbf5 | 6,518 | require 'rubygems'
require 'pathname'
gem 'dm-core', '=0.9.2'
require 'dm-core'
dir = Pathname(__FILE__).dirname.expand_path / 'dm-validations'
require dir / 'validation_errors'
require dir / 'contextual_validators'
require dir / 'auto_validate'
require dir / 'generic_validator'
require dir / 'required_field_validator'
require dir / 'primitive_validator'
require dir / 'absent_field_validator'
require dir / 'confirmation_validator'
require dir / 'format_validator'
require dir / 'length_validator'
require dir / 'within_validator'
require dir / 'numeric_validator'
require dir / 'method_validator'
require dir / 'uniqueness_validator'
require dir / 'acceptance_validator'
require dir / 'custom_validator'
require dir / 'support' / 'object'
module DataMapper
module Validate
# Hook run when this module is mixed into a model. If the model has a
# #save but no #save! yet, the original #save is preserved as #save! and
# #save is rerouted through the validating wrapper.
def self.included(model)
  return unless model.method_defined?(:save)
  return if model.method_defined?(:save!)
  model.send(:alias_method, :save!, :save)
  model.send(:alias_method, :save, :save_with_validations)
end
# Validates the resource for +context+ before saving; returns false when
# validation fails, otherwise delegates to #save! (the unvalidated save).
#
def save_with_validations(context = :default)
  valid?(context) ? save! : false
end
# Return the resource's ValidationErrors collection, memoized on first use.
#
def errors
@errors ||= ValidationErrors.new
end
# Mark this resource as validatable. When we validate associations of a
# resource we can check if they respond to validatable? before trying to
# recursively validate them.
#
def validatable?()
true
end
# Alias for valid?(:default).
#
def valid_for_default?
valid?(:default)
end
# Check if a resource is valid in a given context by executing every
# validator registered for that context against self.
#
def valid?(context = :default)
self.class.validators.execute(context,self)
end
# Begin a recursive walk over this resource and everything reachable from
# its instance variables, checking validity along the way.
#
def all_valid?(context = :default)
recursive_valid?(self,context,true)
end
# Recursive validity worker for #all_valid?: walks +target+'s instance
# variables, recursing into any value (or member of an enumerable value)
# that reports itself validatable?, and finally folds in +target+'s own
# validity. +state+ seeds the accumulator; once it turns false the &&=
# short-circuit stops issuing further recursive calls, matching the
# original `valid = valid && ...` behaviour.
def recursive_valid?(target, context, state)
  result = state
  target.instance_variables.each do |ivar_name|
    value = target.instance_variable_get(ivar_name)
    if value.validatable?
      result &&= recursive_valid?(value, context, result)
    elsif value.respond_to?(:each)
      value.each do |member|
        result &&= recursive_valid?(member, context, result) if member.validatable?
      end
    end
  end
  result && target.valid?
end
# Fetch the value of field +name+: prefer the raw instance variable when
# one is defined, fall back to the reader method, else nil.
#
# FIX: the old guard was `instance_variables.include?(name)` — but
# #instance_variables returns "@name"/: @name entries, never the bare field
# name, so the ivar branch was dead code and the reader was always used.
# instance_variable_defined? accepts "@#{name}" on both old and new Rubies.
def validation_property_value(name)
  return instance_variable_get("@#{name}") if instance_variable_defined?("@#{name}")
  return send(name) if respond_to?(name)
  nil
end
# Get the corresponding Resource property, if it exists.
#
# Note: DataMapper validations can be used on non-DataMapper resources.
# In such cases, the return value will be nil.
def validation_property(field_name)
# Module#> — true when self.class includes the DataMapper::Resource mixin,
# i.e. this really is a DataMapper-backed class.
if DataMapper::Resource > self.class
self.class.properties(self.repository.name)[field_name]
end
end
# For the relationship registered under +name+, return the list of child
# key names; returns nil when no such relationship exists.
def validation_association_keys(name)
  relationships = self.class.relationships
  return nil unless relationships.has_key?(name)
  relationships[name].child_key.map { |key| key.name }
end
# Class-level validation DSL: mixes in every validates_* macro module and
# holds the per-class registry of contextual validators.
module ClassMethods
include DataMapper::Validate::ValidatesPresent
include DataMapper::Validate::ValidatesAbsent
include DataMapper::Validate::ValidatesIsConfirmed
include DataMapper::Validate::ValidatesIsPrimitive
include DataMapper::Validate::ValidatesIsAccepted
include DataMapper::Validate::ValidatesFormat
include DataMapper::Validate::ValidatesLength
include DataMapper::Validate::ValidatesWithin
include DataMapper::Validate::ValidatesIsNumber
include DataMapper::Validate::ValidatesWithMethod
include DataMapper::Validate::ValidatesIsUnique
include DataMapper::Validate::AutoValidate
# Return the set of contextual validators or create a new one.
# NOTE(review): memoized in @validations (not @validators) — historical
# naming; always go through this reader rather than the ivar.
#
def validators
@validations ||= ContextualValidators.new
end
# Clean up the argument list and return an opts hash, merging in any
# default opts without clobbering explicitly supplied values. The context
# defaults to :default; :on, :when and :group are accepted as aliases for
# :context (and are removed from the returned hash).
#
def opts_from_validator_args(args, defaults = nil)
  opts = args.last.kind_of?(Hash) ? args.pop : {}
  context = :default
  context = opts[:context] if opts.has_key?(:context)
  context = opts.delete(:on) if opts.has_key?(:on)
  context = opts.delete(:when) if opts.has_key?(:when)
  context = opts.delete(:group) if opts.has_key?(:group)
  opts[:context] = context
  # FIX: was `opts.mergs!(defaults)` — a NoMethodError whenever defaults
  # were supplied. The block form keeps caller-provided values and only
  # fills in keys missing from opts.
  opts.merge!(defaults) { |_key, given, _default| given } unless defaults.nil?
  opts
end
# Given a new context, define valid_for_<context>? and
# all_valid_for_<context>? instance methods (thin wrappers around
# valid?/all_valid?) unless the class already provides them.
#
def create_context_instance_methods(context)
  # FIX: the old guard compared a String against Module#instance_methods,
  # which returns Symbols on Ruby 1.9+, so it never matched and the methods
  # were unconditionally redefined — violating the documented "if it does
  # not already exist" contract. method_defined? accepts either form.
  # define_method also avoids string-eval'd code.
  name = "valid_for_#{context}?"
  unless method_defined?(name)
    define_method(name) { valid?(context.to_sym) }
  end
  all = "all_valid_for_#{context}?"
  unless method_defined?(all)
    define_method(all) { all_valid?(context.to_sym) }
  end
end
# Create a new validator of the given klazz and push it onto the
# requested context for each of the attributes in the fields list
#
def add_validator_to_context(opts, fields, klazz)
fields.each do |field|
if opts[:context].is_a?(Symbol)
validators.context(opts[:context]) << klazz.new(field, opts)
create_context_instance_methods(opts[:context])
elsif opts[:context].is_a?(Array)
opts[:context].each do |c|
validators.context(c) << klazz.new(field, opts)
create_context_instance_methods(c)
end
end
end
end
end # module ClassMethods
end # module Validate
Resource.append_inclusions Validate
Model.append_extensions Validate::ClassMethods
end # module DataMapper
| 31.186603 | 93 | 0.656797 |
f7dd61fea1bb34f040ccdaf533f263e4eb88613f | 1,974 | require "rails_helper"
module FloodRiskEngine
RSpec.describe GridReferenceValidator, type: :model do
# Minimal ActiveModel host used to exercise GridReferenceValidator in three
# configurations: bare (true), with a custom :message, and with
# :allow_blank. All three attributes are seeded from the same input so one
# instance drives all three validations at once.
class Foo
include ActiveModel::Validations
attr_accessor :grid_reference, :with_message, :with_allow_blank
def initialize(input)
@grid_reference = @with_message = @with_allow_blank = input
end
# Default configuration: validator with no options.
validates(
:grid_reference,
"flood_risk_engine/grid_reference" => true
)
# Overrides the validator's error message.
validates(
:with_message,
"flood_risk_engine/grid_reference" => {
message: "Custom"
}
)
# Blank values are accepted for this attribute only.
validates(
:with_allow_blank,
"flood_risk_engine/grid_reference" => {
allow_blank: true
}
)
end
let(:grid_reference) { "ST 12345 67890" }
let(:foo) { Foo.new grid_reference }
let(:all_tested) { %i[grid_reference with_allow_blank with_message] }
it "should be valid" do
expect(foo.valid?).to be true
end
context "invalid grid_reference" do
let(:grid_reference) { "invalid" }
before do
foo.valid?
end
it "should raise errors on all" do
expect(foo.errors.keys.sort).to eq(all_tested)
end
it "should have error from exception" do
expect(foo.errors[:grid_reference]).not_to eq(["Custom"])
expect(foo.errors[:grid_reference].first).to be_a(String)
end
it "with_message should have custom message" do
expect(foo.errors[:with_message]).to eq(["Custom"])
end
end
context "blank grid_reference" do
let(:grid_reference) { "" }
before do
foo.valid?
end
it "should raise errors on all except with_allow_blank" do
expect(foo.errors.keys.sort).to eq(all_tested - [:with_allow_blank])
end
end
context "lower case grid_reference" do
let(:grid_reference) { "st 12345 67890" }
it "should be valid" do
expect(foo.valid?).to be true
end
end
end
end
| 25.636364 | 76 | 0.620061 |
3919a757d916d17b0a6213b2d05fc39ca3c52e13 | 980 | #
# This file is auto-generated, do not edit
#
module RecombeeApiClient
  require_relative 'request'
  require_relative '../errors'

  ##
  # Fetches every current property value stored for a single item.
  #
  class GetItemValues < ApiRequest
    attr_reader :item_id
    attr_accessor :timeout
    attr_accessor :ensure_https

    ##
    # * *Required arguments*
    #   - +item_id+ -> ID of the item whose property values are requested.
    #
    def initialize(item_id)
      @item_id = item_id
      @timeout = 1000
      @ensure_https = false
    end

    # HTTP verb used by this request.
    def method
      :get
    end

    # GET requests for this endpoint carry no body parameters.
    def body_parameters
      {}
    end

    # No query parameters are sent for this endpoint.
    def query_parameters
      {}
    end

    # Relative path to the endpoint ({databaseId} is substituted by the client).
    def path
      "/{databaseId}/items/#{@item_id}"
    end
  end
end
9106939f0170f568c5b2296d2aa51cebc13fa7f3 | 917 | $LOAD_PATH.push File.expand_path('../lib', __FILE__)
require 'activerecord/jsonb/associations/version'
# Gemspec for activerecord-jsonb-associations; the version constant comes
# from the version file required just above.
Gem::Specification.new do |spec|
spec.name = 'activerecord-jsonb-associations'
spec.version = ActiveRecord::JSONB::Associations::VERSION
spec.authors = ['Yury Lebedev']
spec.email = ['[email protected]']
spec.license = 'MIT'
spec.homepage =
'https://github.com/lebedev-yury/activerecord-jsonb-associations'
spec.summary =
'Gem for storing association information using PostgreSQL JSONB columns'
spec.description =
'Use PostgreSQL JSONB fields to store association information '\
'of your models'
# Everything shipped in the gem: app/config/db/lib trees plus top-level docs.
spec.files = Dir[
'{app,config,db,lib}/**/*', 'MIT-LICENSE', 'Rakefile', 'README.md'
]
spec.required_ruby_version = '>= 2.0'
spec.add_dependency 'activerecord', '>= 5.1.0'
spec.add_development_dependency 'rspec', '>= 3.7.0'
end
| 31.62069 | 76 | 0.684842 |
e29a31aba1d5999bab06c9fb4dd9e9a9682be306 | 2,373 | require File.dirname(__FILE__) + '/spec_helper.rb'
require File.dirname(__FILE__) + '/../lib/curies.rb'
describe "The Curie library" do
it "should be able to parse a safe curie into a fully qualified URI" do
Curie.parse("[foaf:person]").should eql("http://xmlns.com/foaf/0.1/person")
end
it "should be able to parse a safe curie into its components, a fully qualified prefix and a reference" do
Curie.parse("[foaf:person]", :in_parts => true).should eql(["http://xmlns.com/foaf/0.1/", "person"])
end
it "should be able to parse a standard curie into a fully qualified URI" do
Curie.parse("foaf:person").should eql("http://xmlns.com/foaf/0.1/person")
end
it "should be able to parse a standard curie into its components, a fully qualified prefix and a reference" do
Curie.parse("foaf:person", :in_parts => true).should eql(["http://xmlns.com/foaf/0.1/", "person"])
end
it "should raise an error when parsing a curie whose prefix has not been registed" do
lambda { Curie.parse("foo:bar") }.should raise_error("Sorry, no namespace or prefix registered for curies with the prefix foo")
end
it "should allow users to add new prefixes when the prefix is a symbol" do
Curie.add_prefixes! :foo => "http://foobaz.com"
Curie.parse("foo:bar", :in_parts => true).should eql(["http://foobaz.com", "bar"])
end
it "should allow users to add new prefixes when the prefix is a string" do
Curie.add_prefixes! "foo" => "http://foobaz.com"
Curie.parse("foo:bar", :in_parts => true).should eql(["http://foobaz.com", "bar"])
end
it "should allow users to remove prefixes when the prefix is a string" do
Curie.add_prefixes! "foo" => "http://foobaz.com"
Curie.parse("foo:bar", :in_parts => true).should eql(["http://foobaz.com", "bar"])
Curie.remove_prefixes! "foo"
lambda { Curie.parse("foo:bar") }.should raise_error("Sorry, no namespace or prefix registered for curies with the prefix foo")
end
# FIX: this example's description duplicated the String-based example above
# verbatim, while the body actually removes the prefix with a Symbol.
it "should allow users to remove prefixes when the prefix is a symbol" do
Curie.add_prefixes! "foo" => "http://foobaz.com"
Curie.parse("foo:bar", :in_parts => true).should eql(["http://foobaz.com", "bar"])
Curie.remove_prefixes! :foo
lambda { Curie.parse("foo:bar") }.should raise_error("Sorry, no namespace or prefix registered for curies with the prefix foo")
end
end | 49.4375 | 131 | 0.689844 |
4a639807511f980d48bf02f46bb9f450e3cafc8a | 6,504 | # Eneroth Townhouse System
# Copyright Julia Christina Eneroth, [email protected]
module EneBuildings
# Guard so UI elements are registered only once, even if SketchUp reloads
# this file.
file = __FILE__
unless file_loaded? file
file_loaded file
# Context menu for select tool.
UI.add_context_menu_handler do |menu|
# Building-specific entries appear only when the selection is a building.
if b = Building.get_from_selection
menu.add_separator
menu.add_item("Building Position") { Sketchup.active_model.select_tool BuildingPositionTool.new(b) }
menu.add_item("Building Properties") { b.properties_panel }
end
# Template-editing entries appear only while a template component is active.
if TemplateEditor.active_template_component
menu.add_separator
item = menu.add_item("Template Info") { TemplateEditor.info_toggle }
menu.set_validation_proc(item) { TemplateEditor.info_opened? ? MF_CHECKED : MF_UNCHECKED }
if TemplateEditor.inside_template_component
item = menu.add_item("Part Info") { TemplateEditor.part_info_toggle }
menu.set_validation_proc(item) { TemplateEditor.part_info_opened? ? MF_CHECKED : MF_UNCHECKED }
end
end
end
# Menu bar.
menu = UI.menu("Plugins").add_submenu NAME
item = menu.add_item("Add Building") { Sketchup.active_model.select_tool BuildingInsertTool.new }
menu.set_validation_proc(item) { TemplateEditor.inside_template_component ? MF_GRAYED : MF_ENABLED }
item = menu.add_item("Building Position") { Sketchup.active_model.select_tool BuildingPositionTool.new }
menu.set_validation_proc(item) { TemplateEditor.inside_template_component ? MF_GRAYED : MF_ENABLED }
item = menu.add_item("Building Properties") { Building.get_from_selection.properties_panel }
menu.set_validation_proc(item) { Building.selection_is_building? ? MF_ENABLED : MF_GRAYED }
menu.add_separator
t_menu = menu.add_submenu "Template Editing"
item = t_menu.add_item("New") { TemplateEditor.new }
t_menu.set_validation_proc(item) { TemplateEditor.new_available? ? MF_ENABLED : MF_GRAYED }
item = t_menu.add_item("Open...") { TemplateEditor.open }
t_menu.set_validation_proc(item) { TemplateEditor.inside_template_component ? MF_GRAYED : MF_ENABLED }
t_menu.add_item("Save") { TemplateEditor.save }
item = t_menu.add_item("Template Info") { TemplateEditor.info_toggle }
t_menu.set_validation_proc(item) { TemplateEditor.info_opened? ? MF_CHECKED : MF_UNCHECKED }
item = t_menu.add_item("Part Info") { TemplateEditor.part_info_toggle }
t_menu.set_validation_proc(item) { TemplateEditor.part_info_opened? ? MF_CHECKED : MF_UNCHECKED }
t_menu.add_separator
item = t_menu.add_item("Manually Downsample Previews") { Template.manually_resize_previews = !Template.manually_resize_previews }
t_menu.set_validation_proc(item) { Template.manually_resize_previews ? MF_CHECKED : MF_UNCHECKED }
# FIX: menu label typo — was "Update Previewss on Save".
# NOTE(review): `update_previes` also looks misspelled, but it is the
# accessor's actual name on TemplateEditor elsewhere in the plugin, so it
# must stay as-is here.
item = t_menu.add_item("Update Previews on Save") { TemplateEditor.update_previes = !TemplateEditor.update_previes }
t_menu.set_validation_proc(item) { TemplateEditor.update_previes ? MF_CHECKED : MF_UNCHECKED }
t_menu.add_separator
t_menu.add_item("Open Template Directory") { Template.open_dir }
t_menu.add_item("Documentation") { TemplateEditor.show_docs }
# Toolbar.
tb = UI::Toolbar.new NAME
cmd = UI::Command.new("Add Building") { Sketchup.active_model.select_tool BuildingInsertTool.new }
cmd.large_icon = "toolbar_icons/building_insert.png"
cmd.small_icon = "toolbar_icons/building_insert_small.png"
cmd.tooltip = "Add Building"
cmd.status_bar_text = "Draw new building to model."
cmd.set_validation_proc { TemplateEditor.inside_template_component ? MF_GRAYED : MF_ENABLED }
tb.add_item cmd
cmd = UI::Command.new("Building Position") { Sketchup.active_model.select_tool BuildingPositionTool.new }
cmd.large_icon = "toolbar_icons/building_position.png"
cmd.small_icon = "toolbar_icons/building_position_small.png"
cmd.tooltip = "Building Position"
cmd.status_bar_text = "Reposition building by changing their path and end angles."
cmd.set_validation_proc { TemplateEditor.inside_template_component ? MF_GRAYED : MF_ENABLED }
tb.add_item cmd
cmd = UI::Command.new("Building Properties") { Building.get_from_selection.properties_panel }
cmd.large_icon = "toolbar_icons/building_properties.png"
cmd.small_icon = "toolbar_icons/building_properties_small.png"
cmd.tooltip = "Building Properties"
cmd.status_bar_text = "Open properties window for selected building."
cmd.set_validation_proc { Building.selection_is_building? ? MF_ENABLED : MF_GRAYED }
tb.add_item cmd
tb.show
# Template Toolbar (separate so non-advanced users can hide it).
tb = UI::Toolbar.new "#{NAME} Template Editing"
cmd = UI::Command.new("New Template") { TemplateEditor.new }
cmd.large_icon = "toolbar_icons/template_new.png"
cmd.small_icon = "toolbar_icons/template_new_small.png"
cmd.tooltip = "New Template"
cmd.status_bar_text = "Create a new template component from the selected entities."
cmd.set_validation_proc { TemplateEditor.new_available? ? MF_ENABLED : MF_GRAYED }
tb.add_item cmd
cmd = UI::Command.new("Open Template...") { TemplateEditor.open }
cmd.large_icon = "toolbar_icons/template_open.png"
cmd.small_icon = "toolbar_icons/template_open_small.png"
cmd.tooltip = "Open Template..."
cmd.status_bar_text = "Load template component from library for editing."
cmd.set_validation_proc { TemplateEditor.inside_template_component ? MF_GRAYED : MF_ENABLED }
tb.add_item cmd
cmd = UI::Command.new("Save Template") { TemplateEditor.save }
cmd.large_icon = "toolbar_icons/template_save.png"
cmd.small_icon = "toolbar_icons/template_save_small.png"
cmd.tooltip = "Save Template"
cmd.status_bar_text = "Save template component to library."
tb.add_item cmd
tb.add_separator
cmd = UI::Command.new("Template Info") { TemplateEditor.info_toggle }
cmd.large_icon = "toolbar_icons/template_info.png"
cmd.small_icon = "toolbar_icons/template_info_small.png"
cmd.tooltip = "Template Info"
cmd.status_bar_text = "Set template information such as name, ID and built year."
cmd.set_validation_proc { TemplateEditor.info_opened? ? MF_CHECKED : MF_UNCHECKED }
tb.add_item cmd
cmd = UI::Command.new("Part Info") { TemplateEditor.part_info_toggle }
cmd.large_icon = "toolbar_icons/template_part_info.png"
cmd.small_icon = "toolbar_icons/template_part_info_small.png"
cmd.tooltip = "Part Information"
cmd.status_bar_text = "Set part information such as positioning within building."
cmd.set_validation_proc { TemplateEditor.part_info_opened? ? MF_CHECKED : MF_UNCHECKED }
tb.add_item cmd
tb.show
end
end
| 46.12766 | 131 | 0.771064 |
bb7ccc6b4c9ac1a0506148ba16785ec9718f5d4f | 485 | # encoding: UTF-8
# frozen_string_literal: true
require 'yaml'
namespace :seed do
# Idempotent seed for the "eth" blockchain row: creates it when missing,
# otherwise resets its status/height to the known-good values. Wrapped in
# a transaction so a partial update is never persisted.
task blockchains: :environment do
Blockchain.transaction do
blockchain = Blockchain.find_by_key("eth")
if blockchain.blank?
blockchain = Blockchain.create!("key":"eth", "status":"active", "height":5817659)
else
blockchain.key = "eth"
blockchain.status = "active"
blockchain.height = 5817659
blockchain.save!
end
end
end
end
| 24.25 | 89 | 0.645361 |
6253c24f1bbdf2949f5663aba38f24c59b8a8ad1 | 2,240 | require 'spec_helper'
# Controller specs for session handling: JSON sign-in (#create),
# sign-out (#destroy) and the login-route listing (#new).
describe SessionsController, type: :controller do
let!(:users) { create_list(:user, 2) }
let(:user) { users.first }
context "using json", :zoo_home_user do
# Force JSON negotiation and wire up the Devise mapping for every example.
before(:each) do
request.env["HTTP_ACCEPT"] = "application/json"
request.env["CONTENT_TYPE"] = "application/json"
request.env["devise.mapping"] = Devise.mappings[:user]
end
describe "#create" do
%w(login email display_name).each do |attr|
context "with login as #{ attr }" do
let(:params) do
{ password: user.password, login: user.send(attr) }
end
it 'should respond with the user object' do
post :create, user: params
expect(json_response).to include('users')
end
it 'should respond with a 200' do
post :create, user: params
expect(response.status).to eq(200)
end
it 'should sign in the user' do
expect(controller).to receive(:sign_in)
post :create, user: params
end
it "should ignore #{ attr } case" do
expect(controller).to receive(:sign_in)
params[:login] = user.send(attr).upcase
post :create, user: params
end
end
end
end
describe "#destroy" do
context "a signed in user" do
before(:each) do
request.env['devise.user'] = user
sign_in user
end
it 'should return no-content' do
delete :destroy
expect(response.status).to eq(204)
end
it 'should sign out the user' do
expect(controller).to receive(:sign_out)
delete :destroy
end
end
end
describe "#new" do
let(:providers) { %w(facebook) }
before(:each) do
get :new
end
it 'should return 200' do
expect(response.status).to eq(200)
end
it 'should return a json response of login routes' do
expect(json_response).to include('login', *providers)
end
it 'should return the url for each omniauth provider' do
expect(json_response['facebook']).to eq('/users/auth/facebook')
end
end
end
end
| 26.352941 | 71 | 0.570089 |
abd1847dd39dd67fb8fe9a68d428b6e51ac3f332 | 986 | # Copyright 2016, BlackBerry Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'chef/nexus'
require "#{File.dirname(__FILE__)}/../../../config.rb"
nexus 'chef-nexus rspec test' do
nexus_url NEXUS_URL
nexus_repo NEXUS_REPO
nexus_auth NEXUS_AUTH
use_auth USE_AUTH
update_if_exists true
remote_url "#{NEXUS_URL.chomp('/')}/repositories/#{NEXUS_REPO}/chef-nexus-rspec-test/no_parse"
local_file '/tmp/chef_nexus_rspec_temp/downloaded.test'
action :download
end
| 32.866667 | 96 | 0.762677 |
622b6b236ebf2a8460e47dec244f8338dd434f50 | 455 | module Slowpoke
module Migration
def connection
connection = super
if Slowpoke.migration_statement_timeout && !@migration_statement_timeout_set
connection.execute("SET statement_timeout = #{Slowpoke.migration_statement_timeout.to_i}")
@migration_statement_timeout_set = true
end
connection
end
end
end
ActiveSupport.on_load(:active_record) do
ActiveRecord::Migration.prepend(Slowpoke::Migration)
end
| 26.764706 | 98 | 0.749451 |
180d63ac4fc6e8b3ea85877652be0bb855d90f26 | 3,976 | # ----------------------------------------------------------------------------
# <copyright company="Aspose" file="copy_file_request.rb">
# Copyright (c) 2018-2020 Aspose Pty Ltd. All rights reserved.
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# </summary>
# ----------------------------------------------------------------------------
require_relative './imaging_request'
require_relative './http_request'
module AsposeImagingCloud
# Request model for copy_file operation.
class CopyFileRequest < ImagingRequest
# Copy file
# @param [String] src_path Source file path e.g. '/folder/file.ext'
# @param [String] dest_path Destination file path
# @param [String] src_storage_name Source storage name
# @param [String] dest_storage_name Destination storage name
# @param [String] version_id File version ID to copy
def initialize(src_path, dest_path, src_storage_name = nil, dest_storage_name = nil, version_id = nil)
@src_path = src_path
@dest_path = dest_path
@src_storage_name = src_storage_name
@dest_storage_name = dest_storage_name
@version_id = version_id
end
def to_http_info(config)
# verify the required parameter 'src_path' is set
if config.client_side_validation && @src_path.nil?
raise ArgumentError, "Missing the required parameter 'src_path' when calling ImagingApi.copy_file"
end
# verify the required parameter 'dest_path' is set
if config.client_side_validation && @dest_path.nil?
raise ArgumentError, "Missing the required parameter 'dest_path' when calling ImagingApi.copy_file"
end
# resource path
local_var_path = '/imaging/storage/file/copy/{srcPath}'.sub('{' + 'srcPath' + '}', @src_path.to_s)
# query parameters
query_params = {}
query_params[:destPath] = @dest_path
query_params[:srcStorageName] = @src_storage_name unless @src_storage_name.nil?
query_params[:destStorageName] = @dest_storage_name unless @dest_storage_name.nil?
query_params[:versionId] = @version_id unless @version_id.nil?
# form parameters
form_params = {}
# http body (model)
post_body = nil
auth_names = ['JWT']
# header parameters
header_params = {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = select_header_accept(['application/json'])
# HTTP header 'Content-Type'
header_params['Content-Type'] = form_params.any? ? 'multipart/form-data' : select_header_content_type(['application/json'])
AsposeImagingCloud::HttpRequest.new(local_var_path,
header_params,
query_params,
form_params,
post_body,
auth_names)
end
end
end
| 43.217391 | 129 | 0.654427 |
4ab2ec209f39cafebad43b269a422c3c2dc779a8 | 982 | require 'elastic_apm'
module ElasticAPM
module Grape
# ActiveSupport::Notifications.subscribe('endpoint_run.grape') do |name, started, finished, unique_id, payload|
# env = payload[:env]
# route_name = env['api.endpoint']&.routes&.first&.pattern&.origin || env['REQUEST_PATH']
# span_name = [env['REQUEST_METHOD'], route_name].join(' ')
# puts 'TEST HERE'
# if ElasticAPM.current_transaction
# ElasticAPM.current_transaction.name = span_name
# end
# end
# class ProcessGrapeNormalizer < ::ElasticAPM::Normalizers::Normalizer
# register 'endpoint_run.grape'
# TYPE = 'app.grape.request'.freeze
#
# def normalize(transaction, _name, payload)
# env = payload[:env]
# route_name = env['api.endpoint']&.routes&.first&.pattern&.origin || env['REQUEST_PATH']
# span_name = [env['REQUEST_METHOD'], route_name].join(' ')
# [span_name, TYPE, nil]
# end
# end
end
end | 36.37037 | 115 | 0.639511 |
03a50f16244c4a3e748df669f36e4110e49df3cf | 908 | Rails.application.routes.draw do
get 'password_resets/new'
get 'password_resets/edit'
get 'sessions/new'
root 'static_pages#home'
get '/help', to: 'static_pages#help'
get '/about', to: 'static_pages#about'
get '/contact', to: 'static_pages#contact'
get '/signup', to: 'users#new'
post '/signup', to: 'users#create'
get '/login', to: 'sessions#new'
post '/login', to: 'sessions#create'
delete '/logout', to: 'sessions#destroy'
resources :users do
member do
get :following, :followers
end
end
resources :account_activations, only: [:edit]
resources :password_resets, only: [:new, :create, :edit, :update]
resources :microposts, only: [:create, :destroy]
resources :relationships, only: [:create, :destroy]
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
end
| 30.266667 | 101 | 0.656388 |
017d99cd977a67ccc7d21ce90d30026b4e42544d | 1,601 | class Fastlane < Formula
desc "Easiest way to build and release mobile apps"
homepage "https://fastlane.tools"
url "https://github.com/fastlane/fastlane/archive/2.158.0.tar.gz"
sha256 "7e9b4e9092f9f19589eb988f1f9b3bde8fc07c81420a80f9fdbf9ae45f5c456b"
license "MIT"
head "https://github.com/fastlane/fastlane.git"
livecheck do
url :head
regex(/^([\d.]+)$/i)
end
bottle do
cellar :any
sha256 "9ffb9654b43b0d9397fd87caa9a8741b91c6407df60a5957c84122294d1e585c" => :catalina
sha256 "1886b6dd9d4e4f6de436029c73f8a81510439e570a0e1d8235f7b6f7e4757551" => :mojave
sha256 "892c75d45539a4456fc8b1a5858e912f7848b389050094f4694b76eeddd1b97e" => :high_sierra
sha256 "f38c3af19c22afb5b103204be93b5581cedb943add5b2baba8039542699eae15" => :x86_64_linux
end
depends_on "ruby"
# Builds and installs the gem into the formula's private GEM_HOME, then
# writes a wrapper script so the brewed Ruby and gem path are always used.
def install
ENV["GEM_HOME"] = libexec
ENV["GEM_PATH"] = libexec
system "gem", "build", "fastlane.gemspec"
system "gem", "install", "fastlane-#{version}.gem", "--no-document"
# Wrapper pins PATH/GEM_HOME/GEM_PATH before exec'ing the real binstub.
(bin/"fastlane").write <<~EOS
#!/bin/bash
export PATH="#{Formula["ruby"].opt_bin}:#{libexec}/bin:$PATH"
GEM_HOME="#{libexec}" GEM_PATH="#{libexec}" \\
exec "#{libexec}/bin/fastlane" "$@"
EOS
chmod "+x", bin/"fastlane"
end
# Smoke test: version banner plus a few well-known actions in the listing.
test do
assert_match "fastlane #{version}", shell_output("#{bin}/fastlane --version")
actions_output = shell_output("#{bin}/fastlane actions")
assert_match "gym", actions_output
assert_match "pilot", actions_output
assert_match "screengrab", actions_output
assert_match "supply", actions_output
end
end
| 32.02 | 94 | 0.709557 |
f745c56109a6f606b48fb8a65b2fbd8723834250 | 365 | require "bundler/setup"
require "dsl_parse"
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
# Use the modern expect(...) syntax only (no `should`).
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
| 24.333333 | 66 | 0.753425 |
f7d092c6a7845403753315577060f503f5a8abd5 | 1,991 | class DeviseTokenAuthCreateUsers < ActiveRecord::Migration[5.0]
def change
change_table :users do |t|
t.text :tokens
end
# add_index :users, :email
add_index :users, [:uid, :provider], :unique => true
# add_index :users , :reset_password_token, :unique => true
end
# def change
# create_table(:users) do |t|
# ## Required
# t.string :provider, :null => false, :default => "email"
# t.string :uid, :null => false, :default => ""
#
# ## Database authenticatable
# t.string :encrypted_password, :null => false, :default => ""
#
# ## Recoverable
# t.string :reset_password_token
# t.datetime :reset_password_sent_at
#
# ## Rememberable
# t.datetime :remember_created_at
#
# ## Trackable
# t.integer :sign_in_count, :default => 0, :null => false
# t.datetime :current_sign_in_at
# t.datetime :last_sign_in_at
# t.string :current_sign_in_ip
# t.string :last_sign_in_ip
#
# ## Confirmable
# t.string :confirmation_token
# t.datetime :confirmed_at
# t.datetime :confirmation_sent_at
# t.string :unconfirmed_email # Only if using reconfirmable
#
# ## Lockable
# # t.integer :failed_attempts, :default => 0, :null => false # Only if lock strategy is :failed_attempts
# # t.string :unlock_token # Only if unlock strategy is :email or :both
# # t.datetime :locked_at
#
# ## User Info
# t.string :name
# t.string :nickname
# t.string :image
# t.string :email
#
# ## Tokens
# t.json :tokens
#
# t.timestamps
# end
#
# add_index :users, :email, unique: true
# add_index :users, [:uid, :provider], unique: true
# add_index :users, :reset_password_token, unique: true
# add_index :users, :confirmation_token, unique: true
# # add_index :users, :unlock_token, unique: true
# end
end
| 31.109375 | 112 | 0.594174 |
871ddcac09b12358763dd4f1eaed8eee221573a4 | 996 | Sketchup::require "geores_src/geores_import/geores_rexml/child.rb"
module REXML
module DTD
class NotationDecl < Child
START = "<!NOTATION"
START_RE = /^\s*#{START}/um
PUBLIC = /^\s*#{START}\s+(\w[\w-]*)\s+(PUBLIC)\s+((["']).*?\4)\s*>/um
SYSTEM = /^\s*#{START}\s+(\w[\w-]*)\s+(SYSTEM)\s+((["']).*?\4)\s*>/um
def initialize src
super()
if src.match( PUBLIC )
md = src.match( PUBLIC, true )
elsif src.match( SYSTEM )
md = src.match( SYSTEM, true )
else
raise ParseException.new( "error parsing notation: no matching pattern", src )
end
@name = md[1]
@middle = md[2]
@rest = md[3]
end
def to_s
"<!NOTATION #@name #@middle #@rest>"
end
def write( output, indent )
indent( output, indent )
output << to_s
end
def NotationDecl.parse_source source, listener
md = source.match( PATTERN_RE, true )
thing = md[0].squeeze(" \t\n\r")
listener.send inspect.downcase, thing
end
end
end
end
| 24.9 | 83 | 0.588353 |
61d344b3a7cd8c16df7d12cf66235cef72c70a47 | 987 | require File.expand_path(File.dirname(__FILE__) + "/test_helper")
class ListDocLogsBangTest < MiniTest::Test
describe "doc_logs!" do
before do
DocRaptor.api_key "something something"
end
describe "with bogus arguments" do
it "should raise an error if something other than an options hash is passed in" do
assert_raises(ArgumentError) { DocRaptor.list_doc_logs!(true) }
assert_raises(ArgumentError) { DocRaptor.list_doc_logs!(nil) }
end
end
describe "with good arguments" do
it "should give me a valid response" do
stub_http_response_with("simple_doc_logs.xml", :get)
assert_equal file_fixture("simple_doc_logs.xml"), DocRaptor.list_doc_logs!.body
end
it "raise an exception when the list get fails" do
stub_http_response_with("invalid_list_docs.xml", :get, 422)
assert_raises(DocRaptorException::DocumentListingFailure) { DocRaptor.list_doc_logs! }
end
end
end
end
| 34.034483 | 94 | 0.70922 |
2898ef041e5f9805ba8519be058ac5031d5096df | 13,110 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require File.join(File.dirname(File.dirname(__FILE__)), 'accessible_form')
require File.join(File.dirname(File.dirname(__FILE__)), 'helper')
require File.join(File.dirname(__FILE__), 'accessible_event_implementation')
require File.join(
File.dirname(File.dirname(__FILE__)),
'util',
'common_functions'
)
require File.join(File.dirname(File.dirname(__FILE__)), 'util', 'id_generator')
require File.join(
File.dirname(File.dirname(__FILE__)),
'util',
'html',
'html_dom_parser'
)
##
# The Hatemile module contains the interfaces with the acessibility solutions.
module Hatemile
##
# The Hatemile::Implementation module contains the official implementation of
# interfaces solutions.
module Implementation
##
# The AccessibleFormImplementation class is official implementation of
# AccessibleForm interface.
class AccessibleFormImplementation < AccessibleForm
public_class_method :new
##
# The ID of script element that contains the list of IDs of fields with
# validation.
ID_SCRIPT_LIST_VALIDATION_FIELDS =
'hatemile-scriptlist-validation-fields'.freeze
##
# The ID of script element that execute validations on fields.
ID_SCRIPT_EXECUTE_VALIDATION = 'hatemile-validation-script'.freeze
##
# The client-site required fields list.
REQUIRED_FIELDS_LIST = 'required_fields'.freeze
##
# The client-site pattern fields list.
PATTERN_FIELDS_LIST = 'pattern_fields'.freeze
##
# The client-site fields with length list.
LIMITED_FIELDS_LIST = 'fields_with_length'.freeze
##
# The client-site range fields list.
RANGE_FIELDS_LIST = 'range_fields'.freeze
##
# The client-site week fields list.
WEEK_FIELDS_LIST = 'week_fields'.freeze
##
# The client-site month fields list.
MONTH_FIELDS_LIST = 'month_fields'.freeze
##
# The client-site datetime fields list.
DATETIME_FIELDS_LIST = 'datetime_fields'.freeze
##
# The client-site time fields list.
TIME_FIELDS_LIST = 'time_fields'.freeze
##
# The client-site date fields list.
DATE_FIELDS_LIST = 'date_fields'.freeze
##
# The client-site email fields list.
EMAIL_FIELDS_LIST = 'email_fields'.freeze
##
# The client-site URL fields list.
URL_FIELDS_LIST = 'url_fields'.freeze
protected
##
# Returns the appropriate value for attribute aria-autocomplete of field.
#
# @param field [Hatemile::Util::Html::HTMLDOMElement] The field.
# @return [String] The ARIA value of field.
def get_aria_autocomplete(field)
tag_name = field.get_tag_name
type = nil
if field.has_attribute?('type')
type = field.get_attribute('type').downcase
end
if (tag_name == 'TEXTAREA') ||
(
(tag_name == 'INPUT') &&
!(
(type == 'button') ||
(type == 'submit') ||
(type == 'reset') ||
(type == 'image') ||
(type == 'file') ||
(type == 'checkbox') ||
(type == 'radio') ||
(type == 'hidden')
)
)
value = nil
if field.has_attribute?('list')
list_id = field.get_attribute('list')
unless @parser.find("datalist[id=\"#{list_id}\"]").first_result.nil?
return 'list'
end
end
if field.has_attribute?('autocomplete')
value = field.get_attribute('autocomplete').downcase
else
form = @parser.find(field).find_ancestors('form').first_result
if form.nil? && field.has_attribute?('form')
form = @parser.find(
"##{field.get_attribute('form')}"
).first_result
end
if !form.nil? && form.has_attribute?('autocomplete')
value = form.get_attribute('autocomplete').downcase
end
end
return 'both' if value == 'on'
return 'none' if value == 'off'
end
nil
end
##
# Include the scripts used by solutions.
def generate_validation_scripts
local = @parser.find('head,body').first_result
unless local.nil?
if @parser.find(
"##{AccessibleEventImplementation::ID_SCRIPT_COMMON_FUNCTIONS}"
).first_result.nil?
common_functions_script = @parser.create_element('script')
common_functions_script.set_attribute(
'id',
AccessibleEventImplementation::ID_SCRIPT_COMMON_FUNCTIONS
)
common_functions_script.set_attribute('type', 'text/javascript')
common_functions_script.append_text(
File.read(
File.join(
File.dirname(File.dirname(File.dirname(__FILE__))),
'js',
'common.js'
)
)
)
local.prepend_element(common_functions_script)
end
end
@script_list_fields_with_validation = @parser.find(
"##{ID_SCRIPT_LIST_VALIDATION_FIELDS}"
).first_result
if @script_list_fields_with_validation.nil?
@script_list_fields_with_validation = @parser.create_element('script')
@script_list_fields_with_validation.set_attribute(
'id',
ID_SCRIPT_LIST_VALIDATION_FIELDS
)
@script_list_fields_with_validation.set_attribute(
'type',
'text/javascript'
)
@script_list_fields_with_validation.append_text(
File.read(
File.join(
File.dirname(File.dirname(File.dirname(__FILE__))),
'js',
'scriptlist_validation_fields.js'
)
)
)
local.append_element(@script_list_fields_with_validation)
end
if @parser.find("##{ID_SCRIPT_EXECUTE_VALIDATION}").first_result.nil?
script_function = @parser.create_element('script')
script_function.set_attribute('id', ID_SCRIPT_EXECUTE_VALIDATION)
script_function.set_attribute('type', 'text/javascript')
script_function.append_text(
File.read(
File.join(
File.dirname(File.dirname(File.dirname(__FILE__))),
'js',
'validation.js'
)
)
)
@parser.find('body').first_result.append_element(script_function)
end
@scripts_added = true
end
##
# Validate the field when its value change.
#
# @param field [Hatemile::Util::Html::HTMLDOMElement] The field.
# @param list_attribute [String] The list attribute of field with
# validation.
def validate(field, list_attribute)
generate_validation_scripts unless @scripts_added
@id_generator.generate_id(field)
@script_list_fields_with_validation.append_text(
"hatemileValidationList.#{list_attribute}.push(" \
"'#{field.get_attribute('id')}');"
)
end
public
##
# Initializes a new object that manipulate the accessibility of the forms
# of parser.
#
# @param parser [Hatemile::Util::Html::HTMLDOMParser] The HTML parser.
def initialize(parser)
Hatemile::Helper.require_not_nil(parser)
Hatemile::Helper.require_valid_type(
parser,
Hatemile::Util::Html::HTMLDOMParser
)
@parser = parser
@id_generator = Hatemile::Util::IDGenerator.new('form')
@scripts_added = false
@script_list_fields_with_validation = nil
end
##
# @see Hatemile::AccessibleForm#mark_required_field
def mark_required_field(required_field)
return unless required_field.has_attribute?('required')
required_field.set_attribute('aria-required', 'true')
end
##
# @see Hatemile::AccessibleForm#mark_all_required_fields
def mark_all_required_fields
required_fields = @parser.find('[required]').list_results
required_fields.each do |required_field|
if Hatemile::Util::CommonFunctions.is_valid_element?(required_field)
mark_required_field(required_field)
end
end
end
##
# @see Hatemile::AccessibleForm#mark_range_field
def mark_range_field(range_field)
if range_field.has_attribute?('min')
range_field.set_attribute(
'aria-valuemin',
range_field.get_attribute('min')
)
end
return unless range_field.has_attribute?('max')
range_field.set_attribute(
'aria-valuemax',
range_field.get_attribute('max')
)
end
##
# @see Hatemile::AccessibleForm#mark_all_range_fields
def mark_all_range_fields
range_fields = @parser.find('[min],[max]').list_results
range_fields.each do |range_field|
if Hatemile::Util::CommonFunctions.is_valid_element?(range_field)
mark_range_field(range_field)
end
end
end
##
# @see Hatemile::AccessibleForm#mark_autocomplete_field
def mark_autocomplete_field(autocomplete_field)
aria_autocomplete = get_aria_autocomplete(autocomplete_field)
return if aria_autocomplete.nil?
autocomplete_field.set_attribute('aria-autocomplete', aria_autocomplete)
end
##
# @see Hatemile::AccessibleForm#mark_all_autocomplete_fields
def mark_all_autocomplete_fields
elements = @parser.find(
'input[autocomplete],textarea[autocomplete],form[autocomplete] ' \
'input,form[autocomplete] textarea,[list],[form]'
).list_results
elements.each do |element|
if Hatemile::Util::CommonFunctions.is_valid_element?(element)
mark_autocomplete_field(element)
end
end
end
##
# @see Hatemile::AccessibleForm#mark_invalid_field
def mark_invalid_field(field)
if field.has_attribute?('required') ||
(
field.has_attribute?('aria-required') &&
field.get_attribute('aria-required').casecmp('true').zero?
)
validate(field, REQUIRED_FIELDS_LIST)
end
validate(field, PATTERN_FIELDS_LIST) if field.has_attribute?('pattern')
if field.has_attribute?('minlength') ||
field.has_attribute?('maxlength')
validate(field, LIMITED_FIELDS_LIST)
end
if field.has_attribute?('aria-valuemin') ||
field.has_attribute?('aria-valuemax')
validate(field, RANGE_FIELDS_LIST)
end
return unless field.has_attribute?('type')
type_field = field.get_attribute('type').downcase
if type_field == 'week'
validate(field, WEEK_FIELDS_LIST)
elsif type_field == 'month'
validate(field, MONTH_FIELDS_LIST)
elsif %w[datetime-local datetime].include?(type_field)
validate(field, DATETIME_FIELDS_LIST)
elsif type_field == 'time'
validate(field, TIME_FIELDS_LIST)
elsif type_field == 'date'
validate(field, DATE_FIELDS_LIST)
elsif %w[number range].include?(type_field)
validate(field, RANGE_FIELDS_LIST)
elsif type_field == 'email'
validate(field, EMAIL_FIELDS_LIST)
elsif type_field == 'url'
validate(field, AccessibleFormImplementation.URL_FIELDS_LIST)
end
end
##
# @see Hatemile::AccessibleForm#mark_all_invalid_fields
def mark_all_invalid_fields
fields = @parser.find(
'[required],input[pattern],input[minlength],input[maxlength],' \
'textarea[minlength],textarea[maxlength],input[type=week],' \
'input[type=month],input[type=datetime-local],' \
'input[type=datetime],input[type=time],input[type=date],' \
'input[type=number],input[type=range],input[type=email],' \
'input[type=url],[aria-required=true],input[aria-valuemin],' \
'input[aria-valuemax]'
).list_results
fields.each do |field|
if Hatemile::Util::CommonFunctions.is_valid_element?(field)
mark_invalid_field(field)
end
end
end
end
end
end
| 33.875969 | 80 | 0.614569 |
0818dd9cf1e604e9f91db4355ec7b200554ebc61 | 31,401 | require 'traject'
require 'traject/util'
require 'traject/indexer/settings'
require 'traject/horizon_bib_auth_merge'
require 'marc'
require 'marc/marc4j' # for marc4j jars
module Traject
#
# = Settings
#
# == Connection
#
# [horizon.jdbc_url] JDBC connection URL using jtds. Should include username, but not password.
# See `horizon.jdbc_password` setting, kept seperate so we can try to suppress
# it from logging. Eg: "jdbc:jtds:sybase://horizon.lib.univ.edu:2025/dbname;user=dbuser"
# * In command line, you'll have to use quotes: -s 'horizon.jdbc_url=jdbc:jtds:sybase://horizon.lib.univ.edu:2025/dbname;user=dbuser'
#
#
# [horizon.password] Password to use for JDBC connection. We'll try to suppress it from being logged.
#
# Instead of jdbc_url, you can provide all the other elements individually:
#
# [horizon.host]
# [horizon.port] (default 2025)
# [horizon.user]
# [horizon.database] (default 'horizon')
# [horizon.jtds_type] default 'sybase', or 'sqlserver'
#
# [horizon.timeout] value in SECONDS given to jtds driver for socketTimeout and loginTimeout.
# if this is not set, if Horizon db or server goes down in mid-export,
# your program may hang forever. However, the trade-off is when this is set,
# a slow query may trigger timeout. As a compromise, we've set DEFAULT
# to 1200 seconds (20 minutes).
#
#
# == What to export
#
# Normally exports the entire horizon database, for diagnostic or batch purposes you
# can export just one bib, or a range of bibs instead.
#
# [horizon.first_bib] Greater than equal to this bib number. Can be combined with horizon.last_bib
# [horizon.last_bib] Less than or equal to this bib number. Can be combined with horizon.first_bib
# [horizon.only_bib] Only this single bib number.
#
# You can also control whether to export staff-only bibs, copies, and items.
#
# [horizon.public_only] Default true. If set to true, only includes bibs that are NOT staff_only,
# also only include copy/item that are not staff-only if including copy/item.
#
# You can also exclude certain tags:
#
# [horizon.exclude_tags] Default nil. A comma-seperated string (so easy to supply on command line)
# of tag names to exclude from export. You probably want to at least include the tags
# you are using for horizon.item_tag and horizon.copy_tag, to avoid collision
# from tags already in record.
#
# == Item/Copy Inclusion
#
# The HorizonReader can export MARC with holdings information (horizon items and copies) included
# in the MARC. Each item or copy record will be represented as one marc field -- the tags
# used are configurable. You can configure how individual columns from item or copy tables
# map to MARC subfields in that field -- and also include columns from other tables joined
# to item or copy.
#
# [horizon.include_holdings] * false, nil, or empty string: Do not include holdings. (DEFAULT)
# * all: include copies and items
# * items: only include items
# * copies: only include copies
# * direct: only include copies OR items, but not both; if bib has
# include copies, otherwise include items if present.
#
# Each item or copy will be one marc field, you can configure what tags these fields
# will have.
#
# [horizon.item_tag] Default "991".
# [horizon.copy_tag] Default "937"
#
# Which columns from item or copy tables will be mapped to which subfields in those
# fields is controlled by hashes in settings, hash from column name (with table prefix)
# to subfield code. There are defaults, see HorizonReader.default_settings. Example for
# item_map default:
#
# "horizon.item_map" => {
# "item.call_reconstructed" => "a",
# "collection.call_type" => "b",
# "item.copy_reconstructed" => "c",
# "call_type.processor" => "f",
# "item.item#" => "i",
# "item.collection" => "l",
# "item.location" => "m",
# "item.notes" => "n",
# "item.staff_only" => "q"
# }
#
# [horizon.item_map]
# [horizon.copy_map]
#
# The column-to-subfield maps can include columns from other tables
# joined in, with a join clause configured in settings too.
# By default both item and copy join to: collection, and call_type --
# using some clever SQL to join to call_type on the item/copy fk, OR the
# associated collection fk if no specific item/copy one is defined.
#
# [horizon.item_join_clause]
# [horizon.copy_join_clause]
#
# == Character Encoding
#
# The HorizonReader can convert from Marc8 to UTF8. By default `horizon.source_encoding` is set to "MARC8"
# and `horizon.destination_encoding` is set to "UTF8", which will make it do that conversion, as well
# as set the leader byte for char encoding properly.
#
# Any other configuration of those settings, and no transcoding will take place, HorizonReader
# is not currently capable of doing any other transcoding. Set
# or `horizon.destination_encoding` to nil if you don't want any transcoding to happen --
# you'd only want this for diagnostic purposes, or if your horizon db is already utf8 (is
# that possible? We don't know.)
#
# [horizon.codepoint_translate] translates from Horizon's weird <U+nnnn> unicode
# codepoint escaping to actual UTF-8 bytes. Defaults to true. Will be ignored
# unless horizon.destination_encoding is UTF8 though.
#
# [horizon.character_reference_translate] Default true. Convert HTML/XML-style
# character references like "‏" to actual UTF-8 bytes, when converting
# to UTF8. These character references are oddly legal representations of UTF8 in
# MARC8. http://www.loc.gov/marc/specifications/speccharconversion.html#lossless
#
# Note HorizonReader will also remove control chars from output (except for ones
# with legal meaning in binary MARC) -- these are errors in Horizon db which mean
# nothing, are illegal in MARC binary serialization, and can mess things up.
#
# == Misc
#
# [horizon.batch_size] Batch size to use for fetching item/copy info on each bib. Default 400.
# [debug_ascii_progress] if true, will output a "<" and a ">" to stderr around every copy/item
# subsidiary fetch. See description of this setting in docs/settings.md
#
# [jtds.jar_path] Normally we'll use a distribution of jtds bundled with this gem.
# But specify a filepath of a directory containing jtds jar(s),
# and all jars in that dir will be loaded instead of our bundled jtds.
class HorizonReader
attr_reader :settings
attr_reader :things_to_close
# We ignore the iostream even though we get one, we're gonna
# read from a Horizon DB!
def initialize(iostream, settings)
# we ignore the iostream, we're fetching from Horizon db
@settings = Traject::Indexer::Settings.new( self.class.default_settings).merge(settings)
require_jars!
logger.info(" #{self.class.name} reading records from #{jdbc_url(false)}")
end
# Requires marc4j and jtds, and java_import's some classes.
def require_jars!
require 'jruby'
# ask marc-marc4j gem to load the marc4j jars
MARC::MARC4J.new(:jardir => settings['marc4j_reader.jar_dir'])
# For some reason we seem to need to java_import it, and use
# a string like this. can't just refer to it by full
# qualified name, not sure why, but this seems to work.
java_import "org.marc4j.converter.impl.AnselToUnicode"
unless defined? Java::net.sourceforge.jtds.jdbc.Driver
jtds_jar_dir = settings["jtds.jar_path"] || File.expand_path("../../vendor/jtds", File.dirname(__FILE__))
Dir.glob("#{jtds_jar_dir}/*.jar") do |x|
require x
end
# For confusing reasons, in normal Java need to
# Class.forName("net.sourceforge.jtds.jdbc.Driver")
# to get the jtds driver to actually be recognized by JDBC.
#
# In Jruby, Class.forName doesn't work, but this seems
# to do the same thing:
Java::net.sourceforge.jtds.jdbc.Driver
end
# So we can refer to these classes as just ResultSet, etc.
java_import java.sql.ResultSet, java.sql.PreparedStatement, java.sql.Driver
end
def fetch_result_set!(conn)
#fullbib is a view in Horizon, I think it was an SD default view, that pulls
#in stuff from multiple tables, including authority tables, to get actual
# text.
# You might think need an ORDER BY, but doing so makes it incredibly slow
# to retrieve results, can't do it. We just count on the view returning
# the rows properly. (ORDER BY bib#, tagord)
#
# We start with the fullbib view defined out of the box in Horizon, but
# need to join in bib_control to have access to the staff_only column.
#
sql = <<-EOS
SELECT b.bib#, b.tagord, b.tag,
indicators = substring(b.indicators+' ',1,2)+a.indicators,
b.text, b.cat_link_type#, b.cat_link_xref#, b.link_type,
bl.longtext, xref_text = a.text, xref_longtext = al.longtext,
b.timestamp, auth_timestamp = a.timestamp,
bc.staff_only
FROM bib b
left join bib_control bc on b.bib# = bc.bib#
left join bib_longtext bl on b.bib# = bl.bib# and b.tag = bl.tag and b.tagord = bl.tagord
left join auth a on b.cat_link_xref# = a.auth# and a.tag like '1[0-9][0-9]'
left join auth_longtext al on b.cat_link_xref# = al.auth# and al.tag like '1[0-9][0-9]'
WHERE 1 = 1
EOS
sql = <<-EOS
SELECT b.*
FROM fullbib b
WHERE 1 = 1
EOS
# Oddly, Sybase seems to do a lot BETTER when we make this a sub-query
# as opposed to a join. Join was resulting in "Can't allocate space for object 'temp worktable' in database 'tempdb'"
# from Sybase, but somehow we get away with subquery?
#
# Note this subquery we managed to not refer to outer scope, that's the key.
if settings["horizon.public_only"].to_s == "true"
sql+= " AND b.bib# NOT IN (SELECT DISTINCT bc.bib# from bib_control bc WHERE bc.staff_only = 1) "
end
# settings should not be coming from untrusted user input not going
# to bother worrying about sql injection.
if settings.has_key? "horizon.only_bib"
sql += " AND b.bib# = #{settings['horizon.only_bib']} "
elsif settings.has_key?("horizon.first_bib") || settings.has_key?("horizon.last_bib")
clauses = []
clauses << " b.bib# >= #{settings['horizon.first_bib']}" if settings['horizon.first_bib']
clauses << " b.bib# <= #{settings['horizon.last_bib']}" if settings['horizon.last_bib']
sql += " AND " + clauses.join(" AND ") + " "
end
# without the order by, rows come back in mostly the right order,
# by bib#, but fairly common do NOT as well.
# -- when they don't, it can cause one real
# record to be split up into multiple partial output record, which
# cna overwrite each other in the solr index.
#
# So we sort -- which makes query slower, and makes it a lot harder
# to avoid Sybase "cannot allocate space" errors, but we've got
# no real choice. Ideally we might include 'tagord'
# in the sort too, but that seems to make performance even worse,
# we're willing to risk tags not being reassembled in exactly the
# right order, usually they are anyway, and it doesn't usually matter anyway.
sql+= " ORDER BY b.bib# " # ", tagord" would be even better, but slower.
pstmt = conn.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
# this may be what's neccesary to keep the driver from fetching
# entire result set into memory.
pstmt.setFetchSize(10000)
logger.debug("HorizonReader: Executing query: #{sql}")
rs = pstmt.executeQuery
logger.debug("HorizonReader: Executed!")
return rs
end
# Converts from Marc8 to UTF8 if neccesary.
#
# Also replaces escaped unicode codepoints using custom Horizon "<U+nnnn>" format
# Or standard MARC 'lossless encoding' "&#xHHHH;" format.
def convert_text!(text, error_handler)
text = AnselToUnicode.new(error_handler, true).convert(text) if convert_marc8_to_utf8?
# Turn Horizon's weird escaping into UTF8: <U+nnnn> where nnnn is a hex unicode
# codepoint, turn it UTF8 for that codepoint
if settings["horizon.destination_encoding"] == "UTF8" &&
(settings["horizon.codepoint_translate"].to_s == "true" ||
settings["horizon.character_reference_translate"].to_s == "true")
regexp = if settings["horizon.codepoint_translate"].to_s == "true" && settings["horizon.character_reference_translate"].to_s == "true"
# unicode codepoint in either HTML char reference form OR
# weird horizon form.
/(?:\<U\+|&#x)([0-9A-Fa-f]{4})(?:\>|;)/
elsif settings["horizon.codepoint_translate"].to_s == "true"
# just weird horizon form
/\<U\+([0-9A-Fa-f]{4})\>/
else # just character references
/&#x([0-9A-Fa-f]{4});/
end
text.gsub!(regexp) do
[$1.hex].pack("U")
end
end
# eliminate illegal control chars. All ASCII less than 0x20
# _except_ for four legal ones (including MARC delimiters).
# http://www.loc.gov/marc/specifications/specchargeneral.html#controlfunction
# this is all bytes from 0x00 to 0x19 except for the allowed 1B, 1D, 1E, 1F.
begin
text.gsub!(/[\x00-\x1A\x1C]/, '')
rescue StandardError => e
logger.info "HorizonReader, illegal chars found #{e}"
logger.info text
text.scrub!
end
return text
end
# Read rows from horizon database, assemble them into MARC::Record's, and yield each
# MARC::Record to caller.
def each
# Need to close the connection, teh result_set, AND the result_set.getStatement when
# we're done.
connection = open_connection!
# We're going to need to ask for item/copy info while in the
# middle of streaming our results. JDBC is happier and more performant
# if we use a seperate connection for this.
extra_connection = open_connection! if include_some_holdings?
# We're going to make our marc records in batches, and only yield
# them to caller in batches, so we can fetch copy/item info in batches
# for efficiency.
batch_size = settings["horizon.batch_size"].to_i
record_batch = []
exclude_tags = (settings["horizon.exclude_tags"] || "").split(",")
rs = self.fetch_result_set!(connection)
current_bib_id = nil
record = nil
record_count = 0
error_handler = org.marc4j.ErrorHandler.new
while(rs.next)
bib_id = rs.getInt("bib#");
if bib_id != current_bib_id
record_count += 1
if settings["debug_ascii_progress"] && (record_count % settings["solrj_writer.batch_size"] == 0)
$stderr.write ","
end
# new record! Put old one on batch queue.
record_batch << record if record
# prepare and yield batch?
if (record_count % batch_size == 0)
enhance_batch!(extra_connection, record_batch)
record_batch.each do |r|
# set current_bib_id for error logging
current_bib_id = r['001'].value
yield r
end
record_batch.clear
end
# And start new record we've encountered.
error_handler = org.marc4j.ErrorHandler.new
current_bib_id = bib_id
record = MARC::Record.new
record.append MARC::ControlField.new("001", bib_id.to_s)
end
tagord = rs.getInt("tagord");
tag = rs.getString("tag")
# just silently skip it, some weird row in the horizon db, it happens.
# plus any of our exclude_tags.
next if tag.nil? || tag == "" || exclude_tags.include?(tag)
indicators = rs.getString("indicators")
# a packed byte array could be in various columns, in order of preference...
# the xref stuff is joined in from the auth table
# Have to get it as bytes and then convert it to String to avoid JDBC messing
# up the encoding marc8 grr
authtext = rs.getBytes("xref_longtext") || rs.getBytes("xref_text")
text = rs.getBytes("longtext") || rs.getBytes("text")
if tag == "000"
# Horizon puts a \x1E marc field terminator on the end of hte
# leader in the db too, but that's not really part of it.
record.leader = String.from_java_bytes(text).chomp("\x1E")
fix_leader!(record.leader)
elsif tag != "001"
# we add an 001 ourselves with bib id in another part of code.
field = build_marc_field!(error_handler, tag, indicators, text, authtext)
record.append field unless field.nil?
end
end
# last one
record_batch << record if record
# yield last batch
enhance_batch!(extra_connection, record_batch)
record_batch.each do |r|
yield r
end
record_batch.clear
rescue Exception => e
logger.fatal "HorizonReader, unexpected exception at bib id:#{current_bib_id}: #{e}"
raise e
ensure
logger.info("HorizonReader: Closing all JDBC objects...")
# have to cancel the statement to keep us from waiting on entire
# result set when exception is raised in the middle of stream.
statement = rs && rs.getStatement
if statement
statement.cancel
statement.close
end
rs.close if rs
# shouldn't actually need to close the resultset and statement if we cancel, I think.
connection.close if connection
extra_connection.close if extra_connection
logger.info("HorizonReader: Closed JDBC objects")
end
# Returns a DataField or ControlField, can return
# nil if determined no field can/should be created.
#
# Do not call for field '0' (leader) or field 001,
# this doesn't handle those, will just return nil.
#
# First arg is a Marc4J ErrorHandler object, kind of a weird implementation
# detail.
#
# Other args are objects fetched from Horizon db via JDBC --
# text and authtext must be byte arrays.
def build_marc_field!(error_handler, tag, indicators, text, authtext)
# convert text and authtext from java bytes to a ruby
# binary string.
if text
text = String.from_java_bytes(text)
text.force_encoding("binary")
end
if authtext
authtext = String.from_java_bytes(authtext)
authtext.force_encoding("binary")
end
text = Traject::HorizonBibAuthMerge.new(tag, text, authtext).merge!
return nil if text.nil? # sometimes there's nothing there, skip it.
# convert from MARC8 to UTF8 if needed
text = convert_text!(text, error_handler)
if MARC::ControlField.control_tag?(tag)
# control field
return MARC::ControlField.new(tag, text )
else
# data field
indicator1 = indicators.slice(0)
indicator2 = indicators.slice(1)
data_field = MARC::DataField.new( tag, indicator1, indicator2 )
subfields = text.split("\x1F")
subfields.each do |subfield|
next if subfield.empty?
subfield_code = subfield.slice(0)
subfield_text = subfield.slice(1, subfield.length)
data_field.append MARC::Subfield.new(subfield_code, subfield_text)
end
return data_field
end
end
# Pass in an array of MARC::Records; adds fields for copy and item
# info if so configured (via 'horizon.include_holdings'). Returns
# record_batch so you can chain if you want.
def enhance_batch!(conn, record_batch)
  return record_batch if record_batch.nil? || record_batch.empty?
  # Copy-level holdings are fetched in 'all', 'copies' or 'direct' modes.
  copy_info = get_joined_table(
    conn, record_batch,
    :table_name => "copy",
    :column_map => settings['horizon.copy_map'],
    :join_clause => settings['horizon.copy_join_clause'],
    :public_only => (settings['horizon.public_only'].to_s == "true")
  ) if %w{all copies direct}.include? settings['horizon.include_holdings'].to_s
  # Item-level holdings are fetched in 'all', 'items' or 'direct' modes.
  item_info = get_joined_table(
    conn, record_batch,
    :table_name => "item",
    :column_map => settings['horizon.item_map'],
    :join_clause => settings['horizon.item_join_clause'],
    :public_only => (settings['horizon.public_only'].to_s == "true")
  ) if %w{all items direct}.include? settings['horizon.include_holdings'].to_s
  if item_info || copy_info
    record_batch.each do |record|
      # Rows are keyed by the record's 001 (bib id).
      id = record['001'].value.to_s
      record_copy_info = copy_info && copy_info[id]
      record_item_info = item_info && item_info[id]
      # One MARC field (tag from 'horizon.copy_tag') per copy row.
      record_copy_info.each do |copy_row|
        field = MARC::DataField.new( settings["horizon.copy_tag"] )
        copy_row.each_pair do |subfield, value|
          field.append MARC::Subfield.new(subfield, value)
        end
        record.append field
      end if record_copy_info
      # One MARC field (tag from 'horizon.item_tag') per item row. In
      # 'direct' mode, items are added only when no copy rows exist.
      record_item_info.each do |item_row|
        field = MARC::DataField.new( settings["horizon.item_tag"] )
        item_row.each_pair do |subfield, value|
          field.append MARC::Subfield.new(subfield, value)
        end
        record.append field
      end if record_item_info && ((settings['horizon.include_holdings'].to_s != "direct") || record_copy_info.empty?)
    end
  end
  return record_batch
end
# Can be used to fetch a batch of subsidiary info from other tables:
# used to fetch item or copy information. Can fetch with joins too.
# Usually called by passing in settings, but a literal call might look
# something like this for items:
#
#  get_joined_table(jdbc_conn, array_of_marc_records,
#    :table_name => "item",
#    :column_map => {"item.item#" => "i", "call_type.processor" => "k"},
#    :join_clause => "JOIN call_type ON item.call_type = call_type.call_type"
#  )
#
# Returns a hash keyed by bibID, value is an array of hashes of subfield->value, eg:
#
#  {'343434' => [
#     {
#       'i' => "012124" # item.item#
#       'k' => 'lccn'   # call_type.processor
#     }
#   ]
#  }
#
# Can also pass in a `:public_only => true` option, will add on a
# staff_only != 1 where clause; assumes primary table has a staff_only column.
def get_joined_table(conn, batch, options = {})
  table_name = options[:table_name] or raise ArgumentError.new("Need a :table_name option")
  column_map = options[:column_map] or raise ArgumentError.new("Need a :column_map option")
  join_clause = options[:join_clause] || ""
  public_only = options[:public_only]
  # Default every missing bib key to a fresh array so rows can be appended.
  results = Hash.new {|h, k| h[k] = [] }
  # NOTE(review): bib ids come from each record's 001 field and are
  # interpolated unquoted into the IN clause below; presumed numeric.
  bib_ids_joined = batch.collect do |record|
    record['001'].value.to_s
  end.join(",")
  # We include the column name with prefix as an "AS", so we can fetch it out
  # of the result set later just like that.
  columns_clause = column_map.keys.collect {|c| "#{c} AS '#{c}'"}.join(",")
  sql = <<-EOS
SELECT bib#, #{columns_clause}
FROM #{table_name}
#{join_clause}
WHERE bib# IN (#{bib_ids_joined})
  EOS
  if public_only
    sql += " AND staff_only != 1"
  end
  $stderr.write "<" if settings["debug_ascii_progress"]
  # It might be higher performance to refactor to re-use the same prepared statement
  # for each item/copy fetch... but appears to be no great way to do that in JDBC3
  # where you need to parameterize "IN" values. JDBC4 has got it, but jTDS is just JDBC3.
  pstmt = conn.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
  rs = pstmt.executeQuery
  while (rs.next)
    bib_id = rs.getString("bib#")
    row_hash = {}
    column_map.each_pair do |column, subfield|
      value = rs.getString( column )
      if value
        # Okay, total hack to deal with the fact that holding notes
        # seem to be in UTF8 even though records are in MARC... which
        # ends up causing problems for exporting as marc8, which is
        # handled kind of not very well anyway.
        # I don't even totally understand what I'm doing, after 6 hours working on it,
        # sorry, just a hack.
        value.force_encoding("BINARY") unless settings["horizon.destination_encoding"] == "UTF8"
        row_hash[subfield] = value
      end
    end
    results[bib_id] << row_hash
  end
  return results
ensure
  # Always cancel and close JDBC resources, even when an exception is
  # raised mid-stream.
  pstmt.cancel if pstmt
  pstmt.close if pstmt
  rs.close if rs
  $stderr.write ">" if settings["debug_ascii_progress"]
end
# Normalizes a MARC21 leader string in place: pads or truncates it to the
# mandatory 24 bytes, stamps the fixed positions MARC21 requires, flags the
# record as Unicode when exporting UTF-8, and scrubs any non-ASCII bytes
# (the leader may only contain ASCII characters).
def fix_leader!(leader)
  # The leader must be exactly 24 bytes long.
  if leader.length < 24
    leader.replace(leader.ljust(24, ' '))
  elsif leader.length > 24
    leader.replace(leader.byteslice(0, 24))
  end
  # Fixed values per http://www.loc.gov/marc/bibliographic/ecbdldrd.html
  leader[10..11] = '22'
  leader[20..23] = '4500'
  # Byte 9 == 'a' marks the record as UCS/Unicode encoded.
  leader[9] = 'a' if settings['horizon.destination_encoding'] == "UTF8"
  # The leader should only hold ASCII; stray bytes cause ruby encoding
  # problems down the line. The force_encoding also works around an
  # apparent jruby quirk (https://github.com/jruby/jruby/issues/886);
  # note String#chars cannot be used on an invalid string for the same
  # reason, hence split('').
  leader.force_encoding('ascii')
  return if leader.valid_encoding?
  scrubbed = leader.split('').map { |ch| ch.valid_encoding? ? ch : ' ' }.join
  leader.replace(scrubbed)
end
# True when any holdings (items or copies) should be attached to records,
# i.e. 'horizon.include_holdings' is neither false, nil, nor blank.
def include_some_holdings?
  value = settings['horizon.include_holdings']
  !(value.nil? || value == false || value == "")
end
# True when records must be transcoded from MARC8 to UTF-8 on export.
def convert_marc8_to_utf8?
  settings['horizon.source_encoding'] == "MARC8" && settings['horizon.destination_encoding'] == "UTF8"
end
# Builds the jTDS JDBC connection URL. Uses 'horizon.jdbc_url' verbatim
# when present, otherwise assembles the URL from the individual horizon.*
# settings. The password (from 'horizon.password') is appended only when
# include_password is true -- leave it false for log output, to keep the
# password out of logs.
def jdbc_url(include_password = false)
  url =
    if settings.has_key? "horizon.jdbc_url"
      settings["horizon.jdbc_url"]
    else
      jtds_type = settings['horizon.jtds_type'] || 'sybase'
      database = settings['horizon.database'] || 'horizon'
      port = settings['horizon.port'] || '2025'
      host = settings['horizon.host'] or raise ArgumentError.new("Need horizon.host setting, or horizon.jdbc_url")
      user = settings['horizon.user'] or raise ArgumentError.new("Need horizon.user setting, or horizon.jdbc_url")
      "jdbc:jtds:#{jtds_type}://#{host}:#{port}/#{database};user=#{user}"
    end

  # Not sure if useCursors makes a difference, but just in case.
  url += ";useCursors=true"

  timeout = settings['horizon.timeout']
  url += ";socketTimeout=#{timeout};loginTimeout=#{timeout}" if timeout

  if include_password
    password = settings['horizon.password'] or raise ArgumentError.new("Need horizon.password setting")
    url += ";password=#{password}"
  end

  url
end
# Opens and returns a JDBC connection to the Horizon database.
def open_connection!
  # Log without the password; pass it only to the actual connect call.
  logger.debug("HorizonReader: Opening JDBC Connection at #{jdbc_url(false)} ...")
  conn = java.sql.DriverManager.getConnection( jdbc_url(true) )
  # If autocommit on, fetchSize later has no effect, and JDBC slurps
  # the whole result set into memory, which we can not handle.
  conn.setAutoCommit false
  logger.debug("HorizonReader: Opened JDBC Connection.")
  return conn
end
# The logger from settings, or a null (fatal-only) logger when none given.
def logger
  configured = settings["logger"]
  configured || Yell::Logger.new(STDERR, :level => "gt.fatal") # null logger
end
# Default settings, merged underneath any user-supplied settings.
def self.default_settings
  {
    "horizon.timeout" => 1200, # 1200 seconds == 20 minutes
    "horizon.batch_size" => 400,
    "horizon.public_only" => true,
    "horizon.source_encoding" => "MARC8",
    "horizon.destination_encoding" => "UTF8",
    "horizon.codepoint_translate" => true,
    "horizon.character_reference_translate" => true,
    # MARC tag used for item-level holdings fields.
    "horizon.item_tag" => "991",
    # Crazy isnull() in the call_type join to join to call_type directly on item
    # if specified, otherwise calltype on collection. Phew!
    "horizon.item_join_clause" => "LEFT OUTER JOIN collection ON item.collection = collection.collection LEFT OUTER JOIN call_type ON isnull(item.call_type, collection.call_type) = call_type.call_type",
    # Maps item/call_type columns to subfield codes of the item field.
    "horizon.item_map" => {
      "item.call_reconstructed" => "a",
      "call_type.processor" => "f",
      "call_type.call_type" => "b",
      "item.copy_reconstructed" => "c",
      "item.staff_only" => "q",
      "item.item#" => "i",
      "item.collection" => "l",
      "item.notes" => "n",
      "item.location" => "m"
    },
    # MARC tag used for copy-level holdings fields.
    "horizon.copy_tag" => "937",
    # Crazy isnull() in the call_type join to join to call_type directly on copy
    # if specified, otherwise calltype on collection. Phew!
    "horizon.copy_join_clause" => "LEFT OUTER JOIN collection ON copy.collection = collection.collection LEFT OUTER JOIN call_type ON isnull(copy.call_type, collection.call_type) = call_type.call_type",
    # Maps copy/call_type columns to subfield codes of the copy field.
    "horizon.copy_map" => {
      "copy.copy#" => "8",
      "copy.call" => "a",
      "copy.copy_number" => "c",
      "call_type.processor" => "f",
      "copy.staff_only" => "q",
      "copy.location" => "m",
      "copy.collection" => "l",
      "copy.pac_note" => "n"
    }
  }
end
end
end
| 40.206146 | 207 | 0.629088 |
2159ad09c780fb16161aca705b7a6ceb235a5bd7 | 351 | class Needle
attr_accessor :name, :mtime, :size
def initialize(path)
@path = path
@name = File.basename(@path)
@mtime = File.mtime(@path)
@size = Filesize.from("#{File.size(@path)} B").pretty
end
def directory?
File.directory? @path
end
def file?
File.file? @path
end
end | 18.473684 | 61 | 0.555556 |
edfed190e8b9b8ff302661dd2fb7753d50d9b24e | 507 | cask 'bloodhound' do
version '1.2.1'
sha256 '63abbfb483811a004171a8680402e613ca6129c7bd152d67321e65d90b70810b'

# Release artifacts are fetched straight from the upstream GitHub project.
url "https://github.com/BloodHoundAD/BloodHound/releases/download/#{version}/BloodHound-darwin-x64.zip"
# New versions are discovered from the GitHub releases feed.
appcast 'https://github.com/BloodHoundAD/BloodHound/releases.atom',
        checkpoint: 'e97ce200da16ade01d53aefca9252e16bc0370a3a554291d3d47780919c78ada'
name 'bloodhound'
homepage 'https://github.com/BloodHoundAD/BloodHound'

app 'BloodHound-darwin-x64/BloodHound.app'
| 39 | 105 | 0.798817 |
ab9cf05c90384f88b214e13e82a189214ce2da37 | 1,256 | # coding: utf-8
# Gem specification for flash-messenger; the version comes from the gem's
# version file and the file list from git.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'flash-messenger/version'

Gem::Specification.new do |spec|
  spec.name          = "flash-messenger"
  spec.version       = FlashMessenger::VERSION
  spec.authors       = ["Chen, Yi-Cyuan"]
  spec.email         = ["[email protected]"]
  spec.summary       = %q{Integrate with flash-messenger to provide a simple flash messages.}
  spec.description   = %q{Integrate with flash-messenger to provide a simple flash messages.}
  spec.homepage      = "https://github.com/emn178/flash-messenger-rails"
  spec.license       = "MIT"
  # Ship everything tracked by git except tests/specs/features.
  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.require_paths = ["lib"]

  spec.add_dependency "request_store"
  spec.add_development_dependency "bundler", "~> 1.10"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "actionview"
  spec.add_development_dependency "activerecord"
  spec.add_development_dependency "sqlite3"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "rspec-its"
  spec.add_development_dependency "simplecov"
  spec.add_development_dependency "coveralls"
end
| 39.25 | 104 | 0.714968 |
0303a3551e02476b57a10e37ec86a9da52423921 | 557 | # frozen_string_literal: true
module RubyCritic
  module SourceControlSystem
    # Mercurial adapter: shells out to `hg` for repository metadata.
    class Mercurial < Base
      register_system

      # True when the current working directory is a valid hg repository.
      def self.supported?
        `hg verify 2>&1` && $CHILD_STATUS.success?
      end

      def self.to_s
        'Mercurial'
      end

      # Number of commits touching the given path (one char per commit).
      def revisions_count(path)
        `hg log #{path.shellescape} --template '1'`.length
      end

      # ISO date of the most recent commit touching the given path.
      def date_of_last_commit(path)
        `hg log #{path.shellescape} --template '{date|isodate}' --limit 1`.chomp
      end

      # This adapter does not expose a current revision identifier.
      def revision?
        false
      end
    end
  end
end
| 18.566667 | 80 | 0.601436 |
0367a1f7faa8e9d5801233b27ba2b652e2c7e536 | 3,109 | require 'test_helper'
# Model tests for User: presence/length/format validations, email
# uniqueness, password rules, remember-digest authentication, dependent
# micropost destruction, following, and the status feed.
# NOTE(review): the follow/feed tests rely on users(:michael) etc. fixtures
# defined elsewhere in the test suite.
class UserTest < ActiveSupport::TestCase
  def setup
    @user = User.new(name: "Example User", email: "[email protected]",
                     password: "foobar", password_confirmation: "foobar")
  end

  test "should be valid" do
    assert @user.valid?
  end

  test "name should be present" do
    @user.name = " "
    assert_not @user.valid?
  end

  test "email should be present" do
    @user.email = " "
    assert_not @user.valid?
  end

  test "name should not be too long" do
    @user.name = "a" * 51
    assert_not @user.valid?
  end

  test "email should not be too long" do
    @user.email = "a" * 244 + "@example.com"
    assert_not @user.valid?
  end

  test "email validation should accept valid addresses" do
    valid_addresses = %w[[email protected] [email protected] [email protected]
                         [email protected] [email protected]]
    valid_addresses.each do |valid_address|
      @user.email = valid_address
      assert @user.valid?, "#{valid_address.inspect} should be valid"
    end
  end

  test "email validation should reject invalid addresses" do
    invalid_addresses = %w[user@example,com user_at_foo.org user.name@example.
                           foo@bar_baz.com foo@bar+baz.com]
    invalid_addresses.each do |invalid_address|
      @user.email = invalid_address
      assert_not @user.valid?, "#{invalid_address.inspect} should be invalid"
    end
  end

  test "email addresses should be unique" do
    # Uniqueness must be case-insensitive.
    duplicate_user = @user.dup
    duplicate_user.email = @user.email.upcase
    @user.save
    assert_not duplicate_user.valid?
  end

  test "password should be present (nonblank)" do
    @user.password = @user.password_confirmation = " " * 6
    assert_not @user.valid?
  end

  test "password should have a minimum length" do
    @user.password = @user.password_confirmation = "a" * 5
    assert_not @user.valid?
  end

  test "authenticated? should return false for a user with nil digest" do
    assert_not @user.authenticated?(:remember, '')
  end

  test "associated microposts should be destroyed" do
    @user.save
    @user.microposts.create(content: "Lorem ipsum")
    assert_difference 'Micropost.count', -1 do
      @user.destroy
    end
  end

  test "should follow and unfollow a user" do
    michael = users(:michael)
    isus = users(:isus)
    assert_not michael.following?(isus)
    michael.follow(isus)
    assert michael.following?(isus)
    assert isus.followers.include?(michael)
    michael.unfollow(isus)
    assert_not michael.following?(isus)
  end

  test "feed should have the right posts" do
    michael = users(:michael)
    isus = users(:isus)
    lucifer = users(:lucifer)
    # Posts from followed user
    lucifer.microposts.each do |post_following|
      assert michael.feed.include?(post_following)
    end
    # Posts from self
    michael.microposts.each do |post_self|
      assert michael.feed.include?(post_self)
    end
    # Posts from unfollowed user
    isus.microposts.each do |post_unfollowed|
      assert_not michael.feed.include?(post_unfollowed)
    end
  end
end
| 28.263636 | 78 | 0.671599 |
4ae43a72f9fdbe3317cdc8ff106746b2a7b58c31 | 1,961 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
# Reset all tables before seeding (destroy_all on Ingredient runs model
# callbacks; the others use delete_all for speed).
Ingredient.destroy_all
Vessel.delete_all
Brine.delete_all
Note.delete_all
Project.delete_all
User.delete_all

jonny = User.create(username: 'JonnyR', first_name: 'Jonny', last_name: 'Riecke', email: '[email protected]')

p1 = Project.create(name: 'Kimchi', end_date: '04/30/2020', user: User.first)
p2 = Project.create(name: 'Garlic Honey', end_date: '05/07/2020', user: User.first)

# BUGFIX: these creates previously passed an integer id to the `project:`
# association (`project: p1.id`), which raises
# ActiveRecord::AssociationTypeMismatch; an id belongs on the `project_id:`
# foreign-key column instead.
Ingredient.create(name: 'Cabbage', quantity: 3, units: 'heads', prep: 'Quartered', project_id: p1.id)
Ingredient.create(name: 'Korean Chili Flakes', quantity: 3, units: 'cups', prep: 'Scooped', project_id: p1.id)
Ingredient.create(name: 'Asian Pear', quantity: 1, units: 'nil', prep: 'Matchstick cut', project_id: p1.id)
Ingredient.create(name: 'Carrots ', quantity: 3, units: 'Carrots', prep: 'Matchstick cut', project_id: p1.id)
Ingredient.create(name: 'Honey ', quantity: 2, units: 'Cups', prep: 'Measured', project_id: p2.id)
Ingredient.create(name: 'Garlic ', quantity: 1, units: 'Head', prep: 'Peeled', project_id: p2.id)

Brine.create(amount: 12, units: 'cups', salt: 10, sugar: 0, project_id: p1.id)

Vessel.create(vessel: 'Crock', volume: 3, units: 'Liters', airlock: false, weight: true, material: 'Clay', notes: 'Grandma\'s crock', project_id: p1.id)
Vessel.create(vessel: 'Ball Jar', volume: 1, units: 'Liter', airlock: false, weight: true, material: 'Glass', notes: 'Found at GoodWill', project_id: p2.id)

Note.create(text: "I made this Kimchi to hurt my mouth", project_id: p1.id, day_id: 1)
Note.create(text: "No activity yet", project_id: p1.id, day_id: 2)
Note.create(text: "Looks sticky", project: p2.id, day_id: 2) | 59.424242 | 153 | 0.713921 |
edb39742a1facf4a2f5a2d8109748eba06392c2b | 534 | class UserMailer < ApplicationMailer
# Subject can be set in your I18n file at config/locales/en.yml
# with the following lookup:
#
# en.user_mailer.account_activation.subject
#
# Sends the account-activation email to the given user; the activation
# link/token is rendered by the mailer view from @user.
def account_activation(user)
  @user = user
  mail to: @user.email, subject: "Account activation"
end
# Subject can be set in your I18n file at config/locales/en.yml
# with the following lookup:
#
# en.user_mailer.password_reset.subject
#
# NOTE(review): generated scaffold stub -- mails a hard-coded address and
# takes no user; presumably it should accept a user argument like
# account_activation. Confirm before wiring into password-reset flow.
def password_reset
  @greeting = "Hi"
  mail to: "[email protected]"
end
end
| 22.25 | 65 | 0.696629 |
f8abbd9f4c11c5758182efdad41a536cf6dee604 | 3,350 | # frozen_string_literal: true
require 'cucumber/core/test/step'
module Cucumber::Core::Test
  # Specs for Test::Step: how a step describes itself (and its source) to
  # visitors, how it executes its action, and which attributes it exposes.
  describe Step do
    describe "describing itself" do
      let(:step_or_hook) { double }
      before(:each) do
        allow( step_or_hook ).to receive(:location)
      end

      it "describes itself to a visitor" do
        visitor = double
        args = double
        test_step = Step.new([step_or_hook])
        expect( visitor ).to receive(:test_step).with(test_step, args)
        test_step.describe_to(visitor, args)
      end

      it "describes its source to a visitor" do
        feature, scenario = double, double
        visitor = double
        args = double
        expect( feature ).to receive(:describe_to).with(visitor, args)
        expect( scenario ).to receive(:describe_to).with(visitor, args)
        expect( step_or_hook ).to receive(:describe_to).with(visitor, args)
        test_step = Step.new([feature, scenario, step_or_hook])
        test_step.describe_source_to(visitor, args)
      end
    end

    describe "executing" do
      let(:ast_step) { double }
      before(:each) do
        allow( ast_step ).to receive(:location)
      end

      it "passes arbitrary arguments to the action's block" do
        args_spy = nil
        expected_args = [double, double]
        test_step = Step.new([ast_step]).with_action do |*actual_args|
          args_spy = actual_args
        end
        test_step.execute(*expected_args)
        expect(args_spy).to eq expected_args
      end

      context "when a passing action exists" do
        it "returns a passing result" do
          test_step = Step.new([ast_step]).with_action {}
          expect( test_step.execute ).to be_passed
        end
      end

      context "when a failing action exists" do
        let(:exception) { StandardError.new('oops') }

        it "returns a failing result" do
          test_step = Step.new([ast_step]).with_action { raise exception }
          result = test_step.execute
          expect( result ).to be_failed
          expect( result.exception ).to eq exception
        end
      end

      context "with no action" do
        it "returns an Undefined result" do
          test_step = Step.new([ast_step])
          result = test_step.execute
          expect( result ).to be_undefined
        end
      end
    end

    it "exposes the text, location and original location of the AST step or hook as attributes" do
      text, location, original_location = double, double, double
      step_or_hook = double(text: text, location: location, original_location: original_location)
      test_step = Step.new([step_or_hook])
      expect( test_step.text ).to eq text
      expect( test_step.location ).to eq location
      expect( test_step.original_location ).to eq original_location
    end

    it "exposes the location of the action as attribute" do
      location = double
      action = double(location: location)
      test_step = Step.new([double], action)
      expect( test_step.action_location ).to eq location
    end

    it "returns the text of the AST step when converted to a string" do
      ast_step = double(text: 'a passing step', location: double)
      test_step = Step.new([ast_step])
      expect( test_step.to_s ).to eq 'a passing step'
    end
  end
end
| 33.5 | 98 | 0.630746 |
b9c35692345dee1bb8036ab4587655624bca886c | 2,365 | module Bunny
# Returns the most recently created mock Bunny instance.
def self.instance
  @instance
end

# Mimics Bunny.run: creates a mock instance and yields it to the block.
def self.run(options = {})
  @instance = BunnyInstance.new(options)
  yield @instance
end

# Mimics Bunny.new: creates, records, and returns a mock instance.
def self.new(options = {})
  @instance = BunnyInstance.new(options)
  @instance
end
# Minimal stand-in for Bunny::Consumer: records the channel and queue it
# was built for and ignores the remaining AMQP options.
class Consumer
  def initialize(channel, queue, consumer_tag = 'test', no_ack = true, exclusive = false, arguments = {})
    @channel = channel
    @queue = queue
  end
end
# Mock connection/channel/session object. Bunny's real API spreads these
# concerns across several classes; the mock collapses them into one object
# that simply records what was asked of it.
class BunnyInstance
  attr_accessor :queues
  attr_accessor :options
  attr_accessor :channel
  # NOTE(review): the generated reader here is immediately overridden by
  # the #exchange method below; only the generated writer remains in use.
  attr_accessor :exchange

  def initialize(options = {})
    @options = options
    @queues = {}
  end

  # Mimics Bunny::Session#start by opening a (self-referential) channel.
  def start
    @channel = create_channel
    self
  end

  # Returns the mock exchange, creating one when a name is given.
  def exchange(exchange = nil)
    @exchange = BunnyExchange.new(self, exchange) if exchange
    @exchange
  end

  # Creates and records a fresh mock queue under the given name; the
  # options argument is accepted for API compatibility but ignored.
  def queue(queue, options = {})
    @queues[queue] = BunnyQueue.new
  end

  # The mock acts as its own channel and connection.
  def create_channel
    self
  end

  def connection
    self
  end

  def host
    @options[:host]
  end

  def port
    @options[:port]
  end

  def user
    @options[:user]
  end

  def pass
    @options[:pass]
  end

  def vhost
    @options[:vhost]
  end

  # Channel number; the mock always reports 1.
  def number
    1
  end
end
# Mock exchange: records the routing key and payload of the last publish.
class BunnyExchange
  attr_accessor :name
  attr_accessor :key
  attr_accessor :message
  attr_accessor :channel

  def initialize(channel, name)
    @channel = channel
    @name = name
  end

  # Captures the message and its routing key (:routing_key wins over :key).
  def publish(message, options = {})
    @key = options[:routing_key] || options[:key]
    @message = message
  end
end
# Mock queue: records bindings and subscriptions instead of talking AMQP.
class BunnyQueue
  attr_accessor :arguments
  attr_accessor :block
  attr_accessor :bound_exchange
  attr_accessor :bound_key
  attr_accessor :consumer

  def initialize
    @bindings = []
  end

  # Records the exchange and routing key this queue was bound with.
  def bind(exchange, options)
    @bound_exchange = exchange
    @bound_key = options[:routing_key] || options[:key]
    @bindings << {:exchange => @bound_exchange.name, :routing_key => @bound_key}
  end

  # Records subscription arguments and the handler block.
  def subscribe(*args, &block)
    @arguments = *args
    @block = block
  end

  # Records the consumer object, mimicking Bunny's subscribe_with.
  def subscribe_with(consumer, opts = {:block => false})
    @block ||= opts[:block]
    @consumer = consumer
    @consumer
  end

  # The mock reports its bound routing key as its name.
  def name
    @bound_key
  end

  def channel
    @bound_exchange.channel
  end
end
end
| 17.389706 | 107 | 0.606765 |
b9a42e0b854070e8d0c59881fe6fcd814e58afac | 47 | class PlanetSchematic < ActiveRecord::Base
end
| 15.666667 | 42 | 0.829787 |
e2593a8355fabcb721bbfc142c367be64b4b8340 | 2,461 | module Autoini
# In-memory representation of an INI document: an ordered list of elements
# (pairs, sections, comments, blank lines) with hash round-tripping and
# merge support.
class Contents
  KLASSES = [Pair, Section, Comment, BlankLine]

  attr_reader :lines

  # Parses raw INI text into a Contents tree; lines after a section
  # header are nested inside that section.
  def self.parse(contents)
    return new if contents.nil? || contents.empty?

    elements = []
    current_section = nil
    contents.split("\n").each do |raw|
      parsed = KLASSES
        .map { |klass| klass.parse_with_comment(Autoini.divide(raw.strip)) }
        .find(&:itself)
      raise(ArgumentError, "couldn't parse line: #{raw.inspect}") unless parsed

      if parsed.is_a?(Section)
        current_section = parsed
        elements << current_section
      elsif current_section
        current_section << parsed
      else
        elements << parsed
      end
    end
    new(*elements)
  end

  # Convenience: parse text straight to a nested hash.
  def self.hash(contents)
    parse(contents).to_h
  end

  # Builds a Contents tree from a (possibly nested) hash.
  def self.[](hash)
    raise ArgumentError, "must pass a hash" unless hash.is_a?(Hash)
    elements = hash.map do |key, value|
      value.is_a?(Hash) ? Section[key, value] : Pair.new(key, value)
    end
    new(*elements)
  end

  def initialize(*contents)
    @lines = []
    self << contents
  end

  # Appends one element or an array of elements, enforcing that every
  # entry is an Element and that nothing follows a Section at top level.
  def <<(contents)
    Autoini.wrap(contents).each do |element|
      unless element.is_a?(Element)
        raise ArgumentError, "#{element.class.name} must extend Autoini::Element"
      end
      if !element.is_a?(Section) && lines.last.is_a?(Section)
        raise ArgumentError, "Error on line #{element.inspect}: all elements " \
          "after a section must be in a section"
      end
      lines << element
    end
  end

  # First Section whose title matches key (string comparison), or nil.
  def section(key)
    lines.find { |line| line.is_a?(Section) && line.title.to_s == key.to_s }
  end

  # First top-level Pair whose key matches (string comparison), or nil.
  def pair(key)
    lines.find { |line| line.is_a?(Pair) && line.key.to_s == key.to_s }
  end

  # Merges another Contents into this one: matching sections merge
  # recursively, matching pairs are overwritten, everything else appended.
  def merge!(other_contents)
    unless other_contents.is_a?(Contents)
      raise ArgumentError, "must pass a Autoini::Contents"
    end
    other_contents.lines.each do |line|
      case line
      when Section
        if (existing = section(line.title))
          existing.merge!(line)
        else
          self << line
        end
      when Pair
        if (existing = pair(line.key))
          existing.value = line.value
        else
          self << line
        end
      end
    end
    self
  end

  # Serializes back to INI text.
  def to_s
    lines.map(&:to_s).join(Autoini.newline)
  end

  # Nested-hash form; elements with no hash representation are dropped.
  def to_h
    lines.map(&:to_a).reject(&:empty?).to_h
  end

  # Structural equality: same class, length, and pairwise-equal elements.
  def ==(other)
    return false unless other.is_a?(Contents)
    return false unless other.lines.length == lines.length
    lines.each_with_index.all? { |line, idx| other.lines[idx] == line }
  end
end
end
| 23.438095 | 77 | 0.525396 |
21902298c0b4109329d056de27a7cdec260856a9 | 5,464 | require 'test/unit'
require 'tmpdir'
require 'fileutils'
require 'pathname'
# Core Dir tests: seek/tell, glob, chdir, encodings, symlinks, etc.
# The fixture tree under @root contains files at even letters (b, d, ...)
# and directories at odd letters (a, c, ...).
# NOTE(review): the $SAFE = 4 / SecurityError tests only work on rubies
# that still support safe levels (removed in modern Ruby).
class TestDir < Test::Unit::TestCase
  def setup
    @verbose = $VERBOSE
    $VERBOSE = nil
    @root = Pathname.new(Dir.mktmpdir('__test_dir__')).realpath.to_s
    @nodir = File.join(@root, "dummy")
    for i in ?a..?z
      if i.ord % 2 == 0
        FileUtils.touch(File.join(@root, i))
      else
        FileUtils.mkdir(File.join(@root, i))
      end
    end
  end

  def teardown
    $VERBOSE = @verbose
    FileUtils.remove_entry_secure @root if File.directory?(@root)
  end

  def test_seek
    dir = Dir.open(@root)
    begin
      cache = []
      loop do
        pos = dir.tell
        break unless name = dir.read
        cache << [pos, name]
      end
      for x,y in cache.sort_by {|z| z[0] % 3 } # shuffle
        dir.seek(x)
        assert_equal(y, dir.read)
      end
    ensure
      dir.close
    end
  end

  def test_JVN_13947696
    b = lambda {
      d = Dir.open('.')
      $SAFE = 4
      d.close
    }
    assert_raise(SecurityError) { b.call }
  end

  def test_nodir
    assert_raise(Errno::ENOENT) { Dir.open(@nodir) }
  end

  def test_inspect
    d = Dir.open(@root)
    assert_match(/^#<Dir:#{ Regexp.quote(@root) }>$/, d.inspect)
    assert_match(/^#<Dir:.*>$/, Dir.allocate.inspect)
  ensure
    d.close
  end

  def test_path
    d = Dir.open(@root)
    assert_equal(@root, d.path)
    assert_nil(Dir.allocate.path)
  ensure
    d.close
  end

  def test_set_pos
    d = Dir.open(@root)
    loop do
      i = d.pos
      break unless x = d.read
      d.pos = i
      assert_equal(x, d.read)
    end
  ensure
    d.close
  end

  def test_rewind
    d = Dir.open(@root)
    a = (0..5).map { d.read }
    d.rewind
    b = (0..5).map { d.read }
    assert_equal(a, b)
    assert_raise(SecurityError) do
      Thread.new do
        $SAFE = 4
        d.rewind
      end.join
    end
  ensure
    d.close
  end

  def test_chdir
    @pwd = Dir.pwd
    @env_home = ENV["HOME"]
    @env_logdir = ENV["LOGDIR"]
    ENV.delete("HOME")
    ENV.delete("LOGDIR")
    assert_raise(Errno::ENOENT) { Dir.chdir(@nodir) }
    assert_raise(ArgumentError) { Dir.chdir }
    ENV["HOME"] = @pwd
    Dir.chdir do
      assert_equal(@pwd, Dir.pwd)
      Dir.chdir(@root)
      assert_equal(@root, Dir.pwd)
    end
  ensure
    begin
      Dir.chdir(@pwd)
    rescue
      abort("cannot return the original directory: #{ @pwd }")
    end
    if @env_home
      ENV["HOME"] = @env_home
    else
      ENV.delete("HOME")
    end
    if @env_logdir
      ENV["LOGDIR"] = @env_logdir
    else
      ENV.delete("LOGDIR")
    end
  end

  def test_chroot_nodir
    assert_raise(NotImplementedError, Errno::ENOENT, Errno::EPERM
    ) { Dir.chroot(File.join(@nodir, "")) }
  end

  def test_close
    d = Dir.open(@root)
    d.close
    assert_raise(IOError) { d.read }
  end

  def test_glob
    assert_equal((%w(. ..) + (?a..?z).to_a).map{|f| File.join(@root, f) },
                 Dir.glob(File.join(@root, "*"), File::FNM_DOTMATCH).sort)
    assert_equal([@root] + (?a..?z).map {|f| File.join(@root, f) }.sort,
                 Dir.glob([@root, File.join(@root, "*")]).sort)
    assert_equal([@root] + (?a..?z).map {|f| File.join(@root, f) }.sort,
                 Dir.glob(@root + "\0\0\0" + File.join(@root, "*")).sort)
    assert_equal((?a..?z).step(2).map {|f| File.join(File.join(@root, f), "") }.sort,
                 Dir.glob(File.join(@root, "*/")).sort)
    FileUtils.touch(File.join(@root, "{}"))
    assert_equal(%w({} a).map{|f| File.join(@root, f) },
                 Dir.glob(File.join(@root, '{\{\},a}')))
    assert_equal([], Dir.glob(File.join(@root, '[')))
    assert_equal([], Dir.glob(File.join(@root, '[a-\\')))
    assert_equal([File.join(@root, "a")], Dir.glob(File.join(@root, 'a\\')))
    assert_equal((?a..?f).map {|f| File.join(@root, f) }.sort, Dir.glob(File.join(@root, '[abc/def]')).sort)
  end

  def test_glob_recursive
    bug6977 = '[ruby-core:47418]'
    Dir.chdir(@root) do
      FileUtils.mkdir_p("a/b/c/d/e/f")
      assert_equal(["a/b/c/d/e/f"], Dir.glob("a/**/e/f"), bug6977)
      assert_equal(["a/b/c/d/e/f"], Dir.glob("a/**/d/e/f"), bug6977)
      assert_equal(["a/b/c/d/e/f"], Dir.glob("a/**/c/d/e/f"), bug6977)
      assert_equal(["a/b/c/d/e/f"], Dir.glob("a/**/b/c/d/e/f"), bug6977)
      assert_equal(["a/b/c/d/e/f"], Dir.glob("a/**/c/?/e/f"), bug6977)
      assert_equal(["a/b/c/d/e/f"], Dir.glob("a/**/c/**/d/e/f"), bug6977)
      assert_equal(["a/b/c/d/e/f"], Dir.glob("a/**/c/**/d/e/f"), bug6977)
    end
  end

  def test_foreach
    assert_equal(Dir.foreach(@root).to_a.sort, %w(. ..) + (?a..?z).to_a)
  end

  def test_dir_enc
    dir = Dir.open(@root, encoding: "UTF-8")
    begin
      while name = dir.read
        assert_equal(Encoding.find("UTF-8"), name.encoding)
      end
    ensure
      dir.close
    end

    dir = Dir.open(@root, encoding: "ASCII-8BIT")
    begin
      while name = dir.read
        assert_equal(Encoding.find("ASCII-8BIT"), name.encoding)
      end
    ensure
      dir.close
    end
  end

  def test_symlink
    begin
      ["dummy", *?a..?z].each do |f|
        File.symlink(File.join(@root, f),
                     File.join(@root, "symlink-#{ f }"))
      end
    rescue NotImplementedError
      # Platform without symlink support; nothing to test.
      return
    end

    assert_equal([*?a..?z, *"symlink-a".."symlink-z"].each_slice(2).map {|f, _| File.join(@root, f + "/") }.sort,
                 Dir.glob(File.join(@root, "*/")).sort)
    Dir.glob(File.join(@root, "**/"))
  end
end
| 24.392857 | 113 | 0.560212 |
5dff00c5d0cd8a6e57287aff55eb72cd5a94cea6 | 2,542 | require 'rails_helper'
# rubocop:disable Metrics/BlockLength
# Model specs for Framework: required name/user/language/icon, the
# belongs_to associations, and ActiveStorage icon attachment.
RSpec.describe Framework, type: :model do
  context 'Validations for Framework' do
    let(:user) { User.new(name: 'Testing1') }
    let(:language) { Language.new(name: 'Language1') }
    let(:framework) { Framework.new(name: 'Framework1') }

    it 'Should be valid if a framework contains a user/name/language' do
      framework.user = user
      framework.language = language
      framework.icon.attach(io: File.open(Rails.root.join('app', 'assets', 'images', 'other.png')),
                            filename: 'other.png',
                            content_type: 'image/png')
      expect(framework).to be_valid
    end

    it 'Should be invalid if a framework doesnt contains a name' do
      framework.icon.attach(io: File.open(Rails.root.join('app', 'assets', 'images', 'other.png')),
                            filename: 'other.png',
                            content_type: 'image/png')
      framework.user = user
      framework.language = language
      framework.name = ''
      expect(framework).not_to be_valid
    end

    it 'should be invalid if it doesnt have a user' do
      framework.icon.attach(io: File.open(Rails.root.join('app', 'assets', 'images', 'other.png')),
                            filename: 'other.png',
                            content_type: 'image/png')
      framework.language = language
      expect(framework).not_to be_valid
    end

    it 'Should be invalid if it doesnt belongs to a language' do
      framework.icon.attach(io: File.open(Rails.root.join('app', 'assets', 'images', 'other.png')),
                            filename: 'other.png',
                            content_type: 'image/png')
      framework.user = user
      expect(framework).not_to be_valid
    end

    it 'Should be invalid if it doesnt have an icon' do
      framework.user = user
      framework.language = language
      expect(framework).not_to be_valid
    end
  end

  context 'Assocs for framework' do
    it { should belong_to(:user) }
    it { should belong_to(:language) }
  end

  context 'Should have an icon' do
    let(:framework) { Framework.new(name: 'Testing1') }

    it 'Should have a valid image' do
      framework.icon.attach(io: File.open(Rails.root.join('app', 'assets', 'images', 'unknown_user.jpeg')),
                            filename: 'unknown_user.jpeg',
                            content_type: 'image/jpeg')
      expect(framework.icon).to be_attached
    end
  end
end
# rubocop:enable Metrics/BlockLength
| 37.940299 | 107 | 0.605429 |
4a5870a69695df769878dbc3332a8e7e933df585 | 467 | require 'set'
# Reads one signed integer per line from frequency_changes.txt.
# BUGFIX: the original used File.open(...).map without a block, leaking the
# file handle; File.readlines reads and closes the file in one call.
def parse_changes
  File.readlines('frequency_changes.txt').map { |line| Integer(line.chomp) }
end
# Walks the change list cyclically, accumulating a running frequency, and
# returns the first frequency value reached twice. Returns nil when the
# change list is empty (the cycle never runs).
def first_recurring(changes)
  seen = Set.new
  current = 0
  changes.cycle do |delta|
    seen << current
    current += delta
    return current if seen.include?(current)
  end
  nil
end
changes = parse_changes
puts "Resulting frequency: #{changes.sum}"
# BUGFIX: this line was `puts puts "..."`, which printed the message and
# then a stray blank line (the nil return of the inner puts).
puts "First recurring frequency: #{first_recurring(changes)}"
| 17.296296 | 72 | 0.704497 |
ab74f6b085d633ea6358f9160eae27f93277fa43 | 2,175 | require 'rails_event_store_active_record'
require 'support/rspec_defaults'
require 'support/mutant_timeout'
require 'rails'
# Opt into verbose output via the VERBOSE env var.
$verbose = ENV.has_key?('VERBOSE') ? true : false
ENV['DATABASE_URL'] ||= 'sqlite3:db.sqlite3'
ENV['RAILS_VERSION'] ||= Rails::VERSION::STRING

# Load the generator's migration template and specialize it for the
# ActiveRecord version under test; force: true lets specs recreate tables.
MigrationCode = File.read(File.expand_path('../../lib/rails_event_store_active_record/generators/templates/migration_template.rb', __FILE__) )
migration_version = Gem::Version.new(ActiveRecord::VERSION::STRING) < Gem::Version.new("5.0.0") ? "" : "[4.2]"
MigrationCode.gsub!("<%= migration_version %>", migration_version)
MigrationCode.gsub!("force: false", "force: true")
# Helpers mixed into specs for creating and dropping event store tables
# against the database configured in DATABASE_URL.
module SchemaHelper
  def establish_database_connection
    ActiveRecord::Base.establish_connection(ENV['DATABASE_URL'])
  end

  # Runs the generator's migration (current schema).
  def load_database_schema
    ActiveRecord::Schema.define do
      self.verbose = false
      eval(MigrationCode) unless defined?(CreateEventStoreEvents)
      CreateEventStoreEvents.new.change
    end
  end

  # Recreates the old single-table ("legacy") event store schema.
  def load_legacy_database_schema
    ActiveRecord::Schema.define do
      self.verbose = false
      create_table(:event_store_events, force: false) do |t|
        t.string :stream, null: false
        t.string :event_type, null: false
        t.string :event_id, null: false
        t.text :metadata
        t.text :data, null: false
        t.datetime :created_at, null: false
      end
      add_index :event_store_events, :stream
      add_index :event_store_events, :created_at
      add_index :event_store_events, :event_type
      add_index :event_store_events, :event_id, unique: true
    end
  end

  def drop_legacy_database
    ActiveRecord::Migration.drop_table("event_store_events")
  rescue ActiveRecord::StatementInvalid
    # Table may not exist; dropping is best-effort.
  end

  def drop_database
    ActiveRecord::Migration.drop_table("event_store_events")
    ActiveRecord::Migration.drop_table("event_store_events_in_streams")
  end
end
RSpec.configure do |config|
config.failure_color = :magenta
end
RSpec::Matchers.define :contains_ids do |expected_ids|
match do |enum|
@actual = enum.map(&:event_id)
values_match?(expected_ids, @actual)
end
diffable
end | 31.985294 | 142 | 0.716322 |
7a47ad7d532beb28e6436063119af1e0be669bb2 | 1,425 | require 'fog/core/collection'
require 'fog/rackspace/models/dns/record'
module Fog
module DNS
class Rackspace
class Records < Fog::Collection
attribute :zone
attribute :total_entries, :aliases => 'totalEntries'
model Fog::DNS::Rackspace::Record
def all
requires :zone
data = service.list_records(zone.identity)
load(data.body['records'])
end
alias_method :each_record_this_page, :each
def each
requires :zone
return self unless block_given?
entries = 0
begin
body = service.list_records(zone.id, :offset => entries).body
entries += body['records'].size
self.merge_attributes(body)
subset = dup.load(body['records'])
subset.each_record_this_page {|record| yield record }
end while entries < total_entries
self
end
def get(record_id)
requires :zone
data = service.list_record_details(zone.identity, record_id).body
new(data)
#nil or empty string will trigger an argument error
rescue ArgumentError
nil
rescue Fog::DNS::Rackspace::NotFound
nil
end
def new(attributes = {})
requires :zone
super({ :zone => zone }.merge!(attributes))
end
end
end
end
end
| 24.152542 | 75 | 0.575439 |
79055d049aa78b06df86ab13264810752b0b3575 | 1,709 | #-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
module Components
  # Page-object wrapper around the generic confirmation modal.
  # Public interface is unchanged: #container, #expect_open, #confirm,
  # #cancel.
  class ConfirmationDialog
    include Capybara::DSL
    include RSpec::Matchers

    def initialize; end

    # CSS selector of the modal element.
    def container
      '.op-modal'
    end

    # Asserts that the modal is currently rendered on the page.
    def expect_open
      expect(page).to have_selector(container)
    end

    # Presses the "continue" button inside the modal.
    def confirm
      click_dialog_button('.confirm-form-submit--continue')
    end

    # Presses the "cancel" button inside the modal.
    def cancel
      click_dialog_button('.confirm-form-submit--cancel')
    end

    private

    # Scopes to the modal and clicks the element matching +selector+.
    def click_dialog_button(selector)
      page.within(container) do
        page.find(selector).click
      end
    end
  end
end
| 29.982456 | 91 | 0.726156 |
b9e61d7d305e04fb5a94bade926f634107d2fc12 | 2,712 | module CanonicalVocabulary
module Renewals
module RenewalBuilder
def residency(member)
if member.person.addresses.blank?
member = @primary
end
member.person.addresses[0].location_state_code == 'DC' ? 'D.C. Resident' : 'Not a D.C. Resident'
end
def citizenship(family_member)
return if family_member.person_demographics.blank?
demographics = family_member.person_demographics
if demographics.citizen_status.blank?
raise "Citizenship status missing for person #{family_member.person.name_first} #{family_member.person.name_last}"
end
citizenship_mapping = {
"U.S. Citizen" => %W(us_citizen naturalized_citizen indian_tribe_member),
"Lawfully Present" => %W(alien_lawfully_present lawful_permanent_resident),
"Not Lawfully Present" => %W(undocumented_immigrant not_lawfully_present_in_us)
}
citizen_status = demographics.citizen_status
citizenship_mapping.each do |key, value|
return key if value.include?(citizen_status)
end
end
def tax_status(family_member)
return if family_member.financial_statements.empty?
financial_statement = family_member.financial_statements[0]
tax_status = financial_statement.tax_filing_status.split('#')[1]
case tax_status
when 'non-filer'
'Non-filer'
when 'dependent'
'Tax Dependent'
when 'tax_filer'
tax_filer_status(family_member, financial_statement)
end
end
def tax_filer_status(family_member, financial_statement)
return 'Single' if is_single?(family_member)
(financial_statement.is_tax_filing_together == 'true') ? 'Married Filing Jointly' : 'Married Filing Separately'
end
def is_single?(family_member)
relation = family_member.person_relationships.detect{|i| ['spouse', 'life_partner'].include?(i.relationship_uri)}
relation.blank? ? true : false
end
def incarcerated?(member)
if member.person_demographics.blank?
return 'No'
end
member.person_demographics.is_incarcerated == 'true' ? 'Yes' : 'No'
end
# def member_mec(member)
# if es_coverage = assistance_eligibility.at_xpath("n1:is_enrolled_for_es_coverage").text
# return 'Yes'
# end
# benefit = assistance_eligibility.xpath("n1:alternate_benefits/n1:alternate_benefit").detect do |benefit|
# Date.strptime(benefit.at_xpath("n1:end_date"), "%Y%m%d") <= Date.parse("2015-1-1")
# end
# benefit.blank? ? 'No' : 'Yes'
# end
end
end
end | 37.666667 | 124 | 0.656342 |
e2bae83b1f3efe5a372a7d90f4665634269b9045 | 414 | require 'spec_helper'
describe 'dhcp::host', :type => :define do
let :title do
'test_host'
end
let(:facts) do
{
:concat_basedir => '/dne',
:osfamily => 'RedHat',
}
end
let :params do
{
'ip' => '1.2.3.4',
'mac' => '90:FB:A6:E4:08:9F',
'comment' => 'test_comment'
}
end
it { should contain_concat__fragment("dhcp_host_#{title}") }
end
| 18 | 62 | 0.521739 |
612909ef143b444a09f00991ed2fb3c687f42812 | 4,853 | require 'thread'
module CfCanaries
class Breeder
def initialize(options)
@options = options
end
def breed(logger, runner)
logger.info 'targeting and logging in'
runner.cf!([
"api",
@options.target,
@options.skip_ssl_validation ? "--skip-ssl-validation" : nil
].compact.join(' '))
logger.info "Logging in as '#{@options.username}' user to '#{@options.organization}' org, '#{@options.space}' space."
runner.cf!("login -u '#{@options.username}' -p '#{@options.password}' -o #{@options.organization} -s #{@options.space}", :skip_logging_command => true, :password=>@options.password)
logger.info "Succeeded logging in."
logger.info 'breeding canaries'
push_zero_downtime_canary(logger, runner)
push_aviary(logger, runner)
push_cpu_canary(logger, runner)
push_disk_canary(logger, runner)
push_memory_canary(logger, runner)
push_network_canary(logger, runner)
push_instances_canary(logger, runner)
push_long_running_canary(logger, runner)
logger.info 'TWEET TWEET'
end
private
def push_zero_downtime_canary(logger, runner)
number_of_canaries = @options.number_of_zero_downtime_apps
logger.info "pushing #{number_of_canaries} zero-downtime canaries"
number_of_canaries.times do |i|
push_app(
logger, runner, "zero-downtime-canary#{i + 1}", {},
directory_name: 'zero-downtime/src/zero-downtime',
memory: '128M'
)
end
end
def push_aviary(logger, runner)
env = {
TARGET: @options.target,
USERNAME: @options.username,
PASSWORD: @options.password,
DOMAIN: @options.app_domain,
ORG: @options.organization,
SPACE: @options.space,
ZERO_DOWNTIME_NUM_INSTANCES: @options.number_of_zero_downtime_apps,
INSTANCES_CANARY_NUM_INSTANCES: @options.number_of_instances_canary_instances,
}
push_app(logger, runner, 'aviary', env)
end
def push_cpu_canary(logger, runner)
push_app(logger, runner, 'cpu', {}, memory: '512M')
end
def push_disk_canary(logger, runner)
push_app(logger, runner, 'disk', {SPACE: '768'}, memory: '512M')
end
def push_memory_canary(logger, runner)
push_app(logger, runner, 'memory', {MEMORY: '112M'})
end
def push_network_canary(logger, runner)
push_app(
logger, runner, 'network', {},
memory: '128M',
directory_name: 'network/src/network-canary')
end
def push_instances_canary(logger, runner)
push_app(
logger, runner, 'instances-canary', {
AVIARY: "aviary.#{@options.app_domain}"
},
instances: @options.number_of_instances_canary_instances,
memory: '128M',
directory_name: 'instances'
)
end
def push_long_running_canary(logger, runner)
push_app(
logger, runner, 'long-running-canary', {},
memory: '128M',
directory_name: 'long-running'
)
end
def push_app(logger, runner, name, env = {}, options = {})
directory_name = options.fetch(:directory_name, name)
instances = options.fetch(:instances, @options.number_of_instances_per_app)
memory = options.fetch(:memory, '256M')
buildpack = options.fetch(:buildpack, '')
logger.info "pushing #{name} canary"
if app_exists?(logger, runner, name)
logger.info 'skipping'
return
end
logger.info 'pushing!'
command =
[
"push #{name}",
"--no-start",
"-p #{canary_path(directory_name)}",
"-n #{name}",
"-d #{@options.app_domain}",
"-i #{instances}",
"-m #{memory}",
"-b '#{buildpack}'"
].join(' ')
runner.cf!(command)
if @options.diego
runner.cf!("set-env #{name} CF_DIEGO_RUN_BETA true")
end
env.each do |k, v|
command = "set-env #{name} #{k} '#{v}'"
if k == :PASSWORD
logger.info "Setting environment variable '#{k}' for app '#{name}'."
runner.cf!(command, :skip_logging_command => true, :hide_command_output => true)
logger.info "Succeeded setting environment variable."
else
runner.cf!(command)
end
end
runner.cf!("start #{name}")
end
def canary_path(name)
File.expand_path(name, canary_base_path)
end
def canary_base_path
File.expand_path('../../assets', File.dirname(__FILE__))
end
def app_exists?(logger, runner, name)
logger.info "checking for app #{name}"
begin
runner.cf!("app #{name}")
true
rescue RuntimeError => e
logger.error(e)
false
end
end
end
end
| 28.380117 | 187 | 0.603338 |
ff31cb899da22e7bc868f67f706e9deb53e9313f | 1,394 | $:.push File.expand_path("lib", __dir__)
# Maintain your gem's version:
require "decidim/meetings/extras/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |spec|
spec.name = "decidim-meetings-extras"
spec.version = Decidim::Meetings::Extras::VERSION
spec.authors = ["Alexandru Emil Lupu"]
spec.email = ["[email protected]"]
spec.homepage = "https://github.com/tremend-cofe/decidim-meetings-extras"
spec.summary = "Decidim meetings enhancements"
spec.description = "Decidim meetings enhancements"
spec.license = "AGPL-3.0"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
if spec.respond_to?(:metadata)
spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"
else
raise "RubyGems 2.0 or newer is required to protect against " \
"public gem pushes."
end
spec.files = Dir["{app,config,db,lib}/**/*", "MIT-LICENSE", "Rakefile", "README.md"]
spec.add_dependency "deface"
spec.add_dependency "decidim-admin"
spec.add_dependency "decidim-core"
spec.add_dependency "decidim-meetings"
spec.add_dependency "virtus"
spec.add_development_dependency "decidim-dev"
spec.add_development_dependency "decidim-proposals"
end
| 38.722222 | 96 | 0.721664 |
03013d8e0cee4783b7a820cbcada36300a251e55 | 901 | # frozen_string_literal: true
module Dynflow
  # Detached handle to a suspended action's run step. Holds only the
  # coordinates (world, execution plan id, step id) so events can be
  # delivered to the step without keeping the action object around.
  class Action::Suspended
    attr_reader :execution_plan_id, :step_id

    # Captures the coordinates of +action+ at suspension time.
    def initialize(action)
      @world = action.world
      @execution_plan_id, @step_id = action.execution_plan_id, action.run_step_id
    end

    # Schedules +event+ for this step at +time+ (nil means "deliver now");
    # +sent+ is the future resolved once the event has been accepted.
    def plan_event(event, time, sent = Concurrent::Promises.resolvable_future, optional: false)
      @world.plan_event(execution_plan_id, step_id, event, time, sent, optional: optional)
    end

    # Immediate delivery — plan_event without a scheduled time.
    def event(event, sent = Concurrent::Promises.resolvable_future, optional: false)
      # TODO: deprecate 2 levels backtrace (to know it's called from clock or internaly)
      # remove lib/dynflow/clock.rb ClockReference#ping branch condition on removal.
      plan_event(event, nil, sent, optional: optional)
    end

    # Operator sugar: `suspended << :poll` delivers an event (nil allowed).
    def <<(event = nil)
      event(event)
    end
    alias_method :ask, :event
  end
end
| 31.068966 | 95 | 0.693674 |
086c4636ac2e947fe8269536e8daaca4203f8eec | 2,694 | require 'spec_helper'
describe Agents::PublicTransportAgent do
before do
valid_params = {
"name" => "sf muni agent",
"options" => {
"alert_window_in_minutes" => "20",
"stops" => ['N|5221', 'N|5215'],
"agency" => "sf-muni"
}
}
@agent = Agents::PublicTransportAgent.new(valid_params)
@agent.user = users(:bob)
@agent.save!
end
describe "#check" do
before do
stub_request(:get, "http://webservices.nextbus.com/service/publicXMLFeed?a=sf-muni&command=predictionsForMultiStops&stops=N%7C5215").
with(:headers => {'User-Agent'=>'Typhoeus - https://github.com/typhoeus/typhoeus'}).
to_return(:status => 200, :body => File.read(Rails.root.join("spec/data_fixtures/public_transport_agent.xml")), :headers => {})
end
it "should create 4 events" do
lambda { @agent.check }.should change {@agent.events.count}.by(4)
end
it "should add 4 items to memory" do
time_travel_to Time.parse("2014-01-14 20:21:30 +0500") do
@agent.memory.should == {}
@agent.check
@agent.save
@agent.reload.memory.should == {"existing_routes" => [
{"stopTag"=>"5221", "tripTag"=>"5840324", "epochTime"=>"1389706393991", "currentTime"=>Time.now.to_s},
{"stopTag"=>"5221", "tripTag"=>"5840083", "epochTime"=>"1389706512784", "currentTime"=>Time.now.to_s},
{"stopTag"=>"5215", "tripTag"=>"5840324", "epochTime"=>"1389706282012", "currentTime"=>Time.now.to_s},
{"stopTag"=>"5215", "tripTag"=>"5840083", "epochTime"=>"1389706400805", "currentTime"=>Time.now.to_s}
]
}
end
end
it "should not create events twice" do
lambda { @agent.check }.should change {@agent.events.count}.by(4)
lambda { @agent.check }.should_not change {@agent.events.count}
end
it "should reset memory after 2 hours" do
time_travel_to Time.parse("2014-01-14 20:21:30 +0500") do
lambda { @agent.check }.should change {@agent.events.count}.by(4)
end
time_travel_to "2014-01-14 23:21:30 +0500".to_time do
@agent.cleanup_old_memory
lambda { @agent.check }.should change {@agent.events.count}.by(4)
end
end
end
describe "validation" do
it "should validate presence of stops" do
@agent.options['stops'] = nil
@agent.should_not be_valid
end
it "should validate presence of agency" do
@agent.options['agency'] = ""
@agent.should_not be_valid
end
it "should validate presence of alert_window_in_minutes" do
@agent.options['alert_window_in_minutes'] = ""
@agent.should_not be_valid
end
end
end
| 35.447368 | 139 | 0.622123 |
61bbcba6dd0acc52a9821dc4b5af00f3f979c8b9 | 988 | require 'active_support/concern'
module CloakPolicy
module Scorable
extend ActiveSupport::Concern
included do
has_many :scores, as: :scorable, dependent: :destroy
has_many :vectors, through: :scores, as: :scorable
end
def weight_for(vector_id)
array = scores.where(vector_id: vector_id)
array.empty? ? 100 : array.first.weight
end
def point_total_for(vector)
total = 0
case self.class.to_s
when "Setting"
platform.settings.each do |setting|
total += setting.weight_for(:vector)
end
total
end
end
def share_for(vector, platform=nil)
case self.class.to_s
when "Recommendation"
rs = recommendations_platforms.find_by(platform_id: platform.id)
(rs.weight_for(vector).to_f / point_total_for(vector).to_f * 100).round
when "Setting"
(weight_for(vector).to_f / point_total_for(vector).to_f * 100).round
end
end
end
end | 24.7 | 79 | 0.65081 |
287c381b832788ffc11900c3e7cbac2ddd5ea296 | 1,141 | require 'open-uri'
require 'uri'
require 'nokogiri'
# Simple depth-limited web crawler: fetches pages starting from each Root
# record, stores new pages as Source records, and follows in-scope links
# up to MAX_DEPTH levels deep.
class Crawler
  # Maximum link depth to follow (the redundant Integer#freeze was dropped).
  MAX_DEPTH = 5

  # Crawls every configured Root from its base URL.
  def self.run
    Root.all.each do |root|
      new(root.url, root).crawl
    end
  end

  # @param url   [String]  page to fetch
  # @param root  [Root]    crawl configuration (crawl_scope regex, id)
  # @param depth [Integer] current recursion depth, 1-based
  def initialize(url, root, depth = 1)
    @url = url
    @root = root
    @depth = depth
  end

  # Stores this page (unless already stored) and recurses into its links.
  def crawl
    store_content unless source_exists?
    crawl_links
  end

  # Fetched page body, memoized. Uses URI.open instead of Kernel#open:
  # Kernel#open spawns a subprocess when its argument starts with "|",
  # which is dangerous for URLs harvested from crawled pages. Any fetch
  # error degrades to an empty document (memoized, so a dead URL is not
  # refetched by this instance).
  def content
    @content ||= begin
                   URI.open(@url).read
                 rescue StandardError
                   ''
                 end
  end

  # Follows every in-scope href on the page, one level deeper.
  def crawl_links
    return unless @depth < MAX_DEPTH
    links.each do |link|
      href = link['href']
      next unless href
      next if %w(/ #).include? href.strip
      absolute_url = (uri + href.strip).to_s
      next unless absolute_url =~ /#{@root.crawl_scope}/
      Crawler.new(absolute_url, @root, @depth + 1).crawl
    end
  end

  # All anchor elements of the parsed page.
  def links
    doc.xpath('//a')
  end

  # Page title; empty string when the document has no <title> element
  # (the original raised NoMethodError on such pages).
  def title
    doc.at('//title')&.text.to_s
  end

  # Persists the page as a Source record.
  def store_content
    Source.create(content: content, url: @url, root_id: @root.id, title: title)
  end

  # Parsed form of @url, memoized; used to resolve relative hrefs.
  def uri
    @uri ||= URI.parse(@url)
  end

  # True when a Source with this URL already exists (exists? avoids the
  # COUNT(*) query the original issued).
  def source_exists?
    Source.where(url: @url).exists?
  end

  # Memoized Nokogiri parse of the page body.
  def doc
    @doc ||= Nokogiri::HTML(content)
  end
end
| 17.029851 | 79 | 0.601227 |
f8570d614e2d6f7bb4547163c3d01d37b7e773ff | 507 | require 'terminal-announce'
require 'bundler/cli'
module Gears
# Deals with the dependencies from Gears packages
module Dependencies
def self.install(from_path:)
install_gems from_path
end
def self.install_gems(package_path)
gemfile = "#{package_path}/Gemfile"
return unless File.exist? gemfile
name = File.basename(package_path).split('/').last
Announce.info "Installing gems for #{name}"
system "bundle install --gemfile #{gemfile}"
end
end
end
| 25.35 | 56 | 0.698225 |
0373c2a05c1ff159590777195fd0740d835f4dfd | 626 | # frozen_string_literal: true
require "forwardable"
module EveOnline
  module ESI
    # Wrapper for the ESI star endpoint: GET /v1/universe/stars/:star_id/.
    class UniverseStar < Base
      extend Forwardable
      # Path template; star_id is interpolated by #path via Kernel#format.
      API_PATH = "/v1/universe/stars/%<star_id>s/"
      # Star id this instance was built for.
      attr_reader :id
      # @param options [Hash] must contain :id (the star id); everything
      #   else is forwarded to Base#initialize via +super+.
      def initialize(options)
        super
        @id = options.fetch(:id)
      end
      # Read-only attributes delegated to the parsed model.
      def_delegators :model, :as_json, :age, :luminosity, :name, :radius,
        :solar_system_id, :spectral_class, :temperature, :type_id
      # Memoized model built from the HTTP response (response comes from Base).
      def model
        @model ||= Models::Star.new(response)
      end
      # Empty: this request declares no auth scope (returns nil —
      # presumably a public endpoint; confirm against Base's usage).
      def scope
      end
      # Concrete request path for this star.
      def path
        format(API_PATH, star_id: id)
      end
    end
  end
end
| 17.388889 | 73 | 0.605431 |
187daf9ad8e356852f2a77e56cff197c61156086 | 368 | # frozen_string_literal: true
# Page object representing the cart page of the SauceDemo site.
class CartPage < SitePrism::Page
  # First item-name element and first cart-row element on the page.
  element :list_cart_name, '.inventory_item_name'
  element :list_cart_item, '.cart_item'
  # Clicks the REMOVE button when the listed item name contains +name+.
  # NOTE(review): `list_cart_name` resolves to a single element, so with
  # several items in the cart only the first name is checked and the first
  # matching REMOVE button is clicked — confirm intended behaviour.
  def remove_item(name)
    find('button[class="btn_secondary cart_button"]', text: 'REMOVE', visible: true).click if list_cart_name.text.include?(name)
  end
end
| 30.666667 | 128 | 0.766304 |
18f804c7daf37b576a88b46dc7e03e290006c51a | 791 | require File.expand_path('../../../spec_helper', __FILE__)
require 'cgi'
describe "CGI.escapeElement when passed String, elements, ..." do
it "escapes only the tags of the passed elements in the passed String" do
res = CGI.escapeElement('<BR><A HREF="url"></A>', "A", "IMG")
res.should == "<BR><A HREF="url"></A>"
res = CGI.escapeElement('<BR><A HREF="url"></A>', ["A", "IMG"])
res.should == "<BR><A HREF="url"></A>"
end
it "is case-insensitive" do
res = CGI.escapeElement('<BR><A HREF="url"></A>', "a", "img")
res.should == '<BR><A HREF="url"></A>'
res = CGI.escapeElement('<br><a href="url"></a>', "A", "IMG")
res.should == '<br><a href="url"></a>'
end
end
| 37.666667 | 75 | 0.59292 |
f86390e4839c6d88ddd8a3f60478d7a66a775c06 | 1,574 | class ContainerNodeController < ApplicationController
include ContainersCommonMixin
include ContainersExternalLoggingSupportMixin
include Mixins::BreadcrumbsMixin
before_action :check_privileges
before_action :get_session_data
after_action :cleanup_action
after_action :set_session_data
def show_list
process_show_list(:named_scope => :active)
end
def textual_group_list
[
%i[properties container_labels compliance miq_custom_attributes],
%i[relationships conditions smart_management]
]
end
helper_method :textual_group_list
def show_ad_hoc_metrics
if @record && @record.try(:ems_id)
ems = find_record_with_rbac(ExtManagementSystem, @record.ems_id)
tags = {:type => "node", :hostname => @record.name}.to_json
redirect_to(polymorphic_path(ems, :display => "ad_hoc_metrics", :tags => tags))
end
end
def self.custom_display_modes
%w[ad_hoc_metrics]
end
def download_data
assert_privileges('container_node_show_list')
super
end
def download_summary_pdf
assert_privileges('container_node_show')
super
end
def breadcrumbs_options
{
:breadcrumbs => [
{:title => _("Compute")},
{:title => _("Containers")},
{:title => _("Nodes"), :url => controller_url},
],
}
end
menu_section :cnt
feature_for_actions "#{controller_name}_show_list", *ADV_SEARCH_ACTIONS
feature_for_actions "#{controller_name}_timeline", :tl_chooser
feature_for_actions "#{controller_name}_perf", :perf_top_chart
has_custom_buttons
end
| 24.984127 | 85 | 0.724269 |
7a4bad07784474a120f5adec108c42e52e96fc8a | 343 | max_threads_count = ENV.fetch("RAILS_MAX_THREADS") { 5 }
# Minimum thread count defaults to the maximum (set on the previous line
# from RAILS_MAX_THREADS), giving a fixed-size pool unless overridden.
min_threads_count = ENV.fetch("RAILS_MIN_THREADS") { max_threads_count }
threads min_threads_count, max_threads_count
# Listening port, Rails environment and pidfile — each overridable via ENV.
port ENV.fetch("PORT") { 3000 }
environment ENV.fetch("RAILS_ENV") { "development" }
pidfile ENV.fetch("PIDFILE") { "tmp/pids/server.pid" }
# Puma plugin that restarts the server when tmp/restart.txt is touched
# (used by `rails restart`).
plugin :tmp_restart
| 38.111111 | 72 | 0.749271 |
87909c7517d29c534b05a26270cb4d5ee96e578e | 2,125 | require "spec_helper"
describe 'build_taxon_concept (spec helper method)' do
before(:all) do
load_foundation_cache
@event = HarvestEvent.gen
@scientific_name = 'Something cool'
@hierarchy = Hierarchy.gen
@taxon_concept = build_taxon_concept(comments: [], toc: [], bhl: [], images: [], sounds: [], youtube: [], flash: [])
@taxon_concept_with_args = build_taxon_concept(
:hierarchy => @hierarchy,
:event => @event,
scientific_name: @scientific_name,
comments: [],
toc: [],
bhl: [],
images: [],
sounds: [],
youtube: [],
flash: []
)
@taxon_concept_naked = build_taxon_concept(
images: [], toc: [], flash: [], youtube: [], comments: [], bhl: []
)
EOL::Solr::DataObjectsCoreRebuilder.begin_rebuild
end
it 'should not have a common name by defaut' do
@taxon_concept.preferred_common_name_in_language(Language.default).blank?.should be_true
end
it 'should put all new hierarchy_entries under the default hierarchy if none supplied' do
@taxon_concept.hierarchy_entries.each do |he|
he.hierarchy.should == Hierarchy.default
end
end
it 'should put all new hierarchy_entries under the hierarchy supplied' do
@taxon_concept_with_args.hierarchy_entries.each do |he|
he.hierarchy.should == @hierarchy
end
end
it 'should use default HarvestEvent if no alternative provided' do
@taxon_concept.images_from_solr(100).each do |img|
expect(Array(img.harvest_events.sort)).to \
eq(Array(default_harvest_event).sort)
end
end
it 'should use the supplied HarvestEvent to create all data objects' do
@taxon_concept_with_args.images_from_solr(100).each do |img|
expect(Array(img.harvest_events.sort)).to \
eq([@event])
end
end
it 'should create a scientific name' do
@taxon_concept_naked.title.should_not be_nil
@taxon_concept_naked.title.should_not == ''
end
it 'should create a scientific name when specified' do
@taxon_concept_with_args.title.should == @scientific_name
end
end
| 30.797101 | 122 | 0.672941 |
9197252c44ce0a055b36b43da29da459f00e71cf | 380 | require 'base_kde_formula'
# Formula for KDE's poxml package, built on the shared KDE formula base.
class Poxml < BaseKdeFormula
  homepage 'http://www.kde.org/'
  # Stable release tarball and its checksum.
  url 'http://download.kde.org/stable/4.11.4/src/poxml-4.11.4.tar.xz'
  sha1 'e73ae271f82ff077b454ce6c72a5f4ba4dc108aa'
  # Newer development-channel release (4.12.0) with its own checksum.
  devel do
    url 'http://download.kde.org/stable/4.12.0/src/poxml-4.12.0.tar.xz'
    sha1 '30d8d0f06a47965f336fd5428326a1c92fcaa7be'
  end
  depends_on 'kdelibs'
end
| 25.333333 | 71 | 0.742105 |
ed2a1ed41765f15424f327e536d2749b6707448d | 1,110 | class MutableVote < ActiveRecord::Base
include VoteBehaviour
belongs_to :voter
belongs_to :candidate
belongs_to :election
validates_uniqueness_of :voter,
null: false,
scope: :election_id
validates_presence_of :candidate_id,
:election_id,
:voter_id
validate :candidate_belongs_to_election
def self.update_or_create_by(voter_id:, election_id:, candidate_id:)
existing = self.find_by_voter_id_and_election_id voter_id, election_id
if existing.present?
existing.candidate_id = candidate_id
existing.save
return existing
else
return self.create voter_id: voter_id,
election_id: election_id,
candidate_id: candidate_id
end
end
protected
def candidate_belongs_to_election
Election.find(election_id).candidates.find(candidate_id)
rescue ActiveRecord::RecordNotFound
errors.add(:candidate, "Candidate #{candidate_id} does not belong to election #{election_id}")
nil
end
end
| 26.428571 | 98 | 0.66036 |
ac8c76bf2474823cfdec0ca48ca6a0564c6689c7 | 68 | # frozen_string_literal: true
# Top-level namespace for the Alain gem.
module Alain
  # Gem version, referenced by the gemspec.
  VERSION = "0.5.0"
end
| 11.333333 | 29 | 0.720588 |
7975dcffba274e62e638e1c6e7c612c369d2a506 | 1,135 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
module MetasploitModule
CachedSize = 1468324
include Msf::Payload::Single
include Msf::Sessions::MeterpreterOptions
include Msf::Sessions::MettleConfig
def initialize(info = {})
super(
update_info(
info,
'Name' => 'Linux Meterpreter, Reverse HTTP Inline',
'Description' => 'Run the Meterpreter / Mettle server payload (stageless)',
'Author' => [
'Adam Cammack <adam_cammack[at]rapid7.com>',
'Brent Cook <brent_cook[at]rapid7.com>',
'timwr'
],
'Platform' => 'linux',
'Arch' => ARCH_MIPSBE,
'License' => MSF_LICENSE,
'Handler' => Msf::Handler::ReverseHttp,
'Session' => Msf::Sessions::Meterpreter_mipsbe_Linux
)
)
end
def generate
opts = {
scheme: 'http',
stageless: true
}
MetasploitPayloads::Mettle.new('mips-linux-muslsf', generate_config(opts)).to_binary :exec
end
end
| 26.395349 | 94 | 0.595595 |
384a0b3d2c815997d561fa5b86befd79ad720590 | 1,585 | Pod::Spec.new do |s|
s.name = "CrashReporterAC"
s.version = "0.3.0"
s.summary = "Introduces the missing Crash Dialog for Microsofts AppCenter."
s.description = <<-DESC
A bug report contains device logs, stack traces, and other diagnostic information to help you find and fix bugs in your app. It should also include user feedback that helps you to reproduce the issue. Unfortunately that's not part of Microsoft's AppCenter implementation for macOS. However there are APIs that allow you to send text attachments with each crash. CrashReporterAC asks the user for feedback and submits it with the crash details to AppCenter.
DESC
s.homepage = "https://github.com/Iomegan/CrashReporterAC"
s.screenshots = 'https://raw.githubusercontent.com/Iomegan/CrashReporterAC/master/Screenshot.png', 'https://raw.githubusercontent.com/Iomegan/CrashReporterAC/master/Screenshot2.png'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { "Daniel Witt" => "[email protected]" }
s.social_media_url = 'https://twitter.com/witt_software'
s.platform = :osx, "10.10"
s.osx.deployment_target = "10.10"
s.source = { :git => "https://github.com/Iomegan/CrashReporterAC.git", :tag => "0.3.0" }
s.source_files = 'Sources/*.{swift}'
s.resource_bundle = { "CrashReporterACResources" => ["Sources/*.lproj/*.strings", "Sources/*.lproj/*.xib"] }
s.dependency 'AppCenter/Crashes'
s.requires_arc = true
s.static_framework = true
end
| 72.045455 | 479 | 0.671924 |
0339bae1b4bffa6c3cf41e289235c90f897aad49 | 148 | FactoryGirl.define do
factory :establishment do
sequence(:name) {|n| "Establishment #{n}"}
sequence(:short_name) {|n| "SSS#{n}"}
end
end | 24.666667 | 46 | 0.655405 |
01a7807a1df08f992895aa2729fbf16739d69c8c | 78 | class AddIndex < ActiveRecord::Migration
def up
end
def down
end
end
| 9.75 | 40 | 0.705128 |
e81475b89abd975dc03d9b0fa8e8b0e1227d0057 | 3,107 | # Cloud Foundry Java Buildpack
# Copyright 2013-2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'java_buildpack/component/modular_component'
require 'java_buildpack/container'
require 'java_buildpack/container/tomcat/tomcat_insight_support'
require 'java_buildpack/container/tomcat/tomcat_instance'
require 'java_buildpack/container/tomcat/tomcat_external_configuration'
require 'java_buildpack/container/tomcat/tomcat_lifecycle_support'
require 'java_buildpack/container/tomcat/tomcat_logging_support'
require 'java_buildpack/container/tomcat/tomcat_access_logging_support'
require 'java_buildpack/container/tomcat/tomcat_redis_store'
require 'java_buildpack/util/java_main_utils'
module JavaBuildpack
module Container
# Encapsulates the detect, compile, and release functionality for Tomcat applications.
class Tomcat < JavaBuildpack::Component::ModularComponent
protected
# (see JavaBuildpack::Component::ModularComponent#command)
def command
@droplet.java_opts.add_system_property 'http.port', '$PORT'
[
@droplet.environment_variables.as_env_vars,
@droplet.java_home.as_env_var,
@droplet.java_opts.as_env_var,
'exec',
"$PWD/#{(@droplet.sandbox + 'bin/catalina.sh').relative_path_from(@droplet.root)}",
'run'
].flatten.compact.join(' ')
end
# (see JavaBuildpack::Component::ModularComponent#sub_components)
def sub_components(context)
components = [
TomcatInstance.new(sub_configuration_context(context, 'tomcat')),
TomcatLifecycleSupport.new(sub_configuration_context(context, 'lifecycle_support')),
TomcatLoggingSupport.new(sub_configuration_context(context, 'logging_support')),
TomcatAccessLoggingSupport.new(sub_configuration_context(context, 'access_logging_support')),
TomcatRedisStore.new(sub_configuration_context(context, 'redis_store')),
TomcatInsightSupport.new(context)
]
tomcat_configuration = @configuration['tomcat']
components << TomcatExternalConfiguration.new(sub_configuration_context(context, 'external_configuration')) if
tomcat_configuration['external_configuration_enabled']
components
end
# (see JavaBuildpack::Component::ModularComponent#supports?)
def supports?
web_inf? && !JavaBuildpack::Util::JavaMainUtils.main_class(@application)
end
private
def web_inf?
(@application.root + 'WEB-INF').exist?
end
end
end
end
| 37.890244 | 118 | 0.738977 |
d5f196b5f153dd335a4dc0ba9ea6275e2316ac2b | 629 | class RelationshipsController < ApplicationController
before_action :logged_in_user
# POST /relationships
def create
@user = User.find(params[:followed_id])
current_user.follow(@user)
respond_to do |format|
format.html { redirect_to @user }
format.js # => app/views/relationships/create.js.erb
end
end
# DELETE /relationships/:id
def destroy
@user = Relationship.find(params[:id]).followed
current_user.unfollow(@user)
respond_to do |format|
format.html { redirect_to @user }
format.js # => app/views/relationships/destroy.js.erb
end
end
end
| 25.16 | 62 | 0.677266 |
038ee2412d8936ba4b4dfa0dcd8569fd5ab0ad59 | 936 | cask 'boostnote' do
version '0.15.3'
sha256 '6ab1815ff0eb555b2dc01239243f849541f072eb51325fb57a0d5008e40713d7'
# github.com/BoostIO/boost-releases/ was verified as official when first introduced to the cask
url "https://github.com/BoostIO/boost-releases/releases/download/v#{version}/Boostnote-mac.zip"
appcast 'https://github.com/BoostIO/boost-releases/releases.atom'
name 'Boostnote'
homepage 'https://boostnote.io/'
auto_updates true
app 'Boostnote.app'
uninstall signal: [
['TERM', 'com.maisin.boost.helper'],
['TERM', 'com.maisin.boost'],
]
zap trash: [
'~/Library/Application Support/boost',
'~/Library/Preferences/com.maisin.boost.plist',
'~/Library/Preferences/com.maisin.boost.helper.plist',
'~/Library/Saved Application State/com.maisin.boost.savedState',
]
end
| 34.666667 | 97 | 0.642094 |
f7fcb5f430bdb1340398d4097876d5f9335d08bd | 4,512 | #
# Deploys OpsWorks CKAN App to jobs layer
#
# Copyright 2021, Queensland Government
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
service "supervisord" do
action :stop
end
include_recipe "datashades::stackparams"
include_recipe "datashades::ckan-deploy"
service_name = "ckan"
app = search("aws_opsworks_app", "shortname:#{node['datashades']['app_id']}-#{node['datashades']['version']}*").first
if not app
app = search("aws_opsworks_app", "shortname:#{service_name}-#{node['datashades']['version']}*").first
end
config_dir = "/etc/ckan/default"
config_file = "#{config_dir}/production.ini"
shared_fs_dir = "/var/shared_content/#{app['shortname']}"
virtualenv_dir = "/usr/lib/ckan/default"
pip = "#{virtualenv_dir}/bin/pip --cache-dir=/tmp/"
ckan_cli = "#{virtualenv_dir}/bin/ckan_cli"
install_dir = "#{virtualenv_dir}/src/#{service_name}"
# Setup Site directories
#
paths = {
"/var/log/#{service_name}" => "#{service_name}",
"#{shared_fs_dir}" => "#{service_name}",
}
paths.each do |nfs_path, dir_owner|
directory nfs_path do
owner dir_owner
group "#{service_name}"
recursive true
mode '0775'
action :create
end
end
#
# Create job worker config files.
#
cookbook_file "/etc/supervisor/conf.d/supervisor-ckan-worker.conf" do
source "supervisor-ckan-worker.conf"
owner "root"
group "root"
mode "0644"
end
service "supervisord" do
action [:enable]
end
# Set up maintenance cron jobs
cookbook_file "/usr/local/bin/archive-resource-revisions.sql" do
source "archive-resource-revisions.sql"
owner "root"
group "root"
mode "0644"
end
cookbook_file "/usr/local/bin/archive-resource-revisions.sh" do
source "archive-resource-revisions.sql"
owner "root"
group "root"
mode "0755"
end
template "/usr/local/bin/pick-job-server.sh" do
source "pick-job-server.sh.erb"
owner "root"
group "root"
mode "0755"
end
# Remove unwanted cron job
file '/etc/cron.daily/ckan-tracking-update' do
action :delete
end
# Remove unwanted cron job from higher environments
file '/etc/cron.hourly/ckan-tracking-update' do
action :delete
not_if { node['datashades']['version'] == 'DEV' || node['datashades']['version'] == 'TEST' }
end
# Only set cron job for lower environments
file '/etc/cron.hourly/ckan-tracking-update' do
content "/usr/local/bin/pick-job-server.sh && #{ckan_cli} tracking update >/dev/null 2>&1\n"
mode '0755'
owner "root"
group "root"
only_if { node['datashades']['version'] == 'DEV' || node['datashades']['version'] == 'TEST' }
end
# Run tracking update at 8:30am everywhere
file "/etc/cron.d/ckan-tracking-update" do
content "30 8 * * * root /usr/local/bin/pick-job-server.sh && #{ckan_cli} tracking update >/dev/null 2>&1\n"
mode '0644'
owner "root"
group "root"
end
# Run dataset require updates notifications at 7am and 7:15am on batch
if File.foreach(config_file).grep(/^\s*ckan[.]plugins\s*=.*\bdata_qld(_integration)?\b/).any?
file "/etc/cron.d/ckan-dataset-notification-due" do
content "00 7 * * MON root /usr/local/bin/pick-job-server.sh && PASTER_PLUGIN=ckanext-data-qld #{ckan_cli} send_email_dataset_due_to_publishing_notification >/dev/null 2>&1\n"\
"15 7 * * MON root /usr/local/bin/pick-job-server.sh && PASTER_PLUGIN=ckanext-data-qld #{ckan_cli} send_email_dataset_overdue_notification >/dev/null 2>&1\n"
mode '0644'
owner "root"
group "root"
end
end
file "/etc/cron.hourly/ckan-email-notifications" do
content "/usr/local/bin/pick-job-server.sh && curl -d '{}' #{app['domains'][0]}#{node['datashades']['ckan_web']['endpoint']}api/action/send_email_notifications > /dev/null 2>&1\n"
mode '0755'
owner "root"
group "root"
end
file "/etc/cron.daily/ckan-revision-archival" do
content "/usr/local/bin/pick-job-server.sh && /usr/local/bin/archive-resource-revisions.sh >/dev/null 2>&1\n"
mode '0755'
owner "root"
group "root"
end
| 30.486486 | 184 | 0.691046 |
d57a5db851c7aea084c9878f6dec5acfc252c75b | 702 | cask 'qbittorrent' do
version '3.3.12'
sha256 '85296a52d45d5ed814e3a66c7c687d95b192d80bfaa9821ac4a97be7e2f791cf'
# sourceforge.net/qbittorrent was verified as official when first introduced to the cask
url "https://downloads.sourceforge.net/qbittorrent/qbittorrent-mac/qbittorrent-#{version}/qbittorrent-#{version}.dmg"
appcast 'https://sourceforge.net/projects/qbittorrent/rss?path=/qbittorrent-mac',
checkpoint: '23d7672f0e057c154f7e398e579834f05152e8fe9eef4171b6464ac9f56ff453'
name 'qBittorrent'
homepage 'https://www.qbittorrent.org/'
# Renamed for consistency: app name is different in the Finder and in a shell.
app 'qbittorrent.app', target: 'qBittorrent.app'
end
| 46.8 | 119 | 0.787749 |
1d3b426c307c209d1c2c8101d3b5d42f6c619ff3 | 330 | # frozen_string_literal: true
module FoxPage
module Refinements
module Underscore
refine String do
def underscore
gsub(/([a-z\d])([A-Z])/, '\1_\2').downcase
end
end
refine Symbol do
def underscore
to_s.underscore.to_sym
end
end
end
end
end
| 16.5 | 52 | 0.563636 |
61ebf91c21204cfce2d2d2ddf2e08632fec11f29 | 192 | class CreateComments < ActiveRecord::Migration[5.2]
def change
create_table :comments do |t|
t.text :content
t.integer :user_id
t.integer :post_id
end
end
end
| 19.2 | 51 | 0.651042 |
e892dc88f6d9a1fea0047e4a8cbc8132fd88c34b | 1,906 | require_dependency 'mailer'
module RedmineDiffEmail
module Patches
module MailerPatch
def self.included(base)
base.send(:include, InstanceMethods)
base.class_eval do
unloadable
end
end
module InstanceMethods
def changeset_added(changeset, is_attached)
@project = changeset.repository.project
@author = changeset.user unless changeset.user.nil?
@author_s = @author.nil? ? changeset.author.to_s : @author.login
redmine_headers 'Project' => @project.identifier,
'Committer' => @author_s,
'Revision' => changeset.revision
to = @project.notified_users.select {
|u| u.allowed_to?(:view_changesets, @project)
}.collect {
|u| u.mail
}
Rails.logger.info "mailing changeset to " + to.to_sentence
subject = "[#{@project.name}: #{l(:label_repository)}] #{changeset.format_identifier} #{changeset.short_comments}"
@is_attached = is_attached
@changeset = changeset
@changed_files = @changeset.repository.changed_files("", @changeset.revision)
diff = @changeset.repository.diff("", @changeset.revision, nil)
@changeset_url = url_for(controller: 'repositories', action: 'revision', rev: @changeset.revision, id: @project, repository_id: changeset.repository)
set_language_if_valid @changeset.user.language unless changeset.user.nil?
if !diff.nil? && @is_attached
attachments["changeset_r#{changeset.revision}.diff"] = diff.join
end
mail to: to,
subject: subject
end
end
end
end
end
unless Mailer.included_modules.include?(RedmineDiffEmail::Patches::MailerPatch)
Mailer.send(:include, RedmineDiffEmail::Patches::MailerPatch)
end
| 29.323077 | 159 | 0.623295 |
ac2e4ef99c9edd62024aa22a4bc5162ed245570d | 566 | module Kuby::KubeDB::DSL::API::V1
class S3Spec < ::KubeDSL::DSLObject
value_field :prefix
value_field :endpoint
value_field :bucket
validates :prefix, field: { format: :string }, presence: false
validates :endpoint, field: { format: :string }, presence: false
validates :bucket, field: { format: :string }, presence: false
def serialize
{}.tap do |result|
result[:prefix] = prefix
result[:endpoint] = endpoint
result[:bucket] = bucket
end
end
def kind_sym
:s3_spec
end
end
end
| 23.583333 | 68 | 0.627208 |
e8f165b47f2f6f9c6678c403bd7ce85e8eaea09e | 139 | class Article < ApplicationRecord
attribute :markdown
attribute :rendered
has_many_media
belongs_to :owner, polymorphic: true
end
| 17.375 | 38 | 0.791367 |
87d4ea00ec58f7e34b521e7b86a396b3ce553ef7 | 6,407 | # frozen_string_literal: true
require "support/vacols_database_cleaner"
require "rails_helper"
feature "Intake Add Issues Page", :all_dbs do
include IntakeHelpers
before do
setup_intake_flags
end
let(:veteran_file_number) { "123412345" }
let(:veteran) do
Generators::Veteran.build(file_number: veteran_file_number, first_name: "Ed", last_name: "Merica")
end
let(:profile_date) { 10.days.ago }
let(:promulgation_date) { 9.days.ago.to_date }
let!(:rating) do
Generators::Rating.build(
participant_id: veteran.participant_id,
promulgation_date: promulgation_date,
profile_date: profile_date,
issues: [
{ reference_id: "abc123", decision_text: "Left knee granted" },
{ reference_id: "def456", decision_text: "PTSD denied" },
{ reference_id: "def789", decision_text: "Looks like a VACOLS issue" }
]
)
end
context "check for correct time zone" do
scenario "when rating is added" do
start_higher_level_review(veteran)
visit "/intake"
click_intake_continue
expect(page).to have_current_path("/intake/add_issues")
click_intake_add_issue
add_intake_rating_issue("Left knee granted")
expect(page).not_to have_content("When you finish making changes, click \"Save\" to continue")
expect(page).to have_content("1. Left knee granted\nDecision date: #{promulgation_date.mdY}")
end
end
context "for an Appeal" do
context "when there is an invalid veteran" do
let!(:veteran) do
Generators::Veteran.build(
file_number: "25252525",
sex: nil,
ssn: nil,
country: nil,
address_line1: "this address is more than 20 chars"
)
end
scenario "check invalid veteran alert if any added issues are a VBMS benefit type" do
start_appeal(veteran)
visit "/intake"
click_intake_continue
expect(page).to have_current_path("/intake/add_issues")
# Add issue that is not a VBMS issue
click_intake_add_issue
click_intake_no_matching_issues
add_intake_nonrating_issue(
benefit_type: "Education",
category: "Accrued",
description: "Description for Accrued",
date: 1.day.ago.to_date.mdY
)
expect(page).to have_content("Description for Accrued")
expect(page).to_not have_content("The Veteran's profile has missing or invalid information")
expect(page).to have_button("Establish appeal", disabled: false)
# Add a rating issue
click_intake_add_issue
add_intake_rating_issue("Left knee granted")
expect(page).to have_content("The Veteran's profile has missing or invalid information")
expect(page).to have_content(
"the corporate database, then retry establishing the EP in Caseflow: country."
)
expect(page).to have_content("This Veteran's address is too long. Please edit it in VBMS or SHARE")
expect(page).to have_button("Establish appeal", disabled: true)
click_remove_intake_issue_by_text("Left knee granted")
expect(page).to_not have_content("The Veteran's profile has missing or invalid information")
expect(page).to have_button("Establish appeal", disabled: false)
# Add a compensation nonrating issue
click_intake_add_issue
click_intake_no_matching_issues
add_intake_nonrating_issue(
benefit_type: "Compensation",
category: "Apportionment",
description: "Description for Apportionment",
date: 2.days.ago.to_date.mdY
)
expect(page).to have_content("Description for Apportionment")
expect(page).to have_content("The Veteran's profile has missing or invalid information")
expect(page).to have_button("Establish appeal", disabled: true)
end
end
end
context "when edit contention text feature is enabled" do
before { FeatureToggle.enable!(:edit_contention_text) }
it "Allows editing contention text on intake" do
start_higher_level_review(veteran)
visit "/intake"
click_intake_continue
click_intake_add_issue
add_intake_rating_issue("Left knee granted")
edit_contention_text("Left knee granted", "Right knee")
expect(page).to_not have_content("Left knee granted")
expect(page).to have_content("Right knee")
click_intake_finish
expect(page).to have_content("Request for #{Constants.INTAKE_FORM_NAMES.higher_level_review} has been processed.")
expect(page).to have_content("Right knee")
expect(RequestIssue.where(edited_description: "Right knee")).to_not be_nil
end
end
context "check that none of these match works for VACOLS issue" do
before do
setup_legacy_opt_in_appeals(veteran.file_number)
end
scenario "User selects a vacols issue, then changes to none of these match" do
start_appeal(veteran, legacy_opt_in_approved: true)
visit "/intake/add_issues"
click_intake_add_issue
add_intake_rating_issue("Left knee granted")
expect(page).to have_content("Does issue 1 match any of these VACOLS issues?")
find("label", text: "intervertebral disc syndrome").click
find("label", text: "None of these match").click
safe_click ".add-issue"
expect(page).to have_content("Left knee granted\nDecision date")
expect(page).to_not have_content(
"Left knee granted is ineligible because the same issue is under review as a Legacy Appeal"
)
end
end
context "When the user adds an untimely issue" do
before do
Generators::Rating.build(
participant_id: veteran.participant_id,
promulgation_date: 2.years.ago,
profile_date: 2.years.ago,
issues: [
{ reference_id: "untimely", decision_text: "Untimely Issue" }
]
)
end
scenario "When the user selects untimely exemption it shows untimely exemption notes" do
start_appeal(veteran, legacy_opt_in_approved: true)
visit "/intake/add_issues"
click_intake_add_issue
add_intake_rating_issue("Untimely Issue")
expect(page).to_not have_content("Notes")
expect(page).to have_content("Issue 1 is an Untimely Issue")
find("label", text: "Yes").click
expect(page).to have_content("Notes")
end
end
end
| 36.19774 | 120 | 0.68472 |
ac4e7b948b41573b05489c1a98d3f2cde30ae90e | 820 | Pod::Spec.new do |s|
s.name = 'ZLPhotoBrowser'
s.version = '2.7.4'
s.summary = 'A simple way to multiselect photos from ablum, force touch to preview photo, support portrait and landscape, edit photo, multiple languages(Chinese,English,Japanese)'
s.homepage = 'https://github.com/longitachi/ZLPhotoBrowser'
s.license = 'MIT'
s.platform = :ios
s.author = {'longitachi' => '[email protected]'}
s.ios.deployment_target = '8.0'
s.source = {:git => 'https://github.com/longitachi/ZLPhotoBrowser.git', :tag => s.version}
s.source_files = 'PhotoBrowser/**/*.{h,m}'
s.resources = 'PhotoBrowser/resource/*.{png,xib,nib,bundle}'
s.requires_arc = true
s.frameworks = 'UIKit','Photos','PhotosUI'
s.dependency 'SDWebImage'
s.dependency 'GPUImage'
end
| 39.047619 | 186 | 0.656098 |
4ada1fc2d08fa2c3dc3a8f1d7248b2ec26fcae76 | 477 | Given /^an outline with the title "([^\"]*)"$/ do |title|
outline = {:kind => 'Outline', :title => title}
RestClient.put "#{host}/#{database}/#{title}", outline.to_json
end
Given /^an outline with the title "([^\"]*)" created "([^\"]*)" minutes ago$/ do |title, minutes|
outline = {:kind => 'Outline', :title => title, :created_at => (Time.now - minutes.to_i*60).strftime("%Y/%m/%d %H:%M:%S +0000")}
RestClient.put "#{host}/#{database}/#{title}", outline.to_json
end
| 43.363636 | 130 | 0.603774 |
18402afc89a39e64a1e73052e53bd3427f9340f8 | 916 | module GitAnalysis
# prints information about the repository
class Printer
attr_reader :repo
attr_reader :open_pr_count
attr_reader :closed_pr_count
def initialize(repo_object, open_pr_count, closed_pr_count)
@repo = repo_object
@open_pr_count = open_pr_count
@closed_pr_count = closed_pr_count
end
# print repo ID, name, owner, language
def basic_info
"ID: #{@repo.id}\nName: #{@repo.name}\nOwner: #{@repo.owner}\nLanguage: #{@repo.language}\n"
end
# print the number of open and closed pull requests
def num_prs
"Open PRs: #{open_pr_count}\nClosed PRs: #{closed_pr_count}\n"
end
# for each PR, print the size
def pr_size(pr_object)
"PR #{pr_object.number}\n Files: #{pr_object.file_count}\n Additions: #{pr_object.additions}\n Deletions: #{pr_object.deletions}\n Changes: #{pr_object.changes}\n"
end
end
end
| 30.533333 | 173 | 0.686681 |
2138f12a3f24d7d6b0cd6749dc1b2bd3a88f240e | 881 | # frozen_string_literal: true
lib = File.expand_path("lib", __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "break/version"
Gem::Specification.new do |spec|
spec.name = "break"
spec.version = Break::VERSION
spec.authors = ["Genadi Samokovarov"]
spec.email = ["[email protected]"]
spec.summary = "Lightweight Ruby debugger"
spec.description = "Lightweight Ruby debugger written in plain Ruby using the TracePoint API"
spec.homepage = "https://github.com/gsamokovarov/break"
spec.license = "MIT"
spec.files = Dir["CHANGELOG.md", "Rakefile", "README.md", "LICENSE.txt", "lib/**/*"]
spec.required_ruby_version = ">= 2.5.0"
spec.add_development_dependency "bundler", "~> 2.0"
spec.add_development_dependency "minitest", "~> 5.11"
spec.add_development_dependency "rake", ">= 12.3.3"
end
| 33.884615 | 95 | 0.684449 |
6ac605563f5ff900a0a05801a87070ea63925632 | 967 | # vim: syntax=ruby:expandtab:shiftwidth=2:softtabstop=2:tabstop=2
#
# Copyright (c) 2012-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
default['fb_rsync'] = {
'server' => {
'enabled' => true,
'start_at_boot' => true,
},
'rsyncd.conf' => {
'enabled_modules' => [],
'global' => {
'gid' => 'root',
'log file' => '/var/log/rsyncd.log',
'log format' => '[%h %a] (%u) %o %m::%f %l (%b)',
'pid file' => '/var/run/rsyncd.pid',
'timeout' => '600',
'transfer logging' => 'yes',
'uid' => 'root',
'use chroot' => 'yes',
},
'modules' => {},
},
'rsync_server' => nil,
'rsync_command' =>
'rsync -avz --timeout=60 --delete --partial --partial-dir=.rsync-partial',
}
| 29.30303 | 78 | 0.580145 |
4aee8abdec313276af7f658edec45effb9458962 | 650 | class AlignmentsController < ApplicationController
include AlignmentsManager::BuildAlignments
def getPDBalignment
pdbId = params[:name]
info = fetchPDBalignment(pdbId)
return render json: info, status: :ok
end
def getPDBcoverage
pdbId_ch = params[:name]
info = fetchPDBcoverage(pdbId_ch)
return render json: info, status: :ok
end
def getPDBalignmentJSONP
pdbId = params[:name]
if pdbId == "undefined"
toReturnInfo = ""
else
info = fetchPDBalignment(pdbId)
toReturnInfo = "processAlignment("+info.to_json+")"
end
return render text: toReturnInfo, status: :ok
end
end
| 22.413793 | 57 | 0.690769 |
1cde4394433a3cb663665c43a7011455a5dd56ce | 1,146 | # Copyright 2014 Rico Simke, Leipzig University Library
# http://www.ub.uni-leipzig.de
#
# This code is the result of the project "Die Bibliothek der Milliarden Wörter".
# This project is funded by the European Social Fund. "Die Bibliothek der
# Milliarden Wörter" is a cooperation project between the Leipzig University
# Library, the Natural Language Processing Group at the Institute of Computer
# Science at Leipzig University, and the Image and Signal Processing Group
# at the Institute of Computer Science at Leipzig University.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
json.extract! @perseus_record, :id, :created_at, :updated_at
| 44.076923 | 80 | 0.776614 |
e9db60d544c5dcdec863788429fa31ee2fe9b582 | 513 | class InstructorSerializer < ActiveModel::Serializer
attributes :id, :first_name, :second_name, :course_list
has_many :courses
def course_list
object.courses.map do |course|
{
id: course.id,
instructor: {
id: course.instructor
},
user: {
id: course.user
},
description: course.description,
title: course.title,
department: course.department,
course_number: course.course_number
}
end
end
end
| 22.304348 | 57 | 0.60039 |
7a72b933e8edf819189d7bb9026913e0e77dadce | 1,894 | class RakudoStar < Formula
desc "Perl 6 compiler"
homepage "https://rakudo.org/"
url "https://rakudo.org/dl/star/rakudo-star-2020.01.tar.gz"
sha256 "f1696577670d4ff5b464e572b1b0b8c390e6571e1fb8471cbf369fa39712c668"
bottle do
sha256 "62c80b5c001ffede84e442a8b0f8cb8526e2b8ee2e5b15b6637d7c39c8164d04" => :catalina
sha256 "b338c7b0d69b5c058d8e8b826b36afb12c5452ee1b2a1b727cdcbca50015068a" => :mojave
sha256 "af8a220b91c21a5ce55d92a8015c81f575f257bee8e5b5a6877edc5b3f352f4b" => :high_sierra
end
depends_on "gmp"
depends_on "icu4c"
depends_on "libffi"
depends_on "pcre"
conflicts_with "moarvm", "nqp", :because => "rakudo-star currently ships with moarvm and nqp included"
conflicts_with "parrot"
conflicts_with "rakudo"
def install
libffi = Formula["libffi"]
ENV.remove "CPPFLAGS", "-I#{libffi.include}"
ENV.prepend "CPPFLAGS", "-I#{libffi.lib}/libffi-#{libffi.version}/include"
ENV.deparallelize # An intermittent race condition causes random build failures.
system "perl", "Configure.pl", "--prefix=#{prefix}",
"--backends=moar", "--gen-moar"
system "make"
# make install runs tests that can hang on sierra
# set this variable to skip those tests
ENV["NO_NETWORK_TESTING"] = "1"
system "make", "install"
# Panda is now in share/perl6/site/bin, so we need to symlink it too.
bin.install_symlink Dir[share/"perl6/site/bin/*"]
# Move the man pages out of the top level into share.
# Not all backends seem to generate man pages at this point (moar does not, parrot does),
# so we need to check if the directory exists first.
mv "#{prefix}/man", share if File.directory?("#{prefix}/man")
end
test do
out = `#{bin}/perl6 -e 'loop (my $i = 0; $i < 10; $i++) { print $i }'`
assert_equal "0123456789", out
assert_equal 0, $CHILD_STATUS.exitstatus
end
end
| 36.423077 | 104 | 0.703273 |
6123b62273be7df673c1501305e5d5048ac7e1f3 | 4,823 | require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
describe "SiteZoneTargeting API" do
before(:all) do
client = Adzerk::Client.new(API_KEY)
@flights = client.flights
@advertisers = client.advertisers
@channels = client.channels
@campaigns = client.campaigns
@priorities = client.priorities
@sitezonetargeting = client.sitezonetargetings
@sites = client.sites
@zones = client.zones
advertiser = @advertisers.create(:title => "test")
$advertiserId = advertiser[:id].to_s
channel = @channels.create(:title => 'Test Channel ' + rand(1000000).to_s,
:commission => '0.0',
:engine => 'CPM',
:keywords => 'test',
'CPM' => '10.00',
:ad_types => [1,2,3,4])
$channel_id = channel[:id].to_s
priority = @priorities.create(:name => "High Priority Test",
:channel_id => $channel_id,
:weight => 1,
:is_deleted => false)
$priority_id = priority[:id].to_s
campaign = @campaigns.
create(:name => 'Test campaign ' + rand(1000000).to_s,
:start_date => "1/1/2011",
:end_date => "12/31/2011",
:is_active => false,
:price => '10.00',
:advertiser_id => $advertiserId,
:flights => [],
:is_deleted => false)
$campaign_id = campaign[:id]
new_flight = {
:priority_id => $priority_id,
:name => 'Test flight ' + rand(1000000).to_s,
:start_date => "1/1/2011",
:end_date => "12/31/2011",
:no_end_date => false,
:price => '15.00',
:option_type => 1,
:impressions => 10000,
:is_unlimited => false,
:is_full_speed => false,
:keywords => "test, test2",
:user_agent_keywords => nil,
:weight_override => nil,
:campaign_id => $campaign_id,
:is_active => true,
:is_deleted => false,
:goal_type => 1
}
flight = @flights.create(new_flight)
$flight_id = flight[:id].to_s
site_title = 'Test Site ' + rand(1000000).to_s
site = @sites.create(:title => site_title, :url => 'http://www.adzerk.com')
$site_id = site[:id].to_s
zone_name = 'Test Zone ' + rand(1000000).to_s
zone = @zones.create(:name => zone_name,
:site_id => $site_id,
:is_deleted => false)
$zone_id = zone[:id].to_s
end
after(:all) do
@flights.delete($flight_id)
@campaigns.delete($campaign_id)
@advertisers.delete($advertiserId)
@zones.delete($zone_id)
@sites.delete($site_id)
@priorities.delete($priority_id)
@channels.delete($channel_id)
end
it "should create a sitezone targeting" do
$sitezone_SiteId = $site_id;
$sitezone_ZoneId = $zone_id;
$sitezone_IsExclude = true;
new_sitezone = {
:site_id => $sitezone_SiteId,
:zone_id => $sitezone_ZoneId,
:is_exclude => true,
}
sitezone = @sitezonetargeting.create($flight_id, new_sitezone)
expect(sitezone[:site_id]).to eq($sitezone_SiteId.to_i)
expect(sitezone[:zone_id]).to eq($sitezone_ZoneId.to_i)
expect(sitezone[:is_exclude]).to eq(true)
$sitezone_id = sitezone[:id]
end
it "should retrieve a sitezone targeting" do
sitezone = @sitezonetargeting.get($flight_id,$sitezone_id)
expect(sitezone[:site_id]).to eq($sitezone_SiteId.to_i)
expect(sitezone[:zone_id]).to eq($sitezone_ZoneId.to_i)
expect(sitezone[:is_exclude]).to eq(true)
end
it "should update a sitezone targeting" do
data = {
:site_id => $sitezone_SiteId,
:zone_id => $sitezone_ZoneId,
:is_exclude => false,
}
sitezone = @sitezonetargeting.update($flight_id,$sitezone_id,data)
expect(sitezone[:is_exclude]).to eq(false)
end
it "should delete a sitezone targeting" do
sitezone = @sitezonetargeting.delete($flight_id,$sitezone_id)
expect(sitezone.body).to include("Successfully deleted")
end
it "should error when deleting a sitezone targeting that does not exist" do
expect {
@sitezonetargeting.delete($flight_id,1)
}.to raise_error "This PassSiteMap does not exist in your network."
end
it "should check if a flight is not a part of your network" do
non_network_flight = 123;
expect{ @sitezonetargeting.delete(non_network_flight,1) }.to raise_error("This Flight does not belong to your network.")
expect{ @sitezonetargeting.get(non_network_flight,1) }.to raise_error("This Flight does not belong to your network.")
expect{ @sitezonetargeting.update(non_network_flight,1,{}) }.to raise_error("Flight is not a part of your network")
end
end
| 34.205674 | 124 | 0.60792 |
6a987def403e4ebcf5156e245ba5b59d501ae151 | 287 | class WebkitNightly < Cask
version 'r159487'
sha256 '6d59673fdac0c380f66c1da6ae5742377b68add2f0bb33737bd7e1cec7ba821c'
url 'http://builds.nightly.webkit.org/files/trunk/mac/WebKit-SVN-r159487.dmg'
homepage 'http://nightly.webkit.org/'
license :unknown
app 'WebKit.app'
end
| 26.090909 | 79 | 0.780488 |
e2c88c77400b9ca0ac91e8a7fdad957fed851f52 | 971 | require 'rack'
require 'rack/utils'
if Rack.release > '2'
module Rack::Indifferent
class QueryParser < Rack::QueryParser
# Work around for invalid optimization in rack
def parse_nested_query(qs, d=nil)
return make_params.to_params_hash if qs.nil? || qs.empty?
super
end
class Params < Rack::QueryParser::Params
INDIFFERENT_PROC = lambda{|h,k| h[k.to_s] if k.is_a?(Symbol)}
def initialize(limit = Rack::Utils.key_space_limit)
@limit = limit
@size = 0
@params = Hash.new(&INDIFFERENT_PROC)
end
end
Rack::Utils.default_query_parser = new(Params, 65536, 100)
end
end
else
class Rack::Utils::KeySpaceConstrainedParams
INDIFFERENT_PROC = lambda{|h,k| h[k.to_s] if k.is_a?(Symbol)}
def initialize(limit = Rack::Utils.key_space_limit)
@limit = limit
@size = 0
@params = Hash.new(&INDIFFERENT_PROC)
end
end
end
| 26.243243 | 69 | 0.629248 |
019e0330dcc066a813a5d70071a7057245080aea | 1,583 | require "spec_helper"
describe ProcessOrderPrintable do
subject { described_class }
let(:order_printable_atts) {
{printable_type: "Arcturian Mega Donkey",
include_product_names: true}
}
let(:order_printable) { create(:order_printable, order_printable_atts) }
let(:order_printable_id) { order_printable.id }
let(:context) { double("result context",
pdf_result: double("Pdf result", data: "the pdf data")
)}
let(:request) { double "a request" }
def expect_generate_table_tents_or_posters
expect(GenerateTableTentsOrPosters).to receive(:perform).
with(order: order_printable.order,
type: order_printable.printable_type,
include_product_names: order_printable.include_product_names,
request: request).
and_return(context)
end
it "loads an OrderPrintable and generates the corresponding PDF document, stores that PDF as an attachment" do
expect_generate_table_tents_or_posters
subject.perform(order_printable_id: order_printable_id, request: request)
updated_order_printable = OrderPrintable.find(order_printable_id)
expect(updated_order_printable.pdf.file.read).to eq("the pdf data")
expect(updated_order_printable.pdf.name).to eq("Arcturian_Mega_Donkey.pdf")
end
it "heeds the include_product_names flag" do
order_printable_atts[:include_product_names] = false # the prior test used true
expect_generate_table_tents_or_posters
subject.perform(order_printable_id: order_printable_id, request: request)
end
end
| 35.177778 | 112 | 0.73784 |
082d2e73bbd44c71df866b4741f8361f1176181f | 1,384 | #
# Copyright 2015-2017, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
describe PoiseApplicationPython::Resources::Virtualenv do
shared_examples 'application_virtualenv' do
it { is_expected.to create_application_virtualenv('/test').with(path: '/test/.virtualenv') }
it { expect(chef_run.application('/test').app_state[:python]).to eq chef_run.application_virtualenv('/test') }
end # /shared_examples application_virtualenv
context 'with #python_virtualenv' do
recipe do
application '/test' do
python_virtualenv
end
end
it_behaves_like 'application_virtualenv'
end # /context with #python_virtualenv
context 'with #virtualenv' do
recipe do
application '/test' do
virtualenv
end
end
it_behaves_like 'application_virtualenv'
end # /context with #virtualenv
end
| 30.755556 | 114 | 0.739162 |
26cd8145189a958cfce3599e720746794cb1add0 | 272 | class CreateComments < ActiveRecord::Migration[5.1]
def change
create_table :comments do |t|
t.text :content
t.belongs_to :commentable, polymorphic: true
t.timestamps
end
add_index :comments, [:commentable_id, :commentable_type]
end
end
| 22.666667 | 61 | 0.698529 |
911ca14d295ed02d845c80d30684157aa9d0959e | 2,360 | class Etcd < Formula
desc "Key value store for shared configuration and service discovery"
homepage "https://github.com/coreos/etcd"
url "https://github.com/coreos/etcd/archive/v3.2.7.tar.gz"
sha256 "b91a40102b944ba8e4dad439721c7068eccbc1d0cb0e7f2ded9dad134d2875ce"
head "https://github.com/coreos/etcd.git"
bottle do
cellar :any_skip_relocation
sha256 "237f40262debee967ca6487de2bb0f9dec39480496b5ee671505cee40d3e0a6f" => :high_sierra
sha256 "8245f62c4af1c66720226aa29a7f02824e52d7ca330bd33bf7cdc6b9c5f8cdaa" => :sierra
sha256 "4504914d52f6e55f9144cb5143f5f104305405bd7cad87c9813e7ff882e72844" => :el_capitan
sha256 "89e310f706e6af8e39222d7b93736db3f23192a7e51efa59eb64a0142122a7d8" => :yosemite
end
depends_on "go" => :build
def install
ENV["GOPATH"] = buildpath
mkdir_p "src/github.com/coreos"
ln_s buildpath, "src/github.com/coreos/etcd"
system "./build"
bin.install "bin/etcd"
bin.install "bin/etcdctl"
end
plist_options :manual => "etcd"
def plist; <<-EOS.undent
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<dict>
<key>SuccessfulExit</key>
<false/>
</dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/etcd</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{var}</string>
</dict>
</plist>
EOS
end
test do
begin
test_string = "Hello from brew test!"
etcd_pid = fork do
exec bin/"etcd", "--force-new-cluster", "--data-dir=#{testpath}"
end
# sleep to let etcd get its wits about it
sleep 10
etcd_uri = "http://127.0.0.1:2379/v2/keys/brew_test"
system "curl", "--silent", "-L", etcd_uri, "-XPUT", "-d", "value=#{test_string}"
curl_output = shell_output("curl --silent -L #{etcd_uri}")
response_hash = JSON.parse(curl_output)
assert_match(test_string, response_hash.fetch("node").fetch("value"))
ensure
# clean up the etcd process before we leave
Process.kill("HUP", etcd_pid)
end
end
end
| 32.328767 | 106 | 0.650847 |
ed3dfe325d93a6a978d9b23658819b63b44ff435 | 2,145 | require 'rails_helper'
# Request specs for the public static pages (homepage and humans.txt).
describe 'Static pages', type: :request do
  subject { page }
  let(:user) { create :user }
  describe 'home page' do
    before do
      2.times { create :project }
      5.times { create :pull_request }
      visit root_path
    end
    it { is_expected.to have_link('Log in with GitHub', href: login_path) }
    it { is_expected.to have_content('7 Developers already involved') }
    it { is_expected.to have_content('2 Suggested Projects') }
    it { is_expected.to have_link('View All', href: users_path) }
    it { is_expected.to have_link('View All', href: projects_path) }
    it { is_expected.to have_link('View All', href: pull_requests_path) }
    it { is_expected.to have_link('Suggest a project', href: new_project_path) }
    it { is_expected.to_not have_css('.featured_projects') }
  end
  context "homepage when it has featured projects" do
    before do
      create :project, name: 'foobar', featured: true
      visit root_path
    end
    it "show the featured project" do
      is_expected.to have_css('.featured_projects span.project-title', text: 'foobar')
    end
  end
  # Date-sensitive homepage states, pinned with Timecop.
  # FIX: Timecop.travel was never undone, so the travelled clock leaked into
  # every example that ran afterwards; reset it after each example here.
  describe 'homepage in different dates' do
    after { Timecop.return }
    context "during December" do
      it "doesnt show the finished partial on the first day" do
        Timecop.travel(Date.new(CURRENT_YEAR, 12, 1))
        visit root_path
        is_expected.to_not have_content('24 Pull Requests is finished for')
      end
      it "doesnt show the finished partial on the last day" do
        Timecop.travel(Date.new(CURRENT_YEAR, 12, 24))
        visit root_path
        is_expected.to_not have_content('24 Pull Requests is finished for')
      end
    end
    context "not in December or November" do
      it "shows the finished partial" do
        Timecop.travel(Date.new(CURRENT_YEAR, 10, 29))
        visit root_path
        is_expected.to have_content('24 Pull Requests is finished for')
      end
    end
  end
  describe 'humans.txt' do
    before do
      allow(User).to receive(:contributors).and_return([user])
      visit humans_path(format: :txt)
    end
    it { is_expected.to have_content('CONTRIBUTORS') }
  end
end
| 31.086957 | 86 | 0.674126 |
03aea0d1f42131c4d9348b9ae38c3cfdf3a8f9a7 | 1,365 | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
require 'profiles/latest/resourcehealth_module_definition'
require 'profiles/latest/modules/resourcehealth_profile_module'
# Latest-profile client wrapper for the Azure ResourceHealth management SDK.
# All real behavior lives in ResourceHealthManagementClass; this subclass only
# mixes in the common configuration helpers and forwards options to super.
module Azure::ResourceHealth::Profiles::Latest
  module Mgmt
    #
    # Client class for the Latest profile SDK.
    #
    class Client < ResourceHealthManagementClass
      include MsRestAzure::Common::Configurable
      #
      # Initializes a new instance of the Client class.
      # @param options [Hash] hash of client options.
      #    options = {
      #      tenant_id: 'YOUR TENANT ID',
      #      client_id: 'YOUR CLIENT ID',
      #      client_secret: 'YOUR CLIENT SECRET',
      #      subscription_id: 'YOUR SUBSCRIPTION ID',
      #      credentials: credentials,
      #      active_directory_settings: active_directory_settings,
      #      base_url: 'YOUR BASE URL',
      #      options: options
      #    }
      # 'credentials' are optional and if not passed in the hash, will be obtained
      # from MsRest::TokenCredentials using MsRestAzure::ApplicationTokenProvider.
      #
      # Also, base_url, active_directory_settings & options are optional.
      #
      def initialize(options = {})
        super(options)
      end
    end
  end
end
| 34.125 | 94 | 0.663004 |
b90acab3686da2f7ff2c479f5292990195c3bf2b | 2,431 | class Hardlink < Formula
desc "Replace file copies using hardlinks"
homepage "https://jak-linux.org/projects/hardlink/"
url "https://jak-linux.org/projects/hardlink/hardlink_0.3.0.tar.xz"
sha256 "e8c93dfcb24aeb44a75281ed73757cb862cc63b225d565db1c270af9dbb7300f"
license "MIT"
bottle do
sha256 cellar: :any, arm64_monterey: "88e89ffc8475533034d4b4374d9554702c77397a8e252e8e3746a5f5bc6f66b0"
sha256 cellar: :any, arm64_big_sur: "fe5acfbc7a123db425beb0257ca23f4286b3260bd76b81027ee7528cc05bfdfd"
sha256 cellar: :any, monterey: "3d173e277d659f561dae36da590d56edf53aa759cee210cbaaa8fcaa0f6dea6a"
sha256 cellar: :any, big_sur: "1c2d9bd0578affd02e5b3ea25f09167665f555b652254cea27aabf1b704bf294"
sha256 cellar: :any, catalina: "f0b2171598c5eb9111c2923649f46e32a182af7bc5e5f6012f4f13178651e3ed"
sha256 cellar: :any, mojave: "971dab4459ef06afd11cf2cf7c0ade1ee7bcf959e359938f83b2b8a7d86a7d17"
sha256 cellar: :any, high_sierra: "4738a658357798d756d8a96f96d3700f387ae89d1db769b81675634e85018c19"
sha256 cellar: :any, sierra: "56ac75c51db6d7e19efe41eef24aa6646cdc126a113f5aacadd5f80043efc0d5"
sha256 cellar: :any, el_capitan: "d8b6e2d26d8f49a207c5082a97f1e5c31b35041bcfbc17a217a1c2ad4ff68551"
sha256 cellar: :any, yosemite: "36c30ed90a3d2b9d2d4d07cb182c2838dfba276a05c22d022a42e16043e86f02"
sha256 cellar: :any_skip_relocation, x86_64_linux: "b849b6cdc2d96380221c9dddc48a4c6485db0f4257ea7c48ade59b50e29f7bfd"
end
deprecate! date: "2021-02-17", because: "has been merged into `util-linux`"
depends_on "pkg-config" => :build
depends_on "gnu-getopt"
depends_on "pcre"
on_linux do
keg_only "it conflicts with the maintained `hardlink` binary in `util-linux`"
end
def install
# xattr syscalls are provided by glibc
inreplace "hardlink.c", "#include <attr/xattr.h>", "#include <sys/xattr.h>"
system "make", "PREFIX=#{prefix}", "MANDIR=#{man}", "BINDIR=#{bin}", "install"
end
test do
(testpath/"foo").write "hello\n"
(testpath/"bar").write "hello\n"
system bin/"hardlink", "--ignore-time", testpath
(testpath/"foo").append_lines "world"
assert_equal <<~EOS, (testpath/"bar").read
hello
world
EOS
end
end
| 48.62 | 123 | 0.705471 |
33aafe69443571e4d31bd2a8bebfb565e3925e88 | 3,219 | # frozen_string_literal: true
require 'spec_helper'
# Model specs for per-project monthly CI-minutes usage records.
RSpec.describe Ci::Minutes::ProjectMonthlyUsage do
  let_it_be(:project) { create(:project) }
  describe 'unique index' do
    before do
      create(:ci_project_monthly_usage, project: project)
    end
    it 'raises unique index violation' do
      # FIX: the matcher previously received a block (`raise_error { ... }`)
      # whose body was just the constant — that asserts nothing about the
      # error class. Pass the class as an argument instead.
      expect { create(:ci_project_monthly_usage, project: project) }
        .to raise_error(ActiveRecord::RecordNotUnique)
    end
    it 'does not raise exception if unique index is not violated' do
      # A different month for the same project is allowed by the index.
      expect { create(:ci_project_monthly_usage, project: project, date: described_class.beginning_of_month(1.month.ago)) }
        .to change { described_class.count }.by(1)
    end
  end
  describe '.find_or_create_current' do
    subject { described_class.find_or_create_current(project_id: project.id) }
    # A freshly-created record always starts the month at zero minutes used.
    shared_examples 'creates usage record' do
      it 'creates new record and resets minutes consumption' do
        freeze_time do
          expect { subject }.to change { described_class.count }.by(1)
          expect(subject.amount_used).to eq(0)
          expect(subject.project).to eq(project)
          expect(subject.date).to eq(described_class.beginning_of_month)
          expect(subject.created_at).to eq(Time.current)
        end
      end
    end
    context 'when project usage does not exist' do
      it_behaves_like 'creates usage record'
    end
    context 'when project usage exists for previous months' do
      before do
        create(:ci_project_monthly_usage, project: project, date: described_class.beginning_of_month(2.months.ago))
      end
      it_behaves_like 'creates usage record'
    end
    context 'when project usage exists for the current month' do
      it 'returns the existing usage' do
        freeze_time do
          usage = create(:ci_project_monthly_usage, project: project)
          expect(subject).to eq(usage)
        end
      end
    end
    context 'when a usage for another project exists for the current month' do
      let!(:usage) { create(:ci_project_monthly_usage) }
      it_behaves_like 'creates usage record'
    end
  end
  describe '.increase_usage' do
    let_it_be_with_refind(:current_usage) do
      create(:ci_project_monthly_usage,
        project: project,
        amount_used: 100)
    end
    it_behaves_like 'CI minutes increase usage'
  end
  describe '.for_namespace_monthly_usage' do
    it "fetches project monthly usages matching the namespace monthly usage's date and namespace" do
      date_for_usage = Date.new(2021, 5, 1)
      date_not_for_usage = date_for_usage + 1.month
      namespace_usage = create(:ci_namespace_monthly_usage, namespace: project.namespace, amount_used: 50, date: date_for_usage)
      matching_project_usage = create(:ci_project_monthly_usage, project: project, amount_used: 50, date: date_for_usage)
      create(:ci_project_monthly_usage, project: project, amount_used: 50, date: date_not_for_usage)
      create(:ci_project_monthly_usage, project: create(:project), amount_used: 50, date: date_for_usage)
      project_usages = described_class.for_namespace_monthly_usage(namespace_usage)
      expect(project_usages).to contain_exactly(matching_project_usage)
    end
  end
end
| 34.244681 | 128 | 0.714508 |
7a3f5e4c93c1b88b92ff1ca6d6bc4336665ce52c | 216 | class User < ActiveRecord::Base
has_many :tweets
has_secure_password
def slug
self.username.downcase.gsub(" ", "-")
end
def self.find_by_slug(slug)
self.all.find{|u| u.slug == slug }
end
end
| 13.5 | 41 | 0.657407 |
edfa5c04d3fb346eaac4281aeebc23ee4d18b58d | 5,287 | require 'microsoft_kiota_abstractions'
require_relative '../../../../users'
require_relative '../../../mail_folders'
require_relative '../../messages'
require_relative '../multi_value_extended_properties'
require_relative './item'
# NOTE(review): Kiota-generated request builder — only comments edited here.
# The bare `@ivar` lines in the class body are documentation-only markers
# emitted by the generator; the variables are actually set in #initialize.
module Graphrubyv4::Users::MailFolders::Messages::MultiValueExtendedProperties::Item
    ##
    # Builds and executes requests for operations under \users\{user-id}\mailFolders\{mailFolder-id}\messages\{message-id}\multiValueExtendedProperties\{multiValueLegacyExtendedProperty-id}
    class MultiValueLegacyExtendedPropertyRequestBuilder
        ##
        # Current path for the request
        @current_path
        ##
        # The http core service to use to execute the requests.
        @http_core
        ##
        # Path segment to use to build the URL for the current request builder
        @path_segment
        ##
        ## Instantiates a new MultiValueLegacyExtendedPropertyRequestBuilder and sets the default values.
        ## @param currentPath Current path for the request
        ## @param httpCore The http core service to use to execute the requests.
        ## @return a void
        ##
        def initialize(current_path, http_core)
            @path_segment = ""
            @http_core = http_core
            @current_path = current_path
        end
        ##
        ## Delete navigation property multiValueExtendedProperties for users
        ## @param h Request headers
        ## @param o Request options for HTTP middlewares
        ## @return a request_info
        ##
        def create_delete_request_info(h=nil, o=nil)
            request_info = MicrosoftKiotaAbstractions::RequestInfo.new()
            request_info.uri = @current_path + @path_segment
            request_info.http_method = :DELETE
            request_info.set_headers_from_raw_object(h)
            return request_info;
        end
        ##
        ## Get multiValueExtendedProperties from users
        ## @param h Request headers
        ## @param o Request options for HTTP middlewares
        ## @param q Request query parameters
        ## @return a request_info
        ##
        def create_get_request_info(q=nil, h=nil, o=nil)
            request_info = MicrosoftKiotaAbstractions::RequestInfo.new()
            request_info.uri = @current_path + @path_segment
            request_info.http_method = :GET
            request_info.set_headers_from_raw_object(h)
            request_info.set_query_string_parameters_from_raw_object(q)
            return request_info;
        end
        ##
        ## Update the navigation property multiValueExtendedProperties in users
        ## @param body
        ## @param h Request headers
        ## @param o Request options for HTTP middlewares
        ## @return a request_info
        ##
        def create_patch_request_info(body, h=nil, o=nil)
            request_info = MicrosoftKiotaAbstractions::RequestInfo.new()
            request_info.uri = @current_path + @path_segment
            request_info.http_method = :PATCH
            request_info.set_headers_from_raw_object(h)
            request_info.set_content_from_parsable(body, self.serializer_factory, "application/json")
            return request_info;
        end
        ##
        ## Delete navigation property multiValueExtendedProperties for users
        ## @param h Request headers
        ## @param o Request options for HTTP middlewares
        ## @param responseHandler Response handler to use in place of the default response handling provided by the core service
        ## @return a CompletableFuture of void
        ##
        def delete(h=nil, o=nil, response_handler=nil)
            request_info = self.create_delete_request_info(
                h
            )
            return @http_core.send_async(request_info, nil, response_handler)
        end
        ##
        ## Get multiValueExtendedProperties from users
        ## @param h Request headers
        ## @param o Request options for HTTP middlewares
        ## @param q Request query parameters
        ## @param responseHandler Response handler to use in place of the default response handling provided by the core service
        ## @return a CompletableFuture of multi_value_legacy_extended_property
        ##
        def get(q=nil, h=nil, o=nil, response_handler=nil)
            request_info = self.create_get_request_info(
                q, h
            )
            return @http_core.send_async(request_info, Graphrubyv4::Users::MailFolders::Messages::MultiValueExtendedProperties::Item::MultiValueLegacyExtendedProperty, response_handler)
        end
        ##
        ## Update the navigation property multiValueExtendedProperties in users
        ## @param body
        ## @param h Request headers
        ## @param o Request options for HTTP middlewares
        ## @param responseHandler Response handler to use in place of the default response handling provided by the core service
        ## @return a CompletableFuture of void
        ##
        def patch(body, h=nil, o=nil, response_handler=nil)
            request_info = self.create_patch_request_info(
                body, h
            )
            return @http_core.send_async(request_info, nil, response_handler)
        end
    end
end
| 44.428571 | 189 | 0.647815 |
2817ee9dda26c85e723ee24ae2eac825220952bb | 368 | require "bundler/setup"
require "tk_component"
# Shared RSpec configuration for the tk_component test suite.
RSpec.configure do |config|
  # Enable flags like --only-failures and --next-failure
  config.example_status_persistence_file_path = ".rspec_status"
  # Disable RSpec exposing methods globally on `Module` and `main`
  config.disable_monkey_patching!
  config.expect_with :rspec do |c|
    # Only the modern expect(...) syntax is available (no `should`).
    c.syntax = :expect
  end
end
| 24.533333 | 66 | 0.755435 |
87319a136ecd2573d687ba9c80174bdbf8eaafee | 6,917 | =begin
#Kubernetes
#No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.2.3
=end
require 'date'
# NOTE(review): swagger-codegen generated model — only comments edited here.
module Kubernetes
  # IngressList is a collection of Ingress.
  class V1beta1IngressList
    # APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
    attr_accessor :api_version
    # Items is the list of Ingress.
    attr_accessor :items
    # Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
    attr_accessor :kind
    # Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata
    attr_accessor :metadata
    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'api_version' => :'apiVersion',
        :'items' => :'items',
        :'kind' => :'kind',
        :'metadata' => :'metadata'
      }
    end
    # Attribute type mapping.
    def self.swagger_types
      {
        :'api_version' => :'String',
        :'items' => :'Array<V1beta1Ingress>',
        :'kind' => :'String',
        :'metadata' => :'V1ListMeta'
      }
    end
    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)
      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}
      if attributes.has_key?(:'apiVersion')
        self.api_version = attributes[:'apiVersion']
      end
      if attributes.has_key?(:'items')
        # Only assign when the payload really is an array.
        if (value = attributes[:'items']).is_a?(Array)
          self.items = value
        end
      end
      if attributes.has_key?(:'kind')
        self.kind = attributes[:'kind']
      end
      if attributes.has_key?(:'metadata')
        self.metadata = attributes[:'metadata']
      end
    end
    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properies with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      if @items.nil?
        invalid_properties.push("invalid value for 'items', items cannot be nil.")
      end
      return invalid_properties
    end
    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      return false if @items.nil?
      return true
    end
    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
          api_version == o.api_version &&
          items == o.items &&
          kind == o.kind &&
          metadata == o.metadata
    end
    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end
    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [api_version, items, kind, metadata].hash
    end
    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end # or else data not found in attributes(hash), not an issue as the data can be optional
      end
      self
    end
    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        DateTime.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        temp_model = Kubernetes.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end
    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end
    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end
    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end
    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map{ |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 30.471366 | 279 | 0.628596 |
f81d0d98deebd89e2815818a3769a51485b93759 | 2,036 | require 'spec_helper'
# Specs for Spice::Connection's Chef client CRUD helpers against stubbed
# HTTP endpoints. NOTE(review): uses legacy RSpec `should` syntax throughout.
describe Spice::Connection do
  before do
    Spice.mock
  end
  after do
    Spice.reset
  end
  let(:connection) do
    Spice::Connection.new
  end
  describe "#clients" do
    before do
      stub_get("/search/client?q=%2A%3A%2A&sort=X_CHEF_id_CHEF_X+asc&start=0&rows=1000").
      # with(:query => { "q"=>"*:*", "sort"=>"X_CHEF_id_CHEF_X asc", "start"=>"0", "rows"=>"1000"}).
        to_return(:body => fixture('search/client.json'))
    end
    it "should return an array of clients" do
      clients = connection.clients
      clients.should be_an Array
    end
  end
  describe "#client" do
    before do
      stub_get("/clients/name").to_return(:body => fixture('clients/show.json'))
    end
    it "should return a single client" do
      client = connection.client("name")
      client.should be_a Spice::Client
      client.name.should == "monkeypants"
      client.admin.should == true
    end
  end
  describe "#update_client" do
    before do
      stub_put("/clients/name").
        with(:body => {:admin => false }).
        to_return(:body => fixture('clients/update.json'))
    end
    it "should update and return a single client" do
      client = connection.update_client("name", :admin => false)
      client.should be_a Spice::Client
      client.admin.should == false
    end
  end
  describe "#reregister_client" do
    before do
      stub_put("/clients/name").
        with(:body => { :private_key => true }).
        to_return(:body => fixture('clients/reregister.json'))
    end
    it "should update and return a single client with a new private key" do
      client = connection.reregister_client("name")
      client.should be_a Spice::Client
      client.private_key.should_not be_nil
    end
  end
  describe "#delete_client" do
    before do
      stub_delete("/clients/name")
    end
    it "should delete a client and return nil" do
      client = connection.delete_client("name")
      client.should be_nil
    end
  end
end
end | 24.829268 | 102 | 0.624263 |
08723ec1e532c6e837c150a104f7d546f5fe8582 | 6,971 | include_recipe 'bcpc-hadoop::hadoop_config'
::Chef::Recipe.send(:include, Bcpc_Hadoop::Helper)
::Chef::Resource::Bash.send(:include, Bcpc_Hadoop::Helper)
# Create the per-disk NodeManager local and log directories for every mount
# discovered at converge time. Directory resources are built and run inside a
# ruby_block because the mount list lives in node.run_state, which is only
# populated during the converge phase.
ruby_block 'create-yarn-directories' do
  block do
    node.run_state[:bcpc_hadoop_disks][:mounts].each do |disk_number|
      Chef::Resource::Directory.new("/disk/#{disk_number}/yarn/local",
                                    node.run_context).tap do |dd|
        dd.owner 'yarn'
        dd.group 'yarn'
        dd.mode 0755
        dd.recursive true
        dd.run_action :create
      end
      Chef::Resource::Directory.new("/disk/#{disk_number}/yarn/logs",
                                    node.run_context).tap do |dd|
        dd.owner 'yarn'
        dd.group 'yarn'
        dd.mode 0755
        dd.recursive true
        dd.run_action :create
      end
    end
  end
end
bash "create-hdfs-yarn-log" do
code "hdfs dfs -mkdir -p /var/log/hadoop-yarn && hdfs dfs -chmod 0777 /var/log/hadoop-yarn && hdfs dfs -chown yarn:mapred /var/log/hadoop-yarn"
user "hdfs"
not_if "hdfs dfs -test -d /var/log/hadoop-yarn", :user => "hdfs"
end
bash "create-hdfs-yarn-ats-log" do
code <<-EOH
hdfs dfs -mkdir -p /var/log/ats && hdfs dfs -chmod 1777 /var/log/ats && hdfs dfs -chown yarn:mapred /var/log/ats
hdfs dfs -mkdir -p /var/log/ats/active && hdfs dfs -chmod 1777 /var/log/ats/active && hdfs dfs -chown yarn:mapred /var/log/ats/active
hdfs dfs -mkdir -p /var/log/ats/done && hdfs dfs -chmod 0700 /var/log/ats/done && hdfs dfs -chown yarn:mapred /var/log/ats/done
EOH
user "hdfs"
only_if { node.roles.include?("BCPC-Hadoop-Head-YarnTimeLineServer") }
not_if "hdfs dfs -test -d /var/log/hadoop-yarn/ats/active && hdfs dfs -test -d /var/log/hadoop-yarn/ats/done", :user => "hdfs"
end
# list hdp packages to install
yarn_packages = %w{hadoop-yarn-resourcemanager hadoop-client hadoop-mapreduce-historyserver}
# Timeline-server heads additionally need the ATS package.
if node.roles.include?("BCPC-Hadoop-Head-YarnTimeLineServer")
  yarn_packages.push("hadoop-yarn-timelineserver")
end
# Install each versioned HDP package and point hdp-select at the active release.
yarn_packages.each do |pkg|
  package hwx_pkg_str(pkg, node[:bcpc][:hadoop][:distribution][:release]) do
    action :install
  end
  hdp_select(pkg, node[:bcpc][:hadoop][:distribution][:active_release])
end
# Tez is only required alongside the timeline server.
package hwx_pkg_str("tez", node[:bcpc][:hadoop][:distribution][:release]) do
  action :install
  only_if { node.roles.include?("BCPC-Hadoop-Head-YarnTimeLineServer") }
end
link "/etc/init.d/hadoop-yarn-resourcemanager" do
to "/usr/hdp/#{node[:bcpc][:hadoop][:distribution][:active_release]}/hadoop-yarn/etc/init.d/hadoop-yarn-resourcemanager"
notifies :run, 'bash[kill yarn-resourcemanager]', :immediate
end
include_recipe 'bcpc-hadoop::yarn_schedulers'
link "/etc/init.d/hadoop-yarn-timelineserver" do
to "/usr/hdp/#{node[:bcpc][:hadoop][:distribution][:active_release]}/hadoop-yarn/etc/init.d/hadoop-yarn-timelineserver"
notifies :run, 'bash[kill yarn-timelineserver]', :immediate
only_if { node.roles.include?("BCPC-Hadoop-Head-YarnTimeLineServer") }
end
link "/etc/init.d/hadoop-yarn-timelineserver" do
to "/usr/hdp/#{node[:bcpc][:hadoop][:distribution][:active_release]}/hadoop-yarn/etc/init.d/hadoop-yarn-timelineserver"
notifies :run, 'bash[kill yarn-timelineserver]', :immediate
only_if { node.roles.include?("BCPC-Hadoop-Head-YarnTimeLineServer") }
end
file "/etc/hadoop/conf/yarn.exclude" do
content node["bcpc"]["hadoop"]["decommission"]["hosts"].join("\n")
mode 0644
owner 'yarn'
group 'hdfs'
only_if { !node["bcpc"]["hadoop"]["decommission"]["hosts"].nil? }
end
bash "kill yarn-resourcemanager" do
code "pkill -u yarn -f resourcemanager"
action :nothing
returns [0, 1]
end
bash "kill yarn-timelineserver" do
code "pkill -u yarn -f timelineservice"
action :nothing
returns [0, 1]
end
configure_kerberos 'rm_spnego' do
service_name 'spnego'
end
configure_kerberos 'rm_kerb' do
service_name 'resourcemanager'
end
# Probe commands: a tiny HDFS write/remove pair used as a health gate, plus a
# test for the probe file itself.
hdfs_write = "echo 'test' | hdfs dfs -copyFromLocal - /user/hdfs/chef-mapred-test"
hdfs_remove = "hdfs dfs -rm -skipTrash /user/hdfs/chef-mapred-test"
hdfs_test = "hdfs dfs -test -f /user/hdfs/chef-mapred-test"
# first, make sure the check file is not currently in hdfs, otherwise, the check for
# setup-mapreduce-app will fail
bash 'remove-check-file' do
  code <<-EOH
    #{hdfs_remove}
  EOH
  user 'hdfs'
  only_if "#{hdfs_test}", :user => 'hdfs'
end
# Upload the MapReduce framework tarball into HDFS for the active release;
# guarded so it only runs once per release, and only when HDFS is writable
# (the write/remove probe succeeds).
bash "setup-mapreduce-app" do
  code <<-EOH
  hdfs dfs -mkdir -p /hdp/apps/#{node[:bcpc][:hadoop][:distribution][:release]}/mapreduce/
  hdfs dfs -put /usr/hdp/#{node[:bcpc][:hadoop][:distribution][:release]}/hadoop/mapreduce.tar.gz /hdp/apps/#{node[:bcpc][:hadoop][:distribution][:release]}/mapreduce/
  hdfs dfs -chown -R hdfs:hadoop /hdp
  hdfs dfs -chmod -R 555 /hdp/apps/#{node[:bcpc][:hadoop][:distribution][:release]}/mapreduce
  hdfs dfs -chmod -R 444 /hdp/apps/#{node[:bcpc][:hadoop][:distribution][:release]}/mapreduce/mapreduce.tar.gz
  EOH
  user "hdfs"
  not_if "hdfs dfs -test -f /hdp/apps/#{node[:bcpc][:hadoop][:distribution][:release]}/mapreduce/mapreduce.tar.gz", :user => "hdfs"
  only_if "#{hdfs_write} && #{hdfs_remove}", :user => "hdfs"
end
service "hadoop-yarn-resourcemanager" do
action [:enable, :start]
supports :status => true, :restart => true, :reload => false
subscribes :restart, "template[/etc/hadoop/conf/hadoop-env.sh]", :delayed
subscribes :restart, "template[/etc/hadoop/conf/yarn-env.sh]", :delayed
subscribes :restart, "template[/etc/hadoop/conf/yarn-site.xml]", :delayed
subscribes :restart, "template[/etc/hadoop/conf/mapred-site.xml]", :delayed
subscribes :restart, "template[/etc/hadoop/conf/core-site.xml]", :delayed
subscribes :restart, "file[/etc/hadoop/conf/ldap-conn-pass.txt]", :delayed
subscribes :restart, "template[/etc/hadoop/conf/hdfs-site.xml]", :delayed
subscribes :restart, "bash[hdp-select hadoop-yarn-resourcemanager]", :delayed
subscribes :restart, "log[jdk-version-changed]", :delayed
end
service "hadoop-yarn-timelineserver" do
action [:enable, :start]
supports :status => true, :restart => true, :reload => false
subscribes :restart, "template[/etc/hadoop/conf/hadoop-env.sh]", :delayed
subscribes :restart, "template[/etc/hadoop/conf/yarn-env.sh]", :delayed
subscribes :restart, "template[/etc/hadoop/conf/yarn-site.xml]", :delayed
subscribes :restart, "template[/etc/hadoop/conf/mapred-site.xml]", :delayed
subscribes :restart, "template[/etc/hadoop/conf/core-site.xml]", :delayed
subscribes :restart, "file[/etc/hadoop/conf/ldap-conn-pass.txt]", :delayed
subscribes :restart, "template[/etc/hadoop/conf/hdfs-site.xml]", :delayed
subscribes :restart, "bash[hdp-select hadoop-yarn-resourcemanager]", :delayed
subscribes :restart, "log[jdk-version-changed]", :delayed
only_if { node.roles.include?("BCPC-Hadoop-Head-YarnTimeLineServer") }
end
bash "reload mapreduce nodes" do
code "yarn rmadmin -refreshNodes"
user "mapred"
action :nothing
subscribes :run, "template[/etc/hadoop/conf/yarn.exclude]", :delayed
end
| 40.294798 | 167 | 0.704347 |
6aed99bc836be2bf8f12a8737522b9df585a2399 | 1,260 | # frozen_string_literal: true
require 'shark_on_lambda/rake_tasks'
RSpec.describe 'Rake task `shark-on-lambda:routes`' do
subject(:task) do
Rake::Task.tasks.find { |task| task.name == 'shark-on-lambda:routes' }
end
before do
Class.new(SharkOnLambda::Application)
SharkOnLambda.application.routes.draw do
resources :sharks do
resources :victims, only: %i[index]
end
end
end
it 'prints all known routes' do
expected_output = <<~OUTPUT.chomp
Prefix Verb URI Pattern Controller#Action
shark_victims GET /sharks/:shark_id/victims(.:format) victims#index
sharks GET /sharks(.:format) sharks#index
POST /sharks(.:format) sharks#create
shark GET /sharks/:id(.:format) sharks#show
PATCH /sharks/:id(.:format) sharks#update
PUT /sharks/:id(.:format) sharks#update
DELETE /sharks/:id(.:format) sharks#destroy
OUTPUT
allow($stdout).to receive(:write)
task.invoke
expect($stdout).to have_received(:write).with(expected_output, "\n")
end
end
| 35 | 80 | 0.568254 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.