hexsha (stringlengths 40–40) | size (int64 2–1.01M) | content (stringlengths 2–1.01M) | avg_line_length (float64 1.5–100) | max_line_length (int64 2–1k) | alphanum_fraction (float64 0.25–1) |
---|---|---|---|---|---|
e210949dbd3108811b5ffd5c6e22c33c9166e786
| 2,007 |
# frozen_string_literal: true
module CI
module Queue
class Static
include Common
class << self
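# Builds a static queue from an opaque URI: everything after the scheme is
# treated as a ':'-separated list of URL-escaped test identifiers
# (illustratively, an opaque part of "TestA%23test_x:TestB%23test_y" would
# yield two tests).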
def from_uri(uri, config)
tests = uri.opaque.split(':').map { |t| CGI.unescape(t) }
new(tests, config)
end
end
attr_reader :progress, :total
def initialize(tests, config)
@queue = tests
@config = config
@progress = 0
@total = tests.size
end
def build
@build ||= BuildRecord.new(self)
end
def supervisor
raise NotImplementedError, "This type of queue can't be supervised"
end
def retry_queue
self
end
def populate(tests, random: nil)
@index = tests.map { |t| [t.id, t] }.to_h
self
end
def populated?
!!defined?(@index)
end
def to_a
@queue.map { |i| index.fetch(i) }
end
def size
@queue.size
end
def poll
while config.circuit_breakers.none?(&:open?) && !max_test_failed? && test = @queue.shift
yield index.fetch(test)
end
end
def exhausted?
@queue.empty?
end
def acknowledge(test)
@progress += 1
true
end
def increment_test_failed
@test_failed = test_failed + 1
end
def test_failed
@test_failed ||= 0
end
def max_test_failed?
return false if config.max_test_failed.nil?
test_failed >= config.max_test_failed
end
def requeue(test)
test_key = test.id
return false unless should_requeue?(test_key)
requeues[test_key] += 1
@queue.unshift(test_key)
true
end
private
attr_reader :index
def should_requeue?(key)
requeues[key] < config.max_requeues && requeues.values.inject(0, :+) < config.global_max_requeues(total)
end
def requeues
@requeues ||= Hash.new(0)
end
end
end
end
| 19.298077 | 112 | 0.550075 |
874615c9dca6566e8c4a11a0addb8944781cfa7b
| 558 |
require_relative 'boot'
require 'rails/all'
Bundler.require(*Rails.groups)
require "cats"
module Dummy
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
end
end
| 27.9 | 82 | 0.763441 |
b979df3e3c131dc2b363ae7d5e60891a47622d5e
| 9,768 |
# Copyright:: Copyright 2017 Trimble Inc.
# License:: The MIT License (MIT)
# This observer interface is implemented to react to model events. To
# implement this observer, create a Ruby class of this type, override the
# desired methods, and add an instance of the observer to the model.
#
# Note that the observers related to transactions (aka undoable operations)
# are primarily for reporting and debugging. Performing any edit operations
# of your own (such as modifying the model) inside the observer callback
# should be avoided, as it could cause crashes or model corruption. The most
# common use for these callbacks is to help debug problems where your
# Ruby script's {Sketchup::Model#start_operation} and
# {Sketchup::Model#commit_operation} calls are somehow conflicting with
# SketchUp's native undo operations. You can set up an observer set to watch
# precisely what is going on.
#
# @example
# # This is an example of an observer that watches the
# # component placement event.
# class MyModelObserver < Sketchup::ModelObserver
# def onPlaceComponent(instance)
# puts "onPlaceComponent: #{instance}"
# end
# end
#
# # Attach the observer.
# Sketchup.active_model.add_observer(MyModelObserver.new)
#
# @version SketchUp 6.0
class Sketchup::ModelObserver
# Instance Methods
# The {#onActivePathChanged} method is invoked when the user opens or closes a
# {Sketchup::ComponentInstance} or {Sketchup::Group} for editing.
#
# When the user opens an instance for editing the positions and transformations
# of the entities in the opened instance will be relative to global world
# coordinates instead of the local coordinates relative to their parent.
#
# See {Sketchup::Model#active_path} and {Sketchup::Model#edit_transform} for
# methods that report the current edit origin vs. the global origin, etc.
#
# By using this observer callback, you can keep track of the various nested
# transformations as your users double click to drill into and out of
# component edits.
#
# @example
# # As the user double clicks into component edits, show the "path" along
# # the model hierarchy they have followed to get here.
# def onActivePathChanged(model)
# puts "onActivePathChanged: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @see Sketchup::Model#edit_transform
#
# @see Sketchup::Model#active_path
def onActivePathChanged(model)
end
# The {#onAfterComponentSaveAs} method is invoked when the user context-clicks >
# Save As on a component instance. It is called just after the component is
# written to disk, so you can restore the component to some state before
# returning control to the user.
#
# @example
# def onAfterComponentSaveAs(model)
# puts "onAfterComponentSaveAs: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @see #onBeforeComponentSaveAs
#
# @version SketchUp 7.0
def onAfterComponentSaveAs(model)
end
# The {#onBeforeComponentSaveAs} method is invoked when the user
# context-clicks > Save As on a component instance. It is called just before
# the component is written to disk, so you can make changes within the handler
# and it will make it into the save.
#
# For example, you may decide that you want to add some attribute to every
# component that is saved out, but you do not want that attribute sticking
# around inside the current model. Within {#onBeforeComponentSaveAs} you could
# add the attribute, and within {#onAfterComponentSaveAs} you could delete that
# attribute.
#
# The callback is not sent the component that is to be saved, but the
# model's selection will contain it.
#
# @example
# def onBeforeComponentSaveAs(model)
# puts "onBeforeComponentSaveAs: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @version SketchUp 7.0
def onBeforeComponentSaveAs(model)
end
# The {#onDeleteModel} method is invoked when a model is deleted.
#
# @example
# def onDeleteModel(model)
# puts "onDeleteModel: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @version SketchUp 6.0
def onDeleteModel(model)
end
# The {#onEraseAll} method is invoked when everything in a model is erased.
#
# @example
# def onEraseAll(model)
# puts "onEraseAll: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @version SketchUp 6.0
def onEraseAll(model)
end
# The {#onExplode} method is invoked whenever a component anywhere in this
# model is exploded. This is an easier way to watch explode events vs.
# attaching an InstanceObserver to every instance in the model.
#
# Since the callback does not return what was exploded, one solution is to
# attach a selection observer that keeps track of which of the entities you
# are interested in are currently selected. Since SketchUp's user interface
# only provides a means of exploding the selection, this method is a
# reliable way to know what was just exploded.
#
# Another method would be to watch
# {Sketchup::ComponentDefinition.count_instances} to determine what just
# changed, as any components that were exploded will now have one fewer instance.
#
# @example
# def onExplode(model)
# puts "onExplode: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @version SketchUp 7.0
def onExplode(model)
end
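# Illustrative sketch of the selection-observer approach described above;
# the class name and instance variable are assumptions, not part of the
# SketchUp API documentation:
#
#   class ExplodeSelectionTracker < Sketchup::SelectionObserver
#     def onSelectionBulkChange(selection)
#       @last_selection = selection.to_a # candidates for the next explode
#     end
#   end
#
#   Sketchup.active_model.selection.add_observer(ExplodeSelectionTracker.new)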
# The {#onPidChanged} method is invoked when a persistent id in the model
# changes. For example when entities are grouped.
#
# @example
# def onPidChanged(model, old_pid, new_pid)
# puts "onPidChanged: #{model}, #{old_pid} => #{new_pid}"
# end
#
# @param [Sketchup::Model] model
#
# @param [Integer] old_pid
#
# @param [Integer] new_pid
#
# @return [nil]
#
# @version SketchUp 2017
def onPidChanged(model, old_pid, new_pid)
end
# The {#onPlaceComponent} method is invoked when a component is "placed" into
# the model, meaning it is dragged from the Component Browser.
#
# @example
# def onPlaceComponent(model)
# puts "onPlaceComponent: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @version SketchUp 7.0
def onPlaceComponent(model)
end
# The {#onPostSaveModel} method is invoked after a model has been saved to disk.
#
# @example
# def onPostSaveModel(model)
# puts "onPostSaveModel: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @version SketchUp 8.0
def onPostSaveModel(model)
end
# The {#onPreSaveModel} method is invoked before a model is saved to disk.
#
# @example
# def onPreSaveModel(model)
# puts "onPreSaveModel: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @version SketchUp 8.0
def onPreSaveModel(model)
end
# The {#onSaveModel} method is invoked after a model has been saved to disk.
#
# @example
# def onSaveModel(model)
# puts "onSaveModel: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @version SketchUp 6.0
def onSaveModel(model)
end
# The {#onTransactionAbort} method is invoked when a transaction is aborted.
#
# @example
# def onTransactionAbort(model)
# puts "onTransactionAbort: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @see Sketchup::Model#abort_operation
#
# @version SketchUp 6.0
def onTransactionAbort(model)
end
# The {#onTransactionCommit} method is invoked when a transaction is completed.
#
# @example
# def onTransactionCommit(model)
# puts "onTransactionCommit: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @see Sketchup::Model#commit_operation
#
# @version SketchUp 6.0
def onTransactionCommit(model)
end
# The {#onTransactionEmpty} method is invoked when a transaction
# (aka an undoable operation) starts and then is committed without anything
# being altered in between.
#
# @example
# def onTransactionEmpty(model)
# puts "onTransactionEmpty: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @see Sketchup::Model#start_operation
#
# @version SketchUp 6.0
def onTransactionEmpty(model)
end
# The {#onTransactionRedo} method is invoked when the user "redoes" a
# transaction (aka an undoable operation). You can programmatically fire a redo by
# calling +Sketchup.sendAction("editRedo")+.
#
# @example
# def onTransactionRedo(model)
# puts "onTransactionRedo: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @version SketchUp 6.0
def onTransactionRedo(model)
end
# The {#onTransactionStart} method is invoked when a transaction
# (aka an undoable operation) starts.
#
# @example
# def onTransactionStart(model)
# puts "onTransactionStart: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @see Sketchup::Model#start_operation
#
# @version SketchUp 6.0
def onTransactionStart(model)
end
# The {#onTransactionUndo} method is invoked when the user "undoes" a
# transaction (aka an undoable operation). You can programmatically fire an undo by
# calling +Sketchup.sendAction("editUndo")+.
#
# @example
# def onTransactionUndo(model)
# puts "onTransactionUndo: #{model}"
# end
#
# @param [Sketchup::Model] model
#
# @return [nil]
#
# @version SketchUp 6.0
def onTransactionUndo(model)
end
end
| 27.361345 | 82 | 0.682023 |
acf056086c850e1dbdcb1ac97b78c52e1af7bf7b
| 234 |
class AddAttachmentImagemToTipoUsuarios < ActiveRecord::Migration
def self.up
change_table :tipo_usuarios do |t|
t.attachment :imagem
end
end
def self.down
drop_attached_file :tipo_usuarios, :imagem
end
end
| 19.5 | 65 | 0.739316 |
795c08aafb8137e93ed81413c41e78f507d72fcb
| 5,247 |
# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
Needy.delete_all
Needy.create([
{
address: 'Rua João da Rocha - Vila Constancia, São Paulo - SP, Brasil',
latitude: '-23.6703651',
longitude: '-46.675999'
},
{
address: 'Rua Guiomar Branco da Silva - Vila Marari, São Paulo - SP, Brasil',
latitude: '-23.6682756',
longitude: '-46.6615089'
},
{
address: 'Rua Saturnino Barbosa da Silva - Campo Grande, São Paulo - SP, Brasil',
latitude: '-23.6737314',
longitude: '-46.6794502'
},
{
address: 'Rua Jupi - Santo Amaro, São Paulo - SP, Brasil',
latitude: '-23.6465722',
longitude: '-46.7122612'
},
{
address: 'Rua Donato - Vila Pirituba, São Paulo - SP, Brasil',
latitude: '-23.4984514',
longitude: '-46.7122326'
},
{
address: 'Avenida Washington Luís - Campo Belo, São Paulo - SP, Brasil',
latitude: '-23.6315305',
longitude: '-46.6688851'
},
{
address: 'Avenida Marari - Vila Marari, São Paulo - SP, Brasil',
latitude: '-23.665824',
longitude: '-46.664496'
},
{
address: 'Travessa José Maria Chacon - Vila Constanca, São Paulo - SP, Brasil',
latitude: '-23.6666364',
longitude: '-46.6771369'
},
{
address: 'Viela George de Mestral - Vila Constancia, São Paulo - SP, Brasil',
latitude: '-23.6695166',
longitude: '-46.6696085'
},
{
address: 'Chácara Santo Antônio (Zona Sul), São Paulo - SP, Brasil',
latitude: '-23.630928',
longitude: '-46.7041338'
},
{
address: 'Rua dos Jequitibás - Jardim Oriental, São Paulo - SP, Brasil',
latitude: '-23.6463255',
longitude: '-46.643094'
},
{
address: 'Rua Amador Bueno - Santo Amaro, São Paulo - SP, Brasil',
latitude: '-23.6527622',
longitude: '-46.7124'
},
{
address: 'Rua Marcondésia - Chácara Monte Alegre, São Paulo - SP, Brasil',
latitude: '-23.644964',
longitude: '-46.6782235'
},
{
address: 'Rua Márcio Mazzei - Vila Nova Mazzei, São Paulo - SP, Brasil',
latitude: '-23.4615544',
longitude: '-46.5954711'
},
{
address: 'Rua Soares de Avellar - Vila Monte Alegre, São Paulo - SP, Brasil',
latitude: '-23.6303201',
longitude: '-46.6373525'
},
{
address: 'Rua Alves Guimarães - Pinheiros, São Paulo - SP, Brasil',
latitude: '-23.5556837',
longitude: '-46.6782927'
},
{
address: 'Rua Barreto Muniz - Vila Guarani (Zona Sul), São Paulo - SP, Brasil',
latitude: '-23.6312471',
longitude: '-46.6294115'
},
{
address: 'Rua Alves Barreto Visconde de Nova Granada, Castanheira de Pêra, Portugal',
latitude: '40.00665',
longitude: '-8.2117462'
},
{
address: 'Rua Santo Agostinho - Centro, Diadema - SP, Brasil',
latitude: '-23.6834247',
longitude: '-46.6313174'
},
{
address: 'Rua Jeremias Júnior - Vila Nair, São Paulo - SP, Brasil',
latitude: '-23.6085251',
longitude: '-46.610603'
},
{
address: 'Rua Geminiano Góis - Freguesia (Jacarepaguá), Rio de Janeiro - RJ, Brasil',
latitude: '-22.9334986',
longitude: '-43.3372047'
},
{
address: 'Rua Vanilda Machado Teixeira - Santana, Itaguaí - RJ, Brasil',
latitude: '-22.8656598',
longitude: '-43.7571653'
},
{
address: 'Rua Maria Lígia - Vila Moraes, São Paulo - SP, Brasil',
latitude: '-23.6355395',
longitude: '-46.6181563'
},
{
address: 'Rua Jovelina - Vila Domitila, São Paulo - SP, Brasil',
latitude: '-23.5131225',
longitude: '-46.5099517'
},
{
address: 'Rua Júnior Rocha - Parque Manibura, Fortaleza - CE, Brasil',
latitude: '-3.7912908',
longitude: '-38.4877338'
},
{
address: 'Rua Ceará - Consolação, São Paulo - SP, Brasil',
latitude: '-23.5475333',
longitude: '-46.6617843'
},
{
address: 'Rua Rio de Janeiro - Osvaldo Cruz, São Caetano do Sul - SP, Brasil',
latitude: '-23.6229591',
longitude: '-46.5638886'
},
{
address: 'Rua Minas Gerais - Higienópolis, São Paulo - SP, Brasil',
latitude: '-23.5545204',
longitude: '-46.6640652'
},
{
address: 'Rua Espírito Santo - Cerâmica, São Caetano do Sul - SP, Brasil',
latitude: '-23.6255807',
longitude: '-46.5733367'
},
{
address: 'Rua Pernambucana - Conceicao, Osasco - SP, Brasil',
latitude: '-23.5725656',
longitude: '-46.8056043'
}
])
Category.delete_all
Category.create([
{name: 'Vestuário'},
{name: 'Calçados'},
{name: 'Alimentação'},
{name: 'Água'},
{name: 'Educação'},
{name: 'Trabalho'},
{name: 'Higiene bucal'},
{name: 'Moradia'},
{name: 'Educação'},
{name: 'Lazer'},
{name: 'Higiene'},
{name: 'Cobertores'},
{name: 'Material didático'},
{name: 'Material de limpeza'},
{name: 'Higiene pessoal'},
{name: 'Móveis'},
])
| 28.98895 | 111 | 0.595197 |
1d0063e1b36b0be5a28bf7784163df0c8de66734
| 1,148 |
require_relative 'test_helper'
class ParimatchTest < Minitest::Test
def setup
@pm = Parimatch.new
@html_folder = 'test/html/parimatch/'
end
def test_parimatch_live_page_parsing
html = open("#{@html_folder}pm_live.html").read
result = @pm.live_page_parsed(html)
assert_equal Hash, result.class
assert_equal 16, result.size
result.each do |addr, who|
assert addr.include?('parimatch.com')
assert_equal String, addr.class
assert_equal String, who.class
end
# result.each {|k,r| puts k; puts r}
end
def test_parimatch_event_parsing
events = []
(1..4).each do |num|
events << "#{@html_folder}parimatch#{num}.html"
end
events.each do |event_page|
@pm = Parimatch.new
res = @pm.event_parsed(open(event_page).read)
# p res
assert_equal(String, res[:home_player][:name].class)
assert_equal(String, res[:away_player][:name].class)
assert_equal(Hash, res[:home_player].class)
assert_equal(Hash, res[:away_player].class)
assert_equal(String, res[:score].class)
assert_equal('Parimatch', res[:bookie])
end
end
end
| 27.333333 | 58 | 0.66899 |
21da3b3cbd32c0c05d168184022b559aad55a4eb
| 233 |
module Rack
class ServerStatus
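# Minimal Rack middleware: answers any request to /status with a plain-text
# 200 "OK" and passes every other request through to the wrapped app.
#
# Illustrative usage in config.ru (the app name is a placeholder, not from
# this repository):
#
#   use Rack::ServerStatus
#   run MyApp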
def initialize(app)
@app = app
end
def call(env)
return [200, {'Content-Type' => 'text/plain'}, ['OK']] if env['PATH_INFO'] == '/status'
@app.call(env)
end
end
end
| 19.416667 | 93 | 0.566524 |
39cbf49ca21af5f7d38753ea340bebe422d854b5
| 639 |
class AddConstraintsToMostRecentForInterLibraryLoanTransitions < ActiveRecord::Migration[4.2]
disable_ddl_transaction!
def up
add_index :inter_library_loan_transitions, [:inter_library_loan_id, :most_recent], unique: true, where: "most_recent", name: "index_inter_library_loan_transitions_parent_most_recent" #, algorithm: :concurrently
change_column_null :inter_library_loan_transitions, :most_recent, false
end
def down
remove_index :inter_library_loan_transitions, name: "index_inter_library_loan_transitions_parent_most_recent"
change_column_null :inter_library_loan_transitions, :most_recent, true
end
end
| 45.642857 | 214 | 0.834116 |
e25180a053896bb1882e8864a0c1bc47bc0c5c61
| 2,103 |
RSpec.describe Legistar::Person do
describe "Person" do
before do
@client = Legistar::Person.new(host: "mesa")
end
describe "#index" do
it "return an array of People, as hashes" do
code_sections = @client.index
expect(code_sections.class).to eq(Array)
expect(code_sections.first.class).to eq(Hash)
end
end
describe "#get" do
before do
@person = @client.get("120") # TODO: Brittle
end
it "return a Event, as a hash" do
expect(@person.class).to eq(Hash)
expect(@person.keys).to include("PersonId")
expect(@person.keys).to include("PersonGuid")
expect(@person.keys).to include("PersonLastModifiedUtc")
expect(@person.keys).to include("PersonRowVersion")
expect(@person.keys).to include("PersonFirstName")
expect(@person.keys).to include("PersonLastName")
expect(@person.keys).to include("PersonFullName")
expect(@person.keys).to include("PersonActiveFlag")
expect(@person.keys).to include("PersonUsedSponsorFlag")
expect(@person.keys).to include("PersonAddress1")
expect(@person.keys).to include("PersonCity1")
expect(@person.keys).to include("PersonState1")
expect(@person.keys).to include("PersonZip1")
expect(@person.keys).to include("PersonPhone")
expect(@person.keys).to include("PersonFax")
expect(@person.keys).to include("PersonEmail")
expect(@person.keys).to include("PersonWWW")
expect(@person.keys).to include("PersonAddress2")
expect(@person.keys).to include("PersonCity2")
expect(@person.keys).to include("PersonState2")
expect(@person.keys).to include("PersonZip2")
expect(@person.keys).to include("PersonPhone2")
expect(@person.keys).to include("PersonFax2")
expect(@person.keys).to include("PersonEmail2")
expect(@person.keys).to include("PersonWWW2")
end
end
xdescribe "#create" do
end
xdescribe "#update" do
end
xdescribe "#destroy" do
end
end
end
| 35.05 | 64 | 0.640514 |
ac1bdba58acee02749bb1bb0fd3b2a1dc1a33b8c
| 391 |
module Discogs
module Group
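# Represents a Discogs "master" release looked up by :group_id: #link
# targets "#{BASE_LINK}/masters/<group_id>" and #data exposes the parsed
# album payload under the :group key.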
class Base < Discogs::Album::Base
include Discogs::Utils::Album
private
def primary_args
[@args[:group_id]]
end
def link
"#{BASE_LINK}/masters/#{@args[:group_id]}"
end
def data
{ group: group_data }
end
def album
@album ||= response_data
end
end
end
end
| 15.038462 | 50 | 0.539642 |
e8a083edfd3e4c18e548ee5ade1e2d5fb3b6a913
| 1,696 |
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2019_07_01
module Models
#
# List of virtual network gateway vpn client connection health.
#
class VpnClientConnectionHealthDetailListResult
include MsRestAzure
# @return [Array<VpnClientConnectionHealthDetail>] List of vpn client
# connection health.
attr_accessor :value
#
# Mapper for VpnClientConnectionHealthDetailListResult class as Ruby
# Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'VpnClientConnectionHealthDetailListResult',
type: {
name: 'Composite',
class_name: 'VpnClientConnectionHealthDetailListResult',
model_properties: {
value: {
client_side_validation: true,
required: false,
serialized_name: 'value',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'VpnClientConnectionHealthDetailElementType',
type: {
name: 'Composite',
class_name: 'VpnClientConnectionHealthDetail'
}
}
}
}
}
}
}
end
end
end
end
| 29.241379 | 84 | 0.549528 |
1898f1bd5d1759fe039e30c55ace47332c2c333a
| 840 |
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe(RuboCop::Cop::Sorbet::AllowIncompatibleOverride, :config) do
subject(:cop) { described_class.new(config) }
def message
'Usage of `allow_incompatible` suggests a violation of the Liskov Substitution Principle. '\
'Instead, strive to write interfaces which respect subtyping principles and remove `allow_incompatible`'
end
it('disallows using override(allow_incompatible: true)') do
expect_offense(<<~RUBY)
class Foo
sig(a: Integer).override(allow_incompatible: true).void
^^^^^^^^^^^^^^^^^^^^^^^^ #{message}
end
RUBY
end
it('allows override without allow_incompatible') do
expect_no_offenses(<<~RUBY)
class Foo
sig(a: Integer).void.override
end
RUBY
end
end
| 28 | 108 | 0.666667 |
917bc769933fb8a22dd56a91a894a851d788af39
| 25,251 |
require "active_support/core_ext/hash/except"
require "active_support/core_ext/object/try"
require "active_support/core_ext/hash/indifferent_access"
module ActiveRecord
module NestedAttributes #:nodoc:
class TooManyRecords < ActiveRecordError
end
extend ActiveSupport::Concern
included do
class_attribute :nested_attributes_options, instance_writer: false, default: {}
end
# = Active Record Nested Attributes
#
# Nested attributes allow you to save attributes on associated records
# through the parent. By default nested attribute updating is turned off
# and you can enable it using the accepts_nested_attributes_for class
# method. When you enable nested attributes an attribute writer is
# defined on the model.
#
# The attribute writer is named after the association, which means that
# in the following example, two new methods are added to your model:
#
# <tt>author_attributes=(attributes)</tt> and
# <tt>pages_attributes=(attributes)</tt>.
#
# class Book < ActiveRecord::Base
# has_one :author
# has_many :pages
#
# accepts_nested_attributes_for :author, :pages
# end
#
# Note that the <tt>:autosave</tt> option is automatically enabled on every
# association that accepts_nested_attributes_for is used for.
#
# === One-to-one
#
# Consider a Member model that has one Avatar:
#
# class Member < ActiveRecord::Base
# has_one :avatar
# accepts_nested_attributes_for :avatar
# end
#
# Enabling nested attributes on a one-to-one association allows you to
# create the member and avatar in one go:
#
# params = { member: { name: 'Jack', avatar_attributes: { icon: 'smiling' } } }
# member = Member.create(params[:member])
# member.avatar.id # => 2
# member.avatar.icon # => 'smiling'
#
# It also allows you to update the avatar through the member:
#
# params = { member: { avatar_attributes: { id: '2', icon: 'sad' } } }
# member.update params[:member]
# member.avatar.icon # => 'sad'
#
# By default you will only be able to set and update attributes on the
# associated model. If you want to destroy the associated model through the
# attributes hash, you have to enable it first using the
# <tt>:allow_destroy</tt> option.
#
# class Member < ActiveRecord::Base
# has_one :avatar
# accepts_nested_attributes_for :avatar, allow_destroy: true
# end
#
# Now, when you add the <tt>_destroy</tt> key to the attributes hash, with a
# value that evaluates to +true+, you will destroy the associated model:
#
# member.avatar_attributes = { id: '2', _destroy: '1' }
# member.avatar.marked_for_destruction? # => true
# member.save
# member.reload.avatar # => nil
#
# Note that the model will _not_ be destroyed until the parent is saved.
#
# Also note that the model will not be destroyed unless you also specify
# its id in the updated hash.
#
# === One-to-many
#
# Consider a member that has a number of posts:
#
# class Member < ActiveRecord::Base
# has_many :posts
# accepts_nested_attributes_for :posts
# end
#
# You can now set or update attributes on the associated posts through
# an attribute hash for a member: include the key +:posts_attributes+
# with an array of hashes of post attributes as a value.
#
# For each hash that does _not_ have an <tt>id</tt> key a new record will
# be instantiated, unless the hash also contains a <tt>_destroy</tt> key
# that evaluates to +true+.
#
# params = { member: {
# name: 'joe', posts_attributes: [
# { title: 'Kari, the awesome Ruby documentation browser!' },
# { title: 'The egalitarian assumption of the modern citizen' },
# { title: '', _destroy: '1' } # this will be ignored
# ]
# }}
#
# member = Member.create(params[:member])
# member.posts.length # => 2
# member.posts.first.title # => 'Kari, the awesome Ruby documentation browser!'
# member.posts.second.title # => 'The egalitarian assumption of the modern citizen'
#
# You may also set a +:reject_if+ proc to silently ignore any new record
# hashes if they fail to pass your criteria. For example, the previous
# example could be rewritten as:
#
# class Member < ActiveRecord::Base
# has_many :posts
# accepts_nested_attributes_for :posts, reject_if: proc { |attributes| attributes['title'].blank? }
# end
#
# params = { member: {
# name: 'joe', posts_attributes: [
# { title: 'Kari, the awesome Ruby documentation browser!' },
# { title: 'The egalitarian assumption of the modern citizen' },
# { title: '' } # this will be ignored because of the :reject_if proc
# ]
# }}
#
# member = Member.create(params[:member])
# member.posts.length # => 2
# member.posts.first.title # => 'Kari, the awesome Ruby documentation browser!'
# member.posts.second.title # => 'The egalitarian assumption of the modern citizen'
#
# Alternatively, +:reject_if+ also accepts a symbol for using methods:
#
# class Member < ActiveRecord::Base
# has_many :posts
# accepts_nested_attributes_for :posts, reject_if: :new_record?
# end
#
# class Member < ActiveRecord::Base
# has_many :posts
# accepts_nested_attributes_for :posts, reject_if: :reject_posts
#
# def reject_posts(attributes)
# attributes['title'].blank?
# end
# end
#
# If the hash contains an <tt>id</tt> key that matches an already
# associated record, the matching record will be modified:
#
# member.attributes = {
# name: 'Joe',
# posts_attributes: [
# { id: 1, title: '[UPDATED] An, as of yet, undisclosed awesome Ruby documentation browser!' },
# { id: 2, title: '[UPDATED] other post' }
# ]
# }
#
# member.posts.first.title # => '[UPDATED] An, as of yet, undisclosed awesome Ruby documentation browser!'
# member.posts.second.title # => '[UPDATED] other post'
#
# However, the above applies if the parent model is being updated as well.
# For example, if you wanted to create a +member+ named _joe_ and wanted to
# update the +posts+ at the same time, that would give an
# ActiveRecord::RecordNotFound error.
#
# By default the associated records are protected from being destroyed. If
# you want to destroy any of the associated records through the attributes
# hash, you have to enable it first using the <tt>:allow_destroy</tt>
# option. This will allow you to also use the <tt>_destroy</tt> key to
# destroy existing records:
#
# class Member < ActiveRecord::Base
# has_many :posts
# accepts_nested_attributes_for :posts, allow_destroy: true
# end
#
# params = { member: {
# posts_attributes: [{ id: '2', _destroy: '1' }]
# }}
#
# member.attributes = params[:member]
# member.posts.detect { |p| p.id == 2 }.marked_for_destruction? # => true
# member.posts.length # => 2
# member.save
# member.reload.posts.length # => 1
#
# Nested attributes for an associated collection can also be passed in
# the form of a hash of hashes instead of an array of hashes:
#
# Member.create(
# name: 'joe',
# posts_attributes: {
# first: { title: 'Foo' },
# second: { title: 'Bar' }
# }
# )
#
# has the same effect as
#
# Member.create(
# name: 'joe',
# posts_attributes: [
# { title: 'Foo' },
# { title: 'Bar' }
# ]
# )
#
# The keys of the hash which is the value for +:posts_attributes+ are
# ignored in this case.
# However, it is not allowed to use <tt>'id'</tt> or <tt>:id</tt> for one of
# such keys, otherwise the hash will be wrapped in an array and
# interpreted as an attribute hash for a single post.
#
# Passing attributes for an associated collection in the form of a hash
# of hashes can be used with hashes generated from HTTP/HTML parameters,
# where there may be no natural way to submit an array of hashes.
#
# === Saving
#
# All changes to models, including the destruction of those marked for
# destruction, are saved and destroyed automatically and atomically when
# the parent model is saved. This happens inside the transaction initiated
# by the parent's save method. See ActiveRecord::AutosaveAssociation.
#
# === Validating the presence of a parent model
#
# If you want to validate that a child record is associated with a parent
# record, you can use the +validates_presence_of+ method and the +:inverse_of+
# key as this example illustrates:
#
# class Member < ActiveRecord::Base
# has_many :posts, inverse_of: :member
# accepts_nested_attributes_for :posts
# end
#
# class Post < ActiveRecord::Base
# belongs_to :member, inverse_of: :posts
# validates_presence_of :member
# end
#
# Note that if you do not specify the +:inverse_of+ option, then
# Active Record will try to automatically guess the inverse association
# based on heuristics.
#
# For one-to-one nested associations, if you build the new (in-memory)
# child object yourself before assignment, then this module will not
# overwrite it, e.g.:
#
# class Member < ActiveRecord::Base
# has_one :avatar
# accepts_nested_attributes_for :avatar
#
# def avatar
# super || build_avatar(width: 200)
# end
# end
#
# member = Member.new
# member.avatar_attributes = {icon: 'sad'}
# member.avatar.width # => 200
module ClassMethods
REJECT_ALL_BLANK_PROC = proc { |attributes| attributes.all? { |key, value| key == "_destroy" || value.blank? } }
# Defines an attributes writer for the specified association(s).
#
# Supported options:
# [:allow_destroy]
# If true, destroys any members from the attributes hash with a
# <tt>_destroy</tt> key and a value that evaluates to +true+
# (e.g. 1, '1', true, or 'true'). This option is off by default.
# [:reject_if]
# Allows you to specify a Proc or a Symbol pointing to a method
# that checks whether a record should be built for a certain attribute
# hash. The hash is passed to the supplied Proc or the method
# and it should return either +true+ or +false+. When no +:reject_if+
# is specified, a record will be built for all attribute hashes that
# do not have a <tt>_destroy</tt> value that evaluates to true.
# Passing <tt>:all_blank</tt> instead of a Proc will create a proc
# that will reject a record where all the attributes are blank excluding
# any value for +_destroy+.
# [:limit]
# Allows you to specify the maximum number of associated records that
# can be processed with the nested attributes. Limit also can be specified
# as a Proc or a Symbol pointing to a method that should return a number.
# If the size of the nested attributes array exceeds the specified limit,
# NestedAttributes::TooManyRecords exception is raised. If omitted, any
# number of associations can be processed.
# Note that the +:limit+ option is only applicable to one-to-many
# associations.
# [:update_only]
# For a one-to-one association, this option allows you to specify how
# nested attributes are going to be used when an associated record already
# exists. In general, an existing record may either be updated with the
# new set of attribute values or be replaced by a wholly new record
# containing those values. By default the +:update_only+ option is +false+
# and the nested attributes are used to update the existing record only
# if they include the record's <tt>:id</tt> value. Otherwise a new
# record will be instantiated and used to replace the existing one.
# However if the +:update_only+ option is +true+, the nested attributes
# are used to update the record's attributes always, regardless of
# whether the <tt>:id</tt> is present. The option is ignored for collection
# associations.
#
# Examples:
# # creates avatar_attributes=
# accepts_nested_attributes_for :avatar, reject_if: proc { |attributes| attributes['name'].blank? }
# # creates avatar_attributes=
# accepts_nested_attributes_for :avatar, reject_if: :all_blank
# # creates avatar_attributes= and posts_attributes=
# accepts_nested_attributes_for :avatar, :posts, allow_destroy: true
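# # creates posts_attributes=; more than 3 nested posts raise
# # NestedAttributes::TooManyRecords (illustrative addition, not in the
# # original Rails docs)
# accepts_nested_attributes_for :posts, limit: 3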
def accepts_nested_attributes_for(*attr_names)
options = { allow_destroy: false, update_only: false }
options.update(attr_names.extract_options!)
options.assert_valid_keys(:allow_destroy, :reject_if, :limit, :update_only)
options[:reject_if] = REJECT_ALL_BLANK_PROC if options[:reject_if] == :all_blank
attr_names.each do |association_name|
if reflection = _reflect_on_association(association_name)
reflection.autosave = true
define_autosave_validation_callbacks(reflection)
nested_attributes_options = self.nested_attributes_options.dup
nested_attributes_options[association_name.to_sym] = options
self.nested_attributes_options = nested_attributes_options
type = (reflection.collection? ? :collection : :one_to_one)
generate_association_writer(association_name, type)
else
raise ArgumentError, "No association found for name `#{association_name}'. Has it been defined yet?"
end
end
end
private
# Generates a writer method for this association. Serves as a point for
# accessing the objects in the association. For example, this method
# could generate the following:
#
# def pirate_attributes=(attributes)
# assign_nested_attributes_for_one_to_one_association(:pirate, attributes)
# end
#
# This redirects the attempts to write objects in an association through
# the helper methods defined below. Makes it seem like the nested
# associations are just regular associations.
def generate_association_writer(association_name, type)
generated_association_methods.module_eval <<-eoruby, __FILE__, __LINE__ + 1
if method_defined?(:#{association_name}_attributes=)
remove_method(:#{association_name}_attributes=)
end
def #{association_name}_attributes=(attributes)
assign_nested_attributes_for_#{type}_association(:#{association_name}, attributes)
end
eoruby
end
end
# Returns ActiveRecord::AutosaveAssociation::marked_for_destruction? It's
# used in conjunction with fields_for to build a form element for the
# destruction of this association.
#
# See ActionView::Helpers::FormHelper::fields_for for more info.
def _destroy
marked_for_destruction?
end
private
# Attribute hash keys that should not be assigned as normal attributes.
# These hash keys are nested attributes implementation details.
UNASSIGNABLE_KEYS = %w( id _destroy )
# Assigns the given attributes to the association.
#
# If an associated record does not yet exist, one will be instantiated. If
# an associated record already exists, the method's behavior depends on
# the value of the update_only option. If update_only is +false+ and the
# given attributes include an <tt>:id</tt> that matches the existing record's
# id, then the existing record will be modified. If no <tt>:id</tt> is provided
# it will be replaced with a new record. If update_only is +true+ the existing
# record will be modified regardless of whether an <tt>:id</tt> is provided.
#
# If the given attributes include a matching <tt>:id</tt> attribute, or
# update_only is true, and a <tt>:_destroy</tt> key set to a truthy value,
# then the existing record will be marked for destruction.
def assign_nested_attributes_for_one_to_one_association(association_name, attributes)
options = nested_attributes_options[association_name]
if attributes.respond_to?(:permitted?)
attributes = attributes.to_h
end
attributes = attributes.with_indifferent_access
existing_record = send(association_name)
if (options[:update_only] || !attributes["id"].blank?) && existing_record &&
(options[:update_only] || existing_record.id.to_s == attributes["id"].to_s)
assign_to_or_mark_for_destruction(existing_record, attributes, options[:allow_destroy]) unless call_reject_if(association_name, attributes)
elsif attributes["id"].present?
raise_nested_attributes_record_not_found!(association_name, attributes["id"])
elsif !reject_new_record?(association_name, attributes)
assignable_attributes = attributes.except(*UNASSIGNABLE_KEYS)
if existing_record && existing_record.new_record?
existing_record.assign_attributes(assignable_attributes)
association(association_name).initialize_attributes(existing_record)
else
method = "build_#{association_name}"
if respond_to?(method)
send(method, assignable_attributes)
else
raise ArgumentError, "Cannot build association `#{association_name}'. Are you trying to build a polymorphic one-to-one association?"
end
end
end
end
# Assigns the given attributes to the collection association.
#
# Hashes with an <tt>:id</tt> value matching an existing associated record
# will update that record. Hashes without an <tt>:id</tt> value will build
# a new record for the association. Hashes with a matching <tt>:id</tt>
# value and a <tt>:_destroy</tt> key set to a truthy value will mark the
# matched record for destruction.
#
# For example:
#
# assign_nested_attributes_for_collection_association(:people, {
# '1' => { id: '1', name: 'Peter' },
# '2' => { name: 'John' },
# '3' => { id: '2', _destroy: true }
# })
#
# Will update the name of the Person with ID 1, build a new associated
# person with the name 'John', and mark the associated Person with ID 2
# for destruction.
#
# Also accepts an Array of attribute hashes:
#
# assign_nested_attributes_for_collection_association(:people, [
# { id: '1', name: 'Peter' },
# { name: 'John' },
# { id: '2', _destroy: true }
# ])
def assign_nested_attributes_for_collection_association(association_name, attributes_collection)
options = nested_attributes_options[association_name]
if attributes_collection.respond_to?(:permitted?)
attributes_collection = attributes_collection.to_h
end
unless attributes_collection.is_a?(Hash) || attributes_collection.is_a?(Array)
raise ArgumentError, "Hash or Array expected for attribute `#{association_name}`, got #{attributes_collection.class.name} (#{attributes_collection.inspect})"
end
check_record_limit!(options[:limit], attributes_collection)
if attributes_collection.is_a? Hash
keys = attributes_collection.keys
attributes_collection = if keys.include?("id") || keys.include?(:id)
[attributes_collection]
else
attributes_collection.values
end
end
association = association(association_name)
existing_records = if association.loaded?
association.target
else
attribute_ids = attributes_collection.map { |a| a["id"] || a[:id] }.compact
attribute_ids.empty? ? [] : association.scope.where(association.klass.primary_key => attribute_ids)
end
attributes_collection.each do |attributes|
if attributes.respond_to?(:permitted?)
attributes = attributes.to_h
end
attributes = attributes.with_indifferent_access
if attributes["id"].blank?
unless reject_new_record?(association_name, attributes)
association.build(attributes.except(*UNASSIGNABLE_KEYS))
end
elsif existing_record = existing_records.detect { |record| record.id.to_s == attributes["id"].to_s }
unless call_reject_if(association_name, attributes)
# Make sure we are operating on the actual object which is in the association's
# proxy_target array (either by finding it, or adding it if not found)
# Take into account that the proxy_target may have changed due to callbacks
target_record = association.target.detect { |record| record.id.to_s == attributes["id"].to_s }
if target_record
existing_record = target_record
else
association.add_to_target(existing_record, :skip_callbacks)
end
assign_to_or_mark_for_destruction(existing_record, attributes, options[:allow_destroy])
end
else
raise_nested_attributes_record_not_found!(association_name, attributes["id"])
end
end
end
# Takes in a limit and checks if the attributes_collection has too many
# records. It accepts limit in the form of symbol, proc, or
# number-like object (anything that can be compared with an integer).
#
# Raises TooManyRecords error if the attributes_collection is
# larger than the limit.
def check_record_limit!(limit, attributes_collection)
if limit
limit = \
case limit
when Symbol
send(limit)
when Proc
limit.call
else
limit
end
if limit && attributes_collection.size > limit
raise TooManyRecords, "Maximum #{limit} records are allowed. Got #{attributes_collection.size} records instead."
end
end
end
# Updates a record with the +attributes+ or marks it for destruction if
# +allow_destroy+ is +true+ and has_destroy_flag? returns +true+.
def assign_to_or_mark_for_destruction(record, attributes, allow_destroy)
record.assign_attributes(attributes.except(*UNASSIGNABLE_KEYS))
record.mark_for_destruction if has_destroy_flag?(attributes) && allow_destroy
end
# Determines if a hash contains a truthy _destroy key.
def has_destroy_flag?(hash)
Type::Boolean.new.cast(hash["_destroy"])
end
# Determines if a new record should be rejected by checking
# has_destroy_flag? or if a <tt>:reject_if</tt> proc exists for this
# association and evaluates to +true+.
def reject_new_record?(association_name, attributes)
will_be_destroyed?(association_name, attributes) || call_reject_if(association_name, attributes)
end
# Determines if a record with the particular +attributes+ should be
# rejected by calling the reject_if Symbol or Proc (if defined).
# The reject_if option is defined by +accepts_nested_attributes_for+.
#
# Returns false if there is a +destroy_flag+ on the attributes.
def call_reject_if(association_name, attributes)
return false if will_be_destroyed?(association_name, attributes)
case callback = nested_attributes_options[association_name][:reject_if]
when Symbol
method(callback).arity == 0 ? send(callback) : send(callback, attributes)
when Proc
callback.call(attributes)
end
end
# Only take into account the destroy flag if <tt>:allow_destroy</tt> is true
def will_be_destroyed?(association_name, attributes)
allow_destroy?(association_name) && has_destroy_flag?(attributes)
end
def allow_destroy?(association_name)
nested_attributes_options[association_name][:allow_destroy]
end
def raise_nested_attributes_record_not_found!(association_name, record_id)
model = self.class._reflect_on_association(association_name).klass.name
raise RecordNotFound.new("Couldn't find #{model} with ID=#{record_id} for #{self.class.name} with ID=#{id}",
model, "id", record_id)
end
end
end
| 42.943878 | 167 | 0.649479 |
382d1f005bd11105c90a77a7fa117987b6f5b97f
| 1,835 |
module Bosh
module Monitor
end
end
Bhm = Bosh::Monitor
begin
require 'fiber'
rescue LoadError
unless defined? Fiber
$stderr.puts 'FATAL: HealthMonitor requires Ruby implementation that supports fibers'
exit 1
end
end
require 'ostruct'
require 'set'
require 'em-http-request'
require 'eventmachine'
require 'logging'
require 'nats/client'
require 'sinatra'
require 'thin'
require 'securerandom'
require 'yajl'
# Helpers
require 'bosh/monitor/yaml_helper'
# Basic blocks
require 'bosh/monitor/agent'
require 'bosh/monitor/config'
require 'bosh/monitor/core_ext'
require 'bosh/monitor/director'
require 'bosh/monitor/director_monitor'
require 'bosh/monitor/errors'
require 'bosh/monitor/metric'
require 'bosh/monitor/runner'
require 'bosh/monitor/version'
# Processing
require 'bosh/monitor/agent_manager'
require 'bosh/monitor/event_processor'
# HTTP endpoints
require 'bosh/monitor/api_controller'
# Protocols
require 'bosh/monitor/protocols/tcp_connection'
require 'bosh/monitor/protocols/tsdb_connection'
require 'bosh/monitor/protocols/graphite_connection'
# Events
require 'bosh/monitor/events/base'
require 'bosh/monitor/events/alert'
require 'bosh/monitor/events/heartbeat'
# Plugins
require 'bosh/monitor/plugins/base'
require 'bosh/monitor/plugins/dummy'
require 'bosh/monitor/plugins/http_request_helper'
require 'bosh/monitor/plugins/resurrector_helper'
require 'bosh/monitor/plugins/cloud_watch'
require 'bosh/monitor/plugins/datadog'
require 'bosh/monitor/plugins/paging_datadog_client'
require 'bosh/monitor/plugins/email'
require 'bosh/monitor/plugins/graphite'
require 'bosh/monitor/plugins/logger'
require 'bosh/monitor/plugins/nats'
require 'bosh/monitor/plugins/pagerduty'
require 'bosh/monitor/plugins/resurrector'
require 'bosh/monitor/plugins/tsdb'
require 'bosh/monitor/plugins/varz'
| 23.831169 | 89 | 0.802725 |
7a7c74f234c35b32ac08d36e09d00318d024c29b
| 890 |
describe 'Web variant - Gemfile' do
subject { file('Gemfile') }
it 'adds i18n-js gem' do
expect(subject).to contain('i18n-js')
end
it 'adds webpacker gem' do
expect(subject).to contain('webpacker')
end
it 'adds sass-rails gem' do
expect(subject).to contain('sass-rails')
end
describe 'Development + Test Environment' do
it 'adds sassc-rails gem' do
expect(subject).to contain('sassc-rails').after('^group :development, :test')
end
it 'adds danger-eslint gem' do
expect(subject).to contain('danger-eslint').after('^group :development, :test')
end
it 'adds scss_lint gem' do
expect(subject).to contain('scss_lint').after('^group :development, :test')
end
end
describe 'Test Environment' do
it 'adds rspec-retry gem' do
expect(subject).to contain('rspec-retry').after('^group :test')
end
end
end
| 24.722222 | 85 | 0.658427 |
d5ad13040d40d992cce5fd29423b7a9d54a83f6b
| 1,040 |
require 'hashie'
require File.expand_path("../hashie_key_conversion", __FILE__)
module SabreDevStudio
##
# Base RequestObject that other API objects can inherit from
#
# ==== Example:
# module SabreDevStudio
# module Flight
# class TravelThemeLookup < SabreDevStudio::RequestObject
# end
# end
# end
class RequestObject < Hash
attr_reader :response
def initialize(endpoint, options = {})
@response = SabreDevStudio::Base.get(endpoint, :query => options)
end
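# Unknown methods are delegated to a Hashie::Mash built lazily from the
# parsed response (see #hashie below), so response keys can be read as
# methods, e.g. `request.some_key` (key name illustrative).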
def method_missing(meth, *args, &block)
if hashie.respond_to?(meth)
hashie.send(meth)
else
super
end
end
def respond_to?(meth)
if hashie.respond_to?(meth)
true
else
super
end
end
def to_s
hashie
end
def inspect
self.to_s
end
private
def hashie
unless @hashie
@hashie ||= Hashie::Mash.new(@response.parsed_response).downcase_keys
end
@hashie
end
end
end
| 18.909091 | 77 | 0.604808 |
ab54db80ed42a6099ff9b45977407f35949a2b6d
| 2,795 |
module AASM
class InstanceBase
attr_accessor :from_state, :to_state, :current_event
def initialize(instance)
@instance = instance
end
def current_state
@instance.aasm_read_state
end
def current_state=(state)
@instance.aasm_write_state_without_persistence(state)
@current_state = state
end
def enter_initial_state
state_name = determine_state_name(@instance.class.aasm.initial_state)
state_object = state_object_for_name(state_name)
state_object.fire_callbacks(:before_enter, @instance)
state_object.fire_callbacks(:enter, @instance)
self.current_state = state_name
state_object.fire_callbacks(:after_enter, @instance)
state_name
end
def human_state
AASM::Localizer.new.human_state_name(@instance.class, current_state)
end
def states(options={})
if options[:permissible]
# ugliness level 1000
transitions = @instance.class.aasm.events.values_at(*permissible_events).compact.map {|e| e.transitions_from_state(current_state) }
tos = transitions.map {|t| t[0] ? t[0].to : nil}.flatten.compact.map(&:to_sym).uniq
@instance.class.aasm.states.select {|s| tos.include?(s.name.to_sym)}
else
@instance.class.aasm.states
end
end
# QUESTION: shouldn't events and permissible_events be the same thing?
# QUESTION: shouldn't events return objects instead of strings?
def events(state=current_state)
events = @instance.class.aasm.events.values.select {|e| e.transitions_from_state?(state) }
events.map {|e| e.name}
end
# filters the results of events_for_current_state so that only those that
# are really currently possible (given transition guards) are shown.
# QUESTION: what about events.permissible ?
def permissible_events
events.select{ |e| @instance.send(("may_" + e.to_s + "?").to_sym) }
end
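# Illustrative only (hypothetical Job model, not part of this file):
#
#   job = Job.new                # assume initial state :sleeping
#   job.aasm.events              # => [:run]  (a transition exists from :sleeping)
#   job.aasm.permissible_events  # => [:run] only when the :run guards pass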
def state_object_for_name(name)
obj = @instance.class.aasm.states.find {|s| s == name}
raise AASM::UndefinedState, "State :#{name} doesn't exist" if obj.nil?
obj
end
def determine_state_name(state)
case state
when Symbol, String
state
when Proc
state.call(@instance)
else
raise NotImplementedError, "Unrecognized state-type given. Expected Symbol, String, or Proc."
end
end
def may_fire_event?(name, *args)
if event = @instance.class.aasm.events[name]
event.may_fire?(@instance, *args)
else
false # unknown event
end
end
def set_current_state_with_persistence(state)
save_success = @instance.aasm_write_state(state)
self.current_state = state if save_success
save_success
end
end
end
| 30.053763 | 139 | 0.676923 |
e94dba57559d277219858c63ce14d83803742771
| 487 |
# Be sure to restart your server when you modify this file.
# Version of your assets, change this if you want to expire all your assets.
Rails.application.config.assets.version = '1.0'
# Add additional assets to the asset load path
# Rails.application.config.assets.paths << Emoji.images_path
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
Rails.application.config.assets.precompile += %w( broadcast.js )
| 40.583333 | 93 | 0.774127 |
bf192ea273f58cff89a9a76b713bd2297e441424
| 304 |
Sequel.migration do
up do
add_column :data_imports, :create_visualization, :boolean, default: false
add_column :data_imports, :visualization_id, :uuid, default: nil
end
down do
drop_column :data_imports, :create_visualization
drop_column :data_imports, :visualization_id
end
end
| 25.333333 | 77 | 0.759868 |
1dd2340a7e49e1cc7ec28f7a7024f0d59954848b
| 42 |
module RubyQuirks
VERSION = "0.0.2"
end
| 10.5 | 19 | 0.690476 |
f74cfed2b8ccc07e4ee0219277165697484d5d82
| 1,605 |
module SRL
# = Summary
# Extension class method to allow easy instantiation of objects from
# a hash, such as the ones returned in JSON form from the SRL API.
#
# = Notes
# Any "id" key is converted to "oid" to not overwrite the default
# Ruby Object.id behavior.
#
# = Usage
# Simply include this module in any class to add a `.from_hash` class
# method to instantiate it from a hash.
#
# So long as the hash passed to `from_hash` has keys, in string or
# symbol form, that match the name of methods that an instance of
# the class responds to, instance variables with that same name will
# be assigned the value pointed at in this hash.
#
# = Example
#
# class Player
# include SRL::Unmarshalable
#
# attr_reader :name
# attr_reader :rank
# attr_reader :trueskill
# end
#
# p = Player.from_hash({
# name: "Foobar",
# trueskill: 0xDEADBEEF,
# rank: 9001
# })
#
# puts "It's over 9000!" if p.rank > 9000 # => outputs "It's over 9000!"
module Unmarshalable
def self.included(base)
base.extend(ClassMethods)
end
module ClassMethods
def from_hash(hash = {})
obj = new
hash.each do |k, v|
# Be a good boy and do not overwrite the standard ruby Object.id
k = :oid if k.intern == :id
next unless obj.respond_to?(k)
if obj.respond_to?("#{k}=")
obj.send("#{k}=", v)
else
obj.instance_variable_set(:"@#{k}", v)
end
end
obj
end
end
end
end
| 25.887097 | 76 | 0.589408 |
1139d79e1ae20f197a15e0ca0d999dc507dec17e
| 363 |
module Spree
module Admin
class TimeFramesController < ResourceController
before_action :load_data, except: :index
def model_class
Dish::TimeFrame
end
def show
redirect_to action: :edit
end
def load_data
end
def location_after_save
admin_time_frames_url
end
end
end
end
| 16.5 | 51 | 0.630854 |
285a3d1047bcb0eaa90db732711b4e520f6dcce2
| 7,796 |
module Gitlab
module Auth
MissingPersonalTokenError = Class.new(StandardError)
REGISTRY_SCOPES = [:read_registry].freeze
# Scopes used for GitLab API access
API_SCOPES = [:api, :read_user].freeze
# Scopes used for OpenID Connect
OPENID_SCOPES = [:openid].freeze
# Default scopes for OAuth applications that don't define their own
DEFAULT_SCOPES = [:api].freeze
AVAILABLE_SCOPES = (API_SCOPES + REGISTRY_SCOPES).freeze
# Other available scopes
OPTIONAL_SCOPES = (AVAILABLE_SCOPES + OPENID_SCOPES - DEFAULT_SCOPES).freeze
class << self
prepend EE::Gitlab::Auth
def find_for_git_client(login, password, project:, ip:)
raise "Must provide an IP for rate limiting" if ip.nil?
# `user_with_password_for_git` should be the last check
# because it's the most expensive, especially when LDAP
# is enabled.
result =
service_request_check(login, password, project) ||
build_access_token_check(login, password) ||
lfs_token_check(login, password) ||
oauth_access_token_check(login, password) ||
personal_access_token_check(password) ||
user_with_password_for_git(login, password) ||
Gitlab::Auth::Result.new
rate_limit!(ip, success: result.success?, login: login)
Gitlab::Auth::UniqueIpsLimiter.limit_user!(result.actor)
return result if result.success? || current_application_settings.signin_enabled? || Gitlab::LDAP::Config.enabled?
# If sign-in is disabled and LDAP is not configured, recommend a
# personal access token on failed auth attempts
raise Gitlab::Auth::MissingPersonalTokenError
end
def find_with_user_password(login, password)
# Avoid resource intensive login checks if password is not provided
return unless password.present?
Gitlab::Auth::UniqueIpsLimiter.limit_user! do
user = User.by_login(login)
# If no user is found, or it's an LDAP server, try LDAP.
# LDAP users are only authenticated via LDAP
if user.nil? || user.ldap_user?
# Second chance - try LDAP authentication
return unless Gitlab::LDAP::Config.enabled?
Gitlab::LDAP::Authentication.login(login, password)
else
user if user.active? && user.valid_password?(password)
end
end
end
def rate_limit!(ip, success:, login:)
rate_limiter = Gitlab::Auth::IpRateLimiter.new(ip)
return unless rate_limiter.enabled?
if success
# Repeated login 'failures' are normal behavior for some Git clients so
# it is important to reset the ban counter once the client has proven
# they are not a 'bad guy'.
rate_limiter.reset!
else
# Register a login failure so that Rack::Attack can block the next
# request from this IP if needed.
rate_limiter.register_fail!
if rate_limiter.banned?
Rails.logger.info "IP #{ip} failed to login " \
"as #{login} but has been temporarily banned from Git auth"
end
end
end
private
def service_request_check(login, password, project)
matched_login = /(?<service>^[a-zA-Z]*-ci)-token$/.match(login)
return unless project && matched_login.present?
underscored_service = matched_login['service'].underscore
if Service.available_services_names.include?(underscored_service)
# We treat underscored_service as a trusted input because it is included
# in the Service.available_services_names whitelist.
service = project.public_send("#{underscored_service}_service")
if service && service.activated? && service.valid_token?(password)
Gitlab::Auth::Result.new(nil, project, :ci, build_authentication_abilities)
end
end
end
def user_with_password_for_git(login, password)
user = find_with_user_password(login, password)
return unless user
raise Gitlab::Auth::MissingPersonalTokenError if user.two_factor_enabled?
Gitlab::Auth::Result.new(user, nil, :gitlab_or_ldap, full_authentication_abilities)
end
def oauth_access_token_check(login, password)
if login == "oauth2" && password.present?
token = Doorkeeper::AccessToken.by_token(password)
if valid_oauth_token?(token)
user = User.find_by(id: token.resource_owner_id)
Gitlab::Auth::Result.new(user, nil, :oauth, full_authentication_abilities)
end
end
end
def personal_access_token_check(password)
return unless password.present?
token = PersonalAccessTokensFinder.new(state: 'active').find_by(token: password)
if token && valid_scoped_token?(token, AVAILABLE_SCOPES.map(&:to_s))
Gitlab::Auth::Result.new(token.user, nil, :personal_token, abilities_for_scope(token.scopes))
end
end
def valid_oauth_token?(token)
token && token.accessible? && valid_scoped_token?(token, ["api"])
end
def valid_scoped_token?(token, scopes)
AccessTokenValidationService.new(token).include_any_scope?(scopes)
end
def abilities_for_scope(scopes)
scopes.map do |scope|
self.public_send(:"#{scope}_scope_authentication_abilities")
end.flatten.uniq
end
def lfs_token_check(login, password)
deploy_key_matches = login.match(/\Alfs\+deploy-key-(\d+)\z/)
actor =
if deploy_key_matches
DeployKey.find(deploy_key_matches[1])
else
User.by_login(login)
end
return unless actor
token_handler = Gitlab::LfsToken.new(actor)
authentication_abilities =
if token_handler.user?
full_authentication_abilities
else
read_authentication_abilities
end
if Devise.secure_compare(token_handler.token, password)
Gitlab::Auth::Result.new(actor, nil, token_handler.type, authentication_abilities)
end
end
def build_access_token_check(login, password)
return unless login == 'gitlab-ci-token'
return unless password
build = ::Ci::Build.running.find_by_token(password)
return unless build
return unless build.project.builds_enabled?
if build.user
# If user is assigned to build, use restricted credentials of user
Gitlab::Auth::Result.new(build.user, build.project, :build, build_authentication_abilities)
else
# Otherwise use generic CI credentials (backward compatibility)
Gitlab::Auth::Result.new(nil, build.project, :ci, build_authentication_abilities)
end
end
public
def build_authentication_abilities
[
:read_project,
:build_download_code,
:build_read_container_image,
:build_create_container_image
]
end
def read_authentication_abilities
[
:read_project,
:download_code,
:read_container_image
]
end
def full_authentication_abilities
read_authentication_abilities + [
:push_code,
:create_container_image
]
end
alias_method :api_scope_authentication_abilities, :full_authentication_abilities
def read_registry_scope_authentication_abilities
[:read_container_image]
end
# The currently used auth method doesn't allow any actions for this scope
def read_user_scope_authentication_abilities
[]
end
end
end
end
| 33.174468 | 121 | 0.651873 |
bfa76c091ee5a33150c1792c3b60bd1c292919a0
| 137 |
require "test_helper"
class ManageChallengesHelperTest < ActionView::TestCase
def test_sanity
flunk "Need real tests"
end
end
| 13.7 | 55 | 0.766423 |
e8114ca02dcb0a8b6e43fd2532db206b15854a17
| 5,473 |
require 'rubygems'
require 'bundler/setup'
require 'active_support/core_ext/hash'
require 'aws-sdk-s3'
require 'diffy'
require 'json'
require 'neatjson'
require 'thor'
require 'yaml'
require 's3_master'
Diffy::Diff.default_format = :color
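# Typical invocations, assuming the gem wires this Thor class to an `s3_master`
# executable (the executable name, bucket name and policy type below are
# illustrative; real policy types come from S3Master::RemotePolicy::POLICY_TYPES):
#
#   s3_master diff my-bucket lifecycle
#   s3_master apply my-bucket lifecycle -c s3_policies.yml
#   s3_master status
#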
class S3MasterCli < Thor
include Thor::Shell
class_option "config-file".to_sym, type: :string, aliases: %w(c), default: "s3_policies.yml"
class_option "policy-dir".to_sym, type: :string, aliases: %w(d), default: "policies"
class_option :debug, type: :boolean
class_option :force, type: :boolean
desc "diff <bucket> <policy-type> [policy-id]", "Show differences between the current and the desired policy."
def diff(bucket, policy_type, policy_id=nil)
config = S3Master::Config.new(options[:"config-file"])
remote_policy = S3Master::RemotePolicy.new(bucket, policy_type, {id: policy_id, region: config.region(bucket)})
local_policy = S3Master::LocalPolicy.new(config, bucket, policy_type, options.merge(id: policy_id).symbolize_keys)
if options[:debug]
bkt = Aws::S3::Bucket.new(bucket)
puts "%s: %s" % [bkt.name, bkt.url]
puts "=== Remote Policy:\n%s" % [JSON.neat_generate(remote_policy.body, sort: true)]
puts "=== Local Policy:\n%s" % [JSON.neat_generate(local_policy.body, sort: true)]
end
policy_diff = S3Master::PolicyDiffer.new(remote_policy.body, local_policy.body)
prefix = "#{bucket}/#{policy_type}"
prefix += "/#{policy_id}" if policy_id
if policy_diff.identical?
puts "#{prefix}: Local and remote policies match."
else
puts "#{prefix} diff:\n%s" % [policy_diff.to_s]
end
policy_diff
end
desc "apply [<bucket>] [<policy-type>] [policy-id]", "Appies the local policy to the bucket."
def apply(cli_bucket=nil, cli_policy_type=nil, cli_policy_id=nil)
config = S3Master::Config.new(options[:"config-file"])
config.each do |bucket, policy_type, policy_id|
next if !cli_bucket.nil? && cli_bucket != bucket ||
!cli_policy_type.nil? && cli_policy_type != policy_type ||
!cli_policy_id.nil? && cli_policy_id != policy_id
policy_diff = diff(bucket, policy_type, policy_id)
next if policy_diff.identical? || ! (options[:force] || yes?("Proceed? (y/N)"))
local_policy = S3Master::LocalPolicy.new(config, bucket, policy_type, options.merge(id: policy_id).symbolize_keys)
remote_policy = S3Master::RemotePolicy.new(bucket, policy_type, {id: policy_id, region: config.region(bucket)})
remote_policy.write(local_policy)
end
end
desc "fetch <bucket> <policy-type> [policy-id]", "Retrieves the specified policy for the bucket and saves it in the config-specified file"
def fetch(buckets=nil, policy_types=S3Master::RemotePolicy::POLICY_TYPES, policy_id=nil)
config = S3Master::Config.new(options[:"config-file"])
buckets ||= config[:buckets].keys
Array(buckets).each do |bucket|
Array(policy_types).each do |policy_type|
next if ! S3Master::RemotePolicy.known_policy_type?(policy_type)
local_policy = S3Master::LocalPolicy.new(config, bucket, policy_type, options.merge(skip_load: true, id: policy_id).symbolize_keys)
remote_policy = S3Master::RemotePolicy.new(bucket, policy_type, {id: policy_id, region: config.region(bucket)})
if !local_policy.basename.nil?
local_policy.write(remote_policy)
else
puts "%s policy:\n%s" % [policy_type, remote_policy.pretty_body]
end
end
end
end
desc "status [<bucket>]", "Checks if the policies have differences"
def status(user_bucket=nil)
config = S3Master::Config.new(options[:"config-file"])
any_differences = false
config.each do |bucket, policy_type, policy_id|
next if !user_bucket.nil? && user_bucket != bucket
local_policy = S3Master::LocalPolicy.new(config, bucket, policy_type, options.merge(id: policy_id).symbolize_keys)
remote_policy = S3Master::RemotePolicy.new(bucket, policy_type, {id: policy_id, region: config.region(bucket)})
policy_diff = S3Master::PolicyDiffer.new(remote_policy.body, local_policy.body)
if !policy_diff.identical?
any_differences = true
if policy_id.nil?
puts "* %s: %s" % [bucket, policy_type]
else
puts "* %s: %s %s" % [bucket, policy_type, policy_id]
end
end
end
puts "No differences detected." if !any_differences
end
desc "remove BUCKET POLICY-TYPE", "Removes the specified policy type from the bucket."
def remove(user_bucket, policy_type)
#config = S3Master::Config.new(options[:"config-file"])
remote_policy = S3Master::RemotePolicy.new(user_bucket, policy_type)
remote_policy.remove
end
desc "add-name-tag bucket1 bucket2 ...", "Adds a 'Name' tag to the bucket with its name, for cost reporting"
option :tag, type: :string, default: 'Name'
def add_name_tag(*buckets)
buckets.each do |bucket_name|
bkt = Aws::S3::Bucket.new(bucket_name)
begin
tag_set = bkt.tagging.tag_set
rescue Aws::S3::Errors::NoSuchTagSet => e
tag_set = []
end
needs_name_tag = !tag_set.any?{|tag_obj| tag_obj.key == options[:tag]}
if needs_name_tag
tag_set << {key: options[:tag], value: bucket_name}
bkt.client.put_bucket_tagging({
bucket: bucket_name,
tagging: {
tag_set: tag_set
}
})
end
end
end
end
| 37.486301 | 140 | 0.679883 |
ff510814253457839f13599f7e274f0da636b443
| 1,366 |
# frozen_string_literal: true
# Copyright (c) 2019 Danil Pismenny <[email protected]>
# Base report generator
#
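# A hedged usage sketch: a hypothetical subclass (MemberReportGenerator and the
# exported columns are illustrative, not defined here) only needs to supply
# +records+ and build its sheet, e.g. via +dump_records+:
#
#   class MemberReportGenerator < BaseGenerator
#     def perform
#       validate!
#       dump_records(%w[id email created_at])
#     end
#
#     private
#
#     def records
#       @records ||= Member.all
#     end
#   end
#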
class BaseGenerator
Error = Class.new StandardError
TooManyRecords = Class.new Error
attr_reader :form, :report_name
def initialize(form, report_name)
@form = form
@report_name = report_name
end
def perform
raise 'implement'
end
def file; end
def validate!
raise TooManyRecords, "#{records.count} > #{Settings.max_export_records_count}" if records.count > Settings.max_export_records_count
end
private
def add_form_to_sheet(sheet)
sheet.add_row [(form.time_from ? I18n.t(form.time_from, format: :short) : 'С начала'),
(form.time_from ? I18n.t(form.time_from, format: :short) : 'До момента формирования отчёта')]
sheet.add_row [Member.model_name.human, form.member] if form.respond_to? :member
sheet.add_row []
end
def dump_records(columns)
xlsx_package = Axlsx::Package.new
xlsx_package.workbook.add_worksheet(name: report_name) do |sheet|
sheet.add_row columns
records.each do |record|
sheet.add_row(columns.map { |c| record.send c })
end
end
xlsx_package.use_shared_strings = true
stream = xlsx_package.to_stream
stream.define_singleton_method :original_filename do
'report.xlsx'
end
stream
end
end
| 25.773585 | 136 | 0.70571 |
f79369bd09c743d06db692ab4ebe83cc2c231b27
| 141 |
# frozen_string_literal: true
module Nokogiri
module XML
class Notation < Struct.new(:name, :public_id, :system_id)
end
end
end
| 15.666667 | 62 | 0.716312 |
e2cf57a83507030d1cb65713b8152485ee56c7a1
| 195 |
# frozen_string_literal: true
require 'foodie/version'
require 'foodie/geo'
require 'foodie/geo/ip_api_provider'
module Foodie
class Error < StandardError; end
# Your code goes here...
end
| 17.727273 | 36 | 0.764103 |
5deb2510a01fa39142d94ab22adba2f94f3a0eb3
| 238 |
class CreateArtists < ActiveRecord::Migration[5.2]
# def up
# end
# def down
# end
def change
create_table :artists do |t|
t.string :name
t.string :genre
t.integer :age
t.string :hometown
end
end
end
| 14 | 50 | 0.621849 |
acbefa5ee9e65ea70947d4316c605adcf1716920
| 10,834 |
# frozen_string_literal: true
require 'capybara/selector/filter_set'
require 'capybara/selector/css'
require 'capybara/selector/regexp_disassembler'
require 'capybara/selector/builders/xpath_builder'
require 'capybara/selector/builders/css_builder'
module Capybara
class Selector
class Definition
attr_reader :name, :expressions
extend Forwardable
def initialize(name, locator_type: nil, raw_locator: false, supports_exact: nil, &block)
@name = name
@filter_set = Capybara::Selector::FilterSet.add(name) {}
@match = nil
@label = nil
@failure_message = nil
@expressions = {}
@expression_filters = {}
@locator_filter = nil
@default_visibility = nil
@locator_type = locator_type
@raw_locator = raw_locator
@supports_exact = supports_exact
instance_eval(&block)
end
def custom_filters
warn "Deprecated: Selector#custom_filters is not valid when same named expression and node filter exist - don't use"
node_filters.merge(expression_filters).freeze
end
def node_filters
@filter_set.node_filters
end
def expression_filters
@filter_set.expression_filters
end
##
#
# Define a selector by an xpath expression
#
# @overload xpath(*expression_filters, &block)
      # @param [Array<Symbol>] expression_filters ([]) Names of filters that are implemented via this expression; if not specified, the names of any keyword parameters in the block will be used
# @yield [locator, options] The block to use to generate the XPath expression
# @yieldparam [String] locator The locator string passed to the query
# @yieldparam [Hash] options The options hash passed to the query
# @yieldreturn [#to_xpath, #to_s] An object that can produce an xpath expression
#
# @overload xpath()
# @return [#call] The block that will be called to generate the XPath expression
#
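      #
      # A minimal sketch of defining a selector this way; the :data_row selector
      # and its data-row attribute are illustrative, not shipped with Capybara.
      #
      # @example
      #   Capybara.add_selector(:data_row) do
      #     xpath do |locator|
      #       XPath.descendant(:tr)[XPath.attr(:'data-row') == locator.to_s]
      #     end
      #   end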
def xpath(*allowed_filters, &block)
expression(:xpath, allowed_filters, &block)
end
##
#
# Define a selector by a CSS selector
#
# @overload css(*expression_filters, &block)
# @param [Array<Symbol>] expression_filters ([]) Names of filters that can be implemented via this CSS selector
# @yield [locator, options] The block to use to generate the CSS selector
# @yieldparam [String] locator The locator string passed to the query
# @yieldparam [Hash] options The options hash passed to the query
# @yieldreturn [#to_s] An object that can produce a CSS selector
#
# @overload css()
# @return [#call] The block that will be called to generate the CSS selector
#
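      #
      # A minimal sketch; the :flash selector and the .flash CSS class are
      # illustrative assumptions, not part of Capybara itself.
      #
      # @example
      #   Capybara.add_selector(:flash) do
      #     css do |locator|
      #       locator ? "div.flash.#{locator}" : "div.flash"
      #     end
      #   end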
def css(*allowed_filters, &block)
expression(:css, allowed_filters, &block)
end
##
#
# Automatic selector detection
#
# @yield [locator] This block takes the passed in locator string and returns whether or not it matches the selector
      # @yieldparam [String] locator The locator string used to determine if it matches the selector
# @yieldreturn [Boolean] Whether this selector matches the locator string
# @return [#call] The block that will be used to detect selector match
#
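      #
      # A minimal sketch modeled on an :id-style selector that should be picked
      # automatically for Symbol locators (illustrative, not the shipped definition).
      #
      # @example
      #   Capybara.add_selector(:id) do
      #     xpath { |locator| XPath.descendant[XPath.attr(:id) == locator.to_s] }
      #     match { |locator| locator.is_a?(Symbol) }
      #   end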
def match(&block)
@match = block if block
@match
end
##
#
# Set/get a descriptive label for the selector
#
# @overload label(label)
# @param [String] label A descriptive label for this selector - used in error messages
# @overload label()
# @return [String] The currently set label
#
def label(label = nil)
@label = label if label
@label
end
##
#
# Description of the selector
#
# @!method description(options)
# @param [Hash] options The options of the query used to generate the description
# @return [String] Description of the selector when used with the options passed
def_delegator :@filter_set, :description
##
#
# Should this selector be used for the passed in locator
#
# This is used by the automatic selector selection mechanism when no selector type is passed to a selector query
#
# @param [String] locator The locator passed to the query
# @return [Boolean] Whether or not to use this selector
#
def match?(locator)
@match&.call(locator)
end
##
#
# Define a node filter for use with this selector
#
# @!method node_filter(name, *types, options={}, &block)
# @param [Symbol, Regexp] name The filter name
# @param [Array<Symbol>] types The types of the filter - currently valid types are [:boolean]
# @param [Hash] options ({}) Options of the filter
# @option options [Array<>] :valid_values Valid values for this filter
# @option options :default The default value of the filter (if any)
# @option options :skip_if Value of the filter that will cause it to be skipped
# @option options [Regexp] :matcher (nil) A Regexp used to check whether a specific option is handled by this filter. If not provided the filter will be used for options matching the filter name.
#
# If a Symbol is passed for the name the block should accept | node, option_value |, while if a Regexp
# is passed for the name the block should accept | node, option_name, option_value |. In either case
      # the block should return `true` if the node passes the filter or `false` if it doesn't
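      #
      # A minimal sketch; the :widget selector and its :role filter are
      # illustrative assumptions.
      #
      # @example
      #   Capybara.add_selector(:widget) do
      #     css { |locator| "div.widget##{locator}" }
      #     node_filter(:role) { |node, value| node[:'data-role'] == value.to_s }
      #   end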
##
#
# Define an expression filter for use with this selector
#
# @!method expression_filter(name, *types, matcher: nil, **options, &block)
# @param [Symbol, Regexp] name The filter name
# @param [Regexp] matcher (nil) A Regexp used to check whether a specific option is handled by this filter
# @param [Array<Symbol>] types The types of the filter - currently valid types are [:boolean]
# @param [Hash] options ({}) Options of the filter
# @option options [Array<>] :valid_values Valid values for this filter
# @option options :default The default value of the filter (if any)
# @option options :skip_if Value of the filter that will cause it to be skipped
# @option options [Regexp] :matcher (nil) A Regexp used to check whether a specific option is handled by this filter. If not provided the filter will be used for options matching the filter name.
#
# If a Symbol is passed for the name the block should accept | current_expression, option_value |, while if a Regexp
# is passed for the name the block should accept | current_expression, option_name, option_value |. In either case
# the block should return the modified expression
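      #
      # A minimal sketch; the :widget selector and its :kind expression filter are
      # illustrative assumptions.
      #
      # @example
      #   Capybara.add_selector(:widget) do
      #     css { |locator| "div.widget##{locator}" }
      #     expression_filter(:kind) { |expr, value| "#{expr}[data-kind='#{value}']" }
      #   end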
def_delegators :@filter_set, :node_filter, :expression_filter, :filter
def locator_filter(*types, **options, &block)
types.each { |type| options[type] = true }
@locator_filter = Capybara::Selector::Filters::LocatorFilter.new(block, **options) if block
@locator_filter
end
def filter_set(name, filters_to_use = nil)
@filter_set.import(name, filters_to_use)
end
def_delegator :@filter_set, :describe
def describe_expression_filters(&block)
if block_given?
describe(:expression_filters, &block)
else
describe(:expression_filters) do |**options|
describe_all_expression_filters(**options)
end
end
end
def describe_all_expression_filters(**opts)
expression_filters.map do |ef_name, ef|
if ef.matcher?
handled_custom_keys(ef, opts.keys).map { |key| " with #{ef_name}[#{key} => #{opts[key]}]" }.join
elsif opts.key?(ef_name)
" with #{ef_name} #{opts[ef_name]}"
end
end.join
end
def describe_node_filters(&block)
describe(:node_filters, &block)
end
##
#
      # Set the default visibility mode that should be used if no visible option is passed when using the selector.
      # If not specified, it will default to the behavior indicated by Capybara.ignore_hidden_elements
#
# @param [Symbol] default_visibility Only find elements with the specified visibility:
# * :all - finds visible and invisible elements.
# * :hidden - only finds invisible elements.
# * :visible - only finds visible elements.
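      #
      # A minimal sketch; the :tooltip selector is an illustrative assumption.
      #
      # @example
      #   Capybara.add_selector(:tooltip) do
      #     css { |locator| "[role='tooltip']##{locator}" }
      #     visible(:all)
      #   end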
def visible(default_visibility = nil, &block)
@default_visibility = block || default_visibility
end
def default_visibility(fallback = Capybara.ignore_hidden_elements, options = {})
vis = if @default_visibility&.respond_to?(:call)
@default_visibility.call(options)
else
@default_visibility
end
vis.nil? ? fallback : vis
end
# @api private
def raw_locator?
!!@raw_locator
end
# @api private
def supports_exact?
@supports_exact
end
def default_format
return nil if @expressions.keys.empty?
if @expressions.size == 1
@expressions.keys.first
else
:xpath
end
end
# @api private
def locator_types
return nil unless @locator_type
Array(@locator_type)
end
private
def handled_custom_keys(filter, keys)
keys.select do |key|
filter.handles_option?(key) && !::Capybara::Queries::SelectorQuery::VALID_KEYS.include?(key)
end
end
def parameter_names(block)
block.parameters.select { |(type, _name)| %i[key keyreq].include? type }.map { |(_type, name)| name }
end
def expression(type, allowed_filters, &block)
if block
@expressions[type] = block
allowed_filters = parameter_names(block) if allowed_filters.empty?
allowed_filters.flatten.each do |ef|
expression_filters[ef] = Capybara::Selector::Filters::IdentityExpressionFilter.new(ef)
end
end
@expressions[type]
end
end
end
end
| 39.111913 | 204 | 0.61187 |
4acf2ec62f743f97049d60666f25a79c42565e26
| 4,902 |
require "#{File.expand_path(File.dirname(__FILE__))}/../../lib/rubycom.rb"
# A command module used for testing
#
# This module contains most of the test case input methods.
module UtilTestModule
# A test non-command method
def non_command
puts 'fail'
end
# A basic test command
def self.test_command
puts 'command test'
end
def self.test_command_no_docs
puts 'command test'
end
# A test_command with one arg
#
# @param [String] test_arg a test argument
def self.test_command_with_arg(test_arg)
"test_arg=#{test_arg}"
end
# A test_command with an arg named arg
#
# @param [String] arg a test argument whose parameter name is arg
def self.test_command_arg_named_arg(arg)
"arg=#{arg}"
end
# A test_command with two args
# @param [String] test_arg a test argument
# @param [String] another_test_arg another test argument
def self.test_command_with_args(test_arg, another_test_arg)
puts "test_arg=#{test_arg},another_test_arg=#{another_test_arg}"
end
# A test_command with an optional argument
# @param [String] test_arg a test argument
# @param [String] test_option an optional test argument
def self.test_command_with_options(test_arg, test_option='option_default')
puts "test_arg=#{test_arg},test_option=#{test_option}"
end
# A test_command with all optional arguments
# @param [String] test_arg an optional test argument
# @param [String] test_option another optional test argument
def self.test_command_all_options(test_arg='test_arg_default', test_option='test_option_default')
puts "Output is test_arg=#{test_arg},test_option=#{test_option}"
end
# A test_command with a nil optional argument
# @param [String] test_arg a test argument
# @param [String] test_option an optional test argument with a nil default value
# @return [String] a message including the value and class of each parameter
def self.test_command_nil_option(test_arg, test_option=nil)
"test_arg=#{test_arg}, test_arg.class=#{test_arg.class}, test_option=#{test_option}, test_option.class=#{test_option.class}"
end
# A test_command with an options array
# @param [String] test_option an optional test argument
# @param [Array] test_options an optional array of arguments
def self.test_command_options_arr (
test_option='test_option_default',
*test_options
)
puts "Output is test_option=#{test_option},test_option_arr=#{test_options}"
end
# A test_command with a return argument
#
# @param [String] test_arg a test argument
# @param [Integer] test_option_int an optional test argument which happens to be an Integer
# @return [Array] an array including both params if test_option_int != 1
# @return [String] the first param if test_option_int == 1
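  #
  # @example Illustrative direct calls (return values follow from the body below)
  #   UtilTestModule.test_command_with_return('a')    #=> 'a'
  #   UtilTestModule.test_command_with_return('a', 2) #=> ['a', 2]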
def self.test_command_with_return(test_arg, test_option_int=1)
ret = [test_arg, test_option_int]
if test_option_int == 1
ret = test_arg
end
ret
end
# A test_command with a Timestamp argument and an unnecessarily long description which should overflow when
# it tries to line up with other descriptions.
#
# some more stuff
#
# @param [Timestamp] test_time a test Timestamp argument
# @return [Hash] a hash including the given argument
def self.test_command_arg_timestamp(test_time)
{:test_time => test_time}
end
# A test_command with a Boolean argument
# @param [Boolean] test_flag a test Boolean argument
# @return [Boolean] the flag passed in
def self.test_command_arg_false(test_flag=false)
test_flag
end
# A test_command with an array argument
#
# @param [Array] test_arr an Array test argument
def self.test_command_arg_arr(test_arr=[])
test_arr
end
# A test_command with an Hash argument
# @param [Hash] test_hash a Hash test argument
def self.test_command_arg_hash(test_hash={})
test_hash
end
# A test_command with several mixed options
#
# @param [String] test_arg
# @param [Array] test_arr
# @param [String] test_opt
  # @param [Hash] test_hsh
  # @param [TrueClass|FalseClass] test_bool
  # @param [Array] test_rest
def self.test_command_mixed_options(test_arg, test_arr=[], test_opt='test_opt_arg', test_hsh={}, test_bool=true, *test_rest)
"test_arg=#{test_arg} test_arr=#{test_arr} test_opt=#{test_opt} test_hsh=#{test_hsh} test_bool=#{test_bool} test_rest=#{test_rest}"
end
# A test_command with several mixed options with varying names
#
# @param [Object] arg_test anything
# @param [Array] arr an array of things
# @param [String] opt an optional string
# @param [Hash] hsh a hash representing some test keys and values
# @param [TrueClass|FalseClass] bool a true or false
# @param [Array] rest_test everything else
def self.test_command_mixed_names(arg_test, arr=[], opt='test_opt_arg', hsh={}, bool=true, *rest_test)
"arg_test=#{arg_test} arr=#{arr} opt=#{opt} hsh=#{hsh} bool=#{bool} rest_test=#{rest_test}"
end
include Rubycom
end
| 34.27972 | 135 | 0.731946 |
01008bf5189de78de665c33791d635948e3f2a2c
| 10,210 |
# frozen_string_literal: true
module DwcaHunter
# Wikispecies source
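  #
  # A hedged usage sketch (how the wider dwca_hunter driver invokes resources may
  # differ; the calls below only use methods defined in this class):
  #
  #   res = DwcaHunter::ResourceWikispecies.new(download: true, unpack: true)
  #   res.download
  #   res.unpack
  #   res.make_dwca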
class ResourceWikispecies < DwcaHunter::Resource
def initialize(opts = { download: true, unpack: true })
@wikisp_path = File.join(Dir.tmpdir, "dwca_hunter", "wikispecies")
@problems_file = File.open(File.join(Dir.tmpdir, "problems.txt"), "w:utf-8")
@command = "wikispecies"
@title = "Wikispecies"
@url = "http://dumps.wikimedia.org/specieswiki/latest/" \
"specieswiki-latest-pages-articles.xml.bz2"
@url = opts[:url] if opts[:url]
@uuid = "68923690-0727-473c-b7c5-2ae9e601e3fd"
@download_path = File.join(@wikisp_path, "data.xml.bz2")
@data = []
@templates = {}
@taxon_ids = {}
@tree = {}
@paths = {}
@extensions = []
@parser = Biodiversity::Parser
@re = {
page_start: /^\s*<page>\s*$/,
page_end: %r{^\s*</page>\s*$},
template: /Template:/i,
template_link: /\{\{([^}]*)\}\}/,
vernacular_names: /\{\{\s*VN\s*\|([^}]+)\}\}/i
}
super(opts)
end
def download
puts "Downloading from the source"
`curl -L #{@url} -o #{@download_path}`
end
def unpack
unpack_bz2
end
def make_dwca
enrich_data
generate_dwca
end
private
def enrich_data
DwcaHunter.logger_write(object_id,
"Extracting data from xml file...")
Dir.chdir(@download_dir)
f = open("data.xml", "r:utf-8")
page_on = false
page = ""
page_num = 0
f.each do |l|
if l.match(@re[:page_start])
page += l
page_on = true
elsif page_on
page += l
if l.match(@re[:page_end])
page_on = false
page_xml = Nokogiri::XML.parse(page)
if template?(page_xml)
process_template(page_xml)
else
process_species(page_xml)
end
page_num += 1
if (page_num % BATCH_SIZE).zero?
DwcaHunter.logger_write(object_id,
"Traversed #{page_num} pages")
end
page = ""
@page_title = nil
@page_id = nil
end
end
end
DwcaHunter.logger_write(object_id,
"Extracted total %s pages" % page_num)
f.close
end
def process_template(x)
name = page_title(x).gsub!(@re[:template], "").strip
text = x.xpath("//text").text.strip
parent_name = text.match(@re[:template_link])
if parent_name
return if parent_name[1].match(/\#if/)
list = parent_name[1].split("|")
parent_name = if list.size == 1
list[0]
elsif list[0].match(/Taxonav/i)
list[1]
else
list[0]
end
end
name.gsub!(/_/, " ")
parent_name&.gsub!(/_/, " ")
@templates[name] = { parentName: parent_name, id: page_id(x) }
end
def process_species(x)
return if page_title(x).match(/Wikispecies/i)
items = find_species_components(x)
if items
@data << {
taxonId: page_id(x),
canonicalForm: page_title(x),
scientificName: page_title(x),
classificationPath: [],
vernacularNames: []
}
get_full_scientific_name(items)
get_vernacular_names(items)
end
end
def get_full_scientific_name(items)
name_ary = items["{{int:name}}"]
if name_ary.nil? || name_ary.empty?
@problems_file.write("%s\n" % @data[-1][:canonicalForm])
return
end
name = name_ary[0]
name = parse_name(name, @data[-1])
if name != ""
@data[-1][:scientificName] = name
end
end
def get_vernacular_names(items)
vern = items["{{int:vernacular names}}"]
if vern.is_a?(Array) && vern.size.positive?
vn_string = vern.join("")
vn = vn_string.match(@re[:vernacular_names])
if vn
vn_list = vn[1].strip.split("|")
vnames = []
vn_list.each do |item|
language, name = item.split("=").map(&:strip)
next unless language && name && language.size < 4 && name.valid_encoding?
vnames << {
name: name,
language: language
}
end
@data[-1][:vernacularNames] = vnames
end
end
end
def init_classification_path(items)
      # Seed the classification path with the first taxonavigation template link
items["taxonavigation"]&.each do |line|
line.gsub!(/\[\[.*\]\]/, "") # ignore non-template links
next unless template_link = line.match(@re[:template_link])
template_link = template_link[1].
strip.gsub(/Template:/, "").gsub(/_/, " ")
unless template_link.match(/\|/)
@data[-1][:classificationPath] << template_link
break
end
end
end
def find_species_components(x)
items = get_items(x.xpath("//text").text)
is_taxon_item = items.key?("{{int:name}}") &&
items.key?("{{int:taxonavigation}}")
return nil unless is_taxon_item
items
end
def get_items(txt)
item_on = false
items = {}
current_item = nil
txt.split("\n").each do |l|
item = l.match(/=+([^=]+)=+/)
if item
current_item = item[1].strip.downcase
items[current_item] = []
elsif current_item && !l.empty?
items[current_item] << l
end
end
items
end
def page_title(x)
@page_title ||= x.xpath("//title").first.text
end
def page_id(x)
@page_id ||= x.xpath("//id").first.text
end
def template?(page_xml)
!!page_title(page_xml).match(@re[:template])
end
def parse_name(name_string, taxa)
name_string.gsub!("BASEPAGENAME", taxa[:canonicalForm])
name_string = name_string.strip
old_l = name_string.dup
name_string.gsub!(/^\*\s*/, "")
name_string.gsub!(/\[\[([^\]]+\|)?([^\]]*)\]\]/, '\2')
name_string.gsub!(/\{\{([^}]+\|)?([^}]*)\}\}/, '\2')
name_string.gsub!(/'{2,}/, " ")
name_string.gsub!(/"{2,}/, " ")
name_string.gsub!(/:\s*\d.*$/, "")
name_string.gsub!(/,\s*\[RSD\]/i, "")
name_string.gsub!(/^\s*†\s*/, "")
name_string.gsub!(/(:\s*)?\[http:[^\]]+\]/, "")
# name_string = DwcaHunter::XML.unescape(name_string)
name_string.gsub!(/<nowiki>.*$/, "")
name_string.gsub!(%r{<br\s*/?\s*>}, "")
name_string.gsub!(/^\s*†\s*/, "")
name_string.gsub!(/ /, " ")
name_string.gsub!(/\s+/, " ")
res = name_string.strip
parsed = @parser.parse(res, simple: true)
if !["1","2"].include?(parsed[:quality])
return ""
end
res
end
def generate_dwca
DwcaHunter.logger_write(object_id,
"Creating DarwinCore Archive file")
@core = [
["http://rs.tdwg.org/dwc/terms/taxonID",
"http://rs.tdwg.org/dwc/terms/scientificName",
"http://globalnames.org/terms/canonicalForm",
"http://purl.org/dc/terms/source"]
]
DwcaHunter.logger_write(object_id, "Assembling Core Data")
count = 0
@data.map do |d|
count += 1
if (count % BATCH_SIZE).zero?
DwcaHunter.logger_write(object_id,
"Traversing %s core data record" % count)
end
taxon_id = begin
(if d[:classificationPath].empty?
d[:taxonId]
else
@templates[d[:classificationPath].
last][:id]
end)
rescue StandardError
d[:taxonId]
end
@taxon_ids[d[:taxonId]] = taxon_id
parentNameUsageId = begin
(@templates[d[:classificationPath][-2]][:id] if d[:classificationPath].size > 1)
rescue StandardError
nil
end
url = "http://species.wikimedia.org/wiki/#{CGI.escape(d[:canonicalForm].gsub(' ', '_'))}"
path = d[:classificationPath]
path.pop if path[-1] == d[:canonicalForm]
canonical_form = d[:canonicalForm].gsub(/\(.*\)\s*$/, "").strip
scientific_name = if d[:scientificName] == d[:canonicalForm]
canonical_form
else
d[:scientificName]
end
@core << [taxon_id,
scientific_name,
canonical_form,
url]
end
@extensions << { data: [[
"http://rs.tdwg.org/dwc/terms/TaxonID",
"http://rs.tdwg.org/dwc/terms/vernacularName",
"http://purl.org/dc/terms/language"
]], file_name: "vernacular_names.txt" }
DwcaHunter.logger_write(object_id,
"Creating verncaular name extension for DarwinCore Archive file")
count = 0
@data.each do |d|
count += 1
if (count % BATCH_SIZE).zero?
DwcaHunter.logger_write(object_id,
"Traversing %s extension data record" % count)
end
d[:vernacularNames].each do |vn|
taxon_id = @taxon_ids[d[:taxonId]] || nil
@extensions[-1][:data] << [taxon_id, vn[:name], vn[:language]] if taxon_id
end
end
@eml = {
id: @uuid,
title: @title,
license: "http://creativecommons.org/licenses/by-sa/3.0/",
authors: [
{ first_name: "Stephen",
last_name: "Thorpe",
email: "[email protected]",
url: "http://species.wikimedia.org/wiki/Main_Page" }
],
abstract: "The free species directory that anyone can edit.",
metadata_providers: [
{ first_name: "Dmitry",
last_name: "Mozzherin",
email: "[email protected]" }
],
url: "http://species.wikimedia.org/wiki/Main_Page"
}
super
end
end
end
| 30.939394 | 97 | 0.515965 |
1a2cb9c86d57fe610475834067830df45c8789fd
| 48,495 |
require 'openssl'
require 'webrick/httputils'
module Mixins
module EmsCommonAngular
extend ActiveSupport::Concern
OPENSTACK_PARAMS = %i(name provider_region api_version default_security_protocol keystone_v3_domain_id default_hostname default_api_port default_userid event_stream_selection).freeze
OPENSTACK_AMQP_PARAMS = %i(name provider_region api_version amqp_security_protocol keystone_v3_domain_id amqp_hostname amqp_api_port amqp_userid event_stream_selection).freeze
included do
include Mixins::GenericFormMixin
end
def update
assert_privileges("#{permission_prefix}_edit")
case params[:button]
when "cancel" then update_ems_button_cancel
when "save" then update_ems_button_save
when "validate" then update_ems_button_validate
end
end
def update_ems_button_cancel
update_ems = find_record_with_rbac(model, params[:id])
flash_to_session(_("Edit of %{model} \"%{name}\" was cancelled by the user") %
{:model => ui_lookup(:model => model.to_s), :name => update_ems.name})
url_args = {
:action => @lastaction == 'show_dashboard' ? 'show' : @lastaction,
:id => update_ems.id,
:display => session[:ems_display],
:record => update_ems
}
begin
javascript_redirect(javascript_process_redirect_args(url_args))
rescue ActionController::UrlGenerationError
# if the target URL does not exist, redirect to 'show'
url_args[:action] = 'show'
javascript_redirect(javascript_process_redirect_args(url_args))
end
end
def update_ems_button_save
update_ems = find_record_with_rbac(model, params[:id])
set_ems_record_vars(update_ems)
if update_ems.save
update_ems.reload
add_flash(
_("%{model} \"%{name}\" was saved") % {:model => ui_lookup(:model => model.to_s), :name => update_ems.name}
)
construct_edit_for_audit(update_ems)
AuditEvent.success(build_saved_audit(update_ems, @edit))
update_ems.authentication_check_types_queue(update_ems.authentication_for_summary.pluck(:authtype),
:save => true)
flash_to_session
javascript_redirect(@lastaction == 'show_list' ? ems_path('show_list') : ems_path(update_ems))
else
update_ems.errors.each do |field, msg|
add_flash("#{field.to_s.capitalize} #{msg}", :error)
end
drop_breadcrumb(:name => _("Edit %{table} '%{name}'") %
{:table => ui_lookup(:table => table_name), :name => update_ems.name},
:url => "/#{table_name}/edit/#{update_ems.id}")
@in_a_form = true
render_flash
end
end
def update_ems_button_validate
result, details = realtime_authentication_check
render_validation_result(result, details)
end
def realtime_authentication_check(verify_ems = nil)
verify_ems ||= find_record_with_rbac(model, params[:id])
set_ems_record_vars(verify_ems, :validate)
@in_a_form = true
verify_ems.authentication_check(params[:cred_type], :save => false, :database => params[:metrics_database_name])
end
def realtime_raw_connect(ems_type)
ems_type.raw_connect(*get_task_args(ems_type))
true
rescue => err
[false, err.message]
end
def create_ems_button_validate
@in_a_form = true
ems_type = model.model_from_emstype(params[:emstype])
result, details = if %w(ems_cloud ems_infra).include?(params[:controller]) && session[:selected_roles].try(:include?, 'user_interface')
realtime_raw_connect(ems_type)
elsif %w(ems_cloud ems_infra).include?(params[:controller])
ems_type.validate_credentials_task(get_task_args(ems_type), session[:userid], params[:zone])
else
realtime_authentication_check(ems_type.new)
end
render_validation_result(result, details)
end
def render_validation_result(result, details)
if result
msg = _("Credential validation was successful")
else
msg = _("Credential validation was not successful: %{details}") % {:details => strip_tags(details)}
level = :error
end
render_flash_json(msg, level, :long_alert => true)
end
def create
assert_privileges("#{permission_prefix}_new")
case params[:button]
when "add" then create_ems_button_add
when "validate" then create_ems_button_validate
when "cancel" then create_ems_button_cancel
end
end
def get_task_args(ems)
user, password = params[:default_userid], MiqPassword.encrypt(params[:default_password])
case ems.to_s
when 'ManageIQ::Providers::Openstack::CloudManager', 'ManageIQ::Providers::Openstack::InfraManager'
case params[:cred_type]
when 'default'
[password, params.to_hash.symbolize_keys.slice(*OPENSTACK_PARAMS)]
when 'amqp'
[MiqPassword.encrypt(params[:amqp_password]), params.to_hash.symbolize_keys.slice(*OPENSTACK_AMQP_PARAMS)]
end
when 'ManageIQ::Providers::Amazon::CloudManager'
uri = URI.parse(WEBrick::HTTPUtils.escape(params[:default_url]))
[user, password, :EC2, params[:provider_region], ems.http_proxy_uri, true, uri]
when 'ManageIQ::Providers::Azure::CloudManager'
uri = URI.parse(WEBrick::HTTPUtils.escape(params[:default_url]))
[user, password, params[:azure_tenant_id], params[:subscription], ems.http_proxy_uri, params[:provider_region], uri]
when 'ManageIQ::Providers::Vmware::CloudManager'
case params[:cred_type]
when 'amqp'
[params[:amqp_hostname], params[:amqp_api_port], params[:amqp_userid], MiqPassword.encrypt(params[:amqp_password]), params[:api_version], true]
when 'default'
[params[:default_hostname], params[:default_api_port], user, password, params[:api_version], true]
end
when 'ManageIQ::Providers::Google::CloudManager'
[params[:project], MiqPassword.encrypt(params[:service_account]), {:service => "compute"}, ems.http_proxy_uri, true]
when 'ManageIQ::Providers::Microsoft::InfraManager'
connect_opts = {
:hostname => params[:default_hostname],
:user => user,
:password => password,
:port => params[:default_api_port],
:realm => params[:realm],
:security_protocol => params[:default_security_protocol],
}
[ems.build_connect_params(connect_opts), true]
when 'ManageIQ::Providers::Redhat::InfraManager'
metrics_user, metrics_password = params[:metrics_userid], MiqPassword.encrypt(params[:metrics_password])
[{
:username => user,
:password => password,
:server => params[:default_hostname],
:port => params[:default_api_port],
:verify_ssl => params[:default_tls_verify] == 'on' ? OpenSSL::SSL::VERIFY_PEER : OpenSSL::SSL::VERIFY_NONE,
:ca_certs => params[:default_tls_ca_certs],
:metrics_username => metrics_user,
:metrics_password => metrics_password,
:metrics_server => params[:metrics_hostname],
:metrics_port => params[:metrics_api_port],
:metrics_database => params[:metrics_database_name],
}]
when 'ManageIQ::Providers::Kubevirt::InfraManager'
[{
:password => params[:kubevirt_password],
:server => params[:kubevirt_hostname],
:port => params[:kubevirt_api_port],
:verify_ssl => params[:kubevirt_tls_verify] == 'on' ? OpenSSL::SSL::VERIFY_PEER : OpenSSL::SSL::VERIFY_NONE,
:ca_certs => params[:kubevirt_tls_ca_certs],
}]
when 'ManageIQ::Providers::Vmware::InfraManager'
case params[:cred_type]
when 'console'
[{:pass => MiqPassword.encrypt(params[:console_password]), :user => params[:console_userid], :ip => params[:default_hostname], :use_broker => false}]
when 'default'
[{:pass => password, :user => user, :ip => params[:default_hostname], :use_broker => false}]
end
when 'ManageIQ::Providers::Nuage::NetworkManager'
endpoint_opts = {:protocol => params[:default_security_protocol], :hostname => params[:default_hostname], :api_port => params[:default_api_port], :api_version => params[:api_version]}
[user, params[:default_password], endpoint_opts]
when 'ManageIQ::Providers::Lenovo::PhysicalInfraManager'
[user, password, params[:default_hostname], params[:default_api_port], "token", false, true]
when 'ManageIQ::Providers::Redfish::PhysicalInfraManager'
[user, password, params[:default_hostname], params[:default_api_port],
params[:default_security_protocol]]
end
end
def create_ems_button_add
ems = model.model_from_emstype(params[:emstype]).new
set_ems_record_vars(ems) unless @flash_array
if ems.valid? && ems.save
construct_edit_for_audit(ems)
AuditEvent.success(build_created_audit(ems, @edit))
flash_to_session(_("%{model} \"%{name}\" was saved") % {:model => ui_lookup(:tables => table_name), :name => ems.name})
javascript_redirect(:action => 'show_list')
else
@in_a_form = true
ems.errors.each do |field, msg|
add_flash("#{ems.class.human_attribute_name(field, :ui => true)} #{msg}", :error)
end
drop_breadcrumb(:name => _("Add New %{tables}") % {:tables => ui_lookup(:tables => table_name)},
:url => new_ems_path)
javascript_flash
end
end
def create_ems_button_cancel
model_name = model.to_s
flash_to_session(_("Add of %{model} was cancelled by the user") % {:model => ui_lookup(:model => model_name)})
javascript_redirect(:action => @lastaction,
:display => session[:ems_display])
end
def ems_form_fields
assert_privileges("#{permission_prefix}_edit")
@ems = params[:id] == 'new' ? model.new : find_record_with_rbac(model, params[:id])
default_endpoint = @ems.default_endpoint
default_security_protocol = default_endpoint.security_protocol || security_protocol_default
default_tls_verify = default_endpoint.verify_ssl != 0
default_tls_ca_certs = default_endpoint.certificate_authority || ""
amqp_userid = ""
amqp_hostname = ""
amqp_port = ""
amqp_security_protocol = ""
console_userid = ""
smartstate_docker_userid = ""
ssh_keypair_userid = ""
metrics_userid = ""
metrics_hostname = ""
metrics_port = ""
metrics_database_name = ""
metrics_security_protocol = security_protocol_default
metrics_tls_ca_certs = ""
keystone_v3_domain_id = ""
prometheus_alerts_hostname = ""
prometheus_alerts_api_port = ""
prometheus_alerts_security_protocol = security_protocol_default
prometheus_alerts_tls_ca_certs = ""
kubevirt_hostname = ""
kubevirt_api_port = ""
kubevirt_security_protocol = default_security_protocol
kubevirt_tls_ca_certs = ""
kubevirt_password = ""
kubevirt_tls_verify = false
provider_options = @ems.options || {}
if @ems.connection_configurations.amqp.try(:endpoint)
amqp_hostname = @ems.connection_configurations.amqp.endpoint.hostname
amqp_port = @ems.connection_configurations.amqp.endpoint.port
amqp_security_protocol = @ems.connection_configurations.amqp.endpoint.security_protocol || 'ssl'
end
if @ems.has_authentication_type?(:amqp)
amqp_userid = @ems.authentication_userid(:amqp).to_s
amqp_auth_status = @ems.authentication_status_ok?(:amqp)
end
if @ems.has_authentication_type?(:console)
console_userid = @ems.authentication_userid(:console).to_s
console_auth_status = true
end
if @ems.has_authentication_type?(:smartstate_docker)
smartstate_docker_userid = @ems.authentication_userid(:smartstate_docker).to_s
end
if @ems.has_authentication_type?(:ssh_keypair)
ssh_keypair_userid = @ems.authentication_userid(:ssh_keypair).to_s
ssh_keypair_auth_status = @ems.authentication_status_ok?(:ssh_keypair)
end
if @ems.connection_configurations.metrics.try(:endpoint)
metrics_hostname = @ems.connection_configurations.metrics.endpoint.hostname
metrics_port = @ems.connection_configurations.metrics.endpoint.port
metrics_database_name = @ems.connection_configurations.metrics.endpoint.path
end
if @ems.has_authentication_type?(:metrics)
metrics_userid = @ems.authentication_userid(:metrics).to_s
metrics_auth_status = @ems.authentication_status_ok?(:metrics)
end
if @ems.respond_to?(:keystone_v3_domain_id)
keystone_v3_domain_id = @ems.keystone_v3_domain_id
end
if respond_to?(:retrieve_metrics_selection)
metrics_selection = retrieve_metrics_selection.to_sym
connection_configurations_metrics_endpoint = @ems.connection_configurations.try(metrics_selection).try(:endpoint)
else
connection_configurations_metrics_endpoint = nil
end
if connection_configurations_metrics_endpoint
metrics_hostname = connection_configurations_metrics_endpoint.hostname
metrics_port = connection_configurations_metrics_endpoint.port
metrics_auth_status = @ems.authentication_status_ok?(metrics_selection)
metrics_security_protocol = connection_configurations_metrics_endpoint.security_protocol
metrics_security_protocol ||= security_protocol_default
metrics_tls_ca_certs = connection_configurations_metrics_endpoint.certificate_authority
end
if @ems.connection_configurations.prometheus_alerts.try(:endpoint)
prometheus_alerts_hostname = @ems.connection_configurations.prometheus_alerts.endpoint.hostname
prometheus_alerts_api_port = @ems.connection_configurations.prometheus_alerts.endpoint.port
prometheus_alerts_auth_status = @ems.authentication_status_ok?(:alerts)
prometheus_alerts_security_protocol = @ems.connection_configurations.prometheus_alerts.endpoint.security_protocol
prometheus_alerts_security_protocol ||= security_protocol_default
prometheus_alerts_tls_ca_certs = @ems.connection_configurations.prometheus_alerts.endpoint.certificate_authority
end
if @ems.connection_configurations.kubevirt.try(:endpoint)
kubevirt_hostname = @ems.connection_configurations.kubevirt.endpoint.hostname
kubevirt_api_port = @ems.connection_configurations.kubevirt.endpoint.port
kubevirt_auth_status = @ems.authentication_status_ok?(:kubevirt)
kubevirt_security_protocol = @ems.connection_configurations.kubevirt.endpoint.security_protocol
kubevirt_security_protocol ||= default_security_protocol
kubevirt_tls_ca_certs = @ems.connection_configurations.kubevirt.endpoint.certificate_authority
kubevirt_tls_verify = @ems.connection_configurations.kubevirt.endpoint.verify_ssl
end
if @ems.connection_configurations.default.try(:endpoint)
default_hostname = @ems.connection_configurations.default.endpoint.hostname
default_api_port = @ems.connection_configurations.default.endpoint.port
else
default_hostname = @ems.hostname
default_api_port = @ems.port
end
@ems_types = Array(model.supported_types_and_descriptions_hash.invert).sort_by(&:first)
if @ems.kind_of?(ManageIQ::Providers::Vmware::InfraManager)
host_default_vnc_port_start = @ems.host_default_vnc_port_start.to_s
host_default_vnc_port_end = @ems.host_default_vnc_port_end.to_s
end
if @ems.kind_of?(ManageIQ::Providers::Azure::CloudManager)
azure_tenant_id = @ems.azure_tenant_id
subscription = @ems.subscription
client_id = @ems.authentication_userid.to_s
client_key = @ems.authentication_password.to_s
end
if @ems.kind_of?(ManageIQ::Providers::Google::CloudManager)
project = @ems.project
service_account = @ems.authentication_token
service_account_auth_status = @ems.authentication_status_ok?
end
if @ems.kind_of?(ManageIQ::Providers::Nuage::NetworkManager)
amqp_fallback_hostname1 = @ems.connection_configurations.amqp_fallback1 ? @ems.connection_configurations.amqp_fallback1.endpoint.hostname : ""
amqp_fallback_hostname2 = @ems.connection_configurations.amqp_fallback2 ? @ems.connection_configurations.amqp_fallback2.endpoint.hostname : ""
end
if %w(ems_cloud ems_network).include?(controller_name)
render :json => {:name => @ems.name,
:emstype => @ems.emstype,
:zone => zone,
:tenant_mapping_enabled => @ems.tenant_mapping_enabled == true,
:provider_id => @ems.provider_id || "",
:hostname => @ems.hostname,
:default_hostname => default_hostname,
:amqp_hostname => amqp_hostname,
:default_api_port => default_api_port || "",
:amqp_api_port => amqp_port || "",
:api_version => @ems.api_version || "v2",
:default_security_protocol => default_security_protocol,
:amqp_security_protocol => amqp_security_protocol,
:provider_region => @ems.provider_region,
:openstack_infra_providers_exist => retrieve_openstack_infra_providers.length.positive?,
:default_userid => @ems.authentication_userid.to_s,
:amqp_userid => amqp_userid,
:ssh_keypair_userid => ssh_keypair_userid,
:smartstate_docker_userid => smartstate_docker_userid,
:service_account => service_account.to_s,
:azure_tenant_id => azure_tenant_id.to_s,
:keystone_v3_domain_id => keystone_v3_domain_id,
:subscription => subscription.to_s,
:client_id => client_id.to_s,
:client_key => client_key.to_s,
:project => project.to_s,
:emstype_vm => @ems.kind_of?(ManageIQ::Providers::Vmware::InfraManager),
:event_stream_selection => retrieve_event_stream_selection,
:ems_controller => controller_name,
:default_auth_status => default_auth_status,
:amqp_auth_status => amqp_auth_status,
:ssh_keypair_auth_status => ssh_keypair_auth_status.nil? ? true : ssh_keypair_auth_status,
:service_account_auth_status => service_account_auth_status,
:amqp_fallback_hostname1 => amqp_fallback_hostname1 ? amqp_fallback_hostname1 : "",
:amqp_fallback_hostname2 => amqp_fallback_hostname2 ? amqp_fallback_hostname2 : "",
:default_url => @ems.endpoints.first.url}
end
if controller_name == "ems_infra"
render :json => { :name => @ems.name,
:emstype => @ems.emstype,
:zone => zone,
:provider_id => @ems.provider_id || "",
:default_hostname => @ems.connection_configurations.default.endpoint.hostname,
:amqp_hostname => amqp_hostname,
:console_userid => console_userid,
:metrics_hostname => metrics_hostname,
:metrics_database_name => metrics_database_name,
:metrics_default_database_name => metrics_default_database_name,
:default_api_port => default_api_port || "",
:amqp_api_port => amqp_port || "",
:metrics_api_port => metrics_port || "",
:default_security_protocol => default_security_protocol,
:amqp_security_protocol => amqp_security_protocol,
:default_tls_verify => default_tls_verify,
:default_tls_ca_certs => default_tls_ca_certs,
:api_version => @ems.api_version || "v2",
:provider_region => @ems.provider_region,
:default_userid => @ems.authentication_userid.to_s,
:amqp_userid => amqp_userid,
:ssh_keypair_userid => ssh_keypair_userid,
:metrics_userid => metrics_userid,
:keystone_v3_domain_id => keystone_v3_domain_id,
:emstype_vm => @ems.kind_of?(ManageIQ::Providers::Vmware::InfraManager),
:host_default_vnc_port_start => host_default_vnc_port_start || "",
:host_default_vnc_port_end => host_default_vnc_port_end || "",
:event_stream_selection => retrieve_event_stream_selection,
:ems_controller => controller_name,
:default_auth_status => default_auth_status,
:console_auth_status => console_auth_status,
:metrics_auth_status => metrics_auth_status.nil? ? true : metrics_auth_status,
:ssh_keypair_auth_status => ssh_keypair_auth_status.nil? ? true : ssh_keypair_auth_status,
:non_default_current_tab => @ems.emstype == "kubevirt" ? "kubevirt" : nil,
:kubevirt_api_port => kubevirt_api_port,
:kubevirt_hostname => kubevirt_hostname,
:kubevirt_security_protocol => kubevirt_security_protocol,
:kubevirt_tls_verify => kubevirt_tls_verify,
:kubevirt_tls_ca_certs => kubevirt_tls_ca_certs,
:kubevirt_auth_status => kubevirt_auth_status,
:kubevirt_password => kubevirt_password,
:kubevirt_password_exists => [email protected]_token(:kubevirt).nil?}
end
if controller_name == "ems_container"
render :json => {:name => @ems.name,
:emstype => @ems.emstype,
:zone => zone,
:hostname => @ems.hostname,
:default_hostname => @ems.connection_configurations.default.endpoint.hostname,
:default_api_port => @ems.connection_configurations.default.endpoint.port,
:metrics_selection => retrieve_metrics_selection,
:metrics_selection_default => @ems.emstype == 'kubernetes' ? 'disabled' : 'enabled',
:metrics_hostname => metrics_hostname,
:metrics_api_port => metrics_port,
:metrics_tls_ca_certs => metrics_tls_ca_certs,
:metrics_security_protocol => metrics_security_protocol,
:api_version => @ems.api_version || "v2",
:default_security_protocol => default_security_protocol,
:default_tls_ca_certs => default_tls_ca_certs,
:provider_region => @ems.provider_region,
:default_userid => @ems.authentication_userid.to_s,
:service_account => service_account.to_s,
:bearer_token_exists => [email protected]_token(:bearer).nil?,
:ems_controller => controller_name,
:default_auth_status => default_auth_status,
:metrics_auth_status => metrics_auth_status.nil? ? false : metrics_auth_status,
:prometheus_alerts_api_port => prometheus_alerts_api_port,
:prometheus_alerts_hostname => prometheus_alerts_hostname,
:prometheus_alerts_security_protocol => prometheus_alerts_security_protocol,
:prometheus_alerts_tls_ca_certs => prometheus_alerts_tls_ca_certs,
:prometheus_alerts_auth_status => prometheus_alerts_auth_status,
:provider_options => provider_options,
:alerts_selection => retrieve_alerts_selection,
:kubevirt_api_port => kubevirt_api_port,
:kubevirt_hostname => kubevirt_hostname,
:kubevirt_security_protocol => kubevirt_security_protocol,
:kubevirt_tls_verify => kubevirt_tls_verify,
:kubevirt_tls_ca_certs => kubevirt_tls_ca_certs,
:kubevirt_auth_status => kubevirt_auth_status,
:kubevirt_password => kubevirt_password,
:kubevirt_password_exists => [email protected]_token(:kubevirt).nil?,
:virtualization_selection => retrieve_virtualization_selection}
end
if controller_name == "ems_middleware"
render :json => {:name => @ems.name,
:emstype => @ems.emstype,
:zone => zone,
:default_hostname => @ems.connection_configurations.default.endpoint.hostname,
:default_api_port => @ems.connection_configurations.default.endpoint.port,
:default_userid => @ems.authentication_userid.to_s,
:default_security_protocol => default_security_protocol,
:default_tls_ca_certs => default_tls_ca_certs,
:ems_controller => controller_name,
:default_auth_status => default_auth_status}
end
end
private
def metrics_default_database_name
if @ems.class.name == 'ManageIQ::Providers::Redhat::InfraManager'
ManageIQ::Providers::Redhat::InfraManager.default_history_database_name
end
end
def security_protocol_default
case controller_name
when "ems_container" then "ssl-with-validation"
when "ems_middleware" then "non-ssl"
else "ssl"
end
end
def table_name
self.class.table_name
end
def set_ems_record_vars(ems, mode = nil)
ems.name = params[:name].strip if params[:name]
ems.provider_region = params[:provider_region] if params[:provider_region]
ems.api_version = params[:api_version].strip if params[:api_version]
ems.provider_id = params[:provider_id]
ems.zone = Zone.find_by(:name => params[:zone])
ems.tenant_mapping_enabled = params[:tenant_mapping_enabled] == "on" if ems.class.supports_cloud_tenant_mapping?
ems.security_protocol = params[:default_security_protocol].strip if params[:default_security_protocol]
hostname = params[:default_hostname].strip if params[:default_hostname]
port = params[:default_api_port].strip if params[:default_api_port]
amqp_hostname = params[:amqp_hostname].strip if params[:amqp_hostname]
amqp_fallback_hostname1 = params[:amqp_fallback_hostname1].strip if params[:amqp_fallback_hostname1]
amqp_fallback_hostname2 = params[:amqp_fallback_hostname2].strip if params[:amqp_fallback_hostname2]
amqp_port = params[:amqp_api_port].strip if params[:amqp_api_port]
amqp_security_protocol = params[:amqp_security_protocol].strip if params[:amqp_security_protocol]
metrics_hostname = params[:metrics_hostname].strip if params[:metrics_hostname]
metrics_port = params[:metrics_api_port].strip if params[:metrics_api_port]
metrics_database_name = params[:metrics_database_name].strip if params[:metrics_database_name]
metrics_security_protocol = params[:metrics_security_protocol].strip if params[:metrics_security_protocol]
metrics_tls_ca_certs = params[:metrics_tls_ca_certs].strip if params[:metrics_tls_ca_certs]
default_tls_ca_certs = params[:default_tls_ca_certs].strip if params[:default_tls_ca_certs]
prometheus_alerts_tls_ca_certs = params[:prometheus_alerts_tls_ca_certs].strip if params[:prometheus_alerts_tls_ca_certs]
prometheus_alerts_hostname = params[:prometheus_alerts_hostname].strip if params[:prometheus_alerts_hostname]
prometheus_alerts_api_port = params[:prometheus_alerts_api_port].strip if params[:prometheus_alerts_api_port]
prometheus_alerts_security_protocol = params[:prometheus_alerts_security_protocol].strip if params[:prometheus_alerts_security_protocol]
kubevirt_tls_ca_certs = params[:kubevirt_tls_ca_certs].strip if params[:kubevirt_tls_ca_certs]
kubevirt_hostname = params[:kubevirt_hostname].strip if params[:kubevirt_hostname]
kubevirt_api_port = params[:kubevirt_api_port].strip if params[:kubevirt_api_port]
kubevirt_security_protocol = ems.security_protocol
default_endpoint = {}
amqp_endpoint = {}
amqp_fallback_endpoint1 = {}
amqp_fallback_endpoint2 = {}
ceilometer_endpoint = {}
ssh_keypair_endpoint = {}
metrics_endpoint = {}
hawkular_endpoint = {}
prometheus_endpoint = {}
prometheus_alerts_endpoint = {}
kubevirt_endpoint = {}
if ems.kind_of?(ManageIQ::Providers::Openstack::CloudManager) || ems.kind_of?(ManageIQ::Providers::Openstack::InfraManager)
default_endpoint = {:role => :default, :hostname => hostname, :port => port, :security_protocol => ems.security_protocol}
ems.keystone_v3_domain_id = params[:keystone_v3_domain_id]
if params[:event_stream_selection] == "amqp"
amqp_endpoint = {:role => :amqp, :hostname => amqp_hostname, :port => amqp_port, :security_protocol => amqp_security_protocol}
else
ceilometer_endpoint = {:role => :ceilometer}
end
end
if ems.kind_of?(ManageIQ::Providers::Openstack::CloudManager) || ems.kind_of?(ManageIQ::Providers::Openstack::InfraManager) || ems.kind_of?(ManageIQ::Providers::Redhat::InfraManager)
ssh_keypair_endpoint = {:role => :ssh_keypair}
end
if ems.kind_of?(ManageIQ::Providers::Redhat::InfraManager)
default_endpoint = {
:role => :default,
:hostname => hostname,
:port => port,
:security_protocol => ems.security_protocol,
:verify_ssl => params[:default_tls_verify] == 'on' ? OpenSSL::SSL::VERIFY_PEER : OpenSSL::SSL::VERIFY_NONE,
:certificate_authority => params[:default_tls_ca_certs],
}
metrics_endpoint = { :role => :metrics,
:hostname => metrics_hostname,
:port => metrics_port,
:path => metrics_database_name }
end
if ems.kind_of?(ManageIQ::Providers::Kubevirt::InfraManager)
kubevirt_endpoint = {
:role => :kubevirt,
:hostname => kubevirt_hostname,
:port => kubevirt_api_port,
}
kubevirt_endpoint.merge!(endpoint_security_options(kubevirt_security_protocol, kubevirt_tls_ca_certs))
end
if ems.kind_of?(ManageIQ::Providers::Google::CloudManager)
ems.project = params[:project]
end
if ems.kind_of?(ManageIQ::Providers::Microsoft::InfraManager)
default_endpoint = {:role => :default, :hostname => hostname, :security_protocol => ems.security_protocol}
ems.realm = params[:realm]
end
if ems.kind_of?(ManageIQ::Providers::Vmware::InfraManager)
default_endpoint = {:role => :default, :hostname => hostname}
ems.host_default_vnc_port_start = params[:host_default_vnc_port_start].blank? ? nil : params[:host_default_vnc_port_start].to_i
ems.host_default_vnc_port_end = params[:host_default_vnc_port_end].blank? ? nil : params[:host_default_vnc_port_end].to_i
end
if ems.kind_of?(ManageIQ::Providers::Vmware::CloudManager)
default_endpoint = {:role => :default, :hostname => hostname, :port => port}
if params[:event_stream_selection] == "amqp"
amqp_endpoint = {:role => :amqp, :hostname => amqp_hostname, :port => amqp_port, :security_protocol => amqp_security_protocol}
end
end
if ems.kind_of?(ManageIQ::Providers::Azure::CloudManager)
ems.azure_tenant_id = params[:azure_tenant_id]
ems.subscription = params[:subscription] if params[:subscription].present?
uri = URI.parse(WEBrick::HTTPUtils.escape(params[:default_url]))
default_endpoint = {:role => :default, :hostname => uri.host, :port => uri.port, :path => uri.path, :url => params[:default_url]}
end
if ems.kind_of?(ManageIQ::Providers::ContainerManager)
params[:cred_type] = ems.default_authentication_type if params[:cred_type] == "default"
default_endpoint = {:role => :default, :hostname => hostname, :port => port}
default_endpoint.merge!(endpoint_security_options(ems.security_protocol, default_tls_ca_certs))
if params[:metrics_selection] == 'hawkular'
params[:cred_type] = "hawkular" if params[:cred_type] == "metrics"
hawkular_endpoint = {:role => :hawkular, :hostname => metrics_hostname, :port => metrics_port}
hawkular_endpoint.merge!(endpoint_security_options(metrics_security_protocol, metrics_tls_ca_certs))
elsif params[:metrics_selection] == 'prometheus'
params[:cred_type] = "prometheus" if params[:cred_type] == "metrics"
prometheus_endpoint = {:role => :prometheus, :hostname => metrics_hostname, :port => metrics_port}
prometheus_endpoint.merge!(endpoint_security_options(metrics_security_protocol, metrics_tls_ca_certs))
end
if params[:alerts_selection] == 'prometheus'
prometheus_alerts_endpoint = {:role => :prometheus_alerts, :hostname => prometheus_alerts_hostname, :port => prometheus_alerts_api_port}
prometheus_alerts_endpoint.merge!(endpoint_security_options(prometheus_alerts_security_protocol, prometheus_alerts_tls_ca_certs))
end
if params[:virtualization_selection] == 'kubevirt'
kubevirt_endpoint = {:role => :kubevirt, :hostname => kubevirt_hostname, :port => kubevirt_api_port}
kubevirt_endpoint.merge!(endpoint_security_options(kubevirt_security_protocol, kubevirt_tls_ca_certs))
end
end
if ems.kind_of?(ManageIQ::Providers::Nuage::NetworkManager)
default_endpoint = {:role => :default, :hostname => hostname, :port => port, :security_protocol => ems.security_protocol}
amqp_endpoint = {:role => :amqp, :hostname => amqp_hostname, :port => amqp_port, :security_protocol => amqp_security_protocol}
amqp_fallback_endpoint1 = {:role => :amqp_fallback1, :hostname => amqp_fallback_hostname1, :port => amqp_port, :security_protocol => amqp_security_protocol} if amqp_fallback_hostname1.present?
amqp_fallback_endpoint2 = {:role => :amqp_fallback2, :hostname => amqp_fallback_hostname2, :port => amqp_port, :security_protocol => amqp_security_protocol} if amqp_fallback_hostname2.present?
end
if ems.kind_of?(ManageIQ::Providers::Lenovo::PhysicalInfraManager)
default_endpoint = {:role => :default, :hostname => hostname, :port => port}
end
if ems.kind_of?(ManageIQ::Providers::Amazon::CloudManager)
uri = URI.parse(WEBrick::HTTPUtils.escape(params[:default_url]))
default_endpoint = {:role => :default, :hostname => uri.host, :port => uri.port, :path => uri.path, :url => params[:default_url]}
end
if ems.kind_of?(ManageIQ::Providers::Redfish::PhysicalInfraManager)
default_endpoint = {
:role => :default,
:hostname => hostname,
:port => port,
:security_protocol => ems.security_protocol
}
end
new_options = {}
if ems.class.respond_to?(:advanced_settings)
ems.class.advanced_settings.each do |section_name, section|
section[:settings].each_key do |opt|
new_options[section_name.to_sym] ||= {}
value = params["provider_options_#{section_name}_#{opt}".to_sym]
new_options[section_name.to_sym][opt.to_sym] = value if value.present?
end
end
end
if ems.class.respond_to?(:proxy_settings)
new_options[:proxy_settings] = {}
ems.class.proxy_settings.each_key do |opt|
value = params["provider_options_proxy_settings_#{opt}".to_sym]
new_options[:proxy_settings][opt] = value if value.present?
end
end
ems.options = new_options
endpoints = {:default => default_endpoint,
:ceilometer => ceilometer_endpoint,
:amqp => amqp_endpoint,
:console => default_endpoint,
:smartstate_docker => default_endpoint,
:amqp_fallback1 => amqp_fallback_endpoint1,
:amqp_fallback2 => amqp_fallback_endpoint2,
:ssh_keypair => ssh_keypair_endpoint,
:metrics => metrics_endpoint,
:hawkular => hawkular_endpoint,
:prometheus => prometheus_endpoint,
:prometheus_alerts => prometheus_alerts_endpoint,
:kubevirt => kubevirt_endpoint}
build_connection(ems, endpoints, mode)
end
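  # Builds the TLS-related endpoint attributes for the chosen security protocol:
  # SSL verification stays on unless the protocol is 'ssl-without-validation' or
  # 'non-ssl', and the custom CA bundle is only kept for
  # 'ssl-with-validation-custom-ca'.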
def endpoint_security_options(security_protocol, certificate_authority)
{
:security_protocol => security_protocol,
:verify_ssl => %w(ssl-without-validation non-ssl).exclude?(security_protocol),
:certificate_authority => security_protocol == 'ssl-with-validation-custom-ca' ? certificate_authority : nil
}
end
def build_connection(ems, endpoints, mode)
authentications = build_credentials(ems, mode)
configurations = []
%i(default ceilometer amqp amqp_fallback1 amqp_fallback2 console smartstate_docker ssh_keypair metrics hawkular prometheus prometheus_alerts kubevirt).each do |role|
configurations << build_configuration(ems, authentications, endpoints, role)
end
ems.connection_configurations = configurations
end
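  # Pairs the endpoint for a given role with its credentials. The :default role
  # resolves to the provider's default authentication type; roles without
  # credentials still return their endpoint with a nil authentication.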
def build_configuration(ems, authentications, endpoints, role)
authtype = role == :default ? ems.default_authentication_type.to_sym : role
return {:endpoint => endpoints[role], :authentication => nil} unless authentications[authtype]
authentication = authentications.delete(authtype)
authentication[:role] = authtype.to_s
authentication[:authtype] = authtype.to_s
{:endpoint => endpoints[role], :authentication => authentication}
end
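  # Assembles the per-role credential hashes from the form params, falling back
  # to the passwords/keys already stored on the provider. :save is false while
  # the request is only validating the connection.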
def build_credentials(ems, mode)
creds = {}
if params[:default_userid]
default_password = params[:default_password] ? params[:default_password] : ems.authentication_password
creds[:default] = {:userid => params[:default_userid], :password => default_password, :save => (mode != :validate)}
end
if ems.supports_authentication?(:amqp) && params[:amqp_userid]
amqp_password = params[:amqp_password] ? params[:amqp_password] : ems.authentication_password(:amqp)
creds[:amqp] = {:userid => params[:amqp_userid], :password => amqp_password, :save => (mode != :validate)}
end
if ems.kind_of?(ManageIQ::Providers::Vmware::InfraManager) &&
ems.supports_authentication?(:console) && params[:console_userid]
console_password = params[:console_password] ? params[:console_password] : ems.authentication_password(:console)
creds[:console] = {:userid => params[:console_userid], :password => console_password, :save => (mode != :validate)} # FIXME: skateman was here
end
if ems.kind_of?(ManageIQ::Providers::Amazon::CloudManager) &&
ems.supports_authentication?(:smartstate_docker) && params[:smartstate_docker_userid]
smartstate_docker_password = params[:smartstate_docker_password] ? params[:smartstate_docker_password] : ems.authentication_password(:smartstate_docker)
creds[:smartstate_docker] = {:userid => params[:smartstate_docker_userid], :password => smartstate_docker_password, :save => true}
end
if (ems.kind_of?(ManageIQ::Providers::Openstack::InfraManager) ||
ems.kind_of?(ManageIQ::Providers::Openstack::CloudManager) ||
ems.kind_of?(ManageIQ::Providers::Redhat::InfraManager)) &&
ems.supports_authentication?(:ssh_keypair) && params[:ssh_keypair_userid]
ssh_keypair_password = params[:ssh_keypair_password] ? params[:ssh_keypair_password].gsub(/\r\n/, "\n") : ems.authentication_key(:ssh_keypair)
creds[:ssh_keypair] = {:userid => params[:ssh_keypair_userid], :auth_key => ssh_keypair_password, :save => (mode != :validate)}
end
if ems.kind_of?(ManageIQ::Providers::Redhat::InfraManager) &&
ems.supports_authentication?(:metrics) && params[:metrics_userid]
metrics_password = params[:metrics_password] ? params[:metrics_password] : ems.authentication_password(:metrics)
creds[:metrics] = {:userid => params[:metrics_userid], :password => metrics_password, :save => (mode != :validate)}
end
if ems.kind_of?(ManageIQ::Providers::Kubevirt::InfraManager)
creds[:kubevirt] = {
:auth_key => params[:kubevirt_password] ? params[:kubevirt_password] : ems.authentication_token(:kubevirt),
:save => mode != :validate,
}
end
if ems.supports_authentication?(:auth_key) && params[:service_account]
creds[:default] = {:auth_key => params[:service_account], :userid => "_", :save => (mode != :validate)}
end
if ems.supports_authentication?(:oauth) && session[:oauth_response].present?
auth = session[:oauth_response]
credentials = auth["credentials"]
creds[:oauth] = {:refresh_token => credentials["refresh_token"],
:access_token => credentials["access_token"],
:expires => credentials["expires"],
:userid => auth["info"]["name"],
:save => (mode != :validate)}
session[:oauth_response] = nil
end
if ems.kind_of?(ManageIQ::Providers::ContainerManager)
default_key = params[:default_password] ? params[:default_password] : ems.authentication_key
if params[:metrics_selection] == "hawkular"
creds[:hawkular] = {:auth_key => default_key, :save => (mode != :validate)}
elsif params[:metrics_selection] == "prometheus"
creds[:prometheus] = {:auth_key => default_key, :save => (mode != :validate)}
end
if params[:alerts_selection] == 'prometheus'
creds[:prometheus_alerts] = {:auth_key => default_key, :save => (mode != :validate)}
end
if params[:virtualization_selection] == 'kubevirt'
kubevirt_key = params[:kubevirt_password] ? params[:kubevirt_password] : ems.authentication_key(:kubevirt)
creds[:kubevirt] = { :auth_key => kubevirt_key, :save => (mode != :validate) }
end
creds[:bearer] = {:auth_key => default_key, :save => (mode != :validate)}
creds.delete(:default)
end
creds
end
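  # Infers the event stream currently in use from the saved endpoints; OpenStack
  # managers default to 'ceilometer' when neither endpoint is configured.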
def retrieve_event_stream_selection
return 'amqp' if @ems.connection_configurations.amqp&.endpoint&.hostname&.present?
return 'ceilometer' if @ems.connection_configurations.ceilometer&.endpoint&.hostname&.present?
@ems.kind_of?(ManageIQ::Providers::Openstack::CloudManager) || @ems.kind_of?(ManageIQ::Providers::Openstack::InfraManager) ? 'ceilometer' : 'none'
end
def construct_edit_for_audit(ems)
@edit ||= {}
azure_tenant_id = ems.kind_of?(ManageIQ::Providers::Azure::CloudManager) ? ems.azure_tenant_id : nil
@edit[:current] = {
:name => ems.name,
:provider_region => ems.provider_region,
:hostname => ems.hostname,
:azure_tenant_id => azure_tenant_id,
:keystone_v3_domain_id => ems.respond_to?(:keystone_v3_domain_id) ? ems.keystone_v3_domain_id : nil,
:subscription => ems.subscription,
:port => ems.port,
:api_version => ems.api_version,
:security_protocol => ems.security_protocol,
:provider_id => ems.provider_id,
:zone => ems.zone
}
@edit[:current][:tenant_mapping_enabled] = ems.tenant_mapping_enabled if ems.class.supports_cloud_tenant_mapping?
@edit[:new] = {:name => params[:name],
:provider_region => params[:provider_region],
:hostname => params[:hostname],
:azure_tenant_id => params[:azure_tenant_id],
:keystone_v3_domain_id => params[:keystone_v3_domain_id],
:port => params[:port],
:api_version => params[:api_version],
:security_protocol => params[:default_security_protocol],
:provider_id => params[:provider_id],
:zone => params[:zone]}
@edit[:new][:tenant_mapping_enabled] = params[:tenant_mapping_enabled] if ems.class.supports_cloud_tenant_mapping?
end
def zone
if @ems.zone.nil? || @ems.my_zone == ""
"default"
else
@ems.my_zone
end
end
def default_auth_status
@ems.authentication_status_ok? unless @ems.kind_of?(ManageIQ::Providers::Google::CloudManager)
end
end
end
| 55.296465 | 200 | 0.618311 |
5d5f4191738b54bd5e09693b4d852f49d129a0df
| 3,272 |
Brakeman.load_brakeman_dependency 'terminal-table'
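# Plain-text report that renders scan results as terminal-table tables. Beyond
# the summary, sections are appended according to the tracker options such as
# :summary_only, :report_routes and :debug.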
class Brakeman::Report::Table < Brakeman::Report::Base
def generate_report
out = text_header <<
"\n\n+SUMMARY+\n\n" <<
truncate_table(generate_overview.to_s) << "\n\n" <<
truncate_table(generate_warning_overview.to_s) << "\n"
#Return output early if only summarizing
return out if tracker.options[:summary_only]
if tracker.options[:report_routes] or tracker.options[:debug]
out << "\n+CONTROLLERS+\n" <<
truncate_table(generate_controllers.to_s) << "\n"
end
if tracker.options[:debug]
out << "\n+TEMPLATES+\n\n" <<
truncate_table(generate_templates.to_s) << "\n"
end
res = generate_errors
out << "+Errors+\n" << truncate_table(res.to_s) if res
res = generate_warnings
out << "\n\n+SECURITY WARNINGS+\n\n" << truncate_table(res.to_s) if res
res = generate_controller_warnings
out << "\n\n\nController Warnings:\n\n" << truncate_table(res.to_s) if res
res = generate_model_warnings
out << "\n\n\nModel Warnings:\n\n" << truncate_table(res.to_s) if res
res = generate_template_warnings
out << "\n\nView Warnings:\n\n" << truncate_table(res.to_s) if res
out << "\n"
out
end
def generate_overview
num_warnings = all_warnings.length
Terminal::Table.new(:headings => ['Scanned/Reported', 'Total']) do |t|
t.add_row ['Controllers', tracker.controllers.length]
t.add_row ['Models', tracker.models.length - 1]
t.add_row ['Templates', number_of_templates(@tracker)]
t.add_row ['Errors', tracker.errors.length]
t.add_row ['Security Warnings', "#{num_warnings} (#{warnings_summary[:high_confidence]})"]
t.add_row ['Ignored Warnings', ignored_warnings.length] unless ignored_warnings.empty?
end
end
#Generate listings of templates and their output
def generate_templates
out_processor = Brakeman::OutputProcessor.new
template_rows = {}
tracker.templates.each do |name, template|
unless template[:outputs].empty?
template[:outputs].each do |out|
out = out_processor.format out
template_rows[name] ||= []
template_rows[name] << out.gsub("\n", ";").gsub(/\s+/, " ")
end
end
end
template_rows = template_rows.sort_by{|name, value| name.to_s}
output = ''
template_rows.each do |template|
output << template.first.to_s << "\n\n"
table = Terminal::Table.new(:headings => ['Output']) do |t|
# template[1] is an array of calls
template[1].each do |v|
t.add_row [v]
end
end
output << table.to_s << "\n\n"
end
output
end
def render_array template, headings, value_array, locals
return if value_array.empty?
Terminal::Table.new(:headings => headings) do |t|
value_array.each { |value_row| t.add_row value_row }
end
end
#Generate header for text output
def text_header
<<-HEADER
+BRAKEMAN REPORT+
Application path: #{File.expand_path tracker.options[:app_path]}
Rails version: #{rails_version}
Brakeman version: #{Brakeman::Version}
Started at #{tracker.start_time}
Duration: #{tracker.duration} seconds
Checks run: #{checks.checks_run.sort.join(", ")}
HEADER
end
end
| 29.745455 | 96 | 0.65923 |
18938cf9dc10cfced4baf35d62e77f4f930286f7
| 1,800 |
class Admin::UsersController < Admin::BaseController
crudify :user, :order => 'login', :title_attribute => 'login'
# Protect these actions behind an admin login
before_filter :find_user, :except => [:index, :new, :create]
before_filter :load_available_plugins, :only => [:new, :create, :edit, :update]
layout 'admin'
def new
@user = User.new
@selected_plugin_names = []
end
def create
@user = User.new(params[:user])
@selected_plugin_names = params[:user][:plugins] || []
@user.add_role(:refinery)
if @user.save
@user.plugins = @selected_plugin_names
redirect_to(admin_users_url, :notice => t('refinery.crudify.created', :what => @user.login))
else
render :action => 'new'
end
end
def edit
@user = User.find params[:id]
@selected_plugin_names = @user.plugins.collect{|p| p.name}
end
def update
@selected_plugin_names = params[:user][:plugins]
# Prevent the current user from locking themselves out of the User manager
if current_user.id == @user.id and !params[:user][:plugins].include?("refinery_users")
flash.now[:error] = t('admin.users.update.cannot_remove_user_plugin_from_current_user')
render :action => "edit"
else
@previously_selected_plugin_names = @user.plugins.collect{|p| p.name}
if @user.update_attributes params[:user]
redirect_to admin_users_url, :notice => t('refinery.crudify.updated', :what => @user.login)
else
@user.plugins = @previously_selected_plugin_names
@user.save
render :action => 'edit'
end
end
end
protected
def load_available_plugins
@available_plugins = ::Refinery::Plugins.registered.in_menu.collect{|a| {:name => a.name, :title => a.title} }.sort_by {|a| a[:title]}
end
end
| 30.508475 | 138 | 0.668889 |
2824b2c413508780a0ec40c9498b0d0a50262c71
| 1,829 |
module PermissionsBuilder
class Standard < Base
attr_reader :permissions
def initialize(*args)
super
@permissions = {}
@permissions['RW+'] = {}
@permissions['RW'] = {}
@permissions['R'] = {}
end
def build
# Build permissions
build_permissions
# Return them
[merge_permissions(permissions, old_permissions)]
end
private
def build_permissions
@permissions['RW+'][''] = gitolite_users[:rewind_users] unless has_no_users?(:rewind_users)
@permissions['RW'][''] = gitolite_users[:write_users] unless has_no_users?(:write_users)
@permissions['R'][''] = gitolite_users[:read_users] unless has_no_users?(:read_users)
end
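    # Merges the freshly built permissions with the previously stored ones,
    # concatenating the user lists per permission level ('RW+', 'RW', 'R') and
    # branch, then drops any level that ends up empty.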
def merge_permissions(current_permissions, old_permissions)
merge_permissions = {}
merge_permissions['RW+'] = {}
merge_permissions['RW'] = {}
merge_permissions['R'] = {}
current_permissions.each do |perm, branch_settings|
branch_settings.each do |branch, user_list|
if user_list.any?
if !merge_permissions[perm].has_key?(branch)
merge_permissions[perm][branch] = []
end
merge_permissions[perm][branch] += user_list
end
end
end
old_permissions.each do |perm, branch_settings|
branch_settings.each do |branch, user_list|
if user_list.any?
if !merge_permissions[perm].has_key?(branch)
merge_permissions[perm][branch] = []
end
merge_permissions[perm][branch] += user_list
end
end
end
      merge_permissions.delete_if { |_perm, branch_settings| branch_settings.empty? }
merge_permissions
end
end
end
| 28.138462 | 97 | 0.613997 |
4af5061d7c8c86a3a406888f41910457d23beb3b
| 829 |
unless defined?(RSpec::Matchers.all)
module RSpec
module Matchers
# aruba assumes this is defined
def all
end
# aruba doesn't alias this itself on 2.99
def an_output_string_including(partial)
match partial
end
end
end
end
require 'aruba/cucumber'
Before do
if RUBY_PLATFORM =~ /java/
@aruba_timeout_seconds = 30
else
@aruba_timeout_seconds = 10
end
end
Aruba.configure do |config|
config.before_cmd do |cmd|
set_env('JRUBY_OPTS', "-X-C #{ENV['JRUBY_OPTS']}") # disable JIT since these processes are so short lived
end
end if RUBY_PLATFORM == 'java'
Aruba.configure do |config|
config.before_cmd do |cmd|
set_env('RBXOPT', "-Xint=true #{ENV['RBXOPT']}") # disable JIT since these processes are so short lived
end
end if defined?(Rubinius)
| 22.405405 | 109 | 0.68275 |
4aa9efc73e11264c078bbfb3ce43eb446b344814
| 445 |
class Subscription < ActiveRecord::Base
include Mongoid::Document
include Mongoid::Timestamps
field :uid, type: String
field :plan_id, type: String
field :plan_key, type: String
field :plan_name, type: String
field :started_at, type: Date
field :canceled_at, type: Date
attr_accessible :canceled_at, :plan_id, :plan_key, :plan_name, :started_at, :uid
end
| 31.785714 | 82 | 0.626966 |
bf9252530acd4d5db13d3b2f5bd69acf5c99dc8e
| 354 |
module Stripes
class Source < ApplicationRecord
belongs_to(
:payment,
class_name: 'Stripes::Payment',
foreign_key: :stripes_payment_id,
inverse_of: :source
)
enum status: { pending: 0, chargeable: 1, consumed: 2, failed: 3, canceled: 4 }
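    # `details` is presumably the stored Stripe Source payload (a Hash-like
    # attribute), so the hosted redirect URL lives under details['redirect']['url'].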
def redirect_url
details.dig('redirect', 'url')
end
end
end
| 20.823529 | 83 | 0.638418 |
edcec84dc9a029f8e0eeb7b652d1804050891639
| 1,221 |
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'octopress-filters/version'
Gem::Specification.new do |spec|
spec.name = "octopress-filters"
spec.version = Octopress::Filters::VERSION
spec.authors = ["Brandon Mathis"]
spec.email = ["[email protected]"]
spec.summary = %q{A set of handy liquid filters used by Octopress}
spec.homepage = "https://github.com/octopress/filters"
spec.license = "MIT"
spec.files = `git ls-files`.split("\n").grep(%r{^(bin\/|lib\/|assets\/|changelog|readme|license)}i)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_runtime_dependency "jekyll"
spec.add_runtime_dependency "rubypants-unicode"
spec.add_runtime_dependency "titlecase"
spec.add_runtime_dependency "octopress-hooks", "~> 2.0"
spec.add_development_dependency "bundler", "~> 1.6"
spec.add_development_dependency "rake"
spec.add_development_dependency "clash"
spec.add_development_dependency "octopress-filter-tag"
if RUBY_VERSION >= "2"
spec.add_development_dependency "pry-byebug"
end
end
| 37 | 109 | 0.69697 |
7aa602e92faff7e2d6a40c6f96e9d4a895108881
| 4,944 |
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Generate digests for assets URLs.
config.assets.digest = true
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
config.action_controller.asset_host = "https://d3nul2kglwi6pb.cloudfront.net"
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
if ENV["MEMCACHEDCLOUD_SERVERS"]
config.cache_store = :mem_cache_store, ENV["MEMCACHEDCLOUD_SERVERS"].split(','),
{ :username => ENV["MEMCACHEDCLOUD_USERNAME"], :password => ENV["MEMCACHEDCLOUD_PASSWORD"] }
else
config.cache_store = :dalli_store, ENV["MEMCACHED_URL"]
end
config.session_store ActionDispatch::Session::CacheStore, expire_after: 7.days
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "skyway_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
config.paperclip_defaults = {
storage: :s3,
s3_credentials: {
bucket: ENV['S3_BUCKET_NAME'],
access_key_id: ENV['AWS_ACCESS_KEY_ID'],
secret_access_key: ENV['AWS_SECRET_ACCESS_KEY']
},
s3_region: 'us-east-1',
s3_protocol: :https,
s3_host_alias: "files.aqueousband.net",
url: ":s3_alias_url",
path: '/:class/:attachment/:id_partition/:style/:filename'
}
config.middleware.use Rack::Deflater
  # Force-enable serving static files from the `/public` folder (overriding the
  # ENV-based setting above) so the CORS and caching headers below are applied.
config.public_file_server.enabled = true
config.public_file_server.headers = {
'Access-Control-Allow-Origin' => '*',
'Access-Control-Allow-Methods' => 'GET',
'Access-Control-Allow-Headers' => 'x-requested-with',
'Access-Control-Max-Age' => '3628800',
'Cache-Control' => 'public, max-age=31536000',
'Expires' => "#{1.year.from_now.to_formatted_s(:rfc822)}"
}
end
| 39.870968 | 102 | 0.740696 |
1894e857cdf4fc67220fe367be0fb7aece431cf6
| 4,764 |
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :local
# Mount Action Cable outside main process or domain.
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [:request_id]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "longbox_production"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new($stdout)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Inserts middleware to perform automatic connection switching.
# The `database_selector` hash is used to pass options to the DatabaseSelector
# middleware. The `delay` is used to determine how long to wait after a write
# to send a subsequent read to the primary.
#
# The `database_resolver` class is used by the middleware to determine which
# database is appropriate to use based on the time delay.
#
# The `database_resolver_context` class is used by the middleware to set
# timestamps for the last write to the primary. The resolver uses the context
# class timestamps to determine how long to wait before reading from the
# replica.
#
# By default Rails will store a last write timestamp in the session. The
# DatabaseSelector middleware is designed as such you can define your own
# strategy for connection switching and pass that into the middleware through
# these configuration options.
# config.active_record.database_selector = { delay: 2.seconds }
# config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
# config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
| 44.943396 | 114 | 0.763224 |
1c5320d8f5d9c8ce700372ed86fa880595f46209
| 529 |
# frozen_string_literal: true
require "test_helper"
module Fourier
module Services
module Lint
class FixturegenTest < TestCase
def test_calls_system_with_the_right_arguments
# Given
directories = [
File.expand_path("Sources", Constants::FIXTUREGEN_DIRECTORY),
]
Utilities::SwiftLinter.expects(:lint)
.with(directories: directories, fix: true)
# When/Then
Fixturegen.call(fix: true)
end
end
end
end
end
| 22.041667 | 73 | 0.618147 |
fffe986e5b45cc17323a8db9c1362b7b8abf5920
| 45 |
module NombreRegionesBibliografiasHelper
end
| 15 | 40 | 0.933333 |
28b0b6cbcd01e8efb14ac811b0fea447926b21b7
| 1,132 |
# See `dropzone_helpers.rb` for the step relating to checkboxes and a comment
# explaining why that step is there.
Given(/^I show my environment$/) do
puts "Running against: #{ENV.fetch('DOWNLOADER_URI')}"
end
When(/^I visit "(.*?)"$/) do |path|
visit path
end
Then(/^I should be on "([^"]*)"$/) do |page_name|
expect("#{Capybara.app_host}#{URI.parse(current_url).path}").to eql("#{Capybara.app_host}#{page_name}")
end
Then(/^I should see "(.*?)"$/) do |text|
expect(page).to have_text(text)
end
Then(/^I should not see "(.*?)"$/) do |text|
expect(page).not_to have_text(text)
end
When(/^I click the "(.*?)" link$/) do |text|
click_link(text)
end
When(/^I click the "(.*?)" button$/) do |text|
begin
find("input[value='#{text}']").click
rescue Capybara::Poltergeist::MouseEventFailed
find("input[value='#{text}']").trigger('click')
end
end
Given(/^I fill in my login details$/) do
fill_in("Email", with: ENV.fetch('SMOKETEST_USER'))
fill_in("Password", with: ENV.fetch('SMOKETEST_PASSWORD'))
end
When(/^I fill in "([^"]*)" with "([^"]*)"$/) do |field, value|
fill_in(field, with: value)
end
| 25.727273 | 105 | 0.64311 |
e28a42fd2954690011f3da77335439e1b07ea114
| 130 |
module DashboardHelper
def refresh_dashboard
page.execute_script "window.jQuery.ajax({url: 'refresh_dashboard'})"
end
end
| 21.666667 | 72 | 0.776923 |
7ae373ede7d0432b2c8db3a1e91373c6b7cd150c
| 2,072 |
# rubocop:disable all
module NavigationHelpers
# Maps a name to a path. Used by the
#
# When /^I go to (.+)$/ do |page_name|
#
# step definition in web_steps.rb
#
def path_to(page_name)
case page_name
when /the dashboard page/
authenticated_root_path
when /the home\s?page/
'/'
when /the sign in page/
new_user_session_path
when /the unauthorized page/
'/unauthorized'
when /new event page/
'/events/new'
when /event topics page/
"/events/#{Event.last.id}/events_learning_topics"
when /event show page/
"/events/#{Event.last.id}"
when /department selection page/
"/events/#{Event.last.id}/department_event/new"
when /^#{capture_model}(?:'s)? (?:admin) dashboard$/
"/admin/#{$1.pluralize}/"
# the following are examples using path_to_pickle
when /^#{capture_model}(?:'s)? page$/ # eg. the forum's page
path_to_pickle $1
when /^#{capture_model}(?:'s)? #{capture_model}(?:'s)? page$/ # eg. the forum's post's page
path_to_pickle $1, $2
when /^#{capture_model}(?:'s)? #{capture_model}'s (.+?) page$/ # eg. the forum's post's comments page
path_to_pickle $1, $2, :extra => $3 # or the forum's post's edit page
when /^#{capture_model}(?:'s)? (.+?) page$/ # eg. the forum's posts page
path_to_pickle $1, :extra => $2 # or the forum's edit page
# Add more mappings here.
# Here is an example that pulls values out of the Regexp:
#
# when /^(.*)'s profile page$/i
# user_profile_path(User.find_by_login($1))
else
begin
page_name =~ /the (.*) page/
path_components = $1.split(/\s+/)
self.send(path_components.push('path').join('_').to_sym)
rescue Object => _e
raise "Can't find mapping from \"#{page_name}\" to a path.\n" +
"Now, go and add a mapping in #{__FILE__}"
end
end
end
end
World(NavigationHelpers)
| 28.383562 | 106 | 0.570946 |
1ab91a27f4859ae2e0553dc729ca3c7659ab6c15
| 731 |
class ConfirmationEmailAddressesPresenter
delegate :primary_claimant, :representative, :secondary_claimants, to: :claim
attr_reader :claim
def initialize(claim)
@claim = claim
end
def self.email_addresses_for(claim)
new(claim).filter_email_addresses
end
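  # Returns [address, {checked: boolean}] pairs for the primary claimant and the
  # representative, dropping any pair whose address is blank.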
def filter_email_addresses
[primary_claimant_email, representative_email].
reject { |email| email.first.blank? }
end
private
def primary_claimant_email
[primary_claimant.email_address, checked: tick_primary_claimant?]
end
def representative_email
[representative.try(:email_address), checked: true]
end
def tick_primary_claimant?
secondary_claimants.none? || representative.try(:email_address).blank?
end
end
| 22.151515 | 79 | 0.763338 |
1ab7406c0890aa0f9db7ec57516d28512eb3cbdd
| 579 |
#!/usr/bin/env ruby
# encoding: UTF-8
########################################################################
# Edit the Finder selection in TextMate.
# If there's no selection, edit the folder for the active Finder window.
########################################################################
require 'appscript'
finder = Appscript::app('Finder')
paths = finder.selection.get(:result_type => :alias).map(&:path)
paths << finder.insertion_location.get(:result_type => :file_ref).get(:result_type => :alias).path if paths.empty?
system "mate", *paths unless paths.empty?
| 36.1875 | 114 | 0.545769 |
f7a51c6e2876126a6b26faeb2053978b50329108
| 3,356 |
require "rbprolog/version"
require 'rbprolog/context'
require 'rbprolog/rule'
require 'rbprolog/deduction'
require 'rbprolog/evaluation'
require 'rbprolog/var'
=begin rdoc
Partially simulates Prolog-style logic processing through a Ruby-based DSL
= Representations (Conventions)
[rule - class level]
* name arg0, arg1, ..., {:if => [deduction0, deduction1, ...]}
* name: defined in the keywords
* arg: use const to present the variable, and non-const for value
* deduction: see below
[fact - class level]
* name: arg0, arg1, ...
[deduction - class level]
* name? arg0, arg1, ...
* return: true or false
[question - instance level]
* name? arg0, arg1, ...
[enumerator - instance level]
* name! arg0, arg1, ...
* Deduce all possible answers
= Example
class FriendLogic
include Rbprolog
keywords :likes, :friends
likes 'p1', 's1'
likes 'p1', 's2'
likes 'p2', 's2'
likes 'p3', 's1'
likes 'p4', X
friends 'p1', W, :if => likes?(W, 's2')
friends X, Y, :if => [likes?(X, Z), likes?(Y, Z)]
end
l = FriendLogic.new
l.likes?('p1', 's1') #=> true
l.friends?('p1', 'p4') #=> true
=end
module Rbprolog
def self.included(mod)
class << mod
include ClassMethods
attr_accessor :rules, :syms
end
end
#
  #Initializes the rbprolog instance; each instance can have its
  #own facts and rules. The definition can be passed in as a string or a block.
  #A string is required when a variable such as X is used.
# l = FriendLogic.new do
# likes 'p5', 's1'
# end
#or
# l = FriendLogic.new %q{
# friends 'p2', X, :if => likes?(X, 's1')
# }
#
def initialize(string = nil, &block)
if string || block
self.extend(Rbprolog)
self.singleton_class.keywords(*self.class.syms)
self.singleton_class.class_eval(string) if string
self.singleton_class.class_eval(&block) if block
end
end
def rules
self.class.rules + (self.singleton_class.rules || [])
end
module ClassMethods
#Define the vocabulary of rules and facts
def keywords(*syms)
raise if syms.any? {|sym| sym.to_s.end_with? '?'}
self.syms ||= []
self.syms.concat(syms)
end
def const_missing(sym)
Var.new(sym)
end
#Generate rule, fact and deduction based on conventions
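    # - a bare keyword (e.g. `likes 'p1', 's1'`) declares a rule or fact at class level
    # - a keyword ending in `?` (e.g. `likes?(X, 's1')`) builds a Deduction for :if clauses
    # - anything else falls through to the default method_missing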
def method_missing(sym, *args)
if self.syms.include? sym
Hash === args.last ? rule(sym, *args) : rule(sym, *args, :if => [])
elsif self.syms.include? sym.to_s.chomp('?').to_sym
Deduction.new(sym.to_s.chomp('?').to_sym, *args)
else
super
end
end
#Internal class method to install instance methods for question and enumerator
def rule(sym, *args, options)
self.rules ||= []
self.rules << Rule.new(sym, *args, options[:if])
unless method_defined?(sym)
define_method("#{sym}!") do |*args|
deduction = Deduction.new(sym, *args)
deduction.extend(Enumerable)
rules = self.rules
deduction.define_singleton_method(:each) do |&block|
each_deduce(Context.new, rules, []) do |hash|
block.call hash
end
end
deduction
end
define_method("#{sym}?") do |*args|
self.send("#{sym}!", *args).any? {|hash| true}
end
end
end
end
end
| 24.318841 | 82 | 0.610548 |
1ddf0de5aa5764992928537422e70d64e5508610
| 113 |
class AddUuidToOrganism < ActiveRecord::Migration
def change
    add_column :organisms, :uuid, :string
end
end
| 18.833333 | 49 | 0.769912 |
5df63aa8c8afcd656034ef50ea6825496247be58
| 3,803 |
module NotesHelper
# Helps to distinguish e.g. commit notes in mr notes list
def note_for_main_target?(note)
@noteable.class.name == note.noteable_type && !note.diff_note?
end
def note_target_fields(note)
if note.noteable
hidden_field_tag(:target_type, note.noteable.class.name.underscore) +
hidden_field_tag(:target_id, note.noteable.id)
end
end
def note_editable?(note)
note.editable? && can?(current_user, :admin_note, note)
end
def noteable_json(noteable)
{
id: noteable.id,
class: noteable.class.name,
resources: noteable.class.table_name,
project_id: noteable.project.id,
}.to_json
end
def diff_view_data
return {} unless @comments_target
@comments_target.slice(:noteable_id, :noteable_type, :commit_id)
end
def diff_view_line_data(line_code, position, line_type)
return if @diff_notes_disabled
use_legacy_diff_note = @use_legacy_diff_notes
# If the controller doesn't force the use of legacy diff notes, we
# determine this on a line-by-line basis by seeing if there already exist
# active legacy diff notes at this line, in which case newly created notes
# will use the legacy technology as well.
# We do this because the discussion_id values of legacy and "new" diff
# notes, which are used to group notes on the merge request discussion tab,
# are incompatible.
# If we didn't, diff notes that would show for the same line on the changes
# tab, would show in different discussions on the discussion tab.
use_legacy_diff_note ||= begin
line_diff_notes = @grouped_diff_notes[line_code]
line_diff_notes && line_diff_notes.any?(&:legacy_diff_note?)
end
data = {
line_code: line_code,
line_type: line_type,
}
if use_legacy_diff_note
discussion_id = LegacyDiffNote.build_discussion_id(
@comments_target[:noteable_type],
@comments_target[:noteable_id] || @comments_target[:commit_id],
line_code
)
data.merge!(
note_type: LegacyDiffNote.name,
discussion_id: discussion_id
)
else
discussion_id = DiffNote.build_discussion_id(
@comments_target[:noteable_type],
@comments_target[:noteable_id] || @comments_target[:commit_id],
position
)
data.merge!(
position: position.to_json,
note_type: DiffNote.name,
discussion_id: discussion_id
)
end
data
end
def link_to_reply_discussion(note, line_type = nil)
return unless current_user
data = {
noteable_type: note.noteable_type,
noteable_id: note.noteable_id,
commit_id: note.commit_id,
discussion_id: note.discussion_id,
line_type: line_type
}
if note.diff_note?
data[:note_type] = note.type
data.merge!(note.diff_attributes)
end
content_tag(:div, class: "discussion-reply-holder") do
      button_tag 'Reply...', class: 'btn btn-text-field js-discussion-reply-button',
                             data: data, title: 'Add reply'
end
end
def note_max_access_for_user(note)
@max_access_by_user_id ||= Hash.new do |hash, key|
project = key[:project]
hash[key] = project.team.human_max_access(key[:user_id])
end
full_key = { project: note.project, user_id: note.author_id }
@max_access_by_user_id[full_key]
end
def diff_note_path(note)
return unless note.diff_note?
if note.for_merge_request? && note.active?
diffs_namespace_project_merge_request_path(note.project.namespace, note.project, note.noteable, anchor: note.line_code)
elsif note.for_commit?
namespace_project_commit_path(note.project.namespace, note.project, note.noteable, anchor: note.line_code)
end
end
end
| 29.944882 | 125 | 0.687878 |
bb91f1096745df3b23791e321d7ed7ca1bfb65cf
| 8,949 |
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require 'logger'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# TransferApplianceSummary model.
class Dts::Models::TransferApplianceSummary
LIFECYCLE_STATE_ENUM = [
LIFECYCLE_STATE_REQUESTED = 'REQUESTED'.freeze,
LIFECYCLE_STATE_ORACLE_PREPARING = 'ORACLE_PREPARING'.freeze,
LIFECYCLE_STATE_SHIPPING = 'SHIPPING'.freeze,
LIFECYCLE_STATE_DELIVERED = 'DELIVERED'.freeze,
LIFECYCLE_STATE_PREPARING = 'PREPARING'.freeze,
LIFECYCLE_STATE_FINALIZED = 'FINALIZED'.freeze,
LIFECYCLE_STATE_RETURN_DELAYED = 'RETURN_DELAYED'.freeze,
LIFECYCLE_STATE_RETURN_SHIPPED = 'RETURN_SHIPPED'.freeze,
LIFECYCLE_STATE_RETURN_SHIPPED_CANCELLED = 'RETURN_SHIPPED_CANCELLED'.freeze,
LIFECYCLE_STATE_ORACLE_RECEIVED = 'ORACLE_RECEIVED'.freeze,
LIFECYCLE_STATE_ORACLE_RECEIVED_CANCELLED = 'ORACLE_RECEIVED_CANCELLED'.freeze,
LIFECYCLE_STATE_PROCESSING = 'PROCESSING'.freeze,
LIFECYCLE_STATE_COMPLETE = 'COMPLETE'.freeze,
LIFECYCLE_STATE_CUSTOMER_NEVER_RECEIVED = 'CUSTOMER_NEVER_RECEIVED'.freeze,
LIFECYCLE_STATE_ORACLE_NEVER_RECEIVED = 'ORACLE_NEVER_RECEIVED'.freeze,
LIFECYCLE_STATE_CUSTOMER_LOST = 'CUSTOMER_LOST'.freeze,
LIFECYCLE_STATE_CANCELLED = 'CANCELLED'.freeze,
LIFECYCLE_STATE_DELETED = 'DELETED'.freeze,
LIFECYCLE_STATE_REJECTED = 'REJECTED'.freeze,
LIFECYCLE_STATE_ERROR = 'ERROR'.freeze,
LIFECYCLE_STATE_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
].freeze
# @return [String]
attr_accessor :label
# @return [String]
attr_reader :lifecycle_state
# @return [String]
attr_accessor :serial_number
# @return [DateTime]
attr_accessor :creation_time
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'label': :'label',
'lifecycle_state': :'lifecycleState',
'serial_number': :'serialNumber',
'creation_time': :'creationTime'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'label': :'String',
'lifecycle_state': :'String',
'serial_number': :'String',
'creation_time': :'DateTime'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [String] :label The value to assign to the {#label} property
# @option attributes [String] :lifecycle_state The value to assign to the {#lifecycle_state} property
# @option attributes [String] :serial_number The value to assign to the {#serial_number} property
# @option attributes [DateTime] :creation_time The value to assign to the {#creation_time} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.label = attributes[:'label'] if attributes[:'label']
self.lifecycle_state = attributes[:'lifecycleState'] if attributes[:'lifecycleState']
raise 'You cannot provide both :lifecycleState and :lifecycle_state' if attributes.key?(:'lifecycleState') && attributes.key?(:'lifecycle_state')
self.lifecycle_state = attributes[:'lifecycle_state'] if attributes[:'lifecycle_state']
self.serial_number = attributes[:'serialNumber'] if attributes[:'serialNumber']
raise 'You cannot provide both :serialNumber and :serial_number' if attributes.key?(:'serialNumber') && attributes.key?(:'serial_number')
self.serial_number = attributes[:'serial_number'] if attributes[:'serial_number']
self.creation_time = attributes[:'creationTime'] if attributes[:'creationTime']
raise 'You cannot provide both :creationTime and :creation_time' if attributes.key?(:'creationTime') && attributes.key?(:'creation_time')
self.creation_time = attributes[:'creation_time'] if attributes[:'creation_time']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Custom attribute writer method checking allowed values (enum).
# @param [Object] lifecycle_state Object to be assigned
def lifecycle_state=(lifecycle_state)
# rubocop:disable Style/ConditionalAssignment
if lifecycle_state && !LIFECYCLE_STATE_ENUM.include?(lifecycle_state)
OCI.logger.debug("Unknown value for 'lifecycle_state' [" + lifecycle_state + "]. Mapping to 'LIFECYCLE_STATE_UNKNOWN_ENUM_VALUE'") if OCI.logger
@lifecycle_state = LIFECYCLE_STATE_UNKNOWN_ENUM_VALUE
else
@lifecycle_state = lifecycle_state
end
# rubocop:enable Style/ConditionalAssignment
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
label == other.label &&
lifecycle_state == other.lifecycle_state &&
serial_number == other.serial_number &&
creation_time == other.creation_time
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[label, lifecycle_state, serial_number, creation_time].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
        # or else: data not found in attributes (hash) is not an issue, as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 39.422907 | 245 | 0.696502 |
91b098c7a3fe1911e1baae3edf867f9fbb49d756
| 233 |
#!/usr/bin/env ruby
`ln -sf #{File.expand_path(File.dirname(__FILE__))}/vfl2objc.rb /usr/bin/`
`rm -rf ~/Library/Services/vfl-file.workflow`
`cp -r #{File.expand_path(File.dirname(__FILE__))}/vfl-file.workflow ~/Library/Services/`
| 33.285714 | 89 | 0.72103 |
4a88b1bdf67e8643743fde9c5a27f53dca8d10c1
| 1,759 |
class Vip < Formula
desc "Program that provides for interactive editing in a pipeline"
homepage "https://www.cs.duke.edu/~des/vip.html"
url "https://www.cs.duke.edu/~des/scripts/vip"
version "19971113"
sha256 "171278e8bd43abdbd3a4c35addda27a0d3c74fc784dbe60e4783d317ac249d11"
bottle do
cellar :any_skip_relocation
sha256 "da936f8d9a839a1235962c772ae957563c13f089d5953df7c1ba64b694cb0687" => :mojave
sha256 "5622623485848fc1e4238404c3491f056f4220c6a80fbe9342ec89cd34b15bcb" => :high_sierra
sha256 "12eec6f5294a94f2fb09c54f218470aab2fb7bad58570e8a82c789d8ba5e9639" => :sierra
sha256 "1bf2041f43bcea1e8c503119a9b34f8849b751da767ec5b5094fd5fa8fe5f297" => :el_capitan
sha256 "8e60ec9a240192f872f5d730ca93c9bc9e73d4644e685173554ff786b634ef7c" => :yosemite
sha256 "96ae6a94171da559b1762970dc99b1c458ccd68c061d40248879d16bb6df8511" => :mavericks
end
resource "man" do
url "https://www.cs.duke.edu/~des/scripts/vip.man"
sha256 "37b2753f7c7b39c81f97b10ea3f8e2dd5ea92ea8d130144fa99ed54306565f6f"
end
# use awk and /var/tmp as temporary directory
patch :DATA
def install
bin.install "vip"
resource("man").stage do
man1.install "vip.man" => "vip.1"
end
end
end
__END__
diff --git a/vip b/vip
index f150167..e517675 100644
--- a/vip
+++ b/vip
@@ -66,7 +66,7 @@ Usage: $PROG [ -no ] [ command ]
otherwise stdin is used;
"
-: ${TMPDIR:="/usr/tmp"} # where temp. files go
+: ${TMPDIR:="/var/tmp"} # where temp. files go
TEMP_FILE="$TMPDIR/$PROG.$$" # temp. file to hold data to edit
COMMAND="cat" # default command to produce input
DFLT_ED="vi" # default editor
@@ -81,6 +81,10 @@ case "$SYS" in
;;
"HP-UX "*)
AWK=awk
+ ;;
+ "Darwin "*)
+ AWK=awk
+ ;;
esac
#
| 29.316667 | 93 | 0.719727 |
bfc72ce4d9be5d122bccd64405d05ab27e3f67ef
| 896 |
# -*- encoding : utf-8 -*-
require 'spec_helper'
describe OCRSDK::Verifiers::Profile do
let (:class_with_module) {
Class.new do
include OCRSDK::Verifiers::Profile
end
}
subject { class_with_module.new }
it "should have list of possible profiles" do
OCRSDK::Verifiers::Profile::PROFILES.length.should > 0
end
it "should convert profile to string" do
subject.profile_to_s(:meow_meow).should == 'meowMeow'
end
describe ".supported_profile?" do
it "should return false for incorrect profile" do
subject.supported_profile?(:meow_meow).should be_false
end
it "should return true for correct profile as symbol" do
subject.supported_profile?(:document_conversion).should be_true
end
it "should return true for correct profile as string" do
subject.supported_profile?("documentConversion").should be_true
end
end
end
| 27.151515 | 69 | 0.714286 |
4a5bd840d6459bbd898cd91e788d62a32cb4143a
| 2,156 |
# frozen_string_literal: true
module Get
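  # Incremental HTTP response parser: headers are collected into an
  # InsensitiveHash and body chunks (inflated when the response is deflate/gzip
  # encoded) are handed to the +on_body+ callback. A minimal usage sketch (the
  # input variable below is illustrative):
  #
  #   parser = Get::Parser.new(on_body: ->(chunk) { $stdout.write(chunk) })
  #   parser.parse!(bytes_read_from_socket) # call repeatedly as data arrives
  #   parser.status if parser.done?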
class Parser
SUPPORTED_ENCODING = Set.new(%w[deflate gzip x-gzip]).freeze
attr_reader :headers
def initialize(on_body:)
@state = ::HttpParser::Parser.new_instance { |i| i.type = :response }
@on_body = on_body
reset
end
def parse!(chunk)
http_parser.parse(@state, chunk)
end
def status
@state.http_status
end
def error
@error || @state.error
end
def done?
@done
end
def reset
@state.reset!
@headers = InsensitiveHash.new
@error = nil
@reading = false
@done = false
@field = +""
@field_value = +""
end
private
def append_header
@headers[@field] = @field_value
@reading = false
@field_value = +""
@field = +""
end
def http_parser
@http_parser ||= HttpParser::Parser.new do |parser|
parser.on_header_field do |instance, data|
append_header if @reading
@field << data
end
parser.on_header_value do |instance, data|
@reading = true
@field_value << data
end
parser.on_message_complete do |instance|
@done = true
end
parser.on_headers_complete do |instance, data|
append_header if @reading
# @state.stop!
end
parser.on_body do |instance, data|
if @on_body.respond_to?(:call)
data = inflate(data) if inflate?
@on_body.call(data)
end
rescue => exception
@error = exception
end
end
end
def inflate(chunk)
if chunk
chunk = zstream.inflate(chunk)
elsif !zstream.closed?
zstream.finish
zstream.close
end
chunk
end
def zstream
@zstream ||= Zlib::Inflate.new(32 + Zlib::MAX_WBITS)
end
def inflate?
SUPPORTED_ENCODING.include?(@headers["content-encoding"])
end
end
end
| 20.932039 | 77 | 0.522263 |
6a7b25a0647d59a93a96b32f5b9b14ec6ab85212
| 369 |
module EventStore
module HTTP
module NetHTTP
class Substitute
module Telemetry
class Sink
include ::Telemetry::Sink
record :requested
record :responded
end
Requested = Struct.new :request
Responded = Struct.new :response, :request
end
end
end
end
end
| 18.45 | 52 | 0.552846 |
bf84d4a81c6c5a64651491764cfacba51d3128be
| 2,065 |
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ContainerInstance::Mgmt::V2018_04_01
module Models
#
# Represents a volume that is populated with the contents of a git
# repository
#
class GitRepoVolume
include MsRestAzure
# @return [String] Target directory name. Must not contain or start with
# '..'. If '.' is supplied, the volume directory will be the git
# repository. Otherwise, if specified, the volume will contain the git
# repository in the subdirectory with the given name.
attr_accessor :directory
# @return [String] Repository URL
attr_accessor :repository
# @return [String] Commit hash for the specified revision.
attr_accessor :revision
#
# Mapper for GitRepoVolume class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'GitRepoVolume',
type: {
name: 'Composite',
class_name: 'GitRepoVolume',
model_properties: {
directory: {
client_side_validation: true,
required: false,
serialized_name: 'directory',
type: {
name: 'String'
}
},
repository: {
client_side_validation: true,
required: true,
serialized_name: 'repository',
type: {
name: 'String'
}
},
revision: {
client_side_validation: true,
required: false,
serialized_name: 'revision',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 28.287671 | 78 | 0.532203 |
d5b7a740e9d5cdb5d56381c37b89b3a859570b78
| 1,429 |
Pod::Spec.new do |s|
s.name = "JWPlayerPlugin"
s.version = '3.4.5'
s.summary = 'JWPlayer Player plugin implementation.'
s.description = 'An implementation for JWPlayer as a Zapp Player Plugin in Objective C.'
s.homepage = "https://github.com/applicaster/zapp-player-plugin-jw"
s.license = 'MIT'
s.author = { "Jesus De Meyer" => "[email protected]" }
s.source = { :git => "[email protected]:applicaster/zapp-player-plugin-jw.git", :tag => 'ios-' + s.version.to_s }
s.ios.deployment_target = "10.0"
s.platform = :ios, '10.0'
s.requires_arc = true
s.source_files = 'iOS/JWPlayerPlugin/**/*.{swift,h,m}'
s.public_header_files = 'iOS/JWPlayerPlugin/**/*.h'
s.resources = 'iOS/JWPlayerPlugin/**/*.{storyboard,png}'
s.xcconfig = { 'CLANG_ALLOW_NON_MODULAR_INCLUDES_IN_FRAMEWORK_MODULES' => 'YES',
'ENABLE_BITCODE' => 'YES',
# Workaround until this will be released https://github.com/CocoaPods/CocoaPods/pull/9045
'OTHER_LDFLAGS' => '$(inherited) -framework "JWPlayer_iOS_SDK"',
'SWIFT_VERSION' => '5.1'
}
s.dependency 'ZappCore'
s.dependency 'ApplicasterSDK'
s.dependency 'JWPlayer-SDK', '~> 3.10.0'
s.dependency 'GoogleAds-IMA-iOS-SDK', '= 3.9.2'
s.dependency 'google-cast-sdk-no-bluetooth'
end
| 43.30303 | 122 | 0.60112 |
01a291534376ea344ea82c409293ada0e0bab9a5
| 229 |
# frozen_string_literal: true
source "https://rubygems.org"
# Specify your gem's dependencies in bake.gemspec
gemspec
group :maintenance, optional: true do
gem 'bake-modernize'
gem 'bake-bundler'
gem 'utopia-project'
end
| 16.357143 | 49 | 0.755459 |
d5c15a53e2b6033093493d7a853b9ecb28ad31b1
| 2,681 |
# frozen_string_literal: true
# Copyright 2019 Matthew B. Gray
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# UpgradeOffer is used for a User who is trying to upgrade between different membership types
# This price is the difference between what the user has paid and the Membership they're trying to upgrade to
# e.g.
# Silver Fern upgrading to Adult cost $50 on CoNZealand launch ($375 - $325 = $50)
# But when prices rotated, upgrading to Adult cost $75 ($400 - $325 = $75)
class UpgradeOffer
attr_reader :from_membership, :to_membership
delegate :description, to: :to_membership
def self.from(current_membership, target_membership: nil)
# List options that are higher price
options = Membership.active.where("price_cents > ?", current_membership.price_cents)
# But don't let the name match, i.e. no upgrade adult to adult upgrade option
options = options.where.not(name: current_membership.name)
# If requested, only create offers for the target
options = options.where(id: target_membership) if target_membership.present?
# Map matching memberships over the class and return as a list
options.order_by_price.map do |membership|
UpgradeOffer.new(from: current_membership, to: membership)
end
end
def initialize(from:, to:)
@from_membership = from
@to_membership = to
end
def to_s
"Upgrade to #{to_membership} (#{formatted_price})"
end
def hash
"#{to_membership} #{formatted_price}"
end
def link_text
"Upgrade to #{to_membership}"
end
def name
"Upgrade to #{to_membership}"
end
def membership_rights
to_membership.all_rights
end
def link_description
if to_membership.description.present?
"#{to_membership.description}, for #{formatted_price}"
else
"for #{formatted_price}"
end
end
def confirm_text
"This will upgrade your membership to #{to_membership} at a cost of #{formatted_price}. Are you sure?"
end
def formatted_price
price.format(with_currency: true)
end
def price
@price ||= to_membership.price - from_membership.price
end
def offer_for_purchase?
!to_membership.private_membership_option
end
end
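# A minimal usage sketch of the pricing rule described in the comment above
# (membership names and amounts are illustrative, assuming Membership is the
# ActiveRecord model behind the scopes used in .from):
#
#   current = Membership.find_by(name: "Silver Fern")   # paid 325
#   offer   = UpgradeOffer.from(current).first
#   offer.price           # => target price minus what was already paid
#   offer.formatted_price # e.g. "$75.00 USD", depending on the Money locale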
| 29.141304 | 106 | 0.732189 |
034d6662ac5acaa7c535e04da2e9b896d45bb14f
| 823 |
Pod::Spec.new do |s|
s.name = 'YangDefaultPages'
s.version = '2.2.0'
s.summary = 'A short description of YangDefaultPages.'
s.description = <<-DESC
TODO: Add long description of the pod here.
DESC
s.homepage = 'https://github.com/xilankong/YangDefaultPages'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'xilankong' => '[email protected]' }
s.source = { :git => 'https://github.com/xilankong/YangDefaultPages.git', :tag => s.version.to_s }
s.swift_version = '4.1'
s.ios.deployment_target = '8.0'
s.source_files = 'YangDefaultPages/Classes/**/*'
s.resource = 'YangDefaultPages/Assets/**/*'
s.pod_target_xcconfig = { "SWIFT_VERSION" => "4.1" }
s.dependency 'FLAnimatedImage'
end
| 37.409091 | 110 | 0.588092 |
01b2b0075f1457d807f06866209ea91506e9c54d
| 38 |
module Varkon
VERSION = "0.1.0"
end
| 9.5 | 19 | 0.657895 |
d5ec236e2e40ee14656459eb756d9e5cd23c1dae
| 615 |
require 'test_helper'
class StaticPagesControllerTest < ActionDispatch::IntegrationTest
test "should get home" do
get root_path
assert_response :success
assert_select "title", full_title('Home')
end
test "should get help" do
get help_path
assert_response :success
assert_select "title", full_title('Help')
end
test "should get about" do
get about_path
assert_response :success
assert_select "title", full_title('About')
end
test "should get contact" do
get contact_path
assert_response :success
assert_select "title", full_title('Contact')
end
end
| 22.777778 | 65 | 0.721951 |
4a020ebbfb174c4555da856d3ec1870e85ce5a1c
| 7,560 |
# ==========================================
# Unity Project - A Test Framework for C
# Copyright (c) 2007 Mike Karlesky, Mark VanderVoord, Greg Williams
# [Released under MIT License. Please refer to license.txt for details]
# ==========================================
# This script creates all the files with start code necessary for a new module.
# A simple module only requires a source file, header file, and test file.
# Triad modules require a source, header, and test file for each triad type (like model, conductor, and hardware).
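#
# Example invocation (module and path names are illustrative):
#
#   ruby generate_module.rb -p"mch" -s"../src" -t"../test" MyWidget
#
# With the 'mch' pattern defined below, this creates MyWidgetModel,
# MyWidgetConductor and MyWidgetHardware source/header files plus the
# matching TestMyWidget*.c files under ../test.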
require 'rubygems'
require 'fileutils'
HERE = File.expand_path(File.dirname(__FILE__)) + '/'
#help text when requested
HELP_TEXT = [ "\nGENERATE MODULE\n-------- ------",
"\nUsage: ruby generate_module [options] module_name",
" -i\"include\" sets the path to output headers to 'include' (DEFAULT ../src)",
" -s\"../src\" sets the path to output source to '../src' (DEFAULT ../src)",
" -t\"C:/test\" sets the path to output source to 'C:/test' (DEFAULT ../test)",
" -p\"MCH\" sets the output pattern to MCH.",
" dh - driver hardware.",
" dih - driver interrupt hardware.",
" mch - model conductor hardware.",
" mvp - model view presenter.",
" src - just a single source module. (DEFAULT)",
" -d destroy module instead of creating it.",
" -u update subversion too (requires subversion command line)",
" -y\"my.yml\" selects a different yaml config file for module generation",
"" ].join("\n")
#Built in patterns
PATTERNS = { 'src' => {'' => { :inc => [] } },
'dh' => {'Driver' => { :inc => ['%1$sHardware.h'] },
'Hardware' => { :inc => [] }
},
'dih' => {'Driver' => { :inc => ['%1$sHardware.h', '%1$sInterrupt.h'] },
'Interrupt'=> { :inc => ['%1$sHardware.h'] },
'Hardware' => { :inc => [] }
},
'mch' => {'Model' => { :inc => [] },
'Conductor'=> { :inc => ['%1$sModel.h', '%1$sHardware.h'] },
'Hardware' => { :inc => [] }
},
'mvp' => {'Model' => { :inc => [] },
'Presenter'=> { :inc => ['%1$sModel.h', '%1$sView.h'] },
'View' => { :inc => [] }
}
}
#TEMPLATE_TST
TEMPLATE_TST = %q[#include "unity.h"
%2$s#include "%1$s.h"
void setUp(void)
{
}
void tearDown(void)
{
}
void test_%1$s_NeedToImplement(void)
{
TEST_IGNORE();
}
]
#TEMPLATE_SRC
TEMPLATE_SRC = %q[%2$s#include "%1$s.h"
]
#TEMPLATE_INC
TEMPLATE_INC = %q[#ifndef _%3$s_H
#define _%3$s_H%2$s
#endif // _%3$s_H
]
# Parse the command line parameters.
ARGV.each do |arg|
case(arg)
when /^-d/ then @destroy = true
when /^-u/ then @update_svn = true
when /^-p(\w+)/ then @pattern = $1
when /^-s(.+)/ then @path_src = $1
when /^-i(.+)/ then @path_inc = $1
when /^-t(.+)/ then @path_tst = $1
when /^-y(.+)/ then @yaml_config = $1
when /^(\w+)/
raise "ERROR: You can't have more than one Module name specified!" unless @module_name.nil?
@module_name = arg
when /^-(h|-help)/
puts HELP_TEXT
exit
else
raise "ERROR: Unknown option specified '#{arg}'"
end
end
raise "ERROR: You must have a Module name specified! (use option -h for help)" if @module_name.nil?
#load yaml file if one was requested
if @yaml_config
require 'yaml'
cfg = YAML.load_file(HERE + @yaml_config)[:generate_module]
@path_src = cfg[:defaults][:path_src] if @path_src.nil?
@path_inc = cfg[:defaults][:path_inc] if @path_inc.nil?
@path_tst = cfg[:defaults][:path_tst] if @path_tst.nil?
@update_svn = cfg[:defaults][:update_svn] if @update_svn.nil?
@extra_inc = cfg[:includes]
@boilerplates = cfg[:boilerplates]
else
@boilerplates = {}
end
# Create default file paths if none were provided
@path_src = HERE + "../src/" if @path_src.nil?
@path_inc = @path_src if @path_inc.nil?
@path_tst = HERE + "../test/" if @path_tst.nil?
# Ensure each output path ends with a trailing slash
@path_src += '/' unless @path_src.end_with?('/')
@path_inc += '/' unless @path_inc.end_with?('/')
@path_tst += '/' unless @path_tst.end_with?('/')
@pattern = 'src' if @pattern.nil?
@includes = { :src => [], :inc => [], :tst => [] }
@includes.merge!(@extra_inc) unless @extra_inc.nil?
#create triad definition
TRIAD = [ { :ext => '.c', :path => @path_src, :template => TEMPLATE_SRC, :inc => :src, :boilerplate => @boilerplates[:src] },
{ :ext => '.h', :path => @path_inc, :template => TEMPLATE_INC, :inc => :inc, :boilerplate => @boilerplates[:inc] },
{ :ext => '.c', :path => @path_tst+'Test', :template => TEMPLATE_TST, :inc => :tst, :boilerplate => @boilerplates[:tst] },
]
#prepare the pattern for use
@patterns = PATTERNS[@pattern.downcase]
raise "ERROR: The design pattern specified isn't one that I recognize!" if @patterns.nil?
# Assemble the path/names of the files we need to work with.
files = []
TRIAD.each do |triad|
@patterns.each_pair do |pattern_file, pattern_traits|
files << {
:path => "#{triad[:path]}#{@module_name}#{pattern_file}#{triad[:ext]}",
:name => "#{@module_name}#{pattern_file}",
:template => triad[:template],
:boilerplate => triad[:boilerplate],
:includes => case(triad[:inc])
when :src then @includes[:src] | pattern_traits[:inc].map{|f| f % [@module_name]}
when :inc then @includes[:inc]
when :tst then @includes[:tst] | pattern_traits[:inc].map{|f| "Mock#{f}"% [@module_name]}
end
}
end
end
# destroy files if that was what was requested
if @destroy
files.each do |filespec|
file = filespec[:path]
if File.exist?(file)
if @update_svn
`svn delete \"#{file}\" --force`
puts "File #{file} deleted and removed from source control"
else
FileUtils.remove(file)
puts "File #{file} deleted"
end
else
puts "File #{file} does not exist so cannot be removed."
end
end
puts "Destroy Complete"
exit
end
#Abort if any module already exists
files.each do |file|
raise "ERROR: File #{file[:name]} already exists. Exiting." if File.exist?(file[:path])
end
# Create Source Modules
files.each_with_index do |file, i|
File.open(file[:path], 'w') do |f|
f.write(file[:boilerplate] % [file[:name]]) unless file[:boilerplate].nil?
f.write(file[:template] % [ file[:name],
file[:includes].map{|f| "#include \"#{f}\"\n"}.join,
file[:name].upcase ]
)
end
if (@update_svn)
`svn add \"#{file[:path]}\"`
if $?.exitstatus == 0
puts "File #{file[:path]} created and added to source control"
else
puts "File #{file[:path]} created but FAILED adding to source control!"
end
else
puts "File #{file[:path]} created"
end
end
puts 'Generate Complete'
| 37.241379 | 134 | 0.525132 |
e9b042a76cda779d72f4e4b2aa27dfe80e5cfe95
| 4,176 |
require 'test_helper'
require 'unit/response_stubs/find_payment_method_stubs'
class FindPaymentMethodTest < Test::Unit::TestCase
include FindPaymentMethodStubs
def setup
@environment = Spreedly::Environment.new("key", "secret")
end
def test_successful_find_card
card = find_using(successful_get_card_response)
assert_kind_of(Spreedly::CreditCard, card)
assert_equal("ROGJFe89QtbJL8QvjaJNMH0UG50", card.token)
assert_equal("[email protected]", card.email)
assert_equal("4445", card.last_four_digits)
assert_equal("411111", card.first_six_digits)
assert_equal('XXXX-XXXX-XXXX-4445', card.number)
assert_equal("<some_attribute>5</some_attribute>", card.data)
assert_equal(1369504152, card.created_at.to_i)
assert_equal(1369508044, card.updated_at.to_i)
assert_equal('master', card.card_type)
assert_equal('Alcatraz', card.first_name)
assert_equal('Smedry', card.last_name)
assert_equal('8', card.month)
assert_equal('2020', card.year)
assert_equal('123 Freedom Street', card.address1)
assert_equal('Apt. 8', card.address2)
assert_equal('Wanaque', card.city)
assert_equal('NJ', card.state)
assert_equal('02124', card.zip)
assert_equal('USA', card.country)
assert_equal('201.344.7712', card.phone_number)
assert_equal('retained', card.storage_state)
end
def test_find_valid_card
card = find_using(successful_get_card_response)
assert card.valid?
assert_equal([], card.errors)
end
def test_find_invalid_card
card = find_using(successful_get_invalid_card_response)
assert !card.valid?
expected_errors = [
{ attribute: "first_name", key: "errors.blank", message: "First name can't be blank" },
{ attribute: "last_name", key: "errors.blank", message: "Last name can't be blank" },
{ attribute: "year", key: "errors.expired", message: "Year is expired" },
{ attribute: "year", key: "errors.invalid", message: "Year is invalid" },
{ attribute: "number", key: "errors.blank", message: "Number can't be blank" }
]
assert_equal(expected_errors, card.errors)
end
def test_successfully_find_sprel
sprel = find_using(successful_get_sprel_response)
assert_kind_of(Spreedly::Sprel, sprel)
assert_equal "RZf8ZQgvmgOfdWaRtAzMLXPSQbk", sprel.token
assert_equal "[email protected]", sprel.email
assert_equal(1366981867, sprel.created_at.to_i)
assert_equal(1366982301, sprel.updated_at.to_i)
assert_equal("Some Pretty Data", sprel.data)
assert_equal("cached", sprel.storage_state)
end
def test_successfully_find_paypal
paypal = find_using(successful_get_paypal_response)
assert_kind_of(Spreedly::Paypal, paypal)
assert_equal "X7DkJT3NUMNMJ0ZVvRMJBEyUe9B", paypal.token
assert_equal "[email protected]", paypal.email
assert_equal(1375288019, paypal.created_at.to_i)
assert_equal(1375288046, paypal.updated_at.to_i)
assert_equal("", paypal.data)
assert_equal("retained", paypal.storage_state)
end
def test_successfully_find_bank_account
b = find_using(successful_get_bank_account_response)
assert_kind_of(Spreedly::BankAccount, b)
assert_equal "seeQDV0jwJwFa1FUUsph6kPMTj", b.token
assert_equal "[email protected]", b.email
assert_equal(1376673633, b.created_at.to_i)
assert_equal(1376673633, b.updated_at.to_i)
assert_equal("GeekyNote", b.data)
assert_equal("cached", b.storage_state)
assert_equal("Daniel", b.first_name)
assert_equal("Waterhouse", b.last_name)
assert_equal("Daniel Waterhouse", b.full_name)
assert_equal("First Bank of Crypto", b.bank_name)
assert_equal("checking", b.account_type)
assert_equal("personal", b.account_holder_type)
assert_equal("021", b.routing_number_display_digits)
assert_equal("4321", b.account_number_display_digits)
assert_equal("021*", b.routing_number)
assert_equal("*4321", b.account_number)
assert_equal([], b.errors)
end
private
def find_using(response)
@environment.stubs(:raw_ssl_request).returns(response)
@environment.find_payment_method("IgnoredTokenSinceResponseIsStubbed")
end
end
| 36.313043 | 93 | 0.740661 |
ed2349975eead4825e3f8b7d96cc193d236d1f04
| 8,035 |
require 'test_helper'
module Downloads
class PixivTest < ActiveSupport::TestCase
def assert_downloaded(expected_filesize, source)
tempfile = Tempfile.new("danbooru-test")
download = Downloads::File.new(source, tempfile.path)
assert_nothing_raised(Downloads::File::Error) do
download.download!
end
assert_equal(expected_filesize, tempfile.size, "Tested source URL: #{source}")
end
def assert_rewritten(expected_source, test_source)
tempfile = Tempfile.new("danbooru-test")
download = Downloads::File.new(test_source, tempfile.path)
rewritten_source, headers, _ = download.before_download(test_source, {}, {})
assert_equal(expected_source, rewritten_source, "Tested source URL: #{test_source}")
end
def assert_not_rewritten(source)
assert_rewritten(source, source)
end
context "An ugoira site for pixiv" do
setup do
@tempfile = Tempfile.new("danbooru-test")
@download = Downloads::File.new("http://www.pixiv.net/member_illust.php?mode=medium&illust_id=62247364", @tempfile.path)
@download.download!
end
teardown do
@tempfile.unlink
end
should "capture the data" do
assert_equal("https://i1.pixiv.net/img-zip-ugoira/img/2017/04/04/08/57/38/62247364_ugoira1920x1080.zip", @download.source)
assert_equal([{"file"=>"000000.jpg", "delay"=>125}, {"file"=>"000001.jpg", "delay"=>125}], @download.data[:ugoira_frame_data])
end
end
context "in all cases" do
# Test an old illustration (one uploaded before 2014-09-16). New
# /img-original/ and /img-master/ URLs currently don't work for images
# uploaded before this date. Only old /imgXX/img/username/ URLs work.
context "downloading an old PNG illustration" do
setup do
@medium_page = "http://www.pixiv.net/member_illust.php?mode=medium&illust_id=14901720"
@big_page = "http://www.pixiv.net/member_illust.php?mode=big&illust_id=14901720"
@new_small_thumbnail = "http://i1.pixiv.net/c/150x150/img-master/img/2010/11/30/08/39/58/14901720_p0_master1200.jpg"
@new_medium_thumbnail = "http://i1.pixiv.net/c/600x600/img-master/img/2010/11/30/08/39/58/14901720_p0_master1200.jpg"
@new_full_size_image = "http://i1.pixiv.net/img-original/img/2010/11/30/08/39/58/14901720_p0.png"
@file_size = 1_083
end
should "work when using new URLs" do
# Don't know the actual file size of the thumbnails since they don't work.
assert_downloaded(1083, @new_small_thumbnail)
assert_downloaded(1083, @new_medium_thumbnail)
assert_downloaded(@file_size, @new_full_size_image)
end
end
# Test a new illustration (one uploaded after 2014-09-30). New illustrations
# must use /img-original/ for full size URLs. Old /imgXX/img/username/ style URLs
# don't work for images uploaded after this date.
context "downloading a new PNG illustration" do
setup do
@medium_page = "http://www.pixiv.net/member_illust.php?mode=medium&illust_id=62247350"
@medium_thumbnail = "https://i.pximg.net/c/600x600/img-master/img/2017/04/04/08/54/15/62247350_p0_master1200.jpg"
@full_size_image = "https://i.pximg.net/img-original/img/2017/04/04/08/54/15/62247350_p0.png"
@file_size = 16275
end
should "download the full size image" do
assert_not_rewritten(@full_size_image)
assert_downloaded(@file_size, @full_size_image)
end
should "download the full size image instead of the HTML page" do
assert_rewritten(@full_size_image, @medium_page)
assert_downloaded(@file_size, @medium_page)
end
should "download the full size image instead of the thumbnail" do
assert_rewritten(@full_size_image, @medium_thumbnail)
assert_downloaded(@file_size, @medium_thumbnail)
end
end
context "downloading a new manga image" do
setup do
@medium_page = "http://www.pixiv.net/member_illust.php?mode=medium&illust_id=46304614"
@manga_page = "http://www.pixiv.net/member_illust.php?mode=manga&illust_id=46304614"
@manga_big_p1_page = "http://www.pixiv.net/member_illust.php?mode=manga_big&illust_id=46304614&page=1"
@p0_large_thumbnail = "https://i.pximg.net/c/1200x1200/img-master/img/2014/10/02/14/21/39/46304614_p0_master1200.jpg"
@p1_large_thumbnail = "https://i.pximg.net/c/1200x1200/img-master/img/2014/10/02/14/21/39/46304614_p1_master1200.jpg"
@p0_full_size_image = "https://i.pximg.net/img-original/img/2014/10/02/14/21/39/46304614_p0.gif"
@p0_full_size_image_3 = "https://i.pximg.net/img-original/img/2014/10/02/14/21/39/46304614_p0.gif"
@p1_full_size_image = "https://i.pximg.net/img-original/img/2014/10/02/14/21/39/46304614_p1.gif"
@p1_full_size_image_3 = "https://i.pximg.net/img-original/img/2014/10/02/14/21/39/46304614_p1.gif"
@p0_file_size = 61_131
@p1_file_size = 46_818
end
should "download the full size image" do
assert_not_rewritten(@p0_full_size_image)
assert_not_rewritten(@p1_full_size_image)
assert_downloaded(@p0_file_size, @p0_full_size_image)
assert_downloaded(@p1_file_size, @p1_full_size_image)
end
should "download the full size image instead of the HTML page" do
assert_rewritten(@p0_full_size_image_3, @medium_page)
assert_rewritten(@p0_full_size_image_3, @manga_page)
assert_rewritten(@p1_full_size_image_3, @manga_big_p1_page)
assert_downloaded(@p0_file_size, @medium_page)
assert_downloaded(@p0_file_size, @manga_page)
assert_downloaded(@p1_file_size, @manga_big_p1_page)
end
should "download the full size image instead of the thumbnail" do
assert_rewritten(@p0_full_size_image_3, @p0_large_thumbnail)
assert_rewritten(@p1_full_size_image_3, @p1_large_thumbnail)
assert_downloaded(@p0_file_size, @p0_large_thumbnail)
assert_downloaded(@p1_file_size, @p1_large_thumbnail)
end
end
context "downloading a ugoira" do
setup do
@medium_page = "http://www.pixiv.net/member_illust.php?mode=medium&illust_id=62247364"
@zip_file = "https://i1.pixiv.net/img-zip-ugoira/img/2017/04/04/08/57/38/62247364_ugoira1920x1080.zip"
@file_size = 2804
end
should "download the zip file instead of the HTML page" do
assert_rewritten(@zip_file, @medium_page)
assert_downloaded(@file_size, @medium_page)
end
should "download the zip file" do
assert_not_rewritten(@zip_file)
assert_downloaded(@file_size, @zip_file)
end
end
context "downloading a profile image" do
should "download new profile images" do
@file_url = "http://i2.pixiv.net/img130/profile/minono_aki/8733472.jpg"
@file_size = 23266
assert_not_rewritten(@file_url)
assert_downloaded(@file_size, @file_url)
end
end
context "downloading a background image" do
should "download the image" do
@file_url = "http://i1.pixiv.net/background/img/2016/05/17/12/05/48/2074388_d4ac52034f7ca0af3e083d59fde7e97f.jpg"
@file_size = 386_500
assert_not_rewritten(@file_url)
assert_downloaded(@file_size, @file_url)
end
end
context "downloading a novel image" do
should "download new novel images" do
@file_url = "http://i1.pixiv.net/novel-cover-original/img/2016/11/03/20/10/58/7436075_f75af69f3eacd1656d3733c72aa959cf.jpg"
@file_size = 316_133
assert_not_rewritten(@file_url)
assert_downloaded(@file_size, @file_url)
end
end
end
end
end
| 42.068063 | 134 | 0.673304 |
79b642d6bda45a4d2abdd9fdbf05e4b2294bcd52
| 2,635 |
require 'test_helper'
class CcStatementsControllerTest < ActionController::TestCase
setup do
@user = users :User_1
sign_in @user
@cc_statement = cc_statements(:CcStatement_1)
@instrument = instruments(:Instrument_1)
end
test "should get index" do
get :index, format: :json, params: { instrument_id: @instrument.id }
assert_response :success
assert_not_nil assigns(:collection)
end
test "should create cc_statement" do
assert_difference('CcStatement.count') do
post :create, format: :json,
params: {
cc_statement: {
instrument_id: @instrument.id,
literal: @cc_statement.literal,
type: 'statement',
parent: {
id: @instrument.cc_sequences.first.id,
type: 'sequence'
}
},
instrument_id: @instrument.id
}
end
assert_response :success
end
test "should show cc_statement" do
get :show, format: :json, params: { instrument_id: @instrument.id, id: @cc_statement }
assert_response :success
end
test "should update cc_statement" do
patch :update, format: :json, params: { instrument_id: @instrument.id, id: @cc_statement, cc_statement: {literal: @cc_statement.literal, parent: {
id: @instrument.cc_sequences.first.id,
type: 'sequence'
}} }
assert_response :success
assert_equal @cc_statement.reload.parent, @instrument.cc_sequences.first
end
test "should update cc_statement when parent type matches class name" do
patch :update, format: :json, params: { instrument_id: @instrument.id, id: @cc_statement, cc_statement: {literal: @cc_statement.literal, parent: {
id: @instrument.cc_sequences.first.id,
type: 'CcSequence'
}} }
assert_response :success
assert_equal @cc_statement.reload.parent, @instrument.cc_sequences.first
end
test "should update cc_statement when parent is found outside of the cc_statement hash and parent type matches class name" do
patch :update, format: :json, params: { instrument_id: @instrument.id, id: @cc_statement, cc_statement: {literal: @cc_statement.literal}, parent: {
id: @instrument.cc_sequences.first.id,
type: 'CcSequence'
}}
assert_response :success
assert_equal @cc_statement.reload.parent, @instrument.cc_sequences.first
end
test "should destroy cc_statement" do
assert_difference('CcStatement.count', -1) do
delete :destroy, format: :json, params: { instrument_id: @instrument.id, id: @cc_statement.id }
end
assert_response :success
end
end
| 32.9375 | 151 | 0.674763 |
1d2d84f5eba2a61a01806bff82ea9300a3d79395
| 486 |
# Source: http://andreapavoni.com/blog/2013/4/create-recursive-openstruct-from-a-ruby-hash
require 'ostruct'
module Utils
class DeepStruct < OpenStruct
def initialize(hash=nil)
@table = {}
@hash_table = {}
if hash
hash.each do |k,v|
@table[k.to_sym] = (v.is_a?(Hash) ? self.class.new(v) : v)
@hash_table[k.to_sym] = v
new_ostruct_member(k)
end
end
end
def to_h
@hash_table
end
end
end
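# A small usage example for DeepStruct (values are illustrative):
#
#   config = Utils::DeepStruct.new("db" => { "host" => "localhost", "port" => 5432 })
#   config.db.host # => "localhost"
#   config.to_h    # => { db: { "host" => "localhost", "port" => 5432 } }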
| 18.692308 | 90 | 0.580247 |
bfc05f7195967bfd5c04b8df0b13b6c54ab925b0
| 11,171 |
require "active_support/core_ext/string/access"
module ActiveMerchant
module Billing
class DataCashGateway < Gateway
self.default_currency = 'GBP'
self.supported_countries = ['GB']
self.supported_cardtypes = [ :visa, :master, :american_express, :discover, :diners_club, :jcb, :maestro, :switch, :solo, :laser ]
self.homepage_url = 'http://www.datacash.com/'
self.display_name = 'DataCash'
self.test_url = 'https://testserver.datacash.com/Transaction'
self.live_url = 'https://mars.transaction.datacash.com/Transaction'
AUTH_TYPE = 'auth'
CANCEL_TYPE = 'cancel'
FULFILL_TYPE = 'fulfill'
PRE_TYPE = 'pre'
REFUND_TYPE = 'refund'
TRANSACTION_REFUND_TYPE = 'txn_refund'
POLICY_ACCEPT = 'accept'
POLICY_REJECT = 'reject'
def initialize(options = {})
requires!(options, :login, :password)
super
end
def purchase(money, authorization_or_credit_card, options = {})
requires!(options, :order_id)
if authorization_or_credit_card.is_a?(String)
request = build_purchase_or_authorization_request_with_continuous_authority_reference_request(AUTH_TYPE, money, authorization_or_credit_card, options)
else
request = build_purchase_or_authorization_request_with_credit_card_request(AUTH_TYPE, money, authorization_or_credit_card, options)
end
commit(request)
end
def authorize(money, authorization_or_credit_card, options = {})
requires!(options, :order_id)
if authorization_or_credit_card.is_a?(String)
request = build_purchase_or_authorization_request_with_continuous_authority_reference_request(AUTH_TYPE, money, authorization_or_credit_card, options)
else
request = build_purchase_or_authorization_request_with_credit_card_request(PRE_TYPE, money, authorization_or_credit_card, options)
end
commit(request)
end
def capture(money, authorization, options = {})
commit(build_void_or_capture_request(FULFILL_TYPE, money, authorization, options))
end
def void(authorization, options = {})
request = build_void_or_capture_request(CANCEL_TYPE, nil, authorization, options)
commit(request)
end
def credit(money, reference_or_credit_card, options = {})
if reference_or_credit_card.is_a?(String)
ActiveMerchant.deprecated CREDIT_DEPRECATION_MESSAGE
refund(money, reference_or_credit_card)
else
request = build_credit_request(money, reference_or_credit_card, options)
commit(request)
end
end
def refund(money, reference, options = {})
commit(build_transaction_refund_request(money, reference))
end
private
def build_void_or_capture_request(type, money, authorization, options)
parsed_authorization = parse_authorization_string(authorization)
xml = Builder::XmlMarkup.new :indent => 2
xml.instruct!
xml.tag! :Request, :version => '2' do
add_authentication(xml)
xml.tag! :Transaction do
xml.tag! :HistoricTxn do
xml.tag! :reference, parsed_authorization[:reference]
xml.tag! :authcode, parsed_authorization[:auth_code]
xml.tag! :method, type
end
if money
xml.tag! :TxnDetails do
xml.tag! :merchantreference, format_reference_number(options[:order_id])
xml.tag! :amount, amount(money), :currency => options[:currency] || currency(money)
xml.tag! :capturemethod, 'ecomm'
end
end
end
end
xml.target!
end
def build_purchase_or_authorization_request_with_credit_card_request(type, money, credit_card, options)
xml = Builder::XmlMarkup.new :indent => 2
xml.instruct!
xml.tag! :Request, :version => '2' do
add_authentication(xml)
xml.tag! :Transaction do
if options[:set_up_continuous_authority]
xml.tag! :ContAuthTxn, :type => 'setup'
end
xml.tag! :CardTxn do
xml.tag! :method, type
add_credit_card(xml, credit_card, options[:billing_address])
end
xml.tag! :TxnDetails do
xml.tag! :merchantreference, format_reference_number(options[:order_id])
xml.tag! :amount, amount(money), :currency => options[:currency] || currency(money)
xml.tag! :capturemethod, 'ecomm'
end
end
end
xml.target!
end
def build_purchase_or_authorization_request_with_continuous_authority_reference_request(type, money, authorization, options)
parsed_authorization = parse_authorization_string(authorization)
raise ArgumentError, "The continuous authority reference is required for continuous authority transactions" if parsed_authorization[:ca_reference].blank?
xml = Builder::XmlMarkup.new :indent => 2
xml.instruct!
xml.tag! :Request, :version => '2' do
add_authentication(xml)
xml.tag! :Transaction do
xml.tag! :ContAuthTxn, :type => 'historic'
xml.tag! :HistoricTxn do
xml.tag! :reference, parsed_authorization[:ca_reference]
xml.tag! :method, type
end
xml.tag! :TxnDetails do
xml.tag! :merchantreference, format_reference_number(options[:order_id])
xml.tag! :amount, amount(money), :currency => options[:currency] || currency(money)
xml.tag! :capturemethod, 'cont_auth'
end
end
end
xml.target!
end
def build_transaction_refund_request(money, authorization)
parsed_authorization = parse_authorization_string(authorization)
xml = Builder::XmlMarkup.new :indent => 2
xml.instruct!
xml.tag! :Request, :version => '2' do
add_authentication(xml)
xml.tag! :Transaction do
xml.tag! :HistoricTxn do
xml.tag! :reference, parsed_authorization[:reference]
xml.tag! :method, TRANSACTION_REFUND_TYPE
end
unless money.nil?
xml.tag! :TxnDetails do
xml.tag! :amount, amount(money)
end
end
end
end
xml.target!
end
def build_credit_request(money, credit_card, options)
xml = Builder::XmlMarkup.new :indent => 2
xml.instruct!
xml.tag! :Request, :version => '2' do
add_authentication(xml)
xml.tag! :Transaction do
xml.tag! :CardTxn do
xml.tag! :method, REFUND_TYPE
add_credit_card(xml, credit_card, options[:billing_address])
end
xml.tag! :TxnDetails do
xml.tag! :merchantreference, format_reference_number(options[:order_id])
xml.tag! :amount, amount(money)
end
end
end
xml.target!
end
def add_authentication(xml)
xml.tag! :Authentication do
xml.tag! :client, @options[:login]
xml.tag! :password, @options[:password]
end
end
def add_credit_card(xml, credit_card, address)
xml.tag! :Card do
# DataCash calls the CC number 'pan'
xml.tag! :pan, credit_card.number
xml.tag! :expirydate, format_date(credit_card.month, credit_card.year)
# optional values - for Solo etc
if [ 'switch', 'solo' ].include?(card_brand(credit_card).to_s)
xml.tag! :issuenumber, credit_card.issue_number unless credit_card.issue_number.blank?
if !credit_card.start_month.blank? && !credit_card.start_year.blank?
xml.tag! :startdate, format_date(credit_card.start_month, credit_card.start_year)
end
end
xml.tag! :Cv2Avs do
xml.tag! :cv2, credit_card.verification_value if credit_card.verification_value?
if address
xml.tag! :street_address1, address[:address1] unless address[:address1].blank?
xml.tag! :street_address2, address[:address2] unless address[:address2].blank?
xml.tag! :street_address3, address[:address3] unless address[:address3].blank?
xml.tag! :street_address4, address[:address4] unless address[:address4].blank?
xml.tag! :postcode, address[:zip] unless address[:zip].blank?
end
# The ExtendedPolicy defines what to do when the passed CV2/AVS data
# matches, or does not match, the issuer's records.
#
# All of the following elements MUST be present for the
# xml to be valid (or you can drop the ExtendedPolicy and use
# a predefined one).
xml.tag! :ExtendedPolicy do
xml.tag! :cv2_policy,
:notprovided => POLICY_REJECT,
:notchecked => POLICY_REJECT,
:matched => POLICY_ACCEPT,
:notmatched => POLICY_REJECT,
:partialmatch => POLICY_REJECT
xml.tag! :postcode_policy,
:notprovided => POLICY_ACCEPT,
:notchecked => POLICY_ACCEPT,
:matched => POLICY_ACCEPT,
:notmatched => POLICY_REJECT,
:partialmatch => POLICY_ACCEPT
xml.tag! :address_policy,
:notprovided => POLICY_ACCEPT,
:notchecked => POLICY_ACCEPT,
:matched => POLICY_ACCEPT,
:notmatched => POLICY_REJECT,
:partialmatch => POLICY_ACCEPT
end
end
end
end
def commit(request)
response = parse(ssl_post(test? ? self.test_url : self.live_url, request))
Response.new(response[:status] == '1', response[:reason], response,
:test => test?,
:authorization => "#{response[:datacash_reference]};#{response[:authcode]};#{response[:ca_reference]}"
)
end
def format_date(month, year)
"#{format(month,:two_digits)}/#{format(year, :two_digits)}"
end
def parse(body)
response = {}
xml = REXML::Document.new(body)
root = REXML::XPath.first(xml, "//Response")
root.elements.to_a.each do |node|
parse_element(response, node)
end
response
end
def parse_element(response, node)
if node.has_elements?
node.elements.each{|e| parse_element(response, e) }
else
response[node.name.underscore.to_sym] = node.text
end
end
def format_reference_number(number)
number.to_s.gsub(/[^A-Za-z0-9]/, '').rjust(6, "0").first(30)
end
def parse_authorization_string(authorization)
reference, auth_code, ca_reference = authorization.to_s.split(';')
{:reference => reference, :auth_code => auth_code, :ca_reference => ca_reference}
end
end
end
end
| 36.387622 | 161 | 0.609883 |
5d3bdc7efdce06b79545f218eb615e39439423e7
| 292 |
#!/usr/bin/env ruby
# frozen_string_literal: false
require 'tk'
require 'tkextlib/iwidgets'
Tk::Iwidgets::Pushbutton.new(:text=>'Hello',
:command=>proc{puts 'Hello World'},
:defaultring=>true).pack(:padx=>10, :pady=>10)
Tk.mainloop
| 26.545455 | 75 | 0.578767 |
1d13e03ec17d11015b4ab5c94c1ac31acf1e935e
| 937 |
# frozen_string_literal: true
RSpec.describe Bundler::Source::Path do
before do
allow(Bundler).to receive(:root) { Pathname.new("root") }
end
describe "#eql?" do
subject { described_class.new("path" => "gems/a") }
context "with two equivalent relative paths from different roots" do
let(:a_gem_opts) { { "path" => "../gems/a", "root_path" => Bundler.root.join("nested") } }
let(:a_gem) { described_class.new a_gem_opts }
it "returns true" do
expect(subject).to eq a_gem
end
end
context "with the same (but not equivalent) relative path from different roots" do
subject { described_class.new("path" => "gems/a") }
let(:a_gem_opts) { { "path" => "gems/a", "root_path" => Bundler.root.join("nested") } }
let(:a_gem) { described_class.new a_gem_opts }
it "returns false" do
expect(subject).to_not eq a_gem
end
end
end
end
| 29.28125 | 96 | 0.624333 |
337203ddcab7fc9b23942032b3c102b0211fa238
| 766 |
#
# Author:: Bryan McLellan ([email protected])
# Copyright:: Copyright (c) 2009 Bryan McLellan
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
provides "hostname", "fqdn"
hostname from("hostname -s")
fqdn from("hostname")
| 33.304348 | 74 | 0.748042 |
215ee22102ea049b33f85bc7b903153e79079813
| 232 |
json.extract! image, :id, :caption, :position, :creator_id, :created_at, :updated_at
json.url image_url(image, format: :json)
json.content_url image_content_url(image)
json.user_roles image.user_roles unless image.user_roles.empty?
| 46.4 | 84 | 0.801724 |
39ee5ab126abe8905f8947e5767c2a28285617f7
| 10,751 |
# frozen_string_literal: true
module Specify
module Service
# A StubGenerator creates Specify::Model::CollectionObject stub records
# (mostly empty records with a minimum of information) in a collection in a
# Specify::Database.
class StubGenerator < Service
# An existing Specify::Model::Accession.
attr_reader :accession
# An existing Specify::Model::Agent.
attr_reader :cataloger
# An existing Specify::Model::GeographicName.
attr_reader :collecting_geography
# An existing Specify::Model::Locality.
attr_reader :collecting_locality
# String; the name for the #record_set that will be created for the
# generated Specify::Model::CollectionObject records.
attr_accessor :dataset_name
# String; the name of the Specify::Model::Locality that will be created if
# no existing Specify::Model::Locality is passed via #collecting_data=.
attr_accessor :default_locality_name
# Integer. See Specify::Model::Preparation#count.
attr_reader :preparation_count
# An existing Specify::Model::PreparationType.
attr_reader :preparation_type
# A Specify::Model::RecordSet.
attr_reader :record_set
# An existing Specify::Model::Taxon.
attr_reader :taxon
# Returns a new StubGenerator with attributes from a YAML +file+.
#
# +file+ should have the structure:
# ---
# :stub_generator:
# :host: <hostname>
# :database: <database name>
# :collection: <collection name>
# :config: <database configuration file>
# dataset_name: <record set name>
# accession: <accession number>
# cataloger: <specify user name>
# collecting_data:
# <1st rank>: <name>
# <2nd rank>: <name>
# <3rd rank>: <name>
# :locality: <name>
# default_locality_name: <name>
# determination:
# <1st rank>: <name>
# <2nd rank>: <name>
# <3rd rank>: <name>
# preparation:
# :type: <preparation type>
# :count: <preparation count>
#
# Items prefixed with +:+ in the example above will be deserialized as
# Ruby symbols and need to be prefixed with +:+ in the file. Leave out any
# items that are not to be set. The section +:stub_generator:+ is
# required.
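#
# A minimal usage sketch (the file name and count are illustrative):
#
#   stubs = StubGenerator.load_yaml 'stub_config.yml'
#   stubs.create 10
#   stubs.generated # => the newly created collection objects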
def self.load_yaml(file)
unwrap Psych.load_file(file)
end
# Returns a new StubGenerator with attributes from +hash+.
#
# +hash+ should have the structure
# {
# stub_generator: {
# host: <hostname>,
# database: <database name>,
# collection: <collection name>,
# config: <database configuration file>
# },
# dataset_name => <record set name>,
# accession => <accession number>,
# cataloger => <specify user name>,
# collecting_data => {
# <1st rank> => <name>,
# <2nd rank> => <name>,
# <3rd rank> => <name>,
# locality: <name>
# },
# default_locality_name => <name>,
# determination => {
# <1st rank> => <name>,
# <2nd rank> => <name>,
# <3rd rank> => <name>
# },
# preparation => {
# type: <preparation type>,
# count: <preparation count>
# }
# }
# Items that are symbols in the example above need to be symbols in the
# +hash+ passed. Leave out any items that are not to be set. The key
# +:stub_generator+ is required.
def self.unwrap(hash)
new hash.delete(:stub_generator) do |stubs|
hash.each do |key, value|
setter = (key + '=').to_sym
puts "#{setter}#{value}"
next unless value
stubs.public_send(setter, value)
end
end
end
# Returns a new StubGenerator.
def initialize(collection:,
config:,
host:,
database:,
specify_user: nil)
super
@accession = nil
@cataloger = agent
@collecting_geography = nil
@collecting_locality = nil
@default_locality_name = 'not cataloged, see label'
@dataset_name = "stub record set #{Time.now}"
@preparation_type = nil
@preparation_count = nil
@record_set = nil
@taxon = nil
yield(self) if block_given?
end
# Sets #accession to the Specify::Model::Accession with +accession_number+
def accession=(accession_number)
@accession = division.accessions_dataset
.first AccessionNumber: accession_number
raise ACCESSION_NOT_FOUND_ERROR + accession_number unless accession
end
# Sets #cataloger to the Specify::Model::Agent representing the
# Specify::Model::User with +user_name+ in #division.
def cataloger=(user_name)
cataloger_user = Model::User.first(Name: user_name)
raise USER_NOT_FOUND_ERROR + user_name unless cataloger_user
@cataloger = cataloger_user.agents_dataset.first division: division
end
# Sets #collecting_geography and #collecting_locality.
#
# +vals+ is a Hash with the structure <tt>{ 'rank' => 'name',
# :locality => 'name' }</tt> where +rank+ is an existing
# Specify::Model::AdministrativeDivision#name, +name+ an existing
# Specify::Model::GeographicName#name with that rank. +:locality+ is not a
# geographic rank and must be given as a symbol. When traversing a tree
# hierarchy, give key-value pairs in descending order of rank:
# { 'Country' => 'United States',
# 'State' => 'Kansas',
# 'County' => 'Douglas County',
# :locality => 'Freestate Brewery' }
def collecting_data=(vals)
locality = vals.delete :locality
unless vals.empty?
@collecting_geography = geography.search_tree(vals)
unless @collecting_geography
missing_geo = vals.values.join(', ')
raise GEOGRAPHY_NOT_FOUND_ERROR + missing_geo
end
end
return unless locality
@collecting_locality = find_locality locality
raise LOCALITY_NOT_FOUND_ERROR + locality unless collecting_locality
end
# Returns #collecting_locality or #default_locality if
# #collecting_locality is +nil+ but #collecting_geography is not;
# will create the Specify::Model::Locality for #default_locality_name
# if it does not exist in #localities.
def collecting_locality!
return collecting_locality if collecting_locality
return unless collecting_geography
default_locality!
end
# Creates +count+ records for Specify::Model::CollectionObject with the
# attributes of +self+.
def create(count)
@record_set = collection.add_record_set Name: dataset_name,
user: cataloger.user
count.times do
stub = create_stub
@record_set.add_record_set_item collection_object: stub
end
end
# Returns the Specify::Model::Locality for #default_locality_name if it
# exists.
def default_locality
find_locality default_locality_name
end
# Returns the Specify::Model::Locality for #default_locality_name.
# Creates the record if it does not exist in #localities.
def default_locality!
return default_locality if default_locality
default_locality ||
discipline.add_locality(LocalityName: default_locality_name,
geographic_name: collecting_geography)
end
# Sets #taxon, to which stub records will be determined.
# +vals+ is a Hash with the structure <tt>{ 'rank' => 'name' }</tt> where
# +rank+ is an existing Specify::Model::Rank#name, +name+ an existing
# Specify::Model::Taxon#name with that rank. When traversing a tree
# hierarchy, give key-value pairs in descending order of rank:
# { 'Phylum' => 'Arthropoda',
# 'Class' => 'Trilobita',
# 'Order' => 'Asaphida',
# 'Family' => 'Asaphidae' }
def determination=(vals)
@taxon = taxonomy.search_tree vals
raise TAXON_NOT_FOUND_ERROR + vals.to_s unless taxon
end
# Returns the Specify::Model::Locality for +locality_name+ in #localities.
def find_locality(locality_name)
locality_matches = localities.where LocalityName: locality_name
raise Model::AMBIGUOUS_MATCH_ERROR if locality_matches.count > 1
locality_matches.first
end
# Returns the Specify::Model::CollectionObject records in #record_set
# (the records created by #create).
def generated
record_set&.collection_objects
end
# Returns the Specify::Model::Geography for #discipline.
def geography
discipline.geography
end
# Returns a Sequel::Dataset of Specify::Model::Locality records in
# #collecting_geography or #division if #collecting_geography is +nil+.
def localities
@collecting_geography&.localities_dataset ||
discipline.localities_dataset
end
# Sets #preparation_type and #preparation_count. +type+ must be an
# existing Specify::Model::PreparationType#name. +count+ should be an
# Integer.
#
# Returns an array with the #preparation_type and #preparation_count.
def preparation=(type:, count: nil)
@preparation_type = collection.preparation_types_dataset
.first Name: type
raise PREPTYPE_NOT_FOUND_ERROR + type unless preparation_type
@preparation_count = count
[preparation_type, preparation_count].compact
end
# Returns the Specify::Model::Taxonomy for #discipline.
def taxonomy
discipline.taxonomy
end
private
def create_stub
co = collection.add_collection_object(cataloger: cataloger)
co.accession = accession
co.geo_locate(locality: collecting_locality!) if collecting_locality!
co.identify(taxon: taxon) if taxon
make_preparation(co) if preparation_type
co.save
end
def make_preparation(collection_object)
collection_object.add_preparation collection: collection,
preparation_type: preparation_type,
CountAmt: preparation_count
end
end
end
end
| 36.818493 | 80 | 0.611478 |
1a6048ff507d5333dc0aa8c3f33403f6d9621aa0
| 633 |
module ActiveRecord
class Base
# EOL: the change here is to send along the preload options
def self.preload_associations(instances, associations, preload_options = {})
new_options = {}
if preload_options && preload_options[:select]
new_options[:select] = preload_options[:select].dup
end
ActiveRecord::Associations::Preloader.new(instances, associations, new_options).run
end
# EOL: giving the function to instances too
def preload_associations(associations, preload_options = {})
self.class.preload_associations(self, associations, preload_options)
end
end
end
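# Illustrative call sites for the patched methods above (model, association,
# and select values are hypothetical):
#
#   User.preload_associations(users, [:projects, :comments])
#   user.preload_associations(:comments, select: some_select_options)
#
# The second form relies on the :select pass-through added in this patch.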
| 35.166667 | 89 | 0.720379 |
e8565cdacad46ebf077ac92749b73e943628d503
| 261 |
class CreateCategories < ActiveRecord::Migration[6.0]
def change
create_table :categories do |t|
t.text :name, default: "Custom Spread"
t.text :question_1
t.text :question_2
t.text :question_3
t.timestamps
end
end
end
| 20.076923 | 53 | 0.655172 |
7978c0b0c84961d75de0b8e49baf8e9621ef5091
| 1,771 |
cask '[email protected]' do
version '5.5.0f3,38b4efef76f0'
sha256 'd79c39acd653d35c2e34df4636ccbd957e8f1ca0a137309d9cbd07227d8c447d'
url "http://download.unity3d.com/download_unity/#{version.after_comma}/MacEditorTargetInstaller/UnitySetup-iOS-Support-for-Editor-#{version.before_comma}.pkg"
name 'Unity iOS Build Support'
homepage 'https://unity3d.com/unity/'
depends_on cask: '[email protected]'
pkg "UnitySetup-iOS-Support-for-Editor-#{version.before_comma}.pkg"
preflight do
if File.exist? '/Applications/Unity'
FileUtils.move '/Applications/Unity', '/Applications/Unity.temp'
end
if File.exist? "/Applications/Unity-#{@cask.version.before_comma}"
FileUtils.move "/Applications/Unity-#{@cask.version.before_comma}", '/Applications/Unity'
end
end
postflight do
if File.exist? '/Applications/Unity'
FileUtils.move '/Applications/Unity', "/Applications/Unity-#{@cask.version.before_comma}"
end
if File.exist? '/Applications/Unity.temp'
FileUtils.move '/Applications/Unity.temp', '/Applications/Unity'
end
end
uninstall_preflight do
if File.exist? '/Applications/Unity'
FileUtils.move '/Applications/Unity', '/Applications/Unity.temp'
end
if File.exist? "/Applications/Unity-#{@cask.version.before_comma}"
FileUtils.move "/Applications/Unity-#{@cask.version.before_comma}", '/Applications/Unity'
end
end
uninstall_postflight do
if File.exist? '/Applications/Unity'
FileUtils.move '/Applications/Unity', "/Applications/Unity-#{@cask.version.before_comma}"
end
if File.exist? '/Applications/Unity.temp'
FileUtils.move '/Applications/Unity.temp', '/Applications/Unity'
end
end
uninstall pkgutil: ''
end
| 32.2 | 160 | 0.719368 |
629383284c8ddfb1d2aa8aade17ad06f87979f6b
| 2,043 |
class User < ActiveRecord::Base
# Include default devise modules. Others available are:
# :confirmable, :lockable, and :timeoutable.
devise :database_authenticatable, :registerable,
:recoverable, :rememberable, :trackable, :validatable
devise :omniauthable, :omniauth_providers => [:facebook]
has_many :projects
has_many :comments
has_many :notes
has_many :tags
has_many :user_projects, foreign_key: "collaborator_id"
has_many :projects, foreign_key: "owner_id"
has_many :collaboration_projects, through: :user_projects
has_many :assigned_tasks, through: :user_tasks
has_many :user_tasks, foreign_key: "assigned_user_id"
has_many :tasks, foreign_key: "owner_id"
enum role: [:user, :admin]
has_attached_file :avatar, default_url: ':style/default.png', styles: { thumb: "100x100>", icon: "30X30>" }
validates_attachment_content_type :avatar, content_type: /\Aimage\/.*\Z/
def self.from_omniauth(auth)
where(provider: auth.provider, uid: auth.uid).first_or_create do |user|
user.email = auth.info.email
user.name = auth.info.name
user.password = Devise.friendly_token[0,20]
end
end
def all_tasks
(self.tasks + self.assigned_tasks).uniq
end
def all_projects
(self.projects + self.collaboration_projects).uniq
end
def all_active_projects
self.projects.active + self.collaboration_projects
end
def active_projects
self.projects.active
end
def complete_projects
self.projects.complete
end
def overdue_projects
self.projects.overdue
end
def active_tasks
self.tasks.active
end
def complete_tasks
self.tasks.complete
end
def overdue_tasks
self.tasks.overdue
end
def active_assigned_tasks
self.assigned_tasks.active
end
def complete_assigned_tasks
self.assigned_tasks.complete
end
def overdue_assigned_tasks
self.assigned_tasks.overdue
end
def collaboration_tags
self.collaboration_projects.collect {|task| task.tags.collect {|tag| tag}}
end
end
| 23.215909 | 109 | 0.733235 |
d52b1e321991d7f222c3686205cdcbd839556f03
| 3,216 |
#!/usr/bin/ruby
# encoding: utf-8
#
# $File: create-moodle-list.rb
#
# $Description:
#    Starting from a CSV file passed as a parameter, containing the data
#    of the computer-science students (as exported by the PinceEkade program),
#    this script creates one TXT file per group, ready to be loaded into
#    Moodle.
#    Each per-group TXT file also lets the group's tutor hand those
#    account details to their students.
=begin
Input format:
grupo
clave(dni)
nombre
apellido1
apellido2
email
Output format:
username, password, firstname, lastname, email, city, country
juanb, secreto, Juan, Benítez, [email protected], DAW, Spain
=end
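# Example run (the CSV file name is illustrative):
#
#   ruby create-moodle-list.rb alumnos.csv
#
# writes one usuarios_<group>.txt file per group, ready to be uploaded to
# Moodle and handed to that group's tutor.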
class ListPeople
def initialize
@debug=false
@verbose=true
@outputfilename='usuarios'
@output={}
@change=[ ['á','a'], ['é','e'], ['í','i'], ['ó','o'], ['ú','u'], ['Á','a'] , ['É','e'], ['Í','i'], ['Ó','o'], ['Ú','u'], ['Ñ','n'], ['ñ','n'], [' ',''] ]
end
def create_list_for(pArg)
if pArg=='--help' then
show_help
else
@filename=pArg
process
end
end
def process
verbose "\n[INFO] Processing..."
if !File.exist? @filename then
puts "[ERROR] <#{@filename}> doesn't exist!\n"
raise "[ERROR] <#{@filename}> doesn't exist!\n"
end
@file=File.open(@filename,'r')
@[email protected]
@data.each do |line|
items=line.split(",")
#items=line.force_encoding("iso-8859-1").split(",")
grupo=items[0].downcase
dni=items[1]
nombre=items[2]
apellido1=items[3]
apellido2=items[4]
apellidos=(apellido1+' '+apellido2).strip
email=items[5]
#username
u=nombre[0..2]+apellido1.gsub(' ','')[0..2]
u=u+(apellido2.gsub(' ','')[0..2]||apellido1.gsub(' ','')[0..2])
username=u.downcase
sanitize!(username)
#@change.each { |i| username.gsub!(i[0],i[1]) }
email.gsub!("\n",'')
if email.size<2
email="#{nombre}.#{apellido1}#{apellido2}@iespuertodelacruz.es"
sanitize!(email.downcase!)
end
dni="123456" if dni.size<2
if @output[grupo.to_sym].nil? then
f=File.open("#{@outputfilename}_#{grupo}.txt",'w')
@output[grupo.to_sym]=f
#f.write("username;password;firstname;lastname;email;city;country\n")
f.write("username;password;firstname;lastname;email;city\n")
end
#username, password, firstname, lastname, email, city, country
verbose("#{username};#{dni};#{nombre};#{apellidos};#{email};#{grupo}")
@output[grupo.to_sym].write("#{username};#{dni};#{nombre};#{apellidos};#{email};#{grupo}\n")
end
@file.close
@output.each_value { |i| i.close }
end
private
def execute_command(lsCommand)
verbose "(*) Executing: #{lsCommand}"
system(lsCommand) if !@debug
end
def sanitize!(text)
@change.each { |i| text.gsub!(i[0],i[1]) }
text
end
def show_help
puts "Uso:\n"
puts " #{$0} FICHERO.csv"
puts "\nFormato de entrada:"
puts " grupo, clave(dni), nombre, apellido1, apellido2, email"
puts "\nFormato de salida:"
puts " username; password; firstname; lastname; email; city"
puts " manrodper; clave; Manuel; Rodríguez Pérez; [email protected]; DAW"
end
def verbose(lsText)
if @verbose then
puts lsText
end
end
end
i = ListPeople.new
i.create_list_for (ARGV.first||'--help')
| 24.549618 | 155 | 0.641169 |
62ad3d6f8f9f1a9826d1030ef6386f452049986a
| 75 |
# frozen_string_literal: true
module Elasticfusion
VERSION = '1.0.0'
end
| 15 | 29 | 0.76 |
08d1ab41bc5c7ade99250f8d32a01eb282a96764
| 1,467 |
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2020_12_26_074127) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "users", force: :cascade do |t|
t.string "first_name"
t.string "last_name"
t.string "email"
t.string "phone"
t.boolean "admin", default: false
t.boolean "manager", default: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.string "encrypted_password", limit: 128
t.string "confirmation_token", limit: 128
t.string "remember_token", limit: 128
t.index ["email"], name: "index_users_on_email"
t.index ["remember_token"], name: "index_users_on_remember_token"
end
end
| 41.914286 | 86 | 0.748466 |
ff7eebd33794f829d8272c9fbc38c6742ef6599b
| 1,961 |
# frozen_string_literal: true
require "fileutils"
require "jekyll"
module Jekyll
module EbookReader
# Generates static files
class EbookEmbed < Jekyll::Generator
safe false
priority :low
def generate(site)
@site = site
site.static_files += add_all_static_files
end
private
# Destination path for a copied reader asset within the site's output directory
def destination_path(file)
@site.in_dest_dir("/assets/jekyll-ebookreader/#{file}")
end
def static_files_to_include
["reader.js", "ebookreader_main.css", "ebookreader_iframe.css", "ajax-loader.gif"]
end
def add_all_static_files
files ||= []
static_files_to_include.each do |file_name|
incl = Jekyll::StaticFile.new(@site, File.join(File.dirname(__FILE__), "ebook_reader"),
"assets/jekyll-ebookreader-assets", file_name)
incl.destination(destination_path(file_name))
files.append(incl)
end
files
end
end
end
end
# Adds relevant scripts and html tags to html body
class EBookReaderTag < Liquid::Tag
def initialize(tag_name, input, tokens)
super
@input = input
end
def render(context)
path = File.join(File.dirname(__FILE__), "ebook_reader",
"assets/jekyll-ebookreader-assets/ebook_include_body_tag.html")
include_string = File.read(path)
Liquid::Template.parse(include_string).render(context)
end
end
# Add relevant scripts and style tags to doc head
Jekyll::Hooks.register [:pages, :posts], :post_render do |post|
path = File.join(File.dirname(__FILE__), "ebook_reader", "assets/jekyll-ebookreader-assets/ebook_include_header.html")
include_string = File.read(path)
post.output = post.output.gsub("</head>", "#{include_string}</head>") if post.data.key?("ebook_path")
end
Liquid::Template.register_tag("ebook", EBookReaderTag)
| 30.169231 | 120 | 0.672616 |
917e43bf1820e71ead98d8d746e5706151f8c596
| 3,541 |
# frozen_string_literal: true
require "spec_helper"
describe GraphQL::Schema::Member::AcceptsDefinition do
class AcceptsDefinitionSchema < GraphQL::Schema
accepts_definition :set_metadata
set_metadata :a, 999
class BaseField < GraphQL::Schema::Field
class BaseField < GraphQL::Schema::Argument
accepts_definition :metadata
end
argument_class BaseField
accepts_definition :metadata
end
GraphQL::Schema::Object.accepts_definition(:metadata2)
class BaseObject < GraphQL::Schema::Object
field_class BaseField
accepts_definition :metadata
end
GraphQL::Schema::Interface.accepts_definition(:metadata2)
module BaseInterface
include GraphQL::Schema::Interface
field_class BaseField
accepts_definition :metadata
end
class Option < GraphQL::Schema::Enum
class EnumValue < GraphQL::Schema::EnumValue
accepts_definition :metadata
end
enum_value_class EnumValue
accepts_definition :metadata
metadata :a, 123
value "A", metadata: [:a, 456]
value "B"
end
module Thing
include BaseInterface
metadata :z, 888
metadata2 :a, :bc
end
module Thing2
include Thing
end
class SomeObject < BaseObject
metadata :a, :aaa
end
class SomeObject2 < SomeObject
end
class Query < BaseObject
metadata :a, :abc
metadata2 :xyz, :zyx
field :option, Option, null: false do
metadata :a, :def
argument :value, Integer, required: true, metadata: [:a, :ghi]
end
field :thing, Thing, null: false
field :thing2, Thing2, null: false
field :some_object, SomeObject, null: false
field :some_object2, SomeObject2, null: false
end
query(Query)
end
it "passes along configs for types" do
assert_equal [:a, 123], AcceptsDefinitionSchema::Option.metadata
assert_equal 123, AcceptsDefinitionSchema::Option.graphql_definition.metadata[:a]
assert_equal [:a, :abc], AcceptsDefinitionSchema::Query.metadata
assert_equal :abc, AcceptsDefinitionSchema::Query.graphql_definition.metadata[:a]
assert_equal :zyx, AcceptsDefinitionSchema::Query.graphql_definition.metadata[:xyz]
assert_equal [:z, 888], AcceptsDefinitionSchema::Thing.metadata
assert_equal 888, AcceptsDefinitionSchema::Thing.graphql_definition.metadata[:z]
assert_equal :bc, AcceptsDefinitionSchema::Thing.graphql_definition.metadata[:a]
# Interface inheritance
assert_equal [:z, 888], AcceptsDefinitionSchema::Thing2.metadata
assert_equal 888, AcceptsDefinitionSchema::Thing2.graphql_definition.metadata[:z]
assert_equal :bc, AcceptsDefinitionSchema::Thing2.graphql_definition.metadata[:a]
# Object inheritance
assert_equal :aaa, AcceptsDefinitionSchema::SomeObject.graphql_definition.metadata[:a]
assert_equal :aaa, AcceptsDefinitionSchema::SomeObject2.graphql_definition.metadata[:a]
end
it "passes along configs for fields and arguments" do
assert_equal :def, AcceptsDefinitionSchema.find("Query.option").metadata[:a]
assert_equal :ghi, AcceptsDefinitionSchema.find("Query.option.value").metadata[:a]
end
it "passes along configs for enum values" do
assert_equal 456, AcceptsDefinitionSchema.find("Option.A").metadata[:a]
assert_nil AcceptsDefinitionSchema.find("Option.B").metadata[:a]
end
it "passes along configs for schemas" do
assert_equal 999, AcceptsDefinitionSchema.graphql_definition.metadata[:a]
end
end
| 31.616071 | 91 | 0.722677 |
913386edd0134b188ca12a6022623161959edf3f
| 3,526 |
require 'tooling'
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause this
# file to always be loaded, without a need to explicitly require it in any files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, make a
# separate helper file that requires this one and then use it only in the specs
# that actually need it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
=begin
# These two settings work together to allow you to limit a spec run
# to individual examples or groups you care about by tagging them with
# `:focus` metadata. When nothing is tagged with `:focus`, all examples
# get run.
config.filter_run :focus
config.run_all_when_everything_filtered = true
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = 'doc'
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# Enable only the newer, non-monkey-patching expect syntax.
# For more details, see:
# - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
expectations.syntax = :expect
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Enable only the newer, non-monkey-patching expect syntax.
# For more details, see:
# - http://teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
mocks.syntax = :expect
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended.
mocks.verify_partial_doubles = true
end
=end
end
| 43.530864 | 85 | 0.74135 |
1d9476f6afa8513033c9479d17ea3c702c8e69a5
| 1,777 |
Rails.application.routes.draw do
get 'user/profile'
resources :events
resources :user, only: [:edit] do
collection do
patch 'update_profile'
end
end
devise_for :users
get 'home/index'
# The priority is based upon order of creation: first created -> highest priority.
# See how all your routes lay out with "rake routes".
# You can have the root of your site routed with "root"
root 'home#index'
# Example of regular route:
# get 'products/:id' => 'catalog#view'
# Example of named route that can be invoked with purchase_url(id: product.id)
# get 'products/:id/purchase' => 'catalog#purchase', as: :purchase
# Example resource route (maps HTTP verbs to controller actions automatically):
# resources :products
# Example resource route with options:
# resources :products do
# member do
# get 'short'
# post 'toggle'
# end
#
# collection do
# get 'sold'
# end
# end
# Example resource route with sub-resources:
# resources :products do
# resources :comments, :sales
# resource :seller
# end
# Example resource route with more complex sub-resources:
# resources :products do
# resources :comments
# resources :sales do
# get 'recent', on: :collection
# end
# end
# Example resource route with concerns:
# concern :toggleable do
# post 'toggle'
# end
# resources :posts, concerns: :toggleable
# resources :photos, concerns: :toggleable
# Example resource route within a namespace:
# namespace :admin do
# # Directs /admin/products/* to Admin::ProductsController
# # (app/controllers/admin/products_controller.rb)
# resources :products
# end
end
| 25.385714 | 84 | 0.646595 |
38fe4e4c7b8b40cfb701a4ad73ac5b0d22cb9127
| 270 |
# Array of links to objects representing station passenger (boarding/alighting) counts
class TokyoMetro::Api::Station::Info::LinkToPassengerSurvey::List < TokyoMetro::Api::Station::Info::Common::List
def self.factory_for_seeding_instance_of_this_class
factory_for_seeding_link_to_passenger_surveys_of_each_station
end
end
| 33.75 | 112 | 0.840741 |
f8d0d915e8761f7ae42e39bd8fc63484015cda4a
| 1,324 |
#!/usr/bin/env ruby
require "rexml/document"
gir_path = "/usr/share/gir-1.0"
def print_help
puts %q(
usage:
overview_of_gir_file.rb pattern [type] [expand]
pattern is for example Atk for the file Atk-1.0.gir
)
end
if ARGV.size < 1 || ARGV.size > 3
STDERR.puts "Bad number of arguments"
print_help
exit 1
end
type = nil
expand = ARGV.include?("expand")
ARGV[1..2].each do |arg|
type = arg unless arg == "expand"
end
file_name = ARGV[0]
girs_files = Dir.glob("#{gir_path}/#{file_name}-?.*gir")
gir_file = girs_files.last
if gir_file.nil?
STDERR.puts "#{ARGV[0]} does not match any gir files"
print_help
exit 1
end
file = File.new(gir_file)
gir_document = REXML::Document.new file
element_names = {}
gir_document.elements.each("repository/namespace/*") do |element|
attr_name = element.attributes["name"]
if element_names[element.name].class == Array
element_names[element.name] << attr_name
else
element_names[element.name] = [attr_name]
end
end
def display_information(name, elements, expand)
puts name
if expand
elements.each do |e|
puts "\t* #{e}"
end
end
end
element_names.each do |name, elements|
if type
display_information(name, elements, expand) if name == type
else
display_information(name, elements, expand)
end
end
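# Example invocation sketch (which .gir files exist depends on the local system):
#
#   ./overview_of_gir_file.rb Atk class expand
#
# With a type given, only that element kind is listed; with "expand", each kind
# name is followed by one "\t* name" line per element found in the namespace.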
| 18.388889 | 65 | 0.69864 |
213723c2dcbe1b9647e5d12074bfadfb6e3bf9fd
| 2,728 |
# frozen_string_literal: true
require 'spec_helper'
describe Sha256Attribute do
let(:model) { Class.new { include Sha256Attribute } }
before do
columns = [
double(:column, name: 'name', type: :text),
double(:column, name: 'sha256', type: :binary)
]
allow(model).to receive(:columns).and_return(columns)
end
describe '#sha_attribute' do
context 'when in non-production' do
before do
stub_rails_env('development')
end
context 'when the table exists' do
before do
allow(model).to receive(:table_exists?).and_return(true)
end
it 'defines a SHA attribute for a binary column' do
expect(model).to receive(:attribute)
.with(:sha256, an_instance_of(Gitlab::Database::Sha256Attribute))
model.sha256_attribute(:sha256)
end
it 'raises ArgumentError when the column type is not :binary' do
expect { model.sha256_attribute(:name) }.to raise_error(ArgumentError)
end
end
context 'when the table does not exist' do
it 'allows the attribute to be added and issues a warning' do
allow(model).to receive(:table_exists?).and_return(false)
expect(model).not_to receive(:columns)
expect(model).to receive(:attribute)
expect(model).to receive(:warn)
model.sha256_attribute(:name)
end
end
context 'when the column does not exist' do
it 'allows the attribute to be added and issues a warning' do
allow(model).to receive(:table_exists?).and_return(true)
expect(model).to receive(:columns)
expect(model).to receive(:attribute)
expect(model).to receive(:warn)
model.sha256_attribute(:no_name)
end
end
context 'when other exceptions are raised' do
it 'logs and re-raises the error' do
allow(model).to receive(:table_exists?).and_raise(ActiveRecord::NoDatabaseError.new('does not exist'))
expect(model).not_to receive(:columns)
expect(model).not_to receive(:attribute)
expect(Gitlab::AppLogger).to receive(:error)
expect { model.sha256_attribute(:name) }.to raise_error(ActiveRecord::NoDatabaseError)
end
end
end
context 'when in production' do
before do
stub_rails_env('production')
end
it 'defines a SHA attribute' do
expect(model).not_to receive(:table_exists?)
expect(model).not_to receive(:columns)
expect(model).to receive(:attribute).with(:sha256, an_instance_of(Gitlab::Database::Sha256Attribute))
model.sha256_attribute(:sha256)
end
end
end
end
| 29.652174 | 112 | 0.639663 |
6a6f8b4f4d51dbf5f3aa5c61c1772e830ca02c25
| 9,954 |
require 'spec_helper'
feature 'File blob', :js, feature: true do
let(:project) { create(:project, :public) }
def visit_blob(path, fragment = nil)
visit namespace_project_blob_path(project.namespace, project, File.join('master', path), anchor: fragment)
end
context 'Ruby file' do
before do
visit_blob('files/ruby/popen.rb')
wait_for_ajax
end
it 'displays the blob' do
aggregate_failures do
# shows highlighted Ruby code
expect(page).to have_content("require 'fileutils'")
# does not show a viewer switcher
expect(page).not_to have_selector('.js-blob-viewer-switcher')
# shows an enabled copy button
expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
# shows a raw button
expect(page).to have_link('Open raw')
end
end
end
context 'Markdown file' do
context 'visiting directly' do
before do
visit_blob('files/markdown/ruby-style-guide.md')
wait_for_ajax
end
it 'displays the blob using the rich viewer' do
aggregate_failures do
# hides the simple viewer
expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
expect(page).to have_selector('.blob-viewer[data-type="rich"]')
# shows rendered Markdown
expect(page).to have_link("PEP-8")
# shows a viewer switcher
expect(page).to have_selector('.js-blob-viewer-switcher')
# shows a disabled copy button
expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
# shows a raw button
expect(page).to have_link('Open raw')
end
end
context 'switching to the simple viewer' do
before do
find('.js-blob-viewer-switch-btn[data-viewer=simple]').click
wait_for_ajax
end
it 'displays the blob using the simple viewer' do
aggregate_failures do
# hides the rich viewer
expect(page).to have_selector('.blob-viewer[data-type="simple"]')
expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
# shows highlighted Markdown code
expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
# shows an enabled copy button
expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
end
end
context 'switching to the rich viewer again' do
before do
find('.js-blob-viewer-switch-btn[data-viewer=rich]').click
wait_for_ajax
end
it 'displays the blob using the rich viewer' do
aggregate_failures do
# hides the simple viewer
expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
expect(page).to have_selector('.blob-viewer[data-type="rich"]')
# shows an enabled copy button
expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
end
end
end
end
end
context 'visiting with a line number anchor' do
before do
visit_blob('files/markdown/ruby-style-guide.md', 'L1')
wait_for_ajax
end
it 'displays the blob using the simple viewer' do
aggregate_failures do
# hides the rich viewer
expect(page).to have_selector('.blob-viewer[data-type="simple"]')
expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
# highlights the line in question
expect(page).to have_selector('#LC1.hll')
# shows highlighted Markdown code
expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
# shows an enabled copy button
expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
end
end
end
end
context 'Markdown file (stored in LFS)' do
before do
project.add_master(project.creator)
Files::CreateService.new(
project,
project.creator,
start_branch: 'master',
branch_name: 'master',
commit_message: "Add Markdown in LFS",
file_path: 'files/lfs/file.md',
file_content: project.repository.blob_at('master', 'files/lfs/lfs_object.iso').data
).execute
end
context 'when LFS is enabled on the project' do
before do
allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
project.update_attribute(:lfs_enabled, true)
visit_blob('files/lfs/file.md')
wait_for_ajax
end
it 'displays an error' do
aggregate_failures do
# hides the simple viewer
expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
expect(page).to have_selector('.blob-viewer[data-type="rich"]')
# shows an error message
expect(page).to have_content('The rendered file could not be displayed because it is stored in LFS. You can view the source or download it instead.')
# shows a viewer switcher
expect(page).to have_selector('.js-blob-viewer-switcher')
# does not show a copy button
expect(page).not_to have_selector('.js-copy-blob-source-btn')
# shows a raw button
expect(page).to have_link('Open raw')
end
end
context 'switching to the simple viewer' do
before do
find('.js-blob-viewer-switcher .js-blob-viewer-switch-btn[data-viewer=simple]').click
wait_for_ajax
end
it 'displays an error' do
aggregate_failures do
# hides the rich viewer
expect(page).to have_selector('.blob-viewer[data-type="simple"]')
expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
# shows an error message
expect(page).to have_content('The source could not be displayed because it is stored in LFS. You can download it instead.')
# does not show a copy button
expect(page).not_to have_selector('.js-copy-blob-source-btn')
end
end
end
end
context 'when LFS is disabled on the project' do
before do
visit_blob('files/lfs/file.md')
wait_for_ajax
end
it 'displays the blob' do
aggregate_failures do
# shows text
expect(page).to have_content('size 1575078')
# does not show a viewer switcher
expect(page).not_to have_selector('.js-blob-viewer-switcher')
# shows an enabled copy button
expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
# shows a raw button
expect(page).to have_link('Open raw')
end
end
end
end
context 'PDF file' do
before do
project.add_master(project.creator)
Files::CreateService.new(
project,
project.creator,
start_branch: 'master',
branch_name: 'master',
commit_message: "Add PDF",
file_path: 'files/test.pdf',
file_content: File.read(Rails.root.join('spec/javascripts/blob/pdf/test.pdf'))
).execute
visit_blob('files/test.pdf')
wait_for_ajax
end
it 'displays the blob' do
aggregate_failures do
# shows rendered PDF
expect(page).to have_selector('.js-pdf-viewer')
# does not show a viewer switcher
expect(page).not_to have_selector('.js-blob-viewer-switcher')
# does not show a copy button
expect(page).not_to have_selector('.js-copy-blob-source-btn')
# shows a download button
expect(page).to have_link('Download')
end
end
end
context 'ISO file (stored in LFS)' do
context 'when LFS is enabled on the project' do
before do
allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
project.update_attribute(:lfs_enabled, true)
visit_blob('files/lfs/lfs_object.iso')
wait_for_ajax
end
it 'displays the blob' do
aggregate_failures do
# shows a download link
expect(page).to have_link('Download (1.5 MB)')
# does not show a viewer switcher
expect(page).not_to have_selector('.js-blob-viewer-switcher')
# does not show a copy button
expect(page).not_to have_selector('.js-copy-blob-source-btn')
# shows a download button
expect(page).to have_link('Download')
end
end
end
context 'when LFS is disabled on the project' do
before do
visit_blob('files/lfs/lfs_object.iso')
wait_for_ajax
end
it 'displays the blob' do
aggregate_failures do
# shows text
expect(page).to have_content('size 1575078')
# does not show a viewer switcher
expect(page).not_to have_selector('.js-blob-viewer-switcher')
# shows an enabled copy button
expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
# shows a raw button
expect(page).to have_link('Open raw')
end
end
end
end
context 'ZIP file' do
before do
visit_blob('Gemfile.zip')
wait_for_ajax
end
it 'displays the blob' do
aggregate_failures do
# shows a download link
expect(page).to have_link('Download (2.11 KB)')
# does not show a viewer switcher
expect(page).not_to have_selector('.js-blob-viewer-switcher')
# does not show a copy button
expect(page).not_to have_selector('.js-copy-blob-source-btn')
# shows a download button
expect(page).to have_link('Download')
end
end
end
end
| 29.625 | 159 | 0.611915 |
1ad4a38e0b94dff395154d0d759f1e79721658f8
| 45 |
module Hutch
VERSION = '0.6.0'.freeze
end
| 9 | 26 | 0.666667 |
61352f0fa533f33509e536bdc43356ad2edf563e
| 256 |
module TD::Types
# File generation is no longer needed.
#
# @attr generation_id [Integer] Unique identifier for the generation process.
class Update::FileGenerationStop < Update
attribute :generation_id, TD::Types::Coercible::Integer
end
end
| 28.444444 | 79 | 0.75 |
33618d2e8479a98c76e8a3d831530364b4de6872
| 1,108 |
# coding: utf-8
module Fog
module Network
class SakuraCloud
class Real
def collect_monitor_router(id, start_time = nil, end_time = nil)
filter = {}
filter['Start'] = start_time if start_time
filter['End'] = end_time if end_time
request(
:headers => {
'Authorization' => "Basic #{@auth_encode}"
},
:expects => [200],
:method => 'GET',
:path => "#{Fog::SakuraCloud.build_endpoint(@api_zone)}/internet/#{id}/monitor",
:query => URI.encode(Fog::JSON.encode(filter))
)
end
end # Real
class Mock
def collect_monitor_router(id)
response = Excon::Response.new
response.status = 200
response.body = {
"Data"=>{
"2015-12-16T18:00:00+09:00"=>{
"In"=>500000,
"Out"=>70000000
}
},
"is_ok"=>true
}
response
end
end # Mock
end # SakuraCloud
end # Network
end # Fog
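# Hypothetical usage sketch (the connection options are assumptions; only
# collect_monitor_router itself is defined in this file):
#
#   network = Fog::Network::SakuraCloud.new(
#     :sakuracloud_api_token        => 'YOUR-TOKEN',       # assumed option name
#     :sakuracloud_api_token_secret => 'YOUR-TOKEN-SECRET' # assumed option name
#   )
#   network.collect_monitor_router(
#     '112800000001',               # router (internet) resource id
#     '2015-12-16T00:00:00+09:00',  # optional Start of the sampling window
#     '2015-12-16T23:59:59+09:00'   # optional End of the sampling window
#   )
#
# The response body mirrors the Mock above: a "Data" hash keyed by timestamp
# containing "In"/"Out" traffic values.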
| 26.380952 | 92 | 0.472924 |
ed93ea57e14bdd433104241417d69571bc0e9e49
| 814 |
# encoding: utf-8
module Nanoc::Filters
class Sass
# Essentially the `Sass::Importers::Filesystem` but registering each
# import file path.
class SassFilesystemImporter < ::Sass::Importers::Filesystem
private
def _find(dir, name, options)
full_filename, syntax = ::Sass::Util.destructure(find_real_file(dir, name, options))
return unless full_filename && File.readable?(full_filename)
filter = Nanoc::Filters::Sass.current # FIXME ew global
item = filter.imported_filename_to_item(full_filename)
filter.depend_on([ item ]) unless item.nil?
options[:syntax] = syntax
options[:filename] = full_filename
options[:importer] = self
::Sass::Engine.new(File.read(full_filename), options)
end
end
end
end
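# Rough illustration of the behaviour (the import path is an assumption): when a
# stylesheet compiled through Nanoc's Sass filter contains
#
#   @import "partials/base";
#
# _find resolves the partial on disk, maps it back to a Nanoc item via
# imported_filename_to_item, and records it with filter.depend_on, so the
# importing stylesheet can be treated as outdated whenever the partial changes.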
| 28.068966 | 92 | 0.668305 |
bbff120c90f8a48259286d1dfc9849a05a848066
| 184 |
class TestScript
PATH = "script.rb"
CODE = File.read(PATH)
# Load the script to be debugged
eval(CODE, binding, PATH)
end
# Define the loaded script as the handler
map("/") do
run TestScript.new
end
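# A minimal sketch of how this is presumably used (the config file name is an
# assumption): save it as config.ru next to script.rb and start it with `rackup`.
# script.rb is expected to supply a Rack-style #call on TestScript, since the
# evaluated code is what actually handles requests mapped to "/".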
| 15.333333 | 33 | 0.711957 |
e2a6ba70a9688738701c1474b1be8b36f1deaab2
| 517 |
class WithdrawalTxesController < ApplicationController
before_action :authenticate_user!
def create
withdrawal = WithdrawalTx.new
withdrawal.amount = current_user.account.balance
withdrawal.account = current_user.account
withdrawal.method = current_user.withdrawal_option.option_type
if withdrawal.save
redirect_to dashboard_path, notice: 'Withdrawal pending'
else
redirect_to dashboard_path, notice: 'Something went wrong, please contact support'
end
end
end
| 32.3125 | 88 | 0.773694 |
ab2e12d8dffa699fa8e3be00f426acbdeec23c88
| 762 |
module SuperDiff
module Test
class Player
attr_reader :handle, :character, :inventory, :shields, :health, :ultimate
def initialize(
handle:,
character:,
inventory:,
shields:,
health:,
ultimate:
)
@handle = handle
@character = character
@inventory = inventory
@shields = shields
@health = health
@ultimate = ultimate
end
def ==(other)
other.is_a?(self.class) &&
other.handle == handle &&
other.character == character &&
other.inventory == inventory &&
other.shields == shields &&
other.health == health &&
other.ultimate == ultimate
end
end
end
end
| 22.411765 | 79 | 0.526247 |
01bbbad56edd307b24f625d379b773b6f15d69f1
| 352 |
module Sastrawi
module Morphology
module Disambiguator
class DisambiguatorPrefixRule26a
def disambiguate(word)
contains = /^pem([aiueo])(.*)$/.match(word)
if contains
matches = contains.captures
return "m#{matches[0]}#{matches[1]}"
end
end
end
end
end
end
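# Minimal usage sketch (module nesting as defined above):
#
#   rule = Sastrawi::Morphology::Disambiguator::DisambiguatorPrefixRule26a.new
#   rule.disambiguate('pemilih') # => "milih" ("pem" + vowel becomes "m" + vowel)
#   rule.disambiguate('belajar') # => nil (no match, so nothing is returned)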
| 19.555556 | 53 | 0.5625 |
1caa08ac7f34fc0e81d2bfaf2ba99f77c15cd67b
| 3,207 |
require 'spec_helper'
describe HelloBlock::Transaction, '.query' do
it 'defaults to the query hash' do
expect(HelloBlock::Transaction.query).to eq(
{ path: '/transactions/', params: {} }
)
end
end
describe HelloBlock::Transaction, 'inspect' do
it 'kicks the query and resets the query hash' do
HelloBlock::Transaction.find('bananas')
HelloBlock::Transaction.inspect
expect(HelloBlock::Transaction.query).to eq(
{ path: '/transactions/', params: {} }
)
end
end
describe HelloBlock::Transaction, '.find' do
let(:tx) { 'f37e6181661473c14a123cca6f0ad0ab3303d011246f1d4bb4ccf3fccef2d700' }
after :each do
HelloBlock::Transaction.inspect # clear the query for other specs
end
it 'adds a single transaction hash to the path' do
HelloBlock::Transaction.find(tx)
expect(HelloBlock::Transaction.query[:path]).to eq("/transactions/#{tx}")
end
end
describe HelloBlock::Transaction, '.where' do
let(:address) { '1DQN9nopGvSCDnM3LH1w7j36FtnQDZKnej' }
let(:tx) { 'f37e6181661473c14a123cca6f0ad0ab3303d011246f1d4bb4ccf3fccef2d700' }
after :each do
HelloBlock::Transaction.inspect
end
it 'adds a batch of transaction hashes to the params and alters the path' do
HelloBlock::Transaction.where(tx_hashes: [tx, tx])
expect(HelloBlock::Transaction.query[:params]).to eq(
txHashes: [tx, tx]
)
end
it 'adds a batch of addresses to the params' do
HelloBlock::Transaction.where(addresses: [address, address])
expect(HelloBlock::Transaction.query[:path]).to eq '/addresses/transactions'
expect(HelloBlock::Transaction.query[:params]).to eq(
{ addresses: [address, address] }
)
end
end
describe HelloBlock::Transaction, '.limit' do
after :each do
HelloBlock::Transaction.inspect
end
it 'changes the path to the latest transactions path and passes a limit' do
HelloBlock::Transaction.limit(5)
expect(HelloBlock::Transaction.query[:path]).to eq '/transactions/latest'
expect(HelloBlock::Transaction.query[:params]).to eq({ limit: 5 })
end
end
describe HelloBlock::Transaction, '.offset' do
after :each do
HelloBlock::Transaction.inspect
end
it 'changes the path to the latest transactions path and passes a limit' do
HelloBlock::Transaction.offset(5)
expect(HelloBlock::Transaction.query[:params]).to eq({ offset: 5 })
end
end
describe HelloBlock::Transaction, '.propagate' do
let(:tx) { '0100000001dfcc651d60fae6f086fba5a6d2729cfea5cb867f4c1bca' +
'25192fcb60823490d6000000006b483045022100a7a7194ca4329369' +
'3249ccbe5bbb54efb17d22dcbfdd27c47fec1c6f2287553b02204eb7' +
'873322565308b06cc8a9e43c68c987d5d7eec570b2e135625c0fbe4b' +
'286101210355b6182f1d4ce3caad921d6abf37a20143c49f492ea606' +
'e8f66d0d291b0d4ab3ffffffff0110270000000000001976a91439a9' +
'becbf4c55b7346de80e307596901a3491c9c88ac00000000' }
after :each do
HelloBlock::Transaction.inspect
end
it 'sets the raw transaction hex in the parameters' do
HelloBlock::Transaction.propagate(tx)
expect(HelloBlock::Transaction.query[:params]).to eq({ rawTxHex: tx, post: true })
end
end
| 29.694444 | 86 | 0.722482 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.