hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
621df63d166d354965ef86f514a3f087a26fc027 | 293 | module PageObject
module Platforms
module SeleniumWebDriver
module TextField
#
# Set the value of the TextField
#
def value=(new_value)
element.clear
element.send_keys(new_value)
end
end
end
end
end
| 16.277778 | 40 | 0.556314 |
911a45612e9189479d02fb021a88def63105209f | 3,446 | require "language/go"
class Nsq < Formula
desc "Realtime distributed messaging platform"
homepage "http://nsq.io"
url "https://github.com/bitly/nsq/archive/v0.3.5.tar.gz"
sha256 "4120ad24e3700be1e65549b9a55eab5d4e744cd114d9b39779a47b6dedda0b35"
head "https://github.com/bitly/nsq.git"
bottle do
cellar :any
sha256 "eb9dd459eec6603dd720b58b8fcee5ccfc122999aed51845ff2c98e0bb3fabfe" => :yosemite
sha256 "b17656e4e8b93abf9d6b0b65b614ffa7fecc2b0920a8d0329aa6abda2e5a2f7e" => :mavericks
sha256 "dd00b16b6708fd69764ec0276568f88b4d3c8729c6be45859b3450b313a0f3b6" => :mountain_lion
end
depends_on "go" => :build
go_resource "github.com/BurntSushi/toml" do
url "https://github.com/BurntSushi/toml.git",
:revision => "2dff11163ee667d51dcc066660925a92ce138deb"
end
go_resource "github.com/bitly/go-hostpool" do
url "https://github.com/bitly/go-hostpool.git",
:revision => "58b95b10d6ca26723a7f46017b348653b825a8d6"
end
go_resource "github.com/bitly/go-nsq" do
url "https://github.com/bitly/go-nsq.git",
:revision => "22a8bd48c443ec23bb559675b6df8284bbbdab29"
end
go_resource "github.com/bitly/go-simplejson" do
url "https://github.com/bitly/go-simplejson.git",
:revision => "fc395a5db941cf38922b1ccbc083640cd76fe4bc"
end
go_resource "github.com/bmizerany/perks" do
url "https://github.com/bmizerany/perks.git",
:revision => "6cb9d9d729303ee2628580d9aec5db968da3a607"
end
go_resource "github.com/mreiferson/go-options" do
url "https://github.com/mreiferson/go-options.git",
:revision => "2cf7eb1fdd83e2bb3375fef6fdadb04c3ad564da"
end
go_resource "github.com/mreiferson/go-snappystream" do
url "https://github.com/mreiferson/go-snappystream.git",
:revision => "028eae7ab5c4c9e2d1cb4c4ca1e53259bbe7e504"
end
go_resource "github.com/bitly/timer_metrics" do
url "https://github.com/bitly/timer_metrics.git",
:revision => "afad1794bb13e2a094720aeb27c088aa64564895"
end
go_resource "github.com/blang/semver" do
url "https://github.com/blang/semver.git",
:revision => "9bf7bff48b0388cb75991e58c6df7d13e982f1f2"
end
def install
# build a proper GOPATH tree for local dependencies
(buildpath + "src/github.com/bitly/nsq").install "internal", "nsqlookupd", "nsqd", "nsqadmin"
ENV["GOPATH"] = buildpath
Language::Go.stage_deps resources, buildpath/"src"
system "make"
system "make", "DESTDIR=#{prefix}", "PREFIX=", "install"
end
test do
begin
lookupd = fork do
system bin/"nsqlookupd"
end
sleep 2
d = fork do
system bin/"nsqd", "--lookupd-tcp-address=127.0.0.1:4160"
end
sleep 2
admin = fork do
system bin/"nsqadmin", "--lookupd-http-address=127.0.0.1:4161"
end
sleep 2
to_file = fork do
system bin/"nsq_to_file", "--topic=test", "--output-dir=#{testpath}",
"--lookupd-http-address=127.0.0.1:4161"
end
sleep 2
system "curl", "-d", "hello", "http://127.0.0.1:4151/put?topic=test"
dat = File.read(Dir["*.dat"].first)
assert_match "test", dat
assert_match version.to_s, dat
ensure
Process.kill(9, lookupd)
Process.kill(9, d)
Process.kill(9, admin)
Process.kill(9, to_file)
Process.wait lookupd
Process.wait d
Process.wait admin
Process.wait to_file
end
end
end
| 30.767857 | 97 | 0.690075 |
1143a68c5aecaa524893835a3ef279133f2b9557 | 520 | require "bundler/setup"
require "tomosia_icons8_crawl"
RSpec.configure do |config|
  # `treat_symbols_as_metadata_keys_with_true_values` was an RSpec 2.x
  # transition flag; it was removed in RSpec 3 (where that behaviour is the
  # default), and this file already requires RSpec 3 features below
  # (`disable_monkey_patching!`, `example_status_persistence_file_path`),
  # so the setting is dropped.
  config.run_all_when_everything_filtered = true
  config.filter_run :focus

  # Enable flags like --only-failures and --next-failure
  config.example_status_persistence_file_path = ".rspec_status"

  # Disable RSpec exposing methods globally on `Module` and `main`
  config.disable_monkey_patching!

  config.expect_with :rspec do |c|
    c.syntax = :expect
  end
end
| 27.368421 | 66 | 0.780769 |
6a112560e38cd31ba0cac25bb35ada6e4bd307b6 | 135 | # frozen_string_literal: true
# Register this gem's locale files with I18n and require that any locale
# used at runtime is one of the available (loaded) locales.
I18n.load_path += Dir[GitStats.root.join('config/locales/*.yml')]
I18n.enforce_available_locales = true
| 27 | 65 | 0.785185 |
26087084add2a41a1d9ff1e587a9a9fc89711c10 | 1,481 | class Ngspice < Formula
desc "Spice circuit simulator"
homepage "https://ngspice.sourceforge.io/"
url "https://downloads.sourceforge.net/project/ngspice/ng-spice-rework/27/ngspice-27.tar.gz"
sha256 "0c08c7d57a2e21cf164496f3237f66f139e0c78e38345fbe295217afaf150695"
bottle do
sha256 "245e35d1ad12c0b06da9089eb9fe05a51da73aefa8e4840c2dc4f6514c6af902" => :sierra
sha256 "d220c96f72941f8a05dab330ffdb813f294f37daaffede4292821d1b1ed7d7a0" => :el_capitan
end
head do
url "https://git.code.sf.net/p/ngspice/ngspice.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "bison" => :build
depends_on "libtool" => :build
end
option "without-xspice", "Build without x-spice extensions"
deprecated_option "with-x" => "with-x11"
depends_on :x11 => :optional
def install
system "./autogen.sh" if build.head?
args = %W[
--disable-dependency-tracking
--prefix=#{prefix}
--with-editline=yes
]
if build.with? "x11"
args << "--with-x"
else
args << "--without-x"
end
args << "--enable-xspice" if build.with? "xspice"
system "./configure", *args
system "make", "install"
end
test do
(testpath/"test.cir").write <<-EOS.undent
RC test circuit
v1 1 0 1
r1 1 2 1
c1 2 0 1 ic=0
.tran 100u 100m uic
.control
run
quit
.endc
.end
EOS
system "#{bin}/ngspice", "test.cir"
end
end
| 23.887097 | 94 | 0.648886 |
6a67d1439a4f82a9766e143cfc1e9abf1463e35d | 515 | require_relative './pipeline'
module GOCD
module PIPELINE_CONFIG
class PipelineGroup
include GOCD::PIPELINE_CONFIG
attr_reader :name, :pipelines
def initialize(group_data)
@name = group_data['@group']
@pipelines = to_pipelines(group_data['pipeline']) || []
end
private
def to_pipelines(pipelines_data)
to_array(pipelines_data).map { |pipeline| GOCD::PIPELINE_CONFIG::Pipeline.new(pipeline) } unless pipelines_data.nil?
end
end
end
end | 25.75 | 124 | 0.67767 |
61c7de768f6877b8a32a11170f53ab6fbe6e7397 | 11,895 | # *******************************************************************************
# Honeybee OpenStudio Gem, Copyright (c) 2020, Alliance for Sustainable
# Energy, LLC, Ladybug Tools LLC and other contributors. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# (1) Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# (2) Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# (3) Neither the name of the copyright holder nor the names of any contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission from the respective party.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE
# UNITED STATES GOVERNMENT, OR THE UNITED STATES DEPARTMENT OF ENERGY, NOR ANY OF
# THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# *******************************************************************************
require 'from_honeybee/model_object'
require 'from_honeybee/simulation/extension'
require 'from_honeybee/simulation/designday'
require 'openstudio'
module FromHoneybee
class SimulationParameter
attr_reader :errors, :warnings
# Read Simulation Parameter JSON from disk
def self.read_from_disk(file)
hash = nil
File.open(File.join(file), 'r') do |f|
hash = JSON.parse(f.read, symbolize_names: true)
end
SimulationParameter.new(hash)
end
# Load ModelObject from symbolized hash
def initialize(hash)
# initialize class variable @@extension only once
@@extension = ExtensionSimulationParameter.new
@@schema = nil
File.open(@@extension.schema_file) do |f|
@@schema = JSON.parse(f.read, symbolize_names: true)
end
@hash = hash
@type = @hash[:type]
raise 'Unknown model type' if @type.nil?
raise "Incorrect model type for SimulationParameter '#{@type}'" unless @type == 'SimulationParameter'
end
# check if the model is valid
def valid?
if Gem.loaded_specs.has_key?("json-schema")
return validation_errors.empty?
else
return true
end
end
# return detailed model validation errors
def validation_errors
if Gem.loaded_specs.has_key?("json-schema")
require 'json-schema'
JSON::Validator.fully_validate(@@schema, @hash)
end
end
def defaults
@@schema[:components][:schemas]
end
# convert to openstudio model, clears errors and warnings
def to_openstudio_model(openstudio_model=nil, log_report=false)
@errors = []
@warnings = []
if log_report
puts 'Starting SimulationParameter translation from Honeybee to OpenStudio'
end
@openstudio_model = if openstudio_model
openstudio_model
else
OpenStudio::Model::Model.new
end
create_openstudio_objects
if log_report
puts 'Done with SimulationParameter translation!'
end
@openstudio_model
end
def create_openstudio_objects
# get the defaults for each sub-object
simct_defaults = defaults[:SimulationControl][:properties]
shdw_defaults = defaults[:ShadowCalculation][:properties]
siz_defaults = defaults[:SizingParameter][:properties]
out_defaults = defaults[:SimulationOutput][:properties]
runper_defaults = defaults[:RunPeriod][:properties]
simpar_defaults = defaults[:SimulationParameter][:properties]
# set defaults for the Model's SimulationControl object
os_sim_control = @openstudio_model.getSimulationControl
os_sim_control.setDoZoneSizingCalculation(simct_defaults[:do_zone_sizing][:default])
os_sim_control.setDoSystemSizingCalculation(simct_defaults[:do_system_sizing][:default])
os_sim_control.setDoPlantSizingCalculation(simct_defaults[:do_plant_sizing][:default])
os_sim_control.setRunSimulationforWeatherFileRunPeriods(simct_defaults[:run_for_run_periods][:default])
os_sim_control.setRunSimulationforSizingPeriods(simct_defaults[:run_for_sizing_periods][:default])
os_sim_control.setSolarDistribution(shdw_defaults[:solar_distribution][:default])
# override any SimulationControl defaults with lodaded JSON
if @hash[:simulation_control]
unless @hash[:simulation_control][:do_zone_sizing].nil?
os_sim_control.setDoZoneSizingCalculation(@hash[:simulation_control][:do_zone_sizing])
end
unless @hash[:simulation_control][:do_system_sizing].nil?
os_sim_control.setDoSystemSizingCalculation(@hash[:simulation_control][:do_system_sizing])
end
unless @hash[:simulation_control][:do_plant_sizing].nil?
os_sim_control.setDoPlantSizingCalculation(@hash[:simulation_control][:do_plant_sizing])
end
unless @hash[:simulation_control][:run_for_run_periods].nil?
os_sim_control.setRunSimulationforWeatherFileRunPeriods(@hash[:simulation_control][:run_for_run_periods])
end
unless @hash[:simulation_control][:run_for_sizing_periods].nil?
os_sim_control.setRunSimulationforSizingPeriods(@hash[:simulation_control][:run_for_sizing_periods])
end
end
# set defaults for the Model's ShadowCalculation object
os_shadow_calc = @openstudio_model.getShadowCalculation
os_shadow_calc.setCalculationFrequency(shdw_defaults[:calculation_frequency][:default])
os_shadow_calc.setMaximumFiguresInShadowOverlapCalculations(shdw_defaults[:maximum_figures][:default])
os_shadow_calc.setCalculationMethod(shdw_defaults[:calculation_method][:default])
# override any ShadowCalculation defaults with lodaded JSON
if @hash[:shadow_calculation]
if @hash[:shadow_calculation][:calculation_frequency]
os_shadow_calc.setCalculationFrequency(@hash[:shadow_calculation][:calculation_frequency])
end
if @hash[:shadow_calculation][:maximum_figures]
os_shadow_calc.setMaximumFiguresInShadowOverlapCalculations(@hash[:shadow_calculation][:maximum_figures])
end
if @hash[:shadow_calculation][:calculation_method]
os_shadow_calc.setCalculationMethod(@hash[:shadow_calculation][:calculation_method])
end
if @hash[:shadow_calculation][:solar_distribution]
os_sim_control.setSolarDistribution(@hash[:shadow_calculation][:solar_distribution])
end
end
# set defaults for the Model's SizingParameter object
os_sizing_par = @openstudio_model.getSizingParameters
os_sizing_par.setHeatingSizingFactor(siz_defaults[:heating_factor][:default])
os_sizing_par.setCoolingSizingFactor(siz_defaults[:cooling_factor][:default])
# override any SizingParameter defaults with lodaded JSON
if @hash[:sizing_parameter]
if @hash[:sizing_parameter][:heating_factor]
os_sizing_par.setHeatingSizingFactor(@hash[:sizing_parameter][:heating_factor])
end
if @hash[:sizing_parameter][:cooling_factor]
os_sizing_par.setCoolingSizingFactor(@hash[:sizing_parameter][:cooling_factor])
end
# set any design days
if @hash[:sizing_parameter][:design_days]
@hash[:sizing_parameter][:design_days].each do |des_day|
des_day_object = DesignDay.new(des_day)
os_des_day = des_day_object.to_openstudio(@openstudio_model)
end
end
end
# set Outputs for the simulation
if @hash[:output]
if @hash[:output][:outputs]
@hash[:output][:outputs].each do |output|
os_output = OpenStudio::Model::OutputVariable.new(output, @openstudio_model)
if @hash[:output][:reporting_frequency]
os_output.setReportingFrequency(@hash[:output][:reporting_frequency])
else
os_output.setReportingFrequency(out_defaults[:reporting_frequency][:default])
end
end
end
if @hash[:output][:summary_reports]
begin
os_report = OpenStudio::Model::OutputTableSummaryReports.new(@openstudio_model)
rescue NameError
end
@hash[:output][:summary_reports].each do |report|
begin
os_report.addSummaryReport(report)
rescue NoMethodError
end
end
end
end
# set defaults for the year description
year_description = @openstudio_model.getYearDescription
year_description.setDayofWeekforStartDay(runper_defaults[:start_day_of_week][:default])
# set up the simulation RunPeriod
if @hash[:run_period]
# set the leap year
if @hash[:run_period][:leap_year]
year_description.setIsLeapYear(@hash[:run_period][:leap_year])
end
# set the start day of the week
if @hash[:run_period][:start_day_of_week]
year_description.setDayofWeekforStartDay(@hash[:run_period][:start_day_of_week])
end
# set the run preiod start and end dates
openstudio_runperiod = @openstudio_model.getRunPeriod
openstudio_runperiod.setBeginMonth(@hash[:run_period][:start_date][0])
openstudio_runperiod.setBeginDayOfMonth(@hash[:run_period][:start_date][1])
openstudio_runperiod.setEndMonth(@hash[:run_period][:end_date][0])
openstudio_runperiod.setEndDayOfMonth(@hash[:run_period][:end_date][1])
# set the daylight savings time
if @hash[:run_period][:daylight_saving_time]
os_dl_saving = @openstudio_model.getRunPeriodControlDaylightSavingTime
os_dl_saving.setStartDate(
OpenStudio::MonthOfYear.new(@hash[:run_period][:daylight_saving_time][:start_date][0]),
@hash[:run_period][:daylight_saving_time][:start_date][1])
os_dl_saving.setEndDate(
OpenStudio::MonthOfYear.new(@hash[:run_period][:daylight_saving_time][:end_date][0]),
@hash[:run_period][:daylight_saving_time][:end_date][1])
end
# TODO: set the holidays once they are available in OpenStudio SDK
end
# set the simulation timestep
os_timestep = @openstudio_model.getTimestep
if @hash[:timestep]
os_timestep.setNumberOfTimestepsPerHour(@hash[:timestep])
else
os_timestep.setNumberOfTimestepsPerHour(simpar_defaults[:timestep][:default])
end
# assign the north
if @hash[:north_angle]
@openstudio_model.getBuilding.setNorthAxis(-@hash[:north_angle])
end
# assign the terrain
os_site = @openstudio_model.getSite
os_site.setTerrain(simpar_defaults[:terrain_type][:default])
if @hash[:terrain_type]
os_site.setTerrain(@hash[:terrain_type])
end
end
end #SimulationParameter
end #FromHoneybee
| 42.78777 | 115 | 0.696511 |
ed34771669606ef566ffed98b5a4a18e7e05d36b | 966 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2021_02_15_164125) do
  # shouts: user-posted messages; message text and image blob are nullable.
  create_table "shouts", force: :cascade do |t|
    t.text "message"
    t.binary "image"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end
end
| 42 | 86 | 0.768116 |
f8f2119e418be8460314749c1f99cfa56d5ea58d | 1,016 | # frozen_string_literal: true
module FinishedGoods
module Ecert
module EcertTrackingUnit
      # Builds the "New eCert eLot" form page.
      class New
        # Compiles and returns the page layout for creating a new eCert
        # tracking unit.
        #
        # @param govt_inspection_sheet_id [Integer, nil] inspection sheet to pre-fill
        # @param form_values [Hash, nil] previously-entered values to re-render
        # @param form_errors [Hash, nil] validation errors to display
        # @param remote [Boolean] submit the form via AJAX when true
        # @return [Crossbeams::Layout::Page] the compiled page layout
        def self.call(govt_inspection_sheet_id: nil, form_values: nil, form_errors: nil, remote: true)
          ui_rule = UiRules::Compiler.new(:ecert_tracking_unit, :new, govt_inspection_sheet_id: govt_inspection_sheet_id, form_values: form_values)
          rules = ui_rule.compile
          layout = Crossbeams::Layout::Page.build(rules) do |page|
            page.form_object ui_rule.form_object
            page.form_values form_values
            page.form_errors form_errors
            page.form do |form|
              form.caption 'New eCert eLot'
              form.action '/finished_goods/ecert/ecert_tracking_units'
              form.remote! if remote
              form.add_field :ecert_agreement_id
              form.add_field :pallet_list
              form.add_field :govt_inspection_sheet_id
            end
          end
          layout
        end
      end
end
end
| 32.774194 | 147 | 0.637795 |
f8f597e2fc7a8afcfaad2456bc9170777632e417 | 1,037 | cask "webcatalog" do
version "33.0.0"
if Hardware::CPU.intel?
sha256 "6db4355dccd13c951a35cd7bef3dfde5783088301c9fee830480e1858b49cedd"
url "https://github.com/webcatalog/webcatalog-app/releases/download/v#{version}/WebCatalog-#{version}.dmg",
verified: "github.com/webcatalog/webcatalog-app/"
else
sha256 "1d0d75925c6400134c7e121beeba63e042b57a9e63b54809a95964b4689a666b"
url "https://github.com/webcatalog/webcatalog-app/releases/download/v#{version}/WebCatalog-#{version}-arm64.dmg",
verified: "github.com/webcatalog/webcatalog-app/"
end
name "WebCatalog"
desc "Tool to run web apps like desktop apps"
homepage "https://webcatalog.app/"
auto_updates true
app "WebCatalog.app"
zap trash: [
"~/Library/Application Support/WebCatalog",
"~/Library/Caches/com.webcatalog.jordan",
"~/Library/Caches/com.webcatalog.jordan.ShipIt",
"~/Library/Preferences/com.webcatalog.jordan.plist",
"~/Library/Saved Application State/com.webcatalog.jordan.savedState",
]
end
| 32.40625 | 117 | 0.739634 |
b9c4229b38580830450a7c38eee3714f390ea9f0 | 1,638 | require "rails_helper"
RSpec.describe OrganisationPolicy do
  subject { described_class.new(user, organisation) }
  let(:organisation) { create(:delivery_partner_organisation) }
  # BEIS (service-owner) users are permitted every action, including download.
  context "as user that belongs to BEIS" do
    let(:user) { build_stubbed(:beis_user) }
    it { is_expected.to permit_action(:index) }
    it { is_expected.to permit_action(:show) }
    it { is_expected.to permit_new_and_create_actions }
    it { is_expected.to permit_edit_and_update_actions }
    it { is_expected.to permit_action(:destroy) }
    it { is_expected.to permit_action(:download) }
  end
  context "as user that does NOT belong to BEIS" do
    # Delivery partners may view and edit their own organisation only.
    context "when the user belongs to that organisation" do
      let(:user) { build_stubbed(:delivery_partner_user, organisation: organisation) }
      it { is_expected.to forbid_action(:index) }
      it { is_expected.to permit_action(:show) }
      it { is_expected.to forbid_new_and_create_actions }
      it { is_expected.to permit_edit_and_update_actions }
      it { is_expected.to forbid_action(:destroy) }
      it { is_expected.to forbid_action(:download) }
    end
    # All actions are forbidden on somebody else's organisation.
    context "when the user does NOT belong to that organisation" do
      let(:user) { build_stubbed(:delivery_partner_user, organisation: create(:delivery_partner_organisation)) }
      it { is_expected.to forbid_action(:index) }
      it { is_expected.to forbid_action(:show) }
      it { is_expected.to forbid_new_and_create_actions }
      it { is_expected.to forbid_edit_and_update_actions }
      it { is_expected.to forbid_action(:destroy) }
      it { is_expected.to forbid_action(:download) }
    end
  end
end
| 38.093023 | 112 | 0.721001 |
21493e91e0afe7d1e081c928fd7f292363b3b5b5 | 269 | if ENV['PLATFORM'] == 'ios'
require 'calabash-cucumber/cucumber'
require_relative '../ios/pages/standard/IosCommon'
elsif ENV['PLATFORM'] == 'android'
require 'calabash-android/cucumber'
require_relative '../../features/android/pages/standard/DroidCommon'
end
| 29.888889 | 70 | 0.739777 |
ab48103a1ec6e36a0254be98d9244a8c4538a98d | 682 | $:.push File.expand_path("lib", __dir__)
# Maintain your gem's version:
require "spree_api_docs/version"
# Describe your gem and declare its dependencies:
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
  s.name = "spree_api_docs"
  s.version = SpreeApiDocs::VERSION
  s.authors = ["Spark Solutions"]
  s.email = ["[email protected]"]
  s.homepage = "https://spreecommerce.org"
  s.summary = "Swagger documentation for Spree Commerce."
  # NOTE(review): "." is a placeholder description — replace with real text.
  s.description = "."
  s.license = "MIT"
  s.files = Dir["{app,config,db,lib,vendor,public}/**/*", "MIT-LICENSE", "Rakefile", "README.md"]
  s.add_dependency "rails", "~> 5.2.0"
  s.add_development_dependency "sqlite3"
end
| 29.652174 | 97 | 0.662757 |
9170e518853bb05077bad531ce1e09f89c24c561 | 9,201 | =begin
#NSX-T Data Center Policy API
#VMware NSX-T Data Center Policy REST API
OpenAPI spec version: 3.1.0.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'date'
module NSXTPolicy
# This action is used to redirect HTTP request messages to a new URL. The reply_status value specified in this action is used as the status code of HTTP response message which is sent back to client (Normally a browser). The HTTP status code for redirection is 3xx, for example, 301, 302, 303, 307, etc. The redirect_url is the new URL that the HTTP request message is redirected to. Normally browser will send another HTTP request to the new URL after receiving a redirection response message. Captured variables and built-in variables can be used in redirect_url field. For example, to redirect all HTTP requests to HTTPS requests for a virtual server. We create an LBRule without any conditions, add an LBHttpRedirectAction to the rule. Set the redirect_url field of the LBHttpRedirectAction to: https://$_host$_request_uri And set redirect_status to \"302\", which means found. This rule will redirect all HTTP requests to HTTPS server port on the same host.
class LBHttpRedirectAction
# The property identifies the load balancer rule action type.
attr_accessor :type
# HTTP response status code.
attr_accessor :redirect_status
# The URL that the HTTP request is redirected to.
attr_accessor :redirect_url
class EnumAttributeValidator
attr_reader :datatype
attr_reader :allowable_values
def initialize(datatype, allowable_values)
@allowable_values = allowable_values.map do |value|
case datatype.to_s
when /Integer/i
value.to_i
when /Float/i
value.to_f
else
value
end
end
end
def valid?(value)
!value || allowable_values.include?(value)
end
end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'type' => :'type',
:'redirect_status' => :'redirect_status',
:'redirect_url' => :'redirect_url'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'type' => :'String',
:'redirect_status' => :'String',
:'redirect_url' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'type')
self.type = attributes[:'type']
end
if attributes.has_key?(:'redirect_status')
self.redirect_status = attributes[:'redirect_status']
end
if attributes.has_key?(:'redirect_url')
self.redirect_url = attributes[:'redirect_url']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @type.nil?
invalid_properties.push('invalid value for "type", type cannot be nil.')
end
if @redirect_status.nil?
invalid_properties.push('invalid value for "redirect_status", redirect_status cannot be nil.')
end
if @redirect_url.nil?
invalid_properties.push('invalid value for "redirect_url", redirect_url cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @type.nil?
type_validator = EnumAttributeValidator.new('String', ['LBSelectPoolAction', 'LBHttpRequestUriRewriteAction', 'LBHttpRequestHeaderRewriteAction', 'LBHttpRejectAction', 'LBHttpRedirectAction', 'LBHttpResponseHeaderRewriteAction', 'LBHttpRequestHeaderDeleteAction', 'LBHttpResponseHeaderDeleteAction', 'LBVariableAssignmentAction', 'LBVariablePersistenceOnAction', 'LBVariablePersistenceLearnAction', 'LBJwtAuthAction', 'LBSslModeSelectionAction', 'LBConnectionDropAction'])
return false unless type_validator.valid?(@type)
return false if @redirect_status.nil?
return false if @redirect_url.nil?
true
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] type Object to be assigned
def type=(type)
validator = EnumAttributeValidator.new('String', ['LBSelectPoolAction', 'LBHttpRequestUriRewriteAction', 'LBHttpRequestHeaderRewriteAction', 'LBHttpRejectAction', 'LBHttpRedirectAction', 'LBHttpResponseHeaderRewriteAction', 'LBHttpRequestHeaderDeleteAction', 'LBHttpResponseHeaderDeleteAction', 'LBVariableAssignmentAction', 'LBVariablePersistenceOnAction', 'LBVariablePersistenceLearnAction', 'LBJwtAuthAction', 'LBSslModeSelectionAction', 'LBConnectionDropAction'])
unless validator.valid?(type)
fail ArgumentError, 'invalid value for "type", must be one of #{validator.allowable_values}.'
end
@type = type
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
type == o.type &&
redirect_status == o.redirect_status &&
redirect_url == o.redirect_url
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[type, redirect_status, redirect_url].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = NSXTPolicy.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 35.941406 | 967 | 0.662645 |
e2a79a82bd1278c0f57aaa4e00e883c1a8875508 | 220 | # frozen_string_literal: true
FactoryBot.define do
  # Topic with a unique title/body per build, linked to a user and a node.
  factory :topic do
    sequence(:title) { |n| "Topic Title #{n}" }
    sequence(:body) { |n| "Topic Body #{n}" }
    association :user
    association :node
  end
end
| 20 | 47 | 0.640909 |
7aac71878a957ef7dd903c8710019bba0e7b2d0f | 243 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../shared/matched_size.rb', __FILE__)
require 'strscan'
describe "StringScanner#matched_size" do
  # Runs the shared :strscan_matched_size examples against #matched_size.
  it_behaves_like(:strscan_matched_size, :matched_size)
end
| 30.375 | 63 | 0.781893 |
b9f784a59def31db8952984af768a620e52c64a7 | 6,831 | =begin
#Swagger Petstore
#This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require 'date'
module Petstore
  # Model for the fake Petstore API's ArrayTest schema: arrays of strings,
  # nested integer arrays, and nested arrays of ReadOnlyFirst models.
  # NOTE(review): this file is generated by swagger-codegen (see file header);
  # manual edits will be lost on regeneration.
  class ArrayTest
    attr_accessor :array_of_string

    attr_accessor :array_array_of_integer

    attr_accessor :array_array_of_model

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'array_of_string' => :'array_of_string',
        :'array_array_of_integer' => :'array_array_of_integer',
        :'array_array_of_model' => :'array_array_of_model'
      }
    end

    # Attribute type mapping.
    def self.swagger_types
      {
        :'array_of_string' => :'Array<String>',
        :'array_array_of_integer' => :'Array<Array<Integer>>',
        :'array_array_of_model' => :'Array<Array<ReadOnlyFirst>>'
      }
    end

    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)

      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}

      if attributes.has_key?(:'array_of_string')
        if (value = attributes[:'array_of_string']).is_a?(Array)
          self.array_of_string = value
        end
      end

      if attributes.has_key?(:'array_array_of_integer')
        if (value = attributes[:'array_array_of_integer']).is_a?(Array)
          self.array_array_of_integer = value
        end
      end

      if attributes.has_key?(:'array_array_of_model')
        if (value = attributes[:'array_array_of_model']).is_a?(Array)
          self.array_array_of_model = value
        end
      end
    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      return invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      return true
    end

    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
        array_of_string == o.array_of_string &&
        array_array_of_integer == o.array_array_of_integer &&
        array_array_of_model == o.array_array_of_model
    end

    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end

    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [array_of_string, array_array_of_integer, array_array_of_model].hash
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end # or else data not found in attributes(hash), not an issue as the data can be optional
      end

      self
    end

    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        DateTime.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        temp_model = Petstore.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end

    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map{ |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 30.495536 | 157 | 0.644122 |
38660462e037fcf52f537a3f2d7b395748c76d88 | 258 | # frozen_string_literal: true
require "rails_helper"
RSpec.describe Alchemy::PictureThumb do
it { should belong_to(:picture).class_name("Alchemy::Picture") }
it { should validate_presence_of(:signature) }
it { should validate_presence_of(:uid) }
end
| 25.8 | 66 | 0.763566 |
module Google
  module Language
    # Maps ISO language codes to the language names used by Google Translate.
    # NOTE: the original literal also contained 'tl' => 'FILIPINO' before
    # 'tl' => 'TAGALOG'; duplicate hash keys mean the later entry always won,
    # so the dead 'FILIPINO' entry has been removed.
    Languages = {
      'af'    => 'AFRIKAANS',
      'sq'    => 'ALBANIAN',
      'am'    => 'AMHARIC',
      'ar'    => 'ARABIC',
      'hy'    => 'ARMENIAN',
      'az'    => 'AZERBAIJANI',
      'eu'    => 'BASQUE',
      'be'    => 'BELARUSIAN',
      'bn'    => 'BENGALI',
      'bh'    => 'BIHARI',
      'bg'    => 'BULGARIAN',
      'my'    => 'BURMESE',
      'ca'    => 'CATALAN',
      'chr'   => 'CHEROKEE',
      'zh'    => 'CHINESE',
      'zh-CN' => 'CHINESE_SIMPLIFIED',
      'zh-TW' => 'CHINESE_TRADITIONAL',
      'hr'    => 'CROATIAN',
      'cs'    => 'CZECH',
      'da'    => 'DANISH',
      'dv'    => 'DHIVEHI',
      'nl'    => 'DUTCH',
      'en'    => 'ENGLISH',
      'eo'    => 'ESPERANTO',
      'et'    => 'ESTONIAN',
      'fi'    => 'FINNISH',
      'fr'    => 'FRENCH',
      'gl'    => 'GALICIAN',
      'ka'    => 'GEORGIAN',
      'de'    => 'GERMAN',
      'el'    => 'GREEK',
      'gn'    => 'GUARANI',
      'gu'    => 'GUJARATI',
      'he'    => 'HEBREW',
      'hi'    => 'HINDI',
      'hu'    => 'HUNGARIAN',
      'is'    => 'ICELANDIC',
      'id'    => 'INDONESIAN',
      'iu'    => 'INUKTITUT',
      'it'    => 'ITALIAN',
      'ja'    => 'JAPANESE',
      'kn'    => 'KANNADA',
      'kk'    => 'KAZAKH',
      'km'    => 'KHMER',
      'ko'    => 'KOREAN',
      'ku'    => 'KURDISH',
      'ky'    => 'KYRGYZ',
      'lo'    => 'LAOTHIAN',
      'lv'    => 'LATVIAN',
      'lt'    => 'LITHUANIAN',
      'mk'    => 'MACEDONIAN',
      'ms'    => 'MALAY',
      'ml'    => 'MALAYALAM',
      'mt'    => 'MALTESE',
      'mr'    => 'MARATHI',
      'mn'    => 'MONGOLIAN',
      'ne'    => 'NEPALI',
      'no'    => 'NORWEGIAN',
      'or'    => 'ORIYA',
      'ps'    => 'PASHTO',
      'fa'    => 'PERSIAN',
      'pl'    => 'POLISH',
      'pt-PT' => 'PORTUGUESE',
      'pa'    => 'PUNJABI',
      'ro'    => 'ROMANIAN',
      'ru'    => 'RUSSIAN',
      'sa'    => 'SANSKRIT',
      'sr'    => 'SERBIAN',
      'sd'    => 'SINDHI',
      'si'    => 'SINHALESE',
      'sk'    => 'SLOVAK',
      'sl'    => 'SLOVENIAN',
      'es'    => 'SPANISH',
      'sw'    => 'SWAHILI',
      'sv'    => 'SWEDISH',
      'tg'    => 'TAJIK',
      'ta'    => 'TAMIL',
      'tl'    => 'TAGALOG',
      'te'    => 'TELUGU',
      'th'    => 'THAI',
      'bo'    => 'TIBETAN',
      'tr'    => 'TURKISH',
      'uk'    => 'UKRAINIAN',
      'ur'    => 'URDU',
      'uz'    => 'UZBEK',
      'ug'    => 'UIGHUR',
      'vi'    => 'VIETNAMESE',
      ''      => 'UNKNOWN'
    }

    # True when +language+ is supported by Google Translate, either as a
    # code ('en') or as a language name in any case ('ENGLISH', 'english').
    def supported?(language)
      Languages.key?(language) || Languages.value?(language.upcase)
    end
    module_function :supported?

    # Returns the abbreviation (code) for a supported language given either
    # its code or its name (any case); nil when the language is unsupported.
    # Unlike the original implementation this does not mutate its argument
    # (the old code called `upcase!` on the caller's string) and cannot fall
    # through the lookup loop with a non-nil garbage value.
    def abbrev(language)
      return language if Languages.key?(language)

      Languages.key(language.upcase)
    end
    module_function :abbrev
  end
end
| 20.534247 | 70 | 0.412608 |
ed09dc61c62ea22be9ec212df9d7ccbd19b1972a | 48 | class Foo
# this is a comment with no end
end
| 12 | 33 | 0.708333 |
b9a31c86e81b8d84a6955b5ce8acaaf16b213a99 | 223 | require 'rspec'
require 'guard/compat/test/helper'
require 'guard/resque-pool'

# Run Guard in test mode for the duration of the suite.
ENV['GUARD_ENV'] = 'test'

RSpec.configure do |config|
  # Allow running a subset of examples tagged :focus; when nothing is
  # focused, fall back to running the whole suite.
  config.filter_run :focus => true
  config.run_all_when_everything_filtered = true
end
| 20.272727 | 48 | 0.762332 |
module CompletePaymentSystems
  # Wraps a CPS gateway XML response: extracts the result fields and verifies
  # the gateway's digital signature against the certificate bundled with
  # this gem.
  class Response
    attr_reader :xml, :response_hash, :code, :message

    # Elements extracted from the response document. resultMessage is the
    # authoritative outcome text; resultText is informative only.
    FIELDS = %w[referenceId orderId value currency resultCode resultMessage resultText digiSignature]

    # Elements folded, in this exact order, into the string the gateway signs.
    SIGNED_FIELDS = %w[referenceId orderId value currency resultCode resultMessage resultText]

    def initialize(xml)
      @xml = xml
      @response_hash = parse_response(@xml)
      @code = @response_hash["resultCode"]
      @message = @response_hash["resultMessage"]
    end

    # Successful when the signature verifies, the result code is "000" and
    # the result message matches the configured success pattern.
    def ok?
      signature_ok? && code == "000" && message.match(CPS.config.success_regex).present?
    end

    # Verifies the Base64-encoded digiSignature over the concatenated signed
    # fields, using the public key of the shipped CPS certificate.
    def signature_ok?
      signature = Base64.decode64(response_hash["digiSignature"])
      cert_path = "#{CPS.root}/lib/complete_payment_systems/certs/cps.cer"
      public_key = OpenSSL::X509::Certificate.new(File.read(cert_path)).public_key
      public_key.verify(OpenSSL::Digest::SHA1.new, signature, build_hashable_string)
    end

    private

    # Strictly parses +xml+ (no network access) and maps each field name to
    # the text of the matching element.
    def parse_response(xml)
      @response = Nokogiri::XML(xml) do |config|
        config.strict.nonet
      end
      FIELDS.each_with_object({}) do |name, fields|
        fields[name] = @response.xpath("//#{name}").text
      end
    end

    # The exact string the gateway signed: the signed fields concatenated
    # with no separator.
    def build_hashable_string
      SIGNED_FIELDS.map { |name| response_hash[name] }.join
    end
  end
end
acd0c0e0ce10be3120f742b3668624c028bb773d | 7,699 | require 'spec_helper'
# Specs for DropletKit::DropletResource against canned API fixtures; every
# HTTP call is stubbed, so these verify request shapes and response mapping
# only.
RSpec.describe DropletKit::DropletResource do
  subject(:resource) { described_class.new(connection: connection) }

  include_context 'resources'

  # There's a lot to check: asserts every mapped attribute of the droplet
  # fixture, including nested region/image/size/network/kernel objects.
  def check_droplet(droplet)
    expect(droplet.id).to eq(19)
    expect(droplet.name).to eq('test.example.com')
    expect(droplet.memory).to eq(1024)
    expect(droplet.vcpus).to eq(2)
    expect(droplet.disk).to eq(20)
    expect(droplet.locked).to eq(false)
    expect(droplet.status).to eq('active')
    expect(droplet.created_at).to be_present
    expect(droplet.backup_ids).to include(449676382)
    expect(droplet.snapshot_ids).to include(449676383)
    expect(droplet.action_ids).to be_empty
    expect(droplet.features).to include('ipv6')

    expect(droplet.region).to be_kind_of(DropletKit::Region)
    expect(droplet.region.slug).to eq('nyc1')
    expect(droplet.region.name).to eq('New York')
    expect(droplet.region.sizes).to include('1024mb', '512mb')
    expect(droplet.region.available).to be(true)
    expect(droplet.region.features).to include("virtio", "private_networking", "backups", "ipv6")

    expect(droplet.image).to be_kind_of(DropletKit::Image)
    expect(droplet.image.id).to eq(119192817)
    expect(droplet.image.name).to eq("Ubuntu 13.04")
    expect(droplet.image.distribution).to eq("ubuntu")
    expect(droplet.image.slug).to eq("ubuntu1304")
    expect(droplet.image.public).to eq(true)
    expect(droplet.image.regions).to include('nyc1')

    expect(droplet.size).to be_kind_of(DropletKit::Size)
    expect(droplet.size.slug).to eq("1024mb")
    expect(droplet.size.transfer).to eq(2)
    expect(droplet.size.price_monthly).to eq(10.0)
    expect(droplet.size.price_hourly).to eq(0.01488)

    expect(droplet.networks).to be_kind_of(DropletKit::NetworkHash)
    v4_network = droplet.networks.v4.first
    expect(v4_network.ip_address).to eq('127.0.0.19')
    expect(v4_network.netmask).to eq("255.255.255.0")
    expect(v4_network.gateway).to eq("127.0.0.20")
    expect(v4_network.type).to eq("public")

    v6_network = droplet.networks.v6.first
    expect(v6_network.ip_address).to eq('2001::13')
    expect(v6_network.gateway).to eq("2400:6180:0000:00D0:0000:0000:0009:7000")
    expect(v6_network.cidr).to eq(124)
    expect(v6_network.type).to eq("public")

    expect(droplet.kernel).to be_kind_of(DropletKit::Kernel)
    expect(droplet.kernel.id).to eq(485432985)
    expect(droplet.kernel.name).to eq("DO-recovery-static-fsck")
    expect(droplet.kernel.version).to eq("3.8.0-25-generic")
  end

  describe '#all' do
    it 'returns all of the droplets' do
      stub_do_api('/v2/droplets', :get).to_return(body: api_fixture('droplets/all'))
      droplets = resource.all
      expect(droplets).to all(be_kind_of(DropletKit::Droplet))

      check_droplet(droplets.first)
    end
  end

  describe '#find' do
    it 'returns a singular droplet' do
      stub_do_api('/v2/droplets/20', :get).to_return(body: api_fixture('droplets/find'))
      droplet = resource.find(id: 20)
      expect(droplet).to be_kind_of(DropletKit::Droplet)

      check_droplet(droplet)
    end
  end

  describe '#create' do
    context 'for a successful create' do
      it 'returns the created droplet' do
        droplet = DropletKit::Droplet.new(
          name: 'test.example.com',
          region: 'nyc1',
          size: '512mb',
          image: 'ubuntu-14-04-x86',
          ssh_keys: [123],
          backups: true,
          ipv6: true
        )

        # Verify the outgoing representation before stubbing the POST with it.
        as_hash = DropletKit::DropletMapping.representation_for(:create, droplet, NullHashLoad)
        expect(as_hash[:name]).to eq(droplet.name)
        expect(as_hash[:region]).to eq(droplet.region)
        expect(as_hash[:size]).to eq(droplet.size)
        expect(as_hash[:image]).to eq(droplet.image)
        expect(as_hash[:ssh_keys]).to eq(droplet.ssh_keys)
        expect(as_hash[:backups]).to eq(droplet.backups)
        expect(as_hash[:ipv6]).to eq(droplet.ipv6)

        as_string = DropletKit::DropletMapping.representation_for(:create, droplet)
        stub_do_api('/v2/droplets', :post).with(body: as_string).to_return(body: api_fixture('droplets/create'), status: 202)
        created_droplet = resource.create(droplet)
        check_droplet(created_droplet)
      end
    end

    context 'for an unsuccessful create' do
      it 'raises a FailedCreate exception with the message attached' do
        response_body = { id: :unprocessable_entity, message: 'Something is not right' }
        stub_do_api('/v2/droplets', :post).to_return(body: response_body.to_json, status: 422)

        expect { resource.create(DropletKit::Droplet.new) }.to raise_exception(DropletKit::FailedCreate).with_message(response_body[:message])
      end
    end
  end

  describe '#kernels' do
    it 'returns a list of kernels for a droplet' do
      stub_do_api('/v2/droplets/1066/kernels', :get).to_return(body: api_fixture('droplets/list_kernels'))
      kernels = resource.kernels(id: 1066)

      expect(kernels).to all(be_kind_of(DropletKit::Kernel))

      expect(kernels[0].id).to eq(61833229)
      expect(kernels[0].name).to eq('Ubuntu 14.04 x32 vmlinuz-3.13.0-24-generic')
      expect(kernels[0].version).to eq('3.13.0-24-generic')

      expect(kernels[1].id).to eq(485432972)
      expect(kernels[1].name).to eq('Ubuntu 14.04 x64 vmlinuz-3.13.0-24-generic (1221)')
      expect(kernels[1].version).to eq('3.13.0-24-generic')
    end
  end

  describe '#snapshots' do
    it 'returns a list of kernels for a droplet' do
      stub_do_api('/v2/droplets/1066/snapshots', :get).to_return(body: api_fixture('droplets/list_snapshots'))
      snapshots = resource.snapshots(id: 1066)

      expect(snapshots).to all(be_kind_of(DropletKit::Snapshot))

      expect(snapshots[0].id).to eq(449676387)
      expect(snapshots[0].name).to eq("Ubuntu 13.04")
      expect(snapshots[0].distribution).to eq("ubuntu")
      expect(snapshots[0].slug).to eq(nil)
      expect(snapshots[0].public).to eq(false)
      expect(snapshots[0].regions).to eq(["nyc1"])
      expect(snapshots[0].created_at).to eq("2014-07-29T14:35:38Z")
    end
  end

  describe '#backups' do
    it 'returns a list of backups for a droplet' do
      stub_do_api('/v2/droplets/1066/backups', :get).to_return(body: api_fixture('droplets/list_backups'))
      backups = resource.backups(id: 1066)

      expect(backups).to all(be_kind_of(DropletKit::Backup))

      expect(backups[0].id).to eq(449676388)
      expect(backups[0].name).to eq("Ubuntu 13.04")
      expect(backups[0].distribution).to eq("ubuntu")
      expect(backups[0].slug).to eq(nil)
      expect(backups[0].public).to eq(false)
      expect(backups[0].regions).to eq(["nyc1"])
      expect(backups[0].created_at).to eq("2014-07-29T14:35:38Z")
    end
  end

  describe '#actions' do
    it 'returns a list of actions for the droplet' do
      stub_do_api('/v2/droplets/1066/actions', :get).to_return(body: api_fixture('droplets/list_actions'))
      actions = resource.actions(id: 1066)

      expect(actions).to all(be_kind_of(DropletKit::Action))

      expect(actions[0].id).to eq(19)
      expect(actions[0].status).to eq("in-progress")
      expect(actions[0].type).to eq("create")
      expect(actions[0].started_at).to eq("2014-07-29T14:35:39Z")
      expect(actions[0].completed_at).to eq(nil)
      expect(actions[0].resource_id).to eq(24)
      expect(actions[0].resource_type).to eq("droplet")
      expect(actions[0].region).to eq("nyc1")
    end
  end

  describe '#delete' do
    it 'sends a delete request for the droplet' do
      request = stub_do_api('/v2/droplets/1066', :delete)
      resource.delete(id: 1066)

      expect(request).to have_been_made
    end
  end
end
| 39.482051 | 142 | 0.682167 |
016a2a7dfc26ef74f6a00514da8eabf6a98184d5 | 629 | # encoding: utf-8
# Shared examples for SCM-backed storages: they behave like SSH-based
# storages and transfer by opening a connection, initialising the repo,
# syncing files and committing.
shared_examples 'a subclass of Storage::SCMBase' do
  it_behaves_like 'a subclass of Storage::SSHBase'

  describe '#transfer!' do
    let(:connection) { mock }
    let(:ssh) { mock }
    let(:remote_path) { File.join('my/path') }

    before do
      storage.ip = '123.45.678.90'
      storage.path = 'my/path'
      connection.stubs(:ssh).returns(ssh)
    end

    it 'call abstract methods' do
      # transfer! is expected to delegate, in order, to the SCM-specific
      # hooks each concrete subclass implements.
      storage.expects(:connection).yields(connection)
      storage.expects(:init_repo)
      storage.syncer.expects(:perform!)
      storage.expects(:commit)

      storage.send(:transfer!)
    end
  end
end
| 20.290323 | 53 | 0.6407 |
7970e72176e1321c9daaf5b0b9312dec749ec583 | 352 | # frozen_string_literal: true
class RemoveUrlsFromCurrencies < ActiveRecord::Migration[4.2]
  # Drops the legacy URL-template columns from currencies. Each removal is
  # guarded by column_exists? so the migration is safe to run against a
  # partially migrated schema.
  def change
    %i[wallet_url_template transaction_url_template].each do |column|
      remove_column :currencies, column, :string if column_exists?(:currencies, column)
    end
  end
end
| 39.111111 | 122 | 0.8125 |
8777b9010b2c1c80b63d14cb522d22c8160726f9 | 523 | module Kashflow
class ApiMethod
attr_accessor :name, :request_attrs, :response_attrs
def initialize(name, fields)
@name = name
# split into request/response attrs
@request_attrs, @response_attrs = fields.partition{|f| f[:direction] == 'IN' }.map do |arr|
arr.map do |fields|
# get rid of the :direction and cleanup the description text
fields.slice!(:type, :desc, :name)
fields[:desc].try(:strip!)
fields
end
end
end
end
end
| 26.15 | 97 | 0.609943 |
f8c3d9f2eb39c5694fbfa7be22b5ae92aa5fbfcb | 1,653 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2020_04_18_192806) do

  # Courses that wizards can register for.
  create_table "courses", force: :cascade do |t|
    t.string "name"
    t.text "description"
    t.string "location"
    t.integer "maximum_participants"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
  end

  # Join between wizards and courses, also recording the chosen time slot and
  # a participant counter. NOTE(review): wizard_id/course_id are string
  # columns without foreign-key constraints — confirm this is intentional.
  create_table "registrations", force: :cascade do |t|
    t.string "wizard_id"
    t.string "course_id"
    t.string "time_slot"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.integer "current_participants"
  end

  # Application users; password_digest suggests has_secure_password is used
  # in the model — confirm there.
  create_table "wizards", force: :cascade do |t|
    t.string "email"
    t.string "password_digest"
    t.string "first_name"
    t.string "last_name"
    t.string "house"
    t.boolean "admin"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.string "UID"
  end

end
| 35.934783 | 86 | 0.727163 |
ab2eb4e9ecf2ac08b3d8a27074968f841b284e03 | 602 | #
# Copyright 2008 Blanton Black
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'glfw/glfw'
| 33.444444 | 74 | 0.757475 |
1c3487194c2030e5a4b5247b6942310827feccb7 | 118 | class Message < ApplicationRecord
belongs_to :user
belongs_to :conversation
validates_presence_of :content
end
| 16.857143 | 33 | 0.813559 |
e2f2c229f50195b1ec44852ad0752f7a26d42cdd | 4,937 | require 'puppet'
require 'yaml'
require 'json'
require 'uri'
require 'net/https'
require 'socket'
require 'timeout'

begin
  require 'carrier-pigeon'
rescue LoadError => e
  Puppet.info "You need the `carrier-pigeon` gem to use the IRC report"
end

# Compare versions numerically: the previous plain String comparison would,
# for example, consider '2.10.0' to be older than '2.6.5'.
unless Gem::Version.new(Puppet.version) >= Gem::Version.new('2.6.5')
  fail "This report processor requires Puppet version 2.6.5 or later"
end

Puppet::Reports.register_report(:irc) do

  configfile = File.join([File.dirname(Puppet.settings[:config]), "irc.yaml"])
  raise "IRC report config file #{configfile} not readable" unless File.exist?(configfile)
  CONFIG = YAML.load_file(configfile)

  desc <<-DESC
  Send notification of failed reports to an IRC channel and if configured create a Gist with the log output.
  DESC

  # Entry point invoked by Puppet for every report; notifies IRC only when
  # the run failed.
  def process
    if self.status == 'failed'
      output = []
      self.logs.each do |log|
        output << log
      end

      # Default the environment when the report doesn't carry one.
      # BUGFIX: the original code read `self.environment == 'production'`
      # inside an `if self.environment.nil?` — a comparison, not an
      # assignment — so the default was silently never applied.
      env = self.environment.nil? ? 'production' : self.environment

      CONFIG[:timeout] ||= 8

      message = "Puppet #{env} run for #{self.host} #{self.status} at #{Time.now.asctime}."

      if CONFIG[:github_user] && CONFIG[:github_password]
        gist_id = gist(self.host, output)
        message << " Created a Gist showing the output at #{gist_id}"
      end

      if CONFIG[:parsed_reports_dir]
        report_server = Socket.gethostname
        report_path = last_report
        message << " Summary at #{report_server}:#{report_path}"
      end

      if CONFIG[:report_url] && CONFIG[:report_url].is_a?(String)
        # %-placeholders substitutable into the configured report URL.
        map = {
          'c' => self.respond_to?(:configuration_version) ? self.configuration_version : nil,
          'e' => self.respond_to?(:environment) ? self.environment : nil,
          'h' => self.respond_to?(:host) ? self.host : nil,
          'k' => self.respond_to?(:kind) ? self.kind : nil,
          's' => self.respond_to?(:status) ? self.status : nil,
          't' => self.respond_to?(:time) ? self.time : nil,
          'v' => self.respond_to?(:puppet_version) ? self.puppet_version : nil,
        }
        message << " Report URL: "
        # BUGFIX: interpolating `map.keys` (an Array) into the character
        # class included quote/comma/bracket characters; join the keys so
        # only the intended placeholder letters match.
        message << CONFIG[:report_url].gsub(/%([#{map.keys.join}])/) { |s| map[$1].to_s }
      end

      max_attempts = 2
      begin
        # Kernel#timeout is deprecated/removed in modern Ruby; use the
        # module method explicitly.
        Timeout.timeout(CONFIG[:timeout]) do
          Puppet.debug "Sending status for #{self.host} to IRC."
          params = {
            :uri => CONFIG[:irc_server],
            :message => message,
            :ssl => CONFIG[:irc_ssl],
            :register_first => CONFIG[:irc_register_first],
            :join => CONFIG[:irc_join],
          }
          if CONFIG.has_key?(:irc_password)
            params[:channel_password] = CONFIG[:irc_password]
          end
          CarrierPigeon.send(params)
        end
      rescue Timeout::Error
        Puppet.notice "Failed to send report to #{CONFIG[:irc_server]} retrying..."
        max_attempts -= 1
        if max_attempts > 0
          retry
        else
          Puppet.err "Failed to send report to #{CONFIG[:irc_server]}"
        end
      end
    end
  end

  # Creates a private GitHub Gist containing the full log output and returns
  # its html_url (or logs an error on repeated timeout).
  def gist(host, output)
    env = self.environment.nil? ? 'production' : self.environment
    max_attempts = 2
    begin
      Timeout.timeout(CONFIG[:timeout]) do
        https = Net::HTTP.new('api.github.com', 443)
        https.use_ssl = true
        https.verify_mode = OpenSSL::SSL::VERIFY_NONE
        https.start {
          req = Net::HTTP::Post.new('/gists')
          req.basic_auth "#{CONFIG[:github_user]}", "#{CONFIG[:github_password]}"
          req.add_field("User-Agent", "#{CONFIG[:github_user]}")
          req.content_type = 'application/json'
          req.body = JSON.dump({
            "files" => { "#{host}-#{Time.now.to_i.to_s}" => { "content" => output.join("\n") } },
            "description" => "Puppet #{env} run failed on #{host} @ #{Time.now.asctime}",
            "public" => false
          })
          response = https.request(req)
          gist_id = JSON.parse(response.body)["html_url"]
        }
      end
    rescue Timeout::Error
      Puppet.notice "Timed out while attempting to create a GitHub Gist, retrying ..."
      max_attempts -= 1
      if max_attempts > 0
        retry
      else
        Puppet.err "Timed out while attempting to create a GitHub Gist."
      end
    end
  end

  # Writes a human-readable summary of the failed run to parsed_reports_dir
  # and returns the path of the file written.
  def last_report
    destfile = File.join([CONFIG[:parsed_reports_dir], self.host + '-' + rand.to_s])
    File.open(destfile, 'w+', 0644) do |f|
      f.puts("\n\n\n#### Report for #{self.name},\n")
      f.puts(" puppet run at #{self.time}:\n\n")
      self.resource_statuses.each do |resource, properties|
        if properties.failed
          f.puts "\n#{resource} failed:\n #{properties.file} +#{properties.line}\n"
        end
      end
      f.puts "\n\n#### Logs captured on the node:\n\n"
      self.logs.each do |log|
        f.puts log
      end
      f.puts "\n\n#### Summary:\n\n"
      f.puts self.summary
    end
    destfile
  end
end
| 32.058442 | 108 | 0.574843 |
217e70b8dba7b833dd01307933eb8acfb8b29315 | 4,054 | class Cms::Column::Value::UrlField2 < Cms::Column::Value::Base
  # Mongoid fields: HTML rendering hints plus the link itself. link_item_type /
  # link_item_id cache the internal content (page or node) the URL resolves to.
  field :html_tag, type: String
  field :html_additional_attr, type: String, default: ''
  field :link_url, type: String
  field :link_label, type: String
  field :link_target, type: String
  field :link_item_type, type: String
  field :link_item_id, type: Object

  permit_values :link_url, :link_label, :link_target

  # Resolve the linked internal content before validation, except on clones.
  before_validation :set_link_item, unless: ->{ @new_clone }

  # Values exposed to Liquid templates.
  liquidize do
    export :effective_link_url, as: :link_url
    export :effective_link_label, as: :link_label
    export :link_target
  end

  # Parses html_additional_attr (key="value" pairs) into a Hash and merges in
  # the link target when present.
  def html_additional_attr_to_h
    attrs = {}
    if html_additional_attr.present?
      attrs = html_additional_attr.scan(/\S+?=".+?"/m).
        map { |s| s.split(/=/).size == 2 ? s.delete('"').split(/=/) : nil }.
        compact.to_h
    end
    if link_target.present?
      attrs[:target] = link_target
    end
    attrs
  end

  # The URL to render: the explicit link_url, falling back to the resolved
  # internal item's URL.
  def effective_link_url
    link_url.presence || link_item.try(:url)
  end

  # The label to render: the explicit link_label, falling back to the resolved
  # internal item's name.
  def effective_link_label
    link_label.presence || link_item.try(:name)
  end

  # Imports link_url / link_label from a localized CSV row.
  def import_csv(values)
    super

    values.map do |name, value|
      case name
      when self.class.t(:link_url)
        self.link_url = value
      when self.class.t(:link_label)
        self.link_label = value
      end
    end
  end

  private

  # Resolves link_url to an internal page/node of the current site and caches
  # it in link_item_type / link_item_id; clears the cache for external links
  # or a blank URL.
  def set_link_item
    if link_url.blank?
      self.link_item_type = nil
      self.link_item_id = nil
      remove_instance_variable :@link_item if defined? @link_item
      return
    end

    site = _parent.site || _parent.instance_variable_get(:@cur_site)

    u = URI.parse(link_url)
    if u.relative?
      # Relative URLs are resolved against the parent node (or the site root).
      node = _parent.parent
      base_url = node ? node.full_url : site.full_url
      u = URI.join(base_url, link_url)
    end

    searches = [ "#{u.host}:#{u.port}" ]
    if u.port == 80 || u.port == 443
      searches << u.host
    end
    if site.domains.any? { |domain| searches.include?(domain) }
      # internal link
      filename = u.path[1..-1]
      content = Cms::Page.site(site).where(filename: filename).first
      content ||= Cms::Node.site(site).where(filename: filename).first
      if content.present?
        self.link_item_type = content.collection_name.to_s
        self.link_item_id = content.id
        remove_instance_variable :@link_item if defined? @link_item
        return
      end
    end

    # external link
    self.link_item_type = nil
    self.link_item_id = nil
    remove_instance_variable :@link_item if defined? @link_item
  end

  # Memoized lookup of the cached internal content (page or node); nil when
  # the link is external or unresolved.
  def link_item
    return if link_item_type.blank? || link_item_id.blank?
    return @link_item if defined? @link_item

    @link_item ||= begin
      site = _parent.site || _parent.instance_variable_get(:@cur_site)
      case link_item_type
      when "cms_pages"
        Cms::Page.site(site).where(id: link_item_id).first.try(:becomes_with_route)
      when "cms_nodes"
        Cms::Node.site(site).where(id: link_item_id).first.try(:becomes_with_route)
      end
    end
  end

  # Validates presence (when the column requires it) and the configured
  # maximum lengths for label and URL.
  def validate_value
    return if column.blank?

    if column.required? && effective_link_url.blank?
      self.errors.add(:link_url, :blank)
    end

    if link_label.present? && column.label_max_length.present? && column.label_max_length > 0
      if link_label.length > column.label_max_length
        self.errors.add(:link_label, :too_long, count: column.label_max_length)
      end
    end

    if link_url.present? && column.link_max_length.present? && column.link_max_length > 0
      if link_url.length > column.link_max_length
        self.errors.add(:link_url, :too_long, count: column.link_max_length)
      end
    end
  end

  # Copies rendering settings down from the column definition.
  def copy_column_settings
    super

    return if column.blank?

    self.html_tag = column.html_tag
    self.html_additional_attr = column.html_additional_attr
  end

  # Default rendering: an anchor tag, or an empty string when there is no URL.
  def to_default_html
    return '' if effective_link_url.blank?

    options = html_additional_attr_to_h
    ApplicationController.helpers.link_to(effective_link_label.presence || effective_link_url, effective_link_url, options)
  end
end
| 26.671053 | 123 | 0.675876 |
module Almanack
  module Representation
    # Converts an Almanack event into an Icalendar::Event.
    class BuiltIcalEvent
      attr_reader :event

      # Convenience constructor: build and return the iCal event directly.
      def self.for(event)
        new(event).ical_event
      end

      def initialize(event)
        @event = event
      end

      # Lazily builds (and memoizes) the Icalendar::Event representation.
      def ical_event
        @ical_event ||= build!
      end

      private

      def build!
        @ical_event = Icalendar::Event.new
        [:set_summary, :set_start_time, :set_end_time, :set_description, :set_location].each do |step|
          send(step)
        end
        ical_event
      end

      def set_summary
        ical_event.summary = event.title
      end

      # Date values are passed through as-is (all-day semantics); Time values
      # are normalised to UTC.
      def set_start_time
        start = event.start_time
        ical_event.dtstart = start.is_a?(Icalendar::Values::Date) ? start : start.utc
      end

      # A missing end time falls back to start + default duration.
      def set_end_time
        finish = event.end_time
        if finish.is_a?(Icalendar::Values::Date)
          ical_event.dtend = finish
        else
          ical_event.dtend = (finish || event.start_time + default_event_duration).utc
        end
      end

      def set_description
        return unless event.description
        ical_event.description = event.description
      end

      def set_location
        return unless event.location
        ical_event.location = event.location
      end

      def default_event_duration
        # Three hours is the duration for events missing end dates, a
        # recommendation suggested by Meetup.com.
        3 * ONE_HOUR
      end
    end
  end
end
| 22.045455 | 95 | 0.612371 |
21bb643db3ad75a13cad3b0f92e4f113141d6265 | 841 | require 'spec_helper'
module CloudController::Errors
  # Details looks up an error's metadata (HTTP status, numeric code, message
  # template) by its registered name.
  RSpec.describe Details do
    describe '.new(name)' do
      let(:name) { 'DomainInvalid' }

      subject(:details) do
        Details.new(name)
      end

      it 'knows the error name' do
        expect(details.name).to eq('DomainInvalid')
      end

      it 'knows the error http_code' do
        expect(details.response_code).to eq(400)
      end

      it 'knows code' do
        expect(details.code).to eq(130001)
      end

      it 'knows the error message_format' do
        expect(details.message_format).to eq('The domain is invalid: %s')
      end
    end

    describe '.new(name) with an invalid code' do
      let(:name) { 'invalid name' }

      # Unknown names are rejected eagerly at construction time.
      it 'blows up immeditately' do
        expect { Details.new(name) }.to raise_error(KeyError)
      end
    end
  end
end
| 22.131579 | 73 | 0.612366 |
03ec6020c304f06faffb262a5467d972f59231fa | 4,982 | require "abstract_unit"
class ExecutorTest < ActiveSupport::TestCase
class DummyError < RuntimeError
end
def test_wrap_invokes_callbacks
called = []
executor.to_run { called << :run }
executor.to_complete { called << :complete }
executor.wrap do
called << :body
end
assert_equal [:run, :body, :complete], called
end
def test_callbacks_share_state
result = false
executor.to_run { @foo = true }
executor.to_complete { result = @foo }
executor.wrap {}
assert result
end
def test_separated_calls_invoke_callbacks
called = []
executor.to_run { called << :run }
executor.to_complete { called << :complete }
state = executor.run!
called << :body
state.complete!
assert_equal [:run, :body, :complete], called
end
def test_exceptions_unwind
called = []
executor.to_run { called << :run_1 }
executor.to_run { raise DummyError }
executor.to_run { called << :run_2 }
executor.to_complete { called << :complete }
assert_raises(DummyError) do
executor.wrap { called << :body }
end
assert_equal [:run_1, :complete], called
end
def test_avoids_double_wrapping
called = []
executor.to_run { called << :run }
executor.to_complete { called << :complete }
executor.wrap do
called << :early
executor.wrap do
called << :body
end
called << :late
end
assert_equal [:run, :early, :body, :late, :complete], called
end
def test_hooks_carry_state
supplied_state = :none
hook = Class.new do
define_method(:run) do
:some_state
end
define_method(:complete) do |state|
supplied_state = state
end
end.new
executor.register_hook(hook)
executor.wrap {}
assert_equal :some_state, supplied_state
end
def test_nil_state_is_sufficient
supplied_state = :none
hook = Class.new do
define_method(:run) do
nil
end
define_method(:complete) do |state|
supplied_state = state
end
end.new
executor.register_hook(hook)
executor.wrap {}
assert_equal nil, supplied_state
end
def test_exception_skips_uninvoked_hook
supplied_state = :none
hook = Class.new do
define_method(:run) do
:some_state
end
define_method(:complete) do |state|
supplied_state = state
end
end.new
executor.to_run do
raise DummyError
end
executor.register_hook(hook)
assert_raises(DummyError) do
executor.wrap {}
end
assert_equal :none, supplied_state
end
# Mirror of the previous test: here the hook is registered *before* the
# raising to_run callback, so the hook's #run has already fired and its
# #complete must be invoked to unwind (receiving #run's state).
def test_exception_unwinds_invoked_hook
  supplied_state = :none
  hook = Class.new do
    define_method(:run) do
      :some_state
    end
    define_method(:complete) do |state|
      supplied_state = state
    end
  end.new
  executor.register_hook(hook)
  executor.to_run do
    raise DummyError
  end
  assert_raises(DummyError) do
    executor.wrap {}
  end
  assert_equal :some_state, supplied_state
end
# Hooks run and complete in registration order, except hooks registered
# with outer: true, which run before all others and complete after them
# (see the expected :run_c-first / :complete_c-last ordering below).
def test_hook_insertion_order
  invoked = []
  supplied_state = []
  hook_class = Class.new do
    attr_accessor :letter
    define_method(:initialize) do |letter|
      self.letter = letter
    end
    define_method(:run) do
      invoked << :"run_#{letter}"
      :"state_#{letter}"
    end
    define_method(:complete) do |state|
      invoked << :"complete_#{letter}"
      supplied_state << state
    end
  end
  executor.register_hook(hook_class.new(:a))
  executor.register_hook(hook_class.new(:b))
  # :c is an outer hook despite being registered third.
  executor.register_hook(hook_class.new(:c), outer: true)
  executor.register_hook(hook_class.new(:d))
  executor.wrap {}
  assert_equal [:run_c, :run_a, :run_b, :run_d, :complete_a, :complete_b, :complete_d, :complete_c], invoked
  assert_equal [:state_a, :state_b, :state_d, :state_c], supplied_state
end
# Wrapping with a registered hook must not invalidate MRI's method caches
# (i.e. must not define singleton classes per call).
def test_class_serial_is_unaffected
  # RubyVM is MRI-only; absent on JRuby/TruffleRuby.
  skip unless defined?(RubyVM)
  # NOTE(review): RubyVM.stat(:class_serial) was removed in Ruby 3.2 when
  # the global class serial became per-class; this test may need a guard
  # on newer rubies — confirm against the supported Ruby range.
  hook = Class.new do
    define_method(:run) do
      nil
    end
    define_method(:complete) do |state|
      nil
    end
  end.new
  executor.register_hook(hook)
  before = RubyVM.stat(:class_serial)
  executor.wrap {}
  executor.wrap {}
  executor.wrap {}
  after = RubyVM.stat(:class_serial)
  assert_equal before, after
end
# Two distinct Executor subclasses can nest their wraps; each fires its
# own callbacks independently, innermost completing first.
def test_separate_classes_can_wrap
  inner_executor = Class.new(ActiveSupport::Executor)
  events = []
  executor.to_run { events << :run }
  executor.to_complete { events << :complete }
  inner_executor.to_run { events << :other_run }
  inner_executor.to_complete { events << :other_complete }
  executor.wrap do
    inner_executor.wrap do
      events << :body
    end
  end
  assert_equal [:run, :other_run, :body, :other_complete, :complete], events
end
private

  # Memoized anonymous Executor subclass; a fresh class per test instance
  # so callbacks and hooks registered in one test do not leak into others.
  def executor
    @executor ||= Class.new(ActiveSupport::Executor)
  end
end
| 20.672199 | 110 | 0.642714 |
39640a5bea22ec86a8dc8f60b5b7a15cd6163adc | 1,815 | module SystemBrowser
class RequestProcessor
  # Maps the client's request action to the response action sent back.
  # Unknown actions fall back to 'add' via the Hash default.
  ACTIONS = {
    'get' => 'add',
    'autoget' => 'autoadd'
  }.tap { |h| h.default = 'add' }

  # @param request [#parse] the raw client request, parsed on demand
  # @param session [#send, #set_client_pid] transport used to reply
  def initialize(request:, session:)
    @request = request
    @session = session
    # Candidate services; dispatch keys off Service.service_name
    # (see #find_service_for).
    @services = [
      Services::GemService,
      Services::BehaviourService,
      Services::MethodService,
      Services::SourceService
    ]
  end

  # Entry point: parse the request, then either record the client PID or
  # dispatch the request to one of the registered services.
  def process
    @request.parse
    if @request.sets_client_pid?
      @session.set_client_pid(@request.client_pid)
    else
      self.process_services
    end
  end

  protected

  # Resolves the service for the requested resource, invokes the requested
  # action on it, and replies with a Response over the session.
  def process_services
    service = self.find_service_for(@request.resource).new(
      data: @request.scope,
      other: @request.other)
    data = service.__send__(@request.action)
    data = self.replace_weird_characters(data) if data.instance_of?(String)
    action = self.process_action
    scope = self.process_scope(action)
    # NOTE(review): this assumes `data` responds to []= (a Hash) whenever
    # scope is empty; a String result (post replace_weird_characters)
    # would raise here — confirm services never return bare strings for
    # scoped requests.
    data[:behaviour] = @request.scope if scope.empty?
    action_str = "#{action}:#{@request.resource}:#{scope}"
    response = Response.new(action: action_str, data: data)
    response.set_callback_id(@request.callback_id)
    # Session#send here is the session's transport method, not Object#send.
    @session.send(response)
  end

  # 'get' → 'add', 'autoget' → 'autoadd', anything else → 'add'.
  def process_action
    ACTIONS[@request.action]
  end

  # 'autoadd' responses carry no scope; everything else echoes the
  # request's scope.
  def process_scope(action)
    case action
    when 'add' then @request.scope
    when 'autoadd' then ''
    else @request.scope
    end
  end

  # First registered service whose service_name matches the resource.
  # NOTE(review): returns nil for unknown resources, so the `.new` call in
  # #process_services would raise NoMethodError.
  def find_service_for(req_service)
    @services.find { |service| service.service_name == req_service }
  end

  ##
  # Temporary hack before we support weird characters for real:
  # reinterpret the bytes as binary and drop anything that cannot be
  # mapped to UTF-8. Beware: force_encoding mutates the receiver's
  # encoding in place.
  def replace_weird_characters(str)
    ascii_str = str.force_encoding('ASCII-8BIT')
    ascii_str.encode('UTF-8', undef: :replace, replace: '')
  end
end
end
| 24.2 | 77 | 0.633609 |
ac54b18fd3e56db3ecb567e675ce0374aab1472e | 1,045 | # frozen_string_literal: true
module RuboCop
  module Cop
    module Sequel
      # Flags JSON and hstore column definitions in Sequel migrations and
      # suggests JSONB, which supports indexing and containment operators
      # in PostgreSQL.
      # NOTE(review): `location: :selector` and subclassing `Cop` directly
      # are deprecated in modern RuboCop (use Cop::Base); left as-is to
      # match the RuboCop version this cop targets.
      class JSONColumn < Cop
        MSG = 'Use JSONB rather than JSON or hstore'

        # add_column(..., :json) / add_column(..., :hstore)
        def_node_matcher :json_or_hstore?, <<-MATCHER
          (send _ :add_column ... (sym {:json :hstore}))
        MATCHER

        # json(...) / hstore(...) inside a create_table block
        def_node_matcher :column_type?, <<-MATCHER
          (send _ {:json :hstore} ...)
        MATCHER

        # column(..., :json) / column(..., :hstore)
        def_node_matcher :column_method?, <<-MATCHER
          (send _ :column ... (sym {:json :hstore}))
        MATCHER

        def on_send(node)
          return unless json_or_hstore?(node)

          add_offense(node, location: :selector, message: MSG)
        end

        def on_block(node)
          return unless node.send_node.method_name == :create_table

          # Renamed from `method`, which shadowed Kernel#method.
          node.each_node(:send) do |send_node|
            next unless column_method?(send_node) || column_type?(send_node)

            add_offense(send_node, location: :selector, message: MSG)
          end
        end
      end
    end
  end
end
| 27.5 | 70 | 0.594258 |
acaa4233057f83d995d7867f4dac524c95e03f58 | 1,629 | # frozen_string_literal: false
require 'spec_helper'
RSpec.describe Admin::Tag, type: :model do
  let(:valid_attributes) do
    {
      scope: 'value for scope',
      tag: 'value for tag'
    }
  end

  # A blank tag must fail validation regardless of scope.
  let(:bad_attributes) do
    {
      scope: 'no blank tags',
      tag: ''
    }
  end

  it 'should create a new instance given valid attributes' do
    Admin::Tag.create!(valid_attributes)
  end

  it 'should not create a new instance given bad attributes' do
    expect(Admin::Tag.create(bad_attributes)).not_to be_valid
  end

  # TODO: auto-generated — smoke tests only; they assert non-nil results
  # rather than specific behavior and should be fleshed out.
  describe '.searchable_attributes' do
    it 'searchable_attributes' do
      result = described_class.searchable_attributes
      expect(result).not_to be_nil
    end
  end

  # TODO: auto-generated
  describe '.search' do
    it 'search' do
      options = {}
      result = described_class.search(options, 1, nil)
      expect(result).not_to be_nil
    end
  end

  # TODO: auto-generated
  describe '.fetch_tag' do
    it 'fetch_tag' do
      options = {}
      result = described_class.fetch_tag(options)
      expect(result).not_to be_nil
    end
  end

  # TODO: auto-generated — pending (xit): doubles don't satisfy the real
  # method contract yet.
  describe '.add_new_admin_tags' do
    xit 'add_new_admin_tags' do
      taggable = double('taggable')
      tag_type = double('tag_type')
      tag_list = double('tag_list')
      result = described_class.add_new_admin_tags(taggable, tag_type, tag_list)
      expect(result).not_to be_nil
    end
  end

  # TODO: auto-generated
  describe '#name' do
    it 'name' do
      tag = described_class.new
      result = tag.name
      expect(result).not_to be_nil
    end
  end
end
| 21.155844 | 79 | 0.658686 |
7ac899fa95697d61fdf917223ef68e448ceb0188 | 1,615 | #! /usr/bin/env ruby
#
# beanstalkd-metrics
#
# DESCRIPTION:
# This plugin checks the beanstalkd stats, using the beaneater gem
#
# OUTPUT:
# metric-data
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: beaneater
# gem: sensu-plugin
#
# USAGE:
# #YELLOW
#
# NOTES:
#
# LICENSE:
# Copyright 2014 99designs, Inc <[email protected]>
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
#
require 'sensu-plugin/metric/cli'
require 'json'
require 'beaneater'
#
# Checks the queue levels
#
class BeanstalkdMetrics < Sensu::Plugin::Metric::CLI::Graphite
  # FIXME(review): '-s' is declared for both :server (here) and :scheme
  # (below). With mixlib-cli the later definition wins the short flag, so
  # '-s' most likely sets the scheme and :server has no working short
  # option — use --server, or give one of the two a different letter.
  option :server,
         description: 'beanstalkd server',
         short: '-s SERVER',
         long: '--server SERVER',
         default: 'localhost'

  option :port,
         description: 'beanstalkd server port',
         short: '-p PORT',
         long: '--port PORT',
         default: '11300'

  option :scheme,
         description: 'Metric naming scheme, text to prepend to metric',
         short: '-s SCHEME',
         long: '--scheme SCHEME',
         default: "#{Socket.gethostname}.beanstalkd"

  # Connects to the configured beanstalkd instance. On failure, `warning`
  # aborts the check with a WARNING status (it raises, so the trailing
  # `conn` is only reached on success).
  # NOTE(review): Beaneater::Pool was removed in beaneater 1.0; this code
  # requires beaneater < 1.0 — confirm the pinned gem version.
  def acquire_beanstalkd_connection
    begin
      conn = Beaneater::Pool.new(["#{config[:server]}:#{config[:port]}"])
    rescue StandardError
      warning 'could not connect to beanstalkd'
    end
    conn
  end

  # Emits every beanstalkd stat as "<scheme>.<stat> <value>" in Graphite
  # format, skipping the non-numeric version string.
  def run
    stats = acquire_beanstalkd_connection.stats
    stats.keys.sort.each do |key|
      next if key == 'version' # The version X.Y.Z is not a number
      output "#{config[:scheme]}.#{key}", stats.public_send(key)
    end
    ok
  end
end
| 21.533333 | 73 | 0.619814 |
1d4ef0ce16392b4ae8b40578aa1ec3db7b1be80b | 621 | class CreateAutomationSeriesEmailTemplates < ActiveRecord::Migration
# Creates the join/config table linking an automation series to an email
# template, with per-template defaults for recipient type, trigger type
# and trigger delay (in days). Column order is part of the resulting
# schema, so do not reorder these definitions.
def change
  create_table :automation_series_email_templates do |t|
    # Index names are given explicitly — the auto-generated names for
    # this long table name would likely exceed the identifier length
    # limit (TODO confirm against the target database).
    t.references :automation_series, index: {name: "automation_series_id"}, foreign_key: true
    t.references :email_template, index: {name: "email_template_id"}, foreign_key: true
    # NOTE(review): "receipient" is a misspelling of "recipient"; renaming
    # now would require a follow-up migration and model changes.
    t.references :default_receipient_type, index: {name: "receipient_id"}, foreign_key: true
    t.references :default_trigger_type, index: {name: "trigger_id"}, foreign_key: true
    t.integer :trigger_days
    t.string :template_name
    t.timestamps null: false
  end
end
| 41.4 | 95 | 0.743961 |
7a373fb6e581d0183cf7136951b247e545c78f2d | 646 | module Spree
module Admin
  # Admin CRUD for carousel items nested under a HeroCarousel.
  class HeroCarouselItemsController < ResourceController
    PERMITTED_ATTRIBUTES = [
      :id, :hero_carousel_id, :link, :large_image,
      :small_image, :html, :active, :position
    ].freeze

    before_action :load_hero_carousel, except: [:create, :update]

    # After save, return to the item list of the parent carousel.
    def location_after_save
      load_hero_carousel
      spree.admin_hero_carousel_hero_carousel_items_url(@hero_carousel)
    end

    private

    def permitted_resource_params
      params.require(:hero_carousel_item).permit(*PERMITTED_ATTRIBUTES)
    end

    def load_hero_carousel
      @hero_carousel = Spree::HeroCarousel.find(params[:hero_carousel_id])
    end
  end
end
end
| 24.846154 | 104 | 0.698142 |
03141e6fe5f403c26d2b9325b87358115986a4c4 | 6,772 | require 'manageiq/providers/openstack/legacy/openstack_handle/handle'
require 'fog/openstack'
describe OpenstackHandle::Handle do
  # Silence fog logging for each example, restoring the original logger
  # afterwards ($fog_log is a process-global).
  before do
    @original_log = $fog_log
    $fog_log = double.as_null_object
  end
  after do
    $fog_log = @original_log
  end

  # IPv6 literals must be bracketed in the generated auth URL.
  it ".auth_url" do
    expect(described_class.auth_url("::1")).to eq "http://[::1]:5000"
  end

  context "errors from services" do
    before do
      @openstack_svc = double('network_service')
      @openstack_project = double('project')
      @handle = OpenstackHandle::Handle.new("dummy", "dummy", "dummy")
      allow(@handle).to receive(:service_for_each_accessible_tenant).and_return([[@openstack_svc, @openstack_project]])
    end

    # A 404 from one tenant's service must not abort collection; the
    # accessor simply returns no data.
    it "ignores 404 errors from services" do
      expect(@openstack_svc).to receive(:security_groups).and_raise(Fog::Network::OpenStack::NotFound)
      data = @handle.accessor_for_accessible_tenants("Network", :security_groups, :id)
      expect(data).to be_empty
    end

    # Same as above, but the 404 surfaces lazily when the collection is
    # materialized (to_a) rather than on the accessor call itself.
    it "ignores 404 errors from services returning arrays" do
      security_groups = double("security_groups").as_null_object
      expect(security_groups).to receive(:to_a).and_raise(Fog::Network::OpenStack::NotFound)
      expect(@openstack_svc).to receive(:security_groups).and_return(security_groups)
      data = @handle.accessor_for_accessible_tenants("Network", :security_groups, :id)
      expect(data).to be_empty
    end
  end

  # Each example below pins the exact arguments Handle#connect forwards to
  # .raw_connect for a given security-protocol setting; the `.once do`
  # blocks double-check the computed auth URL and return the fog stub.
  context "supports ssl" do
    # No protocol given: defaults to https with peer verification off.
    it "handles default ssl type connections just fine" do
      fog = double('fog')
      handle = OpenstackHandle::Handle.new("dummy", "dummy", "address")
      auth_url = OpenstackHandle::Handle.auth_url("address", 5000, "https")
      expect(OpenstackHandle::Handle).to receive(:raw_connect).with(
        "dummy",
        "dummy",
        "https://address:5000",
        "Compute",
        :openstack_tenant => "admin",
        :openstack_identity_api_version => 'v2.0',
        :openstack_region => nil,
        :connection_options => {:ssl_verify_peer => false}
      ).once do |_, _, address|
        expect(address).to eq(auth_url)
        fog
      end
      expect(handle.connect(:openstack_project_name => "admin")).to eq(fog)
    end

    # 'non-ssl' yields a plain http URL and no connection options.
    it "handles non ssl connections just fine" do
      fog = double('fog')
      handle = OpenstackHandle::Handle.new("dummy", "dummy", "address", 5000, 'v2', 'non-ssl')
      auth_url = OpenstackHandle::Handle.auth_url("address", 5000, "http")
      expect(OpenstackHandle::Handle).to receive(:raw_connect).with(
        "dummy",
        "dummy",
        "http://address:5000",
        "Compute",
        :openstack_tenant => "admin",
        :openstack_identity_api_version => 'v2.0',
        :openstack_region => nil,
        :connection_options => {}
      ).once do |_, _, address|
        expect(address).to eq(auth_url)
        fog
      end
      expect(handle.connect(:openstack_project_name => "admin")).to eq(fog)
    end

    # 'ssl' uses https but still skips peer verification.
    it "handles ssl connections just fine, too" do
      fog = double('fog')
      handle = OpenstackHandle::Handle.new("dummy", "dummy", "address", 5000, 'v2', 'ssl')
      auth_url_ssl = OpenstackHandle::Handle.auth_url("address", 5000, "https")
      expect(OpenstackHandle::Handle).to receive(:raw_connect).with(
        "dummy",
        "dummy",
        "https://address:5000",
        "Compute",
        :openstack_tenant => "admin",
        :openstack_identity_api_version => 'v2.0',
        :openstack_region => nil,
        :connection_options => {:ssl_verify_peer => false}
      ) do |_, _, address|
        expect(address).to eq(auth_url_ssl)
        fog
      end
      expect(handle.connect(:tenant_name => "admin")).to eq(fog)
    end

    # 'ssl-with-validation' turns peer verification on.
    it "handles ssl with validation connections just fine, too" do
      fog = double('fog')
      handle = OpenstackHandle::Handle.new("dummy", "dummy", "address", 5000, 'v2', 'ssl-with-validation')
      auth_url_ssl = OpenstackHandle::Handle.auth_url("address", 5000, "https")
      expect(OpenstackHandle::Handle).to receive(:raw_connect).with(
        "dummy",
        "dummy",
        "https://address:5000",
        "Compute",
        :openstack_tenant => "admin",
        :openstack_identity_api_version => 'v2.0',
        :openstack_region => nil,
        :connection_options => {:ssl_verify_peer => true}
      ) do |_, _, address|
        expect(address).to eq(auth_url_ssl)
        fog
      end
      expect(handle.connect(:tenant_name => "admin")).to eq(fog)
    end

    # Extra CA options are merged into connection_options verbatim.
    it "handles ssl passing of extra params validation connections just fine, too" do
      fog = double('fog')
      extra_options = {
        :ssl_ca_file => "file",
        :ssl_ca_path => "path",
        :ssl_cert_store => "store_obj"
      }
      expected_options = {
        :openstack_tenant => "admin",
        :openstack_identity_api_version => 'v2.0',
        :openstack_region => nil,
        :connection_options => {
          :ssl_verify_peer => true,
          :ssl_ca_file => "file",
          :ssl_ca_path => "path",
          :ssl_cert_store => "store_obj"
        }
      }
      handle = OpenstackHandle::Handle.new("dummy", "dummy", "address", 5000, 'v2', 'ssl-with-validation', extra_options)
      auth_url_ssl = OpenstackHandle::Handle.auth_url("address", 5000, "https")
      expect(OpenstackHandle::Handle).to receive(:raw_connect).with(
        "dummy",
        "dummy",
        "https://address:5000",
        "Compute",
        expected_options
      ) do |_, _, address|
        expect(address).to eq(auth_url_ssl)
        fog
      end
      expect(handle.connect(:tenant_name => "admin")).to eq(fog)
    end
  end

  context "supports regions" do
    # A :region option is forwarded as :openstack_region.
    it "handles connections with region just fine" do
      fog = double('fog')
      handle = OpenstackHandle::Handle.new("dummy", "dummy", "address", 5000, 'v2', 'non-ssl', :region => 'RegionOne')
      auth_url = OpenstackHandle::Handle.auth_url("address", 5000, "http")
      expect(OpenstackHandle::Handle).to receive(:raw_connect).with(
        "dummy",
        "dummy",
        "http://address:5000",
        "Compute",
        :openstack_tenant => "admin",
        :openstack_identity_api_version => 'v2.0',
        :openstack_region => 'RegionOne',
        :connection_options => {}
      ).once do |_, _, address|
        expect(address).to eq(auth_url)
        fog
      end
      expect(handle.connect(:openstack_project_name => "admin")).to eq(fog)
    end
  end
end
| 35.088083 | 127 | 0.590224 |
114d3da461b93f0e088f061416cd2018bff595c3 | 1,782 | module SuperDiff
module RSpec
module OperationTreeBuilders
# Builds the operation tree used to diff an `an_object_having_attributes`
# matcher (the expected side) against the actual object it was matched
# against.
class ObjectHavingAttributes < SuperDiff::OperationTreeBuilders::DefaultObject
  def self.applies_to?(expected, _actual)
    SuperDiff::RSpec.an_object_having_some_attributes?(expected)
  end

  protected

  # The tree type is chosen from the actual object's class, not from the
  # matcher wrapper.
  def build_operation_tree
    find_operation_tree_for(actual)
  end

  # Union of the actual object's advertised attributes (when it opts in
  # via #attributes_for_super_diff) and the attribute names the matcher
  # specified.
  def attribute_names
    if actual.respond_to?(:attributes_for_super_diff)
      actual.attributes_for_super_diff.keys | expected.expected.keys
    else
      expected.expected.keys
    end
  end

  private

  # Populates @expected_attributes / @actual_attributes consumed by the
  # predicates below. NOTE(review): the bare `expected_attributes` /
  # `actual_attributes` readers are presumably defined by the superclass
  # — not visible in this file; confirm.
  def establish_expected_and_actual_attributes
    @expected_attributes = attribute_names.reduce({}) do |hash, name|
      if expected.expected.include?(name)
        hash.merge(name => expected.expected[name])
      else
        hash
      end
    end
    @actual_attributes = attribute_names.reduce({}) do |hash, name|
      if actual.respond_to?(name)
        hash.merge(name => actual.public_send(name))
      else
        hash
      end
    end
  end

  # Unchanged ("noop") when the matcher didn't mention the attribute, or
  # when both sides have it with equal values.
  def should_add_noop_operation?(attribute_name)
    !expected_attributes.include?(attribute_name) || (
      actual_attributes.include?(attribute_name) &&
        expected_attributes[attribute_name] == actual_attributes[attribute_name]
    )
  end

  # Insert when both sides have the attribute but the values differ.
  def should_add_insert_operation?(attribute_name)
    expected_attributes.include?(attribute_name) &&
      actual_attributes.include?(attribute_name) &&
      expected_attributes[attribute_name] != actual_attributes[attribute_name]
  end
end
end
end
end
| 30.20339 | 84 | 0.621212 |
1810c0f1947aae722c0df01e5bd5fbb5317377a4 | 1,323 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
require_relative 'workspace_request'
module Typeform
# PATCH request that applies a batch of update operations to a workspace.
class UpdateWorkspaceRequest < WorkspaceRequest
  def initialize(workspace, operations, token: APIConfig.token)
    endpoint = "#{APIConfig.workspaces_api_request_url}/#{workspace.id}"
    auth_headers = {
      'Authorization' => "Bearer #{token}",
      'Content-Type' => 'application/json'
    }
    body = operations.map(&:payload).to_json

    request(method: :patch, url: endpoint, headers: auth_headers, payload: body)
  end

  # The API acknowledges a successful update with 204 No Content.
  def success?
    @response.code == 204
  end
end
end
| 33.923077 | 71 | 0.712774 |
01b80d566f13e1384fe524a3e1c59b61a2765fe2 | 5,150 | # Copyright 2011-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
module AWS
class S3
# Represents a collection of buckets.
#
# You can use this to create a bucket:
#
# s3.buckets.create(:name => "mybucket")
#
# You can get a handle for a specific bucket with indifferent
# access:
#
# bucket = s3.buckets[:mybucket]
# bucket = s3.buckets['mybucket']
#
# You can also use it to find out which buckets are in your account:
#
# s3.buckets.collect(&:name)
# #=> ['bucket1', 'bucket2', ...]
#
class BucketCollection

  include Core::Model
  include Enumerable

  # Creates and returns a new Bucket. For example:
  #
  # @note If your bucket name contains one or more periods and it
  #   is hosted in a non-US region, you should make requests
  #   against the bucket using the S3 endpoint specific to the
  #   region in which your bucket resides. For example:
  #
  #       s3 = AWS::S3.new(:region => "eu-west-1")
  #       bucket = s3.buckets.create("my.eu.bucket")
  #
  #   For a full list of endpoints and regions, see
  #   {http://docs.amazonwebservices.com/general/latest/gr/index.html?rande.html
  #   Regions and Endpoints} in the Amazon Web Services General
  #   Reference.
  #
  # @example
  #
  #   bucket = s3.buckets.create('my-bucket')
  #   bucket.name #=> "my-bucket"
  #   bucket.exists? #=> true
  #
  # @param [String] bucket_name
  #
  # @param [Hash] options
  #
  # @option options [String] :location_constraint (nil) The
  #   location where the bucket should be created. Defaults to
  #   the classic US region; however, if you configure a regional
  #   endpoint for Amazon S3 this option will default to the
  #   appropriate location constraint for the endpoint. For
  #   example:
  #
  #       s3 = AWS::S3.new(:region => "us-west-1")
  #       bucket = s3.buckets.create("my-us-west-bucket")
  #       bucket.location_constraint # => "us-west-1"
  #
  # @option options [Symbol,String] :acl (:private) Sets the ACL of the
  #   bucket you are creating. Valid Values include:
  #   * `:private`
  #   * `:public_read`
  #   * `:public_read_write`
  #   * `:authenticated_read`
  #   * `:log_delivery_write`
  #
  # @option options [String] :grant_read
  # @option options [String] :grant_write
  # @option options [String] :grant_read_acp
  # @option options [String] :grant_write_acp
  # @option options [String] :grant_full_control
  #
  # @return [Bucket]
  #
  def create(bucket_name, options = {})
    # Convert the symbolized canned acl (e.g. :public_read) into the
    # string form the API expects ("public-read"). Note this mutates the
    # caller-supplied options hash, as the original implementation did.
    if (acl = options[:acl])
      options[:acl] = acl.to_s.tr('_', '-')
    end

    # Auto-set the location constraint when a regional endpoint is
    # configured and the caller did not pass one. Never override an
    # explicit constraint, even if it looks wrong for the endpoint.
    unless config.s3_endpoint == 's3.amazonaws.com' || options[:location_constraint]
      constraint = guess_constraint
      options[:location_constraint] = constraint if constraint
    end

    client.create_bucket(options.merge(:bucket_name => bucket_name))
    bucket_named(bucket_name)
  end

  # Returns the Bucket with the given name.
  #
  # Makes no requests. The returned bucket object can
  # be used to make requests for the bucket and its objects.
  #
  # @example
  #
  #   bucket = s3.buckets[:mybucket],
  #   bucket = s3.buckets['mybucket'],
  #
  # @param [String] bucket_name
  # @return [Bucket]
  def [](bucket_name)
    bucket_named(bucket_name)
  end

  # Iterates the buckets in this collection.
  #
  # @example
  #
  #   s3.buckets.each do |bucket|
  #     puts bucket.name
  #   end
  #
  # @return [nil]
  def each
    # The previous signature declared an unused &block parameter; the
    # method has always yielded, so the parameter was dropped.
    response = client.list_buckets
    response.buckets.each do |b|
      yield(bucket_named(b.name, response.owner))
    end
    nil
  end

  private

  # Wraps a name (and optional owner) in a Bucket handle without making
  # any requests.
  def bucket_named(name, owner = nil)
    S3::Bucket.new(name.to_s, :owner => owner, :config => config)
  end

  # Derives a location constraint from the configured endpoint host name.
  # Returns nil for the classic us-standard endpoint.
  def guess_constraint
    case config.s3_endpoint
    when 's3-eu-west-1.amazonaws.com' then 'EU'
    when /^s3[.-](.*)\.amazonaws\.com/ then Regexp.last_match(1)
    end
  end

end
end
end
| 31.595092 | 84 | 0.596311 |
1de14f2fb3ea667dbc735a78c67a68516aab4e0c | 671 | # frozen_string_literal: true
require "spec_helper"
require "anyway/ext/deep_freeze"
describe Anyway::Ext::DeepFreeze do
  # DeepFreeze is a refinement; `using` activates #deep_freeze for the
  # remainder of this lexical scope only.
  using Anyway::Ext::DeepFreeze

  it "freezes nested arrays and hashes", :aggregate_failures do
    source = {
      a: 1,
      b: "hello",
      c: {
        id: 1,
        list: [1, 2, {name: "John"}]
      },
      d: [{id: 1}, {id: 2}]
    }
    dup = source.deep_freeze
    # Every nested container — hashes, arrays, and hashes inside arrays —
    # must be frozen, not just the top level.
    expect(dup).to be_frozen
    expect(dup[:c]).to be_frozen
    expect(dup[:d]).to be_frozen
    expect(dup[:c][:list]).to be_frozen
    expect(dup[:c][:list].last).to be_frozen
    expect(dup[:d].first).to be_frozen
    expect(dup[:d].last).to be_frozen
  end
end
| 20.333333 | 63 | 0.603577 |
017f15aef1d92d1f848cb0e7d8e085d13efb245d | 2,915 | module Asciidoctor
module NIST
# Asciidoctor converter for NIST documents; layers NIST-specific bibdata
# and section validation on top of Standoc::Converter.
class Converter < Standoc::Converter
  # NIST imposes no constraint on the document title.
  def title_validate(root)
    nil
  end

  def content_validate(doc)
    super
    bibdata_validate(doc.root)
  end

  # Validates the bibliographic metadata derived from document attributes.
  def bibdata_validate(doc)
    doctype_validate(doc)
    stage_validate(doc)
    substage_validate(doc)
    iteration_validate(doc)
    series_validate(doc)
  end

  def doctype_validate(xmldoc)
    doctype = xmldoc&.at("//bibdata/ext/doctype")&.text
    %w(standard).include? doctype or
      warn "Document Attributes: #{doctype} is not a recognised document type"
  end

  def stage_validate(xmldoc)
    stage = xmldoc&.at("//bibdata/status/stage")&.text
    %w(draft-internal draft-wip draft-prelim draft-public draft-approval
       final final-review).include? stage or
      warn "Document Attributes: #{stage} is not a recognised stage"
  end

  # Substage is optional; validate only when present.
  def substage_validate(xmldoc)
    substage = xmldoc&.at("//bibdata/status/substage")&.text or return
    %w(active retired withdrawn).include? substage or
      warn "Document Attributes: #{substage} is not a recognised substage"
  end

  # Iteration is optional; accepted values are "final" (any case) or an
  # unsigned integer.
  def iteration_validate(xmldoc)
    iteration = xmldoc&.at("//bibdata/status/iteration")&.text or return
    %w(final).include? iteration.downcase or /^\d+$/.match(iteration) or
      warn "Document Attributes: #{iteration} is not a recognised iteration"
  end

  # The series title must be one of the values registered in SERIES.
  def series_validate(xmldoc)
    series = xmldoc&.at("//bibdata/series/title")&.text or return
    # Replaces the manual each-with-flag loop; works whether SERIES is a
    # Hash or an array of pairs.
    found = SERIES.any? { |_, v| v == series }
    found or
      warn "Document Attributes: #{series} is not a recognised series"
  end

  def validate(doc)
    content_validate(doc)
    schema_validate(formattedstr_strip(doc.dup),
                    File.join(File.dirname(__FILE__), "nist.rng"))
  end

  # The first body section must be titled "Introduction".
  def introduction_validate(doc)
    intro = doc.at("//sections/clause/title")
    intro&.text == "Introduction" or
      warn "First section of document body should be Introduction, "\
           "not #{intro&.text}"
  end

  REF_SECTIONS_TO_VALIDATE = "//references[not(parent::clause)]/title | "\
    "//clause[descendant::references][not(parent::clause)]/title".freeze

  def section_validate(doc)
    super
    introduction_validate(doc)
    references_validate(doc)
  end

  # Top-level reference sections must be "References", "Bibliography",
  # or both in that order (or be absent entirely).
  def references_validate(doc)
    names = doc.xpath(REF_SECTIONS_TO_VALIDATE).map { |s| s&.text }
    return if [[], ["References"], ["Bibliography"],
               ["References", "Bibliography"]].include?(names)
    warn "Reference clauses #{names.join(', ')} do not follow expected "\
         "pattern in NIST"
  end
end
end
end
| 32.752809 | 82 | 0.619897 |
87b6011461caade76928842287b88d939c64b414 | 131 | # frozen_string_literal: true
module Importers
  module Cards
    # Planar cards need no handling beyond the normal card import; this
    # empty subclass presumably exists so layout-specific dispatch can
    # resolve planar cards to a concrete importer class — confirm against
    # the importer lookup code.
    class PlanarCardImporter < NormalCardImporter
    end
  end
end
| 14.555556 | 49 | 0.770992 |
1cfcdec677c8b7a18089000befab443961d0d729 | 290 | # frozen_string_literal: false
require_relative "../atom"
module RSS
  module Atom
    # Register the Dublin Core namespace on Atom feeds so dc:* elements
    # can be handled; must run before the includes below take effect on
    # parsed documents.
    Feed.install_ns(DC_PREFIX, DC_URI)

    class Feed
      include DublinCoreModel

      # atom:entry elements nested inside a feed document.
      class Entry; include DublinCoreModel; end
    end

    # Stand-alone atom:entry documents.
    class Entry
      include DublinCoreModel
    end
  end
end
| 16.111111 | 47 | 0.703448 |
1842816b312bbc604b2236d15534bd3a59f03740 | 1,587 | cask "microsoft-edge-dev" do
version "93.0.933.1"

# sha256/url pairs are architecture-specific; both point at the same
# verified Microsoft CDN host, differing only in the product GUID path.
if Hardware::CPU.intel?
  sha256 "81db1578afa0d39abff5ef6405e6e1971d1ddaa31462d71c5e92d9bc5fc3348e"

  url "https://officecdn-microsoft-com.akamaized.net/pr/C1297A47-86C4-4C1F-97FA-950631F94777/MacAutoupdate/MicrosoftEdgeDev-#{version}.pkg",
      verified: "officecdn-microsoft-com.akamaized.net/"
else
  sha256 "ddce450f42a3972be14e5cba3902b7cdab3edb1094a5592caf71005c970ad453"

  url "https://officecdn-microsoft-com.akamaized.net/pr/03adf619-38c6-4249-95ff-4a01c0ffc962/MacAutoupdate/MicrosoftEdgeDev-#{version}.pkg",
      verified: "officecdn-microsoft-com.akamaized.net/"
end

name "Microsoft Edge Dev"
desc "Multi-platform web browser"
homepage "https://www.microsoftedgeinsider.com/"

livecheck do
  url "https://go.microsoft.com/fwlink/?linkid=2069340"
  strategy :header_match
end

auto_updates true
depends_on cask: "microsoft-auto-update"

# Deselect the bundled Microsoft AutoUpdate sub-package; it is supplied
# by the microsoft-auto-update cask dependency instead.
pkg "MicrosoftEdgeDev-#{version}.pkg",
    choices: [
      {
        "choiceIdentifier" => "com.microsoft.package.Microsoft_AutoUpdate.app", # Office16_all_autoupdate.pkg
        "choiceAttribute" => "selected",
        "attributeSetting" => 0,
      },
    ]

uninstall pkgutil: "com.microsoft.edgemac.Dev"

zap trash: [
      "~/Library/Application Support/Microsoft Edge Dev",
      "~/Library/Caches/Microsoft Edge Dev",
      "~/Library/Preferences/com.microsoft.edgemac.Dev.plist",
      "~/Library/Saved Application State/com.microsoft.edgemac.Dev.savedState",
    ],
    rmdir: "/Library/Application Support/Microsoft"
end
| 33.765957 | 142 | 0.720857 |
e9343551e2ca56ea007946b97a2fc4a6b2e95749 | 1,473 | module Lono::Bundler
# A bundled component (e.g. a blueprint) described by user-supplied props:
# its source url, checkout target, and where it gets exported.
class Component
  extend Memoist
  extend Props::Extension
  props :export_to, :name, :sha, :source, :subfolder, :source_type, :type, :url, :clone_with
  delegate :repo, :org, :repo_folder, :org_folder, to: :org_repo
  include Concerns::StackConcern
  include Concerns::LocalConcern

  attr_reader :props, :version, :ref, :tag, :branch

  def initialize(options={})
    @props = Component::Props.new(options.deep_symbolize_keys).build
    # These props are used for version comparing by VersionComparer
    @version, @ref, @tag, @branch = @props[:version], @props[:ref], @props[:tag], @props[:branch]
  end

  # Support a variety of checkout options, preferring version over
  # ref/tag/branch.
  def checkout_version
    @version || @ref || @tag || @branch
  end

  # Resolves the newest commit sha for this component via its fetcher.
  def latest_sha
    fetcher = Fetcher.new(self).instance
    fetcher.run
    fetcher.sha
  end

  # Extracts the hosting provider host from the component url, e.g.
  # "github.com". Urls that are neither http(s) nor scp-style git@
  # (e.g. ssh://[email protected]/path/to/repo) yield 'none'.
  def vcs_provider
    if url.include?('http')
      # "https://github.com/org/repo" => github.com
      url.match(%r{http[s]?://(.*?)/})[1]
    elsif url.include?('git@')
      # BUG FIX: this branch previously re-tested include?('http'), which
      # can never be true after the first branch failed, so git@ urls
      # incorrectly fell through to 'none'.
      # "[email protected]:org/repo" => github.com
      url.match(%r{git@(.*?):})[1]
    else # ssh://[email protected]/path/to/repo
      'none'
    end
  end

  # Filesystem location the component is exported to:
  #   <export_to>/<type plural>/<name>
  def export_path
    export_to = self.export_to || LB.config.export_to
    "#{export_to}/#{type.pluralize}/#{name}"
  end

  private

  def org_repo
    OrgRepo.new(url)
  end
  memoize :org_repo
end
end
| 27.792453 | 99 | 0.616429 |
1dfebc8c53ec9c29c872008615caa28966467f48 | 297 | # frozen_string_literal: true
require "rails_helper"
module Renalware::Drugs
  describe Classification, type: :model do
    it :aggregate_failures do
      # touch(true): saving a classification bumps the parent drug's
      # updated_at so dependent caches invalidate.
      is_expected.to belong_to(:drug).touch(true)
      is_expected.to belong_to(:drug_type)
      # be_versioned: changes are recorded in an audit/version history
      # (presumably PaperTrail — confirm against the model).
      is_expected.to be_versioned
    end
  end
end
| 21.214286 | 49 | 0.737374 |
ed3330d797292867a2e9dd2cd2668ab9752db214 | 3,196 | # Generated by Rawr version 1.7.1
# Rawr packaging configuration. Commented-out settings show each option's
# default; uncomment and edit to override.
configuration do |c|
  # The name for your resulting application file (e.g., if the project_name is 'foo' then you'll get foo.jar, foo.exe, etc.)
  # default value: "BareWiiUseJ"
  #
  #c.project_name = "BareWiiUseJ"

  # Undocumented option 'output_dir'
  # default value: "package"
  #
  #c.output_dir = "package"

  # The type of executable to create (console or gui)
  # default value: "gui"
  #
  #c.executable_type = "gui"

  # The main ruby file to invoke, minus the .rb extension
  # default value: "main"
  #
  #c.main_ruby_file = "main"

  # The fully-qualified name of the main Java file used to initiate the application.
  # default value: "org.monkeybars.rawr.Main"
  #
  #c.main_java_file = "org.monkeybars.rawr.Main"

  # A list of directories where source files reside
  # default value: ["src"]
  #
  c.source_dirs = ['src', 'lib/ruby']

  # A list of regexps of files to exclude
  # default value: []
  #
  #c.source_exclude_filter = []

  # The base directory that holds Mirah files, or subdirectories with Mirah files.
  # default value: "src"
  #
  #c.mirah_source_root = "src"

  # Whether Ruby source files should be compiled into .class files. Setting this to true currently breaks packaging
  # default value: false
  #
  #c.compile_ruby_files = false

  # A list of individual Java library files to include.
  # default value: []
  # Native libraries are listed explicitly here because java_lib_dirs
  # below only bundles jars (see note there).
  c.java_lib_files = Dir.glob "lib/java/*.{dll,so}"

  # A list of directories for rawr to include . All files in the given directories get bundled up.
  # default value: ["lib/java"]
  # NOTE: despite the doc line above, rawr only bundles *.jar files from
  # these directories and ignores native .so/.dll files — hence the
  # explicit java_lib_files glob above.
  c.java_lib_dirs = ["lib/java"]

  # A list of files that will be copied into the `<output_dir>/jar` folder. Note that the files maintain their directory path when copied.
  # default value: []
  #
  #c.files_to_copy = []

  # Undocumented option 'source_jvm_version'
  # default value: 1.7
  #
  #c.source_jvm_version = 1.7

  # Undocumented option 'target_jvm_version'
  # default value: 1.7
  #
  #c.target_jvm_version = 1.7

  # Undocumented option 'jvm_arguments'
  # default value: ""
  #
  #c.jvm_arguments = ""

  # Undocumented option 'java_library_path'
  # default value: ""
  #
  #c.java_library_path = ""

  # Undocumented option 'extra_user_jars'
  # default value: {}
  #
  #c.extra_user_jars[:data] = { :directory => 'data/images/png',
  #                             :location_in_jar => 'images',
  #                             :exclude => /*.bak$/ }
  # Bundle image assets into the jar under images/.
  c.jars[:data] = { :directory => 'data/images', :location_in_jar => 'images' }

  # Undocumented option 'verbose'
  # default value: false
  #
  #c.verbose = false

  # Undocumented option 'mac_do_not_generate_plist'
  # default value: false
  #
  #c.mac_do_not_generate_plist = false

  # working directory specified in plist file
  # default value: "$APP_PACKAGE"
  #
  #c.mac_plist_working_directory = "$APP_PACKAGE"

  # Undocumented option 'mac_icon_path'
  # default value: nil
  #
  #c.mac_icon_path = nil

  # Undocumented option 'windows_icon_path'
  # default value: nil
  #
  #c.windows_icon_path = nil
  c.mac_icon_path = File.expand_path('icons/monkeybars.icns')
  c.windows_icon_path = File.expand_path('icons/monkeybars.ico')
end
| 26.633333 | 139 | 0.690238 |
ffbed2dcb35704028c7363d10ac522911b3583ae | 3,032 | require 'spec_helper'
# Serverspec suite verifying a Docker CE 18.09.7 install on arm64 Debian:
# packages, binaries (mode/owner), systemd units, socket, data dirs,
# storage driver, network interfaces, service state and daemon startup log.
describe package('docker-ce') do
  it { should be_installed }
end
describe command('dpkg -l docker-ce') do
  its(:stdout) { should match /ii docker-ce/ }
  its(:stdout) { should match /5:18.09.7~3-0~debian/ }
  its(:stdout) { should match /arm64/ }
  its(:exit_status) { should eq 0 }
end
describe command('dpkg -l docker-ce-cli') do
  its(:stdout) { should match /ii docker-ce-cli/ }
  its(:stdout) { should match /5:18.09.7~3-0~debian/ }
  its(:stdout) { should match /arm64/ }
  its(:exit_status) { should eq 0 }
end
describe command('dpkg -l containerd.io') do
  its(:stdout) { should match /ii containerd.io/ }
  its(:stdout) { should match /1.2.6-3/ }
  its(:stdout) { should match /arm64/ }
  its(:exit_status) { should eq 0 }
end
describe file('/usr/bin/docker') do
  it { should be_file }
  it { should be_mode 755 }
  it { should be_owned_by 'root' }
end
describe file('/usr/bin/docker-init') do
  it { should be_file }
  it { should be_mode 755 }
  it { should be_owned_by 'root' }
end
describe file('/usr/bin/docker-proxy') do
  it { should be_file }
  it { should be_mode 755 }
  it { should be_owned_by 'root' }
end
describe file('/usr/bin/dockerd') do
  it { should be_file }
  it { should be_owned_by 'root' }
end
describe file('/usr/bin/dockerd-ce') do
  it { should be_file }
  it { should be_mode 755 }
  it { should be_owned_by 'root' }
end
describe file('/usr/bin/containerd') do
  it { should be_file }
  it { should be_mode 755 }
  it { should be_owned_by 'root' }
end
describe file('/usr/bin/containerd-shim') do
  it { should be_file }
  it { should be_mode 755 }
  it { should be_owned_by 'root' }
end
describe file('/lib/systemd/system/docker.socket') do
  it { should be_file }
  it { should be_mode 644 }
  it { should be_owned_by 'root' }
end
# The live socket must be group-accessible to members of "docker".
describe file('/var/run/docker.sock') do
  it { should be_socket }
  it { should be_mode 660 }
  it { should be_owned_by 'root' }
  it { should be_grouped_into 'docker' }
end
describe file('/etc/default/docker') do
  it { should be_file }
  it { should be_mode 644 }
  it { should be_owned_by 'root' }
end
describe file('/var/lib/docker') do
  it { should be_directory }
  it { should be_mode 711 }
  it { should be_owned_by 'root' }
end
describe file('/var/lib/docker/overlay2') do
  it { should be_directory }
  it { should be_mode 700 }
  it { should be_owned_by 'root' }
end
describe command('docker -v') do
  its(:stdout) { should match /Docker version 18.09.7, build/ }
  its(:exit_status) { should eq 0 }
end
describe command('docker info') do
  its(:stdout) { should match /Storage Driver: overlay2/ }
  its(:exit_status) { should eq 0 }
end
describe interface('lo') do
  it { should exist }
end
describe interface('docker0') do
  it { should exist }
end
describe service('docker') do
  it { should be_enabled }
  it { should be_running }
end
# Confirms the daemon finished booting at least once (syslog marker).
describe command('grep docker /var/log/syslog') do
  its(:stdout) { should match /Daemon has completed initialization/ }
  its(:exit_status) { should eq 0 }
end
| 23.874016 | 69 | 0.676121 |
e80da95595b06144640970e79990f04f48531af0 | 4,073 | #!/usr/bin/env rspec
require 'spec_helper'
# Unit spec for the Puppet `augeas` resource type: provider wiring, declared
# features, properties/parameters (and their docs), default values, the
# retrieve/need_to_run? contract, and lens/incl validation.
augeas = Puppet::Type.type(:augeas)
describe augeas do
  describe "when augeas is present", :if => Puppet.features.augeas?, :'fails_on_ruby_1.9.2' => true do
    it "should have a default provider inheriting from Puppet::Provider" do
      augeas.defaultprovider.ancestors.should be_include(Puppet::Provider)
    end
    it "should have a valid provider" do
      augeas.new(:name => "foo").provider.class.ancestors.should be_include(Puppet::Provider)
    end
  end
  describe "basic structure" do
    it "should be able to create a instance" do
      provider_class = Puppet::Type::Augeas.provider(Puppet::Type::Augeas.providers[0])
      Puppet::Type::Augeas.expects(:defaultprovider).returns provider_class
      augeas.new(:name => "bar").should_not be_nil
    end
    it "should have an parse_commands feature" do
      augeas.provider_feature(:parse_commands).should_not be_nil
    end
    it "should have an need_to_run? feature" do
      augeas.provider_feature(:need_to_run?).should_not be_nil
    end
    it "should have an execute_changes feature" do
      augeas.provider_feature(:execute_changes).should_not be_nil
    end
    # Generated examples: every declared property/parameter must exist and
    # carry documentation.
    properties = [:returns]
    params = [:name, :context, :onlyif, :changes, :root, :load_path, :type_check]
    properties.each do |property|
      it "should have a #{property} property" do
        augeas.attrclass(property).ancestors.should be_include(Puppet::Property)
      end
      it "should have documentation for its #{property} property" do
        augeas.attrclass(property).doc.should be_instance_of(String)
      end
    end
    params.each do |param|
      it "should have a #{param} parameter" do
        augeas.attrclass(param).ancestors.should be_include(Puppet::Parameter)
      end
      it "should have documentation for its #{param} parameter" do
        augeas.attrclass(param).doc.should be_instance_of(String)
      end
    end
  end
  describe "default values" do
    before do
      provider_class = augeas.provider(augeas.providers[0])
      augeas.expects(:defaultprovider).returns provider_class
    end
    it "should be blank for context" do
      augeas.new(:name => :context)[:context].should == ""
    end
    it "should be blank for onlyif" do
      augeas.new(:name => :onlyif)[:onlyif].should == ""
    end
    it "should be blank for load_path" do
      augeas.new(:name => :load_path)[:load_path].should == ""
    end
    it "should be / for root" do
      augeas.new(:name => :root)[:root].should == "/"
    end
    it "should be false for type_check" do
      augeas.new(:name => :type_check)[:type_check].should == :false
    end
  end
  describe "provider interaction" do
    it "should return 0 if it does not need to run" do
      provider = stub("provider", :need_to_run? => false)
      resource = stub('resource', :resource => nil, :provider => provider, :line => nil, :file => nil)
      changes = augeas.attrclass(:returns).new(:resource => resource)
      changes.retrieve.should == 0
    end
    it "should return :need_to_run if it needs to run" do
      provider = stub("provider", :need_to_run? => true)
      resource = stub('resource', :resource => nil, :provider => provider, :line => nil, :file => nil)
      changes = augeas.attrclass(:returns).new(:resource => resource)
      changes.retrieve.should == :need_to_run
    end
  end
  describe "loading specific files" do
    # lens and incl must be given together; context defaults to /files/<incl>.
    it "should require lens when incl is used" do
      lambda { augeas.new(:name => :no_lens, :incl => "/etc/hosts")}.should raise_error(Puppet::Error)
    end
    it "should require incl when lens is used" do
      lambda { augeas.new(:name => :no_incl, :lens => "Hosts.lns") }.should raise_error(Puppet::Error)
    end
    it "should set the context when a specific file is used" do
      fake_provider = stub_everything "fake_provider"
      augeas.stubs(:defaultprovider).returns fake_provider
      augeas.new(:name => :no_incl, :lens => "Hosts.lns", :incl => "/etc/hosts")[:context].should == "/files/etc/hosts"
    end
  end
end
| 33.941667 | 119 | 0.668795 |
d502c65752d38b47345cadb6beaf1d2e82d90707 | 1,242 | # frozen_string_literal: true
require "erb"
require "uri"
require "active_support/actionable_error"
module ActionDispatch
  # Rack middleware backing the interactive buttons on the development error
  # page. The page POSTs an error class name, an action label, and the origin
  # location to +endpoint+; the middleware dispatches the action and redirects
  # the browser back to +location+.
  class ActionableExceptions # :nodoc:
    # Path the error page posts actions to (configurable per app).
    cattr_accessor :endpoint, default: "/rails/actions"
    def initialize(app)
      @app = app
    end
    def call(env)
      request = ActionDispatch::Request.new(env)
      # Pass every request through except an actionable POST to our endpoint.
      return @app.call(env) unless actionable_request?(request)
      # safe_constantize returns nil for unknown class names; dispatch is
      # expected to handle a nil error class.
      ActiveSupport::ActionableError.dispatch(request.params[:error].to_s.safe_constantize, request.params[:action])
      redirect_to request.params[:location]
    end
    private
      # Only act when detailed exceptions are shown (i.e. development mode),
      # and only for POSTs aimed exactly at the configured endpoint.
      def actionable_request?(request)
        request.get_header("action_dispatch.show_detailed_exceptions") && request.post? && request.path == endpoint
      end
      # Open-redirect guard: only relative URIs and http(s) destinations are
      # followed; any other scheme (javascript:, data:, ...) gets a 400.
      def redirect_to(location)
        uri = URI.parse location
        if uri.relative? || uri.scheme == "http" || uri.scheme == "https"
          body = ""
        else
          return [400, { "Content-Type" => "text/plain" }, ["Invalid redirection URI"]]
        end
        [302, {
          "Content-Type" => "text/html; charset=#{Response.default_charset}",
          "Content-Length" => body.bytesize.to_s,
          "Location" => location,
        }, [body]]
      end
  end
end
| 27 | 116 | 0.638486 |
ab4bd4c5be93ddebb9536054bb365708edd1518a | 1,069 | # frozen_string_literal: true
module Thredded
  # Per-user, per-messageboard notification preference: whether the user is
  # notified about activity in topics they follow on a given messageboard.
  class MessageboardNotificationsForFollowedTopics < ActiveRecord::Base
    belongs_to :user_preference,
               primary_key: :user_id,
               foreign_key: :user_id,
               inverse_of: :messageboard_notifications_for_followed_topics
    belongs_to :user,
               class_name: Thredded.user_class_name,
               inverse_of: :thredded_messageboard_notifications_for_followed_topics
    belongs_to :messageboard
    scope :for_messageboard, ->(messageboard) { where(messageboard_id: messageboard.id) }
    validates :user_id, presence: true
    validates :messageboard_id, presence: true
    # Scope-style lookup; equivalent to +for_messageboard+.
    def self.in(messageboard)
      where(messageboard_id: messageboard.id)
    end
    include Thredded::NotifierPreference
    # Followed-topic notifications default to enabled for every notifier.
    def self.default(_notifier)
      # could be moved to `notifier.defaults(:notifications_for_followed_topics)` or
      # `notifier.defaults(:messageboard_notifications_for_followed_topics)`
      Thredded::BaseNotifier::NotificationsDefault.new(true)
    end
  end
end
| 34.483871 | 89 | 0.730589 |
01841b018d0c1967be47851b677461f8b0ca092b | 2,314 | require 'spec_helper'
# Integration-style specs for Lapidarist::CLI#run. Outdated-gem discovery,
# updates, the test command, and git are all stubbed; assertions target the
# resulting git operations (reset_hard / bisect) and test-command usage.
# Fix: corrected the ungrammatical context label "when there are is only one
# outdated gem" -> "when there is only one outdated gem".
RSpec.describe Lapidarist::CLI do
  describe '#run' do
    context 'when there is only one outdated gem' do
      context 'and it passes the test' do
        it 'leaves the single commit added by update' do
          stub_outdated_gems(stub_gems([stub_gem]))
          stub_update(stub_gems([stub_gem]))
          stub_test_command(success: true)
          git = stub_git(count: 1)
          Lapidarist::CLI.new(['-q']).run
          expect(git).not_to have_received(:reset_hard).with('HEAD^')
        end
      end
      context 'and it fails the test' do
        it 'removes the single commit added by update' do
          stub_outdated_gems(stub_gems([stub_gem]), stub_gems)
          stub_update(stub_gems([stub_gem]), stub_gems)
          stub_test_command(success: false)
          git = stub_git(count: 1)
          Lapidarist::CLI.new(['-q']).run
          expect(git).to have_received(:reset_hard).with('HEAD^')
        end
        # With a single failing gem there is nothing to bisect.
        it 'does not run git bisect' do
          stub_outdated_gems(stub_gems([stub_gem]), stub_gems)
          stub_update(stub_gems([stub_gem]), stub_gems)
          stub_test_command(success: false)
          git = stub_git(count: 1)
          Lapidarist::CLI.new(['-q']).run
          expect(git).not_to have_received(:bisect)
        end
      end
    end
    context 'when there are multiple outdated gems' do
      context 'and they fail the test' do
        it 'runs git bisect' do
          stub_outdated_gems(stub_gems([stub_gem, stub_gem]), stub_gems)
          stub_update(stub_gems([stub_gem, stub_gem]))
          stub_test_command(success: false)
          git = stub_git(count: 2)
          Lapidarist::CLI.new(['-q']).run
          expect(git).to have_received(:bisect)
        end
      end
    end
  end
  context 'when all the gems to be updated result in no changes' do
    it 'skips to the next loop without running specs' do
      stub_outdated_gems(stub_gems([stub_gem(name: 'rake')]), stub_gems)
      stub_update(stub_gems([stub_skipped_gem(name: 'rake')]))
      stub_git(count: 0)
      test_command = stub_test_command(success: true)
      cli = Lapidarist::CLI.new(['-q'])
      allow(cli).to receive(:loop).and_yield
      cli.run
      expect(test_command).not_to have_received(:success?)
    end
  end
end
bb45e54fc4b309a417f6278b1843f79c125f4a51 | 1,747 | RSpec.describe Spree::PrintInvoiceSetting do
subject { described_class.new }
describe '#page_sizes' do
it 'has a list of page sizes' do
expect(subject.page_sizes).to be_a(Array)
expect(subject.page_sizes.size).to be(50)
end
end
describe '#page_layouts' do
it 'has a list of layouts' do
expect(subject.page_layouts).to be_a(Array)
expect(subject.page_layouts).to match_array %w(landscape portrait)
end
end
describe '#use_sequential_number?' do
context 'when :next_number set' do
before { subject.next_number = 100 }
it 'uses sequential number' do
expect(subject.use_sequential_number?).to be(true)
end
end
context 'when :next_number nil' do
before { subject.next_number = nil }
it 'does not use sequential number' do
expect(subject.use_sequential_number?).to be(false)
end
end
end
describe '#increase_invoice_number' do
it 'increases invoice numer by one' do
subject.next_number = 100
subject.increase_invoice_number!
expect(subject.next_number).to be(101)
end
end
describe '#font_faces' do
it 'has a list of font faces' do
expect(subject.font_faces).to be_a(Array)
expect(subject.font_faces).to match_array %w(Courier Helvetica Times-Roman DejaVuSans msjh msyh)
end
end
describe '#font_sizes' do
it 'has a list of font sizes' do
expect(subject.font_sizes).to be_a(Array)
expect(subject.font_sizes.first).to be(7)
expect(subject.font_sizes.last).to be(14)
end
end
describe '#logo_scaling' do
it 'converts logo scale to percent' do
subject.logo_scale = 100
expect(subject.logo_scaling).to be(1.0)
end
end
end
| 26.469697 | 102 | 0.681168 |
e2dce5034518622fb9f9ba6a6106078f2d014665 | 1,918 | require 'test_helper'
# was the web request successful?
# was the user redirected to the right page?
# was the user successfully authenticated?
# was the correct object stored in the response?
# was the appropriate message delivered in the json payload?
# End-to-end flow for an overridden devise_token_auth passwords controller:
# request a reset email, follow the tokenized edit link, then assert the
# redirect carries full auth params plus the controller's override proof.
class Overrides::PasswordsControllerTest < ActionDispatch::IntegrationTest
  describe Overrides::PasswordsController do
    before do
      @resource = evil_users(:confirmed_email_user)
      @redirect_url = Faker::Internet.url
      post "/evil_user_auth/password", {
        email: @resource.email,
        redirect_url: @redirect_url
      }
      @mail = ActionMailer::Base.deliveries.last
      @resource.reload
      # Pull the config name, redirect URL, and reset token out of the email
      # body's query string.
      @mail_config_name  = CGI.unescape(@mail.body.match(/config=([^&]*)&/)[1])
      @mail_redirect_url = CGI.unescape(@mail.body.match(/redirect_url=([^&]*)&/)[1])
      @mail_reset_token  = @mail.body.match(/reset_password_token=(.*)\"/)[1]
      get '/evil_user_auth/password/edit', {
        reset_password_token: @mail_reset_token,
        redirect_url: @mail_redirect_url
      }
      @resource.reload
      # The controller answers with a redirect; the auth payload rides in the
      # Location query string.
      raw_qs = response.location.split('?')[1]
      @qs = Rack::Utils.parse_nested_query(raw_qs)
      @client_id      = @qs["client_id"]
      @expiry         = @qs["expiry"]
      @reset_password = @qs["reset_password"]
      @token          = @qs["token"]
      @uid            = @qs["uid"]
      @override_proof = @qs["override_proof"]
    end
    test 'response should have success redirect status' do
      assert_equal 302, response.status
    end
    test 'response should contain auth params + override proof' do
      assert @client_id
      assert @expiry
      assert @reset_password
      assert @token
      assert @uid
      assert @override_proof
    end
    test 'override proof is correct' do
      assert_equal @override_proof, Overrides::PasswordsController::OVERRIDE_PROOF
    end
  end
end
| 30.444444 | 85 | 0.654327 |
18c464fe73aadc0ff460fd7b320f686589f5b042 | 593 | cask 'hp-eprint' do
  version '2.5.0'
  sha256 'cba1598dc5d03fbf28fa649dafca5cd251f273066cc3a050966834a73ba66c3e'
  # version is interpolated into the vendor download path.
  url "https://ftp.hp.com/pub/softlib/software13/COL43009/ds-104730-8/HP-ePrint_v#{version}.dmg"
  name 'HP ePrint'
  homepage 'http://h20331.www2.hp.com/hpsub/us/en/eprint/overview.html'
  license :gratis
  pkg 'HP ePrint Installer.pkg'
  uninstall :pkgutil => 'com.hp.pkg.cloudprint.HP-ePrint-Mobile'
  zap :delete => [
    '~/Library/Containers/com.hp.cloudprint.HP-ePrint-Mobile',
    '~/Library/PDF Services/HP ePrint'
  ]
end
abeb63c5917e1f2fb2a55d2a7aba0dc1b5d2243c | 763 | require 'test/unit'
require "cases/helper"
require 'active_support/core_ext/class/inheritable_attributes'
# Fixture hierarchy: B and C each extend A's inheritable array independently;
# D extends B's, exercising accumulation across two inheritance levels.
class A
  include ClassInheritableAttributes
end
class B < A
  write_inheritable_array "first", [ :one, :two ]
end
class C < A
  write_inheritable_array "first", [ :three ]
end
class D < B
  write_inheritable_array "first", [ :four ]
end
class ClassInheritableAttributesTest < ActiveRecord::TestCase
  # Direct subclasses see only their own additions.
  def test_first_level
    assert_equal [ :one, :two ], B.read_inheritable_attribute("first")
    assert_equal [ :three ], C.read_inheritable_attribute("first")
  end
  # A grandchild accumulates its parent's entries; the parent is unchanged.
  def test_second_level
    assert_equal [ :one, :two, :four ], D.read_inheritable_attribute("first")
    assert_equal [ :one, :two ], B.read_inheritable_attribute("first")
  end
end
| 23.121212 | 77 | 0.741809 |
e895ba97899619d650b89413afcc080c6b841365 | 462 | module Wepay
module Api
class Base
def initialize
end
private
def get(path, params = {})
make_request(:get, path, params)
end
def post(path, params = {})
make_request(:post, path, params)
end
def make_request(http_method, path, params = {})
client.send(http_method, path, params)
end
def client
@client ||= ::Wepay::Client::Data.new
end
end
end
end | 18.48 | 54 | 0.556277 |
e987abf144de7f0229bc9a5c70a6b28b12ee132a | 138 | # frozen_string_literal: true
require 'pry'
require 'rails'
require 'active_record'
require 'active_support'
require 'action_controller'
| 17.25 | 29 | 0.811594 |
08d54ca6673e0babb7bb1de324d8d5344a2f203a | 698 | component "rubygem-semantic_puppet" do |pkg, settings, platform|
  # Projects may define a :rubygem_semantic_puppet_version setting, or we use 1.0.4 by default
  version = settings[:rubygem_semantic_puppet_version] || '1.0.4'
  pkg.version version
  # Checksum must match the pinned version; unknown versions abort the build.
  case version
  when '0.1.2'
    pkg.md5sum '192ae7729997cb5d5364f64b99b13121'
  when '1.0.4'
    pkg.sha256sum "5d8380bf733c1552ef77e06a7c44a6d5b48def7d390ecf3bd71cad477f5ce13d"
  else
    raise "rubygem-semantic_puppet version #{version} has not been configured; Cannot continue."
  end
  # Shared build/install steps for all rubygem components.
  instance_eval File.read('configs/components/_base-rubygem.rb')
  pkg.environment "GEM_HOME", (settings[:puppet_gem_vendor_dir] || settings[:gem_home])
end
| 36.736842 | 96 | 0.772206 |
3868e604a751bf7d4a80aebec406925f89edff28 | 287 | Rails.application.routes.draw do
get 'oauth_test/index'
devise_for :users, controllers: {
registrations: "users/registrations",
omniauth_callbacks: "users/omniauth_callbacks"
}
resources :groups
resources :users
resources :events
root :to => 'oauth_test#index'
end
| 23.916667 | 50 | 0.738676 |
21bc06a52d9624e600e998d30c8906e3311a777b | 5,667 | # frozen_string_literal: true
# Spec for Dry::Types::Array: member coercion (#of), try/failure semantics,
# constrained members, constructor types, Undefined filtering, #valid?,
# case-equality (#===), member introspection, and string representation.
RSpec.describe Dry::Types::Array do
  describe "#of" do
    context "primitive" do
      shared_context "array with a member type" do
        it "returns an array with correct member values" do
          expect(array[Set[1, 2, 3]]).to eql(%w[1 2 3])
        end
        it_behaves_like Dry::Types::Nominal do
          subject(:type) { array }
        end
      end
      context "using string identifiers" do
        subject(:array) { Dry::Types["coercible.array<coercible.string>"] }
        include_context "array with a member type"
      end
      context "try" do
        subject(:array) do
          Dry::Types["nominal.array"].of(Dry::Types["strict.string"])
        end
        it "with a valid array" do
          expect(array.try(%w[a b])).to be_success
        end
        it "an invalid type should be a failure" do
          expect(array.try("some string")).to be_failure
        end
        it "a broken constraint should be a failure" do
          expect(array.try(["1", 2])).to be_failure
        end
        it "a broken constraint with block" do
          expect(
            array.try(["1", "2", 3]) { |error| "error: #{error}" }
          ).to match(/error: 3/)
        end
        it "an invalid type with a block" do
          expect(
            array.try("X") { |x| "error: #{x}" }
          ).to eql("error: X is not an array")
        end
      end
      context "using method" do
        subject(:array) { Dry::Types["coercible.array"].of(Dry::Types["coercible.string"]) }
        include_context "array with a member type"
      end
      context "using a constrained type" do
        subject(:array) do
          Dry::Types["array"].of(Dry::Types["coercible.integer"].constrained(gt: 2))
        end
        it "passes values through member type" do
          expect(array[%w[3 4 5]]).to eql([3, 4, 5])
        end
        it "raises when input is not valid" do
          expect { array[%w[1 2 3]] }.to raise_error(
            Dry::Types::ConstraintError,
            '"1" violates constraints (gt?(2, 1) failed)'
          )
        end
        it_behaves_like Dry::Types::Nominal do
          subject(:type) { array }
          it_behaves_like "a composable constructor"
        end
      end
      context "constructor types" do
        subject(:array) do
          Dry::Types["array"].of(Dry::Types["coercible.integer"])
        end
        it "yields partially coerced values" do
          expect(array.(["1", 2, "foo"]) { |xs| xs }).to eql([1, 2, "foo"])
        end
      end
      context "undefined" do
        subject(:array) do
          Dry::Types["array"].of(
            Dry::Types["nominal.string"].constructor { |value|
              value == "" ? Dry::Types::Undefined : value
            }
          )
        end
        it "filters out undefined values" do
          expect(array[["", "foo"]]).to eql(["foo"])
        end
      end
    end
  end
  describe "#valid?" do
    subject(:array) { Dry::Types["array"].of(Dry::Types["string"]) }
    it "detects invalid input of the completely wrong type" do
      expect(array.valid?(5)).to be(false)
    end
    it "detects invalid input of the wrong member type" do
      expect(array.valid?([5])).to be(false)
    end
    it "recognizes valid input" do
      expect(array.valid?(["five"])).to be(true)
    end
  end
  describe "#===" do
    subject(:array) { Dry::Types["strict.array"].of(Dry::Types["strict.string"]) }
    it "returns boolean" do
      expect(array.===(%w[hello world])).to eql(true)
      expect(array.===(["hello", 1234])).to eql(false)
    end
    context "in case statement" do
      let(:value) do
        case %w[hello world]
        when array then "accepted"
        else "invalid"
        end
      end
      it "returns correct value" do
        expect(value).to eql("accepted")
      end
    end
  end
  context "member" do
    describe "#to_s" do
      subject(:type) { Dry::Types["nominal.array"].of(Dry::Types["nominal.string"]) }
      it "returns string representation of the type" do
        expect(type.to_s).to eql("#<Dry::Types[Array<Nominal<String>>]>")
      end
      it "shows meta" do
        expect(type.meta(foo: :bar).to_s).to eql("#<Dry::Types[Array<Nominal<String> meta={foo: :bar}>]>")
      end
    end
    describe "#constructor" do
      subject(:type) { Dry::Types["params.array<params.integer>"] }
      example "getting member from a constructor type" do
        expect(type.member.("1")).to be(1)
      end
      describe "#lax" do
        subject(:type) { Dry::Types["array<integer>"].constructor(&:to_a) }
        it "makes type recursively lax" do
          expect(type.lax.member).to eql(Dry::Types["nominal.integer"])
        end
      end
      describe "#constrained" do
        it "applies constraints on top of constructor" do
          expect(type.constrained(size: 1).(["1"])).to eql([1])
          expect(type.constrained(size: 1).([]) { :fallback }).to be(:fallback)
        end
      end
    end
    context "nested array" do
      let(:strings) do
        Dry::Types["array"].of("string")
      end
      subject(:type) do
        Dry::Types["array"].of(strings)
      end
      it "still discards constructor" do
        expect(type.constructor(&:to_a).member.type).to eql(strings)
      end
    end
  end
  describe "#to_s" do
    subject(:type) { Dry::Types["nominal.array"] }
    it "returns string representation of the type" do
      expect(type.to_s).to eql("#<Dry::Types[Array]>")
    end
    it "adds meta" do
      expect(type.meta(foo: :bar).to_s).to eql("#<Dry::Types[Array meta={foo: :bar}]>")
    end
  end
end
| 27.245192 | 106 | 0.560967 |
1c5ca018b61eeb56c5b16b76f78496bf11d0abc2 | 960 | #---
# Excerpted from "The RSpec Book",
# published by The Pragmatic Bookshelf.
# Copyrights apply to this code. It may not be used to create training material,
# courses, books, articles, and the like. Contact us if you are in doubt.
# We make no guarantees that this code is fit for any purpose.
# Visit http://www.pragmaticprogrammer.com/titles/achbd for more book information.
#---
require 'spec_helper'
# Controller spec for MessagesController#create: message construction from
# params, persistence, and redirect back to the index.
describe MessagesController do
  describe "POST create" do
    it "creates a new message" do
      Message.should_receive(:new).with("text" => "a quick brown fox")
      post :create, :message => { "text" => "a quick brown fox" }
    end
    it "saves the message" do
      message = mock_model(Message)
      Message.stub(:new).and_return(message)
      message.should_receive(:save)
      post :create
    end
    it "redirects to the Messages index" do
      post :create
      response.should redirect_to(:action => "index")
    end
  end
end
| 30.967742 | 82 | 0.686458 |
e2b154976386a8e951273b3be28380dfc8e4f49d | 3,924 | # frozen_string_literal: true
module Ci
  # A pipeline stage: groups jobs (builds/bridges) within a pipeline and keeps
  # an aggregate status, driven by a state machine and mirrored from its jobs.
  class Stage < Ci::ApplicationRecord
    include Importable
    include Ci::HasStatus
    include Gitlab::OptimisticLocking
    include Presentable
    enum status: Ci::HasStatus::STATUSES_ENUM
    belongs_to :project
    belongs_to :pipeline
    has_many :statuses, class_name: 'CommitStatus', foreign_key: :stage_id
    has_many :latest_statuses, -> { ordered.latest }, class_name: 'CommitStatus', foreign_key: :stage_id
    has_many :retried_statuses, -> { ordered.retried }, class_name: 'CommitStatus', foreign_key: :stage_id
    has_many :processables, class_name: 'Ci::Processable', foreign_key: :stage_id
    has_many :builds, foreign_key: :stage_id
    has_many :bridges, foreign_key: :stage_id
    scope :ordered, -> { order(position: :asc) }
    scope :in_pipelines, ->(pipelines) { where(pipeline: pipelines) }
    scope :by_name, ->(names) { where(name: names) }
    # Validations are skipped during project import.
    with_options unless: :importing? do
      validates :project, presence: true
      validates :pipeline, presence: true
      validates :name, presence: true
      validates :position, presence: true
    end
    after_initialize do
      self.status = DEFAULT_STATUS if self.status.nil?
    end
    # Backfill position from the most common stage_idx among this stage's jobs.
    before_validation unless: :importing? do
      next if position.present?
      self.position = statuses.select(:stage_idx)
        .where.not(stage_idx: nil)
        .group(:stage_idx)
        .order('COUNT(id) DESC')
        .first&.stage_idx.to_i
    end
    state_machine :status, initial: :created do
      event :enqueue do
        transition any - [:pending] => :pending
      end
      event :request_resource do
        transition any - [:waiting_for_resource] => :waiting_for_resource
      end
      event :prepare do
        transition any - [:preparing] => :preparing
      end
      event :run do
        transition any - [:running] => :running
      end
      event :skip do
        transition any - [:skipped] => :skipped
      end
      event :drop do
        transition any - [:failed] => :failed
      end
      event :succeed do
        transition any - [:success] => :success
      end
      event :cancel do
        transition any - [:canceled] => :canceled
      end
      event :block do
        transition any - [:manual] => :manual
      end
      event :delay do
        transition any - [:scheduled] => :scheduled
      end
    end
    # Map a status string onto the corresponding state-machine event, retrying
    # under optimistic locking. Unknown strings raise UnknownStatusError.
    def set_status(new_status)
      retry_optimistic_lock(self, name: 'ci_stage_set_status') do
        case new_status
        when 'created' then nil
        when 'waiting_for_resource' then request_resource
        when 'preparing' then prepare
        when 'pending' then enqueue
        when 'running' then run
        when 'success' then succeed
        when 'failed' then drop
        when 'canceled' then cancel
        when 'manual' then block
        when 'scheduled' then delay
        when 'skipped', nil then skip
        else
          raise Ci::HasStatus::UnknownStatusError,
                "Unknown status `#{new_status}`"
        end
      end
    end
    # Recompute this stage's status from its jobs' composite status.
    def update_legacy_status
      set_status(latest_stage_status.to_s)
    end
    def groups
      @groups ||= Ci::Group.fabricate(project, self)
    end
    def has_warnings?
      number_of_warnings > 0
    end
    # Count of latest allowed-to-fail failures, batched across stages to avoid
    # N+1 queries.
    def number_of_warnings
      BatchLoader.for(id).batch(default_value: 0) do |stage_ids, loader|
        ::CommitStatus.where(stage_id: stage_ids)
          .latest
          .failed_but_allowed
          .group(:stage_id)
          .count
          .each { |id, amount| loader.call(id, amount) }
      end
    end
    def detailed_status(current_user)
      Gitlab::Ci::Status::Stage::Factory
        .new(self, current_user)
        .fabricate!
    end
    def manual_playable?
      blocked? || skipped?
    end
    # Falls back to 'skipped' when the stage has no jobs.
    def latest_stage_status
      statuses.latest.composite_status(project: project) || 'skipped'
    end
  end
end
| 26.513514 | 106 | 0.629205 |
e2637cee9c341cad08768bcb46742ac2ddfb909a | 477 | # frozen_string_literal: true
require_relative '../bin/check-windows-service.rb'
# Verifies the check shells out with the service name wrapped in quotes, so
# names containing spaces survive findstr's /c: literal-match flag.
describe CheckWinService do
  before(:all) do
    CheckWinService.class_variable_set(:@@autorun, nil)
  end
  let(:check) do
    CheckWinService.new ['--service', 'A Sample Service']
  end
  it 'should use quotes' do
    expect(check).to receive(:system).with('tasklist /svc|findstr /i /c:"A Sample Service"').and_raise SystemExit
    expect { check.run }.to raise_error SystemExit
  end
end
| 25.105263 | 113 | 0.721174 |
ff5d517414aad099184cd21ebbce8edd87892228 | 854 | class Mdp < Formula
  desc "Command-line based markdown presentation tool"
  homepage "https://github.com/visit1985/mdp"
  url "https://github.com/visit1985/mdp/archive/1.0.0.tar.gz"
  sha1 "24821e0602f3c7f141ce610de2beda8108050584"
  head "https://github.com/visit1985/mdp.git"
  bottle do
    cellar :any
    sha256 "954220348b4a6ce5aa97edfa1eab40aff944bd804226f1d01117bd22ba220b07" => :yosemite
    sha256 "b4dac048d07a19e5b4955578030e9e68638f28b8d7c26d98492cf1eea152b1f0" => :mavericks
    sha256 "84f070d0885fef35a596ae523609f38c9d72697a9b7facdbe1d8d2657f509f4e" => :mountain_lion
  end
  def install
    system "make"
    system "make", "install", "PREFIX=#{prefix}"
    # Ship the sample deck so the test block (and users) can run it.
    share.install "sample.md"
  end
  test do
    # Go through two slides and quit.
    ENV["TERM"] = "xterm"
    pipe_output "#{bin}/mdp #{share}/sample.md", "jjq", 0
  end
end
| 31.62963 | 95 | 0.736534 |
0841e338a2b500fa3800119a68e04bb98d95004a | 96 | module DNS
class Zone
# Version number (major.minor.tiny)
Version = '0.3.1'
end
end
| 13.714286 | 39 | 0.635417 |
e23f4fa4154c64233b9270b5941e6dd5828e7abe | 3,623 | RSpec.describe Game do
  # Specs for Game: item expiry, hash serialization, inventories, adding
  # players/items, and ownership transfer on kill/world-kill/suicide.
  describe "#remove_expired_items" do
    let(:current_time) { 10 }
    let(:expired_item) { Item.new("Expired", "00:07", "1") }
    let(:valid_item) { Item.new("Valid", "00:08", "1") }
    before do
      subject.items = [expired_item, valid_item]
      subject.remove_expired_items(current_time)
    end
    it "removes the expired items" do
      expect(subject.items).not_to include(expired_item)
    end
    it "keeps all valid items" do
      expect(subject.items).to contain_exactly(valid_item)
    end
  end
  describe "#to_hash" do
    let(:expected_hash) do
      {
        players: [
          {
            id: player.id,
            name: player.name,
            inventory: {
              "Knife" => 1
            }
          }
        ]
      }
    end
    let(:player) { Player.new("1", "Renata") }
    let(:item) { Item.new("Knife", "00:00", player.id) }
    before do
      subject.add_player(player)
      subject.add_item(item)
    end
    it "returns the correct game hash" do
      expect(subject.to_hash).to eq(expected_hash)
    end
  end
  describe "#inventories" do
    let(:player) { Player.new("1") }
    let(:item) { Item.new("Knife", "00:00", player.id) }
    let(:expected_inventories_hash) do
      {
        player.id => {
          item.name => 1
        }
      }
    end
    before do
      subject.add_player(player)
      subject.add_item(item)
    end
    it "returns the expected inventories hash" do
      expect(subject.inventories).to eq(expected_inventories_hash)
    end
  end
  describe "#add_player" do
    let(:player) { Player.new("1") }
    it "adds a new player to the game" do
      expect {
        subject.add_player(player)
      }.to change {
        subject.players
      }.from({}).to({ player.id => player })
    end
  end
  describe "#add_item" do
    let(:item) { Item.new("Knife", "00:00", "1") }
    it "adds a new item to the game" do
      expect {
        subject.add_item(item)
      }.to change {
        subject.items
      }.from([]).to([item])
    end
  end
  describe "#change_items_ownership" do
    context "when player is killed by another player" do
      let(:killed) { Player.new("1", "Killed") }
      let(:killer) { Player.new("2", "Killer") }
      let(:item) { Item.new("Knife", "00:00", killed.id) }
      before do
        subject.add_player(killed)
        subject.add_player(killer)
        subject.add_item(item)
      end
      it "changes item ownership" do
        expect {
          subject.change_items_ownership(killed.id, killer.id)
        }.to change {
          subject.items.first.owner_id
        }.from(killed.id).to(killer.id)
      end
    end
    context "when player is killed by world" do
      let(:killed) { Player.new("1", "Killed") }
      let(:item) { Item.new("Knife", "00:00", killed.id) }
      before do
        subject.add_player(killed)
        subject.add_item(item)
      end
      # World kills drop the item from the game entirely.
      it "changes item ownership" do
        expect {
          subject.change_items_ownership(killed.id, "1022", true)
        }.to change {
          subject.items.count
        }.from(1).to(0)
      end
    end
    context "when player commits suicide" do
      let(:killed) { Player.new("1", "Killed") }
      let(:item) { Item.new("Knife", "00:00", killed.id) }
      before do
        subject.add_player(killed)
        subject.add_item(item)
      end
      # Suicide also removes the item rather than transferring it.
      it "changes item ownership" do
        expect {
          subject.change_items_ownership(killed.id, killed.id)
        }.to change {
          subject.items.count
        }.from(1).to(0)
      end
    end
  end
end
| 23.525974 | 66 | 0.566105 |
79ac97c8af93db75f87f11142fad8af10da77e9f | 2,278 |
# generated from template-files/ios/ExpoKit.podspec
Pod::Spec.new do |s|
s.name = "ExpoKit"
s.version = "2.9.0"
s.summary = 'ExpoKit'
s.description = 'ExpoKit allows native projects to integrate with the Expo SDK.'
s.homepage = 'http://docs.expo.io'
s.license = 'MIT'
s.author = "650 Industries, Inc."
s.requires_arc = true
s.platform = :ios, "10.0"
s.default_subspec = "Core"
s.source = { :git => "http://github.com/expo/expo.git" }
s.subspec "Core" do |ss|
ss.source_files = "ios/Exponent/**/*.{h,m}", "template-files/keys.json"
ss.preserve_paths = "ios/Exponent/**/*.{h,m}"
ss.exclude_files = "ios/Exponent/Supporting/**", "ios/Exponent/Versioned/Optional/**/*.{h,m}"
ss.dependency 'Amplitude-iOS', '~> 3.8'
ss.dependency 'Analytics', '~> 3.5'
ss.dependency 'AppAuth', '~> 0.4'
ss.dependency 'CocoaLumberjack', '~> 3.2.1'
ss.dependency 'Crashlytics', '~> 3.8'
ss.dependency 'FBAudienceNetwork', '4.99.0'
ss.dependency 'FBSDKCoreKit', '4.37.0'
ss.dependency 'FBSDKLoginKit', '4.37.0'
ss.dependency 'FBSDKShareKit', '4.37.0'
ss.dependency 'Fabric', '~> 1.6'
ss.dependency 'GoogleSignIn', '~> 4.1'
ss.dependency 'GoogleMaps', '~> 2.5.0'
ss.dependency 'Google-Maps-iOS-Utils', '~> 2.1.0'
ss.dependency 'lottie-ios', '~> 2.5.0'
ss.dependency 'JKBigInteger2', '0.0.5'
ss.dependency 'Branch', '~> 0.24.2'
ss.dependency 'Google-Mobile-Ads-SDK', '~> 7.22.0'
ss.dependency 'React' # explicit dependency required for CocoaPods >= 1.5.0
# Universal modules required by ExpoKit so the code compiles
ss.dependency 'EXCore'
ss.dependency 'EXReactNativeAdapter'
ss.dependency 'EXSensorsInterface'
ss.dependency 'EXFileSystemInterface'
ss.dependency 'EXPermissionsInterface'
ss.dependency 'EXCameraInterface'
ss.dependency 'EXConstantsInterface'
end
s.subspec "Payments" do |ss|
ss.dependency "ExpoKit/Core"
ss.dependency 'Stripe', '~> 10.1.0'
ss.source_files = 'ios/Exponent/Versioned/Optional/Payments/*.{h,m}'
end
s.subspec "AR" do |ss|
ss.dependency "ExpoKit/Core"
ss.source_files = 'ios/Exponent/Versioned/Optional/ARKit/**'
end
s.subspec "FaceDetector" do |ss|
ss.dependency "EXFaceDetector"
end
end
| 34.515152 | 97 | 0.657594 |
79d331962388c7b9fd917c89834fcf8239bfaf3e | 736 | cask "macloggerdx-beta" do
  version "6.37b13"
  # Unversioned download URL, so the checksum cannot be pinned.
  sha256 :no_check
  url "https://www.dogparksoftware.com/files/MacLoggerDX.beta.dmg"
  name "MacLoggerDX"
  desc "Ham radio logging and rig control software"
  homepage "https://www.dogparksoftware.com/MacLoggerDX.html"
  livecheck do
    url :homepage
    regex(/Download:\s*v?(\d+(?:\.\d+)+b\d+)/i)
  end
  # Beta build cannot coexist with the stable cask.
  conflicts_with cask: "macloggerdx"
  depends_on macos: ">= :high_sierra"
  app "MacLoggerDX.app"
  zap trash: [
    "~/Library/Caches/com.apple.helpd/Generated/MacLoggerDX Help*",
    "~/Library/Caches/com.dogparksoftware.MacLoggerDX",
    "~/Library/HTTPStorages/com.dogparksoftware.MacLoggerDX",
    "~/Library/Preferences/com.dogparksoftware.MacLoggerDX*.plist",
  ]
end
| 27.259259 | 67 | 0.711957 |
7a4e13e1bde03393a342a8dafff1e636c9977dff | 4,332 | ########################################################################
# TerraformCommands provides command line terraform commands
# +plan+ and +apply+ for GeoEngineer
########################################################################
module GeoCLI::TerraformCommands
  # Serializes the environment into terraform JSON (and, by default, a
  # terraform state file) inside the working temp directory.
  def create_terraform_files(with_state = true)
    # If GPS is included then write some files to help debug
    write_gps if @gps
    # create terraform file
    File.open("#{@tmpdir}/#{@terraform_file}", 'w') { |file|
      file.write(JSON.pretty_generate(@environment.to_terraform_json()))
    }
    # create terraform state
    write_state if with_state
  end
  # Writes the environment's terraform state as pretty-printed JSON.
  def write_state
    File.open("#{@tmpdir}/#{@terraform_state_file}", 'w') { |file|
      file.write(JSON.pretty_generate(@environment.to_terraform_state()))
    }
  end
  # Dumps the raw and expanded GPS config as YAML for debugging.
  def write_gps
    File.open("#{@tmpdir}/gps.yml", 'w') { |file|
      file.write(gps.to_h.to_yaml)
    }
    File.open("#{@tmpdir}/gps.expand.yml", 'w') { |file|
      file.write(gps.expanded_hash.to_yaml)
    }
  end
  # Degree of parallelism passed to terraform via -parallelism.
  def terraform_parallelism
    Parallel.processor_count * 3 # Determined through trial/error
  end
  # Runs `terraform init` + `refresh` + `plan`, writing the plan file.
  # Exits the process with terraform's status code on failure.
  def terraform_plan
    plan_commands = [
      "cd #{@tmpdir}",
      "terraform init #{@no_color}",
      "terraform refresh #{@no_color}",
      "terraform plan --refresh=false -parallelism=#{terraform_parallelism}" \
      " -state=#{@terraform_state_file} -out=#{@plan_file} #{@no_color}"
    ]
    exit_code = shell_exec(plan_commands.join(" && "), true).exitstatus
    return unless exit_code.nonzero?
    puts "Plan Broken"
    exit exit_code
  end
  # Produces a destroy plan (`terraform plan -destroy`) into the plan file.
  def terraform_plan_destroy
    plan_destroy_commands = [
      "cd #{@tmpdir}",
      "terraform refresh #{@no_color}",
      "terraform plan -destroy --refresh=false -parallelism=#{terraform_parallelism}" \
      " -state=#{@terraform_state_file} -out=#{@plan_file} #{@no_color}"
    ]
    shell_exec(plan_destroy_commands.join(" && "), true)
  end
  # Applies the previously generated plan file.
  def terraform_apply
    apply_commands = [
      "cd #{@tmpdir}",
      "terraform apply -parallelism=#{terraform_parallelism}" \
      " #{@plan_file} #{@no_color}"
    ]
    shell_exec(apply_commands.join(" && "), true)
  end
  # Applies the destroy plan. Note: a destroy plan is executed with
  # `terraform apply <plan>`, so the command mirrors #terraform_apply.
  def terraform_destroy
    destroy_commands = [
      "cd #{@tmpdir}",
      "terraform apply -parallelism=#{terraform_parallelism}" \
      " #{@plan_file} #{@no_color}"
    ]
    shell_exec(destroy_commands.join(" && "), true)
  end
  # `geo test`: generates all files with AWS calls stubbed out.
  def test_cmd
    command :test do |c|
      c.syntax = 'geo test [<geo_files>]'
      c.description = 'Generates files while mocking AWS (useful for testing/debugging)'
      action = lambda do |args, options|
        create_terraform_files(false)
      end
      c.action ->(args, options) { GeoCLI::TestCmdStubs.stub! && init_action(:plan, &action).call(args, options) }
    end
  end
  # `geo plan`: generate files and show the terraform execution plan.
  def plan_cmd
    command :plan do |c|
      c.syntax = 'geo plan [<geo_files>]'
      c.description = 'Generate and show an execution plan'
      action = lambda do |args, options|
        create_terraform_files
        terraform_plan
      end
      c.action init_action(:plan, &action)
    end
  end
  # `geo apply`: plan, confirm interactively (unless --yes), then apply.
  def apply_cmd
    command :apply do |c|
      c.syntax = 'geo apply [<geo_files>]'
      c.option '--yes', 'Ignores the sanity check'
      c.description = 'Apply an execution plan'
      action = lambda do |args, options|
        create_terraform_files
        terraform_plan
        unless options.yes || yes?("Apply the above plan? [YES/NO]")
          puts "Rejecting Plan"
          exit 1
        end
        exit_code = terraform_apply.exitstatus
        exit exit_code if exit_code.nonzero?
      end
      c.action init_action(:apply, &action)
    end
  end
  # `geo destroy`: build a destroy plan, confirm interactively, then apply it.
  def destroy_cmd
    command :destroy do |c|
      c.syntax = 'geo destroy [<geo_files>]'
      c.description = 'Destroy an execution plan'
      action = lambda do |args, options|
        create_terraform_files
        exit_code = terraform_plan_destroy.exitstatus
        if exit_code.nonzero?
          puts "Plan Broken"
          exit exit_code
        end
        unless yes?("Apply the above plan? [YES/NO]")
          puts "Rejecting Plan"
          exit 1
        end
        exit_code = terraform_destroy.exitstatus
        exit exit_code if exit_code.nonzero?
      end
      c.action init_action(:destroy, &action)
    end
  end
end
| 29.469388 | 114 | 0.613112 |
38998e550faf58e2b7f2bc3dadbcdb4ee905bc98 | 2,095 | # frozen_string_literal: true
require 'spec_helper'
require 'valkyrie/specs/shared_specs'
RSpec.describe Valkyrie::Persistence::Fedora::MetadataAdapter, :wipe_fedora do
  # Run the whole suite once per supported Fedora major version.
  [4, 5, 6].each do |fedora_version|
    context "fedora #{fedora_version}" do
      let(:version) { fedora_version }
      let(:adapter) { described_class.new(fedora_adapter_config(base_path: "test_fed", fedora_version: version)) }
      it_behaves_like "a Valkyrie::MetadataAdapter"
      describe "#schema" do
        context "by default" do
          specify { expect(adapter.schema).to be_a Valkyrie::Persistence::Fedora::PermissiveSchema }
        end
        context "with a custom schema" do
          # Any object passed as :schema should be used verbatim.
          let(:adapter) { described_class.new(fedora_adapter_config(base_path: "test_fed", schema: "custom-schema", fedora_version: version)) }
          specify { expect(adapter.schema).to eq("custom-schema") }
        end
      end
      describe "#id_to_uri" do
        it "converts ids with a slash" do
          id = "test/default"
          # Fedora 4 URIs include the pairtree segments; 5+ do not.
          if adapter.fedora_version == 4
            expect(adapter.id_to_uri(id).to_s).to eq "http://localhost:8988/rest/test_fed/te/st/test%2Fdefault"
          else
            expect(adapter.id_to_uri(id).to_s).to eq "#{adapter.url_prefix}/test_fed/test%2Fdefault"
          end
        end
      end
      describe "#uri_to_id" do
        # Round-trips the URI produced by #id_to_uri back to the original id.
        it "converts ids with a slash" do
          uri = adapter.id_to_uri("test/default")
          expect(adapter.uri_to_id(uri).to_s).to eq "test/default"
        end
      end
      describe "#pair_path" do
        it "creates pairs until the first dash" do
          expect(adapter.pair_path('abcdef-ghijkl')).to eq('ab/cd/ef')
        end
        it "creates pairs until the first slash" do
          expect(adapter.pair_path('admin_set/default')).to eq('ad/mi/n_/se/t')
        end
      end
      describe "#id" do
        it "creates an md5 hash from the connection_prefix" do
          expected = Digest::MD5.hexdigest adapter.connection_prefix
          expect(adapter.id.to_s).to eq expected
        end
      end
    end
  end
end
| 34.916667 | 143 | 0.639618 |
e89ea8b21d5700f00df15e481703c2840b152408 | 12,620 | module ActionDispatch
module Routing
# Polymorphic URL helpers are methods for smart resolution to a named route call when
# given an Active Record model instance. They are to be used in combination with
# ActionController::Resources.
#
# These methods are useful when you want to generate the correct URL or path to a RESTful
# resource without having to know the exact type of the record in question.
#
# Nested resources and/or namespaces are also supported, as illustrated in the example:
#
# polymorphic_url([:admin, @article, @comment])
#
# results in:
#
# admin_article_comment_url(@article, @comment)
#
# == Usage within the framework
#
# Polymorphic URL helpers are used in a number of places throughout the \Rails framework:
#
# * <tt>url_for</tt>, so you can use it with a record as the argument, e.g.
# <tt>url_for(@article)</tt>;
# * ActionView::Helpers::FormHelper uses <tt>polymorphic_path</tt>, so you can write
# <tt>form_for(@article)</tt> without having to specify <tt>:url</tt> parameter for the form
# action;
# * <tt>redirect_to</tt> (which, in fact, uses <tt>url_for</tt>) so you can write
# <tt>redirect_to(post)</tt> in your controllers;
# * ActionView::Helpers::AtomFeedHelper, so you don't have to explicitly specify URLs
# for feed entries.
#
# == Prefixed polymorphic helpers
#
# In addition to <tt>polymorphic_url</tt> and <tt>polymorphic_path</tt> methods, a
# number of prefixed helpers are available as a shorthand to <tt>action: "..."</tt>
# in options. Those are:
#
# * <tt>edit_polymorphic_url</tt>, <tt>edit_polymorphic_path</tt>
# * <tt>new_polymorphic_url</tt>, <tt>new_polymorphic_path</tt>
#
# Example usage:
#
# edit_polymorphic_path(@post) # => "/posts/1/edit"
# polymorphic_path(@post, format: :pdf) # => "/posts/1.pdf"
#
# == Usage with mounted engines
#
# If you are using a mounted engine and you need to use a polymorphic_url
# pointing at the engine's routes, pass in the engine's route proxy as the first
# argument to the method. For example:
#
# polymorphic_url([blog, @post]) # calls blog.post_path(@post)
# form_for([blog, @post]) # => "/blog/posts/1"
#
module PolymorphicRoutes
# Constructs a call to a named RESTful route for the given record and returns the
# resulting URL string. For example:
#
# # calls post_url(post)
# polymorphic_url(post) # => "http://example.com/posts/1"
# polymorphic_url([blog, post]) # => "http://example.com/blogs/1/posts/1"
# polymorphic_url([:admin, blog, post]) # => "http://example.com/admin/blogs/1/posts/1"
# polymorphic_url([user, :blog, post]) # => "http://example.com/users/1/blog/posts/1"
# polymorphic_url(Comment) # => "http://example.com/comments"
#
# ==== Options
#
# * <tt>:action</tt> - Specifies the action prefix for the named route:
# <tt>:new</tt> or <tt>:edit</tt>. Default is no prefix.
# * <tt>:routing_type</tt> - Allowed values are <tt>:path</tt> or <tt>:url</tt>.
# Default is <tt>:url</tt>.
#
# Also includes all the options from <tt>url_for</tt>. These include such
# things as <tt>:anchor</tt> or <tt>:trailing_slash</tt>. Example usage
# is given below:
#
# polymorphic_url([blog, post], anchor: 'my_anchor')
# # => "http://example.com/blogs/1/posts/1#my_anchor"
# polymorphic_url([blog, post], anchor: 'my_anchor', script_name: "/my_app")
# # => "http://example.com/my_app/blogs/1/posts/1#my_anchor"
#
# For all of these options, see the documentation for {url_for}[rdoc-ref:ActionDispatch::Routing::UrlFor].
#
# ==== Functionality
#
# # an Article record
# polymorphic_url(record) # same as article_url(record)
#
# # a Comment record
# polymorphic_url(record) # same as comment_url(record)
#
# # it recognizes new records and maps to the collection
# record = Comment.new
# polymorphic_url(record) # same as comments_url()
#
# # the class of a record will also map to the collection
# polymorphic_url(Comment) # same as comments_url()
#
def polymorphic_url(record_or_hash_or_array, options = {})
if Hash === record_or_hash_or_array
options = record_or_hash_or_array.merge(options)
record = options.delete :id
return polymorphic_url record, options
end
if mapping = polymorphic_mapping(record_or_hash_or_array)
return mapping.call(self, [record_or_hash_or_array, options], false)
end
opts = options.dup
action = opts.delete :action
type = opts.delete(:routing_type) || :url
HelperMethodBuilder.polymorphic_method self,
record_or_hash_or_array,
action,
type,
opts
end
# Returns the path component of a URL for the given record. It uses
# <tt>polymorphic_url</tt> with <tt>routing_type: :path</tt>.
def polymorphic_path(record_or_hash_or_array, options = {})
if Hash === record_or_hash_or_array
options = record_or_hash_or_array.merge(options)
record = options.delete :id
return polymorphic_path record, options
end
if mapping = polymorphic_mapping(record_or_hash_or_array)
return mapping.call(self, [record_or_hash_or_array, options], true)
end
opts = options.dup
action = opts.delete :action
type = :path
HelperMethodBuilder.polymorphic_method self,
record_or_hash_or_array,
action,
type,
opts
end
%w(edit new).each do |action|
module_eval <<-EOT, __FILE__, __LINE__ + 1
def #{action}_polymorphic_url(record_or_hash, options = {})
polymorphic_url_for_action("#{action}", record_or_hash, options)
end
def #{action}_polymorphic_path(record_or_hash, options = {})
polymorphic_path_for_action("#{action}", record_or_hash, options)
end
EOT
end
private
def polymorphic_url_for_action(action, record_or_hash, options)
polymorphic_url(record_or_hash, options.merge(action: action))
end
def polymorphic_path_for_action(action, record_or_hash, options)
polymorphic_path(record_or_hash, options.merge(action: action))
end
def polymorphic_mapping(record)
if record.respond_to?(:to_model)
_routes.polymorphic_mappings[record.to_model.model_name.name]
else
_routes.polymorphic_mappings[record.class.name]
end
end
class HelperMethodBuilder # :nodoc:
CACHE = { "path" => {}, "url" => {} }
def self.get(action, type)
type = type.to_s
CACHE[type].fetch(action) { build action, type }
end
def self.url; CACHE["url".freeze][nil]; end
def self.path; CACHE["path".freeze][nil]; end
def self.build(action, type)
prefix = action ? "#{action}_" : ""
suffix = type
if action.to_s == "new"
HelperMethodBuilder.singular prefix, suffix
else
HelperMethodBuilder.plural prefix, suffix
end
end
def self.singular(prefix, suffix)
new(->(name) { name.singular_route_key }, prefix, suffix)
end
def self.plural(prefix, suffix)
new(->(name) { name.route_key }, prefix, suffix)
end
def self.polymorphic_method(recipient, record_or_hash_or_array, action, type, options)
builder = get action, type
case record_or_hash_or_array
when Array
record_or_hash_or_array = record_or_hash_or_array.compact
if record_or_hash_or_array.empty?
raise ArgumentError, "Nil location provided. Can't build URI."
end
if record_or_hash_or_array.first.is_a?(ActionDispatch::Routing::RoutesProxy)
recipient = record_or_hash_or_array.shift
end
method, args = builder.handle_list record_or_hash_or_array
when String, Symbol
method, args = builder.handle_string record_or_hash_or_array
when Class
method, args = builder.handle_class record_or_hash_or_array
when nil
raise ArgumentError, "Nil location provided. Can't build URI."
else
method, args = builder.handle_model record_or_hash_or_array
end
if options.empty?
recipient.send(method, *args)
else
recipient.send(method, *args, options)
end
end
attr_reader :suffix, :prefix
def initialize(key_strategy, prefix, suffix)
@key_strategy = key_strategy
@prefix = prefix
@suffix = suffix
end
def handle_string(record)
[get_method_for_string(record), []]
end
def handle_string_call(target, str)
target.send get_method_for_string str
end
def handle_class(klass)
[get_method_for_class(klass), []]
end
def handle_class_call(target, klass)
target.send get_method_for_class klass
end
def handle_model(record)
args = []
model = record.to_model
named_route = if model.persisted?
args << model
get_method_for_string model.model_name.singular_route_key
else
get_method_for_class model
end
[named_route, args]
end
def handle_model_call(target, record)
if mapping = polymorphic_mapping(target, record)
mapping.call(target, [record], suffix == "path")
else
method, args = handle_model(record)
target.send(method, *args)
end
end
def handle_list(list)
record_list = list.dup
record = record_list.pop
args = []
route = record_list.map { |parent|
case parent
when Symbol, String
parent.to_s
when Class
args << parent
parent.model_name.singular_route_key
else
args << parent.to_model
parent.to_model.model_name.singular_route_key
end
}
route <<
case record
when Symbol, String
record.to_s
when Class
@key_strategy.call record.model_name
else
model = record.to_model
if model.persisted?
args << model
model.model_name.singular_route_key
else
@key_strategy.call model.model_name
end
end
route << suffix
named_route = prefix + route.join("_")
[named_route, args]
end
private
def polymorphic_mapping(target, record)
if record.respond_to?(:to_model)
target._routes.polymorphic_mappings[record.to_model.model_name.name]
else
target._routes.polymorphic_mappings[record.class.name]
end
end
def get_method_for_class(klass)
name = @key_strategy.call klass.model_name
get_method_for_string name
end
def get_method_for_string(str)
"#{prefix}#{str}_#{suffix}"
end
[nil, "new", "edit"].each do |action|
CACHE["url"][action] = build action, "url"
CACHE["path"][action] = build action, "path"
end
end
end
end
end
| 35.954416 | 112 | 0.56775 |
ffdc7c74e1929ea47dd34dc47274809357fd2c27 | 1,947 | RSpec::Matchers.define :respond_with_status do |expected_status|
  # Invoke the action lambda, then assert on the HTTP status it produced.
  match do |action|
    action.call
    expect(response).to have_http_status(expected_status)
    true
  end
  failure_message do
    "expected action to respond with #{expected_status}, but got #{response.status}"
  end
end
# Custom matcher behind +respond_with_redirect_to+: runs the action block,
# then asserts the response redirects to the expected location. The target
# may be supplied eagerly (as an argument) or lazily (as a block), which is
# useful when the path depends on records created inside the action.
class RespondWithRedirectMatcher
  # @param rspec [Object] the example context (supplies #expect / #redirect_to)
  # @param response [Object] the controller response under test
  # @param target_path [String, nil] expected redirect target (eager form)
  # @param target_path_block [Proc, nil] lazily evaluated redirect target
  def initialize(rspec, response, target_path, &target_path_block)
    @rspec = rspec
    @response = response
    @target_path = target_path
    @target_path_block = target_path_block
  end

  def matches?(block)
    block.call
    # Resolve the lazy block only after the action has run; fall back to the
    # eager path. Plain Ruby instead of ActiveSupport's #try, so the matcher
    # works (and fails loudly) without Rails loaded.
    @resolved_target_path = (@target_path_block && @target_path_block.call) || @target_path
    @rspec.expect(@response).to @rspec.redirect_to(@resolved_target_path)
    true
  end

  def failure_message
    # Report the path that was actually asserted; the previous implementation
    # printed nil whenever the block form was used.
    "expected a redirect to #{@resolved_target_path || @target_path}"
  end

  def description
    "respond with redirect"
  end
end
# Spec-facing entry point. Accepts either an eager path argument or a lazy
# block (evaluated after the action runs). define_method is used so the
# trailing block can be captured and forwarded to the matcher.
define_method :respond_with_redirect_to do |*target_paths, &target_path_block|
  target_path = target_paths.first
  RespondWithRedirectMatcher.new(self, response, target_path, &target_path_block)
end
# Asserts that the action renders the given template.
RSpec::Matchers.define :respond_with_template do |template_name|
  match do |block|
    block.call
    expect(response).to have_rendered(template_name)
    true
  end
end
# Asserts that the action assigns a truthy value to every named ivar,
# e.g. expect { get :show }.to assign(:user, :posts)
RSpec::Matchers.define :assign do |*vars|
  match do |block|
    block.call
    vars.all? { |var| assigns.symbolize_keys[var] }
  end
end
# Asserts that the action sets a flash message, e.g.
#   expect { post :create }.to set_flash(:notice).to(/saved/)
#   expect { get :show }.to set_flash(:alert).now
RSpec::Matchers.define :set_flash do |type|
  # Constrain the expected message (string or regexp, via String#match).
  chain :to do |message|
    @expected_message = message
  end
  # Check flash.now (current request only) instead of the persisted flash.
  chain :now do
    @now = true
  end
  match do |block|
    block.call
    message = @now ? flash.now[type] : flash[type]
    if @expected_message
      message.match(@expected_message)
    else
      message.present?
    end
  end
  failure_message do |_actual|
    # Read the same slot the match block checked; previously this always
    # read flash[type], reporting the wrong actual value for `.now`.
    actual_message = @now ? flash.now[type] : flash[type]
    message = "Expected flash#{'.now' if @now}[#{type}] to "
    if @expected_message
      "#{message} match '#{@expected_message}', but was '#{actual_message}'"
    else
      "#{message} be set, but it was not"
    end
  end
end
| 22.125 | 84 | 0.700051 |
d5b7effed84d45bb1017bb4c2bccda22c8ccbcc8 | 57 | Spree::Admin::PaymentsController.helper :braintree_admin
| 28.5 | 56 | 0.859649 |
bbdb186324c2c870198a923cd459ddbc3c82ce94 | 2,532 | module Feedjira
module FeedUtilities
UPDATABLE_ATTRIBUTES = %w(title feed_url url last_modified etag).freeze
attr_writer :new_entries, :updated, :last_modified
attr_accessor :etag
def self.included(base)
base.extend ClassMethods
end
module ClassMethods
def parse(xml, &block)
xml = xml.lstrip
xml = preprocess(xml) if preprocess_xml
super xml, &block
end
def preprocess(xml)
# noop
xml
end
def preprocess_xml=(value)
@preprocess_xml = value
end
def preprocess_xml
@preprocess_xml
end
end
def last_modified
@last_modified ||= begin
published = entries.reject { |e| e.published.nil? }
entry = published.sort_by { |e| e.published if e.published }.last
entry ? entry.published : nil
end
end
def updated?
@updated || false
end
def new_entries
@new_entries ||= []
end
def new_entries?
!new_entries.empty?
end
def update_from_feed(feed)
self.new_entries += find_new_entries_for(feed)
entries.unshift(*self.new_entries)
@updated = false
UPDATABLE_ATTRIBUTES.each do |name|
@updated ||= update_attribute(feed, name)
end
end
def update_attribute(feed, name)
old_value = send(name)
new_value = feed.send(name)
if old_value != new_value
send("#{name}=", new_value)
true
else
false
end
end
def sanitize_entries!
entries.each(&:sanitize!)
end
private
# This implementation is a hack, which is why it's so ugly. It's to get
# around the fact that not all feeds have a published date. However,
# they're always ordered with the newest one first. So we go through the
# entries just parsed and insert each one as a new entry until we get to
# one that has the same id as the the newest for the feed.
def find_new_entries_for(feed)
return feed.entries if entries.length.zero?
latest_entry = entries.first
found_new_entries = []
feed.entries.each do |entry|
break unless new_entry?(entry, latest_entry)
found_new_entries << entry
end
found_new_entries
end
def new_entry?(entry, latest)
nil_ids = entry.entry_id.nil? && latest.entry_id.nil?
new_id = entry.entry_id != latest.entry_id
new_url = entry.url != latest.url
(nil_ids || new_id) && new_url
end
end
end
| 23.018182 | 76 | 0.625987 |
b948d7efaa2d9a09be05b2ddc450376c51217383 | 10,471 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2018_12_01
module Models
#
# P2SVpnServerConfiguration Resource.
#
class P2SVpnServerConfiguration < SubResource
include MsRestAzure
# @return [String] The name of the P2SVpnServerConfiguration that is
# unique within a VirtualWan in a resource group. This name can be used
# to access the resource along with Paren VirtualWan resource name.
attr_accessor :p2svpn_server_configuration_properties_name
# @return [Array<VpnGatewayTunnelingProtocol>] VPN protocols for the
# P2SVpnServerConfiguration.
attr_accessor :vpn_protocols
# @return [Array<P2SVpnServerConfigVpnClientRootCertificate>] VPN client
# root certificate of P2SVpnServerConfiguration.
attr_accessor :p2svpn_server_config_vpn_client_root_certificates
# @return [Array<P2SVpnServerConfigVpnClientRevokedCertificate>] VPN
# client revoked certificate of P2SVpnServerConfiguration.
attr_accessor :p2svpn_server_config_vpn_client_revoked_certificates
# @return [Array<P2SVpnServerConfigRadiusServerRootCertificate>] Radius
# Server root certificate of P2SVpnServerConfiguration.
attr_accessor :p2svpn_server_config_radius_server_root_certificates
# @return [Array<P2SVpnServerConfigRadiusClientRootCertificate>] Radius
# client root certificate of P2SVpnServerConfiguration.
attr_accessor :p2svpn_server_config_radius_client_root_certificates
# @return [Array<IpsecPolicy>] VpnClientIpsecPolicies for
# P2SVpnServerConfiguration.
attr_accessor :vpn_client_ipsec_policies
# @return [String] The radius server address property of the
# P2SVpnServerConfiguration resource for point to site client connection.
attr_accessor :radius_server_address
# @return [String] The radius secret property of the
# P2SVpnServerConfiguration resource for point to site client connection.
attr_accessor :radius_server_secret
# @return [String] The provisioning state of the
# P2SVpnServerConfiguration resource. Possible values are: 'Updating',
# 'Deleting', and 'Failed'.
attr_accessor :provisioning_state
# @return [Array<SubResource>]
attr_accessor :p2svpn_gateways
# @return [String] A unique read-only string that changes whenever the
# resource is updated.
attr_accessor :p2svpn_server_configuration_properties_etag
# @return [String] The name of the resource that is unique within a
# resource group. This name can be used to access the resource.
attr_accessor :name
# @return [String] Gets a unique read-only string that changes whenever
# the resource is updated.
attr_accessor :etag
#
# Mapper for P2SVpnServerConfiguration class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'P2SVpnServerConfiguration',
type: {
name: 'Composite',
class_name: 'P2SVpnServerConfiguration',
model_properties: {
id: {
client_side_validation: true,
required: false,
serialized_name: 'id',
type: {
name: 'String'
}
},
p2svpn_server_configuration_properties_name: {
client_side_validation: true,
required: false,
serialized_name: 'properties.name',
type: {
name: 'String'
}
},
vpn_protocols: {
client_side_validation: true,
required: false,
serialized_name: 'properties.vpnProtocols',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'VpnGatewayTunnelingProtocolElementType',
type: {
name: 'String'
}
}
}
},
p2svpn_server_config_vpn_client_root_certificates: {
client_side_validation: true,
required: false,
serialized_name: 'properties.p2SVpnServerConfigVpnClientRootCertificates',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'P2SVpnServerConfigVpnClientRootCertificateElementType',
type: {
name: 'Composite',
class_name: 'P2SVpnServerConfigVpnClientRootCertificate'
}
}
}
},
p2svpn_server_config_vpn_client_revoked_certificates: {
client_side_validation: true,
required: false,
serialized_name: 'properties.p2SVpnServerConfigVpnClientRevokedCertificates',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'P2SVpnServerConfigVpnClientRevokedCertificateElementType',
type: {
name: 'Composite',
class_name: 'P2SVpnServerConfigVpnClientRevokedCertificate'
}
}
}
},
p2svpn_server_config_radius_server_root_certificates: {
client_side_validation: true,
required: false,
serialized_name: 'properties.p2SVpnServerConfigRadiusServerRootCertificates',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'P2SVpnServerConfigRadiusServerRootCertificateElementType',
type: {
name: 'Composite',
class_name: 'P2SVpnServerConfigRadiusServerRootCertificate'
}
}
}
},
p2svpn_server_config_radius_client_root_certificates: {
client_side_validation: true,
required: false,
serialized_name: 'properties.p2SVpnServerConfigRadiusClientRootCertificates',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'P2SVpnServerConfigRadiusClientRootCertificateElementType',
type: {
name: 'Composite',
class_name: 'P2SVpnServerConfigRadiusClientRootCertificate'
}
}
}
},
vpn_client_ipsec_policies: {
client_side_validation: true,
required: false,
serialized_name: 'properties.vpnClientIpsecPolicies',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'IpsecPolicyElementType',
type: {
name: 'Composite',
class_name: 'IpsecPolicy'
}
}
}
},
radius_server_address: {
client_side_validation: true,
required: false,
serialized_name: 'properties.radiusServerAddress',
type: {
name: 'String'
}
},
radius_server_secret: {
client_side_validation: true,
required: false,
serialized_name: 'properties.radiusServerSecret',
type: {
name: 'String'
}
},
provisioning_state: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.provisioningState',
type: {
name: 'String'
}
},
p2svpn_gateways: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.p2SVpnGateways',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'SubResourceElementType',
type: {
name: 'Composite',
class_name: 'SubResource'
}
}
}
},
p2svpn_server_configuration_properties_etag: {
client_side_validation: true,
required: false,
serialized_name: 'properties.etag',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
serialized_name: 'name',
type: {
name: 'String'
}
},
etag: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'etag',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 37.665468 | 98 | 0.523446 |
3371c6007ebb859d55e907596240aeacddd0fd66 | 406 | require 'spec_helper'
describe 'Puppet_metrics_dashboard::HostList' do
  # Accepts a plain list of hostnames.
  it do
    is_expected.to allow_value(['some-host.test',
                                'some-other.host.test'])
  end
  # Accepts a [hostname, port] tuple mixed in with plain hostnames.
  it do
    is_expected.to allow_value(['some-host.test',
                                ['some-other.host.test', 9140]])
  end
  # Rejects a tuple whose port is a string rather than an integer.
  it do
    is_expected.not_to allow_value([['some-host.test', '9140']])
  end
end
| 23.882353 | 64 | 0.586207 |
01f2fcd55c1688364c853b7518fdf8c61ca4ada9 | 2,176 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Elasticsearch
  module API
    module Rollup
      module Actions
        # Creates a rollup job.
        # This functionality is Experimental and may be changed or removed
        # completely in a future release. Elastic will take a best effort approach
        # to fix any issues, but experimental features are not subject to the
        # support SLA of official GA features.
        #
        # @option arguments [String] :id The ID of the job to create
        # @option arguments [Hash] :headers Custom HTTP headers
        # @option arguments [Hash] :body The job configuration (*Required*)
        #
        # @see https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-put-job.html
        #
        def put_job(arguments = {})
          raise ArgumentError, "Required argument 'body' missing" unless arguments[:body]
          raise ArgumentError, "Required argument 'id' missing" unless arguments[:id]

          # Clone so deletions below do not mutate the caller's hash.
          arguments = arguments.clone
          headers = arguments.delete(:headers) || {}

          body = arguments.delete(:body)

          _id = arguments.delete(:id)

          method = Elasticsearch::API::HTTP_PUT
          # Job id is URL-escaped into the request path.
          path   = "_rollup/job/#{Utils.__listify(_id)}"
          # Any remaining arguments would become query params; none are defined.
          params = {}

          Elasticsearch::API::Response.new(
            perform_request(method, path, params, body, headers)
          )
        end
      end
    end
  end
end
| 38.175439 | 97 | 0.674632 |
ac768a06d181b7703276c85b0aa445354b909311 | 10,502 |
# This file was created from a native ruby build on a rhel-6-s390x system. Any
# changes made to this file will be lost the next time ruby is built.
module RbConfig
RUBY_VERSION == "2.4.4" or
raise "ruby lib version (2.4.4) doesn't match executable version (#{RUBY_VERSION})"
TOPDIR = File.dirname(__FILE__).chomp!("/lib/ruby/2.4.0/s390x-linux")
DESTDIR = '' unless defined? DESTDIR
CONFIG = {}
CONFIG["DESTDIR"] = DESTDIR
CONFIG["MAJOR"] = "2"
CONFIG["MINOR"] = "4"
CONFIG["TEENY"] = "4"
CONFIG["PATCHLEVEL"] = "296"
CONFIG["INSTALL"] = '/usr/bin/install -c'
CONFIG["EXEEXT"] = ""
CONFIG["prefix"] = (TOPDIR || DESTDIR + "/opt/puppetlabs/puppet")
CONFIG["ruby_install_name"] = "ruby"
CONFIG["RUBY_INSTALL_NAME"] = "ruby"
CONFIG["RUBY_SO_NAME"] = "ruby"
CONFIG["exec"] = "exec"
CONFIG["ruby_pc"] = "ruby-2.4.pc"
CONFIG["PACKAGE"] = "ruby"
CONFIG["BUILTIN_TRANSSRCS"] = " newline.c"
CONFIG["USE_RUBYGEMS"] = "YES"
CONFIG["MANTYPE"] = "doc"
CONFIG["NROFF"] = "/usr/bin/nroff"
CONFIG["vendorarchhdrdir"] = "$(vendorhdrdir)/$(sitearch)"
CONFIG["sitearchhdrdir"] = "$(sitehdrdir)/$(sitearch)"
CONFIG["rubyarchhdrdir"] = "$(rubyhdrdir)/$(arch)"
CONFIG["vendorhdrdir"] = "$(rubyhdrdir)/vendor_ruby"
CONFIG["sitehdrdir"] = "$(rubyhdrdir)/site_ruby"
CONFIG["rubyhdrdir"] = "$(includedir)/$(RUBY_VERSION_NAME)"
CONFIG["RUBY_SEARCH_PATH"] = ""
CONFIG["UNIVERSAL_INTS"] = ""
CONFIG["UNIVERSAL_ARCHNAMES"] = ""
CONFIG["configure_args"] = " '--prefix=/opt/puppetlabs/puppet' '--with-opt-dir=/opt/puppetlabs/puppet' '--enable-shared' '--enable-bundled-libyaml' '--disable-install-doc' '--disable-install-rdoc'"
CONFIG["CONFIGURE"] = "configure"
CONFIG["vendorarchdir"] = "$(vendorlibdir)/$(sitearch)"
CONFIG["vendorlibdir"] = "$(vendordir)/$(ruby_version)"
CONFIG["vendordir"] = "$(rubylibprefix)/vendor_ruby"
CONFIG["sitearchdir"] = "$(sitelibdir)/$(sitearch)"
CONFIG["sitelibdir"] = "$(sitedir)/$(ruby_version)"
CONFIG["sitedir"] = "$(rubylibprefix)/site_ruby"
CONFIG["rubyarchdir"] = "$(rubylibdir)/$(arch)"
CONFIG["rubylibdir"] = "$(rubylibprefix)/$(ruby_version)"
CONFIG["ruby_version"] = "2.4.0"
CONFIG["sitearch"] = "$(arch)"
CONFIG["arch"] = "s390x-linux"
CONFIG["sitearchincludedir"] = "$(includedir)/$(sitearch)"
CONFIG["archincludedir"] = "$(includedir)/$(arch)"
CONFIG["sitearchlibdir"] = "$(libdir)/$(sitearch)"
CONFIG["archlibdir"] = "$(libdir)/$(arch)"
CONFIG["libdirname"] = "libdir"
CONFIG["RUBY_EXEC_PREFIX"] = "/opt/puppetlabs/puppet"
CONFIG["RUBY_LIB_VERSION"] = ""
CONFIG["RUBY_LIB_VERSION_STYLE"] = "3\t/* full */"
CONFIG["RI_BASE_NAME"] = "ri"
CONFIG["ridir"] = "$(datarootdir)/$(RI_BASE_NAME)"
CONFIG["rubysitearchprefix"] = "$(rubylibprefix)/$(sitearch)"
CONFIG["rubyarchprefix"] = "$(rubylibprefix)/$(arch)"
CONFIG["MAKEFILES"] = "Makefile GNUmakefile"
CONFIG["PLATFORM_DIR"] = ""
CONFIG["THREAD_MODEL"] = "pthread"
CONFIG["SYMBOL_PREFIX"] = ""
CONFIG["EXPORT_PREFIX"] = ""
CONFIG["COMMON_HEADERS"] = ""
CONFIG["COMMON_MACROS"] = ""
CONFIG["COMMON_LIBS"] = ""
CONFIG["MAINLIBS"] = ""
CONFIG["ENABLE_SHARED"] = "yes"
CONFIG["DLDLIBS"] = " -lc"
CONFIG["SOLIBS"] = "$(LIBS)"
CONFIG["LIBRUBYARG_SHARED"] = "-Wl,-R -Wl,$(libdir) -L$(libdir) -l$(RUBY_SO_NAME)"
CONFIG["LIBRUBYARG_STATIC"] = "-Wl,-R -Wl,$(libdir) -L$(libdir) -l$(RUBY_SO_NAME)-static"
CONFIG["LIBRUBYARG"] = "$(LIBRUBYARG_SHARED)"
CONFIG["LIBRUBY"] = "$(LIBRUBY_SO)"
CONFIG["LIBRUBY_ALIASES"] = "lib$(RUBY_SO_NAME).so.$(MAJOR).$(MINOR) lib$(RUBY_SO_NAME).so"
CONFIG["LIBRUBY_SO"] = "lib$(RUBY_SO_NAME).so.$(MAJOR).$(MINOR).$(TEENY)"
CONFIG["LIBRUBY_A"] = "lib$(RUBY_SO_NAME)-static.a"
CONFIG["RUBYW_INSTALL_NAME"] = ""
CONFIG["rubyw_install_name"] = ""
CONFIG["EXTDLDFLAGS"] = ""
CONFIG["EXTLDFLAGS"] = ""
CONFIG["strict_warnflags"] = "-std=iso9899:1999"
CONFIG["warnflags"] = "-Wall -Wextra -Wno-unused-parameter -Wno-parentheses -Wno-long-long -Wno-missing-field-initializers -Wunused-variable -Wpointer-arith -Wwrite-strings -Wdeclaration-after-statement -Wimplicit-function-declaration"
CONFIG["debugflags"] = "-ggdb3"
CONFIG["optflags"] = "-O3 -fno-fast-math"
CONFIG["cxxflags"] = " $(optflags) $(debugflags) $(warnflags)"
CONFIG["cflags"] = " $(optflags) $(debugflags) $(warnflags)"
CONFIG["cppflags"] = ""
CONFIG["NULLCMD"] = ":"
CONFIG["DLNOBJ"] = "dln.o"
CONFIG["INSTALLDOC"] = "nodoc"
CONFIG["CAPITARGET"] = "nodoc"
CONFIG["RDOCTARGET"] = "nodoc"
CONFIG["DTRACE_GLOMMED_OBJ"] = ""
CONFIG["DTRACE_OBJ"] = ""
CONFIG["DTRACE_EXT"] = "dmyh"
CONFIG["EXECUTABLE_EXTS"] = ""
CONFIG["ARCHFILE"] = ""
CONFIG["LIBRUBY_RELATIVE"] = "no"
CONFIG["EXTOUT"] = ".ext"
CONFIG["RUNRUBY_COMMAND"] = "$(MINIRUBY) $(srcdir)/tool/runruby.rb --extout=$(EXTOUT) $(RUNRUBYOPT)"
CONFIG["PREP"] = "miniruby$(EXEEXT)"
CONFIG["BTESTRUBY"] = "$(MINIRUBY)"
CONFIG["CROSS_COMPILING"] = "no"
CONFIG["TEST_RUNNABLE"] = "yes"
CONFIG["rubylibprefix"] = "$(libdir)/$(RUBY_BASE_NAME)"
CONFIG["setup"] = "Setup"
CONFIG["ENCSTATIC"] = ""
CONFIG["EXTSTATIC"] = ""
CONFIG["STRIP"] = "strip -S -x"
CONFIG["TRY_LINK"] = ""
CONFIG["PRELOADENV"] = "LD_PRELOAD"
CONFIG["LIBPATHENV"] = "LD_LIBRARY_PATH"
CONFIG["RPATHFLAG"] = " -Wl,-R%1$-s"
CONFIG["LIBPATHFLAG"] = " -L%1$-s"
CONFIG["LINK_SO"] = ""
CONFIG["ASMEXT"] = "S"
CONFIG["LIBEXT"] = "a"
CONFIG["DLEXT2"] = ""
CONFIG["DLEXT"] = "so"
CONFIG["LDSHAREDXX"] = "$(CXX) -shared"
CONFIG["LDSHARED"] = "$(CC) -shared"
CONFIG["CCDLFLAGS"] = "-fPIC"
CONFIG["STATIC"] = ""
CONFIG["ARCH_FLAG"] = ""
CONFIG["DLDFLAGS"] = "-L/opt/puppetlabs/puppet/lib -Wl,-R/opt/puppetlabs/puppet/lib "
CONFIG["ALLOCA"] = ""
CONFIG["codesign"] = ""
CONFIG["POSTLINK"] = ":"
CONFIG["WERRORFLAG"] = "-Werror"
CONFIG["CHDIR"] = "cd -P"
CONFIG["RMALL"] = "rm -fr"
CONFIG["RMDIRS"] = "rmdir --ignore-fail-on-non-empty -p"
CONFIG["RMDIR"] = "rmdir --ignore-fail-on-non-empty"
CONFIG["CP"] = "cp"
CONFIG["RM"] = "rm -f"
CONFIG["PKG_CONFIG"] = "pkg-config"
CONFIG["PYTHON"] = ""
CONFIG["DOXYGEN"] = ""
CONFIG["DOT"] = ""
CONFIG["DTRACE"] = ""
CONFIG["MAKEDIRS"] = "/bin/mkdir -p"
CONFIG["MKDIR_P"] = "/bin/mkdir -p"
CONFIG["INSTALL_DATA"] = "$(INSTALL) -m 644"
CONFIG["INSTALL_SCRIPT"] = "$(INSTALL)"
CONFIG["INSTALL_PROGRAM"] = "$(INSTALL)"
CONFIG["SET_MAKE"] = ""
CONFIG["LN_S"] = "ln -s"
CONFIG["NM"] = "nm"
CONFIG["DLLWRAP"] = ""
CONFIG["WINDRES"] = ""
CONFIG["OBJCOPY"] = ":"
CONFIG["OBJDUMP"] = "objdump"
CONFIG["ASFLAGS"] = ""
CONFIG["AS"] = "as"
CONFIG["AR"] = "ar"
CONFIG["RANLIB"] = "ranlib"
CONFIG["try_header"] = ""
CONFIG["CC_VERSION"] = "$(CC) -v"
CONFIG["COUTFLAG"] = "-o "
CONFIG["OUTFLAG"] = "-o "
CONFIG["CPPOUTFILE"] = "-o conftest.i"
CONFIG["GNU_LD"] = "yes"
CONFIG["LD"] = "ld"
CONFIG["GCC"] = "yes"
CONFIG["EGREP"] = "/bin/grep -E"
CONFIG["GREP"] = "/bin/grep"
CONFIG["CPP"] = "$(CC) -E"
CONFIG["CXXFLAGS"] = "$(cxxflags)"
CONFIG["CXX"] = "g++"
CONFIG["OBJEXT"] = "o"
CONFIG["CPPFLAGS"] = " -I/opt/puppetlabs/puppet/include $(DEFS) $(cppflags)"
CONFIG["LDFLAGS"] = "-L. -fstack-protector -rdynamic -Wl,-export-dynamic -L/opt/puppetlabs/puppet/lib -Wl,-R/opt/puppetlabs/puppet/lib "
CONFIG["CFLAGS"] = "$(cflags) -fPIC"
CONFIG["CC"] = "gcc"
CONFIG["NACL_LIB_PATH"] = ""
CONFIG["NACL_SDK_VARIANT"] = ""
CONFIG["NACL_SDK_ROOT"] = ""
CONFIG["NACL_TOOLCHAIN"] = ""
CONFIG["target_os"] = "linux"
CONFIG["target_vendor"] = "ibm"
CONFIG["target_cpu"] = "s390x"
CONFIG["target"] = "s390x-ibm-linux-gnu"
CONFIG["host_os"] = "linux-gnu"
CONFIG["host_vendor"] = "ibm"
CONFIG["host_cpu"] = "s390x"
CONFIG["host"] = "s390x-ibm-linux-gnu"
CONFIG["RUBY_VERSION_NAME"] = "$(RUBY_BASE_NAME)-$(ruby_version)"
CONFIG["RUBYW_BASE_NAME"] = "rubyw"
CONFIG["RUBY_BASE_NAME"] = "ruby"
CONFIG["build_os"] = "linux-gnu"
CONFIG["build_vendor"] = "ibm"
CONFIG["build_cpu"] = "s390x"
CONFIG["build"] = "s390x-ibm-linux-gnu"
CONFIG["RUBY_RELEASE_DATE"] = "2017-12-14"
CONFIG["RUBY_PROGRAM_VERSION"] = "2.4.4"
CONFIG["target_alias"] = ""
CONFIG["host_alias"] = ""
CONFIG["build_alias"] = ""
CONFIG["LIBS"] = "-lpthread -lrt -ldl -lcrypt -lm "
CONFIG["ECHO_T"] = ""
CONFIG["ECHO_N"] = "-n"
CONFIG["ECHO_C"] = ""
CONFIG["DEFS"] = ""
CONFIG["mandir"] = "$(datarootdir)/man"
CONFIG["localedir"] = "$(datarootdir)/locale"
CONFIG["libdir"] = "$(exec_prefix)/lib"
CONFIG["psdir"] = "$(docdir)"
CONFIG["pdfdir"] = "$(docdir)"
CONFIG["dvidir"] = "$(docdir)"
CONFIG["htmldir"] = "$(docdir)"
CONFIG["infodir"] = "$(datarootdir)/info"
CONFIG["docdir"] = "$(datarootdir)/doc/$(PACKAGE)"
CONFIG["oldincludedir"] = "/usr/include"
CONFIG["includedir"] = "$(prefix)/include"
CONFIG["localstatedir"] = "$(prefix)/var"
CONFIG["sharedstatedir"] = "$(prefix)/com"
CONFIG["sysconfdir"] = "$(prefix)/etc"
CONFIG["datadir"] = "$(datarootdir)"
CONFIG["datarootdir"] = "$(prefix)/share"
CONFIG["libexecdir"] = "$(exec_prefix)/libexec"
CONFIG["sbindir"] = "$(exec_prefix)/sbin"
CONFIG["bindir"] = "$(exec_prefix)/bin"
CONFIG["exec_prefix"] = "$(prefix)"
CONFIG["PACKAGE_URL"] = ""
CONFIG["PACKAGE_BUGREPORT"] = ""
CONFIG["PACKAGE_STRING"] = ""
CONFIG["PACKAGE_VERSION"] = ""
CONFIG["PACKAGE_TARNAME"] = ""
CONFIG["PACKAGE_NAME"] = ""
CONFIG["PATH_SEPARATOR"] = ":"
CONFIG["SHELL"] = "/bin/sh"
CONFIG["archdir"] = "$(rubyarchdir)"
CONFIG["topdir"] = File.dirname(__FILE__)
MAKEFILE_CONFIG = {}
CONFIG.each{|k,v| MAKEFILE_CONFIG[k] = v.dup}
def RbConfig::expand(val, config = CONFIG)
newval = val.gsub(/\$\$|\$\(([^()]+)\)|\$\{([^{}]+)\}/) {
var = $&
if !(v = $1 || $2)
'$'
elsif key = config[v = v[/\A[^:]+(?=(?::(.*?)=(.*))?\z)/]]
pat, sub = $1, $2
config[v] = false
config[v] = RbConfig::expand(key, config)
key = key.gsub(/#{Regexp.quote(pat)}(?=\s|\z)/n) {sub} if pat
key
else
var
end
}
val.replace(newval) unless newval == val
val
end
CONFIG.each_value do |val|
RbConfig::expand(val)
end
# returns the absolute pathname of the ruby command.
def RbConfig.ruby
File.join(
RbConfig::CONFIG["bindir"],
RbConfig::CONFIG["ruby_install_name"] + RbConfig::CONFIG["EXEEXT"]
)
end
end
autoload :Config, "rbconfig/obsolete.rb" # compatibility for ruby-1.8.4 and older.
CROSS_COMPILING = nil unless defined? CROSS_COMPILING
| 38.328467 | 237 | 0.623786 |
ff647f7e646c6a9ddad87dbf09b7a4e3e5d50be6 | 224 | # frozen_string_literal: true
# Run examples in random order and point ImageOptim's config lookups at
# /dev/null in every example so developer-local config cannot leak into specs.
RSpec.configure do |config|
  config.order = :random
  config.before do
    %w[GLOBAL_PATH LOCAL_PATH].each do |const_name|
      stub_const("ImageOptim::Config::#{const_name}", '/dev/null')
    end
  end
end
| 20.363636 | 62 | 0.696429 |
bfe1b1bf567f3ca1ef112bdfe26e7b862639323c | 11,070 | Pubann::Application.routes.draw do
require 'sidekiq/web'
devise_scope :user do
authenticate :user, ->(user) {user.root} do
mount Sidekiq::Web => '/sidekiq'
end
end
resources :evaluators
resources :evaluations do
post 'select_reference_project' => 'projects#select_reference_project'
post 'delete_reference_project' => 'projects#delete_reference_project'
post 'generate' => 'evaluations#generate'
get 'result' => 'evaluations#result'
get 'falses' => 'evaluations#falses'
end
resources :collections do
member do
post 'create_annotations_rdf' => 'collections#create_annotations_rdf'
post 'create_spans_rdf' => 'collections#create_spans_rdf'
post '/add_project' => 'collections#add_project'
end
resources :projects do
member do
delete '/' => 'collections#remove_project'
put '/toggle_primary' => "collections#project_toggle_primary"
put '/toggle_secondary' => "collections#project_toggle_secondary"
end
end
get 'jobs/latest_jobs_table' => 'jobs#latest_jobs_table'
get 'jobs/latest_gear_icon' => 'jobs#latest_gear_icon'
resources :jobs do
member do
get 'messages' => 'messages#index'
end
end
delete 'jobs' => 'jobs#clear_finished_jobs', as: 'clear_finished_jobs'
resources :queries
end
resources :queries
resources :sequencers
resources :annotators
resources :editors
devise_for :users, controllers: {
:omniauth_callbacks => 'callbacks',
:confirmations => 'confirmations',
:sessions => 'sessions',
:registrations => 'users/registrations'
}
get "home/index"
resources :notices, only: :destroy do
collection do
get 'delete_project_notices/:id' => 'notices#delete_project_notices', as: 'delete_project'
end
end
namespace :relations do
get :sql
end
namespace :spans do
get :sql
end
resource :sql do
get :index
end
resource :users do
get '/' => 'users#index'
get :autocomplete_username, :on => :collection
end
get '/users/:name' => 'users#show', :as => 'show_user'
resources :docs do
collection do
get 'open' => 'docs#open'
# list sourcedb
get 'sourcedb' => 'docs#sourcedb_index'
get 'search' => 'docs#search'
get 'store_span_rdf' => 'docs#store_span_rdf'
get 'update_numbers' => 'docs#update_numbers'
get :autocomplete_doc_sourcedb
end
end
# routings for /docs/sourcedb....
scope 'docs', :as => 'doc' do
scope 'sourcedb', :as => 'sourcedb' do
scope ':sourcedb' do
# list sourceids
get '/' => 'docs#index', :as => 'index'
scope 'sourceid', :as => 'sourceid' do
scope ':sourceid' do
get '/' => 'docs#show', :as =>'show'
get 'annotations' => 'annotations#doc_annotations_index'
get 'annotations/merge_view' => 'annotations#doc_annotations_merge_view'
get 'annotations/list_view' => 'annotations#doc_annotations_list_view'
get 'annotations/visualize' => 'annotations#doc_annotations_list_view'
post 'annotations' => 'annotations#align'
get 'edit' => 'docs#edit'
get 'uptodate' => 'docs#uptodate'
delete '/' => 'docs#delete', :as=>'delete'
get 'spans' => 'spans#doc_spans_index'
post 'spans' => 'spans#get_url'
get 'spans/:begin-:end' => 'docs#show', :as => 'span_show'
get 'spans/:begin-:end/annotations' => 'annotations#doc_annotations_index'
get 'spans/:begin-:end/annotations/merge_view' => 'annotations#doc_annotations_merge_view'
get 'spans/:begin-:end/annotations/list_view' => 'annotations#doc_annotations_list_view'
get 'spans/:begin-:end/annotations/visualize' => 'annotations#doc_annotations_list_view'
end
end
end
end
end
resources :projects do
get 'spans/sql' => 'spans#sql'
get 'relations/sql' => 'relations#sql'
get 'annotations.tgz' => 'annotations#project_annotations_tgz', :as => 'annotations_tgz'
get 'annotations.tgz/create' => 'annotations#create_project_annotations_tgz', :as => 'create_annotations_tgz'
post 'annotations.tgz' => 'annotations#create_from_tgz', :as => 'create_annotations_from_tgz'
get 'delete_annotations_tgz' => 'annotations#delete_project_annotations_tgz', :as => 'delete_annotations_tgz'
get 'annotations.rdf' => 'annotations#project_annotations_rdf', :as => 'annotations_rdf'
get 'annotations.rdf/create' => 'annotations#create_project_annotations_rdf', :as => 'create_annotations_rdf'
post 'docs/upload' => 'docs#create_from_upload', :as => 'create_docs_from_upload'
post 'annotations/upload' => 'annotations#create_from_upload', :as => 'create_annotations_from_upload'
post 'annotations/delete' => 'annotations#delete_from_upload', :as => 'delete_annotations_from_upload'
post 'annotations/obtain' => 'annotations#obtain_batch'
get 'notices' => 'notices#index'
get 'tasks' => 'notices#tasks'
resources :annotations
resources :associate_maintainers, :only => [:destroy]
get 'jobs/latest_jobs_table' => 'jobs#latest_jobs_table'
get 'jobs/latest_gear_icon' => 'jobs#latest_gear_icon'
resources :jobs do
member do
get 'messages' => 'messages#index'
end
end
member do
post 'create_annotations_rdf' => 'projects#create_annotations_rdf'
post 'create_spans_rdf' => 'projects#create_spans_rdf'
post 'store_annotation_rdf' => 'projects#store_annotation_rdf'
delete 'delete_annotation_rdf' => 'projects#delete_annotation_rdf'
get 'store_span_rdf' => 'projects#store_span_rdf'
get 'clean' => 'projects#clean'
get 'add_docs' => 'projects#add_docs'
get 'upload_docs' => 'projects#upload_docs'
get 'uptodate_docs' => 'projects#uptodate_docs'
get 'obtain_annotations' => 'projects#obtain_annotations'
get 'rdfize_annotations' => 'projects#rdfize_annotations'
get 'upload_annotations' => 'projects#upload_annotations'
get 'delete_annotations' => 'projects#delete_annotations'
get 'autocomplete_sourcedb' => 'projects#autocomplete_sourcedb'
get 'autocomplete_project_name'
end
collection do
# auto complete path which use scope and scope argument required :scope_argument param
get 'autocomplete_project_name'
get 'autocomplete_editable_project_name'
get 'autocomplete_project_author'
get 'zip_upload' => 'projects#zip_upload'
post 'create_from_tgz' => 'projects#create_from_tgz'
# get 'store_annotation_rdf' => 'projects#store_annotation_rdf'
get 'clean' => 'projects#clean'
end
end
resources :projects do
post 'annotations' => 'annotations#create'
delete 'docs' => 'projects#delete_all_docs', as: 'delete_all_docs'
delete 'annotations' => 'projects#destroy_all_annotations', as: 'destroy_all_annotations'
delete 'jobs' => 'jobs#clear_finished_jobs', as: 'clear_finished_jobs'
resources :evaluations
resources :docs do
collection do
get 'index' => 'docs#index'
post 'add' => 'docs#add'
post 'add_from_upload' => 'docs#add_from_upload'
post 'import' => 'docs#import'
get 'search' => 'docs#search'
get 'open' => 'docs#open'
scope 'sourcedb', :as => 'sourcedb' do
# list sourcedb
get '/' => 'docs#sourcedb_index'
scope ':sourcedb' do
# list sourceids
get '/' => 'docs#index', :as => 'index'
scope 'sourceid', :as => 'sourceid' do
scope ':sourceid' do
get '/' => 'docs#show_in_project', :as =>'show'
delete '/' => 'docs#project_delete_doc', :as=>'delete'
get 'annotations' => 'annotations#project_doc_annotations_index'
get 'annotations/visualize' => 'annotations#doc_annotations_list_view'
post 'annotations' => 'annotations#create'
post 'annotations/obtain' => 'annotations#obtain'
delete 'annotations' => 'annotations#destroy', as: 'destroy_annotations'
get 'spans' => 'spans#project_doc_spans_index', :as => 'spans_index'
get 'spans/:begin-:end' => 'docs#show_in_project', :as => 'span_show'
get 'spans/:begin-:end/annotations' => 'annotations#project_doc_annotations_index', :as => 'span_annotations'
post 'spans/:begin-:end/annotations' => 'annotations#create'
delete 'spans/:begin-:end/annotations' => 'annotations#destroy', as: 'destroy_annotations_in_span'
post 'spans/:begin-:end/annotations/obtain' => 'annotations#obtain', as: 'annotations_obtain_in_span'
end
end
end
end
end
resources :annotations do
end
end
resources :annotations do
collection do
post 'import' => 'annotations#import'
end
end
resources :queries
end
resources :messages do
member do
get '/data_source' => 'messages#data_source'
get '/data_target' => 'messages#data_target'
end
end
match '/projects/:project_id/docs/sourcedb/:sourcedb/sourceid/:sourceid/annotations' => 'application#cors_preflight_check', :via => ["OPTIONS"]
match '/projects/:project_id/docs/sourcedb/:sourcedb/sourceid/:sourceid/spans/:begin-:end/annotations' => 'application#cors_preflight_check', :via => ["OPTIONS"]
match '/annotations/align' => 'annotations#align', :via => ["POST"]
resources :news_notifications, path: :news do
collection do
get 'category/:category' => 'news_notifications#category', as: 'category'
end
end
# The priority is based upon order of creation:
# first created -> highest priority.
# Sample of regular route:
# match 'products/:id' => 'catalog#view'
# Keep in mind you can assign values other than :controller and :action
# Sample of named route:
# match 'products/:id/purchase' => 'catalog#purchase', :as => :purchase
# This route can be invoked with purchase_url(:id => product.id)
# Sample resource route (maps HTTP verbs to controller actions automatically):
# resources :products
# Sample resource route with options:
# resources :products do
# member do
# get 'short'
# post 'toggle'
# end
#
# collection do
# get 'sold'
# end
# end
# Sample resource route with sub-resources:
# resources :products do
# resources :comments, :sales
# resource :seller
# end
# Sample resource route with more complex sub-resources
# resources :products do
# resources :comments
# resources :sales do
# get 'recent', :on => :collection
# end
# end
# Sample resource route within a namespace:
# namespace :admin do
# # Directs /admin/products/* to Admin::ProductsController
# # (app/controllers/admin/products_controller.rb)
# resources :products
# end
# You can have the root of your site routed with "root"
# just remember to delete public/index.html.
root :to => 'home#index'
get '/' => 'home#index', :as => :home
get '/search' => 'graphs#show', :as => :sparql
get '/projects/:project_name/search' => 'graphs#show', :as => :sparql_project
get '/collections/:collection_name/search' => 'graphs#show', :as => :sparql_collection
# See how all your routes lay out with "rake routes"
# This is a legacy wild controller route that's not recommended for RESTful applications.
# Note: This route will make all actions in every controller accessible via GET requests.
# match ':controller(/:action(/:id))(.:format)'
end
| 34.485981 | 162 | 0.693767 |
7944bbb0dcc2bc4560e7bebfadc5d83ff44801cc | 3,559 | # encoding: UTF-8
# frozen_string_literal: true
module APITestHelpers
extend Memoist
def post_json(destination, body, headers = {})
post destination,
String === body ? body : body.to_json,
headers.reverse_merge('Content-Type' => 'application/json')
end
def put_json(destination, body, headers = {})
put destination,
String === body ? body : body.to_json,
headers.reverse_merge('Content-Type' => 'application/json')
end
def api_request(method, url, options = {})
headers = options.fetch(:headers, {})
params = options.fetch(:params, {})
options[:token].tap { |t| headers['Authorization'] = 'Bearer ' + t if t }
send(method, url, params, headers)
end
def api_get(*args)
api_request(:get, *args)
end
def api_post(*args)
api_request(:post, *args)
end
def api_delete(*args)
api_request(:delete, *args)
end
#
# Generates valid JWT for member, allows to pass additional payload.
#
def jwt_for(member, payload = { x: 'x', y: 'y', z: 'z' })
jwt_build(payload.merge(email: member.email, uid: member.uid, \
role: member.role, state: member.state, level: member.level))
end
#
# Generates valid JWT. Accepts payload as argument. Add fields required for JWT to be valid.
#
def jwt_build(payload)
jwt_encode payload.reverse_merge \
iat: Time.now.to_i,
exp: 20.minutes.from_now.to_i,
jti: SecureRandom.uuid,
sub: 'session',
iss: 'peatio',
aud: ['peatio']
end
#
# Generates JWT token based on payload. Doesn't add any extra fields to payload.
#
def jwt_encode(payload)
OpenSSL::PKey.read(Base64.urlsafe_decode64(jwt_keypair_encoded[:private])).yield_self do |key|
JWT.encode(payload, key, ENV.fetch('JWT_ALGORITHM'))
end
end
def jwt_keypair_encoded
require 'openssl'
require 'base64'
OpenSSL::PKey::RSA.generate(2048).yield_self do |p|
Rails.configuration.x.jwt_public_key = p.public_key
{ public: Base64.urlsafe_encode64(p.public_key.to_pem),
private: Base64.urlsafe_encode64(p.to_pem) }
end
end
memoize :jwt_keypair_encoded
def multisig_jwt(payload, keychain, signers, algorithms)
JWT::Multisig.generate_jwt(payload, keychain.slice(*signers), algorithms)
end
def multisig_jwt_management_api_v1(payload, *signers)
multisig_jwt(payload, management_api_v1_keychain, signers, management_api_v1_algorithms)
end
def management_api_v1_keychain
require 'openssl'
{ james: OpenSSL::PKey::RSA.generate(2048),
john: OpenSSL::PKey::RSA.generate(2048 ),
david: OpenSSL::PKey::RSA.generate(2048 ),
robert: OpenSSL::PKey::RSA.generate(2048 ),
alex: OpenSSL::PKey::RSA.generate(2048 ),
jeff: OpenSSL::PKey::RSA.generate(2048 ) }
end
memoize :management_api_v1_keychain
def management_api_v1_algorithms
management_api_v1_keychain.each_with_object({}) { |(k, v), memo| memo[k] = 'RS256' }
end
memoize :management_api_v1_algorithms
def management_api_v1_security_configuration
Rails.configuration.x.security_configuration
end
def defaults_for_management_api_v1_security_configuration!
config = { jwt: {} }
config[:keychain] = management_api_v1_keychain.each_with_object({}) do |(signer, key), memo|
memo[signer] = { algorithm: management_api_v1_algorithms.fetch(signer), value: key.public_key }
end
Rails.configuration.x.security_configuration = config
end
end
RSpec.configure { |config| config.include APITestHelpers }
| 30.161017 | 101 | 0.690643 |
1d93fd72dae9a630da99cbe2be58fbbe6b0fa6b1 | 1,400 | class CadenceWorkflow < Formula
desc "Distributed, scalable, durable, and highly available orchestration engine"
homepage "https://cadenceworkflow.io/"
url "https://github.com/uber/cadence.git",
tag: "v0.24.0",
revision: "517c6c135a24a4f23eea6f3a3747e14e59b5d49e"
license "MIT"
head "https://github.com/uber/cadence.git", branch: "master"
bottle do
root_url "https://github.com/gromgit/homebrew-core-mojave/releases/download/cadence-workflow"
sha256 cellar: :any_skip_relocation, mojave: "d71636651bf258051d6bcaf973a534136a9dce0450733fa3bee2976faa7d9597"
end
depends_on "go" => :build
conflicts_with "cadence", because: "both install an `cadence` executable"
def install
system "make", ".fake-codegen"
system "make", "cadence", "cadence-server", "cadence-canary", "cadence-sql-tool", "cadence-cassandra-tool"
bin.install "cadence"
bin.install "cadence-server"
bin.install "cadence-canary"
bin.install "cadence-sql-tool"
bin.install "cadence-cassandra-tool"
(etc/"cadence").install "config", "schema"
end
test do
output = shell_output("#{bin}/cadence-server start 2>&1", 1)
assert_match "Loading config; env=development,zone=,configDir", output
output = shell_output("#{bin}/cadence --domain samples-domain domain desc ", 1)
assert_match "Error: Operation DescribeDomain failed", output
end
end
| 35.897436 | 115 | 0.720714 |
39ce72baf3336eb82f793955d05f7bebef6d0fb9 | 127 | require 'rails_helper'
# Placeholder spec: marks the whole controller suite as pending until real
# examples are written. Fixes the grammatical error in the pending message
# ("writed" -> "written it").
RSpec.describe Staff::CustomersController, type: :controller do
  pending 'I haven\'t written it yet.'
end
| 21.166667 | 63 | 0.771654 |
e93f2e2e46cd45efac2df6d528f567f01ad024c2 | 190 | # frozen_string_literal: true
# Factory for Node records used in specs.
FactoryBot.define do
  factory :node do
    # Unique name per build: "name1", "name2", ...
    sequence(:name) { |n| "name#{n}" }
    # Legacy explicit-receiver association syntax (factory_girl era).
    # NOTE(review): modern FactoryBot would be `section { association(:section) }`
    # — confirm FactoryBot version before changing.
    section { |s| s.association(:section) }
    summary { "summary" }
  end
end
| 19 | 43 | 0.636842 |
214359b46e8e804eac7ee8c5ccb259f6dc0e907d | 5,315 | #
# Be sure to run `pod spec lint flutter_module_demo.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see https://guides.cocoapods.org/syntax/podspec.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |spec|
  # ─── Metadata ───
  spec.name         = "flutter_module_demo"
  spec.version      = "0.0.1"
  spec.summary      = "flutter_module_demo summary."
  spec.description  = <<-DESC
  flutter_module_demo description
                   DESC
  spec.homepage     = "https://github.com/flutter_module_demo"
  spec.license      = { :type => "MIT" }
  spec.author       = { "wyhazq" => "[email protected]" }

  # ─── Source ───
  # NOTE(review): the original podspec assigned spec.source twice with the
  # same value; only a single assignment is kept here.
  spec.source       = { :git => "https://github.com/flutter_module_demo/flutter_module_demo.git", :tag => "#{spec.version}" }
  spec.exclude_files = "Classes/Exclude"

  # Precompiled Flutter module binary shipped with the pod.
  spec.vendored_frameworks = "App.framework"
end
| 37.964286 | 125 | 0.605644 |
6a719634b5c0a145adda3881cb2e8304809ab413 | 312 | Rails.application.routes.draw do
resources :authors do
collection do
post :import
end
end
resources :publications do
collection do
get :search
post :import
end
end
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
end
| 20.8 | 101 | 0.692308 |
module Mongoid
  module Document
    # Names of every embedded relation (embeds_one / embeds_many) declared
    # on this document's class, in declaration order.
    def all_embedded_model_names
      names = []
      relations.each do |name, relation|
        names << name if [:embeds_one, :embeds_many].include?(relation.macro.to_sym)
      end
      names
    end

    # Embedded relation names that currently hold data.
    # NOTE(review): assumes every embedded relation responds to #empty?;
    # an unset embeds_one returns nil in Mongoid — confirm callers only
    # rely on this for embeds_many-style relations.
    def embedded_populated_model_names
      all_embedded_model_names.reject { |name| send(name).empty? }
    end

    # Non-empty embedded relations (each element is the relation's
    # collection of embedded documents). The relation is fetched once per
    # name, unlike the original which called #send twice.
    def all_embedded_documents
      all_embedded_model_names.each_with_object([]) do |name, docs|
        relation = send(name)
        docs << relation unless relation.empty?
      end
    end

    # True when every document in every embedded relation passes validation.
    # BUG FIX: the original iterated with #each, whose return value is the
    # receiver (an always-truthy Array), so this predicate could never
    # report false; #all? returns the intended boolean.
    def all_embedded_documents_valid?
      all_embedded_documents.all? { |docs| docs.all?(&:valid?) }
    end

    # Per-relation change sets for populated embedded models, keyed by the
    # relation name (symbolized). Documents whose only recorded key is _id
    # are dropped. Relies on ActiveSupport's Enumerable#many?.
    def all_embedded_document_changes
      data = {}
      embedded_populated_model_names.each do |name|
        model_data = send(name).map(&:changes_for_document)
        model_data = model_data.select { |md| md.keys.many? }
        next unless model_data.any?
        data[name.to_sym] = model_data
      end
      data
    end

    # This document's own field changes plus all embedded changes, keyed
    # by model name. Own changes are included only when something beyond
    # _id actually changed.
    def changes_with_embedded
      embedded_data = all_embedded_document_changes
      self_changes = changes_for_document
      field_data = self_changes.keys.many? ? { self.model_name.underscore.to_sym => [self_changes] } : {}
      field_data.merge!(embedded_data) unless embedded_data.empty?
      field_data
    end

    # Dirty-tracking changes as {field: {from:, to:}}, always including
    # _id. Changes that merely swap one blank value for another
    # (nil <-> "") are treated as noise and skipped.
    def changes_for_document
      data = { _id: self._id }
      changes.each do |key, change|
        data[key.to_sym] = { from: change[0], to: change[1] } unless only_blanked_a_nil?(change)
      end
      data
    end

    # True when both sides of a change pair are blank (ActiveSupport
    # #blank?), i.e. the "change" carries no real information.
    def only_blanked_a_nil?(change)
      change[0].blank? && change[1].blank?
    end

    # Flat list of every validation error message across all embedded
    # documents (same traversal order as nested iteration).
    def embedded_error_messages
      all_embedded_documents.flatten.flat_map do |doc|
        doc.errors.messages.values.flatten
      end
    end
  end
end
| 27.197531 | 113 | 0.641852 |
class Cms::ArticlesController < ApplicationController
  layout 'cms'
  before_action :authenticate_user!, except: [:index, :show]

  # Pin/unpin an article (admin only); params[:act] == 'enable' pins it.
  def top
    must_be_admin!
    a = Cms::Article.find params[:article_id]
    a.update_column :top, params[:act] == 'enable'
    redirect_to cms_article_path(a)
  end

  # Paginated article list for the current locale, newest first.
  def index
    @articles = Cms::Article.select(:id, :summary, :title, :logo).where(lang: I18n.locale).order(id: :desc).page params[:page]
    @title = t 'cms.articles.index.title'
  end

  # Show one article and bump its visit counter.
  # NOTE(review): update_column skips validations/callbacks and is not
  # concurrency-safe; lost updates are possible under load.
  def show
    @article = Cms::Article.find params[:id]
    @article.update_column :visits, @article.visits + 1
    @hot_articles = Cms::Article.select(:id, :title).where(lang: I18n.locale).order(visits: :desc).limit(12)
    @latest_articles = Cms::Article.select(:id, :title).where(lang: I18n.locale).order(id: :desc).limit(12)
    @near_articles = Cms::Article.select(:id, :title).where('id >= ? AND id <= ?', @article.id - 6, @article.id + 6).order(visits: :desc).limit(12)
    @comment = Cms::Comment.new article: @article
  end

  def new
    @article = Cms::Article.new
  end

  # Create an article owned by the current user in the current locale.
  def create
    a = Cms::Article.new _params
    a.user = current_user
    a.lang = I18n.locale
    if a.save
      flash[:notice] = t 'messages.success'
      redirect_to(cms_article_path(a))
    else
      flash[:alert] = a.errors.full_messages
      @article = a
      render 'new'
    end
  end

  def edit
    @article = Cms::Article.find params[:id]
    # Early return halts the action cleanly when the user may not edit.
    return head :forbidden unless @article.can_edit?(current_user)
  end

  def update
    a = Cms::Article.find params[:id]
    # BUG FIX: the original called `head :forbidden` without returning, so
    # the update below still ran for unauthorized users (and a second
    # render/redirect was then attempted on an already-performed response).
    return head :forbidden unless a.can_edit?(current_user)
    begin
      if a.update _params
        flash[:notice] = t 'messages.success'
        redirect_to(cms_article_path(a)) and return
      end
    rescue => e
      flash[:alert] = e.to_s
    end
    @article = a
    render 'edit'
  end

  def destroy
    a = Cms::Article.find params[:id]
    if a.can_edit?(current_user)
      a.destroy
      redirect_to cms_articles_path
    else
      head :forbidden
    end
  end

  private

  # Strong parameters for create/update.
  def _params
    params.require(:cms_article).permit(:title, :summary, :body, tag_ids: [])
  end
end
| 24 | 143 | 0.643519 |
# frozen_string_literal: true

# == Schema Information
#
# Table name: certificates
#
#  id                          :bigint(8)        not null, primary key
#  cert_add_info               :string
#  cert_applic_address         :string
#  cert_applic_inn             :string
#  cert_applic_name            :string
#  cert_applic_phone           :string
#  cert_applic_postcode        :string
#  cert_blank_num              :string
#  cert_chief_org              :string
#  cert_code_okp               :string
#  cert_code_tn_ved            :string
#  cert_expert                 :string
#  cert_expiry_date            :datetime
#  cert_manuf_address          :string
#  cert_manuf_doc              :string
#  cert_manuf_inn              :string
#  cert_manuf_name             :string
#  cert_manuf_postcode         :string
#  cert_manuf_regulations      :string
#  cert_name_product           :string
#  cert_name_product_size_font :integer
#  cert_place_marking          :string
#  cert_registration_date      :datetime
#  cert_registration_num       :string
#  cert_test_report            :string
#  created_at                  :datetime         not null
#  updated_at                  :datetime         not null
#  solution_id                 :bigint(8)
#
# Indexes
#
#  index_certificates_on_solution_id  (solution_id)
#

# Certificate issued for a solution; owns its permission, contract and
# attachment records (all destroyed with the certificate).
class Certificate < ApplicationRecord
  # Associations
  belongs_to :solution
  has_one :permission, dependent: :destroy
  has_one :ikcontract, dependent: :destroy
  has_many :attachments, dependent: :destroy

  # Validations: blank number must be unique, and the core registration
  # fields are mandatory.
  validates :cert_blank_num, uniqueness: true
  validates :cert_expiry_date,
            :cert_registration_date,
            :cert_registration_num,
            :cert_blank_num,
            :cert_applic_name,
            :cert_manuf_name,
            presence: true
end
| 33.653846 | 80 | 0.627429 |
# frozen_string_literal: true

# Factories that build plain Hashes (via `initialize_with { attributes }`)
# rather than ActiveRecord models. The camelCase keys suggest these mimic
# an external community/probation API payload — confirm against the client
# that consumes them.
FactoryBot.define do
  # Top-level community payload: tier, cross-reference ids and the list of
  # offender managers.
  factory :community_data, class: Hash do
    initialize_with { attributes }
    currentTier { 'A' }
    otherIds { { crn: 'X362207'} }
    offenderManagers { [ build(:community_offender_manager) ] }

    # CRC case: enhanced resourcing off.
    trait :crc do
      enhancedResourcing { false }
    end

    # NPS case: enhanced resourcing on.
    trait :nps do
      enhancedResourcing { true }
    end
  end

  # A single active, allocated offender manager record.
  factory :community_offender_manager, class: Hash do
    initialize_with { attributes }
    active { true }
    probationArea { { nps: true } }
    staff { { unallocated: false, surname: 'Jones', forenames: 'Ruth Mary' } }
  end

  # An active registration; the :mappa_2 trait adds a MAPPA level-2
  # register entry.
  factory :community_registration, class: Hash do
    initialize_with { attributes }
    active { true }

    trait :mappa_2 do
      registerLevel {
        {
          code: 'M2',
          description: 'MAPPA Level 2'
        }
      }
    end
  end
end
| 19.533333 | 78 | 0.61661 |
# Test helper. Coveralls must be required and started before anything else
# so coverage instrumentation sees all subsequently loaded code;
# Coveralls.wear! also loads SimpleCov, which is why the SimpleCov constant
# is available below without its own require.
require 'coveralls'
Coveralls.wear!
# Emit a local HTML coverage report instead of the default formatter.
SimpleCov.formatter = SimpleCov::Formatter::HTMLFormatter
require 'minitest/autorun'
require 'minitest/pride'
# Load the library under test.
require 'rack-heartbeat'
| 24.428571 | 57 | 0.818713 |
# coding: utf-8
lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "hanami/authentication/version"

# Gem specification for hanami-authentication.
Gem::Specification.new do |spec|
  spec.name    = 'hanami-authentication'
  spec.version = Hanami::Authentication::VERSION
  spec.authors = ['LegalForce Inc.']
  spec.email   = ['[email protected]']

  spec.summary     = 'A simple authentication module for hanami.'
  spec.description = 'A simple authentication module for hanami.'
  spec.homepage    = 'https://github.com/legalforce/hanami-authentication'
  spec.license     = 'MIT'

  # Ship everything tracked by git except the test suites.
  spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = 'exe'
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']

  # Runtime dependencies.
  spec.add_dependency 'bcrypt', '~> 3.1'
  spec.add_dependency 'hanami-utils', '~> 1.0'

  # Development-only dependencies.
  spec.add_development_dependency 'bundler', '~> 1.15'
  spec.add_development_dependency 'rake', '~> 10.0'
  spec.add_development_dependency 'rspec', '~> 3.0'
end
| 35.3125 | 76 | 0.656637 |
# Loads the shared NetBooter HTTP connection plus the revision-specific
# (rev A / rev B) implementations.
require 'synaccess_connect/net_booter/http/http_connection'
require 'synaccess_connect/net_booter/http/rev_a'
require 'synaccess_connect/net_booter/http/rev_b'

# Namespace only: the concrete connection classes are defined in the
# files required above.
module NetBooter
  module Http
  end
end
| 22.444444 | 59 | 0.841584 |
# Encoding: utf-8
#
# This is auto-generated code, changes will be overwritten.
#
# Copyright:: Copyright 2017, Google Inc. All Rights Reserved.
# License:: Licensed under the Apache License, Version 2.0.
#
# Code generated by AdsCommon library 1.0.0 on 2017-10-13 16:16:07.

require 'ads_common/savon_service'
require 'dfp_api/v201711/custom_targeting_service_registry'

module DfpApi; module V201711; module CustomTargetingService
  # SOAP client for DFP's CustomTargetingService (API v201711).
  # Every service action `foo` delegates to AdsCommon's execute_action and
  # has a matching `foo_to_xml` helper that builds the SOAP request
  # envelope without sending it.
  class CustomTargetingService < AdsCommon::SavonService
    def initialize(config, endpoint)
      namespace = 'https://www.google.com/apis/ads/publisher/v201711'
      super(config, endpoint, namespace, :v201711)
    end

    def create_custom_targeting_keys(*args, &block)
      return execute_action('create_custom_targeting_keys', args, &block)
    end

    def create_custom_targeting_keys_to_xml(*args)
      return get_soap_xml('create_custom_targeting_keys', args)
    end

    def create_custom_targeting_values(*args, &block)
      return execute_action('create_custom_targeting_values', args, &block)
    end

    def create_custom_targeting_values_to_xml(*args)
      return get_soap_xml('create_custom_targeting_values', args)
    end

    def get_custom_targeting_keys_by_statement(*args, &block)
      return execute_action('get_custom_targeting_keys_by_statement', args, &block)
    end

    def get_custom_targeting_keys_by_statement_to_xml(*args)
      return get_soap_xml('get_custom_targeting_keys_by_statement', args)
    end

    def get_custom_targeting_values_by_statement(*args, &block)
      return execute_action('get_custom_targeting_values_by_statement', args, &block)
    end

    def get_custom_targeting_values_by_statement_to_xml(*args)
      return get_soap_xml('get_custom_targeting_values_by_statement', args)
    end

    def perform_custom_targeting_key_action(*args, &block)
      return execute_action('perform_custom_targeting_key_action', args, &block)
    end

    def perform_custom_targeting_key_action_to_xml(*args)
      return get_soap_xml('perform_custom_targeting_key_action', args)
    end

    def perform_custom_targeting_value_action(*args, &block)
      return execute_action('perform_custom_targeting_value_action', args, &block)
    end

    def perform_custom_targeting_value_action_to_xml(*args)
      return get_soap_xml('perform_custom_targeting_value_action', args)
    end

    def update_custom_targeting_keys(*args, &block)
      return execute_action('update_custom_targeting_keys', args, &block)
    end

    def update_custom_targeting_keys_to_xml(*args)
      return get_soap_xml('update_custom_targeting_keys', args)
    end

    def update_custom_targeting_values(*args, &block)
      return execute_action('update_custom_targeting_values', args, &block)
    end

    def update_custom_targeting_values_to_xml(*args)
      return get_soap_xml('update_custom_targeting_values', args)
    end

    private

    # Hooks consumed by AdsCommon::SavonService to resolve the service's
    # type registry and namespace module.
    def get_service_registry()
      return CustomTargetingServiceRegistry
    end

    def get_module()
      return DfpApi::V201711::CustomTargetingService
    end
  end
end; end; end
| 32.589474 | 85 | 0.764535 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.