hexsha stringlengths 40–40 | size int64 2–1.01M | content stringlengths 2–1.01M | avg_line_length float64 1.5–100 | max_line_length int64 2–1k | alphanum_fraction float64 0.25–1 |
---|---|---|---|---|---|
bbe36b27476dbde080c946765c420f6e5377ca4e | 688 | $:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "piggybak_variants/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = "piggybak_variants"
s.version = PiggybakVariants::VERSION
s.authors = ["Tim Case", "Steph Skardal"]
s.email = ["[email protected]"]
s.homepage = "http://www.piggybak.org"
s.summary = "Advanced variant support for Piggybak."
s.description = "Advanced variant support for Piggybak."
s.files = Dir["{app,config,db,lib}/**/*"] + ["MIT-LICENSE", "Rakefile", "README.md"]
s.test_files = Dir["test/**/*"]
s.add_dependency 'paperclip'
end
| 32.761905 | 86 | 0.665698 |
ac58a0a50116c4c0958ea6b38f6dc590c231aceb | 628 | cask 'flash-player' do
version '31.0.0.153'
sha256 '6a34978100dabdbb8653e9e9d707adb196f2300d4a7caacb8df1fc11770c2d1e'
url "https://fpdownload.adobe.com/pub/flashplayer/updaters/#{version.major}/flashplayer_#{version.major}_sa.dmg"
appcast 'https://fpdownload.adobe.com/pub/flashplayer/update/current/xml/version_en_mac_pl.xml'
name 'Adobe Flash Player projector'
homepage 'https://www.adobe.com/support/flashplayer/debug_downloads.html'
app 'Flash Player.app'
zap trash: [
'~/Library/Caches/Adobe/Flash Player',
'~/Library/Logs/FlashPlayerInstallManager.log',
]
end
| 36.941176 | 114 | 0.727707 |
ff491f13ffd7b11cd34a952f8bebb81b6a883a8a | 908 | Pod::Spec.new do |s|
s.name = 'Hyperconnectivity'
s.version = '1.1.0'
s.swift_version = '5.0'
s.summary = 'Modern replacement for Apple\'s Reachability written in Swift and made elegant using Combine'
s.description = <<-DESC
Hyperconnectivity provides Internet connectivity and captive portal detection in Swift using Combine.
DESC
s.homepage = 'https://github.com/rwbutler/Hyperconnectivity'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'Ross Butler' => '[email protected]' }
s.source = { :git => 'https://github.com/rwbutler/Hyperconnectivity.git', :tag => s.version.to_s }
s.social_media_url = 'https://twitter.com/ross_w_butler'
s.ios.deployment_target = '13.0'
s.frameworks = 'Combine', 'Network'
s.source_files = 'Hyperconnectivity/Classes/**/*'
end
| 50.444444 | 117 | 0.623348 |
2644abcc468547ac142da258fd9c49d2549b1099 | 965 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'cocoapods-dependManager/gem_version.rb'
Gem::Specification.new do |spec|
spec.name = 'cocoapods-dependManager'
spec.version = CocoapodsDependmanager::VERSION
spec.authors = ['huainanzi']
spec.email = ['[email protected]']
spec.description = "edit Podfile(add,remove,list)"
spec.summary = %q{A longer description of cocoapods-dependManager.}
spec.homepage = 'https://github.com/EXAMPLE/cocoapods-dependManager'
spec.license = 'MIT'
# spec.files = `git ls-files`.split($/)
spec.files = Dir['lib/**/*']
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ['lib']
spec.add_development_dependency 'bundler'
spec.add_development_dependency 'rake'
end
| 38.6 | 75 | 0.672539 |
b95e642749edce8bc077c19dd47e608e05029c33 | 6,488 | # frozen_string_literal: true
require_relative '../spec_helper'
RSpec.shared_examples 'installs successfully using pip' do
it 'installs successfully using pip' do
app.deploy do |app|
expect(app.output).to include('Installing requirements with pip')
expect(app.output).to include('Successfully installed')
end
end
end
RSpec.describe 'Pip support' do
context 'when requirements.txt is unchanged since the last build' do
let(:app) { Hatchet::Runner.new('spec/fixtures/python_version_unspecified') }
it 're-uses packages from the cache' do
app.deploy do |app|
expect(clean_output(app.output)).to match(Regexp.new(<<~REGEX))
remote: -----> Python app detected
remote: -----> Installing python-#{DEFAULT_PYTHON_VERSION}
remote: -----> Installing pip 20.2.4, setuptools 47.1.1 and wheel 0.36.2
remote: -----> Installing SQLite3
remote: -----> Installing requirements with pip
remote: Collecting urllib3
remote: Downloading urllib3-.*
remote: Installing collected packages: urllib3
remote: Successfully installed urllib3-.*
REGEX
app.commit!
app.push!
expect(clean_output(app.output)).to include(<<~OUTPUT)
remote: -----> Python app detected
remote: -----> No change in requirements detected, installing from cache
remote: -----> Installing pip 20.2.4, setuptools 47.1.1 and wheel 0.36.2
remote: -----> Installing SQLite3
remote: -----> Installing requirements with pip
remote: -----> Discovering process types
OUTPUT
end
end
end
context 'when requirements.txt has changed since the last build' do
let(:app) { Hatchet::Runner.new('spec/fixtures/python_version_unspecified') }
it 'clears the cache before installing the packages again' do
app.deploy do |app|
File.write('requirements.txt', 'six', mode: 'a')
app.commit!
app.push!
expect(clean_output(app.output)).to match(Regexp.new(<<~REGEX))
remote: -----> Python app detected
remote: -----> Requirements file has been changed, clearing cached dependencies
remote: -----> Installing python-#{DEFAULT_PYTHON_VERSION}
remote: -----> Installing pip 20.2.4, setuptools 47.1.1 and wheel 0.36.2
remote: -----> Installing SQLite3
remote: -----> Installing requirements with pip
remote: Collecting urllib3
remote: Downloading urllib3-.*
remote: Collecting six
remote: Downloading six-.*
remote: Installing collected packages: urllib3, six
remote: Successfully installed six-.* urllib3-.*
REGEX
end
end
end
context 'when requirements.txt contains popular compiled packages' do
let(:app) { Hatchet::Runner.new('spec/fixtures/requirements_compiled') }
include_examples 'installs successfully using pip'
end
context 'when requirements.txt contains Git/Mercurial requirements URLs' do
let(:app) { Hatchet::Runner.new('spec/fixtures/requirements_vcs') }
include_examples 'installs successfully using pip'
end
context 'when requirements.txt contains editable requirements' do
let(:app) { Hatchet::Runner.new('spec/fixtures/requirements_editable') }
# TODO: Make this test the path rewriting, and --src directory handling,
# and that the packages work during all of hooks, later buildpacks, runtime,
# and on subsequent builds (where the paths have to be migrated back).
include_examples 'installs successfully using pip'
end
context 'when there is only a setup.py' do
let(:app) { Hatchet::Runner.new('spec/fixtures/setup_py_only') }
it 'installs packages from setup.py' do
app.deploy do |app|
expect(app.output).to include('Running setup.py develop for test')
expect(app.output).to include('Successfully installed six')
end
end
end
context 'when there is both a requirements.txt and setup.py' do
let(:app) { Hatchet::Runner.new('spec/fixtures/requirements_txt_and_setup_py') }
it 'installs packages only from requirements.txt' do
app.deploy do |app|
expect(clean_output(app.output)).to include(<<~OUTPUT)
remote: -----> Installing requirements with pip
remote: Collecting urllib3
OUTPUT
expect(app.output).not_to include('Running setup.py develop')
end
end
end
context 'when using pysqlite and Python 2', stacks: %w[heroku-16 heroku-18] do
# This is split out from the requirements_compiled fixture, since the original
# pysqlite package (as opposed to the newer pysqlite3) only supports Python 2.
# This test has to be skipped on newer stacks where Python 2 is not available.
let(:app) { Hatchet::Runner.new('spec/fixtures/requirements_pysqlite_python_2') }
include_examples 'installs successfully using pip'
end
context 'when requirements.txt contains GDAL but the GDAL C++ library is missing' do
let(:app) { Hatchet::Runner.new('spec/fixtures/requirements_gdal', allow_failure: true) }
it 'outputs instructions for how to resolve the build failure' do
app.deploy do |app|
expect(clean_output(app.output)).to include(<<~OUTPUT)
remote: ! Hello! Package installation failed since the GDAL library was not found.
remote: ! For GDAL, GEOS and PROJ support, use the Geo buildpack alongside the Python buildpack:
remote: ! https://github.com/heroku/heroku-geo-buildpack
remote: ! -- Much Love, Heroku.
OUTPUT
end
end
end
context 'when the legacy BUILD_WITH_GEO_LIBRARIES env var is set' do
let(:config) { { 'BUILD_WITH_GEO_LIBRARIES' => '' } }
let(:app) { Hatchet::Runner.new('spec/fixtures/python_version_unspecified', config: config, allow_failure: true) }
it 'aborts the build with an unsupported error message' do
app.deploy do |app|
expect(clean_output(app.output)).to include(<<~OUTPUT)
remote: -----> Python app detected
remote: ! The Python buildpack's legacy BUILD_WITH_GEO_LIBRARIES functonality is
remote: ! no longer supported:
remote: ! https://devcenter.heroku.com/changelog-items/1947
OUTPUT
end
end
end
end
| 41.324841 | 118 | 0.657676 |
b991b8a53e2322644692e6c5d196bde6ea947c25 | 110 | ##
# IndexError ISO Test
assert('IndexError', '15.2.33') do
assert_equal Class, IndexError.class
end
| 15.714286 | 39 | 0.681818 |
794e69443bc77abc62517a5f51e7f24edf2924a7 | 872 | class SessionsController < ApplicationController
def new
end
def create
user = User.find_by(email: params[:session][:email].downcase)
if user && user.authenticate(params[:session][:password])
if user.activated?
log_in user
params[:session][:remember_me] == '1' ? remember(user) : forget(user)
redirect_back_or user
else
message = "Account not activated. "
message += "Check your email for the activation link."
flash[:warning] = message
redirect_to root_url
end
# log_in @user
# params[:session][:remember_me] == '1' ? remember(@user) : forget(@user)
# redirect_back_or @user
else
flash.now[:danger] = 'Invalid email/password combination'
render 'new'
end
end
def destroy
log_out if logged_in?
redirect_to root_url
end
end
| 26.424242 | 79 | 0.629587 |
33e4460f626dad64e8c2c39e49bc4a45dd38a51d | 1,677 | describe Solargraph::Pin::Attribute do
it "is a kind of attribute/property" do
source = Solargraph::Source.load_string(%(
class Foo
attr_reader :bar
end
))
map = Solargraph::SourceMap.map(source)
pin = map.pins.select{|p| p.kind == Solargraph::Pin::ATTRIBUTE}.first
expect(pin).not_to be_nil
expect(pin.kind).to eq(Solargraph::Pin::ATTRIBUTE)
expect(pin.completion_item_kind).to eq(Solargraph::LanguageServer::CompletionItemKinds::PROPERTY)
expect(pin.symbol_kind).to eq(Solargraph::LanguageServer::SymbolKinds::PROPERTY)
end
it "uses return type tags" do
pin = Solargraph::Pin::Attribute.new(nil, 'Foo', 'bar', '@return [File]', :reader, :instance, :public)
expect(pin.return_type.tag).to eq('File')
end
it "has empty parameters" do
pin = Solargraph::Pin::Attribute.new(nil, 'Foo', 'bar', '', :reader, :instance, :public)
expect(pin.parameters).to be_empty
expect(pin.parameter_names).to be_empty
end
it "detects undefined types" do
pin = Solargraph::Pin::Attribute.new(nil, 'Foo', 'bar', '', :reader, :instance, :public)
expect(pin.return_type).to be_undefined
end
it "generates paths" do
ipin = Solargraph::Pin::Attribute.new(nil, 'Foo', 'bar', '', :reader, :instance, :public)
expect(ipin.path).to eq('Foo#bar')
cpin = Solargraph::Pin::Attribute.new(nil, 'Foo', 'bar', '', :reader, :class, :public)
expect(cpin.path).to eq('Foo.bar')
end
it "handles invalid return type tags" do
pin = Solargraph::Pin::Attribute.new(nil, 'Foo', 'bar', '@return [Array<]', :reader, :instance, :public)
expect(pin.return_complex_type).to be_undefined
end
end
| 38.113636 | 108 | 0.674419 |
38238ee3cdf5e3f46b7816dfa2cd577fc1f08587 | 225 | # Example:
# require "client_library"
module TerraspacePluginGitlab
module Clients
extend Memoist
# Example:
# def client_resource
# ClientResource.new
# end
# memoize :client_resource
end
end
| 15 | 30 | 0.688889 |
114f438c50c7bc608cb356ecf5b19a052ece681f | 747 | cask "font-sarasa-gothic" do
version "0.34.7"
sha256 "b307c09577231edda407de67cc6cef155348ff9f3232404c8ad8fca01644e11b"
url "https://github.com/be5invis/Sarasa-Gothic/releases/download/v#{version}/sarasa-gothic-ttc-#{version}.7z"
name "Sarasa Gothic"
name "更纱黑体"
name "更紗黑體"
name "更紗ゴシック"
name "사라사고딕"
desc "CJK programming font based on Iosevka and Source Han Sans"
homepage "https://github.com/be5invis/Sarasa-Gothic"
font "sarasa-bold.ttc"
font "sarasa-bolditalic.ttc"
font "sarasa-extralight.ttc"
font "sarasa-extralightitalic.ttc"
font "sarasa-italic.ttc"
font "sarasa-light.ttc"
font "sarasa-lightitalic.ttc"
font "sarasa-regular.ttc"
font "sarasa-semibold.ttc"
font "sarasa-semibolditalic.ttc"
end
| 29.88 | 111 | 0.745649 |
e22f55ef9886d75de4d43f2bf48d39684ef5eec8 | 548 | module NumbersAndWords
module Strategies
module ArrayJoiner
module Languages
class Base
include Languages::Families::Base
attr_accessor :strategy, :elements, :translations, :strings, :options
def initialize strategy
@strategy = strategy
@elements = strategy.elements
@translations = strategy.translations
@options = strategy.options
end
def join
elements_logic
end
end
end
end
end
end
| 21.92 | 79 | 0.580292 |
910dbd22f15123404140830dcd78514b9fc96e25 | 4,073 | open_regex = /^You open|^That is already open\.$|^There doesn't seem to be any way to do that\.$|^What were you referring to\?|^I could not find what you were referring to\./
get_regex = /^You (?:shield the opening of .*? from view as you |discreetly |carefully |deftly )?(?:remove|draw|grab|reach|slip|tuck|retrieve|already have|unsheathe|detach)|^Get what\?$|^Why don't you leave some for others\?$|^You need a free hand|^You already have that/
put_regex = /^You (?:attempt to shield .*? from view as you |discreetly |carefully |absent-mindedly )?(?:put|place|slip|tuck|add|hang|drop|untie your|find an incomplete bundle|wipe off .*? and sheathe)|^A sigh of grateful pleasure can be heard as you feed .*? to your|^As you place|^I could not find what you were referring to\.$|^Your bundle would be too large|^The .+ is too large to be bundled\.|^As you place your|^The .*? is already a bundle|^Your .*? won't fit in .*?\.$|^You can't .+ It's closed!$|^You can't put/
sack_name = UserVars.day_pass_sack
sack = (
GameObj.inv.find { |obj| obj.noun == sack_name} ||
GameObj.inv.find { |obj| obj.name == sack_name } ||
GameObj.inv.find { |obj| obj.name =~ /\b#{Regexp.escape(sack_name)}$/i } ||
GameObj.inv.find { |obj| obj.name =~ /\b#{sack_name.split(' ').collect { |n| Regexp.escape(n) }.join(".*\\b")}/i }
)
close_sack = false
if sack.contents.nil?
open_result = dothistimeout "open ##{sack.id}", 10, open_regex
if open_result =~ /^You open/
close_sack = true
else
dothistimeout "look in ##{sack.id}", 10, /In the .*? you see/
end
end
empty_hand
$mapdb_day_passes.keys.each { |id|
if $mapdb_day_passes[id][:expires] < (Time.now - 10)
dothistimeout "_drag ##{id} drop", 2, /^As you let go|^You drop/
$mapdb_day_passes.delete(id)
end
}
pass_id = $mapdb_day_passes.keys.find { |id| $mapdb_day_passes[id][:towns].include?("Solhaven") and $mapdb_day_passes[id][:towns].include?('Icemule Trace') and $mapdb_day_passes[id][:expires] > (Time.now + 10) }
if pass_id
dothistimeout "get ##{pass_id}", 10, get_regex
elsif UserVars.mapdb_buy_day_pass =~ /^yes$|\bimt,sol\b/i
move 'go corridor'
fput 'unhide' if hidden? or invisible?
dothistimeout "ask halfling for solhaven", 10, /says to you/
result = dothistimeout "ask halfling for solhaven", 10, /don't have enough|quickly hands you/
if (result =~ /don't have enough/) and $go2_get_silvers
[ 'out', 'go gate', 'southeast', 'southeast', 'northeast', 'southeast', 'south', 'west', 'south', 'east', 'east', 'south', 'south', 'east', 'north', 'north', 'go archway' ].each { |dir| move dir }
dothistimeout 'withdraw 5000', 10, /carefully records|through the books/
[ 'out', 'south', 'south', 'west', 'north', 'north', 'west', 'west', 'north', 'east', 'north', 'northwest', 'southwest', 'northwest', 'northwest', 'go gate', 'go door' ].each { |dir| move dir }
fput 'unhide' if hidden? or invisible?
result = dothistimeout "ask halfling for solhaven", 10, /says to you|don't have enough|quickly hands you/
if result =~ /says to you/
result = dothistimeout "ask halfling for solhaven", 10, /don't have enough|quickly hands you/
end
end
if result =~ /quickly hands you/
if GameObj.right_hand.noun == 'pass'
pass_id = GameObj.right_hand.id
elsif GameObj.left_hand.noun == 'pass'
pass_id = GameObj.left_hand.id
end
move 'go corridor'
elsif result =~ /don't have enough/
echo 'error: You are too poor to buy Chronomage day passes. Turning off buy_day_pass setting.'
sleep 3
UserVars.mapdb_buy_day_pass = 'no'
end
end
if pass_id
result = dothistimeout "raise ##{pass_id}", 10, /whirlwind of color subsides|pass is expired|not valid for departures|As you go to raise your pass, you realize|Raise what/
dothistimeout "_drag ##{pass_id} ##{sack.id}", 10, put_regex
else
$mapdb_day_passes.clear
UserVars.mapdb_find_day_pass = 'yes'
$restart_go2 = true
end
fill_hand
if close_sack
dothistimeout "close ##{sack.id}", 2, /^You close/
end | 58.185714 | 520 | 0.669286 |
d534d7fa8a0aba43ab933cdecc03987ba66a3eb1 | 273 | class CreateFoods < ActiveRecord::Migration[6.1]
def change
create_table :foods do |t|
t.string :name
t.integer :calories
t.integer :protein
t.integer :fat
t.integer :carb
t.float :servinginoz
t.timestamps
end
end
end
| 18.2 | 48 | 0.622711 |
4a56c5129b79cc32dbfe93973c740ca9a6a9a23c | 1,342 | # encoding: utf-8
class ProfileUploader < CarrierWave::Uploader::Base
# Include RMagick or MiniMagick support:
# include CarrierWave::RMagick
# include CarrierWave::MiniMagick
# Choose what kind of storage to use for this uploader:
# storage :file
# storage :fog
# Override the directory where uploaded files will be stored.
# This is a sensible default for uploaders that are meant to be mounted:
# def store_dir
# "uploads/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
# end
# Provide a default URL as a default if there hasn't been a file uploaded:
# def default_url
# "/images/fallback/" + [version_name, "default.png"].compact.join('_')
# end
# Process files as they are uploaded:
# process :scale => [200, 300]
#
# def scale(width, height)
# # do something
# end
# Create different versions of your uploaded files:
# version :thumb do
# process :scale => [50, 50]
# end
# Add a white list of extensions which are allowed to be uploaded.
# For images you might use something like this:
# def extension_white_list
# %w(jpg jpeg gif png)
# end
# Override the filename of the uploaded files:
# Avoid using model.id or version_name here, see uploader/store.rb for details.
# def filename
# "something.jpg" if original_filename
# end
end
| 27.387755 | 81 | 0.69225 |
bbfd215567ed8231ce446ccd559048fd79c501c8 | 413 | cask 'google-earth-web-plugin' do
version :latest
sha256 :no_check
url 'http://r2---sn-po4vapo3-j3ae.c.pack.google.com/edgedl/earth/plugin/current/googleearth-mac-plugin-intel.dmg'
name 'Google Earth plug-in'
homepage 'https://www.google.com/intl/en/earth/explore/products/plugin.html'
license :gratis
pkg 'Install Google Earth.pkg'
uninstall pkgutil: 'com.Google.GoogleEarthPlugin.plugin'
end
| 29.5 | 115 | 0.755448 |
e9d673440af8a9bb321c79de57aefa7ca31d0387 | 17,178 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20180807120644) do
create_table "account_versions", force: true do |t|
t.integer "member_id"
t.integer "account_id"
t.integer "reason"
t.decimal "balance", precision: 32, scale: 16
t.decimal "locked", precision: 32, scale: 16
t.decimal "fee", precision: 32, scale: 16
t.decimal "amount", precision: 32, scale: 16
t.integer "modifiable_id"
t.string "modifiable_type"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "currency"
t.integer "fun"
end
add_index "account_versions", ["account_id", "reason"], name: "index_account_versions_on_account_id_and_reason", using: :btree
add_index "account_versions", ["account_id"], name: "index_account_versions_on_account_id", using: :btree
add_index "account_versions", ["member_id", "reason"], name: "index_account_versions_on_member_id_and_reason", using: :btree
add_index "account_versions", ["modifiable_id", "modifiable_type"], name: "index_account_versions_on_modifiable_id_and_modifiable_type", using: :btree
create_table "accounts", force: true do |t|
t.integer "member_id"
t.integer "currency"
t.decimal "balance", precision: 32, scale: 16
t.decimal "locked", precision: 32, scale: 16
t.datetime "created_at"
t.datetime "updated_at"
t.decimal "in", precision: 32, scale: 16
t.decimal "out", precision: 32, scale: 16
t.integer "default_withdraw_fund_source_id"
end
add_index "accounts", ["member_id", "currency"], name: "index_accounts_on_member_id_and_currency", using: :btree
add_index "accounts", ["member_id"], name: "index_accounts_on_member_id", using: :btree
create_table "api_tokens", force: true do |t|
t.integer "member_id", null: false
t.string "access_key", limit: 50, null: false
t.string "secret_key", limit: 50, null: false
t.datetime "created_at"
t.datetime "updated_at"
t.string "trusted_ip_list"
t.string "label"
t.integer "oauth_access_token_id"
t.datetime "expire_at"
t.string "scopes"
t.datetime "deleted_at"
end
add_index "api_tokens", ["access_key"], name: "index_api_tokens_on_access_key", unique: true, using: :btree
add_index "api_tokens", ["secret_key"], name: "index_api_tokens_on_secret_key", unique: true, using: :btree
create_table "assets", force: true do |t|
t.string "type"
t.integer "attachable_id"
t.string "attachable_type"
t.string "file"
end
create_table "audit_logs", force: true do |t|
t.string "type"
t.integer "operator_id"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "auditable_id"
t.string "auditable_type"
t.string "source_state"
t.string "target_state"
end
add_index "audit_logs", ["auditable_id", "auditable_type"], name: "index_audit_logs_on_auditable_id_and_auditable_type", using: :btree
add_index "audit_logs", ["operator_id"], name: "index_audit_logs_on_operator_id", using: :btree
create_table "authentications", force: true do |t|
t.string "provider"
t.string "uid"
t.string "token"
t.string "secret"
t.integer "member_id"
t.datetime "created_at"
t.datetime "updated_at"
t.string "nickname"
end
add_index "authentications", ["member_id"], name: "index_authentications_on_member_id", using: :btree
add_index "authentications", ["provider", "uid"], name: "index_authentications_on_provider_and_uid", using: :btree
create_table "comments", force: true do |t|
t.text "content"
t.integer "author_id"
t.integer "ticket_id"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "deposits", force: true do |t|
t.integer "account_id"
t.integer "member_id"
t.integer "currency"
t.decimal "amount", precision: 32, scale: 16
t.decimal "fee", precision: 32, scale: 16
t.string "fund_uid"
t.string "fund_extra"
t.string "txid"
t.integer "state"
t.string "aasm_state"
t.datetime "created_at"
t.datetime "updated_at"
t.datetime "done_at"
t.string "confirmations"
t.string "type"
t.integer "payment_transaction_id"
t.integer "txout"
t.string "fund_routing_number"
t.string "paypal_email"
end
add_index "deposits", ["txid", "txout"], name: "index_deposits_on_txid_and_txout", using: :btree
create_table "document_translations", force: true do |t|
t.integer "document_id", null: false
t.string "locale", null: false
t.datetime "created_at"
t.datetime "updated_at"
t.string "title"
t.text "body"
t.text "desc"
t.text "keywords"
end
add_index "document_translations", ["document_id"], name: "index_document_translations_on_document_id", using: :btree
add_index "document_translations", ["locale"], name: "index_document_translations_on_locale", using: :btree
create_table "documents", force: true do |t|
t.string "key"
t.string "title"
t.text "body"
t.boolean "is_auth"
t.datetime "created_at"
t.datetime "updated_at"
t.text "desc"
t.text "keywords"
end
create_table "fund_sources", force: true do |t|
t.integer "member_id"
t.integer "currency"
t.string "extra"
t.string "uid"
t.boolean "is_locked", default: false
t.datetime "created_at"
t.datetime "updated_at"
t.datetime "deleted_at"
t.string "routing_number"
end
create_table "id_documents", force: true do |t|
t.integer "id_document_type"
t.string "name"
t.string "id_document_number"
t.integer "member_id"
t.datetime "created_at"
t.datetime "updated_at"
t.date "birth_date"
t.text "address"
t.string "city"
t.string "country"
t.string "zipcode"
t.integer "id_bill_type"
t.string "aasm_state"
end
create_table "identities", force: true do |t|
t.string "email"
t.string "password_digest"
t.boolean "is_active"
t.integer "retry_count"
t.boolean "is_locked"
t.datetime "locked_at"
t.datetime "last_verify_at"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "members", force: true do |t|
t.string "sn"
t.string "display_name"
t.string "email"
t.integer "identity_id"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "state"
t.boolean "activated"
t.integer "country_code"
t.string "phone_number"
t.boolean "disabled", default: false
t.boolean "api_disabled", default: false
t.string "nickname"
end
create_table "oauth_access_grants", force: true do |t|
t.integer "resource_owner_id", null: false
t.integer "application_id", null: false
t.string "token", null: false
t.integer "expires_in", null: false
t.text "redirect_uri", null: false
t.datetime "created_at", null: false
t.datetime "revoked_at"
t.string "scopes"
end
add_index "oauth_access_grants", ["token"], name: "index_oauth_access_grants_on_token", unique: true, using: :btree
create_table "oauth_access_tokens", force: true do |t|
t.integer "resource_owner_id"
t.integer "application_id"
t.string "token", null: false
t.string "refresh_token"
t.integer "expires_in"
t.datetime "revoked_at"
t.datetime "created_at", null: false
t.string "scopes"
t.datetime "deleted_at"
end
add_index "oauth_access_tokens", ["refresh_token"], name: "index_oauth_access_tokens_on_refresh_token", unique: true, using: :btree
add_index "oauth_access_tokens", ["resource_owner_id"], name: "index_oauth_access_tokens_on_resource_owner_id", using: :btree
add_index "oauth_access_tokens", ["token"], name: "index_oauth_access_tokens_on_token", unique: true, using: :btree
create_table "oauth_applications", force: true do |t|
t.string "name", null: false
t.string "uid", null: false
t.string "secret", null: false
t.text "redirect_uri", null: false
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "oauth_applications", ["uid"], name: "index_oauth_applications_on_uid", unique: true, using: :btree
create_table "orders", force: true do |t|
t.integer "bid"
t.integer "ask"
t.integer "currency"
t.decimal "price", precision: 32, scale: 16
t.decimal "volume", precision: 32, scale: 16
t.decimal "origin_volume", precision: 32, scale: 16
t.integer "state"
t.datetime "done_at"
t.string "type", limit: 8
t.integer "member_id"
t.datetime "created_at"
t.datetime "updated_at"
t.string "sn"
t.string "source", null: false
t.string "ord_type", limit: 10
t.decimal "locked", precision: 32, scale: 16
t.decimal "origin_locked", precision: 32, scale: 16
t.decimal "funds_received", precision: 32, scale: 16, default: 0.0
t.integer "trades_count", default: 0
t.decimal "order_fee", precision: 32, scale: 16
end
add_index "orders", ["currency", "state"], name: "index_orders_on_currency_and_state", using: :btree
add_index "orders", ["member_id", "state"], name: "index_orders_on_member_id_and_state", using: :btree
add_index "orders", ["member_id"], name: "index_orders_on_member_id", using: :btree
add_index "orders", ["state"], name: "index_orders_on_state", using: :btree
create_table "partial_trees", force: true do |t|
t.integer "proof_id", null: false
t.integer "account_id", null: false
t.text "json", null: false
t.datetime "created_at"
t.datetime "updated_at"
t.string "sum"
end
create_table "payment_addresses", force: true do |t|
t.integer "account_id"
t.string "address"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "currency"
end
create_table "payment_transactions", force: true do |t|
t.string "txid"
t.decimal "amount", precision: 32, scale: 16
t.integer "confirmations"
t.string "address"
t.integer "state"
t.string "aasm_state"
t.datetime "created_at"
t.datetime "updated_at"
t.datetime "receive_at"
t.datetime "dont_at"
t.integer "currency"
t.string "type", limit: 60
t.integer "txout"
end
add_index "payment_transactions", ["txid", "txout"], name: "index_payment_transactions_on_txid_and_txout", using: :btree
add_index "payment_transactions", ["type"], name: "index_payment_transactions_on_type", using: :btree
create_table "proofs", force: true do |t|
t.string "root"
t.integer "currency"
t.boolean "ready", default: false
t.datetime "created_at"
t.datetime "updated_at"
t.string "sum"
t.text "addresses"
t.string "balance", limit: 30
end
create_table "read_marks", force: true do |t|
t.integer "readable_id"
t.integer "member_id", null: false
t.string "readable_type", limit: 20, null: false
t.datetime "timestamp"
end
add_index "read_marks", ["member_id"], name: "index_read_marks_on_member_id", using: :btree
add_index "read_marks", ["readable_type", "readable_id"], name: "index_read_marks_on_readable_type_and_readable_id", using: :btree
create_table "running_accounts", force: true do |t|
t.integer "category"
t.decimal "income", precision: 32, scale: 16, default: 0.0, null: false
t.decimal "expenses", precision: 32, scale: 16, default: 0.0, null: false
t.integer "currency"
t.integer "member_id"
t.integer "source_id"
t.string "source_type"
t.string "note"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "running_accounts", ["member_id"], name: "index_running_accounts_on_member_id", using: :btree
add_index "running_accounts", ["source_id", "source_type"], name: "index_running_accounts_on_source_id_and_source_type", using: :btree
create_table "signup_histories", force: true do |t|
t.integer "member_id"
t.string "ip"
t.string "accept_language"
t.string "ua"
t.datetime "created_at"
end
add_index "signup_histories", ["member_id"], name: "index_signup_histories_on_member_id", using: :btree
create_table "simple_captcha_data", force: true do |t|
t.string "key", limit: 40
t.string "value", limit: 6
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "simple_captcha_data", ["key"], name: "idx_key", using: :btree
create_table "taggings", force: true do |t|
t.integer "tag_id"
t.integer "taggable_id"
t.string "taggable_type"
t.integer "tagger_id"
t.string "tagger_type"
t.string "context", limit: 128
t.datetime "created_at"
end
add_index "taggings", ["tag_id", "taggable_id", "taggable_type", "context", "tagger_id", "tagger_type"], name: "taggings_idx", unique: true, using: :btree
create_table "tags", force: true do |t|
t.string "name"
end
add_index "tags", ["name"], name: "index_tags_on_name", unique: true, using: :btree
create_table "tickets", force: true do |t|
t.string "title"
t.text "content"
t.string "aasm_state"
t.integer "author_id"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "tokens", force: true do |t|
t.string "token"
t.datetime "expire_at"
t.integer "member_id"
t.boolean "is_used", default: false
t.string "type"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "tokens", ["type", "token", "expire_at", "is_used"], name: "index_tokens_on_type_and_token_and_expire_at_and_is_used", using: :btree
create_table "trades", force: true do |t|
t.decimal "price", precision: 32, scale: 16
t.decimal "volume", precision: 32, scale: 16
t.integer "ask_id"
t.integer "bid_id"
t.integer "trend"
t.integer "currency"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "ask_member_id"
t.integer "bid_member_id"
t.decimal "funds", precision: 32, scale: 16
end
add_index "trades", ["ask_id"], name: "index_trades_on_ask_id", using: :btree
add_index "trades", ["ask_member_id"], name: "index_trades_on_ask_member_id", using: :btree
add_index "trades", ["bid_id"], name: "index_trades_on_bid_id", using: :btree
add_index "trades", ["bid_member_id"], name: "index_trades_on_bid_member_id", using: :btree
add_index "trades", ["created_at"], name: "index_trades_on_created_at", using: :btree
add_index "trades", ["currency"], name: "index_trades_on_currency", using: :btree
create_table "two_factors", force: true do |t|
t.integer "member_id"
t.string "otp_secret"
t.datetime "last_verify_at"
t.boolean "activated"
t.string "type"
t.datetime "refreshed_at"
end
create_table "versions", force: true do |t|
t.string "item_type", null: false
t.integer "item_id", null: false
t.string "event", null: false
t.string "whodunnit"
t.text "object"
t.datetime "created_at"
end
add_index "versions", ["item_type", "item_id"], name: "index_versions_on_item_type_and_item_id", using: :btree
create_table "withdraws", force: true do |t|
t.string "sn"
t.integer "account_id"
t.integer "member_id"
t.integer "currency"
t.decimal "amount", precision: 32, scale: 16
t.decimal "fee", precision: 32, scale: 16
t.string "fund_uid"
t.string "fund_extra"
t.datetime "created_at"
t.datetime "updated_at"
t.datetime "done_at"
t.string "txid"
t.string "aasm_state"
t.decimal "sum", precision: 32, scale: 16, default: 0.0, null: false
t.string "type"
t.string "paypal_email"
end
end
| 36.012579 | 156 | 0.651124 |
792c2c91cb956e5e4c47768de0061395527da93b | 184 | # The controller to hand the /Schedule requests.
class ScheduleController < ApplicationController
before_action :valid_patient_check
def index
get_fhir("/Schedule")
end
end
| 20.444444 | 48 | 0.782609 |
d5590b0f357697f287f066da179ed97a919b8479 | 3,779 | # coding: utf-8
require 'test_helper'
describe "JSON Parsing" do
TESTS = {
%q({"data": "G\u00fcnter"}) => {"data" => "Günter"},
%q({"html": "\u003Cdiv\\u003E"}) => {"html" => "<div>"},
%q({"returnTo":{"\/categories":"\/"}}) => {"returnTo" => {"/categories" => "/"}},
%q({returnTo:{"\/categories":"\/"}}) => {"returnTo" => {"/categories" => "/"}},
%q({"return\\"To\\":":{"\/categories":"\/"}}) => {"return\"To\":" => {"/categories" => "/"}},
%q({"returnTo":{"\/categories":1}}) => {"returnTo" => {"/categories" => 1}},
%({"returnTo":[1,"a"]}) => {"returnTo" => [1, "a"]},
%({"returnTo":[1,"\\"a\\",", "b"]}) => {"returnTo" => [1, "\"a\",", "b"]},
%({"a": "'", "b": "5,000"}) => {"a" => "'", "b" => "5,000"},
%({"a": "a's, b's and c's", "b": "5,000"}) => {"a" => "a's, b's and c's", "b" => "5,000"},
%({"a": "2007-01-01"}) => {'a' => Date.new(2007, 1, 1)},
%({"first_date": "2016-01-25", "second_date": "2014-01-26"}) => {'first_date' => Date.new(2016, 1, 25), 'second_date' => Date.new(2014, 1, 26)},
%({"first_date": "2016-01-25", "non_date": "Abc", "second_date": "2014-01-26"}) => {'first_date' => Date.new(2016, 1, 25), 'non_date' => 'Abc', 'second_date' => Date.new(2014, 1, 26)},
%({"a": "2007-01-01 01:12:34 Z"}) => {'a' => Time.utc(2007, 1, 1, 1, 12, 34)},
# Handle ISO 8601 date/time format http://en.wikipedia.org/wiki/ISO_8601
%({"a": "2007-01-01T01:12:34Z"}) => {'a' => Time.utc(2007, 1, 1, 1, 12, 34)},
# no time zone
%({"a": "2007-01-01 01:12:34"}) => {'a' => "2007-01-01 01:12:34"},
%({"bio": "1985-01-29: birthdate"}) => {'bio' => '1985-01-29: birthdate'},
%({"regex": /foo.*/}) => {'regex' => /foo.*/},
%({"regex": /foo.*/i}) => {'regex' => /foo.*/i},
%({"regex": /foo.*/mix}) => {'regex' => /foo.*/mix},
%([]) => [],
%({}) => {},
%(1) => 1,
%("") => "",
%("\\"") => "\"",
%(null) => nil,
%(true) => true,
%(false) => false,
%q("http:\/\/test.host\/posts\/1") => "http://test.host/posts/1",
# \u0000 and \x00 escape sequences
%q({"foo":"bar\u0000"}) => {"foo" => "bar"},
%q({"foo":"bar\u0000baz"}) => {"foo" => "barbaz"},
%q(bar\u0000) => "bar",
%q(bar\u0000baz) => "barbaz",
%q({"foo":"bar\x00"}) => {"foo" => "bar\x00"},
%q({"foo":"bar\x00baz"}) => {"foo" => "bar\x00baz"}
}
TESTS.each do |json, expected|
it "decode json (#{json})" do
Crack::JSON.parse(json).must_equal expected
end
end
it "is not vulnerable to YAML deserialization exploits" do
class Foo; end
refute_instance_of(Foo, Crack::JSON.parse("# '---/\n--- !ruby/object:Foo\n foo: bar"))
end
it "raise error for failed decoding" do
assert_raises(Crack::ParseError) {
Crack::JSON.parse(%({: 1}))
}
end
it "be able to parse a JSON response from a Twitter search about 'firefox'" do
data = ''
File.open(File.dirname(__FILE__) + "/data/twittersearch-firefox.json", "r") { |f|
data = f.read
}
Crack::JSON.parse(data)
end
it "be able to parse a JSON response from a Twitter search about 'internet explorer'" do
data = ''
File.open(File.dirname(__FILE__) + "/data/twittersearch-ie.json", "r") { |f|
data = f.read
}
Crack::JSON.parse(data)
end
it "does not raise SystemStackError when parsing large JSON files" do
data = ''
File.open(File.dirname(__FILE__) + "/data/large_dataset.json", "r") { |f|
data = f.read
}
Crack::JSON.parse(data)
end
end
| 41.076087 | 188 | 0.463879 |
ac8c93628c3fbbf2dd209cf67a1426ab85afc9eb | 504 | # frozen_string_literal: true
class ApplicationController < ActionController::Base
before_action :configure_permitted_parameters, if: :devise_controller?
protected
def configure_permitted_parameters
devise_parameter_sanitizer.permit(:sign_up, keys: %i[fullname mobile_no address])
devise_parameter_sanitizer.permit(:account_update, keys: %i[fullname mobile_no address])
end
rescue_from CanCan::AccessDenied do
flash[:error] = 'Access denied!'
redirect_to root_url
end
end
| 28 | 92 | 0.791667 |
38ba924b9f3b96bac47eb2297934859052dfe52d | 1,849 | # frozen_string_literal: true
require 'git_snip/printer'
RSpec.describe GitSnip::Printer do
[
[:force_option_needed, '-f option is needed to delete branches.', :red],
[:deleting_branches, "Deleting the following branches...\n\n", :green],
[:no_branches_deleted, 'No branches were deleted.', :green],
[:will_delete_branches, "Would delete the following branches...\n\n", :green],
[:no_branches_to_delete, 'No branches would be deleted.', :green],
[:done, "\nDone.", :green]
].each do |method, text, color|
describe "##{method}" do
let(:sayer) { spy('sayer') }
let(:printer) { described_class.new(sayer) }
it "should print #{text.inspect} in #{color}" do
printer.send(method)
expect(sayer).to have_received(:say).with(text, color)
end
end
end
describe '#branch_info' do
let(:sayer) { spy('sayer') }
let(:printer) { described_class.new(sayer) }
it 'should print a row of branch info' do
printer.branch_info(row_double)
expect(sayer).to have_received(:say).with('sha ', :yellow).ordered
expect(sayer).to have_received(:say).with('name ', :magenta).ordered
expect(sayer).to have_received(:say).with('date ', :green).ordered
expect(sayer).to have_received(:say).with('author ', [:blue, :bold]).ordered
expect(sayer).to have_received(:say).with("hello world\n").ordered
end
it 'should strip the last row and append new line' do
printer.branch_info(row_double(message: " hello world \n\n\n"))
expect(sayer).to have_received(:say).with("hello world\n")
end
def row_double(attrs = {})
instance_double('GitSnip::Branch::Row', {
sha: 'sha',
name: 'name',
date: 'date',
author: 'author',
message: 'hello world'
}.merge(attrs))
end
end
end
| 33.017857 | 82 | 0.636019 |
1cbd10309d198666d5cbfc307a5b2bb1668f3ea6 | 810 | $:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "action_bouncer/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = "action_bouncer"
s.version = ActionBouncer::VERSION
s.authors = ["Oswaldo Ferreira"]
s.email = ["[email protected]"]
s.summary = "Dead simple rails authorization"
s.description = "Rails authorization for well defined authorization objects interfaces"
s.license = "MIT"
s.files = Dir["{app,config,db,lib}/**/*", "MIT-LICENSE", "Rakefile", "README.md"]
s.test_files = Dir["spec/**/*"]
s.add_dependency "rails"
s.add_development_dependency "sqlite3"
s.add_development_dependency "rspec-rails"
s.add_development_dependency "codeclimate-test-reporter"
end
| 32.4 | 89 | 0.7 |
d5944ee64d945929f60ec3835a306f4183d74624 | 1,064 | require_relative 'boot'
require "action_controller/railtie"
require "action_mailer/railtie"
require "sprockets/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module RubyxWebpage
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
config.assets.paths << Gem.loaded_specs['susy'].full_gem_path+'/sass'
config.blog_path = Rails.root.to_s + "/app/views/posts"
config.assets.configure do |env|
env.cache = ActiveSupport::Cache.lookup_store(:memory_store,
{ size: 64.megabytes })
end
end
end
| 34.322581 | 82 | 0.716165 |
e937c5e0786aa9f921f708bb96398a6632e2945f | 1,501 | #
# Be sure to run `pod lib lint PersistentValue.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'PersistentValue'
s.version = '0.6.0'
s.summary = 'Type safe persistent values for NSUserDefaults, KeyChain, and values backed by an app file'
# Temporary-- this Pod really needs to be pushed up to Cocoapods
# s.homepage = 'https://github.com/RosterHQ/PersistentValue'
s.homepage = 'https://github.com/crspybits/PersistentValue'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'crspybits' => '[email protected]' }
# Temporary-- this Pod really needs to be pushed up to Cocoapods
# s.source = { :git => 'https://github.com/RosterHQ/PersistentValue.git', :tag => s.version.to_s }
s.source = { :git => 'https://github.com/crspybits/PersistentValue.git', :tag => s.version.to_s }
s.ios.deployment_target = '8.0'
s.source_files = 'Sources/PersistentValue/**/*'
s.dependency 'KeychainAccess', '~> 3.1'
# Having a problem with a crash in Release mode-- "GenericCache: cyclic metadata dependency detected, aborting", trying to fix.
# s.dependency 'SwiftyUserDefaults', '~> 4.0'
s.dependency 'SwiftyUserDefaults', '~> 5.0.0-beta'
s.swift_version = '4.2'
end
| 42.885714 | 128 | 0.65956 |
e22a5880718c0eaeae9908324713261cb95ded7a | 20,578 | require 'test_helper'
describe BrBoleto::Conta::Caixa do
subject { FactoryGirl.build(:conta_caixa) }
it "deve herdar de Conta::Base" do
subject.class.superclass.must_equal BrBoleto::Conta::Base
end
context "valores padrões" do
it "deve setar a carteira da carteira com 14'" do
subject.class.new.carteira.must_equal '14'
end
it "deve setar a valid_carteira_required com true" do
subject.class.new.valid_carteira_required.must_equal true
end
it "deve setar a valid_carteira_length com 2" do
subject.class.new.valid_carteira_length.must_equal 2
end
it "deve setar a valid_carteira_inclusion com %w[14 24]" do
subject.class.new.valid_carteira_inclusion.must_equal %w[14 24]
end
it "deve setar a valid_convenio_maximum com 6" do
subject.class.new.valid_convenio_maximum.must_equal 6
end
it "deve setar a versao_aplicativo com '0'" do
subject.class.new.versao_aplicativo.must_equal '0000'
end
end
describe "Validations" do
it { must validate_presence_of(:agencia) }
it { must validate_presence_of(:razao_social) }
it { must validate_presence_of(:cpf_cnpj) }
it do
subject.agencia_dv = 21
must_be_message_error(:agencia_dv, :custom_length_is, {count: 1})
end
it 'versao_aplicativo deve ter no maximo 4 digitos' do
subject.versao_aplicativo = '12345'
must_be_message_error(:versao_aplicativo, :custom_length_maximum, {count: 4})
subject.versao_aplicativo = '1234'
wont_be_message_error(:versao_aplicativo, :custom_length_maximum, {count: 4})
end
context 'Validações padrões da carteira da carteira' do
subject { BrBoleto::Conta::Caixa.new }
it { must validate_presence_of(:carteira) }
it 'Tamanho deve ser de 2' do
subject.carteira = '132'
must_be_message_error(:carteira, :custom_length_is, {count: 2})
end
it "valores aceitos" do
subject.carteira = '04'
must_be_message_error(:carteira, :custom_inclusion, {list: '14, 24'})
end
end
context 'Validações padrões da convenio' do
subject { BrBoleto::Conta::Caixa.new }
it { must validate_presence_of(:convenio) }
it 'Tamanho deve ter o tamanho maximo de 6' do
subject.convenio = '1234567'
must_be_message_error(:convenio, :custom_length_maximum, {count: 6})
end
end
end
it "codigo do banco" do
subject.codigo_banco.must_equal '104'
end
it '#codigo_banco_dv' do
subject.codigo_banco_dv.must_equal '0'
end
describe "#nome_banco" do
it "valor padrão para o nome_banco" do
subject.nome_banco.must_equal 'CAIXA ECONOMICA FEDERAL'
end
it "deve ser possível mudar o valor do nome do banco" do
subject.nome_banco = 'MEU'
subject.nome_banco.must_equal 'MEU'
end
end
it "#versao_layout_arquivo_cnab_240" do
subject.versao_layout_arquivo_cnab_240.must_equal '050'
end
it "#versao_layout_lote_cnab_240" do
subject.versao_layout_lote_cnab_240.must_equal '030'
end
describe '#agencia_dv' do
it "deve ser personalizavel pelo usuario" do
subject.agencia_dv = 88
subject.agencia_dv.must_equal 88
end
it "se não passar valor deve calcular automatico" do
subject.agencia = '1234'
BrBoleto::Calculos::Modulo11FatorDe2a9RestoZero.expects(:new).with('1234').returns(stub(to_s: 5))
subject.agencia_dv.must_equal 5
end
end
describe '#conta_corrente_dv' do
it "deve ser personalizavel pelo usuario" do
subject.conta_corrente_dv = 88
subject.conta_corrente_dv.must_equal 88
end
it "se não passar valor deve calcular automatico" do
subject.conta_corrente_dv = nil
subject.conta_corrente = '6688'
BrBoleto::Calculos::Modulo11FatorDe2a9RestoZero.expects(:new).with('6688').returns(stub(to_s: 5))
subject.conta_corrente_dv.must_equal 5
end
end
describe '#convenio_dv' do
it "deve ser personalizavel pelo usuario" do
subject.convenio_dv = 88
subject.convenio_dv.must_equal 88
end
it "se não passar valor deve calcular automatico" do
subject.convenio_dv = nil
subject.convenio = '6688'
BrBoleto::Calculos::Modulo11FatorDe2a9RestoZero.expects(:new).with('006688').returns(stub(to_s: 5))
subject.convenio_dv.must_equal 5
end
end
describe '#versao_aplicativo' do
it "deve ajustar valor para 4 digitos" do
subject.versao_aplicativo = 14
subject.versao_aplicativo.must_equal '0014'
end
end
describe "#carteira_formatada - Conforme o manual da caixa deve retornar RG para carteira com registro e SR para carteira sem registro" do
it "para a carteira 14 deve retornar RG" do
subject.carteira = '14'
subject.carteira_formatada.must_equal 'RG'
end
it "para a carteira 24 deve retornar SR" do
subject.carteira = '24'
subject.carteira_formatada.must_equal 'SR'
end
end
describe "#get_codigo_movimento_retorno" do
context "CÓDIGOS para o Caixa" do
it { subject.get_codigo_movimento_retorno('01', 240).must_equal '01' } # Solicitação de Impressão de Títulos Confirmada
it { subject.get_codigo_movimento_retorno('35', 240).must_equal '135' } # Confirmação de Inclusão Banco de Sacado
it { subject.get_codigo_movimento_retorno('36', 240).must_equal '136' } # Confirmação de Alteração Banco de Sacado
it { subject.get_codigo_movimento_retorno('37', 240).must_equal '137' } # Confirmação de Exclusão Banco de Sacado
it { subject.get_codigo_movimento_retorno('38', 240).must_equal '138' } # Emissão de Bloquetos de Banco de Sacado
it { subject.get_codigo_movimento_retorno('39', 240).must_equal '139' } # Manutenção de Sacado Rejeitada
it { subject.get_codigo_movimento_retorno('40', 240).must_equal '140' } # Entrada de Título via Banco de Sacado Rejeitada
it { subject.get_codigo_movimento_retorno('41', 240).must_equal '141' } # Manutenção de Banco de Sacado Rejeitada
it { subject.get_codigo_movimento_retorno('44', 240).must_equal '144' } # Estorno de Baixa / Liquidação
it { subject.get_codigo_movimento_retorno('45', 240).must_equal '145' } # Alteração de Dados
end
end
describe "#get_codigo_motivo_ocorrencia" do
context "CÓDIGOS para oa Caixa com Motivo Ocorrência A para CNAB 240" do
it { subject.get_codigo_motivo_ocorrencia('11', '02', 240).must_equal 'A115' } # Data de Geração Inválida
it { subject.get_codigo_motivo_ocorrencia('64', '03', 240).must_equal 'A116' } # Entrada Inválida para Cobrança Caucionada
it { subject.get_codigo_motivo_ocorrencia('65', '26', 240).must_equal 'A117' } # CEP do Pagador não encontrado
it { subject.get_codigo_motivo_ocorrencia('66', '30', 240).must_equal 'A118' } # Agencia Cobradora não encontrada
it { subject.get_codigo_motivo_ocorrencia('67', '02', 240).must_equal 'A119' } # Agencia Beneficiário não encontrada
it { subject.get_codigo_motivo_ocorrencia('68', '03', 240).must_equal 'A120' } # Movimentação inválida para título
it { subject.get_codigo_motivo_ocorrencia('69', '26', 240).must_equal 'A121' } # Alteração de dados inválida
it { subject.get_codigo_motivo_ocorrencia('70', '02', 240).must_equal 'A122' } # Apelido do cliente não cadastrado
it { subject.get_codigo_motivo_ocorrencia('71', '03', 240).must_equal 'A123' } # Erro na composição do arquivo
it { subject.get_codigo_motivo_ocorrencia('72', '26', 240).must_equal 'A124' } # Lote de serviço inválido
it { subject.get_codigo_motivo_ocorrencia('73', '30', 240).must_equal 'A105' } # Código do Beneficiário inválido
it { subject.get_codigo_motivo_ocorrencia('74', '02', 240).must_equal 'A125' } # Beneficiário não pertencente a Cobrança Eletrônica
it { subject.get_codigo_motivo_ocorrencia('75', '03', 240).must_equal 'A126' } # Nome da Empresa inválido
it { subject.get_codigo_motivo_ocorrencia('76', '26', 240).must_equal 'A127' } # Nome do Banco inválido
it { subject.get_codigo_motivo_ocorrencia('77', '30', 240).must_equal 'A128' } # Código da Remessa inválido
it { subject.get_codigo_motivo_ocorrencia('78', '02', 240).must_equal 'A129' } # Data/Hora Geração do arquivo inválida
it { subject.get_codigo_motivo_ocorrencia('79', '03', 240).must_equal 'A130' } # Número Sequencial do arquivo inválido
it { subject.get_codigo_motivo_ocorrencia('80', '26', 240).must_equal 'A131' } # Versão do Lay out do arquivo inválido
it { subject.get_codigo_motivo_ocorrencia('81', '30', 240).must_equal 'A132' } # Literal REMESSA-TESTE - Válido só p/ fase testes
it { subject.get_codigo_motivo_ocorrencia('82', '02', 240).must_equal 'A133' } # Literal REMESSA-TESTE - Obrigatório p/ fase testes
it { subject.get_codigo_motivo_ocorrencia('83', '03', 240).must_equal 'A134' } # Tp Número Inscrição Empresa inválido
it { subject.get_codigo_motivo_ocorrencia('84', '26', 240).must_equal 'A135' } # Tipo de Operação inválido
it { subject.get_codigo_motivo_ocorrencia('85', '02', 240).must_equal 'A136' } # Tipo de serviço inválido
it { subject.get_codigo_motivo_ocorrencia('86', '03', 240).must_equal 'A137' } # Forma de lançamento inválido
it { subject.get_codigo_motivo_ocorrencia('87', '26', 240).must_equal 'A138' } # Número da remessa inválido
it { subject.get_codigo_motivo_ocorrencia('88', '30', 240).must_equal 'A139' } # Número da remessa menor/igual remessa anterior
it { subject.get_codigo_motivo_ocorrencia('89', '02', 240).must_equal 'A140' } # Lote de serviço divergente
it { subject.get_codigo_motivo_ocorrencia('90', '03', 240).must_equal 'A141' } # Número sequencial do registro inválido
it { subject.get_codigo_motivo_ocorrencia('91', '26', 240).must_equal 'A142' } # Erro seq de segmento do registro detalhe
it { subject.get_codigo_motivo_ocorrencia('92', '30', 240).must_equal 'A143' } # Cod movto divergente entre grupo de segm
it { subject.get_codigo_motivo_ocorrencia('93', '02', 240).must_equal 'A144' } # Qtde registros no lote inválido
it { subject.get_codigo_motivo_ocorrencia('94', '03', 240).must_equal 'A145' } # Qtde registros no lote divergente
it { subject.get_codigo_motivo_ocorrencia('95', '26', 240).must_equal 'A146' } # Qtde lotes no arquivo inválido
it { subject.get_codigo_motivo_ocorrencia('96', '30', 240).must_equal 'A147' } # Qtde lotes no arquivo divergente
it { subject.get_codigo_motivo_ocorrencia('97', '02', 240).must_equal 'A148' } # Qtde registros no arquivo inválido
it { subject.get_codigo_motivo_ocorrencia('98', '03', 240).must_equal 'A149' } # Qtde registros no arquivo divergente
it { subject.get_codigo_motivo_ocorrencia('99', '26', 240).must_equal 'A150' } # Código de DDD inválido
end
context "CÓDIGOS para oa Caixa com Motivo Ocorrência B para CNAB 240" do
it { subject.get_codigo_motivo_ocorrencia('12', '28', 240).must_equal 'B21' } # Redisponibilização de Arquivo Retorno Eletrônico
it { subject.get_codigo_motivo_ocorrencia('15', '28', 240).must_equal 'B22' } # Banco de Pagadores
it { subject.get_codigo_motivo_ocorrencia('17', '28', 240).must_equal 'B23' } # Entrega Aviso Disp Boleto via e-amail ao pagador (s/ emissão Boleto)
it { subject.get_codigo_motivo_ocorrencia('18', '28', 240).must_equal 'B24' } # Emissão de Boleto Pré-impresso CAIXA matricial
it { subject.get_codigo_motivo_ocorrencia('19', '28', 240).must_equal 'B25' } # Emissão de Boleto Pré-impresso CAIXA A4
it { subject.get_codigo_motivo_ocorrencia('20', '28', 240).must_equal 'B26' } # Emissão de Boleto Padrão CAIXA
it { subject.get_codigo_motivo_ocorrencia('21', '28', 240).must_equal 'B27' } # Emissão de Boleto/Carnê
it { subject.get_codigo_motivo_ocorrencia('31', '28', 240).must_equal 'B28' } # Emissão de Aviso de Vencido
it { subject.get_codigo_motivo_ocorrencia('42', '28', 240).must_equal 'B29' } # Alteração cadastral de dados do título - sem emissão de aviso
it { subject.get_codigo_motivo_ocorrencia('45', '28', 240).must_equal 'B30' } # Emissão de 2a via de Boleto Cobrança Registrada
end
context "CÓDIGOS para oa Caixa com Motivo Ocorrência C para CNAB 240" do
it { subject.get_codigo_motivo_ocorrencia('02', '06', 240).must_equal 'C100' } # Casa Lotérica
it { subject.get_codigo_motivo_ocorrencia('03', '09', 240).must_equal 'C101' } # Agências CAIXA
it { subject.get_codigo_motivo_ocorrencia('07', '17', 240).must_equal 'C102' } # Correspondente Bancário
end
context "CÓDIGOS para oa Caixa com Motivo Ocorrência C para CNAB 240 com Código 08" do
it { subject.get_codigo_motivo_ocorrencia('01', '08', 240).must_equal 'C35' } # Liquidação em Dinheiro
it { subject.get_codigo_motivo_ocorrencia('02', '08', 240).must_equal 'C36' } # Liquidação em Cheque
end
context "CÓDIGOS para oa Caixa com Motivo Ocorrência A para CNAB 400" do
it { subject.get_codigo_motivo_ocorrencia('01', '02', 400).must_equal 'A220' } # Movimento sem Beneficiário Correspondente
it { subject.get_codigo_motivo_ocorrencia('02', '03', 400).must_equal 'A221' } # Movimento sem Título Correspondente
it { subject.get_codigo_motivo_ocorrencia('08', '26', 400).must_equal 'A222' } # Movimento para título já com movimentação no dia
it { subject.get_codigo_motivo_ocorrencia('09', '30', 400).must_equal 'A223' } # Nosso Número não pertence ao Beneficiário
it { subject.get_codigo_motivo_ocorrencia('10', '30', 400).must_equal 'A224' } # Inclusão de título já existente na base
it { subject.get_codigo_motivo_ocorrencia('12', '26', 400).must_equal 'A225' } # Movimento duplicado
it { subject.get_codigo_motivo_ocorrencia('13', '03', 400).must_equal 'A116' } # Entrada Inválida para Cobrança Caucionada (Beneficiário não possui conta Caução)
it { subject.get_codigo_motivo_ocorrencia('20', '02', 400).must_equal 'A117' } # CEP do Pagador não encontrado (não foi possível a determinação da Agência Cobradora para o título)
it { subject.get_codigo_motivo_ocorrencia('21', '02', 400).must_equal 'A118' } # Agência cobradora não encontrada (agência designada para cobradora não cadastrada no sistema)
it { subject.get_codigo_motivo_ocorrencia('22', '03', 400).must_equal 'A119' } # Agência Beneficiário não encontrada (Agência do Beneficiário não cadastrada no sistema)
it { subject.get_codigo_motivo_ocorrencia('45', '26', 400).must_equal 'A226' } # Data de Vencimento com prazo superior ao limite
it { subject.get_codigo_motivo_ocorrencia('49', '30', 400).must_equal 'A227' } # Movimento inválido para título Baixado/Liquidado
it { subject.get_codigo_motivo_ocorrencia('50', '02', 400).must_equal 'A228' } # Movimento inválido para título enviado a Cartório
it { subject.get_codigo_motivo_ocorrencia('54', '03', 400).must_equal 'A229' } # Faixa de CEP da Agência Cobradora não abrange CEP do Pagador
it { subject.get_codigo_motivo_ocorrencia('55', '26', 400).must_equal 'A230' } # Título já com opção de Devolução
it { subject.get_codigo_motivo_ocorrencia('56', '30', 400).must_equal 'A231' } # Processo de Protesto em andamento
it { subject.get_codigo_motivo_ocorrencia('57', '26', 400).must_equal 'A232' } # Título já com opção de Protesto
it { subject.get_codigo_motivo_ocorrencia('58', '03', 400).must_equal 'A233' } # Processo de devolução em andamento
it { subject.get_codigo_motivo_ocorrencia('59', '30', 400).must_equal 'A234' } # Novo prazo p/ Protesto/Devolução inválido
it { subject.get_codigo_motivo_ocorrencia('76', '02', 400).must_equal 'A235' } # Alteração do prazo de protesto inválida
it { subject.get_codigo_motivo_ocorrencia('77', '03', 400).must_equal 'A236' } # Alteração do prazo de devolução inválida
it { subject.get_codigo_motivo_ocorrencia('81', '26', 400).must_equal 'A237' } # CEP do Pagador inválido
it { subject.get_codigo_motivo_ocorrencia('82', '02', 400).must_equal 'A238' } # CNPJ/CPF do Pagador inválido (dígito não confere)
it { subject.get_codigo_motivo_ocorrencia('83', '30', 400).must_equal 'A86' } # Número do Documento (seu número) inválido
it { subject.get_codigo_motivo_ocorrencia('84', '02', 400).must_equal 'A239' } # Protesto inválido para título sem Número do documento (seu número)
end
context "CÓDIGOS para oa Caixa com Motivo Ocorrência C para CNAB 400" do
it { subject.get_codigo_motivo_ocorrencia('002', '06', 400).must_equal 'C100' } # Unidade Lotérica
it { subject.get_codigo_motivo_ocorrencia('003', '09', 400).must_equal 'C101' } # Agências CAIXA
it { subject.get_codigo_motivo_ocorrencia('004', '101', 400).must_equal 'C04' } # Compensação Eletrônica
it { subject.get_codigo_motivo_ocorrencia('006', '151', 400).must_equal 'C06' } # Internet Banking
it { subject.get_codigo_motivo_ocorrencia('007', '152', 400).must_equal 'C102' } # Correspondente CAIXA aqui
it { subject.get_codigo_motivo_ocorrencia('008', '06', 400).must_equal 'C08' } # Em Cartório
it { subject.get_codigo_motivo_ocorrencia('009', '09', 400).must_equal 'C09' } # Comandada Banco
it { subject.get_codigo_motivo_ocorrencia('010', '101', 400).must_equal 'C10' } # Comandada Cliente via Arquivo
it { subject.get_codigo_motivo_ocorrencia('011', '151', 400).must_equal 'C11' } # Comandada Cliente On-line
end
end
describe "#get_codigo_movimento_retorno" do
context "CÓDIGOS para o cnab 400 da Caixa" do
it { subject.get_codigo_movimento_retorno('01', 400).must_equal '02' } # Entrada Confirmada
it { subject.get_codigo_movimento_retorno('02', 400).must_equal '09' } # Baixa Manual Confirmada
it { subject.get_codigo_movimento_retorno('03', 400).must_equal '12' } # Abatimento Concedido
it { subject.get_codigo_movimento_retorno('04', 400).must_equal '13' } # Abatimento Cancelado
it { subject.get_codigo_movimento_retorno('05', 400).must_equal '14' } # Vencimento Alterado
it { subject.get_codigo_movimento_retorno('06', 400).must_equal '146' } # Uso da Empresa Alterado
it { subject.get_codigo_movimento_retorno('08', 400).must_equal '147' } # Prazo de Devolução Alterado
it { subject.get_codigo_movimento_retorno('09', 400).must_equal '27' } # Alteração Confirmada
it { subject.get_codigo_movimento_retorno('10', 400).must_equal '148' } # Alteração com reemissão de Boleto Confirmada
it { subject.get_codigo_movimento_retorno('11', 400).must_equal '149' } # Alteração da opção de Protesto para Devolução Confirmada
it { subject.get_codigo_movimento_retorno('12', 400).must_equal '150' } # Alteração da opção de Devolução para Protesto Confirmada
it { subject.get_codigo_movimento_retorno('20', 400).must_equal '11' } # Em Ser
it { subject.get_codigo_movimento_retorno('21', 400).must_equal '06' } # Liquidação
it { subject.get_codigo_movimento_retorno('22', 400).must_equal '101' } # Liquidação em Cartório
it { subject.get_codigo_movimento_retorno('23', 400).must_equal '151' } # Baixa por Devolução
it { subject.get_codigo_movimento_retorno('25', 400).must_equal '152' } # Baixa por Protesto
it { subject.get_codigo_movimento_retorno('26', 400).must_equal '23' } # Título enviado para Cartório
it { subject.get_codigo_movimento_retorno('27', 400).must_equal '20' } # Sustação de Protesto
it { subject.get_codigo_movimento_retorno('28', 400).must_equal '153' } # Estorno de Protesto
it { subject.get_codigo_movimento_retorno('29', 400).must_equal '154' } # Estorno de Sustação de Protesto
it { subject.get_codigo_movimento_retorno('30', 400).must_equal '61' } # Alteração de Título
it { subject.get_codigo_movimento_retorno('31', 400).must_equal '108' } # Tarifa sobre Título Vencido
it { subject.get_codigo_movimento_retorno('32', 400).must_equal '155' } # Outras Tarifas de Alteração
it { subject.get_codigo_movimento_retorno('33', 400).must_equal '144' } # Estorno de Baixa / Liquidação
it { subject.get_codigo_movimento_retorno('34', 400).must_equal '156' } # Tarifas Diversas
end
end
describe "#get_codigo_protesto" do
context "CÓDIGOS para o Caixa" do
it { subject.get_codigo_protesto('3', 400).must_equal '2' } # Devolver (Não Protestar)
end
end
describe "#get_codigo_moeda" do
context "CÓDIGOS para o Caixa" do
it { subject.get_codigo_moeda('09', 400).must_equal '1' } # Real
end
end
describe '#get_distribuicao_boleto -> Para a caixa os códigos são diferentes da FEBRABAN' do
it "Quando o banco distribui deve converter o código para 2" do
subject.get_distribuicao_boleto('1').must_equal '2'
end
it "Quando o Cliente distribui deve converter o código para 0" do
subject.get_distribuicao_boleto('2').must_equal '0'
end
it "Quando envia e-mail deve manter o código 3" do
subject.get_distribuicao_boleto('3').must_equal '3'
end
it "Quando envia SMS deve manter o código 4" do
subject.get_distribuicao_boleto('4').must_equal '4'
end
end
end | 62.737805 | 183 | 0.738507 |
ed75fab1aff3b31cf0d44761b603ce2b982b112c | 1,165 | class BoardsController < ApplicationController
  before_action :authorize
skip_before_action :verify_authenticity_token
def new
@board = Board.new
render json: @board
end
def index
@boards = Board.all.where("user_id = ?", User.current_user.id)
render json: @boards
end
  def create
    # Build first, assign the owner, then save once (create would write an
    # incomplete record before user_id is set).
    @board = Board.new(board_params)
    @board.user_id = User.current_user.id
    if @board.save
      render json: @board
    end
  end
def edit
@board = Board.find(params[:id])
render json: @board
end
def update
@board = Board.find(params[:id])
@board.resources << Resource.find(params[:resource_id])
if @board.save
render json: @board
end
end
def show
@board = Board.find(params[:id])
render json: @board
end
def destroy
@board = Board.find(params[:id])
@board.destroy
end
def remove
@board = Board.find(params[:id])
@board.resources.delete(Resource.find(params[:resource_id]))
if @board.save
render json: @board
end
end
private
def board_params
params.require(:board).permit(:id, :name, :user_id, :resource_id, :resources => [])
end
end
| 19.745763 | 87 | 0.650644 |
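# Hypothetical routing sketch for the controller above -- not taken from the
# app's config/routes.rb. It assumes `update` receives :resource_id in the
# request body and `remove` is exposed as an extra member route:
#
#   resources :boards do
#     member do
#       delete :remove
#     end
#   end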
2164f9ef248dc92a20100a1217444c8a549ff4d0 | 1,725 | require_relative '../mikrotik_api'
Puppet::Type.type(:mikrotik_tool_netwatch).provide(:mikrotik_api, :parent => Puppet::Provider::Mikrotik_Api) do
confine :feature => :mtik
mk_resource_methods
def self.instances
watches = Puppet::Provider::Mikrotik_Api::get_all("/tool/netwatch")
instances = watches.collect { |watch| netwatchCheck(watch) }
instances
end
def self.netwatchCheck(data)
if data['disabled'] == "true"
state = :disabled
else
state = :enabled
end
new(
:ensure => :present,
:state => state,
:name => data['host'],
:interval => data['interval'],
:timeout => data['timeout'],
:down_script => data['down-script'],
:up_script => data['up-script'],
:comment => data['comment']
)
end
def flush
Puppet.debug("Flushing Netwatch check #{resource[:name]}")
params = {}
if @property_hash[:state] == :disabled
params["disabled"] = 'yes'
elsif @property_hash[:state] == :enabled
params["disabled"] = 'no'
end
params["host"] = resource[:name]
params["interval"] = resource[:interval] if ! resource[:interval].nil?
params["timeout"] = resource[:timeout] if ! resource[:timeout].nil?
params["down-script"] = resource[:down_script] if ! resource[:down_script].nil?
params["up-script"] = resource[:up_script] if ! resource[:up_script].nil?
params["comment"] = resource[:comment] if ! resource[:comment].nil?
lookup = {}
lookup["host"] = resource[:name]
Puppet.debug("Params: #{params.inspect} - Lookup: #{lookup.inspect}")
simple_flush("/tool/netwatch", params, lookup)
end
end
| 29.237288 | 111 | 0.608116 |
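# Illustrative manifest for the type this provider backs. Parameter names are
# inferred from the instances/flush methods above, not copied from the type's
# documentation, so treat this as a sketch:
#
#   mikrotik_tool_netwatch { '10.0.0.1':
#     state       => enabled,
#     interval    => '00:01:00',
#     timeout     => '1s',
#     down_script => '/log warning "gw down"',
#     up_script   => '/log info "gw up"',
#     comment     => 'managed by puppet',
#   }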
abe8419d565a229188a624fa251d13252f3edbdd | 303 | class Admin::BlogPostsCustomNewAndEditController < ApplicationController
layout 'admin'
admin_assistant_for BlogPost do |a|
a.actions :index, :show, :destroy
end
def new
render :text => 'custom form for new'
end
def edit
render :text => 'custom form for edit'
end
end
| 18.9375 | 72 | 0.689769 |
1ce91655f3138088466e268d55be7077723c5fb1 | 291 | require 'stringio'
require 'zlib'
describe "Zlib::GzipWriter#<<" do
before :each do
@io = StringIO.new
end
it "returns self" do
Zlib::GzipWriter.wrap @io do |gzio|
(gzio << "test").should equal(gzio)
end
end
it "needs to be reviewed for spec completeness"
end
| 17.117647 | 49 | 0.649485 |
1caa0a9187c09f8658fdf952b58eabd65d7dbd0a | 13,242 | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
require 'azure_mgmt_peering'
module Azure::Peering::Profiles::Latest
module Mgmt
LegacyPeerings = Azure::Peering::Mgmt::V2019_08_01_preview::LegacyPeerings
Operations = Azure::Peering::Mgmt::V2019_08_01_preview::Operations
PeerAsns = Azure::Peering::Mgmt::V2019_08_01_preview::PeerAsns
PeeringLocations = Azure::Peering::Mgmt::V2019_08_01_preview::PeeringLocations
Peerings = Azure::Peering::Mgmt::V2019_08_01_preview::Peerings
PeeringServiceLocations = Azure::Peering::Mgmt::V2019_08_01_preview::PeeringServiceLocations
PeeringServicePrefixes = Azure::Peering::Mgmt::V2019_08_01_preview::PeeringServicePrefixes
Prefixes = Azure::Peering::Mgmt::V2019_08_01_preview::Prefixes
PeeringServiceProviders = Azure::Peering::Mgmt::V2019_08_01_preview::PeeringServiceProviders
PeeringServices = Azure::Peering::Mgmt::V2019_08_01_preview::PeeringServices
module Models
PeeringBandwidthOffer = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringBandwidthOffer
CheckServiceProviderAvailabilityInput = Azure::Peering::Mgmt::V2019_08_01_preview::Models::CheckServiceProviderAvailabilityInput
PeeringLocationPropertiesDirect = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringLocationPropertiesDirect
BgpSession = Azure::Peering::Mgmt::V2019_08_01_preview::Models::BgpSession
ExchangePeeringFacility = Azure::Peering::Mgmt::V2019_08_01_preview::Models::ExchangePeeringFacility
SubResource = Azure::Peering::Mgmt::V2019_08_01_preview::Models::SubResource
PeeringLocationPropertiesExchange = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringLocationPropertiesExchange
ExchangeConnection = Azure::Peering::Mgmt::V2019_08_01_preview::Models::ExchangeConnection
DirectConnection = Azure::Peering::Mgmt::V2019_08_01_preview::Models::DirectConnection
PeeringPropertiesDirect = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringPropertiesDirect
PeeringListResult = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringListResult
PeeringPropertiesExchange = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringPropertiesExchange
Operation = Azure::Peering::Mgmt::V2019_08_01_preview::Models::Operation
ContactInfo = Azure::Peering::Mgmt::V2019_08_01_preview::Models::ContactInfo
PeeringLocationListResult = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringLocationListResult
PeerAsnListResult = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeerAsnListResult
ResourceTags = Azure::Peering::Mgmt::V2019_08_01_preview::Models::ResourceTags
PeeringSku = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringSku
Resource = Azure::Peering::Mgmt::V2019_08_01_preview::Models::Resource
OperationDisplayInfo = Azure::Peering::Mgmt::V2019_08_01_preview::Models::OperationDisplayInfo
PeeringServiceLocationListResult = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringServiceLocationListResult
PeeringServiceProviderListResult = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringServiceProviderListResult
DirectPeeringFacility = Azure::Peering::Mgmt::V2019_08_01_preview::Models::DirectPeeringFacility
ErrorResponse = Azure::Peering::Mgmt::V2019_08_01_preview::Models::ErrorResponse
OperationListResult = Azure::Peering::Mgmt::V2019_08_01_preview::Models::OperationListResult
PeeringServicePrefixListResult = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringServicePrefixListResult
PeeringServiceListResult = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringServiceListResult
Peering = Azure::Peering::Mgmt::V2019_08_01_preview::Models::Peering
PeerAsn = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeerAsn
PeeringLocation = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringLocation
PeeringServiceLocation = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringServiceLocation
PeeringServicePrefix = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringServicePrefix
PeeringServiceProvider = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringServiceProvider
PeeringService = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringService
Name = Azure::Peering::Mgmt::V2019_08_01_preview::Models::Name
Tier = Azure::Peering::Mgmt::V2019_08_01_preview::Models::Tier
Family = Azure::Peering::Mgmt::V2019_08_01_preview::Models::Family
Size = Azure::Peering::Mgmt::V2019_08_01_preview::Models::Size
Kind = Azure::Peering::Mgmt::V2019_08_01_preview::Models::Kind
SessionAddressProvider = Azure::Peering::Mgmt::V2019_08_01_preview::Models::SessionAddressProvider
ConnectionState = Azure::Peering::Mgmt::V2019_08_01_preview::Models::ConnectionState
SessionStateV4 = Azure::Peering::Mgmt::V2019_08_01_preview::Models::SessionStateV4
SessionStateV6 = Azure::Peering::Mgmt::V2019_08_01_preview::Models::SessionStateV6
DirectPeeringType = Azure::Peering::Mgmt::V2019_08_01_preview::Models::DirectPeeringType
ProvisioningState = Azure::Peering::Mgmt::V2019_08_01_preview::Models::ProvisioningState
ValidationState = Azure::Peering::Mgmt::V2019_08_01_preview::Models::ValidationState
PrefixValidationState = Azure::Peering::Mgmt::V2019_08_01_preview::Models::PrefixValidationState
LearnedType = Azure::Peering::Mgmt::V2019_08_01_preview::Models::LearnedType
end
#
# PeeringManagementClass
#
class PeeringManagementClass
attr_reader :legacy_peerings, :operations, :peer_asns, :peering_locations, :peerings, :peering_service_locations, :peering_service_prefixes, :prefixes, :peering_service_providers, :peering_services, :configurable, :base_url, :options, :model_classes
def initialize(options = {})
if options.is_a?(Hash) && options.length == 0
@options = setup_default_options
else
@options = options
end
reset!(options)
@configurable = self
@base_url = options[:base_url].nil? ? nil:options[:base_url]
@options = options[:options].nil? ? nil:options[:options]
@client_0 = Azure::Peering::Mgmt::V2019_08_01_preview::PeeringClient.new(configurable.credentials, base_url, options)
if(@client_0.respond_to?(:subscription_id))
@client_0.subscription_id = configurable.subscription_id
end
add_telemetry(@client_0)
@legacy_peerings = @client_0.legacy_peerings
@operations = @client_0.operations
@peer_asns = @client_0.peer_asns
@peering_locations = @client_0.peering_locations
@peerings = @client_0.peerings
@peering_service_locations = @client_0.peering_service_locations
@peering_service_prefixes = @client_0.peering_service_prefixes
@prefixes = @client_0.prefixes
@peering_service_providers = @client_0.peering_service_providers
@peering_services = @client_0.peering_services
@model_classes = ModelClasses.new
end
def add_telemetry(client)
profile_information = 'Profiles/Latest/Peering/Mgmt'
client.add_user_agent_information(profile_information)
end
def method_missing(method, *args)
if @client_0.respond_to?method
@client_0.send(method, *args)
else
super
end
end
end
class ModelClasses
def peering_bandwidth_offer
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringBandwidthOffer
end
def check_service_provider_availability_input
Azure::Peering::Mgmt::V2019_08_01_preview::Models::CheckServiceProviderAvailabilityInput
end
def peering_location_properties_direct
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringLocationPropertiesDirect
end
def bgp_session
Azure::Peering::Mgmt::V2019_08_01_preview::Models::BgpSession
end
def exchange_peering_facility
Azure::Peering::Mgmt::V2019_08_01_preview::Models::ExchangePeeringFacility
end
def sub_resource
Azure::Peering::Mgmt::V2019_08_01_preview::Models::SubResource
end
def peering_location_properties_exchange
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringLocationPropertiesExchange
end
def exchange_connection
Azure::Peering::Mgmt::V2019_08_01_preview::Models::ExchangeConnection
end
def direct_connection
Azure::Peering::Mgmt::V2019_08_01_preview::Models::DirectConnection
end
def peering_properties_direct
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringPropertiesDirect
end
def peering_list_result
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringListResult
end
def peering_properties_exchange
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringPropertiesExchange
end
def operation
Azure::Peering::Mgmt::V2019_08_01_preview::Models::Operation
end
def contact_info
Azure::Peering::Mgmt::V2019_08_01_preview::Models::ContactInfo
end
def peering_location_list_result
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringLocationListResult
end
def peer_asn_list_result
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeerAsnListResult
end
def resource_tags
Azure::Peering::Mgmt::V2019_08_01_preview::Models::ResourceTags
end
def peering_sku
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringSku
end
def resource
Azure::Peering::Mgmt::V2019_08_01_preview::Models::Resource
end
def operation_display_info
Azure::Peering::Mgmt::V2019_08_01_preview::Models::OperationDisplayInfo
end
def peering_service_location_list_result
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringServiceLocationListResult
end
def peering_service_provider_list_result
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringServiceProviderListResult
end
def direct_peering_facility
Azure::Peering::Mgmt::V2019_08_01_preview::Models::DirectPeeringFacility
end
def error_response
Azure::Peering::Mgmt::V2019_08_01_preview::Models::ErrorResponse
end
def operation_list_result
Azure::Peering::Mgmt::V2019_08_01_preview::Models::OperationListResult
end
def peering_service_prefix_list_result
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringServicePrefixListResult
end
def peering_service_list_result
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringServiceListResult
end
def peering
Azure::Peering::Mgmt::V2019_08_01_preview::Models::Peering
end
def peer_asn
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeerAsn
end
def peering_location
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringLocation
end
def peering_service_location
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringServiceLocation
end
def peering_service_prefix
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringServicePrefix
end
def peering_service_provider
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringServiceProvider
end
def peering_service
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PeeringService
end
def name
Azure::Peering::Mgmt::V2019_08_01_preview::Models::Name
end
def tier
Azure::Peering::Mgmt::V2019_08_01_preview::Models::Tier
end
def family
Azure::Peering::Mgmt::V2019_08_01_preview::Models::Family
end
def size
Azure::Peering::Mgmt::V2019_08_01_preview::Models::Size
end
def kind
Azure::Peering::Mgmt::V2019_08_01_preview::Models::Kind
end
def session_address_provider
Azure::Peering::Mgmt::V2019_08_01_preview::Models::SessionAddressProvider
end
def connection_state
Azure::Peering::Mgmt::V2019_08_01_preview::Models::ConnectionState
end
def session_state_v4
Azure::Peering::Mgmt::V2019_08_01_preview::Models::SessionStateV4
end
def session_state_v6
Azure::Peering::Mgmt::V2019_08_01_preview::Models::SessionStateV6
end
def direct_peering_type
Azure::Peering::Mgmt::V2019_08_01_preview::Models::DirectPeeringType
end
def provisioning_state
Azure::Peering::Mgmt::V2019_08_01_preview::Models::ProvisioningState
end
def validation_state
Azure::Peering::Mgmt::V2019_08_01_preview::Models::ValidationState
end
def prefix_validation_state
Azure::Peering::Mgmt::V2019_08_01_preview::Models::PrefixValidationState
end
def learned_type
Azure::Peering::Mgmt::V2019_08_01_preview::Models::LearnedType
end
end
end
end
| 48.863469 | 255 | 0.738786 |
ff9e9c9068ab4f4bf30987098e195c6f12a5a302 | 499 | # Be sure to restart your server when you modify this file.
# Your secret key for verifying the integrity of signed cookies.
# If you change this key, all old signed cookies will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
Dummy::Application.config.secret_key_base = '86cb455c4df276e3134772b90d6c12c80be606777b4b5d701deb934afa8ab74899a91691d870b3c42b0b2905fabdab15ceed7fa1a7c49c588264079e43d1e350'
| 62.375 | 174 | 0.833667 |
333b35a2d9c51a4f4561f1f664b5de60a6ae9236 | 445 | # frozen_string_literal: true
class User
attr_reader :attributes
def initialize
@attributes = {}
end
  # Unknown writer calls (e.g. user.email = "...") are stored in @attributes;
  # unknown readers return the stored value (or nil).
  def method_missing(name, *args, &block)
    if name.to_s.end_with?("=")
      @attributes[name.to_s[0..-2].to_sym] = args.first
    else
      @attributes[name]
    end
  end

  # Pair method_missing with respond_to_missing? so respond_to? stays accurate
  # and keeps its documented (name, include_private) arity.
  def respond_to_missing?(_name, _include_private = false)
    true
  end
end
user = User.new
p user.respond_to?(:email)
p user.respond_to?(:email=)
p user.respond_to?(:upcase)
p user.upcase
| 15.344828 | 55 | 0.660674 |
7987fe4614917832b363532ba6bee583259b52b8 | 49 | module ImBoredCliProject
VERSION = "0.1.0"
end
| 12.25 | 24 | 0.734694 |
1dd828747a600716579e8775270cfd52bd731b8e | 1,616 | # Copyright (c) Universidade Federal Fluminense (UFF).
# This file is part of SAPOS. Please, consult the license terms in the LICENSE file.
# Read about factories at https://github.com/thoughtbot/factory_bot
FactoryBot.define do
factory :role do
sequence :name do |name|
"Role_#{name}"
end
sequence :description do |name|
"RoleDescription_#{name}"
end
factory :role_administrador do
id { Role::ROLE_ADMINISTRADOR }
name { "Administrador" }
description { "Descricao Administrador" }
initialize_with {Role.where(id: id).first_or_create(name: name, description: description)}
end
factory :role_coordenacao do
id { Role::ROLE_COORDENACAO }
name { "Coordenacao" }
description { "Descricao Coordenacao" }
initialize_with {Role.where(id: id).first_or_create(name: name, description: description)}
end
factory :role_secretaria do
id { Role::ROLE_SECRETARIA }
name { "Secretaria" }
description { "Descricao Secretaria" }
initialize_with {Role.where(id: id).first_or_create(name: name, description: description)}
end
factory :role_professor do
id { Role::ROLE_PROFESSOR }
name { "Professor" }
description { "Descricao Professor" }
initialize_with {Role.where(id: id).first_or_create(name: name, description: description)}
end
factory :role_aluno do
id { Role::ROLE_ALUNO }
name { "Aluno" }
description { "Descricao Aluno" }
initialize_with {Role.where(id: id).first_or_create(name: name, description: description)}
end
end
end
| 31.686275 | 96 | 0.67698 |
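# Minimal usage sketch for the factories above (assumes the app's test
# environment -- factory_bot, the Role model and a database -- is loaded).
role = FactoryBot.create(:role_professor)
role.name                                          # => "Professor"
# initialize_with + first_or_create keeps the fixed-id factories idempotent:
FactoryBot.create(:role_professor).id == role.id   # => true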
b99fb3a99a77f9536be02360ba9238d90be98417 | 1,558 | class OfficerAssignment < ApplicationRecord
belongs_to :account
belongs_to :officer
belongs_to :user
attr_accessor :start_date_str, :end_date_str
  scope :active, ->{ where(arel_table[:end_date].eq(nil).or(arel_table[:end_date].gteq(Time.zone.now))) }
  scope :inactive, ->{ where(arel_table[:end_date].lt(Time.zone.now)) }
after_save ->{ user.update_role_from_roles(account) }
START_REASONS_HASH = {
"Election by membership" => 'election-by-membership',
"Appointment by board" => 'appointment-by-board'
}
END_REASONS_HASH = {
"End of term" => 'end-of-term',
"Resignation" => 'resignation',
"Termination" => 'termination'
}
validate :dates_and_times_are_valid
def set_accessors
self.start_date_str = self.start_date.strftime("%m/%d/%Y") if self.start_date
self.end_date_str = self.end_date.strftime("%m/%d/%Y") if self.end_date
end
def dates_and_times_are_valid
valid_start_date = validate_date(:start_date)
valid_end_date = validate_date(:end_date)
if valid_start_date && valid_end_date
return if start_date.blank? && end_date.blank?
if start_date.present? && end_date.present?
if start_date > end_date
errors.add(:base, "the start date should be before the end date")
end
else
errors.add(:base, "both start and end dates (or neither) must be present")
end
end
end
def active?
start_date &&
end_date &&
Time.zone.now >= start_date &&
Time.zone.now <= end_date
end
end | 28.851852 | 103 | 0.665597 |
624d01a196bbd1bb0d0d95a65a19899eb3872fbe | 1,325 | #
# Be sure to run `pod lib lint RubyCore.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'RubyCore'
s.version = '0.1.0'
s.summary = 'Ruby Interface For Objective-C'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
ruby interface for Objective-C
DESC
s.homepage = 'https://github.com/stephenwzl/RubyCore'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'summerbabybiu' => '[email protected]' }
s.source = { :git => 'https://github.com/stephenwzl/RubyCore.git', :tag => s.version.to_s }
s.ios.deployment_target = '8.0'
s.source_files = 'RubyCore/Classes/**/*'
s.dependency 'mruby', '~> 1.3'
end
| 37.857143 | 103 | 0.639245 |
f747525eed8bd99360fe01d35cc7a1609c7d304c | 266 | # frozen_string_literal: true
require "#{File.dirname(__FILE__)}/../../test_helper"
module BlogEngine
class ImageTest < ActiveSupport::TestCase
fixtures :images
# Replace this with your real tests.
def test_truth
assert true
end
end
end
| 17.733333 | 53 | 0.703008 |
ab393c304cf1a33fca4c01483588526073ab4c73 | 108 | require 'rubygems'
require 'redis'
$TESTING = true
$:.unshift File.join(File.dirname(__FILE__), '..', 'lib') | 27 | 57 | 0.694444 |
5da9db83a8e3a1fcaeffba544bae52c5e15a528b | 5,020 | #
# Be sure to run `pod spec lint ownspec.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
# ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# These will help people to find your library, and whilst it
# can feel like a chore to fill in it's definitely to your advantage. The
# summary should be tweet-length, and the description more in depth.
#
s.name = "ownspec"
s.version = "0.0.1"
s.summary = "A short description of ownspec."
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
DESC
s.homepage = "http://EXAMPLE/ownspec"
# s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
# ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Licensing your code is important. See http://choosealicense.com for more info.
# CocoaPods will detect a license file if there is a named LICENSE*
# Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
#
s.license = "MIT (example)"
# s.license = { :type => "MIT", :file => "FILE_LICENSE" }
# ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the authors of the library, with email addresses. Email addresses
# of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
# accepts just a name if you'd rather not provide an email address.
#
# Specify a social_media_url where others can refer to, for example a twitter
# profile URL.
#
s.author = { "xuyang" => "[email protected]" }
# Or just: s.author = "xuyang"
# s.authors = { "xuyang" => "[email protected]" }
# s.social_media_url = "http://twitter.com/xuyang"
# ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If this Pod runs only on iOS or OS X, then specify the platform and
# the deployment target. You can optionally include the target after the platform.
#
# s.platform = :ios
# s.platform = :ios, "5.0"
# When using multiple platforms
# s.ios.deployment_target = "5.0"
# s.osx.deployment_target = "10.7"
# s.watchos.deployment_target = "2.0"
# s.tvos.deployment_target = "9.0"
# ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the location from where the source should be retrieved.
# Supports git, hg, bzr, svn and HTTP.
#
s.source = { :git => "http://EXAMPLE/ownspec.git", :tag => "#{s.version}" }
# ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# CocoaPods is smart about how it includes source code. For source files
# giving a folder will include any swift, h, m, mm, c & cpp files.
# For header files it will include any header in the folder.
# Not including the public_header_files will make all headers public.
#
s.source_files = "Classes", "Classes/**/*.{h,m}"
s.exclude_files = "Classes/Exclude"
# s.public_header_files = "Classes/**/*.h"
# ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# A list of resources included with the Pod. These are copied into the
# target bundle with a build phase script. Anything else will be cleaned.
# You can preserve files from being cleaned, please don't preserve
# non-essential files like tests, examples and documentation.
#
# s.resource = "icon.png"
# s.resources = "Resources/*.png"
# s.preserve_paths = "FilesToSave", "MoreFilesToSave"
# ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
# s.framework = "SomeFramework"
# s.frameworks = "SomeFramework", "AnotherFramework"
# s.library = "iconv"
# s.libraries = "iconv", "xml2"
# ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If your library depends on compiler flags you can set them in the xcconfig hash
# where they will only apply to your library. If you depend on other Podspecs
# you can include multiple dependencies to ensure it works.
# s.requires_arc = true
# s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
# s.dependency "JSONKit", "~> 1.4"
end
| 36.376812 | 93 | 0.587849 |
1ad6879df327d0da182955f5a1e343ce1647bdb9 | 1,328 | # frozen_string_literal: true
require 'jekyll'
require 'jekyll-fa-icons/version'
# TODO :
# * deal with other tags like color, etc.
# * make DEFAULT_ICONS configurable from _config.yaml / Jekyll.configuration({})
class FontAwesomeIcons < Liquid::Tag
DEFAULT_ICONS = { 'gh' => 'fab fa-brands fa-github',
'k8s' => 'fas fa-solid fa-dharmachakra',
'rb' => 'fas fa-solid fa-gem',
'sc' => 'fa-duotone fa-maximize',
'wiki' => 'fab fa-brands fa-wikipedia-w',
'linux' => 'fab fa-brands fa-linux',
'rhel' => 'fa-brands fa-redhat',
'dell' => 'fas fa-laptop-code' }
def initialize(tagName, input, tokens)
super
@input = input
end
  def render(context)
    # GitHub is the most used icon for me
    if @input.nil? || @input.strip.empty?
      icon = DEFAULT_ICONS['gh']
    else
      # Font Awesome codes are always 2 words (e.g. <i class="fab fa-500px"></i>)
      # Fewer means it's one of the shortcuts above
      if @input.split.length < 2
        icon = DEFAULT_ICONS[@input.strip] # be sure to remove trailing spaces
      else
        icon = @input.strip
      end
    end
    %(<i class="#{icon}"></i>)
  end
Liquid::Template.register_tag('fai', self)
end
| 31.619048 | 82 | 0.558735 |
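# Usage sketch for the {% fai %} tag above, as it would appear in a Jekyll
# page or post (rendered output shown for the icon table defined above):
#
#   {% fai %}             => <i class="fab fa-brands fa-github"></i>   (default)
#   {% fai k8s %}         => <i class="fas fa-solid fa-dharmachakra"></i>
#   {% fai fas fa-user %} => passed through as <i class="fas fa-user"></i>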
62d250f50508fb87bf702e5e273575f9bd4cc0c0 | 45 | module TestRedditkit
VERSION = "0.0.1"
end
| 11.25 | 20 | 0.711111 |
03f675b690c754e58f9f344f2e4dd98be42d4dbe | 16,468 | # encoding: utf-8
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
require 'azure_mgmt_machine_learning_services'
module Azure::Profiles::Latest
module MachineLearningServices
module Mgmt
Operations = Azure::MachineLearningServices::Mgmt::V2019_05_01::Operations
Workspaces = Azure::MachineLearningServices::Mgmt::V2019_05_01::Workspaces
Usages = Azure::MachineLearningServices::Mgmt::V2019_05_01::Usages
VirtualMachineSizes = Azure::MachineLearningServices::Mgmt::V2019_05_01::VirtualMachineSizes
MachineLearningCompute = Azure::MachineLearningServices::Mgmt::V2019_05_01::MachineLearningCompute
module Models
AKSProperties = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AKSProperties
OperationDisplay = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::OperationDisplay
DataLakeAnalyticsProperties = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::DataLakeAnalyticsProperties
OperationListResult = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::OperationListResult
PaginatedComputeResourcesList = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::PaginatedComputeResourcesList
WorkspaceUpdateParameters = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::WorkspaceUpdateParameters
SystemService = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::SystemService
Usage = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Usage
SslConfiguration = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::SslConfiguration
VirtualMachineSize = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::VirtualMachineSize
AksNetworkingConfiguration = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AksNetworkingConfiguration
WorkspaceListResult = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::WorkspaceListResult
Operation = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Operation
Resource = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Resource
Password = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Password
ScaleSettings = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ScaleSettings
ListWorkspaceKeysResult = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ListWorkspaceKeysResult
UserAccountCredentials = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::UserAccountCredentials
ErrorResponse = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ErrorResponse
NodeStateCounts = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::NodeStateCounts
Compute = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Compute
AmlComputeProperties = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AmlComputeProperties
UsageName = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::UsageName
ComputeSecrets = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ComputeSecrets
VirtualMachineSizeListResult = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::VirtualMachineSizeListResult
VirtualMachineSshCredentials = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::VirtualMachineSshCredentials
ResourceId = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ResourceId
VirtualMachineProperties = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::VirtualMachineProperties
ErrorDetail = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ErrorDetail
AmlComputeNodeInformation = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AmlComputeNodeInformation
ServicePrincipalCredentials = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ServicePrincipalCredentials
ListUsagesResult = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ListUsagesResult
HDInsightProperties = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::HDInsightProperties
RegistryListCredentialsResult = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::RegistryListCredentialsResult
ComputeNodesInformation = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ComputeNodesInformation
Identity = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Identity
ClusterUpdateParameters = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ClusterUpdateParameters
MachineLearningServiceError = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::MachineLearningServiceError
DatabricksProperties = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::DatabricksProperties
Workspace = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Workspace
ComputeResource = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ComputeResource
AKS = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AKS
AmlCompute = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AmlCompute
VirtualMachine = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::VirtualMachine
HDInsight = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::HDInsight
DataFactory = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::DataFactory
Databricks = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Databricks
DataLakeAnalytics = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::DataLakeAnalytics
AmlComputeNodesInformation = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AmlComputeNodesInformation
AksComputeSecrets = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AksComputeSecrets
VirtualMachineSecrets = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::VirtualMachineSecrets
DatabricksComputeSecrets = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::DatabricksComputeSecrets
ProvisioningState = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ProvisioningState
UsageUnit = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::UsageUnit
ResourceIdentityType = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ResourceIdentityType
VmPriority = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::VmPriority
AllocationState = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AllocationState
ComputeType = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ComputeType
UnderlyingResourceAction = Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::UnderlyingResourceAction
end
class MachineLearningServicesManagementClass
attr_reader :operations, :workspaces, :usages, :virtual_machine_sizes, :machine_learning_compute, :configurable, :base_url, :options, :model_classes
def initialize(configurable, base_url=nil, options=nil)
@configurable, @base_url, @options = configurable, base_url, options
@client_0 = Azure::MachineLearningServices::Mgmt::V2019_05_01::MachineLearningServicesClient.new(configurable.credentials, base_url, options)
if(@client_0.respond_to?(:subscription_id))
@client_0.subscription_id = configurable.subscription_id
end
add_telemetry(@client_0)
@operations = @client_0.operations
@workspaces = @client_0.workspaces
@usages = @client_0.usages
@virtual_machine_sizes = @client_0.virtual_machine_sizes
@machine_learning_compute = @client_0.machine_learning_compute
@model_classes = ModelClasses.new
end
def add_telemetry(client)
profile_information = "Profiles/azure_sdk/#{Azure::VERSION}/Latest/MachineLearningServices/Mgmt"
client.add_user_agent_information(profile_information)
end
def method_missing(method, *args)
if @client_0.respond_to?method
@client_0.send(method, *args)
else
super
end
end
class ModelClasses
def aksproperties
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AKSProperties
end
def operation_display
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::OperationDisplay
end
def data_lake_analytics_properties
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::DataLakeAnalyticsProperties
end
def operation_list_result
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::OperationListResult
end
def paginated_compute_resources_list
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::PaginatedComputeResourcesList
end
def workspace_update_parameters
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::WorkspaceUpdateParameters
end
def system_service
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::SystemService
end
def usage
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Usage
end
def ssl_configuration
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::SslConfiguration
end
def virtual_machine_size
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::VirtualMachineSize
end
def aks_networking_configuration
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AksNetworkingConfiguration
end
def workspace_list_result
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::WorkspaceListResult
end
def operation
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Operation
end
def resource
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Resource
end
def password
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Password
end
def scale_settings
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ScaleSettings
end
def list_workspace_keys_result
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ListWorkspaceKeysResult
end
def user_account_credentials
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::UserAccountCredentials
end
def error_response
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ErrorResponse
end
def node_state_counts
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::NodeStateCounts
end
def compute
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Compute
end
def aml_compute_properties
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AmlComputeProperties
end
def usage_name
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::UsageName
end
def compute_secrets
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ComputeSecrets
end
def virtual_machine_size_list_result
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::VirtualMachineSizeListResult
end
def virtual_machine_ssh_credentials
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::VirtualMachineSshCredentials
end
def resource_id
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ResourceId
end
def virtual_machine_properties
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::VirtualMachineProperties
end
def error_detail
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ErrorDetail
end
def aml_compute_node_information
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AmlComputeNodeInformation
end
def service_principal_credentials
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ServicePrincipalCredentials
end
def list_usages_result
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ListUsagesResult
end
def hdinsight_properties
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::HDInsightProperties
end
def registry_list_credentials_result
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::RegistryListCredentialsResult
end
def compute_nodes_information
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ComputeNodesInformation
end
def identity
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Identity
end
def cluster_update_parameters
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ClusterUpdateParameters
end
def machine_learning_service_error
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::MachineLearningServiceError
end
def databricks_properties
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::DatabricksProperties
end
def workspace
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Workspace
end
def compute_resource
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ComputeResource
end
def aks
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AKS
end
def aml_compute
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AmlCompute
end
def virtual_machine
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::VirtualMachine
end
def hdinsight
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::HDInsight
end
def data_factory
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::DataFactory
end
def databricks
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::Databricks
end
def data_lake_analytics
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::DataLakeAnalytics
end
def aml_compute_nodes_information
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AmlComputeNodesInformation
end
def aks_compute_secrets
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AksComputeSecrets
end
def virtual_machine_secrets
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::VirtualMachineSecrets
end
def databricks_compute_secrets
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::DatabricksComputeSecrets
end
def provisioning_state
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ProvisioningState
end
def usage_unit
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::UsageUnit
end
def resource_identity_type
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ResourceIdentityType
end
def vm_priority
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::VmPriority
end
def allocation_state
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::AllocationState
end
def compute_type
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::ComputeType
end
def underlying_resource_action
Azure::MachineLearningServices::Mgmt::V2019_05_01::Models::UnderlyingResourceAction
end
end
end
end
end
end
| 56.013605 | 156 | 0.716723 |
2177d9ae74d2fac6728899a49f39e360065a9b76 | 430 | cask 'robofont' do
version '3.1'
sha256 '82e03426bcae68c0712b6733f177c1f5b4376508eee67836be1729dbbc7ca77e'
# static.typemytype.com/robofont was verified as official when first introduced to the cask
url 'http://static.typemytype.com/robofont/RoboFont.dmg'
appcast 'https://doc.robofont.com/appcast.xml'
name 'RoboFont'
homepage 'https://robofont.com/'
depends_on macos: '>= :mavericks'
app 'RoboFont.app'
end
| 28.666667 | 93 | 0.760465 |
4a6fc84828b35beffa52b2670aebf41115a8c492 | 2,062 | module Prawn
module AutoLayout
class FrameLayouter
def initialize(document, &block)
@document = document
@contents = block
end
def layout(bounds)
within_bounding_box(bounds) do
do_layout(bounds)
end
end
private
def do_layout(bounds)
@contents.call
end
def within_bounding_box(bounds, &block)
@document.bounding_box(bounds.top_left, width: bounds.width, height: bounds.height, &block)
end
end
class ContainerLayouter < FrameLayouter
def initialize(document, &block)
super(document, &block)
@layouters = []
end
def columns(&block)
@layouters << ColumnsLayouter.new(@document, &block)
end
def rows(&block)
@layouters << RowsLayouter.new(@document, &block)
end
def frame(&block)
@layouters << FrameLayouter.new(@document, &block)
end
private
def do_layout(bounds)
super(bounds)
@layouters.each { |l| l.layout(bounds) }
end
end
class ColumnsLayouter < ContainerLayouter
end
class RowsLayouter < ContainerLayouter
end
module Extensions
def columns(&block)
@current_layouter.columns(&block)
end
def rows(&block)
@current_layouter.rows(&block)
end
def frame(&block)
@current_layouter.frame(&block)
end
private
def layout(bounds)
@current_layouter.layout(bounds)
end
def init_layout(&block)
@current_layouter = ContainerLayouter.new(self, &block)
end
end
end
class Document
def self.generate_with_autolayout(filename, options={}, &block)
generate(filename, options) do
if block
class << self
include AutoLayout::Extensions
end
init_layout do
block.arity < 1 ? instance_eval(&block) : block[self]
end
layout(bounds)
end
end
end
end
end
| 19.271028 | 99 | 0.587779 |
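# Minimal usage sketch for the DSL above (assumes prawn is installed and the
# file above has been required). ColumnsLayouter/RowsLayouter are still empty
# placeholders, so this only shows how the block syntax is wired, not real
# column/row positioning; the output filename is illustrative.
require 'prawn'

Prawn::Document.generate_with_autolayout('autolayout_demo.pdf') do
  rows do
    frame { text 'Header' }
  end
  columns do
    frame { text 'Left column' }
    frame { text 'Right column' }
  end
end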
877641a5509dddd63eceae8e7d4688cda2baeaa0 | 9,288 | require 'addressable'
require 'active_support/core_ext/object/try'
require 'active_support/core_ext/module/attribute_accessors'
require 'cdo/aws/s3'
require 'honeybadger'
#
# BucketHelper
#
class BucketHelper
cattr_accessor :s3
def initialize(bucket, base_dir)
@bucket = bucket
@base_dir = base_dir
self.s3 ||= AWS::S3.create_client
end
def allowed_file_type?(extension)
allowed_file_types.include? extension.downcase
end
def allowed_file_types
[]
end
# Ignore client-specified mime type. Infer it from file extension when serving
# assets.
def category_from_file_type(extension)
mime_type = Sinatra::Base.mime_type(extension)
if mime_type == 'application/pdf'
'pdf'
elsif ['.doc', '.docx'].include? extension
'doc'
else
mime_type.try(:split, '/').try(:first)
end
end
# How long an object retrieved from this bucket should be cached
def cache_duration_seconds
0
end
def app_size(encrypted_channel_id)
owner_id, channel_id = storage_decrypt_channel_id(encrypted_channel_id)
prefix = s3_path owner_id, channel_id
s3.list_objects(bucket: @bucket, prefix: prefix).contents.map(&:size).reduce(:+).to_i
end
#
# Retrieve the total asset size of an app and the size of an individual object
# within that app with a single S3 request.
#
# @param [String] encrypted_channel_id - Token identifying app channel to read.
# @param [String] target_object - S3 key relative to channel of the single
# object whose size we should return.
# @return [[Int, Int]] size of target_object and size of entire app
def object_and_app_size(encrypted_channel_id, target_object)
owner_id, channel_id = storage_decrypt_channel_id(encrypted_channel_id)
app_prefix = s3_path owner_id, channel_id
target_object_prefix = s3_path owner_id, channel_id, target_object
objects = s3.list_objects(bucket: @bucket, prefix: app_prefix).contents
target_object = objects.find {|x| x.key == target_object_prefix}
app_size = objects.map(&:size).reduce(:+).to_i
object_size = target_object.nil? ? nil : target_object.size.to_i
[object_size, app_size]
end
def list(encrypted_channel_id)
owner_id, channel_id = storage_decrypt_channel_id(encrypted_channel_id)
prefix = s3_path owner_id, channel_id
s3.list_objects(bucket: @bucket, prefix: prefix).contents.map do |fileinfo|
filename = %r{#{prefix}(.+)$}.match(fileinfo.key)[1]
category = category_from_file_type(File.extname(filename))
{filename: filename, category: category, size: fileinfo.size}
end
end
def get(encrypted_channel_id, filename, if_modified_since = nil, version = nil)
begin
owner_id, channel_id = storage_decrypt_channel_id(encrypted_channel_id)
rescue ArgumentError, OpenSSL::Cipher::CipherError
return {status: 'NOT_FOUND'}
end
key = s3_path owner_id, channel_id, filename
begin
s3_object = s3.get_object(bucket: @bucket, key: key, if_modified_since: if_modified_since, version_id: version)
{status: 'FOUND', body: s3_object.body, version_id: s3_object.version_id, last_modified: s3_object.last_modified, metadata: s3_object.metadata}
rescue Aws::S3::Errors::NotModified
{status: 'NOT_MODIFIED'}
rescue Aws::S3::Errors::NoSuchKey
{status: 'NOT_FOUND'}
rescue Aws::S3::Errors::NoSuchVersion
{status: 'NOT_FOUND'}
rescue Aws::S3::Errors::InvalidArgument
# Can happen when passed an invalid S3 version id
{status: 'NOT_FOUND'}
end
end
  def get_abuse_score(encrypted_channel_id, filename, version = nil)
    response = get(encrypted_channel_id, filename, nil, version)
    metadata = response && response[:metadata]
    if metadata.nil?
      # Missing files (and responses without metadata) count as not abusive.
      0
    else
      [metadata['abuse_score'].to_i, metadata['abuse-score'].to_i].max
    end
  end
def copy_files(src_channel, dest_channel, options={})
src_owner_id, src_channel_id = storage_decrypt_channel_id(src_channel)
dest_owner_id, dest_channel_id = storage_decrypt_channel_id(dest_channel)
src_prefix = s3_path src_owner_id, src_channel_id
result = s3.list_objects(bucket: @bucket, prefix: src_prefix).contents.map do |fileinfo|
filename = %r{#{src_prefix}(.+)$}.match(fileinfo.key)[1]
next unless (!options[:filenames] && (!options[:exclude_filenames] || !options[:exclude_filenames].include?(filename))) || options[:filenames].try(:include?, filename)
mime_type = Sinatra::Base.mime_type(File.extname(filename))
category = mime_type.split('/').first # e.g. 'image' or 'audio'
src = "#{@bucket}/#{src_prefix}#{filename}"
dest = s3_path dest_owner_id, dest_channel_id, filename
# Temporary: Add additional context to exceptions reported here, to help
# diagnose a recurring issue where we pass a bad copy_source to the S3
# API on remix.
# https://app.honeybadger.io/projects/3240/faults/35329035/8aba7532-c087-11e7-8280-13b5745130ae
Honeybadger.context(
{
copy_source: URI.encode(src),
copy_dest_bucket: @bucket,
copy_dest_key: dest
}
)
response = s3.copy_object(bucket: @bucket, key: dest, copy_source: URI.encode(src), metadata_directive: 'REPLACE')
{filename: filename, category: category, size: fileinfo.size, versionId: response.version_id}
end
result.compact
end
def restore_file_version(encrypted_channel_id, filename, version)
owner_id, channel_id = storage_decrypt_channel_id(encrypted_channel_id)
key = s3_path owner_id, channel_id, filename
s3.copy_object(bucket: @bucket, copy_source: URI.encode("#{@bucket}/#{key}?versionId=#{version}"), key: key, metadata_directive: 'REPLACE')
end
def replace_abuse_score(encrypted_channel_id, filename, abuse_score)
owner_id, channel_id = storage_decrypt_channel_id(encrypted_channel_id)
key = s3_path owner_id, channel_id, filename
s3.copy_object(bucket: @bucket, copy_source: URI.encode("#{@bucket}/#{key}"), key: key, metadata: {abuse_score: abuse_score.to_s}, metadata_directive: 'REPLACE')
end
def create_or_replace(encrypted_channel_id, filename, body, version = nil, abuse_score = 0)
owner_id, channel_id = storage_decrypt_channel_id(encrypted_channel_id)
key = s3_path owner_id, channel_id, filename
response = s3.put_object(bucket: @bucket, key: key, body: body, metadata: {abuse_score: abuse_score.to_s})
# Delete the old version, if doing an in-place replace
s3.delete_object(bucket: @bucket, key: key, version_id: version) if version
response
end
#
# Copy an object within a channel, creating a new object in the channel.
#
# @param [String] encrypted_channel_id - App-identifying token
# @param [String] filename - Destination name for new object
# @param [String] source_filename - Name of object to be copied
# @param [String] version - Version of destination object to replace
# @return [Hash] S3 response from copy operation
def copy(encrypted_channel_id, filename, source_filename, version = nil)
owner_id, channel_id = storage_decrypt_channel_id(encrypted_channel_id)
key = s3_path owner_id, channel_id, filename
copy_source = @bucket + '/' + s3_path(owner_id, channel_id, source_filename)
response = s3.copy_object(bucket: @bucket, key: key, copy_source: copy_source)
# TODO: (bbuchanan) Handle abuse_score metadata for animations.
# When copying an object, should also copy its abuse_score metadata.
# https://www.pivotaltracker.com/story/show/117949241
# Delete the old version, if doing an in-place replace
s3.delete_object(bucket: @bucket, key: key, version_id: version) if version
response
end
def delete(encrypted_channel_id, filename)
owner_id, channel_id = storage_decrypt_channel_id(encrypted_channel_id)
key = s3_path owner_id, channel_id, filename
s3.delete_object(bucket: @bucket, key: key)
end
def delete_multiple(encrypted_channel_id, filenames)
owner_id, channel_id = storage_decrypt_channel_id(encrypted_channel_id)
objects = filenames.map {|filename| {key: s3_path(owner_id, channel_id, filename)}}
s3.delete_objects(bucket: @bucket, delete: {objects: objects, quiet: true})
end
def list_versions(encrypted_channel_id, filename)
owner_id, channel_id = storage_decrypt_channel_id(encrypted_channel_id)
key = s3_path owner_id, channel_id, filename
s3.list_object_versions(bucket: @bucket, prefix: key).versions.map do |version|
{
versionId: version.version_id,
lastModified: version.last_modified,
isLatest: version.is_latest
}
end
end
# Copies the given version of the file to make it the current revision.
# (All intermediate versions are preserved.)
def restore_previous_version(encrypted_channel_id, filename, version_id)
owner_id, channel_id = storage_decrypt_channel_id(encrypted_channel_id)
key = s3_path owner_id, channel_id, filename
s3.copy_object(bucket: @bucket, key: key, copy_source: "#{@bucket}/#{key}?versionId=#{version_id}")
end
protected
def s3_path(owner_id, channel_id, filename = nil)
"#{@base_dir}/#{owner_id}/#{channel_id}/#{Addressable::URI.unencode(filename)}"
end
end
| 38.222222 | 173 | 0.725345 |
f8e3f35aa45dbf598f4ff9b8c33823de34289a67 | 206 | require 'uri'
require 'date'
module Postmates
module Utils
def urlify(href)
URI(href) if href
end
def timeify(timestamp)
DateTime.iso8601 timestamp if timestamp
end
end
end | 14.714286 | 45 | 0.674757 |
ffaab0701c4b88f0e2119970957e53038670ee71 | 168 | class AddExpenseItemMaxPerRegistrant < ActiveRecord::Migration[4.2]
def change
add_column :expense_items, :maximum_per_registrant, :integer, default: 0
end
end
| 28 | 76 | 0.791667 |
875aa386feb33a18bbb4ac280f51c34fa4cc8940 | 419 | # frozen_string_literal: true
module Shrink
module Wrap
module Transformer
class Base
ATTRIBUTES = %i[
options
].freeze
attr_accessor(*ATTRIBUTES)
def initialize(opts = {})
self.options = opts
end
def transform(_input)
raise NotImplementedError, 'must define #transform in a subclass'
end
end
end
end
end
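# Usage sketch (illustrative; not part of the gem): transformers are meant to
# subclass Base and override #transform. The Upcase name and the :strict
# option below are hypothetical examples, not real API.
#
#   class Upcase < Shrink::Wrap::Transformer::Base
#     def transform(input)
#       input.to_s.upcase
#     end
#   end
#
#   Upcase.new(strict: true).transform('ok') # => "OK"
#   # the options passed to #initialize remain readable via #options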
| 17.458333 | 75 | 0.577566 |
1853564cfca8bb729cdfaa96c5d94c51b22e01be | 5,776 | module Erp::Deliveries
class Delivery < ApplicationRecord
belongs_to :creator, class_name: "Erp::User"
belongs_to :employee, class_name: "Erp::User"
if Erp::Core.available?("orders")
after_save :order_update_cache_delivery_status
belongs_to :order, class_name: "Erp::Orders::Order"
# update order cache payment status
def order_update_cache_delivery_status
if order.present?
order.update_cache_delivery_status
end
end
end
if Erp::Core.available?("contacts")
belongs_to :contact, class_name: "Erp::Contacts::Contact"
belongs_to :supplier, class_name: "Erp::Contacts::Contact"
def contact_name
contact.present? ? contact.contact_name : ''
end
def supplier_name
supplier.present? ? supplier.contact_name : ''
end
end
if Erp::Core.available?("warehouses")
belongs_to :warehouse, class_name: "Erp::Warehouses::Warehouse"
def warehouse_name
warehouse.present? ? warehouse.warehouse_name : ''
end
end
has_many :delivery_details, inverse_of: :delivery, dependent: :destroy
accepts_nested_attributes_for :delivery_details, :reject_if => lambda { |a| a[:order_detail_id].blank? || a[:quantity].blank? || a[:quantity].to_i <= 0 }
# class const
TYPE_IMPORT = 'import'
TYPE_EXPORT = 'export'
DELIVERY_STATUS_DELIVERED = 'delivered'
DELIVERY_STATUS_DELETED = 'deleted'
# Filters
def self.filter(query, params)
params = params.to_unsafe_hash
and_conds = []
      # show archived items condition - default: false
      show_archived = false
      #filters
      if params["filters"].present?
        params["filters"].each do |ft|
          or_conds = []
          ft[1].each do |cond|
            # in case filter is show archived
            if cond[1]["name"] == 'show_archived'
              # show archived items
              show_archived = true
            else
              or_conds << "#{cond[1]["name"]} = '#{cond[1]["value"]}'"
            end
          end
          and_conds << '('+or_conds.join(' OR ')+')' if !or_conds.empty?
        end
      end
#keywords
if params["keywords"].present?
params["keywords"].each do |kw|
or_conds = []
kw[1].each do |cond|
or_conds << "LOWER(#{cond[1]["name"]}) LIKE '%#{cond[1]["value"].downcase.strip}%'"
end
and_conds << '('+or_conds.join(' OR ')+')'
end
end
# join with users table for search creator
query = query.joins(:creator)
      # hide archived items unless show_archived is true
query = query.where(archived: false) if show_archived == false
# add conditions to query
query = query.where(and_conds.join(' AND ')) if !and_conds.empty?
return query
end
def self.search(params)
query = self.all
query = self.filter(query, params)
# order
if params[:sort_by].present?
order = params[:sort_by]
order += " #{params[:sort_direction]}" if params[:sort_direction].present?
query = query.order(order)
end
return query
end
# data for dataselect ajax
def self.dataselect(keyword='')
query = self.all
if keyword.present?
keyword = keyword.strip.downcase
query = query.where('LOWER(name) LIKE ?', "%#{keyword}%")
end
query = query.limit(8).map{|delivery| {value: delivery.id, text: delivery.code} }
end
def creator_name
creator.present? ? creator.name : ''
end
def employee_name
employee.present? ? employee.name : ''
end
def archive
update_attributes(archived: true)
end
def unarchive
update_attributes(archived: false)
end
def status_delivered
update_attributes(status: Erp::Deliveries::Delivery::DELIVERY_STATUS_DELIVERED)
end
def status_deleted
update_attributes(status: Erp::Deliveries::Delivery::DELIVERY_STATUS_DELETED)
end
def self.archive_all
update_all(archived: true)
end
def self.unarchive_all
update_all(archived: false)
end
def self.status_delivered_all
update_all(status: Erp::Deliveries::Delivery::DELIVERY_STATUS_DELIVERED)
end
def self.status_deleted_all
update_all(status: Erp::Deliveries::Delivery::DELIVERY_STATUS_DELETED)
end
def count_delivery_detail
delivery_details.count
end
def total_delivery_invoice
total = 0.0
delivery_details.each do |dt|
total += dt.total
end
return total
end
def total_ordered_quantity
amount = 0
delivery_details.each do |dd|
amount += dd.get_ordered_quantity
end
return amount
end
def total_delivered_quantity
delivery_details.sum(:quantity)
end
def remain_delivery_quantity
return total_ordered_quantity - total_delivered_quantity
end
def get_detail_by_order_detail(order_detail)
delivery_details.where(order_detail_id: order_detail.id).first
end
end
end
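# Usage sketch (illustrative; not part of the engine): `filter`/`search` expect
# an ActionController::Parameters-like object (they call `to_unsafe_hash`), with
# "filters"/"keywords" hashes shaped as the loops above assume. For example,
# from a controller:
#
#   deliveries = Erp::Deliveries::Delivery.search(params)
#   totals     = deliveries.map(&:total_delivery_invoice)
#
#   # `dataselect("dlv")` returns [{value: id, text: code}, ...] for ajax selects.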
| 27.504762 | 157 | 0.599377 |
61f25e54acf905bad48cecb706f83dfff4c4a904 | 1,659 | # frozen_string_literal: true
require_relative "lib/ruby_mal_client/version"
Gem::Specification.new do |spec|
spec.name = "ruby_mal_client"
spec.version = RubyMalClient::VERSION
spec.authors = ["Miguel Pat"]
spec.email = ["[email protected]"]
spec.summary = "Ruby client for connecting to the myanimelist.net API"
spec.description = "Soon to be expanded"
spec.homepage = "https://github.com/MichaelAPL/ruby_mal_client"
spec.license = "MIT"
spec.required_ruby_version = ">= 2.6.0"
# spec.metadata["allowed_push_host"] = "https://example.com"
spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = "https://github.com/MichaelAPL/ruby_mal_client"
spec.metadata["changelog_uri"] = "https://github.com/MichaelAPL/ruby_mal_client"
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path(__dir__)) do
`git ls-files -z`.split("\x0").reject do |f|
(f == __FILE__) || f.match(%r{\A(?:(?:test|spec|features)/|\.(?:git|travis|circleci)|appveyor)})
end
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
# Uncomment to register a new dependency of your gem
# spec.add_dependency "example-gem", "~> 1.0"
# For more information and examples about making a new gem, checkout our
# guide at: https://bundler.io/guides/creating_gem.html
  spec.metadata["rubygems_mfa_required"] = "true"
end
| 38.581395 | 102 | 0.678722 |
7adff945e2ca5f930e07e30cb408c526c765c7be | 425 | # frozen_string_literal: true
require 'rspec'
require 'sqlite3'
require 'active_record'
class Record < ActiveRecord::Base
end
ActiveRecord::Base.establish_connection(
adapter: 'sqlite3',
database: ':memory:'
)
ActiveRecord::Schema.define do
create_table :records, force: true do |t|
t.string :name
end
end
RSpec.configure do |c|
c.around do |example|
ActiveRecord::Base.transaction(&example)
end
end
| 16.346154 | 44 | 0.734118 |
5db64434ac3cb10cf4b06e5c66ac3500d11db283 | 445 | require "rails_helper"
require_relative "../../../../support/api/v1/views/index"
describe "api/v1/videos/index.json.jbuilder" do
before(:each) do
assign(:resources, [create(:video), create(:video)])
render
end
it_behaves_like "an index view", 2, [
:id,
:artist,
:instrument,
:title,
:description,
:tags,
:parts,
:scores,
:favorited_by,
:viewed_by,
:created_at,
:updated_at
]
end
| 17.8 | 57 | 0.613483 |
e87bc70109df60e6a0d5cd7590eba67165621895 | 62 | module SectionInfoConfigure
module SectionsHelper
end
end
| 12.4 | 27 | 0.83871 |
7947e5dc33b8dbffa35385f9332ac7816ae38361 | 2,075 | require_relative '../../app/models/series_list'
describe SeriesList do
def h(n)
# just an abbreviation to keep the tests readable
{ all: n, online: nil }
end
it 'does basic grouping and sorting' do
grouped = SeriesList.new({ 'MOUSE' => 1,
'cat' => 1,
'moose' => 1,
'dog' => 1,
'mousse' => 100 }, {}).by_first_letter
expect(grouped).to eq([
['C', [['cat', h(1)]]],
['D', [['dog', h(1)]]],
['M', [['moose', h(1)], ['MOUSE', h(1)], ['mousse', h(100)]]]
])
end
it 'handles articles' do
grouped = SeriesList.new({ 'An A' => 1,
'A B' => 2,
'The C' => 3,
'the D' => 4,
'THE E' => 5,
'able' => 1,
'baker' => 2,
'charlie' => 3,
'delta' => 4,
'easy' => 5 }, {}).by_first_letter
expect(grouped).to eq([
['A', [['An A', h(1)], ['able', h(1)]]],
['B', [['A B', h(2)], ['baker', h(2)]]],
['C', [['The C', h(3)], ['charlie', h(3)]]],
['D', [['the D', h(4)], ['delta', h(4)]]],
['E', [['THE E', h(5)], ['easy', h(5)]]]
])
end
it 'handles for digits and other' do
grouped = SeriesList.new({ '¿Que pasa?' => 1,
'3-2-1 Contact' => 2,
' evil whitespace' => 3,
'wimpy' => 0,
'xerox' => 1,
'a yuppie' => 2,
'the zebra' => 3 }, {}).by_first_letter
expect(grouped).to eq([
['E', [[' evil whitespace', h(3)]]],
['W', [['wimpy', h(0)]]],
['XYZ', [['xerox', h(1)], ['a yuppie', h(2)], ['the zebra', h(3)]]],
['other', [['3-2-1 Contact', h(2)], ['¿Que pasa?', h(1)]]]
])
end
end
| 36.403509 | 74 | 0.333012 |
87824fd482b518e8fbcc291843b3bf2d6545f7c8 | 2,746 | require "multi_level_logger/version"
require "logger"
include Logger::Severity
module MultiLevelLogger
class MLogger
def self.create(opts)
Logger.class_eval do
define_method "add" do |severity, message = nil, progname = nil, &block|
severity ||= UNKNOWN
progname ||= @progname
if message.nil?
if block_given?
message = yield
else
message = progname
progname = @progname
end
end
          @shift_age = opts[:shift_age] || 0
          @shift_size = opts[:shift_size] || 1048576
          unless File.directory?("log")
            Dir.mkdir("log")
          end
          @logdev_warn ||= Logger::LogDevice.new(opts[:warn] || "log/warn.log", :shift_age => @shift_age, :shift_size => @shift_size) if (opts[:all] || opts[:warn])
          @logdev_info ||= Logger::LogDevice.new(opts[:info] || "log/info.log", :shift_age => @shift_age, :shift_size => @shift_size) if (opts[:all] || opts[:info])
          @logdev_debug ||= Logger::LogDevice.new(opts[:debug] || "log/debug.log", :shift_age => @shift_age, :shift_size => @shift_size) if (opts[:all] || opts[:debug])
          @logdev_error ||= Logger::LogDevice.new(opts[:error] || "log/error.log", :shift_age => @shift_age, :shift_size => @shift_size) if (opts[:all] || opts[:error])
          @logdev_fatal ||= Logger::LogDevice.new(opts[:fatal] || "log/fatal.log", :shift_age => @shift_age, :shift_size => @shift_size) if (opts[:all] || opts[:fatal])
          @logdev_unknown ||= Logger::LogDevice.new(opts[:unknown] || "log/unknown.log", :shift_age => @shift_age, :shift_size => @shift_size) if (opts[:all] || opts[:unknown])
if @logdev_warn && severity == WARN
@logdev_warn.write(
format_message(format_severity(WARN), Time.now, progname, message))
end
if @logdev_info && severity == INFO
@logdev_info.write(
format_message(format_severity(INFO), Time.now, progname, message))
end
if @logdev_debug && severity == DEBUG
@logdev_debug.write(
format_message(format_severity(DEBUG), Time.now, progname, message))
end
if @logdev_error && severity == ERROR
@logdev_error.write(
format_message(format_severity(ERROR), Time.now, progname, message))
end
if @logdev_fatal && severity == FATAL
@logdev_fatal.write(
format_message(format_severity(FATAL), Time.now, progname, message))
end
if @logdev_unknown && severity == UNKNOWN
@logdev_unknown.write(
format_message(format_severity(UNKNOWN), Time.now, progname, message))
end
if @logdev.nil? or severity < @level or !opts[:default_logger]
return true
end
@logdev.write(
format_message(format_severity(severity), Time.now, progname, message))
true
end
end
Logger.new "log/development.log"
end
end
end
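# Usage sketch (illustrative): MLogger.create monkey-patches Logger#add so each
# severity can be teed into its own file under log/. The keys shown (:all,
# :warn, :shift_age, :shift_size, :default_logger) are the options read above;
# concrete values are examples only.
#
#   logger = MultiLevelLogger::MLogger.create(
#     all: true,                # one log/<severity>.log file per level
#     warn: "log/warnings.log", # individual paths can be overridden
#     shift_age: 5,
#     shift_size: 1_048_576,
#     default_logger: true      # also write to the returned logger's own device
#   )
#   logger.warn("disk space low")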
| 38.138889 | 168 | 0.653314 |
3369a0491fbf664ac676473e9a7119dd102dc8b8 | 956 | module AnalyticalValue
class Dynamics
attr_reader :value, :prev
delegate :positive?, :negative?, :zero?, :abs, to: :difference_signed
def initialize(prev, value)
@prev = prev
@value = value
end
def percentage
return nil if undefined?
return 0 if empty?
return 100 if prev.zero?
(difference * 100.0 / prev).round
end
def difference_signed
value.to_i - prev.to_i
end
def ==(other)
value == other.value && prev == other.prev
end
def undefined?
value.nil? || prev.nil?
end
def empty?
value.zero? && prev.zero?
end
def to_s
return 'undefined' if undefined?
return 'empty' if empty?
return 'up' if positive?
return 'down' if negative?
return 'stable' if stable?
''
end
alias increased? positive?
alias decreased? negative?
alias stable? zero?
alias difference abs
end
end
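# Usage sketch (illustrative): Dynamics compares a previous and a current value;
# `percentage` expresses the change relative to `prev`, and `to_s` maps it to
# "up"/"down"/"stable"/"empty"/"undefined".
#
#   d = AnalyticalValue::Dynamics.new(80, 100)
#   d.percentage # => 25
#   d.to_s       # => "up"
#   AnalyticalValue::Dynamics.new(nil, 100).to_s # => "undefined"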
| 18.745098 | 73 | 0.59728 |
7afe0a09270dc1a7acea61eb285800db5cbeb262 | 17,466 | require File.dirname(__FILE__) + "/../test_helper.rb"
class MediaFileTest < ActiveSupport::TestCase
def test_simple
item = AssetType.first
asset = Asset.first
assert !item.simple.is_full?
assert_difference "::AssetRelation.count" do
assert_difference "item.simple.size" do
item.simple << asset
end
end
assert item.simple.is_full?
end
def test_multiple
item = AssetType.first
asset_one = Asset.first
assert !item.multiple.is_full?
assert_difference "::AssetRelation.count" do
assert_difference "item.multiple.size" do
item.multiple << asset_one
end
end
assert !item.multiple.is_full?
end
def test_sized
item = AssetType.first
asset_one, asset_two = two_assets
assert asset_one != asset_two
assert !item.sized.is_full?
assert_difference "::AssetRelation.count",2 do
assert_difference "item.sized.size",2 do
item.sized << [asset_one,asset_two]
end
end
assert item.sized.is_full?
end
def test_all_types
t = AssetType.first
a = assets(:video)
assert t.all_types.accepts?(a)
a = assets(:audio)
assert t.all_types.accepts?(a)
end
def test_some_types
t = AssetType.first
a = assets(:video)
assert t.some_types.accepts?(a)
a = assets(:doc)
assert !t.some_types.accepts?(a)
end
def test_insertion_of_asset_relations
AssetRelation.destroy_all
item = AssetType.first
asset = Asset.first
assert_difference "::AssetRelation.count" do
assert_difference "item.simple.size" do
item.simple << asset
end
end
rel = AssetRelation.first
assert rel.field_name == 'simple'
end
def test_insertion_on_save_and_create
asset = Asset.first
item = nil
assert_no_difference "::AssetRelation.count" do
item = AssetType.new :simple_ids => [asset.id.to_s]
end
assert_equal 1, item.simple.size
assert_no_difference "item.simple.size" do
assert_difference "::AssetRelation.count" do
assert item.save
end
end
end
def test_named_relations
asset_one, asset_two = two_assets
item = nil
assert_difference "::AssetRelation.count", 2 do
item = AssetType.create :multiple_asset_relations_attributes => [
{"asset_id" => asset_one.id.to_s, "name" => "Test name", :field_name => 'multiple'},
{"asset_id" => asset_two.id.to_s, "name" => "Test name 2", :field_name => 'multiple'}
]
end
item.multiple.reload
assert_equal item.name_for_asset(:multiple, item.multiple[0]), "Test name"
assert_equal item.name_for_asset(:multiple, item.multiple[1]), "Test name 2"
end
def test_updating_by_named_relations
# This is what happens when, in the browser workflow,
# you change the name to an existing relation.
asset_one, asset_two = two_assets
item = AssetType.create :multiple_attributes => [
{"asset_id" => asset_one.id.to_s, "name" => "Test name"},
{"asset_id" => asset_two.id.to_s, "name" => "Test name 2"}
]
relations = item.asset_relations
assert_difference "::AssetRelation.count", 0 do
item.update_attributes :multiple_attributes => {
1 => {"asset_id" => asset_one.id.to_s, "name" => "Test name", 'position' => '1', "id" => relations.first.id.to_s},
2 => {"asset_id" => asset_two.id.to_s, "name" => "New Test name", 'position' => '2', "id" => relations.last.id.to_s}
}
end
assert_equal item.name_for_asset(:multiple, item.multiple[0]), "Test name"
assert_equal item.name_for_asset(:multiple, item.multiple[1]), "New Test name"
end
def test_updating_by_named_relations_destroys_every_non_referenced_asset_relation
# In the view, we simply don't send any to-be-destroyed association information,
# and the model must know that any missing id will be fired.
asset_one, asset_two = two_assets
item = AssetType.create :multiple_attributes => [
{"asset_id" => asset_one.id.to_s, "name" => "Test name"},
{"asset_id" => asset_two.id.to_s, "name" => "Test name 2"}
]
relations = item.asset_relations
assert_difference "::AssetRelation.count", -1 do
item.update_attributes :multiple_attributes => {
2 => {"asset_id" => asset_two.id.to_s, "name" => "New Test name", 'position' => '2', "id" => relations.last.id.to_s}
}
end
assert_equal item.name_for_asset(:multiple, item.multiple[0]), "New Test name"
assert_equal [asset_two], item.multiple
assert_difference "::AssetRelation.count", -1 do
item.update_attributes :multiple_attributes => {}
end
assert_equal [], item.reload.multiple
assert_equal [], item.multiple_asset_relations
end
def test_asset_relations_attribute_on_unsaved_instance
# This is how controllers and media_selector use these fields
# If this passes it means that media_selector will retain and display assets
# when the related element has not been saved due to errors.
asset_one, asset_two = two_assets
item = nil
assert_no_difference "::AssetRelation.count" do
item = AssetType.new :multiple_attributes => [
{"asset_id" => asset_one.id.to_s, "name" => "Test name"},
{"asset_id" => asset_two.id.to_s, "name" => "Test name 2"}
]
end
assert_equal 2, item.multiple_asset_relations.size
assert_equal ["Test name", "Test name 2"], item.multiple_asset_relations.map(&:name)
end
def test_array_of_ids
asset = Asset.first
item = nil
assert_difference "::AssetRelation.count" do
item = AssetType.create :simple_ids => [asset.id]
end
assert_equal item.simple.size, 1
end
def test_hashed_ids_with_positions
asset_one, asset_two = two_assets
item = nil
assert_difference "::AssetRelation.count", 2 do
item = AssetType.create :multiple_asset_relations_attributes => {
'0.524' => {'asset_id' => asset_one.id.to_s, 'position' => '4', 'field_name' => 'multiple'},
'0.425' => {'asset_id' => asset_two.id.to_s, 'position' => '5', 'field_name' => 'multiple'}
}
end
assert_equal 2, item.multiple.size
assert_equal_set [4,5], item.asset_relations.map(&:position)
assert_equal asset_one, item.multiple.first
end
def test_relation_order_on_creation
AssetRelation.delete_all
asset_one, asset_two = two_assets
assert_difference "::AssetRelation.count", 2 do
AssetType.create :multiple_asset_relations_attributes => [
{"asset_id" => asset_one.id.to_s, "name" => "Test name", "field_name" => 'multiple'},
{"asset_id" => asset_two.id.to_s, "name" => "Test name 2", "field_name" => 'multiple'}
]
end
assert_equal 1, AssetRelation.first.position
assert_equal 2, AssetRelation.first(:offset => 1).position
end
def test_relation_order_on_update
AssetRelation.delete_all
asset_one, asset_two = two_assets
item = AssetType.create :multiple_attributes => two_asset_relation_attributes
item.multiple = [asset_two, asset_one]
assert_equal 2, asset_one.reload.asset_relations.first.position
assert_equal "Relation to asset one", asset_one.reload.asset_relations.first.name
assert_equal 1, asset_two.reload.asset_relations.first.position
assert_equal "Relation to asset two", asset_two.reload.asset_relations.first.name
end
def test_does_not_create_new_relations_on_assignation
# This kind of consistence is not required per se since for this plugin AssetRelation
# is just an intermediate table, but ensuring it allows third parties to expand
# this table if they need/want it.
AssetRelation.delete_all
asset_one, asset_two = two_assets
item = AssetType.create :multiple_attributes => two_asset_relation_attributes
original_ids = item.asset_relations.map(&:id)
item.multiple = [asset_two, asset_one] # already there, just reassigning
assert_equal_set original_ids, item.reload.asset_relations.map(&:id)
end
def test_alias_for_assigning_attributes_with_array
asset_one, asset_two = two_assets
item = AssetType.create :multiple_attributes => two_asset_relation_attributes
assert_equal [asset_one, asset_two], item.reload.multiple
assert_equal [], item.simple
end
def test_alias_for_assigning_attributes_with_hash
asset_one = Asset.first
item = AssetType.create
asset_relations = { '1' => {"asset_id" => asset_one.id, "name" => "Relation to asset one" }}
item.update_attributes :multiple_attributes => asset_relations
assert_equal [asset_one], item.reload.multiple
assert_equal [], item.simple
end
def test_name_for_asset_should_work_when_multiple_media_attachments_are_in_use
asset = assets(:audio)
item = AssetType.create :simple_ids => [asset.id]
item.name_for_asset(:simple, asset)
item.update_attributes :some_type_ids => [asset.id]
item = AssetType.find(item.id)
assert_equal [asset], item.some_types
end
def test_should_destroy_old_relations
AssetRelation.destroy_all
asset_one, asset_two = two_assets
item = nil
assert_difference "AssetRelation.count" do
item = AssetType.create :simple_ids => [asset_one.id]
end
assert_no_difference "AssetRelation.count" do
item.simple_ids = [asset_two.id]
item.save
end
end
def test_should_require_all_n_assets_if_true
AssetRelation.destroy_all
asset_one, asset_two = two_assets
item = AssetType.new(:sized => [asset_one])
item.sized.options[:required] = true
begin
assert !item.valid?
assert item.errors.include?(:sized)
item.update_attributes :sized => [asset_one, asset_two]
assert item.valid?
ensure
# cleanup
item.sized.options[:required] = false
end
end
def test_should_not_accept_more_than_size
AssetRelation.destroy_all
asset_one, asset_two = two_assets
item = AssetType.new(:sized => [asset_one, asset_two, Asset.first(:offset => 3)])
item.sized.options[:required] = true
begin
assert !item.valid?
assert item.errors.include?(:sized)
item.update_attributes :sized => [asset_one, asset_two]
assert item.valid?
ensure
# cleanup
item.sized.options[:required] = false
end
end
def test_should_not_limit_size_when_many_is_especified
AssetRelation.destroy_all
asset_one, asset_two = two_assets
item = AssetType.new
item.sized.options[:required] = 2
item.sized.options[:size] = :many
begin
assert !item.valid?
assert item.errors.include?(:sized)
item.update_attributes :sized => [asset_one]
assert !item.valid?
assert item.errors.include?(:sized)
item.update_attributes :sized => item.sized + [Asset.first(:offset => 2)]
assert item.valid?
assert !item.errors.include?(:sized)
item.update_attributes :sized => item.sized + [Asset.first(:offset => 3)]
assert item.valid?
assert !item.errors.include?(:sized)
ensure
# cleanup
item.sized.options[:required] = false
item.sized.options[:size] = 2
end
end
# as above, but with the method that controllers will use
def test_should_require_all_n_assets_if_true_using_attributes
AssetRelation.destroy_all
item = AssetType.new
item.sized.options[:required] = true
begin
item.update_attributes :sized_attributes => two_asset_relation_attributes
assert item.valid?
ensure
# cleanup
item.sized.options[:required] = false
end
end
def test_should_require_some_assets_if_provided_by_number
AssetRelation.destroy_all
asset_one = Asset.first
item = AssetType.new
item.sized.options[:required] = 1
begin
assert !item.valid?
assert item.errors.include?(:sized)
item.update_attributes :sized => [asset_one]
assert item.valid?
ensure
# cleanup
item.sized.options[:required] = false
end
end
# as above, but with the method that controllers will use
def test_should_require_some_assets_if_provided_by_number_using_attributes
AssetRelation.destroy_all
item = AssetType.new
item.sized.options[:required] = 1
begin
item.update_attributes :sized_attributes => [two_asset_relation_attributes.first]
assert item.valid?
ensure
# cleanup
item.sized.options[:required] = false
end
end
def test_should_require_one_asset_if_true_and_size_many
AssetRelation.destroy_all
asset_one = Asset.first
item = AssetType.new
item.multiple.options[:required] = true
begin
assert !item.valid?
assert item.errors.include?(:multiple)
item.update_attributes :multiple => [asset_one]
assert item.valid?
ensure
# cleanup
item.multiple.options[:required] = false
end
end
# as above, but with the method that controllers will use
def test_should_require_one_asset_if_true_and_size_many_with_attributes
AssetRelation.destroy_all
item = AssetType.new
item.multiple.options[:required] = true
begin
item.update_attributes :multiple_attributes => [two_asset_relation_attributes.first]
assert item.valid?
ensure
# cleanup
item.multiple.options[:required] = false
end
end
# the edge case with our implementation: field_asset_relations is empty but field isn't
def test_should_require_one_asset_if_has_been_deleted_with_attributes
AssetRelation.destroy_all
item = AssetType.new
item.multiple.options[:required] = true
begin
item.update_attributes :multiple_attributes => two_asset_relation_attributes
assert_equal 2, item.multiple.size
assert item.valid?
assert !item.update_attributes(:multiple_attributes => {})
ensure
# cleanup
item.multiple.options[:required] = false
end
end
# Test for the issue detected in #268
def test_should_not_modify_config_when_defining_paperclip_styles
styles_hash = {
:style_name => {
:processors => [:example_processor],
}
}
# short way to recursivelly clone
styles_hash_copy = Marshal.load(Marshal.dump(styles_hash))
Ubiquo::Settings.context(:ubiquo_media).set do |config|
config.media_styles_list = styles_hash_copy
end
begin
# Reload the AssetPublic class, that uses this defined option
path = File.expand_path(File.join(File.dirname(__FILE__),
'..',
'..',
'app/models/asset_public.rb'))
# NOTE: The Kernel#load method is enough to reload the class definition
load path
# this triggers the Style initialization, which uses the hash
asset = AssetPublic.new
asset.attachment_for(:resource).styles
assert_equal styles_hash, Ubiquo::Settings.context(:ubiquo_media).get(:media_styles_list)
assert !styles_hash[:style_name].blank?
ensure
# cleanup
AssetPublic.attachment_definitions[:resource] = AssetPrivate.attachment_definitions[:resource]
end
end
def test_shoud_not_save_wrong_asset_type_relations
t = AssetType.first
a = assets(:doc)
t.some_types << a
assert !t.valid?
assert !t.save
end
def test_should_validate_asset_type_even_when_the_asset_type_instance_change
# setup, initial asset_type and media_attachment definition
asset_type_attributes = {:key => 'my_asset_type'}
asset_type = AssetType.create(asset_type_attributes)
assert !asset_type.new_record?
AssetType.send(:media_attachment, :my_attachment, :types => ["my_asset_type"])
object = nil
test_asset_addition = lambda { |current_asset_type|
# create_asset
asset = AssetPublic.new(:name => 'my_asset',
:resource => sample_image,
:asset_type => current_asset_type)
# skip validation, sample file is a image
asset.expects(:set_asset_type).returns(true)
assert asset.save
# create_object with the asset_relation
object = AssetType.create(
:my_attachment_attributes => [{:asset_id => asset.id}],
:key => "my_new_object_for_#{current_asset_type.id}")
assert !object.new_record?, object.errors.full_messages.to_sentence
assert object.accepts_asset_for_my_attachment?(asset)
}
# test
test_asset_addition.call(asset_type)
assert asset_type.destroy
object.reload
assert !object.valid?
# new asset_type instance with the same key,
# assets with this asset_type_id should also be valid for the media_attachment
asset_type_copy = AssetType.create(asset_type_attributes)
assert !asset_type.new_record?
test_asset_addition.call(asset_type_copy)
end
def test_should_validate_asset_type
AssetType.send(:media_attachment, :my_attachment, :types => ["image", "video"])
model = AssetType.new
assert !model.accepts_asset_for_my_attachment?(assets(:doc))
assert model.accepts_asset_for_my_attachment?(assets(:image))
assert model.accepts_asset_for_my_attachment?(assets(:video))
end
protected
def two_assets
[Asset.first, Asset.first(:offset => 1)]
end
def two_asset_relation_attributes
asset_one, asset_two = two_assets
[
{"asset_id" => asset_one.id, "name" => "Relation to asset one" },
{"asset_id" => asset_two.id, "name" => "Relation to asset two" }
]
end
end
| 32.52514 | 124 | 0.694206 |
e24ad0a2df06c3bafff142d8c8886f0e184c80ac | 1,020 | def failed_not_contains_message file, contents
msg = "Expected #{file} to contain:\n\t#{contents.gsub("\n", "\n\t")}\nWhen"
if File.file?(file)
msg += " it contained:\n#{File.read(file)}".gsub("\n", "\n\t")
else
msg += ' it did not exist.'
end
msg
end
RSpec::Matchers.define :have_in_result do |result, contents|
result += '.js' unless result.end_with?('.js')
match do |glue|
File.join(glue.destination, result).should have_contents(contents)
end
failure_message do |glue|
failed_not_contains_message File.join(glue.destination, result), contents
end
end
RSpec::Matchers.define :have_contents do |contents|
match do |file|
File.file?(file) && File.read(file).chomp.should == contents.chomp
end
failure_message do |file|
failed_not_contains_message file, contents
end
end
RSpec::Matchers.define :exist do
match do |file|
    File.exist?(file)
end
end
RSpec::Matchers.define :contain do |substr|
match do |str|
!str.index(substr).nil?
end
end | 22.666667 | 78 | 0.685294 |
1a6b6f136a3dbac3447ba226a633b7ac806e2922 | 458 | cask 'zesarux' do
version '4.2'
sha256 '63608613276db8062d8688b0a8f9f9aa7445e73be695be3d2a81599226c96d78'
url "https://downloads.sourceforge.net/zesarux/ZEsarUX_bin-#{version}-MountainLion_or_higher.dmg.gz"
appcast 'https://sourceforge.net/projects/zesarux/rss',
checkpoint: '5687bee64ebde5d8c48533c17d30bb91dabf0b25c5ccca9679d69652e3b7f7cb'
name 'ZEsarUX'
homepage 'https://sourceforge.net/projects/zesarux/'
app 'ZEsarUX.app'
end
| 35.230769 | 102 | 0.790393 |
ffa6d5842d2df86a6ae7261d90b8ba60bba11d5a | 273 | class CreateCharacters < ActiveRecord::Migration[6.0]
def change
create_table :characters do |t|
t.integer :user_id
t.integer :class_id
t.integer :race_id
t.string :name
t.integer :level, default: 1
t.timestamps
end
end
end
| 19.5 | 53 | 0.644689 |
91e9df091ac3e7156e1dd3ec7d0774c0098203ea | 17,585 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sdk-core/waiters'
module Aws::DatabaseMigrationService
module Waiters
# Wait until testing endpoint is deleted.
class EndpointDeleted
# @param [Hash] options
# @option options [required, Client] :client
# @option options [Integer] :max_attempts (60)
# @option options [Integer] :delay (5)
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
def initialize(options)
@client = options.fetch(:client)
@waiter = Aws::Waiters::Waiter.new({
max_attempts: 60,
delay: 5,
poller: Aws::Waiters::Poller.new(
operation_name: :describe_endpoints,
acceptors: [
{
"expected" => "ResourceNotFoundFault",
"matcher" => "error",
"state" => "success"
},
{
"argument" => "endpoints[].status",
"expected" => "active",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "endpoints[].status",
"expected" => "creating",
"matcher" => "pathAny",
"state" => "failure"
}
]
)
}.merge(options))
end
# @option (see Client#describe_endpoints)
# @return (see Client#describe_endpoints)
def wait(params = {})
@waiter.wait(client: @client, params: params)
end
# @api private
attr_reader :waiter
end
# Wait until DMS replication instance is available.
class ReplicationInstanceAvailable
# @param [Hash] options
# @option options [required, Client] :client
# @option options [Integer] :max_attempts (60)
# @option options [Integer] :delay (60)
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
def initialize(options)
@client = options.fetch(:client)
@waiter = Aws::Waiters::Waiter.new({
max_attempts: 60,
delay: 60,
poller: Aws::Waiters::Poller.new(
operation_name: :describe_replication_instances,
acceptors: [
{
"argument" => "replication_instances[].replication_instance_status",
"expected" => "available",
"matcher" => "pathAll",
"state" => "success"
},
{
"argument" => "replication_instances[].replication_instance_status",
"expected" => "deleting",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_instances[].replication_instance_status",
"expected" => "incompatible-credentials",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_instances[].replication_instance_status",
"expected" => "incompatible-network",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_instances[].replication_instance_status",
"expected" => "inaccessible-encryption-credentials",
"matcher" => "pathAny",
"state" => "failure"
}
]
)
}.merge(options))
end
# @option (see Client#describe_replication_instances)
# @return (see Client#describe_replication_instances)
def wait(params = {})
@waiter.wait(client: @client, params: params)
end
# @api private
attr_reader :waiter
end
# Wait until DMS replication instance is deleted.
class ReplicationInstanceDeleted
# @param [Hash] options
# @option options [required, Client] :client
# @option options [Integer] :max_attempts (60)
# @option options [Integer] :delay (15)
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
def initialize(options)
@client = options.fetch(:client)
@waiter = Aws::Waiters::Waiter.new({
max_attempts: 60,
delay: 15,
poller: Aws::Waiters::Poller.new(
operation_name: :describe_replication_instances,
acceptors: [
{
"argument" => "replication_instances[].replication_instance_status",
"expected" => "available",
"matcher" => "pathAny",
"state" => "failure"
},
{
"expected" => "ResourceNotFoundFault",
"matcher" => "error",
"state" => "success"
}
]
)
}.merge(options))
end
# @option (see Client#describe_replication_instances)
# @return (see Client#describe_replication_instances)
def wait(params = {})
@waiter.wait(client: @client, params: params)
end
# @api private
attr_reader :waiter
end
# Wait until DMS replication task is deleted.
class ReplicationTaskDeleted
# @param [Hash] options
# @option options [required, Client] :client
# @option options [Integer] :max_attempts (60)
# @option options [Integer] :delay (15)
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
def initialize(options)
@client = options.fetch(:client)
@waiter = Aws::Waiters::Waiter.new({
max_attempts: 60,
delay: 15,
poller: Aws::Waiters::Poller.new(
operation_name: :describe_replication_tasks,
acceptors: [
{
"argument" => "replication_tasks[].status",
"expected" => "ready",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "creating",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "stopped",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "running",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "failed",
"matcher" => "pathAny",
"state" => "failure"
},
{
"expected" => "ResourceNotFoundFault",
"matcher" => "error",
"state" => "success"
}
]
)
}.merge(options))
end
# @option (see Client#describe_replication_tasks)
# @return (see Client#describe_replication_tasks)
def wait(params = {})
@waiter.wait(client: @client, params: params)
end
# @api private
attr_reader :waiter
end
# Wait until DMS replication task is ready.
class ReplicationTaskReady
# @param [Hash] options
# @option options [required, Client] :client
# @option options [Integer] :max_attempts (60)
# @option options [Integer] :delay (15)
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
def initialize(options)
@client = options.fetch(:client)
@waiter = Aws::Waiters::Waiter.new({
max_attempts: 60,
delay: 15,
poller: Aws::Waiters::Poller.new(
operation_name: :describe_replication_tasks,
acceptors: [
{
"argument" => "replication_tasks[].status",
"expected" => "ready",
"matcher" => "pathAll",
"state" => "success"
},
{
"argument" => "replication_tasks[].status",
"expected" => "starting",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "running",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "stopping",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "stopped",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "failed",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "modifying",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "testing",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "deleting",
"matcher" => "pathAny",
"state" => "failure"
}
]
)
}.merge(options))
end
# @option (see Client#describe_replication_tasks)
# @return (see Client#describe_replication_tasks)
def wait(params = {})
@waiter.wait(client: @client, params: params)
end
# @api private
attr_reader :waiter
end
# Wait until DMS replication task is running.
class ReplicationTaskRunning
# @param [Hash] options
# @option options [required, Client] :client
# @option options [Integer] :max_attempts (60)
# @option options [Integer] :delay (15)
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
def initialize(options)
@client = options.fetch(:client)
@waiter = Aws::Waiters::Waiter.new({
max_attempts: 60,
delay: 15,
poller: Aws::Waiters::Poller.new(
operation_name: :describe_replication_tasks,
acceptors: [
{
"argument" => "replication_tasks[].status",
"expected" => "running",
"matcher" => "pathAll",
"state" => "success"
},
{
"argument" => "replication_tasks[].status",
"expected" => "ready",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "creating",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "stopping",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "stopped",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "failed",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "modifying",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "testing",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "deleting",
"matcher" => "pathAny",
"state" => "failure"
}
]
)
}.merge(options))
end
# @option (see Client#describe_replication_tasks)
# @return (see Client#describe_replication_tasks)
def wait(params = {})
@waiter.wait(client: @client, params: params)
end
# @api private
attr_reader :waiter
end
# Wait until DMS replication task is stopped.
class ReplicationTaskStopped
# @param [Hash] options
# @option options [required, Client] :client
# @option options [Integer] :max_attempts (60)
# @option options [Integer] :delay (15)
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
def initialize(options)
@client = options.fetch(:client)
@waiter = Aws::Waiters::Waiter.new({
max_attempts: 60,
delay: 15,
poller: Aws::Waiters::Poller.new(
operation_name: :describe_replication_tasks,
acceptors: [
{
"argument" => "replication_tasks[].status",
"expected" => "stopped",
"matcher" => "pathAll",
"state" => "success"
},
{
"argument" => "replication_tasks[].status",
"expected" => "ready",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "creating",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "starting",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "running",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "failed",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "modifying",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "testing",
"matcher" => "pathAny",
"state" => "failure"
},
{
"argument" => "replication_tasks[].status",
"expected" => "deleting",
"matcher" => "pathAny",
"state" => "failure"
}
]
)
}.merge(options))
end
# @option (see Client#describe_replication_tasks)
# @return (see Client#describe_replication_tasks)
def wait(params = {})
@waiter.wait(client: @client, params: params)
end
# @api private
attr_reader :waiter
end
# Wait until testing connection succeeds.
class TestConnectionSucceeds
# @param [Hash] options
# @option options [required, Client] :client
# @option options [Integer] :max_attempts (60)
# @option options [Integer] :delay (5)
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
def initialize(options)
@client = options.fetch(:client)
@waiter = Aws::Waiters::Waiter.new({
max_attempts: 60,
delay: 5,
poller: Aws::Waiters::Poller.new(
operation_name: :describe_connections,
acceptors: [
{
"argument" => "connections[].status",
"expected" => "successful",
"matcher" => "pathAll",
"state" => "success"
},
{
"argument" => "connections[].status",
"expected" => "failed",
"matcher" => "pathAny",
"state" => "failure"
}
]
)
}.merge(options))
end
# @option (see Client#describe_connections)
# @return (see Client#describe_connections)
def wait(params = {})
@waiter.wait(client: @client, params: params)
end
# @api private
attr_reader :waiter
end
end
end
| 32.266055 | 84 | 0.464771 |
79d630e03685259f15c9bc79414582050c1a5f1d | 738 | module Api
class PinBindingsController < Api::AbstractController
def index
maybe_paginate pin_bindings
end
def show
render json: pin_binding
end
def destroy
mutate PinBindings::Destroy.run(pin_binding: pin_binding)
end
def create
mutate PinBindings::Create.run(raw_json, device: current_device)
end
def update
mutate PinBindings::Update.run(raw_json, update_params)
end
private
def update_params
@update_params ||= { device: current_device, pin_binding: pin_binding }
end
def pin_bindings
PinBinding.where(device: current_device)
end
def pin_binding
@pin_binding ||= pin_bindings.find(params[:id])
end
end
end
| 19.421053 | 77 | 0.681572 |
03ab35a36d262b15553794bc6bc3cd2376ccaacd | 258 | describe Fastlane::Actions::XamversionAction do
describe "#run" do
it "prints a message" do
expect(Fastlane::UI).to receive(:message).with("The xamversion plugin is working!")
Fastlane::Actions::XamversionAction.run(nil)
end
end
end
| 25.8 | 89 | 0.705426 |
5d0fe8d08be434e09532068de25bd4ad649ae58a | 2,591 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
UNIT_TESTED_PROJECTS = [
'elasticsearch',
'elasticsearch-transport',
'elasticsearch-dsl',
'elasticsearch-api'
].freeze
INTEGRATION_TESTED_PROJECTS = (UNIT_TESTED_PROJECTS - ['elasticsearch-api']).freeze
namespace :test do
require 'open-uri'
task bundle: 'bundle:install'
desc 'Run all tests in all subprojects'
task client: [:unit, :integration]
desc 'Run unit tests in all subprojects'
task :unit do
UNIT_TESTED_PROJECTS.each do |project|
puts '-' * 80
sh "cd #{CURRENT_PATH.join(project)} && unset BUNDLE_GEMFILE && unset BUNDLE_PATH && unset BUNDLE_BIN && bundle exec rake test:unit"
puts "\n"
end
end
desc 'Run integration tests in all subprojects'
task :integration do
INTEGRATION_TESTED_PROJECTS.each do |project|
puts '-' * 80
sh "cd #{CURRENT_PATH.join(project)} && unset BUNDLE_GEMFILE && bundle exec rake test:integration"
puts "\n"
end
end
desc 'Run rest api tests'
task rest_api: ['elasticsearch:wait_for_green'] do
puts '-' * 80
sh "cd #{CURRENT_PATH.join('elasticsearch-api')} && unset BUNDLE_GEMFILE && bundle exec rake test:rest_api"
puts "\n"
end
desc 'Run security (Platinum) rest api yaml tests'
task security: 'elasticsearch:wait_for_green' do
puts '-' * 80
sh "cd #{CURRENT_PATH.join('elasticsearch-api')} && unset BUNDLE_GEMFILE && TEST_SUITE=platinum bundle exec rake test:rest_api"
puts "\n"
end
# Returns: version_number, build_hash
def cluster_info
require 'elasticsearch'
version_info = admin_client.info['version']
abort('[!] Cannot determine cluster version information -- Is the server running?') unless version_info
version_info
rescue Faraday::ConnectionFailed => e
STDERR.puts "[!] Test cluster not running?"
abort e
end
end
| 33.217949 | 138 | 0.719799 |
eda52646c44b75ae65c9cb26a8f0e2496600fbb5 | 92 |
module <%= class_name %>
helpers do
def helper_hello
'hello'
end
end
end
| 10.222222 | 24 | 0.586957 |
4a92a76f6a8b2f8ca59f3773f52c5cc45d7fbe68 | 1,767 | # frozen_string_literal: true
describe ::EthereumGateway::Collector do
subject { described_class.new(ethereum_client) }
let(:peatio_transaction) do
Peatio::Transaction.new(
currency_id: 'eth',
amount: 12,
from_address: '123',
to_address: '145',
block_number: 1,
status: 'pending'
)
end
let(:gas_factor) { 1 }
let(:native_amount) { 1_000_000 }
let(:contract_amount) { 1_000_000_000_000_000_000 }
let(:contract_address) { Faker::Blockchain::Ethereum.address }
let(:from_address) { Faker::Blockchain::Ethereum.address }
let(:to_address) { Faker::Blockchain::Ethereum.address }
it 'collects tokens first' do
EthereumGateway::TransactionCreator
.any_instance
.stubs(:call)
.with(from_address: from_address,
to_address: to_address,
amount: contract_amount,
secret: nil,
subtract_fee: false,
gas_limit: 1,
gas_factor: gas_factor,
contract_address: contract_address)
.once
.returns(peatio_transaction)
EthereumGateway::TransactionCreator
.any_instance
.stubs(:call)
.with(from_address: from_address,
to_address: to_address,
amount: native_amount,
secret: nil,
gas_factor: 1,
subtract_fee: true,
gas_limit: 2,
contract_address: nil)
.once
.returns(peatio_transaction)
subject.send(:call,
from_address: from_address,
to_address: to_address,
gas_limits: { contract_address => 1, nil => 2 },
amounts: { contract_address => contract_amount, nil => native_amount },
secret: nil)
end
end
| 30.465517 | 88 | 0.609508 |
18572357204090b4c4ee5736bb9f245b0a97192c | 619 | Pod::Spec.new do |s|
s.name = 'PullUpController'
s.version = '0.8.0'
s.summary = 'Pull up controller with multiple sticky points like in iOS Maps.'
s.homepage = 'https://github.com/MarioIannotta/PullUpController'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'Mario Iannotta' => '[email protected]' }
s.source = { :git => 'https://github.com/MarioIannotta/PullUpController.git', :tag => s.version.to_s }
s.ios.deployment_target = '9.0'
s.source_files = 'PullUpController/**/*.swift'
s.swift_version = '5'
end
| 47.615385 | 114 | 0.597738 |
261e2f905e2e10405882fbe66befc57b01d92648 | 261 | class ManageIQ::Providers::StorageManager::CinderManager::EventCatcher < ::MiqEventCatcher
require_nested :Runner
def self.ems_class
ManageIQ::Providers::StorageManager::CinderManager
end
def self.settings_name
:event_catcher_cinder
end
end
| 21.75 | 90 | 0.785441 |
ac5224a07f82606050c901a2c26d25ed550d093c | 817 | cask "cookie" do
version "6.5.2"
sha256 :no_check
url "https://sweetpproductions.com/products/cookieapp/Cookie.dmg"
name "Cookie"
desc "Protection from tracking and online profiling"
homepage "https://sweetpproductions.com/"
livecheck do
url "https://sweetpproductions.com/products/cookieapp/appcast.xml"
strategy :sparkle
end
depends_on macos: ">= :mojave"
app "Cookie.app"
zap trash: [
"~/Library/Application Scripts/com.sweetpproductions.Cookie5",
"~/Library/Containers/com.sweetpproductions.Cookie5",
"~/Library/Preferences/com.sweetpproductions.Cookie5.plist",
"~/Library/Application Scripts/com.sweetpproductions.CookieApp",
"~/Library/Containers/com.sweetpproductions.CookieApp",
"~/Library/Preferences/com.sweetpproductions.CookieApp.plist",
]
end
| 29.178571 | 70 | 0.738066 |
e8cbbde21123b0765b9985558322830abfd1365b | 1,813 | # frozen_string_literal: true
require 'test_helper'
describe Vissen::Parameterized::Value do
subject { TestHelper::ValueMock }
let(:value_mock) { subject.new }
describe '.new' do
it 'accepts an initial value' do
value = subject.new 42
assert_equal 42, value.value
end
it 'defaults to the value stored in DEFAULT' do
assert_equal subject::DEFAULT, value_mock.value
end
it 'marks the value as tainted' do
assert value_mock.tainted?
end
it 'supports boolean false' do
value = subject.new false
assert_equal false, value.value
end
end
describe '#write' do
it 'updates the value' do
value_mock.write 42
assert_same 42, value_mock.value
end
it 'taints untainted values' do
value_mock.untaint!
value_mock.write rand
assert value_mock.tainted?
end
it 'does not taint the value if the same value is written' do
value_mock.untaint!
value_mock.write value_mock.value
refute value_mock.tainted?
end
it 'returns true when the value is changed' do
res = value_mock.write value_mock.value + 1
assert res
end
it 'returns false when the value is unchanged' do
res = value_mock.write value_mock.value
refute res
end
end
describe '#scope' do
it 'returns the global scope' do
assert_same Vissen::Parameterized::GlobalScope.instance, value_mock.scope
end
end
describe '#to_s' do
it 'returns a string representation of the value when tainted' do
value_mock.write 42
assert_equal '42*', value_mock.to_s
end
it 'returns a string representation of the value when untainted' do
value_mock.write 42
value_mock.untaint!
assert_equal '42', value_mock.to_s
end
end
end
| 23.545455 | 79 | 0.676779 |
1dbe4640e29c9f9b18e779a34919018d9920f1cb | 1,578 | module Fog
module Compute
class HPV2
class Real
# List metadata for specific collections
#
# ==== Parameters
# * 'collection_name'<~String> - name of the collection i.e. images, servers for which the metadata is intended.
# * 'parent_id'<~Integer> - id of the collection i.e. image_id or the server_id
#
# ==== Returns
# * response<~Excon::Response>:
# * body<~Hash>:
# * 'metadata'<~Hash>: hash of key/value pair for the metadata items found
#
def list_metadata(collection_name, parent_id)
request(
:expects => [200, 203],
:method => 'GET',
:path => "/#{collection_name}/#{parent_id}/metadata"
)
end
end
class Mock
def list_metadata(collection_name, parent_id)
mdata = {}
if collection_name == "images" then
if get_image_details(parent_id)
mdata = self.data[:images][parent_id]['metadata']
else
raise Fog::Compute::HPV2::NotFound
end
end
if collection_name == "servers" then
if get_server_details(parent_id)
mdata = self.data[:servers][parent_id]['metadata']
else
raise Fog::Compute::HPV2::NotFound
end
end
response = Excon::Response.new
response.status = 200
response.body = {'metadata' => mdata}
response
end
end
end
end
end
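# Usage sketch (illustrative; the connection variable name is an assumption):
# this request is normally reached through a Fog HP compute connection, e.g.
#
#   compute.list_metadata('servers', server_id).body['metadata']
#
# which returns the key/value metadata hash for that server (or image).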
| 27.684211 | 120 | 0.524715 |
e9fedaf507802b42681fa7896d6f3888e0b1cbe6 | 813 | RailsAdmin.config do |config|
config.main_app_name = Proc.new { |controller| [ "OneBnb", "Admin - #{controller.params[:action].try(:titleize)}" ] }
## == Devise ==
config.authenticate_with do
warden.authenticate! scope: :user
end
config.current_user_method(&:current_user)
config.authorize_with do
if current_user.kind != "admin"
reset_session
redirect_to '/users/sign_in'
end
end
require Rails.root.join('lib', 'rails_admin', 'rails_admin_pdf.rb')
RailsAdmin::Config::Actions.register(RailsAdmin::Config::Actions::Pdf)
config.excluded_models << "Photo"
config.actions do
dashboard # mandatory
index # mandatory
new
export
bulk_delete
show
edit
delete
show_in_app
pdf
end
end
| 23.228571 | 119 | 0.644526 |
ac80e41b8e14c0f461ba5bf8831c6466a517c41c | 1,327 | # Copyright (c) 2018-2019 VMware, Inc. All Rights Reserved.
# SPDX-License-Identifier: MIT
# DO NOT MODIFY. THIS CODE IS GENERATED. CHANGES WILL BE OVERWRITTEN.
# vapi - vAPI is an extensible API Platform for modelling and delivering APIs/SDKs/CLIs.
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for VSphereAutomation::VAPI::VapiMetadataCliCommandIdentity
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe 'VapiMetadataCliCommandIdentity' do
before do
# run before each test
@instance = VSphereAutomation::VAPI::VapiMetadataCliCommandIdentity.new
end
after do
# run after each test
end
describe 'test an instance of VapiMetadataCliCommandIdentity' do
it 'should create an instance of VapiMetadataCliCommandIdentity' do
expect(@instance).to be_instance_of(VSphereAutomation::VAPI::VapiMetadataCliCommandIdentity)
end
end
describe 'test attribute "path"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "name"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 30.159091 | 102 | 0.752826 |
ff911d02d7b555482de228f4ad0e53e41a3444e0 | 1,127 | # frozen_string_literal: true
require "faraday"
require "faraday_middleware"
require "oj" unless defined?(JRUBY_VERSION)
require "faraday_middleware/multi_json"
require "oauth2"
require "yaml"
require "hashie"
require "vkontakte_api/version"
require "vkontakte_api/error"
require "vkontakte_api/execute_error"
require "vkontakte_api/configuration"
require "vkontakte_api/authorization"
require "vkontakte_api/uploading"
require "vkontakte_api/utils"
require "vkontakte_api/api"
require "vkontakte_api/resolver"
require "vkontakte_api/resolvable"
require "vkontakte_api/client"
require "vkontakte_api/namespace"
require "vkontakte_api/method"
require "vkontakte_api/result"
require "vkontakte_api/logger"
# Main module.
module VkontakteApi
extend VkontakteApi::Configuration
extend VkontakteApi::Authorization
extend VkontakteApi::Uploading
class << self
# Creates a short alias `VK` for `VkontakteApi` module.
def register_alias
Object.const_set(:VK, VkontakteApi)
end
# Removes the `VK` alias.
def unregister_alias
Object.send(:remove_const, :VK) if defined?(VK)
end
end
end
| 25.044444 | 59 | 0.786158 |
ac07369b4a5427e640dde43301e18bd1ab0cbc09 | 2,613 | #
# Copyright 2011-2013, Dell
# Copyright 2013-2014, SUSE LINUX Products GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
describe DeployQueueController do
render_views
describe "GET index" do
before do
# We don't have Pacemaker at hand and the helper returns false because of
# that, need to stub this
ServiceObject.stubs(:is_cluster?).returns(true)
end
let(:prop) { ProposalObject.find_proposal("database", "default") }
it "is successful" do
get :index
response.should be_success
end
describe "with existing nodes" do
before do
# Simulate the expansion to a node whose look up we can fake
ServiceObject.stubs(:expand_nodes_for_all).returns([["testing.crowbar.com"],[]])
end
it "is successful when a prop with clusters is deployed" do
# Is now deploying
@controller.stubs(:currently_deployed).returns(prop)
get :index
response.should be_success
end
it "is successful when there are clusters in the queue" do
# Is queued
queue_item = { "barclamp" => prop.barclamp, "inst" => prop.name, "elements" => prop.elements, "deps" => [] }
@controller.stubs(:deployment_queue).returns([queue_item])
get :index
response.should be_success
end
end
describe "with non-existing nodes" do
before do
# Cluster referencing a non-existent node (deleted)
ServiceObject.stubs(:expand_nodes_for_all).returns([["I just dont exist"],[]])
end
it "is successful when a prop with clusters is deployed" do
# Is now deploying
@controller.stubs(:currently_deployed).returns(prop)
get :index
response.should be_success
end
it "is successful for clusters in the queue" do
queue_item = { "barclamp" => prop.barclamp, "inst" => prop.name, "elements" => prop.elements, "deps" => [] }
@controller.stubs(:deployment_queue).returns([queue_item])
get :index
response.should be_success
end
end
end
end
| 30.741176 | 116 | 0.669346 |
bb0906123664ddabdb558f9c1b9720b9fafc6a82 | 1,779 | # == Schema Information
#
# Table name: users
#
# id :bigint not null, primary key
# admin :boolean
# current_sign_in_at :datetime
# current_sign_in_ip :inet
# email_bidx :string
# email_ciphertext :string
# encrypted_password :string default(""), not null
# failed_attempts :integer default(0), not null
# last_sign_in_at :datetime
# last_sign_in_ip :inet
# locked_at :datetime
# otp_auth_secret :string
# otp_challenge_expires :datetime
# otp_enabled :boolean default(FALSE), not null
# otp_enabled_on :datetime
# otp_failed_attempts :integer default(0), not null
# otp_mandatory :boolean default(FALSE), not null
# otp_persistence_seed :string
# otp_recovery_counter :integer default(0), not null
# otp_recovery_secret :string
# otp_session_challenge :string
# remember_created_at :datetime
# reset_password_sent_at :datetime
# reset_password_token :string
# sign_in_count :integer default(0), not null
# unlock_token :string
# created_at :datetime not null
# updated_at :datetime not null
# local_group_id :bigint
#
class User < ApplicationRecord
# Include default devise modules. Others available are:
# :confirmable, :lockable, :timeoutable, :trackable and :omniauthable
devise :database_authenticatable, :recoverable,
:lockable, :trackable, :timeoutable
encrypts :email
blind_index :email
belongs_to :local_group, optional: true
# @TODO: Remove this line after dropping email column
self.ignored_columns = ['email']
end
| 35.58 | 71 | 0.634064 |
79021e706a0bedee4d1d239e12bd8d644d94571b | 3,309 | # frozen_string_literal: true
require 'spec_helper'
describe Spree::ProductsController, type: :controller do
let!(:product) { create(:product, available_on: 1.year.from_now) }
let(:taxon) { create(:taxon) }
# Regression test for https://github.com/spree/spree/issues/1390
it 'allows admins to view non-active products' do
allow(controller).to receive_messages try_spree_current_user: mock_model(Spree.user_class, has_spree_role?: true, last_incomplete_spree_order: nil, spree_api_key: 'fake')
    get :show, params: { id: product.to_param }
expect(response.status).to eq(200)
end
it 'cannot view non-active products' do
expect { get :show, params: { id: product.to_param } }.to raise_error(ActiveRecord::RecordNotFound)
end
it 'should provide the current user to the searcher class' do
user = mock_model(Spree.user_class, last_incomplete_spree_order: nil, spree_api_key: 'fake')
allow(controller).to receive_messages try_spree_current_user: user
expect_any_instance_of(Spree::Config.searcher_class).to receive(:current_user=).with(user)
get :index
expect(response.status).to eq(200)
end
# Regression test for https://github.com/spree/spree/issues/2249
it "doesn't error when given an invalid referer" do
current_user = mock_model(Spree.user_class, has_spree_role?: true, last_incomplete_spree_order: nil, generate_spree_api_key!: nil)
allow(controller).to receive_messages try_spree_current_user: current_user
request.env['HTTP_REFERER'] = 'not|a$url'
# Previously a URI::InvalidURIError exception was being thrown
get :show, params: { id: product.to_param }
end
context 'with history slugs present' do
let!(:product) { create(:product, available_on: 1.day.ago) }
it 'will redirect with a 301 with legacy url used' do
legacy_params = product.to_param
product.name = product.name + ' Brand New'
product.slug = nil
product.save!
get :show, params: { id: legacy_params }
expect(response.status).to eq(301)
end
it 'will redirect with a 301 with id used' do
product.name = product.name + ' Brand New'
product.slug = nil
product.save!
get :show, params: { id: product.id }
expect(response.status).to eq(301)
end
it 'will keep url params on legacy url redirect' do
legacy_params = product.to_param
product.name = product.name + ' Brand New'
product.slug = nil
product.save!
get :show, params: {
id: legacy_params,
taxon_id: taxon.id
}
expect(response.status).to eq(301)
expect(response.header['Location']).to include("taxon_id=#{taxon.id}")
end
end
context 'index products' do
it 'calls includes when the retrieved_products object responds to it' do
searcher = double('Searcher')
allow(controller).to receive_messages build_searcher: searcher
expect(searcher).to receive_message_chain('retrieve_products.includes')
get :index
end
it "does not call includes when it's not available" do
searcher = double('Searcher')
allow(controller).to receive_messages build_searcher: searcher
allow(searcher).to receive(:retrieve_products).and_return([])
get :index
expect(assigns(:products)).to eq([])
end
end
end | 36.362637 | 174 | 0.703234 |
e8b177283bbbe21b8a80de59d824ca32c528aa4a | 4,976 | require_dependency "user_impersonate/application_controller"
module TinderfieldsUserImpersonate
class ImpersonateController < ApplicationController
before_filter :authenticate_the_user, except: ["destroy"]
before_filter :current_user_must_be_staff!, except: ["destroy"]
# Display list of all users, except current (staff) user
# Is this exclusion unnecessary complexity?
# Normal apps wouldn't bother with this action; rather they would
# go straight to GET /impersonate/user/123 (create action)
def index
users_table = Arel::Table.new(user_table.to_sym) # e.g. :users
id_column = users_table[user_id_column.to_sym] # e.g. users_table[:id]
@users = user_class.order("updated_at DESC").
where(
id_column.not_in [
current_staff.send(user_id_column.to_sym) # e.g. current_user.id
])
if params[:search]
@users = @users.where("#{user_name_column} like ?", "%#{params[:search]}%")
end
end
# Perform the user impersonate action
# GET /impersonate/user/123
def create
puts 'create'
@user = find_user(params[:user_id])
puts @user
impersonate(@user)
redirect_on_impersonate(@user)
end
# Revert the user impersonation
# DELETE /impersonation/revert
def destroy
unless current_staff_user
flash[:notice] = "You weren't impersonating anyone"
redirect_on_revert and return
end
user = current_staff
revert_impersonate
if user
flash[:notice] = "No longer impersonating #{user}"
redirect_on_revert(user)
else
flash[:notice] = "No longer impersonating a user"
redirect_on_revert
end
end
private
def current_staff
@current_staff ||= begin
current_staff_method = config_or_default(:current_staff, "current_user").to_sym
send(current_staff_method) if respond_to? current_staff_method
end
end
def current_user_must_be_staff!
unless user_is_staff?(current_staff)
flash[:error] = "You don't have access to this section."
redirect_to :back
end
rescue ActionController::RedirectBackError
redirect_to '/'
end
# current_staff changes from a staff user to
# +new_user+; current user stored in +session[:staff_user_id]+
def impersonate(new_user)
puts 'impersonate'
session[:staff_user_id] = current_staff.id #
sign_in_user new_user
end
# revert the +current_staff+ back to the staff user
# stored in +session[:staff_user_id]+
def revert_impersonate
puts 'revert_impersonate'
puts current_staff_user
puts current_user
puts current_linguist
puts session[:staff_user_id]
return unless current_staff_user
sign_in_user current_staff_user
session[:staff_user_id] = nil
end
def sign_in_user(user)
puts 'sign_in_user'
method = config_or_default :sign_in_user_method, "sign_in"
puts method
puts user
self.send(method.to_sym, user)
end
def authenticate_the_user
method = config_or_default :authenticate_user_method, "authenticate_user!"
self.send(method.to_sym)
end
# Helper to load a User, using all the TinderfieldsUserImpersonate config options
def find_user(id)
user_class.send(user_finder_method, id)
end
# Similar to user.staff?
# Using all the TinderfieldsUserImpersonate config options
def user_is_staff?(user)
      user.respond_to?(user_is_staff_method.to_sym) &&
        user.send(user_is_staff_method.to_sym)
end
def user_finder_method
(config_or_default :user_finder, "find").to_sym
end
def user_class_name
puts 'user_class_name'
puts config_or_default :user_class, "User"
config_or_default :user_class, "User"
end
def user_class
user_class_name.constantize
end
def user_table
user_class_name.tableize.tr('/', '_')
end
def user_id_column
config_or_default :user_id_column, "id"
end
def user_name_column
config_or_default :user_name_column, "name"
end
def user_is_staff_method
config_or_default :user_is_staff_method, "staff?"
end
def redirect_on_impersonate(impersonated_user)
url = config_or_default :redirect_on_impersonate, root_url
redirect_to url
end
def redirect_on_revert(impersonated_user = nil)
url = config_or_default :redirect_on_revert, root_url
redirect_to url
end
# gets overridden config value for engine, else returns default
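    # e.g. (hypothetical override, for illustration only) if the host app sets
    # TinderfieldsUserImpersonate::Engine.config.user_class = "Staff",
    # then config_or_default(:user_class, "User") would return "Staff".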
def config_or_default(attribute, default)
attribute = attribute.to_sym
if TinderfieldsUserImpersonate::Engine.config.respond_to?(attribute)
TinderfieldsUserImpersonate::Engine.config.send(attribute)
else
default
end
end
end
end
| 29.796407 | 88 | 0.683079 |
1d372f1ee80a6fb1e483033d7d28946ed4858882 | 2,980 | # frozen_string_literal: true
require 'spec_helper'
describe RuboCop::Cop::Style::YodaCondition, :config do
let(:cop_config) { { 'EnforcedStyle' => 'all_comparison_operators' } }
subject(:cop) { described_class.new(config) }
let(:error_message) { 'Reverse the order of the operands `%s`.' }
# needed because of usage of safe navigation operator
let(:ruby_version) { 2.3 }
before { inspect_source(source) }
shared_examples 'accepts' do |code|
let(:source) { code }
it 'does not register an offense' do
expect(cop.offenses).to be_empty
end
end
shared_examples 'offense' do |code|
let(:source) { code }
it "registers an offense for #{code}" do
expect(cop.offenses.size).to eq(1)
expect(cop.offenses.first.message).to(
eq(format(error_message, code))
)
end
end
shared_examples 'autocorrect' do |code, corrected|
let(:source) { code }
it 'autocorrects code' do
expect(autocorrect_source(source)).to eq(corrected)
end
end
it_behaves_like 'accepts', 'b.value == 2'
it_behaves_like 'accepts', 'b&.value == 2'
it_behaves_like 'accepts', '@value == 2'
it_behaves_like 'accepts', '@@value == 2'
it_behaves_like 'accepts', 'b = 1; b == 2'
it_behaves_like 'accepts', '$var == 5'
it_behaves_like 'accepts', 'foo == "bar"'
it_behaves_like 'accepts', 'foo[0] > "bar" || baz != "baz"'
it_behaves_like 'accepts', 'node = last_node.parent'
it_behaves_like 'accepts', '(first_line - second_line) > 0'
it_behaves_like 'accepts', '5 == 6'
it_behaves_like 'accepts', '[1, 2, 3] <=> [4, 5, 6]'
it_behaves_like 'accepts', '!true'
it_behaves_like 'accepts', 'not true'
it_behaves_like 'offense', '"foo" == bar'
it_behaves_like 'offense', 'nil == bar'
it_behaves_like 'offense', 'false == active?'
it_behaves_like 'offense', '15 != @foo'
it_behaves_like 'offense', '42 < bar'
context 'autocorrection' do
it_behaves_like(
'autocorrect', 'if 10 == my_var; end', 'if my_var == 10; end'
)
it_behaves_like(
'autocorrect', 'if 2 < bar;end', 'if bar > 2;end'
)
it_behaves_like(
'autocorrect', 'foo = 42 if 42 > bar', 'foo = 42 if bar < 42'
)
it_behaves_like(
'autocorrect', '42 <= foo ? bar : baz', 'foo >= 42 ? bar : baz'
)
it_behaves_like(
'autocorrect', '42 >= foo ? bar : baz', 'foo <= 42 ? bar : baz'
)
it_behaves_like(
'autocorrect', 'nil != foo ? bar : baz', 'foo != nil ? bar : baz'
)
it_behaves_like(
'autocorrect', 'false === foo ? bar : baz', 'foo === false ? bar : baz'
)
end
context 'with EnforcedStyle: equality_operators_only' do
let(:cop_config) { { 'EnforcedStyle' => 'equality_operators_only' } }
it_behaves_like 'accepts', '42 < bar'
it_behaves_like 'accepts', 'nil >= baz'
it_behaves_like 'accepts', '3 < a && a < 5'
it_behaves_like 'offense', '42 != answer'
it_behaves_like 'offense', 'false == foo'
end
end
| 29.50495 | 77 | 0.62651 |
33620245e7456e830b339ba81afa6bb3012a5a1b | 1,071 | module Spine
module Restrictions
class Registration
attr_reader :restriction, :restrictions, :exceptions
def initialize(restriction)
@restriction = restriction
@exceptions = {}
@restrictions = {}
end
def restrict(action, resource)
add(restrictions, action, resource)
self
end
def except(action, resource)
add(exceptions, action, resource)
self
end
def applies?(action, resource)
return false if exception?(action, resource)
includes?(restrictions, action, resource)
end
private
def exception?(action, resource)
includes?(exceptions, action, resource)
end
def includes?(collection, action, resource)
actions = collection[resource] || collection[:all]
return false unless actions
actions[action] || actions[:all]
end
def add(collection, action, resource)
collection[resource] ||= {}
collection[resource][action] = true
end
end
end
end
| 22.3125 | 58 | 0.611578 |
08b4651b69657a7316b2454f8c1246c9449de4ab | 9,754 | # Ports from Dalli(https://github.com/mperham/dalli/blob/master/test/test_rack_session.rb)
require 'spec_helper'
describe Rack::Session::Memcached do
let(:incrementor_proc) {
->(env) {
env['rack.session']['counter'] ||= 0
env['rack.session']['counter'] += 1
Rack::Response.new(env['rack.session'].inspect).to_a
}
}
[:drop, :renew, :defer, :skip].each do |fn|
let("#{fn.to_s}_session".to_sym) {
Rack::Lint.new ->(env) {
env['rack.session.options'][fn] = true
incrementor_proc.call(env)
}
}
end
let(:incrementor) { Rack::Lint.new incrementor_proc }
let(:req) { Rack::MockRequest.new(Rack::Session::Memcached.new(incrementor)) }
let(:session_match) { /#{session_key}=([0-9a-fA-F]+);/ }
let(:session_key) { Rack::Session::Memcached::DEFAULT_OPTIONS[:key] }
it 'connects to existing server' do
expect{
session = Rack::Session::Memcached.new(incrementor, namespace: 'test:rack:session')
session.pool.set('ping', '')
}.not_to raise_error
end
  it 'passes options to Memcached' do
session = Rack::Session::Memcached.new(incrementor, namespace: 'test:rack:session')
expect(session.pool.instance_eval{@options[:prefix_key]}).to eq 'test:rack:session'
end
it 'creates a new cookie' do
res = req.get('/')
expect(res["Set-Cookie"]).to include("#{session_key}=")
expect(res.body).to eq '{"counter"=>1}'
end
  it 'determines session from cookie' do
res = req.get('/')
cookie= res['Set-Cookie']
expect(req.get('/', 'HTTP_COOKIE' => cookie).body).to eq '{"counter"=>2}'
expect(req.get('/', 'HTTP_COOKIE' => cookie).body).to eq '{"counter"=>3}'
end
  it 'determines session only from a cookie by default' do
res = req.get('/')
sid = res['Set-Cookie'][session_match, 1]
expect(req.get("/?rack.session=#{sid}").body).to eq '{"counter"=>1}'
expect(req.get("/?rack.session=#{sid}").body).to eq '{"counter"=>1}'
end
  it 'determines session from params' do
req = Rack::MockRequest.new(
Rack::Session::Memcached.new(incrementor, cookie_only: false)
)
res = req.get('/')
sid = res['Set-Cookie'][session_match, 1]
expect(req.get("/?rack.session=#{sid}").body).to eq '{"counter"=>2}'
expect(req.get("/?rack.session=#{sid}").body).to eq '{"counter"=>3}'
end
  it 'survives nonexistent cookies' do
bad_cookie = 'rack.session=blahblahblah'
res = req.get('/', 'HTTP_COOKIE' => bad_cookie)
expect(res.body).to eq '{"counter"=>1}'
expect(res['Set-Cookie'][session_match]).not_to match /#{bad_cookie}/
end
  it 'survives nonexistent blank cookies' do
bad_cookie = 'rack.session='
res = req.get('/', 'HTTP_COOKIE' => bad_cookie)
expect(res.body).to eq '{"counter"=>1}'
expect(res['Set-Cookie'][session_match]).not_to match /#{bad_cookie}$/
end
it 'maintains freshness' do
req = Rack::MockRequest.new(
Rack::Session::Memcached.new(incrementor, expire_after: 3)
)
res = req.get('/')
expect(res.body).to eq '{"counter"=>1}'
cookie = res['Set-Cookie']
res = req.get('/', 'HTTP_COOKIE' => cookie)
expect(res.body).to eq '{"counter"=>2}'
sleep 4
res = req.get('/', 'HTTP_COOKIE' => cookie)
expect(res.body).to eq '{"counter"=>1}'
end
it 'does not send the same session id if it did not change' do
res0 = req.get('/')
cookie = res0['Set-Cookie'][session_match]
expect(res0.body).to eq '{"counter"=>1}'
res1 = req.get('/', 'HTTP_COOKIE' => cookie)
expect(res1['Set-Cookie']).to be nil
expect(res1.body).to eq '{"counter"=>2}'
res2 = req.get('/', 'HTTP_COOKIE' => cookie)
expect(res2['Set-Cookie']).to be nil
expect(res2.body).to eq '{"counter"=>3}'
end
it 'deletes cookies with :drop option' do
rsm = Rack::Session::Memcached.new(incrementor)
req = Rack::MockRequest.new(rsm)
drop = Rack::Utils::Context.new(rsm, drop_session)
dreq = Rack::MockRequest.new(drop)
res1 = req.get('/')
session = (cookie = res1['Set-Cookie'])[session_match]
expect(res1.body).to eq '{"counter"=>1}'
res2 = dreq.get('/', 'HTTP_COOKIE' => cookie)
expect(res2['Set-Cookie']).to be nil
expect(res2.body).to eq '{"counter"=>2}'
res3 = req.get('/', 'HTTP_COOKIE' => cookie)
expect(res3['Set-Cookie'][session_match]).not_to eq session
expect(res3.body).to eq '{"counter"=>1}'
end
it 'provides new session id with :renew option' do
rsm = Rack::Session::Memcached.new(incrementor)
req = Rack::MockRequest.new(rsm)
renew = Rack::Utils::Context.new(rsm, renew_session)
rreq = Rack::MockRequest.new(renew)
res1 = req.get('/')
session = (cookie = res1['Set-Cookie'])[session_match]
expect(res1.body).to eq '{"counter"=>1}'
res2 = rreq.get('/', 'HTTP_COOKIE' => cookie)
new_cookie = res2['Set-Cookie']
new_session = new_cookie[session_match]
expect(new_session).not_to eq session
expect(res2.body).to eq '{"counter"=>2}'
res3 = req.get('/', 'HTTP_COOKIE' => new_cookie)
expect(res3.body).to eq '{"counter"=>3}'
# Old cookie was deleted
res4 = req.get('/', 'HTTP_COOKIE' => cookie)
expect(res4.body).to eq '{"counter"=>1}'
end
it 'omits cookie with :defer option but still updates the state' do
rsm = Rack::Session::Memcached.new(incrementor)
count = Rack::Utils::Context.new(rsm, incrementor)
defer = Rack::Utils::Context.new(rsm, defer_session)
dreq = Rack::MockRequest.new(defer)
creq = Rack::MockRequest.new(count)
res0 = dreq.get('/')
expect(res0['Set-Cookie']).to be nil
expect(res0.body).to eq '{"counter"=>1}'
res0 = creq.get('/')
res1 = dreq.get('/', 'HTTP_COOKIE' => res0['Set-Cookie'])
expect(res1.body).to eq '{"counter"=>2}'
res2 = dreq.get('/', 'HTTP_COOKIE' => res0['Set-Cookie'])
expect(res2.body).to eq '{"counter"=>3}'
end
it 'omits cookie and state update with :skip option' do
rsm = Rack::Session::Memcached.new(incrementor)
count = Rack::Utils::Context.new(rsm, incrementor)
skip = Rack::Utils::Context.new(rsm, skip_session)
sreq = Rack::MockRequest.new(skip)
creq = Rack::MockRequest.new(count)
res0 = sreq.get('/')
expect(res0['Set-Cookie']).to be nil
expect(res0.body).to eq '{"counter"=>1}'
res0 = creq.get('/')
res1 = sreq.get('/', 'HTTP_COOKIE' => res0['Set-Cookie'])
expect(res1.body).to eq '{"counter"=>2}'
res2 = sreq.get('/', 'HTTP_COOKIE' => res0['Set-Cookie'])
expect(res2.body).to eq '{"counter"=>2}'
end
it 'updates deep hashes correctly' do
hash_check = ->(env) {
session = env['rack.session']
unless session.include? 'test'
session.update a: :b, c: {d: :e}, f: {g: {h: :i}}, 'test' => true
else
session[:f][:g][:h] = :j
end
[200, {}, [session.inspect]]
}
rsm = Rack::Session::Memcached.new(hash_check)
req = Rack::MockRequest.new(rsm)
res0 = req.get('/')
session_id = (cookie = res0['Set-Cookie'])[session_match, 1]
ses0 = rsm.safe_get(session_id, true)
req.get('/', 'HTTP_COOKIE' => cookie)
ses1 = rsm.safe_get(session_id, true)
expect(ses1).not_to eq ses0
end
# on Dalli, this test is incoherent.
it 'cleanly merges sessions when multithreaded', multithread: true do
rsm = Rack::Session::Memcached.new(incrementor)
req = Rack::MockRequest.new(rsm)
res = req.get('/')
expect(res.body).to eq '{"counter"=>1}'
cookie = res['Set-Cookie']
session_id = cookie[session_match, 1]
#delta_incrementor = ->(env) {
# env['rack.session'] = env['rack.session'].dup
# Thread.stop
# env['rack.session'][(Time.now.usec * rand).to_i] = true
# incrementor.call(env)
#}
#tnum = rand(7).to_i + 5
#r = Array.new(tnum) do
# t = Thread.new do
# tses = Rack::Utils::Context.new(rsm.clone, delta_incrementor)
# treq = Rack::MockRequest.new(tses)
# treq.get('/', 'HTTP_COOKIE' => cookie, 'rack.multithread' => true)
# end
# p t #dummy output
# t
## FIXME: sometime failed on this wakeup. why??
#end.reverse.map{|t| t.wakeup.join.value}
#r.each.with_index(2) do |res, i|
# expect(res.body).to include "\"counter\"=>#{i}"
#end
#session = rsm.safe_get(session_id)
#expect(session.size).to be (tnum + 1)
#expect(session['counter']).to be (tnum + 1)
start_at = Time.now
time_delta = ->(env) {
delta = Time.now - start_at
env['rack.session']['time_delta'] = delta
[200, {'Content-Type' => 'text/plain'}, delta.to_s]
}
tnum = rand(7).to_i + 5
r = Array.new(tnum) do |i|
app = Rack::Utils::Context.new(rsm, time_delta)
req = Rack::MockRequest.new(app)
Thread.new(req) do |run|
run.get('/', 'HTTP_COOKIE' => cookie, 'rack.multithread' => true)
end.join.value
end.reverse
r.each do |res|
expect(res.body.to_i).to be >= 0
end
session = rsm.safe_get(session_id)
expect(session['time_delta']).to be >= 0
drop_counter = ->(env) {
env['rack.session'].delete('counter')
env['rack.session']['foo'] = 'bar'
[200, {'Content-Type' => 'text/plain'}, env['rack.session'].inspect]
}
tses = Rack::Utils::Context.new(rsm, drop_counter)
treq = Rack::MockRequest.new(tses)
tnum = rand(7).to_i + 5
r = Array.new(tnum) do |i|
Thread.new(treq) do |run|
run.get('/', 'HTTP_COOKIE' => cookie, 'rack.multithread' => true)
end.run.join.value
end.reverse
r.each do |res|
expect(res.body).to include '"foo"=>"bar"'
end
session = rsm.safe_get(session_id)
expect(session['counter']).to be nil
expect(session['foo']).to eq 'bar'
end
end
| 33.176871 | 90 | 0.611852 |
1d314c59f9666e6ce43fb40c34a3294db2a2f707 | 2,640 | #-- encoding: UTF-8
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2020 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
module ProjectSettingsHelper
extend self
def project_settings_tabs
[
{
name: 'generic',
action: { controller: '/project_settings/generic', action: 'show' },
label: :label_information_plural
},
{
name: 'modules',
action: { controller: '/project_settings/modules', action: 'show' },
label: :label_module_plural
},
{
name: 'types',
action: { controller: '/project_settings/types', action: 'show' },
label: :label_work_package_types
},
{
name: 'custom_fields',
action: { controller: '/project_settings/custom_fields', action: 'show' },
label: :label_custom_field_plural
},
{
name: 'versions',
action: { controller: '/project_settings/versions', action: 'show' },
label: :label_version_plural
},
{
name: 'categories',
action: { controller: '/project_settings/categories', action: 'show' },
label: :label_work_package_category_plural,
last: true
},
{
name: 'repository',
action: { controller: '/project_settings/repository', action: 'show' },
label: :label_repository
},
{
name: 'activities',
action: { controller: '/project_settings/activities', action: 'show' },
label: :enumeration_activities
}
]
end
end
| 33.417722 | 91 | 0.65947 |
915cdfb4931835f1edfe0ec1a3129fadf837ad0e | 4,834 | ###
### Code originated from Julien Sobrier's excellent safebrowsing client:
### https://github.com/juliensobrier/google-safe-browsing-lookup-ruby
###
# Author:: Julien Sobrier (mailto:[email protected])
# Copyright:: Copyright (c) 2015 Julien Sobrier
# License:: Distributes under the same terms as Ruby
require 'uri'
require 'net/https'
module Intrigue
module Client
module Search
module Google
class SafebrowsingLookup
# API key
attr_reader :key
# Enable debug & error output to the standard output
attr_reader :debug
# Enable error output to the standard output
attr_reader :error
# Contain last error
attr_reader :last_error
# Library version
attr_reader :version
# Google API version
attr_reader :api_version
# New client
#
# +key+:: API key
# +debug+:: Set to true to print debug & error output to the standard output. false (disabled) by default.
# +error+:: Set to true to print error output to the standard output. false (disabled) by default.
def initialize(key='', debug=false, error=false)
@key = key || ''
@debug = debug || false
@error = error || false
@last_error = ''
@version = '0.2'
@api_version = '3.1'
raise ArgumentError, "Missing API key" if (@key == '')
end
# Lookup a list of URLs against the Google Safe Browsing v2 lists.
#
  # Returns a hash <url>: <Google match>. The possible values for <Google match> are: "ok" (no match), "malware", "phishing", "malware,phishing" (match both lists) and "error".
#
  # +urls+:: List of URLs to lookup. The Lookup API allows only 10,000 URL checks a day. If you need more, find a Ruby implementation of the full Google Safe Browsing v2 API. Each request must contain 500 URLs at most. The lookup() method will split the list of URLs in blocks of 500 URLs if needed.
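  #
  # A minimal usage sketch (not from the original source; the API key and URL below are placeholders):
  #
  #   client = Intrigue::Client::Search::Google::SafebrowsingLookup.new('YOUR_API_KEY')
  #   results = client.lookup(['http://malware.testing.example/'])
  #   results.each { |url, status| puts "#{url} => #{status}" } # status: "ok", "malware", "phishing", "malware,phishing" or "error"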
def lookup(urls='')
if (urls.respond_to?('each') == false)
urls = Array.new(1, urls)
end
# urls_copy = Array.new(urls)
results = { }
# while (urls_copy.length > 0)
# inputs = urls_copy.slice!(0, 500)
count = 0
while (count * 500 < urls.length)
inputs = urls.slice(count * 500, 500)
body = inputs.length.to_s
inputs.each do |url|
puts "URL: #{url}"
puts "CANONICAL #{canonical(url)}"
body = body + "\n" + canonical(url)
end
debug("BODY:\n#{body}\n\n")
uri = URI.parse("https://sb-ssl.google.com/safebrowsing/api/lookup?client=ruby&key=#{@key}&appver=#{@version}&pver=#{@api_version}")
http = Net::HTTP.new(uri.host, uri.port)
http.open_timeout = 30
http.read_timeout = 30
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
response = http.request_post("#{uri.path}?#{uri.query}", body)
case response
when Net::HTTPOK # 200
debug("At least 1 match\n")
results.merge!( parse(inputs, response.body) )
when Net::HTTPNoContent # 204
debug("No match\n")
results.merge!( ok(inputs) )
when Net::HTTPBadRequest # 400
error("Invalid request")
results.merge( errors(inputs) )
when Net::HTTPUnauthorized # 401
error("Invalid API key")
results.merge!( errors(inputs) )
when Net::HTTPServiceUnavailable # 503
error("Server error, client may have sent too many requests")
results.merge!( errors(inputs) )
else
self.error("Unexpected server response: #{response.code}")
results.merge!( errors(inputs) )
end
count = count + 1
end
return results
end
private
  # Not much is actually done; full URL canonicalization is not required with the Lookup library, according to the API documentation
def canonical(url='')
# remove leading/ending white spaces
url.strip!
# make sure whe have a scheme
if (url !~ /^https?\:\/\//i)
url = "http://#{url}"
end
uri = URI.parse(url)
return uri.to_s
end
def parse(urls=[], response)
lines = response.split("\n")
if (urls.length != lines.length)
      error("Number of URLs in the response does not match the number of URLs in the request")
debug("#{urls.length} / #{lines.length}")
debug(response);
return errors(urls);
end
results = { }
for i in (0..lines.length - 1)
results[urls[i]] = lines[i]
debug(urls[i] + " => " + lines[i])
end
return results
end
def errors(urls=[])
return Hash[*urls.map {|url| [url, 'error']}.flatten]
end
def ok(urls=[])
return Hash[*urls.map {|url| [url, 'ok']}.flatten]
end
def debug(message='')
puts message if (@debug == true)
end
def error(message='')
puts "#{message}\n" if (@debug == true or @error == true)
@last_error = message
end
end
end
end
end
end | 26.707182 | 300 | 0.629293 |
6ab32d8bec32aafe8929e397a712ca2671c26822 | 349 | class CreateSpreeReturnAuthorizationInventoryUnit < ActiveRecord::Migration
def change
create_table :spree_return_authorization_inventory_units do |t|
t.integer :return_authorization_id
t.integer :inventory_unit_id
t.integer :exchange_variant_id
t.datetime :received_at
t.timestamps null: true
end
end
end
| 26.846154 | 75 | 0.762178 |
d55bf5c54e53ba777e28a5807e4b4d97ee26a62a | 44 | module DanarchyJaml
VERSION = "0.1.0"
end
| 11 | 19 | 0.704545 |
91a3f882b0dbbb0b9f4697cc6a01bff491ae6f26 | 211 | require "hotel_beds/model"
module HotelBeds
module Model
class Destination
include HotelBeds::Model
# attributes
attribute :code, String
attribute :name, String
end
end
end
| 15.071429 | 30 | 0.672986 |
18531c58eaf2184f4792226dd0f10518c0d4f7d4 | 7,123 | require 'spec_helper'
describe Pubnub::Subscribe do
around(:each) do |example|
@response_output = StringIO.new
@message_output = StringIO.new
@callback = lambda { |envelope|
Pubnub.logger.debug 'FIRING CALLBACK FROM TEST'
@response_output.write envelope.response
@message_output.write envelope.msg
@after_callback = true
}
@pn = Pubnub.new(:max_retries => 0, :subscribe_key => :ds, :publish_key => :ds, :secret_key => 'some_secret_key')
@pn.uuid = 'rubytests'
Celluloid.boot
example.run
Celluloid.shutdown
end
context 'https' do
before :each do
@ssl = true
end
it 'receives message sent to foo.bar when subscribed on foo.*' do
VCR.use_cassette("wc-sub-ssl-#{@ssl}-1", :record => :once) do
@pn.subscribe(:ssl => @ssl, :channel => 'foo.*', :callback => @callback, http_sync: true)
event = @pn.subscribe(:ssl => @ssl, :channel => 'foo.*', :callback => @callback)
expect(event.value.map { |e| e.response }).to eq ["[[{\"text\":\"hey\"}],\"14376641318913945\",\"foo.*\",\"foo.foo\"]"]
expect(event.value.map { |e| e.message }).to eq [{"text"=>"hey"}]
expect(@after_callback).to eq true
end
end
    it 'is able to subscribe on foo.* and receive presence events on foo.bar-pnpres when presence callback is provided.' do
VCR.use_cassette("wc-sub-ssl-#{@ssl}-2", :record => :once) do
@pn.subscribe(:ssl => @ssl, :channel => 'foo.*', :callback => @callback, :presence_callback => @callback, http_sync: true)
event = @pn.subscribe(:ssl => @ssl, :channel => 'foo.*', :callback => @callback, :presence_callback => @callback)
expect(event.value.map { |e| e.response }).to eq ["[[{\"action\": \"leave\", \"timestamp\": 1437664166, \"uuid\": \"c7769435-68b3-48b0-9065-08cafce285df\", \"occupancy\": 0}],\"14376641662543427\",\"foo.*\",\"foo.foo-pnpres\"]"]
expect(event.value.map { |e| e.message }).to eq [{"action"=>"leave", "timestamp"=>1437664166, "uuid"=>"c7769435-68b3-48b0-9065-08cafce285df", "occupancy"=>0}]
expect(@after_callback).to eq true
end
end
it 'does not receive presence events when subscribed to foo.* when presence callback is not provided.' do
VCR.use_cassette("wc-sub-ssl-#{@ssl}-3", :record => :once) do
@pn.subscribe(:ssl => @ssl, :channel => 'foo.*', :callback => @callback, http_sync: true)
event = @pn.subscribe(:ssl => @ssl, :channel => 'foo.*', :callback => @callback)
# expect(event.value.map { |e| e.response }).to eq []
# expect(event.value.map { |e| e.message }).to eq []
event.value
expect(@after_callback).to eq nil
end
end
    it 'is able to be subscribed to non-WC channel, channel group and wildcard channel at the same time using multiplexing and should receive messages appropriately when a message is published on the corresponding channel' do
VCR.use_cassette("wc-sub-ssl-#{@ssl}-4", :record => :once) do
@pn.subscribe(:ssl => @ssl, :channel => 'foo.*,foo', :group => 'group', :callback => @callback, :presence_callback => @callback)
sleep(0.3)
@response_output.seek 0
@message_output.seek 0
expect(@response_output.read).to eq "[[{\"text\":\"hey\"}],\"14376642242988715\",\"foo.*\",\"foo.foo\"][[{\"action\": \"leave\", \"timestamp\": 1437664227, \"uuid\": \"c7769435-68b3-48b0-9065-08cafce285df\", \"occupancy\": 0}],\"14376642278720422\",\"foo.*\",\"foo.foo-pnpres\"][[{\"text\":\"hey\"}],\"14376642302336303\",\"foo\",\"foo\"]"
expect(@message_output.read).to eq "{\"text\"=>\"hey\"}{\"action\"=>\"leave\", \"timestamp\"=>1437664227, \"uuid\"=>\"c7769435-68b3-48b0-9065-08cafce285df\", \"occupancy\"=>0}{\"text\"=>\"hey\"}"
expect(@after_callback).to eq true
end
end
end
  context 'http' do
before :each do
@ssl = false
end
it 'receives message sent to foo.bar when subscribed on foo.*' do
VCR.use_cassette("wc-sub-ssl-#{@ssl}-1", :record => :once) do
@pn.subscribe(:ssl => @ssl, :channel => 'foo.*', :callback => @callback, http_sync: true)
event = @pn.subscribe(:ssl => @ssl, :channel => 'foo.*', :callback => @callback)
expect(event.value.map { |e| e.response }).to eq ["[[{\"text\":\"hey\"}],\"14376641318913945\",\"foo.*\",\"foo.foo\"]"]
expect(event.value.map { |e| e.message }).to eq [{"text"=>"hey"}]
expect(@after_callback).to eq true
end
end
    it 'is able to subscribe on foo.* and receive presence events on foo.bar-pnpres when presence callback is provided.' do
VCR.use_cassette("wc-sub-ssl-#{@ssl}-2", :record => :once) do
@pn.subscribe(:ssl => @ssl, :channel => 'foo.*', :callback => @callback, :presence_callback => @callback, http_sync: true)
event = @pn.subscribe(:ssl => @ssl, :channel => 'foo.*', :callback => @callback, :presence_callback => @callback)
expect(event.value.map { |e| e.response }).to eq ["[[{\"action\": \"leave\", \"timestamp\": 1437664166, \"uuid\": \"c7769435-68b3-48b0-9065-08cafce285df\", \"occupancy\": 0}],\"14376641662543427\",\"foo.*\",\"foo.foo-pnpres\"]"]
expect(event.value.map { |e| e.message }).to eq [{"action"=>"leave", "timestamp"=>1437664166, "uuid"=>"c7769435-68b3-48b0-9065-08cafce285df", "occupancy"=>0}]
expect(@after_callback).to eq true
end
end
it 'does not receive presence events when subscribed to foo.* when presence callback is not provided.' do
VCR.use_cassette("wc-sub-ssl-#{@ssl}-3", :record => :once) do
@pn.subscribe(:ssl => @ssl, :channel => 'foo.*', :callback => @callback, http_sync: true)
event = @pn.subscribe(:ssl => @ssl, :channel => 'foo.*', :callback => @callback)
# expect(event.value.map { |e| e.response }).to eq []
# expect(event.value.map { |e| e.message }).to eq []
event.value
expect(@after_callback).to eq nil
end
end
    it 'is able to be subscribed to non-WC channel, channel group and wildcard channel at the same time using multiplexing and should receive messages appropriately when a message is published on the corresponding channel' do
VCR.use_cassette("wc-sub-ssl-#{@ssl}-4", :record => :once) do
@pn.subscribe(:ssl => @ssl, :channel => 'foo.*,foo', :group => 'group', :callback => @callback, :presence_callback => @callback)
sleep(0.3)
@response_output.seek 0
@message_output.seek 0
expect(@response_output.read).to eq "[[{\"text\":\"hey\"}],\"14376642242988715\",\"foo.*\",\"foo.foo\"][[{\"action\": \"leave\", \"timestamp\": 1437664227, \"uuid\": \"c7769435-68b3-48b0-9065-08cafce285df\", \"occupancy\": 0}],\"14376642278720422\",\"foo.*\",\"foo.foo-pnpres\"][[{\"text\":\"hey\"}],\"14376642302336303\",\"foo\",\"foo\"]"
expect(@message_output.read).to eq "{\"text\"=>\"hey\"}{\"action\"=>\"leave\", \"timestamp\"=>1437664227, \"uuid\"=>\"c7769435-68b3-48b0-9065-08cafce285df\", \"occupancy\"=>0}{\"text\"=>\"hey\"}"
expect(@after_callback).to eq true
end
end
end
end | 51.244604 | 347 | 0.609013 |
d518f3a8961eff669766b5940cbb5443c9ceb577 | 271 | require Rails.root.join('features', 'steps', 'groups')
class Spinach::Features::Groups < Spinach::FeatureSteps
# EE only step
step 'I go to "Audit Events"' do
page.within '.sidebar-wrapper' do
find(:link, 'Audit Events').trigger('click')
end
end
end
| 24.636364 | 55 | 0.671587 |
ed9c910be3103fd4a8ec5028b0820a2c8746b8da | 8,502 | =begin
PureCloud Platform API
With the PureCloud Platform API, you can control all aspects of your PureCloud environment. With the APIs you can access the system configuration, manage conversations and more.
OpenAPI spec version: v2
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
License: UNLICENSED
https://help.mypurecloud.com/articles/terms-and-conditions/
Terms of Service: https://help.mypurecloud.com/articles/terms-and-conditions/
=end
require 'date'
module PureCloud
# Schedule Adherence Configuration
class AdherenceSettings
# The threshold in minutes where an alert will be triggered when an agent is considered severely out of adherence
attr_accessor :severe_alert_threshold_minutes
# Target adherence percentage
attr_accessor :adherence_target_percent
# The threshold in seconds for which agents should not be penalized for being momentarily out of adherence
attr_accessor :adherence_exception_threshold_seconds
# Whether to treat all non-on-queue activities as equivalent for adherence purposes
attr_accessor :non_on_queue_activities_equivalent
# Whether to track on-queue activities
attr_accessor :track_on_queue_activity
# Activity categories that should be ignored for adherence purposes
attr_accessor :ignored_activity_categories
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'severe_alert_threshold_minutes' => :'severeAlertThresholdMinutes',
:'adherence_target_percent' => :'adherenceTargetPercent',
:'adherence_exception_threshold_seconds' => :'adherenceExceptionThresholdSeconds',
:'non_on_queue_activities_equivalent' => :'nonOnQueueActivitiesEquivalent',
:'track_on_queue_activity' => :'trackOnQueueActivity',
:'ignored_activity_categories' => :'ignoredActivityCategories'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'severe_alert_threshold_minutes' => :'Integer',
:'adherence_target_percent' => :'Integer',
:'adherence_exception_threshold_seconds' => :'Integer',
:'non_on_queue_activities_equivalent' => :'BOOLEAN',
:'track_on_queue_activity' => :'BOOLEAN',
:'ignored_activity_categories' => :'IgnoredActivityCategories'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}
if attributes.has_key?(:'severeAlertThresholdMinutes')
self.severe_alert_threshold_minutes = attributes[:'severeAlertThresholdMinutes']
end
if attributes.has_key?(:'adherenceTargetPercent')
self.adherence_target_percent = attributes[:'adherenceTargetPercent']
end
if attributes.has_key?(:'adherenceExceptionThresholdSeconds')
self.adherence_exception_threshold_seconds = attributes[:'adherenceExceptionThresholdSeconds']
end
if attributes.has_key?(:'nonOnQueueActivitiesEquivalent')
self.non_on_queue_activities_equivalent = attributes[:'nonOnQueueActivitiesEquivalent']
end
if attributes.has_key?(:'trackOnQueueActivity')
self.track_on_queue_activity = attributes[:'trackOnQueueActivity']
end
if attributes.has_key?(:'ignoredActivityCategories')
self.ignored_activity_categories = attributes[:'ignoredActivityCategories']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
return invalid_properties
end
    # Check to see if all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      true
    end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
severe_alert_threshold_minutes == o.severe_alert_threshold_minutes &&
adherence_target_percent == o.adherence_target_percent &&
adherence_exception_threshold_seconds == o.adherence_exception_threshold_seconds &&
non_on_queue_activities_equivalent == o.non_on_queue_activities_equivalent &&
track_on_queue_activity == o.track_on_queue_activity &&
ignored_activity_categories == o.ignored_activity_categories
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[severe_alert_threshold_minutes, adherence_target_percent, adherence_exception_threshold_seconds, non_on_queue_activities_equivalent, track_on_queue_activity, ignored_activity_categories].hash
end
# build the object from hash
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
else
#TODO show warning in debug mode
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
else
# data not found in attributes(hash), not an issue as the data can be optional
end
end
self
end
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /^(true|t|yes|y|1)$/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
_model = Object.const_get("PureCloud").const_get(type).new
_model.build_from_hash(value)
end
end
def to_s
to_hash.to_s
end
    # to_body is an alias to to_hash (backward compatibility)
def to_body
to_hash
end
# return the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Method to output non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
def _to_hash(value)
if value.is_a?(Array)
value.compact.map{ |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 25.45509 | 198 | 0.620677 |
28631608edd99d57818b8169240f5d10d98137aa | 1,060 | require 'rails_helper'
RSpec.describe Admin::BannersController do
before { signin_user }
describe 'already had a banner' do
let(:banner) { create :banner }
it 'GET /admin/banners' do
get '/admin/banners'
expect(response).to be_success
end
it 'GET /admin/banners/new' do
get '/admin/banners/new'
expect(response).to be_success
end
it 'GET /admin/banners/123/edit' do
get "/admin/banners/#{banner.id}/edit"
expect(response).to be_success
end
it 'PUT /admin/banners/123' do
expect {
put "/admin/banners/#{banner.id}", admin_banner: { weight: 2 }
}.to change { banner.reload.weight }.to(2)
expect(response).to be_redirect
end
it 'DELETE /admin/banners/123' do
delete "/admin/banners/#{banner.id}"
expect(Banner.count).to be_zero
end
end
it 'POST /admin/banners' do
expect {
post '/admin/banners', admin_banner: attributes_for(:banner)
}.to change { Banner.count }.by(1)
expect(response).to be_redirect
end
end
| 24.090909 | 70 | 0.637736 |
ac6e549ac181fde992a0e7c010ab7d70667dedcc | 722 | ENV["RAILS_ENV"] = "test"
require "rspec"
require "byebug"
require File.expand_path("../../spec/dummy/config/environment.rb", __FILE__)
ActiveRecord::Migrator.migrations_paths = [File.expand_path("../../spec/dummy/db/migrate", __FILE__)]
# Load support files
Dir[File.join(File.dirname(__FILE__), 'support', '**', '*.rb')].each { |f| require f }
# Load fixtures from the engine
if ActiveSupport::TestCase.respond_to?(:fixture_path=)
ActiveSupport::TestCase.fixture_path = File.expand_path("../fixtures", __FILE__)
ActiveSupport::TestCase.fixtures :all
end
require 'securerandom'
require 'stringio'
require 'paul_bunyan'
RSpec.configure do |config|
config.color = true
config.include NotificationHelpers
end
| 27.769231 | 101 | 0.743767 |
035cf5bec5886e7e301eab7cee84a323a750878f | 1,176 | module RuboCop
module Cop
module Airbnb
# Cop to tell developers to use :class => "MyClass" instead of :class => MyClass,
# because the latter slows down reloading zeus.
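      #
      # A hedged illustration of what this cop flags (the :user factory below is a hypothetical example):
      #
      # @example
      #   # bad
      #   factory :user, :class => User
      #
      #   # good
      #   factory :user, :class => "User"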
class FactoryClassUseString < Cop
MSG = 'Instead of :class => MyClass, use :class => "MyClass". ' \
"This enables faster spec startup time and faster Zeus reload time.".freeze
def on_send(node)
return unless node.command?(:factory)
class_pair = class_node(node)
if class_pair && !string_class_name?(class_pair)
add_offense(class_pair)
end
end
private
# Return the descendant node that is a hash pair (:key => value) whose key
# is :class.
def class_node(node)
node.descendants.detect do |e|
e.is_a?(Parser::AST::Node) &&
e.pair_type? &&
e.children[0].children[0] == :class
end
end
# Given a hash pair :class_name => value, is the value a hardcoded string?
def string_class_name?(class_pair)
class_pair.children[1].str_type?
end
end
end
end
end
| 29.4 | 87 | 0.583333 |
1a61494130580ecd45d696c976d3625fb1b7dcc3 | 840 | # frozen_string_literal: true
RSpec.describe "bundle package" do
before do
gemfile <<-G
source "#{file_uri_for(gem_repo1)}"
gem "rack"
G
end
context "with --cache-path" do
it "caches gems at given path" do
bundle :package, "cache-path" => "vendor/cache-foo"
expect(bundled_app("vendor/cache-foo/rack-1.0.0.gem")).to exist
end
end
context "with config cache_path" do
it "caches gems at given path" do
bundle "config set cache_path vendor/cache-foo"
bundle :package
expect(bundled_app("vendor/cache-foo/rack-1.0.0.gem")).to exist
end
end
context "with absolute --cache-path" do
it "caches gems at given path" do
bundle :package, "cache-path" => "/tmp/cache-foo"
expect(bundled_app("/tmp/cache-foo/rack-1.0.0.gem")).to exist
end
end
end
| 25.454545 | 69 | 0.65 |
62df7ea79fe1bdb5292cec3ccc55ba1992789e85 | 13,935 | require "inspec/utils/deprecation"
# For backwards compatibility during the rename (see #3802),
# maintain the Inspec::Attribute namespace for people checking for
# Inspec::Attribute::DEFAULT_ATTRIBUTE
module Inspec
class Attribute
# This only exists to create the Inspec::Attribute::DEFAULT_ATTRIBUTE symbol with a class
class DEFAULT_ATTRIBUTE; end # rubocop: disable Naming/ClassAndModuleCamelCase
end
end
module Inspec
class Input
class Error < Inspec::Error; end
class ValidationError < Error
attr_accessor :input_name
attr_accessor :input_value
attr_accessor :input_type
end
class TypeError < Error
attr_accessor :input_type
end
class RequiredError < Error
attr_accessor :input_name
end
#===========================================================================#
# Class Input::Event
#===========================================================================#
# TODO: break this out to its own file under inspec/input?
# Information about how the input obtained its value.
# Each time it changes, an Input::Event is added to the #events array.
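    #
    # A typical construction looks roughly like this (the field values are illustrative assumptions):
    #
    #   Inspec::Input::Event.new(action: :set, provider: :cli, priority: 50, value: 'foo')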
class Event
EVENT_PROPERTIES = [
:action, # :create, :set, :fetch
:provider, # Name of the plugin
:priority, # Priority of this plugin for resolving conflicts. 1-100, higher numbers win.
:value, # New value, if provided.
:file, # File containing the input-changing action, if known
:line, # Line in file containing the input-changing action, if known
:hit, # if action is :fetch, true if the remote source had the input
].freeze
# Value has a special handler
EVENT_PROPERTIES.reject { |p| p == :value }.each do |prop|
attr_accessor prop
end
attr_reader :value
def initialize(properties = {})
@value_has_been_set = false
properties.each do |prop_name, prop_value|
if EVENT_PROPERTIES.include? prop_name
# OK, save the property
send((prop_name.to_s + "=").to_sym, prop_value)
else
raise "Unrecognized property to Input::Event: #{prop_name}"
end
end
end
def value=(the_val)
# Even if set to nil or false, it has indeed been set; note that fact.
@value_has_been_set = true
@value = the_val
end
def value_has_been_set?
@value_has_been_set
end
def diagnostic_string
to_h.reject { |_, val| val.nil? }.to_a.map { |pair| "#{pair[0]}: '#{pair[1]}'" }.join(", ")
end
def to_h
EVENT_PROPERTIES.each_with_object({}) do |prop, hash|
hash[prop] = send(prop)
end
end
def self.probe_stack
frames = caller_locations(2, 40)
frames.reject! { |f| f.path && f.path.include?("/lib/inspec/") }
frames.first
end
end # class Event
#===========================================================================#
# Class NO_VALUE_SET
#===========================================================================#
# This special class is used to represent the value when an input has
# not been assigned a value. This allows a user to explicitly assign nil
# to an input.
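    #
    # A brief sketch of the resulting behavior (illustrative, not from the original source):
    #
    #   input = Inspec::Input.new('unset_input')
    #   input.value.is_a?(Inspec::Input::NO_VALUE_SET) # => true
    #   input.value.to_s # => "Input 'unset_input' does not have a value. Skipping test."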
class NO_VALUE_SET # rubocop: disable Naming/ClassAndModuleCamelCase
def initialize(name, warn_on_create = true)
@name = name
# output warn message if we are in a exec call
if warn_on_create && Inspec::BaseCLI.inspec_cli_command == :exec
Inspec::Log.warn(
"Input '#{@name}' does not have a value. "\
"Use --input-file or --input to provide a value for '#{@name}' or specify a "\
"value with `input('#{@name}', value: 'somevalue', ...)`."
)
end
end
def method_missing(*_)
self
end
def respond_to_missing?(_, _)
true
end
def to_s
"Input '#{@name}' does not have a value. Skipping test."
end
def is_a?(klass)
if klass == Inspec::Attribute::DEFAULT_ATTRIBUTE
Inspec.deprecate(:rename_attributes_to_inputs, "Don't check for `is_a?(Inspec::Attribute::DEFAULT_ATTRIBUTE)`, check for `Inspec::Input::NO_VALUE_SET")
true # lie for backward compatibility
else
super(klass)
end
end
def kind_of?(klass)
if klass == Inspec::Attribute::DEFAULT_ATTRIBUTE
Inspec.deprecate(:rename_attributes_to_inputs, "Don't check for `kind_of?(Inspec::Attribute::DEFAULT_ATTRIBUTE)`, check for `Inspec::Input::NO_VALUE_SET")
true # lie for backward compatibility
else
super(klass)
end
end
end # class NO_VALUE_SET
#===========================================================================#
# Class Inspec::Input
#===========================================================================#
# Validation types for input values
VALID_TYPES = %w{
String
Numeric
Regexp
Array
Hash
Boolean
Any
}.freeze
# TODO: this is not used anywhere?
# If you call `input` in a control file, the input will receive this priority.
# You can override that with a :priority option.
DEFAULT_PRIORITY_FOR_DSL_ATTRIBUTES = 20
# If you somehow manage to initialize an Input outside of the DSL,
# AND you don't provide an Input::Event, this is the priority you get.
DEFAULT_PRIORITY_FOR_UNKNOWN_CALLER = 10
# If you directly call value=, this is the priority assigned.
# This is the highest priority within InSpec core; though plugins
# are free to go higher.
DEFAULT_PRIORITY_FOR_VALUE_SET = 60
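    # A hedged sketch of how these priorities interact (illustrative, not from the original source):
    #
    #   input = Inspec::Input.new('threshold', value: 5) # direct call outside the DSL => priority 10
    #   input.value = 10                                 # value_setter event => priority 60
    #   input.value                                      # => 10; the highest-priority event wins, ties go to the latest event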
attr_reader :description, :events, :identifier, :name, :required, :title, :type
def initialize(name, options = {})
@name = name
@opts = options
if @opts.key?(:default)
Inspec.deprecate(:attrs_value_replaces_default, "input name: '#{name}'")
if @opts.key?(:value)
Inspec::Log.warn "Input #{@name} created using both :default and :value options - ignoring :default"
@opts.delete(:default)
end
end
# Array of Input::Event objects. These compete with one another to determine
# the value of the input when value() is called, as well as providing a
# debugging record of when and how the value changed.
@events = []
events.push make_creation_event(options)
update(options)
end
# TODO: is this here just for testing?
def set_events
events.select { |e| e.action == :set }
end
def diagnostic_string
"Input #{name}, with history:\n" +
events.map(&:diagnostic_string).map { |line| " #{line}" }.join("\n")
end
#--------------------------------------------------------------------------#
# Managing Value
#--------------------------------------------------------------------------#
def update(options)
_update_set_metadata(options)
normalize_type_restriction!
# Values are set by passing events in; but we can also infer an event.
if options.key?(:value) || options.key?(:default)
if options.key?(:event)
if options.key?(:value) || options.key?(:default)
Inspec::Log.warn "Do not provide both an Event and a value as an option to attribute('#{name}') - using value from event"
end
else
self.class.infer_event(options) # Sets options[:event]
end
end
events << options[:event] if options.key? :event
enforce_type_restriction!
end
# We can determine a value:
# 1. By event.value (preferred)
# 2. By options[:value]
# 3. By options[:default] (deprecated)
def self.infer_event(options)
# Don't rely on this working; you really should be passing a proper Input::Event
# with the context information you have.
location = Input::Event.probe_stack
event = Input::Event.new(
action: :set,
provider: options[:provider] || :unknown,
priority: options[:priority] || Inspec::Input::DEFAULT_PRIORITY_FOR_UNKNOWN_CALLER,
file: location.path,
line: location.lineno
)
if options.key?(:default)
Inspec.deprecate(:attrs_value_replaces_default, "attribute name: '#{name}'")
if options.key?(:value)
Inspec::Log.warn "Input #{@name} created using both :default and :value options - ignoring :default"
options.delete(:default)
else
options[:value] = options.delete(:default)
end
end
event.value = options[:value] if options.key?(:value)
options[:event] = event
end
private
def _update_set_metadata(options)
# Basic metadata
@title = options[:title] if options.key?(:title)
@description = options[:description] if options.key?(:description)
@required = options[:required] if options.key?(:required)
@identifier = options[:identifier] if options.key?(:identifier) # TODO: determine if this is ever used
@type = options[:type] if options.key?(:type)
end
def make_creation_event(options)
loc = options[:location] || Event.probe_stack
Input::Event.new(
action: :create,
provider: options[:provider],
file: loc.path,
line: loc.lineno
)
end
# Determine the current winning value, but don't validate it
def current_value(warn_on_missing = true)
# Examine the events to determine highest-priority value. Tie-break
# by using the last one set.
events_that_set_a_value = events.select(&:value_has_been_set?)
winning_priority = events_that_set_a_value.map(&:priority).max
winning_events = events_that_set_a_value.select { |e| e.priority == winning_priority }
winning_event = winning_events.last # Last for tie-break
if winning_event.nil?
# No value has been set - return special no value object
NO_VALUE_SET.new(name, warn_on_missing)
else
winning_event.value # May still be nil
end
end
public
def value=(new_value, priority = DEFAULT_PRIORITY_FOR_VALUE_SET)
# Inject a new Event with the new value.
location = Event.probe_stack
events << Event.new(
action: :set,
provider: :value_setter,
priority: priority,
value: new_value,
file: location.path,
line: location.lineno
)
enforce_type_restriction!
end
def value
enforce_required_validation!
current_value
end
def has_value?
!current_value(false).is_a? NO_VALUE_SET
end
def to_hash
as_hash = { name: name, options: {} }
%i{description title identifier type required value}.each do |field|
val = send(field)
next if val.nil?
as_hash[:options][field] = val
end
as_hash
end
#--------------------------------------------------------------------------#
# Value Type Coercion
#--------------------------------------------------------------------------#
def to_s
"Input #{name} with #{current_value}"
end
#--------------------------------------------------------------------------#
# Validation
#--------------------------------------------------------------------------#
private
def enforce_required_validation!
return unless required
# skip if we are not doing an exec call (archive/vendor/check)
return unless Inspec::BaseCLI.inspec_cli_command == :exec
proposed_value = current_value(false)
if proposed_value.nil? || proposed_value.is_a?(NO_VALUE_SET)
error = Inspec::Input::RequiredError.new
error.input_name = name
raise error, "Input '#{error.input_name}' is required and does not have a value."
end
end
def enforce_type_restriction! # rubocop:disable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
return unless type
return unless has_value?
type_req = type
return if type_req == "Any"
proposed_value = current_value(false)
invalid_type = false
if type_req == "Regexp"
invalid_type = true unless valid_regexp?(proposed_value)
elsif type_req == "Numeric"
invalid_type = true unless valid_numeric?(proposed_value)
elsif type_req == "Boolean"
invalid_type = true unless [true, false].include?(proposed_value)
elsif proposed_value.is_a?(Module.const_get(type_req)) == false
# TODO: why is this case here?
invalid_type = true
end
if invalid_type == true
error = Inspec::Input::ValidationError.new
error.input_name = @name
error.input_value = proposed_value
error.input_type = type_req
raise error, "Input '#{error.input_name}' with value '#{error.input_value}' does not validate to type '#{error.input_type}'."
end
end
def normalize_type_restriction!
return unless type
type_req = type.capitalize
abbreviations = {
"Num" => "Numeric",
"Regex" => "Regexp",
}
type_req = abbreviations[type_req] if abbreviations.key?(type_req)
unless VALID_TYPES.include?(type_req)
error = Inspec::Input::TypeError.new
error.input_type = type_req
raise error, "Type '#{error.input_type}' is not a valid input type."
end
@type = type_req
end
def valid_numeric?(value)
Float(value)
true
rescue
false
end
def valid_regexp?(value)
# check for invalid regex syntax
Regexp.new(value)
true
rescue
false
end
end
end
| 33.021327 | 164 | 0.582777 |
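The value-resolution logic in `current_value` above boils down to: the event with the highest priority wins, and among events sharing that priority the one recorded last wins. Below is a minimal standalone sketch of that selection rule — it uses a plain Struct instead of the real Inspec::Input::Event class, so the provider names and priority numbers are illustrative assumptions, not InSpec's actual constants:

# Simplified stand-in for Inspec::Input::Event, for illustration only.
Event = Struct.new(:provider, :priority, :value, keyword_init: true)

events = [
  Event.new(provider: :profile_metadata, priority: 20, value: "metadata default"),
  Event.new(provider: :cli_input,        priority: 50, value: "cli override"),
  Event.new(provider: :runner_api,       priority: 50, value: "set afterwards"),
]

# Highest priority wins; the last event at that priority breaks the tie,
# mirroring the select/max/last sequence in Input#current_value.
winning_priority = events.map(&:priority).max
winner = events.select { |e| e.priority == winning_priority }.last

puts winner.value # => "set afterwards"

The real method additionally returns a NO_VALUE_SET sentinel when no event has set a value, which is what `has_value?` and the required-input check rely on.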
e2e5b26a767c87861f87011b8eb0d5bf8979080c | 1,671 | require File.dirname(__FILE__) + '/test_helper'
class ActsAsRDocTest < ActsAsMarkupTestCase
context 'acts_as_rdoc' do
setup do
@rdoctext = "== RDoc Test Text"
class ::Post < ActiveRecord::Base
acts_as_rdoc :body
end
@post = Post.create!(:title => 'Blah', :body => @rdoctext)
end
should "have a RDocText object returned for the column value" do
assert_kind_of RDocText, @post.body
end
should "return original RDoc text for a `to_s` method call on the column value" do
assert_equal @rdoctext, @post.body.to_s
end
should 'return false for .blank?' do
      assert !@post.body.blank?
end
should "return formated html for a `to_html` method call on the column value" do
assert_match(/<h2[^>]*>\s*RDoc Test Text\s*<\/h2>/, @post.body.to_html)
end
context "changing value of RDoc field should return new RDoc object" do
setup do
@old_body = @post.body
@post.body = "http://www.example.com/"
end
should "still have an RDocText object but not the same object" do
assert_kind_of RDocText, @post.body
assert_not_same @post.body, @old_body
end
should "return correct text for `to_s`" do
assert_equal "http://www.example.com/", @post.body.to_s
end
should "return correct HTML for the `to_html` method" do
assert_match(/<a href="http:\/\/www.example.com">www.example.com<\/a>/, @post.body.to_html)
end
teardown do
@old_body = nil
end
end
teardown do
@rdoctext, @post = nil
Post.delete_all
end
end
end
| 28.322034 | 99 | 0.619988 |
e81373fc2c793ab56456dee7a9f73468cd00c4c3 | 1,025 | cask 'bartender' do
version '2.1.6'
sha256 '013bb1f5dcc29ff1ecbc341da96b6e399dc3c85fc95bd8c7bee153ab0d8756f5'
url "https://macbartender.com/B2/updates/#{version.dots_to_hyphens}/Bartender%20#{version.major}.zip",
referer: 'https://www.macbartender.com'
appcast 'https://www.macbartender.com/B2/updates/updates.php',
checkpoint: '6d5406613e77584527da5dfcc997d13f6b2985ae81ec732f399216743fe00a16'
name 'Bartender'
homepage 'https://www.macbartender.com/'
license :commercial
auto_updates true
app "Bartender #{version.major}.app"
postflight do
suppress_move_to_applications
end
uninstall login_item: 'Bartender 2'
zap delete: [
'/Library/ScriptingAdditions/BartenderHelper.osax',
'~/Library/Preferences/com.surteesstudios.Bartender.plist',
'/Library/PrivilegedHelperTools/com.surteesstudios.Bartender.BartenderInstallHelper',
'/System/Library/ScriptingAdditions/BartenderSystemHelper.osax',
]
end
| 34.166667 | 104 | 0.723902 |
79321dde7d06224cd9cf04963c995d04bdd284bd | 250 | # frozen_string_literal: true
class AddProtectedFieldAndRestrictedFieldToFieldTests < ActiveRecord::Migration[5.0]
def change
add_column :field_tests, :restricted_field, :string
add_column :field_tests, :protected_field, :string
end
end
| 27.777778 | 84 | 0.8 |
7a72aefba207197217c38bc52dc2566c9f20f75a | 601 | class ShowingsController < ApplicationController
before_filter :manager_authorized?, :find_listing
def create
showing = Showing.new(params[:showing])
showing.listing_id = @listing.id
showing.save
flash[:notice] = 'Showing created successfully.'
to_listing
end
def edit
@showing = @listing.showings.find(params[:id])
end
def update
showing = @listing.showings.find(params[:id])
showing.update_attributes(params[:showing])
to_listing
end
def destroy
showing = @listing.showings.find(params[:id])
showing.destroy
to_listing
end
end
| 20.033333 | 52 | 0.705491 |
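The controller above leans on a `find_listing` before filter and a `to_listing` redirect helper defined elsewhere (neither appears in this file). A routes sketch consistent with that structure — an assumption, since the actual config/routes.rb is not shown — would nest showings under listings so that `params[:listing_id]` is available to the filter:

# Hypothetical routes entry for an app of this (before_filter-era) vintage:
resources :listings do
  resources :showings, only: [:create, :edit, :update, :destroy]
end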
5d68703f6f845b4445fef7c6c299d744c5d7bec9 | 82 | class ChildMailer < ParentMailer
has_history message: false, only: [:other]
end
| 20.5 | 44 | 0.768293 |
7ad2bf73767b4304fca8f82b767607c122762fc3 | 752 | # frozen_string_literal: true
module FlagIconsRails
class << self
def load!
if rails?
register_rails_engine
else
configure_sass
end
end
def root
@root ||= File.expand_path('..', File.dirname(__FILE__))
end
def rails?
defined?(::Rails)
end
private
def stylesheets_path
File.join(assets_path, 'stylesheets')
end
def assets_path
@assets_path ||= File.join(root, 'app', 'assets')
end
def configure_sass
require 'sass'
::Sass.load_paths << stylesheets_path
end
def register_rails_engine
require 'flag-icons-rails/rails/engine'
require 'flag-icons-rails/rails/railtie'
end
end
end
FlagIconsRails.load!
| 16.711111 | 62 | 0.62367 |
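The `load!` dispatch above means the gem behaves differently depending on the host project: under Rails it registers an engine and railtie so the bundled assets flow through the asset pipeline, while in a plain Ruby project it appends its stylesheets directory to the Sass load path. A small illustrative check of which branch applied, assuming the file above has already been required:

# Illustration only - both methods referenced below are defined in the module above.
if FlagIconsRails.rails?
  puts "Rails detected: assets exposed via the registered engine/railtie"
else
  # The gem's stylesheets directory was pushed onto Sass.load_paths,
  # so its sheets can be @import-ed by name from project SCSS.
  puts Sass.load_paths.last
end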
ff97f964ae4bcd60fe201912f3c2f4e8b34c490a | 232 | class Recipe < ActiveRecord::Base
has_and_belongs_to_many(:categories)
has_and_belongs_to_many(:ingredients)
validates(:title, {:presence => true})
# private
# def rating_sort
  #   Recipe.order(rating: :asc)
# end
end
| 19.333333 | 40 | 0.719828 |