Columns (name / dtype / min - max):
  hexsha              string    40 - 40
  size                int64     2 - 1.01M
  content             string    2 - 1.01M
  avg_line_length     float64   1.5 - 100
  max_line_length     int64     2 - 1k
  alphanum_fraction   float64   0.25 - 1
d57c65fd185661577b49f0e5b43d2db6f7265ebc
589
require File.dirname(__FILE__) + '/test_helper.rb'

class TestNicovideoRanking < Test::Unit::TestCase
  def setup
    account = YAML.load_file(ENV['HOME'] + '/.nicovideo/account.yml')
    @nv = Nicovideo.new(account['mail'], account['password'])
    @nv.login
  end

  def test_ranking_valid
    rv = nil
    assert_nothing_raised { rv = @nv.ranking }
    rv.each {|v|
      # puts v.id + ':' + v.published_at.to_s + ':' + v.title
      puts v.id + ':' + v.title
      sleep 1
    }
    sleep 1
  end

  def test_ranking_invalid
  end

  def test_ranking_notopened
  end
end
18.40625
69
0.617997
613d11afd5a63b487c2c6cc36b436ad6f9c9eb96
113
#!/usr/bin/env jruby

require_relative "../helper"

describe Doubleshot::Dependencies::JarDependencyList do
end
14.125
55
0.778761
91e53d4fccd5191d15123e242f326deaed03735a
103
require 'spec_helper'

describe DayX do
  describe '#part1' do
  end

  describe '#part2' do
  end
end
10.3
22
0.68932
390e47d76314afe868d1f1cf4aa9b81274b898c0
647
class AddedLastTransitionMetadataToBalanceTransfers < ActiveRecord::Migration
  def change
    execute %Q{
      CREATE OR REPLACE VIEW "1"."balance_transfers" AS
        SELECT bt.id,
          bt.user_id,
          bt.project_id,
          bt.amount,
          bt.transfer_id,
          zone_timestamp(bt.created_at) AS created_at,
          zone_timestamp(weekdays_from(10, zone_timestamp(bt.created_at))) AS transfer_limit_date,
          current_state(bt.*) AS state,
          btt.metadata as last_transition_metadata
        FROM balance_transfers bt
        LEFT JOIN balance_transfer_transitions btt on btt.balance_transfer_id = bt.id and btt.most_recent
        WHERE is_owner_or_admin(bt.user_id);
    }
  end
end
32.35
101
0.766615
e2144159403c2e47eba69324cb3af11e96428583
155
# Load the Rails application.
require File.expand_path('../application', __FILE__)

# Initialize the Rails application.
AstroDash::Application.initialize!
25.833333
52
0.793548
ff579daa9ec3f99e0f9900dc3b45908eea1598de
655
require "foreman" class Foreman::Env attr_reader :entries def initialize(filename) @entries = File.read(filename).split("\n").inject({}) do |ax, line| if line =~ /\A([A-Za-z_0-9]+)=(.*)\z/ key = $1 case val = $2 # Remove single quotes when /\A'(.*)'\z/ then ax[key] = $1 # Remove double quotes and unescape string preserving newline characters when /\A"(.*)"\z/ then ax[key] = $1.gsub('\n', "\n").gsub(/\\(.)/, '\1') else ax[key] = val end end ax end end def entries @entries.each do |key, value| yield key, value end end end
21.833333
82
0.519084
ac6a32be6f70486c0362a2210c1c2f951d8765cb
517
class TitleScreen
  def initialize(ui, options)
    @ui = ui
    @options = options
    @messages = Messages[:title]
  end

  def render
    ui.message(0, 0, messages[:name])
    ui.message(7, 1, messages[:by])
    handle_choice prompt
  end

  private

  attr_reader :ui, :options, :messages

  def prompt
    ui.choice_prompt(0, 3, messages[:pick_random], "ynq")
  end

  def handle_choice(choice)
    case choice
    when "q" then options[:quit] = true
    when "y" then options[:randall] = true
    end
  end
end
17.827586
57
0.644101
4ac0a79fcfa9ba8ad5eb6007b2c6faee09667a92
2,503
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Rocky Bernstein <[email protected]>
require 'optparse'
require 'rubygems'; require 'require_relative'
require_relative '../command'
require_relative '../../app/default'
require_relative '../../interface/server' # server interface (remote debugging)

class Trepan::Command::ServerCommand < Trepan::Command
  include Trepanning

  unless defined?(HELP)
    CATEGORY        = 'support'
    DEFAULT_OPTIONS = {
      :host => Trepan::DEFAULT_SETTINGS[:host],
      :port => Trepan::DEFAULT_SETTINGS[:port]
    }
    MAX_ARGS        = 4  # Need at most this many
    NAME            = File.basename(__FILE__, '.rb')
    HELP = <<-HELP
#{NAME} [{--port|-p} NUM] [{--host|-h} HOST-OR-IP]

Put session into server mode which allows an out-of-process or remote
connection to the debugged program. --port and --host can be supplied to
specify the port number to use and the host name for TCP connections. If
neither is given, the default host (#{Trepan::DEFAULT_SETTINGS[:host]}) and
the default port (#{Trepan::DEFAULT_SETTINGS[:port]}) are used.

Examples:
   #{NAME}                                  # Accept remote connections using defaults
   #{NAME} --port 123                       # Accept remote connections on port 123
   #{NAME} --host my.host.net --port 2048
   #{NAME} -h my.host.net -p 2048           # same as above
    HELP
    SHORT_HELP = 'Go into out-of-process debugging (server) mode'
  end

  def parse_options(options, args) # nodoc
    parser = OptionParser.new do |opts|
      opts.on("-h", "--host NAME", String,
              "Host or IP used in TCP connections for --server or --client. " +
              "Default is #{DEFAULT_SETTINGS[:host].inspect}.") do |name_or_ip|
        options[:host] = name_or_ip
      end
      opts.on("-p", "--port NUMBER", Integer,
              "Port number used in TCP connections for --server or --client. " +
              "Default is #{DEFAULT_SETTINGS[:port]}.") do |num|
        options[:port] = num
      end
    end
    parser.parse(args)
    return options
  end

  # This method runs the command
  def run(args) # :nodoc
    options = parse_options(DEFAULT_OPTIONS.dup, args[1..-1])
    msg("starting debugger in out-of-process mode on host #{options[:host].inspect}, " +
        "port: #{options[:port]}")
    @proc.dbgr.intf << Trepan::ServerInterface.new(nil, nil, options)
  end
end

if __FILE__ == $0
  require_relative '../mock'
  dbgr, cmd = MockDebugger::setup
  # cmd.run([cmd.name])
  # cmd.run([cmd.name, '--server'])
end
34.287671
88
0.641231
797fa5774636661bccb3ce2e302a6dda0c64e785
128
# typed: true

module Vonage
  class AbstractAuthentication
    def initialize(config)
      @config = config
    end
  end
end
12.8
30
0.6875
d5738bf8cf19868f63822d788457769606c096d8
1,877
class Id3ed < Formula
  desc "ID3 tag editor for MP3 files"
  homepage "http://code.fluffytapeworm.com/projects/id3ed"
  url "http://code.fluffytapeworm.com/projects/id3ed/id3ed-1.10.4.tar.gz"
  sha256 "56f26dfde7b6357c5ad22644c2a379f25fce82a200264b5d4ce62f2468d8431b"
  license "GPL-2.0"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "0adb83739ee928667c488d4213369f5854900380f159219d0f04dc929ff731cc"
    sha256 cellar: :any_skip_relocation, big_sur:       "a560e42fb11482b14d22079ffa0ffb2dd2307f5e740b22acd2636b4fa6e4a307"
    sha256 cellar: :any_skip_relocation, catalina:      "9520d236327bce01cc292421934e19476163d8d72b4848740d3067cbc71b2572"
    sha256 cellar: :any_skip_relocation, mojave:        "2079b26fd26395f4eb016c61afafa007045d7b87b5030b05650705959c3bd87a"
    sha256 cellar: :any_skip_relocation, high_sierra:   "c31762b13640d1e9713ea26df41d5e9cb675a8d3565cd84b70efc526663ddfb8"
    sha256 cellar: :any_skip_relocation, sierra:        "e930552e37599e7926efebaf0d893f888576a26bddef6a91e356cf1b5de15b9e"
    sha256 cellar: :any_skip_relocation, el_capitan:    "6448c8e19c8e0874ed5141193c7db06c443ac6c33ab2f6bbe8811098b063c0d1"
    sha256 cellar: :any_skip_relocation, yosemite:      "8ca64da5c8c0cbbc7ec64af436fcf3a7ae457c8d8a8073887fc63ec4e89c98b9"
    sha256 cellar: :any_skip_relocation, mavericks:     "8dd4a14922e94245dd016a266aa23c7bcebb18a56e574c8179df83c2d68ff23c"
  end

  def install
    system "./configure", "--disable-debug",
                          "--disable-dependency-tracking",
                          "--disable-silent-rules",
                          "--prefix=#{prefix}",
                          "--bindir=#{bin}/",
                          "--mandir=#{man1}"
    bin.mkpath
    man1.mkpath
    system "make", "install"
  end

  test do
    system "#{bin}/id3ed", "-r", "-q", test_fixtures("test.mp3")
  end
end
52.138889
122
0.729355
6ae61725f80b06abb87798d7c4e814344224bf55
602
require 'spec_helper'

describe Reru::IO::Reader do
  it "spits what it reads down the stream" do
    StringIO.open("hello world\nhere we go!") do |input|
      reader = Reru::IO::Reader.new(input)
      xs = []
      reader.perform { |x| xs << x }
      Reru.run
      xs.should == ["hello world\n", "here we go!"]
    end
  end

  it "stops reading if told so" do
    StringIO.open('x' * 2_000_000) do |input|
      reader = Reru.read(input)
      xs = []
      reader.perform { |x|
        xs << x
        reader.stop
      }
      Reru.run
      xs.should == ['x' * 1_000_000]
    end
  end
end
23.153846
56
0.549834
03d04c15b5e0de97db3a434975441f6caca044bf
1,535
class Student::WriteupsController < ApplicationController
  before_action except: :index do
    @writeup_topic = WriteupTopic.where(cohort_id: params[:cohort_id]).find(params[:writeup_topic_id])
  end

  def index
    @writeups = Writeup.where(user_id: current_user).
      order('created_at desc').
      includes(:writeup_topic, comments: :user)
    @pending_topics = @cohort.writeup_topics.active - @writeups.map(&:writeup_topic)
  end

  def new
    @writeup = @writeup_topic.writeups.new(writeup_topic_id: params[:writeup_topic_id])
  end

  def create
    @writeup = @writeup_topic.writeups.new(writeup_params)
    @writeup.user = current_user

    if @writeup.save
      redirect_to(
        cohort_writeups_path(@cohort),
        :notice => "Your writeup was submitted"
      )
    else
      render :new
    end
  end

  def edit
    @writeup = @writeup_topic.writeups.
      where(user_id: current_user).
      find(params[:id])
  end

  def update
    @writeup = @writeup_topic.writeups.
      where(user_id: current_user).
      find(params[:id])

    if @writeup.update(writeup_params)
      redirect_to(
        cohort_writeups_path(@cohort),
        :notice => "Your writeup was submitted"
      )
    else
      render :edit
    end
  end

  def destroy
    @writeup = @writeup_topic.writeups.
      where(user_id: current_user).
      find(params[:id])
    @writeup.destroy

    redirect_to cohort_writeups_path(@cohort)
  end

  private

  def writeup_params
    params.require(:writeup).permit(:response)
  end
end
22.573529
102
0.672964
7a7487810aa7f07ba687b589339166832833e18e
536
require 'whmcs'

class Biller::WHMCSOnboarder < Biller::Onboarder
  include Biller::WHMCSRegistrar

  def initialize
    register
  end

  def onboard(onboard_options={})
    begin
      WHMCS::Client.add_client(onboard_options).attributes
    rescue StandardError => se
      {:result => "error", :error => "errors.desc.not_found"}
    end
  end

  def after_onboard(onboarded)
    result = Biller::Result.new
    onboarded.each { |k, v| result.send("#{k}=", v) }
    result.to_hash
  end
end
20.615385
65
0.621269
3333c8c611bfbe3b8b96af337538eaf786f988c1
2,875
Given(/^that the user is on the Family Relationships page$/) do login_as consumer, scope: :user visit financial_assistance_applications_path create_plan click_button "Start new application" find(".btn", text: "ADD INCOME & COVERAGE INFO").click find("#income_from_employer_no").click find("#self_employed_no").click find("#other_income_no").click find("#adjustments_income_no").click find("#enrolled_in_coverage_no").click find("#access_to_other_coverage_no").click find(:xpath, "//input[@value='CONTINUE'][@name='commit']").click find("#is_required_to_file_taxes_yes").click find("#is_claimed_as_tax_dependent_no").click find(:xpath, "//input[@value='CONTINUE'][@name='commit']").click find("#is_pregnant_no").click find("#is_post_partum_period_no").click find("#is_self_attested_blind_no").click find("#has_daily_living_no").click find("#has_daily_living_help_no").click find(:xpath, "//input[@value='CONTINUE'][@name='commit']").click page.should have_xpath('//span[@class="complete-icon"]') find('.interaction-click-control-add-member').click fill_in "dependent_first_name", with: 'johnson' fill_in "dependent_last_name", with: 'smith' fill_in "family_member_dob_", with: '10/10/1984' fill_in "dependent_ssn", with: '123456543' find(:xpath, '//label[@for="radio_female"]').click find(:xpath, '//*[@id="new_dependent"]/div[1]/div[4]/div[1]/div/div[2]/p').click find(:xpath, '//*[@id="new_dependent"]/div[1]/div[4]/div[1]/div/div[3]/div/ul/li[7]').click find(:xpath, '//label[@for="is_applying_coverage_false"]').click find(".btn", text: "CONFIRM MEMBER").click expect(page).to have_content('ADD INCOME & COVERAGE INFO') find(".btn", text: "ADD INCOME & COVERAGE INFO").click find("#income_from_employer_no").click find("#self_employed_no").click find("#other_income_no").click find("#adjustments_income_no").click find("#enrolled_in_coverage_no").click find("#access_to_other_coverage_no").click find(:xpath, "//input[@value='CONTINUE'][@name='commit']").click find("#is_required_to_file_taxes_yes").click find("#is_claimed_as_tax_dependent_no").click find(:xpath, "//input[@value='CONTINUE'][@name='commit']").click find("#is_pregnant_no").click find("#is_post_partum_period_no").click find("#is_self_attested_blind_no").click find("#has_daily_living_no").click find("#has_daily_living_help_no").click find(:xpath, "//input[@value='CONTINUE'][@name='commit']").click page.should have_xpath('//span[@class="complete-icon"]') find('.btn', text: 'CONTINUE').click end When(/^there is no missing relatioships$/) do expect(page).to have_content('All the relationships are added. Please continue.') end Then(/^the CONTINUE button is enabled$/) do page.should have_no_xpath "//a[@disabled]" end Then(/^the user will navigate to the Review & Submit page$/) do expect(page).to have_content('Review Your Application') end
41.071429
92
0.735652
21292d8ca088731bfa124d03b122638f0f73793b
2,428
require "mkmf" RbConfig::MAKEFILE_CONFIG['CC'] = ENV['CC'] if ENV['CC'] require "debase/ruby_core_source" require "native-package-installer" class NilClass def empty?; true; end end # Just a replacement of have_header because have_header searches not recursively :( def real_have_header(header_name) if (have_header(header_name)) return true end yes_msg = "checking for #{header_name}... yes" no_msg = "checking for #{header_name}... no" include_env = ENV["C_INCLUDE_PATH"] if !include_env.empty? && !Dir.glob("#{include_env}/**/#{header_name}").empty? puts yes_msg return true end if !Dir.glob("/usr/include/**/#{header_name}").empty? puts yes_msg return true end puts no_msg return false end if !real_have_header('glib.h') && !NativePackageInstaller.install(:alt_linux => "glib2-devel", :debian => "libglib2.0-dev", :redhat => "glib2-devel", :arch_linux => "glib2", :homebrew => "glib", :macports => "glib2", :msys2 => "glib2") exit(false) end hdrs = proc { have_header("vm_core.h") and have_header("iseq.h") and have_header("version.h") and have_header("vm_core.h") and have_header("vm_insnhelper.h") and have_header("vm_core.h") and have_header("method.h") } # Allow use customization of compile options. For example, the # following lines could be put in config_options to to turn off # optimization: # $CFLAGS='-fPIC -fno-strict-aliasing -g3 -ggdb -O2 -fPIC' config_file = File.join(File.dirname(__FILE__), 'config_options.rb') load config_file if File.exist?(config_file) if ENV['debase_debug'] $CFLAGS+=' -Wall -Werror -g3' end $CFLAGS += ' `pkg-config --cflags --libs glib-2.0`' $DLDFLAGS += ' `pkg-config --cflags --libs glib-2.0`' dir_config("ruby") if !Debase::RubyCoreSource.create_makefile_with_core(hdrs, "arg_scanner/arg_scanner") STDERR.print("Makefile creation failed\n") STDERR.print("*************************************************************\n\n") STDERR.print(" NOTE: If your headers were not found, try passing\n") STDERR.print(" --with-ruby-include=PATH_TO_HEADERS \n\n") STDERR.print("*************************************************************\n\n") exit(1) end
31.947368
85
0.594728
1d189c0d238320806adea4d0dcd7bee4dd358055
994
require 'rails_helper'

describe "Edit paper page", type: :feature do
  it "should save changes" do
    paper = FactoryGirl.create :paper
    visit edit_paper_path(paper)
    fill_in 'Title', with: 'BEAUTIFUL DATA, BEAUTIFUL MIND'
    click_button 'Update Paper'
  end

  it "should allow to select 5 authors from 5 separate drop downs" do
    paper = FactoryGirl.create :paper
    visit edit_paper_path(paper)
    expect(page).to have_field("Author 1")
    expect(page).to have_field("Author 2")
    expect(page).to have_field("Author 3")
    expect(page).to have_field("Author 4")
    expect(page).to have_field("Author 5")
  end

  it "should save selected authors" do
    author = FactoryGirl.create :author
    paper = FactoryGirl.create :paper
    visit edit_paper_path(paper)
    select author.name, from: 'Author 1'
    click_button 'Update Paper'
    expect(paper.authors[0]).to eq(author)
  end
end
28.4
71
0.645875
7a4e734393eacfed582f5020fe9d73af1feaca32
4,779
require 'spec_helper_integration' test_name 'LDAP user access' # There have been SIMP problems in the past in which some LDAP users # have not been able to login when their respective passwords have # been changed. This test is to make sure that regression never occurs! # facts gathered here are executed when the file first loads and # use the factor gem temporarily installed into system ruby master_fqdn = fact_on(master, 'fqdn') agents = hosts_with_role(hosts, 'agent') # subset of LDAP users for whom we want to execute login tests ldap_users = [ 'admin2', 'auditor1' ] describe 'LDAP user access' do let(:files_dir) { 'spec/acceptance/common_files' } let(:base_dn) do query = "puppet lookup --environment production --node #{master_fqdn} simp_options::ldap::base_dn" on(master, query).stdout.gsub('---','').gsub('...','').strip end context 'LDAP user creation' do let(:puppet_master_yaml) { "#{hiera_datadir(master)}/hosts/#{master_fqdn}.yaml" } let(:puppet_master_hieradata) do require 'digest/sha1' require 'base64' salt = 'SALT'.force_encoding('UTF-8') digest = Digest::SHA1.digest( test_password + salt ).force_encoding('UTF-8') password_hash = '{SSHA}' + Base64.encode64( digest + salt ).strip master_hiera = YAML.load(on(master, "cat #{puppet_master_yaml}").stdout) # Handle legacy layouts if master_hiera['classes'] master_hiera['classes'] << 'site::test_ldifs' else master_hiera['simp::server::classes'] ||= [] master_hiera['simp::server::classes'] << 'site::test_ldifs' end master_hiera['site::test_ldifs::user_password_hash'] = password_hash master_hiera end it 'should update hieradata' do create_remote_file(master, puppet_master_yaml, puppet_master_hieradata.to_yaml) end it 'should generate ldif files' do on(master, 'puppet agent -t', :accept_all_exit_codes => true) on(master, 'ls /root/ldifs/add_test_users.ldif /root/ldifs/modify_test_users.ldif /root/ldifs/force_test_users_password_reset.ldif') end it 'should create LDAP users' do # add users ldap_cmd = "/usr/bin/ldapadd -Z -x -w #{test_password} -D \"cn=LDAPAdmin,OU=People,#{base_dn}\" -f /root/ldifs/add_test_users.ldif" on(master, ldap_cmd) # modify some user groups ldap_cmd = "/usr/bin/ldapmodify -Z -x -w #{test_password} -D \"cn=LDAPAdmin,OU=People,#{base_dn}\" -f /root/ldifs/modify_test_users.ldif" on(master, ldap_cmd) # verify existence of LDAP users we are using in this test hosts.each do |host| ldap_users.each do |ldap_user| retry_on(host, "id -u #{ldap_user}", :retry_interval => 5, :max_retries => 5) end end end end context 'LDAP user login' do it 'should install expect scripts on master' do # This expect script ssh's to a host as a user and then runs 'date'. 
install_expect_script(master, "#{files_dir}/ssh_cmd_script") # This expect script ssh's to a host as a user and changes the user's password # at login install_expect_script(master, "#{files_dir}/ssh_password_change_required_script") end ldap_users.each do |ldap_user| it "LDAP user #{ldap_user} should be able to login via ssh" do hosts.each do |host| base_cmd ="#{EXPECT_SCRIPT_DIR}/ssh_cmd_script #{ldap_user} #{host.name} #{test_password(0)} date" # FIXME: Workaround for SIMP-5082 cmd = adjust_ssh_ciphers_for_expect_script(base_cmd, master, host) on(master, cmd) end end end it 'should be able to force password resets' do ldap_cmd = "/usr/bin/ldapmodify -Z -x -w #{test_password} -D \"cn=LDAPAdmin,OU=People,#{base_dn}\" -f /root/ldifs/force_test_users_password_reset.ldif" on(master, ldap_cmd) end ldap_users.each do |ldap_user| it "LDAP user #{ldap_user} should be forced to change password upon login via ssh" do base_cmd ="#{EXPECT_SCRIPT_DIR}/ssh_password_change_required_script #{ldap_user} #{agents[0].name} #{test_password(0)} #{test_password(1)}" # FIXME: Workaround for SIMP-5082 cmd = adjust_ssh_ciphers_for_expect_script(base_cmd, master, agents[0]) on(master, cmd) end end ldap_users.each do |ldap_user| it "LDAP user #{ldap_user} should be able to login with new password via ssh" do hosts.each do |host| base_cmd ="#{EXPECT_SCRIPT_DIR}/ssh_cmd_script #{ldap_user} #{host.name} #{test_password(1)} date" # FIXME: Workaround for SIMP-5082 cmd = adjust_ssh_ciphers_for_expect_script(base_cmd, master, host) on(master, cmd) end end end end end
36.761538
157
0.672316
b92a02598598085071c2fd25c7224fb326fc0c3e
958
class ApplicationController < ActionController::Base
  # Prevent CSRF attacks by raising an exception.
  # For APIs, you may want to use :null_session instead.
  protect_from_forgery with: :null_session

  before_action :authenticate
  before_action :set_logged_in_user

  def set_logged_in_user
    email, password = ActionController::HttpAuthentication::Basic::user_name_and_password(request)
    @logged_in_user = User.where(email: email).first
    puts "####### user email = #{@logged_in_user.email}"
  end

  def authenticate
    (authenticate_basic_auth || render_unauthorized)
  end

  def authenticate_basic_auth
    authenticate_or_request_with_http_basic do |email, submitted_password|
      User.authenticate(email,submitted_password)
    end
  end

  def render_unauthorized
    self.headers['WWW-Authenticate'] = 'Basic realm="Pets"'
    respond_to do |format|
      format.json { render json: 'Bad credentials', status: 401}
    end
  end
end
29.030303
98
0.750522
62a41b164cef8dcaa192c360011f39642fc52740
208
module Chewy
  module Resque
    class Worker
      @queue = :chewy

      def self.perform(index_name, ids)
        type = Chewy.derive_type(index_name)
        type.import(ids)
      end
    end
  end
end
14.857143
44
0.605769
01c8afb538c5190b2c2eef9b774355ba65d0b40e
934
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

module Google
  module Apis
    module DatacatalogV1
      # Version of the google-apis-datacatalog_v1 gem
      GEM_VERSION = "0.7.0"

      # Version of the code generator used to generate this client
      GENERATOR_VERSION = "0.4.0"

      # Revision of the discovery document this client was generated from
      REVISION = "20210827"
    end
  end
end
32.206897
74
0.730193
911874b7aba3951730d94733a880e93cbb117dbe
5,210
# frozen_string_literal: true require_relative '../test_helper' SingleCov.covered! describe SlackMessage do let(:deploy) { deploys(:succeeded_test) } let(:msg) { SlackMessage.new deploy } let(:body) { msg.message_body } let(:super_admin) { users(:super_admin) } let(:deployer_buddy) { users(:deployer_buddy) } let(:super_admin_identifier) { slack_identifiers(:super_admin) } let(:deployer_buddy_identifier) { slack_identifiers(:deployer_buddy) } before do deploy.changeset.stubs(:pull_requests).returns([]) end describe '#message_body' do describe 'waiting for a buddy' do def add_prs deploy.changeset.stubs(:pull_requests).returns( [ stub( url: 'http://example.com/pr1', number: '12345', title: 'Foo the Bars', risks: nil ), stub( url: 'http://example.com/pr2', number: '23456', title: 'Baz the Flibbutzes', risks: nil ) ] ) end def add_prs_with_risk deploy.changeset.stubs(:pull_requests).returns( [ stub( url: 'http://example.com/pr1', number: '12345', title: 'Foo the Bars', risks: '- Kittens' ), stub( url: 'http://example.com/pr2', number: '23456', title: 'Baz the Flibbutzes', risks: '- Puppies' ) ] ) end before do deploy.stubs(:waiting_for_buddy?).returns(true) end it 'includes a 👍 button' do body[:attachments][0][:actions][0][:text].must_include ':+1: Approve' end it 'describes a deploy with no PRs' do body[:text].must_include deploy.stage.name body[:text].must_include deploy.project.name body[:attachments][0][:fields][0][:value].must_equal '(no PRs)' body[:attachments][0][:fields][1][:value].must_equal '(no risks)' end it 'describes a deploy with PRs and no risks' do add_prs body[:attachments][0][:fields][0][:value].must_include 'Foo the Bars' body[:attachments][0][:fields][1][:value].must_equal '(no risks)' end it 'describes a deploy with risky PRs' do add_prs_with_risk body[:attachments][0][:fields][0][:value].must_include 'Foo the Bars' body[:attachments][0][:fields][1][:value].must_include 'Kittens' body[:attachments][0][:fields][1][:value].must_include 'Puppies' end end describe 'during deploy' do before { deploy.stubs(:running?).returns(true) } before { deploy.stubs(:succeeded?).returns(false) } before { deploy.stubs(:waiting_for_buddy?).returns(false) } it 'has no buttons' do text = "<@Uadmin> is deploying <http://www.test-url.com/projects/foo/deploys/178003093|*Project* to *Staging*>." 
assert_equal body, attachments: [{ text: 'Deploying…', fields: [ { title: "PRs", value: "(no PRs)", short: true }, { title: "Risks", value: "(no risks)", short: true } ], color: 'warning' }], response_type: 'in_channel', text: text end it 'mentions both users when the deploy has a buddy' do deploy.stubs(:buddy).returns(deployer_buddy) body[:text].must_include super_admin_identifier.identifier body[:text].must_include deployer_buddy_identifier.identifier end it 'uses email addresses if a user is not attached to slack' do user = users(:viewer) deploy.stubs(:user).returns(user) body[:text].must_include user.email end end describe 'after deploy is finished' do it 'says if a deploy failed' do deploy.stubs(:failed?).returns(true) body[:text].must_include 'failed to deploy' end it 'says if a deploy errored' do deploy.stubs(:errored?).returns(true) body[:text].must_include 'failed to deploy' end it 'says if a deploy succeeded' do body[:text].must_include 'successfully deployed' end it 'mentions both users when the deploy has a buddy' do deploy.stubs(:buddy).returns(deployer_buddy) body[:text].must_include super_admin_identifier.identifier body[:text].must_include deployer_buddy_identifier.identifier body[:text].must_include 'successfully deployed' end end end describe '#deliver' do describe 'with no URL linkage' do it 'does not use the network' do msg.deliver end end describe 'with a URL linkage' do before do DeployResponseUrl.create! deploy_id: deploy.id, response_url: 'http://example.com/xyz' stub_request(:post, 'http://example.com/xyz'). with(body: /successfully deployed/) end it 'sends a request to the response URL' do msg.deliver end end end end
30.290698
120
0.573512
e9b85e9cb8c4e35be7caca95940ba24aa47532b2
398
# frozen_string_literal: true

# rubocop:disable Style/Documentation

module Gitlab
  module BackgroundMigration
    module UserMentions
      module Models
        class MergeRequestUserMention < ActiveRecord::Base
          self.table_name = 'merge_request_user_mentions'

          def self.resource_foreign_key
            :merge_request_id
          end
        end
      end
    end
  end
end
20.947368
58
0.678392
b925efb5baafeaf725593d101a7218ccba78ad8a
3,130
# encoding: utf-8 # This file is autogenerated. Do not edit it manually. # If you want change the content of this file, edit # # /spec/fixtures/responses/whois.dns.pl/pl/status_registered.expected # # and regenerate the tests with the following rake task # # $ rake spec:generate # require 'spec_helper' describe "whois.dns.pl", :aggregate_failures do subject do file = fixture("responses", "whois.dns.pl/pl/status_registered.txt") part = Whois::Record::Part.new(body: File.read(file), host: "whois.dns.pl") Whois::Parser.parser_for(part) end it "matches status_registered.expected" do expect(subject.domain).to eq("google.pl") expect { subject.domain_id }.to raise_error(Whois::AttributeNotSupported) expect(subject.status).to eq(:registered) expect(subject.available?).to eq(false) expect(subject.registered?).to eq(true) expect(subject.created_on).to be_a(Time) expect(subject.created_on).to eq(DateTime.parse("2002-09-19 13:00:00")) expect(subject.updated_on).to be_a(Time) expect(subject.updated_on).to eq(DateTime.parse("2012-08-17 11:21:09")) expect(subject.expires_on).to be_a(Time) expect(subject.expires_on).to eq(DateTime.parse("2013-09-18 14:00:00")) expect(subject.registrar).to be_a(Whois::Parser::Registrar) expect(subject.registrar.id).to eq(nil) expect(subject.registrar.name).to eq("Markmonitor, Inc.") expect(subject.registrar.url).to eq(nil) expect { subject.registrant_contacts }.to raise_error(Whois::AttributeNotSupported) expect { subject.admin_contacts }.to raise_error(Whois::AttributeNotSupported) expect(subject.technical_contacts).to be_a(Array) expect(subject.technical_contacts.size).to eq(1) expect(subject.technical_contacts[0]).to be_a(Whois::Parser::Contact) expect(subject.technical_contacts[0].type).to eq(Whois::Parser::Contact::TYPE_TECHNICAL) expect(subject.technical_contacts[0].id).to eq("ks_5271013") expect(subject.technical_contacts[0].name).to eq(nil) expect(subject.technical_contacts[0].organization).to eq("DNS Admin\nGoogle Inc.") expect(subject.technical_contacts[0].address).to eq("1600 Amphitheatre Parkway") expect(subject.technical_contacts[0].city).to eq("Mountain View") expect(subject.technical_contacts[0].zip).to eq("94043") expect(subject.technical_contacts[0].state).to eq(nil) expect(subject.technical_contacts[0].country).to eq(nil) expect(subject.technical_contacts[0].country_code).to eq("US") expect(subject.technical_contacts[0].phone).to eq("+1.6503300100") expect(subject.technical_contacts[0].fax).to eq("+1.6506188571") expect(subject.technical_contacts[0].email).to eq(nil) expect(subject.nameservers).to be_a(Array) expect(subject.nameservers.size).to eq(2) expect(subject.nameservers[0]).to be_a(Whois::Parser::Nameserver) expect(subject.nameservers[0].name).to eq("ns2.google.com") expect(subject.nameservers[1]).to be_a(Whois::Parser::Nameserver) expect(subject.nameservers[1].name).to eq("ns1.google.com") expect(subject.response_throttled?).to eq(false) end end
47.424242
92
0.73738
f8579a28e151aa80298d0b152b67f672ed657880
441
require 'rails_helper'

module Dallal
  RSpec.describe DallalJob, type: :job do
    let(:args) { { class: 'User', id: '1', event: 'create'} }

    describe '#perform' do
      it 'constantizes and calls #create_notification on notifier with correct args' do
        expect(UserNotifier).to receive(:create_notification).with(id: '1', event: :create)
        subject.perform(args[:class], args[:id], args[:event])
      end
    end
  end
end
29.4
91
0.659864
26624b72af0d584132c0be78b2b482cbaf386277
1,601
# OMF_VERSIONS = 6.0

def create_switch(ovs)
  ovs.create(:ovs_proxy, {name: "test"}) do |reply_msg|
    if reply_msg.success?
      switch = reply_msg.resource

      switch.on_subscribed do
        info ">>> Connected to newly created switch #{reply_msg[:res_id]} with name #{reply_msg[:name]}"
        on_switch_created(switch)
      end

      after(10) do
        ovs.release(switch) do |reply_msg|
          info ">>> Released switch #{reply_msg[:res_id]}"
        end
      end
    else
      error ">>> Switch creation failed - #{reply_msg[:reason]}"
    end
  end
end

def on_switch_created(switch)
  switch.configure(ports: {operation: 'add', name: 'tun0', type: 'tunnel'}) do |reply_msg|
    info "> Switch configured ports: #{reply_msg[:ports]}"

    switch.request([:tunnel_port_numbers]) do |reply_msg|
      info "> Switch requested tunnel port: #{reply_msg[:tunnel_port_numbers]}"

      switch.configure(tunnel_port: {name: 'tun0', remote_ip: '127.0.0.1', remote_port: '1234'}) do |reply_msg|
        info "> Switch configured tunnel port: #{reply_msg[:tunnel_port]}"
      end
    end
  end

  # Monitor all status, error or warn information from the switch
  #switch.on_status do |msg|
  #  msg.each_property do |name, value|
  #    info "#{name} => #{value}"
  #  end
  #end

  switch.on_error do |msg|
    error msg[:reason]
  end

  switch.on_warn do |msg|
    warn msg[:reason]
  end
end

OmfCommon.comm.subscribe('ovs') do |ovs|
  unless ovs.error?
    create_switch(ovs)
  else
    error ovs.inspect
  end

  after(20) { info 'Disconnecting ...'; OmfCommon.comm.disconnect }
end
27.135593
111
0.64772
7959bf2d42c406ddb597ab32561ff3f69ab9ab26
433
class CreatePages < ActiveRecord::Migration
  def self.up
    create_table :pages do |t|
      t.string :name
      t.string :url
      t.integer :parent_id
      t.integer :lft
      t.integer :rgt

      t.timestamps
    end
    add_index :pages, :parent_id

    # Create parent Welcome page
    Page.create(:name => 'Welcome', :body => 'Getting started guide')
  end

  def self.down
    drop_table :pages
  end
end
18.041667
69
0.612009
f78152dfde9e6f479e6c41e38b0e647f0d67b41c
2,075
require "unit_soup/measurement" require "unit_soup/unit" include UnitSoup describe "Measurement" do describe ".new" do describe "from separate strings" do m = Measurement.new("3/4", "cm") it "should have initialized correctly" do expect(m.amount).to eq(3/4r) expect(m.unit).to eq(Unit.new(:cm)) end end describe "from single string" do m = Measurement.new("4/5 inch") it "should have initialized correctly" do expect(m.amount).to eq(4/5r) expect(m.unit).to eq(Unit.new(:inch)) end end describe "from objects" do m = Measurement.new(2.5, :cm) it "should have initialized correctly" do expect(m.amount).to eq(2.5) expect(m.unit).to eq(Unit.new(:cm)) end end describe "from measurement" do m = Measurement.new(Measurement.new(2, :cm)) it "should have initialized from measurement" do expect(m.amount).to eq(2) expect(m.unit.symbol).to eq(:cm) end end end describe "to_s" do m = Measurement.new(2, :cm) it "should convert to string correctly" do expect(m.to_s).to eq("2/1 cm") end end describe "==" do m = Measurement.from("2.5 cm") it "should equal another with same values" do expect(m).to eq(Measurement.new(5/2.to_r, "cm")) end it "should not equal different values" do expect(m).to_not eq(Measurement.new(3/2.to_r, "cm")) end end describe "valid" do it "should be valid for int amount" do expect(Measurement.valid?("2 cm")).to be(true) end it "should be valid for decimal amount" do expect(Measurement.valid?("2.5 cm")).to be(true) end it "should be valid for rational amount" do expect(Measurement.valid?("3/5 cm")).to be(true) end it "should be valid for rational amount" do expect(Measurement.valid?("3.3/5 cm")).to be(true) end it "should be invalid for invalid format" do expect(Measurement.valid?("invalid string 2 cm")).to be(false) end end end
24.127907
68
0.618795
bf13504dec706b0dfb9cfa792e1990e0723aa8a2
2,288
class Haxe < Formula
  desc "Multi-platform programming language"
  homepage "https://haxe.org/"
  url "https://github.com/HaxeFoundation/haxe.git",
      :tag => "3.4.7",
      :revision => "bb7b827a9c135fbfd066da94109a728351b87b92"

  bottle do
    cellar :any
    rebuild 1
    sha256 "c0fb5f2d7e35d16ac66311d1d9389aa1fa668309d88248cbd938b7a119abcc65" => :mojave
    sha256 "d9a2655a44eb67e429b2a0a46fdbba40608fa888143f7592a64f7101efceab74" => :high_sierra
    sha256 "759280efbec7f6ab98ca83639639325892f688a6fc9c6c7daa24432545086f47" => :sierra
  end

  head do
    url "https://github.com/HaxeFoundation/haxe.git", :branch => "development"

    depends_on "aspcud" => :build
    depends_on "opam" => :build
    depends_on "pkg-config" => :build
  end

  depends_on "camlp4" => :build
  depends_on "cmake" => :build
  depends_on "ocaml" => :build
  depends_on "neko"
  depends_on "pcre"

  def install
    ENV["OCAMLPARAM"] = "safe-string=0,_" # OCaml 4.06.0 compat

    # Build requires targets to be built in specific order
    ENV.deparallelize

    if build.head?
      Dir.mktmpdir("opamroot") do |opamroot|
        ENV["OPAMROOT"] = opamroot
        ENV["OPAMYES"] = "1"
        system "opam", "init", "--no-setup", "--disable-sandboxing"
        system "opam", "config", "exec", "--", "opam", "pin", "add", "haxe", buildpath, "--no-action"
        system "opam", "config", "exec", "--", "opam", "install", "haxe", "--deps-only"
        system "opam", "config", "exec", "--", "make", "ADD_REVISION=1"
      end
    else
      system "make", "OCAMLOPT=ocamlopt.opt"
    end

    # Rebuild haxelib as a valid binary
    cd "extra/haxelib_src" do
      system "cmake", "."
      system "make"
    end
    rm "haxelib"
    cp "extra/haxelib_src/haxelib", "haxelib"

    bin.mkpath
    system "make", "install", "INSTALL_BIN_DIR=#{bin}",
           "INSTALL_LIB_DIR=#{lib}/haxe", "INSTALL_STD_DIR=#{lib}/haxe/std"
  end

  def caveats; <<~EOS
    Add the following line to your .bashrc or equivalent:
      export HAXE_STD_PATH="#{HOMEBREW_PREFIX}/lib/haxe/std"
  EOS
  end

  test do
    ENV["HAXE_STD_PATH"] = "#{HOMEBREW_PREFIX}/lib/haxe/std"
    system "#{bin}/haxe", "-v", "Std"
    system "#{bin}/haxelib", "version"
  end
end
29.714286
93
0.627622
33e52aa749d2ebc3e539b5a8fb0336c1887ad9e1
186
class AddCommitUrlColumn < ActiveRecord::Migration
  def self.up
    add_column :commits, :url, :string, :null => false
  end

  def self.down
    remove_column :commits, :url
  end
end
18.6
54
0.704301
33aca6cb527b4ff3a3a0ac4745945405bcb54704
18,872
require 'rails_helper' describe 'Issue Boards', js: true do include DragTo let(:group) { create(:group, :nested) } let(:project) { create(:project, :public, namespace: group) } let(:board) { create(:board, project: project) } let(:user) { create(:user) } let!(:user2) { create(:user) } before do project.team << [user, :master] project.team << [user2, :master] page.driver.set_cookie('sidebar_collapsed', 'true') sign_in(user) end context 'no lists' do before do visit project_board_path(project, board) wait_for_requests expect(page).to have_selector('.board', count: 3) end it 'shows blank state' do expect(page).to have_content('Welcome to your Issue Board!') end it 'shows tooltip on add issues button' do button = page.find('.filter-dropdown-container button', text: 'Add issues') expect(button[:"data-original-title"]).to eq("Please add a list to your board first") end it 'hides the blank state when clicking nevermind button' do page.within(find('.board-blank-state')) do click_button("Nevermind, I'll use my own") end expect(page).to have_selector('.board', count: 2) end it 'creates default lists' do lists = ['Backlog', 'To Do', 'Doing', 'Closed'] page.within(find('.board-blank-state')) do click_button('Add default lists') end wait_for_requests expect(page).to have_selector('.board', count: 4) page.all('.board').each_with_index do |list, i| expect(list.find('.board-title')).to have_content(lists[i]) end end end context 'with lists' do let(:milestone) { create(:milestone, project: project) } let(:planning) { create(:label, project: project, name: 'Planning', description: 'Test') } let(:development) { create(:label, project: project, name: 'Development') } let(:testing) { create(:label, project: project, name: 'Testing') } let(:bug) { create(:label, project: project, name: 'Bug') } let!(:backlog) { create(:label, project: project, name: 'Backlog') } let!(:closed) { create(:label, project: project, name: 'Closed') } let!(:accepting) { create(:label, project: project, name: 'Accepting Merge Requests') } let!(:list1) { create(:list, board: board, label: planning, position: 0) } let!(:list2) { create(:list, board: board, label: development, position: 1) } let!(:confidential_issue) { create(:labeled_issue, :confidential, project: project, author: user, labels: [planning], relative_position: 9) } let!(:issue1) { create(:labeled_issue, project: project, title: 'aaa', description: '111', assignees: [user], labels: [planning], relative_position: 8) } let!(:issue2) { create(:labeled_issue, project: project, title: 'bbb', description: '222', author: user2, labels: [planning], relative_position: 7) } let!(:issue3) { create(:labeled_issue, project: project, title: 'ccc', description: '333', labels: [planning], relative_position: 6) } let!(:issue4) { create(:labeled_issue, project: project, title: 'ddd', description: '444', labels: [planning], relative_position: 5) } let!(:issue5) { create(:labeled_issue, project: project, title: 'eee', description: '555', labels: [planning], milestone: milestone, relative_position: 4) } let!(:issue6) { create(:labeled_issue, project: project, title: 'fff', description: '666', labels: [planning, development], relative_position: 3) } let!(:issue7) { create(:labeled_issue, project: project, title: 'ggg', description: '777', labels: [development], relative_position: 2) } let!(:issue8) { create(:closed_issue, project: project, title: 'hhh', description: '888') } let!(:issue9) { create(:labeled_issue, project: project, title: 'iii', description: '999', labels: [planning, testing, 
bug, accepting], relative_position: 1) } before do visit project_board_path(project, board) wait_for_requests expect(page).to have_selector('.board', count: 4) expect(find('.board:nth-child(2)')).to have_selector('.card') expect(find('.board:nth-child(3)')).to have_selector('.card') expect(find('.board:nth-child(4)')).to have_selector('.card') end it 'shows description tooltip on list title' do page.within('.board:nth-child(2)') do expect(find('.board-title span.has-tooltip')[:title]).to eq('Test') end end it 'shows issues in lists' do wait_for_board_cards(2, 8) wait_for_board_cards(3, 2) end it 'shows confidential issues with icon' do page.within(find('.board:nth-child(2)')) do expect(page).to have_selector('.confidential-icon', count: 1) end end it 'search closed list' do find('.filtered-search').set(issue8.title) find('.filtered-search').native.send_keys(:enter) wait_for_requests expect(find('.board:nth-child(2)')).to have_selector('.card', count: 0) expect(find('.board:nth-child(3)')).to have_selector('.card', count: 0) expect(find('.board:nth-child(4)')).to have_selector('.card', count: 1) end it 'search list' do find('.filtered-search').set(issue5.title) find('.filtered-search').native.send_keys(:enter) wait_for_requests expect(find('.board:nth-child(2)')).to have_selector('.card', count: 1) expect(find('.board:nth-child(3)')).to have_selector('.card', count: 0) expect(find('.board:nth-child(4)')).to have_selector('.card', count: 0) end it 'allows user to delete board' do page.within(find('.board:nth-child(2)')) do find('.board-delete').click end wait_for_requests expect(page).to have_selector('.board', count: 3) end it 'removes checkmark in new list dropdown after deleting' do click_button 'Add list' wait_for_requests find('.dropdown-menu-close').click page.within(find('.board:nth-child(2)')) do find('.board-delete').click end wait_for_requests expect(page).to have_selector('.board', count: 3) end it 'infinite scrolls list' do 50.times do create(:labeled_issue, project: project, labels: [planning]) end visit project_board_path(project, board) wait_for_requests page.within(find('.board:nth-child(2)')) do expect(page.find('.board-header')).to have_content('58') expect(page).to have_selector('.card', count: 20) expect(page).to have_content('Showing 20 of 58 issues') evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight") wait_for_requests expect(page).to have_selector('.card', count: 40) expect(page).to have_content('Showing 40 of 58 issues') evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight") wait_for_requests expect(page).to have_selector('.card', count: 58) expect(page).to have_content('Showing all issues') end end context 'closed' do it 'shows list of closed issues' do wait_for_board_cards(4, 1) wait_for_requests end it 'moves issue to closed' do drag(list_from_index: 1, list_to_index: 3) wait_for_board_cards(2, 7) wait_for_board_cards(3, 2) wait_for_board_cards(4, 2) expect(find('.board:nth-child(2)')).not_to have_content(issue9.title) expect(find('.board:nth-child(4)')).to have_selector('.card', count: 2) expect(find('.board:nth-child(4)')).to have_content(issue9.title) expect(find('.board:nth-child(4)')).not_to have_content(planning.title) end it 'removes all of the same issue to closed' do drag(list_from_index: 1, list_to_index: 3) wait_for_board_cards(2, 7) wait_for_board_cards(3, 2) 
wait_for_board_cards(4, 2) expect(find('.board:nth-child(2)')).not_to have_content(issue9.title) expect(find('.board:nth-child(4)')).to have_content(issue9.title) expect(find('.board:nth-child(4)')).not_to have_content(planning.title) end end context 'lists' do it 'changes position of list' do drag(list_from_index: 2, list_to_index: 1, selector: '.board-header') wait_for_board_cards(2, 2) wait_for_board_cards(3, 8) wait_for_board_cards(4, 1) expect(find('.board:nth-child(2)')).to have_content(development.title) expect(find('.board:nth-child(2)')).to have_content(planning.title) end it 'issue moves between lists' do drag(list_from_index: 1, from_index: 1, list_to_index: 2) wait_for_board_cards(2, 7) wait_for_board_cards(3, 2) wait_for_board_cards(4, 1) expect(find('.board:nth-child(3)')).to have_content(issue6.title) expect(find('.board:nth-child(3)').all('.card').last).to have_content(development.title) end it 'issue moves between lists' do drag(list_from_index: 2, list_to_index: 1) wait_for_board_cards(2, 9) wait_for_board_cards(3, 1) wait_for_board_cards(4, 1) expect(find('.board:nth-child(2)')).to have_content(issue7.title) expect(find('.board:nth-child(2)').all('.card').first).to have_content(planning.title) end it 'issue moves from closed' do drag(list_from_index: 2, list_to_index: 3) wait_for_board_cards(2, 8) wait_for_board_cards(3, 1) wait_for_board_cards(4, 2) expect(find('.board:nth-child(4)')).to have_content(issue8.title) end context 'issue card' do it 'shows assignee' do page.within(find('.board:nth-child(2)')) do expect(page).to have_selector('.avatar', count: 1) end end end context 'new list' do it 'shows all labels in new list dropdown' do click_button 'Add list' wait_for_requests page.within('.dropdown-menu-issues-board-new') do expect(page).to have_content(planning.title) expect(page).to have_content(development.title) expect(page).to have_content(testing.title) end end it 'creates new list for label' do click_button 'Add list' wait_for_requests page.within('.dropdown-menu-issues-board-new') do click_link testing.title end wait_for_requests expect(page).to have_selector('.board', count: 5) end it 'creates new list for Backlog label' do click_button 'Add list' wait_for_requests page.within('.dropdown-menu-issues-board-new') do click_link backlog.title end wait_for_requests expect(page).to have_selector('.board', count: 5) end it 'creates new list for Closed label' do click_button 'Add list' wait_for_requests page.within('.dropdown-menu-issues-board-new') do click_link closed.title end wait_for_requests expect(page).to have_selector('.board', count: 5) end it 'keeps dropdown open after adding new list' do click_button 'Add list' wait_for_requests page.within('.dropdown-menu-issues-board-new') do click_link closed.title end wait_for_requests expect(page).to have_css('#js-add-list.open') end it 'creates new list from a new label' do click_button 'Add list' wait_for_requests click_link 'Create new label' fill_in('new_label_name', with: 'Testing New Label') first('.suggest-colors a').click click_button 'Create' wait_for_requests wait_for_requests expect(page).to have_selector('.board', count: 5) end end end context 'filtering' do it 'filters by author' do set_filter("author", user2.username) click_filter_link(user2.username) submit_filter wait_for_requests wait_for_board_cards(2, 1) wait_for_empty_boards((3..4)) end it 'filters by assignee' do set_filter("assignee", user.username) click_filter_link(user.username) submit_filter wait_for_requests wait_for_board_cards(2, 1) 
wait_for_empty_boards((3..4)) end it 'filters by milestone' do set_filter("milestone", "\"#{milestone.title}\"") click_filter_link(milestone.title) submit_filter wait_for_requests wait_for_board_cards(2, 1) wait_for_board_cards(3, 0) wait_for_board_cards(4, 0) end it 'filters by label' do set_filter("label", testing.title) click_filter_link(testing.title) submit_filter wait_for_requests wait_for_board_cards(2, 1) wait_for_empty_boards((3..4)) end it 'filters by label with space after reload' do set_filter("label", "\"#{accepting.title}\"") click_filter_link(accepting.title) submit_filter # Test after reload page.evaluate_script 'window.location.reload()' wait_for_board_cards(2, 1) wait_for_empty_boards((3..4)) wait_for_requests page.within(find('.board:nth-child(2)')) do expect(page.find('.board-header')).to have_content('1') expect(page).to have_selector('.card', count: 1) end page.within(find('.board:nth-child(3)')) do expect(page.find('.board-header')).to have_content('0') expect(page).to have_selector('.card', count: 0) end end it 'removes filtered labels' do set_filter("label", testing.title) click_filter_link(testing.title) submit_filter wait_for_board_cards(2, 1) find('.clear-search').click submit_filter wait_for_board_cards(2, 8) end it 'infinite scrolls list with label filter' do 50.times do create(:labeled_issue, project: project, labels: [planning, testing]) end set_filter("label", testing.title) click_filter_link(testing.title) submit_filter wait_for_requests page.within(find('.board:nth-child(2)')) do expect(page.find('.board-header')).to have_content('51') expect(page).to have_selector('.card', count: 20) expect(page).to have_content('Showing 20 of 51 issues') evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight") expect(page).to have_selector('.card', count: 40) expect(page).to have_content('Showing 40 of 51 issues') evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight") expect(page).to have_selector('.card', count: 51) expect(page).to have_content('Showing all issues') end end it 'filters by multiple labels' do set_filter("label", testing.title) click_filter_link(testing.title) set_filter("label", bug.title) click_filter_link(bug.title) submit_filter wait_for_requests wait_for_board_cards(2, 1) wait_for_empty_boards((3..4)) end it 'filters by clicking label button on issue' do page.within(find('.board:nth-child(2)')) do expect(page).to have_selector('.card', count: 8) expect(find('.card', match: :first)).to have_content(bug.title) click_button(bug.title) wait_for_requests end page.within('.tokens-container') do expect(page).to have_content(bug.title) end wait_for_requests wait_for_board_cards(2, 1) wait_for_empty_boards((3..4)) end it 'removes label filter by clicking label button on issue' do page.within(find('.board:nth-child(2)')) do page.within(find('.card', match: :first)) do click_button(bug.title) end wait_for_requests expect(page).to have_selector('.card', count: 1) end wait_for_requests end end end context 'keyboard shortcuts' do before do visit project_board_path(project, board) wait_for_requests end it 'allows user to use keyboard shortcuts' do find('.boards-list').native.send_keys('i') expect(page).to have_content('New Issue') end end context 'signed out user' do before do sign_out(:user) visit project_board_path(project, board) wait_for_requests end it 'displays lists' do 
expect(page).to have_selector('.board') end it 'does not show create new list' do expect(page).not_to have_selector('.js-new-board-list') end it 'does not allow dragging' do expect(page).not_to have_selector('.user-can-drag') end end context 'as guest user' do let(:user_guest) { create(:user) } before do project.team << [user_guest, :guest] sign_out(:user) sign_in(user_guest) visit project_board_path(project, board) wait_for_requests end it 'does not show create new list' do expect(page).not_to have_selector('.js-new-board-list') end end def drag(selector: '.board-list', list_from_index: 0, from_index: 0, to_index: 0, list_to_index: 0) drag_to(selector: selector, scrollable: '#board-app', list_from_index: list_from_index, from_index: from_index, to_index: to_index, list_to_index: list_to_index) end def wait_for_board_cards(board_number, expected_cards) page.within(find(".board:nth-child(#{board_number})")) do expect(page.find('.board-header')).to have_content(expected_cards.to_s) expect(page).to have_selector('.card', count: expected_cards) end end def wait_for_empty_boards(board_numbers) board_numbers.each do |board| wait_for_board_cards(board, 0) end end def set_filter(type, text) find('.filtered-search').native.send_keys("#{type}:#{text}") end def submit_filter find('.filtered-search').native.send_keys(:enter) end def click_filter_link(link_text) page.within('.filtered-search-box') do expect(page).to have_button(link_text) click_button(link_text) end end end
31.505843
163
0.630988
616c98d9c37540bef0e062773636919d8c545cf3
3,214
# encoding: utf-8 # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::Network::Mgmt::V2018_01_01 module Models # # Vpn Client Parameters for package generation # class VpnClientParameters include MsRestAzure # @return [ProcessorArchitecture] VPN client Processor Architecture. # Possible values are: 'AMD64' and 'X86'. Possible values include: # 'Amd64', 'X86' attr_accessor :processor_architecture # @return [AuthenticationMethod] VPN client Authentication Method. # Possible values are: 'EAPTLS' and 'EAPMSCHAPv2'. Possible values # include: 'EAPTLS', 'EAPMSCHAPv2' attr_accessor :authentication_method # @return [String] The public certificate data for the radius server # authentication certificate as a Base-64 encoded string. Required only # if external radius authentication has been configured with EAPTLS # authentication. attr_accessor :radius_server_auth_certificate # @return [Array<String>] A list of client root certificates public # certificate data encoded as Base-64 strings. Optional parameter for # external radius based authentication with EAPTLS. attr_accessor :client_root_certificates # # Mapper for VpnClientParameters class as Ruby Hash. # This will be used for serialization/deserialization. # def self.mapper() { client_side_validation: true, required: false, serialized_name: 'VpnClientParameters', type: { name: 'Composite', class_name: 'VpnClientParameters', model_properties: { processor_architecture: { client_side_validation: true, required: false, serialized_name: 'processorArchitecture', type: { name: 'String' } }, authentication_method: { client_side_validation: true, required: false, serialized_name: 'authenticationMethod', type: { name: 'String' } }, radius_server_auth_certificate: { client_side_validation: true, required: false, serialized_name: 'radiusServerAuthCertificate', type: { name: 'String' } }, client_root_certificates: { client_side_validation: true, required: false, serialized_name: 'clientRootCertificates', type: { name: 'Sequence', element: { client_side_validation: true, required: false, serialized_name: 'StringElementType', type: { name: 'String' } } } } } } } end end end end
33.134021
77
0.554138
ff2a3a3b2b103d16d929a3b8ea8b0a24e5395afd
2,288
# encoding: utf-8 # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::Compute::Mgmt::V2018_06_01 module Models # # Describes a virtual machines scale set IP Configuration's PublicIPAddress # configuration # class VirtualMachineScaleSetUpdatePublicIPAddressConfiguration include MsRestAzure # @return [String] The publicIP address configuration name. attr_accessor :name # @return [Integer] The idle timeout of the public IP address. attr_accessor :idle_timeout_in_minutes # @return [VirtualMachineScaleSetPublicIPAddressConfigurationDnsSettings] # The dns settings to be applied on the publicIP addresses . attr_accessor :dns_settings # # Mapper for VirtualMachineScaleSetUpdatePublicIPAddressConfiguration # class as Ruby Hash. # This will be used for serialization/deserialization. # def self.mapper() { client_side_validation: true, required: false, serialized_name: 'VirtualMachineScaleSetUpdatePublicIPAddressConfiguration', type: { name: 'Composite', class_name: 'VirtualMachineScaleSetUpdatePublicIPAddressConfiguration', model_properties: { name: { client_side_validation: true, required: false, serialized_name: 'name', type: { name: 'String' } }, idle_timeout_in_minutes: { client_side_validation: true, required: false, serialized_name: 'properties.idleTimeoutInMinutes', type: { name: 'Number' } }, dns_settings: { client_side_validation: true, required: false, serialized_name: 'properties.dnsSettings', type: { name: 'Composite', class_name: 'VirtualMachineScaleSetPublicIPAddressConfigurationDnsSettings' } } } } } end end end end
31.342466
93
0.586976
d53adc0b2e46fb63647294d2f206c1b77ff5dbca
827
require 'spec_helper'

RSpec.describe Interactors::Players::CreatePlayer do
  let(:player_attributes) { { 'id' => 1, 'name' => 'octane' } }
  let(:team) { { 'id' => 'X0klA3' } }
  let(:access_token) { 'fdgdfg908g9n9gf09fgh8' }
  let(:repository) { PlayerRepository.new }
  let(:create_player) { described_class.new(repository: repository) }

  describe '#call' do
    let(:result) do
      create_player.call(
        player_attributes: player_attributes,
        team: team,
        access_token: access_token
      )
    end

    it 'succeeds' do
      expect(result.successful?).to be(true)
    end

    it 'creates a player' do
      expect(result.player).to have_attributes(
        player_id: '1',
        name: 'octane',
        team_id: 'X0klA3',
        access_token: 'fdgdfg908g9n9gf09fgh8'
      )
    end
  end
end
25.060606
69
0.625151
7a2fc6b1184ba0a9be3990dab8b735ca95891ace
2,996
class Api::V3::ArticlesController < Api::V3::BaseController before_filter :load_article, only: [:update, :destroy] def index # Filter by source parameter, filter out private sources unless admin # Load articles from ids listed in query string, use type parameter if present # Translate type query parameter into column name # Limit number of ids to 50 source_ids = get_source_ids(params[:source]) type = { "doi" => :doi, "pmid" => :pmid, "pmcid" => :pmcid, "mendeley" => :mendeley_uuid }.values_at(params[:type]).first || Article.uid_as_sym ids = params[:ids].nil? ? nil : params[:ids].split(",")[0...50].map { |id| Article.clean_id(id) } id_hash = { :articles => { type => ids }, :retrieval_statuses => { :source_id => source_ids }} @articles = ArticleDecorator.where(id_hash).includes(:retrieval_statuses).order("articles.updated_at DESC").decorate(context: { days: params[:days], months: params[:months], year: params[:year], info: params[:info], source: params[:source] }) # Return 404 HTTP status code and error message if article wasn't found, or no valid source specified if @articles.blank? if params[:source].blank? @error = "Article not found." else @error = "Source not found." end render "error", :status => :not_found end end def show # Load one article given query params source_ids = get_source_ids(params[:source]) id_hash = { :articles => Article.from_uri(params[:id]), :retrieval_statuses => { :source_id => source_ids }} @article = ArticleDecorator.includes(:retrieval_statuses).where(id_hash).decorate(context: { days: params[:days], months: params[:months], year: params[:year], info: params[:info], source: params[:source] }) # Return 404 HTTP status code and error message if article wasn't found, or no valid source specified if @article.blank? if params[:source].blank? @error = "Article not found." else @error = "Source not found." end render "error", :status => :not_found end end protected def load_article # Load one article given query params id_hash = Article.from_uri(params[:id]) if id_hash.respond_to?("key") key, value = id_hash.first @article = Article.where(key => value).first else @article = nil end end # Filter by source parameter, filter out private sources unless admin def get_source_ids(source_names) if source_names && current_user.try(:is_admin_or_staff?) source_ids = Source.where("lower(name) in (?)", source_names.split(",")).order("name").pluck(:id) elsif source_names source_ids = Source.where("private = ?", false).where("lower(name) in (?)", source_names.split(",")).order("name").pluck(:id) elsif current_user.try(:is_admin_or_staff?) source_ids = Source.order("name").pluck(:id) else source_ids = Source.where("private = ?", false).order("name").pluck(:id) end end end
41.611111
246
0.665554
e8ea6aff64f932dff49bf840f0cf77f3fa075c77
6,174
require 'fileutils' # http://www.ruby-doc.org/stdlib-1.9.3/libdoc/fileutils/rdoc/index.html require 'matrix' # http://www.ruby-doc.org/stdlib-1.9.3/libdoc/matrix/rdoc/index.html require 'mustache' # https://github.com/defunkt/mustache THIS_DIR = File.expand_path('..', __FILE__) PROJECT_ROOT = File.expand_path('../../../..', THIS_DIR) if !defined? PROJECT_ROOT DATA_PATH = File.join(THIS_DIR, "data") CLASSIC_PATH = File.join(DATA_PATH, "classic") NEXTGEN_PATH = File.join(DATA_PATH, "nextgen") INDEX_PATH = File.join(DATA_PATH, "index.txt") # some conversion constants for later JOULES_PER_EV = 1.6021770000000003e-19 KILOGRAMS_PER_DALTON = 1.66054e-27 MW_VELOCITY_UNITS_PER_METER_PER_SECOND = 1e-6 # x, y grid position of atom i (0 <= i < NX*NY) SQRT_3_OVER_2 = Math.sqrt(3)/2 # Boltzmann constant KB = 8.6173423e-5 # eV/K # Define the elements SIGMA = 0.07 # nm MASS = 39.95 # g/mol (amu, Dalton) RMIN = 2 ** (1.0/6) * SIGMA # dimensions of container HEIGHT = 5.0 # nm WIDTH = 5.0 # nm # Organize particles into grid of NX x NY particles NX = 7 NY = 7 N = 2 * NX * NY # Lennard-Jones potential for squared pairwise separation = rsq def lj(rsq, epsilon, sigma=SIGMA) alpha = 4 * epsilon * sigma ** 12 beta = 4 * epsilon * sigma ** 6 return alpha * rsq ** -6 - beta * rsq ** -3 end # Location on the unit-length hexagonal lattice of the ith particle def grid_pos(i) x = i % NX y = i / NX x += 0.5 if y % 2 == 0 y *= SQRT_3_OVER_2 return [x, y] end # potential energy of NX * NY particles arranged in a hexagonal lattice # with interparticle distance r def pe(r, epsilon) # calculate pe from LJ formula pe = 0 n = NX * NY for i in 0...n-1 for j in i+1...n xi, yi = grid_pos(i) xj, yj = grid_pos(j) rsq = r * r * ((xi-xj).abs ** 2 + (yi-yj).abs ** 2) pe += lj(rsq, epsilon) end end return pe end # returns [x,y] vectors for NX*NY particles arranged in a hexagonal lattice # with separation r def positions(r) x = [] y = [] leftx = -r*(NX - 1) / 2 topy = -SQRT_3_OVER_2 * r * (NY - 1) / 2 for i in 0...NX*NY do a, b = grid_pos(i) x.push(leftx + r*a) y.push(topy + r*b) end return [x, y] end # VX, VY angles for n particles which should have total KE, in Joules, # 'initial_ke_in_ev'. 
All particles will translate in direction 'angle' def velocities(initial_ke_in_ev, n, angle) vx = [] vy = [] ke_per_atom_in_joules = initial_ke_in_ev * JOULES_PER_EV / n mass_in_kg = MASS * KILOGRAMS_PER_DALTON v_per_atom_in_mks = Math.sqrt(2 * ke_per_atom_in_joules / mass_in_kg) v = v_per_atom_in_mks * MW_VELOCITY_UNITS_PER_METER_PER_SECOND for i in 0...n do vx.push(v * Math.cos(angle)) vy.push(v * Math.sin(angle)) end return [vx, vy] end class Cml < Mustache self.template_path = File.dirname(__FILE__) attr_reader :model_number def initialize(model_num) @model_number = model_num end end class Mml < Mustache self.template_path = File.dirname(__FILE__) attr_reader :number_of_particles, :epsilon, :sigma, :mass, :width, :height, :atoms def initialize(number_of_particles, epsilon, sigma, mass, width, height, atoms) @number_of_particles = number_of_particles @epsilon = epsilon @sigma = sigma @mass = mass @width = width @height = height @atoms = atoms end end def generate_mw_files(num, epsilon, x, y, vx, vy) File.open(File.join(CLASSIC_PATH, "model#{num}.cml"), 'w') { |f| f.write Cml.new(num).render } atoms = [x, y, vx, vy].transpose.collect { |a| { "rx" => 100*a[0], "ry" => 100*a[1], "vx" => 100*a[2], "vy" => 100*a[3] } } File.open(File.join(CLASSIC_PATH, "model#{num}$0.mml"), 'w') do |f| f.write Mml.new(atoms.length, epsilon, 100*SIGMA, MASS/120, WIDTH*100, HEIGHT*100, atoms).render end end def convert_mml_file(num) converter = 'node' + File.join(PROJECT_ROOT, "node-bin/convert-mml.js") input_mml_file = File.join(CLASSIC_PATH, "model#{num}$0.mml") output_json_file = File.join(NEXTGEN_PATH, "model#{num}.json") cmd = "#{converter} '#{input_mml_file}' #{output_json_file}" puts "\ncommand:\n#{cmd}" system(cmd) end def generate_md2d_data_file(num) generator = 'node' + File.join(PROJECT_ROOT, "node-bin/run-md2d.js") input_json_file = File.join(NEXTGEN_PATH, "model#{num}.json") output_txt_file = File.join(NEXTGEN_PATH, "model#{num}.data.txt") total_time = 41000 cmd = "#{generator} -i '#{input_json_file}' -o #{output_txt_file} -t #{total_time}" puts "\ncommand:\n#{cmd}" system(cmd) end def linspace(start, stop, number) interval = (stop-start)/(number-1) results = [start] results.push(results.last + interval) while results.last <= stop results end FileUtils.mkdir_p CLASSIC_PATH FileUtils.mkdir_p NEXTGEN_PATH # erase any files in the nextgen path FileUtils.rm_r Dir.glob(NEXTGEN_PATH + '/*') INDEX_FORMAT_STR = "%d\t%.3f\t%.3f\t%.3f\t%.3f\n" File.open(INDEX_PATH, "w") do |index_file| index_file.write(sprintf("%s\t%s\t%s\t%s\t%s\n", 'model', 'epsilon', 'initial PE', 'initial KE', 'approx. final KE')) model_num = 1 ['solid', 'gas'].each do |state| linspace(0.01, 0.1, 5).each do |epsilon| initial_pe = 2*pe(RMIN, epsilon) if state == 'gas' final_ke = N * KB * 1000 initial_ke = final_ke - initial_pe else initial_ke = 0 final_ke = 0 end index_file.write(sprintf(INDEX_FORMAT_STR, model_num, epsilon, initial_pe, initial_ke, final_ke)) x, y = positions(RMIN) # some atoms headed down top_x = x.collect { |i| i + WIDTH / 2 } top_y = y.collect { |i| i + HEIGHT / 4 } top_vx, top_vy = velocities(initial_ke/2, N/2, Math::PI/2) # and some atoms headed up bottom_x = x.collect { |i| i + WIDTH / 2 } bottom_y = y.collect { |i| i + 3 * HEIGHT / 4 } bottom_vx, bottom_vy = velocities(initial_ke/2, N/2, -Math::PI/2) generate_mw_files(model_num, epsilon, top_x+bottom_x, top_y+bottom_y, top_vx+bottom_vx, top_vy+bottom_vy) convert_mml_file(model_num) model_num += 1 end end end (1..10).each { |num| generate_md2d_data_file(num) }
28.850467
119
0.657272
bb79af5c0bd7d43145f63618859d54b642966902
10,323
module VerificationHelper include DocumentsVerificationStatus def doc_status_label(doc) case doc.status when "not submitted" "warning" when "downloaded" "default" when "verified" "success" else "danger" end end def ridp_type_status(type, person) consumer = person.consumer_role case type when 'Identity' if consumer.identity_verified? consumer.identity_validation elsif consumer.has_ridp_docs_for_type?(type) && !consumer.identity_rejected 'in review' else 'outstanding' end when 'Application' if consumer.application_verified? consumer.application_validation elsif consumer.has_ridp_docs_for_type?(type) && !consumer.application_rejected 'in review' else 'outstanding' end end end def verification_type_class(status) case status when "verified" "success" when "review" "warning" when "outstanding" "danger" when "curam" "default" when "attested" "default" when "valid" "success" when "pending" "info" when "expired" "default" when "unverified" "default" end end def ridp_type_class(type, person) case ridp_type_status(type, person) when 'valid' 'success' when 'in review' 'warning' when 'outstanding' 'danger' end end def unverified?(person) person.consumer_role.aasm_state != "fully_verified" end def enrollment_group_unverified?(person) person.primary_family.contingent_enrolled_active_family_members.flat_map(&:person).flat_map(&:consumer_role).flat_map(&:verification_types).select{|type| type.is_type_outstanding?}.any? end def verification_needed?(person) person.primary_family.active_household.hbx_enrollments.verification_needed.any? if person.try(:primary_family).try(:active_household).try(:hbx_enrollments) end def has_enrolled_policy?(family_member) return true if family_member.blank? family_member.family.enrolled_policy(family_member).present? end def is_not_verified?(family_member, v_type) return true if family_member.blank? !(["na", "verified", "attested", "expired"].include?(v_type.validation_status)) end def can_show_due_date?(person) person.primary_family.contingent_enrolled_active_family_members.flat_map(&:person).flat_map(&:consumer_role).flat_map(&:verification_types).select{|type| VerificationType::DUE_DATE_STATES.include?(type.validation_status)}.any? end def documents_uploaded @person.primary_family.active_family_members.all? { |member| docs_uploaded_for_all_types(member) } end def member_has_uploaded_docs(member) true if member.person.consumer_role.try(:vlp_documents).any? { |doc| doc.identifier } end def member_has_uploaded_paper_applications(member) true if member.person.resident_role.try(:paper_applications).any? { |doc| doc.identifier } end def docs_uploaded_for_all_types(member) member.person.verification_types.all? do |type| member.person.consumer_role.vlp_documents.any?{ |doc| doc.identifier && doc.verification_type == type } end end def documents_count(family) family.family_members.map(&:person).flat_map(&:consumer_role).flat_map(&:vlp_documents).select{|doc| doc.identifier}.count end def get_person_v_type_status(people) v_type_status_list = [] people.each do |person| person.verification_types.each do |v_type| v_type_status_list << verification_type_status(v_type, person) end end v_type_status_list end def show_send_button_for_consumer? current_user.has_consumer_role? && hbx_enrollment_incomplete && documents_uploaded end def hbx_enrollment_incomplete if @person.primary_family.active_household.hbx_enrollments.verification_needed.any? 
@person.primary_family.active_household.hbx_enrollments.verification_needed.first.review_status == "incomplete" end end #use this method to send docs to review for family member level def all_docs_rejected(person) person.try(:consumer_role).try(:vlp_documents).select{|doc| doc.identifier}.all?{|doc| doc.status == "rejected"} end def no_enrollments @person.primary_family.active_household.hbx_enrollments.empty? end def enrollment_incomplete if @person.primary_family.active_household.hbx_enrollments.verification_needed.any? @person.primary_family.active_household.hbx_enrollments.verification_needed.first.review_status == "incomplete" end end def all_family_members_verified @family_members.all?{|member| member.person.consumer_role.aasm_state == "fully_verified"} end def show_doc_status(status) ["verified", "rejected"].include?(status) end def show_v_type(status, admin = nil) if status == "curam" admin ? "External Source".center(12) : "verified".capitalize.center(12).gsub(' ', '&nbsp;').html_safe elsif status status = "verified" if status == "valid" status.capitalize.center(12).gsub(' ', '&nbsp;').html_safe end end def show_ridp_type(ridp_type, person) case ridp_type_status(ridp_type, person) when 'in review' "&nbsp;&nbsp;&nbsp;In Review&nbsp;&nbsp;&nbsp;".html_safe when 'valid' "&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Verified&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;".html_safe else "&nbsp;&nbsp;Outstanding&nbsp;&nbsp;".html_safe end end # returns vlp_documents array for verification type def documents_list(person, v_type) person.consumer_role.vlp_documents.select{|doc| doc.identifier && doc.verification_type == v_type } if person.consumer_role end # returns ridp_documents array for ridp verification type def ridp_documents_list(person, ridp_type) person.consumer_role.ridp_documents.select{|doc| doc.identifier && doc.ridp_verification_type == ridp_type } if person.consumer_role end def admin_actions(v_type, f_member) options_for_select(build_admin_actions_list(v_type, f_member)) end def mod_attr(attr, val) attr.to_s + " => " + val.to_s end def ridp_admin_actions(ridp_type, person) options_for_select(build_ridp_admin_actions_list(ridp_type, person)) end def build_admin_actions_list(v_type, f_member) if f_member.consumer_role.aasm_state == 'unverified' ::VlpDocument::ADMIN_VERIFICATION_ACTIONS.reject{ |el| el == 'Call HUB' } elsif verification_type_status(v_type, f_member) == 'outstanding' ::VlpDocument::ADMIN_VERIFICATION_ACTIONS.reject{|el| el == "Reject" } else ::VlpDocument::ADMIN_VERIFICATION_ACTIONS end end def build_reject_reason_list(v_type) case v_type when "Citizenship" ::VlpDocument::CITIZEN_IMMIGR_TYPE_ADD_REASONS + ::VlpDocument::ALL_TYPES_REJECT_REASONS when "Immigration status" ::VlpDocument::CITIZEN_IMMIGR_TYPE_ADD_REASONS + ::VlpDocument::ALL_TYPES_REJECT_REASONS when "Income" #will be implemented later ::VlpDocument::INCOME_TYPE_ADD_REASONS + ::VlpDocument::ALL_TYPES_REJECT_REASONS else ::VlpDocument::ALL_TYPES_REJECT_REASONS end end def build_ridp_admin_actions_list(ridp_type, person) if ridp_type_status(ridp_type, person) == 'outstanding' ::RidpDocument::ADMIN_VERIFICATION_ACTIONS.reject{|el| el == 'Reject'} else ::RidpDocument::ADMIN_VERIFICATION_ACTIONS end end def type_unverified?(v_type, person) !["verified", "valid", "attested"].include?(verification_type_status(v_type, person)) end def request_response_details(person, record, v_type) if record.event_request_record_id v_type == "DC Residency" ? 
show_residency_request(person, record) : show_ssa_dhs_request(person, record) elsif record.event_response_record_id v_type == "DC Residency" ? show_residency_response(person, record) : show_ssa_dhs_response(person, record) end end def show_residency_request(person, record) raw_request = person.consumer_role.local_residency_requests.select{ |request| request.id == BSON::ObjectId.from_string(record.event_request_record_id) } raw_request.any? ? Nokogiri::XML(raw_request.first.body) : "no request record" end def show_ssa_dhs_request(person, record) requests = person.consumer_role.lawful_presence_determination.ssa_requests + person.consumer_role.lawful_presence_determination.vlp_requests raw_request = requests.select{|request| request.id == BSON::ObjectId.from_string(record.event_request_record_id)} if requests.any? raw_request.any? ? Nokogiri::XML(raw_request.first.body) : "no request record" end def show_residency_response(person, record) raw_response = person.consumer_role.local_residency_responses.select{ |response| response.id == BSON::ObjectId.from_string(record.event_response_record_id) } raw_response.any? ? Nokogiri::XML(raw_response.first.body) : "no response record" end def show_ssa_dhs_response(person, record) responses = person.consumer_role.lawful_presence_determination.ssa_responses + person.consumer_role.lawful_presence_determination.vlp_responses raw_request = responses.select{|response| response.id == BSON::ObjectId.from_string(record.event_response_record_id)} if responses.any? raw_request.any? ? Nokogiri::XML(raw_request.first.body) : "no response record" end def any_members_with_consumer_role?(family_members) family_members.present? && family_members.map(&:person).any?(&:has_active_consumer_role?) end def has_active_resident_members?(family_members) family_members.present? && family_members.map(&:person).any?(&:is_resident_role_active?) end def has_active_consumer_dependent?(person,dependent) person.consumer_role && person.is_consumer_role_active? && (dependent.try(:family_member).try(:person).nil? || dependent.try(:family_member).try(:person).is_consumer_role_active?) end def has_active_resident_dependent?(person,dependent) (dependent.try(:family_member).try(:person).nil? || dependent.try(:family_member).try(:person).is_resident_role_active?) end def ridp_type_unverified?(ridp_type, person) ridp_type_status(ridp_type, person) != 'valid' end end
34.99322
230
0.734573
381034eab384e33c0efcfc41a16cb9b129a4ccdf
918
#
# Session implementation for Android
#
require 'gcm'

module Pntfr
  module Session
    class Android < Pntfr::Session::Base
      attr_reader :gcm

      def initialize notification_key=nil
        if notification_key.nil?
          notification_key= Pntfr.config.gcm[:notification_key]
        end
        @gcm= ::GCM.new(notification_key)
      end

      def notify(push_ids, data)
        options = {data: data}
        if Pntfr.test_env?
          push_ids.each { |push_id| Pntfr.add_delivery(push_id, options) }
          Session::SuccessResponse.new
        else
          rs= @gcm.send_notification(push_ids, options)
          parse_response rs
        end
      end

      #---------------------------------------------------------
      private
      #---------------------------------------------------------

      def parse_response rs
        Session::GcmResponse.new(rs)
      end
    end
  end
end
23.538462
74
0.530501
91efb049652434e6e3b186df2f1a4342285ac526
9,137
module GraphQL class Schema # Used to convert your {GraphQL::Schema} to a GraphQL schema string # # @example print your schema to standard output # MySchema = GraphQL::Schema.define(query: QueryType) # puts GraphQL::Schema::Printer.print_schema(MySchema) # module Printer extend self # Return a GraphQL schema string for the defined types in the schema # @param schema [GraphQL::Schema] def print_schema(schema) print_filtered_schema(schema, lambda { |n| !is_spec_directive(n) }, method(:is_defined_type)) end # Return the GraphQL schema string for the introspection type system def print_introspection_schema query_root = ObjectType.define do name "Root" end schema = GraphQL::Schema.define(query: query_root) print_filtered_schema(schema, method(:is_spec_directive), method(:is_introspection_type)) end private def print_filtered_schema(schema, directive_filter, type_filter) directives = schema.directives.values.select{ |directive| directive_filter.call(directive) } directive_definitions = directives.map{ |directive| print_directive(directive) } types = schema.types.values.select{ |type| type_filter.call(type) }.sort_by(&:name) type_definitions = types.map{ |type| print_type(type) } [print_schema_definition(schema)].compact .concat(directive_definitions) .concat(type_definitions).join("\n\n") end def print_schema_definition(schema) if (schema.query.nil? || schema.query.name == 'Query') && (schema.mutation.nil? || schema.mutation.name == 'Mutation') && (schema.subscription.nil? || schema.subscription.name == 'Subscription') return end operations = [:query, :mutation, :subscription].map do |operation_type| object_type = schema.public_send(operation_type) " #{operation_type}: #{object_type.name}\n" if object_type end.compact.join "schema {\n#{operations}}" end BUILTIN_SCALARS = Set.new(["String", "Boolean", "Int", "Float", "ID"]) private_constant :BUILTIN_SCALARS def is_spec_directive(directive) ['skip', 'include', 'deprecated'].include?(directive.name) end def is_introspection_type(type) type.name.start_with?("__") end def is_defined_type(type) !is_introspection_type(type) && !BUILTIN_SCALARS.include?(type.name) end def print_type(type) TypeKindPrinters::STRATEGIES.fetch(type.kind).print(type) end def print_directive(directive) TypeKindPrinters::DirectivePrinter.print(directive) end module TypeKindPrinters module DeprecatedPrinter def print_deprecated(field_or_enum_value) return unless field_or_enum_value.deprecation_reason case field_or_enum_value.deprecation_reason when nil '' when '', GraphQL::Directive::DEFAULT_DEPRECATION_REASON ' @deprecated' else " @deprecated(reason: #{field_or_enum_value.deprecation_reason.to_s.inspect})" end end end module DescriptionPrinter def print_description(definition, indentation='', first_in_block=true) return '' unless definition.description description = indentation != '' && !first_in_block ? "\n" : "" description << GraphQL::Language::Comments.commentize(definition.description, indent: indentation) end end module ArgsPrinter include DescriptionPrinter def print_args(field, indentation = '') return if field.arguments.empty? field_arguments = field.arguments.values if field_arguments.all?{ |arg| !arg.description } return "(#{field_arguments.map{ |arg| print_input_value(arg) }.join(", ")})" end out = "(\n" out << field_arguments.map.with_index{ |arg, i| "#{print_description(arg, " #{indentation}", i == 0)} #{indentation}"\ "#{print_input_value(arg)}" }.join("\n") out << "\n#{indentation})" end def print_input_value(arg) if arg.default_value.nil? 
default_string = nil else default_string = " = #{print_value(arg.default_value, arg.type)}" end "#{arg.name}: #{arg.type.to_s}#{default_string}" end def print_value(value, type) case type when FLOAT_TYPE value.to_f.inspect when INT_TYPE value.to_i.inspect when BOOLEAN_TYPE (!!value).inspect when ScalarType, ID_TYPE, STRING_TYPE value.to_s.inspect when EnumType type.coerce_result(value) when InputObjectType fields = value.to_h.map{ |field_name, field_value| field_type = type.input_fields.fetch(field_name.to_s).type "#{field_name}: #{print_value(field_value, field_type)}" }.join(", ") "{#{fields}}" when NonNullType print_value(value, type.of_type) when ListType "[#{value.to_a.map{ |v| print_value(v, type.of_type) }.join(", ")}]" else raise NotImplementedError, "Unexpected value type #{type.inspect}" end end end module FieldPrinter include DeprecatedPrinter include ArgsPrinter include DescriptionPrinter def print_fields(type) type.all_fields.map.with_index{ |field, i| "#{print_description(field, ' ', i == 0)}"\ " #{field.name}#{print_args(field, ' ')}: #{field.type}#{print_deprecated(field)}" }.join("\n") end end class DirectivePrinter extend ArgsPrinter extend DescriptionPrinter def self.print(directive) "#{print_description(directive)}"\ "directive @#{directive.name}#{print_args(directive)} "\ "on #{directive.locations.join(' | ')}" end end class ScalarPrinter extend DescriptionPrinter def self.print(type) "#{print_description(type)}"\ "scalar #{type.name}" end end class ObjectPrinter extend FieldPrinter extend DescriptionPrinter def self.print(type) if type.interfaces.any? implementations = " implements #{type.interfaces.map(&:to_s).join(", ")}" else implementations = nil end "#{print_description(type)}"\ "type #{type.name}#{implementations} {\n"\ "#{print_fields(type)}\n"\ "}" end end class InterfacePrinter extend FieldPrinter extend DescriptionPrinter def self.print(type) "#{print_description(type)}"\ "interface #{type.name} {\n#{print_fields(type)}\n}" end end class UnionPrinter extend DescriptionPrinter def self.print(type) "#{print_description(type)}"\ "union #{type.name} = #{type.possible_types.map(&:to_s).join(" | ")}" end end class EnumPrinter extend DeprecatedPrinter extend DescriptionPrinter def self.print(type) values = type.values.values.map{ |v| " #{v.name}#{print_deprecated(v)}" }.join("\n") values = type.values.values.map.with_index { |v, i| "#{print_description(v, ' ', i == 0)}"\ " #{v.name}#{print_deprecated(v)}" }.join("\n") "#{print_description(type)}"\ "enum #{type.name} {\n#{values}\n}" end end class InputObjectPrinter extend FieldPrinter extend DescriptionPrinter def self.print(type) fields = type.input_fields.values.map.with_index{ |field, i| "#{print_description(field, " ", i == 0)}"\ " #{print_input_value(field)}" }.join("\n") "#{print_description(type)}"\ "input #{type.name} {\n#{fields}\n}" end end STRATEGIES = { GraphQL::TypeKinds::SCALAR => ScalarPrinter, GraphQL::TypeKinds::OBJECT => ObjectPrinter, GraphQL::TypeKinds::INTERFACE => InterfacePrinter, GraphQL::TypeKinds::UNION => UnionPrinter, GraphQL::TypeKinds::ENUM => EnumPrinter, GraphQL::TypeKinds::INPUT_OBJECT => InputObjectPrinter, } end private_constant :TypeKindPrinters end end end
34.609848
110
0.56605
b9b12a05b89331aeadbc7f9d3628f0883ef29183
148
class Test::Unit::TestCase
  def assert_matcher_accepts(matcher, instance)
    assert matcher.matches?(instance), matcher.failure_message
  end
end
24.666667
62
0.790541
0125f171ecb6117a2ddc2b7c991afda9f2d3f691
4,834
# frozen_string_literal: true require 'spec_helper' RSpec.describe EventFilter do describe '#filter' do it 'returns "all" if given filter is nil' do expect(described_class.new(nil).filter).to eq(described_class::ALL) end it 'returns "all" if given filter is ""' do expect(described_class.new('').filter).to eq(described_class::ALL) end it 'returns "all" if given filter is "foo"' do expect(described_class.new('foo').filter).to eq('all') end end describe '#apply_filter' do let_it_be(:public_project) { create(:project, :public) } let_it_be(:push_event) { create(:push_event, project: public_project) } let_it_be(:merged_event) { create(:event, :merged, project: public_project, target: public_project) } let_it_be(:created_event) { create(:event, :created, project: public_project, target: create(:issue, project: public_project)) } let_it_be(:updated_event) { create(:event, :updated, project: public_project, target: create(:issue, project: public_project)) } let_it_be(:closed_event) { create(:event, :closed, project: public_project, target: create(:issue, project: public_project)) } let_it_be(:reopened_event) { create(:event, :reopened, project: public_project, target: create(:issue, project: public_project)) } let_it_be(:comments_event) { create(:event, :commented, project: public_project, target: public_project) } let_it_be(:joined_event) { create(:event, :joined, project: public_project, target: public_project) } let_it_be(:left_event) { create(:event, :left, project: public_project, target: public_project) } let_it_be(:wiki_page_event) { create(:wiki_page_event) } let_it_be(:wiki_page_update_event) { create(:wiki_page_event, :updated) } let_it_be(:design_event) { create(:design_event) } let(:filtered_events) { described_class.new(filter).apply_filter(Event.all) } context 'with the "push" filter' do let(:filter) { described_class::PUSH } it 'filters push events only' do expect(filtered_events).to contain_exactly(push_event) end end context 'with the "merged" filter' do let(:filter) { described_class::MERGED } it 'filters merged events only' do expect(filtered_events).to contain_exactly(merged_event) end end context 'with the "issue" filter' do let(:filter) { described_class::ISSUE } it 'filters issue events only' do expect(filtered_events).to contain_exactly(created_event, updated_event, closed_event, reopened_event) end end context 'with the "comments" filter' do let(:filter) { described_class::COMMENTS } it 'filters comment events only' do expect(filtered_events).to contain_exactly(comments_event) end end context 'with the "team" filter' do let(:filter) { described_class::TEAM } it 'filters team events only' do expect(filtered_events).to contain_exactly(joined_event, left_event) end end context 'with the "all" filter' do let(:filter) { described_class::ALL } it 'returns all events' do expect(filtered_events).to eq(Event.all) end end context 'with the "design" filter' do let(:filter) { described_class::DESIGNS } it 'returns only design events' do expect(filtered_events).to contain_exactly(design_event) end context 'the :design_activity_events feature is disabled' do before do stub_feature_flags(design_activity_events: false) end it 'does not return design events' do expect(filtered_events).to match_array(Event.not_design) end end end context 'with the "wiki" filter' do let(:filter) { described_class::WIKI } it 'returns only wiki page events' do expect(filtered_events).to contain_exactly(wiki_page_event, wiki_page_update_event) end end context 'with an unknown filter' do let(:filter) { 'foo' } it 
'returns all events' do expect(filtered_events).to eq(Event.all) end end context 'with a nil filter' do let(:filter) { nil } it 'returns all events' do expect(filtered_events).to eq(Event.all) end end end describe '#active?' do let(:event_filter) { described_class.new(described_class::TEAM) } it 'returns false if filter does not include the given key' do expect(event_filter.active?('foo')).to eq(false) end it 'returns false if the given key is nil' do expect(event_filter.active?(nil)).to eq(false) end it 'returns true if filter does not include the given key' do expect(event_filter.active?(described_class::TEAM)).to eq(true) end end end
33.569444
135
0.674597
624abfc64ae55ffc9d5feb684f989e418bdb3d97
1,760
require_relative 'model/person'
require_relative 'renderers/renderer'
require_relative 'logging'
require 'thread'

class Pat
  attr_accessor :svc_mgr, :person_mgr

  def initialize(svc_mgr, person_mgr)
    @svc_mgr = svc_mgr
    @person_mgr = person_mgr
  end

  def retrieve_events(selected_persons, selected_services, days)
    all_events = []
    all_events_mutex = Mutex.new
    threads = []

    # For each enabled service configuration
    selected_services.each do |service_id|
      thr = Thread.new do
        svc_instance = svc_mgr.service_instance(service_id)

        # collect persons with this service enabled
        enabled_persons = selected_persons.map do |person_id|
          person = person_mgr.person(person_id)
          person.service_mappings[service_id].nil? ? nil : person
        end.compact

        # For each selected person that has non-nil service mapping
        enabled_persons.each do |person|
          # Prepare query
          query = Query.new(person)
          query.to = DateTime.now
          query.from = DateTime.now - days

          # Fetch data
          Logging.logger.info("Querying #{service_id} for #{person.id}")
          events = svc_instance.events(query)
          all_events_mutex.synchronize do
            all_events.concat(events)
          end # mutex
        end # persons
      end # thread
      threads.push thr
    end # services

    threads.each do |thr|
      thr.join
    end

    all_events
  end

  def render_events(selected_persons, selected_services, days, verbose, group, format)
    events = retrieve_events(selected_persons, selected_services, days)
    renderer = Renderer.new(verbose, group, format)
    renderer.render(events)
  end
end
27.5
86
0.65625
62a782e468232b332d77fa2f856abfd65221d0bb
1,227
module AStar
  # A heuristic is a lambda that takes two arguments (Vertex, Vertex) and returns
  # a Double
  #
  # this factory contains lambdas used to calculate heuristic.
  #
  class HeuristicFactory
    # Returns a function used to calculate heuristic between two
    # nodes for a 2D grid.
    #
    # Assumption is that `x` and `y` are set with the position
    # of the vertex within the 2D grid.
    #
    # (compatible when using `GraphFactory#two_dimensional_graph`)
    #
    def two_dimensional_manhatten_heuristic
      -> (lhs, rhs) do
        lhs_x = lhs.x
        lhs_y = lhs.y
        rhs_x = rhs.x
        rhs_y = rhs.y

        x = [lhs_x, rhs_x].max - [lhs_x, rhs_x].min
        y = [lhs_y, rhs_y].max - [lhs_y, rhs_y].min

        (x + y).abs.to_f
      end
    end

    # TODO:
    # http://theory.stanford.edu/~amitp/GameProgramming/MapRepresentations.html
    # A piece of information A* needs is travel times between the points. That will be manhattan distance or diagonal grid distance if your units move on a grid, or straight line distance if they can move directly between the navigation points.
    # http://theory.stanford.edu/~amitp/GameProgramming/Heuristics.html#euclidean-distance
  end
end
34.083333
244
0.673187
bfd16cd2322b71a5590ff17c30b50635ee94493f
185
# frozen_string_literal: true

desc 'Get the application set up for development'
task bootstrap: :environment do
  Rake::Task['db:create'].invoke
  Rake::Task['db:migrate'].invoke
end
20.555556
49
0.751351
1d2504a6f2339ec11c9008fb43de34beed6763ad
1,957
# Copyright © 2011 MUSC Foundation for Research Development
# All rights reserved.

# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.

# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.

# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

class CreateHumanSubjectsInfo < ActiveRecord::Migration
  def change
    create_table :human_subjects_info do |t|
      t.integer :protocol_id
      t.string :hr_number
      t.string :pro_number
      t.string :irb_of_record
      t.string :submission_type
      t.datetime :irb_approval_date
      t.datetime :irb_expiration_date

      t.timestamps
    end

    add_index :human_subjects_info, :protocol_id
  end
end
51.5
145
0.784364
18f98b00a302151ddbfeca7f6e8f8eb642d38144
199
# Constants Variables
WIN_WIDTH = 640
WIN_HEIGHT = 400
MENU_BUTTON_WIDTH = 130
MENU_BUTTON_HEIGHT = 50
FONT_SMALL = Gosu::Font.new(20)
FONT_MEDIUM = Gosu::Font.new(30)
FONT_LARGE = Gosu::Font.new(50)
24.875
32
0.768844
1ab5652881e11ec93fc65df699f8a66db557ae0c
6,890
require 'mechanize/test_case' class TestMechanizeParser < Mechanize::TestCase class P include Mechanize::Parser attr_accessor :filename attr_accessor :response attr_accessor :uri def initialize @uri = URI 'http://example' @full_path = false end end def setup super @parser = P.new end def test_extract_filename @parser.response = {} assert_equal 'index.html', @parser.extract_filename end def test_extract_filename_content_disposition @parser.uri = URI 'http://example/foo' @parser.response = { 'content-disposition' => 'attachment; filename=genome.jpeg' } assert_equal 'genome.jpeg', @parser.extract_filename end def test_extract_filename_content_disposition_bad @parser.uri = URI 'http://example/foo' @parser.response = { 'content-disposition' => "inline; filename*=UTF-8''X%20Y.jpg" } assert_equal 'foo.html', @parser.extract_filename @parser.response = { 'content-disposition' => "inline; filename=\"\"" } assert_equal 'foo.html', @parser.extract_filename end def test_extract_filename_content_disposition_path @parser.uri = URI 'http://example' @parser.response = { 'content-disposition' => 'attachment; filename="../genome.jpeg"' } assert_equal 'example/genome.jpeg', @parser.extract_filename(true) @parser.response = { 'content-disposition' => 'attachment; filename="foo/genome.jpeg"' } assert_equal 'example/genome.jpeg', @parser.extract_filename(true) end def test_extract_filename_content_disposition_path_windows @parser.uri = URI 'http://example' @parser.response = { 'content-disposition' => 'attachment; filename="..\\\\genome.jpeg"' } assert_equal 'example/genome.jpeg', @parser.extract_filename(true) @parser.response = { 'content-disposition' => 'attachment; filename="foo\\\\genome.jpeg"' } assert_equal 'example/genome.jpeg', @parser.extract_filename(true) end def test_extract_filename_content_disposition_full_path @parser.uri = URI 'http://example/foo' @parser.response = { 'content-disposition' => 'attachment; filename=genome.jpeg' } assert_equal 'example/genome.jpeg', @parser.extract_filename(true) end def test_extract_filename_content_disposition_quoted @parser.uri = URI 'http://example' @parser.response = { 'content-disposition' => 'attachment; filename="\"some \"file\""' } assert_equal '_some__file_', @parser.extract_filename end def test_extract_filename_content_disposition_special @parser.uri = URI 'http://example/foo' @parser.response = { 'content-disposition' => 'attachment; filename="/\\\\<>:\\"|?*"' } assert_equal '_______', @parser.extract_filename chars = (0..12).map { |c| c.chr }.join chars += "\\\r" chars += (14..31).map { |c| c.chr }.join @parser.response = { 'content-disposition' => "attachment; filename=\"#{chars}\"" } assert_equal '_' * 32, @parser.extract_filename end def test_extract_filename_content_disposition_windows_special @parser.uri = URI 'http://example' windows_special = %w[ AUX COM1 COM2 COM3 COM4 COM5 COM6 COM7 COM8 COM9 CON LPT1 LPT2 LPT3 LPT4 LPT5 LPT6 LPT7 LPT8 LPT9 NUL PRN ] windows_special.each do |special| @parser.response = { 'content-disposition' => "attachment; filename=#{special}" } assert_equal "_#{special}", @parser.extract_filename end end def test_extract_filename_content_disposition_empty @parser.uri = URI 'http://example' @parser.response = { 'content-disposition' => 'inline; filename="/"' } assert_equal '', @parser.extract_filename end def test_extract_filename_host @parser.response = {} @parser.uri = URI 'http://example' assert_equal 'example/index.html', @parser.extract_filename(true) end def test_extract_filename_special_character @parser.response = 
{} invisible = "\t\n\v\f\r" invisible.chars.each do |char| begin @parser.uri = URI "http://example/#{char}" assert_equal 'index.html', @parser.extract_filename, char.inspect rescue URI::InvalidURIError # ignore end end escaped = "<>\"\\|" escaped.chars.each do |char| escaped_char = CGI.escape char @parser.uri = URI "http://example/#{escaped_char}" assert_equal "#{escaped_char}.html", @parser.extract_filename, char end @parser.uri = URI "http://example/?" assert_equal 'index.html_', @parser.extract_filename, 'empty query' @parser.uri = URI "http://example/:" assert_equal '_.html', @parser.extract_filename, 'colon' @parser.uri = URI "http://example/*" assert_equal '_.html', @parser.extract_filename, 'asterisk' end def test_extract_filename_uri @parser.response = {} @parser.uri = URI 'http://example/foo' assert_equal 'foo.html', @parser.extract_filename @parser.uri += '/foo.jpg' assert_equal 'foo.jpg', @parser.extract_filename end def test_extract_filename_uri_full_path @parser.response = {} @parser.uri = URI 'http://example/foo' assert_equal 'example/foo.html', @parser.extract_filename(true) @parser.uri += '/foo.jpg' assert_equal 'example/foo.jpg', @parser.extract_filename(true) end def test_extract_filename_uri_query @parser.response = {} @parser.uri = URI 'http://example/?id=5' assert_equal 'index.html_id=5', @parser.extract_filename @parser.uri += '/foo.html?id=5' assert_equal 'foo.html_id=5', @parser.extract_filename end def test_extract_filename_uri_slash @parser.response = {} @parser.uri = URI 'http://example/foo/' assert_equal 'example/foo/index.html', @parser.extract_filename(true) @parser.uri += '/foo///' assert_equal 'example/foo/index.html', @parser.extract_filename(true) end def test_extract_filename_windows_special @parser.uri = URI 'http://example' @parser.response = {} windows_special = %w[ AUX COM1 COM2 COM3 COM4 COM5 COM6 COM7 COM8 COM9 CON LPT1 LPT2 LPT3 LPT4 LPT5 LPT6 LPT7 LPT8 LPT9 NUL PRN ] windows_special.each do |special| @parser.uri += "/#{special}" assert_equal "_#{special}.html", @parser.extract_filename end end def test_fill_header @parser.fill_header 'a' => 'b' expected = { 'a' => 'b' } assert_equal expected, @parser.response end def test_fill_header_nil @parser.fill_header nil assert_empty @parser.response end end
21.803797
74
0.642816
ff96d1d91bd382e02b01ca1476c2054312ef4cd4
1,661
require 'spec_helper'

describe TemplateFileSerializer do
  let(:template_model) { Template.new }

  it 'exposes the attributes to be jsonified' do
    serialized = described_class.new(template_model).as_json
    expected_keys = [
      :name,
      :description,
      :keywords,
      :type,
      :documentation,
      :images
    ]
    expect(serialized.keys).to match_array expected_keys
  end

  describe '#to_yaml' do
    let(:template_model) do
      Template.new(
        name: 'foo',
        description: 'bar',
        keywords: 'fizz, bin',
        type: 'wordpress',
        documentation: "This\n\is\nthe\ndocumentation",
        images: [
          Image.new(
            name: 'abc',
            source: 'def',
            type: 'ghi',
            categories: ['jkl'],
            expose: [8000],
            environment: [{ 'variable' => 'mno', 'value' => 'pqr' }],
            volumes: [{ 'host_path' => '/tmp/foo', 'container_path' => '/tmp/bar' }],
            volumes_from: [{ 'service' => 'baz' }],
            deployment: { 'count' => 2 }
          )
        ]
      )
    end

    it 'yamlizes the model appropriately' do
      response = described_class.new(template_model).to_yaml
      expect(response).to eq <<-EXPECTED
---
name: foo
description: bar
keywords: fizz, bin
type: wordpress
documentation: |-
  This
  is
  the
  documentation
images:
- name: abc
  source: def
  category: jkl
  type: ghi
  expose:
  - 8000
  environment:
  - variable: mno
    value: pqr
  volumes:
  - host_path: "/tmp/foo"
    container_path: "/tmp/bar"
  volumes_from:
  - service: baz
  deployment:
    count: 2
      EXPECTED
    end
  end
end
20.506173
85
0.569536
265e3abf844bf8d0b19f3dc2b6d52a5da6b1849a
2,328
require 'spec_helper'
require 'skyed'
require 'highline/import'

describe 'Skyed::Destroy.execute' do
  context 'when destroying vagrant machine' do
    let(:options) { { rds: false } }
    let(:opsworks) { double('Aws::OpsWorks::Client') }
    let(:repo_path) { '/home/ifosch/opsworks' }
    let(:hostname) { 'test-ifosch' }
    let(:stack_id) { 'e1403a56-286e-4b5e-6798-c3406c947b4a' }
    let(:instance_id) { '12345678-1234-4321-5678-210987654321' }
    let(:instance_online) do
      {
        instance_id: instance_id,
        hostname: hostname,
        stack_id: stack_id,
        status: 'online'
      }
    end
    let(:instance_shutting) do
      {
        instance_id: instance_id,
        hostname: hostname,
        stack_id: stack_id,
        status: 'shutting_down'
      }
    end
    let(:instance_term) do
      {
        instance_id: instance_id,
        hostname: hostname,
        stack_id: stack_id,
        status: 'terminated'
      }
    end
    before(:each) do
      expect(Skyed::Settings)
        .to receive(:empty?)
        .and_return(false)
      expect(Skyed::Settings)
        .to receive(:repo)
        .and_return(repo_path)
      expect(Skyed::Destroy)
        .to receive(:`)
        .with("cd #{repo_path} && vagrant ssh -c hostname")
        .and_return(hostname)
      expect(Skyed::Destroy)
        .to receive(:`)
        .with("cd #{repo_path} && vagrant destroy -f")
      expect(Skyed::AWS::OpsWorks)
        .to receive(:login)
        .and_return(opsworks)
      expect(Skyed::AWS::OpsWorks)
        .to receive(:deregister_instance)
        .with(hostname, opsworks)
      expect(Skyed::AWS::OpsWorks)
        .to receive(:delete_user)
        .with(opsworks)
    end
    it 'destroys the vagrant machine' do
      Skyed::Destroy.execute(nil, options, nil)
    end
  end
  context 'when destroying RDS instance' do
    let(:options) do
      {
        rds: true,
        final_snapshot_name: ''
      }
    end
    let(:args) { ['my-rds'] }
    before(:each) do
      expect(Skyed::Settings)
        .to receive(:empty?)
        .and_return(false)
      expect(Skyed::AWS::RDS)
        .to receive(:destroy_instance)
        .with(args[0], options)
    end
    it 'destroys the RDS instance' do
      Skyed::Destroy.execute(nil, options, args)
    end
  end
end
27.069767
64
0.583333
186f90ac2b94420799436b34af07cf4fb25ba390
579
# This service class is intented to provide callback behaviour to handle
# the case where a subscription order cannot be processed because a payment
# failed
module SolidusSubscriptions
  class PaymentFailedDispatcher < Dispatcher
    def dispatch
      order.touch :completed_at
      order.cancel!
      installments.each { |i| i.payment_failed!(order) }
      super
    end

    private

    def message
      "
      The following installments could not be processed due to payment
      authorization failure: #{installments.map(&:id).join(', ')}
      "
    end
  end
end
24.125
75
0.696028
7a427b1b419175a818c4ea3584fba12610747977
1,875
#
# Be sure to run `pod lib lint ImageDuplicate.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#

Pod::Spec.new do |s|
  s.name = 'ImageDuplicate'
  s.version = '0.1.5'
  s.swift_version = '5.0'
  s.summary = 'Elegant developer tool to find image duplicates'

  # This description is used to generate tags and improve search results.
  #   * Think: What does it do? Why did you write it? What is the focus?
  #   * Try to keep it short, snappy and to the point.
  #   * Write the description between the DESC delimiters below.
  #   * Finally, don't worry about the indent, CocoaPods strips it!

  s.description = <<-DESC
ImageDuplicateFinder is a developer tool to find duplicate images. Your duplicate images can be images having same data or same images present in different bundles. It will give you paths of that images.This will reduce the app size of the app as well
                       DESC

  s.homepage = 'https://github.com/arnavgupta180/ImageDuplicate'
  # s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license = { :type => 'MIT', :file => 'LICENSE' }
  s.author = { 'arnavgupta180' => '[email protected]' }
  s.source = { :git => 'https://github.com/arnavgupta180/ImageDuplicate.git', :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'

  s.ios.deployment_target = '10.0'

  s.source_files = 'Source/**/*'

  # s.resource_bundles = {
  #   'ImageDuplicate' => ['ImageDuplicate/Assets/*.png']
  # }

  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  # s.dependency 'AFNetworking', '~> 2.3'
end
41.666667
252
0.666133
7a72a1390d14c2c71003497d8bbbb61ee9cc2e7d
50
# https://docs.ruby-lang.org/en/3.0.0/String.html
25
49
0.7
b9cf783f37aed78fde3dc051f53c3f5ae0a58aef
1,225
#snippet-sourcedescription:[<<FILENAME>> demonstrates how to ...]
#snippet-keyword:[Ruby]
#snippet-keyword:[Code Sample]
#snippet-keyword:[AWS Identity and Access Management (IAM)]
#snippet-service:[iam]
#snippet-sourcetype:[<<snippet or full-example>>]
#snippet-sourcedate:[]
#snippet-sourceauthor:[AWS]
# Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# This file is licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License. A copy of the
# License is located at
#
# http://aws.amazon.com/apache2.0/
#
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

require 'aws-sdk-iam'  # v2: require 'aws-sdk'

iam = Aws::IAM::Client.new(region: 'us-west-2')

begin
  user = iam.user(user_name: 'my_groovy_user')

  key_pair = user.create_access_key_pair
  puts "Access key: #{key_pair.access_key_id}"
  puts "Secret key: #{key_pair.secret}"
rescue Aws::IAM::Errors::NoSuchEntity => ex
  puts 'User does not exist'
end
35
81
0.720816
1c83007353f94694f7dfdf6c638a7cc5b4fc46dd
2,088
require File::expand_path('../../spec_helper', __FILE__)
require 'opbeat'

describe Opbeat do
  before do
    @send = double("send")
    @event = double("event")
    allow(Opbeat).to receive(:send) { @send }
    allow(Opbeat::Event).to receive(:from_message) { @event }
    allow(Opbeat::Event).to receive(:from_exception) { @event }
  end

  it 'capture_message should send result of Event.from_message' do
    message = "Test message"
    expect(Opbeat::Event).to receive(:from_message).with(message, an_instance_of(Array), {})
    expect(Opbeat).to receive(:send).with(@event)
    Opbeat.capture_message(message)
  end

  it 'capture_message with options should send result of Event.from_message' do
    message = "Test message"
    options = {:extra => {:hello => "world"}}
    expect(Opbeat::Event).to receive(:from_message).with(message, an_instance_of(Array), options)
    expect(Opbeat).to receive(:send).with(@event)
    Opbeat.capture_message(message, options)
  end

  it 'capture_exception should send result of Event.from_exception' do
    exception = build_exception()
    expect(Opbeat::Event).to receive(:from_exception).with(exception, {})
    expect(Opbeat).to receive(:send).with(@event)
    Opbeat.capture_exception(exception)
  end

  context "async" do
    it 'capture_message should send result of Event.from_message' do
      async = lambda {}
      message = "Test message"
      expect(Opbeat::Event).to receive(:from_message).with(message, an_instance_of(Array), {})
      expect(Opbeat).to_not receive(:send)
      expect(async).to receive(:call).with(@event)
      Opbeat.configuration.async = async
      Opbeat.capture_message(message)
    end

    it 'capture_exception should send result of Event.from_exception' do
      async = lambda {}
      exception = build_exception()
      expect(Opbeat::Event).to receive(:from_exception).with(exception, {})
      expect(Opbeat).to_not receive(:send)
      expect(async).to receive(:call).with(@event)
      Opbeat.configuration.async = async
      Opbeat.capture_exception(exception)
    end
  end
end
32.123077
97
0.694923
6a52e4afb220bcad2ba4703d3b94602ffc085d93
1,047
# frozen_string_literal: true

lib = File.expand_path('./lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'tolerance/version'

Gem::Specification.new do |s|
  s.name = 'tolerance'
  s.version = Tolerance::VERSION
  s.authors = ['Yoshiyuki Hirano']
  s.email = ['[email protected]']
  s.homepage = 'https://github.com/yhirano55/tolerance'
  s.summary = %(Regards all typo as tolerance)
  s.description = s.summary
  s.license = 'MIT'

  s.files = Dir.chdir(File.expand_path('.', __dir__)) do
    `git ls-files -z`.split("\x0").reject do |f|
      f.match(%r{^(test|spec|features|images)/})
    end
  end

  s.bindir = 'exe'
  s.executables = s.files.grep(%r{^exe/}) { |f| File.basename(f) }
  s.require_paths = ['lib']

  s.required_ruby_version = '>= 2.5.0'
  s.required_rubygems_version = '>= 1.8.11'

  s.add_development_dependency 'bundler', '~> 2.0'
  s.add_development_dependency 'rake', '~> 10.0'
  s.add_development_dependency 'rspec', '~> 3.0'
end
31.727273
68
0.625597
e2eb85b30e4021eb5d5ffe32c647d721791bb171
939
module RockSolid
  module Mappers
    class ListItems < RockSolid::Mappers::Base
      def to_domain
        list_items.map do |list_item|
          RockSolid::Domain::ListItem.new(dasherized_attributes(list_item))
        end
      end

      private

      def list_items
        if empty_list?
          []
        else
          parse_list_items
        end
      end

      def empty_list?
        parse_list_items.to_s.strip.blank?
      end

      def parse_list_items
        @parsed_list_items ||= @response_object['LiquidateDirect']['GetListItemsByListName']['ListItems']['Item']
      end

      def camel_attribute_keys
        %w(liid Qty Name Storage ProductCondition Cost MarketplaceID ListName
           LastApplyTemplateName LastApplyTemplateDate SerialNumberTrackingType
           WarehouseName ProductData MyStore)
      end

      def custom_attributes
        { 'LIIDSKU' => 'liid_sku' }
      end
    end
  end
end
23.475
113
0.636848
011f2125985acce31031e0ca629b4c289d4dbc85
4,357
require 'json' # require 'yelp' require 'http' require 'optparse' # Place holders for Yelp Fusion's OAuth 2.0 credentials. Grab them # from https://www.yelp.com/developers/v3/manage_app CLIENT_ID = "zIReGNNX_JUrNTHXFvmd6A" CLIENT_SECRET = "3z7eEXqt3kFJOBFtXqDLftUZtHeo1WvvDoopMjyEMVDoPWj5nhuyrIF4NynSsaj5" # Constants, do not change these API_HOST = "https://api.yelp.com" SEARCH_PATH = "/v3/businesses/search" BUSINESS_PATH = "/v3/businesses/" # trailing / because we append the business id to the path TOKEN_PATH = "/oauth2/token" GRANT_TYPE = "client_credentials" DEFAULT_BUSINESS_ID = "yelp-san-francisco" DEFAULT_TERM = "dinner" DEFAULT_LOCATION = "San Francisco, CA" SEARCH_LIMIT = 5 # Make a request to the Fusion API token endpoint to get the access token. # # host - the API's host # path - the oauth2 token path # # Examples # # bearer_token # # => "Bearer some_fake_access_token" # # Returns your access token def bearer_token # Put the url together url = "#{API_HOST}#{TOKEN_PATH}" raise "Please set your CLIENT_ID" if CLIENT_ID.nil? raise "Please set your CLIENT_SECRET" if CLIENT_SECRET.nil? # Build our params hash params = { client_id: CLIENT_ID, client_secret: CLIENT_SECRET, grant_type: GRANT_TYPE } response = HTTP.post(url, params: params) parsed = response.parse "#{parsed['token_type']} #{parsed['access_token']}" end # Make a request to the Fusion search endpoint. Full documentation is online at: # https://www.yelp.com/developers/documentation/v3/business_search # # term - search term used to find businesses # location - what geographic location the search should happen # # Examples # # search("burrito", "san francisco") # # => { # "total": 1000000, # "businesses": [ # "name": "El Farolito" # ... # ] # } # # search("sea food", "Seattle") # # => { # "total": 1432, # "businesses": [ # "name": "Taylor Shellfish Farms" # ... 
# ] # } # # Returns a parsed json object of the request def search(term, location) url = "#{API_HOST}#{SEARCH_PATH}" params = { term: term, location: location, limit: SEARCH_LIMIT } response = HTTP.auth(bearer_token).get(url, params: params) data1=response.parse["businesses"][0]["name"] data2=response.parse["businesses"][1]["name"] data3=response.parse["businesses"][2]["name"] data4=response.parse["businesses"][3]["name"] data5=response.parse["businesses"][4]["name"] data6=response.parse["businesses"][0]["location"] location1=data6["display_address"] data7=response.parse["businesses"][1]["location"] location2=data7["display_address"] data8=response.parse["businesses"][2]["location"] location3=data8["display_address"] data9=response.parse["businesses"][3]["location"] location4=data9["display_address"] data10=response.parse["businesses"][4]["location"] location5=data10["display_address"] data10=response.parse["businesses"][4]["location"] location5=data10["display_address"] data11=response.parse["businesses"][0]["display_phone"] data12=response.parse["businesses"][1]["display_phone"] data13=response.parse["businesses"][2]["display_phone"] data14=response.parse["businesses"][3]["display_phone"] data15=response.parse["businesses"][4]["display_phone"] data21=response.parse["businesses"][0]["rating"] data22=response.parse["businesses"][1]["rating"] data23=response.parse["businesses"][2]["rating"] data24=response.parse["businesses"][3]["rating"] data25=response.parse["businesses"][4]["rating"] data31=response.parse["businesses"][0]["image_url"] data32=response.parse["businesses"][1]["image_url"] data33=response.parse["businesses"][2]["image_url"] data34=response.parse["businesses"][3]["image_url"] data35=response.parse["businesses"][4]["image_url"] [[data1,data21,data11,location1,data31],[data2,data22,data12,location2,data32],[data3,data23,data13,location3,data33],[data4,data24,data14,location4,data34],[data5,data25,data15,location5,data35]] end # def coffee_shop(location) # if location=="los angeles" # puts "Starbucks" # elsif location=="san francisco" # puts "Hiccups" # elsif location=="san jose" # puts "Dunkin Donuts" # else # "City not available" # end # end # # puts coffee_shop(answer)
26.567073
196
0.694056
f8187de7f5d5523c7b0520127424c21fc6bd3fbe
175
class CreateQuestions < ActiveRecord::Migration
  def change
    create_table :questions do |t|
      t.text :question_text

      t.timestamps null: false
    end
  end
end
17.5
47
0.691429
e9b7c9445b4deef4a444538687227111c1debbd8
3,787
FactoryGirl.define do
  factory :chargeback_rate_detail do
    chargeback_rate
    detail_currency { FactoryGirl.create(:chargeback_rate_detail_currency) }

    transient do
      tiers_params nil
    end

    trait :tiers do
      after(:create) do |chargeback_rate_detail, evaluator|
        if evaluator.tiers_params
          evaluator.tiers_params.each do |tier|
            chargeback_rate_detail.chargeback_tiers << FactoryGirl.create(*[:chargeback_tier, tier])
          end
        else
          chargeback_rate_detail.chargeback_tiers << FactoryGirl.create(:chargeback_tier)
        end
      end
    end

    trait :tiers_with_three_intervals do
      chargeback_tiers do
        [
          FactoryGirl.create(:chargeback_tier_first_of_three),
          FactoryGirl.create(:chargeback_tier_second_of_three),
          FactoryGirl.create(:chargeback_tier_third_of_three)
        ]
      end
    end
  end

  trait :megabytes do
    per_unit "megabytes"
  end

  trait :kbps do
    per_unit "kbps"
  end

  trait :gigabytes do
    per_unit "gigabytes"
  end

  trait :daily do
    per_time "daily"
  end

  trait :hourly do
    per_time "hourly"
  end

  factory :chargeback_rate_detail_cpu_used, :parent => :chargeback_rate_detail do
    per_unit "megahertz"
    chargeable_field { FactoryGirl.build(:chargeable_field_cpu_used) }
  end

  factory :chargeback_rate_detail_cpu_cores_used, :parent => :chargeback_rate_detail do
    per_unit "cores"
    chargeable_field { FactoryGirl.build(:chargeable_field_cpu_cores_used) }
  end

  factory :chargeback_rate_detail_cpu_cores_allocated, :parent => :chargeback_rate_detail do
    per_unit "cores"
    chargeable_field { FactoryGirl.build(:chargeable_field_cpu_cores_allocated) }
  end

  factory :chargeback_rate_detail_cpu_allocated, :traits => [:daily], :parent => :chargeback_rate_detail do
    per_unit "cpu"
    chargeable_field { FactoryGirl.build(:chargeable_field_cpu_allocated) }
  end

  factory :chargeback_rate_detail_memory_allocated, :traits => [:megabytes, :daily], :parent => :chargeback_rate_detail do
    chargeable_field { FactoryGirl.build(:chargeable_field_memory_allocated) }
  end

  factory :chargeback_rate_detail_memory_used, :traits => [:megabytes, :hourly], :parent => :chargeback_rate_detail do
    chargeable_field { FactoryGirl.build(:chargeable_field_memory_used) }
  end

  factory :chargeback_rate_detail_disk_io_used, :traits => [:kbps], :parent => :chargeback_rate_detail do
    chargeable_field { FactoryGirl.build(:chargeable_field_disk_io_used) }
  end

  factory :chargeback_rate_detail_net_io_used, :traits => [:kbps], :parent => :chargeback_rate_detail do
    chargeable_field { FactoryGirl.build(:chargeable_field_net_io_used) }
  end

  factory :chargeback_rate_detail_storage_used, :traits => [:gigabytes], :parent => :chargeback_rate_detail do
    chargeable_field { FactoryGirl.build(:chargeable_field_storage_used) }
  end

  factory :chargeback_rate_detail_storage_allocated, :traits => [:gigabytes], :parent => :chargeback_rate_detail do
    chargeable_field { FactoryGirl.build(:chargeable_field_storage_allocated) }
  end

  factory :chargeback_rate_detail_fixed_compute_cost, :traits => [:daily], :parent => :chargeback_rate_detail do
    chargeable_field { FactoryGirl.build(:chargeable_field_fixed_compute_1) }
  end

  factory :chargeback_rate_detail_metering_used, :traits => [:daily], :parent => :chargeback_rate_detail do
    chargeable_field { FactoryGirl.build(:chargeable_field_metering_used) }
  end
end
34.427273
112
0.695801
79a5f5b9d428f3ffb6092520f6a344e89ab763df
6,263
$:.unshift(File.dirname(__FILE__)) require 'helper' class XmlParamsParsingTest < ActionDispatch::IntegrationTest class TestController < ActionController::Base class << self attr_accessor :last_request_parameters end def parse self.class.last_request_parameters = request.request_parameters head :ok end end def teardown TestController.last_request_parameters = nil end test "parses a strict rack.input" do class Linted undef call if method_defined?(:call) def call(env) bar = env['action_dispatch.request.request_parameters']['foo'] result = "<ok>#{bar}</ok>" [200, {"Content-Type" => "application/xml", "Content-Length" => result.length.to_s}, [result]] end end req = Rack::MockRequest.new(ActionDispatch::XmlParamsParser.new(Linted.new)) resp = req.post('/', "CONTENT_TYPE" => "application/xml", :input => "<foo>bar</foo>", :lint => true) assert_equal "<ok>bar</ok>", resp.body end def assert_parses(expected, xml) with_test_routing do post "/parse", xml, default_headers assert_response :ok assert_equal(expected, TestController.last_request_parameters) end end test "nils are stripped from collections" do assert_parses( {"hash" => { "person" => nil} }, "<hash><person type=\"array\"><person nil=\"true\"/></person></hash>") assert_parses( {"hash" => { "person" => ['foo']} }, "<hash><person type=\"array\"><person>foo</person><person nil=\"true\"/></person>\n</hash>") end test "namespaces are stripped from xml" do with_test_routing do xml = "<ns2:person><name>David:Goliath</name></ns2:person>" post "/parse", xml, default_headers assert_response :ok assert_equal({"person" => {"name" => "David:Goliath"}}, TestController.last_request_parameters) end end test "parses hash params" do with_test_routing do xml = "<person><name>David</name></person>" post "/parse", xml, default_headers assert_response :ok assert_equal({"person" => {"name" => "David"}}, TestController.last_request_parameters) end end test "parses single file" do with_test_routing do xml = "<person><name>David</name><avatar type='file' name='me.jpg' content_type='image/jpg'>#{::Base64.encode64('ABC')}</avatar></person>" post "/parse", xml, default_headers assert_response :ok person = TestController.last_request_parameters assert_equal "image/jpg", person['person']['avatar'].content_type assert_equal "me.jpg", person['person']['avatar'].original_filename assert_equal "ABC", person['person']['avatar'].read end end test "logs error if parsing unsuccessful" do with_test_routing do output = StringIO.new xml = "<person><name>David</name><avatar type='file' name='me.jpg' content_type='image/jpg'>#{::Base64.encode64('ABC')}</avatar></pineapple>" post "/parse", xml, default_headers.merge('action_dispatch.show_exceptions' => true, 'action_dispatch.logger' => ActiveSupport::Logger.new(output)) assert_response :bad_request output.rewind && err = output.read assert err =~ /Error occurred while parsing request parameters/ end end test "occurring a parse error if parsing unsuccessful" do with_test_routing do begin $stderr = StringIO.new # suppress the log xml = "<person><name>David</name></pineapple>" exception = assert_raise(ActionDispatch::ParamsParser::ParseError) { post "/parse", xml, default_headers.merge('action_dispatch.show_exceptions' => false) } assert_equal REXML::ParseException, exception.original_exception.class assert_equal exception.original_exception.message, exception.message ensure $stderr = STDERR end end end test "parses multiple files" do xml = <<-end_body <person> <name>David</name> <avatars> <avatar type='file' name='me.jpg' 
content_type='image/jpg'>#{::Base64.encode64('ABC')}</avatar> <avatar type='file' name='you.gif' content_type='image/gif'>#{::Base64.encode64('DEF')}</avatar> </avatars> </person> end_body with_test_routing do post "/parse", xml, default_headers assert_response :ok end person = TestController.last_request_parameters assert_equal "image/jpg", person['person']['avatars']['avatar'].first.content_type assert_equal "me.jpg", person['person']['avatars']['avatar'].first.original_filename assert_equal "ABC", person['person']['avatars']['avatar'].first.read assert_equal "image/gif", person['person']['avatars']['avatar'].last.content_type assert_equal "you.gif", person['person']['avatars']['avatar'].last.original_filename assert_equal "DEF", person['person']['avatars']['avatar'].last.read end private def with_test_routing with_routing do |set| set.draw do post ':action', :to => ::XmlParamsParsingTest::TestController end yield end end def default_headers {'CONTENT_TYPE' => 'application/xml'} end end class LegacyXmlParamsParsingTest < XmlParamsParsingTest private def default_headers {'HTTP_X_POST_DATA_FORMAT' => 'xml'} end end class RootLessXmlParamsParsingTest < ActionDispatch::IntegrationTest class TestController < ActionController::Base wrap_parameters :person, :format => :xml class << self attr_accessor :last_request_parameters end def parse self.class.last_request_parameters = request.request_parameters head :ok end end def teardown TestController.last_request_parameters = nil end test "parses hash params" do with_test_routing do xml = "<name>David</name>" post "/parse", xml, {'CONTENT_TYPE' => 'application/xml'} assert_response :ok assert_equal({"name" => "David", "person" => {"name" => "David"}}, TestController.last_request_parameters) end end private def with_test_routing with_routing do |set| set.draw do post ':action', :to => ::RootLessXmlParamsParsingTest::TestController end yield end end end
32.450777
164
0.663101
91861a3feee6c2c147b6a8391adc7dfb59ce8561
142
class AddDropoffLocationToSettings < ActiveRecord::Migration[5.2]
  def change
    add_column :settings, :dropoff_location, :string
  end
end
23.666667
65
0.78169
21ff13ac39d0a9b3ce811916e88eabfc44fcba79
1,106
# frozen_string_literal: true

require 'bundler/setup'
require 'k_usecases'
require 'ktg_configuration_management'
require 'pry'

# Dir.chdir('lib') do
#   Dir['ktg_configuration_management/document_usecase/*.rb'].sort.each { |file| require file }
# end

RSpec.configure do |config|
  # Enable flags like --only-failures and --next-failure
  config.example_status_persistence_file_path = '.rspec_status'

  config.filter_run_when_matching :focus

  # Disable RSpec exposing methods globally on `Module` and `main`
  config.disable_monkey_patching!

  config.expect_with :rspec do |c|
    c.syntax = :expect
  end

  # ----------------------------------------------------------------------
  # Usecase Documentator
  # ----------------------------------------------------------------------
  KUsecases.configure(config)

  config.before(:context, :usecases) do
    @documentor = KUsecases::Documentor.new(self.class)
  end

  config.after(:context, :usecases) do
    @documentor.render
  end
end

def fixture_path(gem_name, name)
  File.join(File.expand_path("../fixtures/#{gem_name}", __FILE__), name)
end
26.333333
95
0.644665
1a426ac2eaac3e3767bcb3bfd31e15bf1a50292c
217
module Rails
  module Commands
    module Environment
      extend self

      def fork
        Kernel.fork do
          yield
          Kernel.exit
        end

        Process.waitall
      end
    end
  end
end
13.5625
23
0.529954
4a2a5bcd1908d8a35ae84630e4a8edbf22d5fad2
1,264
require 'fog/openstack/volume/models/snapshot'

module Fog
  module OpenStack
    class Volume
      class V2
        class Snapshot < Fog::OpenStack::Volume::Snapshot
          identity :id

          attribute :name
          attribute :status
          attribute :description
          attribute :metadata
          attribute :force
          attribute :size

          def save
            requires :name
            data = if id.nil?
                     service.create_snapshot(attributes[:volume_id], name, description, force)
                   else
                     service.update_snapshot(id, attributes.reject { |k, _v| k == :id })
                   end
            merge_attributes(data.body['snapshot'])
            true
          end

          def create
            requires :name
            # volume_id, name, description, force=false
            response = service.create_snapshot(attributes[:volume_id],
                                               attributes[:name],
                                               attributes[:description],
                                               attributes[:force])
            merge_attributes(response.body['snapshot'])
            self
          end
        end
      end
    end
  end
end
28.088889
94
0.488133
38eba7063262087dbc0c9a7f6f2f822ccf321180
948
# frozen_string_literal: true

require 'base64'

class InvokeBinaryCommand < EvilerWinRM::Command
  NAME = 'exec'
  ALIASES = [].freeze

  def initialize
    @loaded = false
  end

  def call(args)
    unless @loaded
      conn.shell.run(File.read(File.expand_path('../../../data/Invoke-Binary.ps1', __dir__)))
      @loaded = true
    end

    if args.empty?
      EvilerWinRM::LOGGER.error('Please provide an executable to invoke')
      return
    end

    fname = args.shift

    unless File.exist? fname
      EvilerWinRM::LOGGER.error('Executable does not exist')
      return
    end

    begin
      File.new(fname, 'rb').close
    rescue Errno::EACCES
      EvilerWinRM::LOGGER.error('Unable to open executable')
      return
    end

    exe64 = Base64.strict_encode64(File.binread(fname))

    conn.shell.run('Invoke-Binary', [exe64] + args) do |stdout, stderr|
      STDOUT.print(stdout)
      STDERR.print(stderr&.red)
    end
  end
end
21.545455
93
0.649789
5d531991358499c56feecd357a997b3fd58d11ce
433
# Read about factories at https://github.com/thoughtbot/factory_girl

FactoryGirl.define do
  factory :sponsor do
    name { Faker::Company.name }
    website_url { Faker::Internet.url }
    description { Faker::Lorem.paragraph }
    sponsorship_level

    after(:create) do |sponsor|
      File.open("spec/support/logos/#{1 + rand(13)}.png") do |file|
        sponsor.picture = file
      end
      sponsor.save!
    end
  end
end
22.789474
68
0.660508
03e05f253f32e2ff795c273794cfc43b62cbfb16
4,411
# encoding: UTF-8

require 'image_optim/cmd'
require 'image_optim/configuration_error'
require 'image_optim/worker/class_methods'
require 'shellwords'
require 'English'

class ImageOptim
  # Base class for all workers
  class Worker
    extend ClassMethods

    class << self
      # Default init for worker is new
      # Check example of override in gifsicle worker
      alias_method :init, :new
    end

    # Configure (raises on extra options)
    def initialize(image_optim, options = {})
      unless image_optim.is_a?(ImageOptim)
        fail ArgumentError, 'first parameter should be an ImageOptim instance'
      end
      @image_optim = image_optim
      parse_options(options)
      assert_no_unknown_options!(options)
    end

    # Return hash with worker options
    def options
      hash = {}
      self.class.option_definitions.each do |option|
        hash[option.name] = send(option.name)
      end
      hash
    end

    # Optimize image at src, output at dst, must be overriden in subclass
    # return true on success
    def optimize(_src, _dst)
      fail NotImplementedError, "implement method optimize in #{self.class}"
    end

    # List of formats which worker can optimize
    def image_formats
      format_from_name = self.class.name.downcase[/gif|jpeg|png|svg/]
      unless format_from_name
        fail "#{self.class}: can't guess applicable format from worker name"
      end
      [format_from_name.to_sym]
    end

    # Ordering in list of workers, 0 by default
    def run_order
      0
    end

    # List of bins used by worker
    def used_bins
      [self.class.bin_sym]
    end

    # Resolve used bins, raise exception concatenating all messages
    def resolve_used_bins!
      errors = BinResolver.collect_errors(used_bins) do |bin|
        @image_optim.resolve_bin!(bin)
      end
      return if errors.empty?
      fail BinResolver::Error, wrap_resolver_error_message(errors.join(', '))
    end

    # Check if operation resulted in optimized file
    def optimized?(src, dst)
      dst.size? && dst.size < src.size
    end

    # Short inspect
    def inspect
      options_string = self.class.option_definitions.map do |option|
        " @#{option.name}=#{send(option.name).inspect}"
      end.join(',')
      "#<#{self.class}#{options_string}>"
    end

    private

    def parse_options(options)
      self.class.option_definitions.each do |option_definition|
        value = option_definition.value(self, options)
        instance_variable_set("@#{option_definition.name}", value)
      end
    end

    def assert_no_unknown_options!(options)
      known_keys = self.class.option_definitions.map(&:name)
      unknown_options = options.reject{ |key, _value| known_keys.include?(key) }
      return if unknown_options.empty?
      fail ConfigurationError, "unknown options #{unknown_options.inspect} "\
          "for #{self}"
    end

    # Forward bin resolving to image_optim
    def resolve_bin!(bin)
      @image_optim.resolve_bin!(bin)
    rescue BinResolver::Error => e
      raise e, wrap_resolver_error_message(e.message), e.backtrace
    end

    def wrap_resolver_error_message(message)
      name = self.class.bin_sym
      "#{name} worker: #{message}; please provide proper binary or "\
          "disable this worker (--no-#{name} argument or "\
          "`:#{name} => false` through options)"
    end

    # Run command setting priority and hiding output
    def execute(bin, *arguments)
      resolve_bin!(bin)

      cmd_args = [bin, *arguments].map(&:to_s)

      start = Time.now
      success = run_command(cmd_args)

      if @image_optim.verbose
        seconds = Time.now - start
        $stderr << "#{success ? '✓' : '✗'} #{seconds}s #{cmd_args.shelljoin}\n"
      end

      success
    end

    # Run command defining environment, setting nice level, removing output and
    # reraising signal exception
    def run_command(cmd_args)
      args = if RUBY_VERSION < '1.9' || defined?(JRUBY_VERSION)
        %W[
          env PATH=#{@image_optim.env_path.shellescape}
          nice -n #{@image_optim.nice}
          #{cmd_args.shelljoin} > /dev/null 2>&1
        ].join(' ')
      else
        [
          {'PATH' => @image_optim.env_path},
          %W[nice -n #{@image_optim.nice}],
          cmd_args,
          {:out => '/dev/null', :err => '/dev/null'},
        ].flatten
      end

      Cmd.run(*args)
    end
  end
end
28.275641
80
0.642485
621832acb9ff7f56d0564f7a462f7de9f5b19d3e
890
module Fastlane
  module Actions
    class ImportFromGitAction < Action
      def self.run(params)
        # this is implemented in the fast_file.rb
      end

      #####################################################
      # @!group Documentation
      #####################################################

      def self.description
        "Import another Fastfile from a remote git repository to use its lanes"
      end

      def self.details
        [
          "This is useful if you have shared lanes across multiple apps and you want to store the Fastfile",
          "in a remote git repository."
        ].join("\n")
      end

      def self.available_options
      end

      def self.output
        []
      end

      def self.authors
        ["fabiomassimo", "KrauseFx"]
      end

      def self.is_supported?(platform)
        true
      end
    end
  end
end
22.25
108
0.506742
6a73d1b051a1713ea5ce472b9693357dd3ab8e08
339
# frozen_string_literal: true

module Renalware
  module LowClearance
    class MDMPresenter < Renalware::MDMPresenter
      def low_clearance
        @low_clearance ||= begin
          LowClearance.cast_patient(patient).profile&.document ||
            Renalware::LowClearance::ProfileDocument.new
        end
      end
    end
  end
end
22.6
65
0.678466
ab168f6e0e883e36e5eab1bfc34ce4f1e6d83bd7
663
module Phrasing
  class String
    attr_reader :value

    def initialize(value)
      @value = value
    end

    def to_bool
      if major_version >= 5
        ActiveModel::Type::Boolean.new.cast(value)
      elsif major_version == 4 && minor_version >= 2
        ActiveRecord::Type::Boolean.new.type_cast_from_database(value)
      else
        ActiveRecord::ConnectionAdapters::Column.value_to_boolean(value)
      end
    end

    private

    def major_version
      rails_version.first.to_i
    end

    def minor_version
      rails_version.second.to_i
    end

    def rails_version
      @rails_version ||= ::Rails.version.split('.')
    end
  end
end
19.5
72
0.650075
624bd1937fed386f61f69025ec29ec95138ec2f2
749
cask 'font-fantasquesansmono-nerd-font' do
  version '1.0.0'
  sha256 'c083b7af3f507ef86e19b73fd0bc0d6109fd59bb7d05bea29f9717a3a75a2cf0'

  url "https://github.com/ryanoasis/nerd-fonts/releases/download/v#{version}/FantasqueSansMono.zip"
  appcast 'https://github.com/ryanoasis/nerd-fonts/releases.atom',
          checkpoint: '5677b051ebf92449af603ba6f1837c1bba529a881fd41fe0ec5dea830e1f7867'
  name 'FantasqueSansMono Nerd Font (FantasqueSansMono)'
  homepage 'https://github.com/ryanoasis/nerd-fonts'

  font 'Fantasque Sans Mono Bold Nerd Font Complete.ttf'
  font 'Fantasque Sans Mono Bold Italic Nerd Font Complete.ttf'
  font 'Fantasque Sans Mono Regular Nerd Font Complete.ttf'
  font 'Fantasque Sans Mono Italic Nerd Font Complete.ttf'
end
46.8125
99
0.794393
333cd51f439410b6d28d92e619ccd48702e1e367
144
module MailEngine
  module Sendgrid
    extend ActiveSupport::Autoload

    autoload :Base
    autoload :SmtpApi
    autoload :RestApi
  end
end
16
34
0.729167
e2d53eec94d130f472c2db040eb599764cf416a7
5,415
RSpec.describe "coronavirus_local_restrictions/results.html" do include CoronavirusLocalRestrictionsHelpers include CoronavirusContentItemHelper describe "current restrictions" do it "rendering tier 4 results for a postcode in tier 4" do render_results_view(local_restriction: { current_alert_level: 4 }) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.level_four.heading_pretext")) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.level_four.heading_tier_label")) end it "rendering tier 3 results for a postcode in tier 3" do render_results_view(local_restriction: { current_alert_level: 3 }) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.level_three.heading_pretext")) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.level_three.heading_tier_label")) end it "rendering tier 2 results for a postcode in tier 2" do render_results_view(local_restriction: { current_alert_level: 2 }) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.level_two.heading_pretext")) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.level_two.heading_tier_label")) end it "rendering tier 1 results for a postcode in tier 1" do render_results_view(local_restriction: { current_alert_level: 1 }) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.level_one.heading_pretext")) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.level_one.heading_tier_label")) end end describe "devolved nations" do it "rendering results for a Welsh postcode" do render_results_view(local_restriction: { country_name: "Wales" }) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.devolved_nations.wales.guidance.label")) end it "rendering results for a Scottish postcode" do render_results_view(local_restriction: { country_name: "Scotland" }) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.devolved_nations.scotland.guidance.label")) end it "rendering results for a Northern Irish postcode" do render_results_view(local_restriction: { country_name: "Northern Ireland" }) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.devolved_nations.northern_ireland.guidance.label")) end end describe "future restrictions" do before { @area = "Naboo" } it "rendering restrictions changing from level one to level two" do render_results_view(local_restriction: { current_alert_level: 1, future_alert_level: 2, name: @area }) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.level_one.changing_alert_level", area: @area)) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.future.level_two.alert_level", area: @area)) end it "rendering restrictions changing from level two to level three" do render_results_view(local_restriction: { current_alert_level: 2, future_alert_level: 3, name: @area }) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.level_two.changing_alert_level", area: @area)) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.future.level_three.alert_level", area: @area)) end it "rendering restrictions changing from level three to level four" do render_results_view(local_restriction: { current_alert_level: 3, future_alert_level: 4, name: @area }) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.results.level_three.changing_alert_level", area: @area)) expect(rendered).to 
match(I18n.t("coronavirus_local_restrictions.results.future.level_four.alert_level", area: @area)) end end describe "out of date restrictions" do it "rendering an out of date warning on the tier one page" do render_results_view(local_restriction: { current_alert_level: 1 }, out_of_date: true) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.out_of_date_warning")) end it "rendering an out of date warning on the tier two page" do render_results_view(local_restriction: { current_alert_level: 2 }, out_of_date: true) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.out_of_date_warning")) end it "rendering an out of date warning on the tier three page" do render_results_view(local_restriction: { current_alert_level: 3 }, out_of_date: true) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.out_of_date_warning")) end it "rendering an out of date warning on the tier four page" do render_results_view(local_restriction: { current_alert_level: 4 }, out_of_date: true) expect(rendered).to match(I18n.t("coronavirus_local_restrictions.out_of_date_warning")) end end def render_results_view(local_restriction: {}, out_of_date: false) local_restriction = { postcode: "E1 8QS" }.merge(local_restriction) @search = PostcodeLocalRestrictionSearch.new(local_restriction[:postcode]) stub_local_restriction(**local_restriction) allow(view) .to receive(:out_of_date?) .and_return(out_of_date) render template: "coronavirus_local_restrictions/results" end end
46.282051
130
0.76362
acb00ef0673b109555541d7bec13527979d29171
789
# frozen_string_literal: true

module BannerMessageSteps
  step 'I edit the banner message' do
    @banner_message = Fabricate.build(:banner_message)
    click_on 'Banner Message'
    fill_in 'banner_message_title', with: @banner_message.title
    within '.markdown-field-wrapper__bannermessage__body' do
      value = @banner_message.body
      element = find('.CodeMirror', visible: false)
      execute_script("arguments[0].CodeMirror.getDoc().setValue('#{value}')", element)
    end
    first('input[name="commit"]').click
  end

  step 'my banner message should be viewable on the homepage' do
    visit '/'
    expect(page).to have_content(@banner_message.title)
    expect(page).to have_content(@banner_message.body)
  end
end

RSpec.configure { |c| c.include BannerMessageSteps }
31.56
86
0.724968
79726a03c2919436846ec001197cf64f0df8300a
1,074
class FixReportSettingCols < ActiveRecord::Migration[4.2]
  def up
    Report::Report.all.each do |r|
      # convert values in the report display_type, bar_style, question_labels, and percent_type columns to be underscored values, which work as i18n keys
      %w(display_type bar_style question_labels percent_type).each do |col|
        r.send("#{col}=", r.send(col).underscore.gsub(" ", "_")) unless r.send(col).nil?
      end

      # fix any 'undefined' values in the bar_style col
      r.bar_style = "side_by_side" if r.bar_style == "undefined"

      # change any blank percent type to 'none'
      r.percent_type = "none" if r.percent_type.blank?

      r.save(:validate => false)
    end

    # fix default values
    change_column :report_reports, :display_type, :string, :default => "table"
    change_column :report_reports, :bar_style, :string, :default => "side_by_side"
    change_column :report_reports, :question_labels, :string, :default => "code"
    change_column :report_reports, :percent_type, :string, :default => "none"
  end

  def down
  end
end
38.357143
153
0.689944
08062b03d5b625f0f7bd14e20e373fbb2830636e
1,238
class RemoveDesolateInstances < ActiveRecord::Migration
  def up
    # Reset all column information in case someone runs all migrations at once
    Character.reset_column_information
    Feat.reset_column_information
    Item.reset_column_information
    Log.reset_column_information
    PowerAttribute.reset_column_information
    PowerWeapon.reset_column_information
    Power.reset_column_information
    RitualAttribute.reset_column_information
    Ritual.reset_column_information
    Skill.reset_column_information
    User.reset_column_information

    # Start deleting desolate records
    Character.where("user_id NOT IN (SELECT DISTINCT(id) FROM users)").destroy_all
    Character.where("user_id IS NULL").destroy_all

    models = [ Feat, Item, Log, Power, Ritual, Skill ]

    models.each do |m|
      m.where("character_id NOT IN (SELECT DISTINCT(id) FROM characters)").delete_all
    end

    PowerAttribute.where("power_id NOT IN (SELECT DISTINCT(id) FROM powers)").
      delete_all
    PowerWeapon.where("power_id NOT IN (SELECT DISTINCT(id) FROM powers)").
      delete_all
    RitualAttribute.where("ritual_id NOT IN (SELECT DISTINCT(id) FROM rituals)").
      delete_all
  end
end
30.195122
85
0.732633
620b3ec8b35860aee8eed3e940583fc7dd4db5aa
587
require 'rubygems'
require 'sinatra'
require 'sinatra/reloader'
require 'sinatra/activerecord'

set :database, 'sqlite3:timetable.db'

class Client < ActiveRecord::Base
  validates :name, presence: true
  validates :role, presence: true
end

class Week < ActiveRecord::Base
end

before do
  @clients = Client.all
  @c = Client.new
end

get "/client/:id/edit" do
  erb "edit"
end

get "/client/:id/remove" do
  Client.destroy params[:id]
  erb :index
end

get '/' do
  erb :index
end

post '/' do
  @c = Client.new params[:client]
  if !@c.save
    @error = "Error"
  end
  erb :index
end
14.675
37
0.688245
f87cc4bdadee460444d01789023a0ad432ded244
257
class ApplicationController < ActionController::Base
  # Prevent CSRF attacks by raising an exception.
  # For APIs, you may want to use :null_session instead.
  protect_from_forgery with: :exception
  include SessionsHelper
  before_action :logged_in?
end
32.125
56
0.793774
397d5efbac667985f7f44854e8f82929ded55292
349
class FontBentham < Cask
  version '002.001'
  sha256 '40e607c31bbd253acc04a7f0772d2a4c1cbd693771f5c3585ce255522057a524'

  url 'https://googlefontdirectory.googlecode.com/hg-history/67342bc472599b4c32201ee4a002fe59a6447a42/ofl/bentham/Bentham-Regular.ttf'
  homepage 'http://www.google.com/fonts/specimen/Bentham'

  font 'Bentham-Regular.ttf'
end
34.9
134
0.82235
bb9971a3d614761282f8426a9ecfce2d59d0ce67
547
# frozen_string_literal: true

# Selects a story from given request
class LoadStoryService
  attr_reader :story

  def initialize(request)
    @request = request
    set_story
  end

  def update(user)
    save_game = user.save_games.find_or_create_by story: story
    user.update active_game: save_game
  end

  def found?
    story.present?
  end

  private

  def set_story
    return @story = Story.all.sample if title == 'random story'

    @story = Story.find_by title: title
  end

  def title
    @title ||= @request.slot :story
  end
end
16.575758
63
0.700183
6264e8b3efd94486b1db584fb2f46f24c7b12179
4,906
require File.dirname(__FILE__) + '/test_helper' class Tree def self.human_name; 'Tree'; end end class TreesController < InheritedResources::Base has_scope :color, :unless => :show_all_colors? has_scope :only_tall, :boolean => true, :only => :index, :if => :restrict_to_only_tall_trees? has_scope :shadown_range, :default => 10, :except => [ :index, :show, :destroy, :new ] has_scope :root_type, :as => :root has_scope :calculate_height, :default => proc {|c| c.session[:height] || 20 }, :only => :new protected def restrict_to_only_tall_trees? true end def show_all_colors? false end end class HasScopeTest < ActionController::TestCase tests TreesController def setup @controller.stubs(:resource_url).returns('/') @controller.stubs(:collection_url).returns('/') end def test_boolean_scope_is_called_when_boolean_param_is_true Tree.expects(:only_tall).with().returns(Tree).in_sequence Tree.expects(:find).with(:all).returns([mock_tree]).in_sequence get :index, :only_tall => 'true' assert_equal([mock_tree], assigns(:trees)) assert_equal({ :only_tall => 'true' }, assigns(:current_scopes)) end def test_boolean_scope_is_called_when_boolean_param_is_false Tree.expects(:only_tall).never Tree.expects(:find).with(:all).returns([mock_tree]) get :index, :only_tall => 'false' assert_equal([mock_tree], assigns(:trees)) assert_equal({ :only_tall => 'false' }, assigns(:current_scopes)) end def test_scope_is_called_only_on_index Tree.expects(:only_tall).never Tree.expects(:find).with('42').returns(mock_tree) get :show, :only_tall => 'true', :id => '42' assert_equal(mock_tree, assigns(:tree)) assert_equal({ }, assigns(:current_scopes)) end def test_scope_is_skipped_when_if_option_is_false @controller.stubs(:restrict_to_only_tall_trees?).returns(false) Tree.expects(:only_tall).never Tree.expects(:find).with(:all).returns([mock_tree]) get :index, :only_tall => 'true' assert_equal([mock_tree], assigns(:trees)) assert_equal({ }, assigns(:current_scopes)) end def test_scope_is_skipped_when_unless_option_is_true @controller.stubs(:show_all_colors?).returns(true) Tree.expects(:color).never Tree.expects(:find).with(:all).returns([mock_tree]) get :index, :color => 'blue' assert_equal([mock_tree], assigns(:trees)) assert_equal({ }, assigns(:current_scopes)) end def test_scope_is_called_except_on_index Tree.expects(:shadown_range).with().never Tree.expects(:find).with(:all).returns([mock_tree]) get :index, :shadown_range => 20 assert_equal([mock_tree], assigns(:trees)) assert_equal({ }, assigns(:current_scopes)) end def test_scope_is_called_with_arguments Tree.expects(:color).with('blue').returns(Tree).in_sequence Tree.expects(:find).with(:all).returns([mock_tree]).in_sequence get :index, :color => 'blue' assert_equal([mock_tree], assigns(:trees)) assert_equal({ :color => 'blue' }, assigns(:current_scopes)) end def test_multiple_scopes_are_called Tree.expects(:only_tall).with().returns(Tree) Tree.expects(:color).with('blue').returns(Tree) Tree.expects(:find).with(:all).returns([mock_tree]) get :index, :color => 'blue', :only_tall => 'true' assert_equal([mock_tree], assigns(:trees)) assert_equal({ :color => 'blue', :only_tall => 'true' }, assigns(:current_scopes)) end def test_scope_is_called_with_default_value Tree.expects(:shadown_range).with(10).returns(Tree).in_sequence Tree.expects(:find).with('42').returns(mock_tree).in_sequence get :edit, :id => '42' assert_equal(mock_tree, assigns(:tree)) assert_equal({ :shadown_range => 10 }, assigns(:current_scopes)) end def test_default_scope_value_can_be_overwritten 
Tree.expects(:shadown_range).with('20').returns(Tree).in_sequence Tree.expects(:find).with('42').returns(mock_tree).in_sequence get :edit, :id => '42', :shadown_range => '20' assert_equal(mock_tree, assigns(:tree)) assert_equal({ :shadown_range => '20' }, assigns(:current_scopes)) end def test_scope_with_different_key Tree.expects(:root_type).with('outside').returns(Tree).in_sequence Tree.expects(:find).with('42').returns(mock_tree).in_sequence get :show, :id => '42', :root => 'outside' assert_equal(mock_tree, assigns(:tree)) assert_equal({ :root => 'outside' }, assigns(:current_scopes)) end def test_scope_with_default_value_as_proc session[:height] = 100 Tree.expects(:calculate_height).with(100).returns(Tree).in_sequence Tree.expects(:new).returns(mock_tree).in_sequence get :new assert_equal(mock_tree, assigns(:tree)) assert_equal({ :calculate_height => 100 }, assigns(:current_scopes)) end protected def mock_tree(stubs={}) @mock_tree ||= mock(stubs) end end
35.042857
95
0.704036
085c50cd6486dcc6a37b322630ffcde67bcea149
1,874
Rails.application.routes.draw do
  devise_for :users
  root to: "home#index"
  get 'home/index'
  get 'terms' => 'pages#terms'
  get 'about' => 'pages#about'

  resource :contacts, only: [:new, :create], path_names: {:new => '' }

  resources :articles do
    resources :comments, only: :create
  end

  # The priority is based upon order of creation: first created -> highest priority.
  # See how all your routes lay out with "rake routes".

  # You can have the root of your site routed with "root"
  # root 'welcome#index'

  # Example of regular route:
  #   get 'products/:id' => 'catalog#view'

  # Example of named route that can be invoked with purchase_url(id: product.id)
  #   get 'products/:id/purchase' => 'catalog#purchase', as: :purchase

  # Example resource route (maps HTTP verbs to controller actions automatically):
  #   resources :products

  # Example resource route with options:
  #   resources :products do
  #     member do
  #       get 'short'
  #       post 'toggle'
  #     end
  #
  #     collection do
  #       get 'sold'
  #     end
  #   end

  # Example resource route with sub-resources:
  #   resources :products do
  #     resources :comments, :sales
  #     resource :seller
  #   end

  # Example resource route with more complex sub-resources:
  #   resources :products do
  #     resources :comments
  #     resources :sales do
  #       get 'recent', on: :collection
  #     end
  #   end

  # Example resource route with concerns:
  #   concern :toggleable do
  #     post 'toggle'
  #   end
  #   resources :posts, concerns: :toggleable
  #   resources :photos, concerns: :toggleable

  # Example resource route within a namespace:
  #   namespace :admin do
  #     # Directs /admin/products/* to Admin::ProductsController
  #     # (app/controllers/admin/products_controller.rb)
  #     resources :products
  #   end
end
26.394366
84
0.64301
61b271ddd7c3f58adeca329b90c53a284b07e6e2
1,448
# frozen_string_literal: true

#--
# Copyright (c) 2014-2021 David Heinemeier Hansson
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++

require "active_support"
require "active_support/rails"
require "active_job/version"
require "global_id"

module ActiveJob
  extend ActiveSupport::Autoload

  autoload :Base
  autoload :QueueAdapters
  autoload :Serializers
  autoload :ConfiguredJob
  autoload :TestCase
  autoload :TestHelper
end
35.317073
72
0.780387
d54898c111bbacd8f517c3f94bc77489e988c993
573
# frozen_string_literal: true

# == Schema Information
#
# Table name: interviews
#
#  id                 :bigint           not null, primary key
#  time               :datetime
#  priority           :string(10)
#  job_application_id :integer
#  created_at         :datetime         not null
#  updated_at         :datetime         not null
#  location           :string
#  comment            :text
#

FactoryBot.define do
  factory :interview do
    time { rand(1..10).days.from_now }
    priority
    job_application
    location { 'Location' }
    comment { 'Comment' }
  end
end
22.92
61
0.579407
031892ab5a6d823ccd8c99e725b1445509938b1a
78
# frozen_string_literal: true

module GraphqlGraceful
  VERSION = "0.1.0"
end
13
29
0.75641
3847eac462a3fc1048b413d49862ab52983d0e5c
180
class CreateJsons < ActiveRecord::Migration
  def change
    create_table :jsons do |t|
      t.string :type
      t.string :data

      t.timestamps null: false
    end
  end
end
16.363636
43
0.65
ffdafc7e84f85a7b9e578b981d5fbe1d989c5c13
1,231
# frozen_string_literal: true

# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE

Gem::Specification.new do |spec|
  spec.name          = 'aws-sdk-apprunner'
  spec.version       = File.read(File.expand_path('../VERSION', __FILE__)).strip
  spec.summary       = 'AWS SDK for Ruby - AWS App Runner'
  spec.description   = 'Official AWS Ruby gem for AWS App Runner. This gem is part of the AWS SDK for Ruby.'
  spec.author        = 'Amazon Web Services'
  spec.homepage      = 'https://github.com/aws/aws-sdk-ruby'
  spec.license       = 'Apache-2.0'
  spec.email         = ['[email protected]']
  spec.require_paths = ['lib']
  spec.files         = Dir['LICENSE.txt', 'CHANGELOG.md', 'VERSION', 'lib/**/*.rb']

  spec.metadata = {
    'source_code_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/version-3/gems/aws-sdk-apprunner',
    'changelog_uri'   => 'https://github.com/aws/aws-sdk-ruby/tree/version-3/gems/aws-sdk-apprunner/CHANGELOG.md'
  }

  spec.add_dependency('aws-sdk-core', '~> 3', '>= 3.119.0')
  spec.add_dependency('aws-sigv4', '~> 1.1')
end
38.46875
113
0.6645
7976480ef06b3f3b5e0899b85fdbf95482b969f0
442
class ConditionIsStorageland < ConditionNickname
  def names
    [
      "calciform pools",
      "crucible of the spirit dragon",
      "dreadship reef",
      "fountain of cho",
      "fungal reaches",
      "mage-ring network",
      "mercadian bazaar",
      "molten slagheap",
      "rushwood grove",
      "saltcrusted steppe",
      "saprazzan cove",
      "subterranean hangar",
    ]
  end

  def to_s
    "is:storageland"
  end
end
19.217391
48
0.597285
4a0a2d5a999c52ecf167ce0aacd00086d848d2c8
130
module EnvLint
  class Variable < Struct.new(:name, :value, :optional, :comment)
    alias_method :optional?, :optional
  end
end
21.666667
65
0.715385
bbccc99c43e1851edb82d69bbd2d9d5eaa8d7f28
53
# no longer required, all methods moved to install.rb
53
53
0.792453
873eef09ffa352083c3a1ba737bafaec698d84f1
2,915
require "spec_helper" require "teaspoon/environment" describe Teaspoon::Environment do subject { Teaspoon::Environment } describe ".load" do it "calls require_environment if Rails isn't available" do subject.should_receive(:rails_loaded?).and_return(false) subject.should_receive(:require_environment) subject.should_receive(:rails_loaded?).and_return(true) Teaspoon::Environment.load end it "raises if Rails can't be found" do subject.should_receive(:rails_loaded?).twice.and_return(false) subject.should_receive(:require_environment) expect{ Teaspoon::Environment.load }.to raise_error("Rails environment not found.") end it "calls configure_from_options if the environment is ready" do subject.should_receive(:rails_loaded?).and_return(true) subject.should_receive(:configure_from_options) Teaspoon::Environment.load end end describe ".require_environment" do it "allows passing an override" do subject.should_receive(:require_env).with(File.expand_path("override", Dir.pwd)) subject.require_environment("override") end it "looks for the standard files" do subject.stub(:require_env) File.should_receive(:exists?).with(File.expand_path("spec/teaspoon_env.rb", Dir.pwd)).and_return(true) subject.should_receive(:require_env).with(File.expand_path("spec/teaspoon_env.rb", Dir.pwd)) subject.require_environment File.should_receive(:exists?).with(File.expand_path("spec/teaspoon_env.rb", Dir.pwd)).and_return(false) File.should_receive(:exists?).with(File.expand_path("test/teaspoon_env.rb", Dir.pwd)).and_return(true) subject.should_receive(:require_env).with(File.expand_path("test/teaspoon_env.rb", Dir.pwd)) subject.require_environment File.should_receive(:exists?).with(File.expand_path("spec/teaspoon_env.rb", Dir.pwd)).and_return(false) File.should_receive(:exists?).with(File.expand_path("test/teaspoon_env.rb", Dir.pwd)).and_return(false) File.should_receive(:exists?).with(File.expand_path("teaspoon_env.rb", Dir.pwd)).and_return(true) subject.should_receive(:require_env).with(File.expand_path("teaspoon_env.rb", Dir.pwd)) subject.require_environment end it "raises if no env file was found" do File.stub(:exists?) expect{ subject.require_environment }.to raise_error(Teaspoon::EnvironmentNotFound) end end describe ".standard_environments" do it "returns an array" do expect(subject.standard_environments).to eql(["spec/teaspoon_env.rb", "test/teaspoon_env.rb", "teaspoon_env.rb"]) end end describe ".configure_from_options" do it "allows overriding configuration directives from options" do Teaspoon.configuration.should_receive(:color=).with(false) Teaspoon::Environment.configure_from_options(color: false) end end end
36.4375
119
0.734134
62abc57e7ec1df72cdcc58919b7f79747df5dfa4
7,619
# Generated by the protocol buffer compiler. DO NOT EDIT! # Source: google/cloud/automl/v1/service.proto for package 'Google.Cloud.AutoML.V1' # Original file comments: # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # require 'grpc' require 'google/cloud/automl/v1/service_pb' module Google module Cloud module AutoML module V1 module AutoMl # AutoML Server API. # # The resource names are assigned by the server. # The server never reuses names that it has created after the resources with # those names are deleted. # # An ID of a resource is the last element of the item's resource name. For # `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}`, then # the id for the item is `{dataset_id}`. # # Currently the only supported `location_id` is "us-central1". # # On any input that is documented to expect a string parameter in # snake_case or kebab-case, either of those cases is accepted. class Service include GRPC::GenericService self.marshal_class_method = :encode self.unmarshal_class_method = :decode self.service_name = 'google.cloud.automl.v1.AutoMl' # Creates a dataset. rpc :CreateDataset, ::Google::Cloud::AutoML::V1::CreateDatasetRequest, ::Google::Longrunning::Operation # Gets a dataset. rpc :GetDataset, ::Google::Cloud::AutoML::V1::GetDatasetRequest, ::Google::Cloud::AutoML::V1::Dataset # Lists datasets in a project. rpc :ListDatasets, ::Google::Cloud::AutoML::V1::ListDatasetsRequest, ::Google::Cloud::AutoML::V1::ListDatasetsResponse # Updates a dataset. rpc :UpdateDataset, ::Google::Cloud::AutoML::V1::UpdateDatasetRequest, ::Google::Cloud::AutoML::V1::Dataset # Deletes a dataset and all of its contents. # Returns empty response in the # [response][google.longrunning.Operation.response] field when it completes, # and `delete_details` in the # [metadata][google.longrunning.Operation.metadata] field. rpc :DeleteDataset, ::Google::Cloud::AutoML::V1::DeleteDatasetRequest, ::Google::Longrunning::Operation # Imports data into a dataset. # For Tables this method can only be called on an empty Dataset. # # For Tables: # * A # [schema_inference_version][google.cloud.automl.v1.InputConfig.params] # parameter must be explicitly set. # Returns an empty response in the # [response][google.longrunning.Operation.response] field when it completes. rpc :ImportData, ::Google::Cloud::AutoML::V1::ImportDataRequest, ::Google::Longrunning::Operation # Exports dataset's data to the provided output location. # Returns an empty response in the # [response][google.longrunning.Operation.response] field when it completes. rpc :ExportData, ::Google::Cloud::AutoML::V1::ExportDataRequest, ::Google::Longrunning::Operation # Gets an annotation spec. rpc :GetAnnotationSpec, ::Google::Cloud::AutoML::V1::GetAnnotationSpecRequest, ::Google::Cloud::AutoML::V1::AnnotationSpec # Creates a model. # Returns a Model in the [response][google.longrunning.Operation.response] # field when it completes. 
# When you create a model, several model evaluations are created for it: # a global evaluation, and one evaluation for each annotation spec. rpc :CreateModel, ::Google::Cloud::AutoML::V1::CreateModelRequest, ::Google::Longrunning::Operation # Gets a model. rpc :GetModel, ::Google::Cloud::AutoML::V1::GetModelRequest, ::Google::Cloud::AutoML::V1::Model # Lists models. rpc :ListModels, ::Google::Cloud::AutoML::V1::ListModelsRequest, ::Google::Cloud::AutoML::V1::ListModelsResponse # Deletes a model. # Returns `google.protobuf.Empty` in the # [response][google.longrunning.Operation.response] field when it completes, # and `delete_details` in the # [metadata][google.longrunning.Operation.metadata] field. rpc :DeleteModel, ::Google::Cloud::AutoML::V1::DeleteModelRequest, ::Google::Longrunning::Operation # Updates a model. rpc :UpdateModel, ::Google::Cloud::AutoML::V1::UpdateModelRequest, ::Google::Cloud::AutoML::V1::Model # Deploys a model. If a model is already deployed, deploying it with the # same parameters has no effect. Deploying with different parametrs # (as e.g. changing # # [node_number][google.cloud.automl.v1p1beta.ImageObjectDetectionModelDeploymentMetadata.node_number]) # will reset the deployment state without pausing the model's availability. # # Only applicable for Text Classification, Image Object Detection , Tables, and Image Segmentation; all other domains manage # deployment automatically. # # Returns an empty response in the # [response][google.longrunning.Operation.response] field when it completes. rpc :DeployModel, ::Google::Cloud::AutoML::V1::DeployModelRequest, ::Google::Longrunning::Operation # Undeploys a model. If the model is not deployed this method has no effect. # # Only applicable for Text Classification, Image Object Detection and Tables; # all other domains manage deployment automatically. # # Returns an empty response in the # [response][google.longrunning.Operation.response] field when it completes. rpc :UndeployModel, ::Google::Cloud::AutoML::V1::UndeployModelRequest, ::Google::Longrunning::Operation # Exports a trained, "export-able", model to a user specified Google Cloud # Storage location. A model is considered export-able if and only if it has # an export format defined for it in # [ModelExportOutputConfig][google.cloud.automl.v1.ModelExportOutputConfig]. # # Returns an empty response in the # [response][google.longrunning.Operation.response] field when it completes. rpc :ExportModel, ::Google::Cloud::AutoML::V1::ExportModelRequest, ::Google::Longrunning::Operation # Gets a model evaluation. rpc :GetModelEvaluation, ::Google::Cloud::AutoML::V1::GetModelEvaluationRequest, ::Google::Cloud::AutoML::V1::ModelEvaluation # Lists model evaluations. rpc :ListModelEvaluations, ::Google::Cloud::AutoML::V1::ListModelEvaluationsRequest, ::Google::Cloud::AutoML::V1::ListModelEvaluationsResponse end Stub = Service.rpc_stub_class end end end end end
55.210145
154
0.648248
eda7d65c82b50b1dd407d044910accbd31bdad23
1,109
=begin
#Selling Partner API for Merchant Fulfillment

#The Selling Partner API for Merchant Fulfillment helps you build applications that let sellers purchase shipping for non-Prime and Prime orders using Amazon’s Buy Shipping Services.

OpenAPI spec version: v0

Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.33
=end

require 'spec_helper'
require 'json'
require 'date'

# Unit tests for AmzSpApi::MerchantFulfillmentV0::AvailableFormatOptionsForLabel
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'AvailableFormatOptionsForLabel' do
  before do
    # run before each test
    @instance = AmzSpApi::MerchantFulfillmentV0::AvailableFormatOptionsForLabel.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of AvailableFormatOptionsForLabel' do
    it 'should create an instance of AvailableFormatOptionsForLabel' do
      expect(@instance).to be_instance_of(AmzSpApi::MerchantFulfillmentV0::AvailableFormatOptionsForLabel)
    end
  end
end
31.685714
182
0.798918
873d7d6d09903b632685bf27a05ee630d5a92f51
2,683
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.

ActiveRecord::Schema.define(version: 20181010122808) do

  create_table "messages", force: :cascade do |t|
    t.integer "from_id"
    t.integer "to_id"
    t.string "content"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.index ["from_id", "created_at"], name: "index_messages_on_from_id_and_created_at"
    t.index ["from_id"], name: "index_messages_on_from_id"
    t.index ["to_id", "created_at"], name: "index_messages_on_to_id_and_created_at"
    t.index ["to_id"], name: "index_messages_on_to_id"
  end

  create_table "microposts", force: :cascade do |t|
    t.text "content"
    t.integer "user_id"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.string "picture"
    t.string "in_reply_to"
    t.index ["user_id", "created_at"], name: "index_microposts_on_user_id_and_created_at"
    t.index ["user_id"], name: "index_microposts_on_user_id"
  end

  create_table "relationships", force: :cascade do |t|
    t.integer "follower_id"
    t.integer "followed_id"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.index ["followed_id"], name: "index_relationships_on_followed_id"
    t.index ["follower_id", "followed_id"], name: "index_relationships_on_follower_id_and_followed_id", unique: true
    t.index ["follower_id"], name: "index_relationships_on_follower_id"
  end

  create_table "users", force: :cascade do |t|
    t.string "name"
    t.string "email"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.string "password_digest"
    t.string "remember_digest"
    t.boolean "admin", default: false
    t.string "activation_digest"
    t.boolean "activated", default: false
    t.datetime "activated_at"
    t.string "reset_digest"
    t.datetime "reset_sent_at"
    t.index ["email"], name: "index_users_on_email", unique: true
    t.index ["name"], name: "index_users_on_name"
  end

end
40.651515
116
0.727544
0358a3251cb7ca6971c305c9dd41dc15f64410cb
272
require_relative 'plan'
require_relative 'input'
require_relative 'worker_group'

plan = Plan.new(INPUT)
workers = WorkerGroup.new(5, plan: plan)

while (plan.more?) do
  workers.work_on plan.next_steps
end

puts "The total time to complete was #{workers.time_completed}"
20.923077
63
0.775735
79269dc7cbf34c846d50d0da24c6007439348cbd
26,046
# frozen_string_literal: true # rubocop:disable Lint/RedundantCopDisableDirective, Layout/LineLength, Layout/HeredocIndentation module Engine module Config module Game module G18CO JSON = <<-'DATA' { "filename": "18_co", "modulename": "18CO", "currencyFormatStr": "$%d", "bankCash": 10000, "certLimit": { "3": 19, "4": 14, "5": 12, "6": 10 }, "startingCash": { "3": 500, "4": 375, "5": 300, "6": 250 }, "capitalization": "incremental", "layout": "pointy", "axes": { "rows": "numbers", "columns": "letters" }, "mustSellInBlocks": false, "locationNames": { "A11": "Laramie, WY", "A17": "Cheyenne, WY", "B10": "Walden", "B22": "Sterling", "B26": "Lincoln, NE (SLC +100)", "C7": "Craig", "C9": "Steamboat Springs", "C15": "Fort Collins", "C17": "Greeley", "C21": "Fort Morgan", "D4": "Meeker", "D14": "Boulder", "D24": "Wray", "E1": "Salt Lake City, UT", "E5": "Rifle", "E7": "Glenwood Springs", "E11": "Dillon", "E15": "Denver", "E27": "Kansas City, KS (SLC +100)", "F8": "Aspen", "F12": "South Park", "F20": "Limon", "F24": "Burlington", "G3": "Grand Junction", "G17": "Colorado Springs", "G27": "Kansas City, KS (SLC +100)", "H6": "Montrose", "H8": "Gunnison", "H12": "Salida", "H14": "Canon City", "I17": "Pueblo", "I21": "La Junta", "I23": "Lamar", "J6": "Silverton", "J26": "Wichita, KS (SLC +100)", "K5": "Durango", "K13": "Alamosa", "K17": "Trinidad", "L2": "Farmington, NM", "L14": "Santa Fe, NM", "L20": "Fort Worth, TX" }, "tiles": { "3": 6, "4": 6, "5": 3, "6": 6, "7": 15, "8": 25, "9": 25, "57": 6, "58": 6, "co1": { "count": 1, "color": "yellow", "code": "city=revenue:30,slots:2;city=revenue:30;city=revenue:30;path=a:5,b:_0;path=a:_0,b:0;path=a:1,b:_1;path=a:_1,b:2;path=a:3,b:_2;path=a:_2,b:4;label=D;" }, "co5": { "count": 1, "color": "yellow", "code": "city=revenue:20;city=revenue:20,hide:1;path=a:0,b:_0;path=a:_0,b:5;path=a:2,b:_1;path=a:_1,b:4;label=CS;" }, "14": 4, "15": 4, "16": 2, "17": 2, "18": 2, "19": 2, "20": 2, "21": 1, "22": 1, "23": 3, "24": 3, "25": 2, "26": 2, "27": 2, "28": 2, "29": 2, "co8": { "count": 1, "color": "green", "code": "town=revenue:20;junction;path=a:0,b:_0;path=a:1,b:_0;path=a:3,b:_0" }, "co9": { "count": 1, "color": "green", "code": "town=revenue:20;junction;path=a:0,b:_0;path=a:5,b:_0;path=a:3,b:_0" }, "co10": { "count": 1, "color": "green", "code": "town=revenue:20;junction;path=a:0,b:_0;path=a:1,b:_0;path=a:2,b:_0" }, "co2": { "count": 1, "color": "green", "code": "city=revenue:50,slots:3;city=revenue:50,hide:1;path=a:0,b:_0;path=a:1,b:_0;path=a:4,b:_0;path=a:5,b:_0;path=a:2,b:_1;path=a:3,b:_1;label=D;" }, "co6": { "count": 1, "color": "green", "code": "city=revenue:40,slots:2;path=a:0,b:_0;path=a:2,b:_0;path=a:3,b:_0;path=a:4,b:_0;path=a:5,b:_0;label=CS;" }, "39": 1, "40": 2, "41": 1, "42": 1, "43": 1, "44": 1, "45": 1, "46": 1, "47": 1, "63": 6, "co3": { "count": 1, "color": "brown", "code": "city=revenue:70,slots:4;path=a:0,b:_0;path=a:1,b:_0;path=a:2,b:_0;path=a:3,b:_0;path=a:4,b:_0;path=a:5,b:_0;label=D;" }, "co4": { "count": 3, "color": "brown", "code": "city=revenue:50,slots:2;path=a:0,b:_0;path=a:1,b:_0;path=a:2,b:_0;path=a:3,b:_0;path=a:4,b:_0;" }, "co7": { "count": 1, "color": "brown", "code": "city=revenue:60,slots:3;path=a:0,b:_0,;path=a:1,b:_0;path=a:2,b:_0;path=a:3,b:_0;path=a:4,b:_0;path=a:5,b:_0;label=CS;" } }, "market": [ [ "140", "145", "150", "155", "165", "175", "190", "205", "225", "250", "280", "315", "355", "395", "440", "485" ], [ "110", "115", "120", "125", "135", "145p", "160p", "175", "195", "220", "250", "280", "315", "350", "385", "425" ], [ 
"85", "90", "95", "100p", "110p", "120p", "135p", "150", "170", "195", "220", "245", "275", "305", "335", "370" ], [ "65", "70", "75p", "80p", "90p", "100", "115", "130", "150", "170", "195", "215", "240", "265", "290", "320" ], [ "50", "55", "60p", "65", "75", "85", "100", "115", "130", "150", "170", "185", "205" ], [ "40", "45", "50p", "55", "65", "75", "85", "100", "115", "130" ], [ "30", "35", "40p", "45", "55", "65", "75", "85" ], [ "25", "30", "35", "40", "45", "55", "65" ], [ "20", "25", "30", "35", "40", "45" ], [ "15", "20", "25", "30", "35", "40" ], [ "10b", "15", "20", "25", "30" ], [ "10b", "10b", "15", "20", "20" ], [ "10b", "10b", "10b", "15b", "15b" ] ], "companies": [ { "sym": "IMC", "name": "Idarado Mining Company", "value": 30, "revenue": 5, "desc": "Money gained from mine tokens is doubled for the owning Corporation. If owned by a Corporation, closes on purchase of “6” train, otherwise closes on purchase of “5” train.", "abilities": [ ] }, { "sym": "GJGR", "name": "Grand Junction and Grand River Valley Railway", "value": 40, "revenue": 10, "desc": "An owning Corporation may upgrade a yellow town to a green city in additional to its normal tile lay. Action closes the company or closes on purchase of “5” train.", "abilities": [ { "type": "tile_lay", "free": true, "owner_type": "corporation", "when": "track", "count": 1, "tiles": [ "14", "15" ], "hexes": [ "B10", "B22", "C7", "C9", "C17", "C21", "D4", "D14", "D24", "E5", "E7", "E11", "F8", "F12", "F20", "F24", "H8", "H12", "H14", "I21", "I23", "J6", "K13" ] } ] }, { "sym": "DNP", "name": "Denver, Northwestern and Pacific Railroad", "value": 50, "revenue": 10, "desc": "An owning Corporation may return a station token to its charter to gain the token cost. Corporation must always have at least one token on the board. Action closes the company or closes on purchase of “5” train.", "abilities": [ ] }, { "sym": "Toll", "name": "Saguache & San Juan Toll Road Company", "value": 60, "revenue": 10, "desc": "An owning Corporation receives a $20 discount on the cost of tile lays. Closes on purchase of “5” train.", "abilities": [ { "type": "tile_discount", "discount": 20, "terrain": "mountain" } ] }, { "sym": "LNPW", "name": "Laramie, North Park and Western Railroad", "value": 70, "revenue": 15, "desc": "An owning Corporation may lay an extra tile at no cost in addition to its normal tile lay. Action closes the company or closes on purchase of “5” train.", "abilities": [ { "type": "tile_lay", "free": true, "special": false, "reachable": true, "owner_type": "corporation", "when": "track", "count": 1, "hexes": [ ], "tiles": [ ] } ] }, { "sym": "DPRT", "name": "Denver Pacific Railway and Telegraph Company", "value": 100, "revenue": 15, "desc": "The owner immediately receives one share of either Denver Pacific Railroad, Colorado and Southern Railroad, Kansas Pacific Railway or Colorado Midland Railway. The railroad receives money equal to the par value when the President’s Certificate is purchased. Closes on purchase of “5” train.", "abilities": [ { "type": "shares", "shares": "random_share", "corporations": [ "CS", "DPAC", "KPAC", "CM" ] } ] }, { "sym": "DRGR", "name": "Denver & Rio Grande Railway Silverton Branch", "value": 120, "revenue": 25, "desc": "The owner receives the Presidency of Durango and Silverton Narrow Gauge, which floats immediately. Closes when the DSNG runs a train or on purchase of “5” train. Cannot be purchased by a Corporation. 
Does not count towards net worth.", "abilities": [ { "type": "shares", "shares": "DSNG_0" }, { "type": "close", "corporation": "Durango and Silverton Narrow Gauge" }, { "type": "no_buy" } ] } ], "corporations": [ { "sym": "KPAC", "name": "Kansas Pacific Railway", "group": "III", "float_percent": 40, "always_market_price": true, "logo": "18_co/KPAC", "tokens": [ 0, 40, 100 ], "coordinates": "E27", "color": "brown" }, { "sym": "CM", "name": "Colorado Midland Railway", "group": "III", "float_percent": 40, "always_market_price": true, "logo": "18_co/CM", "tokens": [ 0, 40, 100, 100 ], "coordinates": "G17", "color": "lightBlue" }, { "sym": "CS", "name": "Colorado and Southern Railway", "group": "III", "float_percent": 40, "always_market_price": true, "logo": "18_co/CS", "tokens": [ 0, 40, 100, 100 ], "coordinates": "K17", "color": "black" }, { "sym": "DPAC", "name": "Denver Pacific Railway", "group": "III", "float_percent": 40, "always_market_price": true, "logo": "18_co/DPAC", "tokens": [ 0, 40 ], "coordinates": "E15", "color": "purple" }, { "sym": "DSL", "name": "Denver and Salt Lake Railroad", "group": "III", "float_percent": 40, "always_market_price": true, "logo": "18_co/DSL", "tokens": [ 0, 40 ], "coordinates": "E15", "color": "green" }, { "sym": "DRG", "name": "Denver and Rio Grande Railroad", "group": "II", "float_percent": 50, "always_market_price": true, "logo": "18_co/DRG", "tokens": [ 0, 40, 80, 100, 100, 100 ], "coordinates": "E15", "color": "yellow", "text_color": "black" }, { "sym": "ATSF", "name": "Atchinson, Tokepa and Santa Fe", "group": "II", "float_percent": 50, "always_market_price": true, "logo": "18_co/ATSF", "tokens": [ 0, 40, 80, 100, 100, 100 ], "coordinates": "J26", "color": "blue" }, { "sym": "CBQ", "name": "Chicago, Burlington and Quincy", "group": "I", "float_percent": 60, "always_market_price": true, "logo": "18_co/CBQ", "tokens": [ 0, 40, 80, 100, 100, 100, 100 ], "coordinates": "B26", "color": "orange", "text_color": "black" }, { "sym": "ROCK", "name": "Chicago, Rock Island and Pacific", "group": "I", "float_percent": 60, "always_market_price": true, "logo": "18_co/ROCK", "tokens": [ 0, 40, 80, 100, 100, 100, 100, 100 ], "coordinates": "G27", "color": "red" }, { "sym": "UP", "name": "Union Pacific", "group": "I", "float_percent": 60, "always_market_price": true, "logo": "18_co/UP", "tokens": [ 0, 40, 80, 100, 100, 100, 100, 100 ], "coordinates": "A17", "color": "white", "text_color": "black" }, { "sym": "DSNG", "name": "Durango and Silverton Narrow Gauge", "group": "X", "float_percent": 20, "always_market_price": true, "logo": "18_co/DSNG", "tokens": [ 0, 40 ], "coordinates": "K5", "color": "pink" } ], "trains": [ { "name": "2P", "distance": [ { "nodes": [ "city", "offboard" ], "pay": 2, "visit": 2 }, { "nodes": [ "town" ], "pay": 99, "visit": 99 } ], "price": 0, "num": 1 }, { "name": "2", "distance": [ { "nodes": [ "city", "offboard" ], "pay": 2, "visit": 2 }, { "nodes": [ "town" ], "pay": 99, "visit": 99 } ], "price": 100, "rusts_on": "4", "num": 6 }, { "name": "3", "distance": [ { "nodes": [ "city", "offboard" ], "pay": 3, "visit": 3 }, { "nodes": [ "town" ], "pay": 99, "visit": 99 } ], "price": 180, "rusts_on": "4D", "num": 5 }, { "name": "4", "distance": [ { "nodes": [ "city", "offboard" ], "pay": 4, "visit": 4 }, { "nodes": [ "town" ], "pay": 99, "visit": 99 } ], "price": 280, "rusts_on": "6", "num": 4 }, { "name": "5", "distance": [ { "nodes": [ "city", "offboard" ], "pay": 5, "visit": 5 }, { "nodes": [ "town" ], "pay": 99, "visit": 99 } ], "events": [ { 
"type": "close_companies" } ], "price": 500, "rusts_on": "E", "num": 2 }, { "name": "4D", "distance": [ { "nodes": [ "city", "offboard" ], "pay": 4, "visit": 4, "multiplier": 2 }, { "nodes": [ "town" ], "pay": 0, "visit": 99 } ], "available_on": "5", "price": 650, "num": 3 }, { "name": "6", "distance": [ { "nodes": [ "city", "offboard" ], "pay": 6, "visit": 6 }, { "nodes": [ "town" ], "pay": 99, "visit": 99 } ], "events": [ { "type": "remove_mines" } ], "price": 720, "num": 10 }, { "name": "5D", "distance": [ { "nodes": [ "city", "offboard" ], "pay": 5, "visit": 5, "multiplier": 2 }, { "nodes": [ "town" ], "pay": 0, "visit": 99 } ], "available_on": "6", "price": 850, "num": 2 }, { "name": "E", "distance": [ { "nodes": [ "city", "offboard" ], "pay": 99, "visit": 99 }, { "nodes": [ "town" ], "pay": 99, "visit": 99 } ], "available_on": "6", "price": 1000, "num": 1 } ], "hexes": { "white": { "": [ "B2", "B4", "B6", "B16", "B18", "B20", "B24", "C3", "C5", "C19", "C23", "C25", "D2", "D16", "D18", "D20", "D22", "E3", "E17", "E19", "E21", "E23", "E25", "F2", "F18", "F22", "G13", "G19", "G21", "G23", "G25", "H18", "H20", "H22", "H24", "I5", "I19", "I25", "J2", "J16", "J18", "J20", "J22", "J24", "K3", "K19", "K21", "K23", "K25" ], "city=revenue:0": [ "C15", "K17" ], "city=revenue:10;path=a:5,b:_0;path=a:_0,b:0;label=CS;border=edge:1,type:mountain,cost:40;": [ "G17" ], "city=revenue:10;city=revenue:0,loc:7;city=revenue:10;path=a:5,b:_0;path=a:3,b:_2;label=D;border=edge:0,type:mountain,cost:40;border=edge:1,type:mountain,cost:40;": [ "E15" ], "town=revenue:0": [ "B22", "C7", "D4", "D24", "F20", "F24", "I21", "I23" ], "town=revenue:0;icon=image:18_co/upgrade,sticky:1,name:upgrade": [ "C17", "C21" ], "border=edge:1,type:mountain,cost:40;": [ "B14", "K11" ], "border=edge:3,type:mountain,cost:40;": [ "I3" ], "border=edge:4,type:mountain,cost:40;": [ "J12" ], "border=edge:5,type:mountain,cost:40;": [ "F4" ], "border=edge:0,type:mountain,cost:40;border=edge:1,type:mountain,cost:40;": [ "F16" ], "border=edge:0,type:mountain,cost:40;border=edge:2,type:mountain,cost:40;": [ "H16" ], "border=edge:1,type:mountain,cost:40;border=edge:2,type:mountain,cost:40;": [ "I11" ], "border=edge:2,type:mountain,cost:40;border=edge:3,type:mountain,cost:40;": [ "G5" ], "border=edge:3,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;": [ "H2" ], "border=edge:0,type:mountain,cost:40;border=edge:1,type:mountain,cost:40;border=edge:2,type:mountain,cost:40;": [ "D10" ], "town=revenue:0;border=edge:3,type:mountain,cost:40;": [ "E5" ], "town=revenue:0;border=edge:0,type:mountain,cost:40;border=edge:1,type:mountain,cost:40;": [ "B10" ], "town=revenue:0;border=edge:1,type:mountain,cost:40;border=edge:2,type:mountain,cost:40;": [ "F12" ], "town=revenue:0;icon=image:18_co/upgrade,sticky:1,name:upgrade;border=edge:4,type:mountain,cost:40;": [ "K13" ], "town=revenue:0;icon=image:18_co/upgrade,sticky:1,name:upgrade;border=edge:0,type:mountain,cost:40;border=edge:2,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "H8" ], "town=revenue:0;icon=image:18_co/mine,sticky:1,name:mine;icon=image:18_co/upgrade,sticky:1,name:upgrade;border=edge:0,type:mountain,cost:40;border=edge:1,type:mountain,cost:40;": [ "D14" ], "town=revenue:0;upgrade=cost:40,terrain:mountain;border=edge:0,type:mountain,cost:40;border=edge:3,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "F8" ], 
"town=revenue:0;upgrade=cost:40,terrain:mountain;icon=image:18_co/mine,sticky:1,name:mine;border=edge:1,type:mountain,cost:40;": [ "H12" ], "town=revenue:0;upgrade=cost:40,terrain:mountain;icon=image:18_co/mine,sticky:1,name:mine;border=edge:0,type:mountain,cost:40;border=edge:1,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "E11" ], "town=revenue:0;upgrade=cost:40,terrain:mountain;icon=image:18_co/mine,sticky:1,name:mine;border=edge:0,type:mountain,cost:40;border=edge:0,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "J6" ], "town=revenue:0;upgrade=cost:40,terrain:mountain;icon=image:18_co/upgrade,sticky:1,name:upgrade;border=edge:0,type:mountain,cost:40;": [ "H14" ], "town=revenue:0;upgrade=cost:40,terrain:mountain;icon=image:18_co/upgrade,sticky:1,name:upgrade;border=edge:2,type:mountain,cost:40;border=edge:3,type:mountain,cost:40;": [ "E7" ], "town=revenue:0;upgrade=cost:40,terrain:mountain;icon=image:18_co/upgrade,sticky:1,name:upgrade;border=edge:3,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "C9" ], "city=revenue:0;border=edge:0,type:mountain,cost:40;": [ "G3" ], "city=revenue:0;border=edge:1,type:mountain,cost:40;": [ "I17" ], "city=revenue:0;border=edge:3,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "H6" ], "upgrade=cost:40,terrain:mountain;border=edge:4,type:mountain,cost:40;": [ "B8", "B12" ], "upgrade=cost:40,terrain:mountain;border=edge:5,type:mountain,cost:40;": [ "C11", "J4" ], "upgrade=cost:40,terrain:mountain;border=edge:0,type:mountain,cost:40;border=edge:1,type:mountain,cost:40;": [ "H4" ], "upgrade=cost:40,terrain:mountain;border=edge:0,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "D6" ], "upgrade=cost:40,terrain:mountain;border=edge:2,type:mountain,cost:40;border=edge:3,type:mountain,cost:40;": [ "K7" ], "upgrade=cost:40,terrain:mountain;border=edge:3,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;": [ "F14" ], "upgrade=cost:40,terrain:mountain;border=edge:0,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "D8" ], "upgrade=cost:40,terrain:mountain;border=edge:1,type:mountain,cost:40;border=edge:2,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "J14" ], "upgrade=cost:40,terrain:mountain;border=edge:2,type:mountain,cost:40;border=edge:3,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;": [ "K9" ], "upgrade=cost:40,terrain:mountain;border=edge:2,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "D12" ], "upgrade=cost:40,terrain:mountain;border=edge:3,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "I13" ], "upgrade=cost:40,terrain:mountain;border=edge:0,type:mountain,cost:40;border=edge:3,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "H10" ], "upgrade=cost:40,terrain:mountain;border=edge:1,type:mountain,cost:40;border=edge:1,type:mountain,cost:40;border=edge:3,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;": [ "I15" ], "upgrade=cost:40,terrain:mountain;border=edge:1,type:mountain,cost:40;border=edge:2,type:mountain,cost:40;border=edge:3,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;": [ "G9" ], 
"upgrade=cost:40,terrain:mountain;border=edge:0,type:mountain,cost:40;border=edge:2,type:mountain,cost:40;border=edge:3,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "E9" ], "upgrade=cost:40,terrain:mountain;icon=image:18_co/mine,sticky:1,name:mine;border=edge:0,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "F6" ], "upgrade=cost:40,terrain:mountain;icon=image:18_co/mine,sticky:1,name:mine;border=edge:1,type:mountain,cost:40;border=edge:2,type:mountain,cost:40;": [ "K15" ], "upgrade=cost:40,terrain:mountain;icon=image:18_co/mine,sticky:1,name:mine;border=edge:2,type:mountain,cost:40;border=edge:3,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "I7" ], "upgrade=cost:40,terrain:mountain;icon=image:18_co/mine,sticky:1,name:mine;border=edge:3,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "G15" ], "upgrade=cost:40,terrain:mountain;icon=image:18_co/mine,sticky:1,name:mine;border=edge:1,type:mountain,cost:40;border=edge:2,type:mountain,cost:40;border=edge:3,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;": [ "E13" ], "upgrade=cost:40,terrain:mountain;icon=image:18_co/mine,sticky:1,name:mine;border=edge:0,type:mountain,cost:40;border=edge:1,type:mountain,cost:40;border=edge:2,type:mountain,cost:40;border=edge:3,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;": [ "F10" ], "upgrade=cost:40,terrain:mountain;icon=image:18_co/mine,sticky:1,name:mine;border=edge:0,type:mountain,cost:40;border=edge:1,type:mountain,cost:40;border=edge:2,type:mountain,cost:40;border=edge:3,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "J8" ], "upgrade=cost:40,terrain:mountain;icon=image:18_co/mine,sticky:1,name:mine;border=edge:0,type:mountain,cost:40;border=edge:2,type:mountain,cost:40;border=edge:3,type:mountain,cost:40;border=edge:4,type:mountain,cost:40;border=edge:5,type:mountain,cost:40;": [ "G7", "I9" ], "icon=image:18_co/mine,sticky:1,name:mine;border=edge:0,type:mountain,cost:40;border=edge:1,type:mountain,cost:40;": [ "G11" ], "icon=image:18_co/mine,sticky:1,name:mine;border=edge:0,type:mountain,cost:40;border=edge:2,type:mountain,cost:40;": [ "J10" ] }, "red": { "offboard=revenue:yellow_50|brown_20;path=a:0,b:_0,terminal:1;": [ "A11" ], "city=revenue:yellow_40|brown_50;path=a:0,b:_0,terminal:1;path=a:5,b:_0,terminal:1;": [ "A17" ], "city=revenue:yellow_50|brown_30;path=a:0,b:_0,terminal:1;path=a:1,b:_0,terminal:1;": [ "B26" ], "offboard=revenue:yellow_50|brown_70;path=a:3,b:_0;path=a:5,b:_0,terminal:1;": [ "E1" ], "city=revenue:yellow_50|brown_30;path=a:0,b:_0,terminal:1;": [ "E27" ], "city=revenue:yellow_50|brown_30;path=a:2,b:_0,terminal:1;": [ "G27" ], "city=revenue:yellow_40|brown_20;path=a:0,b:_0;path=a:1,b:_0,terminal:1;path=a:2,b:_0,terminal:1": [ "J26" ], "offboard=revenue:yellow_20|brown_30;path=a:4,b:_0,terminal:1;": [ "L2" ], "offboard=revenue:yellow_30|brown_50;path=a:1,b:_0,terminal:1;": [ "L14", "L20" ] }, "gray": { "": [ "C13" ], "path=a:0,b:5;path=a:1,b:5;path=a:1,b:3;path=a:2,b:3": [ "F26" ], "path=a:2,b:1;path=a:3,b:1;": [ "L4" ], "path=a:2,b:4;path=a:3,b:4;": [ "L12", "L18" ] }, "yellow": { "city=revenue:0;border=edge:2,type:mountain,cost:40;border=edge:3,type:mountain,cost:40;": [ "K5" ] } }, "phases": [ { "name": "2", "train_limit": 4, "tiles": [ "yellow" ], "operating_rounds": 2, "status": [ "can_buy_companies_from_other_players" ] }, { "name": "3", "on": "3", "train_limit": 4, "tiles": [ "yellow", "green" ], 
"status": [ "can_buy_companies", "can_buy_companies_from_other_players" ], "operating_rounds": 2 }, { "name": "4", "on": "4", "train_limit": 3, "tiles": [ "yellow", "green" ], "status": [ "can_buy_companies", "can_buy_companies_from_other_players" ], "operating_rounds": 2 }, { "name": "5", "on": "5", "train_limit": 3, "tiles": [ "yellow", "green", "brown" ], "operating_rounds": 2 }, { "name": "5b", "on": "4D", "train_limit": 2, "tiles": [ "yellow", "green", "brown" ], "operating_rounds": 2 }, { "name": "6", "on": "6", "train_limit": 2, "tiles": [ "yellow", "green", "brown" ], "operating_rounds": 2 }, { "name": "6b", "on": "5D", "train_limit": 2, "tiles": [ "yellow", "green", "brown" ], "operating_rounds": 2 }, { "name": "7", "on": "E", "train_limit": 2, "tiles": [ "yellow", "green", "brown" ], "operating_rounds": 2 } ] } DATA end end end end # rubocop:enable Lint/RedundantCopDisableDirective, Layout/LineLength, Layout/HeredocIndentation
20.622328
304
0.534285
f73d860c048b08c465cf9bf2cb832e83db14e6b7
877
require 'test_helper'

class UsersIndexTest < ActionDispatch::IntegrationTest

  # test "the truth" do
  #   assert true
  # end

  def setup
    @admin = users(:michael)
    @non_admin = users(:archer)
  end

  test "index as admin including pagination and delete links" do
    log_in_as(@admin)
    get users_path
    assert_template 'users/index'
    assert_select 'div.pagination'
    first_page_of_users = User.paginate(page: 1)
    first_page_of_users.each do |user|
      assert_select 'a[href=?]', user_path(user), text: user.name
      unless user == @admin
        assert_select 'a[href=?]', user_path(user), text: 'delete'
      end
    end
    assert_difference 'User.count', -1 do
      delete user_path(@non_admin)
    end
  end

  test "index as non-admin" do
    log_in_as(@non_admin)
    get users_path
    assert_select 'a', text: 'delete', count: 0
  end
end
24.361111
66
0.669327
f724387581fe22a703b9bcd9e60e76d7a4970ae7
2,796
#
# Copyright:: Copyright (c) 2015 Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

require_relative "../exceptions"
require_relative "../service_exceptions"
require_relative "../policyfile/lister"

module ChefCLI
  module PolicyfileServices
    class CleanPolicies

      Orphan = Struct.new(:policy_name, :revision_id)

      attr_reader :chef_config
      attr_reader :ui

      def initialize(config: nil, ui: nil)
        @chef_config = config
        @ui = ui
      end

      def run
        revisions_to_remove = orphaned_policies

        if revisions_to_remove.empty?
          ui.err("No policy revisions deleted")
          return true
        end

        results = revisions_to_remove.map do |policy|
          [ remove_policy(policy), policy ]
        end

        failures = results.select { |result, _policy| result.is_a?(Exception) }

        unless failures.empty?
          details = failures.map do |result, policy|
            "- #{policy.policy_name} (#{policy.revision_id}): #{result.class} #{result}"
          end

          message = "Failed to delete some policy revisions:\n" + details.join("\n") + "\n"

          raise PolicyfileCleanError.new(message, MultipleErrors.new("multiple errors"))
        end

        true
      end

      def orphaned_policies
        policy_lister.policies_by_name.keys.inject([]) do |orphans, policy_name|
          orphans + policy_lister.orphaned_revisions(policy_name).map do |revision_id|
            Orphan.new(policy_name, revision_id)
          end
        end
      rescue => e
        raise PolicyfileCleanError.new("Failed to list policies for cleaning.", e)
      end

      def policy_lister
        @policy_lister ||= Policyfile::Lister.new(config: chef_config)
      end

      def http_client
        @http_client ||= Chef::ServerAPI.new(chef_config.chef_server_url,
          signing_key_filename: chef_config.client_key,
          client_name: chef_config.node_name)
      end

      private

      def remove_policy(policy)
        ui.msg("DELETE #{policy.policy_name} #{policy.revision_id}")
        http_client.delete("/policies/#{policy.policy_name}/revisions/#{policy.revision_id}")
        :ok
      rescue => e
        e
      end

    end
  end
end
29.125
93
0.658441
7917a11b57c21eaf39dbe8e0b225b9e5e6c7c74f
411
require './gemspec'

Gem::Specification.new do |s|
  info(s)
  s.name = "vpim_icalendar"
  s.version = "1.1"
  s.summary = "Virtual gem depending on vPim's iCalendar support for ruby"
  s.description = <<'---'
This is a virtual gem, it exists to depend on vPim, which provides iCalendar
support for ruby. You can install vPim directly.
---
  s.add_dependency("vpim")
end
27.4
84
0.635036
e254e9a5d1d804e6c539a848ff79ad88834cfb45
138
class ChangeForeignKeyForComments < ActiveRecord::Migration[7.0]
  def change
    rename_column :comments, :user_id, :author_id
  end
end
23
64
0.775362
1a7bc5413757d0327260757334225a7f1b7b94b5
1,444
require 'test_helper'

class FileInstructionsControllerTest < ActionController::TestCase
  setup do
    @file_instruction = file_instructions(:one)
  end

  test "should get index" do
    get :index
    assert_response :success
    assert_not_nil assigns(:file_instructions)
  end

  test "should get new" do
    get :new
    assert_response :success
  end

  test "should create file_instruction" do
    assert_difference('FileInstruction.count') do
      post :create, file_instruction: { file_type: @file_instruction.file_type, host_url: @file_instruction.host_url, instructions: @file_instruction.instructions }
    end

    assert_redirected_to file_instruction_path(assigns(:file_instruction))
  end

  test "should show file_instruction" do
    get :show, id: @file_instruction
    assert_response :success
  end

  test "should get edit" do
    get :edit, id: @file_instruction
    assert_response :success
  end

  test "should update file_instruction" do
    patch :update, id: @file_instruction, file_instruction: { file_type: @file_instruction.file_type, host_url: @file_instruction.host_url, instructions: @file_instruction.instructions }
    assert_redirected_to file_instruction_path(assigns(:file_instruction))
  end

  test "should destroy file_instruction" do
    assert_difference('FileInstruction.count', -1) do
      delete :destroy, id: @file_instruction
    end

    assert_redirected_to file_instructions_path
  end
end
28.88
186
0.760388