hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
286340a3c7770782c834a8f07f3b51f9d623a62f | 1,619 | module Guard
class RSpec
class Inspector
  attr_accessor :excluded, :spec_paths

  # Decides which changed file paths should actually trigger a spec run.
  #
  # options:
  #   :exclude    - glob pattern for files that must never run
  #   :spec_paths - root directories containing specs/features
  def initialize(options = {})
    self.excluded = options.fetch(:exclude, [])
    self.spec_paths = options.fetch(:spec_paths, [])
  end

  # Expands the exclusion glob eagerly so later checks are plain lookups.
  def excluded=(pattern)
    @excluded = Dir[pattern.to_s]
  end

  def spec_paths=(paths)
    @spec_paths = Array(paths)
  end

  # Returns the runnable subset of +paths+: known spec/feature files and
  # spec folders, minus exclusions and minus paths already covered by a
  # broader folder in the same list.
  # Note: uniq!/compact! deliberately mutate the caller's array.
  def clean(paths)
    paths.uniq!
    paths.compact!
    clear_spec_files_list_after do
      paths = paths.select { |path| should_run_spec_file?(path) }
    end
    paths.reject { |candidate| included_in_other_path?(candidate, paths) }
  end

  private

  def should_run_spec_file?(path)
    !excluded.include?(path) &&
      (spec_file?(path) || feature_file?(path) || spec_folder?(path))
  end

  def spec_file?(path)
    spec_files.include?(path)
  end

  def feature_file?(path)
    feature_files.include?(path)
  end

  # A path under one of the spec roots that carries no file extension.
  def spec_folder?(path)
    path.match(%r{^(#{spec_paths.join("|")})[^\.]*$})
  end

  def spec_files
    @spec_files ||= glob_under_spec_paths("*[_.]spec.rb")
  end

  def feature_files
    @feature_files ||= glob_under_spec_paths("*.feature")
  end

  def glob_under_spec_paths(pattern)
    spec_paths.flat_map { |root| Dir[File.join(root, "**{,/*/**}", pattern)] }
  end

  # Invalidates the cached spec-file list once the block has run, so the
  # next clean re-reads the filesystem. The feature-file cache is not
  # cleared here (preserved behavior).
  def clear_spec_files_list_after
    yield
    @spec_files = nil
  end

  # True when +path+ is already covered by a broader entry in +paths+.
  def included_in_other_path?(path, paths)
    (paths - [path]).any? do |other|
      path.include?(other) && path.sub(other, '').include?('/')
    end
  end
end
end
end
| 24.164179 | 112 | 0.567634 |
26c31f50926962b8e73a98a91df24d5cb77e5f80 | 2,392 | require 'aaws/version'
AUTHOR = 'John Nunemaker' # can also be an array of Authors
EMAIL = "[email protected]"
DESCRIPTION = "API wrapper for Amazon Associates Web Service"
GEM_NAME = 'aaws' # what ppl will type to install your gem
RUBYFORGE_PROJECT = 'aaws' # The unix name for your project
HOMEPATH = "http://#{RUBYFORGE_PROJECT}.rubyforge.org"
DOWNLOAD_PATH = "http://rubyforge.org/projects/#{RUBYFORGE_PROJECT}"
@config_file = "~/.rubyforge/user-config.yml"
@config = nil
RUBYFORGE_USERNAME = "unknown"
# Reads the RubyForge username out of @config_file (memoised in @config)
# and copies it into RUBYFORGE_USERNAME via String#replace, mutating the
# frozen-by-convention constant in place so existing references update.
# NOTE(review): the bare rescue hides the real cause (missing file, bad
# YAML, permissions) and exits the whole process.
def rubyforge_username
  unless @config
    begin
      @config = YAML.load(File.read(File.expand_path(@config_file)))
    rescue
      # Heredoc body intentionally left untouched — it is user-facing text.
      puts <<-EOS
ERROR: No rubyforge config file found: #{@config_file}
Run 'rubyforge setup' to prepare your env for access to Rubyforge
- See http://newgem.rubyforge.org/rubyforge.html for more details
EOS
      exit
    end
  end
  RUBYFORGE_USERNAME.replace @config["username"]
end
REV = nil
# UNCOMMENT IF REQUIRED:
# REV = `svn info`.each {|line| if line =~ /^Revision:/ then k,v = line.split(': '); break v.chomp; else next; end} rescue nil
VERS = Aaws::VERSION::STRING + (REV ? ".#{REV}" : "")
RDOC_OPTS = ['--quiet', '--title', 'aaws documentation',
"--opname", "index.html",
"--line-numbers",
"--main", "README",
"--inline-source"]
# Monkey-patches Hoe so the generated gemspec does not list hoe itself
# as a runtime dependency of the gem.
class Hoe
  def extra_deps
    # Destructive reject! mutates @extra_deps in place on first call.
    @extra_deps.reject! { |x| Array(x).first == 'hoe' }
    @extra_deps
  end
end
# Generate all the Rake tasks
# Run 'rake -T' to see list of generated tasks (from gem root directory)
hoe = Hoe.new(GEM_NAME, VERS) do |p|
  p.developer(AUTHOR, EMAIL)
  p.description = DESCRIPTION
  p.summary = DESCRIPTION
  p.url = HOMEPATH
  p.rubyforge_name = RUBYFORGE_PROJECT if RUBYFORGE_PROJECT
  p.test_globs = ["test/**/test_*.rb"]
  p.clean_globs |= ['**/.*.sw?', '*.gem', '.config', '**/.DS_Store'] #An array of file patterns to delete on clean.
  # == Optional
  p.changes = p.paragraphs_of("History.txt", 0..1).join("\n\n")
  p.extra_deps = [ ['active_support', '>= 2.0.2'] ]
  #p.spec_extras = {} # A hash of extra values to set in the gemspec.
end
# NOTE(review): unlike p.changes above (joined with real newlines), this
# joins with the literal two-character sequences "\n\n" — looks
# unintended; confirm before changing, release tooling may depend on it.
CHANGES = hoe.paragraphs_of('History.txt', 0..1).join("\\n\\n")
# Remote rdoc is nested under the gem name only when it differs from the
# rubyforge project name.
PATH = (RUBYFORGE_PROJECT == GEM_NAME) ? RUBYFORGE_PROJECT : "#{RUBYFORGE_PROJECT}/#{GEM_NAME}"
hoe.remote_rdoc_dir = File.join(PATH.gsub(/^#{RUBYFORGE_PROJECT}\/?/,''), 'rdoc')
hoe.rsync_args = '-av --delete --ignore-errors' | 35.176471 | 126 | 0.672659 |
38289a14f766b07e29c22709d20a87864644abbc | 157 | module Spree
module PermittedAttributes
  # Reopens Spree::PermittedAttributes to extend the strong-parameters
  # whitelist for user records with app-specific fields.
  # NOTE(review): @@ class variables are shared across the whole
  # inheritance tree — confirm Spree reads this via class_variable
  # (vs. mattr_accessor) in the Spree version in use.
  @@user_attributes = [:email, :password, :first_name, :last_name, :birthday, :avatar_url, :signup_type]
end
end | 31.4 | 105 | 0.751592 |
7a2b81de69a0f45138df9e5e80fcd70eac18f440 | 1,968 | # encoding: utf-8
# frozen_string_literal: true
require 'spec_helper'
describe "ContentTransferEncodingParser" do
  # Parses +text+ and asserts it yields +encoding+ without a parse error.
  def expect_parsed_encoding(text, encoding)
    result = Mail::Parsers::ContentTransferEncodingParser.parse(text)
    expect(result.error).to be_nil
    expect(result.encoding).to eq encoding
  end

  it "should work" do
    expect_parsed_encoding("quoted-printable", 'quoted-printable')
  end

  describe "trailing semi colons" do
    it "should parse" do
      expect_parsed_encoding("quoted-printable;", 'quoted-printable')
    end

    it "should parse with pre white space" do
      expect_parsed_encoding('quoted-printable ;', 'quoted-printable')
    end

    it "should parse with trailing white space" do
      expect_parsed_encoding('quoted-printable; ', 'quoted-printable')
    end

    it "should parse with pre and trailing white space" do
      expect_parsed_encoding('quoted-printable ; ', 'quoted-printable')
    end
  end

  describe "x-token values" do
    it "should work" do
      expect_parsed_encoding('x-my-token', 'x-my-token')
    end
  end

  describe "wild content-transfer-encoding" do
    %w(7bits 8bits 7-bit 8-bit).each do |mechanism|
      it "should parse #{mechanism} variant" do
        expect_parsed_encoding(mechanism, mechanism)
      end
    end
  end
end
| 30.75 | 61 | 0.676829 |
21ffdef0babadc63b588aef4d99e29229059ce17 | 950 | require_relative "../canvas_base_mutation"
module LMSGraphQL
module Mutations
module Canvas
# GraphQL mutation that posts a reply to an existing entry in a Canvas
# group discussion topic via the proxied Canvas REST API.
class PostReplyGroup < BaseMutation
  argument :group_id, ID, required: true
  argument :topic_id, ID, required: true
  argument :entry_id, ID, required: true
  # Optional reply body and attachment payload, forwarded verbatim.
  argument :message, String, required: false
  argument :attachment, String, required: false
  field :return_value, Boolean, null: false
  def resolve(group_id:, topic_id:, entry_id:, message: nil, attachment: nil)
    # NOTE(review): the operation name "POST_REPLY_GROUPS" is passed both
    # to call() and again to proxy() — looks redundant; confirm the
    # canvas_api client really expects it twice.
    context[:canvas_api].call("POST_REPLY_GROUPS").proxy(
      "POST_REPLY_GROUPS",
      {
        "group_id": group_id,
        "topic_id": topic_id,
        "entry_id": entry_id
      },
      {
        "message": message,
        "attachment": attachment
      },
    ).parsed_response
  end
end
end
end
end | 28.787879 | 83 | 0.561053 |
e8f84063a3b26a622fcb1f51a953fe5b529f207e | 1,806 | require "codnar"
require "test/spec"
# Test rendering GraphViz diagrams to SVG via Codnar::GraphViz.
class TestGraphVizDiagrams < Test::Unit::TestCase
  # Expected SVG output for the minimal two-node diagram below.
  # Heredoc body is compared byte-for-byte — do not reformat it.
  MINIMAL_DIAGRAM_SVG = <<-EOF.unindent #! ((( svg
<svg width="62pt" height="116pt"
viewBox="0.00 0.00 62.00 116.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g id="graph1" class="graph" transform="scale(1 1) rotate(0) translate(4 112)">
<title>_anonymous_0</title>
<polygon fill="white" stroke="white" points="-4,5 -4,-112 59,-112 59,5 -4,5"/>
<!-- A -->
<g id="node1" class="node"><title>A</title>
<ellipse fill="none" stroke="black" cx="27" cy="-90" rx="27" ry="18"/>
<text text-anchor="middle" x="27" y="-85.4" font-family="Times New Roman,serif" font-size="14.00">A</text>
</g>
<!-- B -->
<g id="node3" class="node"><title>B</title>
<ellipse fill="none" stroke="black" cx="27" cy="-18" rx="27" ry="18"/>
<text text-anchor="middle" x="27" y="-13.4" font-family="Times New Roman,serif" font-size="14.00">B</text>
</g>
<!-- A->B -->
<g id="edge2" class="edge"><title>A->B</title>
<path fill="none" stroke="black" d="M27,-71.8314C27,-64.131 27,-54.9743 27,-46.4166"/>
<polygon fill="black" stroke="black" points="30.5001,-46.4132 27,-36.4133 23.5001,-46.4133 30.5001,-46.4132"/>
</g>
</g>
</svg>
EOF
  #! ))) svg
  # A well-formed diagram converts to the expected SVG. The m4-style
  # define(`X', `A') suggests the source is macro-expanded before dot —
  # NOTE(review): confirm Codnar's GraphViz pipeline runs m4.
  def test_valid_diagram
    diagram = <<-EOF.unindent #! ((( dot
define(`X', `A')
digraph {
X -> B;
}
EOF
    #! ))) dot
    Codnar::GraphViz.to_html(diagram).should == MINIMAL_DIAGRAM_SVG
  end
  # A truncated (syntactically invalid) diagram must raise.
  def test_invalid_diagram
    diagram = <<-EOF.unindent #! ((( dot
digraph {
A ->
EOF
    #! ))) dot
    lambda { Codnar::GraphViz.to_html(diagram) }.should.raise
  end
end
| 33.444444 | 116 | 0.587486 |
b956601dd75375781485ada6de3bfe66664013ed | 2,344 | require "active_support/core_ext/integer/time"
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.
  # Reload classes but keep compiled templates cached — the combination
  # generated by Rails 6.1+ for test environments (plays well with
  # spring/parallel testing).
  config.cache_classes = false
  config.action_view.cache_template_loading = true
  # Do not eager load code on boot. This avoids loading your whole application
  # just for the purpose of running a single test. If you are using a tool that
  # preloads Rails for running tests, you may have to set it to true.
  config.eager_load = false
  # Configure public file server for tests with Cache-Control for performance.
  config.public_file_server.enabled = true
  config.public_file_server.headers = {
    'Cache-Control' => "public, max-age=#{1.hour.to_i}"
  }
  # Show full error reports and disable caching.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false
  config.cache_store = :null_store
  # Raise exceptions instead of rendering exception templates.
  config.action_dispatch.show_exceptions = false
  # Disable request forgery protection in test environment.
  config.action_controller.allow_forgery_protection = false
  # Store uploaded files on the local file system in a temporary directory.
  config.active_storage.service = :test
  config.action_mailer.perform_caching = false
  # Tell Action Mailer not to deliver emails to the real world.
  # The :test delivery method accumulates sent emails in the
  # ActionMailer::Base.deliveries array.
  config.action_mailer.delivery_method = :test
  # Print deprecation notices to the stderr.
  config.active_support.deprecation = :stderr
  # Raise exceptions for disallowed deprecations.
  config.active_support.disallowed_deprecation = :raise
  # Tell Active Support which deprecation messages to disallow.
  config.active_support.disallowed_deprecation_warnings = []
  # Raises error for missing translations.
  # config.i18n.raise_on_missing_translations = true
  # Annotate rendered view with file names.
  # config.action_view.annotate_rendered_view_with_filenames = true
end
| 38.42623 | 85 | 0.780717 |
79356b11781ea8580a7f15ecd8ed770cad08409b | 966 | require "formula"
# Homebrew formula for SwitchAudioSource, a CLI tool for changing the
# active macOS audio device.
class SwitchaudioOsx < Formula
  homepage "https://github.com/deweller/switchaudio-osx/"
  url "https://github.com/deweller/switchaudio-osx/archive/1.0.0.tar.gz"
  sha1 "4b7bae425f4b9ec71b67dc2c4c6f26fd195d623a"
  head "https://github.com/deweller/switchaudio-osx.git"
  # Pre-built binary bottles per macOS release.
  bottle do
    cellar :any
    sha1 "5e5809f498765a2402a3082d3f643f7772f7851f" => :mavericks
    sha1 "419a9487393be1d4b08fae2fc7aa6ab097b7cd75" => :mountain_lion
    sha1 "94e17d2daaefe1118ce7915d26b4cbd0219b07c8" => :lion
  end
  depends_on :macos => :lion
  depends_on :xcode => :build
  # Builds the Xcode project, then installs an exec wrapper into bin
  # pointing at the app bundle placed in prefix.
  def install
    xcodebuild "-project", "AudioSwitcher.xcodeproj",
               "-target", "SwitchAudioSource",
               "SYMROOT=build",
               "-verbose"
    prefix.install Dir["build/Release/*"]
    bin.write_exec_script "#{prefix}/SwitchAudioSource"
    chmod 0755, "#{bin}/SwitchAudioSource"
  end
  # Smoke test: -c prints the current audio source.
  test do
    system "#{bin}/SwitchAudioSource", "-c"
  end
end
| 29.272727 | 72 | 0.698758 |
4a593e94321d8998568cf2d6c07283bd92617ebf | 346 | module Buildable
# Entry point for a background build job: wraps the raw webhook payload
# and runs the build.
# Resque::TermException (worker shutdown) re-enqueues the job via
# retry_job; any other error is logged and reported to Sentry (Raven)
# WITHOUT re-raising — intentionally best-effort so one failed build
# does not crash the worker.
def perform(payload_data)
  payload = Payload.new(payload_data)
  build_runner = BuildRunner.new(payload)
  build_runner.run
rescue Resque::TermException
  retry_job
rescue => exception
  Rails.logger.warn "Exception #{exception}"
  Raven.capture_exception(exception, payload: { data: payload_data })
end
end
| 26.615385 | 71 | 0.739884 |
bb7924d78e663b364ed5ba406c0ccbb6c2ba7f24 | 67 | class PagesController < ApplicationController
# Empty action: relies on Rails implicit rendering — presumably a
# template under app/views/pages/; confirm against the routes file.
def show
end
end
| 13.4 | 45 | 0.80597 |
ed63b05c539fb02efe8323ea828218f2a2f7faf3 | 1,803 | # frozen_string_literal: true
# Brainfuck Language Specifications https://en.wikipedia.org/wiki/Brainfuck
# >: p_increment prt++;
# <: p_decrement prt-;
# +: increment (*ptr)++;
# -: decrement (*ptr)--;
# .: put putchar(*ptr);
# ,: get *ptr=getchar();
# [: open while(*ptr)
# ]: close }
module BfRuby
  # A tiny Brainfuck interpreter. Given a program string it executes the
  # eight Brainfuck commands and returns everything the program printed.
  # The ',' (input) command is not supported and raises NotImplementedError.
  class Translator
    MEM_SIZE = 1024

    # Convenience wrapper: Translator.translate(src) == new(src).translate.
    def self.translate(f_ing_src)
      new(f_ing_src).translate
    end

    def initialize(f_ing_src)
      @operation = f_ing_src
    end

    # Runs the stored program over a zeroed MEM_SIZE-cell tape and returns
    # the output produced by '.' commands as a single string.
    def translate
      cells = Array.new(MEM_SIZE, 0)
      pointer = 0
      pc = 0
      printed = []
      while (op = @operation[pc])
        case op
        when '>' then pointer += 1
        when '<' then pointer -= 1
        when '+' then cells[pointer] += 1
        when '-' then cells[pointer] -= 1
        when '.' then printed << cells[pointer].chr
        when ',' then raise NotImplementedError
        when '[' then pc = jump_next_brace(pc) if cells[pointer].zero?
        when ']' then pc = jump_before_brace(pc) unless cells[pointer].zero?
        end
        pc += 1
      end
      printed.join
    end

    private

    # Scans backwards from a ']' at +pc+ to its matching '[' and returns
    # that position (tracking nesting depth along the way).
    def jump_before_brace(pc)
      depth = 0
      loop do
        case @operation[pc]
        when ']' then depth += 1
        when '[' then depth -= 1
        end
        break if depth.zero?
        pc -= 1
      end
      pc
    end

    # Scans forwards from a '[' at +pc+ to its matching ']' and returns
    # that position.
    def jump_next_brace(pc)
      depth = 0
      loop do
        case @operation[pc]
        when '[' then depth += 1
        when ']' then depth -= 1
        end
        break if depth.zero?
        pc += 1
      end
      pc
    end
  end
end
| 20.033333 | 78 | 0.506378 |
ab1087a9a15934e1b49becc53853abc9db2334e4 | 939 | class Comment
include Mongoid::Document
include Mongoid::Timestamps
field :name, :type => String
field :content, :type => String
field :email, :type=>String
belongs_to :post
validates :name, presence: true
validates :email, presence: true, format: { with: /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\z/i, message: '地址无效' }
validates :content, presence: true
validates_presence_of :post_id
# Email addresses of everyone who commented on the same post, minus this
# comment's own author — the notification list for a new reply.
def reply_emails
  Comment.where(post_id: self.post_id).collect(&:email).uniq - [ self.email ]
end
# After a comment is saved: notify the site admin (only when both the
# mail server and a valid admin address are configured) and fan out
# reply notifications to the other commenters. Jobs are enqueued via
# perform_async — presumably Sidekiq workers; confirm.
after_create do
  if ENV['MAIL_SERVER'].present? && ENV['ADMIN_USER'].present? && ENV['ADMIN_USER'] =~ /@/
    Rails.logger.info 'comment created, comment worker start'
    NewCommentWorker.perform_async(self.id.to_s, ENV['ADMIN_USER'])
  end
  if ENV['MAIL_SERVER'].present?
    Rails.logger.info 'comment created, reply worker start'
    NewReplyPostWorker.perform_async(self.id.to_s)
  end
end
end
| 29.34375 | 116 | 0.676251 |
1a158c2464df91f1f5c22d2e985a10664f7f8856 | 95 | require 'spec_helper_acceptance'
# Ensure NIS CLient is not installed - Section 2.3.1
describe | 23.75 | 52 | 0.8 |
5d86e4de7095e0fe368faee04242d5bde9b2cf34 | 3,587 | require 'fileutils'
require 'chef/cookbook/metadata'
module DroneChef
  #
  # Class for uploading stuff to a Chef Server
  #
  class ChefServer
    # @param config [Object] drone plugin config; must expose plugin_args
    #   containing at least the 'org' key.
    def initialize(config)
      @config = config
      @options = config.plugin_args
      fail 'Chef organization required' unless @options.key? 'org'
    end

    # Plugin option 'recursive' (default true): upload the whole
    # Berkshelf dependency tree rather than a single cookbook.
    def recursive
      set_default(__method__, true)
    end

    # Plugin option 'freeze' (default true): freeze uploaded cookbook
    # versions on the server.
    # NOTE(review): this shadows Object#freeze on instances of this class.
    def freeze
      set_default(__method__, true)
    end

    #
    # Are we uploading a cookbook?
    #
    def cookbook?
      File.exist? "#{@config.workspace}/metadata.rb"
    end

    # True when the workspace carries a Berksfile or its lockfile.
    def berksfile?
      return true if File.exist? "#{@config.workspace}/Berksfile"
      return true if File.exist? "#{@config.workspace}/Berksfile.lock"
      false
    end

    # Writes all config files needed by knife/berks before an upload.
    def write_configs
      @config.write_configs
      write_knife_rb
      write_berks_config unless @config.ssl_verify
    end

    #
    # Upload to chef server
    #
    def upload
      berks_install if berksfile?
      berks_upload if berksfile?
      knife_upload unless cookbook? || !chef_data?
    end

    private

    #
    # Returns default value if one isn't provided
    #
    # @param key [String] The key to check a value for
    # @param default_value The default value to return if none provided
    #
    # @return Returns the value provided in @options[key] if provided,
    #         else returns default_value
    #
    def set_default(key, default_value)
      return default_value unless @options.key? key.to_s
      @options[key.to_s]
    end

    # Chef server endpoint scoped to the configured organization.
    def url
      "#{@config.server}/organizations/#{@options['org']}"
    end

    # Renders the knife.rb used by knife_upload.
    def write_knife_rb
      FileUtils.mkdir_p File.dirname @config.knife_rb
      File.open(@config.knife_rb, 'w') do |f|
        f.puts "node_name '#{@config.user}'"
        f.puts "client_key '#{@config.key_path}'"
        f.puts "chef_server_url '#{url}'"
        f.puts "chef_repo_path '#{@config.workspace}'"
        f.puts "ssl_verify_mode #{@config.ssl_verify_mode}"
      end
    end

    # Disables SSL verification for Berkshelf (only called when
    # @config.ssl_verify is false).
    def write_berks_config
      FileUtils.mkdir_p "#{Dir.home}/.berkshelf"
      File.open("#{Dir.home}/.berkshelf/config.json", 'w') do |f|
        f.puts '{"ssl":{"verify":false}}'
      end
    end

    #
    # Command to gather necessary cookbooks
    #
    def berks_install
      puts 'Retrieving cookbooks'
      `berks install -b #{@config.workspace}/Berksfile`
      fail 'Failed to retrieve cookbooks' unless process_last_status.success?
    end

    #
    # Command to upload cookbook(s) with Berkshelf
    #
    def berks_upload
      puts 'Running berks upload'
      command = ['berks upload']
      command << "#{cookbook.name}" unless recursive
      command << "-b #{@config.workspace}/Berksfile"
      command << '--no-freeze' unless freeze
      puts `#{command.join(' ')}`
      fail 'Failed to upload cookbook' unless process_last_status.success?
    end

    # Any roles/environments/data_bags directories present in the workspace?
    def chef_data?
      !Dir.glob("#{@config.workspace}/{roles,environments,data_bags}").empty?
    end

    #
    # Upload any roles, environments and data_bags
    #
    def knife_upload
      puts 'Uploading roles, environments and data bags'
      command = ['knife upload']
      command << '.'
      command << "-c #{@config.knife_rb}"
      Dir.chdir(@config.workspace)
      puts `#{command.join(' ')}`
      fail 'knife upload failed' unless process_last_status.success?
    end

    # Exit status of the last backtick shell-out ($?); wrapped so specs
    # can stub it.
    def process_last_status
      $?
    end

    # Memoised cookbook metadata parsed from the workspace's metadata.rb.
    def cookbook
      @metadata ||= begin
        metadata = Chef::Cookbook::Metadata.new
        metadata.from_file("#{@config.workspace}/metadata.rb")
        metadata
      end
    end
  end
end
| 25.083916 | 77 | 0.629217 |
bfc15e640f505be9e56f0897dba35698793ccae7 | 38 | module Grocer
VERSION = '0.4.1'
end
| 9.5 | 19 | 0.657895 |
d5d8a09aaf1cf8e4e478d96c67808f6c80c868fc | 6,199 | class Patient < ApplicationRecord
include PgSearch
include CommonContent
attr_reader :object_pronoun,
:subject_pronoun,
:possessive_pronoun,
:vitals_show_header,
:vitals_by_date,
:age,
:full_name
scope :sorted, (-> { order(id: :asc) })
pg_search_scope :search,
against: %i[first_name last_name date_of_birth],
using: {
tsearch: {
prefix: true
}
}
self.per_page = 20
has_one :gallery, inverse_of: :patient
has_many :visits, inverse_of: :patient
has_many :vitals, dependent: :destroy
has_many :dissections, dependent: :destroy
has_many :genetic_tests, dependent: :destroy
has_many :heart_measurements, dependent: :destroy
has_many :family_members, dependent: :destroy
has_many :hospitalizations, dependent: :destroy
has_many :tests, dependent: :destroy
has_many :diagnoses, dependent: :destroy
has_many :medications, dependent: :destroy
has_many :procedures, dependent: :destroy
accepts_nested_attributes_for :gallery
accepts_nested_attributes_for :vitals
accepts_nested_attributes_for :diagnoses
accepts_nested_attributes_for :medications
accepts_nested_attributes_for :procedures
accepts_nested_attributes_for :hospitalizations
accepts_nested_attributes_for :tests
accepts_nested_attributes_for :family_members
validates :first_name,
presence: true,
format: {
with: /\A[a-zA-Z ']+\z/
}
validates :last_name,
presence: true,
format: {
with: /\A[a-zA-Z ']+\z/
}
validates :address_line_1,
presence: true
validates :city,
presence: true,
format: {
with: /\A[a-zA-Z ']+\z/
}
validates :state,
presence: true,
format: {
with: /\A[a-zA-Z ']+\z/
}
validates :postal_code,
presence: true,
numericality: true
validates :country,
presence: true,
format: {
with: /\A[a-zA-Z ']+\z/
}
validates :sex,
presence: true,
inclusion: %w[F M N]
validates :cause_of_death,
allow_nil: true,
allow_blank: true,
format: {
with: /\A[a-zA-Z ']+\z/
}
validates :email,
presence: true,
format: {
with: /.+@.+\..+/i
}
validates :phone_1,
presence: true
def self.perform_search(keyword)
if keyword.present?
Patient.search(keyword)
else
Patient.all
end.sorted
end
def exists_as_family_member
if FamilyMember.select{ |f| f.future_patient_data_hash["first_name"] =~ Regexp.new(first_name, 'i') && f.future_patient_data_hash["last_name"] =~ Regexp.new(last_name, 'i')}
output = FamilyMember.select{ |f| f.future_patient_data_hash["first_name"] =~ Regexp.new(first_name, 'i') && f.future_patient_data_hash["last_name"] =~ Regexp.new(last_name, 'i')}[0]
else
output = false
end
output
end
# Human-readable age, e.g. "42 y/o", or "deceased".
# Overrides the attr_reader of the same name declared at the top of the
# class.
def age
  if deceased?
    'deceased'
  else
    # Approximation: total days divided by 365, so the value can drift
    # by a day or two around birthdays/leap years.
    age = (((Date.today - date_of_birth.to_date)/365).to_f).round()
    "#{age} y/o"
  end
end
# Pronoun for when the patient is a sentence object ("contact her/him/
# them"), derived from the sex attribute ('F', 'M', anything else).
def object_pronoun
  case sex
  when 'F' then 'her'
  when 'M' then 'him'
  else 'them'
  end
end
# Pronoun for when the patient is a sentence subject ("she/he/they"),
# derived from the sex attribute ('F', 'M', anything else).
def subject_pronoun
  case sex
  when 'F' then 'she'
  when 'M' then 'he'
  else 'they'
  end
end
# Possessive pronoun ("her/his/their"), derived from the sex attribute
# ('F', 'M', anything else).
def possessive_pronoun
  case sex
  when 'F' then 'her'
  when 'M' then 'his'
  else 'their'
  end
end
# "Last, First Middle" display name.
# NOTE(review): when middle_name is nil the interpolation leaves a
# trailing space after first_name — views/specs may rely on (or should
# strip) it; confirm before normalising.
def full_name
  "#{last_name}, #{first_name} #{middle_name unless middle_name.nil?}"
end
# Column headings for the vitals table; order must match the row values
# rendered by the view.
# NOTE(review): "mmHG" is unconventional capitalisation (usually
# "mmHg") — left as-is since it is user-facing display text.
def vitals_show_header
  ['Date', 'Height (m)', 'Weight (kg)', 'BP (mmHG)', 'HR (bpm)', 'Temp (°C)']
end
# Groups heart measurements by the calendar date they were recorded.
def heart_measurements_by_date
  heart_measurements.group_by { |m| m.created_at.to_date }
end
# NOTE(review): despite the name, this groups vitals by visit_id (one
# group per visit), not by calendar date; it also overrides the
# attr_reader of the same name declared at the top of the class.
def vitals_by_date
  vitals.group_by(&:visit_id)
end
# Aggregates every clinical record associated with the patient into one
# array; consumed by the sort_by_topic* methods below. Loads all nine
# associations on every call.
def concerns
  tests + procedures + diagnoses + hospitalizations + family_members + medications + dissections + vitals + genetic_tests
end
# Buckets the patient's concerns by root topic name for the letter/
# report view. Keys are symbol labels; values are (possibly empty)
# arrays of records.
# NOTE(review): #concerns is re-evaluated for every bucket (ten full
# traversals, each reloading nine associations) — memoise if this shows
# up in profiles.
def letter_sort_by_topic
  {
    'genetic concerns': concerns.select { |c| c.topic.root.name == 'genetics' },
    'cardiovascular concerns': concerns.select { |c| c.topic.root.name == 'cardiovascular' },
    'morphology/physical findings': concerns.select { |c| c.topic.root.name == 'morphology/physical findings' },
    'pulmonary concerns': concerns.select { |c| c.topic.root.name == 'pulmonary' },
    'orthopedic concerns': concerns.select { |c| c.topic.root.name == 'orthopedic' },
    'ophthalmologic concerns': concerns.select { |c| c.topic.root.name == 'ophthalmologic' },
    'gynecologic/urologic concerns': concerns.select { |c| c.topic.root.name == 'gynecologic/urologic' },
    'obstetric concerns': concerns.select { |c| c.topic.root.name == 'obstetric (pregnancy)' },
    'neurologic concerns': concerns.select { |c| c.topic.root.name == 'neurologic' },
    'gastrointestinal concerns': concerns.select { |c| c.topic.root.name == 'gastrointestinal' }
  }
end
# Extends letter_sort_by_topic with the non-letter buckets.
# @return [Hash] label => records, or [String] a fallback message when
#   every bucket is empty — callers must handle both return types.
def sort_by_topic
  conc = letter_sort_by_topic.merge(
    'family history': concerns.select { |c| c.topic.root.name == 'family history' },
    'medication': concerns.select { |c| c.topic.root.name == 'medication' },
    'vitals': concerns.select { |c| c.topic.root.name == 'vitals' },
    # Note: this bucket matches on topic_type, not the root topic name.
    'heart_measurements': concerns.select { |c| c.topic.topic_type == 'heart_measurement'},
    'lifestyle': concerns.select { |c| c.topic.root.name == 'lifestyle' },
  )
  conc.all? { |_k, v| v.empty? } ? 'No concerns noted yet' : conc
end
# Like sort_by_topic, but each topic's concerns are further grouped by
# record class, keyed by the pluralised class name (e.g. "Diagnoses").
# Passes the fallback string through when there are no concerns; returns
# nil for any other (unexpected) sort_by_topic result, as before.
#
# Fixes: the previous version built a map/to_h hash and immediately
# discarded it (dead code), and compared classes with `.class ==`
# instead of is_a?.
def sort_by_topic_then_type
  all = sort_by_topic
  if all.is_a?(Hash)
    all.transform_values do |topic_concerns|
      topic_concerns.group_by(&:class).transform_keys { |klass| klass.name.pluralize }
    end
  elsif all.is_a?(String)
    'No concerns noted yet'
  end
end
end
| 28.56682 | 188 | 0.598968 |
0333eb85fb6307cd049f7bdfbf7208c1dbbe2379 | 15,209 | # frozen_string_literal: true
require 'spec_helper'
describe SearchService do
let_it_be(:user) { create(:user) }
let_it_be(:accessible_group) { create(:group, :private) }
let_it_be(:inaccessible_group) { create(:group, :private) }
let_it_be(:group_member) { create(:group_member, group: accessible_group, user: user) }
let_it_be(:accessible_project) { create(:project, :repository, :private, name: 'accessible_project') }
let_it_be(:note) { create(:note_on_issue, project: accessible_project) }
let_it_be(:inaccessible_project) { create(:project, :repository, :private, name: 'inaccessible_project') }
let(:snippet) { create(:snippet, author: user) }
let(:group_project) { create(:project, group: accessible_group, name: 'group_project') }
let(:public_project) { create(:project, :public, name: 'public_project') }
let(:per_page) { described_class::DEFAULT_PER_PAGE }
subject(:search_service) { described_class.new(user, search: search, scope: scope, page: 1, per_page: per_page) }
before do
accessible_project.add_maintainer(user)
end
describe '#project' do
context 'when the project is accessible' do
it 'returns the project' do
project = described_class.new(user, project_id: accessible_project.id).project
expect(project).to eq accessible_project
end
it 'returns the project for guests' do
search_project = create :project
search_project.add_guest(user)
project = described_class.new(user, project_id: search_project.id).project
expect(project).to eq search_project
end
end
context 'when the project is not accessible' do
it 'returns nil' do
project = described_class.new(user, project_id: inaccessible_project.id).project
expect(project).to be_nil
end
end
context 'when there is no project_id' do
it 'returns nil' do
project = described_class.new(user).project
expect(project).to be_nil
end
end
end
describe '#group' do
context 'when the group is accessible' do
it 'returns the group' do
group = described_class.new(user, group_id: accessible_group.id).group
expect(group).to eq accessible_group
end
end
context 'when the group is not accessible' do
it 'returns nil' do
group = described_class.new(user, group_id: inaccessible_group.id).group
expect(group).to be_nil
end
end
context 'when there is no group_id' do
it 'returns nil' do
group = described_class.new(user).group
expect(group).to be_nil
end
end
end
describe '#show_snippets?' do
context 'when :snippets is \'true\'' do
it 'returns true' do
show_snippets = described_class.new(user, snippets: 'true').show_snippets?
expect(show_snippets).to be_truthy
end
end
context 'when :snippets is not \'true\'' do
it 'returns false' do
show_snippets = described_class.new(user, snippets: 'tru').show_snippets?
expect(show_snippets).to be_falsey
end
end
context 'when :snippets is missing' do
it 'returns false' do
show_snippets = described_class.new(user).show_snippets?
expect(show_snippets).to be_falsey
end
end
end
describe '#scope' do
context 'with accessible project_id' do
context 'and allowed scope' do
it 'returns the specified scope' do
scope = described_class.new(user, project_id: accessible_project.id, scope: 'notes').scope
expect(scope).to eq 'notes'
end
end
context 'and disallowed scope' do
it 'returns the default scope' do
scope = described_class.new(user, project_id: accessible_project.id, scope: 'projects').scope
expect(scope).to eq 'blobs'
end
end
context 'and no scope' do
it 'returns the default scope' do
scope = described_class.new(user, project_id: accessible_project.id).scope
expect(scope).to eq 'blobs'
end
end
end
context 'with \'true\' snippets' do
context 'and allowed scope' do
it 'returns the specified scope' do
scope = described_class.new(user, snippets: 'true', scope: 'snippet_titles').scope
expect(scope).to eq 'snippet_titles'
end
end
context 'and disallowed scope' do
it 'returns the default scope' do
scope = described_class.new(user, snippets: 'true', scope: 'projects').scope
expect(scope).to eq 'snippet_titles'
end
end
context 'and no scope' do
it 'returns the default scope' do
scope = described_class.new(user, snippets: 'true').scope
expect(scope).to eq 'snippet_titles'
end
end
end
context 'with no project_id, no snippets' do
context 'and allowed scope' do
it 'returns the specified scope' do
scope = described_class.new(user, scope: 'issues').scope
expect(scope).to eq 'issues'
end
end
context 'and disallowed scope' do
it 'returns the default scope' do
scope = described_class.new(user, scope: 'blobs').scope
expect(scope).to eq 'projects'
end
end
context 'and no scope' do
it 'returns the default scope' do
scope = described_class.new(user).scope
expect(scope).to eq 'projects'
end
end
end
end
describe '#search_results' do
context 'with accessible project_id' do
it 'returns an instance of Gitlab::ProjectSearchResults' do
search_results = described_class.new(
user,
project_id: accessible_project.id,
scope: 'notes',
search: note.note).search_results
expect(search_results).to be_a Gitlab::ProjectSearchResults
end
end
context 'with accessible project_id and \'true\' snippets' do
it 'returns an instance of Gitlab::ProjectSearchResults' do
search_results = described_class.new(
user,
project_id: accessible_project.id,
snippets: 'true',
scope: 'notes',
search: note.note).search_results
expect(search_results).to be_a Gitlab::ProjectSearchResults
end
end
context 'with \'true\' snippets' do
it 'returns an instance of Gitlab::SnippetSearchResults' do
search_results = described_class.new(
user,
snippets: 'true',
search: snippet.title).search_results
expect(search_results).to be_a Gitlab::SnippetSearchResults
end
end
context 'with no project_id and no snippets' do
it 'returns an instance of Gitlab::SearchResults' do
search_results = described_class.new(
user,
search: public_project.name).search_results
expect(search_results).to be_a Gitlab::SearchResults
end
end
end
describe '#search_objects' do
context 'handling per_page param' do
let(:search) { '' }
let(:scope) { nil }
context 'when nil' do
let(:per_page) { nil }
it "defaults to #{described_class::DEFAULT_PER_PAGE}" do
expect_any_instance_of(Gitlab::SearchResults)
.to receive(:objects)
.with(anything, hash_including(per_page: described_class::DEFAULT_PER_PAGE))
.and_call_original
subject.search_objects
end
end
context 'when empty string' do
let(:per_page) { '' }
it "defaults to #{described_class::DEFAULT_PER_PAGE}" do
expect_any_instance_of(Gitlab::SearchResults)
.to receive(:objects)
.with(anything, hash_including(per_page: described_class::DEFAULT_PER_PAGE))
.and_call_original
subject.search_objects
end
end
context 'when negative' do
let(:per_page) { '-1' }
it "defaults to #{described_class::DEFAULT_PER_PAGE}" do
expect_any_instance_of(Gitlab::SearchResults)
.to receive(:objects)
.with(anything, hash_including(per_page: described_class::DEFAULT_PER_PAGE))
.and_call_original
subject.search_objects
end
end
context 'when present' do
let(:per_page) { '50' }
it "converts to integer and passes to search results" do
expect_any_instance_of(Gitlab::SearchResults)
.to receive(:objects)
.with(anything, hash_including(per_page: 50))
.and_call_original
subject.search_objects
end
end
context "when greater than #{described_class::MAX_PER_PAGE}" do
let(:per_page) { described_class::MAX_PER_PAGE + 1 }
it "passes #{described_class::MAX_PER_PAGE}" do
expect_any_instance_of(Gitlab::SearchResults)
.to receive(:objects)
.with(anything, hash_including(per_page: described_class::MAX_PER_PAGE))
.and_call_original
subject.search_objects
end
end
end
context 'with accessible project_id' do
it 'returns objects in the project' do
search_objects = described_class.new(
user,
project_id: accessible_project.id,
scope: 'notes',
search: note.note).search_objects
expect(search_objects.first).to eq note
end
end
context 'with accessible project_id and \'true\' snippets' do
it 'returns objects in the project' do
search_objects = described_class.new(
user,
project_id: accessible_project.id,
snippets: 'true',
scope: 'notes',
search: note.note).search_objects
expect(search_objects.first).to eq note
end
end
context 'with \'true\' snippets' do
it 'returns objects in snippets' do
search_objects = described_class.new(
user,
snippets: 'true',
search: snippet.title).search_objects
expect(search_objects.first).to eq snippet
end
end
context 'with accessible group_id' do
it 'returns objects in the group' do
search_objects = described_class.new(
user,
group_id: accessible_group.id,
search: group_project.name).search_objects
expect(search_objects.first).to eq group_project
end
end
context 'with no project_id, group_id or snippets' do
it 'returns objects in global' do
search_objects = described_class.new(
user,
search: public_project.name).search_objects
expect(search_objects.first).to eq public_project
end
end
# Redaction: results the current user cannot read must be filtered out of
# whatever the backend returns, across every searchable scope.
context 'redacting search results' do
  let(:search) { 'anything' }

  subject(:result) { search_service.search_objects }

  # Builds a FoundBlob belonging to the given project.
  def found_blob(project)
    Gitlab::Search::FoundBlob.new(project: project)
  end

  # Wraps a FoundBlob in a FoundWikiPage for the wiki-blob scope.
  def found_wiki_page(project)
    Gitlab::Search::FoundWikiPage.new(found_blob(project))
  end

  before do
    expect(search_service)
      .to receive(:search_results)
      .and_return(double('search results', objects: unredacted_results))
  end

  # ActiveRecord-backed result sets are modelled as an id-scoped relation.
  def ar_relation(klass, *objects)
    klass.id_in(objects.map(&:id))
  end

  # Non-AR result sets (blobs, commits) arrive as paginated arrays.
  def kaminari_array(*objects)
    Kaminari.paginate_array(objects).page(1).per(20)
  end

  context 'issues' do
    let(:readable) { create(:issue, project: accessible_project) }
    let(:unreadable) { create(:issue, project: inaccessible_project) }
    let(:unredacted_results) { ar_relation(Issue, readable, unreadable) }
    let(:scope) { 'issues' }

    it 'redacts the inaccessible issue' do
      expect(result).to contain_exactly(readable)
    end
  end

  context 'notes' do
    let(:readable) { create(:note_on_commit, project: accessible_project) }
    let(:unreadable) { create(:note_on_commit, project: inaccessible_project) }
    let(:unredacted_results) { ar_relation(Note, readable, unreadable) }
    let(:scope) { 'notes' }

    it 'redacts the inaccessible note' do
      expect(result).to contain_exactly(readable)
    end
  end

  context 'merge_requests' do
    let(:readable) { create(:merge_request, source_project: accessible_project, author: user) }
    let(:unreadable) { create(:merge_request, source_project: inaccessible_project) }
    let(:unredacted_results) { ar_relation(MergeRequest, readable, unreadable) }
    let(:scope) { 'merge_requests' }

    it 'redacts the inaccessible merge request' do
      expect(result).to contain_exactly(readable)
    end
  end

  context 'project repository blobs' do
    let(:readable) { found_blob(accessible_project) }
    let(:unreadable) { found_blob(inaccessible_project) }
    let(:unredacted_results) { kaminari_array(readable, unreadable) }
    let(:scope) { 'blobs' }

    it 'redacts the inaccessible blob' do
      expect(result).to contain_exactly(readable)
    end
  end

  context 'project wiki blobs' do
    let(:readable) { found_wiki_page(accessible_project) }
    let(:unreadable) { found_wiki_page(inaccessible_project) }
    let(:unredacted_results) { kaminari_array(readable, unreadable) }
    let(:scope) { 'wiki_blobs' }

    it 'redacts the inaccessible blob' do
      expect(result).to contain_exactly(readable)
    end
  end

  context 'project snippets' do
    let(:readable) { create(:project_snippet, project: accessible_project) }
    let(:unreadable) { create(:project_snippet, project: inaccessible_project) }
    let(:unredacted_results) { ar_relation(ProjectSnippet, readable, unreadable) }
    let(:scope) { 'snippet_titles' }

    it 'redacts the inaccessible snippet' do
      expect(result).to contain_exactly(readable)
    end
  end

  context 'personal snippets' do
    let(:readable) { create(:personal_snippet, :private, author: user) }
    let(:unreadable) { create(:personal_snippet, :private) }
    let(:unredacted_results) { ar_relation(PersonalSnippet, readable, unreadable) }
    let(:scope) { 'snippet_titles' }

    it 'redacts the inaccessible snippet' do
      expect(result).to contain_exactly(readable)
    end
  end

  context 'commits' do
    let(:readable) { accessible_project.commit }
    let(:unreadable) { inaccessible_project.commit }
    let(:unredacted_results) { kaminari_array(readable, unreadable) }
    let(:scope) { 'commits' }

    it 'redacts the inaccessible commit' do
      expect(result).to contain_exactly(readable)
    end
  end

  context 'users' do
    let(:other_user) { create(:user) }
    let(:unredacted_results) { ar_relation(User, user, other_user) }
    let(:scope) { 'users' }

    it 'passes the users through' do
      # Users are always visible to everyone
      expect(result).to contain_exactly(user, other_user)
    end
  end
end
end
end
| 30.418 | 115 | 0.637978 |
033398187b6952b27578dac88f3b45ec3d5be2cf | 1,075 | require File.expand_path("../../Abstract/abstract-php-extension", __FILE__)
# Homebrew formula building the mcrypt extension for PHP 7.0 from the PHP
# source tarball (constants supplied by the abstract-php-extension helper).
class Php70Mcrypt < AbstractPhp70Extension
  init
  desc "An interface to the mcrypt library"
  homepage "http://php.net/manual/en/book.mcrypt.php"

  bottle do
    revision 9
    sha256 "0582320a1560683fd2a353cc3d1f7ad3fad157200fe7ec49e4b1812fc011c30f" => :el_capitan
    sha256 "545fecd9f01f17f62a74f5d354b0b2bfc710c83537c23253425e839b81f11052" => :yosemite
    sha256 "c65cdd222a03541a63276e251a0408028aa4321941a11a72c3fba95b30ba3072" => :mavericks
  end

  url PHP_SRC_TARBALL
  sha256 PHP_CHECKSUM[:sha256]
  version PHP_VERSION

  depends_on "mcrypt"

  def install
    # Build only the ext/mcrypt subtree of the PHP source tree.
    Dir.chdir "ext/mcrypt"
    ENV.universal_binary if build.universal?
    safe_phpize
    system "./configure", "--prefix=#{prefix}",
      phpconfig,
      "--disable-dependency-tracking",
      "--with-mcrypt=#{Formula["mcrypt"].opt_prefix}"
    system "make"
    prefix.install "modules/mcrypt.so"
    # Optionally drop an ini snippet enabling the extension.
    write_config_file if build.with? "config-file"
  end
end
| 29.861111 | 92 | 0.701395 |
28f7ccf9757879fdbce9daba136fbdf6c726ed5c | 120 | # frozen_string_literal: true
class VhaProgramOffice < Organization
  # VHA Program Offices never accept tasks directly: every task type is
  # rejected regardless of the task passed in.
  def can_receive_task?(_task)
    false
  end
end
| 15 | 37 | 0.775 |
87d752f2854689221e90960467e5f7908c11a399 | 1,407 | # -*- encoding: utf-8 -*-
# stub: public_suffix 4.0.6 ruby lib
# Generated gemspec stub for public_suffix 4.0.6 (see the stub header above);
# fields are guarded with respond_to? for older RubyGems compatibility.
Gem::Specification.new do |s|
  s.name = "public_suffix".freeze
  s.version = "4.0.6"

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.metadata = { "bug_tracker_uri" => "https://github.com/weppos/publicsuffix-ruby/issues", "changelog_uri" => "https://github.com/weppos/publicsuffix-ruby/blob/master/CHANGELOG.md", "documentation_uri" => "https://rubydoc.info/gems/public_suffix/4.0.6", "homepage_uri" => "https://simonecarletti.com/code/publicsuffix-ruby", "source_code_uri" => "https://github.com/weppos/publicsuffix-ruby/tree/v4.0.6" } if s.respond_to? :metadata=
  s.require_paths = ["lib".freeze]
  s.authors = ["Simone Carletti".freeze]
  s.date = "2020-09-02"
  s.description = "PublicSuffix can parse and decompose a domain name into top level domain, domain and subdomains.".freeze
  s.email = ["[email protected]".freeze]
  s.extra_rdoc_files = ["LICENSE.txt".freeze]
  s.files = ["LICENSE.txt".freeze]
  s.homepage = "https://simonecarletti.com/code/publicsuffix-ruby".freeze
  s.licenses = ["MIT".freeze]
  s.required_ruby_version = Gem::Requirement.new(">= 2.3".freeze)
  s.rubygems_version = "3.3.3".freeze
  s.summary = "Domain name parser based on the Public Suffix List.".freeze

  s.installed_by_version = "3.3.3" if s.respond_to? :installed_by_version
end
| 56.28 | 434 | 0.721393 |
336c53dec948af92a66f31d52c13e013416d07e4 | 4,646 | require File.join(File.dirname(__FILE__), 'test_helper')
module Resque::Durable
  # Exercises the durable-enqueue flow: every enqueue writes a QueueAudit row
  # keyed by a GUID, and around_perform_manage_audit completes/requeues it.
  class DurableTest < Minitest::Test
    describe 'Durable queue' do
      before do
        QueueAudit.delete_all
        # Pin the GUID so the audit row can be looked up deterministically.
        GUID.stubs(:generate).returns('abc/1/12345')
        Resque.expects(:enqueue).with(Resque::Durable::MailQueueJob, :foo, :bar, 'abc/1/12345')
        MailQueueJob.enqueue(:foo, :bar)
      end

      describe 'enqueue' do
        it 'creates an audit' do
          audit = QueueAudit.find_by_enqueued_id('abc/1/12345')
          assert_equal 'abc/1/12345', audit.enqueued_id
        end
      end

      describe 'enqueue failure' do
        before do
          QueueAudit.delete_all
          Resque.expects(:enqueue).raises(ArgumentError.new)
        end

        it 'raises an error by default' do
          assert_raises(ArgumentError) do
            MailQueueJob.enqueue(:ka, :boom)
          end
        end

        it 'has overridable exception handling' do
          # Subclasses may intercept enqueue failures instead of raising.
          class NewMailQueueJob < MailQueueJob
            def self.enqueue_failed(exception, args)
              @called = [exception.class, args]
            end
          end
          NewMailQueueJob.enqueue(:ka, :boom)
          assert_equal [ArgumentError, [:ka, :boom, "abc/1/12345"]], NewMailQueueJob.instance_variable_get(:@called)
        end

        it 'creates an audit' do
          assert_raises(ArgumentError) do
            MailQueueJob.enqueue(:ka, :boom)
          end
          audit = QueueAudit.find_by_enqueued_id('abc/1/12345')
          assert_equal 'abc/1/12345', audit.enqueued_id
        end
      end

      describe 'a missing audit' do
        it 'is reported with an exception' do
          audit = QueueAudit.find_by_enqueued_id('abc/1/12345')
          audit.destroy
          assert_raises(ArgumentError) do
            MailQueueJob.around_perform_manage_audit('hello', "foo", 'abc/1/12345') {}
          end
        end
      end

      describe 'around perform' do
        it 'completes the audit' do
          audit = QueueAudit.find_by_enqueued_id('abc/1/12345')
          assert !audit.complete?
          MailQueueJob.around_perform_manage_audit('hello', "foo", 'abc/1/12345') {}
          audit = QueueAudit.find_by_enqueued_id('abc/1/12345')
          assert audit.complete?
        end

        it 'should not complete on failure' do
          audit = QueueAudit.find_by_enqueued_id('abc/1/12345')
          assert !audit.complete?
          MailQueueJob.around_perform_manage_audit('hello', "foo", 'abc/1/12345') { raise } rescue nil
          audit.reload
          assert !audit.complete?
        end

        it 'does not perform when the audit is already complete' do
          audit = QueueAudit.find_by_enqueued_id('abc/1/12345')
          assert !audit.complete?
          MailQueueJob.around_perform_manage_audit('hello', "foo", 'abc/1/12345') {}
          assert audit.reload.complete?
          # A second invocation for the same audit must be a no-op.
          MailQueueJob.around_perform_manage_audit('hello', "foo", 'abc/1/12345') do
            assert false, 'Should not have been executed'
          end
        end
      end

      describe 'when requeue_immediately! requested' do
        before do
          MailQueueJob.requeue_immediately!
        end

        it 're_enqueue_immediately? should return true' do
          assert MailQueueJob.requeue_immediately
        end

        it 'should call audit.re_enqueue_immediately! and set enqueue_count to 1' do
          audit = QueueAudit.find_by_enqueued_id('abc/1/12345')
          MailQueueJob.around_perform_manage_audit('hello', "foo", 'abc/1/12345') {}
          audit.reload
          assert_equal 1, audit.enqueue_count
        end

        it 'should not complete' do
          audit = QueueAudit.find_by_enqueued_id('abc/1/12345')
          assert !audit.complete?
          MailQueueJob.around_perform_manage_audit('hello', "foo", 'abc/1/12345') {}
          audit.reload
          assert !audit.complete?
        end

        it 'after the job finishes, requeue_immediately should set to false' do
          MailQueueJob.around_perform_manage_audit('hello', "foo", 'abc/1/12345') {}
          assert !MailQueueJob.requeue_immediately
        end
      end
    end

    describe 'background heartbeating' do
      before do
        QueueAudit.delete_all
        # Inline mode runs jobs synchronously so the heartbeat effect is
        # observable immediately.
        Resque.inline = true
      end

      after do
        Resque.inline = false
      end

      it 'heartbeats continously in the background' do
        time_travel = Time.now + 10.years
        BackgroundHeartbeatTestJob.enqueue(time_travel)
        assert_operator QueueAudit.first.timeout_at, :>, time_travel
      end
    end
  end
end
| 29.782051 | 116 | 0.618166 |
bfc9bbaa08405be49f5af00c594eba457bf9160b | 304 | module Keras::Wrappers
extend self

# NOTE(review): `pyfrom` presumably comes from PyCall, importing the Python
# Keras layer-wrapper classes into this Ruby namespace — confirm at require
# site, which is outside this chunk.
pyfrom 'keras.layers', import: 'TimeDistributed'
pyfrom 'keras.layers', import: 'Bidirectional'

# Wrap a layer so it is applied to every temporal slice of the input.
def time_distributed(layer, **args)
  TimeDistributed.new(layer, **args)
end

# Wrap a recurrent layer to process the sequence in both directions.
def bidirectional(layer, **args)
  Bidirectional.new(layer, **args)
end
end
| 21.714286 | 50 | 0.713816 |
115120e00d3fd8546e23464f0de576da0b5a3ba8 | 324 | module Catherine
module Admin
# Base controller for admin CRUD resources: requires a signed-in site admin
# and wires up the shared resource-controller configuration.
class ResourceController < Catherine::Admin::BaseController
  include ResourceHelper

  before_action :require_user
  before_action :require_site_admin
  before_action :configure_resource_controller

  # URL for a resource within the admin namespace (used by ResourceHelper).
  def resource_url(obj)
    url_for([:admin, obj])
  end
end
end
end
| 18 | 62 | 0.743827 |
21f97431aa31bb66456d7a09e2ac11eb32ef8168 | 78 | require 'unidom/promotion/engine'
module Unidom
  # Namespace for the Unidom Promotion engine; intentionally empty.
  module Promotion
  end
end
| 11.142857 | 33 | 0.782051 |
5dbe2c4c0a592a18e2feec04eab541683a86f6be | 1,228 | require 'spec_helper'
# Anodator::Validator::BlankValidator
require 'anodator/validator/blank_validator'
RSpec.describe Anodator::Validator::BlankValidator, '.new' do
  # The target column identifier is the only required constructor argument.
  context 'with no parameters' do
    it 'should raise ArgumentError' do
      expect do
        Anodator::Validator::BlankValidator.new
      end.to raise_error ArgumentError
    end
  end

  context 'with only target parameter' do
    it 'should not raise error' do
      expect do
        Anodator::Validator::BlankValidator.new('1')
      end.not_to raise_error
    end
  end
end
RSpec.describe Anodator::Validator::BlankValidator, '#valid?' do
  let(:validator) { Anodator::Validator::BlankValidator.new('1') }

  # Only an empty string for the target column counts as blank/valid.
  context 'target value is blank' do
    before(:each) do
      Anodator::Validator::Base.values = { '1' => '' }
    end

    it { expect(validator).to be_valid }
  end

  context "target value is '1'" do
    before(:each) do
      Anodator::Validator::Base.values = { '1' => '1' }
    end

    it { expect(validator).not_to be_valid }
  end

  context "target value is 'some message'" do
    before(:each) do
      Anodator::Validator::Base.values = { '1' => 'some message' }
    end

    it { expect(validator).not_to be_valid }
  end
end
| 24.078431 | 66 | 0.665309 |
035ee8508b5eb70334a5c56fe166c7d9387026ad | 415 | class TmuxinatorFzfStart < Formula
# NOTE(review): this desc looks copy-pasted from another formula — the
# project is a tmuxinator/fzf session starter, not a Docker.app waiter.
# Confirm against the upstream repository before changing formula metadata.
desc "A macOS tool for starting then waiting for Docker.app to be ready"
homepage "https://github.com/camspiers/tmuxinator-fzf-start"
url "https://github.com/camspiers/tmuxinator-fzf-start/archive/1.0.2.tar.gz"
sha256 "f214277ba5b0cfdfa1101f2dbe21a1cf8b1c7b41ef3ac5ee2ada00278992180b"
bottle :unneeded

def install
  # The release is a single shell script; install it onto PATH as-is.
  bin.install "tmuxinator-fzf-start.sh"
end
end
| 31.923077 | 78 | 0.783133 |
1c5bf31e9137cdbf71656531e9de63e779254479 | 875 | #
# Cookbook:: vm
# Test:: mysql_test
#
# Author: Tom Price
# Date: Dec 2018
#
# Inspec tests for recipe vm::mysql
#
# The Inspec reference, with examples and extensive documentation, can be
# found at http://inspec.io/docs/reference/resources/
#
# InSpec checks for the vm::mysql recipe: packages present, daemon configured
# for local-only access, and the port listening on TCP.
describe package('mysql-server') do
  it { should be_installed }
end

describe package('mysql-client') do
  it { should be_installed }
end

describe mysql_conf('/etc/mysql/mysql.conf.d/mysqld.cnf').params('mysqld') do
  its('user') { should eq 'mysql' }
  its('port') { should eq '3306' }
  its('socket') { should eq '/var/run/mysqld/mysqld.sock' }
  # Bound to loopback only — no remote connections expected.
  its('bind-address') { should eq '127.0.0.1' }
end

describe port(3306) do
  it { should be_listening }
  its('protocols') { should include('tcp') }
end
describe command("sudo mysql -h localhost -u root -s -e 'show databases;'") do
its('stdout') { should match(/mysql/) }
end | 25 | 78 | 0.685714 |
01ffe3f4da93d2648cb1fde465d5be44f3984be3 | 179 | # frozen_string_literal: true
class AddCommencementStatusToTrainee < ActiveRecord::Migration[6.1]
  # Adds an integer commencement_status column to trainees.
  # NOTE(review): presumably backs a Rails enum on the Trainee model —
  # confirm against the model before relying on specific values.
  def change
    add_column :trainees, :commencement_status, :integer
  end
end
| 22.375 | 67 | 0.793296 |
91103e1101cffc872380fc872e425ee78da5e9d9 | 1,072 | # frozen_string_literal: true
require 'syslog/logger'
require 'edools/version'
require 'edools/utils'
require 'edools/api_request'
require 'edools/pagination_proxy'
require 'edools/school'
require 'edools/course'
require 'edools/school_product'
require 'edools/invitation'
require 'edools/user'
require 'edools/session'
require 'edools/media'
require 'edools/enrollment'
require 'edools/lesson_progress'
require 'edools/exam_answer'
# Top-level namespace for the Edools API client: error hierarchy, global
# configuration accessors, and shared helpers used by the resource classes.
module Edools
  class RequestFailed < StandardError
  end

  class BadRequest < StandardError
  end

  class NotFound < StandardError
  end

  class Unauthorized < StandardError
  end

  class AuthenticationException < StandardError
  end

  # Raised when the API responds with a structured list of errors; the raw
  # error payload is kept on #errors for callers to inspect.
  class RequestWithErrors < StandardError
    attr_accessor :errors

    def initialize(errors)
      @errors = errors
    end
  end

  class << self
    # Global configuration: API token and an optional school-specific API URL.
    attr_accessor :api_token, :school_api_url
    attr_reader :api_version
  end

  @api_version = 'v1'

  # Base URL for requests; falls back to the public Edools endpoint when no
  # school-specific URL has been configured.
  def self.base_url
    self.school_api_url || 'https://core.myedools.com'
  end

  # Syslog-backed logger tagged with the library name.
  def self.logger
    Syslog::Logger.new 'Edools'
  end
end
| 18.482759 | 54 | 0.751866 |
11756c6fc3c53a6cd5c8293ff0f47af648a6ae66 | 547 | # frozen_string_literal: true
class ApplicationController < ActionController::Base
  # The session-based authentication below is currently disabled; it is kept
  # as a reference implementation for a future login flow.
  # helper_method allows you to call the method on the views
  # helper_method :check_and_see_if_someone_is_logged_in?
  # before_action :authorized_to_see_page

  # def set_current_student
  #   @current_user = User.find_by(id: session[:user_id])
  # end

  # def check_and_see_if_someone_is_logged_in?
  #   !set_current_student.nil?
  # end

  # def authorized_to_see_page
  #   redirect_to login_path unless check_and_see_if_someone_is_logged_in?
  # end
end
| 27.35 | 74 | 0.780622 |
1dde2f7f1352e95c83c23eed18da5d0602230c54 | 1,057 | #
# Cookbook Name:: serverdensity
# Provider:: plugin
# Chef's why-run mode is safe here: both sub-resources support it.
def whyrun_supported?
  true
end

# actions

# Enabling a plugin writes its config file and links the plugin script;
# the provider is marked updated if either sub-resource changed.
action :enable do
  config.run_action :create
  plugin.run_action :create

  new_resource.updated_by_last_action(
    config.updated_by_last_action? || plugin.updated_by_last_action?
  )
end

# Disabling removes both the config file and the plugin symlink.
action :disable do
  config.run_action :delete
  plugin.run_action :delete

  new_resource.updated_by_last_action(
    config.updated_by_last_action? || plugin.updated_by_last_action?
  )
end
# methods
# Lazily builds the Template resource for the plugin's sd-agent config file
# (/etc/sd-agent/conf.d/plugin-<name>.cfg), rendered from plugin.cfg.erb.
def config
  @config ||= begin
    file = Chef::Resource::Template.new ::File.join(
      '/etc/sd-agent/conf.d', "plugin-#{new_resource.name}.cfg"
    ), run_context
    file.cookbook 'serverdensity'
    file.source 'plugin.cfg.erb'
    file.variables :name => new_resource.name,
      :options => new_resource.config
    file
  end
end
# Lazily builds the Link resource symlinking <plugin_dir>/<name>.py to the
# plugin's actual path from the resource definition.
def plugin
  @plugin ||= begin
    file = Chef::Resource::Link.new ::File.join(
      node.serverdensity.plugin_dir, new_resource.name + '.py'
    ), run_context
    file.to new_resource.path
    file
  end
end
| 20.72549 | 68 | 0.701987 |
e894c00eaf8cb3c74bc694ce70416901e50d4d0b | 1,878 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
# View spec for the domain search results page: empty result set, a page
# within the pagination limit, and a result set that paginates.
describe "search/results.html.haml" do
  before(:each) do
    @admin = Factory(:admin)
    view.stubs(:current_user).returns(@admin)
    view.stubs(:current_token).returns(nil)
  end

  it "should handle no results" do
    assign(:results, [])
    render
    rendered.should have_tag("strong", :content => "No domains found")
  end

  it "should handle results within the pagination limit" do
    1.upto(4) do |i|
      zone = Domain.new
      zone.id = i
      zone.name = "zone-#{i}.com"
      # Skip validations: fixtures only need to exist for the search query.
      zone.save( :validate => false ).should be_true
    end
    assign(:results, Domain.search( 'zone', 1, @admin ))
    render
    rendered.should have_tag("table a", :content => "zone-1.com")
  end

  it "should handle results with pagination and scoping" do
    1.upto(100) do |i|
      zone = Domain.new
      zone.id = i
      zone.name = "domain-#{i}.com"
      zone.save( :validate => false ).should be_true
    end
    assign(:results, Domain.search( 'domain', 1, @admin ))
    render
    rendered.should have_tag("table a", :content => "domain-1.com")
  end
end
| 28.892308 | 74 | 0.687966 |
612e70ebaab197c3b4e26394887dcbaba4eea73d | 1,730 | class MyLinkedList
=begin
Initialize your data structure here.
=end
# Start with an empty array backing the linked list.
def initialize()
  @array = Array.new
end
=begin
Get the value of the index-th node in the linked list. If the index is invalid, return -1.
:type index: Integer
:rtype: Integer
=end
# Get the value of the index-th node, or -1 when the index is invalid.
# Negative indexes are invalid for this API, even though Ruby's Array
# would silently read them as offsets from the tail.
def get(index)
  return -1 if index < 0 || index >= @array.length
  @array[index]
end
=begin
Add a node of value val before the first element of the linked list. After the insertion, the new node will be the first node of the linked list.
:type val: Integer
:rtype: Void
=end
# Prepend val so it becomes the new first node of the list.
def add_at_head(val)
  @array.insert(0, val)
end
=begin
Append a node of value val to the last element of the linked list.
:type val: Integer
:rtype: Void
=end
# Append val as the new last node of the list.
def add_at_tail(val)
  @array.push(val)
end
=begin
Add a node of value val before the index-th node in the linked list. If index equals to the length of linked list, the node will be appended to the end of linked list. If index is greater than the length, the node will not be inserted.
:type index: Integer
:type val: Integer
:rtype: Void
=end
# Insert val before the index-th node. Per the documented contract,
# index == length appends and index > length inserts nothing — plain
# Array#insert would instead nil-pad the gap for index > length.
def add_at_index(index, val)
  return if index > @array.length
  # Treat a negative index as "insert at the head" rather than Ruby's
  # count-from-the-tail semantics, which make no sense for this API.
  index = 0 if index < 0
  @array.insert(index, val)
end
=begin
Delete the index-th node in the linked list, if the index is valid.
:type index: Integer
:rtype: Void
=end
# Delete the index-th node when the index is valid. Negative indexes are
# invalid here; Array#delete_at would otherwise count them from the tail
# and remove a real node.
def delete_at_index(index)
  @array.delete_at(index) if index >= 0
end
end
# Your MyLinkedList object will be instantiated and called as such:
# obj = MyLinkedList.new()
# param_1 = obj.get(index)
# obj.add_at_head(val)
# obj.add_at_tail(val)
# obj.add_at_index(index, val)
# obj.delete_at_index(index)
| 24.366197 | 241 | 0.621965 |
1114a1b1406b6696f413725a392511b1d8c01b6b | 1,319 | require 'minitest_helper'
module SlideHero
  describe GroupedSlides do
    describe "nesting slides" do
      # Grouped slides compile to nested <section> elements so Reveal.js
      # renders them as a vertical stack.
      it "groups sections" do
        grouped_slides = GroupedSlides.new do
          slide "banana" do
            point "High in potassium"
          end

          slide "apple" do
            point "Good if you hate doctors"
          end
        end

        assert_dom_match grouped_slides.compile, "<section>" +
          "<section data-transition=\"default\">" +
          "<h2>banana</h2>" +
          "<p>High in potassium</p>" +
          "</section>" +
          "<section data-transition=\"default\">" +
          "<h2>apple</h2>" +
          "<p>Good if you hate doctors</p>" +
          "</section>" +
        "</section>"
      end

      # Nested slides accept the same options a top-level slide does.
      it "takes all arguments slide handles" do
        grouped_slides = GroupedSlides.new do
          slide "Title", headline_size: :medium, transition: :zoom do
            point "Made one"
          end
        end
        grouped_slides.compile.must_include "transition"
        grouped_slides.compile.must_include "h2"
      end
    end

    # Group-level defaults propagate to the slides inside the group.
    it "can have defaults passed in" do
      grouped_slides = GroupedSlides.new(headline_size: :medium) do
        slide "Title"
      end
      grouped_slides.compile.must_include "<h2>Title</h2>"
    end
  end
end
| 27.479167 | 69 | 0.567096 |
617e1cecbd59cdfc91d2d19c4071215ba162a30e | 108 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'spider_pig'
require 'minitest/autorun'
| 21.6 | 58 | 0.75 |
6a1819ec0e6f156adf6c29dad5cb090586fe97b1 | 6,982 | require 'oauth'
module Withings
module Api
# Mixes the oauth gem's namespace into Withings::Api::OAuth so constants
# like OAuth::RequestToken resolve inside this library's namespace.
module OAuth
  include ::OAuth
end
# Methods to ease the OAuth setup steps for using the Withings API.
#
# Specifically, this class provides methods for OAuth access token creation. The steps are:
#
# 1. Request request tokens - via {#create_request_token}
# 1. Redirect to authorization URL (this is handled outside of these methods, ie: by the webapp, etc.)
# 1. Request access tokens (for permanent access to Withings content) - via {#create_access_token}
#
# After successfully creating an {AccessToken}, you can use the methods provided by {ApiActions} to
# query data from Withings.
module OAuthActions
include OAuthBase
Defaults = Withings::Api::Defaults
# Issues the "request_token" oauth HTTP request to Withings.
#
# For the details of this process, see http://www.withings.com/en/api/oauthguide#access
#
# To receive the consumer credentials mentioned below, directions on registering your application with Withings is
# @ http://www.withings.com/en/api/oauthguide#registration
#
# @overload create_request_token(consumer_key, consumer_secret, callback_url)
# @param [String] consumer_key the consumer (application) key assigned by Withings
# @param [String] consumer_secret the consumer (application) secret assigned by Withings
# @param [String] callback_url the URL Withings should use upon successful authentication and authorization by
# the user
#
# @overload create_request_token(consumer_token, callback_url)
# @param [ConsumerToken] conumer_token the consumer (application) token assigned by Withings
# @param [String] callback_url
#
# @return [RequestTokenResponse]
#
# @todo cleanup the list of exceptions raised
#
# @raise [Timeout::Error] on connection, or read timeout
# @raise [SystemCallError] on low level system call errors (connection timeout, connection refused)
# @raise [ProtocolError] for HTTP 5XX error response codes
# @raise [OAuth::Unauthorized] for HTTP 4XX error reponse codes
# @raise [StandardError] for everything else
def create_request_token(consumer_token, *arguments)
  _consumer_token, _consumer_secret, _callback_url = nil

  # Two call shapes: (ConsumerToken, callback_url) or
  # (consumer_key, consumer_secret, callback_url).
  if arguments.length == 1 && consumer_token.instance_of?(Withings::Api::ConsumerToken)
    _consumer_token, _consumer_secret = consumer_token.to_a
  elsif arguments.length == 2
    _consumer_token = consumer_token
    _consumer_secret = arguments.shift
  else
    raise(ArgumentError)
  end
  # The callback URL is always the last remaining argument.
  _callback_url = arguments.shift

  # TODO: warn if the callback URL isn't HTTPS

  consumer = create_consumer(_consumer_token, _consumer_secret)
  oauth_request_token = consumer.get_request_token({:oauth_callback => _callback_url})

  RequestTokenResponse.new oauth_request_token
end
# Issues the "access_token" oauth HTTP request to Withings.
#
# @note This step needs to happen AFTER successfully retrieving the request token (see {#create_request_token})
# and retrieving the callback from Withings signifying the user has authorized your applications access).
#
# @overload create_access_token(request_token, consumer_token, user_id)
# @param [RequestToken] request_token the request token from a previous call to {#create_request_token}}
# @param [ConsumerToken] consumer_token (see #create_request_token)
# @param [String] user_id the Withings userid (note: not currently required by Withings)
#
# @overload create_access_token(request_token_response, user_id)
# @param [RequestTokenResponse] request_token_response the result received from a previous call to {#create_request_token}
# @param [String] user_id the Withings userid (note: not currently required by Withings)
#
# @return [AccessTokenResponse]
def create_access_token(request_token, *arguments)
  _consumer, _request_token, _user_id = nil

  # Two call shapes: (RequestTokenResponse, user_id) or
  # (RequestToken, ConsumerToken, user_id).
  if request_token.instance_of?(RequestTokenResponse) && arguments.length == 1
    _consumer = request_token.oauth_consumer
    _request_token = request_token.oauth_request_token
    _user_id = arguments.shift
  elsif request_token.instance_of?(RequestToken) && arguments.length == 2
    # NOTE(review): the next line is a no-op — its result is discarded.
    # It was probably meant to validate that the first argument is a
    # ConsumerToken; confirm intent before removing or enforcing it.
    request_token.instance_of?(ConsumerToken)
    _consumer = create_consumer(*arguments.shift.to_a)
    _request_token = OAuth::RequestToken.new(_consumer, *request_token.to_a)
    _user_id = arguments.shift
  else
    raise ArgumentError
  end

  oauth_access_token = _consumer.get_access_token(_request_token)

  # test for unauthorized token, since oauth + withings doesn't turn this into an explicit
  # error code / exception
  if oauth_access_token.params.key?(:"unauthorized token")
    raise StandardError, :"unauthorized token"
  end

  AccessTokenResponse.new oauth_access_token
end
end
# Encapsulates the results of a call to {#create_request_token}
class RequestTokenResponse
  # @private
  attr_accessor :oauth_request_token

  # @private
  def initialize(oauth_request_token)
    self.oauth_request_token = oauth_request_token
  end

  # @return [String] the OAuth request token key
  def token
    oauth_request_token.token
  end
  alias_method :key, :token

  # @return [String] the OAuth request token secret
  def secret
    oauth_request_token.secret
  end

  # @return [String] URL to redirect the user to to authorize the access to their data
  def authorization_url
    oauth_request_token.authorize_url
  end

  # @return [RequestToken] a plain key/secret representation of this token
  def request_token
    RequestToken.new(key, secret)
  end

  # @private
  def oauth_consumer
    oauth_request_token.consumer
  end
end
# Encapsulates the response from a call to #create_access_token
class AccessTokenResponse
  # @private
  def initialize(oauth_access_token)
    @oauth_access_token = oauth_access_token
  end

  # @return [String] The retrieved OAuth access token key
  def token
    @oauth_access_token.token
  end
  alias_method :key, :token

  # @return [String] The retrieved OAuth access token secret
  def secret
    @oauth_access_token.secret
  end

  # @return [String] the user_id for the user @ Withings
  def user_id
    @oauth_access_token.params["userid"]
  end

  # @return [AccessToken] the returned key + secret pair as an AccessToken
  def access_token
    AccessToken.new(key, secret)
  end
end
end
end
| 37.537634 | 130 | 0.689487 |
3326c3258139c10f6bb731573b4a68f4fd1b9c8b | 38 | require 'hyrarchy'
# Activate Hyrarchy's ActiveRecord integration as soon as the gem loads.
Hyrarchy.activate!
| 12.666667 | 18 | 0.815789 |
e8d365304eeab4cf0a687dc852c8234ab5fab227 | 8,529 | =begin
#NSX-T Data Center Policy API
#VMware NSX-T Data Center Policy REST API
OpenAPI spec version: 3.1.0.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.17
=end
require 'date'
module NSXTPolicy
# This condition is used to match the HTTP protocol version of the HTTP request messages.
class LBHttpRequestVersionCondition
# A flag to indicate whether to reverse (negate) the match result of this condition
attr_accessor :inverse

# Type discriminator of the load balancer rule condition (see valid? for the
# full list of allowed values)
attr_accessor :type

# HTTP version to match ('HTTP_VERSION_1_0' or 'HTTP_VERSION_1_1')
attr_accessor :version
# Validates a value against an enum whitelist, coercing the allowed values
# to the declared datatype ('Integer', 'Float'; anything else is kept as-is).
class EnumAttributeValidator
  # @return [String] the declared datatype used to coerce the allowed values.
  # The original generated code exposed this reader but never assigned the
  # ivar, so it always returned nil; it is now set in the constructor.
  attr_reader :datatype
  # @return [Array] the allowed values, coerced to the declared datatype
  attr_reader :allowable_values

  # @param datatype [String] e.g. 'Integer', 'Float', 'String'
  # @param allowable_values [Array] raw allowed values (typically strings)
  def initialize(datatype, allowable_values)
    @datatype = datatype
    @allowable_values = allowable_values.map do |value|
      case datatype.to_s
      when /Integer/i
        value.to_i
      when /Float/i
        value.to_f
      else
        value
      end
    end
  end

  # nil/false are accepted so unset optional attributes pass validation.
  def valid?(value)
    !value || allowable_values.include?(value)
  end
end
# Attribute mapping from ruby-style variable name to JSON key.
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
  {
    :'inverse' => :'inverse',
    :'type' => :'type',
    :'version' => :'version'
  }
end
# Attribute type mapping.
# Attribute type mapping used by build_from_hash/_deserialize.
def self.swagger_types
  {
    :'inverse' => :'BOOLEAN',
    :'type' => :'String',
    :'version' => :'String'
  }
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# Initializes the object from an attribute hash; non-Hash input is ignored.
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
  return unless attributes.is_a?(Hash)

  # convert string to symbol for hash key
  attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }

  # inverse is the only attribute with a default (false).
  if attributes.has_key?(:'inverse')
    self.inverse = attributes[:'inverse']
  else
    self.inverse = false
  end

  if attributes.has_key?(:'type')
    self.type = attributes[:'type']
  end

  if attributes.has_key?(:'version')
    self.version = attributes[:'version']
  end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
  invalid_properties = []
  invalid_properties << 'invalid value for "type", type cannot be nil.' if @type.nil?
  invalid_properties << 'invalid value for "version", version cannot be nil.' if @version.nil?
  invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
# Check to see if the all the properties in the model are valid
# Both required attributes must be present and within their enums.
# @return true if the model is valid
def valid?
  return false if @type.nil?
  type_validator = EnumAttributeValidator.new('String', ['LBHttpRequestUriCondition', 'LBHttpRequestHeaderCondition', 'LBHttpRequestMethodCondition', 'LBHttpRequestUriArgumentsCondition', 'LBHttpRequestVersionCondition', 'LBHttpRequestCookieCondition', 'LBHttpRequestBodyCondition', 'LBHttpResponseHeaderCondition', 'LBTcpHeaderCondition', 'LBIpHeaderCondition', 'LBVariableCondition', 'LBHttpSslCondition', 'LBSslSniCondition'])
  return false unless type_validator.valid?(@type)
  return false if @version.nil?
  version_validator = EnumAttributeValidator.new('String', ['HTTP_VERSION_1_0', 'HTTP_VERSION_1_1'])
  return false unless version_validator.valid?(@version)
  true
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] type Object to be assigned
# Custom attribute writer method checking allowed values (enum).
# @param [Object] type Object to be assigned
# @raise [ArgumentError] when the value is not an allowed condition type
def type=(type)
  validator = EnumAttributeValidator.new('String', ['LBHttpRequestUriCondition', 'LBHttpRequestHeaderCondition', 'LBHttpRequestMethodCondition', 'LBHttpRequestUriArgumentsCondition', 'LBHttpRequestVersionCondition', 'LBHttpRequestCookieCondition', 'LBHttpRequestBodyCondition', 'LBHttpResponseHeaderCondition', 'LBTcpHeaderCondition', 'LBIpHeaderCondition', 'LBVariableCondition', 'LBHttpSslCondition', 'LBSslSniCondition'])
  unless validator.valid?(type)
    # Double-quoted string so the allowed values actually interpolate; the
    # single-quoted original emitted the literal "#{validator.allowable_values}".
    fail ArgumentError, "invalid value for \"type\", must be one of #{validator.allowable_values}."
  end
  @type = type
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] version Object to be assigned
# Custom attribute writer method checking allowed values (enum).
# @param [Object] version Object to be assigned
# @raise [ArgumentError] when the value is not an allowed HTTP version
def version=(version)
  validator = EnumAttributeValidator.new('String', ['HTTP_VERSION_1_0', 'HTTP_VERSION_1_1'])
  unless validator.valid?(version)
    # Double-quoted string so the allowed values actually interpolate; the
    # single-quoted original emitted the literal "#{validator.allowable_values}".
    fail ArgumentError, "invalid value for \"version\", must be one of #{validator.allowable_values}."
  end
  @version = version
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
# Equality compares class and every model attribute; kept consistent with
# #hash below so instances behave correctly as Hash keys.
def ==(o)
  return true if self.equal?(o)
  self.class == o.class &&
    inverse == o.inverse &&
    type == o.type &&
    version == o.version
end

# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
  self == o
end

# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
  [inverse, type, version].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
# Builds the object from hash, driven by the swagger_types declarations.
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
  return nil unless attributes.is_a?(Hash)
  self.class.swagger_types.each_pair do |key, type|
    if type =~ /\AArray<(.*)>/i
      # check to ensure the input is an array given that the attribute
      # is documented as an array but the input is not
      if attributes[self.class.attribute_map[key]].is_a?(Array)
        self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
      end
    elsif !attributes[self.class.attribute_map[key]].nil?
      self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
    end # or else data not found in attributes(hash), not an issue as the data can be optional
  end

  self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = NSXTPolicy.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 32.184906 | 433 | 0.642045 |
33ff3c3a54c3b02a1037ba399905ef50fa9e304c | 115 | FactoryBot.define do
factory :address do
address_line_1 { "Street" }
association :organisation
end
end
| 16.428571 | 31 | 0.721739 |
039dfca1616fa1486628c297e4a7ea843daee0c1 | 4,820 | require 'spec_helper'
describe ISO8601::Time do
it "should raise an error for any unknown pattern" do
expect { ISO8601::Time.new('') }.to raise_error(ISO8601::Errors::UnknownPattern)
expect { ISO8601::Time.new('T') }.to raise_error(ISO8601::Errors::UnknownPattern)
expect { ISO8601::Time.new('T10:3012+0400') }.to raise_error(ISO8601::Errors::UnknownPattern)
expect { ISO8601::Time.new('T10:30:12+0400') }.to raise_error(ISO8601::Errors::UnknownPattern)
expect { ISO8601::Time.new('T10:30:12+040') }.to raise_error(ISO8601::Errors::UnknownPattern)
end
it "should raise an error for a correct pattern but an invalid date" do
expect { ISO8601::Time.new('T25:00:00') }.to raise_error(ISO8601::Errors::RangeError)
expect { ISO8601::Time.new('T00:61:00') }.to raise_error(ISO8601::Errors::RangeError)
expect { ISO8601::Time.new('T00:00:61') }.to raise_error(ISO8601::Errors::RangeError)
end
it "should parse any allowed pattern" do
expect { ISO8601::Time.new('T10') }.to_not raise_error
expect { ISO8601::Time.new('T10-00:00') }.to_not raise_error
expect { ISO8601::Time.new('T10Z') }.to_not raise_error
expect { ISO8601::Time.new('T10:30') }.to_not raise_error
expect { ISO8601::Time.new('T10:30Z') }.to_not raise_error
expect { ISO8601::Time.new('T10:30:12') }.to_not raise_error
expect { ISO8601::Time.new('T10:30:12Z') }.to_not raise_error
expect { ISO8601::Time.new('T10:30:12+04') }.to_not raise_error
expect { ISO8601::Time.new('T10:30:12+04:00') }.to_not raise_error
expect { ISO8601::Time.new('T10:30:12-04:00') }.to_not raise_error
expect { ISO8601::Time.new('T103012+0400') }.to_not raise_error
expect { ISO8601::Time.new('T103012+04') }.to_not raise_error
expect { ISO8601::Time.new('T10:30:12-00:00') }.to_not raise_error
expect { ISO8601::Time.new('T16:26:10,5Z') }.to_not raise_error
expect { ISO8601::Time.new('T10+00:00') }.to_not raise_error
end
context 'reduced patterns' do
it "should parse correctly reduced times" do
reduced_time = ISO8601::Time.new('T101112Z')
expect(reduced_time.hour).to eq(10)
expect(reduced_time.minute).to eq(11)
expect(reduced_time.second).to eq(12)
end
end
it "should return each atomic value" do
t = ISO8601::Time.new('T12:02:01+04:00', ::Date.parse('2010-05-09'))
expect(t.hour).to eq(12)
expect(t.minute).to eq(2)
expect(t.second).to eq(1)
expect(t.zone).to eq('+04:00')
end
it "should keep the correct fraction when using commma separators" do
expect(ISO8601::Time.new('T16:26:10,5Z').second).to eq(10.5)
end
it "should respond to delegated casting methods" do
expect(ISO8601::Time.new('T10:09:08Z')).to respond_to(:to_s, :to_time, :to_date, :to_datetime)
end
describe '#+' do
it "should return the result of the addition of a number" do
expect((ISO8601::Time.new('T20:20:20Z') + 10).to_s).to eq('T20:20:30+00:00')
expect((ISO8601::Time.new('T20:20:20.5Z') + 10).to_s).to eq('T20:20:30.5+00:00')
expect((ISO8601::Time.new('T20:20:20+02:00') + 10.09).to_s).to eq('T20:20:30.1+02:00')
expect((ISO8601::Time.new('T20:20:20+02:00') + 10.1).to_s).to eq('T20:20:30.1+02:00')
expect((ISO8601::Time.new('T20:20:20+02:00') + 10).second).to eq(30)
expect((ISO8601::Time.new('T20:20:20.5Z') + 10).second).to eq(30.5)
expect((ISO8601::Time.new('T20:20:20+02:00') + 10.09).second).to eq(30.1)
end
end
describe '#-' do
it "should return the result of the subtraction of a number" do
expect((ISO8601::Time.new('T20:20:20+01:00') - 10).to_s).to eq('T20:20:10+01:00')
expect((ISO8601::Time.new('T20:20:20.11+02:00') - 10).to_s).to eq('T20:20:10.1+02:00')
end
end
describe '#to_a' do
it "should return an array of atoms" do
expect(ISO8601::Time.new('T19:29:39Z').to_a).to eq([19, 29, 39, '+00:00'])
end
end
describe '#atoms' do
it "should return an array of atoms" do
expect(ISO8601::Time.new('T19:29:39+04:00').atoms).to eq([19, 29, 39, '+04:00'])
expect(ISO8601::Time.new('T19:29:39Z').atoms).to eq([19, 29, 39, 'Z'])
expect(ISO8601::Time.new('T19:29:39').atoms).to eq([19, 29, 39])
expect(ISO8601::Time.new('T19:29').atoms).to eq([19, 29, 0.0])
expect(ISO8601::Time.new('T19:29Z').atoms).to eq([19, 29, 0.0, 'Z'])
expect(ISO8601::Time.new('T19Z').atoms).to eq([19, 0, 0.0, 'Z'])
end
end
describe '#hash' do
it "should return the time hash" do
subject = ISO8601::Time.new('T20:11:10Z')
expect(subject).to respond_to(:hash)
end
it "should return the same hash" do
subject = ISO8601::Time.new('T20:11:10Z')
contrast = ISO8601::Time.new('T20:11:10Z')
expect(subject.hash).to eq(contrast.hash)
end
end
end
| 43.423423 | 98 | 0.651867 |
39a9792e0193cc6639c91fe1d351b7e9a3774cf9 | 4,545 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DataFactory::Mgmt::V2018_06_01
module Models
#
# Square Service dataset.
#
class SquareObjectDataset < Dataset
include MsRestAzure
def initialize
@type = "SquareObject"
end
attr_accessor :type
# @return The table name. Type: string (or Expression with resultType
# string).
attr_accessor :table_name
#
# Mapper for SquareObjectDataset class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'SquareObject',
type: {
name: 'Composite',
class_name: 'SquareObjectDataset',
model_properties: {
additional_properties: {
client_side_validation: true,
required: false,
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'ObjectElementType',
type: {
name: 'Object'
}
}
}
},
description: {
client_side_validation: true,
required: false,
serialized_name: 'description',
type: {
name: 'String'
}
},
structure: {
client_side_validation: true,
required: false,
serialized_name: 'structure',
type: {
name: 'Object'
}
},
schema: {
client_side_validation: true,
required: false,
serialized_name: 'schema',
type: {
name: 'Object'
}
},
linked_service_name: {
client_side_validation: true,
required: true,
serialized_name: 'linkedServiceName',
default_value: {},
type: {
name: 'Composite',
class_name: 'LinkedServiceReference'
}
},
parameters: {
client_side_validation: true,
required: false,
serialized_name: 'parameters',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'ParameterSpecificationElementType',
type: {
name: 'Composite',
class_name: 'ParameterSpecification'
}
}
}
},
annotations: {
client_side_validation: true,
required: false,
serialized_name: 'annotations',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'ObjectElementType',
type: {
name: 'Object'
}
}
}
},
folder: {
client_side_validation: true,
required: false,
serialized_name: 'folder',
type: {
name: 'Composite',
class_name: 'DatasetFolder'
}
},
type: {
client_side_validation: true,
required: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
table_name: {
client_side_validation: true,
required: false,
serialized_name: 'typeProperties.tableName',
type: {
name: 'Object'
}
}
}
}
}
end
end
end
end
| 29.705882 | 75 | 0.413201 |
08f5eb178ca453ed6e877eee0e537b14e70918fe | 816 | require('rspec')
require('prime_shifting')
require('pry')
# Each example feeds an upper bound to PrimeNumber#find_prime and expects the
# comma-separated list of primes up to that bound. The previous descriptions
# stated the relationship backwards ("return 9 if number user enter is
# 2,3,5,7"); they now describe input and expected output correctly.
describe('#prime_shifting') do
  it('returns "2" when the user enters 2') do
    prime_finder = PrimeNumber.new()
    expect(prime_finder.find_prime(2)).to(eq("2"))
  end
  it('returns "2,3" when the user enters 3') do
    prime_finder = PrimeNumber.new()
    expect(prime_finder.find_prime(3)).to(eq("2,3"))
  end
  it('returns "2,3,5,7" when the user enters 9') do
    prime_finder = PrimeNumber.new()
    expect(prime_finder.find_prime(9)).to(eq("2,3,5,7"))
  end
  it('returns every prime up to 100 when the user enters 100') do
    prime_finder = PrimeNumber.new()
    expect(prime_finder.find_prime(100)).to(eq("2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97"))
  end
end
| 30.222222 | 121 | 0.671569 |
2100e3c262dcdbcebc716f58af03b014ea8cdf92 | 1,893 | # frozen_string_literal: true
RSpec.shared_examples 'inserted middleware' do |modification|
context ".#{modification}" do
let(:processed_collection) { double('processed_collection') }
let(:collection) { double('collection') }
let(:params) { {} }
def env
@env ||= []
end
subject(:inspector) do
# It writes result of evaluation of previous middleware into
# `result_env` variable accessible in all examples.
result_env = env
lambda do |builder|
builder.use ->(env) { (result_env << env).flatten! }
end
end
subject(:fetch!) do
# rubocop:disable Style/MultilineBlockChain
Class.new(described_class) do
def fetch_middleware
::Middleware::Builder.new
end
def process_params(params)
[params, []]
end
end.tap do |klass|
klass.resource_class = double('resource_class', all: collection)
klass.send(modification, &test_probe)
klass.middleware(&inspector)
end.new(params).call
# rubocop:enable Style/MultilineBlockChain
end
context 'when block returns tuple' do
let(:processed_params) { {} }
let(:test_probe) { ->(_resource, _params) { [processed_collection, processed_params] } }
before { fetch! }
it 'modify resource scope and leave params as is' do
scope, parameters = env
expect(scope).to eq(processed_collection)
expect(parameters).to eq(processed_params)
end
end
context 'when block returns invalid value' do
let(:test_probe) { ->(_resource, _params) { processed_collection } }
it 'modify resource scope and leave params as is' do
expect do
fetch!
end.to raise_error { |e|
expect(e.message).to eq('After block must return tuple of scope and params')
}
end
end
end
end
| 29.123077 | 94 | 0.628104 |
d55651de6be1e82af2bb861f05d697c008c1e5a2 | 4,362 | require 'spec_helper'
require 'action_controller'
RSpec.describe OpenApiAnnotator::PathsBuilder do
describe "#build" do
subject { builder.build }
let(:builder) { described_class.new }
let(:routes) do
[
OpenApiAnnotator::Route.new(
http_verb: "GET",
path: "/api/v1/books",
controller_name: "api/v1/books",
action_name: "index",
),
OpenApiAnnotator::Route.new(
http_verb: "GET",
path: "/api/v1/books/{id}",
controller_name: "api/v1/books",
action_name: "show",
),
]
end
let(:some_path_item) { double(:path_item) }
before do
allow_any_instance_of(OpenApiAnnotator::RoutesFinder).to receive(:find_all).and_return(routes)
allow(builder).to receive(:build_path_item).and_return(some_path_item)
end
it "returns an hash which has a String as key and an OpenApi::Paths as value" do
is_expected.to eq OpenApi::Paths.new(
"/api/v1/books/{id}": some_path_item,
"/api/v1/books": some_path_item,
)
end
end
describe "#build_path_item" do
subject { described_class.new.send(:build_path_item, routes) }
before do
stub_const("Api::BaseController", Class.new(ActionController::Base))
stub_const("Api::V1::BooksController", Class.new(Api::BaseController))
stub_const("Book", Class.new)
config = double(:config)
allow(config).to receive(:application_controller_class).and_return(Api::BaseController)
allow(OpenApiAnnotator).to receive(:config).and_return(config)
allow(Api::V1::BooksController).to receive(:endpoint_hash).and_return(
{
index: OpenApiAnnotator::Endpoint.new([Book]),
show: OpenApiAnnotator::Endpoint.new(Book),
update: OpenApiAnnotator::Endpoint.new(Book),
}
)
end
context "when path has multiple operations" do
let(:routes) do
[
OpenApiAnnotator::Route.new(
http_verb: "GET",
path: "/api/v1/books/{id}",
controller_name: "api/v1/books",
action_name: "show",
),
OpenApiAnnotator::Route.new(
http_verb: "PATCH",
path: "/api/v1/books/{id}",
controller_name: "api/v1/books",
action_name: "update",
),
]
end
it "returns OpenApi::PathItem" do
is_expected.to eq OpenApi::PathItem.new(
"GET": OpenApi::Operation.new(
responses: OpenApi::Responses.new(
"200": OpenApi::Response.new(
description: "Returns a Book",
content: {
"application/json" => OpenApi::MediaType.new(
schema: OpenApi::Reference.new(ref: "#/components/schemas/Book"),
)
}
)
)
),
"PATCH": OpenApi::Operation.new(
responses: OpenApi::Responses.new(
"200": OpenApi::Response.new(
description: "Returns a Book",
content: {
"application/json" => OpenApi::MediaType.new(
schema: OpenApi::Reference.new(ref: "#/components/schemas/Book"),
)
}
)
)
)
)
end
end
context "when media type is array of model" do
let(:routes) do
[
OpenApiAnnotator::Route.new(
http_verb: "GET",
path: "/api/v1/books",
controller_name: "api/v1/books",
action_name: "index",
),
]
end
it "returns OpenApi::PathItem" do
is_expected.to eq OpenApi::PathItem.new(
get: OpenApi::Operation.new(
responses: OpenApi::Responses.new(
"200": OpenApi::Response.new(
description: "Returns an array of Book",
content: {
"application/json" => OpenApi::MediaType.new(
schema: OpenApi::Schema.new(
type: "array",
items: OpenApi::Reference.new(ref: "#/components/schemas/Book"),
)
)
}
)
)
)
)
end
end
end
end
| 30.503497 | 100 | 0.530261 |
f7f99dfac14555daae5ca269e3589e6fa7e0707f | 750 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe SidekiqUniqueJobs::OnConflict::Strategy, redis: :redis do
let(:strategy) { described_class.new(item) }
let(:unique_digest) { 'uniquejobs:56c68cab5038eb57959538866377560d' }
let(:item) do
{ 'unique_digest' => unique_digest, 'queue' => :customqueue }
end
describe '#replace?' do
subject { strategy.replace? }
it { is_expected.to eq(false) }
end
describe '#call' do
let(:call) { strategy.call }
it 'raises an error' do
expect { call }.to raise_error(NotImplementedError, 'needs to be implemented in child class')
end
end
describe '#replace?' do
subject { strategy.replace? }
it { is_expected.to eq(false) }
end
end
| 23.4375 | 99 | 0.678667 |
915e4d1c3fc5068059d1e0fd729acf94ee1cb772 | 1,757 | class Muparser < Formula
desc "C++ math expression parser library"
homepage "http://beltoforion.de/article.php?a=muparser"
url "https://github.com/beltoforion/muparser/archive/v2.2.6.1.tar.gz"
sha256 "d2562853d972b6ddb07af47ce8a1cdeeb8bb3fa9e8da308746de391db67897b3"
head "https://github.com/beltoforion/muparser.git"
bottle do
cellar :any
sha256 "c0feb51e0b10602b323d46f49d898ebb4cb36e00dcee42963d61b6c7ca27c23a" => :mojave
sha256 "611da2016012d77dbe1e5a9c85872cc8f8de23967b019ec039177b49fad2a0d1" => :high_sierra
sha256 "d5d3fd87e54d300578836ed61e066ef08b665050d7986e46ed6995eeee819088" => :sierra
end
def install
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make", "install"
end
test do
(testpath/"test.cpp").write <<~EOS
#include <iostream>
#include "muParser.h"
double MySqr(double a_fVal)
{
return a_fVal*a_fVal;
}
int main(int argc, char* argv[])
{
using namespace mu;
try
{
double fVal = 1;
Parser p;
p.DefineVar("a", &fVal);
p.DefineFun("MySqr", MySqr);
p.SetExpr("MySqr(a)*_pi+min(10,a)");
for (std::size_t a=0; a<100; ++a)
{
fVal = a; // Change value of variable a
std::cout << p.Eval() << std::endl;
}
}
catch (Parser::exception_type &e)
{
std::cout << e.GetMsg() << std::endl;
}
return 0;
}
EOS
system ENV.cxx, "-I#{include}", "-L#{lib}", "-lmuparser",
testpath/"test.cpp", "-o", testpath/"test"
system "./test"
end
end
| 28.803279 | 93 | 0.582242 |
d5827e61c86a9a90702a103ec36b6b35739c8ae1 | 5,101 | require 'rails_helper'
module RemarkGenerators
RSpec.describe FrequencyChecker do
before { create :bank_holiday }
context 'state benefit payments' do
let(:amount) { 123.45 }
let(:dates) { [Date.current, 1.month.ago, 2.months.ago] }
let(:state_benefit) { create :state_benefit }
let(:assessment) { state_benefit.gross_income_summary.assessment }
let(:payment1) { create :state_benefit_payment, state_benefit: state_benefit, amount: amount, payment_date: dates[0] }
let(:payment2) { create :state_benefit_payment, state_benefit: state_benefit, amount: amount, payment_date: dates[1] }
let(:payment3) { create :state_benefit_payment, state_benefit: state_benefit, amount: amount, payment_date: dates[2] }
let(:collection) { [payment1, payment2, payment3] }
context 'regular payments' do
let(:dates) { [Date.current, 1.month.ago, 2.months.ago] }
it 'does not update the remarks class' do
original_remarks = assessment.remarks.as_json
described_class.call(assessment, collection)
expect(assessment.reload.remarks.as_json).to eq original_remarks
end
end
context 'variation in dates' do
let(:dates) { [2.days.ago, 10.days.ago, 55.days.ago] }
it 'adds the remark' do
expect_any_instance_of(Remarks).to receive(:add).with(:state_benefit_payment, :unknown_frequency, collection.map(&:client_id))
described_class.call(assessment, collection)
end
it 'stores the changed the remarks class on the assessment' do
original_remarks = assessment.remarks.as_json
described_class.call(assessment, collection)
expect(assessment.reload.remarks.as_json).not_to eq original_remarks
end
end
end
context 'outgoings' do
let(:disposable_income_summary) { create :disposable_income_summary }
let(:assessment) { disposable_income_summary.assessment }
let(:amount) { 277.67 }
let(:collection) do
[
create(:legal_aid_outgoing, disposable_income_summary: disposable_income_summary, payment_date: dates[0], amount: amount),
create(:legal_aid_outgoing, disposable_income_summary: disposable_income_summary, payment_date: dates[1], amount: amount),
create(:legal_aid_outgoing, disposable_income_summary: disposable_income_summary, payment_date: dates[2], amount: amount)
]
end
context 'regular payments' do
let(:dates) { [Date.current, 1.month.ago, 2.months.ago] }
it 'does not update the remarks class' do
original_remarks = assessment.remarks.as_json
described_class.call(assessment, collection)
expect(assessment.reload.remarks.as_json).to eq original_remarks
end
end
context 'irregular dates' do
let(:dates) { [Date.current, 1.week.ago, 9.weeks.ago] }
it 'adds the remark' do
expect_any_instance_of(Remarks).to receive(:add).with(:outgoings_legal_aid, :unknown_frequency, collection.map(&:client_id))
described_class.call(assessment, collection)
end
it 'stores the changed the remarks class on the assessment' do
original_remarks = assessment.remarks.as_json
described_class.call(assessment, collection)
expect(assessment.reload.remarks.as_json).not_to eq original_remarks
end
context 'when childcare costs with an amount variation are declared' do
let(:collection) do
[
create(:childcare_outgoing, disposable_income_summary: disposable_income_summary, payment_date: dates[0], amount: amount),
create(:childcare_outgoing, disposable_income_summary: disposable_income_summary, payment_date: dates[1], amount: amount + 0.01),
create(:childcare_outgoing, disposable_income_summary: disposable_income_summary, payment_date: dates[2], amount: amount)
]
end
context 'if the childcare costs are allowed as an outgoing' do
before { disposable_income_summary.child_care_bank = 1 }
it 'adds the remark' do
expect_any_instance_of(Remarks).to receive(:add).with(:outgoings_childcare, :unknown_frequency, collection.map(&:client_id))
described_class.call(assessment, collection)
end
it 'stores the changed the remarks class on the assessment' do
original_remarks = assessment.remarks.as_json
described_class.call(assessment, collection)
expect(assessment.reload.remarks.as_json).not_to eq original_remarks
end
end
context 'if the childcare costs are not allowed as an outgoing' do
it 'does not update the remarks class' do
original_remarks = assessment.remarks.as_json
described_class.call(assessment, collection)
expect(assessment.reload.remarks.as_json).to eq original_remarks
end
end
end
end
end
end
end
| 44.745614 | 143 | 0.67575 |
91b0fb5280403fb90aa6ca9c8951638fd3037345 | 393 | connection = ActiveRecord::Base.connection
connection.create_table :taxable_records do |t|
t.string :currency_code
t.decimal :amount
t.decimal :gross_amount
t.decimal :tax_factor
end
class TaxableRecord < ActiveRecord::Base
end
TaxableRecord.create!(currency_code: "GBP", amount: 123.45, gross_amount: 141.09,
tax_factor: 0.142857143)
Object.send(:remove_const, :TaxableRecord)
| 23.117647 | 81 | 0.778626 |
0807c5ff2722997c48ac4f0a9d86ac75d7835d80 | 964 | YARD::Config.load_plugin("sinatra")
YARD::Tags::Library.define_tag("API Doc", :apidoc)
YARD::Tags::Library.define_tag("Description", :description)
YARD::Tags::Library.define_tag("Note", :note)
YARD::Tags::Library.define_tag("Endpoint path", :path)
YARD::Tags::Library.define_tag("Category", :category)
YARD::Tags::Library.define_tag("API example", :example, :with_name)
YARD::Tags::Library.define_tag("HTTP verb", :http)
YARD::Tags::Library.define_tag("Return status", :status, :with_name)
YARD::Tags::Library.define_tag("Required param", :required, :with_types_and_name)
YARD::Tags::Library.define_tag("Optional param", :optional, :with_types_and_name)
module ApiBrowser
  class Parser
    # Parses the given source paths with YARD and returns the documented
    # API endpoints (objects carrying an @apidoc tag), sorted by path.
    def self.parse(paths)
      paths.each { |path| YARD.parse(path) }
      # Only keep registry objects that were tagged with @apidoc.
      documented = YARD::Registry.all.reject { |object| object.tags(:apidoc).empty? }
      documented.map { |object| Endpoint.new(object) }.sort_by { |endpoint| endpoint.path || '' }
    end
  end
end
38df709eb5e67b0fd8b983da1e62cf9908fc8635 | 2,133 | require "interaction/version"
require "interaction/params"
require "interaction/validation_helpers"
module Interaction
# Override Ruby's module inclusion hook to prepend base with #perform,
# extend base with a .perform method, and include Params for Virtus.
#
# @api private
def self.included(base)
base.class_eval do
prepend Perform
extend ClassMethods
include Params
end
end
module Perform
# Executes use case logic
#
# Use cases must implement this method. Assumes success if failure is not
# called.
#
# @since 0.0.1
# @api public
def perform
catch :halt do
super.tap do
success unless result_specified?
end
end
end
end
module ClassMethods
# Executes and returns the use case
#
# A use case object is instantiated with the supplied
# arguments, perform is called and the object is returned.
#
# @param args [*args] Arguments to initialize the use case with
#
# @return [Object] returns the use case object
#
# @since 0.0.1
# @api public
def perform(*args)
new(*args).tap do |use_case|
use_case.perform
end
end
end
# Indicates if the use case was successful
#
# @return [TrueClass, FalseClass]
#
# @since 0.0.1
# @api public
def success?
!!@success
end
# Indicates whether the use case failed
#
# @return [TrueClass, FalseClass]
#
# @since 0.0.1
# @api public
def failed?
!success?
end
private
# Mark the use case as successful.
#
# @return [TrueClass]
#
# @since 0.0.1
# @api public
def success
@success = true
end
# Mark the use case as failed.
#
# @since 0.0.1
# @api public
def failure
@success = false
end
# Mark the use case as failed and exits the use case.
#
# @since 0.0.1
# @api public
def failure!
failure
throw :halt
end
# Indicates whether the use case called success or failure
#
# @return [TrueClass, FalseClass]
#
# @api private
# @since 0.0.1
def result_specified?
defined?(@success)
end
end
| 18.876106 | 77 | 0.630567 |
1c8b59eb65c1a7d318c509def0f7d40cff63cb3a | 1,890 | class CreateUsers < ActiveRecord::Migration
def change
create_table :users do |t|
t.string :username, limit: 100
t.string :realname
t.string :email
t.string :hashed_password
t.string :location
t.string :gamertag
t.string :stylesheet_url
t.text :description
t.boolean :banned, null: false, default: false
t.boolean :admin, null: false, default: false
t.boolean :trusted, null: false, default: false
t.boolean :user_admin, null: false, default: false
t.boolean :moderator, null: false, default: false
t.boolean :notify_on_message, null: false, default: true
t.datetime :last_active
t.date :birthday
t.integer :posts_count, default: 0, null: false
t.references :inviter
t.string :msn
t.string :gtalk
t.string :aim
t.string :twitter
t.string :flickr
t.string :last_fm
t.string :website
t.string :openid_url
t.float :longitude
t.float :latitude
t.timestamps null: false
t.integer :available_invites, null: false, default: 0
t.string :facebook_uid
t.integer :participated_count, null: false, default: 0
t.integer :favorites_count, null: false, default: 0
t.integer :following_count, null: false, default: 0
t.string :time_zone
t.datetime :banned_until
t.string :mobile_stylesheet_url
t.string :theme
t.string :mobile_theme
t.string :instagram
t.string :persistence_token
t.integer :public_posts_count, null: false, default: 0
t.integer :hidden_count, null: false, default: 0
t.string :preferred_format
t.string :sony
t.integer :avatar_id
t.text :previous_usernames
t.string :nintendo
t.string :steam
t.string :battlenet
t.index :last_active
t.index :username
end
end
end
| 31.5 | 62 | 0.650265 |
184cde2c3d23db29736105a4fafbcbff9563baef | 2,879 | require 'bundler'
ENV['RACK_ENV'] = 'test'
Bundler.require(:default, 'test')
Bundler.require(:default, 'development')
require 'rack/test'
require 'rspec'
require "capybara/rspec"
require 'capybara/webkit'
Sinatra::Application.environment = :test
Capybara.app = Autochthon::Web
Capybara.javascript_driver = :webkit
Capybara.default_selector = :xpath
require "autochthon"
# Spec helper mixin exposing the Rack app under test plus JSON-flavoured
# wrappers around the Rack::Test request helpers.
module RSpecMixin
  include Rack::Test::Methods
  # App served to Rack::Test.
  def app
    Autochthon::Web
  end
  # GET with a JSON content type.
  # NOTE(review): +data+ is passed through as query params here, whereas the
  # post/put helpers below serialize the body with to_json — confirm this
  # asymmetry is intentional for GET requests.
  def get_json(path, data = {})
    get path, data, "Content-Type" => "application/json"
  end
  # POST with a JSON-serialized body.
  def post_json(path, data)
    post path, data.to_json, "Content-Type" => "application/json"
  end
  # PUT with a JSON-serialized body.
  def put_json(path, data)
    put path, data.to_json, "Content-Type" => "application/json"
  end
  # DELETE with a JSON content type and an empty body.
  def delete_json(path)
    delete path, {}, "Content-Type" => "application/json"
  end
end
RSpec.configure do |c|
c.include RSpecMixin
c.before(:suite) do
Autochthon.backend = I18n.backend
end
c.before(:each) do
Autochthon.backend.instance_variable_set(:@translations, nil)
end
c.before(:each, with_backend: Autochthon::Backend::ActiveRecord) do
Autochthon.backend = Autochthon::Backend::ActiveRecord.new
unless I18n::Backend::ActiveRecord::Translation.table_exists?
Autochthon::Backend::ActiveRecord::Migration.new.change
end
end
c.before(:all, with_backend: Autochthon::Backend::ActiveRecord) do
ActiveRecord::Base.establish_connection(
adapter: "sqlite3",
database: "local",
dbfile: ":memory:"
)
end
c.after(:each, with_backend: Autochthon::Backend::ActiveRecord) do
I18n::Backend::ActiveRecord::Translation.delete_all
end
c.before(:each, with_backend: Autochthon::Backend::Redis) do
Autochthon.backend = Autochthon::Backend::Redis.new(db: ENV['TEST_REDIS_DB'] || 15)
end
c.after(:each, with_backend: Autochthon::Backend::Redis) do
Autochthon.backend.store.flushdb
end
c.before(:each, with_backend: Autochthon::Backend::Simple) do
Autochthon.backend = Autochthon::Backend::Simple.new
end
end
RSpec.shared_examples_for :fetching_all_translations do
describe "#all" do
subject { Autochthon.backend }
before do
subject.store_translations(:en, {foo: {a: 'bar'}})
subject.store_translations(:en, {baz: {b: 'bar'}})
subject.store_translations(:pl, {foo: 'bar'})
end
it 'returns all translations' do
expect(subject.all).to include(key: :"foo.a", value: "bar", locale: :en)
expect(subject.all).to include(key: :"baz.b", value: "bar", locale: :en)
expect(subject.all).to include(key: :foo, value: "bar", locale: :pl)
end
context 'passing in locales' do
it 'returns translations for the passed locales only' do
expect(subject.all([:pl])).to eq([key: :foo, value: "bar", locale: :pl])
end
end
end
end
| 25.936937 | 87 | 0.686002 |
7a82f2187372a69d695a6ddbd6f82b75608ae9aa | 1,820 | require "rails_helper"
RSpec.describe "managing responses" do
it "lets pcmos track responses" do
pcmo = FactoryGirl.create :pcmo
e1, e2, e3, e4 = 4.times.map { FactoryGirl.create(:response, country: pcmo.country).user.email }
login_as pcmo
visit responses_path
find("tr", text: e1).click_on "Received"
find("tr", text: e2).click_on "Cancel"
find("tr", text: e3).click_on "Reorder"
expect(page).not_to have_content e1
expect(page).not_to have_content e2
expect(page).not_to have_content e3
expect(page).to have_content e4
click_on "Archived"
expect(page).to have_content e1
expect(page).to have_content e2
expect(page).to have_content e3
expect(page).not_to have_content e4
click_on "All"
expect(page).to have_content e1
expect(page).to have_content e2
expect(page).to have_content e3
expect(page).to have_content e4
end
it "lets pcvs flag responses" do
sms = FactoryGirl.create :sms
pcv = sms.user
response = FactoryGirl.create :response, user: pcv
login_as pcv
visit timeline_path
click_on "Flag for follow-up"
expect(flash).to have_content "flagged for follow-up from your PCMO"
end
it "validates responses are present" do
pcmo = FactoryGirl.create :pcmo
pcv = FactoryGirl.create :pcv, country: pcmo.country
order = FactoryGirl.create :order, user: pcv
login_as pcmo
visit new_user_response_path pcv
click_on "Send Response"
expect(flash).to have_content "No response"
end
it "lets pcmos view responses" do
pcmo = FactoryGirl.create :pcmo
response = FactoryGirl.create :response, country: pcmo.country
login_as pcmo
visit response_path response
expect(page).to have_content response.supplies.first.name
end
end
| 27.575758 | 100 | 0.701648 |
3325f757fad464dc90dafcfdf3959b71935cb2c7 | 1,691 | require 'money'
require 'sugar-high/delegate'
require 'money/mongoid/core_ext'
require 'money/orm/generic'
require 'money/mongoid/macros'
require 'money/mongoid/monetizable'
require 'money/mongoid/monetize'
require 'money/orm/generic'
if ::Mongoid::VERSION > '3'
require "money/mongoid/3x/money"
else
require "money/mongoid/2x/money"
end
module Mongoid
module Money
class << self
attr_accessor :default_polymorphic_money
end
end
module Moneys
def self.macros
[:value, :price, :cost]
end
macros.each do |klass|
name = "#{klass}_class"
# define getter
define_singleton_method name do
var_name = :"@#{name}"
unless instance_variable_get(var_name)
instance_variable_set(var_name, klass.to_s.camelize.constantize)
end
instance_variable_get(var_name)
end
end
macros.each do |name|
writ_klass = "#{name}_class"
# define attr writer
self.send(:attr_writer, writ_klass)
end
class << self
def classes= klass
macros.each {|m| send("#{m.to_s.underscore}_class=", klass) }
end
def macro_map
{
:costing => :cost,
:priced_at => :price,
:valued_at => :value
}
end
end
end
end
# Adds global helper methods (costing/priced_at/valued_at) that build an
# instance of the corresponding Moneys-registered class wrapping a Money.
class Object
  Mongoid::Moneys.macro_map.each do |method_name, klass|
    define_method method_name do |amount, currency=nil|
      currency = ::Money::Currency.new(currency || ::Money.default_currency)
      money = Money.new(amount, currency)
      class_name = "#{klass}_class"
      money_klass = Mongoid::Moneys.send(class_name)
      # NOTE(review): the attribute is always :price regardless of which
      # macro class is used — confirm the cost/value classes also expose
      # a :price attribute.
      money_klass.new :price => money
    end
  end
end
| 21.679487 | 76 | 0.641041 |
03942e468fe09b9c10b75ea14e6627cf1fb5e9ce | 1,061 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
Bundler.require(*Rails.groups)
require "shipit"
begin
require "pry"
rescue LoadError
end
module Shipit
  # Rails application shell hosting the shipit engine required above.
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Do not swallow errors in after_commit/after_rollback callbacks.
    # NOTE(review): this option exists only in Rails 4.2/5.0 — confirm against
    # the Rails version this app pins before upgrading.
    config.active_record.raise_in_transactional_callbacks = true
  end
end
| 33.15625 | 99 | 0.731385 |
33bada8c5434cd0dd346ca2defed1eceb8cc89b7 | 506 | require "meuh"
require "cinch"
module Meuh
  # Plugin for Cinch: forwards every channel message to a shared Brain
  # instance and replies with whatever answer the brain yields back.
  class CinchPlugin
    include Cinch::Plugin
    # Subscribe to all channel messages.
    listen_to :channel, method: :on_channel
    # Handles one channel message.
    #
    # msg - Cinch message object (provides user, channel, bot, message text).
    def on_channel(msg)
      # Lazily create a single Brain per plugin instance.
      @brain ||= Brain.new
      options = {
        nickname: msg.user.nick,
        message: msg.message,
        nicknames: msg.channel.users.keys.map(&:nick),
      }
      @brain.botname = msg.bot.nick
      @brain.message(options) do |answer|
        # Short pause before replying — presumably to appear less bot-like;
        # confirm intent before changing.
        sleep(0.2)
        msg.reply answer
      end
    end
  end
end
| 18.071429 | 55 | 0.596838 |
1d197dc47e159528e0a056002d96e4862134c8eb | 655 | require 'rails_helper'
RSpec.describe GlobalDirectMessageReceivedFlag, type: :model do
  let(:instance) { described_class.new }
  describe '#key' do
    subject { instance.key }
    # Key embeds the Rails env, the class name, a TTL-looking 86400 (1 day)
    # and an "any_ids" scope suffix.
    it { is_expected.to eq("#{Rails.env}:GlobalDirectMessageReceivedFlag:86400:any_ids") }
  end
  describe '#cleanup' do
    subject { instance.cleanup }
    it do
      # Default (async) path: cleanup is delegated to a background worker.
      expect(SortedSetCleanupWorker).to receive(:perform_async).with(described_class)
      subject
    end
    context '#sync_mode is called' do
      before { instance.sync_mode }
      it do
        # Sync mode must not enqueue the worker.
        expect(SortedSetCleanupWorker).not_to receive(:perform_async)
        subject
      end
    end
  end
end
| 24.259259 | 90 | 0.691603 |
e9647583a8b5363c7f2bd34711af58eec1001c1c | 1,862 | Dir.glob(File.expand_path(File.join(__FILE__, "..", "data_migrations", "*.rb"))).sort.each do | path |
require path
end
# Data migrations run *every* time the broker starts up, after the schema migrations.
# Their purpose is to ensure that data integrity is maintained during rolling migrations
# in architectures with multiple application instances running against the same
# database (eg. EC2 autoscaling group) where "old" data might be inserted by
# the application instance running the previous version of the code AFTER
# the schema migrations have been run on the first application instance with the
# new version of the code.
module PactBroker
  module DB
    # Runs the idempotent data migrations listed below, in order. As the file
    # header explains, these execute on *every* startup (after the schema
    # migrations) so data written by application instances still running the
    # previous code version is brought up to date.
    class MigrateData
      # database_connection - Sequel database connection.
      # _options            - unused; kept for call-site compatibility.
      def self.call database_connection, _options = {}
        DataMigrations::SetPacticipantIdsForVerifications.call(database_connection)
        DataMigrations::SetConsumerIdsForPactPublications.call(database_connection)
        DataMigrations::SetLatestVersionSequenceValue.call(database_connection)
        DataMigrations::SetWebhooksEnabled.call(database_connection)
        DataMigrations::DeleteDeprecatedWebhookExecutions.call(database_connection)
        DataMigrations::SetCreatedAtForLatestPactPublications.call(database_connection)
        DataMigrations::SetCreatedAtForLatestVerifications.call(database_connection)
        DataMigrations::SetExtraColumnsForTags.call(database_connection)
        DataMigrations::SetPacticipantDisplayName.call(database_connection)
        DataMigrations::SetWebhookUuid.call(database_connection)
        DataMigrations::SetConsumerVersionOrderForPactPublications.call(database_connection)
        # Fix: SetExtraColumnsForTags was previously invoked a second time
        # here; the duplicate call was redundant (these migrations are
        # idempotent and nothing in between touches tags) and was removed.
        DataMigrations::CreateBranches.call(database_connection)
        DataMigrations::MigrateIntegrations.call(database_connection)
      end
    end
  end
end
| 53.2 | 102 | 0.794307 |
ff249fa939c3c32fe13fac5368a160cdded537ba | 13,158 | require_relative './spec_helper'
# Spec stub resource type: pretends the provider exposes three remote
# resources, with "geo_id2" duplicated so specs can exercise the
# ambiguous-match error path.
class GeoEngineer::RemoteResources < GeoEngineer::Resource
  def self._fetch_remote_resources(provider)
    [{ _geo_id: "geo_id1" }, { _geo_id: "geo_id2" }, { _geo_id: "geo_id2" }]
  end
end
describe GeoEngineer::Resource do
let(:env) { GeoEngineer::Environment.new("testing") }
describe '#remote_resource' do
it 'should return a list of resources' do
rem_res = GeoEngineer::RemoteResources.new('rem', 'id') {
_geo_id "geo_id1"
}
norem_res = GeoEngineer::RemoteResources.new('rem', 'id') {
_geo_id "geo_id3"
}
expect(rem_res.remote_resource.nil?).to eq false
expect(norem_res.remote_resource.nil?).to eq true
end
it 'should error if you match more than one' do
rem = GeoEngineer::RemoteResources.new('rem', 'id') {
_geo_id "geo_id2"
}
expect { rem.remote_resource }.to raise_error(StandardError)
end
end
describe '#to_terraform_json' do
it 'should return _terraform_id as primary' do
class GeoEngineer::TFJSON < GeoEngineer::Resource
after :initialize, -> { _terraform_id "tid" }
end
res = GeoEngineer::TFJSON.new('tf_json', 'ididid') {
blue "TRUE"
tags {
not_blue "FALSE"
}
# i.e. s3 bucket multilevel subresources
lifecycle_rule {
expiration {
days 90
}
}
lifecycle_rule {
transition {
days 60
}
}
}
tfjson = res.to_terraform_json
expect(tfjson['blue']).to eq 'TRUE'
expect(tfjson['tags']['not_blue']).to eq 'FALSE'
expect(tfjson['lifecycle_rule'][0]['expiration'][0]['days']).to eq 90
expect(tfjson['lifecycle_rule'][1]['transition'][0]['days']).to eq 60
end
end
describe '#to_terraform_state' do
it 'should return _terraform_id as primary' do
class GeoEngineer::TFState < GeoEngineer::Resource
after :initialize, -> { _terraform_id "tid" }
end
tfs = GeoEngineer::TFState.new('tf_state', 'asd').to_terraform_state
expect(tfs[:type]).to eq 'tf_state'
expect(tfs[:primary][:id]).to eq 'tid'
end
it 'should return _terraform_id as primary' do
class GeoEngineer::TFState < GeoEngineer::Resource
after :initialize, -> { _terraform_id "tid" }
end
tfs = GeoEngineer::TFState.new('tf_state', 'asd').to_terraform_state
expect(tfs[:type]).to eq 'tf_state'
expect(tfs[:primary][:id]).to eq 'tid'
end
end
describe '#fetch_remote_resources' do
class GeoEngineer::FetchableResources < GeoEngineer::Resource
def self._fetch_remote_resources(provider)
[{ _geo_id: "geoid #{provider.id}" }]
end
end
it 'should return a list of resources' do
provider = GeoEngineer::Provider.new("prov_1")
resources = GeoEngineer::FetchableResources.fetch_remote_resources(provider)
expect(resources.length).to eq 1
expect(resources[0]._geo_id).to eq "geoid prov_1"
end
it 'should retrieve different resources for different providers' do
provider1 = GeoEngineer::Provider.new("prov_1")
resources = GeoEngineer::FetchableResources.fetch_remote_resources(provider1)
expect(resources.length).to eq 1
expect(resources[0]._geo_id).to eq "geoid prov_1"
provider2 = GeoEngineer::Provider.new("prov_2")
resources = GeoEngineer::FetchableResources.fetch_remote_resources(provider2)
expect(resources.length).to eq 1
expect(resources[0]._geo_id).to eq "geoid prov_2"
end
end
describe '#_resources_to_ignore' do
it 'lets you ignore certain resources' do
class GeoEngineer::IgnorableResources < GeoEngineer::Resource
def self._fetch_remote_resources(provider)
[{ _geo_id: "geoid1" }, { _geo_id: "geoid2" }, { _geo_id: "anotherid" }, { _geo_id: "otherid" }]
end
def self._resources_to_ignore
["otherid", /^geoid/]
end
end
resources = GeoEngineer::IgnorableResources
.fetch_remote_resources(GeoEngineer::Provider.new('aws'))
expect(resources.length).to eq 1
expect(resources[0]._geo_id).to eq "anotherid"
end
end
describe '#validate_required_subresource' do
it 'should return errors if it does not have a tag' do
class GeoEngineer::HasSRAttrResource < GeoEngineer::Resource
validate -> { validate_required_subresource :tags }
after :initialize, -> { _terraform_id "tid'" }
end
not_blue = GeoEngineer::HasSRAttrResource.new('has_attr', 'id') {}
with_blue = GeoEngineer::HasSRAttrResource.new('has_attr', 'id') {
tags {
blue "True"
}
}
expect(not_blue.errors.length).to eq 1
expect(with_blue.errors.length).to eq 0
end
end
describe '#validate_subresource_required_attributes' do
it 'should return errors if it does not have a tag' do
class GeoEngineer::HasSRAttrResource < GeoEngineer::Resource
validate -> { validate_subresource_required_attributes :tags, [:blue] }
after :initialize, -> { _terraform_id "tid'" }
end
not_blue = GeoEngineer::HasSRAttrResource.new('has_attr', 'id') {
tags {}
}
with_blue = GeoEngineer::HasSRAttrResource.new('has_attr', 'id') {
tags {
blue "True"
}
}
expect(not_blue.errors.length).to eq 1
expect(with_blue.errors.length).to eq 0
end
end
describe '#validate_required_attributes' do
it 'should return errors if it does not have a tag' do
class GeoEngineer::HasAttrResource < GeoEngineer::Resource
validate -> { validate_required_attributes [:blue] }
after :initialize, -> { _terraform_id "tid'" }
end
not_blue = GeoEngineer::HasAttrResource.new('has_attr', 'id')
with_blue = GeoEngineer::HasAttrResource.new('has_attr', 'id') {
blue "True"
}
expect(not_blue.errors.length).to eq 1
expect(with_blue.errors.length).to eq 0
end
end
describe '#validate_has_tag' do
it 'should return errors if it does not have a tag' do
class GeoEngineer::HasTagResource < GeoEngineer::Resource
validate -> { validate_has_tag :blue }
after :initialize, -> { _terraform_id "tid'" }
end
not_blue = GeoEngineer::HasTagResource.new('has_tag', 'id')
with_blue = GeoEngineer::HasTagResource.new('has_tag', 'id') {
tags {
blue "True"
}
}
expect(not_blue.errors.length).to eq 1
expect(with_blue.errors.length).to eq 0
end
end
describe '#validate_tag_merge' do
it 'combines resource and parent tags' do
environment = GeoEngineer::Environment.new('test') {
tags {
a '1'
}
}
project = GeoEngineer::Project.new('org', 'project_name', environment) {
tags {
b '2'
}
}
resource = project.resource('type', '1') {
tags {
c '3'
}
}
resource.merge_parent_tags
expect(resource.tags.attributes).to eq({ 'a' => '1', 'b' => '2', 'c' => '3' })
end
it 'works if just project is present' do
project = GeoEngineer::Project.new('org', 'project_name', nil) {
tags {
a '1'
}
}
resource = project.resource('type', '1') {
tags {
b '2'
}
}
resource.merge_parent_tags
expect(resource.tags.attributes).to eq({ 'a' => '1', 'b' => '2' })
end
it 'works if just environment is present' do
environment = GeoEngineer::Environment.new('test') {
tags {
a '1'
}
}
resource = environment.resource('type', '1') {
tags {
b '2'
}
}
resource.merge_parent_tags
expect(resource.tags.attributes).to eq({ 'a' => '1', 'b' => '2' })
end
it 'uses priority: resource > project > environment' do
environment = GeoEngineer::Environment.new('test') {
tags {
a '1'
}
}
project = GeoEngineer::Project.new('org', 'project_name', environment) {
tags {
a '2'
b '1'
}
}
resource = project.resource('type', '1') {
tags {
a '3'
b '2'
c '1'
}
}
resource.merge_parent_tags
expect(resource.tags.attributes).to eq({ 'a' => '3', 'b' => '2', 'c' => '1' })
end
it 'returns project tags if there are no resource tags' do
project = GeoEngineer::Project.new('org', 'project_name', env) {
tags {
a '1'
b '2'
}
}
resource = project.resource('type', '1') {}
resource.merge_parent_tags
expect(resource.tags.attributes).to eq({ 'a' => '1', 'b' => '2' })
end
it 'returns resource tags if there are no project tags' do
project = GeoEngineer::Project.new('org', 'project_name', env) {}
resource = project.resource('type', '1') {
tags {
c '3'
d '4'
}
}
resource.merge_parent_tags
expect(resource.tags.attributes).to eq({ 'c' => '3', 'd' => '4' })
end
end
describe '#reset' do
let(:subject) do
GeoEngineer::RemoteResources.new('resource', 'id') {
tags {
Name "foo"
}
_geo_id -> { tags['Name'] }
}
end
it 'resets lazily computed attributes' do
expect(subject._geo_id).to eq('foo')
subject.tags['Name'] = 'bar'
subject.reset
expect(subject._geo_id).to eq('bar')
end
it 'resets remote resource' do
expect(subject.remote_resource).to be_nil
subject.tags['Name'] = "geo_id1"
subject.reset
expect(subject.remote_resource).to_not be_nil
end
end
describe '#duplicate' do
let!(:project) do
GeoEngineer::Project.new('org', 'project_name', nil) {
tags {
a '1'
}
}
end
let!(:resource_class) do
class GeoEngineer::Resources::Derp < GeoEngineer::Resource
validate -> { validate_has_tag(:Name) }
after :initialize, -> {
_terraform_id -> { NullObject.maybe(remote_resource)._terraform_id }
}
after :initialize, -> { _geo_id -> { NullObject.maybe(tags)[:Name] } }
after :initialize, -> { _number -> { NullObject.maybe(_geo_id)[-1] } }
def self._fetch_remote_resources(provider)
[
{ _geo_id: "geo_id1", _terraform_id: "t1 baby!" },
{ _geo_id: "geo_id2", _terraform_id: "t who?" }
]
end
end
end
let(:subject) do
project.resource('derp', 'id') {
tags {
Name "geo_id1"
}
}
end
it 'copies over attributes and subresources' do
copy = subject.duplicate('duplicate')
# We haven't changed anything, so it should all match
expect(copy.type).to eq(subject.type)
expect(copy._geo_id).to eq(subject._geo_id)
expect(copy._terraform_id).to eq(subject._terraform_id)
expect(copy._number).to eq(subject._number)
expect(copy.tags["Name"]).to eq(subject.tags["Name"])
end
it 'handles procs appropriately' do
copy = subject.duplicate('duplicate')
copy.tags["Name"] = "geo_id2"
expect(copy.type).to eq(subject.type)
expect(copy._geo_id).to_not eq(subject._geo_id)
expect(copy._terraform_id).to_not eq(subject._terraform_id)
expect(copy._number).to_not eq(subject._number)
expect(copy._number).to eq("2")
end
end
describe 'class method' do
describe('#type_from_class_name') do
it 'should return resource' do
expect(GeoEngineer::Resource.type_from_class_name).to eq 'resource'
end
it 'should remove module' do
class GeoEngineer::ResourceType < GeoEngineer::Resource
end
expect(GeoEngineer::ResourceType.type_from_class_name).to eq 'resource_type'
end
end
end
describe '#_deep_symbolize_keys' do
let(:simple_obj) { JSON.parse({ foo: "bar", baz: "qux" }.to_json) }
let(:complex_obj) do
JSON.parse(
{
foo: {
bar: {
baz: [
{ qux: "quack" }
]
}
},
bar: [
{ foo: "bar" },
nil,
[{ baz: "qux" }],
1,
"baz"
]
}.to_json
)
end
it "converts top level keys to symbols" do
expect(simple_obj.keys.include?(:foo)).to eq(false)
expect(simple_obj.keys.include?("foo")).to eq(true)
converted = described_class._deep_symbolize_keys(simple_obj)
expect(converted.keys.include?(:foo)).to eq(true)
expect(converted.keys.include?("foo")).to eq(false)
end
it "converts deeply nested keys to symbols" do
converted = described_class._deep_symbolize_keys(complex_obj)
expect(converted[:foo][:bar][:baz].first[:qux]).to eq("quack")
expect(converted[:bar].first[:foo]).to eq("bar")
end
end
end
| 30.178899 | 106 | 0.592719 |
6109ee2425148521798ffe6cdd351a0e9968abd3 | 896 | $:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "third_rail/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = "third_rail"
s.version = ThirdRail::VERSION
s.authors = ["TODO: Your name"]
s.email = ["TODO: Your email"]
s.homepage = "TODO"
s.summary = "TODO: Summary of ThirdRail."
s.description = "TODO: Description of ThirdRail."
s.license = "MIT"
s.files = Dir["{app,config,db,lib}/**/*", "MIT-LICENSE", "Rakefile", "README.rdoc"]
s.test_files = Dir["test/**/*"]
s.add_dependency "rails", "~> 4.1.0"
s.add_dependency "volt_rails_wip"
s.add_dependency "volt-sockjs-rails-wip"
s.add_dependency 'volt-bootstrap'
s.add_dependency 'volt-bootstrap-jumbotron-theme'
s.add_dependency 'volt-user-templates'
s.add_development_dependency "sqlite3"
end
| 30.896552 | 85 | 0.671875 |
b9c962fac27f104cffcdc5f353fa422a8a310b1f | 1,120 | cask '[email protected]' do
version '2017.4.12f1,b582b87345b1'
sha256 :no_check
url "https://download.unity3d.com/download_unity/b582b87345b1/MacEditorTargetInstaller/UnitySetup-WebGL-Support-for-Editor-2017.4.12f1.pkg"
name 'WebGL Build Support'
homepage 'https://unity3d.com/unity/'
pkg 'UnitySetup-WebGL-Support-for-Editor-2017.4.12f1.pkg'
depends_on cask: '[email protected]'
preflight do
if File.exist? "/Applications/Unity"
FileUtils.move "/Applications/Unity", "/Applications/Unity.temp"
end
if File.exist? "/Applications/Unity-2017.4.12f1"
FileUtils.move "/Applications/Unity-2017.4.12f1", '/Applications/Unity'
end
end
postflight do
if File.exist? '/Applications/Unity'
FileUtils.move '/Applications/Unity', "/Applications/Unity-2017.4.12f1"
end
if File.exist? '/Applications/Unity.temp'
FileUtils.move '/Applications/Unity.temp', '/Applications/Unity'
end
end
uninstall quit: 'com.unity3d.UnityEditor5.x',
delete: '/Applications/Unity-2017.4.12f1/PlaybackEngines/WebGLSupport'
end
| 31.111111 | 141 | 0.716964 |
e9d8afe6684243545b48a4d1f7a57f9f171a35b0 | 827 | # encoding: utf-8
class SimpleTemplater
module ArgvParsingMixin
def parse!
self.inject(Hash.new) do |options, argument|
case argument
when /^--no-([^=]+)$/ # --no-git-repository
options[$1.gsub("-", "_").to_sym] = false
when /^--([^=]+)$/ # --git-repository
options[$1.gsub("-", "_").to_sym] = true
when /^--([^=]+)=([^,]+)$/ # --controller=post
key, value = $1, $2
options[key.gsub("-", "_").to_sym] = value.dup
when /^--([^=]+)=(.+)$/ # --controllers=posts,comments
key, value = $1, $2
options[key.gsub("-", "_").to_sym] = value.split(",")
else
raise "Parsing failed on: #{argument}"
end
options
end
end
end
end
ARGV.extend(SimpleTemplater::ArgvParsingMixin)
| 29.535714 | 65 | 0.500605 |
e87bd9c2a89baaad74fe7e8670d713f1a07dd314 | 1,212 | require 'spec_helper'
describe Taxem::BoundaryReaderZipFour do
before(:all) do
#TODO: This is wayyyy slow. Find a way to spped this up.
# we probably don't need to eat the whole file for this.
path_to_csv = boundary_data
@boundary_reader = Taxem::BoundaryReaderZipFour.new(path_to_csv)
end
subject { @boundary_reader }
it { should respond_to :boundaries }
its(:records_in_file) { should == 591393 }
its(:record_count) { should == 76871 }
describe 'boundaries' do
subject { @boundary_reader.boundaries }
it { should have_at_least(1).items }
it { should have(76871).items }
it 'has valid boundary objects' do
subject.each do |boundary|
boundary.record_type.should == '4'
boundary.beginning_effective_date.should <= Date.today
boundary.ending_effective_date.should >= Date.today
end
end
it 'has zip codes' do
subject.each do |boundary|
boundary.zip_extension_low.should_not == ''
boundary.zip_extension_high.should_not == ''
end
end
it 'has no composite SER code' do
subject.each do |boundary|
boundary.composite_ser_code.should == ""
end
end
end
end
| 24.734694 | 68 | 0.669142 |
1c107c0a131f7d1fe34a55aa16c9da2af9afc43b | 4,175 | # encoding: utf-8
# copyright: 2018, eNFence GmbH
title 'java_advisory_20180220'
control 'CVE-2018-2599' do
impact 0.48
title 'An unspecified vulnerability related to the Java SE JNDI component could allow an unauthenticated attacker to cause no confidentiality impact, low integrity impact, and low availability impact.'
desc 'Vulnerability in the Java SE, Java SE Embedded, JRockit component
of Oracle Java SE (subcomponent: JNDI). Supported versions that are
affected are Java SE: 6u171, 7u161, 8u152 and 9.0.1; Java SE Embedded:
8u151; JRockit: R28.3.16. Difficult to exploit vulnerability allows
unauthenticated attacker with network access via multiple protocols to
compromise Java SE, Java SE Embedded, JRockit. Successful attacks of this
vulnerability can result in unauthorized update, insert or delete access to
some of Java SE, Java SE Embedded, JRockit accessible data and unauthorized
ability to cause a partial denial of service (partial DOS) of Java SE, Java
SE Embedded, JRockit. Note: This vulnerability applies to client and server
deployment of Java. This vulnerability can be exploited through sandboxed Java
Web Start applications and sandboxed Java applets. It can also be exploited
by supplying data to APIs in the specified Component without using sandboxed
Java Web Start applications or sandboxed Java applets, such as through a web
service.
'
tag 'java', 'jre', 'java 8'
tag :score, '4.8'
ref 'http://www-01.ibm.com/support/docview.wss?uid=swg1IJ04044'
only_if do
os.aix?
end
describe command('/usr/bin/lslpp -Lqc Java6_64.sdk') do
its('stdout') { should_not match(/:6\.0\.0\.[0-5]/) }
its('stdout') { should_not match(/:6\.0\.0\.6[0-5]/) }
end
describe command('/usr/bin/lslpp -Lqc Java6.sdk') do
its('stdout') { should_not match(/:6\.0\.0\.[0-5]/) }
its('stdout') { should_not match(/:6\.0\.0\.6[0-5]/) }
end
describe command('/usr/bin/lslpp -Lqc Java7_64.jre') do
its('stdout') { should_not match(/:7\.0\.0\.[0-5]/) }
its('stdout') { should_not match(/:7\.0\.0\.6[0-1]/) }
end
describe command('/usr/bin/lslpp -Lqc Java7_64.sdk') do
its('stdout') { should_not match(/:7\.0\.0\.[0-5]/) }
its('stdout') { should_not match(/:7\.0\.0\.6[0-1]/) }
end
describe command('/usr/bin/lslpp -Lqc Java7.jre') do
its('stdout') { should_not match(/:7\.0\.0\.[0-5]/) }
its('stdout') { should_not match(/:7\.0\.0\.6[0-1]/) }
end
describe command('/usr/bin/lslpp -Lqc Java7.sdk') do
its('stdout') { should_not match(/:7\.0\.0\.[0-5]/) }
its('stdout') { should_not match(/:7\.0\.0\.6[0-1]/) }
end
describe command('/usr/bin/lslpp -Lqc Java71_64.jre') do
its('stdout') { should_not match(/:7\.1\.0\.[0-3]/) }
its('stdout') { should_not match(/:7\.1\.0\.4[0-1]/) }
end
describe command('/usr/bin/lslpp -Lqc Java71_64.sdk') do
its('stdout') { should_not match(/:7\.1\.0\.[0-3]/) }
its('stdout') { should_not match(/:7\.1\.0\.4[0-1]/) }
end
describe command('/usr/bin/lslpp -Lqc Java71.jre') do
its('stdout') { should_not match(/:7\.1\.0\.[0-3]/) }
its('stdout') { should_not match(/:7\.1\.0\.4[0-1]/) }
end
describe command('/usr/bin/lslpp -Lqc Java71.sdk') do
its('stdout') { should_not match(/:7\.1\.0\.[0-3]/) }
its('stdout') { should_not match(/:7\.1\.0\.4[0-1]/) }
end
describe command('/usr/bin/lslpp -Lqc Java8_64.jre') do
its('stdout') { should_not match(/:8\.0\.0\.[0-4]/) }
its('stdout') { should_not match(/:8\.0\.0\.50/) }
end
describe command('/usr/bin/lslpp -Lqc Java8_64.sdk') do
its('stdout') { should_not match(/:8\.0\.0\.[0-4]/) }
its('stdout') { should_not match(/:8\.0\.0\.50/) }
end
describe command('/usr/bin/lslpp -Lqc Java8.jre') do
its('stdout') { should_not match(/:8\.0\.0\.[0-4]/) }
its('stdout') { should_not match(/:8\.0\.0\.50/) }
end
describe command('/usr/bin/lslpp -Lqc Java8.sdk') do
its('stdout') { should_not match(/:8\.0\.0\.[0-4]/) }
its('stdout') { should_not match(/:8\.0\.0\.50/) }
end
end
| 40.144231 | 203 | 0.633054 |
33acdfc91cc55b2626cb9360cd3e0af7946b6052 | 1,234 | lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'twitter/version'
Gem::Specification.new do |spec|
spec.add_dependency 'addressable', '~> 2.3'
spec.add_dependency 'buftok', '~> 0.2.0'
spec.add_dependency 'equalizer', '~> 0.0.11'
spec.add_dependency 'http', '~> 4.0'
spec.add_dependency 'http-form_data', '~> 2.0'
spec.add_dependency 'http_parser.rb', '~> 0.6.0'
spec.add_dependency 'memoizable', '~> 0.4.0'
spec.add_dependency 'multipart-post', '~> 2.0'
spec.add_dependency 'naught', '~> 1.0'
spec.add_dependency 'simple_oauth', '~> 0.3.0'
spec.add_development_dependency 'bundler', '~> 1.0'
spec.authors = ['Erik Michaels-Ober', 'John Nunemaker', 'Wynn Netherland', 'Steve Richert', 'Steve Agalloco']
spec.description = 'A Ruby interface to the Twitter API.'
spec.email = %w[[email protected]]
spec.files = %w[.yardopts CHANGELOG.md CONTRIBUTING.md LICENSE.md README.md twitter.gemspec] + Dir['lib/**/*.rb']
spec.homepage = 'http://sferik.github.com/twitter/'
spec.licenses = %w[MIT]
spec.name = 'twitter'
spec.require_paths = %w[lib]
spec.required_ruby_version = '>= 2.3'
spec.summary = spec.description
spec.version = '6.2.0.1'
end
| 42.551724 | 115 | 0.688817 |
6a439f60bef408793125dd344e620bff33e9649e | 48 | #SimpleNavigationAcl::Base.contexts += %w(admin) | 48 | 48 | 0.770833 |
ab004c3a0365658f6b32b305cd846e7679ea2dd5 | 869 | module Fabric
module Webhooks
class InvoiceCreated
include Fabric::Webhook
def call(event)
check_idempotence(event) or return if Fabric.config.store_events
stripe_invoice = retrieve_resource(
'invoice', event['data']['object']['id']
)
return if stripe_invoice.nil?
handle(event, stripe_invoice)
persist_model(stripe_invoice) if Fabric.config.persist?(:invoice)
end
def persist_model(stripe_invoice)
customer = retrieve_local(:customer, stripe_invoice.customer)
return unless customer
invoice = Fabric::Invoice.new(customer: customer)
invoice.sync_with(stripe_invoice)
saved = invoice.save
Fabric.config.logger.info "InvoiceCreated: Created invoice: "\
"#{invoice.stripe_id} saved: #{saved}"
end
end
end
end
| 28.966667 | 73 | 0.657077 |
6a8ee613131ec5897ba6c0101dd5ce750935ade7 | 76 | class Award < ApplicationRecord
belongs_to :goal
belongs_to :runner
end
| 15.2 | 31 | 0.789474 |
abd7a1d091489d3dbf136a3d4723d92f91445816 | 464 | class User < ActiveRecord::Base
# Include default devise modules. Others available are:
# :confirmable, :lockable, :timeoutable and :omniauthable
devise :database_authenticatable, :recoverable,
:registerable, :rememberable, :timeoutable,
:trackable, :validatable
# Setup accessible (or protected) attributes for your model
# attr_accessible :email, :password, :password_confirmation, :remember_me
# attr_accessible :title, :body
end
| 38.666667 | 75 | 0.74569 |
392180b0d56ced960c26134c8745d81ded865e3c | 5,924 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe Service do
subject { build(:service) }
it { should validate_presence_of(:uri) }
it { should validate_uniqueness_of(:uri) }
it 'auto-validate built service' do
build(:service).valid?
end
# NOTE: default uniqueness test seams to not work properly for PSQL Tables
it 'validates uri_aliases uniqueness' do
s1 = create(:service)
service = build(:service, uri_aliases: s1.uri_aliases)
expect(service).to_not be_valid
end
it 'doesn\'t allow to add service with URI already defined as alias' do
s1 = create(:service)
service = build(:service, uri: s1.uri_aliases.first)
expect(service).to_not be_valid
end
it 'doesn\'t allow to add service with alias already defined as URI' do
s1 = create(:service)
service = build(:service, uri_aliases: [s1.uri])
expect(service).to_not be_valid
end
it 'doesn\'t allow to add service with same URI and alias' do
service = build(:service, uri: 'https://my.service.pl', uri_aliases: ['https://my.service.pl'])
expect(service).to_not be_valid
end
it 'doesn\'t allow to add service with URI equal to one of the alias' do
service = build(:service)
service.uri = service.uri_aliases.first
expect(service).to_not be_valid
end
it { should have_many(:resources).dependent(:destroy) }
it { should have_many(:service_ownerships).dependent(:destroy) }
it 'creates unique token' do
expect(create(:service).token).to_not be_nil
end
it 'validates correct uri format' do
service = build(:service, uri: 'wrong$%^uri')
expect(service).to_not be_valid
end
it 'validates correct uri_aliases format' do
service = build(:service, uri_aliases: ['wrong$%^uri'])
expect(service).to_not be_valid
end
it 'doesn\'t allow to create second service with higher uri' do
create(:service, uri: 'https://my.service.pl/my/service')
service = build(:service, uri: 'https://my.service.pl')
expect(service).to_not be_valid
end
it 'doesn\'t allow to create second service with lower uri' do
create(:service, uri: 'https://my.service.pl/my/service')
service = build(:service, uri: 'https://my.service.pl/my/service/1')
expect(service).to_not be_valid
end
# Intentionally used similar URI to test false positives on lower uri test
it 'allow to create second service with equal-level uri' do
create(:service, uri: 'https://my.service.pl/my/service')
service = build(:service, uri: 'https://my.service.pl/my/service1')
expect(service).to be_valid
end
it 'allow to create second service with longer TLD' do
create(:service, uri: 'https://my.service.co')
service = build(:service, uri: 'https://my.service.com')
expect(service).to be_valid
end
it 'allow to create second service with shorter TLD' do
create(:service, uri: 'https://my.service.com')
service = build(:service, uri: 'https://my.service.co')
expect(service).to be_valid
end
it 'doesn\'t allow to create second service with higher uri as alias' do
create(:service, uri: 'https://my.service.pl/my/service')
service = build(:service, uri_aliases: ['https://my.service.pl'])
expect(service).to_not be_valid
end
it 'doesn\'t allow to create second service with higher uri_alias' do
create(:service, uri_aliases: ['https://my.service.pl/my/service'])
service = build(:service, uri: 'https://my.service.pl')
expect(service).to_not be_valid
end
it 'doesn\'t allow to create second service with lower uri as alias' do
create(:service, uri: 'https://my.service.pl/my/service')
service = build(:service, uri_aliases: ['https://my.service.pl/my/service/1'])
expect(service).to_not be_valid
end
it 'doesn\'t allow to create second service with lower uri_alias' do
create(:service, uri_aliases: ['https://my.service.pl/my/service'])
service = build(:service, uri: 'https://my.service.pl/my/service/1')
expect(service).to_not be_valid
end
it 'allow to create second service with equal uri as alias' do
create(:service, uri: 'https://my.service.pl/1')
service = build(:service, uri_aliases: ['https://my.service.pl/2'])
expect(service).to be_valid
end
it 'allow to create second service with equal uri_alias' do
create(:service, uri_aliases: ['https://my.service.pl/1'])
service = build(:service, uri: 'https://my.service.pl/2')
expect(service).to be_valid
end
it 'allow to create second service with longer TLD as alias' do
create(:service, uri: 'https://my.service.co')
service = build(:service, uri_aliases: ['https://my.service.com'])
expect(service).to be_valid
end
it 'allow to create second service with longer TLD in uri_alias' do
create(:service, uri_aliases: ['https://my.service.co'])
service = build(:service, uri: 'https://my.service.com')
expect(service).to be_valid
end
it 'allow to create second service with shorter TLD as alias' do
create(:service, uri: 'https://my.service.com')
service = build(:service, uri_aliases: ['https://my.service.co'])
expect(service).to be_valid
end
it 'allow to create second service with shorter TLD in uri_alias' do
create(:service, uri_aliases: ['https://my.service.com'])
service = build(:service, uri: 'https://my.service.co')
expect(service).to be_valid
end
it 'allows to update a service without failing URI validation' do
expect { create(:service).save! }.not_to raise_error
end
context 'service uri ends with slash' do
let(:slash_service) { build(:service, uri: 'http://host.pl/') }
it 'is invalid' do
expect(slash_service).to_not be_valid
end
it 'has a proper error message' do
slash_service.save
expect(slash_service.errors.messages).to eq(uri: ['Service URI cannot end with a slash'])
end
end
end
| 31.015707 | 99 | 0.691087 |
6a748a092d65545692c0deb040d7f3e1b7453f47 | 2,328 | # frozen_string_literal: true
RSpec.describe RuboCop::Cop::Rails::ORMAbstraction do
subject(:cop) { described_class.new(config) }
let(:config) { RuboCop::Config.new }
it 'registers an offense when breaking ORM abstraction in a where clause' do
expect_offense(<<-RUBY.strip_indent)
users.where(baz: 'cow', role_id: role.id, foo: 'bar')
^^^^^^^^^^^^^^^^ prefer `role: role`.
RUBY
end
it 'registers an offense when breaking ORM abstraction in a create clause' do
expect_offense(<<-RUBY.strip_indent)
users.create(baz: 'cow', role_id: role.id, foo: 'bar')
^^^^^^^^^^^^^^^^ prefer `role: role`.
RUBY
end
it 'registers an offense when breaking ORM abstraction in a new clause' do
expect_offense(<<-RUBY.strip_indent)
users.new(baz: 'cow', role_id: role.id, foo: 'bar')
^^^^^^^^^^^^^^^^ prefer `role: role`.
RUBY
end
it 'registers an offense when breaking ORM abstraction using string interpolation' do
expect_offense(<<-RUBY.strip_indent)
users.new(baz: 'cow', "\#{foo}_id" => role.id, foo: 'bar')
^^^^^^^^^^^^^^^^^^^^^^ prefer `"\#{foo}" => role`.
RUBY
end
it 'registers an offense when chaining methods on the receiver' do
expect_offense(<<-RUBY.strip_indent)
users.new(baz: 'cow', role_id: user2.role.id, foo: 'bar')
^^^^^^^^^^^^^^^^^^^^^^ prefer `role: user2.role`.
RUBY
end
it 'does not register an offense otherwise' do
expect_no_offenses(<<-RUBY.strip_indent)
users.where(
baz_id: baz_id,
role: role,
foo_id: params[:foo_id],
foo_uuid: uuid
)
RUBY
end
it 'autocorrect `role_id: role.id` to `role: role`' do
source = "users.where(baz_id: cow.id, role_id: role.id, foo: 'bar')"
autocorrect = "users.where(baz: cow, role: role, foo: 'bar')"
expect(autocorrect_source(source)).to eq(autocorrect)
end
it 'autocorrect `role_id => role.id` to `role => role`' do
source = "users.where('baz_id' => cow.id, \"x_\#{role}_id\" => role.id, 'foo' => 'bar')"
autocorrect = "users.where('baz' => cow, \"x_\#{role}\" => role, 'foo' => 'bar')"
expect(autocorrect_source(source)).to eq(autocorrect)
end
end
| 34.235294 | 92 | 0.585481 |
module Xiki
  # Live-compiles Sass and injects the resulting CSS into the current
  # Firefox page as an inline <style> tag.
  class Sass
    # Input is read from ENV['txt'] (Xiki convention); *args is accepted
    # for menu-dispatch compatibility but unused.
    def self.menu *args
      source = ENV['txt']
      # The external `sass` CLI reads from a fixed temp path.
      File.open("/tmp/tmp.sass", "w") { |file| file << source }
      compiled = `sass /tmp/tmp.sass`
      # Escape newlines and double quotes so the CSS survives being
      # embedded in the JavaScript string literal below.
      compiled = compiled.gsub("\n", '\n').gsub('"', '\"')
      Firefox.exec "$('head').append(\"<style>#{compiled}</style>\")"
      "@flash/- Loaded in browser!"
    end
  end
end
| 19.722222 | 58 | 0.48169 |
2679e0ec17c8612e8561ee2c808a4cc5fb0ee9d6 | 116 | json.extract! @near_place, :id, :kiosk, :disco, :beach, :national_park, :other, :place_id, :created_at, :updated_at
| 58 | 115 | 0.724138 |
b91185eab29c13c26e4e1bd6827fa3445d1f611d | 1,026 | module ArJdbc
module QuotedPrimaryKeyExtension
def self.extended(base)
# Rails 3 method Rails 2 method
meth = [:arel_attributes_values, :attributes_with_quotes].detect do |m|
base.private_instance_methods.include?(m.to_s)
end
pk_hash_key = "self.class.primary_key"
pk_hash_value = '"?"'
if meth == :arel_attributes_values
pk_hash_key = "self.class.arel_table[#{pk_hash_key}]"
pk_hash_value = "Arel::SqlLiteral.new(#{pk_hash_value})"
end
if meth
base.module_eval <<-PK, __FILE__, __LINE__
alias :#{meth}_pre_pk :#{meth}
def #{meth}(include_primary_key = true, *args) #:nodoc:
aq = #{meth}_pre_pk(include_primary_key, *args)
if connection.is_a?(ArJdbc::Oracle) || connection.is_a?(ArJdbc::Mimer)
aq[#{pk_hash_key}] = #{pk_hash_value} if include_primary_key && aq[#{pk_hash_key}].nil?
end
aq
end
PK
end
end
end
end
| 35.37931 | 101 | 0.601365 |
3914da48c7712112663956e22b09fc6ff8e3d731 | 265 | define_upgrade do
if Partybus.config.bootstrap_server
must_be_data_master
# 1. cookbook artifacts
# 2. adds last update tracking to keys table.
run_sqitch('@2.9.0', 'oc_erchef')
end
end
define_check do
check_sqitch('@2.9.0', 'oc_erchef')
end
| 20.384615 | 49 | 0.709434 |
module Tunefind
  # API wrapper for the /show endpoints.
  class Show < Base
    # Lists shows; +offset+ and +updated+ are passed through as query
    # parameters (blank when nil, matching the API's defaults).
    def self.search(offset: nil, updated: nil)
      path = "show?offset=#{offset}&updated=#{updated}"
      payload = get(path)
      build_collection(payload['shows'])
    end

    # Fetches a single show by its identifier.
    def self.find(id)
      payload = get("show/#{id}")
      build_object(payload)
    end
  end
end
| 20.916667 | 80 | 0.633466 |
6ae1a09c0af2fbc85ffe63482c3e83fc25dea983 | 3,507 | name "tile"
description "Role applied to all tile servers"
default_attributes(
:accounts => {
:users => {
:tile => {
:members => [:jburgess, :tomh]
}
}
},
:apache => {
:mpm => "event",
:timeout => 60,
:event => {
:server_limit => 60,
:max_request_workers => 1200,
:threads_per_child => 20,
:min_spare_threads => 300,
:max_spare_threads => 1200,
:max_connections_per_child => 0,
:async_request_worker_factor => 4,
:listen_cores_buckets_ratio => 6
}
},
:munin => {
:plugins => {
:renderd_processed => {
:graph_order => "reqPrio req reqLow dirty reqBulk dropped",
:reqPrio => { :draw => "AREA" },
:req => { :draw => "STACK" }
}
}
},
:postgresql => {
:settings => {
:defaults => {
:max_connections => "250",
:temp_buffers => "32MB",
:work_mem => "128MB",
:wal_buffers => "1024kB",
:wal_writer_delay => "500ms",
:commit_delay => "10000",
:checkpoint_segments => "60",
:max_wal_size => "2880MB",
:random_page_cost => "1.1",
:jit => "off",
:track_activity_query_size => "16384",
:autovacuum_vacuum_scale_factor => "0.05",
:autovacuum_analyze_scale_factor => "0.02"
}
}
},
:ssl => {
:ct_report_uri => false
},
:sysctl => {
:sockets => {
:comment => "Increase size of connection queue",
:parameters => {
"net.core.somaxconn" => 10000
}
},
:kernel_scheduler_tune => {
:comment => "Tune kernel scheduler preempt",
:parameters => {
"kernel.sched_min_granularity_ns" => 10000000,
"kernel.sched_wakeup_granularity_ns" => 15000000
}
}
},
:tile => {
:database => {
:style_file => "/srv/tile.openstreetmap.org/styles/default/openstreetmap-carto.style",
:tag_transform_script => "/srv/tile.openstreetmap.org/styles/default/openstreetmap-carto.lua"
},
:data => {
:simplified_land_polygons => {
:url => "https://osmdata.openstreetmap.de/download/simplified-land-polygons-complete-3857.zip",
:refresh => true
},
:simplified_water_polygons => {
:url => "https://osmdata.openstreetmap.de/download/simplified-water-polygons-split-3857.zip",
:refresh => true
},
:admin_boundaries => {
:url => "http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/110m/cultural/ne_110m_admin_0_boundary_lines_land.zip",
:directory => "ne_110m_admin_0_boundary_lines_land"
},
:land_polygons => {
:url => "https://osmdata.openstreetmap.de/download/land-polygons-split-3857.zip",
:refresh => true
},
:water_polygons => {
:url => "https://osmdata.openstreetmap.de/download/water-polygons-split-3857.zip",
:refresh => true
},
:antarctica_icesheet_polygons => {
:url => "https://osmdata.openstreetmap.de/download/antarctica-icesheet-polygons-3857.zip",
:refresh => true
},
:antarctica_icesheet_outlines => {
:url => "https://osmdata.openstreetmap.de/download/antarctica-icesheet-outlines-3857.zip",
:refresh => true
}
},
:styles => {
:default => {
:repository => "https://github.com/gravitystorm/openstreetmap-carto.git",
:revision => "v5.0.0",
:max_zoom => 19
}
}
}
)
run_list(
"recipe[tile]"
)
| 29.225 | 144 | 0.569718 |
083df8c92cd795d1b816417491948bc31ee6d5f7 | 178 | class ChangeVisitMonthToDate < ActiveRecord::Migration
def up
change_column :visits, :month, :date
end
def down
change_column :visits, :month, :datetime
end
end
| 17.8 | 54 | 0.724719 |
083967ca3bf99be56b87c71b2977c87e137141c5 | 661 | require 'rails'
module Tolk
class Engine < Rails::Engine
isolate_namespace Tolk
initializer :assets do |app|
app.config.assets.precompile += ['tolk/libraries.js']
end
# We need one of the two pagination engines loaded by this point.
# We don't care which one, just one of them will do.
begin
require 'kaminari'
rescue LoadError
begin
require 'will_paginate'
rescue LoadError
puts "Please add the kaminari or will_paginate gem to your application's Gemfile."
puts "The Tolk engine needs either kaminari or will_paginate in order to paginate."
exit
end
end
end
end
| 25.423077 | 90 | 0.67171 |
629eeac1d6ba6160b3377b1b498b04e67d6fc4e5 | 247 | module BrokerModelConcerns
class Engine < ::Rails::Engine
isolate_namespace BrokerModelConcerns
config.generators do |g|
g.test_framework :rspec
g.fixture_replacement :factory_girl, :dir => 'spec/factories'
end
end
end
| 24.7 | 67 | 0.724696 |
module UsersHelper
  # Returns an <img> tag for the given user's Gravatar.
  #
  # The avatar is addressed by the MD5 digest of the user's downcased
  # email; options[:size] selects the requested pixel size (default 80).
  def gravatar_for(user, options = { size: 80 })
    digest = Digest::MD5::hexdigest(user.email.downcase)
    url = "https://secure.gravatar.com/avatar/#{digest}?s=#{options[:size]}"
    image_tag(url, alt: user.name, class: "gravatar")
  end
end
| 34.272727 | 80 | 0.702918 |
390c66017f4cbc15c478c3c569a07173aa902c78 | 2,712 | class Xxh < Formula
include Language::Python::Virtualenv
desc "Bring your favorite shell wherever you go through the ssh"
homepage "https://github.com/xxh/xxh"
url "https://files.pythonhosted.org/packages/ad/7f/effcf114577d392f270831ab36e5833d071ebf1f494dd5acb7cf3953f5fd/xxh-xxh-0.8.4.tar.gz"
sha256 "cf6cd0a55bb8befc09b701cf3df226d1e837758bf95110f335359355b4deb96e"
license "BSD-2-Clause"
livecheck do
url :stable
end
bottle do
cellar :any_skip_relocation
sha256 "2363936330ef4377578a60a1735fe1740c4849cba53f0b698b3c7247b09d470a" => :catalina
sha256 "ac8f21bed8844e190488e83d76f08a350c8efb7b724d3f55c9bca47ddd975e03" => :mojave
sha256 "23398378567de59e4a45b37d394467c7980b9db12f3c83ea63ff8da1c0f44c50" => :high_sierra
end
depends_on "[email protected]"
resource "pexpect" do
url "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz"
sha256 "fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"
end
resource "ptyprocess" do
url "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz"
sha256 "923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0"
end
resource "PyYAML" do
url "https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz"
sha256 "b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"
end
def install
virtualenv_install_with_resources
end
test do
assert_match version.to_s, shell_output("#{bin}/xxh --version")
(testpath/"config.xxhc").write <<~EOS
hosts:
test.localhost:
-o: HostName=127.0.0.1
+s: xxh-shell-zsh
EOS
begin
port = free_port
server = TCPServer.new(port)
server_pid = fork do
msg = server.accept.gets
server.close
assert_match "SSH", msg
end
stdout, stderr, = Open3.capture3(
"#{bin}/xxh", "test.localhost",
"-p", port.to_s,
"+xc", "#{testpath}/config.xxhc",
"+v"
)
argv = stdout.lines.grep(/^Final arguments list:/).first.split(":").second
args = JSON.parse argv.tr("'", "\"")
assert_include args, "xxh-shell-zsh"
ssh_argv = stderr.lines.grep(/^ssh arguments:/).first.split(":").second
ssh_args = JSON.parse ssh_argv.tr("'", "\"")
assert_include ssh_args, "Port=#{port}"
assert_include ssh_args, "HostName=127.0.0.1"
assert_match "Connection closed by remote host", stderr
ensure
Process.kill("TERM", server_pid)
end
end
end
| 33.481481 | 140 | 0.716814 |
bb328e5fbc45397930f3b9c05e57dadf8eccd223 | 4,268 | # == Schema Information
#
# Table name: listing_images
#
# id :integer not null, primary key
# listing_id :integer
# created_at :datetime
# updated_at :datetime
# image_file_name :string(255)
# image_content_type :string(255)
# image_file_size :integer
# image_updated_at :datetime
# image_processing :boolean
# image_downloaded :boolean default(FALSE)
# error :string(255)
# width :integer
# height :integer
# author_id :string(255)
#
# Indexes
#
# index_listing_images_on_listing_id (listing_id)
#
require 'spec_helper'
describe ListingImage do
it "is valid without image" do
@listing_image = ListingImage.new()
@listing_image.should be_valid
end
it "is valid when a valid image file is added" do
@listing_image = ListingImage.new(:image => uploaded_file("Bison_skull_pile.png", "image/png"))
@listing_image.should be_valid
end
it "is not valid when an invalid file is added" do
@listing_image = ListingImage.new(:image => uploaded_file("i_am_not_image.txt", "text/plain"))
@listing_image.should_not be_valid
end
it "detects right sized image for given aspect ratio" do
aspect_ratio = 3/2.to_f
expect(ListingImage.correct_size?(200, 400, aspect_ratio)).to eql(false)
# Edges
expect(ListingImage.correct_size?(599, 400, aspect_ratio)).to eql(false)
expect(ListingImage.correct_size?(600, 400, aspect_ratio)).to eql(true)
expect(ListingImage.correct_size?(601, 400, aspect_ratio)).to eql(false)
expect(ListingImage.correct_size?(800, 400, aspect_ratio)).to eql(false)
end
it "detects too narrow dimensions for given aspect ratio" do
aspect_ratio = 3/2.to_f
expect(ListingImage.too_narrow?(200, 400, aspect_ratio)).to eql(true)
# Edges
expect(ListingImage.too_narrow?(599, 400, aspect_ratio)).to eql(true)
expect(ListingImage.too_narrow?(600, 400, aspect_ratio)).to eql(false)
expect(ListingImage.too_narrow?(601, 400, aspect_ratio)).to eql(false)
expect(ListingImage.too_narrow?(800, 400, aspect_ratio)).to eql(false)
end
it "detects too wide dimensions for given aspect ratio" do
aspect_ratio = 3/2.to_f
expect(ListingImage.too_wide?(200, 400, aspect_ratio)).to eql(false)
# Edges
expect(ListingImage.too_wide?(599, 400, aspect_ratio)).to eql(false)
expect(ListingImage.too_wide?(600, 400, aspect_ratio)).to eql(false)
expect(ListingImage.too_wide?(601, 400, aspect_ratio)).to eql(true)
expect(ListingImage.too_wide?(800, 400, aspect_ratio)).to eql(true)
end
it "scales image to cover given area, preserves aspect ratio" do
def test(width, height, expected_width, expected_height)
expect(ListingImage.scale_to_cover({:width => width, :height => height}, {:width => 600, :height => 400})).to eql({:width => expected_width, :height => expected_height })
end
test(300, 100, 1200.0, 400.0)
test(300, 200, 600.0, 400.0)
test(300, 400, 600.0, 800.0)
test(300, 800, 600.0, 1600.0)
test(150, 400, 600.0, 1600.0)
test(300, 400, 600.0, 800.0)
test(600, 400, 600.0, 400.0)
test(1200, 400, 1200.0, 400.0)
test(2448, 3264, 600.0, 800.0)
end
it "returns image styles, crops landscape big images if needed" do
def test(width, height, expected)
expect(ListingImage.construct_big_style({:width => width, :height => height}, {:width => 600, :height => 400}, 0.2)).to eq expected
end
test(479, 400, "600x400>") # Width crop 0%, height crop 20% and a little bit more
test(480, 400, "600x400#") # Width crop 0%, height crop 20%
test(600, 400, "600x400#") # Width crop 0%, height crop 0%
test(750, 400, "600x400#") # Width crop 20%, height crop 0%
test(751, 400, "600x400>") # Width crop 20% and a little bit more, height crop 0%
test(600, 319, "600x400>") # Width crop 20% and a little bit more, height crop 0%
test(600, 320, "600x400#") # Width crop 20%, height crop 0%
test(600, 400, "600x400#") # Width crop 0%, height crop 0%
test(600, 500, "600x400#") # Width crop 0%, height crop 20%
test(600, 501, "600x400>") # Width crop 0%, height crop 20% and a little bit more
end
end
| 36.793103 | 176 | 0.67104 |
18d87a9b0483b1f85f3ba62285c8ddb046fbba0a | 1,756 | require File.expand_path('../boot', __FILE__)
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Aungsayin
class Application < Rails::Application
config.generators do |g|
g.test_framework :rspec,
fixtures: true,
view_specs: false,
helper_specs: false,
routing_specs: false,
controller_specs: false,
request_specs: false
g.fixture_replacement :factory_girl, dir: "spec/factories"
end
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
config.assets.initialize_on_precompile = false
end
end
| 35.836735 | 99 | 0.730638 |
bf16fa1242dc03c970b2fd2030abea9ce3e38059 | 2,059 | # Copyright 2017 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Board
  # Row letters and column numbers of the 10x10 grid.
  LETTERS = ('A'..'J').to_a
  NUMBERS = ('1'..'10').to_a

  attr_accessor :height, :width

  def initialize
    self.height = 10
    self.width = 10
    # Maps a cell value (e.g. a ship marker) to the list of locations it
    # occupies; auto-vivifies one array per value.
    @board = Hash.new { |h, k| h[k] = [] }
  end

  # True when +location+ names a cell on the board ("A1".."J10").
  # NOTE: the pattern is deliberately unanchored, preserving the original
  # behavior of accepting a valid coordinate anywhere in the string.
  def in_range? location
    m = /([A-J])(\d+)/.match(location)
    return false unless m
    # Reuse the first MatchData instead of re-running the regex.
    letter, number = m.captures
    LETTERS.include?(letter) && NUMBERS.include?(number)
  end

  # Returns the value occupying +location+, or "." for an empty cell.
  def [] location
    entry = @board.find { |_value, locations| locations.include?(location) }
    entry ? entry.first : "."
  end

  # Records that +value+ occupies +location+.
  def []= location, value
    @board[value] << location
  end

  # True when some value occupies +location+.
  # Fixed: the original tested `location.include?(location)` (always true
  # for a non-empty string), so hit? reported a hit for ANY location once
  # any cell on the board was occupied.
  def hit? location
    @board.values.any? { |locations| locations.include?(location) }
  end

  # True when nothing occupies +location+.
  def empty? location
    @board.values.none? { |locations| locations.include?(location) }
  end
  alias_method :miss?, :empty?

  # Renders the grid: a column-header row, then one row per letter with
  # the first character of each occupying value (or ".").
  def to_s
    inverted = Hash.new(".")
    @board.each do |ship, locations|
      locations.each { |l| inverted[l] = ship }
    end
    str = " 1234567890\n"
    str += LETTERS.map { |l|
      l + NUMBERS.map { |n| inverted["#{l}#{n}"][0] }.join
    }.join("\n")
    str << "\n"
    str
  end

  # Marshal support: dump as [value, locations] pairs.
  def marshal_dump
    @board.to_a
  end

  def marshal_load data
    @board = Hash[data]
    # Hash[] drops the default block; restore auto-vivification.
    @board.default_proc = proc { |h, k| h[k] = [] }
  end
end
| 22.139785 | 74 | 0.625061 |
87fe85bffffc8042929fed7a3cd3820c458598ce | 792 | require 'rails_helper'
RSpec.describe "bio" do
before(:each) do
FactoryGirl.create(:image)
visit "/"
find("li > a#bio_link").click
end
it "populates the content section with bio content", :js => true do
expect(page).to have_content("BIO")
expect(page).to have_content("IF ONE CAN LIKEN THE COMBINATION OF LUSH SYNTH TONES AND CATCHY POP MELODIES TO A BABY, THE SOUND OF HTHNS CAN BE DESCRIBED AS SAID BABY PUNCHING YOU IN THE FACE. HTHNS IS AN ELECTRONIC ROCK DUO BASED OUT OF NASHVILLE, TN. FORMED IN 2014, THE DUO ASSIMILATES ELEMENTS PREVALENT IN MODERN COMMERCIAL MUSIC WITH DARK, FUZZY, AND QUIRKY TONES, PRESENTING THE IDEAL POP MUSIC EXPERIENCE FOR LISTENERS WHO DON'T LIKE POP MUSIC. THE DEBUT EP, GRTST HTS, IS SET FOR RELEASE IN MAY 2015.")
end
end | 56.571429 | 515 | 0.741162 |
4a8cde859cfa1880635eb2cc59a4c2fb9106a5d9 | 908 |
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "method_pattern/version"
Gem::Specification.new do |spec|
spec.name = "method_pattern"
spec.version = MethodPattern::VERSION
spec.authors = ["Jamie Gaskins"]
spec.email = ["[email protected]"]
spec.summary = %q{Pattern matching for methods}
spec.homepage = "https://github.com/jgaskins/method_pattern"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.16"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
end
| 33.62963 | 74 | 0.643172 |
class TodayService
  # Fetches today's football matches from the football-data.org v2 API.
  #
  # query - raw URL query string, appended verbatim to the endpoint.
  #
  # Returns the parsed "matches" array on success, or the string
  # "ERRORS" when the HTTP response is unsuccessful.
  def self.matches_today(query)
    url = "https://api.football-data.org/v2/matches?#{query}"
    resp = Faraday.get url do |req|
      # The API authenticates via a token header read from the environment.
      req.headers['X-Auth-Token'] = ENV['AUTH_TOKEN']
    end
    body = JSON.parse(resp.body)
    # Use a local instead of @response: assigning an ivar inside a class
    # method stores stale state on the class object between calls.
    resp.success? ? body["matches"] : "ERRORS"
  end
end
module Podling
  # Shared base class for all of the application's mailers.
  class ApplicationMailer < ActionMailer::Base
    # Default sender address for outgoing mail.
    default from: '[email protected]'
    # Wrap every message body in the shared mailer layout.
    layout 'mailer'
  end
end
| 18.428571 | 46 | 0.728682 |
389bd818162d43f71c3cb2eaa365ac1ae5aedbdc | 1,096 | #!/usr/bin/env ruby
require 'rex/post/meterpreter/extensions/espia/tlv'
module Rex
module Post
module Meterpreter
module Extensions
module Espia
###
#
# This meterpreter extensions interface that is capable
# grab webcam frame and recor mic audio
#
###
class Espia < Extension
def initialize(client)
super(client, 'espia')
client.register_extension_aliases(
[
{
'name' => 'espia',
'ext' => self
},
])
end
def espia_video_get_dev_image()
request = Packet.create_request('espia_video_get_dev_image')
response = client.send_request(request)
return true
end
def espia_audio_get_dev_audio(rsecs)
request = Packet.create_request('espia_audio_get_dev_audio')
request.add_tlv(TLV_TYPE_DEV_RECTIME, rsecs)
response = client.send_request(request)
return true
end
def espia_image_get_dev_screen
request = Packet.create_request('espia_image_get_dev_screen')
response = client.send_request(request)
response.get_tlv_value(TLV_TYPE_DEV_SCREEN)
end
end
end; end; end; end; end
| 19.571429 | 67 | 0.706204 |
b99a7d9ba932363de2cf5d3e8c83357f248f1d1a | 1,510 | module RetinaTag
module TagHelper
def self.included(base)
base.module_eval do
alias_method :image_tag_without_retina, :image_tag
alias_method :image_tag, :image_tag_with_retina
end
end
def image_tag_with_retina(source, options={})
hidpi_asset_path = nil
src = options[:src] = path_to_image(source)
begin
retina_els = source.split('.')
extension = retina_els.last
retina_els.slice!(-1)
retina_path = "#{retina_els.join('.')}@2x.#{extension}"
retina_asset_present = if Rails.application.assets.present?
Rails.application.assets.find_asset(retina_path).present?
else
Rails.application.assets_manifest.files.values.any? { |asset| asset['logical_path'] == retina_path }
end
if retina_asset_present
hidpi_asset_path = asset_path(retina_path)
end
rescue
end
options_default = { 'data-hidpi-src' => hidpi_asset_path }
if lazy = options.delete(:lazy)
options['data-lazy-load'] = lazy
end
options_default.merge!(options)
if options_default[:'data-lazy-load']
options_default['data-lowdpi-src'] = options_default.delete(:src)
end
image_tag_without_retina(source, options_default)
end
end
class Engine < ::Rails::Engine
initializer :retina_tag_image_tag do
ActionView::Helpers::AssetTagHelper.module_eval do
include TagHelper
end
end
end
end
| 26.964286 | 110 | 0.65298 |
879c5ab59bb4e8178823a67a62f2a911db055426 | 940 | require 'karel_testcase'
class TestControlFlow < KarelTestCase
def test_define
WORLD <<END
K W
END
DEFINE('TURNRIGHT') {
ITERATE(3.TIMES) {
TURNLEFT()
}
}
TURNRIGHT()
MOVE()
assert [0,1],THE_WORLD.karel
end
def test_good_names
DEFINE('A9A') {}
DEFINE('A_A') {}
DEFINE('A_______A') {}
A9A()
A_A()
A_______A()
end
def test_bad_names
WORLD <<END
K W
END
assert_raises BadSubroutine do
DEFINE('turn_right') { }
end
assert_raises BadSubroutine do
DEFINE('A') { }
end
assert_raises BadSubroutine do
DEFINE('9A') { }
end
assert_raises BadSubroutine do
DEFINE('A9') { }
end
end
def test_method_missing_still_behaves
WORLD <<END
K W
END
assert_raises NoMethodError do
BLAH()
end
assert_raises NoMethodError do
BLAH(1,2,3)
end
end
end
| 14.6875 | 39 | 0.582979 |
620bd0a56bd29a8fd2b52280f7ac2a5cf8266788 | 3,788 | require 'spec_helper'
require 'rack/test'
require 'split/dashboard'
describe Split::Dashboard do
include Rack::Test::Methods
def app
@app ||= Split::Dashboard
end
def link(color)
Split::Alternative.new(color, experiment.name)
end
let(:experiment) {
Split::Experiment.find_or_create("link_color", "blue", "red")
}
let(:experiment_with_goals) {
Split::Experiment.find_or_create({"link_color" => ["goal_1", "goal_2"]}, "blue", "red")
}
let(:red_link) { link("red") }
let(:blue_link) { link("blue") }
it "should respond to /" do
get '/'
last_response.should be_ok
end
context "start experiment manually" do
before do
Split.configuration.start_manually = true
end
context "experiment without goals" do
it "should display a Start button" do
experiment
get '/'
last_response.body.should include('Start')
post "/start/#{experiment.name}"
get '/'
last_response.body.should include('Reset Data')
end
end
context "with goals" do
it "should display a Start button" do
experiment_with_goals
get '/'
last_response.body.should include('Start')
post "/start/#{experiment.name}"
get '/'
last_response.body.should include('Reset Data')
end
end
end
describe "index page" do
context "with winner" do
before { experiment.winner = 'red' }
it "displays `Reopen Experiment` button" do
get '/'
expect(last_response.body).to include('Reopen Experiment')
end
end
context "without winner" do
it "should not display `Reopen Experiment` button" do
get '/'
expect(last_response.body).to_not include('Reopen Experiment')
end
end
end
describe "reopen experiment" do
before { experiment.winner = 'red' }
it 'redirects' do
post "/reopen/#{experiment.name}"
expect(last_response).to be_redirect
end
it "removes winner" do
post "/reopen/#{experiment.name}"
expect(experiment).to_not have_winner
end
it "keeps existing stats" do
red_link.participant_count = 5
blue_link.participant_count = 7
experiment.winner = 'blue'
post "/reopen/#{experiment.name}"
expect(red_link.participant_count).to eq(5)
expect(blue_link.participant_count).to eq(7)
end
end
it "should reset an experiment" do
red_link.participant_count = 5
blue_link.participant_count = 7
experiment.winner = 'blue'
post "/reset/#{experiment.name}"
last_response.should be_redirect
new_red_count = red_link.participant_count
new_blue_count = blue_link.participant_count
new_blue_count.should eql(0)
new_red_count.should eql(0)
experiment.winner.should be_nil
end
it "should delete an experiment" do
delete "/#{experiment.name}"
last_response.should be_redirect
Split::Experiment.find(experiment.name).should be_nil
end
it "should mark an alternative as the winner" do
experiment.winner.should be_nil
post "/#{experiment.name}", :alternative => 'red'
last_response.should be_redirect
experiment.winner.name.should eql('red')
end
it "should display the start date" do
experiment_start_time = Time.parse('2011-07-07')
Time.stub(:now => experiment_start_time)
experiment
get '/'
last_response.body.should include('<small>2011-07-07</small>')
end
it "should handle experiments without a start date" do
experiment_start_time = Time.parse('2011-07-07')
Time.stub(:now => experiment_start_time)
Split.redis.hdel(:experiment_start_times, experiment.name)
get '/'
last_response.body.should include('<small>Unknown</small>')
end
end
| 23.675 | 91 | 0.661563 |
b9d50ecdc4919716edba7eed63d44ebb84db4648 | 727 | cask 'toggldesktop-dev' do
version '7.4.28'
sha256 '5f6a41982790b81bc6266dd6a3758c3d828aeab9ab64599ac6c645ed09792c0f'
# github.com/toggl/toggldesktop was verified as official when first introduced to the cask
url "https://github.com/toggl/toggldesktop/releases/download/v#{version}/TogglDesktop-#{version.dots_to_underscores}.dmg"
appcast 'https://assets.toggl.com/installers/darwin_dev_appcast.xml',
checkpoint: 'd6f5130a83bbc877d9a78d2ef871430e662e814f3393bd84a697f59ff665710b'
name 'TogglDesktop'
homepage 'https://www.toggl.com/'
conflicts_with cask: [
'toggldesktop',
'toggldesktop-beta',
]
app 'TogglDesktop.app'
end
| 38.263158 | 123 | 0.707015 |
# mundi_api
#
# This file was automatically generated by APIMATIC v2.0
# ( https://apimatic.io ).
module MundiApi
  # Pagination metadata returned alongside list endpoints.
  class PagingResponse < BaseModel
    # Total number of pages
    # @return [Integer]
    attr_accessor :total

    # URL of the previous page
    # @return [String]
    attr_accessor :previous

    # URL of the next page ("next" in the API payload; renamed to avoid
    # shadowing in generated code)
    # @return [String]
    attr_accessor :mnext

    # A mapping from model property names to API property names.
    def self.names
      @_hash ||= {
        'total' => 'total',
        'previous' => 'previous',
        'mnext' => 'next'
      }
    end

    def initialize(total = nil, previous = nil, mnext = nil)
      @total = total
      @previous = previous
      @mnext = mnext
    end

    # Creates an instance of the object from a hash; nil-safe.
    def self.from_hash(hash)
      return nil unless hash
      new(hash['total'], hash['previous'], hash['next'])
    end
  end
end
| 24.12963 | 65 | 0.561013 |
21e2e21e9b3c168602f1a535e92b465bb384e85d | 9,382 | # frozen_string_literal: true
class CommitStatus < Ci::ApplicationRecord
include Ci::HasStatus
include Importable
include AfterCommitQueue
include Presentable
include EnumWithNil
include BulkInsertableAssociations
include TaggableQueries
self.table_name = 'ci_builds'
belongs_to :user
belongs_to :project
belongs_to :pipeline, class_name: 'Ci::Pipeline', foreign_key: :commit_id
belongs_to :auto_canceled_by, class_name: 'Ci::Pipeline'
has_many :needs, class_name: 'Ci::BuildNeed', foreign_key: :build_id, inverse_of: :build
enum scheduling_type: { stage: 0, dag: 1 }, _prefix: true
delegate :commit, to: :pipeline
delegate :sha, :short_sha, :before_sha, to: :pipeline
validates :pipeline, presence: true, unless: :importing?
validates :name, presence: true, unless: :importing?
alias_attribute :author, :user
alias_attribute :pipeline_id, :commit_id
scope :failed_but_allowed, -> do
where(allow_failure: true, status: [:failed, :canceled])
end
scope :order_id_desc, -> { order('ci_builds.id DESC') }
scope :exclude_ignored, -> do
# We want to ignore failed but allowed to fail jobs.
#
# TODO, we also skip ignored optional manual actions.
where("allow_failure = ? OR status IN (?)",
false, all_state_names - [:failed, :canceled, :manual])
end
scope :latest, -> { where(retried: [false, nil]) }
scope :retried, -> { where(retried: true) }
scope :ordered, -> { order(:name) }
scope :ordered_by_stage, -> { order(stage_idx: :asc) }
scope :latest_ordered, -> { latest.ordered.includes(project: :namespace) }
scope :retried_ordered, -> { retried.order(name: :asc, id: :desc).includes(project: :namespace) }
scope :ordered_by_pipeline, -> { order(pipeline_id: :asc) }
scope :before_stage, -> (index) { where('stage_idx < ?', index) }
scope :for_stage, -> (index) { where(stage_idx: index) }
scope :after_stage, -> (index) { where('stage_idx > ?', index) }
scope :for_project, -> (project_id) { where(project_id: project_id) }
scope :for_ref, -> (ref) { where(ref: ref) }
scope :by_name, -> (name) { where(name: name) }
scope :in_pipelines, ->(pipelines) { where(pipeline: pipelines) }
scope :with_pipeline, -> { joins(:pipeline) }
scope :updated_at_before, ->(date) { where('ci_builds.updated_at < ?', date) }
scope :created_at_before, ->(date) { where('ci_builds.created_at < ?', date) }
scope :scheduled_at_before, ->(date) {
where('ci_builds.scheduled_at IS NOT NULL AND ci_builds.scheduled_at < ?', date)
}
# The scope applies `pluck` to split the queries. Use with care.
scope :for_project_paths, -> (paths) do
# Pluck is used to split this query. Splitting the query is required for database decomposition for `ci_*` tables.
# https://docs.gitlab.com/ee/development/database/transaction_guidelines.html#database-decomposition-and-sharding
project_ids = Project.where_full_path_in(Array(paths)).pluck(:id)
for_project(project_ids)
end
scope :with_preloads, -> do
preload(:project, :user)
end
scope :with_project_preload, -> do
preload(project: :namespace)
end
scope :match_id_and_lock_version, -> (items) do
# it expects that items are an array of attributes to match
# each hash needs to have `id` and `lock_version`
or_conditions = items.inject(none) do |relation, item|
match = CommitStatus.default_scoped.where(item.slice(:id, :lock_version))
relation.or(match)
end
merge(or_conditions)
end
# We use `Enums::Ci::CommitStatus.failure_reasons` here so that EE can more easily
# extend this `Hash` with new values.
enum_with_nil failure_reason: Enums::Ci::CommitStatus.failure_reasons
default_value_for :retried, false
##
# We still create some CommitStatuses outside of CreatePipelineService.
#
# These are pages deployments and external statuses.
#
before_create unless: :importing? do
# rubocop: disable CodeReuse/ServiceClass
Ci::EnsureStageService.new(project, user).execute(self) do |stage|
self.run_after_commit { StageUpdateWorker.perform_async(stage.id) }
end
# rubocop: enable CodeReuse/ServiceClass
end
before_save if: :status_changed?, unless: :importing? do
# we mark `processed` as always changed:
# another process might change its value and our object
# will not be refreshed to pick the change
self.processed_will_change!
if latest?
self.processed = false # force refresh of all dependent ones
elsif retried?
self.processed = true # retried are considered to be already processed
end
end
state_machine :status do
event :process do
transition [:skipped, :manual] => :created
end
event :enqueue do
# A CommitStatus will never have prerequisites, but this event
# is shared by Ci::Build, which cannot progress unless prerequisites
# are satisfied.
transition [:created, :skipped, :manual, :scheduled] => :pending, if: :all_met_to_become_pending?
end
event :run do
transition pending: :running
end
event :skip do
transition [:created, :waiting_for_resource, :preparing, :pending] => :skipped
end
event :drop do
transition [:created, :waiting_for_resource, :preparing, :pending, :running, :manual, :scheduled] => :failed
end
event :success do
transition [:created, :waiting_for_resource, :preparing, :pending, :running] => :success
end
event :cancel do
transition [:created, :waiting_for_resource, :preparing, :pending, :running, :manual, :scheduled] => :canceled
end
before_transition [:created, :waiting_for_resource, :preparing, :skipped, :manual, :scheduled] => :pending do |commit_status|
commit_status.queued_at = Time.current
end
before_transition [:created, :preparing, :pending] => :running do |commit_status|
commit_status.started_at = Time.current
end
before_transition any => [:success, :failed, :canceled] do |commit_status|
commit_status.finished_at = Time.current
end
before_transition any => :failed do |commit_status, transition|
reason = ::Gitlab::Ci::Build::Status::Reason
.fabricate(commit_status, transition.args.first)
commit_status.failure_reason = reason.failure_reason_enum
commit_status.allow_failure = true if reason.force_allow_failure?
end
before_transition [:skipped, :manual] => :created do |commit_status, transition|
transition.args.first.try do |user|
commit_status.user = user
end
end
after_transition do |commit_status, transition|
next if transition.loopback?
next if commit_status.processed?
next unless commit_status.project
last_arg = transition.args.last
transition_options = last_arg.is_a?(Hash) && last_arg.extractable_options? ? last_arg : {}
commit_status.run_after_commit do
PipelineProcessWorker.perform_async(pipeline_id) unless transition_options[:skip_pipeline_processing]
expire_etag_cache!
end
end
after_transition any => :failed do |commit_status|
commit_status.run_after_commit do
::Gitlab::Ci::Pipeline::Metrics.job_failure_reason_counter.increment(reason: commit_status.failure_reason)
end
end
end
# Scope helper selecting only the +name+ column.
# NOTE(review): returns an ActiveRecord::Relation, not an array of strings.
def self.names
  select(:name)
end
# Bulk-marks every record in the current scope as processed.
def self.update_as_processed!
  # Marks items as processed
  # we do not increase `lock_version`, as we are the one
  # holding given lock_version (Optimistic Locking)
  update_all(processed: true)
end
# Disables optimistic locking at the class level; whether locking applies
# to an individual record is decided by the instance-level #locking_enabled?.
def self.locking_enabled?
  false
end
# Inserts tag rows for the given statuses in one bulk operation.
#
# @param statuses [Array] statuses whose tags should be inserted
def self.bulk_insert_tags!(statuses)
  Gitlab::Ci::Tags::BulkInsert.new(statuses).insert!
end
# Use optimistic locking only when the status column is about to change,
# avoiding lock_version churn for unrelated attribute updates.
def locking_enabled?
  will_save_change_to_status?
end
# Human-readable group label for this status.
#
# Strips trailing parallelization markers from +name+ so sibling jobs such
# as "rspec 1/3" and "rspec 2/3" collapse into a single group ("rspec").
# Two suffix shapes are recognised, optionally repeated, each preceded by
# whitespace/colon separators:
#   * a bracketed matrix tag, e.g. "[ruby, gemfile]"
#   * a numeric pair, e.g. "1/3" or "0:2"
#
# NOTE: `\b` inside a character class matches a backspace character, not a
# word boundary — kept as-is for compatibility with the original pattern.
#
# @return [String] the name with any trailing suffixes removed and
#   surrounding whitespace stripped
def group_name
  suffix = %r{([\b\s:]+((\[.*\])|(\d+[\s:\/\\]+\d+)))+\s*\z}
  name.to_s.sub(suffix, '').strip
end
# Whether this status ended unsuccessfully but is tolerated because the
# job was configured with allow_failure. Canceled jobs count as failures
# here as well.
def failed_but_allowed?
  return false unless allow_failure?

  failed? || canceled?
end
# Time spent running.
#
# @return duration between started_at and finished_at as computed by
#   +calculate_duration+ (defined elsewhere; presumably returns nil when a
#   timestamp is missing — confirm)
def duration
  calculate_duration(started_at, finished_at)
end
# Time spent in the pending state.
#
# @return duration between queued_at and started_at as computed by
#   +calculate_duration+ (defined elsewhere)
def queued_duration
  calculate_duration(queued_at, started_at)
end
# A status is the latest attempt for its name unless it has been retried.
def latest?
  !retried?
end
# --- capability defaults -------------------------------------------------
# Conservative base implementations; presumably overridden by concrete
# status subclasses (e.g. a build class) — confirm against subclasses.

# Can this status be started manually? Default: no.
def playable?
  false
end

# Can this status be retried? Default: no.
def retryable?
  false
end

# Can this status be canceled? Default: no.
def cancelable?
  false
end

# Has this status been archived? Default: no.
def archived?
  false
end

# Is this status stuck (e.g. waiting without progress)? Default: no.
def stuck?
  false
end

# Does this status have an execution trace/log? Default: no.
def has_trace?
  false
end

# Are all preconditions met for transitioning to pending? Default: yes.
def all_met_to_become_pending?
  true
end
# True when this status was canceled automatically (auto_canceled_by_id
# is set) rather than canceled by a user.
def auto_canceled?
  canceled? && auto_canceled_by_id?
end
# Builds the detailed, permission-aware status object for the given user.
#
# @param current_user [User] user the status details are rendered for
# @return the status object fabricated by Gitlab::Ci::Status::Factory
def detailed_status(current_user)
  Gitlab::Ci::Status::Factory
    .new(self, current_user)
    .fabricate!
end
# Natural-sort key for the status name: digit runs are converted to
# Integers so that, e.g., "job 2" orders before "job 10".
#
# @return [Array<String, Integer>] alternating text and numeric segments
def sortable_name
  name.to_s.split(/(\d+)/).map { |part| part.match?(/\d+/) ? part.to_i : part }
end
# A failed status is recoverable (eligible for retry) unless its failure
# reason is one of the unrecoverable kinds (see #unrecoverable_failure?).
def recoverable?
  failed? && !unrecoverable_failure?
end
# Flags all other "latest" statuses with the same name in this pipeline as
# retried (and processed), leaving the receiver as the single latest one.
def update_older_statuses_retried!
  pipeline
    .statuses
    .latest
    .where(name: name)
    .where.not(id: id)
    .update_all(retried: true, processed: true)
end
# Invalidates the ETag cache entry for this job's JSON endpoint so that
# polling clients pick up the new state immediately.
def expire_etag_cache!
  job_path = Gitlab::Routing.url_helpers.project_build_path(project, id, format: :json)

  Gitlab::EtagCaching::Store.new.touch(job_path)
end
private

# Failure reasons that retrying cannot fix: broken script, missing
# dependency, archived job, scheduler failure, or data-integrity problems.
def unrecoverable_failure?
  script_failure? || missing_dependency_failure? || archived_failure? || scheduler_failure? || data_integrity_failure?
end
end
# Allow extended editions to layer additional behavior onto this model.
CommitStatus.prepend_mod_with('CommitStatus')
| 28.95679 | 129 | 0.694948 |
38ee6893d267a6e851d8a7df0ad3e938f860cfb8 | 2,132 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'extension_lister/version'
# Gem packaging metadata for extension_lister.
Gem::Specification.new do |spec|
  spec.name          = 'extension_lister'
  spec.version       = ExtensionLister::VERSION
  spec.authors       = ['Burin Choomnuan']
  spec.email         = ['[email protected]']
  spec.summary       = %q{List unique file extensions from any given directory recursively}
  spec.description   = %q{List unique file extensions from any starting directory recursively}
  spec.homepage      = 'https://github.com/agilecreativity/extension_lister'
  spec.required_ruby_version = ">= 1.9.3"
  spec.license       = 'MIT'
  # Package runtime code (bin/, lib/) plus the usual project files.
  spec.files = Dir.glob("{bin,lib}/**/*") + %w[Gemfile
                                               Rakefile
                                               extension_lister.gemspec
                                               README.md
                                               CHANGELOG.md
                                               LICENSE
                                               .rubocop.yml
                                               .gitignore]
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ['lib']
  # Runtime dependencies.
  spec.add_runtime_dependency 'thor', '~> 0.19'
  spec.add_runtime_dependency 'hashie', '~> 3.3'
  # Development-only dependencies.
  spec.add_development_dependency 'awesome_print', '~> 1.2'
  spec.add_development_dependency 'bundler', '~> 1.10'
  spec.add_development_dependency 'gem-ctags', '~> 1.0'
  spec.add_development_dependency 'guard', '~> 2.6'
  spec.add_development_dependency 'guard-minitest', '~> 2.3'
  spec.add_development_dependency 'minitest', '~> 5.4'
  spec.add_development_dependency 'minitest-spec-context', '~> 0.0'
  spec.add_development_dependency 'pry', '~> 0.10'
  spec.add_development_dependency 'rake', '~> 10.3'
  spec.add_development_dependency 'rubocop', '~> 0.26'
  spec.add_development_dependency 'yard', '~> 0.8'
end
| 53.3 | 94 | 0.575516 |
91ec90673b3a16bdbd20d1102f56a0aa1c9ed68b | 64 | require_relative 'load_path'
require 'shopping_cart_component'
| 16 | 33 | 0.859375 |
b99fb2df39dab54b2559e94541a5f64270f27593 | 2,094 | RSpec.describe Inquisition::Fasterer::Runner do
include_examples 'enablable', :fasterer

describe '#call' do
  subject(:runner_result) { described_class.new.call }

  let(:test_file) { 'app/controllers/application_controller.rb' }
  let(:instance_file_traverser) { instance_double(Inquisition::Fasterer::FileTraverser) }
  let(:offense_collector) { instance_double('Fasterer::Analyzer::OffenseCollector', offenses: offense) }
  let(:offense) do
    [instance_double(Fasterer::Offense, explanation: 'error', line_number: 1, offense_name: :gsub_vs_tr)]
  end
  let(:instance_analyzer) do
    instance_double(Fasterer::Analyzer, file_path: "#{Rails.root}/#{test_file}", errors: offense_collector)
  end

  # Stub the traverser and analyzer so no real files are scanned.
  before do
    allow(Inquisition::Fasterer::FileTraverser).to receive(:new).and_return(instance_file_traverser)
    allow(instance_file_traverser).to receive(:scannable_files).and_return(["#{Rails.root}/#{test_file}"])
    allow(Fasterer::Analyzer).to receive(:new).and_return(instance_analyzer)
    allow(instance_analyzer).to receive(:scan).and_return(nil)
  end

  context 'when call with errors' do
    before do
      allow(instance_analyzer).to receive(:errors).and_return(offense)
      allow(instance_file_traverser).to receive(:offenses_grouped_by_type).and_return(error: offense_collector)
    end

    # Each Fasterer offense should map onto a low-severity Inquisition issue
    # with a project-relative path.
    it 'returns a collection of issues' do
      expect(runner_result).to contain_exactly(
        Inquisition::Issue.new(
          severity: Inquisition::Severity::LOW,
          line: offense.first.line_number,
          path: test_file,
          message: offense.first.explanation,
          runner: nil,
          context: offense.first.offense_name
        )
      )
    end
  end

  context 'when call without errors' do
    let(:instance_analyzer) { instance_double(Fasterer::Analyzer, file_path: test_file, errors: []) }

    before do
      allow(instance_file_traverser).to receive(:offenses_grouped_by_type).and_return({})
    end

    it { is_expected.to be_empty }
  end
end
end
| 37.392857 | 113 | 0.692932 |
1ca251d5e94e376496a16334d998e3a7f51f75dc | 115 | # frozen_string_literal: true
require 'chefspec'
require 'chefspec/berkshelf'
require 'coveralls'

# Start Coveralls coverage reporting before any cookbook specs load.
Coveralls.wear!
| 16.428571 | 29 | 0.808696 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.