hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
01b72d84cd6a87fb2ff5f74e947412a8cc2df9f7 | 293 | class ApiController < ApplicationController
# Renders aggregate donation stats as JSON:
# { "total" => <sum of amounts as a string>, "donations" => <row count> }.
def totals
  # One shared scope so the two aggregates can never use different filters.
  # The original passed `:conditions` to `sum`, a Rails 2 option that was
  # removed in Rails 4+, and duplicated the filter as a raw SQL string.
  donations = Donation.where(return_code: '0')
  data = { 'total' => donations.sum(:amount).to_s, 'donations' => donations.count }
  # `render json:` serializes the hash itself; the extra `to_json` was redundant.
  render json: data
end
end
| 32.555556 | 80 | 0.65529 |
21dbd10a11fe0ddafc1bc3319b9700078d86217a | 479 | class Address
attr_accessor :street, :city, :state, :zip
# Stores the four address components on the new instance.
def initialize(street, city, state, zip)
  @street, @city, @state, @zip = street, city, state, zip
end
# Returns a string-keyed Hash snapshot of the address fields.
def to_h
  keys = %w[street city state zip]
  values = [@street, @city, @state, @zip]
  keys.zip(values).to_h
end
# Case-insensitive substring match of +query+ against street, city, state,
# or zip. Leading/trailing whitespace in the query is ignored.
def is_match?(query)
  # Normalize a copy. The original called strip!/downcase! on the argument,
  # mutating the caller's string — a surprising side effect for a predicate.
  q = query.strip.downcase
  [@street, @city, @state].any? { |field| field.downcase.include?(q) } ||
    @zip.include?(q)
end
end
| 21.772727 | 129 | 0.613779 |
38d86640b7dec4e8c6a2876640c2bcda3b3314a7 | 4,265 | # frozen_string_literal: true
require 'test/unit'
require_relative '../lib/plus_codes/open_location_code'
class PlusCodesTest < Test::Unit::TestCase
# Resolves the shared cross-implementation CSV test-data directory and
# builds the OpenLocationCode instance used by every test.
def setup
@test_data_folder_path = File.join(
File.dirname(__FILE__), '..', '..', 'test_data'
)
@olc = PlusCodes::OpenLocationCode.new
end
# Each validityTests.csv row: code, is_valid, is_short, is_full.
# The original collapsed all three checks into one boolean and a single
# bare assert, so a failure gave no hint which property or code broke.
def test_validity
  read_csv_lines('validityTests.csv').each do |line|
    cols = line.split(',')
    code = cols[0]
    assert_equal(cols[1] == 'true', @olc.valid?(code), "valid? for #{code}")
    assert_equal(cols[2] == 'true', @olc.short?(code), "short? for #{code}")
    assert_equal(cols[3] == 'true', @olc.full?(code), "full? for #{code}")
  end
end
# Each decoding.csv row: code, code_length, south, west, north, east.
def test_decode
read_csv_lines('decoding.csv').each do |line|
cols = line.split(',')
code_area = @olc.decode(cols[0])
assert_equal(cols[1].to_i, code_area.code_length, 'Also should be equal')
# Check returned coordinates are within 1e-10 of expected.
precision = 1e-10
assert((code_area.south_latitude - cols[2].to_f).abs < precision, 'South')
assert((code_area.west_longitude - cols[3].to_f).abs < precision, 'West')
assert((code_area.north_latitude - cols[4].to_f).abs < precision, 'North')
assert((code_area.east_longitude - cols[5].to_f).abs < precision, 'East')
end
end
# Each encoding.csv row: latitude, longitude, code_length, expected_code.
def test_encode
read_csv_lines('encoding.csv').each do |line|
# Skip blank lines the CSV may contain.
next if line.length.zero?
cols = line.split(',')
code = @olc.encode(cols[0].to_f, cols[1].to_f, cols[2].to_i)
assert_equal(cols[3], code)
end
end
# Each shortCodeTests.csv row: full_code, lat, lng, short_code, test_type.
# test_type 'B' exercises both directions, 'S' only shorten, 'R' only recover.
def test_shorten
read_csv_lines('shortCodeTests.csv').each do |line|
cols = line.split(',')
code = cols[0]
lat = cols[1].to_f
lng = cols[2].to_f
short_code = cols[3]
test_type = cols[4]
if %w[B S].include?(test_type)
short = @olc.shorten(code, lat, lng)
assert_equal(short_code, short)
end
if %w[B R].include?(test_type)
expanded = @olc.recover_nearest(short_code, lat, lng)
assert_equal(code, expanded)
end
end
# Smoke check only: must not raise. NOTE(review): no assertion on the
# result — consider asserting the expected shortened code here.
@olc.shorten('9C3W9QCJ+2VX', 60.3701125, 10.202665625)
end
# Encoding at the latitude pole (90.0) with maximum length must clamp
# and pad correctly.
def test_longer_encoding_with_special_case
assert_equal('CFX3X2X2+X2RRRRR', @olc.encode(90.0, 1.0, 15))
end
# Invalid code lengths, malformed codes, and too-short inputs must raise
# ArgumentError; the well-formed calls in between must not raise.
def test_exceptions
# Length 1 is below the minimum encodable length.
assert_raise ArgumentError do
@olc.encode(20, 30, 1)
end
# Odd lengths below the separator position are invalid.
assert_raise ArgumentError do
@olc.encode(20, 30, 9)
end
# '-' is not the plus-code separator.
assert_raise ArgumentError do
@olc.recover_nearest('9C3W9QCJ-2VX', 51.3708675, -1.217765625)
end
# Valid code: must not raise.
@olc.recover_nearest('9C3W9QCJ+2VX', 51.3708675, -1.217765625)
assert_raise ArgumentError do
@olc.decode('sfdg')
end
# Codes too short to shorten.
assert_raise ArgumentError do
@olc.shorten('9C3W9Q+', 1, 2)
end
assert_raise ArgumentError do
@olc.shorten('9C3W9Q00+', 1, 2)
end
end
# A code with padding ('00') appearing after real digits is invalid.
def test_valid_with_special_case
assert([email protected]?('3W00CJJJ+'))
end
# Micro-benchmark: encodes/decodes 100k random coordinates and prints
# per-call timings. Not an assertion-bearing test.
def test_benchmark
  test_data = []
  100_000.times do
    # Random precision (0-9 decimal places) for the random coordinate.
    exp = 10.0**rand(10)
    lat = ((rand * 180 - 90) * exp).round / exp
    lng = ((rand * 360 - 180) * exp).round / exp
    len = rand(15)
    len = rand(1..5) * 2 if len <= 10
    test_data.push([lat, lng, len, @olc.encode(lat, lng, len)])
  end
  # CLOCK_MONOTONIC is immune to wall-clock adjustments (NTP, DST),
  # unlike the Time.now arithmetic this replaces.
  start_micros = Process.clock_gettime(Process::CLOCK_MONOTONIC, :microsecond)
  test_data.each do |lat, lng, len, _|
    @olc.encode(lat, lng, len)
  end
  duration_micros = Process.clock_gettime(Process::CLOCK_MONOTONIC, :microsecond) - start_micros
  printf("Encode benchmark: %d usec total, %d loops, %f usec per call\n",
         duration_micros, test_data.length,
         duration_micros.to_f / test_data.length)
  start_micros = Process.clock_gettime(Process::CLOCK_MONOTONIC, :microsecond)
  test_data.each do |_, _, _, code|
    @olc.decode(code)
  end
  duration_micros = Process.clock_gettime(Process::CLOCK_MONOTONIC, :microsecond) - start_micros
  printf("Decode benchmark: %d usec total, %d loops, %f usec per call\n",
         duration_micros, test_data.length,
         duration_micros.to_f / test_data.length)
end
# Reads a CSV fixture from the shared test-data directory and returns its
# non-comment lines (comment = optional whitespace then '#') with line
# terminators removed.
def read_csv_lines(csv_file)
  # File.readlines closes the handle; the original opened a File, returned
  # a lazy enumerator tied to it, and never closed it. `chomp: true` also
  # fixes two `chop` bugs: chop leaves a stray "\r" on CRLF lines and eats
  # a real character when the final line has no newline.
  File.readlines(File.join(@test_data_folder_path, csv_file), chomp: true)
      .reject { |line| line =~ /^\s*#/ }
end
end
| 30.683453 | 80 | 0.629543 |
e8595042654fbd6b053c3ed966dc10343c3fb62d | 840 | class Product < ActiveRecord::Base
include ObjectModel::Model
validates :price, numericality: { greater_than_or_equal_to: 0.01 }
validates :title, uniqueness: true
validates :description, presence: true, uniqueness: true
belongs_to :user
has_many :line_items
before_destroy :ensure_not_referenced_by_any_line_item
@model_of_attachment = 'uploaded_file'.parameterize.underscore.to_sym
include ValidationsForPicture
CATEGORY = %w[Mobile Laptop Car].freeze
# Returns the most recently updated product (nil when none exist).
def self.latest
  # Implicit receiver — inside a class method `order` already resolves
  # to Product, so the explicit constant was redundant.
  order(:updated_at).last
end
# Title-ordered will_paginate page of products.
# @param page [Integer, String] 1-based page number
# @param products_per_page [Integer] page size
def self.order_paginate(page, products_per_page)
order(:title).paginate(page: page, per_page: products_per_page)
end
# before_destroy hook: blocks deletion while line items still reference
# this product. Returns true to allow the destroy, false to veto it.
# NOTE(review): in Rails 5+ a false return no longer halts callbacks —
# `throw(:abort)` would be required; confirm the Rails version in use.
def ensure_not_referenced_by_any_line_item
  return true if line_items.empty?
  errors.add(:base, 'ΡΡΡΠ΅ΡΡΠ²ΡΡΡ ΡΠΎΠ²Π°ΡΠ½ΡΠ΅ ΠΏΠΎΠ·ΠΈΡΠΈΠΈ')
  false
end
end
| 25.454545 | 71 | 0.757143 |
08c3084e666ac0ee154217d0ee6bb4832feaab6b | 1,938 | class Cfengine < Formula
desc "Help manage and understand IT infrastructure"
homepage "https://cfengine.com/"
url "https://cfengine-package-repos.s3.amazonaws.com/tarballs/cfengine-3.18.0.tar.gz"
sha256 "d601a3af30f3fba7d51a37476c9e1a00b750682149bf96f4a0002e804bc87783"
license all_of: ["BSD-3-Clause", "GPL-2.0-or-later", "GPL-3.0-only", "LGPL-2.0-or-later"]
livecheck do
url "https://cfengine-package-repos.s3.amazonaws.com/release-data/community/releases.json"
regex(/["']version["']:\s*["'](\d+(?:\.\d+)+)["']/i)
end
bottle do
sha256 arm64_big_sur: "3e755d3d93d4f9af8e38a035ae5dc43ee42fd6b5ff11e4dd8d9a42addc193de0"
sha256 big_sur: "369f0b971ef4b7968d2e1a8934ce03e4d841b88c9c0a789ca52e8e5d3b619acd"
sha256 catalina: "397a614052632c146a1a8668a5e0a1e8ab1569296d6bd94b411b5bf15a61c736"
sha256 mojave: "bc4f67e00fa8dc773ab0fcc1b9bb1376513f507fa958bceae50ef943ef5ff670"
sha256 x86_64_linux: "c0182838df4ece465cc5e1084657b650bc1190c1272a0cd50a6af1f7562dae32"
end
depends_on "lmdb"
depends_on "[email protected]"
depends_on "pcre"
on_linux do
depends_on "linux-pam"
end
resource "masterfiles" do
url "https://cfengine-package-repos.s3.amazonaws.com/tarballs/cfengine-masterfiles-3.18.0.tar.gz"
sha256 "968faee4920936739f914b5fcae441cd03354e909bb26c5dcdeb6750f1fde156"
end
# Configures and builds CFEngine, then stages the "masterfiles" policy
# set where the agent can find it.
def install
args = %W[
--disable-dependency-tracking
--prefix=#{prefix}
--with-workdir=#{var}/cfengine
--with-lmdb=#{Formula["lmdb"].opt_prefix}
--with-pcre=#{Formula["pcre"].opt_prefix}
--without-mysql
--without-postgresql
]
# systemd units are not wanted inside a Homebrew prefix.
on_linux do
args << "--with-systemd-service=no"
end
system "./configure", *args
system "make", "install"
# Stage the bundled policy masterfiles under the formula's share dir.
(pkgshare/"CoreBase").install resource("masterfiles")
end
test do
assert_equal "CFEngine Core #{version}", shell_output("#{bin}/cf-agent -V").chomp
end
end
| 33.413793 | 101 | 0.718266 |
6a1940370565e17a6537d54f452699eeea1fda30 | 2,316 | class Automake < Formula
desc "Tool for generating GNU Standards-compliant Makefiles"
homepage "https://www.gnu.org/software/automake/"
url "https://ftp.gnu.org/gnu/automake/automake-1.16.3.tar.xz"
mirror "https://ftpmirror.gnu.org/automake/automake-1.16.3.tar.xz"
sha256 "ff2bf7656c4d1c6fdda3b8bebb21f09153a736bcba169aaf65eab25fa113bf3a"
license "GPL-2.0-or-later"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "b989d3db71b5bc3456f52edd92b818d1fcb5c03e62ab5c6ffeb5bf404dc22aa5"
sha256 cellar: :any_skip_relocation, big_sur: "b19be0f4672d3ed2c258eee5f676d27429e5da189c80dc04ba8d01bc44ead320"
sha256 cellar: :any_skip_relocation, catalina: "25fe47e5fb1af734423e1e73f0dc53637e89d825ef8d8199add239352b5b974e"
sha256 cellar: :any_skip_relocation, mojave: "6e25193e573d0e11376322018c9cdf96ddd68ad7e4fe7bb464212380d5e6b9cf"
sha256 cellar: :any_skip_relocation, x86_64_linux: "719719480095f03a573c8251ddbc181d5f6c613860855a54e0ea8cc2ade5e75a"
end
depends_on "autoconf"
# Download more up-to-date config scripts.
resource "config" do
url "https://git.savannah.gnu.org/cgit/config.git/snapshot/config-0b5188819ba6091770064adf26360b204113317e.tar.gz"
sha256 "3dfb73df7d073129350b6896d62cabb6a70f479d3951f00144b408ba087bdbe8"
version "2020-08-17"
end
# Builds automake, pinning the system perl on macOS and refreshing the
# bundled config.guess/config.sub scripts first.
def install
on_macos do
# Use the system perl so the generated scripts don't depend on a keg.
ENV["PERL"] = "/usr/bin/perl"
end
# Overwrite the tarball's stale config.* scripts with the newer snapshot.
resource("config").stage do
cp Dir["config.*"], buildpath/"lib"
end
system "./configure", "--prefix=#{prefix}"
system "make", "install"
# Our aclocal must go first. See:
# https://github.com/Homebrew/homebrew/issues/10618
(share/"aclocal/dirlist").write <<~EOS
#{HOMEBREW_PREFIX}/share/aclocal
/usr/share/aclocal
EOS
end
test do
(testpath/"test.c").write <<~EOS
int main() { return 0; }
EOS
(testpath/"configure.ac").write <<~EOS
AC_INIT(test, 1.0)
AM_INIT_AUTOMAKE
AC_PROG_CC
AC_CONFIG_FILES(Makefile)
AC_OUTPUT
EOS
(testpath/"Makefile.am").write <<~EOS
bin_PROGRAMS = test
test_SOURCES = test.c
EOS
system bin/"aclocal"
system bin/"automake", "--add-missing", "--foreign"
system "autoconf"
system "./configure"
system "make"
system "./test"
end
end
| 33.565217 | 122 | 0.717185 |
01f1753df885ae8008a0891dc608af81d09e964f | 2,504 | class String
# Removes the common leading indentation from every line of the string
# (like Ruby's squiggly heredoc <<~). Returns a new string; the receiver
# is not modified.
def escape_heredoc
  lines = split(/\r\n|\r|\n/).reject(&:empty?)
  indents = lines.map do |line|
    m = line.match(/^( +)[^ ]+/)
    m ? m[1].size : 0
  end
  level = indents.min
  # Guard: with no non-empty lines `min` is nil, and the original then
  # crashed on `nil > 0` (NoMethodError). Also covers zero indentation.
  return dup if level.nil? || level.zero?
  gsub(/^#{' ' * level}/, '')
end
end
module EasySwig
module Util
# Absolute path of the directory one level above this file's directory.
def lib_dir
  File.expand_path('..', File.dirname(__FILE__))
end
# Project home: one level above lib_dir.
def home_dir
  File.expand_path('..', lib_dir)
end
# Reader for the configured output directory.
# NOTE(review): @output_dir is assigned elsewhere by the including class;
# this returns nil until that happens.
def output_dir
@output_dir
end
# Full cleanup pipeline for a C++ type name: strip const/ref/pointer
# decorations, drop template arguments, then drop the namespace prefix.
def escape_all(typename)
  del_prefix_class(escape_template(escape_const_ref_ptr(typename)))
end
# Strips a leading `const` and any trailing const/reference/pointer
# decoration from a C++ type name: "const int &" -> "int".
def escape_const_ref_ptr(typename)
  without_leading_const = typename.gsub(/^ *const /, '')
  without_leading_const.gsub(/ +(const)* *[&*]* *(const)* *$/, '').strip
end
# Drops any namespace/class prefix up to the last ':' that appears before
# the first '<': "std::vector" -> "vector".
def del_prefix_class(n) # Previously escaped for const
n.gsub(%r{^[^<]*[:]}, "")
end
# True when +typename+ is a C/C++ built-in type or one of a few common
# fixed-width aliases. Exact string match only — no normalization here.
def is_primitive?(typename)
  primitives = [
    'void', 'bool', 'char', 'unsigned char',
    'short', 'unsigned short', 'int', 'unsigned int',
    'long', 'unsigned long', 'long long', 'unsigned long long int',
    'unsigned long long', 'float', 'double', 'long double',
    'size_t', 'uint32', 'uint8', 'uint16'
  ]
  primitives.include?(typename)
end
# True when +typename+ is one of the supported std:: container/type names
# (namespace and template arguments already stripped).
def is_std?(typename) # TODO depends on language. What happens with templates?
  containers = %w[vector string pair list map deque multimap set]
  containers.include?(typename)
end
# Cuts the template argument list from a type name:
# "vector<int>" -> "vector".
def escape_template(typename)
  typename.sub(/<.+$/, '').strip
end
# Log output directory beneath the configured output dir.
def logs_dir
@output_dir+"/logs"
end
# Absolute path of the generated-code directory beneath the output dir.
def gen_dir
File.expand_path(output_dir+"/gen")
end
# Absolute path of the SWIG working directory beneath the output dir.
def swig_dir
File.expand_path(output_dir+"/swig")
end
# Reads and returns the entire contents of +file_name+.
def read_file(file_name)
  # File.read opens, reads, and closes in one call; the original leaked
  # the handle whenever #read raised between open and close.
  File.read(file_name)
end
# Writes +data+ to +file_name+, creating parent directories as needed.
# Returns the number of bytes written (same as the original).
def write_file(file_name, data)
  FileUtils.mkdir_p(File.dirname(file_name))
  # File.write opens, writes, and closes in one call; the original leaked
  # the handle whenever #write raised between open and close.
  File.write(file_name, data)
end
# Renames every "#{dir}/*.#{ext}" file whose basename matches +find+.
# The block receives (matched_text, first_capture_group) and returns the
# replacement text. Does nothing when +ext+ is nil.
def rename_files(dir, find, ext = '*', &block)
  return unless ext
  Dir.glob("#{dir}/*.#{ext}") do |file|
    name = File.basename(file, ".#{ext}")
    new_name = name.gsub(find) do |match|
      # Regexp.last_match(1) is the explicit form of $1 (first capture of
      # +find+, nil when the pattern has no groups).
      block.call(match, Regexp.last_match(1))
    end
    # Substitute the basename inside the full path to keep the directory.
    File.rename(file, file.gsub(name, new_name))
  end
end
end
end | 24.792079 | 83 | 0.563898 |
d5e762f4f29335ad82cfb28b1a9ae252669e09e9 | 5,305 | #
# Be sure to run `pod spec lint AXWebViewController.podspec` to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
# βββ Spec Metadata ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
#
# These will help people to find your library, and whilst it
# can feel like a chore to fill in it's definitely to your advantage. The
# summary should be tweet-length, and the description more in depth.
#
s.name = "AXWebViewController"
s.version = "0.2.6"
s.summary = "A light weight web view controller in iOS."
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
A light weight web view controller in iOS.
* Think: Why did you write this? What is the focus? What does it do?
* CocoaPods will be using this to generate tags, and improve search results.
* Try to keep it short, snappy and to the point.
* Finally, don't worry about the indent, CocoaPods strips it!
DESC
s.homepage = "https://github.com/devedbox/AXWebViewController"
# s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
# βββ Spec License βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
#
# Licensing your code is important. See http://choosealicense.com for more info.
# CocoaPods will detect a license file if there is a named LICENSE*
# Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
#
s.license = "MIT"
# s.license = { :type => "MIT", :file => "FILE_LICENSE" }
# βββ Author Metadata βββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
#
# Specify the authors of the library, with email addresses. Email addresses
# of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
# accepts just a name if you'd rather not provide an email address.
#
# Specify a social_media_url where others can refer to, for example a twitter
# profile URL.
#
s.author = { "θΎζ" => "[email protected]" }
# Or just: s.author = "aiXing"
# s.authors = { "aiXing" => "[email protected]" }
# s.social_media_url = "http://twitter.com/aiXing"
# βββ Platform Specifics βββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
#
# If this Pod runs only on iOS or OS X, then specify the platform and
# the deployment target. You can optionally include the target after the platform.
#
# s.platform = :ios
s.platform = :ios, "7.0"
# When using multiple platforms
# s.ios.deployment_target = "5.0"
# s.osx.deployment_target = "10.7"
# s.watchos.deployment_target = "2.0"
# s.tvos.deployment_target = "9.0"
# βββ Source Location ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
#
# Specify the location from where the source should be retrieved.
# Supports git, hg, bzr, svn and HTTP.
#
s.source = { :git => "https://github.com/devedbox/AXWebViewController.git", :tag => "0.2.6" }
# βββ Source Code ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
#
# CocoaPods is smart about how it includes source code. For source files
# giving a folder will include any swift, h, m, mm, c & cpp files.
# For header files it will include any header in the folder.
# Not including the public_header_files will make all headers public.
#
s.source_files = "AXWebViewController/AXWebViewController/*.{h,m}"
#s.exclude_files = "Classes/Exclude"
# s.public_header_files = "Classes/**/*.h"
# βββ Resources ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
#
# A list of resources included with the Pod. These are copied into the
# target bundle with a build phase script. Anything else will be cleaned.
# You can preserve files from being cleaned, please don't preserve
# non-essential files like tests, examples and documentation.
#
s.resource = "AXWebViewController/AXWebViewController/AXWebViewController.bundle"
# s.resources = "Resources/*.png"
# s.preserve_paths = "FilesToSave", "MoreFilesToSave"
# βββ Project Linking ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
# s.framework = "SomeFramework"
s.frameworks = "UIKit", "Foundation", "WebKit"
# s.library = "iconv"
# s.libraries = "iconv", "xml2"
# βββ Project Settings βββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
#
# If your library depends on compiler flags you can set them in the xcconfig hash
# where they will only apply to your library. If you depend on other Podspecs
# you can include multiple dependencies to ensure it works.
s.requires_arc = true
# s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
# s.dependency "JSONKit", "~> 1.4"
# s.dependency "AXPracticalHUD"
s.dependency "NJKWebViewProgress"
s.dependency "Aspects"
end | 36.840278 | 99 | 0.633365 |
ff49f61e9eecece5f1ebf01cc029d02e58329944 | 3,849 | require 'csv'
require 'rest-client'
namespace :populate do
desc 'Export metadata'
# Dumps every metadata record to lib/metadata_export.csv.
# NOTE(review): this reads `Metadata` while import_metadata manages `Meta` —
# confirm which model is the current one.
task export_metadata: :environment do
  metas = Metadata.all
  CSV.open("#{Rails.root}/lib/metadata_export.csv", 'wb') do |csv|
    # Header row. The original wrote bare identifiers (name, display_name,
    # ...) which are undefined locals and raise NameError at runtime; the
    # column names must be strings.
    csv << %w[name display_name description units datatype user_defined]
    metas.each do |meta|
      csv << [meta.name, meta.display_name, meta.description, meta.unit.machine_name, meta.datatype, meta.user_defined]
    end
  end
end
desc 'Import metadata from CSV'
# Rebuilds the Meta collection from lib/metadata.csv. Rows are skipped
# (with a console message) when the unit is missing, unknown, or not
# allowable. NOTE(review): export_metadata reads `Metadata` while this
# task manages `Meta` — confirm which model is current.
task import_metadata: :environment do
# fail 'Populating is only intended for sample data in development' unless Rails.env == 'development'
puts 'deleting and importing metadata from metadata.csv'
Meta.delete_all
# metadata.csv = real data, metadata_test.csv = test data
CSV.foreach("#{Rails.root}/lib/metadata.csv", headers: true, header_converters: :symbol) do |r|
next unless r[:name]
# check on units match first, don't save if it doesn't match anything
if r[:unit].nil?
puts "No unit specified. If no units are applicable, set unit to 'none', metadata #{r[:name]} was not saved"
next
else
units = Unit.where(name: r[:unit])
if units.count == 0
puts "No match for unit #{r[:unit]}, metadata #{r[:name]} was not saved"
next
elsif !units.first.allowable
puts "Unit #{r[:unit]} is not allowable, metadata #{r[:name]} was not saved"
next
end
end
# All the meta get deleted every time, but in the future we should use find_or_create_by in order
# to not delete user defined data potentially.
m = Meta.find_or_create_by(name: r[:name])
m.name = r[:name]
m.display_name = r[:display_name]
m.short_name = r[:short_name]
m.description = r[:description]
m.unit = units.first
m.datatype = r[:datatype]
# CSV values are strings, so the boolean flag is compared against 'true'.
m.user_defined = r[:user_defined] == 'true' ? true : false
m.save!
end
end
# Import Project Haystack units
desc 'import units from haystack excel file'
task units: :environment do
require 'roo'
puts 'Deleting and reimporting units'
Unit.delete_all
mapping_file = Rails.root.join('lib/project_haystack_units.xlsx')
puts "opening #{mapping_file}"
xls = Roo::Spreadsheet.open(mapping_file.to_s)
units = xls.sheet('haystack_definitions').parse
row_cnt = 0
units.each do |row|
row_cnt += 1
next if row_cnt <= 1
puts row.inspect
unit = Unit.find_or_create_by(name: row[1])
unit.type = row[0]
unit.display_name = row[2]
unit.symbol = row[3]
unit.symbol_alt = row[4] unless row[4].nil?
unit.allowable = row[6].to_s.downcase == 'true' ? true : false
unit.save!
end
# now go through the other sheet and add the "NREL mapped variables"
maps = xls.sheet('nrel_units').parse
row_cnt = 0
maps.each do |row|
row_cnt += 1
next if row_cnt <= 1
unit = Unit.where(name: row[3])
if unit.count == 0
fail("no nrel_unit found in database for machine_name: '#{row[3]}' and map of #{row[0]}")
elsif unit.count > 1
fail("found multiple machine names for: '#{row[3]}'")
else
unit = unit.first
if unit.mapped.nil?
puts "adding #{row[0]} to unit map for #{row[3]}"
unit.mapped = [row[0]]
else
unit.mapped << row[0] unless unit.mapped.include?(row[0])
end
unit.save!
end
end
# map a special case of "" to undefined
u = Unit.where(name: 'undefined').first
u.mapped << ''
u.save!
end
desc 'reset cache counters on analysis/structures relations'
# Recomputes the structures counter cache for every analysis.
task reset_counters: :environment do
  # Only the ids are needed — pluck avoids instantiating every Analysis
  # record in memory, unlike the original `Analysis.all.each`.
  Analysis.pluck(:id).each do |analysis_id|
    Analysis.reset_counters(analysis_id, :structures)
  end
end
end
| 31.292683 | 121 | 0.627436 |
796eaed268ed15ae470d88f7462de778b37a205c | 149 | class AddDroppedOutToStartups < ActiveRecord::Migration[6.0]
# Adds a boolean dropped_out flag to startups, defaulting to false.
def change
add_column :startups, :dropped_out, :boolean, default: false
end
end
| 24.833333 | 64 | 0.765101 |
e92d42d2484982d5f8b8619926b8da41c1fe6b34 | 2,120 | require File.expand_path("../spec_helper", __FILE__)
describe MerchantSidekick::ShoppingCart::LineItem do
# NOTE(review): `def setup` is a Test::Unit/minitest convention; plain
# RSpec does not call it automatically. Confirm the suite's spec_helper
# wires this up, otherwise @product is nil in the examples below.
def setup
@product = products(:widget)
end
it "should initialize and create" do
transaction do
item = MerchantSidekick::ShoppingCart::LineItem.new(valid_cart_line_item_attributes(:product => @product))
item.should be_valid
item.save!
item.product.should == @product
item.item_number.should == @product.id # as there is no sku, or number field in product
item.name.should == @product.title
item.description.should be_nil # as there is not description field
item.quantity.should == 5
item.unit.should == :piece
item.pieces.should == 1
item.total_amount.to_s.should == '149.75'
item.should be_taxable
end
end
it "should copy name and sku" do
transaction do
product = ProductWithNameAndSku.new(:price => Money.new(999, "USD"))
item = MerchantSidekick::ShoppingCart::LineItem.new(valid_cart_line_item_attributes(:product => product))
item.item_number.should == "PR1234"
item.description.should == "Wonderful name!"
end
end
it "should copy title and number" do
transaction do
product = ProductWithTitleAndNumber.new(:price => Money.new(999, "USD"))
item = MerchantSidekick::ShoppingCart::LineItem.new(valid_cart_line_item_attributes(:product => product))
item.name.should == "A beautiful title"
item.item_number.should == "PR1234"
item.description.should == "Wonderful title!"
end
end
it "should duplicate from copy methods" do
transaction do
product = ProductWithCopy.new(:price => Money.new(99, "USD"))
item = MerchantSidekick::ShoppingCart::LineItem.new(valid_cart_line_item_attributes(:product => product))
item.name.should == "customized name"
item.item_number.should == "customized item number"
item.description.should == "customized description"
item.unit_price.to_s.should == "99.99"
item.total_amount.to_s.should == "499.95"
item.price.to_s.should == "499.95"
end
end
end
| 35.932203 | 112 | 0.689151 |
e96dc6f16ea4ce69afb0199e8121a31ef8277dfe | 5,680 | # Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require_relative 'abstract_read_attribute'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# Properties to configure reading from an Oracle Database.
class DataIntegration::Models::OracleReadAttributes < DataIntegration::Models::AbstractReadAttribute
# The fetch size for reading.
# @return [Integer]
attr_accessor :fetch_size
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'model_type': :'modelType',
'fetch_size': :'fetchSize'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'model_type': :'String',
'fetch_size': :'Integer'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [Integer] :fetch_size The value to assign to the {#fetch_size} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# Fixed discriminator for the AbstractReadAttribute polymorphic hierarchy.
attributes['modelType'] = 'ORACLE_READ_ATTRIBUTE'
super(attributes)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
# Accept the camelCase wire name or the ruby snake_case name — not both.
self.fetch_size = attributes[:'fetchSize'] if attributes[:'fetchSize']
raise 'You cannot provide both :fetchSize and :fetch_size' if attributes.key?(:'fetchSize') && attributes.key?(:'fetch_size')
self.fetch_size = attributes[:'fetch_size'] if attributes[:'fetch_size']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
model_type == other.model_type &&
fetch_size == other.fetch_size
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[model_type, fetch_size].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
# check to ensure the input is an array given that the the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 34.846626 | 245 | 0.673768 |
79da155c7d42b3fb8a16f4afe9da5d458c5feee3 | 162 | # frozen_string_literal: true
module User
# LegalEntityUser - 2019
# Commission profile for legal-entity (company) accounts.
class LegalEntityUser < BasicUser
# Flat commission rate for legal entities.
# NOTE(review): a Float for a money-related rate invites rounding issues;
# consider BigDecimal or an integer basis-point value.
def commission_amount
0.44
end
end
end
| 14.727273 | 35 | 0.709877 |
e93e14bdd8218e1ea815c083be458703c79e715a | 268 | require 'pi_piper'
# Reads one channel of an MCP3008 analog-to-digital converter over SPI
# via the pi_piper gem.
class Mcp3008
# ADC input channel this instance reads from.
attr_reader :channel
# @param channel [Integer] MCP3008 channel number (0-7) — not validated here.
def initialize(channel)
@channel = channel
end
# Performs one conversion and returns the raw reading.
# NOTE(review): command bytes look like the standard MCP3008 single-ended
# framing (start bit, then mode+channel in the upper nibble) — verify
# against the MCP3008 datasheet.
def read
PiPiper::Spi.begin do |spi|
adc = spi.write [0x1, (0x8 + @channel) << 4, 0x0]
# Combine the 2 valid bits of byte 1 with the 8 bits of byte 2
# into a 10-bit value (0-1023).
((adc[1] & 0x3) << 8) + adc[2]
end
end
end
| 15.764706 | 55 | 0.589552 |
ff6e4b8d451596f34b191c161cffc95d1d71d45d | 201 | class AddTeamLeadIdToBatchApplication < ActiveRecord::Migration[6.0]
# Adds team_lead_id to batch_applications and indexes it for lookups.
def change
add_column :batch_applications, :team_lead_id, :integer
add_index :batch_applications, :team_lead_id
end
end
| 28.714286 | 68 | 0.79602 |
011d3ed8e8e549607871f3b9d07ababada9a5e82 | 1,366 | require 'spec_helper'
require 'puppet/type'
require 'puppet/type/package'
describe 'package_provider', :type => :fact do
before(:each) { Facter.clear }
after(:each) { Facter.clear }
['4.2.2', '3.7.1 (Puppet Enterprise 3.2.1)'].each do |puppetversion|
describe "on puppet ''#{puppetversion}''" do
before :each do
allow(Facter).to receive(:value).and_return(puppetversion)
end
context 'when darwin' do
it 'returns pkgdmg' do
provider = Puppet::Type.type(:package).provider(:pkgdmg)
allow(Puppet::Type.type(:package)).to receive(:defaultprovider).and_return(provider)
expect(Facter.fact(:package_provider).value).to eq('pkgdmg')
end
end
context 'when centos 7' do
it 'returns yum' do
provider = Puppet::Type.type(:package).provider(:yum)
allow(Puppet::Type.type(:package)).to receive(:defaultprovider).and_return(provider)
expect(Facter.fact(:package_provider).value).to eq('yum')
end
end
context 'when ubuntu' do
it 'returns apt' do
provider = Puppet::Type.type(:package).provider(:apt)
allow(Puppet::Type.type(:package)).to receive(:defaultprovider).and_return(provider)
expect(Facter.fact(:package_provider).value).to eq('apt')
end
end
end
end
end
| 31.045455 | 94 | 0.632504 |
e896b7d08558e73b82343913102df033bf90d5a0 | 920 | class Clojure < Formula
desc "Dynamic, general-purpose programming language"
homepage "https://clojure.org"
url "https://download.clojure.org/install/clojure-tools-1.10.2.790.tar.gz"
sha256 "62966b4494812982842038f8d67b2d2a406277b020476aa70d287a1d6fed901a"
license "EPL-1.0"
version_scheme 1
livecheck do
url "https://raw.githubusercontent.com/clojure/homebrew-tools/master/Formula/clojure.rb"
regex(/url ".*?clojure-tools-v?(\d+(?:\.\d+)+)\.t/i)
end
bottle :unneeded
depends_on "openjdk"
depends_on "rlwrap"
uses_from_macos "ruby" => :build
def install
system "./install.sh", prefix
bin.env_script_all_files libexec/"bin", Language::Java.overridable_java_home_env
end
test do
ENV["TERM"] = "xterm"
system("#{bin}/clj", "-e", "nil")
%w[clojure clj].each do |clj|
assert_equal "2", shell_output("#{bin}/#{clj} -e \"(+ 1 1)\"").strip
end
end
end
| 27.058824 | 92 | 0.683696 |
f71476cfbcaa306becc1a4f4eda40463a9bdb73f | 13,670 | # frozen_string_literal: true
# Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
Devise.setup do |config|
# The secret key used by Devise. Devise uses this key to generate
# random tokens. Changing this key will render invalid all existing
# confirmation, reset password and unlock tokens in the database.
# Devise will use the `secret_key_base` as its `secret_key`
# by default. You can change it below and use your own secret key.
# config.secret_key = 'c3db7d4a4ece09b2bef3abe2dc21aae3a9c5e8ce0d264b27b07ef1959e973de1597363b8c5a90cccded22cacd52cfee3e73c55c6f2c9bbbc63ee01056be36d4c'
# ==> Mailer Configuration
# Configure the e-mail address which will be shown in Devise::Mailer,
# note that it will be overwritten if you use your own mailer class
# with default "from" parameter.
config.mailer_sender = '[email protected]'
# Configure the class responsible to send e-mails.
# config.mailer = 'Devise::Mailer'
# Configure the parent class responsible to send e-mails.
# config.parent_mailer = 'ActionMailer::Base'
# ==> ORM configuration
# Load and configure the ORM. Supports :active_record (default) and
# :mongoid (bson_ext recommended) by default. Other ORMs may be
# available as additional gems.
require 'devise/orm/active_record'
# ==> Configuration for any authentication mechanism
# Configure which keys are used when authenticating a user. The default is
# just :email. You can configure it to use [:username, :subdomain], so for
# authenticating a user, both parameters are required. Remember that those
# parameters are used only when authenticating and not when retrieving from
# session. If you need permissions, you should implement that in a before filter.
# You can also supply a hash where the value is a boolean determining whether
# or not authentication should be aborted when the value is not present.
# config.authentication_keys = [:email]
# Configure parameters from the request object used for authentication. Each entry
# given should be a request method and it will automatically be passed to the
# find_for_authentication method and considered in your model lookup. For instance,
# if you set :request_keys to [:subdomain], :subdomain will be used on authentication.
# The same considerations mentioned for authentication_keys also apply to request_keys.
# config.request_keys = []
# Configure which authentication keys should be case-insensitive.
# These keys will be downcased upon creating or modifying a user and when used
# to authenticate or find a user. Default is :email.
config.case_insensitive_keys = [:email]
# Configure which authentication keys should have whitespace stripped.
# These keys will have whitespace before and after removed upon creating or
# modifying a user and when used to authenticate or find a user. Default is :email.
config.strip_whitespace_keys = [:email]
# Tell if authentication through request.params is enabled. True by default.
# It can be set to an array that will enable params authentication only for the
# given strategies, for example, `config.params_authenticatable = [:database]` will
# enable it only for database (email + password) authentication.
# config.params_authenticatable = true
# Tell if authentication through HTTP Auth is enabled. False by default.
# It can be set to an array that will enable http authentication only for the
# given strategies, for example, `config.http_authenticatable = [:database]` will
# enable it only for database authentication. The supported strategies are:
# :database = Support basic authentication with authentication key + password
# config.http_authenticatable = false
# If 401 status code should be returned for AJAX requests. True by default.
# config.http_authenticatable_on_xhr = true
# The realm used in Http Basic Authentication. 'Application' by default.
# config.http_authentication_realm = 'Application'
# It will change confirmation, password recovery and other workflows
# to behave the same regardless if the e-mail provided was right or wrong.
# Does not affect registerable.
# config.paranoid = true
# By default Devise will store the user in session. You can skip storage for
# particular strategies by setting this option.
# Notice that if you are skipping storage for all authentication paths, you
# may want to disable generating routes to Devise's sessions controller by
# passing skip: :sessions to `devise_for` in your config/routes.rb
config.skip_session_storage = [:http_auth]
# By default, Devise cleans up the CSRF token on authentication to
# avoid CSRF token fixation attacks. This means that, when using AJAX
# requests for sign in and sign up, you need to get a new CSRF token
# from the server. You can disable this option at your own risk.
# config.clean_up_csrf_token_on_authentication = true
# When false, Devise will not attempt to reload routes on eager load.
# This can reduce the time taken to boot the app but if your application
# requires the Devise mappings to be loaded during boot time the application
# won't boot properly.
# config.reload_routes = true
# ==> Configuration for :database_authenticatable
# For bcrypt, this is the cost for hashing the password and defaults to 11. If
# using other algorithms, it sets how many times you want the password to be hashed.
#
# Limiting the stretches to just one in testing will increase the performance of
# your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
# a value less than 10 in other environments. Note that, for bcrypt (the default
# algorithm), the cost increases exponentially with the number of stretches (e.g.
# a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation).
config.stretches = Rails.env.test? ? 1 : 11
# Set up a pepper to generate the hashed password.
# config.pepper = 'e4ed366537b8c5a607609c5daf6d59279aab9ce4ddd2ff7e644bcdd24b3e5281981d3e1e236c1d90f239f7110a622b9b377730ac93042ac1b184c577b4dd6122'
# Send a notification to the original email when the user's email is changed.
# config.send_email_changed_notification = false
# Send a notification email when the user's password is changed.
# config.send_password_change_notification = false
# ==> Configuration for :confirmable
# A period that the user is allowed to access the website even without
# confirming their account. For instance, if set to 2.days, the user will be
# able to access the website for two days without confirming their account,
# access will be blocked just in the third day. Default is 0.days, meaning
# the user cannot access the website without confirming their account.
# config.allow_unconfirmed_access_for = 2.days
# A period that the user is allowed to confirm their account before their
# token becomes invalid. For example, if set to 3.days, the user can confirm
# their account within 3 days after the mail was sent, but on the fourth day
# their account can't be confirmed with the token any more.
# Default is nil, meaning there is no restriction on how long a user can take
# before confirming their account.
# config.confirm_within = 3.days
# If true, requires any email changes to be confirmed (exactly the same way as
# initial account confirmation) to be applied. Requires additional unconfirmed_email
# db field (see migrations). Until confirmed, new email is stored in
# unconfirmed_email column, and copied to email column on successful confirmation.
config.reconfirmable = true
# Defines which key will be used when confirming an account
# config.confirmation_keys = [:email]
# ==> Configuration for :rememberable
# The time the user will be remembered without asking for credentials again.
# config.remember_for = 2.weeks
# Invalidates all the remember me tokens when the user signs out.
config.expire_all_remember_me_on_sign_out = true
# If true, extends the user's remember period when remembered via cookie.
# config.extend_remember_period = false
# Options to be passed to the created cookie. For instance, you can set
# secure: true in order to force SSL only cookies.
# config.rememberable_options = {}
# ==> Configuration for :validatable
# Range for password length.
config.password_length = 6..128
# Email regex used to validate email formats. It simply asserts that
# one (and only one) @ exists in the given string. This is mainly
# to give user feedback and not to assert the e-mail validity.
config.email_regexp = /\A[^@\s]+@[^@\s]+\z/
# ==> Configuration for :timeoutable
# The time you want to timeout the user session without activity. After this
# time the user will be asked for credentials again. Default is 30 minutes.
# config.timeout_in = 30.minutes
# ==> Configuration for :lockable
# Defines which strategy will be used to lock an account.
# :failed_attempts = Locks an account after a number of failed attempts to sign in.
# :none = No lock strategy. You should handle locking by yourself.
# config.lock_strategy = :failed_attempts
# Defines which key will be used when locking and unlocking an account
# config.unlock_keys = [:email]
# Defines which strategy will be used to unlock an account.
# :email = Sends an unlock link to the user email
# :time = Re-enables login after a certain amount of time (see :unlock_in below)
# :both = Enables both strategies
# :none = No unlock strategy. You should handle unlocking by yourself.
# config.unlock_strategy = :both
# Number of authentication tries before locking an account if lock_strategy
# is failed attempts.
# config.maximum_attempts = 20
# Time interval to unlock the account if :time is enabled as unlock_strategy.
# config.unlock_in = 1.hour
# Warn on the last attempt before the account is locked.
# config.last_attempt_warning = true
# ==> Configuration for :recoverable
#
# Defines which key will be used when recovering the password for an account
# config.reset_password_keys = [:email]
# Time interval you can reset your password with a reset password key.
# Don't put a too small interval or your users won't have the time to
# change their passwords.
config.reset_password_within = 6.hours
# When set to false, does not sign a user in automatically after their password is
# reset. Defaults to true, so a user is signed in automatically after a reset.
# config.sign_in_after_reset_password = true
# ==> Configuration for :encryptable
# Allow you to use another hashing or encryption algorithm besides bcrypt (default).
# You can use :sha1, :sha512 or algorithms from others authentication tools as
# :clearance_sha1, :authlogic_sha512 (then you should set stretches above to 20
# for default behavior) and :restful_authentication_sha1 (then you should set
# stretches to 10, and copy REST_AUTH_SITE_KEY to pepper).
#
# Require the `devise-encryptable` gem when using anything other than bcrypt
# config.encryptor = :sha512
# ==> Scopes configuration
# Turn scoped views on. Before rendering "sessions/new", it will first check for
# "users/sessions/new". It's turned off by default because it's slower if you
# are using only default views.
config.scoped_views = true
# Configure the default scope given to Warden. By default it's the first
# devise role declared in your routes (usually :user).
# config.default_scope = :user
# Set this configuration to false if you want /users/sign_out to sign out
# only the current scope. By default, Devise signs out all scopes.
# config.sign_out_all_scopes = true
# ==> Navigation configuration
# Lists the formats that should be treated as navigational. Formats like
# :html, should redirect to the sign in page when the user does not have
# access, but formats like :xml or :json, should return 401.
#
# If you have any extra navigational formats, like :iphone or :mobile, you
# should add them to the navigational formats lists.
#
# The "*/*" below is required to match Internet Explorer requests.
# config.navigational_formats = ['*/*', :html]
# The default HTTP method used to sign out a resource. Default is :delete.
config.sign_out_via = :delete
# ==> OmniAuth
# Add a new OmniAuth provider. Check the wiki for more information on setting
# up on your models and hooks.
# config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo'
# ==> Warden configuration
# If you want to use other strategies, that are not supported by Devise, or
# change the failure app, you can configure them inside the config.warden block.
#
# config.warden do |manager|
# manager.intercept_401 = false
# manager.default_strategies(scope: :user).unshift :some_external_strategy
# end
# ==> Mountable engine configurations
# When using Devise inside an engine, let's call it `MyEngine`, and this engine
# is mountable, there are some extra configurations to be taken into account.
# The following options are available, assuming the engine is mounted as:
#
# mount MyEngine, at: '/my_engine'
#
# The router that invoked `devise_for`, in the example above, would be:
# config.router_name = :my_engine
#
# When using OmniAuth, Devise cannot automatically set OmniAuth path,
# so you need to do it manually. For the users scope, it would be:
# config.omniauth_path_prefix = '/my_engine/users/auth'
end
| 48.821429 | 154 | 0.751573 |
1d9d701ec421ddfabc10b59aea74d729119a2c0e | 7,297 | # frozen_string_literal: true
module Para
module Cloneable
# This object acts as a service to compile a nested cloneable options hash to be
# provided to the `deep_clone` method from the `deep_cloneable` gem. It iterates over
# every reflections that must be included for a given model when it's cloned, and
# creates a nested hash of :include and :except directives based on the tree that
# is created by nested `acts_as_cloneable` calls on the different models of the
# application
#
# Example :
#
# Given the following model structure :
#
# class Article < ApplicationRecord
# acts_as_cloneable :category, :comments, except: [:publication_date]
#
# belongs_to :category
# has_many :comments
# end
#
# class Category < ApplicationRecord
# acts_as_cloneable :category, except: [:articles_count]
#
# has_many :articles
# end
#
# class Comment < ApplicationRecord
# acts_as_cloneable :author
#
# belongs_to :article
# belongs_to :author
# end
#
# class Author < ApplicationRecord
# acts_as_cloneable except: [:email]
#
# has_many :articles
# end
#
# The behavior would be :
#
# Para::Cloneable::IncludeTreeBuilder.new(article).build
# # => {
# include: [:category, { comments: :author }],
# except: [:publication_date, {
# category: [:articles_count],
# comments: { author: [:email] }
# }]
# }
#
class IncludeTreeBuilder
attr_reader :resource, :cloneable_options
def initialize(resource)
@resource = resource
@cloneable_options = resource.cloneable_options.deep_dup
end
def build
options_tree = build_cloneable_options_tree(resource)
exceptions = extract_exceptions_from(options_tree)
inclusions = clean_options_tree(options_tree)
cloneable_options.merge(include: inclusions, except: exceptions)
end
private
# The cloneable options tree iterates over the resources' relations that are
# declared as included in the cloneable_options of the provided resource, and
# recursively checks included relations for its associated resources.
#
# It returns a nested hash with the included relations and their :except array
# if it exist, which include the attributes that shouldn't be duplicated when
# the resource is cloned.
#
def build_cloneable_options_tree(resource, path = [])
cloneable_options = resource.cloneable_options
# Iterate over the resource's cloneable options' :include array and recursively
# add nested included resources to its own included resources.
options = cloneable_options[:include].each_with_object({}) do |reflection_name, hash|
# This avoids cyclic dependencies issues by stopping nested association
# inclusions before the cycle starts.
#
# For example, if a post includes its author, and the author includes its posts,
# this would make the system fail with a stack level too deep error. Here this
# guard allows the inclusion to stop at :
#
# { posts: { author: { posts: { author: {}}}}}
#
# Which ensures that, using the dictionary strategy of deep_cloneable, all
# posts' authors' posts will have their author mapped to an already cloned
# author when it comes to cloning the "author" 4th level of the include tree.
#
# This is not the most optimized solution, but works well enough as if the
# author's posts match previously cloned posts, they won't be cloned as they'll
# exist in the cloned resources dictionary.
next if path.length >= 4 &&
path[-4] == path[-2] &&
path[-2] == reflection_name &&
path[-3] == path[-1]
hash[reflection_name] = {}
unless (reflection = resource.class.reflections[reflection_name.to_s])
next
end
reflection_options = hash[reflection_name]
association_target = resource.send(reflection_name)
if reflection.collection?
association_target.each do |nested_resource|
add_reflection_options(
reflection_options,
nested_resource,
[*path, reflection_name]
)
end
else
add_reflection_options(
reflection_options,
association_target,
[*path, reflection_name]
)
end
end
# Add the :except array from the resource to the current options hash and merge
# it if one already exist from another resource of the same class.
options[:except] ||= []
options[:except] |= Array.wrap(cloneable_options[:except])
options
end
def add_reflection_options(reflection_options, nested_resource, path)
options = nested_resource.class.try(:cloneable_options)
return reflection_options unless options
target_options = build_cloneable_options_tree(nested_resource, path)
reflection_options.deep_merge!(target_options)
end
# Iterates over the generated options tree to extract all the nested :except options
# into their own separate hash, removing :except keys from the original options
# tree hash.
#
def extract_exceptions_from(tree)
exceptions = tree.delete(:except) || []
nested_exceptions = {}
tree.each do |key, value|
next unless value.is_a?(Hash) && !value.empty?
sub_exceptions = extract_exceptions_from(value)
nested_exceptions[key] = sub_exceptions unless sub_exceptions.empty?
end
exceptions += [nested_exceptions] unless nested_exceptions.empty?
exceptions
end
# Iterates over the remaining options tree hash and converts empty hash values' keys
# to be stored in an array, and returns an array of symbols and hashes that is
# compatible with what is expected as argument for the :include option of the
# `deep_clone` method.
#
# Example :
#
# clean_options_tree({ category: {}, comments: { author: {} } })
# # => [:category, { comments: [:author] }]
#
def clean_options_tree(tree)
shallow_relations = []
deep_relations = {}
tree.each do |key, value|
# If the value is an empty hash, consider it as a shallow relation and add
# it to the shallow relations array
if !value || value.empty?
shallow_relations << key
# If the value is a hash with nested keys, process its nested values and add
# the result to the deep relations hash
else
deep_relations[key] = clean_options_tree(value)
end
end
deep_relations.empty? ? shallow_relations : shallow_relations + [deep_relations]
end
end
end
end
| 37.040609 | 93 | 0.619844 |
03db3adb98e2215c887c05cc95662350d69930a8 | 270 | class CurseClient < Cask
url 'http://addons.cursecdn.com/files/595/169/Curse_Client-4.0.0.425.dmg'
homepage 'http://www.curse.com/client'
version '4.0.0.425'
sha256 'f76eda5d21b85ee4db080c794a3e852ecadaa71fe2d32308d698d95286f36a1f'
link 'Curse Client.app'
end
| 33.75 | 75 | 0.774074 |
61ef77fed89bf73c4f410b81f971fa822d236685 | 1,067 | # coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "simple_discussion/version"
Gem::Specification.new do |spec|
spec.name = "simple_discussion"
spec.version = SimpleDiscussion::VERSION
spec.authors = ["Chris Oliver"]
spec.email = ["[email protected]"]
spec.summary = %q{A simple, extensible Rails forum}
spec.description = %q{A simple, extensible Rails forum}
spec.homepage = "https://github.com/excid3/simple_discussion"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_dependency 'font-awesome-sass', '4.7.0'
spec.add_dependency 'friendly_id', '>= 5.2.0'
spec.add_dependency 'gravatar_image_tag'
spec.add_dependency 'rails', '>= 4.2'
spec.add_dependency 'will_paginate', '>= 3.1.0'
end
| 35.566667 | 74 | 0.651359 |
f7ac4546e6f3745a27f79db79e0f1de0320b863b | 632 | # frozen_string_literal: true
require 'sassc'
module Rake
  module Builder
    # Compiles Sass/SCSS assets into CSS files for the static build.
    class Sass < Compiler
      # Yields the output path for +asset+: css/<basename>.css under the
      # static directory.
      def dest(asset)
        css_name = asset.src.basename.sub_ext('.css')
        yield Paths.static("css/#{css_name}")
      end

      # Treats the whole source directory as a dependency so that partials
      # pulled in via @import trigger a rebuild.
      def dependencies(asset)
        Rake::FileList["#{asset.src.dirname}/**/*"]
      end

      # Renders the stylesheet with SassC. The syntax is chosen from the
      # file extension; production builds emit compressed output.
      def build(asset)
        source = File.read(asset.src)
        syntax = asset.src.to_s.end_with?('sass') ? :sass : :scss
        output_style = if Builder.production
                         :compressed
                       else
                         :nested
                       end
        engine = SassC::Engine.new(source,
                                   load_paths: [asset.src.dirname.to_s],
                                   syntax: syntax,
                                   style: output_style)
        engine.render
      end
    end
  end
end
| 22.571429 | 71 | 0.571203 |
f7ea38c19a4e5358e2de2d2ad2e515875c1148c0 | 219 | #! /usr/bin/env ruby
# copycat: a very boring game for testing.
$stdin.sync = $stdout.sync = true
while (move = $stdin.getc.chr) do
if move == "q"
$stdout.write "bye!"
exit 0
end
$stdout.write(move)
end | 16.846154 | 42 | 0.630137 |
e2640210ca47856eea605532ef90d8cceea0d4e3 | 210 | class CreateUsers < ActiveRecord::Migration[6.0]
def change
create_table :users do |t|
t.string :first_name
t.string :last_name
t.string :email
t.timestamps
end
end
end
| 13.125 | 48 | 0.633333 |
6a22f7da141a776b95838bdb4618bee762908698 | 914 | require 'minitest_helper'
class TestObjectRemovalSanityCheck < Minitest::Test
  # Smoke test: a mesh can be removed from and re-added to a scene between
  # renders without crashing the OpenGL renderer.
  # NOTE(review): creates a real GL window, so this test needs a display;
  # the window.run loop presumably runs until the window is closed — confirm.
  def test_that_it_works
    scene = Mittsu::Scene.new
    camera = Mittsu::PerspectiveCamera.new(75.0, 1.0, 0.1, 1000.0)
    renderer = Mittsu::OpenGLRenderer.new width: 100, height: 100, title: 'TestObjectRemovalSanityCheck'
    # A unit green cube is the only object in the scene.
    geometry = Mittsu::BoxGeometry.new(1.0, 1.0, 1.0)
    material = Mittsu::MeshBasicMaterial.new(color: 0x00ff00)
    cube = Mittsu::Mesh.new(geometry, material)
    scene.add(cube)
    assert_includes scene.children, cube
    camera.position.z = 5.0
    renderer.window.run do
      cube.rotation.x += 0.1
      cube.rotation.y += 0.1
      # Render with the cube present, after removal, and after re-adding,
      # checking scene membership at each step.
      renderer.render(scene, camera)
      scene.remove(cube)
      refute_includes scene.children, cube
      renderer.render(scene, camera)
      scene.add(cube)
      assert_includes scene.children, cube
      renderer.render(scene, camera)
    end
  end
end
| 26.114286 | 104 | 0.690372 |
390fcb5a908938f956d98cca525ae379a21aa9e0 | 3,466 |
require 'uri'
require 'net/http'
require 'net/https'
require 'openssl'
require 'openssl/x509'
module ChefVPCToolkit

  module CloudServersVPC

    # Minimal HTTP(S) client for the Cloud Servers VPC REST API.
    #
    # Connection settings (base URL, HTTP basic-auth credentials and the
    # optional client SSL key/cert/CA) come from Util.load_configs and are
    # cached in class variables on first use.
    class Connection

      MULTI_PART_BOUNDARY="jtZ!pZ1973um"

      @@http=nil
      @@auth_user=nil
      @@auth_password=nil

      # Builds and caches the Net::HTTP handle from the toolkit configs.
      # For https URLs, peer verification is enabled only when a CA cert
      # is configured; a client key/cert pair is attached when present.
      def self.init_connection
        configs=Util.load_configs
        base_url = configs["cloud_servers_vpc_url"]
        @@auth_user = configs["cloud_servers_vpc_username"]
        @@auth_password = configs["cloud_servers_vpc_password"]
        ssl_key = configs["ssl_key"]
        ssl_cert = configs["ssl_cert"]
        ssl_ca_cert = configs["ssl_ca_cert"]

        url=URI.parse(base_url)
        @@http = Net::HTTP.new(url.host,url.port)
        if base_url =~ /^https/
          @@http.use_ssl = true
          if ssl_ca_cert then
            @@http.verify_mode = OpenSSL::SSL::VERIFY_PEER
          else
            @@http.verify_mode = OpenSSL::SSL::VERIFY_NONE
          end
          if ssl_key then
            pkey_data=IO.read(ssl_key)
            # The key file may contain either an RSA or a DSA private key.
            if pkey_data =~ /^-----BEGIN RSA PRIVATE KEY-----/
              @@http.key=OpenSSL::PKey::RSA.new(pkey_data)
            else
              @@http.key=OpenSSL::PKey::DSA.new(pkey_data)
            end
          end
          @@http.cert=OpenSSL::X509::Certificate.new(IO.read(ssl_cert)) if ssl_cert
          @@http.ca_file=ssl_ca_cert if ssl_ca_cert
        end
      end

      # POSTs a multipart/form-data body built from plain form fields
      # (post_data) and file parts (file_data: part name => local path).
      # Returns the response body on success; prints the error body and
      # raises (via Net::HTTPResponse#error!) on failure.
      def self.file_upload(url_path, file_data={}, post_data={})
        init_connection if @@http.nil?

        req = Net::HTTP::Post.new(url_path)
        post_arr=[]
        post_data.each_pair do |key, value|
          post_arr << "--#{MULTI_PART_BOUNDARY}\r\n"
          post_arr << "Content-Disposition: form-data; name=\"#{key}\"\r\n"
          post_arr << "\r\n"
          post_arr << value
          post_arr << "\r\n"
        end
        file_data.each_pair do |name, file|
          post_arr << "--#{MULTI_PART_BOUNDARY}\r\n"
          post_arr << "Content-Disposition: form-data; name=\"#{name}\"; filename=\"#{File.basename(file)}\"\r\n"
          post_arr << "Content-Type: text/plain\r\n"
          post_arr << "\r\n"
          post_arr << File.read(file)
          post_arr << "\r\n"
        end
        # Emit the closing boundary exactly once. The previous version
        # appended a "--#{MULTI_PART_BOUNDARY}--" terminator after every
        # file part *and* again after the loop, producing a malformed body
        # whenever more than one part followed the first file.
        post_arr << "--#{MULTI_PART_BOUNDARY}--\r\n"
        req.body=post_arr.join
        req.basic_auth @@auth_user, @@auth_password if @@auth_user and @@auth_password
        req["Content-Type"] = "multipart/form-data, boundary=#{MULTI_PART_BOUNDARY}"

        handle_response(@@http.request(req), true)
      end

      # POSTs either a raw body (String) or form data (Hash); raises a
      # RuntimeError for any other post_data type. Returns the response
      # body on success; prints the error body and raises on failure.
      def self.post(url_path, post_data)
        init_connection if @@http.nil?

        req = Net::HTTP::Post.new(url_path)
        if post_data.kind_of?(String) then
          req.body=post_data
        elsif post_data.kind_of?(Hash) then
          req.form_data=post_data
        else
          raise "Invalid post data type."
        end
        req.basic_auth @@auth_user, @@auth_password if @@auth_user and @@auth_password

        handle_response(@@http.request(req), true)
      end

      # GETs url_path and returns the response body on success.
      def self.get(url_path)
        init_connection if @@http.nil?

        req = Net::HTTP::Get.new(url_path)
        req.basic_auth @@auth_user, @@auth_password if @@auth_user and @@auth_password

        handle_response(@@http.request(req))
      end

      # DELETEs url_path and returns the response body on success.
      def self.delete(url_path)
        init_connection if @@http.nil?

        req = Net::HTTP::Delete.new(url_path)
        req.basic_auth @@auth_user, @@auth_password if @@auth_user and @@auth_password

        handle_response(@@http.request(req))
      end

      # Shared success/error handling for all verbs: returns the body for
      # 2xx responses, otherwise raises via Net::HTTPResponse#error!.
      # When print_error_body is true the error body is printed first,
      # preserving the old post/file_upload behaviour (get/delete stay
      # silent, as before).
      def self.handle_response(response, print_error_body=false)
        case response
        when Net::HTTPSuccess
          response.body
        else
          puts response.body if print_error_body
          response.error!
        end
      end
      private_class_method :handle_response

    end

  end
end
| 23.578231 | 106 | 0.695326 |
87322a3cb8c2a8cc46bb816354e67c1a58e98305 | 936 | # Shared config mixin used by all Thor namespaced tasks.
#
module Mofa
  # Shared config mixin used by all Thor namespaced tasks; holds the
  # parsed contents of ~/.mofa/config.yml.
  module Config
    @@config = {}

    # Parsed configuration hash ({} until Config.load has succeeded).
    def self.config
      @@config
    end

    # Loads ~/.mofa/config.yml into Config.config. Missing folder/file
    # only produce warnings (best-effort, matching the old behaviour);
    # the config is parsed only when the file actually exists.
    def self.load
      config_dir = File.join(ENV['HOME'], '.mofa')
      config_file = File.join(config_dir, 'config.yml')

      unless Dir.exist?(config_dir)
        warn "Mofa config folder not present! Please create a folder .mofa in your HOME directory: mkdir ~/.mofa"
      end
      unless File.exist?(config_file)
        warn "Mofa config file not present at #{ENV['HOME']}/.mofa/config.yml! Please create a config file first! (see README.md)"
      end
      if File.exist?(config_file)
        # YAML.load_file opens and closes the file itself; the previous
        # YAML.load(File.open(...)) leaked the open file handle.
        @@config = YAML.load_file(config_file)
      end
    end
  end
end
| 34.666667 | 133 | 0.625 |
7a611b5f57fbc680dc26c0ae7e4395c0f3f45f49 | 252 | require 'gearbox/responses/base'
module Gearbox
module Response
module Model
class DockerImageMetaData < Response::Base
attr_reader :kind
def initialize(obj)
super(obj)
end
end
end
end
end
| 13.263158 | 48 | 0.615079 |
61154c122bba7bfde2cca1fc14eef7be843fbf2f | 3,318 | require 'spec_helper'
describe "#zinterstore(destination, keys, [:weights => [w,w,], [:aggregate => :sum|:min|:max])" do
before do
@odds = 'mock-redis-test:zinterstore:odds'
@primes = 'mock-redis-test:zinterstore:primes'
@dest = 'mock-redis-test:zinterstore:dest'
@redises.zadd(@odds, 1, 'one')
@redises.zadd(@odds, 3, 'three')
@redises.zadd(@odds, 5, 'five')
@redises.zadd(@odds, 7, 'seven')
@redises.zadd(@odds, 9, 'nine')
@redises.zadd(@primes, 2, 'two')
@redises.zadd(@primes, 3, 'three')
@redises.zadd(@primes, 5, 'five')
@redises.zadd(@primes, 7, 'seven')
end
it "returns the number of elements in the new set" do
@redises.zinterstore(@dest, [@odds, @primes]).should == 3
end
it "sums the members' scores by default" do
@redises.zinterstore(@dest, [@odds, @primes])
@redises.zrange(@dest, 0, -1, :with_scores => true).should ==
[["three", 6.0], ["five", 10.0], ["seven", 14.0]]
end
it "removes existing elements in destination" do
@redises.zadd(@dest, 10, 'ten')
@redises.zinterstore(@dest, [@primes])
@redises.zrange(@dest, 0, -1, :with_scores => true).should ==
[["two", 2.0], ["three", 3.0], ["five", 5.0], ["seven", 7.0]]
end
it "raises an error if keys is empty" do
lambda do
@redises.zinterstore(@dest, [])
end.should raise_error(RuntimeError)
end
context "the :weights argument" do
it "multiplies the scores by the weights while aggregating" do
@redises.zinterstore(@dest, [@odds, @primes], :weights => [2, 3])
@redises.zrange(@dest, 0, -1, :with_scores => true).should ==
[["three", 15.0], ["five", 25.0], ["seven", 35.0]]
end
it "raises an error if the number of weights != the number of keys" do
lambda do
@redises.zinterstore(@dest, [@odds, @primes], :weights => [1,2,3])
end.should raise_error(RuntimeError)
end
end
context "the :aggregate argument" do
before do
@smalls = 'mock-redis-test:zinterstore:smalls'
@bigs = 'mock-redis-test:zinterstore:bigs'
@redises.zadd(@smalls, 1, 'bert')
@redises.zadd(@smalls, 2, 'ernie')
@redises.zadd(@bigs, 100, 'bert')
@redises.zadd(@bigs, 200, 'ernie')
end
it "aggregates scores with min when :aggregate => :min is specified" do
@redises.zinterstore(@dest, [@bigs, @smalls], :aggregate => :min)
@redises.zrange(@dest, 0, -1, :with_scores => true).should ==
[["bert", 1.0], ["ernie", 2.0]]
end
it "aggregates scores with max when :aggregate => :max is specified" do
@redises.zinterstore(@dest, [@bigs, @smalls], :aggregate => :max)
@redises.zrange(@dest, 0, -1, :with_scores => true).should ==
[["bert", 100.0], ["ernie", 200.0]]
end
it "allows 'min', 'MIN', etc. as aliases for :min" do
@redises.zinterstore(@dest, [@bigs, @smalls], :aggregate => 'min')
@redises.zscore(@dest, 'bert').should == 1.0
@redises.zinterstore(@dest, [@bigs, @smalls], :aggregate => 'MIN')
@redises.zscore(@dest, 'bert').should == 1.0
end
it "raises an error for unknown aggregation function" do
lambda do
@redises.zinterstore(@dest, [@bigs, @smalls], :aggregate => :mix)
end.should raise_error(RuntimeError)
end
end
end
| 34.206186 | 98 | 0.603677 |
e83839051b760c4357099aa1aa9ed382f03180b1 | 1,558 | # frozen_string_literal: true
module RuboCop
  module Cop
    module Layout
      # This cop checks the indentation of the first line of the
      # right-hand-side of a multi-line assignment.
      #
      # @example
      #   # bad
      #   value =
      #   if foo
      #     'bar'
      #   end
      #
      #   # good
      #   value =
      #     if foo
      #       'bar'
      #     end
      #
      # The indentation of the remaining lines can be corrected with
      # other cops such as `IndentationConsistency` and `EndAlignment`.
      class AssignmentIndentation < Base
        include CheckAssignment
        include Alignment
        extend AutoCorrector

        MSG = 'Indent the first line of the right-hand-side of a ' \
              'multi-line assignment.'

        private

        # Registers an offense when the RHS starts on a later line than the
        # assignment operator but is not indented one configured step past
        # the leftmost assignment in the chain.
        def check_assignment(node, rhs)
          return unless rhs
          return unless node.loc.operator
          return if node.loc.operator.line == rhs.first_line

          base = display_column(leftmost_multiple_assignment(node).source_range)
          check_alignment([rhs], base + configured_indentation_width)
        end

        def autocorrect(corrector, node)
          AlignmentCorrector.correct(corrector, processed_source, node, column_delta)
        end

        # Walks up chained same-line assignments (`a = b = c = ...`) and
        # returns the leftmost one. The recursion result must be returned:
        # the previous version discarded it and returned `node.parent`,
        # yielding the wrong base column for chains of three or more
        # assignments.
        def leftmost_multiple_assignment(node)
          return node unless same_line?(node, node.parent) &&
                             node.parent.assignment?

          leftmost_multiple_assignment(node.parent)
        end
      end
    end
  end
end
| 26.40678 | 85 | 0.590501 |
bb8f2d74ed70c8756c2bdc0f63424389380d52b1 | 489 | class CoursesController < ApplicationController
before_action :set_course
def rsvp
invitation = CourseInvitation.find_or_create_by(course: @course, member: current_user)
redirect_to course_invitation_path(invitation)
end
def show
@host_address = AddressPresenter.new(@course.sponsor&.address)
@course = CoursePresenter.new(@course)
end
private
def set_course
slug = params[:id] || params[:course_id]
@course = Course.find_by(slug: slug)
end
end
| 23.285714 | 90 | 0.742331 |
require 'test_helper'

module AuditorGeneral
  # Verifies the audit-logging hooks: which model actions produce
  # AuditorGeneralLog rows and what those rows record.
  class ModelTest < ActiveSupport::TestCase
    setup do
      # First fixture user; the role-change tests assume role == 'admin'.
      @admin = User.first
    end

    # NOTE(review): several test names refer to a "flag", but nothing is
    # toggled in this file -- presumably the flag lives in the models' audit
    # configuration (deletions audited, creations excluded). Confirm against
    # the User model before relying on these names.
    test 'Deletions are logged when flag is not set' do
      assert_difference('AuditorGeneralLog.count') do
        @admin.destroy
      end
    end

    test 'Creations are not logged when flag is set' do
      assert_no_difference('AuditorGeneralLog.count') do
        User.create(name: 'test', role: 'user')
      end
    end

    # A destroy log keeps the record's final attribute values and identity.
    test 'Deletions log last attributes of record' do
      record = @admin
      @admin.destroy
      log = AuditorGeneralLog.last
      assert_equal record.model_name, log.model_type
      assert_equal record.id, log.model_id
      assert_equal 'destroy', log.action
      assert_equal record.attributes, log.alterations
    end

    test "Changes to auditable attribute are logged" do
      assert_difference('AuditorGeneralLog.count') do
        @admin.update_attribute(:role, 'user')
      end
    end

    # Alterations are stored in ActiveModel "changes" form: attr => [old, new].
    test "Changes to auditable attribute record alterations" do
      @admin.update_attribute(:role, 'user')
      changes = {"role"=>["admin", "user"]}
      log = AuditorGeneralLog.last
      assert_equal changes, log.alterations
      assert_nil changes['updated_at']
    end

    test "Changes to not auditable attributes are not logged" do
      assert_no_difference('AuditorGeneralLog.count') do
        @admin.update_attribute(:name, 'Test')
      end
    end
  end
end
| 26.490909 | 64 | 0.673988 |
# Adds the protocols table: a titled record with a free-text description.
class CreateProtocols < ActiveRecord::Migration
  def change
    create_table :protocols do |t|
      t.string :title
      t.text :description

      t.timestamps
    end
  end
end
| 16.545455 | 47 | 0.67033 |
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.

ActiveRecord::Schema.define(version: 2018_12_06_062912) do

  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"

  # Standard Rails ActiveStorage bookkeeping tables.
  create_table "active_storage_attachments", force: :cascade do |t|
    t.string "name", null: false
    t.string "record_type", null: false
    t.bigint "record_id", null: false
    t.bigint "blob_id", null: false
    t.datetime "created_at", null: false
    t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
    t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
  end

  create_table "active_storage_blobs", force: :cascade do |t|
    t.string "key", null: false
    t.string "filename", null: false
    t.string "content_type"
    t.text "metadata"
    t.bigint "byte_size", null: false
    t.string "checksum", null: false
    t.datetime "created_at", null: false
    t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true
  end

  # Time-boxed promotional banners, optionally limited by app version.
  create_table "banners", force: :cascade do |t|
    t.string "image_url"
    t.string "address"
    t.datetime "start_at"
    t.datetime "end_at"
    t.string "ios_version"
    t.string "android_version"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.bigint "ios_version_number"
    t.bigint "android_version_number"
  end

  # Per-locale translations for dapps (Globalize-style translation table).
  create_table "dapp_translations", force: :cascade do |t|
    t.integer "dapp_id", null: false
    t.string "locale", null: false
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.string "name"
    t.text "intro"
    t.text "desc"
    t.index ["dapp_id"], name: "index_dapp_translations_on_dapp_id"
    t.index ["locale"], name: "index_dapp_translations_on_locale"
  end

  create_table "dapp_type_translations", force: :cascade do |t|
    t.integer "dapp_type_id", null: false
    t.string "locale", null: false
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.string "name"
    t.index ["dapp_type_id"], name: "index_dapp_type_translations_on_dapp_type_id"
    t.index ["locale"], name: "index_dapp_type_translations_on_locale"
  end

  create_table "dapp_types", force: :cascade do |t|
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  # Listed dapps; display text lives in dapp_translations.
  create_table "dapps", force: :cascade do |t|
    t.string "logo_url"
    t.string "url_address"
    t.integer "d_type"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.string "marketing_url"
    t.datetime "start_at"
    t.datetime "end_at"
    t.string "ios_version"
    t.string "android_version"
    t.integer "score"
    t.datetime "publish_at"
    t.string "developer"
    t.bigint "dapp_type_id"
    t.bigint "ios_version_number"
    t.bigint "android_version_number"
    t.boolean "filter_ip", default: false
    t.index ["dapp_type_id"], name: "index_dapps_on_dapp_type_id"
  end

  create_table "images", force: :cascade do |t|
    t.string "name"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  create_table "users", force: :cascade do |t|
    t.string "username"
    t.string "password_digest"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

end
# Homebrew formula for oneTBB (oneAPI Threading Building Blocks).
class Tbb < Formula
  desc "Rich and complete approach to parallelism in C++"
  homepage "https://github.com/oneapi-src/oneTBB"
  url "https://github.com/oneapi-src/oneTBB/archive/refs/tags/v2021.4.0.tar.gz"
  sha256 "021796c7845e155e616f5ecda16daa606ebb4c6f90b996e5c08aebab7a8d3de3"
  license "Apache-2.0"

  bottle do
    sha256 cellar: :any, arm64_big_sur: "562dbe3727195b7d22f5750f720ab8719e84dd557f120af380fe65ebf1de0f71"
    sha256 cellar: :any, big_sur: "292efca6f88d8dc0dd396593ec9cd7fffee60457968f3bf4911e595e67b0e4e5"
    sha256 cellar: :any, catalina: "ceab79696162f301977698d1274dfc220de372ba473845c0b89ce29572e2c54b"
    sha256 cellar: :any, mojave: "d5c1155379f21962bc47d172a9b673c4a72b24656b5f7fed5990d3e34b909c98"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "5dfbbb5d279074f2bc885ef3fa4c8e28faf69115434f1e58212bc3b027e36fcf"
  end

  depends_on "cmake" => :build
  depends_on "swig" => :build
  depends_on "[email protected]"

  # Fix installation of Python components
  # See https://github.com/oneapi-src/oneTBB/issues/343
  patch :DATA

  def install
    # Tests are skipped; the Python bindings (tbb4py) are built and installed
    # in a second pass below.
    args = *std_cmake_args + %w[
      -DTBB_TEST=OFF
      -DTBB4PY_BUILD=ON
    ]

    mkdir "build" do
      system "cmake", "..", *args
      system "make"
      system "make", "install"
    end

    cd "python" do
      ENV.append_path "CMAKE_PREFIX_PATH", prefix.to_s
      # Embed an rpath so the Python extension can find libtbb at runtime.
      ENV["LDFLAGS"] = "-rpath #{opt_lib}" if OS.mac?
      ENV["TBBROOT"] = prefix
      system Formula["[email protected]"].opt_bin/"python3", *Language::Python.setup_install_args(prefix)
    end

    # On Linux, replace the superenv shim compiler path recorded in irml's
    # generated build files with the real system compiler.
    inreplace_files = Dir[prefix/"rml/CMakeFiles/irml.dir/{flags.make,build.make,link.txt}"]
    inreplace inreplace_files, Superenv.shims_path/ENV.cxx, "/usr/bin/c++" if OS.linux?
  end

  test do
    # Sums 1..100 with parallel_reduce; the program prints 5050 on success.
    (testpath/"sum1-100.cpp").write <<~EOS
      #include <iostream>
      #include <tbb/blocked_range.h>
      #include <tbb/parallel_reduce.h>

      int main()
      {
        auto total = tbb::parallel_reduce(
          tbb::blocked_range<int>(0, 100),
          0.0,
          [&](tbb::blocked_range<int> r, int running_total)
          {
            for (int i=r.begin(); i < r.end(); ++i) {
              running_total += i + 1;
            }
            return running_total;
          }, std::plus<int>()
        );
        std::cout << total << std::endl;
        return 0;
      }
    EOS
    system ENV.cxx, "sum1-100.cpp", "--std=c++14", "-L#{lib}", "-ltbb", "-o", "sum1-100"
    assert_equal "5050", shell_output("./sum1-100").chomp
    # Smoke-test the installed Python bindings.
    system Formula["[email protected]"].opt_bin/"python3", "-c", "import tbb"
  end
end
__END__
diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt
index 1d2b05f..81ba8de 100644
--- a/python/CMakeLists.txt
+++ b/python/CMakeLists.txt
@@ -49,7 +49,7 @@ add_test(NAME python_test
-DPYTHON_MODULE_BUILD_PATH=${PYTHON_BUILD_WORK_DIR}/build
-P ${PROJECT_SOURCE_DIR}/cmake/python/test_launcher.cmake)
-install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/${PYTHON_BUILD_WORK_DIR}/build/
+install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/${PYTHON_BUILD_WORK_DIR}/
DESTINATION .
COMPONENT tbb4py)
| 34.12766 | 122 | 0.650561 |
# json is a hard requirement; abort with a non-zero status so the caller
# notices the missing dependency immediately.
begin
  require 'json'
rescue LoadError
  $stderr.puts "Missing json gem. Please run 'bundle install'"
  exit 1
end

# amqp is optional (only needed for the AMQP participant/listener pair),
# so a missing gem just produces a warning instead of aborting.
begin
  require 'amqp'
rescue LoadError
  $stderr.puts "Missing amqp gem. Please uncomment the amqp section in the Gemfile and run 'bundle install' if you wish to use the AMQP participant/listener pair in ruote"
end
| 25.615385 | 171 | 0.762763 |
# Example: fetch Service Level Objective history from the Datadog API.
require 'dogapi'

# Credentials and target SLO -- replace the placeholders before running.
api_key = '<DATADOG_API_KEY>'
app_key = '<DATADOG_APPLICATION_KEY>'
slo_id = '<YOUR_SLO_ID>'

dog = Dogapi::Client.new(api_key, app_key)

# Query a 30-day window ending at this fixed Unix timestamp (seconds).
to_ts = 1_571_320_613
from_ts = to_ts - 60 * 60 * 24 * 30

dog.get_service_level_objective_history(slo_id, from_ts, to_ts)
| 21.538462 | 63 | 0.742857 |
# Homebrew formula for ETL, a header-only template library used by Synfig.
class Etl < Formula
  desc "Extensible Template Library"
  homepage "https://synfig.org"
  url "https://downloads.sourceforge.net/project/synfig/releases/1.0.2/source/ETL-0.04.19.tar.gz"
  sha256 "ba944c1a07fd321488f9d034467931b8ba9e48454abef502a633ff4835380c1c"

  bottle do
    cellar :any_skip_relocation
    sha256 "2ed5639481b7ab8003063620f6714b4372d1cb8229e2b8369153906024b9c95f" => :high_sierra
    sha256 "7275d40af2ee9e99feec8a04a9296b1167b24ca8f7125a875d08c13b4913e81b" => :sierra
    sha256 "10244415e0dbf71f94c7585595632a09773a49dbc5bf5ac8de7e062f29c7f2b4" => :el_capitan
    sha256 "29198ad9d848f2ff79b224a5467da1fb22a474de5ffc3e287196fd3822a45178" => :yosemite
    sha256 "024271929c1e3de9d4c4e256a932fa9525395f7421fc174e7010251ab9a4b37e" => :mavericks
  end

  def install
    system "./configure", "--disable-debug",
                          "--disable-dependency-tracking",
                          "--prefix=#{prefix}"
    system "make", "install"
  end

  test do
    # Compiles a tiny program against the installed headers; the binary exits
    # 0 only when etl::ceil_to_int(5.5) == 6.
    (testpath/"test.cpp").write <<~EOS
      #include <ETL/misc>
      int main(int argc, char *argv[])
      {
        int rv = etl::ceil_to_int(5.5);
        return 6 - rv;
      }
    EOS
    flags = %W[
      -I#{include}
      -lpthread
    ]
    system ENV.cxx, "test.cpp", "-o", "test", *flags
    system "./test"
  end
end
| 32.9 | 97 | 0.683131 |
require 'rspec/expectations'

# RSpec matchers for asserting on Delayed::Job behaviour. Both matchers drain
# any pre-existing jobs (work_off) before running the block, so assertions
# only see jobs the block itself enqueues.
module DelayedJobHelper
  # `expect { ... }.to delay_method(:foo)` -- passes when the block enqueues a
  # job whose payload method is :foo (or the :foo_without_delay alias that
  # delayed_job generates).
  RSpec::Matchers.define :delay_method do |expected|
    def supports_block_expectations?
      true
    end

    match do |proc|
      Delayed::Worker.new.work_off
      proc.call
      # Only the most recently enqueued job is inspected.
      job = Delayed::Job.last
      @actual = job.payload_object.method_name.to_sym
      (@actual == expected) || (@actual == "#{expected}_without_delay".to_sym)
    end

    failure_message do |proc|
      "expected #{proc} to have delayed the method '#{expected}' but got '#{@actual}'"
    end

    failure_message_when_negated do |proc|
      "expected #{proc} not to have delayed the method '#{expected}'"
    end
  end

  # `expect { ... }.to have_delayed_job(n)` -- passes when the block enqueues
  # exactly n jobs (n defaults to 1 when omitted).
  RSpec::Matchers.define :have_delayed_job do |count|
    def supports_block_expectations?
      true
    end

    match do |proc|
      Delayed::Worker.new.work_off
      @expected_count = count || 1
      before_count = Delayed::Job.count
      proc.call
      after_count = Delayed::Job.count
      @actual_count = (after_count - before_count)
      @actual_count == @expected_count
    end

    failure_message do |proc|
      "expected #{proc} to have enqueued #{@expected_count} delayed job(s) but enqueued #{@actual_count}"
    end

    failure_message_when_negated do |proc|
      "expected #{proc} not to have enqueued #{@expected_count} delayed job(s) but enqueued #{@actual_count}"
    end
  end
end
| 26.423077 | 109 | 0.665939 |
module Xray
  # Memoized global configuration instance.
  def self.config
    @@config ||= Config.new
  end

  # Persists xray's settings (currently just the editor command) as YAML in
  # ~/.xrayconfig, falling back to a default when no file is present or it
  # cannot be parsed.
  class Config
    attr_accessor :editor

    CONFIG_FILE = "#{Dir.home}/.xrayconfig"
    DEFAULT_EDITOR = '/usr/local/bin/subl'

    # Effective editor command: the persisted value, or the default.
    def editor
      load_config[:editor]
    end

    # Persists a new editor command. Returns true when a change was written,
    # false when the value was blank or identical to the current setting.
    def editor=(new_editor)
      return false unless new_editor && new_editor != editor

      write_config(editor: new_editor)
      true
    end

    def to_yaml
      { editor: editor }.to_yaml
    end

    private

    # Merges the change into the current config and rewrites the whole file.
    def write_config(new_config)
      merged = load_config.merge(new_config)
      File.open(CONFIG_FILE, 'w') { |f| f.write(merged.to_yaml) }
    end

    # Defaults overlaid with whatever is on disk.
    def load_config
      default_config.merge(local_config)
    end

    # Any read or parse failure counts as "no local config".
    def local_config
      YAML.load_file(CONFIG_FILE)
    rescue StandardError
      {}
    end

    def default_config
      { editor: DEFAULT_EDITOR }
    end
  end
end
| 16.377358 | 65 | 0.612903 |
#!/usr/bin/env rspec

require 'spec_helper'

require 'facter/util/uptime'

# Exercises the fallback chain used to compute uptime on Unix:
# /proc/uptime -> `sysctl -n kern.boottime` -> `kstat -p unix:::boot_time`
# -> `who -b` -> nil. Each command is stubbed to `cat` a fixture file.
describe Facter::Util::Uptime do

  describe ".get_uptime_seconds_unix", :unless => Facter.value(:operatingsystem) == 'windows' do
    describe "when /proc/uptime is available" do
      before do
        uptime_file = my_fixture("ubuntu_proc_uptime")
        # The path is shelled out later, hence the embedded double quotes.
        Facter::Util::Uptime.stubs(:uptime_file).returns("\"#{uptime_file}\"")
      end

      it "should return the uptime in seconds as an integer" do
        Facter::Util::Uptime.get_uptime_seconds_unix.should == 5097686
      end
    end

    describe "when /proc/uptime is not available" do
      before :each do
        @nonexistent_file = '/non/existent/file'
        File.exists?(@nonexistent_file).should == false
        Facter::Util::Uptime.stubs(:uptime_file).returns(@nonexistent_file)
      end

      # Each test freezes Time.now one hour after the fixture's boot time,
      # so the expected uptime is exactly 3600 seconds.
      it "should use 'sysctl -n kern.boottime' on OpenBSD" do
        sysctl_output_file = my_fixture('sysctl_kern_boottime_openbsd') # Dec 09 21:11:46 +0000 2011
        Facter::Util::Uptime.stubs(:uptime_sysctl_cmd).returns("cat \"#{sysctl_output_file}\"")
        Time.stubs(:now).returns Time.parse("Dec 09 22:11:46 +0000 2011") # one hour later
        Facter::Util::Uptime.get_uptime_seconds_unix.should == 60 * 60
      end

      it "should use 'sysctl -n kern.boottime' on Darwin, etc." do
        sysctl_output_file = my_fixture('sysctl_kern_boottime_darwin') # Oct 30 21:52:27 +0000 2011
        Facter::Util::Uptime.stubs(:uptime_sysctl_cmd).returns("cat \"#{sysctl_output_file}\"")
        Time.stubs(:now).returns Time.parse("Oct 30 22:52:27 +0000 2011") # one hour later
        Facter::Util::Uptime.get_uptime_seconds_unix.should == 60 * 60
      end

      describe "nor is 'sysctl kern.boottime'" do
        before :each do
          Facter::Util::Uptime.stubs(:uptime_sysctl_cmd).returns("cat \"#{@nonexistent_file}\"")
        end

        it "should use 'kstat -p unix:::boot_time'" do
          kstat_output_file = my_fixture('kstat_boot_time') # unix:0:system_misc:boot_time 1236919980
          Facter::Util::Uptime.stubs(:uptime_kstat_cmd).returns("cat \"#{kstat_output_file}\"")
          Time.stubs(:now).returns Time.at(1236923580) #one hour later
          Facter::Util::Uptime.get_uptime_seconds_unix.should == 60 * 60
        end

        describe "nor is 'kstat -p unix:::boot_time'" do
          before :each do
            Facter::Util::Uptime.stubs(:uptime_kstat_cmd).returns("cat \"#{@nonexistent_file}\"")
          end

          it "should use 'who -b'" do
            who_b_output_file = my_fixture('who_b_boottime') # Aug 1 14:13
            Facter::Util::Uptime.stubs(:uptime_who_cmd).returns("cat \"#{who_b_output_file}\"")
            Time.stubs(:now).returns Time.parse("Aug 01 15:13") # one hour later
            Facter::Util::Uptime.get_uptime_seconds_unix.should == 60 * 60
          end

          describe "nor is 'who -b'" do
            before :each do
              Facter::Util::Uptime.stubs(:uptime_who_cmd).returns("cat \"#{@nonexistent_file}\"")
            end

            it "should return nil" do
              # All sources exhausted: uptime is unknown.
              Facter::Util::Uptime.get_uptime_seconds_unix.should == nil
            end
          end
        end
      end
    end
  end

  describe ".get_uptime_seconds_win", :if => Facter.value(:operatingsystem) == 'windows' do
    it "should return a postive value" do
      Facter::Util::Uptime.get_uptime_seconds_win.should > 0
    end
  end
end
| 40.139535 | 104 | 0.639919 |
begin
  require 'rails/railtie'
rescue LoadError
  # Not running inside a Rails app: there is nothing to hook into, so the
  # railtie is simply not defined.
else
  require 'global_id'
  require 'active_support'
  require 'active_support/core_ext/string/inflections'
  require 'active_support/core_ext/integer/time'

  class GlobalID
    # = GlobalID Railtie
    # Set up the signed GlobalID verifier and include Active Record support.
    class Railtie < Rails::Railtie # :nodoc:
      config.global_id = ActiveSupport::OrderedOptions.new
      config.eager_load_namespaces << GlobalID

      initializer 'global_id' do |app|
        default_expires_in = 1.month
        # e.g. railtie_name "blog_application" -> app identifier "blog".
        default_app_name = app.railtie_name.remove('_application').dasherize

        # Defaults are applied eagerly here, then re-applied after user
        # initializers have run (below) so config/initializers overrides of
        # app name and expiry take effect.
        GlobalID.app = app.config.global_id.app ||= default_app_name
        SignedGlobalID.expires_in = app.config.global_id.fetch(:expires_in, default_expires_in)

        config.after_initialize do
          GlobalID.app = app.config.global_id.app ||= default_app_name
          SignedGlobalID.expires_in = app.config.global_id.fetch(:expires_in, default_expires_in)

          app.config.global_id.verifier ||= begin
            GlobalID::Verifier.new(app.key_generator.generate_key('signed_global_ids'))
          rescue ArgumentError
            # No secret_key_base available (e.g. credentials not set up yet);
            # signed GlobalIDs are then disabled.
            nil
          end
          SignedGlobalID.verifier = app.config.global_id.verifier
        end

        ActiveSupport.on_load(:active_record) do
          require 'global_id/identification'
          send :include, GlobalID::Identification
        end

        ActiveSupport.on_load(:active_record_fixture_set) do
          require 'global_id/fixture_set'
          send :extend, GlobalID::FixtureSet
        end
      end
    end
  end
end
| 31.08 | 95 | 0.725869 |
# Standard Devise users table: database_authenticatable, recoverable,
# rememberable and trackable enabled; confirmable/lockable left commented out.
# NOTE: t.inet columns imply a PostgreSQL database.
class DeviseCreateUsers < ActiveRecord::Migration[5.1]
  def change
    create_table :users do |t|
      ## Database authenticatable
      t.string :email, null: false, default: ""
      t.string :encrypted_password, null: false, default: ""

      ## Recoverable
      t.string :reset_password_token
      t.datetime :reset_password_sent_at

      ## Rememberable
      t.datetime :remember_created_at

      ## Trackable
      t.integer :sign_in_count, default: 0, null: false
      t.datetime :current_sign_in_at
      t.datetime :last_sign_in_at
      t.inet :current_sign_in_ip
      t.inet :last_sign_in_ip

      ## Confirmable
      # t.string :confirmation_token
      # t.datetime :confirmed_at
      # t.datetime :confirmation_sent_at
      # t.string :unconfirmed_email # Only if using reconfirmable

      ## Lockable
      # t.integer :failed_attempts, default: 0, null: false # Only if lock strategy is :failed_attempts
      # t.string :unlock_token # Only if unlock strategy is :email or :both
      # t.datetime :locked_at

      # add_column :users, :admin, :boolean, default: false

      t.timestamps null: false
    end

    add_index :users, :email, unique: true
    add_index :users, :reset_password_token, unique: true
    # add_index :users, :confirmation_token, unique: true
    # add_index :users, :unlock_token, unique: true
  end
end
| 33 | 104 | 0.649753 |
module MARC
  # Screen-scraping client for Innovative Interfaces (III) OPACs: fetches bib
  # records, decodes III's pseudo-MARC display format into MARC::Record
  # objects, and provides simple keyword and title searches.
  class IIIReader
    class ParserError < RuntimeError; end
    class NonExistentRecordError < RuntimeError; end

    include HTTParty

    attr_accessor :base_uri, :record_uri, :marc_uri
    attr_reader :coder

    # opac_uri:: base URI of the OPAC (installed as HTTParty's base_uri)
    # scope::    optional III search scope interpolated into request paths
    def initialize(opac_uri, scope='')
      @entities = HTMLEntities.new
      @scope = scope
      self.class.base_uri(opac_uri)
    end

    # Method for creating the appropriate MARC::Record
    # based object by inspecting the record's leader
    def create_record_for_type(leader)
      leader = Leader.new(leader)
      if RECORD_TYPES.has_key?(leader.get_type)
        record = RECORD_TYPES[leader.get_type].new
      else
        record = MARC::Record.new
      end
      record.leader = leader
      record
    end

    # Fetches +uri+ and returns the response body, or nil on any HTTP error.
    def get_page(uri)
      resp = self.class.get(uri)
      if resp.code < 400
        resp.body
      else
        nil
      end
    end

    # True when the OPAC serves a real record page for +bibnumber+.
    def record_exists?(bibnumber)
      page = get_page(URI_FOR_RECORD % [bibnumber, @scope])
      return false unless page
      page.include?('No Such Record') ? false : true
    end

    # Iterates bib numbers from +bib_start+ to +bib_end+ inclusive
    # (e.g. "b1000000".."b1069789"). Yields each decoded record when a block
    # is given; otherwise collects and returns them.
    #
    # BUG FIX: the guard previously called String#starts_with, which does not
    # exist (the core predicate is #start_with?), so every invocation raised
    # NoMethodError before any crawling happened.
    def crawl_records(bib_start, bib_end)
      unless bib_start.start_with?('b') and bib_end.start_with?('b')
        raise ArgumentError, 'Invalid bib record number'
      end
      bib_start = bib_start[1..-1].to_i
      bib_end = bib_end[1..-1].to_i
      records = Array.new
      (bib_start..bib_end).each do |num|
        record = get_record("b#{num}")
        if record and block_given?
          yield record
        else
          records << record if record
        end
      end
      return records unless block_given?
    end

    # Method for retrieving a record from the opac, decoding it
    # and returning a MARC::Record object. Returns nil (after warning) when
    # the record does not exist or cannot be parsed.
    #
    # NOTE(review): Iconv was removed from the Ruby standard library in 2.0;
    # this code presumably targets 1.8/1.9 -- confirm before upgrading Ruby.
    def get_record(bibnumber)
      if record_exists?(bibnumber)
        marc_url = URI_FOR_MARC % ([@scope] + Array.new(3, bibnumber))
        record_url = URI_FOR_RECORD % [bibnumber, @scope]
        # Retrieve MARC data and convert to UTF-8 prior to decoding ...
        record_page = get_page(marc_url)
        record_data = MARC_REGEX.match(record_page)
        if record_data.nil?
          raise ParserError, "Could not decode data: MARC data not found."
        else
          record_data = record_data[1].strip()
          record_data = Iconv.conv('UTF-8', 'LATIN1', record_data)
        end
        record = decode_pseudo_marc(record_data)
        unless record.nil?
          # Attach provenance so callers can link back to the OPAC.
          record.bibnum = bibnumber
          record.raw = record_data
          record.record_url = "#{self.class.base_uri}#{record_url}"
          record.marc_url = "#{self.class.base_uri}#{marc_url}"
        end
        return record
      else
        raise NonExistentRecordError, "Record not found."
      end
    rescue NonExistentRecordError => error
      warn error.message
      return nil
    rescue ParserError => error
      warn error.message
      return nil
    end

    # Method for turning pseudo MARC data from III's OPAC
    # into a MARC::Record object.
    # ---
    # Only data conversion done is replacing HTML entities with their
    # corresponding characters
    def decode_pseudo_marc(pseudo_marc)
      raise ParserError, "Cannot decode empty string." if pseudo_marc == ""
      pseudo_marc = pseudo_marc.split("\n")
      raw_fields = []

      # The first line must be the leader; it selects the record class.
      if pseudo_marc[0][0..5] == "LEADER"
        record = create_record_for_type(pseudo_marc[0][7..-1])
      else
        raise ParserError, "Cannot decode record without a leader."
      end

      # First pass: split each line into tag/indicators/value, folding
      # continuation lines into the previous field.
      # NOTE(review): `field[0..2] != ' '` compares a 3-char slice against a
      # single space, so it is true even for lines starting with three
      # spaces -- confirm III continuation lines are detected as intended.
      pseudo_marc[1..pseudo_marc.length].each do |field|
        data = @entities.decode(field[7..-1])
        if field[0..2] != ' '
          # Non-control fields get a default "a" subfield delimiter prefix.
          data = MARC::ControlField.control_tag?(field[0..2]) ? data : "a#{data}"
          raw_fields << {
            :tag => field[0..2],
            :indicator1 => field[4,1],
            :indicator2 => field[5,1],
            :value => data.strip,
            :raw => field.strip
          }
        else
          raw_fields.last[:value] += " #{data}"
          raw_fields.last[:raw] += field.strip
        end
      end

      # Second pass: build MARC control/data fields ("|" separates subfields,
      # each beginning with its one-character subfield code).
      raw_fields.each do |field|
        tag = field[:tag]
        field_data = field[:value]
        if MARC::ControlField.control_tag?(tag)
          record.append(MARC::ControlField.new(tag, field_data))
        else
          datafield = MARC::DataField.new(tag)
          datafield.indicator1 = field[:indicator1]
          datafield.indicator2 = field[:indicator2]
          field_data.split('|').each{|sub|
            subfield = MARC::Subfield.new(sub[0,1], sub[1..-1])
            datafield.append(subfield)
          }
          record.append(datafield)
        end
      end
      return record
    end

    # Scrapes the keyword-search result page; returns an array of hashes with
    # :bibnum, :title and :year.
    # NOTE(review): #attribute returns a Nokogiri::XML::Attr, not a String --
    # confirm downstream consumers call .value/.to_s on :bibnum.
    def keyword_search(query)
      results = []
      page = get_page(URI_FOR_KEYWORD_SEARCH % [@scope, URI.escape(query)])
      return results unless page
      doc = Nokogiri::HTML(page)
      doc.xpath('//td[@class="briefCitRow"]').each do |row|
        match = {}
        match[:bibnum] = row.search('td[@class="briefcitEntryMark"] input[@type=checkbox]').attribute("value")
        match[:title] = row.search('span[@class="briefcitTitle"]').text
        match[:year] = row.search('td[@class="briefcitYear"]').text
        results << match
      end
      results
    end

    # Scrapes the title-browse result page; returns an array of hashes with
    # :title, :items (entry count), :author and :call_number (the latter two
    # only when the "author; call number" trailer is present).
    def title_search(query)
      results = []
      page = get_page(URI_FOR_TITLE_SEARCH % [@scope, URI.escape(query)])
      return results unless page
      doc = Nokogiri::HTML(page)
      doc.xpath('//tr[@class="browseEntry"]').each do |row|
        match = {:title => '', :items => '', :author => '', :call_number => ''}
        match[:title] = row.search('td[@class="browseEntryData"] a').text.strip_end_punctuation
        match[:items] = row.search('td[@class="browseEntryEntries"]').text
        auth_call = row.search('./td[@class="browseEntryData"]/a/following-sibling::text()')[0].text
        auth_call = auth_call.strip_punctuation
        if auth_call.include?(';')
          auth, call = auth_call.strip.split(';')
          match[:author] = auth.strip_punctuation
          match[:call_number] = call.strip_punctuation
        end
        results << match
      end
      results
    end
  end
end
| 30.813397 | 110 | 0.579193 |
module Matest
  # Sentinel object returned by specs that should be reported as skipped.
  # When constructed with a caller backtrace, it remembers where the skip
  # originated.
  class SkipMe
    attr_reader :source_location

    # the_caller: a caller-style backtrace (array of "file:line:in ..."
    # strings). When given, the first entry is parsed into [file, line].
    def initialize(the_caller = nil)
      return unless the_caller

      @the_caller = the_caller
      file, line = the_caller.first.split(":")
      @source_location = [file, line.to_i]
    end

    # Lets a SkipMe stand in for a spec block: calling the proc yields a
    # fresh SkipMe instance.
    def to_proc
      proc { SkipMe.new }
    end
  end
end
| 19.235294 | 50 | 0.623853 |
# -*- encoding: utf-8 -*-
#
# Author:: SAWANOBORI Yukihiko (<[email protected]>)
#
# Copyright (C) 2015, HiganWorks LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

require_relative "../../spec_helper"
require "logger"
require "stringio"

require "kitchen/verifier/shell"
require "kitchen/transport/ssh"

# Specs for the Shell verifier: configuration defaults, local shell-out
# behaviour (including the environment it exports), and remote execution
# through a transport connection.
describe Kitchen::Verifier::Shell do
  # Logger writes into a StringIO so log output can be asserted on.
  let(:logged_output) { StringIO.new }
  let(:logger)        { Logger.new(logged_output) }
  let(:platform)      { stub(:os_type => nil, :shell_type => nil, :name => "coolbeans") }
  let(:suite)         { stub(:name => "fries") }
  let(:state)         { Hash.new }

  let(:config) do
    { :test_base_path => "/basist", :kitchen_root => "/rooty" }
  end

  # Instance name "coolbeans-fries" is asserted on via KITCHEN_INSTANCE below.
  let(:instance) do
    stub(
      :name => [platform.name, suite.name].join("-"),
      :to_str => "instance",
      :logger => logger,
      :suite => suite,
      :platform => platform
    )
  end

  let(:verifier) do
    Kitchen::Verifier::Shell.new(config).finalize_config!(instance)
  end

  it "verifier api_version is 1" do
    verifier.diagnose_plugin[:api_version].must_equal 1
  end

  it "plugin_version is set to Kitchen::VERSION" do
    verifier.diagnose_plugin[:version].must_equal Kitchen::VERSION
  end

  describe "configuration" do
    it "sets :sleep to 0 by default" do
      verifier[:sleep].must_equal 0
    end

    it "sets :command to 'true' by default" do
      verifier[:command].must_equal "true"
    end

    it "sets :live_stream to stdout by default" do
      verifier[:live_stream].must_equal $stdout
    end
  end

  describe "#call" do
    describe "#shell_out" do
      it "calls sleep if :sleep value is greater than 0" do
        config[:sleep] = 3
        verifier.expects(:sleep).with(1).returns(true).at_least(3)

        verifier.call(state)
      end

      # The verifier exports the instance state as KITCHEN_* environment
      # variables for the shelled-out command.
      # NOTE(review): :shellout_opts is not set in `config` above --
      # presumably the verifier populates it during #call; confirm.
      it "states are set to environment" do
        state[:hostname] = "testhost"
        state[:server_id] = "i-xxxxxx"
        state[:port] = 22
        verifier.call(state)
        config[:shellout_opts][:environment]["KITCHEN_HOSTNAME"].must_equal "testhost"
        config[:shellout_opts][:environment]["KITCHEN_SERVER_ID"].must_equal "i-xxxxxx"
        config[:shellout_opts][:environment]["KITCHEN_PORT"].must_equal "22"
        config[:shellout_opts][:environment]["KITCHEN_INSTANCE"].must_equal "coolbeans-fries"
        config[:shellout_opts][:environment]["KITCHEN_PLATFORM"].must_equal "coolbeans"
        config[:shellout_opts][:environment]["KITCHEN_SUITE"].must_equal "fries"
      end

      it "raises ActionFailed if set false to :command" do
        config[:command] = "false"

        proc { verifier.call(state) }.must_raise Kitchen::ActionFailed
      end

      it "logs a converge event to INFO" do
        verifier.call(state)

        logged_output.string.must_match(/^.+ INFO .+ \[Shell\] Verify on .+$/)
      end
    end

    # With :remote_exec the command runs over the instance's transport
    # connection instead of shelling out locally.
    describe "remote_exec" do
      let(:transport) do
        t = mock("transport")
        t.responds_like_instance_of(Kitchen::Transport::Ssh)
        t
      end

      let(:connection) do
        c = mock("transport_connection")
        c.responds_like_instance_of(Kitchen::Transport::Ssh::Connection)
        c
      end

      let(:instance) do
        stub(
          :name => "coolbeans",
          :to_str => "instance",
          :logger => logger,
          :platform => platform,
          :suite => suite,
          :transport => transport
        )
      end

      before do
        transport.stubs(:connection).yields(connection)
        connection.stubs(:execute)
      end

      it "execute command onto instance." do
        config[:remote_exec] = true

        transport.expects(:connection).with(state).yields(connection)
        verifier.call(state)
      end
    end
  end

  describe "#run_command" do
    it "execute localy and returns nil" do
      verifier.run_command
    end

    # In remote mode #run_command returns the command string for the
    # transport to execute rather than running it locally.
    it "returns string when remote_exec" do
      config[:remote_exec] = true
      verifier.run_command.must_equal "true"
    end
  end
end
| 27.993789 | 93 | 0.642556 |
require "rubygems"
require "rspec"

# Make the gem's lib/ directory loadable without installing the gem.
$LOAD_PATH.unshift File.dirname(__FILE__) + "/../lib"
require "rrd"

# Silence "already initialized constant" warnings while the fixture-path
# constants are (re)defined on repeated loads.
$VERBOSE = nil
RRD_FILE = File.expand_path(File.dirname(__FILE__) + "/vm.rrd")
IMG_FILE = File.expand_path(File.dirname(__FILE__) + "/vm.png")
XML_FILE = File.expand_path(File.dirname(__FILE__) + "/vm.xml")
$VERBOSE = false

# NOTE(review): RSpec::Runner.configure is the RSpec 1.x API; modern RSpec
# uses RSpec.configure -- confirm which rspec version the project pins.
RSpec::Runner.configure do |config|
  config.before :each do
    # Start every example with a clean slate; stderr is redirected so a
    # missing file produces no noise, and backticks swallow the output.
    [RRD_FILE, IMG_FILE].each{|file| `rm #{file} 2>&1`}
  end
end
fffe58d154ed257ef5ae3fa31caa3fcb34b9f65e | 770 | Pod::Spec.new do |s|
s.name = "ObjcGitlabApi"
s.version = "1.0.2"
s.summary = "An Objective-C library for interacting with the GitLab API."
s.homepage = "https://github.com/Indatus/objc-gitlab-api"
s.license = 'MIT'
s.authors = { "Jeff Trespalacios" => "[email protected]", "Jon Staff" => "[email protected]" }
s.platform = :ios, '7.0'
s.source = { :git => "https://github.com/Indatus/objc-gitlab-api.git", :tag => "1.0.2" }
s.source_files = 'objc\ gitlab\ api/*.{h,m}', 'objc\ gitlab\ api/**/*.{h,m}', 'objc\ gitlab\ api/utilities/**/*.{h,m}'
s.public_header_files = 'objc\ gitlab\ api/GLGitlab.h', 'objc\ gitlab\ api/models/*.h', 'objc\ gitlab\ api/utilities/*.h'
s.requires_arc = true
end
| 55 | 123 | 0.601299 |
# Gives posts.votes a default of 0 so new rows start unvoted.
#
# NOTE(review): passing a bare default value makes this migration
# irreversible (rollback raises ActiveRecord::IrreversibleMigration); the
# `from:`/`to:` form would fix that -- confirm the previous default first.
class ChangePostsVotesDefault < ActiveRecord::Migration[5.1]
  def change
    change_column_default(:posts, :votes, 0)
  end
end
| 21.5 | 60 | 0.75969 |
2670c106954bac50697ffef33eb9172fb1f0a49d | 218 | # load oxd components
# Load all oxd components up front so consumers only need `require 'oxd'`.
require 'oxd/config'
require 'oxd/oxd_connector'
require 'oxd/client_oxd_commands'
require 'oxd/uma_commands'

# @author Inderpal Singh
# Oxd Module namespace
# oxd_version 3.1.1
module Oxd
end
bf9e806889d707dea27fb8e54add46d73c9a543b | 3,072 | module Druid
class PostAggregation
def method_missing(name, *args)
if args.empty?
PostAggregationField.new(name)
end
end
def js(*args)
if args.empty?
PostAggregationField.new(:js)
else
PostAggregationJavascript.new(args.first)
end
end
end
module PostAggregationOperators
def +(value)
PostAggregationOperation.new(self, :+, value)
end
def -(value)
PostAggregationOperation.new(self, :-, value)
end
def *(value)
PostAggregationOperation.new(self, :*, value)
end
def /(value)
PostAggregationOperation.new(self, :/, value)
end
end
class PostAggregationOperation
include PostAggregationOperators
attr_reader :left, :operator, :right, :name
def initialize(left, operator, right)
@left = left.is_a?(Numeric) ? PostAggregationConstant.new(left) : left
@operator = operator
@right = right.is_a?(Numeric) ? PostAggregationConstant.new(right) : right
end
def as(field)
@name = field.name.to_s
self
end
def get_field_names
field_names = []
field_names << left.get_field_names if left.respond_to?(:get_field_names)
field_names << right.get_field_names if right.respond_to?(:get_field_names)
field_names
end
def to_hash
hash = { "type" => "arithmetic", "fn" => @operator, "fields" => [@left.to_hash, @right.to_hash] }
hash["name"] = @name if @name
hash
end
def to_json(*a)
to_hash.to_json(*a)
end
def as_json(*a)
to_hash
end
end
class PostAggregationField
include PostAggregationOperators
attr_reader :name
def initialize(name)
@name = name
end
def get_field_names
@name
end
def to_hash
{ "type" => "fieldAccess", "name" => @name, "fieldName" => @name }
end
def to_json(*a)
to_hash.to_json(*a)
end
def as_json(*a)
to_hash
end
end
class PostAggregationConstant
include PostAggregationOperators
attr_reader :value
def initialize(value)
@value = value
end
def to_hash
{ "type" => "constant", "value" => @value }
end
def to_json(*a)
to_hash.to_json(*a)
end
def as_json(*a)
to_hash
end
end
class PostAggregationJavascript
include PostAggregationOperators
include Serializable
def initialize(function)
@field_names = extract_fields(function)
@function = function
end
def get_field_names
@field_names
end
def as(field)
@name = field.name.to_s
self
end
def to_hash
{
"type" => "javascript",
"name" => @name,
"fieldNames" => @field_names,
"function" => @function
}
end
private
def extract_fields(function)
match = function.match(/function\((.+)\)/)
raise 'Invalid Javascript function' unless match && match.captures
match.captures.first.split(',').map {|field| field.strip }
end
end
end | 19.566879 | 103 | 0.613932 |
# encoding: utf-8
class ChannelsController < ApplicationController
  # Paginated topic listing for one channel, plus the title/SEO metadata the
  # view needs (@title, @canonical_path, @seo_description).
  def show
    @channel = Channel.find(params[:id])
    @title = "#{@channel.name} - #{Siteconf.site_name}"
    if params[:p].present?
      @page_num = params[:p].to_i
      # NOTE(review): "鑡" looks like mojibake -- the suffix presumably
      # should read "第 N 页" ("page N"). Confirm and fix the literal if so.
      @title += " (第 #{@page_num} 鑡)"
    else
      @page_num = 1
    end
    @total_topics = @channel.topics.count
    # Float division + ceil so a partial final page still counts as a page.
    @total_pages = (@total_topics * 1.0 / Siteconf.pagination_topics.to_i).ceil
    # 0 signals "no next/previous page" to the view.
    @next_page_num = (@page_num < @total_pages) ? @page_num + 1 : 0
    @prev_page_num = (@page_num > 1) ? @page_num - 1 : 0
    @topics = @channel.topics.page(@page_num).per(Siteconf.pagination_topics.to_i).order('updated_at DESC')
    # Canonical URL omits ?p=1 so page one has a single canonical form.
    @canonical_path = "/go/#{params[:key]}"
    @canonical_path += "?p=#{@page_num}" if @page_num > 1
    @seo_description = @channel.name
  end
end
7a4fda5a4c0bfc9ddea85322a7417e5ec333f059 | 256 | require File.dirname(__FILE__) + '/../../spec_helper'
# Only relevant on Ruby < 1.9, where Enumerator lives in the
# 'enumerator' stdlib; on 1.9+ it is built in and this whole group is
# skipped by the version guard.
ruby_version_is ""..."1.9" do
require File.dirname(__FILE__) + '/../../shared/enumerator/enum_for'
require 'enumerator'
# #to_enum is specified to behave exactly like #enum_for, so it reuses
# those shared examples.
describe "#to_enum" do
it_behaves_like :enum_for, :enum_for
end
end
| 23.272727 | 70 | 0.691406 |
910ac4547d9b87d2ccf206eb44633e549f2e2b73 | 2,784 | # frozen_string_literal: true
module Tianguis
module Parser
class FruitsWeekly < ResultTable
TableRow = Struct.new(:product, :prices, :avg_price)
# @param args [Hash] forwarded unchanged to ResultTable via bare
#   `super`; :year/:month/:week locate the report week being parsed.
def initialize(**args)
super
@year = args[:year]
@month = args[:month]
@week = args[:week]
end
# Walks the scraped HTML table and builds one TableRow per product row.
# Rows carrying the '.encabTIP2' class are category headers: remembered
# as the current category, then skipped. Rows whose first cell is '-'
# extend the previous product with another variant instead of creating
# a new product.
def price_table
category = nil
product = nil
price_table = []
table.each do |row|
if row.css('.encabTIP2').any?
category = row.text.strip.downcase
next
end
product = if row.xpath('td[1]').text.strip == '-'
add_variant(row, product)
else
create_product(row, category)
end
# NOTE(review): both branches above return a truthy product, so this
# guard only fires if a '-' row ever precedes the first real product
# (add_variant would then have received nil) — confirm intent.
next unless product
price_table << TableRow.new(product.to_h, create_prices(row), avg_price(row))
end
price_table
end
# All parsed products for the current table, memoized.
#
# Bug fix: the original `products ||= ...` conditionally assigned a
# fresh *local* variable, so nothing persisted between calls and the
# table was re-parsed on every invocation. Cache in an instance
# variable instead; the return value is unchanged.
def products
  @products ||= price_table.map { |row| row[:product] }
end
private
attr_reader :year, :month, :week
# Builds a Product from a regular table row: td[1]=name, td[2]=quality,
# td[3]=variant, td[4]=origin state. `kind` is the category taken from
# the most recent header row.
def create_product(row, kind)
Product.new do |product|
product.category = :agricultural
product.kind = kind
product.name = row.xpath('td[1]').text.strip
product.quality = row.xpath('td[2]').text
product.variant = row.xpath('td[3]').text
product.state = row.xpath('td[4]').text.strip
end
end
# A '-' row reuses the previous product: its variant and state are
# overwritten in place and the same (mutated) object is returned so the
# caller can append it again.
def add_variant(row, product)
product.variant = row.xpath('td[3]').text
product.state = row.xpath('td[4]').text.strip
product
end
# One price entry per weekday column (td[5]..td[9]). Zero prices mean
# "no quote that day": `next` yields nil inside the map and the
# trailing .compact drops those entries.
def create_prices(item)
(5..9).map do |day|
price = item.xpath("td[#{day}]").text.to_f
next if price.zero?
{
published_at: date(day),
# Money kept as integer cents to avoid float rounding downstream.
cost_cents: (price * 100).to_i,
currency: 'MXN'
}
end.compact
end
# Average weekly price taken from the 10th cell, normalized to integer
# MXN cents per kilogram.
def avg_price(item)
  cents = (item.xpath('td[10]').text.to_f * 100).to_i
  { cost_cents: cents, currency: 'MXN', unit: :kg }
end
# Resolves a weekday column index to a concrete Date. table_header(day)
# is expected to yield "DD/Mon" with a Spanish month abbreviation.
# NOTE(review): if table_header returns nil, the safe navigation leaves
# both locals nil and the following line raises NoMethodError — confirm
# headers are always present.
def date(day)
# Reassigns the parameter: `day` becomes the day-of-month string, and
# the local `month` shadows the attr_reader of the same name.
day, month = table_header(day)&.split('/')
month = months[month.downcase.to_sym]
Date.new(current_year(month), month, day.to_i)
end
# Maps a column's month number onto the right calendar year when the
# report week straddles a year boundary: a December column in a January
# report belongs to the previous year, and a January column in a
# December report to the next one.
def current_year(current_month)
  return year - 1 if current_month == 12 && month == 1
  return year + 1 if current_month == 1 && month == 12
  year
end
# Spanish month abbreviation -> month number, built once and frozen.
def months
  @months ||= {
    ene: 1, feb: 2, mar: 3, abr: 4, may: 5, jun: 6,
    jul: 7, ago: 8, sep: 9, oct: 10, nov: 11, dic: 12
  }.freeze
end
end
end
end
| 23.794872 | 87 | 0.489583 |
ed6e7edb82e6d8abe2ee12c433710552d481f1ad | 6,968 | require 'spec_helper'
require 'ddtrace/contrib/analytics_examples'
require 'rack/test'
require 'rack'
require 'ddtrace'
require 'ddtrace/contrib/rack/middlewares'
RSpec.describe 'Rack integration configuration' do
include Rack::Test::Methods
let(:tracer) { get_test_tracer }
let(:configuration_options) { { tracer: tracer } }
let(:spans) { tracer.writer.spans }
let(:span) { spans.first }
before(:each) do
Datadog.configure do |c|
c.use :rack, configuration_options
end
end
around do |example|
# Reset before and after each example; don't allow global state to linger.
Datadog.registry[:rack].reset_configuration!
example.run
Datadog.registry[:rack].reset_configuration!
end
# Minimal Rack app wrapped in the Datadog trace middleware; `response`
# performs a GET / against it through Rack::Test.
shared_context 'an incoming HTTP request' do
subject(:response) { get '/' }
let(:app) do
Rack::Builder.new do
use Datadog::Contrib::Rack::TraceMiddleware
map '/' do
# NOTE(review): a bare String body relies on Rack::Test tolerance; the
# Rack SPEC expects a body responding to #each (e.g. ['OK']).
run(proc { |_env| [200, { 'Content-Type' => 'text/html' }, 'OK'] })
end
end.to_app
end
end
it_behaves_like 'analytics for integration', ignore_global_flag: false do
include_context 'an incoming HTTP request'
before { is_expected.to be_ok }
let(:analytics_enabled_var) { Datadog::Contrib::Rack::Ext::ENV_ANALYTICS_ENABLED }
let(:analytics_sample_rate_var) { Datadog::Contrib::Rack::Ext::ENV_ANALYTICS_SAMPLE_RATE }
end
describe 'request queueing' do
# Simulates a front-end proxy's request-queueing header ("t=<unix ts>"),
# stamping the request as queued 5 seconds before the spec runs.
shared_context 'queue header' do
let(:queue_value) { "t=#{queue_time}" }
let(:queue_time) { (Time.now.utc - 5).to_i }
before(:each) do
# `queue_header` (the header name) is supplied by the including context.
header queue_header, queue_value
end
end
# Opposite case: no proxy timing header at all.
shared_context 'no queue header' do
let(:queue_header) { nil }
let(:queue_value) { nil }
end
# Expects two spans: a synthetic 'http_server.queue' span rooted at the
# proxy timestamp, parenting the real 'rack.request' span.
shared_examples_for 'a Rack request with queuing' do
let(:queue_span) { spans.first }
let(:rack_span) { spans.last }
it 'produces a queued Rack trace' do
is_expected.to be_ok
expect(spans).to have(2).items
expect(queue_span.name).to eq('http_server.queue')
expect(queue_span.service).to eq(Datadog.configuration[:rack][:web_service_name])
# The queue span must start at the proxy's timestamp, not at dispatch.
expect(queue_span.start_time.to_i).to eq(queue_time)
# Queue span gets tagged for runtime metrics because its a local root span.
# TODO: It probably shouldn't get tagged like this in the future; it's not part of the runtime.
expect(queue_span.get_tag(Datadog::Ext::Runtime::TAG_LANG)).to eq('ruby')
expect(queue_span.get_tag(Datadog::Ext::Runtime::TAG_RUNTIME_ID)).to eq(Datadog::Runtime::Identity.id)
expect(rack_span.name).to eq('rack.request')
expect(rack_span.span_type).to eq('http')
expect(rack_span.service).to eq(Datadog.configuration[:rack][:service_name])
expect(rack_span.resource).to eq('GET 200')
expect(rack_span.get_tag('http.method')).to eq('GET')
expect(rack_span.get_tag('http.status_code')).to eq('200')
expect(rack_span.get_tag('http.url')).to eq('/')
expect(rack_span.get_tag(Datadog::Ext::Runtime::TAG_LANG)).to eq('ruby')
expect(rack_span.get_tag(Datadog::Ext::Runtime::TAG_RUNTIME_ID)).to eq(Datadog::Runtime::Identity.id)
expect(rack_span.status).to eq(0)
# Parent/child relationship: queue span is the local root.
expect(queue_span.span_id).to eq(rack_span.parent_id)
end
end
shared_examples_for 'a Rack request without queuing' do
it 'produces a non-queued Rack trace' do
is_expected.to be_ok
expect(spans).to have(1).items
expect(span).to_not be nil
expect(span.name).to eq('rack.request')
expect(span.span_type).to eq('http')
expect(span.service).to eq(Datadog.configuration[:rack][:service_name])
expect(span.resource).to eq('GET 200')
expect(span.get_tag('http.method')).to eq('GET')
expect(span.get_tag('http.status_code')).to eq('200')
expect(span.get_tag('http.url')).to eq('/')
expect(span.get_tag(Datadog::Ext::Runtime::TAG_LANG)).to eq('ruby')
expect(span.get_tag(Datadog::Ext::Runtime::TAG_RUNTIME_ID)).to eq(Datadog::Runtime::Identity.id)
expect(span.status).to eq(0)
expect(span.parent_id).to eq(0)
end
end
context 'when enabled' do
let(:configuration_options) { super().merge(request_queuing: true) }
context 'and a request is received' do
include_context 'an incoming HTTP request'
context 'with X-Request-Start header' do
include_context 'queue header' do
let(:queue_header) { 'X-Request-Start' }
end
it_behaves_like 'a Rack request with queuing'
context 'given a custom web service name' do
let(:configuration_options) { super().merge(web_service_name: web_service_name) }
let(:web_service_name) { 'nginx' }
it_behaves_like 'a Rack request with queuing' do
it 'sets the custom service name' do
is_expected.to be_ok
expect(queue_span.service).to eq(web_service_name)
end
end
end
end
context 'with X-Queue-Start header' do
include_context 'queue header' do
let(:queue_header) { 'X-Queue-Start' }
end
it_behaves_like 'a Rack request with queuing'
end
# Ensure a queuing Span is NOT created if there is a clock skew
# where the starting time is greater than current host Time.now
context 'with a skewed queue header' do
before(:each) { header 'X-Request-Start', (Time.now.utc + 5).to_i }
it_behaves_like 'a Rack request without queuing'
end
# Ensure a queuing Span is NOT created if the header is wrong
context 'with a invalid queue header' do
before(:each) { header 'X-Request-Start', 'foobar' }
it_behaves_like 'a Rack request without queuing'
end
context 'without queue header' do
include_context 'no queue header'
it_behaves_like 'a Rack request without queuing'
end
end
end
context 'when disabled' do
let(:configuration_options) { super().merge(request_queuing: false) }
context 'and a request is received' do
include_context 'an incoming HTTP request'
context 'with X-Request-Start header' do
include_context 'queue header' do
let(:queue_header) { 'X-Request-Start' }
end
it_behaves_like 'a Rack request without queuing'
end
context 'with X-Queue-Start header' do
include_context 'queue header' do
let(:queue_header) { 'X-Queue-Start' }
end
it_behaves_like 'a Rack request without queuing'
end
context 'without queue header' do
include_context 'no queue header'
it_behaves_like 'a Rack request without queuing'
end
end
end
end
end
| 34.325123 | 110 | 0.642365 |
abb8948f13a62ca84dc00f3d39cb0fdf7dc125fe | 22,755 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe API::Lint do
describe 'POST /ci/lint' do
context 'when signup settings are disabled' do
before do
Gitlab::CurrentSettings.signup_enabled = false
end
context 'when unauthenticated' do
it 'returns authentication error' do
post api('/ci/lint'), params: { content: 'content' }
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
context 'when authenticated' do
let_it_be(:api_user) { create(:user) }
it 'returns authorized' do
post api('/ci/lint', api_user), params: { content: 'content' }
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'when authenticated as external user' do
let(:project) { create(:project) }
let(:api_user) { create(:user, :external) }
context 'when reporter in a project' do
before do
project.add_reporter(api_user)
end
it 'returns authorization failure' do
post api('/ci/lint', api_user), params: { content: 'content' }
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
context 'when developer in a project' do
before do
project.add_developer(api_user)
end
it 'returns authorization success' do
post api('/ci/lint', api_user), params: { content: 'content' }
expect(response).to have_gitlab_http_status(:ok)
end
end
end
end
context 'when signup is enabled and not limited' do
before do
Gitlab::CurrentSettings.signup_enabled = true
stub_application_setting(domain_allowlist: [], email_restrictions_enabled: false, require_admin_approval_after_user_signup: false)
end
context 'when unauthenticated' do
it 'returns authorized success' do
post api('/ci/lint'), params: { content: 'content' }
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'when authenticated' do
let_it_be(:api_user) { create(:user) }
it 'returns authentication success' do
post api('/ci/lint', api_user), params: { content: 'content' }
expect(response).to have_gitlab_http_status(:ok)
end
end
end
context 'when limited signup is enabled' do
before do
stub_application_setting(domain_allowlist: ['www.gitlab.com'])
Gitlab::CurrentSettings.signup_enabled = true
end
context 'when unauthenticated' do
it 'returns unauthorized' do
post api('/ci/lint'), params: { content: 'content' }
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
context 'when authenticated' do
let_it_be(:api_user) { create(:user) }
it 'returns authentication success' do
post api('/ci/lint', api_user), params: { content: 'content' }
expect(response).to have_gitlab_http_status(:ok)
end
end
end
context 'when authenticated' do
let_it_be(:api_user) { create(:user) }
context 'with valid .gitlab-ci.yaml content' do
let(:yaml_content) do
File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
end
it 'passes validation without warnings or errors' do
post api('/ci/lint', api_user), params: { content: yaml_content }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Hash
expect(json_response['status']).to eq('valid')
expect(json_response['warnings']).to match_array([])
expect(json_response['errors']).to match_array([])
end
it 'outputs expanded yaml content' do
post api('/ci/lint', api_user), params: { content: yaml_content, include_merged_yaml: true }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to have_key('merged_yaml')
end
it 'outputs jobs' do
post api('/ci/lint', api_user), params: { content: yaml_content, include_jobs: true }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to have_key('jobs')
end
end
context 'with valid .gitlab-ci.yaml with warnings' do
let(:yaml_content) { { job: { script: 'ls', rules: [{ when: 'always' }] } }.to_yaml }
it 'passes validation but returns warnings' do
post api('/ci/lint', api_user), params: { content: yaml_content }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['status']).to eq('valid')
expect(json_response['warnings']).not_to be_empty
expect(json_response['errors']).to match_array([])
end
end
context 'with valid .gitlab-ci.yaml using deprecated keywords' do
let(:yaml_content) { { job: { script: 'ls', type: 'test' }, types: ['test'] }.to_yaml }
it 'passes validation but returns warnings' do
post api('/ci/lint', api_user), params: { content: yaml_content }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['status']).to eq('valid')
expect(json_response['warnings']).not_to be_empty
expect(json_response['errors']).to match_array([])
end
end
context 'with an invalid .gitlab_ci.yml' do
context 'with invalid syntax' do
let(:yaml_content) { 'invalid content' }
it 'responds with errors about invalid syntax' do
post api('/ci/lint', api_user), params: { content: yaml_content }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['status']).to eq('invalid')
expect(json_response['warnings']).to eq([])
expect(json_response['errors']).to eq(['Invalid configuration format'])
end
it 'outputs expanded yaml content' do
post api('/ci/lint', api_user), params: { content: yaml_content, include_merged_yaml: true }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to have_key('merged_yaml')
end
it 'outputs jobs' do
post api('/ci/lint', api_user), params: { content: yaml_content, include_jobs: true }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to have_key('jobs')
end
end
context 'with invalid configuration' do
let(:yaml_content) { '{ image: "ruby:2.7", services: ["postgres"] }' }
it 'responds with errors about invalid configuration' do
post api('/ci/lint', api_user), params: { content: yaml_content }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['status']).to eq('invalid')
expect(json_response['warnings']).to eq([])
expect(json_response['errors']).to eq(['jobs config should contain at least one visible job'])
end
it 'outputs expanded yaml content' do
post api('/ci/lint', api_user), params: { content: yaml_content, include_merged_yaml: true }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to have_key('merged_yaml')
end
it 'outputs jobs' do
post api('/ci/lint', api_user), params: { content: yaml_content, include_jobs: true }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to have_key('jobs')
end
end
end
context 'without the content parameter' do
it 'responds with validation error about missing content' do
post api('/ci/lint', api_user)
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('content is missing')
end
end
end
end
describe 'GET /projects/:id/ci/lint' do
subject(:ci_lint) { get api("/projects/#{project.id}/ci/lint", api_user), params: { dry_run: dry_run, include_jobs: include_jobs } }
let(:project) { create(:project, :repository) }
let(:dry_run) { nil }
let(:include_jobs) { nil }
# Lint result shape for a valid config that still emits warnings
# (e.g. rules:when:always).
RSpec.shared_examples 'valid config with warnings' do
it 'passes validation with warnings' do
ci_lint
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['valid']).to eq(true)
expect(json_response['errors']).to eq([])
expect(json_response['warnings']).not_to be_empty
end
end
# Fully clean config: also verifies merged_yaml equals the root config
# merged over its include, with the include: key stripped.
RSpec.shared_examples 'valid config without warnings' do
it 'passes validation' do
ci_lint
# NOTE(review): positional permitted-classes is the legacy
# Psych signature; newer Psych prefers permitted_classes: [Symbol].
included_config = YAML.safe_load(included_content, [Symbol])
root_config = YAML.safe_load(yaml_content, [Symbol])
expected_yaml = included_config.merge(root_config).except(:include).deep_stringify_keys.to_yaml
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Hash
expect(json_response['merged_yaml']).to eq(expected_yaml)
expect(json_response['valid']).to eq(true)
expect(json_response['warnings']).to eq([])
expect(json_response['errors']).to eq([])
end
end
# Structurally parseable but semantically invalid config (no visible jobs).
RSpec.shared_examples 'invalid config' do
it 'responds with errors about invalid configuration' do
ci_lint
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['merged_yaml']).to eq(yaml_content)
expect(json_response['valid']).to eq(false)
expect(json_response['warnings']).to eq([])
expect(json_response['errors']).to eq(['jobs config should contain at least one visible job'])
end
end
context 'when unauthenticated' do
let_it_be(:api_user) { nil }
it 'returns authentication error' do
ci_lint
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when authenticated as non-member' do
let_it_be(:api_user) { create(:user) }
let(:yaml_content) do
{ include: { local: 'another-gitlab-ci.yml' }, test: { stage: 'test', script: 'echo 1' } }.to_yaml
end
context 'when project is private' do
before do
project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
stub_ci_pipeline_yaml_file(yaml_content)
end
it 'returns authentication error' do
ci_lint
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when project is public' do
before do
project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
end
context 'when running as dry run' do
let(:dry_run) { true }
before do
stub_ci_pipeline_yaml_file(yaml_content)
end
it 'returns pipeline creation error' do
ci_lint
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['merged_yaml']).to eq(nil)
expect(json_response['valid']).to eq(false)
expect(json_response['warnings']).to eq([])
expect(json_response['errors']).to eq(['Insufficient permissions to create a new pipeline'])
end
end
context 'when running static validation' do
let(:dry_run) { false }
let(:included_content) do
{ another_test: { stage: 'test', script: 'echo 1' } }.deep_stringify_keys.to_yaml
end
before do
project.repository.create_file(
project.creator,
'.gitlab-ci.yml',
yaml_content,
message: 'Automatically created .gitlab-ci.yml',
branch_name: 'master'
)
project.repository.create_file(
project.creator,
'another-gitlab-ci.yml',
included_content,
message: 'Automatically created another-gitlab-ci.yml',
branch_name: 'master'
)
end
it_behaves_like 'valid config without warnings'
end
end
end
context 'when authenticated as project guest' do
let_it_be(:api_user) { create(:user) }
before do
project.add_guest(api_user)
end
it 'returns authentication error' do
ci_lint
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when authenticated as project developer' do
let_it_be(:api_user) { create(:user) }
before do
project.add_developer(api_user)
end
context 'with valid .gitlab-ci.yml content' do
let(:yaml_content) do
{ include: { local: 'another-gitlab-ci.yml' }, test: { stage: 'test', script: 'echo 1' } }.to_yaml
end
let(:included_content) do
{ another_test: { stage: 'test', script: 'echo 1' } }.deep_stringify_keys.to_yaml
end
before do
project.repository.create_file(
project.creator,
'.gitlab-ci.yml',
yaml_content,
message: 'Automatically created .gitlab-ci.yml',
branch_name: 'master'
)
project.repository.create_file(
project.creator,
'another-gitlab-ci.yml',
included_content,
message: 'Automatically created another-gitlab-ci.yml',
branch_name: 'master'
)
end
context 'when running as dry run' do
let(:dry_run) { true }
it_behaves_like 'valid config without warnings'
end
context 'when running static validation' do
let(:dry_run) { false }
it_behaves_like 'valid config without warnings'
end
context 'when running with include jobs' do
let(:include_jobs) { true }
it_behaves_like 'valid config without warnings'
it 'returns jobs key' do
ci_lint
expect(json_response).to have_key('jobs')
end
end
context 'when running without include jobs' do
let(:include_jobs) { false }
it_behaves_like 'valid config without warnings'
it 'does not return jobs key' do
ci_lint
expect(json_response).not_to have_key('jobs')
end
end
context 'With warnings' do
let(:yaml_content) { { job: { script: 'ls', rules: [{ when: 'always' }] } }.to_yaml }
it_behaves_like 'valid config with warnings'
end
end
context 'with invalid .gitlab-ci.yml content' do
let(:yaml_content) do
{ image: 'ruby:2.7', services: ['postgres'] }.deep_stringify_keys.to_yaml
end
before do
stub_ci_pipeline_yaml_file(yaml_content)
end
context 'when running as dry run' do
let(:dry_run) { true }
it_behaves_like 'invalid config'
end
context 'when running static validation' do
let(:dry_run) { false }
it_behaves_like 'invalid config'
end
context 'when running with include jobs' do
let(:include_jobs) { true }
it_behaves_like 'invalid config'
it 'returns jobs key' do
ci_lint
expect(json_response).to have_key('jobs')
end
end
context 'when running without include jobs' do
let(:include_jobs) { false }
it_behaves_like 'invalid config'
it 'does not return jobs key' do
ci_lint
expect(json_response).not_to have_key('jobs')
end
end
end
end
end
describe 'POST /projects/:id/ci/lint' do
subject(:ci_lint) { post api("/projects/#{project.id}/ci/lint", api_user), params: { dry_run: dry_run, content: yaml_content, include_jobs: include_jobs } }
let(:project) { create(:project, :repository) }
let(:dry_run) { nil }
let(:include_jobs) { nil }
let_it_be(:api_user) { create(:user) }
let_it_be(:yaml_content) do
{ include: { local: 'another-gitlab-ci.yml' }, test: { stage: 'test', script: 'echo 1' } }.to_yaml
end
let_it_be(:included_content) do
{ another_test: { stage: 'test', script: 'echo 1' } }.to_yaml
end
# POST variant of the lint result shapes: same merged-yaml expectation
# as the GET examples, but without asserting on warnings.
RSpec.shared_examples 'valid project config' do
it 'passes validation' do
ci_lint
# NOTE(review): positional permitted-classes is the legacy Psych
# signature; newer Psych prefers permitted_classes: [Symbol].
included_config = YAML.safe_load(included_content, [Symbol])
root_config = YAML.safe_load(yaml_content, [Symbol])
expected_yaml = included_config.merge(root_config).except(:include).deep_stringify_keys.to_yaml
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Hash
expect(json_response['merged_yaml']).to eq(expected_yaml)
expect(json_response['valid']).to eq(true)
expect(json_response['errors']).to eq([])
end
end
# Parseable config with no visible jobs is rejected.
RSpec.shared_examples 'invalid project config' do
it 'responds with errors about invalid configuration' do
ci_lint
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['merged_yaml']).to eq(yaml_content)
expect(json_response['valid']).to eq(false)
expect(json_response['errors']).to eq(['jobs config should contain at least one visible job'])
end
end
context 'with an empty repository' do
let_it_be(:empty_project) { create(:project_empty_repo) }
let_it_be(:yaml_content) do
File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
end
before do
empty_project.add_developer(api_user)
end
it 'passes validation without errors' do
post api("/projects/#{empty_project.id}/ci/lint", api_user), params: { content: yaml_content }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['valid']).to eq(true)
expect(json_response['errors']).to eq([])
end
end
context 'when unauthenticated' do
let_it_be(:api_user) { nil }
it 'returns authentication error' do
ci_lint
expect(response).to have_gitlab_http_status(:not_found)
end
context 'when project is public' do
before do
project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
end
it 'returns authentication error' do
ci_lint
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
context 'when authenticated as non-member' do
context 'when project is private' do
before do
project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
end
it 'returns authentication error' do
ci_lint
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when project is public' do
before do
project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
end
context 'when running as dry run' do
let(:dry_run) { true }
it 'returns authentication error' do
ci_lint
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when running static validation' do
let(:dry_run) { false }
before do
project.repository.create_file(
project.creator,
'another-gitlab-ci.yml',
included_content,
message: 'Automatically created another-gitlab-ci.yml',
branch_name: 'master'
)
end
it 'returns authentication error' do
ci_lint
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
end
context 'when authenticated as project guest' do
before do
project.add_guest(api_user)
end
it 'returns authentication error' do
ci_lint
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when authenticated as project developer' do
before do
project.add_developer(api_user)
end
context 'with valid .gitlab-ci.yml content' do
before do
project.repository.create_file(
project.creator,
'another-gitlab-ci.yml',
included_content,
message: 'Automatically created another-gitlab-ci.yml',
branch_name: 'master'
)
end
context 'when running as dry run' do
let(:dry_run) { true }
it_behaves_like 'valid project config'
end
context 'when running static validation' do
let(:dry_run) { false }
it_behaves_like 'valid project config'
end
context 'when running with include jobs param' do
let(:include_jobs) { true }
it_behaves_like 'valid project config'
it 'contains jobs key' do
ci_lint
expect(json_response).to have_key('jobs')
end
end
context 'when running without include jobs param' do
let(:include_jobs) { false }
it_behaves_like 'valid project config'
it 'does not contain jobs key' do
ci_lint
expect(json_response).not_to have_key('jobs')
end
end
end
context 'with invalid .gitlab-ci.yml content' do
let(:yaml_content) do
{ image: 'ruby:2.7', services: ['postgres'] }.deep_stringify_keys.to_yaml
end
context 'when running as dry run' do
let(:dry_run) { true }
it_behaves_like 'invalid project config'
end
context 'when running static validation' do
let(:dry_run) { false }
it_behaves_like 'invalid project config'
end
context 'when running with include jobs set to false' do
let(:include_jobs) { false }
it_behaves_like 'invalid project config'
it 'does not contain jobs key' do
ci_lint
expect(json_response).not_to have_key('jobs')
end
end
context 'when running with param include jobs' do
let(:include_jobs) { true }
it_behaves_like 'invalid project config'
it 'contains jobs key' do
ci_lint
expect(json_response).to have_key('jobs')
end
end
end
end
end
end
| 30.421123 | 160 | 0.605933 |
28f96351175c160d7f4e0a98aaa515f0d5c8c527 | 723 | # frozen_string_literal: true
module Ci
# Removes unit-test records that are no longer referenced, in small
# batches so deletes never hold long locks. Failures are purged before
# the tests themselves, respecting the FK dependency.
class DeleteUnitTestsService
  include EachBatch

  BATCH_SIZE = 100

  def execute
    [Ci::UnitTestFailure, Ci::UnitTest].each { |klass| purge_data!(klass) }
  end

  private

  # Keeps deleting batches until one pass removes nothing.
  def purge_data!(klass)
    nil while delete_batch!(klass)
  end

  # rubocop: disable CodeReuse/ActiveRecord
  # Deletes up to BATCH_SIZE deletable rows in one transaction, skipping
  # rows locked by concurrent workers. Returns true when any row was
  # removed, i.e. another pass may be needed.
  def delete_batch!(klass)
    rows_removed = 0
    ActiveRecord::Base.transaction do
      ids = klass.deletable.lock('FOR UPDATE SKIP LOCKED').limit(BATCH_SIZE).pluck(:id)
      break if ids.empty?
      rows_removed = klass.where(id: ids).delete_all
    end
    rows_removed > 0
  end
  # rubocop: enable CodeReuse/ActiveRecord
end
end
| 19.026316 | 89 | 0.648686 |
9136fe10e749dcc682da2eb15b38e7ff1d16317a | 3,684 | module CalendarHelper
# Renders a calendar <table> for +objects+, yielding a builder to the
# required block (mirrors form_for's block contract). Recognized
# options: :html (table attributes), :builder, :calendar; the rest is
# passed through to the builder and calendar.
def calendar_for(objects, *args)
raise ArgumentError, "Missing block" unless block_given?
options = args.last.is_a?(Hash) ? args.pop : {}
html_options = options[:html]
builder = options[:builder] || CalendarBuilder
calendar = options[:calendar] || Calendar
# Legacy Rails concat-based helper style: open tag, let the block emit
# rows through the builder, then close the table.
concat(tag(:table, html_options, true))
yield builder.new(objects || [], self, calendar, options)
concat('</table>')
end
# Table builder that emits one <td> per calendar day, starting a new
# <tr> on the calendar's first weekday and closing it on the last.
class CalendarBuilder < TableHelper::TableBuilder
def initialize(objects, template, calendar, options)
super(objects, template, options)
@calendar = calendar.new(options)
# Reference date for the 'today' CSS class; overridable via :today.
@today = options[:today] || Time.now
end
# Yields (day, objects-on-that-day) for every grid day inside a tbody.
# options: :day_method (message sent to each object to get its date,
# default :date) and :id (strftime pattern for per-cell DOM ids).
def day(*args)
raise ArgumentError, "Missing block" unless block_given?
options = options_from_hash(args)
day_method = options.delete(:day_method) || :date
id_pattern = options.delete(:id)
tbody do
# Sort by the "YYYY-MM-DD" hash key to get chronological order.
@calendar.objects_for_days(@objects, day_method).to_a.sort{|a1, a2| a1.first <=> a2.first }.each do |o|
key, array = o
day, objects = array
concat(tag(:tr, options, true)) if(day.wday == @calendar.first_weekday)
concat(tag(:td, td_options(day, id_pattern), true))
yield(day, objects)
concat('</td>')
concat('</tr>') if(day.wday == @calendar.last_weekday)
end
end
end
private
# NOTE(review): appears dead — Calendar#objects_for_days takes two
# required arguments, so this one-arg call would raise if ever used.
def objects_for_days
@calendar.objects_for_days(@objects)
end
# CSS class/id attributes for one day cell: 'today' wins over
# 'notmonth' (padding days) which wins over 'weekend'.
def td_options(day, id_pattern)
options = {}
if(day.strftime("%Y-%m-%d") == @today.strftime("%Y-%m-%d"))
options[:class] = 'today'
elsif(day.month != @calendar.month)
options[:class] = 'notmonth'
elsif(day.wday == 0 or day.wday == 6)
options[:class] = 'weekend'
end
if id_pattern
options[:id] = day.strftime(id_pattern)
end
options
end
end
# Pure-Ruby model of one calendar month: computes a day grid padded to
# whole weeks (running from first_weekday to last_weekday) and can
# group a collection of objects by the day they belong to.
class Calendar
  attr_accessor :first_weekday, :last_weekday, :month

  # options: :year / :month (default: current), :first_day_of_week
  # (0 = Sunday, matching Date#wday).
  def initialize(options={})
    @year = options[:year] || Time.now.year
    @month = options[:month] || Time.now.month
    @first_day_of_week = options[:first_day_of_week] || 0
    @first_weekday = first_day_of_week(@first_day_of_week)
    @last_weekday = last_day_of_week(@first_day_of_week)
    @first = Date.civil(@year, @month, 1)
    @last = Date.civil(@year, @month, -1)
  end

  # Yields every day of the padded grid in chronological order.
  def each_day
    first_day.upto(last_day) do |day|
      yield(day)
    end
  end

  # Last grid day: first date at/after the month's end falling on
  # last_weekday.
  def last_day
    last = @last
    while(last.wday % 7 != @last_weekday % 7)
      last = last.next
    end
    last
  end

  # First grid day: scans forward from six days before the 1st to the
  # first date falling on first_weekday (never later than the 1st).
  def first_day
    first = @first - 6
    while(first.wday % 7 != (@first_weekday) % 7)
      first = first.next
    end
    first
  end

  # Maps "YYYY-MM-DD" => [day, objects-on-that-day] for every grid day;
  # +day_method+ is sent to each object to obtain its date. Objects
  # dated outside the grid are silently dropped. Memoized.
  def objects_for_days(objects, day_method)
    unless @objects_for_days
      @objects_for_days = {}
      days.each{|day| @objects_for_days[day.strftime("%Y-%m-%d")] = [day, []]}
      objects.each do |o|
        date = o.send(day_method.to_sym).strftime("%Y-%m-%d")
        if @objects_for_days[date]
          @objects_for_days[date][1] << o
        end
      end
    end
    @objects_for_days
  end

  # All grid days as an Array, memoized.
  def days
    unless @days
      @days = []
      each_day{|day| @days << day}
    end
    @days
  end

  # Same list as #days, memoized independently.
  # Bug fix: the original tested @mjdays, assigned @mdays and appended
  # to @days — raising NoMethodError on a fresh instance and doubling
  # #days' cached list otherwise. It now builds and caches its own list.
  def mjdays
    unless @mjdays
      @mjdays = []
      each_day{|day| @mjdays << day}
    end
    @mjdays
  end

  # Identity mapping kept for API compatibility.
  def first_day_of_week(day)
    day
  end

  # The weekday index that ends a week starting on +day+.
  def last_day_of_week(day)
    if day > 0
      day - 1
    else
      6
    end
  end
end
end
| 26.314286 | 111 | 0.56949 |
21fd4e5fb26b423fde8540b9f88504e3e8169ff6 | 1,055 | cask "azure-data-studio" do
# Pinned release and its SHA-256 for download integrity.
version "1.31.0"
sha256 "c16810cccc86992c5a13cfebaf0a2dc1552752658d8ee6a03ee7cec6db6f620e"
url "https://azuredatastudio-update.azurewebsites.net/#{version}/darwin/stable",
verified: "azuredatastudio-update.azurewebsites.net/"
name "Azure Data Studio"
desc "Data management tool that enables working with SQL Server"
homepage "https://docs.microsoft.com/en-us/sql/azure-data-studio/"
# Version discovery: scrape productVersion from the update endpoint.
livecheck do
url "https://azuredatastudio-update.azurewebsites.net/api/update/darwin/stable/VERSION"
strategy :page_match
regex(/"productVersion"\s*:\s*"(\d+(:?\.\d+)*)"/)
end
# The app self-updates, so brew won't force-upgrade it.
auto_updates true
app "Azure Data Studio.app"
# Expose the bundled CLI launcher as `azuredatastudio`.
binary "#{appdir}/Azure Data Studio.app/Contents/Resources/app/bin/code", target: "azuredatastudio"
# User-level leftovers removed by `brew uninstall --zap`.
zap trash: [
"~/Library/Application Support/azuredatastudio",
"~/Library/Preferences/com.azuredatastudio.oss.helper.plist",
"~/Library/Preferences/com.azuredatastudio.oss.plist",
"~/Library/Saved Application State/com.azuredatastudio.oss.savedState",
]
end
| 36.37931 | 101 | 0.740284 |
1a830f54eb81a848a71cdb586bc1372982d4f43b | 66 | require "minitest_helper"
describe "Product" do
# Bodyless `it` marks the example as pending/skipped in minitest's spec
# DSL — a placeholder until real assertions are written.
it "works"
end
| 11 | 25 | 0.742424 |
115658fcd396abc347e1d83c9b69c79329286fb2 | 1,588 | #-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require_dependency 'type'
# Patch mixed into the Type model by the Backlogs plugin. Adds predicates
# for distinguishing backlog story types from the task type.
module OpenProject::Backlogs::Patches::TypePatch
  def self.included(base)
    base.class_eval do
      include InstanceMethods
      extend ClassMethods
    end
  end
  # Intentionally empty; kept so the include hook stays symmetric.
  module ClassMethods
  end
  module InstanceMethods
    # True when this type's id is registered as a backlog story type.
    def story?
      Story.types.include?(id)
    end
    # True when this type is the single configured backlog task type.
    def task?
      Task.type.present? && id == Task.type
    end
  end
end
| 30.538462 | 91 | 0.742443 |
e24e5c85714c52b0f651052578e0aabc0bfacc6c | 369 | FactoryGirl.define do
factory :user do
name { Faker::Name.name }
email { Faker::Internet.email }
password 'password'
confirmed_at Time.now.utc
after(:create) do |user|
if user.confirmed?
user.subscription_tracker.update(subscribed_to_email_notifications: true, email_digest_delivery_frequency: "weekly")
end
end
end
end
| 24.6 | 124 | 0.699187 |
03ac6dc9d10852d5a64cd368eb41078bf164108c | 3,079 | ##
# This module requires Metasploit: http//metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
# Scanner for CVE-2013-0333: Rails instances whose JSON request processor
# can be tricked into YAML deserialization. Detection is differential — it
# compares the server's responses to a benign JSON body, a benign YAML hash,
# and a malformed YAML object.
class Metasploit3 < Msf::Auxiliary
  include Msf::Exploit::Remote::HttpClient
  include Msf::Auxiliary::Scanner
  def initialize(info={})
    super(update_info(info,
      'Name'           => 'Ruby on Rails JSON Processor YAML Deserialization Scanner',
      'Description'    => %q{
          This module attempts to identify Ruby on Rails instances vulnerable to
          an arbitrary object instantiation flaw in the JSON request processor.
      },
      'Author'         =>
        [
          'jjarmoc', # scanner module
          'hdm'      # CVE-2013-0156 scanner, basis of this technique.
        ],
      'License'        => MSF_LICENSE,
      'References'     =>
        [
          ['CVE', '2013-0333']
        ]
    ))
    register_options([
      OptString.new('TARGETURI', [true, "The URI to test", "/"]),
      OptEnum.new('HTTP_METHOD', [true, 'HTTP Method', 'POST', ['GET', 'POST', 'PUT']]),
    ], self.class)
  end
  # Sends +pdata+ as an application/json body to the configured URI/method.
  # Returns the HTTP response, or nil when the host did not answer (the
  # value of the assignment is the method's implicit return value).
  def send_probe(pdata)
    res = send_request_cgi({
      'uri'    => normalize_uri(datastore['TARGETURI']),
      'method' => datastore['HTTP_METHOD'],
      'ctype'  => 'application/json',
      'data'   => pdata
    })
  end
  # Per-host scanner callback. Bails out early (vprint + return) whenever a
  # probe gets no reply, since the differential comparison needs all three.
  def run_host(ip)
    # Straight JSON as a baseline
    res1 = send_probe(
      "{ \"#{Rex::Text.rand_text_alpha(rand(8)+1)}\" : \"#{Rex::Text.rand_text_alpha(rand(8)+1)}\" }"
    )
    unless res1
      vprint_status("#{rhost}:#{rport} No reply to the initial JSON request")
      return
    end
    if res1.code.to_s =~ /^[5]/
      vprint_error("#{rhost}:#{rport} The server replied with #{res1.code} for our initial JSON request, double check TARGETURI and HTTP_METHOD")
      return
    end
    # Deserialize a hash, this should work if YAML deserializes.
    # (':' is escaped as \u003a so the JSON layer passes it through.)
    res2 = send_probe("--- {}\n".gsub(':', '\u003a'))
    unless res2
      vprint_status("#{rhost}:#{rport} No reply to the initial YAML probe")
      return
    end
    # Deserialize a malformed object, inducing an error.
    res3 = send_probe("--- !ruby/object:\x00".gsub(':', '\u003a'))
    unless res3
      vprint_status("#{rhost}:#{rport} No reply to the second YAML probe")
      return
    end
    vprint_status("Probe response codes: #{res1.code} / #{res2.code} / #{res3.code}")
    if (res2.code == res1.code) and (res3.code != res2.code) and (res3.code != 200)
      # If first and second requests are the same, and the third is different but not a 200, we're vulnerable.
      print_good("#{rhost}:#{rport} is likely vulnerable due to a #{res3.code} reply for invalid YAML")
      report_vuln({
        :host  => rhost,
        :port  => rport,
        :proto => 'tcp',
        :name  => self.name,
        :info  => "Module triggered a #{res3.code} reply",
        :refs  => self.references
      })
    else
      # Otherwise we're not likely vulnerable.
      vprint_status("#{rhost}:#{rport} is not likely to be vulnerable or TARGETURI & HTTP_METHOD must be set")
    end
  end
end
| 30.79 | 145 | 0.603118 |
628f70efad6c0729536e39b3fe47797e5ae8b0f8 | 6,851 | # frozen_string_literal: true
require 'spec_helper'
# Service specs for replacing/adding/removing CRM contacts on an issue.
# The service accepts replace_ids, add_ids/add_emails and
# remove_ids/remove_emails params; replace_ids is mutually exclusive with
# the add/remove variants.
RSpec.describe Issues::SetCrmContactsService do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group) }
  let_it_be(:project) { create(:project, group: group) }
  let_it_be(:contacts) { create_list(:contact, 4, group: group) }
  let(:issue) { create(:issue, project: project) }
  let(:does_not_exist_or_no_permission) { "The resource that you are attempting to access does not exist or you don't have permission to perform this action" }
  # Every example starts with contacts[0] and contacts[1] already attached
  # to the issue; expectations below are relative to that baseline.
  before do
    create(:issue_customer_relations_contact, issue: issue, contact: contacts[0])
    create(:issue_customer_relations_contact, issue: issue, contact: contacts[1])
  end
  subject(:set_crm_contacts) do
    described_class.new(project: project, current_user: user, params: params).execute(issue)
  end
  describe '#execute' do
    context 'when the user has no permission' do
      let(:params) { { replace_ids: [contacts[1].id, contacts[2].id] } }
      it 'returns expected error response' do
        response = set_crm_contacts
        expect(response).to be_error
        expect(response.message).to eq('You have insufficient permissions to set customer relations contacts for this issue')
      end
    end
    context 'when user has permission' do
      before do
        group.add_reporter(user)
      end
      context 'when the contact does not exist' do
        let(:params) { { replace_ids: [non_existing_record_id] } }
        it 'returns expected error response' do
          response = set_crm_contacts
          expect(response).to be_error
          expect(response.message).to eq("Issue customer relations contacts #{non_existing_record_id}: #{does_not_exist_or_no_permission}")
        end
      end
      # Contacts from a foreign group are treated the same as missing ones,
      # so the error does not leak their existence.
      context 'when the contact belongs to a different group' do
        let(:group2) { create(:group) }
        let(:contact) { create(:contact, group: group2) }
        let(:params) { { replace_ids: [contact.id] } }
        before do
          group2.add_reporter(user)
        end
        it 'returns expected error response' do
          response = set_crm_contacts
          expect(response).to be_error
          expect(response.message).to eq("Issue customer relations contacts #{contact.id}: #{does_not_exist_or_no_permission}")
        end
      end
      context 'replace' do
        let(:params) { { replace_ids: [contacts[1].id, contacts[2].id] } }
        it 'updates the issue with correct contacts' do
          response = set_crm_contacts
          expect(response).to be_success
          expect(issue.customer_relations_contacts).to match_array([contacts[1], contacts[2]])
        end
      end
      context 'add' do
        let(:params) { { add_ids: [contacts[3].id] } }
        it 'updates the issue with correct contacts' do
          response = set_crm_contacts
          expect(response).to be_success
          expect(issue.customer_relations_contacts).to match_array([contacts[0], contacts[1], contacts[3]])
        end
      end
      context 'add by email' do
        let(:params) { { add_emails: [contacts[3].email] } }
        it 'updates the issue with correct contacts' do
          response = set_crm_contacts
          expect(response).to be_success
          expect(issue.customer_relations_contacts).to match_array([contacts[0], contacts[1], contacts[3]])
        end
      end
      context 'remove' do
        let(:params) { { remove_ids: [contacts[0].id] } }
        it 'updates the issue with correct contacts' do
          response = set_crm_contacts
          expect(response).to be_success
          expect(issue.customer_relations_contacts).to match_array([contacts[1]])
        end
      end
      context 'remove by email' do
        let(:params) { { remove_emails: [contacts[0].email] } }
        it 'updates the issue with correct contacts' do
          response = set_crm_contacts
          expect(response).to be_success
          expect(issue.customer_relations_contacts).to match_array([contacts[1]])
        end
      end
      # Seven ids (duplicates included) exceed the per-request cap of 6.
      context 'when attempting to add more than 6' do
        let(:id) { contacts[0].id }
        let(:params) { { add_ids: [id, id, id, id, id, id, id] } }
        it 'returns expected error message' do
          response = set_crm_contacts
          expect(response).to be_error
          expect(response.message).to eq('You can only add up to 6 contacts at one time')
        end
      end
      # Removal is idempotent: unknown ids succeed silently.
      context 'when trying to remove non-existent contact' do
        let(:params) { { remove_ids: [non_existing_record_id] } }
        it 'returns expected error message' do
          response = set_crm_contacts
          expect(response).to be_success
          expect(response.message).to be_nil
        end
      end
      context 'when combining params' do
        let(:error_invalid_params) { 'You cannot combine replace_ids with add_ids or remove_ids' }
        context 'add and remove' do
          let(:params) { { remove_ids: [contacts[1].id], add_ids: [contacts[3].id] } }
          it 'updates the issue with correct contacts' do
            response = set_crm_contacts
            expect(response).to be_success
            expect(issue.customer_relations_contacts).to match_array([contacts[0], contacts[3]])
          end
        end
        context 'replace and remove' do
          let(:params) { { replace_ids: [contacts[3].id], remove_ids: [contacts[0].id] } }
          it 'returns expected error response' do
            response = set_crm_contacts
            expect(response).to be_error
            expect(response.message).to eq(error_invalid_params)
          end
        end
        context 'replace and add' do
          let(:params) { { replace_ids: [contacts[3].id], add_ids: [contacts[1].id] } }
          it 'returns expected error response' do
            response = set_crm_contacts
            expect(response).to be_error
            expect(response.message).to eq(error_invalid_params)
          end
        end
      end
      context 'when trying to add an existing issue contact' do
        let(:params) { { add_ids: [contacts[0].id] } }
        it 'does not return an error' do
          response = set_crm_contacts
          expect(response).to be_success
        end
      end
      context 'when trying to add the same contact twice' do
        let(:params) { { add_ids: [contacts[3].id, contacts[3].id] } }
        it 'does not return an error' do
          response = set_crm_contacts
          expect(response).to be_success
        end
      end
      context 'when trying to remove a contact not attached to the issue' do
        let(:params) { { remove_ids: [contacts[3].id] } }
        it 'does not return an error' do
          response = set_crm_contacts
          expect(response).to be_success
        end
      end
    end
  end
end
| 31.865116 | 159 | 0.628083 |
010ab23f118db0bce0475c61874cdfda7522c8ee | 502 | # Be sure to restart your server when you modify this file.
# Your secret key for verifying the integrity of signed cookies.
# If you change this key, all old signed cookies will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
Gemfile2pom::Application.config.secret_token = '34b39a37533b33972b85b89b5e304c16549bf09b3c98f9894adcdfa3419730759fc40ba5fc5a636628fed07907a16059edcbff559903ddd3cc299f9c46ef838d'
| 62.75 | 177 | 0.834661 |
11652c23d64377a77f1e6507adaf913824c0a6f7 | 197 | module RailsEmoji
class Engine < ::Rails::Engine
initializer 'rails_emoji.assets.precompile' do |app|
app.config.assets.precompile << %r(emojis\/.*\.(?:png|svg|gif)$)
end
end
end
| 24.625 | 70 | 0.664975 |
f81834d28b5421a52ec2c349766cbcd7b550e75d | 601 | describe KnapsackPro::RepositoryAdapterInitiator do
describe '.call' do
subject { described_class.call }
before do
expect(KnapsackPro::Config::Env).to receive(:repository_adapter).and_return(repository_adapter)
end
context 'when repository adapter is git' do
let(:repository_adapter) { 'git' }
it { should be_instance_of KnapsackPro::RepositoryAdapters::GitAdapter }
end
context 'when default repository adapter' do
let(:repository_adapter) { nil }
it { should be_instance_of KnapsackPro::RepositoryAdapters::EnvAdapter }
end
end
end
| 27.318182 | 101 | 0.720466 |
ffd9b2fb4dcb22387c7811f86530bb5af48babe2 | 2,160 | require 'rails_helper'
# Verifies that FeedbackSubmissionJob delivers feedback to Service Now in
# both HTML and plain-text form, with and without a submitter email address.
# NOTE(review): the 'lΓ©l' fixture string looks mojibake-damaged ('lél');
# kept verbatim since the examples only rely on internal consistency.
RSpec.describe FeedbackSubmissionJob, type: :job do
  describe '#perform' do
    let(:feedback_submission_job) { FeedbackSubmissionJob.new }
    # Test helper exposing every email the job sent.
    let(:emails_sent) { ::Et1::Test::EmailsSent.new }
    it 'sends an HTML email to service now' do
      feedback_submission_job.perform comments: 'lΓ©l', suggestions: 'lewl', email_address: '[email protected]'
      expect(emails_sent.feedback_html_email_for email_address: '[email protected]').to be_present
    end
    it 'sends a text email to service now' do
      feedback_submission_job.perform comments: 'lΓ©l', suggestions: 'lewl', email_address: '[email protected]'
      expect(emails_sent.feedback_text_email_for email_address: '[email protected]').to be_present
    end
    it 'sends an text email to service now with the correct content' do
      feedback_submission_job.perform comments: 'lΓ©l', suggestions: 'lewl', email_address: '[email protected]'
      expect(emails_sent.feedback_text_email_for email_address: '[email protected]').to have_correct_content_for(comments: 'lΓ©l', suggestions: 'lewl', email_address: '[email protected]')
    end
    it 'sends an html email to service now with the correct content' do
      feedback_submission_job.perform comments: 'lΓ©l', suggestions: 'lewl', email_address: '[email protected]'
      expect(emails_sent.feedback_html_email_for email_address: '[email protected]').to have_correct_content_for(comments: 'lΓ©l', suggestions: 'lewl', email_address: '[email protected]')
    end
    context 'without an email address' do
      let(:placeholder_email_address) { "[email protected]" }
      it 'sends an HTML email to service now using a placeholder email' do
        feedback_submission_job.perform comments: 'lΓ©l', suggestions: 'lewl'
        expect(emails_sent.feedback_html_email_for email_address: placeholder_email_address).to be_present
      end
      # Was an exact duplicate of the HTML example above, leaving the text
      # delivery unverified for the placeholder case — check it instead.
      it 'sends a text email to service now using a placeholder email' do
        feedback_submission_job.perform comments: 'lΓ©l', suggestions: 'lewl'
        expect(emails_sent.feedback_text_email_for email_address: placeholder_email_address).to be_present
      end
    end
  end
end
| 45 | 182 | 0.744444 |
03d411861c4aa666acdf66ee9ee97b95572cf9f6 | 1,610 | # == Schema Information
#
# Table name: items
#
# id :integer not null, primary key
# name :string
# category :string
# created_at :datetime
# updated_at :datetime
# barcode_count :integer
# organization_id :integer
#
# An inventory item type belonging to an organization (see schema comment
# above: items.organization_id). Tracked in storage locations via
# inventory_items and exchanged through polymorphic line_items.
class Item < ApplicationRecord
  belongs_to :organization # If these are universal this isn't necessary
  belongs_to :canonical_item

  # Names are unique per organization. Uniqueness scopes must reference a
  # database column, so scope on the foreign key (:organization_id), not
  # the :organization association name.
  validates_uniqueness_of :name, scope: :organization_id
  validates_presence_of :name
  validates :organization, presence: true

  has_many :line_items
  has_many :inventory_items
  has_many :barcode_items
  has_many :storage_locations, through: :inventory_items
  # :source_type on a polymorphic :through association expects the class
  # name as a string, not the class constant.
  has_many :donations, through: :line_items, source: :itemizable, source_type: "Donation"
  has_many :distributions, through: :line_items, source: :itemizable, source_type: "Distribution"

  include Filterable
  scope :alphabetized, -> { order(:name) }
  scope :in_category, ->(category) { where(category: category) }
  scope :in_same_category_as, ->(item) { where(category: item.category).where.not(id: item.id) }

  # Distinct categories in use, alphabetized.
  def self.categories
    select(:category).group(:category).order(:category)
  end

  # Items that have at least one barcode registered.
  def self.barcoded_items
    joins(:barcode_items).order(:name).group(:id)
  end

  # Storage locations whose inventory currently references +item+.
  def self.storage_locations_containing(item)
    StorageLocation.joins(:inventory_items).where('inventory_items.item_id = ?', item.id)
  end

  # All barcodes registered for +item+.
  def self.barcodes_for(item)
    BarcodeItem.where('item_id = ?', item.id)
  end

  # Convenience method so that other methods can be simplified to
  # expect an id or an Item object
  def to_i
    id
  end
end
| 29.272727 | 96 | 0.717391 |
7a9e122b44a4da7e4c1a36df010a1aeadce64311 | 3,499 | require File.join(File.dirname(__FILE__), "..", "test_helper")
require 'mocha/auto_verify'
require 'method_definer'
class AutoVerifyTest < Test::Unit::TestCase
attr_reader :test_case
def setup
@test_case = Object.new
class << test_case
def self.add_teardown_method(symbol); end
include Mocha::AutoVerify
end
end
def test_should_build_mock
mock = test_case.mock
assert mock.is_a?(Mocha::Mock)
end
def test_should_add_expectations_to_mock
mock = test_case.mock(:method_1 => 'result_1', :method_2 => 'result_2')
assert_equal 'result_1', mock.method_1
assert_equal 'result_2', mock.method_2
end
def test_should_build_stub
stub = test_case.stub
assert stub.is_a?(Mocha::Mock)
end
def test_should_add_expectation_to_stub
stub = test_case.stub(:method_1 => 'result_1', :method_2 => 'result_2')
assert_equal 'result_1', stub.method_1
assert_equal 'result_2', stub.method_2
end
def test_should_build_stub_that_stubs_all_methods
stub = test_case.stub_everything
assert stub.everything_stubbed
end
def test_should_add_expectations_to_stub_that_stubs_all_methods
stub = test_case.stub_everything(:method_1 => 'result_1', :method_2 => 'result_2')
assert_equal 'result_1', stub.method_1
assert_equal 'result_2', stub.method_2
end
def test_should_always_new_mock
assert_not_equal test_case.mock, test_case.mock
end
def test_should_always_build_new_stub
assert_not_equal test_case.stub, test_case.stub
end
def test_should_always_build_new_stub_that_stubs_all_methods
assert_not_equal test_case.stub, test_case.stub
end
def test_should_store_each_new_mock
expected = Array.new(3) { test_case.mock }
assert_equal expected, test_case.mocks
end
def test_should_store_each_new_stub
expected = Array.new(3) { test_case.stub }
assert_equal expected, test_case.mocks
end
def test_should_store_each_new_stub_that_stubs_all_methods
expected = Array.new(3) { test_case.stub_everything }
assert_equal expected, test_case.mocks
end
def test_should_verify_each_mock
mocks = Array.new(3) do
mock = Object.new
mock.define_instance_accessor(:verify_called)
class << mock
def verify(&block)
self.verify_called = true
end
end
mock
end
test_case.replace_instance_method(:mocks) { mocks }
test_case.verify_mocks
assert mocks.all? { |mock| mock.verify_called }
end
def test_should_yield_to_block_for_each_assertion
mock_class = Class.new do
def verify(&block); yield; end
end
mock = mock_class.new
test_case.replace_instance_method(:mocks) { [mock] }
yielded = false
test_case.verify_mocks { yielded = true }
assert yielded
end
def test_should_reset_mocks_on_teardown
mock = Class.new { define_method(:verify) {} }.new
test_case.mocks << mock
test_case.teardown_mocks
assert test_case.mocks.empty?
end
def test_should_create_named_mock
mock = test_case.mock('named_mock')
assert_equal '#<Mock:named_mock>', mock.mocha_inspect
end
def test_should_create_named_stub
stub = test_case.stub('named_stub')
assert_equal '#<Mock:named_stub>', stub.mocha_inspect
end
def test_should_create_named_stub_that_stubs_all_methods
stub = test_case.stub_everything('named_stub')
assert_equal '#<Mock:named_stub>', stub.mocha_inspect
end
end | 27.769841 | 86 | 0.728208 |
f889e747ca3814634a6fc6a616ca878afd08d0b4 | 669 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Download URL for the Cloud SQL Proxy binary (linux/amd64 build);
# override this attribute to pin a mirrored or versioned artifact.
default['cloudsql']['binary'] = 'https://dl.google.com/cloudsql/cloud_sql_proxy.linux.amd64'
| 41.8125 | 92 | 0.759342 |
f7553d2e1d4168e02c088f46627370248c6f68d7 | 333 | arr = [
  "Add Bootstrap to app",
  "Seed better data to app",
  "Make slides",
  "Check git hooks",
  "Discuss presentation order",
  "Demo talk for coworkers",
  "Practice lectures",
  "Add MOAR CAT PICS",
  "Test integrations",
  "Incorporate team feedback"
]
# Seed one Todo per title above; `order` preserves the list position.
# NOTE(review): the literal 10 must stay in sync with the array length —
# arr.each_with_index would be safer if the list ever changes size.
10.times do |i|
  Todo.create name: arr[i], order: i, done: false
end | 20.8125 | 49 | 0.663664 |
ed7eff96b14cf2b0e39d835bab99fe17dba556bc | 396 | module Featurable
extend ActiveSupport::Concern
included do
has_many :feature_lists, as: :featurable, dependent: :destroy
end
def feature_list_for_locale(locale)
feature_lists.find_by_locale(locale) || feature_lists.build(locale: locale)
end
def load_or_create_feature_list(locale)
feature_lists.find_by_locale(locale) || feature_lists.create(locale: locale)
end
end
| 24.75 | 80 | 0.777778 |
18bc77ac57ac3941f3473db98d248c85506563bf | 1,400 | module Delayed
class PerformableMethod < Struct.new(:object, :method, :args)
CLASS_STRING_FORMAT = /^CLASS\:([A-Z][\w\:]+)$/
AR_STRING_FORMAT = /^AR\:([A-Z][\w\:]+)\:(\d+)$/
def initialize(object, method, args)
raise NoMethodError, "undefined method `#{method}' for #{self.inspect}" unless object.respond_to?(method)
self.object = dump(object)
self.args = args.map { |a| dump(a) }
self.method = method.to_sym
end
def display_name
case self.object
when CLASS_STRING_FORMAT then "#{$1}.#{method}"
when AR_STRING_FORMAT then "#{$1}##{method}"
else "Unknown##{method}"
end
end
def perform
load(object).send(method, *args.map{|a| load(a)})
rescue ActiveRecord::RecordNotFound
# We cannot do anything about objects which were deleted in the meantime
true
end
private
def load(arg)
case arg
when CLASS_STRING_FORMAT then $1.constantize
when AR_STRING_FORMAT then $1.constantize.find($2)
else arg
end
end
def dump(arg)
case arg
when Class then class_to_string(arg)
when ActiveRecord::Base then ar_to_string(arg)
else arg
end
end
def ar_to_string(obj)
"AR:#{obj.class}:#{obj.id}"
end
def class_to_string(obj)
"CLASS:#{obj.name}"
end
end
end | 25.454545 | 111 | 0.598571 |
38025bdb772444f16d0172862f1b9075545a18de | 1,116 | AuthDemo::Application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
end
| 37.2 | 85 | 0.772401 |
eda15bad596c60ff49433eb787944aa61e496473 | 587 | class User < ApplicationRecord
# Include default devise modules. Others available are:
# :confirmable, :lockable, :timeoutable and :omniauthable
devise :database_authenticatable, :registerable,
:recoverable, :rememberable, :trackable, :validatable
has_many :memberships
has_many :groups, through: :memberships
validates :username, :presence => true
has_attached_file :avatar, :styles => { :medium => "300x300>", :thumb => "100x100#" }, :default_url => "/images/:style/missing.png"
validates_attachment_content_type :avatar, :content_type => /\Aimage\/.*\Z/
end
| 45.153846 | 133 | 0.729131 |
87d177e9bbff1e19920c1c5b359d9ca88a4536d2 | 459 | #
# Cookbook Name:: fb_iproute
# Recipe:: packages
#
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
# Keep the iproute utilities up to date; gated behind the cookbook's
# manage_packages attribute so hosts can opt out of package management.
package %w{iproute iproute-tc} do
  only_if { node['fb_iproute']['manage_packages'] }
  action :upgrade
end
| 27 | 77 | 0.740741 |
e237d493569c908e6d5999411c5e5ad7195f7b23 | 585 | module VagrantPlugins
module ProviderVMwareFree
module Action
class MatchMACAddress
def initialize(app, env)
@app = app
end
def call(env)
raise Vagrant::Errors::VMBaseMacNotSpecified if !env[:machine].config.vm.base_mac
# Create the proc which we want to use to modify the virtual machine
env[:ui].info I18n.t("vagrant.actions.vm.match_mac.matching")
env[:machine].provider.driver.set_mac_address(env[:machine].config.vm.base_mac)
@app.call(env)
end
end
end
end
end
| 26.590909 | 91 | 0.635897 |
1d036ca3c676bcb529c1da0e710e5d3e3d450219 | 5,754 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ApiManagement::Mgmt::V2019_12_01
module Models
#
# Api Operation details.
#
class OperationContract < Resource
include MsRestAzure
# @return [Array<ParameterContract>] Collection of URL template
# parameters.
attr_accessor :template_parameters
# @return [String] Description of the operation. May include HTML
# formatting tags.
attr_accessor :description
# @return [RequestContract] An entity containing request details.
attr_accessor :request
# @return [Array<ResponseContract>] Array of Operation responses.
attr_accessor :responses
# @return [String] Operation Policies
attr_accessor :policies
# @return [String] Operation Name.
attr_accessor :display_name
# @return [String] A Valid HTTP Operation Method. Typical Http Methods
# like GET, PUT, POST but not limited by only them.
attr_accessor :method
# @return [String] Relative URL template identifying the target resource
# for this operation. May include parameters. Example:
# /customers/{cid}/orders/{oid}/?date={date}
attr_accessor :url_template
#
# Mapper for OperationContract class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'OperationContract',
type: {
name: 'Composite',
class_name: 'OperationContract',
model_properties: {
id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'id',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
template_parameters: {
client_side_validation: true,
required: false,
serialized_name: 'properties.templateParameters',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'ParameterContractElementType',
type: {
name: 'Composite',
class_name: 'ParameterContract'
}
}
}
},
description: {
client_side_validation: true,
required: false,
serialized_name: 'properties.description',
constraints: {
MaxLength: 1000
},
type: {
name: 'String'
}
},
request: {
client_side_validation: true,
required: false,
serialized_name: 'properties.request',
type: {
name: 'Composite',
class_name: 'RequestContract'
}
},
responses: {
client_side_validation: true,
required: false,
serialized_name: 'properties.responses',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'ResponseContractElementType',
type: {
name: 'Composite',
class_name: 'ResponseContract'
}
}
}
},
policies: {
client_side_validation: true,
required: false,
serialized_name: 'properties.policies',
type: {
name: 'String'
}
},
display_name: {
client_side_validation: true,
required: true,
serialized_name: 'properties.displayName',
constraints: {
MaxLength: 300,
MinLength: 1
},
type: {
name: 'String'
}
},
method: {
client_side_validation: true,
required: true,
serialized_name: 'properties.method',
type: {
name: 'String'
}
},
url_template: {
client_side_validation: true,
required: true,
serialized_name: 'properties.urlTemplate',
constraints: {
MaxLength: 1000,
MinLength: 1
},
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 31.102703 | 78 | 0.452729 |
ff7b06ec4822c605b0a7c6b111071ef73c26dc02 | 2,764 | require "spec_helper"
require "trikeapps"
# Specs for the three-step string puzzle decoder. Per the examples below:
# rule1 rewrites X-Y-X triples to X-X-X (skipped around spaces), rule2
# collapses runs of six identical characters to one, and rule3 reverses
# the text; #demystify chains all three.
describe Trikeapps::StringDemystifier do
  # Built once for the whole group: a short and a long puzzle input.
  before(:all) do
    str = "!SSWSWSKCOR !OTNAAAWAA!"
    @string_small = Trikeapps::StringDemystifier.new str
    str = "!YTIRCO!IQIIQIDEMGMMIM FO YMJMMSM!RA !EGEEJEHT ROEOOSOF PAEJEEBEL TN!AIKIITIG ENVNNMNO ,GQGGCGN!ILEKIZIISIRT A RJRRDROF PETOTTJTS LLZLLEL!AMSXSSMS ENODOOSO"
    @string_large = Trikeapps::StringDemystifier.new str
  end
  describe 'rule1' do
    it 'returns a string' do
      expect(@string_small.rule1).to be_a String
    end
    it 'should not replace if the left and right values are same unless the surrounding character is a space' do
      str = "!OTNAAAWAA A AXA"
      str_dem = Trikeapps::StringDemystifier.new str
      expect(str_dem.rule1).to eq '!OTNAAAAAA A AAA'
    end
    it 'should replace correct value in the middle if the left and right values are same for small_string' do
      expect(@string_small.rule1).to eq '!SSSSSSKCOR !OTNAAAAAA!'
    end
    it 'should replace correct value in the middle if the left and right values are same for big_string' do
      expect(@string_large.rule1).to eq '!YTIRCO!IIIIIIDEMMMMMM FO YMMMMMM!RA !EEEEEEHT ROOOOOOF PAEEEEEEL TN!AIIIIIIG ENNNNNNO ,GGGGGGN!ILEKIIIIIIRT A RRRRRROF PETTTTTTS LLLLLLL!AMSSSSSS ENOOOOOO'
    end
  end
  # rule2 is fed rule1's output, matching how #demystify chains the rules.
  describe 'rule2' do
    it 'returns a string' do
      expect(@string_small.rule2).to be_a String
    end
    it 'should replace any consecutive 6 character as 1 character in the small_string' do
      expect(@string_small.rule2(@string_small.rule1)).to eq '!SKCOR !OTNA!'
    end
    it 'should replace any consecutive 6 character as 1 character in the large_string' do
      expect(@string_large.rule2(@string_large.rule1)).to eq '!YTIRCO!IDEM FO YM!RA !EHT ROF PAEL TN!AIG ENO ,GN!ILEKIRT A ROF PETS LL!AMS ENO'
    end
  end
  describe 'rule3' do
    it 'returns a string' do
      expect(@string_small.rule3).to be_a String
    end
    it 'should replace any consecutive 6 character as 1 character in the small_string' do
      expect(@string_large.rule3(@string_small.rule2(@string_small.rule1))).to eq 'ANTO ROCKS'
    end
    it 'should replace any consecutive 6 character as 1 character in the large_string' do
      expect(@string_large.rule3(@string_large.rule2(@string_large.rule1))).to eq 'ONE SMALL STEP FOR A TRIKELING, ONE GIANT LEAP FOR THE ARMY OF MEDIOCRITY'
    end
  end
  describe 'Demystify' do
    it 'should return correct output after running all rules for small_string' do
      expect(@string_small.demystify).to eq 'ANTO ROCKS'
    end
    it 'should return correct output after running all rules for large_string' do
      expect(@string_large.demystify).to eq 'ONE SMALL STEP FOR A TRIKELING, ONE GIANT LEAP FOR THE ARMY OF MEDIOCRITY'
    end
  end
end
| 34.55 | 196 | 0.738423 |
e803f9469225cc167ba8fe63436240d433b7a803 | 1,762 | RSpec.describe FirstExisting do
it "has a version number" do
expect(FirstExisting::VERSION).not_to be nil
end
describe ".first_existing" do
context "when given only existing arguments" do
it "returns the first one" do
expect(first_existing("string", true, 0, 15)).to eq "string"
end
end
context "when given only nil arguments" do
it "returns nil" do
expect(first_existing(nil, nil, nil, nil)).to eq nil
end
end
context "when given no arguments" do
it "returns nil" do
expect(first_existing).to eq nil
end
end
context "when given mixed arguments" do
context "when the first argument is existing" do
it "returns it" do
expect(first_existing("first", nil, nil, nil)).to eq "first"
end
end
context "when the first argument is not existing" do
context "when the second argument is existing" do
it "returns the second argument" do
expect(first_existing(nil, "second", nil, nil, nil)).to eq "second"
end
end
context "when only the last argument is existing" do
it "returns the last argument" do
expect(first_existing(nil, nil, nil, nil, "last")).to eq "last"
end
end
end
end
context "when given an empty string" do
it "qualifies as existing" do
expect(first_existing("")).to eq ""
end
end
context "when given a blank string" do
it "qualifies as existing" do
expect(first_existing("\t ")).to eq "\t "
end
end
context "when given false" do
it "qualifies as existing" do
expect(first_existing(false)).to eq false
end
end
end
end
| 26.69697 | 79 | 0.609535 |
ffbba3d7839e125c4b2956f63a868f0a351e00a6 | 453 | require 'rails_helper'
# Feature spec: a signed-in member can view the service apps index and see
# each app's access token rendered on the page.
feature 'User can see service apps index' do
  before :each do
    @member = create :member
    @service_app = create :service_app
  end
  scenario 'and see all service apps' do
    login_as @member
    visit service_apps_path
    expect(page).to have_content('Service Apps')
    expect(page).to have_content('Registrar')
    expect(page).to have_content('Access token:')
    # find raises Capybara::ElementNotFound if the token element is missing,
    # so this doubles as an assertion.
    find("#access_token_#{@service_app.id}")
  end
end
| 23.842105 | 49 | 0.706402 |
28d692c3279132f5dd3cb39359ec725617aa7496 | 2,497 | require 'rails_helper'
# Model spec for Product: creation, validations, associations,
# callbacks, nested attributes, and handle (slug) generation.
RSpec.describe Product, type: :model do
  let(:product) { create(:product, title: 'Leather Bag') }

  describe '#create' do
    describe 'regular' do
      let(:product) { create(:product, product_type: :regular) }

      it 'adds product with default variant' do
        # NOTE(review): despite the description, this asserts that NO
        # variants exist after create — confirm wording vs. intent.
        expect(product.variants.count).to eq(0)
      end
    end

    # Pending coverage for composite products:
    # describe 'composite' do
    # let(:product) { create(:product, :composite) }
    # # should only contain 1 variant with type :composite
    # end
  end

  describe 'validations' do
    subject { product }

    it { is_expected.to validate_presence_of(:title) }
    it { is_expected.to validate_presence_of(:account_id) }

    it 'validates uniqueness of handle' do
      product # force creation so 'leather-bag' is already taken
      new_product = create(:product, title: 'Leather Bag',
                                     account: product.account)
      expect(new_product.handle).to eq('leather-bag-1')
    end

    it 'validates composite_variant_count to be 1' do
      product = create(:product, :composite)
      product.variants.create(attributes_for(:variant))
      expect(product).to be_composite
      expect(product).to be_valid
      # Adding a second variant must make a composite product invalid.
      product.variants.new(attributes_for(:variant))
      expect(product).to be_invalid
      expect(product.variants.count).to eq(1)
      expect(product.errors.full_messages)
        .to include('Composite product should only have 1 variant')
    end
  end

  describe 'associations' do
    it { is_expected.to belong_to(:account) }
    it { is_expected.to have_many(:variants) }
    it { is_expected.to have_many(:invoice_lines) }
    it { is_expected.to have_many(:components) }
  end

  describe 'callbacks' do
    it do
      expect(product).to callback(:create_unique_handle).before(:validation)
    end
  end

  describe 'model' do
    it { is_expected.to accept_nested_attributes_for(:variants) }
  end

  describe 'handle' do
    before { product }

    it { expect(product.handle).to eq('leather-bag') }

    context 'with existing handle' do
      # Two more products with the same title on the same account
      # should get numbered handles.
      let(:product2) do
        create(:product, account: product.account, title: 'Leather Bag')
      end
      let(:product3) do
        create(:product, account: product.account, title: 'Leather Bag')
      end

      before do
        product
        product2
        product3
      end

      it { expect(product2.handle).to eq('leather-bag-1') }
      it { expect(product3.handle).to eq('leather-bag-2') }
      # The original product tracks how many duplicates were created.
      it { expect(product.reload.handle_count).to eq(2) }
    end
  end
end
| 29.376471 | 76 | 0.650781 |
bb7e3ee801ec5736032265271eba0ebbf9194cce | 100 | require 'setsumei'
# Auto-load every spec support file (shared contexts, helpers, etc.).
Dir[File.join(File.dirname(__FILE__), "support/**/*.rb")].each do |support_file|
  require support_file
end
| 25 | 79 | 0.67 |
1c3a4ef0b556f8ecd4cd74b93d790f6995bcda82 | 446 | RSpec.describe "Attachments", type: :request, skip_seed: true do
before do
sign_in(@user)
end
describe "DELETE #destroy" do
let(:partner) { create(:partner, documents: [Rack::Test::UploadedFile.new(Rails.root.join("spec/fixtures/files/dbase.pdf"), "application/pdf")]) }
it "redirects to referrer" do
delete attachment_path(partner.documents.first)
expect(response).to redirect_to(partners_path)
end
end
end
| 29.733333 | 150 | 0.70852 |
87a3868700b1b2030de02e9dfa625cae5ac3cb65 | 450 | class Cms::Extensions::AdditionalInfo < Array
def mongoize
self.to_a
end
class << self
def demongoize(object)
if object.present?
object.map { |h| h.symbolize_keys }
else
[]
end
end
def mongoize(object)
case object
when self.class then object.mongoize
when Array then
object.select { |ary| ary[:field].present? }
else
object
end
end
end
end
| 17.307692 | 52 | 0.577778 |
614429304987459915db213a5912e28f8ecf7ce3 | 58 | module ProtectedAttributes
VERSION = "1.8.1".freeze
end
| 14.5 | 26 | 0.758621 |
0311e34a0b74aacecd40febec783a25729d34976 | 2,580 | # Encoding: utf-8
# Cloud Foundry Java Buildpack
# Copyright (c) 2013 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
require 'component_helper'
require 'java_buildpack/util/filtering_pathname'
require 'java_buildpack/util/play/post22'
# Specs for the shared base class used by post-2.2 Play framework
# containers.
describe JavaBuildpack::Util::Play::Post22 do
  include_context 'component_helper'

  let(:play_app) { described_class.new(droplet) }

  # Post22 is abstract: subclasses must implement #root.
  it 'should raise error if root method is unimplemented' do
    expect { play_app.send(:root) }.to raise_error "Method 'root' must be defined"
  end

  context app_fixture: 'container_play_2.2_staged' do
    before do
      allow(play_app).to receive(:root).and_return(droplet.root)
    end

    it 'should correctly determine the version of a Play 2.2 application' do
      expect(play_app.version).to eq('2.2.0')
    end

    it 'should correctly extend the classpath' do
      play_app.compile
      expect((app_dir + 'bin/play-application').read)
        .to match 'declare -r app_classpath="\$app_home/../.additional_libs/test-jar-1.jar:\$app_home/../.additional_libs/test-jar-2.jar:'
    end

    it 'should return command' do
      expect(play_app.release).to eq("PATH=#{java_home.root}/bin:$PATH #{java_home.as_env_var} $PWD/bin/play-application " \
                                     '-Jtest-opt-2 -Jtest-opt-1 -J-Dhttp.port=$PORT')
    end

    context do
      let(:java_opts) { super() << '-Xmx30m -Xms30m' }

      it 'should not allow multiple options in a single JAVA_OPTS array entry' do
        expect { play_app.release }.to raise_error(/Invalid Java option contains more than one option/)
      end
    end

    context do
      # A single option containing a shell command substitution is fine.
      let(:java_opts) do
        super() << '-Dappdynamics.agent.nodeName=$(expr "$VCAP_APPLICATION" : \'.*instance_id[": ]*"\([a-z0-9]\+\)".*\')'
      end

      it 'should allow options with expressions' do
        play_app.release
        expect(java_opts).to include('-Dappdynamics.agent.nodeName=$(expr "$VCAP_APPLICATION" : \'.*instance_id[": ]*"\([a-z0-9]\+\)".*\')')
      end
    end
  end
end
| 33.947368 | 140 | 0.685271 |
39095d67dda5b78a87b8715ddd9e69d21167676b | 104 | class PieceSerializer
include FastJsonapi::ObjectSerializer
attributes :id, :board_location
end
| 20.8 | 40 | 0.798077 |
e81b73752a82406114633f99f92b9add5601e63e | 232 | class Message < ActiveRecord::Base
has_many :replies
belongs_to :user
delegate :name, :email, :to => :user, :allow_nil => true
validates_presence_of :body, :user_id
validates_length_of :body, :maximum => 140
end
| 21.090909 | 58 | 0.693966 |
216766b9ccc5af8bbf21c1c0b2b017874c26672a | 1,093 | class Navi < Formula
desc "Interactive cheatsheet tool for the command-line"
homepage "https://github.com/denisidoro/navi"
url "https://github.com/denisidoro/navi/archive/v2.12.0.tar.gz"
sha256 "2331a82f91dee35bd3a1c3cfc14b23a69f9d2ff65a8b7b840ead4e76e8e1f146"
license "Apache-2.0"
bottle do
sha256 cellar: :any_skip_relocation, catalina: "f3636ee09127ffb3746ab27d5f00a73910322ec4ad3aa17e31f323fb7cad5380"
sha256 cellar: :any_skip_relocation, mojave: "c21b0eb4bb7e9060ae39cb80d2b02c780d0bb26ac573bf5024aac9c12d57650e"
sha256 cellar: :any_skip_relocation, high_sierra: "986f19287953b58c8a9e29bb026c558bfc30b32dfb8c2b64d92b6adc3a36c011"
end
depends_on "rust" => :build
depends_on "fzf"
def install
system "cargo", "install", *std_cargo_args
end
test do
assert_match "navi " + version, shell_output("#{bin}/navi --version")
(testpath/"cheats/test.cheat").write "% test\n\n# foo\necho bar\n\n# lorem\necho ipsum\n"
assert_match "bar", shell_output("export RUST_BACKTRACE=1; #{bin}/navi --path #{testpath}/cheats best foo")
end
end
| 40.481481 | 120 | 0.762123 |
ff3ce9071d058e67062d7f5017aa8f8203b966fa | 81 | require 'flat-ui-sass/version'
# Namespace for the flat-ui-sass gem. The gem ships Sass/asset files
# via its load path; no runtime Ruby code lives here yet.
module FlatUISass
  # Intentionally empty — serves only as the gem's namespace.
end
| 13.5 | 30 | 0.728395 |
e8cf8741227260dd514450e976adae9172a3ef9d | 1,457 | # encoding: utf-8
module Rubocop
  module Cop
    module Style
      # The purpose of this cop is to advise the use of
      # alias_method over the alias keyword whenever possible.
      class Alias < Cop
        MSG = 'Use alias_method instead of alias.'

        # Ignore `alias` used inside `instance_exec` blocks, where
        # `alias_method` cannot be substituted.
        def on_block(node)
          method, _args, body = *node
          _receiver, method_name = *method
          # using alias is the only option in certain scenarios
          # in such scenarios we don't want to report an offense
          if method_name == :instance_exec
            on_node(:alias, body) { |n| ignore_node(n) }
          end
        end

        # Flag every remaining `alias` keyword usage.
        def on_alias(node)
          return if ignored_node?(node)

          # alias_method can't be used with global variables
          new, old = *node
          return if new.type == :gvar && old.type == :gvar

          add_offense(node, :keyword)
        end

        # Rewrites `alias new old` into `alias_method :new, :old`.
        def autocorrect(node)
          @corrections << lambda do |corrector|
            # replace alias with alias_method
            corrector.replace(node.loc.keyword, 'alias_method')
            # insert a comma
            new, old = *node
            corrector.insert_after(new.loc.expression, ',')
            # convert bareword arguments to symbols
            corrector.replace(new.loc.expression, ":#{new.children.first}")
            corrector.replace(old.loc.expression, ":#{old.children.first}")
          end
        end
      end
    end
  end
end
| 29.734694 | 75 | 0.582018 |
5dffa3263db6b326981bd6d7097d1687d7537d52 | 863 | # Version definitions
# Shared version/tag so all Amplify pods are released in lock-step.
$AMPLIFY_VERSION = '1.0.3'
$AMPLIFY_RELEASE_TAG = "v#{$AMPLIFY_VERSION}"

Pod::Spec.new do |s|
  s.name = 'CoreMLPredictionsPlugin'
  s.version = $AMPLIFY_VERSION
  s.summary = 'Amazon Web Services Amplify for iOS.'
  s.description = 'AWS Amplify for iOS provides a declarative library for application development using cloud services'
  s.homepage = 'https://github.com/aws-amplify/amplify-ios'
  s.license = 'Apache License, Version 2.0'
  s.author = { 'Amazon Web Services' => 'amazonwebservices' }
  s.source = { :git => 'https://github.com/aws-amplify/amplify-ios.git', :tag => $AMPLIFY_RELEASE_TAG }
  s.platform = :ios, '13.0'
  s.swift_version = '5.0'
  s.source_files = 'AmplifyPlugins/Predictions/CoreMLPredictionsPlugin/**/*.swift'

  # Pin the core Amplify pod to the same release as this plugin.
  s.dependency 'Amplify', $AMPLIFY_VERSION
end
| 33.192308 | 120 | 0.680185 |
aca9ef9a22740761641a5ff8e98f3d10ce65cebe | 216 | require 'spec_helper'
describe WLMachineLearning do
  it 'has a version number' do
    expect(WLMachineLearning::VERSION).not_to be nil
  end

  # NOTE(review): this is the bundler gem-scaffold placeholder and
  # always fails — replace it with a real example (or remove it).
  it 'does something useful' do
    expect(false).to eq(true)
  end
end
| 18 | 52 | 0.731481 |
bb992f0116bcd5037b40bda434e9a121b76bf7b0 | 2,572 | # frozen_string_literal: true
module Spree
  module Stock
    class Estimator
      # Raised when the package has no shipment attached.
      class ShipmentRequired < StandardError; end
      # Raised when the shipment has no order attached.
      class OrderRequired < StandardError; end

      # Estimate the shipping rates for a package.
      #
      # @param package [Spree::Stock::Package] the package to be shipped
      # @param frontend_only [Boolean] restricts the shipping methods to only
      #   those marked frontend if truthy
      # @return [Array<Spree::ShippingRate>] the shipping rates sorted by
      #   descending cost, with the least costly marked "selected"
      # @raise [ShipmentRequired] if the package has no shipment
      # @raise [OrderRequired] if the shipment has no order
      def shipping_rates(package, frontend_only = true)
        raise ShipmentRequired if package.shipment.nil?
        raise OrderRequired if package.shipment.order.nil?

        rates = calculate_shipping_rates(package)
        rates.select! { |rate| rate.shipping_method.available_to_users? } if frontend_only
        choose_default_shipping_rate(rates)
        Spree::Config.shipping_rate_sorter_class.new(rates).sort
      end

      private

      # Marks the configured selector's pick as the pre-selected rate
      # (no-op when no rates were produced).
      def choose_default_shipping_rate(shipping_rates)
        unless shipping_rates.empty?
          default_shipping_rate = Spree::Config.shipping_rate_selector_class.new(shipping_rates).find_default
          default_shipping_rate.selected = true
        end
      end

      # Builds an (unsaved) ShippingRate, with taxes, for every shipping
      # method whose calculator produces a cost for this package; methods
      # whose calculator returns nil are dropped (hence the #compact).
      def calculate_shipping_rates(package)
        tax_calculator_class = Spree::Config.shipping_rate_tax_calculator_class
        tax_calculator = tax_calculator_class.new(package.shipment.order)
        shipping_methods(package).map do |shipping_method|
          cost = shipping_method.calculator.compute(package)
          if cost
            rate = shipping_method.shipping_rates.new(
              cost: cost,
              shipment: package.shipment
            )
            tax_calculator.calculate(rate).each do |tax|
              rate.taxes.new(
                amount: tax.amount,
                tax_rate: tax.tax_rate
              )
            end
            rate
          end
        end.compact
      end

      # Shipping methods usable for this package: scoped to the order's
      # store and ship address, then filtered by calculator availability
      # and (when the calculator sets one) matching order currency.
      def shipping_methods(package)
        package.shipping_methods
          .available_to_store(package.shipment.order.store)
          .available_for_address(package.shipment.order.ship_address)
          .includes(:calculator)
          .to_a
          .select do |ship_method|
            calculator = ship_method.calculator
            calculator.available?(package) &&
              (calculator.preferences[:currency].blank? ||
               calculator.preferences[:currency] == package.shipment.order.currency)
          end
      end
    end
  end
end
| 35.722222 | 109 | 0.65591 |
b9c3bcdb783de4dc5d090f31596dffd0efa930fd | 246 | class CreateGroupsRoles < ActiveRecord::Migration[5.1]
def change
create_table(:groups_roles, :id => false) do |t|
t.references :group
t.references :role
end
add_index(:groups_roles, [ :group_id, :role_id ])
end
end
| 20.5 | 54 | 0.666667 |
7adafcdb9484bcdc179698d90f9a7f60c9dc7752 | 1,772 | # frozen_string_literal: true
module RuboCop
  module Cop
    module Style
      # This cop checks for potential uses of `Enumerable#minmax`.
      #
      # @example
      #
      #   # bad
      #   bar = [foo.min, foo.max]
      #   return foo.min, foo.max
      #
      #   # good
      #   bar = foo.minmax
      #   return foo.minmax
      class MinMax < Cop
        MSG = 'Use `%<receiver>s.minmax` instead of `%<offender>s`.'

        # Flags `[x.min, x.max]` array literals; the alias below makes
        # the same check cover `return x.min, x.max`.
        def on_array(node)
          min_max_candidate(node) do |receiver|
            offender = offending_range(node)
            add_offense(node, location: offender,
                              message: message(offender, receiver))
          end
        end
        alias on_return on_array

        # Replaces the offending span with `receiver.minmax`.
        def autocorrect(node)
          receiver = node.children.first.receiver

          lambda do |corrector|
            corrector.replace(offending_range(node),
                              "#{receiver.source}.minmax")
          end
        end

        private

        # Matches `min`/`max` calls that share the same non-nil receiver.
        def_node_matcher :min_max_candidate, <<-PATTERN
          ({array return} (send [$_receiver !nil?] :min) (send [$_receiver !nil?] :max))
        PATTERN

        def message(offender, receiver)
          format(MSG, offender: offender.source,
                      receiver: receiver.source)
        end

        # For `return a, b` highlight only the arguments, not `return`.
        def offending_range(node)
          case node.type
          when :return
            argument_range(node)
          else
            node.loc.expression
          end
        end

        # Source range spanning from the first to the last argument.
        def argument_range(node)
          first_argument_range = node.children.first.loc.expression
          last_argument_range = node.children.last.loc.expression

          first_argument_range.join(last_argument_range)
        end
      end
    end
  end
end
| 25.681159 | 88 | 0.549661 |
1cd05b829c6ce9627744b37684c6cf190d2183bd | 603 | name 'prometheus_exporters'
maintainer 'Evil Martians'
maintainer_email '[email protected]'
license 'Apache-2.0'
description 'Installs / configures Prometheus exporters'
# The README doubles as the long description shown on Supermarket.
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.12.1'
chef_version '>= 12.14', '< 15.0'

# Supported platforms and minimum versions.
supports 'centos', '>= 6.9'
supports 'debian', '>= 8.0'
supports 'ubuntu', '>= 14.04'
supports 'windows'

issues_url 'https://github.com/evilmartians/chef-prometheus-exporters/issues'
source_url 'https://github.com/evilmartians/chef-prometheus-exporters/'
| 31.736842 | 77 | 0.704809 |
e2db30dc05a16e5de33a520616a7231d68205bff | 3,626 | module MiqAeEngine
class MiqAeDomainSearch
def initialize
@fqns_id_cache = {}
@fqns_id_class_cache = {}
@partial_ns = []
end
def ae_user=(obj)
@sorted_domains ||= obj.current_tenant.enabled_domains.collect(&:name)
end
def get_alternate_domain(scheme, uri, ns, klass, instance)
return ns if ns.nil? || klass.nil?
return ns if scheme != "miqaedb"
return ns if @fqns_id_cache.key?(ns)
search(uri, ns, klass, instance, nil)
end
def get_alternate_domain_method(scheme, uri, ns, klass, method)
return ns if ns.nil? || klass.nil?
return ns if scheme != "miqaedb"
return ns if @fqns_id_cache.key?(ns)
search(uri, ns, klass, nil, method)
end
private
def search(uri, ns, klass, instance, method)
unless @partial_ns.include?(ns)
fqns = MiqAeNamespace.find_by_fqname(ns, false)
if fqns && !fqns.domain?
@fqns_id_cache[ns] = fqns.id
return ns
end
end
@partial_ns << ns unless @partial_ns.include?(ns)
find_first_fq_domain(uri, ns, klass, instance, method)
end
def find_first_fq_domain(uri, ns, klass, instance, method)
# Check if the namespace, klass and instance exist if it does
# swap out the namespace
parts = ns.split('/')
parts.unshift("")
matching_domain = get_matching_domain(parts, klass, instance, method)
matching_domain ||= get_matching_domain(parts, klass, MiqAeObject::MISSING_INSTANCE, method)
if matching_domain
parts[0] = matching_domain
ns = parts.join('/')
$miq_ae_logger.info("Updated namespace [#{uri} #{ns}]")
end
ns
end
def get_matching_domain(ns_parts, klass, instance, method)
@sorted_domains.detect do |domain|
ns_parts[0] = domain
ns_id = find_fqns_id(ns_parts)
cls_id = find_class_id(klass, ns_id) if ns_id
get_matching(cls_id, instance, method) if cls_id
end
end
def get_matching(cls_id, instance, method)
instance ? find_instance_id(instance, cls_id) : find_method_id(method, cls_id)
end
def find_fqns_id(fqns_parts)
fqname = fqns_parts.join('/')
return @fqns_id_cache[fqname] if @fqns_id_cache.key?(fqname)
ns = MiqAeNamespace.find_by_fqname(fqname, false)
@fqns_id_cache[fqname] = ns.id if ns
end
def find_class_id(class_name, ns_id)
return nil if class_name.nil? || ns_id.nil?
key_name = "#{class_name}#{ns_id}"
return @fqns_id_class_cache[key_name] if @fqns_id_class_cache.key?(key_name)
class_filter = MiqAeClass.arel_table[:name].lower.matches(class_name.downcase)
ae_class = MiqAeClass.where(class_filter).where(:namespace_id => ns_id)
@fqns_id_class_cache[key_name] = ae_class.first.id if ae_class.any?
end
def find_instance_id(instance_name, class_id)
return nil if instance_name.nil? || class_id.nil?
instance_name = ::ActiveRecordQueryParts.glob_to_sql_like(instance_name).downcase
ae_instance_filter = MiqAeInstance.arel_table[:name].lower.matches(instance_name)
ae_instances = MiqAeInstance.where(ae_instance_filter).where(:class_id => class_id)
ae_instances.first.try(:id)
end
def find_method_id(method_name, class_id)
return nil if method_name.nil? || class_id.nil?
ae_method_filter = ::MiqAeMethod.arel_table[:name].lower.matches(method_name)
ae_methods = ::MiqAeMethod.where(ae_method_filter).where(:class_id => class_id)
ae_methods.first.try(:id)
end
end
end
| 35.203883 | 98 | 0.667954 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.