hexsha (string, length 40) | size (int64, 2 to 1.01M) | content (string, length 2 to 1.01M) | avg_line_length (float64, 1.5 to 100) | max_line_length (int64, 2 to 1k) | alphanum_fraction (float64, 0.25 to 1)
---|---|---|---|---|---|
1d575c770a9d8eeb5fb355323760f7d0e7a564a2 | 595 | require "json"
package = JSON.parse(File.read(File.join(__dir__, "package.json")))
Pod::Spec.new do |s|
s.name = "react-native-liphy"
s.version = package["version"]
s.summary = package["description"]
s.homepage = package["homepage"]
s.license = package["license"]
s.authors = package["author"]
s.platforms = { :ios => "10.0" }
s.source = { :git => "https://github.com/tecky.io/react-native-liphy.git", :tag => "#{s.version}" }
s.source_files = "ios/**/*.{h,m,mm,swift}"
s.dependency "React-Core"
s.dependency "LightFlySDK"
end
| 28.333333 | 107 | 0.6 |
ac3ab389a866a3b87c0291794a9bf4b8e200d5ea | 221 | combination = -> (n) do
-> (r) do
# https://en.wikipedia.org/wiki/Combination
(n-r+1..n).inject(:*) / (1..r).inject(:*)
end
end
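# Worked example (illustrative, not part of the original input): with n = 5 and
# r = 2 the numerator is 4 * 5 = 20 and the denominator is 1 * 2 = 2, so
# combination.(5).(2) returns 10. Note this assumes 1 <= r <= n; for r = 0 both
# ranges are empty, inject(:*) returns nil, and the division would raise.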
n = gets.to_i
r = gets.to_i
nCr = combination.(n)
puts nCr.(r)
| 17 | 51 | 0.524887 |
62ab302bd928cf0ab6d2675c874cf3880e5204f0 | 398 | class AuthDigestNginxModule < Formula
desc "Digest Authentication for Nginx"
homepage "https://github.com/samizdatco/nginx-http-auth-digest"
url "https://github.com/samizdatco/nginx-http-auth-digest/archive/cd86418.tar.gz"
version "0.2.2"
sha256 "fe683831f832aae4737de1e1026a4454017c2d5f98cb88b08c5411dc380062f8"
bottle :unneeded
def install
pkgshare.install Dir["*"]
end
end
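# Illustrative note (not part of the formula): `pkgshare.install Dir["*"]` copies the
# module sources into the formula's share directory (something like
# $(brew --prefix)/share/auth-digest-nginx-module), and nginx is then expected to be
# compiled separately with --add-module pointing at that path.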
| 28.428571 | 83 | 0.776382 |
2121c78d4902b31b7d9284ab2f47e64e64635484 | 483 | class SessionsController < ApplicationController
def create
user = User.authenticate(params[:session][:email], params[:session][:password])
if user.nil?
flash.now[:error] = t(:"flash.sign_in_error")
render "new"
else
sign_in user
flash[:success] = t(:"flash.sign_in")
redirect_to user_path(user.username)
end
end
def destroy
sign_out
flash[:success] = t(:"flash.sign_out")
redirect_to root_path
end
end | 23 | 83 | 0.641822 |
087bd108b57e00f72ce1e3c26a0a006aab623d84 | 131 | require 'rails_helper'
RSpec.describe OrderExchange, type: :model do
pending "add some examples to (or delete) #{__FILE__}"
end
| 21.833333 | 56 | 0.755725 |
187bb3cb5ea01c3f1c4dc54067d4c67b81e50d4f | 4,916 | namespace :shf do
desc 'load conditions to DB'
task load_conditions: [:environment] do
# Schedules for when alerts are sent:
std_reminder_after_schedule = [2, 9, 14, 30, 60]
std_reminder_before_schedule = [60, 30, 14, 2]
# See the ConditionResponder class for definitions of the different possible
# timing methods and configurations.
# NUMBER OF DAYS TO KEEP BACKUPS ON THE PRODUCTION SERVER:
# -------------------------------------------------------
# The default is chosen so that if there is a problem (e.g. copying to AWS)
# and we don't spot it for a week
# (perhaps we didn't have a weekly meeting on a Thursday), the backup files still exist on the production server.
DEFAULT_DAYS_TO_KEEP = 4
# Code also exists on GitHub and in the version control system (git). [In fact, those
# are the authoritative/canonical source.] So we don't need to keep very many days of backups.
DAYS_TO_KEEP_CODE = 3
DAYS_TO_KEEP_PUBLIC_FILES = DEFAULT_DAYS_TO_KEEP
# Filesystem locations
RUNNING_LOG = '~/NOTES-RUNNING-LOG.txt'
NGINX_LOG_DIR = '/var/log/nginx'
APP_DIR = File.join(ENV['SHF_APP_PATH'], 'current/')
PUBLIC_DIR = File.join(APP_DIR, 'public')
# Add a Hash for each Condition to be created
#
conditions_to_create = [
# Send this alert once a week on Tuesday ( = weekday #2 as defined in the Date class)
{ class_name: 'MembersNeedPacketsAlert',
timing: :day_of_week,
config: { days_of_week: [2]} },
# Once Repeating Task timing is implemented, the timing should be changed
# to repeat every 14 days.
{ class_name: 'MemberUnpaidOver6MonthsAlert',
timing: :day_of_month,
config: {days: [1, 15]} },
{ class_name: 'MembershipExpireAlert',
timing: :before,
config: { days: [30, 14, 2] } },
{ class_name: 'MembershipWillExpireRenewalReqsAlert',
timing: :before,
config: { days: [60] } },
{ class_name: 'MembershipLapsedAlert',
timing: :after,
config: { days: std_reminder_after_schedule } },
{ class_name: 'FirstMembershipFeeOwedAlert',
timing: :after,
config: { days: std_reminder_after_schedule } },
{ class_name: 'HBrandingFeeWillExpireAlert',
timing: :before,
config: { days: std_reminder_before_schedule } },
{ class_name: 'HBrandingFeeDueAlert',
timing: :after,
config: { days: std_reminder_after_schedule } },
{ class_name: 'CompanyInfoIncompleteAlert',
timing: :after,
config: { days: std_reminder_after_schedule } },
{ class_name: 'ShfAppNoUploadedFilesAlert',
timing: :after,
config: { days: [60, 30, 14, 9, 2] } },
# days_to_keep - specifies number of (daily) backups to retain on production server
# backup_directory - where daily backups are retained on production server;
# omit or set to nil to use default directory
#
{ class_name: 'Backup',
timing: :every_day,
config: { days_to_keep: { code_backup: DAYS_TO_KEEP_CODE,
db_backup: DEFAULT_DAYS_TO_KEEP },
backup_directory: nil,
filesets: [
{name: 'logs',
days_to_keep: DEFAULT_DAYS_TO_KEEP,
files: [RUNNING_LOG, NGINX_LOG_DIR, File.join(APP_DIR, 'log')]
},
{name: 'code',
days_to_keep: DAYS_TO_KEEP_CODE,
files: [APP_DIR],
excludes: ['public', 'docs', 'features', 'spec','tmp', '.yardoc']
},
{name: 'app-public',
days_to_keep: DAYS_TO_KEEP_PUBLIC_FILES,
files: [PUBLIC_DIR]
},
{name: 'config env secrets',
days_to_keep: DEFAULT_DAYS_TO_KEEP,
files: [File.join(APP_DIR, 'config', '*.yml'), '.env']
}
]
}
},
{ class_name: 'DinkursFetch',
timing: :every_day },
{ class_name: 'MembershipStatusCheck',
timing: :every_day }
]
# Start from scratch
Condition.delete_all
if Condition.create(conditions_to_create)
puts " #{conditions_to_create.size} Conditions were loaded into the db: #{conditions_to_create.map{|h_cond| h_cond[:class_name]}.join(', ')}"
end
end
end
| 37.526718 | 148 | 0.542107 |
d5200eb56e8f300cf5a1fcdf2ee17c5207d425e1 | 3,435 | require 'test/unit'
require 'timeout'
require 'jruby-kafka'
require 'util/producer'
require 'util/consumer'
class TestKafka < Test::Unit::TestCase
def test_01_run
topic = 'test_run'
send_test_messages topic
queue = SizedQueue.new(20)
consumer = Kafka::Consumer.new(consumer_options({:topic => topic }))
streams = consumer.message_streams
streams.each do |stream|
Thread.new { consumer_test_blk stream, queue}
end
begin
timeout(30) do
until queue.length > 3 do
sleep 1
next
end
end
end
consumer.shutdown
found = []
until queue.empty?
found << queue.pop
end
assert_equal([ "codec gzip test message",
"codec none test message",
"codec snappy test message",
"test message" ],
found.map(&:to_s).uniq.sort)
end
def test_02_from_beginning
topic = 'test_run'
queue = SizedQueue.new(20)
options = {
:topic => topic,
:reset_beginning => 'from-beginning'
}
consumer = Kafka::Consumer.new(consumer_options(options))
streams = consumer.message_streams
streams.each do |stream|
Thread.new { consumer_test_blk stream, queue}
end
begin
timeout(30) do
until queue.length > 3 do
sleep 1
next
end
end
end
consumer.shutdown
found = []
until queue.empty?
found << queue.pop
end
assert_equal([ "codec gzip test message",
"codec none test message",
"codec snappy test message",
"test message" ],
found.map(&:to_s).uniq.sort)
end
def test_03_topic_whitelist
topic_prefix = 'whitelist'
produce_to_different_topics topic_prefix
queue = SizedQueue.new(20)
options = {
:zookeeper_connect => '127.0.0.1:2181',
:group_id => 'topics',
:include_topics => topic_prefix + 'ca.*',
}
consumer = Kafka::Consumer.new(filter_consumer_options(options))
streams = consumer.message_streams
streams.each do |stream|
Thread.new { consumer_test_blk stream, queue}
end
begin
timeout(30) do
until queue.length > 1 do
sleep 1
next
end
end
end
consumer.shutdown
found = []
until queue.empty?
found << queue.pop
end
assert(found.include?("cabin message"))
assert(found.include?("carburetor message"))
assert(!found.include?("apple message"))
end
def test_04_topic_blacklist
topic_prefix = 'blacklist'
produce_to_different_topics topic_prefix
queue = SizedQueue.new(20)
options = {
:zookeeper_connect => '127.0.0.1:2181',
:group_id => 'topics',
:exclude_topics => topic_prefix + 'ca.*',
}
consumer = Kafka::Consumer.new(filter_consumer_options(options))
streams = consumer.message_streams
streams.each do |stream|
Thread.new { consumer_test_blk stream, queue}
end
begin
timeout(30) do
until queue.length > 0 do
sleep 1
next
end
end
end
consumer.shutdown
found = []
until queue.empty?
found << queue.pop
end
assert(!found.include?("cabin message"))
assert(!found.include?("carburetor message"))
assert(found.include?("apple message"))
end
end
| 25.827068 | 72 | 0.605822 |
6ad089d7ba44b8953fca7700075916321adf53a9 | 192 | # -*- encoding : ascii-8bit -*-
require 'ethereum/db/base_db'
require 'ethereum/db/ephem_db'
require 'ethereum/db/overlay_db'
require 'ethereum/db/refcount_db'
require 'ethereum/db/level_db'
| 24 | 33 | 0.760417 |
38836b97a9db5e7bace456d21fca9c9fb42d4339 | 19,132 | # Copyright © 2020 MUSC Foundation for Research Development
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
require 'dotenv/tasks'
task update_data: :environment do
begin
## turn off auditing for the duration of this script
Protocol.auditing_enabled = false
ResearchMaster.auditing_enabled = false
User.auditing_enabled = false
script_start = Time.now
$status_notifier = Slack::Notifier.new(ENV.fetch('CRONJOB_STATUS_WEBHOOK'))
$validated_states = ['Acknowledged', 'Approved', 'Completed', 'Disapproved', 'Exempt Approved', 'Expired', 'Expired - Continuation in Progress', 'External IRB Review Archive', 'Not Human Subjects Research', 'Suspended', 'Terminated']
$friendly_token = Devise.friendly_token
$research_masters = ResearchMaster.eager_load(:pi).all
$rmc_relations = ResearchMasterCoeusRelation.all
$departments = Department.all
$users = User.all
def log message
puts "#{message}\n"
$status_notifier.ping message
end
def find_or_create_department(pi_department)
name = pi_department || 'N/A'
dept = nil
unless dept = $departments.detect{ |d| d.name == name }
dept = Department.create(
name: name
)
end
dept
end
def update_eirb_study_pi(rm, first_name, last_name, email, net_id)
net_id.slice!('@musc.edu')
pi = nil
unless pi = $users.detect{ |u| u.net_id == net_id }
pi = User.create(
email: email,
net_id: net_id,
name: "#{first_name} #{last_name}",
password: $friendly_token,
password_confirmation: $friendly_token
)
end
if pi.valid?
existing_pi = rm.pi
rm.pi_id = pi.id
if rm.pi_id && rm.pi_id_changed? && existing_pi
begin
PiMailer.notify_pis(rm, existing_pi, rm.pi, rm.creator).deliver_now
rescue
if ENV.fetch('ENVIRONMENT') == 'production'
log "PI email failed to deliver"
log "#{pi.inspect}"
log "#{pi.errors.full_messages}"
end
end
end
elsif ENV.fetch('ENVIRONMENT') == 'production'
log ":heavy_exclamation_mark: PI record failed to update Research Master record"
log "- #{pi.inspect}"
log "- #{pi.errors.full_messages}"
end
end
log "*Cronjob has started.*"
log "- *Beginning data retrieval from APIs...*"
sparc_api = ENV.fetch("SPARC_API")
eirb_api = ENV.fetch("EIRB_API")
eirb_api_token = ENV.fetch("EIRB_API_TOKEN")
coeus_api = ENV.fetch("COEUS_API")
log "--- *Fetching from SPARC_API...*"
start = Time.now
protocols = HTTParty.get("#{sparc_api}/protocols", headers: {'Content-Type' => 'application/json'}, basic_auth: { username: ENV.fetch('SPARC_API_USERNAME'), password: ENV.fetch('SPARC_API_PASSWORD') }, timeout: 500)
finish = Time.now
if protocols.is_a?(String)
log "----- :heavy_exclamation_mark: Error retrieving protocols from SPARC_API: #{protocols}"
else
log "----- :heavy_check_mark: *Done!* (#{(finish - start).to_i} Seconds)"
end
log "--- *Fetching from EIRB_API...*"
start = Time.now
eirb_studies = HTTParty.get("#{eirb_api}/studies.json?musc_studies=true", timeout: 500, headers: {'Content-Type' => 'application/json', "Authorization" => "Token token=\"#{eirb_api_token}\""})
finish = Time.now
if eirb_studies.is_a?(String)
log "----- :heavy_exclamation_mark: Error retrieving protocols from EIRB_API: #{eirb_studies}"
else
log "----- :heavy_check_mark: *Done!* (#{(finish - start).to_i} Seconds)"
end
log "--- *Fetching from COEUS_API...*"
start = Time.now
award_details = HTTParty.get("#{coeus_api}/award_details", timeout: 500, headers: {'Content-Type' => 'application/json'})
awards_hrs = HTTParty.get("#{coeus_api}/awards_hrs", timeout: 500, headers: {'Content-Type' => 'application/json'})
interfolio_users = HTTParty.get("#{coeus_api}/interfolio", timeout: 500, headers: {'Content-Type' => 'application/json'})
finish = Time.now
log "----- :heavy_check_mark: *Done!* (#{(finish - start).to_i} Seconds)"
unless protocols.is_a?(String)
ResearchMaster.update_all(sparc_protocol_id: nil)
log "- *Beginning SPARC_API data import...*"
log "--- Total number of protocols from SPARC_API: #{protocols.count}"
start = Time.now
count = 1
created_sparc_protocols = []
created_sparc_pis = []
# Preload SPARC Protocols to improve efficiency
sparc_protocols = Protocol.eager_load(:primary_pi).where(type: 'SPARC')
existing_sparc_ids = sparc_protocols.pluck(:sparc_id)
existing_sparc_protocols = protocols.select{ |p| existing_sparc_ids.include?(p['id']) }
new_sparc_protocols = protocols.select{ |p| existing_sparc_ids.exclude?(p['id']) }
# Update Existing SPARC Protocol Records
log "--- Updating existing SPARC protocols"
bar = ProgressBar.new(existing_sparc_protocols.count)
existing_sparc_protocols.each do |protocol|
existing_protocol = sparc_protocols.detect{ |p| p.sparc_id == protocol['id'] }
existing_protocol.short_title = protocol['short_title']
existing_protocol.long_title = protocol['title']
existing_protocol.save(validate: false)
if existing_protocol.primary_pi
existing_protocol.primary_pi.first_name = protocol['first_name']
existing_protocol.primary_pi.last_name = protocol['last_name']
existing_protocol.primary_pi.department = find_or_create_department(protocol['pi_department'])
existing_protocol.primary_pi.save(validate: false)
end
if protocol['research_master_id'] && rm = $research_masters.detect{ |rm| rm.id == protocol['research_master_id'] }
rm.sparc_protocol_id = existing_protocol.id
rm.sparc_association_date = DateTime.current unless rm.sparc_association_date
rm.save(validate: false)
end
bar.increment! rescue nil
end
# Create New SPARC Protocol Records
log "--- Creating new SPARC protocols"
bar = ProgressBar.new(new_sparc_protocols.count)
new_sparc_protocols.each do |protocol|
sparc_protocol = Protocol.new(
type: protocol['type'],
short_title: protocol['short_title'],
long_title: protocol['title'],
sparc_id: protocol['id'],
sparc_pro_number: protocol['pro_number']
)
created_sparc_protocols.append(sparc_protocol.id) if sparc_protocol.save
if protocol['first_name'] || protocol['last_name']
pi = PrimaryPi.new(
first_name: protocol['first_name'],
last_name: protocol['last_name'],
department: find_or_create_department(protocol['pi_department']),
protocol: sparc_protocol
)
created_sparc_pis.append(pi.id) if pi.save
end
if protocol['research_master_id'] && rm = $research_masters.detect{ |rm| rm.id == protocol['research_master_id'] }
rm.sparc_protocol_id = sparc_protocol.id
rm.sparc_association_date = DateTime.current unless rm.sparc_association_date
rm.save(validate: false)
end
bar.increment! rescue nil
end
finish = Time.now
log "--- :heavy_check_mark: *Done!*"
log "--- *New protocols total:* #{created_sparc_protocols.count}"
log "--- *New primary pis total:* #{created_sparc_pis.count}"
log "--- *Finished SPARC_API data import* (#{(finish - start).to_i} Seconds)."
end
unless eirb_studies.is_a?(String)
ResearchMaster.update_all(eirb_validated: false)
log "- *Beginning EIRB_API data import...*"
log "--- Total number of protocols from EIRB_API: #{eirb_studies.count}"
start = Time.now
count = 1
created_eirb_protocols = []
created_eirb_pis = []
ResearchMaster.update_all(eirb_protocol_id: nil)
# Preload eIRB Protocols to improve efficiency
eirb_protocols = Protocol.eager_load(:primary_pi).where(type: 'EIRB')
existing_eirb_ids = eirb_protocols.pluck(:eirb_id)
existing_eirb_studies = eirb_studies.select{ |s| existing_eirb_ids.include?(s['pro_number']) }
new_eirb_studies = eirb_studies.select{ |s| existing_eirb_ids.exclude?(s['pro_number']) }
# Update Existing eIRB Protocol Records
log "--- Updating existing eIRB protocols"
bar = ProgressBar.new(existing_eirb_studies.count)
existing_eirb_studies.each do |study|
existing_protocol = eirb_protocols.detect{ |p| p.eirb_id == study['pro_number'] }
existing_protocol.short_title = study['short_title']
existing_protocol.long_title = study['title']
existing_protocol.eirb_state = study['state']
existing_protocol.eirb_institution_id = study['institution_id']
existing_protocol.date_initially_approved = study['date_initially_approved']
existing_protocol.date_approved = study['date_approved']
existing_protocol.date_expiration = study['date_expiration']
existing_protocol.save(validate: false)
if existing_protocol.eirb_state == 'Completed' && existing_protocol.primary_pi
existing_protocol.primary_pi.first_name = study['first_name']
existing_protocol.primary_pi.last_name = study['last_name']
existing_protocol.primary_pi.email = study['pi_email']
existing_protocol.primary_pi.net_id = study['pi_net_id']
existing_protocol.primary_pi.department = find_or_create_department(study['pi_department'])
existing_protocol.primary_pi.save(validate: false)
end
if study['research_master_id'] && rm = $research_masters.detect{ |rm| rm.id == study['research_master_id'] }
rm.eirb_protocol_id = existing_protocol.id
rm.eirb_association_date = DateTime.current unless rm.eirb_association_date
if $validated_states.include?(study['state'])
rm.eirb_validated = true
rm.short_title = study['short_title']
rm.long_title = study['title']
update_eirb_study_pi(rm, study['first_name'], study['last_name'], study['email'], study['pi_net_id'])
end
rm.save(validate: false)
end
bar.increment! rescue nil
end
# Create New eIRB Protocol Records
log "--- Creating new eIRB protocols"
bar = ProgressBar.new(new_eirb_studies.count)
new_eirb_studies.each do |study|
eirb_protocol = Protocol.new(
type: study['type'],
short_title: study['short_title'] || "",
long_title: study['title'] || "",
eirb_id: study['pro_number'],
eirb_institution_id: study['institution_id'],
eirb_state: study['state'],
date_initially_approved: study['date_initially_approved'],
date_approved: study['date_approved'],
date_expiration: study['date_expiration']
)
created_eirb_protocols.append(eirb_protocol.id) if eirb_protocol.save
if study['first_name'] || study['last_name']
pi = PrimaryPi.new(
first_name: study['first_name'],
last_name: study['last_name'],
department: find_or_create_department(study['pi_department']),
protocol: eirb_protocol
)
created_eirb_pis.append(pi.id) if pi.save
end
if study['research_master_id'] && rm = $research_masters.detect{ |rm| rm.id == study['research_master_id'] }
rm.eirb_protocol_id = eirb_protocol.id
rm.eirb_association_date = DateTime.current unless rm.eirb_association_date
if $validated_states.include?(study['state'])
rm.eirb_validated = true
rm.short_title = study['short_title']
rm.long_title = study['title']
update_eirb_study_pi(rm, study['first_name'], study['last_name'], study['email'], study['pi_net_id'])
end
rm.save(validate: false)
end
bar.increment! rescue nil
end
finish = Time.now
log "--- :heavy_check_mark: *Done!*"
log "--- *New protocols total:* #{created_sparc_protocols.count}"
log "--- *New primary pis total:* #{created_sparc_pis.count}"
log "--- *Finished EIRB_API data import* (#{(finish - start).to_i} Seconds)."
end
log "- *Beginning COEUS API data import...*"
log "--- Total number of protocols from COEUS API: #{award_details.count}"
start = Time.now
count = 1
created_coeus_protocols = []
# Preload eIRB Protocols to improve efficiency
coeus_protocols = Protocol.where(type: 'COEUS')
existing_award_numbers = coeus_protocols.pluck(:mit_award_number)
existing_coeus_award_details = award_details.select{ |ad| existing_award_numbers.include?(ad['mit_award_number']) }
new_coeus_award_details = award_details.select{ |ad| existing_award_numbers.exclude?(ad['mit_award_number']) }
# Update Existing COEUS Protocol Records
log "--- Updating existing COEUS protocols"
bar = ProgressBar.new(existing_coeus_award_details.count)
existing_coeus_award_details.each do |ad|
existing_protocol = coeus_protocols.detect{ |p| p.mit_award_number == ad['mit_award_number'] }
existing_protocol.update_attributes(coeus_project_id: ad['coeus_project_id'])
if ad['rmid'] && rm = $research_masters.detect{ |rm| rm.id == ad['rmid'] }
unless $rmc_relations.any?{ |rmcr| rmcr.protocol_id == existing_protocol.id && rmcr.research_master_id == rm.id }
ResearchMasterCoeusRelation.create(
protocol: existing_protocol,
research_master: rm
)
end
end
bar.increment! rescue nil
end
# Create New COEUS Protocol Records
log "--- Creating new COEUS protocols"
bar = ProgressBar.new(new_coeus_award_details.count)
new_coeus_award_details.each do |ad|
coeus_protocol = Protocol.new(
type: 'COEUS',
title: ad['title'],
mit_award_number: ad['mit_award_number'],
sequence_number: ad['sequence_number'],
entity_award_number: ad['entity_award_number'],
coeus_project_id: ad['coeus_project_id']
)
if coeus_protocol.save
created_coeus_protocols.append(coeus_protocol.id)
if ad['rmid'] && rm = $research_masters.detect{ |rm| rm.id == ad['rmid'] }
ResearchMasterCoeusRelation.create(
protocol: coeus_protocol,
research_master: rm
)
end
end
bar.increment! rescue nil
end
log "--- Updating award numbers from COEUS API: #{awards_hrs.count}"
count = 1
existing_coeus_awards_hrs = awards_hrs.select{ |ah| existing_award_numbers.include?(ah['mit_award_number']) }
log "--- Updating COEUS award numbers"
bar = ProgressBar.new(existing_coeus_awards_hrs.count)
existing_coeus_awards_hrs.each do |ah|
existing_protocol = coeus_protocols.detect{ |p| p.mit_award_number == ah['mit_award_number'] }
existing_protocol.coeus_protocol_number = ah['protocol_number']
existing_protocol.save(validate: false)
bar.increment! rescue nil
end
puts("Updating users from COEUS API: #{interfolio_users.count}")
count = 1
interfolio_users.each do |user|
if User.exists?(net_id: user['netid'])
user_to_update = User.find_by(net_id: user['netid'])
user_to_update.update_attribute(:department, user['department'])
end
print(progress_bar(count, interfolio_users.count/10)) if (count % (interfolio_users.count/10)).zero?
count += 1
end
finish = Time.now
log "--- :heavy_check_mark: *Done!*"
log "--- *New protocols total:* #{created_coeus_protocols.count}"
log "--- *Finished COEUS_API data import* (#{(finish - start).to_i} Seconds)."
total_protocols = created_sparc_protocols + created_eirb_protocols + created_coeus_protocols
total_pis = created_sparc_pis + created_eirb_pis
log "*Overview*"
log "- *New protocols total:* #{total_protocols.count}"
log "- *New primary pis total:* #{total_pis.count}"
log "- *New protocol ids:* #{total_protocols}"
log "- *New primary pi ids:* #{total_pis}"
script_finish = Time.now
log "- *Script Duration:* #{(script_finish - script_start).to_i} Seconds."
log ":heavy_check_mark: *Cronjob has completed successfully.*"
## turn on auditing
Protocol.auditing_enabled = true
ResearchMaster.auditing_enabled = true
User.auditing_enabled = true
rescue => error
Protocol.auditing_enabled = true
ResearchMaster.auditing_enabled = true
User.auditing_enabled = true
log ":heavy_exclamation_mark: *Cronjob has failed unexpectedly.*"
log error.inspect
end
end
| 40.277895 | 239 | 0.645568 |
4a8650a97fd9818ea5be27e8177568a5941ba463 | 2,424 | require File.join(File.dirname(__FILE__), "lib/fpm/version")
Gem::Specification.new do |spec|
files = []
dirs = %w{lib bin templates}
dirs.each do |dir|
files += Dir["#{dir}/**/*"]
end
files << "LICENSE"
files << "CONTRIBUTORS"
files << "CHANGELOG.rst"
files = files.reject { |path| path =~ /\.pyc$/ }
spec.name = "fpm"
spec.version = FPM::VERSION
spec.summary = "fpm - package building and mangling"
spec.description = "Convert directories, rpms, python eggs, rubygems, and " \
"more to rpms, debs, solaris packages and more. Win at package " \
"management without wasting pointless hours debugging bad rpm specs!"
spec.license = "MIT-like"
spec.required_ruby_version = '>= 1.9.3'
# For parsing JSON (required for some Python support, etc)
# http://flori.github.com/json/doc/index.html
spec.add_dependency("json", ">= 1.7.7", "< 2.0") # license: Ruby License
# For logging
# https://github.com/jordansissel/ruby-cabin
spec.add_dependency("cabin", ">= 0.6.0") # license: Apache 2
# For backports to older rubies
# https://github.com/marcandre/backports
spec.add_dependency("backports", ">= 2.6.2") # license: MIT
# For reading and writing rpms
spec.add_dependency("arr-pm", "~> 0.0.10") # license: Apache 2
# For command-line flag support
# https://github.com/mdub/clamp/blob/master/README.markdown
spec.add_dependency("clamp", "~> 1.0.0") # license: MIT
# For starting external processes across various ruby interpreters
spec.add_dependency("childprocess") # license: ???
# For calling functions in dynamic libraries
spec.add_dependency("ffi") # license: GPL3/LGPL3
spec.add_development_dependency("rake", "~> 10") # license: MIT
# For creating FreeBSD package archives (xz-compressed tars)
spec.add_dependency("ruby-xz", "~> 0.2.3") # license: MIT
# For sourcing from pleaserun
spec.add_dependency("pleaserun", "~> 0.0.29") # license: Apache 2
spec.add_dependency("stud")
spec.add_development_dependency("rspec", "~> 3.0.0") # license: MIT (according to wikipedia)
spec.add_development_dependency("insist", "~> 1.0.0") # license: Apache 2
spec.add_development_dependency("pry")
spec.files = files
spec.require_paths << "lib"
spec.bindir = "bin"
spec.executables << "fpm"
spec.author = "Jordan Sissel"
spec.email = "[email protected]"
spec.homepage = "https://github.com/jordansissel/fpm"
end
| 32.756757 | 94 | 0.685231 |
e2c188500eb5f05770c0fe541fd31100341d864d | 1,342 | module Puppet::Parser::Functions
newfunction(:zookeeper_servers_url, type: :rvalue, doc: <<-EOS
This function converts an array of ZooKeeper hostnames into a combined URL for
ZooKeeper HA. Optionally you can pass custom path in ZooKeeper and default
ZooKeeper port (applies only for servers without specified port)
Usage: zookeeper_servers_url([10.0.0.1,10.0.0.2],'mesos', 2181)
EOS
) do |args|
# Between 1 and 3 arguments should be passed
if args.size > 3
raise(Puppet::ParseError, 'zookeeper_servers_url(): Wrong number of args ' + "given (#{args.size} for 1..3)")
end
zk_path = args[1] if args.size > 1
zk_path ||= 'mesos'
zk_port = args[2] if args.size > 2
zk_port ||= 2181
# The argument should be an Array
case args[0].class.name
when 'Array'
zookeeper_servers = args[0].clone
when 'String'
# backward compatibility, will be removed in 1.x
return args[0]
else
raise(Puppet::ParseError, 'zookeeper_servers_url() accepts an Array, you passed a ' + args[0].class.name)
end
uri = 'zk://'
zookeeper_servers.each_with_index do |server, i|
uri << ',' if i > 0
uri << if server.index(':')
server
else
"#{server}:#{zk_port}"
end
end
return "#{uri}/#{zk_path}"
end
end
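# Illustrative call (assumed usage, mirroring the docstring above):
#
#   zookeeper_servers_url(['10.0.0.1:2888', '10.0.0.2'])
#   # => "zk://10.0.0.1:2888,10.0.0.2:2181/mesos"
#
# Servers that already include a port are kept as-is, the rest get the default
# port 2181, and the default 'mesos' chroot path is appended.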
| 30.5 | 112 | 0.628167 |
9110c4423f4d69e5acc66d106c521d9e235b7769 | 339 | require "bundler/setup"
require "exception_file_notifier"
require 'exception_notification'
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
# ENV["RAILS_ENV"] = "test"
| 22.6 | 63 | 0.752212 |
086b9b1f398d13623baf6ee1fb879836f0e62264 | 3,937 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options)
config.active_storage.service = :local
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "jetblue_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.442105 | 102 | 0.758192 |
62f0f28871349832acc1ec10c1594b3a2186e72a | 3,956 | require 'spec_helper'
describe "gws_attendance_time_card", type: :feature, dbscope: :example, js: true do
let(:site) { gws_site }
let(:user) { gws_user }
let(:now) { Time.zone.now }
let(:this_month) { now.beginning_of_month }
let(:prev_month) { this_month - 1.month }
let(:next_month) { this_month + 1.month }
let!(:time_card_this_month) do
create :gws_attendance_time_card, :with_records, cur_site: site, cur_user: user, date: this_month
end
let!(:time_card_prev_month) do
create :gws_attendance_time_card, :with_records, cur_site: site, cur_user: user, date: prev_month
end
let!(:time_card_next_month) do
create :gws_attendance_time_card, :with_records, cur_site: site, cur_user: user, date: next_month
end
before do
site.attendance_break_time1_state = 'show'
site.attendance_break_time2_state = 'show'
site.attendance_break_time3_state = 'show'
site.save!
end
before { login_user user }
context 'move next' do
it do
visit gws_attendance_main_path(site)
within ".nav-group" do
click_on I18n.t("gws/attendance.next_month")
end
within "table.time-card" do
expect(page).to have_css(".date", text: I18n.l(next_month.to_date, format: :attendance_month_day))
expect(page).to have_css(".date", text: I18n.l(next_month.end_of_month.to_date, format: :attendance_month_day))
end
within ".nav-group" do
click_on I18n.t("gws/attendance.next_month")
end
expect(page).to have_content(I18n.t("gws/attendance.no_time_cards"))
end
end
context 'move prev' do
it do
visit gws_attendance_main_path(site)
within ".nav-group" do
click_on I18n.t("gws/attendance.prev_month")
end
within "table.time-card" do
expect(page).to have_css(".date", text: I18n.l(prev_month.to_date, format: :attendance_month_day))
expect(page).to have_css(".date", text: I18n.l(prev_month.end_of_month.to_date, format: :attendance_month_day))
end
within ".nav-group" do
click_on I18n.t("gws/attendance.prev_month")
end
expect(page).to have_content(I18n.t("gws/attendance.no_time_cards"))
end
end
context 'select year/month' do
it do
visit gws_attendance_main_path(site)
within ".nav-group" do
select I18n.l(prev_month.to_date, format: :attendance_year_month), from: "year_month"
end
within "table.time-card" do
expect(page).to have_css(".date", text: I18n.l(prev_month.to_date, format: :attendance_month_day))
expect(page).to have_css(".date", text: I18n.l(prev_month.end_of_month.to_date, format: :attendance_month_day))
end
within ".nav-group" do
select I18n.l(next_month.to_date, format: :attendance_year_month), from: "year_month"
end
within "table.time-card" do
expect(page).to have_css(".date", text: I18n.l(next_month.to_date, format: :attendance_month_day))
expect(page).to have_css(".date", text: I18n.l(next_month.end_of_month.to_date, format: :attendance_month_day))
end
end
end
describe "https://github.com/shirasagi/shirasagi/issues/3208" do
it do
visit gws_attendance_main_path(site)
within ".nav-group" do
click_on I18n.t("gws/attendance.next_month")
end
within ".mod-navi.current-navi" do
click_on I18n.t('modules.gws/attendance/management/time_card')
end
within ".breadcrumb" do
expect(page).to have_link(I18n.t('modules.gws/attendance/management/time_card'))
end
within "#menu" do
expect(page).to have_link(I18n.t('gws/attendance.links.lock'))
end
within ".list-items" do
month = I18n.l(now.to_date, format: :attendance_year_month)
title = I18n.t('gws/attendance.formats.time_card_full_name', user_name: user.name, month: month)
expect(page).to have_link(title)
end
end
end
end
| 34.4 | 119 | 0.67998 |
e268cf6e95b54eeac058f908630aeeaa2d6f0b5c | 640 | module Admin
class ChartsController < Admin::ApplicationController
def new
end
def show
end
def data
chart_data = Admin::ChartProcessor.call(
chart_params[:resource],
chart_params[:group_attribute],
chart_params[:attribute_to_apply_function],
chart_params[:function]
)
render json: chart_data
end
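# Hypothetical usage sketch (the route and the resource/attribute values below are
# made up; only the parameter names come from chart_params):
#
#   # GET /admin/charts/data?resource=Order&group_attribute=status&
#   #     attribute_to_apply_function=total&function=sum
#
# With params like these, Admin::ChartProcessor.call would presumably group Order
# records by status and apply the sum function to total, and the grouped result is
# rendered back as JSON for the chart front end.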
def resource_attributes
render json: Admin::AttributesList.call(chart_params[:resource])
end
private
def chart_params
params.permit(:chart_type, :resource, :group_attribute, :attribute_to_apply_function, :function)
end
end
end
| 20.645161 | 102 | 0.679688 |
6163303a025ca75f15b53f3f8685371391b2421a | 7,265 | require 'minitest/unit'
class Module # :nodoc:
def infect_an_assertion meth, new_name, dont_flip = false # :nodoc:
# warn "%-22p -> %p %p" % [meth, new_name, dont_flip]
self.class_eval <<-EOM
def #{new_name} *args
case
when #{!!dont_flip} then
Minitest::Spec.current.#{meth}(self, *args)
when Proc === self then
Minitest::Spec.current.#{meth}(*args, &self)
else
Minitest::Spec.current.#{meth}(args.first, self, *args[1..-1])
end
end
EOM
end
end
module Kernel # :nodoc:
##
# Describe a series of expectations for a given target +desc+.
#
# Defines a test class subclassing from either Minitest::Spec or
# from the surrounding describe's class. The surrounding class may
# subclass Minitest::Spec manually in order to easily share code:
#
# class MySpec < Minitest::Spec
# # ... shared code ...
# end
#
# class TestStuff < MySpec
# it "does stuff" do
# # shared code available here
# end
# describe "inner stuff" do
# it "still does stuff" do
# # ...and here
# end
# end
# end
#
# For more information on getting started with writing specs, see:
#
# http://www.rubyinside.com/a-minitestspec-tutorial-elegant-spec-style-testing-that-comes-with-ruby-5354.html
#
# For some suggestions on how to improve your specs, try:
#
# http://betterspecs.org
#
# but do note that several items there are debatable or specific to
# rspec.
#
# For more information about expectations, see Minitest::Expectations.
def describe desc, additional_desc = nil, &block # :doc:
stack = Minitest::Spec.describe_stack
name = [stack.last, desc, additional_desc].compact.join("::")
sclas = stack.last || if Class === self && is_a?(Minitest::Spec::DSL) then
self
else
Minitest::Spec.spec_type desc
end
cls = sclas.create name, desc
stack.push cls
cls.class_eval(&block)
stack.pop
cls
end
private :describe
end
##
# Minitest::Spec -- The faster, better, less-magical spec framework!
#
# For a list of expectations, see Minitest::Expectations.
class Minitest::Spec < Minitest::Test
def self.current # :nodoc:
Thread.current[:current_spec]
end
def initialize name # :nodoc:
super
Thread.current[:current_spec] = self
end
##
# Oh look! A Minitest::Spec::DSL module! Eat your heart out DHH.
module DSL
##
# Contains pairs of matchers and Spec classes to be used to
# calculate the superclass of a top-level describe. This allows for
# automatically customizable spec types.
#
# See: register_spec_type and spec_type
TYPES = [[//, Minitest::Spec]]
##
# Register a new type of spec that matches the spec's description.
# This method can take either a Regexp and a spec class or a spec
# class and a block that takes the description and returns true if
# it matches.
#
# Eg:
#
# register_spec_type(/Controller$/, Minitest::Spec::Rails)
#
# or:
#
# register_spec_type(Minitest::Spec::RailsModel) do |desc|
# desc.superclass == ActiveRecord::Base
# end
def register_spec_type(*args, &block)
if block then
matcher, klass = block, args.first
else
matcher, klass = *args
end
TYPES.unshift [matcher, klass]
end
##
# Figure out the spec class to use based on a spec's description. Eg:
#
# spec_type("BlahController") # => Minitest::Spec::Rails
def spec_type desc
TYPES.find { |matcher, klass|
if matcher.respond_to? :call then
matcher.call desc
else
matcher === desc.to_s
end
}.last
end
def describe_stack # :nodoc:
Thread.current[:describe_stack] ||= []
end
##
# Returns the children of this spec.
def children
@children ||= []
end
def nuke_test_methods! # :nodoc:
self.public_instance_methods.grep(/^test_/).each do |name|
self.send :undef_method, name
end
end
##
# Define a 'before' action. Inherits the way normal methods should.
#
# NOTE: +type+ is ignored and is only there to make porting easier.
#
# Equivalent to Minitest::Test#setup.
def before type = nil, &block
define_method :setup do
super()
self.instance_eval(&block)
end
end
##
# Define an 'after' action. Inherits the way normal methods should.
#
# NOTE: +type+ is ignored and is only there to make porting easier.
#
# Equivalent to Minitest::Test#teardown.
def after type = nil, &block
define_method :teardown do
self.instance_eval(&block)
super()
end
end
##
# Define an expectation with name +desc+. Name gets morphed to a
# proper test method name. For some freakish reason, people who
# write specs don't like class inheritance, so this goes way out of
# its way to make sure that expectations aren't inherited.
#
# This is also aliased to #specify and doesn't require a +desc+ arg.
#
# Hint: If you _do_ want inheritance, use minitest/test. You can mix
# and match between assertions and expectations as much as you want.
def it desc = "anonymous", &block
block ||= proc { skip "(no tests defined)" }
@specs ||= 0
@specs += 1
name = "test_%04d_%s" % [ @specs, desc ]
define_method name, &block
self.children.each do |mod|
mod.send :undef_method, name if mod.public_method_defined? name
end
name
end
##
# Essentially, define an accessor for +name+ with +block+.
#
# Why use let instead of def? I honestly don't know.
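#
# An added illustration (not from the original docs): the value is memoized
# per test, so the block runs at most once within a single example:
#
#   let(:list) { [] }
#
#   it "returns the same object within one test" do
#     list << 1
#     list.must_equal [1] # the second call reuses the memoized array
#   end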
def let name, &block
name = name.to_s
pre, post = "let '#{name}' cannot ", ". Please use another name."
methods = Minitest::Spec.instance_methods.map(&:to_s) - %w[subject]
raise ArgumentError, "#{pre}begin with 'test'#{post}" if
name =~ /\Atest/
raise ArgumentError, "#{pre}override a method in Minitest::Spec#{post}" if
methods.include? name
define_method name do
@_memoized ||= {}
@_memoized.fetch(name) { |k| @_memoized[k] = instance_eval(&block) }
end
end
##
# Another lazy man's accessor generator. Made even more lazy by
# setting the name for you to +subject+.
def subject &block
let :subject, &block
end
def create name, desc # :nodoc:
cls = Class.new(self) do
@name = name
@desc = desc
nuke_test_methods!
end
children << cls
cls
end
def name # :nodoc:
defined?(@name) ? @name : super
end
def to_s # :nodoc:
name # Can't alias due to 1.8.7, not sure why
end
# :stopdoc:
attr_reader :desc
alias :specify :it
# :startdoc:
end
extend DSL
TYPES = DSL::TYPES # :nodoc:
end
require "minitest/expectations"
class Object # :nodoc:
include Minitest::Expectations unless ENV["MT_NO_EXPECTATIONS"]
end
| 25.671378 | 111 | 0.60468 |
1c8ae400beed59b0c8c8994730e1cc97558e07ef | 1,359 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/monitoring/dashboard/v1/widget.proto
require 'google/protobuf'
require 'google/api/field_behavior_pb'
require 'google/monitoring/dashboard/v1/alertchart_pb'
require 'google/monitoring/dashboard/v1/scorecard_pb'
require 'google/monitoring/dashboard/v1/text_pb'
require 'google/monitoring/dashboard/v1/xychart_pb'
require 'google/protobuf/empty_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/monitoring/dashboard/v1/widget.proto", :syntax => :proto3) do
add_message "google.monitoring.dashboard.v1.Widget" do
optional :title, :string, 1
oneof :content do
optional :xy_chart, :message, 2, "google.monitoring.dashboard.v1.XyChart"
optional :scorecard, :message, 3, "google.monitoring.dashboard.v1.Scorecard"
optional :text, :message, 4, "google.monitoring.dashboard.v1.Text"
optional :blank, :message, 5, "google.protobuf.Empty"
optional :alert_chart, :message, 7, "google.monitoring.dashboard.v1.AlertChart"
end
end
end
end
module Google
module Cloud
module Monitoring
module Dashboard
module V1
Widget = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.monitoring.dashboard.v1.Widget").msgclass
end
end
end
end
end
| 35.763158 | 125 | 0.733628 |
ed230f48cef00e7a494b1e9cd812bc7e4d9eaed3 | 5,061 | #!/usr/bin/env ruby
#
# Copyright 2011-2013, Dell
# Copyright 2013-2014, SUSE LINUX Products GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generate Excel spreadsheet comparing two tempest result sets or simply format a single result set
# The main usecase is to compare two sets of test results to see the delta but
# it will accept a single filename as input as well if you just want a simple spreadsheet from saved xml output.
require "rubygems"
require "xmlsimple"
require "axlsx"
class TempestResultsProcessor
DOES_NOT_EXIST = 0
ERROR = 1
FAILED = 2
SKIPPED = 3
DELTA_BG = "FFD0D0"
HEADER_BG = "004586"
HEADER_FG = "FFFFFF"
def run(args, options={"ForceArray" => false})
p = Axlsx::Package.new
styles = p.workbook.styles
delta_style = styles.add_style bg_color: DELTA_BG,
alignment: { horizontal: :left,
vertical: :top ,
wrap_text: true}
header_style = styles.add_style fg_color: HEADER_FG,
b: true,
bg_color: HEADER_BG,
sz: 12,
border: { style: :thin, color: "00" },
alignment: { horizontal: :left,
vertical: :top ,
wrap_text: false}
normal_style = styles.add_style alignment: { horizontal: :left,
vertical: :top ,
wrap_text: true}
no_compare = true if args.size==1
currXml = args[0]
prevXml = args[1] unless no_compare
currHash = XmlSimple.xml_in(currXml, options)
prevHash = XmlSimple.xml_in(prevXml, options) unless no_compare
p.workbook.add_worksheet(name: "Test Summary") do |sheet|
sheet.add_row ["File Name","Tests","Errors","Failures","Skipped"], style: header_style
sheet.add_row [currXml, currHash["tests"], currHash["errors"],currHash["failures"],currHash["skip"]], style: normal_style
sheet.add_row [prevXml, prevHash["tests"], prevHash["errors"],prevHash["failures"],prevHash["skip"]], style: normal_style unless no_compare
end
p.workbook.add_worksheet(name: "Test Results") do |sheet|
row = sheet.add_row ["Test Class","Test Name",currXml]
row.add_cell prevXml unless no_compare
row.style=header_style
currcases = currHash["testcase"]
prevcases = prevHash["testcase"] unless no_compare
classnames = currcases.uniq { |tc| tc["classname"] }.map { |tcc| tcc["classname"] }
classnames.each do |cn|
currcases.select { |tc| tc["classname"]==cn }.each do |cnn|
row = sheet.add_row [cn]
row.add_cell cnn["name"]
res1 = nil
res2 = nil
if !cnn["skipped"].nil?
row.add_cell "skipped"
res1 = SKIPPED
elsif !cnn["error"].nil?
row.add_cell "error"
res1 = ERROR
elsif !cnn["failure"].nil?
row.add_cell "failed"
res1 = FAILED
else
row.add_cell cnn["time"]
end
unless no_compare
# find previous result
prevcase = prevcases.select { |ptc| ptc["classname"]==cn && ptc["name"]==cnn["name"] }.first
if prevcase.nil?
row.add_cell "NA"
res2 = DOES_NOT_EXIST
elsif !prevcase["skipped"].nil?
row.add_cell "skipped"
res2 = SKIPPED
elsif !prevcase["error"].nil?
row.add_cell "error"
res2 = ERROR
elsif !prevcase["failure"].nil?
row.add_cell "failed"
res2 = FAILED
else
row.add_cell prevcase["time"]
end
end
row.style=normal_style
if !no_compare && res1 != res2
row.add_cell "*"
row.style=delta_style
end
end
end
end
tm = Time.new
time_stamp = "#{tm.year}#{tm.month}#{tm.day}#{tm.hour}#{tm.min}#{tm.sec}"
filename = "tempest_results_#{time_stamp}.xlsx"
p.use_shared_strings = true
p.serialize(filename)
puts "Output generated, file name: #{filename}"
end
end
if __FILE__ == $0
if ARGV.empty?
puts "Usage: tempest_results_processor.rb new_results.xml [prev_results.xml]"
exit
end
processor = TempestResultsProcessor.new
processor.run ARGV
end
| 35.640845 | 147 | 0.582098 |
e95045c1d4c6f10eff3f8e049105236e3c00debf | 707 | require 'java'
require 'lwjgl.jar'
require 'slick.jar'
java_import org.newdawn.slick.Image
require 'breakout/image_context'
require 'breakout/collision'
class Breakout
class Ball
include Breakout::ImageContext
include Breakout::Collision
attr_accessor :x, :y, :velocity, :angle
def initialize
@image = Image.new('media/ball.png')
reset
end
def reset
@x = 200
@y = 200
@angle = 45
@velocity = 0.3
end
def move(delta)
@x += @velocity * delta * Math.cos(@angle * Math::PI / 180)
@y -= @velocity * delta * Math.sin(@angle * Math::PI / 180)
end
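# Illustrative arithmetic (not from the original source): with the reset values
# (velocity 0.3, angle 45) a 10 ms delta moves the ball about
# 0.3 * 10 * cos(45 * PI / 180) ≈ 2.1 px to the right and the same amount up,
# since screen y grows downward and @y is decremented.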
def bounce
@angle = (@angle + 90) % 360
end
end
end
| 18.605263 | 65 | 0.601132 |
01df495f7a6ecaa7d6265d1fe50e5b533866270a | 132 | class User < ApplicationRecord
has_secure_password
validates :username, presence: true, uniqueness: {case_sensitive: true}
end
| 22 | 73 | 0.795455 |
4a4344f916c013836cc68fc6de328201439c00e9 | 1,044 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
Gem::Specification.new do |gem|
gem.name = "fluent-plugin-sumologic_output"
gem.version = "0.0.7"
gem.authors = ["Steven Adams"]
gem.email = ["[email protected]"]
gem.description = %q{Output plugin to SumoLogic HTTP Endpoint}
gem.summary = %q{Output plugin to SumoLogic HTTP Endpoint}
gem.homepage = "https://github.com/SumoLogic/fluentd-output-sumologic"
gem.license = "Apache-2.0"
gem.files = `git ls-files`.split($/)
gem.executables = gem.files.grep(%r{^bin/}) { |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.require_paths = ["lib"]
gem.has_rdoc = false
gem.required_ruby_version = '>= 2.0.0'
gem.add_development_dependency "bundler", "~> 1.3"
gem.add_development_dependency "rake"
gem.add_runtime_dependency "fluentd"
gem.add_development_dependency 'test-unit', '~> 3.1.0'
end
| 37.285714 | 77 | 0.658046 |
33ef68c103bef67cfc015e50534c1d2e447dbd03 | 224 | require 'tzinfo/timezone_definition'
module TZInfo
module Definitions
module Etc
module Zulu
include TimezoneDefinition
linked_timezone 'Etc/Zulu', 'Etc/UTC'
end
end
end
end
| 16 | 45 | 0.647321 |
ab00ba9b7cbe9ce8737172efd5234c0dfe1de83d | 5,091 | module Blather
class Stream
# @private
class SASL < Features
class UnknownMechanism < BlatherError
register :sasl_unknown_mechanism
end
MECHANISMS = %w[
digest-md5
plain
anonymous
].freeze
SASL_NS = 'urn:ietf:params:xml:ns:xmpp-sasl'.freeze
register SASL_NS
def initialize(stream, succeed, fail)
super
@jid = @stream.jid
@pass = @stream.password
@mechanisms = []
end
def receive_data(stanza)
@node = stanza
case stanza.element_name
when 'mechanisms'
available_mechanisms = stanza.children.map { |m| m.content.downcase }
@mechanisms = MECHANISMS.select { |m| available_mechanisms.include? m }
next!
when 'failure'
next!
when 'success'
@stream.start
else
if self.respond_to?(stanza.element_name)
self.__send__(stanza.element_name)
else
fail! UnknownResponse.new(stanza)
end
end
end
protected
def next!
if @jid.node == ''
process_anonymous
else
@idx = @idx ? @idx+1 : 0
authenticate_with @mechanisms[@idx]
end
end
def process_anonymous
if @mechanisms.include?('anonymous')
authenticate_with 'anonymous'
else
fail! BlatherError.new('The server does not support ANONYMOUS login. You must provide a node in the JID')
end
end
def authenticate_with(method)
method = case method
when 'digest-md5' then DigestMD5
when 'plain' then Plain
when 'anonymous' then Anonymous
when nil then fail!(SASLError.import(@node))
else next!
end
if method.is_a?(Module)
extend method
authenticate
end
end
##
# Base64 Encoder
def b64(str)
[str].pack('m').gsub(/\s/,'')
end
##
# Builds a standard auth node
def auth_node(mechanism, content = nil)
node = XMPPNode.new 'auth'
node.content = content if content
node.namespace = SASL_NS
node[:mechanism] = mechanism
node
end
##
# Digest MD5 authentication
module DigestMD5 # :nodoc:
##
# Lets the server know we're going to try DigestMD5 authentication
def authenticate
@stream.send auth_node('DIGEST-MD5')
end
##
# Receive the challenge command.
def challenge
decode_challenge
respond
end
private
##
# Decodes digest strings 'foo=bar,baz="faz"'
# into {'foo' => 'bar', 'baz' => 'faz'}
def decode_challenge
text = @node.content.unpack('m').first
res = {}
text.split(',').each do |statement|
key, value = statement.split('=')
res[key] = value.delete('"') unless key.empty?
end
Blather.log "CHALLENGE DECODE: #{res.inspect}"
@nonce ||= res['nonce']
@realm ||= res['realm']
end
##
# Builds the properly encoded challenge response
def generate_response
a1 = "#{d("#{@response[:username]}:#{@response[:realm]}:#{@pass}")}:#{@response[:nonce]}:#{@response[:cnonce]}"
a2 = "AUTHENTICATE:#{@response[:'digest-uri']}"
h("#{h(a1)}:#{@response[:nonce]}:#{@response[:nc]}:#{@response[:cnonce]}:#{@response[:qop]}:#{h(a2)}")
end
##
# Send challenge response
def respond
node = XMPPNode.new 'response'
node.namespace = SASL_NS
unless @initial_response_sent
@initial_response_sent = true
@response = {
:nonce => @nonce,
:charset => 'utf-8',
:username => @jid.node,
:realm => @realm || @jid.domain,
:cnonce => h(Time.new.to_f.to_s),
:nc => '00000001',
:qop => 'auth',
:'digest-uri' => "xmpp/#{@jid.domain}",
}
@response[:response] = generate_response
@response.each { |k,v| @response[k] = "\"#{v}\"" unless [:nc, :qop, :response, :charset].include?(k) }
Blather.log "CHALLENGE RESPONSE: #{@response.inspect}"
Blather.log "CH RESP TXT: #{@response.map { |k,v| "#{k}=#{v}" } * ','}"
# order is to simplify testing
# Ruby 1.9 eliminates the need for this with ordered hashes
order = [:nonce, :charset, :username, :realm, :cnonce, :nc, :qop, :'digest-uri', :response]
node.content = b64(order.map { |k| v = @response[k]; "#{k}=#{v}" } * ',')
end
@stream.send node
end
def d(s); Digest::MD5.digest(s); end
def h(s); Digest::MD5.hexdigest(s); end
end #DigestMD5
# @private
module Plain
def authenticate
@stream.send auth_node('PLAIN', b64("#{@jid.stripped}\x00#{@jid.node}\x00#{@pass}"))
end
end #Plain
# @private
module Anonymous
def authenticate
@stream.send auth_node('ANONYMOUS')
end
end #Anonymous
end #SASL
end #Stream
end
| 26.65445 | 119 | 0.548812 |
01532caacbb4cd0b41267d745f745101a1c0cb13 | 17,508 | # Cannon Mallory
# [email protected]
#
# Methods for transferring items into and out of collections
needs 'Standard Libs/Units'
needs 'Standard Libs/AssociationManagement'
needs 'Small Instruments/Pipettors'
needs 'Collection Management/CollectionLocation'
needs 'Collection Management/CollectionData'
needs 'Collection Management/CollectionDisplay'
needs 'Standard Libs/ItemActions'
module CollectionTransfer
include Units
include AssociationManagement
include CollectionLocation
include CollectionData
include CollectionDisplay
include Pipettors
include ItemActions
VOL_TRANSFER = 'Volume Transferred'.to_sym
# Assigns samples to specific well locations
# The order of the samples and the order of the association map should be
# the same
#
# @param samples [Array<FieldValue>] or [Array<Samples>]
# @param to_collection [Collection]
# @param association_map [Array<Hash>] map of where samples should go
# @raise if not enough space in collection
def add_samples_to_collection(samples, to_collection, association_map: nil)
slots_left = to_collection.get_empty.length
if samples.length > slots_left
raise "Not enough space in in collection #{to_collection}"
end
unless association_map.present?
to_collection.add_samples(samples)
return to_collection
end
samples.zip(association_map).each do |sample, map|
next if sample.nil?
if map.nil?
to_collection.add(sample)
else
rc = map[:to_loc]
to_collection.set(rc[0], rc[1], sample)
end
end
to_collection
end
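# Illustrative sketch (not part of the original library): the association_map is an
# array of hashes whose :to_loc entries are [row, column] pairs, e.g.
#
#   map = [{ to_loc: [0, 0] }, { to_loc: [0, 1] }, { to_loc: [1, 3] }]
#   add_samples_to_collection(samples, plate, association_map: map)
#
# Samples are placed at the matching coordinates; a sample whose map entry is nil is
# handed to `to_collection.add` instead. `plate` here is a placeholder collection.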
# Creates a 'to_association_map' for all parts that share the same item or sample
#
# @param collection [Collection] the collection
# @param item [Item or Sample] that is to be found
def to_association_map(collection:, item:)
association_map = []
locations = collection.find(item)
locations.each do |loc|
association_map.push({ to_loc: loc })
end
association_map
end
# Creates a 'from_association_map' for all parts that share the same item or sample
#
# @param collection [Collection] the collection
# @param item [Item or Sample] that is to be found
def from_association_map(collection:, item:)
association_map = []
locations = collection.find(item)
locations.each do |loc|
association_map.push({ from_loc: loc })
end
association_map
end
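# Example sketch (assumed usage, not taken from the original file):
#
#   # every well of `plate` currently holding `water_item` becomes a destination
#   map = to_association_map(collection: plate, item: water_item)
#   # => e.g. [{ to_loc: [0, 0] }, { to_loc: [2, 5] }]
#
# The resulting array plugs directly into the `association_map:` parameter of the
# transfer helpers below; `plate` and `water_item` are placeholder names.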
# Directions for using a multichannel pipettor to pipet from an item
# into a collection
#
# @param to_collection [Collection]
# @param source [String] the source of the media etc
# @param volume [Volume] volume class volume being transferred
# @param association_map [Array<{ to_loc: [row,col] }>] all the
# coordinate of where stuff is to go
def multichannel_item_to_collection(to_collection:,
source:,
volume:,
association_map:,
verbose: false)
pipettor = get_multi_channel_pipettor(volume: volume)
channels = pipettor.channels
if verbose
map_by_row = association_map.group_by { |map| map[:to_loc][0] }
map_by_row.each do |_row, map|
map.each_slice(channels).each do |rc_slice|
pipet_into_collection(to_collection: to_collection,
source: source,
pipettor: pipettor,
volume: volume,
association_map: rc_slice)
end
end
else
pipet_into_collection(to_collection: to_collection,
source: source,
pipettor: pipettor,
volume: volume,
association_map: association_map)
end
return {} unless to_collection.is_a?(Collection) && source.is_a?(Item)
transfer_from_item_to_collection(
from_item: source,
to_collection: to_collection,
association_map: association_map,
transfer_vol: volume
)
end
  # Directions for using a single channel pipettor to pipet from an item
  # into a collection
  #
  # @param to_collection [Collection]
  # @param source [Item, String] the source of the media, reagent, etc.
  # @param volume [Volume] the volume to be transferred
  # @param association_map [Array<{ to_loc: [row,col] }>] the
  #   coordinates of the destination wells
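  #
  # A hedged sketch of a typical call; the item, plate, and volume below
  # are placeholders (the volume hash follows the Units library convention
  # assumed by this module).
  #
  # @example Pipet 50 uL of a master mix item into two mapped wells
  #   single_channel_item_to_collection(
  #     to_collection: output_plate,
  #     source: master_mix_item,
  #     volume: { qty: 50, units: MICROLITERS },
  #     association_map: [{ to_loc: [0, 0] }, { to_loc: [1, 0] }]
  #   )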
def single_channel_item_to_collection(to_collection:,
source:,
volume:,
association_map:)
pipettor = get_single_channel_pipettor(volume: volume)
pipet_into_collection(to_collection: to_collection,
source: source,
volume: volume,
association_map: association_map,
pipettor: pipettor)
return {} unless to_collection.is_a?(Collection) && source.is_a?(Item)
transfer_from_item_to_collection(
from_item: source,
to_collection: to_collection,
association_map: association_map,
transfer_vol: volume
)
end
  # Directions for using a single channel pipettor to pipet from
  # a collection into a collection
  #
  # @param to_collection [Collection]
  # @param from_collection [Collection]
  # @param volume [Volume] the volume to be transferred
  # @param association_map [Array<{ to_loc: [row,col], from_loc: [row,col] }>]
  #   the source and destination coordinates of the parts
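  #
  # A brief hedged sketch; plates, wells, and volume are placeholders.
  # Each association map entry pairs a source well with a destination well.
  #
  # @example Transfer well A1 of one plate into well B2 of another
  #   single_channel_collection_to_collection(
  #     to_collection: dest_plate,
  #     from_collection: source_plate,
  #     volume: { qty: 20, units: MICROLITERS },
  #     association_map: [{ from_loc: [0, 0], to_loc: [1, 1] }]
  #   )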
def single_channel_collection_to_collection(to_collection:,
from_collection:,
volume:,
association_map:)
pipettor = get_single_channel_pipettor(volume: volume)
association_map.each do |loc_hash|
pipet_collection_to_collection(to_collection: to_collection,
from_collection: from_collection,
pipettor: pipettor,
volume: volume,
association_map: [loc_hash])
end
return {} unless to_collection.is_a?(Collection) && from_collection.is_a?(Item)
transfer_from_collection_to_collection(
from_collection: from_collection,
to_collection: to_collection,
association_map: association_map,
transfer_vol: volume
)
end
  # Directions for using a multichannel pipettor to pipet from
  # a collection into a collection
  #
  # @param to_collection [Collection]
  # @param from_collection [Collection]
  # @param volume [Volume] the volume to be transferred
  # @param association_map [Array<{ to_loc: [row,col], from_loc: [row,col] }>]
  #   the source and destination coordinates of the parts
def multichannel_collection_to_collection(to_collection:,
from_collection:,
volume:,
association_map:,
verbose: false)
pipettor = get_multi_channel_pipettor(volume: volume)
if verbose
association_map.each_slice(pipettor.channels).to_a.each do |map_slice|
pipet_collection_to_collection(to_collection: to_collection,
from_collection: from_collection,
pipettor: pipettor,
volume: volume,
association_map: map_slice)
end
else
pipet_collection_to_collection(to_collection: to_collection,
from_collection: from_collection,
pipettor: pipettor,
volume: volume,
association_map: association_map)
end
return {} unless to_collection.is_a?(Collection) && from_collection.is_a?(Item)
transfer_from_collection_to_collection(
from_collection: from_collection,
to_collection: to_collection,
association_map: association_map,
transfer_vol: volume
)
end
  # Directions for using a pipettor to transfer from a collection to a collection
  #
  # @param to_collection [Collection]
  # @param from_collection [Collection]
  # @param volume [Volume] the volume to be transferred
  # @param association_map [Array<{ to_loc: [row,col], from_loc: [row,col] }>]
  #   the source and destination coordinates of the parts
  # @param pipettor [Pipettor] the pipettor to be used
def pipet_collection_to_collection(to_collection:,
from_collection:,
pipettor:,
volume:,
association_map:)
to_rc_list = []
from_rc_list = []
association_map.each do |loc_hash|
to_rc_list.push(loc_hash[:to_loc])
from_rc_list.push(loc_hash[:from_loc])
end
show do
title 'Pipet from Collection to Wells'
note pipettor.pipet(volume: volume,
source: from_collection.id,
destination: "<b>#{to_collection.id}</b> as noted below}")
note "</b>From Collection:</b> #{from_collection}"
table highlight_collection_rc(from_collection, from_rc_list, check: false) { |r, c|
convert_coordinates_to_location([r, c])
}
separator
note "</b>To Collection:</b> #{to_collection}"
table highlight_collection_rc(to_collection, to_rc_list, check: false) { |r, c|
convert_coordinates_to_location([r, c])
}
end
end
  # Provides directions for pipetting from an item into a collection
  #
  # @param to_collection [Collection]
  # @param source [Item, String] the source of the media, reagent, etc.
  # @param volume [Volume] the volume to be transferred
  # @param association_map [Array<{ to_loc: [row,col] }>]
  #   the coordinates of the destination wells
  # @param pipettor [Pipettor] the pipettor to be used
def pipet_into_collection(to_collection:,
source:,
pipettor:,
volume:,
association_map:)
rc_list = association_map.map { |hash| hash[:to_loc] }
if pipettor.class::CHANNELS > 1
show_fill_reservoir(source, volume, rc_list.length)
source = "Media Reservoir #{source}"
end
show do
title "Pipet from #{source} to Wells"
note pipettor.pipet(volume: volume,
source: source,
destination: "the highlighted wells of #{to_collection}")
table highlight_collection_rc(to_collection, rc_list, check: false)
end
end
  # DEPRECATED
  # Instructions to the technician to relabel a plate
  # TODO: remove this method; an equivalent exists in Collection Actions
#
# @param from_collection [Collection]
# @param to_collection [Collection]
def relabel_plate(from_collection:, to_collection:)
show do
title 'Rename Plate'
note "Relabel plate <b>#{from_collection.id}</b> with
<b>#{to_collection.id}</b>"
end
end
# Transfers items from one collection to another per the association map
#
# @param from_collection [Collection]
# @param to_collection [Collection]
# @param association_map [Array<{ to_loc: [r,c], from_loc: [r,c] }]
# @param transfer_vol [{qty: int, units: string}]
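  #
  # A hedged sketch that mirrors every filled well of one plate onto
  # another (the plates and volume below are placeholders):
  #
  # @example
  #   map = one_to_one_association_map(from_collection: source_plate)
  #   transfer_from_collection_to_collection(
  #     from_collection: source_plate,
  #     to_collection: dest_plate,
  #     association_map: map,
  #     transfer_vol: { qty: 10, units: MICROLITERS }
  #   )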
def transfer_from_collection_to_collection(from_collection:,
to_collection:,
association_map:,
transfer_vol: nil)
association_map.each do |loc_hash|
to_loc = loc_hash[:to_loc]
from_loc = loc_hash[:from_loc]
      begin
        from_part = from_collection.part(from_loc[0], from_loc[1])
        to_collection.set(to_loc[0], to_loc[1], from_part)
      rescue StandardError => e
        # Preserve the original error but point at the offending well
        raise "#{e.message} (transferring from #{from_loc} to #{to_loc})"
      end
end
associate_transfer_collection_to_collection(from_collection: from_collection,
to_collection: to_collection,
association_map: association_map,
transfer_vol: transfer_vol)
end
# Transfers from item to well in collection
#
# @param from_item [item]
# @param to_collection [Collection]
# @param association_map [Array<{ to_loc: [r,c], from_loc: [r,c] }]
# @param transfer_vol [{qty: int, units: string}]
def transfer_from_item_to_collection(from_item:,
to_collection:,
association_map:,
transfer_vol: nil)
association_map.each do |loc_hash|
to_loc = loc_hash[:to_loc]
to_collection.set(to_loc[0], to_loc[1], from_item)
end
associate_transfer_item_to_collection(from_item: from_item,
to_collection: to_collection,
association_map: association_map,
transfer_vol: transfer_vol)
end
# Associates/adds provenance for a transfer from a collection to
# a collection. It does NOT replace the item in the 'to_collection'
#
# @param from_collection [Collection]
# @param to_collection [Collection]
# @param association_map [Array<{ to_loc: [r,c], from_loc: [r,c] }]
# @param transfer_vol [{qty: int, units: string}]
def associate_transfer_collection_to_collection(from_collection:,
to_collection:,
association_map:,
transfer_vol: nil)
association_map.each do |loc_hash|
from_part = from_collection.part(loc_hash[:from_loc][0],
loc_hash[:from_loc][1])
to_part = to_collection.part(loc_hash[:to_loc][0],
loc_hash[:to_loc][1])
item_to_item_vol_transfer(volume: transfer_vol,
key: VOL_TRANSFER.to_s + from_part.id.to_s,
to_item: to_part,
from_item: from_part)
end
end
# Associates/adds provenance for a transfer from an item to
# a collection. It does NOT replace the item in the 'to_collection'
#
  # @param from_item [Item]
# @param to_collection [Collection]
# @param association_map [Array<{ to_loc: [r,c], from_loc: [r,c] }]
# @param transfer_vol [{qty: int, units: string}]
def associate_transfer_item_to_collection(from_item:,
to_collection:,
association_map:,
transfer_vol: nil)
association_map.each do |loc_hash|
to_part = to_collection.part(loc_hash[:to_loc][0],
loc_hash[:to_loc][1])
item_to_item_vol_transfer(volume: transfer_vol,
key: VOL_TRANSFER.to_s + from_item.id.to_s,
to_item: to_part,
from_item: from_item)
end
end
# Associates/adds provenance for a transfer from a collection to
# an item.
#
# @param from_collection [Collection]
# @param to_item [item]
# @param association_map [Array<{ to_loc: [r,c], from_loc: [r,c] }]
# @param transfer_vol [{qty: int, units: string}]
def associate_transfer_collection_to_item(from_collection:,
to_item:,
association_map:,
transfer_vol: nil)
association_map.each do |loc_hash|
from_part = from_collection.part(loc_hash[:from_loc][0],
loc_hash[:from_loc][1])
item_to_item_vol_transfer(volume: transfer_vol,
key: VOL_TRANSFER.to_s + from_part.id.to_s,
to_item: to_item,
from_item: from_part)
end
end
  # Creates a one-to-one association map covering every filled well of
  # the from collection (each entry maps a well to the same location)
  #
  # @param from_collection [Collection] the collection to map
  # @param skip_nil [Boolean] when true, skip wells that contain no part
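  #
  # A hedged illustration of the returned structure, assuming a source
  # collection whose only filled wells are A1 and A2:
  #
  # @example
  #   one_to_one_association_map(from_collection: plate)
  #   #=> [{ to_loc: [0, 0], from_loc: [0, 0] },
  #   #    { to_loc: [0, 1], from_loc: [0, 1] }]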
def one_to_one_association_map(from_collection:, skip_nil: true)
rows, cols = from_collection.dimensions
association_map = []
rows.times do |row|
cols.times do |col|
next if from_collection.part(row, col).nil? && skip_nil
loc = [row, col]
association_map.push({ to_loc: loc, from_loc: loc })
end
end
association_map
end
  # Sets wells of the to collection to the same parts as the from
  # collection, per the association map
  #
  # @param from_collection [Collection]
  # @param to_collection [Collection]
  # @param association_map [Array<{ to_loc: [r,c], from_loc: [r,c] }>]
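  #
  # A brief hedged sketch (plates and map are placeholders):
  #
  # @example Mirror the filled wells of one plate onto another
  #   map = one_to_one_association_map(from_collection: source_plate)
  #   copy_wells(from_collection: source_plate,
  #              to_collection: dest_plate,
  #              association_map: map)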
def copy_wells(from_collection:, to_collection:, association_map:)
association_map.each do |map|
to_loc = map[:to_loc]
from_loc = map[:from_loc]
item = from_collection.part(from_loc[0], from_loc[1])
to_collection.set(to_loc[0], to_loc[1], item)
end
end
end
| 39.080357 | 89 | 0.594186 |
08828f039b434fa6726fc020eb5f98002cc2f87a | 4,789 | require 'uri'
module GreenButtonData
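  # Holds Green Button API endpoint settings and builds resource URLs from
  # a base URL plus per-resource paths.
  #
  # A hedged usage sketch; the host and paths below are placeholders, not
  # endpoints shipped with this library:
  #
  # @example
  #   config = GreenButtonData::Configuration.new
  #   config.base_url = "https://utility.example.com"
  #   config.subscription_path = "/espi/1_1/resource/Subscription/"
  #   config.usage_point_path = "UsagePoint/"
  #   config.usage_point_url(subscription_id: 5)
  #   #=> "https://utility.example.com/espi/1_1/resource/Subscription/5/UsagePoint/"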
class Configuration
attr_accessor :base_url,
:application_information_path,
:authorization_path,
:interval_block_path,
:local_time_parameters_path,
:meter_reading_path,
:reading_type_path,
:subscription_path,
:usage_point_path,
:usage_summary_path,
:retail_customer_path,
:bulk_path
def application_information_url(id = nil)
return build_url @application_information_path, id
end
def application_information_url=(url)
uri = URI.parse url
@base_url = "#{uri.scheme}://#{uri.host}"
@application_information_path = uri.path
end
def authorization_url(id = nil)
return build_url @authorization_path, id
end
def authorization_url=(url)
uri = URI.parse url
@base_url = "#{uri.scheme}://#{uri.host}"
@authorization_path = uri.path
end
def interval_block_url(kwargs = {})
subscription_id = kwargs[:subscription_id]
usage_point_id = kwargs[:usage_point_id]
meter_reading_id = kwargs[:meter_reading_id]
interval_block_id = kwargs[:interval_block_id]
uri = if subscription_id && usage_point_id && meter_reading_id
meter_reading_uri = meter_reading_url(
subscription_id: subscription_id,
usage_point_id: usage_point_id,
meter_reading_id: meter_reading_id
)
URI.join meter_reading_uri, @interval_block_path
else
URI.join @base_url, @interval_block_path
end
uri = URI.join uri, "#{interval_block_id}/" if interval_block_id
return uri.to_s
end
def local_time_parameters_url(id = nil)
return build_url @local_time_parameters_path, id
end
def meter_reading_url(kwargs = {})
subscription_id = kwargs[:subscription_id]
usage_point_id = kwargs[:usage_point_id]
meter_reading_id = kwargs[:meter_reading_id]
uri = if subscription_id && usage_point_id
usage_point_uri = usage_point_url(
subscription_id: subscription_id,
usage_point_id: usage_point_id
)
URI.join usage_point_uri, @meter_reading_path
else
URI.join @base_url, @meter_reading_path
end
uri = URI.join uri, "#{meter_reading_id}/" if meter_reading_id
return uri.to_s
end
def reading_type_url(id = nil)
return build_url @reading_type_path, id
end
def subscription_url(id)
return build_url @subscription_path, id
end
def usage_point_url(kwargs = {})
subscription_id = kwargs[:subscription_id]
usage_point_id = kwargs[:usage_point_id]
uri = if subscription_id
subscription_uri = subscription_url subscription_id
URI.join subscription_uri, @usage_point_path
else
URI.join @base_url, @usage_point_path
end
uri = URI.join uri, "#{usage_point_id}/" if usage_point_id
return uri.to_s
end
def usage_summary_url(kwargs = {})
subscription_id = kwargs[:subscription_id]
usage_point_id = kwargs[:usage_point_id]
if subscription_id && usage_point_id
usage_point_uri = usage_point_url subscription_id: subscription_id,
usage_point_id: usage_point_id
return URI.join(usage_point_uri, @usage_summary_path).to_s
elsif subscription_id
raise ArgumentError.new "Missing required argument: usage_point_id"
elsif usage_point_id
raise ArgumentError.new "Missing required argument: subscription_id"
else
raise ArgumentError.new "Missing required arguments: subscription_id," +
" usage_point_id"
end
end
def retail_customer_url(kwargs = {})
retail_customer_id = kwargs[:subscription_id]
if retail_customer_id
retail_customer_url = build_url(@retail_customer_path)
return "#{retail_customer_url}/#{retail_customer_id}"
else
raise ArgumentError.new "Missing required arguments: subscription_id"
end
end
def bulk_url(kwargs = {})
subscription_id = kwargs[:subscription_id]
bulk_file_id = kwargs[:bulk_file_id]
if subscription_id && bulk_file_id
bulk_url = build_url(@bulk_path)
return "#{bulk_url}/#{subscription_id}/#{bulk_file_id}"
else
raise ArgumentError.new "Missing required arguments: subscription_id or bulk_file_id"
end
end
private
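    # Joins the base URL with a resource path and an optional trailing id;
    # the trailing "/" on the id keeps later URI.join calls appending to,
    # rather than replacing, the last path segment.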
def build_url(path, id = nil)
uri = URI.join @base_url, path
uri = URI.join uri, "#{id}/" if id
return uri.to_s
end
end
end
| 29.380368 | 93 | 0.651284 |
28175fec086b28970a2a82e3c646b6f5e466f77d | 561 | cask "ghosttile" do
version "15,1510040474"
sha256 "6f723c7489a272a82648a3fb2a4ad0f91f6bd3d79eea748ea20a046a3aefe0ab"
# dl.devmate.com/im.kernelpanic.GhostTile/ was verified as official when first introduced to the cask
url "https://dl.devmate.com/im.kernelpanic.GhostTile/#{version.before_comma}/#{version.after_comma}/GhostTile-#{version.before_comma}.zip"
appcast "https://updates.devmate.com/im.kernelpanic.GhostTile.xml"
name "Kernelpanic GhostTile"
name "GhostTile"
homepage "https://ghosttile.kernelpanic.im/"
app "GhostTile.app"
end
| 40.071429 | 140 | 0.786096 |
f7727ef031c6df9e3e48f373a542ec85ba3c7ece | 7,816 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# Source: google/cloud/billing/v1/cloud_billing.proto for package 'google.cloud.billing.v1'
# Original file comments:
# Copyright 2019 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
require 'grpc'
require 'google/cloud/billing/v1/cloud_billing_pb'
module Google
module Cloud
module Billing
module V1
module CloudBilling
# Retrieves GCP Console billing accounts and associates them with projects.
class Service
include ::GRPC::GenericService
self.marshal_class_method = :encode
self.unmarshal_class_method = :decode
self.service_name = 'google.cloud.billing.v1.CloudBilling'
# Gets information about a billing account. The current authenticated user
# must be a [viewer of the billing
# account](https://cloud.google.com/billing/docs/how-to/billing-access).
rpc :GetBillingAccount, ::Google::Cloud::Billing::V1::GetBillingAccountRequest, ::Google::Cloud::Billing::V1::BillingAccount
# Lists the billing accounts that the current authenticated user has
# permission to
# [view](https://cloud.google.com/billing/docs/how-to/billing-access).
rpc :ListBillingAccounts, ::Google::Cloud::Billing::V1::ListBillingAccountsRequest, ::Google::Cloud::Billing::V1::ListBillingAccountsResponse
# Updates a billing account's fields.
# Currently the only field that can be edited is `display_name`.
# The current authenticated user must have the `billing.accounts.update`
# IAM permission, which is typically given to the
# [administrator](https://cloud.google.com/billing/docs/how-to/billing-access)
# of the billing account.
rpc :UpdateBillingAccount, ::Google::Cloud::Billing::V1::UpdateBillingAccountRequest, ::Google::Cloud::Billing::V1::BillingAccount
# Creates a billing account.
# This method can only be used to create
# [billing subaccounts](https://cloud.google.com/billing/docs/concepts)
# by GCP resellers.
# When creating a subaccount, the current authenticated user must have the
# `billing.accounts.update` IAM permission on the master account, which is
# typically given to billing account
# [administrators](https://cloud.google.com/billing/docs/how-to/billing-access).
# This method will return an error if the master account has not been
# provisioned as a reseller account.
rpc :CreateBillingAccount, ::Google::Cloud::Billing::V1::CreateBillingAccountRequest, ::Google::Cloud::Billing::V1::BillingAccount
# Lists the projects associated with a billing account. The current
# authenticated user must have the `billing.resourceAssociations.list` IAM
# permission, which is often given to billing account
# [viewers](https://cloud.google.com/billing/docs/how-to/billing-access).
rpc :ListProjectBillingInfo, ::Google::Cloud::Billing::V1::ListProjectBillingInfoRequest, ::Google::Cloud::Billing::V1::ListProjectBillingInfoResponse
# Gets the billing information for a project. The current authenticated user
# must have [permission to view the
# project](https://cloud.google.com/docs/permissions-overview#h.bgs0oxofvnoo
# ).
rpc :GetProjectBillingInfo, ::Google::Cloud::Billing::V1::GetProjectBillingInfoRequest, ::Google::Cloud::Billing::V1::ProjectBillingInfo
# Sets or updates the billing account associated with a project. You specify
# the new billing account by setting the `billing_account_name` in the
# `ProjectBillingInfo` resource to the resource name of a billing account.
# Associating a project with an open billing account enables billing on the
# project and allows charges for resource usage. If the project already had a
# billing account, this method changes the billing account used for resource
# usage charges.
#
# *Note:* Incurred charges that have not yet been reported in the transaction
# history of the GCP Console might be billed to the new billing
# account, even if the charge occurred before the new billing account was
# assigned to the project.
#
# The current authenticated user must have ownership privileges for both the
# [project](https://cloud.google.com/docs/permissions-overview#h.bgs0oxofvnoo
# ) and the [billing
# account](https://cloud.google.com/billing/docs/how-to/billing-access).
#
# You can disable billing on the project by setting the
# `billing_account_name` field to empty. This action disassociates the
# current billing account from the project. Any billable activity of your
# in-use services will stop, and your application could stop functioning as
# expected. Any unbilled charges to date will be billed to the previously
# associated account. The current authenticated user must be either an owner
# of the project or an owner of the billing account for the project.
#
# Note that associating a project with a *closed* billing account will have
# much the same effect as disabling billing on the project: any paid
# resources used by the project will be shut down. Thus, unless you wish to
# disable billing, you should always call this method with the name of an
# *open* billing account.
rpc :UpdateProjectBillingInfo, ::Google::Cloud::Billing::V1::UpdateProjectBillingInfoRequest, ::Google::Cloud::Billing::V1::ProjectBillingInfo
# Gets the access control policy for a billing account.
# The caller must have the `billing.accounts.getIamPolicy` permission on the
# account, which is often given to billing account
# [viewers](https://cloud.google.com/billing/docs/how-to/billing-access).
rpc :GetIamPolicy, ::Google::Iam::V1::GetIamPolicyRequest, ::Google::Iam::V1::Policy
# Sets the access control policy for a billing account. Replaces any existing
# policy.
# The caller must have the `billing.accounts.setIamPolicy` permission on the
# account, which is often given to billing account
# [administrators](https://cloud.google.com/billing/docs/how-to/billing-access).
rpc :SetIamPolicy, ::Google::Iam::V1::SetIamPolicyRequest, ::Google::Iam::V1::Policy
# Tests the access control policy for a billing account. This method takes
# the resource and a set of permissions as input and returns the subset of
# the input permissions that the caller is allowed for that resource.
rpc :TestIamPermissions, ::Google::Iam::V1::TestIamPermissionsRequest, ::Google::Iam::V1::TestIamPermissionsResponse
end
Stub = Service.rpc_stub_class
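
          # A hedged client sketch; the host, credentials, and billing
          # account name below are placeholders, and production use would
          # normally go through a higher-level wrapper client with proper
          # authentication.
          #
          #   stub = Google::Cloud::Billing::V1::CloudBilling::Stub.new(
          #     "cloudbilling.googleapis.com:443",
          #     GRPC::Core::ChannelCredentials.new
          #   )
          #   request = Google::Cloud::Billing::V1::GetBillingAccountRequest.new(
          #     name: "billingAccounts/000000-000000-000000"
          #   )
          #   account = stub.get_billing_account(request)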
end
end
end
end
end
| 61.0625 | 162 | 0.672723 |
39923bc15e0e4d26ceb5d78c90d0475889458601 | 2,526 | class Vdirsyncer < Formula
include Language::Python::Virtualenv
desc "Synchronize calendars and contacts"
homepage "https://github.com/pimutils/vdirsyncer"
url "https://github.com/pimutils/vdirsyncer.git",
:tag => "0.16.7",
:revision => "dcf5f701b7b5c21a8f4e8c80243db3e0baff1313"
head "https://github.com/pimutils/vdirsyncer.git"
bottle do
cellar :any_skip_relocation
sha256 "37dd9ebb3b0a3c41efa99f6e39e37dfac0c0ba3f22555ab64d02546a69bf8d6a" => :catalina
sha256 "2c40f6ad53b8a558ed7503b5fe2413ae86850ea74f6ab41b560c3019185719fc" => :mojave
sha256 "89f7e1832a262681843b223527d96c5f6a587ded3b2884df9a87204c463f1911" => :high_sierra
sha256 "57510d02d159de632f3fdb98d7de88aba5ea01d375b4f096d6d367c976e355bc" => :sierra
sha256 "81eaa19b3cbc91007a0a5cfe9979cca1f207b2ac2a72b87aabca41ae019838f7" => :el_capitan
end
depends_on "python"
def install
venv = virtualenv_create(libexec, "python3")
system libexec/"bin/pip", "install", "-v", "--no-binary", ":all:",
"--ignore-installed", "requests-oauthlib",
buildpath
system libexec/"bin/pip", "uninstall", "-y", "vdirsyncer"
venv.pip_install_and_link buildpath
prefix.install "contrib/vdirsyncer.plist"
inreplace prefix/"vdirsyncer.plist" do |s|
s.gsub! "@@WORKINGDIRECTORY@@", bin
s.gsub! "@@VDIRSYNCER@@", bin/name
s.gsub! "@@SYNCINTERVALL@@", "60"
end
end
def post_install
inreplace prefix/"vdirsyncer.plist", "@@LOCALE@@", ENV["LC_ALL"] || ENV["LANG"] || "en_US.UTF-8"
end
test do
ENV["LC_ALL"] = "en_US.UTF-8"
(testpath/".config/vdirsyncer/config").write <<~EOS
[general]
status_path = "#{testpath}/.vdirsyncer/status/"
[pair contacts]
a = "contacts_a"
b = "contacts_b"
collections = ["from a"]
[storage contacts_a]
type = "filesystem"
path = "~/.contacts/a/"
fileext = ".vcf"
[storage contacts_b]
type = "filesystem"
path = "~/.contacts/b/"
fileext = ".vcf"
EOS
(testpath/".contacts/a/foo/092a1e3b55.vcf").write <<~EOS
BEGIN:VCARD
VERSION:3.0
EMAIL;TYPE=work:[email protected]
FN:User Name Ö φ 風 ض
UID:092a1e3b55
N:Name;User
END:VCARD
EOS
(testpath/".contacts/b/foo/").mkpath
system "#{bin}/vdirsyncer", "discover"
system "#{bin}/vdirsyncer", "sync"
assert_match /Ö φ 風 ض/, (testpath/".contacts/b/foo/092a1e3b55.vcf").read
end
end
| 33.68 | 100 | 0.655186 |
e88746dc5566ae1b706cb964f82aeb86651a9545 | 90,038 | # frozen_string_literal: false
require 'test/unit'
class TestString < Test::Unit::TestCase
ENUMERATOR_WANTARRAY = RUBY_VERSION >= "3.0.0"
def initialize(*args)
@cls = String
@aref_re_nth = true
@aref_re_silent = false
@aref_slicebang_silent = true
super
end
def S(*args)
@cls.new(*args)
end
def test_s_new
assert_equal("", S())
assert_equal(Encoding::ASCII_8BIT, S().encoding)
assert_equal("", S(""))
assert_equal(__ENCODING__, S("").encoding)
src = "RUBY"
assert_equal(src, S(src))
assert_equal(__ENCODING__, S(src).encoding)
src.force_encoding("euc-jp")
assert_equal(src, S(src))
assert_equal(Encoding::EUC_JP, S(src).encoding)
assert_equal("", S(encoding: "euc-jp"))
assert_equal(Encoding::EUC_JP, S(encoding: "euc-jp").encoding)
assert_equal("", S("", encoding: "euc-jp"))
assert_equal(Encoding::EUC_JP, S("", encoding: "euc-jp").encoding)
src = "RUBY"
assert_equal(src, S(src, encoding: "euc-jp"))
assert_equal(Encoding::EUC_JP, S(src, encoding: "euc-jp").encoding)
src.force_encoding("euc-jp")
assert_equal(src, S(src, encoding: "utf-8"))
assert_equal(Encoding::UTF_8, S(src, encoding: "utf-8").encoding)
assert_equal("", S(capacity: 1000))
assert_equal(Encoding::ASCII_8BIT, S(capacity: 1000).encoding)
assert_equal("", S(capacity: 1000, encoding: "euc-jp"))
assert_equal(Encoding::EUC_JP, S(capacity: 1000, encoding: "euc-jp").encoding)
assert_equal("", S("", capacity: 1000))
assert_equal(__ENCODING__, S("", capacity: 1000).encoding)
assert_equal("", S("", capacity: 1000, encoding: "euc-jp"))
assert_equal(Encoding::EUC_JP, S("", capacity: 1000, encoding: "euc-jp").encoding)
end
def test_initialize
str = S("").freeze
assert_equal("", str.__send__(:initialize))
assert_raise(FrozenError){ str.__send__(:initialize, 'abc') }
assert_raise(FrozenError){ str.__send__(:initialize, capacity: 1000) }
assert_raise(FrozenError){ str.__send__(:initialize, 'abc', capacity: 1000) }
assert_raise(FrozenError){ str.__send__(:initialize, encoding: 'euc-jp') }
assert_raise(FrozenError){ str.__send__(:initialize, 'abc', encoding: 'euc-jp') }
assert_raise(FrozenError){ str.__send__(:initialize, 'abc', capacity: 1000, encoding: 'euc-jp') }
end
def test_initialize_nonstring
assert_raise(TypeError) {
S(1)
}
assert_raise(TypeError) {
S(1, capacity: 1000)
}
end
def test_initialize_memory_leak
assert_no_memory_leak([], <<-PREP, <<-CODE, rss: true)
code = proc {('x'*100000).__send__(:initialize, '')}
1_000.times(&code)
PREP
100_000.times(&code)
CODE
end
def test_AREF # '[]'
assert_equal("A", S("AooBar")[0])
assert_equal("B", S("FooBaB")[-1])
assert_equal(nil, S("FooBar")[6])
assert_equal(nil, S("FooBar")[-7])
assert_equal(S("Foo"), S("FooBar")[0,3])
assert_equal(S("Bar"), S("FooBar")[-3,3])
assert_equal(S(""), S("FooBar")[6,2])
assert_equal(nil, S("FooBar")[-7,10])
assert_equal(S("Foo"), S("FooBar")[0..2])
assert_equal(S("Foo"), S("FooBar")[0...3])
assert_equal(S("Bar"), S("FooBar")[-3..-1])
assert_equal(S(""), S("FooBar")[6..2])
assert_equal(nil, S("FooBar")[-10..-7])
assert_equal(S("Foo"), S("FooBar")[/^F../])
assert_equal(S("Bar"), S("FooBar")[/..r$/])
assert_equal(nil, S("FooBar")[/xyzzy/])
assert_equal(nil, S("FooBar")[/plugh/])
assert_equal(S("Foo"), S("FooBar")[S("Foo")])
assert_equal(S("Bar"), S("FooBar")[S("Bar")])
assert_equal(nil, S("FooBar")[S("xyzzy")])
assert_equal(nil, S("FooBar")[S("plugh")])
if @aref_re_nth
assert_equal(S("Foo"), S("FooBar")[/([A-Z]..)([A-Z]..)/, 1])
assert_equal(S("Bar"), S("FooBar")[/([A-Z]..)([A-Z]..)/, 2])
assert_equal(nil, S("FooBar")[/([A-Z]..)([A-Z]..)/, 3])
assert_equal(S("Bar"), S("FooBar")[/([A-Z]..)([A-Z]..)/, -1])
assert_equal(S("Foo"), S("FooBar")[/([A-Z]..)([A-Z]..)/, -2])
assert_equal(nil, S("FooBar")[/([A-Z]..)([A-Z]..)/, -3])
end
o = Object.new
def o.to_int; 2; end
assert_equal("o", "foo"[o])
assert_raise(ArgumentError) { "foo"[] }
end
def test_ASET # '[]='
s = S("FooBar")
s[0] = S('A')
assert_equal(S("AooBar"), s)
s[-1]= S('B')
assert_equal(S("AooBaB"), s)
assert_raise(IndexError) { s[-7] = S("xyz") }
assert_equal(S("AooBaB"), s)
s[0] = S("ABC")
assert_equal(S("ABCooBaB"), s)
s = S("FooBar")
s[0,3] = S("A")
assert_equal(S("ABar"),s)
s[0] = S("Foo")
assert_equal(S("FooBar"), s)
s[-3,3] = S("Foo")
assert_equal(S("FooFoo"), s)
assert_raise(IndexError) { s[7,3] = S("Bar") }
assert_raise(IndexError) { s[-7,3] = S("Bar") }
s = S("FooBar")
s[0..2] = S("A")
assert_equal(S("ABar"), s)
s[1..3] = S("Foo")
assert_equal(S("AFoo"), s)
s[-4..-4] = S("Foo")
assert_equal(S("FooFoo"), s)
assert_raise(RangeError) { s[7..10] = S("Bar") }
assert_raise(RangeError) { s[-7..-10] = S("Bar") }
s = S("FooBar")
s[/^F../]= S("Bar")
assert_equal(S("BarBar"), s)
s[/..r$/] = S("Foo")
assert_equal(S("BarFoo"), s)
if @aref_re_silent
s[/xyzzy/] = S("None")
assert_equal(S("BarFoo"), s)
else
assert_raise(IndexError) { s[/xyzzy/] = S("None") }
end
if @aref_re_nth
s[/([A-Z]..)([A-Z]..)/, 1] = S("Foo")
assert_equal(S("FooFoo"), s)
s[/([A-Z]..)([A-Z]..)/, 2] = S("Bar")
assert_equal(S("FooBar"), s)
assert_raise(IndexError) { s[/([A-Z]..)([A-Z]..)/, 3] = "None" }
s[/([A-Z]..)([A-Z]..)/, -1] = S("Foo")
assert_equal(S("FooFoo"), s)
s[/([A-Z]..)([A-Z]..)/, -2] = S("Bar")
assert_equal(S("BarFoo"), s)
assert_raise(IndexError) { s[/([A-Z]..)([A-Z]..)/, -3] = "None" }
end
s = S("FooBar")
s[S("Foo")] = S("Bar")
assert_equal(S("BarBar"), s)
s = S("a string")
s[0..s.size] = S("another string")
assert_equal(S("another string"), s)
o = Object.new
def o.to_int; 2; end
s = "foo"
s[o] = "bar"
assert_equal("fobar", s)
assert_raise(ArgumentError) { "foo"[1, 2, 3] = "" }
assert_raise(IndexError) {"foo"[RbConfig::LIMITS["LONG_MIN"]] = "l"}
end
def test_CMP # '<=>'
assert_equal(1, S("abcdef") <=> S("abcde"))
assert_equal(0, S("abcdef") <=> S("abcdef"))
assert_equal(-1, S("abcde") <=> S("abcdef"))
assert_equal(-1, S("ABCDEF") <=> S("abcdef"))
assert_nil("foo" <=> Object.new)
o = Object.new
def o.to_str; "bar"; end
assert_equal(1, "foo" <=> o)
class << o;remove_method :to_str;end
def o.<=>(x); nil; end
assert_nil("foo" <=> o)
class << o;remove_method :<=>;end
def o.<=>(x); 1; end
assert_equal(-1, "foo" <=> o)
class << o;remove_method :<=>;end
def o.<=>(x); 2**100; end
assert_equal(-1, "foo" <=> o)
end
def test_EQUAL # '=='
assert_not_equal(:foo, S("foo"))
assert_equal(S("abcdef"), S("abcdef"))
assert_not_equal(S("CAT"), S('cat'))
assert_not_equal(S("CaT"), S('cAt'))
o = Object.new
def o.to_str; end
def o.==(x); false; end
assert_equal(false, "foo" == o)
class << o;remove_method :==;end
def o.==(x); true; end
assert_equal(true, "foo" == o)
end
def test_LSHIFT # '<<'
assert_equal(S("world!"), S("world") << 33)
assert_equal(S("world!"), S("world") << S("!"))
s = "a"
10.times {|i|
s << s
assert_equal("a" * (2 << i), s)
}
s = ["foo"].pack("p")
l = s.size
s << "bar"
assert_equal(l + 3, s.size)
bug = '[ruby-core:27583]'
assert_raise(RangeError, bug) {S("a".force_encoding(Encoding::UTF_8)) << -3}
assert_raise(RangeError, bug) {S("a".force_encoding(Encoding::UTF_8)) << -2}
assert_raise(RangeError, bug) {S("a".force_encoding(Encoding::UTF_8)) << -1}
assert_raise(RangeError, bug) {S("a".force_encoding(Encoding::UTF_8)) << 0x81308130}
assert_nothing_raised {S("a".force_encoding(Encoding::GB18030)) << 0x81308130}
end
def test_MATCH # '=~'
assert_equal(10, S("FeeFieFoo-Fum") =~ /Fum$/)
assert_equal(nil, S("FeeFieFoo-Fum") =~ /FUM$/)
o = Object.new
def o.=~(x); x + "bar"; end
assert_equal("foobar", S("foo") =~ o)
assert_raise(TypeError) { S("foo") =~ "foo" }
end
def test_MOD # '%'
assert_equal(S("00123"), S("%05d") % 123)
assert_equal(S("123 |00000001"), S("%-5s|%08x") % [123, 1])
x = S("%3s %-4s%%foo %.0s%5d %#x%c%3.1f %b %x %X %#b %#x %#X") %
[S("hi"),
123,
S("never seen"),
456,
0,
?A,
3.0999,
11,
171,
171,
11,
171,
171]
assert_equal(S(' hi 123 %foo 456 0A3.1 1011 ab AB 0b1011 0xab 0XAB'), x)
end
def test_MUL # '*'
assert_equal(S("XXX"), S("X") * 3)
assert_equal(S("HOHO"), S("HO") * 2)
end
def test_PLUS # '+'
assert_equal(S("Yodel"), S("Yo") + S("del"))
end
def casetest(a, b, rev=false)
msg = proc {"#{a} should#{' not' if rev} match #{b}"}
case a
when b
assert(!rev, msg)
else
assert(rev, msg)
end
end
def test_VERY_EQUAL # '==='
# assert_equal(true, S("foo") === :foo)
casetest(S("abcdef"), S("abcdef"))
casetest(S("CAT"), S('cat'), true) # Reverse the test - we don't want to
casetest(S("CaT"), S('cAt'), true) # find these in the case.
end
def test_capitalize
assert_equal(S("Hello"), S("hello").capitalize)
assert_equal(S("Hello"), S("hELLO").capitalize)
assert_equal(S("123abc"), S("123ABC").capitalize)
end
def test_capitalize!
a = S("hello"); a.capitalize!
assert_equal(S("Hello"), a)
a = S("hELLO"); a.capitalize!
assert_equal(S("Hello"), a)
a = S("123ABC"); a.capitalize!
assert_equal(S("123abc"), a)
assert_equal(nil, S("123abc").capitalize!)
assert_equal(S("123abc"), S("123ABC").capitalize!)
assert_equal(S("Abc"), S("ABC").capitalize!)
assert_equal(S("Abc"), S("abc").capitalize!)
assert_equal(nil, S("Abc").capitalize!)
a = S("hello")
b = a.dup
assert_equal(S("Hello"), a.capitalize!)
assert_equal(S("hello"), b)
end
Bug2463 = '[ruby-dev:39856]'
def test_center
assert_equal(S("hello"), S("hello").center(4))
assert_equal(S(" hello "), S("hello").center(11))
assert_equal(S("ababaababa"), S("").center(10, "ab"), Bug2463)
assert_equal(S("ababaababab"), S("").center(11, "ab"), Bug2463)
end
def test_chomp
assert_equal(S("hello"), S("hello").chomp("\n"))
assert_equal(S("hello"), S("hello\n").chomp("\n"))
save = $/
$/ = "\n"
assert_equal(S("hello"), S("hello").chomp)
assert_equal(S("hello"), S("hello\n").chomp)
$/ = "!"
assert_equal(S("hello"), S("hello").chomp)
assert_equal(S("hello"), S("hello!").chomp)
$/ = save
assert_equal(S("a").hash, S("a\u0101").chomp(S("\u0101")).hash, '[ruby-core:22414]')
s = S("hello")
assert_equal("hel", s.chomp('lo'))
assert_equal("hello", s)
s = S("hello")
assert_equal("hello", s.chomp('he'))
assert_equal("hello", s)
s = S("\u{3053 3093 306b 3061 306f}")
assert_equal("\u{3053 3093 306b}", s.chomp("\u{3061 306f}"))
assert_equal("\u{3053 3093 306b 3061 306f}", s)
s = S("\u{3053 3093 306b 3061 306f}")
assert_equal("\u{3053 3093 306b 3061 306f}", s.chomp('lo'))
assert_equal("\u{3053 3093 306b 3061 306f}", s)
s = S("hello")
assert_equal("hello", s.chomp("\u{3061 306f}"))
assert_equal("hello", s)
# skip if argument is a broken string
s = S("\xe3\x81\x82")
assert_equal("\xe3\x81\x82", s.chomp("\x82"))
assert_equal("\xe3\x81\x82", s)
s = S("\x95\x5c").force_encoding("Shift_JIS")
assert_equal("\x95\x5c".force_encoding("Shift_JIS"), s.chomp("\x5c"))
assert_equal("\x95\x5c".force_encoding("Shift_JIS"), s)
# clear coderange
s = S("hello\u{3053 3093}")
assert_not_predicate(s, :ascii_only?)
assert_predicate(s.chomp("\u{3053 3093}"), :ascii_only?)
# argument should be converted to String
klass = Class.new { def to_str; 'a'; end }
s = S("abba")
assert_equal("abb", s.chomp(klass.new))
assert_equal("abba", s)
# chomp removes any of "\n", "\r\n", "\r" when "\n" is specified
s = "foo\n"
assert_equal("foo", s.chomp("\n"))
s = "foo\r\n"
assert_equal("foo", s.chomp("\n"))
s = "foo\r"
assert_equal("foo", s.chomp("\n"))
ensure
$/ = save
end
def test_chomp!
a = S("hello")
a.chomp!(S("\n"))
assert_equal(S("hello"), a)
assert_equal(nil, a.chomp!(S("\n")))
a = S("hello\n")
a.chomp!(S("\n"))
assert_equal(S("hello"), a)
save = $/
$/ = "\n"
a = S("hello")
a.chomp!
assert_equal(S("hello"), a)
a = S("hello\n")
a.chomp!
assert_equal(S("hello"), a)
$/ = "!"
a = S("hello")
a.chomp!
assert_equal(S("hello"), a)
a="hello!"
a.chomp!
assert_equal(S("hello"), a)
$/ = save
a = S("hello\n")
b = a.dup
assert_equal(S("hello"), a.chomp!)
assert_equal(S("hello\n"), b)
s = "foo\r\n"
s.chomp!
assert_equal("foo", s)
s = "foo\r"
s.chomp!
assert_equal("foo", s)
s = "foo\r\n"
s.chomp!("")
assert_equal("foo", s)
s = "foo\r"
s.chomp!("")
assert_equal("foo\r", s)
assert_equal(S("a").hash, S("a\u0101").chomp!(S("\u0101")).hash, '[ruby-core:22414]')
s = S("").freeze
assert_raise_with_message(FrozenError, /frozen/) {s.chomp!}
s = S("ax")
o = Struct.new(:s).new(s)
def o.to_str
s.freeze
"x"
end
assert_raise_with_message(FrozenError, /frozen/) {s.chomp!(o)}
s = S("hello")
assert_equal("hel", s.chomp!('lo'))
assert_equal("hel", s)
s = S("hello")
assert_equal(nil, s.chomp!('he'))
assert_equal("hello", s)
s = S("\u{3053 3093 306b 3061 306f}")
assert_equal("\u{3053 3093 306b}", s.chomp!("\u{3061 306f}"))
assert_equal("\u{3053 3093 306b}", s)
s = S("\u{3053 3093 306b 3061 306f}")
assert_equal(nil, s.chomp!('lo'))
assert_equal("\u{3053 3093 306b 3061 306f}", s)
s = S("hello")
assert_equal(nil, s.chomp!("\u{3061 306f}"))
assert_equal("hello", s)
# skip if argument is a broken string
s = S("\xe3\x81\x82")
assert_equal(nil, s.chomp!("\x82"))
assert_equal("\xe3\x81\x82", s)
s = S("\x95\x5c").force_encoding("Shift_JIS")
assert_equal(nil, s.chomp!("\x5c"))
assert_equal("\x95\x5c".force_encoding("Shift_JIS"), s)
# clear coderange
s = S("hello\u{3053 3093}")
assert_not_predicate(s, :ascii_only?)
assert_predicate(s.chomp!("\u{3053 3093}"), :ascii_only?)
# argument should be converted to String
klass = Class.new { def to_str; 'a'; end }
s = S("abba")
assert_equal("abb", s.chomp!(klass.new))
assert_equal("abb", s)
# chomp removes any of "\n", "\r\n", "\r" when "\n" is specified
s = "foo\n"
assert_equal("foo", s.chomp!("\n"))
s = "foo\r\n"
assert_equal("foo", s.chomp!("\n"))
s = "foo\r"
assert_equal("foo", s.chomp!("\n"))
ensure
$/ = save
end
def test_chop
assert_equal(S("hell"), S("hello").chop)
assert_equal(S("hello"), S("hello\r\n").chop)
assert_equal(S("hello\n"), S("hello\n\r").chop)
assert_equal(S(""), S("\r\n").chop)
assert_equal(S(""), S("").chop)
assert_equal(S("a").hash, S("a\u00d8").chop.hash)
end
def test_chop!
a = S("hello").chop!
assert_equal(S("hell"), a)
a = S("hello\r\n").chop!
assert_equal(S("hello"), a)
a = S("hello\n\r").chop!
assert_equal(S("hello\n"), a)
a = S("\r\n").chop!
assert_equal(S(""), a)
a = S("").chop!
assert_nil(a)
a = S("a\u00d8")
a.chop!
assert_equal(S("a").hash, a.hash)
a = S("hello\n")
b = a.dup
assert_equal(S("hello"), a.chop!)
assert_equal(S("hello\n"), b)
end
def test_clone
for taint in [ false, true ]
for frozen in [ false, true ]
a = S("Cool")
a.taint if taint
a.freeze if frozen
b = a.clone
assert_equal(a, b)
assert_not_same(a, b)
assert_equal(a.frozen?, b.frozen?)
assert_equal(a.tainted?, b.tainted?)
end
end
assert_equal("", File.read(IO::NULL).clone, '[ruby-dev:32819] reported by Kazuhiro NISHIYAMA')
end
def test_concat
assert_equal(S("world!"), S("world").concat(33))
assert_equal(S("world!"), S("world").concat(S('!')))
b = S("sn")
assert_equal(S("snsnsn"), b.concat(b, b))
bug7090 = '[ruby-core:47751]'
result = S("").force_encoding(Encoding::UTF_16LE)
result << 0x0300
expected = S("\u0300".encode(Encoding::UTF_16LE))
assert_equal(expected, result, bug7090)
assert_raise(TypeError) { 'foo' << :foo }
assert_raise(FrozenError) { 'foo'.freeze.concat('bar') }
end
def test_concat_literals
s="." * 50
assert_equal(Encoding::UTF_8, "#{s}x".encoding)
end
def test_count
a = S("hello world")
assert_equal(5, a.count(S("lo")))
assert_equal(2, a.count(S("lo"), S("o")))
assert_equal(4, a.count(S("hello"), S("^l")))
assert_equal(4, a.count(S("ej-m")))
assert_equal(0, S("y").count(S("a\\-z")))
assert_equal(5, "abc\u{3042 3044 3046}".count("^a"))
assert_equal(1, "abc\u{3042 3044 3046}".count("\u3042"))
assert_equal(5, "abc\u{3042 3044 3046}".count("^\u3042"))
assert_equal(2, "abc\u{3042 3044 3046}".count("a-z", "^a"))
assert_equal(0, "abc\u{3042 3044 3046}".count("a", "\u3042"))
assert_equal(0, "abc\u{3042 3044 3046}".count("\u3042", "a"))
assert_equal(0, "abc\u{3042 3044 3046}".count("\u3042", "\u3044"))
assert_equal(4, "abc\u{3042 3044 3046}".count("^a", "^\u3044"))
assert_equal(4, "abc\u{3042 3044 3046}".count("^\u3044", "^a"))
assert_equal(4, "abc\u{3042 3044 3046}".count("^\u3042", "^\u3044"))
assert_raise(ArgumentError) { "foo".count }
end
def test_crypt
assert_equal(S('aaGUC/JkO9/Sc'), S("mypassword").crypt(S("aa")))
assert_not_equal(S('aaGUC/JkO9/Sc'), S("mypassword").crypt(S("ab")))
assert_raise(ArgumentError) {S("mypassword").crypt(S(""))}
assert_raise(ArgumentError) {S("mypassword").crypt(S("\0a"))}
assert_raise(ArgumentError) {S("mypassword").crypt(S("a\0"))}
assert_raise(ArgumentError) {S("poison\u0000null").crypt(S("aa"))}
[Encoding::UTF_16BE, Encoding::UTF_16LE,
Encoding::UTF_32BE, Encoding::UTF_32LE].each do |enc|
assert_raise(ArgumentError) {S("mypassword").crypt(S("aa".encode(enc)))}
assert_raise(ArgumentError) {S("mypassword".encode(enc)).crypt(S("aa"))}
end
@cls == String and assert_no_memory_leak([], 's = ""', <<~'end;') # do
1000.times { s.crypt(-"..").clear }
end;
end
def test_delete
assert_equal(S("heo"), S("hello").delete(S("l"), S("lo")))
assert_equal(S("he"), S("hello").delete(S("lo")))
assert_equal(S("hell"), S("hello").delete(S("aeiou"), S("^e")))
assert_equal(S("ho"), S("hello").delete(S("ej-m")))
assert_equal("a".hash, "a\u0101".delete("\u0101").hash, '[ruby-talk:329267]')
assert_equal(true, "a\u0101".delete("\u0101").ascii_only?)
assert_equal(true, "a\u3041".delete("\u3041").ascii_only?)
assert_equal(false, "a\u3041\u3042".delete("\u3041").ascii_only?)
assert_equal("a", "abc\u{3042 3044 3046}".delete("^a"))
assert_equal("bc\u{3042 3044 3046}", "abc\u{3042 3044 3046}".delete("a"))
assert_equal("\u3042", "abc\u{3042 3044 3046}".delete("^\u3042"))
bug6160 = '[ruby-dev:45374]'
assert_equal("", '\\'.delete('\\'), bug6160)
end
def test_delete!
a = S("hello")
a.delete!(S("l"), S("lo"))
assert_equal(S("heo"), a)
a = S("hello")
a.delete!(S("lo"))
assert_equal(S("he"), a)
a = S("hello")
a.delete!(S("aeiou"), S("^e"))
assert_equal(S("hell"), a)
a = S("hello")
a.delete!(S("ej-m"))
assert_equal(S("ho"), a)
a = S("hello")
assert_nil(a.delete!(S("z")))
a = S("hello")
b = a.dup
a.delete!(S("lo"))
assert_equal(S("he"), a)
assert_equal(S("hello"), b)
a = S("hello")
a.delete!(S("^el"))
assert_equal(S("ell"), a)
assert_raise(ArgumentError) { S("foo").delete! }
end
def test_downcase
assert_equal(S("hello"), S("helLO").downcase)
assert_equal(S("hello"), S("hello").downcase)
assert_equal(S("hello"), S("HELLO").downcase)
assert_equal(S("abc hello 123"), S("abc HELLO 123").downcase)
end
def test_downcase!
a = S("helLO")
b = a.dup
assert_equal(S("hello"), a.downcase!)
assert_equal(S("hello"), a)
assert_equal(S("helLO"), b)
a=S("hello")
assert_nil(a.downcase!)
assert_equal(S("hello"), a)
end
def test_dump
a= S("Test") << 1 << 2 << 3 << 9 << 13 << 10
assert_equal(S('"Test\\x01\\x02\\x03\\t\\r\\n"'), a.dump)
b= S("\u{7F}")
assert_equal(S('"\\x7F"'), b.dump)
b= S("\u{AB}")
assert_equal(S('"\\u00AB"'), b.dump)
b= S("\u{ABC}")
assert_equal(S('"\\u0ABC"'), b.dump)
b= S("\uABCD")
assert_equal(S('"\\uABCD"'), b.dump)
b= S("\u{ABCDE}")
assert_equal(S('"\\u{ABCDE}"'), b.dump)
b= S("\u{10ABCD}")
assert_equal(S('"\\u{10ABCD}"'), b.dump)
end
def test_undump
a = S("Test") << 1 << 2 << 3 << 9 << 13 << 10
assert_equal(a, S('"Test\\x01\\x02\\x03\\t\\r\\n"').undump)
assert_equal(S("\\ca"), S('"\\ca"').undump)
assert_equal(S("\u{7F}"), S('"\\x7F"').undump)
assert_equal(S("\u{7F}A"), S('"\\x7FA"').undump)
assert_equal(S("\u{AB}"), S('"\\u00AB"').undump)
assert_equal(S("\u{ABC}"), S('"\\u0ABC"').undump)
assert_equal(S("\uABCD"), S('"\\uABCD"').undump)
assert_equal(S("\uABCD"), S('"\\uABCD"').undump)
assert_equal(S("\u{ABCDE}"), S('"\\u{ABCDE}"').undump)
assert_equal(S("\u{10ABCD}"), S('"\\u{10ABCD}"').undump)
assert_equal(S("\u{ABCDE 10ABCD}"), S('"\\u{ABCDE 10ABCD}"').undump)
assert_equal(S(""), S('"\\u{}"').undump)
assert_equal(S(""), S('"\\u{ }"').undump)
assert_equal(S("\u3042".encode("sjis")), S('"\x82\xA0"'.force_encoding("sjis")).undump)
assert_equal(S("\u8868".encode("sjis")), S("\"\\x95\\\\\"".force_encoding("sjis")).undump)
assert_equal(S("äöü"), S('"\u00E4\u00F6\u00FC"').undump)
assert_equal(S("äöü"), S('"\xC3\xA4\xC3\xB6\xC3\xBC"').undump)
assert_equal(Encoding::UTF_8, S('"\\u3042"').encode(Encoding::EUC_JP).undump.encoding)
assert_equal("abc".encode(Encoding::UTF_16LE),
'"a\x00b\x00c\x00".force_encoding("UTF-16LE")'.undump)
assert_equal('\#', '"\\\\#"'.undump)
assert_equal('\#{', '"\\\\\#{"'.undump)
assert_raise(RuntimeError) { S('\u3042').undump }
assert_raise(RuntimeError) { S('"\x82\xA0\u3042"'.force_encoding("SJIS")).undump }
assert_raise(RuntimeError) { S('"\u3042\x82\xA0"'.force_encoding("SJIS")).undump }
assert_raise(RuntimeError) { S('"".force_encoding()').undump }
assert_raise(RuntimeError) { S('"".force_encoding("').undump }
assert_raise(RuntimeError) { S('"".force_encoding("UNKNOWN")').undump }
assert_raise(RuntimeError) { S('"\u3042".force_encoding("UTF-16LE")').undump }
assert_raise(RuntimeError) { S('"\x00\x00".force_encoding("UTF-16LE")"').undump }
assert_raise(RuntimeError) { S('"\x00\x00".force_encoding("'+("a"*9999999)+'")"').undump }
assert_raise(RuntimeError) { S(%("\u00E4")).undump }
assert_raise(RuntimeError) { S('"').undump }
assert_raise(RuntimeError) { S('"""').undump }
assert_raise(RuntimeError) { S('""""').undump }
assert_raise(RuntimeError) { S('"a').undump }
assert_raise(RuntimeError) { S('"\u"').undump }
assert_raise(RuntimeError) { S('"\u{"').undump }
assert_raise(RuntimeError) { S('"\u304"').undump }
assert_raise(RuntimeError) { S('"\u304Z"').undump }
assert_raise(RuntimeError) { S('"\udfff"').undump }
assert_raise(RuntimeError) { S('"\u{dfff}"').undump }
assert_raise(RuntimeError) { S('"\u{3042"').undump }
assert_raise(RuntimeError) { S('"\u{3042 "').undump }
assert_raise(RuntimeError) { S('"\u{110000}"').undump }
assert_raise(RuntimeError) { S('"\u{1234567}"').undump }
assert_raise(RuntimeError) { S('"\x"').undump }
assert_raise(RuntimeError) { S('"\xA"').undump }
assert_raise(RuntimeError) { S('"\\"').undump }
assert_raise(RuntimeError) { S(%("\0")).undump }
end
def test_dup
for taint in [ false, true ]
for frozen in [ false, true ]
a = S("hello")
a.taint if taint
a.freeze if frozen
b = a.dup
assert_equal(a, b)
assert_not_same(a, b)
assert_not_predicate(b, :frozen?)
assert_equal(a.tainted?, b.tainted?)
end
end
end
def test_each
save = $/
$/ = "\n"
res=[]
S("hello\nworld").lines.each {|x| res << x}
assert_equal(S("hello\n"), res[0])
assert_equal(S("world"), res[1])
res=[]
S("hello\n\n\nworld").lines(S('')).each {|x| res << x}
assert_equal(S("hello\n\n"), res[0])
assert_equal(S("world"), res[1])
$/ = "!"
res=[]
S("hello!world").lines.each {|x| res << x}
assert_equal(S("hello!"), res[0])
assert_equal(S("world"), res[1])
ensure
$/ = save
end
def test_each_byte
s = S("ABC")
res = []
assert_equal s.object_id, s.each_byte {|x| res << x }.object_id
assert_equal(65, res[0])
assert_equal(66, res[1])
assert_equal(67, res[2])
assert_equal 65, s.each_byte.next
end
def test_bytes
s = S("ABC")
assert_equal [65, 66, 67], s.bytes
if ENUMERATOR_WANTARRAY
assert_warn(/block not used/) {
assert_equal [65, 66, 67], s.bytes {}
}
else
warning = /passing a block to String#bytes is deprecated/
assert_warning(warning) {
res = []
assert_equal s.object_id, s.bytes {|x| res << x }.object_id
assert_equal(65, res[0])
assert_equal(66, res[1])
assert_equal(67, res[2])
}
assert_warning(warning) {
s = S("ABC")
res = []
assert_same s, s.bytes {|x| res << x }
assert_equal [65, 66, 67], res
}
end
end
def test_each_codepoint
# Single byte optimization
assert_equal 65, S("ABC").each_codepoint.next
s = S("\u3042\u3044\u3046")
res = []
assert_equal s.object_id, s.each_codepoint {|x| res << x }.object_id
assert_equal(0x3042, res[0])
assert_equal(0x3044, res[1])
assert_equal(0x3046, res[2])
assert_equal 0x3042, s.each_codepoint.next
end
def test_codepoints
# Single byte optimization
assert_equal [65, 66, 67], S("ABC").codepoints
s = S("\u3042\u3044\u3046")
assert_equal [0x3042, 0x3044, 0x3046], s.codepoints
if ENUMERATOR_WANTARRAY
assert_warn(/block not used/) {
assert_equal [0x3042, 0x3044, 0x3046], s.codepoints {}
}
else
warning = /passing a block to String#codepoints is deprecated/
assert_warning(warning) {
res = []
assert_equal s.object_id, s.codepoints {|x| res << x }.object_id
assert_equal(0x3042, res[0])
assert_equal(0x3044, res[1])
assert_equal(0x3046, res[2])
}
assert_warning(warning) {
s = S("ABC")
res = []
assert_same s, s.codepoints {|x| res << x }
assert_equal [65, 66, 67], res
}
end
end
def test_each_char
s = S("ABC")
res = []
assert_equal s.object_id, s.each_char {|x| res << x }.object_id
assert_equal("A", res[0])
assert_equal("B", res[1])
assert_equal("C", res[2])
assert_equal "A", S("ABC").each_char.next
end
def test_chars
s = S("ABC")
assert_equal ["A", "B", "C"], s.chars
if ENUMERATOR_WANTARRAY
assert_warn(/block not used/) {
assert_equal ["A", "B", "C"], s.chars {}
}
else
warning = /passing a block to String#chars is deprecated/
assert_warning(warning) {
res = []
assert_equal s.object_id, s.chars {|x| res << x }.object_id
assert_equal("A", res[0])
assert_equal("B", res[1])
assert_equal("C", res[2])
}
end
end
def test_each_grapheme_cluster
[
"\u{20 200d}",
"\u{600 600}",
"\u{600 20}",
"\u{261d 1F3FB}",
"\u{1f600}",
"\u{20 308}",
"\u{1F477 1F3FF 200D 2640 FE0F}",
"\u{1F468 200D 1F393}",
"\u{1F46F 200D 2642 FE0F}",
"\u{1f469 200d 2764 fe0f 200d 1f469}",
].each do |g|
assert_equal [g], g.each_grapheme_cluster.to_a
end
assert_equal ["\u000A", "\u0308"], "\u{a 308}".each_grapheme_cluster.to_a
assert_equal ["\u000D", "\u0308"], "\u{d 308}".each_grapheme_cluster.to_a
assert_equal ["a", "b", "c"], "abc".b.each_grapheme_cluster.to_a
s = ("x"+"\u{10ABCD}"*250000)
assert_empty(s.each_grapheme_cluster {s.clear})
end
def test_grapheme_clusters
[
"\u{20 200d}",
"\u{600 600}",
"\u{600 20}",
"\u{261d 1F3FB}",
"\u{1f600}",
"\u{20 308}",
"\u{1F477 1F3FF 200D 2640 FE0F}",
"\u{1F468 200D 1F393}",
"\u{1F46F 200D 2642 FE0F}",
"\u{1f469 200d 2764 fe0f 200d 1f469}",
].each do |g|
assert_equal [g], g.grapheme_clusters
end
assert_equal ["\u000A", "\u0308"], "\u{a 308}".grapheme_clusters
assert_equal ["\u000D", "\u0308"], "\u{d 308}".grapheme_clusters
assert_equal ["a", "b", "c"], "abc".b.grapheme_clusters
if ENUMERATOR_WANTARRAY
assert_warn(/block not used/) {
assert_equal ["A", "B", "C"], "ABC".grapheme_clusters {}
}
else
warning = /passing a block to String#grapheme_clusters is deprecated/
assert_warning(warning) {
s = "ABC".b
res = []
assert_same s, s.grapheme_clusters {|x| res << x }
assert_equal("A", res[0])
assert_equal("B", res[1])
assert_equal("C", res[2])
}
end
end
def test_each_line
save = $/
$/ = "\n"
res=[]
S("hello\nworld").each_line {|x| res << x}
assert_equal(S("hello\n"), res[0])
assert_equal(S("world"), res[1])
res=[]
S("hello\n\n\nworld").each_line(S('')) {|x| res << x}
assert_equal(S("hello\n\n"), res[0])
assert_equal(S("world"), res[1])
res=[]
S("hello\r\n\r\nworld").each_line(S('')) {|x| res << x}
assert_equal(S("hello\r\n\r\n"), res[0])
assert_equal(S("world"), res[1])
$/ = "!"
res=[]
S("hello!world").each_line {|x| res << x}
assert_equal(S("hello!"), res[0])
assert_equal(S("world"), res[1])
$/ = "ab"
res=[]
S("a").lines.each {|x| res << x}
assert_equal(1, res.size)
assert_equal(S("a"), res[0])
$/ = save
s = nil
"foo\nbar".each_line(nil) {|s2| s = s2 }
assert_equal("foo\nbar", s)
assert_equal "hello\n", S("hello\nworld").each_line.next
assert_equal "hello\nworld", S("hello\nworld").each_line(nil).next
bug7646 = "[ruby-dev:46827]"
assert_nothing_raised(bug7646) do
"\n\u0100".each_line("\n") {}
end
ensure
$/ = save
end
def test_each_line_chomp
res = []
S("hello\nworld").each_line("\n", chomp: true) {|x| res << x}
assert_equal(S("hello"), res[0])
assert_equal(S("world"), res[1])
res = []
S("hello\n\n\nworld").each_line(S(''), chomp: true) {|x| res << x}
assert_equal(S("hello\n"), res[0])
assert_equal(S("world"), res[1])
res = []
S("hello\r\n\r\nworld").each_line(S(''), chomp: true) {|x| res << x}
assert_equal(S("hello\r\n"), res[0])
assert_equal(S("world"), res[1])
res = []
S("hello!world").each_line(S('!'), chomp: true) {|x| res << x}
assert_equal(S("hello"), res[0])
assert_equal(S("world"), res[1])
res = []
S("a").each_line(S('ab'), chomp: true).each {|x| res << x}
assert_equal(1, res.size)
assert_equal(S("a"), res[0])
s = nil
"foo\nbar".each_line(nil, chomp: true) {|s2| s = s2 }
assert_equal("foo\nbar", s)
assert_equal "hello", S("hello\nworld").each_line(chomp: true).next
assert_equal "hello\nworld", S("hello\nworld").each_line(nil, chomp: true).next
res = []
S("").each_line(chomp: true) {|x| res << x}
assert_equal([], res)
res = []
S("\n").each_line(chomp: true) {|x| res << x}
assert_equal([S("")], res)
res = []
S("\r\n").each_line(chomp: true) {|x| res << x}
assert_equal([S("")], res)
end
def test_lines
s = S("hello\nworld")
assert_equal ["hello\n", "world"], s.lines
assert_equal ["hello\nworld"], s.lines(nil)
if ENUMERATOR_WANTARRAY
assert_warn(/block not used/) {
assert_equal ["hello\n", "world"], s.lines {}
}
else
assert_warning(/passing a block to String#lines is deprecated/) {
res = []
assert_equal s.object_id, s.lines {|x| res << x }.object_id
assert_equal(S("hello\n"), res[0])
assert_equal(S("world"), res[1])
}
end
end
def test_empty?
assert_empty(S(""))
assert_not_empty(S("not"))
end
def test_end_with?
assert_send([S("hello"), :end_with?, S("llo")])
assert_not_send([S("hello"), :end_with?, S("ll")])
assert_send([S("hello"), :end_with?, S("el"), S("lo")])
bug5536 = '[ruby-core:40623]'
assert_raise(TypeError, bug5536) {S("str").end_with? :not_convertible_to_string}
end
def test_eql?
a = S("hello")
assert_operator(a, :eql?, S("hello"))
assert_operator(a, :eql?, a)
end
def test_gsub
assert_equal(S("h*ll*"), S("hello").gsub(/[aeiou]/, S('*')))
assert_equal(S("h<e>ll<o>"), S("hello").gsub(/([aeiou])/, S('<\1>')))
assert_equal(S("h e l l o "),
S("hello").gsub(/./) { |s| s[0].to_s + S(' ')})
assert_equal(S("HELL-o"),
S("hello").gsub(/(hell)(.)/) { |s| $1.upcase + S('-') + $2 })
assert_equal(S("<>h<>e<>l<>l<>o<>"), S("hello").gsub(S(''), S('<\0>')))
a = S("hello")
a.taint
assert_predicate(a.gsub(/./, S('X')), :tainted?)
assert_equal("z", "abc".gsub(/./, "a" => "z"), "moved from btest/knownbug")
assert_raise(ArgumentError) { "foo".gsub }
end
def test_gsub_encoding
a = S("hello world")
a.force_encoding Encoding::UTF_8
b = S("hi")
b.force_encoding Encoding::US_ASCII
assert_equal Encoding::UTF_8, a.gsub(/hello/, b).encoding
c = S("everybody")
c.force_encoding Encoding::US_ASCII
assert_equal Encoding::UTF_8, a.gsub(/world/, c).encoding
assert_equal S("a\u{e9}apos<"), S("a\u{e9}'<").gsub("'", "apos")
bug9849 = '[ruby-core:62669] [Bug #9849]'
assert_equal S("\u{3042 3042 3042}!foo!"), S("\u{3042 3042 3042}/foo/").gsub("/", "!"), bug9849
end
def test_gsub!
a = S("hello")
b = a.dup
a.gsub!(/[aeiou]/, S('*'))
assert_equal(S("h*ll*"), a)
assert_equal(S("hello"), b)
a = S("hello")
a.gsub!(/([aeiou])/, S('<\1>'))
assert_equal(S("h<e>ll<o>"), a)
a = S("hello")
a.gsub!(/./) { |s| s[0].to_s + S(' ')}
assert_equal(S("h e l l o "), a)
a = S("hello")
a.gsub!(/(hell)(.)/) { |s| $1.upcase + S('-') + $2 }
assert_equal(S("HELL-o"), a)
r = S('X')
r.taint
a.gsub!(/./, r)
assert_predicate(a, :tainted?)
a = S("hello")
assert_nil(a.sub!(S('X'), S('Y')))
end
def test_sub_hash
assert_equal('azc', 'abc'.sub(/b/, "b" => "z"))
assert_equal('ac', 'abc'.sub(/b/, {}))
assert_equal('a1c', 'abc'.sub(/b/, "b" => 1))
assert_equal('aBc', 'abc'.sub(/b/, Hash.new {|h, k| k.upcase }))
assert_equal('a[\&]c', 'abc'.sub(/b/, "b" => '[\&]'))
assert_equal('aBcabc', 'abcabc'.sub(/b/, Hash.new {|h, k| h[k] = k.upcase }))
assert_equal('aBcdef', 'abcdef'.sub(/de|b/, "b" => "B", "de" => "DE"))
end
def test_gsub_hash
assert_equal('azc', 'abc'.gsub(/b/, "b" => "z"))
assert_equal('ac', 'abc'.gsub(/b/, {}))
assert_equal('a1c', 'abc'.gsub(/b/, "b" => 1))
assert_equal('aBc', 'abc'.gsub(/b/, Hash.new {|h, k| k.upcase }))
assert_equal('a[\&]c', 'abc'.gsub(/b/, "b" => '[\&]'))
assert_equal('aBcaBc', 'abcabc'.gsub(/b/, Hash.new {|h, k| h[k] = k.upcase }))
assert_equal('aBcDEf', 'abcdef'.gsub(/de|b/, "b" => "B", "de" => "DE"))
end
def test_hash
assert_equal(S("hello").hash, S("hello").hash)
assert_not_equal(S("hello").hash, S("helLO").hash)
bug4104 = '[ruby-core:33500]'
assert_not_equal(S("a").hash, S("a\0").hash, bug4104)
bug9172 = '[ruby-core:58658] [Bug #9172]'
assert_not_equal(S("sub-setter").hash, S("discover").hash, bug9172)
end
def test_hex
assert_equal(255, S("0xff").hex)
assert_equal(-255, S("-0xff").hex)
assert_equal(255, S("ff").hex)
assert_equal(-255, S("-ff").hex)
assert_equal(0, S("-ralph").hex)
assert_equal(-15, S("-fred").hex)
assert_equal(15, S("fred").hex)
end
def test_include?
assert_include(S("foobar"), ?f)
assert_include(S("foobar"), S("foo"))
assert_not_include(S("foobar"), S("baz"))
assert_not_include(S("foobar"), ?z)
end
def test_index
assert_equal(0, S("hello").index(?h))
assert_equal(1, S("hello").index(S("ell")))
assert_equal(2, S("hello").index(/ll./))
assert_equal(3, S("hello").index(?l, 3))
assert_equal(3, S("hello").index(S("l"), 3))
assert_equal(3, S("hello").index(/l./, 3))
assert_nil(S("hello").index(?z, 3))
assert_nil(S("hello").index(S("z"), 3))
assert_nil(S("hello").index(/z./, 3))
assert_nil(S("hello").index(?z))
assert_nil(S("hello").index(S("z")))
assert_nil(S("hello").index(/z./))
assert_equal(0, S("").index(S("")))
assert_equal(0, S("").index(//))
assert_nil(S("").index(S("hello")))
assert_nil(S("").index(/hello/))
assert_equal(0, S("hello").index(S("")))
assert_equal(0, S("hello").index(//))
s = S("long") * 1000 << "x"
assert_nil(s.index(S("y")))
assert_equal(4 * 1000, s.index(S("x")))
s << "yx"
assert_equal(4 * 1000, s.index(S("x")))
assert_equal(4 * 1000, s.index(S("xyx")))
o = Object.new
def o.to_str; "bar"; end
assert_equal(3, "foobarbarbaz".index(o))
assert_raise(TypeError) { "foo".index(Object.new) }
assert_nil("foo".index(//, -100))
assert_nil($~)
end
def test_insert
assert_equal("Xabcd", S("abcd").insert(0, 'X'))
assert_equal("abcXd", S("abcd").insert(3, 'X'))
assert_equal("abcdX", S("abcd").insert(4, 'X'))
assert_equal("abXcd", S("abcd").insert(-3, 'X'))
assert_equal("abcdX", S("abcd").insert(-1, 'X'))
end
def test_intern
assert_equal(:koala, S("koala").intern)
assert_not_equal(:koala, S("Koala").intern)
end
def test_length
assert_equal(0, S("").length)
assert_equal(4, S("1234").length)
assert_equal(6, S("1234\r\n").length)
assert_equal(7, S("\0011234\r\n").length)
end
def test_ljust
assert_equal(S("hello"), S("hello").ljust(4))
assert_equal(S("hello "), S("hello").ljust(11))
assert_equal(S("ababababab"), S("").ljust(10, "ab"), Bug2463)
assert_equal(S("abababababa"), S("").ljust(11, "ab"), Bug2463)
end
def test_next
assert_equal(S("abd"), S("abc").next)
assert_equal(S("z"), S("y").next)
assert_equal(S("aaa"), S("zz").next)
assert_equal(S("124"), S("123").next)
assert_equal(S("1000"), S("999").next)
assert_equal(S("2000aaa"), S("1999zzz").next)
assert_equal(S("AAAAA000"), S("ZZZZ999").next)
assert_equal(S("*+"), S("**").next)
assert_equal(S("!"), S(" ").next)
assert_equal(S(""), S("").next)
end
def test_next!
a = S("abc")
b = a.dup
assert_equal(S("abd"), a.next!)
assert_equal(S("abd"), a)
assert_equal(S("abc"), b)
a = S("y")
assert_equal(S("z"), a.next!)
assert_equal(S("z"), a)
a = S("zz")
assert_equal(S("aaa"), a.next!)
assert_equal(S("aaa"), a)
a = S("123")
assert_equal(S("124"), a.next!)
assert_equal(S("124"), a)
a = S("999")
assert_equal(S("1000"), a.next!)
assert_equal(S("1000"), a)
a = S("1999zzz")
assert_equal(S("2000aaa"), a.next!)
assert_equal(S("2000aaa"), a)
a = S("ZZZZ999")
assert_equal(S("AAAAA000"), a.next!)
assert_equal(S("AAAAA000"), a)
a = S("**")
assert_equal(S("*+"), a.next!)
assert_equal(S("*+"), a)
a = S(" ")
assert_equal(S("!"), a.next!)
assert_equal(S("!"), a)
end
def test_oct
assert_equal(255, S("0377").oct)
assert_equal(255, S("377").oct)
assert_equal(-255, S("-0377").oct)
assert_equal(-255, S("-377").oct)
assert_equal(0, S("OO").oct)
assert_equal(24, S("030OO").oct)
end
def test_replace
a = S("foo")
assert_equal(S("f"), a.replace(S("f")))
a = S("foo")
assert_equal(S("foobar"), a.replace(S("foobar")))
a = S("foo")
a.taint
b = a.replace(S("xyz"))
assert_equal(S("xyz"), b)
assert_predicate(b, :tainted?)
s = "foo" * 100
s2 = ("bar" * 100).dup
s.replace(s2)
assert_equal(s2, s)
s2 = ["foo"].pack("p")
s.replace(s2)
assert_equal(s2, s)
fs = "".freeze
assert_raise(FrozenError) { fs.replace("a") }
assert_raise(FrozenError) { fs.replace(fs) }
assert_raise(ArgumentError) { fs.replace() }
assert_raise(FrozenError) { fs.replace(42) }
end
def test_reverse
assert_equal(S("beta"), S("ateb").reverse)
assert_equal(S("madamImadam"), S("madamImadam").reverse)
a=S("beta")
assert_equal(S("ateb"), a.reverse)
assert_equal(S("beta"), a)
end
def test_reverse!
a = S("beta")
b = a.dup
assert_equal(S("ateb"), a.reverse!)
assert_equal(S("ateb"), a)
assert_equal(S("beta"), b)
assert_equal(S("madamImadam"), S("madamImadam").reverse!)
a = S("madamImadam")
assert_equal(S("madamImadam"), a.reverse!) # ??
assert_equal(S("madamImadam"), a)
end
def test_rindex
assert_equal(3, S("hello").rindex(?l))
assert_equal(6, S("ell, hello").rindex(S("ell")))
assert_equal(7, S("ell, hello").rindex(/ll./))
assert_equal(3, S("hello,lo").rindex(?l, 3))
assert_equal(3, S("hello,lo").rindex(S("l"), 3))
assert_equal(3, S("hello,lo").rindex(/l./, 3))
assert_nil(S("hello").rindex(?z, 3))
assert_nil(S("hello").rindex(S("z"), 3))
assert_nil(S("hello").rindex(/z./, 3))
assert_nil(S("hello").rindex(?z))
assert_nil(S("hello").rindex(S("z")))
assert_nil(S("hello").rindex(/z./))
o = Object.new
def o.to_str; "bar"; end
assert_equal(6, "foobarbarbaz".rindex(o))
assert_raise(TypeError) { "foo".rindex(Object.new) }
assert_nil("foo".rindex(//, -100))
assert_nil($~)
assert_equal(3, "foo".rindex(//))
assert_equal([3, 3], $~.offset(0))
end
def test_rjust
assert_equal(S("hello"), S("hello").rjust(4))
assert_equal(S(" hello"), S("hello").rjust(11))
assert_equal(S("ababababab"), S("").rjust(10, "ab"), Bug2463)
assert_equal(S("abababababa"), S("").rjust(11, "ab"), Bug2463)
end
def test_scan
a = S("cruel world")
assert_equal([S("cruel"), S("world")],a.scan(/\w+/))
assert_equal([S("cru"), S("el "), S("wor")],a.scan(/.../))
assert_equal([[S("cru")], [S("el ")], [S("wor")]],a.scan(/(...)/))
res = []
a.scan(/\w+/) { |w| res << w }
assert_equal([S("cruel"), S("world") ],res)
res = []
a.scan(/.../) { |w| res << w }
assert_equal([S("cru"), S("el "), S("wor")],res)
res = []
a.scan(/(...)/) { |w| res << w }
assert_equal([[S("cru")], [S("el ")], [S("wor")]],res)
a = S("hello")
a.taint
res = []
a.scan(/./) { |w| res << w }
assert_predicate(res[0], :tainted?, '[ruby-core:33338] #4087')
/h/ =~ a
a.scan(/x/)
assert_nil($~)
/h/ =~ a
a.scan('x')
assert_nil($~)
assert_equal(3, S("hello hello hello").scan("hello".taint).count(&:tainted?))
end
def test_size
assert_equal(0, S("").size)
assert_equal(4, S("1234").size)
assert_equal(6, S("1234\r\n").size)
assert_equal(7, S("\0011234\r\n").size)
end
def test_slice
assert_equal(?A, S("AooBar").slice(0))
assert_equal(?B, S("FooBaB").slice(-1))
assert_nil(S("FooBar").slice(6))
assert_nil(S("FooBar").slice(-7))
assert_equal(S("Foo"), S("FooBar").slice(0,3))
assert_equal(S(S("Bar")), S("FooBar").slice(-3,3))
assert_nil(S("FooBar").slice(7,2)) # Maybe should be six?
assert_nil(S("FooBar").slice(-7,10))
assert_equal(S("Foo"), S("FooBar").slice(0..2))
assert_equal(S("Bar"), S("FooBar").slice(-3..-1))
assert_equal(S(""), S("FooBar").slice(6..2))
assert_nil(S("FooBar").slice(-10..-7))
assert_equal(S("Foo"), S("FooBar").slice(/^F../))
assert_equal(S("Bar"), S("FooBar").slice(/..r$/))
assert_nil(S("FooBar").slice(/xyzzy/))
assert_nil(S("FooBar").slice(/plugh/))
assert_equal(S("Foo"), S("FooBar").slice(S("Foo")))
assert_equal(S("Bar"), S("FooBar").slice(S("Bar")))
assert_nil(S("FooBar").slice(S("xyzzy")))
assert_nil(S("FooBar").slice(S("plugh")))
bug9882 = '[ruby-core:62842] [Bug #9882]'
substr = S("\u{30c6 30b9 30c8 2019}#{bug9882}").slice(4..-1)
assert_equal(S(bug9882).hash, substr.hash, bug9882)
assert_predicate(substr, :ascii_only?, bug9882)
end
def test_slice!
a = S("AooBar")
b = a.dup
assert_equal(?A, a.slice!(0))
assert_equal(S("ooBar"), a)
assert_equal(S("AooBar"), b)
a = S("FooBar")
assert_equal(?r,a.slice!(-1))
assert_equal(S("FooBa"), a)
a = S("FooBar")
if @aref_slicebang_silent
assert_nil( a.slice!(6) )
else
assert_raise(IndexError) { a.slice!(6) }
end
assert_equal(S("FooBar"), a)
if @aref_slicebang_silent
assert_nil( a.slice!(-7) )
else
assert_raise(IndexError) { a.slice!(-7) }
end
assert_equal(S("FooBar"), a)
a = S("FooBar")
assert_equal(S("Foo"), a.slice!(0,3))
assert_equal(S("Bar"), a)
a = S("FooBar")
assert_equal(S("Bar"), a.slice!(-3,3))
assert_equal(S("Foo"), a)
a=S("FooBar")
if @aref_slicebang_silent
assert_nil(a.slice!(7,2)) # Maybe should be six?
else
assert_raise(IndexError) {a.slice!(7,2)} # Maybe should be six?
end
assert_equal(S("FooBar"), a)
if @aref_slicebang_silent
assert_nil(a.slice!(-7,10))
else
assert_raise(IndexError) {a.slice!(-7,10)}
end
assert_equal(S("FooBar"), a)
a=S("FooBar")
assert_equal(S("Foo"), a.slice!(0..2))
assert_equal(S("Bar"), a)
a=S("FooBar")
assert_equal(S("Bar"), a.slice!(-3..-1))
assert_equal(S("Foo"), a)
a=S("FooBar")
if @aref_slicebang_silent
assert_equal(S(""), a.slice!(6..2))
else
assert_raise(RangeError) {a.slice!(6..2)}
end
assert_equal(S("FooBar"), a)
if @aref_slicebang_silent
assert_nil(a.slice!(-10..-7))
else
assert_raise(RangeError) {a.slice!(-10..-7)}
end
assert_equal(S("FooBar"), a)
a=S("FooBar")
assert_equal(S("Foo"), a.slice!(/^F../))
assert_equal(S("Bar"), a)
a=S("FooBar")
assert_equal(S("Bar"), a.slice!(/..r$/))
assert_equal(S("Foo"), a)
a=S("FooBar")
if @aref_slicebang_silent
assert_nil(a.slice!(/xyzzy/))
else
assert_raise(IndexError) {a.slice!(/xyzzy/)}
end
assert_equal(S("FooBar"), a)
if @aref_slicebang_silent
assert_nil(a.slice!(/plugh/))
else
assert_raise(IndexError) {a.slice!(/plugh/)}
end
assert_equal(S("FooBar"), a)
a=S("FooBar")
assert_equal(S("Foo"), a.slice!(S("Foo")))
assert_equal(S("Bar"), a)
a=S("FooBar")
assert_equal(S("Bar"), a.slice!(S("Bar")))
assert_equal(S("Foo"), a)
assert_raise(ArgumentError) { "foo".slice! }
end
def test_split
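    # save the global field separator and clear it so split exercises the default
    # whitespace behaviour; the ensure clause at the end of the method restores it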
fs, $; = $;, nil
assert_equal([S("a"), S("b"), S("c")], S(" a b\t c ").split)
assert_equal([S("a"), S("b"), S("c")], S(" a b\t c ").split(S(" ")))
assert_equal([S(" a "), S(" b "), S(" c ")], S(" a | b | c ").split(S("|")))
assert_equal([S("a"), S("b"), S("c")], S("aXXbXXcXX").split(/X./))
assert_equal([S("a"), S("b"), S("c")], S("abc").split(//))
assert_equal([S("a|b|c")], S("a|b|c").split(S('|'), 1))
assert_equal([S("a"), S("b|c")], S("a|b|c").split(S('|'), 2))
assert_equal([S("a"), S("b"), S("c")], S("a|b|c").split(S('|'), 3))
assert_equal([S("a"), S("b"), S("c"), S("")], S("a|b|c|").split(S('|'), -1))
assert_equal([S("a"), S("b"), S("c"), S(""), S("")], S("a|b|c||").split(S('|'), -1))
assert_equal([S("a"), S(""), S("b"), S("c")], S("a||b|c|").split(S('|')))
assert_equal([S("a"), S(""), S("b"), S("c"), S("")], S("a||b|c|").split(S('|'), -1))
assert_equal([], "".split(//, 1))
assert_equal("[2, 3]", [1,2,3].slice!(1,10000).inspect, "moved from btest/knownbug")
ensure
$; = fs
end
def test_fs
assert_raise_with_message(TypeError, /\$;/) {
$; = []
}
assert_separately([], "#{<<~"begin;"}\n#{<<~'end;'}")
bug = '[ruby-core:79582] $; must not be GCed'
begin;
$; = " "
$a = nil
alias $; $a
alias $-F $a
GC.start
assert_equal([], "".split, bug)
end;
end
def test_split_encoding
bug6206 = '[ruby-dev:45441]'
Encoding.list.each do |enc|
next unless enc.ascii_compatible?
s = S("a:".force_encoding(enc))
assert_equal([enc]*2, s.split(":", 2).map(&:encoding), bug6206)
end
end
def test_split_wchar
bug8642 = '[ruby-core:56036] [Bug #8642]'
[
Encoding::UTF_16BE, Encoding::UTF_16LE,
Encoding::UTF_32BE, Encoding::UTF_32LE,
].each do |enc|
s = S("abc,def".encode(enc))
assert_equal(["abc", "def"].map {|c| c.encode(enc)},
s.split(",".encode(enc)),
"#{bug8642} in #{enc.name}")
end
end
def test_split_invalid_sequence
bug10886 = '[ruby-core:68229] [Bug #10886]'
broken = S("\xa1".force_encoding("utf-8"))
assert_raise(ArgumentError, bug10886) {
S("a,b").split(broken)
}
end
def test_split_invalid_argument
assert_raise(TypeError) {
S("a,b").split(BasicObject.new)
}
end
def test_split_dupped
s = "abc"
s.split("b", 1).map(&:upcase!)
assert_equal("abc", s)
end
def test_squeeze
assert_equal(S("abc"), S("aaabbbbccc").squeeze)
assert_equal(S("aa bb cc"), S("aa bb cc").squeeze(S(" ")))
assert_equal(S("BxTyWz"), S("BxxxTyyyWzzzzz").squeeze(S("a-z")))
end
def test_squeeze!
a = S("aaabbbbccc")
b = a.dup
assert_equal(S("abc"), a.squeeze!)
assert_equal(S("abc"), a)
assert_equal(S("aaabbbbccc"), b)
a = S("aa bb cc")
assert_equal(S("aa bb cc"), a.squeeze!(S(" ")))
assert_equal(S("aa bb cc"), a)
a = S("BxxxTyyyWzzzzz")
assert_equal(S("BxTyWz"), a.squeeze!(S("a-z")))
assert_equal(S("BxTyWz"), a)
a=S("The quick brown fox")
assert_nil(a.squeeze!)
end
def test_start_with?
assert_send([S("hello"), :start_with?, S("hel")])
assert_not_send([S("hello"), :start_with?, S("el")])
assert_send([S("hello"), :start_with?, S("el"), S("he")])
bug5536 = '[ruby-core:40623]'
assert_raise(TypeError, bug5536) {S("str").start_with? :not_convertible_to_string}
assert_equal(true, "hello".start_with?(/hel/))
assert_equal("hel", $&)
assert_equal(false, "hello".start_with?(/el/))
assert_nil($&)
end
def test_strip
assert_equal(S("x"), S(" x ").strip)
assert_equal(S("x"), S(" \n\r\t x \t\r\n\n ").strip)
assert_equal("0b0 ".force_encoding("UTF-16BE"),
"\x00 0b0 ".force_encoding("UTF-16BE").strip)
assert_equal("0\x000b0 ".force_encoding("UTF-16BE"),
"0\x000b0 ".force_encoding("UTF-16BE").strip)
end
def test_strip!
a = S(" x ")
b = a.dup
assert_equal(S("x") ,a.strip!)
assert_equal(S("x") ,a)
assert_equal(S(" x "), b)
a = S(" \n\r\t x \t\r\n\n ")
assert_equal(S("x"), a.strip!)
assert_equal(S("x"), a)
a = S("x")
assert_nil(a.strip!)
assert_equal(S("x") ,a)
end
def test_sub
assert_equal(S("h*llo"), S("hello").sub(/[aeiou]/, S('*')))
assert_equal(S("h<e>llo"), S("hello").sub(/([aeiou])/, S('<\1>')))
assert_equal(S("h ello"), S("hello").sub(/./) {
|s| s[0].to_s + S(' ')})
assert_equal(S("HELL-o"), S("hello").sub(/(hell)(.)/) {
|s| $1.upcase + S('-') + $2
})
assert_equal(S("h<e>llo"), S("hello").sub('e', S('<\0>')))
assert_equal(S("a\\aba"), S("ababa").sub(/b/, '\\'))
assert_equal(S("ab\\aba"), S("ababa").sub(/(b)/, '\1\\'))
assert_equal(S("ababa"), S("ababa").sub(/(b)/, '\1'))
assert_equal(S("ababa"), S("ababa").sub(/(b)/, '\\1'))
assert_equal(S("a\\1aba"), S("ababa").sub(/(b)/, '\\\1'))
assert_equal(S("a\\1aba"), S("ababa").sub(/(b)/, '\\\\1'))
assert_equal(S("a\\baba"), S("ababa").sub(/(b)/, '\\\\\1'))
assert_equal(S("a--ababababababababab"),
S("abababababababababab").sub(/(b)/, '-\9-'))
assert_equal(S("1-b-0"),
S("1b2b3b4b5b6b7b8b9b0").
sub(/(b).(b).(b).(b).(b).(b).(b).(b).(b)/, '-\9-'))
assert_equal(S("1-b-0"),
S("1b2b3b4b5b6b7b8b9b0").
sub(/(b).(b).(b).(b).(b).(b).(b).(b).(b)/, '-\\9-'))
assert_equal(S("1-\\9-0"),
S("1b2b3b4b5b6b7b8b9b0").
sub(/(b).(b).(b).(b).(b).(b).(b).(b).(b)/, '-\\\9-'))
assert_equal(S("k"),
S("1a2b3c4d5e6f7g8h9iAjBk").
sub(/.(.).(.).(.).(.).(.).(.).(.).(.).(.).(.).(.)/, '\+'))
assert_equal(S("ab\\aba"), S("ababa").sub(/b/, '\&\\'))
assert_equal(S("ababa"), S("ababa").sub(/b/, '\&'))
assert_equal(S("ababa"), S("ababa").sub(/b/, '\\&'))
assert_equal(S("a\\&aba"), S("ababa").sub(/b/, '\\\&'))
assert_equal(S("a\\&aba"), S("ababa").sub(/b/, '\\\\&'))
assert_equal(S("a\\baba"), S("ababa").sub(/b/, '\\\\\&'))
a = S("hello")
a.taint
x = a.sub(/./, S('X'))
assert_predicate(x, :tainted?)
o = Object.new
def o.to_str; "bar"; end
assert_equal("fooBARbaz", "foobarbaz".sub(o, "BAR"))
assert_raise(TypeError) { "foo".sub(Object.new, "") }
assert_raise(ArgumentError) { "foo".sub }
assert_raise(IndexError) { "foo"[/(?:(o$)|(x))/, 2] = 'bar' }
o = Object.new
def o.to_s; self; end
assert_match(/^foo#<Object:0x.*>baz$/, "foobarbaz".sub("bar") { o })
assert_equal(S("Abc"), S("abc").sub("a", "A"))
m = nil
assert_equal(S("Abc"), S("abc").sub("a") {m = $~; "A"})
assert_equal(S("a"), m[0])
assert_equal(/a/, m.regexp)
bug = '[ruby-core:78686] [Bug #13042] other than regexp has no name references'
assert_raise_with_message(IndexError, /oops/, bug) {
'hello'.gsub('hello', '\k<oops>')
}
end
def test_sub!
a = S("hello")
b = a.dup
a.sub!(/[aeiou]/, S('*'))
assert_equal(S("h*llo"), a)
assert_equal(S("hello"), b)
a = S("hello")
a.sub!(/([aeiou])/, S('<\1>'))
assert_equal(S("h<e>llo"), a)
a = S("hello")
a.sub!(/./) { |s| s[0].to_s + S(' ')}
assert_equal(S("h ello"), a)
a = S("hello")
a.sub!(/(hell)(.)/) { |s| $1.upcase + S('-') + $2 }
assert_equal(S("HELL-o"), a)
a=S("hello")
assert_nil(a.sub!(/X/, S('Y')))
r = S('X')
r.taint
a.sub!(/./, r)
assert_predicate(a, :tainted?)
end
def test_succ
assert_equal(S("abd"), S("abc").succ)
assert_equal(S("z"), S("y").succ)
assert_equal(S("aaa"), S("zz").succ)
assert_equal(S("124"), S("123").succ)
assert_equal(S("1000"), S("999").succ)
assert_equal(S("2.000"), S("1.999").succ)
assert_equal(S("No.10"), S("No.9").succ)
assert_equal(S("2000aaa"), S("1999zzz").succ)
assert_equal(S("AAAAA000"), S("ZZZZ999").succ)
assert_equal(S("*+"), S("**").succ)
assert_equal("abce", "abcd".succ)
assert_equal("THX1139", "THX1138".succ)
assert_equal("<\<koalb>>", "<\<koala>>".succ)
assert_equal("2000aaa", "1999zzz".succ)
assert_equal("AAAA0000", "ZZZ9999".succ)
assert_equal("**+", "***".succ)
assert_equal("!", " ".succ)
assert_equal("", "".succ)
bug = '[ruby-core:83062] [Bug #13952]'
s = "\xff".b
assert_not_predicate(s, :ascii_only?)
assert_predicate(s.succ, :ascii_only?, bug)
end
def test_succ!
a = S("abc")
b = a.dup
assert_equal(S("abd"), a.succ!)
assert_equal(S("abd"), a)
assert_equal(S("abc"), b)
a = S("y")
assert_equal(S("z"), a.succ!)
assert_equal(S("z"), a)
a = S("zz")
assert_equal(S("aaa"), a.succ!)
assert_equal(S("aaa"), a)
a = S("123")
assert_equal(S("124"), a.succ!)
assert_equal(S("124"), a)
a = S("999")
assert_equal(S("1000"), a.succ!)
assert_equal(S("1000"), a)
a = S("1999zzz")
assert_equal(S("2000aaa"), a.succ!)
assert_equal(S("2000aaa"), a)
a = S("ZZZZ999")
assert_equal(S("AAAAA000"), a.succ!)
assert_equal(S("AAAAA000"), a)
a = S("**")
assert_equal(S("*+"), a.succ!)
assert_equal(S("*+"), a)
a = S("No.9")
assert_equal(S("No.10"), a.succ!)
assert_equal(S("No.10"), a)
a = S(" ")
assert_equal(S("!"), a.succ!)
assert_equal(S("!"), a)
a = S("")
assert_equal(S(""), a.succ!)
assert_equal(S(""), a)
assert_equal("aaaaaaaaaaaa", "zzzzzzzzzzz".succ!)
assert_equal("aaaaaaaaaaaaaaaaaaaaaaaa", "zzzzzzzzzzzzzzzzzzzzzzz".succ!)
end
def test_sum
n = S("\001\001\001\001\001\001\001\001\001\001\001\001\001\001\001")
assert_equal(15, n.sum)
n += S("\001")
assert_equal(16, n.sum(17))
n[0] = 2.chr
assert_not_equal(15, n.sum)
assert_equal(17, n.sum(0))
assert_equal(17, n.sum(-1))
end
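  # Helper used by the sum tests below: recompute the byte sum by hand, mask it
  # to the low +bits+ bits unless bits is zero, and compare with String#sum.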
def check_sum(str, bits=16)
sum = 0
str.each_byte {|c| sum += c}
sum = sum & ((1 << bits) - 1) if bits != 0
assert_equal(sum, str.sum(bits))
end
def test_sum_2
assert_equal(0, "".sum)
assert_equal(294, "abc".sum)
check_sum("abc")
check_sum("\x80")
-3.upto(70) {|bits|
check_sum("xyz", bits)
}
end
def test_sum_long
s8421505 = "\xff" * 8421505
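    # worked example: 0xff * 8_421_505 = 2_147_483_775 = 0x8000_007f, so the sum
    # masked to 31 bits is 127, while sum(0) returns the untruncated total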
assert_equal(127, s8421505.sum(31))
assert_equal(2147483775, s8421505.sum(0))
s16843010 = ("\xff" * 16843010)
assert_equal(254, s16843010.sum(32))
assert_equal(4294967550, s16843010.sum(0))
end
def test_swapcase
assert_equal(S("hi&LOW"), S("HI&low").swapcase)
end
def test_swapcase!
a = S("hi&LOW")
b = a.dup
assert_equal(S("HI&low"), a.swapcase!)
assert_equal(S("HI&low"), a)
assert_equal(S("hi&LOW"), b)
a = S("$^#^%$#!!")
assert_nil(a.swapcase!)
assert_equal(S("$^#^%$#!!"), a)
end
def test_to_f
assert_equal(344.3, S("344.3").to_f)
assert_equal(5.9742e24, S("5.9742e24").to_f)
assert_equal(98.6, S("98.6 degrees").to_f)
assert_equal(0.0, S("degrees 100.0").to_f)
assert_equal([ 0.0].pack('G'), [S(" 0.0").to_f].pack('G'))
assert_equal([-0.0].pack('G'), [S("-0.0").to_f].pack('G'))
end
def test_to_i
assert_equal(1480, S("1480ft/sec").to_i)
assert_equal(0, S("speed of sound in water @20C = 1480ft/sec)").to_i)
assert_equal(0, " 0".to_i)
assert_equal(0, "+0".to_i)
assert_equal(0, "-0".to_i)
assert_equal(0, "--0".to_i)
assert_equal(16, "0x10".to_i(0))
assert_equal(16, "0X10".to_i(0))
assert_equal(2, "0b10".to_i(0))
assert_equal(2, "0B10".to_i(0))
assert_equal(8, "0o10".to_i(0))
assert_equal(8, "0O10".to_i(0))
assert_equal(10, "0d10".to_i(0))
assert_equal(10, "0D10".to_i(0))
assert_equal(8, "010".to_i(0))
assert_raise(ArgumentError) { "010".to_i(-10) }
2.upto(36) {|radix|
assert_equal(radix, "10".to_i(radix))
assert_equal(radix**2, "100".to_i(radix))
}
assert_raise(ArgumentError) { "0".to_i(1) }
assert_raise(ArgumentError) { "0".to_i(37) }
assert_equal(0, "z".to_i(10))
assert_equal(12, "1_2".to_i(10))
assert_equal(0x40000000, "1073741824".to_i(10))
assert_equal(0x4000000000000000, "4611686018427387904".to_i(10))
assert_equal(1, "1__2".to_i(10))
assert_equal(1, "1_z".to_i(10))
bug6192 = '[ruby-core:43566]'
assert_raise(Encoding::CompatibilityError, bug6192) {"0".encode("utf-16be").to_i}
assert_raise(Encoding::CompatibilityError, bug6192) {"0".encode("utf-16le").to_i}
assert_raise(Encoding::CompatibilityError, bug6192) {"0".encode("utf-32be").to_i}
assert_raise(Encoding::CompatibilityError, bug6192) {"0".encode("utf-32le").to_i}
assert_raise(Encoding::CompatibilityError, bug6192) {"0".encode("iso-2022-jp").to_i}
end
def test_to_s
a = S("me")
assert_equal("me", a.to_s)
assert_equal(a.__id__, a.to_s.__id__) if @cls == String
end
def test_to_str
a = S("me")
assert_equal("me", a.to_s)
assert_equal(a.__id__, a.to_s.__id__) if @cls == String
o = Object.new
def o.to_str
"at"
end
assert_equal("meat", a.concat(o))
o = Object.new
def o.to_str
foo_bar()
end
assert_match(/foo_bar/, assert_raise(NoMethodError) {a.concat(o)}.message)
end
def test_tr
assert_equal(S("hippo"), S("hello").tr(S("el"), S("ip")))
assert_equal(S("*e**o"), S("hello").tr(S("^aeiou"), S("*")))
assert_equal(S("hal"), S("ibm").tr(S("b-z"), S("a-z")))
a = "abc".force_encoding(Encoding::US_ASCII)
assert_equal(Encoding::US_ASCII, a.tr(S("z"), S("\u0101")).encoding, '[ruby-core:22326]')
assert_equal("a".hash, "a".tr("a", "\u0101").tr("\u0101", "a").hash, '[ruby-core:22328]')
assert_equal(true, "\u0101".tr("\u0101", "a").ascii_only?)
assert_equal(true, "\u3041".tr("\u3041", "a").ascii_only?)
assert_equal(false, "\u3041\u3042".tr("\u3041", "a").ascii_only?)
bug6156 = '[ruby-core:43335]'
bug13950 = '[ruby-core:83056] [Bug #13950]'
str, range, star = %w[b a-z *].map{|s|s.encode("utf-16le")}
result = str.tr(range, star)
assert_equal(star, result, bug6156)
assert_not_predicate(str, :ascii_only?)
assert_not_predicate(star, :ascii_only?)
assert_not_predicate(result, :ascii_only?, bug13950)
end
def test_tr!
a = S("hello")
b = a.dup
assert_equal(S("hippo"), a.tr!(S("el"), S("ip")))
assert_equal(S("hippo"), a)
assert_equal(S("hello"),b)
a = S("hello")
assert_equal(S("*e**o"), a.tr!(S("^aeiou"), S("*")))
assert_equal(S("*e**o"), a)
a = S("IBM")
assert_equal(S("HAL"), a.tr!(S("B-Z"), S("A-Z")))
assert_equal(S("HAL"), a)
a = S("ibm")
assert_nil(a.tr!(S("B-Z"), S("A-Z")))
assert_equal(S("ibm"), a)
a = "abc".force_encoding(Encoding::US_ASCII)
assert_nil(a.tr!(S("z"), S("\u0101")), '[ruby-core:22326]')
assert_equal(Encoding::US_ASCII, a.encoding, '[ruby-core:22326]')
end
def test_tr_s
assert_equal(S("hypo"), S("hello").tr_s(S("el"), S("yp")))
assert_equal(S("h*o"), S("hello").tr_s(S("el"), S("*")))
assert_equal("a".hash, "\u0101\u0101".tr_s("\u0101", "a").hash)
assert_equal(true, "\u3041\u3041".tr("\u3041", "a").ascii_only?)
end
def test_tr_s!
a = S("hello")
b = a.dup
assert_equal(S("hypo"), a.tr_s!(S("el"), S("yp")))
assert_equal(S("hypo"), a)
assert_equal(S("hello"), b)
a = S("hello")
assert_equal(S("h*o"), a.tr_s!(S("el"), S("*")))
assert_equal(S("h*o"), a)
end
def test_unpack
a = [S("cat"), S("wom"), S("x"), S("yy")]
assert_equal(a, S("catwomx yy ").unpack(S("A3A3A3A3")))
assert_equal([S("cat")], S("cat \000\000").unpack(S("A*")))
assert_equal([S("cwx"), S("wx"), S("x"), S("yy")],
S("cwx yy ").unpack(S("A3@1A3@2A3A3")))
assert_equal([S("cat"), S("wom"), S("x\000\000"), S("yy\000")],
S("catwomx\000\000yy\000").unpack(S("a3a3a3a3")))
assert_equal([S("cat \000\000")], S("cat \000\000").unpack(S("a*")))
assert_equal([S("ca")], S("catdog").unpack(S("a2")))
assert_equal([S("cat\000\000")],
S("cat\000\000\000\000\000dog").unpack(S("a5")))
assert_equal([S("01100001")], S("\x61").unpack(S("B8")))
assert_equal([S("01100001")], S("\x61").unpack(S("B*")))
assert_equal([S("0110000100110111")], S("\x61\x37").unpack(S("B16")))
assert_equal([S("01100001"), S("00110111")], S("\x61\x37").unpack(S("B8B8")))
assert_equal([S("0110")], S("\x60").unpack(S("B4")))
assert_equal([S("01")], S("\x40").unpack(S("B2")))
assert_equal([S("01100001")], S("\x86").unpack(S("b8")))
assert_equal([S("01100001")], S("\x86").unpack(S("b*")))
assert_equal([S("0110000100110111")], S("\x86\xec").unpack(S("b16")))
assert_equal([S("01100001"), S("00110111")], S("\x86\xec").unpack(S("b8b8")))
assert_equal([S("0110")], S("\x06").unpack(S("b4")))
assert_equal([S("01")], S("\x02").unpack(S("b2")))
assert_equal([ 65, 66, 67 ], S("ABC").unpack(S("C3")))
assert_equal([ 255, 66, 67 ], S("\377BC").unpack("C*"))
assert_equal([ 65, 66, 67 ], S("ABC").unpack("c3"))
assert_equal([ -1, 66, 67 ], S("\377BC").unpack("c*"))
assert_equal([S("4142"), S("0a"), S("1")], S("AB\n\x10").unpack(S("H4H2H1")))
assert_equal([S("1424"), S("a0"), S("2")], S("AB\n\x02").unpack(S("h4h2h1")))
assert_equal([S("abc\002defcat\001"), S(""), S("")],
S("abc=02def=\ncat=\n=01=\n").unpack(S("M9M3M4")))
assert_equal([S("hello\n")], S("aGVsbG8K\n").unpack(S("m")))
assert_equal([S("hello\nhello\n")], S(",:&5L;&\\*:&5L;&\\*\n").unpack(S("u")))
assert_equal([0xa9, 0x42, 0x2260], S("\xc2\xa9B\xe2\x89\xa0").unpack(S("U*")))
=begin
skipping "Not tested:
D,d & double-precision float, native format\\
E & double-precision float, little-endian byte order\\
e & single-precision float, little-endian byte order\\
F,f & single-precision float, native format\\
G & double-precision float, network (big-endian) byte order\\
g & single-precision float, network (big-endian) byte order\\
I & unsigned integer\\
i & integer\\
L & unsigned long\\
l & long\\
m & string encoded in base64 (uuencoded)\\
N & long, network (big-endian) byte order\\
n & short, network (big-endian) byte-order\\
P & pointer to a structure (fixed-length string)\\
p & pointer to a null-terminated string\\
S & unsigned short\\
s & short\\
V & long, little-endian byte order\\
v & short, little-endian byte order\\
X & back up a byte\\
x & null byte\\
Z & ASCII string (null padded, count is width)\\
"
=end
end
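  # Illustrative sketch (not part of the upstream suite): a few of the directives
  # listed as untested above, checked against hand-built byte strings. The method
  # name is an assumption added for this example.
  def test_unpack_untested_directives_sketch
    assert_equal([1], "\x00\x00\x00\x01".unpack("N"))   # 32-bit unsigned, big-endian
    assert_equal([1], "\x01\x00\x00\x00".unpack("V"))   # 32-bit unsigned, little-endian
    assert_equal([1], "\x00\x01".unpack("n"))           # 16-bit unsigned, big-endian
    assert_equal([1], "\x01\x00".unpack("v"))           # 16-bit unsigned, little-endian
    assert_equal(["cat"], "cat\x00\x00".unpack("Z*"))   # null-padded ASCII string
  end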
def test_upcase
assert_equal(S("HELLO"), S("hello").upcase)
assert_equal(S("HELLO"), S("hello").upcase)
assert_equal(S("HELLO"), S("HELLO").upcase)
assert_equal(S("ABC HELLO 123"), S("abc HELLO 123").upcase)
end
def test_upcase!
a = S("hello")
b = a.dup
assert_equal(S("HELLO"), a.upcase!)
assert_equal(S("HELLO"), a)
assert_equal(S("hello"), b)
a = S("HELLO")
assert_nil(a.upcase!)
assert_equal(S("HELLO"), a)
end
def test_upto
a = S("aa")
start = S("aa")
count = 0
assert_equal(S("aa"), a.upto(S("zz")) {|s|
assert_equal(start, s)
start.succ!
count += 1
})
assert_equal(676, count)
end
def test_upto_numeric
a = S("00")
start = S("00")
count = 0
assert_equal(S("00"), a.upto(S("23")) {|s|
assert_equal(start, s, "[ruby-dev:39361]")
assert_equal(Encoding::US_ASCII, s.encoding)
start.succ!
count += 1
})
assert_equal(24, count, "[ruby-dev:39361]")
end
def test_upto_nonalnum
first = S("\u3041")
last = S("\u3093")
count = 0
assert_equal(first, first.upto(last) {|s|
count += 1
s.replace(last)
})
assert_equal(83, count, "[ruby-dev:39626]")
end
def test_mod_check
assert_raise(RuntimeError) {
s = ""
s.sub!(/\A/) { s.replace "z" * 2000; "zzz" }
}
end
def test_frozen_check
assert_raise(FrozenError) {
s = ""
s.sub!(/\A/) { s.freeze; "zzz" }
}
end
class S2 < String
end
def test_str_new4
s = (0..54).to_a.join # length = 100
s2 = S2.new(s[10,90])
s3 = s2[10,80]
    assert_equal((10..54).to_a.join, s2)
    assert_equal((15..54).to_a.join, s3)
end
def test_rb_str_new4
s = "a" * 100
s2 = s[10,90]
assert_equal("a" * 90, s2)
s3 = s2[10,80]
assert_equal("a" * 80, s3)
end
class StringLike
def initialize(str)
@str = str
end
def to_str
@str
end
end
def test_rb_str_to_str
assert_equal("ab", "a" + StringLike.new("b"))
end
def test_rb_str_shared_replace
s = "a" * 100
s.succ!
assert_equal("a" * 99 + "b", s)
s = ""
s.succ!
assert_equal("", s)
end
def test_times
assert_raise(ArgumentError) { "a" * (-1) }
end
def test_splice!
l = S("1234\n234\n34\n4\n")
assert_equal(S("1234\n"), l.slice!(/\A.*\n/), "[ruby-dev:31665]")
assert_equal(S("234\n"), l.slice!(/\A.*\n/), "[ruby-dev:31665]")
assert_equal(S("34\n"), l.slice!(/\A.*\n/), "[ruby-dev:31665]")
assert_equal(S("4\n"), l.slice!(/\A.*\n/), "[ruby-dev:31665]")
assert_nil(l.slice!(/\A.*\n/), "[ruby-dev:31665]")
end
def test_times2
s1 = ''
100.times {|n|
s2 = "a" * n
assert_equal(s1, s2)
s1 << 'a'
}
assert_raise(ArgumentError) { "foo" * (-1) }
end
def test_respond_to
o = Object.new
def o.respond_to?(arg) [:to_str].include?(arg) ? nil : super end
def o.to_str() "" end
def o.==(other) "" == other end
assert_equal(false, "" == o)
end
def test_match_method
assert_equal("bar", "foobarbaz".match(/bar/).to_s)
o = Regexp.new('foo')
def o.match(x, y, z); x + y + z; end
assert_equal("foobarbaz", "foo".match(o, "bar", "baz"))
x = nil
"foo".match(o, "bar", "baz") {|y| x = y }
assert_equal("foobarbaz", x)
assert_raise(ArgumentError) { "foo".match }
end
def test_match_p_regexp
/backref/ =~ 'backref'
    # the match must be performed inline here (not in a separate method such as
    # assert_send) so that we can check whether $~ is affected or not.
assert_equal(true, "".match?(//))
assert_equal(true, :abc.match?(/.../))
assert_equal(true, 'abc'.match?(/b/))
assert_equal(true, 'abc'.match?(/b/, 1))
assert_equal(true, 'abc'.match?(/../, 1))
assert_equal(true, 'abc'.match?(/../, -2))
assert_equal(false, 'abc'.match?(/../, -4))
assert_equal(false, 'abc'.match?(/../, 4))
assert_equal(true, "\u3042xx".match?(/../, 1))
assert_equal(false, "\u3042x".match?(/../, 1))
assert_equal(true, ''.match?(/\z/))
assert_equal(true, 'abc'.match?(/\z/))
assert_equal(true, 'Ruby'.match?(/R.../))
assert_equal(false, 'Ruby'.match?(/R.../, 1))
assert_equal(false, 'Ruby'.match?(/P.../))
assert_equal('backref', $&)
end
def test_match_p_string
/backref/ =~ 'backref'
    # the match must be performed inline here (not in a separate method such as
    # assert_send) so that we can check whether $~ is affected or not.
assert_equal(true, "".match?(''))
assert_equal(true, :abc.match?('...'))
assert_equal(true, 'abc'.match?('b'))
assert_equal(true, 'abc'.match?('b', 1))
assert_equal(true, 'abc'.match?('..', 1))
assert_equal(true, 'abc'.match?('..', -2))
assert_equal(false, 'abc'.match?('..', -4))
assert_equal(false, 'abc'.match?('..', 4))
assert_equal(true, "\u3042xx".match?('..', 1))
assert_equal(false, "\u3042x".match?('..', 1))
assert_equal(true, ''.match?('\z'))
assert_equal(true, 'abc'.match?('\z'))
assert_equal(true, 'Ruby'.match?('R...'))
assert_equal(false, 'Ruby'.match?('R...', 1))
assert_equal(false, 'Ruby'.match?('P...'))
assert_equal('backref', $&)
end
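  # Sketch for contrast (not part of the upstream suite): String#match updates the
  # backreference globals, whereas #match? above leaves them untouched.
  def test_match_sets_backref_sketch
    /seed/ =~ 'seed'
    'abc'.match(/b/)
    assert_equal('b', $&)
  end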
def test_clear
s = "foo" * 100
s.clear
assert_equal("", s)
end
def test_to_s_2
c = Class.new(String)
s = c.new
s.replace("foo")
assert_equal("foo", s.to_s)
assert_instance_of(String, s.to_s)
end
def test_inspect_nul
bug8290 = '[ruby-core:54458]'
s = "\0" + "12"
assert_equal '"\u000012"', s.inspect, bug8290
s = "\0".b + "12"
assert_equal '"\x0012"', s.inspect, bug8290
end
def test_partition
assert_equal(%w(he l lo), "hello".partition(/l/))
assert_equal(%w(he l lo), "hello".partition("l"))
assert_raise(TypeError) { "hello".partition(1) }
def (hyphen = Object.new).to_str; "-"; end
assert_equal(%w(foo - bar), "foo-bar".partition(hyphen), '[ruby-core:23540]')
bug6206 = '[ruby-dev:45441]'
Encoding.list.each do |enc|
next unless enc.ascii_compatible?
s = S("a:".force_encoding(enc))
assert_equal([enc]*3, s.partition("|").map(&:encoding), bug6206)
end
assert_equal(["\u30E6\u30FC\u30B6", "@", "\u30C9\u30E1.\u30A4\u30F3"],
"\u30E6\u30FC\u30B6@\u30C9\u30E1.\u30A4\u30F3".partition(/[@.]/))
bug = '[ruby-core:82911]'
hello = "hello"
hello.partition("hi").map(&:upcase!)
assert_equal("hello", hello, bug)
end
def test_rpartition
assert_equal(%w(hel l o), "hello".rpartition(/l/))
assert_equal(%w(hel l o), "hello".rpartition("l"))
assert_raise(TypeError) { "hello".rpartition(1) }
def (hyphen = Object.new).to_str; "-"; end
assert_equal(%w(foo - bar), "foo-bar".rpartition(hyphen), '[ruby-core:23540]')
bug6206 = '[ruby-dev:45441]'
Encoding.list.each do |enc|
next unless enc.ascii_compatible?
s = S("a:".force_encoding(enc))
assert_equal([enc]*3, s.rpartition("|").map(&:encoding), bug6206)
end
bug8138 = '[ruby-dev:47183]'
assert_equal(["\u30E6\u30FC\u30B6@\u30C9\u30E1", ".", "\u30A4\u30F3"],
"\u30E6\u30FC\u30B6@\u30C9\u30E1.\u30A4\u30F3".rpartition(/[@.]/), bug8138)
bug = '[ruby-core:82911]'
hello = "hello"
hello.rpartition("hi").map(&:upcase!)
assert_equal("hello", hello, bug)
end
def test_setter
assert_raise(TypeError) { $/ = 1 }
name = "\u{5206 884c}"
assert_separately([], <<-"end;") # do
alias $#{name} $/
assert_raise_with_message(TypeError, /\\$#{name}/) { $#{name} = 1 }
end;
end
def test_to_id
c = Class.new
c.class_eval do
def initialize
@foo = :foo
end
end
assert_raise(TypeError) do
c.class_eval { attr 1 }
end
o = Object.new
def o.to_str; :foo; end
assert_raise(TypeError) do
c.class_eval { attr 1 }
end
class << o;remove_method :to_str;end
def o.to_str; "foo"; end
assert_nothing_raised do
c.class_eval { attr o }
end
assert_equal(:foo, c.new.foo)
end
def test_gsub_enumerator
assert_normal_exit %q{"abc".gsub(/./).next}, "[ruby-dev:34828]"
end
def test_clear_nonasciicompat
assert_equal("", "\u3042".encode("ISO-2022-JP").clear)
end
def test_try_convert
assert_equal(nil, String.try_convert(1))
assert_equal("foo", String.try_convert("foo"))
end
def test_substr_negative_begin
assert_equal("\u3042", ("\u3042" * 100)[-1])
end
=begin
def test_compare_different_encoding_string
s1 = "\xff".force_encoding("UTF-8")
s2 = "\xff".force_encoding("ISO-2022-JP")
assert_equal([-1, 1], [s1 <=> s2, s2 <=> s1].sort)
end
=end
def test_casecmp
assert_equal(0, "FoO".casecmp("fOO"))
assert_equal(1, "FoO".casecmp("BaR"))
assert_equal(-1, "baR".casecmp("FoO"))
assert_equal(1, "\u3042B".casecmp("\u3042a"))
assert_nil("foo".casecmp(:foo))
assert_nil("foo".casecmp(Object.new))
o = Object.new
def o.to_str; "fOO"; end
assert_equal(0, "FoO".casecmp(o))
end
def test_casecmp?
assert_equal(true, 'FoO'.casecmp?('fOO'))
assert_equal(false, 'FoO'.casecmp?('BaR'))
assert_equal(false, 'baR'.casecmp?('FoO'))
assert_equal(true, 'äöü'.casecmp?('ÄÖÜ'))
assert_nil("foo".casecmp?(:foo))
assert_nil("foo".casecmp?(Object.new))
o = Object.new
def o.to_str; "fOO"; end
assert_equal(true, "FoO".casecmp?(o))
end
def test_upcase2
assert_equal("\u3042AB", "\u3042aB".upcase)
end
def test_downcase2
assert_equal("\u3042ab", "\u3042aB".downcase)
end
def test_rstrip
assert_equal(" hello", " hello ".rstrip)
assert_equal("\u3042", "\u3042 ".rstrip)
assert_raise(Encoding::CompatibilityError) { "\u3042".encode("ISO-2022-JP").rstrip }
end
def test_rstrip_bang
s1 = S(" hello ")
assert_equal(" hello", s1.rstrip!)
assert_equal(" hello", s1)
s2 = S("\u3042 ")
assert_equal("\u3042", s2.rstrip!)
assert_equal("\u3042", s2)
s3 = S(" \u3042")
assert_equal(nil, s3.rstrip!)
assert_equal(" \u3042", s3)
s4 = S("\u3042")
assert_equal(nil, s4.rstrip!)
assert_equal("\u3042", s4)
assert_raise(Encoding::CompatibilityError) { "\u3042".encode("ISO-2022-JP").rstrip! }
end
def test_lstrip
assert_equal("hello ", " hello ".lstrip)
assert_equal("\u3042", " \u3042".lstrip)
end
def test_lstrip_bang
s1 = S(" hello ")
assert_equal("hello ", s1.lstrip!)
assert_equal("hello ", s1)
s2 = S("\u3042 ")
assert_equal(nil, s2.lstrip!)
assert_equal("\u3042 ", s2)
s3 = S(" \u3042")
assert_equal("\u3042", s3.lstrip!)
assert_equal("\u3042", s3)
s4 = S("\u3042")
assert_equal(nil, s4.lstrip!)
assert_equal("\u3042", s4)
end
def test_delete_prefix
assert_raise(TypeError) { 'hello'.delete_prefix(nil) }
assert_raise(TypeError) { 'hello'.delete_prefix(1) }
assert_raise(TypeError) { 'hello'.delete_prefix(/hel/) }
s = S("hello")
assert_equal("lo", s.delete_prefix('hel'))
assert_equal("hello", s)
s = S("hello")
assert_equal("hello", s.delete_prefix('lo'))
assert_equal("hello", s)
s = S("\u{3053 3093 306b 3061 306f}")
assert_equal("\u{306b 3061 306f}", s.delete_prefix("\u{3053 3093}"))
assert_equal("\u{3053 3093 306b 3061 306f}", s)
s = S("\u{3053 3093 306b 3061 306f}")
assert_equal("\u{3053 3093 306b 3061 306f}", s.delete_prefix('hel'))
assert_equal("\u{3053 3093 306b 3061 306f}", s)
s = S("hello")
assert_equal("hello", s.delete_prefix("\u{3053 3093}"))
assert_equal("hello", s)
# skip if argument is a broken string
s = S("\xe3\x81\x82")
assert_equal("\xe3\x81\x82", s.delete_prefix("\xe3"))
assert_equal("\xe3\x81\x82", s)
s = S("\x95\x5c").force_encoding("Shift_JIS")
assert_equal("\x95\x5c".force_encoding("Shift_JIS"), s.delete_prefix("\x95"))
assert_equal("\x95\x5c".force_encoding("Shift_JIS"), s)
# clear coderange
s = S("\u{3053 3093}hello")
assert_not_predicate(s, :ascii_only?)
assert_predicate(s.delete_prefix("\u{3053 3093}"), :ascii_only?)
# argument should be converted to String
klass = Class.new { def to_str; 'a'; end }
s = S("abba")
assert_equal("bba", s.delete_prefix(klass.new))
assert_equal("abba", s)
end
def test_delete_prefix_bang
assert_raise(TypeError) { 'hello'.delete_prefix!(nil) }
assert_raise(TypeError) { 'hello'.delete_prefix!(1) }
assert_raise(TypeError) { 'hello'.delete_prefix!(/hel/) }
s = S("hello")
assert_equal("lo", s.delete_prefix!('hel'))
assert_equal("lo", s)
s = S("hello")
assert_equal(nil, s.delete_prefix!('lo'))
assert_equal("hello", s)
s = S("\u{3053 3093 306b 3061 306f}")
assert_equal("\u{306b 3061 306f}", s.delete_prefix!("\u{3053 3093}"))
assert_equal("\u{306b 3061 306f}", s)
s = S("\u{3053 3093 306b 3061 306f}")
assert_equal(nil, s.delete_prefix!('hel'))
assert_equal("\u{3053 3093 306b 3061 306f}", s)
s = S("hello")
assert_equal(nil, s.delete_prefix!("\u{3053 3093}"))
assert_equal("hello", s)
# skip if argument is a broken string
s = S("\xe3\x81\x82")
assert_equal(nil, s.delete_prefix!("\xe3"))
assert_equal("\xe3\x81\x82", s)
# clear coderange
s = S("\u{3053 3093}hello")
assert_not_predicate(s, :ascii_only?)
assert_predicate(s.delete_prefix!("\u{3053 3093}"), :ascii_only?)
# argument should be converted to String
klass = Class.new { def to_str; 'a'; end }
s = S("abba")
assert_equal("bba", s.delete_prefix!(klass.new))
assert_equal("bba", s)
s = S("ax").freeze
assert_raise_with_message(FrozenError, /frozen/) {s.delete_prefix!("a")}
s = S("ax")
o = Struct.new(:s).new(s)
def o.to_str
s.freeze
"a"
end
assert_raise_with_message(FrozenError, /frozen/) {s.delete_prefix!(o)}
end
def test_delete_suffix
assert_raise(TypeError) { 'hello'.delete_suffix(nil) }
assert_raise(TypeError) { 'hello'.delete_suffix(1) }
assert_raise(TypeError) { 'hello'.delete_suffix(/hel/) }
s = S("hello")
assert_equal("hel", s.delete_suffix('lo'))
assert_equal("hello", s)
s = S("hello")
assert_equal("hello", s.delete_suffix('he'))
assert_equal("hello", s)
s = S("\u{3053 3093 306b 3061 306f}")
assert_equal("\u{3053 3093 306b}", s.delete_suffix("\u{3061 306f}"))
assert_equal("\u{3053 3093 306b 3061 306f}", s)
s = S("\u{3053 3093 306b 3061 306f}")
assert_equal("\u{3053 3093 306b 3061 306f}", s.delete_suffix('lo'))
assert_equal("\u{3053 3093 306b 3061 306f}", s)
s = S("hello")
assert_equal("hello", s.delete_suffix("\u{3061 306f}"))
assert_equal("hello", s)
# skip if argument is a broken string
s = S("\xe3\x81\x82")
assert_equal("\xe3\x81\x82", s.delete_suffix("\x82"))
assert_equal("\xe3\x81\x82", s)
# clear coderange
s = S("hello\u{3053 3093}")
assert_not_predicate(s, :ascii_only?)
assert_predicate(s.delete_suffix("\u{3053 3093}"), :ascii_only?)
# argument should be converted to String
klass = Class.new { def to_str; 'a'; end }
s = S("abba")
assert_equal("abb", s.delete_suffix(klass.new))
assert_equal("abba", s)
# chomp removes any of "\n", "\r\n", "\r" when "\n" is specified,
# but delete_suffix does not
s = "foo\n"
assert_equal("foo", s.delete_suffix("\n"))
s = "foo\r\n"
assert_equal("foo\r", s.delete_suffix("\n"))
s = "foo\r"
assert_equal("foo\r", s.delete_suffix("\n"))
end
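  # Illustrative contrast (not part of the upstream suite) for the chomp note in
  # test_delete_suffix above: chomp("\n") also swallows a preceding "\r", while
  # delete_suffix("\n") does not.
  def test_delete_suffix_vs_chomp_sketch
    assert_equal("foo", "foo\r\n".chomp("\n"))
    assert_equal("foo\r", "foo\r\n".delete_suffix("\n"))
  end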
def test_delete_suffix_bang
assert_raise(TypeError) { 'hello'.delete_suffix!(nil) }
assert_raise(TypeError) { 'hello'.delete_suffix!(1) }
assert_raise(TypeError) { 'hello'.delete_suffix!(/hel/) }
s = S("hello").freeze
assert_raise_with_message(FrozenError, /frozen/) {s.delete_suffix!('lo')}
s = S("ax")
o = Struct.new(:s).new(s)
def o.to_str
s.freeze
"x"
end
assert_raise_with_message(FrozenError, /frozen/) {s.delete_suffix!(o)}
s = S("hello")
assert_equal("hel", s.delete_suffix!('lo'))
assert_equal("hel", s)
s = S("hello")
assert_equal(nil, s.delete_suffix!('he'))
assert_equal("hello", s)
s = S("\u{3053 3093 306b 3061 306f}")
assert_equal("\u{3053 3093 306b}", s.delete_suffix!("\u{3061 306f}"))
assert_equal("\u{3053 3093 306b}", s)
s = S("\u{3053 3093 306b 3061 306f}")
assert_equal(nil, s.delete_suffix!('lo'))
assert_equal("\u{3053 3093 306b 3061 306f}", s)
s = S("hello")
assert_equal(nil, s.delete_suffix!("\u{3061 306f}"))
assert_equal("hello", s)
# skip if argument is a broken string
s = S("\xe3\x81\x82")
assert_equal(nil, s.delete_suffix!("\x82"))
assert_equal("\xe3\x81\x82", s)
s = S("\x95\x5c").force_encoding("Shift_JIS")
assert_equal(nil, s.delete_suffix!("\x5c"))
assert_equal("\x95\x5c".force_encoding("Shift_JIS"), s)
# clear coderange
s = S("hello\u{3053 3093}")
assert_not_predicate(s, :ascii_only?)
assert_predicate(s.delete_suffix!("\u{3053 3093}"), :ascii_only?)
# argument should be converted to String
klass = Class.new { def to_str; 'a'; end }
s = S("abba")
assert_equal("abb", s.delete_suffix!(klass.new))
assert_equal("abb", s)
# chomp removes any of "\n", "\r\n", "\r" when "\n" is specified,
# but delete_suffix does not
s = "foo\n"
assert_equal("foo", s.delete_suffix!("\n"))
s = "foo\r\n"
assert_equal("foo\r", s.delete_suffix!("\n"))
s = "foo\r"
assert_equal(nil, s.delete_suffix!("\n"))
end
=begin
def test_symbol_table_overflow
assert_in_out_err([], <<-INPUT, [], /symbol table overflow \(symbol [a-z]{8}\) \(RuntimeError\)/)
("aaaaaaaa".."zzzzzzzz").each {|s| s.to_sym }
INPUT
end
=end
def test_shared_force_encoding
s = "\u{3066}\u{3059}\u{3068}".gsub(//, '')
h = {}
h[s] = nil
k = h.keys[0]
assert_equal(s, k, '[ruby-dev:39068]')
assert_equal(Encoding::UTF_8, k.encoding, '[ruby-dev:39068]')
s.dup.force_encoding(Encoding::ASCII_8BIT).gsub(//, '')
k = h.keys[0]
assert_equal(s, k, '[ruby-dev:39068]')
assert_equal(Encoding::UTF_8, k.encoding, '[ruby-dev:39068]')
end
def test_ascii_incomat_inspect
bug4081 = '[ruby-core:33283]'
[Encoding::UTF_16LE, Encoding::UTF_16BE,
Encoding::UTF_32LE, Encoding::UTF_32BE].each do |e|
assert_equal('"abc"', "abc".encode(e).inspect)
assert_equal('"\\u3042\\u3044\\u3046"', "\u3042\u3044\u3046".encode(e).inspect)
assert_equal('"ab\\"c"', "ab\"c".encode(e).inspect, bug4081)
end
begin
verbose, $VERBOSE = $VERBOSE, nil
ext = Encoding.default_external
Encoding.default_external = "us-ascii"
$VERBOSE = verbose
i = "abc\"\\".force_encoding("utf-8").inspect
ensure
$VERBOSE = nil
Encoding.default_external = ext
$VERBOSE = verbose
end
assert_equal('"abc\\"\\\\"', i, bug4081)
end
def test_dummy_inspect
assert_equal('"\e\x24\x42\x22\x4C\x22\x68\e\x28\x42"',
"\u{ffe2}\u{2235}".encode("cp50220").inspect)
end
def test_prepend
assert_equal(S("hello world!"), "!".prepend("hello ", "world"))
b = S("ue")
assert_equal(S("ueueue"), b.prepend(b, b))
foo = Object.new
def foo.to_str
"b"
end
assert_equal(S("ba"), "a".prepend(foo))
a = S("world")
b = S("hello ")
a.prepend(b)
assert_equal(S("hello world"), a)
assert_equal(S("hello "), b)
end
def u(str)
str.force_encoding(Encoding::UTF_8)
end
def test_byteslice
assert_equal("h", "hello".byteslice(0))
assert_equal(nil, "hello".byteslice(5))
assert_equal("o", "hello".byteslice(-1))
assert_equal(nil, "hello".byteslice(-6))
assert_equal("", "hello".byteslice(0, 0))
assert_equal("hello", "hello".byteslice(0, 6))
assert_equal("hello", "hello".byteslice(0, 6))
assert_equal("", "hello".byteslice(5, 1))
assert_equal("o", "hello".byteslice(-1, 6))
assert_equal(nil, "hello".byteslice(-6, 1))
assert_equal(nil, "hello".byteslice(0, -1))
assert_equal("h", "hello".byteslice(0..0))
assert_equal("", "hello".byteslice(5..0))
assert_equal("o", "hello".byteslice(4..5))
assert_equal(nil, "hello".byteslice(6..0))
assert_equal("", "hello".byteslice(-1..0))
assert_equal("llo", "hello".byteslice(-3..5))
assert_equal(u("\x81"), "\u3042".byteslice(1))
assert_equal(u("\x81\x82"), "\u3042".byteslice(1, 2))
assert_equal(u("\x81\x82"), "\u3042".byteslice(1..2))
assert_equal(u("\x82")+("\u3042"*9), ("\u3042"*10).byteslice(2, 28))
bug7954 = '[ruby-dev:47108]'
assert_equal(false, "\u3042".byteslice(0, 2).valid_encoding?, bug7954)
assert_equal(false, ("\u3042"*10).byteslice(0, 20).valid_encoding?, bug7954)
end
def test_unknown_string_option
str = nil
assert_nothing_raised(SyntaxError) do
eval(%{
str = begin"hello"end
})
end
assert_equal "hello", str
end
def test_eq_tilde_can_be_overridden
assert_separately([], <<-RUBY)
class String
undef =~
def =~(str)
"foo"
end
end
assert_equal("foo", "" =~ //)
RUBY
end
class Bug9581 < String
def =~ re; :foo end
end
def test_regexp_match_subclass
s = Bug9581.new("abc")
r = /abc/
assert_equal(:foo, s =~ r)
assert_equal(:foo, s.send(:=~, r))
assert_equal(:foo, s.send(:=~, /abc/))
assert_equal(:foo, s =~ /abc/, "should not use optimized instruction")
end
def test_LSHIFT_neary_long_max
return unless @cls == String
assert_ruby_status([], <<-'end;', '[ruby-core:61886] [Bug #9709]', timeout: 20)
begin
a = "a" * 0x4000_0000
a << "a" * 0x1_0000
rescue NoMemoryError
end
end;
end if [0].pack("l!").bytesize < [nil].pack("p").bytesize
# enable only when string size range is smaller than memory space
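  # (the guard above compares the native long size with the pointer size, so the
  # test is defined only where a String's length counter is narrower than the
  # address space; explanatory note, not part of the upstream file)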
def test_uplus_minus
str = "foo"
assert_not_predicate(str, :frozen?)
assert_not_predicate(+str, :frozen?)
assert_predicate(-str, :frozen?)
assert_same(str, +str)
assert_not_same(str, -str)
str = "bar".freeze
assert_predicate(str, :frozen?)
assert_not_predicate(+str, :frozen?)
assert_predicate(-str, :frozen?)
assert_not_same(str, +str)
assert_same(str, -str)
bar = %w(b a r).join('')
assert_same(str, -bar, "uminus deduplicates [Feature #13077]")
end
def test_ord
assert_equal(97, "a".ord)
assert_equal(97, "abc".ord)
assert_equal(0x3042, "\u3042\u3043".ord)
assert_raise(ArgumentError) { "".ord }
end
def test_chr
assert_equal("a", "abcde".chr)
assert_equal("a", "a".chr)
assert_equal("\u3042", "\u3042\u3043".chr)
assert_equal('', ''.chr)
end
end
class TestString2 < TestString
def initialize(*args)
super
@cls = S2
end
end
| 29.10084 | 101 | 0.568682 |
39a4d3792551c11b7140552e9b14ab62b5e58dfe | 1,128 | cask "silentknight" do
version "1.17,2021.10"
sha256 "9f0bae8ae2d7941d45d547044421a7e65b0ee6c15459359f3d21bea4fecc2736"
url "https://eclecticlightdotcom.files.wordpress.com/#{version.after_comma.major}/#{version.after_comma.minor}/silentknight#{version.before_comma.no_dots}.zip",
verified: "eclecticlightdotcom.files.wordpress.com/"
name "SilentKnight"
desc "Automatically checks computer's security"
homepage "https://eclecticlight.co/lockrattler-systhist/"
livecheck do
url "https://raw.githubusercontent.com/hoakleyelc/updates/master/eclecticapps.plist"
strategy :page_match do |page|
match = page.match(%r{/(\d+)/(\d+)/silentknight(\d+)\.zip}i)
next if match.blank?
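      # e.g. "/2021/10/silentknight117.zip" yields "1.17,2021.10": the first digit,
      # a dot, the remaining digits, then the upload year.month (illustrative note)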
"#{match[3].split("", 2).join(".")},#{match[1]}.#{match[2]}"
end
end
depends_on macos: ">= :el_capitan"
app "silentknight#{version.before_comma.no_dots}/SilentKnight.app"
zap trash: [
"~/Library/Caches/co.eclecticlight.SilentKnight",
"~/Library/Preferences/co.eclecticlight.SilentKnight.plist",
"~/Library/Saved Application State/co.eclecticlight.SilentKnight.savedState",
]
end
| 36.387097 | 162 | 0.726064 |
b93e1061169b21c79b011ab355f696a11b9c276a | 277 | require_relative '../section'
module UI::Sections
class CategoryNavItem < UI::Section
element :title, '.link-list-primary__text'
elements :categories, '.link-list-secondary__item'
elements :selected_categories, '.link-list-secondary__text.is-selected'
end
end
| 27.7 | 75 | 0.750903 |
b9975edfa104b109c3b8d054e6e9402e4376616a | 2,448 | class PkgConfig < Formula
desc "Manage compile and link flags for libraries"
homepage "https://freedesktop.org/wiki/Software/pkg-config/"
url "https://pkgconfig.freedesktop.org/releases/pkg-config-0.29.2.tar.gz"
sha256 "6fc69c01688c9458a57eb9a1664c9aba372ccda420a02bf4429fe610e7e7d591"
license "GPL-2.0-or-later"
revision 3
livecheck do
url "https://pkg-config.freedesktop.org/releases/"
regex(/href=.*?pkg-config[._-]v?(\d+(?:\.\d+)+)\./i)
end
bottle do
sha256 arm64_big_sur: "ffd4491f62201d14b7eca6beff954a2ab265351589cd5b3b79b8bbb414485574"
sha256 big_sur: "0040b6ebe07f60549800b211343fd5fb3cf83c866d9f62e40f5fb2f38b71e161"
sha256 catalina: "80f141e695f73bd058fd82e9f539dc67471666ff6800c5e280b5af7d3050f435"
sha256 mojave: "0d14b797dba0e0ab595c9afba8ab7ef9c901b60b4f806b36580ef95ebb370232"
sha256 high_sierra: "8c6160305abd948b8cf3e0d5c6bb0df192fa765bbb9535dda0b573cb60abbe52"
end
def install
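    # Build the default .pc search path: Homebrew's lib/share pkgconfig directories
    # plus OS-specific locations, passed to configure via --with-pc-path below.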
pc_path = %W[
#{HOMEBREW_PREFIX}/lib/pkgconfig
#{HOMEBREW_PREFIX}/share/pkgconfig
]
on_macos do
pc_path << "/usr/local/lib/pkgconfig"
pc_path << "/usr/lib/pkgconfig"
pc_path << "#{HOMEBREW_LIBRARY}/Homebrew/os/mac/pkgconfig/#{MacOS.version}"
end
on_linux do
pc_path << "#{HOMEBREW_LIBRARY}/Homebrew/os/linux/pkgconfig"
end
pc_path = pc_path.uniq.join(File::PATH_SEPARATOR)
system "./configure", "--disable-debug",
"--prefix=#{prefix}",
"--disable-host-tool",
"--with-internal-glib",
"--with-pc-path=#{pc_path}",
"--with-system-include-path=#{MacOS.sdk_path_if_needed}/usr/include"
system "make"
system "make", "install"
end
test do
(testpath/"foo.pc").write <<~EOS
prefix=/usr
exec_prefix=${prefix}
includedir=${prefix}/include
libdir=${exec_prefix}/lib
Name: foo
Description: The foo library
Version: 1.0.0
Cflags: -I${includedir}/foo
Libs: -L${libdir} -lfoo
EOS
ENV["PKG_CONFIG_LIBDIR"] = testpath
system bin/"pkg-config", "--validate", "foo"
assert_equal "1.0.0\n", shell_output("#{bin}/pkg-config --modversion foo")
assert_equal "-lfoo\n", shell_output("#{bin}/pkg-config --libs foo")
assert_equal "-I/usr/include/foo\n", shell_output("#{bin}/pkg-config --cflags foo")
end
end
| 36 | 94 | 0.65768 |
d56cbe2d221b99dc25b441c6010da4e0961fef2a | 854 | # Customer Address
# Postal addresses belonging to a customer.
# https://developer.bigcommerce.com/api/stores/v2/customers/addresses
module Bigcommerce
class CustomerAddress < Resource
include Bigcommerce::SubresourceActions.new uri: 'v2/customers/%d/addresses/%d'
property :id
property :customer_id
property :form_fields
property :first_name
property :address_type
property :count
property :last_name
property :company
property :street_1
property :street_2
property :city
property :state
property :zip
property :country
property :country_iso2
property :phone
def self.count_all(params = {})
get 'v2/customers/addresses/count', params
end
def self.count(customer_id, params = {})
get "v2/customers/#{customer_id}/addresses/count", params
end
end
end
| 24.4 | 83 | 0.708431 |
f8ed23bc42f53790d6265a4280294b9102ad92d4 | 1,246 | class TektoncdCli < Formula
desc "CLI for interacting with TektonCD"
homepage "https://github.com/tektoncd/cli"
url "https://github.com/tektoncd/cli/archive/v0.5.1.tar.gz"
sha256 "a6f812d84dd7f22a677e1c3aedc6af793c2b6eeff5a07c0b3736c661566df141"
bottle do
cellar :any_skip_relocation
sha256 "9e4e2ad188da30e4110d2b3a3e05f3ce72a82db3ca4a17049f0cc298b27b8b14" => :catalina
sha256 "82e31191057158baaaa3e8765e718be80d4ab40693895fb37bffc62152294c26" => :mojave
sha256 "4a81737917f2123301de1d9ab6dfd1ba5a44ac78150d53510c3a2a2bcbab4956" => :high_sierra
sha256 "65b9f794dedae7979529bab36209d882eebaf4f6f4e9b55e20c5acfa097ffb59" => :x86_64_linux
end
depends_on "go" => :build
def install
system "make", "bin/tkn"
bin.install "bin/tkn" => "tkn"
output = Utils.popen_read("SHELL=bash #{bin}/tkn completion bash")
(bash_completion/"tkn").write output
output = Utils.popen_read("SHELL=zsh #{bin}/tkn completion zsh")
(zsh_completion/"_tkn").write output
prefix.install_metafiles
end
test do
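    # No cluster is reachable in the test environment, so the expected outcome of
    # `tkn pipelinerun describe` is the kubeconfig error asserted below.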
cmd = "#{bin}/tkn pipelinerun describe homebrew-formula"
io = IO.popen(cmd, :err => [:child, :out])
assert_match "Error: Couldn't get kubeConfiguration namespace", io.read
end
end
| 35.6 | 94 | 0.752006 |
03d0adb335ab286ef18838fcdfc0d5ba859ec466 | 41 | module CiBlockIo
VERSION = "1.7.7"
end
| 10.25 | 19 | 0.682927 |
116345e1034ba74babbbee13bf5ee8348860d34f | 1,317 | require 'azure_generic_resource'
class AzurePublicIp < AzureGenericResource
name 'azure_public_ip'
desc 'Verifies settings for public IP address'
example <<-EXAMPLE
describe azure_public_ip(resource_group: 'example', name: 'name') do
      its('name') { should eq 'name' }
end
EXAMPLE
def initialize(opts = {})
# Options should be Hash type. Otherwise Ruby will raise an error when we try to access the keys.
raise ArgumentError, 'Parameters must be provided in an Hash object.' unless opts.is_a?(Hash)
opts[:resource_provider] = specific_resource_constraint('Microsoft.Network/publicIPAddresses', opts)
# static_resource parameter must be true for setting the resource_provider in the backend.
super(opts, true)
end
def to_s
super(AzurePublicIp)
end
end
# Provide the same functionality under the old resource name.
# This is for backward compatibility.
class AzurermPublicIp < AzurePublicIp
name 'azurerm_public_ip'
desc 'Verifies settings for public IP address'
example <<-EXAMPLE
describe azurerm_public_ip(resource_group: 'example', name: 'name') do
      its('name') { should eq 'name' }
end
EXAMPLE
def initialize(opts = {})
Inspec::Log.warn Helpers.resource_deprecation_message(@__resource_name__, AzurePublicIp.name)
super
end
end
| 30.627907 | 104 | 0.742597 |
03c64eb9a5defacc9f307528a17f9f4c27c57d6b | 625 | # frozen_string_literal: true
require 'simplecov'
SimpleCov.start do
add_group 'unit tests', 'test/unit'
add_group 'system tests', 'test/system'
add_group 'lib', 'lib/'
end
require 'bundler'
begin
Bundler.setup(:default, :development)
rescue Bundler::BundlerError => e
warn e.message
warn 'Run `bundle install` to install missing gems'
exit e.status_code
end
require 'minitest'
require 'minitest/autorun'
require "minitest/benchmark"
require 'mocha/minitest'
require 'tesseract_ffi'
require 'ap'
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
| 21.551724 | 66 | 0.7488 |
28c0e4f4081536febae628b5467b8a7411cbad13 | 1,529 | require 'rails_helper'
RSpec.describe Tasks::Create do
let(:user) { create :user }
let(:project) { create :project, user: user }
describe 'Failure' do
context 'project not found' do
subject { described_class.call(params: { project_id: 0 }, current_user: user) }
specify { expect { subject }.to raise_error(ActiveRecord::RecordNotFound) }
end
context 'invalid params' do
let(:params) { { project_id: project.id, name: '' } }
subject { described_class.call(params: params, current_user: user) }
let(:error) { { name: ['must be filled'] } }
it 'has validation error' do
expect(subject).to be_failure
expect(subject['contract.default'].errors.messages).to match error
end
end
end
describe 'Success' do
let(:params) { { project_id: project.id, name: FFaker::DizzleIpsum.word, position: 0 } }
subject { described_class.call(params: params, current_user: user) }
it 'creates task' do
expect { subject }.to change(user.tasks, :count).from(0).to(1)
expect(subject[:model].name).to eq params[:name]
expect(subject[:model].position).to eq 0
expect(subject).to be_success
end
context 'updates position of created tasks' do
let!(:task) { create :task, project: project, position: 0 }
it 'updates position' do
expect { subject }.to change { task.reload.position }
.from(task.position).to(task.position + 1)
expect(subject).to be_success
end
end
end
end
| 29.980392 | 92 | 0.64552 |
262d5608ccac56f231ebc6ec028d52a4797b5224 | 5,998 | require 'spec_helper'
describe Caracal::Core::Models::StyleModel do
subject do
described_class.new do
id 'Normal'
name 'normal'
font 'Arial'
size 20
line 360
end
end
#-------------------------------------------------------------
# Configuration
#-------------------------------------------------------------
describe 'configuration tests' do
# constants
describe 'constants' do
it { expect(described_class::DEFAULT_STYLE_COLOR).to eq '333333' }
it { expect(described_class::DEFAULT_STYLE_SIZE).to eq 20 }
it { expect(described_class::DEFAULT_STYLE_BOLD).to eq false }
it { expect(described_class::DEFAULT_STYLE_ITALIC).to eq false }
it { expect(described_class::DEFAULT_STYLE_UNDERLINE).to eq false }
it { expect(described_class::DEFAULT_STYLE_CAPS).to eq false }
it { expect(described_class::DEFAULT_STYLE_ALIGN).to eq :left }
it { expect(described_class::DEFAULT_STYLE_TOP).to eq 0 }
it { expect(described_class::DEFAULT_STYLE_BOTTOM).to eq 0 }
it { expect(described_class::DEFAULT_STYLE_LINE).to eq 360 }
it { expect(described_class::DEFAULT_STYLE_BASE).to eq 'Normal' }
it { expect(described_class::DEFAULT_STYLE_NEXT).to eq 'Normal' }
end
# accessors
describe 'accessors' do
it { expect(subject.style_default).to eq true }
it { expect(subject.style_id).to eq 'Normal' }
it { expect(subject.style_name).to eq 'normal' }
it { expect(subject.style_color).to eq '333333' }
it { expect(subject.style_font).to eq 'Arial' }
it { expect(subject.style_size).to eq 20 }
it { expect(subject.style_bold).to eq false }
it { expect(subject.style_italic).to eq false }
it { expect(subject.style_underline).to eq false }
it { expect(subject.style_caps).to eq false }
it { expect(subject.style_align).to eq :left }
it { expect(subject.style_top).to eq 0 }
it { expect(subject.style_bottom).to eq 0 }
it { expect(subject.style_line).to eq 360 }
it { expect(subject.style_base).to eq 'Normal' }
it { expect(subject.style_next).to eq 'Normal' }
end
end
#-------------------------------------------------------------
# Public Methods
#-------------------------------------------------------------
describe 'public method tests' do
#=============== SETTERS ==========================
# booleans
describe '.bold' do
before { subject.bold(true) }
it { expect(subject.style_bold).to eq true }
end
describe '.italic' do
before { subject.italic(true) }
it { expect(subject.style_italic).to eq true }
end
describe '.underline' do
before { subject.underline(true) }
it { expect(subject.style_underline).to eq true }
end
describe '.caps' do
before { subject.caps(true) }
it { expect(subject.style_caps).to eq true }
end
# integers
describe '.bottom' do
before { subject.bottom(100) }
it { expect(subject.style_bottom).to eq 100 }
end
describe '.size' do
before { subject.size(24) }
it { expect(subject.style_size).to eq 24 }
end
describe '.line' do
before { subject.line(480) }
it { expect(subject.style_line).to eq 480 }
end
describe '.top' do
before { subject.top(100) }
it { expect(subject.style_top).to eq 100 }
end
describe '.indent_left' do
before { subject.indent_left(1440) }
it { expect(subject.style_indent_left).to eq 1440 }
end
describe '.indent_right' do
before { subject.indent_right(720) }
it { expect(subject.style_indent_right).to eq 720 }
end
    describe '.indent_first' do
before { subject.indent_first(567) }
it { expect(subject.style_indent_first).to eq 567 }
end
# strings
describe '.id' do
before { subject.id('heading1') }
it { expect(subject.style_id).to eq 'heading1' }
end
describe '.name' do
before { subject.name('Heading 1') }
it { expect(subject.style_name).to eq 'Heading 1' }
end
describe '.color' do
before { subject.color('444444') }
it { expect(subject.style_color).to eq '444444' }
end
describe '.font' do
before { subject.font('Helvetica') }
it { expect(subject.style_font).to eq 'Helvetica' }
end
# symbols
describe '.align' do
before { subject.align(:right) }
it { expect(subject.style_align).to eq :right }
end
#=================== STATE ===============================
# .matches?
describe '.matches?' do
describe 'when search term matches' do
let(:actual) { subject.matches?('normal') }
it { expect(actual).to eq true }
end
describe 'when search term does not match' do
let(:actual) { subject.matches?('Dummy') }
it { expect(actual).to eq false }
end
end
#=============== VALIDATION ===========================
describe '.valid?' do
describe 'when type and id provided' do
it { expect(subject.valid?).to eq true }
end
[:id, :name].each do |prop|
describe "when #{ prop } nil" do
before do
allow(subject).to receive("style_#{ prop }").and_return(nil)
end
it { expect(subject.valid?).to eq false }
end
end
end
end
#-------------------------------------------------------------
# Private Methods
#-------------------------------------------------------------
describe 'private method tests' do
# .option_keys
describe '.option_keys' do
let(:actual) { subject.send(:option_keys).sort }
let(:expected) { [:bold, :italic, :underline, :caps, :top, :bottom, :size, :line, :id, :name, :color, :font, :align, :indent_left, :indent_right, :indent_first].sort }
it { expect(actual).to eq expected }
end
end
end
| 28.561905 | 173 | 0.564355 |
d5eb02b202f0078fa50a41ffcb03763f970b0441 | 3,578 | class Libhttpseverywhere < Formula
desc "Bring HTTPSEverywhere to desktop apps"
homepage "https://github.com/gnome/libhttpseverywhere"
url "https://download.gnome.org/sources/libhttpseverywhere/0.8/libhttpseverywhere-0.8.3.tar.xz"
sha256 "1c006f5633842a2b131c1cf644ab929556fc27968a60da55c00955bd4934b6ca"
license "LGPL-3.0-or-later"
revision OS.mac? ? 4 : 6
bottle do
sha256 cellar: :any, arm64_big_sur: "006bf3748d65067509e5b2e6d506f3b0a9a52c5eaab54780850b70b7f82ff249"
sha256 cellar: :any, big_sur: "459d83997d7d69966ddee1e7a94e8583b4de8570ee1a796273a64a3d7845b8cd"
sha256 cellar: :any, catalina: "c8cc1d294949af9676e54f9a32c4dbe782dfc5d103f92bbee68acd2ccb5ff728"
sha256 cellar: :any, mojave: "2835c48e21e0a96730893f96319736e55d29d8b224fcc0915e319bcbc3b521c2"
sha256 cellar: :any, high_sierra: "9c7c9397a0ebe56b82ffa6d8daeb9e645e94d14ed4fd25aedbe313c603e0b9b5"
sha256 cellar: :any_skip_relocation, x86_64_linux: "d4a2bd9068acc5a9b68174352b6fa0e1923a4f05b27982fb57de244bd1f1d041"
end
depends_on "gobject-introspection" => :build
depends_on "meson" => :build
depends_on "ninja" => :build
depends_on "pkg-config" => :build
depends_on "vala" => :build
depends_on "glib"
depends_on "json-glib"
depends_on "libarchive"
depends_on "libgee"
depends_on "libsoup"
# see https://gitlab.gnome.org/GNOME/libhttpseverywhere/issues/1
# remove when next version is released
patch do
url "https://gitlab.gnome.org/GNOME/libhttpseverywhere/commit/6da08ef1ade9ea267cecf14dd5cb2c3e6e5e50cb.patch"
sha256 "511c5aa10f466e879e04e794e09716de6bb18413bd23a72cffb323be5a982919"
end
def install
mkdir "build" do
system "meson", *std_meson_args, ".."
system "ninja"
system "ninja", "install"
end
    unless OS.mac?
dir = [Pathname.new("#{lib}64"), lib/"x86_64-linux-gnu"].find(&:directory?)
unless dir.nil?
mkdir_p lib
system "/bin/mv", *Dir[dir/"*"], lib
rmdir dir
inreplace Dir[lib/"pkgconfig/*.pc"], %r{lib64|lib/x86_64-linux-gnu}, "lib"
end
end
end
test do
(testpath/"test.c").write <<~EOS
#include <httpseverywhere.h>
int main(int argc, char *argv[]) {
GType type = https_everywhere_context_get_type();
return 0;
}
EOS
ENV.libxml2
gettext = Formula["gettext"]
glib = Formula["glib"]
json_glib = Formula["json-glib"]
libarchive = Formula["libarchive"]
libgee = Formula["libgee"]
libsoup = Formula["libsoup"]
pcre = Formula["pcre"]
flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
flags += %W[
-I#{gettext.opt_include}
-I#{glib.opt_include}/glib-2.0
-I#{glib.opt_lib}/glib-2.0/include
-I#{include}/httpseverywhere-0.8
-I#{json_glib.opt_include}/json-glib-1.0
-I#{libarchive.opt_include}
-I#{libgee.opt_include}/gee-0.8
-I#{libsoup.opt_include}/libsoup-2.4
-I#{pcre.opt_include}
-D_REENTRANT
-L#{gettext.opt_lib}
-L#{glib.opt_lib}
-L#{json_glib.opt_lib}
-L#{libarchive.opt_lib}
-L#{libgee.opt_lib}
-L#{libsoup.opt_lib}
-L#{lib}
-larchive
-lgee-0.8
-lgio-2.0
-lglib-2.0
-lgobject-2.0
-lhttpseverywhere-0.8
-ljson-glib-1.0
-lsoup-2.4
-lxml2
]
flags << "-lintl" if OS.mac?
system ENV.cc, "test.c", "-o", "test", *flags
system "./test"
end
end
| 34.07619 | 122 | 0.655674 |
87baed124375c1dafa4ac5c3079a49ef4812d184 | 2,376 | module BlacklightAdvancedSearch
# implementation for AdvancedHelper
module AdvancedHelperBehavior
# Fill in default from existing search, if present
# -- if you are using same search fields for basic
# search and advanced, will even fill in properly if existing
# search used basic search on same field present in advanced.
def label_tag_default_for(key)
      if params[key].present?
        params[key]
      elsif params["search_field"] == key
        params["q"]
      end
end
# Is facet value in adv facet search results?
def facet_value_checked?(field, value)
BlacklightAdvancedSearch::QueryParser.new(params, blacklight_config).filters_include_value?(field, value)
end
def select_menu_for_field_operator
options = {
t('blacklight_advanced_search.all') => 'AND',
t('blacklight_advanced_search.any') => 'OR'
}.sort
select_tag(:op, options_for_select(options, params[:op]), class: 'input-small', aria: { label: 'filter-options' } )
end
# Current params without fields that will be over-written by adv. search,
# or other fields we don't want.
def advanced_search_context
my_params = search_state.params_for_search.except :page, :f_inclusive, :q, :search_field, :op, :index, :sort
my_params.except!(*search_fields_for_advanced_search.map { |_key, field_def| field_def[:key] })
end
def search_fields_for_advanced_search
@search_fields_for_advanced_search ||= begin
blacklight_config.search_fields.select { |_k, v| v.include_in_advanced_search || v.include_in_advanced_search.nil? }
end
end
def facet_field_names_for_advanced_search
@facet_field_names_for_advanced_search ||= begin
blacklight_config.facet_fields.select { |_k, v| v.include_in_advanced_search || v.include_in_advanced_search.nil? }.values.map(&:field)
end
end
# Use configured facet partial name for facet or fallback on 'catalog/facet_limit'
def advanced_search_facet_partial_name(display_facet)
facet_configuration_for_field(display_facet.name).try(:partial) || "catalog/facet_limit"
end
end
end
| 40.965517 | 186 | 0.664141 |
ff1c22fd0c8a71c8d0f1d8a336787a8ca3423e8d | 371 | require 'test_helper'
require 'generators/configure/configure_generator'
class ConfigureGeneratorTest < Rails::Generators::TestCase
tests ConfigureGenerator
destination Rails.root.join('tmp/generators')
setup :prepare_destination
# test "generator runs without errors" do
# assert_nothing_raised do
# run_generator ["arguments"]
# end
# end
end
| 24.733333 | 58 | 0.762803 |
ac3fdfbefd3cf022968917f0d260c8941964205b | 357 | # Helper functions relating to time formatting
module TimeHelper
def range_to_ampm(range)
start_hour = range[0..1].to_i
end_hour = range[6..7].to_i
    start_suffix = (start_hour % 24) < 12 ? ' a.m. - ' : ' p.m. - '
    end_suffix = (end_hour % 24) < 12 ? ' a.m.' : ' p.m.'
    "#{((start_hour - 1) % 12) + 1}#{range[2..4]}#{start_suffix}" \
      "#{((end_hour - 1) % 12) + 1}#{range[8..10]}#{end_suffix}"
end
end
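# Illustrative usage (a sketch, assuming range strings formatted as "HH:MM-HH:MM"):
#
#   include TimeHelper
#   range_to_ampm("09:30-17:00") # => "9:30 a.m. - 5:00 p.m."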
| 39.666667 | 176 | 0.57423 |
8779084e7f1971dafb8aa7e6ea37a403158de294 | 343 | # frozen_string_literal: true
module Bridgetown
module Drops
class PageDrop < Drop
extend Forwardable
mutable false
def_delegators :@obj, :content, :dir, :name, :path, :url, :pager
private def_delegator :@obj, :data, :fallback_data
def relative_url
@obj.relative_url
end
end
end
end
| 18.052632 | 70 | 0.650146 |
e9ef4855227fd18e335a8d661925ce17bbfc62c5 | 3,808 | class AdvocacySite
def self.get_blocks(request)
[
{
type: "block",
title_actual: "TAKE ACTION",
text: "",
color1: "118, 101, 160",
color2: "166, 155, 193",
image: "/images/advocacy3.jpg",
links:
[
{
text_actual: "Letter to teacher or principal",
url: CDO.code_org_url("/promote/letter")
},
{
text_actual: "Letter to local elected official",
url: "https://www.votervoice.net/Code/campaigns/58463/respond",
new_tab: true
},
{
text_actual: "Presentation to advocate for CS",
url: CDO.code_org_url("/files/computer_science_advocacy.pptx"),
new_tab: true
},
{
text_actual: "Host an Hour of Code",
url: "https://hourofcode.com/how-to/public-officials"
}
]
},
{
type: "block",
title_actual: "NATIONWIDE MOMENTUM FOR CS",
text: "",
color1: "0, 148, 202",
color2: "89, 185, 220",
image: "/images/advocacy4.jpg",
links:
[
{
text_actual: "1-page summary",
url: "https://docs.google.com/document/d/15zBdBbXUA-yEzxEq0VeWAEb9nXuGjmNFWNrYp6UdM8U/edit?usp=sharing",
new_tab: true
},
{
text_actual: "National landscape state details",
url: "https://code.org/advocacy/landscape.pdf",
new_tab: true
},
{
text_actual: "Current state and federal legislation",
url: "/current-legislation"
},
{
text_actual: "Nation's leaders support CS",
url: CDO.code_org_url("/files/open_letter_for_cs.pdf"),
new_tab: true
}
]
},
{
type: "block",
title_actual: "POLICY RECOMMENDATIONS",
text: "",
color1: "0, 173, 188",
color2: "89, 202, 211",
image: "/images/advocacy1.jpg",
links:
[
{
text_actual: "Our state policy agenda",
url: CDO.code_org_url("/files/Making_CS_Fundamental.pdf"),
new_tab: true
},
{
text_actual: "Funding models document",
url: "https://docs.google.com/document/d/1yU2YS5YWHEZBN363pUUIy-FBqDDQVoOSFOsSb06qMbM/edit",
new_tab: true
},
{
text_actual: "Creating teacher pathways",
url: CDO.code_org_url("/files/TeacherPathwayRecommendations.pdf"),
new_tab: true
},
{
text_actual: "More policy resources",
url: "/policy-resources"
},
]
},
{
type: "block",
title_actual: "MORE RESOURCES",
text: "",
color1: "185, 191, 21",
color2: "209, 213, 103",
image: "/images/advocacy2.jpg",
links:
[
{
text_actual: "AP CS report",
url: CDO.code_org_url("/promote/ap")
},
{
text_actual: "State planning toolkit",
url: "https://docs.google.com/document/d/13N843-BjK9JHXNWKFzJlxhpw7f6Y2pJF6tpV2aHM1HU/edit",
new_tab: true
},
{
text_actual: "Bring CS to your school or district",
url: CDO.code_org_url("/yourschool")
},
{
text_actual: "Even more resources",
url: CDO.code_org_url("/promote/morestats")
}
]
}
]
end
end
| 29.75 | 118 | 0.470063 |
bb4f51742089e2a606dd57c4b7ecd8909322ef26 | 4,457 | class GraphTool < Formula
include Language::Python::Virtualenv
desc "Efficient network analysis for Python 3"
homepage "https://graph-tool.skewed.de/"
url "https://downloads.skewed.de/graph-tool/graph-tool-2.27.tar.bz2"
sha256 "4740c69720dfbebf8fb3e77057b3e6a257ccf0432cdaf7345f873247390e4313"
revision 3
bottle do
sha256 "022b9cb44fa4886a90355116359451343739f09dc9d9a53fdf145b4763a5a40e" => :mojave
sha256 "22d56edc05fabf65cb0da4f771a8062357793fb56d8abb05330653cd2bf9feab" => :sierra
end
depends_on "pkg-config" => :build
depends_on "boost"
depends_on "boost-python3"
depends_on "cairomm"
depends_on "cgal"
depends_on "google-sparsehash"
depends_on "gtk+3"
depends_on "librsvg"
depends_on :macos => :el_capitan # needs thread-local storage
depends_on "numpy"
depends_on "py3cairo"
depends_on "pygobject3"
depends_on "python"
depends_on "scipy"
resource "Cycler" do
url "https://files.pythonhosted.org/packages/c2/4b/137dea450d6e1e3d474e1d873cd1d4f7d3beed7e0dc973b06e8e10d32488/cycler-0.10.0.tar.gz"
sha256 "cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8"
end
resource "kiwisolver" do
url "https://files.pythonhosted.org/packages/31/60/494fcce70d60a598c32ee00e71542e52e27c978e5f8219fae0d4ac6e2864/kiwisolver-1.0.1.tar.gz"
sha256 "ce3be5d520b4d2c3e5eeb4cd2ef62b9b9ab8ac6b6fedbaa0e39cdb6f50644278"
end
resource "matplotlib" do
url "https://files.pythonhosted.org/packages/ec/ed/46b835da53b7ed05bd4c6cae293f13ec26e877d2e490a53a709915a9dcb7/matplotlib-2.2.2.tar.gz"
sha256 "4dc7ef528aad21f22be85e95725234c5178c0f938e2228ca76640e5e84d8cde8"
end
resource "pyparsing" do
url "https://files.pythonhosted.org/packages/3c/ec/a94f8cf7274ea60b5413df054f82a8980523efd712ec55a59e7c3357cf7c/pyparsing-2.2.0.tar.gz"
sha256 "0832bcf47acd283788593e7a0f542407bd9550a55a8a8435214a1960e04bcb04"
end
resource "python-dateutil" do
url "https://files.pythonhosted.org/packages/a0/b0/a4e3241d2dee665fea11baec21389aec6886655cd4db7647ddf96c3fad15/python-dateutil-2.7.3.tar.gz"
sha256 "e27001de32f627c22380a688bcc43ce83504a7bc5da472209b4c70f02829f0b8"
end
resource "pytz" do
url "https://files.pythonhosted.org/packages/10/76/52efda4ef98e7544321fd8d5d512e11739c1df18b0649551aeccfb1c8376/pytz-2018.4.tar.gz"
sha256 "c06425302f2cf668f1bba7a0a03f3c1d34d4ebeef2c72003da308b3947c7f749"
end
resource "six" do
url "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz"
sha256 "70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9"
end
# Remove for > 2.27
# Upstream commit from 3 Jul 2018 "Fix incompatibility with Python 3.7"
patch do
url "https://git.skewed.de/count0/graph-tool/commit/0407f41a.diff"
sha256 "94559544ad95753a13ee701c02af706c8b296c54af2c1706520ec96e24aa6d39"
end
# Remove for > 2.27
# Upstream commit from 3 Oct 2018 "Fix compilation with CGAL 4.13"
patch do
url "https://git.skewed.de/count0/graph-tool/commit/aa39e4a6.diff"
sha256 "5a4ea386342c2de9422da5b07dd4272d47d2cdbba99d9b258bff65a69da562c1"
end
def install
# Work around "error: no member named 'signbit' in the global namespace"
ENV["SDKROOT"] = MacOS.sdk_path if MacOS.version == :high_sierra
xy = Language::Python.major_minor_version "python3"
venv = virtualenv_create(libexec, "python3")
resources.each do |r|
venv.pip_install_and_link r
end
args = %W[
--disable-debug
--disable-dependency-tracking
--prefix=#{prefix}
PYTHON=python3
PYTHON_LIBS=-undefined\ dynamic_lookup
--with-python-module-path=#{lib}/python#{xy}/site-packages
--with-boost-python=boost_python#{xy.to_s.delete(".")}-mt
]
args << "--with-expat=#{MacOS.sdk_path}/usr" if MacOS.sdk_path_if_needed
system "./configure", *args
system "make", "install"
site_packages = "lib/python#{xy}/site-packages"
pth_contents = "import site; site.addsitedir('#{libexec/site_packages}')\n"
(prefix/site_packages/"homebrew-graph-tool.pth").write pth_contents
end
test do
(testpath/"test.py").write <<~EOS
import graph_tool.all as gt
g = gt.Graph()
v1 = g.add_vertex()
v2 = g.add_vertex()
e = g.add_edge(v1, v2)
assert g.num_edges() == 1
assert g.num_vertices() == 2
EOS
system "python3", "test.py"
end
end
| 36.532787 | 145 | 0.755665 |
1c3cc6c158e17c6f04447be810a0e799471013d6 | 180 | # frozen_string_literal: true
class CertbotRenewWorker
include Sidekiq::Worker
def perform(*)
    # Restart nginx only after the renewal has finished so it serves the new certificate.
    `sudo certbot renew --force-renewal && sudo service nginx restart`
end
end
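# Illustrative invocation (assumption — the actual scheduling setup is not shown in this file):
#
#   CertbotRenewWorker.perform_async        # enqueue a renewal run immediately
#   CertbotRenewWorker.perform_in(12.hours) # or schedule one for later (ActiveSupport durations)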
| 18 | 69 | 0.75 |
3894b6d63861f036fff1d2ff1852ee69b395b1b5 | 195 | module NumbersAndWords
module Strategies
module FiguresConverter
module Languages
class Ua < Base
include Families::Cyrillic
end
end
end
end
end
| 16.25 | 36 | 0.630769 |
d517b7c833758d61f342c6f383e0a7bd62fdc7db | 231 | module Forms::LabelledFieldRenderer
def render
super do
result = ActiveSupport::SafeBuffer.new
result << label_tag(name, label) if field.label?
result << yield if block_given?
result
end
end
end
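# Sketch of the assumed wiring (class names below are hypothetical, not from this codebase):
# the base renderer's #render must yield so the label and block content can be injected.
#
#   class Forms::TextFieldRenderer < Forms::BaseFieldRenderer
#     prepend Forms::LabelledFieldRenderer
#   end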
| 21 | 54 | 0.670996 |
f74d6acc18fb04f13c85fadca31df8790e8592bc | 1,182 | cask "visual-studio" do
version "8.10.15.32"
sha256 "0b0d36b752c084d53f4d1ea3afd99cb538da2600327902e11baec0e5036944db"
url "https://dl.xamarin.com/VsMac/VisualStudioForMac-#{version}.dmg",
verified: "dl.xamarin.com/VsMac/"
name "Microsoft Visual Studio"
desc "Integrated development environment"
homepage "https://www.visualstudio.com/vs/visual-studio-mac/"
livecheck do
url "https://docs.microsoft.com/en-us/visualstudio/releasenotes/vs2019-mac-relnotes"
regex(/Visual\s*Studio\s*\d+\s+for\s+Mac\s+version\s+\d+(?:\.\d+)*\s+\((\d+(?:\.\d+)+)\)/i)
end
auto_updates true
depends_on macos: ">= :high_sierra"
depends_on cask: "homebrew/cask-versions/mono-mdk-for-visual-studio"
app "Visual Studio.app"
zap trash: [
"/Applications/Xamarin Profiler.app",
"/Applications/Xamarin Workbooks.app",
"~/Library/Application Support/CrashReporter/VisualStudio*",
"~/Library/Application Support/VisualStudio",
"~/Library/Caches/VisualStudio",
"~/Library/Developer/Xamarin",
"~/Library/Logs/VisualStudio",
"~/Library/Preferences/Visual*Studio",
"~/Library/Preferences/Xamarin",
"~/Library/VisualStudio",
]
end
| 33.771429 | 95 | 0.70643 |
615cd862fe9ac5617df9d0d3327a3d4e00576908 | 195 | describe 'format' do
URL_TEST_DATA.each_pair do |url, result|
it "#{url}" do
parsed = MDUrl::Url.urlParse(url)
expect(MDUrl::Format.format(parsed)).to eq url
end
end
end
| 19.5 | 52 | 0.646154 |
bfd33ea4d2467bf4b134784c108f918b3075edad | 1,522 | require 'digest'
module ThinSearch
class QueryExpression
SINGLE_QUOTE = "'".freeze
DOUBLE_QUOTE = '"'.freeze
class Token
attr_accessor :quote_mark
attr_accessor :value
def self.quote_mark(s)
if s.start_with?(SINGLE_QUOTE) && s.end_with?(SINGLE_QUOTE) then
SINGLE_QUOTE
elsif s.start_with?(DOUBLE_QUOTE) && s.end_with?(DOUBLE_QUOTE) then
DOUBLE_QUOTE
else
nil
end
end
def self.quoted?(s)
!!quote_mark(s)
end
def self.unquote(s)
quoted?(s) ? s[1..-2] : s
end
def initialize(token)
@value = token
@quote_mark = Token.quote_mark(token)
unquote! if quoted?
end
def ==(other)
quote_mark == other.quote_mark && value == other.value
end
def quoted?
!!@quote_mark
end
def unquote!
@value.replace(Token.unquote(value))
end
def to_s
value
end
def sql_escape(char)
value.gsub(char, "#{char}#{char}")
end
def md5
::Digest::MD5.hexdigest(value)
end
def double_quoted
"#{DOUBLE_QUOTE}#{sql_escape(DOUBLE_QUOTE)}#{DOUBLE_QUOTE}"
end
def single_quoted
"#{SINGLE_QUOTE}#{sql_escape(SINGLE_QUOTE)}#{SINGLE_QUOTE}"
end
def inspect(*args, &block)
if quoted? then
"#{ value} q=#{ quote_mark }"
else
super
end
end
end
end
end
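# Illustrative usage of the token class above (not part of the original source):
#
#   token = ThinSearch::QueryExpression::Token.new('"hello world"')
#   token.quoted?       # => true
#   token.quote_mark    # => '"'
#   token.value         # => "hello world"
#   token.single_quoted # => "'hello world'"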
| 19.265823 | 75 | 0.545992 |
7abdcd228272daef4b0d1795901ae79fb68b9db0 | 228 | #!/usr/bin/env ruby
$LOAD_PATH.unshift "examples"
require 'acme/payment'
payment = Acme::Payment.new \
from: '[email protected]',
to: '[email protected]',
amount: 150_00
payment.process until payment.finished?
| 14.25 | 39 | 0.72807 |
ed4d887a1f2c0336da48807adf29311f97f6bb62 | 306 | Rails.application.routes.draw do
get 'start_page/select'
root 'start_page#select', as: "course_selection"
get 'all', to: 'week#courses'
get 'week/select'
get 'default', to: 'week#default'
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
end
| 27.818182 | 101 | 0.72549 |
f71c7c66d3ace4926cebfe919905a75fcb6c6403 | 611 | Rails.application.routes.draw do
root 'static_pages#home'
get '/help', to: 'static_pages#help'
get '/about', to: 'static_pages#about'
get '/contact', to: 'static_pages#contact'
get '/signup', to: 'users#new'
post '/signup', to: 'users#create'
get '/login', to: 'sessions#new'
post '/login', to: 'sessions#create'
delete '/logout', to: 'sessions#destroy'
resources :users, only: [:show, :index, :edit, :update, :destroy]
resources :account_activations, only: [:edit]
resources :password_resets, only: [:new, :create, :edit, :update]
resources :microposts, only: [:create, :destroy]
end
| 38.1875 | 67 | 0.677578 |
b94509531e654bb7be9c5f4590dc81e47f1dc195 | 662 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Practiceapp
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
end
end
| 33.1 | 82 | 0.765861 |
acb5cedde45437bf6c2ad6ab4bb23638688e3568 | 34,129 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# EDITING INSTRUCTIONS
# This file was generated from the file
# https://github.com/googleapis/googleapis/blob/master/google/pubsub/v1/pubsub.proto,
# and updates to that file get reflected here through a refresh process.
# For the short term, the refresh process will only be runnable by Google
# engineers.
require "json"
require "pathname"
require "google/gax"
require "google/iam/v1/iam_policy_pb"
require "google/pubsub/v1/pubsub_pb"
require "google/cloud/pubsub/v1/credentials"
module Google
module Cloud
module PubSub
module V1
# The service that an application uses to manipulate topics, and to send
# messages to a topic.
#
# @!attribute [r] iam_policy_stub
# @return [Google::Iam::V1::IAMPolicy::Stub]
# @!attribute [r] publisher_stub
# @return [Google::Cloud::PubSub::V1::Publisher::Stub]
class PublisherClient
# @private
attr_reader :iam_policy_stub, :publisher_stub
# The default address of the service.
SERVICE_ADDRESS = "pubsub.googleapis.com".freeze
# The default port of the service.
DEFAULT_SERVICE_PORT = 443
# The default set of gRPC interceptors.
GRPC_INTERCEPTORS = []
DEFAULT_TIMEOUT = 30
PAGE_DESCRIPTORS = {
"list_topics" => Google::Gax::PageDescriptor.new(
"page_token",
"next_page_token",
"topics"),
"list_topic_subscriptions" => Google::Gax::PageDescriptor.new(
"page_token",
"next_page_token",
"subscriptions")
}.freeze
private_constant :PAGE_DESCRIPTORS
BUNDLE_DESCRIPTORS = {
"publish" => Google::Gax::BundleDescriptor.new(
"messages",
[
"topic"
],
subresponse_field: "message_ids")
}.freeze
private_constant :BUNDLE_DESCRIPTORS
# The scopes needed to make gRPC calls to all of the methods defined in
# this service.
ALL_SCOPES = [
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/pubsub"
].freeze
PROJECT_PATH_TEMPLATE = Google::Gax::PathTemplate.new(
"projects/{project}"
)
private_constant :PROJECT_PATH_TEMPLATE
TOPIC_PATH_TEMPLATE = Google::Gax::PathTemplate.new(
"projects/{project}/topics/{topic}"
)
private_constant :TOPIC_PATH_TEMPLATE
# Returns a fully-qualified project resource name string.
# @param project [String]
# @return [String]
def self.project_path project
PROJECT_PATH_TEMPLATE.render(
:"project" => project
)
end
# Returns a fully-qualified topic resource name string.
# @param project [String]
# @param topic [String]
# @return [String]
def self.topic_path project, topic
TOPIC_PATH_TEMPLATE.render(
:"project" => project,
:"topic" => topic
)
end
# @param credentials [Google::Auth::Credentials, String, Hash, GRPC::Core::Channel, GRPC::Core::ChannelCredentials, Proc]
# Provides the means for authenticating requests made by the client. This parameter can
# be many types.
# A `Google::Auth::Credentials` uses a the properties of its represented keyfile for
# authenticating requests made by this client.
# A `String` will be treated as the path to the keyfile to be used for the construction of
# credentials for this client.
# A `Hash` will be treated as the contents of a keyfile to be used for the construction of
# credentials for this client.
# A `GRPC::Core::Channel` will be used to make calls through.
# A `GRPC::Core::ChannelCredentials` for the setting up the RPC client. The channel credentials
# should already be composed with a `GRPC::Core::CallCredentials` object.
# A `Proc` will be used as an updater_proc for the Grpc channel. The proc transforms the
# metadata for requests, generally, to give OAuth credentials.
# @param scopes [Array<String>]
# The OAuth scopes for this service. This parameter is ignored if
# an updater_proc is supplied.
# @param client_config [Hash]
# A Hash for call options for each method. See
# Google::Gax#construct_settings for the structure of
# this data. Falls back to the default config if not specified
# or the specified config is missing data points.
# @param timeout [Numeric]
# The default timeout, in seconds, for calls made through this client.
# @param metadata [Hash]
# Default metadata to be sent with each request. This can be overridden on a per call basis.
# @param exception_transformer [Proc]
# An optional proc that intercepts any exceptions raised during an API call to inject
# custom error handling.
def initialize \
credentials: nil,
scopes: ALL_SCOPES,
client_config: {},
timeout: DEFAULT_TIMEOUT,
metadata: nil,
exception_transformer: nil,
lib_name: nil,
lib_version: ""
# These require statements are intentionally placed here to initialize
# the gRPC module only when it's required.
# See https://github.com/googleapis/toolkit/issues/446
require "google/gax/grpc"
require "google/iam/v1/iam_policy_services_pb"
require "google/pubsub/v1/pubsub_services_pb"
credentials ||= Google::Cloud::PubSub::V1::Credentials.default
if credentials.is_a?(String) || credentials.is_a?(Hash)
updater_proc = Google::Cloud::PubSub::V1::Credentials.new(credentials).updater_proc
end
if credentials.is_a?(GRPC::Core::Channel)
channel = credentials
end
if credentials.is_a?(GRPC::Core::ChannelCredentials)
chan_creds = credentials
end
if credentials.is_a?(Proc)
updater_proc = credentials
end
if credentials.is_a?(Google::Auth::Credentials)
updater_proc = credentials.updater_proc
end
package_version = Gem.loaded_specs['google-cloud-pubsub'].version.version
google_api_client = "gl-ruby/#{RUBY_VERSION}"
google_api_client << " #{lib_name}/#{lib_version}" if lib_name
google_api_client << " gapic/#{package_version} gax/#{Google::Gax::VERSION}"
google_api_client << " grpc/#{GRPC::VERSION}"
google_api_client.freeze
headers = { :"x-goog-api-client" => google_api_client }
headers.merge!(metadata) unless metadata.nil?
client_config_file = Pathname.new(__dir__).join(
"publisher_client_config.json"
)
defaults = client_config_file.open do |f|
Google::Gax.construct_settings(
"google.pubsub.v1.Publisher",
JSON.parse(f.read),
client_config,
Google::Gax::Grpc::STATUS_CODE_NAMES,
timeout,
bundle_descriptors: BUNDLE_DESCRIPTORS,
page_descriptors: PAGE_DESCRIPTORS,
errors: Google::Gax::Grpc::API_ERRORS,
metadata: headers
)
end
# Allow overriding the service path/port in subclasses.
service_path = self.class::SERVICE_ADDRESS
port = self.class::DEFAULT_SERVICE_PORT
interceptors = self.class::GRPC_INTERCEPTORS
@iam_policy_stub = Google::Gax::Grpc.create_stub(
service_path,
port,
chan_creds: chan_creds,
channel: channel,
updater_proc: updater_proc,
scopes: scopes,
interceptors: interceptors,
&Google::Iam::V1::IAMPolicy::Stub.method(:new)
)
@publisher_stub = Google::Gax::Grpc.create_stub(
service_path,
port,
chan_creds: chan_creds,
channel: channel,
updater_proc: updater_proc,
scopes: scopes,
interceptors: interceptors,
&Google::Cloud::PubSub::V1::Publisher::Stub.method(:new)
)
@create_topic = Google::Gax.create_api_call(
@publisher_stub.method(:create_topic),
defaults["create_topic"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'name' => request.name}
end
)
@update_topic = Google::Gax.create_api_call(
@publisher_stub.method(:update_topic),
defaults["update_topic"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'topic.name' => request.topic.name}
end
)
@publish = Google::Gax.create_api_call(
@publisher_stub.method(:publish),
defaults["publish"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'topic' => request.topic}
end
)
@get_topic = Google::Gax.create_api_call(
@publisher_stub.method(:get_topic),
defaults["get_topic"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'topic' => request.topic}
end
)
@list_topics = Google::Gax.create_api_call(
@publisher_stub.method(:list_topics),
defaults["list_topics"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'project' => request.project}
end
)
@list_topic_subscriptions = Google::Gax.create_api_call(
@publisher_stub.method(:list_topic_subscriptions),
defaults["list_topic_subscriptions"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'topic' => request.topic}
end
)
@delete_topic = Google::Gax.create_api_call(
@publisher_stub.method(:delete_topic),
defaults["delete_topic"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'topic' => request.topic}
end
)
@set_iam_policy = Google::Gax.create_api_call(
@iam_policy_stub.method(:set_iam_policy),
defaults["set_iam_policy"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'resource' => request.resource}
end
)
@get_iam_policy = Google::Gax.create_api_call(
@iam_policy_stub.method(:get_iam_policy),
defaults["get_iam_policy"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'resource' => request.resource}
end
)
@test_iam_permissions = Google::Gax.create_api_call(
@iam_policy_stub.method(:test_iam_permissions),
defaults["test_iam_permissions"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'resource' => request.resource}
end
)
end
# Service calls
# Creates the given topic with the given name. See the
# <a href="https://cloud.google.com/pubsub/docs/admin#resource_names">
# resource name rules</a>.
#
# @param name [String]
# The name of the topic. It must have the format
# `"projects/{project}/topics/{topic}"`. `{topic}` must start with a letter,
# and contain only letters (`[A-Za-z]`), numbers (`[0-9]`), dashes (`-`),
# underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or percent
# signs (`%`). It must be between 3 and 255 characters in length, and it
# must not start with `"goog"`.
# @param labels [Hash{String => String}]
# See <a href="https://cloud.google.com/pubsub/docs/labels"> Creating and
# managing labels</a>.
# @param message_storage_policy [Google::Cloud::PubSub::V1::MessageStoragePolicy | Hash]
# Policy constraining how messages published to the topic may be stored. It
# is determined when the topic is created based on the policy configured at
# the project level. It must not be set by the caller in the request to
# CreateTopic or to UpdateTopic. This field will be populated in the
# responses for GetTopic, CreateTopic, and UpdateTopic: if not present in the
# response, then no constraints are in effect.
# A hash of the same form as `Google::Cloud::PubSub::V1::MessageStoragePolicy`
# can also be provided.
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g, timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Cloud::PubSub::V1::Topic]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Cloud::PubSub::V1::Topic]
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/cloud/pubsub"
#
# publisher_client = Google::Cloud::PubSub::Publisher.new(version: :v1)
# formatted_name = Google::Cloud::PubSub::V1::PublisherClient.topic_path("[PROJECT]", "[TOPIC]")
# response = publisher_client.create_topic(formatted_name)
def create_topic \
name,
labels: nil,
message_storage_policy: nil,
options: nil,
&block
req = {
name: name,
labels: labels,
message_storage_policy: message_storage_policy
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Cloud::PubSub::V1::Topic)
@create_topic.call(req, options, &block)
end
# Updates an existing topic. Note that certain properties of a
# topic are not modifiable.
#
# @param topic [Google::Cloud::PubSub::V1::Topic | Hash]
# The updated topic object.
# A hash of the same form as `Google::Cloud::PubSub::V1::Topic`
# can also be provided.
# @param update_mask [Google::Protobuf::FieldMask | Hash]
# Indicates which fields in the provided topic to update. Must be specified
# and non-empty. Note that if `update_mask` contains
# "message_storage_policy" then the new value will be determined based on the
# policy configured at the project or organization level. The
# `message_storage_policy` must not be set in the `topic` provided above.
# A hash of the same form as `Google::Protobuf::FieldMask`
# can also be provided.
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g, timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Cloud::PubSub::V1::Topic]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Cloud::PubSub::V1::Topic]
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/cloud/pubsub"
#
# publisher_client = Google::Cloud::PubSub::Publisher.new(version: :v1)
#
# # TODO: Initialize `topic`:
# topic = {}
#
# # TODO: Initialize `update_mask`:
# update_mask = {}
# response = publisher_client.update_topic(topic, update_mask)
def update_topic \
topic,
update_mask,
options: nil,
&block
req = {
topic: topic,
update_mask: update_mask
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Cloud::PubSub::V1::UpdateTopicRequest)
@update_topic.call(req, options, &block)
end
# Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic
# does not exist.
#
# @param topic [String]
# The messages in the request will be published on this topic.
# Format is `projects/{project}/topics/{topic}`.
# @param messages [Array<Google::Cloud::PubSub::V1::PubsubMessage | Hash>]
# The messages to publish.
# A hash of the same form as `Google::Cloud::PubSub::V1::PubsubMessage`
# can also be provided.
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g, timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Cloud::PubSub::V1::PublishResponse]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Cloud::PubSub::V1::PublishResponse]
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/cloud/pubsub"
#
# publisher_client = Google::Cloud::PubSub::Publisher.new(version: :v1)
# formatted_topic = Google::Cloud::PubSub::V1::PublisherClient.topic_path("[PROJECT]", "[TOPIC]")
# data = ''
# messages_element = { data: data }
# messages = [messages_element]
# response = publisher_client.publish(formatted_topic, messages)
def publish \
topic,
messages,
options: nil,
&block
req = {
topic: topic,
messages: messages
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Cloud::PubSub::V1::PublishRequest)
@publish.call(req, options, &block)
end
# Gets the configuration of a topic.
#
# @param topic [String]
# The name of the topic to get.
# Format is `projects/{project}/topics/{topic}`.
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g, timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Cloud::PubSub::V1::Topic]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Cloud::PubSub::V1::Topic]
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/cloud/pubsub"
#
# publisher_client = Google::Cloud::PubSub::Publisher.new(version: :v1)
# formatted_topic = Google::Cloud::PubSub::V1::PublisherClient.topic_path("[PROJECT]", "[TOPIC]")
# response = publisher_client.get_topic(formatted_topic)
def get_topic \
topic,
options: nil,
&block
req = {
topic: topic
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Cloud::PubSub::V1::GetTopicRequest)
@get_topic.call(req, options, &block)
end
# Lists matching topics.
#
# @param project [String]
# The name of the project in which to list topics.
# Format is `projects/{project-id}`.
# @param page_size [Integer]
# The maximum number of resources contained in the underlying API
# response. If page streaming is performed per-resource, this
# parameter does not affect the return value. If page streaming is
# performed per-page, this determines the maximum number of
# resources in a page.
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g, timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Gax::PagedEnumerable<Google::Cloud::PubSub::V1::Topic>]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Gax::PagedEnumerable<Google::Cloud::PubSub::V1::Topic>]
# An enumerable of Google::Cloud::PubSub::V1::Topic instances.
# See Google::Gax::PagedEnumerable documentation for other
# operations such as per-page iteration or access to the response
# object.
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/cloud/pubsub"
#
# publisher_client = Google::Cloud::PubSub::Publisher.new(version: :v1)
# formatted_project = Google::Cloud::PubSub::V1::PublisherClient.project_path("[PROJECT]")
#
# # Iterate over all results.
# publisher_client.list_topics(formatted_project).each do |element|
# # Process element.
# end
#
# # Or iterate over results one page at a time.
# publisher_client.list_topics(formatted_project).each_page do |page|
# # Process each page at a time.
# page.each do |element|
# # Process element.
# end
# end
def list_topics \
project,
page_size: nil,
options: nil,
&block
req = {
project: project,
page_size: page_size
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Cloud::PubSub::V1::ListTopicsRequest)
@list_topics.call(req, options, &block)
end
# Lists the names of the subscriptions on this topic.
#
# @param topic [String]
# The name of the topic that subscriptions are attached to.
# Format is `projects/{project}/topics/{topic}`.
# @param page_size [Integer]
# The maximum number of resources contained in the underlying API
# response. If page streaming is performed per-resource, this
# parameter does not affect the return value. If page streaming is
# performed per-page, this determines the maximum number of
# resources in a page.
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g, timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Gax::PagedEnumerable<String>]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Gax::PagedEnumerable<String>]
# An enumerable of String instances.
# See Google::Gax::PagedEnumerable documentation for other
# operations such as per-page iteration or access to the response
# object.
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/cloud/pubsub"
#
# publisher_client = Google::Cloud::PubSub::Publisher.new(version: :v1)
# formatted_topic = Google::Cloud::PubSub::V1::PublisherClient.topic_path("[PROJECT]", "[TOPIC]")
#
# # Iterate over all results.
# publisher_client.list_topic_subscriptions(formatted_topic).each do |element|
# # Process element.
# end
#
# # Or iterate over results one page at a time.
# publisher_client.list_topic_subscriptions(formatted_topic).each_page do |page|
# # Process each page at a time.
# page.each do |element|
# # Process element.
# end
# end
def list_topic_subscriptions \
topic,
page_size: nil,
options: nil,
&block
req = {
topic: topic,
page_size: page_size
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Cloud::PubSub::V1::ListTopicSubscriptionsRequest)
@list_topic_subscriptions.call(req, options, &block)
end
# Deletes the topic with the given name. Returns `NOT_FOUND` if the topic
# does not exist. After a topic is deleted, a new topic may be created with
# the same name; this is an entirely new topic with none of the old
# configuration or subscriptions. Existing subscriptions to this topic are
# not deleted, but their `topic` field is set to `_deleted-topic_`.
#
# @param topic [String]
# Name of the topic to delete.
# Format is `projects/{project}/topics/{topic}`.
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g, timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result []
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/cloud/pubsub"
#
# publisher_client = Google::Cloud::PubSub::Publisher.new(version: :v1)
# formatted_topic = Google::Cloud::PubSub::V1::PublisherClient.topic_path("[PROJECT]", "[TOPIC]")
# publisher_client.delete_topic(formatted_topic)
def delete_topic \
topic,
options: nil,
&block
req = {
topic: topic
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Cloud::PubSub::V1::DeleteTopicRequest)
@delete_topic.call(req, options, &block)
nil
end
# Sets the access control policy on the specified resource. Replaces any
# existing policy.
#
# @param resource [String]
# REQUIRED: The resource for which the policy is being specified.
# `resource` is usually specified as a path. For example, a Project
# resource is specified as `projects/{project}`.
# @param policy [Google::Iam::V1::Policy | Hash]
# REQUIRED: The complete policy to be applied to the `resource`. The size of
# the policy is limited to a few 10s of KB. An empty policy is a
# valid policy but certain Cloud Platform services (such as Projects)
# might reject them.
# A hash of the same form as `Google::Iam::V1::Policy`
# can also be provided.
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g, timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Iam::V1::Policy]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Iam::V1::Policy]
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/cloud/pubsub"
#
# publisher_client = Google::Cloud::PubSub::Publisher.new(version: :v1)
# formatted_resource = Google::Cloud::PubSub::V1::PublisherClient.topic_path("[PROJECT]", "[TOPIC]")
#
# # TODO: Initialize `policy`:
# policy = {}
# response = publisher_client.set_iam_policy(formatted_resource, policy)
def set_iam_policy \
resource,
policy,
options: nil,
&block
req = {
resource: resource,
policy: policy
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Iam::V1::SetIamPolicyRequest)
@set_iam_policy.call(req, options, &block)
end
# Gets the access control policy for a resource.
# Returns an empty policy if the resource exists and does not have a policy
# set.
#
# @param resource [String]
# REQUIRED: The resource for which the policy is being requested.
# `resource` is usually specified as a path. For example, a Project
# resource is specified as `projects/{project}`.
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g, timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Iam::V1::Policy]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Iam::V1::Policy]
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/cloud/pubsub"
#
# publisher_client = Google::Cloud::PubSub::Publisher.new(version: :v1)
# formatted_resource = Google::Cloud::PubSub::V1::PublisherClient.topic_path("[PROJECT]", "[TOPIC]")
# response = publisher_client.get_iam_policy(formatted_resource)
def get_iam_policy \
resource,
options: nil,
&block
req = {
resource: resource
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Iam::V1::GetIamPolicyRequest)
@get_iam_policy.call(req, options, &block)
end
# Returns permissions that a caller has on the specified resource.
# If the resource does not exist, this will return an empty set of
# permissions, not a NOT_FOUND error.
#
# @param resource [String]
# REQUIRED: The resource for which the policy detail is being requested.
# `resource` is usually specified as a path. For example, a Project
# resource is specified as `projects/{project}`.
# @param permissions [Array<String>]
# The set of permissions to check for the `resource`. Permissions with
# wildcards (such as '*' or 'storage.*') are not allowed. For more
# information see
# [IAM Overview](https://cloud.google.com/iam/docs/overview#permissions).
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g, timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Iam::V1::TestIamPermissionsResponse]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Iam::V1::TestIamPermissionsResponse]
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/cloud/pubsub"
#
# publisher_client = Google::Cloud::PubSub::Publisher.new(version: :v1)
# formatted_resource = Google::Cloud::PubSub::V1::PublisherClient.topic_path("[PROJECT]", "[TOPIC]")
#
# # TODO: Initialize `permissions`:
# permissions = []
# response = publisher_client.test_iam_permissions(formatted_resource, permissions)
def test_iam_permissions \
resource,
permissions,
options: nil,
&block
req = {
resource: resource,
permissions: permissions
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Iam::V1::TestIamPermissionsRequest)
@test_iam_permissions.call(req, options, &block)
end
end
end
end
Pubsub = PubSub unless const_defined? :Pubsub
end
end
| 44.438802 | 131 | 0.573032 |
d5d53ee1657d4ddaa80c8289fe027d624b75d4e3 | 1,008 | require_relative "lib/blorgh/version"
Gem::Specification.new do |spec|
spec.name = "blorgh"
spec.version = Blorgh::VERSION
spec.authors = ["Enow Mbi"]
spec.email = ["[email protected]"]
spec.homepage = "https://github.com/enowmbi/blorgh"
spec.summary = "Summary of Blorgh."
spec.description = "Description of Blorgh."
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
spec.metadata["allowed_push_host"] = "https://github.com/enowmbi/blorgh"
spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = "https://github.com/enowmbi/blorgh"
spec.metadata["changelog_uri"] = "https://github.com/enowmbi/blorgh/changelog.md"
spec.files = Dir["{app,config,db,lib}/**/*", "MIT-LICENSE", "Rakefile", "README.md"]
spec.add_dependency "rails", "~> 6.1.4", ">= 6.1.4.1"
end
| 40.32 | 96 | 0.684524 |
034d7ada65bdb5561ad812f913cde6900e9dcd59 | 12,048 | name "kkafka"
maintainer "Jim Dowling"
maintainer_email "[email protected]"
license "Apache v2.0"
description 'Installs/Configures/Runs kkafka. Karamelized version of https://github.com/mthssdrbrg/kafka-cookbook'
version "2.5.0"
recipe "kkafka::install", "Installs kafka binaries"
recipe "kkafka::default", "Configures Kafka"
#link:<a target='_blank' href='http://%host%:11111/'>Launch the WebUI for Kafka Monitor</a>
recipe "kkafka::monitor", "Helper webapp to monitor performance of kafka"
recipe "kkafka::client", "Kafka client installation"
recipe "kkafka::purge", "Removes and deletes Kafka"
depends "java", '~> 7.0.0'
depends 'kagent'
depends 'ndb'
depends 'conda'
depends 'kzookeeper'
depends 'hops'
%w{ ubuntu debian rhel centos }.each do |os|
supports os
end
attribute "kkafka/authorizer_version",
:description => "Hops Kafka Authorizer version",
:type => 'string'
attribute "kkafka/dir",
:description => "Base directory to install kafka (default: /opt)",
:type => 'string'
attribute "kkafka/user",
:description => "user to run kafka as",
:type => 'string'
attribute "kkafka/user_id",
:description => "Kafka user id. Default: 1504",
:type => 'string'
attribute "kkafka/user-home",
:description => "Home directory of kafka user",
:type => 'string'
attribute "kkafka/group",
:description => "group to run kafka as",
:type => 'string'
attribute "kkafka/group_id",
:description => "Kafka group id. Default: 1504",
:type => 'string'
attribute "kafka/ulimit",
:description => "ULimit for the max number of open files allowed",
:type => 'string'
attribute "kkafka/offset_monitor/port",
:description => "Port for Kafka monitor service",
:type => 'string'
attribute "kkafka/memory_mb",
:description => "Kafka server memory in mbs",
:type => 'string'
attribute "kkafka/broker/broker/id",
:description => "broker id",
:type => 'string'
attribute "kkafka/broker/host/name",
:description => "hostname to be used in server.properties",
:type => 'string'
attribute "kkafka/broker/advertised/listeners",
:description => "Listeners to publish to ZooKeeper for clients to use, if different than the `listeners` config property. For example, INTERNAL://hops1:9091,EXTERNAL://hops1:9092",
:type => 'string'
attribute "kkafka/broker/zookeeper_connection_timeout_ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/retention/hours",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/retention/size",
:description => "",
:type => 'string'
attribute "kkafka/broker/message/max/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/num/network/threads",
:description => "",
:type => 'string'
attribute "kkafka/broker/num/io/threads",
:description => "",
:type => 'string'
attribute "kkafka/broker/num/recovery/threads/per/data/dir",
:description => "",
:type => 'string'
attribute "kkafka/broker/num/replica/fetchers",
:description => "",
:type => 'string'
attribute "kkafka/broker/queued/max/requests",
:description => "",
:type => 'string'
attribute "kkafka/broker/socket/send/buffer/bytes",
:description => "",
:type => 'string'
attribute "kkafka/brattribute oker/socket/receive/buffer/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/sockeattribute t/request/max/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/num/partitionsattribute ",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/segment/bytesattribute ",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/message/timestamp/difference/max/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/roll/hoursattribute ",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/retention/hours",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/retention/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/retention/check/interval/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/index/size/max/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/index/interval/bytesattribute ",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/flush/interval/messagesattribute ",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/flush/scheduler/interval/msattribute ",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/flush/interval/msattribute ",
:description => "",
:type => 'string'
attribute "kkafka/broker/leader/imbalance/check/intervalattribute /seconds",
:description => "",
:type => 'string'
attribute "kkafka/broker/leader/imbalance/per/broker/percentageattribute ",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/dir",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/dirs",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/flush/offset/checkpoint/interval/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/message/format/version",
:description => "",
:type => 'string'
attribute "kkafka/broker/offsets/topic/replication/factor",
:description => "",
:type => 'string'
attribute "kkafka/broker/port",
:description => "",
:type => 'string'
attribute "kkafka/broker/queued/max/requests",
:description => "",
:type => 'string'
attribute "kkafka/broker/quota/consumer/default",
:description => "",
:type => 'string'
attribute "kkafka/broker/quota/producer/default",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/fetch/max/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/fetch/min/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/fetch/wait/max/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/high/watermark/checkpoint/interval/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/lag/time/max/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/socket/receive/buffer/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/socket/timeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/request/timeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/connection/timeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/session/timeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/set/acl",
:description => "",
:type => 'string'
attribute "kkafka/broker/replication/factor",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/cleaner/enable",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/cleaner/io/buffer/load/factor",
:description => "",
:type => 'string'
attribute "kkafka/broker/security/inter/broker/protocol",
:description => "",
:type => 'string'
attribute "kkafka/inter/broker/protocol/version",
:description => "",
:type => 'string'
attribute "kkafka/broker/rack",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/client/auth",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/key/password",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/keystore/location",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/keystore/password",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/truststore/location",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/truststore/password",
:description => "",
:type => 'string'
attribute "kkafka/broker/authorizer/class/name",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/endpoint/identification/algorithm",
:description => "",
:type => 'string'
attribute "kkafka/broker/principal/builder/class",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/synctime/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/connectiontimeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/sessiontimeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/synctime/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/super/users",
:description => "For example: User:dn0;User:glassfish",
:type => 'string'
attribute "kkafka/broker/database/pool/prepstmt/cache/enabled",
:description => "PreparedStatement of database pool of HopsAclAuthorizer enabled or not",
:type => 'string'
attribute "kkafka/broker/database/pool/prepstmt/cache/size",
:description => "PreparedStatement cache size of database pool",
:type => 'string'
attribute "kkafka/broker/database/pool/prepstmt/cache/sql/limit",
:description => "PreparedStatement sql cache limit of database pool",
:type => 'string'
attribute "kkafka/broker/database/pool/size",
:description => "Size of database pool for HopsAclAuthorizer",
:type => 'string'
attribute "kkafka/broker/acl/polling/frequency/ms",
:description => "Polling frequency of HopsKafkaAuthorizer to retrieve ACLs",
:type => 'string'
attribute "kkafka/mysql/user",
:description => "DB user for the Kafka service",
:type => 'string'
attribute "kkafka/mysql/password",
:description => "Password of the DB user for the Kafka service",
:type => 'string'
attribute "kkafka/default/private_ips",
:description => "Set ip addresses",
:type => "array"
attribute "kkafka/default/public_ips",
:description => "Set ip addresses",
:type => "array"
attribute "kagent/enabled",
:description => "'false' to disable. 'true' is default.",
:type => 'string'
attribute "install/dir",
:description => "Set to a base directory under which we will install.",
:type => "string"
attribute "install/user",
:description => "User to install the services as",
:type => "string"
attribute "kkafka/jmx_port",
:description => "JMX port on which Kafka JVM binds to",
:type => "string"
attribute "kkafka/jmx_user",
:description => "JMX user for Kafka JVM",
:type => "string"
attribute "kkafka/jmx_password",
:description => "JMX password for Kafka JVM",
:type => "string"
| 30.044888 | 190 | 0.594124 |
7940cbdbb448bba611f6504f615a99f0a5c9bc85 | 672 | require 'securerandom'
require 'wgif/downloader'
module WGif
class Converter
def initialize(args)
@url = args[:url]
@trim_from = args[:trim_from]
@duration = args[:duration]
@frames = args[:frames]
end
def video_to_frames
clip = video.trim(trim_from, duration)
clip.to_frames(frames: frames)
end
private
attr_reader :url, :trim_from, :duration, :frames
def video
@video ||= youtube_url? ? Downloader.new.get_video(url) : Video.new(SecureRandom.uuid, url)
end
def youtube_url?
uri = URI.parse(url)
uri && uri.host && uri.host.match(/(www\.)?youtube.com/)
end
end
end
| 19.764706 | 97 | 0.629464 |
7905a382e00e23c312ac22de171baa200d3d8aef | 2,377 | class Scalapack < Formula
desc "High-performance linear algebra for distributed memory machines"
homepage "https://www.netlib.org/scalapack/"
url "https://www.netlib.org/scalapack/scalapack-2.1.0.tgz"
sha256 "61d9216cf81d246944720cfce96255878a3f85dec13b9351f1fa0fd6768220a6"
license "BSD-3-Clause"
revision 2
livecheck do
url :homepage
regex(/href=.*?scalapack[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
bottle do
sha256 cellar: :any, catalina: "281e3d5317f1616e8d5a6a3b9c37fbe6ee29a03b2abe14055854902a6c009a87"
sha256 cellar: :any, mojave: "b222f27ffed17605ffca2d1b0b4804f4c66ec916c9d2b5f2dd085ad2427fa791"
sha256 cellar: :any, high_sierra: "ea92d3247883a9e0de28483a34d1ca064d395d28c8a622fbac571f4cd6d0e64d"
end
depends_on "cmake" => :build
depends_on "gcc" # for gfortran
depends_on "open-mpi"
depends_on "openblas"
# Patch for compatibility with GCC 10
# https://github.com/Reference-ScaLAPACK/scalapack/pull/26
patch do
url "https://github.com/Reference-ScaLAPACK/scalapack/commit/bc6cad585362aa58e05186bb85d4b619080c45a9.patch?full_index=1"
sha256 "f0892888e5a83d984e023e76eabae8864ad89b90ae3a41d472b960c95fdab981"
end
def install
mkdir "build" do
blas = "-L#{Formula["openblas"].opt_lib} -lopenblas"
system "cmake", "..", *std_cmake_args, "-DBUILD_SHARED_LIBS=ON",
"-DBLAS_LIBRARIES=#{blas}", "-DLAPACK_LIBRARIES=#{blas}"
system "make", "all"
system "make", "install"
end
pkgshare.install "EXAMPLE"
end
test do
cp_r pkgshare/"EXAMPLE", testpath
cd "EXAMPLE" do
system "mpif90", "-o", "xsscaex", "psscaex.f", "pdscaexinfo.f", "-L#{opt_lib}", "-lscalapack"
assert `mpirun -np 4 ./xsscaex | grep 'INFO code' | awk '{print $NF}'`.to_i.zero?
system "mpif90", "-o", "xdscaex", "pdscaex.f", "pdscaexinfo.f", "-L#{opt_lib}", "-lscalapack"
assert `mpirun -np 4 ./xdscaex | grep 'INFO code' | awk '{print $NF}'`.to_i.zero?
system "mpif90", "-o", "xcscaex", "pcscaex.f", "pdscaexinfo.f", "-L#{opt_lib}", "-lscalapack"
assert `mpirun -np 4 ./xcscaex | grep 'INFO code' | awk '{print $NF}'`.to_i.zero?
system "mpif90", "-o", "xzscaex", "pzscaex.f", "pdscaexinfo.f", "-L#{opt_lib}", "-lscalapack"
assert `mpirun -np 4 ./xzscaex | grep 'INFO code' | awk '{print $NF}'`.to_i.zero?
end
end
end
| 40.982759 | 125 | 0.678586 |
8738d61351cf7c4d55656162fff0ec0da83ac317 | 119 | require 'test_helper'
class Attr1Test < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 14.875 | 41 | 0.697479 |
e80a364a1e908211461dc85dc313a3c691ee855c | 1,329 | class SigrokCli < Formula
desc "Command-line client for sigrok"
homepage "https://sigrok.org/"
url "https://sigrok.org/download/source/sigrok-cli/sigrok-cli-0.7.0.tar.gz"
sha256 "5669d968c2de3dfc6adfda76e83789b6ba76368407c832438cef5e7099a65e1c"
bottle do
root_url "https://archive.org/download/brewsci/bottles-science"
sha256 cellar: :any, sierra: "a41bd3e0acc877ec54a8019cf2b7640834eb7d531b6bb74c3d4bf345cd58360a"
sha256 cellar: :any, el_capitan: "50a9411021aa97979f58857e08a1f11034e0fadf52ca901dac0ab5b9deaa7a22"
sha256 cellar: :any, yosemite: "68ad9e7ecba9e55ad3ba1d8810fefdc57d1327a300ce45ca76d4745fa7e6505d"
sha256 cellar: :any, x86_64_linux: "daa914ca934055c095efefe16a6db86d7714e2e1e0d45b3c4641ec51220102b5"
end
head do
url "git://sigrok.org/sigrok-cli", shallow: false
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "glib"
depends_on "libtool" => :build unless OS.mac?
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "glib"
depends_on "libsigrokdecode"
def install
system "./autogen.sh" if build.head?
system "./configure", "--prefix=#{prefix}"
system "make", "install"
end
test do
ENV.delete "PYTHONPATH"
system "#{bin}/sigrok-cli", "--version"
end
end
| 34.076923 | 105 | 0.733634 |
28fc3ac53d8ee708ceb147c9244b0bc4bc2eaa30 | 2,063 | # Encoding: utf-8
# ASP.NET Core Buildpack
# Copyright 2014-2016 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
$LOAD_PATH << 'cf_spec'
require 'spec_helper'
require 'rspec'
describe AspNetCoreBuildpack::Out do
describe '#step' do
it 'prints step title prefixed with arrow' do
expect($stdout).to receive(:puts).with("-----> foo\n")
subject.step('foo')
end
end
describe '#warn' do
it 'prints warning message surrounded asterisks' do
expect($stdout).to receive(:puts).with("\n" \
" ************************************************************************\n" \
" * WARNING: xyz abc 123 should wrap blah blah blah foo bar baz bing bo *\n" \
" * o. this is the first message of line 2. *\n" \
" ************************************************************************\n" \
".\n")
subject.warn('xyz abc 123 should wrap blah blah blah foo bar baz bing boo. this is the first message of line 2.')
end
end
describe '#fail' do
it "prints indented failure message prefixed with 'FAILED'" do
expect($stdout).to receive(:puts).with(" FAILED: foo\n")
subject.fail('foo')
end
end
describe '#succeed' do
it 'prints indednted OK' do
expect($stdout).to receive(:puts).with(" OK\n")
subject.succeed
end
end
describe '#print' do
it 'prints indented message' do
expect($stdout).to receive(:puts).with(" foo\n")
subject.print('foo')
end
end
end
| 33.274194 | 119 | 0.60446 |
01afc9a1e8a007c3ded7bd87134a2e17a2faa974 | 1,390 | # -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{dot_xen}
s.version = "0.0.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Corey Donohoe"]
s.autorequire = %q{dot_xen}
s.date = %q{2008-10-14}
s.description = %q{A gem that provides reading and writing utils for xen config files. It's also a really simple use of treetop}
s.email = %q{[email protected]}
s.extra_rdoc_files = ["README", "LICENSE", "TODO"]
s.files = ["LICENSE", "README", "Rakefile", "TODO", "lib/dot_xen.rb", "lib/xen", "lib/xen/ast.rb", "lib/xen/grammar.treetop", "lib/xen/grammar_node_classes.rb", "lib/xen/pretty_print_visitor.rb", "lib/xen/visitor.rb"]
s.has_rdoc = true
s.homepage = %q{http://atmos.org}
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.0}
s.summary = %q{A gem that provides reading and writing utils for xen config files. It's also a really simple use of treetop}
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 2
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<treetop>, [">= 0"])
else
s.add_dependency(%q<treetop>, [">= 0"])
end
else
s.add_dependency(%q<treetop>, [">= 0"])
end
end
| 40.882353 | 219 | 0.67554 |
1c87dd45066032acbfd60e5ec9648f24c53d5d2d | 2,127 | class Jobseekers::AlertMailer < Jobseekers::BaseMailer
ALERT_MAILER_TEST_VARIANTS = %w[present_subject_line subject_line_variant_1 subject_line_variant_2 subject_line_variant_3 subject_line_variant_4 subject_line_variant_5].freeze
after_action :jobseeker
self.delivery_job = AlertMailerJob
helper DatesHelper
helper VacanciesHelper
helper_method :subscription, :jobseeker
def alert(subscription_id, vacancy_ids)
@subscription_id = subscription_id
@template = subscription.daily? ? NOTIFY_SUBSCRIPTION_DAILY_TEMPLATE : NOTIFY_SUBSCRIPTION_WEEKLY_TEMPLATE
@to = subscription.email
@vacancies = VacanciesPresenter.new(Vacancy.where(id: vacancy_ids).order(:expires_at))
view_mail(@template,
to: @to,
subject: I18n.t("jobseekers.alert_mailer.alert.subject.#{ab_tests[:"2022_01_alert_mailer_subject_lines_ab_test"]}",
count: @vacancies.count,
count_minus_one: @vacancies.count - 1,
job_title: @vacancies.first.job_title,
keywords: @subscription.search_criteria["keyword"].nil? ? I18n.t("jobseekers.alert_mailer.alert.subject.no_keywords") : @subscription.search_criteria["keyword"]&.titleize,
school_name: @vacancies.first.parent_organisation_name))
end
def ab_tests
{ :"2022_01_alert_mailer_subject_lines_ab_test" => alert_mailer_test_selected_variant }
end
def alert_mailer_test_selected_variant
@alert_mailer_test_selected_variant ||= ALERT_MAILER_TEST_VARIANTS.sample
end
private
attr_reader :subscription_id
def email_event_data
{ subscription_identifier: StringAnonymiser.new(subscription.id), subscription_frequency: subscription.frequency }
end
def email_event_prefix
"jobseeker_subscription"
end
def jobseeker
@jobseeker ||= Jobseeker.find_by(email: subscription.email)
end
def subscription
@subscription ||= SubscriptionPresenter.new(Subscription.find(subscription_id))
end
def utm_campaign
"#{subscription.frequency}_alert"
end
end
| 34.306452 | 201 | 0.732017 |
210ea7f08e789d5639f6e4c9408880a6f3270e7a | 6,469 | require 'rails_helper'
RSpec.describe QuestionsController, type: :controller do
let(:user) { create(:user) }
before { login(user) }
describe 'GET #new' do
before { get :new }
it 'renders new view' do
expect(response).to render_template :new
end
it 'has new question instance' do
expect(assigns(:question)).to be_a_new(Question)
end
it 'has current user instance' do
expect(controller.current_user).to eq user
end
end
describe 'POST #create' do
context 'with valid attrubutes' do
let(:perform) do
post :create, params: { question: attributes_for(:question) },
format: :js
end
let(:resource_collection) { user.questions }
it_behaves_like 'resource saved'
it 'render to show view' do
post :create, params: { question: attributes_for(:question) }
expect(response).to redirect_to assigns(:question)
end
end
context 'with invalid attrubutes' do
it 'doesn`t save question ' do
expect do
post :create, params: { question: attributes_for(:question, :invalid) }
end.to_not change(Question, :count)
end
it 'renders :new' do
post :create, params: { question: attributes_for(:question, :invalid) }
expect(response).to render_template :new
end
end
end
describe 'GET #show' do
let(:question) { create(:question) }
before { get :show, params: { id: question.id } }
it 'renders show view' do
expect(response).to render_template :show
end
it 'has question instance with requested id' do
expect(assigns(:question)).to eq(question)
end
it 'has new answer instance' do
expect(assigns(:answer)).to be_a_new(Answer)
end
it 'has new answer instance that belongs to current question' do
expect(assigns(:answer).question_id).to eq question.id
end
it 'has new answer instance that belongs to current user' do
expect(assigns(:answer).author.id).to eq controller.current_user.id
end
context 'question has been answered' do
before do
create(:question, :with_answer)
get :show, params: { id: question.id }
end
it 'has array of answers' do
expect(assigns(:answers)).to match_array question.answers
end
end
end
describe 'GET #index' do
let(:questions) { create_list(:question, 3) }
before { get :index }
it 'renders index view' do
expect(response).to render_template :index
end
it 'has array of all questions' do
expect(assigns(:questions)).to match_array(questions)
end
end
describe 'DELETE #destroy' do
context 'question belongs to user' do
let(:user) { create(:user, :with_question) }
before { login(user) }
it 'destroys question' do
expect do
delete :destroy,
params: { id: user.questions.first.id }
end.to change(user.questions, :count).by(-1)
end
it 'redirect to index view' do
delete :destroy, params: { id: user.questions.first.id }
expect(response).to redirect_to questions_path
end
end
context 'question doesn`t belong to user' do
let(:user) { create(:user) }
let!(:question) { create(:question) }
before { login(user) }
it 'doesn`t destroy question' do
expect { delete :destroy, params: { id: question.id } }.to_not change(Question, :count)
end
it 'redirects to root path' do
delete :destroy, params: { id: question.id }
expect(response).to redirect_to root_path
end
end
end
describe 'PATCH #update' do
let!(:question) { create(:question) }
let!(:user) { question.author }
before { login(user) }
context 'with valid attributes' do
it 'changes answer attributes' do
patch :update, params: { id: question, question: { body: 'new body' } }, format: :js
question.reload
expect(question.body).to eq 'new body'
end
it 'renders update view' do
patch :update, params: { id: question, question: { body: 'new body' } }, format: :js
expect(response).to render_template :update
end
end
context 'with invalid attributes' do
it 'does not change answer attributes' do
expect do
patch :update, params: { id: question, question: attributes_for(:question, :invalid) }, format: :js
end.to_not change(question, :body)
end
it 'renders update view' do
patch :update, params: { id: question, question: attributes_for(:question, :invalid) }, format: :js
expect(response).to render_template :update
end
end
context 'Question doesn`t belong to user' do |_variable|
let!(:question) { create(:question) }
let!(:user) { create(:user) }
before { login(user) }
it 'does not change answer attributes' do
expect do
patch :update, params: { id: question, question: attributes_for(:question, :invalid) }, format: :js
end.to_not change(question, :body)
end
end
end
describe 'PATCH #mark_best' do
context 'Question belongs to user' do
let!(:question) { create(:question, :with_answer) }
let!(:user) { question.author }
before { login(user) }
it 'has best answer' do
patch :mark_best, params: { question_id: question.id, answer_id: question.answers.first.id }, format: :js
expect(assigns(:question).best_answer).to eq question.answers.first
end
it 'renders mark_best.js.erb' do
patch :mark_best, params: { question_id: question.id, answer_id: question.answers.first.id }, format: :js
expect(response).to render_template :mark_best
end
end
context 'Question doesn`t belong to user' do
let!(:question) { create(:question, :with_answer) }
let!(:user) { create(:user) }
before { login(user) }
it 'has not best answer' do
patch :mark_best, params: { question_id: question.id, answer_id: question.answers.first.id }, format: :js
expect(assigns(:question).best_answer).to eq nil
end
it 'renders mark_best.js.erb' do
patch :mark_best, params: { question_id: question.id, answer_id: question.answers.first.id }, format: :js
expect(flash[:alert]).to eq 'You must be author to mark answer as best'
end
end
end
end
| 29.404545 | 113 | 0.628691 |
18b67f53777cedaaf12c07a46cda86d4f067be3d | 71 | class Baza::Sqlspecs
def initialize(args)
@args = args
end
end
| 11.833333 | 22 | 0.676056 |
b9509f05d5cbfeea9326a09473f1085049f6c1ab | 2,656 | class AssetUploader < Kithe::AssetUploader
# gives us md5, sha1, sha512
plugin :kithe_checksum_signatures
# Used by our browse_everything integration, let's us set a hash with remote
# URL location, to be fetched on promotion.
plugin :kithe_accept_remote_url
# Re-set shrine derivatives setting, to put DERIVATIVES on restricted storage
# if so configured. Only effects initial upload, if setting changes, some code
# needs to manually move files.
Attacher.derivatives_storage do |derivative_key|
if record.derivative_storage_type == "restricted"
Asset::DERIVATIVE_STORAGE_TYPE_LOCATIONS.fetch("restricted")
else # public store
Asset::DERIVATIVE_STORAGE_TYPE_LOCATIONS.fetch("public")
end
end
THUMB_WIDTHS = {
mini: 54,
large: 525,
standard: 208
}
IMAGE_DOWNLOAD_WIDTHS = {
large: 2880,
medium: 1200,
small: 800
}
# define thumb derivatives for TIFF, PDF, and other image input: :thumb_mini, :thumb_mini_2X, etc.
THUMB_WIDTHS.each_pair do |key, width|
# Single-width thumbnails
Attacher.define_derivative("thumb_#{key}", content_type: "image") do |original_file|
Kithe::VipsCliImageToJpeg.new(max_width: width, thumbnail_mode: true).call(original_file)
end
Attacher.define_derivative("thumb_#{key}", content_type: "application/pdf") do |original_file|
Kithe::VipsCliPdfToJpeg.new(max_width: width).call(original_file)
end
# Double-width thumbnails
Attacher.define_derivative("thumb_#{key}_2X", content_type: "image") do |original_file|
Kithe::VipsCliImageToJpeg.new(max_width: width * 2, thumbnail_mode: true).call(original_file)
end
Attacher.define_derivative("thumb_#{key}_2X", content_type: "application/pdf") do |original_file|
Kithe::VipsCliPdfToJpeg.new(max_width: width * 2).call(original_file)
end
end
# Define download derivatives for TIFF and other image input.
IMAGE_DOWNLOAD_WIDTHS.each_pair do |key, derivative_width|
Attacher.define_derivative("download_#{key}", content_type: "image") do |original_file, attacher:|
# only create download if it would be SMALLER than original, we don't want to lossily upscale!
if attacher.file.width > derivative_width
Kithe::VipsCliImageToJpeg.new(max_width: derivative_width).call(original_file)
end
end
end
# and a full size jpg
Attacher.define_derivative("download_full", content_type: "image") do |original_file, attacher:|
# No need to do this if our original is a JPG
unless attacher.file.content_type == "image/jpeg"
Kithe::VipsCliImageToJpeg.new.call(original_file)
end
end
end
| 36.383562 | 102 | 0.733057 |
acba5fbc5de292a349b4879aefd55cb3d06a94f5 | 1,292 | #
# Cookbook Name:: masala_haproxy
# Recipe:: datadog
#
# Copyright 2016, Paytm Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
primary_if = node['network']['interfaces'][node['system']['primary_interface']]
primary_addrs = primary_if['addresses']
primary_addrs_ipv4 = primary_addrs.select { |_addr, attrs| attrs['family'] == 'inet' }
primary_ip = primary_addrs_ipv4.keys.first
if node['masala_base']['dd_enable'] and not node['masala_base']['dd_api_key'].nil?
node.set['datadog']['haproxy']['instances'] = [ {
:url => "http://localhost:#{node['haproxy']['admin']['port']}",
:status_check => false,
:collect_aggregates_only => true,
:collect_status_metrics => true
#:username => "username",
#:password => "secret"
} ]
include_recipe 'datadog::haproxy'
end
| 35.888889 | 86 | 0.714396 |
62963e0065fff731623345654c930dffd851de73 | 1,958 | module Quarry
class Template
# Models the `template.yml` file.
#
class Config
# Lookup glob for `config.yml` file.
GLOB = TEMPLATE_DIRECTORY + '/config.{yml,yaml}'
#
def initialize(template)
@template = template
@file = Dir.glob(template.path + GLOB).first
if @file
@config = YAML.load_file(@file.to_s) || {}
else
@config = {}
end
end
#
attr :template
#
# The `template.yml` as a Pathname instance.
#
# @return [Pathname] The `template.yml` file.
#
attr :file
#
#
#
def [](key)
@config[key.to_s]
end
#
# Project files to use as metadata resources, if they exist.
# Supported formats are YAML and JSON. YAML is assumed if the
# file lacks an extension.
#
# @example
# resource:
# - .ruby
#
def resource
self[:resource] || self[:resources]
end
#
# Get `arguments` from config file.
#
def arguments
self['arguments']
end
# Take arguments from config and transform
# them into `name, options` form.
#
def script_arguments
@script_arguments ||= name_and_options(arguments)
end
private
#
def name_and_options(arguments)
case arguments
when Hash
arguments.map do |name, default|
[name, {:default=>default}]
end
else # Array
arguments.map do |entry|
case entry
when Hash
entry = entry.rekey
name = entry.delete(:name)
[name, entry]
when Array
[entry.first, :default=>entry.last]
else
[entry, {}]
end
end
end
end
end #class Config
end #class Template
end #module Quarry
| 20.395833 | 67 | 0.497957 |
6a909496522bdcb78ade1dcbb00ae3558fe9b9e9 | 1,297 | class UsersController < ApplicationController
get '/users/comics' do
@comics = current_user.comics
erb :'/users/user_comics'
end
get '/signup' do
if !logged_in?
erb :'/users/create_user'
else
redirect to '/comics'
end
end
post '/signup' do
user = User.new(params)
if user.save
flash[:notice] = "Thanks for signing up!"
session[:user_id] = user.id
redirect to '/comics'
else
flash[:notice] = user.errors.full_messages.join
redirect to '/signup' # redirect them to signup
end
end
get '/login' do
if !logged_in?
erb :'/users/login'
else
redirect '/comics'
end
end
post '/login' do
user = User.find_by(username: params[:username])
if user
if user.authenticate(params[:password])
session[:user_id] = user.id
redirect '/comics'
else
flash[:notice] = "Invalid username or password"
redirect to '/login'
end
else
flash[:notice] = "This account does not exist. Please create one."
redirect to '/signup'
end
end
get '/logout' do
if logged_in?
session.destroy
flash[:notice] = 'Successfully logged out.'
redirect to '/login'
else
redirect to '/'
end
end
end
| 20.919355 | 72 | 0.597533 |
266471e6d2b0e02e8c76bd6087725e5350e773e6 | 128 | require "lingua_franca/version"
require "lingua_franca/rails/engine" if defined?(::Rails)
require "lingua_franca/lingua_franca"
| 32 | 57 | 0.820313 |
28ab1c3b518f48ef2d3baf67aeeca7715dbc35ee | 2,142 | require "trycourier/version"
require "net/http"
require "json"
require "openssl"
module Courier
class ResponseError < StandardError; end
class InputError < StandardError; end
class SendResponse
attr_reader :code
attr_reader :message_id
def initialize(code, message_id)
@code = code
@message_id = message_id
end
end
class Client
def initialize(auth_token = nil)
@auth_token = auth_token || ENV['COURIER_AUTH_TOKEN']
@uri = URI.parse('https://api.trycourier.app/send')
if @auth_token == nil or @auth_token == ""
raise InputError, "Client requires an auth_token be supplied."
end
end
def send(body)
if not body.is_a?(Hash)
raise InputError, "Client#send must be passed a Hash as first argument."
elsif body["event"] == nil
raise InputError, "Must specify the 'event' key in Hash supplied to Client#send."
elsif body["recipient"] == nil
raise InputError, "Must specify the 'recipient' key in Hash supplied to Client#send."
elsif body["data"] != nil and not body["data"].is_a?(Hash)
raise InputError, "The 'data' key in the Hash supplied to Client#send must also be a Hash."
elsif body["profile"] != nil and not body["profile"].is_a?(Hash)
raise InputError, "The 'profile' key in the Hash supplied to Client#send must also be a Hash."
end
http = Net::HTTP.new(@uri.host, @uri.port)
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
req = Net::HTTP::Post.new(@uri)
req["authorization"] = "Bearer #{@auth_token}"
req["content-type"] = "application/json"
req["User-Agent"] = "courier-ruby/#{Courier::VERSION}"
req.body = body.to_json
res = http.request(req)
code = res.code.to_i
obj = JSON.parse res.read_body
if code == 200
message_id = obj["messageId"]
return SendResponse.new(code, message_id)
else
message = obj["Message"] == nil ? obj["message"] : obj["Message"]
err = "#{code}: #{message}"
raise ResponseError, err
end
end
end
end
| 31.970149 | 102 | 0.635387 |
ac9e8de8aff92c4a498e76a0295b4b84fb2eb9c9 | 765 | Pod::Spec.new do |s|
s.name = "RxModal"
s.version = "1.0.2"
s.summary = "Subscribe to your modal flows"
s.description = <<-DESC
RxModal helps you handle any modal flow as a simple Observable sequence.
DESC
s.homepage = "https://github.com/RxSwiftCommunity/RxModal"
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = { "Jérôme Alves" => "[email protected]" }
s.social_media_url = "https://twitter.com/jegnux"
s.ios.deployment_target = '9.0'
s.source = { :git => "https://github.com/RxSwiftCommunity/RxModal.git", :tag => s.version.to_s }
s.source_files = "Sources/**/*"
s.frameworks = "Foundation"
s.dependency 'RxSwift', '~> 6.0'
s.dependency 'RxCocoa', '~> 6.0'
end
| 36.428571 | 104 | 0.601307 |
7a7a1b4175fecdeb523bacb459860ecc99f73ed8 | 134 | class AddConfirmedAtToUser < ActiveRecord::Migration[6.0][5.0]
def change
add_column :users, :confirmed_at, :datetime
end
end
| 22.333333 | 62 | 0.746269 |
f81163ffedf525de3cc156dba12edd4da604722c | 535 | require 'spec_helper'
describe 'copperegg_revealcloud' do
describe command('2>&1 /usr/local/revealcloud/revealcloud -V') do
its(:exit_status) { should eq 0 }
its(:stdout) { should match 'RevealCloud' }
end
describe service('revealcloud') do
it { should be_running }
it { should be_enabled }
end
describe process('revealcloud') do
it { should be_running }
its(:args) { should match '-k my_key' }
its(:args) { should match '-l foo' }
its(:args) { should match '-t tag1 -t tag2' }
end
end
| 23.26087 | 67 | 0.654206 |
391bc9adc9d9b5925f22cb72cfb45df838e1fa92 | 350 | # frozen_string_literal: true
require 'togglv8'
require 'date'
require 'json'
require 'toggl/worktime/version'
require 'toggl/worktime/config'
require 'toggl/worktime/merger'
require 'toggl/worktime/time'
require 'toggl/worktime/driver'
require 'toggl/worktime/calendar'
require 'tty-table'
module Toggl
# Main module
module Worktime
end
end
| 18.421053 | 33 | 0.782857 |
112ded83531437eacc5c779b48d81a432f9fc711 | 264 | class Movie < ApplicationRecord
validates :imdb, :score, :title, :summary, :year, :genres, presence: true
validates :imdb, uniqueness: true
validates :score, inclusion: 0..10
validates :year, numericality: { greater_than: 0 }
has_one_attached :image
end
| 33 | 75 | 0.734848 |
f82746a5840fa950a44798866f75c754203edadf | 131 | # frozen_string_literal: true
FactoryBot.define do
factory :affiliate do
sequence(:name) { |n| "Affiliate #{n}" }
end
end
| 16.375 | 44 | 0.687023 |
08a09ebacc1f49ee8601fea7a449d0eda78625f0 | 2,306 | require 'global_id'
require 'active_support/message_verifier'
require 'time'
class SignedGlobalID < GlobalID
class ExpiredMessage < StandardError; end
class << self
attr_accessor :verifier
def parse(sgid, options = {})
if sgid.is_a? self
sgid
else
super verify(sgid, options), options
end
end
# Grab the verifier from options and fall back to SignedGlobalID.verifier.
# Raise ArgumentError if neither is available.
def pick_verifier(options)
options.fetch :verifier do
verifier || raise(ArgumentError, 'Pass a `verifier:` option with an `ActiveSupport::MessageVerifier` instance, or set a default SignedGlobalID.verifier.')
end
end
attr_accessor :expires_in
DEFAULT_PURPOSE = "default"
def pick_purpose(options)
options.fetch :for, DEFAULT_PURPOSE
end
private
def verify(sgid, options)
metadata = pick_verifier(options).verify(sgid)
raise_if_expired(metadata['expires_at'])
metadata['gid'] if pick_purpose(options) == metadata['purpose']
rescue ActiveSupport::MessageVerifier::InvalidSignature, ExpiredMessage
nil
end
def raise_if_expired(expires_at)
if expires_at && Time.now.utc > Time.iso8601(expires_at)
raise ExpiredMessage, 'This signed global id has expired.'
end
end
end
attr_reader :verifier, :purpose, :expires_at
def initialize(gid, options = {})
super
@verifier = self.class.pick_verifier(options)
@purpose = self.class.pick_purpose(options)
@expires_at = pick_expiration(options)
end
def to_s
@sgid ||= @verifier.generate(to_h)
end
alias to_param to_s
def to_h
# Some serializers decodes symbol keys to symbols, others to strings.
# Using string keys remedies that.
{ 'gid' => @uri.to_s, 'purpose' => purpose, 'expires_at' => encoded_expiration }
end
def ==(other)
super && @purpose == other.purpose
end
private
def encoded_expiration
expires_at.utc.iso8601(3) if expires_at
end
def pick_expiration(options)
return options[:expires_at] if options.key?(:expires_at)
if expires_in = options.fetch(:expires_in) { self.class.expires_in }
expires_in.from_now
end
end
end
| 25.622222 | 162 | 0.678231 |
b9481f964c60db86667a3254544d226744dff15c | 1,361 | class Dcadec < Formula
desc "DTS Coherent Acoustics decoder with support for HD extensions"
homepage "https://github.com/foo86/dcadec"
url "https://github.com/foo86/dcadec.git",
:tag => "v0.2.0",
:revision => "0e074384c9569e921f8facfe3863912cdb400596"
head "https://github.com/foo86/dcadec.git"
bottle do
cellar :any_skip_relocation
sha256 "68b350a3ec6a1ab7384eac3341a03762e8233dec742c35f8dc2afc213b3db567" => :mojave
sha256 "7f938bcd68b9078df3dc6e67d82e08beb55b10228a808d91543a6ed2d15a2002" => :high_sierra
sha256 "7a51fb1bfa07f08c45176df419087429e9ffce945cbcd28d71e403c456762c74" => :sierra
sha256 "89ddc5e9a5cfd72e604bdff54ee1f09f9ad4ec281fc79c93201971bbd380ccdd" => :el_capitan
sha256 "640914a5ce466bbb91b551bdb35a385e4a8b08c25f78509a16c016c654963805" => :yosemite
sha256 "6d373b4fe5dbb76648183d83cd3161970e8f3674ea29a3133fa4d3c0a9f82ca1" => :mavericks
end
resource "sample" do
url "https://github.com/foo86/dcadec-samples/raw/fa7dcf8c98c6d/xll_71_24_96_768.dtshd"
sha256 "d2911b34183f7379359cf914ee93228796894e0b0f0055e6ee5baefa4fd6a923"
end
def install
system "make", "all"
system "make", "check"
system "make", "PREFIX=#{prefix}", "install"
end
test do
resource("sample").stage do
system "#{bin}/dcadec", resource("sample").cached_download
end
end
end
| 37.805556 | 93 | 0.766348 |
bb42ef5700a2bdef720c8a5656b8cd0732121d4b | 8,407 | require "pact_broker/logging"
require "pact_broker/matrix/unresolved_selector"
require "pact_broker/date_helper"
require "pact_broker/db/clean/selector"
module PactBroker
module DB
class CleanIncremental
DEFAULT_KEEP_SELECTORS = [
PactBroker::DB::Clean::Selector.new(tag: true, latest: true),
PactBroker::DB::Clean::Selector.new(branch: true, latest: true),
PactBroker::DB::Clean::Selector.new(latest: true),
PactBroker::DB::Clean::Selector.new(deployed: true),
PactBroker::DB::Clean::Selector.new(released: true),
PactBroker::DB::Clean::Selector.new(max_age: 90)
]
TABLES = [:versions, :pact_publications, :pact_versions, :verifications, :triggered_webhooks, :webhook_executions]
def self.call database_connection, options = {}
new(database_connection, options).call
end
def initialize database_connection, options = {}
@db = database_connection
@options = options
end
def logger
options[:logger] || PactBroker.logger
end
def keep
@keep ||= if options[:keep]
# Could be a Matrix::UnresolvedSelector from the docker image, convert it
options[:keep].collect { | unknown_thing | PactBroker::DB::Clean::Selector.from_hash(unknown_thing.to_hash) }
else
DEFAULT_KEEP_SELECTORS
end
end
def limit
options[:limit] || 1000
end
def resolve_ids(query, column_name = :id)
query.collect { |h| h[column_name] }
end
def version_ids_to_delete
db[:versions].where(id: version_ids_to_keep).invert.limit(limit).order(Sequel.asc(:id))
end
def version_ids_to_keep
@version_ids_to_keep ||= selected_versions_to_keep.reduce(&:union)
end
def selected_versions_to_keep
keep.collect do | selector |
PactBroker::Domain::Version.select(:id).for_selector(selector)
end
end
def call
require "pact_broker/db/models"
if dry_run?
dry_run_results
else
db.transaction do
before_counts = current_counts
PactBroker::Domain::Version.where(id: resolve_ids(version_ids_to_delete)).delete
delete_orphan_pact_versions
after_counts = current_counts
TABLES.each_with_object({}) do | table_name, comparison_counts |
comparison_counts[table_name.to_s] = { "deleted" => before_counts[table_name] - after_counts[table_name], "kept" => after_counts[table_name] }
end
end
end
end
private
attr_reader :db, :options
def current_counts
TABLES.each_with_object({}) do | table_name, counts |
counts[table_name] = db[table_name].count
end
end
def dry_run?
options[:dry_run]
end
def delete_orphan_pact_versions
db[:pact_versions].where(id: orphan_pact_versions).delete
rescue Sequel::DatabaseError => e
raise unless e.cause.class.name == "Mysql2::Error"
ids = orphan_pact_versions.map { |row| row[:id] }
db[:pact_versions].where(id: ids).delete
end
def orphan_pact_versions
db[:pact_versions]
.left_join(:pact_publications, Sequel[:pact_publications][:pact_version_id]=> Sequel[:pact_versions][:id])
.left_join(:verifications, Sequel[:verifications][:pact_version_id]=> Sequel[:pact_versions][:id])
.select(Sequel[:pact_versions][:id])
.where(
Sequel[:pact_publications][:id] => nil,
Sequel[:verifications][:id] => nil
)
end
def version_info(version)
{
"number" => version.number,
"created" => DateHelper.distance_of_time_in_words(version.created_at, DateTime.now) + " ago",
"tags" => version.tags.collect(&:name).sort
}
end
def dry_run_results
to_delete = dry_run_to_delete
to_keep = dry_run_to_keep
kept_per_selector = keep.collect do | selector |
{
selector: selector.to_hash,
count: PactBroker::Domain::Version.for_selector(selector).count
}
end
pacticipant_results = pacticipants.each_with_object({}) do | pacticipant, results |
results[pacticipant.name] = {
"toDelete" => to_delete[pacticipant.name] || { "count" => 0 },
"toKeep" => to_keep[pacticipant.id]
}
end
total_versions_count = PactBroker::Domain::Version.count
versions_to_keep_count = version_ids_to_keep.count
versions_to_delete_count = version_ids_to_delete.count
{
"counts" => {
"totalVersions" => total_versions_count,
"versionsToDelete" => versions_to_delete_count,
"versionsNotToKeep" => total_versions_count - versions_to_keep_count,
"versionsToKeep" => versions_to_keep_count,
"versionsToKeepBySelector" => kept_per_selector,
},
"versionSummary" => pacticipant_results
}
end
def dry_run_latest_versions_to_keep
latest_undeleted_versions_by_order = PactBroker::Domain::Version.where(id: version_ids_to_delete.select(:id))
.invert
.select_group(:pacticipant_id)
.select_append{ max(order).as(latest_order) }
lv_versions_join = {
Sequel[:lv][:latest_order] => Sequel[:versions][:order],
Sequel[:lv][:pacticipant_id] => Sequel[:versions][:pacticipant_id]
}
PactBroker::Domain::Version
.select_all_qualified
.join(latest_undeleted_versions_by_order, lv_versions_join, { table_alias: :lv })
end
def dry_run_earliest_versions_to_keep
earliest_undeleted_versions_by_order = PactBroker::Domain::Version.where(id: version_ids_to_delete.select(:id))
.invert
.select_group(:pacticipant_id)
.select_append{ min(order).as(first_order) }
ev_versions_join = {
Sequel[:lv][:first_order] => Sequel[:versions][:order],
Sequel[:lv][:pacticipant_id] => Sequel[:versions][:pacticipant_id]
}
PactBroker::Domain::Version
.select_all_qualified
.join(earliest_undeleted_versions_by_order, ev_versions_join, { table_alias: :lv })
end
def dry_run_to_delete
PactBroker::Domain::Version
.where(id: version_ids_to_delete.select(:id))
.all
.group_by{ | v | v.pacticipant_id }
.each_with_object({}) do | (_pacticipant_id, versions), thing |
thing[versions.first.pacticipant.name] = {
"count" => versions.count,
"fromVersion" => version_info(versions.first),
"toVersion" => version_info(versions.last)
}
end
end
# rubocop: disable Metrics/CyclomaticComplexity
def dry_run_to_keep
latest_to_keep = dry_run_latest_versions_to_keep.eager(:tags).each_with_object({}) do | version, r |
r[version.pacticipant_id] = {
"firstVersion" => version_info(version)
}
end
earliest_to_keep = dry_run_earliest_versions_to_keep.eager(:tags).each_with_object({}) do | version, r |
r[version.pacticipant_id] = {
"latestVersion" => version_info(version)
}
end
counts = counts_to_keep
pacticipants.collect(&:id).each_with_object({}) do | pacticipant_id, results |
results[pacticipant_id] = { "count" => counts[pacticipant_id] || 0 }
.merge(earliest_to_keep[pacticipant_id] || {})
.merge(latest_to_keep[pacticipant_id] || {})
end
end
# rubocop: enable Metrics/CyclomaticComplexity
def counts_to_keep
db[:versions].where(id: version_ids_to_delete.select(:id))
.invert
.select_group(:pacticipant_id)
.select_append{ count(1).as(count) }
.all
.each_with_object({}) do | row, counts |
counts[row[:pacticipant_id]] = row[:count]
end
end
def pacticipants
@pacticipants ||= PactBroker::Domain::Pacticipant.order_ignore_case(:name).all
end
end
end
end
| 34.314286 | 156 | 0.616272 |
edc9a9723a43be7aa1781ea7c203a6b23263736a | 159 | class RemoveSauceLabsEmailTemplate < ActiveRecord::Migration
def up
EmailTemplate.where(name: 'saucelabs_report').delete_all
end
def down
end
end
| 17.666667 | 60 | 0.773585 |
f7251b461a728decd86919a684da0a136f192c1c | 1,740 | module ApplicationHelper
def login_helper style = ''
if current_user.is_a?(GuestUser)
(link_to 'Register', new_user_registration_path, class: style) +
" ".html_safe +
(link_to 'Login', new_user_session_path, class: style)
else
link_to 'Logout', destroy_user_session_path, method: :delete, class: style
end
end
def source_helper(styles)
if session[:source]
greeting = "Thank for visiting me from #{session[:source]}, please feel free to #{link_to "contact me", contact_path} if you'd like to work together."
content_tag(:p, greeting.html_safe, class: styles)
end
end
def copyright_generator
DevcampViewTool::Renderer.copyright 'Aleksey Peresmekhin', 'All rights reserved'
end
def nav_items
[
{
url: root_path,
title: 'Home',
},
{
url: about_me_path,
title: 'About me',
},
{
url: contact_path,
title: 'Contact',
},
{
url: blogs_path,
title: 'Blog',
},
{
url: portfolios_path,
title: 'Portfolio',
},
{
url: tech_news_path,
title: "Tech News"
}
]
end
def nav_helper style, tag_type
safe_join nav_items
.map { |item| "<#{tag_type}><a href=\"#{item[:url]}\" class=\"#{style} #{active? item[:url]}\">#{item[:title]}</a></#{tag_type}>".html_safe}
end
def active? path
"active" if current_page? path
end
def alerts
alert = (flash[:alert] || flash[:error] || flash[:notice])
if (alert)
alert_generator alert
end
end
def alert_generator message
js add_gritter(message, title: "Aleksey Peresmekhin Portfolio", sticky: false)
end
end
| 24.166667 | 156 | 0.597126 |
11789c0455fcbe68c6e8ae8ab624456627a8ea31 | 141 | class RemoveBatchIdFromResource < ActiveRecord::Migration[6.0][5.1]
def change
remove_column :resources, :batch_id, :integer
end
end
| 23.5 | 67 | 0.758865 |
bf13a54b52876f4a93d267f4b64fbacfcbdb0811 | 190 | # Read about factories at https://github.com/thoughtbot/factory_girl
FactoryGirl.define do
factory :review do
user_id 1
score 1
remarks "MyText"
follow_up false
end
end
| 17.272727 | 68 | 0.721053 |
03ef9632068e2405c06f30a440a54ecc55ba8d64 | 117 | class User < ApplicationRecord
has_many :photos
has_secure_password
validates :name, presence: true
end
| 11.7 | 34 | 0.74359 |
38ecf0614bccf0a86bb512b5cad1ddf4b94f9a6f | 1,374 | # frozen_string_literal: true
# :nocov:
module Sail
# Graphql
#
# Module to include type definitions
# for GraphQL APIs.
module Graphql
autoload :Mutations, "sail/mutations"
module Types # :nodoc:
extend ActiveSupport::Concern
included do
field :sail_get, ::GraphQL::Types::JSON, null: true do
description "Returns the value for a given setting."
argument :name, String, required: true, description: "The setting's name."
end
field :sail_switcher, ::GraphQL::Types::JSON, null: true do
description "Switches between the positive or negative setting based on the throttle."
argument :positive, String, required: true, description: "The setting's name if the throttle is bigger than the desired amount."
argument :negative, String, required: true, description: "The setting's name if the throttle is smaller than the desired amount."
argument :throttled_by, String, required: true, description: "The throttle setting's name."
end
def sail_get(name:)
Sail.get(name)
end
def sail_switcher(positive:, negative:, throttled_by:)
Sail.switcher(
positive: positive,
negative: negative,
throttled_by: throttled_by
)
end
end
end
end
end
# :nocov:
| 31.227273 | 139 | 0.641194 |
61d9dbef59d7d0e6c4241a2f25de8972870b2aae | 47 | module ContentfulRails
VERSION = "0.3.0"
end
| 11.75 | 22 | 0.723404 |
f7449d65ae526b0f74095dff5b0568c71f9620d7 | 2,404 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema[7.0].define(version: 2022_05_01_195036) do
create_table "glad_bucket_entry_types", force: :cascade do |t|
t.integer "bucket_id", null: false
t.integer "entry_type_id", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["bucket_id", "entry_type_id"], name: "index_glad_bucket_entry_types_on_bucket_id_and_entry_type_id", unique: true
t.index ["bucket_id"], name: "index_glad_bucket_entry_types_on_bucket_id"
t.index ["entry_type_id"], name: "index_glad_bucket_entry_types_on_entry_type_id"
end
create_table "glad_buckets", force: :cascade do |t|
t.string "name", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "glad_entry_type_fields", force: :cascade do |t|
t.integer "entry_type_id", null: false
t.string "name", null: false
t.text "description"
t.boolean "mandatory", default: false
t.string "content_type", null: false
t.integer "position", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["entry_type_id"], name: "index_glad_entry_type_fields_on_entry_type_id"
end
create_table "glad_entry_types", force: :cascade do |t|
t.string "name", null: false
t.text "description"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_foreign_key "glad_bucket_entry_types", "glad_buckets", column: "bucket_id"
add_foreign_key "glad_bucket_entry_types", "glad_entry_types", column: "entry_type_id"
add_foreign_key "glad_entry_type_fields", "glad_entry_types", column: "entry_type_id"
end
| 45.358491 | 126 | 0.750416 |
03b9ea4ae60489882b2cbb616f2270041cd3b2cc | 275 | # frozen_string_literal: true
require 'oj'
module SimpleJson
module Json
class Oj
def self.encode(json)
::Oj.dump(json, mode: :rails)
end
def self.decode(json_string)
::Oj.load(json_string, mode: :rails)
end
end
end
end
| 15.277778 | 44 | 0.610909 |
01b0a4d6f1e7434dd7bf8bb1054d5c241476ba6f | 371 | require 'livingstyleguide/filters/highlights'
require 'livingstyleguide/filters/full_width'
require 'livingstyleguide/filters/haml'
require 'livingstyleguide/filters/javascript'
require 'livingstyleguide/filters/coffee_script'
require 'livingstyleguide/filters/add_wrapper_class'
require 'livingstyleguide/filters/font_example'
require 'livingstyleguide/filters/colors'
| 37.1 | 52 | 0.867925 |
ff3d2686fcce4637c89aa1bd18cc8d54b9002276 | 5,799 | # Copyright (c) 2018-2019 VMware, Inc. All Rights Reserved.
# SPDX-License-Identifier: MIT
# DO NOT MODIFY. THIS CODE IS GENERATED. CHANGES WILL BE OVERWRITTEN.
# vcenter - VMware vCenter Server provides a centralized platform for managing your VMware vSphere environments
require 'date'
module VSphereAutomation
module VCenter
class VcenterStoragePoliciesCompatibleDatastoreInfo
# Identifier of the datastore. When clients pass a value of this structure as a parameter, the field must be an identifier for the resource type: Datastore. When operations return a value of this structure as a result, the field will be an identifier for the resource type: Datastore.
attr_accessor :datastore
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'datastore' => :'datastore'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'datastore' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'datastore')
self.datastore = attributes[:'datastore']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @datastore.nil?
invalid_properties.push('invalid value for "datastore", datastore cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @datastore.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
datastore == o.datastore
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[datastore].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN, :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = VSphereAutomation::VCenter.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
end
| 30.68254 | 288 | 0.632868 |
1ca0e2cc8f292d58dfb0d1466cbce7e743fb174c | 1,688 | # -*- encoding: utf-8 -*-
# stub: jekyll-redirect-from 0.16.0 ruby lib
Gem::Specification.new do |s|
s.name = "jekyll-redirect-from".freeze
s.version = "0.16.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Parker Moore".freeze]
s.date = "2020-01-26"
s.email = ["[email protected]".freeze]
s.homepage = "https://github.com/jekyll/jekyll-redirect-from".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.4.0".freeze)
s.rubygems_version = "3.3.11".freeze
s.summary = "Seamlessly specify multiple redirection URLs for your pages and posts".freeze
s.installed_by_version = "3.3.11" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
end
if s.respond_to? :add_runtime_dependency then
s.add_runtime_dependency(%q<jekyll>.freeze, [">= 3.3", "< 5.0"])
s.add_development_dependency(%q<bundler>.freeze, [">= 0"])
s.add_development_dependency(%q<jekyll-sitemap>.freeze, ["~> 1.0"])
s.add_development_dependency(%q<rake>.freeze, ["~> 12.0"])
s.add_development_dependency(%q<rspec>.freeze, ["~> 3.5"])
s.add_development_dependency(%q<rubocop-jekyll>.freeze, ["~> 0.10"])
else
s.add_dependency(%q<jekyll>.freeze, [">= 3.3", "< 5.0"])
s.add_dependency(%q<bundler>.freeze, [">= 0"])
s.add_dependency(%q<jekyll-sitemap>.freeze, ["~> 1.0"])
s.add_dependency(%q<rake>.freeze, ["~> 12.0"])
s.add_dependency(%q<rspec>.freeze, ["~> 3.5"])
s.add_dependency(%q<rubocop-jekyll>.freeze, ["~> 0.10"])
end
end
| 41.170732 | 112 | 0.672986 |
011f9a4d4f161f2b234590fa499fd8b07b083915 | 1,300 | # encoding: utf-8
require 'open_classes/object'
require 'open_classes/module'
require 'open_classes/array/together_helper'
# Array
class Array
include TogetherHelper
# Arrays bulk reverse.
#
# together_reverse has alias :treverse
#
# not empty case
# lists = [[1, 2], [5, 6]]
# ret = lists.together_reverse
# print ret # => [[2, 1], [6, 5]]
# print lists # => [[1, 2], [5, 6]]
#
# one empty case
# lists = [[1, 2], []]
# ret = lists.together_reverse
# print ret # => [[2, 1], []]
# print lists # => [[1, 2], []]
def together_reverse
if_not_contain_array_rails_type_error
reduce([]) { |ret, list|ret << list.reverse }
end
# Arrays bulk reverse!.
#
# together_reverse! has alias :treverse!
#
# not empty case
# lists = [[1, 2], [5, 6]]
# ret = lists.together_reverse!
# print ret # => [[2, 1], [6, 5]]
# print lists # => [[2, 1], [6, 5]]
#
# one empty case
# lists = [[1, 2], []]
# ret = lists.together_reverse!
# print ret # => [[2, 1], []]
# print lists # => [[2, 1], []]
def together_reverse!
if_not_contain_array_rails_type_error
reduce([]) { |ret, list|ret << list.reverse! }
end
alias_method :treverse, :together_reverse
alias_method :treverse!, :together_reverse!
end
| 24.074074 | 50 | 0.58 |
266dfc74e3a382fec500db4da700c04b28c8b76b | 143 | module Inky
module Rails
VERSION = '1.3.8.0'.freeze
end
NODE_VERSION, GEM_VERSION = Rails::VERSION.rpartition('.').map(&:freeze)
end
| 20.428571 | 74 | 0.685315 |
1d202adc32a66c4013f5ccd6b2673545bf03c459 | 2,140 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe RDStation::Builder::Field do
def valid_builder
described_class.new('cf_identifier')
end
describe 'when create a builder' do
context 'valid' do
let(:initial_parameters) do
'cf_api_identifier'
end
let(:builder) { described_class.new(initial_parameters) }
let(:expected_result) do
{
'api_identifier' => 'cf_api_identifier',
'data_type' => 'STRING',
'presentation_type' => 'TEXT_INPUT',
'label' => { 'pt-BR' => 'My label' },
'name' => { 'pt-BR' => 'My name' }
}
end
it 'returns an hash of required values' do
builder.label 'pt-BR', 'My label'
builder.name 'pt-BR', 'My name'
builder.data_type 'STRING'
builder.presentation_type 'TEXT_INPUT'
result = builder.build
expect(result).to eq(expected_result)
end
end
context 'invalid' do
it 'using invalid api_identifier ' do
expect { described_class.new('invald_identifier') }.to raise_error(
'api_identifier is not in a valid format, need start with "cf_"'
)
end
it 'using invalid data_type ' do
expect { valid_builder.data_type('invalid_data_type') }.to raise_error(
'Not valid data_type - ["STRING", "INTEGER", "BOOLEAN", "STRING[]"]'
)
end
it 'using invalid presentation_type ' do
expect { valid_builder.presentation_type('invalid presentation_type') }.to raise_error(
'Not valid presentation_type - ["TEXT_INPUT", "TEXT_AREA", "URL_INPUT", "PHONE_INPUT", "EMAIL_INPUT", "CHECK_BOX", "NUMBER_INPUT", "COMBO_BOX", "RADIO_BUTTON", "MULTIPLE_CHOICE"]'
)
end
it 'without api_identifier' do
expect { described_class.new(nil) }.to raise_error('api_identifier required')
end
it 'without required fields' do
expect { valid_builder.build }.to raise_error(
'Required fields are missing - ["data_type", "presentation_type", "label", "name"]'
)
end
end
end
end
| 30.571429 | 189 | 0.616355 |
ff4c2e65cd94c1528316bb7a463ac5457a2e5532 | 511 | require 'fog/rackspace/models/networking_v2/network'
module Fog
module Rackspace
class NetworkingV2
class Networks < Fog::Collection
model Fog::Rackspace::NetworkingV2::Network
def all
data = service.list_networks.body['networks']
load(data)
end
def get(id)
data = service.show_network(id).body['network']
new(data)
rescue Fog::Rackspace::NetworkingV2::NotFound
nil
end
end
end
end
end
| 21.291667 | 57 | 0.60274 |
edc0abed74c4ea4ff46001bbed0ddaad608413c6 | 511 | class CreateAdReferendumRelationships < ActiveRecord::Migration
def change
create_table :ad_referendum_relationships do |t|
t.belongs_to :ad_referendum, index: true
t.belongs_to :tiene_ad_referendum, index: true
t.date :desde
t.date :hasta
t.integer :dia
t.integer :mes
t.integer :anio
# 0 - No aclarado
# 1 - dia habil
# 2 - dia corrido
t.integer :dia_habil
t.text :observacion
t.timestamps null: false
end
end
end
| 20.44 | 63 | 0.639922 |