_id (string, 2-6 chars) | title (string, 9-130 chars) | partition (string, 3 values) | text (string, 66-10.5k chars) | language (string, 1 value) | meta_information (dict) |
---|---|---|---|---|---|
q2500
|
Pandata.Parser.get_webnames_from_search
|
train
|
def get_webnames_from_search(html)
user_links = Nokogiri::HTML(html).css('.user_name a')
webnames = []
user_links.each do |link|
webnames << link['webname']
end
webnames
end
|
ruby
|
{
"resource": ""
}
|
q2501
|
Pandata.Parser.get_next_data_indices
|
train
|
def get_next_data_indices(html)
# .js-more-link is found on mobile pages.
show_more = Nokogiri::HTML(html).css('.show_more, .js-more-link')[0]
if show_more
next_indices = {}
data_attributes = ['nextStartIndex', 'nextLikeStartIndex', 'nextThumbStartIndex']
data_attributes.each do |attr_name|
attr = show_more.attributes['data-' + attr_name.downcase]
next_indices[attr_name.to_sym] = attr.value.to_i if attr
end
next_indices
else
false
end
end
|
ruby
|
{
"resource": ""
}
|
q2502
|
Pandata.Parser.infobox_each_link
|
train
|
def infobox_each_link(html)
Nokogiri::HTML(html).css('.infobox').each do |infobox|
infobox_body = infobox.css('.infobox-body')
title_link = infobox_body.css('h3 a').text.strip
subtitle_link = infobox_body.css('p a').first
subtitle_link = subtitle_link.text.strip if subtitle_link
yield(title_link, subtitle_link)
end
end
|
ruby
|
{
"resource": ""
}
|
q2503
|
Pandata.Parser.doublelink_each_link
|
train
|
def doublelink_each_link(html)
Nokogiri::HTML(html).css('.double-link').each do |doublelink|
title_link = doublelink.css('.media__bd__header').text.strip
subtitle_link = doublelink.css('.media__bd__subheader').text.strip
yield(title_link, subtitle_link)
end
end
|
ruby
|
{
"resource": ""
}
|
q2504
|
Pandata.Parser.get_followx_users
|
train
|
def get_followx_users(html)
users = []
Nokogiri::HTML(html).css('.follow_section').each do |section|
listener_name = section.css('.listener_name').first
webname = listener_name['webname']
# Remove any 'spans with a space' that sometimes appear with special characters.
listener_name.css('span').each(&:remove)
name = listener_name.text.strip
href = section.css('a').first['href']
users << { name: name, webname: webname, href: href }
end
users
end
|
ruby
|
{
"resource": ""
}
|
q2505
|
TTY.Platform.detect_system_properties
|
train
|
def detect_system_properties(arch)
parts = (arch || architecture).split('-', 2)
if parts.length == 1
@cpu, system = nil, parts.shift
else
@cpu, system = *parts
end
@os, @version = *find_os_and_version(system)
[@cpu, @os, @version]
end
|
ruby
|
{
"resource": ""
}
|
q2506
|
Masq.ServerController.decide
|
train
|
def decide
@site = current_account.sites.find_or_initialize_by_url(checkid_request.trust_root)
@site.persona = current_account.personas.find(params[:persona_id] || :first) if sreg_request || ax_store_request || ax_fetch_request
end
|
ruby
|
{
"resource": ""
}
|
q2507
|
Masq.ServerController.complete
|
train
|
def complete
if params[:cancel]
cancel
else
resp = checkid_request.answer(true, nil, identifier(current_account))
if params[:always]
@site = current_account.sites.find_or_create_by_persona_id_and_url(params[:site][:persona_id], params[:site][:url])
@site.update_attributes(params[:site])
elsif sreg_request || ax_fetch_request
@site = current_account.sites.find_or_initialize_by_persona_id_and_url(params[:site][:persona_id], params[:site][:url])
@site.attributes = params[:site]
elsif ax_store_request
@site = current_account.sites.find_or_initialize_by_persona_id_and_url(params[:site][:persona_id], params[:site][:url])
not_supported, not_accepted, accepted = [], [], []
ax_store_request.data.each do |type_uri, values|
if property = Persona.attribute_name_for_type_uri(type_uri)
store_attribute = params[:site][:ax_store][property.to_sym]
if store_attribute && !store_attribute[:value].blank?
@site.persona.update_attribute(property, values.first)
accepted << type_uri
else
not_accepted << type_uri
end
else
not_supported << type_uri
end
end
ax_store_response = (accepted.count > 0) ? OpenID::AX::StoreResponse.new : OpenID::AX::StoreResponse.new(false, "None of the attributes were accepted.")
resp.add_extension(ax_store_response)
end
resp = add_pape(resp, auth_policies, auth_level, auth_time)
resp = add_sreg(resp, @site.sreg_properties) if sreg_request && @site.sreg_properties
resp = add_ax(resp, @site.ax_properties) if ax_fetch_request && @site.ax_properties
render_response(resp)
end
end
|
ruby
|
{
"resource": ""
}
|
q2508
|
Masq.ServerController.handle_checkid_request
|
train
|
def handle_checkid_request
if allow_verification?
save_checkid_request
redirect_to proceed_path
elsif openid_request.immediate
render_response(openid_request.answer(false))
else
reset_session
request = save_checkid_request
session[:return_to] = proceed_path
redirect_to( request.from_trusted_domain? ? login_path : safe_login_path )
end
end
|
ruby
|
{
"resource": ""
}
|
q2509
|
Masq.ServerController.save_checkid_request
|
train
|
def save_checkid_request
clear_checkid_request
request = OpenIdRequest.create!(:parameters => openid_params)
session[:request_token] = request.token
request
end
|
ruby
|
{
"resource": ""
}
|
q2510
|
Masq.ServerController.ensure_valid_checkid_request
|
train
|
def ensure_valid_checkid_request
self.openid_request = checkid_request
if !openid_request.is_a?(OpenID::Server::CheckIDRequest)
redirect_to root_path, :alert => t(:identity_verification_request_invalid)
elsif !allow_verification?
flash[:notice] = logged_in? && !pape_requirements_met?(auth_time) ?
t(:service_provider_requires_reauthentication_last_login_too_long_ago) :
t(:login_to_verify_identity)
session[:return_to] = proceed_path
redirect_to login_path
end
end
|
ruby
|
{
"resource": ""
}
|
q2511
|
Masq.ServerController.transform_ax_data
|
train
|
def transform_ax_data(parameters)
data = {}
parameters.each_pair do |key, details|
if details['value']
data["type.#{key}"] = details['type']
data["value.#{key}"] = details['value']
end
end
data
end
|
ruby
|
{
"resource": ""
}
|
q2512
|
Masq.ServerController.render_openid_error
|
train
|
def render_openid_error(exception)
error = case exception
when OpenID::Server::MalformedTrustRoot then "Malformed trust root '#{exception.to_s}'"
else exception.to_s
end
render :text => h("Invalid OpenID request: #{error}"), :status => 500
end
|
ruby
|
{
"resource": ""
}
|
q2513
|
MicroMagick.Image.add_input_option
|
train
|
def add_input_option(option_name, *args)
(@input_options ||= []).push(option_name)
args.each { |ea| @input_options.push(Shellwords.escape(ea.to_s)) }
# Support call chaining:
self
end
|
ruby
|
{
"resource": ""
}
|
q2514
|
MicroMagick.Image.square_crop
|
train
|
def square_crop(gravity = 'Center')
gravity(gravity) unless gravity.nil?
d = [width, height].min
crop("#{d}x#{d}+0+0!")
end
|
ruby
|
{
"resource": ""
}
|
q2515
|
OpenGraphReader.Base.method_missing
|
train
|
def method_missing(method, *args, &block)
name = method.to_s
if respond_to_missing? name
@bases[name]
else
super(method, *args, &block)
end
end
|
ruby
|
{
"resource": ""
}
|
q2516
|
RandomData.Locations.uk_post_code
|
train
|
def uk_post_code
post_towns = %w(BM CB CV LE LI LS KT MK NE OX PL YO)
# Can't remember any others at the moment
number_1 = rand(100).to_s
number_2 = rand(100).to_s
# Easier way to do this?
letters = ("AA".."ZZ").to_a.rand
return "#{post_towns.rand}#{number_1} #{number_2}#{letters}"
end
|
ruby
|
{
"resource": ""
}
|
q2517
|
GoogleStaticMapsHelper.Map.url
|
train
|
def url
raise BuildDataMissing, "We have to have markers, paths or center and zoom set when url is called!" unless can_build?
out = "#{API_URL}?"
params = []
(REQUIRED_OPTIONS + OPTIONAL_OPTIONS).each do |key|
value = send(key)
params << "#{key}=#{URI.escape(value.to_s)}" unless value.nil?
end
out += params.join('&')
params = []
grouped_markers.each_pair do |marker_options_as_url_params, markers|
markers_locations = markers.map { |m| m.location_to_url }.join('|')
params << "markers=#{marker_options_as_url_params}|#{markers_locations}"
end
out += "&#{params.join('&')}" unless params.empty?
params = []
paths.each {|path| params << path.url_params}
out += "&#{params.join('&')}" unless params.empty?
out
end
|
ruby
|
{
"resource": ""
}
|
q2518
|
GoogleStaticMapsHelper.Map.grouped_markers
|
train
|
def grouped_markers
markers.inject(Hash.new {|hash, key| hash[key] = []}) do |groups, marker|
groups[marker.options_to_url_params] << marker
groups
end
end
|
ruby
|
{
"resource": ""
}
|
q2519
|
GoogleStaticMapsHelper.Map.size=
|
train
|
def size=(size)
unless size.nil?
case size
when String
width, height = size.split('x')
when Array
width, height = size
when Hash
width = size[:width]
height = size[:height]
else
raise "Don't know how to set size from #{size.class}!"
end
self.width = width if width
self.height = height if height
end
end
|
ruby
|
{
"resource": ""
}
|
q2520
|
RandomData.Text.alphanumeric
|
train
|
def alphanumeric(size=16)
s = ""
size.times { s << (i = Kernel.rand(62); i += ((i < 10) ? 48 : ((i < 36) ? 55 : 61 ))).chr }
s
end
|
ruby
|
{
"resource": ""
}
|
q2521
|
RandomData.MarkovGenerator.insert
|
train
|
def insert(result)
# puts "insert called with #{result}"
tabindex = Marshal.dump(@state)
if @table[tabindex].has_key?(result)
@table[tabindex][result] += 1
else
@table[tabindex][result] = 1
end
# puts "table #{@table.inspect}"
next_state(result)
end
|
ruby
|
{
"resource": ""
}
|
q2522
|
Masq.PersonasHelper.countries_for_select
|
train
|
def countries_for_select
::I18nData.countries.map{|pair| pair.reverse}.sort{|x,y| x.first <=> y.first}
end
|
ruby
|
{
"resource": ""
}
|
q2523
|
Masq.PersonasHelper.languages_for_select
|
train
|
def languages_for_select
::I18nData.languages.map{|pair| pair.reverse}.sort{|x,y| x.first <=> y.first}
end
|
ruby
|
{
"resource": ""
}
|
q2524
|
Vcard.Vcard.lines
|
train
|
def lines(name=nil) #:yield: Line
# FIXME - this would be much easier if #lines was #each, and there was a
# different #lines that returned an Enumerator that used #each
unless block_given?
map do |f|
if( !name || f.name?(name) )
f2l(f)
else
nil
end
end.compact
else
each do |f|
if( !name || f.name?(name) )
line = f2l(f)
if line
yield line
end
end
end
self
end
end
|
ruby
|
{
"resource": ""
}
|
q2525
|
Vcard.Vcard.delete_if
|
train
|
def delete_if #:nodoc: :yield: line
# Do in two steps to not mess up progress through the enumerator.
rm = []
each do |f|
line = f2l(f)
if line && yield(line)
rm << f
# Hack - because we treat N and FN as one field
if f.name? "N"
rm << field("FN")
end
end
end
rm.each do |f|
@fields.delete( f )
@cache.delete( f )
end
end
|
ruby
|
{
"resource": ""
}
|
q2526
|
Breadcrumble.ActionController.add_breadcrumb_to
|
train
|
def add_breadcrumb_to(name, url, trail_index)
breadcrumb_trails
@breadcrumb_trails[trail_index] ||= []
@breadcrumb_trails[trail_index] << {
name: case name
when Proc then name.call(self)
else name
end,
url: case url
when Proc then url.call(self)
else url ? url_for(url) : nil
end
}
end
|
ruby
|
{
"resource": ""
}
|
q2527
|
Masq.AuthenticatedSystem.current_account=
|
train
|
def current_account=(new_account)
if self.auth_type_used != :basic
session[:account_id] = (new_account.nil? || new_account.is_a?(Symbol)) ? nil : new_account.id
end
@current_account = new_account || :false
end
|
ruby
|
{
"resource": ""
}
|
q2528
|
Masq.AuthenticatedSystem.access_denied
|
train
|
def access_denied
respond_to do |format|
format.html do
store_location
redirect_to login_path
end
format.any do
request_http_basic_authentication 'Web Password'
end
end
end
|
ruby
|
{
"resource": ""
}
|
q2529
|
OpenGraphReader.Object.[]=
|
train
|
def []= name, value
if property?(name)
public_send "#{name}=", value
elsif OpenGraphReader.config.strict
raise UndefinedPropertyError, "Undefined property #{name} on #{inspect}"
end
end
|
ruby
|
{
"resource": ""
}
|
q2530
|
RandomData.Dates.date
|
train
|
def date(dayrange=10)
if dayrange.is_a?(Range)
offset = rand(dayrange.max-dayrange.min) + dayrange.min
else
offset = rand(dayrange*2) - dayrange
end
Date.today + offset
end
|
ruby
|
{
"resource": ""
}
|
q2531
|
RandomData.Dates.date_between
|
train
|
def date_between(range)
min_date = range.min.is_a?(Date) ? range.min : Date.parse(range.min)
max_date = range.max.is_a?(Date) ? range.max : Date.parse(range.max)
diff = (max_date - min_date).to_i
min_date + rand(diff)
end
|
ruby
|
{
"resource": ""
}
|
q2532
|
GoogleStaticMapsHelper.Path.url_params
|
train
|
def url_params # :nodoc:
raise BuildDataMissing, "Need at least 2 points to create a path!" unless can_build?
out = 'path='
path_params = OPTIONAL_OPTIONS.inject([]) do |path_params, attribute|
value = send(attribute)
path_params << "#{attribute}:#{URI.escape(value.to_s)}" unless value.nil?
path_params
end.join('|')
out += "#{path_params}|" unless path_params.empty?
out += encoded_url_points if encoding_points?
out += unencoded_url_points unless encoding_points?
out
end
|
ruby
|
{
"resource": ""
}
|
q2533
|
GoogleStaticMapsHelper.Path.points=
|
train
|
def points=(array)
raise ArgumentError unless array.is_a? Array
@points = []
array.each {|point| self << point}
end
|
ruby
|
{
"resource": ""
}
|
q2534
|
Masq.OpenidServerSystem.add_pape
|
train
|
def add_pape(resp, policies = [], nist_auth_level = 0, auth_time = nil)
if papereq = OpenID::PAPE::Request.from_openid_request(openid_request)
paperesp = OpenID::PAPE::Response.new
policies.each { |p| paperesp.add_policy_uri(p) }
paperesp.nist_auth_level = nist_auth_level
paperesp.auth_time = auth_time.utc.iso8601
resp.add_extension(paperesp)
end
resp
end
|
ruby
|
{
"resource": ""
}
|
q2535
|
Masq.OpenidServerSystem.render_openid_response
|
train
|
def render_openid_response(resp)
signed_response = openid_server.signatory.sign(resp) if resp.needs_signing
web_response = openid_server.encode_response(resp)
case web_response.code
when OpenID::Server::HTTP_OK then render(:text => web_response.body, :status => 200)
when OpenID::Server::HTTP_REDIRECT then redirect_to(web_response.headers['location'])
else render(:text => web_response.body, :status => 400)
end
end
|
ruby
|
{
"resource": ""
}
|
q2536
|
Masq.Persona.property
|
train
|
def property(type)
prop = Persona.mappings.detect { |i| i[1].include?(type) }
prop ? self.send(prop[0]).to_s : nil
end
|
ruby
|
{
"resource": ""
}
|
q2537
|
Pandata.CLI.format_data
|
train
|
def format_data(data, json = false)
if json
JSON.generate(data)
else
data.map do |category, cat_data|
# Capitalize each word in the category symbol.
# e.g. :liked_tracks becomes 'Liked Tracks'
title = category.to_s.split('_').map(&:capitalize).join(' ')
output = if cat_data.empty?
" ** No Data **\n"
else
case category
when /liked_tracks/
formatter.tracks(cat_data)
when /liked_artists|liked_stations/
formatter.sort_list(cat_data)
when :liked_albums
formatter.albums(cat_data)
when /following|followers/
formatter.followx(cat_data)
end
end
"#{title}:\n#{output}"
end.join
end
end
|
ruby
|
{
"resource": ""
}
|
q2538
|
Pandata.CLI.download_data
|
train
|
def download_data
scraper_data = {}
@data_to_get.each do |data_category|
if /liked_(.*)/ =~ data_category
argument = $1.to_sym # :tracks, :artists, :stations or :albums
scraper_data[data_category] = @scraper.public_send(:likes, argument)
else
scraper_data[data_category] = @scraper.public_send(data_category)
end
end
scraper_data
end
|
ruby
|
{
"resource": ""
}
|
q2539
|
Pandata.CLI.scraper_for
|
train
|
def scraper_for(user_id)
scraper = Pandata::Scraper.get(user_id)
if scraper.kind_of?(Array)
log "No exact match for '#{user_id}'."
unless scraper.empty?
log "\nWebname results for '#{user_id}':\n#{formatter.list(scraper)}"
end
raise PandataError, "Could not create a scraper for '#{user_id}'."
end
scraper
end
|
ruby
|
{
"resource": ""
}
|
q2540
|
Masq.Site.ax_fetch=
|
train
|
def ax_fetch=(props)
props.each_pair do |property, details|
release_policies.build(:property => property, :type_identifier => details['type']) if details['value']
end
end
|
ruby
|
{
"resource": ""
}
|
q2541
|
Masq.Site.sreg_properties
|
train
|
def sreg_properties
props = {}
release_policies.each do |rp|
is_sreg = (rp.property == rp.type_identifier)
props[rp.property] = persona.property(rp.property) if is_sreg
end
props
end
|
ruby
|
{
"resource": ""
}
|
q2542
|
Masq.Site.ax_properties
|
train
|
def ax_properties
props = {}
release_policies.each do |rp|
if rp.type_identifier.match("://")
props["type.#{rp.property}"] = rp.type_identifier
props["value.#{rp.property}"] = persona.property(rp.type_identifier )
end
end
props
end
|
ruby
|
{
"resource": ""
}
|
q2543
|
Pandata.DataFormatter.custom_sort
|
train
|
def custom_sort(enumerable)
sorted_array = enumerable.sort_by do |key, _|
key.sub(/^the\s*/i, '').downcase
end
# sort_by() returns an array when called on hashes.
if enumerable.kind_of?(Hash)
# Rebuild the hash.
sorted_hash = {}
sorted_array.each { |item| sorted_hash[item[0]] = item[1] }
sorted_hash
else
sorted_array
end
end
|
ruby
|
{
"resource": ""
}
|
q2544
|
RandomData.ArrayRandomizer.roulette
|
train
|
def roulette(k=1)
wheel = []
weight = 0
# Create the cumulative array.
self.each do |x|
raise "Illegal negative weight #{x}" if x < 0
wheel.push(weight += x)
end
# print "wheel is #{wheel.inspect}\n";
# print "weight is #{weight.inspect}\n";
raise "Array had all zero weights" if weight.zero?
wheel.push(weight + 1) #Add extra element
if block_given?
k.times do
r = Kernel.rand() # so we don't pick up that from array.
# print "r is #{r.inspect}\n";
roll = weight.to_f * r
# print "roll is #{roll.inspect}\n";
0.upto(self.size - 1) do |i|
if wheel[i+1] > roll
yield i
break
end # if
end # upto
end # k.times
return nil
else
r = Kernel.rand() # so we don't pick up that from array.
# print "r is #{r.inspect}\n";
roll = weight.to_f * r
# print "roll is #{roll.inspect}\n";
0.upto(self.size - 1) do |i|
return i if wheel[i+1] > roll
end
end
end
|
ruby
|
{
"resource": ""
}
|
q2545
|
OpenGraphReader.Fetcher.body
|
train
|
def body
fetch_body unless fetched?
raise NoOpenGraphDataError, "No response body received for #{@uri}" if fetch_failed?
raise NoOpenGraphDataError, "Did not receive a HTML site at #{@uri}" unless html?
@get_response.body
end
|
ruby
|
{
"resource": ""
}
|
q2546
|
OpenGraphReader.Fetcher.html?
|
train
|
def html?
fetch_headers unless fetched_headers?
response = @get_response || @head_response
return false if fetch_failed?
return false unless response
return false unless response.success?
return false unless response["content-type"]
response["content-type"].include? "text/html"
end
|
ruby
|
{
"resource": ""
}
|
q2547
|
Boson.Scientist.redefine_command
|
train
|
def redefine_command(obj, command)
cmd_block = redefine_command_block(obj, command)
@no_option_commands << command if command.options.nil?
[command.name, command.alias].compact.each {|e|
obj.singleton_class.send(:define_method, e, cmd_block)
}
rescue Error
warn "Error: #{$!.message}"
end
|
ruby
|
{
"resource": ""
}
|
q2548
|
Boson.Scientist.redefine_command_block
|
train
|
def redefine_command_block(obj, command)
object_methods(obj)[command.name] ||= begin
obj.method(command.name)
rescue NameError
raise Error, "No method exists to redefine command '#{command.name}'."
end
lambda {|*args|
Scientist.analyze(obj, command, args) {|args|
Scientist.object_methods(obj)[command.name].call(*args)
}
}
end
|
ruby
|
{
"resource": ""
}
|
q2549
|
Boson.Scientist.analyze
|
train
|
def analyze(obj, command, args, &block)
@global_options, @command, @original_args = {}, command, args.dup
@args = translate_args(obj, args)
return run_help_option(@command) if @global_options[:help]
during_analyze(&block)
rescue OptionParser::Error, Error
raise if Boson.in_shell
warn "Error: #{$!}"
end
|
ruby
|
{
"resource": ""
}
|
q2550
|
Boson.OptionParser.formatted_usage
|
train
|
def formatted_usage
return "" if @opt_types.empty?
@opt_types.map do |opt, type|
val = respond_to?("usage_for_#{type}", true) ?
send("usage_for_#{type}", opt) : "#{opt}=:#{type}"
"[" + val + "]"
end.join(" ")
end
|
ruby
|
{
"resource": ""
}
|
q2551
|
Boson.OptionParser.print_usage_table
|
train
|
def print_usage_table(options={})
fields = get_usage_fields options[:fields]
fields, opts = get_fields_and_options(fields, options)
render_table(fields, opts, options)
end
|
ruby
|
{
"resource": ""
}
|
q2552
|
Boson.OptionParser.indifferent_hash
|
train
|
def indifferent_hash
Hash.new {|hash,key| hash[key.to_sym] if String === key }
end
|
ruby
|
{
"resource": ""
}
|
q2553
|
Boson.Loader.load
|
train
|
def load
load_source_and_set_module
module_callbacks if @module
yield if block_given? # load dependencies
detect_additions { load_commands } if load_commands?
set_library_commands
loaded_correctly? && (@loaded = true)
end
|
ruby
|
{
"resource": ""
}
|
q2554
|
Boson.Loader.detect_additions
|
train
|
def detect_additions(options={}, &block)
Util.detect(options, &block).tap do |detected|
@commands.concat detected[:methods].map(&:to_s)
end
end
|
ruby
|
{
"resource": ""
}
|
q2555
|
Boson.Loader.load_commands
|
train
|
def load_commands
@module = @module ? Util.constantize(@module) :
Util.create_module(Boson::Commands, clean_name)
before_load_commands
check_for_method_conflicts unless @force
actual_load_commands
rescue MethodConflictError => err
handle_method_conflict_error err
end
|
ruby
|
{
"resource": ""
}
|
q2556
|
RedSnow.Object.deep_symbolize_keys
|
train
|
def deep_symbolize_keys
return each_with_object({}) { |(k, v), memo| memo[k.to_sym] = v.deep_symbolize_keys } if self.is_a?(Hash)
return each_with_object([]) { |v, memo| memo << v.deep_symbolize_keys } if self.is_a?(Array)
self
end
|
ruby
|
{
"resource": ""
}
|
q2557
|
RedSnow.NamedBlueprintNode.ensure_description_newlines
|
train
|
def ensure_description_newlines(buffer)
return if description.empty?
if description[-1, 1] != "\n"
buffer << "\n\n"
elsif description.length > 1 && description[-2, 1] != "\n"
buffer << "\n"
end
end
|
ruby
|
{
"resource": ""
}
|
q2558
|
RedSnow.KeyValueCollection.filter_collection
|
train
|
def filter_collection(ignore_keys)
return @collection if ignore_keys.blank?
@collection.select { |kv_item| !ignore_keys.include?(kv_item.keys.first) }
end
|
ruby
|
{
"resource": ""
}
|
q2559
|
Boson.Library.command_objects
|
train
|
def command_objects(names=self.commands, command_array=Boson.commands)
command_array.select {|e| names.include?(e.name) && e.lib == self.name }
end
|
ruby
|
{
"resource": ""
}
|
q2560
|
Boson.Util.create_module
|
train
|
def create_module(base_module, name)
desired_class = camelize(name)
possible_suffixes = [''] + %w{1 2 3 4 5 6 7 8 9 10}
if suffix = possible_suffixes.find {|e|
!base_module.const_defined?(desired_class+e) }
base_module.const_set(desired_class+suffix, Module.new)
end
end
|
ruby
|
{
"resource": ""
}
|
q2561
|
HandleInvalidPercentEncodingRequests.Middleware.call
|
train
|
def call(env)
# calling env.dup here prevents bad things from happening
request = Rack::Request.new(env.dup)
# calling request.params is sufficient to trigger the error; see
# https://github.com/rack/rack/issues/337#issuecomment-46453404
request.params
@app.call(env)
# Rescue from that specific ArgumentError
rescue ArgumentError => e
raise unless e.message =~ /invalid %-encoding/
@logger.info "Bad request. Returning 400 due to #{e.message} from " + \
"request with env #{request.inspect}"
error_response
end
|
ruby
|
{
"resource": ""
}
|
q2562
|
Particle.Connection.connection
|
train
|
def connection
@connection ||= Faraday.new(conn_opts) do |http|
http.url_prefix = endpoint
if @access_token
http.authorization :Bearer, @access_token
end
end
end
|
ruby
|
{
"resource": ""
}
|
q2563
|
Boson.MethodInspector.new_method_added
|
train
|
def new_method_added(mod, meth)
self.current_module = mod
store[:temp] ||= {}
METHODS.each do |e|
store[e][meth.to_s] = store[:temp][e] if store[:temp][e]
end
if store[:temp][:option]
(store[:options][meth.to_s] ||= {}).merge! store[:temp][:option]
end
during_new_method_added mod, meth
store[:temp] = {}
if SCRAPEABLE_METHODS.any? {|m| has_inspector_method?(meth, m) }
set_arguments(mod, meth)
end
end
|
ruby
|
{
"resource": ""
}
|
q2564
|
Boson.OptionCommand.parse
|
train
|
def parse(args)
if args.size == 1 && args[0].is_a?(String)
args = Shellwords.shellwords(args[0]) if !Boson.in_shell
global_opt, parsed_options, args = parse_options args
# last string argument interpreted as args + options
elsif args.size > 1 && args[-1].is_a?(String)
temp_args = Boson.in_shell ? args : Shellwords.shellwords(args.pop)
global_opt, parsed_options, new_args = parse_options temp_args
Boson.in_shell ? args = new_args : args += new_args
# add default options
elsif @command.options.nil? || @command.options.empty? ||
(@command.numerical_arg_size? && args.size <= (@command.arg_size - 1).abs) ||
(@command.has_splat_args? && !args[-1].is_a?(Hash))
global_opt, parsed_options = parse_options([])[0,2]
# merge default options with given hash of options
elsif (@command.has_splat_args? || (args.size == @command.arg_size)) &&
args[-1].is_a?(Hash)
global_opt, parsed_options = parse_options([])[0,2]
parsed_options.merge!(args.pop)
end
[global_opt || {}, parsed_options, args]
end
|
ruby
|
{
"resource": ""
}
|
q2565
|
Boson.OptionCommand.modify_args
|
train
|
def modify_args(args)
if @command.default_option && @command.numerical_arg_size? &&
@command.arg_size <= 1 &&
!args[0].is_a?(Hash) && args[0].to_s[/./] != '-' && !args.join.empty?
args[0] = "--#{@command.default_option}=#{args[0]}"
end
end
|
ruby
|
{
"resource": ""
}
|
q2566
|
Boson.OptionCommand.check_argument_size
|
train
|
def check_argument_size(args)
if @command.numerical_arg_size? && args.size != @command.arg_size
command_size, args_size = args.size > @command.arg_size ?
[@command.arg_size, args.size] :
[@command.arg_size - 1, args.size - 1]
raise CommandArgumentError,
"wrong number of arguments (#{args_size} for #{command_size})"
end
end
|
ruby
|
{
"resource": ""
}
|
q2567
|
Boson.OptionCommand.add_default_args
|
train
|
def add_default_args(args, obj)
if @command.args && args.size < @command.arg_size - 1
# leave off the last arg since it's an option
@command.args.slice(0..-2).each_with_index {|arr,i|
next if args.size >= i + 1 # only fill in once args run out
break if arr.size != 2 # a default arg value must exist
begin
args[i] = @command.file_parsed_args ? obj.instance_eval(arr[1]) : arr[1]
rescue Exception
raise Scientist::Error, "Unable to set default argument at " +
"position #{i+1}.\nReason: #{$!.message}"
end
}
end
end
|
ruby
|
{
"resource": ""
}
|
q2568
|
Kakurenbo.Core.delete
|
train
|
def delete(options = {:hard => false})
if options[:hard]
self.class.delete(self.id, options)
else
return if new_record? or destroyed?
update_column kakurenbo_column, current_time_from_proper_timezone
end
end
|
ruby
|
{
"resource": ""
}
|
q2569
|
Kakurenbo.Core.destroy
|
train
|
def destroy(options = {:hard => false})
if options[:hard]
with_transaction_returning_status do
hard_destroy_associated_records
self.reload.hard_destroy
end
else
return true if destroyed?
with_transaction_returning_status do
destroy_at = Time.now
run_callbacks(:destroy){ update_column kakurenbo_column, destroy_at; self }
end
end
end
|
ruby
|
{
"resource": ""
}
|
q2570
|
Kakurenbo.Core.restore
|
train
|
def restore(options = {:recursive => true})
return false unless destroyed?
with_transaction_returning_status do
run_callbacks(:restore) do
restore_associated_records if options[:recursive]
update_column kakurenbo_column, nil
self
end
end
end
|
ruby
|
{
"resource": ""
}
|
q2571
|
Kakurenbo.Core.dependent_association_scopes
|
train
|
def dependent_association_scopes
self.class.reflect_on_all_associations.select { |reflection|
reflection.options[:dependent] == :destroy and reflection.klass.paranoid?
}.map { |reflection|
self.association(reflection.name).tap {|assoc| assoc.reset_scope }.scope
}
end
|
ruby
|
{
"resource": ""
}
|
q2572
|
KML.Geometry.altitude_mode=
|
train
|
def altitude_mode=(mode)
allowed_modes = %w(clampToGround relativeToGround absolute)
if allowed_modes.include?(mode)
@altitude_mode = mode
else
raise ArgumentError, "Must be one of the allowed altitude modes: #{allowed_modes.join(',')}"
end
end
|
ruby
|
{
"resource": ""
}
|
q2573
|
KML.Feature.render
|
train
|
def render(xm=Builder::XmlMarkup.new(:indent => 2))
[:name, :visibility, :address].each do |a|
xm.__send__(a, self.__send__(a)) unless self.__send__(a).nil?
end
xm.description { xm.cdata!(description) } unless description.nil?
xm.open(self.open) unless open.nil?
xm.phoneNumber(phone_number) unless phone_number.nil?
xm.styleUrl(style_url) unless style_url.nil?
unless address_details.nil?
xm.AddressDetails(:xmlns => "urn:oasis:names:tc:ciq:xsdschema:xAL:2.0") { address_details.render(xm) }
end
xm.Snippet(snippet.text, snippet.max_lines) unless snippet.nil?
xm.LookAt { look_at.render(xm) } unless look_at.nil?
xm.TimePrimitive { time_primitive.render(xm) } unless time_primitive.nil?
xm.StyleSelector { style_selector.render(xm) } unless style_selector.nil?
end
|
ruby
|
{
"resource": ""
}
|
q2574
|
RansackAdvancedSearch.SavedSearchUtils.perform_saved_searches_actions
|
train
|
def perform_saved_searches_actions(context, params={})
get_saved_searches(context)
save_or_update_saved_search(params.merge(context: context))
get_params_to_search(context)
end
|
ruby
|
{
"resource": ""
}
|
q2575
|
RansackAdvancedSearch.SavedSearchUtils.get_params_to_search
|
train
|
def get_params_to_search(context)
if params[:saved_search].present?
@saved_search = SavedSearch.find_by(id: params[:saved_search], context: context)
end
return params[:q] if params[:use_search_params].present?
params[:q] = @saved_search.try(:search_params) || params[:q]
end
|
ruby
|
{
"resource": ""
}
|
q2576
|
RansackAdvancedSearch.SavedSearchUtils.save_or_update_saved_search
|
train
|
def save_or_update_saved_search(params)
if params[:save_new_search].present? || params[:save_search].present?
if params[:save_new_search].present?
@saved_search = new_saved_search(params)
elsif params[:save_search].present? && params[:saved_search].present?
@saved_search = update_saved_search(params)
elsif params[:save_search].present?
@saved_search = new_saved_search(params)
end
if @saved_search.save
flash[:notice] = t('ransack.saved_search.save.success')
else
flash[:error] = t('ransack.saved_search.save.error')
end
end
end
|
ruby
|
{
"resource": ""
}
|
q2577
|
Sudo.Wrapper.start!
|
train
|
def start!
Sudo::System.check
@sudo_pid = spawn(
"#{SUDO_CMD} -E #{RUBY_CMD} -I#{LIBDIR} #{@ruby_opts} #{SERVER_SCRIPT} #{@socket} #{Process.uid}"
)
Process.detach(@sudo_pid) if @sudo_pid # avoid zombies
finalizer = Finalizer.new(pid: @sudo_pid, socket: @socket)
ObjectSpace.define_finalizer(self, finalizer)
if wait_for(timeout: 1){File.exists? @socket}
@proxy = DRbObject.new_with_uri(server_uri)
else
raise RuntimeError, "Couldn't create DRb socket #{@socket}"
end
load!
self
end
|
ruby
|
{
"resource": ""
}
|
q2578
|
Sudo.Wrapper.load_gems
|
train
|
def load_gems
load_paths
prospective_gems.each do |prospect|
gem_name = prospect.dup
begin
loaded = @proxy.proxy(Kernel, :require, gem_name)
# puts "Loading Gem: #{gem_name} => #{loaded}"
rescue LoadError, NameError => e
old_gem_name = gem_name.dup
gem_name.gsub!('-', '/')
retry if old_gem_name != gem_name
end
end
end
|
ruby
|
{
"resource": ""
}
|
q2579
|
ChefRunDeck.Config.add
|
train
|
def add(config = {})
config.each do |key, value|
define_setting key.to_sym, value
end
end
|
ruby
|
{
"resource": ""
}
|
q2580
|
ChefRunDeck.Chef.delete
|
train
|
def delete(node)
# => Make sure the Node Exists
return 'Node not found on Chef Server' unless Node.exists?(node)
# => Initialize the Admin API Client Settings
admin_api_client
# => Delete the Client & Node Object
Client.delete(node)
Node.delete(node)
'Client/Node Deleted from Chef Server'
end
|
ruby
|
{
"resource": ""
}
|
q2581
|
OctofactsUpdater.CLI.run
|
train
|
def run
unless opts[:action]
usage
exit 255
end
@config = {}
if opts[:config]
@config = YAML.load_file(opts[:config])
substitute_relative_paths!(@config, File.dirname(opts[:config]))
load_plugins(@config["plugins"]) if @config.key?("plugins")
end
@config[:options] = {}
opts.each do |k, v|
if v.is_a?(Hash)
@config[k.to_s] ||= {}
v.each do |v_key, v_val|
@config[k.to_s][v_key.to_s] = v_val
@config[k.to_s].delete(v_key.to_s) if v_val.nil?
end
else
@config[:options][k] = v
end
end
return handle_action_bulk if opts[:action] == "bulk"
return handle_action_facts if opts[:action] == "facts"
return handle_action_bulk if opts[:action] == "reindex"
usage
exit 255
end
|
ruby
|
{
"resource": ""
}
|
q2582
|
OctofactsUpdater.CLI.index_file
|
train
|
def index_file
@index_file ||= begin
if config.fetch("index", {})["file"]
return config["index"]["file"] if File.file?(config["index"]["file"])
raise Errno::ENOENT, "Index file (#{config['index']['file'].inspect}) does not exist"
end
raise ArgumentError, "No index file specified on command line (--index-file) or in configuration file"
end
end
|
ruby
|
{
"resource": ""
}
|
q2583
|
OctofactsUpdater.CLI.print_or_write
|
train
|
def print_or_write(data)
if opts[:output_file]
File.open(opts[:output_file], "w") { |f| f.write(data) }
else
puts data
end
end
|
ruby
|
{
"resource": ""
}
|
q2584
|
Rake.Builder.ensure_headers
|
train
|
def ensure_headers
missing = missing_headers
return if missing.size == 0
message = "Compilation cannot proceed as the following header files are missing:\n" + missing.join("\n")
raise Error.new(message)
end
|
ruby
|
{
"resource": ""
}
|
q2585
|
Rake.Builder.source_files
|
train
|
def source_files
return @source_files if @source_files
old_dir = Dir.pwd
Dir.chdir @rakefile_path
@source_files = Rake::Path.find_files(@source_search_paths, source_file_extension).uniq.sort
Dir.chdir old_dir
@source_files
end
|
ruby
|
{
"resource": ""
}
|
q2586
|
AttrDeprecated.ClassMethods._set_attribute_as_deprecated
|
train
|
def _set_attribute_as_deprecated(attribute)
original_attribute_method = instance_method(attribute.to_sym)
klass = self
define_method attribute.to_sym do |*args|
backtrace_cleaner = ActiveSupport::BacktraceCleaner.new
backtrace = backtrace_cleaner.clean(caller)
klass._notify_deprecated_attribute_call({klass: self, attribute: attribute, args: args, backtrace: backtrace})
# Call the original attribute method.
original_attribute_method.bind(self).call(*args)
end
end
|
ruby
|
{
"resource": ""
}
|
q2587
|
OctofactsUpdater.FactIndex.reindex
|
train
|
def reindex(facts_to_index, fixtures)
@index_data = {}
facts_to_index.each { |fact| add(fact, fixtures) }
set_top_level_nodes_fact(fixtures)
end
|
ruby
|
{
"resource": ""
}
|
q2588
|
OctofactsUpdater.FactIndex.write_file
|
train
|
def write_file(filename = nil)
filename ||= @filename
unless filename.is_a?(String)
raise ArgumentError, "Called write_file() for fact_index without a filename"
end
File.open(filename, "w") { |f| f.write(to_yaml) }
end
|
ruby
|
{
"resource": ""
}
|
q2589
|
Chore.Job.perform_async
|
train
|
def perform_async(*args)
self.class.run_hooks_for(:before_publish,*args)
@chore_publisher ||= self.class.options[:publisher]
@chore_publisher.publish(self.class.prefixed_queue_name,self.class.job_hash(args))
self.class.run_hooks_for(:after_publish,*args)
end
|
ruby
|
{
"resource": ""
}
|
q2590
|
BeValidAsset.BeValidFeed.response_indicates_valid?
|
train
|
def response_indicates_valid?(response)
REXML::Document.new(response.body).root.get_elements('//m:validity').first.text == 'true'
end
|
ruby
|
{
"resource": ""
}
|
q2591
|
Octofacts.Facts.method_missing
|
train
|
def method_missing(name, *args, &block)
if Octofacts::Manipulators.run(self, name, *args, &block)
@facts_manipulated = true
return self
end
if facts.respond_to?(name, false)
if args[0].is_a?(String) || args[0].is_a?(Symbol)
args[0] = string_or_symbolized_key(args[0])
end
return facts.send(name, *args)
end
raise NameError, "Unknown method '#{name}' in #{self.class}"
end
|
ruby
|
{
"resource": ""
}
|
q2592
|
ChefRunDeck.CLI.run
|
train
|
def run(argv = ARGV)
# => Parse CLI Configuration
cli = Options.new
cli.parse_options(argv)
# => Grab the Default Values
default = Config.options
# => Parse JSON Config File (If Specified and Exists)
json_config = Util.parse_json_config(cli.config[:config_file] || Config.config_file)
# => Merge Configuration (CLI Wins)
config = [default, json_config, cli.config].compact.reduce(:merge)
# => Apply Configuration
Config.setup do |cfg|
cfg.config_file = config[:config_file]
cfg.cache_timeout = config[:cache_timeout].to_i
cfg.bind = config[:bind]
cfg.port = config[:port]
cfg.auth_file = config[:auth_file]
cfg.state_file = config[:state_file]
cfg.environment = config[:environment].to_sym.downcase
cfg.chef_api_endpoint = config[:chef_api_endpoint]
cfg.chef_api_client = config[:chef_api_client]
cfg.chef_api_client_key = config[:chef_api_client_key]
cfg.chef_api_admin = config[:chef_api_admin]
cfg.chef_api_admin_key = config[:chef_api_admin_key]
cfg.rd_node_username = config[:rd_node_username]
end
# => Launch the API
API.run!
end
|
ruby
|
{
"resource": ""
}
|
q2593
|
Soracom.Client.auth_by_user
|
train
|
def auth_by_user(operator_id, user_name, password, endpoint)
endpoint = API_BASE_URL if endpoint.nil?
res = RestClient.post endpoint + '/auth',
{ operatorId: operator_id, userName: user_name, password: password },
'Content-Type' => 'application/json',
'Accept' => 'application/json'
result = JSON.parse(res.body)
fail result['message'] if res.code != '200'
Hash[JSON.parse(res.body).map { |k, v| [k.to_sym, v] }]
end
|
ruby
|
{
"resource": ""
}
|
q2594
|
OctofactsUpdater.Fact.set_value
|
train
|
def set_value(new_value, name_in = nil)
if name_in.nil?
if new_value.is_a?(Proc)
return @value = new_value.call(@value)
end
return @value = new_value
end
parts = if name_in.is_a?(String)
name_in.split("::")
elsif name_in.is_a?(Array)
name_in.map do |item|
if item.is_a?(String)
item
elsif item.is_a?(Hash) && item.key?("regexp")
Regexp.new(item["regexp"])
else
raise ArgumentError, "Unable to interpret structure item: #{item.inspect}"
end
end
else
raise ArgumentError, "Unable to interpret structure: #{name_in.inspect}"
end
set_structured_value(@value, parts, new_value)
end
|
ruby
|
{
"resource": ""
}
|
q2595
|
OctofactsUpdater.Fact.set_structured_value
|
train
|
def set_structured_value(subhash, parts, value)
return if subhash.nil?
raise ArgumentError, "Cannot set structured value at #{parts.first.inspect}" unless subhash.is_a?(Hash)
raise ArgumentError, "parts must be an Array, got #{parts.inspect}" unless parts.is_a?(Array)
# At the top level, find all keys that match the first item in the parts.
matching_keys = subhash.keys.select do |key|
if parts.first.is_a?(String)
key == parts.first
elsif parts.first.is_a?(Regexp)
parts.first.match(key)
else
# :nocov:
# This is a bug - this code should be unreachable because of the checking in `set_value`
raise ArgumentError, "part must be a string or regexp, got #{parts.first.inspect}"
# :nocov:
end
end
# Auto-create a new hash if there is a value, the part is a string, and the key doesn't exist.
if parts.first.is_a?(String) && !value.nil? && !subhash.key?(parts.first)
subhash[parts.first] = {}
matching_keys << parts.first
end
return unless matching_keys.any?
# If we are at the end, set the value or delete the key.
if parts.size == 1
if value.nil?
matching_keys.each { |k| subhash.delete(k) }
elsif value.is_a?(Proc)
matching_keys.each do |k|
new_value = value.call(subhash[k])
if new_value.nil?
subhash.delete(k)
else
subhash[k] = new_value
end
end
else
matching_keys.each { |k| subhash[k] = value }
end
return
end
# We are not at the end. Recurse down to the next level.
matching_keys.each { |k| set_structured_value(subhash[k], parts[1..-1], value) }
end
|
ruby
|
{
"resource": ""
}
|
q2596
|
Clash.Diff.diff_dirs
|
train
|
def diff_dirs(dir1, dir2)
mattching_dir_files(dir1, dir2).each do |file|
a = File.join(dir1, file)
b = File.join(dir2, file)
diff_files(a,b)
end
end
|
ruby
|
{
"resource": ""
}
|
q2597
|
Clash.Diff.dir_files
|
train
|
def dir_files(dir)
Find.find(dir).to_a.reject!{|f| File.directory?(f) }
end
|
ruby
|
{
"resource": ""
}
|
q2598
|
Clash.Diff.unique_files
|
train
|
def unique_files(dir, dir_files, common_files)
unique = dir_files - common_files
if !unique.empty?
@test_failures << yellowit("\nMissing from directory #{dir}:\n")
unique.each do |f|
failure = " - #{f}"
failure << "\n" if unique.last == f
@test_failures << failure
end
end
end
|
ruby
|
{
"resource": ""
}
|
q2599
|
OctofactsUpdater.Fixture.to_yaml
|
train
|
def to_yaml
sorted_facts = @facts.sort.to_h
facts_hash_with_expanded_values = Hash[sorted_facts.collect { |k, v| [k, v.value] }]
YAML.dump(facts_hash_with_expanded_values)
end
|
ruby
|
{
"resource": ""
}
|