_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q23400
|
JGrep.Scanner.get_token
|
train
|
# Scans the expression in @arguments starting at @token_index and returns the
# next token as a [type, value] pair (or a bare "]" for closing brackets).
# Word operators ("not"/"and"/"or") are only recognised when followed by a
# space or "(" so that identifiers such as "nothing" fall through to
# gen_statement. Returns nil once the input is exhausted.
def get_token
return nil if @token_index >= @arguments.size
begin
case chr(@arguments[@token_index])
when "["
# Bracketed sub-expression: delegate to the sub-statement scanner.
return "statement", gen_substatement
when "]"
return "]"
when "("
return "(", "("
when ")"
return ")", ")"
when "n"
# "not " / "not(" — otherwise treat as the start of a statement.
if (chr(@arguments[@token_index + 1]) == "o") && (chr(@arguments[@token_index + 2]) == "t") && ((chr(@arguments[@token_index + 3]) == " ") || (chr(@arguments[@token_index + 3]) == "("))
@token_index += 2
return "not", "not"
else
gen_statement
end
when "!"
return "not", "not"
when "a"
# "and " / "and(" — otherwise treat as the start of a statement.
if (chr(@arguments[@token_index + 1]) == "n") && (chr(@arguments[@token_index + 2]) == "d") && ((chr(@arguments[@token_index + 3]) == " ") || (chr(@arguments[@token_index + 3]) == "("))
@token_index += 2
return "and", "and"
else
gen_statement
end
when "&"
# "&&" is an alias for "and".
if chr(@arguments[@token_index + 1]) == "&"
@token_index += 1
return "and", "and"
else
gen_statement
end
when "o"
# "or " / "or(" — otherwise treat as the start of a statement.
if (chr(@arguments[@token_index + 1]) == "r") && ((chr(@arguments[@token_index + 2]) == " ") || (chr(@arguments[@token_index + 2]) == "("))
@token_index += 1
return "or", "or"
else
gen_statement
end
when "|"
# "||" is an alias for "or".
if chr(@arguments[@token_index + 1]) == "|"
@token_index += 1
return "or", "or"
else
gen_statement
end
when "+"
# "+word": collect characters until whitespace or ")".
value = ""
i = @token_index + 1
begin
value += chr(@arguments[i])
i += 1
end until (i >= @arguments.size) || (chr(@arguments[i]) =~ /\s|\)/)
@token_index = i - 1
return "+", value
when "-"
# "-word": collect characters until whitespace or ")".
value = ""
i = @token_index + 1
begin
value += chr(@arguments[i])
i += 1
end until (i >= @arguments.size) || (chr(@arguments[i]) =~ /\s|\)/)
@token_index = i - 1
return "-", value
when " "
return " ", " "
else
gen_statement
end
end
# Any out-of-bounds lookahead (nil passed to chr) surfaces as NoMethodError.
rescue NoMethodError
raise "Error. Expression cannot be parsed."
end
|
ruby
|
{
"resource": ""
}
|
q23401
|
ArgParser.Definition.<<
|
train
|
# Registers +arg+ with this definition, indexing it by key and, when present,
# by short key. Raises ArgumentError for duplicate keys or short keys, for a
# second rest argument, or for an unsupported argument type.
def <<(arg)
  case arg
  when PositionalArgument, KeywordArgument, FlagArgument, RestArgument
    raise ArgumentError, "An argument with key '#{arg.key}' has already been defined" if @arguments[arg.key]
    if arg.short_key && @short_keys[arg.short_key]
      raise ArgumentError, "The short key '#{arg.short_key}' has already been registered by the '#{@short_keys[arg.short_key]}' argument"
    end
    raise ArgumentError, "Only one rest argument can be defined" if arg.is_a?(RestArgument) && rest_args
    @arguments[arg.key] = arg
    @short_keys[arg.short_key] = arg if arg.short_key
  else
    raise ArgumentError, "arg must be an instance of PositionalArgument, KeywordArgument, " +
      "FlagArgument or RestArgument"
  end
end
|
ruby
|
{
"resource": ""
}
|
q23402
|
ArgParser.Definition.positional_arg
|
train
|
# Convenience shortcut: build a PositionalArgument and register it via #<<.
def positional_arg(key, desc, opts = {}, &block)
  arg = ArgParser::PositionalArgument.new(key, desc, opts, &block)
  self << arg
end
|
ruby
|
{
"resource": ""
}
|
q23403
|
ArgParser.Definition.keyword_arg
|
train
|
# Convenience shortcut: build a KeywordArgument and register it via #<<.
def keyword_arg(key, desc, opts = {}, &block)
  arg = ArgParser::KeywordArgument.new(key, desc, opts, &block)
  self << arg
end
|
ruby
|
{
"resource": ""
}
|
q23404
|
ArgParser.Definition.flag_arg
|
train
|
# Convenience shortcut: build a FlagArgument and register it via #<<.
def flag_arg(key, desc, opts = {}, &block)
  arg = ArgParser::FlagArgument.new(key, desc, opts, &block)
  self << arg
end
|
ruby
|
{
"resource": ""
}
|
q23405
|
ArgParser.Definition.rest_arg
|
train
|
# Convenience shortcut: build a RestArgument and register it via #<<.
def rest_arg(key, desc, opts = {}, &block)
  arg = ArgParser::RestArgument.new(key, desc, opts, &block)
  self << arg
end
|
ruby
|
{
"resource": ""
}
|
q23406
|
ArgParser.Definition.validate_requirements
|
train
|
# Checks the cross-argument requirements registered in @require_set against
# the parsed argument values.
#
# @param args [Hash] parsed argument values keyed by argument key
# @return [Array<String>] error messages; empty when all requirements hold
def validate_requirements(args)
  errors = []
  @require_set.each do |req, sets|
    sets.each do |set|
      # How many arguments of this requirement set were actually supplied?
      count = set.count{ |arg| args.has_key?(arg.key) && args[arg.key] }
      case req
      when :one
        if count == 0
          errors << "No argument has been specified for one of: #{set.join(', ')}"
        elsif count > 1
          # Fixed message grammar (was "can been specified").
          errors << "Only one argument can be specified from: #{set.join(', ')}"
        end
      when :any
        if count == 0
          errors << "At least one of the arguments must be specified from: #{set.join(', ')}"
        end
      end
    end
  end
  errors
end
|
ruby
|
{
"resource": ""
}
|
q23407
|
ArgParser.Definition.show_usage
|
train
|
# Emits a one-line usage summary (plus a pointer to the help option) to
# +out+ and returns the generated lines.
def show_usage(out = STDERR, width = 80)
  pos_args = positional_args
  optional_count = size - pos_args.size
  usage = pos_args.map(&:to_use)
  usage << (requires_some? ? 'OPTIONS' : '[OPTIONS]') if optional_count > 0
  usage << rest_args.to_use if rest_args?
  lines = ['']
  lines.concat(wrap_text("USAGE: #{RUBY_ENGINE} #{$0} #{usage.join(' ')}", width))
  lines << ''
  lines << 'Specify the /? or --help option for more detailed help'
  lines << ''
  lines.each{ |line| out.puts line } if out
  lines
end
|
ruby
|
{
"resource": ""
}
|
q23408
|
ArgParser.Definition.show_help
|
train
|
# Writes the full help screen (title, purpose, copyright, usage line, and
# per-argument descriptions for positional and option arguments) to +out+,
# returning the generated lines.
def show_help(out = STDOUT, width = 80)
  lines = ['', '']
  lines << title
  lines << title.gsub(/./, '=')
  lines << ''
  if purpose
    lines.concat(wrap_text(purpose, width))
    lines << ''
  end
  if copyright
    lines.concat(wrap_text("Copyright (c) #{copyright}", width))
    lines << ''
  end
  lines << 'USAGE'
  lines << '-----'
  pos_args = positional_args
  opt_args = size - pos_args.size
  usage_args = pos_args.map(&:to_use)
  usage_args << (requires_some? ? 'OPTIONS' : '[OPTIONS]') if opt_args > 0
  usage_args << rest_args.to_use if rest_args?
  lines.concat(wrap_text(" #{RUBY_ENGINE} #{$0} #{usage_args.join(' ')}", width))
  lines << ''
  if positional_args?
    max = positional_args.map{ |a| a.to_s.length }.max
    pos_args = positional_args
    pos_args << rest_args if rest_args?
    pos_args.each do |arg|
      if arg.usage_break
        lines << ''
        lines << arg.usage_break
      end
      # Build the description without mutating arg.description in place
      # (the previous `desc << ...` appended the default to the argument's
      # own description string on every call).
      desc = arg.description
      desc = "#{desc}\n[Default: #{arg.default}]" unless arg.default.nil?
      wrap_text(desc, width - max - 6).each_with_index do |line, i|
        # Only the first wrapped line carries the argument name.
        lines << " %-#{max}s %s" % [[arg.to_s][i], line]
      end
    end
    lines << ''
  end
  if non_positional_args?
    lines << ''
    lines << 'OPTIONS'
    lines << '-------'
    max = non_positional_args.map{ |a| a.to_use.length }.max
    non_positional_args.each do |arg|
      if arg.usage_break
        lines << ''
        lines << arg.usage_break
      end
      # Same non-mutating description handling as above.
      desc = arg.description
      desc = "#{desc}\n[Default: #{arg.default}]" unless arg.default.nil?
      wrap_text(desc, width - max - 6).each_with_index do |line, i|
        lines << " %-#{max}s %s" % [[arg.to_use][i], line]
      end
    end
  end
  lines << ''
  lines.each{ |line| line.length < width ? out.puts(line) : out.print(line) } if out
  lines
end
|
ruby
|
{
"resource": ""
}
|
q23409
|
ArgParser.Definition.wrap_text
|
train
|
# Wraps +text+ to at most +width+ characters per line, preferring hard
# newlines, then runs of spaces, then word-break punctuation, and finally a
# hard split at the width. Returns an array of lines; returns [text] when it
# already fits (or when width <= 0).
def wrap_text(text, width)
  if width > 0 && (text.length > width || text.index("\n"))
    lines = []
    start, nl_pos, ws_pos, wb_pos, end_pos = 0, 0, 0, 0, text.rindex(/[^\s]/)
    # NOTE(review): the loop stops at the index of the last non-space
    # character, so a final one-character segment directly after a break can
    # be dropped — confirm against callers before changing.
    while start < end_pos
      last_start = start
      nl_pos = text.index("\n", start)
      ws_pos = text.rindex(/ +/, start + width)
      wb_pos = text.rindex(/[\-,.;#)}\]\/\\]/, start + width - 1)
      if nl_pos && nl_pos <= start + width
        # Hard line break inside the window: honour it.
        lines << text[start...nl_pos].strip
        start = nl_pos + 1
      elsif end_pos < start + width
        # The remainder fits on a single line.
        lines << text[start..end_pos]
        start = end_pos
      elsif ws_pos && ws_pos > start && ((wb_pos.nil? || ws_pos > wb_pos) ||
            (wb_pos && wb_pos > 5 && wb_pos - 5 < ws_pos))
        # Prefer breaking on whitespace when it is at least as good as the
        # best punctuation break.
        lines << text[start...ws_pos]
        start = text.index(/[^\s]/, ws_pos + 1)
      elsif wb_pos && wb_pos > start
        # Break just after word-break punctuation.
        lines << text[start..wb_pos]
        start = wb_pos + 1
      else
        # No break point found: hard-split at the width.
        lines << text[start...(start+width)]
        start += width
      end
      if start <= last_start
        # Detect an infinite loop, and just return the original text
        # (message typo fixed: was "Inifinite").
        STDERR.puts "Infinite loop detected at #{__FILE__}:#{__LINE__}"
        STDERR.puts " width: #{width}, start: #{start}, nl_pos: #{nl_pos}, " +
          "ws_pos: #{ws_pos}, wb_pos: #{wb_pos}"
        return [text]
      end
    end
    lines
  else
    [text]
  end
end
|
ruby
|
{
"resource": ""
}
|
q23410
|
ArgParser.Parser.parse
|
train
|
# Parses command-line +tokens+ against the argument definition.
# Returns the processed argument values, or false when usage/help should be
# shown instead (including when an unknown argument was encountered).
def parse(tokens = ARGV)
@show_usage = nil
@show_help = nil
@errors = []
begin
pos_vals, kw_vals, rest_vals = classify_tokens(tokens)
# Skip processing entirely when a help flag was detected during classification.
args = process_args(pos_vals, kw_vals, rest_vals) unless @show_help
rescue NoSuchArgumentError => ex
self.errors << ex.message
@show_usage = true
end
# `args` is nil here when the rescue ran (assignment parsed but not executed).
(@show_usage || @show_help) ? false : args
end
|
ruby
|
{
"resource": ""
}
|
q23411
|
ArgParser.Parser.process_arg_val
|
train
|
# Validates and post-processes a single argument value.
# Records a message in self.errors and returns nil on failure; otherwise
# returns the (possibly transformed) value.
#
# arg        - the argument definition being processed
# val        - the raw value supplied (or the default when is_default)
# hsh        - the hash of values parsed so far (passed to validators/handlers)
# is_default - true when +val+ came from the argument's default
def process_arg_val(arg, val, hsh, is_default = false)
# A required argument must not fall back to an empty default.
if is_default && arg.required? && (val.nil? || val.empty?)
self.errors << "No value was specified for required argument '#{arg}'"
return
end
# A keyword argument given without a value may use its optional value.
if !is_default && val.nil? && KeywordArgument === arg
if arg.value_optional?
val = arg.value_optional
else
self.errors << "No value was specified for keyword argument '#{arg}'"
return
end
end
# Argument value validation
if ValueArgument === arg && arg.validation && val
case arg.validation
when Regexp
# Each element of an array value must match the pattern.
[val].flatten.each do |v|
add_value_error(arg, val) unless v =~ arg.validation
end
when Array
# Each element must be one of the allowed values.
[val].flatten.each do |v|
add_value_error(arg, val) unless arg.validation.include?(v)
end
when Proc
# Custom validator; it signals failure by raising.
begin
arg.validation.call(val, arg, hsh)
rescue StandardError => ex
self.errors << "An error occurred in the validation handler for argument '#{arg}': #{ex}"
return
end
else
raise "Unknown validation type: #{arg.validation.class.name}"
end
end
# TODO: Argument value coercion
# Call any registered on_parse handler
begin
val = arg.on_parse.call(val, arg, hsh) if val && arg.on_parse
rescue StandardError => ex
self.errors << "An error occurred in the on_parse handler for argument '#{arg}': #{ex}"
return
end
# Return result
val
end
|
ruby
|
{
"resource": ""
}
|
q23412
|
Postcodes.Base.method_missing
|
train
|
# Exposes @info entries as reader methods: tries the string form of the
# name first, then the symbol form, then falls back to the default
# behaviour (raising NoMethodError).
def method_missing(name, *args, &block)
  return @info[name.to_s] if @info.key? name.to_s
  return @info[name] if @info.key? name
  # Was `super.method_missing name`, which called #method_missing on
  # super's return value instead of delegating; bare `super` re-passes the
  # original arguments correctly.
  super
end
|
ruby
|
{
"resource": ""
}
|
q23413
|
Resque.JRubyWorker.prune_dead_workers
|
train
|
# Unregisters workers registered on this host that are no longer alive —
# i.e. neither a live worker thread in this JVM nor a running system process.
def prune_dead_workers
  all_workers = self.class.all
  return if all_workers.empty?
  known_workers = JRUBY ? worker_thread_ids : []
  # Was `pids = nil, hostname = self.hostname`, which assigned the array
  # [nil, hostname] to pids and so defeated the lazy `pids ||= system_pids`
  # lookup below (every worker looked "alive").
  pids = nil
  hostname = self.hostname
  all_workers.each do |worker|
    host, pid, thread, queues = self.class.split_id(worker.id)
    next if host != hostname
    next if known_workers.include?(thread) && pid == self.pid.to_s
    # NOTE: allow flexibility of running workers :
    # 1. worker might run in another JVM instance
    # 2. worker might run as a process (with MRI)
    next if (pids ||= system_pids).include?(pid)
    log! "Pruning dead worker: #{worker}"
    if worker.respond_to?(:unregister_worker)
      worker.unregister_worker
    else # Resque 2.x
      Registry.for(worker).unregister
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q23414
|
Resque.JRubyWorker.worker_thread_ids
|
train
|
# Returns the names of the JVM threads (in the current thread group) whose
# name contains WORKER_THREAD_ID — i.e. the live worker threads of this JVM.
def worker_thread_ids
thread_group = java.lang.Thread.currentThread.getThreadGroup
# Enumerate the group into a pre-sized Java Thread[] array.
threads = java.lang.reflect.Array.newInstance(
java.lang.Thread.java_class, thread_group.activeCount)
thread_group.enumerate(threads)
# NOTE: we shall check the name from $servlet_context.getServletContextName
# but that's an implementation detail of the factory currently that threads
# are named including their context name. thread grouping should be fine !
threads.map do |thread| # a convention is to name threads as "worker" :
# Array slots may be nil if threads terminated between sizing and enumerate.
thread && thread.getName.index(WORKER_THREAD_ID) ? thread.getName : nil
end.compact
end
|
ruby
|
{
"resource": ""
}
|
q23415
|
Resque.JRubyWorker.update_native_thread_name
|
train
|
# Tags the current JVM thread's native name with the worker thread marker
# and a global worker count, so worker threads are identifiable in thread
# dumps. Leaves the name untouched when it is already tagged.
def update_native_thread_name
thread = JRuby.reference(Thread.current)
set_thread_name = Proc.new do |prefix, suffix|
# The count is read under the global lock so concurrent workers get
# distinct numbers.
self.class.with_global_lock do
count = self.class.system_registered_workers.size
thread.native_thread.name = "#{prefix}##{count}#{suffix}"
end
end
if ! name = thread.native_thread.name
# "#{THREAD_ID}##{count}" :
set_thread_name.call(WORKER_THREAD_ID, nil)
elsif ! name.index(WORKER_THREAD_ID)
# "#{name}(#{THREAD_ID}##{count})" :
set_thread_name.call("#{name} (#{WORKER_THREAD_ID}", ')')
end
end
|
ruby
|
{
"resource": ""
}
|
q23416
|
Resque.JRubyWorker.system_unregister_worker
|
train
|
# Atomically removes this worker's id from the globally registered worker
# set and persists the updated list.
def system_unregister_worker # :nodoc
  self.class.with_global_lock do
    remaining = self.class.system_registered_workers
    remaining.delete(self.id)
    self.class.store_global_property(WORKERS_KEY, remaining.join(','))
  end
end
|
ruby
|
{
"resource": ""
}
|
q23417
|
Ngannotate.ProcessorCommon.parse_ngannotate_options
|
train
|
# Builds the ng-annotate option hash: starts from the configured options and
# lets the NG_OPT / NG_REGEXP environment variables override them.
# NG_OPT is a comma-separated list of key=value pairs and, when present,
# replaces the configured options entirely (existing behaviour).
def parse_ngannotate_options
  opt = config.options.clone
  # (Collapsed a redundant nested `if` that tested ENV['NG_OPT'] twice.)
  if (opt_str = ENV['NG_OPT'])
    opt = Hash[opt_str.split(',').map { |e| e.split('=') }]
    opt.symbolize_keys!
  end
  if (regexp = ENV['NG_REGEXP'])
    opt[:regexp] = regexp
  end
  opt
end
|
ruby
|
{
"resource": ""
}
|
q23418
|
Spiceweasel.Knife.validate
|
train
|
# Aborts the run unless `knife <command>` is one of the supported knife
# commands listed in +allknifes+.
def validate(command, allknifes)
  supported = allknifes.index { |x| x.start_with?("knife #{command}") }
  return if supported
  STDERR.puts "ERROR: 'knife #{command}' is not a currently supported command for knife."
  exit(-1)
end
|
ruby
|
{
"resource": ""
}
|
q23419
|
Guacamole.DocumentModelMapper.document_to_model
|
train
|
# Maps a database document to a model instance, reusing an existing instance
# from the identity map when one was already built for this key.
def document_to_model(document)
identity_map.retrieve_or_store model_class, document.key do
model = model_class.new(document.to_h)
model.key = document.key
model.rev = document.revision
# Resolve referenced documents into associated models.
handle_related_documents(model)
model
end
end
|
ruby
|
{
"resource": ""
}
|
q23420
|
Guacamole.DocumentModelMapper.model_to_document
|
train
|
# Converts a model into a document hash suitable for persistence: the key
# and rev attributes are stripped, and embedded/related models are folded in.
def model_to_document(model)
# `dup` so the model's own attributes hash is not modified.
document = model.attributes.dup.except(:key, :rev)
handle_embedded_models(model, document)
handle_related_models(document)
document
end
|
ruby
|
{
"resource": ""
}
|
q23421
|
TrustyCms.Initializer.initialize_metal
|
train
|
# Configures Rails Metal: resets the metal paths to the TrustyCMS root and
# adds engine/extension metal paths, then mounts Metal in the middleware
# stack only when at least one metal endpoint exists.
def initialize_metal
Rails::Rack::Metal.requested_metals = configuration.metals
Rails::Rack::Metal.metal_paths = ["#{TRUSTY_CMS_ROOT}/app/metal"] # reset Rails default to TRUSTY_CMS_ROOT
Rails::Rack::Metal.metal_paths += plugin_loader.engine_metal_paths
Rails::Rack::Metal.metal_paths += extension_loader.paths(:metal)
Rails::Rack::Metal.metal_paths.uniq!
configuration.middleware.insert_before(
:"ActionController::ParamsParser",
Rails::Rack::Metal, :if => Rails::Rack::Metal.metals.any?)
end
|
ruby
|
{
"resource": ""
}
|
q23422
|
TrustyCms.Initializer.initialize_i18n
|
train
|
# Loads the core locale files from TrustyCMS's config/locales plus any
# extension-provided locale paths, then defers to the standard initializer.
def initialize_i18n
radiant_locale_paths = Dir[File.join(TRUSTY_CMS_ROOT, 'config', 'locales', '*.{rb,yml}')]
configuration.i18n.load_path = radiant_locale_paths + extension_loader.paths(:locale)
super
end
|
ruby
|
{
"resource": ""
}
|
q23423
|
Spiceweasel.Nodes.validate_run_list
|
train
|
# Validates each entry of a node's run list against the cookbooks and roles
# declared in the manifest; exits on the first unknown or malformed entry.
def validate_run_list(node, run_list, cookbooks, roles)
  run_list.split(",").each do |entry|
    if entry.start_with?("recipe[")
      # recipe[foo] or recipe[foo::bar]
      cookbook_name = entry.split(/\[|\]/)[1].split(":")[0]
      next if cookbooks.member?(cookbook_name)
      STDERR.puts "ERROR: '#{node}' run list cookbook '#{cookbook_name}' is missing from the list of cookbooks in the manifest."
      exit(-1)
    elsif entry.start_with?("role[")
      # role[blah]
      role_name = entry.split(/\[|\]/)[1]
      next if roles.member?(role_name)
      STDERR.puts "ERROR: '#{node}' run list role '#{role_name}' is missing from the list of roles in the manifest."
      exit(-1)
    else
      STDERR.puts "ERROR: '#{node}' run list '#{entry}' is an invalid run list entry in the manifest."
      exit(-1)
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q23424
|
Spiceweasel.Nodes.validate_options
|
train
|
# When the knife options carry a -E flag, ensure the referenced environment
# is declared in the manifest; exits with an error otherwise.
def validate_options(node, options, environments)
  return unless options =~ /-E/ # check for environments
  env = options.split("-E")[1].split[0]
  return if environments.member?(env)
  STDERR.puts "ERROR: '#{node}' environment '#{env}' is missing from the list of environments in the manifest."
  exit(-1)
end
|
ruby
|
{
"resource": ""
}
|
q23425
|
Spiceweasel.Nodes.validate_node_file
|
train
|
# Ensures the node file nodes/<name>.json exists with a "name" field that
# matches the manifest entry; exits with an error on mismatch.
def validate_node_file(name)
# read in the file
node = Chef::JSONCompat.from_json(IO.read("nodes/#{name}.json"))
# check the node name vs. contents of the file
return unless node["name"] != name
STDERR.puts "ERROR: Node '#{name}' listed in the manifest does not match the name '#{node['name']}' within the nodes/#{name}.json file."
exit(-1)
end
|
ruby
|
{
"resource": ""
}
|
q23426
|
Spiceweasel.Nodes.process_providers
|
train
|
# Builds the create/delete knife commands for cloud-provisioned nodes.
# names   - provider token plus either a count or explicit names
# count   - number of instances to create
# options - raw knife options string (may carry -N for explicit naming)
def process_providers(names, count, name, options, run_list, create_command_options, knifecommands) # rubocop:disable CyclomaticComplexity
provider = names[0]
validate_provider(provider, names, count, options, knifecommands) unless Spiceweasel::Config[:novalidation]
provided_names = []
if name.nil? && options.split.index("-N") # pull this out for deletes
name = options.split[options.split.index("-N") + 1]
# Expand the -N base name into one numbered name per instance.
count.to_i.times { |i| provided_names << node_numerate(name, i + 1, count) } if name
end
# google can have names or numbers
if provider.eql?("google") && names[1].to_i == 0
do_google_numeric_provider(create_command_options, names, options, provided_names, run_list)
elsif Spiceweasel::Config[:parallel]
process_parallel(count, create_command_options, name, options, provider, run_list)
else
determine_cloud_provider(count, create_command_options, name, options, provider, run_list)
end
# Deletes: either a provider-level bulk delete or one delete per named node.
if Spiceweasel::Config[:bulkdelete] && provided_names.empty?
do_bulk_delete(provider)
else
provided_names.each do |p_name|
do_provided_names(p_name, provider)
end
end
end
|
ruby
|
{
"resource": ""
}
|
q23427
|
Spiceweasel.Nodes.validate_provider
|
train
|
# Confirms the knife plugin for +provider+ is installed, and that google
# instances launched by count carry an explicit -N name in the options.
def validate_provider(provider, names, _count, options, knifecommands)
  installed = knifecommands.index { |x| x.start_with?("knife #{provider}") }
  unless installed
    STDERR.puts "ERROR: 'knife #{provider}' is not a currently installed plugin for knife."
    exit(-1)
  end
  return unless provider.eql?("google")
  return unless names[1].to_i != 0 && !options.split.member?("-N")
  STDERR.puts "ERROR: 'knife google' currently requires providing a name. Please use -N within the options."
  exit(-1)
end
|
ruby
|
{
"resource": ""
}
|
q23428
|
Spiceweasel.Nodes.chef_client_search
|
train
|
# Builds a Chef search query string matching a node by name, environment and
# every run-list entry (e.g. "role[base]" becomes "role:base").
def chef_client_search(name, run_list, environment)
  search = []
  search.push("name:#{name}") if name
  search.push("chef_environment:#{environment}") if environment
  run_list.split(",").each do |item|
    item.sub!(/\[/, ":")    # "role[base]" -> "role:base]"
    item.chop!              # drop the trailing "]"
    item.sub!(/::/, '\:\:') # escape "::" for the search grammar
    search.push(item)
  end
  # (Was `"#{search.join(' AND ')}"` — the interpolation was redundant.)
  search.join(' AND ')
end
|
ruby
|
{
"resource": ""
}
|
q23429
|
Spiceweasel.Cookbooks.validate_metadata
|
train
|
# Checks a cookbook's metadata.rb: the metadata name must match the cookbook
# directory, any requested version must match the checked-out version, and
# all declared dependencies are collected into @dependencies for later
# validation. Exits with an error on mismatch.
def validate_metadata(cookbook, version)
# check metadata.rb for requested version
metadata = @loader.cookbooks_by_name[cookbook].metadata
Spiceweasel::Log.debug("validate_metadata: #{cookbook} #{metadata.name} #{metadata.version}")
# Should the cookbook directory match the name in the metadata?
if metadata.name.empty?
Spiceweasel::Log.warn("No cookbook name in the #{cookbook} metadata.rb.")
elsif cookbook != metadata.name
STDERR.puts "ERROR: Cookbook '#{cookbook}' does not match the name '#{metadata.name}' in #{cookbook}/metadata.rb."
exit(-1)
end
if version && metadata.version != version
STDERR.puts "ERROR: Invalid version '#{version}' of '#{cookbook}' requested, '#{metadata.version}' is already in the cookbooks directory."
exit(-1)
end
metadata.dependencies.each do |dependency|
Spiceweasel::Log.debug("cookbook #{cookbook} metadata dependency: #{dependency}")
# dependency is a [name, constraint] pair; only the name is tracked.
@dependencies.push(dependency[0])
end
end
|
ruby
|
{
"resource": ""
}
|
q23430
|
Spiceweasel.Cookbooks.validate_dependencies
|
train
|
# Every cookbook dependency collected from metadata must itself appear in
# the manifest's cookbook list; exits with an error on the first one missing.
def validate_dependencies
  Spiceweasel::Log.debug("cookbook validate_dependencies: '#{@dependencies}'")
  @dependencies.each do |dep|
    next if member?(dep)
    STDERR.puts "ERROR: Cookbook dependency '#{dep}' is missing from the list of cookbooks in the manifest."
    exit(-1)
  end
end
|
ruby
|
{
"resource": ""
}
|
q23431
|
Renogen.Generator.generate!
|
train
|
# Runs the changelog generation pipeline: extract entries, stamp the version
# and release date, then hand the changelog to the configured writer.
def generate!
changelog = extraction_stratagy.extract
changelog.version = version
changelog.date = options['release_date']
writer.write!(changelog)
end
|
ruby
|
{
"resource": ""
}
|
q23432
|
Guacamole.AqlQuery.perfom_query
|
train
|
# Executes the AQL query and feeds each result document to the appropriate
# iterator (with or without model mapping).
# NOTE(review): "perfom" is a typo in the public method name; renaming would
# break callers, so it is kept.
def perfom_query(iterator_with_mapping, &block)
iterator = perform_mapping? ? iterator_with_mapping : iterator_without_mapping(&block)
connection.execute(aql_string, options).each(&iterator)
end
|
ruby
|
{
"resource": ""
}
|
q23433
|
Spiceweasel.Clusters.cluster_process_nodes
|
train
|
# Processes every node of a cluster environment: forces the -E environment
# option onto each node, reuses the Nodes logic to build create commands,
# and replaces the per-node client/node deletes with a single bulk delete
# loop scoped to the environment.
def cluster_process_nodes(cluster, environment, cookbooks, environments, roles, knifecommands, rootoptions)
Spiceweasel::Log.debug("cluster::cluster_process_nodes '#{environment}' '#{cluster[environment]}'")
cluster[environment].each do |node|
node_name = node.keys.first
options = node[node_name]["options"] || ""
validate_environment(options, environment, environments) unless Spiceweasel::Config[:novalidation]
# push the Environment back on the options
node[node_name]["options"] = options + " -E #{environment}"
end
# let's reuse the Nodes logic
nodes = Spiceweasel::Nodes.new(cluster[environment], cookbooks, environments, roles, knifecommands, rootoptions)
@create.concat(nodes.create)
# what about providers??
nodes.delete.each do |del|
# Per-node client/node deletes are handled by the environment-wide loop below.
@delete.push(del) unless del.to_s =~ /^knife client|^knife node/
end
if bundler?
@delete.push(Command.new("for N in $(bundle exec knife node list -E #{environment}); do bundle exec knife client delete $N -y; bundle exec knife node delete $N -y; done"))
else
@delete.push(Command.new("for N in $(knife node list -E #{environment}); do knife client delete $N -y; knife node delete $N -y; done"))
end
end
|
ruby
|
{
"resource": ""
}
|
q23434
|
TrustyCms.ExtensionLoader.activate_extensions
|
train
|
# Activates the configured extensions (all of them, or the listed subset in
# order), after initializing views; finally reloads Page subclasses.
def activate_extensions
initializer.initialize_views
ordered_extensions = []
configuration = TrustyCms::Application.config
if configuration.extensions.first == :all
ordered_extensions = extensions
else
# Preserve the user-specified activation order.
configuration.extensions.each {|name| ordered_extensions << select_extension(name) }
end
ordered_extensions.flatten.each(&:activate)
Page.load_subclasses
end
|
ruby
|
{
"resource": ""
}
|
q23435
|
Jekyll.JekyllAppEngine.source_partial_exists?
|
train
|
# True when the site source contains an "_app.yaml" partial.
# File.exists? was removed in Ruby 3.2; File.exist? is the supported form.
def source_partial_exists?
  if @site.respond_to?(:in_source_dir)
    File.exist? @site.in_source_dir("_app.yaml")
  else
    File.exist? Jekyll.sanitized_path(@site.source, "_app.yaml")
  end
end
|
ruby
|
{
"resource": ""
}
|
q23436
|
Jekyll.JekyllAppEngine.document_overrides
|
train
|
# Returns the per-document "app_engine" overrides hash, or an empty hash
# when the document carries no data or no app_engine key.
def document_overrides(document)
  return {} unless document.respond_to?(:data) && document.data.has_key?("app_engine")
  document.data.fetch("app_engine")
end
|
ruby
|
{
"resource": ""
}
|
q23437
|
Jekyll.JekyllAppEngine.app_yaml_exists?
|
train
|
# True when the site source already contains a top-level "app.yaml".
# File.exists? was removed in Ruby 3.2; File.exist? is the supported form.
def app_yaml_exists?
  if @site.respond_to?(:in_source_dir)
    File.exist? @site.in_source_dir("app.yaml")
  else
    File.exist? Jekyll.sanitized_path(@site.source, "app.yaml")
  end
end
|
ruby
|
{
"resource": ""
}
|
q23438
|
Spiceweasel.DataBags.validate_item
|
train
|
# Validates a data bag item file: it must exist, parse as JSON, and carry an
# "id" matching the item name from the manifest. Exits with an error otherwise.
def validate_item(db, item)
unless File.exist?("data_bags/#{db}/#{item}.json")
STDERR.puts "ERROR: data bag '#{db}' item '#{item}' file 'data_bags/#{db}/#{item}.json' does not exist"
exit(-1)
end
f = File.read("data_bags/#{db}/#{item}.json")
begin
itemfile = JSON.parse(f)
rescue JSON::ParserError => e # invalid JSON
STDERR.puts "ERROR: data bag '#{db} item '#{item}' has JSON errors."
STDERR.puts e.message
exit(-1)
end
# validate the id matches the file name
item = item.split("/").last if item =~ /\// # pull out directories
return if item.eql?(itemfile["id"])
STDERR.puts "ERROR: data bag '#{db}' item '#{item}' listed in the manifest does not match the id '#{itemfile['id']}' within the 'data_bags/#{db}/#{item}.json' file."
exit(-1)
end
|
ruby
|
{
"resource": ""
}
|
q23439
|
Coercible.Coercer.initialize_coercer
|
train
|
# Builds (and caches in coercers) the coercer instance responsible for
# +klass+, falling back to the generic Coercer::Object when no specific
# coercer type is registered.
def initialize_coercer(klass)
coercers[klass] =
begin
coercer = Coercer::Object.determine_type(klass) || Coercer::Object
args = [ self ]
# Coercers that declare a config_name receive their configuration too.
args << config_for(coercer) if coercer.respond_to?(:config_name)
coercer.new(*args)
end
end
|
ruby
|
{
"resource": ""
}
|
q23440
|
Guacamole.Transaction.write_collections
|
train
|
# Collects the distinct collection names the transaction will write to: each
# edge collection plus the collections of all connected from/to vertices.
def write_collections
edge_collections.flat_map do |target_state|
[target_state.edge_collection_name] +
(target_state.from_vertices + target_state.to_vertices).map(&:collection)
end.uniq.compact
end
|
ruby
|
{
"resource": ""
}
|
q23441
|
Guacamole.Transaction.transaction
|
train
|
# Creates the database transaction over the computed read/write collection
# sets, forcing synchronous persistence (wait_for_sync).
# Note: the local variable intentionally shadows this method's name.
def transaction
transaction = database.create_transaction(transaction_code,
write: write_collections,
read: read_collections)
transaction.wait_for_sync = true
transaction
end
|
ruby
|
{
"resource": ""
}
|
q23442
|
PDUTools.Helpers.decode16bit
|
train
|
# Decodes a hex string of UTF-16LE code units (4 hex digits per unit) into a
# UTF-8 string. +length+ is the payload length in octets, so length / 2 is
# the number of 16-bit units to keep.
# (Removed a dead `…collect()` statement whose result was discarded, and
# replaced ActiveSupport's in_groups_of with stdlib each_slice.)
def decode16bit data, length
  double_octets = data.chars.each_slice(4).map(&:join).map{|x| x.to_i(16) }[0, length / 2] # integer values
  double_octets.collect do |o|
    # NOTE(review): pack('S') is native-endian; this assumes a little-endian
    # host so the bytes line up with the utf-16le tag below — confirm.
    [o].pack('S')
  end.join.force_encoding('utf-16le').encode('utf-8')
end
|
ruby
|
{
"resource": ""
}
|
q23443
|
Guacamole.Query.each
|
train
|
# Iterates over the query results, mapping each raw document to a model
# before yielding it. Returns an Enumerator when called without a block.
def each(&block)
return to_enum(__callee__) unless block_given?
perfom_query ->(document) { block.call mapper.document_to_model(document) }, &block
end
|
ruby
|
{
"resource": ""
}
|
q23444
|
Guacamole.Query.perfom_query
|
train
|
# Runs the query against the collection — a by_example filter when an
# example is set, otherwise a full scan — feeding each document to +iterator+.
# NOTE(review): "perfom" is a typo in the public method name; kept to avoid
# breaking callers.
def perfom_query(iterator, &block)
if example
connection.by_example(example, options).each(&iterator)
else
connection.all(options).each(&iterator)
end
end
|
ruby
|
{
"resource": ""
}
|
q23445
|
Mapnik.Map.style
|
train
|
# Builds a Mapnik::Style, yields it to the block for configuration, then
# registers it in the styles hash under +name+.
def style(name)
  new_style = Mapnik::Style.new
  yield new_style
  styles[name] = new_style
end
|
ruby
|
{
"resource": ""
}
|
q23446
|
Mapnik.Map.layer
|
train
|
# Builds a layer bound to this map, yields it to the block for
# configuration, then appends it to the map's layer list.
def layer(name, srs = nil)
  new_layer = Mapnik::Layer.new(name, srs)
  new_layer.map = self
  yield new_layer
  layers << new_layer
end
|
ruby
|
{
"resource": ""
}
|
q23447
|
Mapnik.Map.render_to_file
|
train
|
# Renders the map to +filename+, using an explicit format when given.
# Returns true when the output file exists afterwards.
# File.exists? was removed in Ruby 3.2; File.exist? is the supported form.
def render_to_file(filename, format = nil)
  if format
    __render_to_file_with_format__(filename, format)
  else
    __render_to_file__(filename)
  end
  File.exist?(filename)
end
|
ruby
|
{
"resource": ""
}
|
q23448
|
Rfm.Error.getError
|
train
|
# Looks up the error class registered for +code+ and instantiates it with a
# fully built message. (camelCase name kept — it is the public API.)
def getError(code, message=nil)
  klass = find_by_code(code)
  full_message = build_message(klass, code, message)
  klass.new(code, full_message)
end
|
ruby
|
{
"resource": ""
}
|
q23449
|
Mondo.Client.request
|
train
|
# Performs an authenticated JSON API request and wraps the result.
#
# method - HTTP verb symbol (:get, :post, ...)
# path   - request path, URI-encoded before dispatch
# opts   - :headers, :params, and :data (form-encoded into the body)
#
# Returns a Response for 2xx/3xx, raises ApiError otherwise.
def request(method, path, opts = {})
  raise ClientError, 'Access token missing' unless @access_token
  opts[:headers] = {} if opts[:headers].nil?
  opts[:headers]['Accept'] = 'application/json'
  opts[:headers]['Content-Type'] = 'application/json' unless method == :get
  opts[:headers]['User-Agent'] = user_agent
  opts[:headers]['Authorization'] = "Bearer #{@access_token}"
  if !opts[:data].nil?
    # (Removed a leftover debug `puts "SETTING BODY ..."` that wrote the
    # request body to stdout.)
    opts[:body] = opts[:data].to_param
    opts[:headers]['Content-Type'] = 'application/x-www-form-urlencoded' # sob sob
  end
  # NOTE(review): URI.encode is deprecated and removed in Ruby 3.0+;
  # migrating needs care since no drop-in replacement escapes identically.
  path = URI.encode(path)
  resp = connection.run_request(method, path, opts[:body], opts[:headers]) do |req|
    req.params = opts[:params] if !opts[:params].nil?
  end
  response = Response.new(resp)
  case response.status
  when 301, 302, 303, 307
    # TODO
  when 200..299, 300..399
    # on non-redirecting 3xx statuses, just return the response
    response
  when 400..599
    error = ApiError.new(response)
    raise(error, "Status code #{response.status}")
  else
    error = ApiError.new(response)
    raise(error, "Unhandled status code value of #{response.status}")
  end
end
|
ruby
|
{
"resource": ""
}
|
q23450
|
Rfm.Server.connect
|
train
|
# POSTs the merged query parameters to the FileMaker XML endpoint, with the
# requested action flagged as an empty-valued parameter.
def connect(action, args, options = {})
  params = args.merge(expand_options(options)).merge({action => ''})
  http_fetch("/fmi/xml/fmresultset.xml", params)
end
|
ruby
|
{
"resource": ""
}
|
q23451
|
CopycopterClient.I18nBackend.available_locales
|
train
|
# Merges the locales present in the Copycopter cache (the first segment of
# each cached key) with the backend's own locales, deduplicated as symbols.
def available_locales
cached_locales = cache.keys.map { |key| key.split('.').first }
(cached_locales + super).uniq.map { |locale| locale.to_sym }
end
|
ruby
|
{
"resource": ""
}
|
q23452
|
RAR.Archive.create!
|
train
|
# Runs the rar command and waits for it to finish.
# Returns true on success; raises CommandLineError for exit statuses above 1
# (exit status 1 — warnings — is treated as success).
def create!
rar_process = IO.popen command_line
# Wait for the child rar process to finish.
_, status = Process.wait2 rar_process.pid
if status.exitstatus > 1
# Prefer the documented message for this exit code when one exists.
if message = ExitCodeMessages[status.exitstatus]
raise CommandLineError, message
else
raise CommandLineError, "Unknown exit status: #{status}"
end
else
true
end
end
|
ruby
|
{
"resource": ""
}
|
q23453
|
CopycopterClient.Client.upload
|
train
|
# Uploads draft blurbs as JSON to the Copycopter service, raising via
# `check` when the response indicates failure.
def upload(data)
connect do |http|
response = http.post(uri('draft_blurbs'), data.to_json, 'Content-Type' => 'application/json')
check response
log 'Uploaded missing translations'
end
end
|
ruby
|
{
"resource": ""
}
|
q23454
|
CopycopterClient.Client.deploy
|
train
|
# Triggers a deploy on the Copycopter service (empty POST body), raising via
# `check` when the response indicates failure.
def deploy
connect do |http|
response = http.post(uri('deploys'), '')
check response
log 'Deployed'
end
end
|
ruby
|
{
"resource": ""
}
|
q23455
|
Hooray.Seek.nodes
|
train
|
# Memoized list of discovered nodes: one Node per responsive host from the
# sweep, with its MAC looked up from the local ARP table.
def nodes
return @nodes if @nodes
@nodes = sweep.map do |k, v|
Node.new(ip: k, mac: Hooray::Local.arp_table[k.to_s], ports: v)
end # .reject { |n| n.mac.nil? } # remove those without mac
end
|
ruby
|
{
"resource": ""
}
|
q23456
|
Hooray.Seek.ping_class
|
train
|
# Chooses the Net::Ping implementation: external ping when no ports are
# given, TCP for unknown protocols, otherwise the class matching @protocol
# (TCP, UDP, HTTP or WMI).
def ping_class
return Net::Ping::External unless ports
return Net::Ping::TCP unless @protocol =~ /tcp|udp|http|wmi/
Net::Ping.const_get(@protocol.upcase)
end
|
ruby
|
{
"resource": ""
}
|
q23457
|
Hooray.Seek.scan_bot
|
train
|
# Probes every configured port of +ip+ (or a single nil "port" for plain
# pings), each in its own thread; responsive ports are appended to @scan[ip].
# Threads are joined later by the caller (see sweep).
def scan_bot(ip)
(ports || [nil]).each do |port|
Thread.new do
if ping_class.new(ip.to_s, port, TIMEOUT).ping?
@scan[ip] << port
print '.'
end
end
end
end
|
ruby
|
{
"resource": ""
}
|
q23458
|
Hooray.Seek.sweep
|
train
|
# Pings every address of the network range concurrently, then keeps only the
# hosts that answered on at least one port. Returns the @scan hash.
def sweep
  network.to_range.each do |ip|
    @scan[ip] = []
    scan_bot(ip)
  end
  Thread.list.reject { |t| t == Thread.current }.each(&:join)
  # Hash#reject! returns nil when nothing was rejected, so its result must
  # not be returned directly — callers (e.g. #nodes) iterate over the result.
  @scan.reject! { |_k, v| v.empty? }
  @scan
end
|
ruby
|
{
"resource": ""
}
|
q23459
|
CopycopterClient.Cache.export
|
train
|
# Renders the cached blurbs as nested YAML: a flat key like "en.foo.bar"
# becomes en: { foo: { bar: value } }. Returns nil when the cache is empty.
def export
  keys = {}
  lock do
    @blurbs.sort.each do |(blurb_key, value)|
      node = keys
      path = blurb_key.split('.')
      path[0..-2].each do |segment|
        # Overwrite en.key with en.sub.key
        node[segment] = {} unless node[segment].class == Hash
        node = node[segment]
      end
      node[path.last] = value
    end
  end
  keys.to_yaml unless keys.size < 1
end
|
ruby
|
{
"resource": ""
}
|
q23460
|
CopycopterClient.Cache.wait_for_download
|
train
|
# Blocks until the first download has completed, logging once (and flushing
# the logger when it supports flushing) while waiting.
def wait_for_download
  return unless pending?
  logger.info 'Waiting for first download'
  logger.flush if logger.respond_to? :flush
  sleep 0.1 while pending?
end
|
ruby
|
{
"resource": ""
}
|
q23461
|
RTurk.RegisterHITType.validate
|
train
|
# Collects every required field whose accessor returns a falsy value and
# raises RTurk::MissingParameters naming all of them at once.
def validate
  missing_parameters = required_fields.reject { |param| self.send(param) }.map(&:to_s)
  return if missing_parameters.empty?
  raise RTurk::MissingParameters, "Parameters: '#{missing_parameters.join(', ')}'"
end
|
ruby
|
{
"resource": ""
}
|
q23462
|
TableHelper.CollectionTable.default_class
|
train
|
# Infers the row class for the table: the association's klass when the
# collection is an ActiveRecord proxy, otherwise the class of the first
# element (nil for an empty plain collection).
def default_class
  if collection.respond_to?(:proxy_reflection)
    collection.proxy_reflection.klass
  else
    collection.first.class unless collection.empty?
  end
end
|
ruby
|
{
"resource": ""
}
|
q23463
|
TableHelper.RowBuilder.undef_cell
|
train
|
# Removes the previously generated cell accessor from this object's
# singleton class (dashes in the cell name map to underscores in the
# generated method name).
def undef_cell(name)
  method_name = name.gsub('-', '_')
  singleton = class << self; self; end
  singleton.class_eval { remove_method(method_name) }
end
|
ruby
|
{
"resource": ""
}
|
q23464
|
TableHelper.Row.cell
|
train
|
# Creates (or replaces) a cell in this row. Trailing hash arguments are
# treated as options and get the table's object name as namespace; a named
# cell also gets an accessor defined on the row builder. Returns the cell.
def cell(name, *args)
name = name.to_s if name
options = args.last.is_a?(Hash) ? args.pop : {}
options[:namespace] = table.object_name
args << options
cell = Cell.new(name, *args)
cells[name] = cell
builder.define_cell(name) if name
cell
end
|
ruby
|
{
"resource": ""
}
|
q23465
|
TableHelper.Header.column
|
train
|
# Declares one or more header columns. The first user call clears the
# auto-generated header. An optional trailing String is used as content and
# a trailing Hash as options for every created column. Returns the single
# column, or the array when several names were given.
def column(*names)
# Clear the header row if this is being customized by the user
unless @customized
@customized = true
clear
end
# Extract configuration
options = names.last.is_a?(Hash) ? names.pop : {}
content = names.last.is_a?(String) ? names.pop : nil
args = [content, options].compact
names.collect! do |name|
column = row.cell(name, *args)
column.content_type = :header
# Default accessibility scope for header cells.
column[:scope] ||= 'col'
column
end
names.length == 1 ? names.first : names
end
|
ruby
|
{
"resource": ""
}
|
q23466
|
TableHelper.Header.html
|
train
|
# Renders this section's tag, hiding it with an inline style when the table
# is empty and hide_when_empty is set.
def html
  attrs = @html_options.dup
  attrs[:style] = 'display: none;' if table.empty? && hide_when_empty
  content_tag(tag_name, content, attrs)
end
|
ruby
|
{
"resource": ""
}
|
q23467
|
Aliyun.Service.gen_request_parameters
|
train
|
# Augments +params+ with the common Aliyun API parameters (action,
# timestamp, nonce) and computes the request signature over the result.
# Note: merge! mutates the caller's hash in place.
def gen_request_parameters method, params
#add common parameters
params.merge! self.service.default_parameters
params.merge! self.options
params[:Action] = method.to_s
params[:Timestamp] = Time.now.utc.iso8601
params[:SignatureNonce] = SecureRandom.uuid
# Signature must be computed last, over all other parameters.
params[:Signature] = compute_signature params
params
end
|
ruby
|
{
"resource": ""
}
|
q23468
|
Aliyun.Service.compute_signature
|
train
|
# Builds the Aliyun "string to sign" from the request parameters and
# returns its HMAC-SHA1 signature.
#
# Per the Aliyun signing spec the parameters are sorted by key,
# percent-encoded as key=value pairs, joined, and wrapped into
#   HTTP-METHOD + SEP + encode('/') + SEP + encode(query)
# The access key secret is suffixed with '&' as the spec requires.
#
# Cleanup: removed a dead initial assignment to
# canonicalized_query_string, the never-used `length` local, and a
# redundant final assignment to `signature`.
def compute_signature params
  if $DEBUG
    puts "keys before sorted: #{params.keys}"
  end
  sorted_keys = params.keys.sort
  if $DEBUG
    puts "keys after sorted: #{sorted_keys}"
  end
  canonicalized_query_string = sorted_keys.map { |key|
    "%s=%s" % [safe_encode(key.to_s), safe_encode(params[key])]
  }.join(self.service.separator)
  string_to_sign = self.service.http_method + self.service.separator + safe_encode('/') + self.service.separator + safe_encode(canonicalized_query_string)
  if $DEBUG
    puts "string_to_sign is #{string_to_sign}"
  end
  calculate_signature access_key_secret + "&", string_to_sign
end
|
ruby
|
{
"resource": ""
}
|
q23469
|
Aliyun.Service.calculate_signature
|
train
|
# Computes the request signature: Base64-encoded HMAC-SHA1 of
# +string_to_sign+ keyed by +key+, with the trailing newline that
# Base64.encode64 appends stripped off.
#
# Uses the OpenSSL standard library instead of the unmaintained
# ruby-hmac gem; the resulting digest is byte-identical.
def calculate_signature key, string_to_sign
  require 'openssl'
  require 'base64'
  digest = OpenSSL::HMAC.digest(OpenSSL::Digest.new('sha1'), key, string_to_sign)
  signature = Base64.encode64(digest).gsub("\n", '')
  if $DEBUG
    puts "Signature #{signature}"
  end
  signature
end
|
ruby
|
{
"resource": ""
}
|
q23470
|
SubtitleIt.Subtitle.encode_dump
|
train
|
# Ensures +dump+ (an IO or a String) is UTF-8 text.
#
# Detects the encoding with CharlockHolmes and transcodes to UTF-8
# when needed, announcing the conversion on stdout.  Returns the
# (possibly converted) string.
def encode_dump(dump)
  dump = dump.read unless dump.is_a?(String)
  detected = CharlockHolmes::EncodingDetector.detect(dump)
  return dump if detected[:encoding] == 'UTF-8'
  puts "Converting `#{detected[:encoding]}` to `UTF-8`".yellow
  CharlockHolmes::Converter.convert dump, detected[:encoding], 'UTF-8'
end
|
ruby
|
{
"resource": ""
}
|
q23471
|
BlockScore.Collection.create
|
train
|
# Creates a new member resource scoped to the parent and registers it
# in this collection.  The parent must already be persisted (have an
# id); otherwise an Error is raised.  Returns the new member.
def create(params = {})
  fail Error, 'Create parent first' unless parent.id
  instance = member_class.create(default_params.merge(params))
  add_instance(instance)
end
|
ruby
|
{
"resource": ""
}
|
q23472
|
BlockScore.Collection.retrieve
|
train
|
# Looks up a member by +id+, preferring an already-loaded instance in
# this collection.  Falls back to fetching it via the API and
# registering the result here.
def retrieve(id)
  each do |existing|
    return existing if existing.id.eql?(id)
  end
  add_instance(member_class.retrieve(id))
end
|
ruby
|
{
"resource": ""
}
|
q23473
|
BlockScore.Collection.parent_id?
|
train
|
# Predicate: does +item+ belong to this collection's parent?
# True only when the parent is persisted and the item's foreign key
# matches the parent's id.
def parent_id?(item)
  return false unless parent.id
  item.send(foreign_key).eql?(parent.id)
end
|
ruby
|
{
"resource": ""
}
|
q23474
|
BlockScore.Collection.register_to_parent
|
train
|
# Registers an externally-built +item+ with this collection.
#
# Raises Error unless the item's foreign key points at this
# collection's parent (see #parent_id?).  On success the item's id is
# tracked, the item is appended to the collection, and the item
# itself is returned.
def register_to_parent(item)
  fail Error, 'None belonging' unless parent_id?(item)
  ids << item.id
  self << item
  item
end
|
ruby
|
{
"resource": ""
}
|
q23475
|
TableHelper.Body.build_row
|
train
|
# Renders one body row for +object+ and returns its HTML.
#
# +index+ defaults to the object's position in the table's collection
# and drives the alternating decoration when +alternate+ is set (the
# index is asked "#{alternate}?", e.g. even?/odd?).  A configured
# builder block may customize the row before rendering.
def build_row(object, index = table.collection.index(object))
  body_row = BodyRow.new(object, self)
  body_row.alternate = alternate ? index.send("#{alternate}?") : false
  @builder.call(body_row.builder, object, index) if @builder
  body_row.html
end
|
ruby
|
{
"resource": ""
}
|
q23476
|
PokerEngine.Cards.values_desc_by_occurency
|
train
|
# Returns the card values ordered by how often they occur (most
# frequent first); ties are broken by descending value.
def values_desc_by_occurency
  values = cards.map(&:value)
  values.sort do |left, right|
    by_count = values.count(left) <=> values.count(right)
    by_count.zero? ? (right <=> left) : -by_count
  end
end
|
ruby
|
{
"resource": ""
}
|
q23477
|
TableHelper.BodyRow.content
|
train
|
# Renders this row's cells as HTML, following the header's column
# order.  A cell rendered with a colspan causes the columns it covers
# to be skipped; columns with no explicit cell get an empty Cell so
# every header column is represented.
def content
  number_to_skip = 0 # Keeps track of the # of columns to skip
  html = ''
  table.header.column_names.each do |column|
    # Consume one covered column and move on ("and" keeps this a
    # single modifier-if statement)
    number_to_skip -= 1 and next if number_to_skip > 0
    if cell = @cells[column]
      # This cell may span several columns; skip the ones it covers
      number_to_skip = (cell[:colspan] || 1) - 1
    else
      # No content defined for this column: render an empty cell
      cell = Cell.new(column, nil)
    end
    html << cell.html
  end
  html
end
|
ruby
|
{
"resource": ""
}
|
q23478
|
Beaker.AwsSdk.kill_instances
|
train
|
# Terminates any of the given EC2 instances that are still running.
#
# Each instance is re-queried by id so stale state objects don't
# trigger spurious terminate calls.  Returns nil.
def kill_instances(instances)
  ids_to_kill = instances.compact.select { |instance|
    instance_by_id(instance.instance_id).state.name == 'running'
  }.map(&:instance_id)
  return nil if ids_to_kill.empty?
  @logger.notify("aws-sdk: killing EC2 instance(s) #{ids_to_kill.join(', ')}")
  client.terminate_instances(:instance_ids => ids_to_kill)
  nil
end
|
ruby
|
{
"resource": ""
}
|
q23479
|
Beaker.AwsSdk.kill_zombie_volumes
|
train
|
# Scans every region for EBS volumes in the 'available' state (i.e.
# attached to nothing) and deletes them, logging how many were freed.
# Volumes that vanish between listing and deletion are logged and
# skipped.
def kill_zombie_volumes
  # Occasionaly, tearing down ec2 instances leaves orphaned EBS volumes behind -- these stack up quickly.
  # This simply looks for EBS volumes that are not in use
  @logger.notify("aws-sdk: Kill Zombie Volumes!")
  volume_count = 0
  regions.each do |region|
    @logger.debug "Reviewing: #{region}"
    available_volumes = client(region).describe_volumes(
      :filters => [
        { :name => 'status', :values => ['available'], }
      ]
    ).volumes
    available_volumes.each do |volume|
      begin
        client(region).delete_volume(:volume_id => volume.id)
        volume_count += 1
      rescue Aws::EC2::Errors::InvalidVolume::NotFound => e
        # Deleted by someone else (or already gone): not an error
        @logger.debug "Failed to remove volume: #{volume.id} #{e}"
      end
    end
  end
  @logger.notify "Freed #{volume_count} volume(s)"
end
|
ruby
|
{
"resource": ""
}
|
q23480
|
Beaker.AwsSdk.launch_all_nodes
|
train
|
# Launches EC2 instances for every host in the configuration.
#
# Hosts are partitioned by subnet requirement:
#   * a specific subnet (host 'subnet_id' or the global 'subnet_id')
#   * any one of the global 'subnet_ids'
#   * no subnet at all
# Configuring both subnet_id and subnet_ids globally is an error.
# If anything fails mid-launch, every instance started so far is
# killed and the exception re-raised.  On success each host gets its
# running instance stored under host['instance'].  Returns nil.
def launch_all_nodes
  @logger.notify("aws-sdk: launch all hosts in configuration")
  ami_spec = YAML.load_file(@options[:ec2_yaml])["AMI"]
  global_subnet_id = @options['subnet_id']
  global_subnets = @options['subnet_ids']
  if global_subnet_id and global_subnets
    raise RuntimeError, 'Config specifies both subnet_id and subnet_ids'
  end
  no_subnet_hosts = []
  specific_subnet_hosts = []
  some_subnet_hosts = []
  @hosts.each do |host|
    if global_subnet_id or host['subnet_id']
      specific_subnet_hosts.push(host)
    elsif global_subnets
      some_subnet_hosts.push(host)
    else
      no_subnet_hosts.push(host)
    end
  end
  instances = [] # Each element is {:instance => i, :host => h}
  begin
    @logger.notify("aws-sdk: launch instances not particular about subnet")
    launch_nodes_on_some_subnet(some_subnet_hosts, global_subnets, ami_spec,
                                instances)
    @logger.notify("aws-sdk: launch instances requiring a specific subnet")
    specific_subnet_hosts.each do |host|
      subnet_id = host['subnet_id'] || global_subnet_id
      instance = create_instance(host, ami_spec, subnet_id)
      instances.push({:instance => instance, :host => host})
    end
    @logger.notify("aws-sdk: launch instances requiring no subnet")
    no_subnet_hosts.each do |host|
      instance = create_instance(host, ami_spec, nil)
      instances.push({:instance => instance, :host => host})
    end
    wait_for_status(:running, instances)
  rescue Exception => ex
    # NOTE(review): rescues Exception (not StandardError) so cleanup
    # also runs on SignalException/SystemExit before re-raising.
    @logger.notify("aws-sdk: exception #{ex.class}: #{ex}")
    kill_instances(instances.map{|x| x[:instance]})
    raise ex
  end
  # At this point, all instances should be running since wait
  # either returns on success or throws an exception.
  if instances.empty?
    raise RuntimeError, "Didn't manage to launch any EC2 instances"
  end
  # Assign the now known running instances to their hosts.
  instances.each {|x| x[:host]['instance'] = x[:instance]}
  nil
end
|
ruby
|
{
"resource": ""
}
|
q23481
|
Beaker.AwsSdk.wait_for_status_netdev
|
train
|
# Extra readiness wait for network-device hosts (F5 / NetScaler).
#
# If any such host is present, waits until all hosts are :running and
# then until EC2's instance status checks report "ok" for each one.
# The wait covers the whole host set, so it only needs to happen once
# — hence the break after the first matching host.
def wait_for_status_netdev()
  @hosts.each do |host|
    if host['platform'] =~ /f5-|netscaler/
      wait_for_status(:running, @hosts)
      wait_for_status(nil, @hosts) do |instance|
        instance_status_collection = client.describe_instance_status({:instance_ids => [instance.instance_id]})
        first_instance = instance_status_collection.first[:instance_statuses].first
        # nil (keep waiting) when no status is reported yet
        first_instance[:instance_status][:status] == "ok" if first_instance
      end
      break
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q23482
|
Beaker.AwsSdk.add_tags
|
train
|
# Tags every host's EC2 instance with the standard Beaker metadata
# (jenkins build url, name, department, project, creator) plus any
# per-host :host_tags.  Tags whose value is nil are dropped before
# the API call.  Returns nil.
def add_tags
  @hosts.each do |host|
    instance = host['instance']
    @logger.notify("aws-sdk: Add tags for #{host.name}")
    tags = [
      { :key => 'jenkins_build_url', :value => @options[:jenkins_build_url] },
      { :key => 'Name',              :value => host.name },
      { :key => 'department',        :value => @options[:department] },
      { :key => 'project',           :value => @options[:project] },
      { :key => 'created_by',        :value => @options[:created_by] },
    ]
    host[:host_tags].each do |name, val|
      tags << { :key => name.to_s, :value => val }
    end
    client.create_tags(
      :resources => [instance.instance_id],
      :tags => tags.reject { |tag| tag[:value].nil? },
    )
  end
  nil
end
|
ruby
|
{
"resource": ""
}
|
q23483
|
Beaker.AwsSdk.modify_network_interface
|
train
|
# Attaches the Beaker-managed security groups (AMI ports + ping) to
# each host's primary network interface, restricted to the host's
# 'sg_cidr_ips' (default: open to 0.0.0.0/0).  Returns nil.
def modify_network_interface
  @hosts.each do |host|
    instance = host['instance']
    host['sg_cidr_ips'] = host['sg_cidr_ips'] || '0.0.0.0/0';
    sg_cidr_ips = host['sg_cidr_ips'].split(',')
    # Replace the interface's security groups with our managed ones
    @logger.notify("aws-sdk: Update network_interface for #{host.name}")
    security_group = ensure_group(instance[:network_interfaces].first, Beaker::EC2Helper.amiports(host), sg_cidr_ips)
    ping_security_group = ensure_ping_group(instance[:network_interfaces].first, sg_cidr_ips)
    client.modify_network_interface_attribute(
      :network_interface_id => "#{instance[:network_interfaces].first[:network_interface_id]}",
      :groups => [security_group.group_id, ping_security_group.group_id],
    )
  end
  nil
end
|
ruby
|
{
"resource": ""
}
|
q23484
|
Beaker.AwsSdk.populate_dns
|
train
|
# Copies each instance's addressing details onto its host object:
# public IP (falling back to private), private IP, and DNS name
# (public, falling back to private).  Returns nil.
def populate_dns
  @hosts.each do |host|
    @logger.notify("aws-sdk: Populate DNS for #{host.name}")
    ec2 = host['instance']
    host['ip'] = ec2.public_ip_address || ec2.private_ip_address
    host['private_ip'] = ec2.private_ip_address
    host['dns_name'] = ec2.public_dns_name || ec2.private_dns_name
    @logger.notify("aws-sdk: name: #{host.name} ip: #{host['ip']} private_ip: #{host['private_ip']} dns_name: #{host['dns_name']}")
  end
  nil
end
|
ruby
|
{
"resource": ""
}
|
q23485
|
Beaker.AwsSdk.enable_root
|
train
|
# Enables root access on +host+ unless it already logs in as root.
# Network devices use their platform-specific procedures; everything
# else gets the ssh key copied to root and root login enabled.  The
# connection is closed afterwards so the change takes effect.
def enable_root(host)
  return if host['user'] == 'root'
  case host['platform']
  when /f5-/
    enable_root_f5(host)
  when /netscaler/
    enable_root_netscaler(host)
  else
    copy_ssh_to_root(host, @options)
    enable_root_login(host, @options)
    host['user'] = 'root'
  end
  host.close
end
|
ruby
|
{
"resource": ""
}
|
q23486
|
Beaker.AwsSdk.ensure_key_pair
|
train
|
# Recreates this run's key pair in +region+ from scratch: any
# existing pair with our name is deleted first, then a fresh one is
# imported.  Returns the result of the import.
def ensure_key_pair(region)
  name = key_name
  delete_key_pair(region, name)
  create_new_key_pair(region, name)
end
|
ruby
|
{
"resource": ""
}
|
q23487
|
Beaker.AwsSdk.delete_key_pair_all_regions
|
train
|
# Deletes matching Beaker key pairs from every region.  With a
# filter, pairs named "<filter>-*" are removed; without one, only
# this run's own key pair.
def delete_key_pair_all_regions(keypair_name_filter=nil)
  my_key_pairs(keypair_name_filter).each do |region, names|
    names.each { |name| delete_key_pair(region, name) }
  end
end
|
ruby
|
{
"resource": ""
}
|
q23488
|
Beaker.AwsSdk.my_key_pairs
|
train
|
# Maps each region to the names of key pairs matching either the
# given filter (as "<filter>-*") or, with no filter, this run's own
# key name.  Returns a Hash of region => [key names].
def my_key_pairs(name_filter=nil)
  pattern = name_filter ? "#{name_filter}-*" : key_name
  regions.each_with_object({}) do |region, result|
    result[region] = client(region).describe_key_pairs(
      :filters => [{ :name => 'key-name', :values => [pattern] }]
    ).key_pairs.map(&:key_name)
  end
end
|
ruby
|
{
"resource": ""
}
|
q23489
|
Beaker.AwsSdk.delete_key_pair
|
train
|
# Deletes +pair_name+ from +region+ if it exists; quietly does
# nothing when the pair is absent, including the race where it
# vanishes between lookup and deletion.
def delete_key_pair(region, pair_name)
  existing = client(region).describe_key_pairs(:key_names => [pair_name]).key_pairs.first
  if existing
    @logger.debug("aws-sdk: delete key pair in region: #{region}")
    client(region).delete_key_pair(:key_name => pair_name)
  end
rescue Aws::EC2::Errors::InvalidKeyPairNotFound
  nil
end
|
ruby
|
{
"resource": ""
}
|
q23490
|
Beaker.AwsSdk.create_new_key_pair
|
train
|
# Imports the local public key as +pair_name+ in +region+ and blocks
# until EC2 reports the pair queryable (5 attempts, 2s apart),
# raising RuntimeError if it never shows up.
def create_new_key_pair(region, pair_name)
  @logger.debug("aws-sdk: importing new key pair: #{pair_name}")
  cl = client(region)
  cl.import_key_pair(:key_name => pair_name, :public_key_material => public_key)
  cl.wait_until(:key_pair_exists, { :key_names => [pair_name] }, :max_attempts => 5, :delay => 2)
rescue Aws::Waiters::Errors::WaiterFailed
  raise RuntimeError, "AWS key pair #{pair_name} can not be queried, even after import"
end
|
ruby
|
{
"resource": ""
}
|
q23491
|
Beaker.AwsSdk.group_id
|
train
|
# Derives a deterministic security-group name from a collection of
# ports.  Raises ArgumentError for a nil or empty ports list.
def group_id(ports)
  raise ArgumentError, "Ports list cannot be nil or empty" if ports.nil? || ports.empty?
  port_set = ports.is_a?(Set) ? ports : Set.new(ports)
  # Lolwut, #hash is inconsistent between ruby processes
  "Beaker-#{Zlib.crc32(port_set.inspect)}"
end
|
ruby
|
{
"resource": ""
}
|
q23492
|
Beaker.AwsSdk.create_ping_group
|
train
|
# Creates the shared "ping" security group allowing ICMPv4 ECHO from
# the given CIDR ranges, either in a region (passed as a String name)
# or inside a VPC.  Returns the created group.
def create_ping_group(region_or_vpc, sg_cidr_ips = ['0.0.0.0/0'])
  @logger.notify("aws-sdk: Creating group #{PING_SECURITY_GROUP_NAME}")
  cl = region_or_vpc.is_a?(String) ? client(region_or_vpc) : client
  params = {
    :description => 'Custom Beaker security group to enable ping',
    :group_name => PING_SECURITY_GROUP_NAME,
  }
  params[:vpc_id] = region_or_vpc.vpc_id if region_or_vpc.is_a?(Aws::EC2::Types::Vpc)
  group = cl.create_security_group(params)
  sg_cidr_ips.each do |cidr_ip|
    # 8 == ICMPv4 ECHO request, -1 == all ICMP codes
    add_ingress_rule(cl, group, cidr_ip, '8', '-1', 'icmp')
  end
  group
end
|
ruby
|
{
"resource": ""
}
|
q23493
|
Beaker.AwsSdk.create_group
|
train
|
# Creates a security group opening the given ports to the given CIDR
# ranges, in a region (String) or VPC.  The group name is derived
# deterministically from the port set (see #group_id).  Returns the
# created group.
def create_group(region_or_vpc, ports, sg_cidr_ips = ['0.0.0.0/0'])
  name = group_id(ports)
  @logger.notify("aws-sdk: Creating group #{name} for ports #{ports.to_s}")
  @logger.notify("aws-sdk: Creating group #{name} with CIDR IPs #{sg_cidr_ips.to_s}")
  cl = region_or_vpc.is_a?(String) ? client(region_or_vpc) : client
  params = {
    :description => "Custom Beaker security group for #{ports.to_a}",
    :group_name => name,
  }
  params[:vpc_id] = region_or_vpc.vpc_id if region_or_vpc.is_a?(Aws::EC2::Types::Vpc)
  group = cl.create_security_group(params)
  port_set = ports.is_a?(Set) ? ports : Set.new(ports)
  sg_cidr_ips.each do |cidr_ip|
    port_set.each { |port| add_ingress_rule(cl, group, cidr_ip, port, port) }
  end
  group
end
|
ruby
|
{
"resource": ""
}
|
q23494
|
Beaker.AwsSdk.add_ingress_rule
|
train
|
# Authorizes inbound traffic on +sg_group+ for the given CIDR and
# port range; +protocol+ defaults to tcp.
def add_ingress_rule(cl, sg_group, cidr_ip, from_port, to_port, protocol = 'tcp')
  rule = {
    :cidr_ip => cidr_ip,
    :ip_protocol => protocol,
    :from_port => from_port,
    :to_port => to_port,
    :group_id => sg_group.group_id,
  }
  cl.authorize_security_group_ingress(rule)
end
|
ruby
|
{
"resource": ""
}
|
q23495
|
Beaker.AwsSdk.load_fog_credentials
|
train
|
# Reads AWS credentials from the fog config file and wraps them in an
# Aws::Credentials object.  Access key id and secret are mandatory
# (raises with a pointed message otherwise); session token is
# optional.
def load_fog_credentials(dot_fog = '.fog')
  creds = get_fog_credentials(dot_fog)
  raise "You must specify an aws_access_key_id in your .fog file (#{dot_fog}) for ec2 instances!" unless creds[:aws_access_key_id]
  raise "You must specify an aws_secret_access_key in your .fog file (#{dot_fog}) for ec2 instances!" unless creds[:aws_secret_access_key]
  Aws::Credentials.new(
    creds[:aws_access_key_id],
    creds[:aws_secret_access_key],
    creds[:aws_session_token]
  )
end
|
ruby
|
{
"resource": ""
}
|
q23496
|
RailsApiBenchmark.ResultSet.compute_relative_speed
|
train
|
# Annotates every result with speed factors relative to the averages:
# percentage deviation of response time and requests/second, rounded
# to one decimal place.  Reassigns and returns @results.
def compute_relative_speed
  # Hoisted out of the loop: `averages` is loop-invariant and was
  # previously recomputed for every element of @results.
  avgs = averages
  avg_rt = avgs[:response_time]
  avg_rps = avgs[:req_per_sec]
  @results = @results.map do |r|
    res = r[:results]
    f_rt = ((res[:response_time].to_f - avg_rt) / avg_rt * 100).round(1)
    f_rps = ((res[:req_per_sec].to_f - avg_rps) / avg_rps * 100).round(1)
    r.merge(factors: { response_time: f_rt, req_per_sec: f_rps })
  end
end
|
ruby
|
{
"resource": ""
}
|
q23497
|
Attrio.Helpers.symbolize_hash_keys
|
train
|
# Returns a copy of +hash+ with every key converted to a Symbol where
# possible; keys that don't respond to #to_sym (e.g. Integers) are
# kept as-is.  The input hash is not modified.
#
# Bug fix: the previous version built the symbolized hash via inject
# and then discarded it, returning the original hash untouched.
def symbolize_hash_keys(hash)
  hash.inject({}) do |new_hash, (key, value)|
    new_hash[(key.to_sym rescue key)] = value
    new_hash
  end
end
|
ruby
|
{
"resource": ""
}
|
q23498
|
FcrepoWrapper.Instance.start
|
train
|
# Extracts/configures fcrepo and, for managed configurations, spawns
# the server process and blocks until it reports a status.
def start
  extract_and_configure
  return unless config.managed?
  @pid = spawn(config.env, *process_arguments)
  # Poll until fcrepo is up
  sleep 1 until status
end
|
ruby
|
{
"resource": ""
}
|
q23499
|
FcrepoWrapper.Instance.stop
|
train
|
# Stops a managed, running fcrepo process: sends HUP, waits for the
# status check to go away, then reaps the child.  Always clears the
# cached pid.
def stop
  if config.managed? && started?
    Process.kill 'HUP', pid
    # Poll until fcrepo has shut down
    sleep 1 while status
    Process.waitpid(pid)
  end
  @pid = nil
end
|
ruby
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.