repo | path | func_name | original_string | language | code | code_tokens | docstring | docstring_tokens | sha | url | partition
---|---|---|---|---|---|---|---|---|---|---|---
akerl/githubstats | lib/githubstats/data.rb | GithubStats.Data.pad | def pad(fill_value = -1, data = @raw.clone)
data = _pad data, 0, fill_value, 0
_pad data, -1, fill_value, 6
end | ruby | def pad(fill_value = -1, data = @raw.clone)
data = _pad data, 0, fill_value, 0
_pad data, -1, fill_value, 6
end | [
"def",
"pad",
"(",
"fill_value",
"=",
"-",
"1",
",",
"data",
"=",
"@raw",
".",
"clone",
")",
"data",
"=",
"_pad",
"data",
",",
"0",
",",
"fill_value",
",",
"0",
"_pad",
"data",
",",
"-",
"1",
",",
"fill_value",
",",
"6",
"end"
]
| Pad the dataset to full week increments | [
"Pad",
"the",
"dataset",
"to",
"full",
"week",
"increments"
]
| 39ac2383a6e7b83e36ea5f8ac67a0fdb74f4f5c2 | https://github.com/akerl/githubstats/blob/39ac2383a6e7b83e36ea5f8ac67a0fdb74f4f5c2/lib/githubstats/data.rb#L190-L193 | train |
airslie/renalware-core | app/helpers/renalware/application_helper.rb | Renalware.ApplicationHelper.page_title | def page_title(separator = Renalware.config.page_title_spearator)
[
content_for(:page_title),
Renalware.config.site_name
].compact.join(separator)
end | ruby | def page_title(separator = Renalware.config.page_title_spearator)
[
content_for(:page_title),
Renalware.config.site_name
].compact.join(separator)
end | [
"def",
"page_title",
"(",
"separator",
"=",
"Renalware",
".",
"config",
".",
"page_title_spearator",
")",
"[",
"content_for",
"(",
":page_title",
")",
",",
"Renalware",
".",
"config",
".",
"site_name",
"]",
".",
"compact",
".",
"join",
"(",
"separator",
")",
"end"
]
| For use in layouts | [
"For",
"use",
"in",
"layouts"
]
| 07f20ed4071fc88666590c43a848e7413d5e384b | https://github.com/airslie/renalware-core/blob/07f20ed4071fc88666590c43a848e7413d5e384b/app/helpers/renalware/application_helper.rb#L24-L29 | train |
auser/poolparty | lib/cloud_providers/ec2/ec2_instance.rb | CloudProviders.Ec2Instance.make_image | def make_image(opts={})
opts = {:volume => '/',
:size => 6000,
:destination => '/mnt/bundle',
:exclude => nil
}.merge(opts)
image_file = File.join(opts[:destination], opts[:prefix] )
cmds = ["mkdir -p #{opts[:destination]}"]
cmds << "dd if=/dev/zero of=#{image_file} bs=1M count=#{opts[:size]}"
cmds << "mkfs.ext3 -F -j #{image_file}"
cmds << "mkdir -p #{opts[:destination]}/loop"
cmds << "mount -o loop #{image_file} #{opts[:destination]}/loop"
cmds << "rsync -ax #{rsync_excludes(opts[:exclude])} #{opts[:volume]}/ #{opts[:destination]}/loop/"
cmds << "if [[ -f /etc/init.d/ec2-ssh-host-key-gen ]]; then chmod u+x /etc/init.d/ec2-ssh-host-key-gen ;fi"
cmds << "umount #{opts[:destination]}/loop"
self.ssh cmds
image_file
end | ruby | def make_image(opts={})
opts = {:volume => '/',
:size => 6000,
:destination => '/mnt/bundle',
:exclude => nil
}.merge(opts)
image_file = File.join(opts[:destination], opts[:prefix] )
cmds = ["mkdir -p #{opts[:destination]}"]
cmds << "dd if=/dev/zero of=#{image_file} bs=1M count=#{opts[:size]}"
cmds << "mkfs.ext3 -F -j #{image_file}"
cmds << "mkdir -p #{opts[:destination]}/loop"
cmds << "mount -o loop #{image_file} #{opts[:destination]}/loop"
cmds << "rsync -ax #{rsync_excludes(opts[:exclude])} #{opts[:volume]}/ #{opts[:destination]}/loop/"
cmds << "if [[ -f /etc/init.d/ec2-ssh-host-key-gen ]]; then chmod u+x /etc/init.d/ec2-ssh-host-key-gen ;fi"
cmds << "umount #{opts[:destination]}/loop"
self.ssh cmds
image_file
end | [
"def",
"make_image",
"(",
"opts",
"=",
"{",
"}",
")",
"opts",
"=",
"{",
":volume",
"=>",
"'/'",
",",
":size",
"=>",
"6000",
",",
":destination",
"=>",
"'/mnt/bundle'",
",",
":exclude",
"=>",
"nil",
"}",
".",
"merge",
"(",
"opts",
")",
"image_file",
"=",
"File",
".",
"join",
"(",
"opts",
"[",
":destination",
"]",
",",
"opts",
"[",
":prefix",
"]",
")",
"cmds",
"=",
"[",
"\"mkdir -p #{opts[:destination]}\"",
"]",
"cmds",
"<<",
"\"dd if=/dev/zero of=#{image_file} bs=1M count=#{opts[:size]}\"",
"cmds",
"<<",
"\"mkfs.ext3 -F -j #{image_file}\"",
"cmds",
"<<",
"\"mkdir -p #{opts[:destination]}/loop\"",
"cmds",
"<<",
"\"mount -o loop #{image_file} #{opts[:destination]}/loop\"",
"cmds",
"<<",
"\"rsync -ax #{rsync_excludes(opts[:exclude])} #{opts[:volume]}/ #{opts[:destination]}/loop/\"",
"cmds",
"<<",
"\"if [[ -f /etc/init.d/ec2-ssh-host-key-gen ]]; then chmod u+x /etc/init.d/ec2-ssh-host-key-gen ;fi\"",
"cmds",
"<<",
"\"umount #{opts[:destination]}/loop\"",
"self",
".",
"ssh",
"cmds",
"image_file",
"end"
]
| create an image file and copy this instance to the image file. | [
"create",
"an",
"image",
"file",
"and",
"copy",
"this",
"instance",
"to",
"the",
"image",
"file",
"."
]
| 8b4af051833addd84f4282bcedbdffa814d8e033 | https://github.com/auser/poolparty/blob/8b4af051833addd84f4282bcedbdffa814d8e033/lib/cloud_providers/ec2/ec2_instance.rb#L149-L166 | train |
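A minimal usage sketch for the `make_image` helper in the row above. Note that `:prefix` (the image file name) has no default in the options hash and must be supplied; the instance object and values below are illustrative assumptions, not taken from this source.

```ruby
# Illustrative only: `instance` stands in for a connected Ec2Instance.
# Bundles the root volume into a 4 GB loopback image under /mnt/bundle
# and returns the path of the image file on the instance.
image_path = instance.make_image(
  :prefix => "root-image.img", # required by the method body, no default provided
  :size   => 4096              # image size in MB, used as dd's count= value
)
puts "Bundled image written to #{image_path}"
```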
auser/poolparty | lib/cloud_providers/ec2/helpers/elastic_auto_scaler.rb | CloudProviders.ElasticAutoScaler.teardown | def teardown
triggers.each do |trigger|
trigger.teardown
end
if autoscaling_groups.select {|n| n.name == name }.empty?
puts "Cloud #{cloud.name} autoscaling group does not exist"
else
self.minimum_instances = 0
self.maximum_instances = 0
@new_launch_configuration_name = old_launch_configuration_name
puts "Updating autoscaling group: #{@new_launch_configuration_name}"
update_autoscaling_group!
puts "Terminating nodes in autoscaling group: #{name}"
reset!
# cloud.nodes.each {|n| n.terminate! }
delete_autoscaling_group!
delete_launch_configuration!
puts ""
end
end | ruby | def teardown
triggers.each do |trigger|
trigger.teardown
end
if autoscaling_groups.select {|n| n.name == name }.empty?
puts "Cloud #{cloud.name} autoscaling group does not exist"
else
self.minimum_instances = 0
self.maximum_instances = 0
@new_launch_configuration_name = old_launch_configuration_name
puts "Updating autoscaling group: #{@new_launch_configuration_name}"
update_autoscaling_group!
puts "Terminating nodes in autoscaling group: #{name}"
reset!
# cloud.nodes.each {|n| n.terminate! }
delete_autoscaling_group!
delete_launch_configuration!
puts ""
end
end | [
"def",
"teardown",
"triggers",
".",
"each",
"do",
"|",
"trigger",
"|",
"trigger",
".",
"teardown",
"end",
"if",
"autoscaling_groups",
".",
"select",
"{",
"|",
"n",
"|",
"n",
".",
"name",
"==",
"name",
"}",
".",
"empty?",
"puts",
"\"Cloud #{cloud.name} autoscaling group does not exist\"",
"else",
"self",
".",
"minimum_instances",
"=",
"0",
"self",
".",
"maximum_instances",
"=",
"0",
"@new_launch_configuration_name",
"=",
"old_launch_configuration_name",
"puts",
"\"Updating autoscaling group: #{@new_launch_configuration_name}\"",
"update_autoscaling_group!",
"puts",
"\"Terminating nodes in autoscaling group: #{name}\"",
"reset!",
"delete_autoscaling_group!",
"delete_launch_configuration!",
"puts",
"\"\"",
"end",
"end"
]
| First, change the min_count to | [
"First",
"change",
"the",
"min_count",
"to"
]
| 8b4af051833addd84f4282bcedbdffa814d8e033 | https://github.com/auser/poolparty/blob/8b4af051833addd84f4282bcedbdffa814d8e033/lib/cloud_providers/ec2/helpers/elastic_auto_scaler.rb#L32-L51 | train |
reevoo/sapience-rb | lib/sapience/configuration.rb | Sapience.Configuration.map_levels | def map_levels
return [] unless defined?(::Logger::Severity)
@@map_levels ||=
::Logger::Severity.constants.each_with_object([]) do |constant, levels|
levels[::Logger::Severity.const_get(constant)] = level_by_index_or_error(constant)
end
end | ruby | def map_levels
return [] unless defined?(::Logger::Severity)
@@map_levels ||=
::Logger::Severity.constants.each_with_object([]) do |constant, levels|
levels[::Logger::Severity.const_get(constant)] = level_by_index_or_error(constant)
end
end | [
"def",
"map_levels",
"return",
"[",
"]",
"unless",
"defined?",
"(",
"::",
"Logger",
"::",
"Severity",
")",
"@@map_levels",
"||=",
"::",
"Logger",
"::",
"Severity",
".",
"constants",
".",
"each_with_object",
"(",
"[",
"]",
")",
"do",
"|",
"constant",
",",
"levels",
"|",
"levels",
"[",
"::",
"Logger",
"::",
"Severity",
".",
"const_get",
"(",
"constant",
")",
"]",
"=",
"level_by_index_or_error",
"(",
"constant",
")",
"end",
"end"
]
| Mapping of Rails and Ruby Logger levels to Sapience levels | [
"Mapping",
"of",
"Rails",
"and",
"Ruby",
"Logger",
"levels",
"to",
"Sapience",
"levels"
]
| db0da794d51d209fa3eddf4bc44bebdae6c321bd | https://github.com/reevoo/sapience-rb/blob/db0da794d51d209fa3eddf4bc44bebdae6c321bd/lib/sapience/configuration.rb#L85-L91 | train |
airslie/renalware-core | app/models/concerns/renalware/broadcasting.rb | Renalware.Broadcasting.broadcasting_to_configured_subscribers | def broadcasting_to_configured_subscribers
subscribers = Array(Renalware.config.broadcast_subscription_map[self.class.name])
subscribers.each do |subscriber|
# Support String subscribers eg a simple class name as well as Subscriber instances.
subscriber = Subscriber.new(subscriber) unless subscriber.respond_to?(:klass)
subscribe(subscriber.instance, async: subscriber.async?)
end
self
end | ruby | def broadcasting_to_configured_subscribers
subscribers = Array(Renalware.config.broadcast_subscription_map[self.class.name])
subscribers.each do |subscriber|
# Support String subscribers eg a simple class name as well as Subscriber instances.
subscriber = Subscriber.new(subscriber) unless subscriber.respond_to?(:klass)
subscribe(subscriber.instance, async: subscriber.async?)
end
self
end | [
"def",
"broadcasting_to_configured_subscribers",
"subscribers",
"=",
"Array",
"(",
"Renalware",
".",
"config",
".",
"broadcast_subscription_map",
"[",
"self",
".",
"class",
".",
"name",
"]",
")",
"subscribers",
".",
"each",
"do",
"|",
"subscriber",
"|",
"subscriber",
"=",
"Subscriber",
".",
"new",
"(",
"subscriber",
")",
"unless",
"subscriber",
".",
"respond_to?",
"(",
":klass",
")",
"subscribe",
"(",
"subscriber",
".",
"instance",
",",
"async",
":",
"subscriber",
".",
"async?",
")",
"end",
"self",
"end"
]
| Subscribes any listeners configured in Renalware.config.broadcast_subscription_map
to the current instance.
Example usage
class SomeServiceObject
include Broadcasting
def call
..
end
end
SomeServiceObject.new(..).broadcasting_to_configured_subscribers.call(..)
See https://github.com/krisleech/wisper | [
"Subscribes",
"any",
"listeners",
"configured",
"in",
"Renalware",
".",
"config",
".",
"broadcast_subscription_map",
"to",
"the",
"current",
"instance",
"."
]
| 07f20ed4071fc88666590c43a848e7413d5e384b | https://github.com/airslie/renalware-core/blob/07f20ed4071fc88666590c43a848e7413d5e384b/app/models/concerns/renalware/broadcasting.rb#L49-L57 | train |
auser/poolparty | lib/poolparty/chef.rb | PoolParty.Chef.recipe | def recipe(recipe_name, hsh={})
_recipes << recipe_name unless _recipes.include?(recipe_name)
head = {}
tail = head
recipe_name.split("::").each do |key|
unless key == "default"
n = {}
tail[key] = n
tail = n
end
end
tail.replace hsh
override_attributes.merge!(head) unless hsh.empty?
end | ruby | def recipe(recipe_name, hsh={})
_recipes << recipe_name unless _recipes.include?(recipe_name)
head = {}
tail = head
recipe_name.split("::").each do |key|
unless key == "default"
n = {}
tail[key] = n
tail = n
end
end
tail.replace hsh
override_attributes.merge!(head) unless hsh.empty?
end | [
"def",
"recipe",
"(",
"recipe_name",
",",
"hsh",
"=",
"{",
"}",
")",
"_recipes",
"<<",
"recipe_name",
"unless",
"_recipes",
".",
"include?",
"(",
"recipe_name",
")",
"head",
"=",
"{",
"}",
"tail",
"=",
"head",
"recipe_name",
".",
"split",
"(",
"\"::\"",
")",
".",
"each",
"do",
"|",
"key",
"|",
"unless",
"key",
"==",
"\"default\"",
"n",
"=",
"{",
"}",
"tail",
"[",
"key",
"]",
"=",
"n",
"tail",
"=",
"n",
"end",
"end",
"tail",
".",
"replace",
"hsh",
"override_attributes",
".",
"merge!",
"(",
"head",
")",
"unless",
"hsh",
".",
"empty?",
"end"
]
| Adds a chef recipe to the cloud
The hsh parameter is inserted into the override_attributes.
The insertion is performed as follows. If
the recipe name = "foo::bar" then effectively the call is
override_attributes.merge! { :foo => { :bar => hsh } } | [
"Adds",
"a",
"chef",
"recipe",
"to",
"the",
"cloud"
]
| 8b4af051833addd84f4282bcedbdffa814d8e033 | https://github.com/auser/poolparty/blob/8b4af051833addd84f4282bcedbdffa814d8e033/lib/poolparty/chef.rb#L93-L108 | train |
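A short sketch of the merge behaviour described in the docstring above; `my_cloud` is an illustrative stand-in for whatever object exposes this `recipe` DSL method.

```ruby
# Namespaced recipe: attributes are nested under each path segment.
my_cloud.recipe "nginx::source", :version => "1.4.4"
# => override_attributes.merge!("nginx" => { "source" => { :version => "1.4.4" } })

# A trailing "default" segment is skipped, so attributes attach to the cookbook key.
my_cloud.recipe "postgresql::default", :shared_buffers => "256MB"
# => override_attributes.merge!("postgresql" => { :shared_buffers => "256MB" })
```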
reevoo/sapience-rb | lib/sapience/log_methods.rb | Sapience.LogMethods.measure | def measure(level, message, params = {}, &block)
index = Sapience.config.level_to_index(level)
if level_index <= index
measure_internal(level, index, message, params, &block)
else
yield params if block
end
end | ruby | def measure(level, message, params = {}, &block)
index = Sapience.config.level_to_index(level)
if level_index <= index
measure_internal(level, index, message, params, &block)
else
yield params if block
end
end | [
"def",
"measure",
"(",
"level",
",",
"message",
",",
"params",
"=",
"{",
"}",
",",
"&",
"block",
")",
"index",
"=",
"Sapience",
".",
"config",
".",
"level_to_index",
"(",
"level",
")",
"if",
"level_index",
"<=",
"index",
"measure_internal",
"(",
"level",
",",
"index",
",",
"message",
",",
"params",
",",
"&",
"block",
")",
"else",
"yield",
"params",
"if",
"block",
"end",
"end"
]
| Dynamically supply the log level with every measurement call | [
"Dynamically",
"supply",
"the",
"log",
"level",
"with",
"every",
"measurement",
"call"
]
| db0da794d51d209fa3eddf4bc44bebdae6c321bd | https://github.com/reevoo/sapience-rb/blob/db0da794d51d209fa3eddf4bc44bebdae6c321bd/lib/sapience/log_methods.rb#L109-L116 | train |
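A hedged usage sketch for `measure` above: the log level is supplied per call, and when that level falls below the logger's threshold the block is simply yielded without being measured. The logger object and workload named here are illustrative.

```ruby
# `logger` is assumed to be any object that includes Sapience::LogMethods.
records = logger.measure(:info, "Fetching user records") do
  UserRepository.fetch_all # hypothetical workload being timed
end
```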
airslie/renalware-core | app/helpers/renalware/pd_regimes_helper.rb | Renalware.PDRegimesHelper.available_pd_treatments_for | def available_pd_treatments_for(regime)
scope = "renalware.pd.treatments"
key = regime.capd? ? "capd" : "apd"
I18n.t(key, scope: scope)
end | ruby | def available_pd_treatments_for(regime)
scope = "renalware.pd.treatments"
key = regime.capd? ? "capd" : "apd"
I18n.t(key, scope: scope)
end | [
"def",
"available_pd_treatments_for",
"(",
"regime",
")",
"scope",
"=",
"\"renalware.pd.treatments\"",
"key",
"=",
"regime",
".",
"capd?",
"?",
"\"capd\"",
":",
"\"apd\"",
"I18n",
".",
"t",
"(",
"key",
",",
"scope",
":",
"scope",
")",
"end"
]
| The list of treatment options, stored in I18n | [
"The",
"list",
"of",
"treatment",
"options",
"stored",
"in",
"I18n"
]
| 07f20ed4071fc88666590c43a848e7413d5e384b | https://github.com/airslie/renalware-core/blob/07f20ed4071fc88666590c43a848e7413d5e384b/app/helpers/renalware/pd_regimes_helper.rb#L24-L28 | train |
reevoo/sapience-rb | lib/sapience/logger.rb | Sapience.Logger.log | def log(log, message = nil, progname = nil, &block)
# Compatibility with ::Logger
return add(log, message, progname, &block) unless log.is_a?(Sapience::Log)
if @@appender_thread
@@appender_thread << lambda do
Sapience.appenders.each do |appender|
next unless appender.valid?
begin
appender.log(log)
rescue StandardError => exc
$stderr.write("Appender thread: Failed to log to appender: #{appender.inspect}\n #{exc.inspect}")
end
end
Sapience.clear_tags!
end
end
end | ruby | def log(log, message = nil, progname = nil, &block)
# Compatibility with ::Logger
return add(log, message, progname, &block) unless log.is_a?(Sapience::Log)
if @@appender_thread
@@appender_thread << lambda do
Sapience.appenders.each do |appender|
next unless appender.valid?
begin
appender.log(log)
rescue StandardError => exc
$stderr.write("Appender thread: Failed to log to appender: #{appender.inspect}\n #{exc.inspect}")
end
end
Sapience.clear_tags!
end
end
end | [
"def",
"log",
"(",
"log",
",",
"message",
"=",
"nil",
",",
"progname",
"=",
"nil",
",",
"&",
"block",
")",
"return",
"add",
"(",
"log",
",",
"message",
",",
"progname",
",",
"&",
"block",
")",
"unless",
"log",
".",
"is_a?",
"(",
"Sapience",
"::",
"Log",
")",
"if",
"@@appender_thread",
"@@appender_thread",
"<<",
"lambda",
"do",
"Sapience",
".",
"appenders",
".",
"each",
"do",
"|",
"appender",
"|",
"next",
"unless",
"appender",
".",
"valid?",
"begin",
"appender",
".",
"log",
"(",
"log",
")",
"rescue",
"StandardError",
"=>",
"exc",
"$stderr",
".",
"write",
"(",
"\"Appender thread: Failed to log to appender: #{appender.inspect}\\n #{exc.inspect}\"",
")",
"end",
"end",
"Sapience",
".",
"clear_tags!",
"end",
"end",
"end"
]
| Returns a Logger instance
Return the logger for a specific class, supports class specific log levels
logger = Sapience::Logger.new(self)
OR
logger = Sapience::Logger.new('MyClass')
Parameters:
application
A class, module or a string with the application/class name
to be used in the logger
level
The initial log level to start with for this logger instance
Default: Sapience.config.default_level
filter [Regexp|Proc]
RegExp: Only include log messages where the class name matches the supplied
regular expression. All other messages will be ignored
Proc: Only include log messages where the supplied Proc returns true
The Proc must return true or false
Place log request on the queue for the Appender thread to write to each
appender in the order that they were registered | [
"Returns",
"a",
"Logger",
"instance"
]
| db0da794d51d209fa3eddf4bc44bebdae6c321bd | https://github.com/reevoo/sapience-rb/blob/db0da794d51d209fa3eddf4bc44bebdae6c321bd/lib/sapience/logger.rb#L132-L148 | train |
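Based on the constructor notes quoted in the docstring above, a minimal sketch of obtaining class-specific loggers; the positional order of the level and filter arguments is an assumption taken from that docstring rather than from code shown in this row.

```ruby
# Logger named after the current class, using the configured default level.
logger = Sapience::Logger.new(self)

# Named logger with an explicit level and a Regexp filter, as described above.
debug_logger = Sapience::Logger.new("MyClass", :debug, /MyClass/)
```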
reevoo/sapience-rb | lib/sapience/base.rb | Sapience.Base.with_payload | def with_payload(payload)
current_payload = self.payload
Thread.current[:sapience_payload] = current_payload ? current_payload.merge(payload) : payload
yield
ensure
Thread.current[:sapience_payload] = current_payload
end | ruby | def with_payload(payload)
current_payload = self.payload
Thread.current[:sapience_payload] = current_payload ? current_payload.merge(payload) : payload
yield
ensure
Thread.current[:sapience_payload] = current_payload
end | [
"def",
"with_payload",
"(",
"payload",
")",
"current_payload",
"=",
"self",
".",
"payload",
"Thread",
".",
"current",
"[",
":sapience_payload",
"]",
"=",
"current_payload",
"?",
"current_payload",
".",
"merge",
"(",
"payload",
")",
":",
"payload",
"yield",
"ensure",
"Thread",
".",
"current",
"[",
":sapience_payload",
"]",
"=",
"current_payload",
"end"
]
| Thread specific context information to be logged with every log entry
Add a payload to all log calls on This Thread within the supplied block
logger.with_payload(tracking_number: 12345) do
logger.debug('Hello World')
end
If a log call already includes a pyload, this payload will be merged with
the supplied payload, with the supplied payload taking precedence
logger.with_payload(tracking_number: 12345) do
logger.debug('Hello World', result: 'blah')
end | [
"Thread",
"specific",
"context",
"information",
"to",
"be",
"logged",
"with",
"every",
"log",
"entry"
]
| db0da794d51d209fa3eddf4bc44bebdae6c321bd | https://github.com/reevoo/sapience-rb/blob/db0da794d51d209fa3eddf4bc44bebdae6c321bd/lib/sapience/base.rb#L135-L141 | train |
reevoo/sapience-rb | lib/sapience/base.rb | Sapience.Base.include_message? | def include_message?(log)
return true if @filter.nil?
if @filter.is_a?(Regexp)
!(@filter =~ log.name).nil?
elsif @filter.is_a?(Proc)
@filter.call(log) == true
end
end | ruby | def include_message?(log)
return true if @filter.nil?
if @filter.is_a?(Regexp)
!(@filter =~ log.name).nil?
elsif @filter.is_a?(Proc)
@filter.call(log) == true
end
end | [
"def",
"include_message?",
"(",
"log",
")",
"return",
"true",
"if",
"@filter",
".",
"nil?",
"if",
"@filter",
".",
"is_a?",
"(",
"Regexp",
")",
"!",
"(",
"@filter",
"=~",
"log",
".",
"name",
")",
".",
"nil?",
"elsif",
"@filter",
".",
"is_a?",
"(",
"Proc",
")",
"@filter",
".",
"call",
"(",
"log",
")",
"==",
"true",
"end",
"end"
]
| Whether to log the supplied message based on the current filter if any | [
"Whether",
"to",
"log",
"the",
"supplied",
"message",
"based",
"on",
"the",
"current",
"filter",
"if",
"any"
]
| db0da794d51d209fa3eddf4bc44bebdae6c321bd | https://github.com/reevoo/sapience-rb/blob/db0da794d51d209fa3eddf4bc44bebdae6c321bd/lib/sapience/base.rb#L205-L213 | train |
reevoo/sapience-rb | lib/sapience/base.rb | Sapience.Base.extract_backtrace | def extract_backtrace
stack = caller
while (first = stack.first) && first.include?(SELF_PATTERN)
stack.shift
end
stack
end | ruby | def extract_backtrace
stack = caller
while (first = stack.first) && first.include?(SELF_PATTERN)
stack.shift
end
stack
end | [
"def",
"extract_backtrace",
"stack",
"=",
"caller",
"while",
"(",
"first",
"=",
"stack",
".",
"first",
")",
"&&",
"first",
".",
"include?",
"(",
"SELF_PATTERN",
")",
"stack",
".",
"shift",
"end",
"stack",
"end"
]
| Extract the callers backtrace leaving out Sapience | [
"Extract",
"the",
"callers",
"backtrace",
"leaving",
"out",
"Sapience"
]
| db0da794d51d209fa3eddf4bc44bebdae6c321bd | https://github.com/reevoo/sapience-rb/blob/db0da794d51d209fa3eddf4bc44bebdae6c321bd/lib/sapience/base.rb#L290-L296 | train |
frictionlessdata/datapackage-rb | lib/datapackage/helpers.rb | DataPackage.Helpers.dereference_descriptor | def dereference_descriptor(resource, base_path: nil, reference_fields: nil)
options = {
base_path: base_path,
reference_fields: reference_fields,
}
case resource
when Hash
resource.inject({}) do |new_resource, (key, val)|
if reference_fields.nil? || reference_fields.include?(key)
new_resource[key] = dereference_descriptor(val, **options)
else
new_resource[key] = val
end
new_resource
end
when Enumerable
resource.map{ |el| dereference_descriptor(el, **options)}
when String
begin
resolve_json_reference(resource, deep_dereference: true, base_path: base_path)
rescue Errno::ENOENT
resource
end
else
resource
end
end | ruby | def dereference_descriptor(resource, base_path: nil, reference_fields: nil)
options = {
base_path: base_path,
reference_fields: reference_fields,
}
case resource
when Hash
resource.inject({}) do |new_resource, (key, val)|
if reference_fields.nil? || reference_fields.include?(key)
new_resource[key] = dereference_descriptor(val, **options)
else
new_resource[key] = val
end
new_resource
end
when Enumerable
resource.map{ |el| dereference_descriptor(el, **options)}
when String
begin
resolve_json_reference(resource, deep_dereference: true, base_path: base_path)
rescue Errno::ENOENT
resource
end
else
resource
end
end | [
"def",
"dereference_descriptor",
"(",
"resource",
",",
"base_path",
":",
"nil",
",",
"reference_fields",
":",
"nil",
")",
"options",
"=",
"{",
"base_path",
":",
"base_path",
",",
"reference_fields",
":",
"reference_fields",
",",
"}",
"case",
"resource",
"when",
"Hash",
"resource",
".",
"inject",
"(",
"{",
"}",
")",
"do",
"|",
"new_resource",
",",
"(",
"key",
",",
"val",
")",
"|",
"if",
"reference_fields",
".",
"nil?",
"||",
"reference_fields",
".",
"include?",
"(",
"key",
")",
"new_resource",
"[",
"key",
"]",
"=",
"dereference_descriptor",
"(",
"val",
",",
"**",
"options",
")",
"else",
"new_resource",
"[",
"key",
"]",
"=",
"val",
"end",
"new_resource",
"end",
"when",
"Enumerable",
"resource",
".",
"map",
"{",
"|",
"el",
"|",
"dereference_descriptor",
"(",
"el",
",",
"**",
"options",
")",
"}",
"when",
"String",
"begin",
"resolve_json_reference",
"(",
"resource",
",",
"deep_dereference",
":",
"true",
",",
"base_path",
":",
"base_path",
")",
"rescue",
"Errno",
"::",
"ENOENT",
"resource",
"end",
"else",
"resource",
"end",
"end"
]
| Dereference a resource that can be a URL or path to a JSON file or a hash
Returns a Hash with all values that are URLs or paths dereferenced | [
"Dereference",
"a",
"resource",
"that",
"can",
"be",
"a",
"URL",
"or",
"path",
"to",
"a",
"JSON",
"file",
"or",
"a",
"hash",
"Returns",
"a",
"Hash",
"with",
"all",
"values",
"that",
"are",
"URLs",
"or",
"paths",
"dereferenced"
]
| 75c082ab928ad417ed046819cde5b64fd0a98d20 | https://github.com/frictionlessdata/datapackage-rb/blob/75c082ab928ad417ed046819cde5b64fd0a98d20/lib/datapackage/helpers.rb#L6-L32 | train |
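A small illustrative sketch of the helper above resolving references inside a descriptor hash; the file name and base path are hypothetical, and keys outside `reference_fields` pass through untouched.

```ruby
include DataPackage::Helpers

descriptor = {
  "name"   => "example-package",
  "schema" => "table_schema.json" # hypothetical local JSON file to be inlined
}

resolved = dereference_descriptor(descriptor,
                                  base_path: "/data/packages",
                                  reference_fields: ["schema"])
# "schema" is replaced by the parsed contents of the file (or left as the
# original string if the file cannot be found, per the Errno::ENOENT rescue).
```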
auser/poolparty | lib/cloud_providers/ec2/ec2.rb | CloudProviders.Ec2.describe_instances | def describe_instances(id=nil)
begin
@describe_instances = ec2.describe_instances.reservationSet.item.map do |r|
r.instancesSet.item.map do |i|
inst_options = i.merge(r.merge(:cloud => cloud)).merge(cloud.cloud_provider.dsl_options)
Ec2Instance.new(inst_options)
end
end.flatten
rescue AWS::InvalidClientTokenId => e # AWS credentials invalid
puts "Error contacting AWS: #{e}"
raise e
rescue Exception => e
[]
end
end | ruby | def describe_instances(id=nil)
begin
@describe_instances = ec2.describe_instances.reservationSet.item.map do |r|
r.instancesSet.item.map do |i|
inst_options = i.merge(r.merge(:cloud => cloud)).merge(cloud.cloud_provider.dsl_options)
Ec2Instance.new(inst_options)
end
end.flatten
rescue AWS::InvalidClientTokenId => e # AWS credentials invalid
puts "Error contacting AWS: #{e}"
raise e
rescue Exception => e
[]
end
end | [
"def",
"describe_instances",
"(",
"id",
"=",
"nil",
")",
"begin",
"@describe_instances",
"=",
"ec2",
".",
"describe_instances",
".",
"reservationSet",
".",
"item",
".",
"map",
"do",
"|",
"r",
"|",
"r",
".",
"instancesSet",
".",
"item",
".",
"map",
"do",
"|",
"i",
"|",
"inst_options",
"=",
"i",
".",
"merge",
"(",
"r",
".",
"merge",
"(",
":cloud",
"=>",
"cloud",
")",
")",
".",
"merge",
"(",
"cloud",
".",
"cloud_provider",
".",
"dsl_options",
")",
"Ec2Instance",
".",
"new",
"(",
"inst_options",
")",
"end",
"end",
".",
"flatten",
"rescue",
"AWS",
"::",
"InvalidClientTokenId",
"=>",
"e",
"puts",
"\"Error contacting AWS: #{e}\"",
"raise",
"e",
"rescue",
"Exception",
"=>",
"e",
"[",
"]",
"end",
"end"
]
| Describe instances
Describe the instances that are available on this cloud
@params id (optional) if present, details about the instance
with the id given will be returned
if not given, details for all instances will be returned | [
"Describe",
"instances",
"Describe",
"the",
"instances",
"that",
"are",
"available",
"on",
"this",
"cloud"
]
| 8b4af051833addd84f4282bcedbdffa814d8e033 | https://github.com/auser/poolparty/blob/8b4af051833addd84f4282bcedbdffa814d8e033/lib/cloud_providers/ec2/ec2.rb#L335-L349 | train |
auser/poolparty | lib/cloud_providers/ec2/ec2.rb | CloudProviders.Ec2.aws_options | def aws_options(opts={})
uri=URI.parse(ec2_url)
{ :access_key_id => access_key,
:secret_access_key=> secret_access_key,
:use_ssl => (uri.scheme=='https'),
:path => uri.path,
:host => uri.host,
:port => uri.port
}.merge(opts)
end | ruby | def aws_options(opts={})
uri=URI.parse(ec2_url)
{ :access_key_id => access_key,
:secret_access_key=> secret_access_key,
:use_ssl => (uri.scheme=='https'),
:path => uri.path,
:host => uri.host,
:port => uri.port
}.merge(opts)
end | [
"def",
"aws_options",
"(",
"opts",
"=",
"{",
"}",
")",
"uri",
"=",
"URI",
".",
"parse",
"(",
"ec2_url",
")",
"{",
":access_key_id",
"=>",
"access_key",
",",
":secret_access_key",
"=>",
"secret_access_key",
",",
":use_ssl",
"=>",
"(",
"uri",
".",
"scheme",
"==",
"'https'",
")",
",",
":path",
"=>",
"uri",
".",
"path",
",",
":host",
"=>",
"uri",
".",
"host",
",",
":port",
"=>",
"uri",
".",
"port",
"}",
".",
"merge",
"(",
"opts",
")",
"end"
]
| prepare options for AWS gem | [
"prepare",
"options",
"for",
"AWS",
"gem"
]
| 8b4af051833addd84f4282bcedbdffa814d8e033 | https://github.com/auser/poolparty/blob/8b4af051833addd84f4282bcedbdffa814d8e033/lib/cloud_providers/ec2/ec2.rb#L384-L394 | train |
auser/poolparty | lib/cloud_providers/ec2/ec2.rb | CloudProviders.Ec2.ec2 | def ec2
@ec2 ||= begin
AWS::EC2::Base.new( aws_options )
rescue AWS::ArgumentError => e # AWS credentials missing?
puts "Error contacting AWS: #{e}"
raise e
rescue Exception => e
puts "Generic error #{e.class}: #{e}"
end
end | ruby | def ec2
@ec2 ||= begin
AWS::EC2::Base.new( aws_options )
rescue AWS::ArgumentError => e # AWS credentials missing?
puts "Error contacting AWS: #{e}"
raise e
rescue Exception => e
puts "Generic error #{e.class}: #{e}"
end
end | [
"def",
"ec2",
"@ec2",
"||=",
"begin",
"AWS",
"::",
"EC2",
"::",
"Base",
".",
"new",
"(",
"aws_options",
")",
"rescue",
"AWS",
"::",
"ArgumentError",
"=>",
"e",
"puts",
"\"Error contacting AWS: #{e}\"",
"raise",
"e",
"rescue",
"Exception",
"=>",
"e",
"puts",
"\"Generic error #{e.class}: #{e}\"",
"end",
"end"
]
| Proxy to the raw Grempe amazon-aws @ec2 instance | [
"Proxy",
"to",
"the",
"raw",
"Grempe",
"amazon",
"-",
"aws"
]
| 8b4af051833addd84f4282bcedbdffa814d8e033 | https://github.com/auser/poolparty/blob/8b4af051833addd84f4282bcedbdffa814d8e033/lib/cloud_providers/ec2/ec2.rb#L397-L406 | train |
auser/poolparty | lib/cloud_providers/ec2/ec2.rb | CloudProviders.Ec2.credential_file | def credential_file(file=nil)
unless file.nil?
dsl_options[:credential_file]=file
dsl_options.merge!(Ec2.load_keys_from_credential_file(file))
else
fetch(:credential_file)
end
end | ruby | def credential_file(file=nil)
unless file.nil?
dsl_options[:credential_file]=file
dsl_options.merge!(Ec2.load_keys_from_credential_file(file))
else
fetch(:credential_file)
end
end | [
"def",
"credential_file",
"(",
"file",
"=",
"nil",
")",
"unless",
"file",
".",
"nil?",
"dsl_options",
"[",
":credential_file",
"]",
"=",
"file",
"dsl_options",
".",
"merge!",
"(",
"Ec2",
".",
"load_keys_from_credential_file",
"(",
"file",
")",
")",
"else",
"fetch",
"(",
":credential_file",
")",
"end",
"end"
]
| Read credentials from credential_file if one exists | [
"Read",
"credentials",
"from",
"credential_file",
"if",
"one",
"exists"
]
| 8b4af051833addd84f4282bcedbdffa814d8e033 | https://github.com/auser/poolparty/blob/8b4af051833addd84f4282bcedbdffa814d8e033/lib/cloud_providers/ec2/ec2.rb#L489-L496 | train |
config-files-api/config_files_api | lib/cfa/base_model.rb | CFA.BaseModel.generic_set | def generic_set(key, value, tree = data)
modify(key, value, tree) || uncomment(key, value, tree) ||
add_new(key, value, tree)
end | ruby | def generic_set(key, value, tree = data)
modify(key, value, tree) || uncomment(key, value, tree) ||
add_new(key, value, tree)
end | [
"def",
"generic_set",
"(",
"key",
",",
"value",
",",
"tree",
"=",
"data",
")",
"modify",
"(",
"key",
",",
"value",
",",
"tree",
")",
"||",
"uncomment",
"(",
"key",
",",
"value",
",",
"tree",
")",
"||",
"add_new",
"(",
"key",
",",
"value",
",",
"tree",
")",
"end"
]
| powerfull method that sets any value in config. It try to be
smart to at first modify existing value, then replace commented out code
and if even that doesn't work, then append it at the end
@note prefer to use specialized methods of children | [
"powerfull",
"method",
"that",
"sets",
"any",
"value",
"in",
"config",
".",
"It",
"try",
"to",
"be",
"smart",
"to",
"at",
"first",
"modify",
"existing",
"value",
"then",
"replace",
"commented",
"out",
"code",
"and",
"if",
"even",
"that",
"doesn",
"t",
"work",
"then",
"append",
"it",
"at",
"the",
"end"
]
| b5e62c465e278b048f82a8173832dbd631e6cf0a | https://github.com/config-files-api/config_files_api/blob/b5e62c465e278b048f82a8173832dbd631e6cf0a/lib/cfa/base_model.rb#L64-L67 | train |
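A hedged sketch of how a model might use `generic_set` from the row above; following the docstring's note, the specialised setter below is an illustrative child-class wrapper, not an API taken from this source.

```ruby
# Illustrative CFA model exposing a specialised setter that delegates to
# generic_set: modify an existing key, otherwise uncomment a matching
# commented-out entry, otherwise append a new entry at the end of the file.
class ExampleConfig < CFA::BaseModel
  def permit_root_login=(value)
    generic_set("PermitRootLogin", value)
  end
end
```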
tjchambers/paper_trail_scrapbook | lib/paper_trail_scrapbook/chapter.rb | PaperTrailScrapbook.Chapter.story | def story
updates = changes
return unless tell_story?(updates)
[preface, (updates unless destroy?)].compact.join("\n")
end | ruby | def story
updates = changes
return unless tell_story?(updates)
[preface, (updates unless destroy?)].compact.join("\n")
end | [
"def",
"story",
"updates",
"=",
"changes",
"return",
"unless",
"tell_story?",
"(",
"updates",
")",
"[",
"preface",
",",
"(",
"updates",
"unless",
"destroy?",
")",
"]",
".",
"compact",
".",
"join",
"(",
"\"\\n\"",
")",
"end"
]
| Single version historical analysis
@return [String] Human readable description of changes | [
"Single",
"version",
"historical",
"analysis"
]
| 0503a74909248fc052c56965eee53aa8f5634013 | https://github.com/tjchambers/paper_trail_scrapbook/blob/0503a74909248fc052c56965eee53aa8f5634013/lib/paper_trail_scrapbook/chapter.rb#L19-L24 | train |
config-files-api/config_files_api | lib/cfa/augeas_parser/writer.rb | CFA.AugeasWriter.report_error | def report_error
return if yield
error = aug.error
# zero is no error, so problem in lense
if aug.error[:code].nonzero?
raise "Augeas error #{error[:message]}. Details: #{error[:details]}."
end
msg = aug.get("/augeas/text/store/error/message")
location = aug.get("/augeas/text/store/error/lens")
raise "Augeas serializing error: #{msg} at #{location}"
end | ruby | def report_error
return if yield
error = aug.error
# zero is no error, so problem in lense
if aug.error[:code].nonzero?
raise "Augeas error #{error[:message]}. Details: #{error[:details]}."
end
msg = aug.get("/augeas/text/store/error/message")
location = aug.get("/augeas/text/store/error/lens")
raise "Augeas serializing error: #{msg} at #{location}"
end | [
"def",
"report_error",
"return",
"if",
"yield",
"error",
"=",
"aug",
".",
"error",
"if",
"aug",
".",
"error",
"[",
":code",
"]",
".",
"nonzero?",
"raise",
"\"Augeas error #{error[:message]}. Details: #{error[:details]}.\"",
"end",
"msg",
"=",
"aug",
".",
"get",
"(",
"\"/augeas/text/store/error/message\"",
")",
"location",
"=",
"aug",
".",
"get",
"(",
"\"/augeas/text/store/error/lens\"",
")",
"raise",
"\"Augeas serializing error: #{msg} at #{location}\"",
"end"
]
| Calls block and if it failed, raise exception with details from augeas
why it failed
@yield call to aug that is secured
@raise [RuntimeError] | [
"Calls",
"block",
"and",
"if",
"it",
"failed",
"raise",
"exception",
"with",
"details",
"from",
"augeas",
"why",
"it",
"failed"
]
| b5e62c465e278b048f82a8173832dbd631e6cf0a | https://github.com/config-files-api/config_files_api/blob/b5e62c465e278b048f82a8173832dbd631e6cf0a/lib/cfa/augeas_parser/writer.rb#L345-L357 | train |
tjchambers/paper_trail_scrapbook | lib/paper_trail_scrapbook/changes.rb | PaperTrailScrapbook.Changes.change_log | def change_log
text =
changes
.map { |k, v| digest(k, v) }
.compact
.join("\n")
text = text.gsub(' id:', ':') if PaperTrailScrapbook.config.drop_id_suffix
text
end | ruby | def change_log
text =
changes
.map { |k, v| digest(k, v) }
.compact
.join("\n")
text = text.gsub(' id:', ':') if PaperTrailScrapbook.config.drop_id_suffix
text
end | [
"def",
"change_log",
"text",
"=",
"changes",
".",
"map",
"{",
"|",
"k",
",",
"v",
"|",
"digest",
"(",
"k",
",",
"v",
")",
"}",
".",
"compact",
".",
"join",
"(",
"\"\\n\"",
")",
"text",
"=",
"text",
".",
"gsub",
"(",
"' id:'",
",",
"':'",
")",
"if",
"PaperTrailScrapbook",
".",
"config",
".",
"drop_id_suffix",
"text",
"end"
]
| Attribute change analysis
@return [String] Summary analysis of changes | [
"Attribute",
"change",
"analysis"
]
| 0503a74909248fc052c56965eee53aa8f5634013 | https://github.com/tjchambers/paper_trail_scrapbook/blob/0503a74909248fc052c56965eee53aa8f5634013/lib/paper_trail_scrapbook/changes.rb#L30-L39 | train |
heroku/configvar | lib/configvar/context.rb | ConfigVar.Context.method_missing | def method_missing(name, *args)
value = @values[name]
if value.nil? && [email protected]_key?(name)
address = "<#{self.class.name}:0x00#{(self.object_id << 1).to_s(16)}>"
raise NoMethodError.new("undefined method `#{name}' for ##{address}")
end
value
end | ruby | def method_missing(name, *args)
value = @values[name]
if value.nil? && [email protected]_key?(name)
address = "<#{self.class.name}:0x00#{(self.object_id << 1).to_s(16)}>"
raise NoMethodError.new("undefined method `#{name}' for ##{address}")
end
value
end | [
"def",
"method_missing",
"(",
"name",
",",
"*",
"args",
")",
"value",
"=",
"@values",
"[",
"name",
"]",
"if",
"value",
".",
"nil?",
"&&",
"!",
"@values",
".",
"has_key?",
"(",
"name",
")",
"address",
"=",
"\"<#{self.class.name}:0x00#{(self.object_id << 1).to_s(16)}>\"",
"raise",
"NoMethodError",
".",
"new",
"(",
"\"undefined method `#{name}' for ##{address}\"",
")",
"end",
"value",
"end"
]
| Fetch a configuration value. The name must be a lowercase version of an
uppercase name defined in the environment. A NoMethodError is raised if
no value matching the specified name is available. | [
"Fetch",
"a",
"configuration",
"value",
".",
"The",
"name",
"must",
"be",
"a",
"lowercase",
"version",
"of",
"an",
"uppercase",
"name",
"defined",
"in",
"the",
"environment",
".",
"A",
"NoMethodError",
"is",
"raised",
"if",
"no",
"value",
"matching",
"the",
"specified",
"name",
"is",
"available",
"."
]
| 1778604a7878bb49d4512a189c3e7664a0874295 | https://github.com/heroku/configvar/blob/1778604a7878bb49d4512a189c3e7664a0874295/lib/configvar/context.rb#L19-L26 | train |
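Given the accessor behaviour implemented above, a loaded context exposes each value as a lowercase method; a brief hypothetical sketch (how the values are loaded is not shown in this row, and the attribute names are illustrative).

```ruby
# `context` is assumed to be a ConfigVar::Context already populated from ENV.
context.database_url # => "postgres://..." when DATABASE_URL was defined
context.port         # => 8080
context.missing_name # => raises NoMethodError, as implemented above
```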
heroku/configvar | lib/configvar/context.rb | ConfigVar.Context.required_string | def required_string(name)
required_custom(name) do |env|
if value = env[name.to_s.upcase]
{name => value}
else
raise ConfigError.new("A value must be provided for #{name.to_s.upcase}")
end
end
end | ruby | def required_string(name)
required_custom(name) do |env|
if value = env[name.to_s.upcase]
{name => value}
else
raise ConfigError.new("A value must be provided for #{name.to_s.upcase}")
end
end
end | [
"def",
"required_string",
"(",
"name",
")",
"required_custom",
"(",
"name",
")",
"do",
"|",
"env",
"|",
"if",
"value",
"=",
"env",
"[",
"name",
".",
"to_s",
".",
"upcase",
"]",
"{",
"name",
"=>",
"value",
"}",
"else",
"raise",
"ConfigError",
".",
"new",
"(",
"\"A value must be provided for #{name.to_s.upcase}\"",
")",
"end",
"end",
"end"
]
| Define a required string config var. | [
"Define",
"a",
"required",
"string",
"config",
"var",
"."
]
| 1778604a7878bb49d4512a189c3e7664a0874295 | https://github.com/heroku/configvar/blob/1778604a7878bb49d4512a189c3e7664a0874295/lib/configvar/context.rb#L29-L37 | train |
heroku/configvar | lib/configvar/context.rb | ConfigVar.Context.required_int | def required_int(name)
required_custom(name) do |env|
if value = env[name.to_s.upcase]
{name => parse_int(name, value)}
else
raise ConfigError.new("A value must be provided for #{name.to_s.upcase}")
end
end
end | ruby | def required_int(name)
required_custom(name) do |env|
if value = env[name.to_s.upcase]
{name => parse_int(name, value)}
else
raise ConfigError.new("A value must be provided for #{name.to_s.upcase}")
end
end
end | [
"def",
"required_int",
"(",
"name",
")",
"required_custom",
"(",
"name",
")",
"do",
"|",
"env",
"|",
"if",
"value",
"=",
"env",
"[",
"name",
".",
"to_s",
".",
"upcase",
"]",
"{",
"name",
"=>",
"parse_int",
"(",
"name",
",",
"value",
")",
"}",
"else",
"raise",
"ConfigError",
".",
"new",
"(",
"\"A value must be provided for #{name.to_s.upcase}\"",
")",
"end",
"end",
"end"
]
| Define a required integer config var. | [
"Define",
"a",
"required",
"integer",
"config",
"var",
"."
]
| 1778604a7878bb49d4512a189c3e7664a0874295 | https://github.com/heroku/configvar/blob/1778604a7878bb49d4512a189c3e7664a0874295/lib/configvar/context.rb#L40-L48 | train |
heroku/configvar | lib/configvar/context.rb | ConfigVar.Context.required_bool | def required_bool(name)
required_custom(name) do |env|
if value = env[name.to_s.upcase]
{name => parse_bool(name, value)}
else
raise ConfigError.new("A value must be provided for #{name.to_s.upcase}")
end
end
end | ruby | def required_bool(name)
required_custom(name) do |env|
if value = env[name.to_s.upcase]
{name => parse_bool(name, value)}
else
raise ConfigError.new("A value must be provided for #{name.to_s.upcase}")
end
end
end | [
"def",
"required_bool",
"(",
"name",
")",
"required_custom",
"(",
"name",
")",
"do",
"|",
"env",
"|",
"if",
"value",
"=",
"env",
"[",
"name",
".",
"to_s",
".",
"upcase",
"]",
"{",
"name",
"=>",
"parse_bool",
"(",
"name",
",",
"value",
")",
"}",
"else",
"raise",
"ConfigError",
".",
"new",
"(",
"\"A value must be provided for #{name.to_s.upcase}\"",
")",
"end",
"end",
"end"
]
| Define a required boolean config var. | [
"Define",
"a",
"required",
"boolean",
"config",
"var",
"."
]
| 1778604a7878bb49d4512a189c3e7664a0874295 | https://github.com/heroku/configvar/blob/1778604a7878bb49d4512a189c3e7664a0874295/lib/configvar/context.rb#L51-L59 | train |
heroku/configvar | lib/configvar/context.rb | ConfigVar.Context.optional_string | def optional_string(name, default)
optional_custom(name) do |env|
if value = env[name.to_s.upcase]
{name => value}
else
{name => default}
end
end
end | ruby | def optional_string(name, default)
optional_custom(name) do |env|
if value = env[name.to_s.upcase]
{name => value}
else
{name => default}
end
end
end | [
"def",
"optional_string",
"(",
"name",
",",
"default",
")",
"optional_custom",
"(",
"name",
")",
"do",
"|",
"env",
"|",
"if",
"value",
"=",
"env",
"[",
"name",
".",
"to_s",
".",
"upcase",
"]",
"{",
"name",
"=>",
"value",
"}",
"else",
"{",
"name",
"=>",
"default",
"}",
"end",
"end",
"end"
]
| Define a required string config var with a default value. | [
"Define",
"a",
"required",
"string",
"config",
"var",
"with",
"a",
"default",
"value",
"."
]
| 1778604a7878bb49d4512a189c3e7664a0874295 | https://github.com/heroku/configvar/blob/1778604a7878bb49d4512a189c3e7664a0874295/lib/configvar/context.rb#L72-L80 | train |
heroku/configvar | lib/configvar/context.rb | ConfigVar.Context.optional_int | def optional_int(name, default)
optional_custom(name) do |env|
if value = env[name.to_s.upcase]
{name => parse_int(name, value)}
else
{name => default}
end
end
end | ruby | def optional_int(name, default)
optional_custom(name) do |env|
if value = env[name.to_s.upcase]
{name => parse_int(name, value)}
else
{name => default}
end
end
end | [
"def",
"optional_int",
"(",
"name",
",",
"default",
")",
"optional_custom",
"(",
"name",
")",
"do",
"|",
"env",
"|",
"if",
"value",
"=",
"env",
"[",
"name",
".",
"to_s",
".",
"upcase",
"]",
"{",
"name",
"=>",
"parse_int",
"(",
"name",
",",
"value",
")",
"}",
"else",
"{",
"name",
"=>",
"default",
"}",
"end",
"end",
"end"
]
| Define a required integer config var with a default value. | [
"Define",
"a",
"required",
"integer",
"config",
"var",
"with",
"a",
"default",
"value",
"."
]
| 1778604a7878bb49d4512a189c3e7664a0874295 | https://github.com/heroku/configvar/blob/1778604a7878bb49d4512a189c3e7664a0874295/lib/configvar/context.rb#L83-L91 | train |
heroku/configvar | lib/configvar/context.rb | ConfigVar.Context.optional_bool | def optional_bool(name, default)
optional_custom(name) do |env|
if value = env[name.to_s.upcase]
{name => parse_bool(name, value)}
else
{name => default}
end
end
end | ruby | def optional_bool(name, default)
optional_custom(name) do |env|
if value = env[name.to_s.upcase]
{name => parse_bool(name, value)}
else
{name => default}
end
end
end | [
"def",
"optional_bool",
"(",
"name",
",",
"default",
")",
"optional_custom",
"(",
"name",
")",
"do",
"|",
"env",
"|",
"if",
"value",
"=",
"env",
"[",
"name",
".",
"to_s",
".",
"upcase",
"]",
"{",
"name",
"=>",
"parse_bool",
"(",
"name",
",",
"value",
")",
"}",
"else",
"{",
"name",
"=>",
"default",
"}",
"end",
"end",
"end"
]
| Define a required boolean config var with a default value. | [
"Define",
"a",
"required",
"boolean",
"config",
"var",
"with",
"a",
"default",
"value",
"."
]
| 1778604a7878bb49d4512a189c3e7664a0874295 | https://github.com/heroku/configvar/blob/1778604a7878bb49d4512a189c3e7664a0874295/lib/configvar/context.rb#L94-L102 | train |
heroku/configvar | lib/configvar/context.rb | ConfigVar.Context.parse_bool | def parse_bool(name, value)
if ['1', 'true', 'enabled'].include?(value.downcase)
true
elsif ['0', 'false'].include?(value.downcase)
false
else
raise ArgumentError.new("#{value} is not a valid boolean for #{name.to_s.upcase}")
end
end | ruby | def parse_bool(name, value)
if ['1', 'true', 'enabled'].include?(value.downcase)
true
elsif ['0', 'false'].include?(value.downcase)
false
else
raise ArgumentError.new("#{value} is not a valid boolean for #{name.to_s.upcase}")
end
end | [
"def",
"parse_bool",
"(",
"name",
",",
"value",
")",
"if",
"[",
"'1'",
",",
"'true'",
",",
"'enabled'",
"]",
".",
"include?",
"(",
"value",
".",
"downcase",
")",
"true",
"elsif",
"[",
"'0'",
",",
"'false'",
"]",
".",
"include?",
"(",
"value",
".",
"downcase",
")",
"false",
"else",
"raise",
"ArgumentError",
".",
"new",
"(",
"\"#{value} is not a valid boolean for #{name.to_s.upcase}\"",
")",
"end",
"end"
]
| Convert a string to boolean. An ArgumentError is raised if the string
is not a valid boolean. | [
"Convert",
"a",
"string",
"to",
"boolean",
".",
"An",
"ArgumentError",
"is",
"raised",
"if",
"the",
"string",
"is",
"not",
"a",
"valid",
"boolean",
"."
]
| 1778604a7878bb49d4512a189c3e7664a0874295 | https://github.com/heroku/configvar/blob/1778604a7878bb49d4512a189c3e7664a0874295/lib/configvar/context.rb#L125-L133 | train |
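For reference, the spellings accepted by `parse_bool` above, shown as a tiny sketch (called here from inside the class, since the method's visibility is not shown in this row).

```ruby
parse_bool(:verbose, 'enabled') # => true   ('1', 'true' and 'enabled' are accepted)
parse_bool(:verbose, 'FALSE')   # => false  (comparison is case-insensitive)
parse_bool(:verbose, 'yes')     # => raises ArgumentError for VERBOSE
```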
heroku/configvar | lib/configvar/context.rb | ConfigVar.Context.define_config | def define_config(name, &blk)
if @definitions.has_key?(name)
raise ConfigError.new("#{name.to_s.upcase} is already registered")
end
@definitions[name] = Proc.new do |env|
value = yield env
if value.kind_of?(Hash)
value
else
{name => value}
end
end
end | ruby | def define_config(name, &blk)
if @definitions.has_key?(name)
raise ConfigError.new("#{name.to_s.upcase} is already registered")
end
@definitions[name] = Proc.new do |env|
value = yield env
if value.kind_of?(Hash)
value
else
{name => value}
end
end
end | [
"def",
"define_config",
"(",
"name",
",",
"&",
"blk",
")",
"if",
"@definitions",
".",
"has_key?",
"(",
"name",
")",
"raise",
"ConfigError",
".",
"new",
"(",
"\"#{name.to_s.upcase} is already registered\"",
")",
"end",
"@definitions",
"[",
"name",
"]",
"=",
"Proc",
".",
"new",
"do",
"|",
"env",
"|",
"value",
"=",
"yield",
"env",
"if",
"value",
".",
"kind_of?",
"(",
"Hash",
")",
"value",
"else",
"{",
"name",
"=>",
"value",
"}",
"end",
"end",
"end"
]
| Define a handler for a configuration value. | [
"Define",
"a",
"handler",
"for",
"a",
"configuration",
"value",
"."
]
| 1778604a7878bb49d4512a189c3e7664a0874295 | https://github.com/heroku/configvar/blob/1778604a7878bb49d4512a189c3e7664a0874295/lib/configvar/context.rb#L136-L148 | train |
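A hedged sketch of a handler registered through `define_config` above: the block receives the raw environment hash and may return either a single value or a hash of derived values, which the wrapper shown normalises into the values hash. The variable names are illustrative.

```ruby
context.define_config(:redis) do |env|
  value = env['REDIS_URL']
  raise ConfigVar::ConfigError.new('A value must be provided for REDIS_URL') unless value
  { redis_url: value, redis_db: Integer(env['REDIS_DB'] || '0') }
end
```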
danmayer/churn | lib/churn/calculator.rb | Churn.ChurnCalculator.analyze | def analyze
@changes = sort_changes(@changes)
@changes = @changes.map {|file_path, times_changed| {:file_path => file_path, :times_changed => times_changed }}
calculate_revision_changes
@method_changes = sort_changes(@method_changes)
@method_changes = @method_changes.map {|method, times_changed| {'method' => method, 'times_changed' => times_changed }}
@class_changes = sort_changes(@class_changes)
@class_changes = @class_changes.map {|klass, times_changed| {'klass' => klass, 'times_changed' => times_changed }}
end | ruby | def analyze
@changes = sort_changes(@changes)
@changes = @changes.map {|file_path, times_changed| {:file_path => file_path, :times_changed => times_changed }}
calculate_revision_changes
@method_changes = sort_changes(@method_changes)
@method_changes = @method_changes.map {|method, times_changed| {'method' => method, 'times_changed' => times_changed }}
@class_changes = sort_changes(@class_changes)
@class_changes = @class_changes.map {|klass, times_changed| {'klass' => klass, 'times_changed' => times_changed }}
end | [
"def",
"analyze",
"@changes",
"=",
"sort_changes",
"(",
"@changes",
")",
"@changes",
"=",
"@changes",
".",
"map",
"{",
"|",
"file_path",
",",
"times_changed",
"|",
"{",
":file_path",
"=>",
"file_path",
",",
":times_changed",
"=>",
"times_changed",
"}",
"}",
"calculate_revision_changes",
"@method_changes",
"=",
"sort_changes",
"(",
"@method_changes",
")",
"@method_changes",
"=",
"@method_changes",
".",
"map",
"{",
"|",
"method",
",",
"times_changed",
"|",
"{",
"'method'",
"=>",
"method",
",",
"'times_changed'",
"=>",
"times_changed",
"}",
"}",
"@class_changes",
"=",
"sort_changes",
"(",
"@class_changes",
")",
"@class_changes",
"=",
"@class_changes",
".",
"map",
"{",
"|",
"klass",
",",
"times_changed",
"|",
"{",
"'klass'",
"=>",
"klass",
",",
"'times_changed'",
"=>",
"times_changed",
"}",
"}",
"end"
]
| Analyze the source control data, filter, sort, and find more information
on the edited files | [
"Analyze",
"the",
"source",
"control",
"data",
"filter",
"sort",
"and",
"find",
"more",
"information",
"on",
"the",
"edited",
"files"
]
| f48ba8f0712697d052c37846109b6ada10e332c5 | https://github.com/danmayer/churn/blob/f48ba8f0712697d052c37846109b6ada10e332c5/lib/churn/calculator.rb#L90-L100 | train |
danmayer/churn | lib/churn/calculator.rb | Churn.ChurnCalculator.to_h | def to_h
hash = {:churn => {:changes => @changes}}
hash[:churn][:class_churn] = @class_changes
hash[:churn][:method_churn] = @method_changes
#detail the most recent changes made this revision
first_revision = @revisions.first
first_revision_changes = @revision_changes[first_revision]
if first_revision_changes
changes = first_revision_changes
hash[:churn][:changed_files] = changes[:files]
hash[:churn][:changed_classes] = changes[:classes]
hash[:churn][:changed_methods] = changes[:methods]
end
# TODO crappy place to do this but save hash to revision file but
# while entirely under metric_fu only choice
ChurnHistory.store_revision_history(first_revision, hash, @churn_options.data_directory)
hash
end | ruby | def to_h
hash = {:churn => {:changes => @changes}}
hash[:churn][:class_churn] = @class_changes
hash[:churn][:method_churn] = @method_changes
#detail the most recent changes made this revision
first_revision = @revisions.first
first_revision_changes = @revision_changes[first_revision]
if first_revision_changes
changes = first_revision_changes
hash[:churn][:changed_files] = changes[:files]
hash[:churn][:changed_classes] = changes[:classes]
hash[:churn][:changed_methods] = changes[:methods]
end
# TODO crappy place to do this but save hash to revision file but
# while entirely under metric_fu only choice
ChurnHistory.store_revision_history(first_revision, hash, @churn_options.data_directory)
hash
end | [
"def",
"to_h",
"hash",
"=",
"{",
":churn",
"=>",
"{",
":changes",
"=>",
"@changes",
"}",
"}",
"hash",
"[",
":churn",
"]",
"[",
":class_churn",
"]",
"=",
"@class_changes",
"hash",
"[",
":churn",
"]",
"[",
":method_churn",
"]",
"=",
"@method_changes",
"first_revision",
"=",
"@revisions",
".",
"first",
"first_revision_changes",
"=",
"@revision_changes",
"[",
"first_revision",
"]",
"if",
"first_revision_changes",
"changes",
"=",
"first_revision_changes",
"hash",
"[",
":churn",
"]",
"[",
":changed_files",
"]",
"=",
"changes",
"[",
":files",
"]",
"hash",
"[",
":churn",
"]",
"[",
":changed_classes",
"]",
"=",
"changes",
"[",
":classes",
"]",
"hash",
"[",
":churn",
"]",
"[",
":changed_methods",
"]",
"=",
"changes",
"[",
":methods",
"]",
"end",
"ChurnHistory",
".",
"store_revision_history",
"(",
"first_revision",
",",
"hash",
",",
"@churn_options",
".",
"data_directory",
")",
"hash",
"end"
]
| collect all the data into a single hash data structure. | [
"collect",
"all",
"the",
"data",
"into",
"a",
"single",
"hash",
"data",
"structure",
"."
]
| f48ba8f0712697d052c37846109b6ada10e332c5 | https://github.com/danmayer/churn/blob/f48ba8f0712697d052c37846109b6ada10e332c5/lib/churn/calculator.rb#L103-L120 | train |
OSC/pbs-ruby | lib/pbs/batch.rb | PBS.Batch.get_status | def get_status(filters: [])
connect do |cid|
filters = PBS::Torque::Attrl.from_list filters
batch_status = Torque.pbs_statserver cid, filters, nil
batch_status.to_h.tap { Torque.pbs_statfree batch_status }
end
end | ruby | def get_status(filters: [])
connect do |cid|
filters = PBS::Torque::Attrl.from_list filters
batch_status = Torque.pbs_statserver cid, filters, nil
batch_status.to_h.tap { Torque.pbs_statfree batch_status }
end
end | [
"def",
"get_status",
"(",
"filters",
":",
"[",
"]",
")",
"connect",
"do",
"|",
"cid",
"|",
"filters",
"=",
"PBS",
"::",
"Torque",
"::",
"Attrl",
".",
"from_list",
"filters",
"batch_status",
"=",
"Torque",
".",
"pbs_statserver",
"cid",
",",
"filters",
",",
"nil",
"batch_status",
".",
"to_h",
".",
"tap",
"{",
"Torque",
".",
"pbs_statfree",
"batch_status",
"}",
"end",
"end"
]
| Get a hash with status info for this batch server
@example Status info for OSC Oakley batch server
my_conn.get_status
#=>
#{
# "oak-batch.osc.edu:15001" => {
# :server_state => "Idle",
# ...
# }
#}
@param filters [Array<Symbol>] list of attribs to filter on
@return [Hash] status info for batch server | [
"Get",
"a",
"hash",
"with",
"status",
"info",
"for",
"this",
"batch",
"server"
]
| 8f07c38db23392d10d4e1e3cc248b07a3e43730f | https://github.com/OSC/pbs-ruby/blob/8f07c38db23392d10d4e1e3cc248b07a3e43730f/lib/pbs/batch.rb#L89-L95 | train |
OSC/pbs-ruby | lib/pbs/batch.rb | PBS.Batch.get_queues | def get_queues(id: '', filters: [])
connect do |cid|
filters = PBS::Torque::Attrl.from_list(filters)
batch_status = Torque.pbs_statque cid, id.to_s, filters, nil
batch_status.to_h.tap { Torque.pbs_statfree batch_status }
end
end | ruby | def get_queues(id: '', filters: [])
connect do |cid|
filters = PBS::Torque::Attrl.from_list(filters)
batch_status = Torque.pbs_statque cid, id.to_s, filters, nil
batch_status.to_h.tap { Torque.pbs_statfree batch_status }
end
end | [
"def",
"get_queues",
"(",
"id",
":",
"''",
",",
"filters",
":",
"[",
"]",
")",
"connect",
"do",
"|",
"cid",
"|",
"filters",
"=",
"PBS",
"::",
"Torque",
"::",
"Attrl",
".",
"from_list",
"(",
"filters",
")",
"batch_status",
"=",
"Torque",
".",
"pbs_statque",
"cid",
",",
"id",
".",
"to_s",
",",
"filters",
",",
"nil",
"batch_status",
".",
"to_h",
".",
"tap",
"{",
"Torque",
".",
"pbs_statfree",
"batch_status",
"}",
"end",
"end"
]
| Get a list of hashes of the queues on the batch server
@example Status info for OSC Oakley queues
my_conn.get_queues
#=>
#{
# "parallel" => {
# :queue_type => "Execution",
# ...
# },
# "serial" => {
# :queue_type => "Execution",
# ...
# },
# ...
#}
@param id [#to_s] the id of requested information
@param filters [Array<Symbol>] list of attribs to filter on
@return [Hash] hash of details for the queues | [
"Get",
"a",
"list",
"of",
"hashes",
"of",
"the",
"queues",
"on",
"the",
"batch",
"server"
]
| 8f07c38db23392d10d4e1e3cc248b07a3e43730f | https://github.com/OSC/pbs-ruby/blob/8f07c38db23392d10d4e1e3cc248b07a3e43730f/lib/pbs/batch.rb#L115-L121 | train |
OSC/pbs-ruby | lib/pbs/batch.rb | PBS.Batch.get_nodes | def get_nodes(id: '', filters: [])
connect do |cid|
filters = PBS::Torque::Attrl.from_list(filters)
batch_status = Torque.pbs_statnode cid, id.to_s, filters, nil
batch_status.to_h.tap { Torque.pbs_statfree batch_status }
end
end | ruby | def get_nodes(id: '', filters: [])
connect do |cid|
filters = PBS::Torque::Attrl.from_list(filters)
batch_status = Torque.pbs_statnode cid, id.to_s, filters, nil
batch_status.to_h.tap { Torque.pbs_statfree batch_status }
end
end | [
"def",
"get_nodes",
"(",
"id",
":",
"''",
",",
"filters",
":",
"[",
"]",
")",
"connect",
"do",
"|",
"cid",
"|",
"filters",
"=",
"PBS",
"::",
"Torque",
"::",
"Attrl",
".",
"from_list",
"(",
"filters",
")",
"batch_status",
"=",
"Torque",
".",
"pbs_statnode",
"cid",
",",
"id",
".",
"to_s",
",",
"filters",
",",
"nil",
"batch_status",
".",
"to_h",
".",
"tap",
"{",
"Torque",
".",
"pbs_statfree",
"batch_status",
"}",
"end",
"end"
]
| Get a list of hashes of the nodes on the batch server
@example Status info for OSC Oakley nodes
my_conn.get_nodes
#=>
#{
# "n0001" => {
# :np => "12",
# ...
# },
# "n0002" => {
# :np => "12",
# ...
# },
# ...
#}
@param id [#to_s] the id of requested information
@param filters [Array<Symbol>] list of attribs to filter on
@return [Hash] hash of details for nodes | [
"Get",
"a",
"list",
"of",
"hashes",
"of",
"the",
"nodes",
"on",
"the",
"batch",
"server"
]
| 8f07c38db23392d10d4e1e3cc248b07a3e43730f | https://github.com/OSC/pbs-ruby/blob/8f07c38db23392d10d4e1e3cc248b07a3e43730f/lib/pbs/batch.rb#L158-L164 | train |
OSC/pbs-ruby | lib/pbs/batch.rb | PBS.Batch.select_jobs | def select_jobs(attribs: [])
connect do |cid|
attribs = PBS::Torque::Attropl.from_list(attribs.map(&:to_h))
batch_status = Torque.pbs_selstat cid, attribs, nil
batch_status.to_h.tap { Torque.pbs_statfree batch_status }
end
end | ruby | def select_jobs(attribs: [])
connect do |cid|
attribs = PBS::Torque::Attropl.from_list(attribs.map(&:to_h))
batch_status = Torque.pbs_selstat cid, attribs, nil
batch_status.to_h.tap { Torque.pbs_statfree batch_status }
end
end | [
"def",
"select_jobs",
"(",
"attribs",
":",
"[",
"]",
")",
"connect",
"do",
"|",
"cid",
"|",
"attribs",
"=",
"PBS",
"::",
"Torque",
"::",
"Attropl",
".",
"from_list",
"(",
"attribs",
".",
"map",
"(",
"&",
":to_h",
")",
")",
"batch_status",
"=",
"Torque",
".",
"pbs_selstat",
"cid",
",",
"attribs",
",",
"nil",
"batch_status",
".",
"to_h",
".",
"tap",
"{",
"Torque",
".",
"pbs_statfree",
"batch_status",
"}",
"end",
"end"
]
| Get a list of hashes of the selected jobs on the batch server
@example Status info for jobs owned by Bob
my_conn.select_jobs(attribs: [{name: "User_List", value: "bob", op: :eq}])
#=>
#{
# "10219837.oak-batch.osc.edu" => {
# :Job_Owner => "[email protected]",
# :Job_Name => "CFD_Solver",
# ...
# },
# "10219839.oak-batch.osc.edu" => {
# :Job_Owner => "[email protected]",
# :Job_Name => "CFD_Solver2",
# ...
# },
# ...
#}
@param attribs [Array<#to_h>] list of hashes describing attributes to
select on
@return [Hash] hash of details of selected jobs | [
"Get",
"a",
"list",
"of",
"hashes",
"of",
"the",
"selected",
"jobs",
"on",
"the",
"batch",
"server"
]
| 8f07c38db23392d10d4e1e3cc248b07a3e43730f | https://github.com/OSC/pbs-ruby/blob/8f07c38db23392d10d4e1e3cc248b07a3e43730f/lib/pbs/batch.rb#L203-L209 | train |
OSC/pbs-ruby | lib/pbs/batch.rb | PBS.Batch.get_jobs | def get_jobs(id: '', filters: [])
connect do |cid|
filters = PBS::Torque::Attrl.from_list(filters)
batch_status = Torque.pbs_statjob cid, id.to_s, filters, nil
batch_status.to_h.tap { Torque.pbs_statfree batch_status }
end
end | ruby | def get_jobs(id: '', filters: [])
connect do |cid|
filters = PBS::Torque::Attrl.from_list(filters)
batch_status = Torque.pbs_statjob cid, id.to_s, filters, nil
batch_status.to_h.tap { Torque.pbs_statfree batch_status }
end
end | [
"def",
"get_jobs",
"(",
"id",
":",
"''",
",",
"filters",
":",
"[",
"]",
")",
"connect",
"do",
"|",
"cid",
"|",
"filters",
"=",
"PBS",
"::",
"Torque",
"::",
"Attrl",
".",
"from_list",
"(",
"filters",
")",
"batch_status",
"=",
"Torque",
".",
"pbs_statjob",
"cid",
",",
"id",
".",
"to_s",
",",
"filters",
",",
"nil",
"batch_status",
".",
"to_h",
".",
"tap",
"{",
"Torque",
".",
"pbs_statfree",
"batch_status",
"}",
"end",
"end"
]
| Get a list of hashes of the jobs on the batch server
@example Status info for OSC Oakley jobs
my_conn.get_jobs
#=>
#{
# "10219837.oak-batch.osc.edu" => {
# :Job_Owner => "[email protected]",
# :Job_Name => "CFD_Solver",
# ...
# },
# "10219838.oak-batch.osc.edu" => {
# :Job_Owner => "[email protected]",
# :Job_Name => "FEA_Solver",
# ...
# },
# ...
#}
@param id [#to_s] the id of requested information
@param filters [Array<Symbol>] list of attribs to filter on
@return [Hash] hash of details for jobs | [
"Get",
"a",
"list",
"of",
"hashes",
"of",
"the",
"jobs",
"on",
"the",
"batch",
"server"
]
| 8f07c38db23392d10d4e1e3cc248b07a3e43730f | https://github.com/OSC/pbs-ruby/blob/8f07c38db23392d10d4e1e3cc248b07a3e43730f/lib/pbs/batch.rb#L231-L237 | train |
OSC/pbs-ruby | lib/pbs/batch.rb | PBS.Batch.submit_script | def submit_script(script, queue: nil, headers: {}, resources: {}, envvars: {}, qsub: true)
send(qsub ? :qsub_submit : :pbs_submit, script.to_s, queue.to_s, headers, resources, envvars)
end | ruby | def submit_script(script, queue: nil, headers: {}, resources: {}, envvars: {}, qsub: true)
send(qsub ? :qsub_submit : :pbs_submit, script.to_s, queue.to_s, headers, resources, envvars)
end | [
"def",
"submit_script",
"(",
"script",
",",
"queue",
":",
"nil",
",",
"headers",
":",
"{",
"}",
",",
"resources",
":",
"{",
"}",
",",
"envvars",
":",
"{",
"}",
",",
"qsub",
":",
"true",
")",
"send",
"(",
"qsub",
"?",
":qsub_submit",
":",
":pbs_submit",
",",
"script",
".",
"to_s",
",",
"queue",
".",
"to_s",
",",
"headers",
",",
"resources",
",",
"envvars",
")",
"end"
]
| Submit a script to the batch server
@example Submit a script with a few PBS directives
my_conn.submit_script("/path/to/script",
headers: {
Job_Name: "myjob",
Join_Path: "oe"
},
resources: {
nodes: "4:ppn=12",
walltime: "12:00:00"
},
envvars: {
TOKEN: "asd90f9sd8g90hk34"
}
)
#=> "6621251.oak-batch.osc.edu"
@param script [#to_s] path to the script
@param queue [#to_s] queue to submit script to
@param headers [Hash] pbs headers
@param resources [Hash] pbs resources
@param envvars [Hash] pbs environment variables
@param qsub [Boolean] whether use library or binary for submission
@return [String] the id of the job that was created
@deprecated Use {#submit} instead. | [
"Submit",
"a",
"script",
"to",
"the",
"batch",
"server"
]
| 8f07c38db23392d10d4e1e3cc248b07a3e43730f | https://github.com/OSC/pbs-ruby/blob/8f07c38db23392d10d4e1e3cc248b07a3e43730f/lib/pbs/batch.rb#L323-L325 | train |
OSC/pbs-ruby | lib/pbs/batch.rb | PBS.Batch.submit_string | def submit_string(string, **kwargs)
Tempfile.open('qsub.') do |f|
f.write string.to_s
f.close
submit_script(f.path, **kwargs)
end
end | ruby | def submit_string(string, **kwargs)
Tempfile.open('qsub.') do |f|
f.write string.to_s
f.close
submit_script(f.path, **kwargs)
end
end | [
"def",
"submit_string",
"(",
"string",
",",
"**",
"kwargs",
")",
"Tempfile",
".",
"open",
"(",
"'qsub.'",
")",
"do",
"|",
"f",
"|",
"f",
".",
"write",
"string",
".",
"to_s",
"f",
".",
"close",
"submit_script",
"(",
"f",
".",
"path",
",",
"**",
"kwargs",
")",
"end",
"end"
]
| Submit a script expanded into a string to the batch server
@param string [#to_s] script as a string
@param (see #submit_script)
@return [String] the id of the job that was created
@deprecated Use {#submit} instead. | [
"Submit",
"a",
"script",
"expanded",
"into",
"a",
"string",
"to",
"the",
"batch",
"server"
]
| 8f07c38db23392d10d4e1e3cc248b07a3e43730f | https://github.com/OSC/pbs-ruby/blob/8f07c38db23392d10d4e1e3cc248b07a3e43730f/lib/pbs/batch.rb#L332-L338 | train |
OSC/pbs-ruby | lib/pbs/batch.rb | PBS.Batch.submit | def submit(content, args: [], env: {}, chdir: nil)
call(:qsub, *args, env: env, stdin: content, chdir: chdir).strip
end | ruby | def submit(content, args: [], env: {}, chdir: nil)
call(:qsub, *args, env: env, stdin: content, chdir: chdir).strip
end | [
"def",
"submit",
"(",
"content",
",",
"args",
":",
"[",
"]",
",",
"env",
":",
"{",
"}",
",",
"chdir",
":",
"nil",
")",
"call",
"(",
":qsub",
",",
"*",
"args",
",",
"env",
":",
"env",
",",
"stdin",
":",
"content",
",",
"chdir",
":",
"chdir",
")",
".",
"strip",
"end"
]
| Submit a script expanded as a string to the batch server
@param content [#to_s] script as a string
@param args [Array<#to_s>] arguments passed to `qsub` command
@param env [Hash{#to_s => #to_s}] environment variables set
@param chdir [#to_s, nil] working directory where `qsub` is called from
@raise [Error] if `qsub` command exited unsuccessfully
@return [String] the id of the job that was created | [
"Submit",
"a",
"script",
"expanded",
"as",
"a",
"string",
"to",
"the",
"batch",
"server"
]
| 8f07c38db23392d10d4e1e3cc248b07a3e43730f | https://github.com/OSC/pbs-ruby/blob/8f07c38db23392d10d4e1e3cc248b07a3e43730f/lib/pbs/batch.rb#L347-L349 | train |
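A minimal usage sketch for `submit` as documented above; the batch host, script body, and `qsub` flags below are hypothetical, and the `PBS::Batch.new` keywords assume the gem's usual constructor:

```ruby
require 'pbs'

# Hypothetical Torque server; adjust host (and bin/lib paths) for your site.
batch = PBS::Batch.new(host: 'oak-batch.osc.edu')

script = "#!/bin/bash\necho hello from $PBS_JOBID\n"

# qsub flags go through args:, extra environment through env:.
job_id = batch.submit(script, args: ['-N', 'hello_job', '-l', 'walltime=00:05:00'])
puts job_id #=> e.g. "6621251.oak-batch.osc.edu"
```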
OSC/pbs-ruby | lib/pbs/batch.rb | PBS.Batch.pbs_submit | def pbs_submit(script, queue, headers, resources, envvars)
attribs = []
headers.each do |name, value|
attribs << { name: name, value: value }
end
resources.each do |rsc, value|
attribs << { name: :Resource_List, resource: rsc, value: value }
end
unless envvars.empty?
attribs << {
name: :Variable_List,
value: envvars.map {|k,v| "#{k}=#{v}"}.join(",")
}
end
connect do |cid|
attropl = Torque::Attropl.from_list attribs
Torque.pbs_submit cid, attropl, script, queue, nil
end
end | ruby | def pbs_submit(script, queue, headers, resources, envvars)
attribs = []
headers.each do |name, value|
attribs << { name: name, value: value }
end
resources.each do |rsc, value|
attribs << { name: :Resource_List, resource: rsc, value: value }
end
unless envvars.empty?
attribs << {
name: :Variable_List,
value: envvars.map {|k,v| "#{k}=#{v}"}.join(",")
}
end
connect do |cid|
attropl = Torque::Attropl.from_list attribs
Torque.pbs_submit cid, attropl, script, queue, nil
end
end | [
"def",
"pbs_submit",
"(",
"script",
",",
"queue",
",",
"headers",
",",
"resources",
",",
"envvars",
")",
"attribs",
"=",
"[",
"]",
"headers",
".",
"each",
"do",
"|",
"name",
",",
"value",
"|",
"attribs",
"<<",
"{",
"name",
":",
"name",
",",
"value",
":",
"value",
"}",
"end",
"resources",
".",
"each",
"do",
"|",
"rsc",
",",
"value",
"|",
"attribs",
"<<",
"{",
"name",
":",
":Resource_List",
",",
"resource",
":",
"rsc",
",",
"value",
":",
"value",
"}",
"end",
"unless",
"envvars",
".",
"empty?",
"attribs",
"<<",
"{",
"name",
":",
":Variable_List",
",",
"value",
":",
"envvars",
".",
"map",
"{",
"|",
"k",
",",
"v",
"|",
"\"#{k}=#{v}\"",
"}",
".",
"join",
"(",
"\",\"",
")",
"}",
"end",
"connect",
"do",
"|",
"cid",
"|",
"attropl",
"=",
"Torque",
"::",
"Attropl",
".",
"from_list",
"attribs",
"Torque",
".",
"pbs_submit",
"cid",
",",
"attropl",
",",
"script",
",",
"queue",
",",
"nil",
"end",
"end"
]
| Submit a script using Torque library | [
"Submit",
"a",
"script",
"using",
"Torque",
"library"
]
| 8f07c38db23392d10d4e1e3cc248b07a3e43730f | https://github.com/OSC/pbs-ruby/blob/8f07c38db23392d10d4e1e3cc248b07a3e43730f/lib/pbs/batch.rb#L353-L372 | train |
OSC/pbs-ruby | lib/pbs/batch.rb | PBS.Batch.qsub_arg | def qsub_arg(key, value)
case key
# common attributes
when :Execution_Time
['-a', value.to_s]
when :Checkpoint
['-c', value.to_s]
when :Error_Path
['-e', value.to_s]
when :fault_tolerant
['-f']
when :Hold_Types
['-h']
when :Join_Path
['-j', value.to_s]
when :Keep_Files
['-k', value.to_s]
when :Mail_Points
['-m', value.to_s]
when :Output_Path
['-o', value.to_s]
when :Priority
['-p', value.to_s]
when :Rerunable
['-r', value.to_s]
when :job_array_request
['-t', value.to_s]
when :User_List
['-u', value.to_s]
when :Account_Name
['-A', value.to_s]
when :Mail_Users
['-M', value.to_s]
when :Job_Name
['-N', value.to_s]
when :Shell_Path_List
['-S', value.to_s]
# uncommon attributes
when :job_arguments
['-F', value.to_s]
when :init_work_dir
['-d', value.to_s] # sets PBS_O_INITDIR
when :reservation_id
['-W', "x=advres:#{value}"] # use resource manager extensions for Moab
# everything else
else
['-W', "#{key}=#{value}"]
end
end | ruby | def qsub_arg(key, value)
case key
# common attributes
when :Execution_Time
['-a', value.to_s]
when :Checkpoint
['-c', value.to_s]
when :Error_Path
['-e', value.to_s]
when :fault_tolerant
['-f']
when :Hold_Types
['-h']
when :Join_Path
['-j', value.to_s]
when :Keep_Files
['-k', value.to_s]
when :Mail_Points
['-m', value.to_s]
when :Output_Path
['-o', value.to_s]
when :Priority
['-p', value.to_s]
when :Rerunable
['-r', value.to_s]
when :job_array_request
['-t', value.to_s]
when :User_List
['-u', value.to_s]
when :Account_Name
['-A', value.to_s]
when :Mail_Users
['-M', value.to_s]
when :Job_Name
['-N', value.to_s]
when :Shell_Path_List
['-S', value.to_s]
# uncommon attributes
when :job_arguments
['-F', value.to_s]
when :init_work_dir
['-d', value.to_s] # sets PBS_O_INITDIR
when :reservation_id
['-W', "x=advres:#{value}"] # use resource manager extensions for Moab
# everything else
else
['-W', "#{key}=#{value}"]
end
end | [
"def",
"qsub_arg",
"(",
"key",
",",
"value",
")",
"case",
"key",
"when",
":Execution_Time",
"[",
"'-a'",
",",
"value",
".",
"to_s",
"]",
"when",
":Checkpoint",
"[",
"'-c'",
",",
"value",
".",
"to_s",
"]",
"when",
":Error_Path",
"[",
"'-e'",
",",
"value",
".",
"to_s",
"]",
"when",
":fault_tolerant",
"[",
"'-f'",
"]",
"when",
":Hold_Types",
"[",
"'-h'",
"]",
"when",
":Join_Path",
"[",
"'-j'",
",",
"value",
".",
"to_s",
"]",
"when",
":Keep_Files",
"[",
"'-k'",
",",
"value",
".",
"to_s",
"]",
"when",
":Mail_Points",
"[",
"'-m'",
",",
"value",
".",
"to_s",
"]",
"when",
":Output_Path",
"[",
"'-o'",
",",
"value",
".",
"to_s",
"]",
"when",
":Priority",
"[",
"'-p'",
",",
"value",
".",
"to_s",
"]",
"when",
":Rerunable",
"[",
"'-r'",
",",
"value",
".",
"to_s",
"]",
"when",
":job_array_request",
"[",
"'-t'",
",",
"value",
".",
"to_s",
"]",
"when",
":User_List",
"[",
"'-u'",
",",
"value",
".",
"to_s",
"]",
"when",
":Account_Name",
"[",
"'-A'",
",",
"value",
".",
"to_s",
"]",
"when",
":Mail_Users",
"[",
"'-M'",
",",
"value",
".",
"to_s",
"]",
"when",
":Job_Name",
"[",
"'-N'",
",",
"value",
".",
"to_s",
"]",
"when",
":Shell_Path_List",
"[",
"'-S'",
",",
"value",
".",
"to_s",
"]",
"when",
":job_arguments",
"[",
"'-F'",
",",
"value",
".",
"to_s",
"]",
"when",
":init_work_dir",
"[",
"'-d'",
",",
"value",
".",
"to_s",
"]",
"when",
":reservation_id",
"[",
"'-W'",
",",
"\"x=advres:#{value}\"",
"]",
"else",
"[",
"'-W'",
",",
"\"#{key}=#{value}\"",
"]",
"end",
"end"
]
| Mapping of Torque attribute to `qsub` arguments | [
"Mapping",
"of",
"Torque",
"attribute",
"to",
"qsub",
"arguments"
]
| 8f07c38db23392d10d4e1e3cc248b07a3e43730f | https://github.com/OSC/pbs-ruby/blob/8f07c38db23392d10d4e1e3cc248b07a3e43730f/lib/pbs/batch.rb#L375-L423 | train |
OSC/pbs-ruby | lib/pbs/batch.rb | PBS.Batch.call | def call(cmd, *args, env: {}, stdin: "", chdir: nil)
cmd = bin.join(cmd.to_s).to_s
args = args.map(&:to_s)
env = env.to_h.each_with_object({}) {|(k,v), h| h[k.to_s] = v.to_s}.merge({
"PBS_DEFAULT" => host,
"LD_LIBRARY_PATH" => %{#{lib}:#{ENV["LD_LIBRARY_PATH"]}}
})
stdin = stdin.to_s
chdir ||= "."
o, e, s = Open3.capture3(env, cmd, *args, stdin_data: stdin, chdir: chdir.to_s)
s.success? ? o : raise(PBS::Error, e)
end | ruby | def call(cmd, *args, env: {}, stdin: "", chdir: nil)
cmd = bin.join(cmd.to_s).to_s
args = args.map(&:to_s)
env = env.to_h.each_with_object({}) {|(k,v), h| h[k.to_s] = v.to_s}.merge({
"PBS_DEFAULT" => host,
"LD_LIBRARY_PATH" => %{#{lib}:#{ENV["LD_LIBRARY_PATH"]}}
})
stdin = stdin.to_s
chdir ||= "."
o, e, s = Open3.capture3(env, cmd, *args, stdin_data: stdin, chdir: chdir.to_s)
s.success? ? o : raise(PBS::Error, e)
end | [
"def",
"call",
"(",
"cmd",
",",
"*",
"args",
",",
"env",
":",
"{",
"}",
",",
"stdin",
":",
"\"\"",
",",
"chdir",
":",
"nil",
")",
"cmd",
"=",
"bin",
".",
"join",
"(",
"cmd",
".",
"to_s",
")",
".",
"to_s",
"args",
"=",
"args",
".",
"map",
"(",
"&",
":to_s",
")",
"env",
"=",
"env",
".",
"to_h",
".",
"each_with_object",
"(",
"{",
"}",
")",
"{",
"|",
"(",
"k",
",",
"v",
")",
",",
"h",
"|",
"h",
"[",
"k",
".",
"to_s",
"]",
"=",
"v",
".",
"to_s",
"}",
".",
"merge",
"(",
"{",
"\"PBS_DEFAULT\"",
"=>",
"host",
",",
"\"LD_LIBRARY_PATH\"",
"=>",
"%{#{lib}:#{ENV[\"LD_LIBRARY_PATH\"]}}",
"}",
")",
"stdin",
"=",
"stdin",
".",
"to_s",
"chdir",
"||=",
"\".\"",
"o",
",",
"e",
",",
"s",
"=",
"Open3",
".",
"capture3",
"(",
"env",
",",
"cmd",
",",
"*",
"args",
",",
"stdin_data",
":",
"stdin",
",",
"chdir",
":",
"chdir",
".",
"to_s",
")",
"s",
".",
"success?",
"?",
"o",
":",
"raise",
"(",
"PBS",
"::",
"Error",
",",
"e",
")",
"end"
]
| Call a forked PBS command for a given host | [
"Call",
"a",
"forked",
"PBS",
"command",
"for",
"a",
"given",
"host"
]
| 8f07c38db23392d10d4e1e3cc248b07a3e43730f | https://github.com/OSC/pbs-ruby/blob/8f07c38db23392d10d4e1e3cc248b07a3e43730f/lib/pbs/batch.rb#L446-L457 | train |
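The `call` helper above is essentially a thin wrapper around `Open3.capture3`; a standalone sketch of the same pattern (the binary path and `PBS_DEFAULT` value are placeholders, not taken from the source):

```ruby
require 'open3'

env = { 'PBS_DEFAULT' => 'oak-batch.osc.edu' }   # hypothetical batch host
cmd = '/usr/local/torque/bin/qstat'              # hypothetical qstat path

out, err, status = Open3.capture3(env, cmd, '-Q', stdin_data: '', chdir: '.')
raise err unless status.success?
puts out
```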
luke-gru/riml | lib/riml/include_cache.rb | Riml.IncludeCache.fetch | def fetch(included_filename)
if source = @cache[included_filename]
return source
end
if @m.locked? && @owns_lock == Thread.current
@cache[included_filename] = yield
else
ret = nil
@cache[included_filename] = @m.synchronize do
begin
@owns_lock = Thread.current
ret = yield
ensure
@owns_lock = nil
end
end
ret
end
end | ruby | def fetch(included_filename)
if source = @cache[included_filename]
return source
end
if @m.locked? && @owns_lock == Thread.current
@cache[included_filename] = yield
else
ret = nil
@cache[included_filename] = @m.synchronize do
begin
@owns_lock = Thread.current
ret = yield
ensure
@owns_lock = nil
end
end
ret
end
end | [
"def",
"fetch",
"(",
"included_filename",
")",
"if",
"source",
"=",
"@cache",
"[",
"included_filename",
"]",
"return",
"source",
"end",
"if",
"@m",
".",
"locked?",
"&&",
"@owns_lock",
"==",
"Thread",
".",
"current",
"@cache",
"[",
"included_filename",
"]",
"=",
"yield",
"else",
"ret",
"=",
"nil",
"@cache",
"[",
"included_filename",
"]",
"=",
"@m",
".",
"synchronize",
"do",
"begin",
"@owns_lock",
"=",
"Thread",
".",
"current",
"ret",
"=",
"yield",
"ensure",
"@owns_lock",
"=",
"nil",
"end",
"end",
"ret",
"end",
"end"
]
| `fetch` can be called recursively in the `yield`ed block, so must
make sure not to try to lock the Mutex if it's already locked by the
current thread, as this would result in an error. | [
"fetch",
"can",
"be",
"called",
"recursively",
"in",
"the",
"yield",
"ed",
"block",
"so",
"must",
"make",
"sure",
"not",
"to",
"try",
"to",
"lock",
"the",
"Mutex",
"if",
"it",
"s",
"already",
"locked",
"by",
"the",
"current",
"thread",
"as",
"this",
"would",
"result",
"in",
"an",
"error",
"."
]
| 27e26e5fb66bfd1259bf9fbfda25ae0d93596d7e | https://github.com/luke-gru/riml/blob/27e26e5fb66bfd1259bf9fbfda25ae0d93596d7e/lib/riml/include_cache.rb#L14-L33 | train |
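A usage sketch for the memoizing `fetch` above (file name and loader are made up; it assumes the cache is built with the library's default constructor). The block runs only on a cache miss, and a nested `fetch` from the same thread reuses the lock it already holds:

```ruby
cache = Riml::IncludeCache.new

source = cache.fetch('lib.riml') do
  File.read('/path/to/lib.riml')   # expensive work, done once
end

# Second lookup returns the cached value; the block is never evaluated.
cache.fetch('lib.riml') { raise 'not reached' }
```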
luke-gru/riml | lib/riml/compiler.rb | Riml.Compiler.compile | def compile(root_node)
root_node.extend CompilerAccessible
root_node.current_compiler = self
root_node.accept(NodesVisitor.new)
root_node.compiled_output
end | ruby | def compile(root_node)
root_node.extend CompilerAccessible
root_node.current_compiler = self
root_node.accept(NodesVisitor.new)
root_node.compiled_output
end | [
"def",
"compile",
"(",
"root_node",
")",
"root_node",
".",
"extend",
"CompilerAccessible",
"root_node",
".",
"current_compiler",
"=",
"self",
"root_node",
".",
"accept",
"(",
"NodesVisitor",
".",
"new",
")",
"root_node",
".",
"compiled_output",
"end"
]
| compiles nodes into output code | [
"compiles",
"nodes",
"into",
"output",
"code"
]
| 27e26e5fb66bfd1259bf9fbfda25ae0d93596d7e | https://github.com/luke-gru/riml/blob/27e26e5fb66bfd1259bf9fbfda25ae0d93596d7e/lib/riml/compiler.rb#L816-L821 | train |
zuazo/dockerspec | lib/dockerspec/docker_exception_parser.rb | Dockerspec.DockerExceptionParser.parse_exception | def parse_exception(e)
msg = e.to_s
json = msg.to_s.sub(/^Couldn't find id: /, '').split("\n").map(&:chomp)
json.map { |str| JSON.parse(str) }
rescue JSON::ParserError
raise e
end | ruby | def parse_exception(e)
msg = e.to_s
json = msg.to_s.sub(/^Couldn't find id: /, '').split("\n").map(&:chomp)
json.map { |str| JSON.parse(str) }
rescue JSON::ParserError
raise e
end | [
"def",
"parse_exception",
"(",
"e",
")",
"msg",
"=",
"e",
".",
"to_s",
"json",
"=",
"msg",
".",
"to_s",
".",
"sub",
"(",
"/",
"/",
",",
"''",
")",
".",
"split",
"(",
"\"\\n\"",
")",
".",
"map",
"(",
"&",
":chomp",
")",
"json",
".",
"map",
"{",
"|",
"str",
"|",
"JSON",
".",
"parse",
"(",
"str",
")",
"}",
"rescue",
"JSON",
"::",
"ParserError",
"raise",
"e",
"end"
]
| Parses the exception JSON message.
The message must be a list of JSON messages merged by a new line.
A valid exception message example:
```
{"stream":"Step 1 : FROM alpine:3.2\n"}
{"stream":" ---\u003e d6ead20d5571\n"}
{"stream":"Step 2 : RUN apk add --update wrong-package-name\n"}
{"stream":" ---\u003e Running in 290a46fa8bf4\n"}
{"stream":"fetch http://dl-4.alpinelinux.org/alpine/v3.2/main/...\n"}
{"stream":"ERROR: unsatisfiable constraints:\n"}
{"stream":" wrong-package-name (missing):\n required by: world...\n"}
{"errorDetail":{"message":"The command ..."},"error":"The command ..."}
```
@example
self.parse_exception(e)
#=> [{ "stream" => "Step 1 : FROM alpine:3.2\n" }, "errorDetail" => ...
@param e [Exception] The exception object to parse.
@return [Array<Hash>] The list of JSON messages parsed.
@api private | [
"Parses",
"the",
"exception",
"JSON",
"message",
"."
]
| cb3868684cc2fdf39cf9e807d6a8844944275fff | https://github.com/zuazo/dockerspec/blob/cb3868684cc2fdf39cf9e807d6a8844944275fff/lib/dockerspec/docker_exception_parser.rb#L83-L89 | train |
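The parse step above reduces to splitting the message on newlines and feeding each line to `JSON.parse`; a self-contained sketch with a made-up two-line message:

```ruby
require 'json'

msg = <<~'MSG'
  {"stream":"Step 1 : FROM alpine:3.2\n"}
  {"errorDetail":{"message":"The command failed"},"error":"The command failed"}
MSG

events = msg.split("\n").map(&:chomp).map { |line| JSON.parse(line) }
events.select { |e| e.key?('errorDetail') }.first['error'] #=> "The command failed"
```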
zuazo/dockerspec | lib/dockerspec/docker_exception_parser.rb | Dockerspec.DockerExceptionParser.parse_streams | def parse_streams(e_ary)
e_ary.map { |x| x.is_a?(Hash) && x['stream'] }.compact.join
end | ruby | def parse_streams(e_ary)
e_ary.map { |x| x.is_a?(Hash) && x['stream'] }.compact.join
end | [
"def",
"parse_streams",
"(",
"e_ary",
")",
"e_ary",
".",
"map",
"{",
"|",
"x",
"|",
"x",
".",
"is_a?",
"(",
"Hash",
")",
"&&",
"x",
"[",
"'stream'",
"]",
"}",
".",
"compact",
".",
"join",
"end"
]
| Gets all the console output from the stream logs.
@param e_ary [Array<Hash>] The list of JSON messages already parsed.
@return [String] The generated stdout output.
@api private | [
"Gets",
"all",
"the",
"console",
"output",
"from",
"the",
"stream",
"logs",
"."
]
| cb3868684cc2fdf39cf9e807d6a8844944275fff | https://github.com/zuazo/dockerspec/blob/cb3868684cc2fdf39cf9e807d6a8844944275fff/lib/dockerspec/docker_exception_parser.rb#L116-L118 | train |
zuazo/dockerspec | lib/dockerspec/builder.rb | Dockerspec.Builder.source | def source
return @source unless @source.nil?
@source = %i(string template id path).find { |from| @options.key?(from) }
end | ruby | def source
return @source unless @source.nil?
@source = %i(string template id path).find { |from| @options.key?(from) }
end | [
"def",
"source",
"return",
"@source",
"unless",
"@source",
".",
"nil?",
"@source",
"=",
"%i(",
"string",
"template",
"id",
"path",
")",
".",
"find",
"{",
"|",
"from",
"|",
"@options",
".",
"key?",
"(",
"from",
")",
"}",
"end"
]
| Gets the source to generate the image from.
Possible values: `:string`, `:template`, `:id`, `:path`.
@example Building an Image from a Path
self.source #=> :path
@example Building an Image from a Template
self.source #=> :template
@return [Symbol] The source.
@api private | [
"Gets",
"the",
"source",
"to",
"generate",
"the",
"image",
"from",
"."
]
| cb3868684cc2fdf39cf9e807d6a8844944275fff | https://github.com/zuazo/dockerspec/blob/cb3868684cc2fdf39cf9e807d6a8844944275fff/lib/dockerspec/builder.rb#L166-L169 | train |
zuazo/dockerspec | lib/dockerspec/builder.rb | Dockerspec.Builder.image | def image(img = nil)
return @image if img.nil?
ImageGC.instance.add(img.id) if @options[:rm]
@image = img
end | ruby | def image(img = nil)
return @image if img.nil?
ImageGC.instance.add(img.id) if @options[:rm]
@image = img
end | [
"def",
"image",
"(",
"img",
"=",
"nil",
")",
"return",
"@image",
"if",
"img",
".",
"nil?",
"ImageGC",
".",
"instance",
".",
"add",
"(",
"img",
".",
"id",
")",
"if",
"@options",
"[",
":rm",
"]",
"@image",
"=",
"img",
"end"
]
| Sets or gets the Docker image.
@param img [Docker::Image] The Docker image to set.
@return [Docker::Image] The Docker image object.
@api private | [
"Sets",
"or",
"gets",
"the",
"Docker",
"image",
"."
]
| cb3868684cc2fdf39cf9e807d6a8844944275fff | https://github.com/zuazo/dockerspec/blob/cb3868684cc2fdf39cf9e807d6a8844944275fff/lib/dockerspec/builder.rb#L194-L198 | train |
zuazo/dockerspec | lib/dockerspec/builder.rb | Dockerspec.Builder.rspec_options | def rspec_options
config = ::RSpec.configuration
{}.tap do |opts|
opts[:path] = config.dockerfile_path if config.dockerfile_path?
opts[:rm] = config.rm_build if config.rm_build?
opts[:log_level] = config.log_level if config.log_level?
end
end | ruby | def rspec_options
config = ::RSpec.configuration
{}.tap do |opts|
opts[:path] = config.dockerfile_path if config.dockerfile_path?
opts[:rm] = config.rm_build if config.rm_build?
opts[:log_level] = config.log_level if config.log_level?
end
end | [
"def",
"rspec_options",
"config",
"=",
"::",
"RSpec",
".",
"configuration",
"{",
"}",
".",
"tap",
"do",
"|",
"opts",
"|",
"opts",
"[",
":path",
"]",
"=",
"config",
".",
"dockerfile_path",
"if",
"config",
".",
"dockerfile_path?",
"opts",
"[",
":rm",
"]",
"=",
"config",
".",
"rm_build",
"if",
"config",
".",
"rm_build?",
"opts",
"[",
":log_level",
"]",
"=",
"config",
".",
"log_level",
"if",
"config",
".",
"log_level?",
"end",
"end"
]
| Gets the default options configured using `RSpec.configuration`.
@example
self.rspec_options #=> {:path=>".", :rm=>true, :log_level=>:silent}
@return [Hash] The configuration options.
@api private | [
"Gets",
"the",
"default",
"options",
"configured",
"using",
"RSpec",
".",
"configuration",
"."
]
| cb3868684cc2fdf39cf9e807d6a8844944275fff | https://github.com/zuazo/dockerspec/blob/cb3868684cc2fdf39cf9e807d6a8844944275fff/lib/dockerspec/builder.rb#L210-L217 | train |
zuazo/dockerspec | lib/dockerspec/builder.rb | Dockerspec.Builder.default_options | def default_options
{
path: ENV['DOCKERFILE_PATH'] || '.',
# Autoremove images in all CIs except Travis (not supported):
rm: ci? && !travis_ci?,
# Avoid CI timeout errors:
log_level: ci? ? :ci : :silent
}.merge(rspec_options)
end | ruby | def default_options
{
path: ENV['DOCKERFILE_PATH'] || '.',
# Autoremove images in all CIs except Travis (not supported):
rm: ci? && !travis_ci?,
# Avoid CI timeout errors:
log_level: ci? ? :ci : :silent
}.merge(rspec_options)
end | [
"def",
"default_options",
"{",
"path",
":",
"ENV",
"[",
"'DOCKERFILE_PATH'",
"]",
"||",
"'.'",
",",
"rm",
":",
"ci?",
"&&",
"!",
"travis_ci?",
",",
"log_level",
":",
"ci?",
"?",
":ci",
":",
":silent",
"}",
".",
"merge",
"(",
"rspec_options",
")",
"end"
]
| Gets the default configuration options after merging them with RSpec
configuration options.
@example
self.default_options #=> {:path=>".", :rm=>true, :log_level=>:silent}
@return [Hash] The configuration options.
@api private | [
"Gets",
"the",
"default",
"configuration",
"options",
"after",
"merging",
"them",
"with",
"RSpec",
"configuration",
"options",
"."
]
| cb3868684cc2fdf39cf9e807d6a8844944275fff | https://github.com/zuazo/dockerspec/blob/cb3868684cc2fdf39cf9e807d6a8844944275fff/lib/dockerspec/builder.rb#L230-L238 | train |
zuazo/dockerspec | lib/dockerspec/builder.rb | Dockerspec.Builder.parse_options | def parse_options(opts)
opts_hs_ary = opts.map { |x| x.is_a?(Hash) ? x : { path: x } }
opts_hs_ary.reduce(default_options) { |a, e| a.merge(e) }
end | ruby | def parse_options(opts)
opts_hs_ary = opts.map { |x| x.is_a?(Hash) ? x : { path: x } }
opts_hs_ary.reduce(default_options) { |a, e| a.merge(e) }
end | [
"def",
"parse_options",
"(",
"opts",
")",
"opts_hs_ary",
"=",
"opts",
".",
"map",
"{",
"|",
"x",
"|",
"x",
".",
"is_a?",
"(",
"Hash",
")",
"?",
"x",
":",
"{",
"path",
":",
"x",
"}",
"}",
"opts_hs_ary",
".",
"reduce",
"(",
"default_options",
")",
"{",
"|",
"a",
",",
"e",
"|",
"a",
".",
"merge",
"(",
"e",
")",
"}",
"end"
]
| Parses the configuration options passed to the constructor.
@example
self.parse_options #=> {:path=>".", :rm=>true, :log_level=>:silent}
@param opts [Array<String, Hash>] The list of options. The strings will
be interpreted as `:path`, others will be merged.
@return [Hash] The configuration options.
@see #initialize
@api private | [
"Parses",
"the",
"configuration",
"options",
"passed",
"to",
"the",
"constructor",
"."
]
| cb3868684cc2fdf39cf9e807d6a8844944275fff | https://github.com/zuazo/dockerspec/blob/cb3868684cc2fdf39cf9e807d6a8844944275fff/lib/dockerspec/builder.rb#L255-L258 | train |
zuazo/dockerspec | lib/dockerspec/builder.rb | Dockerspec.Builder.build_from_string | def build_from_string(string, dir = '.')
dir = @options[:string_build_path] if @options[:string_build_path]
Dir.mktmpdir do |tmpdir|
FileUtils.cp_r("#{dir}/.", tmpdir)
dockerfile = File.join(tmpdir, 'Dockerfile')
File.open(dockerfile, 'w') { |f| f.write(string) }
build_from_dir(tmpdir)
end
end | ruby | def build_from_string(string, dir = '.')
dir = @options[:string_build_path] if @options[:string_build_path]
Dir.mktmpdir do |tmpdir|
FileUtils.cp_r("#{dir}/.", tmpdir)
dockerfile = File.join(tmpdir, 'Dockerfile')
File.open(dockerfile, 'w') { |f| f.write(string) }
build_from_dir(tmpdir)
end
end | [
"def",
"build_from_string",
"(",
"string",
",",
"dir",
"=",
"'.'",
")",
"dir",
"=",
"@options",
"[",
":string_build_path",
"]",
"if",
"@options",
"[",
":string_build_path",
"]",
"Dir",
".",
"mktmpdir",
"do",
"|",
"tmpdir",
"|",
"FileUtils",
".",
"cp_r",
"(",
"\"#{dir}/.\"",
",",
"tmpdir",
")",
"dockerfile",
"=",
"File",
".",
"join",
"(",
"tmpdir",
",",
"'Dockerfile'",
")",
"File",
".",
"open",
"(",
"dockerfile",
",",
"'w'",
")",
"{",
"|",
"f",
"|",
"f",
".",
"write",
"(",
"string",
")",
"}",
"build_from_dir",
"(",
"tmpdir",
")",
"end",
"end"
]
| Builds the image from a string. Generates the Docker tag if required.
It also saves the generated image in the object internally.
This creates a temporary directory where it copies all the files and
generates the temporary Dockerfile.
@param string [String] The Dockerfile content.
@param dir [String] The directory to copy the files from. Files that are
required by the Dockerfile passed in *string*. If not passed, then
the 'string_build_path' option is used. If that is not used, '.' is
assumed.
@return void
@api private | [
"Builds",
"the",
"image",
"from",
"a",
"string",
".",
"Generates",
"the",
"Docker",
"tag",
"if",
"required",
"."
]
| cb3868684cc2fdf39cf9e807d6a8844944275fff | https://github.com/zuazo/dockerspec/blob/cb3868684cc2fdf39cf9e807d6a8844944275fff/lib/dockerspec/builder.rb#L289-L297 | train |
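The staging step above (copy the build context into a temp directory, write the Dockerfile there, then build from that directory) can be sketched without Docker itself; the Dockerfile content and context path are placeholders:

```ruby
require 'tmpdir'
require 'fileutils'

dockerfile  = "FROM alpine:3.2\nCMD [\"true\"]\n"
context_dir = '.' # hypothetical build context

Dir.mktmpdir do |tmpdir|
  FileUtils.cp_r("#{context_dir}/.", tmpdir)
  File.write(File.join(tmpdir, 'Dockerfile'), dockerfile)
  # The real code hands tmpdir to Docker::Image.build_from_dir at this point.
  puts File.read(File.join(tmpdir, 'Dockerfile'))
end
```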
zuazo/dockerspec | lib/dockerspec/builder.rb | Dockerspec.Builder.build_from_dir | def build_from_dir(dir)
image(::Docker::Image.build_from_dir(dir, &build_block))
add_repository_tag
rescue ::Docker::Error::DockerError => e
DockerExceptionParser.new(e)
end | ruby | def build_from_dir(dir)
image(::Docker::Image.build_from_dir(dir, &build_block))
add_repository_tag
rescue ::Docker::Error::DockerError => e
DockerExceptionParser.new(e)
end | [
"def",
"build_from_dir",
"(",
"dir",
")",
"image",
"(",
"::",
"Docker",
"::",
"Image",
".",
"build_from_dir",
"(",
"dir",
",",
"&",
"build_block",
")",
")",
"add_repository_tag",
"rescue",
"::",
"Docker",
"::",
"Error",
"::",
"DockerError",
"=>",
"e",
"DockerExceptionParser",
".",
"new",
"(",
"e",
")",
"end"
]
| Builds the image from a directory with a Dockerfile.
It also saves the generated image in the object internally.
@param dir [String] The directory path.
@return void
@raise [Dockerspec::DockerError] For underlaying docker errors.
@api private | [
"Builds",
"the",
"image",
"from",
"a",
"directory",
"with",
"a",
"Dockerfile",
"."
]
| cb3868684cc2fdf39cf9e807d6a8844944275fff | https://github.com/zuazo/dockerspec/blob/cb3868684cc2fdf39cf9e807d6a8844944275fff/lib/dockerspec/builder.rb#L332-L337 | train |
zuazo/dockerspec | lib/dockerspec/builder.rb | Dockerspec.Builder.build_from_path | def build_from_path(path)
if !File.directory?(path) && File.basename(path) == 'Dockerfile'
path = File.dirname(path)
end
File.directory?(path) ? build_from_dir(path) : build_from_file(path)
end | ruby | def build_from_path(path)
if !File.directory?(path) && File.basename(path) == 'Dockerfile'
path = File.dirname(path)
end
File.directory?(path) ? build_from_dir(path) : build_from_file(path)
end | [
"def",
"build_from_path",
"(",
"path",
")",
"if",
"!",
"File",
".",
"directory?",
"(",
"path",
")",
"&&",
"File",
".",
"basename",
"(",
"path",
")",
"==",
"'Dockerfile'",
"path",
"=",
"File",
".",
"dirname",
"(",
"path",
")",
"end",
"File",
".",
"directory?",
"(",
"path",
")",
"?",
"build_from_dir",
"(",
"path",
")",
":",
"build_from_file",
"(",
"path",
")",
"end"
]
| Builds the image from a directory or a file.
It also saves the generated image in the object internally.
@param path [String] The path.
@return void
@api private | [
"Builds",
"the",
"image",
"from",
"a",
"directory",
"or",
"a",
"file",
"."
]
| cb3868684cc2fdf39cf9e807d6a8844944275fff | https://github.com/zuazo/dockerspec/blob/cb3868684cc2fdf39cf9e807d6a8844944275fff/lib/dockerspec/builder.rb#L350-L355 | train |
zuazo/dockerspec | lib/dockerspec/builder.rb | Dockerspec.Builder.build_from_template | def build_from_template(file)
context = @options[:context] || {}
template = IO.read(file)
eruby = Erubis::Eruby.new(template)
string = eruby.evaluate(context)
build_from_string(string, File.dirname(file))
end | ruby | def build_from_template(file)
context = @options[:context] || {}
template = IO.read(file)
eruby = Erubis::Eruby.new(template)
string = eruby.evaluate(context)
build_from_string(string, File.dirname(file))
end | [
"def",
"build_from_template",
"(",
"file",
")",
"context",
"=",
"@options",
"[",
":context",
"]",
"||",
"{",
"}",
"template",
"=",
"IO",
".",
"read",
"(",
"file",
")",
"eruby",
"=",
"Erubis",
"::",
"Eruby",
".",
"new",
"(",
"template",
")",
"string",
"=",
"eruby",
".",
"evaluate",
"(",
"context",
")",
"build_from_string",
"(",
"string",
",",
"File",
".",
"dirname",
"(",
"file",
")",
")",
"end"
]
| Builds the image from a template.
It also saves the generated image in the object internally.
@param file [String] The Dockerfile [Erubis]
(http://www.kuwata-lab.com/erubis/users-guide.html) template path.
@return void
@api private | [
"Builds",
"the",
"image",
"from",
"a",
"template",
"."
]
| cb3868684cc2fdf39cf9e807d6a8844944275fff | https://github.com/zuazo/dockerspec/blob/cb3868684cc2fdf39cf9e807d6a8844944275fff/lib/dockerspec/builder.rb#L369-L376 | train |
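The render step above uses Erubis; a minimal sketch of the same call with a made-up template and context (with `evaluate`, context keys are exposed as instance variables):

```ruby
require 'erubis'

template   = "FROM <%= @base %>\nRUN apk add --update <%= @pkg %>\n"
dockerfile = Erubis::Eruby.new(template).evaluate(base: 'alpine:3.2', pkg: 'bash')
puts dockerfile
# FROM alpine:3.2
# RUN apk add --update bash
```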
zuazo/dockerspec | lib/dockerspec/builder.rb | Dockerspec.Builder.build_from_id | def build_from_id(id)
@image = ::Docker::Image.get(id)
add_repository_tag
rescue ::Docker::Error::NotFoundError
@image = ::Docker::Image.create('fromImage' => id)
add_repository_tag
rescue ::Docker::Error::DockerError => e
DockerExceptionParser.new(e)
end | ruby | def build_from_id(id)
@image = ::Docker::Image.get(id)
add_repository_tag
rescue ::Docker::Error::NotFoundError
@image = ::Docker::Image.create('fromImage' => id)
add_repository_tag
rescue ::Docker::Error::DockerError => e
DockerExceptionParser.new(e)
end | [
"def",
"build_from_id",
"(",
"id",
")",
"@image",
"=",
"::",
"Docker",
"::",
"Image",
".",
"get",
"(",
"id",
")",
"add_repository_tag",
"rescue",
"::",
"Docker",
"::",
"Error",
"::",
"NotFoundError",
"@image",
"=",
"::",
"Docker",
"::",
"Image",
".",
"create",
"(",
"'fromImage'",
"=>",
"id",
")",
"add_repository_tag",
"rescue",
"::",
"Docker",
"::",
"Error",
"::",
"DockerError",
"=>",
"e",
"DockerExceptionParser",
".",
"new",
"(",
"e",
")",
"end"
]
| Gets the image from a Image ID.
It also saves the image in the object internally.
@param id [String] The Docker image ID.
@return void
@raise [Dockerspec::DockerError] For underlaying docker errors.
@api private | [
"Gets",
"the",
"image",
"from",
"a",
"Image",
"ID",
"."
]
| cb3868684cc2fdf39cf9e807d6a8844944275fff | https://github.com/zuazo/dockerspec/blob/cb3868684cc2fdf39cf9e807d6a8844944275fff/lib/dockerspec/builder.rb#L391-L399 | train |
zuazo/dockerspec | lib/dockerspec/builder.rb | Dockerspec.Builder.add_repository_tag | def add_repository_tag
return unless @options.key?(:tag)
repo, repo_tag = @options[:tag].split(':', 2)
@image.tag(repo: repo, tag: repo_tag, force: true)
end | ruby | def add_repository_tag
return unless @options.key?(:tag)
repo, repo_tag = @options[:tag].split(':', 2)
@image.tag(repo: repo, tag: repo_tag, force: true)
end | [
"def",
"add_repository_tag",
"return",
"unless",
"@options",
".",
"key?",
"(",
":tag",
")",
"repo",
",",
"repo_tag",
"=",
"@options",
"[",
":tag",
"]",
".",
"split",
"(",
"':'",
",",
"2",
")",
"@image",
".",
"tag",
"(",
"repo",
":",
"repo",
",",
"tag",
":",
"repo_tag",
",",
"force",
":",
"true",
")",
"end"
]
| Adds a repository name and a tag to the Docker image.
@return void
@api private | [
"Adds",
"a",
"repository",
"name",
"and",
"a",
"tag",
"to",
"the",
"Docker",
"image",
"."
]
| cb3868684cc2fdf39cf9e807d6a8844944275fff | https://github.com/zuazo/dockerspec/blob/cb3868684cc2fdf39cf9e807d6a8844944275fff/lib/dockerspec/builder.rb#L408-L412 | train |
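The limit of 2 passed to `split` above keeps everything after the first colon as the tag; a quick worked example with hypothetical names:

```ruby
repo, tag = 'myuser/myimage:1.0'.split(':', 2)
repo #=> "myuser/myimage"
tag  #=> "1.0"

'myuser/myimage'.split(':', 2) #=> ["myuser/myimage"]  (tag is nil when absent)
```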
Intrepidd/working_hours | lib/working_hours/computation.rb | WorkingHours.Computation.in_config_zone | def in_config_zone time, config: nil
if time.respond_to? :in_time_zone
time.in_time_zone(config[:time_zone])
elsif time.is_a? Date
config[:time_zone].local(time.year, time.month, time.day)
else
raise TypeError.new("Can't convert #{time.class} to a Time")
end
end | ruby | def in_config_zone time, config: nil
if time.respond_to? :in_time_zone
time.in_time_zone(config[:time_zone])
elsif time.is_a? Date
config[:time_zone].local(time.year, time.month, time.day)
else
raise TypeError.new("Can't convert #{time.class} to a Time")
end
end | [
"def",
"in_config_zone",
"time",
",",
"config",
":",
"nil",
"if",
"time",
".",
"respond_to?",
":in_time_zone",
"time",
".",
"in_time_zone",
"(",
"config",
"[",
":time_zone",
"]",
")",
"elsif",
"time",
".",
"is_a?",
"Date",
"config",
"[",
":time_zone",
"]",
".",
"local",
"(",
"time",
".",
"year",
",",
"time",
".",
"month",
",",
"time",
".",
"day",
")",
"else",
"raise",
"TypeError",
".",
"new",
"(",
"\"Can't convert #{time.class} to a Time\"",
")",
"end",
"end"
]
| fix for ActiveRecord < 4, doesn't implement in_time_zone for Date | [
"fix",
"for",
"ActiveRecord",
"<",
"4",
"doesn",
"t",
"implement",
"in_time_zone",
"for",
"Date"
]
| ae17ce1935378505e0baa678038cbd6ef70d812d | https://github.com/Intrepidd/working_hours/blob/ae17ce1935378505e0baa678038cbd6ef70d812d/lib/working_hours/computation.rb#L208-L216 | train |
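A sketch of what the conversion above produces, assuming ActiveSupport is loaded and the configured zone is 'Paris' (both assumptions for illustration, not from the source):

```ruby
require 'active_support/time'

zone = ActiveSupport::TimeZone['Paris']

# Times respond to #in_time_zone and are shifted into the configured zone.
Time.utc(2019, 1, 1, 12, 0).in_time_zone(zone) #=> Tue, 01 Jan 2019 13:00:00 CET +01:00

# Bare Dates become midnight local to the configured zone.
date = Date.new(2019, 1, 1)
zone.local(date.year, date.month, date.day)    #=> Tue, 01 Jan 2019 00:00:00 CET +01:00
```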
forever-inc/forever-style-guide | app/helpers/forever_style_guide/application_helper.rb | ForeverStyleGuide.ApplicationHelper.is_active? | def is_active?(page_name, product_types = nil)
controller.controller_name.include?(page_name) || controller.action_name.include?(page_name) || (@product != nil && product_types !=nil && product_types.split(',').include?(@product.product_type) && [email protected]?('Historian'))
end | ruby | def is_active?(page_name, product_types = nil)
controller.controller_name.include?(page_name) || controller.action_name.include?(page_name) || (@product != nil && product_types !=nil && product_types.split(',').include?(@product.product_type) && [email protected]?('Historian'))
end | [
"def",
"is_active?",
"(",
"page_name",
",",
"product_types",
"=",
"nil",
")",
"controller",
".",
"controller_name",
".",
"include?",
"(",
"page_name",
")",
"||",
"controller",
".",
"action_name",
".",
"include?",
"(",
"page_name",
")",
"||",
"(",
"@product",
"!=",
"nil",
"&&",
"product_types",
"!=",
"nil",
"&&",
"product_types",
".",
"split",
"(",
"','",
")",
".",
"include?",
"(",
"@product",
".",
"product_type",
")",
"&&",
"!",
"@product",
".",
"name",
".",
"include?",
"(",
"'Historian'",
")",
")",
"end"
]
| active state nav | [
"active",
"state",
"nav"
]
| 9027ea3040e3c1f46cf2a68d187551768557d259 | https://github.com/forever-inc/forever-style-guide/blob/9027ea3040e3c1f46cf2a68d187551768557d259/app/helpers/forever_style_guide/application_helper.rb#L71-L73 | train |
kytrinyx/etsy | lib/etsy/listing.rb | Etsy.Listing.admirers | def admirers(options = {})
options = options.merge(:access_token => token, :access_secret => secret) if (token && secret)
favorite_listings = FavoriteListing.find_all_listings_favored_by(id, options)
user_ids = favorite_listings.map {|f| f.user_id }.uniq
(user_ids.size > 0) ? Array(Etsy::User.find(user_ids, options)) : []
end | ruby | def admirers(options = {})
options = options.merge(:access_token => token, :access_secret => secret) if (token && secret)
favorite_listings = FavoriteListing.find_all_listings_favored_by(id, options)
user_ids = favorite_listings.map {|f| f.user_id }.uniq
(user_ids.size > 0) ? Array(Etsy::User.find(user_ids, options)) : []
end | [
"def",
"admirers",
"(",
"options",
"=",
"{",
"}",
")",
"options",
"=",
"options",
".",
"merge",
"(",
":access_token",
"=>",
"token",
",",
":access_secret",
"=>",
"secret",
")",
"if",
"(",
"token",
"&&",
"secret",
")",
"favorite_listings",
"=",
"FavoriteListing",
".",
"find_all_listings_favored_by",
"(",
"id",
",",
"options",
")",
"user_ids",
"=",
"favorite_listings",
".",
"map",
"{",
"|",
"f",
"|",
"f",
".",
"user_id",
"}",
".",
"uniq",
"(",
"user_ids",
".",
"size",
">",
"0",
")",
"?",
"Array",
"(",
"Etsy",
"::",
"User",
".",
"find",
"(",
"user_ids",
",",
"options",
")",
")",
":",
"[",
"]",
"end"
]
| Return a list of users who have favorited this listing | [
"Return",
"a",
"list",
"of",
"users",
"who",
"have",
"favorited",
"this",
"listing"
]
| 4d20e0cedea197aa6400ac9e4c64c1a3587c9af2 | https://github.com/kytrinyx/etsy/blob/4d20e0cedea197aa6400ac9e4c64c1a3587c9af2/lib/etsy/listing.rb#L244-L249 | train |
kytrinyx/etsy | lib/etsy/shop.rb | Etsy.Shop.listings | def listings(state = nil, options = {})
state = state ? {:state => state} : {}
Listing.find_all_by_shop_id(id, state.merge(options).merge(oauth))
end | ruby | def listings(state = nil, options = {})
state = state ? {:state => state} : {}
Listing.find_all_by_shop_id(id, state.merge(options).merge(oauth))
end | [
"def",
"listings",
"(",
"state",
"=",
"nil",
",",
"options",
"=",
"{",
"}",
")",
"state",
"=",
"state",
"?",
"{",
":state",
"=>",
"state",
"}",
":",
"{",
"}",
"Listing",
".",
"find_all_by_shop_id",
"(",
"id",
",",
"state",
".",
"merge",
"(",
"options",
")",
".",
"merge",
"(",
"oauth",
")",
")",
"end"
]
| The collection of listings associated with this shop | [
"The",
"collection",
"of",
"listings",
"associated",
"with",
"this",
"shop"
]
| 4d20e0cedea197aa6400ac9e4c64c1a3587c9af2 | https://github.com/kytrinyx/etsy/blob/4d20e0cedea197aa6400ac9e4c64c1a3587c9af2/lib/etsy/shop.rb#L70-L73 | train |
kytrinyx/etsy | lib/etsy/user.rb | Etsy.User.addresses | def addresses
options = (token && secret) ? {:access_token => token, :access_secret => secret} : {}
@addresses ||= Address.find(username, options)
end | ruby | def addresses
options = (token && secret) ? {:access_token => token, :access_secret => secret} : {}
@addresses ||= Address.find(username, options)
end | [
"def",
"addresses",
"options",
"=",
"(",
"token",
"&&",
"secret",
")",
"?",
"{",
":access_token",
"=>",
"token",
",",
":access_secret",
"=>",
"secret",
"}",
":",
"{",
"}",
"@addresses",
"||=",
"Address",
".",
"find",
"(",
"username",
",",
"options",
")",
"end"
]
| The addresses associated with this user. | [
"The",
"addresses",
"associated",
"with",
"this",
"user",
"."
]
| 4d20e0cedea197aa6400ac9e4c64c1a3587c9af2 | https://github.com/kytrinyx/etsy/blob/4d20e0cedea197aa6400ac9e4c64c1a3587c9af2/lib/etsy/user.rb#L51-L54 | train |
kytrinyx/etsy | lib/etsy/user.rb | Etsy.User.profile | def profile
unless @profile
if associated_profile
@profile = Profile.new(associated_profile)
else
options = {:fields => 'user_id', :includes => 'Profile'}
options = options.merge(:access_token => token, :access_secret => secret) if (token && secret)
tmp = User.find(username, options)
@profile = Profile.new(tmp.associated_profile)
end
end
@profile
end | ruby | def profile
unless @profile
if associated_profile
@profile = Profile.new(associated_profile)
else
options = {:fields => 'user_id', :includes => 'Profile'}
options = options.merge(:access_token => token, :access_secret => secret) if (token && secret)
tmp = User.find(username, options)
@profile = Profile.new(tmp.associated_profile)
end
end
@profile
end | [
"def",
"profile",
"unless",
"@profile",
"if",
"associated_profile",
"@profile",
"=",
"Profile",
".",
"new",
"(",
"associated_profile",
")",
"else",
"options",
"=",
"{",
":fields",
"=>",
"'user_id'",
",",
":includes",
"=>",
"'Profile'",
"}",
"options",
"=",
"options",
".",
"merge",
"(",
":access_token",
"=>",
"token",
",",
":access_secret",
"=>",
"secret",
")",
"if",
"(",
"token",
"&&",
"secret",
")",
"tmp",
"=",
"User",
".",
"find",
"(",
"username",
",",
"options",
")",
"@profile",
"=",
"Profile",
".",
"new",
"(",
"tmp",
".",
"associated_profile",
")",
"end",
"end",
"@profile",
"end"
]
| The profile associated with this user. | [
"The",
"profile",
"associated",
"with",
"this",
"user",
"."
]
| 4d20e0cedea197aa6400ac9e4c64c1a3587c9af2 | https://github.com/kytrinyx/etsy/blob/4d20e0cedea197aa6400ac9e4c64c1a3587c9af2/lib/etsy/user.rb#L58-L70 | train |
kytrinyx/etsy | lib/etsy/response.rb | Etsy.Response.result | def result
if success?
results = to_hash['results'] || []
count == 1 ? results.first : results
else
Etsy.silent_errors ? [] : validate!
end
end | ruby | def result
if success?
results = to_hash['results'] || []
count == 1 ? results.first : results
else
Etsy.silent_errors ? [] : validate!
end
end | [
"def",
"result",
"if",
"success?",
"results",
"=",
"to_hash",
"[",
"'results'",
"]",
"||",
"[",
"]",
"count",
"==",
"1",
"?",
"results",
".",
"first",
":",
"results",
"else",
"Etsy",
".",
"silent_errors",
"?",
"[",
"]",
":",
"validate!",
"end",
"end"
]
| Results of the API request | [
"Results",
"of",
"the",
"API",
"request"
]
| 4d20e0cedea197aa6400ac9e4c64c1a3587c9af2 | https://github.com/kytrinyx/etsy/blob/4d20e0cedea197aa6400ac9e4c64c1a3587c9af2/lib/etsy/response.rb#L53-L60 | train |
kytrinyx/etsy | lib/etsy/basic_client.rb | Etsy.BasicClient.client | def client # :nodoc:
if @client
return @client
else
@client = Net::HTTP.new(@host, Etsy.protocol == "http" ? 80 : 443)
@client.use_ssl = true if Etsy.protocol == "https"
return @client
end
end | ruby | def client # :nodoc:
if @client
return @client
else
@client = Net::HTTP.new(@host, Etsy.protocol == "http" ? 80 : 443)
@client.use_ssl = true if Etsy.protocol == "https"
return @client
end
end | [
"def",
"client",
"if",
"@client",
"return",
"@client",
"else",
"@client",
"=",
"Net",
"::",
"HTTP",
".",
"new",
"(",
"@host",
",",
"Etsy",
".",
"protocol",
"==",
"\"http\"",
"?",
"80",
":",
"443",
")",
"@client",
".",
"use_ssl",
"=",
"true",
"if",
"Etsy",
".",
"protocol",
"==",
"\"https\"",
"return",
"@client",
"end",
"end"
]
| Create a new client that will connect to the specified host | [
"Create",
"a",
"new",
"client",
"that",
"will",
"connect",
"to",
"the",
"specified",
"host"
]
| 4d20e0cedea197aa6400ac9e4c64c1a3587c9af2 | https://github.com/kytrinyx/etsy/blob/4d20e0cedea197aa6400ac9e4c64c1a3587c9af2/lib/etsy/basic_client.rb#L15-L23 | train |
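The memoized client above is plain Net::HTTP setup; a standalone sketch of the same lazy-initialization idea (host is a placeholder):

```ruby
require 'net/http'

def client(host = 'openapi.etsy.com', https = true)
  @client ||= begin
    c = Net::HTTP.new(host, https ? 443 : 80)
    c.use_ssl = https
    c
  end
end

client.use_ssl?       #=> true
client.equal?(client) #=> true, the same instance is returned on later calls
```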
kytrinyx/etsy | lib/etsy/secure_client.rb | Etsy.SecureClient.add_multipart_data | def add_multipart_data(req, params)
crlf = "\r\n"
boundary = Time.now.to_i.to_s(16)
req["Content-Type"] = "multipart/form-data; boundary=#{boundary}"
body = ""
params.each do |key,value|
esc_key = CGI.escape(key.to_s)
body << "--#{boundary}#{crlf}"
if value.respond_to?(:read)
body << "Content-Disposition: form-data; name=\"#{esc_key}\"; filename=\"#{File.basename(value.path)}\"#{crlf}"
body << "Content-Type: image/jpeg#{crlf*2}"
body << open(value.path, "rb") {|io| io.read}
else
body << "Content-Disposition: form-data; name=\"#{esc_key}\"#{crlf*2}#{value}"
end
body << crlf
end
body << "--#{boundary}--#{crlf*2}"
req.body = body
req["Content-Length"] = req.body.size
end | ruby | def add_multipart_data(req, params)
crlf = "\r\n"
boundary = Time.now.to_i.to_s(16)
req["Content-Type"] = "multipart/form-data; boundary=#{boundary}"
body = ""
params.each do |key,value|
esc_key = CGI.escape(key.to_s)
body << "--#{boundary}#{crlf}"
if value.respond_to?(:read)
body << "Content-Disposition: form-data; name=\"#{esc_key}\"; filename=\"#{File.basename(value.path)}\"#{crlf}"
body << "Content-Type: image/jpeg#{crlf*2}"
body << open(value.path, "rb") {|io| io.read}
else
body << "Content-Disposition: form-data; name=\"#{esc_key}\"#{crlf*2}#{value}"
end
body << crlf
end
body << "--#{boundary}--#{crlf*2}"
req.body = body
req["Content-Length"] = req.body.size
end | [
"def",
"add_multipart_data",
"(",
"req",
",",
"params",
")",
"crlf",
"=",
"\"\\r\\n\"",
"boundary",
"=",
"Time",
".",
"now",
".",
"to_i",
".",
"to_s",
"(",
"16",
")",
"req",
"[",
"\"Content-Type\"",
"]",
"=",
"\"multipart/form-data; boundary=#{boundary}\"",
"body",
"=",
"\"\"",
"params",
".",
"each",
"do",
"|",
"key",
",",
"value",
"|",
"esc_key",
"=",
"CGI",
".",
"escape",
"(",
"key",
".",
"to_s",
")",
"body",
"<<",
"\"--#{boundary}#{crlf}\"",
"if",
"value",
".",
"respond_to?",
"(",
":read",
")",
"body",
"<<",
"\"Content-Disposition: form-data; name=\\\"#{esc_key}\\\"; filename=\\\"#{File.basename(value.path)}\\\"#{crlf}\"",
"body",
"<<",
"\"Content-Type: image/jpeg#{crlf*2}\"",
"body",
"<<",
"open",
"(",
"value",
".",
"path",
",",
"\"rb\"",
")",
"{",
"|",
"io",
"|",
"io",
".",
"read",
"}",
"else",
"body",
"<<",
"\"Content-Disposition: form-data; name=\\\"#{esc_key}\\\"#{crlf*2}#{value}\"",
"end",
"body",
"<<",
"crlf",
"end",
"body",
"<<",
"\"--#{boundary}--#{crlf*2}\"",
"req",
".",
"body",
"=",
"body",
"req",
"[",
"\"Content-Length\"",
"]",
"=",
"req",
".",
"body",
".",
"size",
"end"
]
| Encodes the request as multipart | [
"Encodes",
"the",
"request",
"as",
"multipart"
]
| 4d20e0cedea197aa6400ac9e4c64c1a3587c9af2 | https://github.com/kytrinyx/etsy/blob/4d20e0cedea197aa6400ac9e4c64c1a3587c9af2/lib/etsy/secure_client.rb#L99-L119 | train |
Flipkart/multitenancy | lib/multitenancy/rack/filter.rb | Multitenancy.Filter.fix_headers! | def fix_headers!(env)
env.keys.select { |k| k =~ /^HTTP_X_/ }.each do |k|
env[k.gsub("HTTP_", "")] = env[k]
env.delete(k)
end
env
end | ruby | def fix_headers!(env)
env.keys.select { |k| k =~ /^HTTP_X_/ }.each do |k|
env[k.gsub("HTTP_", "")] = env[k]
env.delete(k)
end
env
end | [
"def",
"fix_headers!",
"(",
"env",
")",
"env",
".",
"keys",
".",
"select",
"{",
"|",
"k",
"|",
"k",
"=~",
"/",
"/",
"}",
".",
"each",
"do",
"|",
"k",
"|",
"env",
"[",
"k",
".",
"gsub",
"(",
"\"HTTP_\"",
",",
"\"\"",
")",
"]",
"=",
"env",
"[",
"k",
"]",
"env",
".",
"delete",
"(",
"k",
")",
"end",
"env",
"end"
]
| rack converts X_FOO to HTTP_X_FOO, so strip "HTTP_" | [
"rack",
"converts",
"X_FOO",
"to",
"HTTP_X_FOO",
"so",
"strip",
"HTTP_"
]
| cc91557edf0ccb2b92dabbfd88217ae6c498d4be | https://github.com/Flipkart/multitenancy/blob/cc91557edf0ccb2b92dabbfd88217ae6c498d4be/lib/multitenancy/rack/filter.rb#L19-L25 | train |
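A sketch of the header rewrite above applied to a hand-built Rack env hash (the tenant header name is hypothetical):

```ruby
env = {
  'HTTP_X_TENANT_ID' => 'acme',
  'HTTP_ACCEPT'      => 'application/json',
  'PATH_INFO'        => '/'
}

env.keys.select { |k| k =~ /^HTTP_X_/ }.each do |k|
  env[k.gsub('HTTP_', '')] = env.delete(k)
end

env #=> {"HTTP_ACCEPT"=>"application/json", "PATH_INFO"=>"/", "X_TENANT_ID"=>"acme"}
```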
stackbuilders/stub_shell | lib/stub_shell/shell.rb | StubShell.Shell.resolve | def resolve command_string
if detected_command = @commands.detect{|cmd| cmd.matches? command_string }
detected_command
elsif parent_context
parent_context.resolve(command_string)
else
raise "Command #{command_string} could not be resolved from the current context."
end
end | ruby | def resolve command_string
if detected_command = @commands.detect{|cmd| cmd.matches? command_string }
detected_command
elsif parent_context
parent_context.resolve(command_string)
else
raise "Command #{command_string} could not be resolved from the current context."
end
end | [
"def",
"resolve",
"command_string",
"if",
"detected_command",
"=",
"@commands",
".",
"detect",
"{",
"|",
"cmd",
"|",
"cmd",
".",
"matches?",
"command_string",
"}",
"detected_command",
"elsif",
"parent_context",
"parent_context",
".",
"resolve",
"(",
"command_string",
")",
"else",
"raise",
"\"Command #{command_string} could not be resolved from the current context.\"",
"end",
"end"
]
| Look in current context and recursively through any available parent contexts to
find definition of command. An Exception is raised if no implementation of command
is found. | [
"Look",
"in",
"current",
"context",
"and",
"recursively",
"through",
"any",
"available",
"parent",
"contexts",
"to",
"find",
"definition",
"of",
"command",
".",
"An",
"Exception",
"is",
"raised",
"if",
"no",
"implementation",
"of",
"command",
"is",
"found",
"."
]
| e54ad6b40be5982cb8c72a2bbfbe8f749241142c | https://github.com/stackbuilders/stub_shell/blob/e54ad6b40be5982cb8c72a2bbfbe8f749241142c/lib/stub_shell/shell.rb#L28-L36 | train |
cognitect/transit-ruby | lib/transit/decoder.rb | Transit.Decoder.decode | def decode(node, cache=RollingCache.new, as_map_key=false)
case node
when String
if cache.has_key?(node)
cache.read(node)
else
parsed = if !node.start_with?(ESC)
node
elsif node.start_with?(TAG)
Tag.new(node[2..-1])
elsif handler = @handlers[node[1]]
handler.from_rep(node[2..-1])
elsif node.start_with?(ESC_ESC, ESC_SUB, ESC_RES)
node[1..-1]
else
@default_handler.from_rep(node[1], node[2..-1])
end
if cache.cacheable?(node, as_map_key)
cache.write(parsed)
end
parsed
end
when Array
return node if node.empty?
e0 = decode(node.shift, cache, false)
if e0 == MAP_AS_ARRAY
decode(Hash[*node], cache)
elsif Tag === e0
v = decode(node.shift, cache)
if handler = @handlers[e0.value]
handler.from_rep(v)
else
@default_handler.from_rep(e0.value,v)
end
else
[e0] + node.map {|e| decode(e, cache, as_map_key)}
end
when Hash
if node.size == 1
k = decode(node.keys.first, cache, true)
v = decode(node.values.first, cache, false)
if Tag === k
if handler = @handlers[k.value]
handler.from_rep(v)
else
@default_handler.from_rep(k.value,v)
end
else
{k => v}
end
else
node.keys.each do |k|
node.store(decode(k, cache, true), decode(node.delete(k), cache))
end
node
end
else
node
end
end | ruby | def decode(node, cache=RollingCache.new, as_map_key=false)
case node
when String
if cache.has_key?(node)
cache.read(node)
else
parsed = if !node.start_with?(ESC)
node
elsif node.start_with?(TAG)
Tag.new(node[2..-1])
elsif handler = @handlers[node[1]]
handler.from_rep(node[2..-1])
elsif node.start_with?(ESC_ESC, ESC_SUB, ESC_RES)
node[1..-1]
else
@default_handler.from_rep(node[1], node[2..-1])
end
if cache.cacheable?(node, as_map_key)
cache.write(parsed)
end
parsed
end
when Array
return node if node.empty?
e0 = decode(node.shift, cache, false)
if e0 == MAP_AS_ARRAY
decode(Hash[*node], cache)
elsif Tag === e0
v = decode(node.shift, cache)
if handler = @handlers[e0.value]
handler.from_rep(v)
else
@default_handler.from_rep(e0.value,v)
end
else
[e0] + node.map {|e| decode(e, cache, as_map_key)}
end
when Hash
if node.size == 1
k = decode(node.keys.first, cache, true)
v = decode(node.values.first, cache, false)
if Tag === k
if handler = @handlers[k.value]
handler.from_rep(v)
else
@default_handler.from_rep(k.value,v)
end
else
{k => v}
end
else
node.keys.each do |k|
node.store(decode(k, cache, true), decode(node.delete(k), cache))
end
node
end
else
node
end
end | [
"def",
"decode",
"(",
"node",
",",
"cache",
"=",
"RollingCache",
".",
"new",
",",
"as_map_key",
"=",
"false",
")",
"case",
"node",
"when",
"String",
"if",
"cache",
".",
"has_key?",
"(",
"node",
")",
"cache",
".",
"read",
"(",
"node",
")",
"else",
"parsed",
"=",
"if",
"!",
"node",
".",
"start_with?",
"(",
"ESC",
")",
"node",
"elsif",
"node",
".",
"start_with?",
"(",
"TAG",
")",
"Tag",
".",
"new",
"(",
"node",
"[",
"2",
"..",
"-",
"1",
"]",
")",
"elsif",
"handler",
"=",
"@handlers",
"[",
"node",
"[",
"1",
"]",
"]",
"handler",
".",
"from_rep",
"(",
"node",
"[",
"2",
"..",
"-",
"1",
"]",
")",
"elsif",
"node",
".",
"start_with?",
"(",
"ESC_ESC",
",",
"ESC_SUB",
",",
"ESC_RES",
")",
"node",
"[",
"1",
"..",
"-",
"1",
"]",
"else",
"@default_handler",
".",
"from_rep",
"(",
"node",
"[",
"1",
"]",
",",
"node",
"[",
"2",
"..",
"-",
"1",
"]",
")",
"end",
"if",
"cache",
".",
"cacheable?",
"(",
"node",
",",
"as_map_key",
")",
"cache",
".",
"write",
"(",
"parsed",
")",
"end",
"parsed",
"end",
"when",
"Array",
"return",
"node",
"if",
"node",
".",
"empty?",
"e0",
"=",
"decode",
"(",
"node",
".",
"shift",
",",
"cache",
",",
"false",
")",
"if",
"e0",
"==",
"MAP_AS_ARRAY",
"decode",
"(",
"Hash",
"[",
"*",
"node",
"]",
",",
"cache",
")",
"elsif",
"Tag",
"===",
"e0",
"v",
"=",
"decode",
"(",
"node",
".",
"shift",
",",
"cache",
")",
"if",
"handler",
"=",
"@handlers",
"[",
"e0",
".",
"value",
"]",
"handler",
".",
"from_rep",
"(",
"v",
")",
"else",
"@default_handler",
".",
"from_rep",
"(",
"e0",
".",
"value",
",",
"v",
")",
"end",
"else",
"[",
"e0",
"]",
"+",
"node",
".",
"map",
"{",
"|",
"e",
"|",
"decode",
"(",
"e",
",",
"cache",
",",
"as_map_key",
")",
"}",
"end",
"when",
"Hash",
"if",
"node",
".",
"size",
"==",
"1",
"k",
"=",
"decode",
"(",
"node",
".",
"keys",
".",
"first",
",",
"cache",
",",
"true",
")",
"v",
"=",
"decode",
"(",
"node",
".",
"values",
".",
"first",
",",
"cache",
",",
"false",
")",
"if",
"Tag",
"===",
"k",
"if",
"handler",
"=",
"@handlers",
"[",
"k",
".",
"value",
"]",
"handler",
".",
"from_rep",
"(",
"v",
")",
"else",
"@default_handler",
".",
"from_rep",
"(",
"k",
".",
"value",
",",
"v",
")",
"end",
"else",
"{",
"k",
"=>",
"v",
"}",
"end",
"else",
"node",
".",
"keys",
".",
"each",
"do",
"|",
"k",
"|",
"node",
".",
"store",
"(",
"decode",
"(",
"k",
",",
"cache",
",",
"true",
")",
",",
"decode",
"(",
"node",
".",
"delete",
"(",
"k",
")",
",",
"cache",
")",
")",
"end",
"node",
"end",
"else",
"node",
"end",
"end"
]
| Decodes a transit value to a corresponding object
@param node a transit value to be decoded
@param cache
@param as_map_key
@return decoded object | [
"Decodes",
"a",
"transit",
"value",
"to",
"a",
"corresponding",
"object"
]
| b4973f8c21d44657da8b486dc855a7d6a8bdf5a0 | https://github.com/cognitect/transit-ruby/blob/b4973f8c21d44657da8b486dc855a7d6a8bdf5a0/lib/transit/decoder.rb#L58-L117 | train |
djezzzl/database_consistency | lib/database_consistency/helper.rb | DatabaseConsistency.Helper.parent_models | def parent_models
models.group_by(&:table_name).each_value.map do |models|
models.min_by { |model| models.include?(model.superclass) ? 1 : 0 }
end
end | ruby | def parent_models
models.group_by(&:table_name).each_value.map do |models|
models.min_by { |model| models.include?(model.superclass) ? 1 : 0 }
end
end | [
"def",
"parent_models",
"models",
".",
"group_by",
"(",
"&",
":table_name",
")",
".",
"each_value",
".",
"map",
"do",
"|",
"models",
"|",
"models",
".",
"min_by",
"{",
"|",
"model",
"|",
"models",
".",
"include?",
"(",
"model",
".",
"superclass",
")",
"?",
"1",
":",
"0",
"}",
"end",
"end"
]
| Return list of not inherited models | [
"Return",
"list",
"of",
"not",
"inherited",
"models"
]
| cac53c79dcd36284298b9c60a4c784eb184fd6bc | https://github.com/djezzzl/database_consistency/blob/cac53c79dcd36284298b9c60a4c784eb184fd6bc/lib/database_consistency/helper.rb#L14-L18 | train |
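The `min_by` above prefers the model whose superclass is not in the same table group, i.e. the STI base class; a sketch with hypothetical models (assumes ActiveRecord is available):

```ruby
require 'active_record'

class User  < ActiveRecord::Base; end  # hypothetical STI base, table "users"
class Admin < User; end                # STI subclass, same table

models = [Admin, User]

models.group_by(&:table_name).each_value.map do |group|
  # Admin.superclass (User) is in the group => 1; User.superclass is not => 0.
  group.min_by { |model| group.include?(model.superclass) ? 1 : 0 }
end
#=> [User]
```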
cloudfoundry/vcap-common | lib/vcap/subprocess.rb | VCAP.Subprocess.run | def run(command, expected_exit_status=0, timeout=nil, options={}, env={})
# We use a pipe to ourself to time out long running commands (if desired) as follows:
# 1. Set up a pipe to ourselves
# 2. Install a signal handler that writes to one end of our pipe on SIGCHLD
# 3. Select on the read end of our pipe and check if our process exited
sigchld_r, sigchld_w = IO.pipe
prev_sigchld_handler = install_sigchld_handler(sigchld_w)
start = Time.now.to_i
child_pid, stdin, stdout, stderr = POSIX::Spawn.popen4(env, command, options)
stdin.close
# Used to look up the name of an io object when an errors occurs while
# reading from it, as well as to look up the corresponding buffer to
# append to.
io_map = {
stderr => { :name => 'stderr', :buf => '' },
stdout => { :name => 'stdout', :buf => '' },
sigchld_r => { :name => 'sigchld_r', :buf => '' },
sigchld_w => { :name => 'sigchld_w', :buf => '' },
}
status = nil
time_left = timeout
read_cands = [stdout, stderr, sigchld_r]
error_cands = read_cands.dup
begin
while read_cands.length > 0
active_ios = IO.select(read_cands, nil, error_cands, time_left)
# Check if timeout was hit
if timeout
time_left = timeout - (Time.now.to_i - start)
unless active_ios && (time_left > 0)
raise VCAP::SubprocessTimeoutError.new(timeout,
command,
io_map[stdout][:buf],
io_map[stderr][:buf])
end
end
# Read as much as we can from the readable ios before blocking
for io in active_ios[0]
begin
io_map[io][:buf] << io.read_nonblock(READ_SIZE)
rescue IO::WaitReadable
# Reading would block, so put ourselves back on the loop
rescue EOFError
# Pipe has no more data, remove it from the readable/error set
# NB: We cannot break from the loop here, as the other pipes may have data to be read
read_cands.delete(io)
error_cands.delete(io)
end
# Our signal handler notified us that >= 1 children have exited;
# check if our child has exited.
if (io == sigchld_r) && Process.waitpid(child_pid, Process::WNOHANG)
status = $?
read_cands.delete(sigchld_r)
error_cands.delete(sigchld_r)
end
end
# Error reading from one or more pipes.
unless active_ios[2].empty?
io_names = active_ios[2].map {|io| io_map[io][:name] }
raise SubprocessReadError.new(io_names.join(', '),
command,
io_map[stdout][:buf],
io_map[stderr][:buf])
end
end
rescue
# A timeout or an error occurred while reading from one or more pipes.
# Kill the process if we haven't reaped its exit status already.
kill_pid(child_pid) unless status
raise
ensure
# Make sure we reap the child's exit status, close our fds, and restore
# the previous SIGCHLD handler
unless status
Process.waitpid(child_pid)
status = $?
end
io_map.each_key {|io| io.close unless io.closed? }
trap('CLD') { prev_sigchld_handler.call } if prev_sigchld_handler
end
unless status.exitstatus == expected_exit_status
raise SubprocessStatusError.new(command,
io_map[stdout][:buf],
io_map[stderr][:buf],
status)
end
[io_map[stdout][:buf], io_map[stderr][:buf], status]
end | ruby | def run(command, expected_exit_status=0, timeout=nil, options={}, env={})
# We use a pipe to ourself to time out long running commands (if desired) as follows:
# 1. Set up a pipe to ourselves
# 2. Install a signal handler that writes to one end of our pipe on SIGCHLD
# 3. Select on the read end of our pipe and check if our process exited
sigchld_r, sigchld_w = IO.pipe
prev_sigchld_handler = install_sigchld_handler(sigchld_w)
start = Time.now.to_i
child_pid, stdin, stdout, stderr = POSIX::Spawn.popen4(env, command, options)
stdin.close
# Used to look up the name of an io object when an errors occurs while
# reading from it, as well as to look up the corresponding buffer to
# append to.
io_map = {
stderr => { :name => 'stderr', :buf => '' },
stdout => { :name => 'stdout', :buf => '' },
sigchld_r => { :name => 'sigchld_r', :buf => '' },
sigchld_w => { :name => 'sigchld_w', :buf => '' },
}
status = nil
time_left = timeout
read_cands = [stdout, stderr, sigchld_r]
error_cands = read_cands.dup
begin
while read_cands.length > 0
active_ios = IO.select(read_cands, nil, error_cands, time_left)
# Check if timeout was hit
if timeout
time_left = timeout - (Time.now.to_i - start)
unless active_ios && (time_left > 0)
raise VCAP::SubprocessTimeoutError.new(timeout,
command,
io_map[stdout][:buf],
io_map[stderr][:buf])
end
end
# Read as much as we can from the readable ios before blocking
for io in active_ios[0]
begin
io_map[io][:buf] << io.read_nonblock(READ_SIZE)
rescue IO::WaitReadable
# Reading would block, so put ourselves back on the loop
rescue EOFError
# Pipe has no more data, remove it from the readable/error set
# NB: We cannot break from the loop here, as the other pipes may have data to be read
read_cands.delete(io)
error_cands.delete(io)
end
# Our signal handler notified us that >= 1 children have exited;
# check if our child has exited.
if (io == sigchld_r) && Process.waitpid(child_pid, Process::WNOHANG)
status = $?
read_cands.delete(sigchld_r)
error_cands.delete(sigchld_r)
end
end
# Error reading from one or more pipes.
unless active_ios[2].empty?
io_names = active_ios[2].map {|io| io_map[io][:name] }
raise SubprocessReadError.new(io_names.join(', '),
command,
io_map[stdout][:buf],
io_map[stderr][:buf])
end
end
rescue
# A timeout or an error occurred while reading from one or more pipes.
# Kill the process if we haven't reaped its exit status already.
kill_pid(child_pid) unless status
raise
ensure
# Make sure we reap the child's exit status, close our fds, and restore
# the previous SIGCHLD handler
unless status
Process.waitpid(child_pid)
status = $?
end
io_map.each_key {|io| io.close unless io.closed? }
trap('CLD') { prev_sigchld_handler.call } if prev_sigchld_handler
end
unless status.exitstatus == expected_exit_status
raise SubprocessStatusError.new(command,
io_map[stdout][:buf],
io_map[stderr][:buf],
status)
end
[io_map[stdout][:buf], io_map[stderr][:buf], status]
end | [
"def",
"run",
"(",
"command",
",",
"expected_exit_status",
"=",
"0",
",",
"timeout",
"=",
"nil",
",",
"options",
"=",
"{",
"}",
",",
"env",
"=",
"{",
"}",
")",
"sigchld_r",
",",
"sigchld_w",
"=",
"IO",
".",
"pipe",
"prev_sigchld_handler",
"=",
"install_sigchld_handler",
"(",
"sigchld_w",
")",
"start",
"=",
"Time",
".",
"now",
".",
"to_i",
"child_pid",
",",
"stdin",
",",
"stdout",
",",
"stderr",
"=",
"POSIX",
"::",
"Spawn",
".",
"popen4",
"(",
"env",
",",
"command",
",",
"options",
")",
"stdin",
".",
"close",
"io_map",
"=",
"{",
"stderr",
"=>",
"{",
":name",
"=>",
"'stderr'",
",",
":buf",
"=>",
"''",
"}",
",",
"stdout",
"=>",
"{",
":name",
"=>",
"'stdout'",
",",
":buf",
"=>",
"''",
"}",
",",
"sigchld_r",
"=>",
"{",
":name",
"=>",
"'sigchld_r'",
",",
":buf",
"=>",
"''",
"}",
",",
"sigchld_w",
"=>",
"{",
":name",
"=>",
"'sigchld_w'",
",",
":buf",
"=>",
"''",
"}",
",",
"}",
"status",
"=",
"nil",
"time_left",
"=",
"timeout",
"read_cands",
"=",
"[",
"stdout",
",",
"stderr",
",",
"sigchld_r",
"]",
"error_cands",
"=",
"read_cands",
".",
"dup",
"begin",
"while",
"read_cands",
".",
"length",
">",
"0",
"active_ios",
"=",
"IO",
".",
"select",
"(",
"read_cands",
",",
"nil",
",",
"error_cands",
",",
"time_left",
")",
"if",
"timeout",
"time_left",
"=",
"timeout",
"-",
"(",
"Time",
".",
"now",
".",
"to_i",
"-",
"start",
")",
"unless",
"active_ios",
"&&",
"(",
"time_left",
">",
"0",
")",
"raise",
"VCAP",
"::",
"SubprocessTimeoutError",
".",
"new",
"(",
"timeout",
",",
"command",
",",
"io_map",
"[",
"stdout",
"]",
"[",
":buf",
"]",
",",
"io_map",
"[",
"stderr",
"]",
"[",
":buf",
"]",
")",
"end",
"end",
"for",
"io",
"in",
"active_ios",
"[",
"0",
"]",
"begin",
"io_map",
"[",
"io",
"]",
"[",
":buf",
"]",
"<<",
"io",
".",
"read_nonblock",
"(",
"READ_SIZE",
")",
"rescue",
"IO",
"::",
"WaitReadable",
"rescue",
"EOFError",
"read_cands",
".",
"delete",
"(",
"io",
")",
"error_cands",
".",
"delete",
"(",
"io",
")",
"end",
"if",
"(",
"io",
"==",
"sigchld_r",
")",
"&&",
"Process",
".",
"waitpid",
"(",
"child_pid",
",",
"Process",
"::",
"WNOHANG",
")",
"status",
"=",
"$?",
"read_cands",
".",
"delete",
"(",
"sigchld_r",
")",
"error_cands",
".",
"delete",
"(",
"sigchld_r",
")",
"end",
"end",
"unless",
"active_ios",
"[",
"2",
"]",
".",
"empty?",
"io_names",
"=",
"active_ios",
"[",
"2",
"]",
".",
"map",
"{",
"|",
"io",
"|",
"io_map",
"[",
"io",
"]",
"[",
":name",
"]",
"}",
"raise",
"SubprocessReadError",
".",
"new",
"(",
"io_names",
".",
"join",
"(",
"', '",
")",
",",
"command",
",",
"io_map",
"[",
"stdout",
"]",
"[",
":buf",
"]",
",",
"io_map",
"[",
"stderr",
"]",
"[",
":buf",
"]",
")",
"end",
"end",
"rescue",
"kill_pid",
"(",
"child_pid",
")",
"unless",
"status",
"raise",
"ensure",
"unless",
"status",
"Process",
".",
"waitpid",
"(",
"child_pid",
")",
"status",
"=",
"$?",
"end",
"io_map",
".",
"each_key",
"{",
"|",
"io",
"|",
"io",
".",
"close",
"unless",
"io",
".",
"closed?",
"}",
"trap",
"(",
"'CLD'",
")",
"{",
"prev_sigchld_handler",
".",
"call",
"}",
"if",
"prev_sigchld_handler",
"end",
"unless",
"status",
".",
"exitstatus",
"==",
"expected_exit_status",
"raise",
"SubprocessStatusError",
".",
"new",
"(",
"command",
",",
"io_map",
"[",
"stdout",
"]",
"[",
":buf",
"]",
",",
"io_map",
"[",
"stderr",
"]",
"[",
":buf",
"]",
",",
"status",
")",
"end",
"[",
"io_map",
"[",
"stdout",
"]",
"[",
":buf",
"]",
",",
"io_map",
"[",
"stderr",
"]",
"[",
":buf",
"]",
",",
"status",
"]",
"end"
]
| Runs the supplied command in a subshell.
@param command String The command to be run
@param expected_exit_status Integer The expected exit status of the command in [0, 255]
@param timeout Integer How long the command should be allowed to run for
nil indicates no timeout
@param options Hash Options to be passed to Posix::Spawn
See https://github.com/rtomayko/posix-spawn
@param env Hash Environment to be passed to Posix::Spawn
See https://github.com/rtomayko/posix-spawn
@raise VCAP::SubprocessStatusError Thrown if the exit status does not match the expected
exit status.
@raise VCAP::SubprocessTimeoutError Thrown if a timeout occurs.
@raise VCAP::SubprocessReadError Thrown if there is an error reading from any of the pipes
to the child.
@return Array An array of [stdout, stderr, status]. Note that status
is an instance of Process::Status. | [
"Runs",
"the",
"supplied",
"command",
"in",
"a",
"subshell",
"."
]
| 8d2825c7c678ffa3cf1854a635c7c4722fd054e5 | https://github.com/cloudfoundry/vcap-common/blob/8d2825c7c678ffa3cf1854a635c7c4722fd054e5/lib/vcap/subprocess.rb#L85-L184 | train |
LeakyBucket/google_apps | lib/google_apps/document_handler.rb | GoogleApps.DocumentHandler.create_doc | def create_doc(text, type = nil)
@documents.include?(type) ? doc_of_type(text, type) : unknown_type(text)
end | ruby | def create_doc(text, type = nil)
@documents.include?(type) ? doc_of_type(text, type) : unknown_type(text)
end | [
"def",
"create_doc",
"(",
"text",
",",
"type",
"=",
"nil",
")",
"@documents",
".",
"include?",
"(",
"type",
")",
"?",
"doc_of_type",
"(",
"text",
",",
"type",
")",
":",
"unknown_type",
"(",
"text",
")",
"end"
]
| create_doc creates a document of the specified format
from the given string. | [
"create_doc",
"creates",
"a",
"document",
"of",
"the",
"specified",
"format",
"from",
"the",
"given",
"string",
"."
]
| 5fb2cdf8abe0e92f86d321460ab392a2a2276d09 | https://github.com/LeakyBucket/google_apps/blob/5fb2cdf8abe0e92f86d321460ab392a2a2276d09/lib/google_apps/document_handler.rb#L9-L11 | train |
LeakyBucket/google_apps | lib/google_apps/document_handler.rb | GoogleApps.DocumentHandler.doc_of_type | def doc_of_type(text, type)
raise "No Atom document of type: #{type}" unless @documents.include?(type.to_s)
GoogleApps::Atom.send(type, text)
end | ruby | def doc_of_type(text, type)
raise "No Atom document of type: #{type}" unless @documents.include?(type.to_s)
GoogleApps::Atom.send(type, text)
end | [
"def",
"doc_of_type",
"(",
"text",
",",
"type",
")",
"raise",
"\"No Atom document of type: #{type}\"",
"unless",
"@documents",
".",
"include?",
"(",
"type",
".",
"to_s",
")",
"GoogleApps",
"::",
"Atom",
".",
"send",
"(",
"type",
",",
"text",
")",
"end"
]
| doc_of_type takes a document type and a string and
returns a document of that type in the current format. | [
"doc_of_type",
"takes",
"a",
"document",
"type",
"and",
"a",
"string",
"and",
"returns",
"a",
"document",
"of",
"that",
"type",
"in",
"the",
"current",
"format",
"."
]
| 5fb2cdf8abe0e92f86d321460ab392a2a2276d09 | https://github.com/LeakyBucket/google_apps/blob/5fb2cdf8abe0e92f86d321460ab392a2a2276d09/lib/google_apps/document_handler.rb#L21-L25 | train |
LeakyBucket/google_apps | lib/google_apps/client.rb | GoogleApps.Client.export_status | def export_status(username, id)
response = make_request(:get, URI(export + "/#{username}" + build_id(id)).to_s, headers: {'content-type' => 'application/atom+xml'})
create_doc(response.body, :export_status)
end | ruby | def export_status(username, id)
response = make_request(:get, URI(export + "/#{username}" + build_id(id)).to_s, headers: {'content-type' => 'application/atom+xml'})
create_doc(response.body, :export_status)
end | [
"def",
"export_status",
"(",
"username",
",",
"id",
")",
"response",
"=",
"make_request",
"(",
":get",
",",
"URI",
"(",
"export",
"+",
"\"/#{username}\"",
"+",
"build_id",
"(",
"id",
")",
")",
".",
"to_s",
",",
"headers",
":",
"{",
"'content-type'",
"=>",
"'application/atom+xml'",
"}",
")",
"create_doc",
"(",
"response",
".",
"body",
",",
":export_status",
")",
"end"
]
| export_status checks the status of a mailbox export
request. It takes the username and the request_id
as arguments
export_status 'username', 847576
export_status will return the body of the HTTP response
from Google | [
"export_status",
"checks",
"the",
"status",
"of",
"a",
"mailbox",
"export",
"request",
".",
"It",
"takes",
"the",
"username",
"and",
"the",
"request_id",
"as",
"arguments"
]
| 5fb2cdf8abe0e92f86d321460ab392a2a2276d09 | https://github.com/LeakyBucket/google_apps/blob/5fb2cdf8abe0e92f86d321460ab392a2a2276d09/lib/google_apps/client.rb#L42-L45 | train |
LeakyBucket/google_apps | lib/google_apps/client.rb | GoogleApps.Client.fetch_export | def fetch_export(username, req_id, filename)
export_status_doc = export_status(username, req_id)
if export_ready?(export_status_doc)
download_export(export_status_doc, filename).each_with_index { |url, index| url.gsub!(/.*/, "#{filename}#{index}") }
else
nil
end
end | ruby | def fetch_export(username, req_id, filename)
export_status_doc = export_status(username, req_id)
if export_ready?(export_status_doc)
download_export(export_status_doc, filename).each_with_index { |url, index| url.gsub!(/.*/, "#{filename}#{index}") }
else
nil
end
end | [
"def",
"fetch_export",
"(",
"username",
",",
"req_id",
",",
"filename",
")",
"export_status_doc",
"=",
"export_status",
"(",
"username",
",",
"req_id",
")",
"if",
"export_ready?",
"(",
"export_status_doc",
")",
"download_export",
"(",
"export_status_doc",
",",
"filename",
")",
".",
"each_with_index",
"{",
"|",
"url",
",",
"index",
"|",
"url",
".",
"gsub!",
"(",
"/",
"/",
",",
"\"#{filename}#{index}\"",
")",
"}",
"else",
"nil",
"end",
"end"
]
| fetch_export downloads the mailbox export from Google.
It takes a username, request id and a filename as
arguments. If the export consists of more than one file
the file name will have numbers appended to indicate the
piece of the export.
fetch_export 'lholcomb2', 838382, 'lholcomb2'
fetch_export returns nil in the event that the export is
not yet ready. | [
"fetch_export",
"downloads",
"the",
"mailbox",
"export",
"from",
"Google",
".",
"It",
"takes",
"a",
"username",
"request",
"id",
"and",
"a",
"filename",
"as",
"arguments",
".",
"If",
"the",
"export",
"consists",
"of",
"more",
"than",
"one",
"file",
"the",
"file",
"name",
"will",
"have",
"numbers",
"appended",
"to",
"indicate",
"the",
"piece",
"of",
"the",
"export",
"."
]
| 5fb2cdf8abe0e92f86d321460ab392a2a2276d09 | https://github.com/LeakyBucket/google_apps/blob/5fb2cdf8abe0e92f86d321460ab392a2a2276d09/lib/google_apps/client.rb#L74-L81 | train |
LeakyBucket/google_apps | lib/google_apps/client.rb | GoogleApps.Client.download | def download(url, filename)
File.open(filename, "w") do |file|
file.puts(make_request(:get, url, headers: {'content-type' => 'application/atom+xml'}).body)
end
end | ruby | def download(url, filename)
File.open(filename, "w") do |file|
file.puts(make_request(:get, url, headers: {'content-type' => 'application/atom+xml'}).body)
end
end | [
"def",
"download",
"(",
"url",
",",
"filename",
")",
"File",
".",
"open",
"(",
"filename",
",",
"\"w\"",
")",
"do",
"|",
"file",
"|",
"file",
".",
"puts",
"(",
"make_request",
"(",
":get",
",",
"url",
",",
"headers",
":",
"{",
"'content-type'",
"=>",
"'application/atom+xml'",
"}",
")",
".",
"body",
")",
"end",
"end"
]
| download makes a get request of the provided url
and writes the body to the provided filename.
download 'url', 'save_file' | [
"download",
"makes",
"a",
"get",
"request",
"of",
"the",
"provided",
"url",
"and",
"writes",
"the",
"body",
"to",
"the",
"provided",
"filename",
"."
]
| 5fb2cdf8abe0e92f86d321460ab392a2a2276d09 | https://github.com/LeakyBucket/google_apps/blob/5fb2cdf8abe0e92f86d321460ab392a2a2276d09/lib/google_apps/client.rb#L87-L91 | train |
LeakyBucket/google_apps | lib/google_apps/client.rb | GoogleApps.Client.get_groups | def get_groups(options = {})
limit = options[:limit] || 1000000
response = make_request(:get, group + "#{options[:extra]}" + "?startGroup=#{options[:start]}", headers: {'content-type' => 'application/atom+xml'})
pages = fetch_pages(response, limit, :feed)
return_all(pages)
end | ruby | def get_groups(options = {})
limit = options[:limit] || 1000000
response = make_request(:get, group + "#{options[:extra]}" + "?startGroup=#{options[:start]}", headers: {'content-type' => 'application/atom+xml'})
pages = fetch_pages(response, limit, :feed)
return_all(pages)
end | [
"def",
"get_groups",
"(",
"options",
"=",
"{",
"}",
")",
"limit",
"=",
"options",
"[",
":limit",
"]",
"||",
"1000000",
"response",
"=",
"make_request",
"(",
":get",
",",
"group",
"+",
"\"#{options[:extra]}\"",
"+",
"\"?startGroup=#{options[:start]}\"",
",",
"headers",
":",
"{",
"'content-type'",
"=>",
"'application/atom+xml'",
"}",
")",
"pages",
"=",
"fetch_pages",
"(",
"response",
",",
"limit",
",",
":feed",
")",
"return_all",
"(",
"pages",
")",
"end"
]
| get_groups retrieves all the groups from the domain
get_groups
get_groups returns the final response from Google. | [
"get_groups",
"retrieves",
"all",
"the",
"groups",
"from",
"the",
"domain"
]
| 5fb2cdf8abe0e92f86d321460ab392a2a2276d09 | https://github.com/LeakyBucket/google_apps/blob/5fb2cdf8abe0e92f86d321460ab392a2a2276d09/lib/google_apps/client.rb#L114-L120 | train |
LeakyBucket/google_apps | lib/google_apps/client.rb | GoogleApps.Client.get_next_page | def get_next_page(next_page_url, type)
response = make_request(:get, next_page_url, headers: {'content-type' => 'application/atom+xml'})
GoogleApps::Atom.feed(response.body)
end | ruby | def get_next_page(next_page_url, type)
response = make_request(:get, next_page_url, headers: {'content-type' => 'application/atom+xml'})
GoogleApps::Atom.feed(response.body)
end | [
"def",
"get_next_page",
"(",
"next_page_url",
",",
"type",
")",
"response",
"=",
"make_request",
"(",
":get",
",",
"next_page_url",
",",
"headers",
":",
"{",
"'content-type'",
"=>",
"'application/atom+xml'",
"}",
")",
"GoogleApps",
"::",
"Atom",
".",
"feed",
"(",
"response",
".",
"body",
")",
"end"
]
| get_next_page retrieves the next page in the response. | [
"get_next_page",
"retrieves",
"the",
"next",
"page",
"in",
"the",
"response",
"."
]
| 5fb2cdf8abe0e92f86d321460ab392a2a2276d09 | https://github.com/LeakyBucket/google_apps/blob/5fb2cdf8abe0e92f86d321460ab392a2a2276d09/lib/google_apps/client.rb#L285-L288 | train |
LeakyBucket/google_apps | lib/google_apps/client.rb | GoogleApps.Client.fetch_pages | def fetch_pages(response, limit, type)
pages = [GoogleApps::Atom.feed(response.body)]
while (pages.last.next_page) and (pages.count * PAGE_SIZE[:user] < limit)
pages << get_next_page(pages.last.next_page, type)
end
pages
end | ruby | def fetch_pages(response, limit, type)
pages = [GoogleApps::Atom.feed(response.body)]
while (pages.last.next_page) and (pages.count * PAGE_SIZE[:user] < limit)
pages << get_next_page(pages.last.next_page, type)
end
pages
end | [
"def",
"fetch_pages",
"(",
"response",
",",
"limit",
",",
"type",
")",
"pages",
"=",
"[",
"GoogleApps",
"::",
"Atom",
".",
"feed",
"(",
"response",
".",
"body",
")",
"]",
"while",
"(",
"pages",
".",
"last",
".",
"next_page",
")",
"and",
"(",
"pages",
".",
"count",
"*",
"PAGE_SIZE",
"[",
":user",
"]",
"<",
"limit",
")",
"pages",
"<<",
"get_next_page",
"(",
"pages",
".",
"last",
".",
"next_page",
",",
"type",
")",
"end",
"pages",
"end"
]
| fetch_feed retrieves the remaining pages in the request.
It takes a page and a limit as arguments. | [
"fetch_feed",
"retrieves",
"the",
"remaining",
"pages",
"in",
"the",
"request",
".",
"It",
"takes",
"a",
"page",
"and",
"a",
"limit",
"as",
"arguments",
"."
]
| 5fb2cdf8abe0e92f86d321460ab392a2a2276d09 | https://github.com/LeakyBucket/google_apps/blob/5fb2cdf8abe0e92f86d321460ab392a2a2276d09/lib/google_apps/client.rb#L292-L299 | train |
samuelgiles/duckface | lib/duckface/parameter_pair.rb | Duckface.ParameterPair.argument_name_without_leading_underscore | def argument_name_without_leading_underscore
name = if argument_name_string[FIRST_CHARACTER] == UNDERSCORE
argument_name_string.reverse.chop.reverse
else
argument_name_string
end
name.to_sym
end | ruby | def argument_name_without_leading_underscore
name = if argument_name_string[FIRST_CHARACTER] == UNDERSCORE
argument_name_string.reverse.chop.reverse
else
argument_name_string
end
name.to_sym
end | [
"def",
"argument_name_without_leading_underscore",
"name",
"=",
"if",
"argument_name_string",
"[",
"FIRST_CHARACTER",
"]",
"==",
"UNDERSCORE",
"argument_name_string",
".",
"reverse",
".",
"chop",
".",
"reverse",
"else",
"argument_name_string",
"end",
"name",
".",
"to_sym",
"end"
]
| Leading underscores are used to indicate a parameter isn't used | [
"Leading",
"underscores",
"are",
"used",
"to",
"indicate",
"a",
"parameter",
"isn",
"t",
"used"
]
| c297c1f8abb5dfaea7009da06c7d6026811a08ea | https://github.com/samuelgiles/duckface/blob/c297c1f8abb5dfaea7009da06c7d6026811a08ea/lib/duckface/parameter_pair.rb#L20-L27 | train |
conduit/conduit | lib/conduit/cli.rb | Conduit.CLI.copy_files | def copy_files
files_to_copy.each do |origin, destination|
template(origin, destination, force: true)
end
end | ruby | def copy_files
files_to_copy.each do |origin, destination|
template(origin, destination, force: true)
end
end | [
"def",
"copy_files",
"files_to_copy",
".",
"each",
"do",
"|",
"origin",
",",
"destination",
"|",
"template",
"(",
"origin",
",",
"destination",
",",
"force",
":",
"true",
")",
"end",
"end"
]
| Copy template files | [
"Copy",
"template",
"files"
]
| 34546f71d59eb30ecc4b3172ee4459a8b37dd5ba | https://github.com/conduit/conduit/blob/34546f71d59eb30ecc4b3172ee4459a8b37dd5ba/lib/conduit/cli.rb#L103-L107 | train |
conduit/conduit | lib/conduit/cli.rb | Conduit.CLI.modify_files | def modify_files
gemspec_file = "#{@base_path}/conduit-#{@dasherized_name}.gemspec"
# add gemspec dependencies
str = " # Dependencies\n"\
" #\n"\
" spec.add_dependency \"conduit\", \"~> 1.0.6\"\n"\
" # xml parser\n"\
" spec.add_dependency \"nokogiri\"\n\n"\
" # Development Dependencies\n"\
" #\n"\
" # to compare xml files in tests\n"\
" spec.add_development_dependency \"equivalent-xml\"\n"\
" spec.add_development_dependency \"rspec-its\"\n"\
" # for building CLI\n"\
" spec.add_development_dependency \"thor\"\n"\
" # for debugging\n"\
" spec.add_development_dependency \"byebug\"\n"
insert_into_file gemspec_file, str, after: "spec.require_paths = [\"lib\"]\n\n"
# remove description
gsub_file(gemspec_file, /spec\.description(.*)\n/, "")
# update summary
new_summary = "spec.summary = \"#{ActiveSupport::Inflector.humanize @underscored_name} Driver for Conduit\""
gsub_file(gemspec_file, /spec\.summary(.*)/, new_summary)
# update homepage
new_homepage = "spec.homepage = \"http://www.github.com/conduit/conduit-#{@dasherized_name}\""
gsub_file(gemspec_file, /spec\.homepage(.*)/, new_homepage)
end | ruby | def modify_files
gemspec_file = "#{@base_path}/conduit-#{@dasherized_name}.gemspec"
# add gemspec dependencies
str = " # Dependencies\n"\
" #\n"\
" spec.add_dependency \"conduit\", \"~> 1.0.6\"\n"\
" # xml parser\n"\
" spec.add_dependency \"nokogiri\"\n\n"\
" # Development Dependencies\n"\
" #\n"\
" # to compare xml files in tests\n"\
" spec.add_development_dependency \"equivalent-xml\"\n"\
" spec.add_development_dependency \"rspec-its\"\n"\
" # for building CLI\n"\
" spec.add_development_dependency \"thor\"\n"\
" # for debugging\n"\
" spec.add_development_dependency \"byebug\"\n"
insert_into_file gemspec_file, str, after: "spec.require_paths = [\"lib\"]\n\n"
# remove description
gsub_file(gemspec_file, /spec\.description(.*)\n/, "")
# update summary
new_summary = "spec.summary = \"#{ActiveSupport::Inflector.humanize @underscored_name} Driver for Conduit\""
gsub_file(gemspec_file, /spec\.summary(.*)/, new_summary)
# update homepage
new_homepage = "spec.homepage = \"http://www.github.com/conduit/conduit-#{@dasherized_name}\""
gsub_file(gemspec_file, /spec\.homepage(.*)/, new_homepage)
end | [
"def",
"modify_files",
"gemspec_file",
"=",
"\"#{@base_path}/conduit-#{@dasherized_name}.gemspec\"",
"str",
"=",
"\" # Dependencies\\n\"",
"\" #\\n\"",
"\" spec.add_dependency \\\"conduit\\\", \\\"~> 1.0.6\\\"\\n\"",
"\" # xml parser\\n\"",
"\" spec.add_dependency \\\"nokogiri\\\"\\n\\n\"",
"\" # Development Dependencies\\n\"",
"\" #\\n\"",
"\" # to compare xml files in tests\\n\"",
"\" spec.add_development_dependency \\\"equivalent-xml\\\"\\n\"",
"\" spec.add_development_dependency \\\"rspec-its\\\"\\n\"",
"\" # for building CLI\\n\"",
"\" spec.add_development_dependency \\\"thor\\\"\\n\"",
"\" # for debugging\\n\"",
"\" spec.add_development_dependency \\\"byebug\\\"\\n\"",
"insert_into_file",
"gemspec_file",
",",
"str",
",",
"after",
":",
"\"spec.require_paths = [\\\"lib\\\"]\\n\\n\"",
"gsub_file",
"(",
"gemspec_file",
",",
"/",
"\\.",
"\\n",
"/",
",",
"\"\"",
")",
"new_summary",
"=",
"\"spec.summary = \\\"#{ActiveSupport::Inflector.humanize @underscored_name} Driver for Conduit\\\"\"",
"gsub_file",
"(",
"gemspec_file",
",",
"/",
"\\.",
"/",
",",
"new_summary",
")",
"new_homepage",
"=",
"\"spec.homepage = \\\"http://www.github.com/conduit/conduit-#{@dasherized_name}\\\"\"",
"gsub_file",
"(",
"gemspec_file",
",",
"/",
"\\.",
"/",
",",
"new_homepage",
")",
"end"
]
| Adds missing lines to the files | [
"Adds",
"missing",
"lines",
"to",
"the",
"files"
]
| 34546f71d59eb30ecc4b3172ee4459a8b37dd5ba | https://github.com/conduit/conduit/blob/34546f71d59eb30ecc4b3172ee4459a8b37dd5ba/lib/conduit/cli.rb#L110-L141 | train |
makersacademy/pipekit | lib/pipekit/deal.rb | Pipekit.Deal.update_by_person | def update_by_person(email, params, person_repo: Person.new)
person = person_repo.find_exactly_by_email(email)
deal = get_by_person_id(person[:id], person_repo: person_repo).first
update(deal[:id], params)
end | ruby | def update_by_person(email, params, person_repo: Person.new)
person = person_repo.find_exactly_by_email(email)
deal = get_by_person_id(person[:id], person_repo: person_repo).first
update(deal[:id], params)
end | [
"def",
"update_by_person",
"(",
"email",
",",
"params",
",",
"person_repo",
":",
"Person",
".",
"new",
")",
"person",
"=",
"person_repo",
".",
"find_exactly_by_email",
"(",
"email",
")",
"deal",
"=",
"get_by_person_id",
"(",
"person",
"[",
":id",
"]",
",",
"person_repo",
":",
"person_repo",
")",
".",
"first",
"update",
"(",
"deal",
"[",
":id",
"]",
",",
"params",
")",
"end"
]
| Finds a person by their email, then finds the first deal related to that
person and updates it with the params provided | [
"Finds",
"a",
"person",
"by",
"their",
"email",
"then",
"finds",
"the",
"first",
"deal",
"related",
"to",
"that",
"person",
"and",
"updates",
"it",
"with",
"the",
"params",
"provided"
]
| ac8a0e6adbc875637cc87587fa4d4795927b1f11 | https://github.com/makersacademy/pipekit/blob/ac8a0e6adbc875637cc87587fa4d4795927b1f11/lib/pipekit/deal.rb#L14-L18 | train |
pjotrp/bioruby-alignment | lib/bio-alignment/tree.rb | Bio.Tree.clone_subtree | def clone_subtree start_node
new_tree = self.class.new
list = [start_node] + start_node.descendents
list.each do |x|
new_tree.add_node(x)
end
each_edge do |node1, node2, edge|
if new_tree.include?(node1) and new_tree.include?(node2)
new_tree.add_edge(node1, node2, edge)
end
end
new_tree
end | ruby | def clone_subtree start_node
new_tree = self.class.new
list = [start_node] + start_node.descendents
list.each do |x|
new_tree.add_node(x)
end
each_edge do |node1, node2, edge|
if new_tree.include?(node1) and new_tree.include?(node2)
new_tree.add_edge(node1, node2, edge)
end
end
new_tree
end | [
"def",
"clone_subtree",
"start_node",
"new_tree",
"=",
"self",
".",
"class",
".",
"new",
"list",
"=",
"[",
"start_node",
"]",
"+",
"start_node",
".",
"descendents",
"list",
".",
"each",
"do",
"|",
"x",
"|",
"new_tree",
".",
"add_node",
"(",
"x",
")",
"end",
"each_edge",
"do",
"|",
"node1",
",",
"node2",
",",
"edge",
"|",
"if",
"new_tree",
".",
"include?",
"(",
"node1",
")",
"and",
"new_tree",
".",
"include?",
"(",
"node2",
")",
"new_tree",
".",
"add_edge",
"(",
"node1",
",",
"node2",
",",
"edge",
")",
"end",
"end",
"new_tree",
"end"
]
| Create a deep clone of the tree | [
"Create",
"a",
"deep",
"clone",
"of",
"the",
"tree"
]
| 39430b55a6cfcb39f057ad696da2966a3d8c3068 | https://github.com/pjotrp/bioruby-alignment/blob/39430b55a6cfcb39f057ad696da2966a3d8c3068/lib/bio-alignment/tree.rb#L128-L140 | train |
pjotrp/bioruby-alignment | lib/bio-alignment/tree.rb | Bio.Tree.clone_tree_without_branch | def clone_tree_without_branch node
new_tree = self.class.new
original = [root] + root.descendents
# p "Original",original
skip = [node] + node.descendents
# p "Skip",skip
# p "Retain",root.descendents - skip
nodes.each do |x|
if not skip.include?(x)
new_tree.add_node(x)
else
end
end
each_edge do |node1, node2, edge|
if new_tree.include?(node1) and new_tree.include?(node2)
new_tree.add_edge(node1, node2, edge)
end
end
new_tree
end | ruby | def clone_tree_without_branch node
new_tree = self.class.new
original = [root] + root.descendents
# p "Original",original
skip = [node] + node.descendents
# p "Skip",skip
# p "Retain",root.descendents - skip
nodes.each do |x|
if not skip.include?(x)
new_tree.add_node(x)
else
end
end
each_edge do |node1, node2, edge|
if new_tree.include?(node1) and new_tree.include?(node2)
new_tree.add_edge(node1, node2, edge)
end
end
new_tree
end | [
"def",
"clone_tree_without_branch",
"node",
"new_tree",
"=",
"self",
".",
"class",
".",
"new",
"original",
"=",
"[",
"root",
"]",
"+",
"root",
".",
"descendents",
"skip",
"=",
"[",
"node",
"]",
"+",
"node",
".",
"descendents",
"nodes",
".",
"each",
"do",
"|",
"x",
"|",
"if",
"not",
"skip",
".",
"include?",
"(",
"x",
")",
"new_tree",
".",
"add_node",
"(",
"x",
")",
"else",
"end",
"end",
"each_edge",
"do",
"|",
"node1",
",",
"node2",
",",
"edge",
"|",
"if",
"new_tree",
".",
"include?",
"(",
"node1",
")",
"and",
"new_tree",
".",
"include?",
"(",
"node2",
")",
"new_tree",
".",
"add_edge",
"(",
"node1",
",",
"node2",
",",
"edge",
")",
"end",
"end",
"new_tree",
"end"
]
| Clone a tree without the branch starting at node | [
"Clone",
"a",
"tree",
"without",
"the",
"branch",
"starting",
"at",
"node"
]
| 39430b55a6cfcb39f057ad696da2966a3d8c3068 | https://github.com/pjotrp/bioruby-alignment/blob/39430b55a6cfcb39f057ad696da2966a3d8c3068/lib/bio-alignment/tree.rb#L143-L162 | train |
ridiculous/usable | lib/usable/config_multi.rb | Usable.ConfigMulti.+ | def +(other)
config = clone
specs = other.spec.to_h
specs.each { |key, val| config[key] = val }
methods = other.spec.singleton_methods
methods.map! { |name| name.to_s.tr('=', '').to_sym }
methods.uniq!
methods -= specs.keys
methods.each do |name|
config.spec.define_singleton_method(name) do
other.spec.public_method(name).call
end
config.instance_variable_get(:@lazy_loads) << name
end
config
end | ruby | def +(other)
config = clone
specs = other.spec.to_h
specs.each { |key, val| config[key] = val }
methods = other.spec.singleton_methods
methods.map! { |name| name.to_s.tr('=', '').to_sym }
methods.uniq!
methods -= specs.keys
methods.each do |name|
config.spec.define_singleton_method(name) do
other.spec.public_method(name).call
end
config.instance_variable_get(:@lazy_loads) << name
end
config
end | [
"def",
"+",
"(",
"other",
")",
"config",
"=",
"clone",
"specs",
"=",
"other",
".",
"spec",
".",
"to_h",
"specs",
".",
"each",
"{",
"|",
"key",
",",
"val",
"|",
"config",
"[",
"key",
"]",
"=",
"val",
"}",
"methods",
"=",
"other",
".",
"spec",
".",
"singleton_methods",
"methods",
".",
"map!",
"{",
"|",
"name",
"|",
"name",
".",
"to_s",
".",
"tr",
"(",
"'='",
",",
"''",
")",
".",
"to_sym",
"}",
"methods",
".",
"uniq!",
"methods",
"-=",
"specs",
".",
"keys",
"methods",
".",
"each",
"do",
"|",
"name",
"|",
"config",
".",
"spec",
".",
"define_singleton_method",
"(",
"name",
")",
"do",
"other",
".",
"spec",
".",
"public_method",
"(",
"name",
")",
".",
"call",
"end",
"config",
".",
"instance_variable_get",
"(",
":@lazy_loads",
")",
"<<",
"name",
"end",
"config",
"end"
]
| It's important to define all block specs we need to lazy load | [
"It",
"s",
"important",
"to",
"define",
"all",
"block",
"specs",
"we",
"need",
"to",
"lazy",
"load"
]
| 1b985164480a0a551af2a0c2037c0a155be51857 | https://github.com/ridiculous/usable/blob/1b985164480a0a551af2a0c2037c0a155be51857/lib/usable/config_multi.rb#L4-L19 | train |
makersacademy/pipekit | lib/pipekit/request.rb | Pipekit.Request.parse_body | def parse_body(body)
body.reduce({}) do |result, (field, value)|
value = Config.field_value_id(resource.singular, field, value)
field = Config.field_id(resource.singular, field)
result.tap { |result| result[field] = value }
end
end | ruby | def parse_body(body)
body.reduce({}) do |result, (field, value)|
value = Config.field_value_id(resource.singular, field, value)
field = Config.field_id(resource.singular, field)
result.tap { |result| result[field] = value }
end
end | [
"def",
"parse_body",
"(",
"body",
")",
"body",
".",
"reduce",
"(",
"{",
"}",
")",
"do",
"|",
"result",
",",
"(",
"field",
",",
"value",
")",
"|",
"value",
"=",
"Config",
".",
"field_value_id",
"(",
"resource",
".",
"singular",
",",
"field",
",",
"value",
")",
"field",
"=",
"Config",
".",
"field_id",
"(",
"resource",
".",
"singular",
",",
"field",
")",
"result",
".",
"tap",
"{",
"|",
"result",
"|",
"result",
"[",
"field",
"]",
"=",
"value",
"}",
"end",
"end"
]
| Replaces custom fields with their Pipedrive ID
if the ID is defined in the configuration
So if the body looked like this with a custom field
called middle_name:
{ middle_name: "Dave" }
And it has a Pipedrive ID ("123abc"), this will put in this custom ID
{ "123abc": "Dave" }
meaning you don't have to worry about the custom IDs | [
"Replaces",
"custom",
"fields",
"with",
"their",
"Pipedrive",
"ID",
"if",
"the",
"ID",
"is",
"defined",
"in",
"the",
"configuration"
]
| ac8a0e6adbc875637cc87587fa4d4795927b1f11 | https://github.com/makersacademy/pipekit/blob/ac8a0e6adbc875637cc87587fa4d4795927b1f11/lib/pipekit/request.rb#L106-L112 | train |
hongshu-corp/liquigen | lib/liquigen/scaffold/config.rb | Liquigen::Scaffold.Config.process | def process
# if not exist the .liquigen file create it
File.write(CONFIG_FILE, prepare_default_content.join("\n")) unless File.exist?(CONFIG_FILE)
# then open the vim editor
system('vi ' + CONFIG_FILE)
end | ruby | def process
# if not exist the .liquigen file create it
File.write(CONFIG_FILE, prepare_default_content.join("\n")) unless File.exist?(CONFIG_FILE)
# then open the vim editor
system('vi ' + CONFIG_FILE)
end | [
"def",
"process",
"File",
".",
"write",
"(",
"CONFIG_FILE",
",",
"prepare_default_content",
".",
"join",
"(",
"\"\\n\"",
")",
")",
"unless",
"File",
".",
"exist?",
"(",
"CONFIG_FILE",
")",
"system",
"(",
"'vi '",
"+",
"CONFIG_FILE",
")",
"end"
]
| write config file | [
"write",
"config",
"file"
]
| faa2f20eebed519bbe117fc4d374bb9750797873 | https://github.com/hongshu-corp/liquigen/blob/faa2f20eebed519bbe117fc4d374bb9750797873/lib/liquigen/scaffold/config.rb#L5-L11 | train |
gocardless/callcredit-ruby | lib/callcredit/request.rb | Callcredit.Request.perform | def perform(checks, check_data = {})
# check_data = Callcredit::Validator.clean_check_data(check_data)
response = @connection.get do |request|
request.path = @config[:api_endpoint]
request.body = build_request_xml(checks, check_data).to_s
end
@config[:raw] ? response : response.body
rescue Faraday::Error::ClientError => e
if e.response.nil?
raise APIError.new
else
raise APIError.new(e.response[:body], e.response[:status], e.response)
end
end | ruby | def perform(checks, check_data = {})
# check_data = Callcredit::Validator.clean_check_data(check_data)
response = @connection.get do |request|
request.path = @config[:api_endpoint]
request.body = build_request_xml(checks, check_data).to_s
end
@config[:raw] ? response : response.body
rescue Faraday::Error::ClientError => e
if e.response.nil?
raise APIError.new
else
raise APIError.new(e.response[:body], e.response[:status], e.response)
end
end | [
"def",
"perform",
"(",
"checks",
",",
"check_data",
"=",
"{",
"}",
")",
"response",
"=",
"@connection",
".",
"get",
"do",
"|",
"request",
"|",
"request",
".",
"path",
"=",
"@config",
"[",
":api_endpoint",
"]",
"request",
".",
"body",
"=",
"build_request_xml",
"(",
"checks",
",",
"check_data",
")",
".",
"to_s",
"end",
"@config",
"[",
":raw",
"]",
"?",
"response",
":",
"response",
".",
"body",
"rescue",
"Faraday",
"::",
"Error",
"::",
"ClientError",
"=>",
"e",
"if",
"e",
".",
"response",
".",
"nil?",
"raise",
"APIError",
".",
"new",
"else",
"raise",
"APIError",
".",
"new",
"(",
"e",
".",
"response",
"[",
":body",
"]",
",",
"e",
".",
"response",
"[",
":status",
"]",
",",
"e",
".",
"response",
")",
"end",
"end"
]
| Perform a credit check | [
"Perform",
"a",
"credit",
"check"
]
| cb48ddae90ac7245abb5b7aa0d1cfd6a28f20eb3 | https://github.com/gocardless/callcredit-ruby/blob/cb48ddae90ac7245abb5b7aa0d1cfd6a28f20eb3/lib/callcredit/request.rb#L9-L22 | train |
gocardless/callcredit-ruby | lib/callcredit/request.rb | Callcredit.Request.build_request_xml | def build_request_xml(checks, check_data = {})
builder = Nokogiri::XML::Builder.new do |xml|
xml.callvalidate do
authentication(xml)
xml.sessions do
xml.session("RID" => Time.now.to_f) do
xml.data do
personal_data(xml, check_data[:personal_data])
card_data(xml, check_data[:card_data])
bank_data(xml, check_data[:bank_data])
income_data(xml, check_data[:income_data])
required_checks(xml, checks)
end
end
end
xml.application @config[:application_name]
end
end
builder.doc
end | ruby | def build_request_xml(checks, check_data = {})
builder = Nokogiri::XML::Builder.new do |xml|
xml.callvalidate do
authentication(xml)
xml.sessions do
xml.session("RID" => Time.now.to_f) do
xml.data do
personal_data(xml, check_data[:personal_data])
card_data(xml, check_data[:card_data])
bank_data(xml, check_data[:bank_data])
income_data(xml, check_data[:income_data])
required_checks(xml, checks)
end
end
end
xml.application @config[:application_name]
end
end
builder.doc
end | [
"def",
"build_request_xml",
"(",
"checks",
",",
"check_data",
"=",
"{",
"}",
")",
"builder",
"=",
"Nokogiri",
"::",
"XML",
"::",
"Builder",
".",
"new",
"do",
"|",
"xml",
"|",
"xml",
".",
"callvalidate",
"do",
"authentication",
"(",
"xml",
")",
"xml",
".",
"sessions",
"do",
"xml",
".",
"session",
"(",
"\"RID\"",
"=>",
"Time",
".",
"now",
".",
"to_f",
")",
"do",
"xml",
".",
"data",
"do",
"personal_data",
"(",
"xml",
",",
"check_data",
"[",
":personal_data",
"]",
")",
"card_data",
"(",
"xml",
",",
"check_data",
"[",
":card_data",
"]",
")",
"bank_data",
"(",
"xml",
",",
"check_data",
"[",
":bank_data",
"]",
")",
"income_data",
"(",
"xml",
",",
"check_data",
"[",
":income_data",
"]",
")",
"required_checks",
"(",
"xml",
",",
"checks",
")",
"end",
"end",
"end",
"xml",
".",
"application",
"@config",
"[",
":application_name",
"]",
"end",
"end",
"builder",
".",
"doc",
"end"
]
| Compile the complete XML request to send to Callcredit | [
"Compile",
"the",
"complete",
"XML",
"request",
"to",
"send",
"to",
"Callcredit"
]
| cb48ddae90ac7245abb5b7aa0d1cfd6a28f20eb3 | https://github.com/gocardless/callcredit-ruby/blob/cb48ddae90ac7245abb5b7aa0d1cfd6a28f20eb3/lib/callcredit/request.rb#L25-L44 | train |
gocardless/callcredit-ruby | lib/callcredit/request.rb | Callcredit.Request.required_checks | def required_checks(xml, checks)
required_checks = [*checks].map { |c| Util.underscore(c).to_sym }
xml.ChecksRequired do
Constants::CHECKS.each do |check|
included = required_checks.include?(Util.underscore(check).to_sym)
xml.send(check, included ? "yes" : "no")
end
end
end | ruby | def required_checks(xml, checks)
required_checks = [*checks].map { |c| Util.underscore(c).to_sym }
xml.ChecksRequired do
Constants::CHECKS.each do |check|
included = required_checks.include?(Util.underscore(check).to_sym)
xml.send(check, included ? "yes" : "no")
end
end
end | [
"def",
"required_checks",
"(",
"xml",
",",
"checks",
")",
"required_checks",
"=",
"[",
"*",
"checks",
"]",
".",
"map",
"{",
"|",
"c",
"|",
"Util",
".",
"underscore",
"(",
"c",
")",
".",
"to_sym",
"}",
"xml",
".",
"ChecksRequired",
"do",
"Constants",
"::",
"CHECKS",
".",
"each",
"do",
"|",
"check",
"|",
"included",
"=",
"required_checks",
".",
"include?",
"(",
"Util",
".",
"underscore",
"(",
"check",
")",
".",
"to_sym",
")",
"xml",
".",
"send",
"(",
"check",
",",
"included",
"?",
"\"yes\"",
":",
"\"no\"",
")",
"end",
"end",
"end"
]
| Checks to be performed | [
"Checks",
"to",
"be",
"performed"
]
| cb48ddae90ac7245abb5b7aa0d1cfd6a28f20eb3 | https://github.com/gocardless/callcredit-ruby/blob/cb48ddae90ac7245abb5b7aa0d1cfd6a28f20eb3/lib/callcredit/request.rb#L58-L66 | train |
songkick/queue | lib/songkick_queue/worker.rb | SongkickQueue.Worker.stop_if_signal_caught | def stop_if_signal_caught
Thread.new do
loop do
sleep 1
if @shutdown
logger.info "Recevied SIG#{@shutdown}, shutting down consumers"
@consumer_instances.each { |instance| instance.shutdown }
@client.channel.work_pool.shutdown
@shutdown = nil
end
end
end
end | ruby | def stop_if_signal_caught
Thread.new do
loop do
sleep 1
if @shutdown
logger.info "Recevied SIG#{@shutdown}, shutting down consumers"
@consumer_instances.each { |instance| instance.shutdown }
@client.channel.work_pool.shutdown
@shutdown = nil
end
end
end
end | [
"def",
"stop_if_signal_caught",
"Thread",
".",
"new",
"do",
"loop",
"do",
"sleep",
"1",
"if",
"@shutdown",
"logger",
".",
"info",
"\"Recevied SIG#{@shutdown}, shutting down consumers\"",
"@consumer_instances",
".",
"each",
"{",
"|",
"instance",
"|",
"instance",
".",
"shutdown",
"}",
"@client",
".",
"channel",
".",
"work_pool",
".",
"shutdown",
"@shutdown",
"=",
"nil",
"end",
"end",
"end",
"end"
]
| Checks for presence of @shutdown every 1 second and if found instructs
all the channel's work pool consumers to shutdown. Each work pool thread
will finish its current task and then join the main thread. Once all the
threads have joined then `channel.work_pool.join` will cease blocking and
return, causing the process to terminate. | [
"Checks",
"for",
"presence",
"of"
]
| 856c5ecaf40259526e01c472e1c13fa6d00d5ff0 | https://github.com/songkick/queue/blob/856c5ecaf40259526e01c472e1c13fa6d00d5ff0/lib/songkick_queue/worker.rb#L49-L64 | train |
songkick/queue | lib/songkick_queue/worker.rb | SongkickQueue.Worker.subscribe_to_queue | def subscribe_to_queue(consumer_class)
queue = channel.queue(consumer_class.queue_name, durable: true,
arguments: { 'x-ha-policy' => 'all' })
queue.subscribe(manual_ack: true) do |delivery_info, properties, message|
process_message(consumer_class, delivery_info, properties, message)
end
logger.info "Subscribed #{consumer_class} to #{consumer_class.queue_name}"
end | ruby | def subscribe_to_queue(consumer_class)
queue = channel.queue(consumer_class.queue_name, durable: true,
arguments: { 'x-ha-policy' => 'all' })
queue.subscribe(manual_ack: true) do |delivery_info, properties, message|
process_message(consumer_class, delivery_info, properties, message)
end
logger.info "Subscribed #{consumer_class} to #{consumer_class.queue_name}"
end | [
"def",
"subscribe_to_queue",
"(",
"consumer_class",
")",
"queue",
"=",
"channel",
".",
"queue",
"(",
"consumer_class",
".",
"queue_name",
",",
"durable",
":",
"true",
",",
"arguments",
":",
"{",
"'x-ha-policy'",
"=>",
"'all'",
"}",
")",
"queue",
".",
"subscribe",
"(",
"manual_ack",
":",
"true",
")",
"do",
"|",
"delivery_info",
",",
"properties",
",",
"message",
"|",
"process_message",
"(",
"consumer_class",
",",
"delivery_info",
",",
"properties",
",",
"message",
")",
"end",
"logger",
".",
"info",
"\"Subscribed #{consumer_class} to #{consumer_class.queue_name}\"",
"end"
]
| Declare a queue and subscribe to it
@param consumer_class [Class] to subscribe to | [
"Declare",
"a",
"queue",
"and",
"subscribe",
"to",
"it"
]
| 856c5ecaf40259526e01c472e1c13fa6d00d5ff0 | https://github.com/songkick/queue/blob/856c5ecaf40259526e01c472e1c13fa6d00d5ff0/lib/songkick_queue/worker.rb#L69-L78 | train |
songkick/queue | lib/songkick_queue/worker.rb | SongkickQueue.Worker.process_message | def process_message(consumer_class, delivery_info, properties, message)
message = JSON.parse(message, symbolize_names: true)
message_id = message.fetch(:message_id)
produced_at = message.fetch(:produced_at)
payload = message.fetch(:payload)
logger.info "Processing message #{message_id} via #{consumer_class}, produced at #{produced_at}"
set_process_name(consumer_class, message_id)
consumer = consumer_class.new(delivery_info, logger)
@consumer_instances << consumer
instrumentation_options = {
consumer_class: consumer_class.to_s,
queue_name: consumer_class.queue_name,
message_id: message_id,
produced_at: produced_at,
}
ActiveSupport::Notifications.instrument('consume_message.songkick_queue', instrumentation_options) do
begin
consumer.process(payload)
ensure
@consumer_instances.delete(consumer)
end
end
rescue Object => exception
logger.error(exception)
channel.reject(delivery_info.delivery_tag, config.requeue_rejected_messages)
else
channel.ack(delivery_info.delivery_tag, false)
ensure
set_process_name
end | ruby | def process_message(consumer_class, delivery_info, properties, message)
message = JSON.parse(message, symbolize_names: true)
message_id = message.fetch(:message_id)
produced_at = message.fetch(:produced_at)
payload = message.fetch(:payload)
logger.info "Processing message #{message_id} via #{consumer_class}, produced at #{produced_at}"
set_process_name(consumer_class, message_id)
consumer = consumer_class.new(delivery_info, logger)
@consumer_instances << consumer
instrumentation_options = {
consumer_class: consumer_class.to_s,
queue_name: consumer_class.queue_name,
message_id: message_id,
produced_at: produced_at,
}
ActiveSupport::Notifications.instrument('consume_message.songkick_queue', instrumentation_options) do
begin
consumer.process(payload)
ensure
@consumer_instances.delete(consumer)
end
end
rescue Object => exception
logger.error(exception)
channel.reject(delivery_info.delivery_tag, config.requeue_rejected_messages)
else
channel.ack(delivery_info.delivery_tag, false)
ensure
set_process_name
end | [
"def",
"process_message",
"(",
"consumer_class",
",",
"delivery_info",
",",
"properties",
",",
"message",
")",
"message",
"=",
"JSON",
".",
"parse",
"(",
"message",
",",
"symbolize_names",
":",
"true",
")",
"message_id",
"=",
"message",
".",
"fetch",
"(",
":message_id",
")",
"produced_at",
"=",
"message",
".",
"fetch",
"(",
":produced_at",
")",
"payload",
"=",
"message",
".",
"fetch",
"(",
":payload",
")",
"logger",
".",
"info",
"\"Processing message #{message_id} via #{consumer_class}, produced at #{produced_at}\"",
"set_process_name",
"(",
"consumer_class",
",",
"message_id",
")",
"consumer",
"=",
"consumer_class",
".",
"new",
"(",
"delivery_info",
",",
"logger",
")",
"@consumer_instances",
"<<",
"consumer",
"instrumentation_options",
"=",
"{",
"consumer_class",
":",
"consumer_class",
".",
"to_s",
",",
"queue_name",
":",
"consumer_class",
".",
"queue_name",
",",
"message_id",
":",
"message_id",
",",
"produced_at",
":",
"produced_at",
",",
"}",
"ActiveSupport",
"::",
"Notifications",
".",
"instrument",
"(",
"'consume_message.songkick_queue'",
",",
"instrumentation_options",
")",
"do",
"begin",
"consumer",
".",
"process",
"(",
"payload",
")",
"ensure",
"@consumer_instances",
".",
"delete",
"(",
"consumer",
")",
"end",
"end",
"rescue",
"Object",
"=>",
"exception",
"logger",
".",
"error",
"(",
"exception",
")",
"channel",
".",
"reject",
"(",
"delivery_info",
".",
"delivery_tag",
",",
"config",
".",
"requeue_rejected_messages",
")",
"else",
"channel",
".",
"ack",
"(",
"delivery_info",
".",
"delivery_tag",
",",
"false",
")",
"ensure",
"set_process_name",
"end"
]
| Handle receipt of a subscribed message
@param consumer_class [Class] that was subscribed to
@param delivery_info [Bunny::DeliveryInfo]
@param properties [Bunny::MessageProperties]
@param message [String] to deserialize | [
"Handle",
"receipt",
"of",
"a",
"subscribed",
"message"
]
| 856c5ecaf40259526e01c472e1c13fa6d00d5ff0 | https://github.com/songkick/queue/blob/856c5ecaf40259526e01c472e1c13fa6d00d5ff0/lib/songkick_queue/worker.rb#L86-L119 | train |
songkick/queue | lib/songkick_queue/worker.rb | SongkickQueue.Worker.set_process_name | def set_process_name(status = 'idle', message_id = nil)
formatted_status = String(status)
.split('::')
.last
ident = [formatted_status, message_id]
.compact
.join('#')
$PROGRAM_NAME = "#{process_name}[#{ident}]"
end | ruby | def set_process_name(status = 'idle', message_id = nil)
formatted_status = String(status)
.split('::')
.last
ident = [formatted_status, message_id]
.compact
.join('#')
$PROGRAM_NAME = "#{process_name}[#{ident}]"
end | [
"def",
"set_process_name",
"(",
"status",
"=",
"'idle'",
",",
"message_id",
"=",
"nil",
")",
"formatted_status",
"=",
"String",
"(",
"status",
")",
".",
"split",
"(",
"'::'",
")",
".",
"last",
"ident",
"=",
"[",
"formatted_status",
",",
"message_id",
"]",
".",
"compact",
".",
"join",
"(",
"'#'",
")",
"$PROGRAM_NAME",
"=",
"\"#{process_name}[#{ident}]\"",
"end"
]
| Update the name of this process, as viewed in `ps` or `top`
@example idle
set_process_name #=> "songkick_queue[idle]"
@example consumer running, namespace is removed
set_process_name(Foo::TweetConsumer, 'a729bcd8') #=> "songkick_queue[TweetConsumer#a729bcd8]"
@param status [String] of the program
@param message_id [String] identifying the message currently being consumed | [
"Update",
"the",
"name",
"of",
"this",
"process",
"as",
"viewed",
"in",
"ps",
"or",
"top"
]
| 856c5ecaf40259526e01c472e1c13fa6d00d5ff0 | https://github.com/songkick/queue/blob/856c5ecaf40259526e01c472e1c13fa6d00d5ff0/lib/songkick_queue/worker.rb#L141-L151 | train |
songkick/queue | lib/songkick_queue/producer.rb | SongkickQueue.Producer.publish | def publish(queue_name, payload, options = {})
message_id = options.fetch(:message_id) { SecureRandom.hex(6) }
produced_at = options.fetch(:produced_at) { Time.now.utc.iso8601 }
message = {
message_id: message_id,
produced_at: produced_at,
payload: payload
}
message = JSON.generate(message)
exchange = client.default_exchange
instrumentation_options = {
queue_name: String(queue_name),
message_id: message_id,
produced_at: produced_at,
}
ActiveSupport::Notifications.instrument('produce_message.songkick_queue', instrumentation_options) do
exchange.publish(message, routing_key: String(queue_name))
end
self.reconnect_attempts = 0
logger.info "Published message #{message_id} to '#{queue_name}' at #{produced_at}"
exchange
rescue Bunny::ConnectionClosedError
self.reconnect_attempts += 1
if (reconnect_attempts > config.max_reconnect_attempts)
fail TooManyReconnectAttemptsError, "Attempted to reconnect more than " +
"#{config.max_reconnect_attempts} times"
end
logger.info "Attempting to reconnect to RabbitMQ, attempt #{reconnect_attempts} " +
"of #{config.max_reconnect_attempts}"
wait_for_bunny_session_to_reconnect
retry
end | ruby | def publish(queue_name, payload, options = {})
message_id = options.fetch(:message_id) { SecureRandom.hex(6) }
produced_at = options.fetch(:produced_at) { Time.now.utc.iso8601 }
message = {
message_id: message_id,
produced_at: produced_at,
payload: payload
}
message = JSON.generate(message)
exchange = client.default_exchange
instrumentation_options = {
queue_name: String(queue_name),
message_id: message_id,
produced_at: produced_at,
}
ActiveSupport::Notifications.instrument('produce_message.songkick_queue', instrumentation_options) do
exchange.publish(message, routing_key: String(queue_name))
end
self.reconnect_attempts = 0
logger.info "Published message #{message_id} to '#{queue_name}' at #{produced_at}"
exchange
rescue Bunny::ConnectionClosedError
self.reconnect_attempts += 1
if (reconnect_attempts > config.max_reconnect_attempts)
fail TooManyReconnectAttemptsError, "Attempted to reconnect more than " +
"#{config.max_reconnect_attempts} times"
end
logger.info "Attempting to reconnect to RabbitMQ, attempt #{reconnect_attempts} " +
"of #{config.max_reconnect_attempts}"
wait_for_bunny_session_to_reconnect
retry
end | [
"def",
"publish",
"(",
"queue_name",
",",
"payload",
",",
"options",
"=",
"{",
"}",
")",
"message_id",
"=",
"options",
".",
"fetch",
"(",
":message_id",
")",
"{",
"SecureRandom",
".",
"hex",
"(",
"6",
")",
"}",
"produced_at",
"=",
"options",
".",
"fetch",
"(",
":produced_at",
")",
"{",
"Time",
".",
"now",
".",
"utc",
".",
"iso8601",
"}",
"message",
"=",
"{",
"message_id",
":",
"message_id",
",",
"produced_at",
":",
"produced_at",
",",
"payload",
":",
"payload",
"}",
"message",
"=",
"JSON",
".",
"generate",
"(",
"message",
")",
"exchange",
"=",
"client",
".",
"default_exchange",
"instrumentation_options",
"=",
"{",
"queue_name",
":",
"String",
"(",
"queue_name",
")",
",",
"message_id",
":",
"message_id",
",",
"produced_at",
":",
"produced_at",
",",
"}",
"ActiveSupport",
"::",
"Notifications",
".",
"instrument",
"(",
"'produce_message.songkick_queue'",
",",
"instrumentation_options",
")",
"do",
"exchange",
".",
"publish",
"(",
"message",
",",
"routing_key",
":",
"String",
"(",
"queue_name",
")",
")",
"end",
"self",
".",
"reconnect_attempts",
"=",
"0",
"logger",
".",
"info",
"\"Published message #{message_id} to '#{queue_name}' at #{produced_at}\"",
"exchange",
"rescue",
"Bunny",
"::",
"ConnectionClosedError",
"self",
".",
"reconnect_attempts",
"+=",
"1",
"if",
"(",
"reconnect_attempts",
">",
"config",
".",
"max_reconnect_attempts",
")",
"fail",
"TooManyReconnectAttemptsError",
",",
"\"Attempted to reconnect more than \"",
"+",
"\"#{config.max_reconnect_attempts} times\"",
"end",
"logger",
".",
"info",
"\"Attempting to reconnect to RabbitMQ, attempt #{reconnect_attempts} \"",
"+",
"\"of #{config.max_reconnect_attempts}\"",
"wait_for_bunny_session_to_reconnect",
"retry",
"end"
]
| Serializes the given message and publishes it to the default RabbitMQ exchange
@param queue_name [String] to publish to
@param message [#to_json] to serialize and enqueue
@option options [String] :message_id to pass through to the consumer (will be logged)
@option options [String] :produced_at time when the message was created, ISO8601 formatted
@raise [TooManyReconnectAttemptsError] if max reconnect attempts is exceeded
@return [Bunny::Exchange] | [
"Serializes",
"the",
"given",
"message",
"and",
"publishes",
"it",
"to",
"the",
"default",
"RabbitMQ",
"exchange"
]
| 856c5ecaf40259526e01c472e1c13fa6d00d5ff0 | https://github.com/songkick/queue/blob/856c5ecaf40259526e01c472e1c13fa6d00d5ff0/lib/songkick_queue/producer.rb#L21-L63 | train |
guard/rb-inotify | lib/rb-inotify/watcher.rb | INotify.Watcher.close | def close
if Native.inotify_rm_watch(@notifier.fd, @id) == 0
@notifier.watchers.delete(@id)
return
end
raise SystemCallError.new("Failed to stop watching #{path.inspect}",
FFI.errno)
end | ruby | def close
if Native.inotify_rm_watch(@notifier.fd, @id) == 0
@notifier.watchers.delete(@id)
return
end
raise SystemCallError.new("Failed to stop watching #{path.inspect}",
FFI.errno)
end | [
"def",
"close",
"if",
"Native",
".",
"inotify_rm_watch",
"(",
"@notifier",
".",
"fd",
",",
"@id",
")",
"==",
"0",
"@notifier",
".",
"watchers",
".",
"delete",
"(",
"@id",
")",
"return",
"end",
"raise",
"SystemCallError",
".",
"new",
"(",
"\"Failed to stop watching #{path.inspect}\"",
",",
"FFI",
".",
"errno",
")",
"end"
]
| Disables this Watcher, so that it doesn't fire any more events.
@raise [SystemCallError] if the watch fails to be disabled for some reason | [
"Disables",
"this",
"Watcher",
"so",
"that",
"it",
"doesn",
"t",
"fire",
"any",
"more",
"events",
"."
]
| 3fae270905c7bd259e69fe420dbdcd59bb47fa3d | https://github.com/guard/rb-inotify/blob/3fae270905c7bd259e69fe420dbdcd59bb47fa3d/lib/rb-inotify/watcher.rb#L47-L55 | train |
guard/rb-inotify | lib/rb-inotify/event.rb | INotify.Event.absolute_name | def absolute_name
return watcher.path if name.empty?
return File.join(watcher.path, name)
end | ruby | def absolute_name
return watcher.path if name.empty?
return File.join(watcher.path, name)
end | [
"def",
"absolute_name",
"return",
"watcher",
".",
"path",
"if",
"name",
".",
"empty?",
"return",
"File",
".",
"join",
"(",
"watcher",
".",
"path",
",",
"name",
")",
"end"
]
| The absolute path of the file that the event occurred on.
This is actually only as absolute as the path passed to the {Watcher}
that created this event.
However, it is relative to the working directory,
assuming that hasn't changed since the watcher started.
@return [String] | [
"The",
"absolute",
"path",
"of",
"the",
"file",
"that",
"the",
"event",
"occurred",
"on",
"."
]
| 3fae270905c7bd259e69fe420dbdcd59bb47fa3d | https://github.com/guard/rb-inotify/blob/3fae270905c7bd259e69fe420dbdcd59bb47fa3d/lib/rb-inotify/event.rb#L66-L69 | train |