_id (string, 2–6 chars) | title (string, 9–130 chars) | partition (3 classes) | text (string, 66–10.5k chars) | language (1 class) | meta_information (dict)
---|---|---|---|---|---
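Each record in the table below fills these six columns in order. As an illustration only, the first row (q3900) can be read as the following Ruby hash; this is a sketch, not part of the dataset itself: the variable name `record` is arbitrary and the `text` value is truncated here.

# Illustrative mapping of one row onto the schema above (not dataset content).
record = {
  "_id"              => "q3900",
  "title"            => "RightScale.ExecutableSequence.report_failure",
  "partition"        => "train",
  "text"             => "def report_failure(title, msg) ...", # full method body appears in the row below
  "language"         => "ruby",
  "meta_information" => { "resource" => "" }
}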
q3900
|
RightScale.ExecutableSequence.report_failure
|
train
|
def report_failure(title, msg)
@ok = false
@failure_title = title
@failure_message = msg
# note that the errback handler is expected to audit the message based on
# the preserved title and message and so we don't audit it here.
EM.next_tick { fail }
true
end
|
ruby
|
{
"resource": ""
}
|
q3901
|
RightScale.ExecutableSequence.chef_error
|
train
|
def chef_error(e)
if e.is_a?(::RightScale::Exceptions::Exec)
msg = "External command error: "
if match = /RightScale::Exceptions::Exec: (.*)/.match(e.message)
cmd_output = match[1]
else
cmd_output = e.message
end
msg += cmd_output
msg += "\nThe command was run from \"#{e.path}\"" if e.path
elsif e.is_a?(::Chef::Exceptions::ValidationFailed) && (e.message =~ /Option action must be equal to one of:/)
msg = "[chef] recipe references an action that does not exist. #{e.message}"
elsif e.is_a?(::NoMethodError) && (missing_action_match = /undefined method .action_(\S*)' for #<\S*:\S*>/.match(e.message)) && missing_action_match[1]
msg = "[chef] recipe references the action <#{missing_action_match[1]}> which is missing an implementation"
else
msg = "Execution error:\n"
msg += e.message
file, line, meth = e.backtrace[0].scan(BACKTRACE_LINE_REGEXP).flatten
line_number = line.to_i
if file && line && (line_number.to_s == line)
dir = AgentConfig.cookbook_download_dir
if file[0..dir.size - 1] == dir
path = "[COOKBOOKS]/" + file[dir.size..file.size]
else
path = file
end
msg += "\n\nThe error occurred line #{line} of #{path}"
msg += " in method '#{meth}'" if meth
context = ""
if File.readable?(file)
File.open(file, 'r') do |f|
lines = f.readlines
lines_count = lines.size
if lines_count >= line_number
upper = [lines_count, line_number + 2].max
padding = upper.to_s.size
context += context_line(lines, line_number - 2, padding)
context += context_line(lines, line_number - 1, padding)
context += context_line(lines, line_number, padding, '*')
context += context_line(lines, line_number + 1, padding)
context += context_line(lines, line_number + 2, padding)
end
end
end
msg += " while executing:\n\n#{context}" unless context.empty?
end
end
msg
end
|
ruby
|
{
"resource": ""
}
|
q3902
|
RightScale.ExecutableSequence.context_line
|
train
|
def context_line(lines, index, padding, prefix=nil)
return '' if index < 1 || index > lines.size
margin = prefix ? prefix * index.to_s.size : index.to_s
"#{margin}#{' ' * ([padding - margin.size, 0].max)} #{lines[index - 1]}"
end
|
ruby
|
{
"resource": ""
}
|
q3903
|
RightScale.ExecutableSequence.retry_execution
|
train
|
def retry_execution(retry_message, times = AgentConfig.max_packages_install_retries)
count = 0
success = false
begin
count += 1
success = yield
@audit.append_info("\n#{retry_message}\n") unless success || count > times
end while !success && count <= times
success
end
|
ruby
|
{
"resource": ""
}
|
q3904
|
JCR.JcrParts.get_start
|
train
|
def get_start( line )
retval = nil
m = /^\s*;\s*start_part\s*(.+)[^\s]*/.match( line )
if m && m[1]
retval = m[1]
end
return retval
end
|
ruby
|
{
"resource": ""
}
|
q3905
|
JCR.JcrParts.get_all
|
train
|
def get_all( line )
retval = nil
m = /^\s*;\s*all_parts\s*(.+)[^\s]*/.match( line )
if m && m[1]
retval = m[1]
end
return retval
end
|
ruby
|
{
"resource": ""
}
|
q3906
|
JCR.JcrParts.get_end
|
train
|
def get_end( line )
retval = nil
m = /^\s*;\s*end_part/.match( line )
if m
retval = true
end
return retval
end
|
ruby
|
{
"resource": ""
}
|
q3907
|
JCR.JcrParts.process_ruleset
|
train
|
def process_ruleset( ruleset, dirname = nil )
all_file_names = []
all_parts = []
all_parts_name = nil
current_part = nil
current_part_name = nil
ruleset.lines do |line|
if !all_parts_name && ( all_parts_name = get_all( line ) )
all_parts_name = File.join( dirname, all_parts_name ) if dirname
all_file_names << all_parts_name
elsif ( current_part_name = get_start( line ) )
current_part_name = File.join( dirname, current_part_name ) if dirname
if current_part
current_part.close
end
current_part = File.open( current_part_name, "w" )
all_file_names << current_part_name
elsif get_end( line ) && current_part
current_part.close
current_part = nil
elsif current_part
current_part.puts line
all_parts << line
else
all_parts << line
end
end
if current_part
current_part.close
end
if all_parts_name
f = File.open( all_parts_name, "w" )
all_parts.each do |line|
f.puts( line )
end
f.close
end
if all_file_names.length
xml_fn = File.basename( all_file_names[0],".*" ) + "_xml_entity_refs"
xml_fn = File.join( File.dirname( all_file_names[0] ), xml_fn )
xml = File.open( xml_fn, "w" )
all_file_names.each do |fn|
bn = File.basename( fn, ".*" )
xml.puts( "<!ENTITY #{bn} PUBLIC '' '#{fn}'>")
end
xml.close
end
end
|
ruby
|
{
"resource": ""
}
|
q3908
|
SadPanda.Polarity.call
|
train
|
def call
words = stems_for(remove_stopwords_in(@words))
score_polarities_for(frequencies_for(words))
polarities.empty? ? 5.0 : (polarities.inject(0){ |sum, polarity| sum + polarity } / polarities.length)
end
|
ruby
|
{
"resource": ""
}
|
q3909
|
SadPanda.Polarity.score_emoticon_polarity
|
train
|
def score_emoticon_polarity
happy = happy_emoticon?(words)
sad = sad_emoticon?(words)
polarities << 5.0 if happy && sad
polarities << 8.0 if happy
polarities << 2.0 if sad
end
|
ruby
|
{
"resource": ""
}
|
q3910
|
SadPanda.Polarity.score_polarities_for
|
train
|
def score_polarities_for(word_frequencies)
word_frequencies.each do |word, frequency|
polarity = SadPanda::Bank::POLARITIES[word.to_sym]
polarities << (polarity * frequency.to_f) if polarity
end
score_emoticon_polarity
end
|
ruby
|
{
"resource": ""
}
|
q3911
|
RightScale.CentosNetworkConfigurator.network_route_add
|
train
|
def network_route_add(network, nat_server_ip)
super
route_str = "#{network} via #{nat_server_ip}"
begin
if @boot
logger.info "Adding route to network #{route_str}"
device = route_device(network, nat_server_ip)
if device
update_route_file(network, nat_server_ip, device)
else
logger.warn "Unable to find associated device for #{route_str} in pre-networking section. As network devices aren't setup yet, will try again after network start."
end
else
if network_route_exists?(network, nat_server_ip)
logger.debug "Route already exists to #{route_str}"
else
logger.info "Adding route to network #{route_str}"
runshell("ip route add #{route_str}")
device = route_device(network, nat_server_ip)
if device
update_route_file(network, nat_server_ip, device)
else
logger.error "Unable to set route in system config: unable to find associated device for #{route_str} post-networking."
# No need to raise here -- ip route should have failed above if there is no device to attach to
end
end
end
rescue Exception => e
logger.error "Unable to set a route #{route_str}. Check network settings."
# XXX: for some reason network_route_exists? is allowing multiple routes
# to be set. For now, don't fail if route already exists.
throw e unless e.message.include?("NETLINK answers: File exists")
end
true
end
|
ruby
|
{
"resource": ""
}
|
q3912
|
RightScale.CentosNetworkConfigurator.update_route_file
|
train
|
def update_route_file(network, nat_server_ip, device)
raise "ERROR: invalid nat_server_ip : '#{nat_server_ip}'" unless valid_ipv4?(nat_server_ip)
raise "ERROR: invalid CIDR network : '#{network}'" unless valid_ipv4_cidr?(network)
routes_file = routes_file(device)
ip_route_cmd = ip_route_cmd(network, nat_server_ip)
update_config_file(
routes_file,
ip_route_cmd,
"Route to #{ip_route_cmd} already exists in #{routes_file}",
"Appending #{ip_route_cmd} route to #{routes_file}"
)
true
end
|
ruby
|
{
"resource": ""
}
|
q3913
|
RightScale.CentosNetworkConfigurator.write_adaptor_config
|
train
|
def write_adaptor_config(device, data)
config_file = config_file(device)
raise "FATAL: invalid device name of '#{device}' specified for static IP allocation" unless device.match(/eth[0-9+]/)
logger.info "Writing persistent network configuration to #{config_file}"
File.open(config_file, "w") { |f| f.write(data) }
end
|
ruby
|
{
"resource": ""
}
|
q3914
|
RightScale.CentosNetworkConfigurator.update_config_file
|
train
|
def update_config_file(filename, line, exists_str=nil, append_str=nil)
FileUtils.mkdir_p(File.dirname(filename)) # make sure the directory exists
if read_config_file(filename).include?(line)
exists_str ||= "Config already exists in #{filename}"
logger.info exists_str
else
append_str ||= "Appending config to #{filename}"
logger.info append_str
append_config_file(filename, line)
end
true
end
|
ruby
|
{
"resource": ""
}
|
q3915
|
RightScale.CentosNetworkConfigurator.read_config_file
|
train
|
def read_config_file(filename)
contents = ""
File.open(filename, "r") { |f| contents = f.read() } if File.exists?(filename)
contents
end
|
ruby
|
{
"resource": ""
}
|
q3916
|
RightScale.CentosNetworkConfigurator.append_config_file
|
train
|
def append_config_file(filename, line)
File.open(filename, "a") { |f| f.puts(line) }
end
|
ruby
|
{
"resource": ""
}
|
q3917
|
RightScale.Cook.run
|
train
|
def run
# 1. Load configuration settings
options = OptionsBag.load
agent_id = options[:identity]
AgentConfig.root_dir = options[:root_dir]
Log.program_name = 'RightLink'
Log.facility = 'user'
Log.log_to_file_only(options[:log_to_file_only])
Log.init(agent_id, options[:log_path])
Log.level = CookState.log_level
# add an additional logger if the agent is set to log to an alternate
# location (install, operate, decommission, ...)
Log.add_logger(::Logger.new(CookState.log_file)) if CookState.log_file
Log.info("[cook] Process starting up with dev tags: [#{CookState.startup_tags.select { |tag| tag.include?(CookState::DEV_TAG_NAMESPACE)}.join(', ')}]")
fail('Missing command server listen port') unless options[:listen_port]
fail('Missing command cookie') unless options[:cookie]
@client = CommandClient.new(options[:listen_port], options[:cookie])
ShutdownRequestProxy.init(@client)
# 2. Retrieve bundle
input = gets.chomp
begin
platform = RightScale::Platform
if platform.windows?
bundle = MessageEncoder::SecretSerializer.new(InstanceState.identity, ENV[ExecutableSequenceProxy::DECRYPTION_KEY_NAME]).load(input)
else
bundle = MessageEncoder::Serializer.new.load(input)
end
rescue Exception => e
fail('Invalid bundle', e.message)
end
fail('Missing bundle', 'No bundle to run') if bundle.nil?
@thread_name = bundle.runlist_policy.thread_name if bundle.respond_to?(:runlist_policy) && bundle.runlist_policy
@thread_name ||= RightScale::AgentConfig.default_thread_name
options[:thread_name] = @thread_name
# Chef state needs the server secret so it can encrypt state on disk.
# The secret is the same for all instances of the server (i.e. is still
# valid after stop and restart server).
server_secret = bundle.server_secret || AgentConfig.default_server_secret
ChefState.init(agent_id, server_secret, reset=false)
# 3. Run bundle
@@instance = self
success = nil
Log.debug("[cook] Thread name associated with bundle = #{@thread_name}")
gatherer = ExternalParameterGatherer.new(bundle, options)
sequence = ExecutableSequence.new(bundle)
EM.threadpool_size = 1
EM.error_handler do |e|
Log.error("Execution failed", e, :trace)
fail('Exception caught', "The following exception was caught during execution:\n #{e.message}")
end
EM.run do
begin
AuditStub.instance.init(options)
check_for_missing_inputs(bundle)
gatherer.callback { EM.defer { sequence.run } }
gatherer.errback { success = false; report_failure(gatherer) }
sequence.callback { success = true; send_inputs_patch(sequence) }
sequence.errback { success = false; report_failure(sequence) }
EM.defer { gatherer.run }
rescue Exception => e
fail('Execution failed', Log.format("Execution failed", e, :trace))
end
end
rescue Exception => e
fail('Execution failed', Log.format("Run failed", e, :trace))
ensure
Log.info("[cook] Process stopping")
exit(1) unless success
end
|
ruby
|
{
"resource": ""
}
|
q3918
|
RightScale.Cook.send_push
|
train
|
def send_push(type, payload = nil, target = nil, opts = {})
cmd = {:name => :send_push, :type => type, :payload => payload, :target => target, :options => opts}
# Need to execute on EM main thread where command client is running
EM.next_tick { @client.send_command(cmd) }
end
|
ruby
|
{
"resource": ""
}
|
q3919
|
RightScale.Cook.load_tags
|
train
|
def load_tags(timeout)
cmd = { :name => :get_tags }
res = blocking_request(cmd, timeout)
raise TagError.new("Retrieving current tags failed: #{res.inspect}") unless res.kind_of?(Array)
::Chef::Log.info("Successfully loaded current tags: '#{res.join("', '")}'")
res
end
|
ruby
|
{
"resource": ""
}
|
q3920
|
RightScale.Cook.send_inputs_patch
|
train
|
def send_inputs_patch(sequence)
if has_default_thread?
begin
cmd = { :name => :set_inputs_patch, :patch => sequence.inputs_patch }
@client.send_command(cmd)
rescue Exception => e
fail('Failed to update inputs', Log.format("Failed to apply inputs patch after execution", e, :trace))
end
end
true
ensure
stop
end
|
ruby
|
{
"resource": ""
}
|
q3921
|
RightScale.Cook.report_failure
|
train
|
def report_failure(subject)
begin
AuditStub.instance.append_error(subject.failure_title, :category => RightScale::EventCategories::CATEGORY_ERROR) if subject.failure_title
AuditStub.instance.append_error(subject.failure_message) if subject.failure_message
rescue Exception => e
fail('Failed to report failure', Log.format("Failed to report failure after execution", e, :trace))
ensure
stop
end
end
|
ruby
|
{
"resource": ""
}
|
q3922
|
RightScale.Cook.fail
|
train
|
def fail(title, message=nil)
$stderr.puts title
$stderr.puts message || title
if @client
@client.stop { AuditStub.instance.stop { exit(1) } }
else
exit(1)
end
end
|
ruby
|
{
"resource": ""
}
|
q3923
|
RightScale.Cook.stop
|
train
|
def stop
AuditStub.instance.stop do
@client.stop do |timeout|
Log.info('[cook] Failed to stop command client cleanly, forcing shutdown...') if timeout
EM.stop
end
end
end
|
ruby
|
{
"resource": ""
}
|
q3924
|
RightScale.Cook.blocking_request
|
train
|
def blocking_request(cmd, timeout)
raise BlockingError, "Blocking request not allowed on EM main thread for command #{cmd.inspect}" if EM.reactor_thread?
# Use a queue to block and wait for response
response_queue = Queue.new
# Need to execute on EM main thread where command client is running
EM.next_tick { @client.send_command(cmd, false, timeout) { |response| response_queue << response } }
return response_queue.shift
end
|
ruby
|
{
"resource": ""
}
|
q3925
|
RightScale.Cook.load
|
train
|
def load(data, error_message, format = nil)
serializer = Serializer.new(format)
content = nil
begin
content = serializer.load(data)
rescue Exception => e
fail(error_message, "Failed to load #{serializer.format.to_s} data (#{e}):\n#{data.inspect}")
end
content
end
|
ruby
|
{
"resource": ""
}
|
q3926
|
Verse.Wrapping.wrap
|
train
|
def wrap(wrap_at = DEFAULT_WIDTH)
if text.length < wrap_at.to_i || wrap_at.to_i.zero?
return text
end
ansi_stack = []
text.split(NEWLINE, -1).map do |paragraph|
format_paragraph(paragraph, wrap_at, ansi_stack)
end * NEWLINE
end
|
ruby
|
{
"resource": ""
}
|
q3927
|
RTM.Endpoint.url_for
|
train
|
def url_for(method,params={},endpoint='rest')
params['api_key'] = @api_key
params['method'] = method if method
signature = sign(params)
url = BASE_URL + endpoint + '/' + params_to_url(params.merge({'api_sig' => signature}))
url
end
|
ruby
|
{
"resource": ""
}
|
q3928
|
RTM.Endpoint.params_to_url
|
train
|
def params_to_url(params)
string = '?'
params.each do |k,v|
string += CGI::escape(k)
string += '='
string += CGI::escape(v)
string += '&'
end
string
end
|
ruby
|
{
"resource": ""
}
|
q3929
|
RTM.Endpoint.sign
|
train
|
def sign(params)
raise "Something's wrong; @secret is nil" if @secret.nil?
sign_me = @secret
params.keys.sort.each do |key|
sign_me += key
raise "Omit params with nil values; key #{key} was nil" if params[key].nil?
sign_me += params[key]
end
return Digest::MD5.hexdigest(sign_me)
end
|
ruby
|
{
"resource": ""
}
|
q3930
|
Ralexa.TopSites.country
|
train
|
def country(code, limit, params = {})
paginating_collection(
limit,
PER_PAGE,
{"ResponseGroup" => "Country", "CountryCode" => code.to_s.upcase},
params,
&top_sites_parser
)
end
|
ruby
|
{
"resource": ""
}
|
q3931
|
Ralexa.TopSites.list_countries
|
train
|
def list_countries(params = {})
collection({"ResponseGroup" => "ListCountries"}, params) do |document|
path = "//TopSitesResult/Alexa/TopSites/Countries"
document.at(path).elements.map do |node|
Country.new(
node.at("Name").text,
node.at("Code").text,
node.at("TotalSites").text.to_i,
node.at("PageViews").text.to_f * 1_000_000,
node.at("Users").text.to_f * 1_000_000,
)
end
end
end
|
ruby
|
{
"resource": ""
}
|
q3932
|
Taskrabbit.Smash.reload
|
train
|
def reload(method, path, options = {})
self.loaded = true
response = request(method, path, self.class, Smash::filtered_options(options))
self.merge!(response)
clear_errors
!redirect?
rescue Smash::Error => e
self.merge!(e.response) if e.response.is_a?(Hash)
false
end
|
ruby
|
{
"resource": ""
}
|
q3933
|
Taskrabbit.Smash.[]
|
train
|
def [](property)
value = nil
return value unless (value = super(property)).nil?
if api and !loaded
# load the object if trying to access a property
self.loaded = true
fetch
end
super(property)
end
|
ruby
|
{
"resource": ""
}
|
q3934
|
KnifeTopo.BootstrapHelper.run_bootstrap
|
train
|
def run_bootstrap(data, bootstrap_args, overwrite = false)
node_name = data['name']
args = setup_bootstrap_args(bootstrap_args, data)
delete_client_node(node_name) if overwrite
ui.info "Bootstrapping node #{node_name}"
run_cmd(Chef::Knife::Bootstrap, args)
rescue StandardError => e
raise if Chef::Config[:verbosity] == 2
ui.warn "bootstrap of node #{node_name} exited with error"
humanize_exception(e)
false
end
|
ruby
|
{
"resource": ""
}
|
q3935
|
TrueVault.Authorization.login
|
train
|
def login(options = {})
body = {
body: {
username: options[:username],
password: options[:password],
account_id: options[:account_id]
}
}
self.class.post("/#{@api_ver}/auth/login", body)
end
|
ruby
|
{
"resource": ""
}
|
q3936
|
Plum.Frame.flags
|
train
|
def flags
fs = FRAME_FLAGS[type]
[0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80]
.select { |v| @flags_value & v > 0 }
.map { |val| fs && fs.key(val) || ("unknown_%02x" % val).to_sym }
end
|
ruby
|
{
"resource": ""
}
|
q3937
|
Plum.Frame.flags=
|
train
|
def flags=(values)
val = 0
FRAME_FLAGS_MAP.values_at(*values).each { |c|
val |= c if c
}
@flags_value = val
end
|
ruby
|
{
"resource": ""
}
|
q3938
|
Plum.Connection.receive
|
train
|
def receive(new_data)
return if @state == :closed
return if new_data.empty?
@buffer << new_data
consume_buffer
rescue RemoteConnectionError => e
callback(:connection_error, e)
goaway(e.http2_error_type)
close
end
|
ruby
|
{
"resource": ""
}
|
q3939
|
Plum.Connection.stream
|
train
|
def stream(stream_id, update_max_id = true)
raise ArgumentError, "stream_id can't be 0" if stream_id == 0
stream = @streams[stream_id]
if stream
if stream.state == :idle && stream_id < @max_stream_ids[stream_id % 2]
stream.set_state(:closed_implicitly)
end
elsif stream_id > @max_stream_ids[stream_id % 2]
@max_stream_ids[stream_id % 2] = stream_id if update_max_id
stream = Stream.new(self, stream_id, state: :idle)
callback(:stream, stream)
@streams[stream_id] = stream
else
stream = Stream.new(self, stream_id, state: :closed_implicitly)
callback(:stream, stream)
end
stream
end
|
ruby
|
{
"resource": ""
}
|
q3940
|
Plum.Connection.settings
|
train
|
def settings(**new_settings)
send_immediately Frame::Settings.new(**new_settings)
old_settings = @local_settings.dup
@local_settings.merge!(new_settings)
@hpack_decoder.limit = @local_settings[:header_table_size]
update_recv_initial_window_size(@local_settings[:initial_window_size] - old_settings[:initial_window_size])
end
|
ruby
|
{
"resource": ""
}
|
q3941
|
Plum.Connection.goaway
|
train
|
def goaway(error_type = :no_error, message = "")
last_id = @max_stream_ids.max
send_immediately Frame::Goaway.new(last_id, error_type, message)
end
|
ruby
|
{
"resource": ""
}
|
q3942
|
Transitionable.ClassMethods.transition
|
train
|
def transition(name, states = self::STATES, transitions = self::TRANSITIONS)
self.state_machines ||= {}
self.state_machines[name] = { states: states.values, transitions: transitions }
self.state_machines[name][:states].each do |this_state|
method_name = "#{this_state}?".to_sym
raise 'Method already defined' if self.instance_methods(false).include?(method_name)
define_method method_name do
current_state_based_on(this_state) == this_state
end
end
end
|
ruby
|
{
"resource": ""
}
|
q3943
|
Itcsscli.Core.inuit_find_modules
|
train
|
def inuit_find_modules(current_module)
current_config = YAML.load_file(@ITCSS_CONFIG_FILE)
current_inuit_modules = current_config["inuit_modules"].select{ |p| p.include? current_module }
current_inuit_modules.map{ |p| inuit_imports_path p }
end
|
ruby
|
{
"resource": ""
}
|
q3944
|
Syntaxer.Writer.block
|
train
|
def block name, param = nil, &b
sp = ' '*2 if name == :lang || name == :languages
body = yield self if block_given?
param = ":#{param.to_s}" unless param.nil?
"#{sp}#{name.to_s} #{param} do\n#{body}\n#{sp}end\n"
end
|
ruby
|
{
"resource": ""
}
|
q3945
|
Syntaxer.Writer.property
|
train
|
def property name, prop
return '' if EXCLUDE_PROPERTIES.include?(name.to_s) || prop.nil? || (prop.kind_of?(Array) && prop.empty?)
prop = prop.flatten.map{|p| "'#{p}'"}.join(', ') if prop.respond_to?(:flatten) && name.to_sym != :folders
prop = @paths.map{|f| "'#{f}'"}.join(',') if name.to_sym == :folders
prop = "'#{prop.exec_rule}'" if prop.instance_of?(Syntaxer::Runner::ExecRule) && !prop.exec_rule.nil?
prop = "Syntaxer::Runner.#{prop.language}" if prop.instance_of?(Syntaxer::Runner::ExecRule) && prop.exec_rule.nil?
' '*4 + "#{name.to_s} #{prop}\n"
end
|
ruby
|
{
"resource": ""
}
|
q3946
|
ScheduledJob.ScheduledJobClassMethods.schedule_job
|
train
|
def schedule_job(job = nil)
if can_schedule_job?(job)
callback = ScheduledJob.config.fast_mode
in_fast_mode = callback ? callback.call(self) : false
run_at = in_fast_mode ? Time.now.utc + 1 : time_to_recur(Time.now.utc)
Delayed::Job.enqueue(new, :run_at => run_at, :queue => queue_name)
end
end
|
ruby
|
{
"resource": ""
}
|
q3947
|
SitePrismPlus.Page.load_and_verify
|
train
|
def load_and_verify(verify_element, url_hash = nil)
result = true
@metrics.start_time
if url_hash.nil?
load
else
load(url_hash)
end
if verify_element
result = wait_till_element_visible(verify_element, 3)
end
@metrics.log_metric(@page_name, 'load', verify_element)
result
end
|
ruby
|
{
"resource": ""
}
|
q3948
|
AddressBook.Person.load_ab_person
|
train
|
def load_ab_person
@attributes ||= {}
Person.single_value_property_map.each do |ab_property, attr_key|
if attributes[attr_key]
set_field(ab_property, attributes[attr_key])
else
remove_field(ab_property)
end
end
if attributes[:is_org]
set_field(KABPersonKindProperty, KABPersonKindOrganization)
else
set_field(KABPersonKindProperty, KABPersonKindPerson)
end
Person.multi_value_property_map.each do |ab_property, attr_key|
if attributes[attr_key]
set_multi_valued(ab_property, attributes[attr_key])
else
remove_field(ab_property)
end
end
ab_person
end
|
ruby
|
{
"resource": ""
}
|
q3949
|
Rufus::RTM.Task.tags=
|
train
|
def tags= (tags)
tags = tags.split(',') if tags.is_a?(String)
@tags = TagArray.new(list_id, tags)
queue_operation('setTasks', tags.join(','))
end
|
ruby
|
{
"resource": ""
}
|
q3950
|
Plum.Response.on_chunk
|
train
|
def on_chunk(&block)
raise "Body already read" if @on_chunk
raise ArgumentError, "block must be given" unless block_given?
@on_chunk = block
unless @body.empty?
@body.each(&block)
@body.clear
end
self
end
|
ruby
|
{
"resource": ""
}
|
q3951
|
Bicho.Client.create_bug
|
train
|
def create_bug(product, component, summary, version, **kwargs)
params = {}
params = params.merge(kwargs)
params[:product] = product
params[:component] = component
params[:summary] = summary
params[:version] = version
ret = @client.call('Bug.create', params)
handle_faults(ret)
ret['id']
end
|
ruby
|
{
"resource": ""
}
|
q3952
|
Bicho.Client.search_bugs
|
train
|
def search_bugs(query)
# allow plain strings to be passed, interpreting them
query = Query.new.summary(query) if query.is_a?(String)
ret = @client.call('Bug.search', query.query_map)
handle_faults(ret)
bugs = []
ret['bugs'].each do |bug_data|
bugs << Bug.new(self, bug_data)
end
bugs
end
|
ruby
|
{
"resource": ""
}
|
q3953
|
Bicho.Client.expand_named_query
|
train
|
def expand_named_query(what)
url = @api_url.clone
url.path = '/buglist.cgi'
url.query = "cmdtype=runnamed&namedcmd=#{URI.escape(what)}&ctype=atom"
logger.info("Expanding named query: '#{what}' to #{url.request_uri}")
fetch_named_query_url(url, 5)
end
|
ruby
|
{
"resource": ""
}
|
q3954
|
Bicho.Client.fetch_named_query_url
|
train
|
def fetch_named_query_url(url, redirects_left)
raise 'You need to be authenticated to use named queries' unless @userid
http = Net::HTTP.new(@api_url.host, @api_url.port)
http.set_debug_output(Bicho::LoggerIODevice.new)
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
http.use_ssl = (@api_url.scheme == 'https')
# request = Net::HTTP::Get.new(url.request_uri, {'Cookie' => self.cookie})
request = Net::HTTP::Get.new(url.request_uri)
request.basic_auth @api_url.user, @api_url.password
response = http.request(request)
case response
when Net::HTTPSuccess
bugs = []
begin
xml = Nokogiri::XML.parse(response.body)
xml.root.xpath('//xmlns:entry/xmlns:link/@href', xml.root.namespace).each do |attr|
uri = URI.parse attr.value
bugs << uri.query.split('=')[1]
end
return bugs
rescue Nokogiri::XML::XPath::SyntaxError
raise "Named query '#{url.request_uri}' not found"
end
when Net::HTTPRedirection
location = response['location']
if redirects_left.zero?
raise "Maximum redirects exceeded (redirected to #{location})"
end
new_location_uri = URI.parse(location)
logger.debug("Moved to #{new_location_uri}")
fetch_named_query_url(new_location_uri, redirects_left - 1)
else
raise "Error when expanding named query '#{url.request_uri}': #{response}"
end
end
|
ruby
|
{
"resource": ""
}
|
q3955
|
Bicho.Client.get_bugs
|
train
|
def get_bugs(*ids)
params = {}
params[:ids] = normalize_ids ids
bugs = []
ret = @client.call('Bug.get', params)
handle_faults(ret)
ret['bugs'].each do |bug_data|
bugs << Bug.new(self, bug_data)
end
bugs
end
|
ruby
|
{
"resource": ""
}
|
q3956
|
Bicho.Client.add_attachment
|
train
|
def add_attachment(summary, file, *ids, **kwargs)
params = {}
params[:ids] = ids
params[:summary] = summary
params[:content_type] = kwargs.fetch(:content_type, 'application/octet-stream')
params[:file_name] = kwargs.fetch(:file_name, File.basename(file))
params[:is_patch] = kwargs[:patch?] if kwargs[:patch?]
params[:is_private] = kwargs[:private?] if kwargs[:private?]
params[:comment] = kwargs[:comment] if kwargs[:comment]
params[:data] = XMLRPC::Base64.new(file.read)
ret = @client.call('Bug.add_attachment', params)
handle_faults(ret)
ret['ids']
end
|
ruby
|
{
"resource": ""
}
|
q3957
|
Sshez.Exec.connect
|
train
|
def connect(alias_name, options)
file = File.open(FILE_PATH, 'r')
servers = all_hosts_in(file)
if servers.include?alias_name
PRINTER.verbose_print "Connecting to #{alias_name}"
exec "ssh #{alias_name}"
else
PRINTER.print "Could not find host `#{alias_name}`"
end
end
|
ruby
|
{
"resource": ""
}
|
q3958
|
Sshez.Exec.add
|
train
|
def add(alias_name, user, host, options)
begin
PRINTER.verbose_print "Adding\n"
config_append = form(alias_name, user, host, options)
PRINTER.verbose_print config_append
unless options.test
file = File.open(FILE_PATH, 'a+')
file.write(config_append)
file.close
# causes a bug in fedora if permission was not updated to 0600
File.chmod(0600, FILE_PATH)
# system "chmod 600 #{FILE_PATH}"
end
rescue
return permission_error
end
PRINTER.verbose_print "to #{FILE_PATH}"
PRINTER.print "Successfully added `#{alias_name}` as an alias for `#{user}@#{host}`"
PRINTER.print "Try sshez connect #{alias_name}"
finish_exec
end
|
ruby
|
{
"resource": ""
}
|
q3959
|
Sshez.Exec.all_hosts_in
|
train
|
def all_hosts_in(file)
servers = []
file.each do |line|
if line.include?('Host ')
servers << line.sub('Host ', '').strip
end
end
servers
end
|
ruby
|
{
"resource": ""
}
|
q3960
|
Thinner.CommandLine.options!
|
train
|
def options!
@options = {}
@option_parser = OptionParser.new(BANNER) do |opts|
opts.on("-b", "--batch_length BATCH", "Number of urls to purge at once") do |b|
@options[:batch_length] = b.to_i
end
opts.on("-t", "--sleep_time SLEEP", "Time to wait in between batches") do |t|
@options[:sleep_time] = t.to_i
end
opts.on("-e", "--stdin", "Use stdin for urls") do
@urls = []
ARGF.each_line do |url|
@urls << url.chomp
end
end
opts.on("-s", "--server SERVER", "Varnish url, e.g. 127.0.0.1:6082") do |s|
@options[:server] = s
end
opts.on("-o", "--log_file LOG_PATH", "Log file to output to (default: Standard Out") do |o|
@options[:log_file] = o
end
opts.on("-n", "--no-kill", "Don't kill the running purgers if they exist") do |n|
@options[:no_kill] = n
end
opts.on_tail("-h", "--help", "Display this help message") do
puts opts.help
exit
end
end
begin
@option_parser.parse!(ARGV)
rescue OptionParser::InvalidOption => e
puts e.message
exit(1)
end
end
|
ruby
|
{
"resource": ""
}
|
q3961
|
Permit.Base.licenses
|
train
|
def licenses *names
names.to_strings.each do |name|
begin
module_name = "#{name.camelize}License"
clazz = module_name.constantize
rescue
raise "License #{module_name} is not defined"
end
begin
clazz.new(self).enforce!
rescue
raise "License #{clazz} could not be enforced using #{self.inspect}"
end
end
end
|
ruby
|
{
"resource": ""
}
|
q3962
|
Permit.Base.executor
|
train
|
def executor(user_account, options = {})
@executor ||= case self.class.name
when /System/
then Permit::Executor::System.new self, user_account, options
else
Permit::Executor::Base.new self, user_account, options
end
end
|
ruby
|
{
"resource": ""
}
|
q3963
|
SolrEad.Indexer.update
|
train
|
def update file
solr_doc = om_document(File.new(file)).to_solr
delete solr_doc["id"]
solr.add solr_doc
add_components(file) unless options[:simple]
solr.commit
end
|
ruby
|
{
"resource": ""
}
|
q3964
|
SolrEad.Indexer.om_document
|
train
|
def om_document file
options[:document] ? options[:document].from_xml(File.new(file)) : SolrEad::Document.from_xml(File.new(file))
end
|
ruby
|
{
"resource": ""
}
|
q3965
|
SolrEad.Indexer.om_component_from_node
|
train
|
def om_component_from_node node
options[:component] ? options[:component].from_xml(prep(node)) : SolrEad::Component.from_xml(prep(node))
end
|
ruby
|
{
"resource": ""
}
|
q3966
|
SolrEad.Indexer.solr_url
|
train
|
def solr_url
if defined?(Rails.root)
::YAML.load(ERB.new(File.read(File.join(Rails.root,"config","solr.yml"))).result)[Rails.env]['url']
elsif ENV['RAILS_ENV']
::YAML.load(ERB.new(File.read("config/solr.yml")).result)[ENV['RAILS_ENV']]['url']
else
::YAML.load(ERB.new(File.read("config/solr.yml")).result)['development']['url']
end
end
|
ruby
|
{
"resource": ""
}
|
q3967
|
Thinner.Client.purge_urls
|
train
|
def purge_urls
@current_job.each do |url|
begin
@varnish.start if @varnish.stopped?
while(!@varnish.running?) do sleep 0.1 end
if @varnish.purge :url, url
@logger.info "Purged url: #{url}"
@purged_urls << url
else
@logger.warn "Could not purge: #{url}"
end
rescue *ERRORS => e
@logger.warn "Error on url: #{url}, message: #{e}"
sleep @timeout
end
end
end
|
ruby
|
{
"resource": ""
}
|
q3968
|
Thinner.Client.handle_errors
|
train
|
def handle_errors
trap('HUP') { }
trap('TERM') { close_log; Process.exit! }
trap('KILL') { close_log; Process.exit! }
trap('INT') { close_log; Process.exit! }
end
|
ruby
|
{
"resource": ""
}
|
q3969
|
Thinner.Client.logger
|
train
|
def logger
if !@log_file.respond_to?(:write)
STDOUT.reopen(File.open(@log_file, (File::WRONLY | File::APPEND | File::CREAT)))
end
@logger = Logger.new(STDOUT)
end
|
ruby
|
{
"resource": ""
}
|
q3970
|
ResqueManager.ResqueHelper.time_filter
|
train
|
def time_filter(id, name, value)
html = "<select id=\"#{id}\" name=\"#{name}\">"
html += "<option value=\"\">-</option>"
[1, 3, 6, 12, 24].each do |h|
selected = h.to_s == value ? 'selected="selected"' : ''
html += "<option #{selected} value=\"#{h}\">#{h} #{h==1 ? "hour" : "hours"} ago</option>"
end
[3, 7, 14, 28].each do |d|
selected = (d*24).to_s == value ? 'selected="selected"' : ''
html += "<option #{selected} value=\"#{d*24}\">#{d} days ago</option>"
end
html += "</select>"
html.html_safe
end
|
ruby
|
{
"resource": ""
}
|
q3971
|
Plum.Stream.receive_frame
|
train
|
def receive_frame(frame)
validate_received_frame(frame)
consume_recv_window(frame)
case frame
when Frame::Data then receive_data(frame)
when Frame::Headers then receive_headers(frame)
when Frame::Priority then receive_priority(frame)
when Frame::RstStream then receive_rst_stream(frame)
when Frame::WindowUpdate then receive_window_update(frame)
when Frame::Continuation then receive_continuation(frame)
when Frame::PushPromise then receive_push_promise(frame)
when Frame::Ping, Frame::Goaway, Frame::Settings
raise RemoteConnectionError.new(:protocol_error) # stream_id MUST be 0x00
else
# MUST ignore unknown frame
end
rescue RemoteStreamError => e
callback(:stream_error, e)
send_immediately Frame::RstStream.new(id, e.http2_error_type)
close
end
|
ruby
|
{
"resource": ""
}
|
q3972
|
Plum.Stream.promise
|
train
|
def promise(headers)
stream = @connection.reserve_stream(weight: self.weight + 1, parent: self)
encoded = @connection.hpack_encoder.encode(headers)
frame = Frame::PushPromise.new(id, stream.id, encoded, end_headers: true)
send frame
stream
end
|
ruby
|
{
"resource": ""
}
|
q3973
|
Plum.Stream.send_data
|
train
|
def send_data(data = "", end_stream: true)
max = @connection.remote_settings[:max_frame_size]
if data.is_a?(IO)
until data.eof?
fragment = data.readpartial(max)
send Frame::Data.new(id, fragment, end_stream: end_stream && data.eof?)
end
else
send Frame::Data.new(id, data, end_stream: end_stream)
end
@state = :half_closed_local if end_stream
end
|
ruby
|
{
"resource": ""
}
|
q3974
|
KnifeTopo.TopoDelete.remove_node_from_topology
|
train
|
def remove_node_from_topology(node_name)
# load then update and save the node
node = Chef::Node.load(node_name)
if node['topo'] && node['topo']['name'] == @topo_name
node.rm('topo', 'name')
ui.info "Removing node #{node.name} from topology"
node.save
end
node
rescue Net::HTTPServerException => e
raise unless e.to_s =~ /^404/
# Node has not been created
end
|
ruby
|
{
"resource": ""
}
|
q3975
|
ConeyIsland.JobsCache.caching_jobs
|
train
|
def caching_jobs(&blk)
_was_caching = caching_jobs?
cache_jobs
blk.call
flush_jobs
self.is_caching_jobs = _was_caching
self
end
|
ruby
|
{
"resource": ""
}
|
q3976
|
ConeyIsland.JobsCache.flush_jobs
|
train
|
def flush_jobs
# Get all the jobs, one at a time, pulling from the list
while job = self.cached_jobs.shift
# Map the array to the right things
job_id, args = *job
# Submit! takes care of rescuing, error logging, etc and never caches
submit! args, job_id
end
self
end
|
ruby
|
{
"resource": ""
}
|
q3977
|
SeafileApi.Connect.share_dir
|
train
|
def share_dir(email,path,perm="r",repo=self.repo,s_type="d")
post_share_dir(repo,{"email"=> email, "path"=> path,"s_type"=> s_type,"perm"=> perm})
end
|
ruby
|
{
"resource": ""
}
|
q3978
|
Syntaxer.RepoChecker.process
|
train
|
def process
@rule_files.each do |rule_name, rule|
if rule[:rule].deferred
@deferred_process << rule
else
rule[:files].each do |file|
full_path = File.join(@runner.options.root_path,file)
check(rule[:rule], full_path)
end
end
end
@deferred_process.each do |rule|
rule[:rule].exec_rule.run(@runner.options.root_path, rule[:files])
end
self
end
|
ruby
|
{
"resource": ""
}
|
q3979
|
Syntaxer.PlainChecker.process
|
train
|
def process
@deferred_process = []
@reader.rules.each do |rule|
if rule.deferred
@deferred_process << rule
else
rule.files_list(@runner.options.root_path).each do |file|
check(rule, file)
end
end
end
@deferred_process.each do |rule|
rule.exec_rule.run(@runner.options.root_path, rule.files_list(@runner.options.root_path))
end
self
end
|
ruby
|
{
"resource": ""
}
|
q3980
|
Danger.DangerJiraSync.configure
|
train
|
def configure(jira_url:, jira_username:, jira_api_token:)
warn "danger-jira_sync plugin configuration is missing jira_url" if jira_url.blank?
warn "danger-jira_sync plugin configuration is missing jira_username" if jira_username.blank?
warn "danger-jira_sync plugin configuration is missing jira_api_token" if jira_api_token.blank?
@jira_client = JIRA::Client.new(
site: jira_url,
username: jira_username,
password: jira_api_token,
context_path: "",
auth_type: :basic
)
end
|
ruby
|
{
"resource": ""
}
|
q3981
|
Danger.DangerJiraSync.autolabel_pull_request
|
train
|
def autolabel_pull_request(issue_prefixes, project: true, components: true, labels: false)
raise NotConfiguredError unless @jira_client
raise(ArgumentError, "issue_prefixes cannot be empty") if issue_prefixes.empty?
issue_keys = extract_issue_keys_from_pull_request(issue_prefixes)
return if issue_keys.empty?
labels = fetch_labels_from_issues(
issue_keys,
project: project,
components: components,
labels: labels
)
return if labels.empty?
create_missing_github_labels(labels)
add_labels_to_issue(labels)
labels
end
|
ruby
|
{
"resource": ""
}
|
q3982
|
Resque.Worker.startup
|
train
|
def startup
enable_gc_optimizations
if Thread.current == Thread.main
register_signal_handlers
prune_dead_workers
end
run_hook :before_first_fork
register_worker
# Fix buffering so we can `rake resque:work > resque.log` and
# get output from the child in there.
$stdout.sync = true
end
|
ruby
|
{
"resource": ""
}
|
q3983
|
Resque.Worker.reconnect
|
train
|
def reconnect
tries = 0
begin
redis.synchronize do |client|
client.reconnect
end
rescue Redis::BaseConnectionError
if (tries += 1) <= 3
log "Error reconnecting to Redis; retrying"
sleep(tries)
retry
else
log "Error reconnecting to Redis; quitting"
raise
end
end
end
|
ruby
|
{
"resource": ""
}
|
q3984
|
Sshez.Parser.options_for_add
|
train
|
def options_for_add(opts, options)
opts.on('-p', '--port PORT',
'Specify a port') do |port|
options.file_content.port_text = " Port #{port}\n"
end
opts.on('-i', '--identity_file [key]',
'Add identity') do |key_path|
options.file_content.identity_file_text =
" IdentityFile #{key_path}\n"
end
opts.on('-b', '--batch_mode', 'Batch Mode') do
options.file_content.batch_mode_text = " BatchMode yes\n"
end
end
|
ruby
|
{
"resource": ""
}
|
q3985
|
Sshez.Parser.common_options
|
train
|
def common_options(opts, options)
opts.separator ''
opts.separator 'Common options:'
# Another typical switch to print the version.
opts.on('-v', '--version', 'Show version') do
PRINTER.print Sshez.version
options.halt = true
end
opts.on('-z', '--verbose', 'Verbose Output') do
PRINTER.verbose!
end
# Prints everything
opts.on_tail('-h', '--help', 'Show this message') do
PRINTER.print opts
options.halt = true
end
end
|
ruby
|
{
"resource": ""
}
|
q3986
|
KnifeTopo.TopoExport.node_export
|
train
|
def node_export(node_name)
load_node_data(node_name, config[:min_priority])
rescue Net::HTTPServerException => e
raise unless e.to_s =~ /^404/
empty_node(node_name)
end
|
ruby
|
{
"resource": ""
}
|
q3987
|
KnifeTopo.TopoExport.update_nodes!
|
train
|
def update_nodes!(nodes)
@node_names.each do |node_name|
# find out if the node is already in the array
found = nodes.index { |n| n['name'] == node_name }
if found.nil?
nodes.push(node_export(node_name))
else
nodes[found] = node_export(node_name)
end
end
end
|
ruby
|
{
"resource": ""
}
|
q3988
|
Bicho::CLI::Commands.Attachments.download
|
train
|
def download(bug, supportconfig_only)
bug.attachments.each do |attachment|
filename = "bsc#{bug.id}-#{attachment.id}-#{attachment.props['file_name']}"
if supportconfig_only
next unless attachment.content_type == 'application/x-gzip' ||
attachment.content_type == 'application/x-bzip-compressed-tar'
next unless attachment.summary =~ /supportconfig/i
end
t.say("Downloading to #{t.color(filename, :even_row)}")
begin
data = attachment.data
File.open(filename, 'w') do |f|
f.write data.read
end
rescue StandardError => e
t.say("#{t.color('Error:', :error)} Download of #{filename} failed: #{e}")
raise
end
end
end
|
ruby
|
{
"resource": ""
}
|
q3989
|
Qualys.Config.load!
|
train
|
def load!(path)
settings = YAML.safe_load(ERB.new(File.new(path).read).result)['api']
from_hash(settings) if settings.is_a? Hash
end
|
ruby
|
{
"resource": ""
}
|
q3990
|
Esvg.Svgs.embed_script
|
train
|
def embed_script(names=nil)
if production?
embeds = buildable_svgs(names).map(&:embed)
else
embeds = find_svgs(names).map(&:embed)
end
write_cache if cache_stale?
if !embeds.empty?
"<script>#{js(embeds.join("\n"))}</script>"
end
end
|
ruby
|
{
"resource": ""
}
|
q3991
|
Ralexa.UrlInfo.get
|
train
|
def get(url, params = {})
result({"ResponseGroup" => "Related,TrafficData,ContentData", "Url" => url}, params) do |doc|
@document = doc
{
speed_median_load_time: speed_median_load_time,
speed_load_percentile: speed_load_percentile,
link_count: link_count,
ranking: ranking,
ranking_delta: ranking_delta,
reach_rank: reach_rank,
reach_rank_delta: reach_rank_delta,
reach_per_million: reach_per_million,
reach_per_million_delta: reach_per_million_delta,
page_views_rank: page_views_rank,
page_views_rank_delta: page_views_rank_delta,
page_views_per_million: page_views_per_million,
page_views_per_million_delta: page_views_per_million_delta,
page_views_per_user: page_views_per_user,
page_views_per_user_delta: page_views_per_user_delta
}
end
end
|
ruby
|
{
"resource": ""
}
|
q3992
|
MotionRecord.Persistence.apply_persistence_timestamps
|
train
|
def apply_persistence_timestamps
self.updated_at = Time.now if self.class.attribute_names.include?(:updated_at)
self.created_at ||= Time.now if self.class.attribute_names.include?(:created_at)
end
|
ruby
|
{
"resource": ""
}
|
q3993
|
Shop.ShopConfig.get
|
train
|
def get(namespace = false, key = false, defaultValue = '')
if namespace && key
value = @config[namespace][key]
if value
return value
else
return defaultValue
end
end
return @config if [email protected]?
get_config
end
|
ruby
|
{
"resource": ""
}
|
q3994
|
Attachable.ClassMethods.attachable
|
train
|
def attachable(options = {})
# Store the default prefix for file data
# Defaults to "file"
cattr_accessor :attachment_file_prefix
self.attachment_file_prefix = (options[:file_prefix] || :file).to_s
# Setup the default scope so the file data isn't included by default.
# Generate the default scope, which includes every column except for the data column.
# We use this so queries, by default, don't include the file data which could be quite large.
default_scope { select(column_names.reject { |n| n == "#{attachment_file_prefix}_data" }.collect {|n| "#{table_name}.#{n}" }.join(',')) }
# Include all the important stuff
include InstanceMethods
end
|
ruby
|
{
"resource": ""
}
|
q3995
|
TeXMath.Converter.convert
|
train
|
def convert(data)
Open3.popen3(command) do |stdin, stdout, stderr|
stdin.puts(data)
stdin.close
output = stdout.read
error = stderr.read
raise ConversionError, error unless error.empty?
return output.strip
end
rescue Errno::ENOENT
raise NoExecutableError, "Can't find the '#{executable}' executable."
end
|
ruby
|
{
"resource": ""
}
|
q3996
|
Sinatra.Mapping.map
|
train
|
def map(name, path = nil)
@locations ||= {}
if name.to_sym == :root
@locations[:root] = cleanup_paths("/#{path}/")
self.class.class_eval do
define_method "#{name}_path" do |*paths|
cleanup_paths("/#{@locations[:root]}/?")
end
end
else
@locations[name.to_sym] = cleanup_paths(path || name.to_s)
self.class.class_eval do
define_method("#{name}_path") do |*paths|
map_path_to(@locations[name.to_sym], paths << "/?")
end
end
end
Delegator.delegate "#{name}_path"
end
|
ruby
|
{
"resource": ""
}
|
q3997
|
Sinatra.Mapping.map_path_to
|
train
|
def map_path_to(*args)
script_name = args.shift if args.first.to_s =~ %r{^/\w.*}
path_mapped(script_name, *locations_get_from(*args))
end
|
ruby
|
{
"resource": ""
}
|
q3998
|
Sinatra.Mapping.path_mapped
|
train
|
def path_mapped(script_name, *args)
return cleanup_paths("/#{script_name}/#{@locations[:root]}") if args.empty?
a = replace_symbols(script_name, *args)
cleanup_paths("/#{script_name}/#{@locations[:root]}/#{a.join('/')}")
end
|
ruby
|
{
"resource": ""
}
|
q3999
|
Sinatra.Mapping.replace_symbols
|
train
|
def replace_symbols(script_name, *args)
args_new = []
args_copy = args.clone
url = args[0].clone
modifiers = args_copy[1]
if modifiers.class == Hash
modifiers.delete_if do |key, value|
delete = url.include? (":" + key.to_s)
if delete
url.sub!( (":" + key.to_s), value.to_s )
end
delete
end
end
i = 1
result = [url]
while args_copy[i]
unless args_copy[i].empty?
if args_copy[i].class == Array
value = args_copy[i]
else
value = [args_copy[i]]
end
if args_copy[i] != '/?' && url =~ /\*/
url.sub!("*", value.join(''))
else
result.concat(value)
end
end
i+= 1
end
result
end
|
ruby
|
{
"resource": ""
}
|