_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q4000
|
Sinatra.Mapping.locations_get_from
|
train
|
# Maps each supplied path symbol to its registered location.
#
# Flattens the arguments, drops the :root marker, and substitutes any
# entry found in @locations, falling back to the path itself when no
# mapping is registered.
def locations_get_from(*args)
  paths = args.flatten
  paths.reject { |path| path == :root }
       .collect { |path| @locations[path] || path }
end
|
ruby
|
{
"resource": ""
}
|
q4001
|
Plum.ServerConnection.reserve_stream
|
train
|
# Reserves a new locally-initiated stream: allocates the next stream ID
# (current max + 2), transitions it to :reserved_local and applies the
# given dependency options.
# NOTE(review): assumes @max_stream_ids[0] tracks the relevant ID
# parity for local reservation — confirm against the enclosing class.
def reserve_stream(**args)
  next_id = @max_stream_ids[0] + 2
  stream = stream(next_id)
  stream.set_state(:reserved_local)
  stream.update_dependency(**args)
  stream
end
|
ruby
|
{
"resource": ""
}
|
q4002
|
Sshez.Runner.process
|
train
|
# Parses the given CLI arguments with a fresh Parser wired to an Exec
# handler for this runner, then returns the accumulated PRINTER output.
def process(args)
  parser = Parser.new(Exec.new(self))
  parser.parse(args)
  PRINTER.output
end
|
ruby
|
{
"resource": ""
}
|
q4003
|
BootstrapBuilder.Builder.element
|
train
|
# Renders a Bootstrap control-group wrapper around +value+ with the
# given +label+. When a block is given, its captured output is appended
# to +value+. The +type+ parameter is accepted for interface
# compatibility but is not used in this body.
def element(label = ' ', value = '', type = 'text_field', &block)
  value += @template.capture(&block) if block_given?
  %{
<div class='control-group'>
<label class='control-label'>#{label}</label>
<div class='controls'>
#{value}
</div>
</div>
}.html_safe
end
|
ruby
|
{
"resource": ""
}
|
q4004
|
BootstrapBuilder.Builder.render_field
|
train
|
# Renders a form field through a partial template: 'check_box' fields
# use their own partial, everything else goes through 'default_field'.
# The captured block supplies the field markup itself; label, required,
# prepend/append, help and error data come from +html_options+.
# Note: the +options+ parameter is currently unused in this body.
def render_field(field_name, method, options={}, html_options={}, &block)
  case field_name
  when 'check_box'
    template = field_name
  else
    template = 'default_field'
  end
  @template.render(:partial => "#{BootstrapBuilder.config.template_folder}/#{template}", :locals => {
    :builder => self,
    :method => method,
    :field => @template.capture(&block),
    :label_text => label_text(method, html_options[:label]),
    :required => html_options[:required],
    :prepend => html_options[:prepend],
    :append => html_options[:append],
    :help_block => html_options[:help_block],
    :error_messages => error_messages_for(method)
  })
end
|
ruby
|
{
"resource": ""
}
|
q4005
|
KnifeTopo.CommandHelper.initialize_cmd_args
|
train
|
# Builds the argument vector for a delegated knife command: drops the
# original command words (two fixed tokens plus the name arguments)
# and prepends the replacement name arguments. The caller's array is
# not mutated.
def initialize_cmd_args(args, name_args, new_name_args)
  remaining = args.dup
  remaining.shift(2 + name_args.length)
  new_name_args + remaining
end
|
ruby
|
{
"resource": ""
}
|
q4006
|
KnifeTopo.CommandHelper.run_cmd
|
train
|
# Instantiates and runs another knife command class with the given
# args, propagating this command's config file and loading the command
# class's declared dependencies before running.
# @return the executed command instance
def run_cmd(command_class, args)
  command = command_class.new(args)
  command.config[:config_file] = config[:config_file]
  command.configure_chef
  command_class.load_deps
  command.run
  command
end
|
ruby
|
{
"resource": ""
}
|
q4007
|
KnifeTopo.CommandHelper.resource_exists?
|
train
|
# Checks whether a Chef REST resource exists at +relative_path+.
# Returns true on a successful GET, false on a 404, and re-raises any
# other HTTP server exception.
def resource_exists?(relative_path)
  rest.get_rest(relative_path)
  true
rescue Net::HTTPServerException => e
  raise unless e.response.code == '404'
  false
end
|
ruby
|
{
"resource": ""
}
|
q4008
|
KnifeTopo.CommandHelper.check_chef_env
|
train
|
# Ensures the named Chef environment exists, creating it when the
# server reports 404. Other HTTP errors propagate.
#
# @param chef_env_name [String, nil] environment to check; nil is a no-op
# @return [Chef::Environment, nil] the loaded or newly created environment
def check_chef_env(chef_env_name)
  return unless chef_env_name
  # The guard clause above already ensures chef_env_name is truthy, so
  # the original trailing `if chef_env_name` modifier was redundant.
  Chef::Environment.load(chef_env_name)
rescue Net::HTTPServerException => e
  raise unless e.to_s =~ /^404/
  ui.info 'Creating chef environment ' + chef_env_name
  chef_env = Chef::Environment.new
  chef_env.name(chef_env_name)
  chef_env.create
  chef_env
end
|
ruby
|
{
"resource": ""
}
|
q4009
|
Ralexa.AbstractService.result
|
train
|
# Builds a Result for this service's host/path with the merged request
# parameters and returns its parsed #result.
def result(*params, &parser)
  Result.new(
    @client,
    host,
    path,
    merged_params(*params),
    &parser
  ).result
end
|
ruby
|
{
"resource": ""
}
|
q4010
|
Ralexa.AbstractService.collection
|
train
|
# Builds a LazyCollection over this service's host/path with the
# merged request parameters; enumeration is deferred to the caller.
def collection(*params, &parser)
  LazyCollection.new(
    @client,
    host,
    path,
    merged_params(*params),
    &parser
  )
end
|
ruby
|
{
"resource": ""
}
|
q4011
|
Ralexa.AbstractService.merged_params
|
train
|
# Merges any number of parameter hashes over default_params; later
# hashes win on key conflicts.
#
# The original block parameter was also named `params`, shadowing the
# method's *params splat (a Ruby shadowing warning); renamed here.
#
# @param params [Array<Hash>] override hashes, applied left to right
# @return [Hash] default_params merged with all overrides
def merged_params(*params)
  params.reduce(default_params) do |merged, overrides|
    merged.merge(overrides)
  end
end
|
ruby
|
{
"resource": ""
}
|
q4012
|
Shop.Template.custom_template_path
|
train
|
# Returns the full path of a custom template named +name+ under the
# configured template path, or false when no such file exists.
#
# File.exists? was deprecated and removed in Ruby 3.2; File.exist? is
# the supported spelling and behaves identically.
def custom_template_path(name)
  config = ShopConfig.new
  custom_path = config.get('template', 'path')
  if File.exist?("#{custom_path}/#{name}")
    "#{custom_path}/#{name}"
  else
    false
  end
end
|
ruby
|
{
"resource": ""
}
|
q4013
|
Shop.Template.template_path
|
train
|
# Resolves the template path for +name+. A custom template (see
# custom_template_path) takes precedence; otherwise the bundled
# templates directory is used, returning the directory itself when no
# name is given.
def template_path(name=false)
  custom_path = custom_template_path(name)
  return custom_path if custom_path
  base = File.expand_path File.dirname(__FILE__)
  name ? "#{base}/../../templates/#{name}" : "#{base}/../../templates"
end
|
ruby
|
{
"resource": ""
}
|
q4014
|
Shop.Template.template
|
train
|
# Renders the template +name+ by substituting each "<%= key %>"
# placeholder with the corresponding value from +datas+.
def template(name, datas)
  content = File.read(template_path(name))
  datas.each do |key, value|
    content = content.gsub("<%= #{key} %>", value)
  end
  content
end
|
ruby
|
{
"resource": ""
}
|
q4015
|
RBarman.CliCommand.binary=
|
train
|
# Sets the path to the barman executable.
#
# @param path [String] path to the binary; must exist and be named 'barman'
# @raise [ArgumentError] if the file does not exist or has the wrong name
#
# File.exists? was deprecated and removed in Ruby 3.2; File.exist? is
# the supported spelling and behaves identically.
def binary=(path)
  raise(ArgumentError, "binary doesn't exist") if !File.exist?(path)
  raise(ArgumentError, "binary isn't called \'barman\'") if File.basename(path) != 'barman'
  @binary = path
end
|
ruby
|
{
"resource": ""
}
|
q4016
|
RBarman.CliCommand.backup
|
train
|
# Fetches a single backup by its ID for +server+.
# Note: mutates the caller's +opts+ hash by adding :backup_id.
# @raise [ArgumentError] if backup_id is nil
def backup(server, backup_id, opts = {})
  raise(ArgumentError, "backup id must not be nil!") if backup_id.nil?
  opts[:backup_id] = backup_id
  return backups(server, opts)[0]
end
|
ruby
|
{
"resource": ""
}
|
q4017
|
RBarman.CliCommand.server
|
train
|
# Builds a Server object from `barman show-server` output, enriches it
# with `barman check` results (without aborting on check errors) and
# optionally attaches its backups when opts[:with_backups] is set.
def server(name, opts = {})
  lines = run_barman_command("show-server #{name}")
  server = parse_show_server_lines(name, lines)
  lines = run_barman_command("check #{name}", { :abort_on_error => false })
  parse_check_lines(server, lines)
  server.backups = backups(server.name, opts) if opts[:with_backups]
  return server
end
|
ruby
|
{
"resource": ""
}
|
q4018
|
RBarman.CliCommand.servers
|
train
|
# Fetches every server known to barman, building each one via #server
# (so +opts+ such as :with_backups are honored per server).
def servers(opts = {})
  listing = run_barman_command("list-server")
  all_servers = Servers.new
  parse_list_server_lines(listing).each do |server_name|
    all_servers << server(server_name, opts)
  end
  all_servers
end
|
ruby
|
{
"resource": ""
}
|
q4019
|
RBarman.CliCommand.wal_files
|
train
|
# Lists the WAL files belonging to +backup_id+ on +server+ and
# enriches each entry with size/compression/created data from the
# server's xlog database.
def wal_files(server, backup_id)
  lines = run_barman_command("list-files --target wal #{server} #{backup_id}")
  wal_files = parse_wal_files_list(lines)
  xlog_db = read_xlog_db(server)
  wal_files.each do |w|
    wal = "#{w.timeline}#{w.xlog}#{w.segment}"
    entry = xlog_db[wal]
    # NOTE(review): assumes every listed WAL file has an xlog_db entry;
    # a missing entry would raise NoMethodError on nil here — confirm.
    w.size = entry[:size]
    w.compression = entry[:compression]
    w.created = entry[:created].to_i
  end
  return wal_files
end
|
ruby
|
{
"resource": ""
}
|
q4020
|
RBarman.CliCommand.parse_list_server_lines
|
train
|
# Extracts server names from `barman list-server` output lines of the
# form "name - description".
def parse_list_server_lines(lines)
  lines.map { |line| line.split("-")[0].strip }
end
|
ruby
|
{
"resource": ""
}
|
q4021
|
RBarman.CliCommand.create_recovery_cmd_args
|
train
|
# Builds the command-line argument string for `barman recover` from
# the given options.
#
# @param opts [Hash] supported keys: :remote_ssh_cmd, :target_time,
#   :target_xid, :exclusive
# @return [String] space-separated arguments (empty when no opts apply)
def create_recovery_cmd_args(opts={})
  args = Array.new
  args << "--remote-ssh-command='#{opts[:remote_ssh_cmd]}'" if opts[:remote_ssh_cmd]
  # String interpolation already calls #to_s, so the original explicit
  # `.to_s` was redundant.
  args << "--target-time '#{opts[:target_time]}'" if opts[:target_time]
  args << "--target-xid #{opts[:target_xid]}" if opts[:target_xid]
  args << "--exclusive" if opts[:exclusive]
  return args.join(" ")
end
|
ruby
|
{
"resource": ""
}
|
q4022
|
KnifeTopo.NodeUpdateHelper.update_node
|
train
|
# Loads the named node, ensures its target chef environment exists and
# applies the given updates (optionally deep-merging), then returns the
# node. A 404 (node not yet created) is swallowed; other HTTP errors
# propagate. Editing is disabled for the duration.
def update_node(node_updates, merge = false)
  config[:disable_editing] = true
  begin
    # load then update and save the node
    node = Chef::Node.load(node_updates['name'])
    env = node_updates['chef_environment']
    check_chef_env(env) unless env == node['chef_environment']
    do_node_updates(node, node_updates, merge)
  rescue Net::HTTPServerException => e
    raise unless e.to_s =~ /^404/
    # Node has not been created
  end
  node
end
|
ruby
|
{
"resource": ""
}
|
q4023
|
KnifeTopo.NodeUpdateHelper.update_node_with_values
|
train
|
# Applies attribute, run-list, environment and tag updates to +node+.
# @return [Array<String>, false] names of the updated properties, or
#   false when nothing changed
def update_node_with_values(node, updates, merge = false)
  updated = []
  # merge the normal attributes (but not tags)
  updated << 'normal' if update_attrs(node, updates['normal'], merge)
  # update runlist
  updated << 'run_list' if update_runlist(node, updates['run_list'])
  # update chef env
  if update_chef_env(node, updates['chef_environment'])
    updated << 'chef_environment'
  end
  # merge tags
  updated << 'tags' if update_tags(node, updates['tags'])
  # return false if no updates, else return array of property names
  !updated.empty? && updated
end
|
ruby
|
{
"resource": ""
}
|
q4024
|
KnifeTopo.NodeUpdateHelper.update_attrs
|
train
|
# Replaces or deep-merges the node's normal attributes, always
# preserving the node's existing tags.
# @return [Boolean] true when the attributes actually changed
def update_attrs(node, attrs, merge = false)
  return false unless attrs
  # keep the current tags
  attrs['tags'] = node.normal.tags || []
  # Deep-copy via Marshal so the comparison below sees the pre-update
  # state even after an in-place deep merge.
  original = Marshal.load(Marshal.dump(node.normal))
  node.normal = if merge
    Chef::Mixin::DeepMerge.merge(node.normal, attrs)
  else
    attrs
  end
  original != node.normal
end
|
ruby
|
{
"resource": ""
}
|
q4025
|
TrueVault.User.create
|
train
|
# Creates a TrueVault user via POST /<ver>/users. The attributes hash
# is serialized to Base64-encoded JSON as the API expects.
def create(options = {})
  query = {
    query: {
      username: options[:username],
      password: options[:password],
      attributes: hash_to_base64_json(options[:attributes])
    }
  }
  new_options = default_options_to_merge_with.merge(query)
  self.class.post("/#{@api_ver}/users", new_options)
end
|
ruby
|
{
"resource": ""
}
|
q4026
|
TrueVault.User.all
|
train
|
# Lists all users. +read_attributes+ is passed through as the API's
# `full` query parameter ("01" by default).
def all(read_attributes="01")
  options = default_options_to_merge_with.merge({ query: { full: read_attributes} })
  self.class.get("/#{@api_ver}/users", options)
end
|
ruby
|
{
"resource": ""
}
|
q4027
|
LeMeme.MemeLib.load_directory!
|
train
|
# Scans +dir+ (a Dir.glob pattern) for image files and merges them
# into @memes keyed by a name derived from the path.
# NOTE(review): `path.split` splits on whitespace, so for paths without
# spaces the derived name is the full expanded path minus the
# extension — File.basename may have been intended; confirm against
# callers before changing.
def load_directory!(dir)
  paths = Dir.glob(dir).grep LeMeme::IMAGE_EXTENSIONS
  @memes.merge!(paths.reduce({}) do |images, path|
    path = File.expand_path(path)
    name = path.split.last.sub(LeMeme::IMAGE_EXTENSIONS, '').to_s
    images.merge(name => path)
  end)
end
|
ruby
|
{
"resource": ""
}
|
q4028
|
LeMeme.MemeLib.meme
|
train
|
# Builds a Meme from the named template, or from a random loaded
# template when none is given.
def meme(template: nil, top: nil, bottom: nil, watermark: nil)
  path = template.nil? ? @memes.values.sample : @memes[template]
  Meme.new(path, top: top, bottom: bottom, watermark: watermark)
end
|
ruby
|
{
"resource": ""
}
|
q4029
|
Permits.Ability.role_groups
|
train
|
# Returns the role-group keys whose role lists include at least one of
# the user account's roles.
#
# The original used #map purely for side effects (mutating an external
# accumulator and discarding the mapped array); each_with_object makes
# the accumulation explicit while preserving order and results.
def role_groups
  user_account_class.role_groups.each_with_object([]) do |(group, roles), groups|
    groups << group if user_account.has_any_role?(roles)
  end
end
|
ruby
|
{
"resource": ""
}
|
q4030
|
Plum.FlowControl.send
|
train
|
# Sends a frame, applying flow control to DATA frames: they are queued
# in the send buffer and either deferred via the :send_deferred
# callback when the window is exhausted (fired on the connection for
# streams, locally otherwise) or flushed via consume_send_buffer.
# Non-DATA frames bypass the window and are sent immediately.
def send(frame)
  if Frame::Data === frame
    @send_buffer << frame
    if @send_remaining_window < frame.length
      if Stream === self
        connection.callback(:send_deferred, self, frame)
      else
        callback(:send_deferred, self, frame)
      end
    else
      consume_send_buffer
    end
  else
    send_immediately frame
  end
end
|
ruby
|
{
"resource": ""
}
|
q4031
|
Plum.FlowControl.window_update
|
train
|
# Grows the receive window by +wsi+ and announces it to the peer with
# a WINDOW_UPDATE frame (stream ID 0 when self is the connection).
def window_update(wsi)
  @recv_remaining_window += wsi
  sid = (Stream === self) ? self.id : 0
  send_immediately Frame::WindowUpdate.new(sid, wsi)
end
|
ruby
|
{
"resource": ""
}
|
q4032
|
Bicho.Bug.add_attachment
|
train
|
# Adds an attachment to this bug via the client, forwarding any extra
# keyword options, and returns the first element of the response.
def add_attachment(summary, file, **kwargs)
  @client.add_attachment(summary, file, id, **kwargs).first
end
|
ruby
|
{
"resource": ""
}
|
q4033
|
PixelPi.Leds.show
|
train
|
# Pushes the current LED values to the output. In debug mode the
# colors are rendered to stdout with Rainbow on a single rewritten
# line instead of driving real hardware.
# @return [self]
def show
  closed!
  if @debug
    ary = @leds.map { |value| Rainbow(@debug).color(*to_rgb(value)) }
    $stdout.print "\r#{ary.join}"
  end
  self
end
|
ruby
|
{
"resource": ""
}
|
q4034
|
PixelPi.Leds.[]=
|
train
|
# Assigns +value+ (converted via to_color) to LED +num+.
# Raises via closed! when the device is closed.
# @raise [IndexError] when +num+ is outside 0...@leds.length
def []=( num, value )
  closed!
  unless (0...@leds.length).cover?(num)
    raise IndexError, "index #{num} is outside of LED range: 0...#{@leds.length-1}"
  end
  @leds[num] = to_color(value)
end
|
ruby
|
{
"resource": ""
}
|
q4035
|
PixelPi.Leds.replace
|
train
|
# Replaces every LED value with Integer(ary[idx]). Extra entries in
# +ary+ are ignored; missing entries raise via Integer(nil).
# @return [self]
def replace( ary )
  closed!
  (0...@leds.length).each do |idx|
    @leds[idx] = Integer(ary[idx])
  end
  self
end
|
ruby
|
{
"resource": ""
}
|
q4036
|
PixelPi.Leds.fill
|
train
|
# Fills the LED array. With a block, each index is yielded and the
# result converted via to_color; otherwise the first argument is the
# color value and any remaining args are forwarded to Array#fill
# (start/length semantics).
# @return [self]
def fill( *args )
  closed!
  if block_given?
    @leds.fill do |ii|
      value = yield(ii)
      to_color(value)
    end
  else
    value = to_color(args.shift)
    @leds.fill(value, *args)
  end
  self
end
|
ruby
|
{
"resource": ""
}
|
q4037
|
Bicho.Query.each
|
train
|
# Iterates the bugs matching this query. Without a block, returns an
# enumerator over the search results instead.
def each
  ret = Bicho.client.search_bugs(self)
  return ret.each unless block_given?
  ret.each { |bug| yield bug }
end
|
ruby
|
{
"resource": ""
}
|
q4038
|
Bicho.Query.method_missing
|
train
|
# Treats any known search-field name as a chainable query builder:
# each argument is appended to the query under that field. Unknown
# names fall through to super.
# NOTE(review): respond_to_missing? should be overridden to match —
# confirm it exists elsewhere in the class.
# @return [self]
def method_missing(method_name, *args)
  return super unless Bicho::SEARCH_FIELDS
                      .map(&:first)
                      .include?(method_name)
  args.each do |arg|
    append_query(method_name.to_s, arg)
  end
  self
end
|
ruby
|
{
"resource": ""
}
|
q4039
|
Bicho.Query.append_query
|
train
|
# Accumulates +value+ in the query map under +param+, flattening so
# that scalars and arrays of values both end up in a single flat array.
def append_query(param, value)
  existing = @query_map.key?(param) ? @query_map[param] : []
  @query_map[param] = [existing, value].flatten
end
|
ruby
|
{
"resource": ""
}
|
q4040
|
RTM.RTMMethodSpace.method_missing
|
train
|
# Routes unknown method calls to the RTM API: tasks.notes gets its own
# nested method space; anything else becomes an rtm.<name>.<method>
# endpoint call with the method name converted via rtmize.
def method_missing(symbol,*args)
  if (@name == 'tasks' && symbol.to_s == 'notes')
    return RTMMethodSpace.new("tasks.notes",@endpoint)
  else
    rtm_method = "rtm.#{@name}.#{symbol.to_s.rtmize}"
    @endpoint.call_method(rtm_method,*args)
  end
end
|
ruby
|
{
"resource": ""
}
|
q4041
|
Bitstamp.Model.attributes=
|
train
|
# Mass-assigns attributes via the corresponding writer methods.
# Attributes with no writer are reported to stdout and skipped rather
# than raising.
def attributes=(attributes = {})
  attributes.each do |name, value|
    begin
      send("#{name}=", value)
    rescue NoMethodError => e
      puts "Unable to assign #{name}. No such method."
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q4042
|
Plum.Client.start
|
train
|
# Starts the client session. With a block: yields self, resumes
# processing, returns the block's value and always closes the session.
# Without a block: returns self with the session left open.
# @raise [IOError] if the session was already started
def start(&block)
  raise IOError, "Session already started" if @started
  _start
  if block_given?
    begin
      ret = yield(self)
      resume
      return ret
    ensure
      close
    end
  end
  self
end
|
ruby
|
{
"resource": ""
}
|
q4043
|
Plum.Client.request
|
train
|
# Issues a request on the current session, merging per-call options
# over the client configuration.
# @raise [ArgumentError] unless the :method and :path pseudo-headers
#   are present
def request(headers, body, options = {}, &block)
  raise ArgumentError, ":method and :path headers are required" unless headers[":method"] && headers[":path"]
  @session.request(headers, body, @config.merge(options), &block)
end
|
ruby
|
{
"resource": ""
}
|
q4044
|
PEROBS.FlatFile.open
|
train
|
# Opens (or creates) the database.blobs file in @db_dir, takes an
# exclusive non-blocking flock on it, enables synchronous writes and
# opens the index files (clearing them when the database is new).
def open
  file_name = File.join(@db_dir, 'database.blobs')
  new_db_created = false
  begin
    if File.exist?(file_name)
      @f = File.open(file_name, 'rb+')
    else
      PEROBS.log.info "New FlatFile database '#{file_name}' created"
      @f = File.open(file_name, 'wb+')
      new_db_created = true
    end
  rescue IOError => e
    PEROBS.log.fatal "Cannot open FlatFile database #{file_name}: " +
      e.message
  end
  # A failed non-blocking lock means another process holds the database.
  unless @f.flock(File::LOCK_NB | File::LOCK_EX)
    PEROBS.log.fatal "FlatFile database '#{file_name}' is locked by " +
      "another process"
  end
  @f.sync = true
  open_index_files(!new_db_created)
end
|
ruby
|
{
"resource": ""
}
|
q4045
|
PEROBS.FlatFile.close
|
train
|
# Closes the flat file database: closes the space list and index if
# open, erases any marks, then flushes, unlocks, fsyncs and closes the
# blob file handle.
def close
  @space_list.close if @space_list.is_open?
  @index.close if @index.is_open?
  if @marks
    @marks.erase
    @marks = nil
  end
  if @f
    @f.flush
    @f.flock(File::LOCK_UN)
    @f.fsync
    @f.close
    @f = nil
  end
end
|
ruby
|
{
"resource": ""
}
|
q4046
|
PEROBS.FlatFile.sync
|
train
|
# Flushes and fsyncs the blob file (fatal on IO errors), then syncs
# the index and space list.
def sync
  begin
    @f.flush
    @f.fsync
  rescue IOError => e
    PEROBS.log.fatal "Cannot sync flat file database: #{e.message}"
  end
  @index.sync
  @space_list.sync
end
|
ruby
|
{
"resource": ""
}
|
q4047
|
PEROBS.FlatFile.delete_obj_by_id
|
train
|
# Deletes the object with the given ID.
# @return [Boolean] true when an object was found and deleted, false
#   when the ID is unknown
def delete_obj_by_id(id)
  addr = find_obj_addr_by_id(id)
  return false unless addr
  delete_obj_by_address(addr, id)
  true
end
|
ruby
|
{
"resource": ""
}
|
q4048
|
PEROBS.FlatFile.delete_obj_by_address
|
train
|
# Deletes the object blob at +addr+: removes the index entry, clears
# the blob header flags and registers the freed space with the space
# list (when those files are open).
def delete_obj_by_address(addr, id)
  @index.remove(id) if @index.is_open?
  header = FlatFileBlobHeader.read(@f, addr, id)
  header.clear_flags
  @space_list.add_space(addr, header.length) if @space_list.is_open?
end
|
ruby
|
{
"resource": ""
}
|
q4049
|
PEROBS.FlatFile.delete_unmarked_objects
|
train
|
# Deletes every valid blob whose ID is not marked, then defragmentizes
# the file and rebuilds the index and space list.
# @return [Integer] number of deleted objects
def delete_unmarked_objects
  # We don't update the index and the space list during this operation as
  # we defragmentize the blob file at the end. We'll end the operation
  # with an empty space list.
  clear_index_files
  deleted_objects_count = 0
  @progressmeter.start('Sweeping unmarked objects', @f.size) do |pm|
    each_blob_header do |header|
      if header.is_valid? && [email protected]?(header.id)
        delete_obj_by_address(header.addr, header.id)
        deleted_objects_count += 1
      end
      pm.update(header.addr)
    end
  end
  defragmentize
  # Update the index file and create a new, empty space list.
  regenerate_index_and_spaces
  deleted_objects_count
end
|
ruby
|
{
"resource": ""
}
|
q4050
|
PEROBS.FlatFile.write_obj_by_id
|
train
|
# Writes +raw_obj+ for +id+ into the flat file and returns the address
# it was stored at. Large objects (> 256 bytes) are deflate-compressed.
# An existing blob for the same ID is only invalidated after the new
# blob is fully written, so a crash leaves either the old or the new
# version intact.
def write_obj_by_id(id, raw_obj)
  # Check if we have already an object with the given ID. We'll mark it as
  # outdated and save the header for later deletion. In case this
  # operation is aborted or interrupted we ensure that we either have the
  # old or the new version available.
  if (old_addr = find_obj_addr_by_id(id))
    old_header = FlatFileBlobHeader.read(@f, old_addr)
    old_header.set_outdated_flag
  end
  crc = checksum(raw_obj)
  # If the raw_obj is larger then 256 characters we will compress it to
  # safe some space in the database file. For smaller strings the
  # performance impact of compression is not compensated by writing
  # less data to the storage.
  compressed = false
  if raw_obj.bytesize > 256
    raw_obj = Zlib.deflate(raw_obj)
    compressed = true
  end
  addr, length = find_free_blob(raw_obj.bytesize)
  begin
    if length != -1
      # Just a safeguard so we don't overwrite current data.
      header = FlatFileBlobHeader.read(@f, addr)
      if header.length != length
        PEROBS.log.fatal "Length in free list (#{length}) and header " +
          "(#{header.length}) for address #{addr} don't match."
      end
      if raw_obj.bytesize > header.length
        PEROBS.log.fatal "Object (#{raw_obj.bytesize}) is longer than " +
          "blob space (#{header.length})."
      end
      if header.is_valid?
        PEROBS.log.fatal "Entry at address #{addr} with flags: " +
          "#{header.flags} is already used for ID #{header.id}."
      end
    end
    flags = 1 << FlatFileBlobHeader::VALID_FLAG_BIT
    flags |= (1 << FlatFileBlobHeader::COMPRESSED_FLAG_BIT) if compressed
    FlatFileBlobHeader.new(@f, addr, flags, raw_obj.bytesize, id, crc).write
    @f.write(raw_obj)
    if length != -1 && raw_obj.bytesize < length
      # The new object was not appended and it did not completely fill the
      # free space. So we have to write a new header to mark the remaining
      # empty space.
      unless length - raw_obj.bytesize >= FlatFileBlobHeader::LENGTH
        PEROBS.log.fatal "Not enough space to append the empty space " +
          "header (space: #{length} bytes, object: #{raw_obj.bytesize} " +
          "bytes)."
      end
      space_address = @f.pos
      space_length = length - FlatFileBlobHeader::LENGTH - raw_obj.bytesize
      FlatFileBlobHeader.new(@f, space_address, 0, space_length,
                             0, 0).write
      # Register the new space with the space list.
      if @space_list.is_open? && space_length > 0
        @space_list.add_space(space_address, space_length)
      end
    end
    # Once the blob has been written we can update the index as well.
    @index.insert(id, addr) if @index.is_open?
    if old_addr
      # If we had an existing object stored for the ID we have to mark
      # this entry as deleted now.
      old_header.clear_flags
      # And register the newly freed space with the space list.
      if @space_list.is_open?
        @space_list.add_space(old_addr, old_header.length)
      end
    else
      @f.flush
    end
  rescue IOError => e
    PEROBS.log.fatal "Cannot write blob for ID #{id} to FlatFileDB: " +
      e.message
  end
  addr
end
|
ruby
|
{
"resource": ""
}
|
q4051
|
PEROBS.FlatFile.read_obj_by_address
|
train
|
# Reads and returns the raw object stored for +id+ at +addr+,
# inflating compressed blobs and verifying both the header ID and the
# CRC checksum (mismatches are fatal).
def read_obj_by_address(addr, id)
  header = FlatFileBlobHeader.read(@f, addr, id)
  if header.id != id
    PEROBS.log.fatal "Database index corrupted: Index for object " +
      "#{id} points to object with ID #{header.id}"
  end
  buf = nil
  begin
    @f.seek(addr + FlatFileBlobHeader::LENGTH)
    buf = @f.read(header.length)
  rescue IOError => e
    PEROBS.log.fatal "Cannot read blob for ID #{id}: #{e.message}"
  end
  # Uncompress the data if the compression bit is set in the flags byte.
  if header.is_compressed?
    begin
      buf = Zlib.inflate(buf)
    rescue Zlib::BufError, Zlib::DataError
      PEROBS.log.fatal "Corrupted compressed block with ID " +
        "#{header.id} found."
    end
  end
  if checksum(buf) != header.crc
    PEROBS.log.fatal "Checksum failure while reading blob ID #{id}"
  end
  buf
end
|
ruby
|
{
"resource": ""
}
|
q4052
|
PEROBS.FlatFile.defragmentize
|
train
|
# Compacts the blob file by sliding every valid blob over the space
# occupied by deleted or corrupted blobs, then truncates the file to
# the new size and syncs. Logs a summary of the reclaimed space.
def defragmentize
  # Accumulated number of bytes that valid blobs must move backwards.
  distance = 0
  new_file_size = 0
  deleted_blobs = 0
  corrupted_blobs = 0
  valid_blobs = 0
  # Iterate over all entries.
  @progressmeter.start('Defragmentizing blobs file', @f.size) do |pm|
    each_blob_header do |header|
      # If we have stumbled over a corrupted blob we treat it similar to a
      # deleted blob and reuse the space.
      if header.corruption_start
        distance += header.addr - header.corruption_start
        corrupted_blobs += 1
      end
      # Total size of the current entry
      entry_bytes = FlatFileBlobHeader::LENGTH + header.length
      if header.is_valid?
        # We have found a valid entry.
        valid_blobs += 1
        if distance > 0
          begin
            # Read current entry into a buffer
            @f.seek(header.addr)
            buf = @f.read(entry_bytes)
            # Write the buffer right after the end of the previous entry.
            @f.seek(header.addr - distance)
            @f.write(buf)
            # Mark the space between the relocated current entry and the
            # next valid entry as deleted space.
            FlatFileBlobHeader.new(@f, @f.pos, 0,
                                   distance - FlatFileBlobHeader::LENGTH,
                                   0, 0).write
            @f.flush
          rescue IOError => e
            PEROBS.log.fatal "Error while moving blob for ID " +
              "#{header.id}: #{e.message}"
          end
        end
        new_file_size = header.addr - distance +
          FlatFileBlobHeader::LENGTH + header.length
      else
        deleted_blobs += 1
        distance += entry_bytes
      end
      pm.update(header.addr)
    end
  end
  PEROBS.log.info "#{distance / 1000} KiB/#{deleted_blobs} blobs of " +
    "#{@f.size / 1000} KiB/#{valid_blobs} blobs or " +
    "#{'%.1f' % (distance.to_f / @f.size * 100.0)}% reclaimed"
  if corrupted_blobs > 0
    PEROBS.log.info "#{corrupted_blobs} corrupted blob(s) found. Space " +
      "was recycled."
  end
  @f.flush
  @f.truncate(new_file_size)
  @f.flush
  sync
end
|
ruby
|
{
"resource": ""
}
|
q4053
|
PEROBS.FlatFile.refresh
|
train
|
# Rewrites every valid blob in the current storage format by reading,
# deleting and re-inserting it, then defragmentizes the file and
# rebuilds the index and space list.
def refresh
  # This iteration might look scary as we iterate over the entries while
  # while we are rearranging them. Re-inserted items may be inserted
  # before or at the current entry and this is fine. They also may be
  # inserted after the current entry and will be re-read again unless they
  # are inserted after the original file end.
  file_size = @f.size
  # We don't update the index and the space list during this operation as
  # we defragmentize the blob file at the end. We'll end the operation
  # with an empty space list.
  clear_index_files
  @progressmeter.start('Converting objects to new storage format',
                       @f.size) do |pm|
    each_blob_header do |header|
      if header.is_valid?
        buf = read_obj_by_address(header.addr, header.id)
        delete_obj_by_address(header.addr, header.id)
        write_obj_by_id(header.id, buf)
      end
      # Some re-inserted blobs may be inserted after the original file end.
      # No need to process those blobs again.
      break if header.addr >= file_size
      pm.update(header.addr)
    end
  end
  # Reclaim the space saved by compressing entries.
  defragmentize
  # Recreate the index file and create an empty space list.
  regenerate_index_and_spaces
end
|
ruby
|
{
"resource": ""
}
|
q4054
|
PEROBS.FlatFile.regenerate_index_and_spaces
|
train
|
# Rebuilds the index and space list from the blob file contents.
# Duplicate blobs for the same ID are logged, the later one is
# discarded and its space recycled; invalid blobs only feed the space
# list.
def regenerate_index_and_spaces
  PEROBS.log.warn "Re-generating FlatFileDB index and space files"
  @index.open unless @index.is_open?
  @index.clear
  @space_list.open unless @space_list.is_open?
  @space_list.clear
  @progressmeter.start('Re-generating database index', @f.size) do |pm|
    each_blob_header do |header|
      if header.is_valid?
        if (duplicate_pos = @index.get(header.id))
          PEROBS.log.error "FlatFile contains multiple blobs for ID " +
            "#{header.id}. First blob is at address #{duplicate_pos}. " +
            "Other blob found at address #{header.addr}."
          if header.length > 0
            @space_list.add_space(header.addr, header.length)
          end
          discard_damaged_blob(header)
        else
          @index.insert(header.id, header.addr)
        end
      else
        if header.length > 0
          @space_list.add_space(header.addr, header.length)
        end
      end
      pm.update(header.addr)
    end
  end
  sync
end
|
ruby
|
{
"resource": ""
}
|
q4055
|
Chicago.RakeTasks.define
|
train
|
# Defines the db:create_null_records and db:write_migrations rake
# tasks for the schema's staging database (and, when configured, the
# presentation database).
def define
  namespace :db do
    desc "Write Null dimension records"
    task :create_null_records do
      # TODO: replace this with proper logging.
      warn "Loading NULL records."
      @schema.dimensions.each do |dimension|
        dimension.create_null_records(@staging_db)
      end
    end
    desc "Writes a migration file to change the database based on defined Facts & Dimensions"
    task :write_migrations do
      writer = Database::MigrationFileWriter.new
      writer.write_migration_file(@staging_db, @schema,
                                  staging_directory)
      if @presentation_db
        writer.write_migration_file(@presentation_db, @schema,
                                    presentation_directory, false)
      end
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q4056
|
PEROBS.LockFile.lock
|
train
|
# Attempts to take the lock file, retrying up to @max_retries times
# with @pause_secs between attempts. Stale locks older than
# @timeout_secs are broken: the owning process is sent TERM (then KILL
# if still alive) and the file removed.
# @return [Boolean] true when the lock was taken
def lock
  retries = @max_retries
  while retries > 0
    begin
      @file = File.open(@file_name, File::RDWR | File::CREAT, 0644)
      @file.sync = true
      if @file.flock(File::LOCK_EX | File::LOCK_NB)
        # We have taken the lock. Write the PID into the file and leave it
        # open.
        @file.write($$)
        @file.flush
        @file.fsync
        @file.truncate(@file.pos)
        PEROBS.log.debug "Lock file #{@file_name} has been taken for " +
          "process #{$$}"
        return true
      else
        # We did not manage to take the lock file.
        if @file.mtime <= Time.now - @timeout_secs
          pid = @file.read.to_i
          PEROBS.log.info "Old lock file found for PID #{pid}. " +
            "Removing lock."
          if is_running?(pid)
            send_signal('TERM', pid)
            # Give the process 3 seconds to terminate gracefully.
            sleep 3
            # Then send a SIGKILL to ensure it's gone.
            send_signal('KILL', pid) if is_running?(pid)
          end
          @file.close
          File.delete(@file_name) if File.exist?(@file_name)
        else
          PEROBS.log.debug "Lock file #{@file_name} is taken. Trying " +
            "to get it #{retries} more times."
        end
      end
    rescue => e
      PEROBS.log.error "Cannot take lock file #{@file_name}: #{e.message}"
      return false
    end
    retries -= 1
    sleep(@pause_secs)
  end
  PEROBS.log.info "Failed to get lock file #{@file_name} due to timeout"
  false
end
|
ruby
|
{
"resource": ""
}
|
q4057
|
PEROBS.LockFile.unlock
|
train
|
# Releases the currently held lock: unlocks, syncs and closes the file
# handle, then deletes the lock file via forced_unlock.
# @return [Boolean] true on success, false when no lock is held or the
#   release fails
def unlock
  unless @file
    PEROBS.log.error "There is no current lock to release"
    return false
  end
  begin
    @file.flock(File::LOCK_UN)
    @file.fsync
    @file.close
    forced_unlock
    PEROBS.log.debug "Lock file #{@file_name} for PID #{$$} has been " +
      "released"
  rescue => e
    PEROBS.log.error "Releasing of lock file #{@file_name} failed: " +
      e.message
    return false
  end
  true
end
|
ruby
|
{
"resource": ""
}
|
q4058
|
PEROBS.LockFile.forced_unlock
|
train
|
# Unconditionally clears the lock state and deletes the lock file if
# present; deletion failures are logged, not raised.
def forced_unlock
  @file = nil
  if File.exist?(@file_name)
    begin
      File.delete(@file_name)
      PEROBS.log.debug "Lock file #{@file_name} has been deleted."
    rescue IOError => e
      PEROBS.log.error "Cannot delete lock file #{@file_name}: " +
        e.message
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q4059
|
Wmctile.Router.window
|
train
|
# Returns a Window for the active window (when :use_active_window) or
# for the index-th configured window string. When the window is not
# found and an :exec argument is configured, runs that command in the
# background instead; otherwise re-raises WindowNotFound.
def window(index = 0)
  if @arguments[:use_active_window]
    Window.new(@arguments, Wmctile.current_window_id)
  else
    Window.new(@arguments, @window_strings[index])
  end
rescue Errors::WindowNotFound
  if @arguments[:exec]
    # Exec the command
    puts "Executing command: #{@arguments[:exec]}"
    system "#{@arguments[:exec]} &"
  else
    raise Errors::WindowNotFound, @window_strings[index]
  end
end
|
ruby
|
{
"resource": ""
}
|
q4060
|
Wmctile.Router.switch_to_workspace
|
train
|
# Switches to the given workspace via wmctrl. Accepts a workspace
# number or the strings 'next', 'previous' and 'history'.
# @return the resolved workspace identifier
def switch_to_workspace(target_workspace)
  case target_workspace
  when 'next'
    target_workspace = Wmctile.current_workspace + 1
  when 'previous'
    target_workspace = Wmctile.current_workspace - 1
  when 'history'
    # must be -2 as -1 is the current workspace
    target_workspace = Wmctile.memory.get(:workspace_history)[-2]
  end
  system "wmctrl -s #{target_workspace}"
  target_workspace
end
|
ruby
|
{
"resource": ""
}
|
q4061
|
StateManager.Base.transition_to
|
train
|
# Transitions the state manager to the leaf state at +path+, entering
# and exiting intermediate states and firing before/after callbacks.
# @raise [StateNotFound] when no ancestor state resolves the path
# @raise [InvalidTransition] when the target is not a leaf state
def transition_to(path, current_state=self.current_state)
  path = path.to_s
  state = current_state || self
  exit_states = []
  # Find the nearest parent state on the path of the current state which
  # has a sub-state at the given path
  new_states = state.find_states(path)
  while(!new_states) do
    exit_states << state
    state = state.parent_state
    raise(StateNotFound, transition_error(path)) unless state
    new_states = state.find_states(path)
  end
  # The first time we enter a state, the state_manager gets entered as well
  new_states.unshift(self) unless has_state?
  # Can only transition to leaf states
  # TODO: transition to the initial_state of the state?
  raise(InvalidTransition, transition_error(path)) unless new_states.last.leaf?
  enter_states = new_states - exit_states
  exit_states = exit_states - new_states
  from_state = current_state
  # TODO: does it make more sense to throw an error instead of allowing
  # a transition to the current state?
  to_state = enter_states.last || from_state
  run_before_callbacks(from_state, to_state, current_event, enter_states, exit_states)
  # Set the state on the underlying resource
  self.current_state = to_state
  run_after_callbacks(from_state, to_state, current_event, enter_states, exit_states)
end
|
ruby
|
{
"resource": ""
}
|
q4062
|
StateManager.Base.available_events
|
train
|
# Collects the events available from the current state, walking up the
# parent-state chain. Events defined closer to the current state win
# over identically named ancestor events.
def available_events
  merged = {}
  state = current_state
  until state.nil?
    merged = state.class.specification.events.merge(merged)
    state = state.parent_state
  end
  merged
end
|
ruby
|
{
"resource": ""
}
|
q4063
|
Bugzilla.Product.enterable_products
|
train
|
# Returns the products the current user can enter bugs against, as a
# hash keyed by product name.
def enterable_products
  ids = get_enterable_products
  Hash[*get(ids)['products'].map {|x| [x['name'], x]}.flatten]
end
|
ruby
|
{
"resource": ""
}
|
q4064
|
Bugzilla.Product.accessible_products
|
train
|
# Returns the products the current user can access, as a hash keyed by
# product name.
def accessible_products
  ids = get_accessible_products
  Hash[*get(ids)['products'].map {|x| [x['name'], x]}.flatten]
end
|
ruby
|
{
"resource": ""
}
|
q4065
|
PEROBS.FlatFileDB.put_hash
|
train
|
# Serializes +hash+ as JSON into <@db_dir>/<name>.json via RobustFile;
# a failed write is fatal.
def put_hash(name, hash)
  file_name = File.join(@db_dir, name + '.json')
  begin
    RobustFile.write(file_name, hash.to_json)
  rescue IOError => e
    PEROBS.log.fatal "Cannot write hash file '#{file_name}': #{e.message}"
  end
end
|
ruby
|
{
"resource": ""
}
|
q4066
|
PolyBelongsTo.Core.pbt_parent
|
train
|
# Returns the parent record of this belongs-to association: the class
# is resolved from pbt_type for polymorphic relations, or camelized
# from the association name otherwise. Returns nil when there is no
# association or the foreign key is nil.
def pbt_parent
  val = pbt
  if val && !pbt_id.nil?
    if poly?
      "#{pbt_type}".constantize.where(id: pbt_id).first
    else
      "#{val}".camelize.constantize.where(id: pbt_id).first
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q4067
|
PolyBelongsTo.Core.pbt_top_parent
|
train
|
# Walks up the pbt_parent chain to the topmost ancestor, guarding
# against cycles with a set of already-visited records. Returns nil
# when the record has no parent at all.
def pbt_top_parent
  record = self
  return nil unless record.pbt_parent
  no_repeat = PolyBelongsTo::SingletonSet.new
  while !no_repeat.include?(record.pbt_parent) && !record.pbt_parent.nil?
    no_repeat.add?(record)
    record = record.pbt_parent
  end
  record
end
|
ruby
|
{
"resource": ""
}
|
q4068
|
PolyBelongsTo.Core.pbt_parents
|
train
|
# Returns all parent records: the single polymorphic parent when
# poly?, otherwise one lookup per belongs-to association, with nil
# results removed.
def pbt_parents
  if poly?
    Array[pbt_parent].compact
  else
    self.class.pbts.map do |i|
      try{ "#{i}".camelize.constantize.where(id: send("#{i}_id")).first }
    end.compact
  end
end
|
ruby
|
{
"resource": ""
}
|
q4069
|
NdrSupport.Password.valid?
|
train
|
# Tests password strength: normalizes the string, strips any words in
# +word_list+, slugifies the remainder and checks it against the
# password requirements.
def valid?(string, word_list: [])
  string = prepare_string(string.to_s.dup)
  slug = slugify(strip_common_words(string, word_list))
  meets_requirements?(slug)
end
|
ruby
|
{
"resource": ""
}
|
q4070
|
Synvert::Core.Rewriter.add_file
|
train
|
# Writes +content+ to +filename+ under the configured project path,
# creating intermediate directories as needed. Existing files are left
# untouched (with a message); no-op in sandbox mode.
def add_file(filename, content)
  return if @sandbox
  filepath = File.join(Configuration.instance.get(:path), filename)
  if File.exist?(filepath)
    puts "File #{filepath} already exists."
    return
  end
  FileUtils.mkdir_p File.dirname(filepath)
  File.open filepath, 'w' do |file|
    file.write content
  end
end
|
ruby
|
{
"resource": ""
}
|
q4071
|
Synvert::Core.Rewriter.remove_file
|
train
|
# Deletes +filename+ under the configured project path when it exists;
# no-op in sandbox mode.
def remove_file(filename)
  return if @sandbox
  file_path = File.join(Configuration.instance.get(:path), filename)
  File.delete(file_path) if File.exist?(file_path)
end
|
ruby
|
{
"resource": ""
}
|
q4072
|
EPPClient.Connection.open_connection
|
train
|
# Opens the TCP connection and wraps it in an SSL socket, then reads
# and processes the server's initial EPP greeting frame.
def open_connection
  @tcpserver = TCPSocket.new(server, port)
  @socket = OpenSSL::SSL::SSLSocket.new(@tcpserver, @context)
  # Synchronously close the connection & socket
  @socket.sync_close
  # Connect
  @socket.connect
  # Get the initial greeting frame
  greeting_process(one_frame)
end
|
ruby
|
{
"resource": ""
}
|
q4073
|
EPPClient.Connection.close_connection
|
train
|
# Closes the SSL socket and the underlying TCP connection when
# present. Returns true when both have been cleared; otherwise nil.
def close_connection
  if defined?(@socket) && @socket.is_a?(OpenSSL::SSL::SSLSocket)
    @socket.close
    @socket = nil
  end
  if defined?(@tcpserver) && @tcpserver.is_a?(TCPSocket)
    @tcpserver.close
    @tcpserver = nil
  end
  return true if @tcpserver.nil? && @socket.nil?
end
|
ruby
|
{
"resource": ""
}
|
q4074
|
EPPClient.Connection.one_frame
|
train
|
# Reads one EPP frame: a 4-byte big-endian length prefix (which
# includes the prefix itself) followed by the XML payload, stored in
# @recv_frame and parsed via recv_frame_to_xml.
# @raise [SocketError] when the length header cannot be read
def one_frame
  size = @socket.read(4)
  raise SocketError, @socket.eof? ? 'Connection closed by remote server' : 'Error reading frame from remote server' if size.nil?
  size = size.unpack('N')[0]
  @recv_frame = @socket.read(size - 4)
  recv_frame_to_xml
end
|
ruby
|
{
"resource": ""
}
|
q4075
|
Snogmetrics.KissmetricsApi.identify
|
train
|
# Queues a KISSmetrics identify call unless the session already holds
# the same identity; any previously queued identify event is replaced.
def identify(identity)
  unless @session[:km_identity] == identity
    queue.delete_if { |e| e.first == 'identify' }
    queue << ['identify', identity]
    @session[:km_identity] = identity
  end
end
|
ruby
|
{
"resource": ""
}
|
q4076
|
GoogleVoice.Api.sms
|
train
|
# Sends an SMS through Google Voice, logging in first if needed. The
# number is validated and the message HTML-encoded before posting.
def sms(remote_number, text_message)
  login unless logged_in?
  remote_number = validate_number(remote_number)
  text_message = @coder.encode(text_message)
  @agent.post('https://www.google.com/voice/sms/send/', :id => '', :phoneNumber => remote_number, :text => text_message, "_rnr_se" => @rnr_se)
end
|
ruby
|
{
"resource": ""
}
|
q4077
|
GoogleVoice.Api.call
|
train
|
# Initiates a Google Voice call connecting remote_number to
# forwarding_number.
#
# Both numbers are normalized by validate_number; logs in first when there
# is no active session. Returns whatever @agent.post returns.
#
# NOTE(review): phoneType => 2 is a magic constant whose meaning is not
# established by this code -- confirm against the Voice API before changing.
def call(remote_number, forwarding_number)
  login unless logged_in?
  remote_number = validate_number(remote_number)
  forwarding_number = validate_number(forwarding_number)
  @agent.post('https://www.google.com/voice/call/connect/', :outgoingNumber => remote_number, :forwardingNumber => forwarding_number, :phoneType => 2, :subscriberNumber => 'undefined', :remember => '0', "_rnr_se" => @rnr_se)
end
|
ruby
|
{
"resource": ""
}
|
q4078
|
GoogleVoice.Api.init_xml_methods
|
train
|
# Defines one "<label>_xml" singleton reader per inbox label, each fetching
# that label's recent-items feed via get_xml_document.
#
# Fix: the original label list contained "trash" twice, redundantly
# redefining the same method; the duplicate is removed (behavior unchanged).
def init_xml_methods()
  labels = %w{ unread inbox starred all spam trash voicemail sms recorded placed received missed }
  (class << self; self; end).class_eval do
    labels.each do |label|
      define_method "#{label}_xml".to_sym do
        get_xml_document("https://www.google.com/voice/inbox/recent/#{label}")
      end
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q4079
|
QuickBase.Client.setHTTPConnection
|
train
|
# Configures the HTTP(S) transport used for subsequent QuickBase requests.
#
# useSSL        - true for HTTPS (port 443), false for HTTP (port 80).
# org           - subdomain, e.g. "www" or a company realm.
# domain        - second-level domain, normally "quickbase".
# proxy_options - optional Hash with "proxy_server", "proxy_port",
#                 "proxy_user", "proxy_password".
#
# Uses HTTPClient when available (USING_HTTPCLIENT), otherwise Net::HTTP.
# NOTE(review): certificate verification is disabled (VERIFY_NONE) on the
# Net::HTTP path -- kept for compatibility, but worth revisiting.
def setHTTPConnection( useSSL, org = "www", domain = "quickbase", proxy_options = nil )
  @useSSL = useSSL
  @org = org
  @domain = domain
  if USING_HTTPCLIENT
    if proxy_options
      # Fix: honor an explicitly configured proxy port. The previous
      # expression parsed as `(port || useSSL) ? "443" : "80"`, which
      # discarded proxy_options["proxy_port"] entirely.
      proxy_port = proxy_options["proxy_port"] || (useSSL ? "443" : "80")
      @httpConnection = HTTPClient.new( "#{proxy_options["proxy_server"]}:#{proxy_port}" )
      @httpConnection.set_auth(proxy_options["proxy_server"], proxy_options["proxy_user"], proxy_options["proxy_password"])
    else
      @httpConnection = HTTPClient.new
    end
  else
    if proxy_options
      @httpProxy = Net::HTTP::Proxy(proxy_options["proxy_server"], proxy_options["proxy_port"], proxy_options["proxy_user"], proxy_options["proxy_password"])
      @httpConnection = @httpProxy.new( "#{@org}.#{@domain}.com", useSSL ? 443 : 80)
    else
      @httpConnection = Net::HTTP.new( "#{@org}.#{@domain}.com", useSSL ? 443 : 80 )
    end
    @httpConnection.use_ssl = useSSL
    @httpConnection.verify_mode = OpenSSL::SSL::VERIFY_NONE
  end
end
|
ruby
|
{
"resource": ""
}
|
q4080
|
QuickBase.Client.setHTTPConnectionAndqbhost
|
train
|
# Convenience wrapper: configures the HTTP connection and the qbhost URL
# prefix in one call, forwarding the same SSL/org/domain settings to both.
def setHTTPConnectionAndqbhost( useSSL, org = "www", domain = "quickbase", proxy_options = nil )
  setHTTPConnection( useSSL, org, domain, proxy_options )
  setqbhost( useSSL, org, domain )
end
|
ruby
|
{
"resource": ""
}
|
q4081
|
QuickBase.Client.sendRequest
|
train
|
# Builds and sends one QuickBase HTTP API request, then processes the reply.
#
# api_Request    - request name (Symbol or String); "API_" is prepended
#                  unless the name already contains "API_" or "QBIS_".
# xmlRequestData - optional XML payload string; any configured @udata/@rdr/
#                  @xsl/@encoding values are appended to it before the
#                  whole body is wrapped in a <qdbapi> envelope.
#
# Side effects: sets @requestXML, @requestHeaders, @requestURL,
# @responseCode, @responseXML, @requestSucceeded and the error fields, and
# fires the onSendRequest / onRequestSucceeded / onRequestFailed events.
# Raises @lastError when @stopOnError is set and the request failed.
def sendRequest( api_Request, xmlRequestData = nil )
  fire( "onSendRequest" )
  resetErrorInfo
  # ---- set up the request ----
  getDBforRequestURL( api_Request )
  getAuthenticationXMLforRequest( api_Request )
  isHTMLRequest = isHTMLRequest?( api_Request )
  api_Request = "API_" + api_Request.to_s if prependAPI?( api_Request )
  # Optional decorations are appended in place; NOTE(review): when
  # xmlRequestData is nil these << calls would raise -- presumably the
  # decorations are only ever set together with a payload. TODO confirm.
  xmlRequestData << toXML( :udata, @udata ) if @udata and @udata.length > 0
  xmlRequestData << toXML( :rdr, @rdr ) if @rdr and @rdr.length > 0
  xmlRequestData << toXML( :xsl, @xsl ) if @xsl and @xsl.length > 0
  xmlRequestData << toXML( :encoding, @encoding ) if @encoding and @encoding.length > 0
  # Wrap authentication (and payload, when given) in the <qdbapi> envelope.
  if xmlRequestData
    @requestXML = toXML( :qdbapi, @authenticationXML + xmlRequestData )
  else
    @requestXML = toXML( :qdbapi, @authenticationXML )
  end
  @requestHeaders = @standardRequestHeaders
  @requestHeaders["Content-Length"] = "#{@requestXML.length}"
  # QuickBase dispatches on this header rather than on the URL.
  @requestHeaders["QUICKBASE-ACTION"] = api_Request
  @requestURL = "#{@qbhost}#{@dbidForRequestURL}"
  printRequest( @requestURL, @requestHeaders, @requestXML ) if @printRequestsAndResponses
  @logger.logRequest( @dbidForRequestURL, api_Request, @requestXML ) if @logger
  begin
    # ---- send the request, via whichever HTTP client is configured ----
    if USING_HTTPCLIENT
      response = @httpConnection.post( @requestURL, @requestXML, @requestHeaders )
      @responseCode = response.status
      @responseXML = response.content
    else
      if Net::HTTP.version_1_2?
        response = @httpConnection.post( @requestURL, @requestXML, @requestHeaders )
        @responseCode = response.code
        @responseXML = response.body
      else
        # Legacy Net::HTTP 1.1 API returned a (code, body) pair.
        @responseCode, @responseXML = @httpConnection.post( @requestURL, @requestXML, @requestHeaders )
      end
    end
    printResponse( @responseCode, @responseXML ) if @printRequestsAndResponses
    # HTML responses are returned raw; XML responses get parsed for errors.
    if not isHTMLRequest
      processResponse( @responseXML )
    end
    @logger.logResponse( @lastError, @responseXML ) if @logger
    fireDBChangeEvents
  rescue Net::HTTPBadResponse => error
    @lastError = "Bad HTTP Response: #{error}"
  rescue Net::HTTPHeaderSyntaxError => error
    @lastError = "Bad HTTP header syntax: #{error}"
  rescue StandardError => error
    @lastError = "Error processing #{api_Request} request: #{error}"
  end
  # Success means QuickBase reported errcode "0" and nothing was rescued.
  @requestSucceeded = ( @errcode == "0" and @lastError == "" )
  fire( @requestSucceeded ? "onRequestSucceeded" : "onRequestFailed" )
  if @stopOnError and !@requestSucceeded
    raise @lastError
  end
end
|
ruby
|
{
"resource": ""
}
|
q4082
|
QuickBase.Client.prependAPI?
|
train
|
# True when the request name still needs the "API_" prefix, i.e. it does
# not already contain "API_" or "QBIS_".
def prependAPI?( request )
  name = request.to_s
  !(name.include?("API_") || name.include?("QBIS_"))
end
|
ruby
|
{
"resource": ""
}
|
q4083
|
QuickBase.Client.toggleTraceInfo
|
train
|
# Turns Ruby's global call tracing on or off; prints one line per VM event.
#
# With a block: the trace state applies only for the block's duration and
# is restored (inverted back) afterwards. Without a block the new state
# persists. Returns self for chaining. Note: enabling this prints a very
# large amount of output.
def toggleTraceInfo( showTrace )
  tracer = proc { |event, file, line, id, binding, classname|
    printf "%8s %s:%-2d %10s %8s\n", event, file, line, id, classname
  }
  if showTrace
    set_trace_func tracer
    if block_given?
      yield
      set_trace_func nil
    end
  else
    set_trace_func nil
    if block_given?
      yield
      set_trace_func tracer
    end
  end
  self
end
|
ruby
|
{
"resource": ""
}
|
q4084
|
QuickBase.Client.getErrorInfoFromResponse
|
train
|
# Extracts errcode/errtext/errdetail from the parsed response document and
# caches them in the matching instance variables (empty string when absent).
#
# Sets @lastError to a combined message when errcode is non-zero; returns
# @lastError (unchanged when there is no parsed response or no error).
def getErrorInfoFromResponse
  if @responseXMLdoc
    @errcode = getResponseValue( :errcode ) || ""
    @errtext = getResponseValue( :errtext ) || ""
    @errdetail = getResponseValue( :errdetail ) || ""
    unless @errcode == "0"
      @lastError = "Error code: #{@errcode} text: #{@errtext}: detail: #{@errdetail}"
    end
  end
  @lastError
end
|
ruby
|
{
"resource": ""
}
|
q4085
|
QuickBase.Client.parseResponseXML
|
train
|
# Parses a QuickBase response body into a REXML document.
#
# Whitespace stripping is controlled by the @ignoreCR/@ignoreLF/@ignoreTAB
# flags (honored only when set to exactly true); the input string is
# mutated in place. The document is stored in @responseXMLdoc (and its
# @qdbapi alias) and returned; returns nil when xml is nil.
def parseResponseXML( xml )
  return nil unless xml
  xml.gsub!( "\r", "" ) if @ignoreCR == true
  xml.gsub!( "\n", "" ) if @ignoreLF == true
  xml.gsub!( "\t", "" ) if @ignoreTAB == true
  # NOTE(review): pattern and replacement are identical here -- looks like a
  # garbled escape of "<BR/>"; left untouched to preserve behavior.
  xml.gsub!( "<BR/>", "<BR/>" ) if @escapeBR
  @qdbapi = @responseXMLdoc = REXML::Document.new( xml )
end
|
ruby
|
{
"resource": ""
}
|
q4086
|
QuickBase.Client.getResponseValue
|
train
|
# Returns the text of the named element directly under the response root.
#
# field - element name (Symbol or String).
#
# Returns the element's text when present, the bare REXML::Element when it
# exists but has no text, or nil when the element (or a parsed response)
# is missing. Also caches the result in @fieldValue.
def getResponseValue( field )
  @fieldValue = nil
  if field and @responseXMLdoc
    @fieldValue = @responseXMLdoc.root.elements[ field.to_s ]
    # Fix: the original referenced an undefined local `fieldValue` here,
    # raising NameError on every lookup; it must read the ivar just assigned.
    @fieldValue = @fieldValue.text if @fieldValue and @fieldValue.has_text?
  end
  @fieldValue
end
|
ruby
|
{
"resource": ""
}
|
q4087
|
QuickBase.Client.getResponsePathValues
|
train
|
# Concatenates the text of every REXML element found at the given response
# path into a single string (empty string when nothing matches).
def getResponsePathValues( path )
  @fieldValue = ""
  getResponseElements( path ).each do |element|
    next unless element.is_a?( REXML::Element ) && element.has_text?
    @fieldValue << element.text
  end
  @fieldValue
end
|
ruby
|
{
"resource": ""
}
|
q4088
|
QuickBase.Client.getResponsePathValueByDBName
|
train
|
# Intended to look up a database id by database name under the given
# response path, falling back to an empty string.
#
# NOTE(review): several things here look broken and should be confirmed
# against callers before relying on this method:
#   * when path (or @responseXMLdoc) is falsy, `e` stays nil and `e.each`
#     raises NoMethodError;
#   * REXML elements have no #dbinfo accessor, so the block body would
#     raise NoMethodError as soon as an Element is visited;
#   * the block parameter shadows the outer `e`.
def getResponsePathValueByDBName ( path, dbName)
  @fieldValue = ""
  if path and @responseXMLdoc
    e = @responseXMLdoc.root.elements[ path.to_s ]
  end
  # Scan children for an entry whose dbinfo/dbname matches dbName and
  # return its dbid; otherwise fall through to the empty default.
  e.each { |e|
    if e and e.is_a?( REXML::Element ) and e.dbinfo.dbname == dbName
      return e.dbinfo.dbid
    end
  }
  @fieldValue
end
|
ruby
|
{
"resource": ""
}
|
q4089
|
QuickBase.Client.getAttributeString
|
train
|
# Renders an element's attributes as "(name=value name=value )".
#
# Returns the empty string when the argument is not a REXML::Element or
# has no attributes.
def getAttributeString( element )
  return "" unless element.is_a?( REXML::Element ) && element.has_attributes?
  text = "("
  element.attributes.each { |attr_name, attr_value| text << "#{attr_name}=#{attr_value} " }
  text << ")"
end
|
ruby
|
{
"resource": ""
}
|
q4090
|
QuickBase.Client.lookupFieldName
|
train
|
# Resolves a display name for a response element.
#
# Plain elements return their tag name; "f" (field) elements are resolved
# through the cached @fields schema to the field's <label> text. Returns
# the empty string for non-elements.
def lookupFieldName( element )
  return "" unless element.is_a?( REXML::Element )
  name = element.name
  if name == "f" && @fields
    fid = element.attributes[ "id" ]
    field = fid ? lookupField( fid ) : nil
    label = field ? field.elements[ "label" ] : nil
    name = label.text if label
  end
  name
end
|
ruby
|
{
"resource": ""
}
|
q4091
|
QuickBase.Client.lookupFieldType
|
train
|
# Resolves the QuickBase field_type for an "f" (field) response element by
# looking the id up in the cached @fields schema.
#
# Returns the empty string for non-field elements or when the schema is
# not loaded.
def lookupFieldType( element )
  field_type = ""
  if element.is_a?( REXML::Element ) && element.name == "f" && @fields
    fid = element.attributes[ "id" ]
    field = fid ? lookupField( fid ) : nil
    field_type = field.attributes[ "field_type" ] if field
  end
  field_type
end
|
ruby
|
{
"resource": ""
}
|
q4092
|
QuickBase.Client.lookupFieldPropertyByName
|
train
|
# Reads a schema property (e.g. "label") for the field with the given name.
#
# Returns the property element's text when present, the bare element when
# it has no text, or nil when the property name is invalid or the field
# cannot be resolved.
def lookupFieldPropertyByName( fieldName, property )
  return nil unless isValidFieldProperty?( property )
  fid = lookupFieldIDByName( fieldName )
  field = fid ? lookupField( fid ) : nil
  result = field ? field.elements[ property ] : nil
  result = result.text if result && result.has_text?
  result
end
|
ruby
|
{
"resource": ""
}
|
q4093
|
QuickBase.Client.isRecordidField?
|
train
|
# True when fid is the id of the table's record-id field (taken as the
# last field of type "recordid" in the schema).
def isRecordidField?( fid )
  recordid_fields = lookupFieldsByType( "recordid" )
  last_field = recordid_fields && recordid_fields.last
  last_field && last_field.attributes[ "id" ] == fid
end
|
ruby
|
{
"resource": ""
}
|
q4094
|
QuickBase.Client.formatFieldValue
|
train
|
# Formats a raw field value for display according to its QuickBase type,
# delegating to the matching format* helper. Unknown types (and nil value
# or type) pass through unchanged.
def formatFieldValue( value, type, options = nil )
  return value unless value and type
  case type
  when "date"                    then formatDate( value )
  when "date / time","timestamp" then formatDate( value, "%m-%d-%Y %I:%M %p" )
  when "timeofday"               then formatTimeOfDay( value, options )
  when "duration"                then formatDuration( value, options )
  when "currency"                then formatCurrency( value, options )
  when "percent"                 then formatPercent( value, options )
  else value
  end
end
|
ruby
|
{
"resource": ""
}
|
q4095
|
QuickBase.Client.findElementByAttributeValue
|
train
|
# Finds an element carrying attribute_name == attribute_value.
#
# Accepts either a REXML::Element (searched via
# each_element_with_attribute) or an Array of candidates. Returns the last
# match, or nil when nothing matches or elements is nil.
def findElementByAttributeValue( elements, attribute_name, attribute_value )
  found = nil
  case elements
  when REXML::Element
    elements.each_element_with_attribute( attribute_name, attribute_value ) { |match| found = match }
  when Array
    elements.each do |candidate|
      found = candidate if candidate.is_a?( REXML::Element ) && candidate.attributes[ attribute_name ] == attribute_value
    end
  end
  found
end
|
ruby
|
{
"resource": ""
}
|
q4096
|
QuickBase.Client.findElementsByAttributeName
|
train
|
# Collects every child element of `elements` that carries the given
# attribute; returns an empty Array when elements is nil.
#
# NOTE(review): unlike findElementByAttributeValue, this only supports a
# REXML::Element argument (Arrays would raise) -- preserved as-is.
def findElementsByAttributeName( elements, attribute_name )
  matches = []
  elements.each_element_with_attribute( attribute_name ) { |child| matches << child } if elements
  matches
end
|
ruby
|
{
"resource": ""
}
|
q4097
|
QuickBase.Client.lookupFieldData
|
train
|
# Finds the field-data element in @field_data_list whose <fid> child text
# matches fid (compared as strings).
#
# Returns the last matching element (cached in @field_data), or nil when
# the list is absent or nothing matches.
def lookupFieldData( fid )
  @field_data = nil
  target = fid.to_s
  if @field_data_list
    @field_data_list.each do |field|
      next unless field.is_a?( REXML::Element ) && field.has_elements?
      fid_element = field.elements[ "fid" ]
      @field_data = field if fid_element && fid_element.has_text? && fid_element.text == target
    end
  end
  @field_data
end
|
ruby
|
{
"resource": ""
}
|
q4098
|
QuickBase.Client.getFieldDataValue
|
train
|
# Returns the <value> text for the given field id from @field_data_list.
#
# fid - field id (Integer or String).
#
# Returns nil when the list is absent, the field is not found, or the
# field has no <value> element or text.
def getFieldDataValue(fid)
  value = nil
  if @field_data_list
    field_data = lookupFieldData(fid)
    if field_data
      valueElement = field_data.elements[ "value" ]
      # Fix: guard against a missing <value> element; previously this
      # called has_text? on nil (NoMethodError), unlike the printable
      # variant below which guards correctly.
      value = valueElement.text if valueElement and valueElement.has_text?
    end
  end
  value
end
|
ruby
|
{
"resource": ""
}
|
q4099
|
QuickBase.Client.getFieldDataPrintableValue
|
train
|
# Returns the <printable> text for the given field id from
# @field_data_list, or nil when the list, field, element, or text is
# missing.
def getFieldDataPrintableValue(fid)
  return nil unless @field_data_list
  field_data = lookupFieldData(fid)
  return nil unless field_data
  printable_element = field_data.elements[ "printable" ]
  if printable_element && printable_element.has_text?
    printable_element.text
  end
end
|
ruby
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.