_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q4300
|
Synvert::Core.Rewriter::Instance.remove_code_or_whole_line
|
train
|
# Removes the 1-based +line+ from +source+ when that line is blank
# (whitespace-only); otherwise returns +source+ unchanged. If the removal
# leaves two consecutive blank lines at that position, one more blank line
# is dropped. A trailing newline in the input is preserved.
def remove_code_or_whole_line(source, line)
  had_trailing_newline = source.end_with?("\n")
  lines = source.split("\n")
  idx = line - 1
  return source unless lines[idx] && lines[idx].strip.empty?

  lines.delete_at(idx)
  # Collapse a blank-line pair created by the removal.
  if lines[idx - 1] && lines[idx - 1].strip.empty? && lines[idx] && lines[idx].strip.empty?
    lines.delete_at(idx)
  end
  lines.join("\n") + (had_trailing_newline ? "\n" : '')
end
|
ruby
|
{
"resource": ""
}
|
q4301
|
OpenSSL.CCM.encrypt
|
train
|
# CCM-encrypts +data+: validates the inputs, then returns the ciphertext
# from #crypt concatenated with the MAC computed over data, nonce and
# additional data. Helpers valid?/crypt/mac are defined elsewhere in this
# class; valid? presumably raises on bad input — confirm at its definition.
def encrypt(data, nonce, additional_data = '')
valid?(data, nonce, additional_data)
crypt(data, nonce) + mac(data, nonce, additional_data)
end
|
ruby
|
{
"resource": ""
}
|
q4302
|
UTF8Encoding.ForceBinary.binary_encode_if_any_high_ascii
|
train
|
# Coerces +string+ to UTF-8 via ensure_utf8, then re-tags its encoding as
# BINARY (ASCII-8BIT) when it contains any byte above 127. Returns the
# (possibly re-tagged) string.
def binary_encode_if_any_high_ascii(string)
  string = ensure_utf8(string)
  # any? expresses the boolean intent directly; detect returned the found
  # byte only to have it discarded.
  string.force_encoding('BINARY') if string.bytes.any? { |byte| byte > 127 }
  string
end
|
ruby
|
{
"resource": ""
}
|
q4303
|
UTF8Encoding.ForceBinary.binary_encode_any_high_ascii_in_hash
|
train
|
# Returns a new Hash with the same keys as +hash+ whose values have each
# been passed through binary_encode_any_high_ascii.
def binary_encode_any_high_ascii_in_hash(hash)
  hash.each_with_object({}) do |(key, value), result|
    result[key] = binary_encode_any_high_ascii(value)
  end
end
|
ruby
|
{
"resource": ""
}
|
q4304
|
Parser::AST.Node.has_key?
|
train
|
# True when this :hash AST node contains a pair whose key evaluates
# (via to_value) to +key+.
# Raises Synvert::Core::MethodNotSupported for any non-hash node type.
def has_key?(key)
if :hash == self.type
self.children.any? { |pair_node| pair_node.key.to_value == key }
else
raise Synvert::Core::MethodNotSupported.new "has_key? is not handled for #{self.debug_info}"
end
end
|
ruby
|
{
"resource": ""
}
|
q4305
|
Parser::AST.Node.hash_value
|
train
|
# Returns the value node paired with +key+ in this :hash AST node, or nil
# when the key is absent.
# Raises Synvert::Core::MethodNotSupported for any non-hash node type.
def hash_value(key)
  if :hash == self.type
    value_node = self.children.find { |pair_node| pair_node.key.to_value == key }
    value_node ? value_node.value : nil
  else
    # Fixed copy-pasted error message: it previously named "has_key?".
    raise Synvert::Core::MethodNotSupported.new "hash_value is not handled for #{self.debug_info}"
  end
end
|
ruby
|
{
"resource": ""
}
|
q4306
|
Parser::AST.Node.to_value
|
train
|
# Converts a literal AST node to its Ruby value: integers, strings,
# symbols, booleans, arrays (element-wise), inclusive ranges and
# parenthesized (:begin) expressions.
# Raises Synvert::Core::MethodNotSupported for any other node type.
def to_value
case self.type
when :int, :str, :sym
self.children.last
when :true
true
when :false
false
when :array
self.children.map(&:to_value)
when :irange
(self.children.first.to_value..self.children.last.to_value)
when :begin
# Only the first child is converted; multi-statement begins are not handled.
self.children.first.to_value
else
raise Synvert::Core::MethodNotSupported.new "to_value is not handled for #{self.debug_info}"
end
end
|
ruby
|
{
"resource": ""
}
|
q4307
|
Parser::AST.Node.recursive_children
|
train
|
# Yields every descendant AST node of this node in depth-first order.
# Non-node children (symbols, literals) are skipped.
def recursive_children
self.children.each do |child|
if Parser::AST::Node === child
yield child
child.recursive_children { |c| yield c }
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4308
|
Parser::AST.Node.match?
|
train
|
# True when this node satisfies every rule in the nested +rules+ hash.
# Each flattened key path is resolved against the node; a trailing :any
# key requires at least one element of the actual collection to match,
# a trailing :not negates the match, and any other path must match exactly.
def match?(rules)
flat_hash(rules).keys.all? do |multi_keys|
if multi_keys.last == :any
actual_values = actual_value(self, multi_keys[0...-1])
expected = expected_value(rules, multi_keys)
actual_values.any? { |actual| match_value?(actual, expected) }
elsif multi_keys.last == :not
actual = actual_value(self, multi_keys[0...-1])
expected = expected_value(rules, multi_keys)
!match_value?(actual, expected)
else
actual = actual_value(self, multi_keys)
expected = expected_value(rules, multi_keys)
match_value?(actual, expected)
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4309
|
Parser::AST.Node.match_value?
|
train
|
# Compares an actual value (often an AST node) against an expected rule
# value, dispatching on the expected value's class. AST-node actuals are
# compared via their source text; plain values via ordinary coercions.
# Raises Synvert::Core::MethodNotSupported for unsupported expected types.
def match_value?(actual, expected)
case expected
when Symbol
if Parser::AST::Node === actual
actual.to_source == ":#{expected}"
else
actual.to_sym == expected
end
when String
if Parser::AST::Node === actual
# Accept exact source, symbol-literal form (":foo"), or the source
# with surrounding quote characters stripped.
actual.to_source == expected ||
(actual.to_source[0] == ':' && actual.to_source[1..-1] == expected) ||
actual.to_source[1...-1] == expected
else
actual.to_s == expected
end
when Regexp
if Parser::AST::Node === actual
actual.to_source =~ Regexp.new(expected.to_s, Regexp::MULTILINE)
else
actual.to_s =~ Regexp.new(expected.to_s, Regexp::MULTILINE)
end
when Array
# Arrays must match pairwise and be the same length.
return false unless expected.length == actual.length
actual.zip(expected).all? { |a, e| match_value?(a, e) }
when NilClass
actual.nil?
when Numeric
if Parser::AST::Node === actual
actual.children[0] == expected
else
actual == expected
end
when TrueClass
:true == actual.type
when FalseClass
:false == actual.type
when Parser::AST::Node
actual == expected
else
raise Synvert::Core::MethodNotSupported.new "#{expected.class} is not handled for match_value?"
end
end
|
ruby
|
{
"resource": ""
}
|
q4310
|
Parser::AST.Node.flat_hash
|
train
|
# Flattens a nested Hash into a single-level Hash whose keys are arrays
# describing the path to each leaf value, e.g.
#   flat_hash(a: { b: 1 }) #=> { [:a, :b] => 1 }
def flat_hash(h, k = [])
  h.each_with_object({}) do |(key, val), flat|
    path = k + [key]
    if val.is_a?(Hash)
      flat.merge!(flat_hash(val, path))
    else
      flat[path] = val
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q4311
|
Parser::AST.Node.actual_value
|
train
|
# Resolves a key path against +node+ by sending each key in turn to the
# current object; yields nil as soon as an intermediate result is nil.
def actual_value(node, multi_keys)
  # The original ternary (`key == :source ? n.send(key) : n.send(key)`)
  # had identical branches; both reduce to a plain send.
  multi_keys.inject(node) { |n, key| n.send(key) if n }
end
|
ruby
|
{
"resource": ""
}
|
q4312
|
Parser::AST.Node.expected_value
|
train
|
# Digs the value addressed by the +multi_keys+ path out of the nested
# +rules+ hash.
def expected_value(rules, multi_keys)
  multi_keys.reduce(rules) { |level, key| level[key] }
end
|
ruby
|
{
"resource": ""
}
|
q4313
|
PEROBS.BigArrayNode.set
|
train
|
# Replaces the value at global +index+ with +value+ by descending from
# this node to the leaf that owns the index. Aborts via #fatal when the
# index is out of range or no leaf can be reached.
def set(index, value)
node = self
# Traverse the tree to find the right node to add or replace the value.
while node do
# Once we have reached a leaf node we can insert or replace the value.
if node.is_leaf?
if index >= node.values.size
node.fatal "Set index (#{index}) larger than values array " +
"(#{node.values.size})."
end
node.values[index] = value
return
else
# Descend into the right child node to add the value to.
cidx = node.search_child_index(index)
# Make the index relative to the chosen child's offset.
index -= node.offsets[cidx]
node = node.children[cidx]
end
end
node.fatal "Could not find proper node to set the value while " +
"looking for index #{index}"
end
|
ruby
|
{
"resource": ""
}
|
q4314
|
PEROBS.BigArrayNode.insert
|
train
|
# Inserts +value+ at global +index+, splitting any full node encountered
# on the way down and adjusting parent offsets after the leaf insert.
def insert(index, value)
node = self
cidx = nil
# Traverse the tree to find the right node to add or replace the value.
while node do
# All nodes that we find on the way that are full will be split into
# two half-full nodes.
if node.size >= @tree.node_size
# Re-add the index from the last parent node since we will descent
# into one of the split nodes.
index += node.parent.offsets[cidx] if node.parent
node = node.split_node
end
# Once we have reached a leaf node we can insert or replace the value.
if node.is_leaf?
node.values.insert(index, value)
# Tell ancestors that this subtree grew by one value.
node.parent.adjust_offsets(node, 1) if node.parent
return
else
# Descend into the right child node to add the value to.
cidx = node.search_child_index(index)
if (index -= node.offsets[cidx]) < 0
node.fatal "Index (#{index}) became negative"
end
node = node.children[cidx]
end
end
node.fatal "Could not find proper node to insert the value while " +
"looking for index #{index}"
end
|
ruby
|
{
"resource": ""
}
|
q4315
|
PEROBS.BigArrayNode.value_index
|
train
|
# Converts the node-local value index +idx+ into the global array index
# by accumulating parent offsets along the path up to the root.
def value_index(idx)
node = self
while node.parent
idx += node.parent.offsets[node.index_in_parent_node]
node = node.parent
end
idx
end
|
ruby
|
{
"resource": ""
}
|
q4316
|
PEROBS.BigArrayNode.move_first_element_of_successor_to_child
|
train
|
# Rebalancing helper: moves the first element (a value for leaves, a
# child subtree for interior nodes) of the successor sibling into the
# child at +child_index+, updating the parent offsets accordingly.
def move_first_element_of_successor_to_child(child_index)
child = @children[child_index]
succ = @children[child_index + 1]
if child.is_leaf?
# Adjust offset for the successor node
@offsets[child_index + 1] += 1
# Move the value
child.values << succ.values.shift
else
# Before:
#
# Root Node +--------------------------------+
# Offsets | 0 7 |
# Children | |
# child v succ v
# Level 1 +---------------++-------------------------------------+
# Offsets | 0 4 || 0 4 6 9 |
# Children | | | | | |
# v v v v v v
# Leaves +---------++-------++----------++-------++----------++-------+
# Values | A B C D || E F G || H I J K || L M || N O P || Q R |
#
# Index 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17
#
# After:
#
# Root Node +--------------------------------+
# Offsets | 0 11 |
# Children | |
# child v succ v
# Level 1 +--------------------------++--------------------------+
# Offsets | 0 4 7 || 0 2 5 |
# Children | | | | | |
# v v v v v v
# Leaves +---------++-------++----------++-------++----------++-------+
# Values | A B C D || E F G || H I J K || L M || N O P || Q R |
#
# Index 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17
#
# Adjust the offsets of the successor. The 2nd original offset
# determines the delta for the parent node.
succ.offsets.shift
delta = succ.offsets.first
succ.offsets.map! { |o| o -= delta }
# The additional child offset can be taken from the parent node
# reference.
child.offsets << @offsets[child_index + 1]
# The parent node offset of the successor needs to be corrected by the
# delta value.
@offsets[child_index + 1] += delta
# Move the child reference
child.children << succ.children.shift
child.children.last.parent = child
end
end
|
ruby
|
{
"resource": ""
}
|
q4317
|
PEROBS.BigArrayNode.move_last_element_of_predecessor_to_child
|
train
|
# Rebalancing helper: moves the last element (a value for leaves, a child
# subtree for interior nodes) of the predecessor sibling into the child
# at +child_index+, updating the parent offsets accordingly.
def move_last_element_of_predecessor_to_child(child_index)
pred = @children[child_index - 1]
child = @children[child_index]
if child.is_leaf?
# Adjust offset for the predecessor node
@offsets[child_index] -= 1
# Move the value
child.values.unshift(pred.values.pop)
else
# Before:
#
# Root Node +--------------------------------+
# Offsets | 0 13 |
# Children | |
# pred v child v
# Level 1 +---------------------------------++-------------------+
# Offsets | 0 4 7 11 || 0 3 |
# Children | | | | | |
# v v v v v v
# Leaves +---------++-------++----------++-------++----------++-------+
# Values | A B C D || E F G || H I J K || L M || N O P || Q R |
#
# Index 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17
#
# After:
#
# Root Node +--------------------------------+
# Offsets | 0 11 |
# Children | |
# prepd v child v
# Level 1 +--------------------------++--------------------------+
# Offsets | 0 4 7 || 0 2 5 |
# Children | | | | | |
# v v v v v v
# Leaves +---------++-------++----------++-------++----------++-------+
# Values | A B C D || E F G || H I J K || L M || N O P || Q R |
#
# Index 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17
#
# Remove the last predecessor offset and update the child offset with
# it
delta = @offsets[child_index] - pred.offsets.last
@offsets[child_index] = pred.offsets.pop
# Adjust all the offsets of the child
child.offsets.map! { |o| o += delta }
# And prepend the 0 offset
child.offsets.unshift(0)
# Move the child reference
child.children.unshift(pred.children.pop)
child.children.first.parent = child
end
end
|
ruby
|
{
"resource": ""
}
|
q4318
|
PEROBS.BigHash.[]=
|
train
|
# Stores +value+ under +key+. Keys are hashed; collisions are kept in a
# Collisions list holding the original keys. The whole operation runs in
# a single store transaction and keeps entry_counter in sync.
def []=(key, value)
hashed_key = hash_key(key)
@store.transaction do
entry = @store.new(Entry, key, value)
if (existing_entry = @btree.get(hashed_key))
# There is already an existing entry for this hashed key.
if existing_entry.is_a?(Collisions)
# Find the right index to insert the new entry. If there is
# already an entry with the same key overwrite that entry.
index_to_insert = 0
overwrite = false
existing_entry.each do |ae|
if ae.key == key
overwrite = true
break
end
index_to_insert += 1
end
self.entry_counter += 1 unless overwrite
existing_entry[index_to_insert] = entry
elsif existing_entry.key == key
# The existing value is for the identical key. We can safely
# overwrite
@btree.insert(hashed_key, entry)
else
# There is a single existing entry, but for a different key. Create
# a new PEROBS::Array and store both entries.
array_entry = @store.new(Collisions)
array_entry << existing_entry
array_entry << entry
@btree.insert(hashed_key, array_entry)
self.entry_counter += 1
end
else
# No existing entry. Insert the new entry.
@btree.insert(hashed_key, entry)
self.entry_counter += 1
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4319
|
PEROBS.BigHash.[]
|
train
|
# Returns the value stored under +key+, or nil when absent. Collision
# lists are scanned for the entry whose original key matches exactly.
def [](key)
hashed_key = hash_key(key)
unless (entry = @btree.get(hashed_key))
return nil
end
if entry.is_a?(PEROBS::Array)
entry.each do |ae|
return ae.value if ae.key == key
end
else
return entry.value if entry.key == key
end
nil
end
|
ruby
|
{
"resource": ""
}
|
q4320
|
PEROBS.BigHash.has_key?
|
train
|
# True when the hash contains an entry for +key+. Hash collisions are
# resolved by comparing the original keys.
def has_key?(key)
  entry = @btree.get(hash_key(key))
  return false unless entry

  if entry.is_a?(PEROBS::Array)
    entry.each do |collision_entry|
      return true if collision_entry.key == key
    end
    false
  else
    entry.key == key
  end
end
|
ruby
|
{
"resource": ""
}
|
q4321
|
PEROBS.BigHash.delete
|
train
|
# Removes the entry for +key+ from a collision list and returns its
# value; returns nil when the key is absent.
# NOTE(review): in the single-entry (non-Array) branch the value is
# returned but the entry is NOT removed from @btree and entry_counter is
# not decremented — confirm whether this is intentional.
def delete(key)
hashed_key = hash_key(key)
unless (entry = @btree.get(hashed_key))
return nil
end
if entry.is_a?(PEROBS::Array)
entry.each_with_index do |ae, i|
if ae.key == key
self.entry_counter -= 1
return entry.delete_at(i).value
end
end
else
return entry.value if entry.key == key
end
nil
end
|
ruby
|
{
"resource": ""
}
|
q4322
|
PEROBS.BigHash.check
|
train
|
# Consistency check: verifies the underlying BTree and that the number of
# iterable entries matches the stored entry counter. Returns true/false.
# NOTE(review): reads @entry_counter directly while the writers use
# self.entry_counter — confirm both refer to the same attribute.
def check
return false unless @btree.check
i = 0
each do |k, v|
i += 1
end
unless @entry_counter == i
PEROBS.log.error "BigHash contains #{i} values but entry counter " +
"is #{@entry_counter}"
return false
end
true
end
|
ruby
|
{
"resource": ""
}
|
q4323
|
RegApi2.SymHash.method_missing
|
train
|
# Hash sugar: `h.foo = v` stores under "foo", `h.foo?` returns the
# truthiness of self["foo"], and `h.foo` reads self[:foo]/self["foo"]
# when present (nil otherwise).
# NOTE(review): no matching respond_to_missing? override is visible here.
def method_missing(key, *args, &block)
if key.to_s =~ /\A(.+)=\z/
self[$1] = args.first
return args.first
end
if key.to_s =~ /\A(.+)\?\z/
return !!self[$1]
end
return self[key] if has_key?(key)
nil
end
|
ruby
|
{
"resource": ""
}
|
q4324
|
PEROBS.SpaceTree.open
|
train
|
# Opens the nodes backing store, clears the cache, and sets @root_address
# from either a freshly created root node (empty store) or the first
# stored entry (existing tree).
def open
@nodes.open
@cache.clear
node = @nodes.total_entries == 0 ?
SpaceTreeNode::create(self) :
SpaceTreeNode::load(self, @nodes.first_entry)
@root_address = node.node_address
end
|
ruby
|
{
"resource": ""
}
|
q4325
|
PEROBS.SpaceTree.add_space
|
train
|
# Registers a free space of +size+ bytes at +address+ in the tree.
# Aborts via the fatal logger when size is not positive.
def add_space(address, size)
if size <= 0
PEROBS.log.fatal "Size (#{size}) must be larger than 0."
end
# The following check is fairly costly and should never trigger unless
# there is a bug in the PEROBS code. Only use this for debugging.
#if has_space?(address, size)
# PEROBS.log.fatal "The space with address #{address} and size " +
# "#{size} can't be added twice."
#end
root.add_space(address, size)
end
|
ruby
|
{
"resource": ""
}
|
q4326
|
PEROBS.SpaceTree.get_space
|
train
|
# Finds a free space of at least +size+ bytes. An exact-size match is
# preferred; otherwise any equal-or-larger space is returned. Returns the
# [address, size] pair or nil when nothing fits.
def get_space(size)
  if size <= 0
    PEROBS.log.fatal "Size (#{size}) must be larger than 0."
  end
  exact_match = root.find_matching_space(size)
  return exact_match if exact_match

  # Falls back to nil when no larger space exists either.
  root.find_equal_or_larger_space(size)
end
|
ruby
|
{
"resource": ""
}
|
q4327
|
PEROBS.SpaceTree.each
|
train
|
# Yields [blob_address, size] for every node in the tree, visiting each
# node once on entry during the traversal.
def each
root.each do |node, mode, stack|
if mode == :on_enter
yield(node.blob_address, node.size)
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4328
|
EPPClient.AFNIC.legalEntityInfos
|
train
|
# Parses the frnic legalEntityInfos XML fragment +leI+ into a Hash:
# legal status, optional idStatus (with when/source attributes),
# identifier fields (siren, VAT, ...) and association details (waldec or
# declaration/publication data).
def legalEntityInfos(leI) #:nodoc:
ret = {}
ret[:legalStatus] = leI.xpath('frnic:legalStatus', EPPClient::SCHEMAS_URL).attr('s').value
unless (r = leI.xpath('frnic:idStatus', EPPClient::SCHEMAS_URL)).empty?
ret[:idStatus] = { :value => r.text }
ret[:idStatus][:when] = r.attr('when').value if r.attr('when')
ret[:idStatus][:source] = r.attr('source').value if r.attr('source')
end
%w(siren VAT trademark DUNS local).each do |val|
unless (r = leI.xpath("frnic:#{val}", EPPClient::SCHEMAS_URL)).empty?
ret[val.to_sym] = r.text
end
end
unless (asso = leI.xpath('frnic:asso', EPPClient::SCHEMAS_URL)).empty?
ret[:asso] = {}
if !(r = asso.xpath('frnic:waldec', EPPClient::SCHEMAS_URL)).empty?
ret[:asso][:waldec] = r.text
else
# Without a waldec id, fall back to declaration/publication details.
unless (decl = asso.xpath('frnic:decl', EPPClient::SCHEMAS_URL)).empty?
ret[:asso][:decl] = Date.parse(decl.text)
end
publ = asso.xpath('frnic:publ', EPPClient::SCHEMAS_URL)
ret[:asso][:publ] = {
:date => Date.parse(publ.text),
:page => publ.attr('page').value,
}
if (announce = publ.attr('announce')) && announce.value != '0'
ret[:asso][:publ][:announce] = announce.value
end
end
end
ret
end
|
ruby
|
{
"resource": ""
}
|
q4329
|
PEROBS.IDList.insert
|
train
|
# Inserts +id+ into the page whose ID range covers it, splitting full
# pages (possibly repeatedly) until the target page has room.
def insert(id)
# Find the index of the page that should hold ID.
index = @page_records.bsearch_index { |pr| pr.max_id >= id }
# Get the corresponding IDListPageRecord object.
page = @page_records[index]
# In case the page is already full we'll have to create a new page.
# There is no guarantee that a split will yield an page with space as we
# split by ID range, not by distributing the values evenly across the
# two pages.
while page.is_full?
new_page = page.split
# Store the newly created page into the page_records list.
@page_records.insert(index + 1, new_page)
if id >= new_page.min_id
# We need to insert the ID into the newly created page. Adjust index
# and page reference accordingly.
index += 1
page = new_page
end
end
# Insert the ID into the page.
page.insert(id)
end
|
ruby
|
{
"resource": ""
}
|
q4330
|
PEROBS.IDList.check
|
train
|
# Validates the page record chain: the first record starts at 0, ranges
# are contiguous without gaps or overlaps, each record passes its own
# check, and the last record ends at 2**64. Raises RuntimeError on any
# violation.
def check
last_max = -1
unless (min_id = @page_records.first.min_id) == 0
raise RuntimeError, "min_id of first record (#{min_id}) " +
"must be 0."
end
@page_records.each do |pr|
unless pr.min_id == last_max + 1
raise RuntimeError, "max_id of previous record (#{last_max}) " +
"must be exactly 1 smaller than current record (#{pr.min_id})."
end
last_max = pr.max_id
pr.check
end
unless last_max == 2 ** 64
raise RuntimeError, "max_id of last records " +
"(#{@page_records.last.max_id}) must be #{2 ** 64})."
end
end
|
ruby
|
{
"resource": ""
}
|
q4331
|
LNCS.Paper.paths_to_pdfs
|
train
|
# Returns the names of all PDF file entries contained in the zip archive
# located at #path.
def paths_to_pdfs
  pdf_names = []
  Zip::ZipFile.open(path) do |zipfile|
    zipfile.each do |entry|
      next unless zipfile.get_entry(entry).file?
      pdf_names << entry.name if entry.name.end_with? ".pdf"
    end
  end
  pdf_names
end
|
ruby
|
{
"resource": ""
}
|
q4332
|
Tenacity.ClassMethods.t_has_one
|
train
|
# Declares a has_one association named +name+: registers the association
# and defines reader and writer methods that delegate to the HasOne
# association helpers.
def t_has_one(name, options={})
extend(Associations::HasOne::ClassMethods)
association = _t_create_association(:t_has_one, name, options)
initialize_has_one_association(association)
define_method(association.name) do |*params|
get_associate(association, params) do
has_one_associate(association)
end
end
define_method("#{association.name}=") do |associate|
set_associate(association, associate) do
set_has_one_associate(association, associate)
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4333
|
Tenacity.ClassMethods.t_belongs_to
|
train
|
# Declares a belongs_to association named +name+: registers the
# association and defines reader and writer methods that delegate to the
# BelongsTo association helpers.
def t_belongs_to(name, options={})
extend(Associations::BelongsTo::ClassMethods)
association = _t_create_association(:t_belongs_to, name, options)
initialize_belongs_to_association(association)
define_method(association.name) do |*params|
get_associate(association, params) do
belongs_to_associate(association)
end
end
define_method("#{association.name}=") do |associate|
set_associate(association, associate) do
set_belongs_to_associate(association, associate)
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4334
|
Tenacity.ClassMethods.t_has_many
|
train
|
# Declares a has_many association named +name+: registers the association
# and defines the collection reader/writer plus the singularized _ids
# reader/writer, all delegating to the HasMany association helpers.
def t_has_many(name, options={})
  extend(Associations::HasMany::ClassMethods)
  association = _t_create_association(:t_has_many, name, options)
  initialize_has_many_association(association)
  define_method(association.name) do |*params|
    get_associate(association, params) do
      has_many_associates(association)
    end
  end
  define_method("#{association.name}=") do |associates|
    _t_mark_dirty if respond_to?(:_t_mark_dirty)
    set_associate(association, associates) do
      set_has_many_associates(association, associates)
    end
  end
  define_method("#{ActiveSupport::Inflector.singularize(association.name.to_s)}_ids") do
    has_many_associate_ids(association)
  end
  define_method("#{ActiveSupport::Inflector.singularize(association.name.to_s)}_ids=") do |associate_ids|
    _t_mark_dirty if respond_to?(:_t_mark_dirty)
    set_has_many_associate_ids(association, associate_ids)
  end
  define_method(:_t_save_without_callback) do
    save_without_callback
  end
  # The original used a bare `private` before define_method, which flips
  # the class's default visibility for anything defined after this macro
  # runs. Scope the visibility change to just this one helper method.
  private :_t_save_without_callback
end
|
ruby
|
{
"resource": ""
}
|
q4335
|
YARD::MRuby::CodeObjects.FunctionObject.aliases
|
train
|
# Returns the function objects in the enclosing header that are aliases
# of this function (same alias name and scope). Yields an empty list when
# the namespace is not a HeaderObject.
def aliases
  return [] unless namespace.is_a?(HeaderObject)

  namespace.aliases.each_with_object([]) do |(object, alias_name), result|
    result << object if alias_name == name && object.scope == scope
  end
end
|
ruby
|
{
"resource": ""
}
|
q4336
|
Glicko2.RatingPeriod.game
|
train
|
# Records a game between all the seeded players: every ordered pair of
# distinct players contributes one rating sample, scored from their
# relative ranks.
def game(game_seeds, ranks)
game_seeds.each_with_index do |iseed, i|
game_seeds.each_with_index do |jseed, j|
next if i == j
@raters[iseed].add(player(jseed).rating, Util.ranks_to_score(ranks[i], ranks[j]))
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4337
|
Gnucash.Value.+
|
train
|
# Adds another Value (exact fixed-point arithmetic over the lcm of the
# two divisors, returning a Value) or a Numeric (float arithmetic,
# returning a Float). Raises for any other operand type.
def +(other)
if other.is_a?(Value)
lcm_div = @div.lcm(other.div)
Value.new((@val * (lcm_div / @div)) + (other.val * (lcm_div / other.div)), lcm_div)
elsif other.is_a?(Numeric)
to_f + other
else
raise "Unexpected argument"
end
end
|
ruby
|
{
"resource": ""
}
|
q4338
|
Gnucash.Value.*
|
train
|
# Multiplies this value by another Value (coerced to Float first) or a
# Numeric, returning a Float. Raises for any other operand type.
def *(other)
  other = other.to_f if other.is_a?(Value)
  unless other.is_a?(Numeric)
    raise "Unexpected argument (#{other.inspect})"
  end
  to_f * other
end
|
ruby
|
{
"resource": ""
}
|
q4339
|
Chicago.StarSchema.add
|
train
|
# Adds a fact or dimension table to the star schema.
# Raises ArgumentError when +schema_table+ is neither a Schema::Fact nor
# a Schema::Dimension; previously an unrecognized table fell through with
# a nil collection and failed later with an obscure NoMethodError.
def add(schema_table)
  collection =
    if schema_table.kind_of?(Schema::Fact)
      @facts
    elsif schema_table.kind_of?(Schema::Dimension)
      @dimensions
    else
      raise ArgumentError, "Unsupported schema table #{schema_table.inspect}"
    end
  add_to_collection collection, schema_table
end
|
ruby
|
{
"resource": ""
}
|
q4340
|
Chicago.StarSchema.define_fact
|
train
|
# Builds a fact table named +name+ via FactBuilder and adds it to the
# schema.
def define_fact(name, &block)
  builder = Schema::Builders::FactBuilder.new(self)
  add(builder.build(name, &block))
end
|
ruby
|
{
"resource": ""
}
|
q4341
|
Chicago.StarSchema.define_dimension
|
train
|
# Builds a dimension table named +name+ via DimensionBuilder and adds it
# to the schema.
def define_dimension(name, &block)
  builder = Schema::Builders::DimensionBuilder.new(self)
  add(builder.build(name, &block))
end
|
ruby
|
{
"resource": ""
}
|
q4342
|
Chicago.StarSchema.define_shrunken_dimension
|
train
|
# Builds a shrunken dimension named +name+, derived from the dimension
# +base_name+, and adds it to the schema.
def define_shrunken_dimension(name, base_name, &block)
  builder = Schema::Builders::ShrunkenDimensionBuilder.new(self, base_name)
  add(builder.build(name, &block))
end
|
ruby
|
{
"resource": ""
}
|
q4343
|
Tapestry.DataSetter.use_data_with
|
train
|
# Applies +value+ to the page element looked up from +key+ (spaces in the
# key become underscores for the element accessor), delegating to the
# set/select and check/uncheck helpers.
def use_data_with(key, value)
element = send(key.to_s.tr(' ', '_'))
set_and_select(key, element, value)
check_and_uncheck(key, element, value)
end
|
ruby
|
{
"resource": ""
}
|
q4344
|
Bugzilla.Bugzilla.requires_version
|
train
|
# Raises NoMethodError naming +cmd+ when the connected Bugzilla server is
# older than +version_+ (as reported by check_version).
def requires_version(cmd, version_)
  supported, server_version = check_version(version_)
  unless supported
    raise NoMethodError, sprintf("%s is not supported in Bugzilla %s", cmd, server_version)
  end
end
|
ruby
|
{
"resource": ""
}
|
q4345
|
PEROBS.IDListPageFile.load
|
train
|
# Loads the page at +page_idx+ from the cache file (8-byte unsigned
# values, count taken from the record), registers it in the page cache
# and returns the new IDListPage.
def load(page_idx, record)
# The IDListPageRecord will tell us the actual number of values stored
# in this page.
values = []
unless (entries = record.page_entries) == 0
begin
@f.seek(page_idx * @page_size * 8)
values = @f.read(entries * 8).unpack("Q#{entries}")
rescue IOError => e
PEROBS.log.fatal "Cannot read cache file #{@file_name}: #{e.message}"
end
end
# Create the IDListPage object with the given values.
p = IDListPage.new(self, record, page_idx, values)
@pages.insert(p, false)
p
end
|
ruby
|
{
"resource": ""
}
|
q4346
|
PEROBS.IDListPageFile.new_page
|
train
|
# Allocates the next page index, creates a page for +record+ holding
# +values+, marks it as modified and returns the new index.
def new_page(record, values = [])
  index = @page_counter
  @page_counter += 1
  page = IDListPage.new(self, record, index, values)
  mark_page_as_modified(page)
  index
end
|
ruby
|
{
"resource": ""
}
|
q4347
|
PEROBS.IDListPageFile.page
|
train
|
# Returns the page referenced by +record+, loading it from disk when it
# is not cached. Raises RuntimeError when the loaded page's uid does not
# match the record's page index.
def page(record)
p = @pages.get(record.page_idx, record) || load(record.page_idx, record)
unless p.uid == record.page_idx
raise RuntimeError, "Page reference mismatch. Record " +
"#{record.page_idx} points to page #{p.uid}"
end
p
end
|
ruby
|
{
"resource": ""
}
|
q4348
|
PEROBS.IDListPageFile.save_page
|
train
|
# Writes page +p+ back to the cache file as packed 8-byte unsigned
# values. Raises RuntimeError when the record's entry count disagrees
# with the page's value count.
def save_page(p)
if p.record.page_entries != p.values.length
raise RuntimeError, "page_entries mismatch for node #{p.uid}"
end
begin
@f.seek(p.uid * @page_size * 8)
@f.write(p.values.pack('Q*'))
rescue IOError => e
PEROBS.log.fatal "Cannot write cache file #{@file_name}: #{e.message}"
end
end
|
ruby
|
{
"resource": ""
}
|
q4349
|
CALLR.Api.send
|
train
|
# Performs a JSON-RPC 2.0 call against the CALLR API over authenticated
# HTTP POST and returns the parsed response.
# Raises CallrException for non-200 responses (HTTP_CODE_ERROR) and for
# transport-level failures (HTTP_EXCEPTION).
def send(method, params = [], id = nil)
check_auth()
json = {
# Non-integer/absent ids are replaced by a random id in 100..998.
:id => id.nil? || id.is_a?(Integer) == false ? rand(999 - 100) + 100 : id,
:jsonrpc => "2.0",
:method => method,
:params => params.is_a?(Array) ? params : []
}.to_json
uri = URI.parse(API_URL)
http = http_or_http_proxy(uri)
req = Net::HTTP::Post.new(uri.request_uri, @headers)
req.basic_auth(@login, @password)
req.add_field('User-Agent', "sdk=RUBY; sdk-version=#{SDK_VERSION}; lang-version=#{RUBY_VERSION}; platform=#{RUBY_PLATFORM}")
req.add_field('CALLR-Login-As', @login_as) unless @login_as.to_s.empty?
begin
res = http.request(req, json)
if res.code.to_i != 200
raise CallrException.new("HTTP_CODE_ERROR", -1, {:http_code => res.code.to_i, :http_message => res.message})
end
return parse_response(res)
rescue Timeout::Error, Errno::EINVAL, Errno::ECONNRESET, EOFError, Errno::ETIMEDOUT, Errno::ECONNREFUSED,
Net::HTTPBadResponse, Net::HTTPHeaderSyntaxError, Net::ProtocolError => e
raise CallrException.new("HTTP_EXCEPTION", -2, {:exception => e})
end
end
|
ruby
|
{
"resource": ""
}
|
q4350
|
PEROBS.BTreeBlob.write_object
|
train
|
# Stores the raw object bytes under +id+. When the blob is full it is
# first split into a BTree node directory and the object re-inserted via
# the tree; otherwise space is reserved, the bytes written and the index
# updated.
def write_object(id, raw)
if @entries.length > @btreedb.max_blob_size
# The blob has reached the maximum size. Replace the blob with a BTree
# node directory and distribute the blob entires into the sub-blobs of
# the new BTree node.
split_blob
# Insert the passed object into the newly created BTree node.
@btreedb.put_raw_object(raw, id)
else
bytes = raw.bytesize
# CRC of the raw bytes is stored alongside for corruption detection.
crc32 = Zlib.crc32(raw, 0)
start_address = reserve_bytes(id, bytes, crc32)
if write_to_blobs_file(raw, start_address) != bytes
PEROBS.log.fatal 'Object length does not match written bytes'
end
write_index
end
end
|
ruby
|
{
"resource": ""
}
|
q4351
|
PEROBS.BTreeBlob.mark
|
train
|
# Marks the entry for +id+ (garbage-collection flag) and persists the
# index. Aborts via the fatal logger when no entry exists for the id.
def mark(id)
  entry = @entries.find { |e| e[ID] == id }
  if entry
    entry[MARKED] = 1
  else
    PEROBS.log.fatal "Cannot find an entry for ID #{'%016X' % id} " +
      "#{id} to mark"
  end
  write_index
end
|
ruby
|
{
"resource": ""
}
|
q4352
|
PEROBS.BTreeBlob.is_marked?
|
train
|
# Returns whether the entry for +id+ carries the GC mark. When the id is
# unknown, returns false if +ignore_errors+ is set, otherwise aborts via
# the fatal logger.
def is_marked?(id, ignore_errors = false)
  entry = @entries.find { |e| e[ID] == id }
  return entry[MARKED] != 0 if entry
  return false if ignore_errors

  PEROBS.log.fatal "Cannot find an entry for ID #{'%016X' % id} to check"
end
|
ruby
|
{
"resource": ""
}
|
q4353
|
PEROBS.BTreeBlob.delete_unmarked_entries
|
train
|
# Removes all entries without a GC mark from both the id lookup table and
# the entry list, persists the index and returns the ids that were
# deleted.
def delete_unmarked_entries
deleted_ids = []
# First remove the entry from the hash table.
@entries_by_id.delete_if do |id, e|
if e[MARKED] == 0
deleted_ids << id
true
else
false
end
end
# Then delete the entry itself.
@entries.delete_if { |e| e[MARKED] == 0 }
write_index
deleted_ids
end
|
ruby
|
{
"resource": ""
}
|
q4354
|
PEROBS.BTreeBlob.check
|
train
|
# Validates the blob index against the data file: entries must not
# overlap and must lie within the data file's size. Always returns true
# when no fatal condition is hit. The +repair+ flag is currently unused.
def check(repair = false)
  # Determine size of the data blobs file.
  data_file_size = File.exist?(@blobs_file_name) ?
                   File.size(@blobs_file_name) : 0
  next_start = 0
  prev_entry = nil
  @entries.each do |entry|
    # Entries should never overlap.
    if prev_entry && next_start > entry[START]
      # Bug fix: `PEROBS.log.fatal` previously ended at the line break, so
      # it was called with no argument and the message string below was a
      # discarded expression. The message is now actually passed.
      PEROBS.log.fatal(
        "#{@dir}: Index entries are overlapping\n" +
        "ID: #{'%016X' % prev_entry[ID]} " +
        "Start: #{prev_entry[START]} " +
        "Bytes: #{prev_entry[BYTES]}\n" +
        "ID: #{'%016X' % entry[ID]} Start: #{entry[START]} " +
        "Bytes: #{entry[BYTES]}")
    end
    next_start = entry[START] + entry[BYTES]
    # Entries must fit within the data file.
    if next_start > data_file_size
      # Same fix as above: pass the message to fatal instead of dropping it.
      PEROBS.log.fatal(
        "#{@dir}: Entry for ID #{'%016X' % entry[ID]} " +
        "goes beyond 'data' file " +
        "size (#{data_file_size})\n" +
        "ID: #{'%016X' % entry[ID]} Start: #{entry[START]} " +
        "Bytes: #{entry[BYTES]}")
    end
    prev_entry = entry
  end
  true
end
|
ruby
|
{
"resource": ""
}
|
q4355
|
PEROBS.BTreeBlob.write_to_blobs_file
|
train
|
# Writes +raw+ into the blobs file at byte +address+ and returns the
# number of bytes written. IO errors are routed to the fatal logger.
def write_to_blobs_file(raw, address)
  File.write(@blobs_file_name, raw, address)
rescue IOError => e
  PEROBS.log.fatal "Cannot write blobs file #{@blobs_file_name}: " +
    e.message
end
|
ruby
|
{
"resource": ""
}
|
q4356
|
PEROBS.BTreeBlob.read_from_blobs_file
|
train
|
# Reads the raw bytes described by the index +entry+ from the blobs file
# and verifies them against the stored CRC32. Any failure is routed to
# the fatal logger; otherwise the raw bytes are returned.
def read_from_blobs_file(entry)
begin
raw = File.read(@blobs_file_name, entry[BYTES], entry[START])
rescue => e
PEROBS.log.fatal "Cannot read blobs file #{@blobs_file_name}: " +
e.message
end
if Zlib.crc32(raw, 0) != entry[CRC]
PEROBS.log.fatal "BTreeBlob for object #{entry[ID]} has been " +
"corrupted: Checksum mismatch"
end
raw
end
|
ruby
|
{
"resource": ""
}
|
q4357
|
PEROBS.BTreeBlob.reserve_bytes
|
train
|
# Reserves +bytes+ bytes in the blobs file for object +id+ using a
# best-fit search over the gaps between existing entries (appending when
# no gap fits), replaces any previous entry for the same id, records the
# new entry (pre-marked) and returns its start address.
def reserve_bytes(id, bytes, crc32)
# index of first blob after the last seen entry
end_of_last_entry = 0
# blob index of best fit segment
best_fit_start = nil
# best fit segment size in bytes
best_fit_bytes = nil
# Index where to insert the new entry. Append by default.
best_fit_index = -1
# If there is already an entry for an object with the _id_, we mark it
# for deletion.
entry_to_delete = nil
@entries.each.with_index do |entry, i|
if entry[ID] == id
# We've found an old entry for this ID. Mark it for deletion.
entry_to_delete = entry
next
end
gap = entry[START] - end_of_last_entry
if gap >= bytes &&
(best_fit_bytes.nil? || gap < best_fit_bytes)
# We've found a segment that fits the requested bytes and fits
# better than any previous find.
best_fit_start = end_of_last_entry
best_fit_bytes = gap
# The old entry gets deleted before the new one gets inserted. We
# need to correct the index appropriately.
best_fit_index = i - (entry_to_delete ? 1 : 0)
end
end_of_last_entry = entry[START] + entry[BYTES]
end
# Delete the old entry if requested.
@entries.delete(entry_to_delete) if entry_to_delete
# Create a new entry and insert it. The order must match the above
# defined constants!
# Object reads can trigger creation of new objects. As the marking
# process triggers reads as well, all newly created objects are always
# marked to prevent them from being collected right after creation.
entry = [ id, bytes, best_fit_start || end_of_last_entry, 1, crc32 ]
@entries.insert(best_fit_index, entry)
@entries_by_id[id] = entry
entry[START]
end
|
ruby
|
{
"resource": ""
}
|
q4358
|
PEROBS.PersistentObjectCache.get
|
train
|
# Fetches the object with +uid+: modified entries first, then the
# unmodified slot cache (slot is uid % size, so the uid must be
# re-checked), and finally loads it from the backing collection.
def get(uid, ref = nil)
# First check if it's a modified object.
if (object = @modified_entries[uid])
return object
end
# Then check the unmodified object list.
if (object = @unmodified_entries[uid % @size]) && object.uid == uid
return object
end
# If we don't have it in memory we need to load it.
@klass::load(@collection, uid, ref)
end
|
ruby
|
{
"resource": ""
}
|
q4359
|
PEROBS.PersistentObjectCache.delete
|
train
|
# Drops the object with +uid+ from both cache sections; the unmodified
# slot is only cleared when it actually holds that uid.
def delete(uid)
  @modified_entries.delete(uid)
  slot = uid % @size
  cached = @unmodified_entries[slot]
  @unmodified_entries[slot] = nil if cached && cached.uid == uid
end
|
ruby
|
{
"resource": ""
}
|
q4360
|
PEROBS.PersistentObjectCache.flush
|
train
|
# Saves all modified objects and resets the modified set, either
# immediately (+now+) or once the flush countdown reaches zero.
# @flush_times is incremented on every call, flushed or not — presumably
# a statistics counter; confirm at its other uses.
def flush(now = false)
if now || (@flush_counter -= 1) <= 0
@modified_entries.each do |id, object|
object.save
end
@modified_entries = ::Hash.new
@flush_counter = @flush_delay
end
@flush_times += 1
end
|
ruby
|
{
"resource": ""
}
|
q4361
|
UTF8Encoding.ControlCharacters.escape_control_chars_in_object!
|
train
|
# Recursively escapes control characters inside +object+ in place:
# Strings directly, Hashes and Arrays element-wise; any other object is
# returned untouched.
def escape_control_chars_in_object!(object)
  if object.is_a?(String)
    escape_control_chars!(object)
  elsif object.is_a?(Hash)
    escape_control_chars_in_hash!(object)
  elsif object.is_a?(Array)
    escape_control_chars_in_array!(object)
  else
    object
  end
end
|
ruby
|
{
"resource": ""
}
|
q4362
|
UTF8Encoding.ControlCharacters.escape_control_chars!
|
train
|
# Replaces every control character in +string+ in place using the
# module's replacement scheme, then returns the (mutated) string.
def escape_control_chars!(string)
string.gsub!(CONTROL_CHARACTERS) do |character|
UTF8Encoding::REPLACEMENT_SCHEME[character]
end
string
end
|
ruby
|
{
"resource": ""
}
|
q4363
|
StateManager.State.find_states
|
train
|
# Resolves a dot-separated +path+ of state names starting at this state
# and returns the list of states along the path, or nil when any segment
# is missing.
# NOTE(review): the missing state is appended to ret before the early
# return discards the whole list — confirm this ordering is intentional.
def find_states(path)
state = self
parts = path.split('.')
ret = []
parts.each do |name|
state = state.states[name.to_sym]
ret << state
return unless state
end
ret
end
|
ruby
|
{
"resource": ""
}
|
q4364
|
StateManager.State.initial_state
|
train
|
# Returns the effective initial state: the explicitly specified one if
# the specification names it, this state itself when it is a leaf, or
# (recursively) the initial state of the first substate otherwise.
def initial_state
if state = self.class.specification.initial_state
find_state(state.to_s)
elsif leaf?
self
else
states.values.first.initial_state
end
end
|
ruby
|
{
"resource": ""
}
|
q4365
|
PEROBS.BTree.open
|
train
|
# Opens the BTree index file. Aborts when the dirty-flag lock is held or
# when +file_must_exist+ is set and the file is missing. Either creates a
# fresh root node (empty file) or loads the existing root plus the
# first/last leaf links, then restores the stored tree size.
def open(file_must_exist = false)
if @dirty_flag.is_locked?
PEROBS.log.fatal "Index file #{@nodes.file_name} is already " +
"locked"
end
if file_must_exist && [email protected]_exist?
PEROBS.log.fatal "Index file #{@nodes.file_name} does not exist"
end
@node_cache.clear
@nodes.open
if @nodes.total_entries == 0
# We've created a new nodes file
node = BTreeNode::create(self)
else
# We are loading an existing tree.
node = BTreeNode::load_and_link(self, @nodes.first_entry)
@first_leaf = BTreeNode::load_and_link(
self, @nodes.get_custom_data('first_leaf'))
@last_leaf = BTreeNode::load_and_link(
self, @nodes.get_custom_data('last_leaf'))
end
set_root(node)
# Get the total number of entries that are stored in the tree.
@size = @nodes.get_custom_data('btree_size')
end
|
ruby
|
{
"resource": ""
}
|
q4366
|
PEROBS.BTree.check
|
train
|
# Verifies the consistency of the tree structure and its entries.
#
# Flushes pending changes first, validates the nodes file, then walks
# the whole tree from the root while updating a progress meter per
# visited entry. An optional block is invoked for every key/value pair
# and can veto the check by returning false. The check also fails when
# the number of visited entries does not match the recorded tree size.
#
# @return [Boolean] true when the tree is consistent
def check(&block)
  sync
  return false unless @nodes.check
  entries = 0
  res = true
  @progressmeter.start('Checking index structure', @size) do |pm|
    res = @root.check do |k, v|
      pm.update(entries += 1)
      # Forward each pair to the caller's block when one was given;
      # without a block every pair counts as valid.
      block_given? ? yield(k, v) : true
    end
  end
  unless entries == @size
    PEROBS.log.error "The BTree size (#{@size}) and the number of " +
      "found entries (#{entries}) don't match"
    return false
  end
  res
end
|
ruby
|
{
"resource": ""
}
|
q4367
|
PEROBS.BTree.remove
|
train
|
# Removes the entry stored under +key+ from the tree.
#
# Decrements the tree size only when something was actually removed.
# If the delete leaves the root as an internal node with a single
# child, the tree is repeatedly pulled up one level and the obsolete
# root nodes are deleted.
#
# @param key the key to remove
# @return the removed value, or nil when the key was not present
def remove(key)
  @size -= 1 unless (removed_value = @root.remove(key)).nil?
  # Check if the root node only contains one child link after the delete
  # operation. Then we can delete that node and pull the tree one level
  # up. This could happen for a sequence of nodes that all got merged to
  # single child nodes.
  # (The loop condition was corrupted by an email-obfuscation artifact
  # in the original text; restored to the negated leaf check.)
  while !@root.is_leaf && @root.children.size == 1
    old_root = @root
    set_root(@root.children.first)
    @root.parent = nil
    delete_node(old_root.node_address)
  end
  @node_cache.flush
  removed_value
end
|
ruby
|
{
"resource": ""
}
|
q4368
|
Glicko2.Rater.add
|
train
|
# Accumulates one game result into the pre-rating-period estimates.
#
# Asks the opponent's rating for its gravity factor g and expected
# score e against our mean, then folds them into the running variance
# estimate (@v_pre) and the rating improvement estimate (@delta_pre).
#
# @param other_rating the opponent's rating object
# @param score [Numeric] the achieved score against that opponent
def add(other_rating, score)
  gravity, expected = other_rating.gravity_expected_score(rating.mean)
  @v_pre += gravity * gravity * expected * (1 - expected)
  @delta_pre += gravity * (score - expected)
end
|
ruby
|
{
"resource": ""
}
|
q4369
|
Glicko2.Rater.rate
|
train
|
# Computes the post-period rating from the accumulated match data
# (@v_pre and @delta_pre) following the Glicko-2 update algorithm.
#
# The new volatility is found by solving f(x) = 0 with the Illinois
# variant of regula falsi (Util.illinois_method); the rating deviation
# and mean are then derived from it. When no games were added @v_pre
# is 0, v is infinite, and the volatility stays unchanged.
#
# @param tau [Numeric] system constant constraining volatility change
# @return [Rating] the updated rating
def rate(tau)
  v = @v_pre**-1
  delta2 = (@delta_pre * v)**2
  sd2 = rating.sd**2
  a = Math.log(rating.volatility**2)
  if v.finite?
    f = lambda do |x|
      expX = Math.exp(x)
      (expX * (delta2 - sd2 - v - expX)) / (2 * (sd2 + v + expX)**2) - (x - a) / tau**2
    end
    # Choose the second bracket endpoint b per the Glicko-2 procedure.
    if delta2 > sd2 + v
      b = Math.log(delta2 - sd2 - v)
    else
      k = 1
      k += 1 while f.call(a - k * tau) < 0
      b = a - k * tau
    end
    a = Util.illinois_method(a, b, &f)
  end
  volatility = Math.exp(a / 2.0)
  sd_pre = Math.sqrt(sd2 + volatility**2)
  sd = 1 / Math.sqrt(1.0 / sd_pre**2 + 1 / v)
  mean = rating.mean + sd**2 * @delta_pre
  Rating.new(mean, sd, volatility)
end
|
ruby
|
{
"resource": ""
}
|
q4370
|
Schemaker.Models.configure
|
train
|
# Configures all registered models.
#
# Without a join model only the quick-join setup runs; otherwise the
# subject, object and join models are each configured in turn (nil
# entries are skipped via compact).
def configure
  return quick_join if !join_model
  [subject_model, object_model, join_model].compact.each do |model|
    model.configure
  end
end
|
ruby
|
{
"resource": ""
}
|
q4371
|
Schemaker.Models.get_class
|
train
|
# Resolves the given argument to a Class.
#
# Accepts:
# - Class: returned as-is
# - BaseModel: its wrapped class (my_class)
# - String/Symbol: :subject/:object/:join are resolved through the
#   corresponding *_model accessor and recursed; anything else is
#   constantized (Rails/ActiveSupport String#constantize)
#
# @raise [RuntimeError] when no class can be determined
def get_class type
  case type
  when Class
    type
  when BaseModel
    type.my_class
  when String, Symbol
    return get_class send("#{type}_model") if [:subject, :object, :join].include?(type.to_sym)
    type.to_s.constantize
  else
    raise "Can't determine a class from: #{type}"
  end
end
|
ruby
|
{
"resource": ""
}
|
q4372
|
BuildStatusServer.Config.load_config_file
|
train
|
# Loads and parses the YAML configuration file.
#
# When +config_file+ is given it must exist (otherwise an error is
# raised). Without it, the candidate paths from locations_to_try are
# probed in order; when none exists the user is shown a suggestion and
# the bundled example config is parsed instead.
#
# Fix: File.exists? is deprecated (removed in Ruby 3.2) — use
# File.exist?.
#
# @param config_file [String, nil] explicit path to a config file
# @return [Hash] the parsed configuration
# @raise [RuntimeError] when the supplied file is missing or the
#   parsed document is not a Hash
def load_config_file(config_file = nil)
  curated_file = nil
  if config_file
    f = File.expand_path(config_file)
    if File.exist?(f)
      curated_file = f
    else
      raise "Supplied config file (#{config_file}) doesn't seem to exist"
    end
  else
    locations_to_try.each do |possible_conf_file|
      f = File.expand_path(possible_conf_file)
      if File.exist?(f)
        curated_file = f
        break
      end
    end
    if curated_file.nil?
      show_config_file_suggestion
      return YAML.load(get_example_config)
    end
  end
  YAML.load_file(curated_file).tap do |config|
    raise "This is an invalid configuration file!" unless config.class == Hash
  end
end
|
ruby
|
{
"resource": ""
}
|
q4373
|
Qe.AnswerPagesHelper.li_page_active_if
|
train
|
# Renders an <li> tag, appending the "active" CSS class when the
# condition holds.
#
# @param condition [Boolean] whether to mark the item as active
# @param attributes [Hash] HTML attributes for the tag (mutated:
#   :class gains " active" when condition is true)
# @return the content_tag result for the "li" element
def li_page_active_if(condition, attributes = {}, &block)
  if condition
    # Mirrors the original ||= '' / += ' active' dance, including the
    # leading space when no class was set.
    attributes[:class] = "#{attributes[:class]} active"
  end
  content_tag("li", attributes, &block)
end
|
ruby
|
{
"resource": ""
}
|
q4374
|
MiniSpec.ClassAPI.after
|
train
|
# Registers a proc to run after matching specs.
#
# @param matchers filters selecting which specs the hook applies to;
#        defaults to [:*] (all specs) when none are given
# @raise [ArgumentError] when no block is supplied
#
# Duplicate registrations — same matchers and a proc defined at the
# same source location — are silently ignored.
def after *matchers, &proc
  proc || raise(ArgumentError, 'block is missing')
  matchers.flatten!
  matchers = [:*] if matchers.empty?
  return if after?.find {|x| x[0] == matchers && x[1].source_location == proc.source_location}
  after?.push([matchers, proc])
end
|
ruby
|
{
"resource": ""
}
|
q4375
|
Bombshell.Shell._prompt
|
train
|
# Determines the shell prompt string.
#
# A String prompt is used verbatim; a Proc is called — with the shell
# instance when its arity is 1, otherwise without arguments; anything
# else falls back to the default '[Bombshell]'.
def _prompt
  prompt = self.class.bombshell_prompt
  case prompt
  when String
    prompt
  when Proc
    prompt.arity == 1 ? prompt.call(self) : prompt.call
  else
    '[Bombshell]'
  end
end
|
ruby
|
{
"resource": ""
}
|
q4376
|
Guard.BundlerAudit.audit
|
train
|
# Runs bundler-audit over the current bundle and reports the result
# through Guard's UI.
#
# Each finding is formatted by type: insecure gem source, unpatched
# gem, or a generic fallback. When anything is found the combined
# message is printed in red and also sent as a notification; otherwise
# a green all-clear message is printed.
def audit
  res = ::Bundler::Audit::Scanner.new.scan.to_a.map do |vuln|
    case vuln
    when ::Bundler::Audit::Scanner::InsecureSource
      insecure_source_message vuln
    when ::Bundler::Audit::Scanner::UnpatchedGem
      insecure_gem_message vuln
    else
      insecure_message vuln
    end
  end
  if res.any?
    message = "Vulnerabilities found:\n" + res.join("\n")
    color = :red
    # Only vulnerability reports trigger a desktop notification.
    notify message
  else
    message = "No vulnerabilities found."
    color = :green
  end
  UI.info(UI.send(:color, message, color))
end
|
ruby
|
{
"resource": ""
}
|
q4377
|
CamperVan.IRCD.receive_line
|
train
|
# Handles a single line received from the IRC client.
#
# The command is parsed and dispatched only once the connection is
# active; commands without a registered handler (HandlerMissing) are
# logged and ignored.
def receive_line(line)
  if @active
    cmd = parse(line)
    handle cmd
  end
rescue HandlerMissing
  logger.info "ignoring irc command #{cmd.inspect}: no handler"
end
|
ruby
|
{
"resource": ""
}
|
q4378
|
CamperVan.IRCD.check_campfire_authentication
|
train
|
# Verifies the supplied Campfire API key by fetching the current user.
#
# Yields the callback on success. An invalid key yields a user without
# a name, in which case the client is notified and the connection shut
# down; connection-level HTTP errors are reported the same way.
def check_campfire_authentication(&callback)
  # invalid user only returns a nil result!
  campfire.user("me") do |user|
    if user.name
      yield
    else
      command_reply :notice, "AUTH", "could not connect to campfire: invalid API key"
      shutdown
    end
  end
rescue Firering::Connection::HTTPError => e
  command_reply :notice, "AUTH", "could not connect to campfire: #{e.message}"
  shutdown
end
|
ruby
|
{
"resource": ""
}
|
q4379
|
CamperVan.IRCD.check_nick_matches_authenticated_user
|
train
|
# Ensures the client's nick matches the authenticated Campfire user's
# name; when they differ the server pushes a NICK change back to the
# client and records the new nick.
def check_nick_matches_authenticated_user
  campfire.user("me") do |user|
    name = irc_name user.name
    if name != nick
      user_reply :nick, name
      @nick = name
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q4380
|
RestCore::RailsUtilUtil.InstanceMethod.rc_setup
|
train
|
# Splits the given options between the client's controller options and
# the options used when building new client instances.
#
# NOTE(review): rc_options_extract is defined elsewhere — presumably
# :reject keeps keys NOT in client.members (controller-level) and
# :select keeps keys that ARE members (instance-level); confirm
# against its implementation.
def rc_setup client, options={}
  rc_options_ctl(client).merge!(
    rc_options_extract(client.members, options, :reject))
  rc_options_new(client).merge!(
    rc_options_extract(client.members, options, :select))
end
|
ruby
|
{
"resource": ""
}
|
q4381
|
JavaProperties.Properties.to_s
|
train
|
# Serializes all properties as "key=value" lines.
#
# Entries are sorted by their stringified key so the output is stable
# (useful for tests), and both key and value are run through the
# properties Encoding module.
def to_s
  @props.sort_by { |key, _val| key.to_s }
        .map { |key, val| "#{Encoding.encode(key.to_s)}=#{Encoding.encode(val)}\n" }
        .join
end
|
ruby
|
{
"resource": ""
}
|
q4382
|
Qe.AnswerPagesPresenter.page_list
|
train
|
# Builds the list of page links for every answer sheet.
#
# One link per (answer_sheet, page) pair is produced via
# new_page_link; optional custom pages are appended at the end.
#
# @param answer_sheets collection of answer sheets
# @param a optional argument forwarded to new_page_link
# @param custom_pages [Array, nil] extra entries appended verbatim
# @return [Array] the combined list of page links
def page_list(answer_sheets, a = nil, custom_pages = nil)
  links = answer_sheets.flat_map do |answer_sheet|
    pages.map { |page| new_page_link(answer_sheet, page, a) }
  end
  links += custom_pages unless custom_pages.nil?
  links
end
|
ruby
|
{
"resource": ""
}
|
q4383
|
InheritedViews.Helpers.render_partial_or_default
|
train
|
# Renders the named partial, falling back to the controller class's
# default views directory when the template is missing.
def render_partial_or_default(name, options = {})
  render options.merge(:partial => name)
rescue ActionView::MissingTemplate
  render options.merge(:partial => "#{controller.class.default_views}/#{name}")
end
|
ruby
|
{
"resource": ""
}
|
q4384
|
MmJsonClient.Client.client_objects_to_h
|
train
|
# Recursively converts MmJsonClient objects into plain hashes.
#
# Client objects (class name starting with "MmJsonClient") are turned
# into hashes via #to_h and recursed; plain Hashes and Arrays are
# walked element by element; everything else is returned unchanged.
# Note: only exact Hash/Array classes are matched, not subclasses,
# mirroring the original string comparison on the class name.
def client_objects_to_h(value)
  klass = value.class.to_s
  if klass.start_with?('MmJsonClient')
    client_objects_to_h(value.to_h)
  elsif klass == 'Hash'
    value.each_with_object({}) { |(k, v), acc| acc[k] = client_objects_to_h(v) }
  elsif klass == 'Array'
    value.map { |v| client_objects_to_h(v) }
  else
    value
  end
end
|
ruby
|
{
"resource": ""
}
|
q4385
|
Relish.OptionsFile.store
|
train
|
# Persists the merged options to the options file.
#
# Merges +options+ over the currently loaded options, memoizes the
# result in @options, and writes it as YAML to @path (touching the
# file first so it exists before opening for write).
def store(options)
  @options = self.options.merge(options)
  FileUtils.touch(@path)
  File.open(@path, 'w') do |file|
    YAML.dump(@options, file)
  end
end
|
ruby
|
{
"resource": ""
}
|
q4386
|
WebPageParser.NewYorkTimesPageParserV1.retrieve_page
|
train
|
# Retrieves the full (single-page) version of the article.
#
# Appends "pagewanted=all" to the article url and fetches it through
# the superclass. If the first attempt looks blocked, a session is
# (re)established and one retry is made; a still-unsuccessful result
# raises RetrieveError.
#
# Fix: the original did `spurl = url` and then mutated spurl with <<,
# which appended the query string to the underlying url object itself
# on every call. We now operate on a copy.
#
# @return [String, nil] the page body, or nil when no url is set
# @raise [RetrieveError] when blocked by the NYT paywall
def retrieve_page
  return nil unless url
  spurl = url.dup # copy so the << appends below don't mutate url itself
  spurl << (spurl.include?("?") ? "&" : "?")
  spurl << "pagewanted=all"
  p = super(spurl)
  # If it fails, reset the session and try one more time
  # NOTE(review): ||= only creates a session when none exists — it does
  # not actually reset an existing one; confirm intent.
  unless retreive_successful?(p)
    self.class.retrieve_session ||= WebPageParser::HTTP::Session.new
    p = super(spurl)
  end
  if retreive_successful?(p)
    p
  else
    raise RetrieveError, "Blocked by NYT paywall"
  end
end
|
ruby
|
{
"resource": ""
}
|
q4387
|
Tickly.Parser.wrap_io_or_string
|
train
|
# Normalizes the input into a reader exposing #read_one_char.
#
# Anything already responding to #read_one_char (a Bychar reader or R)
# passes through untouched; IO-like objects (responding to #read) are
# wrapped in R; plain strings are first wrapped in a StringIO.
def wrap_io_or_string(io_or_str)
  if io_or_str.respond_to?(:read_one_char)
    io_or_str
  elsif io_or_str.respond_to?(:read)
    R.new(io_or_str)
  else
    R.new(StringIO.new(io_or_str))
  end
end
|
ruby
|
{
"resource": ""
}
|
q4388
|
Tickly.Parser.wrap_up
|
train
|
# Finalizes parsing at the end of an expression.
#
# Flushes any remaining buffer contents onto the stack. In
# single-expression mode the stack itself is the result; in
# multiple-expression mode the (non-empty) stack is appended to the
# collected expressions, each of which is then collapsed through
# compact_subexpr before being returned.
def wrap_up(expressions, stack, buf, stack_depth, multiple_expressions)
  stack << buf unless buf.empty?
  return stack unless multiple_expressions
  expressions << stack if stack.any?
  # Make sure every collected expression gets collapsed before return
  expressions.map { |expression| compact_subexpr(expression, stack_depth + 1) }
end
|
ruby
|
{
"resource": ""
}
|
q4389
|
Tickly.Parser.consume_remaining_buffer
|
train
|
# Pushes a copy of the buffer contents onto the stack and empties the
# buffer in place (callers keep reusing the same String object).
# Does nothing when the buffer is empty.
def consume_remaining_buffer(stack, buf)
  return if buf.empty?
  stack << buf.dup
  buf.clear
end
|
ruby
|
{
"resource": ""
}
|
q4390
|
Tickly.Parser.parse_expr
|
train
|
# Parses one Tcl expression (or a sequence of them) from +io+.
#
# Reads character by character until +stop_char+ (nil means plain
# EOF). "[" opens a string subexpression (tagged :b), "{" a literal
# subexpression (tagged :c), and quote characters start a string
# literal. Spaces and newlines split tokens; characters listed in
# TERMINATORS additionally close the current expression and switch
# the parser into multiple-expression mode.
#
# @return the single token stack, or — in multiple-expression mode —
#   the array of expressions, each filtered through compact_subexpr
# @raise [Error] when the IO runs out inside a subexpression
def parse_expr(io, stop_char = nil, stack_depth = 0, multiple_expressions = false)
  # A standard stack is an expression that does not evaluate to a string
  expressions = []
  stack = []
  buf = ''
  loop do
    char = io.read_one_char
    # Ignore carriage returns
    next if char == "\r"
    if stop_char && char.nil?
      raise Error, "IO ran out when parsing a subexpression (expected to end on #{stop_char.inspect})"
    elsif char == stop_char # Bail out of a subexpr or bail out on nil
      # TODO: default stop_char is nil, and this is also what gets returned from a depleted
      # IO on IO#read(). We should do that in Bychar.
      # Handle any remaining subexpressions
      return wrap_up(expressions, stack, buf, stack_depth, multiple_expressions)
    elsif char == " " || char == "\n" # Space
      if buf.length > 0
        stack << buf
        buf = ''
      end
      if TERMINATORS.include?(char) && stack.any? # Introduce a stack separator! This is a new line
        # First get rid of the remaining buffer data
        consume_remaining_buffer(stack, buf)
        # Since we now finished an expression and it is on the stack,
        # we can run this expression through the filter
        filtered_expr = compact_subexpr(stack, stack_depth + 1)
        # Only preserve the parsed expression if it's not nil
        expressions << filtered_expr unless filtered_expr.nil?
        # Reset the stack for the next expression
        stack = []
        # Note that we will return multiple expressions instead of one
        multiple_expressions = true
      end
    elsif char == '[' # Opens a new string expression
      consume_remaining_buffer(stack, buf)
      stack << [:b] + parse_expr(io, ']', stack_depth + 1)
    elsif char == '{' # Opens a new literal expression
      consume_remaining_buffer(stack, buf)
      stack << [:c] + parse_expr(io, '}', stack_depth + 1)
    elsif QUOTES.include?(char) # String
      consume_remaining_buffer(stack, buf)
      stack << parse_str(io, char)
    else
      buf << char
    end
  end
  raise Error, "Should never happen"
end
|
ruby
|
{
"resource": ""
}
|
q4391
|
Tickly.Parser.parse_str
|
train
|
# Reads a quoted string from +io+ until the unescaped +stop_quote+
# character is found.
#
# A preceding ESC character escapes the next character: the ESC itself
# is stripped from the buffer and the following character is taken
# literally (including the quote character itself).
#
# @return [String] the string contents without the surrounding quotes
# @raise [Error] when the IO ends before the closing quote
def parse_str(io, stop_quote)
  buf = ''
  loop do
    c = io.read_one_char
    if c.nil?
      raise Error, "The IO ran out before the end of a literal string"
    elsif buf.length > 0 && buf[-1..-1] == ESC # If this char was escaped
      # Trim the escape character at the end of the buffer
      buf = buf[0..-2]
      buf << c
    elsif c == stop_quote
      return buf
    else
      buf << c
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q4392
|
ClassKit.AttributeHelper.get_attributes
|
train
|
# Returns (and memoizes) the class_kit attribute definitions for the
# given class, collected across its entire ancestor chain.
#
# Each ancestor may carry a @class_kit_attributes hash; all of their
# values are gathered, nils are removed, and the resulting array is
# cached in @attribute_store keyed by the class.
#
# @param klass [Class] the class to inspect
# @return [Array] attribute definition hashes
def get_attributes(klass)
  return @attribute_store[klass] if @attribute_store.key?(klass)
  attributes = klass.ancestors.flat_map do |ancestor|
    declared = ancestor.instance_variable_get(:@class_kit_attributes)
    declared ? declared.values : []
  end
  attributes.compact!
  @attribute_store[klass] = attributes
  attributes
end
|
ruby
|
{
"resource": ""
}
|
q4393
|
ClassKit.AttributeHelper.get_attribute
|
train
|
# Looks up a single attribute definition by name on the given class.
#
# @param klass [Class] the class whose attributes are searched
# @param name [Symbol] the attribute name to find
# @raise [ClassKit::Exceptions::AttributeNotFoundError] when no
#   attribute with that name exists
def get_attribute(klass:, name:)
  found = get_attributes(klass).detect { |attribute| attribute[:name] == name }
  return found if found
  raise ClassKit::Exceptions::AttributeNotFoundError, "Attribute: #{name}, could not be found."
end
|
ruby
|
{
"resource": ""
}
|
q4394
|
Cfan122.Reloader.cleanup
|
train
|
# Recursively removes project constants, starting at @top.
#
# Only constants recorded in all_project_objects_lookup are touched.
# Children are removed depth-first before the constant itself is
# removed from its parent namespace (only the last path segment of the
# fully-qualified name is used for remove_const).
def cleanup(parent = Object, current = @top)
  return unless all_project_objects_lookup[current]
  current.constants.each {|const| cleanup current, current.const_get(const)}
  parent.send(:remove_const, current.to_s.split('::').last.to_sym)
end
|
ruby
|
{
"resource": ""
}
|
q4395
|
ClassKit.Helper.to_hash
|
train
|
# Converts a class_kit object (or an array of them) into a Hash.
#
# Each declared attribute is read from the object; nested class_kit
# values — and class_kit elements inside Array attributes — are
# converted recursively, nil values are skipped entirely, and the
# resulting hash is made indifferent to string/symbol key access.
#
# @param object the class_kit instance (or Array of instances)
# @param use_alias [Boolean] use the attribute alias (when present) as
#   the hash key instead of the attribute name
# @return [Hash] (or [Array<Hash>] for array input)
def to_hash(object, use_alias = false)
  return object.map { |i| to_hash(i, use_alias) } if object.is_a?(Array)
  validate_class_kit(object.class)
  hash = {}
  attributes = @attribute_helper.get_attributes(object.class)
  attributes.each do |attribute|
    key = use_alias ? (attribute[:alias] || attribute[:name]) : attribute[:name]
    type = attribute[:type]
    value = object.public_send(attribute[:name])
    if value != nil
      hash[key] = if is_class_kit?(type)
                    to_hash(value, use_alias)
                  elsif type == Array
                    value.map do |i|
                      if is_class_kit?(i.class)
                        to_hash(i, use_alias)
                      else
                        i
                      end
                    end
                  else
                    value
                  end
    end
  end
  @hash_helper.indifferent!(hash)
  hash
end
|
ruby
|
{
"resource": ""
}
|
q4396
|
ClassKit.Helper.to_json
|
train
|
# Serializes the given class_kit object to a JSON string via its hash
# representation.
#
# @param object the class_kit instance to serialize
# @param use_alias [Boolean] emit attribute aliases instead of names
# @return [String] the JSON document
def to_json(object, use_alias = false)
  JSON.dump(to_hash(object, use_alias))
end
|
ruby
|
{
"resource": ""
}
|
q4397
|
ClassKit.Helper.from_json
|
train
|
# Deserializes a JSON string into an instance of +klass+.
#
# NOTE(review): JSON.load honors create_additions and is unsafe on
# untrusted input — consider JSON.parse for external data; confirm no
# caller relies on JSON.load's extended behavior before changing.
#
# @param json [String] the JSON document
# @param klass [Class] the class_kit class to instantiate
# @param use_alias [Boolean] match keys against attribute aliases
def from_json(json:, klass:, use_alias: false)
  hash = JSON.load(json)
  from_hash(hash: hash, klass: klass, use_alias: use_alias)
end
|
ruby
|
{
"resource": ""
}
|
q4398
|
RailsFriendlyUrls.FriendlyUrl.set_destination_data!
|
train
|
# Derives controller, action and default parameters for this friendly
# URL by asking the Rails router to recognize the stored path.
# Everything the router returns besides :controller/:action is kept
# as the defaults hash.
def set_destination_data!
  route_info = Rails.application.routes.recognize_path(self.path)
  self.controller = route_info[:controller]
  self.action = route_info[:action]
  self.defaults = route_info.reject { |key, _| [:controller, :action].include?(key) }
end
|
ruby
|
{
"resource": ""
}
|
q4399
|
TelestreamCloud::Tts.TtsApi.corpora
|
train
|
# Returns the list of corpora for the given project, discarding the
# HTTP status code and headers from the full response triple.
#
# @param project_id the project whose corpora are listed
# @param opts [Hash] optional request parameters
def corpora(project_id, opts = {})
  data, _status_code, _headers = corpora_with_http_info(project_id, opts)
  data
end
|
ruby
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.