_id (string, 2-6 chars) | title (string, 9-130 chars) | partition (string, 3 classes) | text (string, 66-10.5k chars) | language (string, 1 value) | meta_information (dict)
---|---|---|---|---|---|
q4200
|
PEROBS.FlatFileBlobHeader.write
|
train
|
def write
begin
buf = [ @flags, @length, @id, @crc].pack(FORMAT)
crc = Zlib.crc32(buf, 0)
@file.seek(@addr)
@file.write(buf + [ crc ].pack('L'))
rescue IOError => e
PEROBS.log.fatal "Cannot write blob header into flat file DB: " +
e.message
end
end
|
ruby
|
{
"resource": ""
}
|
q4201
|
Watir.Element.dom_updated?
|
train
|
def dom_updated?(delay: 1.1)
element_call do
begin
driver.manage.timeouts.script_timeout = delay + 1
driver.execute_async_script(DOM_OBSERVER, wd, delay)
rescue Selenium::WebDriver::Error::StaleElementReferenceError
# This situation can occur when the DOM changes between two calls to
# some element or aspect of the page. In this case, we are expecting
# the DOM to be different so what's being handled here are those hard
# to anticipate race conditions when "weird things happen" and DOM
# updating plus script execution get interleaved.
retry
rescue Selenium::WebDriver::Error::JavascriptError => e
# This situation can occur if the script execution has started before
# a new page is fully loaded. The specific error being checked for
# here is one that occurs when a new page is loaded as that page is
# trying to execute a JavaScript function.
retry if e.message.include?(
'document unloaded while waiting for result'
)
raise
ensure
# Note that this setting here means any user-defined timeout would
# effectively be overwritten.
driver.manage.timeouts.script_timeout = 1
end
end
end
|
ruby
|
{
"resource": ""
}
|
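The `dom_updated?` entry above combines three exception-handling idioms: an unconditional `retry` on a stale element, a conditional `retry` keyed on one specific JavaScript error message, and an `ensure` block that always restores the script timeout. The standalone sketch below reproduces just that control flow with plain Ruby exceptions; `TransientError`, `FatalScriptError`, and the messages are stand-ins, not Selenium's classes.

```ruby
# Minimal sketch of the retry/ensure pattern from the entry above.
# TransientError and FatalScriptError are stand-in exception classes.
class TransientError < StandardError; end
class FatalScriptError < StandardError; end

def with_transient_retry(attempts = 3)
  tries = 0
  begin
    yield
  rescue TransientError
    # Comparable to rescuing StaleElementReferenceError: just try again.
    retry if (tries += 1) < attempts
    raise
  rescue FatalScriptError => e
    # Comparable to the JavascriptError branch: retry only for one known message.
    retry if e.message.include?('document unloaded') && (tries += 1) < attempts
    raise
  ensure
    # Comparable to resetting the script timeout, this always runs.
    puts 'cleanup: restore timeout'
  end
end

with_transient_retry { puts 'script executed' }
```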
q4202
|
PEROBS.DataBase.serialize
|
train
|
def serialize(obj)
begin
case @serializer
when :marshal
Marshal.dump(obj)
when :json
obj.to_json
when :yaml
YAML.dump(obj)
end
rescue => e
PEROBS.log.fatal "Cannot serialize object as #{@serializer}: " +
e.message
end
end
|
ruby
|
{
"resource": ""
}
|
q4203
|
PEROBS.DataBase.deserialize
|
train
|
def deserialize(raw)
begin
case @serializer
when :marshal
Marshal.load(raw)
when :json
JSON.parse(raw, :create_additions => true)
when :yaml
YAML.load(raw)
end
rescue => e
PEROBS.log.fatal "Cannot de-serialize object with #{@serializer} " +
"parser: " + e.message
end
end
|
ruby
|
{
"resource": ""
}
|
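Taken together, the `serialize` and `deserialize` entries above form a round trip keyed on `@serializer`. A minimal standalone sketch of that round trip, using only the Ruby standard library (no PEROBS class is needed):

```ruby
require 'json'
require 'yaml'

# Round-trip a plain Ruby object through the three formats used above.
def roundtrip(obj, serializer)
  raw = case serializer
        when :marshal then Marshal.dump(obj)
        when :json    then obj.to_json
        when :yaml    then YAML.dump(obj)
        end
  case serializer
  when :marshal then Marshal.load(raw)
  when :json    then JSON.parse(raw)
  when :yaml    then YAML.load(raw)
  end
end

data = { 'answer' => 42, 'items' => [1, 2, 3] }
p roundtrip(data, :json)    # => {"answer"=>42, "items"=>[1, 2, 3]}
p roundtrip(data, :marshal) # => {"answer"=>42, "items"=>[1, 2, 3]}
```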
q4204
|
PEROBS.DataBase.check_option
|
train
|
def check_option(name)
value = instance_variable_get('@' + name)
if @config.include?(name)
# The database already existed and has a setting for this config
# option. If it does not match the instance variable, adjust the
# instance variable accordingly.
unless @config[name] == value
instance_variable_set('@' + name, @config[name])
end
else
# There is no such config option yet. Create it with the value of the
# corresponding instance variable.
@config[name] = value
end
end
|
ruby
|
{
"resource": ""
}
|
q4205
|
PEROBS.DataBase.ensure_dir_exists
|
train
|
def ensure_dir_exists(dir)
unless Dir.exist?(dir)
begin
Dir.mkdir(dir)
rescue IOError => e
PEROBS.log.fatal "Cannote create DB directory '#{dir}': #{e.message}"
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4206
|
Synvert::Core.Rewriter::GemSpec.match?
|
train
|
def match?
gemfile_lock_path = File.join(Configuration.instance.get(:path), 'Gemfile.lock')
if File.exists? gemfile_lock_path
parser = Bundler::LockfileParser.new(File.read(gemfile_lock_path))
if spec = parser.specs.find { |spec| spec.name == @name }
Gem::Version.new(spec.version).send(OPERATORS[@operator], @version)
else
false
end
else
raise GemfileLockNotFound.new 'Gemfile.lock does not exist'
end
end
|
ruby
|
{
"resource": ""
}
|
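The `match?` entry above reads `Gemfile.lock` through `Bundler::LockfileParser` and compares the locked version with `Gem::Version`. A reduced sketch of that check, assuming Bundler is installed and a `Gemfile.lock` exists in the working directory; the operator argument here is an illustrative stand-in for the entry's `OPERATORS` table:

```ruby
require 'bundler'

# Return true if the locked version of `name` satisfies `operator`/`version`.
def gem_version_matches?(name, operator, version, lockfile = 'Gemfile.lock')
  return false unless File.exist?(lockfile)

  parser = Bundler::LockfileParser.new(File.read(lockfile))
  spec = parser.specs.find { |s| s.name == name }
  return false unless spec

  # Wrap both sides in Gem::Version so the comparison operators line up.
  Gem::Version.new(spec.version.to_s).send(operator, Gem::Version.new(version))
end

# e.g. gem_version_matches?('rails', :>=, '6.0.0')
```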
q4207
|
CommunityZero.CookbookVersionsVersionEndpoint.response_hash_for
|
train
|
def response_hash_for(cookbook)
{
'cookbook' => url_for(cookbook),
'average_rating' => cookbook.average_rating,
'version' => cookbook.version,
'license' => cookbook.license,
'file' => "http://s3.amazonaws.com/#{cookbook.name}.tgz",
'tarball_file_size' => cookbook.name.split('').map(&:ord).inject(&:+) * 25, # don't even
'created_at' => cookbook.created_at,
'updated_at' => cookbook.updated_at,
}
end
|
ruby
|
{
"resource": ""
}
|
q4208
|
Chicago.Query.order
|
train
|
def order(*ordering)
@order = ordering.map do |c|
if c.kind_of?(String)
{:column => c, :ascending => true}
else
c.symbolize_keys!
end
end
self
end
|
ruby
|
{
"resource": ""
}
|
q4209
|
PEROBS.StackFile.close
|
train
|
def close
begin
@f.flush
@f.flock(File::LOCK_UN)
@f.close
rescue IOError => e
PEROBS.log.fatal "Cannot close stack file #{@file_name}: #{e.message}"
end
end
|
ruby
|
{
"resource": ""
}
|
q4210
|
PEROBS.StackFile.push
|
train
|
def push(bytes)
if bytes.length != @entry_bytes
PEROBS.log.fatal "All stack entries must be #{@entry_bytes} " +
"long. This entry is #{bytes.length} bytes long."
end
begin
@f.seek(0, IO::SEEK_END)
@f.write(bytes)
rescue => e
PEROBS.log.fatal "Cannot push to stack file #{@file_name}: #{e.message}"
end
end
|
ruby
|
{
"resource": ""
}
|
q4211
|
PEROBS.StackFile.pop
|
train
|
def pop
begin
return nil if @f.size == 0
@f.seek(-@entry_bytes, IO::SEEK_END)
bytes = @f.read(@entry_bytes)
@f.truncate(@f.size - @entry_bytes)
@f.flush
rescue => e
PEROBS.log.fatal "Cannot pop from stack file #{@file_name}: " +
e.message
end
bytes
end
|
ruby
|
{
"resource": ""
}
|
q4212
|
Bugzilla.Bug.get_comments
|
train
|
def get_comments(bugs)
params = {}
if bugs.kind_of?(Array) then
params['ids'] = bugs
elsif bugs.kind_of?(Integer) ||
bugs.kind_of?(String) then
params['ids'] = [bugs]
else
raise ArgumentError, sprintf("Unknown type of arguments: %s", bugs.class)
end
result = comments(params)
# not supporting comment_ids. so drop "comments".
result['bugs']
end
|
ruby
|
{
"resource": ""
}
|
q4213
|
Tapestry.Ready.when_ready
|
train
|
def when_ready(simple_check = false, &_block)
already_marked_ready = ready
unless simple_check
no_ready_check_possible unless block_given?
end
self.ready = ready?
not_ready_validation(ready_error || 'NO REASON PROVIDED') unless ready
yield self if block_given?
ensure
self.ready = already_marked_ready
end
|
ruby
|
{
"resource": ""
}
|
q4214
|
Tapestry.Ready.ready_validations_pass?
|
train
|
def ready_validations_pass?
self.class.ready_validations.all? do |validation|
passed, message = instance_eval(&validation)
self.ready_error = message if message && !passed
passed
end
end
|
ruby
|
{
"resource": ""
}
|
q4215
|
Synvert::Core.Rewriter::UnlessExistCondition.match?
|
train
|
def match?
match = false
@instance.current_node.recursive_children do |child_node|
match = match || (child_node && child_node.match?(@rules))
end
!match
end
|
ruby
|
{
"resource": ""
}
|
q4216
|
RandomUniqueId.ClassMethods.add_rid_related_validations
|
train
|
def add_rid_related_validations(options)
validates(options[:field], presence: true)
validates(options[:field], uniqueness: true) if options[:random_generation_method] != :uuid # If we're generating UUIDs, don't check for uniqueness
end
|
ruby
|
{
"resource": ""
}
|
q4217
|
RandomUniqueId.ClassMethods.define_rid_accessors
|
train
|
def define_rid_accessors(related_class, relationship_name)
define_method("#{relationship_name}_rid") do
self.send(relationship_name).try(random_unique_id_options[:field])
end
define_method("#{relationship_name}_rid=") do |rid|
record = related_class.find_by_rid(rid)
self.send("#{relationship_name}=", record)
record
end
end
|
ruby
|
{
"resource": ""
}
|
q4218
|
Effective.Menu.build
|
train
|
def build(&block)
raise 'build must be called with a block' if !block_given?
root = menu_items.build(title: 'Home', url: '/', lft: 1, rgt: 2)
root.parent = true
instance_exec(&block) # A call to dropdown or item
root.rgt = menu_items.map(&:rgt).max
self
end
|
ruby
|
{
"resource": ""
}
|
q4219
|
PEROBS.BTreeNode.search_key_index
|
train
|
def search_key_index(key)
# Handle special case for empty keys list.
return 0 if @keys.empty?
# Keys are unique and always sorted. Use a binary search to find the
# index that fits the given key.
li = pi = 0
ui = @keys.size - 1
while li <= ui
# The pivot element is always in the middle between the lower and upper
# index.
pi = li + (ui - li) / 2
if key < @keys[pi]
# The pivot element is smaller than the key. Set the upper index to
# the pivot index.
ui = pi - 1
elsif key > @keys[pi]
# The pivot element is larger than the key. Set the lower index to
# the pivot index.
li = pi + 1
else
# We've found an exact match. For leaf nodes return the found index.
# For branch nodes we have to add one to the index since the larger
# child is the right one.
return @is_leaf ? pi : pi + 1
end
end
# No exact match was found. For the insert operation we need to return
# the index of the first key that is larger than the given key.
@keys[pi] < key ? pi + 1 : pi
end
|
ruby
|
{
"resource": ""
}
|
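The `search_key_index` entry above is a textbook binary search over a sorted, duplicate-free key list, with a leaf/branch distinction on exact hits. A standalone sketch of the same search over a plain sorted array (without the PEROBS node machinery) shows the insert-position behaviour for the leaf-node case:

```ruby
# Standalone version of the binary search used above, leaf-node flavour:
# returns the index of an exact match, or the index where `key` would be inserted.
def search_key_index(keys, key)
  return 0 if keys.empty?

  li = pi = 0
  ui = keys.size - 1
  while li <= ui
    pi = li + (ui - li) / 2
    if key < keys[pi]
      ui = pi - 1
    elsif key > keys[pi]
      li = pi + 1
    else
      return pi # exact match (leaf-node case)
    end
  end
  # No exact match: index of the first key larger than `key`.
  keys[pi] < key ? pi + 1 : pi
end

keys = [10, 20, 30, 40]
p search_key_index(keys, 30) # => 2 (exact match)
p search_key_index(keys, 25) # => 2 (insert position between 20 and 30)
p search_key_index(keys, 50) # => 4 (append at the end)
```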
q4220
|
SASL.DigestMD5.response_value
|
train
|
def response_value(nonce, nc, cnonce, qop, a2_prefix='AUTHENTICATE')
a1_h = h("#{preferences.username}:#{preferences.realm}:#{preferences.password}")
a1 = "#{a1_h}:#{nonce}:#{cnonce}"
if preferences.authzid
a1 += ":#{preferences.authzid}"
end
if qop && (qop.downcase == 'auth-int' || qop.downcase == 'auth-conf')
a2 = "#{a2_prefix}:#{preferences.digest_uri}:00000000000000000000000000000000"
else
a2 = "#{a2_prefix}:#{preferences.digest_uri}"
end
hh("#{hh(a1)}:#{nonce}:#{nc}:#{cnonce}:#{qop}:#{hh(a2)}")
end
|
ruby
|
{
"resource": ""
}
|
q4221
|
RegApi2.RequestContract.to_hash
|
train
|
def to_hash arr
return {} if arr.nil?
return arr if arr.kind_of?(Hash)
arr = [ arr.to_sym ] unless arr.kind_of?(Array)
ret = {}
arr.each { |key| ret[key.to_sym] = {} }
ret
end
|
ruby
|
{
"resource": ""
}
|
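`to_hash` above normalises four input shapes (nil, Hash, a single symbol-like value, Array) into a hash of empty option hashes. A standalone copy of that logic, renamed here to avoid clashing with Ruby's implicit `to_hash` conversion protocol, with sample inputs:

```ruby
# Standalone copy of the normalisation logic from the entry above.
def normalize_fields(arr)
  return {} if arr.nil?
  return arr if arr.kind_of?(Hash)

  arr = [arr.to_sym] unless arr.kind_of?(Array)
  arr.each_with_object({}) { |key, ret| ret[key.to_sym] = {} }
end

p normalize_fields(nil)                      # => {}
p normalize_fields(:domain)                  # => {:domain=>{}}
p normalize_fields(%w[name period])          # => {:name=>{}, :period=>{}}
p normalize_fields(name: { required: true }) # => {:name=>{:required=>true}}
```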
q4222
|
RegApi2.RequestContract.fields_to_validate
|
train
|
def fields_to_validate
required_fields = to_hash opts[:required]
optional_fields = to_hash opts[:optional]
required_fields.keys.each { |key| required_fields[key][:required] = true }
optional_fields.merge(required_fields)
end
|
ruby
|
{
"resource": ""
}
|
q4223
|
RegApi2.RequestContract.validate_ipaddr
|
train
|
def validate_ipaddr key, value, opts
if opts[:ipaddr] == true && value.kind_of?(String)
value = IPAddr.new(value)
end
value.to_s
end
|
ruby
|
{
"resource": ""
}
|
q4224
|
RegApi2.RequestContract.validate_presence_of_required_fields
|
train
|
def validate_presence_of_required_fields form, fields
absent_fields = []
fields.each_pair do |key, opts|
next unless opts[:required]
if !form.has_key?(key) || form[key].nil?
absent_fields << key
end
end
unless absent_fields.empty?
raise RegApi2::ContractError.new(
"Required fields missed: #{absent_fields.join(', ')}",
absent_fields
)
end
nil
end
|
ruby
|
{
"resource": ""
}
|
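`validate_presence_of_required_fields` above collects all missing keys first and then raises a single error naming every one of them. A standalone sketch of that collect-then-raise pattern; the error class below is a plain stand-in for `RegApi2::ContractError`:

```ruby
# Stand-in for RegApi2::ContractError, carrying the list of missing fields.
class ContractError < StandardError
  attr_reader :fields

  def initialize(message, fields)
    super(message)
    @fields = fields
  end
end

def validate_presence_of_required_fields(form, fields)
  absent = fields.select { |key, opts| opts[:required] && form[key].nil? }.keys
  return nil if absent.empty?

  raise ContractError.new("Required fields missed: #{absent.join(', ')}", absent)
end

fields = { name: { required: true }, period: { required: true }, note: {} }
begin
  validate_presence_of_required_fields({ name: 'example.com' }, fields)
rescue ContractError => e
  puts e.message # => "Required fields missed: period"
end
```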
q4225
|
RegApi2.RequestContract.validate
|
train
|
def validate(form)
fields = fields_to_validate
return form if fields.empty?
validate_presence_of_required_fields form, fields
fields.each_pair do |key, opts|
next if !form.has_key?(key) || form[key].nil?
form[key] = validate_re key, form[key], opts
form[key] = validate_iso_date key, form[key], opts
form[key] = validate_ipaddr key, form[key], opts
end
form
end
|
ruby
|
{
"resource": ""
}
|
q4226
|
PEROBS.BigTreeNode.has_key?
|
train
|
def has_key?(key)
node = self
while node do
# Find index of the entry that best fits the key.
i = node.search_key_index(key)
if node.is_leaf?
# This is a leaf node. Check if there is an exact match for the
# given key and return the corresponding value or nil.
return node.keys[i] == key
end
# Descend into the right child node to continue the search.
node = node.children[i]
end
PEROBS.log.fatal "Could not find proper node to get from while " +
"looking for key #{key}"
end
|
ruby
|
{
"resource": ""
}
|
q4227
|
PEROBS.BigTreeNode.remove_element
|
train
|
def remove_element(index)
# Delete the key at the specified index.
unless (key = @keys.delete_at(index))
PEROBS.log.fatal "Could not remove element #{index} from BigTreeNode " +
"@#{@_id}"
end
update_branch_key(key) if index == 0
# Delete the corresponding value.
removed_value = @values.delete_at(index)
if @keys.length < min_keys
if @prev_sibling && @prev_sibling.parent == @parent
borrow_from_previous_sibling(@prev_sibling) ||
@prev_sibling.merge_with_leaf_node(myself)
elsif @next_sibling && @next_sibling.parent == @parent
borrow_from_next_sibling(@next_sibling) ||
merge_with_leaf_node(@next_sibling)
elsif @parent
PEROBS.log.fatal "Cannot not find adjecent leaf siblings"
end
end
# The merge has potentially invalidated this node. After this method has
# been called this copy of the node should no longer be used.
removed_value
end
|
ruby
|
{
"resource": ""
}
|
q4228
|
PEROBS.BigTreeNode.remove_child
|
train
|
def remove_child(node)
unless (index = search_node_index(node))
PEROBS.log.fatal "Cannot remove child #{node._id} from node #{@_id}"
end
if index == 0
# Removing the first child is a bit more complicated as the
# corresponding branch key is in a parent node.
key = @keys.shift
update_branch_key(key)
else
# For all other children we can just remove the corresponding key.
@keys.delete_at(index - 1)
end
# Remove the child node link.
child = @children.delete_at(index)
# If we remove the first or last leaf node we must update the reference
# in the BigTree object.
@tree.first_leaf = child.next_sibling if child == @tree.first_leaf
@tree.last_leaf = child.prev_sibling if child == @tree.last_leaf
# Unlink the neighbouring siblings from the child
child.prev_sibling.next_sibling = child.next_sibling if child.prev_sibling
child.next_sibling.prev_sibling = child.prev_sibling if child.next_sibling
if @keys.length < min_keys
# The node has become too small. Try borrowing a node from an adjacent
# sibling or merge with an adjacent node.
if @prev_sibling && @prev_sibling.parent == @parent
borrow_from_previous_sibling(@prev_sibling) ||
@prev_sibling.merge_with_branch_node(myself)
elsif @next_sibling && @next_sibling.parent == @parent
borrow_from_next_sibling(@next_sibling) ||
merge_with_branch_node(@next_sibling)
end
end
if @parent.nil? && @children.length <= 1
# If the node just below the root only has one child it will become
# the new root node.
new_root = @children.first
new_root.parent = nil
@tree.root = new_root
end
end
|
ruby
|
{
"resource": ""
}
|
q4229
|
PEROBS.BigTreeNode.statistics
|
train
|
def statistics(stats)
traverse do |node, position, stack|
if position == 0
if node.is_leaf?
stats.leaf_nodes += 1
depth = stack.size + 1
if stats.min_depth.nil? || stats.min_depth > depth
stats.min_depth = depth
end
if stats.max_depth.nil? || stats.max_depth < depth
stats.max_depth = depth
end
else
stats.branch_nodes += 1
end
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4230
|
PEROBS.BTreeDB.include?
|
train
|
def include?(id)
!(blob = find_blob(id)).nil? && !blob.find(id).nil?
end
|
ruby
|
{
"resource": ""
}
|
q4231
|
PEROBS.BTreeDB.get_object
|
train
|
def get_object(id)
return nil unless (blob = find_blob(id)) && (obj = blob.read_object(id))
deserialize(obj)
end
|
ruby
|
{
"resource": ""
}
|
q4232
|
PEROBS.BTreeDB.is_marked?
|
train
|
def is_marked?(id, ignore_errors = false)
(blob = find_blob(id)) && blob.is_marked?(id, ignore_errors)
end
|
ruby
|
{
"resource": ""
}
|
q4233
|
Cloudshaper.Command.env
|
train
|
def env
vars = {}
@stack.variables.each { |k, v| vars["TF_VAR_#{k}"] = v }
SECRETS.each do |_provider, secrets|
if secrets.is_a?(Hash)
secrets.each do |k, v|
vars[k.to_s] = v
end
end
end
vars
end
|
ruby
|
{
"resource": ""
}
|
q4234
|
CommunityZero.Store.search
|
train
|
def search(query)
regex = Regexp.new(query, 'i')
_cookbooks.collect do |_, v|
v[v.keys.first] if regex.match(v[v.keys.first].name)
end.compact
end
|
ruby
|
{
"resource": ""
}
|
q4235
|
CommunityZero.Store.add
|
train
|
def add(cookbook)
cookbook = cookbook.dup
cookbook.created_at = Time.now
cookbook.updated_at = Time.now
entry = _cookbooks[cookbook.name] ||= {}
entry[cookbook.version] = cookbook
end
|
ruby
|
{
"resource": ""
}
|
q4236
|
CommunityZero.Store.remove
|
train
|
def remove(cookbook)
return unless has_cookbook?(cookbook.name, cookbook.version)
_cookbooks[cookbook.name].delete(cookbook.version)
end
|
ruby
|
{
"resource": ""
}
|
q4237
|
CommunityZero.Store.find
|
train
|
def find(name, version = nil)
possibles = _cookbooks[name]
return nil if possibles.nil?
version ||= possibles.keys.sort.last
possibles[version]
end
|
ruby
|
{
"resource": ""
}
|
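The `find` entry above defaults to the highest key of the per-cookbook version hash when no version is given; note that the keys are version strings, so the `sort` is lexicographic. A standalone sketch of that lookup against a hand-built store shaped like `_cookbooks`:

```ruby
# Nested hash shaped like the _cookbooks structure used above:
# { cookbook_name => { version_string => cookbook_data } }
store = {
  'apache2' => {
    '1.0.0' => { name: 'apache2', version: '1.0.0' },
    '1.2.0' => { name: 'apache2', version: '1.2.0' },
  },
}

def find(store, name, version = nil)
  possibles = store[name]
  return nil if possibles.nil?

  version ||= possibles.keys.sort.last # lexicographic, as in the entry above
  possibles[version]
end

p find(store, 'apache2')          # => {:name=>"apache2", :version=>"1.2.0"}
p find(store, 'apache2', '1.0.0') # => {:name=>"apache2", :version=>"1.0.0"}
p find(store, 'nginx')            # => nil
```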
q4238
|
CommunityZero.Store.versions
|
train
|
def versions(name)
name = name.respond_to?(:name) ? name.name : name
(_cookbooks[name] && _cookbooks[name].keys.sort) || []
end
|
ruby
|
{
"resource": ""
}
|
q4239
|
PEROBS.Hash._referenced_object_ids
|
train
|
def _referenced_object_ids
@data.each_value.select { |v| v && v.respond_to?(:is_poxreference?) }.
map { |o| o.id }
end
|
ruby
|
{
"resource": ""
}
|
q4240
|
PEROBS.Hash._delete_reference_to_id
|
train
|
def _delete_reference_to_id(id)
@data.delete_if do |k, v|
v && v.respond_to?(:is_poxreference?) && v.id == id
end
@store.cache.cache_write(self)
end
|
ruby
|
{
"resource": ""
}
|
q4241
|
PEROBS.IDListPageRecord.split
|
train
|
def split
# Determine the new max_id for the old page.
max_id = @min_id + (@max_id - @min_id) / 2
# Create a new page that stores the upper half of the ID range. Remove
# all IDs from this page that now belong into the new page and transfer
# them.
new_page_record = IDListPageRecord.new(@page_file, max_id + 1, @max_id,
page.delete(max_id))
# Adjust the max_id of the current page.
@max_id = max_id
new_page_record
end
|
ruby
|
{
"resource": ""
}
|
q4242
|
Chicago::Schema::Builders.ShrunkenDimensionBuilder.columns
|
train
|
def columns(*names)
columns = @base.columns.select {|c| names.include?(c.name) }
check_columns_subset_of_base_dimension names, columns
@options[:columns] = columns
end
|
ruby
|
{
"resource": ""
}
|
q4243
|
Synvert::Core.Rewriter::InsertAction.insert_position
|
train
|
def insert_position(node)
case node.type
when :block
node.children[1].children.empty? ? node.children[0].loc.expression.end_pos + 3 : node.children[1].loc.expression.end_pos
when :class
node.children[1] ? node.children[1].loc.expression.end_pos : node.children[0].loc.expression.end_pos
else
node.children.last.loc.expression.end_pos
end
end
|
ruby
|
{
"resource": ""
}
|
q4244
|
Tapestry.Element.accessor_aspects
|
train
|
def accessor_aspects(element, *signature)
identifier = signature.shift
locator_args = {}
qualifier_args = {}
gather_aspects(identifier, element, locator_args, qualifier_args)
[locator_args, qualifier_args]
end
|
ruby
|
{
"resource": ""
}
|
q4245
|
PEROBS.DynamoDB.delete_database
|
train
|
def delete_database
dynamodb = Aws::DynamoDB::Client.new
dynamodb.delete_table(:table_name => @table_name)
dynamodb.wait_until(:table_not_exists, table_name: @table_name)
end
|
ruby
|
{
"resource": ""
}
|
q4246
|
PEROBS.DynamoDB.delete_unmarked_objects
|
train
|
def delete_unmarked_objects
deleted_objects_count = 0
each_item do |id|
unless dynamo_is_marked?(id)
dynamo_delete_item(id)
deleted_objects_count += 1
@item_counter -= 1
end
end
dynamo_put_item('item_counter', @item_counter.to_s)
deleted_objects_count
end
|
ruby
|
{
"resource": ""
}
|
q4247
|
PEROBS.DynamoDB.check_db
|
train
|
def check_db(repair = false)
unless (item_counter = dynamo_get_item('item_counter')) &&
item_counter == @item_counter
PEROBS.log.error "@item_counter variable (#{@item_counter}) and " +
"item_counter table entry (#{item_counter}) don't match"
end
item_counter = 0
each_item { item_counter += 1 }
unless item_counter == @item_counter
PEROBS.log.error "Table contains #{item_counter} items but " +
"@item_counter is #{@item_counter}"
end
end
|
ruby
|
{
"resource": ""
}
|
q4248
|
PEROBS.POXReference.method_missing
|
train
|
def method_missing(method_sym, *args, &block)
unless (obj = _referenced_object)
::PEROBS.log.fatal "Internal consistency error. No object with ID " +
"#{@id} found in the store."
end
if obj.respond_to?(:is_poxreference?)
::PEROBS.log.fatal "POXReference that references a POXReference found."
end
obj.send(method_sym, *args, &block)
end
|
ruby
|
{
"resource": ""
}
|
q4249
|
PEROBS.ObjectBase._transfer
|
train
|
def _transfer(store)
@store = store
# Remove the previously defined finalizer as it is attached to the old
# store.
ObjectSpace.undefine_finalizer(self)
# Register the object as in-memory object with the new store.
@store._register_in_memory(self, @_id)
# Register the finalizer for the new store.
ObjectSpace.define_finalizer(
self, ObjectBase._finalize(@store, @_id, object_id))
@myself = POXReference.new(@store, @_id)
end
|
ruby
|
{
"resource": ""
}
|
q4250
|
PEROBS.ObjectBase._restore
|
train
|
def _restore(level)
# Find the most recently stored state of this object. This could be on
# any previous stash level or in the regular object DB. If the object
# was created during the transaction, there is no previous state to
# restore to.
data = nil
if @_stash_map
(level - 1).downto(0) do |lvl|
break if (data = @_stash_map[lvl])
end
end
if data
# We have a stashed version that we can restore from.
_deserialize(data)
elsif @store.db.include?(@_id)
# We have no stashed version but can restore from the database.
db_obj = store.db.get_object(@_id)
_deserialize(db_obj['data'])
end
end
|
ruby
|
{
"resource": ""
}
|
q4251
|
EPPClient.Domain.domain_check
|
train
|
def domain_check(*domains)
domains.flatten!
response = send_request(domain_check_xml(*domains))
get_result(:xml => response, :callback => :domain_check_process)
end
|
ruby
|
{
"resource": ""
}
|
q4252
|
EPPClient.Domain.domain_info
|
train
|
def domain_info(args)
args = { :name => args } if args.is_a?(String)
response = send_request(domain_info_xml(args))
get_result(:xml => response, :callback => :domain_info_process)
end
|
ruby
|
{
"resource": ""
}
|
q4253
|
PEROBS.EquiBlobsFile.set_custom_data
|
train
|
def set_custom_data(name, value)
unless @custom_data_labels.include?(name)
PEROBS.log.fatal "Unknown custom data field #{name}"
end
@custom_data_values[@custom_data_labels.index(name)] = value
write_header if @f
end
|
ruby
|
{
"resource": ""
}
|
q4254
|
PEROBS.EquiBlobsFile.free_address
|
train
|
def free_address
if @first_space == 0
# There is currently no free entry. Create a new reserved entry at the
# end of the file.
begin
offset = @f.size
@f.seek(offset)
write_n_bytes([1] + ::Array.new(@entry_bytes, 0))
write_header
return offset_to_address(offset)
rescue IOError => e
PEROBS.log.fatal "Cannot create reserved space at #{@first_space} " +
"in EquiBlobsFile #{@file_name}: #{e.message}"
end
else
begin
free_space_address = offset_to_address(@first_space)
@f.seek(@first_space)
marker = read_char
@first_space = read_unsigned_int
unless marker == 0
PEROBS.log.fatal "Free space list of EquiBlobsFile #{@file_name} " +
"points to non-empty entry at address #{@first_space}"
end
# Mark entry as reserved by setting the mark byte to 1.
@f.seek(-(1 + 8), IO::SEEK_CUR)
write_char(1)
# Update the file header
@total_spaces -= 1
write_header
return free_space_address
rescue IOError => e
PEROBS.log.fatal "Cannot mark reserved space at " +
"#{free_space_address} in EquiBlobsFile #{@file_name}: " +
"#{e.message}"
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4255
|
PEROBS.EquiBlobsFile.store_blob
|
train
|
def store_blob(address, bytes)
unless address >= 0
PEROBS.log.fatal "Blob storage address must be larger than 0, " +
"not #{address}"
end
if bytes.length != @entry_bytes
PEROBS.log.fatal "All stack entries must be #{@entry_bytes} " +
"long. This entry is #{bytes.length} bytes long."
end
marker = 1
begin
offset = address_to_offset(address)
if offset > (file_size = @f.size)
PEROBS.log.fatal "Cannot store blob at address #{address} in " +
"EquiBlobsFile #{@file_name}. Address is larger than file size. " +
"Offset: #{offset} File size: #{file_size}"
end
@f.seek(offset)
# The first byte is the marker byte. It's set to 2 for cells that hold
# a blob, 1 for reserved cells and 0 for empty cells. The cell must
# either already be in use or be reserved. It must not be 0.
if file_size > offset &&
(marker = read_char) != 1 && marker != 2
PEROBS.log.fatal "Marker for entry at address #{address} of " +
"EquiBlobsFile #{@file_name} must be 1 or 2 but is #{marker}"
end
@f.seek(offset)
write_char(2)
@f.write(bytes)
@f.flush
rescue IOError => e
PEROBS.log.fatal "Cannot store blob at address #{address} in " +
"EquiBlobsFile #{@file_name}: #{e.message}"
end
# Update the entries counter if we inserted a new blob.
if marker == 1
@total_entries += 1
write_header
end
end
|
ruby
|
{
"resource": ""
}
|
q4256
|
PEROBS.EquiBlobsFile.retrieve_blob
|
train
|
def retrieve_blob(address)
unless address > 0
PEROBS.log.fatal "Blob retrieval address must be larger than 0, " +
"not #{address}"
end
begin
if (offset = address_to_offset(address)) >= @f.size
PEROBS.log.fatal "Cannot retrieve blob at address #{address} " +
"of EquiBlobsFile #{@file_name}. Address is beyond end of file."
end
@f.seek(address_to_offset(address))
if (marker = read_char) != 2
PEROBS.log.fatal "Cannot retrieve blob at address #{address} " +
"of EquiBlobsFile #{@file_name}. Blob is " +
(marker == 0 ? 'empty' : marker == 1 ? 'reserved' : 'corrupted') +
'.'
end
bytes = @f.read(@entry_bytes)
rescue IOError => e
PEROBS.log.fatal "Cannot retrieve blob at adress #{address} " +
"of EquiBlobsFile #{@file_name}: " + e.message
end
bytes
end
|
ruby
|
{
"resource": ""
}
|
q4257
|
PEROBS.EquiBlobsFile.delete_blob
|
train
|
def delete_blob(address)
unless address >= 0
PEROBS.log.fatal "Blob address must be larger than 0, " +
"not #{address}"
end
offset = address_to_offset(address)
begin
@f.seek(offset)
if (marker = read_char) != 1 && marker != 2
PEROBS.log.fatal "Cannot delete blob stored at address #{address} " +
"of EquiBlobsFile #{@file_name}. Blob is " +
(marker == 0 ? 'empty' : 'corrupted') + '.'
end
@f.seek(address_to_offset(address))
write_char(0)
write_unsigned_int(@first_space)
rescue IOError => e
PEROBS.log.fatal "Cannot delete blob at address #{address}: " +
e.message
end
@first_space = offset
@total_spaces += 1
@total_entries -= 1 unless marker == 1
write_header
if offset == @f.size - 1 - @entry_bytes
# We have deleted the last entry in the file. Make sure that all empty
# entries are removed up to the now new last used entry.
trim_file
end
end
|
ruby
|
{
"resource": ""
}
|
q4258
|
PEROBS.EquiBlobsFile.check
|
train
|
def check
sync
return false unless check_spaces
return false unless check_entries
expected_size = address_to_offset(@total_entries + @total_spaces + 1)
actual_size = @f.size
if actual_size != expected_size
PEROBS.log.error "Size mismatch in EquiBlobsFile #{@file_name}. " +
"Expected #{expected_size} bytes but found #{actual_size} bytes."
return false
end
true
end
|
ruby
|
{
"resource": ""
}
|
q4259
|
PEROBS.ILogger.open
|
train
|
def open(io)
begin
@@logger = Logger.new(io, *@@options)
rescue IOError => e
@@logger = Logger.new($stderr)
$stderr.puts "Cannot open log file: #{e.message}"
end
@@logger.level = @@level
@@logger.formatter = @@formatter
end
|
ruby
|
{
"resource": ""
}
|
q4260
|
PEROBS.ClassMap.rename
|
train
|
def rename(rename_map)
@by_id.each.with_index do |klass, id|
# Some entries can be nil. Ignore them.
next unless klass
if (new_name = rename_map[klass])
# We have a rename request. Update the current @by_id entry.
@by_id[id] = new_name
# Remove the old class name from @by_class hash.
@by_class.delete(klass)
# Insert the new one with the current ID.
@by_class[new_name] = id
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4261
|
PEROBS.ClassMap.keep
|
train
|
def keep(classes)
@by_id.each.with_index do |klass, id|
unless classes.include?(klass)
# Delete the class from the @by_id list by setting the entry to nil.
@by_id[id] = nil
# Delete the corresponding @by_class entry as well.
@by_class.delete(klass)
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4262
|
Wmctile.Memory.read_file
|
train
|
def read_file
@file_path = '~/.local/share/wmctile'
@file_name = 'memory.yml'
@file_full = File.expand_path([@file_path, @file_name].join('/'))
if File.exist? @file_full
file_contents = File.read(@file_full)
@memory = YAML.load(file_contents)
else
create_file
write_file
end
end
|
ruby
|
{
"resource": ""
}
|
q4263
|
CommunityZero.Endpoint.call
|
train
|
def call(request)
m = request.method.downcase.to_sym
# Only respond to listed methods
unless respond_to?(m)
allowed = METHODS.select { |m| respond_to?(m) }.map(&:upcase).join(', ')
return [
405,
{ 'Content-Type' => 'text/plain', 'Allow' => allowed },
"Method not allowed: '#{request.env['REQUEST_METHOD']}'"
]
end
begin
send(m, request)
rescue RestError => e
error(e.response_code, e.error)
end
end
|
ruby
|
{
"resource": ""
}
|
q4264
|
Effective.Page.duplicate!
|
train
|
def duplicate!
Page.new(attributes.except('id', 'updated_at', 'created_at')).tap do |page|
page.title = page.title + ' (Copy)'
page.slug = page.slug + '-copy'
page.draft = true
regions.each do |region|
page.regions.build(region.attributes.except('id', 'updated_at', 'created_at'))
end
page.save!
end
end
|
ruby
|
{
"resource": ""
}
|
q4265
|
YARD::MRuby::Parser::C.HeaderParser.consume_directive
|
train
|
def consume_directive
super if @in_body_statements
@newline = false
start = @index
line = @line
statement = DirectiveStatement.new(nil, @file, line)
@statements << statement
attach_comment(statement)
multiline = false
advance_loop do
chr = char
case chr
when '\\'; multiline=true; advance
when /\s/; consume_whitespace
else advance
end
if @newline
if multiline
multiline = false
else
break
end
end
end
decl = @content[start...@index]
statement.declaration = decl
end
|
ruby
|
{
"resource": ""
}
|
q4266
|
Xrc.Client.reply
|
train
|
def reply(options)
say(
body: options[:body],
from: options[:to].to,
to: options[:to].from,
type: options[:to].type,
)
end
|
ruby
|
{
"resource": ""
}
|
q4267
|
Xrc.Client.say
|
train
|
def say(options)
post Elements::Message.new(
body: options[:body],
from: options[:from],
to: options[:to],
type: options[:type],
)
end
|
ruby
|
{
"resource": ""
}
|
q4268
|
PEROBS.BigArray.[]=
|
train
|
def []=(index, value)
index = validate_index_range(index)
@store.transaction do
if index < @entry_counter
# Overwrite of an existing element
@root.set(index, value)
elsif index == @entry_counter
# Append right at the end
@root.insert(index, value)
self.entry_counter += 1
else
# Append with nil padding
@entry_counter.upto(index - 1) do |i|
@root.insert(i, nil)
end
@root.insert(index, value)
self.entry_counter = index + 1
end
end
end
|
ruby
|
{
"resource": ""
}
|
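The `[]=` entry above distinguishes overwrite, append, and append-with-nil-padding based on where the index falls relative to the current entry count. The same three cases can be shown with a plain Ruby Array (which pads with nil automatically), as a reference for the expected semantics:

```ruby
# Plain-Array reference for the three cases handled by BigArray#[]= above.
ary = []

ary[0] = 'a' # append right at the end (index == length)
ary[1] = 'b' # append again
ary[0] = 'A' # overwrite an existing element (index < length)
ary[5] = 'f' # append with nil padding (index > length)

p ary        # => ["A", "b", nil, nil, nil, "f"]
p ary.length # => 6, matching entry_counter = index + 1 in the entry above
```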
q4269
|
PEROBS.BigArray.insert
|
train
|
def insert(index, value)
index = validate_index_range(index)
if index < @entry_counter
# Insert in between existing elements
@store.transaction do
@root.insert(index, value)
self.entry_counter += 1
end
else
self[index] = value
end
end
|
ruby
|
{
"resource": ""
}
|
q4270
|
PEROBS.BigArray.delete_if
|
train
|
def delete_if
old_root = @root
clear
old_root.each do |k, v|
if !yield(k, v)
insert(k, v)
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4271
|
PEROBS.BigArray.each
|
train
|
def each(&block)
node = @first_leaf
while node
break unless node.each(&block)
node = node.next_sibling
end
end
|
ruby
|
{
"resource": ""
}
|
q4272
|
PEROBS.BigArray.reverse_each
|
train
|
def reverse_each(&block)
node = @last_leaf
while node
break unless node.reverse_each(&block)
node = node.prev_sibling
end
end
|
ruby
|
{
"resource": ""
}
|
q4273
|
PEROBS.BigArray.to_a
|
train
|
def to_a
ary = []
node = @first_leaf
while node do
ary += node.values
node = node.next_sibling
end
ary
end
|
ruby
|
{
"resource": ""
}
|
q4274
|
PEROBS.SpaceTreeNode.save
|
train
|
def save
bytes = [ @blob_address, @size,
@parent ? @parent.node_address : 0,
@smaller ? @smaller.node_address : 0,
@equal ? @equal.node_address : 0,
@larger ? @larger.node_address : 0].pack(NODE_BYTES_FORMAT)
@tree.nodes.store_blob(@node_address, bytes)
end
|
ruby
|
{
"resource": ""
}
|
q4275
|
PEROBS.SpaceTreeNode.add_space
|
train
|
def add_space(address, size)
node = self
loop do
if node.size == 0
# This happens only for the root node if the tree is empty.
node.set_size_and_address(size, address)
break
elsif size < node.size
# The new size is smaller than this node.
if node.smaller
# There is already a smaller node, so pass it on.
node = node.smaller
else
# There is no smaller node yet, so we create a new one as a
# smaller child of the current node.
node.set_link('@smaller',
SpaceTreeNode::create(@tree, address, size, node))
break
end
elsif size > node.size
# The new size is larger than this node.
if node.larger
# There is already a larger node, so pass it on.
node = node.larger
else
# There is no larger node yet, so we create a new one as a larger
# child of the current node.
node.set_link('@larger',
SpaceTreeNode::create(@tree, address, size, node))
break
end
else
# Same size as current node. Insert new node as equal child at top of
# equal list.
new_node = SpaceTreeNode::create(@tree, address, size, node)
new_node.set_link('@equal', node.equal)
node.set_link('@equal', new_node)
break
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4276
|
PEROBS.SpaceTreeNode.has_space?
|
train
|
def has_space?(address, size)
node = self
loop do
if node.blob_address == address
return size == node.size
elsif size < node.size && node.smaller
node = node.smaller
elsif size > node.size && node.larger
node = node.larger
elsif size == node.size && node.equal
node = node.equal
else
return false
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4277
|
PEROBS.SpaceTreeNode.relink_parent
|
train
|
def relink_parent(node)
if @parent
if @parent.smaller == self
@parent.set_link('@smaller', node)
elsif @parent.equal == self
@parent.set_link('@equal', node)
elsif @parent.larger == self
@parent.set_link('@larger', node)
else
PEROBS.log.fatal "Cannot relink unknown child node with address " +
"#{node.node_address} from #{parent.to_s}"
end
else
if node
@tree.set_root(node)
node.parent = nil
else
set_size_and_address(0, 0)
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4278
|
PEROBS.SpaceTreeNode.to_a
|
train
|
def to_a
ary = []
each do |node, mode, stack|
if mode == :on_enter
ary << [ node.blob_address, node.size ]
end
end
ary
end
|
ruby
|
{
"resource": ""
}
|
q4279
|
PEROBS.SpaceTreeNode.check
|
train
|
def check(flat_file, count)
node_counter = 0
max_depth = 0
@tree.progressmeter.start('Checking space list entries', count) do |pm|
each do |node, mode, stack|
max_depth = stack.size if stack.size > max_depth
case mode
when :smaller
if node.smaller
return false unless node.check_node_link('smaller', stack)
smaller_node = node.smaller
if smaller_node.size >= node.size
PEROBS.log.error "Smaller SpaceTreeNode size " +
"(#{smaller_node}) is not smaller than #{node}"
return false
end
end
when :equal
if node.equal
return false unless node.check_node_link('equal', stack)
equal_node = node.equal
if equal_node.smaller || equal_node.larger
PEROBS.log.error "Equal node #{equal_node} must not have " +
"smaller/larger childs"
return false
end
if node.size != equal_node.size
PEROBS.log.error "Equal SpaceTreeNode size (#{equal_node}) " +
"is not equal parent node #{node}"
return false
end
end
when :larger
if node.larger
return false unless node.check_node_link('larger', stack)
larger_node = node.larger
if larger_node.size <= node.size
PEROBS.log.error "Larger SpaceTreeNode size " +
"(#{larger_node}) is not larger than #{node}"
return false
end
end
when :on_exit
if flat_file &&
!flat_file.has_space?(node.blob_address, node.size)
PEROBS.log.error "SpaceTreeNode has space at offset " +
"#{node.blob_address} of size #{node.size} that isn't " +
"available in the FlatFile."
return false
end
pm.update(node_counter += 1)
end
end
end
PEROBS.log.debug "#{node_counter} SpaceTree nodes checked"
PEROBS.log.debug "Maximum tree depth is #{max_depth}"
return true
end
|
ruby
|
{
"resource": ""
}
|
q4280
|
PEROBS.SpaceTreeNode.check_node_link
|
train
|
def check_node_link(link, stack)
if (node = instance_variable_get('@' + link))
# Node links must only be of class SpaceTreeNodeLink
unless node.nil? || node.is_a?(SpaceTreeNodeLink)
PEROBS.log.error "Node link #{link} of node #{to_s} " +
"is of class #{node.class}"
return false
end
# Link must not point back to self.
if node == self
PEROBS.log.error "#{link} address of node " +
"#{node.to_s} points to self #{to_s}"
return false
end
# Link must not point to any of the parent nodes.
if stack.include?(node)
PEROBS.log.error "#{link} address of node #{to_s} " +
"points to parent node #{node}"
return false
end
# Parent link of node must point back to self.
if node.parent != self
PEROBS.log.error "@#{link} node #{node.to_s} does not have parent " +
"link pointing " +
"to parent node #{to_s}. Pointing at " +
"#{node.parent.nil? ? 'nil' : node.parent.to_s} instead."
return false
end
end
true
end
|
ruby
|
{
"resource": ""
}
|
q4281
|
PEROBS.SpaceTreeNode.to_tree_s
|
train
|
def to_tree_s
str = ''
each do |node, mode, stack|
if mode == :on_enter
begin
branch_mark = node.parent.nil? ? '' :
node.parent.smaller == node ? '<' :
node.parent.equal == node ? '=' :
node.parent.larger == node ? '>' : '@'
str += "#{node.text_tree_prefix}#{branch_mark}-" +
"#{node.smaller || node.equal || node.larger ? 'v-' : '--'}" +
"#{node.to_s}\n"
rescue
str += "#{node.text_tree_prefix}- @@@@@@@@@@\n"
end
end
end
str
end
|
ruby
|
{
"resource": ""
}
|
q4282
|
PEROBS.SpaceTreeNode.text_tree_prefix
|
train
|
def text_tree_prefix
if (node = @parent)
str = '+'
else
# Prefix start for root node line
str = 'o'
end
while node
last_child = false
if node.parent
if node.parent.smaller == node
last_child = node.parent.equal.nil? && node.parent.larger.nil?
elsif node.parent.equal == node
last_child = node.parent.larger.nil?
elsif node.parent.larger == node
last_child = true
end
else
# Padding for the root node
str = ' ' + str
break
end
str = (last_child ? ' ' : '| ') + str
node = node.parent
end
str
end
|
ruby
|
{
"resource": ""
}
|
q4283
|
PEROBS.PersistentObjectCacheLine.flush
|
train
|
def flush(now)
if now || @entries.length > WATERMARK
@entries.each do |e|
if e.modified
e.obj.save
e.modified = false
end
end
# Delete all but the first WATERMARK entry.
@entries = @entries[0..WATERMARK - 1] if @entries.length > WATERMARK
end
end
|
ruby
|
{
"resource": ""
}
|
q4284
|
EPPClient.Contact.contact_check
|
train
|
def contact_check(*contacts)
contacts.flatten!
response = send_request(contact_check_xml(*contacts))
get_result(:xml => response, :callback => :contact_check_process)
end
|
ruby
|
{
"resource": ""
}
|
q4285
|
EPPClient.Contact.contact_info
|
train
|
def contact_info(args)
args = { :id => args } if args.is_a?(String)
response = send_request(contact_info_xml(args))
get_result(:xml => response, :callback => :contact_info_process)
end
|
ruby
|
{
"resource": ""
}
|
q4286
|
Wmctile.Window.find_window
|
train
|
def find_window(current_workspace_only = false)
@matching_windows = Wmctile.window_list.grep(@regexp_string)
filter_out_workspaces if current_workspace_only
if @matching_windows.count > 1
filter_more_matching_windows
elsif @matching_windows.count == 1
@matching_line = @matching_windows[0]
else
fail Errors::WindowNotFound, @window_string
end
extract_matching_line_information
end
|
ruby
|
{
"resource": ""
}
|
q4287
|
Synvert::Core.Rewriter::ReplaceErbStmtWithExprAction.begin_pos
|
train
|
def begin_pos
node_begin_pos = @node.loc.expression.begin_pos
while @node.loc.expression.source_buffer.source[node_begin_pos -= 1] == ' '
end
node_begin_pos - Engine::ERUBY_STMT_SPLITTER.length + 1
end
|
ruby
|
{
"resource": ""
}
|
q4288
|
Synvert::Core.Rewriter::ReplaceErbStmtWithExprAction.end_pos
|
train
|
def end_pos
node_begin_pos = @node.loc.expression.begin_pos
node_begin_pos += @node.loc.expression.source.index "do"
while @node.loc.expression.source_buffer.source[node_begin_pos += 1] != '@'
end
node_begin_pos
end
|
ruby
|
{
"resource": ""
}
|
q4289
|
Synvert::Core.Rewriter::ReplaceErbStmtWithExprAction.rewritten_code
|
train
|
def rewritten_code
@node.loc.expression.source_buffer.source[begin_pos...end_pos].sub(Engine::ERUBY_STMT_SPLITTER, "@output_buffer.append= ")
.sub(Engine::ERUBY_STMT_SPLITTER, Engine::ERUBY_EXPR_SPLITTER)
end
|
ruby
|
{
"resource": ""
}
|
q4290
|
Tapestry.Factory.verify_page
|
train
|
def verify_page(context)
return if context.url_match_attribute.nil?
return if context.has_correct_url?
raise Tapestry::Errors::PageURLFromFactoryNotVerified
end
|
ruby
|
{
"resource": ""
}
|
q4291
|
PEROBS.Cache.cache_read
|
train
|
def cache_read(obj)
# This is just a safety check. It can probably be disabled in the future
# to increase performance.
if obj.respond_to?(:is_poxreference?)
# If this condition triggers, we have a bug in the library.
PEROBS.log.fatal "POXReference objects should never be cached"
end
@reads[index(obj)] = obj
end
|
ruby
|
{
"resource": ""
}
|
q4292
|
PEROBS.Cache.begin_transaction
|
train
|
def begin_transaction
if @transaction_stack.empty?
# The new transaction is the top-level transaction. Flush the write
# buffer to save the current state of all objects.
flush
else
# Save a copy of all objects that were modified during the enclosing
# transaction.
@transaction_stack.last.each do |id|
@transaction_objects[id]._stash(@transaction_stack.length - 1)
end
end
# Push a transaction buffer onto the transaction stack. This buffer will
# hold a reference to all objects modified during this transaction.
@transaction_stack.push(::Array.new)
end
|
ruby
|
{
"resource": ""
}
|
q4293
|
PEROBS.Cache.end_transaction
|
train
|
def end_transaction
case @transaction_stack.length
when 0
PEROBS.log.fatal 'No ongoing transaction to end'
when 1
# All transactions completed successfully. Write all modified objects
# into the backend storage.
@transaction_stack.pop.each { |id| @transaction_objects[id]._sync }
@transaction_objects = ::Hash.new
else
# A nested transaction completed successfully. We add the list of
# modified objects to the list of the enclosing transaction.
transactions = @transaction_stack.pop
# Merge the two lists
@transaction_stack.push(@transaction_stack.pop + transactions)
# Ensure that each object is only included once in the list.
@transaction_stack.last.uniq!
end
end
|
ruby
|
{
"resource": ""
}
|
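`begin_transaction` and `end_transaction` above keep a stack of per-transaction object lists; when a nested transaction commits, its list is merged into the enclosing one, and only the outermost commit writes to the backend. A minimal standalone sketch of that stack discipline, tracking plain ids instead of PEROBS objects:

```ruby
# Minimal nested-transaction stack, tracking modified ids instead of objects.
class TxStack
  attr_reader :committed

  def initialize
    @stack = []
    @committed = []
  end

  def begin_transaction
    @stack.push([])
  end

  def mark_modified(id)
    @stack.last << id unless @stack.empty?
  end

  def end_transaction
    raise 'No ongoing transaction to end' if @stack.empty?

    finished = @stack.pop
    if @stack.empty?
      @committed.concat(finished)               # outermost commit: "write to the backend"
    else
      @stack.push((@stack.pop + finished).uniq) # merge into the enclosing transaction
    end
  end
end

tx = TxStack.new
tx.begin_transaction
tx.mark_modified(1)
tx.begin_transaction
tx.mark_modified(2)
tx.end_transaction # merged into the outer transaction
tx.end_transaction # only now are ids "written"
p tx.committed     # => [1, 2]
```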
q4294
|
PEROBS.Cache.abort_transaction
|
train
|
def abort_transaction
if @transaction_stack.empty?
PEROBS.log.fatal 'No ongoing transaction to abort'
end
@transaction_stack.pop.each do |id|
@transaction_objects[id]._restore(@transaction_stack.length)
end
end
|
ruby
|
{
"resource": ""
}
|
q4295
|
Glicko2.Player.update_obj
|
train
|
def update_obj
mean, sd = rating.to_glicko_rating
@obj.rating = mean
@obj.rating_deviation = sd
@obj.volatility = volatility
end
|
ruby
|
{
"resource": ""
}
|
q4296
|
Abuelo.Graph.add_node
|
train
|
def add_node(node)
raise Abuelo::Exceptions::NodeAlreadyExistsError if has_node?(node)
@nodes[node.name] = node
node.graph = self
self
end
|
ruby
|
{
"resource": ""
}
|
q4297
|
Abuelo.Graph.add_edge
|
train
|
def add_edge(edge, opts = {})
raise Abuelo::Exceptions::EdgeAlreadyExistsError if has_edge?(edge)
@edges[edge.node_1] ||= {}
@edges[edge.node_1][edge.node_2] = edge
if undirected? && !opts[:symmetric]
add_edge(edge.symmetric, symmetric: true)
end
self
end
|
ruby
|
{
"resource": ""
}
|
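The `add_edge` entry above stores edges in a nested hash keyed by the two endpoint nodes and, for undirected graphs, inserts the mirrored edge exactly once via the `symmetric: true` guard. A standalone sketch of that nested-hash layout with string node names and weights in place of edge objects:

```ruby
# Nested-hash edge storage as in the entry above, with plain string nodes.
edges = {}

def add_edge(edges, from, to, weight, undirected: true, symmetric: false)
  edges[from] ||= {}
  edges[from][to] = weight
  if undirected && !symmetric
    # The symmetric flag stops the recursion after one mirrored insert.
    add_edge(edges, to, from, weight, undirected: undirected, symmetric: true)
  end
  edges
end

add_edge(edges, 'A', 'B', 3)
add_edge(edges, 'B', 'C', 5)
p edges
# => {"A"=>{"B"=>3}, "B"=>{"A"=>3, "C"=>5}, "C"=>{"B"=>5}}
```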
q4298
|
Synvert::Core.Rewriter::Instance.process
|
train
|
def process
file_pattern = File.join(Configuration.instance.get(:path), @file_pattern)
Dir.glob(file_pattern).each do |file_path|
unless Configuration.instance.get(:skip_files).include? file_path
begin
conflict_actions = []
source = self.class.file_source(file_path)
ast = self.class.file_ast(file_path)
@current_file = file_path
self.process_with_node ast do
begin
instance_eval &@block
rescue NoMethodError
puts @current_node.debug_info
raise
end
end
if @actions.length > 0
@actions.sort_by! { |action| action.send(@options[:sort_by]) }
conflict_actions = get_conflict_actions
@actions.reverse.each do |action|
source[action.begin_pos...action.end_pos] = action.rewritten_code
source = remove_code_or_whole_line(source, action.line)
end
@actions = []
self.class.write_file(file_path, source)
end
rescue Parser::SyntaxError
puts "[Warn] file #{file_path} was not parsed correctly."
# do nothing, iterate next file
end while !conflict_actions.empty?
end
end
end
|
ruby
|
{
"resource": ""
}
|
q4299
|
Synvert::Core.Rewriter::Instance.get_conflict_actions
|
train
|
def get_conflict_actions
i = @actions.length - 1
j = i - 1
conflict_actions = []
return if i < 0
begin_pos = @actions[i].begin_pos
while j > -1
if begin_pos <= @actions[j].end_pos
conflict_actions << @actions.delete_at(j)
else
i = j
begin_pos = @actions[i].begin_pos
end
j -= 1
end
conflict_actions
end
|
ruby
|
{
"resource": ""
}
|
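`get_conflict_actions` above walks the sorted action list from the end and pulls out any earlier action whose end position overlaps the begin position of the most recently kept action, so overlapping rewrites are deferred to a later pass. A standalone sketch of that scan, using `[begin_pos, end_pos]` pairs in place of action objects:

```ruby
# Actions as [begin_pos, end_pos] pairs, already sorted by begin_pos.
def get_conflict_actions(actions)
  i = actions.length - 1
  j = i - 1
  conflicts = []
  return conflicts if i < 0

  begin_pos = actions[i][0]
  while j > -1
    if begin_pos <= actions[j][1]
      conflicts << actions.delete_at(j) # overlaps the kept action: defer it
    else
      i = j
      begin_pos = actions[i][0]
    end
    j -= 1
  end
  conflicts
end

actions = [[0, 10], [5, 12], [20, 30]]
p get_conflict_actions(actions) # => [[0, 10]] (overlaps the kept [5, 12])
p actions                       # => [[5, 12], [20, 30]]
```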