_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q4700
|
GeoWorks.Install.inject_solr_document_behavior
|
train
|
# Injects GeoWorks behavior into the host app's SolrDocument model.
#
# Appends `include GeoWorks::SolrDocumentBehavior` immediately after the
# `include Blacklight::Solr::Document` line in app/models/solr_document.rb.
# When the file is missing, logs a failure message instead of raising.
#
# Fix: corrected grammar in the user-facing log message
# ("This generators assumes" -> "This generator assumes").
def inject_solr_document_behavior
  file_path = 'app/models/solr_document.rb'
  if File.exist?(file_path)
    inject_into_file file_path, after: /include Blacklight::Solr::Document.*$/ do
      "\n # Adds GeoWorks behaviors to the SolrDocument.\n" \
      " include GeoWorks::SolrDocumentBehavior\n"
    end
  else
    Rails.logger.info " \e[31mFailure\e[0m GeoWorks requires a SolrDocument object. This generator assumes that the model is defined in the file #{file_path}, which does not exist."
  end
end
|
ruby
|
{
"resource": ""
}
|
q4701
|
QuestionproRails.SurveyResponse.response_set
|
train
|
# Wraps each raw entry of qp_response_set in a ResponseSet object.
# Returns an empty array when qp_response_set is nil.
def response_set
  raw_sets = self.qp_response_set
  return [] if raw_sets.nil?
  raw_sets.map { |raw| ResponseSet.new(raw) }
end
|
ruby
|
{
"resource": ""
}
|
q4702
|
RailsPaginate::Renderers.Base.url_for_page
|
train
|
# Builds the URL for the given page number by layering the page parameter
# and any extra :params over the view's default URL options.
def url_for_page(page)
  url_params = view.default_url_options.merge(page_param.to_sym => page)
  url_params = url_params.merge(options[:params] || {})
  view.url_for(url_params)
end
|
ruby
|
{
"resource": ""
}
|
q4703
|
RailsPaginate::Renderers.Base.link_to_page
|
train
|
# Renders one pagination element: a spacer when key is nil, a disabled span
# when page is nil, otherwise a link to that page.
def link_to_page(page, key, link_options = {})
  css_class = "#{link_options[:class]} #{page == current_page ? 'current' : ''}"
  return content_tag(:span, "..", :class => "spacer") if key.nil?
  return content_tag(:span, t(key), :class => "#{css_class} unavailable") if page.nil?
  label = t(key, :page => page)
  link_to label, url_for_page(page),
          :class => css_class,
          :alt => view.strip_tags(label),
          :remote => options[:remote],
          :method => options[:method]
end
|
ruby
|
{
"resource": ""
}
|
q4704
|
Communist.Server.stop
|
train
|
# Stops the server registered for `app` and joins its serving thread.
#
# Bug fix: the capability checks previously interrogated `Communist.server`
# (a different object) while invoking shutdown on the local `server` fetched
# from the registry; the checks now target the object actually being stopped.
def stop
  # Remove this app's server from the registry; fall back to a no-op server.
  server = Communist.servers.delete(app.object_id) { |s| NullServer.new }
  if server.respond_to?(:shutdown)
    server.shutdown
  elsif server.respond_to?(:stop!)
    server.stop!
  else
    server.stop
  end
  @server_thread.join
end
|
ruby
|
{
"resource": ""
}
|
q4705
|
Pagination.Collection.displayed_pages
|
train
|
# Returns the array of page numbers to render around the current page.
# Near either end of the range the window is clamped so it never runs past
# page 1 or the last page.
def displayed_pages(limit = 10, left_offset = -5, right_offset = 4)
  last_page = pages.last
  if page + left_offset < 1 || page + right_offset > last_page
    # Clamped window at the edges of the page range.
    lower = [page, [last_page - limit, 0].max + 1].min
    upper = [page + limit - 1, last_page].min
  else
    lower = page + left_offset
    upper = page + right_offset
  end
  (lower..upper).to_a
end
|
ruby
|
{
"resource": ""
}
|
q4706
|
OrangeData.Transport.ping
|
train
|
# True when the OrangeData endpoint answers 200 with the expected plaintext
# banner; any error (network, parsing, ...) yields false.
def ping
  response = transport.get('') { |req| req.headers['Accept'] = 'text/plain' }
  response.status == 200 && response.body == "Nebula.Api v2"
rescue StandardError => _e
  false
end
|
ruby
|
{
"resource": ""
}
|
q4707
|
Unipept.BatchOrder.wait
|
train
|
# Registers `block` as the handler for batch `i`, then — only when `i` is the
# batch the cursor is waiting on — drains every consecutively-numbered
# handler in order. Out-of-order arrivals are parked until their turn.
#
# NOTE(review): mixes @order/@current ivars with the `order` reader —
# presumably an attr_reader over the same hash; verify.
def wait(i, &block)
  @order[i] = block
  # Only the batch matching the flush cursor may drain the queue.
  return unless i == @current
  while order[@current]
    # Remove-and-call keeps the hash from growing while flushing.
    order.delete(@current).call
    @current += 1
  end
end
|
ruby
|
{
"resource": ""
}
|
q4708
|
Take2.InstanceMethods.call_api_with_retry
|
train
|
# Yields the given block, retrying when a configured retriable error class is
# raised. Local `options` override the class-level retry configuration.
# Re-raises the original error once retries are exhausted or the error is not
# retriable.
#
# NOTE(review): the retry is skipped when retry_condition_proc returns truthy
# — i.e. the proc expresses "stop retrying", not "should retry"; confirm
# against the gem's documented contract. `rest` presumably sleeps according
# to the backoff intervals — verify.
def call_api_with_retry(options = {})
  config = self.class.retriable_configuration
  config.merge!(Take2.local_defaults(options)) unless options.empty?
  tries ||= config[:retries]
  begin
    yield
  rescue => e
    if config[:retriable].map { |klass| e.class <= klass }.any?
      unless tries.zero? || config[:retry_condition_proc]&.call(e)
        config[:retry_proc]&.call(e, tries)
        rest(config, tries)
        tries -= 1
        retry
      end
    end
    # Not retriable, retries exhausted, or condition proc vetoed the retry.
    raise e
  end
end
|
ruby
|
{
"resource": ""
}
|
q4709
|
Take2.ClassMethods.number_of_retries
|
train
|
# Configures how many retries to perform. Only positive Integers are valid.
def number_of_retries(num)
  valid = num.is_a?(Integer) && num.positive?
  raise ArgumentError, 'Must be positive Integer' unless valid
  self.retries = num
end
|
ruby
|
{
"resource": ""
}
|
q4710
|
Take2.ClassMethods.retriable_errors
|
train
|
# Configures the list of retriable error classes; each must descend from
# StandardError.
def retriable_errors(*errors)
  message = 'All retriable errors must be StandardError decendants'
  all_standard = errors.all? { |e| e <= StandardError }
  raise ArgumentError, message unless all_standard
  self.retriable = errors
end
|
ruby
|
{
"resource": ""
}
|
q4711
|
Take2.ClassMethods.backoff_strategy
|
train
|
# Configures the backoff interval strategy. Valid :type values are
# :constant, :linear, :fibonacci and :exponential.
def backoff_strategy(options)
  available_types = [:constant, :linear, :fibonacci, :exponential]
  chosen = options[:type]
  raise ArgumentError, 'Incorrect backoff type' unless available_types.include?(chosen)
  self.backoff_intervals = Backoff.new(chosen, options[:start]).intervals
end
|
ruby
|
{
"resource": ""
}
|
q4712
|
Take2.ClassMethods.retriable_configuration
|
train
|
# Snapshot of the current retry configuration as an attr => value Hash.
def retriable_configuration
  Take2::Configuration::CONFIG_ATTRS.map { |attr| [attr, send(attr)] }.to_h
end
|
ruby
|
{
"resource": ""
}
|
q4713
|
Jim.Installer.install
|
train
|
# Fetches, names and installs a JS package into install_path.
#
# Flow: fetch the source, derive name/version, then either
#   * directory source: recursively install every .js file (returns the
#     array of installed paths), or
#   * single file: write to lib/<name>-<version>/<name>.js (or a flat
#     "<name>-<version>.<ext>" file when options[:shallow]).
# Duplicate files are skipped; differing existing files abort with false
# unless options[:force]. The fetched temp path is removed in the ensure.
def install
  fetch
  parse_package_json
  determine_name_and_version
  if !name || name.to_s =~ /^\s*$/ # blank
    raise(Jim::InstallError, "Could not determine name for #{@fetched_path}")
  end
  logger.info "Installing #{name} #{version}"
  logger.debug "fetched_path #{@fetched_path}"
  if options[:shallow]
    # Shallow install: flat filename, omitting a "0" (unknown) version.
    shallow_filename = [name, (version == "0" ? nil : version)].compact.join('-')
    final_path = install_path + "#{shallow_filename}#{fetched_path.extname}"
  else
    final_path = install_path + 'lib' + "#{name}-#{version}" + "#{name}.js"
  end
  if @fetched_path.directory?
    # install every js file
    installed_paths = []
    # Child installers re-derive name/version per file but inherit ours as parent.
    sub_options = options.merge({
      :name => nil,
      :version => nil,
      :parent_version => version,
      :package_json => package_json.merge("name" => nil)
    })
    Jim.each_path_in_directories([@fetched_path], '.js', IGNORE_DIRS) do |subfile|
      logger.debug "Found file #{subfile}"
      installed_paths << Jim::Installer.new(subfile, install_path, sub_options).install
    end
    logger.debug "Extracted to #{install_path}, #{installed_paths.length} file(s)"
    return installed_paths
  end
  logger.debug "Installing to #{final_path}"
  if final_path.exist?
    logger.debug "#{final_path} already exists"
    if options[:force]
      FileUtils.rm_rf(final_path)
    elsif Digest::MD5.hexdigest(File.read(final_path)) == Digest::MD5.hexdigest(File.read(@fetched_path))
      # Content-identical: nothing to do.
      logger.warn "Duplicate file, skipping"
      return final_path
    else
      logger.error "Trying to install to #{final_path}, but file already exists and is different."
      return false
    end
  end
  Downlow.extract(@fetched_path, :destination => final_path, :tmp_dir => tmp_root)
  # install json
  install_package_json(final_path.dirname + 'package.json') if !options[:shallow]
  installed = final_path.directory? ? Dir.glob(final_path + '**/*').length : 1
  logger.debug "Extracted to #{final_path}, #{installed} file(s)"
  final_path
ensure
  # Always clean up the fetched temp copy, whatever branch was taken.
  FileUtils.rm_rf(@fetched_path) if @fetched_path && @fetched_path.exist?
  final_path
end
|
ruby
|
{
"resource": ""
}
|
q4714
|
CouchRest.Validation.validate_casted_arrays
|
train
|
# Validates every element of each casted Array property.
# Returns false when any castee is invalid, true otherwise.
#
# NOTE(review): validation of a whole array is skipped unless its FIRST
# element responds to valid? — heterogeneous arrays may be under-validated;
# verify intent.
def validate_casted_arrays
  result = true
  array_casted_properties = self.class.properties.select { |property| property.casted && property.type.instance_of?(Array) }
  array_casted_properties.each do |property|
    casted_values = self.send(property.name)
    next unless casted_values.is_a?(Array) && casted_values.first.respond_to?(:valid?)
    casted_values.each do |value|
      # Note: valid? still runs on every value (no short-circuit), so all
      # validation errors get populated.
      result = (result && value.valid?) if value.respond_to?(:valid?)
    end
  end
  result
end
|
ruby
|
{
"resource": ""
}
|
q4715
|
CouchRest.Validation.recursive_valid?
|
train
|
# Recursively validates `target` and all its validatable casted properties
# (including elements of Array properties), then runs target's own validate
# callbacks/validators. The accumulated validity is ANDed through `state` so
# every object is still visited even after a failure.
def recursive_valid?(target, context, state)
  valid = state
  target.each do |key, prop|
    if prop.is_a?(Array)
      prop.each do |item|
        if item.validatable?
          # Recurse first so nested errors populate before the parent's own check.
          valid = recursive_valid?(item, context, valid) && valid
        end
      end
    elsif prop.validatable?
      valid = recursive_valid?(prop, context, valid) && valid
    end
  end
  target._run_validate_callbacks do
    target.class.validators.execute(context, target) && valid
  end
end
|
ruby
|
{
"resource": ""
}
|
q4716
|
HoneyFormat.Registry.call
|
train
|
# Converts `value` with `type`: a callable is used directly, otherwise the
# caller registered under that type is looked up and invoked.
def call(value, type)
  return type.call(value) if type.respond_to?(:call)
  converter = self[type]
  converter.call(value)
end
|
ruby
|
{
"resource": ""
}
|
q4717
|
HoneyFormat.Registry.[]=
|
train
|
# Registers `caller` under the normalized `type` key.
# Raises TypeExistsError when the type is already registered.
def []=(type, caller)
  key = to_key(type)
  raise(Errors::TypeExistsError, "type '#{key}' already exists") if type?(key)
  @callers[key] = caller
end
|
ruby
|
{
"resource": ""
}
|
q4718
|
Cleverbot.Client.write
|
train
|
# Sends `message` to the Cleverbot API and returns the bot's reply.
# Conversation state from the response is folded into @params so the next
# call continues the same conversation; keys matching the defaults are
# pruned to keep @params minimal.
def write message=''
  response = self.class.write message, @params
  # The reply text; extracted before the response hash is mutated below.
  message = response['message']
  # Keep only keys that belong to the param set we track.
  response.keep_if { |key, value| DEFAULT_PARAMS.keys.include? key }
  @params.merge! response
  # Drop params that still equal their defaults.
  @params.delete_if { |key, value| DEFAULT_PARAMS[key] == value }
  message
end
|
ruby
|
{
"resource": ""
}
|
q4719
|
Command.Runner.pass!
|
train
|
# Executes the command through the backend, forwarding interpolations, env
# and options. Raises NoCommandError when the executable is missing.
def pass!(interops = {}, options = {}, &block)
  options[:unsafe] = @unsafe
  environment = options.delete(:env) || {}
  argv = contents(interops)
  backend.call(*argv, environment, options, &block)
rescue Errno::ENOENT
  raise NoCommandError, @command
end
|
ruby
|
{
"resource": ""
}
|
q4720
|
XO.Grid.each
|
train
|
# Yields (row, column, value) for every cell, in row-major order.
def each
  1.upto(ROWS) do |row|
    1.upto(COLS) do |col|
      yield(row, col, self[row, col])
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q4721
|
XO.Grid.each_open
|
train
|
# Yields (row, column) for every cell that is currently open.
def each_open
  each do |row, col, _value|
    yield(row, col) if open?(row, col)
  end
end
|
ruby
|
{
"resource": ""
}
|
q4722
|
Postamt.ConnectionHandler.connected?
|
train
|
# True when this process still owns the pool registry and the pool for
# `klass` reports a live connection.
def connected?(klass)
  return false unless Process.pid == @process_pid.get
  pool = self.retrieve_connection_pool(klass)
  pool && pool.connected?
end
|
ruby
|
{
"resource": ""
}
|
q4723
|
CodeModels.NavigationExtensions.all_children
|
train
|
# Collects the direct children of this node by walking every containment
# reference of the ECore metamodel (sorted by name, each reference name
# visited once). With flag == :also_foreign, foreign ASTs are appended.
#
# Cleanup: removed two unused `d = arr.count` locals and a dead
# `unless res==nil` guard that sat inside `res.each` (res is necessarily
# non-nil at that point, so the guard never changed behavior).
def all_children(flag=nil)
  also_foreign = (flag==:also_foreign)
  arr = []
  ecore = self.class.ecore
  # Awful hack to forbid the same reference is visited twice when
  # two references with the same name are found
  already_used_references = []
  ecore.eAllReferences.sort_by{|r| r.name}.select {|r| r.containment}.each do |ref|
    unless already_used_references.include?(ref.name)
      res = self.send(ref.name.to_sym)
      if ref.many
        res.each { |el| arr << el }
      elsif res!=nil
        arr << res
      end
      already_used_references << ref.name
    end
  end
  arr.concat(self.foreign_asts) if also_foreign
  arr
end
|
ruby
|
{
"resource": ""
}
|
q4724
|
CodeModels.NavigationExtensions.all_children_deep
|
train
|
# Depth-first list of all descendants: each child followed by its own
# descendants, preserving child order.
def all_children_deep(flag=nil)
  all_children(flag).flat_map do |child|
    [child] + child.all_children_deep(flag)
  end
end
|
ruby
|
{
"resource": ""
}
|
q4725
|
CodeModels.NavigationExtensions.traverse
|
train
|
# Applies `op` to this node and then to every descendant, depth-first.
def traverse(flag=nil,&op)
  [self, *all_children_deep(flag)].each { |node| op.call(node) }
end
|
ruby
|
{
"resource": ""
}
|
q4726
|
Rtasklib.Execute.handle_response
|
train
|
# Raises a diagnostic dump (exit status + stderr + stdout) when the child
# process did not exit successfully; otherwise does nothing.
def handle_response stdout, stderr, thread
  return if thread.value.success?
  diagnostic = "#{thread.value} \n Stderr: #{stderr.read} \n Stdout: #{stdout.read} \n"
  raise diagnostic
end
|
ruby
|
{
"resource": ""
}
|
q4727
|
HoneyFormat.Configuration.header_deduplicator=
|
train
|
# Sets the header deduplication strategy: either the name of a registered
# strategy or any callable. Anything else raises
# UnknownDeduplicationStrategyError.
def header_deduplicator=(strategy)
  @header_deduplicator =
    if header_deduplicator_registry.type?(strategy)
      header_deduplicator_registry[strategy]
    elsif strategy.respond_to?(:call)
      strategy
    else
      raise(Errors::UnknownDeduplicationStrategyError, "unknown deduplication strategy: '#{strategy}'")
    end
end
|
ruby
|
{
"resource": ""
}
|
q4728
|
HoneyFormat.Configuration.default_header_deduplicators
|
train
|
# Memoized, frozen map of the built-in header deduplication strategies:
#   :deduplicate - rename duplicate columns to make them unique
#   :raise       - fail on any duplicate column
#   :none        - pass columns through untouched
def default_header_deduplicators
  @default_header_deduplicators ||= {
    deduplicate: proc do |columns|
      Helpers.key_count_to_deduplicated_array(columns)
    end,
    raise: proc do |columns|
      duplicates = Helpers.duplicated_items(columns)
      if duplicates.any?
        message = "all columns must be unique, duplicates are: #{duplicates}"
        raise(Errors::DuplicateHeaderColumnError, message)
      end
      columns
    end,
    none: proc { |columns| columns },
  }.freeze
end
|
ruby
|
{
"resource": ""
}
|
q4729
|
HoneyFormat.Configuration.default_converters
|
train
|
# Memoized, frozen registry of the built-in value converters.
# Bang variants (e.g. :integer!) raise on unconvertible input; the safe
# variants return nil (or a zero fallback for the *_or_zero keys).
def default_converters
  @default_converters ||= {
    # strict variants
    decimal!: StrictConvertDecimal,
    integer!: StrictConvertInteger,
    date!: StrictConvertDate,
    datetime!: StrictConvertDatetime,
    symbol!: StrictConvertSymbol,
    downcase!: StrictConvertDowncase,
    upcase!: StrictConvertUpcase,
    boolean!: StrictConvertBoolean,
    # safe variants
    decimal: ConvertDecimal,
    decimal_or_zero: ConvertDecimalOrZero,
    integer: ConvertInteger,
    integer_or_zero: ConvertIntegerOrZero,
    date: ConvertDate,
    datetime: ConvertDatetime,
    symbol: ConvertSymbol,
    downcase: ConvertDowncase,
    upcase: ConvertUpcase,
    boolean: ConvertBoolean,
    md5: ConvertMD5,
    hex: ConvertHex,
    nil: ConvertNil,
    blank: ConvertBlank,
    # :method_name is an alias for :header_column (same converter class).
    header_column: ConvertHeaderColumn,
    method_name: ConvertHeaderColumn,
  }.freeze
end
|
ruby
|
{
"resource": ""
}
|
q4730
|
Unipept.Commands::ApiRunner.host
|
train
|
# Resolves the API host: the --host option, then the config file, then the
# public default. Guarantees the result carries an http(s) scheme.
def host
  # find host in opts first
  resolved = options[:host] || @configuration['host']
  resolved = 'http://api.unipept.ugent.be' if resolved.nil? || resolved.empty?
  # add http:// if needed
  return resolved if resolved.start_with?('http://', 'https://')
  "http://#{resolved}"
end
|
ruby
|
{
"resource": ""
}
|
q4731
|
Unipept.Commands::ApiRunner.input_iterator
|
train
|
# Picks the input source in priority order: CLI arguments, the --input
# file, then standard input. Returns an enumerator over lines/items.
def input_iterator
  if !arguments.empty?
    arguments.each
  elsif options[:input]
    IO.foreach(options[:input])
  else
    $stdin.each_line
  end
end
|
ruby
|
{
"resource": ""
}
|
q4732
|
Unipept.Commands::ApiRunner.selected_fields
|
train
|
# Memoized list of output-field regexes built from the --select option
# (comma-separated globs). For fasta input, the required fields are forced
# into the selection as well.
#
# NOTE(review): required_fields are only appended when a selection exists
# (`!fields.empty?`) — presumably because an empty selection already means
# "all fields"; verify.
def selected_fields
  return @selected_fields unless @selected_fields.nil?
  fields = [*options[:select]].map { |f| f.split(',') }.flatten
  fields.concat(required_fields) if @fasta && !fields.empty?
  @selected_fields = fields.map { |f| glob_to_regex(f) }
end
|
ruby
|
{
"resource": ""
}
|
q4733
|
Unipept.Commands::ApiRunner.run
|
train
|
# Main request loop: batches the input, POSTs each batch concurrently via
# Typhoeus::Hydra, and uses BatchOrder to emit responses in input order.
# A final wait(last_id + 1) flushes the formatter footer after all batches.
def run
  ServerMessage.new(@host).print unless options[:quiet]
  hydra = Typhoeus::Hydra.new(max_concurrency: concurrent_requests)
  batch_order = Unipept::BatchOrder.new
  last_id = 0
  batch_iterator.iterate(input_iterator) do |input_slice, batch_id, fasta_mapper|
    last_id = batch_id
    # Presence of a fasta mapper marks the input as fasta for field selection.
    @fasta = !fasta_mapper.nil?
    request = ::RetryableTyphoeus::Request.new(
      @url,
      method: :post,
      body: construct_request_body(input_slice),
      accept_encoding: 'gzip',
      headers: { 'User-Agent' => @user_agent }
    )
    request.on_complete do |resp|
      # Output is serialized through batch_order so results keep input order.
      block = handle_response(resp, batch_id, fasta_mapper)
      batch_order.wait(batch_id, &block)
    end
    hydra.queue request
    # Drain the queue periodically to bound memory usage.
    hydra.run if (batch_id % queue_size).zero?
  end
  hydra.run
  batch_order.wait(last_id + 1) { output_writer.write_line formatter.footer }
end
|
ruby
|
{
"resource": ""
}
|
q4734
|
Unipept.Commands::ApiRunner.save_error
|
train
|
# Writes an API error message to the error log file (creating parent
# directories as needed) and prints a pointer to it on stderr.
def save_error(message)
  path = error_file_path
  FileUtils.mkdir_p File.dirname(path)
  File.open(path, 'w') do |file|
    file.write message
  end
  warn "API request failed! log can be found in #{path}"
end
|
ruby
|
{
"resource": ""
}
|
q4735
|
Unipept.Commands::ApiRunner.handle_response
|
train
|
# Dispatches an HTTP response to the success or failure handler.
def handle_response(response, batch_id, fasta_mapper)
  return handle_failed_response(response) unless response.success?
  handle_success_response(response, batch_id, fasta_mapper)
end
|
ruby
|
{
"resource": ""
}
|
q4736
|
Unipept.Commands::ApiRunner.filter_result
|
train
|
# Parses a JSON API response into an array of records and, when fields were
# selected, strips each record down to the matching keys.
# Parsing is deliberately best-effort: malformed JSON yields [].
#
# NOTE(review): Hash#select! returns nil when nothing was removed, so map!
# would replace a fully-kept record with nil — looks like a latent bug;
# verify whether a selection can ever match every key.
def filter_result(json_response)
  result = JSON[json_response] rescue []
  # Single-object responses are normalized to a one-element array.
  result = [result] unless result.is_a? Array
  result.map! { |r| r.select! { |k, _v| selected_fields.any? { |f| f.match k } } } unless selected_fields.empty?
  result
end
|
ruby
|
{
"resource": ""
}
|
q4737
|
Gitolite.GitoliteAdmin.save
|
train
|
# Stages all pending gitolite-admin changes in the working tree:
# rewrites the config file (when loaded) and reconciles the keydir with the
# in-memory key sets — removing keys deleted from memory and re-writing any
# key set marked dirty. Does not commit or push.
def save
  Dir.chdir(@gl_admin.working_dir) do
    #Process config file (if loaded, i.e. may be modified)
    if @config
      new_conf = @config.to_file(@confdir)
      @gl_admin.add(new_conf)
    end
    #Process ssh keys (if loaded, i.e. may be modified)
    if @ssh_keys
      files = list_keys(@keydir).map{|f| File.basename f}
      keys = @ssh_keys.values.map{|f| f.map {|t| t.filename}}.flatten
      # Files on disk with no in-memory counterpart get removed.
      to_remove = (files - keys).map { |f| File.join(@keydir, f)}
      @gl_admin.remove(to_remove)
      @ssh_keys.each_value do |key|
        #Write only keys from sets that has been modified
        next if key.respond_to?(:dirty?) && !key.dirty?
        key.each do |k|
          @gl_admin.add(k.to_file(@keydir))
        end
      end
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q4738
|
Gitolite.GitoliteAdmin.reset!
|
train
|
# Discards every local change in the gitolite-admin working tree: hard-resets
# to HEAD and removes untracked files/directories, then reloads state from
# disk.
def reset!
  Dir.chdir(@gl_admin.working_dir) do
    @gl_admin.git.reset({:hard => true}, 'HEAD')
    # -d: directories too, -q: quiet, -f: force
    @gl_admin.git.clean({:d => true, :q => true, :f => true})
  end
  reload!
end
|
ruby
|
{
"resource": ""
}
|
q4739
|
Gitolite.GitoliteAdmin.update
|
train
|
# Pulls origin/master into the working tree, optionally hard-resetting local
# changes first (:reset, default true) and/or rebasing (:rebase, default
# false), then reloads state.
def update(options = {})
  defaults = { :reset => true, :rebase => false }
  options = defaults.merge(options)
  reset! if options[:reset]
  Dir.chdir(@gl_admin.working_dir) do
    @gl_admin.git.pull({:rebase => options[:rebase]}, "origin", "master")
  end
  reload!
end
|
ruby
|
{
"resource": ""
}
|
q4740
|
Gitolite.GitoliteAdmin.load_keys
|
train
|
# Loads every SSH key file under `path` (default: the configured keydir)
# into a Hash of owner => DirtyProxy-wrapped key list, with all sets marked
# clean so later modifications can be detected.
def load_keys(path = nil)
  path ||= File.join(@path, @keydir)
  # Default proc gives each new owner an empty, dirty-trackable key list.
  keys = Hash.new {|k,v| k[v] = DirtyProxy.new([])}
  list_keys(path).each do |key|
    new_key = SSHKey.from_file(File.join(path, key))
    owner = new_key.owner
    keys[owner] << new_key
  end
  #Mark key sets as unmodified (for dirty checking)
  keys.values.each{|set| set.clean_up!}
  keys
end
|
ruby
|
{
"resource": ""
}
|
q4741
|
Dailycred.Client.event
|
train
|
# Records a custom event for a user through the Dailycred admin API.
def event(user_id, key, val="")
  payload = {
    :key => key,
    :valuestring => val,
    :user_id => user_id
  }
  post "/admin/api/customevent.json", payload
end
|
ruby
|
{
"resource": ""
}
|
q4742
|
Mongoid::Globalize.Adapter.prepare_translations!
|
train
|
# Moves stashed locale attribute writes onto translation documents: for each
# locale with pending attributes, finds (or builds) the matching translation
# and copies the values over, then clears the stash.
def prepare_translations!
  stash.each do |locale, attrs|
    if attrs.any?
      translation = record.translations.find_by_locale(locale)
      translation ||= record.translations.build(:locale => locale)
      attrs.each{ |name, value| translation[name] = value }
    end
  end
  reset
end
|
ruby
|
{
"resource": ""
}
|
q4743
|
Mongoid::Globalize.Adapter.fetch_attribute
|
train
|
# Reads attribute `name` from the record's translation for `locale`;
# nil when no translation exists.
def fetch_attribute(locale, name)
  found = record.translation_for(locale)
  found && found.send(name)
end
|
ruby
|
{
"resource": ""
}
|
q4744
|
Grantinee.Configuration.url=
|
train
|
# Parses a database URL into engine, credentials, host, port and database
# name. Raises 'Invalid database url' when user, host or path is missing.
#
# NOTE(review): an unrecognized scheme leaves @engine unset and the port
# default nil — presumably callers only pass mysql*/postgres* URLs; verify.
def url=(url)
  uri = URI.parse url
  default_port = nil
  case uri.scheme
  when /^mysql/
    default_port = 3306
    @engine = :mysql
  when /^postgres/
    default_port = 5432
    @engine = :postgres
  end
  raise 'Invalid database url' unless uri.user && uri.host && uri.path
  @username = uri.user
  @password = uri.password
  @hostname = uri.host
  @port = uri.port || default_port
  @database = (uri.path || '').split('/').last
end
|
ruby
|
{
"resource": ""
}
|
q4745
|
Jim.Bundler.bundle_dir=
|
train
|
# Sets the bundle output directory as a Pathname, creating it (and any
# missing parents) on assignment. A nil value clears the setting.
def bundle_dir=(new_dir)
  pathname = new_dir && Pathname.new(new_dir)
  pathname.mkpath if pathname
  @bundle_dir = pathname
end
|
ruby
|
{
"resource": ""
}
|
q4746
|
Jim.Bundler.jimfile_to_json
|
train
|
# Serializes the current bundler state back into Jimfile JSON: top-level
# options plus a "bundles" map where each requirement is either "name"
# (version unspecified) or ["name", "version"].
def jimfile_to_json
  h = {
    "bundle_dir" => bundle_dir
  }.merge(options)
  h['bundles'] = {}
  self.bundles.each do |bundle_name, requirements|
    h['bundles'][bundle_name] = []
    requirements.each do |name, version|
      # Bare name when no version was pinned; [name, version] pair otherwise.
      h['bundles'][bundle_name] << if version.nil? || version.strip == ''
        name
      else
        [name, version]
      end
    end
  end
  Yajl::Encoder.encode(h, :pretty => true)
end
|
ruby
|
{
"resource": ""
}
|
q4747
|
Jim.Bundler.resolve!
|
train
|
# Resolves every bundle requirement against the installed-file index,
# filling self.paths with [path, name, version] triples per bundle.
# Raises MissingFile when any requirement cannot be located.
def resolve!
  self.bundles.each do |bundle_name, requirements|
    self.paths[bundle_name] = []
    requirements.each do |name, version|
      path = self.index.find(name, version)
      if !path
        raise(MissingFile,
        "Could not find #{name} #{version} in any of these paths #{index.directories.join(':')}")
      end
      self.paths[bundle_name] << [path, name, version]
    end
  end
  paths
end
|
ruby
|
{
"resource": ""
}
|
q4748
|
GeoWorks.VectorFileBehavior.vector_work
|
train
|
# Returns the parents that include the GeoWorks vector-work behavior.
def vector_work
  matches = parents.select do |candidate|
    candidate.class.included_modules.include?(::GeoWorks::VectorWorkBehavior)
  end
  matches.to_a
end
|
ruby
|
{
"resource": ""
}
|
q4749
|
Usmu.Plugin.invoke
|
train
|
# Broadcasts `method` to every plugin that implements it and returns the
# truthy results (nil/false results are discarded).
def invoke(method, *args)
  @log.debug("Invoking plugin API #{method}")
  results = plugins.map do |plugin|
    next unless plugin.respond_to? method
    @log.debug("Sending message to #{plugin.class.name}")
    plugin.public_send method, *args
  end
  results.select { |result| result }
end
|
ruby
|
{
"resource": ""
}
|
q4750
|
Usmu.Plugin.alter
|
train
|
# Threads `value` through every plugin's "#{method}_alter" hook, in plugin
# order, and returns the final value.
def alter(method, value, *context)
  @log.debug("Invoking plugin alter API #{method}")
  hook = "#{method}_alter"
  plugins.each do |plugin|
    next unless plugin.respond_to? hook
    @log.debug("Sending message to #{plugin.class.name}")
    value = plugin.public_send hook, value, *context
  end
  value
end
|
ruby
|
{
"resource": ""
}
|
q4751
|
Usmu.Plugin.load_gem
|
train
|
# Requires a plugin gem and registers its plugin class exactly once.
# The gem name maps to its require path by convention (dashes become
# slashes, e.g. "usmu-foo" -> "usmu/foo"). Returns nil.
def load_gem(spec)
  load_path = spec.name.gsub('-', '/')
  require load_path
  # Guard keeps a gem from being registered twice across repeated calls.
  unless @loaded.include? load_path
    @loaded << load_path
    klass = path_to_class(load_path)
    @log.debug("Loading plugin #{klass} from '#{load_path}'")
    plugins.push plugin_get(klass)
  end
  nil
end
|
ruby
|
{
"resource": ""
}
|
q4752
|
HoneyFormat.BenchmarkCLI.expected_runtime_seconds
|
train
|
# Estimates total benchmark wall time in seconds: one run per report and
# lines-multiplier combination, each costing warmup plus measured time.
def expected_runtime_seconds(report_count:)
  runs = report_count * options[:lines_multipliers].length
  warmup_seconds = runs * options[:benchmark_warmup]
  measured_seconds = runs * options[:benchmark_time]
  warmup_seconds + measured_seconds
end
|
ruby
|
{
"resource": ""
}
|
q4753
|
HoneyFormat.BenchmarkCLI.fetch_default_benchmark_csv
|
train
|
# Returns the default benchmark CSV contents, serving from the local cache
# file when present and otherwise downloading from GitHub and populating the
# cache. Also records which source was used in @used_input_path.
def fetch_default_benchmark_csv
  cache_path = CSV_TEST_DATA_CACHE_PATH
  if File.exist?(cache_path)
    writer.puts "Cache file found at #{cache_path}.", verbose: true
    @used_input_path = cache_path
    return File.read(cache_path)
  end
  writer.print 'Downloading test data file from GitHub..', verbose: true
  require 'open-uri'
  # Kernel#open on a fixed constant URL; rubocop warning acknowledged inline.
  open(CSV_TEST_DATA_URL).read.tap do |csv| # rubocop:disable Security/Open
    @used_input_path = CSV_TEST_DATA_URL
    writer.puts 'done!', verbose: true
    File.write(cache_path, csv)
    writer.puts "Wrote cache file to #{cache_path}..", verbose: true
  end
end
|
ruby
|
{
"resource": ""
}
|
q4754
|
Stagehand.Auditor.incomplete_end_operations
|
train
|
# Uncontained end-of-commit entries that are not the newest entry of their
# session — i.e. end operations whose session kept logging afterwards,
# marking the commit as incomplete.
def incomplete_end_operations
  last_entry_per_session = Staging::CommitEntry.group(:session).select('MAX(id) AS id')
  return Staging::CommitEntry.uncontained.end_operations.where.not(:id => last_entry_per_session)
end
|
ruby
|
{
"resource": ""
}
|
q4755
|
Stagehand.Auditor.incomplete_start_operations
|
train
|
# Uncontained start-of-commit entries that are not the latest start
# operation of their session — earlier starts that were never matched by a
# commit end.
def incomplete_start_operations
  last_start_entry_per_session = Staging::CommitEntry.start_operations.group(:session).select('MAX(id) AS id')
  return Staging::CommitEntry.uncontained.start_operations.where.not(:id => last_start_entry_per_session)
end
|
ruby
|
{
"resource": ""
}
|
q4756
|
GeoWorks.MetadataExtractionHelper.populate_metadata
|
train
|
# Extracts metadata for `id` and assigns each entry via its "<key>=" setter.
def populate_metadata(id)
  extract_metadata(id).each do |key, value|
    send("#{key}=".to_sym, value) # set each property
  end
end
|
ruby
|
{
"resource": ""
}
|
q4757
|
Usmu.SiteGenerator.generate_page
|
train
|
# Renders one page into the destination tree, creating parent directories as
# needed and stamping the output file with the source mtime. Returns nil.
def generate_page(page)
  output_filename = page.output_filename
  @log.success("creating #{output_filename}...")
  @log.debug("Rendering #{output_filename} from #{page.name}")
  destination = File.join(@configuration.destination_path, output_filename)
  parent_dir = File.dirname(destination)
  FileUtils.mkdir_p(parent_dir) unless File.directory?(parent_dir)
  File.write destination, page.render
  FileUtils.touch destination, mtime: page.mtime
  nil
end
|
ruby
|
{
"resource": ""
}
|
q4758
|
GemfileLocker.GemEntry.replace_string_node
|
train
|
# Rewrites a string literal AST node to contain `value`, reusing the
# literal's original quote character.
def replace_string_node(target, value)
  quote = target.loc.begin.source
  replacement = "#{quote}#{value}#{quote}"
  rewriter.replace(target.loc.expression, replacement)
end
|
ruby
|
{
"resource": ""
}
|
q4759
|
GemfileLocker.GemEntry.remove_node_with_comma
|
train
|
# Removes `target` from the source together with the separating comma, by
# widening the removal range back to the last ',' preceding the node in the
# source buffer.
def remove_node_with_comma(target)
  expression = target.loc.expression
  comma_pos = expression.source_buffer.source.rindex(',', expression.begin_pos)
  rewriter.remove(expression.with(begin_pos: comma_pos))
end
|
ruby
|
{
"resource": ""
}
|
q4760
|
RailsPaginate::Helpers.ActionView.paginate
|
train
|
# View helper that renders pagination markup for a RailsPaginate::Collection.
# Options: :renderer and :pager (names resolved to classes via
# RailsPaginate), :class and :id for the wrapping div, plus any
# renderer-specific options.
def paginate(*args)
  options = args.extract_options!
  raise ArgumentError, "first argument must be a RailsPaginate::Collection" unless args.first.is_a? RailsPaginate::Collection
  collection = args.first
  # p @controller
  # p url_for(:action => :index, :controller => :dummy)
  # renderer
  renderer = options[:renderer] || RailsPaginate.default_renderer
  pager = options[:pager] || RailsPaginate.default_pager
  attributes = {}
  attributes[:class] = "pagination #{options[:class]}".strip
  attributes[:id] = options[:id] unless options[:id].blank?
  # load classes
  renderer = RailsPaginate.renderer(renderer)
  pager = RailsPaginate.pager(pager)
  content_tag :div, attributes do
    renderer.new(self, collection, pager.new(collection), options).render
  end
end
|
ruby
|
{
"resource": ""
}
|
q4761
|
GoogleCheckout.Notification.acknowledgment_xml
|
train
|
# Builds (and caches in @xml) the notification-acknowledgment document that
# Google Checkout expects as a receipt for a notification.
def acknowledgment_xml
  builder = Builder::XmlMarkup.new
  builder.instruct!
  @xml = builder.tag!('notification-acknowledgment', {
    :xmlns => "http://checkout.google.com/schema/2",
    'serial-number' => serial_number
  })
  @xml
end
|
ruby
|
{
"resource": ""
}
|
q4762
|
GoogleCheckout.Notification.method_missing
|
train
|
# Exposes XML elements as dynamic readers: `foo_bar` looks up the `foo-bar`
# element in the parsed document and returns its inner HTML. Falls through
# to super (NoMethodError) when the element is missing.
#
# NOTE(review): respond_to_missing? is not overridden, so respond_to? will
# disagree with these dynamic readers — consider adding it.
def method_missing(method_name, *args)
  element_name = method_name.to_s.gsub(/_/, '-')
  # Deliberate assignment-in-condition: nil when the element is absent.
  if element = (@doc.at element_name)
    if element.respond_to?(:inner_html)
      return element.inner_html
    end
  end
  super
end
|
ruby
|
{
"resource": ""
}
|
q4763
|
HoneyFormat.Matrix.to_csv
|
train
|
# Serializes the matrix (header followed by rows) as CSV, optionally
# restricted to the given columns.
def to_csv(columns: nil, &block)
  selected = columns&.map(&:to_sym)
  header_csv = @header.to_csv(columns: selected)
  header_csv + @rows.to_csv(columns: selected, &block)
end
|
ruby
|
{
"resource": ""
}
|
q4764
|
Rtasklib.Controller.some
|
train
|
# Exports a filtered subset of tasks as TaskModel objects, filtering by ids,
# tags and/or an arbitrary dom query; `active` limits to pending-or-waiting
# tasks.
def some ids: nil, tags: nil, dom: nil, active: true
  some = []
  f = Helpers.filter(ids: ids, tags: tags, dom: dom)
  a = Helpers.pending_or_waiting(active)
  # `task <filter> <status> export` emits JSON on stdout.
  Execute.task_popen3(*@override_a, f, a, "export") do |i, o, e, t|
    some = MultiJson.load(o.read).map do |x|
      Rtasklib::Models::TaskModel.new(x)
    end
  end
  return some
end
|
ruby
|
{
"resource": ""
}
|
q4765
|
Rtasklib.Controller.get_rc
|
train
|
# Reads the full taskwarrior configuration via `task _show` and wraps the
# resulting lines in a Taskrc.
def get_rc
  lines = []
  Execute.task_popen3(*@override_a, "_show") do |_i, o, _e, _t|
    lines = o.read.each_line.map(&:chomp)
  end
  Taskrc.new(lines, :array)
end
|
ruby
|
{
"resource": ""
}
|
q4766
|
Rtasklib.Controller.get_version
|
train
|
# Queries `task _version` and converts the output to a Gem::Version.
def get_version
  result = nil
  Execute.task_popen3("_version") do |_i, o, _e, _t|
    result = Helpers.to_gem_version(o.read.chomp)
  end
  result
end
|
ruby
|
{
"resource": ""
}
|
q4767
|
Rtasklib.Controller.get_udas
|
train
|
# Groups the taskrc's UDA config entries by attribute name, returning e.g.
# { estimate: { "type" => ..., "label" => ... }, ... }.
def get_udas
  udas = {}
  taskrc.config.attributes
    .select { |attr, val| Helpers.uda_attr? attr }
    .sort
    # Group e.g. uda.estimate.type and uda.estimate.label under "estimate".
    .chunk { |attr, val| Helpers.arbitrary_attr attr }
    .each do |attr, arr|
      # Strip the shared prefix from each key, keep the value.
      uda = arr.map do |pair|
        [Helpers.deep_attr(pair[0]), pair[1]]
      end
      udas[attr.to_sym] = Hash[uda]
    end
  return udas
end
|
ruby
|
{
"resource": ""
}
|
q4768
|
Rtasklib.Controller.update_config!
|
train
|
# Sets a taskwarrior config attribute via `task config <attr> <val>` and
# returns the process exit status.
#
# NOTE(review): attr and val are interpolated into the command line without
# escaping — ensure callers never pass untrusted input.
def update_config! attr, val
  Execute.task_popen3(*override_a, "config #{attr} #{val}") do |i, o, e, t|
    return t.value
  end
end
|
ruby
|
{
"resource": ""
}
|
q4769
|
Rtasklib.Controller.add_udas_to_model!
|
train
|
# Declares each UDA from `uda_hash` as an attribute on the model class,
# inferring the attribute type from the first value when none was given.
#
# NOTE(review): once `type` has been inferred it is never reset, so every
# subsequent attribute reuses the first inferred type — looks suspicious for
# mixed-type UDA sets; confirm whether per-attribute inference was intended.
def add_udas_to_model! uda_hash, type=nil, model=Models::TaskModel
  uda_hash.each do |attr, val|
    val.each do |k, v|
      type = Helpers.determine_type(v) if type.nil?
      model.attribute attr, type
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q4770
|
Rtasklib.Controller.get_uda_names
|
train
|
# Lists the names of all user-defined attributes known to taskwarrior.
def get_uda_names
  Execute.task_popen3(*@override_a, "_udas") do |_i, o, _e, _t|
    return o.read.each_line.map(&:chomp)
  end
end
|
ruby
|
{
"resource": ""
}
|
q4771
|
Rtasklib.Controller.sync!
|
train
|
# Runs `task sync` and returns the process exit status.
def sync!
  Execute.task_popen3(*override_a, "sync") do |_i, _o, _e, t|
    return t.value
  end
end
|
ruby
|
{
"resource": ""
}
|
q4772
|
Mercurial.Manifest.contents
|
train
|
# Returns the repository manifest at `revision` (default 'tip') as a UTF-8
# string.
def contents(revision=nil, cmd_options={})
  revision ||= 'tip'
  hg(manifest_cmd(revision), cmd_options).tap do |res|
    # force_encoding only exists on Ruby >= 1.9.
    if RUBY_VERSION >= '1.9.1'
      res.force_encoding('utf-8')
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q4773
|
ODT2HTML.AnalyzeStyles.process_normal_style_attr
|
train
|
# Records a CSS declaration for `selector`, updating the value in place when
# the property already exists in that selector's declaration block.
def process_normal_style_attr( selector, property, value )
  block = @style_info[selector]
  if block.nil?
    block = DeclarationBlock.new( )
    @style_info[selector] = block
    block.push Declaration.new(property, value)
    return
  end
  existing = block.find { |obj| obj.property == property }
  if existing
    existing.value = value
  else
    block.push Declaration.new(property, value)
  end
end
|
ruby
|
{
"resource": ""
}
|
q4774
|
R6502.Assembler.asm_instr
|
train
|
# Assembles one 6502 instruction line into its byte sequence
# ([opcode] or [opcode, operand...]). Labels get a placeholder operand and
# are recorded via defer_value for resolution in the second pass.
# Returns false for a two-byte mode whose operand exceeds 0xffff is
# impossible here since the raise fires first.
def asm_instr(instr)
  command = extract_command(instr)
  param = extract_param(instr)
  # Branch instructions always in relative
  # mode. No other instructions use this mode.
  # Relative mode and zero-page mode look the
  # same to addr_mode(), so we need to handle
  # this here.
  if [:bpl, :bmi, :bvc, :bvs,
      :bcc, :bcs, :bne, :beq].
      include?(command)
    mode = :rel
  else
    mode = addr_mode(param)
  end
  bytes = []
  bytes << opcode(command, mode)
  # If implied mode, it's a 1-byte instruction.
  if [:imp].include?(mode)
    return bytes
  end
  # Handle label or address / immediate value
  if param =~ /\$/ # non-labels always have a $
    # Extract hex number from param string.
    number = /[0-9a-f]{1,4}/.match(param)[0].to_i(16)
  else
    # Store a dummy value and record this location
    # to be updated in 2nd pass.
    defer_value(@pc + 1, param)
    number = mode == :rel ? 0xff : 0xffff
  end
  # These instructions take 1 byte.
  if [:imm, :zp, :zpx, :zpy,
      :indx, :indy, :rel].include?(mode)
    (number <= 0xff) || (raise "#{command}'s number too big")
    return bytes << number
  # These instructions take 2 bytes.
  elsif [:abs, :absx, :absy, :ind].include?(mode)
    (number <= 0xffff) || (raise 'number too big')
    # Little-endian operand order, per the 6502 spec.
    bytes << (number & 0xff) # least-sig. byte
    bytes << (number >> 8)   # most-sig. byte
  end
end
|
ruby
|
{
"resource": ""
}
|
q4775
|
EventMachine.Q.all
|
train
|
# Combines several promises into one: resolves with the array of all results
# (in input order) once every promise resolves, or rejects with the first
# rejection reason. An empty argument list resolves immediately with [].
def all(*promises)
  deferred = Q.defer
  counter = promises.length
  results = []
  if counter > 0
    promises.each_index do |index|
      ref(promises[index]).then(proc {|result|
        # Guard against double-settling the same slot.
        if results[index].nil?
          results[index] = result
          counter -= 1
          deferred.resolve(results) if counter <= 0
        end
        result
      }, proc {|reason|
        if results[index].nil?
          deferred.reject(reason)
        end
        reason
      })
    end
  else
    deferred.resolve(results)
  end
  return deferred.promise
end
|
ruby
|
{
"resource": ""
}
|
q4776
|
GoogleCheckout.Cart.shipping_cost_xml
|
train
|
# Emits the <price> element for the cart's shipping: the configured flat
# rate when present, otherwise the sum of per-item shipping costs.
#
# NOTE(review): the flat-rate branch calls the `currency` method while the
# fallback branch reads @currency directly — verify both resolve to the same
# value.
def shipping_cost_xml
  xml = Builder::XmlMarkup.new
  if @flat_rate_shipping
    xml.price(:currency => currency) {
      xml.text! @flat_rate_shipping[:price].to_s
    }
  else
    xml.price(:currency => @currency) {
      xml.text! shipping_cost.to_s
    }
  end
end
|
ruby
|
{
"resource": ""
}
|
q4777
|
GoogleCheckout.Cart.shipping_cost
|
train
|
# Sums the :regular_shipping amounts of every cart item and returns the
# total as a String.
#
# Cleanup: removed an unused `currency` local and the pointless `shipping =`
# assignment (the expression was already the implicit return value).
def shipping_cost
  @contents.inject(0) { |total, item| total + item[:regular_shipping].to_i }.to_s
end
|
ruby
|
{
"resource": ""
}
|
q4778
|
GoogleCheckout.Cart.currency
|
train
|
# Returns the single currency used by every item in the cart: "USD" for an
# empty cart, the shared currency otherwise. Raises when items mix
# currencies, since this library cannot convert between them.
#
# Cleanup: dropped the dead `|| "USD"` fallback — Array#uniq never returns
# nil, so it could not fire (the empty-cart case is already handled by the
# `when 0` branch).
def currency
  # Mixing currency not allowed; this
  # library can't convert between
  # currencies.
  currencies = @contents.map { |item| item[:currency] }.uniq
  case currencies.count
  when 0
    "USD"
  when 1
    currencies.first
  else
    raise RuntimeError.new("Mixing currency not allowed")
  end
end
|
ruby
|
{
"resource": ""
}
|
q4779
|
GoogleCheckout.Cart.signature
|
train
|
# HMAC-SHA1 signature of the cart XML using the merchant key, building the
# XML first when it has not been generated yet.
#
# Fix: OpenSSL::Digest::Digest has been deprecated for years and removed in
# recent OpenSSL gem versions; OpenSSL::Digest.new('sha1') is the supported
# equivalent and produces identical output.
def signature
  @xml or to_xml
  digest = OpenSSL::Digest.new('sha1')
  OpenSSL::HMAC.digest(digest, @merchant_key, @xml)
end
|
ruby
|
{
"resource": ""
}
|
q4780
|
GoogleCheckout.Cart.checkout_button
|
train
|
# Renders the HTML form for a Google Checkout "Buy" button: the base64'd
# cart XML and its signature as hidden fields plus the button image, posting
# to submit_url. Builds the cart XML first when not yet generated.
def checkout_button(button_opts = {})
  @xml or to_xml
  burl = button_url(button_opts)
  html = Builder::XmlMarkup.new(:indent => 2)
  html.form({
    :action => submit_url,
    :style => 'border: 0;',
    :id => 'BB_BuyButtonForm',
    :method => 'post',
    :name => 'BB_BuyButtonForm'
  }) do
    # Base64 payloads must be single-line for the form fields.
    html.input({
      :name => 'cart',
      :type => 'hidden',
      :value => Base64.encode64(@xml).gsub("\n", '')
    })
    html.input({
      :name => 'signature',
      :type => 'hidden',
      :value => Base64.encode64(signature).gsub("\n", '')
    })
    html.input({
      :alt => 'Google Checkout',
      :style => "width: auto;",
      :src => button_url(button_opts),
      :type => 'image'
    })
  end
end
|
ruby
|
{
"resource": ""
}
|
q4781
|
XMLA.Cube.table
|
train
|
# Assembles the query result as a table: a single bare cell for a
# one-header, zero-height result; otherwise the header rows with one row per
# y-axis entry appended.
#
# NOTE(review): cells are addressed as cell_data[i + j]; a row-major matrix
# would normally need i + j * x_size — verify against the XMLA cell ordering
# before changing anything here.
def table
  if (header.size == 1 && y_size == 0)
    cell_data[0]
  else
    (0...y_axe.size).reduce(header) do |result, j|
      result << ( y_axe[j] + (0...x_size).map { |i| "#{cell_data[i + j]}" })
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q4782
|
RailsPaginate.Collection.load_result
|
train
|
# Loads the current page of records from the backing array or relation and
# replaces this collection's contents with it.
def load_result
  page_items =
    if array_or_relation.is_a? Array
      array_or_relation[offset..(offset + per_page - 1)]
    else
      array_or_relation.limit(per_page).offset(offset).all
    end
  self.replace(page_items.nil? ? [] : page_items)
end
|
ruby
|
{
"resource": ""
}
|
q4783
|
Munin.Connection.open
|
train
|
# Opens a TCP connection to the munin node and validates its welcome banner.
# Timeouts and socket errors are normalized to Munin::ConnectionError; a
# wrong banner or EOF becomes Munin::AccessDenied.
#
# NOTE(review): the final `rescue Exception` also swallows SystemExit /
# SignalException and re-raises them as ConnectionError — consider narrowing
# to StandardError.
def open
  begin
    begin
      with_timeout do
        @socket = TCPSocket.new(@host, @port)
        @socket.sync = true
        welcome = @socket.gets
        # A genuine munin node always greets with this banner.
        unless welcome =~ /^# munin node at/
          raise Munin::AccessDenied
        end
        @connected = true
      end
    rescue Timeout::Error
      raise Munin::ConnectionError, "Timed out talking to #{@host}"
    end
  rescue Errno::ETIMEDOUT, Errno::ECONNREFUSED, Errno::ECONNRESET => ex
    raise Munin::ConnectionError, ex.message
  rescue EOFError
    raise Munin::AccessDenied
  rescue Exception => ex
    raise Munin::ConnectionError, ex.message
  end
end
|
ruby
|
{
"resource": ""
}
|
q4784
|
Munin.Connection.send_data
|
train
|
def send_data(str)
  # Write one command line to the node, reconnecting first if needed.
  #
  # When the connection is down: if a socket was previously opened
  # and reconnecting is disabled, fail immediately; otherwise (re)open.
  #
  # FIX: the condition previously read "[email protected]?" — a
  # text-mangled form of !@socket.nil? that is not valid Ruby.
  if !connected?
    if !@socket.nil? && @reconnect == false
      raise Munin::ConnectionError, "Not connected."
    else
      open
    end
  end
  begin
    with_timeout { @socket.puts("#{str.strip}\n") }
  rescue Timeout::Error
    raise Munin::ConnectionError, "Timed out on #{@host} trying to send."
  end
end
|
ruby
|
{
"resource": ""
}
|
q4785
|
Munin.Connection.read_line
|
train
|
def read_line
  # Read a single response line from the node, stripped of
  # surrounding whitespace. Socket-level failures and timeouts
  # surface uniformly as Munin::ConnectionError.
  with_timeout { @socket.gets.to_s.strip }
rescue Errno::ETIMEDOUT, Errno::ECONNREFUSED, Errno::ECONNRESET, EOFError => ex
  raise Munin::ConnectionError, ex.message
rescue Timeout::Error
  raise Munin::ConnectionError, "Timed out reading from #{@host}."
end
|
ruby
|
{
"resource": ""
}
|
q4786
|
Munin.Connection.read_packet
|
train
|
def read_packet
  # Read a multi-line response; the munin protocol terminates these
  # with a line containing a single ".". Returns the stripped lines
  # (terminator excluded) after checking them for protocol errors.
  with_timeout do
    lines = []
    loop do
      line = @socket.readline.to_s.strip
      break if line == '.'
      lines << line
    end
    parse_error(lines)
    lines
  end
rescue Errno::ETIMEDOUT, Errno::ECONNREFUSED, Errno::ECONNRESET, EOFError => ex
  raise Munin::ConnectionError, ex.message
rescue Timeout::Error
  raise Munin::ConnectionError, "Timed out reading from #{@host}."
end
|
ruby
|
{
"resource": ""
}
|
q4787
|
Munin.Connection.with_timeout
|
train
|
def with_timeout(time = @options[:timeout])
  # Run the given block under a timeout, supporting both SystemTimer
  # (timeout_after) and the stdlib Timeout (timeout) backends.
  raise ArgumentError, "Block required" unless block_given?
  timer = Munin::TIMEOUT_CLASS
  if timer.respond_to?(:timeout_after)
    timer.timeout_after(time) { yield }
  else
    timer.timeout(time) { yield }
  end
end
|
ruby
|
{
"resource": ""
}
|
q4788
|
RestfulApiAuthentication.Checker.authorized?
|
train
|
def authorized?(options = {})
  # Validate the request's authentication headers.
  #
  # Checks, in order: required headers present, timestamp inside the
  # allowed window, request signature matches, client not disabled,
  # and (optionally) client has master permissions. Each failing
  # check appends a human-readable reason to @errors and returns
  # false, so the old generic "authentication failed" fallback was
  # unreachable and has been removed.
  #
  # options - Hash; pass :require_master => true to additionally
  #           require the client to be a master client.
  #
  # Returns true when the request is fully authorized.
  raise "Configuration values not found. Please run rails g restful_api_authentication:install to generate a config file." if @@header_timestamp.nil? || @@header_signature.nil? || @@header_api_key.nil? || @@time_window.nil? || @@disabled_message.nil?

  unless headers_have_values?
    @errors << "one or more required headers is missing"
    return false
  end

  unless in_time_window?
    @errors << "request is outside the required time window of #{@@time_window.to_s} minutes"
    return false
  end

  unless test_hash.downcase == @http_headers[@@header_signature].downcase
    @errors << "signature is invalid"
    return false
  end

  if is_disabled?
    @errors << @@disabled_message
    return false
  end

  # Master permission is only demanded when the caller asks for it.
  return true unless options[:require_master] == true
  return true if is_master?

  @errors << "client does not have the required permissions"
  false
end
|
ruby
|
{
"resource": ""
}
|
q4789
|
RestfulApiAuthentication.Checker.is_disabled?
|
train
|
def is_disabled?
  # Look up the calling client by its API key header. Unknown clients
  # count as disabled; a nil flag on a known client counts as enabled.
  client = RestClient.where(:api_key => @http_headers[@@header_api_key]).first
  return true if client.nil?
  client.is_disabled || false
end
|
ruby
|
{
"resource": ""
}
|
q4790
|
RestfulApiAuthentication.Checker.in_time_window?
|
train
|
def in_time_window?
  # Verify the request timestamp falls inside the configured window,
  # centered on now (half before, half after). Enforces a minimum
  # window of 4 minutes. Unparseable timestamps record an error and
  # fail the check.
  @@time_window = 4 if @@time_window < 4
  half_window = (@@time_window / 2).floor
  parsed = Chronic.parse @http_headers[@@header_timestamp]
  if parsed.nil?
    @errors << "timestamp was in an invalid format; should be YYYY-MM-DD HH:MM:SS UTC"
    return false
  end
  earliest = Time.now.utc - 60 * half_window
  latest = Time.now.utc + 60 * half_window
  parsed > earliest && parsed < latest
end
|
ruby
|
{
"resource": ""
}
|
q4791
|
RestfulApiAuthentication.Checker.str_to_hash
|
train
|
def str_to_hash
  # Build the plaintext the client must sign: its secret, the request
  # URI stripped of any query string, and the timestamp header.
  # Unknown clients record an error and yield an empty string.
  client = RestClient.where(:api_key => @http_headers[@@header_api_key]).first
  if client.nil?
    @errors << "client is not registered"
    return ""
  end
  client.secret + @request_uri.gsub(/\?.*/, "") + @http_headers[@@header_timestamp]
end
|
ruby
|
{
"resource": ""
}
|
q4792
|
Unipept.Formatter.format
|
train
|
def format(data, fasta_mapper, first)
  # Render a batch of API results. When a fasta_mapper is supplied,
  # the matching fasta headers are woven into the data first.
  payload = fasta_mapper ? integrate_fasta_headers(data, fasta_mapper) : data
  convert(payload, first)
end
|
ruby
|
{
"resource": ""
}
|
q4793
|
Unipept.Formatter.integrate_fasta_headers
|
train
|
def integrate_fasta_headers(data, fasta_mapper)
  # Attach fasta headers to their matching result rows. fasta_mapper
  # pairs each header with the key of the rows it belongs to; headers
  # with no matching rows are dropped.
  grouped = group_by_first_key(data)
  annotated = fasta_mapper.map do |fasta_header, key|
    rows = grouped[key]
    next if rows.nil?
    rows.map { |row| { fasta_header: fasta_header }.merge(row) }
  end
  annotated.compact.flatten(1)
end
|
ruby
|
{
"resource": ""
}
|
q4794
|
Unipept.JSONFormatter.convert
|
train
|
def convert(data, first)
  # Serialize each result to JSON, comma-separated. Batches after the
  # first get a leading comma so consecutive chunks concatenate into
  # the body of one valid JSON array.
  joined = data.map(&:to_json).join(',')
  first ? joined : ",#{joined}"
end
|
ruby
|
{
"resource": ""
}
|
q4795
|
Unipept.CSVFormatter.header
|
train
|
def header(data, fasta_mapper = nil)
  # Emit the CSV header row derived from the first result's keys,
  # prefixed with fasta_header when fasta input is being mapped.
  # Empty data yields an empty string.
  CSV.generate do |csv|
    sample = data.first
    if sample
      prefix = fasta_mapper ? ['fasta_header'] : []
      csv << (prefix + sample.keys).map(&:to_s)
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q4796
|
Unipept.CSVFormatter.convert
|
train
|
def convert(data, _first)
  # Emit one CSV row per result; blank string values become empty
  # (nil) fields so they serialize as nothing rather than "".
  rows = data.map do |record|
    record.values.map { |value| value == '' ? nil : value }
  end
  CSV.generate do |csv|
    rows.each { |row| csv << row }
  end
end
|
ruby
|
{
"resource": ""
}
|
q4797
|
Unipept.BlastFormatter.convert
|
train
|
def convert(data, _first)
  # Render results as BLAST tabular output lines. Rows without any
  # refseq protein id are skipped; the numeric columns are fixed
  # placeholder scores.
  data.each_with_object(+'') do |record, out|
    ids = record['refseq_protein_ids']
    next if ids.empty?
    out << "#{record['peptide']}\tref|#{ids}|\t100\t10\t0\t0\t0\t10\t0\t10\t1e-100\t100\n"
  end
end
|
ruby
|
{
"resource": ""
}
|
q4798
|
ActsAsRoleRestricted.ClassMethods.for_role
|
train
|
def for_role(*roles)
  # Scope records to the given roles, always including records with
  # no role restriction (roles_mask of 0 or NULL).
  clauses = []
  role_sql = with_role_sql(roles)
  clauses << role_sql if role_sql.present?
  clauses << "(#{self.table_name}.roles_mask = 0) OR (#{self.table_name}.roles_mask IS NULL)"
  where(clauses.join(' OR '))
end
|
ruby
|
{
"resource": ""
}
|
q4799
|
Softlayer.Client.auth_params
|
train
|
def auth_params
  # Build the SOAP authentication headers from the global Softlayer
  # configuration. Returns an empty hash when nothing is configured;
  # adds a legacy-session block when impersonating another user.
  config = Softlayer.configuration
  return {} unless config
  params = {
    authenticate: {
      'username' => config.username,
      'apiKey' => config.api_key
    }
  }
  if Softlayer.impersonating?
    params["clientLegacySession"] = {
      "userId" => config.impersonate_user,
      "authToken" => config.impersonate_token
    }
  end
  params
end
|
ruby
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.