_id (string, 2-6 chars) | title (string, 9-130 chars) | partition (string, 3 classes) | text (string, 30-4.3k chars) | language (string, 1 class) | meta_information (dict)
---|---|---|---|---|---|
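
Each row below pairs an id and a fully qualified Ruby method name with a (truncated) snippet of that method's body, its split, its language tag, and a metadata dict. As a minimal sketch of how records with this schema could be read, assuming the data is published on the Hugging Face Hub, the Python `datasets` library can be used as follows; the repository id is a placeholder, not the dataset's actual path.

```python
# Minimal sketch: iterate over records with the schema above.
# "org/ruby-code-snippets" is a hypothetical repository id; replace it with
# the dataset's real path on the Hub.
from datasets import load_dataset

ds = load_dataset("org/ruby-code-snippets", split="train")

for record in ds.select(range(3)):
    # Columns match the header: _id, title, partition, text, language, meta_information.
    print(record["_id"], record["title"], record["partition"], record["language"])
    print(record["text"][:80])          # beginning of the Ruby method body
    print(record["meta_information"])   # e.g. {"resource": ""}
```
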
q3100
|
Elasticrawl.Config.delete_config_dir
|
train
|
def delete_config_dir
begin
FileUtils.rm_r(config_dir) if dir_exists?
rescue StandardError => e
|
ruby
|
{
"resource": ""
}
|
q3101
|
Elasticrawl.Config.status_message
|
train
|
def status_message(bucket_name, state)
message = ['', "Bucket s3://#{bucket_name} #{state}"]
message <<
|
ruby
|
{
"resource": ""
}
|
q3102
|
Migrate.Migrator.exec_migration
|
train
|
def exec_migration(migration, is_up)
migration_dir = self.migration_dir(migration)
result = @lang.exec_migration(migration_dir, is_up)
if @lang.ext != "sql"
puts result
end
Log.info("Updating current
|
ruby
|
{
"resource": ""
}
|
q3103
|
Migrate.Migrator.exec_migrations
|
train
|
def exec_migrations(is_up=true)
Log.info("Executing migrations...")
migrations = yield @db.current_version
if migrations.count == 0
Log.warn("Migrations not found")
return
end
migrations.each do |migration|
|
ruby
|
{
"resource": ""
}
|
q3104
|
Sparkr.CLI.run
|
train
|
def run(*args)
if args.empty? || (args.size == 1 && %w(-h --help).include?(args.first))
puts help
else
|
ruby
|
{
"resource": ""
}
|
q3105
|
OpenTox.Compound.fingerprint
|
train
|
def fingerprint type=DEFAULT_FINGERPRINT
unless fingerprints[type]
return [] unless self.smiles
#http://openbabel.org/docs/dev/FileFormats/MolPrint2D_format.html#molprint2d-format
if type == "MP2D"
fp = obconversion(smiles,"smi","mpd").strip.split("\t")
name = fp.shift # remove Title
fingerprints[type] = fp.uniq # no fingerprint counts
#http://openbabel.org/docs/dev/FileFormats/Multilevel_Neighborhoods_of_Atoms_(MNA).html
elsif type== "MNA"
level = 2 # TODO: level as parameter, evaluate level 1, see paper
fp = obconversion(smiles,"smi","mna","xL\"#{level}\"").split("\n")
fp.shift # remove Title
fingerprints[type] = fp
else # standard fingerprints
fp = OpenBabel::OBFingerprint.find_fingerprint(type)
|
ruby
|
{
"resource": ""
}
|
q3106
|
OpenTox.Compound.smarts_match
|
train
|
def smarts_match smarts, count=false
obconversion = OpenBabel::OBConversion.new
obmol = OpenBabel::OBMol.new
obconversion.set_in_format('smi')
obconversion.read_string(obmol,self.smiles)
smarts_pattern = OpenBabel::OBSmartsPattern.new
smarts.collect do |sma|
smarts_pattern.init(sma.smarts)
if
|
ruby
|
{
"resource": ""
}
|
q3107
|
OpenTox.Compound.svg
|
train
|
def svg
if self.svg_id.nil?
svg = obconversion(smiles,"smi","svg")
file = Mongo::Grid::File.new(svg, :filename
|
ruby
|
{
"resource": ""
}
|
q3108
|
OpenTox.Compound.png
|
train
|
def png
if self.png_id.nil?
png = obconversion(smiles,"smi","_png2")
file = Mongo::Grid::File.new(Base64.encode64(png), :filename
|
ruby
|
{
"resource": ""
}
|
q3109
|
Scruffy::Layers.Average.generate_coordinates
|
train
|
def generate_coordinates(options = {})
key_layer = layers.find { |layer| layer.relevant_data? }
options[:point_distance] = width / (key_layer.points.size - 1).to_f
coords = []
#TODO this will likely break with the new hash model
key_layer.points.each_with_index do |layer, idx|
sum, objects = points.inject([0, 0]) do |arr, elem|
if elem.relevant_data?
arr[0] += elem.points[idx]
arr[1] += 1
end
|
ruby
|
{
"resource": ""
}
|
q3110
|
SocialSnippet::Repository.RepositoryManager.resolve_snippet_path
|
train
|
def resolve_snippet_path(context, tag)
if tag.has_repo?
pkg = find_package_by_tag(tag)
pkg.snippet_path tag.path
else
|
ruby
|
{
"resource": ""
}
|
q3111
|
SocialSnippet::Repository.RepositoryManager.find_package_by_tag
|
train
|
def find_package_by_tag(tag)
if tag.has_ref?
find_package(tag.repo, tag.ref)
else
|
ruby
|
{
"resource": ""
}
|
q3112
|
SocialSnippet::Repository.RepositoryManager.find_package
|
train
|
def find_package(name, ref = nil)
repo = find_repository(name)
ref ||= repo.latest_package_version || repo.current_ref
|
ruby
|
{
"resource": ""
}
|
q3113
|
Hyperb.Images.remove_image
|
train
|
def remove_image(params = {})
raise ArgumentError, 'Invalid arguments.' unless check_arguments(params, 'name')
path = '/images/' + params[:name]
query = {}
query[:force]
|
ruby
|
{
"resource": ""
}
|
q3114
|
Hyperb.Images.inspect_image
|
train
|
def inspect_image(params = {})
raise ArgumentError, 'Invalid arguments.' unless check_arguments(params, 'name')
path = '/images/' + params[:name] + '/json'
|
ruby
|
{
"resource": ""
}
|
q3115
|
Redwood.Node.add_child
|
train
|
def add_child(name)
child = self.class.new(name, self)
yield child if
|
ruby
|
{
"resource": ""
}
|
q3116
|
Redwood.Node.[]
|
train
|
def [](key)
selected_child = children.select {|child| child.name == key }
|
ruby
|
{
"resource": ""
}
|
q3117
|
Manageable.ApplicationController.respond_with_with_storage
|
train
|
def respond_with_with_storage(*args, &block)
@responded_with = args.last.is_a?(Hash) ? args - [args.last] : args
|
ruby
|
{
"resource": ""
}
|
q3118
|
Hyperb.HostConfig.fmt
|
train
|
def fmt
formated = {}
attrs.each_key do |key|
formated[camelize(key)]
|
ruby
|
{
"resource": ""
}
|
q3119
|
Beardley.Report.to_pdf
|
train
|
def to_pdf(*args)
options = extract_options!(args)
datasource = args[0]
_JasperPrint = Rjb.import('net.sf.jasperreports.engine.JasperPrint')
_JasperExportManager = Rjb.import('net.sf.jasperreports.engine.JasperExportManager')
|
ruby
|
{
"resource": ""
}
|
q3120
|
Beardley.Report.to_file
|
train
|
def to_file(format, *args)
options = extract_options!(args)
datasource = args[0]
path = options[:path] || File.join(Dir.tmpdir, "beardley_#{Time.now.to_i.to_s(36)}_#{rand(100_000_000_000).to_s(36)}.#{format}")
if format == :pdf
_JasperPrint = Rjb.import('net.sf.jasperreports.engine.JasperPrint')
_JasperExportManager = Rjb.import('net.sf.jasperreports.engine.JasperExportManager')
_JasperExportManager._invoke('exportReportToPdfFile', 'Lnet.sf.jasperreports.engine.JasperPrint;Ljava.lang.String;', prepare(datasource),
|
ruby
|
{
"resource": ""
}
|
q3121
|
Beardley.Report.to
|
train
|
def to(format, *args)
options = extract_options!(args)
datasource = args[0]
file = Tempfile.new("to_#{format}")
exporter = Beardley.with_warnings { Rjb.import(Beardley.exporters[format]) }.new
_JRExporterParameter =
|
ruby
|
{
"resource": ""
}
|
q3122
|
Beardley.Report.prepare
|
train
|
def prepare(datasource = nil)
# Compile it, if needed
if @source_file && ((!@object_file.exist? && @source_file.exist?) || (@source_file.exist? && @source_file.mtime > @object_file.mtime))
_JasperCompileManager = Rjb.import('net.sf.jasperreports.engine.JasperCompileManager')
|
ruby
|
{
"resource": ""
}
|
q3123
|
Beardley.Report.prepare_params
|
train
|
def prepare_params
_HashMap = Rjb.import('java.util.HashMap')
_JavaString = Rjb.import('java.lang.String')
# Converting default report params to java HashMap
params = _HashMap.new
Beardley.config[:report_params].each do |k, v|
params.put(k, v)
end
# Convert the ruby parameters' hash to a java HashMap, but keeps it as
|
ruby
|
{
"resource": ""
}
|
q3124
|
Beardley.Report.load_datasource
|
train
|
def load_datasource(datasource = nil)
jasper_params = prepare_params
# Parse and load XML as datasource
if datasource
_InputSource = Rjb.import('org.xml.sax.InputSource')
_StringReader = Rjb.import('java.io.StringReader')
_JRXmlUtils = Rjb.import('net.sf.jasperreports.engine.util.JRXmlUtils')
_JRXPathQueryExecuterFactory = Beardley.with_warnings { Rjb.import('net.sf.jasperreports.engine.query.JRXPathQueryExecuterFactory') }
input_source = _InputSource.new
input_source.setCharacterStream(_StringReader.new(datasource.to_s))
|
ruby
|
{
"resource": ""
}
|
q3125
|
Beardley.Report.fill_report
|
train
|
def fill_report(params, datasource = nil)
_JasperFillManager = Rjb.import('net.sf.jasperreports.engine.JasperFillManager')
if datasource
return _JasperFillManager.fillReport(@object_file.to_s, params)
else
_JREmptyDataSource =
|
ruby
|
{
"resource": ""
}
|
q3126
|
Outbox.MessageFields.fields
|
train
|
def fields(new_fields = nil)
if new_fields.nil?
fields = {}
self.class.fields.each do |field|
fields[field] = public_send(field)
|
ruby
|
{
"resource": ""
}
|
q3127
|
Outbox.MessageFields.fields=
|
train
|
def fields=(new_fields)
new_fields.each do |field, value|
|
ruby
|
{
"resource": ""
}
|
q3128
|
Outbox.MessageFields.validate_fields
|
train
|
def validate_fields
self.class.required_fields.each do |field|
value = public_send(field)
if value.nil?
|
ruby
|
{
"resource": ""
}
|
q3129
|
GitFeats.Serializer.serialize
|
train
|
def serialize(path, data)
# Make a path to the data file if one doesn't already exist
mkpath_to path
File.open(path,
|
ruby
|
{
"resource": ""
}
|
q3130
|
GitFeats.Serializer.unserialize
|
train
|
def unserialize(path)
if File.exists?(path) && !File.zero?(path)
begin
return JSON.parse(IO.binread(path))
|
ruby
|
{
"resource": ""
}
|
q3131
|
Phonology.OrthographyTranslator.translate
|
train
|
def translate(string)
@string = string
@max = array.length
SoundSequence.new(array.each_index.map do |index|
|
ruby
|
{
"resource": ""
}
|
q3132
|
SemanticDateTimeTags.ViewHelpers.semantic_date_time_tag
|
train
|
def semantic_date_time_tag(date_time, options = {})
|
ruby
|
{
"resource": ""
}
|
q3133
|
SemanticDateTimeTags.ViewHelpers.semantic_time_tag
|
train
|
def semantic_time_tag(time, options = {})
|
ruby
|
{
"resource": ""
}
|
q3134
|
Lumber.InheritanceRegistry.register_inheritance_handler
|
train
|
def register_inheritance_handler
synchronize do
return if defined?(Object.inherited_with_lumber_registry)
Object.class_eval do
class << self
def inherited_with_lumber_registry(subclass)
inherited_without_lumber_registry(subclass)
# Add a logger to 'subclass' if it is directly in the registry
# No need to check full inheritance chain LoggerSupport handles it
# Also prevent rails from subsequently overriding our logger when rails
# is loaded after registering logger
|
ruby
|
{
"resource": ""
}
|
q3135
|
IndexFor.Helper.index_for
|
train
|
def index_for objects, html_options = {}, &block
html_options = html_options.dup
objects = fetch_objects objects, html_options
tag = html_options[:table_tag] || IndexFor.table_tag
klass = html_options[:klass] || objects.try(:klass) || objects.first.class
|
ruby
|
{
"resource": ""
}
|
q3136
|
IndexFor.Helper.index_for_actions
|
train
|
def index_for_actions object, *action_names, &block
html_options = action_names.extract_options!
action_names = [:show, :edit, :destroy] if action_names == [:all]
builder = html_options[:action_builder] || IndexFor::ActionBuilder
|
ruby
|
{
"resource": ""
}
|
q3137
|
IndexFor.Helper.show_for
|
train
|
def show_for object, html_options = {}, &block
html_options = html_options.dup
tag = html_options[:list_tag] || IndexFor.list_tag
html_options[:id] ||= show_for_id(object)
html_options[:class] = show_for_class(object, html_options)
builder
|
ruby
|
{
"resource": ""
}
|
q3138
|
Brightcove.API.post
|
train
|
def post(api_method, parameters = {})
parameters.merge!({"token" => @token})
body = {}
body.merge!({:method => api_method})
body.merge!({:params => parameters})
|
ruby
|
{
"resource": ""
}
|
q3139
|
Hicube.ContentsController.load_resource
|
train
|
def load_resource
@page = Hicube::Page.unscoped.find(params[:page_id])
|
ruby
|
{
"resource": ""
}
|
q3140
|
HipChat.API.rooms_create
|
train
|
def rooms_create(name, owner_user_id, privacy = 'public', topic = '', guest_access = 0)
self.class.post(hipchat_api_url_for('rooms/create'), :body => {:auth_token => @token, :name => name, :owner_user_id
|
ruby
|
{
"resource": ""
}
|
q3141
|
HipChat.API.rooms_message
|
train
|
def rooms_message(room_id, from, message, notify = 0, color = 'yellow', message_format = 'html')
self.class.post(hipchat_api_url_for('rooms/message'), :body => {:auth_token => @token, :room_id => room_id, :from => from,
|
ruby
|
{
"resource": ""
}
|
q3142
|
HipChat.API.users_create
|
train
|
def users_create(email, name, title, is_group_admin = 0, password = nil, timezone = 'UTC')
self.class.post(hipchat_api_url_for('users/create'), :body => {:auth_token => @token, :email => email, :name => name, :title => title,
|
ruby
|
{
"resource": ""
}
|
q3143
|
Markascend.Macro.parse_video
|
train
|
def parse_video
# standard
unless /\A\s*(?<width>\d+)x(?<height>\d+)\s+(?<url>.+)\z/ =~ content
env.warn 'can not parse \video content, should be "#{WIDTH}x#{HEIGHT} #{URL}"'
return
end
case url
when /youtu\.?be/
# NOTE merging them into one regexp fails (because longest match?)
unless id = url[/(?<=watch\?v=)\w+/] || url[/(?<=embed\/)\w+/] || url[/(?<=youtu\.be\/)\w+/]
env.warn 'can not parse youtube id'
return
end
%Q|<iframe width="#{width}" height="#{height}" src="https://www.youtube-nocookie.com/embed/#{id}?rel=0" frameborder="0" allowfullscreen></iframe>|
when /vimeo/
unless id = url[/(?<=vimeo\.com\/)\w+/]
env.warn 'can not parse vimeo id, should use link like this "http://vimeo.com/#{DIGITS}"'
return
end
|
ruby
|
{
"resource": ""
}
|
q3144
|
KTree.KTree.create_tree
|
train
|
def create_tree(vupper, vlower, &block)
@root
|
ruby
|
{
"resource": ""
}
|
q3145
|
AsiBod.Asi.array_data_to_hash
|
train
|
def array_data_to_hash(array_data)
array_data.each_with_object({}) do |node, memo|
|
ruby
|
{
"resource": ""
}
|
q3146
|
Lumber.LevelUtil.start_monitor
|
train
|
def start_monitor(interval=10)
t = MonitorThread.new do
loop do
break if Thread.current.should_exit
begin
activate_levels
rescue => e
$stderr.puts "Failure activating log levels: #{e}"
|
ruby
|
{
"resource": ""
}
|
q3147
|
Lumber.LevelUtil.backup_levels
|
train
|
def backup_levels(loggers)
synchronize do
loggers.each do |name|
outputter = Log4r::Outputter[name]
if outputter
@original_outputter_levels[name] ||= outputter.level
else
logger = Lumber.find_or_create_logger(name)
|
ruby
|
{
"resource": ""
}
|
q3148
|
Lumber.LevelUtil.restore_levels
|
train
|
def restore_levels
synchronize do
@original_outputter_levels.each do |name, level|
outputter = Log4r::Outputter[name]
outputter.level = level if outputter.level != level
end
@original_outputter_levels.clear
|
ruby
|
{
"resource": ""
}
|
q3149
|
GAAPI.Row.method_missing
|
train
|
def method_missing(method, *args)
if (i = dimension_method_names.find_index(method))
define_singleton_method(method) do
dimensions[i]
end
send(method)
elsif (i = metric_method_names.find_index(method))
|
ruby
|
{
"resource": ""
}
|
q3150
|
GAAPI.Row.convert_metric
|
train
|
def convert_metric(i)
case report.metric_type(i)
when "INTEGER"
# INTEGER Integer metric.
metrics[i].to_i
when "FLOAT", "PERCENT"
# FLOAT Float metric.
# PERCENT Percentage metric.
metrics[i].to_f
when "CURRENCY"
# CURRENCY Currency metric.
# TODO: Do this better.
metrics[i].to_f
when "TIME"
# Google documentation claims to following:
# TIME Time metric in HH:MM:SS format.
# It also says it's seconds, and that's what I see in real results.
# So comment out the
|
ruby
|
{
"resource": ""
}
|
q3151
|
MiniTerm.Mapper.[]=
|
train
|
def []=(indexes, value)
indexes = [indexes] unless indexes.is_a?(Range)
indexes.each do |index|
process_non_terminals(index)
if @map.has_key?(index)
|
ruby
|
{
"resource": ""
}
|
q3152
|
MiniTerm.Mapper.process_non_terminals
|
train
|
def process_non_terminals(index)
seq = ""
index.chop.chars.each do |char|
seq << char
if @map.has_key?(seq) && @map[seq]
|
ruby
|
{
"resource": ""
}
|
q3153
|
Malcolm.SOAPBuilder.wrap
|
train
|
def wrap(data)
"<?xml version=\"1.0\" encoding=\"UTF-8\"?><env:Envelope xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"><env:Body>".tap do |soap_envelope|
unless data.blank?
|
ruby
|
{
"resource": ""
}
|
q3154
|
List.Matcher.bud
|
train
|
def bud(opts={})
opts = {
atomic: @atomic,
backtracking: @backtracking,
bound: @_bound,
strip: @strip,
case_insensitive: @case_insensitive,
multiline: @multiline,
not_extended:
|
ruby
|
{
"resource": ""
}
|
q3155
|
List.Matcher.pattern
|
train
|
def pattern( list, opts={} )
return '(?!)' unless list.any?
return bud(opts).pattern list unless opts.empty?
list = list.compact.map(&:to_s).select{ |s| s.length > 0 }
list.map!(&:strip).select!{ |s| s.length > 0 } if strip
list.map!{ |s| s.gsub %r/\s++/, ' ' } if normalize_whitespace
return nil if list.empty?
specializer = Special.new self, @symbols, list
list = specializer.normalize
root = tree list, specializer
root.root = true
root.flatten
rx = root.convert
if m = modifiers
|
ruby
|
{
"resource": ""
}
|
q3156
|
Missinglink.SurveyQuestion.possible_responses
|
train
|
def possible_responses(search_other = false)
{}.tap do |hash|
survey_response_answers.each do |sra|
sa_row = (sra.row_survey_answer_id ? SurveyAnswer.find(sra.row_survey_answer_id) : nil)
sa_col = (sra.col_survey_answer_id ? SurveyAnswer.find(sra.col_survey_answer_id) : nil)
sa_col_choice = (sra.col_choice_survey_answer_id ? SurveyAnswer.find(sra.col_choice_survey_answer_id) : nil)
case answer_strategy
when "first_survey_response_answer_text"
hash[sra.text] = sra.id unless (sra.text.nil? || hash[sra.text])
when "answer_row_for_subquestion"
other_text = (sra.text.nil? ? nil : "#{ (sa_row.try(:text) || "Other") }: #{ sra.text }")
hash[other_text] = sra.id unless (other_text.nil? || hash[other_text])
when "answer_row_for_response"
other_text = ((!search_other || sra.text.nil?) ? nil : "#{ (sa_row.try(:text) || "Other") }: #{ sra.text }")
hash[sa_row.text] = sra.id unless (sa_row.nil? || hash[sa_row.text])
hash[other_text] = sra.id unless (other_text.nil? || hash[other_text])
when "answer_row_and_column_for_response"
main_text = "#{ sa_row.try(:text) }: #{ sa_col.try(:text) }"
other_text = ((!search_other || sra.text.nil? || !sa_row.nil?) ? nil : "Other: #{ sra.text }")
hash[main_text] = sra.id
|
ruby
|
{
"resource": ""
}
|
q3157
|
Squash.Uploader.http_post
|
train
|
def http_post(url, headers, bodies)
uri = URI.parse(url)
http = Net::HTTP.new(uri.host, uri.port)
http.use_ssl = (uri.scheme == 'https')
http.open_timeout = options[:open_timeout]
http.read_timeout = options[:read_timeout]
http.verify_mode = OpenSSL::SSL::VERIFY_NONE if options[:skip_verification]
http.start do |session|
bodies.each do |body|
request = Net::HTTP::Post.new(uri.request_uri)
headers.each { |k, v| request.add_field k, v }
request.body = body
response = session.request(request)
if options[:success].none?
|
ruby
|
{
"resource": ""
}
|
q3158
|
Modernize.Modernizer.translate
|
train
|
def translate(context, hash)
# makes sure that the context is a hash
raise ArgumentError.new('did not pass a hash for the context') unless context.is_a?(Hash)
raise ArgumentError.new('cannot provide include hash in context') if context[:hash]
# create the context instance for instance variables
struct = StructContext.new(context, hash)
# instantiate MapMethods to perform translations and define lambda
# for how to tranlate a field
#
translate = lambda { |t|
MapMethods.send(t[:name], struct, t[:field], t[:block])
}
# determine the version of the incoming hash
#
struct_version = struct.instance_exec(&@migrations.version)
raise StandardError.new('calculated version is not valid') unless Gem::Version.correct?(struct_version)
# gets a list of the potential versions
#
migration_versions = @migrations.translations.keys
migration_versions.delete(:first)
migration_versions.delete(:last)
# get the first and last translations
#
firsts = @migrations.translations[:first]
lasts = @migrations.translations[:last]
# sorts the versions
#
migration_versions.sort! do |x,y|
Gem::Version.new(x) <=> Gem::Version.new(y)
end
# reverse order if descending was specified
#
migration_versions = @migrations.order == :descending ? migration_versions.reverse : migration_versions
# run the first translations if
|
ruby
|
{
"resource": ""
}
|
q3159
|
Hoiio.RequestUtil.check_nil_or_empty
|
train
|
def check_nil_or_empty(required_param_names=[], params)
required_param_names.each { |p|
if params[p].nil? || params[p].empty?
|
ruby
|
{
"resource": ""
}
|
q3160
|
Hoiio.RequestUtil.check_for_mutual_exclusivity
|
train
|
def check_for_mutual_exclusivity(required_param_names=[], params)
i = 0
required_param_names.each { |p|
if !params[p].nil? && !params[p].empty?
i += 1
end
}
if i == 0
|
ruby
|
{
"resource": ""
}
|
q3161
|
Generator.Context.render_partial
|
train
|
def render_partial(file_name)
# The "default" version of the partial.
file_to_render = "#{@input_folder}/partials/#{file_name.to_s}.haml"
if @scope
# Look for a partial prefixed with the current "scope" (which is just the name of the
# primary template being rendered).
scope_file = "#{@input_folder}/partials/#{@scope.to_s}_#{file_name.to_s}.haml"
|
ruby
|
{
"resource": ""
}
|
q3162
|
ShakeTheCounter.Client.access_token
|
train
|
def access_token
@access_token ||= ShakeTheCounter::Authentication.renew_access_token(client_id: id, client_secret:
|
ruby
|
{
"resource": ""
}
|
q3163
|
ShakeTheCounter.Client.call
|
train
|
def call(path, http_method: :get, body: {}, header: {})
# add bearer token to header
header[:authorization] = "Bearer #{access_token}"
|
ruby
|
{
"resource": ""
}
|
q3164
|
ShakeTheCounter.Client.start_payment
|
train
|
def start_payment(reservation_key)
path = "reservation/#{reservation_key}/payment"
result = call(path, http_method: :post)
if result.code.to_i == 200
|
ruby
|
{
"resource": ""
}
|
q3165
|
MyJohnDeere.APISupportItem.to_hash
|
train
|
def to_hash()
ret_hash = {}
self.class.json_attributes.each do |attrib|
|
ruby
|
{
"resource": ""
}
|
q3166
|
UserInput.Prompt.ask
|
train
|
def ask
@fd.print full_message
disable_echo if @secret
input = _ask
return input if valid(input)
|
ruby
|
{
"resource": ""
}
|
q3167
|
UserInput.Prompt.valid
|
train
|
def valid(input)
return true unless @validation
_, method = VALIDATIONS.find { |klass, _| @validation.is_a? klass
|
ruby
|
{
"resource": ""
}
|
q3168
|
UserInput.Prompt._ask
|
train
|
def _ask
input = STDIN.gets.chomp
input = @default if input.empty? && @default
|
ruby
|
{
"resource": ""
}
|
q3169
|
Hyperb.Utils.check_arguments
|
train
|
def check_arguments(params, *args)
contains = true
args.each do |arg|
|
ruby
|
{
"resource": ""
}
|
q3170
|
Hyperb.Utils.prepare_json
|
train
|
def prepare_json(params = {})
json = {}
params.each do |key, value|
|
ruby
|
{
"resource": ""
}
|
q3171
|
Tily.TileSystem.each_tile
|
train
|
def each_tile level
size = tile_size level
(0...size).each do |y|
(0...size).each do |x|
|
ruby
|
{
"resource": ""
}
|
q3172
|
Tily.TileSystem.each_tile_with_index
|
train
|
def each_tile_with_index level
idx = 0
size = tile_size level
(0...size).each do
|
ruby
|
{
"resource": ""
}
|
q3173
|
SimplePoParser.Parser.parse
|
train
|
def parse(message)
@result = {}
@scanner = StringScanner.new(message.strip)
begin
lines
rescue ParserError => pe
error_msg = "SimplePoParser::ParserError"
error_msg += pe.message
error_msg += "\nParseing result before error: '#{@result}'"
error_msg += "\nSimplePoParser filtered backtrace: SimplePoParser::ParserError"
|
ruby
|
{
"resource": ""
}
|
q3174
|
SimplePoParser.Parser.msgctxt
|
train
|
def msgctxt
begin
if @scanner.scan(/msgctxt/)
skip_whitespace
text = message_line
add_result(:msgctxt, text)
message_multiline(:msgctxt) if text.empty?
end
msgid
|
ruby
|
{
"resource": ""
}
|
q3175
|
SimplePoParser.Parser.msgid
|
train
|
def msgid
begin
if @scanner.scan(/msgid/)
skip_whitespace
text = message_line
add_result(:msgid, text)
message_multiline(:msgid) if text.empty?
if msgid_plural
msgstr_plural
else
|
ruby
|
{
"resource": ""
}
|
q3176
|
SimplePoParser.Parser.msgid_plural
|
train
|
def msgid_plural
begin
if @scanner.scan(/msgid_plural/)
skip_whitespace
text = message_line
add_result(:msgid_plural, text)
message_multiline(:msgid_plural) if text.empty?
true
else
|
ruby
|
{
"resource": ""
}
|
q3177
|
SimplePoParser.Parser.msgstr
|
train
|
def msgstr
begin
if @scanner.scan(/msgstr/)
skip_whitespace
text = message_line
add_result(:msgstr, text)
message_multiline(:msgstr) if text.empty?
skip_whitespace
raise PoSyntaxError, "Unexpected content after expected message end #{@scanner.peek(10).inspect}" unless @scanner.eos?
else
raise PoSyntaxError, "Singular message without msgstr is not allowed. Line
|
ruby
|
{
"resource": ""
}
|
q3178
|
SimplePoParser.Parser.msgstr_plural
|
train
|
def msgstr_plural(num = 0)
begin
msgstr_key = @scanner.scan(/msgstr\[\d\]/) # matches 'msgstr[0]' to 'msgstr[9]'
if msgstr_key
# msgstr plurals must come in 0-based index in order
msgstr_num = msgstr_key.match(/\d/)[0].to_i
raise PoSyntaxError, "Bad 'msgstr[index]' index." if msgstr_num != num
skip_whitespace
text = message_line
add_result(msgstr_key, text)
|
ruby
|
{
"resource": ""
}
|
q3179
|
SimplePoParser.Parser.previous_comments
|
train
|
def previous_comments
begin
# next part must be msgctxt, msgid or msgid_plural
if @scanner.scan(/msg/)
if @scanner.scan(/id/)
if @scanner.scan(/_plural/)
key = :previous_msgid_plural
else
key = :previous_msgid
end
elsif @scanner.scan(/ctxt/)
key = :previous_msgctxt
else
raise PoSyntaxError, "Previous comment type #{("msg" + @scanner.peek(10)).inspect} unknown."
end
skip_whitespace
text = message_line
|
ruby
|
{
"resource": ""
}
|
q3180
|
SimplePoParser.Parser.previous_multiline
|
train
|
def previous_multiline(key)
begin
# scan multilines until no further multiline is hit
# /#\|\p{Blank}"/ needs to catch the double quote to ensure it hits a previous
# multiline and not another line type.
if @scanner.scan(/#\|\p{Blank}*"/)
@scanner.pos = @scanner.pos - 1 # go one character back, so we can reuse the "message line" method
|
ruby
|
{
"resource": ""
}
|
q3181
|
SimplePoParser.Parser.message_multiline
|
train
|
def message_multiline(key)
begin
skip_whitespace
if @scanner.check(/"/)
add_result(key, message_line)
message_multiline(key)
end
rescue PoSyntaxError => pe
raise
|
ruby
|
{
"resource": ""
}
|
q3182
|
SimplePoParser.Parser.message_line
|
train
|
def message_line
begin
if @scanner.getch == '"'
text = message_text
unless @scanner.getch == '"'
err_msg = "The message text '#{text}' must be finished with the double quote character '\"'."
raise PoSyntaxError, err_msg
end
skip_whitespace
unless end_of_line
err_msg = "There should be only whitespace until the end of line"
err_msg += " after the double quote character of a message text."
raise PoSyntaxError.new(err_msg)
end
text
|
ruby
|
{
"resource": ""
}
|
q3183
|
SimplePoParser.Parser.add_result
|
train
|
def add_result(key, text)
if @result[key]
if @result[key].is_a? Array
@result[key].push(text)
else
|
ruby
|
{
"resource": ""
}
|
q3184
|
SocialSnippet.Resolvers::BaseResolver.resolve_tag_repo_ref!
|
train
|
def resolve_tag_repo_ref!(tag)
return unless tag.has_repo?
repo = core.repo_manager.find_repository(tag.repo)
# set latest version
if tag.has_ref? === false
if repo.has_package_versions?
tag.set_ref repo.latest_package_version
else
tag.set_ref repo.current_ref
|
ruby
|
{
"resource": ""
}
|
q3185
|
Featureflow.EventsClient.register_features
|
train
|
def register_features(with_features)
Thread.new do
features = []
features = with_features.each do | feature |
features.push(key: feature[:key],
variants: feature[:variants],
|
ruby
|
{
"resource": ""
}
|
q3186
|
HidApi.DeviceInfo.each
|
train
|
def each
return enum_for(:each) unless block_given?
pointer = self
loop do
break if
|
ruby
|
{
"resource": ""
}
|
q3187
|
ProbeDockCucumber.Formatter.comment_line
|
train
|
def comment_line(comment)
# Take care of annotation only if matched
if comment.match(ProbeDockProbe::Annotation::ANNOTATION_REGEXP)
# If the feature already started, the annotations are for scenarios
if @current_feature_started
|
ruby
|
{
"resource": ""
}
|
q3188
|
NetworkUtils.UrlInfo.is?
|
train
|
def is?(type)
return false if type.to_s.empty?
expected_types = Array.wrap(type).map(&:to_s)
content_type && expected_types.select do |t|
|
ruby
|
{
"resource": ""
}
|
q3189
|
Prawn.EsrRecipe.esr9_format_account_id
|
train
|
def esr9_format_account_id(account_id)
(pre, main, post) = account_id.split('-')
|
ruby
|
{
"resource": ""
}
|
q3190
|
LDAPGroupsLookup.Configuration.config
|
train
|
def config
if @config.nil?
if defined? Rails
configure(Rails.root.join('config', 'ldap_groups_lookup.yml').to_s)
else
|
ruby
|
{
"resource": ""
}
|
q3191
|
CachedCounts.Cache.clear
|
train
|
def clear
invalid_keys = all_keys.select { |key| key.include?(@scope.table_name.downcase) }
invalid_keys.each {
|
ruby
|
{
"resource": ""
}
|
q3192
|
QiitaScouter.Core.analyze
|
train
|
def analyze(target_user)
user = read_user(target_user)
articles = read_articles(target_user)
|
ruby
|
{
"resource": ""
}
|
q3193
|
ParallelAppium.IOS.simulator_information
|
train
|
def simulator_information
re = /\([0-9]+\.[0-9](\.[0-9])?\) \[[0-9A-Z-]+\]/m
# Filter out simulator info for iPhone platform version and udid
@simulators.select { |simulator_data| simulator_data.include?('iPhone') && !simulator_data.include?('Apple Watch') }
|
ruby
|
{
"resource": ""
}
|
q3194
|
Frankenstein.Request.measure
|
train
|
def measure(labels = {})
start_time = Time.now
unless block_given?
raise NoBlockError,
"No block passed to #{self.class}#measure"
end
@requests.increment(labels, 1)
@mutex.synchronize { @current.set(labels, (@current.get(labels) || 0) + 1) }
res_labels = labels.dup
begin
yield(res_labels).tap do
elapsed_time = Time.now - start_time
@durations.observe(res_labels, elapsed_time)
end
|
ruby
|
{
"resource": ""
}
|
q3195
|
Frankenstein.CollectedMetric.values
|
train
|
def values
begin
@collector.call(self).tap do |results|
unless results.is_a?(Hash)
@logger.error(progname) { "Collector proc did not return a hash, got #{results.inspect}" }
@errors_metric.increment(class: "NotAHashError")
|
ruby
|
{
"resource": ""
}
|
q3196
|
Frankenstein.CollectedMetric.validate_type
|
train
|
def validate_type(type)
unless %i{gauge counter histogram summary}.include?(type)
|
ruby
|
{
"resource": ""
}
|
q3197
|
Salesforce.ChatterFeed.search_chatter_feeds
|
train
|
def search_chatter_feeds(object_type, query_string, binding, limit=100)
return get_all_chatter_feeds_with_attachments(nil,
|
ruby
|
{
"resource": ""
}
|
q3198
|
Phonology.Inventory.with
|
train
|
def with(*features)
pos, neg = mangle_args(*features)
self.class.new(Hash[@sets.select do |key, val|
|
ruby
|
{
"resource": ""
}
|
q3199
|
Phonology.Inventory.with_all
|
train
|
def with_all(*features)
pos, neg = mangle_args(*features)
self.class.new(Hash[@sets.select do |key, val|
|
ruby
|
{
"resource": ""
}
|