_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q3100
|
Elasticrawl.Config.delete_config_dir
|
train
|
# Removes the config directory (and its contents) if it exists.
# Any filesystem failure is wrapped in FileAccessError so callers only
# need to rescue one error type.
#
# @raise [FileAccessError] when the directory cannot be removed
def delete_config_dir
  # Method-level rescue replaces the redundant begin/end wrapper.
  FileUtils.rm_r(config_dir) if dir_exists?
rescue StandardError => e
  raise FileAccessError, e.message
end
|
ruby
|
{
"resource": ""
}
|
q3101
|
Elasticrawl.Config.status_message
|
train
|
# Builds a multi-line status report for a bucket/config-dir operation.
# A "created" state is reported as "complete" on the final line.
#
# @param bucket_name [String] S3 bucket name
# @param state [String] e.g. 'created' or 'deleted'
# @return [String] newline-joined message (starts with a blank line)
def status_message(bucket_name, state)
  lines = ['']
  lines << "Bucket s3://#{bucket_name} #{state}"
  lines << "Config dir #{config_dir} #{state}"
  final_state = state == 'created' ? 'complete' : state
  lines << "Config #{final_state}"
  lines.join("\n")
end
|
ruby
|
{
"resource": ""
}
|
q3102
|
Migrate.Migrator.exec_migration
|
train
|
# Runs a single migration in the given direction and records the new
# schema version.
#
# @param migration [Hash] migration descriptor with a "version" key
# @param is_up [Boolean] true to migrate up, false to roll back
def exec_migration(migration, is_up)
  dir = migration_dir(migration)
  output = @lang.exec_migration(dir, is_up)
  # SQL migrations run inside the DB; only print output for script-based
  # migration languages.
  puts output unless @lang.ext == "sql"
  Log.info("Updating current version number...")
  version = migration["version"]
  if is_up
    @db.log_up(version)
  else
    @db.log_down(version)
  end
end
|
ruby
|
{
"resource": ""
}
|
q3103
|
Migrate.Migrator.exec_migrations
|
train
|
# Executes every migration yielded for the current DB version.
# The caller's block receives the current version and must return the
# list of pending migrations.
#
# @param is_up [Boolean] migration direction (default: up)
def exec_migrations(is_up = true)
  Log.info("Executing migrations...")
  pending = yield @db.current_version
  if pending.count == 0
    Log.warn("Migrations not found")
    return
  end
  pending.each { |migration| exec_migration(migration, is_up) }
  Log.success("Migrations executed. Current version: #{@db.current_version}")
end
|
ruby
|
{
"resource": ""
}
|
q3104
|
Sparkr.CLI.run
|
train
|
# CLI entry point: prints usage when called with no args or a lone
# -h/--help flag, otherwise renders the numeric args as a sparkline.
def run(*args)
  wants_help = args.empty? ||
               (args.size == 1 && %w(-h --help).include?(args.first))
  if wants_help
    puts help
  else
    puts Sparkline.new(args.map(&:to_f)).to_s
  end
end
|
ruby
|
{
"resource": ""
}
|
q3105
|
OpenTox.Compound.fingerprint
|
train
|
# Returns the fingerprint of the requested +type+ for this compound,
# computing it from the SMILES string on first use and caching it in
# the fingerprints hash (persisted via +save+).
#
# Supported types: "MP2D" (MolPrint2D substring fingerprints), "MNA"
# (Multilevel Neighborhoods of Atoms), or any OpenBabel fingerprint
# name. Returns [] when the compound has no SMILES.
def fingerprint type=DEFAULT_FINGERPRINT
  unless fingerprints[type]
    return [] unless self.smiles
    #http://openbabel.org/docs/dev/FileFormats/MolPrint2D_format.html#molprint2d-format
    if type == "MP2D"
      fp = obconversion(smiles,"smi","mpd").strip.split("\t")
      name = fp.shift # remove Title
      fingerprints[type] = fp.uniq # no fingerprint counts
    #http://openbabel.org/docs/dev/FileFormats/Multilevel_Neighborhoods_of_Atoms_(MNA).html
    elsif type== "MNA"
      level = 2 # TODO: level as parameter, evaluate level 1, see paper
      fp = obconversion(smiles,"smi","mna","xL\"#{level}\"").split("\n")
      fp.shift # remove Title
      fingerprints[type] = fp
    else # standard fingerprints
      fp = OpenBabel::OBFingerprint.find_fingerprint(type)
      obmol = OpenBabel::OBMol.new
      obconversion = OpenBabel::OBConversion.new
      obconversion.set_in_format "smi"
      obconversion.read_string obmol, self.smiles
      result = OpenBabel::VectorUnsignedInt.new
      fp.get_fingerprint(obmol,result)
      # TODO: %ignore *::DescribeBits @ line 163 openbabel/scripts/openbabel-ruby.i
      #p OpenBabel::OBFingerprint.describe_bits(result)
      # convert result to a list of the bits that are set
      # from openbabel/scripts/python/pybel.py line 830
      # see also http://openbabel.org/docs/dev/UseTheLibrary/Python_Pybel.html#fingerprints
      result = result.to_a
      bitsperint = OpenBabel::OBFingerprint.getbitsperint()
      bits_set = []
      start = 1
      # Walk each packed unsigned int, recording the 1-based global
      # position of every set bit.
      result.each do |x|
        i = start
        while x > 0 do
          bits_set << i if (x % 2) == 1
          x >>= 1
          i += 1
        end
        start += bitsperint
      end
      fingerprints[type] = bits_set
    end
    save
  end
  fingerprints[type]
end
|
ruby
|
{
"resource": ""
}
|
q3106
|
OpenTox.Compound.smarts_match
|
train
|
# Matches a list of SMARTS pattern objects against this compound.
# Returns one integer per pattern: 0 when it does not match, otherwise
# 1 (or the number of matches when +count+ is true).
def smarts_match smarts, count=false
  conversion = OpenBabel::OBConversion.new
  mol = OpenBabel::OBMol.new
  conversion.set_in_format('smi')
  conversion.read_string(mol, self.smiles)
  pattern = OpenBabel::OBSmartsPattern.new
  smarts.map do |sma|
    pattern.init(sma.smarts)
    if pattern.match(mol)
      count ? pattern.get_map_list.to_a.size : 1
    else
      0
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q3107
|
OpenTox.Compound.svg
|
train
|
# Returns the SVG rendering of this compound, generating it from the
# SMILES string and storing it in GridFS on first access.
def svg
  if self.svg_id.nil?
    image = obconversion(smiles, "smi", "svg")
    grid_file = Mongo::Grid::File.new(image, :filename => "#{id}.svg", :content_type => "image/svg")
    update(:svg_id => $gridfs.insert_one(grid_file))
  end
  $gridfs.find_one(_id: self.svg_id).data
end
|
ruby
|
{
"resource": ""
}
|
q3108
|
OpenTox.Compound.png
|
train
|
# Returns the PNG rendering of this compound. The image is generated
# once, stored Base64-encoded in GridFS, and decoded on every read.
def png
  if self.png_id.nil?
    image = obconversion(smiles, "smi", "_png2")
    grid_file = Mongo::Grid::File.new(Base64.encode64(image), :filename => "#{id}.png", :content_type => "image/png")
    update(:png_id => $gridfs.insert_one(grid_file))
  end
  Base64.decode64($gridfs.find_one(_id: self.png_id).data)
end
|
ruby
|
{
"resource": ""
}
|
q3109
|
Scruffy::Layers.Average.generate_coordinates
|
train
|
# Builds "x,y" SVG coordinate strings for the average of all relevant
# layers at each data point index.
#
# NOTE(review): the each_with_index block parameter is named +layer+
# but is actually a point of the key layer — confirm before renaming.
def generate_coordinates(options = {})
  key_layer = layers.find { |layer| layer.relevant_data? }
  # Horizontal distance between consecutive points across the width.
  options[:point_distance] = width / (key_layer.points.size - 1).to_f
  coords = []
  #TODO this will likely break with the new hash model
  key_layer.points.each_with_index do |layer, idx|
    # Sum the idx-th point of every relevant layer and count how many
    # layers contributed, in one pass.
    sum, objects = points.inject([0, 0]) do |arr, elem|
      if elem.relevant_data?
        arr[0] += elem.points[idx]
        arr[1] += 1
      end
      arr
    end
    average = sum / objects.to_f
    x_coord = options[:point_distance] * idx
    # Scale the average into [0, 1] of the value range; y is inverted
    # because SVG's y axis grows downwards.
    relative_percent = ((average == min_value) ? 0 : ((average - min_value) / (max_value - min_value).to_f))
    y_coord = (height - (height * relative_percent))
    coords << [x_coord, y_coord].join(',')
  end
  return coords
end
|
ruby
|
{
"resource": ""
}
|
q3110
|
SocialSnippet::Repository.RepositoryManager.resolve_snippet_path
|
train
|
# Resolves a snippet tag to a file path. Tags referencing a repository
# resolve inside that package; bare tags resolve relative to the
# current context's directory.
def resolve_snippet_path(context, tag)
  return find_package_by_tag(tag).snippet_path(tag.path) if tag.has_repo?
  ctx = context.clone
  ctx.dirname + "/" + tag.filename
end
|
ruby
|
{
"resource": ""
}
|
q3111
|
SocialSnippet::Repository.RepositoryManager.find_package_by_tag
|
train
|
# Looks up the package a tag refers to, honouring an explicit ref
# (branch/tag/commit) when the tag carries one.
def find_package_by_tag(tag)
  tag.has_ref? ? find_package(tag.repo, tag.ref) : find_package(tag.repo)
end
|
ruby
|
{
"resource": ""
}
|
q3112
|
SocialSnippet::Repository.RepositoryManager.find_package
|
train
|
# Finds the stored package model for a repository at a given ref,
# defaulting to the latest packaged version, then the repo's current
# ref, when no ref is supplied.
#
# @raise [RuntimeError] when the ref is unknown to the repository
def find_package(name, ref = nil)
  repo = find_repository(name)
  effective_ref = ref || repo.latest_package_version || repo.current_ref
  raise "invalid references" unless repo.has_ref?(effective_ref)
  Models::Package.find_by(:repo_name => name, :rev_hash => repo.rev_hash[effective_ref])
end
|
ruby
|
{
"resource": ""
}
|
q3113
|
Hyperb.Images.remove_image
|
train
|
# Deletes an image by name via the Hyper.sh API. Pass :force to remove
# an image that is still in use.
#
# @param params [Hash] must contain :name; may contain :force
# @return [Hash] downcased/symbolized API response
# @raise [ArgumentError] when :name is missing
def remove_image(params = {})
  raise ArgumentError, 'Invalid arguments.' unless check_arguments(params, 'name')
  path = "/images/#{params[:name]}"
  query = params.key?(:force) ? { :force => true } : {}
  response = Hyperb::Request.new(self, path, query, 'delete').perform
  downcase_symbolize(JSON.parse(response))
end
|
ruby
|
{
"resource": ""
}
|
q3114
|
Hyperb.Images.inspect_image
|
train
|
# Fetches the detailed JSON description of an image by name.
#
# @param params [Hash] must contain :name
# @return [Hash] downcased/symbolized API response
# @raise [ArgumentError] when :name is missing
def inspect_image(params = {})
  raise ArgumentError, 'Invalid arguments.' unless check_arguments(params, 'name')
  path = "/images/#{params[:name]}/json"
  response = Hyperb::Request.new(self, path, {}, 'get').perform
  downcase_symbolize(JSON.parse(response))
end
|
ruby
|
{
"resource": ""
}
|
q3115
|
Redwood.Node.add_child
|
train
|
# Creates a child node named +name+, yields it for configuration when a
# block is given, appends it to this node's children, and returns it.
def add_child(name)
  self.class.new(name, self).tap do |child|
    yield child if block_given?
    children << child
  end
end
|
ruby
|
{
"resource": ""
}
|
q3116
|
Redwood.Node.[]
|
train
|
# Fetches children by name. Returns the single matching child when the
# name is unique, otherwise the (possibly empty) array of matches.
def [](key)
  matches = children.select { |child| child.name == key }
  matches.size == 1 ? matches.first : matches
end
|
ruby
|
{
"resource": ""
}
|
q3117
|
Manageable.ApplicationController.respond_with_with_storage
|
train
|
# Records the resources passed to respond_with (minus any trailing
# options hash) in @responded_with, then delegates to the original
# respond_with implementation.
def respond_with_with_storage(*args, &block)
  resources = args
  resources = args - [args.last] if args.last.is_a?(Hash)
  @responded_with = resources
  respond_with_without_storage(*args, &block)
end
|
ruby
|
{
"resource": ""
}
|
q3118
|
Hyperb.HostConfig.fmt
|
train
|
# Returns a copy of attrs with every key camelized, as expected by the
# Hyper.sh API.
def fmt
  attrs.keys.each_with_object({}) do |key, formatted|
    formatted[camelize(key)] = attrs[key]
  end
end
|
ruby
|
{
"resource": ""
}
|
q3119
|
Beardley.Report.to_pdf
|
train
|
# Exports the prepared report directly to a PDF byte string via
# JasperExportManager.
#
# NOTE(review): `options` and `_JasperPrint` are assigned but unused;
# the Rjb.import of JasperPrint may still be required to load the Java
# class before the export call — confirm before removing.
def to_pdf(*args)
  options = extract_options!(args)
  datasource = args[0]
  _JasperPrint = Rjb.import('net.sf.jasperreports.engine.JasperPrint')
  _JasperExportManager = Rjb.import('net.sf.jasperreports.engine.JasperExportManager')
  _JasperExportManager._invoke('exportReportToPdf', 'Lnet.sf.jasperreports.engine.JasperPrint;', prepare(datasource))
end
|
ruby
|
{
"resource": ""
}
|
q3120
|
Beardley.Report.to_file
|
train
|
# Exports the report to a file and returns its path.
#
# @param format [Symbol] :pdf or any key of Beardley.exporters
# @param args [Array] optional datasource followed by an options hash;
#   options[:path] overrides the auto-generated temp-file location
# @return [String] path of the written file
# @raise [RuntimeError] for unsupported formats
def to_file(format, *args)
  options = extract_options!(args)
  datasource = args[0]
  path = options[:path] || File.join(Dir.tmpdir, "beardley_#{Time.now.to_i.to_s(36)}_#{rand(100_000_000_000).to_s(36)}.#{format}")
  if format == :pdf
    # PDF has a dedicated JasperReports export manager.
    _JasperPrint = Rjb.import('net.sf.jasperreports.engine.JasperPrint')
    _JasperExportManager = Rjb.import('net.sf.jasperreports.engine.JasperExportManager')
    _JasperExportManager._invoke('exportReportToPdfFile', 'Lnet.sf.jasperreports.engine.JasperPrint;Ljava.lang.String;', prepare(datasource), Rjb.import('java.lang.String').new(path.to_s))
  elsif Beardley.exporters[format]
    # Other formats use a generic exporter configured through
    # JRExporterParameter settings.
    exporter = Beardley.with_warnings { Rjb.import(Beardley.exporters[format]) }.new
    _JRExporterParameter = Rjb.import('net.sf.jasperreports.engine.JRExporterParameter')
    exporter.setParameter(_JRExporterParameter.JASPER_PRINT, prepare(datasource))
    exporter.setParameter(_JRExporterParameter.OUTPUT_FILE_NAME, path.to_s)
    exporter.exportReport
  else
    raise "Invalid export format: #{format.inspect}"
  end
  path
end
|
ruby
|
{
"resource": ""
}
|
q3121
|
Beardley.Report.to
|
train
|
# Exports the report in the given format and returns the result as a
# string, using a temp file as intermediate storage.
#
# NOTE(review): unlike #to_file, unknown formats are not guarded here —
# a nil Beardley.exporters[format] would make Rjb.import fail.
def to(format, *args)
  options = extract_options!(args)
  datasource = args[0]
  file = Tempfile.new("to_#{format}")
  exporter = Beardley.with_warnings { Rjb.import(Beardley.exporters[format]) }.new
  _JRExporterParameter = Rjb.import('net.sf.jasperreports.engine.JRExporterParameter')
  exporter.setParameter(_JRExporterParameter.JASPER_PRINT, prepare(datasource))
  exporter.setParameter(_JRExporterParameter.OUTPUT_FILE_NAME, file.path.to_s)
  exporter.exportReport
  file.rewind
  report = file.read
  # close(true) both closes and deletes the temp file immediately.
  file.close(true)
  report
end
|
ruby
|
{
"resource": ""
}
|
q3122
|
Beardley.Report.prepare
|
train
|
# Ensures the compiled .jasper object file is up to date — recompiling
# when it is missing or older than the .jrxml source — then loads the
# datasource and returns the filled report.
def prepare(datasource = nil)
  # Compile it, if needed
  if @source_file && ((!@object_file.exist? && @source_file.exist?) || (@source_file.exist? && @source_file.mtime > @object_file.mtime))
    _JasperCompileManager = Rjb.import('net.sf.jasperreports.engine.JasperCompileManager')
    _JasperCompileManager.compileReportToFile(@source_file.to_s, @object_file.to_s)
  end
  load_datasource(datasource)
end
|
ruby
|
{
"resource": ""
}
|
q3123
|
Beardley.Report.prepare_params
|
train
|
# Builds the java.util.HashMap of report parameters handed to
# JasperReports: global defaults from Beardley.config plus this
# report's own @parameters.
def prepare_params
  _HashMap = Rjb.import('java.util.HashMap')
  _JavaString = Rjb.import('java.lang.String')
  # Converting default report params to java HashMap
  params = _HashMap.new
  Beardley.config[:report_params].each do |k, v|
    params.put(k, v)
  end
  # Convert the ruby parameters' hash to a java HashMap, but keeps it as
  # default when they already represent a JRB entity.
  # Pay attention that, for now, all other parameters are converted to string!
  @parameters.each do |key, value|
    params.put(_JavaString.new(key.to_s), parameter_value_of(value))
  end
  params
end
|
ruby
|
{
"resource": ""
}
|
q3124
|
Beardley.Report.load_datasource
|
train
|
# Prepares the report parameters and, when an XML datasource string is
# given, parses it and registers it as the XPath query document; then
# fills the report.
#
# @param datasource [#to_s, nil] XML document used as report data
def load_datasource(datasource = nil)
  jasper_params = prepare_params
  # Parse and load XML as datasource
  if datasource
    _InputSource = Rjb.import('org.xml.sax.InputSource')
    _StringReader = Rjb.import('java.io.StringReader')
    _JRXmlUtils = Rjb.import('net.sf.jasperreports.engine.util.JRXmlUtils')
    _JRXPathQueryExecuterFactory = Beardley.with_warnings { Rjb.import('net.sf.jasperreports.engine.query.JRXPathQueryExecuterFactory') }
    input_source = _InputSource.new
    input_source.setCharacterStream(_StringReader.new(datasource.to_s))
    data_document = Beardley.with_warnings do
      _JRXmlUtils._invoke('parse', 'Lorg.xml.sax.InputSource;', input_source)
    end
    jasper_params.put(_JRXPathQueryExecuterFactory.PARAMETER_XML_DATA_DOCUMENT, data_document)
  end
  # Build JasperPrint
  fill_report(jasper_params, datasource)
end
|
ruby
|
{
"resource": ""
}
|
q3125
|
Beardley.Report.fill_report
|
train
|
# Fills the compiled report (.jasper object file) with data, producing
# a JasperPrint. Without a datasource an empty JR datasource is used so
# the report still renders.
def fill_report(params, datasource = nil)
  fill_manager = Rjb.import('net.sf.jasperreports.engine.JasperFillManager')
  if datasource
    fill_manager.fillReport(@object_file.to_s, params)
  else
    empty_source = Rjb.import('net.sf.jasperreports.engine.JREmptyDataSource').new
    fill_manager.fillReport(@object_file.to_s, params, empty_source)
  end
end
|
ruby
|
{
"resource": ""
}
|
q3126
|
Outbox.MessageFields.fields
|
train
|
# With no argument, returns a hash of every field's current value.
# With a hash argument, mass-assigns the given fields (delegates to
# fields=) and returns the assigned hash.
def fields(new_fields = nil)
  return self.fields = new_fields unless new_fields.nil?
  self.class.fields.each_with_object({}) do |field, hash|
    hash[field] = public_send(field)
  end
end
|
ruby
|
{
"resource": ""
}
|
q3127
|
Outbox.MessageFields.fields=
|
train
|
# Mass-assigns message fields from a hash, silently skipping keys that
# don't correspond to a field writer.
def fields=(new_fields)
  new_fields.each do |field, value|
    next unless respond_to?(field)
    public_send(field, value)
  end
end
|
ruby
|
{
"resource": ""
}
|
q3128
|
Outbox.MessageFields.validate_fields
|
train
|
# Ensures every required field has a non-empty value.
#
# @raise [Outbox::MissingRequiredFieldError] for the first field that
#   is nil or empty
def validate_fields
  self.class.required_fields.each do |field|
    value = public_send(field)
    next unless value.nil? || (value.respond_to?(:empty?) && value.empty?)
    raise Outbox::MissingRequiredFieldError, "Missing required field: #{field}"
  end
end
|
ruby
|
{
"resource": ""
}
|
q3129
|
GitFeats.Serializer.serialize
|
train
|
# Writes +data+ as JSON to +path+, creating intermediate directories
# first. The file always ends with a trailing newline (puts).
def serialize(path, data)
  mkpath_to path
  File.open(path, "w") { |f| f.puts data.to_json }
end
|
ruby
|
{
"resource": ""
}
|
q3130
|
GitFeats.Serializer.unserialize
|
train
|
# Reads and parses the JSON file at +path+.
#
# @return [Object, nil] parsed JSON, or nil when the file is absent,
#   empty, or invalid JSON (the parse error is printed, not raised)
def unserialize(path)
  # File.exists? was removed in Ruby 3.2; File.exist? is the supported
  # spelling.
  if File.exist?(path) && !File.zero?(path)
    begin
      return JSON.parse(IO.binread(path))
    rescue JSON::ParserError => e
      puts e
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q3131
|
Phonology.OrthographyTranslator.translate
|
train
|
# Translates an orthographic +string+ into a SoundSequence by
# instance_eval-ing the translator's @rules block once per character
# position. The rules read the instance state set here (@string, @index,
# etc.); that state is always cleared afterwards via ensure.
def translate(string)
  @string = string
  @max = array.length
  SoundSequence.new(array.each_index.map do |index|
    @index = index
    instance_eval(&@rules)
  end.flatten.compact)
ensure
  # Reset per-call state so a failed translation cannot leak into the
  # next call.
  @max = 0
  @string = nil
  @array = nil
  @index = nil
  @last_sound = nil
end
|
ruby
|
{
"resource": ""
}
|
q3132
|
SemanticDateTimeTags.ViewHelpers.semantic_date_time_tag
|
train
|
# Renders semantic HTML markup for the given date-time value.
def semantic_date_time_tag(date_time, options = {})
  tag = SemanticDateTimeTags::Tag::DateTime.new(date_time, options)
  tag.to_html
end
|
ruby
|
{
"resource": ""
}
|
q3133
|
SemanticDateTimeTags.ViewHelpers.semantic_time_tag
|
train
|
# Renders semantic HTML markup for the given time value.
def semantic_time_tag(time, options = {})
  tag = SemanticDateTimeTags::Tag::Time.new(time, options)
  tag.to_html
end
|
ruby
|
{
"resource": ""
}
|
q3134
|
Lumber.InheritanceRegistry.register_inheritance_handler
|
train
|
# Installs a global Object.inherited hook (once) so that any subclass
# whose name appears in Lumber::InheritanceRegistry automatically gets
# the Lumber logger concern mixed in at definition time.
def register_inheritance_handler
  synchronize do
    # Guard: only patch Object.inherited once per process.
    return if defined?(Object.inherited_with_lumber_registry)
    Object.class_eval do
      class << self
        def inherited_with_lumber_registry(subclass)
          inherited_without_lumber_registry(subclass)
          # Add a logger to 'subclass' if it is directly in the registry
          # No need to check full inheritance chain LoggerSupport handles it
          # Also prevent rails from subsequently overriding our logger when rails
          # is loaded after registering logger inheritance
          if Lumber::InheritanceRegistry[subclass.name]
            subclass.send(:include, Lumber.logger_concern)
          end
        end
        alias_method_chain :inherited, :lumber_registry
      end
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q3135
|
IndexFor.Helper.index_for
|
train
|
# Renders an index table for a collection: resolves the records and
# their model class, builds the table head and body through the column
# block, and wraps everything in the configured table tag.
def index_for objects, html_options = {}, &block
  html_options = html_options.dup
  objects = fetch_objects objects, html_options
  tag = html_options[:table_tag] || IndexFor.table_tag
  # Model class: explicit :klass option, the relation's klass, or the
  # class of the first record.
  klass = html_options[:klass] || objects.try(:klass) || objects.first.class
  html_options[:id] ||= index_for_id(klass)
  html_options[:class] = index_for_class(klass, html_options)
  head = index_for_head(klass.new, html_options, &block)
  body = index_for_body(objects, html_options, &block)
  content = head + body
  content_tag(tag, content, html_options)
end
|
ruby
|
{
"resource": ""
}
|
q3136
|
IndexFor.Helper.index_for_actions
|
train
|
# Renders action links (show/edit/destroy or a custom list) for one
# record via the configured ActionBuilder. Passing :all expands to the
# three standard actions; an optional block receives the builder for
# extra custom markup appended after the links.
def index_for_actions object, *action_names, &block
  html_options = action_names.extract_options!
  action_names = [:show, :edit, :destroy] if action_names == [:all]
  builder = html_options[:action_builder] || IndexFor::ActionBuilder
  builder = builder.new(object, html_options, self)
  content = capture(builder) do |a|
    action_names.map do |action_name|
      a.action_link action_name
    end.join.html_safe
  end
  content += capture(builder, &block) if block
  content
end
|
ruby
|
{
"resource": ""
}
|
q3137
|
IndexFor.Helper.show_for
|
train
|
# Renders a list-style detail view for a single record, yielding the
# configured column builder to the block and wrapping the captured
# content in the configured list tag.
def show_for object, html_options = {}, &block
  html_options = html_options.dup
  tag = html_options[:list_tag] || IndexFor.list_tag
  html_options[:id] ||= show_for_id(object)
  html_options[:class] = show_for_class(object, html_options)
  builder = html_options[:builder] || IndexFor::ListColumnBuilder
  content = capture(builder.new(object, html_options, self), &block)
  content_tag(tag, content, html_options)
end
|
ruby
|
{
"resource": ""
}
|
q3138
|
Brightcove.API.post
|
train
|
# Issues a Brightcove write-API call: the method name and parameters
# (with the auth token merged in) are serialized to JSON and posted as
# the :json form field.
#
# NOTE: the caller's +parameters+ hash is mutated (token merged in),
# matching the original behavior.
def post(api_method, parameters = {})
  parameters.merge!("token" => @token)
  payload = { :method => api_method, :params => parameters }
  self.class.post(@write_api_url, :body => { :json => JSON.generate(payload) })
end
|
ruby
|
{
"resource": ""
}
|
q3139
|
Hicube.ContentsController.load_resource
|
train
|
# Loads the page addressed by params[:page_id] (unscoped, so
# unpublished pages are found too) and finds-or-creates the content
# block named by params[:id] on it.
# NOTE(review): assumes params[:id] carries the content name — confirm
# against the route definition.
def load_resource
  @page = Hicube::Page.unscoped.find(params[:page_id])
  @content = @page.content.find_or_create_by(name: params[:id])
end
|
ruby
|
{
"resource": ""
}
|
q3140
|
HipChat.API.rooms_create
|
train
|
# Creates a HipChat room via the v1 rooms/create endpoint.
def rooms_create(name, owner_user_id, privacy = 'public', topic = '', guest_access = 0)
  body = {
    :auth_token => @token,
    :name => name,
    :owner_user_id => owner_user_id,
    :topic => topic,
    :privacy => privacy,
    :guest_access => guest_access
  }
  self.class.post(hipchat_api_url_for('rooms/create'), :body => body)
end
|
ruby
|
{
"resource": ""
}
|
q3141
|
HipChat.API.rooms_message
|
train
|
# Sends a message to a HipChat room via the v1 rooms/message endpoint.
def rooms_message(room_id, from, message, notify = 0, color = 'yellow', message_format = 'html')
  body = {
    :auth_token => @token,
    :room_id => room_id,
    :from => from,
    :message => message,
    :notify => notify,
    :color => color,
    :message_format => message_format
  }
  self.class.post(hipchat_api_url_for('rooms/message'), :body => body)
end
|
ruby
|
{
"resource": ""
}
|
q3142
|
HipChat.API.users_create
|
train
|
# Creates a HipChat user via the v1 users/create endpoint. Nil-valued
# fields (e.g. an unset password) are omitted from the request body.
def users_create(email, name, title, is_group_admin = 0, password = nil, timezone = 'UTC')
  body = {
    :auth_token => @token,
    :email => email,
    :name => name,
    :title => title,
    :is_group_admin => is_group_admin,
    :password => password,
    :timezone => timezone
  }.reject { |_key, value| value.nil? }
  self.class.post(hipchat_api_url_for('users/create'), :body => body)
end
|
ruby
|
{
"resource": ""
}
|
q3143
|
Markascend.Macro.parse_video
|
train
|
# Expands a \video macro ("WIDTHxHEIGHT URL") into an embed iframe for
# YouTube or Vimeo, or a thumb_watch script tag for NicoNico ("sm" ids).
# Returns nil (after env.warn) for anything it cannot parse.
def parse_video
  # standard
  unless /\A\s*(?<width>\d+)x(?<height>\d+)\s+(?<url>.+)\z/ =~ content
    env.warn 'can not parse \video content, should be "#{WIDTH}x#{HEIGHT} #{URL}"'
    return
  end
  case url
  when /youtu\.?be/
    # NOTE merging them into one regexp fails (because longest match?)
    unless id = url[/(?<=watch\?v=)\w+/] || url[/(?<=embed\/)\w+/] || url[/(?<=youtu\.be\/)\w+/]
      env.warn 'can not parse youtube id'
      return
    end
    %Q|<iframe width="#{width}" height="#{height}" src="https://www.youtube-nocookie.com/embed/#{id}?rel=0" frameborder="0" allowfullscreen></iframe>|
  when /vimeo/
    unless id = url[/(?<=vimeo\.com\/)\w+/]
      env.warn 'can not parse vimeo id, should use link like this "http://vimeo.com/#{DIGITS}"'
      return
    end
    %Q|<iframe width="#{width}" height="#{height}" src="https://player.vimeo.com/video/#{id}" frameborder="0" allowFullScreen></iframe>|
  when /sm/
    unless id = url[/\bsm\d+/]
      env.warn 'can not find "sm#{DIGITS}" from link'
      return
    end
    # NOTE(review): the trailing '"' after </script> looks like a stray
    # quote in the emitted HTML — confirm whether it is intentional.
    %Q|<script src="https://ext.nicovideo.jp/thumb_watch/#{id}?w=#{width}&h=#{height}"></script>"|
  else
    env.warn 'failed to parse video link, currently only youtube, vimeo and niconico are supported'
    return
  end
end
|
ruby
|
{
"resource": ""
}
|
q3144
|
KTree.KTree.create_tree
|
train
|
# Builds the root node spanning [vlower, vupper] at the configured
# depth and recursively creates its children, forwarding the block to
# the child factory.
def create_tree(vupper, vlower, &block)
  @root = Node.new(vupper, vlower, @depth)
  @root.create_children(&block)
end
|
ruby
|
{
"resource": ""
}
|
q3145
|
AsiBod.Asi.array_data_to_hash
|
train
|
# Indexes raw node hashes by their integer 'Address' field, cleaning
# each node on the way in. Later duplicate addresses overwrite earlier
# ones.
def array_data_to_hash(array_data)
  array_data.map { |node| [node['Address'].to_i, clean_node(node)] }.to_h
end
|
ruby
|
{
"resource": ""
}
|
q3146
|
Lumber.LevelUtil.start_monitor
|
train
|
# Spawns a background thread that re-applies configured log levels
# every +interval+ seconds until its should_exit flag is set (which
# happens automatically at process exit). Returns the thread.
def start_monitor(interval=10)
  t = MonitorThread.new do
    loop do
      break if Thread.current.should_exit
      begin
        activate_levels
      rescue => e
        # Keep the monitor alive on errors; just report them.
        $stderr.puts "Failure activating log levels: #{e}"
      end
      sleep interval
    end
  end
  at_exit { t.should_exit = true }
  t
end
|
ruby
|
{
"resource": ""
}
|
q3147
|
Lumber.LevelUtil.backup_levels
|
train
|
# Snapshots the current level of each named Log4r outputter (or, when
# no outputter exists, the logger of that name) so restore_levels can
# undo later overrides. ||= ensures only the original, pre-override
# level is ever recorded.
def backup_levels(loggers)
  synchronize do
    loggers.each do |name|
      outputter = Log4r::Outputter[name]
      if outputter
        @original_outputter_levels[name] ||= outputter.level
      else
        logger = Lumber.find_or_create_logger(name)
        # only store the old level if we haven't overriden it's logger yet
        @original_levels[name] ||= logger.level
      end
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q3148
|
Lumber.LevelUtil.restore_levels
|
train
|
# Restores every outputter and logger level recorded by backup_levels,
# then clears the backups. Levels are only written when they actually
# differ, avoiding needless Log4r churn.
def restore_levels
  synchronize do
    @original_outputter_levels.each do |name, level|
      outputter = Log4r::Outputter[name]
      outputter.level = level if outputter.level != level
    end
    @original_outputter_levels.clear
    @original_levels.each do |name, level|
      logger = Lumber.find_or_create_logger(name)
      logger.level = level if logger.level != level
    end
    @original_levels.clear
  end
end
|
ruby
|
{
"resource": ""
}
|
q3149
|
GAAPI.Row.method_missing
|
train
|
# Dynamically exposes each report dimension and metric as a reader
# method. On first call the reader is memoized via
# define_singleton_method, so subsequent calls bypass method_missing.
def method_missing(method, *args)
  if (i = dimension_method_names.find_index(method))
    define_singleton_method(method) do
      dimensions[i]
    end
    send(method)
  elsif (i = metric_method_names.find_index(method))
    define_singleton_method(method) do
      convert_metric(i)
    end
    send(method)
  else
    super
  end
end

# Pair method_missing with respond_to_missing? so respond_to? and
# method(:name) agree with the dynamic readers above.
def respond_to_missing?(method, include_private = false)
  dimension_method_names.include?(method) ||
    metric_method_names.include?(method) ||
    super
end
|
ruby
|
{
"resource": ""
}
|
q3150
|
GAAPI.Row.convert_metric
|
train
|
# Converts the i-th raw metric string into a Ruby value based on the
# metric type reported by Google Analytics.
def convert_metric(i)
  case report.metric_type(i)
  when "INTEGER"
    # INTEGER Integer metric.
    metrics[i].to_i
  when "FLOAT", "PERCENT"
    # FLOAT Float metric.
    # PERCENT Percentage metric.
    metrics[i].to_f
  when "CURRENCY"
    # CURRENCY Currency metric.
    # TODO: Do this better.
    metrics[i].to_f
  when "TIME"
    # Google documentation claims to following:
    # TIME Time metric in HH:MM:SS format.
    # It also says it's seconds, and that's what I see in real results.
    # So comment out the following:
    # (metrics[i][0..1].to_i +
    # metrics[i][3..4].to_i * 60 +
    # metrics[i][6..7].to_i * 24 * 60)
    # Simply make it a float.
    metrics[i].to_f
  else
    # METRIC_TYPE_UNSPECIFIED Metric type is unspecified.
    # Fixed: was `metric[i]` (undefined name, raised NameError).
    metrics[i]
  end
end
|
ruby
|
{
"resource": ""
}
|
q3151
|
MiniTerm.Mapper.[]=
|
train
|
# Maps one index (or every index in a Range) to +value+. Each index is
# first checked for prefix ambiguity; an already-mapped index raises.
def []=(indexes, value)
  targets = indexes.is_a?(Range) ? indexes : [indexes]
  targets.each do |index|
    process_non_terminals(index)
    fail MiniTermKME, "Duplicate entry #{index.inspect}" if @map.has_key?(index)
    @map[index] = [value, index]
  end
end
|
ruby
|
{
"resource": ""
}
|
q3152
|
MiniTerm.Mapper.process_non_terminals
|
train
|
# Marks every proper prefix of +index+ as a non-terminal (false) in the
# map. A prefix that already maps to a real entry makes the new index
# ambiguous and raises.
def process_non_terminals(index)
  prefix = ""
  index.chop.each_char do |char|
    prefix += char
    fail MiniTermKME, "Ambiguous entry #{index.inspect}" if @map.has_key?(prefix) && @map[prefix]
    @map[prefix] = false
  end
end
|
ruby
|
{
"resource": ""
}
|
q3153
|
Malcolm.SOAPBuilder.wrap
|
train
|
# Wraps +data+ (an XML string, or a Hash converted through Gyoku) in a
# SOAP 1.1 envelope. Blank data produces an empty <env:Body>.
def wrap(data)
  envelope = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><env:Envelope xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"><env:Body>"
  unless data.blank?
    envelope << (data.is_a?(Hash) ? Gyoku.xml(data) : data)
  end
  envelope << "</env:Body></env:Envelope>"
end
|
ruby
|
{
"resource": ""
}
|
q3154
|
List.Matcher.bud
|
train
|
# Returns a child ("bud") matcher: a copy of this matcher's current
# configuration with +opts+ overrides merged on top. The vet flag is
# only carried over when the caller also supplies symbols to vet
# against.
def bud(opts={})
  opts = {
    atomic: @atomic,
    backtracking: @backtracking,
    bound: @_bound,
    strip: @strip,
    case_insensitive: @case_insensitive,
    multiline: @multiline,
    not_extended: @not_extended,
    normalize_whitespace: @normalize_whitespace,
    symbols: @symbols,
    name: @name,
    vet: @vet && opts[:symbols]
  }.merge opts
  self.class.new(**opts)
end
|
ruby
|
{
"resource": ""
}
|
q3155
|
List.Matcher.pattern
|
train
|
# Compiles +list+ into a single regex source string that matches any
# item. Returns '(?!)' (never matches) for an empty list, and nil when
# every item normalizes away to an empty string.
#
# NOTE(review): `grouped` is only assigned inside the modifier/name
# branches; elsewhere it is nil (falsy), which the final guard relies
# on.
def pattern( list, opts={} )
  return '(?!)' unless list.any?
  # Per-call options are handled by delegating to a configured bud.
  return bud(opts).pattern list unless opts.empty?
  list = list.compact.map(&:to_s).select{ |s| s.length > 0 }
  list.map!(&:strip).select!{ |s| s.length > 0 } if strip
  list.map!{ |s| s.gsub %r/\s++/, ' ' } if normalize_whitespace
  return nil if list.empty?
  specializer = Special.new self, @symbols, list
  list = specializer.normalize
  root = tree list, specializer
  root.root = true
  root.flatten
  rx = root.convert
  # Wrap with inline modifiers (e.g. i/m) when any are active.
  if m = modifiers
    rx = "(?#{m}:#{rx})"
    grouped = true
  end
  if name
    rx = "(?<#{name}>#{rx})"
    grouped = true
  end
  return rx if grouped && backtracking
  if atomic && !root.atomic?
    wrap rx
  else
    rx
  end
end
|
ruby
|
{
"resource": ""
}
|
q3156
|
Missinglink.SurveyQuestion.possible_responses
|
train
|
# Builds a {display text => survey_response_answer id} hash of the
# distinct answers given to this question, formatted according to the
# question's answer_strategy. +search_other+ additionally surfaces
# free-text "Other" responses for the row/column strategies. Duplicate
# texts keep the id of the first response encountered.
def possible_responses(search_other = false)
  {}.tap do |hash|
    survey_response_answers.each do |sra|
      # Resolve the answers referenced by row / column / column-choice
      # foreign keys (nil when absent).
      sa_row = (sra.row_survey_answer_id ? SurveyAnswer.find(sra.row_survey_answer_id) : nil)
      sa_col = (sra.col_survey_answer_id ? SurveyAnswer.find(sra.col_survey_answer_id) : nil)
      sa_col_choice = (sra.col_choice_survey_answer_id ? SurveyAnswer.find(sra.col_choice_survey_answer_id) : nil)
      case answer_strategy
      when "first_survey_response_answer_text"
        hash[sra.text] = sra.id unless (sra.text.nil? || hash[sra.text])
      when "answer_row_for_subquestion"
        other_text = (sra.text.nil? ? nil : "#{ (sa_row.try(:text) || "Other") }: #{ sra.text }")
        hash[other_text] = sra.id unless (other_text.nil? || hash[other_text])
      when "answer_row_for_response"
        other_text = ((!search_other || sra.text.nil?) ? nil : "#{ (sa_row.try(:text) || "Other") }: #{ sra.text }")
        hash[sa_row.text] = sra.id unless (sa_row.nil? || hash[sa_row.text])
        hash[other_text] = sra.id unless (other_text.nil? || hash[other_text])
      when "answer_row_and_column_for_response"
        main_text = "#{ sa_row.try(:text) }: #{ sa_col.try(:text) }"
        other_text = ((!search_other || sra.text.nil? || !sa_row.nil?) ? nil : "Other: #{ sra.text }")
        hash[main_text] = sra.id unless (sa_row.nil? || sa_col.nil? || hash[main_text])
        hash[other_text] = sra.id unless (other_text.nil? || hash[other_text])
      when "answer_row_column_choice_for_response"
        main_text = "#{ sa_row.try(:text) }, #{ sa_col.try(:text) }: #{ sa_col_choice.try(:text) }"
        other_text = ((!search_other || sra.text.nil? || !sa_row.nil?) ? nil : "Other: #{ sra.text }")
        hash[main_text] = sra.id unless (sa_row.nil? || sa_col.nil? || sa_col_choice.nil? || hash[main_text])
        hash[other_text] = sra.id unless (other_text.nil? || hash[other_text])
      end
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q3157
|
Squash.Uploader.http_post
|
train
|
# POSTs each body in +bodies+ to +url+ over one persistent HTTP(S)
# session with the given headers.
#
# @param url [String] destination URL (https scheme enables TLS)
# @param headers [Hash] header name => value added to every request
# @param bodies [Enumerable<String>] request bodies, sent in order
# @raise [RuntimeError] when a response matches none of
#   options[:success] (classes, integer codes, or code strings)
# @raise [ArgumentError] for unsupported options[:success] entries
def http_post(url, headers, bodies)
  uri  = URI.parse(url)
  http = Net::HTTP.new(uri.host, uri.port)
  http.use_ssl      = (uri.scheme == 'https')
  http.open_timeout = options[:open_timeout]
  http.read_timeout = options[:read_timeout]
  http.verify_mode  = OpenSSL::SSL::VERIFY_NONE if options[:skip_verification]
  http.start do |session|
    bodies.each do |body|
      request = Net::HTTP::Post.new(uri.request_uri)
      headers.each { |k, v| request.add_field k, v }
      request.body = body
      response = session.request(request)
      # NOTE: Fixnum was removed in Ruby 3.2; match numeric success
      # codes against Integer instead.
      if options[:success].none? { |cl|
           if cl.kind_of?(Class)
             response.kind_of?(cl)
           elsif cl.kind_of?(Integer) || cl.kind_of?(String)
             response.code.to_i == cl.to_i
           else
             raise ArgumentError, "Unknown :success value #{cl}"
           end
         }
        raise "Unexpected response from Squash host: #{response.code}"
      end
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q3158
|
Modernize.Modernizer.translate
|
train
|
# Migrates +hash+ through the registered version translations: :first
# translations, then every version from the hash's computed version in
# migration order, then :last translations. Returns the migrated hash.
#
# @param context [Hash] instance variables made available to the
#   version/translation blocks (must not contain :hash)
# @param hash [Hash] the document to migrate
# @raise [ArgumentError] for a non-Hash context or a reserved :hash key
# @raise [StandardError] when the computed version is not a valid
#   Gem::Version string
def translate(context, hash)
  # makes sure that the context is a hash
  raise ArgumentError.new('did not pass a hash for the context') unless context.is_a?(Hash)
  raise ArgumentError.new('cannot provide include hash in context') if context[:hash]
  # create the context instance for instance variables
  struct = StructContext.new(context, hash)
  # instantiate MapMethods to perform translations and define lambda
  # for how to tranlate a field
  #
  translate = lambda { |t|
    MapMethods.send(t[:name], struct, t[:field], t[:block])
  }
  # determine the version of the incoming hash
  #
  struct_version = struct.instance_exec(&@migrations.version)
  raise StandardError.new('calculated version is not valid') unless Gem::Version.correct?(struct_version)
  # gets a list of the potential versions
  #
  migration_versions = @migrations.translations.keys
  migration_versions.delete(:first)
  migration_versions.delete(:last)
  # get the first and last translations
  #
  firsts = @migrations.translations[:first]
  lasts = @migrations.translations[:last]
  # sorts the versions
  #
  migration_versions.sort! do |x,y|
    Gem::Version.new(x) <=> Gem::Version.new(y)
  end
  # reverse order if descending was specified
  #
  migration_versions = @migrations.order == :descending ? migration_versions.reverse : migration_versions
  # run the first translations if they exist
  #
  firsts.each(&translate) if firsts
  # determine the first version to run translations
  #
  first_index = @migrations.order == :ascending ? migration_versions.find_index(struct_version) : nil
  last_index = @migrations.order == :descending ? migration_versions.find_index(struct_version) : nil
  # run all subsequent version translations
  #
  migration_versions.each_with_index do |version, index|
    next unless !first_index || index >= first_index
    next unless !last_index || index <= last_index
    @migrations.translations[version].each(&translate)
  end
  # run the last translations if they exist
  #
  lasts.each(&translate) if lasts
  # return hash
  #
  struct.hash
end
|
ruby
|
{
"resource": ""
}
|
q3159
|
Hoiio.RequestUtil.check_nil_or_empty
|
train
|
# Validates that each required param is present and non-empty.
#
# @raise [Hoiio::RequestError] naming the first missing param
def check_nil_or_empty(required_param_names=[], params)
  required_param_names.each do |name|
    value = params[name]
    if value.nil? || value.empty?
      raise Hoiio::RequestError.new "Param " << name << " is missing"
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q3160
|
Hoiio.RequestUtil.check_for_mutual_exclusivity
|
train
|
# Checks that exactly one of a set of mutually exclusive params is set.
#
# @raise [Hoiio::RequestError] when none, or more than one, are present
def check_for_mutual_exclusivity(required_param_names=[], params)
  present = required_param_names.count do |name|
    !params[name].nil? && !params[name].empty?
  end
  if present == 0
    raise Hoiio::RequestError.new "All required params are missing"
  elsif present > 1
    raise Hoiio::RequestError.new "More than 1 required, mutually exclusive param are present."
  end
end
|
ruby
|
{
"resource": ""
}
|
q3161
|
Generator.Context.render_partial
|
train
|
# Renders a Haml partial from the input folder's partials/ directory.
# When a scope (primary template name) is set, a scoped variant
# ("<scope>_<name>.haml") is preferred over the default partial.
# Returns nil when no matching partial file exists.
#
# NOTE(review): rescues Exception to re-raise with the partial name;
# consider narrowing to StandardError.
def render_partial(file_name)
  # The "default" version of the partial.
  file_to_render = "#{@input_folder}/partials/#{file_name.to_s}.haml"
  if @scope
    # Look for a partial prefixed with the current "scope" (which is just the name of the
    # primary template being rendered).
    scope_file = "#{@input_folder}/partials/#{@scope.to_s}_#{file_name.to_s}.haml"
    # File.exists? was removed in Ruby 3.2; use File.exist?.
    file_to_render = scope_file if File.exist? scope_file
  end
  # If we found a matching partial (either the scoped one or the default), render it now.
  if File.exist? file_to_render
    partial = Haml::Engine.new(File.read(file_to_render), @options)
    partial.render self
  else
    nil
  end
rescue Exception
  raise $!, "#{$!} PARTIAL::#{file_name} ", $!.backtrace
end
|
ruby
|
{
"resource": ""
}
|
q3162
|
ShakeTheCounter.Client.access_token
|
train
|
# Lazily fetches (and memoizes) an OAuth access token by exchanging the
# client's refresh token. NOTE: ||= means a nil result would trigger a
# re-fetch on the next call.
def access_token
  @access_token ||= ShakeTheCounter::Authentication.renew_access_token(client_id: id, client_secret: secret, refresh_token: refresh_token)["access_token"]
end
|
ruby
|
{
"resource": ""
}
|
q3163
|
ShakeTheCounter.Client.call
|
train
|
# Performs an authenticated API call: injects the Bearer token into the
# (caller-supplied, mutated) header hash and delegates to the API
# client.
def call(path, http_method: :get, body: {}, header: {})
  header[:authorization] = "Bearer #{access_token}"
  ShakeTheCounter::API.call(path, http_method: http_method, body: body, header: header)
end
|
ruby
|
{
"resource": ""
}
|
q3164
|
ShakeTheCounter.Client.start_payment
|
train
|
# Starts the payment flow for a reservation.
#
# @return [true] when the API responds with HTTP 200
# @raise [ShakeTheCounterError] on any other response code
def start_payment(reservation_key)
  response = call("reservation/#{reservation_key}/payment", http_method: :post)
  return true if response.code.to_i == 200
  raise ShakeTheCounterError.new "Payment failed"
end
|
ruby
|
{
"resource": ""
}
|
q3165
|
MyJohnDeere.APISupportItem.to_hash
|
train
|
def to_hash()
ret_hash = {}
self.class.json_attributes.each do |attrib|
ret_hash[attrib] = self.send(attrib.to_s.underscore)
end
return ret_hash
end
|
ruby
|
{
"resource": ""
}
|
q3166
|
UserInput.Prompt.ask
|
train
|
# Prompts the user and returns the first valid answer. Echo is
# suppressed for secret prompts and always restored via ensure.
# Invalid input bumps the retry counter (check_counter may raise) and
# re-prompts recursively.
def ask
  @fd.print full_message
  disable_echo if @secret
  input = _ask
  return input if valid(input)
  check_counter
  ask
ensure
  enable_echo if @secret
end
|
ruby
|
{
"resource": ""
}
|
q3167
|
UserInput.Prompt.valid
|
train
|
# Returns whether +input+ satisfies the configured validation.
#
# With no @validation set, all input is accepted. Otherwise the first
# VALIDATIONS entry whose class matches @validation (presumably a map of
# validator class => method name — verify) decides which method to invoke
# on the validator. Raises for unsupported validator types.
def valid(input)
  return true unless @validation
  _, method = VALIDATIONS.find { |klass, _| @validation.is_a? klass }
  return @validation.send(method, input) if method
  raise "Supported validation type not provided #{@validation.class}"
end
|
ruby
|
{
"resource": ""
}
|
q3168
|
UserInput.Prompt._ask
|
train
|
# Reads one line from STDIN, substituting the default for blank input.
# When echo is suppressed for secret entry, prints a newline so the
# terminal cursor advances past the hidden input.
def _ask
  typed = STDIN.gets.chomp
  typed = @default if typed.empty? && @default
  @fd.puts if @secret
  typed
end
|
ruby
|
{
"resource": ""
}
|
q3169
|
Hyperb.Utils.check_arguments
|
train
|
# Checks that every required argument name is present as a key in +params+.
#
# @param params [Hash] keyword-style parameter hash with symbol keys
# @param args [Array<#to_sym>] names of required arguments
# @return [Boolean] true when all required keys are present
def check_arguments(params, *args)
  # `all?` replaces the original manual flag accumulation and
  # short-circuits on the first missing key.
  args.all? { |arg| params.key?(arg.to_sym) }
end
|
ruby
|
{
"resource": ""
}
|
q3170
|
Hyperb.Utils.prepare_json
|
train
|
# Recursively rewrites a Hash's keys via +camelize+ (defined elsewhere in
# this module — presumably snake_case to camelCase; verify).
#
# Nested Hash values are converted in depth; all other values are copied
# through unchanged.
#
# @param params [Hash] input hash
# @return [Hash] new hash with converted keys
def prepare_json(params = {})
  json = {}
  params.each do |key, value|
    value = prepare_json(value) if value.is_a?(Hash)
    json[camelize(key)] = value
  end
  json
end
|
ruby
|
{
"resource": ""
}
|
q3171
|
Tily.TileSystem.each_tile
|
train
|
# Yields every (x, y) tile coordinate at the given zoom level, row by row.
# Without a block the loops still run but yield nothing.
def each_tile level
  dim = tile_size level
  (0...dim).each do |row|
    (0...dim).each do |col|
      yield(col, row) if block_given?
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q3172
|
Tily.TileSystem.each_tile_with_index
|
train
|
# Yields every (x, y) tile coordinate plus a running linear index.
# The index advances for every visited tile even when no block is given.
def each_tile_with_index level
  counter = 0
  dim = tile_size level
  (0...dim).each do |row|
    (0...dim).each do |col|
      yield(col, row, counter) if block_given?
      counter += 1
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q3173
|
SimplePoParser.Parser.parse
|
train
|
# Parses a single PO-file message entry into a result hash.
#
# Strips the message, wraps it in a StringScanner, and hands off to the
# recursive-descent entry point +lines+. On failure the error is re-raised
# with the partial parse result and a backtrace filtered to this library.
#
# @param message [String] one PO message entry
# @return [Hash] parsed parts keyed by element (:msgid, :msgstr, ...)
# @raise [ParserError] on any syntax error
def parse(message)
  @result = {}
  @scanner = StringScanner.new(message.strip)
  begin
    lines
  rescue ParserError => pe
    error_msg = "SimplePoParser::ParserError"
    error_msg += pe.message
    # Fixed typo in the diagnostic: "Parseing" -> "Parsing".
    error_msg += "\nParsing result before error: '#{@result}'"
    error_msg += "\nSimplePoParser filtered backtrace: SimplePoParser::ParserError"
    backtrace = "#{pe.backtrace.select{|i| i =~ /lib\/simple_po_parser/}.join("\n\tfrom ")}"
    raise ParserError, error_msg, backtrace
  end
  @result
end
|
ruby
|
{
"resource": ""
}
|
q3174
|
SimplePoParser.Parser.msgctxt
|
train
|
# Parses an optional msgctxt entry, then continues with the mandatory msgid.
#
# When the scanner sits on "msgctxt", consumes it and records the message
# text under :msgctxt. An empty first line signals the multiline form
# ("" followed by continuation lines). Syntax errors are re-wrapped with
# this rule's name for context.
def msgctxt
  begin
    if @scanner.scan(/msgctxt/)
      skip_whitespace
      text = message_line
      add_result(:msgctxt, text)
      message_multiline(:msgctxt) if text.empty?
    end
    msgid
  rescue PoSyntaxError => pe
    raise PoSyntaxError, "Syntax error in msgctxt\n" + pe.message, pe.backtrace
  end
end
|
ruby
|
{
"resource": ""
}
|
q3175
|
SimplePoParser.Parser.msgid
|
train
|
# Parses the mandatory msgid entry and dispatches to the right msgstr form.
#
# Records the (possibly multiline) msgid text, then parses either the
# plural chain (msgid_plural followed by msgstr[N]) or the singular msgstr.
# A message without msgid is a syntax error.
def msgid
  begin
    if @scanner.scan(/msgid/)
      skip_whitespace
      text = message_line
      add_result(:msgid, text)
      message_multiline(:msgid) if text.empty?
      if msgid_plural
        msgstr_plural
      else
        msgstr
      end
    else
      err_msg = "Message without msgid is not allowed."
      # Fixed: the concatenated sentences previously ran together without a
      # separating space ("allowed.The Line started...").
      err_msg += " The line started unexpectedly with #{@scanner.peek(10).inspect}."
      raise PoSyntaxError, err_msg
    end
  rescue PoSyntaxError => pe
    raise PoSyntaxError, "Syntax error in msgid\n" + pe.message, pe.backtrace
  end
end
|
ruby
|
{
"resource": ""
}
|
q3176
|
SimplePoParser.Parser.msgid_plural
|
train
|
# Parses an optional msgid_plural entry.
#
# Records the (possibly multiline) plural msgid text when present.
#
# @return [Boolean] true when a msgid_plural was consumed (the message is
#   plural and msgstr[N] entries are expected), false otherwise
def msgid_plural
  begin
    if @scanner.scan(/msgid_plural/)
      skip_whitespace
      text = message_line
      add_result(:msgid_plural, text)
      message_multiline(:msgid_plural) if text.empty?
      true
    else
      false
    end
  rescue PoSyntaxError => pe
    # Fixed: the rescue context previously said "msgid", mislabeling
    # failures that actually occurred while parsing msgid_plural.
    raise PoSyntaxError, "Syntax error in msgid_plural\n" + pe.message, pe.backtrace
  end
end
|
ruby
|
{
"resource": ""
}
|
q3177
|
SimplePoParser.Parser.msgstr
|
train
|
# Parses the mandatory singular msgstr entry and enforces end-of-message.
#
# Records the (possibly multiline) translation under :msgstr, then
# requires the scanner to be fully consumed — any trailing content is a
# syntax error, as is a singular message with no msgstr at all.
def msgstr
  begin
    if @scanner.scan(/msgstr/)
      skip_whitespace
      text = message_line
      add_result(:msgstr, text)
      message_multiline(:msgstr) if text.empty?
      skip_whitespace
      raise PoSyntaxError, "Unexpected content after expected message end #{@scanner.peek(10).inspect}" unless @scanner.eos?
    else
      raise PoSyntaxError, "Singular message without msgstr is not allowed. Line started unexpectedly with #{@scanner.peek(10).inspect}."
    end
  rescue PoSyntaxError => pe
    raise PoSyntaxError, "Syntax error in msgstr\n" + pe.message, pe.backtrace
  end
end
|
ruby
|
{
"resource": ""
}
|
q3178
|
SimplePoParser.Parser.msgstr_plural
|
train
|
# Recursively parses msgstr[N] entries, which must appear in 0-based order.
#
# Each matched entry records its (possibly multiline) text under the
# literal "msgstr[N]" key and recurses for N+1. A plural message without
# msgstr[0], an out-of-order index, or content after the final entry is a
# syntax error. Note the /\d/ in the key pattern limits indices to 0-9.
def msgstr_plural(num = 0)
  begin
    msgstr_key = @scanner.scan(/msgstr\[\d\]/) # matches 'msgstr[0]' to 'msgstr[9]'
    if msgstr_key
      # msgstr plurals must come in 0-based index in order
      msgstr_num = msgstr_key.match(/\d/)[0].to_i
      raise PoSyntaxError, "Bad 'msgstr[index]' index." if msgstr_num != num
      skip_whitespace
      text = message_line
      add_result(msgstr_key, text)
      message_multiline(msgstr_key) if text.empty?
      msgstr_plural(num+1)
    elsif num == 0 # and msgstr_key was false
      raise PoSyntaxError, "Plural message without msgstr[0] is not allowed. Line started unexpectedly with #{@scanner.peek(10).inspect}."
    else
      raise PoSyntaxError, "End of message was expected, but line started unexpectedly with #{@scanner.peek(10).inspect}" unless @scanner.eos?
    end
  rescue PoSyntaxError => pe
    raise PoSyntaxError, "Syntax error in msgstr_plural\n" + pe.message, pe.backtrace
  end
end
|
ruby
|
{
"resource": ""
}
|
q3179
|
SimplePoParser.Parser.previous_comments
|
train
|
# Parses a previous-entry comment ("#| msgctxt", "#| msgid" or
# "#| msgid_plural") — the "#| " prefix has already been consumed by the
# caller. Determines the entry type, records its (possibly multiline)
# text under the matching :previous_* key, and raises on anything else.
def previous_comments
  begin
    # next part must be msgctxt, msgid or msgid_plural
    if @scanner.scan(/msg/)
      if @scanner.scan(/id/)
        # "msgid" already matched; a trailing "_plural" refines the key.
        if @scanner.scan(/_plural/)
          key = :previous_msgid_plural
        else
          key = :previous_msgid
        end
      elsif @scanner.scan(/ctxt/)
        key = :previous_msgctxt
      else
        raise PoSyntaxError, "Previous comment type #{("msg" + @scanner.peek(10)).inspect} unknown."
      end
      skip_whitespace
      text = message_line
      add_result(key, text)
      previous_multiline(key) if text.empty?
    else
      raise PoSyntaxError, "Previous comments must start with '#| msg'. #{@scanner.peek(10).inspect} unknown."
    end
  rescue PoSyntaxError => pe
    raise PoSyntaxError, "Syntax error in previous_comments\n" + pe.message, pe.backtrace
  end
end
|
ruby
|
{
"resource": ""
}
|
q3180
|
SimplePoParser.Parser.previous_multiline
|
train
|
# Accumulates continuation lines of a multiline previous-entry comment,
# appending each line's text under +key+ until a non-matching line is hit.
def previous_multiline(key)
  begin
    # scan multilines until no further multiline is hit
    # /#\|\p{Blank}"/ needs to catch the double quote to ensure it hits a previous
    # multiline and not another line type.
    if @scanner.scan(/#\|\p{Blank}*"/)
      @scanner.pos = @scanner.pos - 1 # go one character back, so we can reuse the "message line" method
      add_result(key, message_line)
      previous_multiline(key) # go on until we no longer hit a multiline line
    end
  rescue PoSyntaxError => pe
    raise PoSyntaxError, "Syntax error in previous_multiline\n" + pe.message, pe.backtrace
  end
end
|
ruby
|
{
"resource": ""
}
|
q3181
|
SimplePoParser.Parser.message_multiline
|
train
|
# Accumulates continuation lines of a multiline message text.
#
# After skipping whitespace, any line opening with a double quote is read
# as another message line and appended under +key+; recursion stops at the
# first line that does not start with '"'.
def message_multiline(key)
  begin
    skip_whitespace
    # check (not scan): peek at the quote without consuming it, so
    # message_line can re-read it as the start of the text.
    if @scanner.check(/"/)
      add_result(key, message_line)
      message_multiline(key)
    end
  rescue PoSyntaxError => pe
    raise PoSyntaxError, "Syntax error in message_multiline with key '#{key}'\n" + pe.message, pe.backtrace
  end
end
|
ruby
|
{
"resource": ""
}
|
q3182
|
SimplePoParser.Parser.message_line
|
train
|
# Reads one double-quoted message text from the scanner.
#
# Expects the scanner to sit on an opening '"'; consumes the quoted text,
# requires a closing '"', and allows only whitespace to the end of line.
#
# @return [String] the text between the quotes (without the quotes)
# @raise [PoSyntaxError] on a missing quote or trailing content
def message_line
  begin
    if @scanner.getch == '"'
      text = message_text
      unless @scanner.getch == '"'
        err_msg = "The message text '#{text}' must be finished with the double quote character '\"'."
        raise PoSyntaxError, err_msg
      end
      skip_whitespace
      unless end_of_line
        err_msg = "There should be only whitespace until the end of line"
        err_msg += " after the double quote character of a message text."
        raise PoSyntaxError.new(err_msg)
      end
      text
    else
      # Un-consume the character getch took so the error shows the actual
      # unexpected content.
      @scanner.pos = @scanner.pos - 1
      err_msg = "A message text needs to start with the double quote character '\"',"
      err_msg += " but this was found: #{@scanner.peek(10).inspect}"
      raise PoSyntaxError, err_msg
    end
  rescue PoSyntaxError => pe
    raise PoSyntaxError, "Syntax error in message_line\n" + pe.message, pe.backtrace
  end
end
|
ruby
|
{
"resource": ""
}
|
q3183
|
SimplePoParser.Parser.add_result
|
train
|
# Stores +text+ under +key+ in the parse result, promoting a repeated key
# to an Array so multiline entries accumulate in order.
def add_result(key, text)
  existing = @result[key]
  if existing
    if existing.is_a?(Array)
      existing << text
    else
      @result[key] = [existing, text]
    end
  else
    @result[key] = text
  end
end
|
ruby
|
{
"resource": ""
}
|
q3184
|
SocialSnippet.Resolvers::BaseResolver.resolve_tag_repo_ref!
|
train
|
# Resolves a snippet tag's repository reference in place.
#
# Tags without an explicit ref receive the repository's latest package
# version (or its current ref when no packaged versions exist). Tags whose
# ref the repository does not know are treated as version patterns and
# resolved to the latest matching version.
#
# @param tag [#has_repo?, #has_ref?, #repo, #ref, #set_ref] snippet tag
# @raise [RuntimeError] when a ref pattern matches no version
def resolve_tag_repo_ref!(tag)
  return unless tag.has_repo?
  repo = core.repo_manager.find_repository(tag.repo)
  # NOTE(review): replaced the original `tag.has_ref? === false` with
  # `unless tag.has_ref?` — assumes has_ref? returns true/false, not nil.
  unless tag.has_ref?
    # set latest version
    if repo.has_package_versions?
      tag.set_ref repo.latest_package_version
    else
      tag.set_ref repo.current_ref
    end
    return
  end
  unless repo.has_ref?(tag.ref)
    new_ref = repo.latest_version(tag.ref)
    # Fixed: the bare `raise "error"` gave no indication of what failed.
    raise "cannot resolve ref #{tag.ref.inspect} in repository #{tag.repo.inspect}" if new_ref.nil?
    tag.set_ref new_ref
  end
end
|
ruby
|
{
"resource": ""
}
|
q3185
|
Featureflow.EventsClient.register_features
|
train
|
# Registers the given features with the Featureflow API on a background
# thread.
#
# @param with_features [Array<Hash>] feature hashes with :key, :variants
#   and :failover_variant entries
# @return [Thread] the background registration thread
def register_features(with_features)
  Thread.new do
    # Fixed: the original wrote `features = with_features.each { ... }`,
    # which reassigned `features` to the *unmapped* input (each returns
    # its receiver), discarding the camelCase payload built in the block.
    features = with_features.map do |feature|
      { key: feature[:key],
        variants: feature[:variants],
        failoverVariant: feature[:failover_variant] }
    end
    send_event 'Register Features', :put, '/api/sdk/v1/register', features
  end
end
|
ruby
|
{
"resource": ""
}
|
q3186
|
HidApi.DeviceInfo.each
|
train
|
# Walks the linked list of device-info structs, yielding each node.
# Returns an Enumerator when no block is given.
def each
  return enum_for(:each) unless block_given?
  node = self
  until node.null?
    yield node
    node = node.next
  end
end
|
ruby
|
{
"resource": ""
}
|
q3187
|
ProbeDockCucumber.Formatter.comment_line
|
train
|
# Captures a Probe Dock annotation from a Cucumber comment line.
#
# Before the feature body has started, annotations belong to the feature
# itself; afterwards they apply to the upcoming scenario.
def comment_line(comment)
  # Take care of annotation only if matched
  return unless comment.match(ProbeDockProbe::Annotation::ANNOTATION_REGEXP)
  annotation = ProbeDockProbe::Annotation.new(comment)
  if @current_feature_started
    @annotation = annotation
  else
    @feature_annotation = annotation
  end
end
|
ruby
|
{
"resource": ""
}
|
q3188
|
NetworkUtils.UrlInfo.is?
|
train
|
# Checks whether any of the resource's Content-Type entries starts with
# any of the expected type prefixes.
#
# @param type [String, Symbol, Array] one or more content-type prefixes
#   (e.g. "image", "text/html")
# @return [Boolean]
def is?(type)
  return false if type.to_s.empty?
  return false unless content_type
  expected_types = Array.wrap(type).map(&:to_s)
  # Fixed: the original nested `select` inside `select`; the inner call
  # returned an array — truthy even when empty — so every expected type
  # was kept and the method answered true for non-matching types whenever
  # a Content-Type was present.
  expected_types.any? do |expected|
    content_type.any? { |actual| actual.start_with?(expected) }
  end
end
|
ruby
|
{
"resource": ""
}
|
q3189
|
Prawn.EsrRecipe.esr9_format_account_id
|
train
|
# Formats a "PP-MMMMM-C" ESR account id into its fixed-width 9-digit form:
# 2-digit prefix, 6-digit zero-padded main part, 1-digit check digit.
def esr9_format_account_id(account_id)
  prefix, main, check = account_id.split('-').map(&:to_i)
  format('%02d%06d%1d', prefix, main, check)
end
|
ruby
|
{
"resource": ""
}
|
q3190
|
LDAPGroupsLookup.Configuration.config
|
train
|
# Returns the lookup configuration, loading it lazily on first access.
# Under Rails the config file is read from the app's config directory;
# otherwise the gem's bundled default config is used.
def config
  return @config unless @config.nil?
  path = if defined? Rails
           Rails.root.join('config', 'ldap_groups_lookup.yml').to_s
         else
           File.join(__dir__, '..', '..', 'config', 'ldap_groups_lookup.yml').to_s
         end
  configure(path)
  @config
end
|
ruby
|
{
"resource": ""
}
|
q3191
|
CachedCounts.Cache.clear
|
train
|
# Evicts all cached counts belonging to this scope's table and removes
# their keys from the tracked key list.
def clear
  table_marker = @scope.table_name.downcase
  stale_keys = all_keys.select { |key| key.include?(table_marker) }
  stale_keys.each { |key| Rails.cache.delete(key) }
  # all_keys is re-read here, mirroring the original's second call.
  Rails.cache.write(list_key, all_keys - stale_keys)
end
|
ruby
|
{
"resource": ""
}
|
q3192
|
QiitaScouter.Core.analyze
|
train
|
# Analyzes a Qiita user: loads the profile and articles, then computes
# the resulting "power levels".
def analyze(target_user)
  calc_power_levels(read_user(target_user), read_articles(target_user))
end
|
ruby
|
{
"resource": ""
}
|
q3193
|
ParallelAppium.IOS.simulator_information
|
train
|
# Extracts platform-version / UDID tokens for available iPhone simulators.
#
# Filters @simulators down to iPhone entries (excluding Apple Watch),
# matches the "(<version>) [<UDID>]" tail of each line, strips the
# parens/brackets, and splits into whitespace-separated tokens. The list
# is truncated to ENV['THREADS'] entries (the number of test threads).
#
# @return [Array<Array<String>>] per-simulator token arrays — exact token
#   layout depends on the `simctl` listing format; verify against callers.
def simulator_information
  re = /\([0-9]+\.[0-9](\.[0-9])?\) \[[0-9A-Z-]+\]/m
  # Filter out simulator info for iPhone platform version and udid
  @simulators.select { |simulator_data| simulator_data.include?('iPhone') && !simulator_data.include?('Apple Watch') }
.map { |simulator_data| simulator_data.match(re).to_s.tr('()[]', '').split }[0, ENV['THREADS'].to_i]
end
|
ruby
|
{
"resource": ""
}
|
q3194
|
Frankenstein.Request.measure
|
train
|
# Instruments one request: counts it, tracks in-flight concurrency, times
# it, and records exceptions by class.
#
# The block receives +res_labels+ (a dup of +labels+) and may mutate it to
# attach response-specific labels before the duration is observed. The
# in-flight gauge is updated under a mutex and always decremented in the
# ensure clause. Rescuing Exception here is deliberate: the failure is
# counted and then re-raised, so nothing is swallowed.
#
# @param labels [Hash] base label set applied to all metrics
# @return the block's return value
# @raise [NoBlockError] when called without a block
def measure(labels = {})
  start_time = Time.now
  unless block_given?
    raise NoBlockError,
          "No block passed to #{self.class}#measure"
  end
  @requests.increment(labels, 1)
  @mutex.synchronize { @current.set(labels, (@current.get(labels) || 0) + 1) }
  res_labels = labels.dup
  begin
    # tap: observe the duration while still returning the block's value.
    yield(res_labels).tap do
      elapsed_time = Time.now - start_time
      @durations.observe(res_labels, elapsed_time)
    end
  rescue Exception => ex
    @exceptions.increment(labels.merge(class: ex.class.to_s), 1)
    raise
  ensure
    @mutex.synchronize { @current.set(labels, @current.get(labels) - 1) }
  end
end
|
ruby
|
{
"resource": ""
}
|
q3195
|
Frankenstein.CollectedMetric.values
|
train
|
# Collects the metric's current values by invoking the collector proc.
#
# The proc must return a Hash of labelset => value; any other return type
# is logged, counted as a NotAHashError, and replaced with {}. Every
# labelset is validated before the hash is returned. A StandardError
# raised during collection is logged, counted by class, and swallowed
# (returning {}), so a broken collector never breaks a scrape.
#
# @return [Hash] labelset => value, or {} on any collection failure
def values
  begin
    @collector.call(self).tap do |results|
      unless results.is_a?(Hash)
        @logger.error(progname) { "Collector proc did not return a hash, got #{results.inspect}" }
        @errors_metric.increment(class: "NotAHashError")
        return {}
      end
      results.keys.each { |labelset| @validator.validate(labelset) }
    end
  rescue StandardError => ex
    @logger.error(progname) { (["Exception in collection: #{ex.message} (#{ex.class})"] + ex.backtrace).join("\n  ") }
    @errors_metric.increment(class: ex.class.to_s)
    {}
  end
end
|
ruby
|
{
"resource": ""
}
|
q3196
|
Frankenstein.CollectedMetric.validate_type
|
train
|
# Validates that +type+ is one of the supported Prometheus metric types,
# raising ArgumentError otherwise.
def validate_type(type)
  supported = %i{gauge counter histogram summary}
  return if supported.include?(type)
  raise ArgumentError, "type must be one of :gauge, :counter, :histogram, or :summary (got #{type.inspect})"
end
|
ruby
|
{
"resource": ""
}
|
q3197
|
Salesforce.ChatterFeed.search_chatter_feeds
|
train
|
# Searches Chatter feeds for the given object type and query string,
# delegating to the generic feed fetcher with search-mode arguments
# (no user id, no attachments, search enabled via the query string).
def search_chatter_feeds(object_type, query_string, binding, limit=100)
  get_all_chatter_feeds_with_attachments(
    nil, object_type, binding, 'no-attachment-for-search', limit, false, query_string
  )
end
|
ruby
|
{
"resource": ""
}
|
q3198
|
Phonology.Inventory.with
|
train
|
# Returns a new inventory containing sounds that have at least one of the
# given (positive) features, excluding any sound carrying a negated
# feature. Positive/negated features are separated by mangle_args.
#
# @param features [Array] feature list (mix of positive and negated)
# @return [self.class] filtered inventory copy
def with(*features)
  pos, neg = mangle_args(*features)
  # Hash[...] rebuilds a Hash from select's result (needed on Rubies where
  # Hash#select returned an Array of pairs).
  self.class.new(Hash[@sets.select do |key, val|
    !key.intersection(pos).empty?
  end]).without_any(neg)
end
|
ruby
|
{
"resource": ""
}
|
q3199
|
Phonology.Inventory.with_all
|
train
|
# Returns a new inventory containing sounds that have ALL of the given
# (positive) features, excluding any sound carrying a negated feature.
# Positive/negated features are separated by mangle_args.
#
# @param features [Array] feature list (mix of positive and negated)
# @return [self.class] filtered inventory copy
def with_all(*features)
  pos, neg = mangle_args(*features)
  # pos.subset?(key): the sound's feature set must contain every positive
  # feature requested.
  self.class.new(Hash[@sets.select do |key, val|
    pos.subset?(key)
  end]).without_any(neg)
end
|
ruby
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.