_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q3000
|
ActsAsRevisionable.ClassMethods.restore_revision
|
train
|
# Looks up the revision record identified by +id+ and +revision_number+
# and restores it to a model instance. Returns nil when no such revision exists.
def restore_revision(id, revision_number)
  record = revision(id, revision_number)
  record.restore if record
end
|
ruby
|
{
"resource": ""
}
|
q3001
|
ActsAsRevisionable.ClassMethods.restore_revision!
|
train
|
# Restores a revision and persists it, including its restorable associations.
# The save happens inside store_revision so the restore itself is revisioned.
# Returns the restored record, or nil when the revision was not found.
def restore_revision!(id, revision_number)
  record = restore_revision(id, revision_number)
  unless record.nil?
    record.store_revision do
      save_restorable_associations(record, revisionable_associations)
    end
  end
  record
end
|
ruby
|
{
"resource": ""
}
|
q3002
|
ActsAsRevisionable.ClassMethods.restore_last_revision!
|
train
|
# Restores the most recent revision for +id+ and persists it, including its
# restorable associations. Returns the restored record, or nil when absent.
def restore_last_revision!(id)
  record = restore_last_revision(id)
  unless record.nil?
    record.store_revision do
      save_restorable_associations(record, revisionable_associations)
    end
  end
  record
end
|
ruby
|
{
"resource": ""
}
|
q3003
|
ActsAsRevisionable.ClassMethods.revisionable_associations
|
train
|
# Normalizes the :associations option into a nested hash.
# Symbols become `{name => true}`; hashes recurse so nested association
# trees become nested hashes. Returns nil when no associations are configured.
def revisionable_associations(options = acts_as_revisionable_options[:associations])
  return nil unless options
  list = options.kind_of?(Array) ? options : [options]
  list.each_with_object({}) do |association, mapping|
    case association
    when Symbol
      mapping[association] = true
    when Hash
      association.each_pair do |key, value|
        mapping[key] = revisionable_associations(value)
      end
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q3004
|
ActsAsRevisionable.InstanceMethods.store_revision
|
train
|
# Wraps a block (typically a save) so a revision snapshot is taken before
# the changes are applied.
#
# Visible behavior:
# * New records, or records with revisioning disabled, just run the block.
# * Otherwise, inside a transaction, a read-only copy of the record is
#   reloaded and snapshotted via create_revision!, and old revisions are
#   truncated; failures there are logged and swallowed (best effort).
# * The block runs with revisioning disabled to avoid recursive snapshots.
# * The transaction rolls back when the record has validation errors.
# * If an error escapes (e.g. the database lacks transaction support), the
#   revision created above is destroyed by hand, then the error is re-raised.
def store_revision
  if new_record? || @revisions_disabled
    return yield
  else
    retval = nil
    revision = nil
    begin
      revision_record_class.transaction do
        begin
          # Reload read-only so the snapshot reflects the persisted state,
          # not any unsaved in-memory changes.
          read_only = self.class.first(:conditions => {self.class.primary_key => self.id}, :readonly => true)
          if read_only
            revision = read_only.create_revision!
            truncate_revisions!
          end
        rescue => e
          logger.warn(e) if logger
        end
        disable_revisioning do
          retval = yield
        end
        raise ActiveRecord::Rollback unless errors.empty?
        # If the block deleted the record, mark the snapshot as trash.
        revision.trash! if destroyed?
      end
    rescue => e
      # In case the database doesn't support transactions
      if revision
        begin
          revision.destroy
        rescue => e
          logger.warn(e) if logger
        end
      end
      raise e
    end
    return retval
  end
end
|
ruby
|
{
"resource": ""
}
|
q3005
|
ActsAsRevisionable.InstanceMethods.create_revision!
|
train
|
# Builds and saves (save!) a revision record for this instance, returning it.
#
# The :meta option controls extra attributes copied onto the revision:
# * Hash  — each attribute => value pair is applied via
#   set_revision_meta_attribute (Symbols/Procs are resolved there).
# * Array — each entry names both the revision attribute and, as a Symbol,
#   the method on self providing its value.
# * other truthy value — treated as a single attribute the same way.
def create_revision!
  revision_options = self.class.acts_as_revisionable_options
  revision = revision_record_class.new(self, revision_options[:encoding])
  if revision_options[:meta].is_a?(Hash)
    revision_options[:meta].each do |attribute, value|
      set_revision_meta_attribute(revision, attribute, value)
    end
  elsif revision_options[:meta].is_a?(Array)
    revision_options[:meta].each do |attribute|
      set_revision_meta_attribute(revision, attribute, attribute.to_sym)
    end
  elsif revision_options[:meta]
    set_revision_meta_attribute(revision, revision_options[:meta], revision_options[:meta].to_sym)
  end
  revision.save!
  return revision
end
|
ruby
|
{
"resource": ""
}
|
q3006
|
ActsAsRevisionable.InstanceMethods.set_revision_meta_attribute
|
train
|
# Assigns a meta attribute on +revision+. The value is resolved first:
# a Symbol is sent to self, a Proc is called with self, anything else
# is used verbatim.
def set_revision_meta_attribute(revision, attribute, value)
  resolved =
    case value
    when Symbol then send(value)
    when Proc then value.call(self)
    else value
    end
  revision.send("#{attribute}=", resolved)
end
|
ruby
|
{
"resource": ""
}
|
q3007
|
ShipCompliant.BaseResult.errors
|
train
|
# Returns the ErrorResult objects parsed from the response, or [] when the
# call succeeded. Memoized after the first computation.
def errors
  return [] if success?
  @errors ||= Array.wrap(response[:errors]).map { |error| ErrorResult.new(error[:error]) }
end
|
ruby
|
{
"resource": ""
}
|
q3008
|
Scruffy.Graph.render
|
train
|
# Renders the graph to SVG and optionally writes or rasterizes it.
#
# Every option falls back to the graph's own attribute of the same name.
# Notable keys:
# * :size — [w, h]; defaults to [600, 360], or [width, width * 0.6] when
#   :width is given (note :width is deleted from the options in that case).
# * :padding — forwarded to bottom_value/top_value for :min/:max_value.
# * :renderer — overrides self.renderer for this call.
# * :to — file path; the SVG text is written there when :as is not set.
# * :as — when set, the SVG is rasterized via rasterizer; otherwise the SVG
#   string is returned.
def render(options = {})
  options[:theme] ||= theme
  options[:value_formatter] ||= value_formatter
  options[:key_formatter] ||= key_formatter
  options[:point_markers] ||= point_markers
  options[:point_markers_rotation] ||= point_markers_rotation
  options[:point_markers_ticks] ||= point_markers_ticks
  options[:size] ||= (options[:width] ? [options[:width], (options.delete(:width) * 0.6).to_i] : [600, 360])
  options[:title] ||= title
  options[:x_legend] ||= x_legend
  options[:y_legend] ||= y_legend
  options[:layers] ||= layers
  options[:min_value] ||= bottom_value(options[:padding] ? options[:padding] : nil)
  options[:max_value] ||= top_value(options[:padding] ? options[:padding] : nil)
  options[:min_key] ||= bottom_key
  options[:max_key] ||= top_key
  options[:graph] ||= self
  # Removed for now.
  # Added for making smaller fonts more legible, but may not be needed after all.
  #
  # if options[:as] && (options[:size][0] <= 300 || options[:size][1] <= 200)
  #   options[:actual_size] = options[:size]
  #   options[:size] = [800, (800.to_f * (options[:actual_size][1].to_f / options[:actual_size][0].to_f))]
  # end
  svg = ( options[:renderer].nil? ? self.renderer.render( options ) : options[:renderer].render( options ) )
  # SVG to file.
  if options[:to] && options[:as].nil?
    File.open(options[:to], 'w') { |file|
      file.write(svg)
    }
  end
  options[:as] ? rasterizer.rasterize(svg, options) : svg
end
|
ruby
|
{
"resource": ""
}
|
q3009
|
Scruffy::Layers.Stacked.render
|
train
|
# Renders each wrapped layer as part of a stacked graph.
#
# current_points holds the running stacked totals: each layer temporarily
# renders with those totals as its points, then its real values are
# subtracted so the next layer draws underneath.
#
# NOTE(review): `options.merge(layer_options)` discards its return value —
# `options.merge!` looks intended; confirm before changing.
def render(svg, options = {})
  #TODO ensure this works with new points
  current_points = points
  layers.each do |layer|
    real_points = layer.points
    layer.points = current_points
    layer_options = options.dup
    layer_options[:color] = layer.preferred_color || layer.color || options[:theme].next_color
    layer.render(svg, layer_options)
    options.merge(layer_options)
    # Restore the layer's own points and peel its values off the totals.
    layer.points = real_points
    layer.points.each_with_index { |val, idx| current_points[idx] -= val }
  end
end
|
ruby
|
{
"resource": ""
}
|
q3010
|
Scruffy::Layers.Stacked.legend_data
|
train
|
# Collects legend data from every wrapped layer, or returns nil when this
# stacked layer has no relevant data.
def legend_data
  return nil unless relevant_data?
  layers.map { |layer| layer.legend_data }
end
|
ruby
|
{
"resource": ""
}
|
q3011
|
Scruffy::Renderers.Base.render
|
train
|
# Renders all components into a complete SVG document string.
#
# Works on a clone so subclasses can mutate components in before_render
# without affecting this renderer. Emits the XML prolog and SVG 1.0 DOCTYPE
# unless :no_doctype_header is set, then renders each component into a <g>
# wrapper sized by bounds_for.
#
# Options: :graph_id (defaults to 'scruffy_graph'), :complexity (defaults
# to global_complexity or :normal), :size ([width, height], required).
def render(options = {})
  options[:graph_id] ||= 'scruffy_graph'
  options[:complexity] ||= (global_complexity || :normal)
  # Allow subclasses to muck with components prior to renders.
  rendertime_renderer = self.clone
  rendertime_renderer.instance_eval { before_render if respond_to?(:before_render) }
  svg = Builder::XmlMarkup.new(:indent => 2)
  unless options[:no_doctype_header]
    svg.instruct!
    svg.declare! :DOCTYPE, :svg, :PUBLIC, "-//W3C//DTD SVG 1.0//EN", "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd"
  end
  svg.svg(:xmlns => "http://www.w3.org/2000/svg", 'xmlns:xlink' => "http://www.w3.org/1999/xlink", :width => options[:size].first, :height => options[:size].last) {
    svg.g(:id => options[:graph_id]) {
      rendertime_renderer.components.each do |component|
        component.render(svg,
                         bounds_for( options[:size], component.position, component.size ),
                         options)
      end
    }
  }
  svg.target!
end
|
ruby
|
{
"resource": ""
}
|
q3012
|
Elasticrawl.ParseJob.set_segments
|
train
|
# Configures this parse job for the given crawl segments: sets the job
# name/description, records the per-segment file limit, and adds one job
# step per segment.
def set_segments(crawl_segments, max_files = nil)
  self.job_name = set_job_name
  self.job_desc = set_job_desc(crawl_segments, max_files)
  self.max_files = max_files
  crawl_segments.each do |segment|
    step = create_job_step(segment)
    self.job_steps.push(step)
  end
end
|
ruby
|
{
"resource": ""
}
|
q3013
|
Elasticrawl.ParseJob.run
|
train
|
# Launches the parse job on EMR. When a job flow id comes back, it is stored
# on the job, each step's crawl segment is stamped with the current time as
# its parse_time and saved, the job itself is saved, and a result message is
# returned. Returns nil when no job flow id was produced.
def run
  emr_config = job_config['emr_config']
  job_flow_id = run_job_flow(emr_config)
  if job_flow_id.present?
    self.job_flow_id = job_flow_id
    self.job_steps.each do |step|
      segment = step.crawl_segment
      segment.parse_time = DateTime.now
      segment.save
    end
    self.save
    self.result_message
  end
end
|
ruby
|
{
"resource": ""
}
|
q3014
|
Elasticrawl.ParseJob.segment_list
|
train
|
# Builds a printable list of segment descriptions: a 'Segments' header,
# one line per step that has a crawl segment, and a trailing blank entry.
def segment_list
  list = ['Segments']
  job_steps.each do |job_step|
    next unless job_step.crawl_segment.present?
    list.push(job_step.crawl_segment.segment_desc)
  end
  list.push('')
end
|
ruby
|
{
"resource": ""
}
|
q3015
|
Elasticrawl.ParseJob.set_job_desc
|
train
|
# Builds a human-readable description of a parse job covering +segments+,
# e.g. "Crawl: CC-x Segments: 3 Parsing: 5 files per segment".
# With no segments the crawl name and file description are left blank.
def set_job_desc(segments, max_files)
  crawl_name = nil
  file_desc = nil
  if segments.count > 0
    first_crawl = segments[0].crawl
    crawl_name = first_crawl.crawl_name if first_crawl.present?
    file_desc = max_files.nil? ? 'all files' : "#{max_files} files per segment"
  end
  "Crawl: #{crawl_name} Segments: #{segments.count} Parsing: #{file_desc}"
end
|
ruby
|
{
"resource": ""
}
|
q3016
|
PartialDate.Date.old_to_s
|
train
|
# Formats the partial date using a strftime-like format string, or a Symbol
# key into the FORMATS constant. Each FORMAT_METHODS token present in the
# format is substituted in place (gsub! on the dup'd format, so FORMATS is
# never mutated), then separator characters left over from missing date
# parts are cleaned up.
def old_to_s(format = :default)
  format = FORMATS[format] if format.is_a?(Symbol)
  result = format.dup
  FORMAT_METHODS.each_pair do |key, value|
    result.gsub!( key, value.call( self )) if result.include? key
  end
  # Remove any leading "/-," chars.
  # Remove double white spaces.
  # Remove any duplicate "/-," chars and replace with the single char.
  # Remove any trailing "/-," chars.
  # Anything else - you're on your own ;-)
  # When the format starts with a year and a year is present, a leading "-"
  # is meaningful (negative years), so it is excluded from the leading trim.
  lead_trim = (year != 0 && format.lstrip.start_with?("%Y")) ? /\A[\/\,\s]+/ : /\A[\/\,\-\s]+/
  result = result.gsub(lead_trim, '').gsub(/\s\s/, ' ').gsub(/[\/\-\,]([\/\-\,])/, '\1').gsub(/[\/\,\-\s]+\z/, '')
end
|
ruby
|
{
"resource": ""
}
|
q3017
|
Scruffy::Layers.Line.draw
|
train
|
# Draws this line layer into +svg+ at the given pixel +coords+.
#
# Creation-time @options override the per-render options (merge!).
# When :shadow is set, a translated black, semi-transparent copy of the
# polyline (and its dots) is drawn first; then the main polyline in @color,
# with optional :dots circles at each coordinate.
def draw(svg, coords, options={})
  # Include options provided when the object was created
  options.merge!(@options)
  stroke_width = (options[:relativestroke]) ? relative(options[:stroke_width]) : options[:stroke_width]
  style = (options[:style]) ? options[:style] : ''
  if options[:shadow]
    svg.g(:class => 'shadow', :transform => "translate(#{relative(0.5)}, #{relative(0.5)})") {
      svg.polyline( :points => stringify_coords(coords).join(' '), :fill => 'transparent',
                    :stroke => 'black', 'stroke-width' => stroke_width,
                    :style => 'fill-opacity: 0; stroke-opacity: 0.35' )
      if options[:dots]
        coords.each { |coord| svg.circle( :cx => coord.first, :cy => coord.last + relative(0.9), :r => stroke_width,
                                          :style => "stroke-width: #{stroke_width}; stroke: black; opacity: 0.35;" ) }
      end
    }
  end
  svg.polyline( :points => stringify_coords(coords).join(' '), :fill => 'none', :stroke => @color.to_s,
                'stroke-width' => stroke_width, :style => style )
  if options[:dots]
    coords.each { |coord| svg.circle( :cx => coord.first, :cy => coord.last, :r => stroke_width,
                                      :style => "stroke-width: #{stroke_width}; stroke: #{color.to_s}; fill: #{color.to_s}" ) }
  end
end
|
ruby
|
{
"resource": ""
}
|
q3018
|
Elasticrawl.Crawl.status
|
train
|
# Returns a one-line status string for this crawl: its name plus the number
# of segments still to parse, already parsed, and the total.
def status
  total = self.crawl_segments.count
  remaining = CrawlSegment.where(:crawl_id => self.id,
                                 :parse_time => nil).count
  parsed = total - remaining
  self.crawl_name + " Segments: to parse #{remaining}, parsed #{parsed}, total #{total}"
end
|
ruby
|
{
"resource": ""
}
|
q3019
|
Elasticrawl.Crawl.create_segments
|
train
|
# Fetches the WARC path listing for this crawl, tallies files per segment,
# persists the crawl (only when segments were found), creates a CrawlSegment
# per segment, and returns the number of segments.
def create_segments
  file_paths = warc_paths(self.crawl_name)
  segments = parse_segments(file_paths)
  save if segments.count > 0
  segments.each_pair do |segment_name, file_count|
    CrawlSegment.create_segment(self, segment_name, file_count)
  end
  segments.count
end
|
ruby
|
{
"resource": ""
}
|
q3020
|
Elasticrawl.Crawl.reset
|
train
|
# Clears parse_time on every already-parsed segment of this crawl so they
# will be parsed again, then returns the crawl's status string.
def reset
  parsed_segments = CrawlSegment.where('crawl_id = ? and parse_time is not null',
                                       self.id)
  parsed_segments.each do |segment|
    segment.update_attribute(:parse_time, nil)
  end
  status
end
|
ruby
|
{
"resource": ""
}
|
q3021
|
Elasticrawl.Crawl.warc_paths
|
train
|
# Downloads and returns the (uncompressed) WARC path listing for a crawl
# from the Common Crawl S3 bucket.
#
# Raises S3AccessError on AWS errors (carrying the HTTP response) or on any
# other StandardError during the fetch.
#
# Fix: previously rescued Exception, which also swallows SignalException,
# SystemExit and NoMemoryError; narrowed to StandardError. The unused
# exception binding was removed.
def warc_paths(crawl_name)
  s3_path = [Elasticrawl::COMMON_CRAWL_PATH,
             crawl_name,
             Elasticrawl::WARC_PATHS].join('/')
  begin
    s3 = AWS::S3.new
    bucket = s3.buckets[Elasticrawl::COMMON_CRAWL_BUCKET]
    object = bucket.objects[s3_path]
    uncompress_file(object)
  rescue AWS::Errors::Base => s3e
    raise S3AccessError.new(s3e.http_response), 'Failed to get WARC paths'
  rescue StandardError
    raise S3AccessError, 'Failed to get WARC paths'
  end
end
|
ruby
|
{
"resource": ""
}
|
q3022
|
Elasticrawl.Crawl.uncompress_file
|
train
|
# Reads a gzipped S3 object into memory and returns its uncompressed
# contents. Returns an empty string when the object does not exist.
def uncompress_file(s3_object)
  return '' unless s3_object.exists?
  buffer = StringIO.new(s3_object.read)
  reader = Zlib::GzipReader.new(buffer)
  begin
    reader.read
  ensure
    reader.close
  end
end
|
ruby
|
{
"resource": ""
}
|
q3023
|
Elasticrawl.Crawl.parse_segments
|
train
|
# Tallies WARC file paths by segment name (the 4th '/'-separated path
# component). Paths without a segment component are ignored.
# Returns a Hash of segment name => file count.
def parse_segments(warc_paths)
  counts = Hash.new(0)
  warc_paths.split.each do |path|
    name = path.split('/')[3]
    counts[name] += 1 if name.present?
  end
  counts
end
|
ruby
|
{
"resource": ""
}
|
q3024
|
Scruffy::Layers.MultiArea.draw
|
train
|
# Draws the area polygons for this layer: the main polygon from +coords+
# plus one polygon per configured baseline (highest baseline first), cycling
# through @area_colors when they are provided.
#
# Fixes over the previous version:
# * draw_poly was called with `options = {}`, which simultaneously passed an
#   empty hash AND clobbered the local `options`; the caller's options are
#   now forwarded intact.
# * Leftover debug `puts` statements removed.
def draw(svg, coords, options={})
  # Use colors from the user-supplied area_colors list when given, otherwise
  # fall back to the layer's theme color.
  color_count = nil
  if @area_colors && @area_colors.size > 0
    area_color = @area_colors[0]
    color_count = 1
  else
    area_color = color
  end
  # Draw Bottom Level Polygon (original coords).
  draw_poly(svg, coords, area_color, options)
  # Draw Lower Area Polygons.
  if @baselines
    @baselines.sort! {|x,y| y <=> x }
    @baselines.each do |baseline|
      if color_count
        # Cycle to the next user-supplied color, wrapping around.
        area_color = area_colors[color_count]
        color_count += 1
        color_count = 0 if color_count >= area_colors.size
      end
      lower_poly_coords = create_lower_polygon_coords(translate_number(baseline), coords, options)
      draw_poly(svg, lower_poly_coords, area_color, options)
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q3025
|
Sparkr.Sparkline.normalize_numbers
|
train
|
# Coerces every input to an Integer and shifts the list so that its minimum
# value becomes zero. Returns the shifted list.
def normalize_numbers(_numbers)
  ints = _numbers.map(&:to_i)
  floor = ints.min
  ints.map { |i| i - floor }
end
|
ruby
|
{
"resource": ""
}
|
q3026
|
OpenTox.Nanoparticle.parse_ambit_value
|
train
|
# Extracts a usable measurement from an Ambit/eNanoMapper value hash +v+ and
# records it on +feature+/+dataset+ via add_feature.
#
# The hash's key combination decides the interpretation: plain text values,
# bare loValue, loValue with an error or qualifier, empty qualifiers, etc.
# Several shapes (min-only, max-only, explicit ranges) are deliberately
# skipped. Unrecognized shapes produce a warning. The "unit" key is removed
# up front (note: this mutates the caller's hash).
def parse_ambit_value feature, v, dataset
  # TODO add study id to warnings
  v.delete "unit"
  # TODO: ppm instead of weights
  if v.keys == ["textValue"]
    add_feature feature, v["textValue"], dataset
  elsif v.keys == ["loValue"]
    add_feature feature, v["loValue"], dataset
  elsif v.keys.size == 2 and v["errorValue"]
    add_feature feature, v["loValue"], dataset
    #warn "Ignoring errorValue '#{v["errorValue"]}' for '#{feature.name}'."
  elsif v.keys.size == 2 and v["loQualifier"] == "mean"
    add_feature feature, v["loValue"], dataset
    #warn "'#{feature.name}' is a mean value. Original data is not available."
  elsif v.keys.size == 2 and v["loQualifier"] #== ">="
    #warn "Only min value available for '#{feature.name}', entry ignored"
  elsif v.keys.size == 2 and v["upQualifier"] #== ">="
    #warn "Only max value available for '#{feature.name}', entry ignored"
  elsif v.keys.size == 3 and v["loValue"] and v["loQualifier"].nil? and v["upQualifier"].nil?
    add_feature feature, v["loValue"], dataset
    #warn "loQualifier and upQualifier are empty."
  elsif v.keys.size == 3 and v["loValue"] and v["loQualifier"] == "" and v["upQualifier"] == ""
    add_feature feature, v["loValue"], dataset
    #warn "loQualifier and upQualifier are empty."
  elsif v.keys.size == 4 and v["loValue"] and v["loQualifier"].nil? and v["upQualifier"].nil?
    add_feature feature, v["loValue"], dataset
    #warn "loQualifier and upQualifier are empty."
  elsif v.size == 4 and v["loQualifier"] and v["upQualifier"] and v["loValue"] and v["upValue"]
    #add_feature feature, [v["loValue"],v["upValue"]].mean, dataset
    #warn "Using mean value of range #{v["loValue"]} - #{v["upValue"]} for '#{feature.name}'. Original data is not available."
  elsif v.size == 4 and v["loQualifier"] == "mean" and v["errorValue"]
    #warn "'#{feature.name}' is a mean value. Original data is not available. Ignoring errorValue '#{v["errorValue"]}' for '#{feature.name}'."
    add_feature feature, v["loValue"], dataset
  elsif v == {} # do nothing
  else
    warn "Cannot parse Ambit eNanoMapper value '#{v}' for feature '#{feature.name}'."
  end
end
|
ruby
|
{
"resource": ""
}
|
q3027
|
OpenTox.Dataset.substances
|
train
|
# Returns the unique substances referenced by this dataset's data entries.
# Memoized after the first lookup.
def substances
  @substances ||= data_entries.keys.map { |id| OpenTox::Substance.find(id) }.uniq
end
|
ruby
|
{
"resource": ""
}
|
q3028
|
OpenTox.Dataset.features
|
train
|
# Returns the unique features referenced anywhere in this dataset's data
# entries. Memoized after the first lookup.
def features
  @features ||= data_entries.flat_map { |_sid, data| data.keys.map { |id| OpenTox::Feature.find(id) } }.uniq
end
|
ruby
|
{
"resource": ""
}
|
q3029
|
OpenTox.Dataset.values
|
train
|
# Returns the stored measurement values for a substance/feature pair.
# Accepts either model objects or ids. Yields [nil] when no entry exists.
def values substance,feature
  substance = substance.id if substance.is_a? Substance
  feature = feature.id if feature.is_a? Feature
  row = data_entries[substance.to_s]
  entry = row && row[feature.to_s]
  entry ? entry : [nil]
end
|
ruby
|
{
"resource": ""
}
|
q3030
|
OpenTox.Dataset.add
|
train
|
# Appends a measurement +value+ for the substance/feature pair, creating the
# nested hash/array entries on demand. Accepts model objects or ids.
def add(substance,feature,value)
  substance = substance.id if substance.is_a? Substance
  feature = feature.id if feature.is_a? Feature
  row = (data_entries[substance.to_s] ||= {})
  (row[feature.to_s] ||= []) << value
  #data_entries[substance.to_s][feature.to_s].uniq! if value.numeric? # assuming that identical values come from the same source
end
|
ruby
|
{
"resource": ""
}
|
q3031
|
OpenTox.Dataset.folds
|
train
|
# Splits the dataset into +n+ cross-validation folds.
#
# Substance indices are shuffled, then partitioned into n contiguous test
# slices of size len/n (the first len%n slices get one extra element). For
# each fold a [training_dataset, test_dataset] pair is created; each new
# dataset is saved, registered on its substances' dataset_ids, and shares
# this dataset's data entries for its substances.
#
# Returns an array of n [training, test] dataset pairs.
# NOTE(review): results depend on Array#shuffle, so folds are random per call.
def folds n
  len = self.substances.size
  indices = (0..len-1).to_a.shuffle
  mid = (len/n)
  chunks = []
  start = 0
  1.upto(n) do |i|
    last = start+mid
    # Shrink this slice by one unless it is among the first len%n slices.
    last = last-1 unless len%n >= i
    test_idxs = indices[start..last] || []
    test_substances = test_idxs.collect{|i| substances[i]}
    training_idxs = indices-test_idxs
    training_substances = training_idxs.collect{|i| substances[i]}
    chunk = [training_substances,test_substances].collect do |substances|
      dataset = self.class.create(:name => "#{self.name} (Fold #{i-1})",:source => self.id )
      substances.each do |substance|
        substance.dataset_ids << dataset.id
        substance.dataset_ids.uniq!
        substance.save
        dataset.data_entries[substance.id.to_s] = data_entries[substance.id.to_s] ||= {}
      end
      dataset.save
      dataset
    end
    start = last+1
    chunks << chunk
  end
  chunks
end
|
ruby
|
{
"resource": ""
}
|
q3032
|
OpenTox.Dataset.to_csv
|
train
|
# Serializes the dataset to CSV.
#
# The header is "SMILES"/"InChI" (for compound datasets, controlled by
# +inchi+) or "Name" plus one column per feature. Each substance emits one
# row per measurement; substances whose features have unequal measurement
# counts are skipped with a warning. Missing values become "".
#
# NOTE(review): assumes substances is non-empty (substances.first) and that
# a skipped-or-not decision can rely on nr_measurements.first — confirm
# callers never pass empty datasets.
def to_csv(inchi=false)
  CSV.generate() do |csv|
    compound = substances.first.is_a? Compound
    if compound
      csv << [inchi ? "InChI" : "SMILES"] + features.collect{|f| f.name}
    else
      csv << ["Name"] + features.collect{|f| f.name}
    end
    substances.each do |substance|
      if compound
        name = (inchi ? substance.inchi : substance.smiles)
      else
        name = substance.name
      end
      # Distinct measurement counts across features for this substance.
      nr_measurements = features.collect{|f| data_entries[substance.id.to_s][f.id.to_s].size if data_entries[substance.id.to_s][f.id.to_s]}.compact.uniq
      if nr_measurements.size > 1
        warn "Unequal number of measurements (#{nr_measurements}) for '#{name}'. Skipping entries."
      else
        (0..nr_measurements.first-1).each do |i|
          row = [name]
          features.each do |f|
            values(substance,f) ? row << values(substance,f)[i] : row << ""
          end
          csv << row
        end
      end
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q3033
|
Rango.LoggerMixin.inspect
|
train
|
# Debug-logs its arguments. A single Hash argument is logged as one
# "name: value" line per pair; anything else is inspected and passed to
# debug as-is.
def inspect(*args)
  if args.length == 1 && args.first.is_a?(Hash)
    args.first.each_pair do |name, value|
      debug("#{name}: #{value.inspect}")
    end
  else
    debug(*args.map { |arg| arg.inspect })
  end
end
|
ruby
|
{
"resource": ""
}
|
q3034
|
GraphQL::Relay::Walker.Queue.add
|
train
|
# Enqueues a frame unless the queue is full or its GID was already seen.
# With random_idx the frame is inserted at a random position, otherwise at
# the end. Returns true when added, false otherwise.
def add(frame)
  full = max_size && queue.length >= max_size
  return false if full || seen.include?(frame.gid)
  seen.add(frame.gid)
  position = random_idx ? rand(queue.length + 1) : queue.length
  queue.insert(position, frame)
  true
end
|
ruby
|
{
"resource": ""
}
|
q3035
|
GraphQL::Relay::Walker.Queue.add_gid
|
train
|
# Wraps a GID in a Frame and enqueues it, returning add()'s result
# (false when the queue is full or the GID was already seen).
def add_gid(gid, parent = nil)
  add(Frame.new(self, gid, parent))
end
|
ruby
|
{
"resource": ""
}
|
q3036
|
ShipCompliant.GetInventoryDetailsResult.location
|
train
|
# Finds the inventory location whose fulfillment_location matches +key+.
# Returns an empty hash when no location matches.
def location(key)
  locations.find { |l| l[:fulfillment_location] == key } || {}
end
|
ruby
|
{
"resource": ""
}
|
q3037
|
Cog.Config.prepare
|
train
|
# (Re)initializes this Config instance from Cogfiles.
#
# Throws :ConfigInstanceAlreadyPrepared when called twice without
# opt[:force_reset]. Resets all project/generator/plugin/template paths and
# language state, then processes Cogfiles and rebuilds the language
# extension map.
#
# Options: :force_reset — allow re-preparing; :fullpaths — stored for later
# use; remaining options are forwarded to process_cogfiles.
def prepare(opt={})
  throw :ConfigInstanceAlreadyPrepared if @prepared && !opt[:force_reset]
  @prepared = true
  @fullpaths = opt[:fullpaths]
  @project_path = nil
  @project_generator_path = nil
  @project_plugin_path = nil
  @project_template_path = nil
  @generator_path = []
  @plugin_path = []
  @template_path = []
  @plugins = {}
  @target_language = Language.new
  @active_languages = [Language.new] # active language stack
  @language = {}
  @language_extension_map = {}
  process_cogfiles opt
  post_cogfile_processing
  build_language_extension_map
end
|
ruby
|
{
"resource": ""
}
|
q3038
|
ShipCompliant.CheckComplianceResult.taxes_for_shipment
|
train
|
# Builds a ShipmentSalesTaxRate for the shipment identified by
# +shipment_key+: finds the matching entry in shipment_sales_tax_rates
# (matched on its :@shipment_key attribute), symbolizes the freight rate
# attributes, and wraps the product rates.
#
# NOTE(review): assumes a matching shipment exists — a missing key would
# raise NoMethodError on nil; confirm callers guarantee this.
def taxes_for_shipment(shipment_key)
  shipment = shipment_sales_tax_rates.select { |s| s[:@shipment_key] == shipment_key }.first
  # convert attribute keys to symbols
  freight = attributes_to_symbols(shipment[:freight_sales_tax_rate])
  # wrap products in ProductSalesTaxRate
  products = wrap_products(shipment[:product_sales_tax_rates])
  ShipmentSalesTaxRate.new(shipment_key, FreightSalesTaxRate.new(freight), products)
end
|
ruby
|
{
"resource": ""
}
|
q3039
|
ShipCompliant.CheckComplianceResult.compliance_rules_for_shipment
|
train
|
# Wraps the compliance rules entry for +shipment_key+ in a
# ShipmentCompliance (built from nil when no entry matches).
def compliance_rules_for_shipment(shipment_key)
  shipment = shipment_compliance_rules.find { |s| s[:key] == shipment_key }
  ShipmentCompliance.new(shipment)
end
|
ruby
|
{
"resource": ""
}
|
q3040
|
PoiseProfiler.Config.gather_from_env
|
train
|
# Loads configuration overrides from environment variables named
# poise_profiler_<key> / poise-profiler_<key> (case-insensitive); each
# value is parsed with YAML.safe_load and stored under <key>.
def gather_from_env
  ENV.each do |key, value|
    match = /^poise(_|-)profiler_(.+)$/.match(key.downcase)
    self[match[2]] = YAML.safe_load(value) if match
  end
end
|
ruby
|
{
"resource": ""
}
|
q3041
|
PoiseProfiler.Config.gather_from_node
|
train
|
# Copies the Chef node's 'poise-profiler' attributes into this config.
# No-op when Chef.node is not defined.
def gather_from_node
  return unless defined?(Chef.node)
  settings = Chef.node['poise-profiler'] || {}
  settings.each { |key, value| self[key] = value }
end
|
ruby
|
{
"resource": ""
}
|
q3042
|
ShipCompliant.InventoryProduct.inventory_levels
|
train
|
# Maps the product's inventory levels into a Hash of
# underscored-inventory-type symbol => quantity (Float).
def inventory_levels
  product[:inventory_levels][:inventory_level].each_with_object({}) do |level, levels|
    levels[level[:inventory_type].underscore.to_sym] = level[:quantity].to_f
  end
end
|
ruby
|
{
"resource": ""
}
|
q3043
|
Elasticrawl.Cluster.create_job_flow
|
train
|
# Builds an Elasticity::JobFlow for +job+.
#
# Configures Elasticity with the AWS credentials from Config, names the
# flow after the job, sets its log URI, then applies the flow, instance,
# and bootstrap-action settings. Returns the configured (not yet launched)
# job flow.
def create_job_flow(job, emr_config = nil)
  config = Config.new
  Elasticity.configure do |c|
    c.access_key = config.access_key_id
    c.secret_key = config.secret_access_key
  end
  job_flow = Elasticity::JobFlow.new
  job_flow.name = "Job: #{job.job_name} #{job.job_desc}"
  job_flow.log_uri = job.log_uri
  configure_job_flow(job_flow)
  configure_instances(job_flow)
  configure_bootstrap_actions(job_flow, emr_config)
  job_flow
end
|
ruby
|
{
"resource": ""
}
|
q3044
|
Elasticrawl.Cluster.configure_job_flow
|
train
|
# Applies optional cluster settings from the config file to +job_flow+.
# Every setting is read first (same order as before), then each one is
# assigned only when present.
def configure_job_flow(job_flow)
  ec2_key_name = config_setting('ec2_key_name')
  placement = config_setting('placement')
  emr_ami_version = config_setting('emr_ami_version')
  job_flow_role = config_setting('job_flow_role')
  service_role = config_setting('service_role')
  ec2_subnet_id = config_setting('ec2_subnet_id')
  assignments = {
    :ec2_subnet_id= => ec2_subnet_id,
    :ec2_key_name= => ec2_key_name,
    :placement= => placement,
    :ami_version= => emr_ami_version,
    :job_flow_role= => job_flow_role,
    :service_role= => service_role
  }
  assignments.each do |writer, value|
    job_flow.public_send(writer, value) if value.present?
  end
end
|
ruby
|
{
"resource": ""
}
|
q3045
|
Elasticrawl.Cluster.configure_bootstrap_actions
|
train
|
# Adds bootstrap actions to +job_flow+: one plain BootstrapAction per
# configured bootstrap script, plus a Hadoop config file action when
# +emr_config+ is given.
def configure_bootstrap_actions(job_flow, emr_config = nil)
  scripts = config_setting('bootstrap_scripts')
  if scripts.present?
    scripts.each do |script_uri|
      job_flow.add_bootstrap_action(Elasticity::BootstrapAction.new(script_uri, '', ''))
    end
  end
  if emr_config.present?
    job_flow.add_bootstrap_action(Elasticity::HadoopFileBootstrapAction.new(emr_config))
  end
end
|
ruby
|
{
"resource": ""
}
|
q3046
|
Scruffy::Components.Legend.relevant_legend_info
|
train
|
# Collects legend data from the layers that belong to the given categories.
#
# A layer qualifies when +categories+ is nil, or when its :category is in
# the list, or when its :categories array intersects the list. Each layer's
# legend_data is appended when it is a Hash, or concatenated when it is an
# Array; anything else is ignored. Defaults to the component's configured
# :category (as a one-element list) or :categories.
def relevant_legend_info(layers, categories=(@options[:category] ? [@options[:category]] : @options[:categories]))
  legend_info = layers.inject([]) do |arr, layer|
    if categories.nil? ||
       (categories.include?(layer.options[:category]) ||
        (layer.options[:categories] && (categories & layer.options[:categories]).size > 0) )
      data = layer.legend_data
      arr << data if data.is_a?(Hash)
      arr = arr + data if data.is_a?(Array)
    end
    arr
  end
end
|
ruby
|
{
"resource": ""
}
|
q3047
|
ShipCompliant.OrderSearch.to_h
|
train
|
# Returns the order details restricted to the whitelisted KEYS, with every
# key (recursively) converted to PascalCase strings.
def to_h
  whitelisted = details.select { |key, _value| KEYS.include?(key) }
  whitelisted.deep_transform_keys { |key| key.to_s.camelize }
end
|
ruby
|
{
"resource": ""
}
|
q3048
|
Scruffy::Renderers.Cubed3d.graph_block
|
train
|
# Returns a Proc that populates a components list with the standard 3D-cubed
# layout: grid, value markers, data markers, and the graphs filtered by
# +graph_filter+.
def graph_block(graph_filter)
  Proc.new do |components|
    components << Scruffy::Components::Grid.new(:grid, :position => [10, 0], :size => [90, 89])
    components << Scruffy::Components::ValueMarkers.new(:value_markers, :position => [0, 2], :size => [8, 89])
    components << Scruffy::Components::DataMarkers.new(:data_markers, :position => [10, 92], :size => [90, 8])
    components << Scruffy::Components::Graphs.new(:graphs, :position => [10, 0], :size => [90, 89], :only => graph_filter)
  end
end
|
ruby
|
{
"resource": ""
}
|
q3049
|
Elasticrawl.JobStep.job_flow_step
|
train
|
# Builds the Elasticity custom-jar step for this job step.
# Step arguments are [class, input paths, output path] plus the file limit
# (as a string — EMR requires string arguments) when one is set.
def job_flow_step(job_config)
  max_files = self.job.max_files
  step_args = [job_config['class'], self.input_paths, self.output_path]
  # All arguments must be strings.
  step_args << max_files.to_s if max_files.present?
  step = Elasticity::CustomJarStep.new(job_config['jar'])
  step.name = set_step_name
  step.arguments = step_args
  step
end
|
ruby
|
{
"resource": ""
}
|
q3050
|
Elasticrawl.JobStep.set_step_name
|
train
|
# Builds a display name for this step based on the owning job's type:
# parse jobs show the segment description and file limit; combine jobs show
# how many jobs are being merged. Returns nil for other types or when a
# parse step has no crawl segment.
def set_step_name
  case self.job.type
  when 'Elasticrawl::ParseJob'
    return unless self.crawl_segment.present?
    file_limit = self.job.max_files || 'all'
    "#{self.crawl_segment.segment_desc} Parsing: #{file_limit}"
  when 'Elasticrawl::CombineJob'
    job_count = self.input_paths.split(',').count
    "Combining #{job_count} jobs"
  end
end
|
ruby
|
{
"resource": ""
}
|
q3051
|
GraphQL::Relay::Walker.Frame.found_gids
|
train
|
# Recursively collects every 'id' value found in a nested Hash/Array
# structure (defaulting to this frame's query result). Scalars contribute
# nothing. Returns the ids in discovery order.
def found_gids(data = result)
  ids = []
  case data
  when Hash
    ids.concat(Array(data['id']))
    ids.concat(found_gids(data.values))
  when Array
    data.each { |datum| ids.concat(found_gids(datum)) }
  end
  ids
end
|
ruby
|
{
"resource": ""
}
|
q3052
|
Rubydoop.JobDefinition.secondary_sort
|
train
|
# Enables Hadoop secondary sort on the byte range [start_index, end_index]
# of the map output key: partitioning uses Hadoop's BinaryPartitioner and
# grouping uses Humboldt's BinaryComparator, both configured with the same
# offsets so records with equal key slices land in the same reduce group.
def secondary_sort(start_index, end_index)
  @job.set_partitioner_class(Hadoop::Mapreduce::Lib::Partition::BinaryPartitioner)
  Hadoop::Mapreduce::Lib::Partition::BinaryPartitioner.set_offsets(@job.configuration, start_index, end_index)
  @job.set_grouping_comparator_class(Humboldt::JavaLib::BinaryComparator)
  Humboldt::JavaLib::BinaryComparator.set_offsets(@job.configuration, start_index, end_index)
end
|
ruby
|
{
"resource": ""
}
|
q3053
|
Scruffy::Layers.Base.render
|
train
|
# Template method for rendering a layer: prime the layer state from the
# options, translate the data into pixel coordinates, then draw.
def render(svg, options)
  setup_variables(options)
  draw(svg, generate_coordinates(options), options)
end
|
ruby
|
{
"resource": ""
}
|
q3054
|
Scruffy::Layers.Base.setup_variables
|
train
|
# Primes the layer's instance state from render options. Color/outline
# prefer the layer's own settings (and only consume the option when no
# preference exists); :size is consumed destructively into width/height.
def setup_variables(options = {})
  @color = preferred_color || options.delete(:color)
  @outline = preferred_outline || options.delete(:outline)
  @width, @height = options.delete(:size)
  @min_value = options[:min_value]
  @max_value = options[:max_value]
  @opacity = options[:opacity] || 1.0
  @complexity = options[:complexity]
end
|
ruby
|
{
"resource": ""
}
|
q3055
|
ShipCompliant.ProductAttributes.to_h
|
train
|
# Returns the product details with every key (recursively) converted to
# PascalCase, honoring the SPECIAL_CASES overrides for keys whose
# capitalization does not follow camelize's rules.
def to_h
  details.deep_transform_keys do |key|
    SPECIAL_CASES.fetch(key) { key.to_s.camelize }
  end
end
|
ruby
|
{
"resource": ""
}
|
q3056
|
TinyRails.Actions.gem
|
train
|
# Appends a `gem` entry to the project's Gemfile.
#
# Accepts a gem name, an optional version (positionally or via :version),
# and arbitrary options which become keyword entries on the gem line.
# The status message shows "name (version)" or, when :git is given, the
# repo URL instead.
def gem(*args)
  options = extract_options!(args)
  name, version = args
  # Set the message to be shown in logs. Uses the git repo if one is given,
  # otherwise use name (version).
  parts, message = [ name.inspect ], name
  if version ||= options.delete(:version)
    parts << version.inspect
    message << " (#{version})"
  end
  message = options[:git] if options[:git]
  say_status :gemfile, message
  options.each do |option, value|
    parts << "#{option}: #{value.inspect}"
  end
  in_root do
    str = "gem #{parts.join(", ")}"
    # Leading newline keeps entries on separate lines when appending.
    str = "\n" + str
    append_file "Gemfile", str, :verbose => false
  end
end
|
ruby
|
{
"resource": ""
}
|
q3057
|
TinyRails.Actions.application
|
train
|
# Injects configuration text into boot.rb right after the secret_token
# line. Content may be passed directly or produced by a block, and is
# padded with leading/trailing newlines as needed.
def application(data=nil, &block)
  data = yield if !data && block_given?
  data = "\n#{data}" unless data =~ /^\n/
  data << "\n" unless data =~ /\n$/
  inject_into_file 'boot.rb', data, :after => /^ config\.secret_token = .+\n/
end
|
ruby
|
{
"resource": ""
}
|
q3058
|
MongoMysqlRelations.ClassMethods.to_mysql_belongs_to
|
train
|
# Defines a belongs-to style association from a Mongo document to a MySQL
# record: a "#{name}_id" field, a memoized reader (with an optional
# reload flag), and a writer that stores the id and clears the memo.
#
# Fix: the writer was previously defined as
# define_method("#{name}=(new_instance)") — that creates a method literally
# named "foo=(new_instance)" taking no arguments and referencing an
# undefined variable, so `record.foo = other` never worked. The block now
# declares |new_instance| and the method is named "#{name}=".
def to_mysql_belongs_to(name, options = {})
  field "#{name}_id", type: Integer
  object_class = options[:class] || name.to_s.titleize.delete(' ').constantize
  self.instance_eval do
    define_method(name) do |reload = false|
      if reload
        self.instance_variable_set("@#{name}", nil)
      end
      if self.instance_variable_get("@#{name}").blank?
        self.instance_variable_set("@#{name}", object_class.where(object_class.primary_key => self.send("#{name}_id")).first)
      end
      self.instance_variable_get("@#{name}")
    end
    define_method("#{name}=") do |new_instance|
      self.send("#{name}_id=", new_instance.id)
      self.instance_variable_set("@#{name}", nil)
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q3059
|
GraphQL::Relay::Walker.QueryBuilder.build_query
|
train
|
# Parses the BASE_QUERY template and fills its first selection set with one
# inline fragment per included node type, returning the document AST.
def build_query
  GraphQL.parse(BASE_QUERY).tap do |d_ast|
    # Selections of the first field of the first definition (the node field).
    selections = d_ast.definitions.first.selections.first.selections
    node_types.each do |type|
      selections << inline_fragment_ast(type) if include?(type)
    end
    selections.compact!
  end
end
|
ruby
|
{
"resource": ""
}
|
q3060
|
GraphQL::Relay::Walker.QueryBuilder.inline_fragment_ast
|
train
|
# Builds an inline fragment AST (`... on Type { ... }`) for +type+.
#
# With with_children (default) the fragment selects every node/connection
# field of the type that passes the include? filter; otherwise it selects
# only the type's id field (when present). Returns nil when no selections
# survive, so callers can compact it away.
def inline_fragment_ast(type, with_children: true)
  selections = []
  if with_children
    type.all_fields.each do |field|
      field_type = field.type.unwrap
      if node_field?(field) && include?(field_type)
        selections << node_field_ast(field)
      elsif connection_field?(field) && include?(field_type)
        selections << connection_field_ast(field)
      end
    end
  elsif id = type.get_field('id')
    selections << field_ast(id)
  end
  selections.compact!
  if selections.none?
    nil
  else
    GraphQL::Language::Nodes::InlineFragment.new(
      type: make_type_name_node(type.name),
      selections: selections,
    )
  end
end
|
ruby
|
{
"resource": ""
}
|
q3061
|
GraphQL::Relay::Walker.QueryBuilder.field_ast
|
train
|
# Builds a Field AST node for +field+ with the given +arguments+.
#
# Returns nil when the field has required arguments that are neither
# supplied nor nullable (the query could not be executed). Fields other
# than 'id' get a random alias so repeated selections don't collide.
def field_ast(field, arguments = {}, &blk)
  type = field.type.unwrap
  # Bail unless we have the required arguments.
  required_args_are_present = field.arguments.all? do |arg_name, arg|
    arguments.key?(arg_name) || valid_input?(arg.type, nil)
  end
  if !required_args_are_present
    nil
  else
    f_alias = field.name == 'id' ? nil : random_alias
    f_args = arguments.map do |name, value|
      GraphQL::Language::Nodes::Argument.new(name: name, value: value)
    end
    GraphQL::Language::Nodes::Field.new(name: field.name, alias: f_alias, arguments: f_args)
  end
end
|
ruby
|
{
"resource": ""
}
|
q3062
|
GraphQL::Relay::Walker.QueryBuilder.node_field_ast
|
train
|
# Builds the Field AST for a node-returning field, always selecting an id:
# directly for object types, or via per-type inline fragments (without
# children) for interface/union types. Returns nil when the base field AST
# cannot be built.
def node_field_ast(field)
  f_ast = field_ast(field)
  return nil if f_ast.nil?
  type = field.type.unwrap
  selections = f_ast.selections.dup
  if type.kind.object?
    selections << field_ast(type.get_field('id'))
  else
    possible_node_types(type).each do |if_type|
      selections << inline_fragment_ast(if_type, with_children: false)
    end
  end
  selections.compact!
  if f_ast.respond_to?(:merge) # GraphQL-Ruby 1.9+
    f_ast = f_ast.merge(selections: selections)
  else
    f_ast.selections = selections
  end
  f_ast
end
|
ruby
|
{
"resource": ""
}
|
q3063
|
GraphQL::Relay::Walker.QueryBuilder.edges_field_ast
|
train
|
# Builds the Field AST for a connection's `edges` field, adding a node
# selection beneath it. Returns nil when the base field AST cannot be built.
def edges_field_ast(field)
  f_ast = field_ast(field)
  return nil unless f_ast
  node_fields = [node_field_ast(field.type.unwrap.get_field('node'))]
  # GraphQL-Ruby 1.9+ ASTs are immutable and support merge.
  return f_ast.merge(selections: f_ast.selections + node_fields) if f_ast.respond_to?(:merge)
  f_ast.selections.concat(node_fields)
  f_ast
end
|
ruby
|
{
"resource": ""
}
|
q3064
|
GraphQL::Relay::Walker.QueryBuilder.connection_field_ast
|
train
|
# Builds the Field AST for a connection field, applying the standard
# connection arguments and selecting its `edges`. Returns nil when the
# base field AST cannot be built.
def connection_field_ast(field)
  f_ast = field_ast(field, connection_arguments)
  return nil unless f_ast
  edges_fields = [edges_field_ast(field.type.unwrap.get_field('edges'))]
  # GraphQL-Ruby 1.9+ ASTs are immutable and support merge.
  return f_ast.merge(selections: f_ast.selections + edges_fields) if f_ast.respond_to?(:merge)
  f_ast.selections.concat(edges_fields)
  f_ast
end
|
ruby
|
{
"resource": ""
}
|
q3065
|
GraphQL::Relay::Walker.QueryBuilder.node_field?
|
train
|
# Returns whether +field+ yields walkable nodes: an object type must be one
# of the known node types; an interface/union must have at least one
# possible node type. Other kinds yield nil (falsy).
def node_field?(field)
  unwrapped = field.type.unwrap
  if unwrapped.kind.object?
    node_types.include?(unwrapped)
  elsif unwrapped.kind.interface? || unwrapped.kind.union?
    possible_node_types(unwrapped).any?
  end
end
|
ruby
|
{
"resource": ""
}
|
q3066
|
GraphQL::Relay::Walker.QueryBuilder.connection_field?
|
train
|
# Whether this field looks like a relay connection: it has an `edges`
# field whose type has a `node` field that itself qualifies as a node
# field. Returns false when the edges/node structure is absent.
def connection_field?(field)
  type = field.type.unwrap
  edges_field = type.get_field('edges')
  return false unless edges_field

  node = edges_field.type.unwrap.get_field('node')
  return false unless node

  node_field?(node)
end
|
ruby
|
{
"resource": ""
}
|
q3067
|
GraphQL::Relay::Walker.QueryBuilder.possible_types
|
train
|
# Concrete types behind an abstract type: schema lookup for interfaces,
# the type's own list for unions; nil for anything else.
def possible_types(type)
  kind = type.kind
  return schema.possible_types(type) if kind.interface?
  return type.possible_types if kind.union?
end
|
ruby
|
{
"resource": ""
}
|
q3068
|
ShipCompliant.ShipmentCompliance.rules
|
train
|
# Compliance rules from the response, wrapped as ComplianceRule objects.
# Returns [] when the response carried no rules section.
def rules
  rule_data = result[:rules]
  return [] if rule_data.nil?

  Array.wrap(rule_data[:rule_compliance_response]).map { |r| ComplianceRule.new(r) }
end
|
ruby
|
{
"resource": ""
}
|
q3069
|
Ccavenue.Payment.request
|
train
|
# Builds and encrypts the CCAvenue transaction request.
# Billing fields are required; delivery fields default to "" (treated as
# same-as-billing by the gateway). Returns the AES-128-CBC encrypted payload.
def request(order_Id,amount,billing_cust_name,billing_cust_address,billing_cust_city,billing_zip_code,billing_cust_state,billing_cust_country,billing_cust_email,billing_cust_tel,billing_cust_notes="",delivery_cust_name="",delivery_cust_address="",delivery_cust_city="",delivery_zip_code="",delivery_cust_state="",delivery_cust_country="",delivery_cust_email="",delivery_cust_tel="",delivery_cust_notes="")
  checksum = getChecksum(order_Id, amount)
  # Insertion order of this hash preserves the original parameter order.
  params = {
    'Merchant_Id' => @merchant_Id,
    'Amount' => amount,
    'Order_Id' => order_Id,
    'Redirect_Url' => @redirect_Url,
    'billing_cust_name' => billing_cust_name,
    'billing_cust_address' => billing_cust_address,
    'billing_cust_country' => billing_cust_country,
    'billing_cust_state' => billing_cust_state,
    'billing_cust_city' => billing_cust_city,
    'billing_zip_code' => billing_zip_code,
    'billing_cust_tel' => billing_cust_tel,
    'billing_cust_email' => billing_cust_email,
    'billing_cust_notes' => billing_cust_notes,
    'delivery_cust_name' => delivery_cust_name,
    'delivery_cust_address' => delivery_cust_address,
    'delivery_cust_country' => delivery_cust_country,
    'delivery_cust_state' => delivery_cust_state,
    'delivery_cust_city' => delivery_cust_city,
    'delivery_zip_code' => delivery_zip_code,
    'delivery_cust_tel' => delivery_cust_tel,
    # Bug fix: this key was previously duplicated as "billing_cust_notes",
    # so the delivery notes were sent under the wrong parameter name.
    'delivery_cust_notes' => delivery_cust_notes,
    'Checksum' => checksum.to_s
  }
  raw_request = params.map { |k, v| "#{k}=#{v}" }.join('&')
  encrypt_data(raw_request, @working_Key, "AES-128-CBC")[0]
end
|
ruby
|
{
"resource": ""
}
|
q3070
|
Ccavenue.Payment.response
|
train
|
# Decrypts and parses a CCAvenue gateway response.
# Returns [auth_desc, checksum_valid?, parsed_params].
def response(response)
  parsed = CGI::parse(decrypt_data(response, @working_Key, "AES-128-CBC"))

  auth_desc, order_id, amount, checksum =
    %w[AuthDesc Order_Id Amount Checksum].map { |key| parsed[key][0] }

  verification = verifyChecksum(order_id, amount, auth_desc, checksum)
  return auth_desc, verification, parsed
end
|
ruby
|
{
"resource": ""
}
|
q3071
|
Ccavenue.Payment.verifyChecksum
|
train
|
# Validates a gateway response checksum: Adler-32 of the pipe-joined
# merchant id, order id, amount, auth description and working key must
# match the checksum the gateway sent. Returns true/false.
def verifyChecksum(order_Id, amount, authDesc, checksum)
  # The original `String str = ...` form accidentally invoked Kernel#String;
  # a plain assignment (and Array#join) is what was intended.
  data = [@merchant_Id, order_Id, amount, authDesc, @working_Key].join('|')
  Zlib.adler32(data).to_s == checksum
end
|
ruby
|
{
"resource": ""
}
|
q3072
|
Ccavenue.Payment.hextobin
|
train
|
# Converts a hexadecimal string into its packed binary representation.
# Replaces the manual two-characters-at-a-time loop with Array#pack('H*'),
# which is byte-for-byte equivalent — including the odd-length case, where
# a trailing lone hex digit becomes the high nibble of a final byte.
def hextobin(hexstring)
  [hexstring].pack('H*')
end
|
ruby
|
{
"resource": ""
}
|
q3073
|
GraphQL::Relay::Walker.ClientExt.walk
|
train
|
# Walks the relay graph starting at +from_id+: one query per frame, the
# response is stashed on the frame's context, its data (if any) becomes the
# frame result, and discovered GIDs are enqueued. Yields each frame when a
# block is given.
def walk(from_id:, except: nil, only: nil, variables: {}, context: {})
  walker_query = parse(
    GraphQL::Relay::Walker.query_string(schema, except: except, only: only)
  )

  GraphQL::Relay::Walker.walk(from_id: from_id) do |frame|
    response = query(
      walker_query,
      variables: variables.merge('id' => frame.gid),
      context: context
    )

    frame.context[:response] = response
    data = response.respond_to?(:data) && response.data
    frame.result = data ? data.to_h : {}
    frame.enqueue_found_gids
    yield(frame) if block_given?
  end
end
|
ruby
|
{
"resource": ""
}
|
q3074
|
Anyplayer.Selector.player
|
train
|
# Returns an instance of the first player that loads, runs on this
# platform, and is currently launched; nil when none qualify.
def player
  PLAYERS.each do |name|
    next unless player_load(name)

    candidate = player_class(name).new
    next unless player_on_platform?(candidate)

    return candidate if player_launched?(candidate)
  end
  nil
end
|
ruby
|
{
"resource": ""
}
|
q3075
|
Scruffy::Helpers.Canvas.bounds_for
|
train
|
# Translates percentage-based [x, y] position and [w, h] size pairs into
# absolute pixel bounds within +canvas_size+ ([width, height]).
# Returns nil when either +position+ or +size+ is missing.
def bounds_for(canvas_size, position, size)
  return nil if position.nil? || size.nil?

  canvas_w, canvas_h = canvas_size.first, canvas_size.last
  {
    :x => canvas_w * (position.first / 100.to_f),
    :y => canvas_h * (position.last / 100.to_f),
    :width => canvas_w * (size.first / 100.to_f),
    :height => canvas_h * (size.last / 100.to_f)
  }
end
|
ruby
|
{
"resource": ""
}
|
q3076
|
Scruffy::Formatters.Base.route_format
|
train
|
# Routes a value to the subclass's formatter: format() returns the formatted
# value; format!() mutates +target+ in place, after which +target+ itself is
# returned. Arguments are trimmed to the arity of the subclass method.
# Raises NameError when the subclass defines neither.
def route_format(target, idx, options = {})
  args = [target, idx, options]
  if respond_to?(:format)
    send :format, *args[0...method(:format).arity]
  elsif respond_to?(:format!)
    send :format!, *args[0...method(:format!).arity]
    target
  else
    # Typo fixed: "container" -> "contain".
    raise NameError, "Formatter subclass must contain either a format() method or format!() method."
  end
end
|
ruby
|
{
"resource": ""
}
|
q3077
|
Scruffy::Formatters.Number.format
|
train
|
# Formats +target+ as a thousands-delimited number string.
#
# Precision handling: :auto derives precision from the longest fractional
# part across options[:all_values] (capped at @precision_limit); :none
# forces 0; any other value is used as-is. The separator is suppressed
# when the effective precision is 0. When @roundup is not :none, the value
# is instead rounded up and rendered as a bare integer.
# On any error the raw +target+ is returned unchanged.
def format(target, idx, options)
  my_precision = @precision

  if @precision == :auto
    # Longest fractional length among all chart values drives the precision.
    my_precision = options[:all_values].inject(0) do |highest, current|
      cur = current.to_f.to_s.split(".").last.size
      cur > highest ? cur : highest
    end

    my_precision = @precision_limit if my_precision > @precision_limit
  elsif @precision == :none
    my_precision = 0
  end

  my_separator = @separator
  my_separator = "" unless my_precision > 0

  begin
    number = ""
    if @roundup == :none
      parts = number_with_precision(target, my_precision).split('.')
      # Regex inserts @delimiter between every group of three integer digits.
      number = parts[0].to_s.gsub(/(\d)(?=(\d\d\d)+(?!\d))/, "\\1#{@delimiter}") + my_separator + parts[1].to_s
    else
      number = roundup(target.to_f, @roundup).to_i.to_s
    end
    number
  rescue StandardError => e
    # Fall back to the unformatted value rather than breaking the chart.
    target
  end
end
|
ruby
|
{
"resource": ""
}
|
q3078
|
Scruffy::Formatters.Currency.format
|
train
|
# Formats +target+ as a currency string with unit, delimiter and separator.
# Negative values may be rendered in accounting style "(unit123)" when
# @special_negatives is set, and may set options[:marker_color_override]
# to @negative_color. On any error the raw +target+ is returned.
#
# NOTE(review): this permanently clears @separator on the instance when
# @precision is 0 (side effect persists across calls) — confirm intended.
def format(target, idx, options)
  @separator = "" unless @precision > 0
  begin
    parts = number_with_precision(target, @precision).split('.')
    if @special_negatives && (target.to_f < 0)
      # Accounting style: absolute value wrapped in parentheses.
      number = "(" + @unit + parts[0].to_i.abs.to_s.gsub(/(\d)(?=(\d\d\d)+(?!\d))/, "\\1#{@delimiter}") + @separator + parts[1].to_s + ")"
    else
      number = @unit + parts[0].to_s.gsub(/(\d)(?=(\d\d\d)+(?!\d))/, "\\1#{@delimiter}") + @separator + parts[1].to_s
      # Move a leading minus outside the currency unit ("$-5" -> "-$5").
      number.gsub!(@unit + '-', '-' + @unit)
    end
    if (target.to_f < 0) && @negative_color
      options[:marker_color_override] = @negative_color
    else
      options[:marker_color_override] = nil
    end
    number
  rescue
    # Fall back to the unformatted value rather than breaking the chart.
    target
  end
end
|
ruby
|
{
"resource": ""
}
|
q3079
|
Scruffy::Formatters.Percentage.format
|
train
|
# Formats +target+ as a percentage string using the configured precision
# and separator. Falls back to returning +target+ untouched on any error.
def format(target)
  whole, fraction = number_with_precision(target, @precision).split('.')
  if fraction.nil?
    whole + "%"
  else
    whole + @separator + fraction.to_s + "%"
  end
rescue
  target
end
|
ruby
|
{
"resource": ""
}
|
q3080
|
OpenTox.PhysChem.openbabel
|
train
|
# Computes a single OpenBabel descriptor for +compound+ (parsed from its
# SMILES string) and returns a one-entry hash keyed "<Library>.<descriptor>".
def openbabel descriptor, compound
  obdescriptor = OpenBabel::OBDescriptor.find_type descriptor
  obmol = OpenBabel::OBMol.new
  conversion = OpenBabel::OBConversion.new
  conversion.set_in_format 'smi'
  conversion.read_string obmol, compound.smiles
  value = fix_value(obdescriptor.predict(obmol))
  { "#{library.capitalize}.#{descriptor}" => value }
end
|
ruby
|
{
"resource": ""
}
|
q3081
|
Tsuga::Model.Tile.neighbours
|
train
|
# Tiles in the 3x3 grid around (and including) this tile. Offsets that
# fall off the world map raise ArgumentError in #neighbour and are skipped.
def neighbours
  result = []
  (-1..1).each do |lat|
    (-1..1).each do |lng|
      begin
        result << neighbour(lat: lat, lng: lng)
      rescue ArgumentError
        # world boundary — no tile at this offset
      end
    end
  end
  result
end
|
ruby
|
{
"resource": ""
}
|
q3082
|
Cog.Seed.stamp_class
|
train
|
# Stamps a seed source file for the active language at +path+, plus a
# companion header file when the language defines one. Raises when the
# language (from opt[:language]) does not support seeds.
#
# @in_header and @header_path are set before each stamp call — presumably
# read by the seed templates while rendering; TODO confirm against the
# template sources.
def stamp_class(path, opt={})
  Cog.activate_language opt[:language] do
    l = Cog.active_language
    raise Errors::ActiveLanguageDoesNotSupportSeeds.new :language => l if l.nil? || l.seed_extension.nil?
    @in_header = false
    @header_path = if l.seed_header
      "#{path}.#{l.seed_header}"
    end
    # Main seed source file.
    stamp "cog/#{l.key}/seed.#{l.seed_extension}", "#{path}.#{l.seed_extension}"
    if l.seed_header
      # Companion header, stamped with @in_header flagged true.
      @in_header = true
      stamp "cog/#{l.key}/seed.#{l.seed_header}", @header_path
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q3083
|
Scruffy::Layers.Box.draw
|
train
|
# Draws box-plot glyphs into +svg+. Each coord is [x, ys] where ys holds
# five y values: whisker extremes (ys[0], ys[4]), box top/bottom
# (ys[1], ys[3]) and the median line (ys[2]).
#
# NOTE(review): `outline.to_s || ...` can never fall through — nil.to_s is
# "" which is truthy in Ruby — so the theme-marker/'white' fallbacks are
# dead code. Left as-is because "fixing" it would change rendered output
# when outline is nil; confirm desired behavior first.
def draw(svg, coords, options = {})
  coords.each_with_index do |coord,idx|
    x, y, bar_height = (coord.first), coord.last, 1#(height - coord.last)
    valh = max_value + min_value * -1 #value_height
    maxh = max_value * height / valh #positive area height
    minh = min_value * height / valh #negative area height
    #puts "height = #{height} and max_value = #{max_value} and min_value = #{min_value} and y = #{y} and point = #{points[idx]}"
    #if points[idx] > 0
    #  bar_height = points[idx]*maxh/max_value
    #else
    #  bar_height = points[idx]*minh/min_value
    #end
    #puts " y = #{y} and point = #{points[idx]}"
    #svg.g(:transform => "translate(-#{relative(0.5)}, -#{relative(0.5)})") {
    #  svg.rect( :x => x, :y => y, :width => @bar_width + relative(1), :height => bar_height + relative(1),
    #    :style => "fill: black; fill-opacity: 0.15; stroke: none;" )
    #  svg.rect( :x => x+relative(0.5), :y => y+relative(2), :width => @bar_width + relative(1), :height => bar_height - relative(0.5),
    #    :style => "fill: black; fill-opacity: 0.15; stroke: none;" )
    #
    #}
    # Vertical whisker line between the two extremes.
    svg.line(:x1=>x+@bar_width/2,:x2=>x+@bar_width/2,:y1=>y[0],:y2=>y[4], :style => "stroke:#{(outline.to_s || options[:theme].marker || 'white').to_s}; stroke-width:1")
    # Horizontal caps at each whisker end.
    svg.line(:x1=>x+@bar_width/4,:x2=>x+@bar_width/4*3,:y1=>y[0],:y2=>y[0], :style => "stroke:#{(outline.to_s || options[:theme].marker || 'white').to_s}; stroke-width:1")
    svg.line(:x1=>x+@bar_width/4,:x2=>x+@bar_width/4*3,:y1=>y[4],:y2=>y[4], :style => "stroke:#{(outline.to_s || options[:theme].marker || 'white').to_s}; stroke-width:1")
    # The box itself (height derived from ys[1]..ys[3]).
    svg.rect( :x => x, :y => y[1], :width => @bar_width, :height => (y[1]-y[3])*-1,
        :fill => color.to_s, 'style' => "opacity: #{opacity}; stroke:#{(outline.to_s || options[:theme].marker || 'white').to_s}; stroke-width:1;" )
    # Median line across the full box width.
    svg.line(:x1=>x,:x2=>x+@bar_width,:y1=>y[2],:y2=>y[2], :style => "stroke:#{(outline.to_s || options[:theme].marker || 'white').to_s}; stroke-width:1")
    #svg.rect( :x => x, :y => y, :width => @bar_width, :height => bar_height,
    #  :fill => color.to_s, 'style' => "opacity: #{opacity}; stroke: none;" )
  end
end
|
ruby
|
{
"resource": ""
}
|
q3084
|
Cog.Embeds.copy_keeps
|
train
|
# Copies "keep" blocks from +original+ into +scratch+ so regenerating a
# file preserves hand-edited sections. When +original+ does not exist yet,
# +scratch+ is used as its own source. Raises UnrecognizedKeepHook when a
# keep hook in the source has no match in the scratch file.
def copy_keeps(original, scratch)
  Cog.activate_language(:filename => original) do
    # File.exist? replaces File.exists?, which was deprecated and removed
    # in Ruby 3.2.
    original = scratch unless File.exist? original
    keeps = gather_keeps original, scratch
    keeps.each_pair do |hook, c|
      result = update c, :type => 'keep' do |c|
        c.keep_body
      end
      raise Errors::UnrecognizedKeepHook.new :hook => hook, :filename => original if result.nil?
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q3085
|
Scruffy::Layers.Multi.render
|
train
|
# Renders each child layer into +svg+, giving every layer its index
# (:position), the total layer count (:num_bars), and a colour: the
# layer's preferred colour, its own colour, or the theme's next colour.
def render(svg, options = {})
  #TODO ensure this works with new points
  #current_points = points
  layers.each_with_index do |layer,i|
    #real_points = layer.points
    #layer.points = current_points
    layer_options = options.dup
    layer_options[:num_bars] = layers.size
    layer_options[:position] = i
    layer_options[:color] = layer.preferred_color || layer.color || options[:theme].next_color
    layer.render(svg, layer_options)
    # NOTE(review): merge (non-bang) returns a new hash that is discarded;
    # either merge! was intended or this line is dead code — confirm
    # before changing, as merge! would alter later iterations.
    options.merge(layer_options)
    #layer.points = real_points
    #layer.points.each_with_index { |val, idx| current_points[idx] -= val }
  end
end
|
ruby
|
{
"resource": ""
}
|
q3086
|
Scruffy::Layers.Bar.draw
|
train
|
# Draws one bar per coordinate into +svg+. Bar height is scaled from the
# point's value against the positive (maxh) or negative (minh) portion of
# the plot area. An optional drop-shadow is drawn unless
# options[:border] == false. Nil coords are skipped. +color+ may be an
# array, cycled per bar index.
def draw(svg, coords, options = {})
  coords.each_with_index do |coord,idx|
    next if coord.nil?
    x, y, bar_height = (coord.first), coord.last, 1#(height - coord.last)
    valh = max_value + min_value * -1 #value_height
    maxh = max_value * height / valh #positive area height
    minh = min_value * height / valh #negative area height
    #puts "height = #{height} and max_value = #{max_value} and min_value = #{min_value} and y = #{y} and point = #{points[idx]}"
    # Scale the bar within the positive or negative region of the chart.
    if points[idx] > 0
      bar_height = points[idx]*maxh/max_value
    else
      bar_height = points[idx]*minh/min_value
    end
    #puts " y = #{y} and point = #{points[idx]}"
    unless options[:border] == false
      # Two translucent black rects form a simple drop-shadow behind the bar.
      svg.g(:transform => "translate(-#{relative(0.5)}, -#{relative(0.5)})") {
        svg.rect( :x => x, :y => y, :width => @bar_width + relative(1), :height => bar_height + relative(1),
          :style => "fill: black; fill-opacity: 0.15; stroke: none;" )
        svg.rect( :x => x+relative(0.5), :y => y+relative(2), :width => @bar_width + relative(1), :height => bar_height - relative(0.5),
          :style => "fill: black; fill-opacity: 0.15; stroke: none;" )
      }
    end
    # Cycle through colours when an array of colours was supplied.
    current_colour = color.is_a?(Array) ? color[idx % color.size] : color
    svg.rect( :x => x, :y => y, :width => @bar_width, :height => bar_height,
      :fill => current_colour.to_s, 'style' => "opacity: #{opacity}; stroke: none;" )
  end
end
|
ruby
|
{
"resource": ""
}
|
q3087
|
Cog.Generator.stamp
|
train
|
# Renders +template_path+ through the generator and either returns the
# result (no destination) or writes it to +destination+ via a scratch
# file, preserving keep blocks and skipping the write when nothing
# changed (or when opt[:once] is set and the file already exists).
def stamp(template_path, destination=nil, opt={})
  # Ignore destination if it's a hash; it was meant to be opt
  opt, destination = destination, nil if destination.is_a? Hash
  # Render and filter
  r = find_and_render template_path, opt
  r = filter_through r, opt[:filter]
  return r if destination.nil?
  # Place it in a file
  write_scratch_file(destination, r, opt[:absolute_destination]) do |path, scratch|
    # File.exist? replaces File.exists?, deprecated and removed in Ruby 3.2.
    updated = File.exist? path
    Embeds.copy_keeps(path, scratch)
    if files_are_same?(path, scratch) || (opt[:once] && updated)
      FileUtils.rm scratch
    else
      FileUtils.mv scratch, path
      STDOUT.write "#{updated ? :Updated : :Created} #{path.relative_to_project_root}\n".color(updated ? :white : :green) unless opt[:quiet]
    end
  end
  nil
end
|
ruby
|
{
"resource": ""
}
|
q3088
|
Cog.Generator.embed
|
train
|
# Updates every occurrence of the embed +hook+ via the given block,
# logging each successful update. One-time embeds consume a statement, so
# a running count is handed to each context to keep indices aligned.
def embed(hook, &block)
  eaten = 0 # consumed one-time embeds so far; used to adjust indices
  Embeds.find(hook) do |c|
    c.eaten = eaten
    next unless Embeds.update(c, &block)

    eaten += 1 if c.once?
    message = "Updated #{c.path.relative_to_project_root} - #{(c.index + 1).ordinalize} occurrence of embed '#{c.hook}'\n"
    STDOUT.write message.color(:white)
  end
end
|
ruby
|
{
"resource": ""
}
|
q3089
|
Elasticrawl.CombineJob.set_input_jobs
|
train
|
# Configures this combine job from a list of parse-job names: sums their
# segment (step) counts, collects their S3 input paths, and creates a
# single job step over the combined input. Jobs with no steps are skipped.
def set_input_jobs(input_jobs)
  segment_count = 0
  input_paths = []

  input_jobs.each do |job_name|
    input_job = Job.where(:job_name => job_name,
                          :type => 'Elasticrawl::ParseJob').first_or_initialize
    steps = input_job.job_steps.count
    next unless steps > 0

    segment_count += steps
    input_paths << set_input_path(input_job)
  end

  self.job_name = set_job_name
  self.job_desc = set_job_desc(segment_count)
  job_steps.push(create_job_step(input_paths.join(',')))
end
|
ruby
|
{
"resource": ""
}
|
q3090
|
Elasticrawl.CombineJob.run
|
train
|
# Launches the job flow on EMR using the configured EMR settings. On
# success, persists the job flow id and returns the result message;
# returns nil when no job flow id came back.
def run
  job_flow_id = run_job_flow(job_config['emr_config'])
  return unless job_flow_id.present?

  self.job_flow_id = job_flow_id
  save
  result_message
end
|
ruby
|
{
"resource": ""
}
|
q3091
|
Elasticrawl.CombineJob.set_input_path
|
train
|
# Builds the S3 URI glob matching a parse job's segment output files,
# filtered by the configured input_filter.
def set_input_path(input_job)
  filter = job_config['input_filter']
  build_s3_uri("/data/1-parse/#{input_job.job_name}/segments/*/#{filter}")
end
|
ruby
|
{
"resource": ""
}
|
q3092
|
Elasticrawl.Config.load_config
|
train
|
# Loads <config_file>.yml from the config dir and returns the parsed YAML.
# Raises FileAccessError on any read/parse failure and
# ConfigDirMissingError when the config dir has not been initialized.
def load_config(config_file)
  raise ConfigDirMissingError, 'Config dir missing. Run init command' unless dir_exists?

  begin
    # YAML.load_file opens and closes the file itself; the previous
    # YAML::load(File.open(...)) leaked the file handle until GC.
    YAML.load_file(File.join(config_dir, "#{config_file}.yml"))
  rescue StandardError => e
    raise FileAccessError, e.message
  end
end
|
ruby
|
{
"resource": ""
}
|
q3093
|
Elasticrawl.Config.load_database
|
train
|
# Connects ActiveRecord to the SQLite database in the config dir and runs
# pending migrations (honouring ENV['VERSION'] as a target version).
# Raises DatabaseAccessError on failure and ConfigDirMissingError when
# the config dir has not been initialized.
def load_database
  raise ConfigDirMissingError, 'Config dir missing. Run init command' unless dir_exists?

  db_config = {
    'adapter' => 'sqlite3',
    'database' => File.join(config_dir, DATABASE_FILE),
    'pool' => 5,
    'timeout' => 5000
  }

  begin
    ActiveRecord::Base.establish_connection(db_config)
    migrations_path = File.join(File.dirname(__FILE__), '../../db/migrate')
    target_version = ENV['VERSION'] ? ENV['VERSION'].to_i : nil
    ActiveRecord::Migrator.migrate(migrations_path, target_version)
  rescue StandardError => e
    raise DatabaseAccessError, e.message
  end
end
|
ruby
|
{
"resource": ""
}
|
q3094
|
Elasticrawl.Config.bucket_exists?
|
train
|
# True if the named S3 bucket exists. Translates a signature mismatch into
# AWSCredentialsInvalidError and any other AWS failure into S3AccessError.
def bucket_exists?(bucket_name)
  AWS::S3.new.buckets[bucket_name].exists?
rescue AWS::S3::Errors::SignatureDoesNotMatch
  raise AWSCredentialsInvalidError, 'AWS access credentials are invalid'
rescue AWS::Errors::Base => s3e
  raise S3AccessError.new(s3e.http_response), s3e.message
end
|
ruby
|
{
"resource": ""
}
|
q3095
|
Elasticrawl.Config.create_bucket
|
train
|
# Creates the named S3 bucket, translating AWS failures into S3AccessError.
def create_bucket(bucket_name)
  AWS::S3.new.buckets.create(bucket_name)
rescue AWS::Errors::Base => s3e
  raise S3AccessError.new(s3e.http_response), s3e.message
end
|
ruby
|
{
"resource": ""
}
|
q3096
|
Elasticrawl.Config.delete_bucket
|
train
|
# Deletes the named S3 bucket and its contents (delete! empties first),
# translating AWS failures into S3AccessError.
def delete_bucket(bucket_name)
  AWS::S3.new.buckets[bucket_name].delete!
rescue AWS::Errors::Base => s3e
  raise S3AccessError.new(s3e.http_response), s3e.message
end
|
ruby
|
{
"resource": ""
}
|
q3097
|
Elasticrawl.Config.deploy_templates
|
train
|
# Copies the bundled template files into the config dir (creating the dir
# if needed), then writes the jobs config (with the bucket name) and the
# AWS credentials config. Raises FileAccessError on any failure.
def deploy_templates(bucket_name)
  begin
    # `unless dir_exists?` replaces the `dir_exists? == false` comparison.
    Dir.mkdir(config_dir, 0755) unless dir_exists?
    TEMPLATE_FILES.each do |template_file|
      FileUtils.cp(File.join(File.dirname(__FILE__), TEMPLATES_DIR, template_file),
                   File.join(config_dir, template_file))
    end
    save_config('jobs', { 'BUCKET_NAME' => bucket_name })
    save_aws_config
  rescue StandardError => e
    raise FileAccessError, e.message
  end
end
|
ruby
|
{
"resource": ""
}
|
q3098
|
Elasticrawl.Config.save_aws_config
|
train
|
# Persists AWS credentials to the aws config file, omitting any credential
# that merely mirrors the corresponding environment variable.
def save_aws_config
  creds = {}
  unless @access_key_id == ENV['AWS_ACCESS_KEY_ID']
    creds['ACCESS_KEY_ID'] = @access_key_id
  end
  unless @secret_access_key == ENV['AWS_SECRET_ACCESS_KEY']
    creds['SECRET_ACCESS_KEY'] = @secret_access_key
  end
  save_config('aws', creds)
end
|
ruby
|
{
"resource": ""
}
|
q3099
|
Elasticrawl.Config.save_config
|
train
|
# Fills in a config template: reads <template>.yml from the config dir,
# substitutes each params key with its value, and writes the file back.
def save_config(template, params)
  config_file = File.join(config_dir, "#{template}.yml")
  config = File.read(config_file)
  # `each`, not `map`: the block runs purely for its side effect.
  params.each { |key, value| config = config.gsub(key, value) }
  File.open(config_file, 'w') { |file| file.write(config) }
end
|
ruby
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.