_id (string, 2–6 chars) | title (string, 9–130 chars) | partition (string, 3 classes) | text (string, 30–4.3k chars) | language (string, 1 class) | meta_information (dict) |
---|---|---|---|---|---|
q3000
|
ActsAsRevisionable.ClassMethods.restore_revision
|
train
|
def restore_revision(id, revision_number)
revision_record = revision(id, revision_number)
|
ruby
|
{
"resource": ""
}
|
q3001
|
ActsAsRevisionable.ClassMethods.restore_revision!
|
train
|
def restore_revision!(id, revision_number)
record = restore_revision(id, revision_number)
if record
record.store_revision do
|
ruby
|
{
"resource": ""
}
|
q3002
|
ActsAsRevisionable.ClassMethods.restore_last_revision!
|
train
|
def restore_last_revision!(id)
record = restore_last_revision(id)
if record
record.store_revision do
|
ruby
|
{
"resource": ""
}
|
q3003
|
ActsAsRevisionable.ClassMethods.revisionable_associations
|
train
|
def revisionable_associations(options = acts_as_revisionable_options[:associations])
return nil unless options
options = [options] unless options.kind_of?(Array)
associations = {}
options.each do |association|
if association.kind_of?(Symbol)
associations[association] = true
elsif association.kind_of?(Hash)
|
ruby
|
{
"resource": ""
}
|
q3004
|
ActsAsRevisionable.InstanceMethods.store_revision
|
train
|
def store_revision
if new_record? || @revisions_disabled
return yield
else
retval = nil
revision = nil
begin
revision_record_class.transaction do
begin
read_only = self.class.first(:conditions => {self.class.primary_key => self.id}, :readonly => true)
if read_only
revision = read_only.create_revision!
truncate_revisions!
end
rescue => e
logger.warn(e) if logger
end
disable_revisioning do
retval = yield
end
raise ActiveRecord::Rollback unless errors.empty?
|
ruby
|
{
"resource": ""
}
|
q3005
|
ActsAsRevisionable.InstanceMethods.create_revision!
|
train
|
def create_revision!
revision_options = self.class.acts_as_revisionable_options
revision = revision_record_class.new(self, revision_options[:encoding])
if revision_options[:meta].is_a?(Hash)
revision_options[:meta].each do |attribute, value|
set_revision_meta_attribute(revision, attribute, value)
end
elsif revision_options[:meta].is_a?(Array)
revision_options[:meta].each do |attribute|
|
ruby
|
{
"resource": ""
}
|
q3006
|
ActsAsRevisionable.InstanceMethods.set_revision_meta_attribute
|
train
|
def set_revision_meta_attribute(revision, attribute, value)
case value
when Symbol
value = self.send(value)
when Proc
|
ruby
|
{
"resource": ""
}
|
q3007
|
ShipCompliant.BaseResult.errors
|
train
|
def errors
return [] if success?
@errors ||= Array.wrap(response[:errors]).map do |error|
|
ruby
|
{
"resource": ""
}
|
q3008
|
Scruffy.Graph.render
|
train
|
def render(options = {})
options[:theme] ||= theme
options[:value_formatter] ||= value_formatter
options[:key_formatter] ||= key_formatter
options[:point_markers] ||= point_markers
options[:point_markers_rotation] ||= point_markers_rotation
options[:point_markers_ticks] ||= point_markers_ticks
options[:size] ||= (options[:width] ? [options[:width], (options.delete(:width) * 0.6).to_i] : [600, 360])
options[:title] ||= title
options[:x_legend] ||= x_legend
options[:y_legend] ||= y_legend
options[:layers] ||= layers
options[:min_value] ||= bottom_value(options[:padding] ? options[:padding] : nil)
options[:max_value] ||= top_value(options[:padding] ? options[:padding] : nil)
options[:min_key] ||= bottom_key
options[:max_key] ||= top_key
options[:graph] ||= self
# Removed for now.
# Added for making smaller fonts more legible, but may not be needed after all.
#
|
ruby
|
{
"resource": ""
}
|
q3009
|
Scruffy::Layers.Stacked.render
|
train
|
def render(svg, options = {})
#TODO ensure this works with new points
current_points = points
layers.each do |layer|
real_points = layer.points
layer.points = current_points
layer_options = options.dup
layer_options[:color] = layer.preferred_color || layer.color || options[:theme].next_color
layer.render(svg,
|
ruby
|
{
"resource": ""
}
|
q3010
|
Scruffy::Layers.Stacked.legend_data
|
train
|
def legend_data
if relevant_data?
retval = []
layers.each do |layer|
retval << layer.legend_data
|
ruby
|
{
"resource": ""
}
|
q3011
|
Scruffy::Renderers.Base.render
|
train
|
def render(options = {})
options[:graph_id] ||= 'scruffy_graph'
options[:complexity] ||= (global_complexity || :normal)
# Allow subclasses to muck with components prior to renders.
rendertime_renderer = self.clone
rendertime_renderer.instance_eval { before_render if respond_to?(:before_render) }
svg = Builder::XmlMarkup.new(:indent => 2)
unless options[:no_doctype_header]
svg.instruct!
svg.declare! :DOCTYPE, :svg, :PUBLIC, "-//W3C//DTD SVG 1.0//EN", "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd"
end
svg.svg(:xmlns => "http://www.w3.org/2000/svg", 'xmlns:xlink' => "http://www.w3.org/1999/xlink", :width
|
ruby
|
{
"resource": ""
}
|
q3012
|
Elasticrawl.ParseJob.set_segments
|
train
|
def set_segments(crawl_segments, max_files = nil)
self.job_name = set_job_name
self.job_desc = set_job_desc(crawl_segments, max_files)
|
ruby
|
{
"resource": ""
}
|
q3013
|
Elasticrawl.ParseJob.run
|
train
|
def run
emr_config = job_config['emr_config']
job_flow_id = run_job_flow(emr_config)
if job_flow_id.present?
self.job_flow_id = job_flow_id
|
ruby
|
{
"resource": ""
}
|
q3014
|
Elasticrawl.ParseJob.segment_list
|
train
|
def segment_list
segments = ['Segments']
job_steps.each do |job_step|
if job_step.crawl_segment.present?
segment = job_step.crawl_segment
|
ruby
|
{
"resource": ""
}
|
q3015
|
Elasticrawl.ParseJob.set_job_desc
|
train
|
def set_job_desc(segments, max_files)
if segments.count > 0
crawl_name = segments[0].crawl.crawl_name if segments[0].crawl.present?
file_desc = max_files.nil? ? 'all files' : "#{max_files} files per
|
ruby
|
{
"resource": ""
}
|
q3016
|
PartialDate.Date.old_to_s
|
train
|
def old_to_s(format = :default)
format = FORMATS[format] if format.is_a?(Symbol)
result = format.dup
FORMAT_METHODS.each_pair do |key, value|
result.gsub!( key, value.call( self )) if result.include? key
end
# Remove any leading "/-," chars.
# Remove double white spaces.
# Remove any duplicate "/-," chars and replace with the single char.
# Remove any
|
ruby
|
{
"resource": ""
}
|
q3017
|
Scruffy::Layers.Line.draw
|
train
|
def draw(svg, coords, options={})
# Include options provided when the object was created
options.merge!(@options)
stroke_width = (options[:relativestroke]) ? relative(options[:stroke_width]) : options[:stroke_width]
style = (options[:style]) ? options[:style] : ''
if options[:shadow]
svg.g(:class => 'shadow', :transform => "translate(#{relative(0.5)}, #{relative(0.5)})") {
svg.polyline( :points => stringify_coords(coords).join(' '), :fill => 'transparent',
:stroke => 'black', 'stroke-width' => stroke_width,
:style => 'fill-opacity: 0; stroke-opacity: 0.35' )
if options[:dots]
coords.each { |coord| svg.circle( :cx => coord.first, :cy => coord.last + relative(0.9), :r => stroke_width,
:style => "stroke-width: #{stroke_width}; stroke: black; opacity: 0.35;" ) }
|
ruby
|
{
"resource": ""
}
|
q3018
|
Elasticrawl.Crawl.status
|
train
|
def status
total = self.crawl_segments.count
remaining = CrawlSegment.where(:crawl_id => self.id,
:parse_time => nil).count
parsed = total - remaining
status = self.crawl_name
|
ruby
|
{
"resource": ""
}
|
q3019
|
Elasticrawl.Crawl.create_segments
|
train
|
def create_segments
file_paths = warc_paths(self.crawl_name)
segments = parse_segments(file_paths)
save if segments.count > 0
segments.keys.each do |segment_name|
|
ruby
|
{
"resource": ""
}
|
q3020
|
Elasticrawl.Crawl.reset
|
train
|
def reset
segments = CrawlSegment.where('crawl_id = ? and parse_time is not null',
self.id)
|
ruby
|
{
"resource": ""
}
|
q3021
|
Elasticrawl.Crawl.warc_paths
|
train
|
def warc_paths(crawl_name)
s3_path = [Elasticrawl::COMMON_CRAWL_PATH,
crawl_name,
Elasticrawl::WARC_PATHS].join('/')
begin
s3 = AWS::S3.new
bucket = s3.buckets[Elasticrawl::COMMON_CRAWL_BUCKET]
object = bucket.objects[s3_path]
uncompress_file(object)
rescue AWS::Errors::Base => s3e
|
ruby
|
{
"resource": ""
}
|
q3022
|
Elasticrawl.Crawl.uncompress_file
|
train
|
def uncompress_file(s3_object)
result = ''
if s3_object.exists?
io = StringIO.new
io.write(s3_object.read)
io.rewind
|
ruby
|
{
"resource": ""
}
|
q3023
|
Elasticrawl.Crawl.parse_segments
|
train
|
def parse_segments(warc_paths)
segments = Hash.new 0
warc_paths.split.each do |warc_path|
segment_name = warc_path.split('/')[3]
|
ruby
|
{
"resource": ""
}
|
q3024
|
Scruffy::Layers.MultiArea.draw
|
train
|
def draw(svg, coords, options={})
# Check whether to use color from theme, or whether to use user defined colors from the area_colors array
color_count = nil
if @area_colors && @area_colors.size > 0
area_color = @area_colors[0]
color_count = 1
else
puts "Never Set Area Color"
area_color = color
end
# Draw Bottom Level Polygons (Original Coords)
draw_poly(svg, coords, area_color, options = {})
# Draw Lower Area Polygons
if @baselines
# Get the Color of this Area
puts "Drawing Baselines"
@baselines.sort! {|x,y| y <=> x }
@baselines.each do
|
ruby
|
{
"resource": ""
}
|
q3025
|
Sparkr.Sparkline.normalize_numbers
|
train
|
def normalize_numbers(_numbers)
numbers = _numbers.map(&:to_i)
|
ruby
|
{
"resource": ""
}
|
q3026
|
OpenTox.Nanoparticle.parse_ambit_value
|
train
|
def parse_ambit_value feature, v, dataset
# TODO add study id to warnings
v.delete "unit"
# TODO: ppm instead of weights
if v.keys == ["textValue"]
add_feature feature, v["textValue"], dataset
elsif v.keys == ["loValue"]
add_feature feature, v["loValue"], dataset
elsif v.keys.size == 2 and v["errorValue"]
add_feature feature, v["loValue"], dataset
#warn "Ignoring errorValue '#{v["errorValue"]}' for '#{feature.name}'."
elsif v.keys.size == 2 and v["loQualifier"] == "mean"
add_feature feature, v["loValue"], dataset
#warn "'#{feature.name}' is a mean value. Original data is not available."
elsif v.keys.size == 2 and v["loQualifier"] #== ">="
#warn "Only min value available for '#{feature.name}', entry ignored"
elsif v.keys.size == 2 and v["upQualifier"] #== ">="
#warn "Only max value available for '#{feature.name}', entry ignored"
|
ruby
|
{
"resource": ""
}
|
q3027
|
OpenTox.Dataset.substances
|
train
|
def substances
@substances ||= data_entries.keys.collect{|id|
|
ruby
|
{
"resource": ""
}
|
q3028
|
OpenTox.Dataset.features
|
train
|
def features
@features ||= data_entries.collect{|sid,data|
|
ruby
|
{
"resource": ""
}
|
q3029
|
OpenTox.Dataset.values
|
train
|
def values substance,feature
substance = substance.id if substance.is_a? Substance
feature = feature.id if feature.is_a? Feature
if data_entries[substance.to_s]
|
ruby
|
{
"resource": ""
}
|
q3030
|
OpenTox.Dataset.add
|
train
|
def add(substance,feature,value)
substance = substance.id if substance.is_a? Substance
feature = feature.id if feature.is_a? Feature
data_entries[substance.to_s] ||= {}
data_entries[substance.to_s][feature.to_s] ||= []
data_entries[substance.to_s][feature.to_s]
|
ruby
|
{
"resource": ""
}
|
q3031
|
OpenTox.Dataset.folds
|
train
|
def folds n
len = self.substances.size
indices = (0..len-1).to_a.shuffle
mid = (len/n)
chunks = []
start = 0
1.upto(n) do |i|
last = start+mid
last = last-1 unless len%n >= i
test_idxs = indices[start..last] || []
test_substances = test_idxs.collect{|i| substances[i]}
training_idxs = indices-test_idxs
training_substances = training_idxs.collect{|i| substances[i]}
chunk = [training_substances,test_substances].collect do |substances|
dataset = self.class.create(:name => "#{self.name} (Fold #{i-1})",:source => self.id )
|
ruby
|
{
"resource": ""
}
|
q3032
|
OpenTox.Dataset.to_csv
|
train
|
def to_csv(inchi=false)
CSV.generate() do |csv|
compound = substances.first.is_a? Compound
if compound
csv << [inchi ? "InChI" : "SMILES"] + features.collect{|f| f.name}
else
csv << ["Name"] + features.collect{|f| f.name}
end
substances.each do |substance|
if compound
name = (inchi ? substance.inchi : substance.smiles)
else
name =
|
ruby
|
{
"resource": ""
}
|
q3033
|
Rango.LoggerMixin.inspect
|
train
|
def inspect(*args)
if args.first.is_a?(Hash) && args.length.eql?(1)
args.first.each do |name, value|
|
ruby
|
{
"resource": ""
}
|
q3034
|
GraphQL::Relay::Walker.Queue.add
|
train
|
def add(frame)
return false if max_size && queue.length >= max_size
return false if seen.include?(frame.gid)
seen.add(frame.gid)
idx = random_idx ?
|
ruby
|
{
"resource": ""
}
|
q3035
|
GraphQL::Relay::Walker.Queue.add_gid
|
train
|
def add_gid(gid, parent = nil)
frame
|
ruby
|
{
"resource": ""
}
|
q3036
|
ShipCompliant.GetInventoryDetailsResult.location
|
train
|
def location(key)
location = locations.select { |l| l[:fulfillment_location] == key }.first
|
ruby
|
{
"resource": ""
}
|
q3037
|
Cog.Config.prepare
|
train
|
def prepare(opt={})
throw :ConfigInstanceAlreadyPrepared if @prepared && !opt[:force_reset]
@prepared = true
@fullpaths = opt[:fullpaths]
@project_path = nil
@project_generator_path = nil
@project_plugin_path = nil
@project_template_path = nil
@generator_path = []
@plugin_path = []
@template_path = []
@plugins
|
ruby
|
{
"resource": ""
}
|
q3038
|
ShipCompliant.CheckComplianceResult.taxes_for_shipment
|
train
|
def taxes_for_shipment(shipment_key)
shipment = shipment_sales_tax_rates.select { |s| s[:@shipment_key] == shipment_key }.first
# convert attribute keys to symbols
freight = attributes_to_symbols(shipment[:freight_sales_tax_rate])
# wrap products in ProductSalesTaxRate
|
ruby
|
{
"resource": ""
}
|
q3039
|
ShipCompliant.CheckComplianceResult.compliance_rules_for_shipment
|
train
|
def compliance_rules_for_shipment(shipment_key)
shipment = shipment_compliance_rules.select { |s| s[:key] ==
|
ruby
|
{
"resource": ""
}
|
q3040
|
PoiseProfiler.Config.gather_from_env
|
train
|
def gather_from_env
ENV.each do |key, value|
if key.downcase =~ /^poise(_|-)profiler_(.+)$/
|
ruby
|
{
"resource": ""
}
|
q3041
|
PoiseProfiler.Config.gather_from_node
|
train
|
def gather_from_node
return unless defined?(Chef.node)
(Chef.node['poise-profiler'] || {}).each do
|
ruby
|
{
"resource": ""
}
|
q3042
|
ShipCompliant.InventoryProduct.inventory_levels
|
train
|
def inventory_levels
levels = {}
product[:inventory_levels][:inventory_level].each do |level|
key = level[:inventory_type].underscore.to_sym
|
ruby
|
{
"resource": ""
}
|
q3043
|
Elasticrawl.Cluster.create_job_flow
|
train
|
def create_job_flow(job, emr_config = nil)
config = Config.new
Elasticity.configure do |c|
c.access_key = config.access_key_id
c.secret_key = config.secret_access_key
end
job_flow = Elasticity::JobFlow.new
job_flow.name = "Job: #{job.job_name} #{job.job_desc}"
job_flow.log_uri = job.log_uri
|
ruby
|
{
"resource": ""
}
|
q3044
|
Elasticrawl.Cluster.configure_job_flow
|
train
|
def configure_job_flow(job_flow)
ec2_key_name = config_setting('ec2_key_name')
placement = config_setting('placement')
emr_ami_version = config_setting('emr_ami_version')
job_flow_role = config_setting('job_flow_role')
service_role = config_setting('service_role')
ec2_subnet_id = config_setting('ec2_subnet_id')
job_flow.ec2_subnet_id = ec2_subnet_id if ec2_subnet_id.present?
job_flow.ec2_key_name = ec2_key_name if ec2_key_name.present?
|
ruby
|
{
"resource": ""
}
|
q3045
|
Elasticrawl.Cluster.configure_bootstrap_actions
|
train
|
def configure_bootstrap_actions(job_flow, emr_config = nil)
bootstrap_scripts = config_setting('bootstrap_scripts')
if bootstrap_scripts.present?
bootstrap_scripts.each do |script_uri|
action = Elasticity::BootstrapAction.new(script_uri, '', '')
job_flow.add_bootstrap_action(action)
end
|
ruby
|
{
"resource": ""
}
|
q3046
|
Scruffy::Components.Legend.relevant_legend_info
|
train
|
def relevant_legend_info(layers, categories=(@options[:category] ? [@options[:category]] : @options[:categories]))
legend_info = layers.inject([]) do |arr, layer|
if categories.nil? ||
(categories.include?(layer.options[:category]) ||
(layer.options[:categories] && (categories & layer.options[:categories]).size > 0) )
|
ruby
|
{
"resource": ""
}
|
q3047
|
ShipCompliant.OrderSearch.to_h
|
train
|
def to_h
details.reject do |key|
!KEYS.include?(key)
|
ruby
|
{
"resource": ""
}
|
q3048
|
Scruffy::Renderers.Cubed3d.graph_block
|
train
|
def graph_block(graph_filter)
block = Proc.new { |components|
components << Scruffy::Components::Grid.new(:grid, :position => [10, 0], :size => [90, 89])
components << Scruffy::Components::ValueMarkers.new(:value_markers, :position => [0, 2], :size => [8, 89])
|
ruby
|
{
"resource": ""
}
|
q3049
|
Elasticrawl.JobStep.job_flow_step
|
train
|
def job_flow_step(job_config)
jar = job_config['jar']
max_files = self.job.max_files
step_args = []
step_args[0] = job_config['class']
step_args[1] = self.input_paths
|
ruby
|
{
"resource": ""
}
|
q3050
|
Elasticrawl.JobStep.set_step_name
|
train
|
def set_step_name
case self.job.type
when 'Elasticrawl::ParseJob'
if self.crawl_segment.present?
max_files = self.job.max_files || 'all'
"#{self.crawl_segment.segment_desc} Parsing: #{max_files}"
end
|
ruby
|
{
"resource": ""
}
|
q3051
|
GraphQL::Relay::Walker.Frame.found_gids
|
train
|
def found_gids(data = result)
[].tap do |ids|
case data
when Hash
ids.concat(Array(data['id']))
ids.concat(found_gids(data.values))
|
ruby
|
{
"resource": ""
}
|
q3052
|
Rubydoop.JobDefinition.secondary_sort
|
train
|
def secondary_sort(start_index, end_index)
@job.set_partitioner_class(Hadoop::Mapreduce::Lib::Partition::BinaryPartitioner)
Hadoop::Mapreduce::Lib::Partition::BinaryPartitioner.set_offsets(@job.configuration,
|
ruby
|
{
"resource": ""
}
|
q3053
|
Scruffy::Layers.Base.render
|
train
|
def render(svg, options)
setup_variables(options)
|
ruby
|
{
"resource": ""
}
|
q3054
|
Scruffy::Layers.Base.setup_variables
|
train
|
def setup_variables(options = {})
@color = (preferred_color || options.delete(:color))
@outline = (preferred_outline || options.delete(:outline))
@width, @height = options.delete(:size)
|
ruby
|
{
"resource": ""
}
|
q3055
|
ShipCompliant.ProductAttributes.to_h
|
train
|
def to_h
details.deep_transform_keys do |key|
# handle special cases
pascal_key = key.to_s.camelize
if SPECIAL_CASES.has_key?(key)
|
ruby
|
{
"resource": ""
}
|
q3056
|
TinyRails.Actions.gem
|
train
|
def gem(*args)
options = extract_options!(args)
name, version = args
# Set the message to be shown in logs. Uses the git repo if one is given,
# otherwise use name (version).
parts, message = [ name.inspect ], name
if version ||= options.delete(:version)
|
ruby
|
{
"resource": ""
}
|
q3057
|
TinyRails.Actions.application
|
train
|
def application(data=nil, &block)
data = block.call if !data && block_given?
data = "\n#{data}" unless data =~ /^\n/
data << "\n" unless data =~ /\n$/
|
ruby
|
{
"resource": ""
}
|
q3058
|
MongoMysqlRelations.ClassMethods.to_mysql_belongs_to
|
train
|
def to_mysql_belongs_to(name, options = {})
field "#{name}_id", type: Integer
object_class = options[:class] || name.to_s.titleize.delete(' ').constantize
self.instance_eval do
define_method(name) do |reload = false|
if reload
self.instance_variable_set("@#{name}", nil)
|
ruby
|
{
"resource": ""
}
|
q3059
|
GraphQL::Relay::Walker.QueryBuilder.build_query
|
train
|
def build_query
GraphQL.parse(BASE_QUERY).tap do |d_ast|
selections = d_ast.definitions.first.selections.first.selections
node_types.each do |type|
|
ruby
|
{
"resource": ""
}
|
q3060
|
GraphQL::Relay::Walker.QueryBuilder.inline_fragment_ast
|
train
|
def inline_fragment_ast(type, with_children: true)
selections = []
if with_children
type.all_fields.each do |field|
field_type = field.type.unwrap
if node_field?(field) && include?(field_type)
selections << node_field_ast(field)
elsif connection_field?(field) && include?(field_type)
|
ruby
|
{
"resource": ""
}
|
q3061
|
GraphQL::Relay::Walker.QueryBuilder.field_ast
|
train
|
def field_ast(field, arguments = {}, &blk)
type = field.type.unwrap
# Bail unless we have the required arguments.
required_args_are_present = field.arguments.all? do |arg_name, arg|
arguments.key?(arg_name) || valid_input?(arg.type, nil)
end
if !required_args_are_present
nil
else
f_alias = field.name == 'id' ? nil : random_alias
f_args = arguments.map do |name, value|
|
ruby
|
{
"resource": ""
}
|
q3062
|
GraphQL::Relay::Walker.QueryBuilder.node_field_ast
|
train
|
def node_field_ast(field)
f_ast = field_ast(field)
return nil if f_ast.nil?
type = field.type.unwrap
selections = f_ast.selections.dup
if type.kind.object?
selections << field_ast(type.get_field('id'))
else
possible_node_types(type).each do |if_type|
selections << inline_fragment_ast(if_type, with_children: false)
|
ruby
|
{
"resource": ""
}
|
q3063
|
GraphQL::Relay::Walker.QueryBuilder.edges_field_ast
|
train
|
def edges_field_ast(field)
f_ast = field_ast(field)
return nil if f_ast.nil?
node_fields = [node_field_ast(field.type.unwrap.get_field('node'))]
if f_ast.respond_to?(:merge) # GraphQL-Ruby 1.9+
|
ruby
|
{
"resource": ""
}
|
q3064
|
GraphQL::Relay::Walker.QueryBuilder.connection_field_ast
|
train
|
def connection_field_ast(field)
f_ast = field_ast(field, connection_arguments)
return nil if f_ast.nil?
edges_fields = [edges_field_ast(field.type.unwrap.get_field('edges'))]
if f_ast.respond_to?(:merge) # GraphQL-Ruby 1.9+
|
ruby
|
{
"resource": ""
}
|
q3065
|
GraphQL::Relay::Walker.QueryBuilder.node_field?
|
train
|
def node_field?(field)
type = field.type.unwrap
kind = type.kind
if kind.object?
|
ruby
|
{
"resource": ""
}
|
q3066
|
GraphQL::Relay::Walker.QueryBuilder.connection_field?
|
train
|
def connection_field?(field)
type = field.type.unwrap
if edges_field = type.get_field('edges')
edges = edges_field.type.unwrap
if node_field =
|
ruby
|
{
"resource": ""
}
|
q3067
|
GraphQL::Relay::Walker.QueryBuilder.possible_types
|
train
|
def possible_types(type)
if type.kind.interface?
schema.possible_types(type)
|
ruby
|
{
"resource": ""
}
|
q3068
|
ShipCompliant.ShipmentCompliance.rules
|
train
|
def rules
return [] if result[:rules].nil?
Array.wrap(result[:rules][:rule_compliance_response]).map
|
ruby
|
{
"resource": ""
}
|
q3069
|
Ccavenue.Payment.request
|
train
|
def request(order_Id,amount,billing_cust_name,billing_cust_address,billing_cust_city,billing_zip_code,billing_cust_state,billing_cust_country,billing_cust_email,billing_cust_tel,billing_cust_notes="",delivery_cust_name="",delivery_cust_address="",delivery_cust_city="",delivery_zip_code="",delivery_cust_state="",delivery_cust_country="",delivery_cust_email="",delivery_cust_tel="",delivery_cust_notes="")
checksum = getChecksum(order_Id,amount)
raw_request = "Merchant_Id=#{@merchant_Id}&Amount=#{amount}&Order_Id=#{order_Id}&Redirect_Url=#{@redirect_Url}&billing_cust_name=#{billing_cust_name}&billing_cust_address=#{billing_cust_address}&billing_cust_country=#{billing_cust_country}&billing_cust_state=#{billing_cust_state}&billing_cust_city=#{billing_cust_city}&billing_zip_code=#{billing_zip_code}&billing_cust_tel=#{billing_c
|
ruby
|
{
"resource": ""
}
|
q3070
|
Ccavenue.Payment.response
|
train
|
def response(response)
raw_response = CGI::parse(decrypt_data(response,@working_Key,"AES-128-CBC"))
auth_desc = raw_response["AuthDesc"][0]
order_id = raw_response["Order_Id"][0]
amount = raw_response["Amount"][0]
checksum = raw_response["Checksum"][0]
|
ruby
|
{
"resource": ""
}
|
q3071
|
Ccavenue.Payment.verifyChecksum
|
train
|
def verifyChecksum( order_Id, amount, authDesc, checksum)
String str = @merchant_Id+"|"+order_Id+"|"+amount+"|"+authDesc+"|"+@working_Key
String
|
ruby
|
{
"resource": ""
}
|
q3072
|
Ccavenue.Payment.hextobin
|
train
|
def hextobin(hexstring)
length = hexstring.length
binString = ""
count = 0
while count < length do
substring = hexstring[count,2]
|
ruby
|
{
"resource": ""
}
|
q3073
|
GraphQL::Relay::Walker.ClientExt.walk
|
train
|
def walk(from_id:, except: nil, only: nil, variables: {}, context: {})
query_string = GraphQL::Relay::Walker.query_string(schema, except: except, only: only)
walker_query = parse(query_string)
GraphQL::Relay::Walker.walk(from_id: from_id) do |frame|
|
ruby
|
{
"resource": ""
}
|
q3074
|
Anyplayer.Selector.player
|
train
|
def player
PLAYERS.each do |player|
player_load(player) || next
instance = player_class(player).new
player_on_platform?(instance) || next
|
ruby
|
{
"resource": ""
}
|
q3075
|
Scruffy::Helpers.Canvas.bounds_for
|
train
|
def bounds_for(canvas_size, position, size)
return nil if (position.nil? || size.nil?)
bounds = {}
bounds[:x] = canvas_size.first * (position.first / 100.to_f)
bounds[:y] = canvas_size.last * (position.last / 100.to_f)
|
ruby
|
{
"resource": ""
}
|
q3076
|
Scruffy::Formatters.Base.route_format
|
train
|
def route_format(target, idx, options = {})
args = [target, idx, options]
if respond_to?(:format)
send :format, *args[0...self.method(:format).arity]
|
ruby
|
{
"resource": ""
}
|
q3077
|
Scruffy::Formatters.Number.format
|
train
|
def format(target, idx, options)
my_precision = @precision
if @precision == :auto
my_precision = options[:all_values].inject(0) do |highest, current|
cur = current.to_f.to_s.split(".").last.size
cur > highest ? cur : highest
end
my_precision = @precision_limit if my_precision > @precision_limit
elsif @precision == :none
my_precision = 0
end
my_separator = @separator
my_separator = "" unless my_precision > 0
begin
number = ""
|
ruby
|
{
"resource": ""
}
|
q3078
|
Scruffy::Formatters.Currency.format
|
train
|
def format(target, idx, options)
@separator = "" unless @precision > 0
begin
parts = number_with_precision(target, @precision).split('.')
if @special_negatives && (target.to_f < 0)
number = "(" + @unit + parts[0].to_i.abs.to_s.gsub(/(\d)(?=(\d\d\d)+(?!\d))/, "\\1#{@delimiter}") + @separator + parts[1].to_s
|
ruby
|
{
"resource": ""
}
|
q3079
|
Scruffy::Formatters.Percentage.format
|
train
|
def format(target)
begin
number = number_with_precision(target, @precision)
parts = number.split('.')
if parts.at(1).nil?
parts[0] + "%"
else
|
ruby
|
{
"resource": ""
}
|
q3080
|
OpenTox.PhysChem.openbabel
|
train
|
def openbabel descriptor, compound
obdescriptor = OpenBabel::OBDescriptor.find_type descriptor
obmol = OpenBabel::OBMol.new
obconversion = OpenBabel::OBConversion.new
obconversion.set_in_format 'smi'
obconversion.read_string obmol,
|
ruby
|
{
"resource": ""
}
|
q3081
|
Tsuga::Model.Tile.neighbours
|
train
|
def neighbours
offsets = (-1..1).to_a.product((-1..1).to_a)
offsets.map do |lat, lng|
begin
neighbour(lat:lat, lng:lng)
|
ruby
|
{
"resource": ""
}
|
q3082
|
Cog.Seed.stamp_class
|
train
|
def stamp_class(path, opt={})
Cog.activate_language opt[:language] do
l = Cog.active_language
raise Errors::ActiveLanguageDoesNotSupportSeeds.new :language => l if l.nil? || l.seed_extension.nil?
@in_header = false
@header_path = if l.seed_header
"#{path}.#{l.seed_header}"
|
ruby
|
{
"resource": ""
}
|
q3083
|
Scruffy::Layers.Box.draw
|
train
|
def draw(svg, coords, options = {})
coords.each_with_index do |coord,idx|
x, y, bar_height = (coord.first), coord.last, 1#(height - coord.last)
valh = max_value + min_value * -1 #value_height
maxh = max_value * height / valh #positive area height
minh = min_value * height / valh #negative area height
#puts "height = #{height} and max_value = #{max_value} and min_value = #{min_value} and y = #{y} and point = #{points[idx]}"
#if points[idx] > 0
# bar_height = points[idx]*maxh/max_value
#else
# bar_height = points[idx]*minh/min_value
#end
#puts " y = #{y} and point = #{points[idx]}"
#svg.g(:transform => "translate(-#{relative(0.5)}, -#{relative(0.5)})") {
# svg.rect( :x => x, :y => y, :width => @bar_width + relative(1), :height => bar_height + relative(1),
# :style => "fill: black; fill-opacity: 0.15; stroke: none;" )
|
ruby
|
{
"resource": ""
}
|
q3084
|
Cog.Embeds.copy_keeps
|
train
|
def copy_keeps(original, scratch)
Cog.activate_language(:filename => original) do
original = scratch unless File.exists? original
keeps = gather_keeps original, scratch
keeps.each_pair do |hook, c|
result = update c, :type => 'keep' do |c|
|
ruby
|
{
"resource": ""
}
|
q3085
|
Scruffy::Layers.Multi.render
|
train
|
def render(svg, options = {})
#TODO ensure this works with new points
#current_points = points
layers.each_with_index do |layer,i|
#real_points = layer.points
#layer.points = current_points
layer_options = options.dup
layer_options[:num_bars] = layers.size
layer_options[:position] = i
layer_options[:color] = layer.preferred_color || layer.color || options[:theme].next_color
|
ruby
|
{
"resource": ""
}
|
q3086
|
Scruffy::Layers.Bar.draw
|
train
|
def draw(svg, coords, options = {})
coords.each_with_index do |coord,idx|
next if coord.nil?
x, y, bar_height = (coord.first), coord.last, 1#(height - coord.last)
valh = max_value + min_value * -1 #value_height
maxh = max_value * height / valh #positive area height
minh = min_value * height / valh #negative area height
#puts "height = #{height} and max_value = #{max_value} and min_value = #{min_value} and y = #{y} and point = #{points[idx]}"
if points[idx] > 0
bar_height = points[idx]*maxh/max_value
else
bar_height = points[idx]*minh/min_value
|
ruby
|
{
"resource": ""
}
|
q3087
|
Cog.Generator.stamp
|
train
|
def stamp(template_path, destination=nil, opt={})
# Ignore destination if its a hash, its meant to be opt
opt, destination = destination, nil if destination.is_a? Hash
# Render and filter
r = find_and_render template_path, opt
r = filter_through r, opt[:filter]
return r if destination.nil?
# Place it in a file
write_scratch_file(destination, r, opt[:absolute_destination]) do |path, scratch|
updated = File.exists? path
|
ruby
|
{
"resource": ""
}
|
q3088
|
Cog.Generator.embed
|
train
|
def embed(hook, &block)
eaten = 0 # keep track of eaten statements so that the index can be adjusted
Embeds.find(hook) do |c|
c.eaten = eaten
if Embeds.update c, &block
eaten += 1 if c.once?
|
ruby
|
{
"resource": ""
}
|
q3089
|
Elasticrawl.CombineJob.set_input_jobs
|
train
|
def set_input_jobs(input_jobs)
segment_count = 0
input_paths = []
input_jobs.each do |job_name|
input_job = Job.where(:job_name => job_name,
:type => 'Elasticrawl::ParseJob').first_or_initialize
step_count = input_job.job_steps.count
if step_count > 0
segment_count +=
|
ruby
|
{
"resource": ""
}
|
q3090
|
Elasticrawl.CombineJob.run
|
train
|
def run
emr_config = job_config['emr_config']
job_flow_id = run_job_flow(emr_config)
if job_flow_id.present?
self.job_flow_id
|
ruby
|
{
"resource": ""
}
|
q3091
|
Elasticrawl.CombineJob.set_input_path
|
train
|
def set_input_path(input_job)
job_name = input_job.job_name
input_filter = job_config['input_filter']
s3_path =
|
ruby
|
{
"resource": ""
}
|
q3092
|
Elasticrawl.Config.load_config
|
train
|
def load_config(config_file)
if dir_exists?
begin
config_file = File.join(config_dir, "#{config_file}.yml")
config = YAML::load(File.open(config_file))
rescue StandardError => e
|
ruby
|
{
"resource": ""
}
|
q3093
|
Elasticrawl.Config.load_database
|
train
|
def load_database
if dir_exists?
config = {
'adapter' => 'sqlite3',
'database' => File.join(config_dir, DATABASE_FILE),
'pool' => 5,
'timeout' => 5000
}
begin
ActiveRecord::Base.establish_connection(config)
ActiveRecord::Migrator.migrate(File.join(File.dirname(__FILE__), \
'../../db/migrate'), ENV['VERSION'] ? ENV['VERSION'].to_i : nil )
|
ruby
|
{
"resource": ""
}
|
q3094
|
Elasticrawl.Config.bucket_exists?
|
train
|
def bucket_exists?(bucket_name)
begin
s3 = AWS::S3.new
s3.buckets[bucket_name].exists?
rescue AWS::S3::Errors::SignatureDoesNotMatch => e
raise AWSCredentialsInvalidError, 'AWS access credentials are invalid'
|
ruby
|
{
"resource": ""
}
|
q3095
|
Elasticrawl.Config.create_bucket
|
train
|
def create_bucket(bucket_name)
begin
s3 = AWS::S3.new
s3.buckets.create(bucket_name)
rescue AWS::Errors::Base => s3e
|
ruby
|
{
"resource": ""
}
|
q3096
|
Elasticrawl.Config.delete_bucket
|
train
|
def delete_bucket(bucket_name)
begin
s3 = AWS::S3.new
bucket = s3.buckets[bucket_name]
bucket.delete!
rescue AWS::Errors::Base
|
ruby
|
{
"resource": ""
}
|
q3097
|
Elasticrawl.Config.deploy_templates
|
train
|
def deploy_templates(bucket_name)
begin
Dir.mkdir(config_dir, 0755) if dir_exists? == false
TEMPLATE_FILES.each do |template_file|
FileUtils.cp(File.join(File.dirname(__FILE__), TEMPLATES_DIR, template_file),
File.join(config_dir, template_file))
end
|
ruby
|
{
"resource": ""
}
|
q3098
|
Elasticrawl.Config.save_aws_config
|
train
|
def save_aws_config
env_key = ENV['AWS_ACCESS_KEY_ID']
env_secret = ENV['AWS_SECRET_ACCESS_KEY']
creds = {}
creds['ACCESS_KEY_ID'] = @access_key_id unless @access_key_id == env_key
|
ruby
|
{
"resource": ""
}
|
q3099
|
Elasticrawl.Config.save_config
|
train
|
def save_config(template, params)
config_file = File.join(config_dir, "#{template}.yml")
config = File.read(config_file)
|
ruby
|
{
"resource": ""
}
|
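Each row above pairs a short identifier with a fully qualified Ruby method name, the dataset split, a truncated source excerpt, the language tag, and a metadata dict. As a rough illustration only, the sketch below shows one way a consumer might model a single row (the first record, `q3000`) as a plain Ruby hash and group records by split; the field names follow the column header, the `text` value is left truncated exactly as it appears in the viewer, and nothing here is part of the dataset itself.

```ruby
# Minimal sketch (assumed consumer-side representation, not part of the dataset):
# one row of the table modelled as a Ruby hash. Keys mirror the column header;
# the `text` value stays truncated exactly as shown above.
row = {
  "_id"              => "q3000",
  "title"            => "ActsAsRevisionable.ClassMethods.restore_revision",
  "partition"        => "train",
  "text"             => <<~SNIPPET,
    def restore_revision(id, revision_number)
      revision_record = revision(id, revision_number)
  SNIPPET
  "language"         => "ruby",
  "meta_information" => { "resource" => "" }
}

# Example use: group record ids by their partition split.
rows = [row]
rows.group_by { |r| r["partition"] }.each do |split, records|
  puts "#{split}: #{records.map { |r| r['_id'] }.join(', ')}"
end
```

The same row shape applies to every record listed above (q3000 through q3099).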