Dataset columns:
_id: string (2-6 chars)
title: string (9-130 chars)
partition: string (3 classes)
text: string (66-10.5k chars)
language: string (1 class)
meta_information: dict
q1100
Rubyipmi::Ipmitool.BaseCommand.find_fix
train
def find_fix(result)
  return unless result
  # The ErrorCodes hash contains the fix
  begin
    fix = ErrorCodes.search(result)
    @options.merge_notify!(fix)
  rescue
    raise "Could not find fix for error code: \n#{result}"
  end
end
ruby
{ "resource": "" }
q1101
Rubyipmi::Freeipmi.Chassis.bootdevice
train
def bootdevice(device, reboot = false, persistent = false)
  if config.bootdevices.include?(device)
    bootstatus = config.bootdevice(device, persistent)
    power.cycle if reboot && bootstatus
  else
    logger.error("Device with name: #{device} is not a valid boot device for host #{options['hostname']}") if logger
    raise "Device with name: #{device} is not a valid boot device for host #{options['hostname']}"
  end
end
ruby
{ "resource": "" }
q1102
Rubyipmi::Freeipmi.Chassis.bootpxe
train
def bootpxe(reboot = false, persistent = false)
  bootstatus = config.bootpxe(persistent)
  # Only reboot if setting the boot flag was successful
  power.cycle if reboot && bootstatus
end
ruby
{ "resource": "" }
q1103
Rubyipmi::Freeipmi.Chassis.bootdisk
train
def bootdisk(reboot = false, persistent = false)
  bootstatus = config.bootdisk(persistent)
  # Only reboot if setting the boot flag was successful
  power.cycle if reboot && bootstatus
end
ruby
{ "resource": "" }
q1104
Rubyipmi::Freeipmi.Chassis.bootcdrom
train
def bootcdrom(reboot = false, persistent = false)
  bootstatus = config.bootcdrom(persistent)
  # Only reboot if setting the boot flag was successful
  power.cycle if reboot && bootstatus
end
ruby
{ "resource": "" }
q1105
Rubyipmi::Ipmitool.Chassis.bootbios
train
def bootbios(reboot = false, persistent = false)
  bootstatus = config.bootbios(persistent)
  # Only reboot if setting the boot flag was successful
  power.cycle if reboot && bootstatus
  bootstatus
end
ruby
{ "resource": "" }
q1106
Rubyipmi::Ipmitool.Chassis.identifystatus
train
def identifystatus
  options["cmdargs"] = "chassis identify status"
  value = runcmd
  options.delete_notify("cmdargs")
  @result.chomp.split(":").last.strip if value
end
ruby
{ "resource": "" }
q1107
Rubyipmi::Freeipmi.FruData.parse
train
def parse(data)
  return unless data
  data.each do |line|
    key, value = line.split(':', 2)
    if key =~ /^FRU.*/
      if value =~ /([\w\s]*)\(.*\)/
        self[:name] = $~[1].strip.gsub(/\ /, '_').downcase
      end
    else
      key = key.strip.gsub(/\ /, '_').downcase.gsub(/fru_/, '')
      self[key] = value.strip unless value.nil?
    end
  end
end
ruby
{ "resource": "" }
q1108
GithubMarkdownPreview.HtmlPreview.pipeline_filters
train
def pipeline_filters(options)
  filters = [
    HTML::Pipeline::MarkdownFilter,
    HTML::Pipeline::SanitizationFilter,
    HTML::Pipeline::ImageMaxWidthFilter,
    HTML::Pipeline::HttpsFilter,
    HTML::Pipeline::EmojiFilter,
    GithubMarkdownPreview::Pipeline::TaskListFilter
  ]

  if HtmlPreview::SYNTAX_HIGHLIGHTS
    filters << HTML::Pipeline::SyntaxHighlightFilter
  end

  if options[:comment_mode]
    filters << HTML::Pipeline::MentionFilter
  else
    filters << HTML::Pipeline::TableOfContentsFilter
  end

  filters
end
ruby
{ "resource": "" }
q1109
GithubMarkdownPreview.HtmlPreview.update
train
def update
  unless File.exist?(@source_file)
    raise FileNotFoundError.new("Source file deleted")
  end
  markdown_render = @preview_pipeline.call(IO.read(@source_file), @pipeline_context, {})[:output].to_s
  preview_html = wrap_preview(markdown_render)
  File.open(@preview_file, 'w') do |f|
    f.write(preview_html)
  end
  @update_callbacks.each { |callback| callback.call }
end
ruby
{ "resource": "" }
q1110
Rubyipmi::Freeipmi.BmcDevice.reset
train
def reset(type = 'cold')
  if ['cold', 'warm'].include?(type)
    key = "#{type}-reset"
    command(key)
  else
    logger.error("reset type: #{type} is not a valid choice, use warm or cold") if logger
    raise "reset type: #{type} is not a valid choice, use warm or cold"
  end
end
ruby
{ "resource": "" }
q1111
Rubyipmi::Ipmitool.Bmc.reset
train
def reset(type = 'cold')
  if ['cold', 'warm'].include?(type)
    @options["cmdargs"] = "bmc reset #{type}"
    value = runcmd
    @options.delete_notify("cmdargs")
    return value
  else
    logger.error("reset type: #{type} is not a valid choice, use warm or cold") if logger
    raise "reset type: #{type} is not a valid choice, use warm or cold"
  end
end
ruby
{ "resource": "" }
q1112
Rubyipmi::Ipmitool.Bmc.retrieve
train
def retrieve
  @options["cmdargs"] = "bmc info"
  status = runcmd
  @options.delete_notify("cmdargs")
  subkey = nil
  if !status
    raise @result
  else
    @result.lines.each do |line|
      # clean up the data from spaces
      item = line.split(':')
      key = item.first.strip
      value = item.last.strip
      # if the following condition is met we have subvalues
      if value.empty?
        subkey = key
        @bmcinfo[subkey] = []
      elsif key == value && subkey
        # subvalue found
        @bmcinfo[subkey] << value
      else
        # Normal key/value pair with no subkeys
        subkey = nil
        @bmcinfo[key] = value
      end
    end
    return @bmcinfo
  end
end
ruby
{ "resource": "" }
q1113
Rubyipmi.SensorsMixin.fanlist
train
def fanlist(refreshdata = false)
  refresh if refreshdata
  list.each_with_object({}) { |(name, sensor), flist| flist[name] = sensor if name =~ /.*fan.*/ }
end
ruby
{ "resource": "" }
q1114
Rubyipmi.SensorsMixin.templist
train
def templist(refreshdata = false)
  refresh if refreshdata
  list.each_with_object({}) do |(name, sensor), tlist|
    tlist[name] = sensor if (sensor[:unit] =~ /.*degree.*/ || name =~ /.*temp.*/)
  end
end
ruby
{ "resource": "" }
q1115
Rubyipmi::Freeipmi.ChassisConfig.checkout
train
def checkout(section = nil)
  @options["checkout"] = false
  @options["section"] = section if section
  value = runcmd
  @options.delete_notify("checkout")
  @options.delete_notify("section") if section
  value
end
ruby
{ "resource": "" }
q1116
Rubyipmi::Ipmitool.Fru.parse
train
def parse(data)
  return unless data
  parsed_data = []
  data.lines.each do |line|
    if line =~ /^FRU.*/
      # this is either the first line of the fru or another fru
      if parsed_data.count != 0
        # we have reached a new fru device so lets record the previous fru
        new_fru = FruData.new(parsed_data)
        parsed_data = []
        @list[new_fru[:name]] = new_fru
      end
    end
    parsed_data << line
  end
  # process the last fru
  return if parsed_data.count == 0
  # we have reached a new fru device so lets record the previous fru
  new_fru = FruData.new(parsed_data)
  parsed_data = []
  @list[new_fru[:name]] = new_fru
end
ruby
{ "resource": "" }
q1117
RSpecKickstarter.Generator.write_spec
train
def write_spec(file_path, force_write = false, dry_run = false, rails_mode = false)
  class_or_module = RSpecKickstarter::RDocFactory.get_rdoc_class_or_module(file_path)
  if class_or_module
    spec_path = get_spec_path(file_path)
    if force_write && File.exist?(spec_path)
      append_to_existing_spec(class_or_module, dry_run, rails_mode, spec_path)
    else
      create_new_spec(class_or_module, dry_run, rails_mode, file_path, spec_path)
    end
  else
    puts "#{file_path} skipped (Class/Module not found)."
  end
end
ruby
{ "resource": "" }
q1118
RSpecKickstarter.Generator.create_new_spec
train
def create_new_spec(class_or_module, dry_run, rails_mode, file_path, spec_path)
  # These names are used in ERB template, don't delete.
  # rubocop:disable Lint/UselessAssignment
  methods_to_generate = class_or_module.method_list.select { |m| m.visibility == :public }
  c = class_or_module
  self_path = to_string_value_to_require(file_path)
  # rubocop:enable Lint/UselessAssignment
  erb = RSpecKickstarter::ERBFactory.new(@full_template).get_instance_for_new_spec(rails_mode, file_path)
  code = erb.result(binding)
  if dry_run
    puts "----- #{spec_path} -----"
    puts code
  else
    if File.exist?(spec_path)
      puts "#{spec_path} already exists."
    else
      FileUtils.mkdir_p(File.dirname(spec_path))
      File.open(spec_path, 'w') { |f| f.write(code) }
      puts "#{spec_path} created."
    end
  end
end
ruby
{ "resource": "" }
q1119
RSpecKickstarter.ERBFactory.get_instance_for_new_spec
train
def get_instance_for_new_spec(rails_mode, target_path)
  template = get_erb_template(@custom_template, true, rails_mode, target_path)
  ERB.new(template, nil, '-', '_new_spec_code')
end
ruby
{ "resource": "" }
q1120
RSpecKickstarter.ERBFactory.get_instance_for_appending
train
def get_instance_for_appending(rails_mode, target_path)
  template = get_erb_template(@custom_template, false, rails_mode, target_path)
  ERB.new(template, nil, '-', '_additional_spec_code')
end
ruby
{ "resource": "" }
q1121
RSpecKickstarter.ERBFactory.get_erb_template
train
def get_erb_template(custom_template, is_full, rails_mode, target_path)
  if custom_template
    custom_template
  elsif rails_mode && target_path.match(/controllers/)
    get_rails_controller_template(is_full)
  elsif rails_mode && target_path.match(/helpers/)
    get_rails_helper_template(is_full)
  else
    get_basic_template(is_full)
  end
end
ruby
{ "resource": "" }
q1122
ActiveRecord.ConnectionHandling.percona_connection
train
def percona_connection(config)
  mysql2_connection = mysql2_connection(config)
  config[:username] = 'root' if config[:username].nil?

  connection_details = Departure::ConnectionDetails.new(config)
  verbose = ActiveRecord::Migration.verbose
  sanitizers = [
    Departure::LogSanitizers::PasswordSanitizer.new(connection_details)
  ]
  percona_logger = Departure::LoggerFactory.build(sanitizers: sanitizers, verbose: verbose)
  cli_generator = Departure::CliGenerator.new(connection_details)

  runner = Departure::Runner.new(
    percona_logger,
    cli_generator,
    mysql2_connection
  )

  connection_options = { mysql_adapter: mysql2_connection }

  ConnectionAdapters::DepartureAdapter.new(
    runner,
    logger,
    connection_options,
    config
  )
end
ruby
{ "resource": "" }
q1123
Lhm.Adapter.add_index
train
def add_index(columns, index_name = nil)
  options = { name: index_name } if index_name
  migration.add_index(table_name, columns, options || {})
end
ruby
{ "resource": "" }
q1124
Lhm.Adapter.remove_index
train
def remove_index(columns, index_name = nil)
  options = if index_name
              { name: index_name }
            else
              { column: columns }
            end
  migration.remove_index(table_name, options)
end
ruby
{ "resource": "" }
q1125
Lhm.Adapter.change_column
train
def change_column(column_name, definition)
  attributes = column_attributes(column_name, definition)
  migration.change_column(*attributes)
end
ruby
{ "resource": "" }
q1126
Lhm.Adapter.add_unique_index
train
def add_unique_index(columns, index_name = nil)
  options = { unique: true }
  options.merge!(name: index_name) if index_name # rubocop:disable Performance/RedundantMerge
  migration.add_index(table_name, columns, options)
end
ruby
{ "resource": "" }
q1127
Lhm.Adapter.column
train
def column(name, definition)
  @column ||= if definition.is_a?(Symbol)
                ColumnWithType.new(name, definition)
              else
                ColumnWithSql.new(name, definition)
              end
end
ruby
{ "resource": "" }
q1128
Lhm.ColumnWithSql.column
train
def column
  cast_type = ActiveRecord::Base.connection.lookup_cast_type(definition)
  @column ||= self.class.column_factory.new(
    name,
    default_value,
    cast_type,
    definition,
    null_value
  )
end
ruby
{ "resource": "" }
q1129
Lhm.ColumnWithSql.default_value
train
def default_value
  match = if definition =~ /timestamp|datetime/i
            /default '?(.+[^'])'?/i.match(definition)
          else
            /default '?(\w+)'?/i.match(definition)
          end
  return unless match
  match[1].downcase != 'null' ? match[1] : nil
end
ruby
{ "resource": "" }
q1130
Lhm.ColumnWithSql.null_value
train
def null_value
  match = /((\w*) NULL)/i.match(definition)
  return true unless match
  match[2].downcase == 'not' ? false : true
end
ruby
{ "resource": "" }
q1131
Departure.Command.run_in_process
train
def run_in_process
  Open3.popen3(full_command) do |_stdin, stdout, _stderr, waith_thr|
    begin
      loop do
        IO.select([stdout])
        data = stdout.read_nonblock(8)
        logger.write_no_newline(data)
      end
    rescue EOFError # rubocop:disable Lint/HandleExceptions
      # noop
    ensure
      @status = waith_thr.value
    end
  end
end
ruby
{ "resource": "" }
q1132
Departure.Command.validate_status!
train
def validate_status!
  raise SignalError.new(status) if status.signaled? # rubocop:disable Style/RaiseArgs
  raise CommandNotFoundError if status.exitstatus == COMMAND_NOT_FOUND
  raise Error, error_message unless status.success?
end
ruby
{ "resource": "" }
q1133
Departure.CliGenerator.all_options
train
def all_options
  env_variable_options = UserOptions.new
  global_configuration_options = UserOptions.new(Departure.configuration.global_percona_args)
  options = env_variable_options.merge(global_configuration_options).merge(DEFAULT_OPTIONS)
  options.to_a.join(' ')
end
ruby
{ "resource": "" }
q1134
SimpleIDN.Punycode.adapt
train
def adapt(delta, numpoints, firsttime)
  # Punycode bias adaptation (RFC 3492, section 6.1)
  delta = firsttime ? (delta / DAMP) : (delta >> 1)
  delta += (delta / numpoints)

  k = 0
  while delta > (((BASE - TMIN) * TMAX) / 2)
    delta /= BASE - TMIN
    k += BASE
  end
  k + (BASE - TMIN + 1) * delta / (delta + SKEW)
end
ruby
{ "resource": "" }
q1135
SimpleIDN.Punycode.encode
train
def encode(input)
  input_encoding = input.encoding
  input = input.encode(Encoding::UTF_8).codepoints.to_a
  output = []

  # Initialize the state:
  n = INITIAL_N
  delta = 0
  bias = INITIAL_BIAS

  # Handle the basic code points:
  output = input.select { |char| char <= ASCII_MAX }

  h = b = output.length
  # h is the number of code points that have been handled, b is the
  # number of basic code points

  output << DELIMITER if b > 0

  # Main encoding loop:
  while h < input.length
    # All non-basic code points < n have been
    # handled already. Find the next larger one:
    m = MAXINT
    input.each do |char|
      m = char if char >= n && char < m
    end

    # Increase delta enough to advance the decoder's
    # <n,i> state to <m,0>, but guard against overflow:
    raise(ConversionError, "punycode_overflow (1)") if m - n > ((MAXINT - delta) / (h + 1)).floor

    delta += (m - n) * (h + 1)
    n = m

    input.each_with_index do |char, _|
      if char < n
        delta += 1
        raise(ConversionError, "punycode_overflow(2)") if delta > MAXINT
      end

      next unless char == n

      # Represent delta as a generalized variable-length integer:
      q = delta
      k = BASE
      loop do
        t = k <= bias ? TMIN : k >= bias + TMAX ? TMAX : k - bias
        break if q < t
        output << encode_digit(t + (q - t) % (BASE - t))
        q = ((q - t) / (BASE - t)).floor
        k += BASE
      end

      output << encode_digit(q)
      bias = adapt(delta, h + 1, h == b)
      delta = 0
      h += 1
    end

    delta += 1
    n += 1
  end

  output.collect { |c| c.chr(Encoding::UTF_8) }.join(EMPTY).encode(input_encoding)
end
ruby
{ "resource": "" }
q1136
Akami.WSSE.to_xml
train
def to_xml
  if signature? and signature.have_document?
    Gyoku.xml wsse_signature.merge!(hash)
  elsif username_token? && timestamp?
    Gyoku.xml wsse_username_token.merge!(wsu_timestamp) { |key, v1, v2| v1.merge!(v2) { |key, v1, v2| v1.merge!(v2) } }
  elsif username_token?
    Gyoku.xml wsse_username_token.merge!(hash)
  elsif timestamp?
    Gyoku.xml wsu_timestamp.merge!(hash)
  else
    ""
  end
end
ruby
{ "resource": "" }
q1137
Akami.WSSE.digest_password
train
def digest_password
  # WSSE digest: Base64-encoded SHA-1 of nonce + timestamp + password
  token = nonce + timestamp + password
  Base64.encode64(Digest::SHA1.digest(token)).chomp!
end
ruby
{ "resource": "" }
q1138
Shog.Formatter.call
train
def call( severity, time, progname, msg )
  return if msg.blank? || _silence?( msg )

  msg = [
    _tagged( time, :timestamp ),
    _tagged( progname, :progname ),
    formatted_severity_tag( severity ),
    formatted_message( severity, msg )
  ].compact.join(" ")

  super severity, time, progname, msg
end
ruby
{ "resource": "" }
q1139
Shog.Formatter.formatted_severity_tag
train
def formatted_severity_tag( severity )
  length = configuration[:severity_tags][:_length] ||= begin
    configuration[:severity_tags].reduce(0) { |l,(k,_)| [k.length, l].max }
  end

  return if length == 0

  padded_severity = severity.ljust length

  formatted = if proc = configuration[:severity_tags][severity]
    proc.call padded_severity
  else
    padded_severity
  end

  _tagged formatted, :severity_tags
end
ruby
{ "resource": "" }
q1140
Shog.Formatter.format_time
train
def format_time( time, expected = 30 )
  timef = time.uncolorize.to_f
  case
  when timef > expected * 2 then time.to_s.uncolorize.red
  when timef > expected     then time.to_s.uncolorize.yellow
  else time
  end
end
ruby
{ "resource": "" }
q1141
Hazel.CLI.create_empty_directories
train
def create_empty_directories
  %w{config/initializers lib spec}.each do |dir|
    empty_directory File.join(@app_path, dir)
  end
  empty_directory File.join(@app_path, 'db/migrate') unless @database.empty?
  create_file File.join(@app_path, "lib", ".gitkeep")
end
ruby
{ "resource": "" }
q1142
GemBench.GemfileLineTokenizer.following_non_gem_lines
train
def following_non_gem_lines
  all_lines[(index+1)..(-1)].
    reject { |x| x.strip.empty? || x.match(GemBench::TRASH_REGEX) }.
    map(&:strip).
    inject([]) do |following_lines, next_line|
      break following_lines if next_line.match(GEM_REGEX)
      following_lines << next_line
    end
end
ruby
{ "resource": "" }
q1143
Cliver.Filter.requirements
train
def requirements(requirements)
  requirements.map do |requirement|
    req_parts = requirement.split(/\b(?=\d)/, 2)
    version = req_parts.last
    version.replace apply(version)
    req_parts.join
  end
end
ruby
{ "resource": "" }
q1144
Cliver.Dependency.installed_versions
train
def installed_versions
  return enum_for(:installed_versions) unless block_given?

  find_executables.each do |executable_path|
    version = detect_version(executable_path)
    break(2) if yield(executable_path, version)
  end
end
ruby
{ "resource": "" }
q1145
Cliver.Dependency.detect!
train
def detect!
  installed = {}
  installed_versions.each do |path, version|
    installed[path] = version
    return path if ENV['CLIVER_NO_VERIFY']
    return path if requirement_satisfied_by?(version)
    strict?
  end

  # dependency not met. raise the appropriate error.
  raise_not_found! if installed.empty?
  raise_version_mismatch!(installed)
end
ruby
{ "resource": "" }
q1146
YamlLint.Linter.check
train
def check(path)
  raise FileNotFoundError, "#{path}: no such file" unless File.exist?(path)

  valid = false
  unless disable_extension_check
    unless check_filename(path)
      errors[path] = ['File extension must be .yaml or .yml']
      return valid
    end
  end

  File.open(path, 'r') do |f|
    error_array = []
    valid = check_data(f.read, error_array)
    errors[path] = error_array unless error_array.empty?
  end

  valid
end
ruby
{ "resource": "" }
q1147
YamlLint.Linter.check_stream
train
def check_stream(io_stream)
  yaml_data = io_stream.read
  error_array = []

  valid = check_data(yaml_data, error_array)
  errors[''] = error_array unless error_array.empty?

  valid
end
ruby
{ "resource": "" }
q1148
YamlLint.Linter.display_errors
train
def display_errors
  errors.each do |path, errors|
    puts path
    errors.each do |err|
      puts "  #{err}"
    end
  end
end
ruby
{ "resource": "" }
q1149
YamlLint.Linter.check_filename
train
def check_filename(filename)
  extension = filename.split('.').last
  return true if valid_extensions.include?(extension)
  false
end
ruby
{ "resource": "" }
q1150
YamlLint.Linter.check_data
train
def check_data(yaml_data, errors_array)
  valid = check_not_empty?(yaml_data, errors_array)
  valid &&= check_syntax_valid?(yaml_data, errors_array)
  valid &&= check_overlapping_keys?(yaml_data, errors_array)
  valid
end
ruby
{ "resource": "" }
q1151
YamlLint.Linter.check_not_empty?
train
def check_not_empty?(yaml_data, errors_array)
  if yaml_data.empty?
    errors_array << 'The YAML should not be an empty string'
    false
  elsif yaml_data.strip.empty?
    errors_array << 'The YAML should not just be spaces'
    false
  else
    true
  end
end
ruby
{ "resource": "" }
q1152
YamlLint.Linter.check_syntax_valid?
train
def check_syntax_valid?(yaml_data, errors_array)
  YAML.safe_load(yaml_data)
  true
rescue YAML::SyntaxError => e
  errors_array << e.message
  false
end
ruby
{ "resource": "" }
q1153
YamlLint.Linter.check_overlapping_keys?
train
def check_overlapping_keys?(yaml_data, errors_array)
  overlap_detector = KeyOverlapDetector.new
  data = Psych.parser.parse(yaml_data)

  overlap_detector.parse(data)

  overlap_detector.overlapping_keys.each do |key|
    err_meg = "The same key is defined more than once: #{key.join('.')}"
    errors_array << err_meg
  end

  overlap_detector.overlapping_keys.empty?
end
ruby
{ "resource": "" }
q1154
Term.ANSIColor.uncolor
train
def uncolor(string = nil) # :yields:
  if block_given?
    yield.to_str.gsub(COLORED_REGEXP, '')
  elsif string.respond_to?(:to_str)
    string.to_str.gsub(COLORED_REGEXP, '')
  elsif respond_to?(:to_str)
    to_str.gsub(COLORED_REGEXP, '')
  else
    ''
  end.extend(Term::ANSIColor)
end
ruby
{ "resource": "" }
q1155
Term.ANSIColor.color
train
def color(name, string = nil, &block)
  attribute = Attribute[name] or raise ArgumentError, "unknown attribute #{name.inspect}"
  result = ''
  result << "\e[#{attribute.code}m" if Term::ANSIColor.coloring?
  if block_given?
    result << yield.to_s
  elsif string.respond_to?(:to_str)
    result << string.to_str
  elsif respond_to?(:to_str)
    result << to_str
  else
    return result # only switch on
  end
  result << "\e[0m" if Term::ANSIColor.coloring?
  result.extend(Term::ANSIColor)
end
ruby
{ "resource": "" }
q1156
YamlLint.CLI.execute!
train
def execute!
  files_to_check = parse_options.leftovers
  YamlLint.logger.level = Logger::DEBUG if opts.debug

  no_yamls_to_check_msg = "Error: need at least one YAML file to check.\n"\
                          'Try --help for help.'
  abort(no_yamls_to_check_msg) if files_to_check.empty?
  lint(files_to_check)
end
ruby
{ "resource": "" }
q1157
Cliver.Detector.detect_version
train
def detect_version(executable_path)
  capture = ShellCapture.new(version_command(executable_path))
  unless capture.command_found
    raise Cliver::Dependency::NotFound.new(
      "Could not find an executable at given path '#{executable_path}'." +
      "If this path was not specified explicitly, it is probably a " +
      "bug in [Cliver](https://github.com/yaauie/cliver/issues)."
    )
  end
  capture.stdout[version_pattern] || capture.stderr[version_pattern]
end
ruby
{ "resource": "" }
q1158
Fusioncharts.Chart.render
train
def render
  config = json_escape JSON.generate(self.options)
  if @timeSeriesSource
    config.gsub! '"__DataSource__"', json_escape(@timeSeriesSource)
  end
  dataUrlFormat = self.jsonUrl? ? "json" : ( self.xmlUrl ? "xml" : nil )
  template = File.read(File.expand_path("../../../templates/chart.erb", __FILE__))
  renderer = ERB.new(template)
  return raw renderer.result(binding)
end
ruby
{ "resource": "" }
q1159
Fusioncharts.Chart.parse_options
train
def parse_options
  newOptions = nil
  @options.each do |key, value|
    if key.downcase.to_s.eql? "timeseries"
      @timeSeriesData = value.GetDataStore()
      @timeSeriesSource = value.GetDataSource()
      newOptions = {}
      newOptions['dataSource'] = "__DataSource__"
      @options.delete(key)
    end
  end
  if newOptions
    @options.merge!(newOptions)
  end
  keys = @options.keys
  keys.each { |k| instance_variable_set "@#{k}".to_sym, @options[k] if self.respond_to? k }
  # parse_datasource_json
end
ruby
{ "resource": "" }
q1160
GPX.Segment.smooth_location_by_average
train
def smooth_location_by_average(opts = {})
  seconds_either_side = opts[:averaging_window] || 20

  # calculate the first and last points to which the smoothing should be applied
  earliest = (find_point_by_time_or_offset(opts[:start]) || @earliest_point).time
  latest = (find_point_by_time_or_offset(opts[:end]) || @latest_point).time

  tmp_points = []

  @points.each do |point|
    if point.time > latest || point.time < earliest
      tmp_points.push point # add the point unaltered
      next
    end

    lat_av = 0.to_f
    lon_av = 0.to_f
    alt_av = 0.to_f
    n = 0

    # k ranges from the time of the current point +/- 20s
    (-1 * seconds_either_side..seconds_either_side).each do |k|
      # find the point nearest to the time offset indicated by k
      contributing_point = closest_point(point.time + k)
      # sum up the contributions to the average
      lat_av += contributing_point.lat
      lon_av += contributing_point.lon
      alt_av += contributing_point.elevation
      n += 1
    end

    # calculate the averages
    tmp_point = point.clone
    tmp_point.lon = (lon_av / n).round(7)
    tmp_point.elevation = (alt_av / n).round(2)
    tmp_point.lat = (lat_av / n).round(7)
    tmp_points.push tmp_point
  end

  @points.clear
  reset_meta_data

  # now commit the averages back and recalculate the distances
  tmp_points.each do |point|
    append_point(point)
  end
end
ruby
{ "resource": "" }
q1161
GPX.GPXFile.crop
train
def crop(area)
  reset_meta_data
  keep_tracks = []
  tracks.each do |trk|
    trk.crop(area)
    unless trk.empty?
      update_meta_data(trk)
      keep_tracks << trk
    end
  end
  @tracks = keep_tracks
  routes.each { |rte| rte.crop(area) }
  waypoints.each { |wpt| wpt.crop(area) }
end
ruby
{ "resource": "" }
q1162
GPX.GPXFile.calculate_duration
train
def calculate_duration
  @duration = 0
  if @tracks.nil? || @tracks.size.zero? || @tracks[0].segments.nil? || @tracks[0].segments.size.zero?
    return @duration
  end
  @duration = (@tracks[-1].segments[-1].points[-1].time - @tracks.first.segments.first.points.first.time)
rescue StandardError
  @duration = 0
end
ruby
{ "resource": "" }
q1163
GPX.Bounds.contains?
train
def contains?(pt)
  ((pt.lat >= min_lat) && (pt.lat <= max_lat) && (pt.lon >= min_lon) && (pt.lon <= max_lon))
end
ruby
{ "resource": "" }
q1164
GPX.Track.contains_time?
train
def contains_time?(time)
  segments.each do |seg|
    return true if seg.contains_time?(time)
  end
  false
end
ruby
{ "resource": "" }
q1165
GPX.Track.delete_area
train
def delete_area(area)
  reset_meta_data
  segments.each do |seg|
    seg.delete_area(area)
    update_meta_data(seg) unless seg.empty?
  end
  segments.delete_if(&:empty?)
end
ruby
{ "resource": "" }
q1166
Resque::Delayed.Worker.work
train
def work(interval = 5.0)
  interval = Float(interval)
  $0 = "resque-delayed: harvesting"
  startup

  loop do
    break if shutdown?

    # harvest delayed jobs while they are available
    while job = Resque::Delayed.next do
      log "got: #{job.inspect}"
      queue, klass, *args = job
      Resque::Job.create(queue, klass, *args)
    end

    break if interval.zero?
    log! "Sleeping for #{interval} seconds"
    sleep interval
  end
end
ruby
{ "resource": "" }
q1167
Resque::Delayed.Worker.log
train
def log(message)
  if verbose
    puts "*** #{message}"
  elsif very_verbose
    time = Time.now.strftime('%H:%M:%S %Y-%m-%d')
    puts "** [#{time}] #$$: #{message}"
  end
end
ruby
{ "resource": "" }
q1168
Jekyll.TypogrifyFilter.custom_caps
train
def custom_caps(text)
  # $1 and $2 are excluded HTML tags, $3 is the part before the caps and $4 is the caps match
  text.gsub(%r{
      (<[^/][^>]*?>)|                                        # Ignore any opening tag, so we don't mess up attribute values
      (\s|&nbsp;|^|'|"|>|)                                   # Make sure our capture is preceded by whitespace or quotes
      ([A-Z\d](?:(\.|'|-|&|&amp;|&\#38;)?[A-Z\d][\.']?){1,}) # Capture capital words, with optional dots, numbers or ampersands in between
      (?!\w)                                                 # ...which must not be followed by a word character.
    }x) do |str|
    tag, before, caps = $1, $2, $3

    # Do nothing with the contents if ignored tags, the inside of an opening HTML element
    # so we don't mess up attribute values, or if our capture is only digits.
    if tag || caps =~ /^\d+\.?$/
      str
    elsif $3 =~ /^[\d\.]+$/
      before + caps
    else
      before + '<span class="caps">' + caps + '</span>'
    end
  end
end
ruby
{ "resource": "" }
q1169
ActiveRecord.HierarchicalQuery.join_recursive
train
def join_recursive(join_options = {}, &block)
  raise ArgumentError, 'block expected' unless block_given?

  query = Query.new(klass)

  if block.arity == 0
    query.instance_eval(&block)
  else
    block.call(query)
  end

  query.join_to(self, join_options)
end
ruby
{ "resource": "" }
q1170
Auditable.Auditing.audit_tag_with
train
def audit_tag_with(tag)
  if audit = last_audit
    audit.update_attribute(:tag, tag)
    # Force the trigger of a reload if audited_version is used. Happens automatically otherwise
    audits.reload if self.class.audited_version
  else
    self.audit_tag = tag
    snap!
  end
end
ruby
{ "resource": "" }
q1171
Auditable.Auditing.snap
train
def snap
  serialize_attribute = lambda do |attribute|
    # If a proc, do nothing, cannot be serialized
    # XXX: raise warning on passing in a proc?
    if attribute.is_a? Proc
      # noop
    # Is an ActiveRecord, serialize as hash instead of serializing the object
    elsif attribute.class.ancestors.include?(ActiveRecord::Base)
      attribute.serializable_hash
    # If an array, such as from an association, serialize the elements in the array
    elsif attribute.is_a?(Array) || attribute.is_a?(ActiveRecord::Associations::CollectionProxy)
      attribute.map { |element| serialize_attribute.call(element) }
    # otherwise, return val
    else
      attribute
    end
  end

  {}.tap do |s|
    self.class.audited_attributes.each do |attr|
      val = self.send attr
      s[attr.to_s] = serialize_attribute.call(val)
    end
  end
end
ruby
{ "resource": "" }
q1172
Auditable.Auditing.snap!
train
def snap!(options = {})
  data = options.merge(:modifications => self.snap)
  data[:tag] = self.audit_tag if self.audit_tag
  data[:action] = self.audit_action if self.audit_action
  data[:changed_by] = self.audit_changed_by if self.audit_changed_by
  self.save_audit( data )
end
ruby
{ "resource": "" }
q1173
Auditable.Auditing.last_change_of
train
def last_change_of(attribute)
  raise "#{attribute} is not audited for model #{self.class}. Audited attributes: #{self.class.audited_attributes}" unless self.class.audited_attributes.include? attribute.to_sym
  attribute = attribute.to_s # support symbol as well
  last = audits.size - 1
  last.downto(1) do |i|
    if audits[i].modifications[attribute] != audits[i-1].modifications[attribute]
      return audits[i].diff(audits[i-1])[attribute]
    end
  end
  nil
end
ruby
{ "resource": "" }
q1174
Auditable.Base.diff
train
def diff(other_audit)
  other_modifications = other_audit ? other_audit.modifications : {}

  {}.tap do |d|
    # find keys present only in this audit
    (self.modifications.keys - other_modifications.keys).each do |k|
      d[k] = [nil, self.modifications[k]] if self.modifications[k]
    end

    # find keys present only in other audit
    (other_modifications.keys - self.modifications.keys).each do |k|
      d[k] = [other_modifications[k], nil] if other_modifications[k]
    end

    # find common keys and diff values
    self.modifications.keys.each do |k|
      if self.modifications[k] != other_modifications[k]
        d[k] = [other_modifications[k], self.modifications[k]]
      end
    end
  end
end
ruby
{ "resource": "" }
q1175
Auditable.Base.diff_since
train
def diff_since(time)
  other_audit = auditable.audits.where("created_at <= ? AND id != ?", time, id).order("id DESC").limit(1).first
  diff(other_audit)
end
ruby
{ "resource": "" }
q1176
Auditable.Base.diff_since_version
train
def diff_since_version(version)
  other_audit = auditable.audits.where("version <= ? AND id != ?", version, id).order("version DESC").limit(1).first
  diff(other_audit)
end
ruby
{ "resource": "" }
q1177
BioVcf.VcfHeader.tag
train
def tag h
  h2 = h.dup
  [:show_help,:skip_header,:verbose,:quiet,:debug].each { |key| h2.delete(key) }
  info = h2.map { |k,v| k.to_s.capitalize+'='+'"'+v.to_s+'"' }.join(',')
  line = '##BioVcf=<'+info+'>'
  @lines.insert(-2,line)
  line
end
ruby
{ "resource": "" }
q1178
BioVcf.VcfRecord.method_missing
train
def method_missing(m, *args, &block)
  name = m.to_s
  if name =~ /\?$/
    # Query for empty sample name
    @sample_index ||= @header.sample_index
    return !VcfSample::empty?(@fields[@sample_index[name.chop]])
  else
    sample[name]
  end
end
ruby
{ "resource": "" }
q1179
BioVcf.VCFfile.each
train
def each
  return enum_for(:each) unless block_given?

  io = nil
  if @is_gz
    infile = open(@file)
    io = Zlib::GzipReader.new(infile)
  else
    io = File.open(@file)
  end

  header = BioVcf::VcfHeader.new
  io.each_line do |line|
    line.chomp!
    if line =~ /^##fileformat=/
      header.add(line)
      next
    end
    if line =~ /^#/
      header.add(line)
      next
    end
    fields = BioVcf::VcfLine.parse(line)
    rec = BioVcf::VcfRecord.new(fields, header)
    yield rec
  end
end
ruby
{ "resource": "" }
q1180
BioVcf.VcfGenotypeField.method_missing
train
def method_missing(m, *args, &block)
  return nil if @is_empty
  if m =~ /\?$/
    # query if a value exists, e.g., r.info.dp? or s.dp?
    v = values[fetch(m.to_s.upcase.chop)]
    return (not VcfValue::empty?(v))
  else
    v = values[fetch(m.to_s.upcase)]
    return nil if VcfValue::empty?(v)
    v = v.to_i if v =~ /^\d+$/
    v = v.to_f if v =~ /^\d+\.\d+$/
    v
  end
end
ruby
{ "resource": "" }
q1181
BioVcf.VcfGenotypeField.ilist
train
def ilist name
  v = fetch_value(name)
  return nil if not v
  v.split(',').map { |i| i.to_i }
end
ruby
{ "resource": "" }
q1182
RSpec.Matchers.be_json
train
def be_json(expected = Saharspec::Matchers::BeJson::NONE)
  Saharspec::Matchers::BeJson.new(expected)
end
ruby
{ "resource": "" }
q1183
Growlyflash.ControllerAdditions.flash_to_headers
train
def flash_to_headers
  if response.xhr? && growlyhash(true).size > 0
    response.headers['X-Message'] = URI.escape(growlyhash.to_json)
    growlyhash.each_key { |k| flash.discard(k) }
  end
end
ruby
{ "resource": "" }
q1184
Growlyflash.ControllerAdditions.growlyflash_static_notices
train
def growlyflash_static_notices(js_var = 'window.flashes')
  return if flash.empty?
  script = "#{js_var} = #{growlyhash.to_json.html_safe};".freeze
  view_context.javascript_tag(script, defer: 'defer')
end
ruby
{ "resource": "" }
q1185
Growlyflash.ControllerAdditions.growlyhash
train
def growlyhash(force = false)
  @growlyhash = nil if force
  @growlyhash ||= flash.to_hash.select { |k, v| v.is_a? String }
end
ruby
{ "resource": "" }
q1186
Protokoll.ClassMethods.protokoll
train
def protokoll(column, _options = {})
  options = { :pattern => "%Y%m#####",
              :number_symbol => "#",
              :column => column,
              :start => 0,
              :scope_by => nil }
  options.merge!(_options)

  raise ArgumentError.new("pattern can't be nil!") if options[:pattern].nil?
  raise ArgumentError.new("pattern requires at least one counter symbol #{options[:number_symbol]}") unless pattern_includes_symbols?(options)

  # Defining custom method
  send :define_method, "reserve_#{options[:column]}!".to_sym do
    self[column] = Counter.next(self, options)
  end

  # Signing before_create
  before_create do |record|
    unless record[column].present?
      record[column] = Counter.next(self, options)
    end
  end
end
ruby
{ "resource": "" }
q1187
Lex.Lexer.lex
train
def lex(input)
  @input = input
  return enum_for(:lex, input) unless block_given?

  if debug
    logger.info "lex: tokens = #{@dsl.lex_tokens}"
    logger.info "lex: states = #{@dsl.state_info}"
    logger.info "lex: ignore = #{@dsl.state_ignore}"
    logger.info "lex: error = #{@dsl.state_error}"
  end

  stream_tokens(input) do |token|
    yield token
  end
end
ruby
{ "resource": "" }
q1188
Lex.Lexer.stream_tokens
train
def stream_tokens(input, &block)
  scanner = StringScanner.new(input)
  while !scanner.eos?
    current_char = scanner.peek(1)
    if @dsl.state_ignore[current_state].include?(current_char)
      scanner.pos += current_char.size
      @char_pos_in_line += current_char.size
      next
    end

    if debug
      logger.info "lex: [#{current_state}]: lexemes = #{@dsl.state_lexemes[current_state].map(&:name)}"
    end

    # Look for regex match
    longest_token = nil
    @dsl.state_lexemes[current_state].each do |lexeme|
      match = lexeme.match(scanner)
      next if match.nil?
      longest_token = match if longest_token.nil?
      next if longest_token.value.length >= match.value.length
      longest_token = match
    end

    if longest_token
      if longest_token.action
        new_token = longest_token.action.call(self, longest_token)
        # No value returned from action move to the next token
        if new_token.nil? || !new_token.is_a?(Token)
          chars_to_skip = longest_token.value.to_s.length
          scanner.pos += chars_to_skip
          unless longest_token.name == :newline
            @char_pos_in_line += chars_to_skip
          end
          next
        end
      end
      move_by = longest_token.value.to_s.length
      start_char_pos_in_token = @char_pos_in_line + current_char.size
      longest_token.update_line(current_line, start_char_pos_in_token)
      advance_column(move_by)
      scanner.pos += move_by
    end

    # No match
    if longest_token.nil?
      # Check in errors
      if @dsl.state_error[current_state]
        token = Token.new(:error, current_char)
        start_char_pos_in_token = @char_pos_in_line + current_char.size
        token.update_line(current_line, start_char_pos_in_token)
        new_token = @dsl.state_error[current_state].call(self, token)
        advance_column(current_char.length)
        scanner.pos += current_char.length
        if new_token.is_a?(Token) || !new_token.nil?
          longest_token = new_token
        else
          next
        end
      end

      if longest_token.nil?
        complain("Illegal character `#{current_char}`")
      end
    end
    logger.info "lex: #{longest_token}" if debug
    block.call(longest_token)
  end
end
ruby
{ "resource": "" }
q1189
Kraken.Client.add_order
train
def add_order(opts={})
  required_opts = %w{ pair type ordertype volume }
  leftover = required_opts - opts.keys.map(&:to_s)
  if leftover.length > 0
    raise ArgumentError.new("Required options, not given. Input must include #{leftover}")
  end
  post_private 'AddOrder', opts
end
ruby
{ "resource": "" }
q1190
XmlSitemap.RenderEngine.render_nokogiri
train
def render_nokogiri
  unless defined? Nokogiri
    raise ArgumentError, "Nokogiri not found!"
  end

  builder = Nokogiri::XML::Builder.new(:encoding => "UTF-8") do |xml|
    xml.urlset(XmlSitemap::MAP_SCHEMA_OPTIONS) { |s|
      @items.each do |item|
        s.url do |u|
          u.loc item.target

          # Format and image tag specifications found at http://support.google.com/webmasters/bin/answer.py?hl=en&answer=178636
          if item.image_location
            u["image"].image do |a|
              a["image"].loc item.image_location
              a["image"].caption item.image_caption if item.image_caption
              a["image"].title item.image_title if item.image_title
              a["image"].license item.image_license if item.image_license
              a["image"].geo_location item.image_geolocation if item.image_geolocation
            end
          end

          # Format and video tag specifications found at http://support.google.com/webmasters/bin/answer.py?hl=en&answer=80472&topic=10079&ctx=topic#2
          if item.video_thumbnail_location && item.video_title && item.video_description && (item.video_content_location || item.video_player_location)
            u["video"].video do |a|
              a["video"].thumbnail_loc item.video_thumbnail_location
              a["video"].title item.video_title
              a["video"].description item.video_description
              a["video"].content_loc item.video_content_location if item.video_content_location
              a["video"].player_loc item.video_player_location if item.video_player_location
              a["video"].duration item.video_duration.to_s if item.video_duration
              a["video"].expiration_date item.video_expiration_date_value if item.video_expiration_date
              a["video"].rating item.video_rating.to_s if item.video_rating
              a["video"].view_count item.video_view_count.to_s if item.video_view_count
              a["video"].publication_date item.video_publication_date_value if item.video_publication_date
              a["video"].family_friendly item.video_family_friendly if item.video_family_friendly
              a["video"].category item.video_category if item.video_category
              a["video"].restriction item.video_restriction, :relationship => "allow" if item.video_restriction
              a["video"].gallery_loc item.video_gallery_location if item.video_gallery_location
              a["video"].price item.video_price.to_s, :currency => "USD" if item.video_price
              a["video"].requires_subscription item.video_requires_subscription if item.video_requires_subscription
              a["video"].uploader item.video_uploader if item.video_uploader
              a["video"].platform item.video_platform, :relationship => "allow" if item.video_platform
              a["video"].live item.video_live if item.video_live
            end
          end

          u.lastmod item.lastmod_value
          u.changefreq item.changefreq.to_s if item.changefreq
          u.priority item.priority.to_s if item.priority
        end
      end
    }
  end

  builder.to_xml
end
ruby
{ "resource": "" }
q1191
XmlSitemap.RenderEngine.render_string
train
def render_string
  result = '<?xml version="1.0" encoding="UTF-8"?>' + "\n<urlset"

  XmlSitemap::MAP_SCHEMA_OPTIONS.each do |key, val|
    result << ' ' + key + '="' + val + '"'
  end

  result << ">\n"

  item_results = []
  @items.each do |item|
    item_string = " <url>\n"
    item_string << " <loc>#{CGI::escapeHTML(item.target)}</loc>\n"

    # Format and image tag specifications found at http://support.google.com/webmasters/bin/answer.py?hl=en&answer=178636
    if item.image_location
      item_string << " <image:image>\n"
      item_string << " <image:loc>#{CGI::escapeHTML(item.image_location)}</image:loc>\n"
      item_string << " <image:caption>#{CGI::escapeHTML(item.image_caption)}</image:caption>\n" if item.image_caption
      item_string << " <image:title>#{CGI::escapeHTML(item.image_title)}</image:title>\n" if item.image_title
      item_string << " <image:license>#{CGI::escapeHTML(item.image_license)}</image:license>\n" if item.image_license
      item_string << " <image:geo_location>#{CGI::escapeHTML(item.image_geolocation)}</image:geo_location>\n" if item.image_geolocation
      item_string << " </image:image>\n"
    end

    # Format and video tag specifications found at http://support.google.com/webmasters/bin/answer.py?hl=en&answer=80472&topic=10079&ctx=topic#2
    if item.video_thumbnail_location && item.video_title && item.video_description && (item.video_content_location || item.video_player_location)
      item_string << " <video:video>\n"
      item_string << " <video:thumbnail_loc>#{CGI::escapeHTML(item.video_thumbnail_location)}</video:thumbnail_loc>\n"
      item_string << " <video:title>#{CGI::escapeHTML(item.video_title)}</video:title>\n"
      item_string << " <video:description>#{CGI::escapeHTML(item.video_description)}</video:description>\n"
      item_string << " <video:content_loc>#{CGI::escapeHTML(item.video_content_location)}</video:content_loc>\n" if item.video_content_location
      item_string << " <video:player_loc>#{CGI::escapeHTML(item.video_player_location)}</video:player_loc>\n" if item.video_player_location
      item_string << " <video:duration>#{CGI::escapeHTML(item.video_duration.to_s)}</video:duration>\n" if item.video_duration
      item_string << " <video:expiration_date>#{item.video_expiration_date_value}</video:expiration_date>\n" if item.video_expiration_date
      item_string << " <video:rating>#{CGI::escapeHTML(item.video_rating.to_s)}</video:rating>\n" if item.video_rating
      item_string << " <video:view_count>#{CGI::escapeHTML(item.video_view_count.to_s)}</video:view_count>\n" if item.video_view_count
      item_string << " <video:publication_date>#{item.video_publication_date_value}</video:publication_date>\n" if item.video_publication_date
      item_string << " <video:family_friendly>#{CGI::escapeHTML(item.video_family_friendly)}</video:family_friendly>\n" if item.video_family_friendly
      item_string << " <video:category>#{CGI::escapeHTML(item.video_category)}</video:category>\n" if item.video_category
      item_string << " <video:restriction relationship=\"allow\">#{CGI::escapeHTML(item.video_restriction)}</video:restriction>\n" if item.video_restriction
      item_string << " <video:gallery_loc>#{CGI::escapeHTML(item.video_gallery_location)}</video:gallery_loc>\n" if item.video_gallery_location
      item_string << " <video:price currency=\"USD\">#{CGI::escapeHTML(item.video_price.to_s)}</video:price>\n" if item.video_price
      item_string << " <video:requires_subscription>#{CGI::escapeHTML(item.video_requires_subscription)}</video:requires_subscription>\n" if item.video_requires_subscription
      item_string << " <video:uploader>#{CGI::escapeHTML(item.video_uploader)}</video:uploader>\n" if item.video_uploader
      item_string << " <video:platform relationship=\"allow\">#{CGI::escapeHTML(item.video_platform)}</video:platform>\n" if item.video_platform
      item_string << " <video:live>#{CGI::escapeHTML(item.video_live)}</video:live>\n" if item.video_live
      item_string << " </video:video>\n"
    end

    item_string << " <lastmod>#{item.lastmod_value}</lastmod>\n"
    item_string << " <changefreq>#{item.changefreq}</changefreq>\n" if item.changefreq
    item_string << " <priority>#{item.priority}</priority>\n" if item.priority
    item_string << " </url>\n"

    item_results << item_string
  end

  result << item_results.join("")
  result << "</urlset>\n"
  result
end
ruby
{ "resource": "" }
q1192
SimonSays.Authorizer.find_resource
train
def find_resource(resource, options = {})
  resource = resource.to_s

  scope, query = resource_scope_and_query(resource, options)
  through = options[:through] ? options[:through].to_s : nil
  assoc = through || (options[:from] ? resource.pluralize : nil)

  scope = scope.send(assoc) if assoc && scope.respond_to?(assoc)

  record = scope.where(query).first!

  if through
    instance_variable_set "@#{through.singularize}", record
    record = record.send(resource)
  end

  instance_variable_set "@#{resource}", record
end
ruby
{ "resource": "" }
q1193
SimonSays.Authorizer.authorize
train
def authorize(required = nil, options)
  if through = options[:through]
    name = through.to_s.singularize.to_sym
  else
    name = options[:resource]
  end

  record = instance_variable_get("@#{name}")

  if record.nil? # must be devise scope
    record = send("current_#{name}")
    send "authenticate_#{name}!"
  end

  role_attr = record.class.role_attribute_name
  actual = record.send(role_attr)

  required ||= options[role_attr]
  required = [required] unless Array === required

  # actual roles must have at least
  # one required role (array intersection)
  ((required & actual).size > 0).tap do |res|
    raise Denied.new(role_attr, required, actual) unless res
  end
end
ruby
{ "resource": "" }
q1194
XmlSitemap.Index.add
train
def add(map, use_offsets=true)
  raise ArgumentError, 'XmlSitemap::Map object required!' unless map.kind_of?(XmlSitemap::Map)
  raise ArgumentError, 'Map is empty!' if map.empty?

  @maps << {
    :loc => use_offsets ? map.index_url(@offsets[map.group], @secure) : map.plain_index_url(@secure),
    :lastmod => map.created_at.utc.iso8601
  }
  @offsets[map.group] += 1
end
ruby
{ "resource": "" }
q1195
XmlSitemap.Index.render
train
def render
  xml = Builder::XmlMarkup.new(:indent => 2)
  xml.instruct!(:xml, :version => '1.0', :encoding => 'UTF-8')
  xml.sitemapindex(XmlSitemap::INDEX_SCHEMA_OPTIONS) { |s|
    @maps.each do |item|
      s.sitemap do |m|
        m.loc item[:loc]
        m.lastmod item[:lastmod]
      end
    end
  }.to_s
end
ruby
{ "resource": "" }
q1196
XmlSitemap.Index.render_to
train
def render_to(path, options={})
  overwrite = options[:overwrite] || true
  path = File.expand_path(path)

  if File.exists?(path) && !overwrite
    raise RuntimeError, "File already exists and not overwritable!"
  end

  File.open(path, 'w') { |f| f.write(self.render) }
end
ruby
{ "resource": "" }
q1197
XmlSitemap.Map.add
train
def add(target, opts={})
  raise RuntimeError, 'Only up to 50k records allowed!' if @items.size > 50000
  raise ArgumentError, 'Target required!' if target.nil?
  raise ArgumentError, 'Target is empty!' if target.to_s.strip.empty?

  url = process_target(target)

  if url.length > 2048
    raise ArgumentError, "Target can't be longer than 2,048 characters!"
  end

  opts[:updated] = @created_at unless opts.key?(:updated)

  item = XmlSitemap::Item.new(url, opts)
  @items << item
  item
end
ruby
{ "resource": "" }
q1198
XmlSitemap.Map.render_to
train
def render_to(path, options={})
  overwrite = options[:overwrite] == true || true
  compress = options[:gzip] == true || false

  path = File.expand_path(path)
  path << ".gz" unless path =~ /\.gz\z/i if compress

  if File.exists?(path) && !overwrite
    raise RuntimeError, "File already exists and not overwritable!"
  end

  File.open(path, 'wb') do |f|
    unless compress
      f.write(self.render)
    else
      gz = Zlib::GzipWriter.new(f)
      gz.write(self.render)
      gz.close
    end
  end
end
ruby
{ "resource": "" }
q1199
XmlSitemap.Map.process_target
train
def process_target(str)
  if @root == true
    url(str =~ /^\// ? str : "/#{str}")
  else
    str =~ /^(http|https)/i ? str : url(str =~ /^\// ? str : "/#{str}")
  end
end
ruby
{ "resource": "" }