Dataset schema (fields repeat in this order for every record below):
  _id               string, 2–6 characters
  title             string, 9–130 characters
  partition         string, 3 distinct values
  text              string, 66–10.5k characters
  language          string, 1 distinct value
  meta_information  dict
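For illustration only, one record under this schema can be pictured as a Ruby hash. The values below are copied from the first record (q2000); the hash itself is just a sketch of the row shape and is not part of the dataset.

# Sketch of a single row (values from record q2000). The "text" field holds the
# raw Ruby source of the function named in "title"; the body is elided here.
record = {
  "_id"              => "q2000",
  "title"            => "RTP.Record.delete_children",
  "partition"        => "train",
  "text"             => "def delete_children(attribute)\n  ...\nend",
  "language"         => "ruby",
  "meta_information" => { "resource" => "" }
}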
q2000
RTP.Record.delete_children
train
def delete_children(attribute)
  self.send(attribute).each { |c| c.parent = nil }
  self.send(attribute).clear
end
ruby
{ "resource": "" }
q2001
RTP.Record.set_attributes
train
def set_attributes(values)
  import_indices([values.length - 1, @max_elements - 1].min).each_with_index do |indices, i|
    param = nil
    if indices
      param = values.values_at(*indices)
      param = param[0] if param.length == 1
    end
    self.send("#{@attributes[i]}=", param)
  end
  @crc = values[-1]
end
ruby
{ "resource": "" }
q2002
RTP.Record.discard_unsupported_attributes
train
def discard_unsupported_attributes(values, options={})
  case self
  when SiteSetup
    options[:version].to_f >= 2.6 ? values : values[0..-4]
  when Field
    options[:version].to_f >= 2.64 ? values : values[0..-4]
  when ExtendedField
    options[:version].to_f >= 2.4 ? values : values[0..-5]
  when ControlPoint
    options[:version].to_f >= 2.64 ? values : values[0..31] + values[35..-1]
  else
    values
  end
end
ruby
{ "resource": "" }
q2003
Cfer.Config.include_config
train
def include_config(*files)
  include_base = File.dirname(@config_file) if @config_file
  files.each do |file|
    path = File.join(include_base, file) if include_base
    include_file(path || file)
  end
end
ruby
{ "resource": "" }
q2004
RTP.Plan.write
train
def write(file, options={})
  f = open_file(file)
  f.write(to_s(options))
  f.close
end
ruby
{ "resource": "" }
q2005
Cfer::Cfn.Client.tail
train
def tail(options = {})
  q = []
  event_id_highwater = nil
  counter = 0
  number = options[:number] || 0
  for_each_event name do |fetched_event|
    q.unshift fetched_event if counter < number
    counter = counter + 1
  end

  while q.size > 0
    event = q.shift
    yield event
    event_id_highwater = event.event_id
  end

  sleep_time = 1

  running = true
  if options[:follow]
    while running
      sleep_time = [sleep_time * (options[:backoff] || 1), options[:backoff_max_wait] || 15].min
      begin
        stack_status = describe_stacks(stack_name: name).stacks.first.stack_status
        running = running && (/.+_(COMPLETE|FAILED)$/.match(stack_status) == nil)

        yielding = true
        for_each_event name do |fetched_event|
          if event_id_highwater == fetched_event.event_id
            yielding = false
          end
          if yielding
            q.unshift fetched_event
          end
        end
      rescue Aws::CloudFormation::Errors::Throttling
        Cfer::LOGGER.debug "AWS SDK is being throttled..."
        # Keep going though.
      rescue Aws::CloudFormation::Errors::ValidationError
        running = false
      end

      while q.size > 0
        event = q.shift
        yield event
        event_id_highwater = event.event_id
        sleep_time = 1
      end

      sleep sleep_time if running unless options[:no_sleep]
    end
  end
end
ruby
{ "resource": "" }
q2006
Cfer.Block.build_from_block
train
def build_from_block(*args, &block)
  pre_block
  Docile.dsl_eval(self, *args, &block) if block
  post_block
  self
end
ruby
{ "resource": "" }
q2007
RTP.Plan.add_angle
train
def add_angle(item, angle_tag, direction_tag, angle, direction, current_angle)
  if !self.send(current_angle) || angle != self.send(current_angle)
    self.send("#{current_angle}=", angle)
    DICOM::Element.new(angle_tag, angle, :parent => item)
    DICOM::Element.new(direction_tag, (direction.empty? ? 'NONE' : direction), :parent => item)
  end
end
ruby
{ "resource": "" }
q2008
RTP.Plan.add_couch_position
train
def add_couch_position(item, tag, value, current)
  if !self.send(current) || value != self.send(current)
    self.send("#{current}=", value)
    DICOM::Element.new(tag, (value.empty? ? '' : value.to_f * 10), :parent => item)
  end
end
ruby
{ "resource": "" }
q2009
RTP.Plan.add_doserate
train
def add_doserate(value, item)
  if !@current_doserate || value != @current_doserate
    @current_doserate = value
    DICOM::Element.new('300A,0115', value, :parent => item)
  end
end
ruby
{ "resource": "" }
q2010
RTP.Plan.create_control_point
train
def create_control_point(cp, sequence, options={})
  cp_item = DICOM::Item.new(:parent => sequence)
  # Some CP attributes will always be written (CP index, BLD positions & Cumulative meterset weight).
  # The other attributes are only written if they are different from the previous control point.
  # Control Point Index:
  DICOM::Element.new('300A,0112', "#{cp.index}", :parent => cp_item)
  # Beam Limiting Device Position Sequence:
  create_beam_limiting_device_positions(cp_item, cp, options)
  # Source to Surface Distance:
  add_ssd(cp.ssd, cp_item)
  # Cumulative Meterset Weight:
  DICOM::Element.new('300A,0134', cp.monitor_units.to_f, :parent => cp_item)
  # Referenced Dose Reference Sequence:
  create_referenced_dose_reference(cp_item) if options[:dose_ref]
  # Attributes that are only added if they carry an updated value:
  # Nominal Beam Energy:
  add_energy(cp.energy, cp_item)
  # Dose Rate Set:
  add_doserate(cp.doserate, cp_item)
  # Gantry Angle & Rotation Direction:
  add_angle(cp_item, '300A,011E', '300A,011F', cp.gantry_angle, cp.gantry_dir, :current_gantry)
  # Beam Limiting Device Angle & Rotation Direction:
  add_angle(cp_item, '300A,0120', '300A,0121', cp.collimator_angle, cp.collimator_dir, :current_collimator)
  # Patient Support Angle & Rotation Direction:
  add_angle(cp_item, '300A,0122', '300A,0123', cp.couch_pedestal, cp.couch_ped_dir, :current_couch_pedestal)
  # Table Top Eccentric Angle & Rotation Direction:
  add_angle(cp_item, '300A,0125', '300A,0126', cp.couch_angle, cp.couch_dir, :current_couch_angle)
  # Table Top Vertical Position:
  add_couch_position(cp_item, '300A,0128', cp.couch_vertical, :current_couch_vertical)
  # Table Top Longitudinal Position:
  add_couch_position(cp_item, '300A,0129', cp.couch_longitudinal, :current_couch_longitudinal)
  # Table Top Lateral Position:
  add_couch_position(cp_item, '300A,012A', cp.couch_lateral, :current_couch_lateral)
  # Isocenter Position (x\y\z):
  add_isosenter(cp.parent.parent.site_setup, cp_item)
  cp_item
end
ruby
{ "resource": "" }
q2011
RTP.Plan.create_beam_limiting_devices
train
def create_beam_limiting_devices(beam_item, field)
  bl_seq = DICOM::Sequence.new('300A,00B6', :parent => beam_item)
  # The ASYMX item ('backup jaws') doesn't exist on all models:
  if ['SYM', 'ASY'].include?(field.field_x_mode.upcase)
    bl_item_x = DICOM::Item.new(:parent => bl_seq)
    DICOM::Element.new('300A,00B8', "ASYMX", :parent => bl_item_x)
    DICOM::Element.new('300A,00BC', "1", :parent => bl_item_x)
  end
  # The ASYMY item is always created:
  bl_item_y = DICOM::Item.new(:parent => bl_seq)
  # RT Beam Limiting Device Type:
  DICOM::Element.new('300A,00B8', "ASYMY", :parent => bl_item_y)
  # Number of Leaf/Jaw Pairs:
  DICOM::Element.new('300A,00BC', "1", :parent => bl_item_y)
  # MLCX item is only created if leaves are defined:
  # (NB: The RTP file doesn't specify leaf position boundaries, so we
  # have to set these based on a set of known MLC types, their number
  # of leaves, and their leaf boundary positions.)
  if field.control_points.length > 0
    bl_item_mlcx = DICOM::Item.new(:parent => bl_seq)
    DICOM::Element.new('300A,00B8', "MLCX", :parent => bl_item_mlcx)
    num_leaves = field.control_points.first.mlc_leaves.to_i
    DICOM::Element.new('300A,00BC', num_leaves.to_s, :parent => bl_item_mlcx)
    DICOM::Element.new('300A,00BE', "#{RTP.leaf_boundaries(num_leaves).join("\\")}", :parent => bl_item_mlcx)
  end
  bl_seq
end
ruby
{ "resource": "" }
q2012
RTP.Plan.create_asym_item
train
def create_asym_item(cp, dcm_parent, axis, options={})
  val1 = cp.send("dcm_collimator_#{axis.to_s}1", options[:scale])
  val2 = cp.send("dcm_collimator_#{axis.to_s}2", options[:scale])
  item = DICOM::Item.new(:parent => dcm_parent)
  # RT Beam Limiting Device Type:
  DICOM::Element.new('300A,00B8', "ASYM#{axis.to_s.upcase}", :parent => item)
  # Leaf/Jaw Positions:
  DICOM::Element.new('300A,011C', "#{val1}\\#{val2}", :parent => item)
  item
end
ruby
{ "resource": "" }
q2013
RTP.Plan.create_dose_reference
train
def create_dose_reference(dcm, description)
  dr_seq = DICOM::Sequence.new('300A,0010', :parent => dcm)
  dr_item = DICOM::Item.new(:parent => dr_seq)
  # Dose Reference Number:
  DICOM::Element.new('300A,0012', '1', :parent => dr_item)
  # Dose Reference Structure Type:
  DICOM::Element.new('300A,0014', 'SITE', :parent => dr_item)
  # Dose Reference Description:
  DICOM::Element.new('300A,0016', description, :parent => dr_item)
  # Dose Reference Type:
  DICOM::Element.new('300A,0020', 'TARGET', :parent => dr_item)
  dr_seq
end
ruby
{ "resource": "" }
q2014
RTP.Plan.create_referenced_dose_reference
train
def create_referenced_dose_reference(cp_item)
  # Referenced Dose Reference Sequence:
  rd_seq = DICOM::Sequence.new('300C,0050', :parent => cp_item)
  rd_item = DICOM::Item.new(:parent => rd_seq)
  # Cumulative Dose Reference Coeffecient:
  DICOM::Element.new('300A,010C', '', :parent => rd_item)
  # Referenced Dose Reference Number:
  DICOM::Element.new('300C,0051', '1', :parent => rd_item)
  rd_seq
end
ruby
{ "resource": "" }
q2015
YamlRecord.Base.save
train
def save
  run_callbacks(:before_save)
  run_callbacks(:before_create) unless self.is_created

  existing_items = self.class.all
  if self.new_record?
    existing_items << self
  else # update existing record
    updated_item = existing_items.find { |item| item.id == self.id }
    return false unless updated_item
    updated_item.attributes = self.attributes
  end

  raw_data = existing_items ? existing_items.map { |item| item.persisted_attributes } : []
  self.class.write_contents(raw_data) if self.valid?

  run_callbacks(:after_create) unless self.is_created
  run_callbacks(:after_save)
  true
rescue IOError
  false
end
ruby
{ "resource": "" }
q2016
YamlRecord.Base.destroy
train
def destroy
  run_callbacks(:before_destroy)

  new_data = self.class.all.reject { |item| item.persisted_attributes == self.persisted_attributes }.map { |item| item.persisted_attributes }
  self.class.write_contents(new_data)
  self.is_destroyed = true

  run_callbacks(:after_destroy)
  true
rescue IOError
  false
end
ruby
{ "resource": "" }
q2017
Cfer::Core.Resource.tag
train
def tag(k, v, **options)
  self[:Properties][:Tags] ||= []
  self[:Properties][:Tags].delete_if { |kv| kv["Key"] == k }
  self[:Properties][:Tags].unshift({"Key" => k, "Value" => v}.merge(options))
end
ruby
{ "resource": "" }
q2018
Cfer::Core.Stack.parameter
train
def parameter(name, options = {})
  param = {}
  options.each do |key, v|
    next if v === nil
    k = key.to_s.camelize.to_sym
    param[k] =
      case k
      when :AllowedPattern
        if v.class == Regexp
          v.source
        end
      when :Default
        @parameters[name] ||= v
      end
    param[k] ||= v
  end
  param[:Type] ||= 'String'
  self[:Parameters][name] = param
end
ruby
{ "resource": "" }
q2019
Cfer::Core.Stack.resource
train
def resource(name, type, options = {}, &block)
  Preconditions.check_argument(/[[:alnum:]]+/ =~ name, "Resource name must be alphanumeric")
  clazz = Cfer::Core::Resource.resource_class(type)
  rc = clazz.new(name, type, self, options, &block)
  self[:Resources][name] = rc
  rc.handle
end
ruby
{ "resource": "" }
q2020
Cfer::Core.Stack.include_template
train
def include_template(*files)
  include_base = options[:include_base] || File.dirname(caller.first.split(/:\d/,2).first)
  files.each do |file|
    path = File.join(include_base, file)
    include_file(path)
  end
end
ruby
{ "resource": "" }
q2021
Cfer::Core.Stack.lookup_outputs
train
def lookup_outputs(stack)
  client = @options[:client] || raise(Cfer::Util::CferError, "Can not fetch stack outputs without a client")
  client.fetch_outputs(stack)
end
ruby
{ "resource": "" }
q2022
RackFakeS3.SortedObjectList.list
train
def list(options)
  marker = options[:marker]
  prefix = options[:prefix]
  max_keys = options[:max_keys] || 1000
  delimiter = options[:delimiter]

  ms = S3MatchSet.new

  marker_found = true
  pseudo = nil
  if marker
    marker_found = false
    if !@object_map[marker]
      pseudo = S3Object.new
      pseudo.name = marker
      @sorted_set << pseudo
    end
  end

  count = 0
  @sorted_set.each do |s3_object|
    if marker_found && (!prefix or s3_object.name.index(prefix) == 0)
      count += 1
      if count <= max_keys
        ms.matches << s3_object
      else
        is_truncated = true
        break
      end
    end

    if marker and marker == s3_object.name
      marker_found = true
    end
  end

  if pseudo
    @sorted_set.delete(pseudo)
  end

  return ms
end
ruby
{ "resource": "" }
q2023
Gusteau.Config.build_node
train
def build_node(node_name, env_hash, node_hash)
  node_config = {
    'server'     => node_hash.slice('host', 'port', 'user', 'password', 'platform', 'vagrant'),
    'attributes' => (node_hash['attributes'] || {}).deep_merge(env_hash['attributes'] || {}),
    'run_list'   => node_hash['run_list'] || env_hash['run_list'],
    'before'     => env_hash['before'] || @config['before'],
    'after'      => env_hash['after'] || @config['after']
  }
  node_config['server'].delete 'attributes'
  Gusteau::Node.new(node_name, node_config)
end
ruby
{ "resource": "" }
q2024
RackFakeS3.Servlet.normalize_request
train
def normalize_request(rack_req)
  host = rack_req.host

  s_req = Request.new
  s_req.path = path_for_rack_request(rack_req)
  s_req.is_path_style = true
  s_req.rack_request = rack_req

  if !@root_hostnames.include?(host)
    s_req.bucket = host.split(".")[0]
    s_req.is_path_style = false
  end

  s_req.http_verb = rack_req.request_method

  case rack_req.request_method
  when 'PUT'
    normalize_put(rack_req,s_req)
  when 'GET','HEAD'
    normalize_get(rack_req,s_req)
  when 'DELETE'
    normalize_delete(rack_req,s_req)
  when 'POST'
    normalize_post(rack_req,s_req)
  when 'OPTIONS'
    nomalize_options(rack_req,s_req)
  else
    return false
  end

  if s_req.type.nil?
    return false
  end

  return s_req
end
ruby
{ "resource": "" }
q2025
Metior.Report.generate
train
def generate(target_dir, with_assets = true)
  target_dir = File.expand_path target_dir
  copy_assets target_dir if with_assets

  render.each do |view_name, output|
    file_name = File.join target_dir, view_name.to_s.downcase + '.html'
    begin
      output_file = File.open file_name, 'wb'
      output_file.write output
    ensure
      output_file.close
    end
  end
end
ruby
{ "resource": "" }
q2026
Metior.Report.copy_assets
train
def copy_assets(target_dir)
  FileUtils.mkdir_p target_dir

  self.class.assets.map do |asset|
    asset_path = self.class.find asset
    asset_dir = File.join target_dir, File.dirname(asset)
    FileUtils.mkdir_p asset_dir unless File.exists? asset_dir
    FileUtils.cp_r asset_path, asset_dir
  end
end
ruby
{ "resource": "" }
q2027
Metior.Repository.actor
train
def actor(actor)
  id = self.class::Actor.id_for(actor)
  @actors[id] ||= self.class::Actor.new(self, actor)
end
ruby
{ "resource": "" }
q2028
Metior.Repository.commits
train
def commits(range = current_branch)
  range = parse_range range
  commits = cached_commits range

  if commits.empty?
    base_commit, raw_commits = load_commits(range)
    commits = build_commits raw_commits
    unless base_commit.nil?
      base_commit = self.class::Commit.new(self, base_commit)
      base_commit.add_child commits.last.id
      @commits[base_commit.id] = base_commit
    end
  else
    if range.first == ''
      unless commits.last.parents.empty?
        raw_commits = load_commits(''..commits.last.id).last
        commits += build_commits raw_commits[0..-2]
      end
    else
      if commits.first.id != range.last
        raw_commits = load_commits(commits.first.id..range.last).last
        commits = build_commits(raw_commits) + commits
      end
      unless commits.last.parents.include? range.first
        raw_commits = load_commits(range.first..commits.last.id).last
        commits += build_commits raw_commits
      end
    end
  end

  CommitCollection.new commits, range
end
ruby
{ "resource": "" }
q2029
Metior.Repository.file_stats
train
def file_stats(range = current_branch)
  support! :file_stats

  stats = {}
  commits(range).each_value do |commit|
    commit.added_files.each do |file|
      stats[file] = { :modifications => 0 } unless stats.key? file
      stats[file][:added_date] = commit.authored_date
      stats[file][:modifications] += 1
    end
    commit.modified_files.each do |file|
      stats[file] = { :modifications => 0 } unless stats.key? file
      stats[file][:last_modified_date] = commit.authored_date
      stats[file][:modifications] += 1
    end
    commit.deleted_files.each do |file|
      stats[file] = { :modifications => 0 } unless stats.key? file
      stats[file][:deleted_date] = commit.authored_date
    end
  end

  stats
end
ruby
{ "resource": "" }
q2030
Metior.Repository.build_commits
train
def build_commits(raw_commits)
  child_commit_id = nil
  raw_commits.map do |commit|
    commit = self.class::Commit.new(self, commit)
    commit.add_child child_commit_id unless child_commit_id.nil?
    child_commit_id = commit.id
    @commits[commit.id] = commit
    commit
  end
end
ruby
{ "resource": "" }
q2031
Metior.Repository.cached_commits
train
def cached_commits(range)
  commits = []
  direction = nil
  if @commits.key? range.last
    current_commits = [@commits[range.last]]
    direction = :parents
  elsif @commits.key? range.first
    current_commits = [@commits[range.first]]
    direction = :children
  end

  unless direction.nil?
    while !current_commits.empty? do
      new_commits = []
      current_commits.each do |commit|
        new_commits += commit.send direction
        commits << commit if commit.id != range.first
        if direction == :parents && new_commits.include?(range.first)
          new_commits = []
          break
        end
      end
      unless new_commits.include? range.first
        current_commits = new_commits.uniq.map do |commit|
          commit = @commits[commit]
          commits.include?(commit) ? nil : commit
        end.compact
      end
    end
  end

  commits.sort_by { |c| c.committed_date }.reverse
end
ruby
{ "resource": "" }
q2032
Metior.Repository.parse_range
train
def parse_range(range)
  unless range.is_a? Range
    range = range.to_s.split '..'
    range = ((range.size == 1) ? '' : range.first)..range.last
  end

  range = id_for_ref(range.first)..range.last if range.first != ''
  range.first..id_for_ref(range.last)
end
ruby
{ "resource": "" }
q2033
Metior::Adapter.ClassMethods.register_for
train
def register_for(vcs)
  vcs = Metior.find_vcs vcs
  vcs.register_adapter id, self
  class_variable_set :@@vcs, vcs
end
ruby
{ "resource": "" }
q2034
BentoSearch.OpenurlCreator.ensure_no_tags
train
def ensure_no_tags(str)
  return str unless str.html_safe?

  str = str.to_str # get it out of HTMLSafeBuffer, which messes things up
  str = strip_tags(str)
  str = HTMLEntities.new.decode(str)

  return str
end
ruby
{ "resource": "" }
q2035
Route53.DNSRecord.update
train
def update(name,type,ttl,values,comment=nil, zone_apex = nil)
  prev = self.clone
  @name = name unless name.nil?
  @type = type unless type.nil?
  @ttl = ttl unless ttl.nil?
  @values = values unless values.nil?
  @zone_apex = zone_apex unless zone_apex.nil?
  @zone.perform_actions([
    {:action => "DELETE", :record => prev},
    {:action => "CREATE", :record => self},
  ],comment)
end
ruby
{ "resource": "" }
q2036
Route53.DNSRecord.update_dirty
train
def update_dirty(name,type,ttl,values,zone_apex = nil)
  prev = self.clone
  @name = name unless name.nil?
  @type = type unless type.nil?
  @ttl = ttl unless ttl.nil?
  @values = values unless values.nil?
  @zone_apex = zone_apex unless zone_apex.nil?
  return [{:action => "DELETE", :record => prev},
          {:action => "CREATE", :record => self}]
end
ruby
{ "resource": "" }
q2037
Metior.ActorCollection.most_significant
train
def most_significant(count = 3)
  support! :line_stats
  authors = ActorCollection.new
  sort_by { |author| -author.modifications }.each do |author|
    authors << author
    break if authors.size == count
  end
  authors
end
ruby
{ "resource": "" }
q2038
Metior.ActorCollection.top
train
def top(count = 3)
  authors = ActorCollection.new
  sort_by { |author| -author.authored_commits.size }.each do |author|
    authors << author
    break if authors.size == count
  end
  authors
end
ruby
{ "resource": "" }
q2039
Metior.ActorCollection.load_commits
train
def load_commits(commit_type, actor_id = nil)
  commits = CommitCollection.new
  if actor_id.nil?
    each { |actor| commits.merge! actor.send(commit_type) }
  elsif key? actor_id
    commits = self[actor_id].send commit_type
  end
  commits
end
ruby
{ "resource": "" }
q2040
Metior::Adapter::Octokit.Repository.load_commits
train
def load_commits(range)
  base_commit = nil
  commits = []
  last_commit = nil
  loop do
    new_commits = ::Octokit.commits(@path, nil, :last_sha => last_commit, :per_page => 100, :top => range.last)
    break if new_commits.empty?
    base_commit_index = new_commits.find_index do |commit|
      commit.sha == range.first
    end unless range.first == ''
    unless base_commit_index.nil?
      if base_commit_index > 0
        commits += new_commits[0..base_commit_index-1]
      end
      base_commit = new_commits[base_commit_index]
      break
    end
    commits += new_commits
    last_commit = new_commits.last.sha
  end
  [base_commit, commits]
end
ruby
{ "resource": "" }
q2041
Autosign.Validator.validate
train
def validate(challenge_password, certname, raw_csr)
  @log.debug "running validate"
  fail unless challenge_password.is_a?(String)
  fail unless certname.is_a?(String)

  case perform_validation(challenge_password, certname, raw_csr)
  when true
    @log.debug "validated successfully"
    @log.info "Validated '#{certname}' using '#{name}' validator"
    return true
  when false
    @log.debug "validation failed"
    @log.debug "Unable to validate '#{certname}' using '#{name}' validator"
    return false
  else
    @log.error "perform_validation returned a non-boolean result"
    raise "perform_validation returned a non-boolean result"
  end
end
ruby
{ "resource": "" }
q2042
Autosign.Validator.settings
train
def settings
  @log.debug "merging settings"
  setting_sources = [get_override_settings, load_config, default_settings]
  merged_settings = setting_sources.inject({}) { |merged, hash| merged.deep_merge(hash) }
  @log.debug "using merged settings: " + merged_settings.to_s

  @log.debug "validating merged settings"
  if validate_settings(merged_settings)
    @log.debug "successfully validated merged settings"
    return merged_settings
  else
    @log.warn "validation of merged settings failed"
    @log.warn "unable to validate settings in #{self.name} validator"
    raise "settings validation error"
  end
end
ruby
{ "resource": "" }
q2043
Autosign.Validator.load_config
train
def load_config
  @log.debug "loading validator-specific configuration"
  config = Autosign::Config.new

  if config.settings.to_hash[self.name].nil?
    @log.warn "Unable to load validator-specific configuration"
    @log.warn "Cannot load configuration section named '#{self.name}'"
    return {}
  else
    @log.debug "Set validator-specific settings from config file: " + config.settings.to_hash[self.name].to_s
    return config.settings.to_hash[self.name]
  end
end
ruby
{ "resource": "" }
q2044
BentoSearch.SearchEngine.fill_in_search_metadata_for
train
def fill_in_search_metadata_for(results, normalized_arguments = {})
  results.search_args = normalized_arguments
  results.start = normalized_arguments[:start] || 0
  results.per_page = normalized_arguments[:per_page]

  results.engine_id = configuration.id
  results.display_configuration = configuration.for_display

  # We copy some configuraton info over to each Item, as a convenience
  # to display logic that may have decide what to do given only an item,
  # and may want to parameterize based on configuration.
  results.each do |item|
    item.engine_id = configuration.id
    item.decorator = configuration.lookup!("for_display.decorator")
    item.display_configuration = configuration.for_display
  end

  results
end
ruby
{ "resource": "" }
q2045
AllscriptsUnityClient.JSONUnityRequest.to_hash
train
def to_hash
  action = @parameters[:action]
  userid = @parameters[:userid]
  appname = @parameters[:appname] || @appname
  patientid = @parameters[:patientid]
  token = @parameters[:token] || @security_token
  parameter1 = process_date(@parameters[:parameter1]) || ''
  parameter2 = process_date(@parameters[:parameter2]) || ''
  parameter3 = process_date(@parameters[:parameter3]) || ''
  parameter4 = process_date(@parameters[:parameter4]) || ''
  parameter5 = process_date(@parameters[:parameter5]) || ''
  parameter6 = process_date(@parameters[:parameter6]) || ''
  data = Utilities::encode_data(@parameters[:data]) || ''

  {
    'Action' => action,
    'AppUserID' => userid,
    'Appname' => appname,
    'PatientID' => patientid,
    'Token' => token,
    'Parameter1' => parameter1,
    'Parameter2' => parameter2,
    'Parameter3' => parameter3,
    'Parameter4' => parameter4,
    'Parameter5' => parameter5,
    'Parameter6' => parameter6,
    'Data' => data
  }
end
ruby
{ "resource": "" }
q2046
AllscriptsUnityClient.Client.get_encounter_list
train
def get_encounter_list(
    userid,
    patientid,
    encounter_type = nil,
    when_param = nil,
    nostradamus = 0,
    show_past_flag = true,
    billing_provider_user_name = nil,
    show_all = false)
  magic_parameters = {
    action: 'GetEncounterList',
    userid: userid,
    patientid: patientid,
    parameter1: encounter_type,
    parameter2: when_param,
    parameter3: nostradamus,
    parameter4: unity_boolean_parameter(show_past_flag),
    parameter5: billing_provider_user_name,
    # According to the developer guide this parameter is no longer
    # used.
    parameter6: show_all ? 'all' : nil
  }
  response = magic(magic_parameters)

  unless response.is_a?(Array)
    response = [ response ]
  end

  # Remove nil encounters
  response.delete_if do |value|
    value[:id] == '0' && value[:patientid] == '0'
  end
end
ruby
{ "resource": "" }
q2047
AllscriptsUnityClient.Client.get_task_list
train
def get_task_list(userid = nil, since = nil, delegated = nil, task_types = nil, task_statuses = nil)
  magic_parameters = {
    action: 'GetTaskList',
    userid: userid,
    parameter1: since,
    parameter2: task_types,
    parameter3: task_statuses,
    parameter4: delegated
  }
  response = magic(magic_parameters)

  unless response.is_a?(Array)
    response = [ response ]
  end

  response
end
ruby
{ "resource": "" }
q2048
Route53.Zone.perform_actions
train
def perform_actions(change_list,comment=nil)
  xml_str = gen_change_xml(change_list,comment)
  @conn.request(@conn.base_url + @host_url+"/rrset","POST",xml_str)
end
ruby
{ "resource": "" }
q2049
Route53.CLI.process_options
train
def process_options
  @options.verbose = false if @options.quiet
  @options.file = (user_home+"/.route53") if @options.file.nil?

  #setup file
  if @options.setup
    setup
  end

  load_config

  @config['access_key'] = @options.access unless @options.access.nil?
  @config['secret_key'] = @options.secret unless @options.secret.nil?

  required_options("",["--access-key"]) if @config['access_key'].nil? || @config['access_key'] == ""
  required_options("",["--secret_key"]) if @config['secret_key'].nil? || @config['secret_key'] == ""
end
ruby
{ "resource": "" }
q2050
Route53.CLI.process_arguments
train
def process_arguments
  if @options.new_zone
    new_zone
  elsif @options.delete_zone
    delete_zone
  elsif @options.create_record
    create_record
  elsif @options.remove_record
    remove_record
  elsif @options.change_record
    change_record
  else
    list
  end
end
ruby
{ "resource": "" }
q2051
Mongoid.CachedJson.as_json_partial
train
def as_json_partial(options = {})
  options ||= {}
  if options[:properties] && !all_json_properties.member?(options[:properties])
    fail ArgumentError.new("Unknown properties option: #{options[:properties]}")
  end
  # partial, unmaterialized JSON
  keys, partial_json = self.class.materialize_json({
    properties: :short, is_top_level_json: true, version: Mongoid::CachedJson.config.default_version
  }.merge(options), object: self)
  [keys, partial_json]
end
ruby
{ "resource": "" }
q2052
Mongoid.CachedJson.as_json_cached
train
def as_json_cached(options = {})
  keys, json = as_json_partial(options)
  Mongoid::CachedJson.materialize_json_references_with_read_multi(keys, json)
end
ruby
{ "resource": "" }
q2053
Mongoid.CachedJson.expire_cached_json
train
def expire_cached_json
  all_json_properties.each do |properties|
    [true, false].each do |is_top_level_json|
      all_json_versions.each do |version|
        Mongoid::CachedJson.config.cache.delete(self.class.cached_json_key({
          properties: properties, is_top_level_json: is_top_level_json, version: version
        }, self.class, id))
      end
    end
  end
end
ruby
{ "resource": "" }
q2054
Autosign.Journal.setup
train
def setup
  @log.debug "using journalfile: " + self.settings['journalfile']
  journalfile = self.settings['journalfile']
  store = YAML::Store.new(journalfile, true)
  store.ultra_safe = true
  return store
end
ruby
{ "resource": "" }
q2055
Autosign.Journal.validate_uuid
train
def validate_uuid(uuid)
  unless uuid.is_a?(String)
    @log.error "UUID is not a string"
    return false
  end
  unless !!/^\S{8}-\S{4}-4\S{3}-[89abAB]\S{3}-\S{12}$/.match(uuid.to_s)
    @log.error "UUID is not a valid V4 UUID"
    return false
  end
  return true
end
ruby
{ "resource": "" }
q2056
Metior.CommitCollection.<<
train
def <<(commit)
  return self if key? commit.id

  if @additions.nil? && empty? && commit.line_stats?
    @additions = commit.additions
    @deletions = commit.deletions
  elsif !@additions.nil?
    @additions += commit.additions
    @deletions += commit.deletions
  end

  super
end
ruby
{ "resource": "" }
q2057
Metior.CommitCollection.activity
train
def activity
  activity = {}
  return activity if empty?

  commit_count = values.size

  active_days = {}
  each do |commit|
    date = commit.committed_date.utc
    day = Time.utc(date.year, date.month, date.day).send :to_date
    if active_days.key? day
      active_days[day] += 1
    else
      active_days[day] = 1
    end
  end

  most_active_day = active_days.sort_by { |day, count| count }.last.first

  activity[:first_commit_date] = last.committed_date
  activity[:last_commit_date] = first.committed_date

  age_in_days = (Time.now - activity[:first_commit_date]) / 86400.0

  activity[:active_days] = active_days
  activity[:most_active_day] = most_active_day
  activity[:commits_per_day] = commit_count / age_in_days
  activity[:commits_per_active_day] = commit_count.to_f / active_days.size

  activity
end
ruby
{ "resource": "" }
q2058
Metior.CommitCollection.authors
train
def authors(commit_id = nil)
  authors = ActorCollection.new
  if commit_id.nil?
    each { |commit| authors << commit.author }
  elsif key? commit_id
    authors << self[commit_id].author
  end
  authors
end
ruby
{ "resource": "" }
q2059
Metior.CommitCollection.before
train
def before(date)
  date = Time.parse date if date.is_a? String
  commits = CommitCollection.new
  each do |commit|
    commits << commit if commit.committed_date < date
  end
  commits
end
ruby
{ "resource": "" }
q2060
Metior.CommitCollection.by
train
def by(*author_ids)
  author_ids = author_ids.flatten.map do |author_id|
    author_id.is_a?(Actor) ? author_id.id : author_id
  end
  commits = CommitCollection.new
  each do |commit|
    commits << commit if author_ids.include? commit.author.id
  end
  commits
end
ruby
{ "resource": "" }
q2061
Metior.CommitCollection.changing
train
def changing(*files)
  support! :file_stats
  commits = CommitCollection.new
  each do |commit|
    commit_files = commit.added_files + commit.deleted_files + commit.modified_files
    commits << commit unless (commit_files & files).empty?
  end
  commits
end
ruby
{ "resource": "" }
q2062
Metior.CommitCollection.committers
train
def committers(commit_id = nil)
  committers = ActorCollection.new
  if commit_id.nil?
    each { |commit| committers << commit.committer }
  elsif key? commit_id
    committers << self[commit_id].committer
  end
  committers
end
ruby
{ "resource": "" }
q2063
Metior.CommitCollection.line_history
train
def line_history
  support! :line_stats
  history = { :additions => [], :deletions => [] }
  values.reverse.each do |commit|
    history[:additions] << commit.additions
    history[:deletions] << -commit.deletions
  end
  history
end
ruby
{ "resource": "" }
q2064
Metior.CommitCollection.most_significant
train
def most_significant(count = 10)
  support! :line_stats
  commits = CommitCollection.new
  sort_by { |commit| -commit.modifications }.each do |commit|
    commits << commit
    break if commits.size == count
  end
  commits
end
ruby
{ "resource": "" }
q2065
Metior.CommitCollection.with_impact
train
def with_impact(line_count)
  support! :line_stats
  commits = CommitCollection.new
  each do |commit|
    commits << commit if commit.modifications >= line_count
  end
  commits
end
ruby
{ "resource": "" }
q2066
Metior.CommitCollection.load_line_stats
train
def load_line_stats
  @additions = 0
  @deletions = 0
  return if empty?

  line_stats = nil
  if @range.nil?
    ids = values.reject { |c| c.line_stats? }.map { |c| c.id }
    line_stats = first.repo.load_line_stats ids unless ids.empty?
  else
    line_stats = first.repo.load_line_stats @range
  end
  unless line_stats.nil?
    line_stats.each do |id, stats|
      commit = self[id]
      commit.line_stats = stats
    end
  end

  each do |commit|
    @additions += commit.additions
    @deletions += commit.deletions
  end
end
ruby
{ "resource": "" }
q2067
Metior::Report.View.render
train
def render(*args)
  begin
    features = self.class.send :class_variable_get, :@@required_features
    super if features.all? { |feature| repository.supports? feature }
  rescue Metior::UnsupportedError
  end
end
ruby
{ "resource": "" }
q2068
BentoSearch.SearchController.search
train
def search
  engine = BentoSearch.get_engine(params[:engine_id])
  # put it in an iVar mainly for testing purposes.
  @engine = engine

  unless engine.configuration.allow_routable_results == true
    raise AccessDenied.new("engine needs to be registered with :allow_routable_results => true")
  end

  @results = engine.search safe_search_args(engine, params)

  # template name of a partial with 'yield' to use to wrap the results
  @partial_wrapper = @results.display_configuration.lookup!("ajax.wrapper_template")

  # partial HTML results
  render "bento_search/search/search", :layout => false
end
ruby
{ "resource": "" }
q2069
Metior::Adapter::Grit.Repository.current_branch
train
def current_branch
  branch = @grit_repo.head
  return branch.name unless branch.nil?
  commit = @grit_repo.commit('HEAD')
  commit.id unless commit.nil?
end
ruby
{ "resource": "" }
q2070
Metior::Adapter::Grit.Repository.load_line_stats
train
def load_line_stats(ids)
  if ids.is_a? Range
    if ids.first == ''
      range = ids.last
    else
      range = '%s..%s' % [ids.first, ids.last]
    end

    options = { :numstat => true, :timeout => false }
    output = @grit_repo.git.native :log, options, range
    commit_stats = ::Grit::CommitStats.list_from_string @grit_repo, output
  else
    commit_stats = []
    ids.each_slice(500) do |id_slice|
      options = { :numstat => true, :timeout => false }
      output = @grit_repo.git.native :log, options, *id_slice
      commit_stats += ::Grit::CommitStats.list_from_string @grit_repo, output
    end
  end

  Hash[commit_stats.map do |stats|
    [stats.first, [stats.last.additions, stats.last.deletions]]
  end]
end
ruby
{ "resource": "" }
q2071
Metior::Adapter::Grit.Repository.load_branches
train
def load_branches
  Hash[@grit_repo.branches.map { |b| [b.name, b.commit.id] }]
end
ruby
{ "resource": "" }
q2072
Metior::Adapter::Grit.Repository.load_commits
train
def load_commits(range)
  if range.first == ''
    base_commit = nil
    range = range.last
  else
    base_commit = @grit_repo.commit(range.first)
    range = '%s..%s' % [range.first, range.last]
  end

  options = { :pretty => 'raw', :timeout => false }
  output = @grit_repo.git.native :rev_list, options, range
  commits = ::Grit::Commit.list_from_string @grit_repo, output

  [base_commit, commits]
end
ruby
{ "resource": "" }
q2073
Metior::Adapter::Grit.Repository.load_name_and_description
train
def load_name_and_description
  description = @grit_repo.description
  if description.start_with? 'Unnamed repository'
    @name = ''
    @description = ''
  else
    description = description.lines.to_a
    @name = description.shift.strip
    @description = description.join("\n").strip
  end
end
ruby
{ "resource": "" }
q2074
Metior::Adapter::Grit.Repository.load_tags
train
def load_tags
  Hash[@grit_repo.tags.map { |b| [b.name, b.commit.id] }]
end
ruby
{ "resource": "" }
q2075
Nestive.LayoutHelper.area
train
def area(name, content=nil, &block)
  content = capture(&block) if block_given?
  append name, content
  render_area name
end
ruby
{ "resource": "" }
q2076
Nestive.LayoutHelper.render_area
train
def render_area(name)
  [].tap do |output|
    @_area_for.fetch(name, []).reverse_each do |method_name, content|
      output.public_send method_name, content
    end
  end.join.html_safe
end
ruby
{ "resource": "" }
q2077
BentoSearch.GoogleBooksEngine.hash_to_item
train
def hash_to_item(item_response)
  v_info = item_response["volumeInfo"] || {}

  item = ResultItem.new

  item.unique_id = item_response["id"]

  item.title = format_title(v_info)
  item.publisher = v_info["publisher"]
  # previewLink gives you your search results highlighted, preferable
  # if it exists.
  item.link = v_info["previewLink"] || v_info["canonicalVolumeLink"]
  item.abstract = sanitize v_info["description"]
  item.year = get_year v_info["publishedDate"]
  # sometimes we have yyyy-mm, but we need a date to make a ruby Date,
  # we'll just say the 1st.
  item.publication_date = case v_info["publishedDate"]
    when /(\d\d\d\d)-(\d\d)/ then Date.parse "#{$1}-#{$2}-01"
    when /(\d\d\d\d)-(\d\d)-(\d\d)/ then Date.parse v_info["published_date"]
    else nil
  end

  item.format = if v_info["printType"] == "MAGAZINE"
    :serial
  else
    "Book"
  end

  item.language_code = v_info["language"]

  (v_info["authors"] || []).each do |author_name|
    item.authors << Author.new(:display => author_name)
  end

  # Find ISBN's, prefer ISBN-13
  item.isbn = (v_info["industryIdentifiers"] || []).find {|node| node["type"] == "ISBN_13"}.try {|node| node["identifier"]}
  unless item.isbn # Look for ISBN-10 okay
    item.isbn = (v_info["industryIdentifiers"] || []).find {|node| node["type"] == "ISBN_10"}.try {|node| node["identifier"]}
  end

  # only VERY occasionally does a GBS hit have an OCLC number, but let's look
  # just in case.
  item.oclcnum = (v_info["industryIdentifiers"] || []).
    find {|node| node["type"] == "OTHER" && node["identifier"].starts_with?("OCLC:") }.
    try do |node|
      node =~ /OCLC:(.*)/ ? $1 : nil
    end

  # save viewability status in custom_data. PARTIAL, ALL_PAGES, NO_PAGES or UNKNOWN.
  # https://developers.google.com/books/docs/v1/reference/volumes#resource
  item.custom_data[:viewability] = item_response["accessInfo"].try {|h| h["viewability"]}
  item.link_is_fulltext = (item.custom_data[:viewability] == "ALL_PAGES") if item.custom_data[:viewability]

  return item
end
ruby
{ "resource": "" }
q2078
BentoSearch.GoogleBooksEngine.args_to_search_url
train
def args_to_search_url(arguments)
  query = if arguments[:query].kind_of? Hash
    #multi-field
    arguments[:query].collect {|field, query_value| fielded_query(query_value, field)}.join(" ")
  elsif arguments[:search_field]
    fielded_query(arguments[:query], arguments[:search_field])
  else
    arguments[:query]
  end

  query_url = base_url + "volumes?q=#{CGI.escape query}"
  if configuration.api_key
    query_url += "&key=#{configuration.api_key}"
  end

  if arguments[:per_page]
    query_url += "&maxResults=#{arguments[:per_page]}"
  end
  if arguments[:start]
    query_url += "&startIndex=#{arguments[:start]}"
  end

  if arguments[:sort] && (defn = sort_definitions[arguments[:sort]]) && (value = defn[:implementation])
    query_url += "&orderBy=#{CGI.escape(value)}"
  end

  return query_url
end
ruby
{ "resource": "" }
q2079
BentoSearch.StandardDecorator.render_authors_list
train
def render_authors_list
  parts = []

  first_three = self.authors.slice(0,3)
  first_three.each_with_index do |author, index|
    parts << _h.content_tag("span", :class => "author") do
      self.author_display(author)
    end
    if (index + 1) < first_three.length
      parts << "; "
    end
  end

  if self.authors.length > 3
    parts << I18n.t("bento_search.authors_et_al")
  end

  return _h.safe_join(parts, "")
end
ruby
{ "resource": "" }
q2080
BentoSearch.StandardDecorator.render_citation_details
train
def render_citation_details
  # \u00A0 is unicode non-breaking space to keep labels and values from
  # getting separated.
  result_elements = []

  result_elements.push("#{I18n.t('bento_search.volume')}\u00A0#{volume}") if volume.present?

  result_elements.push("#{I18n.t('bento_search.issue')}\u00A0#{issue}") if issue.present?

  if (! start_page.blank?) && (! end_page.blank?)
    result_elements.push html_escape "#{I18n.t('bento_search.pages')}\u00A0#{start_page}-#{end_page}"
  elsif ! start_page.blank?
    result_elements.push html_escape "#{I18n.t('bento_search.page')}\u00A0#{start_page}"
  end

  return nil if result_elements.empty?

  return result_elements.join(", ").html_safe
end
ruby
{ "resource": "" }
q2081
BentoSearch.StandardDecorator.render_summary
train
def render_summary
  summary = nil
  max_chars = (self.display_configuration.try {|h| h["summary_max_chars"]}) || 280

  if self.snippets.length > 0 && !(self.display_configuration.try {|h| h["prefer_abstract_as_summary"]} && self.abstract)
    summary = self.snippets.first
    self.snippets.slice(1, self.snippets.length).each do |snippet|
      summary += ' '.html_safe + snippet if (summary.length + snippet.length) <= max_chars
    end
  else
    summary = _h.bento_truncate( self.abstract, :length => max_chars )
  end

  summary
end
ruby
{ "resource": "" }
q2082
Autosign.Config.configfile
train
def configfile
  @log.debug "Finding config file"
  @config_file_paths.each { |file|
    @log.debug "Checking if file '#{file}' exists"
    if File.file?(file)
      @log.debug "Reading config file from: " + file
      config_file = File.read(file)
      parsed_config_file = YAML.load(config_file)
      #parsed_config_file = IniParse.parse(config_file).to_hash
      @log.debug "configuration read from config file: " + parsed_config_file.to_s
      return parsed_config_file if parsed_config_file.is_a?(Hash)
    else
      @log.debug "Configuration file '#{file}' not found"
    end
  }
  return {}
end
ruby
{ "resource": "" }
q2083
Autosign.Config.validate_config_file
train
def validate_config_file(configfile = location)
  @log.debug "validating config file"
  unless File.file?(configfile)
    @log.error "configuration file not found at: #{configfile}"
    raise Autosign::Exceptions::NotFound
  end

  # check if file is world-readable
  if File.world_readable?(configfile) or File.world_writable?(configfile)
    @log.error "configuration file #{configfile} is world-readable or world-writable, which is a security risk"
    raise Autosign::Exceptions::Permissions
  end

  configfile
end
ruby
{ "resource": "" }
q2084
BentoSearch.RISCreator.format_author_name
train
def format_author_name(author)
  if author.last.present? && author.first.present?
    str = "#{author.last}, #{author.first}"
    if author.middle.present?
      middle = author.middle
      middle += "." if middle.length == 1
      str += " #{middle}"
    end
    return str
  elsif author.display.present?
    return author.display
  elsif author.last.present?
    return author.last?
  else
    return nil
  end
end
ruby
{ "resource": "" }
q2085
SendWithUs.Api.send_email
train
def send_email(email_id, to, options = {})
  if email_id.nil?
    raise SendWithUs::ApiNilEmailId, 'email_id cannot be nil'
  end

  payload = { email_id: email_id, recipient: to }

  if options[:data] && options[:data].any?
    payload[:email_data] = options[:data]
  end
  if options[:from] && options[:from].any?
    payload[:sender] = options[:from]
  end
  if options[:cc] && options[:cc].any?
    payload[:cc] = options[:cc]
  end
  if options[:bcc] && options[:bcc].any?
    payload[:bcc] = options[:bcc]
  end
  if options[:esp_account]
    payload[:esp_account] = options[:esp_account]
  end
  if options[:version_name]
    payload[:version_name] = options[:version_name]
  end
  if options[:headers] && options[:headers].any?
    payload[:headers] = options[:headers]
  end
  if options[:tags] && options[:tags].any?
    payload[:tags] = options[:tags]
  end
  if options[:locale]
    payload[:locale] = options[:locale]
  end
  if options[:files] && options[:files].any?
    payload[:files] = options[:files].map do |file_data|
      SendWithUs::File.new(file_data).to_h
    end
  end

  SendWithUs::ApiRequest.new(@configuration).post(:send, payload.to_json)
end
ruby
{ "resource": "" }
q2086
ActiveAdmin.Duplicatable.enable_resource_duplication_via_form
train
def enable_resource_duplication_via_form
  action_item(*compatible_action_item_parameters) do
    if controller.action_methods.include?('new') && authorized?(ActiveAdmin::Auth::CREATE, active_admin_config.resource_class)
      link_to(I18n.t(:duplicate_model, default: "Duplicate %{model}", scope: [:active_admin], model: active_admin_config.resource_label), action: :new, _source_id: resource.id)
    end
  end

  controller do
    before_action only: :new do
      if !params[:_source_id].blank?
        source = resource_class.find(params[:_source_id])
        @resource ||= source.amoeba_dup if source
      end
    end
  end
end
ruby
{ "resource": "" }
q2087
ActiveAdmin.Duplicatable.enable_resource_duplication_via_save
train
def enable_resource_duplication_via_save
  action_item(*compatible_action_item_parameters) do
    if controller.action_methods.include?('new') && authorized?(ActiveAdmin::Auth::CREATE, active_admin_config.resource_class)
      link_to(I18n.t(:duplicate_model, default: "Duplicate %{model}", scope: [:active_admin], model: active_admin_config.resource_label), action: :duplicate)
    end
  end

  member_action :duplicate do
    resource = resource_class.find(params[:id])
    authorize! ActiveAdmin::Auth::CREATE, resource

    duplicate = resource.amoeba_dup
    if duplicate.save
      redirect_to({ action: :edit, id: duplicate.id }, flash: { notice: "#{active_admin_config.resource_label} was successfully duplicated." })
    else
      redirect_to({ action: :show }, flash: { error: "#{active_admin_config.resource_label} could not be duplicated." })
    end
  end
end
ruby
{ "resource": "" }
q2088
ActiveAdmin.Duplicatable.enable_resource_duplication_via_custom_method
train
def enable_resource_duplication_via_custom_method(method)
  action_item(*compatible_action_item_parameters) do
    if controller.action_methods.include?('new') && authorized?(ActiveAdmin::Auth::CREATE, active_admin_config.resource_class)
      link_to(I18n.t(:duplicate_model, default: "Duplicate %{model}", scope: [:active_admin], model: active_admin_config.resource_label), action: :duplicate)
    end
  end

  member_action :duplicate do
    resource = resource_class.find(params[:id])
    authorize! ActiveAdmin::Auth::CREATE, resource

    begin
      duplicate = resource.send method
      redirect_to({ action: :edit, id: duplicate.id }, flash: { notice: "#{active_admin_config.resource_label} was successfully duplicated." })
    rescue => e
      Rails.logger.warn(e)
      redirect_to({ action: :show }, flash: { error: "#{active_admin_config.resource_label} could not be duplicated." })
    end
  end
end
ruby
{ "resource": "" }
q2089
GnuplotRB.ErrorHandling.check_errors
train
def check_errors(raw: false)
  return if @err_array.empty?
  command = ''
  rest = ''
  @semaphore.synchronize do
    command = @err_array.first
    rest = @err_array[1..-1].join('; ')
    @err_array.clear
  end
  message = if raw
              "#{command};#{rest}}"
            else
              "Error in previous command (\"#{command}\"): \"#{rest}\""
            end
  fail GnuplotError, message
end
ruby
{ "resource": "" }
q2090
GnuplotRB.ErrorHandling.handle_stderr
train
def handle_stderr(stream)
  @err_array = []
  # synchronize access to @err_array
  @semaphore = Mutex.new
  Thread.new do
    until (line = stream.gets).nil?
      line.strip!
      @semaphore.synchronize { @err_array << line if line.size > 3 }
    end
  end
end
ruby
{ "resource": "" }
q2091
GnuplotRB.OptionHandling.option
train
def option(key, *value)
  if value.empty?
    value = options[key]
    value = value[0] if value && value.size == 1
    value
  else
    options(key => value)
  end
end
ruby
{ "resource": "" }
q2092
Blurrily.Client.put
train
def put(needle, ref, weight = 0)
  check_valid_needle(needle)
  check_valid_ref(ref)
  raise(ArgumentError, "WEIGHT value must be in #{WEIGHT_RANGE}") unless WEIGHT_RANGE.include?(weight)

  cmd = ["PUT", @db_name, needle, ref, weight]
  send_cmd_and_get_results(cmd)
  return
end
ruby
{ "resource": "" }
q2093
SfnParameters.Utils.lock_content
train
def lock_content(content)
  content = content.to_smash
  content.merge!(:sfn_lock_enabled => true)
  safe = SfnParameters::Safe.build(
    config.fetch(:sfn_parameters, :safe, Smash.new)
  )
  safe.lock(dump_json(content))
end
ruby
{ "resource": "" }
q2094
SfnParameters.Utils.unlock_content
train
def unlock_content(content)
  content = content.to_smash
  if content[:sfn_parameters_lock]
    safe = SfnParameters::Safe.build(
      config.fetch(:sfn_parameters, :safe, Smash.new)
    )
    load_json(safe.unlock(content)).to_smash.merge(:sfn_lock_enabled => true)
  else
    content
  end
end
ruby
{ "resource": "" }
q2095
Clockwork.API.balance
train
def balance
  xml = Clockwork::XML::Balance.build( self )
  response = Clockwork::HTTP.post( Clockwork::API::BALANCE_URL, xml, @use_ssl )
  balance = Clockwork::XML::Balance.parse( response )
end
ruby
{ "resource": "" }
q2096
GnuplotRB.Multiplot.mix_options
train
def mix_options(options)
  all_options = @options.merge(options)
  specific_options, plot_options = all_options.partition { |key, _value| specific_option?(key) }
  yield(plot_options, default_options.merge(specific_options))
end
ruby
{ "resource": "" }
q2097
RRSchedule.Schedule.generate
train
def generate(params={})
  raise "You need to specify at least 1 team" if @teams.nil? || @teams.empty?
  raise "You need to specify at least 1 rule" if @rules.nil? || @rules.empty?

  arrange_flights
  init_stats

  @gamedays = []; @rounds = []

  @flights.each_with_index do |teams,flight_id|
    current_cycle = current_round = 0
    teams = teams.sort_by{rand} if @shuffle

    #loop to generate the whole round-robin(s) for the current flight
    begin
      t = teams.clone
      games = []

      #process one round
      while !t.empty? do
        team_a = t.shift
        team_b = t.reverse!.shift
        t.reverse!

        x = (current_cycle % 2) == 0 ? [team_a,team_b] : [team_b,team_a]
        matchup = {:team_a => x[0], :team_b => x[1]}
        games << matchup
      end
      #done processing round

      current_round += 1

      #Team rotation (the first team is fixed)
      teams = teams.insert(1,teams.delete_at(teams.size-1))

      #add the round in memory
      @rounds ||= []
      @rounds[flight_id] ||= []
      @rounds[flight_id] << Round.new(
        :round => current_round,
        :cycle => current_cycle + 1,
        :round_with_cycle => current_cycle * (teams.size-1) + current_round,
        :flight => flight_id,
        :games => games.collect { |g|
          Game.new(
            :team_a => g[:team_a],
            :team_b => g[:team_b]
          )
        }
      )
      #done adding round

      #have we completed a full round-robin for the current flight?
      if current_round == teams.size-1
        current_cycle += 1
        current_round = 0 if current_cycle < self.cycles
      end
    end until current_round == teams.size-1 && current_cycle==self.cycles
  end

  dispatch_games(@rounds)
  self
end
ruby
{ "resource": "" }
q2098
RRSchedule.Schedule.to_s
train
def to_s
  res = ""
  res << "#{self.gamedays.size.to_s} gamedays\n"
  self.gamedays.each do |gd|
    res << gd.date.strftime("%Y-%m-%d") + "\n"
    res << "==========\n"
    gd.games.sort{|g1,g2| g1.gt == g2.gt ? g1.ps <=> g2.ps : g1.gt <=> g2.gt}.each do |g|
      res << "#{g.ta.to_s} VS #{g.tb.to_s} on playing surface #{g.ps} at #{g.gt.strftime("%I:%M %p")}\n"
    end
    res << "\n"
  end
  res
end
ruby
{ "resource": "" }
q2099
RRSchedule.Schedule.next_game_date
train
def next_game_date(dt,wday)
  dt += 1 until wday == dt.wday && !self.exclude_dates.include?(dt)
  dt
end
ruby
{ "resource": "" }