_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q2600
|
Regression.Base.covariance2
|
train
|
# Sample covariance of two equal-length numeric series:
# E[(X - E[X]) * (Y - E[Y])], using the sibling `mean` helper.
#
# xs, ys - arrays of numbers with the same length.
#
# Returns the covariance.
# Raises RuntimeError when the lengths differ.
#
# NOTE: when every operand is an Integer the trailing `/` truncates
# (integer division); pass Floats for a fractional result.
def covariance2(xs, ys)
  raise "Length xs and ys must be equal" unless xs.length == ys.length
  ev_x, ev_y = mean(xs), mean(ys)
  xs.zip(ys)
    .map { |x, y| (x - ev_x) * (y - ev_y) }
    .inject(0, :+) / xs.length
end
|
ruby
|
{
"resource": ""
}
|
q2601
|
GeoElevation.Srtm.get_file_name
|
train
|
# Builds the SRTM tile file name covering the given coordinate,
# e.g. (45.5, -120.3) -> "N45W121.hgt". Tiles are named after the
# integer degrees of their south-west corner.
def get_file_name(latitude, longitude)
  hemisphere_ns = latitude >= 0 ? 'N' : 'S'
  hemisphere_ew = longitude >= 0 ? 'E' : 'W'
  # floor (not truncate) so e.g. -0.5 falls into the S01 tile.
  lat_part = format('%02d', latitude.floor.to_i.abs)
  lon_part = format('%03d', longitude.floor.to_i.abs)
  "#{hemisphere_ns}#{lat_part}#{hemisphere_ew}#{lon_part}.hgt"
end
|
ruby
|
{
"resource": ""
}
|
q2602
|
GeoElevation.SrtmFile.get_elevation
|
train
|
# Returns the elevation for a coordinate inside this tile.
#
# The coordinate must lie within the tile's 1x1 degree bounding box,
# whose south-west corner is (@latitude, @longitude).
#
# Raises RuntimeError when the coordinate falls outside the tile.
def get_elevation(latitude, longitude)
  if ! (@latitude <= latitude && latitude < @latitude + 1)
    raise "Invalid latitude #{latitude} for file #{@file_name}"
  end
  if ! (@longitude <= longitude && longitude < @longitude + 1)
    raise "Invalid longitude #{longitude} for file #{@file_name}"
  end
  # Map the coordinate to a cell in the tile's elevation grid.
  row, column = get_row_and_column(latitude, longitude)
  #points = self.square_side ** 2
  get_elevation_from_row_and_column(row.to_i, column.to_i)
end
|
ruby
|
{
"resource": ""
}
|
q2603
|
GeoElevation.Undulations.get_value_at_file_position
|
train
|
# Reads the 32-bit record at the given index from the open undulations
# file and decodes it with #unpack.
#
# position - zero-based record index; records are 4 bytes wide and the
#            file carries a 4-byte header, hence the `4 +` offset.
#
# Returns the decoded Float, or 0 when decoding fails (deliberate
# best-effort: the bare rescue covers short reads returning nil bytes
# and the 'Invalid binary' raise from #unpack).
def get_value_at_file_position(position)
  @file.seek(4 + position * 4)
  bytes = @file.read(4)
  begin
    # Assemble the 4 bytes little-endian into an unsigned 32-bit integer.
    value = bytes[0].ord * 256**0 + bytes[1].ord * 256**1 + bytes[2].ord * 256**2 + bytes[3].ord * 256**3
    result = unpack(value)
  rescue
    result = 0
  end
  result
end
|
ruby
|
{
"resource": ""
}
|
q2604
|
GeoElevation.Undulations.unpack
|
train
|
# Decodes an unsigned 32-bit integer as an IEEE-754 single-precision
# (binary32) float: 1 sign bit, 8 exponent bits, 23 mantissa bits.
#
# n - Integer holding the raw 32 bits.
#
# Returns the decoded Float.
# Raises RuntimeError for NaN / infinity encodings (exponent == 255).
#
# Fixed: the original assigned a dead, typo'd local (`resul = nil`).
def unpack(n)
  sign = n >> 31
  exponent = (n >> 23) & 0xFF
  mantissa = n & 0x7FFFFF
  if exponent >= 1 && exponent <= 254
    # Normalized number: implicit leading 1 on the mantissa.
    result = (-1)**sign * (1 + mantissa * 2**(-23)) * 2**(exponent - 127)
  elsif exponent == 0
    # Denormalized number: no implicit bit, fixed exponent of -126.
    result = (-1)**sign * mantissa * 2**(-23) * 2**(-126)
  else
    # exponent == 255 encodes NaN or +/- infinity.
    raise 'Invalid binary'
  end
  result.to_f
end
|
ruby
|
{
"resource": ""
}
|
q2605
|
ChefRunDeck.State.add_state
|
train
|
# Registers a node in state and returns the stored record.
#
# node   - String node name.
# user   - recorded as the record's creator.
# params - Hash-like; an optional 'type' entry is copied onto the record.
#
# Returns the node's state entry as reported by find_state.
#
# Fixed: replaced the obfuscated `(n = {}) && (n[:name] = node)`
# chained-assignment trick with a plain hash literal.
def add_state(node, user, params)
  # => Create a Node-State Object
  n = {
    name: node,
    created: DateTime.now,
    creator: user
  }
  n[:type] = params['type'] if params['type']
  # => Build the Updated State
  update_state(n)
  # => Return the Added Node
  find_state(node)
end
|
ruby
|
{
"resource": ""
}
|
q2606
|
ChefRunDeck.State.delete_state
|
train
|
# Removes a node's entry from state and persists the change.
#
# node - String node name.
#
# Returns the deleted entry, or the String 'Node not present in state'
# when the node is unknown (note: a sentinel String, not an error).
def delete_state(node)
  # => Find the Node
  existing = find_state(node)
  return 'Node not present in state' unless existing
  # => Delete the Node from State
  state.delete(existing)
  # => Write Out the Updated State
  write_state
  # => Return the Deleted Node
  existing
end
|
ruby
|
{
"resource": ""
}
|
q2607
|
Somemoji.EmojiCollection.replace_character
|
train
|
def replace_character(string, &block)
string.gsub(character_pattern) do |character|
block.call(find_by_character(character), character)
end
end
|
ruby
|
{
"resource": ""
}
|
q2608
|
Somemoji.EmojiCollection.replace_code
|
train
|
def replace_code(string, &block)
string.gsub(code_pattern) do |matched_string|
block.call(find_by_code(::Regexp.last_match(1)), matched_string)
end
end
|
ruby
|
{
"resource": ""
}
|
q2609
|
Somemoji.EmojiCollection.search_by_code
|
train
|
def search_by_code(pattern)
self.class.new(
select do |emoji|
pattern === emoji.code || emoji.aliases.any? do |alias_code|
pattern === alias_code
end
end
)
end
|
ruby
|
{
"resource": ""
}
|
q2610
|
Scorpion.Hunt.inject
|
train
|
def inject( object )
trip.object = object
object.send :scorpion_hunt=, self
object.injected_attributes.each do |attr|
next if object.send "#{ attr.name }?"
next if attr.lazy?
object.send :inject_dependency, attr, fetch( attr.contract )
end
object.send :on_injected
object
end
|
ruby
|
{
"resource": ""
}
|
q2611
|
Scorpion.AttributeSet.define_attribute
|
train
|
def define_attribute( name, contract, **options )
attributes[name.to_sym] = Attribute.new name, contract, options
end
|
ruby
|
{
"resource": ""
}
|
q2612
|
Pluginator::Extensions.PluginsMap.plugins_map
|
train
|
def plugins_map(type)
@plugins_map ||= {}
type = type.to_s
@plugins_map[type] ||= Hash[
@plugins[type].map do |plugin|
[class2name(plugin), plugin]
end
]
end
|
ruby
|
{
"resource": ""
}
|
q2613
|
WebsocketGui.Base.run!
|
train
|
def run!(runtime_config = {})
@websocket_config.merge! runtime_config
EM.run do
if @websocket_config[:tick_interval]
EM.add_periodic_timer(@websocket_config[:tick_interval]) do
socket_trigger(:on_tick, @socket_connected)
end
end
EventMachine::WebSocket.run(host: @websocket_config[:socket_host], port: @websocket_config[:socket_port]) do |socket|
@socket_active = socket
socket.onopen do |handshake|
@socket_connected = true
socket_trigger(:on_socket_open, handshake)
end
socket.onmessage do |msg|
process_message(msg)
end
socket.onclose do
socket_trigger(:on_socket_close)
@socket_connected = false
end
end
Launchy.open("http://#{@websocket_config[:http_host]}:#{@websocket_config[:http_port]}/")
WebsocketGui::SinatraWrapper.view_path = @websocket_config[:view]
WebsocketGui::SinatraWrapper.run!(
port: @websocket_config[:http_port],
bind: @websocket_config[:http_host])
end
end
|
ruby
|
{
"resource": ""
}
|
q2614
|
Pluginator::Extensions.FirstClass.first_class!
|
train
|
def first_class!(type, klass)
@plugins[type] or raise Pluginator::MissingType.new(type, @plugins.keys)
klass = string2class(klass)
plugins_map(type)[klass] or
raise Pluginator::MissingPlugin.new(type, klass, plugins_map(type).keys)
end
|
ruby
|
{
"resource": ""
}
|
q2615
|
ZerigoDNS::Resource.ClassMethods.process_response
|
train
|
# Unwraps the root element of an API response body and converts it.
#
# Arrays are turned into lists of resources via process_array, Hashes
# into a single resource via from_response; any other payload is
# returned untouched.
def process_response response
  without_root = response.body.values.first
  if without_root.is_a?(Array)
    process_array(response, without_root)
  elsif without_root.is_a?(Hash)
    from_response(response, without_root)
  else
    without_root
  end
end
|
ruby
|
{
"resource": ""
}
|
q2616
|
Scorpion.Object.inject_from
|
train
|
def inject_from( dependencies, overwrite = false )
injected_attributes.each do |attr|
next unless dependencies.key? attr.name
if overwrite || !send( "#{ attr.name }?" )
send( "#{ attr.name }=", dependencies[ attr.name ] )
end
end
dependencies
end
|
ruby
|
{
"resource": ""
}
|
q2617
|
Scorpion.Object.inject_from!
|
train
|
def inject_from!( dependencies, overwrite = false )
injected_attributes.each do |attr|
next unless dependencies.key? attr.name
val = dependencies.delete( attr.name )
if overwrite || !send( "#{ attr.name }?" )
send( "#{ attr.name }=", val )
end
end
dependencies
end
|
ruby
|
{
"resource": ""
}
|
q2618
|
ZerigoDNS::Resource::Attributes.InstanceMethods.method_missing
|
train
|
# Dynamic attribute accessors backed by the `attributes` hash.
#
# "foo=" with one argument stores the value under "foo"; "foo" with no
# arguments reads it (nil when absent).
#
# Raises ArgumentError on a wrong argument count.
#
# Fixed: `mtd.to_s.chars.to_a.last == '='` replaced with the idiomatic
# (and allocation-free) `end_with?('=')`.
# NOTE(review): consider also overriding respond_to_missing? so that
# respond_to? reflects these dynamic accessors.
def method_missing mtd, *args
  if mtd.to_s.end_with?('=')
    raise ArgumentError, "Invalid number of arguments (#{args.length} for 1)" if args.length != 1
    # Strip the trailing "=" to get the attribute name.
    attributes[mtd.to_s.slice(0, mtd.to_s.length - 1)] = args.first
  else
    raise ArgumentError, "Invalid number of arguments (#{args.length} for 0)" if args.length != 0
    attributes[mtd.to_s]
  end
end
|
ruby
|
{
"resource": ""
}
|
q2619
|
Beaglebone.AINPin.run_on_change
|
train
|
def run_on_change(callback, mv_change=10, interval=0.01, repeats=nil)
AIN::run_on_change(callback, @pin, mv_change, interval, repeats)
end
|
ruby
|
{
"resource": ""
}
|
q2620
|
Beaglebone.AINPin.run_on_threshold
|
train
|
def run_on_threshold(callback, mv_lower, mv_upper, mv_reset=10, interval=0.01, repeats=nil)
AIN::run_on_threshold(callback, @pin, mv_lower, mv_upper, mv_reset, interval, repeats)
end
|
ruby
|
{
"resource": ""
}
|
q2621
|
Beaglebone.AINPin.run_once_on_threshold
|
train
|
def run_once_on_threshold(callback, mv_lower, mv_upper, mv_reset=10, interval=0.01)
AIN::run_once_on_threshold(callback, @pin, mv_lower, mv_upper, mv_reset, interval)
end
|
ruby
|
{
"resource": ""
}
|
q2622
|
DataMapper.Validation.validate
|
train
|
# Runs validations for the given context, replacing the current error
# set with the freshly-computed violations.
#
# context_name - validation context (defaults to default_validation_context).
#
# Returns self so calls can be chained.
def validate(context_name = default_validation_context)
  errors.clear
  validation_violations(context_name).each { |v| errors.add(v) }
  self
end
|
ruby
|
{
"resource": ""
}
|
q2623
|
Pluginator.Group.register_plugin
|
train
|
# Adds klass to the plugin registry under the given type (normalized
# to a String key), ignoring duplicate registrations.
def register_plugin(type, klass)
  key = type.to_s
  bucket = (@plugins[key] ||= [])
  bucket << klass unless bucket.include?(klass)
end
|
ruby
|
{
"resource": ""
}
|
q2624
|
Scorpion.Hunter.find_dependency
|
train
|
# Resolves the dependency satisfying the hunt's contract, checking this
# hunter's own map first and falling back to the parent chain.
#
# Returns the dependency, or nil when no ancestor can satisfy it.
def find_dependency( hunt )
  dependency = dependency_map.find( hunt.contract )
  dependency ||= parent.find_dependency( hunt ) if parent
  dependency
end
|
ruby
|
{
"resource": ""
}
|
q2625
|
Pluginator::Extensions.FirstAsk.first_ask!
|
train
|
def first_ask!(type, method_name, *params)
@plugins[type] or raise Pluginator::MissingType.new(type, @plugins.keys)
try_to_find(type, method_name, params) or
raise Pluginator::MissingPlugin.new(type, "first_ask: #{method_name}", plugins_map(type).keys)
end
|
ruby
|
{
"resource": ""
}
|
q2626
|
Beaglebone.GPIOPin.run_on_edge
|
train
|
def run_on_edge(callback, edge, timeout=nil, repeats=nil)
GPIO::run_on_edge(callback, @pin, edge, timeout, repeats)
end
|
ruby
|
{
"resource": ""
}
|
q2627
|
ZerigoDNS::Resource::Naming.ClassMethods.default_resource_name
|
train
|
# Derives a snake_case resource name from the class name by inserting
# an underscore before each capital and lowercasing,
# e.g. "ZerigoDNS::HostRecord" -> "host_record".
def default_resource_name
  demodulized = self.to_s.split('::').last
  snake = demodulized.gsub(/([A-Z])/, '_\1').downcase
  # Drop the leading underscore produced by the first capital.
  snake.slice(1, snake.length)
end
|
ruby
|
{
"resource": ""
}
|
q2628
|
Hdf5.H5Dataspace.offset_simple
|
train
|
def offset_simple(offsets)
raise ArgumentError.new("offsets should have ndims elements") unless offsets.size == ndims
basic_offset_simple(@id, offsets.ffi_mem_pointer_hsize_t)
end
|
ruby
|
{
"resource": ""
}
|
q2629
|
Rundeck.Request.api_token_header
|
train
|
# Ensures the Rundeck auth-token header is present on the request.
#
# options - request options Hash; options[:headers] is created/merged.
# path    - request path; the j_security_check login endpoint is exempt.
#
# Returns nil for the exempt path, otherwise the merged headers hash.
# Raises Error::MissingCredentials when no api_token is configured.
def api_token_header(options, path = nil)
  return nil if path == '/j_security_check'
  unless @api_token
    fail Error::MissingCredentials, 'Please set a api_token for user'
  end
  (options[:headers] ||= {}).merge!('X-Rundeck-Auth-Token' => @api_token)
end
|
ruby
|
{
"resource": ""
}
|
q2630
|
Pluginator.NameConverter.name2class
|
train
|
# Resolves a plugin path such as "foo/bar_baz" to the constant
# Foo::BarBaz, walking one namespace level per path segment.
def name2class(name)
  name.to_s.split(%r{/}).reduce(Kernel) do |namespace, segment|
    # "bar_baz" / "bar-baz" -> "BarBaz"
    const_name = segment.capitalize.gsub(/[_-](.)/) { |match| match[1].upcase }
    namespace.const_get(const_name)
  end
end
|
ruby
|
{
"resource": ""
}
|
q2631
|
Scorpion.Stinger.sting!
|
train
|
def sting!( object )
return object unless scorpion
if object
assign_scorpion object
assign_scorpion_to_enumerable object
end
object
end
|
ruby
|
{
"resource": ""
}
|
q2632
|
Pluginator::Extensions.Matching.matching
|
train
|
def matching(type, list)
list.map do |plugin|
(plugins_map(type) || {})[string2class(plugin)]
end
end
|
ruby
|
{
"resource": ""
}
|
q2633
|
Pluginator::Extensions.Matching.matching!
|
train
|
def matching!(type, list)
@plugins[type] or raise Pluginator::MissingType.new(type, @plugins.keys)
list.map do |plugin|
plugin = string2class(plugin)
plugins_map(type)[plugin] or
raise Pluginator::MissingPlugin.new(type, plugin, plugins_map(type).keys)
end
end
|
ruby
|
{
"resource": ""
}
|
q2634
|
Rundeck.Client.objectify
|
train
|
# Wraps a parsed response body in ObjectifiedHash objects.
#
# Returns an ObjectifiedHash for a Hash, an Array of them for an
# Array, nil for nil; anything else raises Error::Parsing.
def objectify(result)
  case result
  when Hash
    ObjectifiedHash.new(result)
  when Array
    result.map { |entry| ObjectifiedHash.new(entry) }
  when nil
    nil
  else
    fail Error::Parsing, "Couldn't parse a response body"
  end
end
|
ruby
|
{
"resource": ""
}
|
q2635
|
ZerigoDNS::Resource::Rest.ClassMethods.convert
|
train
|
def convert object
return {resource_name => object} if object.is_a? Hash
{resource_name => object.to_hash}
end
|
ruby
|
{
"resource": ""
}
|
q2636
|
ReqresRspec.Collector.sort
|
train
|
# Sorts the collected records in place: primarily by the request's
# symbolized path, then by spec title for records sharing a path.
#
# Returns the sorted records array.
def sort
  self.records.sort! do |x,y|
    comp = x[:request][:symbolized_path] <=> y[:request][:symbolized_path]
    comp.zero? ? (x[:title] <=> y[:title]) : comp
  end
end
|
ruby
|
{
"resource": ""
}
|
q2637
|
ReqresRspec.Collector.read_response_headers
|
train
|
# Collects response headers, dropping any whose name matches one of
# EXCLUDE_RESPONSE_HEADER_PATTERNS, then normalizes the remaining
# names via cleanup_header.
#
# NOTE(review): `starts_with?` is an ActiveSupport alias (core Ruby is
# `start_with?`) — presumably Rails is loaded here; confirm.
# NOTE(review): the one-argument reject block would receive a
# [key, value] pair if `response.headers` were a plain Hash, making
# `h.starts_with?` fail — looks like it yields header names; verify.
def read_response_headers(response)
  raw_headers = response.headers
  headers = {}
  EXCLUDE_RESPONSE_HEADER_PATTERNS.each do |pattern|
    # `h if h.starts_with? pattern` is truthy exactly when the pattern
    # matches, so matching headers are rejected.
    raw_headers = raw_headers.reject { |h| h if h.starts_with? pattern }
  end
  raw_headers.each do |key, val|
    headers.merge!(cleanup_header(key) => val)
  end
  headers
end
|
ruby
|
{
"resource": ""
}
|
q2638
|
ReqresRspec.Collector.get_symbolized_path
|
train
|
def get_symbolized_path(request)
request_path = (request.env['REQUEST_URI'] || request.path).dup
request_params =
request.env['action_dispatch.request.parameters'] ||
request.env['rack.request.form_hash'] ||
request.env['rack.request.query_hash']
if request_params
request_params
.except(*EXCLUDE_PARAMS)
.select { |_, value| value.is_a?(String) }
.each { |key, value| request_path.sub!("/#{value}", "/:#{key}") if value.to_s != '' }
end
request_path.freeze
end
|
ruby
|
{
"resource": ""
}
|
q2639
|
Scorpion.DependencyMap.capture
|
train
|
def capture( contract, **options, &builder )
active_dependency_set.unshift Dependency::CapturedDependency.new( define_dependency( contract, options, &builder ) ) # rubocop:disable Metrics/LineLength
end
|
ruby
|
{
"resource": ""
}
|
q2640
|
Scorpion.DependencyMap.replicate_from
|
train
|
def replicate_from( other_map )
other_map.each do |dependency|
if replica = dependency.replicate
dependency_set << replica
end
end
self
end
|
ruby
|
{
"resource": ""
}
|
q2641
|
Beaglebone.SPIDevice.xfer
|
train
|
def xfer(tx_data, readbytes=0, speed=nil, delay=nil, bpw=nil)
SPI::xfer(@spi, tx_data, readbytes, speed, delay, bpw)
end
|
ruby
|
{
"resource": ""
}
|
q2642
|
Rundeck.Configuration.options
|
train
|
# Builds a Hash snapshot of the current configuration, mapping each
# valid option key to the value of its reader method.
def options
  VALID_OPTIONS_KEYS.each_with_object({}) do |key, snapshot|
    snapshot[key] = send(key)
  end
end
|
ruby
|
{
"resource": ""
}
|
q2643
|
MessageMediaMessages.RepliesController.check_replies
|
train
|
def check_replies
# Prepare query url.
_path_url = '/v1/replies'
_query_builder = Configuration.base_uri.dup
_query_builder << _path_url
_query_url = APIHelper.clean_url _query_builder
# Prepare headers.
_headers = {
'accept' => 'application/json'
}
# Prepare and execute HttpRequest.
_request = @http_client.get(
_query_url,
headers: _headers
)
AuthManager.apply(_request, _path_url)
_context = execute_request(_request)
validate_response(_context)
# Return appropriate response type.
decoded = APIHelper.json_deserialize(_context.response.raw_body)
CheckRepliesResponse.from_hash(decoded)
end
|
ruby
|
{
"resource": ""
}
|
q2644
|
FitgemOauth2.Client.heartrate_time_series
|
train
|
def heartrate_time_series(start_date: nil, end_date: nil, period: nil)
warn '[DEPRECATION] `heartrate_time_series` is deprecated. Please use `hr_series_for_date_range` or `hr_series_for_period` instead.'
regular_time_series_guard(
start_date: start_date,
end_date: end_date,
period: period
)
second = period || format_date(end_date)
url = ['user', user_id, 'activities/heart/date', format_date(start_date), second].join('/')
get_call(url + '.json')
end
|
ruby
|
{
"resource": ""
}
|
q2645
|
FitgemOauth2.Client.intraday_heartrate_time_series
|
train
|
def intraday_heartrate_time_series(start_date: nil, end_date: nil, detail_level: nil, start_time: nil, end_time: nil)
intraday_series_guard(
start_date: start_date,
end_date: end_date,
detail_level: detail_level,
start_time: start_time,
end_time: end_time
)
end_date = format_date(end_date) || '1d'
url = ['user', user_id, 'activities/heart/date', format_date(start_date), end_date, detail_level].join('/')
if start_time && end_time
url = [url, 'time', format_time(start_time), format_time(end_time)].join('/')
end
get_call(url + '.json')
end
|
ruby
|
{
"resource": ""
}
|
q2646
|
MessageMediaMessages.FaradayClient.execute_as_string
|
train
|
def execute_as_string(http_request)
response = @connection.send(
http_request.http_method.downcase,
http_request.query_url
) do |request|
request.headers = http_request.headers
unless http_request.parameters.empty?
request.body = http_request.parameters
end
end
convert_response(response)
end
|
ruby
|
{
"resource": ""
}
|
q2647
|
MessageMediaMessages.FaradayClient.convert_response
|
train
|
def convert_response(response)
HttpResponse.new(response.status, response.headers, response.body)
end
|
ruby
|
{
"resource": ""
}
|
q2648
|
RailsAdminClone.ModelCloner.clone_object
|
train
|
def clone_object(old_object)
object = build_from(old_object)
assign_attributes_for(object, get_model_attributes_from(old_object))
object
end
|
ruby
|
{
"resource": ""
}
|
q2649
|
RailsAdminClone.ModelCloner.clone_has_one
|
train
|
def clone_has_one(old_object, new_object)
old_object.class.reflect_on_all_associations(:has_one).each do |association|
old_association = old_object.send(association.name)
build_has_one(new_object, association, old_association) if build_has_one?(old_object, association)
end
new_object
end
|
ruby
|
{
"resource": ""
}
|
q2650
|
RailsAdminClone.ModelCloner.clone_has_many
|
train
|
def clone_has_many(old_object, new_object)
associations = old_object.class.reflect_on_all_associations(:has_many)
.select{|a| !a.options.keys.include?(:through)}
associations.each do |association|
old_object.send(association.name).each do |old_association|
new_object.send(association.name).build.tap do |new_association|
assign_association(association, old_association, new_association)
end
end
end
new_object
end
|
ruby
|
{
"resource": ""
}
|
q2651
|
WatirNokogiri.Button.text
|
train
|
def text
assert_exists
tn = @element.node_name.downcase
case tn
when 'input'
@element.get_attribute(:value)
when 'button'
@element.text
else
raise Exception::Error, "unknown tag name for button: #{tn}"
end
end
|
ruby
|
{
"resource": ""
}
|
q2652
|
FitgemOauth2.Client.intraday_activity_time_series
|
train
|
def intraday_activity_time_series(resource: nil, start_date: nil, end_date: nil, detail_level: nil,
start_time: nil, end_time: nil)
# converting to symbol to allow developer to use either 'calories' or :calories
resource = resource.to_sym
unless %i[calories steps distance floors elevation].include?(resource)
raise FitgemOauth2::InvalidArgumentError,
'Must specify resource to fetch intraday time series data for.'\
' One of (:calories, :steps, :distance, :floors, or :elevation) is required.'
end
unless start_date
raise FitgemOauth2::InvalidArgumentError,
'Must specify the start_date to fetch intraday time series data'
end
end_date ||= '1d'
unless detail_level && %w(1min 15min).include?(detail_level)
raise FitgemOauth2::InvalidArgumentError,
'Must specify the data resolution to fetch intraday time series data for.'\
' One of (\"1d\" or \"15min\") is required.'
end
resource_path = [
'user', @user_id,
'activities', resource,
'date', format_date(start_date),
end_date, detail_level
].join('/')
if start_time || end_time
resource_path =
[resource_path, 'time', format_time(start_time), format_time(end_time)].join('/')
end
get_call("#{resource_path}.json")
end
|
ruby
|
{
"resource": ""
}
|
q2653
|
FitgemOauth2.Client.activity_list
|
train
|
def activity_list(date, sort, limit)
date_param = format_date(date)
if sort == "asc"
date_param = "afterDate=#{date_param}"
elsif sort == "desc"
date_param = "beforeDate=#{date_param}"
else
raise FitgemOauth2::InvalidArgumentError, "sort can either be asc or desc"
end
get_call("user/#{user_id}/activities/list.json?offset=0&limit=#{limit}&sort=#{sort}&#{date_param}")
end
|
ruby
|
{
"resource": ""
}
|
q2654
|
FitgemOauth2.Client.update_activity_goals
|
train
|
def update_activity_goals(period, params)
unless period && %w(daily weekly).include?(period)
raise FitgemOauth2::InvalidArgumentError, "Goal period should either be 'daily' or 'weekly'"
end
post_call("user/#{user_id}/activities/goals/#{period}.json", params)
end
|
ruby
|
{
"resource": ""
}
|
q2655
|
FitgemOauth2.Client.sleep_time_series
|
train
|
def sleep_time_series(resource: nil, start_date: nil, end_date: nil, period: nil)
unless start_date
raise FitgemOauth2::InvalidArgumentError, 'Start date not provided.'
end
unless resource && SLEEP_RESOURCES.include?(resource)
raise FitgemOauth2::InvalidArgumentError, "Invalid resource: #{resource}. Valid resources are #{SLEEP_RESOURCES}."
end
if period && end_date
raise FitgemOauth2::InvalidArgumentError, 'Both end_date and period specified. Specify only one.'
end
if period && !SLEEP_PERIODS.include?(period)
raise FitgemOauth2::InvalidArgumentError, "Invalid period: #{period}. Valid periods are #{SLEEP_PERIODS}."
end
second = period || format_date(end_date)
url = ['user', user_id, 'sleep', resource, 'date', format_date(start_date), second].join('/')
get_call(url + '.json')
end
|
ruby
|
{
"resource": ""
}
|
q2656
|
WatirNokogiri.Option.text
|
train
|
def text
assert_exists
# A little unintuitive - we'll return the 'label' or 'text' attribute if
# they exist, otherwise the inner text of the element
attribute = [:label, :text].find { |a| attribute? a }
if attribute
@element.get_attribute(attribute)
else
@element.text
end
end
|
ruby
|
{
"resource": ""
}
|
q2657
|
WatirNokogiri.Element.style
|
train
|
def style(property = nil)
assert_exists
styles = attribute_value('style').to_s.strip
if property
properties = Hash[styles.downcase.split(";").map { |p| p.split(":").map(&:strip) }]
properties[property]
else
styles
end
end
|
ruby
|
{
"resource": ""
}
|
q2658
|
WatirNokogiri.Element.parent
|
train
|
def parent
assert_exists
e = @element.parent
if e.kind_of?(Nokogiri::XML::Element)
WatirNokogiri.element_class_for(e.node_name.downcase).new(@parent, :element => e)
end
end
|
ruby
|
{
"resource": ""
}
|
q2659
|
DhtSensor.App.to_hash
|
train
|
# Converts a sensor reading into a Hash according to the CLI options:
# - :humidity     -> humidity only
# - :temperature  -> temperature only
# - :unit == :c   -> Celsius (val.temp) instead of Fahrenheit (val.temp_f)
def to_hash(val)
  return { "humidity" => val.humidity } if @options[:humidity]
  temperature = @options[:unit] == :c ? val.temp : val.temp_f
  result = { "temperature" => temperature }
  result["humidity"] = val.humidity unless @options[:temperature]
  result
end
|
ruby
|
{
"resource": ""
}
|
q2660
|
DhtSensor.App.print
|
train
|
# Pretty-prints a sensor reading according to the CLI options
# (same option semantics as #to_hash).
#
# Fixed: the Fahrenheit temperature-only branch passed a stray
# `val.humidity` argument to a format string with a single directive.
def print(val)
  if @options[:humidity] then
    puts sprintf("Humidity: %.2f%%", val.humidity)
    return
  end
  if @options[:unit] == :c then
    if @options[:temperature] then
      puts sprintf("Temperature: %.2f C", val.temp)
    else
      puts sprintf("Temperature: %.2f C Humidity: %.2f%%", val.temp, val.humidity)
    end
  else
    if @options[:temperature] then
      puts sprintf("Temperature: %.2f F", val.temp_f)
    else
      puts sprintf("Temperature: %.2f F Humidity: %.2f%%", val.temp_f, val.humidity)
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q2661
|
WatirNokogiri.CellContainer.cell
|
train
|
def cell(*args)
cell = TableCell.new(self, extract_selector(args).merge(:tag_name => /^(th|td)$/))
cell.locator_class = ChildCellLocator
cell
end
|
ruby
|
{
"resource": ""
}
|
q2662
|
WatirNokogiri.CellContainer.cells
|
train
|
def cells(*args)
cells = TableCellCollection.new(self, extract_selector(args).merge(:tag_name => /^(th|td)$/))
cells.locator_class = ChildCellLocator
cells
end
|
ruby
|
{
"resource": ""
}
|
q2663
|
MessageMediaMessages.BaseModel.to_hash
|
train
|
# Serializes the model to a Hash, mapping instance variables to their
# wire names (self.class.names) and recursively converting nested
# BaseModel instances inside Arrays and Hashes.
#
# Returns a Hash; nil-valued attributes are omitted.
def to_hash
  hash = {}
  instance_variables.each do |name|
    value = instance_variable_get(name)
    next if value.nil?
    # Strip the leading "@" from the ivar name.
    name = name[1..-1]
    # Prefer the declared wire name when one is registered.
    key = self.class.names.key?(name) ? self.class.names[name] : name
    if value.instance_of? Array
      hash[key] = value.map { |v| v.is_a?(BaseModel) ? v.to_hash : v }
    elsif value.instance_of? Hash
      hash[key] = {}
      value.each do |k, v|
        hash[key][k] = v.is_a?(BaseModel) ? v.to_hash : v
      end
    else
      hash[key] = value.is_a?(BaseModel) ? value.to_hash : value
    end
  end
  hash
end
|
ruby
|
{
"resource": ""
}
|
q2664
|
WatirNokogiri.Document.goto
|
train
|
def goto(file_path)
html = File.read(file_path)
@driver = Nokogiri::HTML.parse(html)
end
|
ruby
|
{
"resource": ""
}
|
q2665
|
Titlekit.Job.run
|
train
|
def run
@wants.each do |want|
@haves.each do |have|
import(have)
retime(have, want)
cull(have)
group(have)
want.subtitles += have.subtitles.clone
end
polish(want)
export(want)
end
return true
rescue AbortJob
return false
end
|
ruby
|
{
"resource": ""
}
|
q2666
|
Titlekit.Job.cull
|
train
|
# Drops subtitles that end before time zero and clamps negative start
# times up to zero, mutating have.subtitles in place.
def cull(have)
  have.subtitles.reject! { |cue| cue[:end] < 0 }
  have.subtitles.each do |cue|
    cue[:start] = [cue[:start], 0].max
  end
end
|
ruby
|
{
"resource": ""
}
|
q2667
|
Titlekit.Job.retime_by_framerate
|
train
|
# Rescales every subtitle's timings by the ratio of target to source
# frame rate (timings hard-coded to frames scale linearly with fps).
def retime_by_framerate(have, want)
  scale = want.fps.to_f / have.fps.to_f
  have.subtitles.each do |cue|
    cue[:start] *= scale
    cue[:end] *= scale
  end
end
|
ruby
|
{
"resource": ""
}
|
q2668
|
MessageMediaMessages.HttpClient.get
|
train
|
def get(query_url,
headers: {})
HttpRequest.new(HttpMethodEnum::GET,
query_url,
headers: headers)
end
|
ruby
|
{
"resource": ""
}
|
q2669
|
MessageMediaMessages.HttpClient.head
|
train
|
def head(query_url,
headers: {})
HttpRequest.new(HttpMethodEnum::HEAD,
query_url,
headers: headers)
end
|
ruby
|
{
"resource": ""
}
|
q2670
|
MessageMediaMessages.HttpClient.post
|
train
|
def post(query_url,
headers: {},
parameters: {})
HttpRequest.new(HttpMethodEnum::POST,
query_url,
headers: headers,
parameters: parameters)
end
|
ruby
|
{
"resource": ""
}
|
q2671
|
MessageMediaMessages.HttpClient.put
|
train
|
def put(query_url,
headers: {},
parameters: {})
HttpRequest.new(HttpMethodEnum::PUT,
query_url,
headers: headers,
parameters: parameters)
end
|
ruby
|
{
"resource": ""
}
|
q2672
|
MessageMediaMessages.HttpClient.patch
|
train
|
def patch(query_url,
headers: {},
parameters: {})
HttpRequest.new(HttpMethodEnum::PATCH,
query_url,
headers: headers,
parameters: parameters)
end
|
ruby
|
{
"resource": ""
}
|
q2673
|
MessageMediaMessages.HttpClient.delete
|
train
|
def delete(query_url,
headers: {},
parameters: {})
HttpRequest.new(HttpMethodEnum::DELETE,
query_url,
headers: headers,
parameters: parameters)
end
|
ruby
|
{
"resource": ""
}
|
q2674
|
Capistrano.LogWithAwesome.log
|
train
|
# Capistrano logger hook: mirrors every line that passes the current
# log level to the class-level log_with_awesome sink (with the same
# star-indent Capistrano uses), then delegates to the standard logger
# via super.
#
# NOTE(review): the RUBY_VERSION String comparison is fragile
# ("1.10" < "1.9" lexically); harmless for the versions it targets.
def log(level, message, line_prefix=nil)
  if level <= self.level
    # Star-indent mirrors Capistrano::Logger's own formatting.
    indent = "%*s" % [Capistrano::Logger::MAX_LEVEL, "*" * (Capistrano::Logger::MAX_LEVEL - level)]
    (RUBY_VERSION >= "1.9" ? message.lines : message).each do |line|
      if line_prefix
        self.class.log_with_awesome "#{indent} [#{line_prefix}] #{line.strip}"
      else
        self.class.log_with_awesome "#{indent} #{line.strip}"
      end
    end
  end
  super(level, message, line_prefix)
end
|
ruby
|
{
"resource": ""
}
|
q2675
|
ActsAsFerret.ClassMethods.records_for_rebuild
|
train
|
def records_for_rebuild(batch_size = 1000)
transaction do
if use_fast_batches?
offset = 0
while (rows = where([ "#{table_name}.id > ?", offset ]).limit(batch_size).all).any?
offset = rows.last.id
yield rows, offset
end
else
order = "#{primary_key} ASC" # fixes #212
0.step(self.count, batch_size) do |offset|
yield scoped.limit(batch_size).offset(offset).order(order).all, offset
end
end
end
end
|
ruby
|
{
"resource": ""
}
|
q2676
|
ActsAsFerret.ClassMethods.records_for_bulk_index
|
train
|
def records_for_bulk_index(ids, batch_size = 1000)
transaction do
offset = 0
ids.each_slice(batch_size) do |id_slice|
records = where(:id => id_slice).all
#yield records, offset
yield where(:id => id_slice).all, offset
offset += batch_size
end
end
end
|
ruby
|
{
"resource": ""
}
|
q2677
|
Sorcerer.Subexpression.within_method_sexp
|
train
|
# Walks a Ripper-style method-call s-expression, recursing (via the
# sibling `recur`) into the sub-expressions that can contain nested
# calls. The handled sexp layouts are noted inline.
def within_method_sexp(sexp)
  case sexp.first
  when :call # [:call, target, ".", meth]
    recur(sexp[1])
  when :method_add_block # [:method_add_block, call, block]
    within_method_sexp(sexp[1])
  when :method_add_arg # [:method_add_arg, call, args]
    # Process the argument list, then descend into the call itself.
    recur(sexp[2])
    within_method_sexp(sexp[1])
  else
    recur(sexp)
  end
end
|
ruby
|
{
"resource": ""
}
|
q2678
|
Brice.Colours.enable_irb
|
train
|
def enable_irb
IRB::Inspector.class_eval {
unless method_defined?(:inspect_value_with_colour)
alias_method :inspect_value_without_colour, :inspect_value
def inspect_value_with_colour(value)
Colours.colourize(inspect_value_without_colour(value))
end
end
alias_method :inspect_value, :inspect_value_with_colour
}
end
|
ruby
|
{
"resource": ""
}
|
q2679
|
Brice.Colours.colourize
|
train
|
def colourize(str)
''.tap { |res| Tokenizer.tokenize(str.to_s) { |token, value|
res << colourize_string(value, colours[token])
} }
rescue => err
Brice.error(self, __method__, err)
str
end
|
ruby
|
{
"resource": ""
}
|
q2680
|
GScraper.HasPages.each_page
|
train
|
def each_page(indices)
unless block_given?
enum_for(:each_page,indices)
else
indices.map { |index| yield page_cache[index] }
end
end
|
ruby
|
{
"resource": ""
}
|
q2681
|
GScraper.HasPages.each
|
train
|
# Iterates over every non-empty result page, starting at index 1 and
# stopping at the first empty page.
#
# Returns an Enumerator when no block is given, otherwise self.
def each
  return enum_for(:each) unless block_given?
  index = 1
  until ((next_page = page_cache[index]).empty?) do
    yield next_page
    index = index + 1
  end
  return self
end
|
ruby
|
{
"resource": ""
}
|
q2682
|
GScraper.HasPages.page_cache
|
train
|
# Lazily-populated cache of result pages: reading an absent index
# fetches it via page(index). The block form of Hash.new gives each
# key its own fetched entry (no shared default object).
def page_cache
  @page_cache ||= Hash.new { |hash,key| hash[key] = page(key.to_i) }
end
|
ruby
|
{
"resource": ""
}
|
q2683
|
ActsAsFerret.AbstractIndex.change_index_dir
|
train
|
def change_index_dir(new_dir)
logger.debug "[#{index_name}] changing index dir to #{new_dir}"
index_definition[:index_dir] = index_definition[:ferret][:path] = new_dir
reopen!
logger.debug "[#{index_name}] index dir is now #{new_dir}"
end
|
ruby
|
{
"resource": ""
}
|
q2684
|
XCRes::XCAssets.Bundle.read
|
train
|
def read
@resource_paths = Dir.chdir(path) do
Dir['**/Contents.json'].map { |p| Pathname(p) + '..' }
end
@resources = @resource_paths.map do |path|
Resource.new(self, path)
end
self
end
|
ruby
|
{
"resource": ""
}
|
q2685
|
XCRes.StringsAnalyzer.build_section
|
train
|
# Builds the "Strings" section from all selected, non-ignored
# .strings files of the target.
#
# Returns a new XCRes::Section named 'Strings' whose items merge the
# keys of every file (later files win on key collisions).
#
# Fixed: replaced the `for` loop (which leaks its variable) and the
# local `keys_by_file` hash that shadowed the #keys_by_file helper.
def build_section
  selected_file_refs = selected_strings_file_refs
  # Apply ignore list
  file_paths = filter_exclusions(selected_file_refs.map(&:path))
  filtered_file_refs = selected_file_refs.select { |file_ref| file_paths.include? file_ref.path }
  rel_file_paths = filtered_file_refs.map { |p| p.real_path.relative_path_from(Pathname.pwd) }
  log 'Non-ignored .strings files: %s', rel_file_paths.map(&:to_s)
  items = rel_file_paths
    .map { |path| keys_by_file(path) }
    .reduce({}, :merge)
  new_section('Strings', items)
end
|
ruby
|
{
"resource": ""
}
|
q2686
|
XCRes.StringsAnalyzer.info_plist_paths
|
train
|
def info_plist_paths
@info_plist_paths ||= target.build_configurations.map do |config|
config.build_settings['INFOPLIST_FILE']
end.compact.map { |file| Pathname(file) }.flatten.to_set
end
|
ruby
|
{
"resource": ""
}
|
q2687
|
XCRes.StringsAnalyzer.native_dev_languages
|
train
|
def native_dev_languages
@native_dev_languages ||= absolute_info_plist_paths.map do |path|
begin
read_plist_key(path, :CFBundleDevelopmentRegion)
rescue ArgumentError => e
warn e
end
end.compact.to_set
end
|
ruby
|
{
"resource": ""
}
|
q2688
|
XCRes.StringsAnalyzer.keys_by_file
|
train
|
# Reads a .strings file and maps each localization key to a hash with
# its value (the key itself) and a comment derived from the translation.
#
# path - Pathname/String of the .strings file.
#
# Returns a Hash of key => { value:, comment: }.
# Raises ArgumentError (re-wrapped with the path) on read errors.
#
# Fixed: dropped the redundant begin/end wrapper — a def-level rescue
# covers the whole body.
def keys_by_file(path)
  # Load strings file contents
  strings = read_strings_file(path)
  # Reject generated identifiers used by Interface Builder
  strings.reject! { |key, _| /^[a-zA-Z0-9]{3}-[a-zA-Z0-9]{2,3}-[a-zA-Z0-9]{3}/.match(key) }
  keys = Hash[strings.map do |key, value|
    # Flatten newlines so the comment stays single-line.
    [key, { value: key, comment: value.gsub(/[\r\n]/, ' ') }]
  end]
  log 'Found %s keys in file %s', keys.count, path
  keys
rescue ArgumentError => error
  raise ArgumentError, 'Error while reading %s: %s' % [path, error]
end
|
ruby
|
{
"resource": ""
}
|
q2689
|
ActsAsFerret.InstanceMethods.ferret_enabled?
|
train
|
def ferret_enabled?(is_bulk_index = false)
@ferret_disabled.nil? && (is_bulk_index || self.class.ferret_enabled?) && (aaf_configuration[:if].nil? || aaf_configuration[:if].call(self))
end
|
ruby
|
{
"resource": ""
}
|
q2690
|
JMX.RubyNotificationEmitter.removeNotificationListener
|
train
|
# JMX NotificationEmitter interface (JRuby/Java interop): removes every
# registered (listener, filter, handback) triple matching all three
# values.
#
# Raises javax.management.ListenerNotFoundException when no matching
# registration exists, per the JMX contract.
def removeNotificationListener(listener, filter=nil, handback=nil)
  found = false
  listeners.delete_if do |clistener, (cfilter, chandback)|
    v = listener == clistener && filter == cfilter && handback == chandback
    found = true if v
    v
  end
  raise javax.management.ListenerNotFoundException.new unless found
end
|
ruby
|
{
"resource": ""
}
|
q2691
|
JMX.MBeanProxy.[]
|
train
|
# Reads a JMX MBean attribute by name.
#
# Returns the unwrapped value when the server hands back a
# javax.management.Attribute wrapper, otherwise the raw result
# (JRuby/Java interop).
def [](name)
  attribute = @server.getAttribute(@object_name, name.to_s)
  return attribute.value if attribute.kind_of? javax.management.Attribute
  attribute
end
|
ruby
|
{
"resource": ""
}
|
q2692
|
JMX.MBeanProxy.[]=
|
train
|
def []=(name, value)
@server.setAttribute @object_name, javax.management.Attribute.new(name.to_s, value)
end
|
ruby
|
{
"resource": ""
}
|
q2693
|
JMX.MBeanProxy.invoke
|
train
|
# Invokes a JMX MBean operation by name with the given parameters.
#
# Raises NoMethodError when the MBeanInfo lists no such operation.
def invoke(name, *params)
  op = @info.operations.find { |o| o.name == name.to_s }
  raise NoMethodError.new("No such operation #{name}") unless op
  # Convert Ruby params to Java objects plus their Java type names.
  jargs, jtypes = java_args(op.signature, params)
  @server.invoke @object_name, op.name, jargs, jtypes
end
|
ruby
|
{
"resource": ""
}
|
q2694
|
JMX.MBeanProxy.java_types
|
train
|
# Maps Ruby objects to a Java String[] of their Java class names
# (JRuby interop); nil passes through to mean "no parameters".
def java_types(params)
  return nil if params.nil?
  params.map {|e| e.class.java_class.name }.to_java(:string)
end
|
ruby
|
{
"resource": ""
}
|
q2695
|
Apkstats::Command.Executable.compare_with
|
train
|
def compare_with(apk_filepath, other_apk_filepath)
base = Apkstats::Entity::ApkInfo.new(self, apk_filepath)
other = Apkstats::Entity::ApkInfo.new(self, other_apk_filepath)
Apkstats::Entity::ApkInfoDiff.new(base, other).to_h
end
|
ruby
|
{
"resource": ""
}
|
q2696
|
XCRes.Analyzer.new_section
|
train
|
def new_section(name, data, options={})
XCRes::Section.new(name, data, self.options.merge(options))
end
|
ruby
|
{
"resource": ""
}
|
q2697
|
XCRes.Analyzer.filter_exclusions
|
train
|
# Keeps only the file paths that match none of the exclusion
# patterns, checked both at the top level and anywhere deeper in the
# tree (the "**/" variant).
def filter_exclusions file_paths
  file_paths.select do |path|
    exclude_file_patterns.none? do |pattern|
      File.fnmatch("#{pattern}", path) || File.fnmatch("**/#{pattern}", path)
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q2698
|
XCRes.Analyzer.find_file_refs_by_extname
|
train
|
def find_file_refs_by_extname(extname)
project.files.select do |file_ref|
File.extname(file_ref.path) == extname \
&& is_file_ref_included_in_application_target?(file_ref)
end
end
|
ruby
|
{
"resource": ""
}
|
q2699
|
XCRes.Analyzer.resources_files
|
train
|
def resources_files
target.resources_build_phase.files.map do |build_file|
if build_file.file_ref.is_a?(Xcodeproj::Project::Object::PBXGroup)
build_file.file_ref.recursive_children
else
[build_file.file_ref]
end
end.flatten.compact
end
|
ruby
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.