_id (string, 2–6 chars) | title (string, 9–130 chars) | partition (string, 3 classes) | text (string, 30–4.3k chars) | language (string, 1 class) | meta_information (dict) |
---|---|---|---|---|---|
q300
|
Epuber.Book.targets
|
train
|
def targets(*names, &block)
if names.empty?
UI.warning('Book#targets to get all targets is deprecated, use #all_targets instead', location: caller_locations.first)
|
ruby
|
{
"resource": ""
}
|
q301
|
SmugMug.HTTP.request
|
train
|
def request(api, args)
uri = api == :uploading ? UPLOAD_URI : API_URI
args[:method] = "smugmug.#{api}" unless api == :uploading
http = ::Net::HTTP.new(uri.host, uri.port, @http_proxy_host, @http_proxy_port, @http_proxy_user, @http_proxy_pass)
http.set_debug_output(@config[:debug_output]) if @config[:debug_output]
# Configure HTTPS if needed
if uri.scheme == "https"
http.use_ssl = true
if @config[:http] and @config[:http][:verify_mode]
http.verify_mode = @config[:http][:verify_mode]
http.ca_file = @config[:http][:ca_file]
http.ca_path = @config[:http][:ca_path]
else
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
end
end
# Upload request, which requires special handling
if api == :uploading
postdata = args.delete(:content)
headers = @headers.merge("Content-Length" => postdata.length.to_s, "Content-MD5" => Digest::MD5.hexdigest(postdata), "X-Smug-Version" => "1.3.0", "X-Smug-ResponseType" => "JSON")
UPLOAD_HEADERS.each do |key|
next unless args[key] and args[key] != ""
headers["X-Smug-#{key}"] = args[key].to_s
end
oauth = self.sign_request("POST", uri, nil)
headers["Authorization"] = "OAuth oauth_consumer_key=\"#{oauth["oauth_consumer_key"]}\", oauth_nonce=\"#{oauth["oauth_nonce"]}\", oauth_signature_method=\"#{oauth["oauth_signature_method"]}\", oauth_signature=\"#{oauth["oauth_signature"]}\", oauth_timestamp=\"#{oauth["oauth_timestamp"]}\", oauth_version=\"#{oauth["oauth_version"]}\", oauth_token=\"#{oauth["oauth_token"]}\""
# Normal API method
else
postdata = self.sign_request("POST", uri, args)
headers = @headers
end
response = http.request_post(uri.request_uri, postdata, headers)
if response.code == "204"
return nil
elsif response.code != "200"
raise SmugMug::HTTPError.new("HTTP #{response.code}, #{response.message}", response.code, response.message)
end
# Check for GZIP encoding
if response.header["content-encoding"] == "gzip"
|
ruby
|
{
"resource": ""
}
|
q302
|
SmugMug.HTTP.sign_request
|
train
|
def sign_request(method, uri, form_args)
# Convert non-string keys to strings so the sort works
args = {}
if form_args
form_args.each do |key, value|
next unless value and value != ""
key = key.to_s unless key.is_a?(String)
args[key] = value
end
end
# Add the necessary OAuth args
args["oauth_version"] = "1.0"
args["oauth_consumer_key"] = @config[:api_key]
args["oauth_nonce"] = Digest::MD5.hexdigest("#{Time.now.to_f}#{rand(10 ** 30)}")
args["oauth_signature_method"] = "HMAC-SHA1"
args["oauth_timestamp"] = Time.now.utc.to_i
args["oauth_token"] = @config[:user][:token]
# RFC 1738 (http://www.ietf.org/rfc/rfc1738.txt) says:
#
# Thus, only alphanumerics, the special characters "$-_.+!*'(),", and
# reserved characters used for their reserved purposes may be used
# unencoded within a URL.
#
# However, if we don't escape apostrophes and parentheses the SmugMug API fails
# with an invalid signature error:
#
# Error #35, invalid signature (SmugMug::OAuthError)
#
# To overcome this, define a new unreserved character list and use this in URI::escape
unreserved = "\\-_.!~*a-zA-Z\\d"
unsafe = Regexp.new("[^#{unreserved}]", false, 'N')
|
ruby
|
{
"resource": ""
}
|
q303
|
Grooveshark.Client.get_user_by_id
|
train
|
def get_user_by_id(id)
resp = request('getUserByID',
|
ruby
|
{
"resource": ""
}
|
q304
|
Grooveshark.Client.get_user_by_username
|
train
|
def get_user_by_username(name)
resp = request('getUserByUsername',
|
ruby
|
{
"resource": ""
}
|
q305
|
Grooveshark.Client.popular_songs
|
train
|
def popular_songs(type = 'daily')
fail ArgumentError, 'Invalid type' unless
|
ruby
|
{
"resource": ""
}
|
q306
|
Grooveshark.Client.top_broadcasts
|
train
|
def top_broadcasts(count = 10)
top_broadcasts = []
request('getTopBroadcastsCombined').each do |key, _val|
|
ruby
|
{
"resource": ""
}
|
q307
|
Grooveshark.Client.search
|
train
|
def search(type, query)
results = []
search = request('getResultsFromSearch', type: type, query: query)
results = search['result'].map do |data|
next Song.new data if type == 'Songs'
|
ruby
|
{
"resource": ""
}
|
q308
|
Grooveshark.Client.get_stream_auth_by_songid
|
train
|
def get_stream_auth_by_songid(song_id)
result = request('getStreamKeyFromSongIDEx',
'type' => 0,
'prefetch' => false,
'songID' => song_id,
'country' => @country,
'mobile' => false)
if result ==
|
ruby
|
{
"resource": ""
}
|
q309
|
Grooveshark.Client.request
|
train
|
def request(method, params = {}, secure = false)
refresh_token if @comm_token
url = "#{secure ? 'https' : 'http'}://grooveshark.com/more.php?#{method}"
begin
data = RestClient.post(url,
body(method, params).to_json,
|
ruby
|
{
"resource": ""
}
|
q310
|
Oeffi.Configuration.provider=
|
train
|
def provider=(sym)
klass = "#{sym.to_s.capitalize}Provider"
begin
java_import "de.schildbach.pte.#{klass}"
@provider = Oeffi::const_get(klass).new
rescue
|
ruby
|
{
"resource": ""
}
|
q311
|
CrmFormatter.Address.check_addr_status
|
train
|
def check_addr_status(hsh)
full_addr = hsh[:full_addr]
full_addr_f = hsh[:full_addr_f]
status = nil
if full_addr && full_addr_f
|
ruby
|
{
"resource": ""
}
|
q312
|
CrmFormatter.Address.make_full_address_original
|
train
|
def make_full_address_original(hsh)
full_adr = [hsh[:street],
hsh[:city],
hsh[:state],
|
ruby
|
{
"resource": ""
}
|
q313
|
MoreViewHooks.HookCollection.add
|
train
|
def add(name, options)
fail ArgumentError, "A view hook '#{name}' already exists" if @hooks[name]
context = options.delete(:context)
hook = @hooks[name]
|
ruby
|
{
"resource": ""
}
|
q314
|
BlueprintClient.HierarchyApi.add_node
|
train
|
def add_node(namespace_inc_global, body, opts = {})
data, _status_code, _headers =
|
ruby
|
{
"resource": ""
}
|
q315
|
BlueprintClient.HierarchyApi.delete_node
|
train
|
def delete_node(namespace, id, type, opts = {})
delete_node_with_http_info(namespace,
|
ruby
|
{
"resource": ""
}
|
q316
|
BlueprintClient.HierarchyApi.get_node
|
train
|
def get_node(namespace, id, type, opts = {})
data, _status_code, _headers =
|
ruby
|
{
"resource": ""
}
|
q317
|
BlueprintClient.HierarchyApi.replace_node
|
train
|
def replace_node(namespace, id, body, type, opts = {})
data, _status_code, _headers =
|
ruby
|
{
"resource": ""
}
|
q318
|
BlueprintClient.HierarchyApi.update_node
|
train
|
def update_node(namespace, id, body, type, opts = {})
data, _status_code, _headers =
|
ruby
|
{
"resource": ""
}
|
q319
|
Berkshelf.HgLocation.install
|
train
|
def install
if cached?
# Update and checkout the correct ref
Dir.chdir(cache_path) do
hg %|pull|
end
else
# Ensure the cache directory is present before doing anything
FileUtils.mkdir_p(cache_path)
Dir.chdir(cache_path) do
hg %|clone #{uri} .|
end
end
Dir.chdir(cache_path) do
hg %|update --clean --rev #{revision || ref}|
@revision ||= hg %|id -i|
end
# Grab the path where we should copy from (since it might be relative to
# the root).
copy_path = rel ? cache_path.join(rel) : cache_path
begin
# Validate the thing we are copying is a Chef cookbook
validate_cached!(copy_path)
# Remove the current cookbook at this location (this is required or else
|
ruby
|
{
"resource": ""
}
|
q320
|
Berkshelf.HgLocation.hg
|
train
|
def hg(command, error = true)
unless Berkshelf.which('hg') || Berkshelf.which('hg.exe')
raise HgNotInstalled.new
end
Berkshelf.log.debug("Running:hg #{command}")
response = shell_out(%|hg #{command}|)
Berkshelf.log.debug("response:hg
|
ruby
|
{
"resource": ""
}
|
q321
|
Berkshelf.HgLocation.cache_path
|
train
|
def cache_path
Pathname.new(Berkshelf.berkshelf_path)
.join('.cache',
|
ruby
|
{
"resource": ""
}
|
q322
|
Cadenero::V1.Account::SessionsController.create
|
train
|
def create
if env['warden'].authenticate(:password, :scope => :user)
#return the user JSON on success
render json: current_user, status: :created
|
ruby
|
{
"resource": ""
}
|
q323
|
Cadenero::V1.Account::SessionsController.delete
|
train
|
def delete
user = Cadenero::User.find_by_id(params[:id])
if user_signed_in?
env['warden'].logout(:user)
render json: {message: "Successful logout"}, status: :ok
|
ruby
|
{
"resource": ""
}
|
q324
|
Bunto.BuntoSitemap.file_exists?
|
train
|
def file_exists?(file_path)
if @site.respond_to?(:in_source_dir)
File.exist? @site.in_source_dir(file_path)
|
ruby
|
{
"resource": ""
}
|
q325
|
RiddlerAdmin.StepsController.internal_preview
|
train
|
def internal_preview
@preview_context = ::RiddlerAdmin::PreviewContext.find_by_id params["pctx_id"]
if @preview_context.nil?
render(status: 400, json: {message: "Invalid pctx_id"}) and return
end
|
ruby
|
{
"resource": ""
}
|
q326
|
GeneValidatorApp.Config.write_config_file
|
train
|
def write_config_file
return unless config_file
File.open(config_file, 'w') do |f|
|
ruby
|
{
"resource": ""
}
|
q327
|
GeneValidatorApp.Config.symbolise
|
train
|
def symbolise(data)
return {} unless data
# Symbolize keys.
|
ruby
|
{
"resource": ""
}
|
q328
|
GeneValidatorApp.Config.defaults
|
train
|
def defaults
{
num_threads: 1,
mafft_threads: 1,
port: 5678,
ssl: false,
host: '0.0.0.0',
|
ruby
|
{
"resource": ""
}
|
q329
|
RippleRest.Account.trustlines
|
train
|
def trustlines
data = RippleRest
.get("v1/accounts/#{@address}/trustlines")["trustlines"]
|
ruby
|
{
"resource": ""
}
|
q330
|
RippleRest.Account.settings
|
train
|
def settings
data = RippleRest.get("v1/accounts/#{@address}/settings")["settings"]
|
ruby
|
{
"resource": ""
}
|
q331
|
RippleRest.Account.payments
|
train
|
def payments
payments ||= lambda {
obj = Payments.new
|
ruby
|
{
"resource": ""
}
|
q332
|
RippleRest.Payments.find_path
|
train
|
def find_path destination_account, destination_amount, source_currencies = nil
uri = "v1/accounts/#{account.address}/payments/paths/#{destination_account.to_s}/#{destination_amount.to_s}"
if source_currencies
cur = source_currencies.join(",")
uri +=
|
ruby
|
{
"resource": ""
}
|
q333
|
RippleRest.Payments.create
|
train
|
def create destination_account, destination_amount
payment = Payment.new
payment.account = account
|
ruby
|
{
"resource": ""
}
|
q334
|
RippleRest.Payments.query
|
train
|
def query options = {}
qs = ""
if options && options.size > 0
qs = "?" + options.map { |k,v| "#{k}=#{CGI::escape(v.to_s)}" }.join('&')
end
|
ruby
|
{
"resource": ""
}
|
q335
|
QUnited.Application.handle_options
|
train
|
def handle_options
drivers = ::QUnited::Driver.constants.reject { |d| d == :Base }
valid_drivers_string = "Valid drivers include: #{drivers.map { |d| d.to_s }.join(', ')}"
args_empty = ARGV.empty?
# This is a bit of a hack, but OptionParser removes the -- that separates the source
# and test files and we need to put it back in the right place. Save the distance from
# the end to do this later.
double_dash_neg_index = ARGV.find_index('--') && (ARGV.find_index('--') - ARGV.size)
optparse = OptionParser.new do |opts|
opts.banner = <<-HELP_TEXT
Usage: qunited [OPTIONS] [JS_SOURCE_FILES...] -- [JS_TEST_FILES..]
Runs JavaScript unit tests with QUnit.
JS_SOURCE_FILES are the JavaScript files that you want to test. They will all be
loaded for running each test.
JS_TEST_FILES are files that contain the QUnit tests to run.
Options:
HELP_TEXT
opts.on('-d', '--driver [NAME]', 'Specify the driver to use in running the tests',
valid_drivers_string) do |name|
raise UsageError, 'Must specify a driver name with -d or --driver option' unless name
names_and_drivers = Hash[drivers.map { |d| d.to_s.downcase }.zip(drivers)]
driver = names_and_drivers[name.downcase]
raise UsageError, "Invalid driver specified: #{name}\n#{valid_drivers_string}" unless driver
options[:driver] = driver
|
ruby
|
{
"resource": ""
}
|
q336
|
AmpleAssets.ViewHelper.image_asset
|
train
|
def image_asset(object, args={})
# Gracefully handle nil
return if object.try(:file).nil? && args[:object].nil?
# Define default opts and merge with parameter args
opts = {
:alt => '',
:video_dimensions => '500x350',
:encode => :png
}.merge(args)
# Override alt text with object title if it exists
opts[:alt] = escape_javascript(object.title) if object.respond_to?('title')
# See if optional file object actually contains a dfly instance
if opts[:object]
attachment = opts[:object].attachment
attachment_gravity = opts[:object].attachment_gravity
# Override alt text with attachment alt_text if it exists
opts[:alt] = escape_javascript(opts[:object].alt_text) if opts[:object].respond_to?('alt_text') && !opts[:object].send(:alt_text).blank?
else
attachment = object.file.attachment
|
ruby
|
{
"resource": ""
}
|
q337
|
BlueprintClient.AssetTypeConfigsApi.get
|
train
|
def get(namespace, asset_type, opts = {})
data, _status_code, _headers =
|
ruby
|
{
"resource": ""
}
|
q338
|
AgglomerativeClustering.SilhouetteCoefficient.measure
|
train
|
def measure clusters
silhouettes = []
average_distances = []
main_cluster.points.each do |point1|
a1 = calculate_a1(point1)
(clusters - [main_cluster]).each do |cluster|
distances = []
cluster.points.each do |point2|
distances << euclidean_distance(point1, point2).round(2)
end
|
ruby
|
{
"resource": ""
}
|
q339
|
AgglomerativeClustering.SilhouetteCoefficient.calculate_a1
|
train
|
def calculate_a1 point
distances = []
main_cluster.points.each do |point1|
distances << euclidean_distance(point, point1).round(2)
end
return 0 if
|
ruby
|
{
"resource": ""
}
|
q340
|
ActsAsSolr.CommonMethods.get_solr_field_type
|
train
|
def get_solr_field_type(field_type)
if field_type.is_a?(Symbol)
t = TypeMapping[field_type]
raise "Unknown field_type symbol: #{field_type}" if t.nil?
t
|
ruby
|
{
"resource": ""
}
|
q341
|
ActsAsSolr.CommonMethods.solr_add
|
train
|
def solr_add(add_xml)
ActsAsSolr::Post.execute(Solr:
|
ruby
|
{
"resource": ""
}
|
q342
|
ActsAsSolr.CommonMethods.solr_delete
|
train
|
def solr_delete(solr_ids)
ActsAsSolr::P
|
ruby
|
{
"resource": ""
}
|
q343
|
Dk::Remote.BaseCmd.build_ssh_cmd_str
|
train
|
def build_ssh_cmd_str(cmd_str, host, args, host_args)
|
ruby
|
{
"resource": ""
}
|
q344
|
QUnited.RakeTask.files_array
|
train
|
def files_array(files)
return [] unless files
files.is_a?(Array) ? files
|
ruby
|
{
"resource": ""
}
|
q345
|
LibComponent.Pin.introspect
|
train
|
def introspect
iface = Hash.new
pin = Hash.new
meth = Array.new
if self.respond_to?(:read)
meth << "read"
end
if self.respond_to?(:write)
meth <<
|
ruby
|
{
"resource": ""
}
|
q346
|
LibComponent.Component.<<
|
train
|
def <<(pin_)
if pin_.kind_of?(Input)
@inputs << pin_
elsif pin_.kind_of?(Output)
@outputs << pin_
|
ruby
|
{
"resource": ""
}
|
q347
|
LibComponent.Component.run
|
train
|
def run
# execute startup methods
@inputs.each do |inp|
inp.start if inp.respond_to?(:start)
end
@outputs.each do |outp|
outp.start if outp.respond_to?(:start)
end
intro = introspect
if @options[:introspect]
print intro.to_yaml
else
@bus = create_bus
#create dbus input pins
dbusinputs = LibComponent::DbusInput.create_dbusinputs_from_introspect(intro["input"]["pin"],self)
name = "org.openplacos.components.#{@name.downcase}"
if (@bus.proxy.ListNames[0].member?(name))
quit_server(255, "#{name} already exists")
end
@service = @bus.request_service(name)
dbusinputs.each { |pin|
@service.export(pin)
}
|
ruby
|
{
"resource": ""
}
|
q348
|
LibComponent.Component.introspect
|
train
|
def introspect
inputs_h = Hash.new
outputs_h = Hash.new
#call all inputs introspect and merge values
@inputs.each { |input|
inputs_h.merge!(input.introspect) { |key, old, new| old.merge(new) }
}
#call all outputs introspect and merge values
@outputs.each { |output|
outputs_h.merge!(output.introspect) { |key, old, new| old.merge(new) }
}
res = Hash.new
res["input"] = {"pin" => inputs_h}
res["output"] = {"pin" => outputs_h}
# component options
opt = @options.dup
|
ruby
|
{
"resource": ""
}
|
q349
|
LibComponent.Component.quit_server
|
train
|
def quit_server(status_, str_)
$stderr.puts str_
if ([email protected]?) && !@options[:debug]
bus = DBus::ASessionBus.new
server = bus.service("org.openplacos.server.internal")
|
ruby
|
{
"resource": ""
}
|
q350
|
Zadt.FaceGraph.make_original_face
|
train
|
def make_original_face(num_edges)
num_edges_check(num_edges)
# Make the vertices
vert_ref = Array.new(num_edges) {Vertex.new}
edge_ref = []
# Connect each vertex to the one before it (including the first one :)
(num_edges).times do |vert_id|
edge_ref << make_connection(vert_ref[vert_id - 1], vert_ref[vert_id])
end
|
ruby
|
{
"resource": ""
}
|
q351
|
Zadt.FaceGraph.add_attached_face
|
train
|
def add_attached_face(vertex_array, num_edges)
vertex_array_check(vertex_array)
num_edges_check(num_edges)
# Make the vertices into a line
vertex_line = confirm_vertex_line(vertex_array)
# This finds the "ends" of the vertex line
end_vertices = [vertex_line.first, vertex_line.last]
# Find the neighbors that will be added later
new_neighbors = find_neighbors(vertex_array)
# How many vertices and edges to be made
vertices_to_make = num_edges - vertex_array.length
edges_to_make = vertices_to_make + 1
# Make new vertices
vert_ref = Array.new(vertices_to_make) {Vertex.new}
edge_ref = []
# Connect new vertices in a
|
ruby
|
{
"resource": ""
}
|
q352
|
Zadt.FaceGraph.find_neighbors
|
train
|
def find_neighbors(vertex_array)
vertex_array_check(vertex_array)
neighbors = []
vertex_array.each do |vertex|
@faces.each do |face|
|
ruby
|
{
"resource": ""
}
|
q353
|
Zadt.FaceGraph.find_face_neighbors
|
train
|
def find_face_neighbors(face)
raise "not a face" unless face.is_a?(Face)
|
ruby
|
{
"resource": ""
}
|
q354
|
CrmFormatter.Proper.check_proper_status
|
train
|
def check_proper_status(hsh)
proper = hsh[:proper]
proper_f = hsh[:proper_f]
status = 'invalid'
status = proper != proper_f ? 'formatted' : 'unchanged' if proper &&
|
ruby
|
{
"resource": ""
}
|
q355
|
Finitio.UnionType.dress
|
train
|
def dress(value, handler = DressHelper.new)
error = nil
# Do nothing on TypeError as the next candidate could be the good one!
candidates.each do |c|
success, uped = handler.just_try do
c.dress(value, handler)
end
|
ruby
|
{
"resource": ""
}
|
q356
|
Eluka.FeatureVectors.define_features
|
train
|
def define_features
@fvs.each do |vector, label|
|
ruby
|
{
"resource": ""
}
|
q357
|
BlueprintClient.IntegrationsApi.add_integration
|
train
|
def add_integration(namespace, body, opts = {})
data, _status_code, _headers =
|
ruby
|
{
"resource": ""
}
|
q358
|
BlueprintClient.IntegrationsApi.delete_integration
|
train
|
def delete_integration(namespace, integration_id, integration_type, opts = {})
|
ruby
|
{
"resource": ""
}
|
q359
|
BlueprintClient.IntegrationsApi.get_integration
|
train
|
def get_integration(namespace, integration_type, integration_id, opts = {})
data, _status_code,
|
ruby
|
{
"resource": ""
}
|
q360
|
BlueprintClient.IntegrationsApi.replace_integration
|
train
|
def replace_integration(namespace, integration_id, integration_type, body, opts = {})
data, _status_code, _headers = replace_integration_with_http_info(namespace,
|
ruby
|
{
"resource": ""
}
|
q361
|
TinyCI.GitUtils.repo_root
|
train
|
def repo_root
return git_directory_path if inside_bare_repo?
if inside_git_directory?
File.expand_path('..', git_directory_path)
elsif inside_work_tree?
execute(git_cmd('rev-parse',
|
ruby
|
{
"resource": ""
}
|
q362
|
ActsAsSolr.ParserMethods.parse_results
|
train
|
def parse_results(solr_data, options = {})
results = {
:docs => [],
:total => 0
}
configuration = {
:format => :objects
}
results.update(:spellcheck => solr_data.data['spellcheck']) unless solr_data.nil?
results.update(:facets => {'facet_fields' => []}) if options[:facets]
unless solr_data.nil? or solr_data.header['params'].nil?
header = solr_data.header
results.update :rows => header['params']['rows']
results.update :start => header['params']['start']
end
return SearchResults.new(results) if (solr_data.nil? || solr_data.total_hits == 0)
configuration.update(options) if options.is_a?(Hash)
ids = solr_data.hits.collect {|doc| doc["#{solr_configuration[:primary_key_field]}"]}.flatten
result = find_objects(ids, options, configuration)
add_scores(result, solr_data) if configuration[:format] == :objects && options[:scores]
highlighted = {}
solr_data.highlighting.map do |x,y|
|
ruby
|
{
"resource": ""
}
|
q363
|
ActsAsSolr.ParserMethods.reorder
|
train
|
def reorder(things, ids)
ordered_things = []
ids.each do |id|
thing =
|
ruby
|
{
"resource": ""
}
|
q364
|
ActsAsSolr.ParserMethods.add_scores
|
train
|
def add_scores(results, solr_data)
with_score = []
solr_data.hits.each do |doc|
with_score.push([doc["score"],
results.find {|record| scorable_record?(record, doc) }])
end
with_score.each
|
ruby
|
{
"resource": ""
}
|
q365
|
CrmFormatter.Web.check_web_status
|
train
|
def check_web_status(hsh)
status = 'invalid' if hsh[:web_neg]&.include?('error')
if hsh[:url] && hsh[:url_f] && status.nil?
status = hsh[:url] != hsh[:url_f] ? 'formatted' :
|
ruby
|
{
"resource": ""
}
|
q366
|
CrmFormatter.Web.extract_path
|
train
|
def extract_path(url_hash)
path_parts = url_hash[:url_f].split('//').last.split('/')[1..-1]
path = "/#{path_parts.join('/')}"
if path&.length > 2
|
ruby
|
{
"resource": ""
}
|
q367
|
ColumnPack.ViewHelpers.pack_element
|
train
|
def pack_element(height, content = nil, &block)
return if @column_packer.nil?
if block_given?
|
ruby
|
{
"resource": ""
}
|
q368
|
PsUtilities.PreBuiltPost.u_students_extension
|
train
|
def u_students_extension(data)
db_extensions = { "name"=>"u_students_extension", "recordFound"=>false,
|
ruby
|
{
"resource": ""
}
|
q369
|
PsUtilities.PreBuiltPost.u_studentsuserfields
|
train
|
def u_studentsuserfields(data)
db_extensions = { "name"=>"u_studentsuserfields", "recordFound"=>false,
|
ruby
|
{
"resource": ""
}
|
q370
|
Rews.Util.strip_bang
|
train
|
def strip_bang(k)
if k.is_a? Symbol
k.to_s[0...-1].to_sym
else
|
ruby
|
{
"resource": ""
}
|
q371
|
Rews.Util.camelize
|
train
|
def camelize(s)
if s.is_a?(Symbol)
s.to_s.split('_').map(&:capitalize).join.to_sym
|
ruby
|
{
"resource": ""
}
|
q372
|
Rews.Util.camel_keys
|
train
|
def camel_keys(h)
Hash[h.map{|k,v|
|
ruby
|
{
"resource": ""
}
|
q373
|
Rews.Util.apply_namespace
|
train
|
def apply_namespace(qname, apply_prefix, apply_uri)
local_part, prefix, uri = qname
|
ruby
|
{
"resource": ""
}
|
q374
|
Rews.Util.camelize_qname
|
train
|
def camelize_qname(qname)
local_part, prefix, uri = qname
|
ruby
|
{
"resource": ""
}
|
q375
|
Rews.Util.with_error_check
|
train
|
def with_error_check(client, *response_msg_keys)
raise "no block" if !block_given?
begin
response = yield
hash_response = response.to_hash
statuses = hash_response.fetch_in(*response_msg_keys)
if statuses.is_a?(Array)
|
ruby
|
{
"resource": ""
}
|
q376
|
Rews.Util.single_error_check
|
train
|
def single_error_check(client, status)
begin
response_class = status[:response_class]
rescue
raise "no response_class found: #{status.inspect}" if !response_class
end
if status[:response_class] == "Error"
return "#{status[:response_code]} - #{status[:message_text]}"
|
ruby
|
{
"resource": ""
}
|
q377
|
Finitio.Attribute.fetch_on
|
train
|
def fetch_on(arg, &bl)
unless arg.respond_to?(:fetch)
raise ArgumentError, "Object responding to `fetch` expected"
end
|
ruby
|
{
"resource": ""
}
|
q378
|
YaLoremJa.WordResource.sentences
|
train
|
def sentences(total)
list = []
total.times do
word_count = rand(word_count_range_in_a_sentence)
sentence_len = word_count.times.inject(0){ |sum| sum + rand(char_count_range_in_a_word) }
|
ruby
|
{
"resource": ""
}
|
q379
|
Skyper.SkypeObject.set_property
|
train
|
def set_property(property, value)
cmd = ["SET",self.class.object_name,
|
ruby
|
{
"resource": ""
}
|
q380
|
Finitio.TypeFactory.subtype
|
train
|
def subtype(super_type, constraints = nil, name = nil, metadata = nil, &bl)
super_type = type(super_type)
constraints = constraints(constraints,
|
ruby
|
{
"resource": ""
}
|
q381
|
Finitio.TypeFactory.tuple
|
train
|
def tuple(heading, name = nil, metadata = nil)
heading = heading(heading)
name = name(name)
|
ruby
|
{
"resource": ""
}
|
q382
|
YaLoremJa.Lorem.image
|
train
|
def image(size, options={})
domain = options[:domain] || 'http://placehold.it'
src = "#{domain}/#{size}"
hex = %w(a b c d e f 0 1 2 3 4 5 6 7 8 9)
background_color = options[:background_color]
color = options[:color]
if options[:random_color]
background_color = hex.shuffle[0...6].join
color = hex.shuffle[0...6].join
|
ruby
|
{
"resource": ""
}
|
q383
|
RippleRest.AccountSettings.save
|
train
|
def save
raise ArgumentError.new("Account is missing.") unless account
account.require_secret
hash = {}
|
ruby
|
{
"resource": ""
}
|
q384
|
RippleRest.Payment.submit
|
train
|
def submit
@account.require_secret
hash = {}
hash["payment"] = self.to_hash
hash["secret"] = @account.secret
hash["client_resource_id"] = client_resource_id = RippleRest.next_uuid
source_account
|
ruby
|
{
"resource": ""
}
|
q385
|
Rews.Item.read_items
|
train
|
def read_items(client, items)
return [] if !items
items.map do |item_class,items_of_class|
items_of_class = [items_of_class] if !items_of_class.is_a?(Array)
|
ruby
|
{
"resource": ""
}
|
q386
|
Rews.Item.read_get_item_response_messages
|
train
|
def read_get_item_response_messages(client, get_item_response_messages)
get_item_response_messages = [get_item_response_messages] if !get_item_response_messages.is_a?(Array)
items
|
ruby
|
{
"resource": ""
}
|
q387
|
Krikri::Enrichments.DcmiTypeMap.most_similar
|
train
|
def most_similar(value, threshold = 0.5)
@white ||= Text::WhiteSimilarity.new
result = @map.max_by { |str, _| @white.similarity(value, str) }
|
ruby
|
{
"resource": ""
}
|
q388
|
Restful.Actions.index
|
train
|
def index(options = {}, &block)
respond_with(collection, options, &block) if stale?(collection,
|
ruby
|
{
"resource": ""
}
|
q389
|
Restful.Actions.create
|
train
|
def create(options = {}, &block)
object = get_resource_ivar || create_resource
options[:location] =
|
ruby
|
{
"resource": ""
}
|
q390
|
Krikri::Harvesters.PrimoHarvester.enumerate_records
|
train
|
def enumerate_records(xml)
doc = Nokogiri::XML(xml)
doc.root.add_namespace_definition('nmbib', PRIMO_NS)
doc.xpath('//sear:DOC').lazy.map do |record|
identifier = record.xpath('./nmbib:PrimoNMBib/nmbib:record/' \
|
ruby
|
{
"resource": ""
}
|
q391
|
RiceCooker.Helpers.sortable_fields_for
|
train
|
def sortable_fields_for(model)
if model.respond_to?(:sortable_fields)
model.sortable_fields.map(&:to_sym)
|
ruby
|
{
"resource": ""
}
|
q392
|
RiceCooker.Helpers.filterable_fields_for
|
train
|
def filterable_fields_for(model)
if model.respond_to?(:filterable_fields)
model.filterable_fields.map(&:to_sym)
elsif model.respond_to?(:column_names)
|
ruby
|
{
"resource": ""
}
|
q393
|
StatusCat.StatusHelper.status_report
|
train
|
def status_report(checkers)
format, format_length = status_report_format(checkers)
header = status_report_header(format)
length = [format_length, header.length].max
|
ruby
|
{
"resource": ""
}
|
q394
|
StatusCat.StatusHelper.status_report_format
|
train
|
def status_report_format(checkers)
name_max = status_report_format_max_length(checkers, :name)
value_max = status_report_format_max_length(checkers, :value)
status_max = status_report_format_max_length(checkers, :status)
|
ruby
|
{
"resource": ""
}
|
q395
|
StatusCat.StatusHelper.status_report_header
|
train
|
def status_report_header(format = StatusCat::Checkers::Base::FORMAT)
name = I18n.t(:name, scope: :status_cat)
value = I18n.t(:value, scope: :status_cat)
status
|
ruby
|
{
"resource": ""
}
|
q396
|
Vnstat.Parser.extract_month_from_xml_element
|
train
|
def extract_month_from_xml_element(element)
month = element.xpath('date/month').text.to_i
|
ruby
|
{
"resource": ""
}
|
q397
|
Vnstat.Parser.extract_date_from_xml_element
|
train
|
def extract_date_from_xml_element(element)
day = element.xpath('date/day').text.to_i
year, month =
|
ruby
|
{
"resource": ""
}
|
q398
|
Vnstat.Parser.extract_datetime_from_xml_element
|
train
|
def extract_datetime_from_xml_element(element)
date = extract_date_from_xml_element(element)
hour = element.xpath('time/hour').text.to_i
|
ruby
|
{
"resource": ""
}
|
q399
|
Vnstat.Parser.extract_transmitted_bytes_from_xml_element
|
train
|
def extract_transmitted_bytes_from_xml_element(element)
bytes_received = element.xpath('rx').text.to_i * 1024
bytes_sent
|
ruby
|
{
"resource": ""
}
|
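The rows above are a dataset preview: each record pairs an identifier (_id), a fully qualified Ruby method name (title), a split label (partition), a truncated code body (text, up to about 4.3k characters), the snippet language (language, always ruby here), and a metadata dict (meta_information). Below is a minimal loading sketch, assuming the corpus is published as a Hugging Face datasets repository whose split name mirrors the partition value; the repository ID "your-org/ruby-code-retrieval" is a placeholder, not the dataset's real name.

from datasets import load_dataset

# Placeholder repository ID; substitute the dataset's actual location on the Hub.
ds = load_dataset("your-org/ruby-code-retrieval", split="train")

# Each record exposes the six columns shown in the table header.
for record in ds.select(range(3)):
    print(record["_id"], record["title"], record["language"])
    print(record["text"][:120])        # code bodies can run to roughly 4.3k characters
    print(record["meta_information"])  # e.g. {"resource": ""}

The split name "train" matches the only partition value visible in this preview; whether other splits exist is not shown here.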