_id (string, 2-6 chars) | title (string, 9-130 chars) | partition (string, 3 classes) | text (string, 66-10.5k chars) | language (string, 1 class) | meta_information (dict)
---|---|---|---|---|---|
q3400
|
SOAP.Request.call
|
train
|
def call( methodName, args )
args = (args || {}) #.keys_to_sym!
# Get Binding
binding = @wsdl.bindings.getBindingForOperationName( @binding, methodName )
if binding.size == 0
raise SOAP::LCNoMethodError, "Undefined method `#{methodName}'"
elsif binding.size > 1
raise SOAP::LCError, "Ambiguous method name `#{methodName}', please specify a binding name"
else
binding = binding[0]
@binding = binding.name
end
# Get Binding Operation
binding_operation = binding.operations[methodName]
# Get PortType
portType = @wsdl.portTypes[binding.type.nns]
portType_operation = portType.operations[methodName]
# Get message for input operation
input_message = @wsdl.messages[portType_operation[:input][:message].nns]
# Create method
soap_method = "<#{methodName} xmlns=\"#{@wsdl.targetNamespace}\">\n"
input_message.parts.each do |_, attrs|
case attrs[:mode]
when :type
if SOAP::XSD::ANY_SIMPLE_TYPE.include?( attrs[attrs[:mode]].nns )
# Part refers to a builtin SimpleType
soap_method << SOAP::XSD.displayBuiltinType( attrs[:name], args, 1, 1 )
else
# Part refers to an XSD simpleType or complexType defined in types
element = @wsdl.types[attrs[attrs[:mode]].nns][:value]
case element[:type]
when :simpleType
soap_method << "<#{attrs[:name]}>\n#{element.display( @wsdl.types, args )}\n</#{attrs[:name]}>\n" # MAYBE ##########
when :complexType
soap_method << "<#{attrs[:name]}>\n#{element.display( @wsdl.types, args )}\n</#{attrs[:name]}>\n" # MAYBE ##########
else
raise SOAP::LCWSDLError, "Malformed part #{attrs[:name]}"
end
end
when :element
# Part refers to an XSD element
element = @wsdl.types[attrs[attrs[:mode]].nns][:value]
case element[:type]
when :simpleType
soap_method << element[element[:type]].display( @wsdl.types, args )
when :complexType
soap_method << element[element[:type]].display( @wsdl.types, args )
else
raise SOAP::LCWSDLError, "Malformed element `#{attrs[attrs[:mode]]}'"
end
## TODO ---------- USE element[:key]
else
raise SOAP::LCWSDLError, "Malformed part #{attrs[:name]}"
end
end
soap_method += "</#{methodName}>\n"
# Create SOAP Envelope
envelope = soap_envelop do
soap_header + soap_body( soap_method )
end
# Create headers
headers = Hash.new
# Add SOAPAction to headers (if exist)
action = begin
binding_operation[:soapAction]
rescue
nil
end
headers['SOAPAction'] = action unless action.nil? or action.length == 0
# Search URI
service_port = @wsdl.services.getServicePortForBindingName( binding.name )
address = service_port[:address]
# Complete request
@request = {
:headers => make_header( envelope, headers ),
:envelope => envelope,
:uri => address,
:wsdl => @wsdl,
:response => @wsdl.messages[portType_operation[:output][:message].nns].name,
:binding => @binding,
:method => methodName
}
return self
end
|
ruby
|
{
"resource": ""
}
|
q3401
|
LDAPGroupsLookup.Search.lookup_dn
|
train
|
def lookup_dn(cn)
service.search(base: tree, filter: Net::LDAP::Filter.equals('cn', cn), attributes: 'dn').first&.dn
end
|
ruby
|
{
"resource": ""
}
|
q3402
|
LDAPGroupsLookup.Search.lookup_mail
|
train
|
def lookup_mail(cn)
service&.search(base: tree,
filter: Net::LDAP::Filter.equals('cn', cn),
attributes: 'mail')&.first&.mail&.first.to_s
end
|
ruby
|
{
"resource": ""
}
|
q3403
|
LDAPGroupsLookup.Search.walk_ldap_members
|
train
|
def walk_ldap_members(groups, dn, seen = [])
groups.each do |g|
members = ldap_members(g)
return true if members.include? dn
next if seen.include? g
seen << g
member_groups = members.collect do |mg|
dn_to_cn(mg) if (mg.include?('OU=Groups') || mg.include?('OU=Applications'))
end
member_groups.compact!
return true if walk_ldap_members(member_groups, dn, seen)
end
return false
end
|
ruby
|
{
"resource": ""
}
|
q3404
|
LDAPGroupsLookup.Search.ldap_members
|
train
|
def ldap_members(cn, start=0)
return [] if service.nil?
# print "Getting members of #{cn} at index #{start}\n"
entry = service.search(base: tree,
filter: Net::LDAP::Filter.equals('cn', cn),
attributes: ["member;range=#{start}-*"]).first
return [] if entry.nil?
field_name = entry.attribute_names[1] # Is this always ordered [dn, member]?
return [] if field_name.nil? # Sometimes member is not present.
range_end = field_name.to_s.match(/^member;range=\d+-([0-9*]+)$/)[1]
# print "#{start}-#{range_end}\n"
members = entry[field_name]#.collect { |mo| mo.split(',').first.split('=')[1] }
members.concat ldap_members(cn, range_end.to_i+1) unless range_end == '*'
return members
end
|
ruby
|
{
"resource": ""
}
|
q3405
|
Singularity.Request.deploy
|
train
|
def deploy
if is_paused
puts ' PAUSED, SKIPPING.'
return
else
@data['requestId'] = @data['id']
@data['id'] = "#{@release}.#{Time.now.to_i}"
@data['containerInfo']['docker']['image'] =
File.exist?('dcos-deploy/config.yml') ?
YAML.load_file(File.join(Dir.pwd, 'dcos-deploy/config.yml'))['repo']+":#{@release}" :
"#{JSON.parse(File.read('.mescal.json'))['image'].split(':').first}:#{@release}"
@deploy = {
'deploy' => @data,
'user' => `whoami`.chomp,
'unpauseOnSuccessfulDeploy' => false
}
# deploy the request
RestClient.post "#{@uri}/api/deploys", @deploy.to_json, :content_type => :json
puts ' Deploy succeeded: '.green + @data['requestId'].light_blue
end
end
|
ruby
|
{
"resource": ""
}
|
q3406
|
Outbox.Message.body
|
train
|
def body(value)
each_message_type do |_, message|
next if message.nil?
message.body = value
end
end
|
ruby
|
{
"resource": ""
}
|
q3407
|
Outbox.Message.deliver
|
train
|
def deliver(audience)
audience = Outbox::Accessor.new(audience)
each_message_type do |message_type, message|
next if message.nil?
recipient = audience[message_type]
message.deliver(recipient) if recipient
end
end
|
ruby
|
{
"resource": ""
}
|
q3408
|
Hicube.ApplicationController.notify
|
train
|
def notify(type, message, options = {})
options[:now] ||= false
# Convert and cleanup.
type = type.to_s.downcase.to_sym
# Sanity check for type.
unless FLASH_TYPES.include?(type)
raise ArgumentError, "Invalid value for argument type: #{type}, expected one of: #{FLASH_TYPES.to_sentence}."
end
logger.info("FLASH (#{options.inspect}) #{type.to_s.upcase}: #{message}")
if options[:now] == true
flash.now[type] ||= []
flash.now[type] << message
else
flash[type] ||= []
flash[type] << message
end
logger.debug("DEBUG: FLASH #{flash.inspect}")
return true
end
|
ruby
|
{
"resource": ""
}
|
q3409
|
ParallelAppium.Android.start_emulators
|
train
|
def start_emulators
emulators = `emulator -list-avds`.split("\n")
emulators = emulators[0, ENV['THREADS'].to_i]
Parallel.map(emulators, in_threads: emulators.size) do |emulator|
spawn("emulator -avd #{emulator} -no-snapshot-load -scale 100dpi -no-boot-anim -no-audio -accel on &", out: '/dev/null')
end
emulators
end
|
ruby
|
{
"resource": ""
}
|
q3410
|
ShakeTheCounter.Section.make_reservation
|
train
|
def make_reservation(price_type_list: {}, affiliate: '', first_name: '', last_name: '', email: '')
# step 1: make the reservation
path = "event/#{performance.event.key}/performance/#{performance.key}/section/#{performance_section_key}/reservation/#{performance.event.client.language_code}"
body = {
PriceTypeList: price_type_list
}
result = performance.event.client.call(path, http_method: :post, body: body.to_json)
reservation = ShakeTheCounter::Reservation.new(result)
# step 2: create a contact
path = "contact/#{performance.event.client.language_code}"
body = {
FirstName: first_name,
LastName: last_name,
MailAddress: email,
LanguageCode: performance.event.client.language_code
}
result = performance.event.client.call(path, http_method: :post, body: body.to_json)
contact = ShakeTheCounter::Contact.new(result)
# step 3: link contact to the reservation
path = "reservation/#{reservation.key}/contact"
body = {
ContactKey: contact.key
}
result = performance.event.client.call(path, http_method: :post, body: body.to_json)
return reservation
end
|
ruby
|
{
"resource": ""
}
|
q3411
|
Modelish.Base.to_hash
|
train
|
def to_hash
out = {}
self.class.properties.each { |p| out[hash_key(p)] = hash_value(p) }
out
end
|
ruby
|
{
"resource": ""
}
|
q3412
|
GitFeats.Checker.check
|
train
|
def check(args)
# Load history and completed
Completed.unserialize
History.unserialize
# request flag
upload = false
# Check for feats and update history
Feats.all.each do |pattern, feats|
if args.match?(pattern)
History.add(pattern)
feats.each do |feat, value|
if History.count(pattern) >= value[:count]
unless Completed.exists?(feat)
Completed.add(feat)
Reporter.report(value)
upload = true
end
end
end
end
end
# upload feats if the request flag is set
upload_feats if upload
# Write out history and completed feats
Completed.serialize
History.serialize
end
|
ruby
|
{
"resource": ""
}
|
q3413
|
Shack.Stamp.rounded_corner
|
train
|
def rounded_corner(horizontal, vertical)
css = [] << "border"
attrs = {
top: "bottom", bottom: "top",
left: "right", right: "left" }
css << attrs.fetch(vertical)
css << attrs.fetch(horizontal)
css << "radius"
css.join("-")
end
|
ruby
|
{
"resource": ""
}
|
q3414
|
Imgurr.Storage.bootstrap
|
train
|
def bootstrap
return if File.exist?(json_file)
FileUtils.touch json_file
File.open(json_file, 'w') {|f| f.write(to_json) }
save
end
|
ruby
|
{
"resource": ""
}
|
q3415
|
GraphStarter.ApplicationHelper.asset_icon
|
train
|
def asset_icon(asset, image = (image_unspecified = true; nil))
image_url = if !image_unspecified
image.source.url if image.present?
elsif (asset.class.has_images? || asset.class.has_image?) && asset.first_image_source_url.present?
asset.first_image_source_url
end
if image_url
image_tag image_url, class: 'ui avatar image'
else
content_tag :i, '', class: [asset.class.icon_class || 'folder', 'large', 'icon']
end
end
|
ruby
|
{
"resource": ""
}
|
q3416
|
Beaker.GoogleComputeHelper.get_platform_project
|
train
|
def get_platform_project(name)
if name =~ /debian/
return DEBIAN_PROJECT
elsif name =~ /centos/
return CENTOS_PROJECT
elsif name =~ /rhel/
return RHEL_PROJECT
elsif name =~ /sles/
return SLES_PROJECT
else
raise "Unsupported platform for Google Compute Engine: #{name}"
end
end
|
ruby
|
{
"resource": ""
}
|
q3417
|
Beaker.GoogleComputeHelper.set_compute_api
|
train
|
def set_compute_api version, start, attempts
try = (Time.now - start)/SLEEPWAIT
while try <= attempts
begin
@compute = @client.discovered_api('compute', version)
@logger.debug("Google Compute API discovered")
return
rescue => e
@logger.debug("Failed to discover Google Compute API")
if try >= attempts
raise e
end
end
try += 1
end
end
|
ruby
|
{
"resource": ""
}
|
q3418
|
Beaker.GoogleComputeHelper.authenticate
|
train
|
def authenticate(keyfile, password, email, start, attempts)
# OAuth authentication, using the service account
key = ::Google::APIClient::PKCS12.load_key(keyfile, password)
service_account = ::Google::APIClient::JWTAsserter.new(
email,
AUTH_URL,
key)
try = (Time.now - start) / SLEEPWAIT
while try <= attempts
begin
@client.authorization = service_account.authorize
@logger.debug("Authorized to use Google Compute")
return
rescue => e
@logger.debug("Failed to authorize to use Google Compute")
if try >= attempts
raise e
end
end
try += 1
end
end
|
ruby
|
{
"resource": ""
}
|
q3419
|
Beaker.GoogleComputeHelper.execute
|
train
|
def execute req, start, attempts
last_error = parsed = nil
try = (Time.now - start) / SLEEPWAIT
while try <= attempts
begin
result = @client.execute(req)
parsed = JSON.parse(result.body)
if not result.success?
error_code = parsed["error"] ? parsed["error"]["code"] : 0
if error_code == 404
raise GoogleComputeError, "Resource Not Found: #{result.body}"
elsif error_code == 400
raise GoogleComputeError, "Bad Request: #{result.body}"
else
raise GoogleComputeError, "Error attempting Google Compute API execute: #{result.body}"
end
end
return parsed
# retry errors
rescue Faraday::Error::ConnectionFailed => e
@logger.debug "ConnectionFailed attempting Google Compute execute command"
try += 1
last_error = e
end
end
# we only get down here if we've used up all our tries
raise last_error
end
|
ruby
|
{
"resource": ""
}
|
q3420
|
Beaker.GoogleComputeHelper.list_firewalls
|
train
|
def list_firewalls(start, attempts)
result = execute( firewall_list_req(), start, attempts )
firewalls = result["items"]
firewalls.delete_if{|f| f['name'] =~ /default-allow-internal|default-ssh/}
firewalls
end
|
ruby
|
{
"resource": ""
}
|
q3421
|
Beaker.GoogleComputeHelper.create_firewall
|
train
|
def create_firewall(name, network, start, attempts)
execute( firewall_insert_req( name, network['selfLink'] ), start, attempts )
end
|
ruby
|
{
"resource": ""
}
|
q3422
|
Beaker.GoogleComputeHelper.create_disk
|
train
|
def create_disk(name, img, start, attempts)
#create a new boot disk for this instance
disk = execute( disk_insert_req( name, img['selfLink'] ), start, attempts )
status = ''
try = (Time.now - start) / SLEEPWAIT
while status !~ /READY/ and try <= attempts
begin
disk = execute( disk_get_req( name ), start, attempts )
status = disk['status']
rescue GoogleComputeError => e
@logger.debug("Waiting for #{name} disk creation")
sleep(SLEEPWAIT)
end
try += 1
end
if status == ''
raise "Unable to create disk #{name}"
end
disk
end
|
ruby
|
{
"resource": ""
}
|
q3423
|
Beaker.GoogleComputeHelper.create_instance
|
train
|
def create_instance(name, img, machineType, disk, start, attempts)
#add a new instance of the image
instance = execute( instance_insert_req( name, img['selfLink'], machineType['selfLink'], disk['selfLink'] ), start, attempts)
status = ''
try = (Time.now - start) / SLEEPWAIT
while status !~ /RUNNING/ and try <= attempts
begin
instance = execute( instance_get_req( name ), start, attempts )
status = instance['status']
rescue GoogleComputeError => e
@logger.debug("Waiting for #{name} instance creation")
sleep(SLEEPWAIT)
end
try += 1
end
if status == ''
raise "Unable to create instance #{name}"
end
instance
end
|
ruby
|
{
"resource": ""
}
|
q3424
|
Beaker.GoogleComputeHelper.delete_instance
|
train
|
def delete_instance(name, start, attempts)
result = execute( instance_delete_req( name ), start, attempts )
# Ensure deletion of instance
try = (Time.now - start) / SLEEPWAIT
while try <= attempts
begin
result = execute( instance_get_req( name ), start, attempts )
@logger.debug("Waiting for #{name} instance deletion")
sleep(SLEEPWAIT)
rescue GoogleComputeError => e
@logger.debug("#{name} instance deleted!")
return
end
try += 1
end
@logger.debug("#{name} instance was not removed before timeout, may still exist")
end
|
ruby
|
{
"resource": ""
}
|
q3425
|
Beaker.GoogleComputeHelper.delete_disk
|
train
|
def delete_disk(name, start, attempts)
result = execute( disk_delete_req( name ), start, attempts )
# Ensure deletion of disk
try = (Time.now - start) / SLEEPWAIT
while try <= attempts
begin
disk = execute( disk_get_req( name ), start, attempts )
@logger.debug("Waiting for #{name} disk deletion")
sleep(SLEEPWAIT)
rescue GoogleComputeError => e
@logger.debug("#{name} disk deleted!")
return
end
try += 1
end
@logger.debug("#{name} disk was not removed before timeout, may still exist")
end
|
ruby
|
{
"resource": ""
}
|
q3426
|
Beaker.GoogleComputeHelper.delete_firewall
|
train
|
def delete_firewall(name, start, attempts)
result = execute( firewall_delete_req( name ), start, attempts )
# Ensure deletion of firewall
try = (Time.now - start) / SLEEPWAIT
while try <= attempts
begin
firewall = execute( firewall_get_req( name ), start, attempts )
@logger.debug("Waiting for #{name} firewall deletion")
sleep(SLEEPWAIT)
rescue GoogleComputeError => e
@logger.debug("#{name} firewall deleted!")
return
end
try += 1
end
@logger.debug("#{name} firewall was not removed before timeout, may still exist")
end
|
ruby
|
{
"resource": ""
}
|
q3427
|
Beaker.GoogleComputeHelper.instance_setMetadata_req
|
train
|
def instance_setMetadata_req(name, fingerprint, data)
{ :api_method => @compute.instances.set_metadata,
:parameters => { 'project' => @options[:gce_project], 'zone' => DEFAULT_ZONE_NAME, 'instance' => name },
:body_object => { 'kind' => 'compute#metadata',
'fingerprint' => fingerprint,
'items' => data }
}
end
|
ruby
|
{
"resource": ""
}
|
q3428
|
Beaker.GoogleComputeHelper.instance_insert_req
|
train
|
def instance_insert_req(name, image, machineType, disk)
{ :api_method => @compute.instances.insert,
:parameters => { 'project' => @options[:gce_project], 'zone' => DEFAULT_ZONE_NAME },
:body_object => { 'name' => name,
'image' => image,
'zone' => default_zone,
'machineType' => machineType,
'disks' => [ { 'source' => disk,
'type' => 'PERSISTENT', 'boot' => 'true'} ],
'networkInterfaces' => [ { 'accessConfigs' => [{ 'type' => 'ONE_TO_ONE_NAT', 'name' => 'External NAT' }],
'network' => default_network } ] } }
end
|
ruby
|
{
"resource": ""
}
|
q3429
|
LetItGo.MethodCall.method_array
|
train
|
def method_array
@parser = nil
@caller_lines.each do |kaller|
code = Ripper.sexp(kaller.contents)
code ||= Ripper.sexp(kaller.contents.sub(/^\W*(if|unless)/, ''.freeze)) # if and unless "block" statements aren't valid one line ruby code
code ||= Ripper.sexp(kaller.contents.sub(/do \|.*\|$/, ''.freeze)) # remove trailing do |thing| to make valid code
code ||= Ripper.sexp(kaller.contents.sub(/(and|or)\W*$/, ''.freeze))# trailing and || or
code ||= Ripper.sexp(kaller.contents.sub(/:\W*$/, ''.freeze)) # multi line ternary statements
code ||= Ripper.sexp(kaller.contents.sub(/(^\W*)|({ \|?.*\|?)}/, ''.freeze)) # multi line blocks using {}
puts "LetItGoFailed parse (#{kaller.file_name}:#{kaller.line_number}: \n \033[0;31m"+ kaller.contents.strip + "\e[0m".freeze if ENV['LET_IT_GO_RECORD_FAILED_CODE'] && code.nil? && kaller.contents.match(/"|'/)
parser = ::LetItGo::WTFParser.new(code, contents: kaller.contents)
if parser.each_method.any? { |m| m.method_name == method_name }
@line_number = kaller.line_number
@file_name = kaller.file_name
@parser = parser
parser.each_method.each(&:arg_types)
break
else
next
end
end
@parser || []
end
|
ruby
|
{
"resource": ""
}
|
q3430
|
LetItGo.MethodCall.called_with_string_literal?
|
train
|
def called_with_string_literal?
@string_allocation_count = 0
method_array.each do |m|
positions.each {|position| @string_allocation_count += 1 if m.arg_types[position] == :string_literal }
end
!@string_allocation_count.zero?
end
|
ruby
|
{
"resource": ""
}
|
q3431
|
Maximus.Lint.refine
|
train
|
def refine(data)
@task ||= ''
data = parse_data(data)
return puts data if data.is_a?(String)
evaluate_severities(data)
puts summarize
if @config.is_dev?
puts dev_format(data)
ceiling_warning
else
# Because this should be returned in the format it was received
@output[:raw_data] = data.to_json
end
@output
end
|
ruby
|
{
"resource": ""
}
|
q3432
|
Maximus.Lint.files_inspected
|
train
|
def files_inspected(ext, delimiter = ',', remove = @config.working_dir)
@path.is_a?(Array) ? @path.split(delimiter) : file_list(@path, ext, remove)
end
|
ruby
|
{
"resource": ""
}
|
q3433
|
Maximus.Lint.relevant_output
|
train
|
def relevant_output(lint, files)
all_files = {}
files.each do |file|
# sometimes data will be blank but this is good - it means no errors were raised in the lint
next if lint.blank? || file.blank? || !file.is_a?(Hash) || !file.key?(:filename)
lint_file = lint[file[:filename]]
next if lint_file.blank?
expanded = lines_added_to_range(file)
revert_name = strip_working_dir(file[:filename])
all_files[revert_name] = []
lint_file.each do |l|
if expanded.include?(l['line'].to_i)
all_files[revert_name] << l
end
end
# If there's nothing there, then it definitely isn't a relevant lint
all_files.delete(revert_name) if all_files[revert_name].blank?
end
@output[:files_linted] = all_files.keys
all_files
end
|
ruby
|
{
"resource": ""
}
|
q3434
|
Maximus.Lint.evaluate_severities
|
train
|
def evaluate_severities(data)
@output[:lint_warnings] = []
@output[:lint_errors] = []
@output[:lint_conventions] = []
@output[:lint_refactors] = []
@output[:lint_fatals] = []
return if data.blank?
data.each do |filename, error_list|
error_list.each do |message|
# so that :raw_data remains unaffected
message = message.clone
message.delete('length')
message['filename'] = filename.nil? ? '' : strip_working_dir(filename)
severity = "lint_#{message['severity']}s".to_sym
message.delete('severity')
@output[severity] << message if @output.key?(severity)
end
end
@output
end
|
ruby
|
{
"resource": ""
}
|
q3435
|
Maximus.Lint.summarize
|
train
|
def summarize
success = @task.color(:green)
success << ": "
success << "[#{@output[:lint_warnings].length}]".color(:yellow)
success << " [#{@output[:lint_errors].length}]".color(:red)
if @task == 'rubocop'
success << " [#{@output[:lint_conventions].length}]".color(:cyan)
success << " [#{@output[:lint_refactors].length}]".color(:white)
success << " [#{@output[:lint_fatals].length}]".color(:magenta)
end
success << "\n#{'Warning'.color(:red)}: #{@output[:lint_errors].length} errors found in #{@task}" if @output[:lint_errors].length > 0
success
end
|
ruby
|
{
"resource": ""
}
|
q3436
|
Maximus.Lint.ceiling_warning
|
train
|
def ceiling_warning
lint_length = (@output[:lint_errors].length + @output[:lint_warnings].length + @output[:lint_conventions].length + @output[:lint_refactors].length + @output[:lint_fatals].length)
return unless lint_length > 100
failed_task = @task.color(:green)
errors = "#{lint_length} failures.".color(:red)
errormsg = [
"You wouldn't stand a chance in Rome.\nResolve thy errors and train with #{failed_task} again.",
"The gods frown upon you, mortal.\n#{failed_task}. Again.",
"Do not embarrass the city. Fight another day. Use #{failed_task}.",
"You are without honor. Replenish it with another #{failed_task}.",
"You will never claim the throne with a performance like that.",
"Pompeii has been lost.",
"A wise choice. Do not be discouraged from another #{failed_task}."
].sample
errormsg << "\n\n"
go_on = prompt "\n#{errors} Continue? (y/n) "
abort errormsg unless truthy?(go_on)
end
|
ruby
|
{
"resource": ""
}
|
q3437
|
Maximus.Lint.dev_format
|
train
|
def dev_format(errors = @output[:raw_data])
return if errors.blank?
pretty_output = ''
errors.each do |filename, error_list|
filename = strip_working_dir(filename)
pretty_output << "\n#{filename.color(:cyan).underline} \n"
error_list.each do |message|
pretty_output << severity_color(message['severity'])
pretty_output << " #{message['line'].to_s.color(:blue)} #{message['linter'].color(:green)}: #{message['reason']} \n"
end
end
pretty_output << "-----\n\n"
pretty_output
end
|
ruby
|
{
"resource": ""
}
|
q3438
|
Maximus.Lint.parse_data
|
train
|
def parse_data(data)
# Prevent abortive empty JSON.parse error
data = '{}' if data.blank?
return "Error from #{@task}: #{data}" if data.is_a?(String) && data.include?('No such')
data = JSON.parse(data) if data.is_a?(String)
@output[:relevant_output] = relevant_output( data, @git_files ) unless @git_files.blank?
data = @output[:relevant_output] unless @settings[:commit].blank?
data
end
|
ruby
|
{
"resource": ""
}
|
q3439
|
Biffbot.Bulk.generate_post_body
|
train
|
def generate_post_body name, api_url, urls = [], options = {}
post_body = {token: @token, name: name, apiUrl: api_url, urls: urls}
options.each do |key, value|
next unless %w(notifyEmail maxRounds notifyWebHook pageProcessPattern).include?(key.to_s)
post_body[key] = value
end
post_body
end
|
ruby
|
{
"resource": ""
}
|
q3440
|
Biffbot.Bulk.retrieve_data
|
train
|
def retrieve_data jobName, _options = {}
# TODO: add support for csv
endpoint = "http://api.diffbot.com/v3/bulk/download/#{@token}-#{jobName}_data.json"
JSON.parse(HTTParty.get(endpoint).body).each_pair do |key, value|
self[key] = value
end
end
|
ruby
|
{
"resource": ""
}
|
q3441
|
Maximus.Config.evaluate_settings
|
train
|
def evaluate_settings(settings_data = @settings)
settings_data.each do |key, value|
next if value.is_a?(FalseClass)
value = {} if value.is_a?(TrueClass)
case key
when :jshint, :JSHint, :JShint
value = load_config(value)
jshint_ignore settings_data[key]
@settings[:jshint] = temp_it('jshint.json', value.to_json)
when :scsslint, :SCSSlint
value = load_config(value)
@settings[:scsslint] = temp_it('scsslint.yml', value.to_yaml)
when :rubocop, :Rubocop, :RuboCop
value = load_config(value)
@settings[:rubocop] = temp_it('rubocop.yml', value.to_yaml)
when :brakeman
@settings[:brakeman] = settings_data[key]
when :rails_best_practice, :railsbp
@settings[:railsbp] = settings_data[key]
when :stylestats, :Stylestats
value = load_config(value)
@settings[:stylestats] = temp_it('stylestats.json', value.to_json)
when :phantomas, :Phantomas
value = load_config(value)
@settings[:phantomas] = temp_it('phantomas.json', value.to_json)
when :wraith, :Wraith
value = load_config(value)
evaluate_for_wraith(value)
# Configuration important to all of maximus
when :is_dev, :log, :root_dir, :domain, :port, :paths, :commit
@settings[key] = settings_data[key]
end
end
@settings
end
|
ruby
|
{
"resource": ""
}
|
q3442
|
Maximus.Config.destroy_temp
|
train
|
def destroy_temp(filename = nil)
if filename.nil?
@temp_files.each { |filename, file| file.unlink }
@temp_files = {}
else
return if @temp_files[filename.to_sym].blank?
@temp_files[filename.to_sym].unlink
@temp_files.delete(filename.to_sym)
end
end
|
ruby
|
{
"resource": ""
}
|
q3443
|
Maximus.Config.load_config_file
|
train
|
def load_config_file(file_path, root)
conf_location = if file_path.present? && File.exist?(file_path)
file_path
else
config_exists('.maximus.yml', root) || config_exists('maximus.yml', root) || config_exists('config/maximus.yml', root)
end
return {} if conf_location.is_a?(FalseClass)
yaml = YAML.load_file conf_location
yaml = {} if yaml.blank?
yaml.symbolize_keys
end
|
ruby
|
{
"resource": ""
}
|
q3444
|
Maximus.Config.set_families
|
train
|
def set_families(head_of_house, family)
if @settings.key?(head_of_house)
family.each { |f| @settings[f] ||= @settings[head_of_house].is_a?(TrueClass) }
end
end
|
ruby
|
{
"resource": ""
}
|
q3445
|
Maximus.Config.load_config
|
train
|
def load_config(value)
return value unless value.is_a?(String)
if value =~ /^http/
begin open(value)
YAML.load open(value).read
rescue
puts "#{value} not accessible"
{}
end
elsif File.exist?(value)
YAML.load_file(value)
else
puts "#{value} not found"
{}
end
end
|
ruby
|
{
"resource": ""
}
|
q3446
|
Maximus.Config.temp_it
|
train
|
def temp_it(filename, data)
ext = filename.split('.')
file = Tempfile.new([filename, ".#{ext[1]}"]).tap do |f|
f.rewind
f.write(data)
f.close
end
@temp_files[ext[0].to_sym] = file
file.path
end
|
ruby
|
{
"resource": ""
}
|
q3447
|
Maximus.Config.split_paths
|
train
|
def split_paths(paths)
new_paths = {}
paths.each do |p|
if p.split('/').length > 1
new_paths[p.split('/').last.to_s] = p
else
new_paths['home'] = '/'
end
end
new_paths
end
|
ruby
|
{
"resource": ""
}
|
q3448
|
Maximus.Config.wraith_setup
|
train
|
def wraith_setup(value, name = 'phantomjs')
if @settings.key?(:urls)
value['domains'] = @settings[:urls]
else
value['domains'] = {}
# @see #domain
value['domains']['main'] = domain
end
# Set wraith defaults unless they're already defined
# Wraith requires this screen_width config to be present
value['screen_widths'] ||= [1280, 1024, 767]
value['fuzz'] ||= '20%'
value['threshold'] ||= 0
value['paths'] = @settings[:paths]
temp_it("#{name}.yaml", value.to_yaml)
end
|
ruby
|
{
"resource": ""
}
|
q3449
|
Maximus.Config.config_exists
|
train
|
def config_exists(file, root)
present_location = File.join(root, file)
File.exist?(present_location) ? present_location : false
end
|
ruby
|
{
"resource": ""
}
|
q3450
|
Maximus.Config.jshint_ignore
|
train
|
def jshint_ignore(settings_data_key)
return unless settings_data_key.is_a?(Hash) && settings_data_key.key?('jshintignore')
jshintignore_file = []
settings_data_key['jshintignore'].each { |i| jshintignore_file << "#{i}\n" }
@settings[:jshintignore] = temp_it('jshintignore.json', jshintignore_file)
end
|
ruby
|
{
"resource": ""
}
|
q3451
|
CEF.Event.format_prefix
|
train
|
def format_prefix
values = CEF::PREFIX_ATTRIBUTES.keys.map { |k| self.send(k) }
escaped = values.map do |value|
escape_prefix_value(value)
end
escaped.join('|')
end
|
ruby
|
{
"resource": ""
}
|
q3452
|
CEF.Event.format_extension
|
train
|
def format_extension
extensions = CEF::EXTENSION_ATTRIBUTES.keys.map do |meth|
value = self.send(meth)
next if value.nil?
shortname = CEF::EXTENSION_ATTRIBUTES[meth]
[shortname, escape_extension_value(value)].join("=")
end
# make sure time comes out as milliseconds since epoch
times = CEF::TIME_ATTRIBUTES.keys.map do |meth|
value = self.send(meth)
next if value.nil?
shortname = CEF::TIME_ATTRIBUTES[meth]
[shortname, escape_extension_value(value)].join("=")
end
(extensions + times).compact.join(" ")
end
|
ruby
|
{
"resource": ""
}
|
q3453
|
Unsakini.ShareBoardController.validate_params
|
train
|
def validate_params
if params[:encrypted_password].nil? or params[:shared_user_ids].nil? or params[:board].nil?
render json: {}, status: 422
return
end
result = has_board_access(params[:board][:id])
if result[:status] != :ok
render json: {}, status: result[:status]
return
else
if !result[:user_board].is_admin
render json: {}, status: :forbidden
return
end
@board = result[:board]
@user_board = result[:user_board]
end
if params[:posts]
params[:posts].each do |post|
s = has_post_access(params[:board][:id], post[:id])[:status]
if s != :ok
render json: {}, status: s
return
end
if post[:comments]
post[:comments].each do |comment|
s = has_comment_access(post[:id], comment[:id])[:status]
if s != :ok
render json: {}, status: s
return
end
end
end
end
end
end
|
ruby
|
{
"resource": ""
}
|
q3454
|
Unsakini.UsersController.create
|
train
|
def create
user = User.new(user_params)
if user.save
UserMailer.confirm_account(user).deliver_now
render json: user, status: :created
else
render json: user.errors, status: 422
end
end
|
ruby
|
{
"resource": ""
}
|
q3455
|
Unsakini.UsersController.confirm
|
train
|
def confirm
token = params[:token].to_s
user = User.find_by(confirmation_token: token)
if user.present? && user.confirmation_token_valid?
if user.mark_as_confirmed!
render json: {status: 'Account confirmed successfully.'}
else
render json: user.errors, status: 422
end
else
render json: ['Invalid token'], status: :not_found
end
end
|
ruby
|
{
"resource": ""
}
|
q3456
|
Unsakini.UsersController.search
|
train
|
def search
user = User.where("email = ? AND id != ?", params[:email], @user.id).first
if user
render json: user
else
render json: {}, status: :not_found
end
end
|
ruby
|
{
"resource": ""
}
|
q3457
|
Maximus.GitControl.commit_export
|
train
|
def commit_export(commit_sha = head_sha)
commit_sha = commit_sha.to_s
ce_commit = @g.gcommit(commit_sha)
if first_commit == commit_sha
ce_diff = diff_initial(first_commit)
else
last_commit = @g.gcommit(previous_commit(commit_sha))
ce_diff = diff(last_commit, ce_commit)
end
{
commit_sha: commit_sha,
branch: branch,
message: ce_commit.message,
remote_repo: remote,
git_author: ce_commit.author.name,
git_author_email: ce_commit.author.email,
commit_date: ce_commit.author.date.to_s,
diff: ce_diff
}
end
|
ruby
|
{
"resource": ""
}
|
q3458
|
Maximus.GitControl.compare
|
train
|
def compare(sha1 = master_commit_sha, sha2 = head_sha)
diff_return = {}
sha1 = define_psuedo_commit if @settings[:commit]
# Reverse so that we go in chronological order
git_spread = commit_range(sha1, sha2).reverse
git_spread.each do |git_sha|
# Grab all files in that commit and group them by extension
# If working copy, just give the diff names of the files changed
files = @psuedo_commit ? working_copy_files : files_by_sha(git_sha)
diff_return[git_sha] = match_associations(git_sha, files)
end
diff_return
end
|
ruby
|
{
"resource": ""
}
|
q3459
|
Maximus.GitControl.lints_and_stats
|
train
|
def lints_and_stats(lint_by_path = false, git_shas = compare, nuclear = false)
return false if git_shas.blank?
base_branch = branch
git_ouput = {}
git_shas.each do |sha, exts|
create_branch(sha) unless @psuedo_commit
sha = sha.to_s
puts sha.color(:blue)
exts.each do |ext, files|
# For relevant_lines data
lint_opts = {
git_files: files,
config: @config,
file_paths: (lint_file_paths(files, ext) if lint_by_path)
}
git_ouput[sha] = nuclear ? lints_and_stats_nuclear(lint_opts) : lints_and_stats_switch(ext, lint_opts)
end
destroy_branch(base_branch, sha) unless @psuedo_commit
end
git_ouput
end
|
ruby
|
{
"resource": ""
}
|
q3460
|
Maximus.GitControl.associations
|
train
|
def associations
{
css: ['css'],
scss: ['scss', 'sass'],
js: ['js'],
ruby: ['rb', 'Gemfile', 'lock', 'yml', 'Rakefile', 'ru', 'rdoc', 'rake', 'Capfile', 'jbuilder'],
rails: ['slim', 'haml', 'jbuilder', 'erb'],
images: ['png', 'jpg', 'jpeg', 'gif'],
static: ['pdf', 'txt', 'doc', 'docx', 'csv', 'xls', 'xlsx'],
markup: ['html', 'xml', 'xhtml'],
markdown: ['md', 'markdown', 'mdown'],
php: ['php', 'ini']
}
end
|
ruby
|
{
"resource": ""
}
|
q3461
|
Maximus.GitControl.diff
|
train
|
def diff(old_commit, new_commit)
stats = @g.diff(old_commit, new_commit).stats
lines = lines_added(new_commit.sha)
return if !lines.is_a?(Hash) || stats.blank?
lines.each do |filename, filelines|
stats[:files][filename][:lines_added] = filelines if stats[:files].key?(filename)
end
stats
end
|
ruby
|
{
"resource": ""
}
|
q3462
|
Maximus.GitControl.diff_initial
|
train
|
def diff_initial(commit_sha)
data = commit_information(commit_sha)
value = {
total: {
insertions: 0,
deletions: 0,
lines: 0,
files: data.length
},
files: {}
}
data.each do |d|
item = d.split("\t")
insertions = item[0].to_i
value[:total][:insertions] += insertions
value[:total][:lines] += insertions
value[:files][item[2]] = {
insertions: insertions,
deletions: 0,
lines_added: ["0..#{item[0]}"]
}
end
value
end
|
ruby
|
{
"resource": ""
}
|
q3463
|
Maximus.GitControl.match_associations
|
train
|
def match_associations(commit_sha, files)
new_lines = lines_added(commit_sha)
files = files.split("\n").group_by { |f| f.split('.').pop }
associations.each do |ext, related|
files[ext] ||= []
related.each do |child|
next if files[child].blank?
files[child].each do |c|
# hack to ignore deleted files
files[child] = new_lines[c].blank? ? [] : [ filename: File.join(@config.working_dir, c), changes: new_lines[c] ]
end
files[ext].concat(files[child])
files.delete(child)
end
end
files.delete_if { |k,v| v.blank? || k.nil? }
files
end
|
ruby
|
{
"resource": ""
}
|
q3464
|
Maximus.GitControl.lints_and_stats_nuclear
|
train
|
def lints_and_stats_nuclear(lint_opts)
{
lints: {
scsslint: Maximus::Scsslint.new(lint_opts).result,
jshint: Maximus::Jshint.new(lint_opts).result,
rubocop: Maximus::Rubocop.new(lint_opts).result,
railsbp: Maximus::Railsbp.new(lint_opts).result,
brakeman: Maximus::Brakeman.new(lint_opts).result
},
statistics: {
stylestat: Maximus::Stylestats.new({config: @config}).result,
phantomas: Maximus::Phantomas.new({config: @config}).result,
wraith: Maximus::Wraith.new({config: @config}).result
}
}
end
|
ruby
|
{
"resource": ""
}
|
q3465
|
Maximus.GitControl.lints_and_stats_switch
|
train
|
def lints_and_stats_switch(ext, lint_opts)
result = {
lints: {},
statistics: {}
}
lints = result[:lints]
statistics = result[:statistics]
case ext
when :scss
lints[:scsslint] = Maximus::Scsslint.new(lint_opts).result
# @todo stylestat is singular here because model name in Rails is singular.
# But adding a .classify when it's converted to a model chops off the end s on 'phantomas',
# which breaks the model name.
statistics[:stylestat] = Maximus::Stylestats.new({config: @config}).result
# @todo double pipe here is best way to say, if it's already run, don't run again, right?
statistics[:phantomas] ||= Maximus::Phantomas.new({config: @config}).result
statistics[:wraith] ||= Maximus::Wraith.new({config: @config}).result
when :js
lints[:jshint] = Maximus::Jshint.new(lint_opts).result
statistics[:phantomas] ||= Maximus::Phantomas.new({config: @config}).result
# @todo double pipe here is best way to say, if it's already run, don't run again, right?
statistics[:wraith] ||= Maximus::Wraith.new({config: @config}).result
when :ruby
lints[:rubocop] = Maximus::Rubocop.new(lint_opts).result
lints[:railsbp] ||= Maximus::Railsbp.new(lint_opts).result
lints[:brakeman] = Maximus::Brakeman.new(lint_opts).result
when :rails
lints[:railsbp] ||= Maximus::Railsbp.new(lint_opts).result
end
result
end
|
ruby
|
{
"resource": ""
}
|
q3466
|
Maximus.GitControl.destroy_branch
|
train
|
def destroy_branch(base_branch, sha)
if base_branch == "maximus_#{sha}"
@g.branch('master').checkout
else
@g.branch(base_branch).checkout
end
@g.branch("maximus_#{sha}").delete
end
|
ruby
|
{
"resource": ""
}
|
q3467
|
Maximus.GitControl.lint_file_paths
|
train
|
def lint_file_paths(files, ext)
file_list = files.map { |f| f[:filename] }.compact
# Lints accept files differently
ext == :ruby ? file_list.join(' ') : file_list.join(',')
end
|
ruby
|
{
"resource": ""
}
|
q3468
|
Ronin.Path.join
|
train
|
def join(*names)
joined_path = if root? then ''
else self.to_s
end
names.each do |name|
name = name.to_s
joined_path << @separator unless name.start_with?(@separator)
joined_path << name unless name == @separator
end
return self.class.new(joined_path,@separator)
end
|
ruby
|
{
"resource": ""
}
|
q3469
|
Sandal.Claims.validate_claims
|
train
|
def validate_claims(options = {})
validate_exp(options[:max_clock_skew]) unless options[:ignore_exp]
validate_nbf(options[:max_clock_skew]) unless options[:ignore_nbf]
validate_iss(options[:valid_iss])
validate_aud(options[:valid_aud])
self
end
|
ruby
|
{
"resource": ""
}
|
q3470
|
Sandal.Claims.validate_exp
|
train
|
def validate_exp(max_clock_skew = 0)
max_clock_skew ||= 0
exp = time_claim("exp")
if exp && exp <= (Time.now - max_clock_skew)
raise Sandal::ExpiredTokenError, "The token has expired."
end
end
|
ruby
|
{
"resource": ""
}
|
q3471
|
Sandal.Claims.validate_nbf
|
train
|
def validate_nbf(max_clock_skew = 0)
max_clock_skew ||= 0
nbf = time_claim("nbf")
if nbf && nbf > (Time.now + max_clock_skew)
raise Sandal::ClaimError, "The token is not valid yet."
end
end
|
ruby
|
{
"resource": ""
}
|
q3472
|
Sandal.Claims.validate_iss
|
train
|
def validate_iss(valid_iss)
return unless valid_iss && valid_iss.length > 0
unless valid_iss.include?(self["iss"])
raise Sandal::ClaimError, "The issuer is invalid."
end
end
|
ruby
|
{
"resource": ""
}
|
q3473
|
Sandal.Claims.validate_aud
|
train
|
def validate_aud(valid_aud)
return unless valid_aud && valid_aud.length > 0
aud = self["aud"]
aud = [aud] unless aud.is_a?(Array)
unless (aud & valid_aud).length > 0
raise Sandal::ClaimError, "The audience is invalid."
end
end
|
ruby
|
{
"resource": ""
}
|
q3474
|
Sandal.Claims.time_claim
|
train
|
def time_claim(name)
claim = self[name]
if claim
begin
Time.at(claim)
rescue
raise Sandal::ClaimError, "The \"#{name}\" claim is invalid."
end
end
end
|
ruby
|
{
"resource": ""
}
|
q3475
|
Xcunique.Sorter.sort
|
train
|
def sort
objects.values.each do |object|
SORTABLE_ITEMS.select { |key| object.has_key?(key) }.each do |key|
object[key].sort_by!(&method(:comparator))
end
end
project
end
|
ruby
|
{
"resource": ""
}
|
q3476
|
Xcunique.Sorter.comparator
|
train
|
def comparator uuid
prefix = objects[uuid][Keys::ISA] == Keys::PBXGroup ? ' ' : ''
prefix + Helpers.resolve_attributes(uuid, objects)
end
|
ruby
|
{
"resource": ""
}
|
q3477
|
CodeAnalyzer::CheckingVisitor.Default.check
|
train
|
def check(filename, content)
node = parse(filename, content)
node.file = filename
check_node(node)
end
|
ruby
|
{
"resource": ""
}
|
q3478
|
CodeAnalyzer::CheckingVisitor.Default.after_check
|
train
|
def after_check
@checkers.each do |checker|
after_check_callbacks = checker.class.get_callbacks(:after_check)
after_check_callbacks.each do |block|
checker.instance_exec &block
end
end
end
|
ruby
|
{
"resource": ""
}
|
q3479
|
CodeAnalyzer::CheckingVisitor.Default.parse
|
train
|
def parse(filename, content)
Sexp.from_array(Ripper::SexpBuilder.new(content).parse)
rescue Exception
raise AnalyzerException.new("#{filename} looks like it's not a valid Ruby file. Skipping...")
end
|
ruby
|
{
"resource": ""
}
|
q3480
|
CodeAnalyzer::CheckingVisitor.Default.check_node
|
train
|
def check_node(node)
checkers = @checks[node.sexp_type]
if checkers
checkers.each { |checker| checker.node_start(node) if checker.parse_file?(node.file) }
end
node.children.each { |child_node|
child_node.file = node.file
check_node(child_node)
}
if checkers
checkers.each { |checker| checker.node_end(node) if checker.parse_file?(node.file) }
end
end
|
ruby
|
{
"resource": ""
}
|
q3481
|
CEF.UDPSender.emit
|
train
|
def emit(event)
self.socksetup if self.sock.nil?
# process eventDefaults - we are expecting a hash here. These will
# override any values in the events passed to us. i know. brutal.
unless self.eventDefaults.nil?
self.eventDefaults.each do |k,v|
event.send("%s=" % k,v)
end
end
self.sock.send event.to_s, 0
end
|
ruby
|
{
"resource": ""
}
|
q3482
|
Librevox.Applications.bind_meta_app
|
train
|
def bind_meta_app args={}, &block
arg_string =
args.values_at(:key, :listen_to, :respond_on, :application).join(" ")
arg_string += "::#{args[:parameters]}" if args[:parameters]
application "bind_meta_app", arg_string, &block
end
|
ruby
|
{
"resource": ""
}
|
q3483
|
Librevox.Applications.bridge
|
train
|
def bridge *args, &block
variables = if args.last.is_a? Hash
# We need to sort the key/value pairs to facilitate testing.
# This can be removed once 1.8-compat is dropped.
key_value_pairs = args.pop.sort {|x,y| x.to_s <=> y.to_s}
key_value_pairs.map! {|k,v| "#{k}=#{v}"}
"{#{key_value_pairs.join(",")}}"
else
""
end
endpoints = if args.first.is_a? Array
args.map {|e| e.join(",")}.join("|")
else
args.join ","
end
application "bridge", variables + endpoints, &block
end
|
ruby
|
{
"resource": ""
}
|
q3484
|
Librevox.Applications.play_and_get_digits
|
train
|
def play_and_get_digits file, invalid_file, args={}, &block
min = args[:min] || 1
max = args[:max] || 2
tries = args[:tries] || 3
terminators = args[:terminators] || "#"
timeout = args[:timeout] || 5000
variable = args[:variable] || "read_digits_var"
regexp = args[:regexp] || "\\d+"
args = [min, max, tries, timeout, terminators, file, invalid_file,
variable, regexp].join " "
params = {:variable => variable}
application "play_and_get_digits", args, params, &block
end
|
ruby
|
{
"resource": ""
}
|
q3485
|
Librevox.Applications.record
|
train
|
def record path, params={}, &block
args = [path, params[:limit]].compact.join(" ")
application "record", args, &block
end
|
ruby
|
{
"resource": ""
}
|
q3486
|
Quantile.Estimator.query
|
train
|
def query(rank)
flush
current = @head
return unless current
mid_rank = (rank * @observations).floor
max_rank = mid_rank + (invariant(mid_rank, @observations) / 2).floor
rank = 0.0
while current.successor
rank += current.rank
if rank + current.successor.rank + current.successor.delta > max_rank
return current.value
end
current = current.successor
end
return current.value
end
|
ruby
|
{
"resource": ""
}
|
q3487
|
Maximus.Helper.file_list
|
train
|
def file_list(path, ext = 'scss', remover = '')
# Necessary so that directories aren't counted
collect_path = path.include?("*") ? path : "#{path}/**/*.#{ext}"
# Remove first slash from path if present. probably a better way to do this.
Dir[collect_path].collect { |file| file.gsub(remover, '').gsub(/^\/app\//, 'app/') if File.file?(file) }
end
|
ruby
|
{
"resource": ""
}
|
q3488
|
Maximus.Helper.truthy?
|
train
|
def truthy?(str)
return true if str == true || str =~ (/^(true|t|yes|y|1)$/i)
return false if str == false || str.blank? || str =~ (/^(false|f|no|n|0)$/i)
end
|
ruby
|
{
"resource": ""
}
|
q3489
|
Maximus.Helper.edit_yaml
|
train
|
def edit_yaml(yaml_location, &block)
d = YAML.load_file(yaml_location)
block.call(d)
File.open(yaml_location, 'w') {|f| f.write d.to_yaml }
end
|
ruby
|
{
"resource": ""
}
|
q3490
|
Maximus.Helper.path_exists?
|
train
|
def path_exists?(path = @path)
path = path.split(' ') if path.is_a?(String) && path.include?(' ')
if path.is_a?(Array)
path.each do |p|
unless File.exist?(p)
puts "#{p} does not exist"
return false
end
end
else
path = path.gsub('/**', '').gsub('/*', '').gsub(/\/\.*/, '') if path.include?('*')
if File.exist?(path)
return true
else
puts "#{path} does not exist"
return false
end
end
end
|
ruby
|
{
"resource": ""
}
|
q3491
|
Maximus.Helper.discover_path
|
train
|
def discover_path(root = @config.working_dir, folder = '', extension = '')
return @path unless @path.blank?
if is_middleman?
File.join(root, 'source', folder)
elsif is_rails?
File.join(root, 'app', 'assets', folder)
else
extension.blank? ? File.join(root) : File.join(root, '/**', "/*.#{extension}")
end
end
|
ruby
|
{
"resource": ""
}
|
q3492
|
Unsakini.CommentOwnerControllerConcern.ensure_comment
|
train
|
def ensure_comment
post_id = params[:post_id]
comment_id = params[:comment_id] || params[:id]
result = has_comment_access post_id, comment_id
@comment = result[:comment]
status = result[:status]
head status if status != :ok
end
|
ruby
|
{
"resource": ""
}
|
q3493
|
Unsakini.CommentOwnerControllerConcern.has_comment_access
|
train
|
def has_comment_access(post_id, comment_id)
comment = Unsakini::Comment.where(id: comment_id, post_id: post_id, user_id: @user.id).first
if comment.nil?
return {status: :forbidden, comment: comment}
else
return {status: :ok, comment: comment}
end
end
|
ruby
|
{
"resource": ""
}
|
q3494
|
RipperPlus.ScopeStack.with_closed_scope
|
train
|
def with_closed_scope(is_method = false)
old_in_method = @in_method
@in_method ||= is_method
@stack.push(SCOPE_BLOCKER_9000)
@stack.push(Set.new)
yield
ensure
@stack.pop # pop closed scope
@stack.pop # pop scope blocker
@in_method = old_in_method
end
|
ruby
|
{
"resource": ""
}
|
q3495
|
RipperPlus.ScopeStack.has_variable?
|
train
|
def has_variable?(var)
@stack.reverse_each do |scope|
if SCOPE_BLOCKER_9000 == scope
return false
elsif scope.include?(var)
return true
end
end
end
|
ruby
|
{
"resource": ""
}
|
q3496
|
Unsakini.CommentsController.create
|
train
|
def create
@comment = Comment.new(params.permit(:content))
@comment.user = @user
@comment.post = @post
if @comment.save
render json: @comment
else
render json: @comment.errors, status: 422
end
end
|
ruby
|
{
"resource": ""
}
|
q3497
|
Unsakini.CommentsController.update
|
train
|
def update
if @comment.update(params.permit(:content))
render json: @comment
else
render json: @comment.errors, status: 422
end
end
|
ruby
|
{
"resource": ""
}
|
q3498
|
Unsakini.BoardOwnerControllerConcern.ensure_board
|
train
|
def ensure_board
board_id = params[:board_id] || params[:id]
result = has_board_access(board_id)
@board = result[:board]
@user_board = result[:user_board]
head result[:status] if result[:status] != :ok
end
|
ruby
|
{
"resource": ""
}
|
q3499
|
Unsakini.BoardOwnerControllerConcern.has_board_access
|
train
|
def has_board_access(board_id)
board = nil
if !board_id.nil?
board = Unsakini::Board.find_by_id(board_id)
else
return {status: :bad_request}
end
if (board)
user_board = Unsakini::UserBoard.where(user_id: @user.id, board_id: board_id).first
return {status: :forbidden } if user_board.nil?
return {status: :ok, board: board, user_board: user_board}
else
return {status: :not_found}
end
end
|
ruby
|
{
"resource": ""
}
|