code
stringlengths 12
2.05k
| label_name
stringclasses 5
values | label
int64 0
4
|
---|---|---|
it 'is possible to return hash errors in jsonapi format' do
get '/'
expect(['{"error":"rain!","detail":"missing widget"}',
'{"detail":"missing widget","error":"rain!"}']).to include(last_response.body)
end | Base | 1 |
def test_should_sanitize_with_trailing_space
raw = "display:block; "
expected = "display: block;"
assert_equal expected, sanitize_css(raw)
end | Base | 1 |
it "raises an error when trying to download a local file" do
expect { subject.download('/etc/passwd') }.to raise_error(CarrierWave::DownloadError)
end | Base | 1 |
def add_location_constraint_rule(
session, resource, rule, score, force=false, autocorrect=true
)
cmd = [PCS, "constraint", "location", resource, "rule"]
if score != ''
if is_score(score.upcase)
cmd << "score=#{score.upcase}"
else
cmd << "score-attribute=#{score}"
end
end
cmd.concat(rule.shellsplit())
cmd << '--force' if force
cmd << '--autocorrect' if autocorrect
stdout, stderr, retval = run_cmd(session, *cmd)
return retval, stderr.join(' ')
end | Compound | 4 |
it 'should return the right response' do
email_token.update!(created_at: 999.years.ago)
get "/session/email-login/#{email_token.token}"
expect(response.status).to eq(200)
expect(CGI.unescapeHTML(response.body)).to match(
I18n.t('email_login.invalid_token')
)
end | Class | 2 |
def socket_for(mode)
if options[:retain_socket]
@socket ||= cluster.socket_for(mode)
else
cluster.socket_for(mode)
end
end | Class | 2 |
def verify_cert_key_pair(cert, key)
errors = []
cert_modulus = nil
key_modulus = nil
stdout, stderr, retval = run_cmd_options(
PCSAuth.getSuperuserSession(),
{
'stdin' => cert,
},
'/usr/bin/openssl', 'x509', '-modulus', '-noout'
)
if retval != 0
errors << "Invalid certificate: #{stderr.join}"
else
cert_modulus = stdout.join.strip
end
stdout, stderr, retval = run_cmd_options(
PCSAuth.getSuperuserSession(),
{
'stdin' => key,
},
'/usr/bin/openssl', 'rsa', '-modulus', '-noout'
)
if retval != 0
errors << "Invalid key: #{stderr.join}"
else
key_modulus = stdout.join.strip
end
if errors.empty? and cert_modulus and key_modulus
if cert_modulus != key_modulus
errors << 'Certificate does not match the key'
end
end
return errors
end | Compound | 4 |
it "does not change the original session's options" do
original_options = options.dup
session.with(new_options) do |new_session|
session.options.should eql original_options
end
end | Class | 2 |
it 'returns a list of all private messages that has been archived' do
UserArchivedMessage.archive!(user_2.id, private_message)
GroupArchivedMessage.archive!(user_2.id, group_message)
topics = TopicQuery.new(nil).list_private_messages_all_archive(user_2).topics
expect(topics).to contain_exactly(private_message, group_message)
end | Class | 2 |
def resource_metadata(params, request, session)
if not allowed_for_local_cluster(session, Permissions::READ)
return 403, 'Permission denied'
end
return 200 if not params[:resourcename] or params[:resourcename] == ""
resource_name = params[:resourcename][params[:resourcename].rindex(':')+1..-1]
class_provider = params[:resourcename][0,params[:resourcename].rindex(':')]
@resource = ResourceAgent.new(params[:resourcename])
if class_provider == "ocf:heartbeat"
@resource.required_options, @resource.optional_options, @resource.info = getResourceMetadata(session, HEARTBEAT_AGENTS_DIR + resource_name)
elsif class_provider == "ocf:pacemaker"
@resource.required_options, @resource.optional_options, @resource.info = getResourceMetadata(session, PACEMAKER_AGENTS_DIR + resource_name)
elsif class_provider == 'nagios'
@resource.required_options, @resource.optional_options, @resource.info = getResourceMetadata(session, NAGIOS_METADATA_DIR + resource_name + '.xml')
end
@new_resource = params[:new]
@resources, @groups = getResourcesGroups(session)
erb :resourceagentform
end | Compound | 4 |
def call(exception, locale, key, options)
if exception.is_a?(MissingTranslation)
options[:rescue_format] == :html ? exception.html_message : exception.message
elsif exception.is_a?(Exception)
raise exception
else
throw :exception, exception
end
end | Base | 1 |
it "should unstage user" do
staged_user = Fabricate(:staged, email: '[email protected]', active: true, username: 'staged1', name: 'Stage Name')
invite = Fabricate(:invite, email: '[email protected]')
user = InviteRedeemer.create_user_from_invite(invite: invite, email: invite.email, username: 'walter', name: 'Walter White')
expect(user.id).to eq(staged_user.id)
expect(user.username).to eq('walter')
expect(user.name).to eq('Walter White')
expect(user.staged).to eq(false)
expect(user.email).to eq('[email protected]')
expect(user.approved).to eq(true)
end | Class | 2 |
def first
session.simple_query(operation)
end | Class | 2 |
it 'fails when local logins via email is disabled' do
SiteSetting.enable_local_logins_via_email = false
get "/session/email-login/#{email_token.token}"
expect(response.status).to eq(404)
end | Class | 2 |
def get_cluster_name()
if ISRHEL6
stdout, stderror, retval = run_cmd(
PCSAuth.getSuperuserSession, COROSYNC_CMAPCTL, "cluster"
)
if retval == 0
stdout.each { |line|
match = /^cluster\.name=(.*)$/.match(line)
return match[1] if match
}
end
begin
cluster_conf = Cfgsync::ClusterConf.from_file().text()
rescue
return ''
end
conf_dom = REXML::Document.new(cluster_conf)
if conf_dom.root and conf_dom.root.name == 'cluster'
return conf_dom.root.attributes['name']
end
return ''
end
stdout, stderror, retval = run_cmd(
PCSAuth.getSuperuserSession, COROSYNC_CMAPCTL, "totem.cluster_name"
)
if retval != 0 and not ISRHEL6
# Cluster probably isn't running, try to get cluster name from
# corosync.conf
begin
corosync_conf = CorosyncConf::parse_string(
Cfgsync::CorosyncConf.from_file().text()
)
# mimic corosync behavior - the last cluster_name found is used
cluster_name = nil
corosync_conf.sections('totem').each { |totem|
totem.attributes('cluster_name').each { |attrib|
cluster_name = attrib[1]
}
}
return cluster_name if cluster_name
rescue
return ''
end
return ""
else
return stdout.join().gsub(/.*= /,"").strip
end
end | Compound | 4 |
def send_local_configs_to_nodes(
session, nodes, force=false, clear_local_permissions=false
)
configs = Cfgsync::get_configs_local(true)
if clear_local_permissions
pcs_config = PCSConfig.new(configs[Cfgsync::PcsdSettings.name].text())
pcs_config.permissions_local = Permissions::PermissionsSet.new([])
configs[Cfgsync::PcsdSettings.name].text = pcs_config.text()
end
publisher = Cfgsync::ConfigPublisher.new(
session, configs.values(), nodes, $cluster_name
)
return publisher.send(force)
end | Compound | 4 |
def test_verify_security_policy_checksum_missing
skip 'openssl is missing' unless defined?(OpenSSL::SSL)
@spec.cert_chain = [PUBLIC_CERT.to_pem]
@spec.signing_key = PRIVATE_KEY
build = Gem::Package.new @gem
build.spec = @spec
build.setup_signer
FileUtils.mkdir 'lib'
FileUtils.touch 'lib/code.rb'
File.open @gem, 'wb' do |gem_io|
Gem::Package::TarWriter.new gem_io do |gem|
build.add_metadata gem
build.add_contents gem
# write bogus data.tar.gz to foil signature
bogus_data = Gem.gzip 'hello'
gem.add_file_simple 'data.tar.gz', 0444, bogus_data.length do |io|
io.write bogus_data
end
# pre rubygems 2.0 gems do not add checksums
end
end
Gem::Security.trust_dir.trust_cert PUBLIC_CERT
package = Gem::Package.new @gem
package.security_policy = Gem::Security::HighSecurity
e = assert_raises Gem::Security::Exception do
package.verify
end
assert_equal 'invalid signature', e.message
refute package.instance_variable_get(:@spec), '@spec must not be loaded'
assert_empty package.instance_variable_get(:@files), '@files must empty'
end | Base | 1 |
def self.set_statistics_groups(group_ids)
yaml = Research.get_config_yaml
yaml = Hash.new if yaml.nil?
if group_ids.nil?
unless yaml[:statistics].nil?
yaml[:statistics].delete :groups
end
else
if yaml[:statistics].nil?
yaml[:statistics] = Hash.new
end
yaml[:statistics][:groups] = group_ids.join('|')
end
Research.save_config_yaml yaml
return ary
end | Base | 1 |
def index
respond_to do |format|
format.json do
dir = params[:dir]
dir = 'desc' unless SORT_DIRECTIONS.include?(dir.try(:upcase))
@occurrences = @bug.occurrences.order("occurred_at #{dir}").limit(50)
last = params[:last].present? ? @bug.occurrences.find_by_number(params[:last]) : nil
@occurrences = @occurrences.where(infinite_scroll_clause('occurred_at', dir, last, 'occurrences.number')) if last
render json: decorate(@occurrences)
end
format.atom { @occurrences = @bug.occurrences.order('occurred_at DESC').limit(100) } # index.atom.builder
end
end | Base | 1 |
def login
Log.add_info(request, '') # Not to show passwords.
user = User.authenticate(params[:user])
if user.nil?
flash[:notice] = '<span class=\'font_msg_bold\'>'+t('user.u_name')+'</span>'+t('msg.or')+'<span class=\'font_msg_bold\'>'+t('password.name')+'</span>'+t('msg.is_invalid')
if params[:fwd_controller].nil? or params[:fwd_controller].empty?
redirect_to(:controller => 'login', :action => 'index')
else
url_h = {:controller => 'login', :action => 'index', :fwd_controller => params[:fwd_controller], :fwd_action => params[:fwd_action]}
unless params[:fwd_params].nil?
params[:fwd_params].each do |key, val|
url_h["fwd_params[#{key}]"] = val
end
end
redirect_to(url_h)
end
else
@login_user = LoginHelper.on_login(user, session)
if params[:fwd_controller].nil? or params[:fwd_controller].empty?
prms = ApplicationHelper.get_fwd_params(params)
prms.delete('user')
prms[:controller] = 'desktop'
prms[:action] = 'show'
redirect_to(prms)
else
url_h = {:controller => params[:fwd_controller], :action => params[:fwd_action]}
url_h = url_h.update(params[:fwd_params]) unless params[:fwd_params].nil?
redirect_to(url_h)
end
end
end | Base | 1 |
it "re-enabled functionality if whitelisted" do
Rack::MiniProfiler.config.authorization_mode = :whitelist
expect(Rack::MiniProfiler).to receive(:request_authorized?) { true }.twice
get '/html?pp=enable'
last_response.body.should include('/mini-profiler-resources/includes.js')
end | Class | 2 |
it "returns the group permissions for everyone group too" do
category.set_permissions(everyone: :readonly)
category.save!
json = described_class.new(category, scope: Guardian.new(admin), root: false).as_json
expect(json[:group_permissions]).to eq([
{ permission_type: CategoryGroup.permission_types[:readonly], group_name: 'everyone' },
])
end | Base | 1 |
it 'fails when local logins via email is disabled' do
SiteSetting.enable_local_logins_via_email = false
get "/session/email-login/#{email_token.token}"
expect(response.status).to eq(404)
end | Class | 2 |
def cluster_destroy(params, request, session)
if not allowed_for_local_cluster(session, Permissions::FULL)
return 403, 'Permission denied'
end
out, errout, retval = run_cmd(session, PCS, "cluster", "destroy")
if retval == 0
return [200, "Successfully destroyed cluster"]
else
return [400, "Error destroying cluster:\n#{out}\n#{errout}\n#{retval}\n"]
end
end | Compound | 4 |
def current_user
User.except_hidden.find_by_login(self.user)
end | Class | 2 |
def new
Log.add_info(request, params.inspect)
mail_account_id = params[:mail_account_id]
if mail_account_id.nil? or mail_account_id.empty?
account_xtype = params[:mail_account_xtype]
@mail_account = MailAccount.get_default_for(@login_user.id, account_xtype)
else
@mail_account = MailAccount.find(mail_account_id)
if @mail_account.user_id != @login_user.id
flash[:notice] = 'ERROR:' + t('msg.need_to_be_owner')
render(:partial => 'common/flash_notice', :layout => false)
return
end
end
if $thetis_config[:menu]['disp_user_list'] == '1'
unless params[:to_user_ids].blank?
@email = Email.new
to_addrs = []
@user_obj_cache ||= {}
params[:to_user_ids].each do |user_id|
user = User.find_with_cache(user_id, @user_obj_cache)
user_emails = user.get_emails_by_type(nil)
user_emails.each do |user_email|
disp = EmailsHelper.format_address_exp(user.get_name, user_email, false)
entry_val = "#{disp}" # "#{disp}#{Email::ADDR_ORDER_SEPARATOR}#{user.get_xorder(@group_id)}"
to_addrs << entry_val
end
end
@email.to_addresses = to_addrs.join(Email::ADDRESS_SEPARATOR)
end
end
render(:action => 'edit', :layout => (!request.xhr?))
end | Base | 1 |
def testLoginByToken
users = []
users << {"username" => "user1", "token" => "token1"}
users << {"username" => "user2", "token" => "token2"}
users << {"username" => SUPERUSER, "token" => "tokenS"}
password_file = File.open($user_pass_file, File::RDWR|File::CREAT)
password_file.truncate(0)
password_file.rewind
password_file.write(JSON.pretty_generate(users))
password_file.close()
session = {}
cookies = {}
result = PCSAuth.loginByToken(session, cookies)
assert_equal(false, result)
assert_equal({}, session)
session = {}
cookies = {'token' => 'tokenX'}
result = PCSAuth.loginByToken(session, cookies)
assert_equal(false, result)
assert_equal({}, session)
session = {}
cookies = {'token' => 'token1'}
result = PCSAuth.loginByToken(session, cookies)
assert_equal(true, result)
assert_equal(
{:username => 'user1', :usergroups => ['group1', 'haclient']},
session
)
session = {}
cookies = {
'token' => 'token1',
'CIB_user' => 'userX',
'CIB_user_groups' => PCSAuth.cookieUserEncode('groupX')
}
result = PCSAuth.loginByToken(session, cookies)
assert_equal(true, result)
assert_equal(
{:username => 'user1', :usergroups => ['group1', 'haclient']},
session
)
session = {}
cookies = {'token' => 'tokenS'}
result = PCSAuth.loginByToken(session, cookies)
assert_equal(true, result)
assert_equal(
{:username => SUPERUSER, :usergroups => []},
session
)
session = {}
cookies = {
'token' => 'tokenS',
'CIB_user' => 'userX',
'CIB_user_groups' => PCSAuth.cookieUserEncode('groupX')
}
result = PCSAuth.loginByToken(session, cookies)
assert_equal(true, result)
assert_equal(
{:username => 'userX', :usergroups => ['groupX']},
session
)
end | Compound | 4 |
def self.get_for(user_id)
SqlHelper.validate_token([user_id])
return Research.where("user_id=#{user_id}").first
end | Base | 1 |
it 'should not choke on valueless attributes' do
@s.fragment('foo <a href>foo</a> bar')
.must_equal 'foo <a href="" rel="nofollow">foo</a> bar'
end | Class | 2 |
def cluster_disable(params, request, session)
if params[:name]
code, response = send_request_with_token(
session, params[:name], 'cluster_disable', true
)
else
if not allowed_for_local_cluster(session, Permissions::WRITE)
return 403, 'Permission denied'
end
success = disable_cluster(session)
if not success
return JSON.generate({"error" => "true"})
end
return "Cluster Disabled"
end
end | Compound | 4 |
it "has an empty list of primaries" do
cluster.primaries.should be_empty
end | Class | 2 |
def remote_add_node(params, request, session, all=false)
if not allowed_for_local_cluster(session, Permissions::FULL)
return 403, 'Permission denied'
end
auto_start = false
if params[:auto_start] and params[:auto_start] == "1"
auto_start = true
end
if params[:new_nodename] != nil
node = params[:new_nodename]
if params[:new_ring1addr] != nil
node += ',' + params[:new_ring1addr]
end
retval, output = add_node(session, node, all, auto_start)
end
if retval == 0
return [200, JSON.generate([retval, get_corosync_conf()])]
end
return [400,output]
end | Compound | 4 |
def initialize(name, value = nil, charset = 'utf-8')
case
when name =~ /:/ # Field.new("field-name: field data")
@charset = value.blank? ? charset : value
@name, @value = split(name)
when name !~ /:/ && value.blank? # Field.new("field-name")
@name = name
@value = nil
@charset = charset
else # Field.new("field-name", "value")
@name = name
@value = value
@charset = charset
end | Base | 1 |
def process_preflight(env)
result = Result.preflight(env)
resource, error = match_resource(env)
unless resource
result.miss(error)
return {}
end
return resource.process_preflight(env, result)
end | Base | 1 |
def user_with_viewer_rights_should_fail_to_edit_a_domain
setup_users
get :edit, {:id => Domain.first.id}
assert @response.status == '403 Forbidden'
end | Class | 2 |
def create_workflow
Log.add_info(request, params.inspect)
@tmpl_folder, @tmpl_workflows_folder = TemplatesHelper.get_tmpl_subfolder(TemplatesHelper::TMPL_WORKFLOWS)
@group_id = params[:group_id]
if @group_id.nil? or @group_id.empty?
@group_id = '0' # '0' for ROOT
elsif @group_id == '0'
;
else
group = nil
begin
group = Group.find(@group_id)
rescue
end
if group.nil?
render(:text => 'ERROR:' + t('msg.already_deleted', :name => Group.model_name.human))
return
end
end
unless @tmpl_workflows_folder.nil?
item = Item.new_workflow(@tmpl_workflows_folder.id)
item.title = t('workflow.new')
item.user_id = 0
item.save!
workflow = Workflow.new
workflow.item_id = item.id
workflow.user_id = 0
workflow.status = Workflow::STATUS_NOT_APPLIED
if @group_id == '0'
workflow.groups = nil
else
workflow.groups = '|' + @group_id + '|'
end
workflow.save!
else
Log.add_error(request, nil, '/'+TemplatesHelper::TMPL_ROOT+'/'+TemplatesHelper::TMPL_WORKFLOWS+' NOT found!')
end
render(:partial => 'groups/ajax_group_workflows', :layout => false)
end | Base | 1 |
it "yields all documents in the cursor" do
cursor = Moped::Cursor.allocate
cursor.stub(:to_enum).and_return([1, 2].to_enum)
Moped::Cursor.stub(new: cursor)
query.to_a.should eq [1, 2]
end | Class | 2 |
def piped_category_name(category_id)
return "-" unless category_id
category = Category.find_by(id: category_id)
return "#{category_id}" unless category
categories = [category.name]
while category.parent_category_id && category = category.parent_category
categories << category.name
end
categories.reverse.join("|")
end | Class | 2 |
def properly_encode(fragment, options)
fragment.xml? ? fragment.to_xml(options) : fragment.to_html(options)
end | Base | 1 |
it 'sets file extension based on content-type if missing' do
expect(subject.original_filename).to eq "test.jpeg"
end | Base | 1 |
it "adds the server to the list" do
cluster.sync_server server
cluster.servers.should include server
end | Class | 2 |
it "returns an enumerator" do
cursor = mock(Moped::Cursor)
Moped::Cursor.stub(new: cursor)
query.each.should be_a Enumerator
end | Class | 2 |
def resource_cleanup(params, request, session)
if not allowed_for_local_cluster(session, Permissions::WRITE)
return 403, 'Permission denied'
end
stdout, stderr, retval = run_cmd(
session, PCS, "resource", "cleanup", params[:resource]
)
if retval == 0
return JSON.generate({"success" => "true"})
else
return JSON.generate({"error" => "true", "stdout" => stdout, "stderror" => stderr})
end
end | Compound | 4 |
def private_messages_for(user, type)
options = @options
options.reverse_merge!(per_page: per_page_setting)
result = Topic.includes(:allowed_users)
result = result.includes(:tags) if SiteSetting.tagging_enabled
if type == :group
result = result.joins(
"INNER JOIN topic_allowed_groups tag ON tag.topic_id = topics.id AND tag.group_id IN (SELECT id FROM groups WHERE LOWER(name) = '#{PG::Connection.escape_string(@options[:group_name].downcase)}')"
)
unless user.admin?
result = result.joins("INNER JOIN group_users gu ON gu.group_id = tag.group_id AND gu.user_id = #{user.id.to_i}")
end
elsif type == :user
result = result.where("topics.id IN (SELECT topic_id FROM topic_allowed_users WHERE user_id = #{user.id.to_i})")
elsif type == :all
group_ids = group_with_messages_ids(user)
result = result.joins(<<~SQL)
LEFT JOIN topic_allowed_users tau
ON tau.topic_id = topics.id
AND tau.user_id = #{user.id.to_i}
LEFT JOIN topic_allowed_groups tag
ON tag.topic_id = topics.id
#{group_ids.present? ? "AND tag.group_id IN (#{group_ids.join(",")})" : ""}
SQL
result = result
.where("tag.topic_id IS NOT NULL OR tau.topic_id IS NOT NULL")
.distinct
end
result = result.joins("LEFT OUTER JOIN topic_users AS tu ON (topics.id = tu.topic_id AND tu.user_id = #{user.id.to_i})")
.order("topics.bumped_at DESC")
.private_messages
result = result.limit(options[:per_page]) unless options[:limit] == false
result = result.visible if options[:visible] || @user.nil? || @user.regular?
if options[:page]
offset = options[:page].to_i * options[:per_page]
result = result.offset(offset) if offset > 0
end
result
end | Class | 2 |
def create_event(comment)
Event.create! bug_id: comment.bug_id, kind: 'comment', data: {'comment_id' => comment.id}, user_id: comment.user_id
end | Base | 1 |
def setup
@namespace = "theplaylist"
@store = Redis::Store.new :namespace => @namespace, :marshalling => false # TODO remove mashalling option
@client = @store.instance_variable_get(:@client)
@rabbit = "bunny"
@default_store = Redis::Store.new
@other_namespace = 'other'
@other_store = Redis::Store.new :namespace => @other_namespace
end | Base | 1 |
def stub_jwks
stub_request(:get, 'https://samples.auth0.com/.well-known/jwks.json')
.to_return(
headers: { 'Content-Type' => 'application/json' },
body: jwks.to_json,
status: 200
)
end | Base | 1 |
def check_auth(params, request, session)
if params.include?("check_auth_only")
return [200, "{\"success\":true}"]
end
return JSON.generate({
'success' => true,
'node_list' => get_token_node_list,
})
end | Compound | 4 |
it "does not drop other indexes" do
indexes[age: -1].should_not be_nil
end | Class | 2 |
it "should return :file if the request key is fully qualified" do
@request.expects(:key).returns File.expand_path('/foo')
@object.select_terminus(@request).should == :file
end | Class | 2 |
it "returns a random slave connection" do
secondaries = [server]
cluster.stub(secondaries: secondaries)
secondaries.should_receive(:sample).and_return(server)
cluster.socket_for(:read).should eq socket
end | Class | 2 |
def get_group_users
Log.add_info(request, params.inspect)
@group_id = nil
if !params[:thetisBoxSelKeeper].nil?
@group_id = params[:thetisBoxSelKeeper].split(':').last
elsif !params[:group_id].nil? and !params[:group_id].empty?
@group_id = params[:group_id]
end
@users = Group.get_users @group_id
render(:partial => 'ajax_select_users', :layout => false)
end | Base | 1 |
def test_update_invalid
AuthSourceLdap.any_instance.stubs(:valid?).returns(false)
put :update, {:id => AuthSourceLdap.first, :auth_source_ldap => {:name => AuthSourceLdap.first.name} }, set_session_user
assert_template 'edit'
end | Class | 2 |
def build_query(path, details)
query = @pattern.dup
query.gsub!(/\:prefix(\/)?/, path.prefix.empty? ? "" : "#{path.prefix}\\1") # prefix can be empty...
query.gsub!(/\:action/, path.partial? ? "_#{path.name}" : path.name)
details.each do |ext, variants|
query.gsub!(/\:#{ext}/, "{#{variants.compact.uniq.join(',')}}")
end
File.expand_path(query, @path)
end | Class | 2 |
it "returns default error message for spoofed media type" do
build_validator
file = File.new(fixture_file("5k.png"), "rb")
@dummy.avatar.assign(file)
detector = mock("detector", :spoofed? => true)
Paperclip::MediaTypeSpoofDetector.stubs(:using).returns(detector)
@validator.validate(@dummy)
assert_equal "has an extension that does not match its contents", @dummy.errors[:avatar].first
end | Base | 1 |
def initialize(file, name)
@file = file
@name = name
end | Base | 1 |
def test_parse_bigger
xml = <<-XML.strip_heredoc
<request id="1027" creator="Iggy">
<action type="submit">
<source project="home:Iggy" package="TestPack" rev="1"/>
<target project="kde4" package="mypackage"/>
<options>
<sourceupdate>cleanup</sourceupdate>
</options>
<acceptinfo rev="1" srcmd5="806a6e27ed7915d1bb8d8a989404fd5a" osrcmd5="d41d8cd98f00b204e9800998ecf8427e"/>
</action>
<priority>critical</priority>
<state name="review" who="Iggy" when="2012-11-07T21:13:12">
<comment>No comment</comment>
</state>
<review state="new" when="2017-09-01T09:11:11" by_user="adrian"/>
<review state="new" when="2017-09-01T09:11:11" by_group="test_group"/>
<review state="accepted" when="2012-11-07T21:13:12" who="tom" by_user="tom">
<comment>review1</comment>
</review>
<review state="new" when="2012-11-07T21:13:13" who="tom" by_user="tom">
<comment>please accept</comment>
</review>
<description>Left blank</description>
</request>
XML
req = BsRequest.new_from_xml(xml)
req.save!
# number got increased by one
assert_equal 1027, req.number
newxml = req.render_xml
assert_equal xml, newxml
wi = req.webui_infos(diffs: false)
# iggy is *not* target maintainer
assert_equal false, wi['is_target_maintainer']
assert_equal wi['actions'][0], type: :submit,
sprj: 'home:Iggy',
spkg: 'TestPack',
srev: '1',
tprj: 'kde4',
tpkg: 'mypackage',
name: 'Submit TestPack'
end | Class | 2 |
it "generates the correct messages for a secure topic" do
Jobs.run_immediately!
UserActionManager.enable
admin = Fabricate(:admin)
cat = Fabricate(:category)
cat.set_permissions(admins: :full)
cat.save
created_post = nil
messages = MessageBus.track_publish do
created_post = PostCreator.new(admin, basic_topic_params.merge(category: cat.id)).create
_reply = PostCreator.new(admin, raw: "this is my test reply 123 testing", topic_id: created_post.topic_id).create
end
messages.filter! { |m| m.channel != "/distributed_hash" }
channels = messages.map { |m| m.channel }.sort
# 2 for topic, one to notify of new topic another for tracking state
expect(channels).to eq(
[
"/new",
"/u/#{admin.username}",
"/u/#{admin.username}",
"/unread/#{admin.id}",
"/unread/#{admin.id}",
"/latest",
"/latest",
"/topic/#{created_post.topic_id}",
"/topic/#{created_post.topic_id}",
"/user",
"/user",
"/user"
].sort
)
admin_ids = [Group[:admins].id]
expect(messages.any? { |m| m.group_ids != admin_ids && m.user_ids != [admin.id] }).to eq(false)
end | Class | 2 |
it 'returns success' do
get "/session/email-login/#{email_token.token}"
expect(response).to redirect_to("/")
end | Class | 2 |
def edit
Log.add_info(request, params.inspect)
date_s = params[:date]
if date_s.nil? or date_s.empty?
@date = Date.today
date_s = @date.strftime(Schedule::SYS_DATE_FORM)
else
@date = Date.parse(date_s)
end
if params[:user_id].nil?
@selected_user = @login_user
else
@selected_user = User.find(params[:user_id])
end
@timecard = Timecard.get_for(@selected_user.id, date_s)
if @selected_user == @login_user
@schedules = Schedule.get_user_day(@login_user, @date)
end
if !params[:display].nil? and params[:display].split('_').first == 'group'
@group_id = params[:display].split('_').last
end
end | Base | 1 |
def insert(documents)
documents = [documents] unless documents.is_a? Array
insert = Protocol::Insert.new(database.name, name, documents)
database.session.with(consistency: :strong) do |session|
session.execute insert
end
end | Class | 2 |
it "does not activate user invited via links" do
invite = Fabricate(:invite, email: '[email protected]', emailed_status: Invite.emailed_status_types[:not_required])
user = InviteRedeemer.create_user_from_invite(invite: invite, email: invite.email, username: 'walter', name: 'Walter White')
expect(user.username).to eq('walter')
expect(user.name).to eq('Walter White')
expect(user.email).to eq('[email protected]')
expect(user.approved).to eq(true)
expect(user.active).to eq(false)
end | Class | 2 |
it "returns the socket" do
cluster.stub(:sync) { cluster.servers << server }
cluster.socket_for(:write).should eq socket
end | Class | 2 |
def self.get_tmpl_folder
tmpl_folder = Folder.where("folders.name='#{TMPL_ROOT}'").first
if tmpl_folder.nil?
ary = self.setup_tmpl_folder
unless ary.nil? or ary.empty?
tmpl_folder = ary[0]
tmpl_system_folder = ary[1]
tmpl_workflows_folder = ary[2]
tmpl_local_folder = ary[3]
tmpl_q_folder = ary[4]
end
else
folders = Folder.where("parent_id=#{tmpl_folder.id}").to_a
unless folders.nil?
folders.each do |child|
case child.name
when TMPL_SYSTEM
tmpl_system_folder = child
when TMPL_WORKFLOWS
tmpl_workflows_folder = child
when TMPL_LOCAL
tmpl_local_folder = child
when TMPL_RESEARCH
tmpl_q_folder = child
end
end
end
end | Base | 1 |
it 'avoids xss attacks' do
h = last_response.headers['X-MiniProfiler-Ids']
id = ::JSON.parse(h)[0]
get "/mini-profiler-resources/results?id=%22%3E%3Cqss%3E"
last_response.should_not be_ok
last_response.body.should_not =~ /<qss>/
last_response.body.should =~ /<qss>/
end | Class | 2 |
def Sendmail.call(path, arguments, destinations, mail)
IO.popen("#{path} #{arguments} #{destinations}", "w+") do |io|
io.puts mail.encoded.to_lf
io.flush
end
end | Class | 2 |
def set_sync_options(params, request, session)
if not allowed_for_local_cluster(session, Permissions::FULL)
return 403, 'Permission denied'
end
options = [
'sync_thread_pause', 'sync_thread_resume',
'sync_thread_disable', 'sync_thread_enable',
]
if params.keys.count { |key| options.include?(key) } != 1
return [400, 'Exactly one option has to be specified']
end
if params['sync_thread_disable']
if Cfgsync::ConfigSyncControl.sync_thread_disable($semaphore_cfgsync)
return 'sync thread disabled'
else
return [400, 'sync thread disable error']
end
end
if params['sync_thread_enable']
if Cfgsync::ConfigSyncControl.sync_thread_enable()
return 'sync thread enabled'
else
return [400, 'sync thread enable error']
end
end
if params['sync_thread_resume']
if Cfgsync::ConfigSyncControl.sync_thread_resume()
return 'sync thread resumed'
else
return [400, 'sync thread resume error']
end
end
if params['sync_thread_pause']
if Cfgsync::ConfigSyncControl.sync_thread_pause(
$semaphore_cfgsync, params['sync_thread_pause']
)
return 'sync thread paused'
else
return [400, 'sync thread pause error']
end
end
return [400, 'Exactly one option has to be specified']
end | Compound | 4 |
it "yields all indexes on the collection" do
indexes.to_a.should eq \
session[:"system.indexes"].find(ns: "moped_test.users").to_a
end | Class | 2 |
def test_update_invalid
Medium.any_instance.stubs(:valid?).returns(false)
put :update, {:id => Medium.first, :medium => {:name => nil}}, set_session_user
assert_template 'edit'
end | Class | 2 |
def ajax_delete_items
Log.add_info(request, params.inspect)
folder_id = params[:id]
unless params[:check_item].blank?
is_admin = @login_user.admin?(User::AUTH_ITEM)
count = 0
params[:check_item].each do |item_id, value|
if value == '1'
begin
item = Item.find(item_id)
next if !is_admin and item.user_id != @login_user.id
item.destroy
rescue => evar
Log.add_error(request, evar)
end
count += 1
end
end
flash[:notice] = t('item.deleted', :count => count)
end
get_items
end | Base | 1 |
def self.parse_csv_row(row, book, idxs, user)
imp_id = (idxs[0].nil? or row[idxs[0]].nil?)?(nil):(row[idxs[0]].strip)
unless imp_id.nil? or imp_id.empty?
org_address = Address.find_by_id(imp_id)
end
if org_address.nil?
address = Address.new
else
address = org_address
end
address.id = imp_id
attr_names = [
:name,
:name_ruby,
:nickname,
:screenname,
:email1,
:email2,
:email3,
:postalcode,
:address,
:tel1_note,
:tel1,
:tel2_note,
:tel2,
:tel3_note,
:tel3,
:fax,
:url,
:organization,
:title,
:memo,
:xorder,
:groups,
:teams
]
attr_names.each_with_index do |attr_name, idx|
row_idx = idxs[idx+1]
break if row_idx.nil?
val = (row[row_idx].nil?)?(nil):(row[row_idx].strip)
address.send(attr_name.to_s + '=', val)
end
if (address.groups == Address::EXP_IMP_FOR_ALL) \
or (book == Address::BOOK_COMMON and address.groups.blank? and address.teams.blank?)
address.groups = nil
address.teams = nil
address.owner_id = 0
elsif !address.groups.blank? or !address.teams.blank?
address.owner_id = 0
else
address.owner_id = user.id
end
return address
end | Base | 1 |
def destroy
Log.add_info(request, params.inspect)
return unless request.post?
begin
Item.destroy(params[:id])
rescue => evar
Log.add_error(request, evar)
end
if params[:from_action].nil?
render(:text => params[:id])
else
params.delete(:controller)
params.delete(:action)
params.delete(:id)
flash[:notice] = t('msg.delete_success')
params[:action] = params[:from_action]
redirect_to(params)
end
end | Base | 1 |
def get_nodes_status()
corosync_online = []
corosync_offline = []
pacemaker_online = []
pacemaker_offline = []
pacemaker_standby = []
in_pacemaker = false
stdout, stderr, retval = run_cmd(
PCSAuth.getSuperuserSession, PCS, "status", "nodes", "both"
)
stdout.each {|l|
l = l.chomp
if l.start_with?("Pacemaker Nodes:")
in_pacemaker = true
end
if l.start_with?("Pacemaker Remote Nodes:")
break
end
if l.end_with?(":")
next
end
title,nodes = l.split(/: /,2)
if nodes == nil
next
end
if title == " Online"
in_pacemaker ? pacemaker_online.concat(nodes.split(/ /)) : corosync_online.concat(nodes.split(/ /))
elsif title == " Standby"
if in_pacemaker
pacemaker_standby.concat(nodes.split(/ /))
end
elsif title == " Maintenance"
if in_pacemaker
pacemaker_online.concat(nodes.split(/ /))
end
else
in_pacemaker ? pacemaker_offline.concat(nodes.split(/ /)) : corosync_offline.concat(nodes.split(/ /))
end
}
return {
'corosync_online' => corosync_online,
'corosync_offline' => corosync_offline,
'pacemaker_online' => pacemaker_online,
'pacemaker_offline' => pacemaker_offline,
'pacemaker_standby' => pacemaker_standby,
}
end | Compound | 4 |
it "creates an index with the provided name" do
indexes.create(key, name: "custom_index_name")
indexes[key]["name"].should eq "custom_index_name"
end | Class | 2 |
def default_ids_hash(populate_values = false)
ids = HashWithIndifferentAccess.new
hash_keys.each do |col|
ids[col] = populate_values ? Array(self.send(col)) : []
end
ids
end | Class | 2 |
def ffi_lib(*names)
raise LoadError.new("library names list must not be empty") if names.empty?
lib_flags = defined?(@ffi_lib_flags) ? @ffi_lib_flags : FFI::DynamicLibrary::RTLD_LAZY | FFI::DynamicLibrary::RTLD_LOCAL
ffi_libs = names.map do |name|
if name == FFI::CURRENT_PROCESS
FFI::DynamicLibrary.open(nil, FFI::DynamicLibrary::RTLD_LAZY | FFI::DynamicLibrary::RTLD_LOCAL)
else
libnames = (name.is_a?(::Array) ? name : [ name ]).map { |n| [ n, FFI.map_library_name(n) ].uniq }.flatten.compact
lib = nil
errors = {}
libnames.each do |libname|
begin
orig = libname
lib = FFI::DynamicLibrary.open(libname, lib_flags)
break if lib
rescue Exception => ex
ldscript = false
if ex.message =~ /(([^ \t()])+\.so([^ \t:()])*):([ \t])*(invalid ELF header|file too short|invalid file format)/
if File.read($1) =~ /(?:GROUP|INPUT) *\( *([^ \)]+)/
libname = $1
ldscript = true
end
end
if ldscript
retry
else
# TODO better library lookup logic
libname = libname.to_s
unless libname.start_with?("/")
path = ['/usr/lib/','/usr/local/lib/'].find do |pth|
File.exist?(pth + libname)
end
if path
libname = path + libname
retry
end
end
libr = (orig == libname ? orig : "#{orig} #{libname}")
errors[libr] = ex
end
end
end
if lib.nil?
raise LoadError.new(errors.values.join(".\n"))
end
# return the found lib
lib
end
end
@ffi_libs = ffi_libs
end | Base | 1 |
def next
now = Time.new.to_i
counter = @mutex.synchronize do
last_timestamp, @last_timestamp = @last_timestamp, now
if last_timestamp == now
@counter += 1
else
@counter = 0
end
end
generate(now, counter)
end | Class | 2 |
def self.get_tree_by_group_for_admin(group_id)
SqlHelper.validate_token([group_id])
folder_tree = {}
tree_id = '0'
if group_id.to_s == '0'
sql = 'select distinct * from folders'
where = " where (parent_id = #{tree_id})"
where << " and ((xtype is null) or not(xtype = '#{XTYPE_GROUP}' or xtype = '#{XTYPE_USER}'))"
order_by = ' order by xorder ASC, id ASC'
else
sql = 'select distinct Folder.* from folders Folder, users User'
where = " where (Folder.parent_id = #{tree_id})"
where << ' and ('
where << "(Folder.xtype = '#{XTYPE_GROUP}' and Folder.owner_id = #{group_id})"
where << ' or '
where << "(Folder.xtype = '#{XTYPE_USER}' and Folder.owner_id = User.id and #{SqlHelper.get_sql_like(['User.groups'], "|#{group_id}|")})"
where << ' )'
order_by = ' order by Folder.xorder ASC, Folder.id ASC'
end
sql << where + order_by
folder_tree[tree_id] = Folder.find_by_sql(sql)
folder_tree[tree_id].each do |folder|
folder_tree = Folder.get_tree(folder_tree, nil, folder, true)
end
return Folder.sort_tree(folder_tree)
end | Base | 1 |
def add_acl_role_remote(params, request, session)
if not allowed_for_local_cluster(session, Permissions::GRANT)
return 403, 'Permission denied'
end
retval = add_acl_role(session, params["name"], params["description"])
if retval == ""
return [200, "Successfully added ACL role"]
else
return [
400,
retval.include?("cib_replace failed") ? "Error adding ACL role" : retval
]
end
end | Compound | 4 |
it :test_render_parse_nil_param do
assert_raises(ArgumentError) { parser.parse_memory(nil) }
end | Base | 1 |
def test_jail_classes_should_have_limited_methods
expected = ["new", "methods", "name", "inherited", "method_added", "inspect",
"allow", "allowed?", "allowed_methods", "init_allowed_methods",
"<", # < needed in Rails Object#subclasses_of
"ancestors", "==" # ancestors and == needed in Rails::Generator::Spec#lookup_class
]
objects.each do |object|
assert_equal expected.sort, reject_pretty_methods(object.to_jail.class.methods.map(&:to_s).sort)
end
end | Class | 2 |
def create_cluster(params, request, session)
if not allowed_for_superuser(session)
return 403, 'Permission denied'
end
if set_corosync_conf(params, request, session)
cluster_start(params, request, session)
else
return "Failed"
end
end | Compound | 4 |
it "should escape evil haxxor attemptes" do
Mail.defaults do
delivery_method :sendmail, :arguments => nil
end
mail = Mail.new do
from '"foo\";touch /tmp/PWNED;\""@blah.com'
to '[email protected]'
subject 'invalid RFC2822'
end
Mail::Sendmail.should_receive(:call).with('/usr/sbin/sendmail',
"-f \"\\\"foo\\\\\\\"\\;touch /tmp/PWNED\\;\\\\\\\"\\\"@blah.com\"",
'[email protected]',
mail)
mail.deliver!
end | Class | 2 |
it "with Pathname command" do
cl = subject.build_command_line(Pathname.new("/usr/bin/ruby"))
expect(cl).to eq "/usr/bin/ruby"
end | Base | 1 |
def auth(params, request, session)
token = PCSAuth.validUser(params['username'],params['password'], true)
# If we authorized to this machine, attempt to authorize everywhere
node_list = []
if token and params["bidirectional"]
params.each { |k,v|
if k.start_with?("node-")
node_list.push(v)
end
}
if node_list.length > 0
pcs_auth(
session, node_list, params['username'], params['password'],
params["force"] == "1"
)
end
end
return token
end | Compound | 4 |
def initialize(string)
super("'#{string}' is not a valid object id.")
end | Class | 2 |
it "limits the query" do
session.should_receive(:query) do |query|
query.limit.should eq(-1)
reply
end
session.simple_query(query)
end | Class | 2 |
it "inserts the document" do
session.should_receive(:execute).with do |insert|
insert.documents.should eq [{a: 1}]
end
collection.insert(a: 1)
end | Class | 2 |
it 'returns the right response' do
get "/session/email-login/adasdad"
expect(response.status).to eq(200)
expect(CGI.unescapeHTML(response.body)).to match(
I18n.t('email_login.invalid_token')
)
end | Class | 2 |
def setup
FactoryGirl.create(:host)
end | Class | 2 |
it "sanitizes crazy params" do
cl = subject.build_command_line("true", modified_params)
expect(cl).to eq "true --user bob --pass P@\\$sw0\\^\\&\\ \\|\\<\\>/-\\+\\*d\\% --db --desc=Some\\ Description --symkey --symkey-dash pkg1 some\\ pkg --pool 123 --pool 456"
end | Base | 1 |
it "returns the right category group permissions for an anon user" do
json = described_class.new(category, scope: Guardian.new, root: false).as_json
expect(json[:group_permissions]).to eq([
{ permission_type: CategoryGroup.permission_types[:full], group_name: Group[:everyone]&.name }
])
end | Base | 1 |
def index
@applications = Doorkeeper.config.application_model.authorized_for(current_resource_owner)
respond_to do |format|
format.html
format.json { render json: @applications }
end
end | Class | 2 |
def self.save_sync_new_tokens(config, new_tokens, nodes, cluster_name)
with_new_tokens = PCSTokens.new(config.text)
with_new_tokens.tokens.update(new_tokens)
config_new = PcsdTokens.from_text(with_new_tokens.text)
if not cluster_name or cluster_name.empty?
# we run on a standalone host, no config syncing
config_new.version += 1
config_new.save()
return true, {}
end
# we run in a cluster so we need to sync the config
publisher = ConfigPublisher.new(
PCSAuth.getSuperuserSession(), [config_new], nodes, cluster_name,
new_tokens
)
old_configs, node_responses = publisher.publish()
if not old_configs.include?(config_new.class.name)
# no node had newer tokens file, we are ok, everything done
return true, node_responses
end
# get tokens from all nodes and merge them
fetcher = ConfigFetcher.new(
PCSAuth.getSuperuserSession(), [config_new.class], nodes, cluster_name
)
fetched_tokens = fetcher.fetch_all()[config_new.class.name]
config_new = Cfgsync::merge_tokens_files(config, fetched_tokens, new_tokens)
# and try to publish again
return Cfgsync::save_sync_new_version(
config_new, nodes, cluster_name, true, new_tokens
)
end | Compound | 4 |
def edit_timecard
Log.add_info(request, params.inspect)
date_s = params[:date]
if date_s.nil? or date_s.empty?
@date = Date.today
date_s = @date.strftime(Schedule::SYS_DATE_FORM)
else
@date = Date.parse(date_s)
end
@timecard = Timecard.get_for(@login_user.id, date_s)
render(:partial => 'timecard', :layout => false)
end | Base | 1 |
it "stores whether the connection is direct" do
cluster.direct.should be_true
end | Class | 2 |
def add_constraint_rule_remote(params, request, session)
if not allowed_for_local_cluster(session, Permissions::WRITE)
return 403, 'Permission denied'
end
if params["c_type"] == "loc"
retval, error = add_location_constraint_rule(
session,
params["res_id"], params["rule"], params["score"], params["force"],
!params['disable_autocorrect']
)
else
return [400, "Unknown constraint type: #{params["c_type"]}"]
end
if retval == 0
return [200, "Successfully added constraint"]
else
return [400, "Error adding constraint: #{error}"]
end
end | Compound | 4 |
it "should allow requests that are whitelisted" do
set_cookie("__profilin=stylin")
get '/whitelisted'
last_response.headers['X-MiniProfiler-Ids'].should_not be_nil
end | Class | 2 |
def simple_query(query)
query.limit = -1
query(query).documents.first
end | Class | 2 |
def remove_node(session, new_nodename, all=false)
if all
# we check for a quorum loss warning in remote_remove_nodes
out, stderror, retval = run_cmd(
session, PCS, "cluster", "node", "remove", new_nodename, "--force"
)
else
out, stderror, retval = run_cmd(
session, PCS, "cluster", "localnode", "remove", new_nodename
)
end
$logger.info("Removing #{new_nodename} from pcs_settings.conf")
corosync_nodes = get_corosync_nodes()
pcs_config = PCSConfig.new(Cfgsync::PcsdSettings.from_file('{}').text())
pcs_config.update_cluster($cluster_name, corosync_nodes)
sync_config = Cfgsync::PcsdSettings.from_text(pcs_config.text())
# on version conflict just go on, config will be corrected eventually
# by displaying the cluster in the web UI
Cfgsync::save_sync_new_version(
sync_config, corosync_nodes, $cluster_name, true
)
return retval, out + stderror
end | Compound | 4 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.