Dataset columns (type, observed range):
hexsha: string, lengths 40 to 40
size: int64, 2 to 1.01M
content: string, lengths 2 to 1.01M
avg_line_length: float64, 1.5 to 100
max_line_length: int64, 2 to 1k
alphanum_fraction: float64, 0.25 to 1
0844bec2fbcf6b8bd9170770cb0d2120ebe5ce02
241
class Hash
  def assert_required_keys(*required_keys)
    missing_keys = required_keys.select {|key| !keys.include?(key)}
    raise ArgumentError, "Missing required option(s): #{missing_keys.join(", ")}" unless missing_keys.empty?
  end
end
34.428571
108
0.738589
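A minimal usage sketch for the Hash#assert_required_keys patch above; the option hash and key names are hypothetical, chosen only to show the raise behaviour.

options = { :host => 'example.com', :port => 443 }
options.assert_required_keys(:host, :port)    # passes; both keys are present
options.assert_required_keys(:host, :timeout) # raises ArgumentError: Missing required option(s): timeout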
1afd8a399999d1b1e4d555ae82faef2fb76cccb0
276
begin
  require 'simplecov'
  SimpleCov.start do
    add_filter '/spec/'
  end
rescue LoadError
  warn 'warning: simplecov gem not found; skipping coverage'
end

require 'minitest/autorun'
require 'minitest/pride'
require 'minitest/spec'
require_relative '../lib/foodcritic'
18.4
60
0.757246
62a36d44309f89fb50da3bfe2eaf3b4fa1a44e70
743
log "Creating #{node['tiger_common']['sec_tests_settings_path']}"

[
  node['tiger_common']['sec_tests_settings_path']
].each do |dir_name|
  directory dir_name do
    action :create
    recursive true
  end
end

cookbook_file '/home/tiger/.ssh/known_hosts' do
  source 'known_hosts'
  owner 'tiger'
  group 'tiger'
  mode '0755'
  action :create
end

execute "Changing #{node['tiger_common']['sec_tests_settings_path']} permissions" do
  command "chown -R tiger #{node['tiger_common']['sec_tests_settings_path']}"
end

git node['tiger_common']['sec_tests_settings_path'] do
  revision 'master'
  repository node['tiger_common']['sec_tests_settings_git_path']
  # ssh_wrapper "ssh -i /home/tiger/.ssh/id_rsa"
  user "tiger"
  action :sync
end
23.21875
84
0.732167
1127c385f0b923101a94ecfadef731a9d2b6739e
581
cask 'trickster' do
  if MacOS.release <= :lion
    version '2.1.1'
    sha256 'cddc4a27c3c2a016f86d1688ef9708d3e8c605cfe06302470471309ccdc241db'
  else
    version '2.4.2'
    sha256 'afe5fcf0de994e6a993bf259da564783ed7c0619ad635c665ecf5d3067ba5049'
  end

  url "http://dl.apparentsoft.com/Trickster_#{version}.zip"
  appcast 'http://dl.apparentsoft.com/trickster.rss',
          checkpoint: '0a6c6015159cd8037e6e3f9464d3ed10c9ec05aa3ea063d012913b90be2aa218'
  name 'Trickster'
  homepage 'http://www.apparentsoft.com/trickster/'
  license :commercial

  app 'Trickster.app'
end
30.578947
88
0.765921
b99d39f8d1b0e82bf6a07e2e7418a380bc20d3e0
812
class Kma < Formula
  desc "Align long and short reads to redundant sequence databases"
  homepage "https://bitbucket.org/genomicepidemiology/kma"
  url "https://bitbucket.org/genomicepidemiology/kma/get/1.3.6.zip"
  sha256 "b672b72103bc38977060c8e3f7f158bb7f06ffdc8eabfc4aa72985c250b57c26"

  bottle do
    root_url "https://archive.org/download/brewsci/bottles-bio"
    sha256 cellar: :any_skip_relocation, catalina:     "e8986aca613c9b2c302cf25484d17d1e7f1dfef0d395d9b142f52cda3171fffc"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "95351af4a7d8e9ece5040641c8c53a4e28938f73e966dbd1670743ea1c677c82"
  end

  uses_from_macos "zlib"

  def install
    system "make"
    bin.install %w[kma kma_index kma_shm]
  end

  test do
    assert_match version.to_s, shell_output("#{bin}/kma -v 2>&1")
  end
end
33.833333
121
0.780788
28c3a0fc06b323bddd9b7c1c18d35e52a949b875
286
class UpdateForums < ActiveRecord::Migration
  def self.up
    add_column :forums, :forumable_type, :string
    add_column :forums, :forumable_id, :integer
  end

  def self.down
    remove_column :forums, :forumable_type
    remove_column :forums, :forumable_id
  end
end
22
49
0.706294
4a66a4abd970c40b48e59cd7f77b8b60662bb42f
2,015
describe ManageIQ::Providers::EmbeddedAnsible::Provider do
  subject { FactoryGirl.create(:provider_embedded_ansible) }
  let(:miq_server) { FactoryGirl.create(:miq_server) }

  before do
    EvmSpecHelper.assign_embedded_ansible_role(miq_server)
  end

  it_behaves_like 'ansible provider'

  context "DefaultAnsibleObjects concern" do
    context "with no attributes" do
      %w(organization credential inventory host).each do |obj_name|
        it "#default_#{obj_name} returns nil" do
          expect(subject.public_send("default_#{obj_name}")).to be_nil
        end

        it "#default_#{obj_name}= creates a new custom attribute" do
          subject.public_send("default_#{obj_name}=", obj_name.length)
          expect(subject.default_ansible_objects.find_by(:name => obj_name).value.to_i).to eq(obj_name.length)
        end
      end
    end

    context "with attributes saved" do
      before do
        %w(organization credential inventory host).each do |obj_name|
          subject.default_ansible_objects.create(:name => obj_name, :value => obj_name.length)
        end
      end

      %w(organization credential inventory host).each do |obj_name|
        it "#default_#{obj_name} returns the saved value" do
          expect(subject.public_send("default_#{obj_name}")).to eq(obj_name.length)
        end

        it "#default_#{obj_name}= doesn't create a second object if we pass the same value" do
          subject.public_send("default_#{obj_name}=", obj_name.length)
          expect(subject.default_ansible_objects.where(:name => obj_name).count).to eq(1)
        end
      end
    end

    context "Embedded Ansible role" do
      it "disabled #raw_connect" do
        miq_server.active_roles.delete_all
        expect { described_class.raw_connect('a', 'b', 'c', 'd') }.to raise_exception(StandardError, 'Embedded ansible is disabled')
      end

      it "enabled #raw_connect" do
        expect(described_class.raw_connect('a', 'b', 'c', 'd')).to be_truthy
      end
    end
  end
end
35.350877
132
0.675931
18c14224aaa5c8868e27761ced23f1ab1b374662
1,802
# frozen_string_literal: true

module EE
  module Projects
    module DestroyService
      extend ::Gitlab::Utils::Override

      override :execute
      def execute
        super.tap do
          # It's possible that some error occurred, but at the end of the day
          # if the project is destroyed from the database, we should log events
          # and clean up where we can.
          if project&.destroyed?
            mirror_cleanup(project)
          end
        end
      end

      override :log_destroy_event
      def log_destroy_event
        super

        log_geo_event(project)
        log_audit_event(project)
      end

      def mirror_cleanup(project)
        return unless project.mirror?

        ::Gitlab::Mirror.decrement_capacity(project.id)
      end

      def log_geo_event(project)
        ::Geo::RepositoryDeletedEventStore.new(
          project,
          repo_path: project.disk_path,
          wiki_path: project.wiki.disk_path
        ).create!
      end

      # Removes physical repository in a Geo replicated secondary node
      # There is no need to do any database operation as it will be
      # replicated by itself.
      def geo_replicate
        return unless ::Gitlab::Geo.secondary?

        # Flush the cache for both repositories. This has to be done _before_
        # removing the physical repositories as some expiration code depends on
        # Git data (e.g. a list of branch names).
        flush_caches(project)

        trash_project_repositories!

        log_info("Project \"#{project.name}\" was removed")
      end

      private

      def log_audit_event(project)
        ::AuditEventService.new(
          current_user,
          project,
          action: :destroy
        ).for_project.security_event
      end
    end
  end
end
25.742857
79
0.624306
bb34b536f5d5e7bd063f255ac663b4b0147e482e
1,381
Pod::Spec.new do |s|
  s.name     = "MonkeyBaseWork"
  s.version  = "1.1.1"
  s.summary  = "MonkeyBaseWork Sample Task....."
  s.homepage = "https://github.com/junhaiyang/MonkeyBaseWork"
  s.license  = "MIT"
  s.author   = { "yangjunhai" => "[email protected]" }

  s.ios.deployment_target = "7.0"
  s.ios.framework = 'UIKit'

  s.source = { :git => 'https://github.com/junhaiyang/MonkeyBaseWork.git', :tag => '1.1.1' }
  s.requires_arc = true
  s.source_files = '*.{h,m,mm}'

  s.subspec 'CommonObject' do |ds|
    ds.source_files = 'CommonObject/*.{h,m,mm}'
  end

  s.subspec 'AppObject' do |ds|
    ds.source_files = 'AppObject/*.{h,m,mm}'
  end

  s.subspec 'SBTabBarViewController' do |ds|
    ds.source_files = 'SBTabBarViewController/*.{h,m,mm}'
  end

  s.subspec 'ViewController' do |ds|
    ds.source_files = 'ViewController/*.{h,m,mm}'

    ds.subspec 'KKNavigationController' do |ks|
      ks.source_files = 'ViewController/KKNavigationController/*.{h,m,mm}'
    end
  end

  s.dependency 'RDVTabBarController', '1.1.9'
  s.dependency 'MJRefresh', '3.1.12'
  s.dependency 'Masonry', '1.0.2'
  s.dependency 'DZNEmptyDataSet', '1.8.1'
  s.dependency 'DSAlertView-Blocks', '1.1.2'
  s.dependency 'MBProgressHUD', '1.0.0'
end
22.274194
93
0.583635
e907b9dad08553c4db44b78ae2d8517c03167b1e
1,383
RSpec.describe Event do
  describe 'Validations' do
    it { is_expected.to validate_presence_of(:action) }

    it do
      is_expected.to define_enum_for(:action)
        .with_values([:created, :updated])
    end

    describe '#resource_presence validation' do
      let(:post) { Fabricate(:post) }
      let(:member) { Fabricate(:member) }
      let(:event) { Event.new action: 'created' }

      context 'has no resources' do
        it { expect(event).to_not be_valid }
      end

      context 'has one resource' do
        before { event.post = post }

        it { expect(event).to be_valid }
      end

      context 'has two resources' do
        before { event.post = post; event.member = member }

        it { expect(event).to_not be_valid }
      end
    end
  end

  describe 'Relations' do
    it { is_expected.to belong_to(:post).optional }
    it { is_expected.to belong_to(:member).optional }
    it { is_expected.to belong_to(:transfer).optional }
    it { is_expected.to have_many(:push_notifications) }
    it { is_expected.to have_db_column(:post_id) }
    it { is_expected.to have_db_column(:member_id) }
    it { is_expected.to have_db_column(:transfer_id) }
  end

  describe 'Indexes' do
    it { is_expected.to have_db_index(:post_id) }
    it { is_expected.to have_db_index(:member_id) }
    it { is_expected.to have_db_index(:transfer_id) }
  end
end
28.22449
59
0.651482
01d3e4d1045498c4fc2cf7749cb13ce07b158a28
787
require 'rails_helper'

RSpec.describe 'runs/index' do
  context 'while logged out' do
    before { allow(view).to receive(:current_user).and_return(nil) }

    it 'renders the index template' do
      assign(:example_run, FactoryBot.create(:livesplit16_run, :parsed))
      assign(:example_segment, FactoryBot.create(:segment))

      render

      expect(view).to render_template('runs/index')
    end
  end

  context 'while logged in' do
    before { allow(view).to receive(:current_user).and_return(FactoryBot.build(:user)) }

    it 'renders the index template' do
      assign(:example_run, FactoryBot.create(:livesplit16_run, :parsed))
      assign(:example_segment, FactoryBot.create(:segment))

      render

      expect(view).to render_template('runs/index')
    end
  end
end
28.107143
88
0.700127
7910d00b4d1e5b8993a9dbdb8557544a465564e2
1,082
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::IssueLabelAdded do
  it_behaves_like 'value stream analytics event' do
    let(:label_id) { 10 }
    let(:params) { { label: GroupLabel.new(id: label_id) } }
    let(:expected_hash_code) { Digest::SHA256.hexdigest("#{instance.class.identifier}-#{label_id}") }
  end

  it_behaves_like 'LEFT JOIN-able value stream analytics event' do
    let_it_be(:project) { create(:project) }
    let_it_be(:record_with_data) { create(:issue, project: project) }
    let_it_be(:record_without_data) { create(:issue) }
    let_it_be(:label) { create(:label, project: project) }
    let_it_be(:user) { project.owner }

    let(:params) { { label: label } }

    before(:context) do
      # adding label via the service so the resource_label_events record is populated
      Sidekiq::Worker.skipping_transaction_check do
        Issues::UpdateService.new(project: project, current_user: user, params: { label_ids: [label.id] }).execute(record_with_data)
      end
    end
  end
end
37.310345
132
0.709797
62679f069b29a769f8f377961e309be6f9de66d0
286
FactoryBot.define do
  factory :user do
    sequence(:user_name) { |n| Faker::Internet.user_name + n.to_s }
    first_name { Faker::Name.first_name }
    last_name { Faker::Name.last_name }
    email { Faker::Internet.email }
    display_name { "#{first_name} #{last_name}" }
  end
end
28.6
67
0.671329
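A hypothetical usage sketch for the :user factory above, assuming the surrounding project provides a User model with these columns and loads FactoryBot and Faker in its test setup.

user = FactoryBot.create(:user)         # user_name gets a numeric suffix from the sequence
pair = FactoryBot.create_list(:user, 2) # the sequence keeps generated user_names distinct
puts user.display_name                  # "#{first_name} #{last_name}" built from the Faker values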
28ebf85455d58ece777e4eb7de03581a7646a7ff
469
#!/opt/local/bin/ruby1.9

require 'common'

zk = ZooKeeper.new(:host => "#{ZK_HOST}:#{ZK_PORT}")
wait_until { zk.connected? }

if zk.connected?
  cb = ZooKeeper::DataCallback.new do
    logit "------------------- data callback fired"
    logit "return_code #{cb.return_code}"
    logit "stat #{cb.stat.inspect}"
    sleep 2
  end

  rc = zk.get(:path => ZK_PATH, :callback => cb, :context => 'la')
  wait_until { cb.completed? }
end

zk.close
wait_until { zk.closed? }
21.318182
66
0.622601
87b07dda9f4257303a786744f210c9f298a6717e
579
cask "texmacs" do
  version "1.99.20"
  sha256 "5113622cc423263b01fe45afa07ef05210000f560cf34fd5247063c9838b5172"

  url "https://ftp.texmacs.org/TeXmacs/tmftp/macos/TeXmacs-#{version}.dmg"
  name "GNU TeXmacs"
  desc "Scientific editing platform"
  homepage "https://www.texmacs.org/"

  livecheck do
    url "http://ftp.texmacs.org/TeXmacs/appcast/macos.xml"
    strategy :sparkle
  end

  app "TeXmacs.app"

  zap trash: [
    "~/.TeXmacs",
    "~/Library/Preferences/org.texmacs.TeXmacs.plist",
    "~/Library/Saved Application State/org.texmacs.TeXmacs.savedState",
  ]
end
25.173913
75
0.71848
612c7f88e46fff8a59df56855d3cf122c433ad52
4,103
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local       = false
  config.action_controller.perform_caching = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Mount Action Cable outside main process or domain
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment)
  # config.active_job.queue_adapter     = :resque
  # config.active_job.queue_name_prefix = "sample_app_#{Rails.env}"

  config.action_mailer.perform_caching = false
  config.action_mailer.raise_delivery_errors = true
  config.action_mailer.delivery_method = :smtp
  host = 'sampleappdeployment.herokuapp.com'
  config.action_mailer.default_url_options = { host: host }
  ActionMailer::Base.smtp_settings = {
    :address              => 'smtp.sendgrid.net',
    :port                 => '587',
    :authentication       => :plain,
    :user_name            => ENV['SENDGRID_USERNAME'],
    :password             => ENV['SENDGRID_PASSWORD'],
    :domain               => 'heroku.com',
    :enable_starttls_auto => true
  }

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger           = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger    = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
40.623762
102
0.744577
21fc49e6be95189e7a5baf238598009b44dff5cf
282
class CreateComments < ActiveRecord::Migration
  def change
    create_table :comments do |t|
      t.belongs_to :user, index: true, foreign_key: true
      t.belongs_to :post, index: true, foreign_key: true
      t.string :message

      t.timestamps null: false
    end
  end
end
23.5
56
0.684397
03da97e047b1ce0fa5ca1444f1fe782c0dca9402
2,948
class Sip < Formula desc "Tool to create Python bindings for C and C++ libraries" homepage "https://www.riverbankcomputing.com/software/sip/intro" url "https://www.riverbankcomputing.com/static/Downloads/sip/4.19.24/sip-4.19.24.tar.gz" sha256 "edcd3790bb01938191eef0f6117de0bf56d1136626c0ddb678f3a558d62e41e5" license any_of: ["GPL-2.0-only", "GPL-3.0-only"] head "https://www.riverbankcomputing.com/hg/sip", using: :hg livecheck do url "https://riverbankcomputing.com/software/sip/download" regex(/href=.*?sip[._-]v?(\d+(\.\d+)+)\.t/i) end bottle do cellar :any_skip_relocation sha256 "27b5d4e78eee2d5cba8154d292b334e111a1d1e7c718bde664c352a542e15426" => :catalina sha256 "59a0106736b84dd8f03c720ac425e5608e1bde788ba73ccc923397aa2dbdcef3" => :mojave sha256 "a0f6f7d9f231644e1ab81c3a40de9e4f8afcae06b1d54959613263e84adfa958" => :high_sierra sha256 "6caecc9ce56c128f9392edc79c44325c75436954c698f5dbea7f5fc98c028d6d" => :x86_64_linux end depends_on "[email protected]" def install ENV.prepend_path "PATH", Formula["[email protected]"].opt_bin ENV.delete("SDKROOT") # Avoid picking up /Application/Xcode.app paths if build.head? # Link the Mercurial repository into the download directory so # build.py can use it to figure out a version number. ln_s cached_download/".hg", ".hg" # build.py doesn't run with python3 system "python", "build.py", "prepare" end version = Language::Python.major_minor_version "python3" system "python3", "configure.py", *("--deployment-target=#{MacOS.version}" if OS.mac?), "--destdir=#{lib}/python#{version}/site-packages", "--bindir=#{bin}", "--incdir=#{include}", "--sipdir=#{HOMEBREW_PREFIX}/share/sip", "--sip-module", "PyQt5.sip" system "make" system "make", "install" end def post_install (HOMEBREW_PREFIX/"share/sip").mkpath end test do (testpath/"test.h").write <<~EOS #pragma once class Test { public: Test(); void test(); }; EOS (testpath/"test.cpp").write <<~EOS #include "test.h" #include <iostream> Test::Test() {} void Test::test() { std::cout << "Hello World!" << std::endl; } EOS (testpath/"test.sip").write <<~EOS %Module test class Test { %TypeHeaderCode #include "test.h" %End public: Test(); void test(); }; EOS if OS.mac? system ENV.cxx, "-shared", "-Wl,-install_name,#{testpath}/libtest.dylib", "-o", "libtest.dylib", "test.cpp" else system ENV.cxx, "-fPIC", "-shared", "-Wl,-soname,#{testpath}/libtest.so", "-o", "libtest.so", "test.cpp" end system bin/"sip", "-b", "test.build", "-c", ".", "test.sip" end end
32.395604
94
0.608887
e202d800dabdc9241c4c83e3cf36321dc3747c74
35
module Rp1
  VERSION = "0.1.0"
end
8.75
19
0.628571
2855a596cc24a2cf2ced78cc98c131f88af8febc
1,089
require "./test/test_helper"

class ApplicationRecordTest < ActiveSupport::TestCase
  setup do
    @class_methods = [
      :configure_logging_options,
      :default_log_fields,
      :logging_options,
      :set_logging_callbacks!,
    ]
    @log_create = "PLEASE DO!"
    ApplicationRecord.configure_logging_options do |opts|
      opts[:log_create] = @log_create
    end
  end

  test "has logger class methods" do
    assert @class_methods.all?{|meth| ApplicationRecord.respond_to?(meth)}
  end

  test "has appropriate logging options" do
    assert_equal ApplicationRecord.logging_options, ApplicationRecordLogger.config.merge(log_fields: [], log_create: @log_create)
  end

  test "Has set log_create" do
    assert_equal ApplicationRecord.logging_options[:log_create], @log_create
  end

  test "Change of log_create has not afflicted any other class" do
    assert_not_equal ApplicationRecord.logging_options[:log_create], Invoice.logging_options[:log_create]
    assert_not_equal ApplicationRecord.logging_options[:log_create], User.logging_options[:log_create]
  end
end
31.114286
105
0.752984
f834f37df4cff7e2d6191008513b085625a583b1
2,152
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = true

  # Show full error reports.
  config.consider_all_requests_local = true

  # Enable/disable caching. By default caching is disabled.
  # Run rails dev:cache to toggle caching.
  if Rails.root.join('tmp', 'caching-dev.txt').exist?
    config.action_controller.perform_caching = true

    config.cache_store = :memory_store
    config.public_file_server.headers = {
      'Cache-Control' => "public, max-age=#{2.days.to_i}"
    }
  else
    config.action_controller.perform_caching = false

    config.cache_store = :null_store
  end

  # Don't care if the mailer can't send.
  config.action_mailer.raise_delivery_errors = false

  config.action_mailer.perform_caching = false

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Highlight code that triggered database queries in logs.
  config.active_record.verbose_query_logs = true

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true

  # Suppress logger output for asset requests.
  config.assets.quiet = true

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true

  # Use an evented file watcher to asynchronously detect changes in source code,
  # routes, locales, etc. This feature depends on the listen gem.
  config.file_watcher = ActiveSupport::EventedFileUpdateChecker

  # Telegram setup
  # Bot sessions
  config.telegram_updates_controller.session_store = :memory_store
end
32.606061
85
0.761617
e95d3b1219b9b85ab6d41994e37cd4200502107d
2,156
require 'test_helper' require 'netzke/core_ext' class CoreExtTest < ActiveSupport::TestCase test "recursive delete if nil" do assert_equal({:a => 1, :b => {:c => 4, :d => 5}}, {:a => 1, :aa => nil, :b => {:c => 4, :d => 5, :cc => nil}}.recursive_delete_if_nil) assert_equal({:a => [{:e => 5}, {:f => 7}], :b => {:c => 4, :d => 5}}, {:a => [{:e => 5, :ee => nil},{:f => 7, :ff => nil}], :aa => nil, :b => {:c => 4, :d => 5, :cc => nil}}.recursive_delete_if_nil) assert_equal([ {:a => [{:e => 5}]}, {}, {:b => {:c => 4, :d => 5}} ], [ {:a => [{:e => 5, :ee => nil}]}, {:aa => nil}, {:b => {:c => 4, :d => 5, :cc => nil}} ].recursive_delete_if_nil) end test "convert keys" do assert_equal({:a => 1, :b => {:bb => 2}}, {"a" => 1, "b" => {"bb" => 2}}.deep_convert_keys{ |k| k.to_sym }) assert_equal([{"a" => 1}, {"b" => {"bb" => 2}}], [{:a => 1}, {:b => {:bb => 2}}].deep_convert_keys{ |k| k.to_s }) assert_equal([ {:aB => 1, :cDD => [{:lookMa => true},{:wowNow => true}]} ],[:a_b => 1, :c_d_d => [{:look_ma => true},{:wow_now => true}]].deep_convert_keys{|k| k.to_s.camelize(:lower).to_sym}) end test "jsonify" do assert_equal({:aB => 1, "cD" => [[1, {:eF => "stay_same"}], {"literal_symbol" => :should_not_change, "literal_string".l => "also_should_not"}]}, {:a_b => 1, "c_d" => [[1, {:e_f => "stay_same"}], {:literal_symbol.l => :should_not_change, "literal_string".l => "also_should_not"}]}.jsonify) end test "flatten_with_type" do test_flatten_with_type = { :one => 1, :two => 2.5, :three => { :four => true, :five => { :six => "a string" } } }.flatten_with_type assert_equal(4, test_flatten_with_type.size) test_flatten_with_type.each do |i| assert([{ :name => :one, :value => 1, :type => :Fixnum },{ :name => :two, :value => 2.5, :type => :Float },{ :name => :three__four, :value => true, :type => :Boolean },{ :name => :three__five__six, :value => "a string", :type => :String }].include?(i)) end end end
34.774194
292
0.481911
f850ad3138a031964312490eb18439cb264c7a41
250
class InvoiceBlurb < ActiveRecord::Base
  include SalorScope
  include SalorBase

  belongs_to :vendor
  belongs_to :company

  validates_uniqueness_of :is_header, :scope => [:vendor_id, :lang]
  validates_presence_of :vendor_id, :company_id
end
22.727273
67
0.768
e924fbd7ad53e668ac869e0350a8eb18478be95d
480
class AddSelectorFieldsToFinishes < ActiveRecord::Migration
  def up
    Finish.all.each do |finish|
      finish.selected = false
      finish.save!
    end

    add_column :finishes, :selected_by_id, :integer
    add_foreign_key :finishes, :users, column: :selected_by_id
    add_column :finishes, :selected_at, :datetime
    add_index :finishes, :selected_by_id
  end

  def down
    remove_column :finishes, :selected_by_id
    remove_column :finishes, :selected_at
  end
end
25.263158
62
0.727083
e8e5d64b81d9886b9e65b9a7278a9fcc3ec74e28
2,855
#!/usr/bin/env ruby # # test version inference $:.unshift File.join(File.dirname(__FILE__), '../..', 'lib') require 'libclimate' require 'xqsr3/extensions/test/unit' require 'test/unit' require 'stringio' class Test_Climate_parse_and_verify < Test::Unit::TestCase class VerifyException < RuntimeError; end class MissingRequiredException < VerifyException; end class UnrecognisedArgumentException < VerifyException; end class UnusedArgumentException < VerifyException; end def test_empty_specs_empty_args stdout = StringIO.new stderr = StringIO.new climate = LibCLImate::Climate.new do |cl| cl.stdout = $stdout cl.stderr = $stderr end assert $stdout.equal? climate.stdout assert $stderr.equal? climate.stderr argv = [ ] r = climate.parse_and_verify argv assert_eql climate, r.climate assert_equal 0, r.flags.size assert_equal 0, r.options.size assert_equal 0, r.values.size end def test_one_flag_with_block stdout = StringIO.new stderr = StringIO.new debug = false climate = LibCLImate::Climate.new do |cl| cl.add_flag('--debug', alias: '-d') { debug = true } cl.stdout = $stdout cl.stderr = $stderr end assert $stdout.equal? climate.stdout assert $stderr.equal? climate.stderr argv = [ '-d', ] r = climate.parse_and_verify argv assert_true debug assert_eql climate, r.climate assert_equal 1, r.flags.size assert_equal 0, r.options.size assert_equal 0, r.values.size flag0 = r.flags[0] assert_equal '-d', flag0.given_name assert_equal '--debug', flag0.name end def test_one_option_with_block stdout = StringIO.new stderr = StringIO.new verb = nil climate = LibCLImate::Climate.new do |cl| cl.add_option('--verbosity', alias: '-v') do |o, s| verb = o.value end cl.stdout = $stdout cl.stderr = $stderr end assert $stdout.equal? climate.stdout assert $stderr.equal? climate.stderr argv = [ '-v', 'chatty', ] r = climate.parse_and_verify argv assert_equal 'chatty', verb assert_eql climate, r.climate assert_equal 0, r.flags.size assert_equal 1, r.options.size assert_equal 0, r.values.size option0 = r.options[0] assert_equal '-v', option0.given_name assert_equal '--verbosity', option0.name end def test_one_required_flag_that_is_missing stdout = StringIO.new stderr = StringIO.new climate = LibCLImate::Climate.new do |cl| cl.add_option('--verbosity', alias: '-v', required: true) do |o, s| verb = o.value end cl.stdout = $stdout cl.stderr = $stderr end assert $stdout.equal? climate.stdout assert $stderr.equal? climate.stderr argv = [ ] assert_raise_with_message(MissingRequiredException, /.*verbosity.*not specified/) do climate.parse_and_verify argv, raise_on_required: MissingRequiredException end end end
18.06962
86
0.705429
03197947f099c577df70ca1f7dbe79723b234082
1,829
#
# ==== Standalone <%= module_name %> configuration
#
# This configuration/environment file is only loaded by bin/slice, which can be
# used during development of the slice. It has no effect on this slice being
# loaded in a host application. To run your slice in standalone mode, just
# run 'slice' from its directory. The 'slice' command is very similar to
# the 'merb' command, and takes all the same options, including -i to drop
# into an irb session for example.
#
# The usual Merb configuration directives and init.rb setup methods apply,
# including use_orm and before_app_loads/after_app_loads.
#
# If you need different configurations for different environments you can
# even create the specific environment file in config/environments/ just like
# in a regular Merb application.
#
# In fact, a slice is no different from a normal Merb application - it only
# differs by the fact that it seamlessly integrates into a so called 'host'
# application, which in turn can override or finetune the slice implementation
# code and views.
#

Merb::Config.use do |c|

  # Sets up a custom session id key which is used for the session persistence
  # cookie name. If not specified, defaults to '_session_id'.
  # c[:session_id_key] = '_session_id'

  # The session_secret_key is only required for the cookie session store.
  c[:session_secret_key] = '<%= Digest::SHA1.hexdigest(rand(100000000000).to_s).to_s %>'

  # There are various options here, by default Merb comes with 'cookie',
  # 'memory', 'memcache' or 'container'.
  # You can of course use your favorite ORM instead:
  # 'datamapper', 'sequel' or 'activerecord'.
  c[:session_store] = 'cookie'

  # When running a slice standalone, you're usually developing it,
  # so enable template reloading by default.
  c[:reload_templates] = true

end
42.534884
89
0.739748
036b17f22c27163b87abc072732fd4643aeeb481
1,893
class UsersController < ApplicationController
  before_action :logged_in_user, only: [:index, :edit, :update, :destroy, :following, :followers]
  before_action :correct_user,   only: [:edit, :update]
  before_action :admin_user,     only: :destroy

  def index
    @users = User.paginate(page: params[:page])
  end

  def new
    @user = User.new
  end

  def show
    @user = User.find(params[:id])
    @microposts = @user.microposts.paginate(page: params[:page])
  end

  def create
    @user = User.new(user_params)
    if @user.save
      @user.send_activation_email
      flash[:info] = "Please check your email to activate your account."
      redirect_to root_url
    else
      render 'new'
    end
  end

  def edit
    @user = User.find(params[:id])
  end

  def update
    @user = User.find(params[:id])
    if @user.update(user_params)
      flash[:success] = "Profile updated"
      redirect_to @user
    else
      render 'edit'
    end
  end

  def destroy
    User.find(params[:id]).destroy
    flash[:success] = "User deleted"
    redirect_to users_url
  end

  def following
    @title = "Following"
    @user = User.find(params[:id])
    @users = @user.following.paginate(page: params[:page])
    render 'show_follow'
  end

  def followers
    @title = "Followers"
    @user = User.find(params[:id])
    @users = @user.followers.paginate(page: params[:page])
    render 'show_follow'
  end

  private

  def user_params
    params.require(:user).permit(:name, :email, :password, :password_confirmation)
  end

  # before filters

  # Confirms the correct user.
  def correct_user
    @user = User.find(params[:id])
    redirect_to(root_url) unless current_user?(@user)
  end

  # Confirms an admin user.
  def admin_user
    redirect_to(root_url) unless current_user.admin?
  end
end
22.270588
73
0.618595
e2442d7256289b180fd1d5d5d4dd96b2d26d517d
511
# frozen_string_literal: true

# Be sure to restart your server when you modify this file.

# Avoid CORS issues when API is called from the frontend app.
# Handle Cross-Origin Resource Sharing (CORS) in order to accept cross-origin AJAX requests.

# Read more: https://github.com/cyu/rack-cors

Rails.application.config.middleware.insert_before 0, Rack::Cors do
  allow do
    origins '*'

    resource '*',
             headers: :any,
             methods: %i[get post put patch delete options head]
  end
end
26.894737
92
0.704501
911140313040bcb95c0fd98923cf050822477297
1,194
# == Schema Information
#
# Table name: users
#
#  id                      :bigint(8)        not null, primary key
#  email                   :string           default(""), not null
#  encrypted_password      :string           default(""), not null
#  full_name               :string
#  name                    :string
#  password_digest         :string
#  remember_created_at     :datetime
#  reset_password_sent_at  :datetime
#  reset_password_token    :string
#  status                  :integer
#  created_at              :datetime         not null
#  updated_at              :datetime         not null
#
# Indexes
#
#  index_users_on_reset_password_token  (reset_password_token) UNIQUE
#

class User < ApplicationRecord
  # Include default devise modules. Others available are:
  # :confirmable, :lockable, :timeoutable, :validatable, :trackable, :rememberable, :registerable, :recoverable and :omniauthable
  # devise :database_authenticatable

  has_many :posts, dependent: :destroy
  has_many :weekly_apps, dependent: :destroy
  has_many :apps, dependent: :destroy
  has_many :sources, dependent: :destroy
  has_many :source_reports, dependent: :destroy

  has_secure_password
end
33.166667
131
0.639866
bf916ab908fe367b6fbb530747ca72aa200d6e90
410
require 'rubygems'
require 'gruff'
require File.join(File.dirname(__FILE__), 'new_image.rb')

g = Gruff::Bar.new
g.title = "Bar Graph With Manual Colors"
g.labels = {
  0 => '5/6',
  1 => '5/15',
  2 => '5/24',
  3 => '5/30',
}
g.data(:Art, [0, 5, 8, 15], '#990000')
g.data(:Philosophy, [10, 3, 2, 8], '#009900')
g.data(:Science, [2, 15, 8, 11], '#990099')

g.minimum_value = 0

g.write("gruff_bar_2.jpg")
20.5
57
0.585366
792283f79b160a28d1cd975d213a5dacb1bd30c7
713
#
# Copyright:: Copyright (c) 2016 Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

module Habitat
  class Client
    VERSION = '0.6.0'.freeze
  end
end
31
74
0.741935
910f4f33f3c54b56ae3314cbed22e83185bc52e9
18,672
=begin #NSX-T Manager API #VMware NSX-T Manager REST API OpenAPI spec version: 2.5.1.0.0 Generated by: https://github.com/swagger-api/swagger-codegen.git Swagger Codegen version: 2.4.7 =end require 'date' module NSXT # Registering a Service is the first step in the ServiceInsertion mechanism. A ServiceDefinition is used to create a service. class ServiceDefinition # Link to this resource attr_accessor :_self # The server will populate this field when returing the resource. Ignored on PUT and POST. attr_accessor :_links # Schema for this resource attr_accessor :_schema # The _revision property describes the current revision of the resource. To prevent clients from overwriting each other's changes, PUT operations must include the current _revision of the resource, which clients should obtain by issuing a GET operation. If the _revision provided in a PUT request is missing or stale, the operation will be rejected. attr_accessor :_revision # Indicates system owned resource attr_accessor :_system_owned # Defaults to ID if not set attr_accessor :display_name # Description of this resource attr_accessor :description # Opaque identifiers meaningful to the API user attr_accessor :tags # ID of the user who created this resource attr_accessor :_create_user # Protection status is one of the following: PROTECTED - the client who retrieved the entity is not allowed to modify it. NOT_PROTECTED - the client who retrieved the entity is allowed to modify it REQUIRE_OVERRIDE - the client who retrieved the entity is a super user and can modify it, but only when providing the request header X-Allow-Overwrite=true. UNKNOWN - the _protection field could not be determined for this entity. attr_accessor :_protection # Timestamp of resource creation attr_accessor :_create_time # Timestamp of last modification attr_accessor :_last_modified_time # ID of the user who last modified this resource attr_accessor :_last_modified_user # Unique identifier of this resource attr_accessor :id # The type of this resource. attr_accessor :resource_type # Service Deployment Specification defines takes in information required to deploy and configure a partner appliance/service-vm. attr_accessor :service_deployment_spec # Service capability. attr_accessor :service_capability # The capabilities provided by the services. Needs to be one or more of the following | NG_FW - Next Generation Firewall | IDS_IPS - Intrusion detection System / Intrusion Prevention System | NET_MON - Network Monitoring | HCX - Hybrid Cloud Exchange | BYOD - Bring Your Own Device | EPP - Endpoint Protection.(Third party AntiVirus partners using NXGI should use this functionality for the service) attr_accessor :functionalities # The point at which the service is deployed/attached for redirecting the traffic to the the partner appliance. Attachment Point is required if Service caters to any functionality other than EPP. attr_accessor :attachment_point # ID of the service manager to which this service is attached with. This field is not set during creation of service. This field will be set explicitly when Service Manager is created successfully using this service. attr_accessor :service_manager_id # Id which is unique to a vendor or partner for which the service is created. attr_accessor :vendor_id # Failure policy for the service tells datapath, the action to take i.e to Allow or Block traffic during failure scenarios. For north-south ServiceInsertion, failure policy in the service instance takes precedence. 
For east-west ServiceInsertion, failure policy in the service chain takes precedence. BLOCK is not supported for Endpoint protection (EPP) functionality. attr_accessor :on_failure_policy # Transport Type of the service, which is the mechanism of redirecting the traffic to the the partner appliance. Transport type is required if Service caters to any functionality other than EPP. attr_accessor :transports # This indicates the insertion point of the service i.e whether the service will be used to protect North-South or East-West traffic in the datacenter. attr_accessor :implementations class EnumAttributeValidator attr_reader :datatype attr_reader :allowable_values def initialize(datatype, allowable_values) @allowable_values = allowable_values.map do |value| case datatype.to_s when /Integer/i value.to_i when /Float/i value.to_f else value end end end def valid?(value) !value || allowable_values.include?(value) end end # Attribute mapping from ruby-style variable name to JSON key. def self.attribute_map { :'_self' => :'_self', :'_links' => :'_links', :'_schema' => :'_schema', :'_revision' => :'_revision', :'_system_owned' => :'_system_owned', :'display_name' => :'display_name', :'description' => :'description', :'tags' => :'tags', :'_create_user' => :'_create_user', :'_protection' => :'_protection', :'_create_time' => :'_create_time', :'_last_modified_time' => :'_last_modified_time', :'_last_modified_user' => :'_last_modified_user', :'id' => :'id', :'resource_type' => :'resource_type', :'service_deployment_spec' => :'service_deployment_spec', :'service_capability' => :'service_capability', :'functionalities' => :'functionalities', :'attachment_point' => :'attachment_point', :'service_manager_id' => :'service_manager_id', :'vendor_id' => :'vendor_id', :'on_failure_policy' => :'on_failure_policy', :'transports' => :'transports', :'implementations' => :'implementations' } end # Attribute type mapping. 
def self.swagger_types { :'_self' => :'SelfResourceLink', :'_links' => :'Array<ResourceLink>', :'_schema' => :'String', :'_revision' => :'Integer', :'_system_owned' => :'BOOLEAN', :'display_name' => :'String', :'description' => :'String', :'tags' => :'Array<Tag>', :'_create_user' => :'String', :'_protection' => :'String', :'_create_time' => :'Integer', :'_last_modified_time' => :'Integer', :'_last_modified_user' => :'String', :'id' => :'String', :'resource_type' => :'String', :'service_deployment_spec' => :'ServiceDeploymentSpec', :'service_capability' => :'ServiceCapability', :'functionalities' => :'Array<String>', :'attachment_point' => :'Array<String>', :'service_manager_id' => :'String', :'vendor_id' => :'String', :'on_failure_policy' => :'String', :'transports' => :'Array<String>', :'implementations' => :'Array<String>' } end # Initializes the object # @param [Hash] attributes Model attributes in the form of hash def initialize(attributes = {}) return unless attributes.is_a?(Hash) # convert string to symbol for hash key attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v } if attributes.has_key?(:'_self') self._self = attributes[:'_self'] end if attributes.has_key?(:'_links') if (value = attributes[:'_links']).is_a?(Array) self._links = value end end if attributes.has_key?(:'_schema') self._schema = attributes[:'_schema'] end if attributes.has_key?(:'_revision') self._revision = attributes[:'_revision'] end if attributes.has_key?(:'_system_owned') self._system_owned = attributes[:'_system_owned'] end if attributes.has_key?(:'display_name') self.display_name = attributes[:'display_name'] end if attributes.has_key?(:'description') self.description = attributes[:'description'] end if attributes.has_key?(:'tags') if (value = attributes[:'tags']).is_a?(Array) self.tags = value end end if attributes.has_key?(:'_create_user') self._create_user = attributes[:'_create_user'] end if attributes.has_key?(:'_protection') self._protection = attributes[:'_protection'] end if attributes.has_key?(:'_create_time') self._create_time = attributes[:'_create_time'] end if attributes.has_key?(:'_last_modified_time') self._last_modified_time = attributes[:'_last_modified_time'] end if attributes.has_key?(:'_last_modified_user') self._last_modified_user = attributes[:'_last_modified_user'] end if attributes.has_key?(:'id') self.id = attributes[:'id'] end if attributes.has_key?(:'resource_type') self.resource_type = attributes[:'resource_type'] end if attributes.has_key?(:'service_deployment_spec') self.service_deployment_spec = attributes[:'service_deployment_spec'] end if attributes.has_key?(:'service_capability') self.service_capability = attributes[:'service_capability'] end if attributes.has_key?(:'functionalities') if (value = attributes[:'functionalities']).is_a?(Array) self.functionalities = value end end if attributes.has_key?(:'attachment_point') if (value = attributes[:'attachment_point']).is_a?(Array) self.attachment_point = value end end if attributes.has_key?(:'service_manager_id') self.service_manager_id = attributes[:'service_manager_id'] end if attributes.has_key?(:'vendor_id') self.vendor_id = attributes[:'vendor_id'] end if attributes.has_key?(:'on_failure_policy') self.on_failure_policy = attributes[:'on_failure_policy'] else self.on_failure_policy = 'ALLOW' end if attributes.has_key?(:'transports') if (value = attributes[:'transports']).is_a?(Array) self.transports = value end end if attributes.has_key?(:'implementations') if (value = 
attributes[:'implementations']).is_a?(Array) self.implementations = value end end end # Show invalid properties with the reasons. Usually used together with valid? # @return Array for valid properties with the reasons def list_invalid_properties invalid_properties = Array.new if !@display_name.nil? && @display_name.to_s.length > 255 invalid_properties.push('invalid value for "display_name", the character length must be smaller than or equal to 255.') end if [email protected]? && @description.to_s.length > 1024 invalid_properties.push('invalid value for "description", the character length must be smaller than or equal to 1024.') end if @functionalities.nil? invalid_properties.push('invalid value for "functionalities", functionalities cannot be nil.') end if @vendor_id.nil? invalid_properties.push('invalid value for "vendor_id", vendor_id cannot be nil.') end if @implementations.nil? invalid_properties.push('invalid value for "implementations", implementations cannot be nil.') end invalid_properties end # Check to see if the all the properties in the model are valid # @return true if the model is valid def valid? return false if !@display_name.nil? && @display_name.to_s.length > 255 return false if [email protected]? && @description.to_s.length > 1024 return false if @functionalities.nil? return false if @vendor_id.nil? on_failure_policy_validator = EnumAttributeValidator.new('String', ['ALLOW', 'BLOCK']) return false unless on_failure_policy_validator.valid?(@on_failure_policy) return false if @implementations.nil? true end # Custom attribute writer method with validation # @param [Object] display_name Value to be assigned def display_name=(display_name) if !display_name.nil? && display_name.to_s.length > 255 fail ArgumentError, 'invalid value for "display_name", the character length must be smaller than or equal to 255.' end @display_name = display_name end # Custom attribute writer method with validation # @param [Object] description Value to be assigned def description=(description) if !description.nil? && description.to_s.length > 1024 fail ArgumentError, 'invalid value for "description", the character length must be smaller than or equal to 1024.' end @description = description end # Custom attribute writer method checking allowed values (enum). # @param [Object] on_failure_policy Object to be assigned def on_failure_policy=(on_failure_policy) validator = EnumAttributeValidator.new('String', ['ALLOW', 'BLOCK']) unless validator.valid?(on_failure_policy) fail ArgumentError, 'invalid value for "on_failure_policy", must be one of #{validator.allowable_values}.' end @on_failure_policy = on_failure_policy end # Checks equality by comparing each attribute. 
# @param [Object] Object to be compared def ==(o) return true if self.equal?(o) self.class == o.class && _self == o._self && _links == o._links && _schema == o._schema && _revision == o._revision && _system_owned == o._system_owned && display_name == o.display_name && description == o.description && tags == o.tags && _create_user == o._create_user && _protection == o._protection && _create_time == o._create_time && _last_modified_time == o._last_modified_time && _last_modified_user == o._last_modified_user && id == o.id && resource_type == o.resource_type && service_deployment_spec == o.service_deployment_spec && service_capability == o.service_capability && functionalities == o.functionalities && attachment_point == o.attachment_point && service_manager_id == o.service_manager_id && vendor_id == o.vendor_id && on_failure_policy == o.on_failure_policy && transports == o.transports && implementations == o.implementations end # @see the `==` method # @param [Object] Object to be compared def eql?(o) self == o end # Calculates hash code according to all attributes. # @return [Fixnum] Hash code def hash [_self, _links, _schema, _revision, _system_owned, display_name, description, tags, _create_user, _protection, _create_time, _last_modified_time, _last_modified_user, id, resource_type, service_deployment_spec, service_capability, functionalities, attachment_point, service_manager_id, vendor_id, on_failure_policy, transports, implementations].hash end # Builds the object from hash # @param [Hash] attributes Model attributes in the form of hash # @return [Object] Returns the model itself def build_from_hash(attributes) return nil unless attributes.is_a?(Hash) self.class.swagger_types.each_pair do |key, type| if type =~ /\AArray<(.*)>/i # check to ensure the input is an array given that the the attribute # is documented as an array but the input is not if attributes[self.class.attribute_map[key]].is_a?(Array) self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) }) end elsif !attributes[self.class.attribute_map[key]].nil? 
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]])) end # or else data not found in attributes(hash), not an issue as the data can be optional end self end # Deserializes the data based on type # @param string type Data type # @param string value Value to be deserialized # @return [Object] Deserialized data def _deserialize(type, value) case type.to_sym when :DateTime DateTime.parse(value) when :Date Date.parse(value) when :String value.to_s when :Integer value.to_i when :Float value.to_f when :BOOLEAN if value.to_s =~ /\A(true|t|yes|y|1)\z/i true else false end when :Object # generic object (usually a Hash), return directly value when /\AArray<(?<inner_type>.+)>\z/ inner_type = Regexp.last_match[:inner_type] value.map { |v| _deserialize(inner_type, v) } when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/ k_type = Regexp.last_match[:k_type] v_type = Regexp.last_match[:v_type] {}.tap do |hash| value.each do |k, v| hash[_deserialize(k_type, k)] = _deserialize(v_type, v) end end else # model temp_model = NSXT.const_get(type).new temp_model.build_from_hash(value) end end # Returns the string representation of the object # @return [String] String presentation of the object def to_s to_hash.to_s end # to_body is an alias to to_hash (backward compatibility) # @return [Hash] Returns the object in the form of hash def to_body to_hash end # Returns the object in the form of hash # @return [Hash] Returns the object in the form of hash def to_hash hash = {} self.class.attribute_map.each_pair do |attr, param| value = self.send(attr) next if value.nil? hash[param] = _to_hash(value) end hash end # Outputs non-array value in the form of hash # For object, use to_hash. Otherwise, just return the value # @param [Object] value Any valid value # @return [Hash] Returns the value in the form of hash def _to_hash(value) if value.is_a?(Array) value.compact.map { |v| _to_hash(v) } elsif value.is_a?(Hash) {}.tap do |hash| value.each { |k, v| hash[k] = _to_hash(v) } end elsif value.respond_to? :to_hash value.to_hash else value end end end end
36.683694
508
0.65301
082b28bca36d4feec1fd34b7539987bee5f86344
413
require 'spec_helper'

class Factorial
  include Patme::PatternMatching

  def calculate(n=0)
    1
  end

  def calculate(n)
    n * calculate(n-1)
  end
end

describe Factorial do
  subject{ described_class.new }

  it 'calculates factorial of 0 correctly' do
    expect( subject.calculate(0) ).to eq 1
  end

  it 'calculates factorial of 5 correctly' do
    expect( subject.calculate(5) ).to eq 120
  end
end
15.884615
45
0.699758
87b08903fedc00cb0ab2a85f478bc676c722124b
762
module PageObjects
  module Pages
    module Assignments
      class AssignToTeamMemberPage < PageObjects::Pages::Base
        set_url '/cases/{case_id}/assignments/assign_to_team_member'

        section :primary_navigation, PageObjects::Sections::PrimaryNavigationSection, '.global-nav'

        section :page_heading, PageObjects::Sections::PageHeadingSection, '.page-heading'

        section :team_members,
                :xpath,
                '//fieldset[contains(.,"Choose a new team member for this case")]' do
          elements :users, 'label'
        end

        element :confirm_button, '.button'

        def choose_assignment_user(user)
          make_radio_button_choice "assignment_user_id_#{user.id}"
        end
      end
    end
  end
end
29.307692
108
0.65748
61d1f08305f447981a9db27ebb489552611c0371
448
class MoveEditorAsString < ActiveRecord::Migration
  def self.up
    remove_column :users, :editor
    add_column :users, :editor, :string, :default => 'simple'

    unless $schema_generator
      users = User.find(:all)
      users.each do |user|
        user.editor = 'simple'
        user.save!
      end
    end
  end

  def self.down
    remove_column :users, :editor
    add_column :users, :editor, :integer, :default => 0
  end
end
21.333333
61
0.625
03c4637d2f76fc013eb57ddae14b275a81512ff6
320
# Functions for outputting various types of messages to the user.
module Output
  def self.put_success(text)
    $stdout.puts "\e[0;32m" + text + "\e[0m"
  end

  def self.put_error(text)
    $stderr.puts "\e[0;31m" + text + "\e[0m"
  end

  def self.put_info(text)
    $stdout.puts "\e[0m" + text + "\e[0m"
  end
end
18.823529
65
0.63125
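A small usage sketch for the Output module above; the message strings are made up for illustration.

Output.put_success('Build finished')  # green text on stdout
Output.put_error('Build failed')      # red text on stderr
Output.put_info('Starting build...')  # default-colored text on stdout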
ff8df851ba7e9d8c8bfee2d947bdd1967dbabd7d
709
require "arena/cache/adaptor/generic"

module Arena
  module Cache
    module Adaptor
      class Rails < Arena::Cache::Adaptor::Generic
        def self.method_missing(method, *args, &block)
          # Brittle/ugly but works for the time being
          #
          key = args.present? ? "arena_#{method}_#{args.first}" : "arena_#{method}"

          # Use the top-level ::Rails constant; a bare Rails here resolves to
          # this adaptor class and would recurse through method_missing.
          cache = ::Rails.cache.read(key)

          if cache.nil?
            object = args.present? ? Arena.send(method, args.first) : Arena.send(method)
            ::Rails.cache.write(key, object, expires_in: Arena.expires_in)
            cache = ::Rails.cache.read(key)
          end

          cache
        end
      end
    end
  end
end
26.259259
88
0.564175
2852a42e6f22617c54be9e5e34af9eee5f059936
3,396
#encoding: utf-8 require 'redcarpet_extensions' module ApplicationHelper include Recaptcha::ClientHelper def sitename if current_region.present? "hacken.in/#{current_region.main_slug}" else "hacken.in" end end def truncate_html(html, length=30, opts={}) HTML_Truncator.truncate(html, length, opts).html_safe end def html_unsafe_convert_markdown(markdown_text, without_follow=false) return "" if markdown_text.nil? if without_follow render_class = HTMLwithoutFollow else render_class = MdEmoji::Render end markdown_compiler = Redcarpet::Markdown.new(render_class.new filter_html: false, no_styles: true, safe_links_only: true, no_intra_emphasis: true) markdown_compiler.render(ActionController::Base.helpers.sanitize(markdown_text)) end def convert_markdown(markdown_text, without_follow=false) raw html_unsafe_convert_markdown(markdown_text, without_follow) end def collect_links(item) links = [] if item.url.present? links << { url: item.url, title: truncate(item.url, length: 40) } end if item.twitter.present? links << { url: "http://twitter.com/#{item.twitter}", title: "@#{item.twitter}" } end if item.twitter_hashtag.present? links << { url: "https://twitter.com/search/%23#{CGI.escape item.twitter_hashtag}", title: "##{item.twitter_hashtag}" } end links end def string_for_rule(rule) if rule.instance_of? IceCube::MonthlyRule string_for_monthly_rule(rule) elsif rule.instance_of? IceCube::WeeklyRule string_for_weekly_rule(rule) end end def string_for_monthly_rule(rule) if rule.validations_for(:day_of_week).first.occ == -1 occurrence = "letzten" else occurrence = "#{rule.validations_for(:day_of_week).first.occ}." end "An jedem #{occurrence} #{I18n.t("date.day_names")[rule.validations_for(:day_of_week).first.day]} des Monats" end def string_for_weekly_rule(rule) occurrence = "" if rule.validations_for(:interval).first.interval > 1 occurrence = "#{rule.validations_for(:interval).first.interval}." end "An jedem #{occurrence} #{I18n.t("date.day_names")[rule.validations_for(:day).first.day]}" end def avatar_for_user(user, size = 16, class_name = nil) if user.gravatar_email.present? gravatar_image_tag(user.gravatar_email, title: user.nickname, alt: user.nickname, class: class_name, gravatar: { default: :identicon, size: size }) elsif user.email.present? gravatar_image_tag(user.email, title: user.nickname, alt: user.nickname, class: class_name, gravatar: { default: :identicon, size: size }) else image_tag(user.image_url, width: size, title: user.nickname, alt: user.nickname, class: class_name) end end # ----------------------------------------------------------- # Devise methods (needed if a loginform is displayed on a page # other than the actual login page) # ----------------------------------------------------------- def resource_name :user end def resource @resource ||= User.new end def devise_mapping @devise_mapping ||= Devise.mappings[:user] end # ----------------------------------------------------------- # End of Devise methods # ----------------------------------------------------------- end
29.789474
153
0.644287
d58fbd2d5b85b6ce0e8df97736f4bfcc0126cca1
7,988
require 'errata' require 'remote_table' require 'upsert' class DataMiner class Step # A step that imports data from a remote source. # # Create these by calling +import+ inside a +data_miner+ block. # # @see DataMiner::ActiveRecordClassMethods#data_miner Overview of how to define data miner scripts inside of ActiveRecord models. # @see DataMiner::Script#import Creating an import step by calling DataMiner::Script#import from inside a data miner script # @see DataMiner::Attribute The Attribute class, which maps local columns and remote data fields from within an import step class Import < Step # The mappings of local columns to remote data source fields. # @return [Array<DataMiner::Attribute>] attr_reader :attributes # Description of what this step does. # @return [String] attr_reader :description # Max number of rows to import. # @return [Numeric] attr_reader :limit # Number from zero to one representing what percentage of rows to skip. Defaults to 0, of course :) # @return [Numeric] attr_reader :random_skip # @private attr_reader :listeners # @private def initialize(script, description, settings, &blk) settings = settings.stringify_keys if settings.has_key?('table') raise ::ArgumentError, %{[data_miner] :table is no longer an allowed setting.} end if (errata_settings = settings['errata']) and not errata_settings.is_a?(::Hash) raise ::ArgumentError, %{[data_miner] :errata must be a hash of initialization settings to Errata} end @script = script @attributes = ::ActiveSupport::OrderedHash.new @validate_query = !!settings['validate'] @description = description if settings.has_key? 'errata' errata_settings = settings['errata'].stringify_keys errata_settings['responder'] ||= model settings['errata'] = errata_settings end @table_settings = settings.dup @table_settings['streaming'] = true @table_mutex = ::Mutex.new @limit = settings.fetch 'limit', (1.0/0) @random_skip = settings['random_skip'] @listeners = [] instance_eval(&blk) end # Store data into a model column. # # @see DataMiner::Attribute The actual Attribute class. # # @param [String] attr_name The name of the local model column. # @param [optional, Hash] attr_options Options that will be passed to +DataMiner::Attribute.new+ # @option attr_options [*] anything Any option for +DataMiner::Attribute+. # # @return [nil] def store(attr_name, attr_options = {}, &blk) attr_name = attr_name.to_s if attributes.has_key? attr_name raise "You should only call store or key once for #{model.name}##{attr_name}" end attributes[attr_name] = DataMiner::Attribute.new self, attr_name, attr_options, &blk end # Store data into a model column AND use it as the key. # # @see DataMiner::Attribute The actual Attribute class. # # Enables idempotency. In other words, you can run the data miner script multiple times, get updated data, and not get duplicate rows. # # @param [String] attr_name The name of the local model column. # @param [optional, Hash] attr_options Options that will be passed to +DataMiner::Attribute.new+ # @option attr_options [*] anything Any option for +DataMiner::Attribute+. # # @return [nil] def key(attr_name, attr_options = {}) attr_name = attr_name.to_s if attributes.has_key? attr_name raise "You should only call store or key once for #{model.name}##{attr_name}" end @key = attr_name store attr_name, attr_options end # @private def start upsert_enabled? ? save_with_upsert : save_with_find_or_initialize refresh nil end # @private # Whether to run ActiveRecord validations. Slows things down because Upsert isn't used. def validate? 
@validate_query == true end def register(step) if step.target?(self) listeners << step end end private def upsert_enabled? (not validate?) and (storing_primary_key? or table_has_autoincrementing_primary_key?) end def count_every @count_every ||= ENV.fetch('DATA_MINER_COUNT_EVERY', -1).to_i end def save_with_upsert c = model.connection_pool.checkout attrs_except_key = attributes.except(@key).values count = 0 Upsert.stream(c, model.table_name) do |upsert| table.each do |row| next if random_skip and random_skip > Kernel.rand $stderr.puts "#{count}..." if count_every > 0 and count % count_every == 0 break if count > limit count += 1 selector = @key ? { @key => attributes[@key].read(row) } : { model.primary_key => nil } document = attrs_except_key.inject({}) do |memo, attr| attr.updates(row).each do |k, v| case memo[k] when ::Hash memo[k] = memo[k].merge v else memo[k] = v end end memo end upsert.row selector, document listeners.select! do |listener| listener.notify self, count end end end model.connection_pool.checkin c end def save_with_find_or_initialize count = 0 table.each do |row| next if random_skip and random_skip > Kernel.rand $stderr.puts "#{count}..." if count_every > 0 and count % count_every == 0 break if count > limit count += 1 record = @key ? model.send("find_or_initialize_by_#{@key}", attributes[@key].read(row)) : model.new attributes.each { |_, attr| attr.set_from_row record, row } record.save! listeners.select! do |listener| listener.notify self, count end end end def table_has_autoincrementing_primary_key? return @table_has_autoincrementing_primary_key_query if defined?(@table_has_autoincrementing_primary_key_query) c = model.connection_pool.checkout answer = if (pk = model.primary_key) and model.columns_hash[pk].type == :integer case c.adapter_name when /mysql/i extra = c.select_value %{SELECT EXTRA FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = #{c.quote(c.current_database)} AND TABLE_NAME = #{c.quote(model.table_name)} AND COLUMN_NAME = #{c.quote(pk)}} extra.to_s.include?('auto_increment') when /postgres/i column_default = c.select_value %{SELECT COLUMN_DEFAULT FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = #{c.quote(model.table_name)} AND COLUMN_NAME = #{c.quote(pk)}} column_default.to_s.include?('nextval') when /sqlite/i # FIXME doesn't work # row = c.select_rows("PRAGMA table_info(#{model.quoted_table_name})").detect { |r| r[1] == pk } # row[2] == 'INTEGER' and row[3] == 1 and row[5] == 1 true end end model.connection_pool.checkin c @table_has_autoincrementing_primary_key_query = answer end def storing_primary_key? return @storing_primary_key_query if defined?(@storing_primary_key_query) @storing_primary_key_query = model.primary_key && attributes.has_key?(model.primary_key) end def table @table || @table_mutex.synchronize do @table ||= ::RemoteTable.new(@table_settings) end end def refresh @table = nil nil end end end end
37.327103
213
0.618553
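Since the record above documents the data_miner import DSL (import / key / store), here is a minimal, hypothetical sketch of a model that drives it. The Country class, URL, and column names are invented for illustration and are not part of the record.

# Hypothetical model using the Import step's DSL from the record above.
# The URL and field names are made up; real scripts point at a real remote table.
class Country < ActiveRecord::Base
  data_miner do
    import "a remote CSV of countries", url: 'http://example.com/countries.csv' do
      key   :iso_3166_code, field_name: 'iso 3166 code'   # enables idempotent upserts
      store :name,          field_name: 'country name'
    end
  end
end

# Country.run_data_miner!  # would fetch the table and upsert each row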
f7ccad4c74c7e53f07bff4f9988229c496b43517
1,024
class Configuration < ActiveRecord::Base
  validates_presence_of :name

  class << self
    # This method returns the values of the config simulating a Hash, like:
    # Configuration[:foo]
    # It can also bring Arrays of keys, like:
    # Configuration[:foo, :bar]
    # ... so you can pass it to a method using *.
    # It is memoized, so it will be correctly cached.
    def [] *keys
      if keys.size == 1
        get keys.shift
      else
        keys.map{|key| get key }
      end
    end

    def fetch(key)
      find_by!(name: key).value
    rescue ActiveRecord::RecordNotFound
      raise "No \"#{key}\" configuration defined."
    end

    def []= key, value
      set key, value
    end

    private

    def get key
      find_by(name: key).try(:value)
    end

    def set key, value
      begin
        find_by_name(key).update_attribute :value, value
      rescue
        create!(name: key, value: value)
      end
      Rails.cache.write("/configurations/#{key}", value)
      value
    end
  end
end
22.755556
75
0.599609
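A brief, illustrative use of the Configuration accessors defined in the record above; the key names and values are invented.

Configuration[:site_name] = 'Example'     # upserts the row and writes the cache entry
Configuration[:site_name]                 # => 'Example'
Configuration[:site_name, :tagline]       # => ['Example', nil] (array form of [])
Configuration.fetch(:missing_key)         # raises 'No "missing_key" configuration defined.'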
38dbc2d3cf6689b6a3119cce1d50591d0105429c
281
# -*- encoding: us-ascii -*-

class Exception
  def ==(other)
    other.kind_of?(Exception) &&
      message == other.message &&
      backtrace == other.backtrace
  end

  def to_s
    if @reason_message
      @reason_message.to_s
    else
      self.class.to_s
    end
  end
end
15.611111
34
0.604982
e8d7798e295f1544dfed9a682ba14867ede887fd
40
module Ilovepdf
  VERSION = "1.2.0"
end
10
19
0.675
1c14c2bd7a69851b3ca356d810a4875232d9cf44
3,785
shared_examples_for 'extending a given api template' do describe 'multiple times' do before(:each) do User.api_accessible :public do |t| t.add :first_name end User.api_accessible :for_buddies, extend: :public do |t| t.add :age end User.api_accessible :private, extend: :for_buddies do |t| t.add :last_name end end subject(:response) { @luke.as_api_response(:private) } it 'returns a hash' do expect(response).to be_kind_of(Hash) end it 'returns the correct number of fields' do expect(response).to have(3).keys end it 'returns all specified fields' do expect(response.keys.sort_by(&:to_s)).to eql([:age, :first_name, :last_name]) end it 'returns the correct values for the specified fields' do expect(response.values.sort_by(&:to_s)).to eql([@luke.age, @luke.first_name, @luke.last_name].sort_by(&:to_s)) end end describe 'and removing a former added value' do subject(:response) { @luke.as_api_response(:age_and_first_name) } it 'returns a hash' do expect(response).to be_kind_of(Hash) end it 'returns the correct number of fields' do expect(response).to have(2).keys end it 'returns all specified fields' do expect(response.keys.sort_by(&:to_s)).to eql([:first_name, :age].sort_by(&:to_s)) end it 'returns the correct values for the specified fields' do expect(response.values.sort_by(&:to_s)).to eql([@luke.first_name, @luke.age].sort_by(&:to_s)) end end describe 'extending two templates' do subject(:response) { @luke.as_api_response(:age_and_first_name_and_full_name) } it 'returns a hash' do expect(response).to be_kind_of(Hash) end it 'returns the correct number of fields' do expect(response).to have(3).keys end it 'returns all specified fields' do expect(response.keys.sort_by(&:to_s)).to eql([:first_name, :full_name, :age].sort_by(&:to_s)) end it 'returns the correct values for the specified fields' do expect(response.values.sort_by(&:to_s)).to eql([@luke.first_name, @luke.full_name, @luke.age].sort_by(&:to_s)) end end describe 'and inherit a field using another template name' do before(:each) do Task.acts_as_api Task.api_accessible :other_template do |t| t.add :description t.add :time_spent end User.api_accessible :extending_other_template, extend: :other_sub_template end subject(:response) { @luke.as_api_response(:extending_other_template) } it 'returns a hash' do expect(response).to be_kind_of(Hash) end it 'returns the correct number of fields' do expect(response).to have(2).keys end it 'returns all specified fields' do expect(response.keys).to include(:first_name) end it 'returns the correct values for the specified fields' do expect(response.values).to include(@luke.first_name) end it 'returns all specified fields' do expect(response.keys).to include(:tasks) end it 'returns the correct values for the specified fields' do expect(response[:tasks]).to be_an Array expect(response[:tasks].size).to eq(3) end it 'contains the associated child models with the determined api template' do response[:tasks].each do |task| expect(task.keys).to include(:description, :time_spent) expect(task.keys.size).to eq(2) end end it 'contains the correct data of the child models' do task_hash = [@destroy_deathstar, @study_with_yoda, @win_rebellion].collect { |t| { description: t.description, time_spent: t.time_spent } } expect(response[:tasks]).to eql task_hash end end end
30.039683
145
0.672919
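For readers unfamiliar with acts_as_api, this is a rough sketch of the template definitions the shared examples above exercise; the model setup is illustrative only and mirrors names already used in the spec.

# Illustrative acts_as_api templates matching the spec's before block.
class User < ActiveRecord::Base
  acts_as_api

  api_accessible :public do |t|
    t.add :first_name
  end

  api_accessible :for_buddies, extend: :public do |t|
    t.add :age
  end
end

# @user.as_api_response(:for_buddies)  # => { :first_name => ..., :age => ... }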
6189fb689e0d9b30a9b26b523338460530f6dbd2
267
class CreateInvitations < ActiveRecord::Migration
  def self.up
    create_table :invitations do |t|
      t.integer :organization_id
      t.string :email
      t.string :token
      t.timestamps
    end
  end

  def self.down
    drop_table :invitations
  end
end
16.6875
49
0.674157
e23d4deba024cb498399db021b305325c3fdccb4
4,006
require 'hubspot/utils' module Hubspot # # HubSpot Deals API # # {http://developers.hubspot.com/docs/methods/deals/deals_overview} # class Deal CREATE_DEAL_PATH = "/deals/v1/deal" DEAL_PATH = "/deals/v1/deal/:deal_id" RECENT_UPDATED_PATH = "/deals/v1/deal/recent/modified" UPDATE_DEAL_PATH = '/deals/v1/deal/:deal_id' ASSOCIATE_DEAL_PATH = '/deals/v1/deal/:deal_id/associations/:OBJECTTYPE?id=:objectId' ASSOCIATED_DEAL_PATH = "/deals/v1/deal/associated/:objectType/:objectId" attr_reader :properties attr_reader :portal_id attr_reader :deal_id attr_reader :company_ids attr_reader :vids def initialize(response_hash) @portal_id = response_hash["portalId"] @deal_id = response_hash["dealId"] @company_ids = response_hash["associations"]["associatedCompanyIds"] @vids = response_hash["associations"]["associatedVids"] @properties = Hubspot::Utils.properties_to_hash(response_hash["properties"]) end class << self def create!(portal_id, company_ids, vids, params={}) #TODO: clean following hash, Hubspot::Utils should do the trick associations_hash = {"portalId" => portal_id, "associations" => { "associatedCompanyIds" => company_ids, "associatedVids" => vids}} post_data = associations_hash.merge({ properties: Hubspot::Utils.hash_to_properties(params, key_name: "name") }) response = Hubspot::Connection.post_json(CREATE_DEAL_PATH, params: {}, body: post_data ) new(response) end # Associate a deal with a contact or company # {http://developers.hubspot.com/docs/methods/deals/associate_deal} # Usage # Hubspot::Deal.associate!(45146940, [], [52]) def associate!(deal_id, company_ids=[], vids=[]) objecttype = company_ids.any? ? 'COMPANY' : 'CONTACT' object_ids = (company_ids.any? ? company_ids : vids).join('&id=') Hubspot::Connection.put_json(ASSOCIATE_DEAL_PATH, params: { deal_id: deal_id, OBJECTTYPE: objecttype, objectId: object_ids}, body: {}) end def find(deal_id) response = Hubspot::Connection.get_json(DEAL_PATH, { deal_id: deal_id }) new(response) end # Find recent updated deals. # {http://developers.hubspot.com/docs/methods/deals/get_deals_modified} # @param count [Integer] the amount of deals to return. # @param offset [Integer] pages back through recent contacts. def recent(opts = {}) response = Hubspot::Connection.get_json(RECENT_UPDATED_PATH, opts) response['results'].map { |d| new(d) } end # Find all deals associated to a company # {http://developers.hubspot.com/docs/methods/deals/get-associated-deals} # @param company [Hubspot::Company] the company # @return [Array] Array of Hubspot::Deal records def find_by_company(company) path = ASSOCIATED_DEAL_PATH params = { objectType: :company, objectId: company.vid } response = Hubspot::Connection.get_json(path, params) response["results"].map { |deal_id| find(deal_id) } end end # Archives the contact in hubspot # {https://developers.hubspot.com/docs/methods/contacts/delete_contact} # @return [TrueClass] true def destroy! Hubspot::Connection.delete_json(DEAL_PATH, {deal_id: deal_id}) @destroyed = true end def destroyed? !!@destroyed end def [](property) @properties[property] end # Updates the properties of a deal # {https://developers.hubspot.com/docs/methods/deals/update_deal} # @param params [Hash] hash of properties to update # @return [Hubspot::Deal] self def update!(params) query = {"properties" => Hubspot::Utils.hash_to_properties(params.stringify_keys!, key_name: 'name')} Hubspot::Connection.put_json(UPDATE_DEAL_PATH, params: { deal_id: deal_id }, body: query) @properties.merge!(params) self end end end
37.439252
143
0.670245
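A hedged usage sketch for the Hubspot::Deal wrapper in the record above; all IDs, property names, and values are fabricated.

# Illustrative calls against the Deal class above (IDs are invented).
deal = Hubspot::Deal.create!(62515, [8954037], [27136], dealname: 'Example deal', amount: 30)
Hubspot::Deal.associate!(deal.deal_id, [], [10978])   # attach a contact by vid
deal = Hubspot::Deal.find(deal.deal_id)
deal['amount']                                        # read a property
deal.update!(amount: 45)
deal.destroy!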
ede8312e19e3b5d04d89f7044acb27e16c2aa2c5
5,815
=begin #convertapi #Convert API lets you effortlessly convert file formats and types. OpenAPI spec version: v1 Generated by: https://github.com/swagger-api/swagger-codegen.git Swagger Codegen version: 2.4.14 =end require 'date' module CloudmersiveConvertApiClient # Input to Create a new Spreadsheet request class CreateSpreadsheetFromDataRequest # Optional; The new Spreadsheet will have a default Worksheet in it; supply a name, or if left empty, will default to Worksheet1 attr_accessor :worksheet_name # Required; Rows and cells to populate the spreadsheet with attr_accessor :rows # Attribute mapping from ruby-style variable name to JSON key. def self.attribute_map { :'worksheet_name' => :'WorksheetName', :'rows' => :'Rows' } end # Attribute type mapping. def self.swagger_types { :'worksheet_name' => :'String', :'rows' => :'Array<XlsxSpreadsheetRow>' } end # Initializes the object # @param [Hash] attributes Model attributes in the form of hash def initialize(attributes = {}) return unless attributes.is_a?(Hash) # convert string to symbol for hash key attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v } if attributes.has_key?(:'WorksheetName') self.worksheet_name = attributes[:'WorksheetName'] end if attributes.has_key?(:'Rows') if (value = attributes[:'Rows']).is_a?(Array) self.rows = value end end end # Show invalid properties with the reasons. Usually used together with valid? # @return Array for valid properties with the reasons def list_invalid_properties invalid_properties = Array.new invalid_properties end # Check to see if the all the properties in the model are valid # @return true if the model is valid def valid? true end # Checks equality by comparing each attribute. # @param [Object] Object to be compared def ==(o) return true if self.equal?(o) self.class == o.class && worksheet_name == o.worksheet_name && rows == o.rows end # @see the `==` method # @param [Object] Object to be compared def eql?(o) self == o end # Calculates hash code according to all attributes. # @return [Fixnum] Hash code def hash [worksheet_name, rows].hash end # Builds the object from hash # @param [Hash] attributes Model attributes in the form of hash # @return [Object] Returns the model itself def build_from_hash(attributes) return nil unless attributes.is_a?(Hash) self.class.swagger_types.each_pair do |key, type| if type =~ /\AArray<(.*)>/i # check to ensure the input is an array given that the attribute # is documented as an array but the input is not if attributes[self.class.attribute_map[key]].is_a?(Array) self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) }) end elsif !attributes[self.class.attribute_map[key]].nil? 
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]])) end # or else data not found in attributes(hash), not an issue as the data can be optional end self end # Deserializes the data based on type # @param string type Data type # @param string value Value to be deserialized # @return [Object] Deserialized data def _deserialize(type, value) case type.to_sym when :DateTime DateTime.parse(value) when :Date Date.parse(value) when :String value.to_s when :Integer value.to_i when :Float value.to_f when :BOOLEAN if value.to_s =~ /\A(true|t|yes|y|1)\z/i true else false end when :Object # generic object (usually a Hash), return directly value when /\AArray<(?<inner_type>.+)>\z/ inner_type = Regexp.last_match[:inner_type] value.map { |v| _deserialize(inner_type, v) } when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/ k_type = Regexp.last_match[:k_type] v_type = Regexp.last_match[:v_type] {}.tap do |hash| value.each do |k, v| hash[_deserialize(k_type, k)] = _deserialize(v_type, v) end end else # model temp_model = CloudmersiveConvertApiClient.const_get(type).new temp_model.build_from_hash(value) end end # Returns the string representation of the object # @return [String] String presentation of the object def to_s to_hash.to_s end # to_body is an alias to to_hash (backward compatibility) # @return [Hash] Returns the object in the form of hash def to_body to_hash end # Returns the object in the form of hash # @return [Hash] Returns the object in the form of hash def to_hash hash = {} self.class.attribute_map.each_pair do |attr, param| value = self.send(attr) next if value.nil? hash[param] = _to_hash(value) end hash end # Outputs non-array value in the form of hash # For object, use to_hash. Otherwise, just return the value # @param [Object] value Any valid value # @return [Hash] Returns the value in the form of hash def _to_hash(value) if value.is_a?(Array) value.compact.map { |v| _to_hash(v) } elsif value.is_a?(Hash) {}.tap do |hash| value.each { |k, v| hash[k] = _to_hash(v) } end elsif value.respond_to? :to_hash value.to_hash else value end end end end
29.221106
132
0.624592
284f4dc14db4d0881f9fee12466450ea1739efaf
1,833
# global_miles_airline_api
#
# This file was automatically generated by APIMATIC v2.0
# ( https://apimatic.io ).

module GlobalMilesAirlineApi
  # A complex object for representing a minimal passenger information.
  class PassengerMin < BaseModel
    # First name of the passenger. Maximum length is 40 characters.
    # @return [String]
    attr_accessor :first_name

    # Last name of the passenger. Maximum length is 40 characters.
    # @return [String]
    attr_accessor :last_name

    # Mobile phone number of the passenger. It is a unique identifier for
    # passengers in the system. The format is E.164.
    # @return [String]
    attr_accessor :mobile

    # Email address of the passenger.
    # @return [String]
    attr_accessor :email

    # A mapping from model property names to API property names.
    def self.names
      @_hash = {} if @_hash.nil?
      @_hash['first_name'] = 'first_name'
      @_hash['last_name'] = 'last_name'
      @_hash['mobile'] = 'mobile'
      @_hash['email'] = 'email'
      @_hash
    end

    def initialize(first_name = nil,
                   last_name = nil,
                   mobile = nil,
                   email = nil)
      @first_name = first_name
      @last_name = last_name
      @mobile = mobile
      @email = email
    end

    # Creates an instance of the object from a hash.
    def self.from_hash(hash)
      return nil unless hash

      # Extract variables from the hash.
      first_name = hash['first_name']
      last_name = hash['last_name']
      mobile = hash['mobile']
      email = hash['email']

      # Create object from extracted values.
      PassengerMin.new(first_name,
                       last_name,
                       mobile,
                       email)
    end
  end
end
28.640625
74
0.592471
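An illustrative round trip through the generated PassengerMin model above; the passenger values are fabricated.

passenger = GlobalMilesAirlineApi::PassengerMin.from_hash(
  'first_name' => 'Jane',
  'last_name'  => 'Doe',
  'mobile'     => '+15551234567',
  'email'      => nil
)
passenger.first_name                       # => "Jane"
GlobalMilesAirlineApi::PassengerMin.names  # => model-property to API-property mapping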
7a58bc4c4ad26ca50df089004a5b83463abf4180
1,036
require "spec_helper"

describe Spree::LoyaltyPointsDebitTransaction do

  let(:loyalty_points_debit_transaction) { build(:loyalty_points_debit_transaction) }

  it "is valid with valid attributes" do
    expect(loyalty_points_debit_transaction).to be_valid
  end

  describe 'update_user_balance' do

    it "should decrement user's loyalty_points_balance" do
      expect {
        loyalty_points_debit_transaction.send(:update_user_balance)
      }.to change{ loyalty_points_debit_transaction.user.loyalty_points_balance }.by(-loyalty_points_debit_transaction.loyalty_points)
    end

  end

  describe 'update_balance' do

    let(:user_balance) { 300 }

    before :each do
      allow(loyalty_points_debit_transaction.user).to receive(:loyalty_points_balance).and_return(user_balance)
      loyalty_points_debit_transaction.send(:update_balance)
    end

    it "should set balance" do
      expect(loyalty_points_debit_transaction.balance).to eq(user_balance - loyalty_points_debit_transaction.loyalty_points)
    end

  end

end
28.777778
134
0.779923
b92c7b63816d6cc9b3a59704c2515874d1bda9fd
113
# frozen_string_literal: true

class Paragraph < ActiveRecord::Base
  belongs_to :section
  has_paper_trail
end
14.125
36
0.79646
2104477216a7793985238841f99ff03d4f5b99c8
353
module Universal
  module Concerns
    module Polymorphic
      extend ActiveSupport::Concern

      included do
        belongs_to :subject, polymorphic: true

        index subject_type: 1
        index subject_id: 1

        scope :for_subject, ->(subject){where(subject_type: subject.class.to_s, subject_id: subject.id)}
      end
    end
  end
end
20.764706
104
0.665722
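A sketch of how the Polymorphic concern above might be mixed into a Mongoid-style document; the Note class and its usage are hypothetical.

# Hypothetical model including the concern from the record above.
class Note
  include Mongoid::Document
  include Universal::Concerns::Polymorphic
end

# Note.for_subject(some_user)  # => notes whose subject_type/subject_id point at some_user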
62762da36209f1ff942b21ac653e91bf65753575
611
class BasePresenter
  include ERB::Util

  attr_reader :object, :view_context

  def self.presenter(object)
    "#{object.class}Presenter".constantize
  end

  def self.presents(name)
    define_method(name) { object }
  end

  def initialize(object, view_context)
    @object = object
    @view_context = view_context
  end

  private

  def respond_to_missing?(method, _include_private = false)
    view_context.respond_to?(method)
  end

  def method_missing(method, *args, &block)
    if view_context.respond_to?(method)
      view_context.send(method, *args, &block)
    else
      super
    end
  end
end
18.515152
59
0.702128
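A hypothetical subclass showing the pattern BasePresenter above is built for: presents aliases the wrapped object, and unknown methods fall through to the view context.

# Illustrative presenter subclass (UserPresenter and its fields are invented).
class UserPresenter < BasePresenter
  presents :user

  def display_name
    h(user.name)   # h comes from ERB::Util
  end
end

# In a view or controller:
#   UserPresenter.new(@user, view_context).display_name
#   BasePresenter.presenter(@user)  # => UserPresenter (constantized from the class name)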
084402e2c9dbc9e6994227de1968780b72867876
866
# (C) Copyright 2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

require 'spec_helper'

klass = OneviewSDK::API500::Synergy::Event
RSpec.describe klass, integration: true, type: CREATE, sequence: seq(klass) do
  let(:current_client) { $client_500_synergy }
  include_examples 'EventCreateExample', 'integration api500 context'
end
45.578947
85
0.779446
87197dbc61761672a01071217cd9695d427b4e1d
92
Pushover.configure do |config|
  config.token = Lobsters::Config[:pushover][:api_token]
end
23
56
0.76087
030ef244af7b6c991bdf9fcf1335e89d1a319cf3
906
os_version = node[:platform_version].split('.')[0].to_i

case node[:platform]
when "centos", "redhat"
  if os_version >= 7
    execute "timedatectl --no-ask-password set-timezone #{node[:tz]}"
  else
    template '/etc/sysconfig/clock' do
      source 'clock.erb'
      owner 'root'
      group 'root'
      mode 0644
    end

    execute 'update' do
      command '/usr/sbin/tzdata-update'
      action :nothing
      only_if { ::File.executable?('/usr/sbin/tzdata-update') }
    end
  end
when "amazon"
  if os_version >= 2020
    execute "timedatectl --no-ask-password set-timezone #{node[:tz]}"
  else
    template '/etc/sysconfig/clock' do
      source 'clock.erb'
      owner 'root'
      group 'root'
      mode 0644
    end

    script "update-tz" do
      interpreter "bash"
      user "root"
      code <<-"EOS"
        cp /usr/share/zoneinfo/#{node[:tz]} /etc/localtime
      EOS
    end
  end
end
23.842105
69
0.60596
abda2a32e7ebb71d99198d79f103aec6743083f8
4,628
# # Cookbook:: syslog_ng # Spec:: filter_helpers_spec # # Copyright:: Ben Hughes <[email protected]> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require 'spec_helper' describe 'SyslogNg::Cookbook::FilterHelpers' do let(:dummy_class) { Class.new { include SyslogNg::Cookbook::FilterHelpers } } describe 'filter_builder' do context('given basic filter') do param = { 'facility' => 'kern', } it 'returns valid config string' do expect(dummy_class.new.filter_builder(param)).to be_a(String) expect(dummy_class.new.filter_builder(param)).to eql('facility(kern)') end end context('given complex filter') do param = { 'level' => 'info..emerg', 'and_not' => { 'container' => { 'operator' => 'or', 'facility' => %w(mail authpriv cron), }, }, } it 'returns valid config string' do expect(dummy_class.new.filter_builder(param)).to be_a(String) expect(dummy_class.new.filter_builder(param)).to eql('level(info..emerg) and not (facility(mail) or facility(authpriv) or facility(cron))') end end context('given inplicit _and_ filter') do param = { 'container' => { 'facility' => %w(mail authpriv cron), }, } it 'returns valid config string with _and_ present' do expect(dummy_class.new.filter_builder(param)).to be_a(String) expect(dummy_class.new.filter_builder(param)).to eql('(facility(mail) and facility(authpriv) and facility(cron))') end end context('given contained string') do param = { 'container' => { 'facility' => 'mail', }, } it 'returns valid config string with _and_ present' do expect(dummy_class.new.filter_builder(param)).to be_a(String) expect(dummy_class.new.filter_builder(param)).to eql('(facility(mail))') end end context('given multiple contained strings') do param = { 'container_outside' => { 'operator' => 'and', 'container_1' => { 'facility' => 'mail', }, 'container_2' => { 'facility' => 'cron', }, }, } it 'returns valid config string with _and_ present' do expect(dummy_class.new.filter_builder(param)).to be_a(String) expect(dummy_class.new.filter_builder(param)).to eql('((facility(mail)) and (facility(cron)))') end end context('given contained integer') do param = { 'container_outside' => { 'operator' => 'and', 'container_1' => { 'port' => 514, }, }, } it 'raises RuntimeError' do expect { dummy_class.new.filter_builder(param) }.to raise_exception(RuntimeError) end end context('given hash array key') do param = { 'facility' => %w(mail authpriv cron), } it 'returns valid config string' do expect(dummy_class.new.filter_builder(param)).to be_a(String) expect(dummy_class.new.filter_builder(param)).to eql('facility(mail) facility(authpriv) facility(cron)') end end context('given array') do param = [ 'facility(mail)', 'facility(authpriv)', 'facility(cron)', ] it 'returns valid config string' do expect(dummy_class.new.filter_builder(param)).to be_a(String) expect(dummy_class.new.filter_builder(param)).to eql('facility(mail) facility(authpriv) facility(cron)') end end context('given string') do param = 'level(emerg)' it 'returns valid config string' do expect(dummy_class.new.filter_builder(param)).to be_a(String) 
expect(dummy_class.new.filter_builder(param)).to eql('level(emerg)') end end context('invalid filter') do param = { 'invalidfilter': 'bollocks', } it 'raises ArgumentError' do expect { dummy_class.new.filter_builder(param) }.to raise_exception(ArgumentError) end end end end
29.666667
147
0.611711
bf12319fd0ba631f9702c76a1a058a79017ccb3c
11,178
class User < ActiveRecord::Base acts_as_authentic do |config| config.validates_uniqueness_of_email_field_options = {if: -> { false }} # Don't validate email uniqueness config.crypto_provider = Authlogic::CryptoProviders::Sha1 Authlogic::CryptoProviders::Sha1.join_token = '' Authlogic::CryptoProviders::Sha1.stretches = 1 end has_many :participants, class_name: 'Participant', foreign_key: 'user_id', dependent: :destroy has_many :assignment_participants, class_name: 'AssignmentParticipant', foreign_key: 'user_id', dependent: :destroy has_many :assignments, through: :participants has_many :bids, dependent: :destroy has_many :teams_users, dependent: :destroy has_many :teams, through: :teams_users has_many :sent_invitations, class_name: 'Invitation', foreign_key: 'from_id', dependent: :destroy has_many :received_invitations, class_name: 'Invitation', foreign_key: 'to_id', dependent: :destroy has_many :children, class_name: 'User', foreign_key: 'parent_id' belongs_to :parent, class_name: 'User' belongs_to :role validates_presence_of :name validates_uniqueness_of :name validates_presence_of :email, message: "can't be blank" validates_format_of :email, with: /\A[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,4}\z/i, allow_blank: true before_validation :randomize_password, if: ->(user) { user.new_record? && user.password.blank? } # AuthLogic after_create :email_welcome scope :superadministrators, -> { where role_id: Role.superadministrator } scope :superadmins, -> { superadministrators } scope :administrators, -> { where role_id: Role.administrator } scope :admins, -> { administrators } scope :instructors, -> { where role_id: Role.instructor } scope :tas, -> { where role_id: Role.ta } scope :students, -> { where role_id: Role.student } has_paper_trail def salt_first? true end def list_mine(object_type, user_id) object_type.where(["instructor_id = ?", user_id]) end def get_available_users(name) lesser_roles = role.get_parents all_users = User.all(conditions: ['name LIKE ?', "#{name}%"], limit: 20) # higher limit, since we're filtering visible_users = all_users.select {|user| lesser_roles.include? user.role } visible_users[0, 10] # the first 10 end def can_impersonate?(user) return true if self.role.super_admin? return true if self.is_teaching_assistant_for?(user) return true if self.is_recursively_parent_of(user) false end def is_recursively_parent_of(user) p = user.parent return false if p.nil? return true if p == self return false if p.role.super_admin? self.is_recursively_parent_of(p) end def get_user_list user_list = [] # If the user is a super admin, fetch all users if self.role.super_admin? User.all.find_each do |user| user_list << user end end # If the user is an instructor, fetch all users in his course/assignment if self.role.instructor? participants = [] Course.where(instructor_id: self.id).find_each do |course| participants << course.get_participants end Assignment.where(instructor_id: self.id).find_each do |assignment| participants << assignment.participants end participants.each do |p_s| next if p_s.empty? p_s.each do |p| user_list << p.user if self.role.hasAllPrivilegesOf(p.user.role) end end end # If the user is a TA, fetch all users in his courses if self.role.ta? courses = Ta.get_mapped_courses(self.id) participants = [] courses.each do |course_id| course = Course.find(course_id) participants << course.get_participants end participants.each do |p_s| next if p_s.empty? 
p_s.each do |p| user_list << p.user if self.role.hasAllPrivilegesOf(p.user.role) end end end # Add the children to the list unless self.role.super_admin? User.all.find_each do |u| if is_recursively_parent_of(u) user_list << u unless user_list.include?(u) end end end user_list.uniq end def first_name fullname.try(:[], /,.+/).try(:[], /\w+/) || '' end def super_admin? role.name == 'Super-Administrator' end delegate :admin?, to: :role delegate :student?, to: :role def is_creator_of?(user) self == user.creator end # Function which has a MailerHelper which sends the mail welcome email to the user after signing up def email_welcome MailerHelper.send_mail_to_user(self, "Your Expertiza password has been created", "user_welcome", password) end def valid_password?(password) Authlogic::CryptoProviders::Sha1.stretches = 1 Authlogic::CryptoProviders::Sha1.matches?(crypted_password, *[self.password_salt.to_s + password]) end # Resets the password to be mailed to the user def reset_password randomize_password save password end def self.import(row, _row_header, session, _id = nil) if row.length != 3 raise ArgumentError, "Not enough items: expect 3 columns: your login name, your full name (first and last name, not seperated with the delimiter), and your email." end user = User.find_by_name(row[0]) if user.nil? attributes = ImportFileHelper.define_attributes(row) user = ImportFileHelper.create_new_user(attributes, session) password = user.reset_password # the password is reset MailerHelper.send_mail_to_user(user, "Your Expertiza account has been created.", "user_welcome", password).deliver else user.email = row[2].strip user.fullname = row[1].strip user.parent_id = (session[:user]).id user.save end end def self.yesorno(elt) if elt == true "yes" elsif elt == false "no" else "" end end # locate User based on provided login. # If user supplies e-mail or name, the # helper will try to find that User account. def self.find_by_login(login) user = User.find_by_email(login) if user.nil? items = login.split("@") shortName = items[0] userList = User.where ["name =?", shortName] user = userList.first if !userList.nil? && userList.length == 1 end user end def set_instructor(new_assign) new_assign.instructor_id = self.id end def get_instructor self.id end def instructor_id case role.name when 'Super-Administrator' then id when 'Administrator' then id when 'Instructor' then id when 'Teaching Assistant' then Ta.get_my_instructor(id) else raise NotImplementedError.new "for role #{role.name}" end end # generate a new RSA public/private key pair and create our own X509 digital certificate which we # save in the database. The private key is returned by the method but not saved. def generate_keys # check if we are replacing a digital certificate already generated replacing_key = true unless self.digital_certificate.nil? 
# generate the new key pair new_key = OpenSSL::PKey::RSA.generate(1024) self.public_key = new_key.public_key.to_pem save # when replacing an existing key, update any digital signatures made previously with the new key if replacing_key participants = AssignmentParticipant.where(user_id: self.id) for participant in participants if participant.permission_granted AssignmentParticipant.grant_publishing_rights(new_key.to_pem, [participant]) end end end # return the new private key new_key.to_pem end def initialize(attributes = nil) super(attributes) Authlogic::CryptoProviders::Sha1.stretches = 1 @email_on_review = true @email_on_submission = true @email_on_review_of_review = true @copy_of_emails = false end def self.export(csv, _parent_id, options) users = User.all users.each do |user| tcsv = [] if options["personal_details"] == "true" tcsv.push(user.name, user.fullname, user.email) end tcsv.push(user.role.name) if options["role"] == "true" tcsv.push(user.parent.name) if options["parent"] == "true" if options["email_options"] == "true" tcsv.push(user.email_on_submission, user.email_on_review, user.email_on_review_of_review, user.copy_of_emails) end tcsv.push(user.handle) if options["handle"] == "true" csv << tcsv end end def creator parent end def self.export_fields(options) fields = [] if options["personal_details"] == "true" fields.push("name", "full name", "email") end fields.push("role") if options["role"] == "true" fields.push("parent") if options["parent"] == "true" if options["email_options"] == "true" fields.push("email on submission", "email on review", "email on metareview") end fields.push("handle") if options["handle"] == "true" fields end def self.from_params(params) user = if params[:user_id] User.find(params[:user_id]) else User.find_by_name(params[:user][:name]) end if user.nil? newuser = url_for controller: 'users', action: 'new' raise "Please <a href='#{newuser}'>create an account</a> for this user to continue." end user end def is_teaching_assistant_for?(student) return false unless is_teaching_assistant? return false if student.role.name != 'Student' # We have to use the Ta object instead of User object # because single table inheritance is not currently functioning ta = Ta.find(id) return true if ta.courses_assisted_with.any? do |c| c.assignments.map(&:participants).flatten.map(&:user_id).include? student.id end end def is_teaching_assistant? return true if self.role.ta? end def self.search_users(role, user_id, letter, search_by) if search_by == '1' # search by user name search_filter = '%' + letter + '%' users = User.order('name').where("(role_id in (?) or id = ?) and name like ?", role.get_available_roles, user_id, search_filter) elsif search_by == '2' # search by full name search_filter = '%' + letter + '%' users = User.order('name').where("(role_id in (?) or id = ?) and fullname like ?", role.get_available_roles, user_id, search_filter) elsif search_by == '3' # search by email search_filter = '%' + letter + '%' users = User.order('name').where("(role_id in (?) or id = ?) and email like ?", role.get_available_roles, user_id, search_filter) else # default used when clicking on letters search_filter = letter + '%' users = User.order('name').where("(role_id in (?) or id = ?) and name like ?", role.get_available_roles, user_id, search_filter) end users end end
33.467066
170
0.656915
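Illustrative calls against the User model in the record above; the login value and search arguments are invented.

user = User.find_by_login('jdoe')                 # tries email first, then the part before the @
user.super_admin?                                 # role helper defined on the model
User.search_users(user.role, user.id, 'a', '1')   # '1' => search by user name with a LIKE filter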
4a47e49867869c99c11e2d7a1f252468a9a98c2a
842
RSpec.describe HttpProxy do
  let(:machine) { double(:machine, vm_ip: '192.168.99.100', host_ip: '192.168.99.1') }
  let(:proxy) { described_class.new(machine, nil) }

  it 'defaults to `docker` as domain when no preference exists' do
    expect(proxy.dinghy_domain).to eq "docker"
    expect(proxy.resolver_file).to eq Pathname.new("/etc/resolver/docker")
    expect(proxy.send(:run_args)).to be_include "DOMAIN_TLD=docker"
  end

  it 'sets the domain to the one set in the preferences' do
    proxy.dinghy_domain = "dev"
    expect(proxy.dinghy_domain).to eq "dev"
    expect(proxy.resolver_file).to eq Pathname.new("/etc/resolver/dev")
    expect(proxy.send(:run_args)).to be_include "DOMAIN_TLD=dev"
  end

  it 'sets the host machine IP option' do
    expect(proxy.send(:run_args)).to be_include "HOSTMACHINE_IP=192.168.99.1"
  end
end
38.272727
86
0.716152
f8b0e4ada79b0c62c59ed13f828886d9ce6b9409
408
Sequel.migration do
  change do
    drop_table(:rendered_templates_archives)
    create_table(:rendered_templates_archives) do
      primary_key :id
      foreign_key :instance_id, :instances, null: false
      String :blobstore_id, null: false
      String :sha1, null: false
      String :content_sha1, null: false
      Time :created_at, null: false
      index :created_at
    end
  end
end
22.666667
55
0.676471
26bfb8dced623186d0e7be04cb2e3b846b44f3b1
1,148
#
# Author:: Seth Chisamore (<[email protected]>)
# Cookbook Name:: php-fpm
# Recipe:: package
#
# Copyright 2011-2017, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

template node['php-fpm']['conf_file'] do
  source 'php-fpm.conf.erb'
  mode 00644
  owner 'root'
  group 'root'
  notifies :restart, 'service[php-fpm]'
end

if node['php-fpm']['pools']
  node['php-fpm']['pools'].each do |pool|
    if pool.is_a?(Array)
      pool_name = pool[0]
      pool = pool[1]
    else
      pool_name = pool[:name]
    end

    php_fpm_pool pool_name do
      pool.each do |k, v|
        params[k.to_sym] = v
      end
    end
  end
end
26.090909
74
0.682056
f80f19368e807fe4ebb4e3636fe6b541b7636559
460
# Be sure to restart your server when you modify this file.

require 'nucore'

if NUCore::Database.oracle?
  ActiveRecord::ConnectionAdapters::OracleEnhancedAdapter.class_eval do
    self.emulate_integers_by_column_name = true
    self.default_sequence_start_value = 1
  end

  # OCI8 does not realize that SafeBuffer is a type of string, so we need to
  # tell it what to do.
  OCI8::BindType::Mapping['ActiveSupport::SafeBuffer'] = OCI8::BindType::String
end
32.857143
79
0.767391
ab812c1e33d151130b550058da0c045ce1fe0e29
88
# desc "Explaining what the task does"
# task :meta_field do
#   # Task goes here
# end
17.6
38
0.681818
6a7f0a558cf41215e3efdceb3f88cbb9f391bd44
1,126
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: grafeas/v1/intoto_statement.proto

require 'google/protobuf'

require 'grafeas/v1/intoto_provenance_pb'
require 'grafeas/v1/slsa_provenance_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("grafeas/v1/intoto_statement.proto", :syntax => :proto3) do
    add_message "grafeas.v1.InTotoStatement" do
      optional :type, :string, 1, json_name: "_type"
      repeated :subject, :message, 2, "grafeas.v1.Subject"
      optional :predicate_type, :string, 3
      oneof :predicate do
        optional :provenance, :message, 4, "grafeas.v1.InTotoProvenance"
        optional :slsa_provenance, :message, 5, "grafeas.v1.SlsaProvenance"
      end
    end
    add_message "grafeas.v1.Subject" do
      optional :name, :string, 1
      map :digest, :string, :string, 2
    end
  end
end

module Grafeas
  module V1
    InTotoStatement = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grafeas.v1.InTotoStatement").msgclass
    Subject = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grafeas.v1.Subject").msgclass
  end
end
34.121212
117
0.726465
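A hedged sketch of constructing one of the generated protobuf messages above (assuming the generated file has been required); the subject name and digest values are made up.

statement = Grafeas::V1::InTotoStatement.new(
  type: 'https://in-toto.io/Statement/v0.1',        # maps to the `_type` JSON name
  predicate_type: 'https://slsa.dev/provenance/v0.1',
  subject: [Grafeas::V1::Subject.new(name: 'example-artifact', digest: { 'sha256' => 'abc123' })]
)
Grafeas::V1::InTotoStatement.encode(statement)      # serialized bytes via google-protobuf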
7a68a6af10331ff642ba4624c5ec735a7e0dfd0a
8,201
require 'rails_helper' module BenefitSponsors RSpec.describe Profiles::Employers::EmployerStaffRolesController, type: :controller, dbclean: :after_each do routes { BenefitSponsors::Engine.routes } let!(:security_question) { FactoryGirl.create_default :security_question } let(:staff_class) { BenefitSponsors::Organizations::OrganizationForms::StaffRoleForm } let(:site) { create(:benefit_sponsors_site, :with_benefit_market, :as_hbx_profile, :cca) } let(:benefit_market) { site.benefit_markets.first } let(:benefit_sponsor) { FactoryGirl.create(:benefit_sponsors_organizations_general_organization, :with_aca_shop_cca_employer_profile, site: site) } let(:new_benefit_sponsor) { FactoryGirl.create(:benefit_sponsors_organizations_general_organization, :with_aca_shop_cca_employer_profile, site: site) } let(:employer_profile) { benefit_sponsor.employer_profile } let!(:active_employer_staff_role) {FactoryGirl.build(:benefit_sponsor_employer_staff_role, aasm_state:'is_active', benefit_sponsor_employer_profile_id: employer_profile.id)} let!(:person) { FactoryGirl.create(:person, employer_staff_roles:[active_employer_staff_role]) } let!(:new_person_for_staff) { FactoryGirl.create(:person) } let(:applicant_employer_staff_role) {FactoryGirl.create(:benefit_sponsor_employer_staff_role, aasm_state:'is_applicant', benefit_sponsor_employer_profile_id: employer_profile.id)} let!(:applicant_person) { FactoryGirl.create(:person,employer_staff_roles:[applicant_employer_staff_role]) } let(:user) { FactoryGirl.create(:user, :person => person)} describe "GET new" do before do sign_in user xhr :get, :new end it "should render new template" do expect(response).to render_template("new") end it "should initialize staff" do expect(assigns(:staff).class).to eq staff_class end it "should return http success" do expect(response).to have_http_status(:success) end end describe "POST create", dbclean: :after_each do context "creating staff role with existing person params" do let!(:staff_params) { { :profile_id => employer_profile.id, :staff => {:first_name => new_person_for_staff.first_name, :last_name => new_person_for_staff.last_name, :dob => new_person_for_staff.dob} } } before :each do sign_in user post :create, staff_params end it "should initialize staff" do expect(assigns(:staff).class).to eq staff_class end it "should redirect" do expect(response).to have_http_status(:redirect) end it "should redirect to edit page of benefit_sponsor" do expect(response).to redirect_to(edit_profiles_registration_path(id:employer_profile.id)) expect(response.location.include?("edit")).to eq true end it "should get an notice" do expect(flash[:notice]).to match /Role added sucessfully/ end end context "creating staff role with non existing person params" do let!(:staff_params) { { :profile_id => employer_profile.id, :staff => {:first_name => "first_name", :last_name => 'last_name', :dob => "10/10/1989"} } } before :each do sign_in user post :create, staff_params end it "should redirect" do expect(response).to have_http_status(:redirect) end it "should redirect to edit page of benefit_sponsor" do expect(response).to redirect_to(edit_profiles_registration_path(id:employer_profile.id)) expect(response.location.include?("edit")).to eq true end it "should get an error" do expect(flash[:error]).to match /Role was not added because Person does not exist on the HBX Exchange/ end end end describe "GET approve", dbclean: :after_each do context "approve applicant staff role" do let!(:staff_params) { { :id => employer_profile.id, :person_id => 
applicant_person.id } } before :each do sign_in user get :approve, staff_params end it "should initialize staff" do expect(assigns(:staff).class).to eq staff_class end it "should redirect" do expect(response).to have_http_status(:redirect) end it "should redirect to edit page of benefit_sponsor" do expect(response).to redirect_to(edit_profiles_registration_path(id:employer_profile.id)) expect(response.location.include?("edit")).to eq true end it "should get an notice" do expect(flash[:notice]).to match /Role is approved/ end it "should update employer_staff_role aasm_state to is_active" do applicant_employer_staff_role.reload expect(applicant_employer_staff_role.aasm_state).to eq "is_active" end end context "approving invalid staff role" do let!(:staff_params) { { :id => employer_profile.id, :person_id => applicant_person.id } } before :each do sign_in user applicant_employer_staff_role.update_attributes(aasm_state:'is_closed') get :approve, staff_params end it "should redirect" do expect(response).to have_http_status(:redirect) end it "should redirect to edit page of benefit_sponsor" do expect(response).to redirect_to(edit_profiles_registration_path(id:employer_profile.id)) expect(response.location.include?("edit")).to eq true end it "should get an error" do expect(flash[:error]).to match /Please contact HBX Admin to report this error/ end end end describe "DELETE destroy", dbclean: :after_each do context "should deactivate staff role" do let!(:staff_params) { { :id => employer_profile.id, :person_id => applicant_person.id } } before :each do sign_in user delete :destroy, staff_params end it "should initialize staff" do expect(assigns(:staff).class).to eq staff_class end it "should redirect" do expect(response).to have_http_status(:redirect) end it "should redirect to edit page of benefit_sponsor" do expect(response).to redirect_to(edit_profiles_registration_path(id:employer_profile.id)) expect(response.location.include?("edit")).to eq true end it "should get an notice" do expect(flash[:notice]).to match /Staff role was deleted/ end it "should update employer_staff_role aasm_state to is_active" do applicant_employer_staff_role.reload expect(applicant_employer_staff_role.aasm_state).to eq "is_closed" end end context "should not deactivate only staff role of employer" do let!(:staff_params) { { :id => employer_profile.id, :person_id => person.id } } before :each do applicant_employer_staff_role.update_attributes(benefit_sponsor_employer_profile_id: new_benefit_sponsor.employer_profile.id) sign_in user delete :destroy, staff_params end it "should redirect" do expect(response).to have_http_status(:redirect) end it "should redirect to edit page of benefit_sponsor" do expect(response).to redirect_to(edit_profiles_registration_path(id:employer_profile.id)) expect(response.location.include?("edit")).to eq true end it "should get an error" do expect(flash[:error]).to match /Role was not deactivated because Please add another staff role before deleting this role/ end end end end end
33.202429
183
0.643214
bbe11c1d7ab98ff034a01801a53fe09aefe23ba1
23,665
require 'libxml' class Publication < ApplicationRecord include Seek::Rdf::RdfGeneration include PublicationsHelper alias_attribute :description, :abstract # searchable must come before acts_as_asset is called if Seek::Config.solr_enabled searchable(auto_index: false) do text :journal, :pubmed_id, :doi, :published_date, :human_disease_terms text :publication_authors do seek_authors.map(&:person).collect(&:name) end text :non_seek_authors do non_seek_authors.compact.map(&:first_name) | non_seek_authors.compact.map(&:last_name) end end end has_many :related_relationships, -> { where(predicate: Relationship::RELATED_TO_PUBLICATION) }, class_name: 'Relationship', as: :other_object, dependent: :destroy, inverse_of: :other_object has_many :data_files, through: :related_relationships, source: :subject, source_type: 'DataFile' has_many :models, through: :related_relationships, source: :subject, source_type: 'Model' has_many :assays, through: :related_relationships, source: :subject, source_type: 'Assay' has_many :studies, through: :related_relationships, source: :subject, source_type: 'Study' has_many :investigations, through: :related_relationships, source: :subject, source_type: 'Investigation' has_many :presentations, through: :related_relationships, source: :subject, source_type: 'Presentation' has_and_belongs_to_many :human_diseases has_filter :human_disease acts_as_asset validates :title, length: { maximum: 65_535 } has_many :publication_authors, dependent: :destroy, autosave: true has_many :people, through: :publication_authors belongs_to :publication_type VALID_DOI_REGEX = /\A10.\d{4,9}\/[<>\-._;()\/:A-Za-z0-9]+\z/ VALID_PUBMED_REGEX = /\A(([1-9])([0-9]{0,7}))\z/ # Note that the PubMed regex deliberately does not allow versions validates :doi, format: { with: VALID_DOI_REGEX, message: 'is invalid' }, allow_blank: true validates :pubmed_id, numericality: { greater_than: 0, message: 'is invalid' }, allow_blank: true validates :publication_type_id, presence: true, on: :create # validation differences between OpenSEEK and the VLN SEEK validates_uniqueness_of :pubmed_id, allow_nil: true, allow_blank: true, if: -> { Seek::Config.is_virtualliver } validates_uniqueness_of :doi, allow_nil: true, allow_blank: true, if: -> { Seek::Config.is_virtualliver } validates_uniqueness_of :title, if: -> { Seek::Config.is_virtualliver } validate :check_uniqueness_within_project, unless: -> { Seek::Config.is_virtualliver } attr_writer :refresh_policy before_save :refresh_policy, on: :update after_update :update_creators_from_publication_authors accepts_nested_attributes_for :publication_authors # http://bioruby.org/rdoc/Bio/Reference.html#method-i-format # key for the file-extension and format used in the route # value contains the format used by bioruby that name for the view and mimetype for the response EXPORT_TYPES = Hash.new { |_hash, key| raise("Export type #{key} is not supported") }.update( # http://filext.com/file-extension/ENW enw: { format: 'endnote', name: 'Endnote', mimetype: 'application/x-endnote-refer' }, # http://filext.com/file-extension/bibtex bibtex: { format: 'bibtex', name: 'BiBTeX', mimetype: 'application/x-bibtex' }, # (option available) # http://filext.com/file-extension/EMBL # ftp://ftp.embl.de/pub/databases/embl/doc/usrman.txt embl: { format: 'embl', name: 'EMBL', mimetype: 'chemical/x-embl-dl-nucleotide' } ).freeze def update_creators_from_publication_authors self.creators = seek_authors.map(&:person) end def publication_authors_attributes=(*args) self.refresh_policy = true 
super(*args) end def refresh_policy if @refresh_policy policy.permissions.clear populate_policy_from_authors(policy) policy.save self.refresh_policy = false end true end if Seek::Config.events_enabled has_and_belongs_to_many :events else def events [] end def event_ids [] end def event_ids=(events_ids); end end # Returns the columns to be shown on the table view for the resource def columns_default super + ['abstract','published_date','journal'] end def columns_allowed super + ['abstract','published_date','journal','last_used_at','doi','citation','deleted_contributor','registered_mode','booktitle','publisher','editor','url'] end def pubmed_uri "https://www.ncbi.nlm.nih.gov/pubmed/#{pubmed_id}" if pubmed_id end def doi_uri "https://doi.org/#{doi}" if doi end # Automatically extract the actual DOI if the user put in the full URL def doi=(doi) doi = doi.gsub(/(https?:\/\/)?(dx\.)?doi\.org\//,'') if doi super(doi) end def human_disease_terms human_diseases.collect(&:searchable_terms).flatten end def default_policy Policy.new(name: 'publication_policy', access_type: Policy::VISIBLE).tap do |policy| populate_policy_from_authors(policy) end end def seek_authors publication_authors.select(&:person) end def non_seek_authors publication_authors.where(person_id: nil) end def contributor_credited? false end def extract_metadata(pubmed_id, doi) reference = fetch_pubmed_or_doi_result(pubmed_id, doi) if reference.nil? || self.errors.any? return end if reference.respond_to?(:pubmed) result = extract_pubmed_metadata(reference) else result = extract_doi_metadata(reference) end reference.authors.each_with_index do |author, index| publication_authors.build(first_name: author.first_name, last_name: author.last_name, author_index: index) end return reference end # @param reference Bio::Reference # @see https://github.com/bioruby/bioruby/blob/master/lib/bio/reference.rb def extract_pubmed_metadata(reference) self.registered_mode = 1 self.title = reference.title.chomp # remove full stop self.abstract = reference.abstract self.journal = reference.journal self.pubmed_id = reference.pubmed self.published_date = reference.published_date self.citation = reference.citation #currently the metadata fetched by pubmed id doesn't contain the following items. # TODO self.publisher = nil self.booktitle = nil self.editor = nil end # @param doi_record DOI::Record # @see https://github.com/SysMO-DB/doi_query_tool/blob/master/lib/doi_record.rb def extract_doi_metadata(doi_record) self.registered_mode = 2 self.title = doi_record.title self.published_date = doi_record.date_published self.journal = doi_record.journal self.doi = doi_record.doi self.citation = doi_record.citation self.publisher = doi_record.publisher self.booktitle = doi_record.booktitle self.editor = doi_record.editors.map(&:name).join(" and ") end # @param bibtex_record BibTeX entity from bibtex-ruby gem def extract_bibtex_metadata(bibtex_record) self.registered_mode = 4 self.publication_type_id = PublicationType.get_publication_type_id(bibtex_record) self.title = bibtex_record[:title].try(:to_s).gsub /{|}/, '' unless bibtex_record[:title].nil? self.title = bibtex_record[:chapter].try(:to_s).gsub /{|}/, '' if (self.title.nil? && !bibtex_record[:chapter].nil?) self.title += ( ":"+ (bibtex_record[:subtitle].try(:to_s).gsub /{|}/, '')) unless bibtex_record[:subtitle].nil? 
if check_bibtex_file (bibtex_record) self.abstract = bibtex_record[:abstract].try(:to_s) self.journal = bibtex_record.journal.try(:to_s) month = bibtex_record[:month].try(:to_s) year = bibtex_record[:year].try(:to_s) self.published_date = Date.new(bibtex_record.year.try(:to_i) || 1, bibtex_record.month_numeric || 1, bibtex_record[:day].try(:to_i) || 1) self.published_date = nil if self.published_date.to_s == "0001-01-01" self.doi = bibtex_record[:doi].try(:to_s) self.pubmed_id = bibtex_record[:pubmed_id].try(:to_s) self.booktitle = bibtex_record[:booktitle].try(:to_s) self.publisher = bibtex_record[:publisher].try(:to_s) self.editor = bibtex_record[:editors].try(:to_s) self.url = parse_bibtex_url(bibtex_record).try(:to_s) unless bibtex_record[:author].nil? plain_authors = bibtex_record[:author].split(' and ') # by bibtex definition plain_authors.each_with_index do |author, index| # multiselect next if author.empty? last_name, first_name = author.split(', ') # by bibtex definition unless first_name.nil? first_name = first_name.try(:to_s).gsub /^{|}$/, '' end unless last_name.nil? last_name = last_name.try(:to_s).gsub /^{|}$/, '' end pa = PublicationAuthor.new(publication: self, first_name: first_name, last_name: last_name, author_index: index) publication_authors << pa end end unless bibtex_record[:editor].nil? && bibtex_record[:editors].nil? self.editor = bibtex_record[:editor].try(:to_s) || bibtex_record[:editors].try(:to_s) end # in some cases, e.g. proceedings, book, there are no authors but only editors if bibtex_record[:author].nil? && !self.editor.nil? plain_editors = self.editor.split(' and ') # by bibtex definition plain_editors.each_with_index do |editor, index| # multiselect next if editor.empty? last_name, first_name = editor.split(', ') # by bibtex definition unless first_name.nil? first_name = first_name.try(:to_s).gsub /^{|}$/, '' end unless last_name.nil? last_name = last_name.try(:to_s).gsub /^{|}$/, '' end pa = PublicationAuthor.new(publication: self, first_name: first_name, last_name: last_name, author_index: index) publication_authors << pa end end #using doi/pubmed_id to fetch the metadata result = fetch_pubmed_or_doi_result(self.pubmed_id, self.doi) if self.pubmed_id.present? || self.doi.present? unless result.nil? self.citation = result.citation unless result.citation.nil? if self.journal.nil? && !result.journal.nil? self.journal = result.journal end self.published_date = result.date_published unless result.date_published.nil? end if self.citation.nil? self.generate_citation(bibtex_record) end return true else return false end end # generating the citations for different types of publications by using the data from Bibtex file when no doi/pubmed_id def generate_citation(bibtex_record) self.citation = '' month = bibtex_record[:month].try(:to_s) year = bibtex_record[:year].try(:to_s) page_or_pages = (bibtex_record[:pages].try(:to_s).match?(/[^0-9]/) ? "pp." : "p." ) unless bibtex_record[:pages].nil? 
pages = bibtex_record[:pages].try(:to_s) volume = bibtex_record[:volume].try(:to_s) series = bibtex_record[:series].try(:to_s) number = bibtex_record[:number].try(:to_s) address = bibtex_record[:address].try(:to_s) school = bibtex_record[:school].try(:to_s) tutor = bibtex_record[:tutor].try(:to_s) tutorhits = bibtex_record[:tutorhits].try(:to_s) institution = bibtex_record[:institution].try(:to_s) type = bibtex_record[:type].try(:to_s) note = bibtex_record[:note].try(:to_s) archivePrefix = bibtex_record[:archiveprefix].try(:to_s) primaryClass = bibtex_record[:primaryclass].try(:to_s) eprint= bibtex_record[:eprint].try(:to_s) url = parse_bibtex_url(bibtex_record).try(:to_s) publication_type = PublicationType.find(self.publication_type_id) if publication_type.is_journal? self.citation += self.journal.nil? ? '':self.journal self.citation += volume.blank? ? '': ' '+volume self.citation += number.nil? ? '' : '('+ number+')' self.citation += pages.blank? ? '' : (':'+pages) =begin unless year.nil? self.citation += year.nil? ? '' : (' '+year) end =end elsif publication_type.is_booklet? self.citation += howpublished.blank? ? '': ''+ howpublished self.citation += address.nil? ? '' : (', '+ address) =begin unless year.nil? self.citation += year.nil? ? '' : (' '+year) end =end elsif publication_type.is_inbook? self.citation += self.booktitle.nil? ? '' : ('In '+ self.booktitle) self.citation += volume.blank? ? '' : (', volume '+ volume) self.citation += series.blank? ? '' : (' of '+series) self.citation += pages.blank? ? '' : (', '+ page_or_pages + ' '+pages) self.citation += self.editor.blank? ? '' : (', Eds: '+ self.editor) self.citation += self.publisher.blank? ? '' : (', '+ self.publisher) unless address.nil? || (self.booktitle.try(:include?, address)) self.citation += address.nil? ? '' : (', '+ address) end =begin unless self.booktitle.try(:include?, year) unless year.nil? self.citation += year.nil? ? '' : (' '+year) end end =end elsif publication_type.is_inproceedings? || publication_type.is_incollection? || publication_type.is_book? # InProceedings / InCollection self.citation += self.booktitle.nil? ? '' : ('In '+ self.booktitle) self.citation += volume.blank? ? '' : (', vol. '+ volume) self.citation += series.blank? ? '' : (' of '+series) self.citation += pages.blank? ? '' : (', '+ page_or_pages + ' '+pages) self.citation += self.editor.blank? ? '' : (', Eds: '+ self.editor) self.citation += self.publisher.blank? ? '' : (', '+ self.publisher) unless address.nil? || (self.booktitle.try(:include?, address)) self.citation += address.nil? ? '' : (', '+ address) end =begin unless self.booktitle.try(:include?, year) unless year.nil? self.citation += year.nil? ? '' : (', '+year) end end =end elsif publication_type.is_phd_thesis? || publication_type.is_masters_thesis? || publication_type.is_bachelor_thesis? #PhD/Master Thesis self.citation += school.nil? ? '' : (' '+ school) self.errors.add(:base,'A thesis need to have a school') if school.nil? self.citation += year.nil? ? '' : (', '+ year) self.citation += tutor.nil? ? '' : (', '+ tutor+'(Tutor)') self.citation += tutorhits.nil? ? '' : (', '+ tutorhits+'(HITS Tutor)') self.citation += url.nil? ? '' : (', '+ url) elsif publication_type.is_proceedings? # Proceedings are conference proceedings, it has no authors but editors # Book self.journal = self.title self.citation += volume.blank? ? '' : ('vol. '+ volume) self.citation += series.blank? ? '' : (' of '+series) self.citation += self.publisher.blank? ? 
'' : (', '+ self.publisher) =begin unless month.nil? && year.nil? self.citation += self.citation.blank? ? '' : ',' self.citation += month.nil? ? '' : (' '+ month.capitalize) self.citation += year.nil? ? '' : (' '+year) end =end elsif publication_type.is_tech_report? self.citation += institution.blank? ? ' ': institution self.citation += type.blank? ? ' ' : (', '+type) elsif publication_type.is_unpublished? self.citation += note.blank? ? ' ': note end if self.doi.blank? && self.citation.blank? self.citation += archivePrefix unless archivePrefix.nil? self.citation += (self.citation.blank? ? primaryClass : (','+primaryClass)) unless primaryClass.nil? self.citation += (self.citation.blank? ? eprint : (','+eprint)) unless eprint.nil? self.journal = self.citation if self.journal.blank? end if self.doi.blank? && self.citation.blank? self.citation += url.blank? ? '': url end self.citation = self.citation.try(:to_s).strip.gsub(/^,/,'').strip end def fetch_pubmed_or_doi_result(pubmed_id, doi) result = nil @error = nil if !pubmed_id.blank? begin result = Bio::MEDLINE.new(Bio::PubMed.efetch(pubmed_id).first).reference @error = result.error rescue => exception raise exception unless Rails.env.production? result ||= Bio::Reference.new({}) @error = 'There was a problem contacting the PubMed query service. Please try again later' Seek::Errors::ExceptionForwarder.send_notification(exception, data: {message: "Problem accessing ncbi using pubmed id #{pubmed_id}"}) end elsif !doi.blank? begin query = DOI::Query.new(Seek::Config.crossref_api_email) result = query.fetch(doi) @error = 'Unable to get result' if result.blank? @error = 'Unable to get DOI' if result.title.blank? rescue DOI::MalformedDOIException @error = 'The DOI you entered appears to be malformed.' rescue DOI::NotFoundException @error = 'The DOI you entered could not be resolved.' rescue RuntimeError => exception @error = 'There was an problem contacting the DOI query service. Please try again later' Seek::Errors::ExceptionForwarder.send_notification(exception, data: {message: "Problem accessing crossref using DOI #{doi}"}) end else @error = 'Please enter either a DOI or a PubMed ID for the publication.' end self.errors.add(:base, @error) unless @error.nil? result end def associate(item) clause = { subject_type: item.class.name, subject_id: item.id, predicate: Relationship::RELATED_TO_PUBLICATION, other_object_type: 'Publication', other_object_id: id } related_relationships.where(clause).first_or_create! end has_many :assay_data_files, through: :assays, source: :data_files # includes those related directly, or through an assay def related_data_files DataFile.where(id: related_data_file_ids) end def related_data_file_ids data_file_ids | assay_data_file_ids end has_many :assay_models, through: :assays, source: :models # includes those related directly, or through an assay def related_models Model.where(id: related_model_ids) end def related_model_ids model_ids | assay_model_ids end # indicates whether the publication has data files or models linked to it (either directly or via an assay) def has_assets? assets.none? 
end def assets data_files | models | presentations end has_many :assays_organisms, through: :assays, source: :organisms has_many :models_organisms, through: :models, source: :organism def related_organisms Organism.where(id: related_organism_ids) end def related_organism_ids assays_organism_ids | models_organism_ids end has_filter organism: Seek::Filtering::Filter.new( value_field: 'organisms.id', label_field: 'organisms.title', joins: [:assays_organisms, :models_organisms] ) # returns a list of related human diseases, related through either the assay or the model def related_human_diseases (assays.collect(&:human_diseases).flatten | models.collect(&:human_disease).flatten).uniq end def self.subscribers_are_notified_of?(action) action == 'create' end # export the publication as one of the available types: # http://bioruby.org/rdoc/Bio/Reference.html # @export_type a registered mime_type that is a valid key to EXPORT_TYPES def export(export_type) bio_reference.format(EXPORT_TYPES[export_type][:format]) end def publication_author_names publication_authors.map(&:full_name) end def has_doi? self.doi.present? end def latest_citable_resource self end private def populate_policy_from_authors(pol) # add managers (authors + contributor) (creators | seek_authors.map(&:person)).each do |author| pol.permissions.build(contributor: author, access_type: Policy::MANAGING) end # Add contributor c = contributor || default_contributor pol.permissions.build(contributor: c.person, access_type: Policy::MANAGING) if c pol.permissions end def pubmed_entry if pubmed_id Rails.cache.fetch("bio-reference-#{pubmed_id}") do entry = Bio::PubMed.efetch(pubmed_id).first raise "PubMed entry was nil" if entry.nil? entry end end end def bio_reference if pubmed_id Bio::MEDLINE.new(pubmed_entry).reference else # TODO: Bio::Reference supports a 'url' option. Should this be the URL on seek, or the URL of the 'View Publication' button, or neither? Bio::Reference.new({ title: title, journal: journal, abstract: abstract, authors: publication_authors.map { |e| e.person ? [e.person.last_name, e.person.first_name].join(', ') : [e.last_name, e.first_name].join(', ') }, year: published_date.try(:year) }.with_indifferent_access) end end def check_uniqueness_within_project { title: 'title', doi: 'DOI', pubmed_id: 'PubMed ID' }.each do |attr, name| next unless send(attr).present? existing = Publication.where(attr => send(attr)).to_a - [self] next unless existing.any? matching_projects = existing.collect(&:projects).flatten.uniq & projects if matching_projects.any? errors.add(:base, "You cannot register the same #{name} within the same project.") return false end end end def check_bibtex_file (bibtex_record) if self.title.blank? errors.add(:base, "Please check your bibtex files, each publication should contain a title or a chapter name.") return false end if (%w[InCollection InProceedings].include? self.publication_type.title) && (bibtex_record[:booktitle].blank?) errors.add(:base, "An #{self.publication_type.title} needs to have a booktitle.") return false end unless %w[Booklet Manual Misc Proceedings].include? self.publication_type.title if bibtex_record[:author].nil? && self.editor.nil? self.errors.add(:base, "You need at least one author or editor for the #{self.publication_type.title}.") return false end end if self.publication_type.is_phd_thesis? || self.publication_type.is_masters_thesis? || self.publication_type.is_bachelor_thesis? if bibtex_record[:school].try(:to_s).nil? 
self.errors.add(:base,"A #{self.publication_type.title} needs to have a school.") return false end end return true end def parse_bibtex_url(bibtex_record) pub_url=nil howpublished = bibtex_record[:howpublished].try(:to_s) note = bibtex_record[:note].try(:to_s) url = bibtex_record[:url].try(:to_s) biburl = bibtex_record[:biburl].try(:to_s) pub_url = url if url.try(:include?,'http') pub_url ||= howpublished if howpublished.try(:include?,'http') pub_url ||= note if note.try(:include?,'http') pub_url ||= biburl if biburl.try(:include?,'http') if (pub_url.try(:start_with?,'\url')) pub_url = pub_url.gsub('\url', '') end pub_url end # defines that this is a user_creatable object type, and appears in the "New Object" gadget def self.user_creatable? Seek::Config.publications_enabled end end
37.034429
173
0.666596
e88fc520455768cec2301a013bebb0ab293c164d
349
class RolloutStatusEntity < Grape::Entity
  include RequestAwareEntity

  expose :status, as: :status

  expose :instances, if: -> (rollout_status, _) { rollout_status.found? }
  expose :completion, if: -> (rollout_status, _) { rollout_status.found? }
  expose :complete?, as: :is_completed, if: -> (rollout_status, _) { rollout_status.found? }
end
34.9
92
0.716332
39a0e67c8e4990709d1cf2ae3f995c916d5d8649
1,062
class Datomic < Formula
  desc "Database that simplifies by separating transactions, storage and queries"
  homepage "http://www.datomic.com/"
  url "https://my.datomic.com/downloads/free/0.9.5198"
  sha256 "1d42979079bd62c2b4df9fbc3c18cdc15d49ac04905eceab4c75720e23ed8fc7"
  version "0.9.5198"

  depends_on :java

  def install
    libexec.install Dir["*"]
    (bin/"datomic").write_env_script libexec/"bin/datomic", Language::Java.java_home_env
    %w[transactor repl repl-jline rest shell groovysh maven-install].each do |file|
      (bin/"datomic-#{file}").write_env_script libexec/"bin/#{file}", Language::Java.java_home_env
    end
  end

  def caveats
    <<-EOS.undent
      All commands have been installed with the prefix "datomic-".

      We agreed to the Datomic Free Edition License for you:
        http://www.datomic.com/datomic-free-edition-license.html
      If this is unacceptable you should uninstall.
    EOS
  end

  test do
    help = pipe_output("#{bin}/datomic-shell", "Shell.help();\n")
    assert_match(/^\* Basics/, help)
  end
end
32.181818
98
0.710923
874bcacb75b81d670286a1d42def549a44823e43
1,184
require 'test_helper'

class UserTest < ActiveSupport::TestCase

  def setup
    @user = User.new(name:"Example User", email:"[email protected]", password: "foobar", password_confirmation: "foobar")
  end

  test "should be valid" do
    assert @user.valid?
  end

  test "name should be present" do
    @user.name = "a" * 51
    assert_not @user.valid?
  end

  test "email validation should reject invalid addresses" do
    invalid_addresses = %w[user@example,com user_at_foo.org user.name@example. foo@bar_baz.com foo@bar+baz.com]
    invalid_addresses.each do |invalid_address|
      @user.email = invalid_address
      assert_not @user.valid?, "#{invalid_address.inspect} should be invalid"
    end
  end

  test "email addresses should be unique" do
    duplicate_user = @user.dup
    duplicate_user.email = @user.email.upcase
    @user.save
    assert_not duplicate_user.valid?
  end

  test "password should be present (nonblank)" do
    @user.password = @user.password_confirmation = " " * 6
    assert_not @user.valid?
  end

  test "password should have a minimum length" do
    @user.password = @user.password_confirmation = "a" * 5
    assert_not @user.valid?
  end
end
27.534884
120
0.701858
33ca26b7e8e04d15bbfef3bc2254e6801b4b39d2
283
require 'wordfor/core_ext/array'
require 'wordfor/core_ext/dir'
require 'wordfor/version'
require 'wordfor/configuration'
require 'wordfor/plaintext'
require 'wordfor/lookup'

module Wordfor
  def self.logger
    @logger ||= Logger.new(Configuration.log_file, 1, 1024000)
  end
end
20.214286
62
0.773852
bf1409a25bd025e016516ba71005455e7d0f18f0
1,353
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')

describe "MongoMapper::Plugins::Sluggable" do
  describe "with SCI" do
    before do
      Animal = Class.new do
        include MongoMapper::Document
        key :name
      end
      Animal.collection.remove

      Dog = Class.new(Animal)
    end

    after do
      Object.send(:remove_const, :Animal)
      Object.send(:remove_const, :Dog)
    end

    describe "when defined in the base class" do
      before do
        Animal.instance_eval do
          plugin MongoMapper::Plugins::Sluggable
          sluggable :name
        end
      end

      it "should scope it to the base class" do
        animal = Animal.new(:name => "rover")
        animal.save!
        animal.slug.should == "rover"

        dog = Dog.new(:name => "rover")
        dog.save!
        dog.slug.should == "rover-1"
      end
    end

    describe "when defined on the subclass" do
      before do
        Dog.instance_eval do
          plugin MongoMapper::Plugins::Sluggable
          sluggable :name
        end
      end

      it "should scope it to the subclass" do
        animal = Animal.new(:name => "rover")
        animal.save!
        animal.should_not respond_to(:slug)

        dog = Dog.new(:name => "rover")
        dog.save!
        dog.slug.should == "rover"
      end
    end
  end
end
22.55
65
0.579453
4a059dcc84f88200fdeab1e40f9a09f5905c6d4d
6,038
require 'spec_helper' module Spree class Promotion module Actions describe CreateItemAdjustments, :type => :model do let(:order) { create(:order) } let(:promotion) { create(:promotion) } let(:action) { CreateItemAdjustments.new } let!(:line_item) { create(:line_item, :order => order) } let(:payload) { { order: order, promotion: promotion } } before do allow(action).to receive(:promotion).and_return(promotion) promotion.promotion_actions = [action] end context "#perform" do # Regression test for #3966 context "when calculator computes 0" do before do allow(action).to receive_messages :compute_amount => 0 end it "does not create an adjustment when calculator returns 0" do action.perform(payload) expect(action.adjustments).to be_empty end end context "when calculator returns a non-zero value" do before do promotion.promotion_actions = [action] allow(action).to receive_messages :compute_amount => 10 end it "creates adjustment with item as adjustable" do action.perform(payload) expect(action.adjustments.count).to eq(1) expect(line_item.reload.adjustments).to eq(action.adjustments) end it "creates adjustment with self as source" do action.perform(payload) expect(line_item.reload.adjustments.first.source).to eq action end it "does not perform twice on the same item" do 2.times { action.perform(payload) } expect(action.adjustments.count).to eq(1) end context "with products rules" do let(:rule) { double Spree::Promotion::Rules::Product } before { promotion.stub(:eligible_rules) { [rule] } } context "when the rule is actionable" do before { rule.stub(:actionable?).and_return(true) } it "creates an adjustment" do expect { expect { action.perform(payload) }.to change { action.adjustments.count }.by(1) }.to change { line_item.adjustments.count }.by(1) expect(action.adjustments.last).to eq line_item.adjustments.last end end context "when the rule is not actionable" do before { rule.stub(:actionable?).and_return(false) } it "does not create an adjustment" do expect { expect { action.perform(payload) }.to_not change { action.adjustments.count } }.to_not change { line_item.adjustments.count } end end end context "when a promotion code is used" do let(:promotion_code) { create(:promotion_code) } let(:promotion) { promotion_code.promotion } let(:payload) { { order: order, promotion: promotion, promotion_code: promotion_code } } it "should connect the adjustment to the promotion_code" do expect { action.perform(payload) }.to change { line_item.adjustments.count }.by(1) expect(line_item.adjustments.last.promotion_code).to eq promotion_code end end end end context "#compute_amount" do before { promotion.promotion_actions = [action] } context "when the adjustable is actionable" do it "calls compute on the calculator" do expect(action.calculator).to receive(:compute).with(line_item) action.compute_amount(line_item) end context "calculator returns amount greater than item total" do before do expect(action.calculator).to receive(:compute).with(line_item).and_return(300) allow(line_item).to receive_messages(amount: 100) end it "does not exceed it" do expect(action.compute_amount(line_item)).to eql(-100) end end end context "when the adjustable is not actionable" do before { allow(promotion).to receive(:line_item_actionable?) 
{ false } } it 'returns 0' do expect(action.compute_amount(line_item)).to eql(0) end end end context "#destroy" do let!(:action) { promotion.actions.first } let(:other_action) { other_promotion.actions.first } let(:promotion) { create(:promotion, :with_line_item_adjustment) } let(:other_promotion) { create(:promotion, :with_line_item_adjustment) } it "destroys adjustments for incompleted orders" do order = Order.create action.adjustments.create!(label: "Check", amount: 0, order: order, adjustable: order) expect { action.destroy }.to change { Adjustment.count }.by(-1) end it "nullifies adjustments for completed orders" do order = Order.create(completed_at: Time.now) adjustment = action.adjustments.create!(label: "Check", amount: 0, order: order, adjustable: order) expect { action.destroy }.to change { adjustment.reload.source_id }.from(action.id).to nil end it "doesnt mess with unrelated adjustments" do other_action.adjustments.create!(label: "Check", amount: 0, order: order, adjustable: order) expect { action.destroy }.not_to change { other_action.adjustments.count } end end end end end end
36.373494
111
0.56045
919c8dc8f79fd918b78ca63fa37afe6de6818169
4,540
# frozen_string_literal: true # This file was generated by the `rails generate rspec:install` command. Conventionally, all # specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`. # The generated `.rspec` file contains `--require spec_helper` which will cause # this file to always be loaded, without a need to explicitly require it in any # files. # # Given that it is always loaded, you are encouraged to keep this file as # light-weight as possible. Requiring heavyweight dependencies from this file # will add to the boot time of your test suite on EVERY test run, even for an # individual file that may not need all of that loaded. Instead, consider making # a separate helper file that requires the additional dependencies and performs # the additional setup, and require it from the spec files that actually need # it. # # The `.rspec` file also contains a few flags that are not defaults but that # users commonly want. # # See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration RSpec.configure do |config| # rspec-expectations config goes here. You can use an alternate # assertion/expectation library such as wrong or the stdlib/minitest # assertions if you prefer. config.expect_with :rspec do |expectations| # This option will default to `true` in RSpec 4. It makes the `description` # and `failure_message` of custom matchers include text for helper methods # defined using `chain`, e.g.: # be_bigger_than(2).and_smaller_than(4).description # # => "be bigger than 2 and smaller than 4" # ...rather than: # # => "be bigger than 2" expectations.include_chain_clauses_in_custom_matcher_descriptions = true end # rspec-mocks config goes here. You can use an alternate test double # library (such as bogus or mocha) by changing the `mock_with` option here. config.mock_with :rspec do |mocks| # Prevents you from mocking or stubbing a method that does not exist on # a real object. This is generally recommended, and will default to # `true` in RSpec 4. mocks.verify_partial_doubles = true end # The settings below are suggested to provide a good initial experience # with RSpec, but feel free to customize to your heart's content. # # These two settings work together to allow you to limit a spec run # # to individual examples or groups you care about by tagging them with # # `:focus` metadata. When nothing is tagged with `:focus`, all examples # # get run. # config.filter_run :focus # config.run_all_when_everything_filtered = true # # # Allows RSpec to persist some state between runs in order to support # # the `--only-failures` and `--next-failure` CLI options. We recommend # # you configure your source control system to ignore this file. # config.example_status_persistence_file_path = "spec/examples.txt" # # # Limits the available syntax to the non-monkey patched syntax that is # # recommended. For more details, see: # # - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/ # # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/ # # - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode # config.disable_monkey_patching! # # # Many RSpec users commonly either run the entire suite or an individual # # file, and it's useful to allow more verbose output when running an # # individual spec file. # if config.files_to_run.one? # # Use the documentation formatter for detailed output, # # unless a formatter has already been configured # # (e.g. via a command-line flag). 
# config.default_formatter = 'doc' # end # # # Print the 10 slowest examples and example groups at the # # end of the spec run, to help surface which specs are running # # particularly slow. # config.profile_examples = 10 # # # Run specs in random order to surface order dependencies. If you find an # # order dependency and want to debug it, you can fix the order by providing # # the seed, which is printed after each run. # # --seed 1234 # config.order = :random # # # Seed global randomization in this process using the `--seed` CLI option. # # Setting this allows you to use `--seed` to deterministically reproduce # # test failures related to randomization by passing the same `--seed` value # # as the one that triggered the failure. # Kernel.srand config.seed end
49.347826
96
0.713877
aba0c63d61aba8f492454448328c75238f231b6b
824
class Dir
  # visit dir, then all files in dir, then walk_dir each directory in dir
  def self.walk(dir, &block)
    yield dir
    all = Dir.entries(dir)
    partition = all.partition{|e| File.file?("#{dir}/#{e}")}
    files = partition[0]
    dirs = partition[1]
    files.each{|f| yield "#{dir}/#{f}" unless f.start_with?(".")}
    dirs.each{|d| walk("#{dir}/#{d}", &block) unless d.start_with?(".")}
  end

  def self.print(dir)
    puts "/#{dir}"
    Dir.walk(dir) {|full_path|
      path = full_path.gsub(Regexp.new("#{dir}\/?"), '')
      next if path.empty?
      prefix = "|"
      path.scan(/\//).size.times do
        prefix += " |"
      end
      path.gsub!(/^.*\//, '')
      puts "#{prefix}-- #{path}"
    }
  end

  def self.within_dir?(dir1, dir2)
    (dir1.split('/') - dir2.split('/')).empty?
  end
end
24.969697
73
0.540049
7acfb8c19af8aa9b66876bae9c6b92df0b52fcb4
645
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe RawUsageData do
  describe 'validations' do
    it { is_expected.to validate_presence_of(:payload) }
    it { is_expected.to validate_presence_of(:recorded_at) }

    context 'uniqueness validation' do
      let!(:existing_record) { create(:raw_usage_data) }

      it { is_expected.to validate_uniqueness_of(:recorded_at) }
    end

    describe '#update_sent_at!' do
      let(:raw_usage_data) { create(:raw_usage_data) }

      it 'updates sent_at' do
        raw_usage_data.update_sent_at!

        expect(raw_usage_data.sent_at).not_to be_nil
      end
    end
  end
end
23.888889
64
0.705426
d5ed2355e43a67afac101feff6a137ebe21ec4be
832
require "helper" class SomeRandomModel < ActiveRecord::Base; end class TestActiveSupportSpecType < Minitest::Test def assert_support actual assert_equal ActiveSupport::TestCase, actual end def refute_support actual refute_equal ActiveSupport::TestCase, actual end def test_spec_type_resolves_for_actitive_record_constants assert_support Minitest::Spec.spec_type(SomeRandomModel) end def test_spec_type_doesnt_resolve_random_strings refute_support Minitest::Spec.spec_type("Unmatched String") end def test_spec_type_resolves_for_additional_desc_model assert_support Minitest::Spec.spec_type("Unmatched String", :model) assert_support Minitest::Spec.spec_type("Unmatched String", :model, :other) assert_support Minitest::Spec.spec_type("Unmatched String", :other, :model) end end
29.714286
79
0.800481
3368122d88af7c7f294f142025b4633006548d35
190
Rails.configuration.stripe = {
  :publishable_key => ENV['STRIPE_TEST_PUBLISHABLE_KEY'],
  :secret_key => ENV['STRIPE_TEST_SECRET_KEY']
}

Stripe.api_key = Rails.configuration.stripe[:secret_key]
38
56
0.794737
f7b3215c736f5d35f11c0246c29a66d39f4ca2c8
4,477
# frozen_string_literal: true # == Schema Information # # Table name: campaigns # # id :bigint not null, primary key # name :string not null # created_at :datetime not null # updated_at :datetime not null # start_date :date not null # end_date :date not null # objective :string # campaign_type :string not null # product :string # manager_id :bigint # company_id :integer # company_type :string # require 'test_helper' class CampaignTest < ActiveSupport::TestCase setup do @image = Rack::Test::UploadedFile.new('test/fixtures/files/example.jpg', 'image/jpg') end test 'campaign is valid with all attributes' do vans = Campaign.new(name: 'Vans campaign', start_date: Time.now, end_date: Time.now - 1.day, objective: 'Increase number of clients', campaign_type: 'Positioning Positioning', product: 'iPhone 11', manager: users(:manager), image: @image, company: Company.first) assert vans.valid? end test 'campaign is not valid with no image' do vans = Campaign.new(name: 'Vans', start_date: Time.now, end_date: Time.now - 1.day, objective: 'Posicionamiento', campaign_type: 1, product: 'iPhone 11', manager: users(:manager)) refute vans.valid? end test 'campaign is not valid with no name' do vans = Campaign.new(start_date: Time.now, end_date: Time.now - 1.day, objective: 'Posicionamiento', campaign_type: 1, product: 'iPhone 11', manager: users(:manager), image: @image) refute vans.valid? end test 'campaign is not valid with not start_date' do vans = Campaign.new(name: 'Vans', end_date: Time.now - 1.day, objective: 'Posicionamiento', campaign_type: 1, product: 'iPhone 11', manager: users(:manager), image: @image) refute vans.valid? end test 'campaign is not valid with no end_date' do vans = Campaign.new(name: 'Vans', start_date: Time.now, objective: 'Posicionamiento', campaign_type: 1, product: 'iPhone 11', manager: users(:manager), image: @image) refute vans.valid? end test 'campaign is not valid with no objective' do vans = Campaign.new(name: 'Vans', start_date: Time.now, end_date: Time.now - 1.day, campaign_type: 1, product: 'iPhone 11', manager: users(:manager), image: @image) refute vans.valid? end test 'campaign is not valid with no campaign_type' do vans = Campaign.new(name: 'Vans', start_date: Time.now, end_date: Time.now - 1.day, objective: 'Posicionamiento', product: 'iPhone 11', manager: users(:manager), image: @image) refute vans.valid? end test 'campaign is not valid with no product' do vans = Campaign.new(name: 'Vans', start_date: Time.now, end_date: Time.now - 1.day, objective: 'Posicionamiento', campaign_type: 1, manager: users(:manager), image: @image) refute vans.valid? end test 'campaign is not valid with no manager' do vans = Campaign.new(name: 'Vans', start_date: Time.now, end_date: Time.now - 1.day, objective: 'Posicionamiento', campaign_type: 1, product: 'iPhone 11', image: @image) refute vans.valid? end end
32.442029
89
0.476212
79d9d2427e5282dc1fc073b433bf901c241f418f
1,055
# encoding: UTF-8

# This file contains data derived from the IANA Time Zone Database
# (https://www.iana.org/time-zones).

module TZInfo
  module Data
    module Definitions
      module Pacific
        module Majuro
          include TimezoneDefinition

          timezone 'Pacific/Majuro' do |tz|
            tz.offset :o0, 41088, 0, :LMT
            tz.offset :o1, 39600, 0, :'+11'
            tz.offset :o2, 32400, 0, :'+09'
            tz.offset :o3, 36000, 0, :'+10'
            tz.offset :o4, 43200, 0, :'+12'

            tz.transition 1900, 12, :o1, -2177493888, 1086923261, 450
            tz.transition 1914, 9, :o2, -1743678000, 58089745, 24
            tz.transition 1919, 1, :o1, -1606813200, 19375921, 8
            tz.transition 1936, 12, :o3, -1041418800, 58284817, 24
            tz.transition 1941, 3, :o2, -907408800, 29161021, 12
            tz.transition 1944, 1, :o1, -818067600, 19448953, 8
            tz.transition 1969, 9, :o4, -7988400, 58571881, 24
          end
        end
      end
    end
  end
end
31.969697
69
0.553555
38de92f5f54aa0f5bc220f3f4722d30e756cdbc7
664
class Vote < ApplicationRecord
  belongs_to :user
  belongs_to :article
  validates_presence_of :user, :article
  validate :vote_is_unique, on: :create

  def vote_is_unique
    vote = Vote.where(article_id: self[:article_id], user_id: self[:user_id])
    errors.add(:vote_is_unique, 'you can only vote once') unless vote.empty?
  end

  def self.count_by_article
    group(:article_id).order('count_all desc').count
  end

  def self.count_by_category
    joins('INNER JOIN articles ON articles.id = votes.article_id')
      .joins('INNER JOIN categories ON categories.id=articles.category_id')
      .group(:category_id).order('count_all desc').count
  end
end
28.869565
77
0.736446
87db2f06d02c190b6e6c759e6c6a09d9a6ef5489
324
# frozen_string_literal: true

require 'yaml'

module Fakerjp
  class Helper
    class << self
      def fetcher(module_name, method_name)
        data = YAML.load_file("#{File.dirname(__FILE__)}/data/#{module_name}.yml")[method_name]
        index = rand(0..(data.length-1))
        data[index]
      end
    end
  end
end
20.25
95
0.641975
7ae9d5a141fb3b6f7293788250a96cd335209b5f
641
require 'rails_helper'

describe 'Tests mailer' do
  it 'should send email to required email address with proper content ' do
    # Send the email, then test that it got queued
    email = Mailer.sync_message(
      to: '[email protected]',
      subject: "Test",
      body: {
        obj_name: 'assignment',
        type: 'submission',
        location: '1',
        first_name: 'User',
        partial_name: 'update'
      }
    ).deliver_now
    expect(email.from[0]).to eq("[email protected]")
    expect(email.to[0]).to eq('[email protected]')
    expect(email.subject).to eq('Test')
  end
end
27.869565
74
0.614665
ff3f1d0072b03dbc81b715693a610dc8b89ca0fd
10,376
require 'spec_helper' describe Spree::Preferences::Preferable, type: :model do before :all do class A include Spree::Preferences::Preferable attr_reader :id def initialize @id = rand(999) end def preferences @preferences ||= default_preferences end preference :color, :string, default: 'green', deprecated: 'Please use colour instead' end class B < A preference :flavor, :string end end before do @a = A.new allow(@a).to receive_messages(persisted?: true) @b = B.new allow(@b).to receive_messages(persisted?: true) # ensure we're persisting as that is the default # store = Spree::Preferences::Store.instance store.persistence = true end describe 'preference definitions' do it 'parent should not see child definitions' do expect(@a.has_preference?(:color)).to be true expect(@a.has_preference?(:flavor)).not_to be true end it 'child should have parent and own definitions' do expect(@b.has_preference?(:color)).to be true expect(@b.has_preference?(:flavor)).to be true end it 'instances have defaults' do expect(@a.preferred_color).to eq 'green' expect(@b.preferred_color).to eq 'green' expect(@b.preferred_flavor).to be_nil end it 'can be asked if it has a preference definition' do expect(@a.has_preference?(:color)).to be true expect(@a.has_preference?(:bad)).to be false end it 'can be asked and raises' do expect do @a.has_preference! :flavor end.to raise_error(NoMethodError, 'flavor preference not defined') end it 'has a type' do expect(@a.preferred_color_type).to eq :string expect(@a.preference_type(:color)).to eq :string end it 'has a default' do expect(@a.preferred_color_default).to eq 'green' expect(@a.preference_default(:color)).to eq 'green' end it 'can have a deprecation message' do expect(@a.preferred_color_deprecated).to eq 'Please use colour instead' expect(@a.preference_deprecated(:color)).to eq 'Please use colour instead' end it 'raises if not defined' do expect do @a.get_preference :flavor end.to raise_error(NoMethodError, 'flavor preference not defined') end end describe 'preference access' do it 'handles ghost methods for preferences' do @a.preferred_color = 'blue' expect(@a.preferred_color).to eq 'blue' end it 'parent and child instances have their own prefs' do @a.preferred_color = 'red' @b.preferred_color = 'blue' expect(@a.preferred_color).to eq 'red' expect(@b.preferred_color).to eq 'blue' end it 'raises when preference not defined' do expect do @a.set_preference(:bad, :bone) end.to raise_exception(NoMethodError, 'bad preference not defined') end it 'builds a hash of preferences' do @b.preferred_flavor = :strawberry expect(@b.preferences[:flavor]).to eq 'strawberry' expect(@b.preferences[:color]).to eq 'green' # default from A end it 'builds a hash of preference defaults' do expect(@b.default_preferences).to eq(flavor: nil, color: 'green') end it 'builds a array of deprecated preferences' do expect(@b.deprecated_preferences).to eq([{ name: :color, message: 'Please use colour instead' }]) end context 'converts integer preferences to integer values' do before do A.preference :is_integer, :integer end it 'with strings' do @a.set_preference(:is_integer, '3') expect(@a.preferences[:is_integer]).to eq(3) @a.set_preference(:is_integer, '') expect(@a.preferences[:is_integer]).to eq(0) end end context 'converts decimal preferences to BigDecimal values' do before do A.preference :if_decimal, :decimal end it 'returns a BigDecimal' do @a.set_preference(:if_decimal, 3.3) expect(@a.preferences[:if_decimal].class).to eq(BigDecimal) end it 'with strings' do @a.set_preference(:if_decimal, '3.3') 
expect(@a.preferences[:if_decimal]).to eq(3.3) @a.set_preference(:if_decimal, '') expect(@a.preferences[:if_decimal]).to eq(0.0) end end context 'converts boolean preferences to boolean values' do before do A.preference :is_boolean, :boolean, default: true end it 'with strings' do @a.set_preference(:is_boolean, '0') expect(@a.preferences[:is_boolean]).to be false @a.set_preference(:is_boolean, 'f') expect(@a.preferences[:is_boolean]).to be false @a.set_preference(:is_boolean, 't') expect(@a.preferences[:is_boolean]).to be true end it 'with integers' do @a.set_preference(:is_boolean, 0) expect(@a.preferences[:is_boolean]).to be false @a.set_preference(:is_boolean, 1) expect(@a.preferences[:is_boolean]).to be true end it 'with an empty string' do @a.set_preference(:is_boolean, '') expect(@a.preferences[:is_boolean]).to be false end it 'with an empty hash' do @a.set_preference(:is_boolean, []) expect(@a.preferences[:is_boolean]).to be false end end context 'converts array preferences to array values' do before do A.preference :is_array, :array, default: [] end it 'with arrays' do @a.set_preference(:is_array, []) expect(@a.preferences[:is_array]).to be_is_a(Array) end it 'with string' do @a.set_preference(:is_array, 'string') expect(@a.preferences[:is_array]).to be_is_a(Array) end it 'with hash' do @a.set_preference(:is_array, {}) expect(@a.preferences[:is_array]).to be_is_a(Array) end end context 'converts hash preferences to hash values' do before do A.preference :is_hash, :hash, default: {} end it 'with hash' do @a.set_preference(:is_hash, {}) expect(@a.preferences[:is_hash]).to be_is_a(Hash) end it 'with hash and keys are integers' do @a.set_preference(:is_hash, 1 => 2, 3 => 4) expect(@a.preferences[:is_hash]).to eql(1 => 2, 3 => 4) end it 'with string' do @a.set_preference(:is_hash, '{"0"=>{"answer"=>"1", "value"=>"No"}}') expect(@a.preferences[:is_hash]).to be_is_a(Hash) end it 'with boolean' do @a.set_preference(:is_hash, false) expect(@a.preferences[:is_hash]).to be_is_a(Hash) @a.set_preference(:is_hash, true) expect(@a.preferences[:is_hash]).to be_is_a(Hash) end it 'with simple array' do @a.set_preference(:is_hash, ['key', 'value', 'another key', 'another value']) expect(@a.preferences[:is_hash]).to be_is_a(Hash) expect(@a.preferences[:is_hash]['key']).to eq('value') expect(@a.preferences[:is_hash]['another key']).to eq('another value') end it 'with a nested array' do @a.set_preference(:is_hash, [['key', 'value'], ['another key', 'another value']]) expect(@a.preferences[:is_hash]).to be_is_a(Hash) expect(@a.preferences[:is_hash]['key']).to eq('value') expect(@a.preferences[:is_hash]['another key']).to eq('another value') end it 'with single array' do expect { @a.set_preference(:is_hash, ['key']) }.to raise_error(ArgumentError) end end context 'converts any preferences to any values' do before do A.preference :product_ids, :any, default: [] A.preference :product_attributes, :any, default: {} end it 'with array' do expect(@a.preferences[:product_ids]).to eq([]) @a.set_preference(:product_ids, [1, 2]) expect(@a.preferences[:product_ids]).to eq([1, 2]) end it 'with hash' do expect(@a.preferences[:product_attributes]).to eq({}) @a.set_preference(:product_attributes, id: 1, name: 2) expect(@a.preferences[:product_attributes]).to eq(id: 1, name: 2) end end end describe 'persisted preferables' do before(:all) do class CreatePrefTest < ActiveRecord::Migration[4.2] def self.up create_table :pref_tests do |t| t.string :col t.text :preferences end end def self.down drop_table :pref_tests end end 
@migration_verbosity = ActiveRecord::Migration.verbose ActiveRecord::Migration.verbose = false CreatePrefTest.migrate(:up) class PrefTest < Spree::Base preference :pref_test_pref, :string, default: 'abc' preference :pref_test_any, :any, default: [] end end after(:all) do CreatePrefTest.migrate(:down) ActiveRecord::Migration.verbose = @migration_verbosity end before do # load PrefTest table PrefTest.first @pt = PrefTest.create end describe 'pending preferences for new activerecord objects' do it 'saves preferences after record is saved' do pr = PrefTest.new pr.set_preference(:pref_test_pref, 'XXX') expect(pr.get_preference(:pref_test_pref)).to eq('XXX') pr.save! expect(pr.get_preference(:pref_test_pref)).to eq('XXX') end it 'saves preferences for serialized object' do pr = PrefTest.new pr.set_preference(:pref_test_any, [1, 2]) expect(pr.get_preference(:pref_test_any)).to eq([1, 2]) pr.save! expect(pr.get_preference(:pref_test_any)).to eq([1, 2]) end end it 'clear preferences' do @pt.set_preference(:pref_test_pref, 'xyz') expect(@pt.preferred_pref_test_pref).to eq('xyz') @pt.clear_preferences expect(@pt.preferred_pref_test_pref).to eq('abc') end it 'clear preferences when record is deleted' do @pt.save! @pt.preferred_pref_test_pref = 'lmn' @pt.save! @pt.destroy @pt1 = PrefTest.new(col: 'aaaa') @pt1.id = @pt.id @pt1.save! expect(@pt1.get_preference(:pref_test_pref)).to eq('abc') end end end
29.816092
91
0.617965
08213a53a5da55b86b0fec75a7b389c563a57f99
1,984
Puppet::Type.newtype(:data_fragment) do
  @doc = "Create a data_fragment to be used by a data_file.
    the `data_fragment` type creates a file fragment to be collected based on the tag.
    The example is based on exported resources.

    Example:
    @@data_fragment { \"uniqe_name_${::fqdn}\":
      tag => 'unique_name',
      order => 10, # Optional. Default to 10
      content => 'some content' # OR
      content => template('template.erb') # OR
      source => 'puppet:///path/to/file'
    }
  "

  newparam(:name, :namevar => true) do
    desc "Unique name"
  end

  newparam(:target) do
    desc "Target"
  end

  newparam(:content) do
    desc "Content"
  end

  newparam(:source) do
    desc "Source"
  end

  newparam(:order) do
    desc "Order"
    defaultto '10'
    validate do |val|
      fail Puppet::ParseError, '$order is not a string or integer.' if !(val.is_a? String or val.is_a? Integer)
      fail Puppet::ParseError, "Order cannot contain '/', ':', or '\n'." if val.to_s =~ /[:\n\/]/
    end
  end

  newparam(:tag) do
    desc "Tag name to be used by data_file to collect all data_fragments by tag name"
  end

  autorequire(:file) do
    if catalog.resources.select {|x| x.class == Puppet::Type.type(:data_file) and (x[:path] == self[:target] || x.title == self[:target]) }.empty?
      warning "Target Data_file with path of #{self[:target]} not found in the catalog"
    end
  end

  validate do
    # Check if target is set
    fail Puppet::ParseError, "Target not set" if self[:target].nil?

    # Check if tag is set
    fail Puppet::ParseError, "Tag not set" if self[:tag].nil?

    # Check if either source or content is set. raise error if none is set
    fail Puppet::ParseError, "Set either 'source' or 'content'" if self[:source].nil? && self[:content].nil?

    # Check if both are set, if so rais error
    fail Puppet::ParseError, "Can't use 'source' and 'content' at the same time" if !self[:source].nil? && !self[:content].nil?
  end
end
30.523077
146
0.641633
08aa8cd34907cf710603c3cef9181239cf05f9a3
778
module GitlabCli
  module Util
    class Groups
      def self.get_all
        begin
          response = Array.new
          per_page = 100
          page = 0

          # If we get `per_page` results per page then we keep going.
          # If we get less than that we're done.
          while response.length == page * per_page do
            page += 1
            url = "groups?page=%s&per_page=%s" % [page, per_page]
            page_data = GitlabCli::Util.rest "get", url
            response.concat JSON.parse(page_data)
          end
        rescue Exception => e
          raise e
        else
          projects = response.map do |p|
            GitlabCli::Group.new(p['id'],p['name'],p['path'],p['owner_id'])
          end
        end
      end
    end
  end
end
25.096774
75
0.517995
1c07de02f6a7603eafb07ba1dce87a6113b0ba45
1,176
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.

ActiveRecord::Schema.define(version: 2019_12_16_000049) do

  create_table "comics", force: :cascade do |t|
    t.string "publisher"
    t.string "year"
    t.string "issue_num"
    t.string "cover_price"
    t.string "title"
    t.string "subtitle"
    t.string "condition"
    t.string "html_link_to_dealer"
    t.string "user_id"
  end

  create_table "users", force: :cascade do |t|
    t.string "username"
    t.string "email"
    t.string "password_digest"
  end

end
34.588235
86
0.742347
f77cbef83560985c59233b9d44866ed2755041d0
3,285
# ## Schema Information # Schema version: 20131205021701 # # Table name: `tags` # # ### Columns # # Name | Type | Attributes # ----------- | ------------------ | --------------------------- # **`id`** | `integer` | `not null, primary key` # **`name`** | `string(255)` | # class Tag < ActiveRecord::Base acts_as_followable VALID_PROGRAMMING_LANGUAGES = ["github", "slideshare", "python", "ruby", "javascript", "php", "objective-c", "java", "viml", "perl", "clojure", "coffeescript", "scala", "erlang", "emacslisp", "go", "haskell", "actionscript", "lua", "groovy", "git", "commonlisp", "puppet", "hackerdesk", "css", "assembly", "ocaml", "haxe", "scheme", "vim", "coldfusion", "d", "rails", "powershell", "objective-j", "bash", "ios", "html", "dart", "matlab", "jquery", "android", "arduino", "xcode", "osx", "html5", "css3", "visualbasic", "rubyonrails", "mysql", "delphi", "smalltalk", "mac", "iphone", "linux", "ipad", "mirah", "nodejs", "tcl", "apex", "wordpress", "cocoa", "nodejs", "heroku", "io", "js", "dcpu-16asm", "django", "zsh", "rspec", "programming", "vala", "sql", "mongodb", "workspace", "racket", "twitter", "terminal", "development", "opensource", "testing", "design", "emberjs", "security", "verilog", "net", "blurandpure", "mobile", "sass", "code", "webkit", "api", "json", "nginx", "elixir", "agile", "bundler", "emacs", "web", "drupal", "unix", "csharp", "photoshop", "nodejs", "facebook", "log", "reference", "cli", "sublimetext", "responsive", "tdd", "puredata", "asp", "codeigniter", "maven", "rubygems", "gem", "oracle", "nosql", "rvm", "ui", "branch", "responsivedesign", "fortran", "postgresql", "latex", "nimrod", "documentation", "rubymotion", "redis", "backbone", "ubuntu", "regex", "textmate", "fancy", "ssh", "performance", "spring", "sublimetext2", "boo", "flex", "coq", "aliases", "browser", "webdevelopment", "rest", "eclipse", "tips", "factor", "commandline", "sublimetext", "ooc", "blog", "unittesting", "server", "history", "lion", "tip", "autohotkey", "alias", "prolog", "apple", "standardml", "vhdl", "objectivec", "statistics", "impactgameengine", "apache", "cucumber", "cpp", "meta", "gist", "dropbox", "gitignore", "rails3", "debug", "flask", "cplusplus", "monitoring", "angularjs", "oauth", "oop", "usability", "flexmojos", "sentry", "expressionengine", "ee"] scope :from_topic, lambda { |topic| where(name: topic) } def subscribe(user) user.follow(self) end def unsubscribe(user) user.stop_following(self) end end
61.981132
121
0.459361
21fa7a28871e114b4c30d71f6a04d775be2e8bcc
2,031
test_name 'Windows ACL Module - Remove Permissions from a File'

confine(:to, :platform => 'windows')

#Globals
target_parent = 'c:/temp'
target = 'c:/temp/rem_perm_file.txt'
user_id = 'bob'

file_content = 'I love puppet, puppet love puppet, puppet love!'
verify_content_command = "cat /cygdrive/c/temp/rem_perm_file.txt"
file_content_regex = /#{file_content}/

verify_acl_command = "icacls #{target}"
acl_regex = /.*\\bob:\(F\)/

#Apply Manifest
acl_manifest_apply = <<-MANIFEST
file { '#{target_parent}':
  ensure => directory
}

file { '#{target}':
  ensure => file,
  content => '#{file_content}',
  require => File['#{target_parent}']
}

user { '#{user_id}':
  ensure => present,
  groups => 'Users',
  managehome => true,
  password => "L0v3Pupp3t!"
}

acl { '#{target}':
  permissions => [
    { identity => '#{user_id}', rights => ['full'] },
  ],
}
MANIFEST

#Remove Manifest
acl_manifest_remove = <<-MANIFEST
acl { '#{target}':
  purge => 'listed_permissions',
  permissions => [
    { identity => '#{user_id}', rights => ['full'] },
  ],
}
MANIFEST

#Tests
agents.each do |agent|
  step "Execute Apply Manifest"
  on(agent, puppet('apply', '--debug'), :stdin => acl_manifest_apply) do |result|
    assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!')
  end

  step "Verify that ACL Rights are Correct"
  on(agent, verify_acl_command) do |result|
    assert_match(acl_regex, result.stdout, 'Expected ACL was not present!')
  end

  step "Execute Remove Manifest"
  on(agent, puppet('apply', '--debug'), :stdin => acl_manifest_remove) do |result|
    assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!')
  end

  step "Verify that ACL Rights are Correct"
  on(agent, verify_acl_command) do |result|
    assert_no_match(acl_regex, result.stdout, 'Unexpected ACL was present!')
  end

  step "Verify File Data Integrity"
  on(agent, verify_content_command) do |result|
    assert_match(file_content_regex, result.stdout, 'Expected file content is invalid!')
  end
end
25.3875
88
0.679468
115254bad9fc3d2954cefd2757392a961ba30461
39,557
# coding: utf-8 # These specs are a kind of integration spec. They're not unit testing small pieces # of code, it's just parsing a range of PDF files and ensuring the result is # consistent. An extra check to make sure parsing these files will continue # to work for our users. # # Where possible, specs that unit test correctly should be written in addition to # these describe PDF::Reader, "integration specs" do context "cairo-unicode-short" do let(:filename) { pdf_spec_file("cairo-unicode-short") } it "interprets unicode strings correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eql("Chunky Bacon") end end end context "vertical-text-in-identity-v" do let(:filename) { pdf_spec_file("vertical-text-in-identity-v") } it "interprets Identity-V encoded strings correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text.split.map(&:strip)).to eql(%w{V e r t i c a l T e x t}) end end end context "adobe_sample" do let(:filename) { pdf_spec_file("adobe_sample") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to include("This is a sample PDF file") expect(page.text).to include("If you can read this, you already have Adobe Acrobat") end end end context "dutch PDF with NBSP characters" do let(:filename) { pdf_spec_file("dutch") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.pages.size).to eql(3) page = reader.page(1) expect(page.text).to include("Dit\302\240is\302\240een\302\240pdf\302\240test\302\240van\302\240drie\302\240pagina") expect(page.text).to include("’s") expect(page.text).to include("Pagina\302\2401") end end end context "PDF with a difference table" do let(:filename) { pdf_spec_file("difference_table") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eql("Goiás") end end end context "PDF with a difference table (v2)" do let(:filename) { pdf_spec_file("difference_table2") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eql("This PDF contains ligatures,for example in “file”and “floor”.") end end end context "PDF with a content stream that has trailing whitespace" do let(:filename) { pdf_spec_file("content_stream_trailing_whitespace") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to match(/Tax\s+Invoice/) end end end context "PDF with a content stream that is missing an operator (has hanging params)" do let(:filename) { pdf_spec_file("content_stream_missing_final_operator") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to match(/Locatrix/) expect(reader.page(2).text).to match(/Ubuntu/) end end end # this spec is to detect an hard lock issue some people were encountering on some OSX # systems. Real pain to debug. context "PDF with a string containing a high byte (D1) under MacRomanEncoding" do let(:filename) { pdf_spec_file("hard_lock_under_osx") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text[0,1]).to eql("’") end end end context "PDF that has a content stream with a broken string" do let(:filename) { pdf_spec_file("broken_string") } # this file used to get us into a hard, endless loop. 
Make sure that doesn't still happen it "doesn't hang when extracting doc info" do Timeout::timeout(3) do expect { reader = PDF::Reader.new(filename) reader.info }.to raise_error(PDF::Reader::MalformedPDFError) end end end context "PDF with a stream that has its length specified as an indirect reference" do let(:filename) { pdf_spec_file("content_stream_with_length_as_ref") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to eql("Hello World") end end end # PDF::Reader::XRef#object was saving an incorrect position when seeking. We # were saving the current pos of the underlying IO stream, then seeking back # to it. This was fine, except when there was still content in the buffer. context "PDF with a stream that has its length specified as an indirect reference and uses windows line breaks" do let(:filename) { pdf_spec_file("content_stream_with_length_as_ref_and_windows_breaks") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to eql("Hello World") end end end context "PDF has a content stream refers to a non-existant font" do let(:filename) { pdf_spec_file("content_stream_refers_to_invalid_font") } it "raises an exception" do expect { reader = PDF::Reader.new(filename) reader.page(1).text }.to raise_error(PDF::Reader::MalformedPDFError) end end context "Empty file" do it "raises an exception" do expect { PDF::Reader.new(StringIO.new("")) }.to raise_error(PDF::Reader::MalformedPDFError) end end context "PDF that uses an ASCII85Decode filter" do let(:filename) { pdf_spec_file("ascii85_filter") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to match(/Et Iunia se/) end end end context "PDF that has an inline image in a content stream with no line breaks" do let(:filename) { pdf_spec_file("inline_image_single_line_content_stream") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text.strip[0,7]).to eql("WORKING") end end end context "PDF that uses Form XObjects to repeat content" do let(:filename) { pdf_spec_file("form_xobject") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to eql("James Healy") expect(reader.page(2).text).to eql("James Healy") end end end context "PDF that uses Form XObjects to repeat content" do let(:filename) { pdf_spec_file("form_xobject_more") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to include("Some regular content") expect(reader.page(1).text).to include("James Healy") expect(reader.page(2).text).to include("€10") expect(reader.page(2).text).to include("James Healy") end end end context "PDF that uses indirect Form XObjects to repeat content" do let(:filename) { pdf_spec_file("indirect_xobject") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).not_to be_nil end end end context "PDF that has a Form XObjects that references itself" do let(:filename) { pdf_spec_file("form_xobject_recursive") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to include("this form XObject contains a reference to itself") end end end context "PDF that uses multiple content streams for a single page" do let(:filename) { pdf_spec_file("split_params_and_operator") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to include("My name 
is") expect(reader.page(1).text).to include("James Healy") end end end context "PDF that has a single space after the EOF marker" do let(:filename) { pdf_spec_file("space_after_eof") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to eql("Hello World") end end end context "PDF that was generated in open office 3" do let(:filename) { pdf_spec_file("oo3") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to include("test") end end end context "PDF has newlines at the start of a content stream" do let(:filename) { pdf_spec_file("content_stream_begins_with_newline") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to eql("This file has a content stream that begins with \\n\\n") end end end context "encrypted_version1_revision2_40bit_rc4_user_pass_apples" do let(:filename) { pdf_spec_file("encrypted_version1_revision2_40bit_rc4_user_pass_apples") } context "with the user pass" do let(:pass) { "apples" } it "correctly extracts text" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :Creator=>"Writer", :Producer=>"LibreOffice 3.3", :CreationDate=>"D:20110814231057+10'00'", :ModDate=>"D:20170115142929+11'00'" ) end end end context "with the owner pass" do let(:pass) { "password" } it "correctly extracts text" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :Creator=>"Writer", :Producer=>"LibreOffice 3.3", :CreationDate=>"D:20110814231057+10'00'", :ModDate=>"D:20170115142929+11'00'" ) end end end end context "encrypted_version1_revision2_128bit_rc4_blank_user_password" do let(:filename) { pdf_spec_file("encrypted_version1_revision2_128bit_rc4_blank_user_password") } context "with no user pass" do it "correctly extracts text" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to eql("WOOOOO DOCUMENT!") end end end context "with the owner pass" do it "correctly extracts text" end end context "encrypted_version2_revision3_128bit_rc4_blank_user_pass" do let(:filename) { pdf_spec_file("encrypted_version2_revision3_128bit_rc4_blank_user_pass") } context "with no user pass" do it "correctly extracts text" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to eql("This sample file is encrypted with no user password") end end end context "with the owner pass" do it "correctly extracts text" end end context "encrypted_version1_revision2_128bit_rc4_no_doc_id" do let(:filename) {pdf_spec_file("encrypted_version1_revision2_128bit_rc4_no_doc_id") } context "with no user pass" do it "correctly extracts text" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to eql( "This encryped file breaks compatability with the PDF spec " \ "because it has no document ID" ) end end end context "with the owner pass" do it "correctly extracts text" end end context "encrypted_version2_revision3_128bit_rc4_user_pass_apples" do let(:filename) { pdf_spec_file("encrypted_version2_revision3_128bit_rc4_user_pass_apples") } context "with the user pass" do let(:pass) { "apples" } it "correctly extracts text" do 
PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :Creator=>"Writer", :Producer=>"LibreOffice 3.3", :CreationDate=>"D:20110814231057+10'00'" ) end end end context "with the owner pass" do let(:pass) { "password" } it "correctly extracts text" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :Creator=>"Writer", :Producer=>"LibreOffice 3.3", :CreationDate=>"D:20110814231057+10'00'" ) end end end context "with no pass" do it "raises an exception" do expect { PDF::Reader.open(filename) do |reader| reader.page(1).text end }.to raise_error(PDF::Reader::EncryptedPDFError) end end end context "encrypted_version4_revision_4user_pass_apples_enc_metadata" do let(:filename) { pdf_spec_file("encrypted_version4_revision4_128bit_rc4_user_pass_apples_enc_metadata") } context "with the user pass" do let(:pass) { "apples" } it "correctly extracts text" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :Creator=>"Writer", :Producer=>"LibreOffice 3.3", :CreationDate=>"D:20110814231057+10'00'", :ModDate=>"D:20170114125054+11'00'" ) end end end context "with the owner pass" do let(:pass) { "password" } it "correctly extracts text" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :Creator=>"Writer", :Producer=>"LibreOffice 3.3", :CreationDate=>"D:20110814231057+10'00'", :ModDate=>"D:20170114125054+11'00'" ) end end end end context "encrypted_version4_revision4_128bit_rc4_user_pass_apples_unenc_metadata" do let(:filename) { pdf_spec_file("encrypted_version4_revision4_128bit_rc4_user_pass_apples_unenc_metadata") } context "with the user pass" do let(:pass) { "apples" } it "correctly extracts text" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :Creator=>"Writer", :Producer=>"LibreOffice 3.3", :CreationDate=>"D:20110814231057+10'00'", :ModDate => "D:20170114125141+11'00'" ) end end end context "with the owner pass" do let(:pass) { "password" } it "correctly extracts text" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :Creator=>"Writer", :Producer=>"LibreOffice 3.3", :CreationDate=>"D:20110814231057+10'00'", :ModDate => "D:20170114125141+11'00'" ) end end end end context "encrypted_version4_revision4_128bit_aes_user_pass_apples_enc_metadata" do let(:filename) { pdf_spec_file("encrypted_version4_revision4_128bit_aes_user_pass_apples_enc_metadata") } context "with the user pass" do 
let(:pass) { "apples" } it "correctly extracts text" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :CreationDate=>"D:20110814231057+10'00'", :Creator=>"Writer", :ModDate=>"D:20170115224117+11'00'", :Producer=>"LibreOffice 3.3", ) end end end context "with the owner pass" do let(:pass) { "password" } it "correctly extracts text" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :CreationDate=>"D:20110814231057+10'00'", :Creator=>"Writer", :ModDate=>"D:20170115224117+11'00'", :Producer=>"LibreOffice 3.3", ) end end end end context "encrypted_version4_revision4_128bit_aes_user_pass_apples_unenc_metadata" do let(:filename) { pdf_spec_file("encrypted_version4_revision4_128bit_aes_user_pass_apples_unenc_metadata") } context "with the user pass" do let(:pass) { "apples" } it "correctly extracts text" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :CreationDate=>"D:20110814231057+10'00'", :Creator=>"Writer", :ModDate=>"D:20170115224244+11'00'", :Producer=>"LibreOffice 3.3", ) end end end context "with the owner pass" do let(:pass) { "password" } it "correctly extracts text" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :CreationDate=>"D:20110814231057+10'00'", :Creator=>"Writer", :ModDate=>"D:20170115224244+11'00'", :Producer=>"LibreOffice 3.3", ) end end end end context "encrypted_version5_revision5_256bit_aes_user_pass_apples_enc_metadata" do let(:filename) { pdf_spec_file("encrypted_version5_revision5_256bit_aes_user_pass_apples_enc_metadata") } context "with the user pass" do let(:pass) { "apples" } it "correctly extracts text" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :Author => "Gyuchang Jun", :CreationDate => "D:20170312093033+00'00'", :Creator => "Microsoft Word", :ModDate => "D:20170312093033+00'00'" ) end end end context "with the owner pass" do let(:pass) { "password" } it "correctly extracts text" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :Author => "Gyuchang Jun", :CreationDate => "D:20170312093033+00'00'", :Creator => "Microsoft Word", :ModDate => "D:20170312093033+00'00'" ) end end end end context "encrypted_version5_revision5_256bit_aes_user_pass_apples_unenc_metadata" do let(:filename) { pdf_spec_file("encrypted_version5_revision5_256bit_aes_user_pass_apples_unenc_metadata") } context "with the user pass" do let(:pass) { "apples" 
} it "correctly extracts text" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :Author => "Gyuchang Jun", :CreationDate => "D:20170312093033+00'00'", :Creator => "Microsoft Word", :ModDate => "D:20170312093033+00'00'" ) end end end context "with the owner pass" do let(:pass) { "password" } it "correctly extracts text" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.page(1).text).to include("This sample file is encrypted") end end it "correctly extracts info" do PDF::Reader.open(filename, :password => pass) do |reader| expect(reader.info).to eq( :Author => "Gyuchang Jun", :CreationDate => "D:20170312093033+00'00'", :Creator => "Microsoft Word", :ModDate => "D:20170312093033+00'00'" ) end end end end context "encrypted_version5_revision6_256bit_aes_user_pass_apples_enc_metadata" do let(:filename) { pdf_spec_file("encrypted_version5_revision6_256bit_aes_user_pass_apples_enc_metadata") } context "with the user pass" do let(:pass) { "apples" } # TODO: remove this spec it "raises UnsupportedFeatureError" do expect { PDF::Reader.open(filename, :password => pass) do |reader| reader.page(1).text end }.to raise_error(PDF::Reader::EncryptedPDFError) end it "correctly extracts text" it "correctly extracts info" end context "with the owner pass" do it "correctly extracts text" it "correctly extracts info" end end context "encrypted_version5_revision6_256bit_aes_user_pass_apples_unenc_metadata" do let(:filename) { pdf_spec_file("encrypted_version5_revision6_256bit_aes_user_pass_apples_unenc_metadata") } context "with the user pass" do let(:pass) { "apples" } # TODO: remove this spec it "raises UnsupportedFeatureError" do expect { PDF::Reader.open(filename, :password => pass) do |reader| reader.page(1).text end }.to raise_error(PDF::Reader::EncryptedPDFError) end it "correctly extracts text" it "correctly extracts info" end context "with the owner pass" do it "correctly extracts text" it "correctly extracts info" end end context "Encrypted PDF with an xref stream" do let(:filename) { pdf_spec_file("encrypted_and_xref_stream") } it "correctly extracts text" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to eq("This text is encrypted") end end it "correctly parses indirect objects" do PDF::Reader.open(filename) do |reader| expect { reader.objects.values }.not_to raise_error end end end context "PDF with inline images" do let(:filename) { pdf_spec_file("inline_image") } it "extracts inline images correctly" do @browser = PDF::Reader.new(filename) @page = @browser.page(1) receiver = PDF::Reader::RegisterReceiver.new @page.walk(receiver) callbacks = receiver.series(:begin_inline_image, :begin_inline_image_data, :end_inline_image) # inline images should trigger 3 callbacks. The first with no args. 
expect(callbacks[0]).to eql(:name => :begin_inline_image, :args => []) # the second with the image header (colorspace, etc) expect(callbacks[1]).to eql(:name => :begin_inline_image_data, :args => [:CS, :RGB, :I, true, :W, 234, :H, 70, :BPC, 8]) # the last with the image data expect(callbacks[2][:name]).to eql :end_inline_image image_data = callbacks[2][:args].first expect(image_data).to be_a(String) expect(image_data.size).to eql 49140 expect(image_data[0,3].unpack("C*")).to eql [255,255,255] expect(image_data[-3,3].unpack("C*")).to eql [255,255,255] end end context "PDF with a page that has multiple content streams" do let(:filename) { pdf_spec_file("content_stream_as_array") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| expect(reader.page(1).text).to include("Arkansas Declaration Relating") end end end context "PDF with a junk prefix" do let(:filename) { pdf_spec_file("junk_prefix") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eql("This PDF contains junk before the %-PDF marker") end end end context "PDF with a 1024 bytes of junk prefix" do let(:filename) { pdf_spec_file("junk_prefix_1024") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eql("This PDF contains junk before the %-PDF marker") end end end context "PDF that has a cmap entry that uses ligatures" do let(:filename) { pdf_spec_file("ligature_integration_sample") } it "extracts text correctly" do # there are two locations in the following pdf that have the following sequence # [ 85, 68, 73, 192, 70] after cmap translation this should yield # [[114], [97], [102], [102, 105], [99]] or more specifically # [r, a, f, fi, c] # # prior to commit d37b4bf52e243dfb999fa0cda791449c50f6d16d # the fi would be returned as f PDF::Reader.open(filename) do |reader| page = reader.page(1) m = /raffic/.match(page.text) expect(m[0].to_s).to eql("raffic") end end end context "PDF that has a cmap entry that contains surrogate pairs" do let(:filename) { pdf_spec_file("surrogate_pair_integration_sample") } it "extracts text correctly" do # the following pdf has a sequence in it that requires 32-bit Unicode, pdf requires # all text to be stored in 16-bit. To acheive this surrogate-pairs are used. cmap # converts the surrogate-pairs back to 32-bit and ruby handles them nicely. 
# the following sequence exists in this pdf page # \u{1d475}\u{1d468}\u{1d47a}\u{1d46a}\u{1d468}\u{1d479} => NASCAR # these codepoints are in the "Math Alphanumeric Symbols (Italic) section of Unicode" # # prior to commit d37b4bf52e243dfb999fa0cda791449c50f6d16d # pdf-reader would return Nil instead of the correct unicode character PDF::Reader.open(filename) do |reader| page = reader.page(1) # 𝑵𝑨𝑺𝑪𝑨𝑹 utf8_str = [0x1d475, 0x1d468, 0x1d47a, 0x1d46a, 0x1d468, 0x1d479].pack("U*") expect(page.text).to include(utf8_str) end end end context "PDF that uses a standatd font and a ligature" do let(:filename) { pdf_spec_file("standard_font_with_a_difference") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eq("The following word uses a ligature: five") end end end context "PDF that uses a type1 font that isn't embedded and isn't one of the 14 built-ins" do let(:filename) { pdf_spec_file("type1-arial") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eq("This text uses a Type1 font that isn't embedded") end end end context "PDF that uses a TrueType font that isn't embedded and has no metrics" do let(:filename) { pdf_spec_file("truetype-arial") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to start_with("This text uses a TrueType font that isn't embedded") end end end context "PDF that uses a type3 bitmap font" do let(:filename) { pdf_spec_file("type3_font") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eq("a\nb\nc") end end end context "PDF that uses a type3 bitmap font with a rare FontMatrix" do let(:filename) { pdf_spec_file("type3_font_with_rare_font_matrix") } # TODO most type3 fonts have a FontMatrix entry of [ 0.001 0 0 0.001 0 0 ], # which matches the glyph scale factor of 1000 that non-type3 fonts use. # It's permitted for type3 fonts to use other FontMatrix values though, # and we should do a better job of extracting the text. # The Page is 200pts wide and 50pts high. The first letters for each word # *should* be positioned like so: # # P - X: 10.3 Y: 20 Width: 7.35 Height: 8.55 # G - X: 56.5 Y: 19.7 Width: 8.25 Height: 9.15 # A - X: 101.5 Y: 20 Width: 8.25 Height: 9 # it "extracts text correctly" do pending PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to include("Parallel Genetic Algorithms") end end end context "PDF with a Type0 font and Encoding is a CMap called OneByteIdentityH" do let(:filename) { pdf_spec_file("one-byte-identity") } # I'm not 100% confident that we'rr correctly handling OneByteIdentityH files in a way # that will always work. It works for the sample file I have though, so that's better than # nothing it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eq("abc") end end end context "PDF with rotated text" do let(:filename) { pdf_spec_file("rotated_text") } # TODO this spec isn't ideal as our support for extracting rotated text is quite # rubbish. I've added this to ensure we don't throw an exception with # rotated text. It's a start. 
it "extracts text without raising an exception" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text.split("\n").map(&:strip).slice(0,2)).to eq(["°","9"]) end end end context "PDF with a TJ operator that receives an array starting with a number" do let(:filename) { pdf_spec_file("TJ_starts_with_a_number") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text[0,18]).to eq("This file has a TJ") end end end context "PDF with a TJ operator that aims to correct for character spacing" do let(:filename) { pdf_spec_file("TJ_and_char_spacing") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text[15,17]).to eq("The big brown fox") end end end context "PDF with a page that's missing the MediaBox attribute" do let(:filename) { pdf_spec_file("mediabox_missing") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text[0,54]).to eq("This page is missing the compulsory MediaBox attribute") end end end context "PDF using a standard fint and no difference table" do let(:filename) { pdf_spec_file("standard_font_with_no_difference") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eq("This page uses contains a €") end end end context "PDF using zapf dingbats" do let(:filename) { pdf_spec_file("zapf") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to include("✄☎✇") end end end context "PDF using symbol text" do let(:filename) { pdf_spec_file("symbol") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to include("θρ") end end end context "Scanned PDF with invisible text added by ClearScan" do let(:filename) { pdf_spec_file("clearscan") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eq("This document was scanned and then OCRd with Adobe ClearScan") end end end context "PDF with text that contains a control char" do let(:filename) { pdf_spec_file("times-with-control-character") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to include("This text includes an ASCII control") end end end context "PDF where the top-level Pages object has no Type" do let(:filename) { pdf_spec_file("pages_object_missing_type") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to include("The top level Pages object has no Type") end end end context "PDF where the entries in a Kids array are direct objects, rather than indirect" do let(:filename) { pdf_spec_file("kids-as-direct-objects") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eq("page 1") end end end context "PDF with text positioned at 0,0" do let(:filename) { pdf_spec_file("minimal") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eq("Hello World") end end end context "Malformed PDF" do let(:filename) { pdf_spec_file("trailer_root_is_not_a_dict") } it "raises an exception if trailer Root is not a dict" do PDF::Reader.open(filename) do |reader| expect { reader.page(1) }.to raise_error(PDF::Reader::MalformedPDFError) end end end context "PDF with missing page 
data" do let(:filename) { pdf_spec_file("invalid_pages") } it "raises a MalformedPDFError when an InvalidPageError is raised internally" do PDF::Reader.open(filename) do |reader| expect { reader.pages }.to raise_error(PDF::Reader::MalformedPDFError) end end end context "PDF with MediaBox specified as an indirect object" do let(:filename) { pdf_spec_file("indirect_mediabox") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eq("The MediaBox for this page is specified via an indirect object") end end end context "PDF with overlapping chars to achieve fake bold effect" do let(:filename) { pdf_spec_file("overlapping-chars-xy-fake-bold") } let(:text) { "Some characters that overlap with different X and Y to achieve a fake bold effect" } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eq(text) end end end context "PDF with overlapping chars (same Y pos) to achieve fake bold effect" do let(:filename) { pdf_spec_file("overlapping-chars-x-fake-bold") } let(:text) { "Some characters that overlap with different X to achieve a fake bold effect" } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eq(text) end end end context "PDF with 180 page rotation followed by matrix transformations to undo it" do let(:filename) { pdf_spec_file("rotate-180") } let(:text) { "This text is rendered upside down\nand then the page is rotated" } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eq(text) end end end context "PDF with page rotation followed by matrix transformations to undo it" do let(:filename) { pdf_spec_file("rotate-then-undo") } let(:text) { "This page uses matrix transformations to print text sideways, " + "then has a Rotate key to fix it" } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eq(text) end end end context "PDF with page rotation of 90 degrees followed by matrix transformations to undo it" do let(:filename) { pdf_spec_file("rotate-90-then-undo") } let(:text) { "1: This PDF has Rotate:90 in the page metadata\n" + "2: to get a landscape layout, and then uses matrix\n" + "3: transformation to rotate the text back to normal" } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to eq(text) end end end context "PDF with page rotation of 90 degrees followed by matrix transformations to undo it" do let(:filename) { pdf_spec_file("rotate-90-then-undo-with-br-text") } it "extracts text correctly" do PDF::Reader.open(filename) do |reader| page = reader.page(1) expect(page.text).to include("This PDF ha sRotate:90 in the page") expect(page.text).to include("metadata to get a landscape layout") expect(page.text).to include("and text in bottom right quadrant") end end end end
32.160163
126
0.628384
2137e35a9afff9db4fe4e3b12fa5f46665999e4a
404
module I18n
  module Alchemy
    module TimeParser
      include DateParser
      extend self

      private

      def build_object(parsed_date)
        Time.utc(*parsed_date.values_at(:year, :mon, :mday, :hour, :min, :sec))
      end

      def i18n_scope
        :time
      end

      def valid_for_localization?(value)
        value.is_a?(Time) || value.is_a?(DateTime)
      end
    end
  end
end
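# Illustrative sketch (editorial, not part of the original file): assuming the
# included DateParser yields a parsed hash with the keys used above, build_object
# assembles a UTC Time from it in exactly this order:
#
#   parsed = { :year => 2013, :mon => 6, :mday => 30, :hour => 23, :min => 59, :sec => 0 }
#   Time.utc(*parsed.values_at(:year, :mon, :mday, :hour, :min, :sec))
#   # => 2013-06-30 23:59:00 UTC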
17.565217
79
0.59901
7a0195605e563e028a45dbf81eb0f6145e4d8525
5,934
require 'everywhere/util' module ActiveRecord class PredicateBuilder if ActiveRecord::VERSION::STRING > '3.1' class << self include Everywhere::Util # >= 3.2.4, >= 3.1.5 if ActiveRecord::PredicateBuilder.method(:build_from_hash).arity == -4 def build_from_hash_with_not_and_like_and_not_like(engine, attributes, default_table, allow_table_name = true) attributes_with_not_and_like_and_not_like = attributes.map do |column, value| # {key: {not: value}} if value.is_a?(Hash) && (value.keys.size == 1) && value.keys.first.in?([:not, :like, :not_like]) ["#{column}__#{value.keys.first}__", value.values.first] else [column, value] end end build_from_hash_without_not_and_like_and_not_like(engine, attributes_with_not_and_like_and_not_like, default_table, allow_table_name).map do |rel| if rel.left.name.to_s.ends_with?('__not__') rel.left.name = rel.left.name.to_s.sub(/__not__$/, '').to_sym negate rel elsif rel.left.name.to_s.ends_with?('__like__') rel.left.name = rel.left.name.to_s.sub(/__like__$/, '').to_sym Arel::Nodes::Matches.new rel.left, rel.right elsif rel.left.name.to_s.ends_with?('__not_like__') rel.left.name = rel.left.name.to_s.sub(/__not_like__$/, '').to_sym Arel::Nodes::DoesNotMatch.new rel.left, rel.right else rel end end end # < 3.2.4, < 3.1.5, >= 4.0? else def build_from_hash_with_not_and_like_and_not_like(engine, attributes, default_table) attributes_with_not_and_like_and_not_like = attributes.map do |column, value| # {key: {not: value}} if value.is_a?(Hash) && (value.keys.size == 1) && value.keys.first.in?([:not, :like, :not_like]) ["#{column}__#{value.keys.first}__", value.values.first] else [column, value] end end build_from_hash_without_not_and_like_and_not_like(engine, attributes_with_not_and_like_and_not_like, default_table).map do |rel| if rel.left.name.to_s.ends_with?('__not__') rel.left.name = rel.left.name.to_s.sub(/__not__$/, '').to_sym negate rel elsif rel.left.name.to_s.ends_with?('__like__') rel.left.name = rel.left.name.to_s.sub(/__like__$/, '').to_sym Arel::Nodes::Matches.new rel.left, rel.right elsif rel.left.name.to_s.ends_with?('__not_like__') rel.left.name = rel.left.name.to_s.sub(/__not_like__$/, '').to_sym Arel::Nodes::DoesNotMatch.new rel.left, rel.right else rel end end end end alias_method_chain :build_from_hash, :not_and_like_and_not_like end else include Everywhere::Util # >= 3.0.13 if ActiveRecord::PredicateBuilder.method(:build_from_hash).arity == -3 def build_from_hash_with_not_and_like_and_not_like(attributes, default_table, allow_table_name = true) attributes_with_not_and_like_and_not_like = attributes.map do |column, value| # {key: {not: value}} if value.is_a?(Hash) && (value.keys.size == 1) && ((value.keys.first == :not) || (value.keys.first == :like) || (value.keys.first == :not_like)) ["#{column}__#{value.keys.first}__", value.values.first] else [column, value] end end build_from_hash_without_not_and_like_and_not_like(attributes_with_not_and_like_and_not_like, default_table, allow_table_name).map do |rel| if rel.left.name.to_s.ends_with?('__not__') rel.left.name = rel.left.name.to_s.sub(/__not__$/, '').to_sym negate rel elsif rel.left.name.to_s.ends_with?('__like__') rel.left.name = rel.left.name.to_s.sub(/__like__$/, '').to_sym Arel::Nodes::Matches.new rel.left, rel.right elsif rel.left.name.to_s.ends_with?('__not_like__') rel.left.name = rel.left.name.to_s.sub(/__not_like__$/, '').to_sym Arel::Nodes::DoesNotMatch.new rel.left, rel.right else rel end end end # < 3.0.13 else def build_from_hash_with_not_and_like_and_not_like(attributes, default_table) 
attributes_with_not_and_like_and_not_like = attributes.map do |column, value| # {key: {not: value}} if value.is_a?(Hash) && (value.keys.size == 1) && ((value.keys.first == :not) || (value.keys.first == :like) || (value.keys.first == :not_like)) ["#{column}__#{value.keys.first}__", value.values.first] else [column, value] end end build_from_hash_without_not_and_like_and_not_like(attributes_with_not_and_like_and_not_like, default_table).map do |rel| if rel.left.name.to_s.ends_with?('__not__') rel.left.name = rel.left.name.to_s.sub(/__not__$/, '').to_sym negate rel elsif rel.left.name.to_s.ends_with?('__like__') rel.left.name = rel.left.name.to_s.sub(/__like__$/, '').to_sym Arel::Nodes::Matches.new rel.left, rel.right elsif rel.left.name.to_s.ends_with?('__not_like__') rel.left.name = rel.left.name.to_s.sub(/__not_like__$/, '').to_sym Arel::Nodes::DoesNotMatch.new rel.left, rel.right else rel end end end end alias_method_chain :build_from_hash, :not_and_like_and_not_like end end end
47.854839
158
0.583418
019d5fc3c5f4fa12ff0a0840f751aa412a7b2788
1,024
module Gitlab
  class License
    module Boundary
      BOUNDARY_START = /(\A|\r?\n)-*BEGIN .+? LICENSE-*\r?\n/.freeze
      BOUNDARY_END = /\r?\n-*END .+? LICENSE-*(\r?\n|\z)/.freeze

      class << self
        def add_boundary(data, product_name)
          data = remove_boundary(data)

          product_name.upcase!

          pad = lambda do |message, width|
            total_padding = [width - message.length, 0].max

            padding = total_padding / 2.0
            [
              '-' * padding.ceil,
              message,
              '-' * padding.floor
            ].join
          end

          [
            pad.call("BEGIN #{product_name} LICENSE", 60),
            data.strip,
            pad.call("END #{product_name} LICENSE", 60)
          ].join("\n")
        end

        def remove_boundary(data)
          after_boundary = data.split(BOUNDARY_START).last
          in_boundary = after_boundary.split(BOUNDARY_END).first

          in_boundary
        end
      end
    end
  end
end
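# Illustrative sketch (editorial, not part of the original file): add_boundary
# upcases the product name in place and centres "BEGIN <NAME> LICENSE" /
# "END <NAME> LICENSE" in a 60-character banner, padding each side with hyphens.
#
#   Gitlab::License::Boundary.add_boundary("payload", "GitLab".dup)
#   # first line pads "BEGIN GITLAB LICENSE" (20 chars) with 20 hyphens on each side,
#   # then the stripped payload, then the matching END banner, joined with "\n".
#   # remove_boundary reverses this by splitting on BOUNDARY_START and BOUNDARY_END.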
24.97561
69
0.510742
5d2b6b2d56a01ba277dbf5218fdaf7e23c749c1d
2,708
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2014 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++

require 'spec_helper'

module OpenProject
  describe I18n do
    include Redmine::I18n

    let(:format) { '%d/%m/%Y' }

    after do
      Time.zone = nil
    end

    describe 'with user time zone' do
      before { allow(User.current).to receive(:time_zone).and_return(ActiveSupport::TimeZone['Athens'])}

      it 'returns a date in the user timezone for a utc timestamp' do
        Time.zone = 'UTC'
        time = Time.zone.local(2013, 06, 30, 23, 59)

        expect(format_time_as_date(time,format)).to eq '01/07/2013'
      end

      it 'returns a date in the user timezone for a non-utc timestamp' do
        Time.zone = 'Berlin'
        time = Time.zone.local(2013, 06, 30, 23, 59)

        expect(format_time_as_date(time,format)).to eq '01/07/2013'
      end
    end

    describe 'without user time zone' do
      before { allow(User.current).to receive(:time_zone).and_return(nil)}

      it 'returns a date in the local system timezone for a utc timestamp' do
        Time.zone = 'UTC'
        time = Time.zone.local(2013, 06, 30, 23, 59)
        allow(time).to receive(:localtime).and_return(ActiveSupport::TimeZone['Athens'].local(2013, 07, 01, 01, 59))

        expect(format_time_as_date(time,format)).to eq '01/07/2013'
      end

      it 'returns a date in the original timezone for a non-utc timestamp' do
        Time.zone = 'Berlin'
        time = Time.zone.local(2013, 06, 30, 23, 59)

        expect(format_time_as_date(time,format)).to eq '30/06/2013'
      end
    end
  end
end
36.594595
116
0.692024
ffbb2e3a61b75d7b0ba185a9c2ee65b24dd36bec
3,273
# frozen_string_literal: true

RSpec.describe "bundle install" do
  context "with duplicated gems" do
    it "will display a warning" do
      install_gemfile <<-G, :raise_on_error => false
        gem 'rails', '~> 4.0.0'
        gem 'rails', '~> 4.0.0'
      G

      expect(err).to include("more than once")
    end
  end

  context "with --gemfile" do
    it "finds the gemfile" do
      gemfile bundled_app("NotGemfile"), <<-G
        source "#{file_uri_for(gem_repo1)}"
        gem 'rack'
      G

      bundle :install, :gemfile => bundled_app("NotGemfile")

      # Specify BUNDLE_GEMFILE for `the_bundle`
      # to retrieve the proper Gemfile
      ENV["BUNDLE_GEMFILE"] = "NotGemfile"
      expect(the_bundle).to include_gems "rack 1.0.0"
    end
  end

  context "with gemfile set via config" do
    before do
      gemfile bundled_app("NotGemfile"), <<-G
        source "#{file_uri_for(gem_repo1)}"
        gem 'rack'
      G

      bundle "config set --local gemfile #{bundled_app("NotGemfile")}"
    end

    it "uses the gemfile to install" do
      bundle "install"
      bundle "list"

      expect(out).to include("rack (1.0.0)")
    end

    it "uses the gemfile while in a subdirectory" do
      bundled_app("subdir").mkpath
      bundle "install", :dir => bundled_app("subdir")
      bundle "list", :dir => bundled_app("subdir")

      expect(out).to include("rack (1.0.0)")
    end
  end

  context "with deprecated features" do
    it "reports that lib is an invalid option" do
      gemfile <<-G
        gem "rack", :lib => "rack"
      G

      bundle :install, :raise_on_error => false

      expect(err).to match(/You passed :lib as an option for gem 'rack', but it is invalid/)
    end
  end

  context "with engine specified in symbol", :jruby do
    it "does not raise any error parsing Gemfile" do
      install_gemfile <<-G
        source "#{file_uri_for(gem_repo1)}"
        ruby "#{RUBY_VERSION}", :engine => :jruby, :engine_version => "#{RUBY_ENGINE_VERSION}"
      G

      expect(out).to match(/Bundle complete!/)
    end

    it "installation succeeds" do
      install_gemfile <<-G
        source "#{file_uri_for(gem_repo1)}"
        ruby "#{RUBY_VERSION}", :engine => :jruby, :engine_version => "#{RUBY_ENGINE_VERSION}"
        gem "rack"
      G

      expect(the_bundle).to include_gems "rack 1.0.0"
    end
  end

  context "with a Gemfile containing non-US-ASCII characters" do
    it "reads the Gemfile with the UTF-8 encoding by default" do
      install_gemfile <<-G
        str = "Il était une fois ..."
        puts "The source encoding is: " + str.encoding.name
      G

      expect(out).to include("The source encoding is: UTF-8")
      expect(out).not_to include("The source encoding is: ASCII-8BIT")
      expect(out).to include("Bundle complete!")
    end

    it "respects the magic encoding comment" do
      # NOTE: This works thanks to #eval interpreting the magic encoding comment
      install_gemfile <<-G
        # encoding: iso-8859-1
        str = "Il #{"\xE9".dup.force_encoding("binary")}tait une fois ..."
        puts "The source encoding is: " + str.encoding.name
      G

      expect(out).to include("The source encoding is: ISO-8859-1")
      expect(out).to include("Bundle complete!")
    end
  end
end
29.486486
94
0.620837
03b04e9a3ba556815a6c3dae520ba8a7c6ec7acd
1,084
Pod::Spec.new do |s|
  s.name = 'iface'
  s.version = '3.4.3'
  s.summary = 'iface'
  s.homepage = 'https://goodrequest.com/'
  s.author = { 'Dominik Petho' => '[email protected]' }
  s.license = 'MIT'
  s.platform = :ios

  s.source = { :http => "https://s3.eu-central-1.amazonaws.com/ios-frameworks.innovatrics.com/iface/#{s.version}/iface.framework.zip" }
  # s.source_files = "*", "DOT/*", "DOT.framework/Headers/*", "DOT.framework/*"

  s.ios.deployment_target = '10.1'
  s.ios.vendored_frameworks = "iface.framework"
end

# '0.1'       exact version 0.1
# '> 0.1'     Any version higher than 0.1
# '>= 0.1'    Version 0.1 and any higher version
# '< 0.1'     Any version lower than 0.1
# '<= 0.1'    Version 0.1 and any lower version
# '~> 0.1.2'  Version 0.1.2 and the versions up to 0.2, not including 0.2 and higher
# '~> 0.1'    Version 0.1 and the versions up to 1.0, not including 1.0 and higher
# '~> 0'      Version 0 and higher, this is basically the same as not having it.
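# Illustrative sketch (editorial, not part of the original file): applying the
# '~>' operator documented above, a consuming Podfile could pin this pod to the
# 3.4.x series like so:
#
#   pod 'iface', '~> 3.4.3'   # allows 3.4.3 and later patch releases, but not 3.5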
40.148148
148
0.586716
d562d8b25ea4f8ed00a13e9ce866cab2afaae142
45,630
require 'spec_helper' module Ci describe GitlabCiYamlProcessor, lib: true do let(:path) { 'path' } describe "#builds_for_ref" do let(:type) { 'test' } it "returns builds if no branch specified" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec" } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1) expect(config_processor.builds_for_stage_and_ref(type, "master").first).to eq({ stage: "test", stage_idx: 1, except: nil, name: :rspec, only: nil, commands: "pwd\nrspec", tag_list: [], options: {}, allow_failure: false, when: "on_success", environment: nil, }) end describe :only do it "does not return builds if only has another branch" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", only: ["deploy"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(0) end it "does not return builds if only has regexp with another branch" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", only: ["/^deploy$/"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(0) end it "returns builds if only has specified this branch" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", only: ["master"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1) end it "returns builds if only has a list of branches including specified" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: type, only: ["master", "deploy"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1) end it "returns builds if only has a branches keyword specified" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: type, only: ["branches"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1) end it "does not return builds if only has a tags keyword" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: type, only: ["tags"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0) end it "returns builds if only has a triggers keyword specified and a trigger is provided" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: type, only: ["triggers"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "deploy", false, true).size).to eq(1) end it "does not return builds if only has a triggers keyword specified and no trigger is provided" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: type, only: ["triggers"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0) end it "returns builds if only has current repository path" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: type, only: ["branches@path"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1) 
end it "does not return builds if only has different repository path" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: type, only: ["branches@fork"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0) end it "returns build only for specified type" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: "test", only: ["master", "deploy"] }, staging: { script: "deploy", type: "deploy", only: ["master", "deploy"] }, production: { script: "deploy", type: "deploy", only: ["master@path", "deploy"] }, }) config_processor = GitlabCiYamlProcessor.new(config, 'fork') expect(config_processor.builds_for_stage_and_ref("deploy", "deploy").size).to eq(2) expect(config_processor.builds_for_stage_and_ref("test", "deploy").size).to eq(1) expect(config_processor.builds_for_stage_and_ref("deploy", "master").size).to eq(1) end context 'for invalid value' do let(:config) { { rspec: { script: "rspec", type: "test", only: only } } } let(:processor) { GitlabCiYamlProcessor.new(YAML.dump(config)) } shared_examples 'raises an error' do it do expect { processor }.to raise_error(GitlabCiYamlProcessor::ValidationError, 'rspec job: only parameter should be an array of strings or regexps') end end context 'when it is integer' do let(:only) { 1 } it_behaves_like 'raises an error' end context 'when it is an array of integers' do let(:only) { [1, 1] } it_behaves_like 'raises an error' end context 'when it is invalid regex' do let(:only) { ["/*invalid/"] } it_behaves_like 'raises an error' end end end describe :except do it "returns builds if except has another branch" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", except: ["deploy"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1) end it "returns builds if except has regexp with another branch" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", except: ["/^deploy$/"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1) end it "does not return builds if except has specified this branch" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", except: ["master"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(0) end it "does not return builds if except has a list of branches including specified" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: type, except: ["master", "deploy"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0) end it "does not return builds if except has a branches keyword specified" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: type, except: ["branches"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0) end it "returns builds if except has a tags keyword" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: type, except: ["tags"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1) end it "does not return 
builds if except has a triggers keyword specified and a trigger is provided" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: type, except: ["triggers"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "deploy", false, true).size).to eq(0) end it "returns builds if except has a triggers keyword specified and no trigger is provided" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: type, except: ["triggers"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1) end it "does not return builds if except has current repository path" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: type, except: ["branches@path"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0) end it "returns builds if except has different repository path" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: type, except: ["branches@fork"] } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1) end it "returns build except specified type" do config = YAML.dump({ before_script: ["pwd"], rspec: { script: "rspec", type: "test", except: ["master", "deploy", "test@fork"] }, staging: { script: "deploy", type: "deploy", except: ["master"] }, production: { script: "deploy", type: "deploy", except: ["master@fork"] }, }) config_processor = GitlabCiYamlProcessor.new(config, 'fork') expect(config_processor.builds_for_stage_and_ref("deploy", "deploy").size).to eq(2) expect(config_processor.builds_for_stage_and_ref("test", "test").size).to eq(0) expect(config_processor.builds_for_stage_and_ref("deploy", "master").size).to eq(0) end context 'for invalid value' do let(:config) { { rspec: { script: "rspec", except: except } } } let(:processor) { GitlabCiYamlProcessor.new(YAML.dump(config)) } shared_examples 'raises an error' do it do expect { processor }.to raise_error(GitlabCiYamlProcessor::ValidationError, 'rspec job: except parameter should be an array of strings or regexps') end end context 'when it is integer' do let(:except) { 1 } it_behaves_like 'raises an error' end context 'when it is an array of integers' do let(:except) { [1, 1] } it_behaves_like 'raises an error' end context 'when it is invalid regex' do let(:except) { ["/*invalid/"] } it_behaves_like 'raises an error' end end end end describe "Scripts handling" do let(:config_data) { YAML.dump(config) } let(:config_processor) { GitlabCiYamlProcessor.new(config_data, path) } subject { config_processor.builds_for_stage_and_ref("test", "master").first } describe "before_script" do context "in global context" do let(:config) do { before_script: ["global script"], test: { script: ["script"] } } end it "return commands with scripts concencaced" do expect(subject[:commands]).to eq("global script\nscript") end end context "overwritten in local context" do let(:config) do { before_script: ["global script"], test: { before_script: ["local script"], script: ["script"] } } end it "return commands with scripts concencaced" do expect(subject[:commands]).to eq("local script\nscript") end end end describe "script" do let(:config) do { test: { script: ["script"] } } end it "return commands with scripts concencaced" do 
expect(subject[:commands]).to eq("script") end end describe "after_script" do context "in global context" do let(:config) do { after_script: ["after_script"], test: { script: ["script"] } } end it "return after_script in options" do expect(subject[:options][:after_script]).to eq(["after_script"]) end end context "overwritten in local context" do let(:config) do { after_script: ["local after_script"], test: { after_script: ["local after_script"], script: ["script"] } } end it "return after_script in options" do expect(subject[:options][:after_script]).to eq(["local after_script"]) end end end end describe "Image and service handling" do it "returns image and service when defined" do config = YAML.dump({ image: "ruby:2.1", services: ["mysql"], before_script: ["pwd"], rspec: { script: "rspec" } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1) expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({ except: nil, stage: "test", stage_idx: 1, name: :rspec, only: nil, commands: "pwd\nrspec", tag_list: [], options: { image: "ruby:2.1", services: ["mysql"] }, allow_failure: false, when: "on_success", environment: nil, }) end it "returns image and service when overridden for job" do config = YAML.dump({ image: "ruby:2.1", services: ["mysql"], before_script: ["pwd"], rspec: { image: "ruby:2.5", services: ["postgresql"], script: "rspec" } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1) expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({ except: nil, stage: "test", stage_idx: 1, name: :rspec, only: nil, commands: "pwd\nrspec", tag_list: [], options: { image: "ruby:2.5", services: ["postgresql"] }, allow_failure: false, when: "on_success", environment: nil, }) end end describe 'Variables' do context 'when global variables are defined' do it 'returns global variables' do variables = { VAR1: 'value1', VAR2: 'value2', } config = YAML.dump({ variables: variables, before_script: ['pwd'], rspec: { script: 'rspec' } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.global_variables).to eq(variables) end end context 'when job variables are defined' do context 'when syntax is correct' do it 'returns job variables' do variables = { KEY1: 'value1', SOME_KEY_2: 'value2' } config = YAML.dump( { before_script: ['pwd'], rspec: { variables: variables, script: 'rspec' } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.job_variables(:rspec)).to eq variables end end context 'when syntax is incorrect' do context 'when variables defined but invalid' do it 'raises error' do variables = [:KEY1, 'value1', :KEY2, 'value2'] config = YAML.dump( { before_script: ['pwd'], rspec: { variables: variables, script: 'rspec' } }) expect { GitlabCiYamlProcessor.new(config, path) } .to raise_error(GitlabCiYamlProcessor::ValidationError, /job: variables should be a map/) end end context 'when variables key defined but value not specified' do it 'returns empty array' do config = YAML.dump( { before_script: ['pwd'], rspec: { variables: nil, script: 'rspec' } }) config_processor = GitlabCiYamlProcessor.new(config, path) ## # TODO, in next version of CI configuration processor this # should be invalid configuration, see #18775 and #15060 # expect(config_processor.job_variables(:rspec)) .to be_an_instance_of(Array).and be_empty end 
end end end context 'when job variables are not defined' do it 'returns empty array' do config = YAML.dump({ before_script: ['pwd'], rspec: { script: 'rspec' } }) config_processor = GitlabCiYamlProcessor.new(config, path) expect(config_processor.job_variables(:rspec)).to eq [] end end end describe "When" do %w(on_success on_failure always).each do |when_state| it "returns #{when_state} when defined" do config = YAML.dump({ rspec: { script: "rspec", when: when_state } }) config_processor = GitlabCiYamlProcessor.new(config, path) builds = config_processor.builds_for_stage_and_ref("test", "master") expect(builds.size).to eq(1) expect(builds.first[:when]).to eq(when_state) end end end describe "Caches" do it "returns cache when defined globally" do config = YAML.dump({ cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' }, rspec: { script: "rspec" } }) config_processor = GitlabCiYamlProcessor.new(config) expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1) expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq( paths: ["logs/", "binaries/"], untracked: true, key: 'key', ) end it "returns cache when defined in a job" do config = YAML.dump({ rspec: { cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' }, script: "rspec" } }) config_processor = GitlabCiYamlProcessor.new(config) expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1) expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq( paths: ["logs/", "binaries/"], untracked: true, key: 'key', ) end it "overwrite cache when defined for a job and globally" do config = YAML.dump({ cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'global' }, rspec: { script: "rspec", cache: { paths: ["test/"], untracked: false, key: 'local' }, } }) config_processor = GitlabCiYamlProcessor.new(config) expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1) expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq( paths: ["test/"], untracked: false, key: 'local', ) end end describe "Artifacts" do it "returns artifacts when defined" do config = YAML.dump({ image: "ruby:2.1", services: ["mysql"], before_script: ["pwd"], rspec: { artifacts: { paths: ["logs/", "binaries/"], untracked: true, name: "custom_name", expire_in: "7d" }, script: "rspec" } }) config_processor = GitlabCiYamlProcessor.new(config) expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1) expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({ except: nil, stage: "test", stage_idx: 1, name: :rspec, only: nil, commands: "pwd\nrspec", tag_list: [], options: { image: "ruby:2.1", services: ["mysql"], artifacts: { name: "custom_name", paths: ["logs/", "binaries/"], untracked: true, expire_in: "7d" } }, when: "on_success", allow_failure: false, environment: nil, }) end %w[on_success on_failure always].each do |when_state| it "returns artifacts for when #{when_state} defined" do config = YAML.dump({ rspec: { script: "rspec", artifacts: { paths: ["logs/", "binaries/"], when: when_state } } }) config_processor = GitlabCiYamlProcessor.new(config, path) builds = config_processor.builds_for_stage_and_ref("test", "master") expect(builds.size).to eq(1) expect(builds.first[:options][:artifacts][:when]).to eq(when_state) end end end describe '#environment' do let(:config) do { deploy_to_production: { stage: 'deploy', 
script: 'test', environment: environment } } end let(:processor) { GitlabCiYamlProcessor.new(YAML.dump(config)) } let(:builds) { processor.builds_for_stage_and_ref('deploy', 'master') } context 'when a production environment is specified' do let(:environment) { 'production' } it 'does return production' do expect(builds.size).to eq(1) expect(builds.first[:environment]).to eq(environment) end end context 'when no environment is specified' do let(:environment) { nil } it 'does return nil environment' do expect(builds.size).to eq(1) expect(builds.first[:environment]).to be_nil end end context 'is not a string' do let(:environment) { 1 } it 'raises error' do expect { builds }.to raise_error("deploy_to_production job: environment parameter #{Gitlab::Regex.environment_name_regex_message}") end end context 'is not a valid string' do let(:environment) { 'production staging' } it 'raises error' do expect { builds }.to raise_error("deploy_to_production job: environment parameter #{Gitlab::Regex.environment_name_regex_message}") end end end describe "Dependencies" do let(:config) do { build1: { stage: 'build', script: 'test' }, build2: { stage: 'build', script: 'test' }, test1: { stage: 'test', script: 'test', dependencies: dependencies }, test2: { stage: 'test', script: 'test' }, deploy: { stage: 'test', script: 'test' } } end subject { GitlabCiYamlProcessor.new(YAML.dump(config)) } context 'no dependencies' do let(:dependencies) { } it { expect { subject }.not_to raise_error } end context 'dependencies to builds' do let(:dependencies) { ['build1', 'build2'] } it { expect { subject }.not_to raise_error } end context 'dependencies to builds defined as symbols' do let(:dependencies) { [:build1, :build2] } it { expect { subject }.not_to raise_error } end context 'undefined dependency' do let(:dependencies) { ['undefined'] } it { expect { subject }.to raise_error(GitlabCiYamlProcessor::ValidationError, 'test1 job: undefined dependency: undefined') } end context 'dependencies to deploy' do let(:dependencies) { ['deploy'] } it { expect { subject }.to raise_error(GitlabCiYamlProcessor::ValidationError, 'test1 job: dependency deploy is not defined in prior stages') } end end describe "Hidden jobs" do let(:config_processor) { GitlabCiYamlProcessor.new(config) } subject { config_processor.builds_for_stage_and_ref("test", "master") } shared_examples 'hidden_job_handling' do it "doesn't create jobs that start with dot" do expect(subject.size).to eq(1) expect(subject.first).to eq({ except: nil, stage: "test", stage_idx: 1, name: :normal_job, only: nil, commands: "test", tag_list: [], options: {}, when: "on_success", allow_failure: false, environment: nil, }) end end context 'when hidden job have a script definition' do let(:config) do YAML.dump({ '.hidden_job' => { image: 'ruby:2.1', script: 'test' }, 'normal_job' => { script: 'test' } }) end it_behaves_like 'hidden_job_handling' end context "when hidden job doesn't have a script definition" do let(:config) do YAML.dump({ '.hidden_job' => { image: 'ruby:2.1' }, 'normal_job' => { script: 'test' } }) end it_behaves_like 'hidden_job_handling' end end describe "YAML Alias/Anchor" do let(:config_processor) { GitlabCiYamlProcessor.new(config) } subject { config_processor.builds_for_stage_and_ref("build", "master") } shared_examples 'job_templates_handling' do it "is correctly supported for jobs" do expect(subject.size).to eq(2) expect(subject.first).to eq({ except: nil, stage: "build", stage_idx: 0, name: :job1, only: nil, commands: "execute-script-for-job", tag_list: 
[],
            options: {},
            when: "on_success",
            allow_failure: false,
            environment: nil,
          })

          expect(subject.second).to eq({
            except: nil,
            stage: "build",
            stage_idx: 0,
            name: :job2,
            only: nil,
            commands: "execute-script-for-job",
            tag_list: [],
            options: {},
            when: "on_success",
            allow_failure: false,
            environment: nil,
          })
        end
      end

      context 'when template is a job' do
        let(:config) do
          <<EOT
job1: &JOBTMPL
  stage: build
  script: execute-script-for-job

job2: *JOBTMPL
EOT
        end

        it_behaves_like 'job_templates_handling'
      end

      context 'when template is a hidden job' do
        let(:config) do
          <<EOT
.template: &JOBTMPL
  stage: build
  script: execute-script-for-job

job1: *JOBTMPL
job2: *JOBTMPL
EOT
        end

        it_behaves_like 'job_templates_handling'
      end

      context 'when job adds its own keys to a template definition' do
        let(:config) do
          <<EOT
.template: &JOBTMPL
  stage: build

job1:
  <<: *JOBTMPL
  script: execute-script-for-job

job2:
  <<: *JOBTMPL
  script: execute-script-for-job
EOT
        end

        it_behaves_like 'job_templates_handling'
      end
    end

    describe "Error handling" do
      it "fails to parse YAML" do
        expect{GitlabCiYamlProcessor.new("invalid: yaml: test")}.to raise_error(Psych::SyntaxError)
      end

      it "indicates that object is invalid" do
        expect{GitlabCiYamlProcessor.new("invalid_yaml")}.to raise_error(GitlabCiYamlProcessor::ValidationError)
      end

      it "returns errors if tags parameter is invalid" do
        config = YAML.dump({ rspec: { script: "test", tags: "mysql" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: tags parameter should be an array of strings")
      end

      it "returns errors if before_script parameter is invalid" do
        config = YAML.dump({ before_script: "bundle update", rspec: { script: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "before_script should be an array of strings")
      end

      it "returns errors if job before_script parameter is not an array of strings" do
        config = YAML.dump({ rspec: { script: "test", before_script: [10, "test"] } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: before_script should be an array of strings")
      end

      it "returns errors if after_script parameter is invalid" do
        config = YAML.dump({ after_script: "bundle update", rspec: { script: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "after_script should be an array of strings")
      end

      it "returns errors if job after_script parameter is not an array of strings" do
        config = YAML.dump({ rspec: { script: "test", after_script: [10, "test"] } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: after_script should be an array of strings")
      end

      it "returns errors if image parameter is invalid" do
        config = YAML.dump({ image: ["test"], rspec: { script: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "image should be a string")
      end

      it "returns errors if job name is blank" do
        config = YAML.dump({ '' => { script: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "job name should be non-empty string")
      end

      it "returns errors if job name is non-string" do
        config = YAML.dump({ 10 => { script: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "job name should be non-empty string")
      end

      it "returns errors if job image parameter is invalid" do
        config = YAML.dump({ rspec: { script: "test", image: ["test"] } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: image should be a string")
      end

      it "returns errors if services parameter is not an array" do
        config = YAML.dump({ services: "test", rspec: { script: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "services should be an array of strings")
      end

      it "returns errors if services parameter is not an array of strings" do
        config = YAML.dump({ services: [10, "test"], rspec: { script: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "services should be an array of strings")
      end

      it "returns errors if job services parameter is not an array" do
        config = YAML.dump({ rspec: { script: "test", services: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: services should be an array of strings")
      end

      it "returns errors if job services parameter is not an array of strings" do
        config = YAML.dump({ rspec: { script: "test", services: [10, "test"] } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: services should be an array of strings")
      end

      it "returns errors if there are unknown parameters" do
        config = YAML.dump({ extra: "bundle update" })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "Unknown parameter: extra")
      end

      it "returns errors if there are unknown parameters that are hashes, but doesn't have a script" do
        config = YAML.dump({ extra: { services: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "Unknown parameter: extra")
      end

      it "returns errors if there are no jobs defined" do
        config = YAML.dump({ before_script: ["bundle update"] })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "Please define at least one job")
      end

      it "returns errors if job allow_failure parameter is not an boolean" do
        config = YAML.dump({ rspec: { script: "test", allow_failure: "string" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: allow_failure parameter should be an boolean")
      end

      it "returns errors if job stage is not a string" do
        config = YAML.dump({ rspec: { script: "test", type: 1 } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: stage parameter should be build, test, deploy")
      end

      it "returns errors if job stage is not a pre-defined stage" do
        config = YAML.dump({ rspec: { script: "test", type: "acceptance" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: stage parameter should be build, test, deploy")
      end

      it "returns errors if job stage is not a defined stage" do
        config = YAML.dump({ types: ["build", "test"], rspec: { script: "test", type: "acceptance" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: stage parameter should be build, test")
      end

      it "returns errors if stages is not an array" do
        config = YAML.dump({ types: "test", rspec: { script: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "stages should be an array of strings")
      end

      it "returns errors if stages is not an array of strings" do
        config = YAML.dump({ types: [true, "test"], rspec: { script: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "stages should be an array of strings")
      end

      it "returns errors if variables is not a map" do
        config = YAML.dump({ variables: "test", rspec: { script: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "variables should be a map of key-value strings")
      end

      it "returns errors if variables is not a map of key-value strings" do
        config = YAML.dump({ variables: { test: false }, rspec: { script: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "variables should be a map of key-value strings")
      end

      it "returns errors if job when is not on_success, on_failure or always" do
        config = YAML.dump({ rspec: { script: "test", when: 1 } })

        expect do
          GitlabCiYamlProcessor.new(config, path)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: when parameter should be on_success, on_failure or always")
      end

      it "returns errors if job artifacts:name is not an a string" do
        config = YAML.dump({ types: ["build", "test"], rspec: { script: "test", artifacts: { name: 1 } } })

        expect do
          GitlabCiYamlProcessor.new(config)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: artifacts:name parameter should be a string")
      end

      it "returns errors if job artifacts:when is not an a predefined value" do
        config = YAML.dump({ types: ["build", "test"], rspec: { script: "test", artifacts: { when: 1 } } })

        expect do
          GitlabCiYamlProcessor.new(config)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: artifacts:when parameter should be on_success, on_failure or always")
      end

      it "returns errors if job artifacts:expire_in is not an a string" do
        config = YAML.dump({ types: ["build", "test"], rspec: { script: "test", artifacts: { expire_in: 1 } } })

        expect do
          GitlabCiYamlProcessor.new(config)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: artifacts:expire_in parameter should be a duration")
      end

      it "returns errors if job artifacts:expire_in is not an a valid duration" do
        config = YAML.dump({ types: ["build", "test"], rspec: { script: "test", artifacts: { expire_in: "7 elephants" } } })

        expect do
          GitlabCiYamlProcessor.new(config)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: artifacts:expire_in parameter should be a duration")
      end

      it "returns errors if job artifacts:untracked is not an array of strings" do
        config = YAML.dump({ types: ["build", "test"], rspec: { script: "test", artifacts: { untracked: "string" } } })

        expect do
          GitlabCiYamlProcessor.new(config)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: artifacts:untracked parameter should be an boolean")
      end

      it "returns errors if job artifacts:paths is not an array of strings" do
        config = YAML.dump({ types: ["build", "test"], rspec: { script: "test", artifacts: { paths: "string" } } })

        expect do
          GitlabCiYamlProcessor.new(config)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: artifacts:paths parameter should be an array of strings")
      end

      it "returns errors if cache:untracked is not an array of strings" do
        config = YAML.dump({ cache: { untracked: "string" }, rspec: { script: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "cache:untracked parameter should be an boolean")
      end

      it "returns errors if cache:paths is not an array of strings" do
        config = YAML.dump({ cache: { paths: "string" }, rspec: { script: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "cache:paths parameter should be an array of strings")
      end

      it "returns errors if cache:key is not a string" do
        config = YAML.dump({ cache: { key: 1 }, rspec: { script: "test" } })

        expect do
          GitlabCiYamlProcessor.new(config)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "cache:key parameter should be a string")
      end

      it "returns errors if job cache:key is not an a string" do
        config = YAML.dump({ types: ["build", "test"], rspec: { script: "test", cache: { key: 1 } } })

        expect do
          GitlabCiYamlProcessor.new(config)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: cache:key parameter should be a string")
      end

      it "returns errors if job cache:untracked is not an array of strings" do
        config = YAML.dump({ types: ["build", "test"], rspec: { script: "test", cache: { untracked: "string" } } })

        expect do
          GitlabCiYamlProcessor.new(config)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: cache:untracked parameter should be an boolean")
      end

      it "returns errors if job cache:paths is not an array of strings" do
        config = YAML.dump({ types: ["build", "test"], rspec: { script: "test", cache: { paths: "string" } } })

        expect do
          GitlabCiYamlProcessor.new(config)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: cache:paths parameter should be an array of strings")
      end

      it "returns errors if job dependencies is not an array of strings" do
        config = YAML.dump({ types: ["build", "test"], rspec: { script: "test", dependencies: "string" } })

        expect do
          GitlabCiYamlProcessor.new(config)
        end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: dependencies parameter should be an array of strings")
      end
    end
  end
end
37.679604
161
0.559106
5d8a836b3a8f4f92ffe326cad50b30c7f9c8da0c
3,918
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.

ActiveRecord::Schema.define(version: 2022_02_06_195648) do

  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"

  create_table "active_storage_attachments", force: :cascade do |t|
    t.string "name", null: false
    t.string "record_type", null: false
    t.bigint "record_id", null: false
    t.bigint "blob_id", null: false
    t.datetime "created_at", precision: 6, null: false
    t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
    t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
  end

  create_table "active_storage_blobs", force: :cascade do |t|
    t.string "key", null: false
    t.string "filename", null: false
    t.string "content_type"
    t.text "metadata"
    t.string "service_name", null: false
    t.bigint "byte_size", null: false
    t.string "checksum"
    t.datetime "created_at", precision: 6, null: false
    t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true
  end

  create_table "active_storage_variant_records", force: :cascade do |t|
    t.bigint "blob_id", null: false
    t.string "variation_digest", null: false
    t.index ["blob_id", "variation_digest"], name: "index_active_storage_variant_records_uniqueness", unique: true
  end

  create_table "categories", force: :cascade do |t|
    t.string "name"
    t.string "icon"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.bigint "user_id", null: false
    t.index ["user_id"], name: "index_categories_on_user_id"
  end

  create_table "categories_expenses", id: false, force: :cascade do |t|
    t.bigint "category_id", null: false
    t.bigint "expense_id", null: false
    t.index ["category_id", "expense_id"], name: "index_categories_expenses_on_category_id_and_expense_id"
    t.index ["expense_id", "category_id"], name: "index_categories_expenses_on_expense_id_and_category_id"
  end

  create_table "expenses", force: :cascade do |t|
    t.string "name"
    t.float "amount"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.bigint "user_id", null: false
    t.index ["user_id"], name: "index_expenses_on_user_id"
  end

  create_table "users", force: :cascade do |t|
    t.string "full_name", null: false
    t.string "email", default: "", null: false
    t.string "encrypted_password", default: "", null: false
    t.string "reset_password_token"
    t.datetime "reset_password_sent_at", precision: 6
    t.datetime "remember_created_at", precision: 6
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.index ["email"], name: "index_users_on_email", unique: true
    t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
  end

  add_foreign_key "active_storage_attachments", "active_storage_blobs", column: "blob_id"
  add_foreign_key "active_storage_variant_records", "active_storage_blobs", column: "blob_id"
  add_foreign_key "categories", "users"
  add_foreign_key "expenses", "users"
end
44.022472
126
0.73124
1129c5db06080f2137b1f1d4f93d821616207d75
989
Pod::Spec.new do |s|
  s.name           = "SwiftQueue"
  s.version        = "5.0.0"
  s.summary        = "SwiftQueue"
  s.description    = "Job Scheduler for IOS with Concurrent run, failure/retry, persistence, repeat, delay and more"
  s.homepage       = "https://github.com/lucas34/SwiftQueue"
  s.license        = 'MIT'
  s.author         = { "Lucas Nelaupe" => "[email protected]" }
  s.source         = { :git => "https://github.com/lucas34/SwiftQueue.git", :tag => s.version.to_s }

  s.swift_version = '5.2'

  s.ios.deployment_target = "8.0"
  s.tvos.deployment_target = "9.0"
  s.watchos.deployment_target = "2.0"
  s.osx.deployment_target = "10.10"

  s.requires_arc = true

  s.source_files = 'Sources/SwiftQueue/**.swift'
  s.ios.source_files = 'Sources/ios/*.swift', 'Sources/SwiftQueue/**.swift'

  s.ios.dependency 'ReachabilitySwift', '~> 5.0'
  s.tvos.dependency 'ReachabilitySwift', '~> 5.0'
  s.osx.dependency 'ReachabilitySwift', '~> 5.0'
end
36.62963
118
0.623862
5d2634f089612ff335463e87d3133c38518be904
3,975
# frozen_string_literal: true

module RuboCop
  module Cop
    module Rails
      # This cop is used to identify usages of http methods like `get`, `post`,
      # `put`, `patch` without the usage of keyword arguments in your tests and
      # change them to use keyword args. This cop only applies to Rails >= 5.
      # If you are running Rails < 5 you should disable the
      # Rails/HttpPositionalArguments cop or set your TargetRailsVersion in your
      # .rubocop.yml file to 4.0, etc.
      #
      # @example
      #   # bad
      #   get :new, { user_id: 1}
      #
      #   # good
      #   get :new, params: { user_id: 1 }
      class HttpPositionalArguments < Cop
        extend TargetRailsVersion

        MSG = 'Use keyword arguments instead of ' \
              'positional arguments for http call: `%<verb>s`.'
        KEYWORD_ARGS = %i[
          method params session body flash xhr as headers env to
        ].freeze
        HTTP_METHODS = %i[get post put patch delete head].freeze

        minimum_target_rails_version 5.0

        def_node_matcher :http_request?, <<~PATTERN
          (send nil? {#{HTTP_METHODS.map(&:inspect).join(' ')}} !nil? $_ ...)
        PATTERN

        def on_send(node)
          http_request?(node) do |data|
            return unless needs_conversion?(data)

            add_offense(node, location: :selector,
                              message: format(MSG, verb: node.method_name))
          end
        end

        # given a pre Rails 5 method: get :new, {user_id: @user.id}, {}
        #
        # @return lambda of auto correct procedure
        # the result should look like:
        #   get :new, params: { user_id: @user.id }, session: {}
        # the http_method is the method used to call the controller
        # the controller node can be a symbol, method, object or string
        # that represents the path/action on the Rails controller
        # the data is the http parameters and environment sent in
        # the Rails 5 http call
        def autocorrect(node)
          lambda do |corrector|
            corrector.replace(node.loc.expression, correction(node))
          end
        end

        private

        def needs_conversion?(data)
          return true unless data.hash_type?

          data.each_pair.none? do |pair|
            special_keyword_arg?(pair.key) ||
              format_arg?(pair.key) && data.pairs.one?
          end
        end

        def special_keyword_arg?(node)
          node.sym_type? && KEYWORD_ARGS.include?(node.value)
        end

        def format_arg?(node)
          node.sym_type? && node.value == :format
        end

        def convert_hash_data(data, type)
          return '' if data.hash_type? && data.empty?

          hash_data = if data.hash_type?
                        format('{ %<data>s }',
                               data: data.pairs.map(&:source).join(', '))
                      else
                        # user supplies an object,
                        # no need to surround with braces
                        data.source
                      end

          format(', %<type>s: %<hash_data>s', type: type, hash_data: hash_data)
        end

        def correction(node)
          http_path, *data = *node.arguments

          controller_action = http_path.source
          params = convert_hash_data(data.first, 'params')
          session = convert_hash_data(data.last, 'session') if data.size > 1
          format(correction_template(node), name: node.method_name,
                                            action: controller_action,
                                            params: params,
                                            session: session)
        end

        def correction_template(node)
          if parentheses?(node)
            '%<name>s(%<action>s%<params>s%<session>s)'
          else
            '%<name>s %<action>s%<params>s%<session>s'
          end
        end
      end
    end
  end
end
33.686441
80
0.544906
01f7d2c8f52855f340b8a72fa0319a143023d4db
3,124
=begin

Copyright 2012-2013 inBloom, Inc. and its affiliates.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

=end

require_relative 'Enum.rb'

# Enumerates the types of credential levels. From Ed-Fi-Core.xsd:
# <xs:simpleType name="LevelType">
#   <xs:annotation>
#     <xs:documentation>The grade level(s) certified for teaching.</xs:documentation>
#   </xs:annotation>
#   <xs:restriction base="xs:token">
#     <xs:enumeration value="All Level (Grade Level PK-12)"/>
#     <xs:enumeration value="All-Level (Grade Level EC-12)"/>
#     <xs:enumeration value="Early Childhood (PK-K)"/>
#     <xs:enumeration value="Elementary (Grade Level 1-6)"/>
#     <xs:enumeration value="Elementary (Grade Level 1-8)"/>
#     <xs:enumeration value="Elementary (Grade Level 4-8)"/>
#     <xs:enumeration value="Elementary (Grade Level EC-4)"/>
#     <xs:enumeration value="Elementary (Grade Level EC-6)"/>
#     <xs:enumeration value="Elementary (Grade Level PK-5)"/>
#     <xs:enumeration value="Elementary (Grade Level PK-6)"/>
#     <xs:enumeration value="Grade Level NA"/>
#     <xs:enumeration value="Junior High (Grade Level 6-8)"/>
#     <xs:enumeration value="Secondary (Grade Level 6-12)"/>
#     <xs:enumeration value="Secondary (Grade Level 8-12)"/>
#     <xs:enumeration value="Other"/>
#   </xs:restriction>
# </xs:simpleType>
# <<11/01/2012: removed 'Other' because ComplexTypes.xsd doesn't currently
# support that enumeration>>
class CredentialLevelType
  include Enum

  CredentialLevelType.define :ALL_LEVEL_GRADES_EC_12, "All-Level (Grade Level EC-12)"
  CredentialLevelType.define :ALL_LEVEL_GRADES_PK_12, "All Level (Grade Level PK-12)"
  CredentialLevelType.define :EARLY_CHILDHOOD, "Early Childhood (PK-K)"
  CredentialLevelType.define :ELEMENTARY_GRADES_1_6, "Elementary (Grade Level 1-6)"
  CredentialLevelType.define :ELEMENTARY_GRADES_1_8, "Elementary (Grade Level 1-8)"
  CredentialLevelType.define :ELEMENTARY_GRADES_4_8, "Elementary (Grade Level 4-8)"
  CredentialLevelType.define :ELEMENTARY_GRADES_EC_4, "Elementary (Grade Level EC-4)"
  CredentialLevelType.define :ELEMENTARY_GRADES_EC_6, "Elementary (Grade Level EC-6)"
  CredentialLevelType.define :ELEMENTARY_GRADES_PK_5, "Elementary (Grade Level PK-5)"
  CredentialLevelType.define :ELEMENTARY_GRADES_PK_6, "Elementary (Grade Level PK-6)"
  CredentialLevelType.define :GRADE_LEVEL_N_A, "Grade Level NA"
  CredentialLevelType.define :JUNIOR_HIGH_GRADES_6_8, "Junior High (Grade Level 6-8)"
  CredentialLevelType.define :SECONDARY_GRADES_6_12, "Secondary (Grade Level 6-12)"
  CredentialLevelType.define :SECONDAY_GRADES_8_12, "Secondary (Grade Level 8-12)"
end
48.8125
85
0.744558
1cc8a385649e7a3684afea4b347d9a2e87f9c68e
240
module Forms::Fields
  class RadioButtons < Field
    include OptionedField

    property :label, String, :required => true
    property :required, Boolean, :default => false

    self.renderer = Forms::RadioButtonsRenderer
  end
end
21.818182
50
0.7
79d621b633cce536512dc62ec31e98a5b7b8d6f9
3,537
require 'test_helper'

class TableRowTest < Test::Unit::TestCase
  include Liquid

  class ArrayDrop < Liquid::Drop
    include Enumerable

    def initialize(array)
      @array = array
    end

    def each(&block)
      @array.each(&block)
    end
  end

  def test_table_row
    assert_template_result("<tr class=\"row1\">\n<td class=\"col1\"> 1 </td><td class=\"col2\"> 2 </td><td class=\"col3\"> 3 </td></tr>\n<tr class=\"row2\"><td class=\"col1\"> 4 </td><td class=\"col2\"> 5 </td><td class=\"col3\"> 6 </td></tr>\n",
                           '{% tablerow n in numbers cols:3%} {{n}} {% endtablerow %}',
                           'numbers' => [1,2,3,4,5,6])

    assert_template_result("<tr class=\"row1\">\n</tr>\n",
                           '{% tablerow n in numbers cols:3%} {{n}} {% endtablerow %}',
                           'numbers' => [])
  end

  def test_table_row_with_different_cols
    assert_template_result("<tr class=\"row1\">\n<td class=\"col1\"> 1 </td><td class=\"col2\"> 2 </td><td class=\"col3\"> 3 </td><td class=\"col4\"> 4 </td><td class=\"col5\"> 5 </td></tr>\n<tr class=\"row2\"><td class=\"col1\"> 6 </td></tr>\n",
                           '{% tablerow n in numbers cols:5%} {{n}} {% endtablerow %}',
                           'numbers' => [1,2,3,4,5,6])
  end

  def test_table_col_counter
    assert_template_result("<tr class=\"row1\">\n<td class=\"col1\">1</td><td class=\"col2\">2</td></tr>\n<tr class=\"row2\"><td class=\"col1\">1</td><td class=\"col2\">2</td></tr>\n<tr class=\"row3\"><td class=\"col1\">1</td><td class=\"col2\">2</td></tr>\n",
                           '{% tablerow n in numbers cols:2%}{{tablerowloop.col}}{% endtablerow %}',
                           'numbers' => [1,2,3,4,5,6])
  end

  def test_quoted_fragment
    assert_template_result("<tr class=\"row1\">\n<td class=\"col1\"> 1 </td><td class=\"col2\"> 2 </td><td class=\"col3\"> 3 </td></tr>\n<tr class=\"row2\"><td class=\"col1\"> 4 </td><td class=\"col2\"> 5 </td><td class=\"col3\"> 6 </td></tr>\n",
                           "{% tablerow n in collections.frontpage cols:3%} {{n}} {% endtablerow %}",
                           'collections' => {'frontpage' => [1,2,3,4,5,6]})

    assert_template_result("<tr class=\"row1\">\n<td class=\"col1\"> 1 </td><td class=\"col2\"> 2 </td><td class=\"col3\"> 3 </td></tr>\n<tr class=\"row2\"><td class=\"col1\"> 4 </td><td class=\"col2\"> 5 </td><td class=\"col3\"> 6 </td></tr>\n",
                           "{% tablerow n in collections['frontpage'] cols:3%} {{n}} {% endtablerow %}",
                           'collections' => {'frontpage' => [1,2,3,4,5,6]})
  end

  def test_enumerable_drop
    assert_template_result("<tr class=\"row1\">\n<td class=\"col1\"> 1 </td><td class=\"col2\"> 2 </td><td class=\"col3\"> 3 </td></tr>\n<tr class=\"row2\"><td class=\"col1\"> 4 </td><td class=\"col2\"> 5 </td><td class=\"col3\"> 6 </td></tr>\n",
                           '{% tablerow n in numbers cols:3%} {{n}} {% endtablerow %}',
                           'numbers' => ArrayDrop.new([1,2,3,4,5,6]))
  end

  def test_offset_and_limit
    assert_template_result("<tr class=\"row1\">\n<td class=\"col1\"> 1 </td><td class=\"col2\"> 2 </td><td class=\"col3\"> 3 </td></tr>\n<tr class=\"row2\"><td class=\"col1\"> 4 </td><td class=\"col2\"> 5 </td><td class=\"col3\"> 6 </td></tr>\n",
                           '{% tablerow n in numbers cols:3 offset:1 limit:6%} {{n}} {% endtablerow %}',
                           'numbers' => [0,1,2,3,4,5,6,7])
  end
end
55.265625
260
0.510319