Dataset columns (type, min to max across records):
  hexsha: string, length 40 to 40
  size: int64, 2 to 1.01M
  content: string, length 2 to 1.01M
  avg_line_length: float64, 1.5 to 100
  max_line_length: int64, 2 to 1k
  alphanum_fraction: float64, 0.25 to 1
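The records that follow carry these six columns per source file. As a rough sketch of how the numeric columns might be used (assuming the rows have been exported as JSON Lines; the file name records.jsonl and the filter thresholds are only illustrative), the dataset could be filtered like this in Ruby:

require 'json'

# Load one JSON object per line and keep small, mostly-alphanumeric files
# with reasonably short lines. The thresholds are arbitrary examples.
rows = File.readlines('records.jsonl', chomp: true).map { |line| JSON.parse(line) }

selected = rows.select do |row|
  row['size'] < 5_000 &&
    row['alphanum_fraction'] > 0.6 &&
    row['max_line_length'] < 120
end

selected.each do |row|
  puts "#{row['hexsha']} (#{row['size']} bytes, avg line length #{row['avg_line_length'].round(1)})"
end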
bfc73936e5610555a41a602615946f47449c1930
177
module Admin
  module Conversations
    class BlocksController < ApplicationController
      def index
        @conversations = Conversation.blocked
      end
    end
  end
end
17.7
50
0.700565
fffd1ce3be8775a57e93303ced4afbbdf2a55494
5,915
input = <<eos --config.file="prometheus.yml" Prometheus configuration file path. --web.listen-address="0.0.0.0:9090" Address to listen on for UI, API, and telemetry. --web.config.file="" [EXPERIMENTAL] Path to configuration file that can enable TLS or authentication. --web.read-timeout=5m Maximum duration before timing out read of the request, and closing idle connections. --web.max-connections=512 Maximum number of simultaneous connections. --web.external-url=<URL> The URL under which Prometheus is externally reachable (for example, if Prometheus is served via a reverse proxy). Used for generating relative and absolute links back to Prometheus itself. If the URL has a path portion, it will be used to prefix all HTTP endpoints served by Prometheus. If omitted, relevant URL components will be derived automatically. --web.route-prefix=<path> Prefix for the internal routes of web endpoints. Defaults to path of --web.external-url. --web.user-assets=<path> Path to static asset directory, available at /user. --web.enable-lifecycle Enable shutdown and reload via HTTP request. --web.enable-admin-api Enable API endpoints for admin control actions. --web.console.templates="consoles" Path to the console template directory, available at /consoles. --web.console.libraries="console_libraries" Path to the console library directory. --web.page-title="Prometheus Time Series Collection and Processing Server" Document title of Prometheus instance. --web.cors.origin=".*" Regex for CORS origin. It is fully anchored. Example: 'https?://(domain1|domain2)\.com' --storage.tsdb.path="data/" Base path for metrics storage. --storage.tsdb.retention=STORAGE.TSDB.RETENTION [DEPRECATED] How long to retain samples in storage. This flag has been deprecated, use "storage.tsdb.retention.time" instead. --storage.tsdb.retention.time=STORAGE.TSDB.RETENTION.TIME How long to retain samples in storage. When this flag is set it overrides "storage.tsdb.retention". If neither this flag nor "storage.tsdb.retention" nor "storage.tsdb.retention.size" is set, the retention time defaults to 15d. Units Supported: y, w, d, h, m, s, ms. --storage.tsdb.retention.size=STORAGE.TSDB.RETENTION.SIZE [EXPERIMENTAL] Maximum number of bytes that can be stored for blocks. A unit is required, supported units: B, KB, MB, GB, TB, PB, EB. Ex: "512MB". This flag is experimental and can be changed in future releases. --storage.tsdb.no-lockfile Do not create lockfile in data directory. --storage.tsdb.allow-overlapping-blocks [EXPERIMENTAL] Allow overlapping blocks, which in turn enables vertical compaction and vertical query merge. --storage.tsdb.wal-compression Compress the tsdb WAL. --storage.remote.flush-deadline=<duration> How long to wait flushing sample on shutdown or config reload. --storage.remote.read-sample-limit=5e7 Maximum overall number of samples to return via the remote read interface, in a single query. 0 means no limit. This limit is ignored for streamed response types. --storage.remote.read-concurrent-limit=10 Maximum number of concurrent remote read calls. 0 means no limit. --storage.remote.read-max-bytes-in-frame=1048576 Maximum number of bytes in a single frame for streaming remote read response types before marshalling. Note that client might have limit on frame size as well. 1MB as recommended by protobuf by default. --rules.alert.for-outage-tolerance=1h Max time to tolerate prometheus outage for restoring "for" state of alert. --rules.alert.for-grace-period=10m Minimum duration between alert and restored "for" state. 
This is maintained only for alerts with configured "for" time greater than grace period. --rules.alert.resend-delay=1m Minimum amount of time to wait before resending an alert to Alertmanager. --alertmanager.notification-queue-capacity=10000 The capacity of the queue for pending Alertmanager notifications. --query.lookback-delta=5m The maximum lookback duration for retrieving metrics during expression evaluations and federation. --query.timeout=2m Maximum time a query may take before being aborted. --query.max-concurrency=20 Maximum number of queries executed concurrently. --query.max-samples=50000000 Maximum number of samples a single query can load into memory. Note that queries will fail if they try to load more samples than this into memory, so this also limits the number of samples a query can return. --enable-feature= ... Comma separated feature names to enable. Valid options: 'promql-at-modifier' to enable the @ modifier, 'remote-write-receiver' to enable remote write receiver. See https://prometheus.io/docs/prometheus/latest/disabled_features/ for more details. --log.level=info Only log messages with the given severity or above. One of: [debug, info, warn, error] --log.format=logfmt Output format of log messages. One of: [logfmt,json] eos exporter = ARGV[0] prefix = ['prometheus', exporter].compact.join('_') puts '############################################################' puts '# Auto generated' puts '############################################################' collectors = [] input.each_line do |line| line = line.chomp next if line.empty? if line.start_with?('--') argument = line[2..line.size-1].split[0] k = argument.split('=') key = k.first value = k.last boolean_flag = key == value if boolean_flag # checking if (default: enabled) if line =~ /: enabled/ collectors << " - '#{key}'" else collectors << "# - '#{key}' # disabled by default" end next end adjusted_key = "#{prefix}_#{key.gsub('.', '__').gsub('-', '_')}" adjusted_key = adjusted_key.gsub("__#{exporter}__", '__') puts %Q( - ['#{key}', "{{ #{adjusted_key} }}"]) else # puts "# #{line}" end end puts '', '', '' puts "#{prefix}____enabled_flags:" collectors.sort.each do |collector| puts collector end
36.067073
79
0.732883
879f1a3f470d2ff1e9dc04090c585f04ec53c511
834
#
# Be sure to run `pod lib lint EMVendors.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#

Pod::Spec.new do |s|
  s.name             = 'EMVendors'
  s.version          = '1.3.0'
  s.summary          = 'EMVendors'
  s.homepage         = 'https://github.com/[email protected]/EMVendors'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'faterman' => '[email protected]' }
  s.source           = { :git => 'https://github.com/faterman/EMVendors.git', :tag => s.version.to_s }

  s.ios.deployment_target = '8.0'

  s.subspec 'NOSSDK' do |sp|
    sp.ios.vendored_frameworks = 'EMVendors/NOSSDK/NOSSDK.framework'
  end
end
36.26087
102
0.625899
ab3c9c871d362d4cec3e5cadcbdfbf1a4618aaee
2,273
require 'spec_helper' require 'puppet/pops' require 'puppet/loaders' describe 'the assert_type function' do after(:all) { Puppet::Pops::Loaders.clear } around(:each) do |example| loaders = Puppet::Pops::Loaders.new(Puppet::Node::Environment.create(:testing, [])) Puppet.override({:loaders => loaders}, "test-example") do example.run end end let(:func) do Puppet.lookup(:loaders).puppet_system_loader.load(:function, 'assert_type') end it 'asserts compliant type by returning the value' do expect(func.call({}, type(String), 'hello world')).to eql('hello world') end it 'accepts type given as a String' do expect(func.call({}, 'String', 'hello world')).to eql('hello world') end it 'asserts non compliant type by raising an error' do expect do func.call({}, type(Integer), 'hello world') end.to raise_error(Puppet::ParseError, /does not match actual/) end it 'checks that first argument is a type' do expect do func.call({}, 10, 10) end.to raise_error(ArgumentError, Regexp.new(Regexp.escape( "function 'assert_type' called with mis-matched arguments expected one of: assert_type(Type type, Any value, Callable[Type, Type] block {0,1}) - arg count {2,3} assert_type(String type_string, Any value, Callable[Type, Type] block {0,1}) - arg count {2,3} actual: assert_type(Integer, Integer) - arg count {2}"))) end it 'allows the second arg to be undef/nil)' do expect do func.call({}, optional(String), nil) end.to_not raise_error end it 'can be called with a callable that receives a specific type' do expected, actual = func.call({}, optional(String), 1, create_callable_2_args_unit) expect(expected.to_s).to eql('Optional[String]') expect(actual.to_s).to eql('Integer[1, 1]') end def optional(type_ref) Puppet::Pops::Types::TypeFactory.optional(type(type_ref)) end def type(type_ref) Puppet::Pops::Types::TypeFactory.type_of(type_ref) end def create_callable_2_args_unit() Puppet::Functions.create_function(:func) do dispatch :func do param 'Type', :expected param 'Type', :actual end def func(expected, actual) [expected, actual] end end.new({}, nil) end end
28.772152
96
0.678399
e8a97c97256bf03d0950da445585a90993ec559c
4,105
# encoding: utf-8 # This file is distributed under New Relic's license terms. # See https://github.com/newrelic/rpm/blob/master/LICENSE for complete details. MIN_RAILS_VERSION = 4 if defined?(::Rails) && ::Rails::VERSION::MAJOR.to_i >= MIN_RAILS_VERSION && !NewRelic::LanguageSupport.using_engine?('jruby') require File.expand_path(File.join(File.dirname(__FILE__),'..','..','..','test_helper')) require 'new_relic/agent/instrumentation/active_record_subscriber' class NewRelic::Agent::Instrumentation::ActiveRecordSubscriberTest < Minitest::Test class Order; end def setup @config = { :adapter => 'mysql', :host => 'server' } @connection = Object.new @connection.instance_variable_set(:@config, @config) @params = { :name => 'NewRelic::Agent::Instrumentation::ActiveRecordSubscriberTest::Order Load', :sql => 'SELECT * FROM sandwiches', :connection_id => @connection.object_id } @subscriber = NewRelic::Agent::Instrumentation::ActiveRecordSubscriber.new @stats_engine = NewRelic::Agent.instance.stats_engine @stats_engine.clear_stats end def test_records_metrics_for_simple_find freeze_time simulate_query(2) metric_name = 'Datastore/statement/ActiveRecord/NewRelic::Agent::Instrumentation::ActiveRecordSubscriberTest::Order/find' assert_metrics_recorded( metric_name => { :call_count => 1, :total_call_time => 2.0 } ) end def test_records_scoped_metrics freeze_time in_transaction('test_txn') { simulate_query(2) } metric_name = 'Datastore/statement/ActiveRecord/NewRelic::Agent::Instrumentation::ActiveRecordSubscriberTest::Order/find' assert_metrics_recorded( [metric_name, 'test_txn'] => { :call_count => 1, :total_call_time => 2 } ) end def test_records_nothing_if_tracing_disabled freeze_time NewRelic::Agent.disable_all_tracing { simulate_query(2) } metric_name = 'Datastore/statement/ActiveRecord/NewRelic::Agent::Instrumentation::ActiveRecordSubscriberTest::Order/find' assert_metrics_not_recorded([metric_name]) end def test_records_rollup_metrics freeze_time in_web_transaction { simulate_query(2) } assert_metrics_recorded( 'Datastore/operation/ActiveRecord/find' => { :call_count => 1, :total_call_time => 2 }, 'Datastore/allWeb' => { :call_count => 1, :total_call_time => 2 }, 'Datastore/all' => { :call_count => 1, :total_call_time => 2 } ) end def test_creates_txn_node freeze_time in_transaction do simulate_query(2) end last_node = nil sampler = NewRelic::Agent.instance.transaction_sampler sampler.last_sample.root_node.each_node{|s| last_node = s } assert_equal('Datastore/statement/ActiveRecord/NewRelic::Agent::Instrumentation::ActiveRecordSubscriberTest::Order/find', last_node.metric_name) assert_equal('SELECT * FROM sandwiches', last_node.params[:sql].sql) end def test_creates_slow_sql_node freeze_time sampler = NewRelic::Agent.instance.sql_sampler sql = nil in_transaction do simulate_query(2) sql = sampler.tl_transaction_data.sql_data[0].sql end assert_equal 'SELECT * FROM sandwiches', sql end def test_should_not_raise_due_to_an_exception_during_instrumentation_callback @subscriber.stubs(:record_metrics).raises(StandardError) simulate_query end def simulate_query(duration=nil) @subscriber.start('sql.active_record', :id, @params) advance_time(duration) if duration @subscriber.finish('sql.active_record', :id, @params) end def test_active_record_config_for_event target_connection = ActiveRecord::Base.connection_handler.connection_pool_list.first.connections.first expected_config = target_connection.instance_variable_get(:@config) payload = { :connection_id => target_connection.object_id } result = 
@subscriber.active_record_config(payload) assert_equal expected_config, result end end else puts "Skipping tests in #{__FILE__} because Rails >= #{MIN_RAILS_VERSION} is unavailable" end
31.098485
126
0.730816
61eed3188b07320046d2c9a276591081698ad3b2
223
class CollectionCell < UICollectionViewCell
  def on_load
    find(self).apply_style :collection_cell

    find(self.contentView).tap do |q|
      q.append(UILabel, :title).get.text = rand(100).to_s
    end
  end
end
18.583333
57
0.690583
5dc603832f160f84e4c89fdb7a18451b6593fead
122
require 'io/console'

while 1 do
  r = IO.select([ STDIN ])
  p r[0]
  # Read a character only when STDIN is among the readable IOs.
  if r[0].include?(STDIN)
    p STDIN.getch
  end
  sleep(1)
end
11.090909
26
0.590164
6a0b45051993cabec5aa60810dc7636ffd432239
99
# frozen_string_literal: true

FactoryBot.define do
  factory :like do
    user
    post
  end
end
11
29
0.707071
e2e8b60edc87be7ae2a8764574c42a56a0fb6a0a
5,665
require "rails_helper" RSpec.describe "casa_cases/show", type: :system do let(:organization) { create(:casa_org) } let(:admin) { create(:casa_admin, casa_org: organization) } let(:volunteer) { build(:volunteer, display_name: "Bob Loblaw", casa_org: organization) } let(:casa_case) { create(:casa_case, :with_one_court_order, casa_org: organization, case_number: "CINA-1", transition_aged_youth: true, court_report_due_date: 1.month.from_now) } let!(:case_assignment) { create(:case_assignment, volunteer: volunteer, casa_case: casa_case) } let!(:case_contact) { create(:case_contact, creator: volunteer, casa_case: casa_case) } let!(:emancipation_categories) { create_list(:emancipation_category, 3) } let!(:future_court_date) { create(:court_date, date: 1.year.from_now, casa_case: casa_case) } before do sign_in user visit casa_case_path(casa_case.id) end context "when admin" do let(:user) { admin } it_behaves_like "shows court dates links" it "can see case creator in table" do expect(page).to have_text("Bob Loblaw") end it "can navigate to edit volunteer page" do expect(page).to have_link("Bob Loblaw", href: "/volunteers/#{volunteer.id}/edit") end it "sees link to profile page" do expect(page).to have_link(href: "/users/edit") end it "can see court orders" do expect(page).to have_content("Court Orders") expect(page).to have_content(casa_case.case_court_orders[0].text) expect(page).to have_content(casa_case.case_court_orders[0].implementation_status_symbol) end it "can see next court date", js: true do if casa_case.court_date expect(page).to have_content("Next Court Date: #{I18n.l(future_court_date.date, format: :day_and_date, default: "")}") end end it "can see Add to Calendar buttons", js: true do expect(page).to have_content("Add to Calendar") end context "when there is no future court date or court report due date" do before do casa_case = create(:casa_case, casa_org: organization) visit casa_case_path(casa_case.id) end it "can not see Add to Calendar buttons", js: true do expect(page).not_to have_content("Add to Calendar") end end end context "supervisor user" do let(:user) { create(:supervisor, casa_org: organization) } let!(:case_contact) { create(:case_contact, creator: user, casa_case: casa_case) } it "sees link to own edit page" do expect(page).to have_link(href: "/supervisors/#{user.id}/edit") end context "case contact by another supervisor" do let(:other_supervisor) { create(:supervisor, casa_org: organization) } let!(:case_contact) { create(:case_contact, creator: other_supervisor, casa_case: casa_case) } it "sees link to other supervisor" do expect(page).to have_link(href: "/supervisors/#{other_supervisor.id}/edit") end end it "can see court orders" do expect(page).to have_content("Court Orders") expect(page).to have_content(casa_case.case_court_orders[0].text) expect(page).to have_content(casa_case.case_court_orders[0].implementation_status_symbol) end context "when generating a report, supervisor sees waiting page", js: true do before do click_button "Generate Report" end describe "'Generate Report' button" do it "has been disabled" do options = {visible: :visible} expect(page).to have_selector "#btnGenerateReport[disabled]", **options end end describe "Spinner" do it "becomes visible" do options = {visible: :visible} expect(page).to have_selector "#spinner", **options end end end end context "volunteer user" do let(:user) { volunteer } it "sees link to emancipation" do expect(page).to have_link("Emancipation 0 / #{emancipation_categories.size}") end it "can see court orders" do 
expect(page).to have_content("Court Orders") expect(page).to have_content(casa_case.case_court_orders[0].text) expect(page).to have_content(casa_case.case_court_orders[0].implementation_status_symbol) end end context "court order - implementation status symbol" do let(:user) { admin } it "when implemented" do casa_case.case_court_orders[0].update(implementation_status: :implemented) visit casa_case_path(casa_case) expect(page).to have_content("Court Orders") expect(page).to have_content(casa_case.case_court_orders[0].text) expect(page).to have_content("βœ…") end it "when not implemented" do casa_case.case_court_orders[0].update(implementation_status: :not_implemented) visit casa_case_path(casa_case) expect(page).to have_content("Court Orders") expect(page).to have_content(casa_case.case_court_orders[0].text) expect(page).to have_content("❌") end it "when partiall implemented" do casa_case.case_court_orders[0].update(implementation_status: :partially_implemented) visit casa_case_path(casa_case) expect(page).to have_content("Court Orders") expect(page).to have_content(casa_case.case_court_orders[0].text) expect(page).to have_content("πŸ•—") end it "when not specified" do casa_case.case_court_orders[0].update(implementation_status: nil) visit casa_case_path(casa_case) expect(page).to have_content("Court Orders") expect(page).to have_content(casa_case.case_court_orders[0].text) expect(page).to have_content("❌") end end end
33.52071
126
0.68985
4a868c922486ef1d35df82af2991c61eb4bc0728
329
cask 'outline' do
  version '3.23.1'
  sha256 'cd41a98975ca6349f9c12053cde2b0e4f4727c9936558cb9558a6ab5a5ebfe09'

  url "http://static.outline.ws/versions/Outline_#{version}.zip"
  appcast 'https://gorillized.s3.amazonaws.com/versions/update_channel.xml'
  name 'Outline'
  homepage 'http://outline.ws/'

  app 'Outline.app'
end
27.416667
75
0.762918
e9444880037259c3691be62e0ec777b1751f41d5
1,653
# == Schema Information
#
# Table name: languages
#
#  id                :integer          not null, primary key
#  name              :string(32)       not null
#  is_right_to_left  :boolean          default(FALSE)
#  can_transliterate :boolean          default(FALSE)
#  created_at        :datetime
#  updated_at        :datetime
#  script            :string(255)
#  locale            :string(255)
#  bilingual         :boolean          default(FALSE)
#  language_font_id  :integer
#  level_band        :string(255)
#
# Indexes
#
#  index_languages_on_name  (name)
#

require 'rails_helper'

describe Language, :type => :model do
  # it { should validate_presence_of :name }
  # it { should validate_presence_of :script }
  # it { should validate_uniqueness_of(:name).case_insensitive}
  # it { should ensure_length_of(:name).is_at_most(32) }

  describe 'default scope' do
    let!(:c1) { Language.create(name: 'Kannada', translated_name: 'Kananda translation', script: 'kn') }
    let!(:c2) { Language.create(name: 'English', translated_name: 'English translation', script: 'en') }

    it 'should order in ascending name' do
      expect(Language.all).to eq [c2, c1]
    end
  end

  describe "is_right_to_left" do
    it "should default to false" do
      lang = Language.create(name: 'Tamil', translated_name: 'Tamil translation', script: 'ta')
      expect(lang.is_right_to_left?).to eql(false)
    end
  end

  describe "can_transliterate" do
    it "should default to false" do
      lang = Language.create(name: 'Tamil', translated_name: 'Tamil translation', script: 'ta')
      expect(lang.can_transliterate?).to eql(false)
    end
  end
end
31.788462
104
0.646703
ed4d3e0859b8db479c17d74c9919c9d2632ceca7
473
# frozen_string_literal: true

module SolidusGdpr
  class DataExporter
    # @api private
    class SendArchive
      attr_reader :email, :archive_path

      def initialize(email, archive_path:)
        @email = email
        @archive_path = archive_path
      end

      def call
        SolidusGdpr.configuration.exports_mailer_class.constantize.export_email(
          email,
          export: File.read(archive_path),
        ).deliver_now
      end
    end
  end
end
20.565217
80
0.651163
ffc430eac4ea5f7764a02e1d9b4d2ecf32726c11
122
class EventUserSerializer < ActiveModel::Serializer
  attributes(
    :id,
    :user_id,
    :event_id,
    :role
  )
end
13.555556
51
0.647541
ed8dc837ab758552e89356fa366158641104c439
1,080
require_relative 'externals/bash_sheller'
require_relative 'externals/pipe_maker'
require_relative 'externals/process_spawner'
require_relative 'externals/random'
require_relative 'externals/stdout_logger'
require_relative 'externals/asynchronous_threader'
require_relative 'node'
require_relative 'prober'
require_relative 'puller'
require_relative 'runner'

class Context

  def initialize(options = {})
    @node     = options[:node]     || Node.new(self)
    @prober   = options[:prober]   || Prober.new(self)
    @puller   = options[:puller]   || Puller.new(self)
    @runner   = options[:runner]   || Runner.new(self)
    @process  = options[:process]  || ProcessSpawner.new
    @sheller  = options[:sheller]  || BashSheller.new(self)
    @threader = options[:threader] || AsynchronousThreader.new
    @piper    = options[:piper]    || PipeMaker.new
    @logger   = options[:logger]   || StdoutLogger.new
    @random   = options[:random]   || Random.new
  end

  attr_reader :node, :prober, :puller, :runner
  attr_reader :process, :sheller, :threader, :piper
  attr_reader :logger, :random

end
31.764706
62
0.723148
e2ae2738d2f1112e2ba92116106dff3306121cd0
3,636
# frozen_string_literal: true require 'spec_helper' RSpec.describe 'User searches for issues', :js do let(:user) { create(:user) } let(:project) { create(:project, namespace: user.namespace) } let!(:issue1) { create(:issue, title: 'issue Foo', project: project, created_at: 1.hour.ago) } let!(:issue2) { create(:issue, :closed, :confidential, title: 'issue Bar', project: project) } def search_for_issue(search) fill_in('dashboard_search', with: search) find('.btn-search').click select_search_scope('Issues') end context 'when signed in' do before do project.add_maintainer(user) sign_in(user) visit(search_path) end include_examples 'top right search form' it 'finds an issue' do search_for_issue(issue1.title) page.within('.results') do expect(page).to have_link(issue1.title) expect(page).not_to have_link(issue2.title) end end it 'hides confidential icon for non-confidential issues' do search_for_issue(issue1.title) page.within('.results') do expect(page).not_to have_css('[data-testid="eye-slash-icon"]') end end it 'shows confidential icon for confidential issues' do search_for_issue(issue2.title) page.within('.results') do expect(page).to have_css('[data-testid="eye-slash-icon"]') end end it 'shows correct badge for open issues' do search_for_issue(issue1.title) page.within('.results') do expect(page).to have_css('.badge-success') expect(page).not_to have_css('.badge-info') end end it 'shows correct badge for closed issues' do search_for_issue(issue2.title) page.within('.results') do expect(page).not_to have_css('.badge-success') expect(page).to have_css('.badge-info') end end it 'sorts by created date' do search_for_issue('issue') page.within('.results') do expect(page.all('.search-result-row').first).to have_link(issue2.title) expect(page.all('.search-result-row').last).to have_link(issue1.title) end find('.reverse-sort-btn').click page.within('.results') do expect(page.all('.search-result-row').first).to have_link(issue1.title) expect(page.all('.search-result-row').last).to have_link(issue2.title) end end context 'when on a project page' do it 'finds an issue' do find('.js-search-project-dropdown').click find('[data-testid="project-filter"]').click_link(project.full_name) search_for_issue(issue1.title) page.within('.results') do expect(page).to have_link(issue1.title) expect(page).not_to have_link(issue2.title) end end end end context 'when signed out' do context 'when block_anonymous_global_searches is disabled' do let(:project) { create(:project, :public) } before do stub_feature_flags(block_anonymous_global_searches: false) visit(search_path) end include_examples 'top right search form' it 'finds an issue' do search_for_issue(issue1.title) page.within('.results') do expect(page).to have_link(issue1.title) expect(page).not_to have_link(issue2.title) end end end context 'when block_anonymous_global_searches is enabled' do before do visit(search_path) end it 'is redirected to login page' do expect(page).to have_content('You must be logged in to search across all of GitLab') end end end end
27.338346
96
0.64659
e82f5b54f6a6084237913ad629b80ac85007d197
3,270
require 'spec_helper' module Ransack describe Configuration do it 'yields Ransack on configure' do Ransack.configure do |config| expect(config).to eq Ransack end end it 'adds predicates' do Ransack.configure do |config| config.add_predicate :test_predicate end expect(Ransack.predicates).to have_key 'test_predicate' expect(Ransack.predicates).to have_key 'test_predicate_any' expect(Ransack.predicates).to have_key 'test_predicate_all' end it 'avoids creating compound predicates if compounds: false' do Ransack.configure do |config| config.add_predicate( :test_predicate_without_compound, :compounds => false ) end expect(Ransack.predicates) .to have_key 'test_predicate_without_compound' expect(Ransack.predicates) .not_to have_key 'test_predicate_without_compound_any' expect(Ransack.predicates) .not_to have_key 'test_predicate_without_compound_all' end it 'should have default value for search key' do expect(Ransack.options[:search_key]).to eq :q end it 'changes default search key parameter' do # store original state so we can restore it later before = Ransack.options.clone Ransack.configure do |config| config.search_key = :query end expect(Ransack.options[:search_key]).to eq :query # restore original state so we don't break other tests Ransack.options = before end it 'adds predicates that take arrays, overriding compounds' do Ransack.configure do |config| config.add_predicate( :test_array_predicate, :wants_array => true, :compounds => true ) end expect(Ransack.predicates['test_array_predicate'].wants_array).to eq true expect(Ransack.predicates).not_to have_key 'test_array_predicate_any' expect(Ransack.predicates).not_to have_key 'test_array_predicate_all' end describe '`wants_array` option takes precedence over Arel predicate' do it 'implicitly wants an array for in/not in predicates' do Ransack.configure do |config| config.add_predicate( :test_in_predicate, :arel_predicate => 'in' ) config.add_predicate( :test_not_in_predicate, :arel_predicate => 'not_in' ) end expect(Ransack.predicates['test_in_predicate'].wants_array).to eq true expect(Ransack.predicates['test_not_in_predicate'].wants_array).to eq true end it 'explicitly does not want array for in/not_in predicates' do Ransack.configure do |config| config.add_predicate( :test_in_predicate_no_array, :arel_predicate => 'in', :wants_array => false ) config.add_predicate( :test_not_in_predicate_no_array, :arel_predicate => 'not_in', :wants_array => false ) end expect(Ransack.predicates['test_in_predicate_no_array'].wants_array).to eq false expect(Ransack.predicates['test_not_in_predicate_no_array'].wants_array).to eq false end end end end
31.142857
92
0.655352
e8af6690b7f45c377d0b3e3ffb1e8976ccb9a333
363
class CreateProjects < ActiveRecord::Migration
  def change
    create_table :projects do |t|
      t.string :title, null: false
      t.date :end_date, null: false
      t.integer :days_left, default: 0
      t.integer :current_count, default: 0
      t.integer :total_count, null: false
      t.string :counter

      t.timestamps null: false
    end
  end
end
24.2
46
0.661157
1a8fd2d83dd5a9f4d92c4d8328f146dc749713ea
168
json.extract! comment4, :id, :content, :post_id, :something, :somethingelse, :something, :more, :created_at, :updated_at
json.url comment4_url(comment4, format: :json)
56
120
0.755952
39e2284342e1c97adc6db7113507255f9567d819
1,476
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'meta_commit/version'

Gem::Specification.new do |spec|
  spec.name          = "meta_commit"
  spec.version       = MetaCommit::VERSION
  spec.authors       = ["Stanislav Dobrovolskiy", "Vitalii Shwetz"]
  spec.email         = ["[email protected]", "[email protected]"]

  spec.summary       = %q{Enrich commit diffs with programing language insights}
  spec.homepage      = "https://github.com/usernam3/meta_commit"
  spec.license       = "MIT"

  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_runtime_dependency "meta_commit_contracts", "~> 0.1"
  spec.add_runtime_dependency "rugged", "~> 0.25"
  spec.add_runtime_dependency "dry-container", "~> 0.6.0"
  spec.add_runtime_dependency "thor", "~> 0.19"

  spec.add_development_dependency "bundler", "~> 1.10"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec", "~> 3.6"
  spec.add_development_dependency "cucumber", "~> 2.4"
  spec.add_development_dependency "aruba", "~> 0.14.2"
  spec.add_development_dependency "rspec-mocks", "~> 3.6"
  spec.add_development_dependency "byebug", "~> 9.0"
  spec.add_development_dependency "coveralls", "~> 0.8"
end
42.171429
104
0.676829
616e15cf21fd53e49cb0fbc2b3cd9fee92f4914f
430
require 'rails_helper'

# Specs in this file have access to a helper object that includes
# the DemosHelper. For example:
#
# describe DemosHelper do
#   describe "string concat" do
#     it "concats two strings with spaces" do
#       expect(helper.concat_strings("this","that")).to eq("this that")
#     end
#   end
# end
RSpec.describe DemosHelper, type: :helper do
  pending "add some examples to (or delete) #{__FILE__}"
end
26.875
71
0.704651
33cb9de3c9b48302d8cc65e6174eff3e87a20128
2,872
require 'partials/idp_selection_partial_controller' require 'partials/analytics_cookie_partial_controller' class RedirectToIdpWarningController < ApplicationController include IdpSelectionPartialController include AnalyticsCookiePartialController include ViewableIdpPartialController SELECTED_IDP_HISTORY_LENGTH = 5 helper_method :user_has_no_docs_or_foreign_id_only?, :other_ways_description def index @idp = decorated_idp @service_name = current_transaction.name if !idp_is_providing_registrations?(@idp) something_went_wrong("IDP with entity id: #{@idp.entity_id} is not providing registrations", :bad_request) elsif @idp.viewable? render 'redirect_to_idp_warning' else something_went_wrong("Couldn't display IDP with entity id: #{@idp.entity_id}") end end def continue idp = decorated_idp if idp.viewable? select_registration(idp) redirect_to redirect_to_idp_register_path else something_went_wrong("Couldn't display IDP with entity id: #{idp.entity_id}") end end def continue_ajax idp = decorated_idp if idp.viewable? select_registration(idp) ajax_idp_redirection_registration_request(recommended, idp.entity_id) else head :bad_request end end private def select_registration(idp) POLICY_PROXY.select_idp(session['verify_session_id'], idp.entity_id, session['requested_loa'], true, analytics_session_id, session[:journey_type]) set_journey_hint_followed(idp.entity_id) set_attempt_journey_hint(idp.entity_id) register_idp_selections(idp.display_name) end def register_idp_selections(idp_name) session[:selected_idp_name] = idp_name selected_idp_names = session[:selected_idp_names] || [] if selected_idp_names.size < SELECTED_IDP_HISTORY_LENGTH selected_idp_names << idp_name session[:selected_idp_names] = selected_idp_names end end def recommended begin if session.fetch(:selected_idp_was_recommended) '(recommended)' else '(not recommended)' end rescue KeyError '(idp recommendation key not set)' end end def decorated_idp @decorated_idp ||= IDENTITY_PROVIDER_DISPLAY_DECORATOR.decorate(selected_identity_provider) end def other_ways_description @other_ways_description = current_transaction.other_ways_description end def user_has_no_docs_or_foreign_id_only? user_has_no_docs? || user_has_foreign_doc_only? end def user_has_no_docs? selected_answer_store.selected_evidence_for('documents').empty? end def user_has_foreign_doc_only? selected_answer_store.selected_evidence_for('documents') == [:non_uk_id_document] end def idp_is_providing_registrations?(idp) current_identity_providers_for_loa.any? { |check_idp| check_idp.simple_id == idp.simple_id } end end
29.306122
150
0.764624
7a30d0b1a3a39a05675cfc01664419b42ca76e36
614
require 'spec_helper_acceptance'

# Ensure NIS Client is not installed - Section 2.3.1
describe package('ypbind') do
  it { should_not be_installed }
end

# Ensure rsh Client is not installed - Section 2.3.2
describe package('rsh') do
  it { should_not be_installed }
end

# Ensure talk client is not installed - Section 2.3.3
describe package('talk') do
  it { should_not be_installed }
end

# Ensure telnet client is not installed - Section 2.3.4
describe package('telnet') do
  it { should_not be_installed }
end

# Ensure LDAP client is not installed - Section 2.3.5
describe package('openldap-clients')
25.583333
55
0.739414
218d4e34d98b6c68888eb2a556fa0a007840507b
1,270
require 'rails_helper'

describe "New author page", type: :feature do
  let(:first_name) { 'Alan' }
  let(:last_name) { 'Turing' }
  let(:homepage) { 'http://wikipedia.de/Alan_Turing' }

  before do
    Author.create(first_name: first_name, last_name: last_name, homepage: homepage)
    visit authors_path
  end

  it "should display details on existing authors" do
    expect(page).to have_text(first_name)
    expect(page).to have_text(last_name)
    expect(page).to have_text(homepage)
    expect(page).to have_css('table')
    expect(page).to have_css('th', text: 'Name')
    expect(page).to have_css('th', text: 'Homepage')
  end

  it "should link to the individual author details page" do
    @author = FactoryBot.create :author
    expect(page).to have_link @author.name
  end

  it "should link to the New Author page" do
    expect(page).to have_link 'Create Author', href: new_author_path
  end

  it "should link to the individual author edit page" do
    @author = FactoryBot.create :author
    expect(page).to have_link "Edit"
  end

  it "should delete author edit page" do
    click_on 'Delete'
    expect(page).to have_no_text(first_name)
    expect(page).to have_no_text(last_name)
    expect(page).to have_no_text(homepage)
  end
end
28.863636
83
0.69685
4affb07d06c4e4add296a38a57694a760db50102
37
module Sober
  VERSION = '0.0.2'
end
9.25
19
0.648649
38090c0f3ea0bb610f96bb406052418115280555
1,615
# frozen_string_literal: true

require 'view/actionable'
require 'view/company'

module View
  class BuyCompanies < Snabberb::Component
    include Actionable

    needs :selected_company, default: nil, store: true

    def render
      @corporation = @game.current_entity

      h(:div, 'Buy Private Companies', [
        *render_companies,
      ].compact)
    end

    def render_companies
      props = {
        style: {
          display: 'inline-block',
          'vertical-align': 'top',
        },
      }

      companies = @game.purchasable_companies.sort_by do |company|
        [company.owner == @corporation.owner ? 0 : 1, company.value]
      end

      companies.map do |company|
        children = [h(Company, company: company)]
        children << render_input if @selected_company == company
        h(:div, props, children)
      end
    end

    def render_input
      input = h(:input, style: { 'margin-right': '1rem' }, props: {
        value: @selected_company.max_price,
        type: 'number',
        min: @selected_company.min_price,
        max: @selected_company.max_price,
        size: @corporation.cash.to_s.size,
      })

      buy = lambda do
        price = input.JS['elm'].JS['value'].to_i
        process_action(Engine::Action::BuyCompany.new(@corporation, @selected_company, price))
        store(:selected_company, nil, skip: true)
      end

      props = {
        style: {
          'text-align': 'center',
          'margin': '1rem',
        },
      }

      h(:div, props, [
        input,
        h(:button, { on: { click: buy } }, 'Buy'),
      ])
    end
  end
end
23.75
94
0.572136
212b49dc4d65e4258d9a68c2aead27e1742f4be1
6,904
# frozen_string_literal: true require 'rails_helper' RSpec.describe ContentMetadataGenerator do subject(:generate) do described_class.generate(druid: 'druid:bc123de5678', object: model) end let(:model) do Cocina::Models.build_request(JSON.parse(data)) end let(:druid) { 'druid:bc123de5678' } let(:file1) do { 'version' => 1, 'type' => 'http://cocina.sul.stanford.edu/models/file.jsonld', 'filename' => '00001.html', 'label' => '00001.html', 'hasMimeType' => 'text/html', 'use' => 'transcription', 'size' => 997, 'administrative' => { 'sdrPreserve' => true, 'shelve' => false }, 'access' => { 'access' => 'dark' }, 'hasMessageDigests' => [ { 'type' => 'sha1', 'digest' => 'cb19c405f8242d1f9a0a6180122dfb69e1d6e4c7' }, { 'type' => 'md5', 'digest' => 'e6d52da47a5ade91ae31227b978fb023' } ] } end let(:file2) do { 'version' => 1, 'type' => 'http://cocina.sul.stanford.edu/models/file.jsonld', 'filename' => '00001.jp2', 'label' => '00001.jp2', 'hasMimeType' => 'image/jp2', 'size' => 149570, 'administrative' => { 'sdrPreserve' => true, 'shelve' => true }, 'access' => { 'access' => 'stanford' }, 'hasMessageDigests' => [] } end let(:file3) do { 'version' => 1, 'type' => 'http://cocina.sul.stanford.edu/models/file.jsonld', 'filename' => '00002.html', 'label' => '00002.html', 'hasMimeType' => 'text/html', 'size' => 1914, 'administrative' => { 'sdrPreserve' => true, 'shelve' => false }, 'access' => { 'access' => 'world' }, 'hasMessageDigests' => [] } end let(:file4) do { 'version' => 1, 'type' => 'http://cocina.sul.stanford.edu/models/file.jsonld', 'filename' => '00002.jp2', 'label' => '00002.jp2', 'hasMimeType' => 'image/jp2', 'size' => 111467, 'administrative' => { 'sdrPreserve' => true, 'shelve' => true }, 'access' => { 'access' => 'world' }, 'hasMessageDigests' => [] } end let(:file5) do { 'version' => 1, 'type' => 'http://cocina.sul.stanford.edu/models/file.jsonld', 'filename' => 'checksum.txt', 'label' => 'checksum.txt', 'hasMimeType' => 'text/plain', 'size' => 11468, 'administrative' => { 'sdrPreserve' => true, 'shelve' => true }, 'access' => { 'access' => 'world' }, 'hasMessageDigests' => [] } end let(:filesets) do [ { 'version' => 1, 'type' => 'http://cocina.sul.stanford.edu/models/fileset.jsonld', 'label' => 'Page 1', 'structural' => { 'contains' => [file1, file2] } }, { 'version' => 1, 'type' => 'http://cocina.sul.stanford.edu/models/fileset.jsonld', 'label' => 'Page 2', 'structural' => { 'contains' => [file3, file4] } } ] end let(:data) do <<~JSON { "type":"#{object_type}", "label":"The object label","version":1,"access":{}, "administrative":{"releaseTags":[],"hasAdminPolicy":"druid:dd999df4567"}, "description":{"title":[{"status":"primary","value":"the object title"}]}, "identification":{},"structural":{"contains":#{filesets.to_json}}} JSON end context 'with a book' do let(:object_type) { Cocina::Models::Vocab.book } let(:filesets) do [ { 'version' => 1, 'type' => 'http://cocina.sul.stanford.edu/models/fileset.jsonld', 'label' => 'Page 1', 'structural' => { 'contains' => [file1, file2] } }, { 'version' => 1, 'type' => 'http://cocina.sul.stanford.edu/models/fileset.jsonld', 'label' => 'Page 2', 'structural' => { 'contains' => [file3, file4] } }, { 'version' => 1, 'type' => 'http://cocina.sul.stanford.edu/models/fileset.jsonld', 'label' => 'Object 1', 'structural' => { 'contains' => [file5] } } ] end it 'generates contentMetadata.xml' do expect(generate).to be_equivalent_to '<?xml version="1.0"?> <contentMetadata objectId="druid:bc123de5678" type="book"> <resource id="bc123de5678_1" sequence="1" 
type="page"> <label>Page 1</label> <file id="00001.html" mimetype="text/html" size="997" preserve="yes" publish="no" shelve="no" role="transcription"> <checksum type="sha1">cb19c405f8242d1f9a0a6180122dfb69e1d6e4c7</checksum> <checksum type="md5">e6d52da47a5ade91ae31227b978fb023</checksum> </file> <file id="00001.jp2" mimetype="image/jp2" size="149570" preserve="yes" publish="yes" shelve="yes"/> </resource> <resource id="bc123de5678_2" sequence="2" type="page"> <label>Page 2</label> <file id="00002.html" mimetype="text/html" size="1914" preserve="yes" publish="yes" shelve="no"/> <file id="00002.jp2" mimetype="image/jp2" size="111467" preserve="yes" publish="yes" shelve="yes"/> </resource> <resource id="bc123de5678_3" sequence="3" type="object"> <label>Object 1</label> <file id="checksum.txt" mimetype="text/plain" size="11468" preserve="yes" publish="yes" shelve="yes"/> </resource> </contentMetadata>' end end context 'with an image' do let(:object_type) { Cocina::Models::Vocab.image } it 'generates contentMetadata.xml' do expect(generate).to be_equivalent_to '<?xml version="1.0"?> <contentMetadata objectId="druid:bc123de5678" type="image"> <resource id="bc123de5678_1" sequence="1" type="image"> <label>Page 1</label> <file id="00001.html" mimetype="text/html" size="997" preserve="yes" publish="no" shelve="no" role="transcription"> <checksum type="sha1">cb19c405f8242d1f9a0a6180122dfb69e1d6e4c7</checksum> <checksum type="md5">e6d52da47a5ade91ae31227b978fb023</checksum> </file> <file id="00001.jp2" mimetype="image/jp2" size="149570" preserve="yes" publish="yes" shelve="yes"/> </resource> <resource id="bc123de5678_2" sequence="2" type="image"> <label>Page 2</label> <file id="00002.html" mimetype="text/html" size="1914" preserve="yes" publish="yes" shelve="no"/> <file id="00002.jp2" mimetype="image/jp2" size="111467" preserve="yes" publish="yes" shelve="yes"/> </resource> </contentMetadata>' end end end
30.959641
128
0.531576
0125f05489c7fe6e3a4df723ffe21b6b4a0934dc
350
cask "wondershare-filmora" do version "9.5.1.13" sha256 "c61779db5cf1f4b8f5f102f1c77139aeeabeced08979ed51e607c1b1639b80fc" url "http://download.wondershare.com/filmora#{version.major}-mac_full718.dmg" name "Wondershare Filmora9" homepage "https://filmora.wondershare.com/video-editor/" app "Wondershare Filmora#{version.major}.app" end
31.818182
79
0.785714
0306f857c36e5ca18678f6a8c52e2f0fe36ca2e5
15,852
# typed: false # frozen_string_literal: true module Homebrew module Diagnostic class Volumes def initialize @volumes = get_mounts end def which(path) vols = get_mounts path # no volume found return -1 if vols.empty? vol_index = @volumes.index(vols[0]) # volume not found in volume list return -1 if vol_index.nil? vol_index end def get_mounts(path = nil) vols = [] # get the volume of path, if path is nil returns all volumes args = %w[/bin/df -P] args << path if path Utils.popen_read(*args) do |io| io.each_line do |line| case line.chomp # regex matches: /dev/disk0s2 489562928 440803616 48247312 91% / when /^.+\s+[0-9]+\s+[0-9]+\s+[0-9]+\s+[0-9]{1,3}%\s+(.+)/ vols << Regexp.last_match(1) end end end vols end end class Checks undef fatal_preinstall_checks, fatal_build_from_source_checks, fatal_setup_build_environment_checks, supported_configuration_checks, build_from_source_checks def fatal_preinstall_checks checks = %w[ check_access_directories ] # We need the developer tools for `codesign`. checks << "check_for_installed_developer_tools" if Hardware::CPU.arm? checks.freeze end def fatal_build_from_source_checks %w[ check_xcode_license_approved check_xcode_minimum_version check_clt_minimum_version check_if_xcode_needs_clt_installed check_if_supported_sdk_available check_broken_sdks ].freeze end def fatal_setup_build_environment_checks %w[ check_xcode_minimum_version check_clt_minimum_version check_if_supported_sdk_available ].freeze end def supported_configuration_checks %w[ check_for_unsupported_macos ].freeze end def build_from_source_checks %w[ check_for_installed_developer_tools check_xcode_up_to_date check_clt_up_to_date ].freeze end def check_for_non_prefixed_findutils findutils = Formula["findutils"] return unless findutils.any_version_installed? gnubin = %W[#{findutils.opt_libexec}/gnubin #{findutils.libexec}/gnubin] default_names = Tab.for_name("findutils").with? "default-names" return if !default_names && (paths & gnubin).empty? <<~EOS Putting non-prefixed findutils in your path can cause python builds to fail. EOS rescue FormulaUnavailableError nil end def check_for_unsupported_macos return if Homebrew::EnvConfig.developer? return if ENV["HOMEBREW_INTEGRATION_TEST"] who = +"We" what = if OS::Mac.version.prerelease? "pre-release version" elsif OS::Mac.version.outdated_release? who << " (and Apple)" "old version" end return if what.blank? who.freeze <<~EOS You are using macOS #{MacOS.version}. #{who} do not provide support for this #{what}. #{please_create_pull_requests(what)} EOS end def check_xcode_up_to_date return unless MacOS::Xcode.outdated? # CI images are going to end up outdated so don't complain when # `brew test-bot` runs `brew doctor` in the CI for the Homebrew/brew # repository. This only needs to support whatever CI providers # Homebrew/brew is currently using. return if ENV["GITHUB_ACTIONS"] message = <<~EOS Your Xcode (#{MacOS::Xcode.version}) is outdated. Please update to Xcode #{MacOS::Xcode.latest_version} (or delete it). #{MacOS::Xcode.update_instructions} EOS if OS::Mac.version.prerelease? current_path = Utils.popen_read("/usr/bin/xcode-select", "-p") message += <<~EOS If #{MacOS::Xcode.latest_version} is installed, you may need to: sudo xcode-select --switch /Applications/Xcode.app Current developer directory is: #{current_path} EOS end message end def check_clt_up_to_date return unless MacOS::CLT.outdated? # CI images are going to end up outdated so don't complain when # `brew test-bot` runs `brew doctor` in the CI for the Homebrew/brew # repository. 
This only needs to support whatever CI providers # Homebrew/brew is currently using. return if ENV["GITHUB_ACTIONS"] <<~EOS A newer Command Line Tools release is available. #{MacOS::CLT.update_instructions} EOS end def check_xcode_minimum_version return unless MacOS::Xcode.below_minimum_version? xcode = MacOS::Xcode.version.to_s xcode += " => #{MacOS::Xcode.prefix}" unless MacOS::Xcode.default_prefix? <<~EOS Your Xcode (#{xcode}) is too outdated. Please update to Xcode #{MacOS::Xcode.latest_version} (or delete it). #{MacOS::Xcode.update_instructions} EOS end def check_clt_minimum_version return unless MacOS::CLT.below_minimum_version? <<~EOS Your Command Line Tools are too outdated. #{MacOS::CLT.update_instructions} EOS end def check_if_xcode_needs_clt_installed return unless MacOS::Xcode.needs_clt_installed? <<~EOS Xcode alone is not sufficient on #{MacOS.version.pretty_name}. #{DevelopmentTools.installation_instructions} EOS end def check_ruby_version return if RUBY_VERSION == HOMEBREW_REQUIRED_RUBY_VERSION return if Homebrew::EnvConfig.developer? && OS::Mac.version.prerelease? <<~EOS Ruby version #{RUBY_VERSION} is unsupported on macOS #{MacOS.version}. Homebrew is developed and tested on Ruby #{HOMEBREW_REQUIRED_RUBY_VERSION}, and may not work correctly on other Rubies. Patches are accepted as long as they don't cause breakage on supported Rubies. EOS end def check_xcode_prefix prefix = MacOS::Xcode.prefix return if prefix.nil? return unless prefix.to_s.include?(" ") <<~EOS Xcode is installed to a directory with a space in the name. This will cause some formulae to fail to build. EOS end def check_xcode_prefix_exists prefix = MacOS::Xcode.prefix return if prefix.nil? || prefix.exist? <<~EOS The directory Xcode is reportedly installed to doesn't exist: #{prefix} You may need to `xcode-select` the proper path if you have moved Xcode. EOS end def check_xcode_select_path return if MacOS::CLT.installed? return unless MacOS::Xcode.installed? return if File.file?("#{MacOS.active_developer_dir}/usr/bin/xcodebuild") path = MacOS::Xcode.bundle_path path = "/Developer" if path.nil? || !path.directory? <<~EOS Your Xcode is configured with an invalid path. You should change it to the correct path: sudo xcode-select --switch #{path} EOS end def check_xcode_license_approved # If the user installs Xcode-only, they have to approve the # license or no "xc*" tool will work. return unless `/usr/bin/xcrun clang 2>&1`.include?("license") return if $CHILD_STATUS.success? <<~EOS You have not agreed to the Xcode license. Agree to the license by opening Xcode.app or running: sudo xcodebuild -license EOS end def check_filesystem_case_sensitive dirs_to_check = [ HOMEBREW_PREFIX, HOMEBREW_REPOSITORY, HOMEBREW_CELLAR, HOMEBREW_TEMP, ] case_sensitive_dirs = dirs_to_check.select do |dir| # We select the dir as being case-sensitive if either the UPCASED or the # downcased variant is missing. # Of course, on a case-insensitive fs, both exist because the os reports so. # In the rare situation when the user has indeed a downcased and an upcased # dir (e.g. /TMP and /tmp) this check falsely thinks it is case-insensitive # but we don't care because: 1. there is more than one dir checked, 2. the # check is not vital and 3. we would have to touch files otherwise. upcased = Pathname.new(dir.to_s.upcase) downcased = Pathname.new(dir.to_s.downcase) dir.exist? && !(upcased.exist? && downcased.exist?) end return if case_sensitive_dirs.empty? 
volumes = Volumes.new case_sensitive_vols = case_sensitive_dirs.map do |case_sensitive_dir| volumes.get_mounts(case_sensitive_dir) end case_sensitive_vols.uniq! <<~EOS The filesystem on #{case_sensitive_vols.join(",")} appears to be case-sensitive. The default macOS filesystem is case-insensitive. Please report any apparent problems. EOS end def check_for_gettext find_relative_paths("lib/libgettextlib.dylib", "lib/libintl.dylib", "include/libintl.h") return if @found.empty? # Our gettext formula will be caught by check_linked_keg_only_brews gettext = begin Formulary.factory("gettext") rescue nil end if gettext&.linked_keg&.directory? allowlist = ["#{HOMEBREW_CELLAR}/gettext"] if Hardware::CPU.physical_cpu_arm64? allowlist += %W[ #{HOMEBREW_MACOS_ARM_DEFAULT_PREFIX}/Cellar/gettext #{HOMEBREW_DEFAULT_PREFIX}/Cellar/gettext ] end return if @found.all? do |path| realpath = Pathname.new(path).realpath.to_s allowlist.any? { |rack| realpath.start_with?(rack) } end end inject_file_list @found, <<~EOS gettext files detected at a system prefix. These files can cause compilation and link failures, especially if they are compiled with improper architectures. Consider removing these files: EOS end def check_for_iconv find_relative_paths("lib/libiconv.dylib", "include/iconv.h") return if @found.empty? libiconv = begin Formulary.factory("libiconv") rescue nil end if libiconv&.linked_keg&.directory? unless libiconv.keg_only? <<~EOS A libiconv formula is installed and linked. This will break stuff. For serious. Unlink it. EOS end else inject_file_list @found, <<~EOS libiconv files detected at a system prefix other than /usr. Homebrew doesn't provide a libiconv formula, and expects to link against the system version in /usr. libiconv in other prefixes can cause compile or link failure, especially if compiled with improper architectures. macOS itself never installs anything to /usr/local so it was either installed by a user or some other third party software. tl;dr: delete these files: EOS end end def check_for_bitdefender if !Pathname("/Library/Bitdefender/AVP/EndpointSecurityforMac.app").exist? && !Pathname("/Library/Bitdefender/AVP/BDLDaemon").exist? return end <<~EOS You have installed Bitdefender. The "Traffic Scan" option interferes with Homebrew's ability to download packages. See: #{Formatter.url("https://github.com/Homebrew/brew/issues/5558")} EOS end def check_for_multiple_volumes return unless HOMEBREW_CELLAR.exist? volumes = Volumes.new # Find the volumes for the TMP folder & HOMEBREW_CELLAR real_cellar = HOMEBREW_CELLAR.realpath where_cellar = volumes.which real_cellar begin tmp = Pathname.new(Dir.mktmpdir("doctor", HOMEBREW_TEMP)) begin real_tmp = tmp.realpath.parent where_tmp = volumes.which real_tmp ensure Dir.delete tmp end rescue return end return if where_cellar == where_tmp <<~EOS Your Cellar and TEMP directories are on different volumes. macOS won't move relative symlinks across volumes unless the target file already exists. Brews known to be affected by this are Git and Narwhal. You should set the "HOMEBREW_TEMP" environment variable to a suitable directory on the same volume as your Cellar. EOS end def check_deprecated_caskroom_taps tapped_caskroom_taps = Tap.select { |t| t.user == "caskroom" || t.name == "phinze/cask" } .map(&:name) return if tapped_caskroom_taps.empty? <<~EOS You have the following deprecated, cask taps tapped: #{tapped_caskroom_taps.join("\n ")} Untap them with `brew untap`. EOS end def check_if_supported_sdk_available return unless DevelopmentTools.installed? 
return unless MacOS.sdk_root_needed? return if MacOS.sdk locator = MacOS.sdk_locator source = if locator.source == :clt return if MacOS::CLT.below_minimum_version? # Handled by other diagnostics. update_instructions = MacOS::CLT.update_instructions "Command Line Tools (CLT)" else return if MacOS::Xcode.below_minimum_version? # Handled by other diagnostics. update_instructions = MacOS::Xcode.update_instructions "Xcode" end <<~EOS Your #{source} does not support macOS #{MacOS.version}. It is either outdated or was modified. Please update your #{source} or delete it if no updates are available. #{update_instructions} EOS end # The CLT 10.x -> 11.x upgrade process on 10.14 contained a bug which broke the SDKs. # Notably, MacOSX10.14.sdk would indirectly symlink to MacOSX10.15.sdk. # This diagnostic was introduced to check for this and recommend a full reinstall. def check_broken_sdks locator = MacOS.sdk_locator return if locator.all_sdks.all? do |sdk| path_version = sdk.path.basename.to_s[MacOS::SDK::VERSIONED_SDK_REGEX, 1] next true if path_version.blank? sdk.version == MacOS::Version.new(path_version).strip_patch end if locator.source == :clt source = "Command Line Tools (CLT)" path_to_remove = MacOS::CLT::PKG_PATH installation_instructions = MacOS::CLT.installation_instructions else source = "Xcode" path_to_remove = MacOS::Xcode.bundle_path installation_instructions = MacOS::Xcode.installation_instructions end <<~EOS The contents of the SDKs in your #{source} installation do not match the SDK folder names. A clean reinstall of #{source} should fix this. Remove the broken installation before reinstalling: sudo rm -rf #{path_to_remove} #{installation_instructions} EOS end end end end
32.819876
103
0.614118
08a7f3dc06107d77640678783e9da4d92e77b53b
102
#!/usr/bin/env ruby -Ks

require "mkmf"

$CFLAGS << ' -W -Wall'

create_makefile("bitpack/bitpack")
11.333333
34
0.647059
1c921e7d5acbc4289694a39582b3d5cafea01593
158
require 'rails_helper'

RSpec.describe User, :type => :model do
  let(:user) { Factory.build(:user) }
  subject { user }

  it { should be_valid }
end
17.555556
39
0.658228
2648543dbc037916af070167201857a3e1ba509e
2,789
#!/usr/bin/env ruby # Encoding: utf-8 # # Copyright:: Copyright 2011, Google Inc. All Rights Reserved. # # License:: Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # This example removes an ad using the 'REMOVE' operator. To get ads, run # get_text_ads.rb. require 'adwords_api' def remove_ad(ad_group_id, ad_id) # AdwordsApi::Api will read a config file from ENV['HOME']/adwords_api.yml # when called without parameters. adwords = AdwordsApi::Api.new # To enable logging of SOAP requests, set the log_level value to 'DEBUG' in # the configuration file or provide your own logger: # adwords.logger = Logger.new('adwords_xml.log') ad_group_ad_srv = adwords.service(:AdGroupAdService, API_VERSION) # Prepare for deleting ad. operation = { :operator => 'REMOVE', :operand => { :ad_group_id => ad_group_id, :ad => { :xsi_type => 'Ad', :id => ad_id } } } # Remove ad. response = ad_group_ad_srv.mutate([operation]) if response and response[:value] ad = response[:value].first puts "Ad ID %d was successfully removed." % ad[:ad][:id] else puts 'No ads were removed.' end end if __FILE__ == $0 API_VERSION = :v201607 begin # IDs of an ad to remove and its ad group. ad_group_id = 'INSERT_AD_GROUP_ID_HERE'.to_i ad_id = 'INSERT_AD_ID_HERE'.to_i remove_ad(ad_group_id, ad_id) # Authorization error. rescue AdsCommon::Errors::OAuth2VerificationRequired => e puts "Authorization credentials are not valid. Edit adwords_api.yml for " + "OAuth2 client ID and secret and run misc/setup_oauth2.rb example " + "to retrieve and store OAuth2 tokens." puts "See this wiki page for more details:\n\n " + 'https://github.com/googleads/google-api-ads-ruby/wiki/OAuth2' # HTTP errors. rescue AdsCommon::Errors::HttpError => e puts "HTTP Error: %s" % e # API errors. rescue AdwordsApi::Errors::ApiException => e puts "Message: %s" % e.message puts 'Errors:' e.errors.each_with_index do |error, index| puts "\tError [%d]:" % (index + 1) error.each do |field, value| puts "\t\t%s: %s" % [field, value] end end end end
30.988889
79
0.659376
1dfc586f94461c9de50195ab97ace2b412aa9a11
150
class Experience < ApplicationRecord
  validates :title, presence: true
  validates :name, presence: true
  validates :start_date, presence: true
end
25
39
0.78
796d5b58094806df470b7ff66749dd10bfd21fe8
2,032
class ChangeEnumsToIntegers < ActiveRecord::Migration[5.0] class ShipmentModel < ApplicationRecord self.table_name = "shipments" SHIPPING_CARRIER_MAP = { "fedex" => 0, "usps" => 1, "ups" => 2 } REVERSE_SHIPPING_CARRIER_MAP = SHIPPING_CARRIER_MAP.invert def new_shipping_carrier_value SHIPPING_CARRIER_MAP[old_shipping_carrier] end def old_shipping_carrier_value REVERSE_SHIPPING_CARRIER_MAP[old_shipping_carrier] end end class OrderModel < ApplicationRecord self.table_name = "orders" STATUS_MAP = { "pending" => 0, "approved" => 1, "rejected" => 2, "filled" => 3, "shipped" => 4, "received" => 5, "closed" => 6 } REVERSE_STATUS_MAP = STATUS_MAP.invert def new_status_value STATUS_MAP[old_status] end def old_status_value REVERSE_STATUS_MAP[old_status] end end def change reversible do |dir| rename_column :shipments, :shipping_carrier, :old_shipping_carrier rename_column :orders, :status, :old_status dir.up do add_column :shipments, :shipping_carrier, :integer add_column :orders, :status, :integer end dir.down do add_column :shipments, :shipping_carrier, :string add_column :orders, :status, :string end ShipmentModel.reset_column_information ShipmentModel.all.each do |shipment| dir.up { shipment.shipping_carrier = shipment.new_shipping_carrier_value } dir.down { shipment.shipping_carrier = shipment.old_shipping_carrier_value } shipment.save! end OrderModel.reset_column_information OrderModel.all.each do |order| dir.up { order.status = order.new_status_value } dir.down { order.status = order.old_status_value } order.save! end change_column_null :orders, :status, false remove_column :shipments, :old_shipping_carrier remove_column :orders, :old_status end end end
24.481928
84
0.663878
01bbc385fb67724eb8cbc1a625bc5c6ead6303d0
2,908
require 'spec_helper' describe 'Keyword Hint Enrichment' do before do @item = Consummo::FeedItem.new(title: "The quick brown fox jumps over the lazy dog") end context 'Nil text for Enrichment' do it 'should return a nil value' do sut = Consummo::KeywordHintEnricher.new(keywords: %w(Apple Banana Pear)) result = sut.enrich(Consummo::FeedItem.new(title: nil), []) expect(result[:hinted_title]).to be_nil end end context 'Empty text for Enrichment' do it 'should return a nil value' do sut = Consummo::KeywordHintEnricher.new(keywords: %w(Apple Banana Pear)) result = sut.enrich(Consummo::FeedItem.new(title: ""), []) expect(result[:hinted_title]).to be_nil end end context 'Default Keywords' do it 'should accept a list of default keywords' do sut = Consummo::KeywordHintEnricher.new(keywords: %w(Apple Banana Pear)) end it 'should use the default keywords for enrichment' do sut = Consummo::KeywordHintEnricher.new(keywords: ["quick"]) result = sut.enrich(@item, []) expect(result[:hinted_title]).to eq "The <strong>quick</strong> brown fox jumps over the lazy dog" end end context 'when no keywords are present' do it 'should return the original text' do keywords = ["foooooobaarrrrr"] sut = Consummo::KeywordHintEnricher.new result = sut.enrich(@item, keywords) expect(result[:hinted_title]).to eq @item.title end end context 'when one single word keyword' do it 'should modify strings in the text that match the keyword' do keywords = ["quick"] sut = Consummo::KeywordHintEnricher.new result = sut.enrich(@item, keywords) expect(result[:hinted_title]).to eq "The <strong>quick</strong> brown fox jumps over the lazy dog" end end context 'when one multi-word keyword' do it 'should modify strings in the text that match the keyword' do keywords = ["quick brown"] sut = Consummo::KeywordHintEnricher.new result = sut.enrich(@item, keywords) expect(result[:hinted_title]).to eq "The <strong>quick brown</strong> fox jumps over the lazy dog" end end context 'when two keywords multi-word' do it 'should modify strings in the text that match the keyword' do keywords = ["quick brown", "over the"] sut = Consummo::KeywordHintEnricher.new result = sut.enrich(@item, keywords) expect(result[:hinted_title]).to eq "The <strong>quick brown</strong> fox jumps <strong>over the</strong> lazy dog" end end context 'when cases are mismatched' do it 'should modify strings in the text that match the keyword' do keywords = ["Quick Brown"] sut = Consummo::KeywordHintEnricher.new result = sut.enrich(@item, keywords) expect(result[:hinted_title]).to eq "The <strong>quick brown</strong> fox jumps over the lazy dog" end end end
40.388889
121
0.685007
ab8be996e5f05fbd8e79e1d8dbd64fd49012f5d5
2,986
# encoding: utf-8 # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::CognitiveServices::EntitySearch::V1_0 module Models # # Defines a contractual rule for link attribution. # class ContractualRulesLinkAttribution < ContractualRulesAttribution include MsRestAzure def initialize @_type = "ContractualRules/LinkAttribution" end attr_accessor :_type # @return [String] The attribution text. attr_accessor :text # @return [String] The URL to the provider's website. Use text and URL to # create the hyperlink. attr_accessor :url # @return [Boolean] Indicates whether this provider's attribution is # optional. attr_accessor :optional_for_list_display # # Mapper for ContractualRulesLinkAttribution class as Ruby Hash. # This will be used for serialization/deserialization. # def self.mapper() { client_side_validation: true, required: false, serialized_name: 'ContractualRules/LinkAttribution', type: { name: 'Composite', class_name: 'ContractualRulesLinkAttribution', model_properties: { target_property_name: { client_side_validation: true, required: false, read_only: true, serialized_name: 'targetPropertyName', type: { name: 'String' } }, _type: { client_side_validation: true, required: true, serialized_name: '_type', type: { name: 'String' } }, must_be_close_to_content: { client_side_validation: true, required: false, read_only: true, serialized_name: 'mustBeCloseToContent', type: { name: 'Boolean' } }, text: { client_side_validation: true, required: true, serialized_name: 'text', type: { name: 'String' } }, url: { client_side_validation: true, required: true, serialized_name: 'url', type: { name: 'String' } }, optional_for_list_display: { client_side_validation: true, required: false, read_only: true, serialized_name: 'optionalForListDisplay', type: { name: 'Boolean' } } } } } end end end end
28.438095
79
0.499665
4a099ba2f124af8282595c1af0463e56616467e6
441
ENV['RAILS_ENV'] ||= 'test'
require_relative '../config/environment'
require 'rails/test_help'
require 'minitest/reporters'
MiniTest::Reporters.use!

class ActiveSupport::TestCase
  # Run tests in parallel with specified workers
  parallelize(workers: :number_of_processors)

  # Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
  fixtures :all

  # Add more helper methods to be used by all tests here...
end
25.941176
82
0.759637
6a7fee1cd6a5c553c2afda6b8a873c709c585dbb
134
require 'test_helper'

class ConversationTest < ActionDispatch::IntegrationTest
  # test "the truth" do
  #   assert true
  # end
end
16.75
56
0.731343
ed5f0a01cf9b2978484ef8c20f0ff2944127468a
3,178
require 'spec_helper' describe Overcommit::ConfigurationValidator do let(:output) { StringIO.new } let(:logger) { Overcommit::Logger.new(output) } let(:options) { { logger: logger } } let(:config) { Overcommit::Configuration.new(config_hash, validate: false) } subject { described_class.new.validate(config, config_hash, options) } context 'when hook has an invalid name' do let(:config_hash) do { 'PreCommit' => { 'My_Hook' => { 'enabled' => false, }, }, } end it 'raises an error' do expect { subject }.to raise_error Overcommit::Exceptions::ConfigurationError end end context 'when hook has `env` set' do let(:config_hash) do { 'PreCommit' => { 'MyHook' => { 'enabled' => true, 'env' => env, }, }, } end context 'and it is a single string' do let(:env) { 'OVERCOMMIT_ENV_VAR=1' } it 'raises an error and mentions `env` must be a hash' do expect { subject }.to raise_error Overcommit::Exceptions::ConfigurationError output.string.should =~ /must be a hash/i end end context 'and it is a hash with string values' do let(:env) { { 'OVERCOMMIT_ENV_VAR' => '1', 'OVERCOMMIT_ENV_VAR_2' => '2' } } it 'is valid' do expect { subject }.not_to raise_error end end context 'and it is a hash with integer values' do let(:env) { { 'OVERCOMMIT_ENV_VAR' => 1, 'OVERCOMMIT_ENV_VAR_2' => 2 } } it 'raises an error' do expect { subject }.to raise_error Overcommit::Exceptions::ConfigurationError output.string.should =~ /`OVERCOMMIT_ENV_VAR`.*must be a string/i output.string.should =~ /`OVERCOMMIT_ENV_VAR_2`.*must be a string/i end end context 'and it is a hash with boolean values' do let(:env) { { 'OVERCOMMIT_ENV_VAR' => true, 'OVERCOMMIT_ENV_VAR_2' => false } } it 'raises an error' do expect { subject }.to raise_error Overcommit::Exceptions::ConfigurationError output.string.should =~ /`OVERCOMMIT_ENV_VAR`.*must be a string/i output.string.should =~ /`OVERCOMMIT_ENV_VAR_2`.*must be a string/i end end end context 'when hook has `processors` set' do let(:concurrency) { 4 } let(:config_hash) do { 'concurrency' => concurrency, 'PreCommit' => { 'MyHook' => { 'enabled' => true, 'processors' => processors, }, }, } end context 'and it is larger than `concurrency`' do let(:processors) { concurrency + 1 } it 'raises an error' do expect { subject }.to raise_error Overcommit::Exceptions::ConfigurationError end end context 'and it is equal to `concurrency`' do let(:processors) { concurrency } it 'is valid' do expect { subject }.not_to raise_error end end context 'and it is less than `concurrency`' do let(:processors) { concurrency - 1 } it 'is valid' do expect { subject }.not_to raise_error end end end end
27.162393
85
0.591567
28b5a0316c5e82bb2eda5d51c5e97267f77ba6bf
1,398
# frozen_string_literal: true # WARNING ABOUT GENERATED CODE # # This file is generated. See the contributing guide for more information: # https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md # # WARNING ABOUT GENERATED CODE require 'aws-sdk-core' require 'aws-sigv4' require_relative 'aws-sdk-mediatailor/types' require_relative 'aws-sdk-mediatailor/client_api' require_relative 'aws-sdk-mediatailor/client' require_relative 'aws-sdk-mediatailor/errors' require_relative 'aws-sdk-mediatailor/resource' require_relative 'aws-sdk-mediatailor/customizations' # This module provides support for AWS MediaTailor. This module is available in the # `aws-sdk-mediatailor` gem. # # # Client # # The {Client} class provides one method for each API operation. Operation # methods each accept a hash of request parameters and return a response # structure. # # media_tailor = Aws::MediaTailor::Client.new # resp = media_tailor.delete_playback_configuration(params) # # See {Client} for more information. # # # Errors # # Errors returned from AWS MediaTailor are defined in the # {Errors} module and all extend {Errors::ServiceError}. # # begin # # do stuff # rescue Aws::MediaTailor::Errors::ServiceError # # rescues all AWS MediaTailor API errors # end # # See {Errors} for more information. # # @service module Aws::MediaTailor GEM_VERSION = '1.29.0' end
26.377358
83
0.752504
61a6cc005fd2cb48e3f1b1e072859a0bac151195
463
class PagesController < ApplicationController
  skip_before_action :authenticate_user!
  before_action :deny_spammers!, only: [:show]

  def show
    template = "pages/#{params[:page]}"
    if template_exists? template
      render layout: true, template: template
    else
      raise ActionController::RoutingError.new("Page not found: #{template}")
    end
  end

  def error_404
    render layout: false, file: 'public/404.html', status: :not_found
  end
end
25.722222
77
0.712743
6189d7eefa5c27f4da95af9b4cfd3f3a7843cf6e
3,218
# frozen_string_literal: true # db_title = 'Demo vragenlijst' # Dagboekvragenlijst moet geen titel hebben alleen een logo # # db_name1 = 'demo' # dagboek1 = Questionnaire.find_by(name: db_name1) # dagboek1 ||= Questionnaire.new(name: db_name1) # dagboek1.key = File.basename(__FILE__)[0...-3] # dagboek_content = [ # { # type: :raw, # content: '<p class="flow-text">Hier staat een demo vragenlijst voor u klaar. Dit staat in een RAW tag</p>' # }, { # id: :v1, # 1 # type: :radio, # show_otherwise: false, # title: 'Voorbeeld van een radio', # options: [ # { title: 'Ja', shows_questions: %i[v2] }, # { title: 'Nee', shows_questions: %i[v2] } # ] # }, { # id: :v2, # hidden: true, # type: :range, # title: 'Voorbeeld met een range', # labels: ['heel weinig', 'heel veel'] # }, { # id: :v3, # type: :time, # hours_from: 0, # hours_to: 11, # hours_step: 1, # title: 'Voorbeeld van een time vraag', # section_start: 'Overige vragen' # }, { # id: :v4, # type: :date, # title: 'Voorbeeld van een date vraag', # labels: ['helemaal intuΓ―tief ', 'helemaal gepland'] # }, { # id: :v5, # type: :textarea, # placeholder: 'Hier staat standaard tekst', # title: 'Voorbeeld van een textarea' # }, { # id: :v6, # type: :textfield, # placeholder: 'Hier staat standaard tekst', # title: 'Voorbeeld van een textfield' # }, { # id: :v7, # type: :checkbox, # required: true, # title: 'Voorbeeld van een checkbox vraag', # options: [ # { title: 'Antwoord 1', tooltip: 'Tooltip 1' }, # { title: 'Antwoord 2', tooltip: 'Tooltip 2' }, # { title: 'Antwoord 3', tooltip: 'Tooltip 3' } # ] # }, { # id: :v8, # type: :likert, # title: 'Voorbeeld van een likertschaal', # tooltip: 'some tooltip', # options: ['helemaal oneens', 'oneens', 'neutraal', 'eens', 'helemaal eens'] # }, { # id: :v9, # type: :number, # title: 'Voorbeeld van een numeriek veld', # tooltip: 'some tooltip', # maxlength: 4, # placeholder: '1234', # min: 0, # max: 9999, # required: true # }, { # id: :v10, # type: :textfield, # placeholder: 'Hier staat standaard tekst', # title: 'Voorbeeld van een klein vrij textveld' # }, { # id: :v11, # title: 'Voorbeeld van een expandable', # remove_button_label: 'Verwijder', # add_button_label: 'Voeg toe', # type: :expandable, # default_expansions: 1, # max_expansions: 10, # content: [ # { # id: :v11_1, # type: :checkbox, # title: 'Met een checkbox vraag', # options: [ # 'Antwoord A', # 'Antwoord B', # 'Antwoord C', # 'Antwoord D', # 'Antwoord E', # 'Antwoord F' # ] # } # ] # }, { # id: :v12, # type: :dropdown, # title: 'Waar hadden de belangrijkste gebeurtenissen mee te maken?', # options: ['hobby/sport', 'werk', 'vriendschap', 'romantische relatie', 'thuis'] # } # ] # dagboek1.content = { questions: dagboek_content, scores: [] } # dagboek1.title = db_title # dagboek1.save!
27.982609
112
0.548167
1c48d886e8bc0595ace9ed547742f8a8743e64d6
331
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::Relay::Mgmt::V2017_04_01
  module Models
    #
    # Defines values for Relaytype
    #
    module Relaytype
      NetTcp = "NetTcp"
      Http = "Http"
    end
  end
end
20.6875
70
0.679758
5dac411ceed40f5535205e1f943c19901b2e3add
2,307
# (c) Copyright 2006-2007 Nick Sieger <[email protected]> # See the file LICENSE.txt included with the distribution for # software license details. require File.dirname(__FILE__) + "/../../../spec_helper.rb" require 'rake' def save_env(v) ENV["PREV_#{v}"] = ENV[v] end def restore_env(v) ENV[v] = ENV["PREV_#{v}"] ENV.delete("PREV_#{v}") end describe "ci_reporter ci:setup:testunit task" do before(:each) do @rake = Rake::Application.new Rake.application = @rake load CI_REPORTER_LIB + '/ci/reporter/rake/test_unit.rb' save_env "CI_REPORTS" save_env "TESTOPTS" ENV["CI_REPORTS"] = "some-bogus-nonexistent-directory-that-wont-fail-rm_rf" end after(:each) do restore_env "TESTOPTS" restore_env "CI_REPORTS" Rake.application = nil end it "should set ENV['TESTOPTS'] to include test/unit setup file" do @rake["ci:setup:testunit"].invoke ENV["TESTOPTS"].should =~ /test_unit_loader/ end it "should append to ENV['TESTOPTS'] if it already contains a value" do ENV["TESTOPTS"] = "somevalue".freeze @rake["ci:setup:testunit"].invoke ENV["TESTOPTS"].should =~ /somevalue.*test_unit_loader/ end end describe "ci_reporter ci:setup:rspec task" do before(:each) do @rake = Rake::Application.new Rake.application = @rake load CI_REPORTER_LIB + '/ci/reporter/rake/rspec.rb' save_env "CI_REPORTS" save_env "SPEC_OPTS" ENV["CI_REPORTS"] = "some-bogus-nonexistent-directory-that-wont-fail-rm_rf" end after(:each) do restore_env "SPEC_OPTS" restore_env "CI_REPORTS" Rake.application = nil end it "should set ENV['SPEC_OPTS'] to include rspec formatter args" do @rake["ci:setup:rspec"].invoke ENV["SPEC_OPTS"].should =~ /--require.*rspec_loader.*--format.*CI::Reporter::RSpec/ end it "should set ENV['SPEC_OPTS'] to include rspec doc formatter if task is ci:setup:rspecdoc" do @rake["ci:setup:rspecdoc"].invoke ENV["SPEC_OPTS"].should =~ /--require.*rspec_loader.*--format.*CI::Reporter::RSpecDoc/ end it "should append to ENV['SPEC_OPTS'] if it already contains a value" do ENV["SPEC_OPTS"] = "somevalue".freeze @rake["ci:setup:rspec"].invoke ENV["SPEC_OPTS"].should =~ /somevalue.*--require.*rspec_loader.*--format.*CI::Reporter::RSpec/ end end
31.175676
98
0.687473
08504d4124bb482bae0e8e69e855981e4e18ea89
569
# frozen_string_literal: true

require "rails/generators/test_unit"

module TestUnit # :nodoc:
  module Generators # :nodoc:
    class SystemGenerator < Base # :nodoc:
      check_class_collision suffix: "Test"

      def create_test_files
        if !File.exist?(File.join("test/application_system_test_case.rb"))
          template "application_system_test_case.rb", File.join("test", "application_system_test_case.rb")
        end

        template "system_test.rb", File.join("test/system", class_path, "#{file_name.pluralize}_test.rb")
      end
    end
  end
end
28.45
106
0.699473
035a62ce556adb8ec9e855d855d74d145f86fd17
781
# frozen_string_literal: true

require 'rails/generators/base'

module Evnt
  # HandlerGenerator.
  class HandlerGenerator < Rails::Generators::Base
    source_root File.expand_path('../templates', __FILE__)
    argument :informations, type: :array, optional: false

    def create_handler
      path = informations.first.split('::')
      @handler_class = path.last.camelize
      @handler_modules = path - [path.last]
      @handler_events = (informations - [informations.first])

      template(
        './handler/handler.rb.erb',
        handler_path
      )
    end

    def handler_path
      path = './app/handlers'
      @handler_modules.map { |m| path = "#{path}/#{m.underscore}" }
      path = "#{path}/#{@handler_class.underscore}.rb"
      path
    end
  end
end
21.694444
67
0.640205
ac1485292daebd87bccba3a22b0e20a56b10ecfc
371
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::Batch::Mgmt::V2017_01_01
  module Models
    #
    # Defines values for PackageState
    #
    module PackageState
      Pending = "pending"
      Active = "active"
      Unmapped = "unmapped"
    end
  end
end
21.823529
70
0.681941
e98ceac4253c4bdd6d07d9e71e8c264be02126a8
957
require File.dirname(__FILE__) + '/../../spec_helper'
require File.dirname(__FILE__) + '/fixtures/classes'

describe "Array#include?" do
  it "returns true if object is present, false otherwise" do
    [1, 2, "a", "b"].include?("c").should == false
    [1, 2, "a", "b"].include?("a").should == true
  end

  it "determines presence by using element == obj" do
    o = mock('')

    [1, 2, "a", "b"].include?(o).should == false

    def o.==(other); other == 'a'; end

    [1, 2, o, "b"].include?('a').should == true
    [1, 2.0, 3].include?(2).should == true
  end

  it "calls == on elements from left to right until success" do
    key = "x"
    one = mock('one')
    two = mock('two')
    three = mock('three')

    one.should_receive(:==).any_number_of_times.and_return(false)
    two.should_receive(:==).any_number_of_times.and_return(true)
    three.should_not_receive(:==)
    ary = [one, two, three]

    ary.include?(key).should == true
  end
end
28.147059
65
0.602926
bb937578772160bffd55b8748cbb36088a5d00ec
383
require 'base_kde_formula'

class Kmplot < BaseKdeFormula
  homepage 'http://www.kde.org/'
  url 'http://download.kde.org/stable/4.11.4/src/kmplot-4.11.4.tar.xz'
  sha1 '84c64369da91994b19e6f6ed7cd23fe1ad84c5ce'

  devel do
    url 'http://download.kde.org/stable/4.12.0/src/kmplot-4.12.0.tar.xz'
    sha1 '6f6253c7cf2656777e1599747d36286bd75bf6f5'
  end

  depends_on 'kdelibs'
end
25.533333
72
0.744125
015c30ba17d06dfe509aed685e47d755562f8960
769
cask 'uninstallpkg' do
  version '1.1.5'
  sha256 '8a92278d73334007d1df581584f7364adede9b74d4d46580055a435484c459cd'

  url "https://www.corecode.io/downloads/uninstallpkg_#{version}.zip"
  appcast 'https://www.corecode.io/uninstallpkg/uninstallpkg.xml'
  name 'UninstallPKG'
  homepage 'https://www.corecode.io/uninstallpkg/'

  app 'UninstallPKG.app'

  uninstall delete:    '/Library/PrivilegedHelperTools/com.corecode.UninstallPKGDeleteHelper',
            launchctl: 'com.corecode.UninstallPKGDeleteHelper'

  zap trash: [
               '~/Library/Application Support/UninstallPKG',
               '~/Library/Preferences/com.corecode.UninstallPKG.plist',
               '~/Library/Saved Application State/com.corecode.UninstallPKG.savedState',
             ]
end
36.619048
94
0.717815
33fb81924dff8e80c39dcc537ff813b1bbe3e5c2
1,306
class RepliesController < ApplicationController
  before_action :set_reply, only: [:show, :edit, :update, :destroy]
  before_action :set_artical, only: [:show, :edit, :update, :destroy]

  def new
    @reply = Reply.new
  end

  def index
    @replies = Reply.all
  end

  def show
  end

  def create
    @artical = Artical.find(params[:artical_id])
    @reply = @artical.replies.new(params[:reply].permit(:content, :artical_id))
    @reply.user_id = session[:user_id]
    @reply.save
    redirect_to artical_path(@artical)
  end

  def update
    respond_to do |format|
      if @reply.update(reply_params)
        format.html { redirect_to @artical, notice: 'Reply was successfully updated.' }
        format.json { head :no_content }
      else
        format.html { render action: 'edit' }
        format.json { render json: @reply.errors, status: :unprocessable_entity }
      end
    end
  end

  def destroy
    Reply.find(params[:id]).destroy
    flash[:success] = "Reply deleted"
    redirect_to @artical
  end

  private

  def set_reply
    @reply = Reply.find(params[:id])
  end

  def set_artical
    @artical = Artical.find(@reply.artical)
  end

  def reply_params
    params.require(:reply).permit(:content, :user)
  end
end
22.135593
87
0.630168
ffe3097955ef9f9bd79c47c8932ee40dde05d775
346
module Flickr
  class Image
    URL_TEMPLATE = 'https://farm%s.staticflickr.com/%s/%s_%s.jpg'.freeze

    attr_reader :url

    def initialize(id:, farm:, server:, secret:)
      @url = URL_TEMPLATE % [farm, server, id, secret]
    end

    def read
      blob = Curl.get(url).body_str
      Magick::Image.from_blob(blob).first
    end
  end
end
20.352941
72
0.635838
1c29b9e71fc4ce4b991f1740acce3645b625cd14
401
# frozen_string_literal: true

require File.dirname(__FILE__) + '/../spec_helper'

describe YARD::Tags::DefaultTag do
  it "creates a tag with defaults" do
    o = YARD::Tags::DefaultTag.new('tagname', 'desc', ['types'], 'name', ['defaults'])
    expect(o.defaults).to eq ['defaults']
    expect(o.tag_name).to eq 'tagname'
    expect(o.name).to eq 'name'
    expect(o.types).to eq ['types']
  end
end
30.846154
86
0.663342
3920171205f62992d416012c6baef4b0c926f186
25,484
module API class Users < Grape::API include PaginationParams include APIGuard include Helpers::CustomAttributes allow_access_with_scope :read_user, if: -> (request) { request.get? } resource :users, requirements: { uid: /[0-9]*/, id: /[0-9]*/ } do include CustomAttributesEndpoints before do authenticate_non_get! end helpers do def find_user_by_id(params) id = params[:user_id] || params[:id] User.find_by(id: id) || not_found!('User') end def reorder_users(users) if params[:order_by] && params[:sort] users.reorder(params[:order_by] => params[:sort]) else users end end params :optional_attributes do optional :skype, type: String, desc: 'The Skype username' optional :linkedin, type: String, desc: 'The LinkedIn username' optional :twitter, type: String, desc: 'The Twitter username' optional :website_url, type: String, desc: 'The website of the user' optional :organization, type: String, desc: 'The organization of the user' optional :projects_limit, type: Integer, desc: 'The number of projects a user can create' optional :extern_uid, type: String, desc: 'The external authentication provider UID' optional :provider, type: String, desc: 'The external provider' optional :bio, type: String, desc: 'The biography of the user' optional :location, type: String, desc: 'The location of the user' optional :admin, type: Boolean, desc: 'Flag indicating the user is an administrator' optional :can_create_group, type: Boolean, desc: 'Flag indicating the user can create groups' optional :external, type: Boolean, desc: 'Flag indicating the user is an external user' optional :avatar, type: File, desc: 'Avatar image for user' all_or_none_of :extern_uid, :provider end params :sort_params do optional :order_by, type: String, values: %w[id name username created_at updated_at], default: 'id', desc: 'Return users ordered by a field' optional :sort, type: String, values: %w[asc desc], default: 'desc', desc: 'Return users sorted in ascending and descending order' end end desc 'Get the list of users' do success Entities::UserBasic end params do # CE optional :username, type: String, desc: 'Get a single user with a specific username' optional :extern_uid, type: String, desc: 'Get a single user with a specific external authentication provider UID' optional :provider, type: String, desc: 'The external provider' optional :search, type: String, desc: 'Search for a username' optional :active, type: Boolean, default: false, desc: 'Filters only active users' optional :external, type: Boolean, default: false, desc: 'Filters only external users' optional :blocked, type: Boolean, default: false, desc: 'Filters only blocked users' optional :created_after, type: DateTime, desc: 'Return users created after the specified time' optional :created_before, type: DateTime, desc: 'Return users created before the specified time' all_or_none_of :extern_uid, :provider use :sort_params use :pagination use :with_custom_attributes end get do authenticated_as_admin! if params[:external].present? || (params[:extern_uid].present? && params[:provider].present?) unless current_user&.admin? params.except!(:created_after, :created_before, :order_by, :sort) end users = UsersFinder.new(current_user, params).execute users = reorder_users(users) authorized = can?(current_user, :read_users_list) # When `current_user` is not present, require that the `username` # parameter is passed, to prevent an unauthenticated user from accessing # a list of all the users on the GitLab instance. 
`UsersFinder` performs # an exact match on the `username` parameter, so we are guaranteed to # get either 0 or 1 `users` here. authorized &&= params[:username].present? if current_user.blank? forbidden!("Not authorized to access /api/v4/users") unless authorized entity = current_user&.admin? ? Entities::UserWithAdmin : Entities::UserBasic users = users.preload(:identities, :u2f_registrations) if entity == Entities::UserWithAdmin users, options = with_custom_attributes(users, with: entity) present paginate(users), options end desc 'Get a single user' do success Entities::User end params do requires :id, type: Integer, desc: 'The ID of the user' use :with_custom_attributes end get ":id" do user = User.find_by(id: params[:id]) not_found!('User') unless user && can?(current_user, :read_user, user) opts = current_user&.admin? ? { with: Entities::UserWithAdmin } : { with: Entities::User } user, opts = with_custom_attributes(user, opts) present user, opts end desc 'Create a user. Available only for admins.' do success Entities::UserPublic end params do requires :email, type: String, desc: 'The email of the user' optional :password, type: String, desc: 'The password of the new user' optional :reset_password, type: Boolean, desc: 'Flag indicating the user will be sent a password reset token' optional :skip_confirmation, type: Boolean, desc: 'Flag indicating the account is confirmed' at_least_one_of :password, :reset_password requires :name, type: String, desc: 'The name of the user' requires :username, type: String, desc: 'The username of the user' use :optional_attributes end post do authenticated_as_admin! params = declared_params(include_missing: false) user = ::Users::CreateService.new(current_user, params).execute(skip_authorization: true) if user.persisted? present user, with: Entities::UserPublic else conflict!('Email has already been taken') if User .where(email: user.email) .count > 0 conflict!('Username has already been taken') if User .where(username: user.username) .count > 0 render_validation_error!(user) end end desc 'Update a user. Available only for admins.' do success Entities::UserPublic end params do requires :id, type: Integer, desc: 'The ID of the user' optional :email, type: String, desc: 'The email of the user' optional :password, type: String, desc: 'The password of the new user' optional :skip_reconfirmation, type: Boolean, desc: 'Flag indicating the account skips the confirmation by email' optional :name, type: String, desc: 'The name of the user' optional :username, type: String, desc: 'The username of the user' use :optional_attributes end put ":id" do authenticated_as_admin! user = User.find_by(id: params.delete(:id)) not_found!('User') unless user conflict!('Email has already been taken') if params[:email] && User.where(email: params[:email]) .where.not(id: user.id).count > 0 conflict!('Username has already been taken') if params[:username] && User.where(username: params[:username]) .where.not(id: user.id).count > 0 user_params = declared_params(include_missing: false) identity_attrs = user_params.slice(:provider, :extern_uid) if identity_attrs.any? identity = user.identities.find_by(provider: identity_attrs[:provider]) if identity identity.update_attributes(identity_attrs) else identity = user.identities.build(identity_attrs) identity.save end end user_params[:password_expires_at] = Time.now if user_params[:password].present? 
result = ::Users::UpdateService.new(current_user, user_params.except(:extern_uid, :provider).merge(user: user)).execute if result[:status] == :success present user, with: Entities::UserPublic else render_validation_error!(user) end end desc 'Add an SSH key to a specified user. Available only for admins.' do success Entities::SSHKey end params do requires :id, type: Integer, desc: 'The ID of the user' requires :key, type: String, desc: 'The new SSH key' requires :title, type: String, desc: 'The title of the new SSH key' end post ":id/keys" do authenticated_as_admin! user = User.find_by(id: params.delete(:id)) not_found!('User') unless user key = user.keys.new(declared_params(include_missing: false)) if key.save present key, with: Entities::SSHKey else render_validation_error!(key) end end desc 'Get the SSH keys of a specified user. Available only for admins.' do success Entities::SSHKey end params do requires :id, type: Integer, desc: 'The ID of the user' use :pagination end get ':id/keys' do authenticated_as_admin! user = User.find_by(id: params[:id]) not_found!('User') unless user present paginate(user.keys), with: Entities::SSHKey end desc 'Delete an existing SSH key from a specified user. Available only for admins.' do success Entities::SSHKey end params do requires :id, type: Integer, desc: 'The ID of the user' requires :key_id, type: Integer, desc: 'The ID of the SSH key' end delete ':id/keys/:key_id' do authenticated_as_admin! user = User.find_by(id: params[:id]) not_found!('User') unless user key = user.keys.find_by(id: params[:key_id]) not_found!('Key') unless key destroy_conditionally!(key) end desc 'Add a GPG key to a specified user. Available only for admins.' do detail 'This feature was added in GitLab 10.0' success Entities::GPGKey end params do requires :id, type: Integer, desc: 'The ID of the user' requires :key, type: String, desc: 'The new GPG key' end post ':id/gpg_keys' do authenticated_as_admin! user = User.find_by(id: params.delete(:id)) not_found!('User') unless user key = user.gpg_keys.new(declared_params(include_missing: false)) if key.save present key, with: Entities::GPGKey else render_validation_error!(key) end end desc 'Get the GPG keys of a specified user. Available only for admins.' do detail 'This feature was added in GitLab 10.0' success Entities::GPGKey end params do requires :id, type: Integer, desc: 'The ID of the user' use :pagination end get ':id/gpg_keys' do authenticated_as_admin! user = User.find_by(id: params[:id]) not_found!('User') unless user present paginate(user.gpg_keys), with: Entities::GPGKey end desc 'Delete an existing GPG key from a specified user. Available only for admins.' do detail 'This feature was added in GitLab 10.0' end params do requires :id, type: Integer, desc: 'The ID of the user' requires :key_id, type: Integer, desc: 'The ID of the GPG key' end delete ':id/gpg_keys/:key_id' do authenticated_as_admin! user = User.find_by(id: params[:id]) not_found!('User') unless user key = user.gpg_keys.find_by(id: params[:key_id]) not_found!('GPG Key') unless key status 204 key.destroy end desc 'Revokes an existing GPG key from a specified user. Available only for admins.' do detail 'This feature was added in GitLab 10.0' end params do requires :id, type: Integer, desc: 'The ID of the user' requires :key_id, type: Integer, desc: 'The ID of the GPG key' end post ':id/gpg_keys/:key_id/revoke' do authenticated_as_admin! 
user = User.find_by(id: params[:id]) not_found!('User') unless user key = user.gpg_keys.find_by(id: params[:key_id]) not_found!('GPG Key') unless key key.revoke status :accepted end desc 'Add an email address to a specified user. Available only for admins.' do success Entities::Email end params do requires :id, type: Integer, desc: 'The ID of the user' requires :email, type: String, desc: 'The email of the user' end post ":id/emails" do authenticated_as_admin! user = User.find_by(id: params.delete(:id)) not_found!('User') unless user email = Emails::CreateService.new(current_user, declared_params(include_missing: false).merge(user: user)).execute if email.errors.blank? present email, with: Entities::Email else render_validation_error!(email) end end desc 'Get the emails addresses of a specified user. Available only for admins.' do success Entities::Email end params do requires :id, type: Integer, desc: 'The ID of the user' use :pagination end get ':id/emails' do authenticated_as_admin! user = User.find_by(id: params[:id]) not_found!('User') unless user present paginate(user.emails), with: Entities::Email end desc 'Delete an email address of a specified user. Available only for admins.' do success Entities::Email end params do requires :id, type: Integer, desc: 'The ID of the user' requires :email_id, type: Integer, desc: 'The ID of the email' end delete ':id/emails/:email_id' do authenticated_as_admin! user = User.find_by(id: params[:id]) not_found!('User') unless user email = user.emails.find_by(id: params[:email_id]) not_found!('Email') unless email destroy_conditionally!(email) do |email| Emails::DestroyService.new(current_user, user: user).execute(email) end end desc 'Delete a user. Available only for admins.' do success Entities::Email end params do requires :id, type: Integer, desc: 'The ID of the user' optional :hard_delete, type: Boolean, desc: "Whether to remove a user's contributions" end delete ":id" do Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42279') authenticated_as_admin! user = User.find_by(id: params[:id]) not_found!('User') unless user destroy_conditionally!(user) do user.delete_async(deleted_by: current_user, params: params) end end desc 'Block a user. Available only for admins.' params do requires :id, type: Integer, desc: 'The ID of the user' end post ':id/block' do authenticated_as_admin! user = User.find_by(id: params[:id]) not_found!('User') unless user if !user.ldap_blocked? user.block else forbidden!('LDAP blocked users cannot be modified by the API') end end desc 'Unblock a user. Available only for admins.' params do requires :id, type: Integer, desc: 'The ID of the user' end post ':id/unblock' do authenticated_as_admin! user = User.find_by(id: params[:id]) not_found!('User') unless user if user.ldap_blocked? forbidden!('LDAP blocked users cannot be unblocked by the API') else user.activate end end params do requires :user_id, type: Integer, desc: 'The ID of the user' end segment ':user_id' do resource :impersonation_tokens do helpers do def finder(options = {}) user = find_user_by_id(params) PersonalAccessTokensFinder.new({ user: user, impersonation: true }.merge(options)) end def find_impersonation_token finder.find_by(id: declared_params[:impersonation_token_id]) || not_found!('Impersonation Token') end end before { authenticated_as_admin! } desc 'Retrieve impersonation tokens. Available only for admins.' 
do detail 'This feature was introduced in GitLab 9.0' success Entities::ImpersonationToken end params do use :pagination optional :state, type: String, default: 'all', values: %w[all active inactive], desc: 'Filters (all|active|inactive) impersonation_tokens' end get { present paginate(finder(declared_params(include_missing: false)).execute), with: Entities::ImpersonationToken } desc 'Create a impersonation token. Available only for admins.' do detail 'This feature was introduced in GitLab 9.0' success Entities::ImpersonationToken end params do requires :name, type: String, desc: 'The name of the impersonation token' optional :expires_at, type: Date, desc: 'The expiration date in the format YEAR-MONTH-DAY of the impersonation token' optional :scopes, type: Array, desc: 'The array of scopes of the impersonation token' end post do impersonation_token = finder.build(declared_params(include_missing: false)) if impersonation_token.save present impersonation_token, with: Entities::ImpersonationToken else render_validation_error!(impersonation_token) end end desc 'Retrieve impersonation token. Available only for admins.' do detail 'This feature was introduced in GitLab 9.0' success Entities::ImpersonationToken end params do requires :impersonation_token_id, type: Integer, desc: 'The ID of the impersonation token' end get ':impersonation_token_id' do present find_impersonation_token, with: Entities::ImpersonationToken end desc 'Revoke a impersonation token. Available only for admins.' do detail 'This feature was introduced in GitLab 9.0' end params do requires :impersonation_token_id, type: Integer, desc: 'The ID of the impersonation token' end delete ':impersonation_token_id' do token = find_impersonation_token destroy_conditionally!(token) do token.revoke! end end end end end resource :user do before do authenticate! end desc 'Get the currently authenticated user' do success Entities::UserPublic end get do entity = if current_user.admin? 
Entities::UserWithAdmin else Entities::UserPublic end present current_user, with: entity end desc "Get the currently authenticated user's SSH keys" do success Entities::SSHKey end params do use :pagination end get "keys" do present paginate(current_user.keys), with: Entities::SSHKey end desc 'Get a single key owned by currently authenticated user' do success Entities::SSHKey end params do requires :key_id, type: Integer, desc: 'The ID of the SSH key' end get "keys/:key_id" do key = current_user.keys.find_by(id: params[:key_id]) not_found!('Key') unless key present key, with: Entities::SSHKey end desc 'Add a new SSH key to the currently authenticated user' do success Entities::SSHKey end params do requires :key, type: String, desc: 'The new SSH key' requires :title, type: String, desc: 'The title of the new SSH key' end post "keys" do key = current_user.keys.new(declared_params) if key.save present key, with: Entities::SSHKey else render_validation_error!(key) end end desc 'Delete an SSH key from the currently authenticated user' do success Entities::SSHKey end params do requires :key_id, type: Integer, desc: 'The ID of the SSH key' end delete "keys/:key_id" do key = current_user.keys.find_by(id: params[:key_id]) not_found!('Key') unless key destroy_conditionally!(key) end desc "Get the currently authenticated user's GPG keys" do detail 'This feature was added in GitLab 10.0' success Entities::GPGKey end params do use :pagination end get 'gpg_keys' do present paginate(current_user.gpg_keys), with: Entities::GPGKey end desc 'Get a single GPG key owned by currently authenticated user' do detail 'This feature was added in GitLab 10.0' success Entities::GPGKey end params do requires :key_id, type: Integer, desc: 'The ID of the GPG key' end get 'gpg_keys/:key_id' do key = current_user.gpg_keys.find_by(id: params[:key_id]) not_found!('GPG Key') unless key present key, with: Entities::GPGKey end desc 'Add a new GPG key to the currently authenticated user' do detail 'This feature was added in GitLab 10.0' success Entities::GPGKey end params do requires :key, type: String, desc: 'The new GPG key' end post 'gpg_keys' do key = current_user.gpg_keys.new(declared_params) if key.save present key, with: Entities::GPGKey else render_validation_error!(key) end end desc 'Revoke a GPG key owned by currently authenticated user' do detail 'This feature was added in GitLab 10.0' end params do requires :key_id, type: Integer, desc: 'The ID of the GPG key' end post 'gpg_keys/:key_id/revoke' do key = current_user.gpg_keys.find_by(id: params[:key_id]) not_found!('GPG Key') unless key key.revoke status :accepted end desc 'Delete a GPG key from the currently authenticated user' do detail 'This feature was added in GitLab 10.0' end params do requires :key_id, type: Integer, desc: 'The ID of the SSH key' end delete 'gpg_keys/:key_id' do key = current_user.gpg_keys.find_by(id: params[:key_id]) not_found!('GPG Key') unless key status 204 key.destroy end desc "Get the currently authenticated user's email addresses" do success Entities::Email end params do use :pagination end get "emails" do present paginate(current_user.emails), with: Entities::Email end desc 'Get a single email address owned by the currently authenticated user' do success Entities::Email end params do requires :email_id, type: Integer, desc: 'The ID of the email' end get "emails/:email_id" do email = current_user.emails.find_by(id: params[:email_id]) not_found!('Email') unless email present email, with: Entities::Email end desc 'Add new email address to 
the currently authenticated user' do success Entities::Email end params do requires :email, type: String, desc: 'The new email' end post "emails" do email = Emails::CreateService.new(current_user, declared_params.merge(user: current_user)).execute if email.errors.blank? present email, with: Entities::Email else render_validation_error!(email) end end desc 'Delete an email address from the currently authenticated user' params do requires :email_id, type: Integer, desc: 'The ID of the email' end delete "emails/:email_id" do email = current_user.emails.find_by(id: params[:email_id]) not_found!('Email') unless email destroy_conditionally!(email) do |email| Emails::DestroyService.new(current_user, user: current_user).execute(email) end end desc 'Get a list of user activities' params do optional :from, type: DateTime, default: 6.months.ago, desc: 'Date string in the format YEAR-MONTH-DAY' use :pagination end get "activities" do authenticated_as_admin! activities = User .where(User.arel_table[:last_activity_on].gteq(params[:from])) .reorder(last_activity_on: :asc) present paginate(activities), with: Entities::UserActivity end end end end
34.437838
150
0.620036
f7d986c78e80257322aeecce4ea9e34d3644e57b
909
#ExStart: require 'aspose_slides_cloud' class Document include AsposeSlidesCloud include AsposeStorageCloud def initialize #Get App key and App SID from https://cloud.aspose.com AsposeApp.app_key_and_sid("", "") @slides_api = SlidesApi.new end def upload_file(file_name) @storage_api = StorageApi.new response = @storage_api.put_create(file_name, File.open("../../../data/" << file_name,"r") { |io| io.read } ) end # Get slides document in specified format def convert_to_other_file_format_using_third_part_storage file_name = "sample.pptx" upload_file(file_name) format = "tiff" storage = "MyDropboxStorage"; folder = ""; response = @slides_api.get_slides_document_with_format(file_name, format, {storage: storage, folder: folder}) end end document = Document.new() puts document.convert_to_other_file_format_using_third_part_storage #ExEnd:
25.971429
113
0.735974
bb7d5e6a61037a8ed409868988e802c632735716
5,704
# frozen_string_literal: true require 'grape' require 'grape-swagger/instance' require 'grape-swagger/version' require 'grape-swagger/endpoint' require 'grape-swagger/errors' require 'grape-swagger/doc_methods' require 'grape-swagger/model_parsers' module GrapeSwagger class << self def model_parsers @model_parsers ||= GrapeSwagger::ModelParsers.new end end autoload :Rake, 'grape-swagger/rake/oapi_tasks' end module SwaggerRouting private def combine_routes(app, doc_klass) app.routes.each do |route| route_path = route.path route_match = route_path.split(/^.*?#{route.prefix.to_s}/).last next unless route_match route_match = route_match.match('\/([\w|-]*?)[\.\/\(]') || route_match.match('\/([\w|-]*)$') next unless route_match resource = route_match.captures.first resource = '/' if resource.empty? @target_class.combined_routes[resource] ||= [] next if doc_klass.hide_documentation_path && route.path.match(/#{doc_klass.mount_path}($|\/|\(\.)/) @target_class.combined_routes[resource].unshift route end end def determine_namespaced_routes(name, parent_route) if parent_route.nil? @target_class.combined_routes.values.flatten else parent_route.reject do |route| !route_path_start_with?(route, name) || !route_instance_variable_equals?(route, name) end end end def combine_namespace_routes(namespaces) # iterate over each single namespace namespaces.each_key do |name, _| # get the parent route for the namespace parent_route_name = extract_parent_route(name) parent_route = @target_class.combined_routes[parent_route_name] # fetch all routes that are within the current namespace namespace_routes = determine_namespaced_routes(name, parent_route) # default case when not explicitly specified or nested == true standalone_namespaces = namespaces.reject do |_, ns| !ns.options.key?(:swagger) || !ns.options[:swagger].key?(:nested) || ns.options[:swagger][:nested] != false end parent_standalone_namespaces = standalone_namespaces.select { |ns_name, _| name.start_with?(ns_name) } # add only to the main route # if the namespace is not within any other namespace appearing as standalone resource # rubocop:disable Style/Next if parent_standalone_namespaces.empty? # default option, append namespace methods to parent route parent_route = @target_class.combined_namespace_routes.key?(parent_route_name) @target_class.combined_namespace_routes[parent_route_name] = [] unless parent_route @target_class.combined_namespace_routes[parent_route_name].push(*namespace_routes) end # rubocop:enable Style/Next end end def extract_parent_route(name) route_name = name.match(%r{^/?([^/]*).*$})[1] return route_name unless route_name.include? ':' matches = name.match(/\/[a-z]+/) matches.nil? ? route_name : matches[0].delete('/') end def route_instance_variable(route) route.instance_variable_get(:@options)[:namespace] end def route_instance_variable_equals?(route, name) route_instance_variable(route) == "/#{name}" || route_instance_variable(route) == "/:version/#{name}" end def route_path_start_with?(route, name) route_prefix = route.prefix ? "/#{route.prefix}/#{name}" : "/#{name}" route_versioned_prefix = route.prefix ? 
"/#{route.prefix}/:version/#{name}" : "/:version/#{name}" route.path.start_with?(route_prefix, route_versioned_prefix) end end module SwaggerDocumentationAdder attr_accessor :combined_namespaces, :combined_namespace_identifiers attr_accessor :combined_routes, :combined_namespace_routes include SwaggerRouting def add_swagger_documentation(options = {}) documentation_class = create_documentation_class version_for(options) options = { target_class: self }.merge(options) @target_class = options[:target_class] auth_wrapper = options[:endpoint_auth_wrapper] || Class.new use auth_wrapper if auth_wrapper.method_defined?(:before) && !middleware.flatten.include?(auth_wrapper) documentation_class.setup(options) mount(documentation_class) @target_class.combined_routes = {} combine_routes(@target_class, documentation_class) @target_class.combined_namespaces = {} combine_namespaces(@target_class) @target_class.combined_namespace_routes = {} @target_class.combined_namespace_identifiers = {} combine_namespace_routes(@target_class.combined_namespaces) exclusive_route_keys = @target_class.combined_routes.keys - @target_class.combined_namespaces.keys exclusive_route_keys.each do |key| @target_class.combined_namespace_routes[key] = @target_class.combined_routes[key] end documentation_class end private def version_for(options) options[:version] = version if version end def combine_namespaces(app) app.endpoints.each do |endpoint| ns = endpoint.namespace_stackable(:namespace).last # use the full namespace here (not the latest level only) # and strip leading slash mount_path = (endpoint.namespace_stackable(:mount_path) || []).join('/') full_namespace = (mount_path + endpoint.namespace).sub(/\/{2,}/, '/').sub(/^\//, '') @target_class.combined_namespaces[full_namespace] = ns if ns combine_namespaces(endpoint.options[:app]) if endpoint.options[:app] end end def create_documentation_class Class.new(GrapeInstance) do extend GrapeSwagger::DocMethods end end end GrapeInstance.extend(SwaggerDocumentationAdder)
33.162791
108
0.718093
abcc021e77a785fbccd2c57ded86fc5faabdacd4
522
# frozen_string_literal: true

class AddFeaturedTopicsToCategories < ActiveRecord::Migration[4.2]
  def up
    add_column :categories, :num_featured_topics, :integer, default: 3

    result = execute("select value from site_settings where name = 'category_featured_topics' and value != '3'")
    if result.count > 0 && result[0]["value"].to_i > 0
      execute "UPDATE categories SET num_featured_topics = #{result[0]["value"].to_i}"
    end
  end

  def down
    remove_column :categories, :num_featured_topics
  end
end
30.705882
112
0.722222
39711aff2f69a43bbac25cd50885ea01902aa8d0
9,674
# coding: utf-8 require 'helper' describe Twitter::REST::Favorites do before do @client = Twitter::REST::Client.new(:consumer_key => 'CK', :consumer_secret => 'CS', :access_token => 'AT', :access_token_secret => 'AS') end describe '#favorites' do context 'with a screen name passed' do before do stub_get('/1.1/favorites/list.json').with(:query => {:screen_name => 'sferik'}).to_return(:body => fixture('user_timeline.json'), :headers => {:content_type => 'application/json; charset=utf-8'}) end it 'requests the correct resource' do @client.favorites('sferik') expect(a_get('/1.1/favorites/list.json').with(:query => {:screen_name => 'sferik'})).to have_been_made end it 'returns the 20 most recent favorite Tweets for the authenticating user or user specified by the ID parameter' do favorites = @client.favorites('sferik') expect(favorites).to be_an Array expect(favorites.first).to be_a Twitter::Tweet expect(favorites.first.user.id).to eq(7_505_382) end context 'with a URI object passed' do it 'requests the correct resource' do user = URI.parse('https://twitter.com/sferik') @client.favorites(user) expect(a_get('/1.1/favorites/list.json').with(:query => {:screen_name => 'sferik'})).to have_been_made end end context 'with a URI string passed' do it 'requests the correct resource' do @client.favorites('https://twitter.com/sferik') expect(a_get('/1.1/favorites/list.json').with(:query => {:screen_name => 'sferik'})).to have_been_made end end end context 'without arguments passed' do before do stub_get('/1.1/favorites/list.json').to_return(:body => fixture('user_timeline.json'), :headers => {:content_type => 'application/json; charset=utf-8'}) end it 'requests the correct resource' do @client.favorites expect(a_get('/1.1/favorites/list.json')).to have_been_made end it 'returns the 20 most recent favorite Tweets for the authenticating user or user specified by the ID parameter' do favorites = @client.favorites expect(favorites).to be_an Array expect(favorites.first).to be_a Twitter::Tweet expect(favorites.first.user.id).to eq(7_505_382) end end end describe '#unfavorite' do before do stub_post('/1.1/favorites/destroy.json').with(:body => {:id => '25938088801'}).to_return(:body => fixture('status.json'), :headers => {:content_type => 'application/json; charset=utf-8'}) end it 'requests the correct resource' do @client.unfavorite(25_938_088_801) expect(a_post('/1.1/favorites/destroy.json').with(:body => {:id => '25938088801'})).to have_been_made end it 'returns an array of un-favorited Tweets' do tweets = @client.unfavorite(25_938_088_801) expect(tweets).to be_an Array expect(tweets.first).to be_a Twitter::Tweet expect(tweets.first.text).to eq("\"I hope you'll keep...building bonds of friendship that will enrich your lives &amp; enrich our world\" β€”FLOTUS in China, http://t.co/fxmuQN9JL9") end context 'with a URI object passed' do it 'requests the correct resource' do tweet = URI.parse('https://twitter.com/sferik/status/25938088801') @client.unfavorite(tweet) expect(a_post('/1.1/favorites/destroy.json').with(:body => {:id => '25938088801'})).to have_been_made end end context 'with a URI string passed' do it 'requests the correct resource' do @client.unfavorite('https://twitter.com/sferik/status/25938088801') expect(a_post('/1.1/favorites/destroy.json').with(:body => {:id => '25938088801'})).to have_been_made end end context 'with a Tweet passed' do it 'requests the correct resource' do tweet = Twitter::Tweet.new(:id => 25_938_088_801) @client.unfavorite(tweet) 
expect(a_post('/1.1/favorites/destroy.json').with(:body => {:id => '25938088801'})).to have_been_made end end end describe '#favorite' do before do stub_post('/1.1/favorites/create.json').with(:body => {:id => '25938088801'}).to_return(:body => fixture('status.json'), :headers => {:content_type => 'application/json; charset=utf-8'}) end it 'requests the correct resource' do @client.favorite(25_938_088_801) expect(a_post('/1.1/favorites/create.json').with(:body => {:id => '25938088801'})).to have_been_made end it 'returns an array of favorited Tweets' do tweets = @client.favorite(25_938_088_801) expect(tweets).to be_an Array expect(tweets.first).to be_a Twitter::Tweet expect(tweets.first.text).to eq("\"I hope you'll keep...building bonds of friendship that will enrich your lives &amp; enrich our world\" β€”FLOTUS in China, http://t.co/fxmuQN9JL9") end context 'already favorited' do before do stub_post('/1.1/favorites/create.json').with(:body => {:id => '25938088801'}).to_return(:status => 403, :body => fixture('already_favorited.json'), :headers => {:content_type => 'application/json; charset=utf-8'}) end it 'does not raise an error' do expect { @client.favorite(25_938_088_801) }.not_to raise_error end end context 'not found' do before do stub_post('/1.1/favorites/create.json').with(:body => {:id => '25938088801'}).to_return(:status => 404, :body => fixture('not_found.json'), :headers => {:content_type => 'application/json; charset=utf-8'}) end it 'does not raise an error' do expect { @client.favorite(25_938_088_801) }.not_to raise_error end end context 'with a URI object passed' do it 'requests the correct resource' do tweet = URI.parse('https://twitter.com/sferik/status/25938088801') @client.favorite(tweet) expect(a_post('/1.1/favorites/create.json').with(:body => {:id => '25938088801'})).to have_been_made end end context 'with a URI string passed' do it 'requests the correct resource' do @client.favorite('https://twitter.com/sferik/status/25938088801') expect(a_post('/1.1/favorites/create.json').with(:body => {:id => '25938088801'})).to have_been_made end end context 'with a Tweet passed' do it 'requests the correct resource' do tweet = Twitter::Tweet.new(:id => 25_938_088_801) @client.favorite(tweet) expect(a_post('/1.1/favorites/create.json').with(:body => {:id => '25938088801'})).to have_been_made end end end describe '#favorite!' 
do before do stub_post('/1.1/favorites/create.json').with(:body => {:id => '25938088801'}).to_return(:body => fixture('status.json'), :headers => {:content_type => 'application/json; charset=utf-8'}) end it 'requests the correct resource' do @client.favorite!(25_938_088_801) expect(a_post('/1.1/favorites/create.json').with(:body => {:id => '25938088801'})).to have_been_made end it 'returns an array of favorited Tweets' do tweets = @client.favorite!(25_938_088_801) expect(tweets).to be_an Array expect(tweets.first).to be_a Twitter::Tweet expect(tweets.first.text).to eq("\"I hope you'll keep...building bonds of friendship that will enrich your lives &amp; enrich our world\" β€”FLOTUS in China, http://t.co/fxmuQN9JL9") end context 'forbidden' do before do stub_post('/1.1/favorites/create.json').with(:body => {:id => '25938088801'}).to_return(:status => 403, :body => '{}', :headers => {:content_type => 'application/json; charset=utf-8'}) end it 'raises a Forbidden error' do expect { @client.favorite!(25_938_088_801) }.to raise_error(Twitter::Error::Forbidden) end end context 'already favorited' do before do stub_post('/1.1/favorites/create.json').with(:body => {:id => '25938088801'}).to_return(:status => 403, :body => fixture('already_favorited.json'), :headers => {:content_type => 'application/json; charset=utf-8'}) end it 'raises an AlreadyFavorited error' do expect { @client.favorite!(25_938_088_801) }.to raise_error(Twitter::Error::AlreadyFavorited) end end context 'does not exist' do before do stub_post('/1.1/favorites/create.json').with(:body => {:id => '25938088801'}).to_return(:status => 404, :body => fixture('not_found.json'), :headers => {:content_type => 'application/json; charset=utf-8'}) end it 'raises a NotFound error' do expect { @client.favorite!(25_938_088_801) }.to raise_error(Twitter::Error::NotFound) end end context 'with a URI object passed' do it 'requests the correct resource' do tweet = URI.parse('https://twitter.com/sferik/status/25938088801') @client.favorite!(tweet) expect(a_post('/1.1/favorites/create.json').with(:body => {:id => '25938088801'})).to have_been_made end end context 'with a URI string passed' do it 'requests the correct resource' do @client.favorite!('https://twitter.com/sferik/status/25938088801') expect(a_post('/1.1/favorites/create.json').with(:body => {:id => '25938088801'})).to have_been_made end end context 'with a Tweet passed' do it 'requests the correct resource' do tweet = Twitter::Tweet.new(:id => 25_938_088_801) @client.favorite!(tweet) expect(a_post('/1.1/favorites/create.json').with(:body => {:id => '25938088801'})).to have_been_made end end end end
47.655172
221
0.649783
b9fca41b7520ed7ea0fe1b80268e0172726904bd
725
module Fog
  module Compute
    class Rackspace
      class Real

        # Get details for image by id
        #
        # ==== Returns
        # * response<~Excon::Response>:
        #   * body<~Hash>:
        #     * 'id'<~Integer> - Id of the image
        #     * 'name'<~String> - Name of the image
        #     * 'serverId'<~Integer> - Id of server image was created from
        #     * 'status'<~Integer> - Status of image
        #     * 'updated'<~String> - Timestamp of last update
        def get_image_details(image_id)
          request(
            :expects => [200, 203],
            :method  => 'GET',
            :path    => "images/#{image_id}.json"
          )
        end

      end
    end
  end
end
27.884615
74
0.477241
181cbcab39b454841a25120341b6fa7d4cfb4a31
636
require 'puppetlabs_spec_helper/module_spec_helper'
require 'rspec-puppet-facts'

include RspecPuppetFacts

# Add facts for a Docker bridge interface
def add_docker_iface(facts, name = 'docker0', params = {})
  interface_params = {
    :ipaddress  => '172.17.0.1',
    :macaddress => '02:42:41:0b:31:b8',
    :mtu        => '1500',
    :netmask    => '255.255.0.0',
    :network    => '172.17.0.0'
  }.merge(params)

  new_facts = Hash[interface_params.map { |k, v| ["#{k}_#{name}".to_sym, v] }]

  interfaces = facts[:interfaces].split(',')
  interfaces << name
  new_facts[:interfaces] = interfaces.sort.uniq.join(',')

  facts.merge(new_facts)
end
27.652174
78
0.663522
87dba1c2b580047074311ebc6693326fc90fc325
297
class GameWizardStep1Validator < ActiveModel::Validator
  def validate(document)
    if document.step1?
      if Game.where(title: document.title).exists? && document.cache_key.starts_with?('new_')
        document.errors[:title] << I18n.t('games.errors.title_taken')
      end
    end
  end
end
27
93
0.700337
39c8f1e6bdf170cb6d27c28ed1937fa829fd31f7
638
# frozen_string_literal: true

module API
  class Pages < Grape::API
    before do
      require_pages_config_enabled!

      authenticated_with_can_read_all_resources!
    end

    params do
      requires :id, type: String, desc: 'The ID of a project'
    end
    resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
      desc 'Unpublish pages' do
        detail 'This feature was introduced in GitLab 12.6'
      end
      delete ':id/pages' do
        authorize! :remove_pages, user_project

        status 204

        ::Pages::DeleteService.new(user_project, current_user).execute
      end
    end
  end
end
23.62963
79
0.678683
edb030c61525f6729d3f46690943b6103c079579
2,729
Pod::Spec.new do |s| s.name = "KIOpenSSL" s.version = "1.0.2#{("a".."z").to_a.index 'g'}" s.summary = "OpenSSL for iOS" s.description = "OpenSSL is an SSL/TLS and Crypto toolkit. Deprecated in Mac OS and gone in iOS, this spec gives your project non-deprecated OpenSSL support. Supports iOS including Simulator (armv7,armv7s,arm64,i386,x86_64)." s.homepage = "https://github.com/smartwalle/KIOpenSSL" s.license = { :type => 'OpenSSL (OpenSSL/SSLeay)', :file => 'LICENSE.txt' } s.source = { :git => "https://github.com/smartwalle/KIOpenSSL.git", :tag => "#{s.version}" } s.authors = {'Mark J. Cox' => '[email protected]', 'Ralf S. Engelschall' => '[email protected]', 'Dr. Stephen Henson' => '[email protected]', 'Ben Laurie' => '[email protected]', 'Lutz JΓ€nicke' => '[email protected]', 'Nils Larsch' => '[email protected]', 'Richard Levitte' => '[email protected]', 'Bodo MΓΆller' => '[email protected]', 'Ulf MΓΆller' => '[email protected]', 'Andy Polyakov' => '[email protected]', 'Geoff Thorpe' => '[email protected]', 'Holger Reif' => '[email protected]', 'Paul C. Sutton' => '[email protected]', 'Eric A. Young' => '[email protected]', 'Tim Hudson' => '[email protected]', 'Justin Plouffe' => '[email protected]'} s.ios.platform = :ios, '6.0' s.ios.deployment_target = '6.0' s.ios.source_files = 'include/openssl/**/*.h' s.ios.public_header_files = 'include/openssl/**/*.h' s.ios.header_dir = 'openssl' s.ios.preserve_paths = 'lib/libcrypto.a', 'lib/libssl.a' s.ios.vendored_libraries = 'lib/libcrypto.a', 'lib/libssl.a' # s.osx.platform = :osx, '10.9' # s.osx.deployment_target = '10.8' # s.osx.source_files = 'include-osx/openssl/**/*.h' # s.osx.public_header_files = 'include-osx/openssl/**/*.h' # s.osx.header_dir = 'openssl' # s.osx.preserve_paths = 'lib-osx/libcrypto.a', 'lib-osx/libssl.a' # s.osx.vendored_libraries = 'lib-osx/libcrypto.a', 'lib-osx/libssl.a' s.tvos.deployment_target = '9.0' s.tvos.source_files = 'include/openssl/**/*.h' s.tvos.public_header_files = 'include/openssl/**/*.h' s.tvos.header_dir = 'openssl' s.tvos.preserve_paths = 'lib/libcrypto.a', 'lib/libssl.a' s.tvos.vendored_libraries = 'lib/libcrypto-tvOS.a', 'lib/libssl-tvOS.a' s.libraries = 'ssl', 'crypto' s.requires_arc = false end
52.480769
228
0.559546
39dbd013331833c92031f1b254ff268cd7ec463a
1,258
class Reposurgeon < Formula
  desc "Edit version-control repository history"
  homepage "http://www.catb.org/esr/reposurgeon/"
  url "https://gitlab.com/esr/reposurgeon.git",
      :tag      => "4.15",
      :revision => "0128a04cbfa6e29841d696284798f63bfd104b79"
  head "https://gitlab.com/esr/reposurgeon.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "b66f514eadf1c9c1b05a953cf8ffda7623adc2ea6a1e828926e9658e52458956" => :catalina
    sha256 "ee5bdbf4903ce0a7be0916f4b0193db771984ade17b670dfcfa33db2ace7e226" => :mojave
    sha256 "124eca67a4500349387b3a28cc237a65f520bff81d508c83d0cd6513345f0292" => :high_sierra
  end

  depends_on "asciidoctor" => :build
  depends_on "go" => :build
  depends_on "git" # requires >= 2.19.2

  def install
    ENV["XML_CATALOG_FILES"] = "#{etc}/xml/catalog"
    system "make"
    system "make", "install", "prefix=#{prefix}"
    elisp.install "reposurgeon-mode.el"
  end

  test do
    (testpath/".gitconfig").write <<~EOS
      [user]
        name = Real Person
        email = [email protected]
    EOS
    system "git", "init"
    system "git", "commit", "--allow-empty", "--message", "brewing"
    assert_match "brewing", shell_output("script -q /dev/null #{bin}/reposurgeon read list")
  end
end
31.45
93
0.693959
bfcc09d40c0d4840eea980fcd865a54dd3e5a135
1,227
# frozen_string_literal: true

# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Auto-generated by gapic-generator-ruby. DO NOT EDIT!

require "googleauth"

module Google
  module Ads
    module GoogleAds
      module V3
        module Services
          module CustomerManagerLinkService
            # Credentials for the CustomerManagerLinkService API.
            class Credentials < ::Google::Auth::Credentials
              self.env_vars = [
                "GOOGLEADS_CREDENTIALS",
                "GOOGLEADS_KEYFILE",
                "GOOGLEADS_CREDENTIALS_JSON",
                "GOOGLEADS_KEYFILE_JSON"
              ]
            end
          end
        end
      end
    end
  end
end
28.534884
74
0.664222
381de4736065aa14c4f6f9cd3a7d816f27e8c018
1,772
require_relative '../spec_helper'

describe TrafficJam do
  include RedisHelper

  TrafficJam.configure do |config|
    config.redis = RedisHelper.redis
  end

  let(:period) { 0.1 }
  let(:limit) do
    TrafficJam::SimpleLimit.new(:test, "user1", max: 3, period: period)
  end

  describe :increment do
    after do
      Spy.teardown
    end

    it "should be true when rate limit is not exceeded" do
      assert limit.increment(1)
    end

    it "should be false when raise limit is exceeded" do
      assert !limit.increment(4)
      assert limit.increment(1)
      assert limit.increment(2)
      assert !limit.increment(1)
    end

    it "should raise an argument error if given a float" do
      assert_raises(ArgumentError) do
        limit.increment(1.5)
      end
    end

    it "should be a no-op when limit would be exceeded" do
      limit.increment(2)
      assert !limit.increment(2)
      assert limit.increment(1)
    end

    it "should be true when sufficient time passes" do
      assert limit.increment(3)
      sleep(period / 2)
      assert limit.increment(1)
      sleep(period * 2)
      assert limit.increment(3)
    end

    describe "when max is zero" do
      let(:limit) do
        TrafficJam::SimpleLimit.new(:test, "user1", max: 0, period: period)
      end

      it "should be false for any positive amount" do
        assert !limit.increment
      end
    end
  end

  describe :used do
    it "should be 0 when there has been no incrementing" do
      assert_equal 0, limit.used
    end

    it "should be the amount used" do
      limit.increment(1)
      assert_equal 1, limit.used
    end

    it "should decrease over time" do
      limit.increment(2)
      sleep(period / 2)
      assert_equal 1, limit.used
    end
  end
end
22.43038
75
0.640519
01f21801e0f39a8294d095db3384b54e0adfb54b
1,186
=begin
#OpenAPI Petstore

#This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\

The version of the OpenAPI document: 1.0.0

Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.3.0-SNAPSHOT

=end

require 'spec_helper'
require 'json'
require 'date'

# Unit tests for Petstore::Tag
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe 'Tag' do
  before do
    # run before each test
    @instance = Petstore::Tag.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of Tag' do
    it 'should create an instance of Tag' do
      expect(@instance).to be_instance_of(Petstore::Tag)
    end
  end

  describe 'test attribute "id"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "name"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

end
24.708333
157
0.719224
62aac65639f4789ee870d92a11fd9e5f6219457d
238
class CreateClients < ActiveRecord::Migration[6.0]
  def change
    create_table :client do |t|
      t.text :name
      t.text :phone
      t.text :datestamp
      t.text :barber
      t.text :color

      t.timestamps
    end
  end
end
17
50
0.609244
0833dcfbb684245fc0b48cfd4f04ca59ab8195e9
115
class DropExerciseWorkouts < ActiveRecord::Migration[6.0]
  def change
    drop_table :exercise_workouts
  end
end
19.166667
57
0.782609
03f45529e685a369a8e380a60bedd109a074ad0b
652
require File.dirname(__FILE__) + '/../spec_helper'

describe YARD::CLI::MarkupTypes do
  it "lists all available markup types" do
    YARD::CLI::MarkupTypes.run
    data = log.io.string
    exts = YARD::Templates::Helpers::MarkupHelper::MARKUP_EXTENSIONS
    YARD::Templates::Helpers::MarkupHelper::MARKUP_PROVIDERS.each do |name, providers|
      data.should match(/\b#{name}\b/)

      # Match all extensions
      exts[name].each do |ext|
        data.should include(".#{ext}")
      end if exts[name]

      # Match all provider libs
      providers.each do |provider|
        data.should match(/\b#{provider[:lib]}\b/)
      end
    end
  end
end
28.347826
86
0.65184
ff4fc6aa9db51cbe63e308edce4b74c107309682
215
module Api
  class IssuesBlockJob < IssuesStatusChangedJob
    include IsPublishable

    action "issue_status_changed"
    timestamp Proc.new { Time.now.utc.iso8601}

    def status
      "blocked"
    end
  end
end
19.545455
47
0.716279
1ab10eaff3dfc3c54f92a2fd29b11e93946dffe8
2,652
require 'spec_helper' describe Storytime::Post do describe "#to_partial_path" do before{ Storytime::BlogPost.instance_variable_set "@_to_partial_path", nil } after{ Storytime::BlogPost.instance_variable_set "@_to_partial_path", nil } it "includes site in the path" do allow(File).to receive(:exists?).and_return(true) site = FactoryGirl.create(:site, title: "Test Site") blog_post = FactoryGirl.create(:post, site: site) partial_path = blog_post.to_partial_path expect(partial_path).to eq("storytime/test-site/blog_posts/blog_post") Storytime::BlogPost.instance_variable_set "@_to_partial_path", nil end it "looks up the inheritance chain" do allow(File).to receive(:exists?).and_return(false) site = FactoryGirl.build(:site, title: "Test Site") video_post = VideoPost.new(site: site) partial_path = video_post.to_partial_path expect(partial_path).to eq("storytime/posts/post") end end it "sets the page slug on create" do post = FactoryGirl.create(:post) post.slug.should == post.title.parameterize end it "sets slug to user inputted value" do post = FactoryGirl.create(:post) post.slug = "random slug here" post.save post.slug.should == "random-slug-here" end it "does not allow the same slug" do post_1 = FactoryGirl.create(:post) post_2 = FactoryGirl.create(:post) post_2.slug = post_1.slug post_2.save post_2.slug.should_not == post_1.slug post_2.slug.should include(post_1.slug) end it "does not allow a blank slug" do post = FactoryGirl.create(:post) post.slug = "" post.save post.slug.should_not == "" post.slug.should == post.title.parameterize end it "creates tags from tag_list attribute" do post = FactoryGirl.create(:post) post.tag_list = ["tag1", "tag2"] post.tags.count.should == 2 end it "scopes posts by tag" do post_1 = FactoryGirl.create(:post, tag_list: ["tag1", "tag2"]) post_2 = FactoryGirl.create(:post, tag_list: ["tag1"]) Storytime::Post.tagged_with("tag1").should include(post_1) Storytime::Post.tagged_with("tag1").should include(post_2) Storytime::Post.tagged_with("tag2").should include(post_1) Storytime::Post.tagged_with("tag2").should_not include(post_2) end it "counts tags across posts" do post_1 = FactoryGirl.create(:post, tag_list: ["tag1", "tag2"]) post_2 = FactoryGirl.create(:post, tag_list: ["tag1"]) Storytime::Post.tag_counts.find_by(name: "tag1").count.should == 2 Storytime::Post.tag_counts.find_by(name: "tag2").count.should == 1 end end
30.482759
80
0.687783
91e1c6334a15dc2fea7490c614d0c8145c7f38d1
120
require 'coveralls/rake/task'
Coveralls::RakeTask.new
task :test_with_coveralls => [:spec, :cucumber, 'coveralls:push']
30
65
0.766667
2614f2c175b5729f119013a1adec8bc8dccebba1
9,032
require 'puppet-lint/checkplugin' class PuppetLint::Checks # Public: Get an Array of problem Hashes. attr_reader :problems # Public: Get an Array of PuppetLint::Lexer::Token objects. attr_reader :tokens # Public: Initialise a new PuppetLint::Checks object and prepare the check # methods. def initialize @problems = [] @default_info = {:check => 'unknown', :linenumber => 0, :column => 0} PuppetLint.configuration.checks.each do |check| method = PuppetLint.configuration.check_method[check] self.class.send(:define_method, "lint_check_#{check}", &method) end PuppetLint.configuration.helpers.each do |helper| method = PuppetLint.configuration.helper_method[helper] self.class.send(:define_method, helper, &method) end end # Public: Add a message to the problems array. # # kind - The kind of problem as a Symbol (:warning, :error). # problem - A Hash containing the attributes of the problem. # :message - The String message describing the problem. # :linenumber - The Integer line number of the location of the problem. # :check - The String name of the check that the problem came from. # :column - The Integer column number of the location of the problem. # # Returns nothing. def notify(kind, problem) problem[:kind] = kind problem.merge!(@default_info) {|key, v1, v2| v1 } @problems << problem end # Internal: Tokenise the manifest code and prepare it for checking. # # fileinfo - A Hash containing the following: # :fullpath - The expanded path to the file as a String. # :filename - The name of the file as a String. # :path - The original path to the file as passed to puppet-lint as # a String. # data - The String manifest code to be checked. # # Returns nothing. def load_data(fileinfo, data) lexer = PuppetLint::Lexer.new begin @tokens = lexer.tokenise(data) rescue PuppetLint::LexerError => e notify :error, { :message => 'Syntax error (try running `puppet parser validate <file>`)', :linenumber => e.line_no, :column => e.column, } @tokens = [] end @fileinfo = fileinfo @data = data end # Internal: Run the lint checks over the manifest code. # # fileinfo - A Hash containing the following: # :fullpath - The expanded path to the file as a String. # :filename - The name of the file as a String. # :path - The original path to the file as passed to puppet-lint as # a String. # data - The String manifest code to be checked. # # Returns an Array of problem Hashes. def run(fileinfo, data) load_data(fileinfo, data) enabled_checks.each do |check| @default_info[:check] = check self.send("lint_check_#{check}") end @problems end # Internal: Get a list of checks that have not been disabled. # # Returns an Array of String check names. def enabled_checks @enabled_checks ||= Proc.new do self.public_methods.select { |method| method.to_s.start_with? 'lint_check_' }.map { |method| method.to_s[11..-1] }.select { |name| PuppetLint.configuration.send("#{name}_enabled?") } end.call end # Public: Get the full expanded path to the file being checked. # # Returns a String path. def fullpath @fileinfo[:fullpath] end # Public: Retrieve a list of tokens that represent resource titles. # # Returns an Array of PuppetLint::Lexer::Token objects. 
def title_tokens @title_tokens ||= Proc.new do result = [] tokens.each_index do |token_idx| if tokens[token_idx].type == :COLON # gather a list of tokens that are resource titles if tokens[token_idx-1].type == :RBRACK array_start_idx = tokens.rindex { |r| r.type == :LBRACK } title_array_tokens = tokens[(array_start_idx + 1)..(token_idx - 2)] result += title_array_tokens.select { |token| {:STRING => true, :NAME => true}.include? token.type } else next_token = tokens[token_idx].next_code_token if next_token.type != :LBRACE result << tokens[token_idx - 1] end end end end result end.call end # Public: Calculate the positions of all resource declarations within the # tokenised manifest. These positions only point to the content of the # resource declaration, they do not include resource types or # titles/namevars. # # Returns an Array of Hashes, each containing: # :start - An Integer position in the `tokens` Array pointing to the first # Token of a resource declaration parameters (type :NAME). # :end - An Integer position in the `tokens` Array pointing to the last # Token of a resource declaration parameters (type :RBRACE). def resource_indexes @resource_indexes ||= Proc.new do result = [] tokens.each_index do |token_idx| if tokens[token_idx].type == :COLON next_token = tokens[token_idx].next_code_token depth = 1 if next_token.type != :LBRACE tokens[(token_idx + 1)..-1].each_index do |idx| real_idx = token_idx + idx + 1 if tokens[real_idx].type == :LBRACE depth += 1 elsif {:SEMIC => true, :RBRACE => true}.include? tokens[real_idx].type unless tokens[real_idx].type == :SEMIC && depth > 1 depth -= 1 if depth == 0 result << {:start => token_idx + 1, :end => real_idx} break end end end end end end end result end.call end # Public: Calculate the positions of all class definitions within the # tokenised manifest. # # Returns an Array of Hashes, each containing: # :start - An Integer position in the `tokens` Array pointing to the first # token of a class (type :CLASS). # :end - An Integer position in the `tokens` Array pointing to the last # token of a class (type :RBRACE). def class_indexes @class_indexes ||= Proc.new do result = [] tokens.each_index do |token_idx| if tokens[token_idx].type == :CLASS depth = 0 in_params = false tokens[token_idx+1..-1].each_index do |class_token_idx| idx = class_token_idx + token_idx + 1 if tokens[idx].type == :LPAREN in_params = true elsif tokens[idx].type == :RPAREN in_params = false elsif tokens[idx].type == :LBRACE depth += 1 unless in_params elsif tokens[idx].type == :RBRACE depth -= 1 unless in_params if depth == 0 && ! in_params if tokens[token_idx].next_code_token.type != :LBRACE result << {:start => token_idx, :end => idx} end break end end end end end result end.call end # Public: Calculate the positions of all defined type definitions within # the tokenised manifest. # # Returns an Array of Hashes, each containing: # :start - An Integer position in the `tokens` Array pointing to the first # token of a defined type (type :DEFINE). # :end - An Integer position in the `tokens` Array pointing to the last # token of a defined type (type :RBRACE). 
def defined_type_indexes @defined_type_indexes ||= Proc.new do result = [] tokens.each_index do |token_idx| if tokens[token_idx].type == :DEFINE depth = 0 in_params = false tokens[token_idx+1..-1].each_index do |define_token_idx| idx = define_token_idx + token_idx + 1 if tokens[idx].type == :LPAREN in_params = true elsif tokens[idx].type == :RPAREN in_params = false elsif tokens[idx].type == :LBRACE depth += 1 unless in_params elsif tokens[idx].type == :RBRACE depth -= 1 unless in_params if depth == 0 && ! in_params result << {:start => token_idx, :end => idx} break end end end end end result end.call end # Public: Retrieves a list of token types that are considered to be # formatting tokens (ie whitespace, newlines, etc). # # Returns an Array of Symbols. def formatting_tokens @formatting_tokens ||= PuppetLint::Lexer::FORMATTING_TOKENS end # Public: Access the lines of the manifest that is being checked. # # Returns an Array of Strings. def manifest_lines @manifest_lines ||= @data.split("\n") end end
33.328413
84
0.598096
f80341c1947a1432b324010d7626382eba32dba0
687
Pod::Spec.new do |s|
  s.name         = "RNBraintreeDropIn"
  s.version      = "1.0.0"
  s.summary      = "RNBraintreeDropIn"
  s.description  = <<-DESC
                   RNBraintreeDropIn
                   DESC
  s.homepage     = "https://github.com/bamlab/react-native-braintree-payments-drop-in"
  s.license      = "MIT"
  # s.license    = { :type => "MIT", :file => "../LICENSE" }
  s.author       = { "author" => "[email protected]" }
  s.platform     = :ios, "9.0"
  s.source       = { :git => "https://github.com/bamlab/react-native-braintree-payments-drop-in.git", :tag => "master" }
  s.source_files = "RNBraintreeDropIn/**/*.{h,m}"
  s.requires_arc = true
end
40.411765
120
0.558952
7a0d379f067cfc7294ee1de5da2529a7d60d7519
7,598
require File.dirname(__FILE__) + '/spec_helper.rb' include RR require File.dirname(__FILE__) + "/../config/test_config.rb" describe "PostgreSQL schema support" do before(:each) do config = deep_copy(standard_config) config.options[:rep_prefix] = 'rx' config.left[:schema_search_path] = 'rr' config.right[:schema_search_path] = 'rr' Initializer.configuration = config end after(:each) do end if Initializer.configuration.left[:adapter] == 'postgresql' it "tables should show the tables from the schema and no others" do session = Session.new session.left.tables.include?('rr_simple').should be_true session.left.tables.include?('scanner_records').should be_false end it "tables should not show the tables from other schemas" do session = Session.new standard_config session.left.tables.include?('scanner_records').should be_true session.left.tables.include?('rr_simple').should be_false end it "primary_key_names should work" do session = Session.new session.left.primary_key_names('rr_simple').should == ['id'] end it "primary_key_names should pick the table in the target schema" do session = Session.new session.left.primary_key_names('rr_duplicate').should == ['id'] end it "column_names should work" do session = Session.new session.left.column_names('rr_simple').should == ['id', 'name'] end it "column_names should pick the table in the target schema" do session = Session.new session.left.column_names('rr_duplicate').should == ['id', 'name'] end it "referenced_tables should work" do session = Session.new session.left.referenced_tables(['rr_referencing']).should == { 'rr_referencing' => ['rr_referenced'] } end it "table_select_query should work" do session = Session.new session.left.table_select_query('rr_simple'). should == 'select "id", "name" from "rr_simple" order by "id"' end it "TypeCastingCursor should work" do session = Session.new org_cursor = session.left.select_cursor( :query => "select id, name from rr_simple where id = 1", :type_cast => false ) cursor = TypeCastingCursor.new session.left, 'rr_simple', org_cursor row = cursor.next_row row.should == { 'id' => 1, 'name' => 'bla' } end it "sequence_values should pick the table in the target schema" do session = Session.new session.left.sequence_values('rx', 'rr_duplicate').keys.should == ["rr_duplicate_id_seq"] end it "clear_sequence_setup should pick the table in the target schema" do session = nil begin session = Session.new initializer = ReplicationInitializer.new(session) session.left.begin_db_transaction session.right.begin_db_transaction table_pair = {:left => 'rr_duplicate', :right => 'rr_duplicate'} initializer.ensure_sequence_setup table_pair, 5, 2, 1 id1, id2 = get_example_sequence_values(session, 'rr_duplicate') (id2 - id1).should == 5 (id1 % 5).should == 2 initializer.clear_sequence_setup :left, 'rr_duplicate' id1, id2 = get_example_sequence_values(session, 'rr_duplicate') (id2 - id1).should == 1 ensure [:left, :right].each do |database| initializer.clear_sequence_setup database, 'rr_duplicate' rescue nil if session session.send(database).execute "delete from rr_duplicate" rescue nil if session session.send(database).rollback_db_transaction rescue nil if session end end end it "sequence setup should work" do session = nil begin session = Session.new initializer = ReplicationInitializer.new(session) session.left.begin_db_transaction session.right.begin_db_transaction table_pair = {:left => 'rr_sequence_test', :right => 'rr_sequence_test'} initializer.ensure_sequence_setup table_pair, 5, 2, 1 id1, id2 = 
get_example_sequence_values(session, 'rr_sequence_test') (id2 - id1).should == 5 (id1 % 5).should == 2 ensure [:left, :right].each do |database| initializer.clear_sequence_setup database, 'rr_sequence_test' rescue nil if session session.send(database).execute "delete from rr_sequence_test" rescue nil if session session.send(database).rollback_db_transaction rescue nil if session end end end it "clear_sequence_setup should work" do session = nil begin session = Session.new initializer = ReplicationInitializer.new(session) session.left.begin_db_transaction session.right.begin_db_transaction table_pair = {:left => 'rr_sequence_test', :right => 'rr_sequence_test'} initializer.ensure_sequence_setup table_pair, 5, 2, 2 initializer.clear_sequence_setup :left, 'rr_sequence_test' id1, id2 = get_example_sequence_values(session, 'rr_sequence_test') (id2 - id1).should == 1 ensure [:left, :right].each do |database| initializer.clear_sequence_setup database, 'rr_sequence_test' if session session.send(database).execute "delete from rr_sequence_test" if session session.send(database).rollback_db_transaction if session end end end it "initializer should create tables in target schema" do session = nil begin config = deep_copy(Initializer.configuration) config.options[:rep_prefix] = 'ry' session = Session.new config session.left.begin_db_transaction initializer = ReplicationInitializer.new(session) initializer.create_change_log(:left) # no exception ==> means table was created in target schema session.left.select_one("select id from rr.ry_pending_changes") ensure session.left.rollback_db_transaction if session end end it "create_trigger, trigger_exists? and drop_trigger should work" do session = nil begin session = Session.new initializer = ReplicationInitializer.new(session) session.left.begin_db_transaction initializer.create_trigger :left, 'rr_trigger_test' initializer.trigger_exists?(:left, 'rr_trigger_test'). should be_true # Verify that the trigger can find the pending_changes table even if # current search_path does not include it. session.left.execute "set search_path = 'public'" session.left.execute <<-EOF insert into rr.rr_trigger_test(first_id, second_id) values(10, 11) EOF session.left.execute "set search_path = 'rr'" session.left.select_one("select change_key from rx_pending_changes")['change_key']. should == "first_id|10|second_id|11" initializer.drop_trigger(:left, 'rr_trigger_test') initializer.trigger_exists?(:left, 'rr_trigger_test'). should be_false ensure session.left.rollback_db_transaction if session end end it "should work with complex search paths" do config = deep_copy(standard_config) config.left[:schema_search_path] = 'public,rr' config.right[:schema_search_path] = 'public,rr' session = Session.new(config) tables = session.left.tables tables.include?('rr_simple').should be_true tables.include?('scanner_records').should be_true end end end
35.671362
95
0.668729
bfad7cca308e335b6fdc8effe099bb291192b7da
66
FactoryGirl.define do
  factory :genre do
    movie nil
  end
end
11
21
0.712121
4aa9b725e91d990b2b8485c7f2f0b837351ca899
152
class PremiumPayment < ApplicationRecord
  belongs_to :user

  COST_PER_MONTH = 10000000

  def end_at
    start_at + seconds_credited.seconds
  end
end
16.888889
40
0.776316
bb1ff14fc9c58f70ecf03641c4e7fdfd0dfad9e6
1,871
class Konsole < Formula
  desc "KDE's terminal emulator"
  homepage "https://www.kde.org"
  url "https://download.kde.org/stable/release-service/20.04.0/src/konsole-20.04.0.tar.xz"
  sha256 "179b2bb442e13c22032e56457986c68074b31a5c2da67e0e6e854658a37e24de"
  head "git://anongit.kde.org/konsole.git"

  depends_on "cmake" => :build
  depends_on "KDE-mac/kde/kf5-extra-cmake-modules" => :build
  depends_on "kde-mac/kde/kf5-kdoctools" => :build
  depends_on "ninja" => :build

  depends_on "KDE-mac/kde/kf5-breeze-icons"
  depends_on "KDE-mac/kde/kf5-kinit"
  depends_on "KDE-mac/kde/kf5-knewstuff"
  depends_on "KDE-mac/kde/kf5-knotifyconfig"
  depends_on "KDE-mac/kde/kf5-kparts"
  depends_on "KDE-mac/kde/kf5-kpty"

  def install
    args = std_cmake_args
    args << "-DBUILD_TESTING=OFF"
    args << "-DKDE_INSTALL_QMLDIR=lib/qt5/qml"
    args << "-DKDE_INSTALL_PLUGINDIR=lib/qt5/plugins"
    args << "-DCMAKE_INSTALL_BUNDLEDIR=#{bin}"

    mkdir "build" do
      system "cmake", "-G", "Ninja", "..", *args
      system "ninja"
      system "ninja", "install"
      prefix.install "install_manifest.txt"
    end

    # Extract Qt plugin path
    qtpp = `#{Formula["qt"].bin}/qtpaths --plugin-dir`.chomp
    system "/usr/libexec/PlistBuddy",
           "-c", "Add :LSEnvironment:QT_PLUGIN_PATH string \"#{qtpp}\:#{HOMEBREW_PREFIX}/lib/qt5/plugins\"",
           "#{bin}/konsole.app/Contents/Info.plist"
  end

  def post_install
    mkdir_p HOMEBREW_PREFIX/"share/konsole"
    ln_sf HOMEBREW_PREFIX/"share/icons/breeze/breeze-icons.rcc", HOMEBREW_PREFIX/"share/konsole/icontheme.rcc"
  end

  def caveats
    <<~EOS
      You need to take some manual steps in order to make this formula work:
      "$(brew --repo kde-mac/kde)/tools/do-caveats.sh"
    EOS
  end

  test do
    assert `"#{bin}/konsole.app/Contents/MacOS/konsole" --help | grep -- --help` =~ /--help/
  end
end
32.824561
110
0.684126
bb65d337802180c79ff7973420a06b48d985860c
152
Rails.application.routes.draw do
  mount Searchengine::Engine => "/search"

  resources :shows do
    collection do
      searchable
    end
  end
end
13.818182
41
0.684211
d52ce3125adb48243a9659df0d695912a036fb82
191
require 'voicearchive/client'

module Voicearchive
  class UserClient < Client
    def get_coordinators
      response = call('coordinators')
      JSON.parse(response.body)
    end
  end
end
19.1
37
0.717277
1c11069d5f58c7b0bdb972f4d003f3ae13918c6e
922
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

module Google
  module Apis
    module DriveV3
      # Version of the google-apis-drive_v3 gem
      GEM_VERSION = "0.1.0"

      # Version of the code generator used to generate this client
      GENERATOR_VERSION = "0.1.1"

      # Revision of the discovery document this client was generated from
      REVISION = "20201130"
    end
  end
end
31.793103
74
0.726681
f81d2e61cd8f14f4fa98f27fa79ce994408e1da6
841
cask 'datacolor-spyder-elite' do
  version '5.2'
  sha256 '12290c3ab2e7585d97430da6d6ed1fe3d39f0032c8f3401fdaedb7d567c17197'

  # d3d9ci7ypuovlo.cloudfront.net/spyder was verified as official when first introduced to the cask
  url "http://d3d9ci7ypuovlo.cloudfront.net/spyder#{version.major}/Spyder#{version.major}Elite_#{version}_OSX_Installer.zip"
  name 'Spyder Elite'
  homepage "http://www.datacolor.com/photography-design/product-overview/spyder#{version.major}-family/#spyder#{version.major}elite"

  auto_updates true
  depends_on macos: '>= :lion'

  installer manual: "Spyder#{version.major}Elite_#{version}_OSX_Installer.app"

  uninstall delete: "/Applications/Datacolor/Spyder#{version.major}Elite",
            signal: ['TERM', "com.datacolor.spyder#{version.major}utility"],
            rmdir:  '/Applications/Datacolor'
end
44.263158
132
0.762188
4aca1eba94d08bab828e5bf8200dbbc7b7eff0cb
775
module Bosh::Director
  module DeploymentPlan
    class LinkPath < Struct.new(:deployment, :job, :template, :name, :path)
      def self.parse(current_deployment_name, path, logger)
        parts = path.split('.')

        if parts.size == 3
          logger.debug("Link '#{path}' does not specify deployment, using current deployment")
          parts.unshift(current_deployment_name)
        end

        if parts.size != 4
          logger.error("Invalid link format: #{path}")
          raise DeploymentInvalidLink,
            "Link '#{path}' is invalid. A link must have either 3 or 4 parts: " +
            "[deployment_name.]job_name.template_name.link_name"
        end

        new(*parts, path)
      end

      def to_s
        path
      end
    end
  end
end
27.678571
94
0.59871
bf8671b2dd11e1f40d911c5971ca34d9797db0ef
342
# An example of how to define a screen group
# Hippo::Screen.define_group '<%= identifier %>-screens' do | group |
#     group.title       = "<%= namespace %> Screens"
#     group.description = "Screens relating to <%= namespace %>"
#     group.icon        = "heart"
# end

Hippo::Screen.for_extension '<%= identifier %>' do | screens |

end
31.090909
69
0.619883
03c57c3edd4e7c866e5cd9edbf6a7e685876f330
52
def truth
  42
end

def incrementing(n)
  n + 1
end
6.5
19
0.653846
ac89912b09c575b23e83ca7932dd9438a24f2861
4,174
require 'docx/containers' require 'docx/elements' require 'nokogiri' require 'zip' module Docx # The Document class wraps around a docx file and provides methods to # interface with it. # # # get a Docx::Document for a docx file in the local directory # doc = Docx::Document.open("test.docx") # # # get the text from the document # puts doc.text # # # do the same thing in a block # Docx::Document.open("test.docx") do |d| # puts d.text # end class Document attr_reader :xml, :doc, :zip, :styles def initialize(path, &block) @replace = {} @zip = Zip::File.open(path) @document_xml = @zip.read('word/document.xml') @doc = Nokogiri::XML(@document_xml) @styles_xml = @zip.read('word/styles.xml') @styles = Nokogiri::XML(@styles_xml) if block_given? yield self @zip.close end end # This stores the current global document properties, for now def document_properties { font_size: font_size } end # With no associated block, Docx::Document.open is a synonym for Docx::Document.new. If the optional code block is given, it will be passed the opened +docx+ file as an argument and the Docx::Document oject will automatically be closed when the block terminates. The values of the block will be returned from Docx::Document.open. # call-seq: # open(filepath) => file # open(filepath) {|file| block } => obj def self.open(path, &block) self.new(path, &block) end def paragraphs @doc.xpath('//w:document//w:body/w:p').map { |p_node| parse_paragraph_from p_node } end def bookmarks bkmrks_hsh = Hash.new bkmrks_ary = @doc.xpath('//w:bookmarkStart').map { |b_node| parse_bookmark_from b_node } # auto-generated by office 2010 bkmrks_ary.reject! {|b| b.name == "_GoBack" } bkmrks_ary.each {|b| bkmrks_hsh[b.name] = b } bkmrks_hsh end def tables @doc.xpath('//w:document//w:body//w:tbl').map { |t_node| parse_table_from t_node } end # Some documents have this set, others don't. # Values are returned as half-points, so to get points, that's why it's divided by 2. def font_size size_tag = @styles.xpath('//w:docDefaults//w:rPrDefault//w:rPr//w:sz').first size_tag ? size_tag.attributes['val'].value.to_i / 2 : nil end ## # *Deprecated* # # Iterates over paragraphs within document # call-seq: # each_paragraph => Enumerator def each_paragraph paragraphs.each { |p| yield(p) } end # call-seq: # to_s -> string def to_s paragraphs.map(&:to_s).join("\n") end # Output entire document as a String HTML fragment def to_html paragraphs.map(&:to_html).join('\n') end # Save document to provided path # call-seq: # save(filepath) => void def save(path) update Zip::OutputStream.open(path) do |out| zip.each do |entry| next unless entry.file? out.put_next_entry(entry.name) if @replace[entry.name] out.write(@replace[entry.name]) else out.write(zip.read(entry.name)) end end end zip.close end alias_method :text, :to_s def replace_entry(entry_path, file_contents) @replace[entry_path] = file_contents end private #-- # TODO: Flesh this out to be compatible with other files # TODO: Method to set flag on files that have been edited, probably by inserting something at the # end of methods that make edits? #++ def update replace_entry "word/document.xml", doc.serialize(:save_with => 0) end # generate Elements::Containers::Paragraph from paragraph XML node def parse_paragraph_from(p_node) Elements::Containers::Paragraph.new(p_node, document_properties) end # generate Elements::Bookmark from bookmark XML node def parse_bookmark_from(b_node) Elements::Bookmark.new(b_node) end def parse_table_from(t_node) Elements::Containers::Table.new(t_node) end end end
27.826667
333
0.633206
7a3946d76e64e7932de1ac19268daf3e7645ff34
20,901
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with this # work for additional information regarding copyright ownership. The ASF # licenses this file to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. require File.join(File.dirname(__FILE__), '../spec_helpers') describe 'local task', :shared=>true do it "should execute task for project in current directory" do define 'foobar' lambda { @task.invoke }.should run_task("foobar:#{@task.name}") end it "should not execute task for projects in other directory" do define 'foobar', :base_dir=>'elsewhere' lambda { task('build').invoke }.should_not run_task('foobar:build') end end describe 'build task' do it_should_behave_like 'local task' before(:each) { @task = task('build') } end describe 'clean task' do it_should_behave_like 'local task' before(:each) { @task = task('clean') } end describe 'package task' do it_should_behave_like 'local task' before(:each) { @task = task('package') } it 'should execute build task as prerequisite' do lambda { @task.invoke }.should run_task('build') end end describe 'install task' do it_should_behave_like 'local task' before(:each) { @task = task('install') } it 'should execute package task as prerequisite' do lambda { @task.invoke }.should run_task('package') end end describe 'uninstall task' do it_should_behave_like 'local task' before(:each) { @task = task('uninstall') } end describe 'upload task' do it_should_behave_like 'local task' before(:each) { @task = task('upload') } it 'should execute package task as prerequisite' do lambda { @task.invoke }.should run_task('package') end end describe Project, '#build' do it 'should return the project\'s build task' do define('foo').build.should eql(task('foo:build')) end it 'should enhance the project\'s build task' do task 'prereq' task 'action' define 'foo' do build 'prereq' do task('action').invoke end end lambda { project('foo').build.invoke }.should run_tasks('prereq', 'action') end it 'should execute build task for sub-project' do define 'foo' do define 'bar' end lambda { task('foo:build').invoke }.should run_task('foo:bar:build') end it 'should not execute build task of other projects' do define 'foo' define 'bar' lambda { task('foo:build').invoke }.should_not run_task('bar:build') end end describe Project, '#clean' do it 'should return the project\'s clean task' do define('foo').clean.should eql(task('foo:clean')) end it 'should enhance the project\'s clean task' do task 'prereq' task 'action' define 'foo' do clean 'prereq' do task('action').invoke end end lambda { project('foo').clean.invoke }.should run_tasks('prereq', 'action') end it 'should remove target directory' do define 'foo' do self.layout[:target] = 'targeted' end mkpath 'targeted' lambda { project('foo').clean.invoke }.should change { File.exist?('targeted') }.from(true).to(false) end it 'should remove reports directory' do define 'foo' do self.layout[:reports] = 'reported' end mkpath 'reported' lambda { project('foo').clean.invoke 
}.should change { File.exist?('reported') }.from(true).to(false) end it 'should execute clean task for sub-project' do define 'foo' do define 'bar' end lambda { task('foo:clean').invoke }.should run_task('foo:bar:clean') end it 'should not execute clean task of other projects' do define 'foo' define 'bar' lambda { task('foo:clean').invoke }.should_not run_task('bar:clean') end end describe Project, '#target' do before :each do @project = define('foo', :layout=>Layout.new) end it 'should default to target' do @project.target.should eql('target') end it 'should set layout :target' do @project.target = 'bar' @project.layout.expand(:target).should point_to_path('bar') end it 'should come from layout :target' do @project.layout[:target] = 'baz' @project.target.should eql('baz') end it 'should be removed in version 1.5 since it was deprecated in version 1.3' do Buildr::VERSION.should < '1.5' end end describe Project, '#reports' do before :each do @project = define('foo', :layout=>Layout.new) end it 'should default to reports' do @project.reports.should eql('reports') end it 'should set layout :reports' do @project.reports = 'bar' @project.layout.expand(:reports).should point_to_path('bar') end it 'should come from layout :reports' do @project.layout[:reports] = 'baz' @project.reports.should eql('baz') end it 'should be removed in version 1.5 since it was deprecated in version 1.3' do Buildr::VERSION.should < '1.5' end end describe Git do describe '#uncommitted_files' do it 'should return an empty array on a clean repository' do Git.should_receive(:`).with('git status').and_return <<-EOF # On branch master nothing to commit (working directory clean) EOF Git.uncommitted_files.should be_empty end it 'should reject a dirty repository, Git 1.4.2 or former' do Git.should_receive(:`).with('git status').and_return <<-EOF # On branch master # # Changed but not updated: # (use "git add <file>..." to update what will be committed) # (use "git checkout -- <file>..." to discard changes in working directory) # # modified: lib/buildr.rb # modified: spec/buildr_spec.rb # # Untracked files: # (use "git add <file>..." to include in what will be committed) # # error.log EOF Git.uncommitted_files.should include('lib/buildr.rb', 'error.log') end it 'should reject a dirty repository, Git 1.4.3 or higher' do Git.should_receive(:`).with('git status').and_return <<-EOF # On branch master # Changed but not updated: # (use "git add <file>..." to update what will be committed) # #\tmodified: lib/buildr.rb #\tmodified: spec/buildr_spec.rb # # Untracked files: # (use "git add <file>..." 
to include in what will be committed) # #\terror.log no changes added to commit (use "git add" and/or "git commit -a") EOF Git.uncommitted_files.should include('lib/buildr.rb', 'error.log') end end describe '#remote' do it 'should return the name of the corresponding remote' do Git.should_receive(:git).with('config', '--get', 'branch.master.remote').and_return "origin\n" Git.should_receive(:git).with('remote').and_return "upstream\norigin\n" Git.send(:remote, 'master').should == 'origin' end it 'should return nil if no remote for the given branch' do Git.should_receive(:git).with('config', '--get', 'branch.master.remote').and_return "\n" Git.should_not_receive(:git).with('remote') Git.send(:remote, 'master').should be_nil end end describe '#current_branch' do it 'should return the current branch' do Git.should_receive(:git).with('branch').and_return(" master\n* a-clever-idea\n ze-great-idea") Git.send(:current_branch).should == 'a-clever-idea' end end end # of Git describe Svn do describe '#tag' do it 'should remove any existing tag with the same name' do Svn.stub!(:repo_url).and_return('http://my.repo.org/foo/trunk') Svn.stub!(:copy) Svn.should_receive(:remove).with('http://my.repo.org/foo/tags/1.0.0', 'Removing old copy') Svn.tag '1.0.0' end it 'should do an svn copy with the release version' do Svn.stub!(:repo_url).and_return('http://my.repo.org/foo/trunk') Svn.stub!(:remove) Svn.should_receive(:copy).with(Dir.pwd, 'http://my.repo.org/foo/tags/1.0.0', 'Release 1.0.0') Svn.tag '1.0.0' end end # Reference: http://svnbook.red-bean.com/en/1.4/svn.reposadmin.planning.html#svn.reposadmin.projects.chooselayout describe '#tag_url' do it 'should accept to tag foo/trunk' do Svn.tag_url('http://my.repo.org/foo/trunk', '1.0.0').should == 'http://my.repo.org/foo/tags/1.0.0' end it 'should accept to tag foo/branches/1.0' do Svn.tag_url('http://my.repo.org/foo/branches/1.0', '1.0.1').should == 'http://my.repo.org/foo/tags/1.0.1' end it 'should accept to tag trunk/foo' do Svn.tag_url('http://my.repo.org/trunk/foo', '1.0.0').should == 'http://my.repo.org/tags/foo/1.0.0' end it 'should accept to tag branches/foo/1.0' do Svn.tag_url('http://my.repo.org/branches/foo/1.0', '1.0.0').should == 'http://my.repo.org/tags/foo/1.0.0' end describe '#repo_url' do it 'should extract the SVN URL from svn info' do Svn.should_receive(:svn).and_return <<-XML <?xml version="1.0"?> <info> <entry kind="dir" path="." revision="724987"> <url>http://my.repo.org/foo/trunk</url> <repository> <root>http://my.repo.org</root> <uuid>13f79535-47bb-0310-9956-ffa450edef68</uuid> </repository> <wc-info> <schedule>normal</schedule> <depth>infinity</depth> </wc-info> <commit revision="724955"> <author>boisvert</author> <date>2008-12-10T01:53:51.240936Z</date> </commit> </entry> </info> XML Svn.repo_url.should == 'http://my.repo.org/foo/trunk' end end end end # of Buildr::Svn describe Release do describe 'find' do it 'should return GitRelease if project uses Git' do write '.git/config' Release.find.should be_instance_of(GitRelease) end it 'should return SvnRelease if project uses SVN' do write '.svn/xml' Release.find.should be_instance_of(SvnRelease) end it 'should return nil if no known release process' do Dir.chdir(Dir.tmpdir) do Release.find.should be_nil end end after :each do Release.instance_exec { @release = nil } end end end describe 'a release process', :shared=>true do describe '#make' do before do write 'buildfile', "VERSION_NUMBER = '1.0.0-SNAPSHOT'" # Prevent a real call to a spawned buildr process. 
@release.stub!(:buildr) @release.stub!(:check) @release.should_receive(:ruby).with('-S', 'buildr', "_#{Buildr::VERSION}_", '--buildfile', File.expand_path('buildfile.next'), '--environment', 'development', 'clean', 'upload', 'DEBUG=no') end it 'should tag a release with the release version' do @release.stub!(:update_version_to_next) @release.should_receive(:tag_release).with('1.0.0') @release.make end it 'should not alter the buildfile before tagging' do @release.stub!(:update_version_to_next) @release.should_receive(:tag_release).with('1.0.0') @release.make file('buildfile').should contain('VERSION_NUMBER = "1.0.0"') end it 'should update the buildfile with the next version number' do @release.stub!(:tag_release) @release.make file('buildfile').should contain('VERSION_NUMBER = "1.0.1-SNAPSHOT"') end it 'should keep leading zeros in the next version number' do write 'buildfile', "VERSION_NUMBER = '1.0.001-SNAPSHOT'" @release.stub!(:tag_release) @release.make file('buildfile').should contain('VERSION_NUMBER = "1.0.002-SNAPSHOT"') end it 'should commit the updated buildfile' do @release.stub!(:tag_release) @release.make file('buildfile').should contain('VERSION_NUMBER = "1.0.1-SNAPSHOT"') end end describe '#resolve_tag' do before do @release.stub!(:extract_version).and_return('1.0.0') end it 'should return tag specified by tag_name' do Release.tag_name = 'first' @release.send(:resolve_tag).should == 'first' end it 'should use tag returned by tag_name if tag_name is a proc' do Release.tag_name = lambda { |version| "buildr-#{version}" } @release.send(:resolve_tag).should == 'buildr-1.0.0' end after { Release.tag_name = nil } end describe '#tag_release' do it 'should inform the user' do @release.stub!(:extract_version).and_return('1.0.0') lambda { @release.tag_release('1.0.0') }.should show_info('Tagging release 1.0.0') end end describe '#extract_version' do it 'should extract VERSION_NUMBER with single quotes' do write 'buildfile', "VERSION_NUMBER = '1.0.0-SNAPSHOT'" @release.extract_version.should == '1.0.0-SNAPSHOT' end it 'should extract VERSION_NUMBER with double quotes' do write 'buildfile', %{VERSION_NUMBER = "1.0.1-SNAPSHOT"} @release.extract_version.should == '1.0.1-SNAPSHOT' end it 'should extract VERSION_NUMBER without any spaces' do write 'buildfile', "VERSION_NUMBER='1.0.2-SNAPSHOT'" @release.extract_version.should == '1.0.2-SNAPSHOT' end it 'should extract THIS_VERSION as an alternative to VERSION_NUMBER' do write 'buildfile', "THIS_VERSION = '1.0.3-SNAPSHOT'" @release.extract_version.should == '1.0.3-SNAPSHOT' end it 'should complain if no current version number' do write 'buildfile', 'define foo' lambda { @release.extract_version }.should raise_error('Looking for THIS_VERSION = "..." 
in your Buildfile, none found') end end describe '#with_release_candidate_version' do before do Buildr.application.stub!(:buildfile).and_return(file('buildfile')) write 'buildfile', "THIS_VERSION = '1.1.0-SNAPSHOT'" end it 'should yield the name of the release candidate buildfile' do @release.send :with_release_candidate_version do |new_filename| File.read(new_filename).should == %{THIS_VERSION = "1.1.0"} end end it 'should yield a name different from the original buildfile' do @release.send :with_release_candidate_version do |new_filename| new_filename.should_not point_to_path('buildfile') end end end describe '#update_version_to_next' do before do write 'buildfile', 'THIS_VERSION = "1.0.0"' end it 'should update the buildfile with a new version number' do @release.send :update_version_to_next file('buildfile').should contain('THIS_VERSION = "1.0.1-SNAPSHOT"') end it 'should commit the new buildfile on the trunk' do @release.should_receive(:message).and_return('Changed version number to 1.0.1-SNAPSHOT') @release.update_version_to_next end it 'should use the commit message specified by commit_message' do Release.commit_message = 'Here is my custom message' @release.should_receive(:message).and_return('Here is my custom message') @release.update_version_to_next end it 'should use the commit message returned by commit_message if commit_message is a proc' do Release.commit_message = lambda { |new_version| new_version.should == '1.0.1-SNAPSHOT' "increment version number to #{new_version}" } @release.should_receive(:message).and_return('increment version number to 1.0.1-SNAPSHOT') @release.update_version_to_next end it 'should inform the user of the new version' do lambda { @release.update_version_to_next }.should show_info('Current version is now 1.0.1-SNAPSHOT') end after { Release.commit_message = nil } end end describe GitRelease do it_should_behave_like 'a release process' before do @release = GitRelease.new Git.stub!(:git) Git.stub!(:current_branch).and_return('master') end describe '#applies_to?' do it 'should reject a non-git repo' do Dir.chdir(Dir.tmpdir) do GitRelease.applies_to?.should be_false end end it 'should accept a git repo' do FileUtils.mkdir '.git' FileUtils.touch File.join('.git', 'config') GitRelease.applies_to?.should be_true end end describe '#release_check' do before do @release = GitRelease.new end it 'should accept a clean repository' do Git.should_receive(:`).with('git status').and_return <<-EOF # On branch master nothing to commit (working directory clean) EOF Git.should_receive(:remote).and_return('master') lambda { @release.check }.should_not raise_error end it 'should reject a dirty repository' do Git.should_receive(:`).with('git status').and_return <<-EOF # On branch master # Untracked files: # (use "git add <file>..." 
to include in what will be committed) # # foo.temp EOF lambda { @release.check }.should raise_error(RuntimeError, /uncommitted files/i) end it 'should reject a repository not tracking remote branch' do Git.should_receive(:uncommitted_files).and_return([]) Git.should_receive(:remote).and_return(nil) lambda{ @release.check }.should raise_error(RuntimeError, "You are releasing from a local branch that does not track a remote!") end end describe '#tag_release' do before do @release = GitRelease.new @release.stub!(:extract_version).and_return('1.0.1') @release.stub!(:resolve_tag).and_return('TEST_TAG') Git.stub!(:git).with('tag', '-a', 'TEST_TAG', '-m', '[buildr] Cutting release TEST_TAG') Git.stub!(:git).with('push', 'origin', 'tag', 'TEST_TAG') Git.stub!(:commit) Git.stub!(:push) Git.stub!(:remote).and_return('origin') end it 'should delete any existing tag with the same name' do Git.should_receive(:git).with('tag', '-d', 'TEST_TAG') Git.should_receive(:git).with('push', 'origin', ':refs/tags/TEST_TAG') @release.tag_release 'TEST_TAG' end it 'should commit the buildfile before tagging' do Git.should_receive(:commit).with(File.basename(Buildr.application.buildfile.to_s), "Changed version number to 1.0.1") @release.tag_release 'TEST_TAG' end it 'should push the tag if a remote is tracked' do Git.should_receive(:git).with('tag', '-d', 'TEST_TAG') Git.should_receive(:git).with('push', 'origin', ':refs/tags/TEST_TAG') Git.should_receive(:git).with('tag', '-a', 'TEST_TAG', '-m', '[buildr] Cutting release TEST_TAG') Git.should_receive(:git).with('push', 'origin', 'tag', 'TEST_TAG') @release.tag_release 'TEST_TAG' end it 'should NOT push the tag if no remote is tracked' do Git.stub!(:remote).and_return(nil) Git.should_not_receive(:git).with('push', 'origin', 'tag', 'TEST_TAG') @release.tag_release 'TEST_TAG' end end end describe SvnRelease do it_should_behave_like 'a release process' before do @release = SvnRelease.new Svn.stub!(:svn) Svn.stub!(:repo_url).and_return('http://my.repo.org/foo/trunk') Svn.stub!(:tag) end describe '#applies_to?' do it 'should reject a non-git repo' do SvnRelease.applies_to?.should be_false end it 'should accept a git repo' do FileUtils.touch '.svn' SvnRelease.applies_to?.should be_true end end describe '#check' do before do Svn.stub!(:uncommitted_files).and_return([]) @release = SvnRelease.new end it 'should accept to release from the trunk' do Svn.stub!(:repo_url).and_return('http://my.repo.org/foo/trunk') lambda { @release.check }.should_not raise_error end it 'should accept to release from a branch' do Svn.stub!(:repo_url).and_return('http://my.repo.org/foo/branches/1.0') lambda { @release.check }.should_not raise_error end it 'should reject releasing from a tag' do Svn.stub!(:repo_url).and_return('http://my.repo.org/foo/tags/1.0.0') lambda { @release.check }.should raise_error(RuntimeError, "SVN URL must contain 'trunk' or 'branches/...'") end it 'should reject a non standard repository layout' do Svn.stub!(:repo_url).and_return('http://my.repo.org/foo/bar') lambda { @release.check }.should raise_error(RuntimeError, "SVN URL must contain 'trunk' or 'branches/...'") end it 'should reject an uncommitted file' do Svn.stub!(:repo_url).and_return('http://my.repo.org/foo/trunk') Svn.stub!(:uncommitted_files).and_return(['foo.rb']) lambda { @release.check }.should raise_error(RuntimeError, "Uncommitted files violate the First Principle Of Release!\n" + "foo.rb") end end end
30.782032
132
0.666236
e2f590429d8378cab9a933a8be9aa59e12fb5c23
9,314
require 'spec_helper' describe Nebulous::Parser do context 'around parsing CSVs:' do subject { Nebulous::Parser } let(:path) { './spec/support/assets/crlf-comma-delimited.csv' } let(:parser) { subject.new(path) } context '#initialize' do it 'can be initialized' do expect(parser).to be_instance_of subject end context 'around options' do let(:parser) { subject.new(path, foo: :bar, col_sep: "HI!") } it 'accepts options' do expect(parser.options.foo).to eq :bar end it 'merges options' do expect(parser.options.col_sep).to eq "HI!" end end end context '#headers' do context 'around fetching headers' do it 'returns expected headers' do expect(parser.headers).to eq( ["First name", "Last name", "From", "Access", "Qty"] ) end context 'around empty lines' do let(:path) do './spec/support/assets/crlf-comma-delimited-emptyline.csv' end it 'returns expected headers' do expect(parser.headers).to eq( ["First name", "Last name", "From", "Access", "Qty"] ) end end end end context '#process' do context 'around limits' do let(:parser) { subject.new(path, limit: limit) } context 'with zero limit' do let(:limit) { 0 } it 'returns empty data set' do expect(parser.process).to be_empty end end context 'with in-bounds limit' do let(:limit) { 2 } it 'returns expected chunk size' do expect(parser.process.length).to eq 2 end end context 'with out of bounds limit' do let(:limit) { 1_000_000 } it 'returns expected chunk size' do expect(parser.process.length).to eq 20 end end end context 'around missing headers' do let(:path) { './spec/support/assets/no-headers.csv' } let(:parser) { subject.new(path, headers: false) } it 'returns unmapped data' do expect(parser.process.first.to_a).to be_instance_of Array end it 'returns expected chunk size' do expect(parser.process.length).to eq 20 end end context 'around user-provided headers' do let(:map) do { first_name: :test1, last_name: :test2, qty: :test3 } end let(:parser) { subject.new(path, mapping: map) } let(:data) { parser.process } let(:headers) { data.first.keys } it 'returns expected keys' do expect(headers).to eq %i(test1 test2 test3) end it 'correctly maps keys to values' do expect(data.first[:test3]).to eq 2 end end context 'around headers_line' do let(:path) { './spec/support/assets/headers-on-secondary-line.csv' } let(:parser) { subject.new(path, start: 1) } let(:data) { parser.process } let(:headers) { data.first.keys } it 'returns expected keys' do expect(headers).to eq %i(first_name last_name from access qty) end it 'correctly maps keys to values' do expect(data.first[:qty]).to eq 2 end end context 'around chunking' do let(:parser) { subject.new(path, chunk: 6) } it 'returns entire dataset when no block passed' do expect(parser.process.length).to eq 20 end context 'with block given' do it 'yields for each chunk' do count = 0 parser.process { count += 1 } expect(count).to eq 4 end it 'returns expected total rows' do data = [] parser.process do |chunk| data << chunk end expect(data.map(&:size).inject(:+)).to eq 20 end end end context 'around chunk: false' do let(:data) { parser.process } let(:headers) { data.first.keys } let(:values) { data.first.values } it 'returns expected length' do expect(data.length).to eq 20 end it 'contains expected headers' do expect(headers).to eq %i(first_name last_name from access qty) end it 'contains expected values' do expect(values).to eq( ["γ©γƒΌγ‚‚γ‚γ‚ŠγŒγ¨γ†", "γƒŸγ‚Ήγ‚ΏγƒΌΒ·γƒ­γƒœγƒƒγƒˆ", "VIP", "VIP", 2] ) end end context 'around limits' do end context 'around empty values' do end context 'when no headers are present' do end 
context 'around rewinding' do it 'can process many times' do parser.process expect(parser.process.length).to eq 20 end end end context '#delimiters' do context 'with CRLF and comma delimiters' do let(:path) { './spec/support/assets/crlf-comma-delimited.csv' } it 'returns the expected delimiters' do expect(parser.delimiters).to eq( { col_sep: ",", row_sep: "\n" } ) end end context 'with CRLF and tab delimiters' do let(:path) { './spec/support/assets/crlf-tab-delimited.tsv' } it 'returns the expected delimiters' do expect(parser.delimiters).to eq( { col_sep: "\t", row_sep: "\n" } ) end end context 'with CR, LF and comma delimiters' do let(:path) { './spec/support/assets/cr-lf-comma-delimited.csv' } it 'returns the expected delimiters' do expect(parser.delimiters).to eq( { col_sep: ",", row_sep: "\r" } ) end end context 'with CR and comma delimiters' do let(:path) { './spec/support/assets/cr-comma-delimited.csv' } it 'returns the expected delimiters' do expect(parser.delimiters).to eq( { col_sep: ",", row_sep: "\r" } ) end end end context '#read_input' do context 'with a path string input' do it 'returns expected instance of File' do expect(parser.file).to be_instance_of File end end context 'with a file input' do let(:file) { File.new(path) } let(:parser) { subject.new(file) } it 'returns expected instance of File' do expect(parser.file).to be_instance_of File end end end context '#readline' do it 'reads from file input' do expect(parser.send(:readline)).to eq( "First name,Last name,From,Access,Qty" ) expect(parser.send(:readline)).to eq( "γ©γƒΌγ‚‚γ‚γ‚ŠγŒγ¨γ†,γƒŸγ‚Ήγ‚ΏγƒΌΒ·γƒ­γƒœγƒƒγƒˆ,VIP,VIP,2" ) expect(parser.send(:readline)).to eq( "Meghan,Koch,VIP,VIP,5" ) end context 'around line terminators' do context 'with CR-LF terminators' do let(:path) { './spec/support/assets/cr-lf-comma-delimited.csv' } it 'reads from file input' do expect(parser.send(:readline)).to eq( "First Name, Last Name" ) end end end context 'around encoding', pending: true do let(:path) { './spec/support/assets/fucky-encoding.csv' } # it 'properly reads and encodes data' do # expect(parser.send(:readline)).to eq nil # end end end context '#encoding' do context 'with provided encoding' do let(:parser) { subject.new(path, encoding: Encoding::ISO_8859_1.to_s) } it 'returns expected encoding' do expect(parser.send(:encoding)).to eq Encoding::ISO_8859_1.to_s end end context 'with default encoding' do it 'returns UTF-8 encoding' do expect(parser.send(:encoding)).to eq Encoding::UTF_8.to_s end end end context '#merge_delimiters' do context 'with provided delimeters' do let(:parser) { subject.new(path, col_sep: "\cA", row_sep: "\cB\n") } it 'returns the expected delimiters' do expect(parser.options.col_sep).to eq "\cA" expect(parser.options.row_sep).to eq "\cB\n" end end context 'with auto-detected delimiters' do it 'returns the expected delimiters' do expect(parser.options.col_sep).to eq "," expect(parser.options.row_sep).to eq "\n" end end end context '#line_terminator' do context 'with CRLF terminators' do let(:path) { './spec/support/assets/crlf-comma-delimited.csv' } it 'sets the expected line terminator' do expect(parser.send(:line_terminator)).to eq "\n" end end context 'with CR, LF terminators' do let(:path) { './spec/support/assets/cr-lf-comma-delimited.csv' } it 'sets the expected line terminator' do expect(parser.send(:line_terminator)).to eq "\r" end end context 'with CR terminators' do let(:path) { './spec/support/assets/cr-comma-delimited.csv' } it 'sets the expected line terminator' do 
expect(parser.send(:line_terminator)).to eq "\r" end end end end end
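# A minimal usage sketch of the parser API exercised above (illustrative only;
# the CSV path and chunk size are assumptions, not part of this spec):
#
#   parser = Nebulous::Parser.new('data/export.csv', chunk: 100)
#   parser.process do |chunk|
#     chunk.each { |row| puts row[:first_name] }   # keys follow the auto-mapped headers
#   end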
28.570552
79
0.559266
62c81a6f038a2f74eb08884eaf2235a861ca3f46
6,696
require File.expand_path('../../../spec_helper', __FILE__) class EEtee::Context def builder MonitoringProtocols::Collectd::Builder end end describe 'Collectd Ruby parser' do before do @builder_class = MonitoringProtocols::Collectd::Builder @packet = ->(cmd, *args){ @builder_class.send(cmd, *args) } @parser_class = MonitoringProtocols::Collectd::Parser end describe 'Simple packets' do it 'can parse numbers' do type, val, buffer = @parser_class.parse_part( builder.number(1, 122) ) buffer.should == "" val.should == 122 type, val, _ = @parser_class.parse_part( builder.number(1, 2500) ) val.should == 2500 type, val, _ = @parser_class.parse_part( builder.number(1, 356798) ) val.should == 356798 end should 'parse strings' do type, str, _ = @parser_class.parse_part( builder.string(0, "hostname1") ) str.should == 'hostname1' type, str, _ = @parser_class.parse_part( builder.string(0, "string with spaces") ) str.should == 'string with spaces' type, str, _ = @parser_class.parse_part( builder.string(0, "a really long string with many words in it") ) str.should == 'a really long string with many words in it' end should 'parse values' do buffer = builder.values( [builder::COUNTER, builder::DERIVE, builder::DERIVE], [1034, -4567, 34] ) type, values, rest = @parser_class.parse_part( buffer ) rest.should == "" values.should == [1034, -4567, 34] end end describe 'One notification in buffer' do before do @now = Time.new.to_i @pkt = "" @pkt << builder.number(1, @now) @pkt << builder.string(0, 'hostname') @pkt << builder.string(2, 'plugin') @pkt << builder.string(3, 'plugin_inst') @pkt << builder.string(4, 'type') @pkt << builder.string(5, 'type_inst') @pkt << builder.number(257, 2) # severity @pkt << builder.string(256, 'a message') end should 'parse the notification' do data, rest = @parser_class.parse_packet(@pkt) rest.should == "" data.class.should == MonitoringProtocols::Collectd::NetworkMessage data.host.should == 'hostname' data.time.should == @now data.plugin.should == 'plugin' data.plugin_instance.should == 'plugin_inst' data.type.should == 'type' data.type_instance.should == 'type_inst' data.message.should == 'a message' data.severity.should == 2 end end describe 'One packet in buffer' do before do @now = Time.new.to_i @interval = 10 pkt = builder.new pkt.time = @now pkt.host = 'hostname' pkt.interval = @interval pkt.plugin = 'plugin' pkt.plugin_instance = 'plugin_inst' pkt.type = 'type' pkt.type_instance = 'type_inst' pkt.add_value(:counter, 1034) pkt.add_value(:gauge, 3.45) @pkt = pkt.build_packet end should 'parse buffer' do data, rest = @parser_class.parse_packet(@pkt) data.class.should == MonitoringProtocols::Collectd::NetworkMessage data.host.should == 'hostname' data.time.should == @now data.interval.should == @interval data.plugin.should == 'plugin' data.plugin_instance.should == 'plugin_inst' data.type.should == 'type' data.type_instance.should == 'type_inst' data.values.size.should == 2 data.values[0].should == 1034 data.values[1].should == 3.45 rest.should == "" end end describe "Multiple packets in buffer" do before do @now = Time.new.to_i @interval = 10 pkt = builder.new pkt.time = @now pkt.host = 'hostname' pkt.interval = @interval pkt.plugin = 'plugin' pkt.plugin_instance = 'plugin_inst' pkt.type = 'type' pkt.type_instance = 'type_inst' pkt.add_value(:counter, 1034) pkt.add_value(:gauge, 3.45) @pkt = pkt.build_packet @pkt << builder.string(2, 'plugin2') @pkt << builder.string(3, 'plugin2_inst') @pkt << builder.string(4, 'type2') @pkt << builder.string(5, 'type2_inst') @pkt << 
builder.values([builder::COUNTER], [42]) @pkt << builder.string(5, 'type21_inst') @pkt << builder.values([builder::GAUGE], [3.1415927]) end should 'parse buffer' do data = @parser_class.parse(@pkt) data.size.should == 3 data[0].class.should == MonitoringProtocols::Collectd::NetworkMessage data[0].host.should == 'hostname' data[0].time.should == @now data[0].interval.should == @interval data[0].plugin.should == 'plugin' data[0].plugin_instance.should == 'plugin_inst' data[0].type.should == 'type' data[0].type_instance.should == 'type_inst' data[0].values.size.should == 2 data[0].values[0].should == 1034 data[0].values[1].should == 3.45 data[1].host.should == 'hostname' data[1].time.should == @now data[1].interval.should == @interval data[1].plugin.should == 'plugin2' data[1].plugin_instance.should == 'plugin2_inst' data[1].type.should == 'type2' data[1].type_instance.should == 'type2_inst' data[1].values.size.should == 1 data[1].values[0].should == 42 data[2].host.should == 'hostname' data[2].time.should == @now data[2].interval.should == @interval data[2].plugin.should == 'plugin2' data[2].plugin_instance.should == 'plugin2_inst' data[2].type.should == 'type2' data[2].type_instance.should == 'type21_inst' data[2].values.size.should == 1 data[2].values[0].should == 3.1415927 end # should 'parse using feed interface' do # parser = @parser.new # ret = parser.feed(@pkt[0,20]) # ret.size.should == [] # ret = parser.feed(@pkt[21..-1]) # ret.size.should == 2 # end end end
31.43662
112
0.538082
e20a19aa1be464dcc98f0c2ec332310f07174212
1,962
# -*- encoding: utf-8 -*- # stub: devise 4.4.3 ruby lib Gem::Specification.new do |s| s.name = "devise".freeze s.version = "4.4.3" s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version= s.require_paths = ["lib".freeze] s.authors = ["Jos\u{e9} Valim".freeze, "Carlos Ant\u{f4}nio".freeze] s.date = "2018-03-18" s.description = "Flexible authentication solution for Rails with Warden".freeze s.email = "[email protected]".freeze s.homepage = "https://github.com/plataformatec/devise".freeze s.licenses = ["MIT".freeze] s.required_ruby_version = Gem::Requirement.new(">= 2.1.0".freeze) s.rubygems_version = "2.5.2.3".freeze s.summary = "Flexible authentication solution for Rails with Warden".freeze s.installed_by_version = "2.5.2.3" if s.respond_to? :installed_by_version if s.respond_to? :specification_version then s.specification_version = 4 if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then s.add_runtime_dependency(%q<warden>.freeze, ["~> 1.2.3"]) s.add_runtime_dependency(%q<orm_adapter>.freeze, ["~> 0.1"]) s.add_runtime_dependency(%q<bcrypt>.freeze, ["~> 3.0"]) s.add_runtime_dependency(%q<railties>.freeze, ["< 6.0", ">= 4.1.0"]) s.add_runtime_dependency(%q<responders>.freeze, [">= 0"]) else s.add_dependency(%q<warden>.freeze, ["~> 1.2.3"]) s.add_dependency(%q<orm_adapter>.freeze, ["~> 0.1"]) s.add_dependency(%q<bcrypt>.freeze, ["~> 3.0"]) s.add_dependency(%q<railties>.freeze, ["< 6.0", ">= 4.1.0"]) s.add_dependency(%q<responders>.freeze, [">= 0"]) end else s.add_dependency(%q<warden>.freeze, ["~> 1.2.3"]) s.add_dependency(%q<orm_adapter>.freeze, ["~> 0.1"]) s.add_dependency(%q<bcrypt>.freeze, ["~> 3.0"]) s.add_dependency(%q<railties>.freeze, ["< 6.0", ">= 4.1.0"]) s.add_dependency(%q<responders>.freeze, [">= 0"]) end end
42.652174
112
0.647808
abf57fd626528b70d082a4e8ba45e6bab4e340bc
102
module AtlysVoting module ApplicationHelper include Rails.application.routes.url_helpers end end
17
45
0.843137
28e46903bf2bda55cb4b6ff20696bac962620df7
170
class ACPJob < ApplicationJob queue_as :low def perform(job_class) job = job_class.constantize ACP.perform_each do job.perform_later end end end
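# Hypothetical invocation (the job class name is illustrative, and ACP.perform_each
# is assumed to yield once per ACP scope):
#
#   ACPJob.perform_later('SomeTenantScopedJob')
#
# Passing the class name as a String keeps the enqueued arguments serializable;
# it is only constantized when the job actually runs.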
15.454545
31
0.705882
ff5b5c380b46e79d3156e9598b3b69399fe52709
866
class HhResponse < ActiveRecord::Base delegate :name, :city, :link, to: :hh_vacancy, prefix: :vacancy, allow_nil: true serialize :resume belongs_to :issue belongs_to :hh_vacancy def applicant_first_name resume['first_name'] if resume end def applicant_last_name resume['last_name'] if resume end def applicant_city resume.dig('area', 'name') if resume end def applicant_email return unless resume email_contact = Array(resume['contact']).find { |c| c.dig('type', 'id') == 'email' } email_contact && email_contact['value'] end def applicant_birth_date resume['birth_date'] if resume end def resume_link resume['alternate_url'] if resume end def salary resume.dig('salary', 'amount') if resume end def skills resume['skills'] if resume end def experience resume['experience'] if resume end end
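# Illustrative shape of the serialized `resume` hash that the readers above expect
# (field names are inferred from the accessors; the values are made up):
#
#   {
#     'first_name'    => 'Ivan',
#     'last_name'     => 'Petrov',
#     'area'          => { 'name' => 'Moscow' },
#     'contact'       => [{ 'type' => { 'id' => 'email' }, 'value' => 'ivan@example.com' }],
#     'birth_date'    => '1990-01-01',
#     'alternate_url' => 'https://hh.ru/resume/abc123',
#     'salary'        => { 'amount' => 100_000 },
#     'skills'        => 'Ruby, SQL',
#     'experience'    => [{ 'company' => 'Acme' }]
#   }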
19.244444
89
0.650115
e25f3b27511ee450a68633b74865721c2094fa70
9,896
# frozen_string_literal: true require 'rails_helper' # Test article rendering for installed themes RSpec.describe ArticlesController, type: :controller do render_views let(:blog) { create :blog } with_each_theme do |theme, _view_path| context "with theme #{theme}" do before do blog.theme = theme blog.save! end describe '#redirect' do let(:article) { create :article } let(:from_param) { article.permalink_url.sub(%r{#{blog.base_url}/}, '') } it 'successfully renders an article' do get :redirect, params: { from: from_param } expect(response).to be_successful end context 'when the article has an excerpt' do let(:article) { create :article, excerpt: 'foo', body: 'bar' } it 'does not render a continue reading link' do get :redirect, params: { from: from_param } aggregate_failures do expect(response.body).to have_text 'bar' expect(response.body).not_to have_text 'foo' expect(response.body). not_to have_text I18n.t!('articles.article_excerpt.continue_reading') end end end describe 'accessing an article' do let!(:article) { create(:article) } before do get :redirect, params: { from: from_param } end it 'has good rss feed link' do expect(response.body).to have_selector("head>link[href=\"#{article.permalink_url}.rss\"]", visible: false) end it 'has good atom feed link' do expect(response.body).to have_selector("head>link[href=\"#{article.permalink_url}.atom\"]", visible: false) end it 'has a canonical url' do expect(response.body).to have_selector("head>link[href='#{article.permalink_url}']", visible: false) end it 'has a good title' do expect(response.body).to have_selector('title', text: 'A big article | test blog', visible: false) end end describe 'theme rendering' do let!(:article) { create(:article) } it 'renders without errors when no comments or trackbacks are present' do get :redirect, params: { from: from_param } expect(response).to be_successful end it 'renders without errors when recaptcha is enabled' do Recaptcha.configure do |config| config.site_key = 'YourAPIkeysHere_yyyyyyyyyyyyyyyyy' config.secret_key = 'YourAPIkeysHere_xxxxxxxxxxxxxxxxx' end blog.use_recaptcha = true blog.save! 
get :redirect, params: { from: from_param } expect(response).to be_successful end it 'renders without errors when comments and trackbacks are present' do create :trackback, article: article create :comment, article: article get :redirect, params: { from: from_param } expect(response).to be_successful end end context 'when the article is password protected' do let(:article) { create(:article, password: 'password') } it 'article alone should be password protected' do get :redirect, params: { from: from_param } expect(response.body).to have_selector('input[id="article_password"]', count: 1) end end end describe '#index' do let!(:user) { create :user } context 'without any parameters' do let!(:article) { create(:article) } let!(:note) { create(:note) } before do get :index end it 'has good link feed rss' do expect(response.body).to have_selector('head>link[href="http://test.host/articles.rss"]', visible: false) end it 'has good link feed atom' do expect(response.body).to have_selector('head>link[href="http://test.host/articles.atom"]', visible: false) end it 'has a canonical url' do expect(response.body).to have_selector("head>link[href='#{blog.base_url}/']", visible: false) end it 'has good title' do expect(response.body).to have_selector('title', text: 'test blog | test subtitle', visible: false) end end context 'when an article has an excerpt' do let!(:article) { create :article, excerpt: 'foo', body: 'bar' } it 'renders a continue reading link' do get :index aggregate_failures do expect(response.body).not_to have_text 'bar' expect(response.body).to have_text 'foo' expect(response.body). to have_text I18n.t!('articles.article_excerpt.continue_reading') end end end context 'when requesting archives for a month' do before do create(:article, published_at: Time.utc(2004, 4, 23)) get 'index', params: { year: 2004, month: 4 } end it 'has a canonical url' do expect(response.body).to have_selector("head>link[href='#{blog.base_url}/2004/4']", visible: false) end it 'has a good title' do expect(response.body).to have_selector('title', text: 'Archives for test blog', visible: false) end end end describe '#search' do let!(:user) { create :user } before do create(:article, body: "in markdown format\n\n * we\n * use\n [ok](http://blog.ok.com) to define a link", text_filter: create(:markdown)) create(:article, body: 'xyz') get :search, params: { q: 'a' } end it 'has good feed rss link' do expect(response.body).to have_selector('head>link[href="http://test.host/search/a.rss"]', visible: false) end it 'has good feed atom link' do expect(response.body).to have_selector('head>link[href="http://test.host/search/a.atom"]', visible: false) end it 'has a canonical url' do expect(response.body).to have_selector("head>link[href='#{blog.base_url}/search/a']", visible: false) end it 'has a good title' do expect(response.body).to have_selector('title', text: 'Results for a | test blog', visible: false) end it 'has content markdown interpret and without html tag' do expect(response.body).to have_selector('div') do |div| expect(div).to match(%{in markdown format * we * use [ok](http://blog.ok.com) to define a link}) end end end describe '#livesearch' do before do create(:article, body: 'hello world and im herer') create(:article, title: 'hello', body: 'worldwide') create(:article) get :live_search, params: { q: 'hello world' } end it 'does not have h3 tag' do expect(response.body).to have_selector('h3') end end describe '#archives' do context 'with several articles' do let!(:articles) { create_list :article, 3 } before 
do get 'archives' end it 'has the correct self-link and title' do expect(response.body). to have_selector("head>link[href='#{blog.base_url}/archives']", visible: false). and have_selector('title', text: 'Archives for test blog', visible: false) end it 'shows the current month only once' do expect(response.body). to have_css('h3', count: 1). and have_text I18n.l(articles.first.published_at, format: :letters_month_with_year) end end context 'with an article with tags' do it 'renders correctly' do create :article, keywords: 'foo, bar' get 'archives' expect(response.body).to have_text 'foo' expect(response.body).to have_text 'bar' end end end describe '#preview' do context 'with logged user' do let(:admin) { create(:user, :as_admin) } let(:article) { create(:article, user: admin) } before do sign_in admin end it 'renders the regular read template' do get :preview, params: { id: article.id } expect(response).to render_template('articles/read') end context 'when the article has an excerpt' do let(:article) { create :article, excerpt: 'foo', body: 'bar', user: admin } it 'does not render a continue reading link' do get :preview, params: { id: article.id } aggregate_failures do expect(response.body).to have_text 'bar' expect(response.body).not_to have_text 'foo' expect(response.body). not_to have_text I18n.t!('articles.article_excerpt.continue_reading') end end end end end describe '#check_password' do let!(:article) { create(:article, password: 'password') } it 'shows article when given correct password' do get :check_password, xhr: true, params: { article: { id: article.id, password: article.password } } expect(response.body).not_to have_selector('input[id="article_password"]') end it 'shows password form when given incorrect password' do get :check_password, xhr: true, params: { article: { id: article.id, password: 'wrong password' } } expect(response.body).to have_selector('input[id="article_password"]') end end end end end
34.601399
147
0.576597
260be2cb3bbe57bb9d10b5571cb56f58d4b54e40
5,998
require 'spec_helper' describe 'cis_hardening::services::special' do on_supported_os.each do |os, os_facts| context "on #{os}" do let(:facts) { os_facts } # Check for main class it { is_expected.to contain_class('cis_hardening::services::special') } # 2.2.1 - Time Synchronization # Ensure Time Synchronization is in use - Section 2.2.1.1 it { is_expected.to contain_package('ntp').with( 'ensure' => 'present', ) } # Ensure ntp is configured - Section 2.2.1.3 it { is_expected.to contain_file('/etc/ntp.conf').with( 'ensure' => 'present', 'owner' => 'root', 'group' => 'root', 'mode' => '0644', 'source' => 'puppet:///modules/cis_hardening/etc_ntp_conf', ).that_requires('Package[ntp]') } it { is_expected.to contain_file('/etc/sysconfig/ntpd').with( 'ensure' => 'present', 'owner' => 'root', 'group' => 'root', 'mode' => '0644', 'source' => 'puppet:///modules/cis_hardening/etc_sysconfig_ntpd', ).that_requires('File[/etc/ntp.conf]') } it { is_expected.to contain_file_line('ntp_options').with( 'ensure' => 'present', 'path' => '/usr/lib/systemd/system/ntpd.service', 'line' => 'ExecStart=/usr/sbin/ntpd -u ntp:ntp $OPTIONS', 'match' => '^ExecStart=/usr/sbin/ntpd -u ntp:ntp $OPTIONS', ) } # Ensure X Window System is not installed - Section 2.2.2 it { is_expected.to contain_package('xorg-x11-server-Xorg').with( 'ensure' => 'absent', ) } # Ensure Avahi Server is not installed - Section 2.2.3 it { is_expected.to contain_service('avahi-daemon').with( 'enable' => false, ) } it { is_expected.to contain_service('avahi-autoipd').with( 'ensure' => 'absent', ) } # Ensure CUPS is not installed - Section 2.2.4 it { is_expected.to contain_package('cups').with( 'ensure' => 'absent', ) } # Ensure DHCP Server is not installed - Section 2.2.5 it { is_expected.to contain_package('dhcp').with( 'ensure' => 'absent', ) } # Ensure LDAP Server is not installed - Section 2.2.6 it { is_expected.to contain_package('openldap-servers').with( 'ensure' => 'absent', ) } # Ensure nfs-utils is not installed or the nfs-server service is masked - Section 2.2.7 it { is_expected.to contain_package('nfs-utils').with( 'ensure' => 'absent', ) } # Ensure rpcbind is not installed or the rpcbind services are masked - Section 2.2.8 it { is_expected.to contain_package('rpcbind').with( 'ensure' => 'absent', ) } # Ensure DNS Server is not enabled - Section 2.2.9 # NOTE: the original check used an empty package title with an 'enable' parameter, which is not a valid package check; a service check is assumed here, with 'named' as the assumed DNS service name it { is_expected.to contain_service('named').with( 'enable' => false, ) } # Ensure FTP Server is not enabled - Section 2.2.9 it { is_expected.to contain_service('vsftpd').with( 'enable' => false, ) } # Ensure HTTP Server is not enabled - Section 2.2.10 it { is_expected.to contain_service('httpd').with( 'enable' => false, ) } # Ensure IMAP and POP3 Server are not enabled - Section 2.2.11 it { is_expected.to contain_service('dovecot').with( 'enable' => false, ) } # Ensure Samba is not enabled - Section 2.2.12 it { is_expected.to contain_service('smb').with( 'enable' => false, ) } # Ensure HTTP Proxy Server is not enabled - Section 2.2.13 it { is_expected.to contain_service('squid').with( 'enable' => false, ) } # Ensure SNMP Server is not enabled - Section 2.2.14 it { is_expected.to contain_service('snmpd').with( 'enable' => false, ) } # Ensure MTA is configured for local-only mode - Section 2.2.15 it { is_expected.to contain_file_line('smptp_local_only_mode').with( 'ensure' => 'present', 'path' => '/etc/postfix/main.cf', 'line' => 'inet_interfaces = loopback-only', 'match' => '^inet_interfaces\ =', ) } # Ensure NIS Server is not enabled - Section 2.2.16 it { is_expected.to
contain_service('ypserv').with( 'enable' => false, ) } # Ensure RSH Server is not enabled - Section 2.2.17 it { is_expected.to contain_service('rsh.socket').with( 'enable' => false, ) } it { is_expected.to contain_service('rlogin.socket').with( 'enable' => false, ) } it { is_expected.to contain_service('rexec.socket').with( 'enable' => false, ) } # Ensure Telnet Server is not enabled - Section 2.2.18 it { is_expected.to contain_service('telnet.socket').with( 'enable' => false, ) } # Ensure tftp Server is not enabled - Section 2.2.19 it { is_expected.to contain_service('tftp.socket').with( 'enable' => false, ) } # Ensure Rsync Service is not enabled - Section 2.2.20 it { is_expected.to contain_service('rsyncd').with( 'enable' => false, ) } # Ensure Talk server is not enabled - Section 2.2.21 it { is_expected.to contain_service('ntalk').with( 'enable' => false, ) } # Ensure manifest compiles with all dependencies it { is_expected.to compile.with_all_deps } end end end
26.539823
93
0.526676
3303e1646b2ec82884cbaaa2f114cab9a9c8fa2a
5,258
# This file was generated by the `rspec --init` command. Conventionally, all # specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`. # The generated `.rspec` file contains `--require spec_helper` which will cause # this file to always be loaded, without a need to explicitly require it in any # files. # # Given that it is always loaded, you are encouraged to keep this file as # light-weight as possible. Requiring heavyweight dependencies from this file # will add to the boot time of your test suite on EVERY test run, even for an # individual file that may not need all of that loaded. Instead, consider making # a separate helper file that requires the additional dependencies and performs # the additional setup, and require it from the spec files that actually need # it. # # See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration require 'vcr' VCR.configure do |config| config.cassette_library_dir = File.join(File.dirname(__FILE__), "vcr_cassettes") config.hook_into :webmock config.configure_rspec_metadata! end RSpec.configure do |config| # rspec-expectations config goes here. You can use an alternate # assertion/expectation library such as wrong or the stdlib/minitest # assertions if you prefer. config.expect_with :rspec do |expectations| # This option will default to `true` in RSpec 4. It makes the `description` # and `failure_message` of custom matchers include text for helper methods # defined using `chain`, e.g.: # be_bigger_than(2).and_smaller_than(4).description # # => "be bigger than 2 and smaller than 4" # ...rather than: # # => "be bigger than 2" expectations.include_chain_clauses_in_custom_matcher_descriptions = true end # rspec-mocks config goes here. You can use an alternate test double # library (such as bogus or mocha) by changing the `mock_with` option here. config.mock_with :rspec do |mocks| # Prevents you from mocking or stubbing a method that does not exist on # a real object. This is generally recommended, and will default to # `true` in RSpec 4. mocks.verify_partial_doubles = true end # This option will default to `:apply_to_host_groups` in RSpec 4 (and will # have no way to turn it off -- the option exists only for backwards # compatibility in RSpec 3). It causes shared context metadata to be # inherited by the metadata hash of host groups and examples, rather than # triggering implicit auto-inclusion in groups with matching metadata. config.shared_context_metadata_behavior = :apply_to_host_groups # The settings below are suggested to provide a good initial experience # with RSpec, but feel free to customize to your heart's content. =begin # This allows you to limit a spec run to individual examples or groups # you care about by tagging them with `:focus` metadata. When nothing # is tagged with `:focus`, all examples get run. RSpec also provides # aliases for `it`, `describe`, and `context` that include `:focus` # metadata: `fit`, `fdescribe` and `fcontext`, respectively. config.filter_run_when_matching :focus # Allows RSpec to persist some state between runs in order to support # the `--only-failures` and `--next-failure` CLI options. We recommend # you configure your source control system to ignore this file. config.example_status_persistence_file_path = "spec/examples.txt" # Limits the available syntax to the non-monkey patched syntax that is # recommended. 
For more details, see: # - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/ # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/ # - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode config.disable_monkey_patching! # This setting enables warnings. It's recommended, but in some cases may # be too noisy due to issues in dependencies. config.warnings = true # Many RSpec users commonly either run the entire suite or an individual # file, and it's useful to allow more verbose output when running an # individual spec file. if config.files_to_run.one? # Use the documentation formatter for detailed output, # unless a formatter has already been configured # (e.g. via a command-line flag). config.default_formatter = "doc" end # Print the 10 slowest examples and example groups at the # end of the spec run, to help surface which specs are running # particularly slow. config.profile_examples = 10 # Run specs in random order to surface order dependencies. If you find an # order dependency and want to debug it, you can fix the order by providing # the seed, which is printed after each run. # --seed 1234 config.order = :random # Seed global randomization in this process using the `--seed` CLI option. # Setting this allows you to use `--seed` to deterministically reproduce # test failures related to randomization by passing the same `--seed` value # as the one that triggered the failure. Kernel.srand config.seed =end end Dir[File.join(File.dirname(__FILE__), "support", "*.rb")].sort.each { |f| require f } require 'elasticsearch' require 'elasticsearch/dsl' include Elasticsearch::DSL
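# With `configure_rspec_metadata!` enabled above, examples can opt into cassette
# recording via metadata. A minimal sketch (the spec wording and client call are
# illustrative, not part of this suite):
#
#   RSpec.describe 'search requests', :vcr do
#     it 'records and replays the HTTP interaction' do
#       client = Elasticsearch::Client.new
#       client.ping
#     end
#   end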
43.816667
92
0.743248
e2405f39d3b4926ec51f822b8d76445b930e53ef
967
# frozen_string_literal: true require 'spec_helper' RSpec.describe 'Revoking a DAST Site Validation' do include GraphqlHelpers let_it_be(:project) { create(:project) } let_it_be(:current_user) { create(:user) } let_it_be(:dast_site_token) { create(:dast_site_token, project: project) } let_it_be(:dast_site_validation) { create(:dast_site_validation, state: :passed, dast_site_token: dast_site_token) } let(:mutation_name) { :dast_site_validation_revoke } let(:mutation) do graphql_mutation( mutation_name, full_path: project.full_path, normalized_target_url: dast_site_validation.url_base ) end it_behaves_like 'an on-demand scan mutation when user cannot run an on-demand scan' it_behaves_like 'an on-demand scan mutation when user can run an on-demand scan' do it 'deletes dast_site_validations where state=passed' do expect { subject }.to change { DastSiteValidation.count }.from(1).to(0) end end end
31.193548
117
0.746639
e29e796cfe64f3c6ee0f75eddd0582275d40cc61
1,491
# helper methods for rspec # These particular methods pertain to using the bib2lod converter code # from the LD4L-Labs project, at https://github.com/ld4l-labs/bib2lod # Pre-Reqs: # 1. https://github.com/ld4l-labs/bib2lod must be cloned # 2. location of the cloned repo must be in BIB2LOD_PATH module Helpers BIB2LOD_PATH = CONFIG_SETTINGS['bib2lod_path'] BIB2LOD_CONFIG = CONFIG_SETTINGS['bib2lod_config'] # given a marc record as a String containing marcxml, and a name to use for the temporary output files # run the marc record through the converter and return the result as an RDF::Graph object # @param [String] marcxml_str an xml representation of a MARC record # @param [String] fname the name to assign to the marcxml and ntriples files in the tmp directory # @return [RDF::Graph] loaded graph object from the converter for the marc record passed in def marc_to_graph_bib2lod(marcxml_str, fname) ensure_marc_parses(marcxml_str) marc_path = create_marcxml_file(marcxml_str, fname) ntriples_path = create_ntriples_via_bib2lod(marc_path) load_graph_from_ntriples(ntriples_path) end # Call the bib2lod converter code. # @param [String] marc_path the path of the marcxml file # @return [String] the path of the ntriples file created def create_ntriples_via_bib2lod(marc_path) output_file = marc_path.gsub('marcxml', 'nt') command = "java -jar #{BIB2LOD_PATH}/bib2lod.jar -c #{BIB2LOD_CONFIG}" `#{command}` output_file end end
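# Hypothetical usage from a spec (fixture path and expectation are assumptions;
# the helper only needs valid MARCXML and a basename for the temporary files):
#
#   RSpec.describe 'bib2lod conversion' do
#     include Helpers
#
#     it 'yields an RDF::Graph for a MARC record' do
#       marcxml = File.read('spec/fixtures/record.marcxml')
#       graph = marc_to_graph_bib2lod(marcxml, 'record')
#       expect(graph).to be_a(RDF::Graph)
#     end
#   end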
42.6
104
0.754527
1a358cf8d54541165068909bada456ae59a51d54
903
Pod::Spec.new do |s| s.name = 'SBJson' s.version = '3.1' s.license = 'BSD' s.summary = 'This library implements strict JSON parsing and generation in Objective-C.' s.deprecated_in_favor_of = 'SBJson4' s.description = <<-DESC All versions of SBJson prior to version 4 are deprecated. Please upgrade to SBJson4. Alternatively, if you don't need to work with JSON streams you should probably just use NSJSONSerialisation. Don't fight the platform when you don't have to. DESC s.homepage = 'http://sbjson.org' s.source = { :git => 'https://github.com/stig/json-framework.git', :tag => 'v3.1' } s.author = { 'Stig Brautaset' => '[email protected]' } s.social_media_url = "http://twitter.com/stigbra" s.requires_arc = true s.source_files = 'Classes' end
39.26087
91
0.606866
bb233b0bb23268736e0c56fc922ed2e45be60441
441
define :nginx_site_for do user = app_name = params[:name] conf_path = "/home/#{user}/production/shared/config/nginx.production.conf" template conf_path do source 'nginx.conf.erb' mode 0775 owner user group user action :create_if_missing variables app_name: app_name cookbook '__nginx' end link "/usr/local/nginx/sites-enabled/#{app_name}" do to conf_path only_if { File.exist?(conf_path) } end end
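# Hypothetical invocation from a recipe (the app name is illustrative; the
# definition derives the deploy user and config path from it):
#
#   nginx_site_for 'myapp'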
22.05
76
0.693878