hexsha: stringlengths 40 to 40
size: int64, 2 to 1.01M
content: stringlengths 2 to 1.01M
avg_line_length: float64, 1.5 to 100
max_line_length: int64, 2 to 1k
alphanum_fraction: float64, 0.25 to 1
e8e31be1f36e87f8db80d0ed974d18420db7ee65
3,024
class SipOctaveApp < Formula
  desc "Tool to create Python bindings for C and C++ libraries"
  homepage "https://www.riverbankcomputing.com/software/sip/intro"
  url "https://dl.bintray.com/homebrew/mirror/sip-4.19.8.tar.gz"
  mirror "https://downloads.sourceforge.net/project/pyqt/sip/sip-4.19.8/sip-4.19.8.tar.gz"
  sha256 "7eaf7a2ea7d4d38a56dd6d2506574464bddf7cf284c960801679942377c297bc"
  revision 3
  head "https://www.riverbankcomputing.com/hg/sip", :using => :hg

  keg_only "conflicts with regular sip"

  depends_on "python" => :optional
  depends_on "python2" => :optional

  def install
    ENV.prepend_path "PATH", Formula["python2"].opt_libexec/"bin"

    if build.head?
      # Link the Mercurial repository into the download directory so
      # build.py can use it to figure out a version number.
      ln_s cached_download/".hg", ".hg"
      # build.py doesn't run with python3
      system "python", "build.py", "prepare"
    end

    Language::Python.each_python(build) do |python, version|
      ENV.delete("SDKROOT") # Avoid picking up /Application/Xcode.app paths
      system python, "configure.py",
                     "--deployment-target=#{MacOS.version}",
                     "--destdir=#{lib}/python#{version}/site-packages",
                     "--bindir=#{bin}",
                     "--incdir=#{include}",
                     "--sipdir=#{HOMEBREW_PREFIX}/share/sip"
      system "make"
      system "make", "install"
      system "make", "clean"
    end
  end

  def post_install
    (HOMEBREW_PREFIX/"share/sip").mkpath
  end

  def caveats; <<~EOS
    The sip-dir for Python is #{HOMEBREW_PREFIX}/share/sip.
  EOS
  end

  test do
    (testpath/"test.h").write <<~EOS
      #pragma once
      class Test {
      public:
        Test();
        void test();
      };
    EOS
    (testpath/"test.cpp").write <<~EOS
      #include "test.h"
      #include <iostream>
      Test::Test() {}
      void Test::test()
      {
        std::cout << "Hello World!" << std::endl;
      }
    EOS
    (testpath/"test.sip").write <<~EOS
      %Module test
      class Test {
      %TypeHeaderCode
      #include "test.h"
      %End
      public:
        Test();
        void test();
      };
    EOS
    (testpath/"generate.py").write <<~EOS
      from sipconfig import SIPModuleMakefile, Configuration
      m = SIPModuleMakefile(Configuration(), "test.build")
      m.extra_libs = ["test"]
      m.extra_lib_dirs = ["."]
      m.generate()
    EOS
    (testpath/"run.py").write <<~EOS
      from test import Test
      t = Test()
      t.test()
    EOS
    system ENV.cxx, "-shared", "-Wl,-install_name,#{testpath}/libtest.dylib",
                    "-o", "libtest.dylib", "test.cpp"
    system bin/"sip", "-b", "test.build", "-c", ".", "test.sip"
    Language::Python.each_python(build) do |python, version|
      ENV["PYTHONPATH"] = lib/"python#{version}/site-packages"
      system python, "generate.py"
      system "make", "-j1", "clean", "all"
      system python, "run.py"
    end
  end
end
29.940594
90
0.593254
d568a98a6fb830fcddf7611f2d084a72001a1e01
1,020
class BoardsController < ApplicationController
  before_action :set_board, only: [:show, :update, :destroy]

  # GET /boards
  def index
    @boards = Board.all

    render json: @boards
  end

  # GET /boards/1
  def show
    render json: @board
  end

  # POST /boards
  def create
    @board = Board.new(board_params)

    if @board.save
      render json: @board, status: :created, location: @board
    else
      render json: @board.errors, status: :unprocessable_entity
    end
  end

  # PATCH/PUT /boards/1
  def update
    if @board.update(board_params)
      render json: @board
    else
      render json: @board.errors, status: :unprocessable_entity
    end
  end

  # DELETE /boards/1
  def destroy
    @board.destroy
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_board
    @board = Board.find(params[:id])
  end

  # Only allow a list of trusted parameters through.
  def board_params
    params.require(:board).permit(:title)
  end
end
19.615385
73
0.659804
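The controller in the row above assumes a standard RESTful route for boards. The following is a minimal, hypothetical config/routes.rb sketch inferred from the controller's own REST comments (GET /boards, GET /boards/1, POST /boards, PATCH/PUT /boards/1, DELETE /boards/1); it is not part of the dataset row.

# Minimal routes sketch matching the actions defined in BoardsController above.
# The :boards resource name is implied by the controller class name.
Rails.application.routes.draw do
  resources :boards, only: [:index, :show, :create, :update, :destroy]
end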
e2fa8bcea6d06c6c3bca77811eb462ee21726f79
7,068
# encoding: UTF-8 # frozen_string_literal: true describe API::V2::Account::Deposits, type: :request do let(:member) { create(:member, :level_3) } let(:other_member) { create(:member, :level_3) } let(:token) { jwt_for(member) } let(:level_0_member) { create(:member, :level_0) } let(:level_0_member_token) { jwt_for(level_0_member) } describe 'GET /api/v2/account/deposits' do before do create(:deposit_btc, member: member) create(:deposit_usd, member: member) create(:deposit_usd, member: member, txid: 1, amount: 520) create(:deposit_btc, member: member, created_at: 2.day.ago, txid: 'test', amount: 111) create(:deposit_usd, member: other_member, txid: 10) end it 'requires authentication' do api_get '/api/v2/account/deposits' expect(response.code).to eq '401' end it 'returns with auth token deposits' do api_get '/api/v2/account/deposits', token: token expect(response).to be_successful end it 'returns all deposits num' do api_get '/api/v2/account/deposits', token: token result = JSON.parse(response.body) expect(result.size).to eq 4 expect(response.headers.fetch('Total')).to eq '4' end it 'returns limited deposits' do api_get '/api/v2/account/deposits', params: { limit: 2, page: 1 }, token: token result = JSON.parse(response.body) expect(result.size).to eq 2 expect(response.headers.fetch('Total')).to eq '4' api_get '/api/v2/account/deposits', params: { limit: 1, page: 2 }, token: token result = JSON.parse(response.body) expect(result.size).to eq 1 expect(response.headers.fetch('Total')).to eq '4' end it 'filters deposits by state' do api_get '/api/v2/account/deposits', params: { state: 'canceled' }, token: token result = JSON.parse(response.body) expect(result.size).to eq 0 d = create(:deposit_btc, member: member, aasm_state: :canceled) api_get '/api/v2/account/deposits', params: { state: 'canceled' }, token: token result = JSON.parse(response.body) expect(result.size).to eq 1 expect(result.first['txid']).to eq d.txid end it 'filters deposits by multiple states' do create(:deposit_btc, member: member, aasm_state: :rejected) api_get '/api/v2/account/deposits', params: { state: ['canceled', 'rejected'] }, token: token result = JSON.parse(response.body) expect(result.size).to eq 1 create(:deposit_btc, member: member, aasm_state: :canceled) api_get '/api/v2/account/deposits', params: { state: ['canceled', 'rejected'] }, token: token result = JSON.parse(response.body) expect(result.size).to eq 2 end it 'returns deposits for currency usd' do api_get '/api/v2/account/deposits', params: { currency: 'usd' }, token: token result = JSON.parse(response.body) expect(result.size).to eq 2 expect(result.all? { |d| d['currency'] == 'usd' }).to be_truthy end it 'returns deposits with txid filter' do api_get '/api/v2/account/deposits', params: { txid: Deposit.first.txid }, token: token result = JSON.parse(response.body) expect(result.size).to eq 1 expect(result.all? { |d| d['txid'] == Deposit.first.txid }).to be_truthy end it 'returns deposits for currency btc' do api_get '/api/v2/account/deposits', params: { currency: 'btc' }, token: token result = JSON.parse(response.body) expect(response.headers.fetch('Total')).to eq '2' expect(result.all? 
{ |d| d['currency'] == 'btc' }).to be_truthy end it 'return 404 if txid not exist' do api_get '/api/v2/account/deposits/5', token: token expect(response.code).to eq '404' expect(response).to include_api_error('record.not_found') end it 'returns 404 if txid not belongs_to you ' do api_get '/api/v2/account/deposits/10', token: token expect(response.code).to eq '404' expect(response).to include_api_error('record.not_found') end it 'returns deposit txid if exist' do api_get '/api/v2/account/deposits/1', token: token result = JSON.parse(response.body) expect(response.code).to eq '200' expect(result['amount']).to eq '520.0' end it 'returns deposit no time limit ' do api_get '/api/v2/account/deposits/test', token: token result = JSON.parse(response.body) expect(response.code).to eq '200' expect(result['amount']).to eq '111.0' end it 'denies access to unverified member' do api_get '/api/v2/account/deposits', token: level_0_member_token expect(response.code).to eq '403' expect(response).to include_api_error('account.deposit.not_permitted') end end describe 'GET /api/v2/account/deposit_address/:currency' do let(:currency) { :bch } context 'failed' do it 'validates currency' do api_get "/api/v2/account/deposit_address/dildocoin", token: token expect(response).to have_http_status 422 expect(response).to include_api_error('account.currency.doesnt_exist') end it 'validates currency address format' do api_get '/api/v2/account/deposit_address/btc', params: { address_format: 'cash' }, token: token expect(response).to have_http_status 422 expect(response).to include_api_error('account.deposit_address.doesnt_support_cash_address_format') end it 'validates currency with address_format param' do api_get '/api/v2/account/deposit_address/abc', params: { address_format: 'cash' }, token: token expect(response).to have_http_status 422 expect(response).to include_api_error('account.currency.doesnt_exist') end end context 'successful' do context 'eth address' do let(:currency) { :eth } let(:wallet) { Wallet.joins(:currencies).find_by(currencies: { id: currency }) } before { member.payment_address(wallet.id).update!(address: '2N2wNXrdo4oEngp498XGnGCbru29MycHogR') } it 'expose data about eth address' do api_get "/api/v2/account/deposit_address/#{currency}", token: token expect(response.body).to eq '{"currencies":["eth"],"address":"2n2wnxrdo4oengp498xgngcbru29mychogr","state":"active"}' end it 'pending user address state' do member.payment_address(wallet.id).update!(address: nil) api_get "/api/v2/account/deposit_address/#{currency}", token: token expect(response.body).to eq '{"currencies":["eth"],"address":null,"state":"pending"}' end end end context 'disabled deposit for currency' do let(:currency) { :btc } before { Currency.find(currency).update!(deposit_enabled: false) } it 'returns error' do api_get "/api/v2/account/deposit_address/#{currency}", token: token expect(response).to have_http_status 422 expect(response).to include_api_error('account.currency.deposit_disabled') end end end end
36.246154
127
0.659451
abb81922d6bbd0c0abf4fbe4dd75a19ece109b16
552
Pod::Spec.new do |s|
  s.name         = "VAProgressCircle"
  s.version      = "0.0.9"
  s.summary      = "A custom loading bar for iOS"
  s.homepage     = "https://github.com/MitchellMalleo/VAProgressCircle"
  s.license      = 'MIT'
  s.author       = { "Mitch Malleo" => "[email protected]" }
  s.source       = { :git => "https://github.com/MitchellMalleo/VAProgressCircle.git", :tag => s.version.to_s }

  s.platform     = :ios, '5.0'
  s.requires_arc = true

  s.source_files = 'Classes', 'Classes/**/*.{h,m}'
end
39.428571
115
0.570652
01a0005917ef18b7c283552f3fa263478be5bfa5
320
cask 'checkra1n' do
  version '0.9.3'
  sha256 'f7a16ee122a6d7c8c9a0429db1a0b8fb4ecc3543684713eda951f19b30c02c11'

  url "https://checkra.in/assets/downloads/macos/#{sha256}/checkra1n%20beta%20#{version}.dmg"
  appcast 'https://checkra.in/'
  name 'checkra1n'
  homepage 'https://checkra.in/'

  app 'checkra1n.app'
end
26.666667
93
0.746875
62908f3e22263c3e77b5a4c0477ed10234898806
1,361
module Chronicle
  module ETL
    module Registry
      # Records details about a connector such as its provider and a description
      class ConnectorRegistration
        attr_accessor :identifier, :provider, :klass, :description

        def initialize(klass)
          @klass = klass
        end

        def phase
          if klass.ancestors.include? Chronicle::ETL::Extractor
            :extractor
          elsif klass.ancestors.include? Chronicle::ETL::Transformer
            :transformer
          elsif klass.ancestors.include? Chronicle::ETL::Loader
            :loader
          end
        end

        def to_s
          "#{phase}-#{identifier}"
        end

        def built_in?
          @klass.to_s.include? 'Chronicle::ETL'
        end

        def klass_name
          @klass.to_s
        end

        def identifier
          @identifier || @klass.to_s.split('::').last.gsub!(/(Extractor$|Loader$|Transformer$)/, '').downcase
        end

        def description
          @description || @klass.to_s.split('::').last
        end

        def descriptive_phrase
          prefix = case phase
                   when :extractor
                     "Extracts from"
                   when :transformer
                     "Transforms"
                   when :loader
                     "Loads to"
                   end
          "#{prefix} #{description}"
        end
      end
    end
  end
end
23.465517
109
0.542248
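The registration class in the row above can be exercised directly. Below is a minimal, hypothetical usage sketch, assuming the chronicle-etl gem's base classes are loaded; the FooExtractor class is invented purely for illustration and shows how the phase, identifier, and descriptive phrase are derived from the wrapped class name.

# Hypothetical connector class, used only to illustrate ConnectorRegistration above.
class FooExtractor < Chronicle::ETL::Extractor; end

registration = Chronicle::ETL::Registry::ConnectorRegistration.new(FooExtractor)
registration.phase              # => :extractor ("FooExtractor" inherits from Chronicle::ETL::Extractor)
registration.identifier         # => "foo" (class name with the Extractor suffix stripped, downcased)
registration.descriptive_phrase # => "Extracts from FooExtractor"
registration.to_s               # => "extractor-foo"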
ed12e215cf92702aa22f54d18acc2f7db419ac83
898
class Admin::PostCategoriesController < AdminController
  include CrudConcern
  before_action :find_post_category, only: [:edit, :update, :destroy]
  before_action :get_locales, only: [:edit, :create, :new]

  def index
    # @post_categories = PostCategory.all
    @post_categories = index_helper("PostCategory")
  end

  def new
    @post_category = PostCategory.new
  end

  def create
    @post_category = PostCategory.new(post_category_params)
    create_helper(@post_category, "edit_admin_post_category_path")
  end

  def update
    update_helper(@post_category, "edit_admin_post_category_path", post_category_params)
  end

  def edit
  end

  def destroy
    destroy_helper(@post_category, "admin_post_categories_path")
  end

  private

  def find_post_category
    @post_category = PostCategory.find(params[:id])
  end

  def post_category_params
    params.require(:post_category).permit(:title, :body, :slug)
  end
end
20.409091
86
0.776169
017b98e4976e9ab544aa676f0138aac2017697e9
1,572
require 'active_record_date_formatted/date_format_validator'

# Creates additional instance getter and setter methods for each date attribute with postfix _formatted
# These methods use date string formatted with the current locale.
module ActiveRecordDateFormatted
  module Model
    def inherited(subclass)
      super
      subclass.add_date_formatted_methods unless subclass == ActiveRecord::SchemaMigration || subclass.to_s.ends_with?('Temp') # todo nasty bugfix for temporary migration classes with custom table names
    end

    def add_date_formatted_methods
      self.column_types.each do |attr_name, c|
        if c.type == :date
          attr_accessor "#{attr_name}_formatted"
          validates "#{attr_name}_formatted", "active_record_date_formatted/date_format" => true
          before_save "save_formatted_#{attr_name}"

          define_method "#{attr_name}_formatted" do
            if instance_variable_get("@#{attr_name}_formatted").nil?
              date_value = read_attribute(attr_name)
              date_value.nil? ? nil : date_value.strftime(I18n.t "date.formats.default")
            else
              instance_variable_get("@#{attr_name}_formatted")
            end
          end

          define_method "save_formatted_#{attr_name}" do
            write_attribute(attr_name, self.send("#{attr_name}_formatted").blank? ? nil : Date.strptime(self.send("#{attr_name}_formatted"), I18n.t("date.formats.default")))
          end
        end
      end
    end
  end
end

ActiveRecord::Base.extend ActiveRecordDateFormatted::Model
39.3
202
0.693384
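As a rough illustration of what the module in the row above generates, here is a hypothetical model sketch; the Invoice class, its due_on date column, and the "%d.%m.%Y" locale format are assumptions made for the example, not part of the gem.

# Hypothetical ActiveRecord model with a date column named :due_on.
# Because ActiveRecord::Base is extended with ActiveRecordDateFormatted::Model,
# inheriting from it adds a due_on_formatted getter/setter plus a before_save hook.
class Invoice < ActiveRecord::Base
end

# Assuming I18n.t("date.formats.default") returns "%d.%m.%Y" for the current locale:
invoice = Invoice.new(due_on: Date.new(2020, 1, 31))
invoice.due_on_formatted          # => "31.01.2020" (date column rendered with the locale format)

invoice.due_on_formatted = "15.02.2020"
invoice.save                      # before_save parses the string back into the due_on column
invoice.due_on                    # => Sat, 15 Feb 2020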
d52bf999f3337ce343d339e0695a5b29d5e895a0
975
namespace :static_resources do
  desc 'Sync public/{packs,assets} to s3'
  task sync_s3: :environment do
    require 'static_resources_rails/storage'

    ['assets', *StaticResourcesRails.additional_sync_dirs].each do |dir|
      StaticResourcesRails::Storage.sync(dir)
    end
  end

  desc 'Download manifest.json'
  task download_manifest: :environment do
    unless Rails.application.config.assets.manifest
      raise StaticResourcesRails::ManifestError, 'config.assets.manifest is blank!'
    end

    manifest_files = ["assets/#{StaticResourcesRails.sprockets_manifest_filename}", *StaticResourcesRails.additional_manifest_files]

    manifest_files.each do |manifest_file|
      download_url = "https://#{Rails.application.config.action_controller.asset_host}/#{manifest_file}"
      file_path = Rails.public_path.join(manifest_file)
      file_path.parent.mkdir unless file_path.parent.exist?
      IO.write(file_path, URI.open(download_url).read)
    end
  end
end
36.111111
132
0.757949
01174a4b72a21425e5caca5e144cf69736a0df58
201
Group.seed(:id, [
  { id: 99,  name: "GitLab",    path: 'gitlab',    owner_id: 1 },
  { id: 100, name: "Brightbox", path: 'brightbox', owner_id: 1 },
  { id: 101, name: "KDE",       path: 'kde',       owner_id: 1 },
])
33.5
65
0.557214
f7240969588ddf196768fd9fbd0c7baa664166fc
49,805
require 'spec_helper' describe NotificationService, services: true do include EmailHelpers let(:notification) { NotificationService.new } around(:each) do |example| perform_enqueued_jobs do example.run end end shared_examples 'notifications for new mentions' do def send_notifications(*new_mentions) reset_delivered_emails! notification.send(notification_method, mentionable, new_mentions, @u_disabled) end it 'sends no emails when no new mentions are present' do send_notifications should_not_email_anyone end it 'emails new mentions with a watch level higher than participant' do send_notifications(@u_watcher, @u_participant_mentioned, @u_custom_global) should_only_email(@u_watcher, @u_participant_mentioned, @u_custom_global) end it 'does not email new mentions with a watch level equal to or less than participant' do send_notifications(@u_participating, @u_mentioned) should_not_email_anyone end end # Next shared examples are intended to test notifications of "participants" # # they take the following parameters: # * issuable # * notification trigger # * participant # shared_examples 'participating by note notification' do it 'emails the participant' do create(:note_on_issue, noteable: issuable, project_id: project.id, note: 'anything', author: participant) notification_trigger should_email(participant) end end shared_examples 'participating by assignee notification' do it 'emails the participant' do issuable.update_attribute(:assignee, participant) notification_trigger should_email(participant) end end shared_examples 'participating by author notification' do it 'emails the participant' do issuable.author = participant notification_trigger should_email(participant) end end shared_examples_for 'participating notifications' do it_should_behave_like 'participating by note notification' it_should_behave_like 'participating by author notification' it_should_behave_like 'participating by assignee notification' end describe 'Keys' do describe '#new_key' do let!(:key) { create(:personal_key) } it { expect(notification.new_key(key)).to be_truthy } it 'sends email to key owner' do expect{ notification.new_key(key) }.to change{ ActionMailer::Base.deliveries.size }.by(1) end end end describe 'Email' do describe '#new_email' do let!(:email) { create(:email) } it { expect(notification.new_email(email)).to be_truthy } it 'sends email to email owner' do expect{ notification.new_email(email) }.to change{ ActionMailer::Base.deliveries.size }.by(1) end end end describe 'Notes' do context 'issue note' do let(:project) { create(:empty_project, :private) } let(:issue) { create(:issue, project: project, assignee: create(:user)) } let(:mentioned_issue) { create(:issue, assignee: issue.assignee) } let(:note) { create(:note_on_issue, noteable: issue, project_id: issue.project_id, note: '@mention referenced, @outsider also') } before do build_team(note.project) project.add_master(issue.author) project.add_master(issue.assignee) project.add_master(note.author) create(:note_on_issue, noteable: issue, project_id: issue.project_id, note: '@subscribed_participant cc this guy') update_custom_notification(:new_note, @u_guest_custom, project) update_custom_notification(:new_note, @u_custom_global) end describe '#new_note' do it do add_users_with_subscription(note.project, issue) # Ensure create SentNotification by noteable = issue 6 times, not noteable = note expect(SentNotification).to receive(:record).with(issue, any_args).exactly(8).times reset_delivered_emails! 
notification.new_note(note) should_email(@u_watcher) should_email(note.noteable.author) should_email(note.noteable.assignee) should_email(@u_custom_global) should_email(@u_mentioned) should_email(@subscriber) should_email(@watcher_and_subscriber) should_email(@subscribed_participant) should_not_email(@u_guest_custom) should_not_email(@u_guest_watcher) should_not_email(note.author) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@unsubscriber) should_not_email(@u_outsider_mentioned) should_not_email(@u_lazy_participant) end it 'filters out "mentioned in" notes' do mentioned_note = SystemNoteService.cross_reference(mentioned_issue, issue, issue.author) expect(Notify).not_to receive(:note_issue_email) notification.new_note(mentioned_note) end context 'participating' do context 'by note' do before do reset_delivered_emails! note.author = @u_lazy_participant note.save notification.new_note(note) end it { should_not_email(@u_lazy_participant) } end end end describe 'new note on issue in project that belongs to a group' do let(:group) { create(:group) } before do note.project.namespace_id = group.id note.project.group.add_user(@u_watcher, GroupMember::MASTER) note.project.group.add_user(@u_custom_global, GroupMember::MASTER) note.project.save @u_watcher.notification_settings_for(note.project).participating! @u_watcher.notification_settings_for(note.project.group).global! update_custom_notification(:new_note, @u_custom_global) reset_delivered_emails! end it do notification.new_note(note) should_email(note.noteable.author) should_email(note.noteable.assignee) should_email(@u_mentioned) should_email(@u_custom_global) should_not_email(@u_guest_custom) should_not_email(@u_guest_watcher) should_not_email(@u_watcher) should_not_email(note.author) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end end end context 'confidential issue note' do let(:project) { create(:empty_project, :public) } let(:author) { create(:user) } let(:assignee) { create(:user) } let(:non_member) { create(:user) } let(:member) { create(:user) } let(:guest) { create(:user) } let(:admin) { create(:admin) } let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignee: assignee) } let(:note) { create(:note_on_issue, noteable: confidential_issue, project: project, note: "#{author.to_reference} #{assignee.to_reference} #{non_member.to_reference} #{member.to_reference} #{admin.to_reference}") } let(:guest_watcher) { create_user_with_notification(:watch, "guest-watcher-confidential") } it 'filters out users that can not read the issue' do project.add_developer(member) project.add_guest(guest) expect(SentNotification).to receive(:record).with(confidential_issue, any_args).exactly(4).times reset_delivered_emails! notification.new_note(note) should_not_email(non_member) should_not_email(guest) should_not_email(guest_watcher) should_email(author) should_email(assignee) should_email(member) should_email(admin) end end context 'issue note mention' do let(:project) { create(:empty_project, :public) } let(:issue) { create(:issue, project: project, assignee: create(:user)) } let(:mentioned_issue) { create(:issue, assignee: issue.assignee) } let(:note) { create(:note_on_issue, noteable: issue, project_id: issue.project_id, note: '@all mentioned') } before do build_team(note.project) note.project.add_master(note.author) reset_delivered_emails! 
end describe '#new_note' do it 'notifies the team members' do notification.new_note(note) # Notify all team members note.project.team.members.each do |member| # User with disabled notification should not be notified next if member.id == @u_disabled.id # Author should not be notified next if member.id == note.author.id should_email(member) end should_email(@u_guest_watcher) should_email(note.noteable.author) should_email(note.noteable.assignee) should_not_email(note.author) should_email(@u_mentioned) should_not_email(@u_disabled) should_email(@u_not_mentioned) end it 'filters out "mentioned in" notes' do mentioned_note = SystemNoteService.cross_reference(mentioned_issue, issue, issue.author) expect(Notify).not_to receive(:note_issue_email) notification.new_note(mentioned_note) end end end context 'project snippet note' do let(:project) { create(:empty_project, :public) } let(:snippet) { create(:project_snippet, project: project, author: create(:user)) } let(:note) { create(:note_on_project_snippet, noteable: snippet, project_id: snippet.project.id, note: '@all mentioned') } before do build_team(note.project) note.project.add_master(note.author) reset_delivered_emails! end describe '#new_note' do it 'notifies the team members' do notification.new_note(note) # Notify all team members note.project.team.members.each do |member| # User with disabled notification should not be notified next if member.id == @u_disabled.id # Author should not be notified next if member.id == note.author.id should_email(member) end # it emails custom global users on mention should_email(@u_custom_global) should_email(@u_guest_watcher) should_email(note.noteable.author) should_not_email(note.author) should_email(@u_mentioned) should_not_email(@u_disabled) should_email(@u_not_mentioned) end end end context 'personal snippet note' do let(:snippet) { create(:personal_snippet, :public, author: @u_snippet_author) } let(:note) { create(:note_on_personal_snippet, noteable: snippet, note: '@mentioned note', author: @u_note_author) } before do @u_watcher = create_global_setting_for(create(:user), :watch) @u_participant = create_global_setting_for(create(:user), :participating) @u_disabled = create_global_setting_for(create(:user), :disabled) @u_mentioned = create_global_setting_for(create(:user, username: 'mentioned'), :mention) @u_mentioned_level = create_global_setting_for(create(:user, username: 'participator'), :mention) @u_note_author = create(:user, username: 'note_author') @u_snippet_author = create(:user, username: 'snippet_author') @u_not_mentioned = create_global_setting_for(create(:user, username: 'regular'), :participating) reset_delivered_emails! 
end let!(:notes) do [ create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_watcher), create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_participant), create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_mentioned), create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_disabled), create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_note_author), ] end describe '#new_note' do it 'notifies the participants' do notification.new_note(note) # it emails participants should_email(@u_watcher) should_email(@u_participant) should_email(@u_watcher) should_email(@u_snippet_author) # it emails mentioned users should_email(@u_mentioned) # it does not email participants with mention notification level should_not_email(@u_mentioned_level) # it does not email note author should_not_email(@u_note_author) end end end context 'commit note' do let(:project) { create(:project, :public) } let(:note) { create(:note_on_commit, project: project) } before do build_team(note.project) reset_delivered_emails! allow_any_instance_of(Commit).to receive(:author).and_return(@u_committer) update_custom_notification(:new_note, @u_guest_custom, project) update_custom_notification(:new_note, @u_custom_global) end describe '#new_note, #perform_enqueued_jobs' do it do notification.new_note(note) should_email(@u_guest_watcher) should_email(@u_custom_global) should_email(@u_guest_custom) should_email(@u_committer) should_email(@u_watcher) should_not_email(@u_mentioned) should_not_email(note.author) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it do note.update_attribute(:note, '@mention referenced') notification.new_note(note) should_email(@u_guest_watcher) should_email(@u_committer) should_email(@u_watcher) should_email(@u_mentioned) should_not_email(note.author) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it do @u_committer = create_global_setting_for(@u_committer, :mention) notification.new_note(note) should_not_email(@u_committer) end end end context "merge request diff note" do let(:project) { create(:project) } let(:user) { create(:user) } let(:merge_request) { create(:merge_request, source_project: project, assignee: user) } let(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request) } before do build_team(note.project) project.add_master(merge_request.author) project.add_master(merge_request.assignee) end describe '#new_note' do it "records sent notifications" do # Ensure create SentNotification by noteable = merge_request 6 times, not noteable = note expect(SentNotification).to receive(:record_note).with(note, any_args).exactly(3).times.and_call_original notification.new_note(note) expect(SentNotification.last.position).to eq(note.position) end end end end describe 'Issues' do let(:group) { create(:group) } let(:project) { create(:empty_project, :public, namespace: group) } let(:another_project) { create(:empty_project, :public, namespace: group) } let(:issue) { create :issue, project: project, assignee: create(:user), description: 'cc @participant' } before do build_team(issue.project) build_group(issue.project) add_users_with_subscription(issue.project, issue) reset_delivered_emails! 
update_custom_notification(:new_issue, @u_guest_custom, project) update_custom_notification(:new_issue, @u_custom_global) end describe '#new_issue' do it do notification.new_issue(issue, @u_disabled) should_email(issue.assignee) should_email(@u_watcher) should_email(@u_guest_watcher) should_email(@u_guest_custom) should_email(@u_custom_global) should_email(@u_participant_mentioned) should_email(@g_global_watcher) should_email(@g_watcher) should_not_email(@u_mentioned) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it do create_global_setting_for(issue.assignee, :mention) notification.new_issue(issue, @u_disabled) should_not_email(issue.assignee) end it "emails subscribers of the issue's labels" do user_1 = create(:user) user_2 = create(:user) user_3 = create(:user) user_4 = create(:user) label = create(:label, project: project, issues: [issue]) group_label = create(:group_label, group: group, issues: [issue]) issue.reload label.toggle_subscription(user_1, project) group_label.toggle_subscription(user_2, project) group_label.toggle_subscription(user_3, another_project) group_label.toggle_subscription(user_4) notification.new_issue(issue, @u_disabled) should_email(user_1) should_email(user_2) should_not_email(user_3) should_email(user_4) end context 'confidential issues' do let(:author) { create(:user) } let(:assignee) { create(:user) } let(:non_member) { create(:user) } let(:member) { create(:user) } let(:guest) { create(:user) } let(:admin) { create(:admin) } let(:confidential_issue) { create(:issue, :confidential, project: project, title: 'Confidential issue', author: author, assignee: assignee) } it "emails subscribers of the issue's labels that can read the issue" do project.add_developer(member) project.add_guest(guest) label = create(:label, project: project, issues: [confidential_issue]) confidential_issue.reload label.toggle_subscription(non_member, project) label.toggle_subscription(author, project) label.toggle_subscription(assignee, project) label.toggle_subscription(member, project) label.toggle_subscription(guest, project) label.toggle_subscription(admin, project) reset_delivered_emails! 
notification.new_issue(confidential_issue, @u_disabled) should_not_email(@u_guest_watcher) should_not_email(non_member) should_not_email(author) should_not_email(guest) should_email(assignee) should_email(member) should_email(admin) end end end describe '#new_mentions_in_issue' do let(:notification_method) { :new_mentions_in_issue } let(:mentionable) { issue } include_examples 'notifications for new mentions' end describe '#reassigned_issue' do before do update_custom_notification(:reassign_issue, @u_guest_custom, project) update_custom_notification(:reassign_issue, @u_custom_global) end it 'emails new assignee' do notification.reassigned_issue(issue, @u_disabled) should_email(issue.assignee) should_email(@u_watcher) should_email(@u_guest_watcher) should_email(@u_guest_custom) should_email(@u_custom_global) should_email(@u_participant_mentioned) should_email(@subscriber) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it 'emails previous assignee even if he has the "on mention" notif level' do issue.update_attribute(:assignee, @u_mentioned) issue.update_attributes(assignee: @u_watcher) notification.reassigned_issue(issue, @u_disabled) should_email(@u_mentioned) should_email(@u_watcher) should_email(@u_guest_watcher) should_email(@u_guest_custom) should_email(@u_participant_mentioned) should_email(@subscriber) should_email(@u_custom_global) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it 'emails new assignee even if he has the "on mention" notif level' do issue.update_attributes(assignee: @u_mentioned) notification.reassigned_issue(issue, @u_disabled) expect(issue.assignee).to be @u_mentioned should_email(issue.assignee) should_email(@u_watcher) should_email(@u_guest_watcher) should_email(@u_guest_custom) should_email(@u_participant_mentioned) should_email(@subscriber) should_email(@u_custom_global) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it 'emails new assignee' do issue.update_attribute(:assignee, @u_mentioned) notification.reassigned_issue(issue, @u_disabled) expect(issue.assignee).to be @u_mentioned should_email(issue.assignee) should_email(@u_watcher) should_email(@u_guest_watcher) should_email(@u_guest_custom) should_email(@u_participant_mentioned) should_email(@subscriber) should_email(@u_custom_global) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it 'does not email new assignee if they are the current user' do issue.update_attribute(:assignee, @u_mentioned) notification.reassigned_issue(issue, @u_mentioned) expect(issue.assignee).to be @u_mentioned should_email(@u_watcher) should_email(@u_guest_watcher) should_email(@u_guest_custom) should_email(@u_participant_mentioned) should_email(@subscriber) should_email(@u_custom_global) should_not_email(issue.assignee) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it_behaves_like 'participating notifications' do let(:participant) { create(:user, username: 'user-participant') } let(:issuable) { issue } let(:notification_trigger) { notification.reassigned_issue(issue, @u_disabled) } end end describe '#relabeled_issue' do let(:group_label_1) { create(:group_label, group: group, 
title: 'Group Label 1', issues: [issue]) } let(:group_label_2) { create(:group_label, group: group, title: 'Group Label 2') } let(:label_1) { create(:label, project: project, title: 'Label 1', issues: [issue]) } let(:label_2) { create(:label, project: project, title: 'Label 2') } let!(:subscriber_to_group_label_1) { create(:user) { |u| group_label_1.toggle_subscription(u, project) } } let!(:subscriber_1_to_group_label_2) { create(:user) { |u| group_label_2.toggle_subscription(u, project) } } let!(:subscriber_2_to_group_label_2) { create(:user) { |u| group_label_2.toggle_subscription(u) } } let!(:subscriber_to_group_label_2_on_another_project) { create(:user) { |u| group_label_2.toggle_subscription(u, another_project) } } let!(:subscriber_to_label_1) { create(:user) { |u| label_1.toggle_subscription(u, project) } } let!(:subscriber_to_label_2) { create(:user) { |u| label_2.toggle_subscription(u, project) } } it "emails subscribers of the issue's added labels only" do notification.relabeled_issue(issue, [group_label_2, label_2], @u_disabled) should_not_email(subscriber_to_label_1) should_not_email(subscriber_to_group_label_1) should_not_email(subscriber_to_group_label_2_on_another_project) should_email(subscriber_1_to_group_label_2) should_email(subscriber_2_to_group_label_2) should_email(subscriber_to_label_2) end it "doesn't send email to anyone but subscribers of the given labels" do notification.relabeled_issue(issue, [group_label_2, label_2], @u_disabled) should_not_email(issue.assignee) should_not_email(issue.author) should_not_email(@u_watcher) should_not_email(@u_guest_watcher) should_not_email(@u_participant_mentioned) should_not_email(@subscriber) should_not_email(@watcher_and_subscriber) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(subscriber_to_label_1) should_not_email(subscriber_to_group_label_1) should_not_email(subscriber_to_group_label_2_on_another_project) should_email(subscriber_1_to_group_label_2) should_email(subscriber_2_to_group_label_2) should_email(subscriber_to_label_2) end context 'confidential issues' do let(:author) { create(:user) } let(:assignee) { create(:user) } let(:non_member) { create(:user) } let(:member) { create(:user) } let(:guest) { create(:user) } let(:admin) { create(:admin) } let(:confidential_issue) { create(:issue, :confidential, project: project, title: 'Confidential issue', author: author, assignee: assignee) } let!(:label_1) { create(:label, project: project, issues: [confidential_issue]) } let!(:label_2) { create(:label, project: project) } it "emails subscribers of the issue's labels that can read the issue" do project.add_developer(member) project.add_guest(guest) label_2.toggle_subscription(non_member, project) label_2.toggle_subscription(author, project) label_2.toggle_subscription(assignee, project) label_2.toggle_subscription(member, project) label_2.toggle_subscription(guest, project) label_2.toggle_subscription(admin, project) reset_delivered_emails! 
notification.relabeled_issue(confidential_issue, [label_2], @u_disabled) should_not_email(non_member) should_not_email(guest) should_email(author) should_email(assignee) should_email(member) should_email(admin) end end end describe '#close_issue' do before do update_custom_notification(:close_issue, @u_guest_custom, project) update_custom_notification(:close_issue, @u_custom_global) end it 'sends email to issue assignee and issue author' do notification.close_issue(issue, @u_disabled) should_email(issue.assignee) should_email(issue.author) should_email(@u_watcher) should_email(@u_guest_watcher) should_email(@u_guest_custom) should_email(@u_custom_global) should_email(@u_participant_mentioned) should_email(@subscriber) should_email(@watcher_and_subscriber) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it_behaves_like 'participating notifications' do let(:participant) { create(:user, username: 'user-participant') } let(:issuable) { issue } let(:notification_trigger) { notification.close_issue(issue, @u_disabled) } end end describe '#reopen_issue' do before do update_custom_notification(:reopen_issue, @u_guest_custom, project) update_custom_notification(:reopen_issue, @u_custom_global) end it 'sends email to issue notification recipients' do notification.reopen_issue(issue, @u_disabled) should_email(issue.assignee) should_email(issue.author) should_email(@u_watcher) should_email(@u_guest_watcher) should_email(@u_guest_custom) should_email(@u_custom_global) should_email(@u_participant_mentioned) should_email(@subscriber) should_email(@watcher_and_subscriber) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it_behaves_like 'participating notifications' do let(:participant) { create(:user, username: 'user-participant') } let(:issuable) { issue } let(:notification_trigger) { notification.reopen_issue(issue, @u_disabled) } end end describe '#issue_moved' do let(:new_issue) { create(:issue) } it 'sends email to issue notification recipients' do notification.issue_moved(issue, new_issue, @u_disabled) should_email(issue.assignee) should_email(issue.author) should_email(@u_watcher) should_email(@u_guest_watcher) should_email(@u_participant_mentioned) should_email(@subscriber) should_email(@watcher_and_subscriber) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it_behaves_like 'participating notifications' do let(:participant) { create(:user, username: 'user-participant') } let(:issuable) { issue } let(:notification_trigger) { notification.issue_moved(issue, new_issue, @u_disabled) } end end end describe 'Merge Requests' do let(:group) { create(:group) } let(:project) { create(:project, :public, namespace: group) } let(:another_project) { create(:empty_project, :public, namespace: group) } let(:merge_request) { create :merge_request, source_project: project, assignee: create(:user), description: 'cc @participant' } before do build_team(merge_request.target_project) add_users_with_subscription(merge_request.target_project, merge_request) update_custom_notification(:new_merge_request, @u_guest_custom, project) update_custom_notification(:new_merge_request, @u_custom_global) reset_delivered_emails! 
end describe '#new_merge_request' do before do update_custom_notification(:new_merge_request, @u_guest_custom, project) update_custom_notification(:new_merge_request, @u_custom_global) end it do notification.new_merge_request(merge_request, @u_disabled) should_email(merge_request.assignee) should_email(@u_watcher) should_email(@watcher_and_subscriber) should_email(@u_participant_mentioned) should_email(@u_guest_watcher) should_email(@u_guest_custom) should_email(@u_custom_global) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it "emails subscribers of the merge request's labels" do user_1 = create(:user) user_2 = create(:user) user_3 = create(:user) user_4 = create(:user) label = create(:label, project: project, merge_requests: [merge_request]) group_label = create(:group_label, group: group, merge_requests: [merge_request]) label.toggle_subscription(user_1, project) group_label.toggle_subscription(user_2, project) group_label.toggle_subscription(user_3, another_project) group_label.toggle_subscription(user_4) notification.new_merge_request(merge_request, @u_disabled) should_email(user_1) should_email(user_2) should_not_email(user_3) should_email(user_4) end context 'participating' do it_should_behave_like 'participating by assignee notification' do let(:participant) { create(:user, username: 'user-participant')} let(:issuable) { merge_request } let(:notification_trigger) { notification.new_merge_request(merge_request, @u_disabled) } end it_should_behave_like 'participating by note notification' do let(:participant) { create(:user, username: 'user-participant')} let(:issuable) { merge_request } let(:notification_trigger) { notification.new_merge_request(merge_request, @u_disabled) } end context 'by author' do let(:participant) { create(:user, username: 'user-participant')} before do merge_request.author = participant merge_request.save notification.new_merge_request(merge_request, @u_disabled) end it { should_not_email(participant) } end end end describe '#new_mentions_in_merge_request' do let(:notification_method) { :new_mentions_in_merge_request } let(:mentionable) { merge_request } include_examples 'notifications for new mentions' end describe '#reassigned_merge_request' do before do update_custom_notification(:reassign_merge_request, @u_guest_custom, project) update_custom_notification(:reassign_merge_request, @u_custom_global) end it do notification.reassigned_merge_request(merge_request, merge_request.author) should_email(merge_request.assignee) should_email(@u_watcher) should_email(@u_participant_mentioned) should_email(@subscriber) should_email(@watcher_and_subscriber) should_email(@u_guest_watcher) should_email(@u_guest_custom) should_email(@u_custom_global) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it_behaves_like 'participating notifications' do let(:participant) { create(:user, username: 'user-participant') } let(:issuable) { merge_request } let(:notification_trigger) { notification.reassigned_merge_request(merge_request, @u_disabled) } end end describe '#relabel_merge_request' do let(:group_label_1) { create(:group_label, group: group, title: 'Group Label 1', merge_requests: [merge_request]) } let(:group_label_2) { create(:group_label, group: group, title: 'Group Label 2') } let(:label_1) { create(:label, project: project, title: 'Label 1', merge_requests: [merge_request]) } let(:label_2) { create(:label, project: project, 
title: 'Label 2') } let!(:subscriber_to_group_label_1) { create(:user) { |u| group_label_1.toggle_subscription(u, project) } } let!(:subscriber_1_to_group_label_2) { create(:user) { |u| group_label_2.toggle_subscription(u, project) } } let!(:subscriber_2_to_group_label_2) { create(:user) { |u| group_label_2.toggle_subscription(u) } } let!(:subscriber_to_group_label_2_on_another_project) { create(:user) { |u| group_label_2.toggle_subscription(u, another_project) } } let!(:subscriber_to_label_1) { create(:user) { |u| label_1.toggle_subscription(u, project) } } let!(:subscriber_to_label_2) { create(:user) { |u| label_2.toggle_subscription(u, project) } } it "emails subscribers of the merge request's added labels only" do notification.relabeled_merge_request(merge_request, [group_label_2, label_2], @u_disabled) should_not_email(subscriber_to_label_1) should_not_email(subscriber_to_group_label_1) should_not_email(subscriber_to_group_label_2_on_another_project) should_email(subscriber_1_to_group_label_2) should_email(subscriber_2_to_group_label_2) should_email(subscriber_to_label_2) end it "doesn't send email to anyone but subscribers of the given labels" do notification.relabeled_merge_request(merge_request, [group_label_2, label_2], @u_disabled) should_not_email(merge_request.assignee) should_not_email(merge_request.author) should_not_email(@u_watcher) should_not_email(@u_participant_mentioned) should_not_email(@subscriber) should_not_email(@watcher_and_subscriber) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(@u_lazy_participant) should_not_email(subscriber_to_label_1) should_not_email(subscriber_to_group_label_1) should_not_email(subscriber_to_group_label_2_on_another_project) should_email(subscriber_1_to_group_label_2) should_email(subscriber_2_to_group_label_2) should_email(subscriber_to_label_2) end end describe '#closed_merge_request' do before do update_custom_notification(:close_merge_request, @u_guest_custom, project) update_custom_notification(:close_merge_request, @u_custom_global) end it do notification.close_mr(merge_request, @u_disabled) should_email(merge_request.assignee) should_email(@u_watcher) should_email(@u_guest_watcher) should_email(@u_guest_custom) should_email(@u_custom_global) should_email(@u_participant_mentioned) should_email(@subscriber) should_email(@watcher_and_subscriber) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it_behaves_like 'participating notifications' do let(:participant) { create(:user, username: 'user-participant') } let(:issuable) { merge_request } let(:notification_trigger) { notification.close_mr(merge_request, @u_disabled) } end end describe '#merged_merge_request' do before do update_custom_notification(:merge_merge_request, @u_guest_custom, project) update_custom_notification(:merge_merge_request, @u_custom_global) end it do notification.merge_mr(merge_request, @u_disabled) should_email(merge_request.assignee) should_email(@u_watcher) should_email(@u_participant_mentioned) should_email(@subscriber) should_email(@watcher_and_subscriber) should_email(@u_guest_watcher) should_email(@u_custom_global) should_email(@u_guest_custom) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it "notifies the merger when the pipeline succeeds is true" do merge_request.merge_when_pipeline_succeeds = true notification.merge_mr(merge_request, 
@u_watcher) should_email(@u_watcher) end it "does not notify the merger when the pipeline succeeds is false" do merge_request.merge_when_pipeline_succeeds = false notification.merge_mr(merge_request, @u_watcher) should_not_email(@u_watcher) end it_behaves_like 'participating notifications' do let(:participant) { create(:user, username: 'user-participant') } let(:issuable) { merge_request } let(:notification_trigger) { notification.merge_mr(merge_request, @u_disabled) } end end describe '#reopen_merge_request' do before do update_custom_notification(:reopen_merge_request, @u_guest_custom, project) update_custom_notification(:reopen_merge_request, @u_custom_global) end it do notification.reopen_mr(merge_request, @u_disabled) should_email(merge_request.assignee) should_email(@u_watcher) should_email(@u_participant_mentioned) should_email(@subscriber) should_email(@watcher_and_subscriber) should_email(@u_guest_watcher) should_email(@u_guest_custom) should_email(@u_custom_global) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it_behaves_like 'participating notifications' do let(:participant) { create(:user, username: 'user-participant') } let(:issuable) { merge_request } let(:notification_trigger) { notification.reopen_mr(merge_request, @u_disabled) } end end describe "#resolve_all_discussions" do it do notification.resolve_all_discussions(merge_request, @u_disabled) should_email(merge_request.assignee) should_email(@u_watcher) should_email(@u_participant_mentioned) should_email(@subscriber) should_email(@watcher_and_subscriber) should_email(@u_guest_watcher) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(@u_disabled) should_not_email(@u_lazy_participant) end it_behaves_like 'participating notifications' do let(:participant) { create(:user, username: 'user-participant') } let(:issuable) { merge_request } let(:notification_trigger) { notification.resolve_all_discussions(merge_request, @u_disabled) } end end end describe 'Projects' do let(:project) { create :project } before do build_team(project) reset_delivered_emails! 
end describe '#project_was_moved' do it do notification.project_was_moved(project, "gitlab/gitlab") should_email(@u_watcher) should_email(@u_participating) should_email(@u_lazy_participant) should_email(@u_custom_global) should_not_email(@u_guest_watcher) should_not_email(@u_guest_custom) should_not_email(@u_disabled) end end end describe 'GroupMember' do describe '#decline_group_invite' do let(:creator) { create(:user) } let(:group) { create(:group) } let(:member) { create(:user) } before(:each) do group.add_owner(creator) group.add_developer(member, creator) end it do group_member = group.members.first expect do notification.decline_group_invite(group_member) end.to change { ActionMailer::Base.deliveries.size }.by(1) end end end describe 'ProjectMember' do describe '#decline_group_invite' do let(:project) { create(:project) } let(:member) { create(:user) } before(:each) do project.add_developer(member, current_user: project.owner) end it do project_member = project.members.first expect do notification.decline_project_invite(project_member) end.to change { ActionMailer::Base.deliveries.size }.by(1) end end end context 'guest user in private project' do let(:private_project) { create(:empty_project, :private) } let(:guest) { create(:user) } let(:developer) { create(:user) } let(:assignee) { create(:user) } let(:merge_request) { create(:merge_request, source_project: private_project, assignee: assignee) } let(:merge_request1) { create(:merge_request, source_project: private_project, assignee: assignee, description: "cc @#{guest.username}") } let(:note) { create(:note, noteable: merge_request, project: private_project) } before do private_project.add_developer(assignee) private_project.add_developer(developer) private_project.add_guest(guest) ActionMailer::Base.deliveries.clear end it 'filters out guests when new note is created' do expect(SentNotification).to receive(:record).with(merge_request, any_args).exactly(1).times notification.new_note(note) should_not_email(guest) should_email(assignee) end it 'filters out guests when new merge request is created' do notification.new_merge_request(merge_request1, @u_disabled) should_not_email(guest) should_email(assignee) end it 'filters out guests when merge request is closed' do notification.close_mr(merge_request, developer) should_not_email(guest) should_email(assignee) end it 'filters out guests when merge request is reopened' do notification.reopen_mr(merge_request, developer) should_not_email(guest) should_email(assignee) end it 'filters out guests when merge request is merged' do notification.merge_mr(merge_request, developer) should_not_email(guest) should_email(assignee) end end describe 'Pipelines' do describe '#pipeline_finished' do let(:project) { create(:project, :public) } let(:current_user) { create(:user) } let(:u_member) { create(:user) } let(:u_other) { create(:user) } let(:commit) { project.commit } let(:pipeline) do create(:ci_pipeline, :success, project: project, user: current_user, ref: 'refs/heads/master', sha: commit.id, before_sha: '00000000') end before do project.add_master(current_user) project.add_master(u_member) reset_delivered_emails! 
end context 'without custom recipients' do it 'notifies the pipeline user' do notification.pipeline_finished(pipeline) should_only_email(current_user, kind: :bcc) end end context 'with custom recipients' do it 'notifies the custom recipients' do users = [u_member, u_other] notification.pipeline_finished(pipeline, users.map(&:notification_email)) should_only_email(*users, kind: :bcc) end end end end def build_team(project) @u_watcher = create_global_setting_for(create(:user), :watch) @u_participating = create_global_setting_for(create(:user), :participating) @u_participant_mentioned = create_global_setting_for(create(:user, username: 'participant'), :participating) @u_disabled = create_global_setting_for(create(:user), :disabled) @u_mentioned = create_global_setting_for(create(:user, username: 'mention'), :mention) @u_committer = create(:user, username: 'committer') @u_not_mentioned = create_global_setting_for(create(:user, username: 'regular'), :participating) @u_outsider_mentioned = create(:user, username: 'outsider') @u_custom_global = create_global_setting_for(create(:user, username: 'custom_global'), :custom) # User to be participant by default # This user does not contain any record in notification settings table # It should be treated with a :participating notification_level @u_lazy_participant = create(:user, username: 'lazy-participant') @u_guest_watcher = create_user_with_notification(:watch, 'guest_watching') @u_guest_custom = create_user_with_notification(:custom, 'guest_custom') project.add_master(@u_watcher) project.add_master(@u_participating) project.add_master(@u_participant_mentioned) project.add_master(@u_disabled) project.add_master(@u_mentioned) project.add_master(@u_committer) project.add_master(@u_not_mentioned) project.add_master(@u_lazy_participant) project.add_master(@u_custom_global) end # Users in the project's group but not part of project's team # with different notification settings def build_group(project) group = create(:group, :public) project.group = group # Group member: global=disabled, group=watch @g_watcher = create_user_with_notification(:watch, 'group_watcher', project.group) @g_watcher.notification_settings_for(nil).disabled! 
# Group member: global=watch, group=global @g_global_watcher = create_global_setting_for(create(:user), :watch) group.add_users([@g_watcher, @g_global_watcher], :master) group end def create_global_setting_for(user, level) setting = user.global_notification_setting setting.level = level setting.save user end def create_user_with_notification(level, username, resource = project) user = create(:user, username: username) setting = user.notification_settings_for(resource) setting.level = level setting.save user end # Create custom notifications # When resource is nil it means global notification def update_custom_notification(event, user, resource = nil) setting = user.notification_settings_for(resource) setting.events[event] = true setting.save end def add_users_with_subscription(project, issuable) @subscriber = create :user @unsubscriber = create :user @subscribed_participant = create_global_setting_for(create(:user, username: 'subscribed_participant'), :participating) @watcher_and_subscriber = create_global_setting_for(create(:user), :watch) project.add_master(@subscribed_participant) project.add_master(@subscriber) project.add_master(@unsubscriber) project.add_master(@watcher_and_subscriber) issuable.subscriptions.create(user: @subscriber, project: project, subscribed: true) issuable.subscriptions.create(user: @subscribed_participant, project: project, subscribed: true) issuable.subscriptions.create(user: @unsubscriber, project: project, subscribed: false) # Make the watcher a subscriber to detect dupes issuable.subscriptions.create(user: @watcher_and_subscriber, project: project, subscribed: true) end end
36.810791
220
0.682462
1cb0fbe09a2eb177f6b35a1eb921f0dea4a05c3e
188
# encoding=UTF-8
require_relative '../writeFile.rb'

ObjSample = CWriteFile.new
ObjSample.start({"action" => "WriteFile","path" => "./document/text.txt","isCover"=>true,"text"=>"123"})
37.6
104
0.675532
91f64875f78d769cbb20dd19af17ad77e97d9534
1,893
require 'test_helper'

class BlockUnitTest < Minitest::Test
  include Liquid

  def test_blankspace
    template = Liquid::Template.parse(" ")
    assert_equal [" "], template.root.nodelist
  end

  def test_variable_beginning
    template = Liquid::Template.parse("{{funk}} ")
    assert_equal 2, template.root.nodelist.size
    assert_equal Variable, template.root.nodelist[0].class
    assert_equal String, template.root.nodelist[1].class
  end

  def test_variable_end
    template = Liquid::Template.parse(" {{funk}}")
    assert_equal 2, template.root.nodelist.size
    assert_equal String, template.root.nodelist[0].class
    assert_equal Variable, template.root.nodelist[1].class
  end

  def test_variable_middle
    template = Liquid::Template.parse(" {{funk}} ")
    assert_equal 3, template.root.nodelist.size
    assert_equal String, template.root.nodelist[0].class
    assert_equal Variable, template.root.nodelist[1].class
    assert_equal String, template.root.nodelist[2].class
  end

  def test_variable_many_embedded_fragments
    template = Liquid::Template.parse(" {{funk}} {{so}} {{brother}} ")
    assert_equal 7, template.root.nodelist.size
    assert_equal [String, Variable, String, Variable, String, Variable, String],
      block_types(template.root.nodelist)
  end

  def test_with_block
    template = Liquid::Template.parse(" {% comment %} {% endcomment %} ")
    assert_equal [String, Comment, String], block_types(template.root.nodelist)
    assert_equal 3, template.root.nodelist.size
  end

  def test_with_custom_tag
    Liquid::Template.register_tag("testtag", Block)
    assert Liquid::Template.parse("{% testtag %} {% endtesttag %}")
  ensure
    Liquid::Template.tags.delete('testtag')
  end

  private

  def block_types(nodelist)
    nodelist.collect(&:class)
  end
end # VariableTest
32.084746
81
0.698362
21157591cfd4fb660c5cc8a15f7e217247337741
350
class RolePlayMigrationGenerator < Rails::Generators::Base
  include Rails::Generators::Migration

  source_root File.expand_path("../templates", __FILE__)

  def self.next_migration_number(path)
    Time.now.utc.strftime("%Y%m%d%H%M%S")
  end

  def create_model_file
    migration_template "migration.rb", "db/migrate/create_roles.rb"
  end
end
26.923077
67
0.745714
ed676b52b42a829b09ca7cc8381819f961e71d0b
1,332
# -*- encoding: utf-8 -*-
# stub: em-websocket 0.5.1 ruby lib

Gem::Specification.new do |s|
  s.name = "em-websocket".freeze
  s.version = "0.5.1"

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Ilya Grigorik".freeze, "Martyn Loughran".freeze]
  s.date = "2014-04-23"
  s.description = "EventMachine based WebSocket server".freeze
  s.email = ["[email protected]".freeze, "[email protected]".freeze]
  s.homepage = "http://github.com/igrigorik/em-websocket".freeze
  s.rubygems_version = "3.0.6".freeze
  s.summary = "EventMachine based WebSocket server".freeze

  s.installed_by_version = "3.0.6" if s.respond_to? :installed_by_version

  if s.respond_to? :specification_version then
    s.specification_version = 4

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<eventmachine>.freeze, [">= 0.12.9"])
      s.add_runtime_dependency(%q<http_parser.rb>.freeze, ["~> 0.6.0"])
    else
      s.add_dependency(%q<eventmachine>.freeze, [">= 0.12.9"])
      s.add_dependency(%q<http_parser.rb>.freeze, ["~> 0.6.0"])
    end
  else
    s.add_dependency(%q<eventmachine>.freeze, [">= 0.12.9"])
    s.add_dependency(%q<http_parser.rb>.freeze, ["~> 0.6.0"])
  end
end
38.057143
112
0.676426
0117d58683eb4deb1a5c25e876735f8e97756fc1
179
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../shared/equal', __FILE__)

describe "Bignum#==" do
  it_behaves_like :bignum_equal, :==
end
25.571429
58
0.715084
ed4a3e72ee7c99a153323a703cfd2b2c75cd230c
110
module Pulsar
  class Install
    include Interactor::Organizer

    organize CopyInitialRepository
  end
end
13.75
34
0.772727
38040bbcb9323f1182bf7459972301181abfc39e
1,767
# frozen_string_literal: true

require "rails_helper"

RSpec.describe TraineeStatusCard::View do
  include Rails.application.routes.url_helpers

  alias_method :component, :page

  let(:current_user) { create(:user, system_admin: true) }
  let(:trainee) { create(:trainee) }
  let(:trainees) { Trainee.all }
  let(:target) { trainees_path("state[]": "draft") }

  describe "#state_name" do
    it "returns state name in correct format" do
      award_states = %w[qts_recommended qts_awarded eyts_recommended eyts_awarded]

      (Trainee.states.keys + award_states).each do |state|
        expect(described_class.new(state: state, target: target, count: 1).state_name)
          .to eql(I18n.t("activerecord.attributes.trainee.states.#{state}"))
      end
    end
  end

  describe "#status_colour" do
    it "returns the correct colour for given state" do
      described_class::STATUS_COLOURS.each_key do |state|
        expect(described_class.new(state: state, target: target, count: 1).status_colour)
          .to eql(described_class::STATUS_COLOURS[state])
      end
    end
  end

  describe "rendered component" do
    before do
      trainee
      render_inline(described_class.new(state: "draft", target: target, count: 1))
    end

    it "renders the correct css colour" do
      expect(component).to have_css(".app-status-card--grey")
    end

    it "renders the correct text" do
      expect(component).to have_text("Draft")
    end

    it "renders the trainee count for those in draft" do
      expect(component).to have_text("1")
    end

    it "renders the correct filter link" do
      expect(component).to have_link(href: "/trainees?state%5B%5D=draft")
    end
  end
end
29.949153
89
0.659875
3859c875f9d347fd9debc0f4ca0b142761da81aa
113
class Admin < ApplicationRecord
  validates :name, presence: true

  has_many :logins, foreign_key: :user_id
end
18.833333
41
0.769912
e973d3df37d77f4a5a5d6e20b53d0da3c4c515cd
368
require_relative '../options_helper'

describe OctocatalogDiff::CatalogDiff::Cli::Options do
  describe '#opt_bootstrap_script' do
    it 'should set options[:bootstrap_script]' do
      result = run_optparse(['--bootstrap-script', 'my-bootstrap-script'])
      expect(result.fetch(:bootstrap_script, 'key-not-defined')).to eq('my-bootstrap-script')
    end
  end
end
33.454545
93
0.728261
61cefd24b3f466796c92e53b1f72ea57df02ac01
7,705
# encoding: utf-8 # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::CDN::Mgmt::V2016_10_02 # # Use these APIs to manage Azure CDN resources through the Azure Resource # Manager. You must make sure that requests made to these resources are # secure. # class EdgeNodes include MsRestAzure # # Creates and initializes a new instance of the EdgeNodes class. # @param client service class for accessing basic functionality. # def initialize(client) @client = client end # @return [CdnManagementClient] reference to the CdnManagementClient attr_reader :client # # Lists all the edge nodes of a CDN service. # # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [Array<EdgeNode>] operation results. # def list(custom_headers:nil) first_page = list_as_lazy(custom_headers:custom_headers) first_page.get_all_items end # # Lists all the edge nodes of a CDN service. # # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def list_with_http_info(custom_headers:nil) list_async(custom_headers:custom_headers).value! end # # Lists all the edge nodes of a CDN service. # # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def list_async(custom_headers:nil) fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? path_template = 'providers/Microsoft.Cdn/edgenodes' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], query_params: {'api-version' => @client.api_version}, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:get, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 error_model = JSON.load(response_content) fail MsRest::HttpOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content) result_mapper = Azure::CDN::Mgmt::V2016_10_02::Models::EdgenodeResult.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Lists all the edge nodes of a CDN service. # # @param next_page_link [String] The NextLink from the previous successful call # to List operation. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [EdgenodeResult] operation results. 
# def list_next(next_page_link, custom_headers:nil) response = list_next_async(next_page_link, custom_headers:custom_headers).value! response.body unless response.nil? end # # Lists all the edge nodes of a CDN service. # # @param next_page_link [String] The NextLink from the previous successful call # to List operation. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def list_next_with_http_info(next_page_link, custom_headers:nil) list_next_async(next_page_link, custom_headers:custom_headers).value! end # # Lists all the edge nodes of a CDN service. # # @param next_page_link [String] The NextLink from the previous successful call # to List operation. # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def list_next_async(next_page_link, custom_headers:nil) fail ArgumentError, 'next_page_link is nil' if next_page_link.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? path_template = '{nextLink}' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], skip_encoding_path_params: {'nextLink' => next_page_link}, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:get, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 error_model = JSON.load(response_content) fail MsRest::HttpOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content) result_mapper = Azure::CDN::Mgmt::V2016_10_02::Models::EdgenodeResult.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Lists all the edge nodes of a CDN service. # # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [EdgenodeResult] which provide lazy access to pages of the response. # def list_as_lazy(custom_headers:nil) response = list_async(custom_headers:custom_headers).value! unless response.nil? page = response.body page.next_method = Proc.new do |next_page_link| list_next_async(next_page_link, custom_headers:custom_headers) end page end end end end
35.506912
129
0.673459
033f029d27bcbd0180b874ce1307a4c9a7fd0f15
1,655
#
# Be sure to run `pod lib lint OMTFoundation.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#

Pod::Spec.new do |s|
  s.name = 'OMTFoundation'
  s.version = '0.0.1'
  s.summary = 'A short description of OMTFoundation.'

# This description is used to generate tags and improve search results.
#   * Think: What does it do? Why did you write it? What is the focus?
#   * Try to keep it short, snappy and to the point.
#   * Write the description between the DESC delimiters below.
#   * Finally, don't worry about the indent, CocoaPods strips it!

  s.description = <<-DESC
TODO: Add long description of the pod here.
  DESC

  s.homepage = 'https://github.com/Xiaoye220/OMTFoundation'
  # s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license = { :type => 'MIT', :file => 'LICENSE' }
  s.author = { 'Xiaoye220' => '[email protected]' }
  s.source = { :git => 'https://github.com/Xiaoye220/OMTFoundation.git', :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'

  s.ios.deployment_target = '7.0'

  s.ios.vendored_frameworks = 'OMTFoundation-' + s.version + '/ios/OMTFoundation.framework'

  # s.resource_bundles = {
  #   'OMTFoundation' => ['OMTFoundation/Assets/*.png']
  # }

  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  # s.dependency 'AFNetworking'
end
35.978261
107
0.648943
e2bab9c1999d2dd743280b753ad7dc99961b6f3f
12,366
# frozen_string_literal: true RSpec.describe "real source plugins" do context "with a minimal source plugin" do before do build_repo2 do build_plugin "bundler-source-mpath" do |s| s.write "plugins.rb", <<-RUBY require "bundler-source-mpath" class MPath < Bundler::Plugin::API source "mpath" attr_reader :path def initialize(opts) super @path = Pathname.new options["uri"] end def fetch_gemspec_files @spec_files ||= begin glob = "{,*,*/*}.gemspec" if installed? search_path = install_path else search_path = path end Dir["\#{search_path.to_s}/\#{glob}"] end end def install(spec, opts) mkdir_p(install_path.parent) require 'fileutils' FileUtils.cp_r(path, install_path) spec_path = install_path.join("\#{spec.full_name}.gemspec") spec_path.open("wb") {|f| f.write spec.to_ruby } spec.loaded_from = spec_path.to_s post_install(spec) nil end end RUBY end # build_plugin end build_lib "a-path-gem" gemfile <<-G source "#{file_uri_for(gem_repo2)}" # plugin source source "#{lib_path("a-path-gem-1.0")}", :type => :mpath do gem "a-path-gem" end G end it "installs" do bundle "install" expect(out).to include("Bundle complete!") expect(the_bundle).to include_gems("a-path-gem 1.0") end it "writes to lock file" do bundle "install" lockfile_should_be <<-G PLUGIN SOURCE remote: #{lib_path("a-path-gem-1.0")} type: mpath specs: a-path-gem (1.0) GEM remote: #{file_uri_for(gem_repo2)}/ specs: PLATFORMS #{lockfile_platforms} DEPENDENCIES a-path-gem! BUNDLED WITH #{Bundler::VERSION} G end it "provides correct #full_gem_path" do bundle "install" run <<-RUBY puts Bundler.rubygems.find_name('a-path-gem').first.full_gem_path RUBY expect(out).to eq(bundle("info a-path-gem --path")) end it "installs the gem executables" do build_lib "gem_with_bin" do |s| s.executables = ["foo"] end install_gemfile <<-G source "#{file_uri_for(gem_repo2)}" # plugin source source "#{lib_path("gem_with_bin-1.0")}", :type => :mpath do gem "gem_with_bin" end G bundle "exec foo" expect(out).to eq("1.0") end describe "bundle cache/package" do let(:uri_hash) { Digest(:SHA1).hexdigest(lib_path("a-path-gem-1.0").to_s) } it "copies repository to vendor cache and uses it" do bundle "install" bundle "config set cache_all true" bundle :cache expect(bundled_app("vendor/cache/a-path-gem-1.0-#{uri_hash}")).to exist expect(bundled_app("vendor/cache/a-path-gem-1.0-#{uri_hash}/.git")).not_to exist expect(bundled_app("vendor/cache/a-path-gem-1.0-#{uri_hash}/.bundlecache")).to be_file FileUtils.rm_rf lib_path("a-path-gem-1.0") expect(the_bundle).to include_gems("a-path-gem 1.0") end it "copies repository to vendor cache and uses it even when installed with `path` configured" do bundle "config set --local path vendor/bundle" bundle :install bundle "config set cache_all true" bundle :cache expect(bundled_app("vendor/cache/a-path-gem-1.0-#{uri_hash}")).to exist FileUtils.rm_rf lib_path("a-path-gem-1.0") expect(the_bundle).to include_gems("a-path-gem 1.0") end it "bundler package copies repository to vendor cache" do bundle "config set --local path vendor/bundle" bundle :install bundle "config set cache_all true" bundle :cache expect(bundled_app("vendor/cache/a-path-gem-1.0-#{uri_hash}")).to exist FileUtils.rm_rf lib_path("a-path-gem-1.0") expect(the_bundle).to include_gems("a-path-gem 1.0") end end context "with lockfile" do before do lockfile <<-G PLUGIN SOURCE remote: #{lib_path("a-path-gem-1.0")} type: mpath specs: a-path-gem (1.0) GEM remote: #{file_uri_for(gem_repo2)}/ specs: PLATFORMS #{generic_local_platform} DEPENDENCIES a-path-gem! 
BUNDLED WITH #{Bundler::VERSION} G end it "installs" do bundle "install" expect(the_bundle).to include_gems("a-path-gem 1.0") end end end context "with a more elaborate source plugin" do before do build_repo2 do build_plugin "bundler-source-gitp" do |s| s.write "plugins.rb", <<-RUBY require "open3" class SPlugin < Bundler::Plugin::API source "gitp" attr_reader :ref def initialize(opts) super @ref = options["ref"] || options["branch"] || options["tag"] || "master" @unlocked = false end def eql?(other) other.is_a?(self.class) && uri == other.uri && ref == other.ref end alias_method :==, :eql? def fetch_gemspec_files @spec_files ||= begin glob = "{,*,*/*}.gemspec" if !cached? cache_repo end if installed? && !@unlocked path = install_path else path = cache_path end Dir["\#{path}/\#{glob}"] end end def install(spec, opts) mkdir_p(install_path.dirname) rm_rf(install_path) `git clone --no-checkout --quiet "\#{cache_path}" "\#{install_path}"` Open3.capture2e("git reset --hard \#{revision}", :chdir => install_path) spec_path = install_path.join("\#{spec.full_name}.gemspec") spec_path.open("wb") {|f| f.write spec.to_ruby } spec.loaded_from = spec_path.to_s post_install(spec) nil end def options_to_lock opts = {"revision" => revision} opts["ref"] = ref if ref != "master" opts end def unlock! @unlocked = true @revision = latest_revision end def app_cache_dirname "\#{base_name}-\#{shortref_for_path(revision)}" end private def cache_path @cache_path ||= cache_dir.join("gitp", base_name) end def cache_repo `git clone --quiet \#{@options["uri"]} \#{cache_path}` end def cached? File.directory?(cache_path) end def locked_revision options["revision"] end def revision @revision ||= locked_revision || latest_revision end def latest_revision if !cached? || @unlocked rm_rf(cache_path) cache_repo end output, _status = Open3.capture2e("git rev-parse --verify \#{@ref}", :chdir => cache_path) output.strip end def base_name File.basename(uri.sub(%r{^(\w+://)?([^/:]+:)?(//\w*/)?(\w*/)*}, ""), ".git") end def shortref_for_path(ref) ref[0..11] end def install_path @install_path ||= begin git_scope = "\#{base_name}-\#{shortref_for_path(revision)}" path = gem_install_dir.join(git_scope) if !path.exist? && requires_sudo? user_bundle_path.join(ruby_scope).join(git_scope) else path end end end def installed? File.directory?(install_path) end end RUBY end end build_git "ma-gitp-gem" gemfile <<-G source "#{file_uri_for(gem_repo2)}" # plugin source source "#{file_uri_for(lib_path("ma-gitp-gem-1.0"))}", :type => :gitp do gem "ma-gitp-gem" end G end it "handles the source option" do bundle "install" expect(out).to include("Bundle complete!") expect(the_bundle).to include_gems("ma-gitp-gem 1.0") end it "writes to lock file" do revision = revision_for(lib_path("ma-gitp-gem-1.0")) bundle "install" lockfile_should_be <<-G PLUGIN SOURCE remote: #{file_uri_for(lib_path("ma-gitp-gem-1.0"))} type: gitp revision: #{revision} specs: ma-gitp-gem (1.0) GEM remote: #{file_uri_for(gem_repo2)}/ specs: PLATFORMS #{lockfile_platforms} DEPENDENCIES ma-gitp-gem! BUNDLED WITH #{Bundler::VERSION} G end context "with lockfile" do before do revision = revision_for(lib_path("ma-gitp-gem-1.0")) lockfile <<-G PLUGIN SOURCE remote: #{file_uri_for(lib_path("ma-gitp-gem-1.0"))} type: gitp revision: #{revision} specs: ma-gitp-gem (1.0) GEM remote: #{file_uri_for(gem_repo2)}/ specs: PLATFORMS #{generic_local_platform} DEPENDENCIES ma-gitp-gem! 
BUNDLED WITH #{Bundler::VERSION} G end it "installs" do bundle "install" expect(the_bundle).to include_gems("ma-gitp-gem 1.0") end it "uses the locked ref" do update_git "ma-gitp-gem" bundle "install" run <<-RUBY require 'ma/gitp/gem' puts "WIN" unless defined?(MAGITPGEM_PREV_REF) RUBY expect(out).to eq("WIN") end it "updates the deps on bundler update" do update_git "ma-gitp-gem" bundle "update ma-gitp-gem" run <<-RUBY require 'ma/gitp/gem' puts "WIN" if defined?(MAGITPGEM_PREV_REF) RUBY expect(out).to eq("WIN") end it "updates the deps on change in gemfile" do update_git "ma-gitp-gem", "1.1", :path => lib_path("ma-gitp-gem-1.0"), :gemspec => true gemfile <<-G source "#{file_uri_for(gem_repo2)}" # plugin source source "#{file_uri_for(lib_path("ma-gitp-gem-1.0"))}", :type => :gitp do gem "ma-gitp-gem", "1.1" end G bundle "install" expect(the_bundle).to include_gems("ma-gitp-gem 1.1") end end describe "bundle cache with gitp" do it "copies repository to vendor cache and uses it" do git = build_git "foo" ref = git.ref_for("master", 11) install_gemfile <<-G source "#{file_uri_for(gem_repo2)}" # plugin source source '#{lib_path("foo-1.0")}', :type => :gitp do gem "foo" end G bundle "config set cache_all true" bundle :cache expect(bundled_app("vendor/cache/foo-1.0-#{ref}")).to exist expect(bundled_app("vendor/cache/foo-1.0-#{ref}/.git")).not_to exist expect(bundled_app("vendor/cache/foo-1.0-#{ref}/.bundlecache")).to be_file FileUtils.rm_rf lib_path("foo-1.0") expect(the_bundle).to include_gems "foo 1.0" end end end end
26.941176
106
0.505337
e8d7a4266ec001a50737aaf96c04a61d06e6d518
1,657
#
# Be sure to run `pod lib lint DBDatePickerView.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#

Pod::Spec.new do |s|
  s.name = 'DBDatePickerView'
  s.version = '1.0.0'
  s.summary = 'A short description of DBDatePickerView.'

# This description is used to generate tags and improve search results.
#   * Think: What does it do? Why did you write it? What is the focus?
#   * Try to keep it short, snappy and to the point.
#   * Write the description between the DESC delimiters below.
#   * Finally, don't worry about the indent, CocoaPods strips it!

  s.description = <<-DESC
TODO: Add long description of the pod here.
  DESC

  s.homepage = 'https://github.com/TylerDB/DBDatePickerView'
  # s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license = { :type => 'MIT', :file => 'LICENSE' }
  s.author = { 'db' => '[email protected]' }
  s.source = { :git => '[email protected]:TylerDB/DBDatePickerView.git', :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'

  s.ios.deployment_target = '9.0'

  s.source_files = 'DBDatePickerView/Classes/**/*'

  # s.resource_bundles = {
  #   'DBDatePickerView' => ['DBDatePickerView/Assets/*.png']
  # }

  s.public_header_files = 'DBDatePickerView/Classes/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  s.dependency 'Masonry'
  s.dependency 'DBCategoryKit'
end
36.822222
104
0.655401
6a8f13c08b93139039e613c7d9a6fda92cac64f6
8,018
class Locust < Formula include Language::Python::Virtualenv desc "Scalable user load testing tool written in Python" homepage "https://locust.io/" url "https://files.pythonhosted.org/packages/6e/ee/8c91551fc6f216f3106786191c8205e6bc05186dd690ffdf15bae06e6f66/locust-2.7.0.tar.gz" sha256 "0dc4a801afab0197e8dcff58f56c372cc31c04c4bb51f4693ca837414aabd144" license "MIT" bottle do sha256 cellar: :any_skip_relocation, arm64_monterey: "4f2462b28fb2812081ead1b90d1d679d2a8b188a39b4193ec011964e03bec4d2" sha256 cellar: :any_skip_relocation, arm64_big_sur: "c3b8375e2a5f07941ec9f9d0221d1b60f1de9548c6308e9fd27fe6e64b779b0b" sha256 cellar: :any_skip_relocation, monterey: "7a33d0a53fec00a7cf35cc0ad0015f915fa5d7c29c0abad4ca8348b6a6bd751f" sha256 cellar: :any_skip_relocation, big_sur: "5bb0c84696bfd522b824330b27f2f3b1aa48d7ca293c260a383f1afa09fe911f" sha256 cellar: :any_skip_relocation, catalina: "2ae939093f040359bed3b76c2514cd1df9e3fbfe37fe06f3c0ecdd1b17c7f9c1" sha256 cellar: :any_skip_relocation, x86_64_linux: "3f3770f5f7332aa6dfe271ce04b94979ff312beb45c5b4b9e9a3c1a1cfe22b1a" end depends_on "[email protected]" depends_on "six" resource "Brotli" do url "https://files.pythonhosted.org/packages/2a/18/70c32fe9357f3eea18598b23aa9ed29b1711c3001835f7cf99a9818985d0/Brotli-1.0.9.zip" sha256 "4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438" end resource "certifi" do url "https://files.pythonhosted.org/packages/6c/ae/d26450834f0acc9e3d1f74508da6df1551ceab6c2ce0766a593362d6d57f/certifi-2021.10.8.tar.gz" sha256 "78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872" end resource "charset-normalizer" do url "https://files.pythonhosted.org/packages/48/44/76b179e0d1afe6e6a91fd5661c284f60238987f3b42b676d141d01cd5b97/charset-normalizer-2.0.10.tar.gz" sha256 "876d180e9d7432c5d1dfd4c5d26b72f099d503e8fcc0feb7532c9289be60fcbd" end resource "click" do url "https://files.pythonhosted.org/packages/f4/09/ad003f1e3428017d1c3da4ccc9547591703ffea548626f47ec74509c5824/click-8.0.3.tar.gz" sha256 "410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b" end resource "ConfigArgParse" do url "https://files.pythonhosted.org/packages/16/05/385451bc8d20a3aa1d8934b32bd65847c100849ebba397dbf6c74566b237/ConfigArgParse-1.5.3.tar.gz" sha256 "1b0b3cbf664ab59dada57123c81eff3d9737e0d11d8cf79e3d6eb10823f1739f" end resource "Flask" do url "https://files.pythonhosted.org/packages/95/40/b976286b5e7ba01794a7e7588e7e7fa27fb16c6168fa849234840bf0f61d/Flask-2.0.2.tar.gz" sha256 "7b2fb8e934ddd50731893bdcdb00fc8c0315916f9fcd50d22c7cc1a95ab634e2" end resource "Flask-BasicAuth" do url "https://files.pythonhosted.org/packages/16/18/9726cac3c7cb9e5a1ac4523b3e508128136b37aadb3462c857a19318900e/Flask-BasicAuth-0.2.0.tar.gz" sha256 "df5ebd489dc0914c224419da059d991eb72988a01cdd4b956d52932ce7d501ff" end resource "Flask-Cors" do url "https://files.pythonhosted.org/packages/cf/25/e3b2553d22ed542be807739556c69621ad2ab276ae8d5d2560f4ed20f652/Flask-Cors-3.0.10.tar.gz" sha256 "b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de" end resource "gevent" do url "https://files.pythonhosted.org/packages/c8/18/631398e45c109987f2d8e57f3adda161cc5ff2bd8738ca830c3a2dd41a85/gevent-21.12.0.tar.gz" sha256 "f48b64578c367b91fa793bf8eaaaf4995cb93c8bc45860e473bf868070ad094e" end resource "geventhttpclient" do url "https://files.pythonhosted.org/packages/7e/52/f799b56882eb2730c09c281bcc7f71607963853302ce27f3c565aa736bc8/geventhttpclient-1.5.3.tar.gz" sha256 "d80ec9ff42b7219f33558185499d0b4365597fc55ff886207b45f5632e099780" end resource 
"greenlet" do url "https://files.pythonhosted.org/packages/0c/10/754e21b5bea89d0e73f99d60c83754df7cc64db74f47d98ab187669ce341/greenlet-1.1.2.tar.gz" sha256 "e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a" end resource "idna" do url "https://files.pythonhosted.org/packages/62/08/e3fc7c8161090f742f504f40b1bccbfc544d4a4e09eb774bf40aafce5436/idna-3.3.tar.gz" sha256 "9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d" end resource "itsdangerous" do url "https://files.pythonhosted.org/packages/58/66/d6c5859dcac92b442626427a8c7a42322068c5cd5d4a463ce78b93f730b7/itsdangerous-2.0.1.tar.gz" sha256 "9e724d68fc22902a1435351f84c3fb8623f303fffcc566a4cb952df8c572cff0" end resource "Jinja2" do url "https://files.pythonhosted.org/packages/91/a5/429efc6246119e1e3fbf562c00187d04e83e54619249eb732bb423efa6c6/Jinja2-3.0.3.tar.gz" sha256 "611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7" end resource "MarkupSafe" do url "https://files.pythonhosted.org/packages/bf/10/ff66fea6d1788c458663a84d88787bae15d45daa16f6b3ef33322a51fc7e/MarkupSafe-2.0.1.tar.gz" sha256 "594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a" end resource "msgpack" do url "https://files.pythonhosted.org/packages/61/3c/2206f39880d38ca7ad8ac1b28d2d5ca81632d163b2d68ef90e46409ca057/msgpack-1.0.3.tar.gz" sha256 "51fdc7fb93615286428ee7758cecc2f374d5ff363bdd884c7ea622a7a327a81e" end resource "psutil" do url "https://files.pythonhosted.org/packages/47/b6/ea8a7728f096a597f0032564e8013b705aa992a0990becd773dcc4d7b4a7/psutil-5.9.0.tar.gz" sha256 "869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25" end resource "pyzmq" do url "https://files.pythonhosted.org/packages/6c/95/d37e7db364d7f569e71068882b1848800f221c58026670e93a4c6d50efe7/pyzmq-22.3.0.tar.gz" sha256 "8eddc033e716f8c91c6a2112f0a8ebc5e00532b4a6ae1eb0ccc48e027f9c671c" end resource "requests" do url "https://files.pythonhosted.org/packages/60/f3/26ff3767f099b73e0efa138a9998da67890793bfa475d8278f84a30fec77/requests-2.27.1.tar.gz" sha256 "68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61" end resource "roundrobin" do url "https://files.pythonhosted.org/packages/3e/5d/60ce8f2ad7b8c8f7124a78eead5ecfc7f702ba80d8ad1e93b25337419a75/roundrobin-0.0.2.tar.gz" sha256 "ac30cb78570a36bb0ce0db7b907af9394ec7a5610ece2ede072280e8dd867caa" end resource "typing-extensions" do url "https://files.pythonhosted.org/packages/0d/4a/60ba3706797b878016f16edc5fbaf1e222109e38d0fa4d7d9312cb53f8dd/typing_extensions-4.0.1.tar.gz" sha256 "4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e" end resource "urllib3" do url "https://files.pythonhosted.org/packages/b0/b1/7bbf5181f8e3258efae31702f5eab87d8a74a72a0aa78bc8c08c1466e243/urllib3-1.26.8.tar.gz" sha256 "0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c" end resource "Werkzeug" do url "https://files.pythonhosted.org/packages/83/3c/ecdb36f49ab06defb0d5a466cfeb4ae90a55d02cfef379f781da2801a45d/Werkzeug-2.0.2.tar.gz" sha256 "aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a" end resource "zope.event" do url "https://files.pythonhosted.org/packages/30/00/94ed30bfec18edbabfcbd503fcf7482c5031b0fbbc9bc361f046cb79781c/zope.event-4.5.0.tar.gz" sha256 "5e76517f5b9b119acf37ca8819781db6c16ea433f7e2062c4afc2b6fbedb1330" end resource "zope.interface" do url "https://files.pythonhosted.org/packages/ae/58/e0877f58daa69126a5fb325d6df92b20b77431cd281e189c5ec42b722f58/zope.interface-5.4.0.tar.gz" sha256 "5dba5f530fec3f0988d83b78cc591b58c0b6eb8431a85edd1569a0539a8a5a0e" 
end def install virtualenv_install_with_resources end test do (testpath/"locustfile.py").write <<~EOS from locust import HttpUser, task class HelloWorldUser(HttpUser): @task def hello_world(self): self.client.get("/headers") self.client.get("/ip") EOS ENV["LOCUST_LOCUSTFILE"] = testpath/"locustfile.py" ENV["LOCUST_HOST"] = "http://httpbin.org" ENV["LOCUST_USERS"] = "2" system "locust", "--headless", "--run-time", "30s" end end
47.443787
149
0.815665
186cda333dc5a335451e9fd0138134f2f3290570
247
class AddTestGroupNumberToCachedRessiEvents < ActiveRecord::Migration[5.2]
  def self.up
    add_column :cached_ressi_events, :test_group_number, :integer
  end

  def self.down
    remove_column :cached_ressi_events, :test_group_number
  end
end
24.7
74
0.793522
38194124eac677b2e44b21e68b3dd11ca088c300
624
module Imdb
  # Represents a TV series on IMDB.com
  class Serie < Base
    def season(number)
      seasons[number - 1]
    end

    def seasons
      season_urls.map { |url| Imdb::Season.new(url) }
    end

    def creators
      document.search("div[text()*='Creator']//a").map { |a| a.content.strip }
    end

    private

    def newest_season
      document.at("section div[text()*='Season'] a[@href*='episodes?season']").content.strip.to_i rescue 0
    end

    def season_urls
      (1..newest_season).map do |num|
        Imdb::Base.url_for(@id, "episodes?season=#{num}")
      end
    end
  end # Serie
end # Imdb
21.517241
106
0.607372
7ac2ec3fb2ae8ea8b438b9ae130d39f5a2bf953d
1,348
# frozen_string_literal: true

module Fields
  class DateRangeField < Field
    # serialize :validations, Validations::DateRangeField
    # serialize :options, Options::DateRangeField

    def interpret_to(model, overrides: {})
      check_model_validity!(model)

      accessibility = overrides.fetch(:accessibility, self.accessibility)
      return model if accessibility == :hidden

      # nested_model = Class.new(::Fields::DateRangeField::DateRange)
      # model.nested_models[name] = nested_model

      # model.embeds_one name, anonymous_class: nested_model, validate: true
      # model.accepts_nested_attributes_for name, reject_if: :all_blank

      interpret_validations_to model, accessibility, overrides
      interpret_extra_to model, accessibility, overrides

      model
    end

    # class DateRange < VirtualModel
    #   attribute :start, :datetime
    #   attribute :finish, :datetime

    #   validates :start, :finish,
    #             presence: true
    #   validates :finish,
    #             timeliness: {
    #               after: :start,
    #               type: :date
    #             },
    #             allow_blank: false

    #   def start=(val)
    #     super(val.try(:in_time_zone)&.utc)
    #   end

    #   def finish=(val)
    #     super(val.try(:in_time_zone)&.utc)
    #   end
    # end
  end
end
26.431373
76
0.622404
ab5fe0a812e4dd3b51f00912747954a30bb150c4
954
Pod::Spec.new do |s|
  s.name         = "ADProgressView"
  s.version      = "1.2.0"
  s.summary      = "A UIProgressView subclass allowing start/pause/continue/reset."
  s.description  = "I created this subclass after discovering that UIProgressView does not allow you to pause/continue its progression. Thanks to ADProgressView, you can now start/pause/continue/reset your progress view. A Timer instance is used to control the progression. You can also customize the time interval between each update, and of course the total duration."
  s.homepage     = "https://github.com/adauguet/ADProgressView.git"
  s.license      = "MIT"
  s.author       = { "Antoine DAUGUET" => "[email protected]" }
  s.platform     = :ios, "9.0"
  s.source       = { :git => "https://github.com/adauguet/ADProgressView.git", :tag => s.version }
  s.source_files = "ADProgressView", "ADProgressView/**/*.{h,m,swift}"
  s.exclude_files = "Example"
end
43.363636
370
0.685535
336515cd3638f9d11958426792a7630ba0d4327d
189
class DropColumnnsFromBillingInvoices < ActiveRecord::Migration
  def change
    remove_column :bill_invoices, :invoiceable_id
    remove_column :bill_invoices, :invoiceable_type
  end
end
27
63
0.814815
91609de6f1bff033146a91033c6a78284d907894
613
module RailsSqlViews
  module Loader
    SUPPORTED_ADAPTERS = %w( Mysql PostgreSQL SQLServer SQLite )

    def self.load_extensions
      SUPPORTED_ADAPTERS.each do |db|
        if ActiveRecord::ConnectionAdapters.const_defined?("#{db}Adapter")
          require "rails_sql_views/connection_adapters/#{db.downcase}_adapter"
          ActiveRecord::ConnectionAdapters.const_get("#{db}Adapter").class_eval do
            include RailsSqlViews::ConnectionAdapters::AbstractAdapter
            include RailsSqlViews::ConnectionAdapters.const_get("#{db}Adapter")
          end
        end
      end
    end
  end
end
32.263158
82
0.698206
1ad400605d70f7cbfaa2107dad0bfd97f368e072
2,547
class Consul < Formula desc "Tool for service discovery, monitoring and configuration" homepage "https://www.consul.io" url "https://github.com/hashicorp/consul.git", :tag => "v0.9.3", :revision => "112c0603d3d6fb23ab5f15e8fdb1a761da8eaf9a" head "https://github.com/hashicorp/consul.git", :shallow => false bottle do cellar :any_skip_relocation sha256 "50cf2c2e86fcc50e250bfb2bc10777525ba9a7e9f234fcb53998599eba5f48d6" => :high_sierra sha256 "116c2a66ce62414ce9829c1e8ec93db61c8eb071dfb5832470f542b727a72b77" => :sierra sha256 "8598f078a558ecc6d2f23804280b8f8d692b40f340a387730497e2e5fc6877b0" => :el_capitan sha256 "8a3aa00a1e75e2524af83d1d2ff2cc71c5fc7e2ce0f0785ce1065acf85d4080b" => :yosemite end depends_on "go" => :build def install contents = Dir["{*,.git,.gitignore}"] gopath = buildpath/"gopath" (gopath/"src/github.com/hashicorp/consul").install contents ENV["GOPATH"] = gopath ENV.prepend_create_path "PATH", gopath/"bin" cd gopath/"src/github.com/hashicorp/consul" do system "make" bin.install "bin/consul" zsh_completion.install "contrib/zsh-completion/_consul" end end plist_options :manual => "consul agent -dev -advertise 127.0.0.1" def plist; <<-EOS.undent <?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> <key>KeepAlive</key> <dict> <key>SuccessfulExit</key> <false/> </dict> <key>Label</key> <string>#{plist_name}</string> <key>ProgramArguments</key> <array> <string>#{opt_bin}/consul</string> <string>agent</string> <string>-dev</string> <string>-advertise</string> <string>127.0.0.1</string> </array> <key>RunAtLoad</key> <true/> <key>WorkingDirectory</key> <string>#{var}</string> <key>StandardErrorPath</key> <string>#{var}/log/consul.log</string> <key>StandardOutPath</key> <string>#{var}/log/consul.log</string> </dict> </plist> EOS end test do # Workaround for Error creating agent: Failed to get advertise address: Multiple private IPs found. Please configure one. return if ENV["CIRCLECI"] || ENV["TRAVIS"] fork do exec "#{bin}/consul", "agent", "-data-dir", "." end sleep 3 system "#{bin}/consul", "leave" end end
31.060976
125
0.639183
e92b28fc2e81aeccf9aea977eee0108d111aab9d
2,004
# Be sure to restart your server when you modify this file

# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.3.4' unless defined? RAILS_GEM_VERSION

# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')

require 'enterprise/boot'

Rails::Initializer.run do |config|
  # Settings in config/environments/* take precedence over those specified here.
  # Application configuration should go into files in config/initializers
  # -- all .rb files in that directory are automatically loaded.

  # Add additional load paths for your own custom dirs
  # config.load_paths += %W( #{RAILS_ROOT}/extras )

  # Specify gems that this application depends on and have them installed with rake gems:install
  # config.gem "bj"
  # config.gem "hpricot", :version => '0.6', :source => "http://code.whytheluckystiff.net"
  # config.gem "sqlite3-ruby", :lib => "sqlite3"
  # config.gem "aws-s3", :lib => "aws/s3"

  # Only load the plugins named here, in the order given (default is alphabetical).
  # :all can be used as a placeholder for all plugins not explicitly named
  # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

  # Skip frameworks you're not going to use. To use Rails without a database,
  # you must remove the Active Record framework.
  # config.frameworks -= [ :active_record, :active_resource, :action_mailer ]

  # Activate observers that should always be running
  # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

  # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
  # Run "rake -D time" for a list of tasks for finding time zone names.
  config.time_zone = 'UTC'

  # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
  # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}')]
  # config.i18n.default_locale = :de
end
44.533333
97
0.735529
11044155e6a990579a57c705c35a75d3bbceca47
918
=begin
#RadioManager

#RadioManager

OpenAPI spec version: 2.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.3.0

=end

require 'spec_helper'
require 'json'
require 'date'

# Unit tests for RadioManagerClient::Tag
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'Tag' do
  before do
    # run before each test
    @instance = RadioManagerClient::Tag.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of Tag' do
    it 'should create an instance of Tag' do
      expect(@instance).to be_instance_of(RadioManagerClient::Tag)
    end
  end

  describe 'test attribute "name"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
21.348837
103
0.729847
28610ce41ff3d3e46485e648467cd1efde1c68a3
2,427
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. project_id = attribute('project_id') name = attribute('name') region = attribute('region') location_id = attribute('location_id') alternative_location_id = attribute('alternative_location_id') memory_size_gb = attribute('memory_size_gb') auth_enabled = attribute('auth_enabled') transit_encryption_mode = attribute('transit_encryption_mode') output_id = attribute('output_id') output_region = attribute('output_region') output_host = attribute('output_host') output_current_location_id = attribute('output_current_location_id') describe 'Outputs' do it 'should reflect inputted variables' do expect(output_region).to eq region expect(output_current_location_id).to eq(location_id).or eq(alternative_location_id) end it 'should have a valid host ip' do expect(output_host).to match(/^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$/) end it 'should have a valid id' do expect(output_id).to end_with name end end control 'redis-instance' do describe command("gcloud redis instances describe #{name} --project=#{project_id} --region=#{region} --format=json") do its('exit_status') { should eq 0 } its('stderr') { should eq '' } let(:metadata) do if subject.exit_status == 0 JSON.parse(subject.stdout, symbolize_names: true) else {} end end it "memorySizeGb matches var.memory_size_gb" do expect(metadata).to include(memorySizeGb: memory_size_gb.to_i) end it "authEnabled matches var.auth_enabled" do expect(metadata).to include(authEnabled: auth_enabled) end it "transitEncryptionMode matches var.transit_encryption_mode" do expect(metadata).to include(transitEncryptionMode: transit_encryption_mode) end end end
35.173913
121
0.699629
39b277fb252fe49ba43a413411b1e4e4827220d5
1,409
class RegistrationsController < Devise::RegistrationsController
  before_action :configure_permitted_parameters, if: :devise_controller?

  # updated from original to look for changes to user crosswalk setting
  def update_resource(resource, params)
    original_crosswalk_setting = resource.crosswalk_enabled
    saved = super(resource, params)
    if saved && resource.is_portfolio? && (resource.crosswalk_enabled != original_crosswalk_setting)
      Measure.by_user(current_user).each { |m| m.clear_cached_js }
    end
    return saved
  end

  def create
    build_resource(sign_up_params)
    if resource.harp_id.nil? || resource.harp_id.blank?
      resource.errors.add :base, "HARP ID is required"
      respond_with resource
    else
      super
    end
  end

  def after_inactive_sign_up_path_for(resource)
    resource.deactivate
    resource.save
    set_flash_message :notice, :signed_up_but_inactive
    if (resource.is_a?(User))
      Rails.logger.info "New user created with full name: #{resource.full_name}."
    end
    "#{(respond_to?(:root_path) ? root_path : "/")}user/registered_not_active"
  end

  def destroy
    super
  end

  protected

  def configure_permitted_parameters
    devise_parameter_sanitizer.permit(:account_update, keys: [:crosswalk_enabled, :harp_id])
    devise_parameter_sanitizer.permit(:sign_up, keys: [:first_name, :last_name, :harp_id, :telephone])
  end
end
29.978723
102
0.740951
6295cf0287433bf51243f9ae6376d6b0dce75be2
1,275
require 'pathname'

class Repository
  NAME = '.hangover'

  class << self
    def find(dir)
      path = Pathname.new(dir).expand_path.realpath
      begin
        try_path = path + NAME
        return new(path.to_s) if try_path.directory?
      end while (path = path.parent).to_s != '/'
      nil
    end
  end

  def initialize(dir)
    @repository = "#{dir}/#{NAME}"
    ENV['GIT_DIR'] = @repository
    ENV['GIT_WORK_TREE'] = dir
  end

  def exists!
    if File.directory?(@repository)
      Hangover.logger.warn "Repository already exists at #{@repository}"
      return
    end

    Hangover.logger.info "Initializing new hangover repo at #{@repository}"
    init
    File.open("#{@repository}/info/exclude", "w") do |f|
      f.puts NAME
      f.puts ".git"
    end
    add_all
    commit_all("Initial commit")
  end

  def gitk
    `gitk`
  end

  def init
    git 'init'
  end

  def add_all
    git 'add .'
  end

  def commit(message, args = '')
    git "commit #{args} -m \"#{message}\""
  end

  def commit_all(message)
    commit(message, '-a')
  end

  def diff
    git 'diff --unified=0'
  end

  def clean
    git 'clean'
  end

  def git(args)
    Hangover.logger.debug { "git #{args}" }
    `git #{args}`
  end
end
17.22973
75
0.574118
267c54b2c935479d730ff60fdd05858256107aa1
1,074
module Cocoadex
  class DataType < SequentialNodeElement
    TEMPLATE_NAME=:data_type

    class Field < Parameter;end

    attr_reader :abstract, :declaration, :declared_in,
      :discussion, :availability, :considerations

    attr_accessor :next_termdef

    def fields
      @fields ||= []
    end

    def constants
      @constants ||= []
    end

    def origin
      @origin
    end

    def handle_node node
      if ["Fields","Constants"].include? node.text
        # use the accessor's writer: a bare assignment here would only
        # create a local variable and the state would never be kept
        self.next_termdef = node.text
      elsif node.classes.include? "termdef" and not next_termdef.nil?
        if list = termdef_to_properties(next_termdef)
          node.css("dt").each do |field_title_node|
            field_name  = field_title_node.css("code").text
            description = field_title_node.next.text
            list << Field.new(field_name, description)
          end
          self.next_termdef = ""
        end
      end
    end

    def termdef_to_properties termdef
      case termdef
      when "Fields"    then fields
      when "Constants" then constants
      end
    end
  end
end
22.851064
69
0.627561
1a4ea76572bee97a37aad33400a3f0daf113dab8
524
require 'minitest/autorun'

class Array
  undef uniq
end

module Enumerable
  undef uniq
end

require_relative 'uniq'

class UniqTest < MiniTest::Test
  def test_without_block
    assert_equal ["a", "b", "c"], ["a", "a", "b", "b", "c"].uniq
  end

  def test_with_block
    assert_equal [["student", "sam"], ["teacher", "matz"]], [["student","sam"], ["student","george"], ["teacher","matz"]].uniq {|s| s.first }
  end

  def test_for_hash
    assert_equal([[:foo, :bar]], {foo: :bar, buzz: :bar}.uniq {|e| e.last})
  end
end
20.96
141
0.624046
61c12abe67669562cd49e0ea50710535a65f8b35
2,408
# frozen_string_literal: true class Admin::RolesController < Admin::ApplicationController before_action :set_role, only: %i[show user update] before_action :set_breadcrumbs, only: %i[new edit create update], if: -> { request.format.html? } def index prepare_meta_tags title: t('.title') @roles = Role.all end def show prepare_meta_tags title: @role.role_name @ncworkno = params[:ncworkno] @users = @role.users.includes(:departments) @users_auto = case @role.id when 6 # 查看项目地图并允许下载合同 contract_map_access_where = BaselinePositionAccess.contract_map_accesses[:project_detail_with_download] position_ids = Position.joins(:baseline_position_access).where(baseline_position_access: { contract_map_access: contract_map_access_where }).pluck(:id) User.joins(:position_users).where(position_users: { position_id: position_ids }).distinct when 30 # 查看项目地图与合同信息 contract_map_access_where = [BaselinePositionAccess.contract_map_accesses[:project_detail_with_download], BaselinePositionAccess.contract_map_accesses[:view_project_details]] position_ids = Position.joins(:baseline_position_access).where(baseline_position_access: { contract_map_access: contract_map_access_where }).pluck(:id) User.joins(:position_users).where(position_users: { position_id: position_ids }).distinct else [] end end def update to_add_user = User.find_by(clerk_code: params[:ncworkno]) @role.role_users.find_or_create_by(user: to_add_user) redirect_to admin_role_path(id: @role.id), notice: t('.update_succss') end def user to_remove_user = User.find(params[:user_id]) @role.role_users.find_by(user_id: to_remove_user.id)&.destroy redirect_to admin_role_path(id: @role.id), notice: t('.remove_succss') end def generate GenerateRoleAllWorker.perform_in(5.seconds) Role.first.update_columns(in_generating: true) redirect_to admin_roles_path, notice: t('.generate_pending') end private def set_role @role = Role.find(params[:id]) end def set_breadcrumbs @_breadcrumbs = [{ text: t('layouts.sidebar.admin.users'), link: admin_users_path }] end end
38.83871
194
0.679402
39b43bae18f63485c374c58e0cd3a295586c199e
683
cask "scrutiny" do version "9.13.0" sha256 "27df87b74681e938cc4e1f6013907d1bae08c32e988d4e65a1fcf13b2bd2a7a1" url "https://peacockmedia.software/mac/scrutiny/scrutiny.dmg" appcast "https://peacockmedia.software/mac/scrutiny/version_history.html" name "Scrutiny" homepage "https://peacockmedia.software/mac/scrutiny/" app "Scrutiny #{version.major}.app" zap trash: [ "~/Library/Application Support/Scrutiny #{version.major}", "~/Library/Caches/com.peacockmedia.Scrutiny-#{version.major}", "~/Library/Cookies/com.peacockmedia.Scrutiny-#{version.major}.binarycookies", "~/Library/Preferences/com.peacockmedia.Scrutiny-#{version.major}.plist", ] end
35.947368
81
0.751098
62532ebdeffa04e2e7a410f56aa0c04e8771f8f2
13,346
require 'webmock/rspec' require 'avro_turf/messaging' require 'avro_turf/test/fake_confluent_schema_registry_server' describe AvroTurf::Messaging do let(:registry_url) { "http://registry.example.com" } let(:client_cert) { "test client cert" } let(:client_key) { "test client key" } let(:client_key_pass) { "test client key password" } let(:logger) { Logger.new(StringIO.new) } let(:avro) { AvroTurf::Messaging.new( registry_url: registry_url, schemas_path: "spec/schemas", logger: logger, client_cert: client_cert, client_key: client_key, client_key_pass: client_key_pass ) } let(:message) { { "full_name" => "John Doe" } } let(:schema_json) do <<-AVSC { "name": "person", "type": "record", "fields": [ { "type": "string", "name": "full_name" } ] } AVSC end let(:schema) { Avro::Schema.parse(schema_json) } before do FileUtils.mkdir_p("spec/schemas") end before do stub_request(:any, /^#{registry_url}/).to_rack(FakeConfluentSchemaRegistryServer) FakeConfluentSchemaRegistryServer.clear end before do define_schema "person.avsc", schema_json end shared_examples_for "encoding and decoding with the schema from schema store" do it "encodes and decodes messages" do data = avro.encode(message, schema_name: "person") expect(avro.decode(data)).to eq message end it "allows specifying a reader's schema" do data = avro.encode(message, schema_name: "person") expect(avro.decode(data, schema_name: "person")).to eq message end it "caches parsed schemas for decoding" do data = avro.encode(message, schema_name: "person") avro.decode(data) allow(Avro::Schema).to receive(:parse).and_call_original expect(avro.decode(data)).to eq message expect(Avro::Schema).not_to have_received(:parse) end end shared_examples_for 'encoding and decoding with the schema from registry' do before do registry = AvroTurf::ConfluentSchemaRegistry.new(registry_url, logger: logger) registry.register('person', schema) registry.register('people', schema) end it 'encodes and decodes messages' do data = avro.encode(message, subject: 'person', version: 1) expect(avro.decode(data)).to eq message end it "allows specifying a reader's schema by subject and version" do data = avro.encode(message, subject: 'person', version: 1) expect(avro.decode(data, schema_name: 'person')).to eq message end it 'raises AvroTurf::SchemaNotFoundError when the schema does not exist on registry' do expect { avro.encode(message, subject: 'missing', version: 1) }.to raise_error(AvroTurf::SchemaNotFoundError) end it 'caches parsed schemas for decoding' do data = avro.encode(message, subject: 'person', version: 1) avro.decode(data) allow(Avro::Schema).to receive(:parse).and_call_original expect(avro.decode(data)).to eq message expect(Avro::Schema).not_to have_received(:parse) end end shared_examples_for 'encoding and decoding with the schema_id from registry' do before do registry = AvroTurf::ConfluentSchemaRegistry.new(registry_url, logger: logger) registry.register('person', schema) registry.register('people', schema) end it 'encodes and decodes messages' do data = avro.encode(message, schema_id: 1) expect(avro.decode(data)).to eq message end it 'raises AvroTurf::SchemaNotFoundError when the schema does not exist on registry' do expect { avro.encode(message, schema_id: 5) }.to raise_error(AvroTurf::SchemaNotFoundError) end it 'caches parsed schemas for decoding' do data = avro.encode(message, schema_id: 1) avro.decode(data) allow(Avro::Schema).to receive(:parse).and_call_original expect(avro.decode(data)).to eq message expect(Avro::Schema).not_to have_received(:parse) end end 
it_behaves_like "encoding and decoding with the schema from schema store" it_behaves_like 'encoding and decoding with the schema from registry' it_behaves_like 'encoding and decoding with the schema_id from registry' context "with a provided registry" do let(:registry) { AvroTurf::ConfluentSchemaRegistry.new(registry_url, logger: logger) } let(:avro) do AvroTurf::Messaging.new( registry: registry, schemas_path: "spec/schemas", logger: logger ) end it_behaves_like "encoding and decoding with the schema from schema store" it_behaves_like 'encoding and decoding with the schema from registry' it_behaves_like 'encoding and decoding with the schema_id from registry' it "uses the provided registry" do allow(registry).to receive(:register).and_call_original message = { "full_name" => "John Doe" } avro.encode(message, schema_name: "person") expect(registry).to have_received(:register).with("person", anything) end it "allows specifying a schema registry subject" do allow(registry).to receive(:register).and_call_original message = { "full_name" => "John Doe" } avro.encode(message, schema_name: "person", subject: "people") expect(registry).to have_received(:register).with("people", anything) end end context "with a provided schema store" do let(:schema_store) { AvroTurf::SchemaStore.new(path: "spec/schemas") } let(:avro) do AvroTurf::Messaging.new( registry_url: registry_url, schema_store: schema_store, logger: logger ) end it_behaves_like "encoding and decoding with the schema from schema store" it "uses the provided schema store" do allow(schema_store).to receive(:find).and_call_original avro.encode(message, schema_name: "person") expect(schema_store).to have_received(:find).with("person", nil) end end describe 'decoding with #decode_message' do shared_examples_for "encoding and decoding with the schema from schema store" do it "encodes and decodes messages" do data = avro.encode(message, schema_name: "person") result = avro.decode_message(data) expect(result.message).to eq message expect(result.schema_id).to eq 0 expect(result.writer_schema).to eq schema expect(result.reader_schema).to eq nil end it "allows specifying a reader's schema" do data = avro.encode(message, schema_name: "person") result = avro.decode_message(data, schema_name: "person") expect(result.message).to eq message expect(result.writer_schema).to eq schema expect(result.reader_schema).to eq schema end it "caches parsed schemas for decoding" do data = avro.encode(message, schema_name: "person") avro.decode_message(data) allow(Avro::Schema).to receive(:parse).and_call_original expect(avro.decode_message(data).message).to eq message expect(Avro::Schema).not_to have_received(:parse) end end shared_examples_for 'encoding and decoding with the schema from registry' do before do registry = AvroTurf::ConfluentSchemaRegistry.new(registry_url, logger: logger) registry.register('person', schema) registry.register('people', schema) end it 'encodes and decodes messages' do data = avro.encode(message, subject: 'person', version: 1) result = avro.decode_message(data) expect(result.message).to eq message expect(result.schema_id).to eq 0 end it "allows specifying a reader's schema by subject and version" do data = avro.encode(message, subject: 'person', version: 1) expect(avro.decode_message(data, schema_name: 'person').message).to eq message end it 'raises AvroTurf::SchemaNotFoundError when the schema does not exist on registry' do expect { avro.encode(message, subject: 'missing', version: 1) }.to raise_error(AvroTurf::SchemaNotFoundError) end it 
'caches parsed schemas for decoding' do data = avro.encode(message, subject: 'person', version: 1) avro.decode_message(data) allow(Avro::Schema).to receive(:parse).and_call_original expect(avro.decode_message(data).message).to eq message expect(Avro::Schema).not_to have_received(:parse) end end it_behaves_like "encoding and decoding with the schema from schema store" it_behaves_like 'encoding and decoding with the schema from registry' context "with a provided registry" do let(:registry) { AvroTurf::ConfluentSchemaRegistry.new(registry_url, logger: logger) } let(:avro) do AvroTurf::Messaging.new( registry: registry, schemas_path: "spec/schemas", logger: logger ) end it_behaves_like "encoding and decoding with the schema from schema store" it_behaves_like 'encoding and decoding with the schema from registry' it "uses the provided registry" do allow(registry).to receive(:register).and_call_original message = { "full_name" => "John Doe" } avro.encode(message, schema_name: "person") expect(registry).to have_received(:register).with("person", anything) end it "allows specifying a schema registry subject" do allow(registry).to receive(:register).and_call_original message = { "full_name" => "John Doe" } avro.encode(message, schema_name: "person", subject: "people") expect(registry).to have_received(:register).with("people", anything) end end context "with a provided schema store" do let(:schema_store) { AvroTurf::SchemaStore.new(path: "spec/schemas") } let(:avro) do AvroTurf::Messaging.new( registry_url: registry_url, schema_store: schema_store, logger: logger ) end it_behaves_like "encoding and decoding with the schema from schema store" it "uses the provided schema store" do allow(schema_store).to receive(:find).and_call_original avro.encode(message, schema_name: "person") expect(schema_store).to have_received(:find).with("person", nil) end end end context "validating" do subject(:encode){ avro.encode(message, schema_name: "person", validate: true) } context "for correct message" do it { expect { encode }.not_to raise_error } end context "when message has wrong type" do let(:message) { { "full_name" => 123 } } it { expect { encode }.to raise_error(Avro::SchemaValidator::ValidationError, /\.full_name expected type string, got int/) } end context "when message contains extra fields (typo in key)" do let(:message) { { "fulll_name" => "John Doe" } } it { expect { encode }.to raise_error(Avro::SchemaValidator::ValidationError, /extra field 'fulll_name'/) } end end context 'fetching and registering schema' do let(:schema_store) { AvroTurf::SchemaStore.new(path: "spec/schemas") } let(:registry) { AvroTurf::ConfluentSchemaRegistry.new(registry_url, logger: logger) } let(:avro) do AvroTurf::Messaging.new( registry: registry, schema_store: schema_store, logger: logger ) end let(:schema_id) { 234 } context 'using fetch_schema' do subject { avro.fetch_schema(subject: subj, version: version) } let(:subj) { 'subject' } let(:version) { 'version' } let(:response) { {'id' => schema_id, 'schema' => schema_json} } before do allow(registry).to receive(:subject_version).with(subj, version).and_return(response) end it 'gets schema from registry' do expect(subject).to eq([schema, schema_id]) end end context 'using fetch_schema_by_id' do subject { avro.fetch_schema_by_id(schema_id) } before do allow(registry).to receive(:fetch).with(schema_id).and_return(schema_json) end it 'gets schema from registry' do expect(subject).to eq([schema, schema_id]) end end context 'using register_schema' do let(:schema_name) { 
'schema_name' } let(:namespace) { 'namespace' } before do allow(schema_store).to receive(:find).with(schema_name, namespace).and_return(schema) end context 'when subject is not set' do subject { avro.register_schema(schema_name: schema_name, namespace: namespace) } before do allow(registry).to receive(:register).with(schema.fullname, schema).and_return(schema_id) end it 'registers schema in registry' do expect(subject).to eq([schema, schema_id]) end end context 'when subject is set' do subject { avro.register_schema(schema_name: schema_name, namespace: namespace, subject: subj) } let(:subj) { 'subject' } before do allow(registry).to receive(:register).with(subj, schema).and_return(schema_id) end it 'registers schema in registry' do expect(subject).to eq([schema, schema_id]) end end end end end
33.116625
130
0.667915
618360822f358fd8d6919951b549c8411202fab9
1,222
require 'bunny'
require 'amqp'
require 'eventmachine'

module BunnyHop
  class Reader
    def initialize(config)
      @config = config
    end

    def run(name, controller, settings = {})
      count = settings.fetch(:count, -1)
      closed = false
      EventMachine.run do
        conn = AMQP.connect(@config)
        conn.on_tcp_connection_loss {|c,s| c.reconnect()}
        channel = AMQP::Channel.new(conn, auto_recovery: true, prefetch: 1)
        queue = channel.queue(name, BunnyHop::Base::DEFAULT_MESSAGE_OPTIONS)
        queue.subscribe(:ack => true) do |meta, raw|
          next if closed
          handled = false
          for i in 0..3 do
            sleep(i) unless i == 0
            begin
              message = BunnyHop::Message.new(meta, Oj.load(raw, symbol_keys: true))
              meta.ack() && handled = true if controller.send(message.handler, message)
            rescue => e
              meta.ack() && handled = true if controller.error(e)
            end
            break if handled
          end
          count -= 1
          if count == 0 || handled == false
            conn.close { EventMachine.stop }
            closed = true
          end
        end
      end
    end
  end
end
29.095238
87
0.556465
d5929ad6d7f101b59aa4d919d07817648c48fad0
142
# coding: UTF-8

class FeatureFlagsUser < Sequel::Model
  include CartoDB::MiniSequel

  many_to_one :feature_flag
  many_to_one :user
end
12.909091
38
0.753521
18d20ef653ee0e14d6dadaa039dd28835b14df7b
626
# Be sure to restart your server when you modify this file.

# Version of your assets, change this if you want to expire all your assets.
Rails.application.config.assets.version = '1.0'

# Add additional assets to the asset load path.
# Rails.application.config.assets.paths << Emoji.images_path
# Add Yarn node_modules folder to the asset load path.
Rails.application.config.assets.paths << Rails.root.join('node_modules')

# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in the app/assets
# folder are already added.
Rails.application.config.assets.precompile += %w[admin.js admin.css]
41.733333
76
0.77476
62142ea10c710e7dda07540048c28ec76a46b332
313
require File.expand_path(File.dirname(__FILE__) + '/../test_helper')

## Armenia
class AMTest < Phonie::TestCase
  def test_local
    parse_test('+37422212345', '374', '222', '12345', 'Armenia', false)
  end

  def test_mobile
    parse_test('+37451234567', '374', '5', '1234567', 'Armenia', true)
  end
end
24.076923
73
0.661342
5df4a7d4c84cfa57ad07a19df24128bc16796fb9
223
class AddAttachmentCarouselToImages < ActiveRecord::Migration[4.2]
  def self.up
    change_table :carousels do |t|
      t.attachment :image
    end
  end

  def self.down
    remove_attachment :carousel, :image
  end
end
18.583333
66
0.713004
6a06fc0c3ec7708c53eb014bf49a28b25b394374
5,703
module Rockstar class RockstarParser < Parslet::Parser root(:blocks) # general rule(:blocks) {(function | conditional_block | block).repeat(1).as(:blocks)} rule(:eol) {match['\n']} rule(:eof) {any.absent?} rule(:space) {match('[ \t]').repeat(1)} rule(:block) {(block_part >> (eol >> block_part).repeat >> (eol.repeat(2) | eof)).as(:block)} rule(:block_part) {loop_block | function_call | increment | decrement | loop_continue | loop_break | poetic_literal} # types rule(:poetic_literal) {(poetic_string_literal | poetic_type_literal | poetic_number_literal).as(:var)} rule(:poetic_string_literal) {proper_variable.as(:var_name) >> space >> str('says') >> space >> string_literal.as(:var_value)} rule(:poetic_type_literal) {proper_variable.as(:var_name) >> space >> to_be >> type_literal.as(:var_value)} rule(:poetic_number_literal) {proper_variable.as(:var_name) >> space >> to_be >> (math_expression | number_literal).as(:var_value)} rule(:type_literal) {null_type_literal.as(:nil) | true_type_literal.as(:true) | false_type_literal.as(:false)} rule(:true_type_literal) {str('true') | str('right') | str('yes') | str('ok')} rule(:false_type_literal) {str('false') | str('wrong') | str('no') | str('lies')} rule(:null_type_literal) {str('null') | str('nothing') | str('nowhere') | str('nobody')} rule(:string_literal) {match("[^\n]").repeat(1).as(:str)} rule(:number_literal) {(unique_variable_name >> (space >> unique_variable_name).repeat).as(:number_str)} rule(:proper_variable) {(keyword.absent? >> match('[A-Z]') >> match('[a-z]').repeat >> (space >> proper_variable).repeat).repeat(1)} rule(:common_variable) {str('a') | str('an') | str('the') | str('my') | str('your')} rule(:unique_variable_name) {(keyword.absent? >> match('[a-z]')).repeat(1) >> (space >> unique_variable_name).repeat} rule(:to_be) {(str('is') | str('was') | str('were')) >> space} rule(:keyword) {math_operation | comparison_operation | str('up') | str('down') | str('and') | if_keyword | else_keyword} # math rule(:plus) {(str('plus') | str('with')).as(:+)} rule(:minus) {(str('minus') | str('without')).as(:-)} rule(:multiply) {(str('times') | str('of')).as(:*)} rule(:divide) {(str('over') | str('by')).as(:/)} rule(:math_operation) {(minus | plus | multiply | divide).as(:op)} rule(:math_expression) {unique_variable_name.as(:l_op) >> space >> math_operation >> space >> unique_variable_name.as(:r_op)} rule(:operand) {function_call | unique_variable_name} # comparison rule(:comparison_operand) {math_expression | operand} rule(:comparison) {(comparison_operand.as(:l_op) >> space >> comparison_operation >> space >> comparison_operand.as(:r_op)).as(:comparison)} rule(:comparison_operation) {(greater.as(:>) | less.as(:<) | greater_or_equal.as(:>=) | less_or_equal.as(:<=) | not_equal.as(:!=) | equal.as(:==)).as(:comparison_op)} rule(:greater) {equal >> space >> (str('higher') | str('greater') | str('bigger') | str('stronger')) >> space >> str('than')} rule(:less) {equal >> space >> (str('lower') | str('less') | str('smaller') | str('weaker')) >> space >> str('than')} rule(:greater_or_equal) {equal >> space >> str('as') >> space >> (str('high') | str('great') | str('big') | str('strong')) >> space >> str('as')} rule(:less_or_equal) {equal >> space >> str('as') >> space >> (str('low') | str('little') | str('small') | str('weak')) >> space >> str('as')} rule(:equal) {str('is')} rule(:not_equal) {str("ain't") | (equal >> space >> str('not'))} # conditionals rule(:if_keyword) {str('If')} rule(:else_keyword) {str('Else')} rule(:conditional_block) 
{(if_else_block | if_block.as(:if_block))} rule(:comparison_expression) { (comparison | comparison_operand).as(:comparison_expression)} rule(:if_block) {if_keyword >> space >> comparison_expression >> eol >> block} rule(:else_block) {else_keyword >> space.maybe >> eol >> block} rule(:if_else_block) {if_block.as(:if_block) >> else_block.as(:else_block)} # loops rule(:loop_block) {until_block.as(:until_block) | while_block.as(:while_block)} rule(:until_block) {str('Until') >> space >> comparison >> eol >> block} rule(:while_block) {str('While') >> space >> comparison >> eol >> block} rule(:loop_break) {str('Break') | str('Break it down')} rule(:loop_continue) {str('Continue') | str('Take it to the top')} rule(:increment) {str('Build') >> space >> unique_variable_name.as(:increment) >> space >> str('up')} rule(:decrement) {str('Knock') >> space >> unique_variable_name.as(:decrement) >> space >> str('down')} # functions definition rule(:function) do proper_variable.as(:function_name) >> space >> str('takes') >> space >> function_args.as(:function_args) >> eol >> function_body.as(:function_body) >> function_return end rule(:function_args) {function_arg >> (space >> str('and') >> space >> function_arg).repeat} rule(:function_arg) {unique_variable_name.as(:function_arg)} rule(:function_body) {(conditional_block | loop_block | (block_part >> eol)).repeat(1)} rule(:function_return) {str('Give back') >> space >> unique_variable_name.as(:function_return) >> (eol.repeat(2) | eof)} # functions call rule(:function_call) do proper_variable.as(:function_name) >> space >> str('taking') >> space >> function_passed_args.as(:function_passed_args) end rule(:function_passed_args) {function_passed_arg >> (str(',') >> space >> function_passed_arg).repeat} rule(:function_passed_arg) {unique_variable_name.as(:function_passed_arg)} end end
63.366667
170
0.644047
6aa4d62309dfd3e5bd7219c2235a449d2329bf16
162
if ENV["LOGSTASH_HOST"].present?
  Rails.application.configure do
    SemanticLogger.add_appender(file_name: "log/#{Rails.env}.json", formatter: :json)
  end
end
27
85
0.746914
28449ae3667efdeb11d951260f18a89c442b6527
3,603
Rails.application.configure do # Settings specified here will take precedence over those in config/application.rb. # Code is not reloaded between requests. config.cache_classes = true # Eager load code on boot. This eager loads most of Rails and # your application in memory, allowing both threaded web servers # and those relying on copy on write to perform better. # Rake tasks automatically ignore this option for performance. config.eager_load = true # Full error reports are disabled and caching is turned on. config.consider_all_requests_local = false config.action_controller.perform_caching = true # Enable Rack::Cache to put a simple HTTP cache in front of your application # Add `rack-cache` to your Gemfile before enabling this. # For large-scale production use, consider using a caching reverse proxy like # NGINX, varnish or squid. # config.action_dispatch.rack_cache = true # Disable serving static files from the `/public` folder by default since # Apache or NGINX already handles this. config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present? # Compress JavaScripts and CSS. config.assets.js_compressor = :uglifier config.assets.css_compressor = :sass # Do not fallback to assets pipeline if a precompiled asset is missed. config.assets.compile = false # Asset digests allow you to set far-future HTTP expiration dates on all assets, # yet still be able to expire them through the digest params. config.assets.digest = true # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb # Specifies the header that your server uses for sending files. # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies. # config.force_ssl = true # Use the lowest log level to ensure availability of diagnostic information # when problems arise. config.log_level = :debug # Prepend all log lines with the following tags. # config.log_tags = [ :subdomain, :uuid ] # Use a different logger for distributed setups. # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new) # Use a different cache store in production. config.cache_store = :dalli_store, (ENV["MEMCACHIER_SERVERS"] || "").split(","), {username: ENV["MEMCACHIER_USERNAME"], password: ENV["MEMCACHIER_PASSWORD"], failover: true, socket_timeout: 1.5, socket_failure_delay: 0.2} # Enable serving of images, stylesheets, and JavaScripts from an asset server. # config.action_controller.asset_host = 'http://assets.example.com' # Ignore bad email addresses and do not raise email delivery errors. # Set this to true and configure the email server for immediate delivery to raise delivery errors. # config.action_mailer.raise_delivery_errors = false # Enable locale fallbacks for I18n (makes lookups for any locale fall back to # the I18n.default_locale when a translation cannot be found). config.i18n.fallbacks = true # Send deprecation notices to registered listeners. config.active_support.deprecation = :notify # Use default logging formatter so that PID and timestamp are not suppressed. config.log_formatter = ::Logger::Formatter.new # Do not dump schema after migrations. config.active_record.dump_schema_after_migration = false end
42.388235
102
0.737441
288e36338ca795e993b281a62dd8f7164a9b0c1e
176
module Rpush
  def self.reflect
    yield reflection_stack[0] if block_given?
  end

  def self.reflection_stack
    @reflection_stack ||= [ReflectionCollection.new]
  end
end
17.6
52
0.744318
abedbdda8e1f70e3cde4dd066fca68098bb5856a
397
class CreateNotifications < ActiveRecord::Migration[6.0]
  def change
    create_table :notifications do |t|
      t.integer :contestant_id, null: false
      t.boolean :read, null: false, default: false
      t.text :encoded_message, null: false

      t.timestamps
    end

    add_index :notifications, [:contestant_id, :id]
    add_index :notifications, [:contestant_id, :read, :id]
  end
end
26.466667
58
0.690176
f8965a91d2bb48fd51ea625c3d7e404e256e14b6
254
def first_n_fibs(n)
  return [0, 1].take(n) if n <= 2

  prev_fibs = first_n_fibs(n-1)
  first_to_last = prev_fibs[-1]
  second_to_last = prev_fibs[-2]
  prev_fibs << first_to_last + second_to_last
end

p first_n_fibs(8) # => [0, 1, 1, 2, 3, 5, 8, 13]
19.538462
45
0.649606
bfd7ba0aee60ce0759e07573c8c96c18a7ea1984
7,250
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved. # This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. module OCI module DatabaseManagement # Module containing models for requests made to, and responses received from, # OCI DatabaseManagement services module Models end end end # Require models require 'oci/database_management/models/activity_time_series_metrics' require 'oci/database_management/models/add_managed_database_to_managed_database_group_details' require 'oci/database_management/models/allowed_parameter_value' require 'oci/database_management/models/awr_db_collection' require 'oci/database_management/models/awr_db_cpu_usage_collection' require 'oci/database_management/models/awr_db_cpu_usage_summary' require 'oci/database_management/models/awr_db_metric_collection' require 'oci/database_management/models/awr_db_metric_summary' require 'oci/database_management/models/awr_db_parameter_change_collection' require 'oci/database_management/models/awr_db_parameter_change_summary' require 'oci/database_management/models/awr_db_parameter_collection' require 'oci/database_management/models/awr_db_parameter_summary' require 'oci/database_management/models/awr_db_report' require 'oci/database_management/models/awr_db_snapshot_collection' require 'oci/database_management/models/awr_db_snapshot_range_collection' require 'oci/database_management/models/awr_db_snapshot_range_summary' require 'oci/database_management/models/awr_db_snapshot_summary' require 'oci/database_management/models/awr_db_sql_report' require 'oci/database_management/models/awr_db_summary' require 'oci/database_management/models/awr_db_sysstat_collection' require 'oci/database_management/models/awr_db_sysstat_summary' require 'oci/database_management/models/awr_db_top_wait_event_collection' require 'oci/database_management/models/awr_db_top_wait_event_summary' require 'oci/database_management/models/awr_db_wait_event_bucket_collection' require 'oci/database_management/models/awr_db_wait_event_bucket_summary' require 'oci/database_management/models/awr_db_wait_event_collection' require 'oci/database_management/models/awr_db_wait_event_summary' require 'oci/database_management/models/awr_query_result' require 'oci/database_management/models/change_database_parameter_details' require 'oci/database_management/models/change_database_parameters_details' require 'oci/database_management/models/change_job_compartment_details' require 'oci/database_management/models/change_managed_database_group_compartment_details' require 'oci/database_management/models/child_database' require 'oci/database_management/models/cluster_cache_metric' require 'oci/database_management/models/compare_type' require 'oci/database_management/models/create_job_details' require 'oci/database_management/models/create_managed_database_group_details' require 'oci/database_management/models/create_sql_job_details' require 'oci/database_management/models/database_credentials' require 'oci/database_management/models/database_fleet_health_metrics' require 'oci/database_management/models/database_home_metric_definition' require 'oci/database_management/models/database_home_metrics' require 'oci/database_management/models/database_io_aggregate_metrics' require 'oci/database_management/models/database_instance_home_metrics_definition' require 
'oci/database_management/models/database_parameter_summary' require 'oci/database_management/models/database_parameter_update_status' require 'oci/database_management/models/database_parameters_collection' require 'oci/database_management/models/database_status' require 'oci/database_management/models/database_storage_aggregate_metrics' require 'oci/database_management/models/database_sub_type' require 'oci/database_management/models/database_time_aggregate_metrics' require 'oci/database_management/models/database_type' require 'oci/database_management/models/database_usage_metrics' require 'oci/database_management/models/datafile' require 'oci/database_management/models/fleet_metric_definition' require 'oci/database_management/models/fleet_metric_summary_definition' require 'oci/database_management/models/fleet_status_by_category' require 'oci/database_management/models/fleet_summary' require 'oci/database_management/models/instance_details' require 'oci/database_management/models/job' require 'oci/database_management/models/job_collection' require 'oci/database_management/models/job_database' require 'oci/database_management/models/job_execution' require 'oci/database_management/models/job_execution_collection' require 'oci/database_management/models/job_execution_result_details' require 'oci/database_management/models/job_execution_result_location' require 'oci/database_management/models/job_execution_summary' require 'oci/database_management/models/job_run' require 'oci/database_management/models/job_run_collection' require 'oci/database_management/models/job_run_summary' require 'oci/database_management/models/job_summary' require 'oci/database_management/models/job_types' require 'oci/database_management/models/lifecycle_states' require 'oci/database_management/models/managed_database' require 'oci/database_management/models/managed_database_collection' require 'oci/database_management/models/managed_database_group' require 'oci/database_management/models/managed_database_group_collection' require 'oci/database_management/models/managed_database_group_summary' require 'oci/database_management/models/managed_database_summary' require 'oci/database_management/models/memory_aggregate_metrics' require 'oci/database_management/models/metric_data_point' require 'oci/database_management/models/metric_dimension_definition' require 'oci/database_management/models/object_storage_job_execution_result_details' require 'oci/database_management/models/object_storage_job_execution_result_location' require 'oci/database_management/models/parameter_scope' require 'oci/database_management/models/parent_group' require 'oci/database_management/models/pdb_status_details' require 'oci/database_management/models/remove_managed_database_from_managed_database_group_details' require 'oci/database_management/models/reset_database_parameters_details' require 'oci/database_management/models/sort_orders' require 'oci/database_management/models/sql_job' require 'oci/database_management/models/tablespace' require 'oci/database_management/models/tablespace_collection' require 'oci/database_management/models/tablespace_summary' require 'oci/database_management/models/time_series_metric_data_point' require 'oci/database_management/models/time_series_metric_definition' require 'oci/database_management/models/update_database_parameters_result' require 'oci/database_management/models/update_managed_database_group_details' # Require generated clients require 'oci/database_management/db_management_client' require 
'oci/database_management/db_management_client_composite_operations' # Require service utilities require 'oci/database_management/util'
60.92437
245
0.883172
ac7bcd83a68e520cd7a48638804f6ee9c6f31986
2,794
#!/bin/ruby # -*- coding: utf-8 -*- # # Copyright (c) 2015, msyksphinz # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the msyksphinz nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # class FieldArchInfo def initialize(f_name, info) @field_name = f_name @info_arrays = Array[] @info_arrays.push(info) end def field_name return @field_name end def set_field_name(field_name) @field_name = field_name end def info_arrays return @info_arrays end end class MultiFieldArchInfo def initialize(f_name, info_arrays) @field_name = Array[] field_name.push(f_name) @info_arrays = info_arrays end def field_name return @field_name end def push_field_name(field_name) @field_name.push(field_name) end def info_arrays return @info_arrays end end class DecodeFieldInfo def initialize (f_name, f_idx, msb, lsb) @m_name = f_name @m_idx = f_idx @m_msb = msb @m_lsb = lsb end def field_name return @m_name end def field_idx return @m_idx end def field_msb return @m_msb end def field_lsb return @m_lsb end end # DecodeFieldInfo def gen_inst_id_code (inst_mnemonic) return inst_mnemonic.split(' ')[0].gsub(/[\.:\[\]]/,'_').gsub('.', '_').upcase end def gen_inst_id (inst_mnemonic) return "InstId_t::INST_ID_" + gen_inst_id_code(inst_mnemonic) end
27.392157
86
0.73121
79a6fa4dde6dd3be99671c288792264709436e55
849
class Logtalk < Formula
  desc "Declarative object-oriented logic programming language"
  homepage "https://logtalk.org/"
  url "https://github.com/LogtalkDotOrg/logtalk3/archive/lgt3400stable.tar.gz"
  version "3.40.0"
  sha256 "61e591388fff3aa81fbd383360d24f85a270422eacf7660aec83df689342cb35"
  license "Apache-2.0"

  bottle do
    cellar :any_skip_relocation
    sha256 "1e63c6e51b8adfb080c6380cf99f449f29e4aea512dc355d188ee32d9421d0ed" => :catalina
    sha256 "852c533d1c8680912cdf33a1f271f747ec9cd67b1076e57f72cbef3f80edb3ea" => :mojave
    sha256 "d4812f740e9875219bbe7a707804cc9d3e65d7c7f412d54ba7985c9cfae920ff" => :high_sierra
    sha256 "e2ca90f0054621da9b41231a85596501f33bcaa71e3921d6ac5742d73c464b8c" => :x86_64_linux
  end

  depends_on "gnu-prolog"

  def install
    cd("scripts") { system "./install.sh", "-p", prefix }
  end
end
36.913043
94
0.792697
b9d0af5478ba8e1de48ec2fa32b787b64a7a13c4
1,529
#snippet-comment:[These are tags for the AWS doc team's sample catalog. Do not remove.]
#snippet-sourceauthor:[Doug-AWS]
#snippet-sourcedescription:[Adds items from a JSON file to a DynamoDB table.]
#snippet-keyword:[Amazon DynamoDB]
#snippet-keyword:[put_item method]
#snippet-keyword:[Ruby]
#snippet-service:[dynamodb]
#snippet-sourcetype:[full-example]
#snippet-sourcedate:[2018-03-16]
# Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# This file is licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License. A copy of the
# License is located at
#
# http://aws.amazon.com/apache2.0/
#
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

require 'aws-sdk-dynamodb'  # v2: require 'aws-sdk'
require 'json'

# Create dynamodb client in us-west-2 region
dynamodb = Aws::DynamoDB::Client.new(region: 'us-west-2')

file = File.read('movie_data.json')
movies = JSON.parse(file)

movies.each{|movie|
  params = {
    table_name: 'Movies',
    item: movie
  }

  begin
    result = dynamodb.put_item(params)
    puts 'Added movie: ' + movie['year'].to_i.to_s + ' - ' + movie['title']
  rescue Aws::DynamoDB::Errors::ServiceError => error
    puts 'Unable to add movie:'
    puts error.message
  end
}
33.23913
88
0.701112
d5768e290f234f632adabae7e6006268b6622c25
322
class FollowerPolicy < ApplicationPolicy
  def destroy?
    raise Pundit::NotAuthorizedError unless record.followed_id == user.id
    return Regular.new(record.follower)
  end

  class Scope < Scope
    def resolve
      return Regular.new(scope, User)
    end
  end

  class Regular < FlexiblePermissions::Base
  end
end
20.125
73
0.726708
18a7787efd1d8141924d62ed061be187a3a3732a
1,279
require 'rails_helper'

RSpec.describe 'PowerConverter' do
  context 'access_url' do
    [
      {
        to_convert: Sipity::Models::Attachment.create!(file: File.new(__FILE__), work_id: 1, pid: 2, predicate_name: 'attachment'),
        expected: %r{#{File.join(Figaro.env.url_host, "attachments")}/\w+}
      }, {
        to_convert: Sipity::Models::WorkArea.new(slug: 'wa-slug'),
        expected: File.join(Figaro.env.url_host, "/areas/wa-slug")
      }, {
        to_convert: Sipity::Models::SubmissionWindow.new(slug: 'sw-slug', work_area: Sipity::Models::WorkArea.new(slug: 'wa-slug')),
        expected: File.join(Figaro.env.url_host, "/areas/wa-slug/sw-slug")
      }, {
        to_convert: Sipity::Models::Work.new(id: 'w-id'),
        expected: File.join(Figaro.env.url_host, "/work_submissions/w-id")
      }
    ].each do |scenario|
      it "will convert #{scenario.fetch(:to_convert).inspect} to '#{scenario.fetch(:expected)}'" do
        expect(PowerConverter.convert(scenario.fetch(:to_convert), to: :access_url)).to match(scenario.fetch(:expected))
      end
    end

    it 'will not convert a string' do
      expect { PowerConverter.convert("Your Unconvertable", to: :access_url) }.
        to raise_error(PowerConverter::ConversionError)
    end
  end
end
41.258065
132
0.650508
91a248bc2370358ba715e82258bb70d43ff8eb12
494
# frozen_string_literal: true

module Shmup
  module Entities
    module Bullet
      class Graphics < Core::Component
        def initialize(game_object, sprite)
          super(game_object)
          @sprite = sprite
        end

        def draw
          @sprite.draw_rot(x, y, ZOrder::BULLET, object.physics.angle)
        end

        def width
          @width ||= @sprite.width
        end

        def height
          @height ||= @sprite.height
        end
      end
    end
  end
end
18.296296
70
0.552632
8782305ecbf6a6845b25130c10d6f5e62716d974
94
require "asana/cli/version"

module Asana
  module Cli
    # Your code goes here...
  end
end
11.75
28
0.680851
bbe514c8cc4f83bb2586b8709d7bfe0976149fa2
247
require "json"

Dir.chdir(__dir__)

contents = ""
lines = JSON.parse(File.read("roberta-base-vocab.json"))
lines.each do |k, v|
  token = k.sub("Ġ", "▁")
  contents << "#{token}\t-#{v}\n"
end
File.write("spiece.model.exportvocab.txt", contents)
17.642857
56
0.647773
91f5eaec72f9a57a63a54d837f6530682f46c07a
690
# Section 03 - Lesson 02 - Inversions made easy
# Isn't She Lovely - Stevie Wonder

require "#{Dir.home}/ruby/pianoforall/section03/lesson02/bossa_nova_bounce"

use_synth :piano

in_thread(name: :right_hand) do
  bossa_nova_bounce_treble(:E4, :minor7, shift: -1, bounce: 1.5)
  bossa_nova_bounce_treble(:A3, shift: -2, bounce: 1.5)
  bossa_nova_bounce_treble(:C4, bounce: 1.5)
  bossa_nova_bounce_treble(:G3, shift: -1, bounce: 1.5)
  play invert_chord(chord(:G3), -1)
end

in_thread(name: :left_hand) do
  bossa_nova_bounce_bass(:E3, bounce: 1.5)
  bossa_nova_bounce_bass(:A2, bounce: 1.5)
  bossa_nova_bounce_bass(:D3, bounce: 1.5)
  bossa_nova_bounce_bass(:G2, bounce: 1.5)
  play :G2
end
32.857143
75
0.73913
4aefd671f4a80186486d366aa353ea9ea7de1f33
6,702
require 'rails_helper' RSpec.describe ArchiveSignaturesJob, type: :job do let(:petition) { FactoryBot.create(:validated_petition, sponsors_signed: true) } let(:archived_petition) { FactoryBot.create(:archived_petition, id: petition.id) } let(:archived_signature) { archived_petition.signatures.last } it "copies every signature" do expect { described_class.perform_now(petition, archived_petition) }.to change { archived_petition.signatures.count }.from(0).to(6) end it "marks every signature as archived" do expect { described_class.perform_now(petition, archived_petition) }.to change { petition.signatures.unarchived.count }.from(6).to(0) end it "schedules a new job if it doesn't finish archiving" do expect { described_class.perform_now(petition, archived_petition, limit: 2) }.to change { enqueued_jobs.size }.from(0).to(1) end it "marks the petition as archived if it finishes archiving" do expect { described_class.perform_now(petition, archived_petition) }.to change { petition.archived_at }.from(nil).to(be_within(2.second).of(Time.current)) end context "with the creator signature" do let(:signature) { archived_petition.signatures.first } before do described_class.perform_now(petition, archived_petition) end it "assigns the creator attribute" do expect(signature).to be_creator end end context "with a sponsor signature" do let(:signature) { archived_petition.signatures.second } before do described_class.perform_now(petition, archived_petition) end it "assigns the sponsor attribute" do expect(signature).to be_sponsor end end shared_examples_for "a copied signature" do it "copies the attributes of the signature" do expect(archived_signature.uuid).to eq(signature.uuid) expect(archived_signature.state).to eq(signature.state) expect(archived_signature.number).to eq(signature.number) expect(archived_signature.name).to eq(signature.name) expect(archived_signature.email).to eq(signature.email) expect(archived_signature.postcode).to eq(signature.postcode) expect(archived_signature.location_code).to eq(signature.location_code) expect(archived_signature.constituency_id).to eq(signature.constituency_id) expect(archived_signature.ip_address).to eq(signature.ip_address) expect(archived_signature.perishable_token).to eq(signature.perishable_token) expect(archived_signature.unsubscribe_token).to eq(signature.unsubscribe_token) expect(archived_signature.notify_by_email).to eq(signature.notify_by_email) expect(archived_signature.created_at).to be_usec_precise_with(signature.created_at) expect(archived_signature.updated_at).to be_usec_precise_with(signature.updated_at) end it "is persisted" do expect(archived_signature.persisted?).to eq(true) end end context "with a pending signature" do let!(:signature) { FactoryBot.create(:pending_signature, petition: petition) } before do described_class.perform_now(petition, archived_petition) end it_behaves_like "a copied signature" end context "with a validated signature" do let!(:signature) { FactoryBot.create(:validated_signature, petition: petition, number: 7) } before do described_class.perform_now(petition, archived_petition) end it_behaves_like "a copied signature" it "copies the validated_at timestamp" do expect(archived_signature.validated_at).to be_usec_precise_with(signature.validated_at) end end context "with an invalidated signature" do let!(:invalidation) { FactoryBot.create(:invalidation, name: "Jo Public") } let!(:signature) { FactoryBot.create(:invalidated_signature, petition: petition, invalidation: invalidation) } before do 
described_class.perform_now(petition, archived_petition) end it_behaves_like "a copied signature" it "copies the invalidation assocation" do expect(archived_signature.invalidation_id).to be_usec_precise_with(signature.invalidation_id) end it "copies the invalidated_at timestamp" do expect(archived_signature.invalidated_at).to be_usec_precise_with(signature.invalidated_at) end end context "with a fradulent signature" do let!(:signature) { FactoryBot.create(:fraudulent_signature, petition: petition) } before do described_class.perform_now(petition, archived_petition) end it_behaves_like "a copied signature" end context "with a signature that has been notified about a government response" do let!(:signature) { FactoryBot.create(:validated_signature, petition: petition, government_response_email_at: 4.weeks.ago) } before do described_class.perform_now(petition, archived_petition) end it_behaves_like "a copied signature" it "copies the government_response_email_at timestamp" do expect(archived_signature.government_response_email_at).to be_usec_precise_with(signature.government_response_email_at) end end context "with a signature that has been notified about a scheduled debate" do let!(:signature) { FactoryBot.create(:validated_signature, petition: petition, debate_scheduled_email_at: 4.weeks.ago) } before do described_class.perform_now(petition, archived_petition) end it_behaves_like "a copied signature" it "copies the debate_scheduled_email_at timestamp" do expect(archived_signature.debate_scheduled_email_at).to be_usec_precise_with(signature.debate_scheduled_email_at) end end context "with a signature that has been notified about a debate outcome" do let!(:signature) { FactoryBot.create(:validated_signature, petition: petition, debate_outcome_email_at: 4.weeks.ago) } before do described_class.perform_now(petition, archived_petition) end it_behaves_like "a copied signature" it "copies the debate_outcome_email_at timestamp" do expect(archived_signature.debate_outcome_email_at).to be_usec_precise_with(signature.debate_outcome_email_at) end end context "with a signature that has been notified about a other business" do let!(:signature) { FactoryBot.create(:validated_signature, petition: petition, petition_email_at: 4.weeks.ago) } before do described_class.perform_now(petition, archived_petition) end it_behaves_like "a copied signature" it "copies the petition_email_at timestamp" do expect(archived_signature.petition_email_at).to be_usec_precise_with(signature.petition_email_at) end end end
34.193878
127
0.751567
e99e33e4f0ad81906610f020f1ad516679f96ab9
7,627
=begin #Datadog API V2 Collection #Collection of all Datadog Public endpoints. The version of the OpenAPI document: 1.0 Contact: [email protected] Generated by: https://openapi-generator.tech Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. This product includes software developed at Datadog (https://www.datadoghq.com/). Copyright 2020-Present Datadog, Inc. =end require 'date' require 'time' module DatadogAPIClient::V2 # Attributes of the created user. class UserCreateAttributes # whether the object has unparsed attributes attr_accessor :_unparsed # The email of the user. attr_accessor :email # The name of the user. attr_accessor :name # The title of the user. attr_accessor :title # Attribute mapping from ruby-style variable name to JSON key. def self.attribute_map { :'email' => :'email', :'name' => :'name', :'title' => :'title' } end # Returns all the JSON keys this model knows about def self.acceptable_attributes attribute_map.values end # Attribute type mapping. def self.openapi_types { :'email' => :'String', :'name' => :'String', :'title' => :'String' } end # List of attributes with nullable: true def self.openapi_nullable Set.new([ ]) end # Initializes the object # @param [Hash] attributes Model attributes in the form of hash def initialize(attributes = {}) if (!attributes.is_a?(Hash)) fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::UserCreateAttributes` initialize method" end # check to see if the attribute exists and convert string to symbol for hash key attributes = attributes.each_with_object({}) { |(k, v), h| if (!self.class.attribute_map.key?(k.to_sym)) fail ArgumentError, "`#{k}` is not a valid attribute in `DatadogAPIClient::V2::UserCreateAttributes`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect end h[k.to_sym] = v } if attributes.key?(:'email') self.email = attributes[:'email'] end if attributes.key?(:'name') self.name = attributes[:'name'] end if attributes.key?(:'title') self.title = attributes[:'title'] end end # Show invalid properties with the reasons. Usually used together with valid? # @return Array for valid properties with the reasons def list_invalid_properties invalid_properties = Array.new if @email.nil? invalid_properties.push('invalid value for "email", email cannot be nil.') end invalid_properties end # Check to see if the all the properties in the model are valid # @return true if the model is valid def valid? return false if @email.nil? true end # Checks equality by comparing each attribute. # @param [Object] Object to be compared def ==(o) return true if self.equal?(o) self.class == o.class && email == o.email && name == o.name && title == o.title end # @see the `==` method # @param [Object] Object to be compared def eql?(o) self == o end # Calculates hash code according to all attributes. # @return [Integer] Hash code def hash [email, name, title].hash end # Builds the object from hash # @param [Hash] attributes Model attributes in the form of hash # @return [Object] Returns the model itself def self.build_from_hash(attributes) new.build_from_hash(attributes) end # Builds the object from hash # @param [Hash] attributes Model attributes in the form of hash # @return [Object] Returns the model itself def build_from_hash(attributes) return nil unless attributes.is_a?(Hash) self.class.openapi_types.each_pair do |key, type| if attributes[self.class.attribute_map[key]].nil? 
&& self.class.openapi_nullable.include?(key) self.send("#{key}=", nil) elsif type =~ /\AArray<(.*)>/i # check to ensure the input is an array given that the attribute # is documented as an array but the input is not if attributes[self.class.attribute_map[key]].is_a?(Array) self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) }) end elsif !attributes[self.class.attribute_map[key]].nil? self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]])) end end self end # Deserializes the data based on type # @param string type Data type # @param string value Value to be deserialized # @return [Object] Deserialized data def _deserialize(type, value) case type.to_sym when :Time Time.parse(value) when :Date Date.parse(value) when :String value.to_s when :Integer value.to_i when :Float value.to_f when :Boolean if value.to_s =~ /\A(true|t|yes|y|1)\z/i true else false end when :Object # generic object (usually a Hash), return directly value when :Array # generic array, return directly value when /\AArray<(?<inner_type>.+)>\z/ inner_type = Regexp.last_match[:inner_type] value.map { |v| _deserialize(inner_type, v) } when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/ k_type = Regexp.last_match[:k_type] v_type = Regexp.last_match[:v_type] {}.tap do |hash| value.each do |k, v| hash[_deserialize(k_type, k)] = _deserialize(v_type, v) end end else # model # models (e.g. Pet) or oneOf klass = DatadogAPIClient::V2.const_get(type) res = klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value) if res.instance_of? DatadogAPIClient::V2::UnparsedObject self._unparsed = true end res end end # Returns the string representation of the object # @return [String] String presentation of the object def to_s to_hash.to_s end # to_body is an alias to to_hash (backward compatibility) # @return [Hash] Returns the object in the form of hash def to_body to_hash end # Returns the object in the form of hash # @return [Hash] Returns the object in the form of hash def to_hash hash = {} self.class.attribute_map.each_pair do |attr, param| value = self.send(attr) if value.nil? is_nullable = self.class.openapi_nullable.include?(attr) next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) end hash[param] = _to_hash(value) end hash end # Outputs non-array value in the form of hash # For object, use to_hash. Otherwise, just return the value # @param [Object] value Any valid value # @return [Hash] Returns the value in the form of hash def _to_hash(value) if value.is_a?(Array) value.compact.map { |v| _to_hash(v) } elsif value.is_a?(Hash) {}.tap do |hash| value.each { |k, v| hash[k] = _to_hash(v) } end elsif value.respond_to? :to_hash value.to_hash else value end end end end
29.447876
220
0.621214
11696c4867a86d3ed400fca81dbba0e21abfcd12
358
# frozen_string_literal: true

require 'spec_helper'

# Before running this spec again, you need to set environment variable BOLETOSIMPLES_API_TOKEN
RSpec.describe BoletoSimples::SmsDelivery do
  describe 'all', vcr: { cassette_name: 'resources/sns_delivery/all' } do
    subject { described_class.all }
    it { expect(subject.first).to be_nil }
  end
end
27.538462
94
0.765363
8750b57fc37e471582b16013585360255ea7f3e8
820
# authentication tutorial: http://railscasts.com/episodes/250-authentication-from-scratch
class User < ApplicationRecord
  attr_accessor(:password)
  before_save(:encrypt_password)

  validates_confirmation_of(:password)
  validates_presence_of(:password, :on => :create)
  validates_presence_of(:username)
  validates_uniqueness_of(:username)

  def self.authenticate(name_of_user, password)
    user = User.find_by(username: name_of_user)
    if (user != nil) && (user.password_hash == BCrypt::Engine.hash_secret(password, user.password_salt))
      return user
    else
      return nil
    end
  end

  def encrypt_password()
    if password.present?() == true
      self.password_salt = BCrypt::Engine.generate_salt()
      self.password_hash = BCrypt::Engine.hash_secret(password, password_salt)
    end
  end
end
30.37037
104
0.741463
6263923c5f88a3c8bd49992a86ca61ca92a48ac7
1,243
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

require 'google/apis/smartdevicemanagement_v1/service.rb'
require 'google/apis/smartdevicemanagement_v1/classes.rb'
require 'google/apis/smartdevicemanagement_v1/representations.rb'

module Google
  module Apis
    # Smart Device Management API
    #
    # Allow select enterprise partners to access, control, and manage Google and
    # Nest devices programmatically.
    #
    # @see https://developers.google.com/nest/device-access
    module SmartdevicemanagementV1
      VERSION = 'V1'
      REVISION = '20201022'

      # See and/or control the devices that you selected
      AUTH_SDM_SERVICE = 'https://www.googleapis.com/auth/sdm.service'
    end
  end
end
34.527778
80
0.746581
1818947d79c8e1ab63e087a416c54dc961acccfa
382
class CreateEventSubscriptions < ActiveRecord::Migration
  def change
    create_table :event_subscriptions do |t|
      t.references :event, index: true
      t.references :user, index: true

      t.timestamps null: false
    end

    add_foreign_key :event_subscriptions, :events, on_delete: :cascade
    add_foreign_key :event_subscriptions, :users, on_delete: :cascade
  end
end
31.833333
70
0.73822
f7817cfa1fd9fc8b107db75b4743e0a77ea6541c
2,235
# Copyright 2011-2015, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # --- END LICENSE_HEADER BLOCK --- require 'spec_helper' describe Permalink do describe '#permalink' do let(:master_file){ FactoryGirl.build(:master_file) } context 'permalink does not exist' do it 'returns nil' do master_file.permalink.should be_nil end it 'returns nil with query variables' do master_file.permalink({a:'b'}).should be_nil end end context 'permalink exists' do let(:permalink_url){ "http://permalink.com/object/#{master_file.pid}" } before do master_file.add_relationship(:has_permalink, permalink_url, true) end it 'returns a string' do master_file.permalink.should be_kind_of(String) end it 'appends query variables to the url' do query_var_hash = { urlappend: '/embed' } master_file.permalink(query_var_hash).should == "#{permalink_url}?#{query_var_hash.to_query}" end end context 'creating permalink' do let(:media_object) { master_file.mediaobject } it 'should get the absolute path to the object' do expect(Permalink.url_for(media_object)).to eq("http://test.host/media_objects/#{media_object.pid}") expect(Permalink.url_for(master_file)).to eq("http://test.host/media_objects/#{media_object.pid}/section/#{master_file.pid}") end it 'permalink_for raises ArgumentError if not passed mediaobject or masterfile' do expect{Permalink.permalink_for(Object.new)}.to raise_error(ArgumentError) end end end end
33.863636
133
0.685906
ac940da658cbc28d431f4c9fc3b650643792afcd
1,790
# encoding: utf-8
# frozen_string_literal: true

module RuboCop
  module Cop
    module Performance
      # This cop is used to identify usages of `count` on an
      # `Array` and `Hash` and change them to `size`.
      #
      # @example
      #   # bad
      #   [1, 2, 3].count
      #
      #   # bad
      #   {a: 1, b: 2, c: 3}.count
      #
      #   # good
      #   [1, 2, 3].size
      #
      #   # good
      #   {a: 1, b: 2, c: 3}.size
      #
      #   # good
      #   [1, 2, 3].count { |e| e > 2 }
      # TODO: Add advanced detection of variables that could
      # have been assigned to an array or a hash.
      class Size < Cop
        MSG = 'Use `size` instead of `count`.'.freeze

        def on_send(node)
          return unless eligible_node?(node)

          add_offense(node, node.loc.selector)
        end

        private

        def autocorrect(node)
          ->(corrector) { corrector.replace(node.loc.selector, 'size') }
        end

        def eligible_node?(node)
          receiver, method, args = *node
          return false unless method == :count
          return false if args

          eligible_receiver?(receiver) && !allowed_parent?(node.parent)
        end

        def eligible_receiver?(node)
          return false unless node

          array?(node) || hash?(node)
        end

        def allowed_parent?(node)
          node && node.block_type?
        end

        def array?(node)
          receiver, method = *node
          _, constant = *receiver

          node.array_type? || constant == :Array || method == :to_a
        end

        def hash?(node)
          receiver, method = *node
          _, constant = *receiver

          node.hash_type? || constant == :Hash || method == :to_h
        end
      end
    end
  end
end
22.948718
72
0.510615
5d8a7dfe42b9b4ef8f6ad93cc8a061e7d31b539b
2,179
# Licensed to Elasticsearch B.V under one or more agreements.
# Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information

require 'spec_helper'

describe 'client.cluster#stats' do
  let(:expected_args) do
    [
      'GET',
      url,
      params,
      body,
      nil
    ]
  end

  let(:url) do
    '_stats'
  end

  let(:body) do
    nil
  end

  let(:params) do
    {}
  end

  it 'performs the request' do
    expect(client_double.indices.stats).to eq({})
  end

  context 'when an index is specified' do
    let(:url) do
      'foo/_stats'
    end

    it 'performs the request' do
      expect(client_double.indices.stats(index: 'foo')).to eq({})
    end
  end

  context 'when multiple indicies are specified as a list' do
    let(:url) do
      'foo,bar/_stats'
    end

    it 'performs the request' do
      expect(client_double.indices.stats(index: ['foo', 'bar'])).to eq({})
    end
  end

  context 'when multiple indicies are specified as a string' do
    let(:url) do
      'foo,bar/_stats'
    end

    it 'performs the request' do
      expect(client_double.indices.stats(index: 'foo,bar')).to eq({})
    end
  end

  context 'when parameters are specified' do
    let(:params) do
      { expand_wildcards: true }
    end

    let(:url) do
      'foo/_stats'
    end

    it 'performs the request' do
      expect(client_double.indices.stats(index: 'foo', expand_wildcards: true)).to eq({})
    end
  end

  context 'when the fields parameter is specified as a list' do
    let(:params) do
      { fields: 'foo,bar' }
    end

    let(:url) do
      'foo/_stats/fielddata'
    end

    it 'performs the request' do
      expect(client_double.indices.stats(index: 'foo', fielddata: true, fields: [ 'foo', 'bar'])).to eq({})
    end
  end

  context 'when the groups parameter is specified as a list' do
    let(:params) do
      { groups: 'groupA,groupB' }
    end

    let(:url) do
      '_stats/search'
    end

    it 'performs the request' do
      expect(client_double.indices.stats(search: true, groups: [ 'groupA', 'groupB'])).to eq({})
    end
  end
end
19.283186
107
0.616338
bb30b37adc01d63502b36a82e0af53b2c08b77ac
1,133
# frozen_string_literal: true

Rails.application.configure do
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Show full error reports and disable caching.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false

  # Don't care if the mailer can't send.
  config.action_mailer.raise_delivery_errors = false

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true

  # Adds additional error checking when serving assets at runtime.
  # Checks for improperly declared sprockets dependencies.
  # Raises helpful error messages.
  config.assets.raise_runtime_errors = true

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true
end
32.371429
75
0.776699
ff56ef51cf2702abefaa495a534d5112466ba04c
634
module Cucumber
  module Mate
    module Files

      class StepDetector
        def initialize(path_to_a_feature_file)
          @step_files = (
            Dir[File.dirname(path_to_a_feature_file) + "/step_definitions/**/*.rb"] +
            Dir[File.dirname(path_to_a_feature_file) + "/**/*_steps.rb"]
          ).uniq
        end

        # returns [ { :file_path => path, :name => StepFile#name } ]
        def step_files_and_names
          @step_files.map do |step_file|
            { :file_path => File.expand_path(step_file), :name => StepsFile.new(step_file).name }
          end
        end
      end

    end
  end
end
24.384615
97
0.57571
39cfbbf23c16fb0611962f8d9eaa537b18ee5e11
295
module ApplicationHelper
  # a customized out link function, can add prefix to the weblink
  def link_to_external_lookup(options={})
    #url_prefix = 'https://www.google.com/search?q='
    options[:value].map do |url|
      link_to "#{url}", "#{url}"
    end
  end
end
26.818182
67
0.616949
bb544de5af5bba66d65eceddb2a9df99f7cff9d8
504
module Cofidin33
  class SellaComprobante
    def self.call(comprobante:, certificado:, llave_privada:, fecha_hora:)
      comprobante.fecha = fecha_hora
      serie, cert = ProcesaCertificado.call certificado
      comprobante.no_certificado = serie
      comprobante.certificado = cert
      cadena_original = GeneraCadenaOriginal.call(comprobante)
      sello = GeneraSello.call(cadena_original, llave_privada)
      comprobante.sello = sello
      xml = GeneraXml.call(comprobante)
    end
  end
end
33.6
74
0.736111
bbdb4cac2dc4cb17f2a167a35aeba534dcc4394b
260
class Xtrafinder < Cask
  url 'http://www.trankynam.com/xtrafinder/downloads/XtraFinder.dmg'
  homepage 'http://www.trankynam.com/xtrafinder/'
  version 'latest'

  no_checksum
  install 'XtraFinder.pkg'
  uninstall :pkgutil => 'com.trankynam.xtrafinder.*'
end
28.888889
68
0.75
62a5e4f5daae91ff4d0c04135349001a36318470
2,170
unless Hash.instance_methods.include? '-'
  Hash.class_eval do
    # removes one or more keys from a hash
    # {:red => 1, :blue => 2, :green => 3} - [:red, :blue] => {:green => 3}
    def -(v)
      hsh = self.dup
      (v.is_a?(Array) ? v : [v]).each{|k| hsh.delete(k) }
      hsh
    end
  end
end

unless Hash.instance_methods.include? 'stringify_values'
  Hash.class_eval do
    # returns a new hash with each of the values converted to a string
    # {:red => 1, :blue => 2} => {:red => '1', :blue => '2'}
    def stringify_values
      inject({}) do |options, (key, value)|
        options[key] = value.to_s
        options
      end
    end

    # returns the hash with each of the values converted to a string
    # {:red => 1, :blue => 2} => {:red => '1', :blue => '2'}
    def stringify_values!
      self.each do |key, value|
        self[key] = value.to_s
      end
      self
    end
  end
end

unless Hash.instance_methods.include? 'transform_key'
  Hash.class_eval do
    # returns a new hash with a key renamed
    # {:one => 1, :two => 2}.transform_key(:two, :three) => {:one => 1, :three => 2}
    def transform_key(old_key, new_key)
      self.dup.transform_key!(old_key, new_key)
    end

    # renames a key in a hash
    # {:one => 1, :two => 2}.transform_key(:two, :three) => {:one => 1, :three => 2}
    def transform_key!(old_key, new_key)
      self[new_key] = self.delete(old_key)
      return self
    end

    # returns a new hash with renamed keys
    # accepts a hash of key, value pairs to rename
    # {:one => 1, :two => 2}.transform_keys(:two => :three) => {:one => 1, :three => 2}
    def transform_keys(transform)
      self.dup.transform_keys!(transform)
    end

    # returns a hash with renamed keys
    # accepts a hash of key, value pairs to rename
    # {:one => 1, :two => 2}.transform_keys(:two => :three) => {:one => 1, :three => 2}
    def transform_keys!(transform)
      raise ArgumentError, "transform_keys takes a single hash argument" unless transform.is_a?(Hash)
      self.each_key do |k|
        self[transform.has_key?(k) ? transform[k] : k] = self.delete(k)
      end
      self
    end
  end
end
31.911765
101
0.588479
e81a177ef5fb0035619b5cc57cee40c07740d4a6
1,972
require 'active_support/core_ext/object/to_query'
require 'action_view'
require 'action_view/helpers'
require 'rack/utils'
require 'query_report/errors'

module QueryReportEngineHelper
  def query_report_render_filter(filter, comparator)
    hint = comparator.name
    search_tag_name = comparator.search_tag_name
    value = comparator.param_value
    method_name = :"query_report_#{filter.type.to_s}_filter"
    default_method_name = :"query_report_default_#{filter.type.to_s}_filter"

    if respond_to? method_name
      send method_name, search_tag_name, value, :placeholder => hint
    elsif respond_to? default_method_name
      send default_method_name, search_tag_name, value, :placeholder => hint
    else
      raise QueryReport::FilterNotDefined, %Q{#{filter.type.to_s} filter is not defined. Please define a method as following, def #{method_name}(name, value, options={}) text_field_tag name, value, options end }
    end
  end

  def render_query_report(report = nil)
    report ||= @report
    render :partial => "query_report/list", locals: {report: report}
  end

  def export_report_url_with_format(format)
    url_for(request.params.merge({arbitrary_argument:'value', format: format}))
  end

  def hash_to_hidden_fields(hash)
    cleaned_hash = hash.reject { |k, v| v.nil? }
    pairs = cleaned_hash.to_query.split(Rack::Utils::DEFAULT_SEP)
    tags = pairs.map do |pair|
      key, value = pair.split('=', 2).map { |str| Rack::Utils.unescape(str) }
      hidden_field_tag(key, value)
    end
    tags.join("\n").html_safe
  end

  def query_report_search_form(report, &block)
    if !report.array_record?
      form_tag(url_for({}), (QueryReport.config.search_form_options || {}).merge({method: :get, remote: @remote}), &block)
    else
      search_form_for(report.search, url: url_for({}), remote: @remote, html: (QueryReport.config.search_form_options || {}).merge(method: :get), &block)
    end
  end
end
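# Rough illustration of hash_to_hidden_fields above (the input hash is made up):
# nil values are dropped, the rest is flattened with to_query, and each key/value
# pair comes back as a hidden input so a filtered report can be re-submitted,
# e.g. for an export link.
#
#   hash_to_hidden_fields(utf8: '✓', q: { status_eq: 'active' }, page: nil)
#   # produces, roughly:
#   #   <input type="hidden" name="q[status_eq]" value="active" />
#   #   <input type="hidden" name="utf8" value="✓" />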
34.596491
153
0.707404
0193d64f7b9f2e5dc4dd83669e060ab1c41951a1
122
module UserProfile
  class Profile < ApplicationRecord
    belongs_to :user, class_name: UserProfile.user_class
  end
end
20.333333
56
0.795082
6ad1ce5f4deb4caf7d75b55bf493bda8968e0c34
7,864
require 'spec_helper_acceptance' #fact based two stage confine #confine array confine_array = [ (fact('operatingsystem') == 'Ubuntu' && fact('operatingsystemrelease') == '10.04'), (fact('osfamily') == 'RedHat' && fact('operatingsystemmajrelease') == '5'), (fact('operatingsystem') == 'Debian' && fact('operatingsystemmajrelease') == '6'), fact('osfamily') == 'Suse' ] stop_test = false stop_test = true if UNSUPPORTED_PLATFORMS.any?{ |up| fact('osfamily') == up} || confine_array.any? describe 'Acceptance case one', :unless => stop_test do after :all do shell('pkill -f tomcat', :acceptable_exit_codes => [0,1]) shell('rm -rf /opt/tomcat*', :acceptable_exit_codes => [0,1]) shell('rm -rf /opt/apache-tomcat*', :acceptable_exit_codes => [0,1]) end context 'Initial install Tomcat and verification' do it 'Should apply the manifest without error' do pp = <<-EOS class{'java':} class{'gcc':} $java_home = $::osfamily ? { 'RedHat' => '/etc/alternatives/java_sdk', 'Debian' => "/usr/lib/jvm/java-7-openjdk-${::architecture}", default => undef } class jsvc { staging::extract { 'commons-daemon-native.tar.gz': source => "/opt/apache-tomcat/bin/commons-daemon-native.tar.gz", target => "/opt/apache-tomcat/bin", unless => "test -d /opt/apache-tomcat/bin/commons-daemon-1.0.15-native-src", } -> exec { 'configure jsvc': command => "JAVA_HOME=${java_home} configure --with-java=${java_home}", creates => "/opt/apache-tomcat/bin/commons-daemon-1.0.15-native-src/unix/Makefile", cwd => "/opt/apache-tomcat/bin/commons-daemon-1.0.15-native-src/unix", path => "/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/opt/apache-tomcat/bin/commons-daemon-1.0.15-native-src/unix", require => [ Class['gcc'], Class['java'] ], provider => shell, } -> exec { 'make jsvc': command => 'make', creates => "/opt/apache-tomcat/bin/commons-daemon-1.0.15-native-src/unix/jsvc", cwd => "/opt/apache-tomcat/bin/commons-daemon-1.0.15-native-src/unix", path => "/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/opt/apache-tomcat/bin/commons-daemon-1.0.15-native-src/unix", provider => shell, } -> file { 'jsvc': ensure => link, path => "/opt/apache-tomcat/bin/jsvc", target => "/opt/apache-tomcat/bin/commons-daemon-1.0.15-native-src/unix/jsvc", } } # The default tomcat::install { '/opt/apache-tomcat': source_url => '#{TOMCAT8_RECENT_SOURCE}', } -> class { 'jsvc': } -> tomcat::instance { 'tomcat_one': catalina_base => '/opt/apache-tomcat/tomcat8-jsvc', user => 'tomcat8', group => 'tomcat8', java_home => $java_home, use_jsvc => true, } tomcat::config::server { 'tomcat8-jsvc': catalina_base => '/opt/apache-tomcat/tomcat8-jsvc', port => '80', } tomcat::config::server::connector { 'tomcat8-jsvc': catalina_base => '/opt/apache-tomcat/tomcat8-jsvc', port => '80', protocol => 'HTTP/1.1', additional_attributes => { 'redirectPort' => '443' }, } tomcat::config::server::connector { 'tomcat8-ajp': catalina_base => '/opt/apache-tomcat/tomcat8-jsvc', connector_ensure => absent, port => '8309', } tomcat::war { 'war_one.war': catalina_base => '/opt/apache-tomcat/tomcat8-jsvc', war_source => '#{SAMPLE_WAR}', } tomcat::setenv::entry { 'JAVA_HOME': value => $java_home, } EOS apply_manifest(pp, :catch_failures => true) apply_manifest(pp, :catch_changes => true) shell('sleep 15') end it 'Should be serving a page on port 80' do shell('curl localhost:80/war_one/hello.jsp', :acceptable_exit_codes => 0) do |r| r.stdout.should match(/Sample Application JSP Page/) end end end context 'Stop tomcat with verification!!!' 
do it 'Should apply the manifest without error' do pp = <<-EOS $java_home = $::osfamily ? { 'RedHat' => '/etc/alternatives/java_sdk', 'Debian' => "/usr/lib/jvm/java-7-openjdk-${::architecture}", default => undef } tomcat::service { 'jsvc-default': service_ensure => stopped, catalina_home => '/opt/apache-tomcat', catalina_base => '/opt/apache-tomcat/tomcat8-jsvc', use_jsvc => true, java_home => $java_home, user => 'tomcat8', } EOS apply_manifest(pp, :catch_failures => true, :acceptable_exit_codes => [0,2]) shell('sleep 15') end it 'Should not be serving a page on port 80' do shell('curl localhost:80/war_one/hello.jsp', :acceptable_exit_codes => 7) end end context 'Start Tomcat with verification' do it 'Should apply the manifest without error' do pp = <<-EOS $java_home = $::osfamily ? { 'RedHat' => '/etc/alternatives/java_sdk', 'Debian' => "/usr/lib/jvm/java-7-openjdk-${::architecture}", default => undef } tomcat::service { 'jsvc-default': service_ensure => running, catalina_home => '/opt/apache-tomcat', catalina_base => '/opt/apache-tomcat/tomcat8-jsvc', use_jsvc => true, java_home => $java_home, user => 'tomcat8', } EOS apply_manifest(pp, :catch_failures => true, :acceptable_exit_codes => [0,2]) shell('sleep 15') end it 'Should be serving a page on port 80' do shell('curl localhost:80/war_one/hello.jsp', :acceptable_exit_codes => 0) do |r| r.stdout.should match(/Sample Application JSP Page/) end end end context 'un-deploy the war with verification' do it 'Should apply the manifest without error' do pp = <<-EOS tomcat::war { 'war_one.war': catalina_base => '/opt/apache-tomcat/tomcat8-jsvc', war_source => '#{SAMPLE_WAR}', war_ensure => absent, } EOS apply_manifest(pp, :catch_failures => true, :acceptable_exit_codes => [0,2]) shell('sleep 15') end it 'Should not have deployed the war' do shell('curl localhost:80/war_one/hello.jsp', :acceptable_exit_codes => 0) do |r| r.stdout.should eq("") end end end context 'remove the connector with verification' do it 'Should apply the manifest without error' do pp = <<-EOS $java_home = $::osfamily ? { 'RedHat' => '/etc/alternatives/java_sdk', 'Debian' => "/usr/lib/jvm/java-7-openjdk-${::architecture}", default => undef } tomcat::config::server::connector { 'tomcat8-jsvc': connector_ensure => 'absent', catalina_base => '/opt/apache-tomcat/tomcat8-jsvc', port => '80', notify => Tomcat::Service['jsvc-default'] } tomcat::service { 'jsvc-default': service_ensure => running, catalina_home => '/opt/apache-tomcat', catalina_base => '/opt/apache-tomcat/tomcat8-jsvc', java_home => $java_home, use_jsvc => true, user => 'tomcat8', } EOS apply_manifest(pp, :catch_failures => true, :acceptable_exit_codes => [0,2]) shell('sleep 15') end it 'Should not be able to serve pages over port 80' do shell('curl localhost:80', :acceptable_exit_codes => 7) end end end
36.239631
156
0.56854
87be11eecccdf89a5609451133e3903a99319df0
943
module SearchParserMatchers
  class QueryFor
    def initialize(term)
      @term = term
    end

    def matches?(parser)
      @parser = parser
      @parser.query == @term
    end

    def failure_message
      %{expected parser to have query for "#{@term}" but got query "#{@parser.query}"}
    end
  end

  def query_for(term)
    QueryFor.new(term)
  end

  class FilterOn
    def initialize(name, value)
      case value
      when Range
        if value.begin.respond_to?(:to_d) && value.end.respond_to?(:to_d)
          value = value.begin.to_d..value.end.to_d
        end
      end
      @filter = {name => value}
    end

    def matches?(parser)
      @parser = parser
      @parser.filters.include?(@filter)
    end

    def failure_message
      %{expected parser to filter on #{@filter.inspect}, but it filtered on #{@parser.filters.inspect}}
    end
  end

  def filter_on(name, value)
    FilterOn.new(name, value)
  end
end
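# Example of how these matchers read inside a spec. SearchParser and the query
# syntax below are placeholders for whatever parser implementation the suite tests.
RSpec.describe 'a search parser' do
  include SearchParserMatchers

  it 'separates the free-text query from the filters' do
    parser = SearchParser.new('shoes price:10..20')

    expect(parser).to query_for('shoes')
    expect(parser).to filter_on('price', 10..20)
  end
end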
20.06383
103
0.616119
28feddbd86ae6bc820473f2ec993b9c274dae256
1,106
require "active_support/core_ext/module/delegation" require "active_support/concern" module MiqToolsServices module SidekiqWorkerMixin extend ActiveSupport::Concern included do delegate :sidekiq_queue, :workers, :running?, :to => self end module ClassMethods def sidekiq_queue sidekiq_options unless sidekiq_options_hash? # init the sidekiq_options_hash sidekiq_options_hash["queue"] end def workers queue = sidekiq_queue.to_s workers = Sidekiq::Workers.new workers = workers.select do |_processid, _threadid, work| work["queue"] == queue && work.fetch_path("payload", "class") == name end workers.sort_by! { |_processid, _threadid, work| work.fetch_path("payload", "enqueued_at") } workers end def running?(workers = nil) (workers || self.workers).any? end end def first_unique_worker?(workers = nil) _processid, _threadid, work = (workers || self.workers).first work.nil? || work.fetch_path("payload", "jid") == jid end end end
26.97561
100
0.654611
1dcdc124c854648847a4a238c33325fd46f567e8
979
Praxis::Application.configure do |application|
  # This is a commented out copy of the default Praxis layout
  # This example app follows the standard practices, so there is no reason to override it
  # If we wanted to organize the structure and ordering of files, we can uncomment the layout
  # and define it at our own leisure
  # application.layout do
  #   map :initializers, 'config/initializers/**/*'
  #   map :lib, 'lib/**/*'
  #   map :design, 'design/' do
  #     map :api, 'api.rb'
  #     map :helpers, '**/helpers/**/*'
  #     map :types, '**/types/**/*'
  #     map :media_types, '**/media_types/**/*'
  #     map :endpoints, '**/endpoints/**/*'
  #   end
  #   map :app, 'app/' do
  #     map :models, 'models/**/*'
  #     map :responses, '**/responses/**/*'
  #     map :exceptions, '**/exceptions/**/*'
  #     map :concerns, '**/concerns/**/*'
  #     map :resources, '**/resources/**/*'
  #     map :controllers, '**/controllers/**/*'
  #   end
  # end
end
37.653846
93
0.579162
87045238c82d4821a4d6652f012c11b0a3a1bbef
12,632
# Use this hook to configure devise mailer, warden hooks and so forth. # Many of these configuration options can be set straight in your model. Devise.setup do |config| # The secret key used by Devise. Devise uses this key to generate # random tokens. Changing this key will render invalid all existing # confirmation, reset password and unlock tokens in the database. config.secret_key = ENV["DEVISE_SECRET_KEY"] # ==> Mailer Configuration # Configure the e-mail address which will be shown in Devise::Mailer, # note that it will be overwritten if you use your own mailer class # with default "from" parameter. config.mailer_sender = "Dabble Me <hello@#{ENV['MAIN_DOMAIN']}>" # Configure the class responsible to send e-mails. # config.mailer = 'Devise::Mailer' # ==> ORM configuration # Load and configure the ORM. Supports :active_record (default) and # :mongoid (bson_ext recommended) by default. Other ORMs may be # available as additional gems. require 'devise/orm/active_record' # ==> Configuration for any authentication mechanism # Configure which keys are used when authenticating a user. The default is # just :email. You can configure it to use [:username, :subdomain], so for # authenticating a user, both parameters are required. Remember that those # parameters are used only when authenticating and not when retrieving from # session. If you need permissions, you should implement that in a before filter. # You can also supply a hash where the value is a boolean determining whether # or not authentication should be aborted when the value is not present. # config.authentication_keys = [ :email ] # Configure parameters from the request object used for authentication. Each entry # given should be a request method and it will automatically be passed to the # find_for_authentication method and considered in your model lookup. For instance, # if you set :request_keys to [:subdomain], :subdomain will be used on authentication. # The same considerations mentioned for authentication_keys also apply to request_keys. # config.request_keys = [] # Configure which authentication keys should be case-insensitive. # These keys will be downcased upon creating or modifying a user and when used # to authenticate or find a user. Default is :email. config.case_insensitive_keys = [ :email ] # Configure which authentication keys should have whitespace stripped. # These keys will have whitespace before and after removed upon creating or # modifying a user and when used to authenticate or find a user. Default is :email. config.strip_whitespace_keys = [ :email ] # Tell if authentication through request.params is enabled. True by default. # It can be set to an array that will enable params authentication only for the # given strategies, for example, `config.params_authenticatable = [:database]` will # enable it only for database (email + password) authentication. # config.params_authenticatable = true # Tell if authentication through HTTP Auth is enabled. False by default. # It can be set to an array that will enable http authentication only for the # given strategies, for example, `config.http_authenticatable = [:database]` will # enable it only for database authentication. The supported strategies are: # :database = Support basic authentication with authentication key + password # config.http_authenticatable = false # If http headers should be returned for AJAX requests. True by default. # config.http_authenticatable_on_xhr = true # The realm used in Http Basic Authentication. 'Application' by default. 
# config.http_authentication_realm = 'Application' # It will change confirmation, password recovery and other workflows # to behave the same regardless if the e-mail provided was right or wrong. # Does not affect registerable. # config.paranoid = true # By default Devise will store the user in session. You can skip storage for # particular strategies by setting this option. # Notice that if you are skipping storage for all authentication paths, you # may want to disable generating routes to Devise's sessions controller by # passing skip: :sessions to `devise_for` in your config/routes.rb config.skip_session_storage = [:http_auth] # By default, Devise cleans up the CSRF token on authentication to # avoid CSRF token fixation attacks. This means that, when using AJAX # requests for sign in and sign up, you need to get a new CSRF token # from the server. You can disable this option at your own risk. # config.clean_up_csrf_token_on_authentication = true # ==> Configuration for :database_authenticatable # For bcrypt, this is the cost for hashing the password and defaults to 10. If # using other encryptors, it sets how many times you want the password re-encrypted. # # Limiting the stretches to just one in testing will increase the performance of # your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use # a value less than 10 in other environments. Note that, for bcrypt (the default # encryptor), the cost increases exponentially with the number of stretches (e.g. # a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation). config.stretches = Rails.env.test? ? 1 : 10 # Setup a pepper to generate the encrypted password. # config.pepper = 'fd5567a150b39b61ac1c6c71668ed5cec0911cb31b3881fd5ff7197a7a9b1c88ca07f83936a9dda34d8e6db30675a5ecfa211ce9d95e8062b7977a9b80fbfb9e' # ==> Configuration for :confirmable # A period that the user is allowed to access the website even without # confirming their account. For instance, if set to 2.days, the user will be # able to access the website for two days without confirming their account, # access will be blocked just in the third day. Default is 0.days, meaning # the user cannot access the website without confirming their account. # config.allow_unconfirmed_access_for = 2.days # A period that the user is allowed to confirm their account before their # token becomes invalid. For example, if set to 3.days, the user can confirm # their account within 3 days after the mail was sent, but on the fourth day # their account can't be confirmed with the token any more. # Default is nil, meaning there is no restriction on how long a user can take # before confirming their account. # config.confirm_within = 3.days # If true, requires any email changes to be confirmed (exactly the same way as # initial account confirmation) to be applied. Requires additional unconfirmed_email # db field (see migrations). Until confirmed, new email is stored in # unconfirmed_email column, and copied to email column on successful confirmation. config.reconfirmable = true # Defines which key will be used when confirming an account # config.confirmation_keys = [ :email ] # ==> Configuration for :rememberable # The time the user will be remembered without asking for credentials again. config.remember_for = 2.weeks # Invalidates all the remember me tokens when the user signs out. config.expire_all_remember_me_on_sign_out = true # If true, extends the user's remember period when remembered via cookie. 
config.extend_remember_period = false # Options to be passed to the created cookie. For instance, you can set # secure: true in order to force SSL only cookies. config.rememberable_options = { secure: true } # ==> Configuration for :validatable # Range for password length. config.password_length = 6..128 # Email regex used to validate email formats. It simply asserts that # one (and only one) @ exists in the given string. This is mainly # to give user feedback and not to assert the e-mail validity. # config.email_regexp = /\A[^@]+@[^@]+\z/ # ==> Configuration for :timeoutable # The time you want to timeout the user session without activity. After this # time the user will be asked for credentials again. Default is 30 minutes. # config.timeout_in = 30.minutes # If true, expires auth token on session timeout. # config.expire_auth_token_on_timeout = false # ==> Configuration for :lockable # Defines which strategy will be used to lock an account. # :failed_attempts = Locks an account after a number of failed attempts to sign in. # :none = No lock strategy. You should handle locking by yourself. # config.lock_strategy = :failed_attempts # Defines which key will be used when locking and unlocking an account # config.unlock_keys = [ :email ] # Defines which strategy will be used to unlock an account. # :email = Sends an unlock link to the user email # :time = Re-enables login after a certain amount of time (see :unlock_in below) # :both = Enables both strategies # :none = No unlock strategy. You should handle unlocking by yourself. # config.unlock_strategy = :both # Number of authentication tries before locking an account if lock_strategy # is failed attempts. # config.maximum_attempts = 20 # Time interval to unlock the account if :time is enabled as unlock_strategy. # config.unlock_in = 1.hour # Warn on the last attempt before the account is locked. # config.last_attempt_warning = false # ==> Configuration for :recoverable # # Defines which key will be used when recovering the password for an account # config.reset_password_keys = [ :email ] # Time interval you can reset your password with a reset password key. # Don't put a too small interval or your users won't have the time to # change their passwords. config.reset_password_within = 6.hours # ==> Configuration for :encryptable # Allow you to use another encryption algorithm besides bcrypt (default). You can use # :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1, # :authlogic_sha512 (then you should set stretches above to 20 for default behavior) # and :restful_authentication_sha1 (then you should set stretches to 10, and copy # REST_AUTH_SITE_KEY to pepper). # # Require the `devise-encryptable` gem when using anything other than bcrypt # config.encryptor = :sha512 # ==> Scopes configuration # Turn scoped views on. Before rendering "sessions/new", it will first check for # "users/sessions/new". It's turned off by default because it's slower if you # are using only default views. # config.scoped_views = false # Configure the default scope given to Warden. By default it's the first # devise role declared in your routes (usually :user). # config.default_scope = :user # Set this configuration to false if you want /users/sign_out to sign out # only the current scope. By default, Devise signs out all scopes. # config.sign_out_all_scopes = true # ==> Navigation configuration # Lists the formats that should be treated as navigational. 
Formats like # :html, should redirect to the sign in page when the user does not have # access, but formats like :xml or :json, should return 401. # # If you have any extra navigational formats, like :iphone or :mobile, you # should add them to the navigational formats lists. # # The "*/*" below is required to match Internet Explorer requests. # config.navigational_formats = ['*/*', :html] # The default HTTP method used to sign out a resource. Default is :delete. config.sign_out_via = :delete # ==> OmniAuth # Add a new OmniAuth provider. Check the wiki for more information on setting # up on your models and hooks. # config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo' # ==> Warden configuration # If you want to use other strategies, that are not supported by Devise, or # change the failure app, you can configure them inside the config.warden block. # # config.warden do |manager| # manager.intercept_401 = false # manager.default_strategies(scope: :user).unshift :some_external_strategy # end # ==> Mountable engine configurations # When using Devise inside an engine, let's call it `MyEngine`, and this engine # is mountable, there are some extra configurations to be taken into account. # The following options are available, assuming the engine is mounted as: # # mount MyEngine, at: '/my_engine' # # The router that invoked `devise_for`, in the example above, would be: # config.router_name = :my_engine # # When using omniauth, Devise cannot automatically set Omniauth path, # so you need to do it manually. For the users scope, it would be: # config.omniauth_path_prefix = '/my_engine/users/auth' end
48.584615
150
0.747863
bfba13ae50c3fb7e0b48e6558d2321e75e5107d7
1,028
class DebateOutcome < ActiveRecord::Base
  include Translatable

  translate :overview, :transcript_url, :video_url, :debate_pack_url

  belongs_to :petition, touch: true

  validates :petition, presence: true
  validates :debated_on, presence: true, if: :debated?

  with_options url: true, length: { maximum: 500 } do
    validates :transcript_url_en, :video_url_en, :debate_pack_url_en
    validates :transcript_url_gd, :video_url_gd, :debate_pack_url_gd
  end

  has_one_attached :image

  validates :image, image: {
    content_type: "image/jpeg",
    byte_size: 512.kilobytes,
    dimensions: { width: 600..1800, height: 338..1200, ratio: (1.5)..(1.8) }
  }

  after_create do
    Appsignal.increment_counter("petition.debated", 1)

    petition.touch(:debate_outcome_at) unless petition.debate_outcome_at?
  end

  after_save do
    petition.update_columns(debate_state: debate_state)
  end

  def date
    debated_on
  end

  private

  def debate_state
    debated? ? 'debated' : 'not_debated'
  end
end
22.347826
73
0.715953
18b52bc011ba25ab0c33135e14acb1a3be409b8c
1,027
# frozen_string_literal: true

require 'rails_helper'

module Validators
  RSpec.describe PhoneContract, dbclean: :after_each do
    subject do
      described_class.new.call(params)
    end

    describe 'missing kind and full_phone_number field' do
      let(:params) do
        { }
      end

      let(:error_message) {{:kind => ["is missing", "must be a string"], :full_phone_number => ["is missing", "must be a string"]}}

      it "fails" do
        expect(subject).not_to be_success
        expect(subject.errors.to_h).to eq error_message
      end
    end

    describe 'empty kind and full_phone_number fields' do
      let(:params) do
        {kind: '', full_phone_number: ''}
      end

      it 'success' do
        expect(subject).to be_success
      end
    end

    describe 'passing valid kind and full_phone_number fields' do
      let(:params) do
        {kind: 'test', full_phone_number: '9898989898'}
      end

      it 'passes' do
        expect(subject).to be_success
      end
    end
  end
end
20.137255
131
0.623174
79e7e9fae0bef6869a2d9b27942c3e35ba70757c
126
test_cask 'appcast-invalid-sha256' do
  appcast 'http://localhost/appcast.xml',
          :sha256 => 'not a valid shasum'
end
25.2
41
0.674603
1a5692885af869cd1b89e523d69f49a2086e8a2b
1,387
require_relative '../../../kitchen/data/spec_helper'

suffix = node['tomcat']['base_version'].to_i < 7 ? node['tomcat']['base_version'] : ''

describe 'should be running tomcat6 on port 8080' do
  describe service("tomcat#{suffix}") do
    it { should be_enabled }
    it { should be_running }
  end

  describe port(8080) do
    it { should be_listening }
  end
end

describe 'should be running nginx on port 80' do
  describe service('nginx') do
    it { should be_enabled }
    it { should be_running }
  end

  describe port(80) do
    it { should be_listening }
  end
end

describe 'should be configured to run a processer' do
  describe file("/var/lib/tomcat#{suffix}/webapps/releases/0.0.4/WEB-INF/classes/ice.properties") do
    its(:content) { should match(/ice\.processor=true/) }
  end
end

describe 'should be configured to run a reader' do
  describe file("/var/lib/tomcat#{suffix}/webapps/releases/0.0.4/WEB-INF/classes/ice.properties") do
    its(:content) { should match(/ice\.reader=true/) }
  end
end

describe 'should be configured to pull billing files from 90 days back' do
  describe file("/var/lib/tomcat#{suffix}/webapps/releases/0.0.4/WEB-INF/classes/ice.properties") do
    processing_start_millis = (Date.today - 90).strftime('%Q')[0..-6] # drop last 6 digits
    its(:content) { should match(/ice\.startmillis=#{processing_start_millis}\d+{5,5}/) }
  end
end
30.822222
100
0.69863
acfe150589459fd5bc2e915533ccb9b0d8cf2c6a
422
require "spec_helper" module YieldStarClient module GetLeaseTermRent describe Request do it "is configured to get lease term rents" do expect(described_class.lease_term_request_opts[:request_element]) .to eq :lease_term_rent_unit_request end it "has the correct SOAP_ACTION" do expect(described_class::SOAP_ACTION).to eq :get_lease_term_rent end end end end
24.823529
73
0.71564
1105ec23145cf941a0929d705484d50d7fcba518
2,769
require 'bundler' require 'thor' require_relative 'templates/resource' puts 'Loaded Resource Generator' module Voom module Generators class Resource < Thor::Group include Thor::Actions argument :resource, types: :string def self.source_root __dir__ end # These methods need to be located above the block below where they are used no_commands do def _r_ ::Resource.new(resource) end end def commands %i(create update delete).each do |cmd| create_file File.join(_r_.dirs.commands, _r_.plural.lcase.resource, "#{cmd}.rb"), `#{__dir__}/ribosome #{__dir__}/templates/command_#{cmd}.rb.dna #{resource}` end end def controllers create_file File.join(_r_.dirs.controller, "#{_r_.plural.lcase.resource}_controller.rb"), `#{__dir__}/ribosome #{__dir__}/templates/controller.rb.dna #{resource}` end def db create_file File.join(_r_.dirs.db_migrations, "add_#{_r_.plural.lcase.resource}.rb"), `#{__dir__}/ribosome #{__dir__}/templates/db_migration.rb.dna #{resource}` end def helpers create_file File.join(_r_.dirs.helpers, "#{_r_.plural.lcase.resource}.rb"), `#{__dir__}/ribosome #{__dir__}/templates/helper.rb.dna #{resource}` end def models create_file File.join(_r_.dirs.models, "#{_r_.lcase.resource}.rb"), `#{__dir__}/ribosome #{__dir__}/templates/model.rb.dna #{resource}` end def presenters create_file File.join(_r_.dirs.presenters, "#{_r_.plural.lcase.resource}.pom"), `#{__dir__}/ribosome #{__dir__}/templates/presenters/top.pom.dna #{resource}` create_file File.join(_r_.dirs.presenters, _r_.plural.lcase.resource, 'cards', "#{_r_.lcase.resource}.pom"), `#{__dir__}/ribosome #{__dir__}/templates/presenters/card.pom.dna #{resource}` %i(add edit delete).each do |dlg| create_file File.join(_r_.dirs.presenters, _r_.plural.lcase.resource, 'dialogs', "#{dlg}.pom"), `#{__dir__}/ribosome #{__dir__}/templates/presenters/#{dlg}_dialog.pom.dna #{resource}` end create_file File.join(_r_.dirs.presenters, _r_.plural.lcase.resource, 'lists', "#{_r_.plural.lcase.resource}.pom"), `#{__dir__}/ribosome #{__dir__}/templates/presenters/list.pom.dna #{resource}` end def queries create_file File.join(_r_.dirs.queries, "#{_r_.plural.lcase.resource}.rb"), `#{__dir__}/ribosome #{__dir__}/templates/query.rb.dna #{resource}` end end end end
37.931507
123
0.609967
4af0fcc86f5f12e73ba28e2cfacb3868cc71aec9
899
require 'test_helper'

class UserMailerTest < ActionMailer::TestCase

  test "account_activation" do
    user = users(:michael)
    user.activation_token = User.new_token
    mail = UserMailer.account_activation(user)
    assert_equal "Account Activation", mail.subject
    assert_equal [user.email], mail.to
    assert_equal ["[email protected]"], mail.from
    assert_match user.name, mail.body.encoded
    assert_match user.activation_token, mail.body.encoded
    assert_match CGI.escape(user.email), mail.body.encoded
  end

  test "password_reset" do
    user = users(:michael)
    user.reset_token = User.new_token
    mail = UserMailer.password_reset(user)
    assert_equal "Password reset", mail.subject
    assert_equal [user.email], mail.to
    assert_equal ["[email protected]"], mail.from
    assert_match user.reset_token, mail.body.encoded
    assert_match CGI.escape(user.email), mail.body.encoded
  end
end
31
56
0.766407
1d5e3e423a3d89b41fe8d4909d52339f54764591
2,047
class Juju < Formula
  desc "DevOps management tool"
  homepage "https://juju.is/"
  url "https://github.com/juju/juju.git",
      tag:      "juju-2.9.16",
      revision: "b84c5592b1036265aa1ce28b1e40b79c3886a21a"
  license "AGPL-3.0-only"
  version_scheme 1
  head "https://github.com/juju/juju.git"

  livecheck do
    url :stable
    regex(/^juju[._-]v?(\d+(?:\.\d+)+)$/i)
  end

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_monterey: "c625f730908972eab4c6bb0ef94b5df32e99c00fe579217f1a8410fecb437491"
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "488790b4c784ddd3937a0b081f6e5e7c431b13824aaeb897ca667877c32faaea"
    sha256 cellar: :any_skip_relocation, monterey: "ec047db66e9c39cd71f0f5592396104a0cb15dae8dfb52ea6cfda84c5020f39d"
    sha256 cellar: :any_skip_relocation, big_sur: "7ec0328625b55c1f6617a070a57ac2b21c87ce3d285486458e6aff541109b1af"
    sha256 cellar: :any_skip_relocation, catalina: "7df94ba4cd676d09967c4809c603723b80a5b8c796132736a7a335af283658da"
    sha256 cellar: :any_skip_relocation, mojave: "020fa17eb67e6bf18fdc37c0fa42aae456c7ef4afb147063a6d5c82da4920659"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "7a5e8a994802ce99d201ebaf5e24cd0f1f05fadd87a6d3037c679785e0dd654a"
  end

  depends_on "go" => :build

  def install
    ld_flags = %W[
      -s -w
      -X version.GitCommit=#{Utils.git_head}
      -X version.GitTreeState=clean
    ]
    system "go", "build", *std_go_args, "-ldflags", ld_flags.join(" "), "./cmd/juju"
    system "go", "build", *std_go_args, "-ldflags", ld_flags.join(" "),
           "-o", bin/"juju-metadata", "./cmd/plugins/juju-metadata"

    bash_completion.install "etc/bash_completion.d/juju"
  end

  test do
    system "#{bin}/juju", "version"
    assert_match "No controllers registered", shell_output("#{bin}/juju list-users 2>&1", 1)
    assert_match "No controllers registered", shell_output("#{bin}/juju-metadata list-images 2>&1", 2)
  end
end
40.94
123
0.700049
01c1fec6ec471d8d572e702e2892f7b2b9107898
4,357
# Copyright 2007 Chang Sau Sheong #Licensed under the Apache License, Version 2.0 (the "License"); #you may not use this file except in compliance with the License. #You may obtain a copy of the License at # #http://www.apache.org/licenses/LICENSE-2.0 # #Unless required by applicable law or agreed to in writing, software #distributed under the License is distributed on an "AS IS" BASIS, #WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #See the License for the specific language governing permissions and #limitations under the License. class Alf MAX_SPEED = 10 MAX_SCOPE = 150 LIFESPAN = 200 MIN_START_FOOD = 10 COLOR = java.awt.Color.red attr_reader :num, :food def initialize(num,opt={}) @num = num @status = :alive @life = LIFESPAN + rand(1000) @food = MIN_START_FOOD + rand(100) @color = opt[:color].nil? ? COLOR : opt[:color] @location = opt[:location].nil? ? [10,10] : opt[:location] @tail = @location # scope is how far the alf can see @scope = opt[:scope].nil? ? MAX_SCOPE.to_i : opt[:scope].to_f # speed is how fast the alf can move in a single turn @speed = opt[:speed].nil? ? MAX_SPEED.to_i : opt[:speed].to_f # digestion rate is the rate the alf eats up its stored food @digestion_rate = opt[:digestion_rate].nil? ? rand(4) + 1 : opt[:digestion_rate] # consumption rate is how much the alf eats the food at every bite @consumption_rate = opt[:consumption_rate].nil? ? rand(4) + 1 : opt[:consumption_rate] # starvation rate is how fast the alf will reduce its lifespan due to lack # of food @starvation_rate = 1 end # randomly generate an alf # default world size is 400 x 400 def Alf.generate_randomly(num, size=[400,400]) alf = Alf.new(num, :location => [rand(size[0]), rand(size[1])], :speed => rand(MAX_SPEED - 1) + 1) end # move to the next location def move @tail = @location closest_food = detect_closest_food @location = calculate_next_location(closest_food) end # find the nearest food available def detect_closest_food @food_within_scope = [] $food.each { |food| @food_within_scope << food if (distance_from(food.location) <= @scope and !food.exhausted) } if @food_within_scope.empty? xrand = rand(10) * (rand(2) == 0? -1 : 1) yrand = rand(10) * (rand(2) == 0? -1 : 1) return [@location[0] + xrand, @location[1] + yrand] else @food_within_scope.sort!{|x,y| distance_from(x.location) <=> distance_from(y.location)} return @food_within_scope.first.location end end def distance_from(another_point) return ((another_point[0] - @location[0])**2 + (another_point[1] - @location[1])**2) ** 0.5 end # calculate where the alf will go next based on the location of the closest # available food def calculate_next_location(food) x = food[0] - @location[0] y = food[1] - @location[1] distance = (x**2 + y**2) ** 0.5 next_location = [@location[0] + x*@speed/distance, @location[1] + y*@speed/distance] return next_location end # life ticking away, speeds up if the alf runs out of food def reduce_life if @life > 0 @life = @life - (1 * @starvation_rate) else @status = :dead end end # need to eat to live, alfs with bigger appetites will digest more, alfs with # bigger 'bites' will eat faster def consume_food if @status == :alive # eat the food if it is within my scope @food = @food + @food_within_scope.first.eat(@consumption_rate) if !@food_within_scope.empty? # if I have a storage of food, I'll digest it if @food > 0 @food = @food - (1 * @digestion_rate) @food = 0 if @food < 0 else # if i run out of food, i'll starve i.e. die faster @starvation_rate = @starvation_rate + 1 end end end def alive? 
return @status == :alive end def tick(g) move consume_food reduce_life render(g) end def render(g) g.setColor(@color) if distance_from(@tail) < 15 g.drawLine(@tail[0], @tail[1],@location[0],@location[1]) end g.fillOval(@location[0],@location[1], 4, 4) end end
28.292208
99
0.630939
611610bf3f31a5252d428fc2c206bbc77acce4d1
1,126
# frozen_string_literal: true

require 'cucumber/platform'

module Cucumber
  module Constantize #:nodoc:
    def constantize(camel_cased_word)
      try = 0
      begin
        try += 1
        names = camel_cased_word.split('::')
        names.shift if names.empty? || names.first.empty?

        constant = ::Object
        names.each do |name|
          constant = constantize_name(constant, name)
        end
        constant
      rescue NameError => e
        require underscore(camel_cased_word)
        if try < 2
          retry
        else
          raise e
        end
      end
    end

    # Snagged from active_support
    def underscore(camel_cased_word)
      camel_cased_word.to_s.gsub(/::/, '/')
                      .gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
                      .gsub(/([a-z\d])([A-Z])/, '\1_\2')
                      .tr('-', '_')
                      .downcase
    end

    private

    def constantize_name(constant, name)
      if constant.const_defined?(name, false)
        constant.const_get(name, false)
      else
        constant.const_missing(name)
      end
    end
  end
end
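# Behavioural sketch of the helpers above (the receiver class is an arbitrary
# example): constantize resolves a camel-cased name to a constant, requiring the
# underscored path and retrying once if the constant is not yet loaded.
class ConstantizeDemo
  include Cucumber::Constantize
end

demo = ConstantizeDemo.new
demo.underscore('Cucumber::Constantize')   # => "cucumber/constantize"
demo.constantize('Cucumber::Constantize')  # => Cucumber::Constantize (already loaded here)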
23.458333
60
0.531972
e9b025c9cc06f6ffa713694543962de5a4d9ea07
19,616
require 'spec_helper' require 'shared_examples/protect_product_actions' module Spree describe Api::V1::ProductsController, type: :controller do render_views let!(:product) { create(:product) } let!(:inactive_product) { create(:product, available_on: Time.current.tomorrow, name: 'inactive') } let(:base_attributes) { Api::ApiHelpers.product_attributes } let(:show_attributes) { base_attributes.dup.push(:has_variants) } let(:new_attributes) { base_attributes } let(:product_data) do { name: 'The Other Product', price: 19.99, shipping_category_id: create(:shipping_category).id } end let(:attributes_for_variant) do h = attributes_for(:variant).except(:option_values, :product) h.merge(options: [ { name: 'size', value: 'small' }, { name: 'color', value: 'black' } ]) end before do stub_authentication! end context 'as a normal user' do context 'with caching enabled' do before do create(:product) # product_2 ActionController::Base.perform_caching = true end it 'returns unique products' do api_get :index product_ids = json_response['products'].map { |p| p['id'] } expect(product_ids.uniq.count).to eq(product_ids.count) end after do ActionController::Base.perform_caching = false end end it 'retrieves a list of products' do api_get :index expect(json_response['products'].first).to have_attributes(show_attributes) expect(json_response['total_count']).to eq(1) expect(json_response['current_page']).to eq(1) expect(json_response['pages']).to eq(1) expect(json_response['per_page']).to eq(Kaminari.config.default_per_page) end it 'retrieves a list of products by id' do api_get :index, ids: [product.id] expect(json_response['products'].first).to have_attributes(show_attributes) expect(json_response['total_count']).to eq(1) expect(json_response['current_page']).to eq(1) expect(json_response['pages']).to eq(1) expect(json_response['per_page']).to eq(Kaminari.config.default_per_page) end context 'product has more than one price' do before { product.master.prices.create currency: 'EUR', amount: 22 } it 'returns distinct products only' do api_get :index expect(assigns(:products).map(&:id).uniq).to eq assigns(:products).map(&:id) end end it 'retrieves a list of products by ids string' do second_product = create(:product) api_get :index, ids: [product.id, second_product.id].join(',') expect(json_response['products'].first).to have_attributes(show_attributes) expect(json_response['products'][1]).to have_attributes(show_attributes) expect(json_response['total_count']).to eq(2) expect(json_response['current_page']).to eq(1) expect(json_response['pages']).to eq(1) expect(json_response['per_page']).to eq(Kaminari.config.default_per_page) end it 'does not return inactive products when queried by ids' do api_get :index, ids: [inactive_product.id] expect(json_response['count']).to eq(0) end it 'does not list unavailable products' do api_get :index expect(json_response['products'].first['name']).not_to eq('inactive') end context 'pagination' do before { create(:product) } it 'can select the next page of products' do api_get :index, page: 2, per_page: 1 expect(json_response['products'].first).to have_attributes(show_attributes) expect(json_response['total_count']).to eq(2) expect(json_response['current_page']).to eq(2) expect(json_response['pages']).to eq(2) end it 'can control the page size through a parameter' do api_get :index, per_page: 1 expect(json_response['count']).to eq(1) expect(json_response['total_count']).to eq(2) expect(json_response['current_page']).to eq(1) expect(json_response['pages']).to eq(2) end end it 'can 
search for products' do create(:product, name: 'The best product in the world') api_get :index, q: { name_cont: 'best' } expect(json_response['products'].first).to have_attributes(show_attributes) expect(json_response['count']).to eq(1) end # regression test for https://github.com/spree/spree/issues/8207 it 'can sort products by date' do first_product = create(:product, created_at: Time.current - 1.month) create(:product, created_at: Time.current) # second_product api_get :index, q: { s: 'created_at asc' } expect(json_response['products'].first['id']).to eq(first_product.id) end it 'gets a single product' do create_image(product.master, image('thinking-cat.jpg')) create(:variant, product: product) create_image(product.variants.first, image('thinking-cat.jpg')) product.set_property('spree', 'rocks') product.taxons << create(:taxon) api_get :show, id: product.to_param expect(json_response).to have_attributes(show_attributes) expect(json_response['variants'].first).to have_attributes([:name, :is_master, :price, :images, :in_stock]) expect(json_response['variants'].first['images'].first).to have_attributes([:attachment_file_name, :attachment_width, :attachment_height, :attachment_content_type, :mini_url, :small_url, :product_url, :large_url]) expect(json_response['product_properties'].first).to have_attributes([:value, :product_id, :property_name]) expect(json_response['classifications'].first).to have_attributes([:taxon_id, :position, :taxon]) expect(json_response['classifications'].first['taxon']).to have_attributes([:id, :name, :pretty_name, :permalink, :taxonomy_id, :parent_id]) end context 'tracking is disabled' do before { Config.track_inventory_levels = false } it 'still displays valid json with total_on_hand Float::INFINITY' do api_get :show, id: product.to_param expect(response).to be_ok expect(json_response[:total_on_hand]).to eq nil end after { Config.track_inventory_levels = true } end context 'finds a product by slug first then by id' do let!(:other_product) { create(:product, slug: 'these-are-not-the-droids-you-are-looking-for') } before do product.update_column(:slug, "#{other_product.id}-and-1-ways") end specify do api_get :show, id: product.to_param expect(json_response['slug']).to match(/and-1-ways/) product.destroy api_get :show, id: other_product.id expect(json_response['slug']).to match(/droids/) end end it 'cannot see inactive products' do api_get :show, id: inactive_product.to_param assert_not_found! end it 'returns a 404 error when it cannot find a product' do api_get :show, id: 'non-existant' assert_not_found! end it 'can learn how to create a new product' do api_get :new expect(json_response['attributes']).to eq(new_attributes.map(&:to_s)) required_attributes = json_response['required_attributes'] expect(required_attributes).to include('name') expect(required_attributes).to include('price') expect(required_attributes).to include('shipping_category') end it_behaves_like 'modifying product actions are restricted' end context 'as an admin' do let(:taxon_1) { create(:taxon) } let(:taxon_2) { create(:taxon) } sign_in_as_admin! 
it 'can see all products' do api_get :index expect(json_response['products'].count).to eq(2) expect(json_response['count']).to eq(2) expect(json_response['current_page']).to eq(1) expect(json_response['pages']).to eq(1) end # Regression test for #1626 context 'deleted products' do before do create(:product, deleted_at: 1.day.ago) end it 'does not include deleted products' do api_get :index expect(json_response['products'].count).to eq(2) end it 'can include deleted products' do api_get :index, show_deleted: 1 expect(json_response['products'].count).to eq(3) end end describe 'creating a product' do it 'can create a new product' do api_post :create, product: { name: 'The Other Product', price: 19.99, shipping_category_id: create(:shipping_category).id } expect(json_response).to have_attributes(base_attributes) expect(response.status).to eq(201) end it 'creates with embedded variants' do product_data[:variants] = [attributes_for_variant, attributes_for_variant] api_post :create, product: product_data expect(response.status).to eq 201 variants = json_response['variants'] expect(variants.count).to eq(2) expect(variants.last['option_values'][0]['name']).to eq('small') expect(variants.last['option_values'][0]['option_type_name']).to eq('size') expect(json_response['option_types'].count).to eq(2) # size, color end it 'can create a new product with embedded product_properties' do product_data[:product_properties_attributes] = [{ property_name: 'fabric', value: 'cotton' }] api_post :create, product: product_data expect(json_response['product_properties'][0]['property_name']).to eq('fabric') expect(json_response['product_properties'][0]['value']).to eq('cotton') end it 'can create a new product with option_types' do product_data[:option_types] = ['size', 'color'] api_post :create, product: product_data expect(json_response['option_types'].count).to eq(2) end it 'creates product with option_types ids' do option_type = create(:option_type) product_data[:option_type_ids] = [option_type.id] api_post :create, product: product_data expect(json_response['option_types'].first['id']).to eq option_type.id end it 'creates with shipping categories' do hash = { name: 'The Other Product', price: 19.99, shipping_category: 'Free Ships' } api_post :create, product: hash expect(response.status).to eq 201 shipping_id = ShippingCategory.find_by(name: 'Free Ships').id expect(json_response['shipping_category_id']).to eq shipping_id end it 'puts the created product in the given taxons' do product_data[:taxon_ids] = [taxon_1.id, taxon_2.id] api_post :create, product: product_data expect(json_response['taxon_ids']).to eq([taxon_1.id, taxon_2.id]) end # Regression test for #2140 context 'with authentication_required set to false' do before do Spree::Api::Config.requires_authentication = false end after do Spree::Api::Config.requires_authentication = true end it 'can still create a product' do api_post :create, product: product_data, token: 'fake' expect(json_response).to have_attributes(show_attributes) expect(response.status).to eq(201) end end it 'cannot create a new product with invalid attributes' do api_post :create, product: { foo: :bar } expect(response.status).to eq(422) expect(json_response['error']).to eq('Invalid resource. Please fix errors and try again.') errors = json_response['errors'] errors.delete('slug') # Don't care about this one. 
expect(errors.keys).to match_array(['name', 'price', 'shipping_category']) end end context 'updating a product' do it 'can update a product' do api_put :update, id: product.to_param, product: { name: 'New and Improved Product!' } expect(response.status).to eq(200) end it 'can create new option types on a product' do api_put :update, id: product.to_param, product: { option_types: ['shape', 'color'] } expect(json_response['option_types'].count).to eq(2) end it 'can create new variants on a product' do api_put :update, id: product.to_param, product: { variants: [attributes_for_variant, attributes_for_variant.merge(sku: "ABC-#{Kernel.rand(9999)}")] } expect(response.status).to eq 200 expect(json_response['variants'].count).to eq(2) # 2 variants variants = json_response['variants'].reject { |v| v['is_master'] } expect(variants.last['option_values'][0]['name']).to eq('small') expect(variants.last['option_values'][0]['option_type_name']).to eq('size') expect(json_response['option_types'].count).to eq(2) # size, color end it 'can update an existing variant on a product' do variant_hash = { sku: '123', price: 19.99, options: [{ name: 'size', value: 'small' }] } variant_id = product.variants.create!({ product: product }.merge(variant_hash)).id api_put :update, id: product.to_param, product: { variants: [ variant_hash.merge( id: variant_id.to_s, sku: '456', options: [{ name: 'size', value: 'large' }] ) ] } expect(json_response['variants'].count).to eq(1) variants = json_response['variants'].reject { |v| v['is_master'] } expect(variants.last['option_values'][0]['name']).to eq('large') expect(variants.last['sku']).to eq('456') expect(variants.count).to eq(1) end it 'cannot update a product with an invalid attribute' do api_put :update, id: product.to_param, product: { name: '' } expect(response.status).to eq(422) expect(json_response['error']).to eq('Invalid resource. 
Please fix errors and try again.') expect(json_response['errors']['name']).to eq(["can't be blank"]) end it 'puts the updated product in the given taxons' do api_put :update, id: product.to_param, product: { taxon_ids: [taxon_1.id, taxon_2.id] } expect(json_response['taxon_ids'].to_set).to eql([taxon_1.id, taxon_2.id].to_set) end end it 'can delete a product' do expect(product.deleted_at).to be_nil api_delete :destroy, id: product.to_param expect(response.status).to eq(204) expect(product.reload.deleted_at).not_to be_nil end end describe '#find_product' do let(:products) { Spree::Product.all } def send_request api_get :show, id: product.id end before { allow(controller).to receive(:product_scope).and_return(products) } context 'product found using friendly_id' do before do allow(products).to receive(:friendly).and_return(products) allow(products).to receive(:find).with(product.id.to_s).and_return(product) end describe 'expects to receive' do it { expect(controller).to receive(:product_scope).and_return(products) } it { expect(products).to receive(:friendly).and_return(products) } it { expect(products).to receive(:find).with(product.id.to_s).and_return(product) } after { send_request } end describe 'assigns' do before { send_request } it { expect(assigns(:product)).to eq(product) } end describe 'response' do before { send_request } it { expect(response).to have_http_status(:ok) } it { expect(json_response[:id]).to eq(product.id) } it { expect(json_response[:name]).to eq(product.name) } end end context 'product not found using friendly_id, but found in normal scope using id' do before do allow(products).to receive(:friendly).and_return(products) allow(products).to receive(:find).with(product.id.to_s).and_raise(ActiveRecord::RecordNotFound) allow(products).to receive(:find_by).with(id: product.id.to_s).and_return(product) end describe 'expects to receive' do it { expect(controller).to receive(:product_scope).and_return(products) } it { expect(products).to receive(:friendly).and_return(products) } it { expect(products).to receive(:find_by).with(id: product.id.to_s).and_return(product) } after { send_request } end describe 'assigns' do before { send_request } it { expect(assigns(:product)).to eq(product) } end describe 'response' do before { send_request } it { expect(response).to have_http_status(:ok) } it { expect(json_response[:id]).to eq(product.id) } it { expect(json_response[:name]).to eq(product.name) } end end context 'product not found' do before do allow(products).to receive(:friendly).and_return(products) allow(products).to receive(:find).with(product.id.to_s).and_raise(ActiveRecord::RecordNotFound) allow(products).to receive(:find_by).with(id: product.id.to_s).and_return(nil) end describe 'expects to receive' do it { expect(controller).to receive(:product_scope).and_return(products) } it { expect(products).to receive(:friendly).and_return(products) } it { expect(products).to receive(:find_by).with(id: product.id.to_s).and_return(nil) } after { send_request } end describe 'assigns' do before { send_request } it { expect(assigns(:product)).to eq(nil) } end describe 'response' do before { send_request } it { assert_not_found! } end end end end end
39.468813
159
0.589876
ace1bfa3e6301e270e03c66d20e45bb4dc911f3b
11,644
require 'spec_helper'

describe 'dhcp' do
  on_os_under_test.each do |os, facts|
    context "on #{os}" do
      let(:facts) do
        facts
      end

      let(:params) do
        {
          :interfaces => ['eth0'],
        }.merge(overridden_params)
      end

      conf_path = case os
                  when /^FreeBSD/i
                    '/usr/local/etc'
                  when /^Archlinux/i
                    '/etc'
                  else
                    '/etc/dhcp'
                  end

      describe "dhcp class without any parameters on #{os}" do
        let(:overridden_params) do
          { }
        end

        it { should compile.with_all_deps }

        it {
          verify_concat_fragment_exact_contents(catalogue, 'dhcp.conf+01_main.dhcp', [
            'omapi-port 7911;',
            'default-lease-time 43200;',
            'max-lease-time 86400;',
            'ddns-update-style none;',
            'option domain-name "example.com";',
            'option domain-name-servers 8.8.8.8, 8.8.4.4;',
            "option ntp-servers none;",
            'allow booting;',
            'allow bootp;',
            'option fqdn.no-client-update on; # set the "O" and "S" flag bits',
            'option fqdn.rcode2 255;',
            'option pxegrub code 150 = text ;',
            'log-facility local7;',
            "include \"#{conf_path}/dhcpd.hosts\";",
          ])
        }

        it { is_expected.not_to contain_concat__fragment('dhcp.conf+20_includes') }

        if facts[:osfamily] == 'RedHat' && facts[:operatingsystemmajrelease].to_i >= 7
          it { is_expected.to contain_systemd__dropin_file('interfaces.conf') }
        else
          it { is_expected.not_to contain_systemd__dropin_file('interfaces.conf') }
        end

        if facts[:osfamily] == 'RedHat' && facts[:operatingsystemmajrelease].to_i < 7
          it { is_expected.to contain_file('/etc/sysconfig/dhcpd') }
        else
          it { is_expected.not_to contain_file('/etc/sysconfig/dhcpd') }
        end

        if facts[:osfamily] == 'Debian'
          it { is_expected.to contain_file('/etc/default/isc-dhcp-server') }
        else
          it { is_expected.not_to contain_file('/etc/default/isc-dhcp-server') }
        end

        if ['FreeBSD', 'DragonFly'].include?(facts[:osfamily])
          it { is_expected.to contain_augeas('set listen interfaces') }
        else
          it { is_expected.not_to contain_augeas('set listen interfaces') }
        end
      end

      describe "dhcp class parameters on #{os}" do
        let(:overridden_params) do
          {
            :dnsupdatekey => 'mydnsupdatekey',
            :ntpservers => ['1.1.1.1', '1.1.1.2'],
            :omapi_name => 'mykeyname',
            :omapi_key => 'myomapikey',
            :pxeserver => '10.0.0.5',
            :mtu => 9000,
            :pxefilename => 'mypxefilename',
            :bootfiles => {
              '00:00' => 'pxelinux.0',
              '00:06' => 'shim.efi',
              '00:07' => 'shim.efi',
              '00:09' => 'shim.efi',
            },
            :option_static_route => true,
            :options => ['provision-url code 224 = text', 'provision-type code 225 = text'],
            :authoritative => true,
            :ddns_domainname => 'example.com',
            :ddns_rev_domainname => 'in-addr.arpa',
            :ddns_update_style => 'standard',
            :includes => ['myinclude1', 'myinclude2'],
          }
        end

        it { should compile.with_all_deps }

        it {
          verify_concat_fragment_exact_contents(catalogue, 'dhcp.conf+01_main.dhcp', [
            'omapi-port 7911;',
            'key mykeyname {',
            ' algorithm HMAC-MD5;',
            ' secret "myomapikey";',
            '}',
            'omapi-key mykeyname;',
            'default-lease-time 43200;',
            'max-lease-time 86400;',
            'authoritative;',
            'ddns-updates on;',
            'ddns-update-style standard;',
            'update-static-leases on;',
            'use-host-decl-names on;',
            'ddns-domainname "example.com";',
            'ddns-rev-domainname "in-addr.arpa";',
            'include "mydnsupdatekey";',
            'zone example.com. {',
            ' primary 8.8.8.8;',
            ' key rndc-key;',
            '}',
            'option domain-name "example.com";',
            'option domain-name-servers 8.8.8.8, 8.8.4.4;',
            'option ntp-servers 1.1.1.1, 1.1.1.2;',
            'allow booting;',
            'allow bootp;',
            'option fqdn.no-client-update on; # set the "O" and "S" flag bits',
            'option fqdn.rcode2 255;',
            'option pxegrub code 150 = text ;',
            'option rfc3442-classless-static-routes code 121 = array of integer 8;',
            'option ms-classless-static-routes code 249 = array of integer 8;',
            'option interface-mtu 9000;',
            'option provision-url code 224 = text;',
            'option provision-type code 225 = text;',
            'next-server 10.0.0.5;',
            'option architecture code 93 = unsigned integer 16 ;',
            'if option architecture = 00:00 {',
            ' filename "pxelinux.0";',
            '} elsif option architecture = 00:06 {',
            ' filename "shim.efi";',
            '} elsif option architecture = 00:07 {',
            ' filename "shim.efi";',
            '} elsif option architecture = 00:09 {',
            ' filename "shim.efi";',
            '} else {',
            ' filename "mypxefilename";',
            '}',
            'log-facility local7;',
            "include \"#{conf_path}/dhcpd.hosts\";",
          ])
        }

        it {
          verify_concat_fragment_exact_contents(catalogue, 'dhcp.conf+20_includes', [
            'include "myinclude1";',
            'include "myinclude2";',
          ])
        }
      end

      describe "ddns-updates without key" do
        let(:overridden_params) do
          {
            :ddns_updates => true,
          }
        end

        it { should compile.with_all_deps }

        it {
          verify_concat_fragment_exact_contents(catalogue, 'dhcp.conf+01_main.dhcp', [
            'omapi-port 7911;',
            'default-lease-time 43200;',
            'max-lease-time 86400;',
            'ddns-updates on;',
            'ddns-update-style interim;',
            'update-static-leases on;',
            'use-host-decl-names on;',
            'zone example.com. {',
            ' primary 8.8.8.8;',
            '}',
            'option domain-name "example.com";',
            'option domain-name-servers 8.8.8.8, 8.8.4.4;',
            "option ntp-servers none;",
            'allow booting;',
            'allow bootp;',
            'option fqdn.no-client-update on; # set the "O" and "S" flag bits',
            'option fqdn.rcode2 255;',
            'option pxegrub code 150 = text ;',
            'log-facility local7;',
            "include \"#{conf_path}/dhcpd.hosts\";",
          ])
        }
      end

      describe "without omapi" do
        let(:overridden_params) do
          {
            :omapi => false,
          }
        end

        it { should compile.with_all_deps }

        it {
          verify_concat_fragment_exact_contents(catalogue, 'dhcp.conf+01_main.dhcp', [
            'default-lease-time 43200;',
            'max-lease-time 86400;',
            'ddns-update-style none;',
            'option domain-name "example.com";',
            'option domain-name-servers 8.8.8.8, 8.8.4.4;',
            "option ntp-servers none;",
            'allow booting;',
            'allow bootp;',
            'option fqdn.no-client-update on; # set the "O" and "S" flag bits',
            'option fqdn.rcode2 255;',
            'option pxegrub code 150 = text ;',
            'log-facility local7;',
            "include \"#{conf_path}/dhcpd.hosts\";",
          ])
        }
      end

      describe "without bootp" do
        let(:overridden_params) do
          {
            :bootp => false,
          }
        end

        it { should compile.with_all_deps }

        it {
          verify_concat_fragment_exact_contents(catalogue, 'dhcp.conf+01_main.dhcp', [
            'omapi-port 7911;',
            'default-lease-time 43200;',
            'max-lease-time 86400;',
            'ddns-update-style none;',
            'option domain-name "example.com";',
            'option domain-name-servers 8.8.8.8, 8.8.4.4;',
            "option ntp-servers none;",
            'allow booting;',
            'option fqdn.no-client-update on; # set the "O" and "S" flag bits',
            'option fqdn.rcode2 255;',
            'option pxegrub code 150 = text ;',
            'log-facility local7;',
            "include \"#{conf_path}/dhcpd.hosts\";",
          ])
        }
      end

      describe "with failover, bootp undef" do
        let(:overridden_params) do
          { :failover => true }
        end

        it { should compile.with_all_deps }

        it {
          verify_concat_fragment_exact_contents(catalogue, 'dhcp.conf+01_main.dhcp', [
            'omapi-port 7911;',
            'default-lease-time 43200;',
            'max-lease-time 86400;',
            'ddns-update-style none;',
            'option domain-name "example.com";',
            'option domain-name-servers 8.8.8.8, 8.8.4.4;',
            "option ntp-servers none;",
            'allow booting;',
            'option fqdn.no-client-update on; # set the "O" and "S" flag bits',
            'option fqdn.rcode2 255;',
            'option pxegrub code 150 = text ;',
            'log-facility local7;',
            "include \"#{conf_path}/dhcpd.hosts\";",
          ])
        }
      end

      describe "with failover, bootp true" do
        let(:overridden_params) do
          { :failover => true, :bootp => true }
        end

        it { should compile.with_all_deps }

        it {
          verify_concat_fragment_exact_contents(catalogue, 'dhcp.conf+01_main.dhcp', [
            'omapi-port 7911;',
            'default-lease-time 43200;',
            'max-lease-time 86400;',
            'ddns-update-style none;',
            'option domain-name "example.com";',
            'option domain-name-servers 8.8.8.8, 8.8.4.4;',
            "option ntp-servers none;",
            'allow booting;',
            'allow bootp;',
            'option fqdn.no-client-update on; # set the "O" and "S" flag bits',
            'option fqdn.rcode2 255;',
            'option pxegrub code 150 = text ;',
            'log-facility local7;',
            "include \"#{conf_path}/dhcpd.hosts\";",
          ])
        }
      end

      describe "with failover, bootp false" do
        let(:overridden_params) do
          { :failover => true, :bootp => false }
        end

        it { should compile.with_all_deps }

        it {
          verify_concat_fragment_exact_contents(catalogue, 'dhcp.conf+01_main.dhcp', [
            'omapi-port 7911;',
            'default-lease-time 43200;',
            'max-lease-time 86400;',
            'ddns-update-style none;',
            'option domain-name "example.com";',
            'option domain-name-servers 8.8.8.8, 8.8.4.4;',
            "option ntp-servers none;",
            'allow booting;',
            'option fqdn.no-client-update on; # set the "O" and "S" flag bits',
            'option fqdn.rcode2 255;',
            'option pxegrub code 150 = text ;',
            'log-facility local7;',
            "include \"#{conf_path}/dhcpd.hosts\";",
          ])
        }
      end
    end
  end
end
35.178248
95
0.499141
5d04d96ba8e1c1afac05a2d93dbdd0655cb5c99c
294
class AddSubtotalToShoppeTransactions < ActiveRecord::Migration
  def change
    rename_column :shoppe_subscriber_transactions, :amount, :total
    add_column :shoppe_subscriber_transactions, :subtotal, :decimal
    add_column :shoppe_subscriber_transactions, :discount_code, :string
  end
end
36.75
71
0.816327
01f54c2a24ff8227f7a2e2cc659ecf9fc6920a40
427
# From ruby_parser 3.x to fix warnings under ruby_parser 2.x
#
# ruby2ruby sets some constants, but not ONCE. ruby_parser 2.x checks for
# ONCE and then causes duplicate definitions of ENC_*.

class Regexp
  ONCE = 0 unless defined? ONCE # FIX: remove this - it makes no sense

  unless defined? ENC_NONE
    ENC_NONE = /x/n.options
    ENC_EUC = /x/e.options
    ENC_SJIS = /x/s.options
    ENC_UTF8 = /x/u.options
  end
end
28.466667
74
0.709602
3934a54bb99ee98e5be3a954be4b9125a63ee899
4,504
require 'rails_helper'

module Spree
  module PromotionHandler
    RSpec.describe Cart, type: :model do
      let(:line_item) { create(:line_item) }
      let(:order) { line_item.order }
      let(:promotion) { create(:promotion, apply_automatically: true) }
      let(:calculator) { Calculator::FlatPercentItemTotal.new(preferred_flat_percent: 10) }

      subject { Cart.new(order, line_item) }

      shared_context "creates the adjustment" do
        it "creates the adjustment" do
          expect {
            subject.activate
          }.to change { adjustable.adjustments.count }.by(1)
        end
      end

      shared_context "creates an order promotion" do
        it "connects the promotion to the order" do
          expect {
            subject.activate
          }.to change { order.promotions.reload.to_a }.from([]).to([promotion])
        end
      end

      context "activates in LineItem level" do
        let!(:action) { Promotion::Actions::CreateItemAdjustments.create(promotion: promotion, calculator: calculator) }
        let(:adjustable) { line_item }

        context "promotion with no rules" do
          include_context "creates the adjustment"
          include_context "creates an order promotion"

          context "for a non-sale promotion" do
            let(:promotion) { create(:promotion, apply_automatically: false) }

            it "doesn't connect the promotion to the order" do
              expect {
                subject.activate
              }.to change { order.promotions.count }.by(0)
            end

            it "doesn't create an adjustment" do
              expect {
                subject.activate
              }.to change { adjustable.adjustments.count }.by(0)
            end
          end
        end

        context "promotion includes item involved" do
          let!(:rule) { Promotion::Rules::Product.create(products: [line_item.product], promotion: promotion) }

          include_context "creates the adjustment"
          include_context "creates an order promotion"
        end

        context "promotion has item total rule" do
          let(:shirt) { create(:product) }
          let!(:rule) { Promotion::Rules::ItemTotal.create(preferred_operator: 'gt', preferred_amount: 50, promotion: promotion) }

          before do
            # Makes the order eligible for this promotion
            order.item_total = 100
            order.save
          end

          include_context "creates the adjustment"
          include_context "creates an order promotion"
        end
      end

      context "activates in Order level" do
        let!(:action) { Promotion::Actions::CreateAdjustment.create(promotion: promotion, calculator: calculator) }
        let(:adjustable) { order }

        context "promotion with no rules" do
          before do
            # Gives the calculator something to discount
            order.item_total = 10
            order.save
          end

          include_context "creates the adjustment"
          include_context "creates an order promotion"
        end

        context "promotion has item total rule" do
          let(:shirt) { create(:product) }
          let!(:rule) { Promotion::Rules::ItemTotal.create(preferred_operator: 'gt', preferred_amount: 50, promotion: promotion) }

          before do
            # Makes the order eligible for this promotion
            order.item_total = 100
            order.save
          end

          include_context "creates the adjustment"
          include_context "creates an order promotion"
        end
      end

      context "activates promotions associated with the order" do
        let(:promotion) { create :promotion, :with_order_adjustment, code: 'promo' }
        let(:promotion_code) { promotion.codes.first }
        let(:adjustable) { order }

        before do
          Spree::OrderPromotion.create!(promotion: promotion, order: order, promotion_code: promotion_code)
          order.recalculate
        end

        include_context "creates the adjustment"

        it "records the promotion code in the adjustment" do
          subject.activate
          expect(adjustable.adjustments.map(&:promotion_code)).to eq [promotion_code]
        end

        it "checks if the promotion code is eligible" do
          expect_any_instance_of(Spree::Promotion).to receive(:eligible?).at_least(2).times.with(anything, promotion_code: promotion_code).and_return(false)

          subject.activate
        end
      end
    end
  end
end
34.381679
156
0.618339