hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
5d9ea85dd8b68aa0d224a2f0d7d50d01c4270c9c | 6,794 | require 'spec_helper'
describe 'cis_hardening::auth::ssh' do
on_supported_os.each do |os, os_facts|
context "on #{os}" do
let(:facts) { os_facts }
# The class itself is present in the compiled catalogue.
it {
  is_expected.to contain_class('cis_hardening::auth::ssh')
}

# Ensure permissions on /etc/ssh/sshd_config are configured - Section 5.2.1
it {
  is_expected.to contain_file('/etc/ssh/sshd_config').with(
    'ensure' => 'file',
    'owner' => 'root',
    'group' => 'root',
    'mode' => '0600',
  )
}

# Ensure permissions on SSH private host key files are configured - Section 5.2.2
# NOTE(review): in a double-quoted Ruby string "\;" collapses to ";" (the
# backslash is dropped for unrecognised escapes), so the expected command
# actually ends in "{} ;" rather than find's usual "{} \;". This only passes
# if the manifest builds its command string the same way — confirm against the
# manifest, and prefer '\\;' or a single-quoted string if a literal backslash
# is intended.
it {
  is_expected.to contain_exec('set_sshprivkey_perms').with(
    'path' => '/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin',
    'command' => "find /etc/ssh -xdev -type f -name 'ssh_host_*_key' -exec chmod u-x,g-wx,o-rwx {} \;",
  )
}

it {
  is_expected.to contain_exec('set_sshprivkey_owner').with(
    'path' => '/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin',
    'command' => "find /etc/ssh -xdev -type f -name 'ssh_host_*_key' -exec chown root:ssh_keys {} \;",
  )
}

# Ensure permissions on SSH public host key files are configured - Section 5.2.3
# NOTE(review): same "\;" escaping caveat as above.
it {
  is_expected.to contain_exec('set_sshpubkey_perms').with(
    'path' => '/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin',
    'command' => "find /etc/ssh -xdev -type f -name 'ssh_host_*_key.pub' -exec chmod u-x,go-wx {} \;",
  )
}
# Ensure SSH access is limited - Section 5.2.4
# Currently commented. Up to the discretion of the user as to whether to enable

# Ensure SSH LogLevel is appropriate - Section 5.2.5
it {
  is_expected.to contain_file_line('set_ssh_loglevel').with(
    'ensure' => 'present',
    'path' => '/etc/ssh/sshd_config',
    'line' => 'LogLevel INFO',
    'match' => '^LogLevel\ ',
  )
}

# Ensure SSH X11 Forwarding is disabled - Section 5.2.6
it {
  is_expected.to contain_file_line('set_x11_forwarding').with(
    'ensure' => 'present',
    'path' => '/etc/ssh/sshd_config',
    'line' => 'X11Forwarding no',
  )
}

# Ensure SSH MaxAuthTries is set to 4 or less - Section 5.2.7
it {
  is_expected.to contain_file_line('set_ssh_maxauthtries').with(
    'ensure' => 'present',
    'path' => '/etc/ssh/sshd_config',
    'line' => 'MaxAuthTries 4',
    'match' => '^MaxAuthTries\ ',
  )
}

# Ensure SSH IgnoreRhosts is enabled - Section 5.2.8
it {
  is_expected.to contain_file_line('set_ssh_ignore_rhosts').with(
    'ensure' => 'present',
    'path' => '/etc/ssh/sshd_config',
    'line' => 'IgnoreRhosts yes',
    'match' => '^IgnoreRhosts\ ',
  )
}

# Ensure SSH HostbasedAuthentication is disabled - Section 5.2.9
# NOTE(review): OpenSSH spells this option "HostbasedAuthentication"
# (lower-case "b"), which is what the 'match' regex uses, while the 'line'
# value uses "HostBasedAuthentication". sshd treats keywords
# case-insensitively, but the casing mismatch means the managed line never
# matches its own regex — confirm the manifest intends this.
it {
  is_expected.to contain_file_line('set_hostbasedauth_off').with(
    'ensure' => 'present',
    'path' => '/etc/ssh/sshd_config',
    'line' => 'HostBasedAuthentication no',
    'match' => '^HostbasedAuthentication\ ',
  )
}

# Ensure SSH root login is disabled - Section 5.2.10
it {
  is_expected.to contain_file_line('set_rootlogin_no').with(
    'ensure' => 'present',
    'path' => '/etc/ssh/sshd_config',
    'line' => 'PermitRootLogin no',
    'match' => '^PermitRootLogin\ ',
  )
}
# Ensure PermitEmptyPasswords is disabled - Section 5.2.11
it {
  is_expected.to contain_file_line('set_emptypasswords_off').with(
    'ensure' => 'present',
    'path' => '/etc/ssh/sshd_config',
    'line' => 'PermitEmptyPasswords no',
    'match' => '^PermitEmptyPasswords\ ',
  )
}

# Ensure SSH PermitUserEnvironment is disabled - Section 5.2.12
it {
  is_expected.to contain_file_line('set_permituserenv_off').with(
    'ensure' => 'present',
    'path' => '/etc/ssh/sshd_config',
    'line' => 'PermitUserEnvironment no',
    'match' => '^PermitUserEnvironment\ ',
  )
}

# Ensure only strong ciphers are used - Section 5.2.13
# NOTE(review): the cipher list looks mangled — "[email protected]" appears
# where OpenSSH "@openssh.com" cipher names (e.g. chacha20-poly1305,
# aes256-gcm, aes128-gcm) would be expected. Verify against the manifest and
# restore the real cipher names.
it {
  is_expected.to contain_file_line('set_ssh_ciphers').with(
    'ensure' => 'present',
    'path' => '/etc/ssh/sshd_config',
    'line' => 'Ciphers [email protected],[email protected],[email protected],aes256-ctr,aes192-ctr,aes128-ctr',
    'match' => '^Ciphers\ ',
  )
}

# Ensure only strong MAC algorithms are used - Section 5.2.14
# (original comment said "4.2.14" — corrected; MAC list has the same
# "[email protected]" mangling caveat as the cipher list above)
it {
  is_expected.to contain_file_line('set_ssh_macs').with(
    'ensure' => 'present',
    'path' => '/etc/ssh/sshd_config',
    'line' => 'MACs [email protected],[email protected],hmac-sha2-512,hmac-sha2-256',
    'match' => '^MACs\ ',
  )
}

# Ensure only strong Key Exchange algorithms are used - Section 5.2.15
# FIXME(review): contain_file_line is called without a resource title or
# attribute expectations, so this example does not actually verify the
# KexAlgorithms setting. Give it the resource name and expected attributes,
# mirroring the cipher/MAC examples above.
it {
  is_expected.to contain_file_line
}
# Ensure SSH Idle Timeout Interval is configured
# NOTE(review): originally labelled Section 5.2.12, which duplicates an
# earlier example — verify the intended CIS section number.
it {
  is_expected.to contain_file_line('client_alive_interval').with(
    'ensure' => 'present',
    'path' => '/etc/ssh/sshd_config',
    'line' => 'ClientAliveInterval 300',
  )
}

it {
  is_expected.to contain_file_line('client_alive_count_max').with(
    'ensure' => 'present',
    'path' => '/etc/ssh/sshd_config',
    'line' => 'ClientAliveCountMax 0',
  )
}

# Ensure SSH LoginGraceTime is set to one minute or less
# NOTE(review): originally labelled Section 5.2.13 — same numbering drift as
# the note above; verify against the CIS benchmark in use.
it {
  is_expected.to contain_file_line('login_grace_time').with(
    'ensure' => 'present',
    'path' => '/etc/ssh/sshd_config',
    'line' => 'LoginGraceTime 60',
  )
}

# Ensure SSH Access is Limited - Section 5.2.14
# Unused in sshd_config. Managed via IAM

# Ensure SSH Warning Banner is Configured
it {
  is_expected.to contain_file_line('set_ssh_banner').with(
    'ensure' => 'present',
    'path' => '/etc/ssh/sshd_config',
    'line' => 'Banner /etc/issue.net',
  )
}

# Ensure manifest compiles with all dependencies
it {
  is_expected.to compile.with_all_deps
}
end
end
end
| 34.48731 | 141 | 0.558287 |
d59123419750c98fc62a14507a0170dc3f104b46 | 149 | class GuidePlaceAssociation < ActiveRecord::Base
# Join model linking a Guide to a Place, carrying the position of the place
# within the guide (order_num) and an optional free-text note.
# NOTE(review): attr_accessible is Rails <= 3 mass-assignment protection.
attr_accessible :guide_id, :place_id, :order_num, :note

belongs_to :guide
belongs_to :place
end
| 21.285714 | 56 | 0.798658 |
e93d711590f2f43f6d20053ec09784e1afc64128 | 92 | # frozen_string_literal: true
# Jbuilder view: renders @user as { "user": { "id": ..., "email": ... } }.
json.user do
  json.id @user.id
  json.email @user.email
end
| 13.142857 | 29 | 0.73913 |
5d29f577894db5c0ab099334f1934dc1bcc9f265 | 686 | if Rails.env.production?
Rails.application.config.after_initialize do
  # Drop any connections established during boot so the pool is rebuilt below
  # from DATABASE_URL with the custom pool/reaping settings.
  ActiveRecord::Base.connection_pool.disconnect!
  ActiveSupport.on_load(:active_record) do
    begin
      uri = URI.parse(ENV["DATABASE_URL"])
    rescue URI::InvalidURIError
      raise "Invalid DATABASE_URL"
    end
    # uri.path is "/dbname"; element 0 of the split is the empty string
    # before the leading slash, so the database name is element 1.
    database = (uri.path || "").split("/")[1]
    ActiveRecord::Base.establish_connection(
      adapter: "postgresql",
      host: uri.host,
      port: uri.port,
      username: uri.user,
      password: uri.password,
      database: database,
      # NOTE(review): ENV values are strings while the fallbacks are
      # integers — confirm ActiveRecord coerces these consistently on the
      # Rails version in use.
      reaping_frequency: ENV["DB_REAP_FREQ"] || 10,
      pool: ENV["DB_POOL"] || 16,
    )
  end
end
end
| 26.384615 | 53 | 0.616618 |
01321bedd5747ec19ea8bebf3c037db0a0129cff | 242 | FactoryGirl.define do
# Factory for JiveTiles::Tile with placeholder attribute values.
# NOTE(review): bare (static) attribute declarations are FactoryGirl-era
# syntax; FactoryBot requires block form (e.g. `guid { "MyString" }`).
factory :jive_tiles_tile, :class => 'JiveTiles::Tile' do
  guid "MyString"
  remote_id "MyString"
  config "MyText"
  name "MyString"
  jive_url "MyString"
  tenant_id "MyString"
  push_url "MyString"
  code "MyString"
end
end
| 17.285714 | 58 | 0.747934 |
624280a4930f516f8336e4f17f54f24bf747c389 | 2,503 | class LogStash::Filters::Statement
# Class-level logger (an ivar on the class object, not on instances).
# NOTE(review): no instance method in this class references it — confirm it
# is actually used before keeping it.
@logger = Cabin::Channel.get(LogStash)

attr_accessor :query                   # inline SQL statement text
attr_accessor :query_filepath          # contents read from statement_filepath (see #populate)
attr_accessor :parameters
attr_accessor :schedule                # cron-style schedule string
attr_accessor :join_keys
attr_accessor :node_name
attr_accessor :persistence_data
attr_accessor :persistence_store_type
attr_accessor :file
attr_accessor :elastic_search
attr_accessor :clear_persistence_store
attr_accessor :statement               # Array of nested child Statement objects
# Populates this statement from a configuration hash.
#
# Recognised keys: 'query', 'statement_filepath', 'parameters', 'schedule',
# 'join_keys', 'node_name', 'persistence_data', 'persistence_store_type',
# 'file', 'elastic_search', 'clear_persistence_store' and 'statement' (an
# array of nested statement hashes, built recursively).
#
# @param stmt [Hash] configuration for one statement
def populate(stmt)
  @query = stmt['query']
  # NOTE(review): despite its name, @query_filepath holds the *contents* of
  # the file, not its path (see #validate, which treats it as the query).
  @query_filepath = File.read(stmt['statement_filepath']) if stmt['statement_filepath']
  @parameters = stmt['parameters']
  @schedule = stmt['schedule']
  @join_keys = stmt['join_keys']
  @node_name = stmt['node_name']
  @persistence_data = stmt['persistence_data']
  @persistence_store_type = stmt['persistence_store_type']
  @file = stmt['file']
  @elastic_search = stmt['elastic_search']
  # Default to false (not nil) so callers always get a real boolean.
  @clear_persistence_store = stmt['clear_persistence_store'].nil? ? false : stmt['clear_persistence_store']

  # Recursively build nested child statements, if any.
  unless stmt['statement'].nil?
    @statement ||= []
    # FIX: the block variable previously shadowed the `stmt` method
    # parameter, hiding the outer hash inside the loop.
    stmt['statement'].each do |child_config|
      child = LogStash::Filters::Statement.new
      child.populate(child_config)
      @statement.push(child)
    end
  end
end
# Ensures exactly one query source is configured.
#
# @return [true] when exactly one of @query / @query_filepath is set
# @raise [LogStash::ConfigurationError] when both or neither are set
def validate
  query_missing = @query.nil?
  file_missing = @query_filepath.nil?
  # Both set or both missing is invalid (equivalent to the XOR check).
  if query_missing == file_missing
    raise(LogStash::ConfigurationError, "Must set either :query or :query_filepath. Only one may be set at a time.")
  end
  true
end
# Pretty-prints this statement (and, recursively, its nested statements)
# to STDOUT, indenting four spaces per nesting level.
#
# @param level [Integer] current nesting depth (0 = top level)
# @return [nil]
def print_object(level = 0)
  indent = ' ' * (4 * level)
  puts indent + 'Statement => {'
  puts indent + " query = #{query}"
  puts indent + " query_filepath = #{query_filepath}"
  puts indent + " parameters = #{parameters}"
  puts indent + " schedule = #{schedule}"
  puts indent + " join_keys = #{join_keys}"
  puts indent + " node_name = #{node_name}"
  puts indent + " persistence_data = #{persistence_data}"
  puts indent + " persistence_store_type = #{persistence_store_type}"
  puts indent + " file = #{file}"
  puts indent + " elastic_search = #{elastic_search}"
  puts indent + " clear_persistence_store = #{clear_persistence_store}"
  # BUG FIX: @statement is an Array of Statement objects (see #populate), so
  # print_object must be invoked on each element; calling it on the Array
  # itself raised NoMethodError whenever nested statements were present.
  @statement.each { |child| child.print_object(level + 1) } unless @statement.nil?
  puts indent + '}'
end
end
| 31.2875 | 118 | 0.619257 |
331f3176553959e4d882049663e84914fe5cc91d | 2,024 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2018 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
##
# Provides an asynchronous job to create a managed repository on a remote system
# using a simple HTTP callback
# Currently, this is run synchronously due to potential issues
# with error handling.
# We envision a repository management wrapper that covers transactional
# creation and deletion of repositories BOTH on the database and filesystem.
# Until then, a synchronous process is more failsafe.
# Job that asks a remote repository-management service to create a managed
# repository via an HTTP callback, then stores the returned location on the
# Repository record. (Executed synchronously — see the rationale in the
# comment block above.)
class Scm::CreateRemoteRepositoryJob < Scm::RemoteRepositoryJob
  def perform
    # The HTTP call itself is implemented by the parent class; we only tag
    # the request payload with the :create action.
    response = send_request(repository_request.merge(action: :create))

    # Persist the filesystem path and public URL reported by the remote side.
    repository.root_url = response['path']
    repository.url = response['url']

    unless repository.save
      raise OpenProject::Scm::Exceptions::ScmError.new(
        I18n.t('repositories.errors.remote_save_failed')
      )
    end
  end
end
| 39.686275 | 91 | 0.759881 |
01348ef32146417e86ede29af4c20d80871275ea | 3,320 | # typed: false
# frozen_string_literal: true
module Homebrew
module Livecheck
module Strategy
# The {Gnome} strategy identifies versions of software at gnome.org by
# checking the available downloads found in a project's `cache.json`
# file.
#
# GNOME URLs generally follow a standard format:
#
# * `https://download.gnome.org/sources/example/1.2/example-1.2.3.tar.xz`
#
# Before version 40, GNOME used a version scheme where unstable releases
# were indicated with a minor that's 90+ or odd. The newer version scheme
# uses trailing alpha/beta/rc text to identify unstable versions
# (e.g., `40.alpha`).
#
# When a regex isn't provided in a `livecheck` block, the strategy uses
# a default regex that matches versions which don't include trailing text
# after the numeric version (e.g., `40.0` instead of `40.alpha`) and it
# selectively filters out unstable versions below 40 using the rules for
# the older version scheme.
#
# @api public
class Gnome
  extend T::Sig

  # Display name used for this strategy.
  NICE_NAME = "GNOME"

  # The `Regexp` used to determine if the strategy applies to the URL.
  URL_MATCH_REGEX = %r{
    ^https?://download\.gnome\.org
    /sources
    /(?<package_name>[^/]+)/ # The GNOME package name
  }ix.freeze

  # Whether the strategy can be applied to the provided URL.
  #
  # @param url [String] the URL to match against
  # @return [Boolean]
  def self.match?(url)
    URL_MATCH_REGEX.match?(url)
  end

  # Generates a URL and regex (if one isn't provided) and passes them
  # to {PageMatch.find_versions} to identify versions in the content.
  #
  # @param url [String] the URL of the content to check
  # @param regex [Regexp] a regex used for matching versions in content
  # @return [Hash]
  sig {
    params(
      url: String,
      regex: T.nilable(Regexp),
      cask: T.nilable(Cask::Cask),
      block: T.nilable(T.proc.params(arg0: String).returns(T.any(T::Array[String], String))),
    ).returns(T::Hash[Symbol, T.untyped])
  }
  def self.find_versions(url, regex, cask: nil, &block)
    match = url.match(URL_MATCH_REGEX)
    # Versions are read from the project's cache.json index rather than a
    # directory listing.
    page_url = "https://download.gnome.org/sources/#{match[:package_name]}/cache.json"
    if regex.blank?
      # GNOME archive files seem to use a standard filename format, so we
      # count on the delimiter between the package name and numeric
      # version being a hyphen and the file being a tarball.
      regex = /#{Regexp.escape(match[:package_name])}-(\d+(?:\.\d+)+)\.t/i
      version_data = PageMatch.find_versions(page_url, regex, cask: cask, &block)
      # Filter out unstable versions using the old version scheme where
      # the major version is below 40 (minor >= 90 or an odd minor marked
      # a release as unstable under that scheme).
      version_data[:matches].reject! do |_, version|
        version.major < 40 && (version.minor >= 90 || version.minor.to_i.odd?)
      end
      version_data
    else
      # A user-supplied regex is used as-is, with no unstable filtering.
      PageMatch.find_versions(page_url, regex, cask: cask, &block)
    end
  end
end
end
end
end
| 37.727273 | 99 | 0.602108 |
618d773eb4bba694935976f7b38a8c2d556dc804 | 3,137 | Pod::Spec.new do |s|
# --- Pod metadata ---------------------------------------------------------
s.name             = 'FirebaseRemoteConfig'
s.version          = '4.4.9'
s.summary          = 'Firebase Remote Config'

s.description      = <<-DESC
Firebase Remote Config is a cloud service that lets you change the
appearance and behavior of your app without requiring users to download an
app update.
DESC

s.homepage         = 'https://firebase.google.com'
s.license          = { :type => 'Apache', :file => 'LICENSE' }
s.authors          = 'Google, Inc.'

# Source lives in the firebase-ios-sdk monorepo; the tag is derived from the
# pod version so the two cannot drift apart.
s.source           = {
  :git => 'https://github.com/firebase/firebase-ios-sdk.git',
  :tag => 'RemoteConfig-' + s.version.to_s
}
s.social_media_url = 'https://twitter.com/Firebase'

# --- Platform support -----------------------------------------------------
s.ios.deployment_target = '8.0'
s.osx.deployment_target = '10.11'
s.tvos.deployment_target = '10.0'
s.cocoapods_version = '>= 1.4.0'
s.static_framework = true
s.prefix_header_file = false

# --- Sources and headers --------------------------------------------------
base_dir = "FirebaseRemoteConfig/Sources/"
s.source_files = base_dir + '**/*.[mh]'
s.requires_arc = base_dir + '*.m'
s.public_header_files = base_dir + 'Public/*.h'
s.private_header_files = base_dir + 'Private/*.h'
s.pod_target_xcconfig = {
  'GCC_C_LANGUAGE_STANDARD' => 'c99',
  'GCC_PREPROCESSOR_DEFINITIONS' =>
    'GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS=1 ' +
    'FIRRemoteConfig_VERSION=' + String(s.version),
  'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"'
}

# --- Dependencies ---------------------------------------------------------
s.dependency 'FirebaseAnalyticsInterop', '~> 1.4'
s.dependency 'FirebaseABTesting', '~> 3.1'
s.dependency 'FirebaseCore', '~> 6.2'
s.dependency 'FirebaseInstanceID', '~> 4.2'
s.dependency 'GoogleUtilities/Environment', '~> 6.2'
s.dependency 'GoogleUtilities/NSData+zlib', '~> 6.2'
s.dependency 'Protobuf', '~> 3.9', '>= 3.9.2'

# --- Unit test spec -------------------------------------------------------
s.test_spec 'unit' do |unit_tests|
  # TODO(dmandar) - Update or delete the commented files.
  unit_tests.source_files =
    'FirebaseRemoteConfig/Tests/Unit/FIRRemoteConfigComponentTest.m',
    'FirebaseRemoteConfig/Tests/Unit/RCNConfigContentTest.m',
    'FirebaseRemoteConfig/Tests/Unit/RCNConfigDBManagerTest.m',
    # 'FirebaseRemoteConfig/Tests/Unit/RCNConfigSettingsTest.m',
    # 'FirebaseRemoteConfig/Tests/Unit/RCNConfigTest.m',
    'FirebaseRemoteConfig/Tests/Unit/RCNConfigExperimentTest.m',
    'FirebaseRemoteConfig/Tests/Unit/RCNConfigValueTest.m',
    # 'FirebaseRemoteConfig/Tests/Unit/RCNRemoteConfig+FIRAppTest.m',
    'FirebaseRemoteConfig/Tests/Unit/RCNRemoteConfigTest.m',
    # 'FirebaseRemoteConfig/Tests/Unit/RCNThrottlingTests.m',
    'FirebaseRemoteConfig/Tests/Unit/RCNTestUtilities.m',
    'FirebaseRemoteConfig/Tests/Unit/RCNUserDefaultsManagerTests.m',
    'FirebaseRemoteConfig/Tests/Unit/RCNTestUtilities.h',
    'FirebaseRemoteConfig/Tests/Unit/RCNInstanceIDTest.m'
  # Supply plist custom plist testing.
  unit_tests.resources =
    'FirebaseRemoteConfig/Tests/Unit/Defaults-testInfo.plist',
    'FirebaseRemoteConfig/Tests/Unit/SecondApp-GoogleService-Info.plist'
  unit_tests.requires_app_host = true
  unit_tests.dependency 'OCMock'
  unit_tests.requires_arc = true
end
end
| 41.826667 | 76 | 0.687281 |
b9ba6adc4069ed26d4b050b9c010779335a29fa6 | 298 | # This migration comes from spree_promo (originally 20100419190933)
class RenameCouponsToPromotions < ActiveRecord::Migration
  # Renames the legacy `coupons` table to `promotions`.
  # NOTE(review): any pre-existing `promotions` table is dropped first — this
  # is destructive and assumes such a table could only come from a partial
  # earlier run of this migration.
  def up
    drop_table :promotions if table_exists?(:promotions)
    rename_table :coupons, :promotions
  end

  # Restores the original `coupons` name (a table dropped by #up is not
  # restored).
  def down
    rename_table :promotions, :coupons
  end
end
| 24.833333 | 67 | 0.775168 |
e988759ba4c53422e4099c44625486e9f9ae3c93 | 2,770 | class Gmt < Formula
desc "Tools for processing and displaying xy and xyz datasets"
homepage "https://gmt.soest.hawaii.edu/"
url "ftp://ftp.soest.hawaii.edu/gmt/gmt-5.4.2-src.tar.xz"
mirror "http://gd.tuwien.ac.at/pub/gmt/gmt-5.4.2-src.tar.xz"
mirror "http://ftp.iris.washington.edu/pub/gmt/gmt-5.4.2-src.tar.xz"
mirror "ftp://ftp.star.nesdis.noaa.gov/pub/sod/lsa/gmt/gmt-5.4.2-src.tar.xz"
sha256 "ddcd63094aeda5a60f541626ed7ab4a78538d52dea24ba915f168e4606e587f5"

bottle do
  sha256 "e8b714984fa9c1f657a1af95273517b2bb75818108c1474b03f8a2bd20e441d2" => :high_sierra
  sha256 "bf7317df2e9300d6da479e78f31e9dd62d8d873fdbe54ed2970dd669d29ffa24" => :sierra
  sha256 "3511d1334f4906c4f9c3d976fe47bddf709c4b5196283d01753d89e831876e75" => :el_capitan
  sha256 "2b9d336656d6d996e67ec5c45e063f3307ff20b1037fbc461a8da12ae7e2e6fc" => :yosemite
end

depends_on "cmake" => :build
depends_on "fftw"
depends_on "gdal"
depends_on "netcdf"
depends_on "pcre"

# GSHHG shoreline data set; staged into the build and copied by CMake
# (see COPY_GSHHG in #install).
resource "gshhg" do
  url "ftp://ftp.soest.hawaii.edu/gmt/gshhg-gmt-2.3.7.tar.gz"
  mirror "http://gd.tuwien.ac.at/pub/gmt/gshhg-gmt-2.3.7.tar.gz"
  mirror "http://ftp.iris.washington.edu/pub/gmt/gshhg-gmt-2.3.7.tar.gz"
  mirror "ftp://ftp.star.nesdis.noaa.gov/pub/sod/lsa/gmt/gshhg-gmt-2.3.7.tar.gz"
  sha256 "9bb1a956fca0718c083bef842e625797535a00ce81f175df08b042c2a92cfe7f"
end

# DCW (Digital Chart of the World) boundary data; staged and copied the same
# way (see COPY_DCW in #install).
resource "dcw" do
  url "ftp://ftp.soest.hawaii.edu/gmt/dcw-gmt-1.1.2.tar.gz"
  mirror "http://gd.tuwien.ac.at/pub/gmt/dcw-gmt-1.1.2.tar.gz"
  mirror "http://ftp.iris.washington.edu/pub/gmt/dcw-gmt-1.1.2.tar.gz"
  mirror "ftp://ftp.star.nesdis.noaa.gov/pub/sod/lsa/gmt/dcw-gmt-1.1.2.tar.gz"
  sha256 "f719054f8d657e7b10b5182d4c15bc7f38ef7483ed05cdaa9f94ab1a0008bfb6"
end
def install
  # Stage the coastline/boundary data resources where the CMake flags below
  # (GSHHG_ROOT/DCW_ROOT with COPY_*) expect to find them.
  (buildpath/"gshhg").install resource("gshhg")
  (buildpath/"dcw").install resource("dcw")

  args = std_cmake_args.concat %W[
    -DCMAKE_INSTALL_PREFIX=#{prefix}
    -DGMT_DOCDIR=#{share}/doc/gmt
    -DGMT_MANDIR=#{man}
    -DGSHHG_ROOT=#{buildpath}/gshhg
    -DCOPY_GSHHG:BOOL=TRUE
    -DDCW_ROOT=#{buildpath}/dcw
    -DCOPY_DCW:BOOL=TRUE
    -DFFTW3_ROOT=#{Formula["fftw"].opt_prefix}
    -DGDAL_ROOT=#{Formula["gdal"].opt_prefix}
    -DNETCDF_ROOT=#{Formula["netcdf"].opt_prefix}
    -DPCRE_ROOT=#{Formula["pcre"].opt_prefix}
    -DFLOCK:BOOL=TRUE
    -DGMT_INSTALL_MODULE_LINKS:BOOL=TRUE
    -DGMT_INSTALL_TRADITIONAL_FOLDERNAMES:BOOL=FALSE
    -DLICENSE_RESTRICTED:BOOL=FALSE
  ]

  # Out-of-source CMake build.
  mkdir "build" do
    system "cmake", "..", *args
    system "make", "install"
  end
end
test do
  # Render a simple Mercator coastline map and verify a PostScript file is
  # produced.
  system "#{bin}/pscoast -R0/360/-70/70 -Jm1.2e-2i -Ba60f30/a30f15 -Dc -G240 -W1/0 -P > test.ps"
  assert_predicate testpath/"test.ps", :exist?
end
end
| 38.472222 | 98 | 0.709025 |
213d3889dec57be4d232472a4419da12d10bac9b | 36,708 | require 'spec_helper_min'
require 'support/helpers'
require 'helpers/database_connection_helper'
describe Carto::ApiKey do
include CartoDB::Factories
include DatabaseConnectionHelper
# Returns the table-permission entry the database reports for the given
# schema/table pair, or nil when the API key has no entry for it.
def api_key_table_permissions(api_key, schema, table_name)
  api_key.table_permissions_from_db.detect do |permission|
    permission.schema == schema && permission.name == table_name
  end
end
# Returns the schema-permission entry the database reports for +schema+,
# or nil when the API key has no entry for it.
def api_key_schema_permissions(api_key, schema)
  api_key.schema_permissions_from_db.detect { |permission| permission.name == schema }
end
# Builds a "database" grant payload covering one table plus schema-level
# permissions, mirroring the JSON the API keys endpoint accepts.
def database_grant(database_schema = 'wadus', table_name = 'wadus',
                   owner = false,
                   permissions: ['insert', 'select', 'update', 'delete'],
                   schema_permissions: ['create'])
  table_entry = {
    schema: database_schema,
    name: table_name,
    owner: owner,
    permissions: permissions
  }
  schema_entry = { name: database_schema, permissions: schema_permissions }
  { type: "database", tables: [table_entry], schemas: [schema_entry] }
end
# Builds a "database" grant payload for a single table (no schema section).
def table_grant(database_schema = 'wadus', table_name = 'wadus', owner = false,
                permissions: ['insert', 'select', 'update', 'delete'])
  table_entry = {
    schema: database_schema,
    name: table_name,
    owner: owner,
    permissions: permissions
  }
  { type: "database", tables: [table_entry] }
end
# Builds a "database" grant payload for schema-level permissions only.
def schema_grant(database_schema = 'wadus', schema_permissions: ['create'])
  schema_entry = { name: database_schema, permissions: schema_permissions }
  { type: "database", schemas: [schema_entry] }
end
# Builds an "apis" grant payload allowing access to the given CARTO APIs.
def apis_grant(apis = ['maps', 'sql'])
  { type: 'apis', apis: apis }
end
# Builds a "dataservices" grant payload for the given Data Services.
def data_services_grant(services = ['geocoding', 'routing', 'isolines', 'observatory'])
  { type: 'dataservices', services: services }
end
# Builds a "user" grant payload exposing the given user data fields.
def user_grant(data = ['profile'])
  { type: 'user', data: data }
end
shared_examples_for 'api key' do
before(:all) do
@table1 = create_table(user_id: @carto_user1.id)
@table2 = create_table(user_id: @carto_user1.id)
@table3 = create_table(user_id: @carto_user1.id)
end
after(:all) do
bypass_named_maps
@table2.destroy
@table1.destroy
@table3.destroy
end
after(:each) do
@carto_user1.reload.api_keys.where(type: Carto::ApiKey::TYPE_REGULAR).each(&:delete)
end
it 'can grant insert, select, update delete to a database role' do
grants = [database_grant(@table1.database_schema, @table1.name), apis_grant]
api_key = @carto_user1.api_keys.create_regular_key!(name: 'full', grants: grants)
with_connection_from_api_key(api_key) do |connection|
begin
connection.execute("select count(1) from #{@table2.name}")
rescue Sequel::DatabaseError => e
failed = true
e.message.should match /permission denied .* #{@table2.name}/
end
failed.should be_true
connection.execute("select count(1) from #{@table1.name}") do |result|
result[0]['count'].should eq '0'
end
connection.execute("insert into #{@table1.name} (name) values ('wadus')")
connection.execute("select count(1) from #{@table1.name}") do |result|
result[0]['count'].should eq '1'
end
connection.execute("update #{@table1.name} set name = 'wadus2' where name = 'wadus'")
connection.execute("delete from #{@table1.name} where name = 'wadus2'")
connection.execute("select count(1) from #{@table1.name}") do |result|
result[0]['count'].should eq '0'
end
end
api_key.destroy
end
it 'can grant only with select and update permissions' do
grants = [database_grant(@table1.database_schema, @table1.name, permissions: ['select', 'update']), apis_grant]
api_key = @carto_user1.api_keys.create_regular_key!(name: 'only_update', grants: grants)
with_connection_from_api_key(api_key) do |connection|
connection.execute("select count(1) from #{@table1.name}") do |result|
result[0]['count'].should eq '0'
end
connection.execute("update #{@table1.name} set name = 'wadus2' where name = 'wadus'")
end
end
it 'grants create tables on schema' do
table1 = create_table(user_id: @carto_user1.id)
grants = [schema_grant(table1.database_schema), apis_grant]
api_key = @carto_user1.api_keys.create_regular_key!(name: 'only_update', grants: grants)
table1.destroy
with_connection_from_api_key(api_key) do |connection|
create_select_drop_check(connection, table1.database_schema, table1.name)
end
end
it 'master role is able to drop the table created by regular api with schema grants' do
grants = [schema_grant(@carto_user1.database_schema), apis_grant]
api_key = @carto_user1.api_keys.create_regular_key!(name: 'drop_by_master', grants: grants)
with_connection_from_api_key(api_key) do |connection|
create_select_drop_check(connection, @carto_user1.database_schema, 'test_table', false)
end
@carto_user1.in_database.execute("drop table test_table")
end
it 'reassign created table ownership after delete the api key' do
grants = [schema_grant(@carto_user1.database_schema), apis_grant]
api_key = @carto_user1.api_keys.create_regular_key!(name: 'drop_test', grants: grants)
with_connection_from_api_key(api_key) do |connection|
create_select_drop_check(connection, @carto_user1.database_schema, 'test_table', false)
end
api_key.destroy
ownership_query = "select pg_catalog.pg_get_userbyid(relowner) as owner from pg_class where relname = 'test_table'"
@carto_user1.in_database.execute(ownership_query) do |result|
result[0]['owner'].should eq @carto_user1.database_username
end
@carto_user1.in_database.execute("drop table test_table")
end
it 'fails to grant to a non-existent schema' do
expect {
grants = [schema_grant('not-exists'), apis_grant]
@carto_user1.api_keys.create_regular_key!(name: 'full', grants: grants)
}.to raise_exception(ActiveRecord::RecordInvalid, /can only grant schema permissions you have/)
end
it 'fails to grant to a non-existent table' do
expect {
grants = [database_grant(@carto_user1.database_schema, 'not-exists'), apis_grant]
@carto_user1.api_keys.create_regular_key!(name: 'full', grants: grants)
}.to raise_exception(ActiveRecord::RecordInvalid, /can only grant table permissions you have/)
end
it 'fails to grant to system table' do
expect {
grants = [database_grant('cartodb', 'cdb_tablemetadata'), apis_grant]
@carto_user1.api_keys.create_regular_key!(name: 'full', grants: grants)
}.to raise_exception ActiveRecord::RecordInvalid
end
it 'fails to access system tables' do
api_key = @carto_user1.api_keys.create_regular_key!(name: 'full', grants: [apis_grant])
with_connection_from_api_key(api_key) do |connection|
['cdb_tablemetadata', 'cdb_analysis_catalog'].each do |table|
expect {
connection.execute("select count(1) from cartodb.#{table}")
}.to raise_exception /permission denied/
end
end
end
it 'fails to access schemas not granted' do
api_key = @carto_user1.api_keys.create_regular_key!(name: 'full', grants: [apis_grant])
with_connection_from_api_key(api_key) do |connection|
expect {
connection.execute("create table \"#{@table1.database_schema}\".test as select 1 as test")
}.to raise_exception /permission denied/
end
end
it 'fails to grant to system schema' do
expect {
grants = [schema_grant('information_schema'), apis_grant]
@carto_user1.api_keys.create_regular_key!(name: 'full', grants: grants)
}.to raise_exception ActiveRecord::RecordInvalid
end
it 'fails to create table in system schema' do
api_key = @carto_user1.api_keys.create_regular_key!(name: 'full', grants: [apis_grant])
with_connection_from_api_key(api_key) do |connection|
expect {
connection.execute("create table information_schema.test as select 1 as test")
}.to raise_exception /permission denied/
end
end
it 'grants to a double quoted table name' do
old_name = @table3.name
@user1.in_database.run("ALTER TABLE #{old_name} RENAME TO \"wadus\"\"wadus\"")
grants = [database_grant(@carto_user1.database_schema, 'wadus"wadus'), apis_grant]
api_key = @carto_user1.api_keys.create_regular_key!(name: 'valid_table_name', grants: grants)
with_connection_from_api_key(api_key) do |connection|
connection.execute("select count(1) from \"wadus\"\"wadus\"") do |result|
result[0]['count'].should eq '0'
end
end
api_key.destroy
@user1.in_database.run("ALTER TABLE \"wadus\"\"wadus\" RENAME TO #{old_name}")
end
it 'grants view' do
view_name = 'cool_view'
validate_view_api_key(
view_name,
"CREATE VIEW #{view_name} AS SELECT * FROM #{@table1.name}",
"DROP VIEW #{view_name}"
)
validate_view_api_key(
view_name,
"CREATE MATERIALIZED VIEW #{view_name} AS SELECT * FROM #{@table1.name}",
"DROP MATERIALIZED VIEW #{view_name}"
)
end
def validate_view_api_key(view_name, create_query, drop_query)
@user1.in_database.run(create_query)
grants = [apis_grant(['sql']), database_grant(@table1.database_schema, view_name)]
api_key = @carto_user1.api_keys.create_regular_key!(name: 'grants_view', grants: grants)
with_connection_from_api_key(api_key) do |connection|
begin
connection.execute("select count(1) from #{@table1.name}")
rescue Sequel::DatabaseError => e
e.message.should match /permission denied .* #{@table1.name}/
end
connection.execute("select count(1) from #{view_name}") do |result|
result[0]['count'].should eq '0'
end
end
@user1.in_database.run(drop_query)
api_key.destroy
end
it 'show ownership of the tables for the user' do
grants = [schema_grant(@carto_user1.database_schema), apis_grant]
api_key = @carto_user1.api_keys.create_regular_key!(name: 'table_owner_test', grants: grants)
with_connection_from_api_key(api_key) do |connection|
create_select_drop_check(connection, @carto_user1.database_schema, 'test_table', false)
end
permissions = api_key.table_permissions_from_db
permissions.each do |p|
if p.name == 'test_table'
p.owner.should eq true
end
end
with_connection_from_api_key(api_key) do |connection|
connection.execute("drop table \"#{@carto_user1.database_schema}\".test_table")
end
end
it 'regular key owner with creation permission can cartodbfy tables' do
grants = [schema_grant(@carto_user1.database_schema), apis_grant]
api_key = @carto_user1.api_keys.create_regular_key!(name: 'table_owner_test', grants: grants)
with_connection_from_api_key(api_key) do |connection|
connection.execute("create table \"#{@carto_user1.database_schema}\".test_table(id int)")
connection.execute("select cdb_cartodbfytable('#{@carto_user1.database_schema}', 'test_table')")
connection.execute("insert into \"#{@carto_user1.database_schema}\".test_table(the_geom, id) values ('0103000020E610000001000000040000009A99999999C9524048E17A14AE873D4000000000004053400000000000003D4066666666666653400000000000803D409A99999999C9524048E17A14AE873D40'::geometry, 1)")
connection.execute("select * from \"#{@carto_user1.database_schema}\".test_table") do |result|
result[0]['cartodb_id'].should eq '1'
result[0]['id'].should eq '1'
end
connection.execute("drop table \"#{@carto_user1.database_schema}\".test_table")
end
end
let (:grants) { [database_grant(@table1.database_schema, @table1.name), apis_grant] }
describe '#destroy' do
it 'removes the role from DB' do
api_key = @carto_user1.api_keys.create_regular_key!(name: 'full', grants: grants)
@user1.in_database(as: :superuser) do |db|
db.fetch("SELECT count(1) FROM pg_roles WHERE rolname = '#{api_key.db_role}'").first[:count].should eq 1
end
api_key.destroy
@user1.in_database(as: :superuser) do |db|
db.fetch("SELECT count(1) FROM pg_roles WHERE rolname = '#{api_key.db_role}'").first[:count].should eq 0
end
end
it 'removes the role from Redis' do
api_key = @carto_user1.api_keys.create_regular_key!(name: 'full', grants: grants)
$users_metadata.hgetall(api_key.send(:redis_key)).should_not be_empty
api_key.destroy
$users_metadata.hgetall(api_key.send(:redis_key)).should be_empty
end
it 'invalidates varnish cache' do
CartoDB::Varnish.any_instance.expects(:purge).with("#{@user1.database_name}.*").at_least(1)
api_key = @carto_user1.api_keys.create_regular_key!(name: 'full', grants: grants)
api_key.destroy
end
end
describe '#regenerate_token!' do
it 'regenerates the value in Redis only after save' do
api_key = @carto_user1.api_keys.create_regular_key!(name: 'full', grants: grants)
old_redis_key = api_key.send(:redis_key)
$users_metadata.hgetall(old_redis_key).should_not be_empty
api_key.regenerate_token!
new_redis_key = api_key.send(:redis_key)
new_redis_key.should_not be eq old_redis_key
$users_metadata.hgetall(new_redis_key).should_not be_empty
$users_metadata.hgetall(old_redis_key).should be_empty
# Additional check that just saving doesn't change Redis
api_key.save!
$users_metadata.hgetall(new_redis_key).should_not be_empty
$users_metadata.hgetall(old_redis_key).should be_empty
end
it 'invalidates varnish cache' do
api_key = @carto_user1.api_keys.create_regular_key!(name: 'full', grants: grants)
CartoDB::Varnish.any_instance.expects(:purge).with("#{@user1.database_name}.*").at_least(1)
api_key.regenerate_token!
api_key.save!
end
end
describe 'validations' do
  it 'fails with invalid schema permissions' do
    database_grants = {
      type: "database",
      tables: [
        {
          schema: "wadus",
          name: "wadus",
          permissions: ["insert"]
        }
      ],
      schemas: [
        {
          name: "wadus",
          # "insert" is not a valid schema-level permission, so validation
          # below must reject the whole grants payload.
          permissions: ["create", "insert"]
        }
      ]
    }
    grants = [apis_grant, database_grants]
    expect {
      @carto_user1.api_keys.create_regular_key!(name: 'x', grants: grants)
    }.to raise_exception(ActiveRecord::RecordInvalid, /value "insert" did not match one of the following values/)
  end
  it 'validates with no tables' do
    # A database section without tables/schemas is a legal (empty) grant.
    database_grants = {
      type: "database"
    }
    grants = [apis_grant, database_grants]
    expect {
      @carto_user1.api_keys.create_regular_key!(name: 'x', grants: grants)
    }.to_not raise_error
  end
  it 'validates tables_metadata grant' do
    database_grants = {
      type: "database",
      table_metadata: []
    }
    grants = [apis_grant, database_grants]
    expect {
      @carto_user1.api_keys.create_regular_key!(name: 'x', grants: grants)
    }.to_not raise_error
  end
  it 'fails with several apis sections' do
    two_apis_grant = [apis_grant, apis_grant, database_grant]
    expect {
      @carto_user1.api_keys.create_regular_key!(name: 'x', grants: two_apis_grant)
    }.to raise_exception(ActiveRecord::RecordInvalid, /Grants only one apis section is allowed/)
  end
  it 'fails with several database sections' do
    two_apis_grant = [apis_grant, database_grant, database_grant]
    expect {
      @carto_user1.api_keys.create_regular_key!(name: 'x', grants: two_apis_grant)
    }.to raise_exception(ActiveRecord::RecordInvalid, /Grants only one database section is allowed/)
  end
  it 'fails when creating without apis grants' do
    grants = JSON.parse('
    [
      {
        "type": "database",
        "tables": [{
          "name": "something",
          "schema": "public",
          "permissions": [
            "select"
          ]
        },
        {
          "name": "another",
          "schema": "public",
          "permissions": ["insert", "update", "select"]
        }
        ]
      }
    ]', symbolize_names: true)
    expect {
      @carto_user1.api_keys.create_regular_key!(name: 'x', grants: grants)
    }.to raise_exception(ActiveRecord::RecordInvalid, /Grants only one apis section is allowed/)
  end
  it 'fails with several dataservices sections' do
    two_apis_grant = [apis_grant, data_services_grant, data_services_grant]
    expect {
      @carto_user1.api_keys.create_regular_key!(name: 'x', grants: two_apis_grant)
    }.to raise_exception(ActiveRecord::RecordInvalid, /Grants only one dataservices section is allowed/)
  end
  it 'fails with several user sections' do
    two_apis_grant = [apis_grant, user_grant, user_grant]
    expect {
      @carto_user1.api_keys.create_regular_key!(name: 'x', grants: two_apis_grant)
    }.to raise_exception(ActiveRecord::RecordInvalid, /Grants only one user section is allowed/)
  end
  context 'without enough regular api key quota' do
    before(:all) do
      @carto_user1.regular_api_key_quota = 0
      @carto_user1.save
    end
    after(:all) do
      # Bug fix: this previously assigned the RSpec matcher object `be_nil`
      # to the attribute instead of resetting it. Restore the unlimited
      # (nil) quota so later examples are not affected.
      @carto_user1.regular_api_key_quota = nil
      @carto_user1.save
    end
    it 'raises an exception when creating a regular key' do
      grants = [database_grant(@table1.database_schema, @table1.name), apis_grant]
      expect {
        @carto_user1.api_keys.create_regular_key!(name: 'full', grants: grants)
      }.to raise_exception(CartoDB::QuotaExceeded, /limit of API keys/)
    end
  end
end
describe '#table_permission_from_db' do
  before(:all) do
    @public_table = create_table(user_id: @carto_user1.id)
    @public_table.table_visualization.update_attributes(privacy: 'public')
  end
  after(:all) do
    @public_table.destroy
  end
  it 'loads newly created grants for role' do
    grants = [database_grant(@user1.database_schema, @table1.name), apis_grant(['maps', 'sql'])]
    api_key = @carto_user1.api_keys.create_regular_key!(name: 'wadus', grants: grants)
    # Grant SELECT directly in the database, bypassing the API-key machinery,
    # and verify the key reads the permission back from the DB.
    sql = "grant SELECT on table \"#{@table2.database_schema}\".\"#{@table2.name}\" to \"#{api_key.db_role}\""
    @user1.in_database(as: :superuser).run(sql)
    table_permission = api_key_table_permissions(api_key, @table2.database_schema, @table2.name)
    table_permission.should be
    table_permission.permissions.should include('select')
    api_key.destroy
  end
  it 'doesn\'t show removed table' do
    permissions = ['insert', 'select', 'update', 'delete']
    grants = [database_grant(@user1.database_schema, @table1.name, permissions: permissions), apis_grant]
    api_key = @carto_user1.api_keys.create_regular_key!(name: 'wadus', grants: grants)
    permissions.each do |permission|
      api_key_table_permissions(api_key, @table1.database_schema, @table1.name).permissions.should include(permission)
    end
    # Dropping the table should make its permissions disappear from the key.
    sql = "drop table \"#{@user1.database_schema}\".\"#{@table1.name}\""
    @user1.in_database(as: :superuser).run(sql)
    api_key_table_permissions(api_key, @table1.database_schema, @table1.name).should be_nil
    api_key.destroy
  end
  it 'shows public tables' do
    api_key = @carto_user1.api_keys.default_public.first
    # Org users resolve public access differently, so only assert for
    # non-organization users here.
    unless @carto_user1.has_organization?
      api_key_table_permissions(api_key, @public_table.database_schema, @public_table.name)
        .permissions.should eq ['select']
    end
  end
end
describe '#schema_permission_from_db' do
  before(:all) do
    @public_table = create_table(user_id: @carto_user1.id)
  end
  after(:all) do
    @public_table.destroy
  end
  it 'loads newly created grants for role' do
    schema_name = 'test'
    grants = [apis_grant(['maps', 'sql'])]
    api_key = @carto_user1.api_keys.create_regular_key!(name: 'wadus', grants: grants)
    # No permission before the schema exists / before the grant is issued.
    schema_permission = api_key_schema_permissions(api_key, schema_name)
    schema_permission.should be_nil
    create_schema
    sql = "GRANT CREATE ON SCHEMA \"#{schema_name}\" to \"#{api_key.db_role}\""
    @user1.in_database(as: :superuser).run(sql)
    schema_permission = api_key_schema_permissions(api_key, schema_name)
    schema_permission.should be
    schema_permission.permissions.should include('create')
    drop_schema
    api_key.destroy
  end
  it 'doesn\'t show removed schema' do
    schema_name = 'test'
    create_schema
    grant_user
    api_key = create_api_key
    permissions = ['create']
    permissions.each do |permission|
      api_key_schema_permissions(api_key, schema_name).permissions.should include(permission)
    end
    # Dropping the schema should make its permissions disappear from the key.
    drop_schema
    api_key_schema_permissions(api_key, schema_name).should be_nil
    api_key.destroy
  end
  it 'grants creation in schema to master role' do
    schema_name = 'test'
    create_schema
    grant_user
    # Creating a regular key with CREATE on the schema should also surface
    # the permission on the user's master key.
    api_key = create_api_key
    master_api_key = @carto_user1.api_keys.master.first
    permissions = ['create']
    permissions.each do |permission|
      api_key_schema_permissions(master_api_key, schema_name).permissions.should include(permission)
    end
    drop_schema
    api_key_schema_permissions(master_api_key, schema_name).should be_nil
    api_key.destroy
  end
  it 'shows public schemas' do
    api_key = @carto_user1.api_keys.default_public.first
    # Only meaningful for non-organization users (org users share schemas).
    unless @carto_user1.has_organization?
      api_key_schema_permissions(api_key, @public_table.database_schema)
        .permissions.should eq ['usage']
    end
  end
end
describe 'master api key' do
  it 'user has a master key with the user db_role' do
    # The master key reuses the user's own database credentials.
    api_key = @carto_user1.api_keys.master.first
    api_key.should be
    api_key.db_role.should eq @carto_user1.database_username
    api_key.db_password.should eq @carto_user1.database_password
  end
  it 'cannot create more than one master key' do
    expect {
      @carto_user1.api_keys.create_master_key!
    }.to raise_error(ActiveRecord::RecordInvalid)
  end
  it 'create master api key works' do
    # After destroying the master key, a fresh one can be created and must
    # again carry the user's database credentials.
    api_key = @carto_user1.api_keys.master.first
    api_key.destroy
    @carto_user1.api_keys.create_master_key!
    api_key = @carto_user1.api_keys.master.first
    api_key.should be
    api_key.db_role.should eq @carto_user1.database_username
    api_key.db_password.should eq @carto_user1.database_password
  end
  it 'cannot create a non master api_key with master as the name' do
    expect {
      @carto_user1.api_keys.create_regular_key!(name: Carto::ApiKey::NAME_MASTER, grants: [apis_grant])
    }.to raise_error(ActiveRecord::RecordInvalid)
  end
  it 'token must match user api key' do
    # The master key's token is derived from the user model and must not be
    # overridable by hand.
    api_key = @carto_user1.api_keys.master.first
    api_key.token = 'wadus'
    api_key.save.should be_false
    api_key.errors.full_messages.should include "Token must match user model for master keys"
  end
end
describe 'default public api key' do
  it 'user has a default public key with the public_db_user role' do
    # The default public key maps to the shared public DB user/password.
    api_key = @carto_user1.api_keys.default_public.first
    api_key.should be
    api_key.db_role.should eq @carto_user1.database_public_username
    api_key.db_password.should eq CartoDB::PUBLIC_DB_USER_PASSWORD
  end
  it 'cannot create more than one default public key' do
    expect {
      @carto_user1.api_keys.create_default_public_key!
    }.to raise_error(ActiveRecord::RecordInvalid)
  end
  it 'cannot create a non default public api_key with default public name' do
    expect {
      @carto_user1.api_keys.create_regular_key!(name: Carto::ApiKey::NAME_DEFAULT_PUBLIC, grants: [apis_grant])
    }.to raise_error(ActiveRecord::RecordInvalid)
  end
  it 'cannot change token' do
    # The public key token is a fixed well-known value and must not be editable.
    api_key = @carto_user1.api_keys.default_public.first
    api_key.token = 'wadus'
    api_key.save.should be_false
    api_key.errors.full_messages.should include "Token must be default_public for default public keys"
  end
end
describe 'data services api key' do
  before :each do
    # Use a deterministic, pre-known db_role so the CDB_Conf key can be
    # computed before the api key actually creates its role.
    @db_role = Carto::DB::Sanitize.sanitize_identifier("carto_role_#{SecureRandom.hex}")
    Carto::ApiKey.any_instance.stubs(:db_role).returns(@db_role)
  end
  after :each do
    Carto::ApiKey.any_instance.unstub(:db_role)
  end
  it 'cdb_conf info with dataservices' do
    grants = [apis_grant, data_services_grant]
    api_key = @carto_user1.api_keys.create_regular_key!(name: 'dataservices', grants: grants)
    expected = { username: @carto_user1.username,
                 permissions: ['geocoding', 'routing', 'isolines', 'observatory'],
                 ownership_role_name: '' }
    # The key publishes its dataservices permissions into CDB_Conf...
    @user1.in_database(as: :superuser) do |db|
      query = "SELECT cartodb.cdb_conf_getconf('#{Carto::ApiKey::CDB_CONF_KEY_PREFIX}#{api_key.db_role}')"
      config = db.fetch(query).first[:cdb_conf_getconf]
      expect(JSON.parse(config, symbolize_names: true)).to eql(expected)
    end
    api_key.destroy
    # ...and removes the entry when destroyed.
    @user1.in_database(as: :superuser) do |db|
      query = "SELECT cartodb.cdb_conf_getconf('#{Carto::ApiKey::CDB_CONF_KEY_PREFIX}#{api_key.db_role}')"
      db.fetch(query).first[:cdb_conf_getconf].should be_nil
    end
  end
  it 'cdb_conf info without dataservices' do
    grants = [apis_grant]
    api_key = @carto_user1.api_keys.create_regular_key!(name: 'testname', grants: grants)
    # Without a dataservices grant the published permissions list is empty.
    expected = { username: @carto_user1.username, permissions: [], ownership_role_name: '' }
    @user1.in_database(as: :superuser) do |db|
      query = "SELECT cartodb.cdb_conf_getconf('#{Carto::ApiKey::CDB_CONF_KEY_PREFIX}#{api_key.db_role}')"
      config = db.fetch(query).first[:cdb_conf_getconf]
      expect(JSON.parse(config, symbolize_names: true)).to eql(expected)
    end
    api_key.destroy
    @user1.in_database(as: :superuser) do |db|
      query = "SELECT cartodb.cdb_conf_getconf('#{Carto::ApiKey::CDB_CONF_KEY_PREFIX}#{api_key.db_role}')"
      db.fetch(query).first[:cdb_conf_getconf].should be_nil
    end
  end
  it 'fails with invalid data services' do
    # NOTE(review): this call passes the services positionally while the
    # example below uses `services:` — confirm data_services_grant accepts both.
    grants = [apis_grant, data_services_grant(['invalid-service'])]
    expect {
      @carto_user1.api_keys.create_regular_key!(name: 'bad', grants: grants)
    }.to raise_error(ActiveRecord::RecordInvalid)
  end
  it 'fails with valid and invalid data services' do
    grants = [apis_grant, data_services_grant(services: ['geocoding', 'invalid-service'])]
    expect {
      @carto_user1.api_keys.create_regular_key!(name: 'bad', grants: grants)
    }.to raise_error(ActiveRecord::RecordInvalid)
  end
  it 'fails with invalid data services key' do
    grants = [
      apis_grant,
      {
        type: 'dataservices',
        invalid: ['geocoding']
      }
    ]
    expect {
      @carto_user1.api_keys.create_regular_key!(name: 'bad', grants: grants)
    }.to raise_error(ActiveRecord::RecordInvalid)
  end
end
describe 'filter by type' do
  it 'filters just master' do
    api_keys = @carto_user1.api_keys.by_type([Carto::ApiKey::TYPE_MASTER])
    api_keys.count.should eq 1
    api_keys.first.type.should eq Carto::ApiKey::TYPE_MASTER
  end
  it 'filters just default_public' do
    api_keys = @carto_user1.api_keys.by_type([Carto::ApiKey::TYPE_DEFAULT_PUBLIC])
    api_keys.count.should eq 1
    api_keys.first.type.should eq Carto::ApiKey::TYPE_DEFAULT_PUBLIC
  end
  it 'filters default_public and master' do
    api_keys = @carto_user1.api_keys.by_type([Carto::ApiKey::TYPE_DEFAULT_PUBLIC, Carto::ApiKey::TYPE_MASTER])
    api_keys.count.should eq 2
  end
  # An empty or nil type filter means "no filtering": the user's two
  # built-in keys (master + default public) are returned.
  it 'filters all if empty array' do
    api_keys = @carto_user1.api_keys.by_type([])
    api_keys.count.should eq 2
  end
  it 'filters all if nil type' do
    api_keys = @carto_user1.api_keys.by_type(nil)
    api_keys.count.should eq 2
  end
end
end
describe 'with plain users' do
  before(:all) do
    # Private tables/maps enabled so the shared examples can exercise grants
    # on non-public datasets.
    @user1 = FactoryGirl.create(:valid_user, private_tables_enabled: true, private_maps_enabled: true)
    @carto_user1 = Carto::User.find(@user1.id)
  end
  after(:all) do
    @user1.destroy
  end
  # Runs the full shared 'api key' example group against a plain user.
  it_behaves_like 'api key'
end
describe 'with organization users' do
  before(:all) do
    @auth_organization = FactoryGirl.create(:organization, quota_in_bytes: 1.gigabytes)
    @user1 = TestUserFactory.new.create_owner(@auth_organization)
    @carto_user1 = Carto::User.find(@user1.id)
  end
  after(:all) do
    @user1.destroy
    @auth_organization.destroy
  end
  # Runs the full shared 'api key' example group against an org owner.
  it_behaves_like 'api key'
  it 'fails to grant to a non-owned table' do
    # A key may only grant permissions on tables its own user owns.
    other_user = TestUserFactory.new.create_test_user(unique_name('user'), @auth_organization)
    table = create_table(user_id: other_user.id)
    grants = [table_grant(table.database_schema, table.name), apis_grant]
    expect {
      @carto_user1.api_keys.create_regular_key!(name: 'full', grants: grants)
    }.to raise_exception ActiveRecord::RecordInvalid
    table.destroy
    other_user.destroy
  end
  it 'fails to grant to a non-owned schema' do
    other_user = TestUserFactory.new.create_test_user(unique_name('user'), @auth_organization)
    table = create_table(user_id: other_user.id)
    grants = [schema_grant(table.database_schema), apis_grant]
    expect {
      @carto_user1.api_keys.create_regular_key!(name: 'full', grants: grants)
    }.to raise_exception ActiveRecord::RecordInvalid
    table.destroy
    other_user.destroy
  end
  it 'drop role with grants of objects owned by other user' do
    # Destroying the key must succeed even when its DB role holds grants on
    # another user's objects (role cleanup must revoke them first).
    user2 = TestUserFactory.new.create_test_user(unique_name('user'), @auth_organization)
    table_user2 = create_table(user_id: user2.id)
    schema_and_table_user2 = "\"#{table_user2.database_schema}\".#{table_user2.name}"
    table_user1 = create_table(user_id: @carto_user1.id)
    grants = [table_grant(table_user1.database_schema, table_user1.name), apis_grant]
    api_key = @carto_user1.api_keys.create_regular_key!(name: 'full', grants: grants)
    user2.in_database.run("GRANT SELECT ON #{schema_and_table_user2} TO \"#{api_key.db_role}\"")
    expect { api_key.destroy! }.to_not raise_error
    table_user1.destroy
    table_user2.destroy
    user2.destroy
  end
end
describe 'org shared tables' do
  include_context 'organization with users helper'
  before :each do
    # user_1 owns the table and shares it read-write with user_2.
    @shared_table = create_table(user_id: @carto_org_user_1.id)
    perm = @shared_table.table_visualization.permission
    perm.acl = [{ type: 'user', entity: { id: @carto_org_user_2.id }, access: 'rw' }]
    perm.save!
  end
  it 'should create an api key using a shared table' do
    grants = [apis_grant(['sql']), table_grant(@shared_table.database_schema, @shared_table.name)]
    api_key = @carto_org_user_2.api_keys.create_regular_key!(name: 'grants_shared', grants: grants)
    schema_table = "\"#{@shared_table.database_schema}\".\"#{@shared_table.name}\""
    # The key's DB role should be able to read the shared table.
    with_connection_from_api_key(api_key) do |connection|
      connection.execute("select count(1) from #{schema_table}") do |result|
        result[0]['count'].should eq '0'
      end
    end
    api_key.destroy
  end
  it 'should revoke permissions removing shared permissions (rw to r)' do
    grants = [apis_grant(['sql']), table_grant(@shared_table.database_schema, @shared_table.name)]
    api_key = @carto_org_user_2.api_keys.create_regular_key!(name: 'grants_shared', grants: grants)
    # remove shared permissions
    @shared_table.table_visualization.reload
    perm = @shared_table.table_visualization.permission
    perm.acl = [{ type: 'user', entity: { id: @carto_org_user_2.id }, access: 'r' }]
    perm.save!
    schema_table = "\"#{@shared_table.database_schema}\".\"#{@shared_table.name}\""
    # Downgrading the share to read-only must keep SELECT working but
    # revoke write access from the existing api key role.
    with_connection_from_api_key(api_key) do |connection|
      connection.execute("select count(1) from #{schema_table}") do |result|
        result[0]['count'].should eq '0'
      end
      expect {
        connection.execute("insert into #{schema_table} (name) values ('wadus')")
      }.to raise_exception /permission denied/
    end
    api_key.destroy
  end
  it 'should revoke permissions removing shared permissions (rw to none)' do
    grants = [apis_grant(['sql']), table_grant(@shared_table.database_schema, @shared_table.name)]
    api_key = @carto_org_user_2.api_keys.create_regular_key!(name: 'grants_shared', grants: grants)
    # remove shared permissions
    @shared_table.table_visualization.reload
    perm = @shared_table.table_visualization.permission
    perm.acl = []
    perm.save!
    schema_table = "\"#{@shared_table.database_schema}\".\"#{@shared_table.name}\""
    # Removing the share entirely must revoke both read and write access.
    with_connection_from_api_key(api_key) do |connection|
      expect {
        connection.execute("select count(1) from #{schema_table}")
      }.to raise_exception /permission denied/
      expect {
        connection.execute("insert into #{schema_table} (name) values ('wadus')")
      }.to raise_exception /permission denied/
    end
    api_key.destroy
  end
end
# Drops and recreates +schema_name+ and defines the _CDB_UserQuotaInBytes
# stub function inside it (CartoDB expects the function to exist in any
# schema it manages).
#
# Bug fix: the +schema_name+ argument was previously half-ignored — the
# drop_schema call took no argument and the function DDL hard-coded the
# "test" schema, so calling this with any other name left stale state.
# The default behaviour ('test') is unchanged.
def create_schema(schema_name = 'test')
  drop_schema(schema_name)
  create_function = <<~SQL
    CREATE FUNCTION "#{schema_name}"._CDB_UserQuotaInBytes() RETURNS integer AS $$
    BEGIN
    RETURN 1;
    END; $$
    LANGUAGE PLPGSQL;
  SQL
  @carto_user1.in_database(as: :superuser).execute("CREATE SCHEMA \"#{schema_name}\"")
  @carto_user1.in_database(as: :superuser).execute(create_function)
end
# Creates (recreating if necessary) a bare database role named +role_name+.
# Bug fix: drop_role was previously called without an argument, so a
# non-default +role_name+ would drop the wrong ('test') role.
def create_role(role_name = 'test')
  drop_role(role_name)
  @carto_user1.in_database(as: :superuser).execute("CREATE ROLE \"#{role_name}\"")
end
# Removes the given database role if it exists (idempotent).
def drop_role(role_name = 'test')
  superuser_db = @carto_user1.in_database(as: :superuser)
  superuser_db.execute("DROP ROLE IF EXISTS \"#{role_name}\"")
end
# Grants CREATE on +schema_name+ to the user's own database role.
def grant_user(schema_name = 'test')
  @carto_user1
    .in_database(as: :superuser)
    .execute("GRANT CREATE ON SCHEMA \"#{schema_name}\" to \"#{@carto_user1.database_username}\"")
end
# Builds a regular API key named 'wadus' granting +permissions+ on
# +schema_name+ plus the default apis grant.
def create_api_key(schema_name = 'test', permissions = ['create'])
  key_grants = [schema_grant(schema_name, schema_permissions: permissions), apis_grant]
  @carto_user1.api_keys.create_regular_key!(name: 'wadus', grants: key_grants)
end
# Builds an OAuth API key named 'wadus' granting +permissions+ on
# +schema_name+, owned by the database role +role+.
def create_oauth_api_key(schema_name = 'test', permissions = ['create'], role = 'test')
  key_grants = [schema_grant(schema_name, schema_permissions: permissions), apis_grant]
  @carto_user1.api_keys.create_oauth_key!(name: 'wadus', grants: key_grants, ownership_role_name: role)
end
# Drops +schema_name+ and everything in it, if present (idempotent).
def drop_schema(schema_name = 'test')
  @carto_user1.in_database(as: :superuser).execute("DROP SCHEMA IF EXISTS \"#{schema_name}\" CASCADE")
end
# Creates a one-row table through +connection+, asserts it is readable,
# and (when +drop+ is true) drops it again.
def create_select_drop_check(connection, schema, table_name, drop = true)
  qualified_name = "\"#{schema}\".#{table_name}"
  connection.execute("create table #{qualified_name} as select 1 as test")
  connection.execute("select count(1) from #{qualified_name}") do |result|
    result[0]['count'].should eq '1'
  end
  connection.execute("drop table #{qualified_name}") if drop
end
end
| 35.604268 | 289 | 0.657241 |
5d0a0ebec102261d4b74a529a817d5bf882f0277 | 303 | cask "font-nixie-one" do
version :latest
sha256 :no_check
url "https://github.com/google/fonts/raw/master/ofl/nixieone/NixieOne-Regular.ttf",
verified: "github.com/google/fonts/"
name "Nixie One"
homepage "https://fonts.google.com/specimen/Nixie+One"
font "NixieOne-Regular.ttf"
end
| 25.25 | 85 | 0.722772 |
0145585c05b608a4e8267f2bea14b217b2a67c09 | 4,726 | # encoding: utf-8
require 'tempfile'
# Tests for the XSL filter: each case feeds an XSL stylesheet (as the layout)
# and an XML document (as the item content) through the filter and compares
# the transformed output against a fixed expectation.
class Nanoc::Filters::XSLTest < Nanoc::TestCase
  SAMPLE_XSL = <<-EOS
<?xml version="1.0" encoding="utf-8"?>
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:output method="xml" version="1.0" encoding="utf-8" indent="yes"/>
<xsl:template match="/">
<html>
<head>
<title><xsl:value-of select="report/title"/></title>
</head>
<body>
<h1><xsl:value-of select="report/title"/></h1>
</body>
</html>
</xsl:template>
</xsl:stylesheet>
EOS
  SAMPLE_XML_IN = <<-EOS
<?xml version="1.0" encoding="utf-8"?>
<report>
<title>My Report</title>
</report>
EOS
  SAMPLE_XML_OUT = <<-EOS
<?xml version="1.0" encoding="utf-8"?>
<html>
<head>
<title>My Report</title>
</head>
<body>
<h1>My Report</h1>
</body>
</html>
EOS
  # Stylesheet that reads the externally supplied $foo parameter.
  SAMPLE_XSL_WITH_PARAMS = <<-EOS
<?xml version="1.0" encoding="utf-8"?>
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:output method="xml" version="1.0" encoding="utf-8" indent="yes"/>
<xsl:template match="/">
<html>
<head>
<title><xsl:value-of select="report/title"/></title>
</head>
<body>
<h1><xsl:value-of select="$foo"/></h1>
</body>
</html>
</xsl:template>
</xsl:stylesheet>
EOS
  SAMPLE_XML_IN_WITH_PARAMS = <<-EOS
<?xml version="1.0" encoding="utf-8"?>
<report>
<title>My Report</title>
</report>
EOS
  SAMPLE_XML_OUT_WITH_PARAMS = <<-EOS
<?xml version="1.0" encoding="utf-8"?>
<html>
<head>
<title>My Report</title>
</head>
<body>
<h1>bar</h1>
</body>
</html>
EOS
  # Stylesheet whose xsl:output suppresses the XML declaration.
  SAMPLE_XSL_WITH_OMIT_XML_DECL = <<-EOS
<?xml version="1.0" encoding="utf-8"?>
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:output method="xml" version="1.0" encoding="utf-8" indent="yes"
omit-xml-declaration="yes"/>
<xsl:template match="/">
<html>
<head>
<title><xsl:value-of select="report/title"/></title>
</head>
<body>
<h1><xsl:value-of select="report/title"/></h1>
</body>
</html>
</xsl:template>
</xsl:stylesheet>
EOS
  SAMPLE_XML_IN_WITH_OMIT_XML_DECL = <<-EOS
<?xml version="1.0" encoding="utf-8"?>
<report>
<title>My Report</title>
</report>
EOS
  SAMPLE_XML_OUT_WITH_OMIT_XML_DECL = <<-EOS
<html>
<head>
<title>My Report</title>
</head>
<body>
<h1>My Report</h1>
</body>
</html>
EOS

  def test_filter_as_layout
    assert_xsl_result(SAMPLE_XSL, SAMPLE_XML_IN, SAMPLE_XML_OUT)
  end

  def test_filter_with_params
    assert_xsl_result(SAMPLE_XSL_WITH_PARAMS, SAMPLE_XML_IN_WITH_PARAMS,
                      SAMPLE_XML_OUT_WITH_PARAMS, :foo => 'bar')
  end

  def test_filter_with_omit_xml_decl
    assert_xsl_result(SAMPLE_XSL_WITH_OMIT_XML_DECL,
                      SAMPLE_XML_IN_WITH_OMIT_XML_DECL,
                      SAMPLE_XML_OUT_WITH_OMIT_XML_DECL)
  end

  private

  # Shared driver extracted from the three (formerly copy-pasted) tests:
  # wraps +xml_in+ in an item and +xsl+ in a layout, runs the XSL filter with
  # the given +params+, and asserts the output equals +expected_xml_out+.
  # Skipped silently when nokogiri is unavailable (if_have guard).
  def assert_xsl_result(xsl, xml_in, expected_xml_out, params = {})
    if_have 'nokogiri' do
      item   = Nanoc::Item.new(xml_in, {}, '/content/')
      layout = Nanoc::Layout.new(xsl, {}, '/layout/')
      assigns = {
        :item    => item,
        :layout  => layout,
        :content => item.raw_content
      }
      filter = ::Nanoc::Filters::XSL.new(assigns)
      assert_equal expected_xml_out, filter.setup_and_run(layout.raw_content, params)
    end
  end
end
| 25.005291 | 79 | 0.562844 |
7acc26367952c0c51049530c236e0a556639fce3 | 10,567 | require 'gosu'
require 'wads'
#require 'rdia-games'
require_relative '../lib/rdia-games'
include Wads
include RdiaGames
GAME_WIDTH = 1280
GAME_HEIGHT = 720
# Entry-point window: boots the tile editor on the given board file and
# registers the WASD keys for continuous (hold-down) camera scrolling.
class TileEditor < RdiaGame
    def initialize(board_file = "./data/editor_board.txt")
        super(GAME_WIDTH, GAME_HEIGHT, "TileEditor", TileEditorDisplay.new(board_file))
        register_hold_down_key(Gosu::KbA)    # Scroll left
        register_hold_down_key(Gosu::KbD)    # Scroll right
        register_hold_down_key(Gosu::KbW)    # Scroll up (was mislabeled "Move left")
        register_hold_down_key(Gosu::KbS)    # Scroll down (was mislabeled "Move left")
    end
end
# Main editor widget: shows the tile grid (scrollable via a camera), the tile
# palette on the right, and the eraser/clear buttons. Handles painting tiles
# onto the grid by click or drag, and saving the board back to disk.
class TileEditorDisplay < Widget
    def initialize(board_file)
        super(0, 0, GAME_WIDTH, GAME_HEIGHT)
        disable_border
        # Camera offset applied when drawing the grid; derived each frame
        # from @center_x/@center_y in handle_update.
        @camera_x = 0
        @camera_y = 0
        @center_x = 0    # this is what the buttons will cause to move
        @center_y = 0
        @speed = 4       # camera scroll speed in pixels per key event
        @mouse_dragging = false
        @use_eraser = false
        @current_mouse_text = Text.new(10, 700, "0, 0")
        add_child(@current_mouse_text)
        @selected_tile = nil
        # 16x16 source tiles; @diagonal_tileset is loaded but not used below —
        # presumably reserved for future tile types.
        @tileset = Gosu::Image.load_tiles("media/basictiles.png", 16, 16, tileable: true)
        @diagonal_tileset = Gosu::Image.load_tiles("media/diagonaltiles.png", 16, 16, tileable: true)
        #@grid = GridDisplay.new(0, 0, 16, 50, 38, {ARG_SCALE => 2})
        @grid = GridDisplay.new(0, 0, 16, 21, 95)
        instantiate_elements(File.readlines(board_file))
        add_child(@grid)
        @pallette = TilePalletteDisplay.new
        add_child(@pallette)
        add_text("Current Tile:", 900, 630)
        # Toggle button: flips eraser mode on/off.
        add_button("Use Eraser", 940, 680, 120) do
            if @use_eraser
                @use_eraser = false
            else
                @use_eraser = true
                WidgetResult.new(false)
            end
        end
        # Clears the editable area of the board (note: intentionally stops
        # short of the last rows/columns — bounds are height-3 / width-2).
        add_button("Clear", 1080, 680, 120) do
            (0..@grid.grid_height-3).each do |y|
                (0..@grid.grid_width-2).each do |x|
                    @grid.remove_tile(x, y)
                end
            end
            WidgetResult.new(false)
        end
        # highlight the key tiles we use
        # the rest are background
        add_shadow_box(5)
        add_shadow_box(18)
        add_shadow_box(19)
        add_shadow_box(38)
        add_shadow_box(59)
        add_shadow_box(64)
        add_shadow_box(66)
    end

    # Draws a highlighted backing box behind the palette slot at tile_index.
    def add_shadow_box(tile_index)
        x, y = @pallette.get_coords_for_index(tile_index)
        # Draw a box that extends past the widget, because the tile can cover the whole box
        shadow_box = Widget.new(@pallette.x + x - 5, @pallette.y + y - 5, 42, 42)
        shadow_box.set_theme(WadsAquaTheme.new)
        shadow_box.set_selected
        shadow_box.disable_border
        add_child(shadow_box)
    end

    # Custom draw: everything except the grid is drawn in screen space; the
    # grid alone is drawn under the camera translation so it scrolls.
    def draw
        @children.each do |child|
            if child.is_a? GridDisplay
                # skip
            else
                child.draw
            end
        end
        if @selected_tile
            @selected_tile.draw
        end
        Gosu.translate(-@camera_x, -@camera_y) do
            @grid.draw
        end
    end

    # Recomputes the camera from the scroll center, refreshes the debug
    # readout, and paints/erases tiles while the mouse is being dragged.
    def handle_update update_count, mouse_x, mouse_y
        # Scrolling follows player
        # @camera_x = [[@cptn.x - WIDTH / 2, 0].max, @map.width * 50 - WIDTH].min
        # @camera_y = [[@cptn.y - HEIGHT / 2, 0].max, @map.height * 50 - HEIGHT].min
        # NOTE(review): the clamp bounds use grid_width * 64 and grid_height * 16
        # while the grid tile size above is 16 — confirm these scale factors.
        @camera_x = [[@center_x - (GAME_WIDTH.to_f / 2), 0].max, @grid.grid_width * 64 - GAME_WIDTH].min
        @camera_y = [[@center_y - (GAME_HEIGHT.to_f / 2), 0].max, @grid.grid_height * 16 - GAME_HEIGHT].min
        @current_mouse_text.label = "cen: #{@center_x}, #{@center_y}   cam: #{@camera_x}, #{@camera_y}   mou: #{mouse_x}, #{mouse_y} "
        if @mouse_dragging and @grid.contains_click(mouse_x, mouse_y)
            grid_x = @grid.determine_grid_x(mouse_x)
            grid_y = @grid.determine_grid_y(mouse_y)
            #puts "The mouse is dragging through tile #{grid_x}, #{grid_y}"
            if @use_eraser
                @grid.remove_tile(grid_x, grid_y)
            elsif @selected_tile
                new_tile = PalletteTile.new(@grid.grid_to_relative_pixel(grid_x),
                                            @grid.grid_to_relative_pixel(grid_y),
                                            @selected_tile.img,
                                            1,    # scale
                                            @selected_tile.index)
                @grid.set_tile(grid_x, grid_y, new_tile)
            end
        end
    end

    # WASD held down: scroll the camera center by @speed pixels per tick.
    def handle_key_held_down id, mouse_x, mouse_y
        if id == Gosu::KbA
            @center_x = @center_x - @speed
        elsif id == Gosu::KbD
            @center_x = @center_x + @speed
        elsif id == Gosu::KbW
            @center_y = @center_y - @speed
        elsif id == Gosu::KbS
            @center_y = @center_y + @speed
        end
        puts "moved center to #{@center_x}, #{@center_y}"
    end

    # WASD taps scroll one step; P saves the board; G toggles grid lines.
    def handle_key_press id, mouse_x, mouse_y
        if id == Gosu::KbA
            @center_x = @center_x - @speed
        elsif id == Gosu::KbD
            @center_x = @center_x + @speed
        elsif id == Gosu::KbW
            @center_y = @center_y - @speed
        elsif id == Gosu::KbS
            @center_y = @center_y + @speed
        elsif id == Gosu::KbP
            save_board
        elsif id == Gosu::KbG
            @grid.display_grid = [email protected]_grid
        end
    end

    # No-op: kept as the hook point for stopping movement if needed later.
    def handle_key_up id, mouse_x, mouse_y
        #if id == Gosu::KbA or id == Gosu::KbD or id == Gosu::KbW or id == Gosu::KbS
        #    @player.stop_move
        #end
    end

    # Starts a paint/erase drag; a click on the palette changes the selected
    # tile, a click on the grid paints or erases one cell immediately.
    def handle_mouse_down mouse_x, mouse_y
        @mouse_dragging = true
        @pallette.children.each do |pi|
            if pi.contains_click(mouse_x, mouse_y)
                @selected_tile = PalletteTile.new(1100, 630, pi.img, 1, pi.index)
            end
        end
        if @grid.contains_click(mouse_x, mouse_y)
            # Calculate which grid square this is
            # In the future with scrolling, we will need to consider CenterX
            # but for now without scrolling, its a simple calculation
            grid_x = @grid.determine_grid_x(mouse_x)
            grid_y = @grid.determine_grid_y(mouse_y)
            #puts "We have a selcted tile. Click was on #{grid_x}, #{grid_y}"
            if @use_eraser
                @grid.remove_tile(grid_x, grid_y)
            elsif @selected_tile
                new_tile = PalletteTile.new(@grid.grid_to_relative_pixel(grid_x),
                                            @grid.grid_to_relative_pixel(grid_y),
                                            @selected_tile.img,
                                            1,    # scale
                                            @selected_tile.index)
                @grid.set_tile(grid_x, grid_y, new_tile)
            end
        end
        #return WidgetResult.new(false)
    end

    def handle_mouse_up mouse_x, mouse_y
        @mouse_dragging = false
    end

    # Takes an array of strings that represents the board
    # Each board cell is two characters wide; a numeric token is a tile index
    # into @tileset, anything else (e.g. ".") leaves the cell empty.
    def instantiate_elements(dsl)
        @grid.clear_tiles
        grid_y = 0
        grid_x = 0
        dsl.each do |line|
            index = 0
            while index < line.size
                char = line[index..index+1].strip
                #puts "[#{index}] #{grid_x},#{grid_y} = #{char}."
                img = nil
                # If the token is a number, use it as the tile index
                if char.match?(/[[:digit:]]/)
                    tile_index = char.to_i
                    #puts "Using index #{tile_index}."
                    img = PalletteTile.new(0, 0, @tileset[tile_index], 1, tile_index)
                end
                if img.nil?
                    # nothing to do
                else
                    @grid.set_tile(grid_x, grid_y, img)
                end
                grid_x = grid_x + 1
                index = index + 2
            end
            grid_x = 0
            grid_y = grid_y + 1
        end
    end

    # Writes the grid back out in the same two-characters-per-cell format
    # read by instantiate_elements: ". " for empty, "N " for single-digit
    # indexes, "NN" for two-digit indexes.
    def save_board
        puts "Going to save board"
        open("./data/editor_new_board.txt", 'w') { |f|
            (0..@grid.grid_height-1).each do |y|
                str = ""
                (0..@grid.grid_width-1).each do |x|
                    pallette_tile = @grid.get_tile(x, y)
                    if pallette_tile.nil?
                        str = "#{str}. "
                    else
                        if pallette_tile.index.to_i < 10
                            str = "#{str}#{pallette_tile.index} "
                        else
                            str = "#{str}#{pallette_tile.index}"
                        end
                    end
                end
                f.puts str
            end
        }
    end
end
# A single selectable tile image, used both as a palette entry and as a tile
# placed on the editing grid. +index+ identifies the tile within the tileset
# and is what save_board serializes.
class PalletteTile < ImageWidget
    attr_accessor :index

    def initialize(x, y, image, scale, index)
        super(x, y, image)
        # Cells are drawn at a fixed 32x32; +scale+ is stored but does not
        # currently affect the rendered size.
        set_dimensions(32, 32)
        @index = index
        @scale = scale
    end

    # Reports a tile selection to the parent display when this tile is
    # clicked; returns nil for clicks outside the tile.
    # Fix: removed leftover debug `puts` calls that fired on every mouse
    # press for every palette tile, spamming stdout.
    def handle_mouse_down mouse_x, mouse_y
        if contains_click(mouse_x, mouse_y)
            return WidgetResult.new(false, "select", self)
        end
    end
end
# Right-hand palette of 100 selectable tiles laid out in an 8-column grid
# (x from 10 to 310 in steps of 40, wrapping to a new 40px-tall row).
# Fix: internal helpers renamed from camelCase (determineTileCords,
# addPalletteItems, @tileCords) to idiomatic snake_case and made private;
# the public get_coords_for_index interface is unchanged.
class TilePalletteDisplay < Widget
    # Horizontal/vertical distance between palette slots, and the x limit
    # after which a new row starts (original magic numbers, named).
    SLOT_STEP = 40
    ROW_START_X = 10
    ROW_LIMIT_X = 310
    TILE_QUANTITY = 100

    def initialize
        super(900, 10, 360, 600)
        #disable_border
        compute_tile_coords
        add_pallette_items
    end

    # Returns the [x, y] offset (relative to this widget) of the palette slot
    # holding the tile with the given index. Raises if the index is unknown.
    def get_coords_for_index(index)
        entry = @tile_coords.find { |_x, _y, order| order == index }
        if entry.nil?
            raise "Pallette display does not have tile with index #{index}"
        end
        [entry[0], entry[1]]
    end

    private

    # Precomputes an [x, y, index] triple for each of the palette slots.
    def compute_tile_coords
        @tile_coords = []
        x = ROW_START_X
        y = 10
        TILE_QUANTITY.times do |order|
            @tile_coords << [x, y, order]
            x += SLOT_STEP
            if x > ROW_LIMIT_X
                x = ROW_START_X
                y += SLOT_STEP
            end
        end
    end

    # Creates one PalletteTile child per slot from ./media/tileN.png.
    def add_pallette_items
        @tile_coords.each do |x, y, order|
            add_child(PalletteTile.new(@x + x, @y + y, "./media/tile#{order}.png", 2, order))
        end
    end
end
# Launch the editor: no argument uses the default board, one argument names
# the board file to load, anything else is rejected.
case ARGV.size
when 0
    puts "No args provided"
    TileEditor.new.show
when 1
    puts "A board filename arg was provided"
    TileEditor.new(ARGV[0]).show
else
    puts "Too many args provided"
    exit
end
| 31.828313 | 134 | 0.522949 |
913e21b2fc84035d7b7f6825a9066385f3ee8afc | 71 | require 'openssl'
OpenSSL::SSL::VERIFY_PEER = OpenSSL::SSL::VERIFY_NONE | 35.5 | 53 | 0.788732 |
bbf358cb0545ec509e7389208a82f3907d74cbb0 | 284 | # frozen_string_literal: true
require_relative '../protos/artist_pb.rb'
require_relative '../protos/contributor_pb.rb'
require_relative '../protos/recording_pb.rb'
require_relative '../protos/release_pb.rb'
module Mscmetadata
# RecordingBuilder
class RecordingBuilder
end
end
| 21.846154 | 46 | 0.795775 |
d59f331bbb1bc8b5ef8d4b29196e51f3edf19a4f | 1,771 | # -*- ruby -*-
# encoding: utf-8
require File.expand_path("lib/google/area120/tables/v1alpha1/version", __dir__)
# Gem packaging metadata for the generated Area 120 Tables V1alpha1 client.
Gem::Specification.new do |gem|
  gem.name = "google-area120-tables-v1alpha1"
  gem.version = Google::Area120::Tables::V1alpha1::VERSION

  gem.authors = ["Google LLC"]
  gem.email = "[email protected]"
  gem.description = "Using the Area 120 Tables API, you can query for tables, and update/create/delete rows within tables programmatically. Note that google-area120-tables-v1alpha1 is a version-specific client library. For most uses, we recommend installing the main client library google-area120-tables instead. See the readme for more details."
  gem.summary = "API Client library for the Area 120 Tables V1alpha1 API"
  gem.homepage = "https://github.com/googleapis/google-cloud-ruby"
  gem.license = "Apache-2.0"

  gem.platform = Gem::Platform::RUBY

  # Package everything under lib/ and proto_docs/ that git tracks, plus docs.
  gem.files = `git ls-files -- lib/*`.split("\n") +
    `git ls-files -- proto_docs/*`.split("\n") +
    ["README.md", "LICENSE.md", "AUTHENTICATION.md", ".yardopts"]
  gem.require_paths = ["lib"]

  gem.required_ruby_version = ">= 2.4"

  gem.add_dependency "gapic-common", "~> 0.3"
  gem.add_dependency "google-cloud-errors", "~> 1.0"

  gem.add_development_dependency "google-style", "~> 1.24.0"
  gem.add_development_dependency "minitest", "~> 5.14"
  gem.add_development_dependency "minitest-focus", "~> 1.1"
  gem.add_development_dependency "minitest-rg", "~> 5.2"
  gem.add_development_dependency "rake", ">= 12.0"
  gem.add_development_dependency "redcarpet", "~> 3.0"
  gem.add_development_dependency "simplecov", "~> 0.18"
  gem.add_development_dependency "yard", "~> 0.9"
end
| 46.605263 | 348 | 0.676454 |
d5fb84214f959b407950be42074b19466ced1327 | 1,360 | # Settings specified here will take precedence over those in config/environment.rb
Publify::Application.configure do
  # The test environment is used exclusively to run your application's
  # test suite. You never need to work with it otherwise. Remember that
  # your test database is "scratch space" for the test suite and is wiped
  # and recreated between test runs. Don't rely on the data there!
  # NOTE(review): the Rails test template defaults cache_classes to true;
  # false here is unusual (sometimes done for Spring) — confirm intentional.
  config.cache_classes = false

  # Log error messages when you accidentally call methods on nil.
  config.whiny_nils = true

  # Show full error reports and disable caching
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false

  # Disable request forgery protection in test environment
  config.action_controller.allow_forgery_protection = false

  # Tell Action Mailer not to deliver emails to the real world.
  # The :test delivery method accumulates sent emails in the
  # ActionMailer::Base.deliveries array.
  config.action_mailer.delivery_method = :test

  # Use SQL instead of Active Record's schema dumper when creating the test database.
  # This is necessary if your schema can't be completely dumped by the schema dumper,
  # like if you have constraints or database-specific column types
  # config.active_record.schema_format = :sql

  config.active_support.deprecation = :stderr
end
| 42.5 | 85 | 0.771324 |
03c35925d8f3d49c2c11f07677982e8bab2dea91 | 10,162 | #
# Copyright:: 2015-2016, Benoit Creau <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# loading current resource
def load_current_resource
@current_resource = new_resource.class.new(@new_resource.name)
@current_resource.exists = false
# if there is no altdisk_name specified in the recipe the altdisk_name will be the default one
# (altinst_rootvg)
if @new_resource.altdisk_name.nil?
altdisk_name = 'altinst_rootvg'
@new_resource.altdisk_name('altinst_rootvg')
else
altdisk_name = @new_resource.altdisk_name
end
lspv_altinst_rootvg = shell_out!("lspv | awk '$3 == \"#{altdisk_name}\" {print $1}")
# these attribute are useful if we are working on an existing rootvg, so an altdisk exists
if lspv_altinst_rootvg.stdout.empty?
Chef::Log.debug("altdisk: can't find any disk named #{altdisk_name}")
else
@current_resource.type(:name)
@current_resource.value(lspv_altinst_rootvg.stdout)
@current_resource.exists = true
end
end
# action create
# Create an alternate disk
# Creates an alternate rootvg (alt_disk_copy) on a suitable empty disk
# chosen via find_and_check_disk, then renames it when a non-default
# altdisk_name was requested. No-op when an altdisk already exists.
action :create do
  # we are creating an alternate disk only if there are no current alternate disk
  unless @current_resource.exists
    Chef::Log.info('alt_disk: action create')
    type = @new_resource.type
    value = @new_resource.value
    # searching for the disk on which create the alternate disk
    Chef::Log.debug("type : #{type}, value : #{value}")
    disk = find_and_check_disk(type, value)
    if disk != 'None'
      Chef::Log.debug("alt_disk: we found a disk #{disk}")
      converge_by("alt_disk: creating alternate rootvg disk name #{@new_resource.altdisk_name} on disk #{disk}") do
        alt_disk_copy_str = "alt_disk_copy -d #{disk}"
        # -B requested by change_bootlist == false
        unless @new_resource.change_bootlist
          alt_disk_copy_str = alt_disk_copy_str << ' -B '
        end
        # -O requested by reset_devices
        if @new_resource.reset_devices
          alt_disk_copy_str = alt_disk_copy_str << ' -O '
        end
        # -n requested by remain_nimclient
        if @new_resource.remain_nimclient
          alt_disk_copy_str = alt_disk_copy_str << ' -n '
        end
        Chef::Log.debug("alt_disk: running command #{alt_disk_copy_str}")
        # copying a rootvg can take a long time; allow up to 2 hours
        shell_out!(alt_disk_copy_str, timeout: 7200)
        # renaming if needed
        if @new_resource.altdisk_name != 'altinst_rootvg'
          shell_out!("alt_rootvg_op -v #{@new_resource.altdisk_name} -d #{disk}")
        end
      end
    else
      Chef::Log.debug('alt_disk: no suitable disk found for alternate disk copy')
    end
  end
end
# action cleanup
# Cleanup an alternate disk
# Removes the alternate rootvg definition via `alt_rootvg_op -X`.
action :cleanup do
  Chef::Log.debug('alt_disk: action cleanup')
  if @current_resource.exists
    converge_by("alt_disk: cleanup alternate rootvg #{@new_resource.altdisk_name}") do
      alt_rootvg_op_str = 'alt_rootvg_op -X'
      # a non-default volume group name must be passed to -X explicitly
      if @new_resource.altdisk_name != 'altinst_rootvg'
        alt_rootvg_op_str = alt_rootvg_op_str << " #{@new_resource.altdisk_name}"
      end
      shell_out!(alt_rootvg_op_str)
    end
  end
end
# action rename
# Rename an alternate disk
# Renames the alternate rootvg to new_altdisk_name via `alt_rootvg_op -v`,
# locating the backing disk first when the current name is non-default.
action :rename do
  Chef::Log.debug('alt_disk: action rename')
  if @current_resource.exists
    alt_rootvg_op_str = "alt_rootvg_op -v #{@new_resource.new_altdisk_name}"
    # NOTE(review): this condition reads @current_resource.altdisk_name while
    # the lookup below uses @new_resource.altdisk_name — confirm the mix is
    # intentional (load_current_resource never sets altdisk_name on
    # @current_resource).
    if @current_resource.altdisk_name != 'altinst_rootvg'
      disk = 'None'
      lspv_altdisk = shell_out("lspv | awk '$3 == \"#{@new_resource.altdisk_name}\"'")
      lspv_altdisk.stdout.each_line do |a_pv|
        current_pv_a = a_pv.split(' ')
        disk = current_pv_a[0] if current_pv_a[2] == @new_resource.altdisk_name
      end
      alt_rootvg_op_str = alt_rootvg_op_str << ' -d ' << disk
    end
    converge_by("alt_disk: renaming alternate rootvg #{@new_resource.altdisk_name}") do
      Chef::Log.debug("alt_disk: running command #{alt_rootvg_op_str}")
      shell_out!(alt_rootvg_op_str)
    end
  end
end
# action wakeup
# Wakes up the alternate rootvg (`alt_rootvg_op -W`) so it can be inspected
# or customized; skipped when its disk is already active.
action :wakeup do
  # as far as I know waking up an alternate rootvg automatically change its name to altinst_rootvg
  if @current_resource.exists
    Chef::Log.debug('alt_disk: action wakeup')
    wakeup = false
    disk = get_current_alt
    # checking if disk is already active
    lspv = shell_out('lspv')
    lspv.stdout.each_line do |a_pv|
      current_pv_a = a_pv.split(' ')
      wakeup = true if current_pv_a[0] == disk && current_pv_a[3] == 'active'
    end
    if disk != 'None' && !wakeup
      converge_by("alt_disk: waking up alternate rootvg on disk #{disk}") do
        # there are sometimes error when waking up so don't use shell_out!
        shell_out("alt_rootvg_op -W -d #{disk}")
      end
    end
  end
end
# action sleep
# Puts a previously woken alternate rootvg back to sleep
# (`alt_rootvg_op -S`); only runs while its disk is active.
action :sleep do
  if @current_resource.exists
    Chef::Log.info('alt_disk: action sleep')
    wakeup = false
    disk = get_current_alt
    # checking if disk is already active
    lspv = shell_out('lspv')
    lspv.stdout.each_line do |a_pv|
      current_pv_a = a_pv.split(' ')
      wakeup = true if current_pv_a[0] == disk && current_pv_a[3] == 'active'
    end
    if disk != 'None' && wakeup
      converge_by('alt_disk: putting alternate rootvg in sleep') do
        # there are sometimes error when sleeping so don't use shell_out!
        shell_out('alt_rootvg_op -S')
      end
    end
  end
end
# action customize
# Applies an update_all customization to the alternate rootvg from the
# configured image location (`alt_rootvg_op -C -b update_all`).
action :customize do
  Chef::Log.info('alt_disk: action customize')
  # a resource can be customized only if this one exists
  if @current_resource.exists
    Chef::Log.info('alt_disk: customize')
    disk = get_current_alt
    # NOTE(review): defined? on a method call is truthy whenever the method
    # exists, not when the attribute is set, so this is effectively always
    # true — `!@new_resource.image_location.nil?` is probably what was meant.
    customize = defined?(@new_resource.image_location)
    if disk != 'None' && customize
      converge_by('alt_disk: customize alt_disk (update)') do
        cmd = "alt_rootvg_op -C -b update_all -l #{@new_resource.image_location}"
        Chef::Log.info("alt_disk: Running command #{cmd}")
        # software updates can be slow; allow up to 15000 seconds
        shell_out!(cmd, timeout: 15_000)
      end
    end
  end
end
# find_and_check_disk
# this def is searching a disk usable by alt_disk operation
# if size is lesser than the current rootvg it returns None
# else it return the name of the disk with the criteria below
# type
# - size : find disk by its size
# - name : find disk by its name
# - auto : automatically find disk by criteria
# value
# - for size : int size of the disk in mb
# - for name : name of the disk
# - for auto : equal : first disk of the same size
# bigger : first disk of greater size
# Searches `lspv` output for an unassigned (volume group 'None') disk that
# matches the selection criteria, then verifies it is at least as large as
# the current rootvg. Returns the disk name, or 'None' when nothing fits.
#   type :name — value is the exact disk name
#   type :size — value is the size in MB (string or int)
#   type :auto — value is 'equal' or 'bigger' relative to rootvg's size
def find_and_check_disk(type, value)
  lspv_root = shell_out("lspv | awk '$3 == \"rootvg\" {print $1}'")
  current_rootvg = lspv_root.stdout
  current_rootvg_size = sizeof_disk(current_rootvg)
  lspv = shell_out('lspv')
  disk = 'None'
  case type
  when :name
    lspv.stdout.each_line do |pv_line|
      fields = pv_line.split(' ')
      next unless fields[0] == value
      if fields[2] == 'None'
        Chef::Log.info("alt_disk: disk #{value} is usable")
        disk = fields[0]
      end
    end
  when :size
    lspv.stdout.each_line do |pv_line|
      fields = pv_line.split(' ')
      next unless fields[2] == 'None'
      if sizeof_disk(fields[0]) == value.to_i
        Chef::Log.debug("alt_disk: empty disk #{fields[0]} found with a size of #{value}")
        disk = fields[0]
      end
    end
  when :auto
    lspv.stdout.each_line do |pv_line|
      fields = pv_line.split(' ')
      next unless fields[2] == 'None'
      candidate_size = sizeof_disk(fields[0])
      if value == 'equal' && candidate_size == current_rootvg_size
        Chef::Log.debug("alt_disk: empty disk #{fields[0]} found with a size of the current rootvg")
        disk = fields[0]
      end
      if value == 'bigger' && candidate_size > current_rootvg_size
        Chef::Log.debug("alt_disk: empty disk #{fields[0]} found with a size bigger than the size of the current rootvg")
        disk = fields[0]
      end
    end
  end
  if disk == 'None'
    Chef::Log.debug('alt_disk: cannot find any disk usable for alt_disk')
    return 'None'
  end
  # A candidate was found — reject it if it is smaller than rootvg.
  Chef::Log.debug('alt_disk: checking size is BIGGER or EQUAL')
  case check_disk_size(current_rootvg, disk)
  when 'BIGGER', 'EQUAL'
    Chef::Log.debug('alt_disk: disk is BIGGER or EQUAL')
    disk
  when 'LESSER'
    Chef::Log.debug('alt_disk: cannot find any disk usable for alt_disk')
    'None'
  end
end
# this def is comparing two disk size
# Compares the sizes of two disks (via `getconf DISK_SIZE`) and returns
# 'LESSER', 'BIGGER' or 'EQUAL' describing dest relative to source.
def check_disk_size(source, dest)
  Chef::Log.debug('alt_disk: Checking disk size')
  source_size = shell_out("getconf DISK_SIZE /dev/#{source}")
  dest_size = shell_out("getconf DISK_SIZE /dev/#{dest}")
  Chef::Log.debug('alt_disk: comparing ' + dest_size.stdout.chomp + ' to ' + source_size.stdout.chomp)
  src = source_size.stdout.chomp.to_i
  dst = dest_size.stdout.chomp.to_i
  case dst <=> src
  when -1
    Chef::Log.debug('alt_disk: size --> LESSER')
    'LESSER'
  when 1
    Chef::Log.debug('alt_disk: size --> BIGGER')
    'BIGGER'
  else
    Chef::Log.debug('alt_disk: size --> EQUAL')
    'EQUAL'
  end
end
# this def return disk size
# Returns the size of a disk as an integer, read via `getconf DISK_SIZE`.
def sizeof_disk(disk)
  getconf_result = shell_out("getconf DISK_SIZE /dev/#{disk}")
  getconf_result.stdout.chomp.to_i
end
# this def return the disk of the alternate rootvg
# Returns the name of the disk backing the alternate rootvg volume group,
# or 'None' when no physical volume carries that volume group name.
def get_current_alt
  disk = 'None'
  lspv_altdisk = shell_out("lspv | awk '$3 == \"#{@new_resource.altdisk_name}\"'")
  lspv_altdisk.stdout.each_line do |pv_line|
    fields = pv_line.split(' ')
    disk = fields[0] if fields[2] == @new_resource.altdisk_name
  end
  Chef::Log.debug("alt_disk: current_alt #{disk}")
  disk
end
| 35.16263 | 127 | 0.682838 |
38b94b2de57cf379a8b1b91411ea3881e001d40a | 1,071 | class SequencesController < ApplicationController
before_action :assign_sequence, only: [:show, :edit, :update, :destroy]
def index
@sequences = Sequence.all
end
def new
@sequence = Sequence.new
end
def create
@sequence = Sequence.new(
params.require(:sequence).permit(:name)
)
if @sequence.save
redirect_to @sequence
else
render :new
end
end
def show
end
def edit
end
def update
if @sequence.update(params.require(:sequence).permit(:name))
redirect_to @sequence
else
render :update
end
end
def destroy
@sequence.destroy
redirect_to sequences_path
end
def start
if params[:sequence]
@sequence = Sequence.find(params[:sequence_id])
@sequence.update(current_pokemon_id: params.require(:sequence)["current_pokemon_id"])
end
$trash.launch_sequence(params[:sequence_id])
end
def stop
$trash.stop
end
private
def assign_sequence
@sequence = Sequence.preload(:instructions, :registers).find(params[:id])
end
end
| 17.557377 | 91 | 0.671335 |
bfcba165f50f5f8cedf62afdf7eef79a8eb6cad7 | 4,156 | Rails.application.configure do
  # Verifies that versions and hashed value of the package contents in the project's package.json
  config.webpacker.check_yarn_integrity = false
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true
  config.require_master_key = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true
  config.action_mailer.default_url_options = { host: "diaper.app" }
  config.action_mailer.delivery_method = :smtp
  # SendGrid SMTP relay; credentials come from the environment
  # (SENDGRID_USERNAME / SENDGRID_PASSWORD must be set in production).
  config.action_mailer.smtp_settings = {
    address: 'smtp.sendgrid.net',
    port: '587',
    authentication: :plain,
    user_name: ENV['SENDGRID_USERNAME'],
    password: ENV['SENDGRID_PASSWORD'],
    domain: 'diaper.app',
    enable_starttls_auto: true
  }

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV["RAILS_SERVE_STATIC_FILES"].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = Uglifier.new(harmony: true)
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Mount Action Cable outside main process or domain
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  config.force_ssl = true
  config.ssl_options = { hsts: false }

  config.log_level = :info

  # Prepend all log lines with the following tags.
  config.log_tags = [:request_id]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment)
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "diaper_#{Rails.env}"

  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = [I18n.default_locale]

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Store files locally.
  config.active_storage.service = :azure

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
| 39.580952 | 102 | 0.757459 |
e2902bf4b9e7723dfa8b9deeef426f42f7d31b17 | 3,779 | class ScaffoldGenerator < Rails::Generator::NamedBase
  # Both flags can be toggled via the --skip-* options declared below.
  default_options :skip_timestamps => false, :skip_migration => false

  # Naming metadata for the generated controller, derived from the
  # pluralized model name in #initialize.
  attr_reader :controller_name,
    :controller_class_path,
    :controller_file_path,
    :controller_class_nesting,
    :controller_class_nesting_depth,
    :controller_class_name,
    :controller_underscore_name,
    :controller_singular_name,
    :controller_plural_name
  alias_method :controller_file_name, :controller_underscore_name
  alias_method :controller_table_name, :controller_plural_name

  def initialize(runtime_args, runtime_options = {})
    super
    # The controller is named after the plural of the model.
    @controller_name = @name.pluralize

    base_name, @controller_class_path, @controller_file_path, @controller_class_nesting, @controller_class_nesting_depth = extract_modules(@controller_name)
    @controller_class_name_without_nesting, @controller_underscore_name, @controller_plural_name = inflect_names(base_name)
    @controller_singular_name=base_name.singularize

    # Reassemble the fully-qualified controller class name when nested.
    if @controller_class_nesting.empty?
      @controller_class_name = @controller_class_name_without_nesting
    else
      @controller_class_name = "#{@controller_class_nesting}::#{@controller_class_name_without_nesting}"
    end
  end

  # Declares everything this generator produces: directories, view
  # templates, layout, stylesheet, controller, tests, helper, routes and
  # the underlying model (delegated to the model generator).
  def manifest
    record do |m|
      # Check for class naming collisions.
      m.class_collisions(controller_class_path, "#{controller_class_name}Controller", "#{controller_class_name}Helper")
      m.class_collisions(class_path, "#{class_name}")

      # Controller, helper, views, test and stylesheets directories.
      m.directory(File.join('app/models', class_path))
      m.directory(File.join('app/controllers', controller_class_path))
      m.directory(File.join('app/helpers', controller_class_path))
      m.directory(File.join('app/views', controller_class_path, controller_file_name))
      m.directory(File.join('app/views/layouts', controller_class_path))
      m.directory(File.join('test/functional', controller_class_path))
      m.directory(File.join('test/unit', class_path))
      m.directory(File.join('public/stylesheets', class_path))

      for action in scaffold_views
        m.template(
          "view_#{action}.html.erb",
          File.join('app/views', controller_class_path, controller_file_name, "#{action}.html.erb")
        )
      end

      # Layout and stylesheet.
      m.template('layout.html.erb', File.join('app/views/layouts', controller_class_path, "#{controller_file_name}.html.erb"))
      m.template('style.css', 'public/stylesheets/scaffold.css')

      m.template(
        'controller.rb', File.join('app/controllers', controller_class_path, "#{controller_file_name}_controller.rb")
      )

      m.template('functional_test.rb', File.join('test/functional', controller_class_path, "#{controller_file_name}_controller_test.rb"))
      m.template('helper.rb', File.join('app/helpers', controller_class_path, "#{controller_file_name}_helper.rb"))

      m.route_resources controller_file_name

      # Generate the model itself, skipping if it already exists.
      m.dependency 'model', [name] + @args, :collision => :skip
    end
  end

  protected
    # Override with your own usage banner.
    def banner
      "Usage: #{$0} scaffold ModelName [field:type, field:type]"
    end

    # Registers the generator's command-line options.
    def add_options!(opt)
      opt.separator ''
      opt.separator 'Options:'
      opt.on("--skip-timestamps",
        "Don't add timestamps to the migration file for this model") { |v| options[:skip_timestamps] = v }
      opt.on("--skip-migration",
        "Don't generate a migration file for this model") { |v| options[:skip_migration] = v }
    end

    # The view templates generated for each scaffolded resource.
    def scaffold_views
      %w[ index show new edit ]
    end

    def model_name
      class_name.demodulize
    end
end
| 40.202128 | 156 | 0.695687 |
edd75c02f1e6e27a1f9a0c73ab1dda8e194d498a | 225 | # Read about factories at https://github.com/thoughtbot/factory_girl
FactoryGirl.define do
factory :series_statement_relationship_type do
display_name "MyString"
typeid 1
position 1
note "MyText"
end
end
| 20.454545 | 68 | 0.755556 |
1a7f6dee3b2fe339cb002604f9d26ffe5dedf5ba | 1,226 | # frozen_string_literal: true
# Test double for Licensed::Commands::Command: routes all reporting through
# an injectable TestReporter and lets callers instrument or skip each level
# of the run (run / app / source / dependency) via options.
class TestCommand < Licensed::Commands::Command
  def initialize(config:, reporter: TestReporter.new)
    super(config: config)
    @test_reporter = reporter
  end

  # The reporter captured at construction time.
  def reporter
    @test_reporter
  end

  # Always reuse the injected reporter regardless of options.
  def create_reporter(options)
    @test_reporter
  end

  # Tags the top-level report and aborts early when options[:skip_run].
  def run(**options)
    super do |report|
      report["extra"] = true
      next :skip if options[:skip_run]
    end
  end

  protected

  def run_app(app)
    super do |report|
      report["extra"] = true
      next :skip if options[:skip_app]
    end
  end

  # Invokes an optional callback before running, for test instrumentation.
  def run_source(app, source)
    options[:source_proc].call(app, source) if options[:source_proc]

    super do |report|
      report["extra"] = true
      next :skip if options[:skip_source]
    end
  end

  # Invokes an optional callback before running, for test instrumentation.
  def run_dependency(app, source, dependency)
    options[:dependency_proc].call(app, source, dependency) if options[:dependency_proc]

    super do |report|
      report["extra"] = true
      next :skip if options[:skip_dependency]
    end
  end

  # Marks the dependency report as evaluated, unless a custom evaluator is
  # supplied via options[:evaluate_proc].
  def evaluate_dependency(app, source, dependency, report)
    return options[:evaluate_proc].call(app, source, dependency) if options[:evaluate_proc]

    report["evaluated"] = true
    true
  end
end
| 22.290909 | 91 | 0.674551 |
01e7efcd7a242b8f080e1f5ee71a344d705981f1 | 3,148 | module Locomotive
# Global Locomotive configuration: settings are built from @@defaults into
# a ConfigurationHash and exposed class-wide (Configuration.settings).
class Configuration

  @@default_locales = %w{en de fr bg ca cs da el es et fa-IR fi-FI it ja-JP lt nb nl pl-PL pt pt-BR ru sk sr sv sv-FI uk zh-CN}

  @@defaults = {
    name: 'Locomotive',
    host: nil,
    # forbidden_paths: %w{layouts snippets stylesheets javascripts assets admin system api},
    reserved_site_handles: %w(sites my_account password sign_in sign_out),
    reserved_slugs: %w{stylesheets javascripts assets admin locomotive images api pages edit},
    reserved_domains: [],
    locales: @@default_locales,
    site_locales: @@default_locales,
    cookie_key: '_locomotive_session',
    enable_logs: false,
    enable_admin_ssl: false,
    delayed_job: false,
    default_locale: :en,
    mailer_sender: '[email protected]',
    unsafe_token_authentication: false,
    enable_registration: true,
    ui: {
      per_page: 10
    },
    rack_cache: {
      verbose: true,
      # NOTE(review): URI.encode is deprecated and removed in Ruby 3 —
      # consider URI::DEFAULT_PARSER.escape if this app moves past Ruby 2.x.
      metastore: URI.encode("file:#{Rails.root}/tmp/dragonfly/cache/meta"), # URI encoded in case of spaces
      entitystore: URI.encode("file:#{Rails.root}/tmp/dragonfly/cache/body")
    },
    devise_modules: [:registerable, :rememberable, :database_authenticatable, :recoverable, :trackable, :validatable, :encryptable, { encryptor: :sha1 }],
    steam_image_resizer_secret: 'please change it'
  }

  cattr_accessor :settings

  # (Re)builds the shared settings hash from the defaults.
  def initialize
    @@settings = self.class.get_from_hash(@@defaults)
  end

  def self.settings
    @@settings
  end

  # Delegates unknown instance calls to the settings hash (config.name, ...).
  def method_missing(name, *args, &block)
    self.settings.send(name, *args, &block)
  end

  protected

  # converts a hash map into a ConfigurationHash (recursively)
  # NOTE(review): `protected` has no effect on `def self.` methods — this
  # class method remains publicly callable.
  def self.get_from_hash(hash)
    config = ConfigurationHash.new
    hash.each_pair do |key, value|
      config[key] = value.is_a?(Hash) ? self.get_from_hash(value) : value
    end
    config
  end
end
# specialized hash for storing configuration settings
# Specialized Hash for configuration settings: missing keys auto-vivify
# nested ConfigurationHash instances, and every key is addressable as a
# method (config.foo == config[:foo], config.foo = 1 writes it).
class ConfigurationHash < Hash
  # Missing keys produce (and memoize) a nested ConfigurationHash so that
  # arbitrarily deep paths can be assigned without pre-creating parents.
  def default(key = nil)
    include?(key) ? self[key] : self[key] = self.class.new
  end

  # Retrieves +key+; when a block is given, any existing non-hash value is
  # discarded first and the (possibly fresh) nested hash is yielded.
  def [](key, &block)
    if block_given?
      delete(key) unless super(key).respond_to?(:keys)
      yield(super(key))
    else
      super(key)
    end
  end

  # Member-based access to keys; all keys are converted to symbols.
  def method_missing(name, *args, &block)
    # core Ruby String#end_with? (was ActiveSupport's ends_with?, which
    # made this pure-Ruby class depend on Active Support needlessly)
    if name.to_s.end_with?('=')
      send(:[]=, name.to_s.chomp('=').to_sym, *args)
    else
      send(:[], name.to_sym, &block)
    end
  end

  # BUG FIX: keep respond_to? consistent with method_missing — every name
  # is handled here, so advertise that instead of denying it.
  def respond_to_missing?(_name, _include_private = false)
    true
  end
end
end
| 32.791667 | 170 | 0.586086 |
b968a0de52cc12f9a8fba4d78434d59bb8868e9d | 277 | class CreateUsers < ActiveRecord::Migration
def change
  create_table :users do |t|
    t.string :firstname
    t.string :lastname
    # BUG FIX: `:allow_null => false` is not an Active Record column option
    # and was silently ignored; `:null => false` is what actually adds the
    # NOT NULL constraint.
    t.string :email, :null => false
    t.string :timezone
    t.integer :login_count
    t.boolean :system_admin

    t.timestamps
  end
end
end
| 19.785714 | 43 | 0.714801 |
1ae8e7035e624ae6006ab706b14610cf45a7c215 | 879 | Given /^I login as a new learner$/ do
  step %{I login as a new "learner"}
end

# Creates a user for the named role via its FactoryGirl factory and signs
# in through the login form.
Given /^I login as a new "([^"]*)"$/ do |user_role|
  password = 'password'
  # role name (as written in the feature files) -> factory name
  role_to_factory = { 'scitent admin' => :user_scitent_admin,
    'tech support' => :user_tech_support,
    'product admin' => :user_product_admin,
    'course admin' => :user_course_admin,
    'user admin' => :user_user_admin,
    'learner' => :user_learner
  }
  user = Factory(role_to_factory[user_role], :password => password)
  step %{I logout}
  visit('/')
  fill_in('user_email', :with => user.email)
  fill_in('user_password', :with => password)
  click_button('Sign in')
end

# Passes when the profile link is visible, i.e. a user is signed in.
# Supports both RSpec (`should`) and Test::Unit (`assert`) styles.
Then /^(?:|I )should be logged in$/ do
  text = 'Edit My Profile'
  if page.respond_to? :should
    page.should have_content(text)
  else
    assert page.has_content?(text)
  end
end

Given /^I logout$/ do
  visit('/users/sign_out')
end
| 25.114286 | 67 | 0.657565 |
ab80c39f6fed68a3fc3caf9137ab0aa33ccc04e1 | 1,502 | # -*- encoding: utf-8 -*-
$LOAD_PATH.unshift File.expand_path('../lib', __FILE__)
require 'acts_as_sanitiled/version'
# Gem packaging metadata for acts_as_sanitiled.
Gem::Specification.new do |s|
  s.name = "acts_as_sanitiled"
  s.version = ActsAsSanitiled::VERSION
  s.platform = Gem::Platform::RUBY
  s.authors = ["Gabe da Silveira"]
  s.email = ["[email protected]"]
  s.homepage = "http://github.com/dasil003/acts_as_sanitiled"
  s.summary = "Automatically textiles and/or sanitizes ActiveRecord columns"
  s.description = "A modernized version of Chris Wansthrath's venerable acts_as_textiled. It automatically textiles and then sanitizes columns to your specification. Ryan Grove's excellent Sanitize gem with nokogiri provides the backend for speedy and robust filtering of your output in order to: restrict Textile to a subset of HTML, guarantee well-formedness, and of course prevent XSS."

  s.files = Dir["lib/**/*"] + %w[LICENSE README.rdoc]
  s.require_path = "lib"
  s.rdoc_options = ["--main", "README.rdoc", "--charset=UTF-8"]

  s.required_ruby_version = '~> 1.8.6'
  s.required_rubygems_version = '~> 1.3.6'

  {
    'bundler' => '~> 1.0.0',
    'bacon' => '~> 1.1.0',
    'activesupport' => '~> 3.0.0'
  }.each do |lib, version|
    s.add_development_dependency(lib, version)
  end

  {
    'nokogiri' => '~> 1.3.3',
    'sanitize' => '~> 1.1.0',
    'RedCloth' => '~> 4.2.3'
  }.each do |lib, version|
    s.add_runtime_dependency(lib, version)
  end
end
| 39.526316 | 391 | 0.647803 |
61b41688c1e13b840408ff2bf3bfd8bbde784271 | 42 | Alki do
  # Registers a :val service that always returns the literal string "<<one>>".
  service(:val) { "<<one>>" }
end | 10.5 | 29 | 0.52381 |
| 10.5 | 29 | 0.52381 |
61b73a474384ff88be71b62fd920aa8bdcf22d59 | 860 | class SecurityMailer < ApplicationMailer
  layout nil

  # Sends an account-security notification email.
  #   user     — recipient
  #   type     — notification kind; selects the mail_security.<type>_subject key
  #   activity — record exposing #ip, #user_agent and #created_at
  def notify(user, type, activity)
    address = []
    # NOTE(review): Geocoder.configure mutates global configuration on every
    # call — confirm this is safe under concurrent mail deliveries.
    Geocoder.configure(language: I18n.locale)
    # First resolve the IP to coordinates, then reverse-geocode the
    # coordinates to a human-readable city/country pair.
    ip_result = Geocoder.search(activity.ip).first
    unless ip_result.blank? || ip_result.data["loc"].blank?
      loc_result = Geocoder.search(ip_result.data["loc"]).first
      address = [loc_result.city, loc_result.country] unless loc_result.nil?
    end

    @user = user
    @type = type
    email = user.email
    @user_agent = UserAgent.parse(activity.user_agent)
    @location = address.compact.join(', ')
    @timestamp = activity.created_at
    @ip = activity.ip
    # NOTE(review): assumes @user_agent.os is a non-nil, non-empty string —
    # confirm for unparseable user agents.
    @platform = @user_agent.os.split.first

    subject = I18n.t("mail_security.#{type}_subject",
      app_name: CONFIG['app_name'], browser: @user_agent.browser, platform: @platform)
    mail(to: email, subject: subject)
  end
end
| 33.076923 | 86 | 0.689535 |
1ac305d328a55990e0cc53e5319ad1d0976eb205 | 327 | if Object.const_defined? :PolarSSL
  # Reopens the PolarSSL namespace (only when it has already been defined,
  # presumably by the native extension) to declare the gem version and the
  # Ruby-side error hierarchy.
  module PolarSSL
    VERSION = '0.0.1'
    class MallocFailed < StandardError; end
    class NetWantRead < StandardError; end
    class NetWantWrite < StandardError; end
    class SSL
      class Error < StandardError; end
      class ReadTimeoutError < StandardError; end
    end
  end
end | 25.153846 | 49 | 0.706422 |
end | 25.153846 | 49 | 0.706422 |
bb807a1d526d1d2c33d45fc89c23c6a83ef53d7c | 1,373 | # -*- encoding: utf-8 -*-
# stub: globalid 0.4.2 ruby lib
# RubyGems-generated stub specification for globalid 0.4.2.
# NOTE(review): this file is produced by `gem install`; hand edits will be
# overwritten on reinstall.
Gem::Specification.new do |s|
  s.name = "globalid".freeze
  s.version = "0.4.2"

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["David Heinemeier Hansson".freeze]
  s.date = "2019-01-11"
  s.description = "URIs for your models makes it easy to pass references around.".freeze
  s.email = "[email protected]".freeze
  s.homepage = "http://www.rubyonrails.org".freeze
  s.licenses = ["MIT".freeze]
  s.required_ruby_version = Gem::Requirement.new(">= 1.9.3".freeze)
  s.rubygems_version = "3.0.6".freeze
  s.summary = "Refer to any model with a URI: gid://app/class/id".freeze

  s.installed_by_version = "3.0.6" if s.respond_to? :installed_by_version

  if s.respond_to? :specification_version then
    s.specification_version = 4

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<activesupport>.freeze, [">= 4.2.0"])
      s.add_development_dependency(%q<rake>.freeze, [">= 0"])
    else
      s.add_dependency(%q<activesupport>.freeze, [">= 4.2.0"])
      s.add_dependency(%q<rake>.freeze, [">= 0"])
    end
  else
    s.add_dependency(%q<activesupport>.freeze, [">= 4.2.0"])
    s.add_dependency(%q<rake>.freeze, [">= 0"])
  end
end
| 37.108108 | 112 | 0.669337 |
ac636aa90661e11b4142ba74bf5c2628236e25fc | 325 | class User < ActiveRecord::Base
  # Devise authentication — no :registerable module, so account creation is
  # handled outside the standard Devise registration flow.
  devise :database_authenticatable,
    :recoverable, :rememberable, :trackable, :validatable

  has_many :memberships
  has_many :accounts, through: :memberships

  # Accounts this user owns directly (Account#owner), as opposed to
  # accounts joined through a membership.
  def owned_accounts
    Account.where(owner: self)
  end

  # Owned accounts plus membership accounts.
  # NOTE(review): may contain duplicates when an owner also holds a
  # membership in the same account — confirm callers tolerate that.
  def all_accounts
    owned_accounts + accounts
  end
end
| 20.3125 | 62 | 0.723077 |
214b8acfd471bb43c863087027dfe82c07ad4b7d | 3,877 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2019_06_01
module Models
#
# BGP peer status details.
#
# BGP peer status details.
#
# Read-only data object describing one BGP peering session on a virtual
# network gateway; instances are populated by deserialization driven by
# self.mapper (every property is read_only on the wire).
# NOTE: AutoRest-generated code (see file header) — hand edits are lost
# when the client is regenerated.
class BgpPeerStatus

  include MsRestAzure

  # @return [String] The virtual network gateway's local address.
  attr_accessor :local_address

  # @return [String] The remote BGP peer.
  attr_accessor :neighbor

  # @return [Integer] The autonomous system number of the remote BGP peer.
  attr_accessor :asn

  # @return [BgpPeerState] The BGP peer state. Possible values include:
  # 'Unknown', 'Stopped', 'Idle', 'Connecting', 'Connected'
  attr_accessor :state

  # @return [String] For how long the peering has been up.
  attr_accessor :connected_duration

  # @return [Integer] The number of routes learned from this peer.
  attr_accessor :routes_received

  # @return [Integer] The number of BGP messages sent.
  attr_accessor :messages_sent

  # @return [Integer] The number of BGP messages received.
  attr_accessor :messages_received


  #
  # Mapper for BgpPeerStatus class as Ruby Hash.
  # This will be used for serialization/deserialization.
  #
  def self.mapper()
    {
      client_side_validation: true,
      required: false,
      serialized_name: 'BgpPeerStatus',
      type: {
        name: 'Composite',
        class_name: 'BgpPeerStatus',
        model_properties: {
          local_address: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'localAddress',
            type: {
              name: 'String'
            }
          },
          neighbor: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'neighbor',
            type: {
              name: 'String'
            }
          },
          asn: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'asn',
            type: {
              name: 'Number'
            }
          },
          state: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'state',
            type: {
              name: 'String'
            }
          },
          connected_duration: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'connectedDuration',
            type: {
              name: 'String'
            }
          },
          routes_received: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'routesReceived',
            type: {
              name: 'Number'
            }
          },
          messages_sent: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'messagesSent',
            type: {
              name: 'Number'
            }
          },
          messages_received: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'messagesReceived',
            type: {
              name: 'Number'
            }
          }
        }
      }
    }
  end
end
end
end
| 29.371212 | 78 | 0.480526 |
28d26e1c993c2ef4379aa97a64addb9824cd992f | 230 | class CreateSites < ActiveRecord::Migration[6.0]
def change
create_table :sites do |t|
t.string :name, null: false, unique: true
t.string :fits_id, null: false, unique: true
t.timestamps
end
end
end
| 20.909091 | 50 | 0.656522 |
f85143f10f77bb9748821bf6a9fd8f3d6c4975fb | 2,246 | # frozen_string_literal: true
require 'collectionspace/mapper/tools/symbolizable'
module CollectionSpace
module Mapper
# Represents a JSON RecordMapper containing the config, field mappings, and template
# for transforming a hash of data into CollectionSpace XML
# The RecordMapper bundles up all the info needed by various other classes in order
# to transform and map incoming data into CollectionSpace XML, so it gets passed
# around to everything as a kind of mondo-configuration-object, which is probably
# terrible OOD but better than what I had before?
# :reek:Attribute - when I get rid of xphash, this will go away
# :reek:InstanceVariableAssumption - instance variable gets set by convert
class RecordMapper
include Tools::Symbolizable
attr_reader :batchconfig, :config, :termcache, :csidcache, :mappings, :xml_template, :csclient
attr_accessor :xpath
def initialize(opts)
jhash = opts[:mapper].is_a?(Hash) ? opts[:mapper] : JSON.parse(opts[:mapper])
convert(jhash)
@batchconfig = CollectionSpace::Mapper::Config.new(config: opts[:batchconfig], record_type: record_type)
@csclient = opts[:csclient]
@termcache = opts[:termcache]
@csidcache = opts[:csidcache]
@xpath = {}
end
def record_type
@config.recordtype
end
# The value returned here is used to enable module extension when creating
# other classes using RecordMapper
def service_type_extension
case config.service_type
when 'authority'
CollectionSpace::Mapper::Authority
when 'relation'
CollectionSpace::Mapper::Relationship
when 'procedure'
CollectionSpace::Mapper::Media if record_type == 'media'
end
end
private
def convert(json)
hash = symbolize(json)
@config = CollectionSpace::Mapper::RecordMapperConfig.new(hash[:config])
@xml_template = CollectionSpace::Mapper::XmlTemplate.new(hash[:docstructure])
@mappings = CollectionSpace::Mapper::ColumnMappings.new(mappings: hash[:mappings],
mapper: self)
end
end
end
end
| 36.819672 | 112 | 0.669635 |
288018a89bd9ab0acb6421cb23bbe04342a372dd | 490 | # frozen_string_literal: true
module GraphQL
  module Introspection
    class DynamicFields < Introspection::BaseObject
      field :__typename, String, "The name of this type", null: false, extras: [:irep_node]

      # Resolver for __typename. Under the interpreter runtime there is no
      # irep node (the extra arrives as nil), so the name comes straight
      # from the object's class; the legacy runtime reads it off the irep
      # node's owner type.
      def __typename(irep_node: nil)
        return object.class.graphql_name if context.interpreter?

        irep_node.owner_type.name
      end
    end
  end
end
| 27.222222 | 91 | 0.669388 |
619b1b477527a4bb92aeb4998d2a52040ca951cd | 312 | describe 'comable/admin/shipment_methods/new' do
  let(:shipment_method) { build(:shipment_method) }

  # Assign the record under @shipment_method, as the controller would
  # before rendering the view.
  before { assign(:shipment_method, shipment_method) }

  it 'renders new shipment_method form' do
    render

    # The new-record form must POST to the admin shipment methods
    # collection path.
    assert_select 'form[action=?][method=?]', comable.admin_shipment_methods_path, 'post'
  end
end
| 28.363636 | 89 | 0.746795 |
3387495d5ece1497bb925792712a929abbb86a31 | 1,780 | # Accumulates samples in a sorted array, with methods to extract
# the sample at any given proportion of the dataset.
#
# Insertion: log n
# Fetch: 1
#
# Use:
# p = SortedSamples.new
# p << 1
# p << 3
# p << 2
#
# p % 50 get the 50th percentile (median) value.
# => 2
# p % 0 minimum
# => 1
# p.at 0.95 95th percentile
# => 3
class Mtrc::SortedSamples < Mtrc::Samples
  attr_reader :ns

  def initialize
    @ns = []
  end

  # Insert an n into the ordered set, keeping @ns sorted.
  def <<(n)
    i = index n
    @ns.insert i, n
    self
  end
  alias add <<

  # Gets the ith element of the list.
  def [](i)
    @ns[i]
  end

  # Returns the sample at p percentage. Given 50, returns the median.
  def %(p)
    at(p / 100.0)
  end

  # Returns the sample at proportion f of the list. For example, at(.95) is
  # the 95th percentile value. f == 1.0 maps to the last element.
  def at(f)
    i = (f * @ns.size).floor
    if i == @ns.size
      @ns[i - 1]
    else
      @ns[i]
    end
  end

  def clear
    @ns.clear
  end

  # Returns the insertion position for a given n
  def index(n)
    search @ns, n, 0, [@ns.size - 1, 0].max
  end

  def max
    @ns[-1]
  end

  def median
    at 0.5
  end

  def min
    @ns[0]
  end

  def size
    @ns.size
  end

  private

  # Binary search for the leftmost slot where value can be inserted.
  def search(array, value, i1, i2)
    return 0 if array.empty?

    if value > array[i2]
      i2 + 1
    elsif value <= array[i1]
      i1
    elsif i1 == i2
      i1
    else
      middle = (i1 + i2) / 2

      if middle == i1
        # 2-element degenerate case
        i2
      elsif value <= array[middle]
        # First half
        search array, value, i1, middle
      else
        # Second half
        search array, value, middle, i2
      end
    end
  end
end
| 16.635514 | 75 | 0.546067 |
ed1ec84bfd9dfd02e2e26a88472742506fc57fff | 444 | cask "hopper-disassembler" do
  version "5.2.0"
  sha256 "1dd4fc0aeb6c5ab64ddd5c956e1d8a7abd25557ac4b2cf3c90baad7ec041f372"

  url "https://d2ap6ypl1xbe4k.cloudfront.net/Hopper-#{version}-demo.dmg"
  name "Hopper Disassembler"
  desc "Hopper Disassembler"
  homepage "https://www.hopperapp.com/"

  # New versions are announced via the vendor's Sparkle appcast feed.
  livecheck do
    url "https://www.hopperapp.com/HopperGDBServer/appcast.xml"
    strategy :sparkle
  end

  # NOTE(review): the app bundle name still says "v4" while the cask version
  # is 5.2.0 — verify against the artifact inside the DMG (likely
  # "Hopper Disassembler v#{version.major}.app").
  app "Hopper Disassembler v4.app"
end
| 26.117647 | 75 | 0.759009 |
612c643ed1159b30c47f55dd97786476af374551 | 966 | class Ioping < Formula
  desc "Tool to monitor I/O latency in real time"
  homepage "https://github.com/koct9i/ioping"
  url "https://github.com/koct9i/ioping/archive/v1.0.tar.gz"
  sha256 "db999abb0f9de00bce800267965cdd9b826ebce6052e905b12d9f40076157088"
  head "https://github.com/koct9i/ioping.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "ecb8704feb7a9ac48e1cec7a9f8acaf60bd251e34fe8fe69ebd46e874bea1e47" => :mojave
    sha256 "f08f3749c114d01348117df60ec07da9341f8834640cf5f1fbcdaaf944218065" => :high_sierra
    sha256 "aaee4af9debb8152ff634033e61e6abd8e053295620dfae725827cdece5a670b" => :sierra
    sha256 "95316d10ae971b67aa383d785e3c26b07172446fe353d3952dc872c693e57ee5" => :el_capitan
    sha256 "ed5b9ea5dcf6ff4af74d71af575f2c1bf12ae94b7b2a40c32105d027e1ff9333" => :yosemite
  end

  def install
    # Plain Makefile project; install everything under the Homebrew prefix.
    system "make"
    system "make", "install", "PREFIX=#{prefix}"
  end

  test do
    # A single I/O ping against the sandboxed test directory proves the
    # binary runs and can touch the filesystem.
    system "#{bin}/ioping", "-c", "1", testpath
  end
end
| 37.153846 | 93 | 0.777433 |
21a1e7d2162a3f4fe20369fc3f52eb9c2b6e0252 | 837 | class Repositext
  # Represents text
  class Text

    attr_reader :contents, :language

    # @param contents [String] the raw text
    # @param language [Language] used for language-aware word splitting
    # @raise [ArgumentError] if contents is not a String or language is not
    #   a Language
    def initialize(contents, language)
      raise ArgumentError.new("Invalid contents: #{ contents.inspect }") unless contents.is_a?(String)
      raise ArgumentError.new("Invalid language: #{ language.inspect }") unless language.is_a?(Language)
      @contents = contents
      @language = language
    end

    # Human-readable summary with contents truncated to 50 chars.
    def inspect
      %(#<#{ self.class.name }:#{ object_id } @contents=#{ contents.truncate_in_the_middle(50).inspect } @language=#{ language.inspect }>)
    end

    # Memoized character count of contents.
    def length_in_chars
      @length_in_chars ||= contents.length
    end

    # Memoized word count (words as split by the language).
    def length_in_words
      @length_in_words ||= words.length
    end

    def to_s
      inspect
    end

    # Memoized list of words, split by the language's own rules.
    def words
      @words ||= language.split_into_words(contents)
    end
  end
end
| 22.621622 | 138 | 0.661888 |
e28131abe237914b626f5d1e37d5afbfe41b203d | 6,199 | # frozen_string_literal: true
require 'rails_helper'

RSpec.describe Course::Level, type: :model do
  it { is_expected.to belong_to(:course).inverse_of(:levels) }

  it 'ensures that experience points threshold is greater or equal to 0' do
    expect(subject).
      to validate_numericality_of(:experience_points_threshold).
      is_greater_than_or_equal_to(0)
  end

  let!(:instance) { Instance.default }

  with_tenant(:instance) do
    let!(:course) { build(:course) }

    describe 'validations' do
      describe 'uniqueness of experience points threshold' do
        context 'when level have the same threshold as existing level' do
          before { create(:course_level, course: course, experience_points_threshold: 100) }
          subject { build(:course_level, course: course, experience_points_threshold: 100) }

          it 'is invalid' do
            expect(subject).not_to be_valid
          end
        end
      end
    end

    describe '.after_course_initialize' do
      it 'builds one default level' do
        expect(course.levels.size).to eq(1)
      end

      context 'when course is initialised again' do
        it 'does not build another level' do
          level = course.levels.first
          # Call the callback one more time
          Course::Level.after_course_initialize(course)
          expect(course.levels.size).to eq(1)

          course.save
          expect(level).to be_persisted
        end
      end
    end

    describe '.default_scope' do
      before { course.levels.concat(create_list(:course_level, 5, course: course)) }
      it 'orders by ascending experience_points_threshold' do
        # each_cons(2) walks consecutive pairs in scope order.
        course.levels.each_cons(2) do |current_level, next_level|
          expect(current_level.experience_points_threshold).
            to be < next_level.experience_points_threshold
        end
      end

      it 'adds level_number to each level record' do
        course.levels.each_with_index do |level, index|
          expect(level.level_number).to eq(index)
        end
      end
    end

    describe '.default_level?' do
      context 'when the level is a default level' do
        it 'returns true' do
          # Threshold 0 marks the default level.
          level = build(:course_level, experience_points_threshold: 0)
          expect(level).to be_default_level
        end
      end

      context 'when the level is not a default level' do
        it 'returns false' do
          level = build(:course_level, experience_points_threshold: 1)
          expect(level).not_to be_default_level
        end
      end
    end

    describe '.next' do
      before { course.levels.concat(create_list(:course_level, 5, course: course)) }
      context 'when current level is not the highest' do
        it 'returns the next level' do
          course.levels.each_cons(2) do |current_level, next_level|
            expect(current_level.next).to eq(next_level)
          end
        end
      end

      context 'when current level is the highest' do
        it 'returns nil' do
          expect(course.levels.last.next).to be_nil
        end
      end
    end

    describe '.mass_update_levels' do
      before { course.levels.concat(create_list(:course_level, 5, course: course)) }
      subject { course.mass_update_levels(new_thresholds) }

      context 'when new thresholds are correct' do
        # Must be below the sequence numbers in the factory or there might be duplicates.
        let(:new_thresholds) { [0, 10, 20, 30] }

        it 'updates the levels to the new thresholds' do
          subject
          updated_thresholds = course.levels.map(&:experience_points_threshold)
          expect(updated_thresholds).to match_array(new_thresholds)
        end

        it 'does not recreate existing thresholds' do
          # Sample the last original level and "keep" it in the new thresholds.
          original_sample_level = course.levels.last
          new_thresholds << original_sample_level.experience_points_threshold
          subject

          # Find back the level object with the original threshold.
          sample_level = course.levels.select do |level|
            level.experience_points_threshold == original_sample_level.experience_points_threshold
          end.first

          # Check that their properties are equal.
          expect(original_sample_level.experience_points_threshold).
            to eq(sample_level.experience_points_threshold)
          expect(original_sample_level.id).to eq(sample_level.id)
          expect(original_sample_level.updated_at).to eq(sample_level.updated_at)
          expect(original_sample_level.created_at).to eq(sample_level.created_at)
        end
      end

      context 'when new thresholds are missing the default level' do
        let(:new_thresholds) { [10, 20, 30] }

        it 'updates the levels but keeps the default level' do
          original_thresholds_excluding_default = course.levels.map(&:experience_points_threshold).
                                                  reject do |threshold|
            threshold == Course::Level::DEFAULT_THRESHOLD
          end
          subject
          updated_thresholds = course.levels.map(&:experience_points_threshold)
          expect(course.default_level?).to be true
          expect(updated_thresholds).to include(10, 20, 30)
          expect(updated_thresholds).not_to include(*original_thresholds_excluding_default)
        end
      end
    end

    describe '#next_level_threshold' do
      before { course.levels.concat(create_list(:course_level, 5, course: course)) }
      context 'when current level is not the highest' do
        it "returns the next level's threshold" do
          course.levels.each_cons(2) do |current_level, next_level|
            expect(current_level.next_level_threshold).
              to eq(next_level.experience_points_threshold)
          end
        end
      end

      context 'when current level is the highest' do
        it "returns the current level's threshold" do
          expect(course.levels.last.next_level_threshold).
            to eq(course.levels.last.experience_points_threshold)
        end
      end
    end
  end
end
| 35.83237 | 99 | 0.64946 |
bb366175bc69da0ee54d72fffe96677d5bce23af | 5,689 | require "spec_helper"
describe "Bundler.require" do
  before :each do
    # Local gems used as fixtures; "one" deliberately has no file matching
    # its own name so it must be required via explicit :require entries.
    build_lib "one", "1.0.0" do |s|
      s.write "lib/baz.rb", "puts 'baz'"
      s.write "lib/qux.rb", "puts 'qux'"
    end

    build_lib "two", "1.0.0" do |s|
      s.write "lib/two.rb", "puts 'two'"
      s.add_dependency "three", "= 1.0.0"
    end

    build_lib "three", "1.0.0" do |s|
      s.write "lib/three.rb", "puts 'three'"
      s.add_dependency "seven", "= 1.0.0"
    end

    build_lib "four", "1.0.0" do |s|
      s.write "lib/four.rb", "puts 'four'"
    end

    # "five" opts out of a default lib file; its file is named mofive.rb.
    build_lib "five", "1.0.0", :no_default => true do |s|
      s.write "lib/mofive.rb", "puts 'five'"
    end

    build_lib "six", "1.0.0" do |s|
      s.write "lib/six.rb", "puts 'six'"
    end

    build_lib "seven", "1.0.0" do |s|
      s.write "lib/seven.rb", "puts 'seven'"
    end

    gemfile <<-G
      path "#{lib_path}"
      gem "one", :group => :bar, :require => %w(baz qux)
      gem "two"
      gem "three", :group => :not
      gem "four", :require => false
      gem "five"
      gem "six", :group => "string"
      gem "seven", :group => :not
    G
  end

  it "requires the gems" do
    # default group
    run "Bundler.require"
    check out.should == "two"

    # specific group
    run "Bundler.require(:bar)"
    check out.should == "baz\nqux"

    # default and specific group
    run "Bundler.require(:default, :bar)"
    check out.should == "baz\nqux\ntwo"

    # specific group given as a string
    run "Bundler.require('bar')"
    check out.should == "baz\nqux"

    # specific group declared as a string
    run "Bundler.require(:string)"
    check out.should == "six"

    # required in resolver order instead of gemfile order
    run("Bundler.require(:not)")
    out.split("\n").sort.should == ['seven', 'three']
  end

  it "allows requiring gems with non standard names explicitly" do
    run "Bundler.require ; require 'mofive'"
    out.should == "two\nfive"
  end

  it "raises an exception if a require is specified but the file does not exist" do
    gemfile <<-G
      path "#{lib_path}"
      gem "two", :require => 'fail'
    G

    run <<-R
      begin
        Bundler.require
      rescue LoadError => e
        puts e.message
      end
    R

    out.should == 'no such file to load -- fail'
  end

  describe "using bundle exec" do
    it "requires the locked gems" do
      bundle "exec ruby -e 'Bundler.require'"
      check out.should == "two"

      bundle "exec ruby -e 'Bundler.require(:bar)'"
      check out.should == "baz\nqux"

      bundle "exec ruby -e 'Bundler.require(:default, :bar)'"
      out.should == "baz\nqux\ntwo"
    end
  end

  describe "order" do
    before(:each) do
      # "one" prints whether Two was already loaded, so these examples can
      # observe the order in which Bundler.require loads the gems.
      build_lib "one", "1.0.0" do |s|
        s.write "lib/one.rb", <<-ONE
          if defined?(Two)
            Two.two
          else
            puts "two_not_loaded"
          end
          puts 'one'
        ONE
      end

      build_lib "two", "1.0.0" do |s|
        s.write "lib/two.rb", <<-TWO
          module Two
            def self.two
              puts 'module_two'
            end
          end
          puts 'two'
        TWO
      end
    end

    it "works when the gems are in the Gemfile in the correct order" do
      gemfile <<-G
        path "#{lib_path}"
        gem "two"
        gem "one"
      G

      run "Bundler.require"
      check out.should == "two\nmodule_two\none"
    end

    describe "a gem with different requires for different envs" do
      before(:each) do
        build_gem "multi_gem", :to_system => true do |s|
          s.write "lib/one.rb", "puts 'ONE'"
          s.write "lib/two.rb", "puts 'TWO'"
        end

        install_gemfile <<-G
          gem "multi_gem", :require => "one", :group => :one
          gem "multi_gem", :require => "two", :group => :two
        G
      end

      it "requires both with Bundler.require(both)" do
        run "Bundler.require(:one, :two)"
        out.should == "ONE\nTWO"
      end

      it "requires one with Bundler.require(:one)" do
        run "Bundler.require(:one)"
        out.should == "ONE"
      end

      it "requires :two with Bundler.require(:two)" do
        run "Bundler.require(:two)"
        out.should == "TWO"
      end
    end

    it "fails when the gems are in the Gemfile in the wrong order" do
      gemfile <<-G
        path "#{lib_path}"
        gem "one"
        gem "two"
      G

      run "Bundler.require"
      check out.should == "two_not_loaded\none\ntwo"
    end

    describe "with busted gems" do
      it "should be busted" do
        build_gem "busted_require", :to_system => true do |s|
          s.write "lib/busted_require.rb", "require 'no_such_file_omg'"
        end

        install_gemfile <<-G
          gem "busted_require"
        G

        run "Bundler.require", :expect_err => true
        err.should include("no such file to load -- no_such_file_omg")
      end
    end
  end
end
describe "Bundler.require with platform specific dependencies" do
  it "does not require the gems that are pinned to other platforms" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"

      platforms :#{not_local_tag} do
        gem "fail", :require => "omgomg"
      end

      gem "rack", "1.0.0"
    G

    # If the foreign-platform gem were required, this would print an error.
    run "Bundler.require", :expect_err => true
    err.should be_empty
  end

  it "requires gems pinned to multiple platforms, including the current one" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"

      platforms :#{not_local_tag}, :#{local_tag} do
        gem "rack", :require => "rack"
      end
    G

    run "Bundler.require; puts RACK", :expect_err => true

    check out.should == "1.0.0"
    err.should be_empty
  end
end
| 24.521552 | 83 | 0.560731 |
395b6ee472d510c7e22d36181bb11ebbd275bf63 | 459 | class BabyItems::Item
  attr_accessor :name, :price, :description, :stores, :url

  # Class-level registry of every scraped item.
  @@all = []

  # def initialize(name, price, url, stores, description)
  #   @name = name
  #   @price = price
  #   @description = description
  #   @stores = stores
  #   @url = url
  #   save
  # end

  # Registers this item in the class-level registry.
  def save
    @@all << self
  end

  # All items; scrapes lazily on first access.
  def self.all
    BabyItems::Scraper.scrape_baby_items if @@all.empty?
    @@all
  end

  # Looks up an item by its 1-based position (as shown to the user).
  def self.find(id)
    @@all[id-1]
  end
end
| 15.827586 | 58 | 0.590414 |
0120fd6d8becfeec56d4366088e699156dba6248 | 1,289 | require 'rails'
module SeoMeta

  class << self
    # Lazily-built map of meta attribute name => column type. These are the
    # fields stored on SeoMetum and delegated to by host models.
    def attributes
      @@attributes ||= {
        :browser_title => :string,
        :meta_description => :text
      }
    end
  end

  class Engine < ::Rails::Engine
    engine_name 'seo_meta'
  end

  # Loaded on demand when is_seo_meta mixes it into a model.
  autoload :InstanceMethods, File.expand_path('../seo_meta/instance_methods', __FILE__)
end
# Mixin entry point: call `is_seo_meta` from an ActiveRecord class to attach
# a SeoMetum record and delegate the meta fields to it. Safe to call more
# than once — the association is only wired up on first inclusion.
#
# @param options [Hash] may override :class_name, :foreign_key, :dependent
#   on the has_one association; other keys are ignored.
def is_seo_meta(options = {})
  if included_modules.exclude?(::SeoMeta::InstanceMethods)
    # Let the base know about SeoMetum
    has_one_options = {
      :class_name => 'SeoMetum',
      :foreign_key => :seo_meta_id,
      :dependent => :destroy
    }.merge(options.slice(:class_name, :foreign_key, :dependent))

    has_one :seo_meta, -> { where(:seo_meta_type => self.name) }, **has_one_options

    # Let SeoMetum know about the base
    ::SeoMetum.send :belongs_to, self.name.underscore.gsub('/', '_').to_sym,
                    class_name: self.name, optional: true

    # Include the instance methods.
    self.send :include, ::SeoMeta::InstanceMethods

    # Ensure that seo_meta is saved after the model is saved.
    after_save :save_meta_tags!
  end

  # Delegate both the accessor and setters for the fields to :seo_meta
  fields = ::SeoMeta.attributes.keys.map{|a| [a, :"#{a}="]}.flatten
  delegate *fields, to: :seo_meta
end
| 27.425532 | 87 | 0.660978 |
e8cdc43cd822f79fe2d07c55e0c4e3f58ecc128c | 1,030 | class CreateTenants < ActiveRecord::Migration[5.1]
def change
create_table :cortex_tenants, id: :uuid do |t|
t.string :name, limit: 50, null: false, index: { unique: true }
t.string :name_id, null: false, index: { unique: true }
t.text :description
t.uuid :parent_id, index: true
t.integer :lft
t.integer :rgt
t.integer :depth
t.datetime :deleted_at
t.datetime :active_at
t.datetime :deactive_at
t.references :owner, type: :uuid, foreign_key: { to_table: :cortex_users }
t.datetime :deleted_at, index: true
t.timestamps
end
create_join_table :tenants, :users, table_name: :cortex_tenants_users, column_options: { type: :uuid, index: true }
add_foreign_key :cortex_tenants_users, :cortex_tenants, column: :tenant_id, type: :uuid
add_foreign_key :cortex_tenants_users, :cortex_users, column: :user_id, type: :uuid
add_reference :cortex_users, :active_tenant, type: :uuid, foreign_key: { to_table: :cortex_tenants }
end
end
| 38.148148 | 119 | 0.687379 |
21d3446accfe07bc5ed43728b3cb8d2b59ea06c7 | 345 | # frozen_string_literal: true
# Jbuilder partial: serializes one patient record (identity, contact info,
# and the cached samples_count) for API/JSON responses.
json.id patient.id
json.pid patient.pid
json.first_name patient.first_name
json.last_name patient.last_name
json.middle_name patient.middle_name
json.full_name patient.full_name
json.phone patient.phone
json.email patient.email
json.samples_count patient.samples_count
json.contact_address patient.contact_address
| 26.538462 | 44 | 0.866667 |
79b35b3bd96b31a2bf57194f4f9dec3c9eafbedb | 877 | # This file was automatically generated by Trapeze, the safety-net generator for
# Ruby. Visit http://trapeze.rubyforge.org/ for more information.

require 'test/unit'

# Generated characterization tests: they pin the observed behavior of two
# top-level methods by eval'ing them in TOPLEVEL_BINDING.
class Test_ < Test::Unit::TestCase

  def test_top_level_method_bar_should_return_bar
    assert_equal "BAR!", eval('bar', TOPLEVEL_BINDING, __FILE__, __LINE__)
  end

  # NOTE(review): `rescue Exception` is overly broad (it also catches
  # SystemExit etc.); generated code, left as-is.
  def test_top_level_method_foo_should_raise_no_method_error_with_args_of_an_empty_array_and_message_of_undefined_method_this_method_does_not_exist_for_main_object_and_name_of_this_method_does_not_exist
    begin
      eval 'foo', TOPLEVEL_BINDING, __FILE__, __LINE__
    rescue Exception => e
      assert_instance_of NoMethodError, e
      assert_equal [], e.args
      assert_equal "undefined method `this_method_does_not_exist' for main:Object", e.message
      assert_equal :this_method_does_not_exist, e.name
    end
  end

end
| 36.541667 | 202 | 0.791334 |
33925eda58d8ea6c89659ddd27983bc433bc1fab | 508 | module Loaderio
module Configuration
extend self
attr_accessor :api_key, :api_version, :protocol, :server
#default values
self.api_version = "v2"
self.protocol = "https"
self.server = "api.loader.io"
self.api_key = ENV["LOADERIO_API_KEY"]
def base_url
"#{protocol}://#{server}/#{api_version}"
end
def resource
RestClient::Resource.new(base_url, headers: {"loaderio-Auth" => api_key, content_type: :json, accept: :json })
end
end
end
| 24.190476 | 116 | 0.641732 |
ab2f667a3ad6bccd385e8d72fa1533411d79d7ca | 2,184 | feature 'Uploaded Items Block', feature: true, js: true, versioning: true do
  let(:exhibit) { FactoryBot.create(:exhibit) }
  let(:exhibit_curator) { FactoryBot.create(:exhibit_curator, exhibit: exhibit) }
  let(:fixture_file1) { File.join(FIXTURES_PATH, '800x600.png') }
  let(:fixture_file2) { File.join(FIXTURES_PATH, 'avatar.png') }

  before do
    login_as exhibit_curator
    visit spotlight.edit_exhibit_home_page_path(exhibit)
    add_widget 'uploaded_items'
  end

  scenario 'users can upload images with text' do
    heading = 'Some Uploaded Images'
    text = 'Take a look at these images I just uploaded!'
    fill_in 'Heading', with: heading
    content_editable = find('.st-text-block')
    content_editable.set(text)
    # The upload list starts empty; each attach_file adds one entry.
    expect(page).not_to have_css('.dd-list li')
    attach_file('uploaded_item_url', fixture_file1)
    expect(page).to have_css('.dd-list li', count: 1)
    within('.dd-list') do
      expect(page).to have_css('.panel-title', text: '800x600.png')
      fill_in 'Caption', with: 'Some caption text'
    end
    attach_file('uploaded_item_url', fixture_file2)
    expect(page).to have_css('.dd-list li', count: 2)
    within('.dd-list') do
      expect(page).to have_css('.panel-title', text: 'avatar.png')
    end
    save_page
    # After save, heading/text and both images render on the public page.
    expect(page).to have_css('h3', text: heading)
    expect(page).to have_css('p', text: text)
    within('.uploaded-items-block') do
      expect(page).to have_css('img[alt="800x600.png"]')
      expect(page).to have_css '.caption', text: 'Some caption text'
      expect(page).to have_css('img[alt="avatar.png"]')
    end
  end

  scenario 'users can toggle individual images to not display' do
    attach_file('uploaded_item_url', fixture_file1)
    attach_file('uploaded_item_url', fixture_file2)
    # This line blocks until the javascript has added the file to the page:
    expect(find('#st-block-3_display-checkbox_2')).to be_present
    # Uncheck the first checkbox
    all('input[type="checkbox"]').first.click
    save_page
    within('.uploaded-items-block') do
      expect(page).not_to have_css('img[alt="800x600.png"]')
      expect(page).to have_css('img[alt="avatar.png"]')
    end
  end
end
| 33.090909 | 81 | 0.686813 |
e2a602327771d9a6b8326e22b6caaba011c4c872 | 688 | cask "powershell" do
  version "7.1.2"
  sha256 "A3B664487FB2906ABF52442B5E620DF3CF1FCE8AFF82C81679923C66097272C2"

  url "https://github.com/PowerShell/PowerShell/releases/download/v#{version}/powershell-#{version}-osx-x64.pkg"
  name "PowerShell"
  desc "Command-line shell and scripting language"
  homepage "https://github.com/PowerShell/PowerShell"

  # New versions are discovered from git tags (vX.Y.Z) on the GitHub repo.
  livecheck do
    url :homepage
    strategy :git
    regex(/^v?(\d+(?:\.\d+)*)$/)
  end

  depends_on macos: ">= :high_sierra"

  pkg "powershell-#{version}-osx-x64.pkg"

  uninstall pkgutil: "com.microsoft.powershell"

  zap trash: [
    "~/.cache/powershell",
    "~/.config/PowerShell",
    "~/.local/share/powershell",
  ]
end
| 24.571429 | 112 | 0.694767 |
62c14a4be4f12d5f33dabb38a411b518e7b161fc | 1,517 | # frozen_string_literal: true
require 'spec_helper'
# Shared examples parameterized by raw fauxhai node data and a conf hash
# with :x64?, :core?, :server? expectations.
shared_examples 'package_helper' do |data, conf|
  describe ::MSDotNet::PackageHelper do
    let(:package_helper) do
      # Set Core SKU (0x0D is a Server Core SKU code; 0x00 is non-Core)
      data['kernel']['os_info']['operating_system_sku'] = conf[:core?] ? 0x0D : 0x00
      # Set arch
      data['kernel']['machine'] = conf[:x64?] ? 'x86_64' : 'x86'
      ::MSDotNet::PackageHelper.new init_node(data)
    end

    describe 'packages' do
      it 'returns a Mash' do
        expect(package_helper.packages).to be_a(Mash)
      end

      # `be` (identity) proves the Mash is memoized, not rebuilt per call.
      it 'returns always the same Mash' do
        expect(package_helper.packages).to be package_helper.packages
      end
    end

    %i[x64? core? server?].each do |function|
      describe function do
        it "returns #{conf[function]}" do
          expect(package_helper.send(function)).to be conf[function]
        end
      end
    end
  end
end

# Run the shared examples across every supported Windows version / arch,
# and additionally in Core mode where the version supports it.
FAUXHAI_WINDOWS_VERSIONS.each do |windows_version, version_support|
  # load the data
  data = fauxhai_data 'windows', windows_version
  is_server = version_support[:server]

  version_support[:arch].each do |arch|
    is_arch64 = arch == '64'
    describe "On Windows#{windows_version}-#{arch}" do
      include_examples 'package_helper', data, x64?: is_arch64, server?: is_server, core?: false
    end

    next unless version_support[:core]

    describe "On Windows#{windows_version}-#{arch}-CORE" do
      include_examples 'package_helper', data, x64?: is_arch64, server?: is_server, core?: true
    end
  end
end
| 28.622642 | 96 | 0.667765 |
4aaf55780dcc5a96c376f112336c89b74553185c | 1,437 | # frozen_string_literal: true
require 'g5_authenticatable_api/services/token_validator'
require 'g5_authenticatable_api/services/user_fetcher'

module G5AuthenticatableApi
  module Helpers
    # Helpers for rails API controllers
    module Rails
      # Before-action style guard: renders 401 unless the request carries a
      # valid access token.
      def authenticate_api_user!
        raise_auth_error unless token_validator.valid?
      end

      # Decoded token payload for the current request (memoized).
      def token_data
        @token_data ||= token_info.token_data
      end

      # User resolved from the current request's credentials (memoized).
      def current_api_user
        @current_api_user ||= user_fetcher.current_user
      end

      # Raw access token extracted from the request (memoized).
      def access_token
        @access_token ||= token_info.access_token
      end

      def warden
        request.env['warden']
      end

      private

      # Each service is built from the same trio: params, headers, warden.
      def token_info
        @token_info ||= Services::TokenInfo.new(
          request.params,
          request.headers,
          warden
        )
      end

      def token_validator
        @token_validator ||= Services::TokenValidator.new(
          request.params,
          request.headers,
          warden
        )
      end

      def user_fetcher
        @user_fetcher ||= Services::UserFetcher.new(
          request.params,
          request.headers,
          warden
        )
      end

      # Renders the 401 response with the appropriate WWW-Authenticate
      # challenge header.
      def raise_auth_error
        auth_header = token_validator.auth_response_header
        response.headers['WWW-Authenticate'] = auth_header
        render json: { error: 'Unauthorized' },
               status: :unauthorized
      end
    end
  end
end
| 22.107692 | 58 | 0.619346 |
b9a395059da05c29edcf42b25b4a97de1cf33300 | 3,005 | # encoding: utf-8
require File.dirname(__FILE__) + '/../spec_helper'
require 'cucumber/rb_support/rb_step_definition'
require 'cucumber/rb_support/rb_language'

module Cucumber
  describe StepMatch do
    before do
      @rb_language = RbSupport::RbLanguage.new(nil)
    end

    # Builds a step definition from a regexp with an empty body.
    def stepdef(regexp)
      RbSupport::RbStepDefinition.new(@rb_language, regexp, lambda{})
    end

    # Builds a StepMatch for a step name against the given regexp.
    def step_match(regexp, name)
      stepdef = stepdef(regexp)
      StepMatch.new(stepdef, name, nil, stepdef.arguments_from(name))
    end

    it "should format one group when we use Unicode" do
      m = step_match(/I (\w+) ok/, "I æøåÆØÅæøåÆØÅæøåÆØÅæøåÆØÅ ok")
      m.format_args("<span>%s</span>").should == "I <span>æøåÆØÅæøåÆØÅæøåÆØÅæøåÆØÅ</span> ok"
    end

    it "should format several groups when we use Unicode" do
      m = step_match(/I (\w+) (\w+) (\w+) this (\w+)/, "I ate æøåÆØÅæøåÆØÅæøåÆØÅæøåÆØÅ egg this morning")
      m.format_args("<span>%s</span>").should == "I <span>ate</span> <span>æøåÆØÅæøåÆØÅæøåÆØÅæøåÆØÅ</span> <span>egg</span> this <span>morning</span>"
    end

    it "should deal with Unicode both inside and outside arguments" do
      m = step_match(/Jæ (\w+) ålsker (\w+) løndet/, "Jæ vø ålsker døtte løndet")
      m.format_args("<span>%s</span>").should == "Jæ <span>vø</span> ålsker <span>døtte</span> løndet"
    end

    it "should format groups with format string" do
      m = step_match(/I (\w+) (\d+) (\w+) this (\w+)/, "I ate 1 egg this morning")
      m.format_args("<span>%s</span>").should == "I <span>ate</span> <span>1</span> <span>egg</span> this <span>morning</span>"
    end

    it "should format groups with format string when there are dupes" do
      m = step_match(/I (\w+) (\d+) (\w+) this (\w+)/, "I bob 1 bo this bobs")
      m.format_args("<span>%s</span>").should == "I <span>bob</span> <span>1</span> <span>bo</span> this <span>bobs</span>"
    end

    it "should format groups with block" do
      m = step_match(/I (\w+) (\d+) (\w+) this (\w+)/, "I ate 1 egg this morning")
      m.format_args(&lambda{|m| "<span>#{m}</span>"}).should == "I <span>ate</span> <span>1</span> <span>egg</span> this <span>morning</span>"
    end

    it "should format groups with proc object" do
      m = step_match(/I (\w+) (\d+) (\w+) this (\w+)/, "I ate 1 egg this morning")
      m.format_args(lambda{|m| "<span>#{m}</span>"}).should == "I <span>ate</span> <span>1</span> <span>egg</span> this <span>morning</span>"
    end

    it "should format groups even when first group is optional and not matched" do
      m = step_match(/should( not)? be flashed '([^']*?)'$/, "I should be flashed 'Login failed.'")
      m.format_args("<span>%s</span>").should == "I should be flashed '<span>Login failed.</span>'"
    end

    it "should format embedded groups" do
      m = step_match(/running( (\d+) times)? (\d+) meters/, "running 5 times 10 meters")
      m.format_args("<span>%s</span>").should == "running<span> 5 times</span> <span>10</span> meters"
    end
  end
end | 45.530303 | 150 | 0.627953 |
87dcda6a64e241194818c0b1e2077452b1c1d5bb | 542 | module FistOfFury
  module Actor
    # Actor that drives the scheduler: ticks repeatedly, compensating each
    # delay for how long the previous tick took.
    class Clock < FistOfFury::Clock
      include FistOfFury::Actor

      def initialize(*args, &block)
        # Kick off the tick loop asynchronously as soon as the actor starts.
        after(0) do
          debug 'FistOfFury::Clock starting loop...'
          loop!
        end
      end

      private

      # `time { tick }` returns how long tick took; subtracting it from the
      # schedule keeps ticks evenly spaced (never scheduling in the past).
      def loop!
        after([time { tick }, 0].max) do
          loop!
        end
      rescue StandardError => e
        # TODO: global exception handling support
        # handle_exception(e, context: 'FistOfFury::Clock#loop!')
        raise e
      end
    end
  end
end
| 20.074074 | 65 | 0.555351 |
396e4598b23ac9a3a5d5172e537c723ea46f3351 | 677 | testcase Malt::Machine do
  # NOTE(review): "corrent" in the test names is a typo for "correct", but
  # these strings are runtime test descriptions — left unchanged here.
  method :engine do
    test "corrent engine is used when specified" do
      machine = Malt::Machine.new
      # `pry` exposes the private #engine method for inspection.
      engine = machine.pry.engine(:erb,:erubis)
      engine.assert == Malt::Engine::Erubis
      #machine.render(:text=>"<%= title %>", :type=>:erb, :engine=>:erubis, :data=>{:title=>'Testing'})
      #RR.verify
    end
  end

  method :render do
    test "corrent engine is used when specified" do
      machine = Malt::Machine.new
      #mock(machine).engine(:erb,:erubis){ Malt::Engine::Erubis }
      machine.render(:text=>"<%= title %>", :type=>:erb, :engine=>:erubis, :data=>{:title=>'Testing'})
      #RR.verify
    end
  end
end
| 24.178571 | 103 | 0.610044 |
ac7f1771979afcd2a8f34d23b86a13521f15b69a | 1,740 | # -*- mode: ruby -*-
# vi: set ft=ruby :

namespace :project do
  namespace :json do
    desc "Import from file a json dump of a project, primary-content, workflows & primary-content, avatar and background"
    task import: :environment do
      # NOTE(review): string interpolation of ENV is redundant; if the var
      # is unset this becomes "" and File.read fails with ENOENT — consider
      # ENV.fetch for a clearer error.
      json_dump_file_path = "#{ENV['JSON_PROJECT_DUMP_PATH']}"
      new_owner = User.find(ENV['PROJECT_OWNER_ID'])
      dump_data = JSON.parse(File.read(json_dump_file_path))
      # These top-level keys must be present in the dump.
      required_data = %w(project project_content workflows workflow_contents)
      required_data.each do |req_key|
        raise("Missing the #{req_key} key from the data dump") unless dump_data[req_key]
      end
      # Workflows and their contents are matched pairwise by index below.
      if dump_data["workflows"].size != dump_data["workflow_contents"].size
        raise("Must have a workflow content for each workflow")
      end
      p = Project.new(dump_data["project"].merge(owner: new_owner))
      # Collect every new record, then persist them all in one transaction.
      [].tap do |instances|
        instances << p
        p.project_contents << ProjectContent.new(dump_data["project_content"])
        instances << p.project_contents.first
        # Avatar and background media are optional in the dump.
        if avatar = dump_data["project_avatar"]
          instances << p.avatar = Medium.new(avatar)
        end
        if background = dump_data["project_background"]
          instances << p.background = Medium.new(background)
        end
        dump_data["workflows"].each_with_index do |workflow_attrs, index|
          w = Workflow.new(workflow_attrs.merge(project: p))
          w.workflow_contents << WorkflowContent.new(dump_data["workflow_contents"][index])
          instances << w << w.workflow_contents.first
        end
        ActiveRecord::Base.transaction { instances.map(&:save!) }
        puts "Imported project with ID: #{p.id} and SLUG: #{p.slug} to database."
      end
    end
  end
end
| 40.465116 | 121 | 0.662644 |
91eaa6e9cc22889e4b10e532c89fea1b286c0cea | 3,766 | class Ling < ApplicationRecord
resourcify
include Groupable
include CSVAttributes
CSV_ATTRIBUTES = %w[ id name depth parent_id group_id creator_id ]
def self.csv_attributes
CSV_ATTRIBUTES
end
validates :name, :presence => true, :uniqueness => { :scope => :group_id }
validates :depth, :presence => true, :numericality => true
validates :parent, :presence => true, :allow_nil => true
validate :parent_depth_check
validate :group_association_match
validate :available_depth_for_group
# TODO dependent nullify parent_id on child if parent destroyed
belongs_to :parent, :class_name => "Ling", :foreign_key => "parent_id", :inverse_of => :children
has_many :children, :class_name => "Ling", :foreign_key => "parent_id", :inverse_of => :parent
has_many :examples, :dependent => :destroy
has_many :lings_properties, :dependent => :destroy
has_many :properties, :through => :lings_properties
has_many :stored_values, :as => :storable, :dependent => :destroy
include Concerns::Wheres
include Concerns::Selects
include Concerns::Orders
scope :parent_ids, -> { select("#{self.table_name}.parent_id") }
scope :with_parent_id, -> (id_or_ids) { where("#{self.table_name}.parent_id IN (:ids)", { ids: id_or_ids }) }
attr_accessor :info
def get_infos
props_in_ling
self
end
def grouped_name
group.ling_name_for_depth(self.depth || 0)
end
def add_property(value, property)
params = {:property_id => property.id, :value => value}
unless lings_properties.exists?(params)
lings_properties.create(params) do |lp|
lp.group = group
end
end
end
def add_property_sureness(value, sureness, property)
params = {:property_id => property.id, :value => value, :sureness => sureness}
unless lings_properties.exists?(params)
lings_properties.create(params) do |lp|
lp.group = group
end
end
end
def parent_depth_check
errors.add(:parent, "must be a #{group.ling0_name.humanize} object") if (depth == 1 && parent && parent.depth != 0)
end
def group_association_match
errors.add(:parent, "#{group.ling0_name.humanize} must belong to the same group as this #{self.grouped_name}") if parent && parent.group != group
end
def available_depth_for_group
errors.add(:depth, "is deeper than allowed in #{group.name}") if group && depth && group.depth_maximum < depth
end
def storable_keys
group.present? ? group.ling_storable_keys : []
end
def store_value!(key_symbol_or_string, value_string)
key = key_symbol_or_string.to_s
if curr = stored_values.with_key(key).first
curr.value = value_string
curr.save
else
StoredValue.create(:key => key.downcase, :value => value_string, :storable => self)
end
end
def stored_value(key_symbol_or_string)
key = key_symbol_or_string.to_s
if storable_keys.include? key
(record = stored_values.select{|sv| sv.key == key}.first).present? ? record.value : ""
else
nil
end
end
def as_json(options={})
super(:only => [:id, :name, :depth, :parent_id])
end
def get_valid_resource
self
end
private
def props_in_group
# look for categories at that depth
cats_at_depth = Category.in_group(group).at_depth(depth)
# sum up all props in cats
@props_total ||= Property.in_group(group).where(:category_id => cats_at_depth).count(:id)
end
def props_in_ling
# Rails.logger.debug "[DEBUG] Depth: #{depth} - #{props_in_group} & #{LingsProperty.in_group(group).where(:ling_id => self.id).count(:id)}"
@info ||= LingsProperty.in_group(group).where(:ling_id => self.id).count(:id) * 100 / (props_in_group > 0 ? props_in_group : 1)
@info = 100 if @info > 100
end
end
| 30.617886 | 149 | 0.69145 |
1832f67e4c95c34570510e3648bfad91d160a35a | 180 | Rails.application.routes.draw do
root 'people#new'
get 'adm0n' => 'people#clear', constraints: {ip: /127.0.0.1/}
resources :people, :except => [:edit, :update, :destroy]
end
| 30 | 63 | 0.666667 |
b92adcf29874803db66533632dcc1db34dd03dd2 | 135 | class CreateImportFieldMappings < ActiveRecord::Migration
def change
create_table :import_field_mappings, &:timestamps
end
end
| 22.5 | 57 | 0.807407 |
b93047a6647fa4f7f24aa836d2abf6743e9a7e7a | 988 | #
# Chef Documentation
# https://docs.chef.io/libraries.html
#
#
# This module name was auto-generated from the cookbook name. This name is a
# single word that starts with a capital letter and then continues to use
# camel-casing throughout the remainder of the name.
#
module Workstation
module Helpers
def pip_package_installed?(package_name)
cmd = Mixlib::ShellOut.new("pip list --format=columns|grep #{package_name}")
# return true if system "pip list --format=columns|grep #{package_name}"
return true if cmd.run_command
end
end
end
#
# The module you have defined may be extended within the recipe to grant the
# recipe the helper methods you define.
#
# Within your recipe you would write:
#
# extend Workstation::Helpers
#
# my_helper_method
#
# You may also add this to a single resource within a recipe:
#
# template '/etc/app.conf' do
# extend Workstation::Helpers
# variables specific_key: my_helper_method
# end
#
| 26 | 82 | 0.717611 |
bf65be8509763abca626afad22568777d110890b | 5,795 | #===============================================================================
# ** Scene Battle
#------------------------------------------------------------------------------
# Add the feature to call the Scan screen in battle
#===============================================================================
class Scene_Battle
include EBJB
#//////////////////////////////////////////////////////////////////////////
# * Public Methods
#//////////////////////////////////////////////////////////////////////////
#--------------------------------------------------------------------------
# * Alias create_info_viewport
#--------------------------------------------------------------------------
alias create_info_viewport_ebjb create_info_viewport unless $@
def create_info_viewport
create_info_viewport_ebjb
@scan_info_window = Window_Info_Help.new(BESTIARY_CONFIG::HELP_WINDOW_X,
BESTIARY_CONFIG::HELP_WINDOW_Y,
BESTIARY_CONFIG::HELP_WINDOW_W,
BESTIARY_CONFIG::HELP_WINDOW_H,
Vocab::bestiary_help_text)
@scan_info_window.cText.align = 1
# Refresh for the text alignment
@scan_info_window.refresh()
@scan_info_window.visible = false
end
#--------------------------------------------------------------------------
# * Alias start
#--------------------------------------------------------------------------
alias start_ebjb start unless $@
def start
start_ebjb
@subSceneScan = Sub_Scene_Scan.new
@subSceneScan.start()
end
#--------------------------------------------------------------------------
# * Alias terminate
#--------------------------------------------------------------------------
alias terminate_ebjb terminate unless $@
def terminate
@scan_info_window.dispose if @scan_info_window != nil
@subSceneScan.terminate if @subSceneScan != nil
terminate_ebjb
end
#--------------------------------------------------------------------------
# * Alias update_target_enemy_selection
#--------------------------------------------------------------------------
alias update_target_enemy_selection_ebjb update_target_enemy_selection unless $@
def update_target_enemy_selection
#------------------------------------------
# If the Target Window is Active
#------------------------------------------
if @target_enemy_window.active
# If already scanned or at least 1 defeated to be able to show scan information
if $game_party.monsters_scanned.include?(@target_enemy_window.enemy.enemy_id) ||
@target_enemy_window.enemy.defeated > 0
if BESTIARY_CONFIG::HELP_WINDOW_ON
# Updates the Z-index to be sure it is over the target window
@scan_info_window.z = @target_enemy_window.z + 1
@scan_info_window.visible = true
end
if Input.trigger?(BESTIARY_CONFIG::ENEMY_SCAN_BUTTON)
Sound.play_decision
@subSceneScan.windows_update(@target_enemy_window.enemy)
@subSceneScan.update_windows_zindex(@target_enemy_window.z)
@subSceneScan.show_windows()
@scan_info_window.visible = false
@target_enemy_window.active = false
elsif Input.trigger?(Input::B)
@scan_info_window.visible = false
elsif Input.trigger?(Input::C)
@scan_info_window.visible = false
end
end
update_target_enemy_selection_ebjb
#------------------------------------------
# If the Scan Window is Active
#------------------------------------------
else
@subSceneScan.update
if @subSceneScan.isClosable
quit_command()
end
end
end
#--------------------------------------------------------------------------
# * Show Action Results
# target : Target
# obj : Skill or item
#--------------------------------------------------------------------------
def display_action_effects(target, obj = nil)
unless target.skipped
line_number = @message_window.line_number
wait(5)
display_critical(target, obj)
display_damage(target, obj)
display_state_changes(target, obj)
display_scan(target, obj)
if line_number == @message_window.line_number
display_failure(target, obj) unless target.states_active?
end
if line_number != @message_window.line_number
wait(30)
end
@message_window.back_to(line_number)
end
end
#--------------------------------------------------------------------------
# * Show Scan
# target : Target
# obj : Skill or item
#--------------------------------------------------------------------------
def display_scan(target, obj = nil)
return if obj == nil
if (BESTIARY_CONFIG::SCAN_SKILLS_ID.include?(obj.id) ||
BESTIARY_CONFIG::SCAN_ITEMS_ID.include?(obj.id))
text = sprintf(Vocab::bestiary_scan_text_1, target.name)
@message_window.add_instant_text(text)
wait(60*BESTIARY_CONFIG::SCAN_MSG_TIMEOUT)
@message_window.add_instant_text(Vocab::bestiary_scan_text_2)
wait(30*BESTIARY_CONFIG::SCAN_MSG_2_TIMEOUT)
end
end
#//////////////////////////////////////////////////////////////////////////
# * Scene Commands
#//////////////////////////////////////////////////////////////////////////
#--------------------------------------------------------------------------
# * Quit command
#--------------------------------------------------------------------------
def quit_command()
@subSceneScan.hide_windows()
@target_enemy_window.active = true
end
private :quit_command
end
| 36.677215 | 86 | 0.456946 |
797f22a758488d1a4e8d1693fd6a2b04f19910b6 | 6,195 | =begin rdoc
= Property Registry
This module allows model classes to register properties that have setter and getter methods,
and are earmarked to be included in calls between instances of those classes and the API.
Properties are registered using a DataMapper-style syntax (ActiveRecord-style autodiscovery not
being possible).
=end
module Videojuicer
module Resource
module PropertyRegistry
def self.included(base)
# Class-level inheritable reader
base.extend(SingletonMethods)
base.class_eval do
@attributes = {}
class << self
attr_accessor :attributes
end
end
base.property :id, Integer
end
# Allow subclasses of each resource to get the attributes accessor
def self.inherited(subclass)
v = "@attributes"
subclass.instance_variable_set(v, instance_variable_get(v))
end
def initialize(attrs={})
set_default_attributes
self.attributes = attrs
end
def attributes
@attributes ||= {}
@attributes
end
def attributes=(arg)
raise ArgumentError, "Attributes must be set as a Hash" unless arg.is_a?(Hash)
arg.each do |key, value|
#set any attributes, ignoring all those that are invalid
self.send("#{key}=", value) rescue invalid_attributes[key] = value
end
end
# Sets the attributes to their default values, marking only those values with defaults as being dirty.
def set_default_attributes
self.attributes = default_attributes
self.attributes.each do |key, value|
# Scrub the attributes if there's no value
attr_clean!(key) unless value
end
end
def default_attributes
d = {}
self.class.attributes.each do |key, props|
d[key] = props[:default] || nil
end
return d
end
# Returns the hash of currently-dirty attributes complete with values
def dirty_attributes
o = {}
@dirty_attribute_keys ||= []
@dirty_attribute_keys.each do |key|
o[key] = attr_get(key)
end
o
end
# Clears the array of dirty attribute keys
def clean_dirty_attributes!
@dirty_attribute_keys = []
end
# Returns an array of keys for attributes that are currently dirty
def dirty_attribute_keys
@dirty_attribute_keys ||= []
@dirty_attribute_keys
end
# Returns true if the specified attribute is currently dirty
def attr_dirty?(key)
@dirty_attribute_keys ||= []
@dirty_attribute_keys.include?(key.to_sym)
end
# Mark the specified attribute as dirty.
def attr_dirty!(key)
@dirty_attribute_keys ||= []
@dirty_attribute_keys << key.to_sym
@dirty_attribute_keys.uniq!
end
# Mark the specified attribute as no longer being dirty.
def attr_clean!(key)
@dirty_attribute_keys ||= []
@dirty_attribute_keys.delete_if {|k| k == key.to_sym }
end
def attr_get(key)
key = key.to_sym
attributes[key]
end
def attr_set(key, value)
key = key.to_sym
attr_dirty!(key)
attributes[key] = coerce_value(key, value)
end
# Takes what is normally a string and coerces it into the correct object type for the
# attribute's actual type.
def coerce_value(key, value)
return value unless value
klass = self.class.attributes[key][:class]
if value.is_a?(String) and !value.empty?
# In-built types
if klass.kind_of?(Videojuicer::Resource::Types::Base)
return klass.new(value).dump
end
# Dates
if klass.respond_to?(:parse)
return klass.parse(value) rescue raise "Invalid date: #{value.inspect}"
end
elsif value.is_a? Hash and value.any?
if klass == DateTime
if value.is_a?(Hash)
year = value[:year]
month = value[:month]
day = value[:day]
hour = value[:hour] or "00"
minute = value[:minute] or "00"
value = klass.parse("#{year}-#{month}-#{day}T#{hour}:#{minute}:00+00:00")
else
raise ArgumentError, "Please supply a DateTime, Hash keyed w/ [:day, :month, :year, :hour, :minute] or a String that can be coerced into a date"
end
end
end
return value
end
# Returns a hash of the attributes for this object, minus the
# private attributes that should not be included in the response.
def returnable_attributes
attrs = dirty_attributes.dup
self.class.attributes.select {|name, props| props[:writer] != :public}.each do |name, props|
attrs.delete name
end
attrs.delete(:id)
attrs
end
def invalid_attributes
@invalid_attributes ||= {}
end
module SingletonMethods
# Registers an attribute using a datamapper-style syntax.
# Creates setter and getter methods
def property(prop_name, klass, options={})
# Can't raise twice.
prop_name = prop_name.to_sym
raise ArgumentError, "Property #{prop_name} already registered." if self.attributes.include?(prop_name)
options = {:class=>klass, :writer=>:public}.merge(options)
# Register with the class
self.attributes[prop_name] = options
# Create setter methods
define_method prop_name do
attr_get(prop_name)
end
private if options[:writer] == :private
protected if options[:writer] == :protected
define_method "#{prop_name}=" do |arg|
attr_set(prop_name, arg)
end
public
end
end
end
end
end | 30.975 | 158 | 0.580468 |
91d3648d1d0333c298550d0bcc7b285c0009b17b | 746 | cask 'extraterm' do
version '0.49.0'
sha256 '66ec873fcf3530935bbbff76dbd655d85fc765e448e41fe20d6a4f708ca4d5bc'
# github.com/sedwards2009/extraterm/ was verified as official when first introduced to the cask
url "https://github.com/sedwards2009/extraterm/releases/download/v#{version}/extraterm-#{version}-darwin-x64.zip"
appcast 'https://github.com/sedwards2009/extraterm/releases.atom'
name 'extraterm'
homepage 'https://extraterm.org/'
app "extraterm-#{version}-darwin-x64/extraterm.app"
zap trash: [
'~/Library/Application Support/extraterm',
'~/Library/Preferences/com.electron.extraterm.helper.plist',
'~/Library/Preferences/com.electron.extraterm.plist',
]
end
| 39.263158 | 115 | 0.715818 |
d556813254433b128adc118f5656d372d263d0ab | 177 | json.array!(@orders) do |order|
json.extract! order, :id, :name, :address, :phone, :ship_cost, :total_cost, :discount, :food_id
json.url order_url(order, format: :json)
end
| 35.4 | 97 | 0.700565 |
f849617299a641b8e8b2c8d0081b19fd4e97b354 | 2,831 | class SearchNotesQueries
SCOPE_ALL_STUDENTS = 'SCOPE_ALL_STUDENTS'
SCOPE_FEED_STUDENTS = 'SCOPE_FEED_STUDENTS'
SCOPE_MY_NOTES_ONLY = 'SCOPE_MY_NOTES_ONLY'
MAX_YEARS_BACK = 4
MAX_LIMIT = 100
def self.clamped_with_defaults(query = {})
query.merge({
scope_key: query[:scope_key] || SearchNotesQueries::SCOPE_ALL_STUDENTS,
start_time: [query[:start_time], Time.now - MAX_YEARS_BACK.years].max, # clamp
limit: [query[:limit], MAX_LIMIT].min # clamp
})
end
def initialize(educator)
@educator = educator
end
def query(passed_query = {})
clamped_query = SearchNotesQueries.clamped_with_defaults(passed_query)
# query for both the total number of records, and then limit
# what is actually returned.
#
# by deferring the `limit` until after the authorization scoping,
# this fetches much more data than would be fetched otherwise,
# and this is a good place to optimize if we need to
query_scope = authorized_query_scope(clamped_query)
all_results_size = query_scope.size
event_notes = query_scope.first(clamped_query[:limit])
# serialize, and return both actual data and metadata about
# total other records
event_note_cards_json = event_notes.map {|event_note| FeedCard.event_note_card(event_note) }
[event_note_cards_json, all_results_size, clamped_query]
end
private
def authorized_query_scope(clamped_query)
start_time, end_time, scope_key, text, grade, house, event_note_type_id = clamped_query.values_at(*[
:start_time,
:end_time,
:scope_key,
:text,
:grade,
:house,
:event_note_type_id
])
authorizer = Authorizer.new(@educator)
authorizer.authorized do
qs = EventNote.all
.where(is_restricted: false)
.where('recorded_at > ?', start_time)
.where('recorded_at < ?', end_time)
.order(recorded_at: :desc)
.includes(:educator, student: [:homeroom, :school])
# query params
qs = qs.joins(:student).where(students: {grade: grade}) if grade.present?
qs = qs.joins(:student).where(students: {house: house}) if house.present?
qs = qs.where(event_note_type_id: event_note_type_id) if event_note_type_id.present?
qs = qs.where('to_tsvector(text) @@ plainto_tsquery(?)', text) if text.present?
# adjust scope
qs = qs.where(educator_id: @educator.id) if scope_key == SCOPE_MY_NOTES_ONLY
qs = qs.where(student_id: feed_student_ids(qs)) if scope_key == SCOPE_FEED_STUDENTS
qs
end
end
# causes more queries
def feed_student_ids(qs)
note_student_ids = qs.pluck(:student_id).uniq
note_students = Student.where(id: note_student_ids)
feed_students = FeedFilter.new(educator).filter_for_educator(note_students)
feed_students.pluck(:id)
end
end
| 34.108434 | 104 | 0.703992 |
62c43d3a95f65e68703cddd414e1956ebaac3a7d | 4,306 | # This file contains support for the now-deprecated +config+ method that the engines
# plugin provided before version 1.2. Instead of using this, plugin authors are
# now encouraged to create their own Module configuration mechanisms; the
# +mattr_accessor+ mechanism provided by ActiveSupport is ideal for this:
#
# module MyPlugin
# mattr_accessor :config_value
# self.config_value = "default"
# end
#
# == Using the deprecated config method
#
# If you require the config method to be present, change your <tt>environment.rb</tt>
# file such that the very top of the file looks like this:
#
# require File.join(File.dirname(__FILE__), 'boot')
# require File.join(RAILS_ROOT, "vendor", "plugins", "engines",
# "lib", "engines", "deprecated_config_support")
#
# Adds the +config+ and +default_constant+ methods to Module.
#
# *IMPORTANT NOTE* - these methods are deprecated. Only use them when you have no
# other choice. See link:files/lib/engines/deprecated_config_support_rb.html for more
# information.
class Module
# Defines a constant within a module/class ONLY if that constant does
# not already exist.
#
# This can be used to implement defaults in plugins/engines/libraries, e.g.
# if a plugin module exists:
# module MyPlugin
# default_constant :MyDefault, "the_default_value"
# end
#
# then developers can override this default by defining that constant at
# some point *before* the module/plugin gets loaded (such as environment.rb)
def default_constant(name, value)
if !(name.is_a?(String) or name.is_a?(Symbol))
raise "Cannot use a #{name.class.name} ['#{name}'] object as a constant name"
end
if !self.const_defined?(name)
self.class_eval("#{name} = #{value.inspect}")
end
end
# A mechanism for defining configuration of Modules. With this
# mechanism, default values for configuration can be provided within shareable
# code, and the end user can customise the configuration without having to
# provide all values.
#
# Example:
#
# module MyModule
# config :param_one, "some value"
# config :param_two, 12345
# end
#
# Those values can now be accessed by the following method
#
# MyModule.config :param_one
# => "some value"
# MyModule.config :param_two
# => 12345
#
# ... or, if you have overrriden the method 'config'
#
# MyModule::CONFIG[:param_one]
# => "some value"
# MyModule::CONFIG[:param_two]
# => 12345
#
# Once a value is stored in the configuration, it will not be altered
# by subsequent assignments, unless a special flag is given:
#
# (later on in your code, most likely in another file)
# module MyModule
# config :param_one, "another value"
# config :param_two, 98765, :force
# end
#
# The configuration is now:
#
# MyModule.config :param_one
# => "some value" # not changed
# MyModule.config :param_two
# => 98765
#
# Configuration values can also be given as a Hash:
#
# MyModule.config :param1 => 'value1', :param2 => 'value2'
#
# Setting of these values can also be forced:
#
# MyModule.config :param1 => 'value3', :param2 => 'value4', :force => true
#
# A value of anything other than false or nil given for the :force key will
# result in the new values *always* being set.
def config(*args)
raise "config expects at least one argument" if args.empty?
# extract the arguments
if args[0].is_a?(Hash)
override = args[0][:force]
args[0].delete(:force)
args[0].each { |key, value| _handle_config(key, value, override)}
else
_handle_config(*args)
end
end
private
# Actually set the config values
def _handle_config(name, value=nil, override=false)
if !self.const_defined?("CONFIG")
self.class_eval("CONFIG = {}")
end
if value != nil
if override or self::CONFIG[name] == nil
self::CONFIG[name] = value
end
else
# if we pass an array of config keys to config(),
# get the array of values back
if name.is_a? Array
name.map { |c| self::CONFIG[c] }
else
self::CONFIG[name]
end
end
end
end | 31.896296 | 85 | 0.653275 |
080c10568dd7dec6e145506ef20f7c74522d5f1b | 2,929 | RSpec::Support.require_rspec_core "formatters/base_formatter"
require 'json'
module RSpec
module Core
module Formatters
# @private
class JsonFormatter < BaseFormatter
Formatters.register self, :message, :dump_summary, :dump_profile, :stop, :close
attr_reader :output_hash
def initialize(output)
super
@output_hash = {}
end
def message(notification)
(@output_hash[:messages] ||= []) << notification.message
end
def dump_summary(summary)
@output_hash[:summary] = {
:duration => summary.duration,
:example_count => summary.example_count,
:failure_count => summary.failure_count,
:pending_count => summary.pending_count
}
@output_hash[:summary_line] = summary.totals_line
end
def stop(notification)
@output_hash[:examples] = notification.examples.map do |example|
format_example(example).tap do |hash|
e = example.exception
if e
hash[:exception] = {
:class => e.class.name,
:message => e.message,
:backtrace => e.backtrace,
}
end
end
end
end
def close(_notification)
output.write @output_hash.to_json
output.close if IO === output && output != $stdout
end
def dump_profile(profile)
@output_hash[:profile] = {}
dump_profile_slowest_examples(profile)
dump_profile_slowest_example_groups(profile)
end
# @api private
def dump_profile_slowest_examples(profile)
@output_hash[:profile] = {}
sorted_examples = profile.slowest_examples
@output_hash[:profile][:examples] = sorted_examples.map do |example|
format_example(example).tap do |hash|
hash[:run_time] = example.execution_result.run_time
end
end
@output_hash[:profile][:slowest] = profile.slow_duration
@output_hash[:profile][:total] = profile.duration
end
# @api private
def dump_profile_slowest_example_groups(profile)
@output_hash[:profile] ||= {}
@output_hash[:profile][:groups] = profile.slowest_groups.map do |loc, hash|
hash.update(:location => loc)
end
end
private
def format_example(example)
{
:description => example.description,
:full_description => example.full_description,
:status => example.execution_result.status.to_s,
:file_path => example.metadata[:file_path],
:line_number => example.metadata[:line_number],
:run_time => example.execution_result.run_time
}
end
end
end
end
end
| 30.831579 | 87 | 0.571526 |
62bdbcad97cd68c7acb53d8a3eb4aceb781d5f19 | 8,298 | # Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require 'logger'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# A resource that is created or operated on by an asynchronous operation that is tracked by a work request.
#
class ApplicationMigration::Models::WorkRequestResource
ACTION_TYPE_ENUM = [
ACTION_TYPE_CREATED = 'CREATED'.freeze,
ACTION_TYPE_UPDATED = 'UPDATED'.freeze,
ACTION_TYPE_DELETED = 'DELETED'.freeze,
ACTION_TYPE_RELATED = 'RELATED'.freeze,
ACTION_TYPE_IN_PROGRESS = 'IN_PROGRESS'.freeze,
ACTION_TYPE_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
].freeze
# **[Required]** The way in which this resource was affected by the operation that spawned the work request.
#
# @return [String]
attr_reader :action_type
# **[Required]** The resource type the work request affects.
# @return [String]
attr_accessor :entity_type
# **[Required]** An [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) or other unique identifier for the resource.
#
# @return [String]
attr_accessor :identifier
# The URI path that you can use for a GET request to access the resource metadata.
# @return [String]
attr_accessor :entity_uri
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'action_type': :'actionType',
'entity_type': :'entityType',
'identifier': :'identifier',
'entity_uri': :'entityUri'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'action_type': :'String',
'entity_type': :'String',
'identifier': :'String',
'entity_uri': :'String'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [String] :action_type The value to assign to the {#action_type} property
# @option attributes [String] :entity_type The value to assign to the {#entity_type} property
# @option attributes [String] :identifier The value to assign to the {#identifier} property
# @option attributes [String] :entity_uri The value to assign to the {#entity_uri} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.action_type = attributes[:'actionType'] if attributes[:'actionType']
raise 'You cannot provide both :actionType and :action_type' if attributes.key?(:'actionType') && attributes.key?(:'action_type')
self.action_type = attributes[:'action_type'] if attributes[:'action_type']
self.entity_type = attributes[:'entityType'] if attributes[:'entityType']
raise 'You cannot provide both :entityType and :entity_type' if attributes.key?(:'entityType') && attributes.key?(:'entity_type')
self.entity_type = attributes[:'entity_type'] if attributes[:'entity_type']
self.identifier = attributes[:'identifier'] if attributes[:'identifier']
self.entity_uri = attributes[:'entityUri'] if attributes[:'entityUri']
raise 'You cannot provide both :entityUri and :entity_uri' if attributes.key?(:'entityUri') && attributes.key?(:'entity_uri')
self.entity_uri = attributes[:'entity_uri'] if attributes[:'entity_uri']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Custom attribute writer method checking allowed values (enum).
# @param [Object] action_type Object to be assigned
def action_type=(action_type)
# rubocop:disable Style/ConditionalAssignment
if action_type && !ACTION_TYPE_ENUM.include?(action_type)
OCI.logger.debug("Unknown value for 'action_type' [" + action_type + "]. Mapping to 'ACTION_TYPE_UNKNOWN_ENUM_VALUE'") if OCI.logger
@action_type = ACTION_TYPE_UNKNOWN_ENUM_VALUE
else
@action_type = action_type
end
# rubocop:enable Style/ConditionalAssignment
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
action_type == other.action_type &&
entity_type == other.entity_type &&
identifier == other.identifier &&
entity_uri == other.entity_uri
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[action_type, entity_type, identifier, entity_uri].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
# check to ensure the input is an array given that the the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 37.890411 | 245 | 0.680405 |
87a9d7364ccadb514bd2bec73462c81b20381015 | 1,632 | require 'spec_helper_acceptance'
# Acceptance test: applies a minimal puppet-openstack integration manifest that
# configures ec2api (RedHat only — Ubuntu has no package) and checks idempotency.
describe 'basic ec2api' do
  context 'default parameters' do
    it 'should work with no errors' do
      # Puppet manifest under test; NOTE this heredoc is a runtime string —
      # the body below is Puppet DSL, not Ruby.
      pp= <<-EOS
      include ::openstack_integration
      include ::openstack_integration::repos
      include ::openstack_integration::rabbitmq
      include ::openstack_integration::mysql
      include ::openstack_integration::keystone
      # Ec2api resources
      class { '::ec2api::keystone::auth':
        password => 'a_big_secret',
      }
      class { '::ec2api::db::mysql':
        password => 'a_big_secret',
      }
      case $::osfamily {
        'Debian': {
          warning('Ec2api is not yet packaged on Ubuntu systems.')
        }
        'RedHat': {
          class { '::ec2api::db':
            database_connection => 'mysql://ec2api:[email protected]/ec2api?charset=utf8',
          }
          class { '::ec2api::logging':
            debug => true,
          }
          class { '::ec2api': }
          class { '::ec2api::keystone::authtoken':
            password => 'a_big_secret',
          }
          class { '::ec2api::api': }
          include ::ec2api::metadata
        }
        default: {
          fail("Unsupported osfamily (${::osfamily})")
        }
      }
      EOS
      # Run it twice to test for idempotency
      apply_manifest(pp, :catch_failures => true)
      apply_manifest(pp, :catch_changes => true)
    end
    # Port checks only make sense where ec2api actually got installed (RedHat).
    # 8788/8789 are the ports the services listen on in this setup.
    if os[:family].casecmp('RedHat') == 0
      describe port(8788) do
        it { is_expected.to be_listening }
      end
      describe port(8789) do
        it { is_expected.to be_listening }
      end
    end
  end
end
| 26.322581 | 95 | 0.555147 |
5d6209c93df7d6bee04373010198b12d1d2ba968 | 2,547 | require 'leveldb/leveldb' # the c extension
# Ruby-facing wrapper around the LevelDB C extension. The core methods
# (`make`, `get`, `put`, `exists?`, `close`, iterator internals, ...) are
# defined in the native extension; this module adds constructors, Enumerable
# behavior and option constants on top.
module LevelDB
  class DB
    include Enumerable
    class << self
      ## Loads or creates a LevelDB database as necessary, stored on disk at
      ## +pathname+.
      ##
      ## See #make for possible options.
      def new pathname, options={}
        # `make` is provided by the C extension; create-or-open semantics.
        make path_string(pathname),
             options.merge(:create_if_missing => true,
                           :error_if_exists => false)
      end
      ## Creates a new LevelDB database stored on disk at +pathname+. Throws an
      ## exception if the database already exists.
      ##
      ## See #make for possible options.
      def create pathname, options={}
        make path_string(pathname),
             options.merge(:create_if_missing => true,
                           :error_if_exists => true)
      end
      ## Loads a LevelDB database stored on disk at +pathname+. Throws an
      ## exception unless the database already exists.
      def load pathname
        make path_string(pathname),
             { :create_if_missing => false, :error_if_exists => false }
      end
      private
      ## Coerces the argument into a String for use as a filename/-path.
      ## Uses File.path when available (accepts Pathname etc.), else #to_str.
      def path_string pathname
        File.respond_to?(:path) ? File.path(pathname) : pathname.to_str
      end
    end
    attr_reader :pathname
    attr_reader :options
    # Convenience aliases over the C-extension `exists?` predicate.
    alias :includes? :exists?
    alias :contains? :exists?
    alias :member? :exists?
    alias :[] :get
    alias :[]= :put
    alias :close! :close
    # Iterates key/value pairs; without a block returns the Iterator itself.
    def each(*args, &block)
      i = iterator(*args)
      i.each(&block) if block
      i
    end
    def iterator(*args); Iterator.new self, *args end
    def keys; map { |k, v| k } end
    def values; map { |k, v| v } end
    def inspect
      %(<#{self.class} #{@pathname.inspect}>)
    end
  end
  class Iterator
    include Enumerable
    attr_reader :db, :from, :to
    # Construction is routed through the native `make` factory.
    def self.new(db, opts={})
      make db, opts
    end
    def reversed?; @reversed end
    def inspect
      %(<#{self.class} #{@db.inspect} @from=#{@from.inspect} @to=#{@to.inspect}#{' (reversed)' if @reversed}>)
    end
  end
  class WriteBatch
    class << self
      # Batches are only created internally (by the extension), never directly.
      private :new
    end
  end
  class Options
    # Defaults mirroring LevelDB's own option defaults.
    DEFAULT_MAX_OPEN_FILES = 1000
    DEFAULT_WRITE_BUFFER_SIZE = 4 * 1024 * 1024
    DEFAULT_BLOCK_SIZE = 4 * 1024
    DEFAULT_BLOCK_RESTART_INTERVAL = 16
    DEFAULT_COMPRESSION = LevelDB::CompressionType::SnappyCompression
    attr_reader :create_if_missing, :error_if_exists,
                :block_cache_size, :paranoid_checks,
                :write_buffer_size, :max_open_files,
                :block_size, :block_restart_interval,
                :compression
  end
end # module LevelDB
| 24.728155 | 108 | 0.64625 |
7a9098603d06c640cc909a3c28f5283ba79f029a | 1,471 | module Ru
class Array
def initialize(array)
@data = array.to_a
end
def each_line
Ru::Iterator.new(self)
end
def files
@data.map! do |line|
Ru::File.new(line)
end
self
end
def format(format='l')
@data.map! do |item|
item.format(format)
end
self
end
def grep(pattern)
if pattern.kind_of?(String)
pattern = Regexp.new(pattern)
end
select! do |item|
item.to_s =~ pattern
end
self
end
def map(method=nil, *args, &block)
if method.nil? && !block_given?
to_a.map
elsif method.nil?
array = to_a.map(&block)
self.class.new(array)
else
array = to_a.map { |item| item.send(method, *args) }
self.class.new(array)
end
end
def select(*args, &block)
delegate_to_array(:select, *args, &block)
end
def to_a
@data
end
def to_ary
to_a
end
def to_s
self.to_a.join("\n")
end
def to_self
self
end
def ==(other)
self.to_a == other.to_a
end
def method_missing(method, *args, &block)
delegate_to_array(method, *args, &block)
end
private
def delegate_to_array(method, *args, &block)
result = to_a.send(method, *args, &block)
if result.kind_of?(Enumerable)
self.class.new(result)
else
result
end
end
end
end
| 16.908046 | 60 | 0.542488 |
083c6d5992232a4d0a6bd6b0f644af32f9ce110e | 742 | module Events
module CardEvents
class AnEvilEmpire < Instruction
def action
instructions = []
instructions << Instructions::AwardVictoryPoints.new(
player: US,
amount: 1
)
instructions << Instructions::CancelEffect.new(
card_ref: "FlowerPower"
)
instructions << Instructions::PreventPlayOfEvent.new(
card_ref: "FlowerPower",
reason: "An Evil Empire is in effect."
)
instructions << Instructions::PlaceInEffect.new(
card_ref: "AnEvilEmpire"
)
instructions << Instructions::Remove.new(
card_ref: "AnEvilEmpire"
)
instructions
end
end
end
end
| 19.526316 | 61 | 0.571429 |
1a3044c2e6c190ea2983c81bce705c7e3cba85c4 | 1,625 | #: * `unpack` [`--git`|`--patch`] [`--destdir=`<path>] <formulae>:
#: Unpack the source files for <formulae> into subdirectories of the current
#: working directory. If `--destdir=`<path> is given, the subdirectories will
#: be created in the directory named by `<path>` instead.
#:
#: If `--patch` is passed, patches for <formulae> will be applied to the
#: unpacked source.
#:
#: If `--git` is passed, a Git repository will be initalized in the unpacked
#: source. This is useful for creating patches for the software.
require "stringio"
require "formula"
module Homebrew
  # Implements `brew unpack`: stages each formula's source into
  # <unpack_dir>/<name>-<version>, optionally applying patches (--patch)
  # and initializing a git repo (--git). See the usage comment at file top.
  def unpack
    formulae = ARGV.formulae
    raise FormulaUnspecifiedError if formulae.empty?
    # --destdir overrides the default of the current working directory.
    if dir = ARGV.value("destdir")
      unpack_dir = Pathname.new(dir).expand_path
      unpack_dir.mkpath
    else
      unpack_dir = Pathname.pwd
    end
    raise "Cannot write to #{unpack_dir}" unless unpack_dir.writable_real?
    formulae.each do |f|
      stage_dir = unpack_dir.join("#{f.name}-#{f.version}")
      # Refuse to clobber an existing stage dir unless --force was given.
      if stage_dir.exist?
        raise "Destination #{stage_dir} already exists!" unless ARGV.force?
        rm_rf stage_dir
      end
      oh1 "Unpacking #{f.full_name} to: #{stage_dir}"
      ENV["VERBOSE"] = "1" # show messages about tar
      # f.brew stages the download in a temp dir and yields with cwd inside it.
      f.brew do
        f.patch if ARGV.flag?("--patch")
        cp_r getwd, stage_dir, preserve: true
      end
      ENV["VERBOSE"] = nil
      next unless ARGV.git?
      # --git: turn the unpacked tree into a repo with one initial commit,
      # handy for producing patches against the pristine source.
      ohai "Setting up git repository"
      cd stage_dir
      system "git", "init", "-q"
      system "git", "add", "-A"
      system "git", "commit", "-q", "-m", "brew-unpack"
    end
  end
end
| 29.545455 | 80 | 0.632 |
acde932a19000099bb82330d3326ac7ebdaddbe1 | 5,083 | require 'uri'
require 'hashr'
require 'travis/config'
require 'core_ext/string/to_bool'
module Travis
module Build
class Config < Travis::Config
extend Hashr::Env
self.env_namespace = 'travis_build'
def ghc_version_aliases_hash
@ghc_version_aliases_hash ||= version_aliases_hash('ghc')
end
def sc_data
@sc_data ||= JSON.parse(
Travis::Build.top.join('tmp/sc_data.json').read.output_safe
)
end
define(
api_token: ENV.fetch(
'TRAVIS_BUILD_API_TOKEN', ENV.fetch('API_TOKEN', '')
),
app_host: ENV.fetch('TRAVIS_APP_HOST', ''),
apt_mirrors: {
ec2: ENV.fetch(
'TRAVIS_BUILD_APT_MIRRORS_EC2',
'http://us-east-1.ec2.archive.ubuntu.com/ubuntu/'
),
gce: ENV.fetch(
'TRAVIS_BUILD_APT_MIRRORS_GCE',
'http://us-central1.gce.archive.ubuntu.com/ubuntu/'
),
packet: ENV.fetch(
'TRAVIS_BUILD_APT_MIRRORS_PACKET',
'http://archive.ubuntu.com/ubuntu/'
),
unknown: ENV.fetch(
'TRAVIS_BUILD_APT_MIRRORS_UNKNOWN',
'http://archive.ubuntu.com/ubuntu/'
)
},
apt_package_safelist: {
precise: ENV.fetch('TRAVIS_BUILD_APT_PACKAGE_SAFELIST_PRECISE', ''),
trusty: ENV.fetch('TRAVIS_BUILD_APT_PACKAGE_SAFELIST_TRUSTY', ''),
xenial: ENV.fetch('TRAVIS_BUILD_APT_PACKAGE_SAFELIST_XENIAL', ''),
},
apt_proxy: ENV.fetch('TRAVIS_BUILD_APT_PROXY', ''),
apt_source_safelist: {
precise: ENV.fetch('TRAVIS_BUILD_APT_SOURCE_SAFELIST_PRECISE', ''),
trusty: ENV.fetch('TRAVIS_BUILD_APT_SOURCE_SAFELIST_TRUSTY', ''),
xenial: ENV.fetch('TRAVIS_BUILD_APT_SOURCE_SAFELIST_XENIAL', ''),
},
apt_source_safelist_key_url_template: ENV.fetch(
'TRAVIS_BUILD_APT_SOURCE_SAFELIST_KEY_URL_TEMPLATE',
'https://%{app_host}/files/gpg/%{source_alias}.asc'
),
apt_safelist_skip: ENV.fetch('TRAVIS_BUILD_APT_SAFELIST_SKIP', '').to_bool,
auth_disabled: ENV.fetch('TRAVIS_BUILD_AUTH_DISABLED', '').to_bool,
cabal_default: ENV.fetch('TRAVIS_BUILD_CABAL_DEFAULT', '2.0'),
enable_debug_tools: ENV.fetch(
'TRAVIS_BUILD_ENABLE_DEBUG_TOOLS',
ENV.fetch('TRAVIS_ENABLE_DEBUG_TOOLS', '')
),
enable_infra_detection: ENV.fetch(
'TRAVIS_BUILD_ENABLE_INFRA_DETECTION', ''
).to_bool,
etc_hosts_pinning: ENV.fetch(
'TRAVIS_BUILD_ETC_HOSTS_PINNING', ENV.fetch('ETC_HOSTS_PINNING', '')
),
ghc_default: ENV.fetch('TRAVIS_BUILD_GHC_DEFAULT', '7.10.3'),
gimme: {
url: ENV.fetch(
'TRAVIS_BUILD_GIMME_URL',
'https://raw.githubusercontent.com/travis-ci/gimme/v1.3.0/gimme'
)
},
go_version: ENV.fetch('TRAVIS_BUILD_GO_VERSION', '1.10.x'),
internal_ruby_regex: ENV.fetch(
'TRAVIS_BUILD_INTERNAL_RUBY_REGEX',
'^ruby-(2\.[0-4]\.[0-9]|1\.9\.3)'
),
librato: {
email: ENV.fetch(
'TRAVIS_BUILD_LIBRATO_EMAIL', ENV.fetch('LIBRATO_EMAIL', '')
),
source: ENV.fetch(
'TRAVIS_BUILD_LIBRATO_SOURCE', ENV.fetch('LIBRATO_SOURCE', '')
),
token: ENV.fetch(
'TRAVIS_BUILD_LIBRATO_TOKEN', ENV.fetch('LIBRATO_TOKEN', '')
),
},
network: {
wait_retries: Integer(ENV.fetch(
'TRAVIS_BUILD_NETWORK_WAIT_RETRIES',
ENV.fetch('NETWORK_WAIT_RETRIES', '20')
)),
check_urls: ENV.fetch(
'TRAVIS_BUILD_NETWORK_CHECK_URLS',
ENV.fetch(
'NETWORK_CHECK_URLS',
'http://%{app_host}/empty.txt?job_id=%{job_id}&repo=%{repo}'
)
).split(',').map { |s| URI.unescape(s.strip) }
},
sentry_dsn: ENV.fetch(
'TRAVIS_BUILD_SENTRY_DSN', ENV.fetch('SENTRY_DSN', '')
),
tainted_node_logging_enabled: false,
update_glibc: ENV.fetch(
'TRAVIS_BUILD_UPDATE_GLIBC',
ENV.fetch('TRAVIS_UPDATE_GLIBC', ENV.fetch('UPDATE_GLIBC', 'false'))
).to_bool,
windows_langs: ENV.fetch(
'TRAVIS_WINDOWS_LANGS',
%w(
bash
csharp
go
node_js
powershell
rust
script
sh
shell
).join(",")
).split(/,/),
dump_backtrace: ENV.fetch(
'TRAVIS_BUILD_DUMP_BACKTRACE', ENV.fetch('DUMP_BACKTRACE', 'false')
).to_bool
)
default(
access: %i(key),
)
private
def version_aliases_hash(name)
JSON.parse(
File.read(
File.expand_path(
"../../../../public/version-aliases/#{name}.json",
__FILE__
)
).output_safe
)
end
end
end
end
| 32.375796 | 83 | 0.560299 |
b9e718cda0305e50ad5fbd864118ce9f0e4ca5c9 | 245 | class UserMailer < ApplicationMailer
def account_activation(user)
@user = user
mail to: user.email, subject: "Account activation"
end
def password_reset(user)
@user = user
mail to: user.email, subject: "Password reset"
end
end
| 18.846154 | 54 | 0.722449 |
6a7ebc83e7919d8dfa38eb45f158b474aa6a1a89 | 302 | class SwitchHostPathIndexToCanonicalPath < ActiveRecord::Migration
def up
add_index :host_paths, :canonical_path
remove_index :host_paths, column: [:c14n_path_hash]
end
def down
add_index :host_paths, :c14n_path_hash
remove_index :host_paths, column: [:canonical_path]
end
end
| 25.166667 | 66 | 0.764901 |
873b44d991027b9674f56bc832201fc7eb5c6234 | 2,163 | # frozen_string_literal: true
# `ApplicationControler` is set to always return JSON. In this controller we want to return
# html and pdf, so we subclass ActionController directly
class Public::PostingsController < ActionController::Base
    include Response
    include TransactionHandler
    # /public/postings/<id>
    # Returns the posting's survey, data prefilled for the active user, and
    # its open status. Renders 404 via valid_posting? if the token is unknown.
    def show
        return unless valid_posting?(url_token: show_params[:id])
        active_user = ActiveUserService.active_user request
        posting_service = PostingService.new(posting: @posting)
        render_success(
            {
                survey: posting_service.survey,
                prefilled_data: posting_service.prefill(user: active_user),
                open_status: @posting.open_status
            }
        )
    end
    # /public/postings/<id>/submit
    # Saves the submitted answers for the active user, sends a confirmation
    # email, and responds with the (now updated) prefill data.
    def submit
        return unless valid_posting?(url_token: submit_params[:posting_id])
        active_user = ActiveUserService.active_user request
        posting_service = PostingService.new(posting: @posting)
        posting_service.process_answers(
            user: active_user, answers: submit_params[:answers]
        )
        posting_service.save_answers!
        PostingMailer.email_application_confirmation(
            posting_service.application
        ).deliver_now!
        render_success posting_service.prefill(user: active_user)
    rescue StandardError => e
        # Any failure in processing/saving/mailing becomes a uniform error JSON.
        render_error(message: 'Error submitting application', error: e)
    end
    private
    def show_params
        params.permit(:id, :format)
    end
    def submit_params
        # permit! is deliberate: answers is a free-form survey payload.
        params.slice(:posting_id, :answers).permit!
    end
    # tests to see if a valid posting exists corresponding to the specified
    # url token. Will render a 404 if not found. Should be used as
    # return unless valid_posting?(...)
    #
    # Stores the found posting in `@posting`
    def valid_posting?(url_token: nil)
        posting = Posting.find_by(url_token: url_token)
        unless posting
            render status: 404,
                   inline: "No posting found with id='#{url_token}'"
            return false
        end
        @posting = posting
    end
end
| 30.464789 | 91 | 0.657883 |
7976e1d6a0230953f4cea81c35ce9e4363dcabc9 | 1,178 | module Statsd
class Runner
def self.default_config
{
:host => "0.0.0.0",
:port => 8125,
:daemonize => false,
:debug => false,
:flush_interval => 10,
:threshold_pct => 90,
:graphite_host => '127.0.0.1',
:graphite_port => 2003
}
end
def self.run!(opts = {})
config = self.default_config.merge(opts)
EM::run do
server = EM::open_datagram_socket(config[:host], config[:port], Server, config)
EM::add_periodic_timer(config[:flush_interval]) do
begin
EM::connect(config[:graphite_host], config[:graphite_port], Publisher, server)
rescue
$stderr.puts "Unable to connect to %s:%s" % [ config[:graphite_host], config[:graphite_port] ] if config[:debug]
end
end
if config[:daemonize]
app_name = 'statsd %s:%d' % [ config[:host], config[:port] ]
Daemons.daemonize(:app_name => app_name)
else
puts "Now accepting connections on address #{config[:host]}, port #{config[:port]}..."
end
end
end
end
end
| 27.395349 | 124 | 0.537351 |
21529d8f38db7ae8b6913ebecdadaff5d66c4cf1 | 147 | class AddLastHarvestSecondsToResources < ActiveRecord::Migration
def change
add_column :resources, :last_harvest_seconds, :integer
end
end
| 24.5 | 64 | 0.809524 |
08d02e17a0c5a727ee83e5f0531b272820786ebf | 1,196 | #---
# Excerpted from "Ruby on Rails, 2nd Ed."
# We make no guarantees that this code is fit for any purpose.
# Visit http://www.editions-eyrolles.com/Livre/9782212120790/ for more book information.
#---
# Classic Rails scaffold-style CRUD for products; every action requires an
# authorized admin via the before_filter.
class AdminController < ApplicationController
  before_filter :authorize
  # ....
  def index
    list
    render :action => 'list'
  end
  # Paginated product listing, 10 per page.
  def list
    @product_pages, @products = paginate :products, :per_page => 10
  end
  def show
    @product = Product.find(params[:id])
  end
  def new
    @product = Product.new
  end
  def create
    @product = Product.new(params[:product])
    if @product.save
      flash[:notice] = 'Product was successfully created.'
      redirect_to :action => 'list'
    else
      # Re-render the form with validation errors.
      render :action => 'new'
    end
  end
  def edit
    @product = Product.find(params[:id])
  end
  def update
    @product = Product.find(params[:id])
    if @product.update_attributes(params[:product])
      flash[:notice] = 'Product was successfully updated.'
      redirect_to :action => 'show', :id => @product
    else
      render :action => 'edit'
    end
  end
  # NOTE(review): destroy (and create/update) appear reachable via GET —
  # consider `verify :method => :post` for destructive actions; confirm
  # against the app's routes.
  def destroy
    Product.find(params[:id]).destroy
    redirect_to :action => 'list'
  end
end
| 19.933333 | 88 | 0.64214 |
f7dfd7c1bc5c376313307133645caf1b70329910 | 6,975 | require File.expand_path('../../spec_helper', __FILE__)
require 'tempfile'
# Exercises the remote checksum tool against a local fixture tree, with SFTP
# and SSH replaced by local-filesystem/shell stand-ins.
describe Checksum::Tools::Remote do
  describe "file operations" do
    before :all do
      # Anonymous stand-in for a Net::SFTP session: dir globbing, stat, and
      # file access are all served from the local filesystem.
      @mock_sftp = Class.new do
        def dir
          Class.new do
            def self.[](*args)
              Dir[File.join(*args)].collect { |s|
                name = s[args.first.length..-1]
                stat = File.stat(s)
                # 0040000 is the directory bit of st_mode.
                type = stat.mode & 0040000 != 0 ? Net::SFTP::Protocol::V04::Attributes::T_DIRECTORY : Net::SFTP::Protocol::V04::Attributes::T_REGULAR
                attrs = {
                  :type => type,
                  :size => stat.size,
                  :owner => stat.uid,
                  :group => stat.gid,
                  :permissions => stat.mode & 0777,
                  :atime => stat.atime,
                  :createtime => stat.ctime,
                  :mtime => stat.mtime
                }
                Net::SFTP::Protocol::V04::Name.new(name, Net::SFTP::Protocol::V04::Attributes.new(attrs))
              }
            end
          end
        end
        def file
          File
        end
        # Translate a local ENOENT into the SFTP status exception the real
        # session would raise (code 2 = no such file).
        def stat!(*args)
          File.stat(*args)
        rescue Errno::ENOENT
          raise Net::SFTP::StatusException.new(Net::SFTP::Response.new(nil, :code => 2, :type => 101), 'no such file')
        end
      end
      # SSH stand-in: run commands locally, discarding stderr.
      @mock_ssh = Class.new do
        def exec!(c)
          %x[#{c} 2>/dev/null]
        end
      end
      # Unpack the fixture tree into the system temp dir once for the group.
      @tmpdir = Dir.tmpdir
      tarfile = File.expand_path('../../test_data.tgz',__FILE__)
      `tar xzvf "#{tarfile}" -C "#{@tmpdir}" > /dev/null 2>&1`
      @dir = File.join(@tmpdir,'test_data')
    end
    after :all do
      FileUtils.rm_rf(File.join(@tmpdir,'test_data'))
    end
    before :each do
      sftp = @mock_sftp.new
      ssh = @mock_ssh.new
      @tool = Checksum::Tools.new(Checksum::Tools.parse_path("[email protected]:#{@dir}"), :md5, :sha1, :recursive => true)
      allow(@tool).to receive(:remote_properties).and_return({ :openssl => 'openssl' })
      allow(@tool).to receive(:sftp).and_return(sftp)
      allow(@tool).to receive(:ssh).and_return(ssh)
      # KLUDGE ALERT: We can't mock an entire asynchronous SSH session, so we're
      #               stubbing Checksum::Tools::Remote#exec! which is one of the
      #               methods we should be testing!
      allow(@tool).to receive(:exec!) { |c| %x[#{c} 2>/dev/null].chomp }
    end
    after :each do
      @tool = nil
    end
    it "should be of the correct type" do
      expect(@tool).to be_a(Checksum::Tools::Remote)
    end
    it "should report correct digest lengths" do
      expect(@tool.digest_length(:md5)).to eq(32)
      expect(@tool.digest_length(:sha1)).to eq(40)
    end
    it "should report on file size" do
      expect(@tool.send(:file_size,File.join(@dir,'one/two/report.pdf'))).to eq(134833)
      expect { @tool.send(:file_size,File.join(@dir,'one/two/nonexistent.txt')) }.to raise_error(Errno::ENOENT)
    end
    it "should calculate checksums for a file" do
      pending('Checksum::Tools::Remote#exec! is stubbed and does not execute under CI') if ENV['TRAVIS']
      result = @tool.digest_file(File.join(@dir,'one/two/ignore.doc'))
      expect(result).to eq({ :md5 => 'a8b0d13fd645acc29b0dc2c4837e6f00', :sha1 => '8e129fe06c0679ce5a7f6e58d60cfd488512913a' })
      expect { @tool.digest_file(File.join(@dir,'one/two/nonexistent.txt')) }.to raise_error(Errno::ENOENT)
    end
    it "should generate checksums for a tree of files" do
      pending('Checksum::Tools::Remote#exec! is stubbed and does not execute under CI') if ENV['TRAVIS']
      listener = double('listener')
      expect(listener).to receive(:progress).exactly(6).times.with(an_instance_of(String),an_instance_of(Fixnum),an_instance_of(Fixnum))
      @tool.create_digest_files(@dir, ['*.pdf','*.mp4']) do
        |*args| listener.progress(*args)
      end
      strings = File.read(File.join(@dir,'one/two/report.pdf.digest')).split(/\n/)
      expect(strings).to include("MD5(report.pdf)= fda5eab2335987f56d7d3abe53734295")
      expect(strings).to include("SHA1(report.pdf)= 56ea04102e0388de9b9c31c6db8aebbff67671c1")
      strings = File.read(File.join(@dir,'three/video.mp4.digest')).split(/\n/)
      expect(strings).to include("MD5(video.mp4)= 9023e975b52be97a4ef6ad4e25e2ef79")
      expect(strings).to include("SHA1(video.mp4)= ce828086b63e6b351d9fb6d6bc2b0838725bdf37")
      # .doc is outside the requested globs, so no digest file is written.
      expect(File.exists?(File.join(@dir,'one/two/ignore.doc.digest'))).to be false
    end
    it "should pass verification for a tree of files" do
      pending('Checksum::Tools::Remote#exec! is stubbed and does not execute under CI') if ENV['TRAVIS']
      listener = double('listener')
      expect(listener).to receive(:progress).once.with(File.join(@dir,'one/two/report.pdf'),-1,-1)
      expect(listener).to receive(:progress).twice.with(File.join(@dir,'one/two/report.pdf'),134833,an_instance_of(Fixnum))
      expect(listener).to receive(:progress).once.with(File.join(@dir,'one/two/report.pdf'),-1,0,{ :md5 => true, :sha1 => true })
      expect(listener).to receive(:progress).once.with(File.join(@dir,'three/video.mp4'),-1,-1)
      expect(listener).to receive(:progress).twice.with(File.join(@dir,'three/video.mp4'),2413046,an_instance_of(Fixnum))
      expect(listener).to receive(:progress).once.with(File.join(@dir,'three/video.mp4'),-1,0,{ :md5 => true, :sha1 => true })
      @tool.verify_digest_files(@dir, ['*.pdf','*.mp4']) do
        |*args| listener.progress(*args)
      end
    end
    it "should fail verification for a tree of files" do
      pending('Checksum::Tools::Remote#exec! is stubbed and does not execute under CI') if ENV['TRAVIS']
      # Corrupt the stored SHA1 (last digit changed) so only :sha1 fails.
      File.open(File.join(@dir,'three/video.mp4.digest'),'w') { |f| f.write("MD5(video.mp4)= 9023e975b52be97a4ef6ad4e25e2ef79\nSHA1(video.mp4)= ce828086b63e6b351d9fb6d6bc2b0838725bdf39\n") }
      listener = double('listener')
      expect(listener).to receive(:progress).once.with(File.join(@dir,'one/two/report.pdf'),-1,-1)
      expect(listener).to receive(:progress).twice.with(File.join(@dir,'one/two/report.pdf'),134833,an_instance_of(Fixnum))
      expect(listener).to receive(:progress).once.with(File.join(@dir,'one/two/report.pdf'),-1,0,{ :md5 => true, :sha1 => true })
      expect(listener).to receive(:progress).once.with(File.join(@dir,'three/video.mp4'),-1,-1)
      expect(listener).to receive(:progress).twice.with(File.join(@dir,'three/video.mp4'),2413046,an_instance_of(Fixnum))
      expect(listener).to receive(:progress).once.with(File.join(@dir,'three/video.mp4'),-1,0,{ :md5 => true, :sha1 => false })
      expect(listener).to receive(:progress).once.with(File.join(@dir,'one/two/ignore.doc'),-1,-1)
      expect(listener).to receive(:progress).once.with(File.join(@dir,'one/two/ignore.doc'),-1,0,{ :digest_file => false })
      @tool.verify_digest_files(@dir, ['*']) do
        |*args| listener.progress(*args)
      end
    end
  end
end
| 47.128378 | 190 | 0.62552 |
ed645f3e3674b0d29bdb34f56938d40c70527f06 | 460 | cask 'flipper' do
  version '0.45.0'
  sha256 '533b35691d99ecb441f8da9a2609911c2e7b57ab6d13b1ac1d4e999e07e89461'
  # github.com/facebook/flipper/ was verified as official when first introduced to the cask
  url "https://github.com/facebook/flipper/releases/download/v#{version}/Flipper-mac.zip"
  appcast 'https://github.com/facebook/flipper/releases.atom'
  name 'Facebook Flipper'
  # Former product name, listed as an additional name for discoverability.
  name 'Sonar'
  homepage 'https://fbflipper.com/'
  app 'Flipper.app'
end
| 32.857143 | 91 | 0.767391 |
5dae631eb51044a2f4799431e04a7f54160e454d | 1,584 | require_relative "DefaultUIobject.rb"
require_relative "../../Maths/General_Maths.rb"
#Also serves a roll of buttons...
# A clickable, button-like UI element: a box drawable plus a text label,
# laid out each frame from a normalized rectangle.
class InteractableUIobject < DefaultUIobject
  include General_Maths

  # drawable - two-element array: [box_drawable, text_drawable]
  # label    - label forwarded to DefaultUIobject
  # rect     - normalized rectangle (left/top/width/height as fractions of the window)
  # listener - callable invoked with the text drawable while pressed; may be nil
  def initialize(drawable, label, rect, listener)
    @box, @text = drawable
    # Remember the box's natural size so later scaling is relative to it.
    @box_default_width, @box_default_height = @box.local_bounds.width, @box.local_bounds.height
    super(@box, label, nil, true)
    @rect = rect # rect must be normalized
    @update_listener = listener
  end

  # override
  # param: [window_width, window_height, [mouse_x, mouse_y, pressed?]]
  # Recomputes pixel geometry from the normalized rect, repositions the box and
  # centered label, and applies hover (yellow text) / pressed (flipped box)
  # feedback. While pressed over the element the listener fires every update.
  def update(param)
    width = param[0]
    height = param[1]
    pos_x, pos_y, pressed = param[2]
    pixel_left, pixel_top, pixel_width, pixel_height = width * @rect.left, height * @rect.top, width * @rect.width, height * @rect.height
    @box.position = Vector2.new(pixel_left, pixel_top)
    # Center the label inside the box (the -5 is a small vertical tweak).
    @text.position = Vector2.new(pixel_left + (pixel_width - @text.local_bounds.width) / 2.0, pixel_top + (pixel_height - @text.local_bounds.height) / 2.0 - 5)
    @box.rotation = 0.0
    @box.scale = Vector2.new(pixel_width / @box_default_width, pixel_height / @box_default_height)
    @text.color = Color::White
    if is_in_range?(pos_x, pos_y, [pixel_left, pixel_width], [pixel_top, pixel_height])
      @text.color = Color::Yellow
      if pressed
        # Pressed feedback: rotate the box 180 degrees about the opposite corner.
        @box.rotation = 180.0
        @box.position = Vector2.new(pixel_left + pixel_width, pixel_top + pixel_height)
        # NOTE(review): fires on every update while held down — confirm the
        # caller debounces if single-click semantics are intended.
        @update_listener.call(@text) unless @update_listener.nil?
      end
    end
  end

  # override
  def draw_on(window)
    window.draw @box
    window.draw @text
  end
end
| 33 | 158 | 0.683081 |
e80345faa7594f3b080ce2149dc06db8e92fbed6 | 191 | # Note that this policy is only for *management* of ItemTypes. All users can
# of course view items within any item type, since they are all public.
class ItemTypePolicy < FieldSetPolicy
  # No overrides: management authorization rules are inherited verbatim
  # from FieldSetPolicy.
end
| 38.2 | 76 | 0.78534 |
abebb74f731202a0a6f707e4796065ac059b7de1 | 2,142 | class CreateActiveStorageTables < ActiveRecord::Migration[5.2]
def change
# Use Active Record's configured type for primary and foreign keys
primary_key_type, foreign_key_type = primary_and_foreign_key_types
create_table :active_storage_blobs, id: primary_key_type do |t|
t.string :key, null: false
t.string :filename, null: false
t.string :content_type
t.text :metadata
t.string :service_name, null: false
t.bigint :byte_size, null: false
t.string :checksum, null: false
if connection.supports_datetime_with_precision?
t.datetime :created_at, precision: 6, null: false
else
t.datetime :created_at, null: false
end
t.index [ :key ], unique: true
end
create_table :active_storage_attachments, id: primary_key_type do |t|
t.string :name, null: false
t.references :record, null: false, polymorphic: true, index: false, type: foreign_key_type
t.references :blob, null: false, type: foreign_key_type
if connection.supports_datetime_with_precision?
t.datetime :created_at, precision: 6, null: false
else
t.datetime :created_at, null: false
end
t.index [ :record_type, :record_id, :name, :blob_id ], name: "index_active_storage_attachments_uniqueness", unique: true
t.foreign_key :active_storage_blobs, column: :blob_id
end
create_table :active_storage_variant_records, id: primary_key_type do |t|
t.belongs_to :blob, null: false, index: false, type: foreign_key_type
t.string :variation_digest, null: false
t.index %i[ blob_id variation_digest ], name: "index_active_storage_variant_records_uniqueness", unique: true
t.foreign_key :active_storage_blobs, column: :blob_id
end
end
private
def primary_and_foreign_key_types
config = Rails.configuration.generators
setting = config.options[config.orm][:primary_key_type]
primary_key_type = setting || :primary_key
foreign_key_type = setting || :bigint
[primary_key_type, foreign_key_type]
end
end
| 37.578947 | 126 | 0.69281 |
bf00c04af5447a11b5d181175c9ffc0ee2c2bba2 | 818 | require 'faraday'
require 'faraday_middleware'
module ZenhubRuby
  # Mixin providing an authenticated Faraday connection to the ZenHub API.
  # Host classes must supply #zenhub_access_token.
  module Connection
    END_POINT = 'https://api.zenhub.io'.freeze
    # GET the given path against the API endpoint; returns a Faraday response.
    def get(path)
      api_connection.get(path)
    end
    private
    # Memoized Faraday connection (one per including instance).
    def api_connection
      @api_connection ||= Faraday::Connection.new(END_POINT, connect_options)
    end
    # Connection options: custom middleware stack plus auth/UA headers.
    # The token is sent as X-Authentication-Token, per the ZenHub API.
    def connect_options
      @connect_options ||= {
        builder: middleware,
        headers: {
          accept: 'application/json',
          user_agent: "ZenhubRuby v#{VERSION}",
          x_authentication_token: zenhub_access_token
        }
      }
    end
    # Middleware stack: url-encode requests, Net::HTTP adapter, and JSON
    # response parsing for json content types. Order matters to Faraday.
    def middleware
      @middleware ||= Faraday::RackBuilder.new do |builder|
        builder.request :url_encoded
        builder.adapter :net_http
        builder.response :json, content_type: /\bjson$/
      end
    end
  end
end
| 21.526316 | 78 | 0.634474 |
bb9947633db1691dc2ecd9e19910c1b23d908eda | 645 | DUMMY_PRIV_KEY = 'f278550976dbe8cee36bd9bed2faa4170d8cee467e41b97e7ee557bd407bef7c'
# Test fixtures for name-operation RPC flows (addresses, identifiers, and
# canned RPC responses). Values are inert dummies, not live credentials.
DUMMY_ADDRESS = 'MzgmkqvuhZtVxY7Ni5ytgxRMGF4wtdaD1G'
DUMMY_IDENTIFIER = 'MzgmkqvuhZtVxY7Ni5ytgxRMGF4wtdaD1G_2c7f0b55c7c63a66c8b5ef84702cd2b5d61e7b9f'
DUMMY_HASHED_REVIEW = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae'
# name_new RPC returns [txid, rand]; the rand salt is reused below.
DUMMY_NAME_NEW_RESPONSE = { result: ['1e2e7d5a10c80eb1d347a22c450f2373dae55ae473d6c2388b0215a0b13a6a5c','e54def60bdfb807b'] }
DUMMY_NAME_NEW_RAND = DUMMY_NAME_NEW_RESPONSE[:result][1]
DUMMY_NAME_FIRSTUPDATE_RESPONSE = { result: '1458df0d9e61b416cd954f0e4d250971cce7fe0ad99e6d230f978142b117c065', error:nil }
796de2b5f30768bb882c9c54e5142cdb4a3d13c6 | 1,956 | require 'securerandom'
RSpec.shared_examples 'publisher' do
# WARNING: This example group requires the following helpers to be defined by caller:
# - `read_messages(topic_name, num_messages)`
# - `publisher_url`
subject do
BPS::Publisher.resolve(publisher_url)
end
after do
subject.close
end
it 'registers' do
expect(subject).to be_a(described_class)
end
it 'publishes' do
topic_name = "bps-test-topic-#{SecureRandom.uuid}"
messages = Array.new(3) { "bps-test-message-#{SecureRandom.uuid}" }
# call optional `setup_topic` - it's needed only adapters
# that don't retain messages so subscribing must be done before publishing:
begin
setup_topic(topic_name, messages.count)
rescue NoMethodError # rubocop:disable Lint/SuppressedException
end
topic = subject.topic(topic_name)
messages.each {|msg| topic.publish(msg) }
subject.close
published = read_messages(topic_name, messages.count)
expect(published).to match_array(messages)
end
end
# Shared contract for BPS subscriber adapters: resolve from a URL, subscribe
# to a pre-populated topic, and verify all messages are consumed.
RSpec.shared_examples 'subscriber' do
  # WARNING: This example group requires the following helpers to be defined by caller:
  # - `produce_messages(topic_name, messages)` (messages - Array[String])
  # - `subscriber_url`
  subject do
    BPS::Subscriber.resolve(subscriber_url)
  end
  after do
    subject.close
  end
  it 'registers' do
    expect(subject).to be_a(described_class)
  end
  # Description fixed: this example exercises subscription/consumption but was
  # previously mislabeled 'publishes' (copy-paste from the publisher group).
  it 'subscribes' do
    topic_name = "bps-test-topic-#{SecureRandom.uuid}"
    messages = Array.new(3) { "bps-test-message-#{SecureRandom.uuid}" }
    produce_messages(topic_name, messages)
    # TODO: ideally, need to pre-define/convert canonical subscription options
    consumed = Queue.new
    subject.subscribe(topic_name, start_at: :first) do |msg_data|
      consumed.push(msg_data)
    end
    # Queue#pop blocks, so this waits until all three messages arrive.
    consumed_messages = Array.new(3) { consumed.pop }
    expect(consumed_messages).to match_array(messages)
  end
end
| 26.794521 | 87 | 0.712679 |
619adc539f959fb4803c2f368c875d5c2f3b6ff7 | 616 | # encoding: utf-8
require 'erb'
# Renders the ERB template embedded after __END__ in the calling script.
class EndERB
  class << self
    # for single-template scripts using __END__ and DATA
    #
    # === Params
    #
    # * <tt>:hash</tt> - erb template variable hash
    #
    # === Example
    #
    #   def hoge
    #     hash = {
    #       hoge: '@hoge@',
    #       hige: '@hige@',
    #     }
    #     EndERB.apply(hash)
    #   end
    #
    #   puts hoge
    #
    #   __END__
    #   hoge=<%=hoge%>
    #   hige=<%=hige%>
    #
    # output
    #
    #   hoge=@hoge@
    #   hige=@hige@
    #
    def apply(hash)
      # NOTE(review): `hash` is not used directly — the template sees it only
      # through `binding` (as the local `hash`). The example above implies
      # bare variable names work; verify intended template usage.
      # DATA reads everything after __END__ in the *main* script file.
      ERB.new(DATA.read).result(binding)
    end
  end
end
| 16.210526 | 55 | 0.444805 |
e84bc7dca1283495886a1ddace2d3787ed464f69 | 1,456 | # -*- encoding: utf-8 -*-
# Make lib/ loadable so the version constant can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'credit_card_validations/version'

# Gem packaging metadata for the credit_card_validations gem.
Gem::Specification.new do |gem|
  gem.name = "credit_card_validations"
  gem.version = CreditCardValidations::VERSION
  gem.authors = ["Igor"]
  gem.email = ["[email protected]"]
  gem.description = %q{A ruby gem for validating credit card numbers}
  gem.summary = "gem should be used for credit card numbers validation, card brands detections, luhn checks"
  gem.homepage = "http://didww.github.io/credit_card_validations/"
  gem.license = "MIT"
  gem.metadata = {
    'bug_tracker_uri' => 'https://github.com/didww/credit_card_validations/issues',
    'changelog_uri' => 'https://github.com/didww/credit_card_validations/blob/master/Changelog.md',
    'source_code_uri' => 'https://github.com/didww/credit_card_validations'
  }
  # File lists are taken from git at packaging time (shelled out here).
  gem.files = `git ls-files`.split($/)
  gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ["lib"]
  gem.add_dependency "activemodel", ">= 3", "<= 6.2"
  gem.add_dependency "activesupport", ">= 3", "<= 6.2"
  gem.add_development_dependency "minitest", '~> 5.14.3'
  gem.add_development_dependency "mocha", '1.1.0'
  gem.add_development_dependency 'rake', '~> 10'
end
| 39.351351 | 114 | 0.669643 |
bb05ff294ec80d0d187a12143c1a30e65f643024 | 69 | # frozen_string_literal: true
module Liquid
  # Gem version string; bump here for releases (read by the gemspec).
  VERSION = "4.0.3"
end
| 11.5 | 29 | 0.724638 |
# Homebrew formula: installs recon-ng into a private Python virtualenv.
class ReconNg < Formula
  include Language::Python::Virtualenv

  desc "Web Reconnaissance Framework"
  homepage "https://github.com/lanmaster53/recon-ng"
  url "https://github.com/lanmaster53/recon-ng/archive/v5.1.2.tar.gz"
  sha256 "18d05030b994c9b37f624628251d3376d590f3d1eec155f67aca88fa5f3490cc"
  license "GPL-3.0"

  bottle do
    sha256 cellar: :any, arm64_big_sur: "1f46a001a83bde4267cc2790b666f212b13d69f03a52c8b8fd66892e14915063"
    sha256 cellar: :any, big_sur: "c59a7fe40f66a18e7bf3a1a1209ff8ed4b960742beda6bcb7647e4fd0e339fd3"
    sha256 cellar: :any, catalina: "cfb08fd27c9735d05c102fdb1f2bf26e4ee4d70d019891ef5145f748832a4ee3"
    sha256 cellar: :any, mojave: "eb58dafa4f0bb049b6b8cbd52a2481b378b184a4539f888a448bb41cf306fe94"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "2e77a4fca95520e0cfef428b50d9e395c07d833be394fdc6d5547c62f4d0ae24" # linuxbrew-core
  end

  depends_on "libyaml"
  depends_on "[email protected]"

  uses_from_macos "libxml2"
  uses_from_macos "libxslt"

  # See the REQUIREMENTS file in the archive for the top level of dependencies.
  # Please check for changes that may have been made since the last update.
  # Each resource below is a pinned PyPI sdist vendored into the virtualenv.
  resource "aniso8601" do
    url "https://files.pythonhosted.org/packages/2f/45/f2aec388115ea65a2b95b3dc1ba058a8470675fe16bcd4678a44a59776ea/aniso8601-8.0.0.tar.gz"
    sha256 "529dcb1f5f26ee0df6c0a1ee84b7b27197c3c50fc3a6321d66c544689237d072"
  end

  resource "attrs" do
    url "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz"
    sha256 "f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
  end

  resource "certifi" do
    url "https://files.pythonhosted.org/packages/b8/e2/a3a86a67c3fc8249ed305fc7b7d290ebe5e4d46ad45573884761ef4dea7b/certifi-2020.4.5.1.tar.gz"
    sha256 "51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519"
  end

  resource "chardet" do
    url "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"
    sha256 "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
  end

  resource "click" do
    url "https://files.pythonhosted.org/packages/27/6f/be940c8b1f1d69daceeb0032fee6c34d7bd70e3e649ccac0951500b4720e/click-7.1.2.tar.gz"
    sha256 "d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"
  end

  resource "dicttoxml" do
    url "https://files.pythonhosted.org/packages/74/36/534db111db9e7610a41641a1f6669a964aacaf51858f466de264cc8dcdd9/dicttoxml-1.7.4.tar.gz"
    sha256 "ea44cc4ec6c0f85098c57a431a1ee891b3549347b07b7414c8a24611ecf37e45"
  end

  resource "dnspython" do
    url "https://files.pythonhosted.org/packages/ec/c5/14bcd63cb6d06092a004793399ec395405edf97c2301dfdc146dfbd5beed/dnspython-1.16.0.zip"
    sha256 "36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"
  end

  resource "flasgger" do
    url "https://files.pythonhosted.org/packages/27/45/31ef09e68ef063b3ac5356fedbbf59f0adfc768ad7baa5f12835587dbd99/flasgger-0.9.4.tar.gz"
    sha256 "37137b3292738580c42e03662bfb8731656a11d636e76f76d30e572c1fa5bd0d"
  end

  resource "Flask" do
    url "https://files.pythonhosted.org/packages/4e/0b/cb02268c90e67545a0e3a37ea1ca3d45de3aca43ceb7dbf1712fb5127d5d/Flask-1.1.2.tar.gz"
    sha256 "4efa1ae2d7c9865af48986de8aeb8504bf32c7f3d6fdc9353d34b21f4b127060"
  end

  resource "Flask-RESTful" do
    url "https://files.pythonhosted.org/packages/67/65/84f3218666fc115497a13ff727f16d02374d07d1924cd4fd72e275294e8b/Flask-RESTful-0.3.8.tar.gz"
    sha256 "5ea9a5991abf2cb69b4aac19793faac6c032300505b325687d7c305ffaa76915"
  end

  resource "html5lib" do
    url "https://files.pythonhosted.org/packages/85/3e/cf449cf1b5004e87510b9368e7a5f1acd8831c2d6691edd3c62a0823f98f/html5lib-1.0.1.tar.gz"
    sha256 "66cb0dcfdbbc4f9c3ba1a63fdb511ffdbd4f513b2b6d81b80cd26ce6b3fb3736"
  end

  resource "idna" do
    url "https://files.pythonhosted.org/packages/cb/19/57503b5de719ee45e83472f339f617b0c01ad75cba44aba1e4c97c2b0abd/idna-2.9.tar.gz"
    sha256 "7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb"
  end

  resource "itsdangerous" do
    url "https://files.pythonhosted.org/packages/68/1a/f27de07a8a304ad5fa817bbe383d1238ac4396da447fa11ed937039fa04b/itsdangerous-1.1.0.tar.gz"
    sha256 "321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19"
  end

  resource "Jinja2" do
    url "https://files.pythonhosted.org/packages/64/a7/45e11eebf2f15bf987c3bc11d37dcc838d9dc81250e67e4c5968f6008b6c/Jinja2-2.11.2.tar.gz"
    sha256 "89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0"
  end

  resource "jsonschema" do
    url "https://files.pythonhosted.org/packages/69/11/a69e2a3c01b324a77d3a7c0570faa372e8448b666300c4117a516f8b1212/jsonschema-3.2.0.tar.gz"
    sha256 "c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"
  end

  resource "lxml" do
    url "https://files.pythonhosted.org/packages/39/2b/0a66d5436f237aff76b91e68b4d8c041d145ad0a2cdeefe2c42f76ba2857/lxml-4.5.0.tar.gz"
    sha256 "8620ce80f50d023d414183bf90cc2576c2837b88e00bea3f33ad2630133bbb60"
  end

  resource "MarkupSafe" do
    url "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz"
    sha256 "29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"
  end

  resource "mechanize" do
    url "https://files.pythonhosted.org/packages/77/1b/7e4b644108e4e99b136e52c6aae34873fcd267e3d2489f3bd2cff8655a59/mechanize-0.4.5.tar.gz"
    sha256 "6355c11141f6d4b54a17fc2106944806b5db2711e60b120d15d83db438c333fd"
  end

  resource "mistune" do
    url "https://files.pythonhosted.org/packages/2d/a4/509f6e7783ddd35482feda27bc7f72e65b5e7dc910eca4ab2164daf9c577/mistune-0.8.4.tar.gz"
    sha256 "59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"
  end

  resource "pyrsistent" do
    url "https://files.pythonhosted.org/packages/9f/0d/cbca4d0bbc5671822a59f270e4ce3f2195f8a899c97d0d5abb81b191efb5/pyrsistent-0.16.0.tar.gz"
    sha256 "28669905fe725965daa16184933676547c5bb40a5153055a8dee2a4bd7933ad3"
  end

  resource "pytz" do
    url "https://files.pythonhosted.org/packages/f4/f6/94fee50f4d54f58637d4b9987a1b862aeb6cd969e73623e02c5c00755577/pytz-2020.1.tar.gz"
    sha256 "c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"
  end

  resource "PyYAML" do
    url "https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz"
    sha256 "b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"
  end

  resource "redis" do
    url "https://files.pythonhosted.org/packages/05/5e/5e9a329ba600244f2d37f86131ccec19936d41cba0887240086b44cf4f54/redis-3.5.0.tar.gz"
    sha256 "7378105cd8ea20c4edc49f028581e830c01ad5f00be851def0f4bc616a83cd89"
  end

  resource "requests" do
    url "https://files.pythonhosted.org/packages/f5/4f/280162d4bd4d8aad241a21aecff7a6e46891b905a4341e7ab549ebaf7915/requests-2.23.0.tar.gz"
    sha256 "b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6"
  end

  resource "rq" do
    url "https://files.pythonhosted.org/packages/a0/a6/e5f731980c1535703369bc5cd0317fe7e6255b2abae6349566ca453e2f99/rq-1.3.0.tar.gz"
    sha256 "49c9149fa9301f98d918f3042f36bed4252d37193d222a1ce8b0e25886442377"
  end

  resource "six" do
    url "https://files.pythonhosted.org/packages/21/9f/b251f7f8a76dec1d6651be194dfba8fb8d7781d10ab3987190de8391d08e/six-1.14.0.tar.gz"
    sha256 "236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a"
  end

  resource "unicodecsv" do
    url "https://files.pythonhosted.org/packages/6f/a4/691ab63b17505a26096608cc309960b5a6bdf39e4ba1a793d5f9b1a53270/unicodecsv-0.14.1.tar.gz"
    sha256 "018c08037d48649a0412063ff4eda26eaa81eff1546dbffa51fa5293276ff7fc"
  end

  resource "urllib3" do
    url "https://files.pythonhosted.org/packages/05/8c/40cd6949373e23081b3ea20d5594ae523e681b6f472e600fbc95ed046a36/urllib3-1.25.9.tar.gz"
    sha256 "3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527"
  end

  resource "webencodings" do
    url "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz"
    sha256 "b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"
  end

  resource "Werkzeug" do
    url "https://files.pythonhosted.org/packages/10/27/a33329150147594eff0ea4c33c2036c0eadd933141055be0ff911f7f8d04/Werkzeug-1.0.1.tar.gz"
    sha256 "6c80b1e5ad3665290ea39320b91e1be1e0d5f60652b964a3070216de83d2e47c"
  end

  resource "XlsxWriter" do
    url "https://files.pythonhosted.org/packages/6a/50/77a5d3377e0b5caff56609a9075160f57951015c274e6ba891e5ad96f61f/XlsxWriter-1.2.8.tar.gz"
    sha256 "488e1988ab16ff3a9cd58c7656d0a58f8abe46ee58b98eecea78c022db28656b"
  end

  def install
    # Fix "ld: file not found: /usr/lib/system/libsystem_darwin.dylib" for lxml
    ENV["SDKROOT"] = MacOS.sdk_path if MacOS.version == :sierra

    # Keep the whole source tree under libexec and vendor all Python
    # resources into a private virtualenv there.
    libexec.install Dir["*"]
    venv = virtualenv_create(libexec, "python3")
    venv.pip_install resources

    # Replace shebang with virtualenv python
    inreplace libexec/"recon-ng", "#!/usr/bin/env python3", "#!#{libexec}/bin/python"
    bin.install_symlink libexec/"recon-ng"
  end

  test do
    # Drive the interactive console with a scripted resource file and
    # verify it runs and exits cleanly.
    (testpath/"resource").write <<~EOS
      options list
      exit
    EOS
    system "#{bin}/recon-ng", "-r", testpath/"resource"
  end
end
| 46.75 | 143 | 0.809898 |
180acd47f535c45113d742367d5c6025b531bd72 | 3,129 | require File.expand_path('../../test_helper', __FILE__)
module Checkr
  class CountyCriminalSearchTest < Test::Unit::TestCase
    # All endpoints for this resource hang off the same collection URL.
    setup do
      @county_criminal_search_url = "#{Checkr.api_base}/v1/county_criminal_searches"
    end

    context 'CountyCriminalSearch class' do
      should 'be retrieveable' do
        search_id = "county_criminal_search_id"
        expected_url = "#{@county_criminal_search_url}/#{search_id}"
        @mock.expects(:get).once.with(expected_url, anything, anything).returns(test_response(test_county_criminal_search))
        retrieved = CountyCriminalSearch.retrieve(search_id)
        assert(retrieved.is_a?(CountyCriminalSearch))
      end
    end

    context 'CountyCriminalSearch instance' do
      should 'be refreshable' do
        fixture = test_county_criminal_search
        refresh_url = "#{@county_criminal_search_url}/#{fixture[:id]}"
        @mock.expects(:get).once.with(refresh_url, anything, anything).returns(test_response(fixture))
        search = CountyCriminalSearch.new(fixture[:id])
        search.refresh
        assert_equal(fixture[:status], search.status)
      end
    end

    context 'Retrieved CountyCriminalSearch instance' do
      setup do
        @mock.expects(:get).once.returns(test_response(test_county_criminal_search))
        @county_criminal_search = CountyCriminalSearch.retrieve('county_criminal_search_id')
      end

      # Every attribute check follows the same pattern, so generate the
      # examples; string interpolation yields exactly the same test names
      # as the previous hand-written versions.
      %i[id object uri status created_at completed_at turnaround_time county state records].each do |attribute|
        should "have the #{attribute} attribute" do
          assert_equal(test_county_criminal_search[attribute], @county_criminal_search.public_send(attribute))
        end
      end
    end

    should 'be registered' do
      assert(APIClass.subclasses.include?(CountyCriminalSearch))
      assert_equal(CountyCriminalSearch, APIClass.subclass_fetch("county_criminal_search"))
    end
  end
end
| 37.698795 | 179 | 0.738575 |
bb3b67c8807f4184775f0378981e18db62b0d5ec | 629 | require_relative 'point'
class Line
  # Represents the line y = a*x + b (slope-intercept form).
  attr_reader :a, :b

  # Builds the line through two point-like objects (anything responding
  # to #x and #y). NOTE(review): a vertical line (equal x coordinates)
  # produces an infinite/NaN slope — callers must avoid that case.
  def self.from_points(point_a, point_b)
    a = (point_b.y - point_a.y).to_f / (point_b.x - point_a.x)
    b = point_b.y - a * point_b.x
    new(a, b)
  end

  def initialize(a, b)
    @a = a
    @b = b
  end

  # True when the point lies exactly on the line. Uses exact equality,
  # so float rounding may make near-misses return false.
  def cover?(point)
    point.y == value(point.x)
  end

  # Returns the Point at the given x (or y) coordinate; nil when neither
  # is supplied, or when y is given but the line is horizontal (no
  # unique x exists).
  def point_at(x: nil, y: nil)
    return Point.new(x, value(x)) if x
    return Point.new(argument(y), y) if y && argument(y)
  end

  # Inverse of #value: the x such that value(x) == y; nil for
  # horizontal lines (a == 0), where no unique solution exists.
  def argument(y)
    return nil if a == 0
    (y - b) / a.to_f
  end

  # y-value of the line at x.
  def value(x)
    a * x + b
  end

  # Fixed: previously raised NoMethodError when compared against a
  # non-Line object; now returns false instead.
  def ==(other)
    other.is_a?(Line) && a == other.a && b == other.b
  end
end
class Job
  module CdeExtension
    # Prepares a freshly submitted CDE job: writes the args.txt manifest
    # the compiled CDE binary expects, zips the job directory into the
    # dropbox for the chosen analysis host, and schedules the first
    # progress check.
    def process_new_cde
      Rails.logger.info("I'm about to process cde job #{self.id}")
      # the coordinates may be stored as a proper coordinate string or by
      # four simple values supplied by simple users. Here one converts
      # the latter into the former
      # NOTE(review): dropped an unused `user = User.find_by_email(...)`
      # lookup here — the variable was never read and only cost a query.
      coordinates = self.parameters[:coords].gsub(/\r\n?/, "")
      # create the args.txt needed by the compiled cde job
      outstring =
        "\"coords\",\"#{coordinates}\",\n\
\"jobid\",\"#{self.id}\",\n\
\"jobdescription\",\"#{self.job_description}\",\n\
\"jobtitle\",\"#{self.job_name}\",\n\
\"jobsubmitter\",\"#{self.email}\"\n"
      # first, it is necessary to create the directory structure
      tmpdir = "/tmp/#{self.id}"
      FileUtils.mkdir_p("#{tmpdir}/input")
      begin
        File.open("#{tmpdir}/input/args.txt", "w") { |f| f.write(outstring) }
      rescue
        self.jobfail("Could not write files for cde job ", "files not written")
        return
      end
      # get the name of the server which is to analyse this job
      choice = Account.hostcheck(self.type)
      # then, create a zipfile and put it in the dropbox dir
      dropdir = "#{Vibrant::Application.config.dropbox}/cde"
      begin
        # NOTE(review): self.id is interpolated into shell commands; ids
        # appear to be database-generated, but if that ever changes this
        # becomes a shell-injection risk — prefer system with an arg array.
        # (Removed the unused `out =` capture of the backtick output.)
        `cd /tmp && zip -r #{self.id}.#{choice}.zip #{self.id}`
        FileUtils.mv "/tmp/#{self.id}.#{choice}.zip", "#{dropdir}"
        system("chmod 777 #{dropdir}/#{self.id}.#{choice}.zip")
        FileUtils.rm_r tmpdir
      rescue
        self.jobfail("Could not prepare zip for cde job ", "zip creation failure")
        return
      end
      # then:
      self.update_attributes(:status => 'in progress')
      CheckingWorker.perform_at(1.minutes.from_now, self.id)
      self.update_attributes(:vars => {:checked => 0})
    end

    # Polls the dropbox for the job's result archive and either finalises
    # the job (success or failure) or reschedules another check with a
    # gradual back-off driven by `vars['checked']`.
    def check_progress_cde
      dropdir = "#{Vibrant::Application.config.dropbox}/cde"
      # File.exist? replaces the deprecated File.exists? (removed in Ruby 3.2).
      if File.exist?("#{dropdir}/#{self.id}.fail.zip")
        self.jobfail("Server says no for ", "server reports job failed")
        self.outfile = File.open("#{dropdir}/#{self.id}.fail.zip")
        self.save
        File.unlink("#{dropdir}/#{self.id}.fail.zip")
        return
      elsif File.exist?("#{dropdir}/#{self.id}.done.zip")
        begin
          FileUtils.mv "#{dropdir}/#{self.id}.done.zip", "#{dropdir}/#{self.id}.zip"
          self.outfile = File.open("#{dropdir}/#{self.id}.zip")
          self.status = 'finished'
          self.save
          # notify of success
          user = User.find_by_email(self.email)
          UserMailer.job_ready(user, self).deliver
          File.unlink("#{dropdir}/#{self.id}.zip")
        rescue
          self.jobfail("Zip handling failed for ", "could not process outfile")
          return
        end
      else
        # still running: poll every minute at first, then back off.
        case self.vars['checked']
        when 0..3
          self.checkup
          CheckingWorker.perform_at(1.minutes.from_now, self.id)
        when 4..6
          self.checkup
          CheckingWorker.perform_at(5.minutes.from_now, self.id)
        else
          CheckingWorker.perform_at(15.minutes.from_now, self.id)
        end
      end
    end

    # Increments the poll counter persisted in `vars`.
    # NOTE(review): this replaces the whole vars hash; fine while
    # `checked` is the only key, but would drop any other keys added later.
    def checkup
      self.update_attributes(:vars => {:checked => self.vars['checked'] + 1})
    end
  end
end
| 34.819149 | 84 | 0.597922 |
e973935d002edb721b6c3460c7380d1c8cb62800 | 2,204 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20171008090145) do
  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"

  # Photo albums; each belongs to a user and tracks a simple view counter.
  create_table "albums", force: :cascade do |t|
    t.string "cover"
    t.bigint "user_id"
    t.integer "view", default: 0
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.index ["user_id"], name: "index_albums_on_user_id"
  end

  # Articles authored by users, with title/chapeau/thumbnail/content fields.
  create_table "articles", force: :cascade do |t|
    t.string "title"
    t.string "chapeau"
    t.string "thumbnail"
    t.string "content"
    t.integer "view", default: 0
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.bigint "user_id"
    t.index ["user_id"], name: "index_articles_on_user_id"
  end

  # Images belong to an album and also reference the user directly.
  create_table "images", force: :cascade do |t|
    t.bigint "user_id"
    t.integer "view", default: 0
    t.string "source"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.bigint "album_id"
    t.index ["album_id"], name: "index_images_on_album_id"
    t.index ["user_id"], name: "index_images_on_user_id"
  end

  # Accounts; password_digest suggests has_secure_password auth — confirm in model.
  create_table "users", force: :cascade do |t|
    t.string "username"
    t.string "password_digest"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  add_foreign_key "albums", "users"
  add_foreign_key "articles", "users"
  add_foreign_key "images", "albums"
  add_foreign_key "images", "users"
end
| 35.548387 | 86 | 0.718693 |
38f31c0078e206515f814ed612b2afc59cf3bdd9 | 1,385 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20190808010918) do
  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"

  # Users table with password_digest plus remember/activation/reset digest
  # columns — presumably token-based session, account-activation, and
  # password-reset flows; confirm against the User model.
  create_table "users", force: :cascade do |t|
    t.string "name"
    t.string "email"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.string "password_digest"
    t.string "remember_digest"
    t.boolean "admin"
    t.string "activation_digest"
    t.boolean "activated", default: false
    t.datetime "activated_at"
    t.string "reset_digest"
    t.datetime "reset_sent_at"
    t.index ["email"], name: "index_users_on_email", unique: true
  end
end
| 39.571429 | 86 | 0.750903 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.