# Gitaly note: JV: two sets of straightforward RPC's. 1 Hard RPC: fork_repository.
# SSH key operations are not part of Gitaly so will never be migrated.
require 'securerandom'
module Gitlab
class Shell
GITLAB_SHELL_ENV_VARS = %w(GIT_TERMINAL_PROMPT).freeze
Error = Class.new(StandardError)
KeyAdder = Struct.new(:io) do
def add_key(id, key)
key = Gitlab::Shell.strip_key(key)
# Newline and tab are part of the 'protocol' used to transmit id+key to the other end
if key.include?("\t") || key.include?("\n")
raise Error.new("Invalid key: #{key.inspect}")
end
io.puts("#{id}\t#{key}")
end
end
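# Illustrative sketch (not from the original file): the KeyAdder above writes
# plain "id\tkey" lines, so any IO-like object works; StringIO here is an
# assumption for demonstration, while production code passes an IO.popen pipe.
#
#   require 'stringio'
#   io = StringIO.new
#   Gitlab::Shell::KeyAdder.new(io).add_key('key-1', 'ssh-rsa AAAA... user@host')
#   io.string # => "key-1\tssh-rsa AAAA...\n"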
class << self
def secret_token
@secret_token ||= begin
File.read(Gitlab.config.gitlab_shell.secret_file).chomp
end
end
def ensure_secret_token!
return if File.exist?(File.join(Gitlab.config.gitlab_shell.path, '.gitlab_shell_secret'))
generate_and_link_secret_token
end
def version_required
@version_required ||= File.read(Rails.root
.join('GITLAB_SHELL_VERSION')).strip
end
def strip_key(key)
key.split(/[ ]+/)[0, 2].join(' ')
end
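# Ex. (behaviour sketch: only the key type and body survive, the trailing
# comment is dropped)
#
#   strip_key("ssh-rsa AAAA... [email protected]") # => "ssh-rsa AAAA..."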
private
# Create (if necessary) and link the secret token file
def generate_and_link_secret_token
secret_file = Gitlab.config.gitlab_shell.secret_file
shell_path = Gitlab.config.gitlab_shell.path
unless File.size?(secret_file)
# Generate a new token of 16 random hexadecimal characters and store it in secret_file.
@secret_token = SecureRandom.hex(16)
File.write(secret_file, @secret_token)
end
link_path = File.join(shell_path, '.gitlab_shell_secret')
if File.exist?(shell_path) && !File.exist?(link_path)
FileUtils.symlink(secret_file, link_path)
end
end
end
# Init new repository
#
# storage - project's storage name
# name - project disk path
#
# Ex.
# create_repository("/path/to/storage", "gitlab/gitlab-ci")
#
def create_repository(storage, name)
relative_path = name.dup
relative_path << '.git' unless relative_path.end_with?('.git')
gitaly_migrate(:create_repository,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
if is_enabled
repository = Gitlab::Git::Repository.new(storage, relative_path, '')
repository.gitaly_repository_client.create_repository
true
else
repo_path = File.join(Gitlab.config.repositories.storages[storage].legacy_disk_path, relative_path)
Gitlab::Git::Repository.create(repo_path, bare: true, symlink_hooks_to: gitlab_shell_hooks_path)
end
end
rescue => err # Once the Rugged code gets removed this can be improved
Rails.logger.error("Failed to add repository #{storage}/#{name}: #{err}")
false
end
# Import repository
#
# storage - project's storage name
# name - project disk path
# url - URL to import from
#
# Ex.
# import_repository("nfs-file06", "gitlab/gitlab-ci", "https://gitlab.com/gitlab-org/gitlab-test.git")
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/874
def import_repository(storage, name, url)
if url.start_with?('.', '/')
raise Error.new("don't use disk paths with import_repository: #{url.inspect}")
end
# The timeout ensures the subprocess won't hang forever
cmd = gitlab_projects(storage, "#{name}.git")
success = cmd.import_project(url, git_timeout)
raise Error, cmd.output unless success
success
end
# Fetch remote for repository
#
# repository - an instance of Git::Repository
# remote - remote name
# ssh_auth - SSH known_hosts data and a private key to use for public-key authentication
# forced - should we use --force flag?
# no_tags - should we use --no-tags flag?
#
# Ex.
# fetch_remote(my_repo, "upstream")
#
def fetch_remote(repository, remote, ssh_auth: nil, forced: false, no_tags: false, prune: true)
gitaly_migrate(:fetch_remote) do |is_enabled|
if is_enabled
repository.gitaly_repository_client.fetch_remote(remote, ssh_auth: ssh_auth, forced: forced, no_tags: no_tags, timeout: git_timeout, prune: prune)
else
local_fetch_remote(repository.storage, repository.relative_path, remote, ssh_auth: ssh_auth, forced: forced, no_tags: no_tags, prune: prune)
end
end
end
# Move repository reroutes to mv_directory, which is an alias for
# mv_namespace, since the underlying implementation is a move action,
# indiscriminate of what the folders might be.
#
# storage - project's storage path
# path - project disk path
# new_path - new project disk path
#
# Ex.
# mv_repository("/path/to/storage", "gitlab/gitlab-ci", "randx/gitlab-ci-new")
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/873
def mv_repository(storage, path, new_path)
return false if path.empty? || new_path.empty?
!!mv_directory(storage, "#{path}.git", "#{new_path}.git")
end
# Fork repository to new path
# forked_from_storage - forked-from project's storage name
# forked_from_disk_path - project disk relative path
# forked_to_storage - forked-to project's storage name
# forked_to_disk_path - forked project disk relative path
#
# Ex.
# fork_repository("nfs-file06", "gitlab/gitlab-ci", "nfs-file07", "new-namespace/gitlab-ci")
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/817
def fork_repository(forked_from_storage, forked_from_disk_path, forked_to_storage, forked_to_disk_path)
gitlab_projects(forked_from_storage, "#{forked_from_disk_path}.git")
.fork_repository(forked_to_storage, "#{forked_to_disk_path}.git")
end
# Removes a repository from the file system, using rm_directory, which is an
# alias for rm_namespace, since the underlying implementation removes the
# directory named by the second argument from the given storage.
#
# storage - project's storage path
# name - project disk path
#
# Ex.
# remove_repository("/path/to/storage", "gitlab/gitlab-ci")
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/873
def remove_repository(storage, name)
return false if name.empty?
!!rm_directory(storage, "#{name}.git")
rescue ArgumentError => e
Rails.logger.warn("Repository does not exist: #{e} at: #{name}.git")
false
end
# Add new key to gitlab-shell
#
# Ex.
# add_key("key-42", "ssh-rsa ...")
#
def add_key(key_id, key_content)
return unless self.authorized_keys_enabled?
gitlab_shell_fast_execute([gitlab_shell_keys_path,
'add-key', key_id, self.class.strip_key(key_content)])
end
# Batch-add keys to authorized_keys
#
# Ex.
# batch_add_keys { |adder| adder.add_key("key-42", "ssh-rsa ...") }
def batch_add_keys(&block)
return unless self.authorized_keys_enabled?
IO.popen(%W(#{gitlab_shell_path}/bin/gitlab-keys batch-add-keys), 'w') do |io|
yield(KeyAdder.new(io))
end
end
# Remove ssh key from gitlab shell
#
# Ex.
# remove_key("key-342", "ssh-rsa ...")
#
def remove_key(key_id, key_content = nil)
return unless self.authorized_keys_enabled?
args = [gitlab_shell_keys_path, 'rm-key', key_id]
args << key_content if key_content
gitlab_shell_fast_execute(args)
end
# Remove all ssh keys from gitlab shell
#
# Ex.
# remove_all_keys
#
def remove_all_keys
return unless self.authorized_keys_enabled?
gitlab_shell_fast_execute([gitlab_shell_keys_path, 'clear'])
end
# Remove ssh keys from gitlab shell that are not in the DB
#
# Ex.
# remove_keys_not_found_in_db
#
def remove_keys_not_found_in_db
return unless self.authorized_keys_enabled?
Rails.logger.info("Removing keys not found in DB")
batch_read_key_ids do |ids_in_file|
ids_in_file.uniq!
keys_in_db = Key.where(id: ids_in_file)
next unless ids_in_file.size > keys_in_db.count # optimization
ids_to_remove = ids_in_file - keys_in_db.pluck(:id)
ids_to_remove.each do |id|
Rails.logger.info("Removing key-#{id} not found in DB")
remove_key("key-#{id}")
end
end
end
# Iterate over all ssh key IDs from gitlab shell, in batches
#
# Ex.
# batch_read_key_ids { |batch| keys = Key.where(id: batch) }
#
def batch_read_key_ids(batch_size: 100, &block)
return unless self.authorized_keys_enabled?
list_key_ids do |key_id_stream|
key_id_stream.lazy.each_slice(batch_size) do |lines|
key_ids = lines.map { |l| l.chomp.to_i }
yield(key_ids)
end
end
end
# Stream all ssh key IDs from gitlab shell, separated by newlines
#
# Ex.
# list_key_ids
#
def list_key_ids(&block)
return unless self.authorized_keys_enabled?
IO.popen(%W(#{gitlab_shell_path}/bin/gitlab-keys list-key-ids), &block)
end
# Add empty directory for storing repositories
#
# Ex.
# add_namespace("/path/to/storage", "gitlab")
#
def add_namespace(storage, name)
Gitlab::GitalyClient.migrate(:add_namespace,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
if enabled
gitaly_namespace_client(storage).add(name)
else
path = full_path(storage, name)
FileUtils.mkdir_p(path, mode: 0770) unless exists?(storage, name)
end
end
rescue Errno::EEXIST => e
Rails.logger.warn("Directory exists as a file: #{e} at: #{path}")
rescue GRPC::InvalidArgument => e
raise ArgumentError, e.message
end
# Remove directory from repositories storage
# Every repository inside this directory will be removed too
#
# Ex.
# rm_namespace("/path/to/storage", "gitlab")
#
def rm_namespace(storage, name)
Gitlab::GitalyClient.migrate(:remove_namespace,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
if enabled
gitaly_namespace_client(storage).remove(name)
else
FileUtils.rm_r(full_path(storage, name), force: true)
end
end
rescue GRPC::InvalidArgument => e
raise ArgumentError, e.message
end
alias_method :rm_directory, :rm_namespace
# Move namespace directory inside repositories storage
#
# Ex.
# mv_namespace("/path/to/storage", "gitlab", "gitlabhq")
#
def mv_namespace(storage, old_name, new_name)
Gitlab::GitalyClient.migrate(:rename_namespace,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
if enabled
gitaly_namespace_client(storage).rename(old_name, new_name)
else
break false if exists?(storage, new_name) || !exists?(storage, old_name)
FileUtils.mv(full_path(storage, old_name), full_path(storage, new_name))
end
end
rescue GRPC::InvalidArgument
false
end
alias_method :mv_directory, :mv_namespace
def url_to_repo(path)
Gitlab.config.gitlab_shell.ssh_path_prefix + "#{path}.git"
end
# Return GitLab shell version
def version
gitlab_shell_version_file = "#{gitlab_shell_path}/VERSION"
if File.readable?(gitlab_shell_version_file)
File.read(gitlab_shell_version_file).chomp
end
end
# Check if such directory exists in repositories.
#
# Usage:
# exists?(storage, 'gitlab')
# exists?(storage, 'gitlab/cookies.git')
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385
def exists?(storage, dir_name)
Gitlab::GitalyClient.migrate(:namespace_exists,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
if enabled
gitaly_namespace_client(storage).exists?(dir_name)
else
File.exist?(full_path(storage, dir_name))
end
end
end
protected
def gitlab_shell_path
File.expand_path(Gitlab.config.gitlab_shell.path)
end
def gitlab_shell_hooks_path
File.expand_path(Gitlab.config.gitlab_shell.hooks_path)
end
def gitlab_shell_user_home
File.expand_path("~#{Gitlab.config.gitlab_shell.ssh_user}")
end
def full_path(storage, dir_name)
raise ArgumentError.new("Directory name can't be blank") if dir_name.blank?
File.join(storage, dir_name)
end
def gitlab_shell_projects_path
File.join(gitlab_shell_path, 'bin', 'gitlab-projects')
end
def gitlab_shell_keys_path
File.join(gitlab_shell_path, 'bin', 'gitlab-keys')
end
def authorized_keys_enabled?
# Return true if nil to ensure the authorized_keys methods work while
# fixing the authorized_keys file during migration.
return true if Gitlab::CurrentSettings.current_application_settings.authorized_keys_enabled.nil?
Gitlab::CurrentSettings.current_application_settings.authorized_keys_enabled
end
private
def gitlab_projects(shard_name, disk_path)
Gitlab::Git::GitlabProjects.new(
shard_name,
disk_path,
global_hooks_path: Gitlab.config.gitlab_shell.hooks_path,
logger: Rails.logger
)
end
def local_fetch_remote(storage_name, repository_relative_path, remote, ssh_auth: nil, forced: false, no_tags: false, prune: true)
vars = { force: forced, tags: !no_tags, prune: prune }
if ssh_auth&.ssh_import?
if ssh_auth.ssh_key_auth? && ssh_auth.ssh_private_key.present?
vars[:ssh_key] = ssh_auth.ssh_private_key
end
if ssh_auth.ssh_known_hosts.present?
vars[:known_hosts] = ssh_auth.ssh_known_hosts
end
end
cmd = gitlab_projects(storage_name, repository_relative_path)
success = cmd.fetch_remote(remote, git_timeout, vars)
raise Error, cmd.output unless success
success
end
def gitlab_shell_fast_execute(cmd)
output, status = gitlab_shell_fast_execute_helper(cmd)
return true if status.zero?
Rails.logger.error("gitlab-shell failed with error #{status}: #{output}")
false
end
def gitlab_shell_fast_execute_raise_error(cmd, vars = {})
output, status = gitlab_shell_fast_execute_helper(cmd, vars)
raise Error, output unless status.zero?
true
end
def gitlab_shell_fast_execute_helper(cmd, vars = {})
vars.merge!(ENV.to_h.slice(*GITLAB_SHELL_ENV_VARS))
# Don't pass along the entire parent environment to prevent gitlab-shell
# from wasting I/O by searching through GEM_PATH
Bundler.with_original_env { Popen.popen(cmd, nil, vars) }
end
def gitaly_namespace_client(storage_path)
storage, _value = Gitlab.config.repositories.storages.find do |storage, value|
value.legacy_disk_path == storage_path
end
Gitlab::GitalyClient::NamespaceService.new(storage)
end
def git_timeout
Gitlab.config.gitlab_shell.git_timeout
end
def gitaly_migrate(method, status: Gitlab::GitalyClient::MigrationStatus::OPT_IN, &block)
Gitlab::GitalyClient.migrate(method, status: status, &block)
rescue GRPC::NotFound, GRPC::BadStatus => e
# Old Popen code returns [Error, output] to the caller, so we
# need to do the same here...
raise Error, e
end
end
end
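
# Minimal standalone sketch (assumed semantics, not GitLab's actual client) of
# the feature-toggle pattern used throughout Gitlab::Shell above: a migration
# name is looked up in a flag store and the block receives whether the Gitaly
# code path is enabled, with the OPT_IN/OPT_OUT status as the default.
module ToyGitalyClient
  FLAGS = { fetch_remote: true }.freeze

  def self.migrate(name, status: :opt_in)
    enabled = FLAGS.fetch(name, status == :opt_out)
    yield(enabled)
  end
end

ToyGitalyClient.migrate(:fetch_remote) do |is_enabled|
  is_enabled ? 'gitaly RPC path' : 'legacy disk path'
end
# => "gitaly RPC path"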

module Factory
class ProducePlanJob < ApplicationJob
def perform
SceneAutomatic.all.each do |scene_automatic|
(Date.today .. Date.today + scene_automatic.advance_days).each do |produce_on|
produce_plan = ProducePlan.find_or_create_by(scene_id: scene_automatic.scene_id, organ_id: scene_automatic.organ_id, produce_on: produce_on)
produce_plan.automatic
end
end
end
end
end
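
# Illustrative note (dates assumed, not part of the original job): with
# advance_days = 2 the range above covers today plus the next two days, so a
# plan is ensured for three produce_on dates per scene:
#
#   (Date.new(2024, 1, 1)..Date.new(2024, 1, 1) + 2).to_a
#   # => [2024-01-01, 2024-01-02, 2024-01-03]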

# frozen_string_literal: true
require 'rails'
module RailsMasterKeyKmsDecrypter
module WithKmsEncryptedConfiguration
def encrypted(path, key_path: 'config/master.key', env_key: 'RAILS_MASTER_KEY')
RailsMasterKeyKmsDecrypter::KmsEncryptedConfiguration.new(
config_path: Rails.root.join(path),
key_path: Rails.root.join(key_path),
env_key: env_key,
raise_if_missing_key: config.require_master_key
)
end
end
class Railtie < ::Rails::Railtie
::Rails::Application.prepend(WithKmsEncryptedConfiguration)
end
end
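
# Hypothetical usage sketch: with the Railtie above loaded, the standard Rails
# encrypted-configuration call is served by the KMS-aware class, so callers
# keep the familiar interface (the path and accessor below are illustrative):
#
#   Rails.application.encrypted('config/credentials/production.yml.enc').api_key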
| 27.285714 | 83 | 0.73822 |
38b8d920fc11be67f7f7b6293858ca118a09c488 | 1,296 | require 'cinch'
require 'vpsfree-irc-bot/api'
require 'vpsfree-irc-bot/command'
require 'vpsfree-irc-bot/helpers'
module VpsFree::Irc::Bot
class Cluster
include Cinch::Plugin
include Command
include Helpers
include Api
command :status do
desc 'show cluster status'
end
def cmd_status(m, channel)
client do |api|
nodes = api.node.public_status
# Have to access status via attributes, because status is also a subresource
down = nodes.count { |n| !n.attributes[:status] && n.maintenance_lock == 'no' }
maintenance = nodes.count { |n| n.maintenance_lock != 'no' }
online = nodes.size - down - maintenance
reply(m, "#{online} nodes online, #{maintenance} under maintenance, #{down} down")
if maintenance > 0
reply(
m,
"Under maintenance: "+
nodes.select { |n|
n.maintenance_lock != 'no'
}.map { |n| n.name }.join(', ')
)
end
if down > 0
reply(
m,
"Down: "+
nodes.select { |n|
!n.attributes[:status] && n.maintenance_lock == 'no'
}.map { |n| n.name }.join(', ')
)
end
end
end
end
end
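
# Illustrative sketch with assumed sample data, showing how the three counters
# in cmd_status partition the node list (a node under maintenance is never
# counted as down, whatever its status flag says):
nodes = [
  { status: true,  maintenance_lock: 'no' },     # online
  { status: false, maintenance_lock: 'no' },     # down
  { status: false, maintenance_lock: 'manual' }, # maintenance
]
down        = nodes.count { |n| !n[:status] && n[:maintenance_lock] == 'no' }
maintenance = nodes.count { |n| n[:maintenance_lock] != 'no' }
online      = nodes.size - down - maintenance
# => online == 1, maintenance == 1, down == 1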

#
# Cookbook Name:: mysql
# Recipe:: default
#
# Copyright 2008-2011, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe "mysql::client"
include_recipe "database::client"
ha_enabled = node[:database][:ha][:enabled]
# For Crowbar, we need to set the address to bind - default to admin node.
addr = node[:database][:mysql][:bind_address] || ""
newaddr = Chef::Recipe::Barclamp::Inventory.get_network_by_type(node, "admin").address
if addr != newaddr
node[:database][:mysql][:bind_address] = newaddr
node.save
end
package "mysql-server" do
package_name "mysql" if node[:platform_family] == "suse"
action :install
end
case node[:platform_family]
when "rhel", "fedora"
mysql_service_name = "mysqld"
else
mysql_service_name = "mysql"
end
service "mysql" do
service_name mysql_service_name
if ha_enabled
supports status: true,
restart: true,
reload: true,
restart_crm_resource: true,
pacemaker_resource: "galera",
crm_resource_stop_cmd: "force-demote",
crm_resource_start_cmd: "force-promote"
else
supports status: true,
restart: true,
reload: true
end
action :enable
provider Chef::Provider::CrowbarPacemakerService if ha_enabled
end
utils_systemd_service_restart "mysql" do
action ha_enabled ? :disable : :enable
end
directory node[:database][:mysql][:tmpdir] do
owner "mysql"
group "mysql"
mode "0700"
action :create
end
script "handle mysql restart" do
interpreter "bash"
action :nothing
code <<EOC
service mysql stop
rm /var/lib/mysql/ib_logfile?
service mysql start
EOC
end
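# Note (assumed rationale, not stated in the recipe): deleting ib_logfile?
# before restarting lets InnoDB recreate its redo logs, which is needed when
# innodb_log_file_size changes; older MySQL versions refuse to start if the
# files on disk don't match the configured size.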
if node[:database][:mysql][:ssl][:enabled]
ssl_setup "setting up ssl for mysql" do
generate_certs node[:database][:mysql][:ssl][:generate_certs]
keyfile node[:database][:mysql][:ssl][:keyfile]
certfile node[:database][:mysql][:ssl][:certfile]
ca_certs node[:database][:mysql][:ssl][:ca_certs]
cert_required !(
node[:database][:mysql][:ssl][:generate_certs] ||
node[:database][:mysql][:ssl][:insecure])
group "mysql"
fqdn CrowbarDatabaseHelper.get_listen_address(node)
end
end
template "/etc/my.cnf.d/openstack.cnf" do
source "my.cnf.erb"
owner "root"
group "mysql"
mode "0640"
notifies :restart, "service[mysql]", :immediately
end
template "/etc/my.cnf.d/ssl.cnf" do
source "ssl.cnf.erb"
owner "root"
group "mysql"
mode "0640"
notifies :restart, "service[mysql]", :immediately
end
template "/etc/my.cnf.d/logging.cnf" do
source "logging.cnf.erb"
owner "root"
group "mysql"
mode "0640"
variables(
slow_query_logging_enabled: node[:database][:mysql][:slow_query_logging]
)
notifies :restart, "service[mysql]", :immediately
end
template "/etc/my.cnf.d/tuning.cnf" do
source "tuning.cnf.erb"
owner "root"
group "mysql"
mode "0640"
variables(
innodb_buffer_pool_size: node[:database][:mysql][:innodb_buffer_pool_size],
innodb_flush_log_at_trx_commit: node[:database][:mysql][:innodb_flush_log_at_trx_commit],
innodb_buffer_pool_instances: node[:database][:mysql][:innodb_buffer_pool_instances],
max_connections: node[:database][:mysql][:max_connections],
tmp_table_size: node[:database][:mysql][:tmp_table_size],
max_heap_table_size: node[:database][:mysql][:max_heap_table_size]
)
notifies :restart, "service[mysql]", :immediately
end
unless Chef::Config[:solo]
ruby_block "save node data" do
block do
node.save
end
action :create
end
end
if ha_enabled
log "HA support for mysql is enabled"
include_recipe "mysql::ha_galera"
else
log "HA support for mysql is disabled"
end
server_root_password = node[:database][:mysql][:server_root_password]
execute "assign-root-password" do
command "/usr/bin/mysqladmin -u root password \"#{server_root_password}\""
action :run
not_if { ha_enabled } # password already set as part of the ha bootstrap
only_if "/usr/bin/mysql -u root -e 'show databases;'"
end
db_settings = fetch_database_settings
db_connection = db_settings[:connection].dup
db_connection[:host] = "localhost"
db_connection[:username] = "root"
db_connection[:password] = node[:database][:mysql][:server_root_password]
db_connection[:ssl] = {}
unless node[:database][:database_bootstrapped]
database_user "create db_maker database user" do
connection db_connection
username "db_maker"
password node[:database][:db_maker_password]
host "%"
provider db_settings[:user_provider]
action :create
only_if { !ha_enabled || CrowbarPacemakerHelper.is_cluster_founder?(node) }
end
database_user "grant db_maker access" do
connection db_connection
username "db_maker"
password node[:database][:db_maker_password]
host "%"
privileges db_settings[:privs] + [
"ALTER ROUTINE",
"CREATE ROUTINE",
"CREATE TEMPORARY TABLES",
"CREATE USER",
"CREATE VIEW",
"EXECUTE",
"GRANT OPTION",
"LOCK TABLES",
"RELOAD",
"SHOW DATABASES",
"SHOW VIEW",
"TRIGGER"
]
provider db_settings[:user_provider]
require_ssl node[:database][:mysql][:ssl][:enabled]
action :grant
only_if { !ha_enabled || CrowbarPacemakerHelper.is_cluster_founder?(node) }
end
database "drop test database" do
connection db_connection
database_name "test"
provider db_settings[:provider]
action :drop
only_if { !ha_enabled || CrowbarPacemakerHelper.is_cluster_founder?(node) }
end
["localhost", node[:hostname]].each do |hostname|
database_user "drop anonymous database user at #{hostname}" do
connection db_connection
username ""
host hostname
provider db_settings[:user_provider]
action :drop
only_if { !ha_enabled || CrowbarPacemakerHelper.is_cluster_founder?(node) }
end
end
# Drop unneeded root users, we only use root access via unix domain socket
["127.0.0.1", "::1", node[:hostname]].each do |hostname|
database_user "drop unneeded root database user at #{hostname}" do
connection db_connection
username "root"
host hostname
provider db_settings[:user_provider]
action :drop
only_if { !ha_enabled || CrowbarPacemakerHelper.is_cluster_founder?(node) }
end
end
end
ruby_block "mark node for database bootstrap" do
block do
node.set[:database][:database_bootstrapped] = true
node.save
end
not_if { node[:database][:database_bootstrapped] }
end
directory "/var/log/mysql/" do
owner "mysql"
group "root"
mode "0755"
action :create
end
directory "/var/run/mysqld/" do
owner "mysql"
group "root"
mode "0755"
action :create
end

# frozen_string_literal: true
class AddValidRangeForLeaveTime < ActiveRecord::Migration[5.0]
def change
remove_column :leave_times, :year, :integer
add_column :leave_times, :effective_date, :date
add_column :leave_times, :expiration_date, :date
LeaveTime.all.each do |leave_time|
leave_time.update(
effective_date: Date.new(2016, 1, 1),
expiration_date: Date.new(2016, 12, 31)
)
end
reversible do |dir|
dir.up do
execute 'alter table leave_times alter column effective_date set default now()'
execute 'alter table leave_times alter column expiration_date set default now()'
change_column :leave_times, :effective_date, :date, null: false
change_column :leave_times, :expiration_date, :date, null: false
change_column :leave_times, :user_id, :integer, null: false
end
dir.down do
change_column :leave_times, :user_id, :integer, null: true
end
end
end
end
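
# Note (assumed rationale): the raw execute statements set a database-side
# DEFAULT of now(), a SQL expression the schema helpers used here don't
# express as a plain Ruby default, which is why the up direction mixes SQL
# with change_column while the down direction only relaxes the NOT NULL.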

class Libtensorflow < Formula
desc "C interface for Google's OS library for Machine Intelligence"
homepage "https://www.tensorflow.org/"
url "https://github.com/tensorflow/tensorflow/archive/refs/tags/v2.6.2.tar.gz"
sha256 "e68c1d346fc3d529653530ca346b2c62f5b31bd4fcca7ffc9c65bb39ab2f6ed3"
license "Apache-2.0"
bottle do
sha256 cellar: :any, big_sur: "9a28c0436f90030375d4969e216bbc9a21cab227b10c1dcefb363a75e48c5246"
sha256 cellar: :any, catalina: "01aff263c54697b77bf30db815df42d3c9ffb46bc3e79d2d4448302b773aa845"
end
depends_on "bazel" => :build
depends_on "numpy" => :build
depends_on "[email protected]" => :build
resource "test-model" do
url "https://github.com/tensorflow/models/raw/v1.13.0/samples/languages/java/training/model/graph.pb"
sha256 "147fab50ddc945972818516418942157de5e7053d4b67e7fca0b0ada16733ecb"
end
def install
# Allow tensorflow to use current version of bazel
(buildpath / ".bazelversion").atomic_write Formula["bazel"].version
ENV["PYTHON_BIN_PATH"] = Formula["[email protected]"].opt_bin/"python3"
ENV["CC_OPT_FLAGS"] = "-march=native"
ENV["TF_IGNORE_MAX_BAZEL_VERSION"] = "1"
ENV["TF_NEED_JEMALLOC"] = "1"
ENV["TF_NEED_GCP"] = "0"
ENV["TF_NEED_HDFS"] = "0"
ENV["TF_ENABLE_XLA"] = "0"
ENV["USE_DEFAULT_PYTHON_LIB_PATH"] = "1"
ENV["TF_NEED_OPENCL"] = "0"
ENV["TF_NEED_CUDA"] = "0"
ENV["TF_NEED_MKL"] = "0"
ENV["TF_NEED_VERBS"] = "0"
ENV["TF_NEED_MPI"] = "0"
ENV["TF_NEED_S3"] = "1"
ENV["TF_NEED_GDR"] = "0"
ENV["TF_NEED_KAFKA"] = "0"
ENV["TF_NEED_OPENCL_SYCL"] = "0"
ENV["TF_NEED_ROCM"] = "0"
ENV["TF_DOWNLOAD_CLANG"] = "0"
ENV["TF_SET_ANDROID_WORKSPACE"] = "0"
ENV["TF_CONFIGURE_IOS"] = "0"
system "./configure"
bazel_args = %W[
--jobs=#{ENV.make_jobs}
--compilation_mode=opt
--copt=-march=native
]
targets = %w[
tensorflow:libtensorflow.so
tensorflow:install_headers
tensorflow/tools/benchmark:benchmark_model
tensorflow/tools/graph_transforms:summarize_graph
tensorflow/tools/graph_transforms:transform_graph
]
system "bazel", "build", *bazel_args, *targets
lib.install Dir["bazel-bin/tensorflow/*.so*", "bazel-bin/tensorflow/*.dylib*"]
include.install "bazel-bin/tensorflow/include/tensorflow"
bin.install %w[
bazel-bin/tensorflow/tools/benchmark/benchmark_model
bazel-bin/tensorflow/tools/graph_transforms/summarize_graph
bazel-bin/tensorflow/tools/graph_transforms/transform_graph
]
(lib/"pkgconfig/tensorflow.pc").write <<~EOS
Name: tensorflow
Description: Tensorflow library
Version: #{version}
Libs: -L#{lib} -ltensorflow
Cflags: -I#{include}
EOS
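# Hypothetical consumer usage of the pkg-config file written above:
#   cc my_app.c $(pkg-config --cflags --libs tensorflow) -o my_app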
end
test do
(testpath/"test.c").write <<~EOS
#include <stdio.h>
#include <tensorflow/c/c_api.h>
int main() {
printf("%s", TF_Version());
}
EOS
system ENV.cc, "-L#{lib}", "-ltensorflow", "-o", "test_tf", "test.c"
assert_equal version, shell_output("./test_tf")
resource("test-model").stage(testpath)
summarize_graph_output = shell_output("#{bin}/summarize_graph --in_graph=#{testpath}/graph.pb 2>&1")
variables_match = /Found \d+ variables:.+$/.match(summarize_graph_output)
refute_nil variables_match, "Unexpected stdout from summarize_graph for graph.pb (no found variables)"
variables_names = variables_match[0].scan(/name=([^,]+)/).flatten.sort
transform_command = %W[
#{bin}/transform_graph
--in_graph=#{testpath}/graph.pb
--out_graph=#{testpath}/graph-new.pb
--inputs=n/a
--outputs=n/a
--transforms="obfuscate_names"
2>&1
].join(" ")
shell_output(transform_command)
assert_predicate testpath/"graph-new.pb", :exist?, "transform_graph did not create an output graph"
new_summarize_graph_output = shell_output("#{bin}/summarize_graph --in_graph=#{testpath}/graph-new.pb 2>&1")
new_variables_match = /Found \d+ variables:.+$/.match(new_summarize_graph_output)
refute_nil new_variables_match, "Unexpected summarize_graph output for graph-new.pb (no found variables)"
new_variables_names = new_variables_match[0].scan(/name=([^,]+)/).flatten.sort
refute_equal variables_names, new_variables_names, "transform_graph didn't obfuscate variable names"
benchmark_model_match = /benchmark_model -- (.+)$/.match(new_summarize_graph_output)
refute_nil benchmark_model_match,
"Unexpected summarize_graph output for graph-new.pb (no benchmark_model example)"
benchmark_model_args = benchmark_model_match[1].split
benchmark_model_args.delete("--show_flops")
benchmark_model_command = [
"#{bin}/benchmark_model",
"--time_limit=10",
"--num_threads=1",
*benchmark_model_args,
"2>&1",
].join(" ")
assert_includes shell_output(benchmark_model_command),
"Timings (microseconds):",
"Unexpected benchmark_model output (no timings)"
end
end

class SilenceSidekiqLogging
def call(_worker, _job, _queue, &block)
ActiveRecord::Base.logger.silence(&block)
end
end
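
# Hypothetical registration sketch, assuming this class is meant as Sidekiq
# server middleware (its (_worker, _job, _queue, &block) signature matches
# that interface):
#
#   Sidekiq.configure_server do |config|
#     config.server_middleware do |chain|
#       chain.add SilenceSidekiqLogging
#     end
#   end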

# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rake db:seed (or created alongside the db with db:setup).
puts "Creating sports"
football = Sport.create :title => "Football"
Sport.create :title => "Cricket", :uses_scores => false, :uses_manual_points => true
Sport.create :title => "Tennis", :uses_scores => true
Sport.create :title => "Table tennis", :uses_scores => true, :uses_teams => false
Sport.create :title => "Badminton", :uses_scores => true, :uses_teams => false
Sport.create :title => "Hockey", :uses_scores => true
Sport.create :title => "Rugby", :uses_scores => true, :uses_manual_points => true
Sport.create :title => "Chess", :uses_scores => true, :uses_teams => false
Sport.create :title => "Bowls", :uses_scores => true
puts "Creating invites"
20.times do
temp_code = rand(99999999999) + 1000000000000
Invite.create(:code => temp_code.to_s)
end
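# Note: rand can repeat, so the codes above are not guaranteed unique. A
# collision-resistant alternative (an assumption, not part of the original
# seeds) would be something like:
#   Invite.create(:code => SecureRandom.hex(10))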
puts "Creating organisations"
test_organisation = Organisation.new(
:title => "Test Organisation",
:nickname => "test",
:summary => "A simple little organisation for testing with.",
:theme => 'classic',
:sport_id => football.id
)
test_organisation.save
puts "Creating seasons"
season = test_organisation.seasons.build(
:title => '2012'
)
season.save
puts "Creating user"
user = test_organisation.users.build(
:email => '[email protected]',
:password => 'password',
:name => 'Terry Tester'
)
user.save
content = "h3. Mea id tempor laoreet suscipit.
Tale dicit iracundia eu vel. Congue percipit in vis. His impetus intellegebat at, ut pri sumo aliquid deserunt. An qui viris senserit partiendo, eu pro mollis viderer, veri maiestatis cotidieque pri ut. Velit euripidis no vel.
h3. Timeam praesent consectetuer qui et.
Nam omittam contentiones ut. Ius ad harum incorrupte, reque graece molestie no eos. Id semper aliquip facilisis mea, doming erroribus ei vel, iudico vocibus dissentiunt ne quo. Cu delenit expetenda qui. Sea an vero putant, aperiri sensibus antiopam ne vel, ad est delectus volutpat complectitur.
Ad eos habemus delectus sensibus. Mel diam delenit et, eum ex iisque splendide. Has prima facilisis at. Nec ut omnis noster vituperatoribus, malorum forensibus contentiones ei est."
puts "Creating pages"
test_organisation.pages.build(
:title => "Rules and regulations",
:position => 1,
:content => content
).save
test_organisation.pages.build(
:title => "Child safety policy",
:position => 2,
:content => content
).save
test_organisation.pages.build(
:title => "Disciplinary procedures",
:position => 3,
:content => content
).save
test_organisation.pages.build(
:title => "League committee",
:position => 4,
:content => content
).save
test_organisation.pages.build(
:title => "Sports hall locations",
:position => 5,
:content => content
).save
test_organisation.pages.build(
:title => "Press office",
:position => 6,
:content => content
).save
test_organisation.pages.build(
:title => "Records",
:position => 7,
:content => content
).save
puts "Creating notices"
test_organisation.notices.build(
:user_id => user.id,
:heading => 'Welcome to the test organisation!',
:created_on => 20.weeks.ago,
:updated_on => 20.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "Don't forget to pay your league subs!",
:created_on => 19.weeks.ago,
:updated_on => 19.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "Please leave all facilities clean after matches",
:created_on => 18.weeks.ago,
:updated_on => 18.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "Get your tickets for the annual dinner and awards night",
:created_on => 17.weeks.ago,
:updated_on => 17.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "Please give referees the respect they deserve",
:created_on => 16.weeks.ago,
:updated_on => 16.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "Rules have been updated",
:created_on => 15.weeks.ago,
:updated_on => 15.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "Don't forget to vote for player of the year",
:created_on => 14.weeks.ago,
:updated_on => 14.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "Main sports hall closed for painting work",
:created_on => 13.weeks.ago,
:updated_on => 13.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "Fines increased for abusive behaviour",
:created_on => 12.weeks.ago,
:updated_on => 12.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "Charity matches a huge success",
:created_on => 11.weeks.ago,
:updated_on => 11.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "Fixtures changes are now live",
:created_on => 10.weeks.ago,
:updated_on => 10.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "Don't forget all players must wear shin pads",
:created_on => 9.weeks.ago,
:updated_on => 9.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "Sports hall car park out of action for a week",
:created_on => 8.weeks.ago,
:updated_on => 8.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "New player registration forms ready to roll",
:created_on => 7.weeks.ago,
:updated_on => 7.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "New league treasurer needed",
:created_on => 6.weeks.ago,
:updated_on => 6.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "New teams wanted",
:created_on => 5.weeks.ago,
:updated_on => 5.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "Team sheet format change coming - prepare now",
:created_on => 4.weeks.ago,
:updated_on => 4.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "New balls have been ordered",
:created_on => 3.weeks.ago,
:updated_on => 3.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "New league sponser signs up",
:created_on => 2.weeks.ago,
:updated_on => 2.weeks.ago,
:content => content
).save
test_organisation.notices.build(
:user_id => user.id,
:heading => "Anybody intereted in playing friendlies?",
:created_on => 1.week.ago,
:updated_on => 1.week.ago,
:content => content
).save
puts "Creating teams"
team_a = test_organisation.teams.build( :title => 'Alpha Athletic', :slug => 'alpha-athletic' )
team_b = test_organisation.teams.build( :title => 'Bravo Bombers', :slug => 'bravo-bombers' )
team_c = test_organisation.teams.build( :title => 'Charlie City', :slug => 'charlie-city' )
team_d = test_organisation.teams.build( :title => 'Dynamo Deltas', :slug => 'dynamo-deltas' )
team_e = test_organisation.teams.build( :title => 'Echo Evergreens', :slug => 'echo-evergreens' )
team_f = test_organisation.teams.build( :title => 'FC Foxtrot', :slug => 'fc-foxtrot' )
team_g = test_organisation.teams.build( :title => 'Getahead Golfers', :slug => 'getahead-golfers' )
team_h = test_organisation.teams.build( :title => 'Hotel Hotspur', :slug => 'hotel-hotspur' )
team_i = test_organisation.teams.build( :title => 'Indigo Ingrates', :slug => 'indigo-ingrates' )
team_j = test_organisation.teams.build( :title => 'Juliet Jumpers', :slug => 'juliet-jumpers' )
team_k = test_organisation.teams.build( :title => 'Kilo Killers', :slug => 'kilo-killers' )
team_l = test_organisation.teams.build( :title => 'Lima Lollopers', :slug => 'lima-lollopers' )
team_m = test_organisation.teams.build( :title => 'Mike Mania', :slug => 'mike-mania' )
team_n = test_organisation.teams.build( :title => 'November Normals', :slug => 'november-normals' )
team_o = test_organisation.teams.build( :title => 'Oscar Overachievers', :slug => 'oscar-overachievers' )
team_p = test_organisation.teams.build( :title => 'Papa Pathetics', :slug => 'papa-pathetics' )
team_q = test_organisation.teams.build( :title => 'Quebec Quagmires', :slug => 'quebec-quagmires' )
team_r = test_organisation.teams.build( :title => 'Romeo Rovers', :slug => 'romeo-rovers' )
team_s = test_organisation.teams.build( :title => 'Sierra Shooters', :slug => 'sierra-shooters' )
team_t = test_organisation.teams.build( :title => 'Tango Town', :slug => 'tango-town' )
team_u = test_organisation.teams.build( :title => 'Uniform University', :slug => 'uniform-university' )
team_v = test_organisation.teams.build( :title => 'Victor Vase', :slug => 'victor-vase' )
team_w = test_organisation.teams.build( :title => 'Whiskey Wednesday', :slug => 'whiskey-wednesday' )
team_x = test_organisation.teams.build( :title => 'Xray Xenons', :slug => 'xray-xenons' )
team_y = test_organisation.teams.build( :title => 'Yankee Doodle Dandies', :slug => 'yankee-doodle-dandies' )
team_z = test_organisation.teams.build( :title => 'Zulu Zippers', :slug => 'zulu-zippers' )
team_1 = test_organisation.teams.build( :title => 'One Uppers', :slug => 'one-uppers' )
team_2 = test_organisation.teams.build( :title => 'Two Timers', :slug => 'two-timers' )
team_3 = test_organisation.teams.build( :title => 'Three Ways', :slug => 'three-ways' )
team_4 = test_organisation.teams.build( :title => 'Four Strokes', :slug => 'four-strokes' )
team_5 = test_organisation.teams.build( :title => 'Five Star', :slug => 'five-star' )
team_6 = test_organisation.teams.build( :title => 'Six Toes', :slug => 'six-toes' )
team_a.save
team_b.save
team_c.save
team_d.save
team_e.save
team_f.save
team_g.save
team_h.save
team_i.save
team_j.save
team_k.save
team_l.save
team_m.save
team_n.save
team_o.save
team_p.save
team_q.save
team_r.save
team_s.save
team_t.save
team_u.save
team_v.save
team_w.save
team_x.save
team_y.save
team_z.save
team_1.save
team_2.save
team_3.save
team_4.save
team_5.save
team_6.save
puts "Creating competitions"
division_one = season.competitions.build(
:title => 'Division One',
:slug => 'division-one',
:summary => 'The creme-de-la-creme.',
:position => 1
)
division_two = season.competitions.build(
:title => 'Division Two',
:slug => 'division-two',
:summary => 'The try hards.',
:position => 2
)
league_cup = season.competitions.build(
:title => 'League Cup',
:slug => 'league-cup',
:summary => 'Open to everyone.',
:position => 3
)
super_cup = season.competitions.build(
:title => 'Super Cup',
:slug => 'super-cup',
:summary => 'Only the best can compete.',
:position => 4
)
division_one.save
division_two.save
league_cup.save
super_cup.save
puts "Creating stages"
division_one_stage = division_one.stages.build(
:title => 'League table',
:slug => 'league-table',
:position => 1,
:automatic_promotion_places => 0,
:conditional_promotion_places => 0,
:automatic_relegation_places => 2,
:conditional_relegation_places => 0,
:is_knockout => false
)
division_two_stage = division_two.stages.build(
:title => 'League table',
:slug => 'league-table',
:position => 1,
:automatic_promotion_places => 0,
:conditional_promotion_places => 0,
:automatic_relegation_places => 2,
:conditional_relegation_places => 0,
:is_knockout => false
)
# 32 teams
league_cup_round_one = league_cup.stages.build(
:title => 'Round One',
:slug => 'round-one',
:position => 1,
:automatic_promotion_places => 1,
:conditional_promotion_places => 0,
:automatic_relegation_places => 0,
:conditional_relegation_places => 0,
:is_knockout => true
)
# 16 teams
league_cup_round_two = league_cup.stages.build(
:title => 'Round Two',
:slug => 'round-two',
:position => 2,
:automatic_promotion_places => 1,
:conditional_promotion_places => 0,
:automatic_relegation_places => 0,
:conditional_relegation_places => 0,
:is_knockout => true
)
# 8 teams
league_cup_round_three = league_cup.stages.build(
:title => 'Quarter Finals',
:slug => 'quarter-finals',
:position => 3,
:automatic_promotion_places => 1,
:conditional_promotion_places => 0,
:automatic_relegation_places => 0,
:conditional_relegation_places => 0,
:is_knockout => true
)
# 4 teams
league_cup_round_four = league_cup.stages.build(
:title => 'Semi Finals',
:slug => 'semi-finals',
:position => 4,
:automatic_promotion_places => 1,
:conditional_promotion_places => 0,
:automatic_relegation_places => 0,
:conditional_relegation_places => 0,
:is_knockout => true
)
# 2 teams
league_cup_round_five = league_cup.stages.build(
:title => 'Final',
:slug => 'final',
:position => 5,
:automatic_promotion_places => 1,
:conditional_promotion_places => 0,
:automatic_relegation_places => 0,
:conditional_relegation_places => 0,
:is_knockout => true
)
# Group stage (16 teams in 4 groups of 4)
super_cup_group_stage = super_cup.stages.build(
:title => 'Group stages',
:slug => 'group-stages',
:position => 1,
:automatic_promotion_places => 2,
:conditional_promotion_places => 0,
:automatic_relegation_places => 0,
:conditional_relegation_places => 0,
:is_knockout => false
)
# 8 teams
super_cup_quarterfinals = super_cup.stages.build(
:title => 'Quarter Finals',
:slug => 'quarter-finals',
:position => 2,
:automatic_promotion_places => 1,
:conditional_promotion_places => 0,
:automatic_relegation_places => 0,
:conditional_relegation_places => 0,
:is_knockout => true
)
# 4 teams
super_cup_semi_finals = super_cup.stages.build(
:title => 'Semi Finals',
:slug => 'semi-finals',
:position => 3,
:automatic_promotion_places => 1,
:conditional_promotion_places => 0,
:automatic_relegation_places => 0,
:conditional_relegation_places => 0,
:is_knockout => true
)
# 2 teams
super_cup_final = super_cup.stages.build(
:title => 'Final',
:slug => 'final',
:position => 4,
:automatic_promotion_places => 1,
:conditional_promotion_places => 0,
:automatic_relegation_places => 0,
:conditional_relegation_places => 0,
:is_knockout => true
)
division_one_stage.save
division_two_stage.save
league_cup_round_one.save
league_cup_round_two.save
league_cup_round_three.save
league_cup_round_four.save
league_cup_round_five.save
super_cup_group_stage.save
super_cup_quarterfinals.save
super_cup_semi_finals.save
super_cup_final.save
puts "Creating league groups"
division_one_group = division_one_stage.groups.build( :title => 'Table', :slug => 'table' )
division_two_group = division_two_stage.groups.build( :title => 'Table', :slug => 'table' )
puts "Putting teams in to league groups"
division_one_group.teams << team_a
division_one_group.teams << team_b
division_one_group.teams << team_c
division_one_group.teams << team_d
division_one_group.teams << team_e
division_one_group.teams << team_f
division_one_group.teams << team_g
division_one_group.teams << team_h
division_one_group.teams << team_i
division_one_group.teams << team_j
division_one_group.teams << team_k
division_one_group.teams << team_l
division_one_group.teams << team_m
division_one_group.teams << team_n
division_one_group.teams << team_o
division_one_group.teams << team_p
division_two_group.teams << team_q
division_two_group.teams << team_r
division_two_group.teams << team_s
division_two_group.teams << team_t
division_two_group.teams << team_u
division_two_group.teams << team_v
division_two_group.teams << team_w
division_two_group.teams << team_x
division_two_group.teams << team_y
division_two_group.teams << team_z
division_two_group.teams << team_1
division_two_group.teams << team_2
division_two_group.teams << team_3
division_two_group.teams << team_4
division_two_group.teams << team_5
division_two_group.teams << team_6
division_one_group.save
division_two_group.save
puts "Creating league games"
division_one_group.games.build( :kickoff => '2012-07-01 15:00:00', :played => true, :hometeam_id => team_a.id, :home_score => 2, :home_points => 3, :awayteam_id => team_b.id, :away_score => 1, :away_points => 0 ).save
division_one_group.games.build( :kickoff => '2012-07-01 15:00:00', :played => true, :hometeam_id => team_c.id, :home_score => 4, :home_points => 3, :awayteam_id => team_d.id, :away_score => 0, :away_points => 0).save
division_one_group.games.build( :kickoff => '2012-07-01 15:00:00', :played => true, :hometeam_id => team_e.id, :home_score => 0, :home_points => 0, :awayteam_id => team_f.id, :away_score => 4, :away_points => 3).save
division_one_group.games.build( :kickoff => '2012-07-01 15:00:00', :played => true, :hometeam_id => team_g.id, :home_score => 1, :home_points => 0, :awayteam_id => team_h.id, :away_score => 3, :away_points => 3).save
division_one_group.games.build( :kickoff => '2012-07-01 15:00:00', :played => true, :hometeam_id => team_i.id, :home_score => 0, :home_points => 1, :awayteam_id => team_j.id, :away_score => 0, :away_points => 1).save
division_one_group.games.build( :kickoff => '2012-07-01 15:00:00', :played => true, :hometeam_id => team_k.id, :home_score => 0, :home_points => 0, :awayteam_id => team_l.id, :away_score => 1, :away_points => 3).save
division_one_group.games.build( :kickoff => '2012-07-01 15:00:00', :played => true, :hometeam_id => team_m.id, :home_score => 1, :home_points => 0, :awayteam_id => team_n.id, :away_score => 2, :away_points => 3).save
division_one_group.games.build( :kickoff => '2012-07-01 15:00:00', :played => true, :hometeam_id => team_o.id, :home_score => 3, :home_points => 3, :awayteam_id => team_p.id, :away_score => 1, :away_points => 0).save
division_one_group.games.build( :kickoff => '2012-07-08 15:00:00', :played => true, :hometeam_id => team_b.id, :home_score => 0, :home_points => 0, :awayteam_id => team_c.id, :away_score => 1, :away_points => 3).save
division_one_group.games.build( :kickoff => '2012-07-08 15:00:00', :played => true, :hometeam_id => team_d.id, :home_score => 2, :home_points => 1, :awayteam_id => team_e.id, :away_score => 2, :away_points => 1).save
division_one_group.games.build( :kickoff => '2012-07-08 15:00:00', :played => true, :hometeam_id => team_f.id, :home_score => 1, :home_points => 0, :awayteam_id => team_g.id, :away_score => 3, :away_points => 3).save
division_one_group.games.build( :kickoff => '2012-07-08 15:00:00', :played => true, :hometeam_id => team_h.id, :home_score => 4, :home_points => 3, :awayteam_id => team_i.id, :away_score => 0, :away_points => 0).save
division_one_group.games.build( :kickoff => '2012-07-08 15:00:00', :played => true, :hometeam_id => team_j.id, :home_score => 5, :home_points => 3, :awayteam_id => team_k.id, :away_score => 2, :away_points => 0).save
division_one_group.games.build( :kickoff => '2012-07-08 15:00:00', :played => true, :hometeam_id => team_l.id, :home_score => 0, :home_points => 0, :awayteam_id => team_m.id, :away_score => 1, :away_points => 3).save
division_one_group.games.build( :kickoff => '2012-07-08 15:00:00', :played => true, :hometeam_id => team_n.id, :home_score => 3, :home_points => 3, :awayteam_id => team_o.id, :away_score => 0, :away_points => 0).save
division_one_group.games.build( :kickoff => '2012-07-08 15:00:00', :played => true, :hometeam_id => team_p.id, :home_score => 2, :home_points => 3, :awayteam_id => team_a.id, :away_score => 1, :away_points => 0).save
division_one_group.games.build( :kickoff => '2012-07-15 15:00:00', :hometeam_id => team_a.id, :awayteam_id => team_i.id).save
division_one_group.games.build( :kickoff => '2012-07-15 15:00:00', :hometeam_id => team_b.id, :awayteam_id => team_j.id).save
division_one_group.games.build( :kickoff => '2012-07-15 15:00:00', :hometeam_id => team_c.id, :awayteam_id => team_k.id).save
division_one_group.games.build( :kickoff => '2012-07-15 15:00:00', :hometeam_id => team_d.id, :awayteam_id => team_l.id).save
division_one_group.games.build( :kickoff => '2012-07-16 15:00:00', :hometeam_id => team_e.id, :awayteam_id => team_m.id).save
division_one_group.games.build( :kickoff => '2012-07-16 15:00:00', :hometeam_id => team_f.id, :awayteam_id => team_n.id).save
division_one_group.games.build( :kickoff => '2012-07-16 15:00:00', :hometeam_id => team_g.id, :awayteam_id => team_o.id).save
division_one_group.games.build( :kickoff => '2012-07-16 15:00:00', :hometeam_id => team_h.id, :awayteam_id => team_p.id).save
division_one_group.games.build( :kickoff => '2012-07-20 15:00:00', :hometeam_id => team_i.id, :awayteam_id => team_g.id).save
division_one_group.games.build( :kickoff => '2012-07-20 15:00:00', :hometeam_id => team_j.id, :awayteam_id => team_p.id).save
division_one_group.games.build( :kickoff => '2012-07-20 15:00:00', :hometeam_id => team_l.id, :awayteam_id => team_f.id).save
division_one_group.games.build( :kickoff => '2012-07-20 15:00:00', :hometeam_id => team_k.id, :awayteam_id => team_b.id).save
division_one_group.games.build( :kickoff => '2012-07-20 15:00:00', :hometeam_id => team_c.id, :awayteam_id => team_a.id).save
division_one_group.games.build( :kickoff => '2012-07-20 15:00:00', :hometeam_id => team_d.id, :awayteam_id => team_n.id).save
division_one_group.games.build( :kickoff => '2012-07-20 15:00:00', :hometeam_id => team_m.id, :awayteam_id => team_o.id).save
division_one_group.games.build( :kickoff => '2012-07-20 15:00:00', :hometeam_id => team_h.id, :awayteam_id => team_e.id).save
puts "Creating league cup groups"
league_cup_round_one_match01 = league_cup_round_one.groups.build( :title => 'Match 01', :slug => 'match01' )
league_cup_round_one_match02 = league_cup_round_one.groups.build( :title => 'Match 02', :slug => 'match02' )
league_cup_round_one_match03 = league_cup_round_one.groups.build( :title => 'Match 03', :slug => 'match03' )
league_cup_round_one_match04 = league_cup_round_one.groups.build( :title => 'Match 04', :slug => 'match04' )
league_cup_round_one_match05 = league_cup_round_one.groups.build( :title => 'Match 05', :slug => 'match05' )
league_cup_round_one_match06 = league_cup_round_one.groups.build( :title => 'Match 06', :slug => 'match06' )
league_cup_round_one_match07 = league_cup_round_one.groups.build( :title => 'Match 07', :slug => 'match07' )
league_cup_round_one_match08 = league_cup_round_one.groups.build( :title => 'Match 08', :slug => 'match08' )
league_cup_round_one_match09 = league_cup_round_one.groups.build( :title => 'Match 09', :slug => 'match09' )
league_cup_round_one_match10 = league_cup_round_one.groups.build( :title => 'Match 10', :slug => 'match10' )
league_cup_round_one_match11 = league_cup_round_one.groups.build( :title => 'Match 11', :slug => 'match11' )
league_cup_round_one_match12 = league_cup_round_one.groups.build( :title => 'Match 12', :slug => 'match12' )
league_cup_round_one_match13 = league_cup_round_one.groups.build( :title => 'Match 13', :slug => 'match13' )
league_cup_round_one_match14 = league_cup_round_one.groups.build( :title => 'Match 14', :slug => 'match14' )
league_cup_round_one_match15 = league_cup_round_one.groups.build( :title => 'Match 15', :slug => 'match15' )
league_cup_round_one_match16 = league_cup_round_one.groups.build( :title => 'Match 16', :slug => 'match16' )
puts "Adding teams to league cup"
league_cup_round_one_match01.teams << team_a
league_cup_round_one_match01.teams << team_b
league_cup_round_one_match02.teams << team_c
league_cup_round_one_match02.teams << team_d
league_cup_round_one_match03.teams << team_e
league_cup_round_one_match03.teams << team_f
league_cup_round_one_match04.teams << team_g
league_cup_round_one_match04.teams << team_h
league_cup_round_one_match05.teams << team_i
league_cup_round_one_match05.teams << team_j
league_cup_round_one_match06.teams << team_k
league_cup_round_one_match06.teams << team_l
league_cup_round_one_match07.teams << team_m
league_cup_round_one_match07.teams << team_n
league_cup_round_one_match08.teams << team_o
league_cup_round_one_match08.teams << team_p
league_cup_round_one_match09.teams << team_q
league_cup_round_one_match09.teams << team_r
league_cup_round_one_match10.teams << team_s
league_cup_round_one_match10.teams << team_t
league_cup_round_one_match11.teams << team_u
league_cup_round_one_match11.teams << team_v
league_cup_round_one_match12.teams << team_w
league_cup_round_one_match12.teams << team_x
league_cup_round_one_match13.teams << team_y
league_cup_round_one_match13.teams << team_z
league_cup_round_one_match14.teams << team_1
league_cup_round_one_match14.teams << team_2
league_cup_round_one_match15.teams << team_3
league_cup_round_one_match15.teams << team_4
league_cup_round_one_match16.teams << team_5
league_cup_round_one_match16.teams << team_6
league_cup_round_one_match01.save
league_cup_round_one_match02.save
league_cup_round_one_match03.save
league_cup_round_one_match04.save
league_cup_round_one_match05.save
league_cup_round_one_match06.save
league_cup_round_one_match07.save
league_cup_round_one_match08.save
league_cup_round_one_match09.save
league_cup_round_one_match10.save
league_cup_round_one_match11.save
league_cup_round_one_match12.save
league_cup_round_one_match13.save
league_cup_round_one_match14.save
league_cup_round_one_match15.save
league_cup_round_one_match16.save
puts "Creating league cup games"
league_cup_round_one_match01.games.build( :kickoff => '2012-08-01 15:00:00', :hometeam_id => team_a.id, :awayteam_id => team_b.id).save
league_cup_round_one_match02.games.build( :kickoff => '2012-08-01 15:00:00', :hometeam_id => team_c.id, :awayteam_id => team_d.id).save
league_cup_round_one_match03.games.build( :kickoff => '2012-08-01 15:00:00', :hometeam_id => team_e.id, :awayteam_id => team_f.id).save
league_cup_round_one_match04.games.build( :kickoff => '2012-08-01 15:00:00', :hometeam_id => team_g.id, :awayteam_id => team_h.id).save
league_cup_round_one_match05.games.build( :kickoff => '2012-08-01 15:00:00', :hometeam_id => team_i.id, :awayteam_id => team_j.id).save
league_cup_round_one_match06.games.build( :kickoff => '2012-08-01 15:00:00', :hometeam_id => team_k.id, :awayteam_id => team_l.id).save
league_cup_round_one_match07.games.build( :kickoff => '2012-08-01 15:00:00', :hometeam_id => team_m.id, :awayteam_id => team_n.id).save
league_cup_round_one_match08.games.build( :kickoff => '2012-08-01 15:00:00', :hometeam_id => team_o.id, :awayteam_id => team_p.id).save
league_cup_round_one_match09.games.build( :kickoff => '2012-08-02 15:00:00', :hometeam_id => team_q.id, :awayteam_id => team_r.id).save
league_cup_round_one_match10.games.build( :kickoff => '2012-08-02 15:00:00', :hometeam_id => team_s.id, :awayteam_id => team_t.id).save
league_cup_round_one_match11.games.build( :kickoff => '2012-08-02 15:00:00', :hometeam_id => team_u.id, :awayteam_id => team_v.id).save
league_cup_round_one_match12.games.build( :kickoff => '2012-08-02 15:00:00', :hometeam_id => team_w.id, :awayteam_id => team_x.id).save
league_cup_round_one_match13.games.build( :kickoff => '2012-08-02 15:00:00', :hometeam_id => team_y.id, :awayteam_id => team_z.id).save
league_cup_round_one_match14.games.build( :kickoff => '2012-08-02 15:00:00', :hometeam_id => team_1.id, :awayteam_id => team_2.id).save
league_cup_round_one_match15.games.build( :kickoff => '2012-08-02 15:00:00', :hometeam_id => team_3.id, :awayteam_id => team_4.id).save
league_cup_round_one_match16.games.build( :kickoff => '2012-08-02 15:00:00', :hometeam_id => team_5.id, :awayteam_id => team_6.id).save
puts "Creating super cup groups"
super_cup_group_a = super_cup_group_stage.groups.build( :title => 'Group A', :slug => 'group-a' )
super_cup_group_b = super_cup_group_stage.groups.build( :title => 'Group B', :slug => 'group-b' )
super_cup_group_c = super_cup_group_stage.groups.build( :title => 'Group C', :slug => 'group-c' )
super_cup_group_d = super_cup_group_stage.groups.build( :title => 'Group D', :slug => 'group-d' )
super_cup_quarter_final_match_1 = super_cup_quarterfinals.groups.build( :title => 'Match 1', :slug => 'match-1' )
super_cup_quarter_final_match_2 = super_cup_quarterfinals.groups.build( :title => 'Match 2', :slug => 'match-2' )
super_cup_quarter_final_match_3 = super_cup_quarterfinals.groups.build( :title => 'Match 3', :slug => 'match-3' )
super_cup_quarter_final_match_4 = super_cup_quarterfinals.groups.build( :title => 'Match 4', :slug => 'match-4' )
super_cup_semi_final_match_1 = super_cup_semi_finals.groups.build( :title => 'Semi final 1', :slug => 'match-1' )
super_cup_semi_final_match_2 = super_cup_semi_finals.groups.build( :title => 'Semi final 2', :slug => 'match-2' )
super_cup_final_match = super_cup_final.groups.build( :title => 'The final', :slug => 'the-final' )
puts "Putting teams in to super cup groups"
super_cup_group_a.teams << team_a
super_cup_group_a.teams << team_b
super_cup_group_a.teams << team_c
super_cup_group_a.teams << team_d
super_cup_group_b.teams << team_e
super_cup_group_b.teams << team_f
super_cup_group_b.teams << team_g
super_cup_group_b.teams << team_h
super_cup_group_c.teams << team_i
super_cup_group_c.teams << team_j
super_cup_group_c.teams << team_k
super_cup_group_c.teams << team_l
super_cup_group_d.teams << team_m
super_cup_group_d.teams << team_n
super_cup_group_d.teams << team_o
super_cup_group_d.teams << team_p
super_cup_group_a.save
super_cup_group_b.save
super_cup_group_c.save
super_cup_group_d.save
super_cup_quarter_final_match_1.save
super_cup_quarter_final_match_2.save
super_cup_quarter_final_match_3.save
super_cup_quarter_final_match_4.save
super_cup_semi_final_match_1.save
super_cup_semi_final_match_2.save
super_cup_final_match.save
puts "Creating super cup games"
super_cup_group_a.games.build( :kickoff => '2013-01-10 15:00:00', :hometeam_id => team_a.id, :awayteam_id => team_b.id).save
super_cup_group_a.games.build( :kickoff => '2013-01-10 15:00:00', :hometeam_id => team_c.id, :awayteam_id => team_d.id).save
super_cup_group_a.games.build( :kickoff => '2013-01-17 15:00:00', :hometeam_id => team_b.id, :awayteam_id => team_c.id).save
super_cup_group_a.games.build( :kickoff => '2013-01-17 15:00:00', :hometeam_id => team_d.id, :awayteam_id => team_a.id).save
super_cup_group_a.games.build( :kickoff => '2013-01-24 15:00:00', :hometeam_id => team_a.id, :awayteam_id => team_c.id).save
super_cup_group_a.games.build( :kickoff => '2013-01-24 15:30:00', :hometeam_id => team_b.id, :awayteam_id => team_d.id).save
super_cup_group_a.games.build( :kickoff => '2013-02-10 15:00:00', :hometeam_id => team_b.id, :awayteam_id => team_a.id).save
super_cup_group_a.games.build( :kickoff => '2013-02-10 15:00:00', :hometeam_id => team_d.id, :awayteam_id => team_c.id).save
super_cup_group_a.games.build( :kickoff => '2013-02-17 15:30:00', :hometeam_id => team_c.id, :awayteam_id => team_b.id).save
super_cup_group_a.games.build( :kickoff => '2013-02-17 15:00:00', :hometeam_id => team_a.id, :awayteam_id => team_d.id).save
super_cup_group_a.games.build( :kickoff => '2013-02-24 15:00:00', :hometeam_id => team_c.id, :awayteam_id => team_a.id).save
super_cup_group_a.games.build( :kickoff => '2013-02-24 15:30:00', :hometeam_id => team_d.id, :awayteam_id => team_b.id).save
super_cup_group_b.games.build( :kickoff => '2013-01-10 15:00:00', :hometeam_id => team_e.id, :awayteam_id => team_f.id).save
super_cup_group_b.games.build( :kickoff => '2013-01-10 15:00:00', :hometeam_id => team_g.id, :awayteam_id => team_h.id).save
super_cup_group_b.games.build( :kickoff => '2013-01-17 15:00:00', :hometeam_id => team_f.id, :awayteam_id => team_g.id).save
super_cup_group_b.games.build( :kickoff => '2013-01-17 15:00:00', :hometeam_id => team_h.id, :awayteam_id => team_e.id).save
super_cup_group_b.games.build( :kickoff => '2013-01-24 15:00:00', :hometeam_id => team_e.id, :awayteam_id => team_g.id).save
super_cup_group_b.games.build( :kickoff => '2013-01-24 15:30:00', :hometeam_id => team_f.id, :awayteam_id => team_h.id).save
super_cup_group_b.games.build( :kickoff => '2013-02-10 15:00:00', :hometeam_id => team_f.id, :awayteam_id => team_e.id).save
super_cup_group_b.games.build( :kickoff => '2013-02-10 15:00:00', :hometeam_id => team_h.id, :awayteam_id => team_g.id).save
super_cup_group_b.games.build( :kickoff => '2013-02-17 15:30:00', :hometeam_id => team_g.id, :awayteam_id => team_f.id).save
super_cup_group_b.games.build( :kickoff => '2013-02-17 15:00:00', :hometeam_id => team_e.id, :awayteam_id => team_h.id).save
super_cup_group_b.games.build( :kickoff => '2013-02-24 15:00:00', :hometeam_id => team_g.id, :awayteam_id => team_e.id).save
super_cup_group_b.games.build( :kickoff => '2013-02-24 15:30:00', :hometeam_id => team_h.id, :awayteam_id => team_f.id).save
super_cup_group_c.games.build( :kickoff => '2013-01-10 15:00:00', :hometeam_id => team_i.id, :awayteam_id => team_j.id).save
super_cup_group_c.games.build( :kickoff => '2013-01-10 15:00:00', :hometeam_id => team_k.id, :awayteam_id => team_l.id).save
super_cup_group_c.games.build( :kickoff => '2013-01-17 15:00:00', :hometeam_id => team_j.id, :awayteam_id => team_k.id).save
super_cup_group_c.games.build( :kickoff => '2013-01-17 15:00:00', :hometeam_id => team_l.id, :awayteam_id => team_i.id).save
super_cup_group_c.games.build( :kickoff => '2013-01-24 15:00:00', :hometeam_id => team_i.id, :awayteam_id => team_k.id).save
super_cup_group_c.games.build( :kickoff => '2013-01-24 15:30:00', :hometeam_id => team_j.id, :awayteam_id => team_l.id).save
super_cup_group_c.games.build( :kickoff => '2013-02-10 15:00:00', :hometeam_id => team_j.id, :awayteam_id => team_i.id).save
super_cup_group_c.games.build( :kickoff => '2013-02-10 15:00:00', :hometeam_id => team_l.id, :awayteam_id => team_k.id).save
super_cup_group_c.games.build( :kickoff => '2013-02-17 15:30:00', :hometeam_id => team_k.id, :awayteam_id => team_j.id).save
super_cup_group_c.games.build( :kickoff => '2013-02-17 15:00:00', :hometeam_id => team_i.id, :awayteam_id => team_l.id).save
super_cup_group_c.games.build( :kickoff => '2013-02-24 15:00:00', :hometeam_id => team_k.id, :awayteam_id => team_i.id).save
super_cup_group_c.games.build( :kickoff => '2013-02-24 15:30:00', :hometeam_id => team_l.id, :awayteam_id => team_j.id).save
super_cup_group_d.games.build( :kickoff => '2013-01-10 15:00:00', :hometeam_id => team_m.id, :awayteam_id => team_n.id).save
super_cup_group_d.games.build( :kickoff => '2013-01-10 15:00:00', :hometeam_id => team_o.id, :awayteam_id => team_p.id).save
super_cup_group_d.games.build( :kickoff => '2013-01-17 15:00:00', :hometeam_id => team_n.id, :awayteam_id => team_o.id).save
super_cup_group_d.games.build( :kickoff => '2013-01-17 15:00:00', :hometeam_id => team_p.id, :awayteam_id => team_m.id).save
super_cup_group_d.games.build( :kickoff => '2013-01-24 15:00:00', :hometeam_id => team_m.id, :awayteam_id => team_o.id).save
super_cup_group_d.games.build( :kickoff => '2013-01-24 15:30:00', :hometeam_id => team_n.id, :awayteam_id => team_p.id).save
super_cup_group_d.games.build( :kickoff => '2013-02-10 15:00:00', :hometeam_id => team_n.id, :awayteam_id => team_m.id).save
super_cup_group_d.games.build( :kickoff => '2013-02-10 15:00:00', :hometeam_id => team_p.id, :awayteam_id => team_o.id).save
super_cup_group_d.games.build( :kickoff => '2013-02-17 15:30:00', :hometeam_id => team_o.id, :awayteam_id => team_n.id).save
super_cup_group_d.games.build( :kickoff => '2013-02-17 15:00:00', :hometeam_id => team_m.id, :awayteam_id => team_p.id).save
super_cup_group_d.games.build( :kickoff => '2013-02-24 15:00:00', :hometeam_id => team_o.id, :awayteam_id => team_m.id).save
super_cup_group_d.games.build( :kickoff => '2013-02-24 15:30:00', :hometeam_id => team_p.id, :awayteam_id => team_n.id).save
super_cup_quarter_final_match_1.games.build( :kickoff => '2013-03-02 15:00:00', :hometeam_id => nil, :awayteam_id => nil).save
super_cup_quarter_final_match_2.games.build( :kickoff => '2013-03-02 15:00:00', :hometeam_id => nil, :awayteam_id => nil).save
super_cup_quarter_final_match_3.games.build( :kickoff => '2013-03-02 15:00:00', :hometeam_id => nil, :awayteam_id => nil).save
super_cup_quarter_final_match_4.games.build( :kickoff => '2013-03-02 15:00:00', :hometeam_id => nil, :awayteam_id => nil).save
super_cup_semi_final_match_1.games.build( :kickoff => '2013-03-09 15:00:00', :hometeam_id => nil, :awayteam_id => nil).save
super_cup_semi_final_match_2.games.build( :kickoff => '2013-03-09 15:00:00', :hometeam_id => nil, :awayteam_id => nil).save
super_cup_final_match.games.build( :kickoff => '2013-03-20 16:00:00', :hometeam_id => nil, :awayteam_id => nil).save
| 46.109726 | 295 | 0.729989 |
e230338d188d57c13aa919c575477d3a88c2fa01 | 764 | class SessionsController < ApplicationController
def new
end
def create
user = User.find_by(email: params[:session][:email].downcase)
if user && user.authenticate(params[:session][:password])
if user.activated?
log_in user
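        # Persist a permanent remember-me cookie when the checkbox was
        # ticked; otherwise clear any previously stored remember digest.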
params[:session][:remember_me] == '1' ? remember(user) : forget(user)
redirect_back_or user
else
message = "Account not activated. "
message += "Check your email for the activation link."
flash[:warning] = message
redirect_to root_url
end
else
flash.now[:danger] = 'Invalid email/password combination'
render 'new'
end
end
def destroy
log_out if logged_in?
redirect_to root_url
end
end
| 25.466667 | 78 | 0.617801 |
614c8ef637b758c92c72397f005dc911d0387608 | 4,592 | class OpensslAT11 < Formula
desc "Cryptography and SSL/TLS Toolkit"
homepage "https://openssl.org/"
url "https://www.openssl.org/source/openssl-1.1.1a.tar.gz"
mirror "https://dl.bintray.com/homebrew/mirror/[email protected]"
mirror "https://www.mirrorservice.org/sites/ftp.openssl.org/source/openssl-1.1.1a.tar.gz"
sha256 "fc20130f8b7cbd2fb918b2f14e2f429e109c31ddd0fb38fc5d71d9ffed3f9f41"
version_scheme 1
bottle do
sha256 "802fef5cc7072a3dc6c1f15898d9f028d7bd32c659e93a354545e719f54a2456" => :mojave
sha256 "29ff522cbd3509b4f881643d2dfdec78396d41f8858d7d308d59f2e3b1839aa1" => :high_sierra
sha256 "af76397790ce9cb571a8e0bd1014a90a47ed1c05b5ee6727a2f6796ebdb95365" => :sierra
sha256 "c153b05e49ef98fa890dfead369799e7451d30b4361eb91d655c6adff86b2412" => :x86_64_linux
end
keg_only :provided_by_macos,
"openssl/libressl is provided by macOS so don't link an incompatible version"
unless OS.mac?
resource "cacert" do
# homepage "http://curl.haxx.se/docs/caextract.html"
url "https://curl.haxx.se/ca/cacert-2017-01-18.pem"
mirror "http://cdn.rawgit.com/sjackman/e4066d2cb6b45fbb6d213e676cb109d0/raw/58964378cb5eefe96cba245ef863c57fb2b480e0/cacert-2017-01-18.pem"
sha256 "e62a07e61e5870effa81b430e1900778943c228bd7da1259dd6a955ee2262b47"
end
end
# SSLv2 died with 1.1.0, so no-ssl2 no longer required.
# SSLv3 & zlib are off by default with 1.1.0 but this may not
# be obvious to everyone, so explicitly state it for now to
# help debug inevitable breakage.
def configure_args; %W[
--prefix=#{prefix}
--openssldir=#{openssldir}
no-ssl3
no-ssl3-method
no-zlib
#{[ENV.cppflags, ENV.cflags, ENV.ldflags].join(" ").strip unless OS.mac?}
]
end
def install
# This could interfere with how we expect OpenSSL to build.
ENV.delete("OPENSSL_LOCAL_CONFIG_DIR")
# This ensures where Homebrew's Perl is needed the Cellar path isn't
# hardcoded into OpenSSL's scripts, causing them to break every Perl update.
# Whilst our env points to opt_bin, by default OpenSSL resolves the symlink.
if which("perl") == Formula["perl"].opt_bin/"perl"
ENV["PERL"] = Formula["perl"].opt_bin/"perl"
end
unless OS.mac?
arch_args = %w[linux-x86_64]
end
if OS.mac?
arch_args = %w[darwin64-x86_64-cc enable-ec_nistp_64_gcc_128]
end
ENV.deparallelize
system "perl", "./Configure", *(configure_args + arch_args)
system "make"
system "make", "test" if OS.mac?
system "make", "install", "MANDIR=#{man}", "MANSUFFIX=ssl"
# See https://github.com/Linuxbrew/homebrew-core/pull/8891
system "make", "test" if build.with?("test") && !OS.mac?
end
def openssldir
etc/"[email protected]"
end
def post_install
unless OS.mac?
# Download and install cacert.pem from curl.haxx.se
cacert = resource("cacert")
rm_f openssldir/"cert.pem"
filename = Pathname.new(cacert.url).basename
openssldir.install cacert.files(filename => "cert.pem")
return
end
keychains = %w[
/System/Library/Keychains/SystemRootCertificates.keychain
]
certs_list = `security find-certificate -a -p #{keychains.join(" ")}`
certs = certs_list.scan(
/-----BEGIN CERTIFICATE-----.*?-----END CERTIFICATE-----/m,
)
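    # Filter out expired certificates: `openssl x509 -checkend 0` exits
    # non-zero once a certificate is past its notAfter date, so the
    # $CHILD_STATUS.success? check below keeps only still-valid certs.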
valid_certs = certs.select do |cert|
IO.popen("#{bin}/openssl x509 -inform pem -checkend 0 -noout >/dev/null", "w") do |openssl_io|
openssl_io.write(cert)
openssl_io.close_write
end
$CHILD_STATUS.success?
end
openssldir.mkpath
(openssldir/"cert.pem").atomic_write(valid_certs.join("\n") << "\n")
end
def caveats; <<~EOS
A CA file has been bootstrapped using certificates from the system
keychain. To add additional certificates, place .pem files in
#{openssldir}/certs
and run
#{opt_bin}/c_rehash
EOS
end
test do
# Make sure the necessary .cnf file exists, otherwise OpenSSL gets moody.
assert_predicate HOMEBREW_PREFIX/"etc/[email protected]/openssl.cnf", :exist?,
"OpenSSL requires the .cnf file for some functionality"
# Check OpenSSL itself functions as expected.
(testpath/"testfile.txt").write("This is a test file")
expected_checksum = "e2d0fe1585a63ec6009c8016ff8dda8b17719a637405a4e23c0ff81339148249"
system bin/"openssl", "dgst", "-sha256", "-out", "checksum.txt", "testfile.txt"
open("checksum.txt") do |f|
checksum = f.read(100).split("=").last.strip
assert_equal checksum, expected_checksum
end
end
end
| 35.053435 | 145 | 0.697517 |
e9b8e17cda786719aec83e1c604163e9b9a8c23f | 2,640 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::EventGrid::V2018_01_01
module Models
#
# Schema of the Data property of an EventGridEvent for a
# Microsoft.Media.JobStateChange event.
#
class MediaJobStateChangeEventData
include MsRestAzure
# @return [MediaJobState] The previous state of the Job. Possible values
# include: 'Canceled', 'Canceling', 'Error', 'Finished', 'Processing',
# 'Queued', 'Scheduled'
attr_accessor :previous_state
# @return [MediaJobState] The new state of the Job. Possible values
# include: 'Canceled', 'Canceling', 'Error', 'Finished', 'Processing',
# 'Queued', 'Scheduled'
attr_accessor :state
# @return [Hash{String => String}] Gets the Job correlation data.
attr_accessor :correlation_data
#
# Mapper for MediaJobStateChangeEventData class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'MediaJobStateChangeEventData',
type: {
name: 'Composite',
class_name: 'MediaJobStateChangeEventData',
model_properties: {
previous_state: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'previousState',
type: {
name: 'Enum',
module: 'MediaJobState'
}
},
state: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'state',
type: {
name: 'Enum',
module: 'MediaJobState'
}
},
correlation_data: {
client_side_validation: true,
required: false,
serialized_name: 'correlationData',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
}
}
}
}
end
end
end
end
| 30.697674 | 78 | 0.512121 |
bb6f1fea71f20e6ecea0ffcda0bedc7e80919f3c | 2,078 | class SiteController < ApplicationController
before_filter :authenticate_user!
def index
today = Time.zone.now
@dates = {
"Today" => today.strftime("%Y-%m-%d"),
"Tomorrow" => (today + (3600 * 24)).strftime("%Y-%m-%d"),
"Day After Tomorrow" => (today + (3600 * 24 * 2)).strftime("%Y-%m-%d")
}
@nonprofits = Nonprofit.is_public.featured_from(Time.zone.now.to_date + 1.day).limit(16)
if Nonprofit.is_public.for_today.present?
@todays_nonprofit = Nonprofit.is_public.for_today
else
@todays_nonprofit = Nonprofit.is_public.for_next_possible_day.first || Nonprofit.new(name: "No Nonprofit for Today", blurb: "n/a", description: "n/a", newsletter: Newsletter.new)
end
@subscriber = Subscriber.new
end
def donate
end
def wall_calendar
end
def legal
end
def faq
end
def contact
end
def send_feedback
params[:email] = params[:email].to_s
params[:message] = params[:message].to_s
if params[:email] !~ Devise.email_regexp
flash[:alert] = "Please enter a valid email address."
render :contact
elsif params[:message].blank?
flash[:alert] = "Please enter a message to send."
render :contact
else
SendFeedbackJob.new(params[:email], params[:message]).save
flash[:notice] = "Thanks! We'll look at your message in a bit."
redirect_to root_url
end
end
def calendar
@page_title = "Calendar"
# TODO better solution than a rescue fallback here
@date = Date.parse(params[:date]) rescue Date.today
@future_nonprofits = Nonprofit.is_public.featured_from(@date).limit(31)
@past_nonprofits = Nonprofit.is_public.featured_reverse_from(@date).limit(1)
@subscriber = Subscriber.new
end
def share
@full_url = params[:url]
    if @full_url
      @full_url += "&"
    else
      @full_url = "#{root_url}?"
    end
params.each do |k,v|
@full_url += "#{k}=#{v}&" if ["redirect_uri", "href", "app_id", "display"].include?(k)
end
render :layout => false
end
end
| 26.303797 | 184 | 0.646776 |
2193a698272038d93de301ab4ea6431f14a2bb22 | 659 | # frozen_string_literal: true
class RemoveForeignKeysFromCiTestCaseFailures < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
TABLE_NAME = :ci_test_case_failures
disable_ddl_transaction!
def up
with_lock_retries do
remove_foreign_key_if_exists(TABLE_NAME, column: :build_id)
end
with_lock_retries do
remove_foreign_key_if_exists(TABLE_NAME, column: :test_case_id)
end
end
def down
add_concurrent_foreign_key(TABLE_NAME, :ci_builds, column: :build_id, on_delete: :cascade)
add_concurrent_foreign_key(TABLE_NAME, :ci_test_cases, column: :test_case_id, on_delete: :cascade)
end
end
| 26.36 | 102 | 0.77997 |
ace7e0b14976430d875ac6b99fc7899b51f96f06 | 964 | require 'vertx-mail/mail_client'
# Start a local SMTP server, remove this line if you want to use your own server.
# It just prints the sent message to the console
Java::IoVertxExampleMail::LocalSmtpServer.start(2528)
mailConfig = {
'hostname' => "localhost",
'port' => 2528
}
mailClient = VertxMail::MailClient.create_shared($vertx, mailConfig)
email = {
'from' => "[email protected]",
'to' => ["[email protected]", "[email protected]", "[email protected]"],
'headers' => {
'X-Mailer' => "Vert.x Mail-Client 3.7.0",
'Message-ID' => "[email protected]",
'Reply-To' => "[email protected]",
'Received' => [
"by vertx mail service",
"from [192.168.1.1] by localhost"
]
},
'text' => "This message should have a custom Message-ID"
}
mailClient.send_mail(email) { |result_err,result|
if (result_err == nil)
puts result
puts "Mail sent"
else
puts "got exception"
result_err.print_stack_trace()
end
}
| 26.777778 | 81 | 0.654564 |
035d035b37c53b6c989bd88779aec4d3574cae51 | 564 | class RolesController < ApplicationController
inherit_resources
def create
create! do |format|
format.html { redirect_to(roles_url) }
end
end
def update
update! do |format|
format.html { redirect_to(roles_url) }
end
end
  def destroy
    destroy! do |format|
      format.html { redirect_to(roles_url) }
    end
  end
protected
def collection
@roles ||= resource_class.all
end
def resource
@role ||= resource_class.get(params[:id])
end
def resource_class
Role.access_as(current_user)
end
end
| 15.243243 | 45 | 0.661348 |
e9ce629cd8d1ca5bcabf5da15d0a62c40dd7abdc | 296 | class NavicatForSqlite < Cask
url 'http://download.navicat.com/download/navicat110_sqlite_en.dmg'
homepage 'http://www.navicat.com/products/navicat-for-sqlite'
version '11.0.16'
sha256 '583ec1190deaf83a3c3e0d6e6199d80a9089d7b04f6241d88bf5b70871e7a5e6'
link 'Navicat for SQLite.app'
end
| 37 | 75 | 0.804054 |
28c5ec188dadb5cfc3eb698e195fbc63eea6bb40 | 93 | module Heroku; end
$LOAD_PATH.unshift(File.dirname(__FILE__) + '/heroku')
require 'client'
| 15.5 | 54 | 0.741935 |
79997ebb74c7a1c835858b24e0285e51b0c3e5e0 | 151 | class AssociationBetweenDecksAndGames < ActiveRecord::Migration
def change
change_table :decks do |t|
t.belongs_to :game
end
end
end
| 18.875 | 63 | 0.728477 |
793a526d1678724cb02b49cc847bdce5e1f0b97c | 3,016 | require 'sequel_postgresql_triggers'
Sequel.migration do
up do
extension :pg_triggers
create_table(:packing_methods, ignore_index_errors: true) do
primary_key :id
String :packing_method_code, null: false
String :description
Decimal :actual_count_reduction_factor, null: false
TrueClass :active, null: false, default: true
DateTime :created_at, null: false
DateTime :updated_at, null: false
index [:packing_method_code], name: :packing_methods_unique_code, unique: true
end
pgt_created_at(:packing_methods,
:created_at,
function_name: :packing_methods_set_created_at,
trigger_name: :set_created_at)
pgt_updated_at(:packing_methods,
:updated_at,
function_name: :packing_methods_set_updated_at,
trigger_name: :set_updated_at)
# Log changes to this table. Exclude changes to the updated_at column.
run "SELECT audit.audit_table('packing_methods', true, true, '{updated_at}'::text[]);"
alter_table(:product_resource_allocations) do
add_foreign_key :packing_method_id, :packing_methods, key: [:id]
end
alter_table(:carton_labels) do
add_foreign_key :packing_method_id, :packing_methods, key: [:id]
end
alter_table(:cartons) do
add_foreign_key :packing_method_id, :packing_methods, key: [:id]
end
run "INSERT INTO packing_methods (packing_method_code, description, actual_count_reduction_factor) VALUES('NORMAL', 'Normal', 1) ON CONFLICT DO NOTHING;"
run "UPDATE product_resource_allocations SET packing_method_id = (SELECT id FROM packing_methods WHERE packing_method_code = 'NORMAL') WHERE packing_method_id IS NULL;
UPDATE carton_labels SET packing_method_id = (SELECT id FROM packing_methods WHERE packing_method_code = 'NORMAL') WHERE packing_method_id IS NULL;
UPDATE cartons SET packing_method_id = (SELECT id FROM packing_methods WHERE packing_method_code = 'NORMAL') WHERE packing_method_id IS NULL;
"
alter_table(:product_resource_allocations) do
set_column_not_null :packing_method_id
end
alter_table(:carton_labels) do
set_column_not_null :packing_method_id
end
alter_table(:cartons) do
set_column_not_null :packing_method_id
end
end
down do
alter_table(:product_resource_allocations) do
drop_column :packing_method_id
end
alter_table(:carton_labels) do
drop_column :packing_method_id
end
alter_table(:cartons) do
drop_column :packing_method_id
end
# Drop logging for this table.
drop_trigger(:packing_methods, :audit_trigger_row)
drop_trigger(:packing_methods, :audit_trigger_stm)
drop_trigger(:packing_methods, :set_created_at)
drop_function(:packing_methods_set_created_at)
drop_trigger(:packing_methods, :set_updated_at)
drop_function(:packing_methods_set_updated_at)
drop_table(:packing_methods)
end
end
| 37.234568 | 171 | 0.726127 |
e8d2ef2d7f07b1dd2ddd7926e8d3b815f94cf30c | 287 | shared_context 'JSON response' do
let(:json_response) { JSON.parse(response.body) }
end
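# Minimal usage sketch (hypothetical request spec; assumes the app serves a
# JSON endpoint at /widgets):
#
#   RSpec.describe 'GET /widgets', type: :request do
#     it 'parses the JSON body' do
#       get '/widgets'
#       expect(json_response).to be_an(Array)
#     end
#   end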
RSpec.configure do |config|
config.include_context 'JSON response', type: :controller
config.include_context 'JSON response', type: :request
config.include_context 'JSON response', :api
end
| 28.7 | 59 | 0.766551 |
5d730c80245cb7171952cfd241aa3274349d2b26 | 781 | cask 'whatsapp' do
version '0.2.8082'
sha256 'ed3ed0b2c43e5bf0210b306e02e24876ac6b3b1b6375b66c7f6c996364517efd'
url "https://web.whatsapp.com/desktop/mac/files/release-#{version}.zip"
appcast 'https://web.whatsapp.com/desktop/mac/releases?platform=darwin&arch=x64',
checkpoint: '319878f0507aec8f162ee38f3d276cdcee245311fb3f5c83af85ea85fb5ebc09'
name 'WhatsApp'
homepage 'https://www.whatsapp.com/'
auto_updates true
app 'WhatsApp.app'
zap trash: [
'~/Library/Application Support/WhatsApp',
'~/Library/Application Support/WhatsApp.ShipIt',
'~/Library/Caches/WhatsApp',
'~/Library/Preferences/WhatsApp.plist',
'~/Library/Preferences/WhatsApp-Helper.plist',
]
end
| 33.956522 | 88 | 0.679898 |
f7ef711f6dd47ba75ec512ba93c946f7866ea9fd | 1,807 | class Being
def initialize(specialty=nil)
@specialty=specialty
end
def to_s
"(object_id = #{object_id})\n"+"(#{self.class}):".ljust(12)+to_s4Being+(@specialty ? "\n"+" "*12+@specialty : "")
end
def to_s4Being
"I am a collection of cooperative molecules with a talent for self-preservation."
end
end
class Earthling < Being
def to_s4Being
"I originate from a blue planet.\n"+" "*12+to_s4Earthling
end
end
class Mammal < Earthling
def initialize(type)
@type=type
end
def to_s4Earthling
"I am champion in taking care of my offspring and eating everything I can find, except mammals of type #{@type}."
end
end
class Fish < Earthling
def initialize(iq)
@iq=(iq>1 ? :instrustableValue : iq)
end
def to_s4Earthling
"Although I think I can think, I can't resist biting in hooks."
end
end
class Moonling < Being
def to_s4Being
"My name is Janneke Maan, and apparently some Earthlings will pay me a visit."
end
end
diverseCollection=[]
diverseCollection << (marsian=Being.new("I come from Mars and like playing hide and seek."))
diverseCollection << (me=Mammal.new(:human))
diverseCollection << (nemo=Fish.new(0.99))
diverseCollection << (jannakeMaan=Moonling.new)
puts "BEGIN ORIGINAL DIVERSE COLLECTION"
diverseCollection.each do |being|
puts "",being.to_s
end
puts "END ORIGINAL DIVERSE COLLECTION"
puts "\n"+"*"*50+"\n\n"
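# Round-trip demo: Marshal.dump serializes each object's class and instance
# variables to a byte stream, and Marshal.load rebuilds equivalent objects
# (with new object_ids), so the output below matches except for the ids.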
# Marshal the diverse Array of beings (binary mode keeps the dump intact on all platforms)
File.open('diverseCollection.bin','wb') do |fo|
fo << Marshal.dump(diverseCollection)
end
# Load the Array of diverse beings back from disk
sameDiverseCollection=Marshal.load(File.binread('diverseCollection.bin'))
puts "BEGIN LOADED DIVERSE COLLECTION"
puts(
sameDiverseCollection.collect do |being|
being.to_s
end.join("\n\n")
)
puts "END LOADED DIVERSE COLLECTION"
| 25.450704 | 117 | 0.715551 |
1a9d7aed6c3b6341e050cb64fd674f253269c08f | 365 | RSpec::Matchers.define :be_valid do
match do |actual|
actual.valid?
end
failure_message_for_should do |actual|
"expected that #{actual} would be valid (errors: #{actual.errors.full_messages.inspect})"
end
failure_message_for_should_not do |actual|
"expected that #{actual} would not be valid"
end
description do
"be valid"
end
end | 21.470588 | 93 | 0.715068 |
7a03b9d7ded70f59ec61f89440973ae1b7846822 | 6,368 | require 'roby/test/expect_execution'
module Roby
module Test
# Handlers for minitest-based tests
#
# They mainly "tune" the default minitest behaviour to match some of the
# Roby idioms as e.g. using pretty-print to format exception messages
module MinitestHelpers
include ExpectExecution
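            # Breadth-first search through an exception and its
            # original_exceptions tree; returns the first exception matching
            # one of the expected classes, or nil if none matches.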
def roby_find_matching_exception(expected, exception)
queue = [exception]
seen = Set.new
while !queue.empty?
e = queue.shift
next if seen.include?(e)
seen << e
if expected.any? { |expected_e| e.kind_of?(expected_e) }
return e
end
if e.respond_to?(:original_exceptions)
queue.concat(e.original_exceptions)
end
end
nil
end
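            # Roby-aware replacement for Minitest's assert_raises: it also
            # matches exceptions wrapped inside Roby exception trees (via
            # original_exceptions) and silences the execution engine's own
            # exception display while the block runs.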
def assert_raises(*exp, display_exceptions: false, return_original_exception: false, &block)
if plan.executable?
# Avoid having it displayed by the execution engine. We're going
# to display any unexpected exception anyways
display_exceptions_enabled, plan.execution_engine.display_exceptions =
plan.execution_engine.display_exceptions?, display_exceptions
end
msg = exp.pop if String === exp.last
matchers = exp.dup
exp = exp.map do |e|
if e.kind_of?(Queries::LocalizedErrorMatcher)
e.model
else
e
end
end
# The caller expects a non-Roby exception. It is going to be
# wrapped in a LocalizedError, so make sure we properly
# process it
begin
yield
rescue *exp => e
if matchers.any? { |m| m === e }
assert_exception_can_be_pretty_printed(e)
return e
else
flunk("#{matchers.map(&:to_s).join(", ")} exceptions expected, not #{e.class}")
end
rescue ::Exception => root_e
assert_exception_can_be_pretty_printed(root_e)
all = Roby.flatten_exception(root_e)
if actual_e = all.find { |e| matchers.any? { |expected_e| expected_e === e } }
if return_original_exception
return actual_e, root_e
else
return actual_e
end
end
actually_caught = roby_exception_to_string(*all)
flunk("#{exp.map(&:to_s).join(", ")} exceptions expected, not #{root_e.class} #{actually_caught}")
end
flunk("#{exp.map(&:to_s).join(", ")} exceptions expected but received nothing")
ensure
if plan.executable?
plan.execution_engine.display_exceptions =
display_exceptions_enabled
end
end
def roby_exception_to_string(*queue)
msg = ""
seen = Set.new
while e = queue.shift
next if seen.include?(e)
seen << e
e_bt = Minitest.filter_backtrace(e.backtrace).join "\n "
msg << "\n\n" << Roby.format_exception(e).join("\n") +
"\n #{e_bt}"
queue.concat(e.original_exceptions) if e.respond_to?(:original_exceptions)
end
msg
end
def to_s
if !error?
super
else
failures.map { |failure|
bt = Minitest.filter_backtrace(failure.backtrace).join "\n "
msg =
if failure.kind_of?(Minitest::UnexpectedError)
roby_exception_to_string(failure.exception)
else
failure.message
end
"#{failure.result_label}:\n#{self.location}:\n#{msg}\n"
}.join "\n"
end
end
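            # Record a failure on this test, wrapping anything that is not a
            # Minitest::Assertion in Minitest::UnexpectedError, mirroring
            # Minitest's own failure bookkeeping.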
def register_failure(e)
case e
when Assertion
self.failures << e
else
self.failures << Minitest::UnexpectedError.new(e)
end
end
def capture_exceptions
super do
begin
yield
rescue Exception => root_e
if !root_e.respond_to?(:each_original_exception)
raise
end
exceptions = root_e.each_original_exception
register_failure(root_e)
# Try to be smart and to only keep the toplevel
# exceptions
filter_execution_exceptions(exceptions).each do |e|
if !e.backtrace
e.set_backtrace(root_e.backtrace)
end
register_failure(e)
end
end
end
end
def filter_execution_exceptions(exceptions)
exceptions.flat_map { |e| Roby.flatten_exception(e).to_a }.uniq
end
            def exception_details(e, msg)
[
"#{msg}",
"Class: <#{e.class}>",
"Message: <#{e.message.inspect}>",
"Pretty-print:",
*Roby.format_exception(e),
"---Backtrace---",
"#{Minitest.filter_backtrace(e.backtrace).join("\n")}",
"---------------",
].join "\n"
end
end
end
end
| 38.131737 | 118 | 0.433417 |
7911392ef19944f4c2510d92da57c6994389dd54 | 1,725 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe MergeRequests::BaseService do
include ProjectForksHelper
let_it_be(:project) { create(:project, :repository) }
let(:title) { 'Awesome merge_request' }
let(:params) do
{
title: title,
description: 'please fix',
source_branch: 'feature',
target_branch: 'master'
}
end
subject { MergeRequests::CreateService.new(project: project, current_user: project.owner, params: params) }
describe '#execute_hooks' do
shared_examples 'enqueues Jira sync worker' do
specify :aggregate_failures do
expect(JiraConnect::SyncMergeRequestWorker).to receive(:perform_async).with(kind_of(Numeric), kind_of(Numeric)).and_call_original
Sidekiq::Testing.fake! do
expect { subject.execute }.to change(JiraConnect::SyncMergeRequestWorker.jobs, :size).by(1)
end
end
end
shared_examples 'does not enqueue Jira sync worker' do
it do
Sidekiq::Testing.fake! do
expect { subject.execute }.not_to change(JiraConnect::SyncMergeRequestWorker.jobs, :size)
end
end
end
context 'with a Jira subscription' do
before do
create(:jira_connect_subscription, namespace: project.namespace)
end
context 'MR contains Jira issue key' do
let(:title) { 'Awesome merge_request with issue JIRA-123' }
it_behaves_like 'enqueues Jira sync worker'
end
context 'MR does not contain Jira issue key' do
it_behaves_like 'does not enqueue Jira sync worker'
end
end
context 'without a Jira subscription' do
it_behaves_like 'does not enqueue Jira sync worker'
end
end
end
| 28.278689 | 137 | 0.682319 |
383c50d3700adfb29c61c02bc955963e8cb028fc | 169 | module Varnish
LIBVARNISHAPI = [
'libvarnishapi.1', # Mac OS X
'libvarnishapi.so.1' # Debian / Ubuntu
]
end
require 'varnish/vsm'
require 'varnish/vsl'
| 16.9 | 43 | 0.656805 |
214a00d6b565b30661aa0a4f16d68e5dc06e1102 | 109 | require 'spec_helper'
module KeplerProcessor
describe IndexDupRemover do
pending "write it"
end
end
| 13.625 | 29 | 0.770642 |
334676103c4401a5b2b3e32a8ebcd6a4ab7c953b | 7,575 | # frozen_string_literal: true
require_relative '../command'
require 'httparty'
require 'csv'
require 'covid'
module Covid
module Commands
class Update < Covid::Command
COVID_CONFIRMED_PATH = "https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_time_series/time_series_19-covid-Confirmed.csv"
COVID_DEATHS_PATH = "https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_time_series/time_series_19-covid-Deaths.csv"
COVID_RECOVERED_PATH = "https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_time_series/time_series_19-covid-Recovered.csv"
REPORTS_DATE_FORMAT = "%m/%d/%y"
STATES = {
"AL" => "Alabama",
"AK" => "Alaska",
"AZ" => "Arizona",
"AR" => "Arkansas",
"CA" => "California",
"CO" => "Colorado",
"CT" => "Connecticut",
"DE" => "Delaware",
"DC" => "District of Columbia",
"D.C." => "District of Columbia",
"FL" => "Florida",
"GA" => "Georgia",
"HI" => "Hawaii",
"ID" => "Idaho",
"IL" => "Illinois",
"IN" => "Indiana",
"IA" => "Iowa",
"KS" => "Kansas",
"KY" => "Kentucky",
"LA" => "Louisiana",
"ME" => "Maine",
"MD" => "Maryland",
"MA" => "Massachusetts",
"MI" => "Michigan",
"MN" => "Minnesota",
"MS" => "Mississippi",
"MO" => "Missouri",
"MT" => "Montana",
"NE" => "Nebraska",
"NV" => "Nevada",
"NH" => "New Hampshire",
"NJ" => "New Jersey",
"NM" => "New Mexico",
"NY" => "New York",
"NC" => "North Carolina",
"ND" => "North Dakota",
"OH" => "Ohio",
"OK" => "Oklahoma",
"OR" => "Oregon",
"PA" => "Pennsylvania",
"PR" => "Puerto Rico",
"RI" => "Rhode Island",
"SC" => "South Carolina",
"SD" => "South Dakota",
"TN" => "Tennessee",
"TX" => "Texas",
"UT" => "Utah",
"VT" => "Vermont",
"VA" => "Virginia",
"WA" => "Washington",
"WV" => "West Virginia",
"WI" => "Wisconsin",
"WY" => "Wyoming"
}
def initialize(options)
@options = options
end
def execute(input: $stdin, output: $stdout)
fetch(:confirmed)
fetch(:deaths)
fetch(:recovered)
update_us(:confirmed)
update_us(:deaths)
update_us(:recovered)
output.puts "Countries: #{Country.count}"
output.puts "States: #{State.count}"
end
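      # Map a CSV row to its location record: rows with a Province/State
      # value become State records (lat/long stored on the state), while
      # bare country rows store lat/long on the Country record itself.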
def location_for(row)
raise "Header for Country/Region doesn't exist" unless row.key?("Country/Region")
raise "Header for Provice/State doesn't exist" unless row.key?("Province/State")
country = row["Country/Region"]
province_state = row["Province/State"]
lat = row["Lat"]
long = row["Long"]
country = Country.find_by(name: country) || Country.create( Hash.new.tap { |hash|
hash[:name] = country
hash[:latitude] = lat unless province_state # store lat,long at state level
hash[:longitude] = long unless province_state # store lat,long at state level
})
if province_state
name = us_state_name_for(province_state) || province_state
          State.find_by(name: name) || State.create(name: name, latitude: lat, longitude: long, country: country)
else
country.tap { |c| c.update(latitude: lat, longitude: long) }
end
end
      def valid_date?(str, format = REPORTS_DATE_FORMAT)
Date.strptime(str,format) rescue false
end
def us_state_name_for(string)
if string.in?(STATES.values)
# string is an US state
#
# Washington
string
else
# lookup US state via US state abbr
#
# Pottawattamie, IA
# Camden, NC
#
us_state_name_for_full_state_abbr_string(string)
end
end
def us_state_name_for_full_state_abbr_string(abbr)
# lookup US state via US state abbr
#
# Pottawattamie, IA
# Camden, NC
#
STATES[abbr.to_s.split(",").last.strip]
end
def fetch(reportable, output=$stdout)
data_source = case reportable
when :confirmed then COVID_CONFIRMED_PATH
when :deaths then COVID_DEATHS_PATH
when :recovered then COVID_RECOVERED_PATH
end
resp = HTTParty.get(data_source)
CSV.parse(resp.body, headers: true).each do |row|
location = location_for(row)
row.to_h.select { |date, count| valid_date?(date) }.each do |date, count|
report = {
date: Date.strptime(date, REPORTS_DATE_FORMAT),
count: count
}
if location.name == "US"
require 'pry'; binding.pry
end
location.send(reportable).build(report).tap { |report|
output.puts "#{reportable.to_s.titleize}: #{location.full_name}, #{report.date}, #{report.count}"
}
end
location.save
end
rename_taiwan
update_us(reportable)
end
def rename_taiwan
country = Country.find_by(name: "Taiwan*")
if country
country.update(name: "Taiwan")
end
end
def update_us(reportable)
#
        # The source data initially had US numbers by county and state. Later
        # those numbers were rolled up into their own US state rows. Gracefully
        # ensure numbers aren't double counted, and all history is preserved.
#
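        # Concretely: for dates before the 2020-03-10 switchover we sum the
        # per-county rows for each state; from that date onward we read the
        # aggregated state-level row instead.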
data_source = case reportable
when :confirmed then COVID_CONFIRMED_PATH
when :deaths then COVID_DEATHS_PATH
when :recovered then COVID_RECOVERED_PATH
end
resp = HTTParty.get(data_source)
csv = CSV.parse(resp.body, headers: true)
#
# => ["1/22/20",
# "1/23/20",
# "1/24/20",
# "1/25/20",
# "1/26/20",
# "1/27/20",
#
all_dates = csv.headers.select { |date| valid_date?(date) }
data = all_dates.each_with_object([]) do |date, memo|
country_column = "Country/Region"
state_column = "Province/State"
parsed_date = Date.strptime(date, REPORTS_DATE_FORMAT)
date_of_aggregated_state_reporting_switchover = Date.new(2020, 3, 10)
count = STATES.values.uniq.map { |state_name|
aggregated_state_row = csv.find { |row| row[country_column] == "US" && row[state_column] == state_name }
counties = csv.select { |row| row[country_column] == "US" && us_state_name_for_full_state_abbr_string(row[state_column]) == state_name }
counties_sum = counties.map { |county| county[date].to_i }.sum
            aggregated_state_sum = aggregated_state_row ? aggregated_state_row[date].to_i : 0
            if parsed_date >= date_of_aggregated_state_reporting_switchover
              aggregated_state_sum
            else
              counties_sum
            end
}.sum
memo << {
date: parsed_date,
count: count
}
end
us = Country.find_by(name: "US")
us.send(reportable).destroy_all
us.send(reportable).build(data)
us.save
end
end
end
end
| 32.234043 | 175 | 0.555248 |
b91798b383c8d7ea5277cb837562d98fb5a9e497 | 193 | class Author
include Elastictastic::EmbeddedDocument
field :id, :type => 'integer'
field :name
field :email, :index => 'not_analyzed'
validates :name, :exclusion => %w(INVALID)
end
| 19.3 | 44 | 0.694301 |
ed77f1efdbddefff8aeb9b98dc6a8deca296a74a | 862 | require 'rails_helper'
describe 'articles/feedback_rss_feed.rss.builder', type: :view do
let!(:blog) { build_stubbed :blog }
describe 'with feedback consisting of one trackback and one comment' do
let(:article) { stub_full_article }
let(:trackback) { build(:trackback, article: article) }
let(:comment) { build(:comment, article: article, body: 'Comment body') }
before(:each) do
assign(:feedback, [trackback, comment])
render
end
it 'renders a valid RSS feed with two items' do
assert_rss20 rendered, 2
end
it 'renders the trackback RSS partial once' do
expect(view).to render_template(partial: 'shared/_rss_item_trackback', count: 1)
end
it 'renders the comment RSS partial once' do
expect(view).to render_template(partial: 'shared/_rss_item_comment', count: 1)
end
end
end
| 29.724138 | 86 | 0.691415 |
7abf430148eccd197528acb4d5b7eee543b84946 | 432 | #
# Cookbook:: Subread
# Recipe:: default
#
# Copyright:: 2019, Eagle Genomics Ltd, All Rights Reserved.
include_recipe 'tar'
tar_extract node['Subread']['url'] do
target_dir node['Subread']['install_path']
creates node['Subread']['dir']
end
magic_shell_environment 'PATH' do
  filename 'subread'
value "$PATH:#{node['Subread']['bin']}"
end
magic_shell_environment 'SUBREAD_VERSION' do
value node['Subread']['version']
end
| 19.636364 | 60 | 0.712963 |
ed6df4fb27db7e064380c8b90a24b4f184e2cd05 | 128 | class UntestedKlass
def initialize
raise 'This class is used for checking that untouched files are not tracked'
end
end
| 21.333333 | 80 | 0.773438 |
3310aaadd085af21733ba65fff11b1649514fc03 | 1,711 | class Platypus < Formula
desc "Create macOS applications from {Perl,Ruby,sh,Python} scripts"
homepage "https://sveinbjorn.org/platypus"
url "https://sveinbjorn.org/files/software/platypus/platypus5.3.src.zip"
sha256 "b5b707d4f664ab6f60eed545d49a7d38da7557ce8268cc4791886eee7b3ca571"
head "https://github.com/sveinbjornt/Platypus.git"
bottle do
cellar :any_skip_relocation
sha256 "8e1b66ba6d450ba4cef3ccd2192d58c08f1401a443a44338c80a917f7607341e" => :catalina
sha256 "a08defbfae9f265bc7473c639b060fb8fa0dd1b6923746a1cf86756112347250" => :mojave
sha256 "df48127dd7e77c37b7ed73247c74f3bb3d37d0e239590d848f91f8af5f98f628" => :high_sierra
sha256 "d46dd428161d8ed7febf5ea4109f9bcddfa65c75d4e67619781745587c6b6f55" => :sierra
end
depends_on :xcode => ["8.0", :build]
def install
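    # Build only the command-line tool and the ScriptExec app wrapper from
    # the Xcode project; code signing is disabled since Homebrew builds
    # are unsigned.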
xcodebuild "SYMROOT=build", "DSTROOT=#{buildpath}/dst",
"-project", "Platypus.xcodeproj",
"-target", "platypus",
"-target", "ScriptExec",
"CODE_SIGN_IDENTITY=", "CODE_SIGNING_REQUIRED=NO",
"clean",
"install"
man1.install "CLT/man/platypus.1"
bin.install "dst/platypus_clt" => "platypus"
cd "build/UninstalledProducts/macosx/ScriptExec.app/Contents" do
pkgshare.install "Resources/MainMenu.nib", "MacOS/ScriptExec"
end
end
def caveats
<<~EOS
This formula only installs the command-line Platypus tool, not the GUI.
The GUI can be downloaded from Platypus' website:
https://sveinbjorn.org/platypus
Alternatively, install with Homebrew Cask:
brew cask install platypus
EOS
end
test do
system "#{bin}/platypus", "-v"
end
end
| 33.54902 | 93 | 0.704267 |
1a79cd9228c0bf30bb9dd0b23614132240653ea7 | 10,177 | ##
# This code was generated by
# \ / _ _ _| _ _
# | (_)\/(_)(_|\/| |(/_ v1.0.0
# / /
#
# frozen_string_literal: true
require 'spec_helper.rb'
describe 'Member' do
it "can fetch" do
@holodeck.mock(Twilio::Response.new(500, ''))
expect {
@client.ip_messaging.v2.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.channels('CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.members('MBXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').fetch()
}.to raise_exception(Twilio::REST::TwilioError)
values = {}
expect(
@holodeck.has_request?(Holodeck::Request.new(
method: 'get',
url: 'https://ip-messaging.twilio.com/v2/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Channels/CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Members/MBXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))).to eq(true)
end
it "receives fetch responses" do
@holodeck.mock(Twilio::Response.new(
200,
%q[
{
"sid": "MBaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"channel_sid": "CHaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"identity": "jing",
"role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"last_consumed_message_index": null,
"last_consumption_timestamp": null,
"date_created": "2016-03-24T21:05:50Z",
"date_updated": "2016-03-24T21:05:50Z",
"url": "https://chat.twilio.com/v2/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Channels/CHaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Members/MBaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
]
))
actual = @client.ip_messaging.v2.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.channels('CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.members('MBXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').fetch()
expect(actual).to_not eq(nil)
end
it "can create" do
@holodeck.mock(Twilio::Response.new(500, ''))
expect {
@client.ip_messaging.v2.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.channels('CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.members.create(identity: 'identity')
}.to raise_exception(Twilio::REST::TwilioError)
values = {'Identity' => 'identity', }
expect(
@holodeck.has_request?(Holodeck::Request.new(
method: 'post',
url: 'https://ip-messaging.twilio.com/v2/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Channels/CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Members',
data: values,
))).to eq(true)
end
it "receives create responses" do
@holodeck.mock(Twilio::Response.new(
201,
%q[
{
"sid": "MBaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"channel_sid": "CHaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"identity": "jing",
"role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"last_consumed_message_index": null,
"last_consumption_timestamp": null,
"date_created": "2016-03-24T21:05:50Z",
"date_updated": "2016-03-24T21:05:50Z",
"url": "https://chat.twilio.com/v2/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Channels/CHaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Members/MBaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
]
))
actual = @client.ip_messaging.v2.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.channels('CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.members.create(identity: 'identity')
expect(actual).to_not eq(nil)
end
it "can read" do
@holodeck.mock(Twilio::Response.new(500, ''))
expect {
@client.ip_messaging.v2.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.channels('CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.members.list()
}.to raise_exception(Twilio::REST::TwilioError)
values = {}
expect(
@holodeck.has_request?(Holodeck::Request.new(
method: 'get',
url: 'https://ip-messaging.twilio.com/v2/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Channels/CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Members',
))).to eq(true)
end
it "receives read_full responses" do
@holodeck.mock(Twilio::Response.new(
200,
%q[
{
"meta": {
"page": 0,
"page_size": 50,
"first_page_url": "https://chat.twilio.com/v2/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Channels/CHaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Members?PageSize=50&Page=0",
"previous_page_url": null,
"url": "https://chat.twilio.com/v2/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Channels/CHaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Members?PageSize=50&Page=0",
"next_page_url": null,
"key": "members"
},
"members": [
{
"sid": "MBaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"channel_sid": "CHaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"identity": "jing",
"role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"last_consumed_message_index": null,
"last_consumption_timestamp": null,
"date_created": "2016-03-24T21:05:50Z",
"date_updated": "2016-03-24T21:05:50Z",
"url": "https://chat.twilio.com/v2/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Channels/CHaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Members/MBaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
]
}
]
))
actual = @client.ip_messaging.v2.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.channels('CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.members.list()
expect(actual).to_not eq(nil)
end
it "receives read_empty responses" do
@holodeck.mock(Twilio::Response.new(
200,
%q[
{
"meta": {
"page": 0,
"page_size": 50,
"first_page_url": "https://chat.twilio.com/v2/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Channels/CHaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Members?PageSize=50&Page=0",
"previous_page_url": null,
"url": "https://chat.twilio.com/v2/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Channels/CHaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Members?PageSize=50&Page=0",
"next_page_url": null,
"key": "members"
},
"members": []
}
]
))
actual = @client.ip_messaging.v2.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.channels('CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.members.list()
expect(actual).to_not eq(nil)
end
it "can delete" do
@holodeck.mock(Twilio::Response.new(500, ''))
expect {
@client.ip_messaging.v2.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.channels('CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.members('MBXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').delete()
}.to raise_exception(Twilio::REST::TwilioError)
values = {}
expect(
@holodeck.has_request?(Holodeck::Request.new(
method: 'delete',
url: 'https://ip-messaging.twilio.com/v2/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Channels/CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Members/MBXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))).to eq(true)
end
it "receives delete responses" do
@holodeck.mock(Twilio::Response.new(
204,
nil,
))
actual = @client.ip_messaging.v2.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.channels('CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.members('MBXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').delete()
expect(actual).to eq(true)
end
it "can update" do
@holodeck.mock(Twilio::Response.new(500, ''))
expect {
@client.ip_messaging.v2.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.channels('CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.members('MBXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').update()
}.to raise_exception(Twilio::REST::TwilioError)
values = {}
expect(
@holodeck.has_request?(Holodeck::Request.new(
method: 'post',
url: 'https://ip-messaging.twilio.com/v2/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Channels/CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Members/MBXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))).to eq(true)
end
it "receives update_role_sid responses" do
@holodeck.mock(Twilio::Response.new(
200,
%q[
{
"sid": "MBaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"channel_sid": "CHaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"identity": "jing",
"role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"last_consumed_message_index": 20,
"last_consumption_timestamp": "2016-03-24T21:05:52Z",
"date_created": "2016-03-24T21:05:50Z",
"date_updated": "2016-03-24T21:05:51Z",
"url": "https://chat.twilio.com/v2/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Channels/CHaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Members/MBaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
]
))
actual = @client.ip_messaging.v2.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.channels('CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.members('MBXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').update()
expect(actual).to_not eq(nil)
end
end | 39.599222 | 184 | 0.625626 |
1a25588d882cdcfb78e09d37e337041d2cea9032 | 263 | class VueDemoController < ApplicationController
layout 'vue_demo'
def bar
render vue: 'bar' # same as `render html: vue_entry('bar'), layout: true`
end
def baz
render html: vue_entry('foo') # same as `render vue: 'foo', layout: false`
end
end
| 21.916667 | 78 | 0.680608 |
26e871ae86d1be08c659bf74c7158d88d5386613 | 198 | # frozen_string_literal: true
class Fan
include Mongoid::Document
include Mongoid::Timestamps
field :name, type: String
has_and_belongs_to_many :teams
validates_presence_of(:name)
end
| 15.230769 | 32 | 0.777778 |
edd82099e92e0bd9863b0908685168cfcea3f5ab | 22,115 | # -*- coding: binary -*-
module Msf
module Serializer
# This class formats information in a plain-text format that
# is meant to be displayed on a console or some other non-GUI
# medium.
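#
# Minimal usage sketch (assumes `mod` is an instantiated framework module,
# e.g. obtained via `framework.modules.create('exploit/...')`):
#
#   puts Msf::Serializer::ReadableText.dump_module(mod)
#   puts Msf::Serializer::ReadableText.dump_options(mod, '  ')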
class ReadableText
#Default number of characters to wrap at.
DefaultColumnWrap = 70
#Default number of characters to indent.
DefaultIndent = 2
# Returns a formatted string that contains information about
# the supplied module instance.
#
# @param mod [Msf::Module] the module to dump information for.
# @param indent [String] the indentation to use.
# @return [String] formatted text output of the dump.
def self.dump_module(mod, indent = " ")
case mod.type
when Msf::MODULE_PAYLOAD
return dump_payload_module(mod, indent)
when Msf::MODULE_NOP
return dump_basic_module(mod, indent)
when Msf::MODULE_ENCODER
return dump_basic_module(mod, indent)
when Msf::MODULE_EXPLOIT
return dump_exploit_module(mod, indent)
when Msf::MODULE_AUX
return dump_auxiliary_module(mod, indent)
when Msf::MODULE_POST
return dump_post_module(mod, indent)
else
return dump_generic_module(mod, indent)
end
end
# Dumps an exploit's targets.
#
# @param mod [Msf::Exploit] the exploit module to dump targets
# for.
# @param indent [String] the indentation to use (only the length
# matters).
# @param h [String] the string to display as the table heading.
# @return [String] the string form of the table.
def self.dump_exploit_targets(mod, indent = '', h = nil)
tbl = Rex::Text::Table.new(
'Indent' => indent.length,
'Header' => h,
'Columns' =>
[
'Id',
'Name',
])
mod.targets.each_with_index { |target, idx|
tbl << [ idx.to_s, target.name || 'All' ]
}
tbl.to_s + "\n"
end
# Dumps the exploit's selected target
#
# @param mod [Msf::Exploit] the exploit module.
# @param indent [String] the indentation to use (only the length
# matters)
# @param h [String] the string to display as the table heading.
# @return [String] the string form of the table.
def self.dump_exploit_target(mod, indent = '', h = nil)
tbl = Rex::Text::Table.new(
'Indent' => indent.length,
'Header' => h,
'Columns' =>
[
'Id',
'Name',
])
tbl << [ mod.target_index, mod.target.name || 'All' ]
tbl.to_s + "\n"
end
# Dumps a module's actions
#
# @param mod [Msf::Module] the module.
# @param indent [String] the indentation to use (only the length
# matters)
# @param h [String] the string to display as the table heading.
# @return [String] the string form of the table.
def self.dump_module_actions(mod, indent = '', h = nil)
tbl = Rex::Text::Table.new(
'Indent' => indent.length,
'Header' => h,
'Columns' =>
[
'Name',
'Description'
])
    mod.actions.each { |action|
      tbl << [ action.name || 'All', action.description || '' ]
    }
tbl.to_s + "\n"
end
# Dumps the module's selected action
#
# @param mod [Msf::Module] the module.
# @param indent [String] the indentation to use (only the length
# matters)
# @param h [String] the string to display as the table heading.
# @return [String] the string form of the table.
def self.dump_module_action(mod, indent = '', h = nil)
tbl = Rex::Text::Table.new(
'Indent' => indent.length,
'Header' => h,
'Columns' =>
[
'Name',
'Description',
])
tbl << [ mod.action.name || 'All', mod.action.description || '' ]
tbl.to_s + "\n"
end
# Dumps the table of payloads that are compatible with the supplied
# exploit.
#
# @param exploit [Msf::Exploit] the exploit module.
# @param indent [String] the indentation to use (only the length
# matters)
# @param h [String] the string to display as the table heading.
# @return [String] the string form of the table.
def self.dump_compatible_payloads(exploit, indent = '', h = nil)
tbl = Rex::Text::Table.new(
'Indent' => indent.length,
'Header' => h,
'Columns' =>
[
'Name',
'Description',
])
exploit.compatible_payloads.each { |entry|
tbl << [ entry[0], entry[1].new.description ]
}
tbl.to_s + "\n"
end
# Dumps information about an exploit module.
#
# @param mod [Msf::Exploit] the exploit module.
# @param indent [String] the indentation to use.
# @return [String] the string form of the information.
def self.dump_exploit_module(mod, indent = '')
output = "\n"
output << " Name: #{mod.name}\n"
output << " Module: #{mod.fullname}\n"
output << " Platform: #{mod.platform_to_s}\n"
output << " Privileged: " + (mod.privileged? ? "Yes" : "No") + "\n"
output << " License: #{mod.license}\n"
output << " Rank: #{mod.rank_to_s.capitalize}\n"
output << " Disclosed: #{mod.disclosure_date}\n" if mod.disclosure_date
output << "\n"
# Authors
output << "Provided by:\n"
mod.each_author { |author|
output << indent + author.to_s + "\n"
}
output << "\n"
# Targets
output << "Available targets:\n"
output << dump_exploit_targets(mod, indent)
# Options
if (mod.options.has_options?)
output << "Basic options:\n"
output << dump_options(mod, indent)
output << "\n"
end
# Payload information
    if (mod.payload_info.length > 0)
output << "Payload information:\n"
if (mod.payload_space)
output << indent + "Space: " + mod.payload_space.to_s + "\n"
end
if (mod.payload_badchars)
output << indent + "Avoid: " + mod.payload_badchars.length.to_s + " characters\n"
end
output << "\n"
end
# Description
output << "Description:\n"
output << word_wrap(Rex::Text.compress(mod.description))
output << "\n"
# References
output << dump_references(mod, indent)
return output
end
# Dumps information about an auxiliary module.
#
# @param mod [Msf::Auxiliary] the auxiliary module.
# @param indent [String] the indentation to use.
# @return [String] the string form of the information.
def self.dump_auxiliary_module(mod, indent = '')
output = "\n"
output << " Name: #{mod.name}\n"
output << " Module: #{mod.fullname}\n"
output << " License: #{mod.license}\n"
output << " Rank: #{mod.rank_to_s.capitalize}\n"
output << " Disclosed: #{mod.disclosure_date}\n" if mod.disclosure_date
output << "\n"
# Authors
output << "Provided by:\n"
mod.each_author { |author|
output << indent + author.to_s + "\n"
}
output << "\n"
# Actions
if mod.action
output << "Available actions:\n"
output << dump_module_actions(mod, indent)
end
# Options
if (mod.options.has_options?)
output << "Basic options:\n"
output << dump_options(mod, indent)
output << "\n"
end
# Description
output << "Description:\n"
output << word_wrap(Rex::Text.compress(mod.description))
output << "\n"
# References
output << dump_references(mod, indent)
return output
end
# Dumps information about a post module.
#
# @param mod [Msf::Post] the post module.
# @param indent [String] the indentation to use.
# @return [String] the string form of the information.
def self.dump_post_module(mod, indent = '')
output = "\n"
output << " Name: #{mod.name}\n"
output << " Module: #{mod.fullname}\n"
output << " Platform: #{mod.platform_to_s}\n"
output << " Arch: #{mod.arch_to_s}\n"
output << " Rank: #{mod.rank_to_s.capitalize}\n"
output << " Disclosed: #{mod.disclosure_date}\n" if mod.disclosure_date
output << "\n"
# Authors
output << "Provided by:\n"
mod.each_author { |author|
output << indent + author.to_s + "\n"
}
output << "\n"
# Actions
if mod.action
output << "Available actions:\n"
output << dump_module_actions(mod, indent)
end
# Options
if (mod.options.has_options?)
output << "Basic options:\n"
output << dump_options(mod, indent)
output << "\n"
end
# Description
output << "Description:\n"
output << word_wrap(Rex::Text.compress(mod.description))
output << "\n"
# References
output << dump_references(mod, indent)
return output
end
# Dumps information about a payload module.
#
# @param mod [Msf::Payload] the payload module.
# @param indent [String] the indentation to use.
# @return [String] the string form of the information.
def self.dump_payload_module(mod, indent = '')
# General
output = "\n"
output << " Name: #{mod.name}\n"
output << " Module: #{mod.fullname}\n"
output << " Platform: #{mod.platform_to_s}\n"
output << " Arch: #{mod.arch_to_s}\n"
output << "Needs Admin: " + (mod.privileged? ? "Yes" : "No") + "\n"
output << " Total size: #{mod.size}\n"
output << " Rank: #{mod.rank_to_s.capitalize}\n"
output << "\n"
# Authors
output << "Provided by:\n"
mod.each_author { |author|
output << indent + author.to_s + "\n"
}
output << "\n"
# Options
if (mod.options.has_options?)
output << "Basic options:\n"
output << dump_options(mod)
output << "\n"
end
# Description
output << "Description:\n"
output << word_wrap(Rex::Text.compress(mod.description))
output << "\n\n"
return output
end
# Dumps information about a module, just the basics.
#
# @param mod [Msf::Module] the module.
# @param indent [String] the indentation to use.
# @return [String] the string form of the information.
def self.dump_basic_module(mod, indent = '')
# General
output = "\n"
output << " Name: #{mod.name}\n"
output << " Module: #{mod.fullname}\n"
output << " Platform: #{mod.platform_to_s}\n"
output << " Arch: #{mod.arch_to_s}\n"
output << " Rank: #{mod.rank_to_s.capitalize}\n"
output << "\n"
# Authors
output << "Provided by:\n"
mod.each_author { |author|
output << indent + author.to_s + "\n"
}
output << "\n"
# Description
output << "Description:\n"
output << word_wrap(Rex::Text.compress(mod.description))
output << "\n"
output << dump_references(mod, indent)
output << "\n"
return output
end
# No current use
def self.dump_generic_module(mod, indent = '')
end
# Dumps the list of options associated with the
# supplied module.
#
# @param mod [Msf::Module] the module.
# @param indent [String] the indentation to use.
# @param missing [Boolean] dump only empty required options.
# @return [String] the string form of the information.
def self.dump_options(mod, indent = '', missing = false)
tbl = Rex::Text::Table.new(
'Indent' => indent.length,
'Columns' =>
[
'Name',
'Current Setting',
'Required',
'Description'
])
mod.options.sorted.each do |name, opt|
val = mod.datastore[name].nil? ? opt.default : mod.datastore[name]
next if (opt.advanced?)
next if (opt.evasion?)
next if (missing && opt.valid?(val))
desc = opt.desc.dup
# Hint at RPORT proto by regexing mixins
if name == 'RPORT' && opt.kind_of?(Msf::OptPort)
mod.class.included_modules.each do |m|
case m.name
when /tcp/i, /HttpClient$/
desc << ' (TCP)'
break
when /udp/i
desc << ' (UDP)'
break
end
end
end
tbl << [ name, opt.display_value(val), opt.required? ? "yes" : "no", desc ]
end
return tbl.to_s
end
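# Example (hypothetical usage; assumes a loaded framework instance and
# that the named module exists in the module set):
#
#   mod = framework.modules.create('exploit/multi/handler')
#   puts dump_options(mod, '  ')
#
# Passing missing = true restricts the table to required options that
# are still unset, which is useful before validating a module.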
# Dumps the advanced options associated with the supplied module.
#
# @param mod [Msf::Module] the module.
# @param indent [String] the indentation to use.
# @return [String] the string form of the information.
def self.dump_advanced_options(mod, indent = '')
tbl = Rex::Text::Table.new(
'Indent' => indent.length,
'Columns' =>
[
'Name',
'Current Setting',
'Required',
'Description'
])
mod.options.sorted.each do |name, opt|
next unless opt.advanced?
val = mod.datastore[name].nil? ? opt.default : mod.datastore[name]
tbl << [ name, opt.display_value(val), opt.required? ? "yes" : "no", opt.desc ]
end
return tbl.to_s
end
# Dumps the evasion options associated with the supplied module.
#
# @param mod [Msf::Module] the module.
# @param indent [String] the indentation to use.
# @return [String] the string form of the information.
def self.dump_evasion_options(mod, indent = '')
tbl = Rex::Text::Table.new(
'Indent' => indent.length,
'Columns' =>
[
'Name',
'Current Setting',
'Required',
'Description'
])
mod.options.sorted.each do |name, opt|
next unless opt.evasion?
val = mod.datastore[name].nil? ? opt.default : mod.datastore[name]
tbl << [ name, opt.display_value(val), opt.required? ? "yes" : "no", opt.desc ]
end
return tbl.to_s
end
# Dumps the references associated with the supplied module.
#
# @param mod [Msf::Module] the module.
# @param indent [String] the indentation to use.
# @return [String] the string form of the information.
def self.dump_references(mod, indent = '')
output = ''
if (mod.respond_to?(:references) && mod.references && mod.references.length > 0)
output << "References:\n"
mod.references.each { |ref|
output << indent + ref.to_s + "\n"
}
output << "\n"
end
output
end
# Dumps the contents of a datastore.
#
# @param name [String] displayed as the table header.
# @param ds [Msf::DataStore] the DataStore to dump.
# @param indent [Integer] the indentation size.
# @param col [Integer] the column width.
# @return [String] the formatted DataStore contents.
def self.dump_datastore(name, ds, indent = DefaultIndent, col = DefaultColumnWrap)
tbl = Rex::Text::Table.new(
'Indent' => indent,
'Header' => name,
'Columns' =>
[
'Name',
'Value'
])
ds.keys.sort.each { |k|
tbl << [ k, ds[k].nil? ? '' : ds[k].to_s ]
}
return ds.length > 0 ? tbl.to_s : "#{tbl.header_to_s}No entries in data store.\n"
end
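# Example (illustrative; any DataStore-like object responding to
# #keys, #[] and #length works):
#
#   ds = Msf::DataStore.new
#   ds['RHOSTS'] = '10.0.0.5'
#   ds['RPORT']  = '445'
#   puts dump_datastore('Module options', ds)
#
# An empty datastore yields the header plus "No entries in data store."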
# Dumps the list of active sessions.
#
# @param framework [Msf::Framework] the framework to dump.
# @param opts [Hash] the options to dump with.
# @option opts :verbose [Boolean] gives more information if set to
# true.
# @option opts :indent [Integer] set the indentation amount.
# @return [String] the formatted list of sessions.
def self.dump_sessions(framework, opts={})
verbose = opts[:verbose] || false
show_extended = opts[:show_extended] || false
indent = opts[:indent] || DefaultIndent
return dump_sessions_verbose(framework, opts) if verbose
columns = []
columns << 'Id'
columns << 'Name'
columns << 'Type'
columns << 'Checkin?' if show_extended
columns << 'Enc?' if show_extended
columns << 'Local URI' if show_extended
columns << 'Information'
columns << 'Connection'
tbl = Rex::Text::Table.new(
'Indent' => indent,
'Header' => "Active sessions",
'Columns' => columns)
framework.sessions.each_sorted { |k|
session = framework.sessions[k]
sinfo = session.info.to_s
# Arbitrarily cut it at 80 columns
if sinfo.length > 80
sinfo = sinfo[0,77] + "..."
end
row = []
row << session.sid.to_s
row << session.sname.to_s
row << session.type.to_s
if session.respond_to?(:session_type)
row[-1] << (" " + session.session_type)
elsif session.respond_to?(:platform)
row[-1] << (" " + session.platform)
end
if show_extended
if session.respond_to?(:last_checkin) && session.last_checkin
row << "#{(Time.now.to_i - session.last_checkin.to_i)}s ago"
else
row << '?'
end
if session.respond_to?(:tlv_enc_key) && session.tlv_enc_key && session.tlv_enc_key[:key]
row << "Y"
else
row << 'N'
end
if session.exploit_datastore && session.exploit_datastore.has_key?('LURI') && !session.exploit_datastore['LURI'].empty?
row << " (#{session.exploit_datastore['LURI']})"
else
row << '?'
end
end
row << sinfo
row << session.tunnel_to_s + " (#{session.session_host})"
tbl << row
}
return framework.sessions.length > 0 ? tbl.to_s : "#{tbl.header_to_s}No active sessions.\n"
end
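# Example (sketch; assumes a live framework with open sessions):
#
#   puts dump_sessions(framework)                       # compact table
#   puts dump_sessions(framework, show_extended: true)  # adds Checkin?/Enc?/Local URI
#   puts dump_sessions(framework, verbose: true)        # one block per session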
# Dumps the list of active sessions in verbose mode
#
# @param framework [Msf::Framework] the framework to dump.
# @param opts [Hash] the options to dump with.
# @return [String] the formatted list of sessions.
def self.dump_sessions_verbose(framework, opts={})
out = "Active sessions\n" +
"===============\n\n"
if framework.sessions.length == 0
out << "No active sessions.\n"
return out
end
framework.sessions.each_sorted do |k|
session = framework.sessions[k]
sess_info = session.info.to_s
sess_id = session.sid.to_s
sess_name = session.sname.to_s
sess_tunnel = session.tunnel_to_s + " (#{session.session_host})"
sess_via = session.via_exploit.to_s
sess_type = session.type.to_s
sess_uuid = session.payload_uuid.to_s
sess_puid = session.payload_uuid.respond_to?(:puid_hex) ? session.payload_uuid.puid_hex : nil
sess_luri = session.exploit_datastore['LURI'] || "" if session.exploit_datastore
sess_enc = false
if session.respond_to?(:tlv_enc_key) && session.tlv_enc_key && session.tlv_enc_key[:key]
sess_enc = true
end
sess_checkin = "<none>"
sess_registration = "No"
if session.respond_to?(:platform)
sess_type << " " + session.platform
end
if session.respond_to?(:last_checkin) && session.last_checkin
sess_checkin = "#{(Time.now.to_i - session.last_checkin.to_i)}s ago @ #{session.last_checkin.to_s}"
end
if session.payload_uuid.respond_to?(:puid_hex) && (uuid_info = framework.uuid_db[sess_puid])
sess_registration = "Yes"
if uuid_info['name']
sess_registration << " - Name=\"#{uuid_info['name']}\""
end
end
out << " Session ID: #{sess_id}\n"
out << " Name: #{sess_name}\n"
out << " Type: #{sess_type}\n"
out << " Info: #{sess_info}\n"
out << " Tunnel: #{sess_tunnel}\n"
out << " Via: #{sess_via}\n"
out << " Encrypted: #{sess_enc}\n"
out << " UUID: #{sess_uuid}\n"
out << " CheckIn: #{sess_checkin}\n"
out << " Registered: #{sess_registration}\n"
unless (sess_luri || '').empty?
out << " LURI: #{sess_luri}\n"
end
out << "\n"
end
out << "\n"
return out
end
# Dumps the list of running jobs.
#
# @param framework [Msf::Framework] the framework.
# @param verbose [Boolean] if true, also prints the payload, LPORT, URIPATH
# and start time, if they exist, for each job.
# @param indent [Integer] the indentation amount.
# @param col [Integer] the column wrap width.
# @return [String] the formatted list of running jobs.
def self.dump_jobs(framework, verbose = false, indent = DefaultIndent, col = DefaultColumnWrap)
columns = [ 'Id', 'Name', 'Payload', 'Payload opts' ]
if (verbose)
columns += [ "URIPATH", "Start Time", "Handler opts" ]
end
tbl = Rex::Text::Table.new(
'Indent' => indent,
'Header' => "Jobs",
'Columns' => columns
)
# jobs are stored as a hash with the keys being a numeric String job_id.
framework.jobs.keys.sort_by(&:to_i).each do |job_id|
# Job context is stored as an Array with the 0th element being
# the running module. If that module is an exploit, ctx will also
# contain its payload.
exploit_mod, _payload_mod = framework.jobs[job_id].ctx
row = []
row[0] = job_id
row[1] = framework.jobs[job_id].name
pinst = exploit_mod.respond_to?(:payload_instance) ? exploit_mod.payload_instance : nil
payload_uri = ''
if pinst.nil?
row[2] = ""
row[3] = ""
else
row[2] = pinst.refname
row[3] = ""
if pinst.respond_to?(:payload_uri)
payload_uri = pinst.payload_uri.strip
row[3] << payload_uri
end
if pinst.respond_to?(:luri)
row[3] << pinst.luri
end
end
if verbose
uripath = exploit_mod.get_resource if exploit_mod.respond_to?(:get_resource)
uripath ||= exploit_mod.datastore['URIPATH']
row[4] = uripath
row[5] = framework.jobs[job_id].start_time
row[6] = ''
if pinst.respond_to?(:listener_uri)
listener_uri = pinst.listener_uri.strip
row[6] = listener_uri unless listener_uri == payload_uri
end
end
tbl << row
end
return framework.jobs.keys.length > 0 ? tbl.to_s : "#{tbl.header_to_s}No active jobs.\n"
end
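# Example (sketch; assumes at least one handler job is running):
#
#   puts dump_jobs(framework)        # Id, Name, Payload, Payload opts
#   puts dump_jobs(framework, true)  # also URIPATH, Start Time, Handler opts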
# Jacked from Ernest Ellingson <erne [at] powernav.com>, modified
# a bit to add indentation
#
# @param str [String] the string to wrap.
# @param indent [Integer] the indentation amount.
# @param col [Integer] the column wrap width.
# @return [String] the wrapped string.
def self.word_wrap(str, indent = DefaultIndent, col = DefaultColumnWrap)
return Rex::Text.wordwrap(str, indent, col)
end
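# Example (illustrative values):
#
#   word_wrap('The quick brown fox jumps over the lazy dog. ' * 3, 2, 40)
#   # => each line prefixed with two spaces and wrapped near column 40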
end
end
end
| 29.291391 | 127 | 0.594709 |
62ab9069d936baa0cd738effddefdc973dfd5f14 | 373 | module SpaceshipMissionSimulator
class ValidateEventData
include Interactor
def call
return true if correct_event_data?
msg = 'should provide proper event format ([state, gravity])'
context.fail! message: msg
end
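# Example (hypothetical caller; Interactor exposes .call on the class
# and returns a context object):
#
#   result = ValidateEventData.call(raw_data: ['landed', 9.807])
#   result.success? # => true
#
#   result = ValidateEventData.call(raw_data: 'not-an-array')
#   result.failure? # => true
#   result.message  # => "should provide proper event format ([state, gravity])"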
private
def correct_event_data?
context.raw_data.is_a?(Array) && context.raw_data.size == 2
end
end
end
| 19.631579 | 67 | 0.689008 |
38627a1c39ec70c0b06f8eb245ba3d405fa80afd | 10,658 | # typed: false
require 'datadog/core/configuration/agent_settings_resolver'
require 'datadog/core/configuration/settings'
RSpec.describe Datadog::Core::Configuration::AgentSettingsResolver do
around { |example| ClimateControl.modify(default_environment.merge(environment)) { example.run } }
let(:default_environment) do
{
'DD_AGENT_HOST' => nil,
'DD_TRACE_AGENT_PORT' => nil,
'DD_TRACE_AGENT_URL' => nil
}
end
let(:environment) { {} }
let(:ddtrace_settings) { Datadog::Core::Configuration::Settings.new }
let(:logger) { instance_double(Datadog::Core::Logger) }
let(:settings) do
{
adapter: adapter,
ssl: false,
hostname: hostname,
port: port,
uds_path: uds_path,
timeout_seconds: nil,
deprecated_for_removal_transport_configuration_proc: nil,
}
end
let(:adapter) { :net_http }
let(:hostname) { '127.0.0.1' }
let(:port) { 8126 }
let(:uds_path) { nil }
before do
# Environment does not have existing unix socket for the base testing case
allow(File).to receive(:exist?).with('/var/run/datadog/apm.socket').and_return(false)
end
subject(:resolver) { described_class.call(ddtrace_settings, logger: logger) }
context 'by default' do
it 'contacts the agent using the http adapter, using hostname 127.0.0.1 and port 8126' do
expect(resolver).to have_attributes settings
end
context 'with default unix socket present' do
before do
expect(File).to receive(:exist?).with('/var/run/datadog/apm.socket').and_return(true)
end
let(:adapter) { :unix }
let(:uds_path) { '/var/run/datadog/apm.socket' }
let(:hostname) { nil }
let(:port) { nil }
it 'configures the agent to connect to unix:/var/run/datadog/apm.socket' do
expect(resolver).to have_attributes settings
end
end
end
describe 'http adapter hostname' do
context 'when a custom hostname is specified via environment variable' do
let(:environment) { { 'DD_AGENT_HOST' => 'custom-hostname' } }
it 'contacts the agent using the http adapter, using the custom hostname' do
expect(resolver).to have_attributes(**settings, hostname: 'custom-hostname')
end
end
context 'when a custom hostname is specified via code using "agent.host ="' do
before do
ddtrace_settings.agent.host = 'custom-hostname'
end
it 'contacts the agent using the http adapter, using the custom hostname' do
expect(resolver).to have_attributes(**settings, hostname: 'custom-hostname')
end
context 'and a different hostname is also specified via the DD_AGENT_HOST environment variable' do
let(:environment) { { 'DD_AGENT_HOST' => 'this-is-a-different-hostname' } }
before do
allow(logger).to receive(:warn)
end
it 'prioritizes the hostname specified via code' do
expect(resolver).to have_attributes(**settings, hostname: 'custom-hostname')
end
it 'logs a warning' do
expect(logger).to receive(:warn).with(/Configuration mismatch/)
resolver
end
end
context 'and a different hostname is also specified via the DD_TRACE_AGENT_URL environment variable' do
let(:environment) { { 'DD_TRACE_AGENT_URL' => 'http://this-is-a-different-hostname:8126' } }
before do
allow(logger).to receive(:warn)
end
it 'prioritizes the hostname specified via code' do
expect(resolver).to have_attributes(**settings, hostname: 'custom-hostname')
end
it 'logs a warning' do
expect(logger).to receive(:warn).with(/Configuration mismatch/)
resolver
end
end
end
end
describe 'http adapter port' do
context 'when a custom port is specified via environment variable' do
let(:environment) { { 'DD_TRACE_AGENT_PORT' => '1234' } }
it 'contacts the agent using the http adapter, using the custom port' do
expect(resolver).to have_attributes(**settings, port: 1234)
end
context 'when the custom port is invalid' do
let(:environment) { { 'DD_TRACE_AGENT_PORT' => 'this-is-an-invalid-port' } }
before do
allow(logger).to receive(:warn)
end
it 'logs a warning' do
expect(logger).to receive(:warn).with(/Invalid value/)
resolver
end
it 'falls back to the defaults' do
expect(resolver).to have_attributes settings
end
end
end
context 'when a custom port is specified via code using "agent.port = "' do
before do
ddtrace_settings.agent.port = 1234
end
it 'contacts the agent using the http adapter, using the custom port' do
expect(resolver).to have_attributes(**settings, port: 1234)
end
context 'and a different port is also specified via the DD_TRACE_AGENT_PORT environment variable' do
let(:environment) { { 'DD_TRACE_AGENT_PORT' => '5678' } }
before do
allow(logger).to receive(:warn)
end
it 'prioritizes the port specified via code' do
expect(resolver).to have_attributes(**settings, port: 1234)
end
it 'logs a warning' do
expect(logger).to receive(:warn).with(/Configuration mismatch/)
resolver
end
end
context 'and a different port is also specified via the DD_TRACE_AGENT_URL environment variable' do
let(:environment) { { 'DD_TRACE_AGENT_URL' => 'http://127.0.0.1:5678' } }
before do
allow(logger).to receive(:warn)
end
it 'prioritizes the port specified via code' do
expect(resolver).to have_attributes(**settings, port: 1234)
end
it 'logs a warning' do
expect(logger).to receive(:warn).with(/Configuration mismatch/)
resolver
end
end
context 'when the port is specified as a string instead of a number' do
before do
ddtrace_settings.agent.port = '1234'
end
it 'contacts the agent using the http adapter, using the custom port' do
expect(resolver).to have_attributes(**settings, port: 1234)
end
end
context 'when the port is an invalid string value' do
before do
ddtrace_settings.agent.port = 'kaboom'
allow(logger).to receive(:warn)
end
it 'logs a warning' do
expect(logger).to receive(:warn).with(/Invalid value/)
resolver
end
it 'falls back to the defaults' do
expect(resolver).to have_attributes settings
end
end
context 'when the port is an invalid object' do
before do
ddtrace_settings.agent.port = Object.new
allow(logger).to receive(:warn)
end
it 'logs a warning' do
expect(logger).to receive(:warn).with(/Invalid value/)
resolver
end
it 'falls back to the defaults' do
expect(resolver).to have_attributes settings
end
end
end
end
context 'when a custom url is specified via environment variable' do
let(:environment) { { 'DD_TRACE_AGENT_URL' => 'http://custom-hostname:1234' } }
it 'contacts the agent using the http adapter, using the custom hostname and port' do
expect(resolver).to have_attributes(
**settings,
ssl: false,
hostname: 'custom-hostname',
port: 1234
)
end
context 'and a different hostname is also specified via the DD_AGENT_HOST environment variable' do
let(:environment) do
{
'DD_TRACE_AGENT_URL' => 'http://custom-hostname:1234',
'DD_AGENT_HOST' => 'this-is-a-different-hostname'
}
end
before do
allow(logger).to receive(:warn)
end
it 'prioritizes the hostname specified via DD_TRACE_AGENT_URL' do
expect(resolver).to have_attributes(hostname: 'custom-hostname')
end
it 'logs a warning' do
expect(logger).to receive(:warn).with(/Configuration mismatch/)
resolver
end
end
context 'and a different port is also specified via the DD_TRACE_AGENT_PORT environment variable' do
let(:environment) do
{
'DD_TRACE_AGENT_URL' => 'http://custom-hostname:1234',
'DD_TRACE_AGENT_PORT' => '5678'
}
end
before do
allow(logger).to receive(:warn)
end
it 'prioritizes the port specified via DD_TRACE_AGENT_URL' do
expect(resolver).to have_attributes(port: 1234)
end
it 'logs a warning' do
expect(logger).to receive(:warn).with(/Configuration mismatch/)
resolver
end
end
context 'when the uri scheme is https' do
let(:environment) { { 'DD_TRACE_AGENT_URL' => 'https://custom-hostname:1234' } }
it 'contacts the agent using the http adapter, using ssl: true' do
expect(resolver).to have_attributes(ssl: true)
end
end
context 'when the uri scheme is not http OR https' do
let(:environment) { { 'DD_TRACE_AGENT_URL' => 'steam://custom-hostname:1234' } }
before do
allow(logger).to receive(:warn)
end
it 'falls back to the defaults' do
expect(resolver).to have_attributes settings
end
it 'logs a warning' do
expect(logger).to receive(:warn).with(/Invalid URI scheme/)
resolver
end
end
end
context 'when a proc is configured in tracer.transport_options' do
let(:deprecated_for_removal_transport_configuration_proc) { proc {} }
before do
ddtrace_settings.tracing.transport_options = deprecated_for_removal_transport_configuration_proc
end
it 'includes the given proc in the resolved settings as the deprecated_for_removal_transport_configuration_proc' do
expect(resolver).to have_attributes(
**settings,
deprecated_for_removal_transport_configuration_proc: deprecated_for_removal_transport_configuration_proc
)
end
end
describe '#log_warning' do
let(:message) { 'this is a test warning' }
subject(:log_warning) do
described_class.new(ddtrace_settings, logger: logger).send(:log_warning, message)
end
it 'logs a warning used the configured logger' do
expect(logger).to receive(:warn).with('this is a test warning')
log_warning
end
context 'when logger is nil' do
let(:logger) { nil }
it 'does not log anything' do
log_warning
end
end
end
end
| 29.441989 | 119 | 0.640739 |
18517b07ca97cd3450b6c6c158be5bf7a9f89879 | 13,360 | require 'spec_helper'
require 'pry'
describe ApplicationController do
describe "Homepage" do
it 'loads the homepage' do
get '/'
expect(last_response.status).to eq(200)
expect(last_response.body).to include("Welcome to Fwitter")
end
end
describe "Signup Page" do
it 'loads the signup page' do
get '/signup'
expect(last_response.status).to eq(200)
end
it 'signup directs user to tweets index' do
params = {
:username => "skittles123",
:email => "[email protected]",
:password => "rainbows"
}
post '/signup', params
expect(last_response.location).to include("/tweets")
end
it 'does not let a user sign up without a username' do
params = {
:username => "",
:email => "[email protected]",
:password => "rainbows"
}
post '/signup', params
expect(last_response.location).to include('/signup')
end
it 'does not let a user sign up without an email' do
params = {
:username => "skittles123",
:email => "",
:password => "rainbows"
}
post '/signup', params
expect(last_response.location).to include('/signup')
end
it 'does not let a user sign up without a password' do
params = {
:username => "skittles123",
:email => "[email protected]",
:password => ""
}
post '/signup', params
expect(last_response.location).to include('/signup')
end
it 'creates a new user and logs them in on valid submission and does not let a logged in user view the signup page' do
params = {
:username => "skittles123",
:email => "[email protected]",
:password => "rainbows"
}
post '/signup', params
get '/signup'
expect(last_response.location).to include('/tweets')
end
end
describe "login" do
it 'loads the login page' do
get '/login'
expect(last_response.status).to eq(200)
end
it 'loads the tweets index after login' do
user = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
params = {
:username => "becky567",
:password => "kittens"
}
post '/login', params
expect(last_response.status).to eq(302)
follow_redirect!
expect(last_response.status).to eq(200)
expect(last_response.body).to include("Welcome,")
end
it 'does not let user view login page if already logged in' do
user = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
params = {
:username => "becky567",
:password => "kittens"
}
post '/login', params
get '/login'
expect(last_response.location).to include("/tweets")
end
end
describe "logout" do
it "lets a user logout if they are already logged in" do
user = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
params = {
:username => "becky567",
:password => "kittens"
}
post '/login', params
get '/logout'
expect(last_response.location).to include("/login")
end
it 'does not let a user logout if not logged in' do
get '/logout'
expect(last_response.location).to include("/")
end
it 'does not load /tweets if user not logged in' do
get '/tweets'
expect(last_response.location).to include("/login")
end
end
describe 'user show page' do
it 'shows all a single users tweets' do
user = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
tweet1 = Tweet.create(:content => "tweeting!", :user_id => user.id)
tweet2 = Tweet.create(:content => "tweet tweet tweet", :user_id => user.id)
get "/users/#{user.slug}"
expect(last_response.body).to include("tweeting!")
expect(last_response.body).to include("tweet tweet tweet")
end
end
describe 'index action' do
context 'logged in' do
it 'lets a user view the tweets index if logged in' do
user1 = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
tweet1 = Tweet.create(:content => "tweeting!", :user_id => user1.id)
user2 = User.create(:username => "silverstallion", :email => "[email protected]", :password => "horses")
tweet2 = Tweet.create(:content => "look at this tweet", :user_id => user2.id)
visit '/login'
fill_in(:username, :with => "becky567")
fill_in(:password, :with => "kittens")
click_button 'submit'
visit "/tweets"
expect(page.body).to include(tweet1.content)
end
end
context 'logged out' do
it 'does not let a user view the tweets index if not logged in' do
get '/tweets'
expect(last_response.location).to include("/login")
end
end
end
describe 'new action' do
context 'logged in' do
it 'lets user view new tweet form if logged in' do
user = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
visit '/login'
fill_in(:username, :with => "becky567")
fill_in(:password, :with => "kittens")
click_button 'submit'
visit '/tweets/new'
expect(page.status_code).to eq(200)
end
it 'lets user create a tweet if they are logged in' do
user = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
visit '/login'
fill_in(:username, :with => "becky567")
fill_in(:password, :with => "kittens")
click_button 'submit'
visit '/tweets/new'
fill_in(:content, :with => "tweet!!!")
click_button 'submit'
user = User.find_by(:username => "becky567")
tweet = Tweet.find_by(:content => "tweet!!!")
expect(tweet).to be_instance_of(Tweet)
expect(tweet.user_id).to eq(user.id)
expect(page.status_code).to eq(200)
end
it 'does not let a user tweet from another user' do
user = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
user2 = User.create(:username => "silverstallion", :email => "[email protected]", :password => "horses")
visit '/login'
fill_in(:username, :with => "becky567")
fill_in(:password, :with => "kittens")
click_button 'submit'
visit '/tweets/new'
fill_in(:content, :with => "tweet!!!")
click_button 'submit'
user = User.find_by(:id => user.id)
user2 = User.find_by(:id => user2.id)
tweet = Tweet.find_by(:content => "tweet!!!")
expect(tweet).to be_instance_of(Tweet)
expect(tweet.user_id).to eq(user.id)
expect(tweet.user_id).not_to eq(user2.id)
end
it 'does not let a user create a blank tweet' do
user = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
visit '/login'
fill_in(:username, :with => "becky567")
fill_in(:password, :with => "kittens")
click_button 'submit'
visit '/tweets/new'
fill_in(:content, :with => "")
click_button 'submit'
expect(Tweet.find_by(:content => "")).to eq(nil)
expect(page.current_path).to eq("/tweets/new")
end
end
context 'logged out' do
it 'does not let user view new tweet form if not logged in' do
get '/tweets/new'
expect(last_response.location).to include("/login")
end
end
end
describe 'show action' do
context 'logged in' do
it 'displays a single tweet' do
user = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
tweet = Tweet.create(:content => "i am a boss at tweeting", :user_id => user.id)
visit '/login'
fill_in(:username, :with => "becky567")
fill_in(:password, :with => "kittens")
click_button 'submit'
visit "/tweets/#{tweet.id}"
expect(page.status_code).to eq(200)
expect(page.body).to include("Delete Tweet")
expect(page.body).to include(tweet.content)
expect(page.body).to include("Edit Tweet")
end
end
context 'logged out' do
it 'does not let a user view a tweet' do
user = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
tweet = Tweet.create(:content => "i am a boss at tweeting", :user_id => user.id)
get "/tweets/#{tweet.id}"
expect(last_response.location).to include("/login")
end
end
end
describe 'edit action' do
context "logged in" do
it 'lets a user view tweet edit form if they are logged in' do
user = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
tweet = Tweet.create(:content => "tweeting!", :user_id => user.id)
visit '/login'
fill_in(:username, :with => "becky567")
fill_in(:password, :with => "kittens")
click_button 'submit'
visit '/tweets/1/edit'
expect(page.status_code).to eq(200)
expect(page.body).to include(tweet.content)
end
it 'does not let a user edit a tweet they did not create' do
user1 = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
tweet1 = Tweet.create(:content => "tweeting!", :user_id => user1.id)
user2 = User.create(:username => "silverstallion", :email => "[email protected]", :password => "horses")
tweet2 = Tweet.create(:content => "look at this tweet", :user_id => user2.id)
visit '/login'
fill_in(:username, :with => "becky567")
fill_in(:password, :with => "kittens")
click_button 'submit'
visit "/tweets/#{tweet2.id}/edit"
expect(page.current_path).to include('/tweets')
end
it 'lets a user edit their own tweet if they are logged in' do
user = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
tweet = Tweet.create(:content => "tweeting!", :user_id => 1)
visit '/login'
fill_in(:username, :with => "becky567")
fill_in(:password, :with => "kittens")
click_button 'submit'
visit '/tweets/1/edit'
fill_in(:content, :with => "i love tweeting")
click_button 'submit'
expect(Tweet.find_by(:content => "i love tweeting")).to be_instance_of(Tweet)
expect(Tweet.find_by(:content => "tweeting!")).to eq(nil)
expect(page.status_code).to eq(200)
end
it 'does not let a user edit a tweet with blank content' do
user = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
tweet = Tweet.create(:content => "tweeting!", :user_id => 1)
visit '/login'
fill_in(:username, :with => "becky567")
fill_in(:password, :with => "kittens")
click_button 'submit'
visit '/tweets/1/edit'
fill_in(:content, :with => "")
click_button 'submit'
expect(Tweet.find_by(:content => "i love tweeting")).to be(nil)
expect(page.current_path).to eq("/tweets/1/edit")
end
end
context "logged out" do
it 'does not load -- instead redirects to login' do
get '/tweets/1/edit'
expect(last_response.location).to include("/login")
end
end
end
describe 'delete action' do
context "logged in" do
it 'lets a user delete their own tweet if they are logged in' do
user = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
tweet = Tweet.create(:content => "tweeting!", :user_id => 1)
visit '/login'
fill_in(:username, :with => "becky567")
fill_in(:password, :with => "kittens")
click_button 'submit'
visit 'tweets/1'
click_button "Delete Tweet"
expect(page.status_code).to eq(200)
expect(Tweet.find_by(:content => "tweeting!")).to eq(nil)
end
it 'does not let a user delete a tweet they did not create' do
user1 = User.create(:username => "becky567", :email => "[email protected]", :password => "kittens")
tweet1 = Tweet.create(:content => "tweeting!", :user_id => user1.id)
user2 = User.create(:username => "silverstallion", :email => "[email protected]", :password => "horses")
tweet2 = Tweet.create(:content => "look at this tweet", :user_id => user2.id)
visit '/login'
fill_in(:username, :with => "becky567")
fill_in(:password, :with => "kittens")
click_button 'submit'
visit "tweets/#{tweet2.id}"
click_button "Delete Tweet"
expect(page.status_code).to eq(200)
expect(Tweet.find_by(:content => "look at this tweet")).to be_instance_of(Tweet)
expect(page.current_path).to include('/tweets')
end
end
context "logged out" do
it 'does not let a user delete a tweet if not logged in' do
tweet = Tweet.create(:content => "tweeting!", :user_id => 1)
visit '/tweets/1'
expect(page.current_path).to eq("/login")
end
end
end
end
| 33.151365 | 122 | 0.591018 |
283f8c3b7648104dfea44941fbe77f3fab7a141c | 1,850 | # -*- encoding: utf-8 -*-
$:.push File.expand_path('../lib', __FILE__)
require 'casino/version'
Gem::Specification.new do |s|
s.name = 'casino'
s.version = CASino::VERSION
s.authors = ['Nils Caspar', 'Raffael Schmid', 'Samuel Sieg']
s.email = ['[email protected]', '[email protected]', '[email protected]']
s.homepage = 'http://rbcas.org/'
s.license = 'MIT'
s.summary = 'A simple CAS server written in Ruby using the Rails framework.'
s.description = 'CASino is a simple CAS (Central Authentication Service) server.'
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.require_paths = ['lib']
sign_file = File.expand_path '~/.gem/casino-private_key.pem'
if File.exist?(sign_file)
s.signing_key = sign_file
s.cert_chain = ['casino-public_cert.pem']
end
s.add_development_dependency 'capybara', '~> 2.1'
s.add_development_dependency 'rake', '~> 10.0'
s.add_development_dependency 'rspec', '~> 3.0'
s.add_development_dependency 'rspec-its', '~> 1.0'
s.add_development_dependency 'rspec-rails', '~> 3.0'
s.add_development_dependency 'sqlite3', '~> 1.3'
s.add_development_dependency 'factory_girl', '~> 4.1'
s.add_development_dependency 'webmock', '~> 1.9'
s.add_development_dependency 'coveralls', '~> 0.7'
s.add_runtime_dependency 'rails', '~> 4.1'
s.add_runtime_dependency 'sass-rails', '~> 4.0'
s.add_runtime_dependency 'http_accept_language', '~> 2.0.0.pre'
s.add_runtime_dependency 'addressable', '~> 2.3'
s.add_runtime_dependency 'terminal-table', '~> 1.4'
s.add_runtime_dependency 'useragent', '~> 0.4'
s.add_runtime_dependency 'faraday', '~> 0.8'
s.add_runtime_dependency 'rotp', '~> 2.0'
end
| 41.111111 | 83 | 0.662703 |
1af0792a33c3bc32b0ebf2ef6c909699930ff602 | 524 | actions :auto_attach
attribute :mount_point, :kind_of => String
attribute :disk_count, :kind_of => Integer
attribute :disk_size, :kind_of => Integer
attribute :level, :default => 10
attribute :filesystem, :default => "ext4"
attribute :filesystem_options, :default => "rw,noatime,nobootwait"
attribute :snapshots, :default => []
attribute :disk_type, :kind_of => String, :default => 'standard'
attribute :disk_piops, :kind_of => Integer, :default => 0
| 40.307692 | 73 | 0.637405 |
79c4e9be664545b4241177dfa30a1ab3faac981d | 2,120 | module ApiStub
module Models
module DNS
# Mock class for Record Set
class RecordSet
def self.create_record_set_obj(dns_client)
record_set = '{
"id" : "/subscriptions/########-####-####-####-############/resourceGroups/fog-test-rg/providers/Microsoft.Network/dnszones/fog-test-zone.com/A/fog-test-record_set",
"name": "fog-test-record_set",
"type": "Microsoft.Network/dnszones/A",
"etag": "3376a38f-a53f-4ed0-a2e7-dfaba67dbb40",
"location": "global",
"properties":
{
"metadata": [],
"fqdn": "fog-test-record_set.fog-test-zone.com.",
"TTL": 60,
"ARecords":
[
{
"ipv4Address": "1.2.3.4"
},
{
"ipv4Address": "1.2.3.3"
}
]
}
}'
record_set_mapper = Azure::Dns::Mgmt::V2017_10_01::Models::RecordSet.mapper
dns_client.deserialize(record_set_mapper, Fog::JSON.decode(record_set))
end
def self.response_for_cname(dns_client)
cname_record = '{
"id": "/subscriptions/########-####-####-####-############/resourceGroups/EdgeMonitoring2/providers/Microsoft.Network/dnszones/edgemonitoring2.com./CNAME/www",
"location": "global",
"name": "www",
"tags": {},
"type": "Microsoft.Network/dnszones/CNAME",
"etag": "5b83020b-b59c-44be-8f19-a052ebe80fe7",
"properties": {
"metadata": [],
"fqdn": "fog-test-record_set.fog-test-zone.com.",
"TTL": "60",
"CNAMERecord":
{
"cname": "test.fog.com"
}
}
}'
cname_record_mapper = Azure::Dns::Mgmt::V2017_10_01::Models::RecordSet.mapper
dns_client.deserialize(cname_record_mapper, Fog::JSON.decode(cname_record))
end
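# Example (test-suite sketch; `dns_client` is any management client
# exposing Azure's #deserialize, as constructed in the Fog specs):
#
#   record_set = ApiStub::Models::DNS::RecordSet.create_record_set_obj(dns_client)
#   record_set.name # => "fog-test-record_set"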
end
end
end
end
| 36.551724 | 177 | 0.477358 |
bf2dca53278eeb4a99d9965b390748d8506feee7 | 272 | require 'test_helper'
class CityTest < ActiveSupport::TestCase
should validate_presence_of(:name)
should validate_uniqueness_of(:name).scoped_to(:country_id)
should_not validate_uniqueness_of(:name)
should belong_to(:country)
should have_many(:athletes)
end
| 20.923077 | 61 | 0.797794 |
18d4ae3ac96df8fde06d01d54aaa1bc72e59f26f | 1,654 | class TeamMembersController < ProjectResourceController
# Authorize
before_filter :authorize_read_project!
before_filter :authorize_admin_project!, except: [:index, :show]
def index
@teams = UserTeam.scoped
end
def show
@user_project_relation = project.users_projects.find_by_user_id(member)
@events = member.recent_events.in_projects(project).limit(7)
end
def new
@user_project_relation = project.users_projects.new
end
def create
users = User.where(id: params[:user_ids])
@project.team << [users, params[:project_access]]
if params[:redirect_to]
redirect_to params[:redirect_to]
else
redirect_to project_team_index_path(@project)
end
end
def update
@user_project_relation = project.users_projects.find_by_user_id(member)
@user_project_relation.update_attributes(params[:team_member])
unless @user_project_relation.valid?
flash[:alert] = "User should have at least one role"
end
redirect_to project_team_index_path(@project)
end
def destroy
@user_project_relation = project.users_projects.find_by_user_id(member)
@user_project_relation.destroy
respond_to do |format|
format.html { redirect_to project_team_index_path(@project) }
format.js { render nothing: true }
end
end
def apply_import
giver = Project.find(params[:source_project_id])
status = @project.team.import(giver)
notice = status ? "Succesfully imported" : "Import failed"
redirect_to project_team_members_path(project), notice: notice
end
protected
def member
@member ||= User.find_by_username(params[:id])
end
end
| 25.446154 | 75 | 0.732164 |
d536d21df1036aa3c15fa6ae8c2c040de90a6bcd | 213 | # frozen_string_literal: true
# config/routes.rb
Rails.application.routes.draw do
devise_for :users
get 'favorites/update'
resources :comics
get 'comics', to: 'comics#index'
root to: 'comics#index'
end
| 19.363636 | 34 | 0.737089 |
b9e12f2e8b7b6470d13dd97551b55c44a17152cc | 1,102 | require "language/node"
class GatsbyCli < Formula
desc "Gatsby command-line interface"
homepage "https://www.gatsbyjs.org/docs/gatsby-cli/"
url "https://registry.npmjs.org/gatsby-cli/-/gatsby-cli-2.12.49.tgz"
sha256 "5cc34948df99e3bf81fcfdd59c8b74ee181855923ff70db718f773c4cd0a89ad"
bottle do
sha256 "2652e2bd5ed70122a1d091ac4dd1339d7fd95638140f9743b69d96efbc04e56a" => :catalina
sha256 "67b3ebe76e31e225ba583a1646e7dd2d8d7c1170a9914c515b52a6c247b669ba" => :mojave
sha256 "4ed18b67f9253692e0077f40ded757a954b9723a7a4309f5caabf12c5c3468f5" => :high_sierra
end
depends_on "node"
def install
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir["#{libexec}/bin/*"]
# Avoid references to Homebrew shims
rm_f "#{libexec}/lib/node_modules/gatsby-cli/node_modules/websocket/builderror.log"
end
test do
system bin/"gatsby", "new", "hello-world", "https://github.com/gatsbyjs/gatsby-starter-hello-world"
assert_predicate testpath/"hello-world/package.json", :exist?, "package.json was not cloned"
end
end
| 36.733333 | 103 | 0.767695 |
268ada5074b40abe2334e5967d605a64d259fb09 | 218 | require "#{Rails.root}/lib/scraper/recipes_scraper.rb"
namespace :scrape do
task :category, ['category_id'] => :environment do |task, args|
CategoryScraper.scrape_category(args['category_id'])
end
end
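# Example invocation (assumes the categories table is populated; 42 is a
# placeholder id):
#
#   bundle exec rake "scrape:category[42]"
#
# The quotes keep shells like zsh from globbing the brackets.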
| 27.25 | 67 | 0.711009 |
87b33ab9e7beb7d37e057ef0a90d7762986c81ee | 173 | class CreatePredictionModels < ActiveRecord::Migration[5.1]
def change
create_table :prediction_models do |t|
t.binary :dump_data, null: false
end
end
end
| 21.625 | 59 | 0.722543 |
e975a1f5a95fc50e9055dd2c6effddc010420d06 | 1,966 | require 'test_helper'
class UsersLoginTest < ActionDispatch::IntegrationTest
def setup
@user = users(:michael)
end
test "login with invalid information" do
get login_path
assert_template 'sessions/new'
post login_path, params: { session: { email: "", password: "" } }
assert_template 'sessions/new'
assert_not flash.empty?
get root_path
assert flash.empty?
end
test "login with valid information" do
get login_path
post login_path, params: { session: {
email: @user.email,
password: 'password' } }
assert_redirected_to @user
follow_redirect!
assert_template 'users/show'
assert_select "a[href=?]", login_path, count: 0
assert_select "a[href=?]", logout_path
assert_select "a[href=?]", user_path(@user)
end
test "login with valid information followed by logout" do
get login_path
post login_path, params: { session: { email: @user.email,
password: 'password' } }
assert is_logged_in?
assert_redirected_to @user
follow_redirect!
assert_template 'users/show'
assert_select "a[href=?]", login_path, count: 0
assert_select "a[href=?]", logout_path
assert_select "a[href=?]", user_path(@user)
delete logout_path
assert_not is_logged_in?
assert_redirected_to root_url
# Simulate a user clicking logout in a second window
delete logout_path
follow_redirect!
assert_select "a[href=?]", login_path
assert_select "a[href=?]", logout_path, count: 0
assert_select "a[href=?]", user_path(@user), count: 0
end
test "login with remembering" do
log_in_as(@user, remember_me: '1')
assert_not_empty cookies['remember_token']
end
test "login without remembering" do
# Log in with remember_me enabled so the cookie is saved
log_in_as(@user, remember_me: '1')
delete logout_path
# Log in again with remember_me disabled so the cookie is cleared
log_in_as(@user, remember_me: '0')
assert_empty cookies['remember_token']
end
end | 28.085714 | 69 | 0.673449 |
d53675fa95b770ee15613b47973cc21085959e17 | 326 | module VCAP::CloudController
module InternalApi
def configure(config)
@config = config
end
module_function :configure
def credentials
[
@config.get(:internal_api, :auth_user),
@config.get(:internal_api, :auth_password),
]
end
module_function :credentials
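# Example (boot-time sketch; `config` is the loaded Cloud Controller
# configuration object):
#
#   InternalApi.configure(config)
#   user, password = InternalApi.credentials
#   # => basic-auth pair used when calling the internal API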
end
end
| 19.176471 | 51 | 0.650307 |
e9420ef99115ca4dcc325340ba95bdb0ae688a7b | 25,604 | require "helper"
require "inspec/resource"
require "inspec/resources/service"
require "hashie"
describe "Inspec::Resources::Service" do
let(:runlevels) { { 0 => false, 1 => false, 2 => true, 3 => true, 4 => true, 5 => true, 6 => false } }
# windows
it "verify service parsing" do
resource = MockLoader.new(:windows).load_resource("service", "dhcp")
params = Hashie::Mash.new({})
_(resource.type).must_equal "windows"
_(resource.name).must_equal "dhcp"
_(resource.description).must_equal "DHCP Client"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.startmode). must_equal "Auto"
_(resource.startname). must_equal "LocalSystem"
_(resource.params).must_equal params
end
# ubuntu
it "verify ubuntu service parsing" do
resource = MockLoader.new(:ubuntu1404).load_resource("service", "ssh")
params = Hashie::Mash.new({})
_(resource.type).must_equal "upstart"
_(resource.name).must_equal "ssh"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it "verify ubuntu service parsing with default upstart_service" do
resource = MockLoader.new(:ubuntu).load_resource("upstart_service", "ssh")
params = Hashie::Mash.new({})
_(resource.type).must_equal "upstart"
_(resource.name).must_equal "ssh"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
_(resource.params.UnitFileState).must_be_nil
end
it "verify ubuntu service parsing" do
resource = MockLoader.new(:ubuntu).load_resource("service", "sshd")
params = Hashie::Mash.new({ "ActiveState" => "active", "Description" => "OpenSSH server daemon", "Id" => "sshd.service", "LoadState" => "loaded", "Names" => "sshd.service", "SubState" => "running", "UnitFileState" => "enabled" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "sshd.service"
_(resource.description).must_equal "OpenSSH server daemon"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
_(resource.params.SubState).must_equal "running"
end
it "verify ubuntu service parsing with default systemd_service" do
resource = MockLoader.new(:ubuntu).load_resource("systemd_service", "sshd")
params = Hashie::Mash.new({ "ActiveState" => "active", "Description" => "OpenSSH server daemon", "Id" => "sshd.service", "LoadState" => "loaded", "Names" => "sshd.service", "SubState" => "running", "UnitFileState" => "enabled" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "sshd.service"
_(resource.description).must_equal "OpenSSH server daemon"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# linux mint 17 with upstart
it "verify mint service parsing" do
resource = MockLoader.new(:mint17).load_resource("service", "ssh")
params = Hashie::Mash.new({})
_(resource.type).must_equal "upstart"
_(resource.name).must_equal "ssh"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it "verify mint service parsing with default upstart_service" do
resource = MockLoader.new(:mint17).load_resource("upstart_service", "ssh")
params = Hashie::Mash.new({})
_(resource.type).must_equal "upstart"
_(resource.name).must_equal "ssh"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
_(resource.params.UnitFileState).must_be_nil
end
# mint 18 with systemd
it "verify mint service parsing" do
resource = MockLoader.new(:mint18).load_resource("service", "sshd")
params = Hashie::Mash.new({ "ActiveState" => "active", "Description" => "OpenSSH server daemon", "Id" => "sshd.service", "LoadState" => "loaded", "Names" => "sshd.service", "SubState" => "running", "UnitFileState" => "enabled" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "sshd.service"
_(resource.description).must_equal "OpenSSH server daemon"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
_(resource.params.SubState).must_equal "running"
end
it "verify mint service parsing with default systemd_service" do
resource = MockLoader.new(:mint18).load_resource("systemd_service", "sshd")
params = Hashie::Mash.new({ "ActiveState" => "active", "Description" => "OpenSSH server daemon", "Id" => "sshd.service", "LoadState" => "loaded", "Names" => "sshd.service", "SubState" => "running", "UnitFileState" => "enabled" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "sshd.service"
_(resource.description).must_equal "OpenSSH server daemon"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# [-] Todo: Check with the team whether we can remove the unit test below or find a way to include it.
# The test below is omitted because we cannot differentiate between Amazon Linux and Amazon Linux 2
# during testing: the distinction is based not on the version but on the init utility available on
# the system. The service resource itself works fine on actual Amazon Linux and Amazon Linux 2 instances.
# Amazon Linux
# it "verify amazon linux service parsing" do
# resource = MockLoader.new(:amazon).load_resource("service", "ssh")
# params = Hashie::Mash.new({})
# # _(resource.type).must_equal "upstart"
# # _(resource.name).must_equal "ssh"
# _(resource.description).must_be_nil
# _(resource.installed?).must_equal true
# _(resource.enabled?).must_equal true
# _(resource.running?).must_equal true
# _(resource.params).must_equal params
# _(resource.params.UnitFileState).must_be_nil
# end
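# On a real instance this resource is exercised from a profile control
# rather than a unit test; a minimal sketch (hypothetical target):
#
#   describe service('sshd') do
#     it { should be_installed }
#     it { should be_enabled }
#     it { should be_running }
#   end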
# Amazon Linux 2
it "verify amazon linux 2 service parsing" do
resource = MockLoader.new(:amazon2).load_resource("service", "sshd")
params = Hashie::Mash.new({ "ActiveState" => "active", "Description" => "OpenSSH server daemon", "Id" => "sshd.service", "LoadState" => "loaded", "Names" => "sshd.service", "SubState" => "running", "UnitFileState" => "enabled" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "sshd.service"
_(resource.description).must_equal "OpenSSH server daemon"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# Aliyun Linux 3 (Alibaba)
it "verify aliyun linux 3 service parsing" do
resource = MockLoader.new(:aliyun3).load_resource("service", "sshd")
params = Hashie::Mash.new({ "ActiveState" => "active", "Description" => "OpenSSH server daemon", "Id" => "sshd.service", "LoadState" => "loaded", "Names" => "sshd.service", "SubState" => "running", "UnitFileState" => "enabled" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "sshd.service"
_(resource.description).must_equal "OpenSSH server daemon"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# centos 6 with sysv
it "verify centos 6 service parsing" do
resource = MockLoader.new(:centos6).load_resource("service", "sshd")
params = Hashie::Mash.new({})
_(resource.type).must_equal "sysv"
_(resource.name).must_equal "sshd"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
_(resource.params.SubState).must_be_nil
end
it "verify centos 6 service parsing with default sysv_service" do
resource = MockLoader.new(:centos6).load_resource("sysv_service", "sshd")
params = Hashie::Mash.new({})
_(resource.type).must_equal "sysv"
_(resource.name).must_equal "sshd"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# centos 7 with systemd
it "verify centos 7 service parsing" do
resource = MockLoader.new(:centos7).load_resource("service", "sshd")
params = Hashie::Mash.new({ "ActiveState" => "active", "Description" => "OpenSSH server daemon", "Id" => "sshd.service", "LoadState" => "loaded", "Names" => "sshd.service", "SubState" => "running", "UnitFileState" => "enabled" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "sshd.service"
_(resource.description).must_equal "OpenSSH server daemon"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it "verify centos 7 service parsing with systemd_service and service_ctl override" do
resource = MockLoader.new(:centos7).load_resource("systemd_service", "sshd", "/path/to/systemctl")
params = Hashie::Mash.new({ "ActiveState" => "active", "Description" => "OpenSSH server daemon", "Id" => "sshd.service", "LoadState" => "loaded", "Names" => "sshd.service", "UnitFileState" => "enabled", "SubState" => "running" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "sshd.service"
_(resource.description).must_equal "OpenSSH server daemon"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it "verify centos 7 service parsing with static loaded service" do
resource = MockLoader.new(:centos7).load_resource("service", "dbus")
params = Hashie::Mash.new({ "Description" => "D-Bus System Message Bus", "Id" => "dbus.service", "LoadState" => "loaded", "Names" => "messagebus.service dbus.service", "SubState" => "running", "UnitFileState" => "static", "User" => "root" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "dbus.service"
_(resource.description).must_equal "D-Bus System Message Bus"
_(resource.startname).must_equal "root"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
_(resource.params.UnitFileState).must_equal "static"
end
# cloudlinux 7 with systemd
it "verify cloudlinux 7 service parsing" do
resource = MockLoader.new(:cloudlinux).load_resource("service", "sshd")
params = Hashie::Mash.new({ "ActiveState" => "active", "Description" => "OpenSSH server daemon", "Id" => "sshd.service", "LoadState" => "loaded", "Names" => "sshd.service", "SubState" => "running", "UnitFileState" => "enabled" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "sshd.service"
_(resource.description).must_equal "OpenSSH server daemon"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it "verify cloudlinux 7 service parsing with systemd_service and service_ctl override" do
resource = MockLoader.new(:cloudlinux).load_resource("systemd_service", "sshd", "/path/to/systemctl")
params = Hashie::Mash.new({ "ActiveState" => "active", "Description" => "OpenSSH server daemon", "Id" => "sshd.service", "LoadState" => "loaded", "Names" => "sshd.service", "UnitFileState" => "enabled", "SubState" => "running" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "sshd.service"
_(resource.description).must_equal "OpenSSH server daemon"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it "verify cloudlinux 7 service parsing with static loaded service" do
resource = MockLoader.new(:cloudlinux).load_resource("service", "dbus")
params = Hashie::Mash.new({ "Description" => "D-Bus System Message Bus", "Id" => "dbus.service", "LoadState" => "loaded", "Names" => "messagebus.service dbus.service", "SubState" => "running", "UnitFileState" => "static", "User" => "root" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "dbus.service"
_(resource.description).must_equal "D-Bus System Message Bus"
_(resource.startname).must_equal "root"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
_(resource.params.UnitFileState).must_equal "static"
end
# freebsd 9
it "verify freebsd9 service parsing" do
resource = MockLoader.new(:freebsd9).load_resource("service", "sendmail")
params = Hashie::Mash.new({})
_(resource.type).must_equal "bsd-init"
_(resource.name).must_equal "sendmail"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it "verify freebsd9 service parsing with default bsd_service" do
resource = MockLoader.new(:freebsd9).load_resource("bsd_service", "sendmail")
params = Hashie::Mash.new({})
_(resource.type).must_equal "bsd-init"
_(resource.name).must_equal "sendmail"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it "verify freebsd9 service parsing when one service is a suffix of another" do
resource = MockLoader.new(:freebsd9).load_resource("service", "mail") # "mail" is suffix of "sendmail", which is enabled
_(resource.enabled?).must_equal false
end
# freebsd 10+
it "verify freebsd10 service parsing" do
resource = MockLoader.new(:freebsd10).load_resource("service", "sendmail")
params = Hashie::Mash.new({})
_(resource.type).must_equal "bsd-init"
_(resource.name).must_equal "sendmail"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it "verify freebsd10 service parsing with default bsd_service" do
resource = MockLoader.new(:freebsd10).load_resource("bsd_service", "sendmail")
params = Hashie::Mash.new({})
_(resource.type).must_equal "bsd-init"
_(resource.name).must_equal "sendmail"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# arch linux with systemd
it "verify arch linux service parsing" do
resource = MockLoader.new(:arch).load_resource("service", "sshd")
params = Hashie::Mash.new({ "ActiveState" => "active", "Description" => "OpenSSH server daemon", "Id" => "sshd.service", "LoadState" => "loaded", "Names" => "sshd.service", "SubState" => "running", "UnitFileState" => "enabled" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "sshd.service"
_(resource.description).must_equal "OpenSSH server daemon"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# coreos linux with systemd
it "verify coreos linux service parsing" do
resource = MockLoader.new(:coreos).load_resource("service", "sshd")
params = Hashie::Mash.new({ "ActiveState" => "active", "Description" => "OpenSSH server daemon", "Id" => "sshd.service", "LoadState" => "loaded", "Names" => "sshd.service", "SubState" => "running", "UnitFileState" => "enabled" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "sshd.service"
_(resource.description).must_equal "OpenSSH server daemon"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# debian 7 with systemv
it "verify debian 7 service parsing" do
resource = MockLoader.new(:debian7).load_resource("service", "sshd")
params = Hashie::Mash.new({})
_(resource.type).must_equal "sysv"
_(resource.name).must_equal "sshd"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# debian 8 with systemd
it "verify debian 8 service parsing" do
resource = MockLoader.new(:debian8).load_resource("service", "sshd")
params = Hashie::Mash.new({ "ActiveState" => "active", "Description" => "OpenSSH server daemon", "Id" => "sshd.service", "LoadState" => "loaded", "Names" => "sshd.service", "SubState" => "running", "UnitFileState" => "enabled" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "sshd.service"
_(resource.description).must_equal "OpenSSH server daemon"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# debian 10 with systemd
it "verify debian 10 service parsing" do
resource = MockLoader.new(:debian10).load_resource("service", "sshd")
params = Hashie::Mash.new({ "ActiveState" => "active", "Description" => "OpenSSH server daemon", "Id" => "sshd.service", "LoadState" => "loaded", "Names" => "sshd.service", "SubState" => "running", "UnitFileState" => "enabled" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "sshd.service"
_(resource.description).must_equal "OpenSSH server daemon"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# debian 8 with systemd but no service file
it "gets the correct service info when the `.service` file is missing" do
resource = MockLoader.new(:debian8).load_resource("service", "apache2")
params = Hashie::Mash.new(
"ActiveState" => "active",
"Description" => "LSB: Apache2 web server",
"Id" => "apache2.service",
"LoadState" => "loaded",
"Names" => "apache2.service",
"SubState" => "running",
"UnitFileState" => ""
)
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "apache2.service"
_(resource.description).must_equal "LSB: Apache2 web server"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# macos test
it "verify mac osx service parsing" do
resource = MockLoader.new(:macos10_10).load_resource("service", "ssh")
params = Hashie::Mash.new({})
_(resource.type).must_equal "darwin"
_(resource.name).must_equal "org.openbsd.ssh-agent"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it "verify macos 10.16 (11 / big sur) service parsing" do
resource = MockLoader.new(:macos10_16).load_resource("service", "ssh")
params = Hashie::Mash.new({})
_(resource.type).must_equal "darwin"
_(resource.name).must_equal "org.openbsd.ssh-agent"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it "verify mac osx service parsing with not-running service" do
resource = MockLoader.new(:macos10_10).load_resource("service", "FilesystemUI")
params = Hashie::Mash.new({})
_(resource.type).must_equal "darwin"
_(resource.name).must_equal "com.apple.FilesystemUI"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal false
_(resource.params).must_equal params
end
it "verify mac osx service parsing with default launchd_service" do
resource = MockLoader.new(:macos10_10).load_resource("launchd_service", "ssh")
params = Hashie::Mash.new({})
_(resource.type).must_equal "darwin"
_(resource.name).must_equal "org.openbsd.ssh-agent"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# wrlinux
it "verify wrlinux service parsing" do
resource = MockLoader.new(:wrlinux).load_resource("service", "sshd")
params = Hashie::Mash.new({})
_(resource.type).must_equal "sysv"
_(resource.name).must_equal "sshd"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# yocto
it "verify yocto service parsing" do
resource = MockLoader.new(:yocto).load_resource("service", "sshd")
params = Hashie::Mash.new({ "ActiveState" => "active", "Description" => "OpenSSH server daemon", "Id" => "sshd.service", "LoadState" => "loaded", "Names" => "sshd.service", "SubState" => "running", "UnitFileState" => "enabled" })
_(resource.type).must_equal "systemd"
_(resource.name).must_equal "sshd.service"
_(resource.description).must_equal "OpenSSH server daemon"
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it "verify alpine service parsing" do
resource = MockLoader.new(:alpine).load_resource("service", "sshd")
params = Hashie::Mash.new({})
_(resource.type).must_equal "sysv"
_(resource.name).must_equal "sshd"
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# unknown OS
it "verify service handling on unsupported os" do
resource = MockLoader.new(:undefined).load_resource("service", "dhcp")
params = Hashie::Mash.new({})
_(resource.installed?).must_equal false
_(resource.description).must_be_nil
_(resource.params).must_equal params
end
# runlevel detection
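# In a profile, the same runlevel filter reads like this (an illustrative
# sketch of documented usage, not part of this test file):
#   describe service('sshd').runlevels(2, 4) do
#     it { should be_enabled }
#   end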
describe "runlevels on centos 6 (system V)" do
let(:service) { MockLoader.new(:centos6).load_resource("service", "sshd") }
it "grabs all runlevels" do
_(service.runlevels.keys).must_equal [0, 1, 2, 3, 4, 5, 6]
end
it "grabs runlevels via filter nil" do
_(service.runlevels(nil).keys).must_equal [0, 1, 2, 3, 4, 5, 6]
end
it "grabs runlevels by number" do
_(service.runlevels(3).keys).must_equal [3]
end
it "grabs runlevels by multiple numbers" do
_(service.runlevels(3, 4, 8).keys).must_equal [3, 4]
end
it "grabs runlevels via regex" do
_(service.runlevels(/[5-9]/).keys).must_equal [5, 6]
end
it "checks enabled true if all services are enabled" do
_(service.runlevels(2, 4).enabled?).must_equal true
end
it "checks enabled false if some services are not enabled" do
_(service.runlevels(1, 4).enabled?).must_equal false
end
it "checks disabled true if all services are disabled" do
_(service.runlevels(0, 1).disabled?).must_equal true
end
it "checks disabled false if some services are not disabled" do
_(service.runlevels(0, 4).disabled?).must_equal false
end
# windows
it "verify serverspec compatible matchers on windows" do
resource = MockLoader.new(:windows).load_resource("service", "dhcp")
_(resource.name).must_equal "dhcp"
_(resource.has_start_mode?("Auto")).must_equal true
end
# ubuntu
it "verify serverspec compatible matchers on ubuntu" do
resource = MockLoader.new(:ubuntu1404).load_resource("service", "ssh")
_(resource.name).must_equal "ssh"
_(resource.monitored_by?("monit")).must_equal true
ex = _ { resource.has_start_mode?("Auto") }.must_raise(Inspec::Exceptions::ResourceSkipped)
_(ex.message).must_include "The `has_start_mode` matcher is not supported on your OS yet."
end
end
end
| 44.919298 | 245 | 0.702546 |
e93e4e0a54c3c10c5b36a1acfa61695c41a9fcb5 | 479 | class ChangeUserDataValueType < ActiveRecord::Migration[5.0]
def up
change_column :user_data, :value, :jsonb, using: 'value::jsonb'
execute <<-SQL.gsub(/\s+/, ' ')
UPDATE user_data
SET value = jsonb_build_object(
'kind',
'effort',
'totalPoint',
value
)
WHERE kind = 'point' AND jsonb_typeof(value) = 'number'
SQL
end
def down
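# NOTE: this restores the column type only; the point -> effort JSON
# restructuring performed in #up is not reversed here.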
change_column :user_data, :value, :string, using: 'value::text'
end
end
| 23.95 | 67 | 0.605428 |
abac18fc83cb0c5bcf5023b128e4de1984753157 | 14,361 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "helper"
require "gapic/grpc/service_stub"
require "google/cloud/aiplatform/v1/endpoint_service_pb"
require "google/cloud/aiplatform/v1/endpoint_service_services_pb"
require "google/cloud/ai_platform/v1/endpoint_service"
class ::Google::Cloud::AIPlatform::V1::EndpointService::OperationsTest < Minitest::Test
class ClientStub
attr_accessor :call_rpc_count, :requests
def initialize response, operation, &block
@response = response
@operation = operation
@block = block
@call_rpc_count = 0
@requests = []
end
def call_rpc *args, **kwargs
@call_rpc_count += 1
@requests << @block&.call(*args, **kwargs)
yield @response, @operation if block_given?
@response
end
end
def test_list_operations
# Create GRPC objects.
grpc_response = ::Google::Longrunning::ListOperationsResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
filter = "hello world"
page_size = 42
page_token = "hello world"
list_operations_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :list_operations, name
assert_kind_of ::Google::Longrunning::ListOperationsRequest, request
assert_equal "hello world", request["name"]
assert_equal "hello world", request["filter"]
assert_equal 42, request["page_size"]
assert_equal "hello world", request["page_token"]
refute_nil options
end
Gapic::ServiceStub.stub :new, list_operations_client_stub do
# Create client
client = ::Google::Cloud::AIPlatform::V1::EndpointService::Operations.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.list_operations({ name: name, filter: filter, page_size: page_size, page_token: page_token }) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use named arguments
client.list_operations name: name, filter: filter, page_size: page_size, page_token: page_token do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.list_operations ::Google::Longrunning::ListOperationsRequest.new(name: name, filter: filter, page_size: page_size, page_token: page_token) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.list_operations({ name: name, filter: filter, page_size: page_size, page_token: page_token }, grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.list_operations(::Google::Longrunning::ListOperationsRequest.new(name: name, filter: filter, page_size: page_size, page_token: page_token), grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, list_operations_client_stub.call_rpc_count
end
end
def test_get_operation
# Create GRPC objects.
grpc_response = ::Google::Longrunning::Operation.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
get_operation_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :get_operation, name
assert_kind_of ::Google::Longrunning::GetOperationRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, get_operation_client_stub do
# Create client
client = ::Google::Cloud::AIPlatform::V1::EndpointService::Operations.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.get_operation({ name: name }) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use named arguments
client.get_operation name: name do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use protobuf object
client.get_operation ::Google::Longrunning::GetOperationRequest.new(name: name) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use hash object with options
client.get_operation({ name: name }, grpc_options) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.get_operation(::Google::Longrunning::GetOperationRequest.new(name: name), grpc_options) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, get_operation_client_stub.call_rpc_count
end
end
def test_delete_operation
# Create GRPC objects.
grpc_response = ::Google::Protobuf::Empty.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
delete_operation_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :delete_operation, name
assert_kind_of ::Google::Longrunning::DeleteOperationRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, delete_operation_client_stub do
# Create client
client = ::Google::Cloud::AIPlatform::V1::EndpointService::Operations.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.delete_operation({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.delete_operation name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.delete_operation ::Google::Longrunning::DeleteOperationRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.delete_operation({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.delete_operation(::Google::Longrunning::DeleteOperationRequest.new(name: name), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, delete_operation_client_stub.call_rpc_count
end
end
def test_cancel_operation
# Create GRPC objects.
grpc_response = ::Google::Protobuf::Empty.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
cancel_operation_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :cancel_operation, name
assert_kind_of ::Google::Longrunning::CancelOperationRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, cancel_operation_client_stub do
# Create client
client = ::Google::Cloud::AIPlatform::V1::EndpointService::Operations.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.cancel_operation({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.cancel_operation name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.cancel_operation ::Google::Longrunning::CancelOperationRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.cancel_operation({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.cancel_operation(::Google::Longrunning::CancelOperationRequest.new(name: name), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, cancel_operation_client_stub.call_rpc_count
end
end
def test_wait_operation
# Create GRPC objects.
grpc_response = ::Google::Longrunning::Operation.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
timeout = {}
wait_operation_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :wait_operation, name
assert_kind_of ::Google::Longrunning::WaitOperationRequest, request
assert_equal "hello world", request["name"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Duration), request["timeout"]
refute_nil options
end
Gapic::ServiceStub.stub :new, wait_operation_client_stub do
# Create client
client = ::Google::Cloud::AIPlatform::V1::EndpointService::Operations.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.wait_operation({ name: name, timeout: timeout }) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use named arguments
client.wait_operation name: name, timeout: timeout do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use protobuf object
client.wait_operation ::Google::Longrunning::WaitOperationRequest.new(name: name, timeout: timeout) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use hash object with options
client.wait_operation({ name: name, timeout: timeout }, grpc_options) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.wait_operation(::Google::Longrunning::WaitOperationRequest.new(name: name, timeout: timeout), grpc_options) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, wait_operation_client_stub.call_rpc_count
end
end
def test_configure
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
client = block_config = config = nil
Gapic::ServiceStub.stub :new, nil do
client = ::Google::Cloud::AIPlatform::V1::EndpointService::Operations.new do |config|
config.credentials = grpc_channel
end
end
config = client.configure do |c|
block_config = c
end
assert_same block_config, config
assert_kind_of ::Google::Cloud::AIPlatform::V1::EndpointService::Operations::Configuration, config
end
end
| 37.692913 | 191 | 0.716036 |
5d247b927a819a00906abcc5107730e9f2564071 | 3,749 | # frozen_string_literal: true
require 'rails_helper'
require_relative '../support/iam_session_helper'
require_relative '../support/matchers/json_schema_matcher'
RSpec.describe 'maintenance windows', type: :request do
include JsonSchemaMatchers
describe 'GET /mobile/v0/maintenance_windows' do
context 'when no maintenance windows are active' do
before { get '/mobile/v0/maintenance_windows', headers: { 'X-Key-Inflection' => 'camel' } }
it 'matches the expected schema' do
expect(response.body).to match_json_schema('maintenance_windows')
end
it 'returns an empty array of affected services' do
expect(response.parsed_body['data']).to eq([])
end
end
context 'when a maintenance with many dependent services and a window not in the service map is active' do
before do
Timecop.freeze('2021-05-25T23:33:39Z')
FactoryBot.create(:mobile_maintenance_evss)
FactoryBot.create(:mobile_maintenance_mpi)
FactoryBot.create(:mobile_maintenance_dslogon)
get '/mobile/v0/maintenance_windows', headers: { 'X-Key-Inflection' => 'camel' }
end
after { Timecop.return }
it 'matches the expected schema' do
expect(response.body).to match_json_schema('maintenance_windows')
end
it 'returns an array of the affected services' do
expect(response.parsed_body['data']).to eq(
[
{
'id' => '321e9dcf-2578-5956-9baa-295735d97c3c',
'type' => 'maintenance_window',
'attributes' => {
'service' => 'claims',
'startTime' => '2021-05-25T21:33:39.000Z',
'endTime' => '2021-05-26T01:45:00.000Z'
}
},
{
'id' => '14ad3ba9-7ec8-51b8-bbb3-dc20e6655b26',
'type' => 'maintenance_window',
'attributes' => {
'service' => 'direct_deposit_benefits',
'startTime' => '2021-05-25T21:33:39.000Z',
'endTime' => '2021-05-26T01:45:00.000Z'
}
},
{
'id' => '858b59df-4cef-5f34-91a4-57edd382e4e5',
'type' => 'maintenance_window',
'attributes' => {
'service' => 'disability_rating',
'startTime' => '2021-05-25T21:33:39.000Z',
'endTime' => '2021-05-26T01:45:00.000Z'
}
},
{
'id' => 'cac05630-8879-594c-8655-1a6ff582dc5d',
'type' => 'maintenance_window',
'attributes' => {
'service' => 'letters_and_documents',
'startTime' => '2021-05-25T21:33:39.000Z',
'endTime' => '2021-05-26T01:45:00.000Z'
}
}
]
)
end
end
context 'when BGS is down' do
before do
Timecop.freeze('2021-05-25T23:33:39Z')
FactoryBot.create(:mobile_maintenance_bgs)
get '/mobile/v0/maintenance_windows', headers: { 'X-Key-Inflection' => 'camel' }
end
after { Timecop.return }
it 'matches the expected schema' do
expect(response.body).to match_json_schema('maintenance_windows')
end
it 'includes payment history as an affected service' do
expect(response.parsed_body['data']).to include(
{
'id' => '4ebb2370-3f56-5f24-a2f9-3b211f59077e',
'type' => 'maintenance_window',
'attributes' => {
'service' => 'payment_history',
'startTime' => '2021-05-25T23:33:39.000Z',
'endTime' => '2021-05-26T01:45:00.000Z'
}
}
)
end
end
end
end
| 34.081818 | 110 | 0.55108 |
03ad4a3ee34bc6a2ebc41a64fd16839fda8ac1af | 4,367 | #!/usr/bin/env ruby
# Encoding: utf-8
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This example shows how to add ad group bid modifiers to a hotel ad group
# based on hotel check-in day and hotel length of stay.
require 'optparse'
require 'google/ads/google_ads'
require 'date'
# [START add_hotel_ad_group_bid_modifiers]
def add_hotel_ad_group_bid_modifiers(customer_id, ad_group_id)
# GoogleAdsClient will read a config file from
# ENV['HOME']/google_ads_config.rb when called without parameters
client = Google::Ads::GoogleAds::GoogleAdsClient.new
operations = []
ad_group_resource = client.path.ad_group(customer_id, ad_group_id)
# 1) Creates an ad group bid modifier based on the hotel check-in day.
operations << client.operation.create_resource.ad_group_bid_modifier do |bm|
# Sets the ad group.
bm.ad_group = ad_group_resource
# Sets the check-in day to Monday.
bm.hotel_check_in_day = client.resource.hotel_check_in_day_info do |info|
info.day_of_week = :MONDAY
end
# Sets the bid modifier value to 150%.
bm.bid_modifier = 1.5
end
# 2) Creates an ad group bid modifier based on the hotel length of stay.
operations << client.operation.create_resource.ad_group_bid_modifier do |bm|
# Sets the ad group.
bm.ad_group = ad_group_resource
# Creates the hotel length of stay info.
bm.hotel_length_of_stay = client.resource.hotel_length_of_stay_info do |info|
info.min_nights = 3
info.max_nights = 7
end
# Sets the bid modifier value to 170%.
bm.bid_modifier = 1.7
end
# 3) Issues a mutate request to add the ad group bid modifiers.
ad_group_bid_modifier_service = client.service.ad_group_bid_modifier
response = ad_group_bid_modifier_service.mutate_ad_group_bid_modifiers(
customer_id: customer_id,
operations: operations,
)
# Print out resource names of the added ad group bid modifiers.
puts "Added #{response.results.size} hotel ad group bid modifiers:"
response.results.each do |added_ad_group_bid_modifier|
puts "\t#{added_ad_group_bid_modifier.resource_name}"
end
end
# [END add_hotel_ad_group_bid_modifiers]
if __FILE__ == $0
options = {}
# The following parameter(s) should be provided to run the example. You can
# either specify these by changing the INSERT_XXX_ID_HERE values below, or on
# the command line.
#
# Parameters passed on the command line will override any parameters set in
# code.
#
# Running the example with -h will print the command line usage.
options[:customer_id] = 'INSERT_CUSTOMER_ID_HERE'
options[:ad_group_id] = 'INSERT_AD_GROUP_ID_HERE'
OptionParser.new do |opts|
opts.banner = sprintf('Usage: %s [options]', File.basename(__FILE__))
opts.separator ''
opts.separator 'Options:'
opts.on('-C', '--customer-id CUSTOMER-ID', String, 'Customer ID') do |v|
options[:customer_id] = v
end
opts.on('-A', '--ad-group-id AD-GROUP-ID', Integer,
'AdGroup ID') do |v|
options[:ad_group_id] = v
end
opts.separator ''
opts.separator 'Help:'
opts.on_tail('-h', '--help', 'Show this message') do
puts opts
exit
end
end.parse!
begin
add_hotel_ad_group_bid_modifiers(options.fetch(:customer_id).tr("-", ""),
options[:ad_group_id])
rescue Google::Ads::GoogleAds::Errors::GoogleAdsError => e
e.failure.errors.each do |error|
STDERR.printf("Error with message: %s\n", error.message)
if error.location
error.location.field_path_elements.each do |field_path_element|
STDERR.printf("\tOn field: %s\n", field_path_element.field_name)
end
end
error.error_code.to_h.each do |k, v|
next if v == :UNSPECIFIED
STDERR.printf("\tType: %s\n\tCode: %s\n", k, v)
end
end
raise
end
end
| 32.834586 | 81 | 0.711472 |
e2080a5323702a9cfed41a458ebf79bac6d6148c | 189 | require 'helper'
class TestOmgtex < MiniTest::Unit::TestCase
def test_something_for_real
flunk "hey buddy, you should probably rename this file and start testing for real"
end
end
| 23.625 | 86 | 0.772487 |
1d88333125d44ab1cbcdfa68ae6cea2066f6d43f | 144 | # frozen_string_literal: true
FactoryBot.define do
factory :cart do
account
factory :cart_with_user do
user
end
end
end
| 12 | 30 | 0.6875 |
030e896eb243dabaae6627fd10f9807e9a857a3c | 8,987 | # encoding: utf-8
require "logstash/filters/base"
require "logstash/namespace"
require "tempfile"
require "lru_redux"
# The GeoIP filter adds information about the geographical location of IP addresses,
# based on data from the Maxmind database.
#
# Starting with version 1.3.0 of Logstash, a `[geoip][location]` field is created if
# the GeoIP lookup returns a latitude and longitude. The field is stored in
# http://geojson.org/geojson-spec.html[GeoJSON] format. Additionally,
# the default Elasticsearch template provided with the
# <<plugins-outputs-elasticsearch,`elasticsearch` output>> maps
# the `[geoip][location]` field to an https://www.elastic.co/guide/en/elasticsearch/reference/1.7/mapping-geo-point-type.html#_mapping_options[Elasticsearch geo_point].
#
# As this field is a `geo_point` _and_ it is still valid GeoJSON, you get
# the awesomeness of Elasticsearch's geospatial query, facet and filter functions
# and the flexibility of having GeoJSON for all other applications (like Kibana's
# map visualization).
#
# Logstash releases ship with the GeoLiteCity database made available from
# Maxmind with a CCA-ShareAlike 3.0 license. For more details on GeoLite, see
# <http://www.maxmind.com/en/geolite>.
class LogStash::Filters::GeoASN < LogStash::Filters::Base
LOOKUP_CACHE_INIT_MUTEX = Mutex.new
# Map of lookup caches, keyed by geoip_type
LOOKUP_CACHES = {}
attr_accessor :lookup_cache
attr_reader :threadkey
config_name "geoasn"
# The path to the GeoIP database file which Logstash should use. Country, City, ASN, ISP
# and organization databases are supported.
#
# If not specified, this will default to the GeoLiteCity database that ships
# with Logstash.
# Up-to-date databases can be downloaded from here: <https://dev.maxmind.com/geoip/legacy/geolite/>
# Please be sure to download a legacy format database.
config :database, :validate => :path
# The field containing the IP address or hostname to map via geoip. If
# this field is an array, only the first value will be used.
config :source, :validate => :string, :required => true
# An array of geoip fields to be included in the event.
#
# Possible fields depend on the database type. By default, all geoip fields
# are included in the event.
#
# For the built-in GeoLiteCity database, the following are available:
# `city_name`, `continent_code`, `country_code2`, `country_code3`, `country_name`,
# `dma_code`, `ip`, `latitude`, `longitude`, `postal_code`, `region_name` and `timezone`.
config :fields, :validate => :array
# Specify the field into which Logstash should store the geoip data.
# This can be useful, for example, if you have `src\_ip` and `dst\_ip` fields and
# would like the GeoIP information of both IPs.
#
# If you save the data to a target field other than `geoip` and want to use the
# `geo\_point` related functions in Elasticsearch, you need to alter the template
# provided with the Elasticsearch output and configure the output to use the
# new template.
#
# Even if you don't use the `geo\_point` mapping, the `[target][location]` field
# is still valid GeoJSON.
config :target, :validate => :string, :default => 'geoasn'
# GeoIP lookup is surprisingly expensive. This filter uses an LRU cache to take advantage of the fact that
# IPs agents are often found adjacent to one another in log files and rarely have a random distribution.
# The higher you set this the more likely an item is to be in the cache and the faster this filter will run.
# However, if you set this too high you can use more memory than desired.
#
# Experiment with different values for this option to find the best performance for your dataset.
#
# This MUST be set to a value > 0. There is really no reason to not want this behavior, the overhead is minimal
# and the speed gains are large.
#
# It is important to note that this config value is global to the geoip_type. That is to say all instances of the geoip filter
# of the same geoip_type share the same cache. The last declared cache size will 'win'. The reason for this is that there would be no benefit
# to having multiple caches for different instances at different points in the pipeline, that would just increase the
# number of cache misses and waste memory.
config :lru_cache_size, :validate => :number, :default => 1000
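# Example configuration (a minimal sketch; the source field name and the
# database path below are illustrative, not defaults of this plugin):
#
#     filter {
#       geoasn {
#         source => "clientip"
#         database => "/opt/logstash/vendor/GeoIPASNum.dat"
#         target => "geoasn"
#         lru_cache_size => 10000
#       }
#     }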
public
def register
require "geoip"
if @database.nil?
@database = ::Dir.glob(::File.join(::File.expand_path("../../../vendor/", ::File.dirname(__FILE__)),"GeoLiteCity*.dat")).first
if @database.nil? || !File.exist?(@database)
raise "You must specify 'database => ...' in your geoasn filter (I looked for '#{@database}')"
end
end
@logger.info("Using geoip database", :path => @database)
# For the purpose of initializing this filter, geoip is initialized here but
# not set as a global. The geoip module imposes a mutex, so the filter needs
# to re-initialize this later in the filter() thread, and save that access
# as a thread-local variable.
geoip_initialize = ::GeoIP.new(@database)
@geoip_type = case geoip_initialize.database_type
when GeoIP::GEOIP_CITY_EDITION_REV0, GeoIP::GEOIP_CITY_EDITION_REV1
:city
when GeoIP::GEOIP_COUNTRY_EDITION
:country
when GeoIP::GEOIP_ASNUM_EDITION
:asn
when GeoIP::GEOIP_ISP_EDITION, GeoIP::GEOIP_ORG_EDITION
:isp
else
raise RuntimeException.new "This GeoIP database is not currently supported"
end
@threadkey = "geoip-#{self.object_id}"
# This is wrapped in a mutex to make sure the initialization behavior of LOOKUP_CACHES (see def above) doesn't create a dupe
LOOKUP_CACHE_INIT_MUTEX.synchronize do
self.lookup_cache = LOOKUP_CACHES[@geoip_type] ||= LruRedux::ThreadSafeCache.new(@lru_cache_size)
end
@no_fields = @fields.nil? || @fields.empty?
end # def register
public
def filter(event)
geo_data_hash = get_geo_data(event)
if apply_geodata(geo_data_hash, event)
filter_matched(event)
end
end # def filter
def apply_geodata(geo_data_hash, event)
# don't do anything more if the lookup result is nil
return false if geo_data_hash.nil?
# only initialize the @target field when the lookup result is not nil (backwards compatibility)
event.set(@target, {}) if event.get(@target).nil?
# don't do anything more if the lookup result is empty
return false if geo_data_hash.empty?
geo_data_hash.each do |key, value|
if @no_fields || @fields.include?(key)
# can't dup numerics
event.set("[#{@target}][#{key}]", value.is_a?(Numeric) ? value : value.dup)
end
end # geo_data_hash.each
true
end
def get_geo_data(event)
# pure function, must control return value
result = {}
ip = event.get(@source)
ip = ip.first if ip.is_a? Array
return nil if ip.nil?
begin
result = get_geo_data_for_ip(ip)
rescue SocketError => e
@logger.error("IP Field contained invalid IP address or hostname", :field => @source, :event => event)
rescue StandardError => e
@logger.error("Unknown error while looking up GeoIP data", :exception => e, :field => @source, :event => event)
end
result
end
def get_geo_data_for_ip(ip)
ensure_database!
if (cached = lookup_cache[ip])
cached
else
geo_data = Thread.current[threadkey].send(@geoip_type, ip)
converted = prepare_geodata_for_cache(geo_data)
lookup_cache[ip] = converted
converted
end
end
def prepare_geodata_for_cache(geo_data)
# GeoIP returns nil or a Struct subclass
return nil if !geo_data.respond_to?(:each_pair)
# let's just do this once before caching
result = {}
geo_data.each_pair do |k, v|
next if v.nil? || k == :request
if v.is_a?(String)
next if v.empty?
# Some strings from GeoIP don't have the correct encoding...
result[k.to_s] = case v.encoding
# I have found strings coming from GeoIP that are ASCII-8BIT are actually
# ISO-8859-1...
when Encoding::ASCII_8BIT
v.force_encoding(Encoding::ISO_8859_1).encode(Encoding::UTF_8)
when Encoding::ISO_8859_1, Encoding::US_ASCII
v.encode(Encoding::UTF_8)
else
v
end
else
result[k.to_s] = v
end
end
lat, lng = result.values_at("latitude", "longitude")
if lat && lng
result["location"] = [ lng.to_f, lat.to_f ]
end
result
end
def ensure_database!
# Use thread-local access to GeoIP. The Ruby GeoIP module forces a mutex
# around access to the database, which can be overcome with :pread.
# Unfortunately, :pread requires the io-extra gem, with C extensions that
# aren't supported on JRuby. If / when :pread becomes available, we can stop
# needing thread-local access.
Thread.current[threadkey] ||= ::GeoIP.new(@database)
end
end # class LogStash::Filters::GeoASN
| 40.665158 | 168 | 0.703906 |
ed9ed7acc4420688c20fc8b681dcec7a870c543a | 740 | require 'spec_helper_acceptance'
describe 'chrony class:' do
context 'default parameters' do
# Using puppet_apply as a helper
it 'runs successfully' do
pp = "class { 'chrony': }"
# Run it twice and test for idempotency
apply_manifest(pp, catch_failures: true)
expect(apply_manifest(pp, catch_failures: true).exit_code).to be_zero
end
describe package('chrony') do
it { is_expected.to be_installed }
end
service = case fact('os.family')
when 'RedHat'
'chronyd'
else
'chrony'
end
describe service(service) do
it { is_expected.to be_enabled }
it { is_expected.to be_running }
end
end
end
| 25.517241 | 75 | 0.608108 |
f8ffc5b154f4fa7c1ec4ce220a0d621172016b92 | 2,374 | # -*- encoding: utf-8 -*-
# stub: awesome_print 1.8.0 ruby lib
Gem::Specification.new do |s|
s.name = "awesome_print".freeze
s.version = "1.8.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Michael Dvorkin".freeze]
s.date = "2017-06-16"
s.description = "Great Ruby dubugging companion: pretty print Ruby objects to visualize their structure. Supports custom object formatting via plugins".freeze
s.email = "[email protected]".freeze
s.homepage = "https://github.com/awesome-print/awesome_print".freeze
s.licenses = ["MIT".freeze]
s.rubyforge_project = "awesome_print".freeze
s.rubygems_version = "2.7.7".freeze
s.summary = "Pretty print Ruby objects with proper indentation and colors".freeze
s.installed_by_version = "2.7.7" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rspec>.freeze, [">= 3.0.0"])
s.add_development_dependency(%q<appraisal>.freeze, [">= 0"])
s.add_development_dependency(%q<fakefs>.freeze, [">= 0.2.1"])
s.add_development_dependency(%q<sqlite3>.freeze, [">= 0"])
s.add_development_dependency(%q<nokogiri>.freeze, [">= 1.6.5"])
s.add_development_dependency(%q<simplecov>.freeze, [">= 0"])
s.add_development_dependency(%q<codeclimate-test-reporter>.freeze, [">= 0"])
else
s.add_dependency(%q<rspec>.freeze, [">= 3.0.0"])
s.add_dependency(%q<appraisal>.freeze, [">= 0"])
s.add_dependency(%q<fakefs>.freeze, [">= 0.2.1"])
s.add_dependency(%q<sqlite3>.freeze, [">= 0"])
s.add_dependency(%q<nokogiri>.freeze, [">= 1.6.5"])
s.add_dependency(%q<simplecov>.freeze, [">= 0"])
s.add_dependency(%q<codeclimate-test-reporter>.freeze, [">= 0"])
end
else
s.add_dependency(%q<rspec>.freeze, [">= 3.0.0"])
s.add_dependency(%q<appraisal>.freeze, [">= 0"])
s.add_dependency(%q<fakefs>.freeze, [">= 0.2.1"])
s.add_dependency(%q<sqlite3>.freeze, [">= 0"])
s.add_dependency(%q<nokogiri>.freeze, [">= 1.6.5"])
s.add_dependency(%q<simplecov>.freeze, [">= 0"])
s.add_dependency(%q<codeclimate-test-reporter>.freeze, [">= 0"])
end
end
| 45.653846 | 160 | 0.662595 |
e29844fe99211848c510564aad9abac85eaad9ff | 238 | # encoding: utf-8
begin
require 'active_record'
puts "active_record gem found, running ActiveRecord specs \e[32m#{'✔'}\e[0m"
rescue LoadError
puts "active_record gem not found, not running ActiveRecord specs \e[31m#{'✖'}\e[0m"
end
| 26.444444 | 86 | 0.726891 |
6a24035dae24173ee2202d53232c0dd15f40c7f0 | 1,059 | class HerokuToolbelt < Formula
desc "Everything you need to get started with Heroku"
homepage "https://toolbelt.heroku.com/other"
url "https://s3.amazonaws.com/assets.heroku.com/heroku-client/heroku-client-3.41.4.tgz"
sha256 "47f43d7292128234d1412005cc751a45218219e60a1b99081086dc33f682cf5a"
head "https://github.com/heroku/heroku.git"
depends_on :ruby => "1.9"
def install
libexec.install Dir["*"]
# turn off autoupdates (off by default in HEAD)
if build.stable?
inreplace libexec/"bin/heroku", "Heroku::Updater.inject_libpath", "Heroku::Updater.disable(\"Use `brew upgrade heroku-toolbelt` to update\")"
end
bin.write_exec_script libexec/"bin/heroku"
end
def caveats
<<-EOS.undent
Unlike the standalone download for Heroku Toolbelt, the Homebrew package
does not come with Foreman. It is available via RubyGems, direct download,
and other installation methods. See https://ddollar.github.io/foreman/ for more info.
EOS
end
test do
system "#{bin}/heroku", "version"
end
end
| 34.16129 | 147 | 0.723324 |
f747eb0345429f0c2c07bbdb7f4809445690e14a | 422 | ActiveAdmin.register HtmlPage do
# See permitted parameters documentation:
# https://github.com/activeadmin/activeadmin/blob/master/docs/2-resource-customization.md#setting-up-strong-parameters
#
# permit_params :list, :of, :attributes, :on, :model
#
# or
#
# permit_params do
# permitted = [:permitted, :attributes]
# permitted << :other if params[:action] == 'create' && current_user.admin?
# permitted
# end
end
| 26.375 | 118 | 0.734597 |
5d1a79a17e701c91be574acf2f16f711993f3c25 | 1,250 | class Kind < Formula
desc "Run local Kubernetes cluster in Docker"
homepage "https://kind.sigs.k8s.io/"
url "https://github.com/kubernetes-sigs/kind/archive/v0.6.0.tar.gz"
sha256 "966b5c9817850f958acf14496349276a8df6d6609adfdc41633a8b7bc73d5e5d"
head "https://github.com/kubernetes-sigs/kind.git"
bottle do
cellar :any_skip_relocation
sha256 "90b44f5c4b4b11526371b974d86c32cf871e7b9d4042fde8e717befa33c3da07" => :catalina
sha256 "81100ccfcfda9542f639e8b0ab55761085e539fbfb86c9d459cc22bec6981fd0" => :mojave
sha256 "32d4b9e2e0b243668e10c365c18e85b3f01255d7fb06f5ca317eeb7ee3589d06" => :high_sierra
end
depends_on "go" => :build
def install
system "go", "build", "-o", bin/"kind"
prefix.install_metafiles
# Install bash completion
output = Utils.popen_read("#{bin}/kind completion bash")
(bash_completion/"kind").write output
# Install zsh completion
output = Utils.popen_read("#{bin}/kind completion zsh")
(zsh_completion/"_kind").write output
end
test do
# Should error out as creating a kind cluster requires root
status_output = shell_output("#{bin}/kind get kubeconfig --name homebrew 2>&1", 1)
assert_match "failed to list clusters", status_output
end
end
| 34.722222 | 93 | 0.7464 |
79aa0aab3953cd23ae8a08f3cbd662aa6e77d092 | 585 | class CacheSectionArticlesCount < ActiveRecord::Migration
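# Lightweight model stand-ins are declared inside the migration so it does
# not depend on (or break with) the application's current model code.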
class AssignedSection < ActiveRecord::Base
belongs_to :section
end
class Section < ActiveRecord::Base; end
def self.up
add_column "sections", "articles_count", :integer, :default => 0
say_with_time "Update Section articles_count values..." do
AssignedSection.count(:all, :group => :section_id).each do |section_id, count|
Section.update_all ['articles_count = ?', count], ['id = ?', section_id]
end
end
end
def self.down
remove_column "sections", "articles_count"
end
end
| 30.789474 | 84 | 0.700855 |
08fae7f4a7d4941bd371a1919de30fee5cec8712 | 1,448 | # ISO <<Abstract>> ProcessStep
# 19115-2 writer output in XML
# History:
# Stan Smith 2019-09-25 original script.
require_relative 'class_liProcessStep'
require_relative 'class_leProcessStep'
module ADIWG
module Mdtranslator
module Writers
module Iso19115_2
class ProcessStep
def initialize(xml, hResponseObj)
@xml = xml
@hResponseObj = hResponseObj
end
def writeXML(hProcess, inContext = nil)
# classes used
liProcessClass = LI_ProcessStep.new(@xml, @hResponseObj)
leProcessClass = LE_ProcessStep.new(@xml, @hResponseObj)
outContext = inContext
# use LE_ProcessStep if hProcess has any ...
# processingInformation, reports
# stepProducts (output)
useLE = false
useLE = true unless hProcess[:processingInformation].empty?
useLE = true unless hProcess[:reports].empty?
useLE = true unless hProcess[:stepProducts].empty?
if useLE
leProcessClass.writeXML(hProcess, outContext)
else
liProcessClass.writeXML(hProcess, outContext)
end
end # writeXML
end # processStep class
end
end
end
end
| 27.846154 | 77 | 0.54558 |
91192706e081e3148d4522131eefa42ab6c545f4 | 538 | class Sshpass < Formula
homepage "https://sourceforge.net/projects/sshpass/"
url "https://sourceforge.net/projects/sshpass/files/sshpass/1.09/sshpass-1.09.tar.gz"
sha256 "71746e5e057ffe9b00b44ac40453bf47091930cba96bbea8dc48717dedc49fb7"
def install
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
end
test do
system "sshpass"
end
end
| 29.888889 | 87 | 0.622677 |
abcf9831ec6b80bc05e22ae56da5262c2ed23a0f | 1,537 | require 'fog/core/model'
module Fog
module AWS
class Compute
class Image < Fog::Model
identity :id, :aliases => 'imageId'
attribute :architecture
attribute :block_device_mapping, :aliases => 'blockDeviceMapping'
attribute :description
attribute :location, :aliases => 'imageLocation'
attribute :owner_id, :aliases => 'imageOwnerId'
attribute :state, :aliases => 'imageState'
attribute :type, :aliases => 'imageType'
attribute :is_public, :aliases => 'isPublic'
attribute :kernel_id, :aliases => 'kernelId'
attribute :platform
attribute :product_codes, :aliases => 'productCodes'
attribute :ramdisk_id, :aliases => 'ramdiskId'
attribute :root_device_type, :aliases => 'rootDeviceType'
attribute :root_device_name, :aliases => 'rootDeviceName'
attribute :tags, :aliases => 'tagSet'
attribute :name
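# Deregister this AMI, optionally deleting the root EBS snapshot as well.
# Usage sketch (the image id is illustrative):
#   connection.images.get('ami-12345678').deregister(true)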
def deregister(delete_snapshot = false)
connection.deregister_image(id)
if delete_snapshot && root_device_type == "ebs"
block_device = block_device_mapping.detect { |device| device['deviceName'] == root_device_name }
connection.snapshots.new(:id => block_device['snapshotId']).destroy
else
true
end
end
end
end
end
end
| 34.931818 | 118 | 0.56799 |
e234e9bcaa565915c999a5e55a7a301c81f30729 | 673 | module Stripe
class Subscription < APIResource
include Stripe::APIOperations::Update
include Stripe::APIOperations::Delete
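# Subscriptions are always scoped to a customer; a typical flow (a sketch
# with illustrative ids, mirroring the error message in .retrieve below):
#   customer = Stripe::Customer.retrieve('cus_123')
#   subscription = customer.subscriptions.retrieve('sub_456')
#   subscription.delete_discount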
def url
"#{Customer.url}/#{CGI.escape(customer)}/subscriptions/#{CGI.escape(id)}"
end
def self.retrieve(id, opts=nil)
raise NotImplementedError.new("Subscriptions cannot be retrieved without a customer ID. Retrieve a subscription using customer.subscriptions.retrieve('subscription_id')")
end
def delete_discount
response, opts = request(:delete, discount_url)
refresh_from({ :discount => nil }, opts, true)
end
private
def discount_url
url + '/discount'
end
end
end
| 25.884615 | 176 | 0.695394 |
edbfae711339cdc8c0d1368755bb736b30ee1389 | 2,615 | require_relative 'utils/formatting'
require_relative 'utils/position'
module PDEX
class NPPESPractitioner
include Formatting
include Position
attr_reader :raw_data
def initialize(raw_data)
@raw_data = raw_data.freeze
end
def npi
@npi ||= raw_data['NPI']
end
def name
OpenStruct.new(
{
first: first_name,
middle: middle_name,
last: last_name,
prefix: raw_data['Provider Name Prefix Text']&.capitalize,
suffix: raw_data['Provider Name Suffix Text']&.capitalize,
credential: raw_data['Provider Credential Text']
}
)
end
def first_name
@first_name ||= raw_data['Provider First Name']
end
def middle_name
@middle_name ||= raw_data['Provider Middle Name']
end
def last_name
@last_name ||= raw_data['Provider Last Name (Legal Name)']
end
def phone_numbers
@phone_numbers ||= [raw_data['Provider Business Practice Location Address Telephone Number']]
end
def fax_numbers
@fax_numbers ||= [raw_data['Provider Business Practice Location Address Fax Number']]
end
def address
OpenStruct.new(
{
lines: [
raw_data['Provider First Line Business Practice Location Address'],
raw_data['Provider Second Line Business Practice Location Address']
].reject(&:blank?),
city: raw_data['Provider Business Practice Location Address City Name'],
state: raw_data['Provider Business Practice Location Address State Name'],
zip: format_zip(raw_data['Provider Business Practice Location Address Postal Code'])
}
)
end
def gender
raw_data['Provider Gender Code']
end
def qualifications
provided_qualifications = (1..50).map { |index| qualification(index) }.compact
provided_qualifications.blank? ? [default_qualification] : provided_qualifications
end
def qualification(index)
keys = [
"Healthcare Provider Taxonomy Code_#{index}",
"Provider License Number_#{index}",
"Provider License Number State Code_#{index}"
]
return nil if keys.any? { |key| raw_data[key].blank? }
OpenStruct.new(
{
state: raw_data["Provider License Number State Code_#{index}"],
license_number: raw_data["Provider License Number_#{index}"],
taxonomy_code: raw_data["Healthcare Provider Taxonomy Code_#{index}"]
}
)
end
def default_qualification
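# NOTE: @default_qualification is never assigned in this class, so this
# returns nil (and #qualifications yields [nil]) unless it is set elsewhere.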
@default_qualification
end
end
end
| 26.683673 | 99 | 0.639388 |
d5d0a82890a97206d5b44257f8e2e774c3ec645b | 9,496 | ##
# This code was generated by
# \ / _ _ _| _ _
# | (_)\/(_)(_|\/| |(/_ v1.0.0
# / /
#
# frozen_string_literal: true
require 'spec_helper.rb'
describe 'Service' do
it "can fetch" do
@holodeck.mock(Twilio::Response.new(500, ''))
expect {
@client.ip_messaging.v1.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').fetch()
}.to raise_exception(Twilio::REST::TwilioError)
values = {}
expect(
@holodeck.has_request?(Holodeck::Request.new(
method: 'get',
url: 'https://ip-messaging.twilio.com/v1/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))).to eq(true)
end
it "receives fetch responses" do
@holodeck.mock(Twilio::Response.new(
200,
%q[
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"consumption_report_interval": 100,
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"default_channel_creator_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_channel_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_service_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "friendly_name",
"limits": {
"actions_per_second": 20,
"channel_members": 100,
"user_channels": 250
},
"links": {},
"notifications": {},
"post_webhook_url": "post_webhook_url",
"pre_webhook_url": "pre_webhook_url",
"reachability_enabled": false,
"read_status_enabled": false,
"sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"typing_indicator_timeout": 100,
"url": "http://www.example.com",
"webhook_filters": [
"webhook_filters"
],
"webhook_method": "webhook_method",
"webhooks": {}
}
]
))
actual = @client.ip_messaging.v1.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').fetch()
expect(actual).to_not eq(nil)
end
it "can delete" do
@holodeck.mock(Twilio::Response.new(500, ''))
expect {
@client.ip_messaging.v1.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').delete()
}.to raise_exception(Twilio::REST::TwilioError)
values = {}
expect(
@holodeck.has_request?(Holodeck::Request.new(
method: 'delete',
url: 'https://ip-messaging.twilio.com/v1/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))).to eq(true)
end
it "receives delete responses" do
@holodeck.mock(Twilio::Response.new(
204,
nil,
))
actual = @client.ip_messaging.v1.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').delete()
expect(actual).to eq(true)
end
it "can create" do
@holodeck.mock(Twilio::Response.new(500, ''))
expect {
@client.ip_messaging.v1.services.create(friendly_name: 'friendly_name')
}.to raise_exception(Twilio::REST::TwilioError)
values = {'FriendlyName' => 'friendly_name', }
expect(
@holodeck.has_request?(Holodeck::Request.new(
method: 'post',
url: 'https://ip-messaging.twilio.com/v1/Services',
data: values,
))).to eq(true)
end
it "receives create responses" do
@holodeck.mock(Twilio::Response.new(
201,
%q[
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"consumption_report_interval": 100,
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"default_channel_creator_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_channel_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_service_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "friendly_name",
"limits": {
"actions_per_second": 20,
"channel_members": 100,
"user_channels": 250
},
"links": {},
"notifications": {},
"post_webhook_url": "post_webhook_url",
"pre_webhook_url": "pre_webhook_url",
"reachability_enabled": false,
"read_status_enabled": false,
"sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"typing_indicator_timeout": 100,
"url": "http://www.example.com",
"webhook_filters": [
"webhook_filters"
],
"webhook_method": "webhook_method",
"webhooks": {}
}
]
))
actual = @client.ip_messaging.v1.services.create(friendly_name: 'friendly_name')
expect(actual).to_not eq(nil)
end
it "can read" do
@holodeck.mock(Twilio::Response.new(500, ''))
expect {
@client.ip_messaging.v1.services.list()
}.to raise_exception(Twilio::REST::TwilioError)
values = {}
expect(
@holodeck.has_request?(Holodeck::Request.new(
method: 'get',
url: 'https://ip-messaging.twilio.com/v1/Services',
))).to eq(true)
end
it "receives read_empty responses" do
@holodeck.mock(Twilio::Response.new(
200,
%q[
{
"meta": {
"first_page_url": "https://chat.twilio.com/v1/Services?Page=0&PageSize=50",
"key": "services",
"next_page_url": null,
"page": 0,
"page_size": 0,
"previous_page_url": null,
"url": "https://chat.twilio.com/v1/Services"
},
"services": []
}
]
))
actual = @client.ip_messaging.v1.services.list()
expect(actual).to_not eq(nil)
end
it "receives read_full responses" do
@holodeck.mock(Twilio::Response.new(
200,
%q[
{
"meta": {
"first_page_url": "https://chat.twilio.com/v1/Services?Page=0&PageSize=50",
"key": "services",
"next_page_url": null,
"page": 0,
"page_size": 1,
"previous_page_url": null,
"url": "https://chat.twilio.com/v1/Services"
},
"services": [
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"consumption_report_interval": 100,
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"default_channel_creator_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_channel_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_service_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "friendly_name",
"limits": {
"actions_per_second": 20,
"channel_members": 100,
"user_channels": 250
},
"links": {},
"notifications": {},
"post_webhook_url": "post_webhook_url",
"pre_webhook_url": "pre_webhook_url",
"reachability_enabled": false,
"read_status_enabled": false,
"sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"typing_indicator_timeout": 100,
"url": "http://www.example.com",
"webhook_filters": [
"webhook_filters"
],
"webhook_method": "webhook_method",
"webhooks": {}
}
]
}
]
))
actual = @client.ip_messaging.v1.services.list()
expect(actual).to_not eq(nil)
end
it "can update" do
@holodeck.mock(Twilio::Response.new(500, ''))
expect {
@client.ip_messaging.v1.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').update()
}.to raise_exception(Twilio::REST::TwilioError)
values = {}
expect(
@holodeck.has_request?(Holodeck::Request.new(
method: 'post',
url: 'https://ip-messaging.twilio.com/v1/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))).to eq(true)
end
it "receives update responses" do
@holodeck.mock(Twilio::Response.new(
200,
%q[
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"consumption_report_interval": 100,
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"default_channel_creator_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_channel_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_service_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "friendly_name",
"limits": {
"actions_per_second": 20,
"channel_members": 500,
"user_channels": 600
},
"links": {},
"notifications": {},
"post_webhook_url": "post_webhook_url",
"pre_webhook_url": "pre_webhook_url",
"reachability_enabled": false,
"read_status_enabled": false,
"sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"typing_indicator_timeout": 100,
"url": "http://www.example.com",
"webhook_filters": [
"webhook_filters"
],
"webhook_method": "webhook_method",
"webhooks": {}
}
]
))
actual = @client.ip_messaging.v1.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').update()
expect(actual).to_not eq(nil)
end
end | 31.759197 | 94 | 0.572662 |
b9cb3bc93cfd5784b4238c68d81cd05788bbd1c6 | 1,664 | # frozen_string_literal: true
module Mutant
class Mutator
class Node
class Block < self
handle(:block)
children :send, :arguments, :body
private
# Emit mutations
#
# @return [undefined]
def dispatch
emit_singletons
emit(send) unless n_lambda?(send)
emit_send_mutations(&method(:valid_send_mutation?))
emit_arguments_mutations
mutate_body
end
# Emit body mutations
#
# @return [undefined]
def mutate_body
emit_body(nil)
emit_body(N_RAISE)
return unless body
emit(body) unless body_has_control?
emit_body_mutations
mutate_body_receiver
end
# Test if body has control structures
#
# @return [Boolean]
def body_has_control?
AST.find_last_path(body) do |node|
n_break?(node) || n_next?(node)
end.any?
end
# Mutate method send in body scope of `send`
#
# @return [undefined]
def mutate_body_receiver
return if n_lambda?(send) || !n_send?(body)
body_meta = AST::Meta::Send.new(body)
emit(s(:send, send, body_meta.selector, *body_meta.arguments))
end
# Test for valid send mutations
#
# @return [true, false, nil]
def valid_send_mutation?(node)
return unless n_send?(node)
last = AST::Meta::Send.new(node).arguments.last
!last&.type.equal?(:block_pass)
end
end # Block
end # Node
end # Mutator
end # Mutant
| 22.186667 | 72 | 0.554688 |
6a29e26a741fdbbdfabe390941a6e37bfbac0439 | 977 | class AccountsController < ApplicationController
before_action :authenticate_account!
before_action :set_account, only: [:profile]
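# Home feed: posts from accounts the current account follows (plus its
# own), with follow suggestions drawn from the remaining accounts.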
def index
following_ids = Follower.where(follower_id: current_account.id).map(&:following_id)
following_ids << current_account.id
@posts = Post.includes(:account).where(account_id: following_ids).active
@comment = Comment.new
@follower_suggestions = Account.where.not(id: following_ids)
end
def profile
@posts = @account.posts.active
end
def set_account
@account = Account.find_by_username(params[:username])
end
def follow_account
follower_id = params[:follow_id]
if Follower.create!(follower_id: current_account.id, following_id: follower_id)
flash[:success] = "Now following a new user"
else
flash[:danger] = "Unable to follow"
end
redirect_to dashboard_path
end
private
def account_params
params.require(:account).permit(:profile_image)
end
end
| 25.710526 | 87 | 0.731832 |
39413bb977225fe6fff2ff130935e7197a4baf34 | 1,086 | require_relative 'boot'
require 'rails'
# Pick the frameworks you want:
require 'active_model/railtie'
require 'active_job/railtie'
require 'active_record/railtie'
require 'active_storage/engine'
require 'action_controller/railtie'
require 'action_mailer/railtie'
require 'action_view/railtie'
require 'action_cable/engine'
require 'sprockets/railtie'
# require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Lightsoft
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
# Don't generate system test files.
config.generators.system_tests = nil
end
end
| 31.941176 | 82 | 0.779926 |
6198b705415815c377e5d4d84cdf984c872971bf | 737 | cask 'webstorm-eap' do
version '144.2925.4'
sha256 'fa100e8b91553484df066839b27a9b8740448557bcc155f6829db8d5e5c3d8fb'
url "https://download.jetbrains.com/webstorm/WebStorm-EAP-#{version}.dmg"
name 'WebStorm'
homepage 'https://confluence.jetbrains.com/display/WI/WebStorm+EAP'
license :commercial
app 'WebStorm 12 EAP.app'
binary 'WebStorm 12 EAP.app/Contents/MacOS/webstorm'
zap delete: [
'~/.WebStorm12',
'~/Library/Preferences/com.jetbrains.webstorm.plist',
'~/Library/Preferences/WebStorm12',
'~/Library/Application Support/WebStorm12',
'~/Library/Caches/WebStorm12',
'~/Library/Logs/WebStorm12',
]
end
| 33.5 | 75 | 0.647218 |
4aade15cc7e4d7d05c45e135c82e523d28b5cea4 | 556 | class PurchasedOrderMonitor < ApplicationMonitor
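# Charts completed Spree orders over the reporting period, grouped by
# completion date, with order totals summed for the summary figure.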
self.report_class = Spree::OrderReport
def relation
Spree::Order.complete
end
def groupers
%i[completed_at]
end
def aggregators
%i[total]
end
def dimensions
{
completed_at: { min: period.min, max: period.max }
}
end
def summary
Spree::Money.new(report.data.sum(0.0) { |d| d[:values].first[:value] || 0 })
end
def summary_diff
0
end
def show_chart?
true
end
def show_summary?
true
end
def show_table?
false
end
end
| 13.238095 | 80 | 0.638489 |
91a4b1acb8486146960259ed6a06df2be1373869 | 10,041 | #
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# Homebrew doesn't support specifying anything more recent than 'nehalem',
# but nehalem is 19x slower than sandybridge at some real-world workloads,
# and sandybridge is an old enough architecture that we're going to assume
# that HHVM users have it.
module MonkeyPatchCPU
def optimization_flags
super.merge({nehalem: "-march=sandybridge"}).freeze
end
end
class << Hardware::CPU
prepend MonkeyPatchCPU
end
class Hhvm416 < Formula
desc "JIT compiler and runtime for the Hack language"
homepage "http://hhvm.com/"
head "https://github.com/facebook/hhvm.git"
url "https://dl.hhvm.com/source/hhvm-4.16.4.tar.gz"
sha256 "85c6ff82e777f46e462e65880e7561258565d5ebf71033bbd8493c71565bc666"
bottle do
root_url "https://dl.hhvm.com/homebrew-bottles"
sha256 mojave: "e73685ef74ab821270791b9a77cb558bf022a6c565f63ae2a4465b38b82fe915"
sha256 high_sierra: "b6a80b6ae527bafb9b1f1d127ebc5a48851f68793159fe7286791ab4e2b76d63"
end
option "with-debug", <<~EOS
Make an unoptimized build with assertions enabled. This will run PHP and
Hack code dramatically slower than a release build, and is suitable mostly
for debugging HHVM itself.
EOS
# Needs very recent xcode
depends_on :macos => :sierra
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "cmake" => :build
depends_on "double-conversion"
depends_on "dwarfutils" => :build
depends_on "gawk" => :build
depends_on "libelf" => :build
depends_on "libtool" => :build
depends_on "md5sha1sum" => :build
depends_on "pkg-config" => :build
depends_on "wget" => :build
# We statically link against icu4c as every non-bugfix release is not
# backwards compatible; needing to rebuild for every release is too
# brittle
depends_on "icu4c" => :build
depends_on "boost"
depends_on "freetype"
depends_on "gd"
depends_on "gettext"
depends_on "glog"
depends_on "gmp"
depends_on "imagemagick@6"
depends_on "jemalloc"
depends_on "jpeg"
depends_on "libevent"
depends_on "libmemcached"
depends_on "libsodium"
depends_on "libpng"
depends_on "libxml2"
depends_on "libzip"
depends_on "lz4"
depends_on "mcrypt"
depends_on "oniguruma"
depends_on "openssl"
depends_on "pcre" # Used for Hack but not HHVM build - see #116
depends_on "postgresql"
depends_on "sqlite"
depends_on "tbb@2020"
def install
cmake_args = %W[
-DCMAKE_INSTALL_PREFIX=#{prefix}
-DCMAKE_INSTALL_SYSCONFDIR=#{etc}
-DDEFAULT_CONFIG_DIR=#{etc}/hhvm
]
# Force use of bundled PCRE to workaround #116
cmake_args += %W[
-DSYSTEM_PCRE_HAS_JIT=0
]
# Features which don't work on OS X yet since they haven't been ported yet.
cmake_args += %W[
-DENABLE_MCROUTER=OFF
-DENABLE_EXTENSION_MCROUTER=OFF
-DENABLE_EXTENSION_IMAP=OFF
]
# Required to specify a socket path if you are using the bundled async SQL
# client (which is very strongly recommended).
cmake_args << "-DMYSQL_UNIX_SOCK_ADDR=/tmp/mysql.sock"
# LZ4 warning macros are currently incompatible with clang
cmake_args << "-DCMAKE_C_FLAGS=-DLZ4_DISABLE_DEPRECATE_WARNINGS=1"
cmake_args << "-DCMAKE_CXX_FLAGS=-DLZ4_DISABLE_DEPRECATE_WARNINGS=1 -DU_USING_ICU_NAMESPACE=1"
# Debug builds. This switch is all that's needed, it sets all the right
# cflags and other config changes.
if build.with? "debug"
cmake_args << "-DCMAKE_BUILD_TYPE=Debug"
else
cmake_args << "-DCMAKE_BUILD_TYPE=RelWithDebInfo"
end
# Statically link libICU
cmake_args += %W[
-DICU_INCLUDE_DIR=#{Formula["icu4c"].opt_include}
-DICU_I18N_LIBRARY=#{Formula["icu4c"].opt_lib}/libicui18n.a
-DICU_LIBRARY=#{Formula["icu4c"].opt_lib}/libicuuc.a
-DICU_DATA_LIBRARY=#{Formula["icu4c"].opt_lib}/libicudata.a
]
# TBB looks for itself in a different place than brew installs to.
ENV["TBB_ARCH_PLATFORM"] = "."
cmake_args += %W[
-DTBB_INCLUDE_DIR=#{Formula["tbb@2020"].opt_include}
-DTBB_INSTALL_DIR=#{Formula["tbb@2020"].opt_prefix}
-DTBB_LIBRARY=#{Formula["tbb@2020"].opt_lib}/libtbb.dylib
-DTBB_LIBRARY_DEBUG=#{Formula["tbb@2020"].opt_lib}/libtbb.dylib
-DTBB_LIBRARY_DIR=#{Formula["tbb@2020"].opt_lib}
-DTBB_MALLOC_LIBRARY=#{Formula["tbb@2020"].opt_lib}/libtbbmalloc.dylib
-DTBB_MALLOC_LIBRARY_DEBUG=#{Formula["tbb@2020"].opt_lib}/libtbbmalloc.dylib
]
system "cmake", *cmake_args, '.'
system "make"
system "make", "install"
tp_notices = (share/"doc/third_party_notices.txt")
(share/"doc").install "third-party/third_party_notices.txt"
(share/"doc/third_party_notices.txt").append_lines <<EOF
-----
The following software may be included in this product: icu4c. This Software contains the following license and notice below:
Unicode Data Files include all data files under the directories
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
http://www.unicode.org/utility/trac/browser/.
Unicode Data Files do not include PDF online code charts under the
directory http://www.unicode.org/Public/.
Software includes any source code published in the Unicode Standard
or under the directories
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
http://www.unicode.org/utility/trac/browser/.
NOTICE TO USER: Carefully read the following legal agreement.
BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S
DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"),
YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE
TERMS AND CONDITIONS OF THIS AGREEMENT.
IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE
THE DATA FILES OR SOFTWARE.
COPYRIGHT AND PERMISSION NOTICE
Copyright © 1991-2017 Unicode, Inc. All rights reserved.
Distributed under the Terms of Use in http://www.unicode.org/copyright.html.
Permission is hereby granted, free of charge, to any person obtaining
a copy of the Unicode data files and any associated documentation
(the "Data Files") or Unicode software and any associated documentation
(the "Software") to deal in the Data Files or Software
without restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, and/or sell copies of
the Data Files or Software, and to permit persons to whom the Data Files
or Software are furnished to do so, provided that either
(a) this copyright and permission notice appear with all copies
of the Data Files or Software, or
(b) this copyright and permission notice appear in associated
Documentation.
THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT OF THIRD PARTY RIGHTS.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THE DATA FILES OR SOFTWARE.
Except as contained in this notice, the name of a copyright holder
shall not be used in advertising or otherwise to promote the sale,
use or other dealings in these Data Files or Software without prior
written authorization of the copyright holder.
EOF
ini = etc/"hhvm"
(ini/"php.ini").write php_ini unless File.exist? (ini/"php.ini")
(ini/"server.ini").write server_ini unless File.exist? (ini/"server.ini")
end
test do
(testpath/"test.php").write <<~EOS
<?php
exit(is_integer(HHVM_VERSION_ID) ? 0 : 1);
EOS
system "#{bin}/hhvm", testpath/"test.php"
end
plist_options :manual => "hhvm -m daemon -c #{HOMEBREW_PREFIX}/etc/hhvm/php.ini -c #{HOMEBREW_PREFIX}/etc/hhvm/server.ini"
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/hhvm</string>
<string>-m</string>
<string>server</string>
<string>-c</string>
<string>#{etc}/hhvm/php.ini</string>
<string>-c</string>
<string>#{etc}/hhvm/server.ini</string>
</array>
<key>WorkingDirectory</key>
<string>#{HOMEBREW_PREFIX}</string>
</dict>
</plist>
EOS
end
# https://github.com/hhvm/packaging/blob/master/hhvm/deb/skeleton/etc/hhvm/php.ini
def php_ini
<<~EOS
; php options
session.save_handler = files
session.save_path = #{var}/lib/hhvm/sessions
session.gc_maxlifetime = 1440
; hhvm specific
hhvm.log.always_log_unhandled_exceptions = true
hhvm.log.runtime_error_reporting_level = 8191
hhvm.mysql.typed_results = false
EOS
end
# https://github.com/hhvm/packaging/blob/master/hhvm/deb/skeleton/etc/hhvm/server.ini
def server_ini
<<~EOS
; php options
pid = #{var}/run/hhvm/pid
; hhvm specific
hhvm.server.port = 9000
hhvm.server.default_document = index.php
hhvm.log.use_log_file = true
hhvm.log.file = #{var}/log/hhvm/error.log
hhvm.repo.central.path = #{var}/run/hhvm/hhvm.hhbc
EOS
end
end
| 35.480565 | 125 | 0.710387 |
08198242bf56619368c583a9e2f524ac6b7f75fe | 869 | class Azcopy < Formula
desc "Azure Storage data transfer utility"
homepage "https://github.com/Azure/azure-storage-azcopy"
url "https://github.com/Azure/azure-storage-azcopy/archive/10.3.4.tar.gz"
sha256 "3a0a1450889af252ef251ae5bc86a145f3f77922d316947d7a90088fd1427619"
bottle do
cellar :any_skip_relocation
sha256 "8a72a2b66b7351c5842549d66e5631cd2e64445d5879c614fad6618ef9a50ce1" => :catalina
sha256 "59b7c6aded6ed98cdf911f89f2b1acec44a49999dfe9850351babbc45f8eb80d" => :mojave
sha256 "c53dd2b636af6f83c2dbc35bee9fe74db71a9f9c895c492f2ca6138f427a3d46" => :high_sierra
end
depends_on "go" => :build
def install
system "go", "build", *std_go_args
end
test do
assert_match "failed to obtain credential info", shell_output("#{bin}/azcopy list https://storageaccountname.blob.core.windows.net/containername/", 1)
end
end
| 36.208333 | 154 | 0.783659 |
1d22acd7b9b736631f46291e09df9ac13cdf4e88 | 1,183 | class Txr < Formula
desc "Original, new programming language for convenient data munging"
homepage "https://www.nongnu.org/txr/"
url "http://www.kylheku.com/cgit/txr/snapshot/txr-265.tar.bz2"
sha256 "5dac8fa64350efe5abd3e9f618db2fcfeee1718db329cfd27b29bd61f113605d"
license "BSD-2-Clause"
livecheck do
url "http://www.kylheku.com/cgit/txr"
regex(/href=.*?txr[._-]v?(\d+(?:\.\d+)*)\.t/i)
end
bottle do
sha256 cellar: :any, arm64_big_sur: "e6843e2622acd9d3d5b0572d653c8e1815e94334d9ccf252fd2e37a6eef35027"
sha256 cellar: :any, big_sur: "923d2986a6abfdd835b64d33e81ba524bb7f10e159d4d06ef16b6d41d081c09e"
sha256 cellar: :any, catalina: "48150bfeeeb08f84c4832c9b5f4bd2092d46a2cf0ef42c08b9777783f2dba074"
sha256 cellar: :any, mojave: "fa4cccbcd32ceeaa321f6ea489d8f3273cdb92f5457244f634b293abe4bb2b31"
end
depends_on "libffi"
uses_from_macos "bison" => :build
uses_from_macos "flex" => :build
def install
system "./configure", "--prefix=#{prefix}", "--inline=static inline"
system "make"
system "make", "install"
end
test do
assert_equal "3", shell_output(bin/"txr -p '(+ 1 2)'").chomp
end
end
| 33.8 | 106 | 0.723584 |
e2da89c90ef2107eb447eaec6021ebc78c82757d | 3,170 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Cloud
module AIPlatform
module V1
# A collection of metrics calculated by comparing Model's predictions on a
# slice of the test data against ground truth annotations.
# @!attribute [r] name
# @return [::String]
# Output only. The resource name of the ModelEvaluationSlice.
# @!attribute [r] slice
# @return [::Google::Cloud::AIPlatform::V1::ModelEvaluationSlice::Slice]
# Output only. The slice of the test data that is used to evaluate the Model.
# @!attribute [r] metrics_schema_uri
# @return [::String]
# Output only. Points to a YAML file stored on Google Cloud Storage describing the
# {::Google::Cloud::AIPlatform::V1::ModelEvaluationSlice#metrics metrics} of this ModelEvaluationSlice. The
# schema is defined as an OpenAPI 3.0.2 [Schema
# Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject).
# @!attribute [r] metrics
# @return [::Google::Protobuf::Value]
# Output only. Sliced evaluation metrics of the Model. The schema of the metrics is stored
# in {::Google::Cloud::AIPlatform::V1::ModelEvaluationSlice#metrics_schema_uri metrics_schema_uri}
# @!attribute [r] create_time
# @return [::Google::Protobuf::Timestamp]
# Output only. Timestamp when this ModelEvaluationSlice was created.
class ModelEvaluationSlice
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# Definition of a slice.
# @!attribute [r] dimension
# @return [::String]
# Output only. The dimension of the slice.
# Well-known dimensions are:
# * `annotationSpec`: This slice is on the test data that has either
# ground truth or prediction with {::Google::Cloud::AIPlatform::V1::AnnotationSpec#display_name AnnotationSpec.display_name}
# equals to {::Google::Cloud::AIPlatform::V1::ModelEvaluationSlice::Slice#value value}.
# @!attribute [r] value
# @return [::String]
# Output only. The value of the dimension in this slice.
class Slice
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
end
end
end
end
| 45.942029 | 142 | 0.65142 |
915bb409b3a409f357f435a056bc093df6abf9f6 | 1,209 | # frozen_string_literal: true
module SolidusSubscriptions
module ChurnBuster
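# Thin client for the Churn Buster REST API. A minimal usage sketch
# (the credentials and order below are illustrative, not real values):
#
#   client = Client.new(account_id: 'acct_id', api_key: 'api_key')
#   client.report_failed_payment(order)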
class Client
BASE_API_URL = 'https://api.churnbuster.io/v1'
attr_reader :account_id, :api_key
def initialize(account_id:, api_key:)
@account_id = account_id
@api_key = api_key
end
def report_failed_payment(order)
post('/failed_payments', OrderSerializer.serialize(order))
end
def report_successful_payment(order)
post('/successful_payments', OrderSerializer.serialize(order))
end
def report_subscription_cancellation(subscription)
post('/cancellations', SubscriptionSerializer.serialize(subscription))
end
def report_payment_method_change(subscription)
post('/payment_methods', SubscriptionPaymentMethodSerializer.serialize(subscription))
end
private
def post(path, body)
HTTParty.post(
"#{BASE_API_URL}#{path}",
body: body.to_json,
headers: {
'Content-Type' => 'application/json',
},
basic_auth: {
username: account_id,
password: api_key,
},
)
end
end
end
end
| 24.673469 | 93 | 0.629446 |
5da20c2bf72a658b181dd4a35bc2c3b62d524a90 | 54,792 | #--
# Copyright 2006 by Chad Fowler, Rich Kilmer, Jim Weirich and others.
# All rights reserved.
# See LICENSE.txt for permissions.
#++
require 'rubygems/version'
require 'rubygems/requirement'
require 'rubygems/platform'
require "rubygems/deprecate"
# :stopdoc:
class Date; end # for ruby_code if date.rb wasn't required
# :startdoc:
##
# The Specification class contains the metadata for a Gem. Typically
# defined in a .gemspec file or a Rakefile, and looks like this:
#
# spec = Gem::Specification.new do |s|
# s.name = 'example'
# s.version = '1.0'
# s.summary = 'Example gem specification'
# ...
# end
#
# For a great way to package gems, use Hoe.
class Gem::Specification
##
# The version number of a specification that does not specify one
# (i.e. RubyGems 0.7 or earlier).
NONEXISTENT_SPECIFICATION_VERSION = -1
##
# The specification version applied to any new Specification instances
# created. This should be bumped whenever something in the spec format
# changes.
#
# Specification Version History:
#
# spec ruby
# ver ver yyyy-mm-dd description
# -1 <0.8.0 pre-spec-version-history
# 1 0.8.0 2004-08-01 Deprecated "test_suite_file" for "test_files"
# "test_file=x" is a shortcut for "test_files=[x]"
# 2 0.9.5 2007-10-01 Added "required_rubygems_version"
# Now forward-compatible with future versions
# 3 1.3.2 2009-01-03 Added Fixnum validation to specification_version
#--
# When updating this number, be sure to also update #to_ruby.
#
# NOTE RubyGems < 1.2 cannot load specification versions > 2.
CURRENT_SPECIFICATION_VERSION = 3
# :stopdoc:
# version => # of fields
MARSHAL_FIELDS = { -1 => 16, 1 => 16, 2 => 16, 3 => 17 }
today = Time.now.utc
TODAY = Time.utc(today.year, today.month, today.day)
# :startdoc:
##
# List of attribute names: [:name, :version, ...]
@@required_attributes = [:rubygems_version,
:specification_version,
:name,
:version,
:date,
:summary,
:require_paths]
##
# Map of attribute names to default values.
@@default_value = {
:authors => [],
:autorequire => nil,
:bindir => 'bin',
:cert_chain => [],
:date => TODAY,
:dependencies => [],
:description => nil,
:email => nil,
:executables => [],
:extensions => [],
:extra_rdoc_files => [],
:files => [],
:homepage => nil,
:licenses => [],
:name => nil,
:platform => Gem::Platform::RUBY,
:post_install_message => nil,
:rdoc_options => [],
:require_paths => ['lib'],
:required_ruby_version => Gem::Requirement.default,
:required_rubygems_version => Gem::Requirement.default,
:requirements => [],
:rubyforge_project => nil,
:rubygems_version => Gem::VERSION,
:signing_key => nil,
:specification_version => CURRENT_SPECIFICATION_VERSION,
:summary => nil,
:test_files => [],
:version => nil,
}
@@attributes = @@default_value.keys.sort_by { |s| s.to_s }
@@array_attributes = @@default_value.reject { |k,v| v != [] }.keys
@@nil_attributes, @@non_nil_attributes = @@default_value.keys.partition { |k|
@@default_value[k].nil?
}
######################################################################
# :section: Required gemspec attributes
##
# This gem's name
attr_accessor :name
##
# This gem's version
attr_reader :version
##
# Paths in the gem to add to $LOAD_PATH when this gem is activated.
#
# The default ['lib'] is typically sufficient.
attr_accessor :require_paths
##
# The version of RubyGems used to create this gem.
#
# Do not set this, it is set automatically when the gem is packaged.
attr_accessor :rubygems_version
##
# The Gem::Specification version of this gemspec.
#
# Do not set this, it is set automatically when the gem is packaged.
attr_accessor :specification_version
##
# A short summary of this gem's description. Displayed in `gem list -d`.
#
# The description should be more detailed than the summary. For example,
# you might wish to copy the entire README into the description.
attr_reader :summary
######################################################################
# :section: Optional gemspec attributes
##
# Autorequire was used by old RubyGems to automatically require a file.
#
# Deprecated: It is neither supported nor functional.
attr_accessor :autorequire
##
# The path in the gem for executable scripts. Usually 'bin'
attr_accessor :bindir
##
# The certificate chain used to sign this gem. See Gem::Security for
# details.
attr_accessor :cert_chain
##
# A long description of this gem
attr_reader :description
##
# Sets the default executable for this gem.
#
# Deprecated: You must now specify the executable name to Gem.bin_path.
attr_writer :default_executable
##
# A contact email for this gem
#
# If you are providing multiple authors and multiple emails they should be
# in the same order such that:
#
# Hash[*spec.authors.zip(spec.emails).flatten]
#
# Gives a hash of author name to email address.
attr_accessor :email
##
# The URL of this gem's home page
attr_accessor :homepage
##
# True when this gemspec has been activated. This attribute is not persisted.
attr_accessor :loaded # :nodoc:
alias :loaded? :loaded # :nodoc:
##
# True when this gemspec has been activated. This attribute is not persisted.
attr_accessor :activated
alias :activated? :activated
##
# Path this gemspec was loaded from. This attribute is not persisted.
attr_reader :loaded_from
##
# Allows deinstallation of gems with legacy platforms.
attr_writer :original_platform # :nodoc:
##
# A message that gets displayed after the gem is installed
attr_accessor :post_install_message
##
# The version of ruby required by this gem
attr_reader :required_ruby_version
##
# The RubyGems version required by this gem
attr_reader :required_rubygems_version
##
# The rubyforge project this gem lives under, e.g. RubyGems'
# rubyforge_project is "rubygems".
attr_accessor :rubyforge_project
##
# The key used to sign this gem. See Gem::Security for details.
attr_accessor :signing_key
def self._all # :nodoc:
unless defined?(@@all) && @@all then
specs = []
self.dirs.reverse_each { |dir|
Dir[File.join(dir, "*.gemspec")].each { |path|
spec = Gem::Specification.load path.untaint
# #load returns nil if the spec is bad, so we just ignore
# it at this stage
specs << spec if spec
}
}
@@all = specs
_resort!
end
@@all
end
def self._resort! # :nodoc:
@@all.sort! { |a, b|
names = a.name <=> b.name
next names if names.nonzero?
b.version <=> a.version
}
end
##
# Adds +spec+ to the known specifications, keeping the collection
# properly sorted.
def self.add_spec spec
# TODO: find all extraneous adds
# puts
# p :add_spec => [spec.full_name, caller.reject { |s| s =~ /minitest/ }]
# TODO: flush the rest of the crap from the tests
# raise "no dupes #{spec.full_name} in #{all_names.inspect}" if
# _all.include? spec
raise "nil spec!" unless spec # TODO: remove once we're happy with tests
return if _all.include? spec
_all << spec
_resort!
end
##
# Adds multiple specs to the known specifications.
def self.add_specs *specs
raise "nil spec!" if specs.any?(&:nil?) # TODO: remove once we're happy
# TODO: this is much more efficient, but we need the extra checks for now
# _all.concat specs
# _resort!
specs.each do |spec| # TODO: slow
add_spec spec
end
end
##
# Returns all specifications. This method is discouraged from use.
# You probably want to use one of the Enumerable methods instead.
def self.all
warn "NOTE: Specification.all called from #{caller.first}" unless
Gem::Deprecate.skip
_all
end
##
# Sets the known specs to +specs+. Not guaranteed to work for you in
# the future. Use at your own risk. Caveat emptor. Doomy doom doom.
# Etc etc.
#
#--
# Makes +specs+ the known specs
# Listen, time is a river
# Winter comes, code breaks
#
# -- wilsonb
def self.all= specs
@@all = specs
end
##
# Return full names of all specs in sorted order.
def self.all_names
self._all.map(&:full_name)
end
##
# Return the list of all array-oriented instance variables.
#--
# Not sure why we need to use so much stupid reflection in here...
def self.array_attributes
@@array_attributes.dup
end
##
# Return the list of all instance variables.
#--
# Not sure why we need to use so much stupid reflection in here...
def self.attribute_names
@@attributes.dup
end
##
# Return the directories that Specification uses to find specs.
def self.dirs
@@dirs ||= Gem.path.collect { |dir|
File.join dir, "specifications"
}
end
##
# Set the directories that Specification uses to find specs. Setting
# this resets the list of known specs.
def self.dirs= dirs
# TODO: find extra calls to dir=
# warn "NOTE: dirs= called from #{caller.first} for #{dirs.inspect}"
self.reset
# ugh
@@dirs = Array(dirs).map { |dir| File.join dir, "specifications" }
end
extend Enumerable
##
# Enumerate every known spec. See ::dirs= and ::add_spec to set the list of
# specs.
def self.each
return enum_for(:each) unless block_given?
self._all.each do |x|
yield x
end
end
##
# Returns every spec that matches +name+ and optional +requirements+.
def self.find_all_by_name name, *requirements
requirements = Gem::Requirement.default if requirements.empty?
# TODO: maybe try: find_all { |s| spec === dep }
Gem::Dependency.new(name, *requirements).matching_specs
end
##
# Find the best specification matching a +name+ and +requirements+. Raises
# if the dependency doesn't resolve to a valid specification.
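#
# For example (gem name and requirement are illustrative):
#
#   Gem::Specification.find_by_name 'example', '~> 1.0'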
def self.find_by_name name, *requirements
requirements = Gem::Requirement.default if requirements.empty?
# TODO: maybe try: find { |s| spec === dep }
Gem::Dependency.new(name, *requirements).to_spec
end
##
# Return the best specification that contains the file matching +path+.
def self.find_by_path path
self.find { |spec|
spec.contains_requirable_file? path
}
end
##
# Return currently unresolved specs that contain the file matching +path+.
def self.find_in_unresolved path
# TODO: do we need these?? Kill it
specs = Gem.unresolved_deps.values.map { |dep| dep.to_specs }.flatten
specs.find_all { |spec| spec.contains_requirable_file? path }
end
##
# Search through all unresolved deps and sub-dependencies and return
# specs that contain the file matching +path+.
def self.find_in_unresolved_tree path
specs = Gem.unresolved_deps.values.map { |dep| dep.to_specs }.flatten
specs.reverse_each do |spec|
trails = []
spec.traverse do |from_spec, dep, to_spec, trail|
next unless to_spec.conflicts.empty?
trails << trail if to_spec.contains_requirable_file? path
end
next if trails.empty?
return trails.map(&:reverse).sort.first.reverse
end
[]
end
##
# Special loader for YAML files. When a Specification object is loaded
# from a YAML file, it bypasses the normal Ruby object initialization
# routine (#initialize). This method makes up for that and deals with
# gems of different ages.
#
# +input+ can be anything that YAML.load() accepts: String or IO.
def self.from_yaml(input)
Gem.load_yaml
input = normalize_yaml_input input
spec = YAML.load input
if spec && spec.class == FalseClass then
raise Gem::EndOfYAMLException
end
unless Gem::Specification === spec then
raise Gem::Exception, "YAML data doesn't evaluate to gem specification"
end
unless (spec.instance_variables.include? '@specification_version' or
spec.instance_variables.include? :@specification_version) and
spec.instance_variable_get :@specification_version
spec.instance_variable_set :@specification_version,
NONEXISTENT_SPECIFICATION_VERSION
end
spec
end
##
# Return the latest specs, optionally including prerelease specs if
# +prerelease+ is true.
def self.latest_specs prerelease = false
result = Hash.new { |h,k| h[k] = {} }
native = {}
Gem::Specification._all.reverse_each do |spec|
next if spec.version.prerelease? unless prerelease
native[spec.name] = spec.version if spec.platform == Gem::Platform::RUBY
result[spec.name][spec.platform] = spec
end
result.map(&:last).map(&:values).flatten.reject { |spec|
minimum = native[spec.name]
minimum && spec.version < minimum
}
end
##
# Loads Ruby format gemspec from +file+.
def self.load file
return unless file && File.file?(file)
file = file.dup.untaint
code = if defined? Encoding
File.read file, :encoding => "UTF-8"
else
File.read file
end
code.untaint
begin
spec = eval code, binding, file
if Gem::Specification === spec
spec.loaded_from = file.to_s
return spec
end
warn "[#{file}] isn't a Gem::Specification (#{spec.class} instead)."
rescue SignalException, SystemExit
raise
rescue SyntaxError, Exception => e
warn "Invalid gemspec in [#{file}]: #{e}"
end
nil
end
##
# Specification attributes that must be non-nil
def self.non_nil_attributes
@@non_nil_attributes.dup
end
##
# Make sure the YAML specification is properly formatted with dashes
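#
# For example, input lacking the document marker gains one (illustrative):
#
#   normalize_yaml_input "name: example\n" # => "--- name: example\n"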
def self.normalize_yaml_input(input)
result = input.respond_to?(:read) ? input.read : input
result = "--- " + result unless result =~ /\A--- /
result.gsub!(/ !!null \n/, " \n")
# date: 2011-04-26 00:00:00.000000000Z
# date: 2011-04-26 00:00:00.000000000 Z
result.gsub!(/^(date: \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d+?)Z/, '\1 Z')
result
end
##
# Return a list of all outdated specifications. This method is HEAVY
# as it must go fetch specifications from the server.
def self.outdated
outdateds = []
# TODO: maybe we should switch to rubygems' version service?
fetcher = Gem::SpecFetcher.fetcher
latest_specs.each do |local|
dependency = Gem::Dependency.new local.name, ">= #{local.version}"
remotes = fetcher.find_matching dependency
remotes = remotes.map { |(_, version, _), _| version }
latest = remotes.sort.last
outdateds << local.name if latest and local.version < latest
end
outdateds
end
##
# Removes +spec+ from the known specs.
def self.remove_spec spec
# TODO: beat on the tests
raise "wtf: #{spec.full_name} not in #{all_names.inspect}" unless
_all.include? spec
_all.delete spec
end
##
# Is +name+ a required attribute?
def self.required_attribute?(name)
@@required_attributes.include? name.to_sym
end
##
# Required specification attributes
def self.required_attributes
@@required_attributes.dup
end
##
# Reset the list of known specs, running pre and post reset hooks
# registered in Gem.
def self.reset
@@dirs = nil
# from = caller.first(10).reject { |s| s =~ /minitest/ }
# warn ""
# warn "NOTE: Specification.reset from #{from.inspect}"
Gem.pre_reset_hooks.each { |hook| hook.call }
@@all = nil
Gem.post_reset_hooks.each { |hook| hook.call }
end
##
# Load custom marshal format, re-initializing defaults as needed
def self._load(str)
array = Marshal.load str
spec = Gem::Specification.new
spec.instance_variable_set :@specification_version, array[1]
current_version = CURRENT_SPECIFICATION_VERSION
field_count = if spec.specification_version > current_version then
spec.instance_variable_set :@specification_version,
current_version
MARSHAL_FIELDS[current_version]
else
MARSHAL_FIELDS[spec.specification_version]
end
if array.size < field_count then
raise TypeError, "invalid Gem::Specification format #{array.inspect}"
end
spec.instance_variable_set :@rubygems_version, array[0]
# spec version
spec.instance_variable_set :@name, array[2]
spec.instance_variable_set :@version, array[3]
spec.instance_variable_set :@date, array[4]
spec.instance_variable_set :@summary, array[5]
spec.instance_variable_set :@required_ruby_version, array[6]
spec.instance_variable_set :@required_rubygems_version, array[7]
spec.instance_variable_set :@original_platform, array[8]
spec.instance_variable_set :@dependencies, array[9]
spec.instance_variable_set :@rubyforge_project, array[10]
spec.instance_variable_set :@email, array[11]
spec.instance_variable_set :@authors, array[12]
spec.instance_variable_set :@description, array[13]
spec.instance_variable_set :@homepage, array[14]
spec.instance_variable_set :@has_rdoc, array[15]
spec.instance_variable_set :@new_platform, array[16]
spec.instance_variable_set :@platform, array[16].to_s
spec.instance_variable_set :@license, array[17]
spec.instance_variable_set :@loaded, false
spec.instance_variable_set :@activated, false
spec
end
def <=>(other) # :nodoc:
sort_obj <=> other.sort_obj
end
def == other # :nodoc:
self.class === other &&
name == other.name &&
version == other.version &&
platform == other.platform
end
##
# Dump only crucial instance variables.
#--
# MAINTAIN ORDER!
# (down with the man)
def _dump(limit)
Marshal.dump [
@rubygems_version,
@specification_version,
@name,
@version,
date,
@summary,
@required_ruby_version,
@required_rubygems_version,
@original_platform,
@dependencies,
@rubyforge_project,
@email,
@authors,
@description,
@homepage,
true, # has_rdoc
@new_platform,
@licenses
]
end
##
# Activate this spec, registering it as a loaded spec and adding
# its lib paths to $LOAD_PATH. Returns true if the spec was
# activated, false if it was previously activated. Freaks out if
# there are conflicts upon activation.
def activate
raise_if_conflicts
return false if Gem.loaded_specs[self.name]
activate_dependencies
add_self_to_load_path
Gem.loaded_specs[self.name] = self
@activated = true
@loaded = true
return true
end
##
# Activate all unambiguously resolved runtime dependencies of this
# spec. Add any ambiguous dependencies to the unresolved list to be
# resolved later, as needed.
def activate_dependencies
self.runtime_dependencies.each do |spec_dep|
if loaded = Gem.loaded_specs[spec_dep.name]
next if spec_dep.matches_spec? loaded
msg = "can't satisfy '#{spec_dep}', already activated '#{loaded.full_name}'"
e = Gem::LoadError.new msg
e.name = spec_dep.name
raise e
end
specs = spec_dep.to_specs
if specs.size == 1 then
specs.first.activate
else
name = spec_dep.name
Gem.unresolved_deps[name] = Gem.unresolved_deps[name].merge spec_dep
end
end
Gem.unresolved_deps.delete self.name
end
##
# Returns an array with bindir attached to each executable in the
# +executables+ list
def add_bindir(executables)
return nil if executables.nil?
if @bindir then
Array(executables).map { |e| File.join(@bindir, e) }
else
executables
end
rescue
return nil
end
##
# Adds a dependency on gem +dependency+ with type +type+ that requires
# +requirements+. Valid types are currently <tt>:runtime</tt> and
# <tt>:development</tt>.
def add_dependency_with_type(dependency, type, *requirements)
requirements = if requirements.empty? then
Gem::Requirement.default
else
requirements.flatten
end
unless dependency.respond_to?(:name) &&
dependency.respond_to?(:version_requirements)
dependency = Gem::Dependency.new(dependency, requirements, type)
end
dependencies << dependency
end
private :add_dependency_with_type
##
# Adds a development dependency named +gem+ with +requirements+ to this
# Gem. For example:
#
# spec.add_development_dependency 'example', '~> 1.1', '>= 1.1.4'
#
# Development dependencies aren't installed by default and aren't
# activated when a gem is required.
def add_development_dependency(gem, *requirements)
add_dependency_with_type(gem, :development, *requirements)
end
##
# Adds a runtime dependency named +gem+ with +requirements+ to this Gem.
# For example:
#
# spec.add_runtime_dependency 'example', '~> 1.1', '>= 1.1.4'
def add_runtime_dependency(gem, *requirements)
add_dependency_with_type(gem, :runtime, *requirements)
end
alias add_dependency add_runtime_dependency
##
# Adds this spec's require paths to LOAD_PATH, in the proper location.
def add_self_to_load_path
paths = require_paths.map do |path|
File.join full_gem_path, path
end
# gem directories must come after -I and ENV['RUBYLIB']
insert_index = Gem.load_path_insert_index
if insert_index then
# gem directories must come after -I and ENV['RUBYLIB']
$LOAD_PATH.insert(insert_index, *paths)
else
# we are probably testing in core, -I and RUBYLIB don't apply
$LOAD_PATH.unshift(*paths)
end
end
##
# Singular reader for #authors
def author
val = authors and val.first
end
##
# Singular writer for #authors
def author= o
self.authors = [o]
end
##
# The list of author names who wrote this gem.
#
# If you are providing multiple authors and multiple emails they should be
# in the same order such that:
#
# Hash[*spec.authors.zip(spec.emails).flatten]
#
# Gives a hash of author name to email address.
def authors
@authors ||= []
end
##
# Sets the list of authors, ensuring it is an array.
def authors= value
@authors = Array(value).flatten.grep(String)
end
##
# Returns the full path to the base gem directory.
#
# eg: /usr/local/lib/ruby/gems/1.8
def base_dir
return Gem.dir unless loaded_from
@base_dir ||= File.dirname File.dirname loaded_from
end
##
# Returns the full path to installed gem's bin directory.
#
# NOTE: do not confuse this with +bindir+, which is just 'bin', not
# a full path.
def bin_dir
@bin_dir ||= File.join gem_dir, bindir # TODO: this is unfortunate
end
##
# Returns the full path to an executable named +name+ in this gem.
def bin_file name
File.join bin_dir, name
end
##
# Returns the full path to the cache directory containing this
# spec's cached gem.
def cache_dir
@cache_dir ||= File.join base_dir, "cache"
end
##
# Returns the full path to the cached gem for this spec.
def cache_file
@cache_file ||= File.join cache_dir, "#{full_name}.gem"
end
alias :cache_gem :cache_file
##
# Return any possible conflicts against the currently loaded specs.
def conflicts
conflicts = {}
Gem.loaded_specs.values.each do |spec|
bad = self.runtime_dependencies.find_all { |dep|
spec.name == dep.name and not spec.satisfies_requirement? dep
}
conflicts[spec] = bad unless bad.empty?
end
conflicts
end
##
# Return true if this spec can require +file+.
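#
# For example, with 'lib/example.rb' shipped in the gem (illustrative):
#
#   spec.contains_requirable_file? 'example' # => true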
def contains_requirable_file? file
root = full_gem_path
require_paths.each do |lib|
base = "#{root}/#{lib}/#{file}"
Gem.suffixes.each do |suf|
path = "#{base}#{suf}"
return true if File.file? path
end
end
return false
end
##
# The date this gem was created. Lazily defaults to TODAY.
def date
@date ||= TODAY
end
##
# The date this gem was created
#
# Do not set this, it is set automatically when the gem is packaged.
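#
# Accepts a "YYYY-MM-DD" String, a Time or a Date; e.g. (illustrative):
#
#   spec.date = '2011-04-26' # stored as Time.utc(2011, 4, 26)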
def date= date
# We want to end up with a Time object with one-day resolution.
# This is the cleanest, most-readable, faster-than-using-Date
# way to do it.
@date = case date
when String then
if /\A(\d{4})-(\d{2})-(\d{2})\Z/ =~ date then
Time.utc($1.to_i, $2.to_i, $3.to_i)
else
raise(Gem::InvalidSpecificationException,
"invalid date format in specification: #{date.inspect}")
end
when Time, Date then
Time.utc(date.year, date.month, date.day)
else
TODAY
end
end
##
# The default executable for this gem.
#
# Deprecated: The name of the gem is assumed to be the name of the
# executable now. See Gem.bin_path.
def default_executable
if defined?(@default_executable) and @default_executable
result = @default_executable
elsif @executables and @executables.size == 1
result = Array(@executables).first
else
result = nil
end
result
end
##
# The default value for specification attribute +name+
def default_value name
@@default_value[name]
end
##
# A list of Gem::Dependency objects this gem depends on.
#
# Use #add_dependency or #add_development_dependency to add dependencies to
# a gem.
def dependencies
@dependencies ||= []
end
##
# Return a list of all gems that have a dependency on this gemspec. The
# list is structured with entries that conform to:
#
# [depending_gem, dependency, [list_of_gems_that_satisfy_dependency]]
def dependent_gems
out = []
Gem::Specification.each do |spec|
spec.dependencies.each do |dep|
if self.satisfies_requirement?(dep) then
sats = []
find_all_satisfiers(dep) do |sat|
sats << sat
end
out << [spec, dep, sats]
end
end
end
out
end
##
# Returns all specs that matches this spec's runtime dependencies.
def dependent_specs
runtime_dependencies.map { |dep| dep.to_specs }.flatten
end
##
# A long description of this gem
def description= str
@description = str.to_s
end
##
# List of dependencies that are used for development
def development_dependencies
dependencies.select { |d| d.type == :development }
end
##
# Returns the full path to this spec's documentation directory.
def doc_dir
@doc_dir ||= File.join base_dir, 'doc', full_name
end
def encode_with coder # :nodoc:
mark_version
coder.add 'name', @name
coder.add 'version', @version
platform = case @original_platform
when nil, '' then
'ruby'
when String then
@original_platform
else
@original_platform.to_s
end
coder.add 'platform', platform
attributes = @@attributes.map(&:to_s) - %w[name version platform]
attributes.each do |name|
coder.add name, instance_variable_get("@#{name}")
end
end
def eql? other # :nodoc:
self.class === other && same_attributes?(other)
end
##
# Singular accessor for #executables
def executable
val = executables and val.first
end
##
# Singular accessor for #executables
def executable=o
self.executables = [o]
end
##
# Executables included in the gem.
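#
# For example (executable name is illustrative):
#
#   spec.executables << 'example'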
def executables
@executables ||= []
end
##
# Sets executables to +value+, ensuring it is an array. Don't
# use this, push onto the array instead.
def executables= value
# TODO: warn about setting instead of pushing
@executables = Array(value)
end
##
# Extensions to build when installing the gem. See
# Gem::Installer#build_extensions for valid values.
def extensions
@extensions ||= []
end
##
# Sets extensions to +extensions+, ensuring it is an array. Don't
# use this, push onto the array instead.
def extensions= extensions
# TODO: warn about setting instead of pushing
@extensions = Array extensions
end
##
# Extra files to add to RDoc such as README or doc/examples.txt
def extra_rdoc_files
@extra_rdoc_files ||= []
end
##
# Sets extra_rdoc_files to +files+, ensuring it is an array. Don't
# use this, push onto the array instead.
def extra_rdoc_files= files
# TODO: warn about setting instead of pushing
@extra_rdoc_files = Array files
end
##
# The default (generated) file name of the gem. See also #spec_name.
#
# spec.file_name # => "example-1.0.gem"
def file_name
"#{full_name}.gem"
end
##
# Files included in this gem. You cannot append to this accessor, you must
# assign to it.
#
# Only add files you can require to this list, not directories, etc.
#
# Directories are automatically stripped from this list when building a gem,
# other non-files cause an error.
def files
# DO NOT CHANGE TO ||= ! This is not a normal accessor. (yes, it sucks)
@files = [@files,
@test_files,
add_bindir(@executables),
@extra_rdoc_files,
@extensions,
].flatten.uniq.compact
end
##
# Sets files to +files+, ensuring it is an array.
def files= files
@files = Array files
end
##
# Finds all gems that satisfy +dep+
def find_all_satisfiers dep
Gem::Specification.each do |spec|
yield spec if spec.satisfies_requirement? dep
end
end
private :find_all_satisfiers
##
# Creates a duplicate spec without large blobs that aren't used at runtime.
def for_cache
spec = dup
spec.files = nil
spec.test_files = nil
spec
end
##
# The full path to the gem (install path + full name).
def full_gem_path
# TODO: try to get rid of this... or the awkward
# TODO: also, shouldn't it default to full_name if it hasn't been written?
return @full_gem_path if defined?(@full_gem_path) && @full_gem_path
@full_gem_path = File.expand_path File.join(gems_dir, full_name)
return @full_gem_path if File.directory? @full_gem_path
@full_gem_path = File.expand_path File.join(gems_dir, original_name)
end
##
# Returns the full name (name-version) of this Gem. Platform information
# is included (name-version-platform) if it is specified and not the
# default Ruby platform.
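#
# For example (names illustrative):
#
#   spec.full_name # => "example-1.0" or "example-1.0-x86-mswin32"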
def full_name
if platform == Gem::Platform::RUBY or platform.nil? then
"#{@name}-#{@version}"
else
"#{@name}-#{@version}-#{platform}"
end
end
##
# Returns the full path to this spec's gem directory.
# eg: /usr/local/lib/ruby/1.8/gems/mygem-1.0
def gem_dir
@gem_dir ||= File.expand_path File.join(gems_dir, full_name)
end
##
# Returns the full path to the gems directory containing this spec's
# gem directory. eg: /usr/local/lib/ruby/1.8/gems
def gems_dir
# TODO: this logic seems terribly broken, but tests fail if just base_dir
@gems_dir ||= File.join(loaded_from && base_dir || Gem.dir, "gems")
end
##
# Deprecated and ignored, defaults to true.
#
# Formerly used to indicate this gem was RDoc-capable.
def has_rdoc
true
end
##
# Deprecated and ignored.
#
# Formerly used to indicate this gem was RDoc-capable.
def has_rdoc= ignored
@has_rdoc = true
end
alias :has_rdoc? :has_rdoc
##
# True if this gem has files in test_files
def has_unit_tests?
not test_files.empty?
end
# :stopdoc:
alias has_test_suite? has_unit_tests?
# :startdoc:
def hash # :nodoc:
@@attributes.inject(0) { |hash_code, (name, _)|
hash_code ^ self.send(name).hash
}
end
def init_with coder # :nodoc:
yaml_initialize coder.tag, coder.map
end
##
# Specification constructor. Assigns the default values to the attributes
# and yields itself for further initialization. Optionally takes +name+ and
# +version+.
def initialize name = nil, version = nil
@loaded = false
@activated = false
@loaded_from = nil
@original_platform = nil
@@nil_attributes.each do |key|
instance_variable_set "@#{key}", nil
end
@@non_nil_attributes.each do |key|
default = default_value(key)
value = case default
when Time, Numeric, Symbol, true, false, nil then default
else default.dup
end
instance_variable_set "@#{key}", value
end
@new_platform = Gem::Platform::RUBY
self.name = name if name
self.version = version if version
yield self if block_given?
end
##
# Duplicates array_attributes from +other_spec+ so state isn't shared.
def initialize_copy other_spec
other_ivars = other_spec.instance_variables
other_ivars = other_ivars.map { |ivar| ivar.intern } if # for 1.9
String === other_ivars.first
self.class.array_attributes.each do |name|
name = :"@#{name}"
next unless other_ivars.include? name
begin
val = other_spec.instance_variable_get(name)
if val then
instance_variable_set name, val.dup
else
warn "WARNING: #{full_name} has an invalid nil value for #{name}"
end
rescue TypeError
e = Gem::FormatException.new \
"#{full_name} has an invalid value for #{name}"
e.file_path = loaded_from
raise e
end
end
end
##
# The directory that this gem was installed into.
# TODO: rename - horrible. this is the base_dir for a gem path
def installation_path
loaded_from && base_dir
end
##
# Returns a string usable in Dir.glob to match all requirable paths
# for this spec.
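#
# For example, with the default single require path (path illustrative):
#
#   spec.lib_dirs_glob # => "/usr/local/lib/ruby/1.8/gems/example-1.0/lib"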
def lib_dirs_glob
dirs = if self.require_paths.size > 1 then
"{#{self.require_paths.join(',')}}"
else
self.require_paths.first
end
"#{self.full_gem_path}/#{dirs}"
end
##
# Files in the Gem under one of the require_paths
def lib_files
@files.select do |file|
require_paths.any? do |path|
file.index(path) == 0
end
end
end
##
# Singular accessor for #licenses
def license
val = licenses and val.first
end
##
# Singular accessor for #licenses
def license=o
self.licenses = [o]
end
##
# The license(s) for the library. Each license must be a short name, no
# more than 64 characters.
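#
# For example:
#
#   spec.license = 'MIT'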
def licenses
@licenses ||= []
end
##
# Set licenses to +licenses+, ensuring it is an array.
def licenses= licenses
@licenses = Array licenses
end
##
# Set the location a Specification was loaded from. +obj+ is converted
# to a String.
def loaded_from= path
@loaded_from = path.to_s
end
##
# Sets the rubygems_version to the current RubyGems version.
def mark_version
@rubygems_version = Gem::VERSION
end
##
# Return all files in this gem that match for +glob+.
def matches_for_glob glob # TODO: rename?
# TODO: do we need these?? Kill it
glob = File.join(self.lib_dirs_glob, glob)
Dir[glob].map { |f| f.untaint } # FIX our tests are broken, run w/ SAFE=1
end
##
# Warn about unknown attributes while loading a spec.
def method_missing(sym, *a, &b) # :nodoc:
if @specification_version > CURRENT_SPECIFICATION_VERSION and
sym.to_s =~ /=$/ then
warn "ignoring #{sym} loading #{full_name}" if $DEBUG
else
super
end
end
##
# Normalize the list of files so that:
# * All file lists have redundancies removed.
# * Files referenced in the extra_rdoc_files are included in the package
# file list.
def normalize
if defined?(@extra_rdoc_files) and @extra_rdoc_files then
@extra_rdoc_files.uniq!
@files ||= []
@files.concat(@extra_rdoc_files)
end
@files = @files.uniq if @files
@extensions = @extensions.uniq if @extensions
@test_files = @test_files.uniq if @test_files
@executables = @executables.uniq if @executables
@extra_rdoc_files = @extra_rdoc_files.uniq if @extra_rdoc_files
end
##
# Returns the full name (name-version) of this gemspec using the original
# platform. For use with legacy gems.
def original_name # :nodoc:
if platform == Gem::Platform::RUBY or platform.nil? then
"#{@name}-#{@version}"
else
"#{@name}-#{@version}-#{@original_platform}"
end
end
##
# Cruft. Use +platform+.
def original_platform # :nodoc:
@original_platform ||= platform
end
##
# The platform this gem runs on. See Gem::Platform for details.
def platform
@new_platform ||= Gem::Platform::RUBY
end
##
# The platform this gem runs on. See Gem::Platform for details.
#
# Setting this to any value other than Gem::Platform::RUBY or
# Gem::Platform::CURRENT is probably wrong.
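#
# For example, in a gemspec for a precompiled binary gem (illustrative):
#
#   spec.platform = Gem::Platform::CURRENT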
def platform= platform
if @original_platform.nil? or
@original_platform == Gem::Platform::RUBY then
@original_platform = platform
end
case platform
when Gem::Platform::CURRENT then
@new_platform = Gem::Platform.local
@original_platform = @new_platform.to_s
when Gem::Platform then
@new_platform = platform
# legacy constants
when nil, Gem::Platform::RUBY then
@new_platform = Gem::Platform::RUBY
when 'mswin32' then # was Gem::Platform::WIN32
@new_platform = Gem::Platform.new 'x86-mswin32'
when 'i586-linux' then # was Gem::Platform::LINUX_586
@new_platform = Gem::Platform.new 'x86-linux'
when 'powerpc-darwin' then # was Gem::Platform::DARWIN
@new_platform = Gem::Platform.new 'ppc-darwin'
else
@new_platform = Gem::Platform.new platform
end
@platform = @new_platform.to_s
@new_platform
end
def pretty_print(q) # :nodoc:
q.group 2, 'Gem::Specification.new do |s|', 'end' do
q.breakable
# REFACTOR: each_attr - use in to_yaml as well
@@attributes.each do |attr_name|
current_value = self.send attr_name
if current_value != default_value(attr_name) or
self.class.required_attribute? attr_name then
q.text "s.#{attr_name} = "
if attr_name == :date then
current_value = current_value.utc
q.text "Time.utc(#{current_value.year}, #{current_value.month}, #{current_value.day})"
else
q.pp current_value
end
q.breakable
end
end
end
end
##
# Check the spec for possible conflicts and freak out if there are any.
def raise_if_conflicts
other = Gem.loaded_specs[self.name]
if other and self.version != other.version then
# This gem is already loaded. If the currently loaded gem is not in the
# list of candidate gems, then we have a version conflict.
msg = "can't activate #{full_name}, already activated #{other.full_name}"
e = Gem::LoadError.new msg
e.name = self.name
# TODO: e.requirement = dep.requirement
raise e
end
conf = self.conflicts
unless conf.empty? then
y = conf.map { |act,con|
"#{act.full_name} conflicts with #{con.join(", ")}"
}.join ", "
# TODO: improve message by saying who activated `con`
raise Gem::LoadError, "Unable to activate #{self.full_name}, because #{y}"
end
end
##
# An ARGV style array of options to RDoc
def rdoc_options
@rdoc_options ||= []
end
##
# Sets rdoc_options to +value+, ensuring it is an array. Don't
# use this, push onto the array instead.
def rdoc_options= options
# TODO: warn about setting instead of pushing
@rdoc_options = Array options
end
##
# Singular accessor for #require_paths
def require_path
val = require_paths and val.first
end
##
# Singular accessor for #require_paths
def require_path= path
self.require_paths = [path]
end
##
# The version of ruby required by this gem
def required_ruby_version= req
@required_ruby_version = Gem::Requirement.create req
end
##
# The RubyGems version required by this gem
def required_rubygems_version= req
@required_rubygems_version = Gem::Requirement.create req
end
##
# An array of things required by this gem. Not used by anything
# presently.
def requirements
@requirements ||= []
end
##
# Set requirements to +req+, ensuring it is an array. Don't
# use this, push onto the array instead.
def requirements= req
# TODO: warn about setting instead of pushing
@requirements = Array req
end
##
# Returns the full path to this spec's ri directory.
def ri_dir
@ri_dir ||= File.join base_dir, 'ri', full_name
end
##
# Return a string containing a Ruby code representation of the given
# object.
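#
# For example (illustrative):
#
#   ruby_code "hi"        # => "\"hi\""
#   ruby_code [1, "two"]  # => "[1, \"two\"]"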
def ruby_code(obj)
case obj
when String then obj.dump
when Array then '[' + obj.map { |x| ruby_code x }.join(", ") + ']'
when Gem::Version then obj.to_s.dump
when Date then obj.strftime('%Y-%m-%d').dump
when Time then obj.strftime('%Y-%m-%d').dump
when Numeric then obj.inspect
when true, false, nil then obj.inspect
when Gem::Platform then "Gem::Platform.new(#{obj.to_a.inspect})"
when Gem::Requirement then "Gem::Requirement.new(#{obj.to_s.inspect})"
else raise Gem::Exception, "ruby_code case not handled: #{obj.class}"
end
end
private :ruby_code
##
# List of dependencies that will automatically be activated at runtime.
def runtime_dependencies
dependencies.select { |d| d.type == :runtime }
end
##
# True if this gem has the same attributes as +other+.
def same_attributes? spec
@@attributes.all? { |name, default| self.send(name) == spec.send(name) }
end
private :same_attributes?
##
# Checks if this specification meets the requirement of +dependency+.
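#
# For example (dependency illustrative; true when the name matches and
# this spec's version satisfies the requirement):
#
#   spec.satisfies_requirement? Gem::Dependency.new('example', '~> 1.0')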
def satisfies_requirement? dependency
return @name == dependency.name &&
dependency.requirement.satisfied_by?(@version)
end
##
# Returns an object you can use to sort specifications in #sort_by.
def sort_obj
# TODO: this is horrible. Deprecate it.
[@name, @version, @new_platform == Gem::Platform::RUBY ? -1 : 1]
end
##
# Returns the full path to the directory containing this spec's
# gemspec file. eg: /usr/local/lib/ruby/gems/1.8/specifications
def spec_dir
@spec_dir ||= File.join base_dir, "specifications"
end
##
# Returns the full path to this spec's gemspec file.
# eg: /usr/local/lib/ruby/gems/1.8/specifications/mygem-1.0.gemspec
def spec_file
@spec_file ||= File.join spec_dir, "#{full_name}.gemspec"
end
##
# The default name of the gemspec. See also #file_name
#
# spec.spec_name # => "example-1.0.gemspec"
def spec_name
"#{full_name}.gemspec"
end
##
# A short summary of this gem's description.
def summary= str
@summary = str.to_s.strip.
gsub(/(\w-)\n[ \t]*(\w)/, '\1\2').gsub(/\n[ \t]*/, " ") # so. weird.
end
##
# Singular accessor for #test_files
def test_file
    test_files.first if test_files
end
##
# Singular accessor for #test_files
def test_file= file
self.test_files = [file]
end
##
# Test files included in this gem. You cannot append to this accessor, you
# must assign to it.
def test_files
# Handle the possibility that we have @test_suite_file but not
# @test_files. This will happen when an old gem is loaded via
# YAML.
if defined? @test_suite_file then
@test_files = [@test_suite_file].flatten
@test_suite_file = nil
end
if defined?(@test_files) and @test_files then
@test_files
else
@test_files = []
end
end
##
# Set test_files to +files+, ensuring it is an array.
def test_files= files
@test_files = Array files
end
def test_suite_file # :nodoc:
# TODO: deprecate
test_files.first
end
def test_suite_file= file # :nodoc:
# TODO: deprecate
@test_files = [] unless defined? @test_files
@test_files << file
end
##
  # Returns a Ruby code representation of this specification, such that it can
  # be eval'ed to reconstruct the same specification later. Attributes that
# still have their default values are omitted.
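  #
  # For example (illustrative):
  #
  #   eval spec.to_ruby # => a Gem::Specification equivalent to spec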
def to_ruby
mark_version
result = []
result << "# -*- encoding: utf-8 -*-"
result << nil
result << "Gem::Specification.new do |s|"
result << " s.name = #{ruby_code name}"
result << " s.version = #{ruby_code version}"
unless platform.nil? or platform == Gem::Platform::RUBY then
result << " s.platform = #{ruby_code original_platform}"
end
result << ""
result << " s.required_rubygems_version = #{ruby_code required_rubygems_version} if s.respond_to? :required_rubygems_version="
handled = [
:dependencies,
:name,
:platform,
:required_rubygems_version,
:specification_version,
:version,
:has_rdoc,
:default_executable,
]
@@attributes.each do |attr_name|
next if handled.include? attr_name
current_value = self.send(attr_name)
if current_value != default_value(attr_name) or
self.class.required_attribute? attr_name then
result << " s.#{attr_name} = #{ruby_code current_value}"
end
end
result << nil
result << " if s.respond_to? :specification_version then"
result << " s.specification_version = #{specification_version}"
result << nil
result << " if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then"
dependencies.each do |dep|
req = dep.requirements_list.inspect
dep.instance_variable_set :@type, :runtime if dep.type.nil? # HACK
result << " s.add_#{dep.type}_dependency(%q<#{dep.name}>, #{req})"
end
result << " else"
dependencies.each do |dep|
version_reqs_param = dep.requirements_list.inspect
result << " s.add_dependency(%q<#{dep.name}>, #{version_reqs_param})"
end
result << ' end'
result << " else"
dependencies.each do |dep|
version_reqs_param = dep.requirements_list.inspect
result << " s.add_dependency(%q<#{dep.name}>, #{version_reqs_param})"
end
result << " end"
result << "end"
result << nil
result.join "\n"
end
##
  # Returns a lighter-weight Ruby code representation of this specification,
# used for indexing only.
#
# See #to_ruby.
def to_ruby_for_cache
for_cache.to_ruby
end
def to_s # :nodoc:
"#<Gem::Specification name=#{@name} version=#{@version}>"
end
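  # Emits YAML for this spec. Under the Psych YAML engine nil attributes
  # serialize as " !!null"; that is stripped here so the output matches the
  # older Syck engine.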
def to_yaml(opts = {}) # :nodoc:
if YAML.const_defined?(:ENGINE) && !YAML::ENGINE.syck? then
super.gsub(/ !!null \n/, " \n")
else
YAML.quick_emit object_id, opts do |out|
out.map taguri, to_yaml_style do |map|
encode_with map
end
end
end
end
##
# Recursively walk dependencies of this spec, executing the +block+ for each
# hop.
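  #
  # For example (illustrative):
  #
  #   spec.traverse do |from_spec, dependency, to_spec, trail|
  #     puts "#{from_spec.name} depends on #{to_spec.name}"
  #   end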
def traverse trail = [], &block
trail = trail + [self]
runtime_dependencies.each do |dep|
dep.to_specs.each do |dep_spec|
block[self, dep, dep_spec, trail + [dep_spec]]
dep_spec.traverse(trail, &block) unless
trail.map(&:name).include? dep_spec.name
end
end
end
##
# Checks that the specification contains all required fields, and does a
# very basic sanity check.
#
# Raises InvalidSpecificationException if the spec does not pass the
  # checks.
def validate packaging = true
require 'rubygems/user_interaction'
extend Gem::UserInteraction
normalize
nil_attributes = self.class.non_nil_attributes.find_all do |name|
instance_variable_get("@#{name}").nil?
end
unless nil_attributes.empty? then
raise Gem::InvalidSpecificationException,
"#{nil_attributes.join ', '} must not be nil"
end
if packaging and rubygems_version != Gem::VERSION then
raise Gem::InvalidSpecificationException,
"expected RubyGems version #{Gem::VERSION}, was #{rubygems_version}"
end
@@required_attributes.each do |symbol|
unless self.send symbol then
raise Gem::InvalidSpecificationException,
"missing value for attribute #{symbol}"
end
end
unless String === name then
raise Gem::InvalidSpecificationException,
"invalid value for attribute name: \"#{name.inspect}\""
end
if require_paths.empty? then
raise Gem::InvalidSpecificationException,
'specification must have at least one require_path'
end
@files.delete_if { |x| File.directory?(x) }
@test_files.delete_if { |x| File.directory?(x) }
@executables.delete_if { |x| File.directory?(File.join(@bindir, x)) }
@extra_rdoc_files.delete_if { |x| File.directory?(x) }
@extensions.delete_if { |x| File.directory?(x) }
non_files = files.reject { |x| File.file?(x) }
    if packaging and not non_files.empty? then
raise Gem::InvalidSpecificationException,
"[\"#{non_files.join "\", \""}\"] are not files"
end
unless specification_version.is_a?(Fixnum)
raise Gem::InvalidSpecificationException,
'specification_version must be a Fixnum (did you mean version?)'
end
case platform
when Gem::Platform, Gem::Platform::RUBY then # ok
else
raise Gem::InvalidSpecificationException,
"invalid platform #{platform.inspect}, see Gem::Platform"
end
self.class.array_attributes.each do |field|
val = self.send field
klass = case field
when :dependencies
Gem::Dependency
else
String
end
unless Array === val and val.all? { |x| x.kind_of?(klass) } then
raise(Gem::InvalidSpecificationException,
"#{field} must be an Array of #{klass}")
end
end
[:authors].each do |field|
val = self.send field
raise Gem::InvalidSpecificationException, "#{field} may not be empty" if
val.empty?
end
licenses.each { |license|
if license.length > 64
raise Gem::InvalidSpecificationException,
"each license must be 64 characters or less"
end
}
# reject lazy developers:
lazy = '"FIxxxXME" or "TOxxxDO"'.gsub(/xxx/, '')
unless authors.grep(/FI XME|TO DO/x).empty? then
raise Gem::InvalidSpecificationException, "#{lazy} is not an author"
end
unless Array(email).grep(/FI XME|TO DO/x).empty? then
raise Gem::InvalidSpecificationException, "#{lazy} is not an email"
end
if description =~ /FI XME|TO DO/x then
raise Gem::InvalidSpecificationException, "#{lazy} is not a description"
end
if summary =~ /FI XME|TO DO/x then
raise Gem::InvalidSpecificationException, "#{lazy} is not a summary"
end
if homepage and not homepage.empty? and
homepage !~ /\A[a-z][a-z\d+.-]*:/i then
raise Gem::InvalidSpecificationException,
"\"#{homepage}\" is not a URI"
end
# Warnings
%w[author description email homepage summary].each do |attribute|
value = self.send attribute
alert_warning "no #{attribute} specified" if value.nil? or value.empty?
end
if description == summary then
alert_warning 'description and summary are identical'
end
# TODO: raise at some given date
alert_warning "deprecated autorequire specified" if autorequire
executables.each do |executable|
executable_path = File.join(bindir, executable)
shebang = File.read(executable_path, 2) == '#!'
alert_warning "#{executable_path} is missing #! line" unless shebang
end
true
end
##
# Set the version to +version+, potentially also setting
# required_rubygems_version if +version+ indicates it is a
# prerelease.
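  #
  # For example (illustrative):
  #
  #   spec.version = '1.0.0.beta1'
  #   spec.required_rubygems_version # => Gem::Requirement.new('> 1.3.1')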
def version= version
@version = Gem::Version.create(version)
self.required_rubygems_version = '> 1.3.1' if @version.prerelease?
return @version
end
# FIX: have this handle the platform/new_platform/original_platform bullshit
def yaml_initialize(tag, vals) # :nodoc:
vals.each do |ivar, val|
instance_variable_set "@#{ivar}", val
end
@original_platform = @platform # for backwards compatibility
self.platform = Gem::Platform.new @platform
end
extend Gem::Deprecate
deprecate :test_suite_file, :test_file, 2011, 10
deprecate :test_suite_file=, :test_file=, 2011, 10
deprecate :loaded, :activated, 2011, 10
deprecate :loaded?, :activated?, 2011, 10
deprecate :loaded=, :activated=, 2011, 10
deprecate :installation_path, :base_dir, 2011, 10
deprecate :cache_gem, :cache_file, 2011, 10
# TODO:
# deprecate :has_rdoc, :none, 2011, 10
# deprecate :has_rdoc?, :none, 2011, 10
# deprecate :has_rdoc=, :none, 2011, 10
# deprecate :default_executable, :none, 2011, 10
# deprecate :default_executable=, :none, 2011, 10
# deprecate :spec_name, :spec_file, 2011, 10
# deprecate :file_name, :cache_file, 2011, 10
# deprecate :full_gem_path, :cache_file, 2011, 10
end
Gem.clear_paths
| 25.615708 | 131 | 0.636115 |
edfe6a255f13667be8e0db48879c0e7ada8e225c | 3,839 | module Cms::NodeFilter::ListView
extend ActiveSupport::Concern
include Cms::NodeFilter::View
include Cms::PublicFilter::Node
included do
before_action :accept_cors_request, only: [:rss]
before_action :prepend_current_view_path, only: [:generate]
helper Cms::ListHelper
end
private
def prepend_current_view_path
prepend_view_path "app/views/#{params[:controller]}"
end
def index_page_exists?
path = "#{@cur_node.filename}/index.html"
Cms::Page.site(@cur_site).and_public.filename(path).present?
end
def cleanup_index_files(start_index)
start_index.upto(9_999) do |page_index|
basename = "index.p#{page_index + 1}.html"
file = "#{@cur_node.path}/#{basename}"
break unless Fs.exists?(file)
Fs.rm_rf file
end
end
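  # Renders the node's index template for +items+ and wraps the result in the
  # node's layout (or the cms/redirect layout). The rendered layout is
  # memoized per node filename so repeated pagination pages do not re-render
  # it.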
def _render_with_pagination(items)
save_items = @items
@items = items
body = render_to_string(template: "index")
mime = rendered_format
if @cur_node.view_layout == "cms/redirect" && !mobile_path?
@redirect_link = trusted_url!(@cur_node.redirect_link)
body = render_to_string(html: "", layout: "cms/redirect")
elsif mime.html? && @cur_node.layout
@last_rendered_layout = nil if @last_rendered_node_filename != @cur_node.filename
@last_rendered_layout ||= begin
rendered_layout = render_layout(@cur_node.layout, content: "<!-- layout_yield --><!-- /layout_yield -->")
rendered_layout = render_to_string(html: rendered_layout.html_safe, layout: request.xhr? ? false : "cms/page")
@last_rendered_node_filename = @cur_node.filename
rendered_layout
end
body = @last_rendered_layout.sub("<!-- layout_yield --><!-- /layout_yield -->", body)
end
body
ensure
@items = save_items
end
def generate_empty_files
html = _render_with_pagination([])
basename = "index.html"
if Fs.write_data_if_modified("#{@cur_node.path}/#{basename}", html)
@task.log "#{@cur_node.url}#{basename}" if @task
end
basename = "rss.xml"
rss = _render_rss(@cur_node, [])
if Fs.write_data_if_modified("#{@cur_node.path}/#{basename}", rss.to_xml)
@task.log "#{@cur_node.url}#{basename}" if @task
end
end
public
def index
@items = pages.
order_by(@cur_node.sort_hash).
page(params[:page]).
per(@cur_node.limit)
render_with_pagination @items
end
def rss
@items = pages.
order_by(@cur_node.sort_hash).
limit(@cur_node.limit)
render_rss @cur_node, @items
end
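  # Generates static pagination files (index.html, index.p2.html, ...) plus
  # rss.xml for the first page, then removes any stale index.p*.html files
  # left over from a previously longer listing.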
def generate
if index_page_exists? || !@cur_node.serve_static_file?
cleanup_index_files(1)
return true
end
all_pages = pages.order_by(@cur_node.sort_hash).to_a
if all_pages.blank?
generate_empty_files
cleanup_index_files(1)
return true
end
next_page_index = 0
limit = @cur_node.limit
total_count = all_pages.length
all_pages.each_slice(limit).each_with_index do |pages, page_index|
offset = page_index * limit
pages = Kaminari.paginate_array(pages, limit: limit, offset: offset, total_count: total_count)
html = _render_with_pagination(pages)
if page_index == 0
basename = "index.html"
else
basename = "index.p#{page_index + 1}.html"
end
if Fs.write_data_if_modified("#{@cur_node.path}/#{basename}", html)
@task.log "#{@cur_node.url}#{basename}" if @task
end
if page_index == 0
basename = "rss.xml"
rss = _render_rss(@cur_node, pages)
if Fs.write_data_if_modified("#{@cur_node.path}/#{basename}", rss.to_xml)
@task.log "#{@cur_node.url}#{basename}" if @task
end
end
next_page_index = page_index + 1
end
cleanup_index_files(next_page_index)
true
ensure
head :no_content
end
end
| 27.421429 | 118 | 0.660589 |
62a3325a642ada8e75a4bbefe6bee548d3701cf1 | 2,208 | require 'digest'
module Locomotive
module PageContentHelper
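    # Builds the sections content hash for +model+ (a page or a site), seeding
    # any missing section with the default content from its definition and
    # assigning every block a positional id.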
def sections_content(model, sections, definitions)
source = model.respond_to?(:title) ? :page : :site
content = model.sections_content || {}
(sections[:top] + sections[:bottom]).each do |attributes|
next if attributes[:source] != source
key, type = attributes[:key], attributes[:type]
# no content yet? take the default one from the section definition
if content[key].blank?
definition = definitions.find { |definition| definition['type'] == type } || {}
content[key] = definition['default'] || { 'settings' => {}, 'blocks' => [] }
else
content[key]['settings'] ||= {}
content[key]['blocks'] ||= []
end
content[key]['id'] = attributes[:id] # FIXME: attributes[:id] => section domId
# reset block id
content[key]['blocks'] = (content[key]['blocks'] || []).each_with_index.map do |block, index|
block['id'] = index.to_s
block
end
end
content
end
def sections_dropzone_content(page)
(page.sections_dropzone_content || {}).each_with_index.map do |section, index|
section['id'] = "dropzone-#{index}"
# FIXME: sections content deployed with Wagon might have no blocks (nil)
section['blocks'] = (section['blocks'] || []).each_with_index.map do |block, _index|
block['id'] = _index.to_s
block
end
section
end
end
def sections_by_id(sections, page)
{}.tap do |ids|
(sections[:top] + sections[:bottom]).each do |attributes|
attributes[:uuid] = Digest::MD5.hexdigest(attributes[:id])
ids[attributes[:uuid]] = attributes
end
(page.sections_dropzone_content || {}).each_with_index do |section, index|
id = "dropzone-#{index}"
uuid = Digest::MD5.hexdigest(id)
section[:uuid] = uuid
ids[uuid] = {
uuid: uuid,
id: "dropzone-#{index}",
type: section['type'],
source: 'dropzone'
}
end
end
end
end
end
| 28.675325 | 101 | 0.558877 |
f7b3e2449bf93cf7b3a7692863c1b277cdf6b59d | 4,869 |
require 'json'
require 'chef/azure/heartbeat'
require 'chef/azure/status'
require 'chef/config'
module ChefAzure
module Shared
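    # Returns the extension root with the highest version number. For example
    # (illustrative): given /var/lib/waagent/Chef.Bootstrap.WindowsAzure.LinuxChefClient-1207.12.3.0
    # and a sibling directory ending in -1210.12.3.1 on disk, the latter path
    # is returned.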
def find_highest_extension_version(extension_root)
#Get the latest version extension root. Required in case of extension update
highest_version_extension = ""
if windows?
# Path format: C:\Packages\Plugins\Chef.Bootstrap.WindowsAzure.ChefClient\1205.12.2.1
split_path = extension_root.split("/")
version = split_path.last.gsub(".","").to_i
root_path = extension_root.gsub(split_path.last, "")
Dir.entries(root_path).each do |d|
if d.split(".").size > 1
d_version = d.gsub(".","").to_i
if d_version >= version
version = d_version
highest_version_extension = root_path + d
end
end
end
else
# Path format: /var/lib/waagent/Chef.Bootstrap.WindowsAzure.LinuxChefClient-1207.12.3.0
root_path = extension_root.split("-")
version = root_path.last.gsub(".","").to_i
Dir.glob(root_path.first + "*").each do |d|
if d.split("-").size > 1
d_version = d.split("-").last.gsub(".","").to_i
if d_version >= version
version = d_version
highest_version_extension = d
end
end
end
end
highest_version_extension
end
def windows?
if RUBY_PLATFORM =~ /mswin|mingw|windows/
true
else
false
end
end
def bootstrap_directory
if windows?
"#{ENV['SYSTEMDRIVE']}/chef"
else
"/etc/chef"
end
end
def chef_bin_path
if windows?
"C:\\opscode\\chef\\bin;C:\\opscode\\chef\\embedded\\bin"
else
"/opt/chef/bin:/opt/chef/embedded/bin"
end
end
def append_to_path(path)
if windows?
ENV["PATH"] = "#{path};#{ENV["PATH"]}"
else
ENV["PATH"] = "#{path}:#{ENV["PATH"]}"
end
end
def chef_config
@chef_config ||=
begin
Chef::Config.from_file("#{bootstrap_directory}/client.rb")
Chef::Config
end
end
def handler_settings_file
@handler_settings_file ||=
begin
files = Dir.glob("#{File.expand_path(@azure_config_folder)}/*.settings").sort
if files and not files.empty?
files.last
else
error_message = "Configuration error. Azure chef extension Settings file missing."
Chef::Log.error error_message
report_status_to_azure error_message, "error"
@exit_code = 1
raise error_message
end
end
end
end
module Config
def read_config(chef_extension_root)
Chef::Log.info "Loading Handler environment..."
# Load environment from chef_extension_root/HandlerEnvironment.json
handler_env = JSON.parse(File.read("#{chef_extension_root}/HandlerEnvironment.json"))
azure_heart_beat_file = handler_env[0]["handlerEnvironment"]["heartbeatFile"]
azure_status_folder = handler_env[0]["handlerEnvironment"]["statusFolder"]
azure_plugin_log_location = handler_env[0]["handlerEnvironment"]["logFolder"]
azure_config_folder = handler_env[0]["handlerEnvironment"]["configFolder"]
Chef::Log.info "#{azure_config_folder} --> #{azure_status_folder} --> #{azure_heart_beat_file} --> #{azure_plugin_log_location}"
# Get name of status file by finding the latest sequence number from runtime settings file
settings_file_name = Dir.glob("#{azure_config_folder}/*.settings".gsub(/\\/,'/')).sort.last
sequence = File.basename(settings_file_name, ".settings")
azure_status_file = azure_status_folder + "/" + sequence + ".status"
Chef::Log.info "Status file name: #{azure_status_file}"
# return configs read
[ azure_heart_beat_file, azure_status_folder,
azure_plugin_log_location, azure_config_folder,
azure_status_file
]
end
end
module Reporting
def load_azure_env
@azure_heart_beat_file, @azure_status_folder, @azure_plugin_log_location, @azure_config_folder, @azure_status_file = read_config(@chef_extension_root)
end
def report_heart_beat_to_azure(status, code, message)
# update @azure_heart_beat_file
Chef::Log.info "Reporting heartbeat..."
Chef::Log.info "Reporting heartbeat... Status: #{status}, code: #{code}, message: #{message} "
AzureHeartBeat.update(@azure_heart_beat_file, status, code, message)
end
def report_status_to_azure (message, status_type)
AzureExtensionStatus.log(@azure_status_file, message, status_type)
end
end
end | 33.8125 | 157 | 0.61984 |
4a73658e02043e14ec299e105b65b725341e110b | 1,106 | require 'spec_helper'
describe 'ssh_authorized_keys' do
context 'supported operating systems' do
['Debian', 'RedHat'].each do |osfamily|
describe "ssh_authorized_keys class without any parameters on #{osfamily}" do
let(:params) {{ }}
let(:facts) {{
:osfamily => osfamily,
}}
it { should compile.with_all_deps }
it { should contain_class('ssh_authorized_keys::params') }
it { should contain_class('ssh_authorized_keys::install').that_comes_before('ssh_authorized_keys::config') }
it { should contain_class('ssh_authorized_keys::config') }
it { should contain_class('ssh_authorized_keys::service').that_subscribes_to('ssh_authorized_keys::config') }
end
end
end
context 'unsupported operating system' do
describe 'ssh_authorized_keys class without any parameters on Solaris/Nexenta' do
let(:facts) {{
:osfamily => 'Solaris',
:operatingsystem => 'Nexenta',
}}
it { expect { should }.to raise_error(Puppet::Error, /Nexenta not supported/) }
end
end
end
| 32.529412 | 117 | 0.658228 |
2899e8881cfc50321f5d2def7a12dd045a688ade | 317 | #
# Cookbook Name:: basehttploadbalancer
# Attributes:: default
#
# Copyright 2016, ONS
#
# All rights reserved - Do Not Redistribute
#
default['basehttploadbalancer']['balancing_method'] = 'least_conn'
default['basehttploadbalancer']['cluster_name'] = ''
default['basehttploadbalancer']['origin_servers'] = []
| 26.416667 | 66 | 0.731861 |
91d7b22dcb41acb19eee051d1aa90a08ad63d1cd | 3,715 | # frozen_string_literal: true
describe API::V2::CoinMarketCap::Orderbook, type: :request do
describe 'GET /api/v2/coinmarketcap/orderbook/:market_pair' do
before do
create_list(:order_bid, 5, :btcusd)
create_list(:order_bid, 5, :btcusd, price: 2)
create_list(:order_ask, 5, :btcusd)
create_list(:order_ask, 5, :btcusd, price: 3)
end
let(:asks) { [["1.0", "5.0"], ["3.0", "5.0"]] }
let(:bids) { [["2.0", "5.0"], ["1.0", "5.0"]] }
context 'valid market param' do
it 'sorts asks and bids from highest to lowest' do
get "/api/v2/coinmarketcap/orderbook/BTC_USD"
expect(response).to be_successful
result = JSON.parse(response.body)
expect(result['asks'].size).to eq 2
expect(result['bids'].size).to eq 2
expect(result['asks']).to eq asks
expect(result['bids']).to eq bids
end
context 'with depth param' do
before do
create_list(:order_bid, 5, :btcusd)
create_list(:order_bid, 5, :btcusd, price: 4.1)
create_list(:order_ask, 5, :btcusd)
create_list(:order_ask, 5, :btcusd, price: 12.2)
end
it 'get asks and bids with depth param' do
get '/api/v2/coinmarketcap/orderbook/BTC_USD', params: { depth: 2 }
expect(response).to be_successful
result = JSON.parse(response.body)
expect(result['asks'].size).to eq 1
expect(result['bids'].size).to eq 1
end
it 'get asks and bids with depth param' do
get '/api/v2/coinmarketcap/orderbook/BTC_USD', params: { depth: 4 }
expect(response).to be_successful
result = JSON.parse(response.body)
expect(result['asks'].size).to eq 2
expect(result['bids'].size).to eq 2
end
it 'get asks and bids with depth param' do
get '/api/v2/coinmarketcap/orderbook/BTC_USD', params: { depth: 1 }
expect(response).to be_successful
result = JSON.parse(response.body)
expect(result['asks'].size).to eq 0
expect(result['bids'].size).to eq 0
end
it 'get asks and bids with depth param' do
get '/api/v2/coinmarketcap/orderbook/BTC_USD', params: { depth: 3 }
expect(response).to be_successful
result = JSON.parse(response.body)
expect(result['asks'].size).to eq 1
expect(result['bids'].size).to eq 1
end
it 'get asks and bids with depth param for all orderbook' do
get '/api/v2/coinmarketcap/orderbook/BTC_USD', params: { depth: 0 }
expect(response).to be_successful
result = JSON.parse(response.body)
expect(result['asks'].size).to eq 3
expect(result['bids'].size).to eq 3
end
context 'invalid depth params' do
          it 'should return an error' do
get '/api/v2/coinmarketcap/orderbook/BTC_USD', params: { depth: 'test' }
expect(response).to have_http_status 422
expect(response).to include_api_error('coinmarketcap.market_depth.non_integer_depth')
end
          it 'should return an error' do
get '/api/v2/coinmarketcap/orderbook/BTC_USD', params: { depth: 2000 }
expect(response).to have_http_status 422
expect(response).to include_api_error('coinmarketcap.market_depth.invalid_depth')
end
end
end
end
context 'invalid market param' do
it 'validates market param' do
api_get "/api/v2/coinmarketcap/orderbook/usdusd"
expect(response).to have_http_status 404
expect(response).to include_api_error('record.not_found')
end
end
end
end
| 37.525253 | 97 | 0.60646 |
08a742787d82154341ee92fcead3ea4131f0658f | 1,117 | require 'spec_helper'
describe TaxonConcept do
context "Caretta caretta CMS" do
include_context "Caretta caretta CMS"
context "LISTING" do
describe :cms_listing do
context "for family Cheloniidae" do
specify { @family.cms_listing.should == 'I/II' }
end
context "for species Caretta caretta" do
specify { @species.cms_listing.should == 'I/II' }
end
end
describe :cms_listed do
context "for family Cheloniidae" do
specify { @family.cms_listed.should be_truthy }
end
context "for species Caretta caretta" do
specify { @species.cms_listed.should be_truthy }
end
end
end
context "CASCADING LISTING" do
describe :current_cms_additions do
context "for family Cheloniidae" do
specify {
@family.current_cms_additions.size.should == 1
}
end
context "for species Caretta caretta" do
specify {
@species.current_cms_additions.size.should == 2
}
end
end
end
end
end
| 25.386364 | 59 | 0.600716 |
4acd622362930c890e95d5a75f7858c0c04752bf | 132 | json.array!(@comments) do |comment|
json.extract! comment, :id, :user_id, :com
json.url comment_url(comment, format: :json)
end
| 26.4 | 46 | 0.712121 |
5d5e0edd4a51e1fac2ea85997306986a0c91d1ed | 2,042 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2018_07_01
module Models
#
# Response for ListRoutesTable associated with the Express Route Circuits
# API.
#
class ExpressRouteCircuitsRoutesTableListResult
include MsRestAzure
# @return [Array<ExpressRouteCircuitRoutesTable>] The list of routes
# table.
attr_accessor :value
# @return [String] The URL to get the next set of results.
attr_accessor :next_link
#
# Mapper for ExpressRouteCircuitsRoutesTableListResult class as Ruby
# Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'ExpressRouteCircuitsRoutesTableListResult',
type: {
name: 'Composite',
class_name: 'ExpressRouteCircuitsRoutesTableListResult',
model_properties: {
value: {
client_side_validation: true,
required: false,
serialized_name: 'value',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'ExpressRouteCircuitRoutesTableElementType',
type: {
name: 'Composite',
class_name: 'ExpressRouteCircuitRoutesTable'
}
}
}
},
next_link: {
client_side_validation: true,
required: false,
serialized_name: 'nextLink',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 29.171429 | 83 | 0.529383 |
1da8aa546f1081db484de1543168071ae9c2053f | 2,402 | require 'spec_helper'
module Gamification
describe RewardsController do
routes { Gamification::Engine.routes }
describe "POST 'create'" do
context 'with an invalid checksum' do
let(:article) { create :article }
let(:subject) { create :user }
before do
create :gamification_goal, rewarding: article
end
before do
post 'create', redirect_url: 'http://example.org', reward: {
rewarding_type: article.class.name,
rewarding_id: article.id,
rewardable_type: subject.class.name,
rewardable_id: subject.id
},
checksum: 'tampered'
end
it 'should not create a reward' do
expect(Reward.count).to eq 0
end
it 'should respond with forbidden' do
expect(response).to be_forbidden
end
end
context 'all goals for a given rewardable' do
let(:article) { create :article }
let(:subject) { create :user }
before do
create :gamification_goal, rewarding: article
end
before do
post 'create', redirect_url: 'http://example.org', reward: {
rewarding_type: article.class.name,
rewarding_id: article.id,
rewardable_type: subject.class.name,
rewardable_id: subject.id
},
checksum: Checksum.generate([article.class.name, article.id, subject.class.name, subject.id])
end
it 'should create a reward' do
expect(Reward.count).to eq 1
end
it 'should redirect' do
expect(response).to be_redirect
end
end
context 'a single goal' do
let(:goal) { create :gamification_goal }
let(:subject) { create :user }
before do
post 'create', redirect_url: 'http://example.org', reward: {
rewarding_type: goal.class.name,
rewarding_id: goal.id,
rewardable_type: subject.class.name,
rewardable_id: subject.id,
},
checksum: Checksum.generate([goal.class.name, goal.id, subject.class.name, subject.id])
end
it 'should create a reward' do
expect(Reward.count).to eq 1
end
it 'should redirect' do
expect(response).to be_redirect
end
end
end
end
end
| 27.609195 | 103 | 0.574521 |
39d70cf9416b6ee62bd10f7aac3c551becf44510 | 258 | class CreateAlcscores < ActiveRecord::Migration[5.1]
def change
create_table :alcscores do |t|
t.integer :code
t.integer :maleco
t.integer :femaleco
t.references :alcohol, foreign_key: true
t.timestamps
end
end
end
| 19.846154 | 52 | 0.662791 |
18a165e39a9889c5b2d1920410268da14892b01a | 414 | # frozen_string_literal: true
# Environment update contract
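#
# For example (illustrative):
#
#   contract = Environment::UpdateContract.new
#   contract.call(id: 'production', environment: { description: 'Prod' }).success? # => true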
class Environment::UpdateContract < Dry::Validation::Contract
params do
required(:id).filled(:str?)
required(:environment).schema do
optional(:description).filled(:str?)
optional(:cookbook_versions).each(:hash?)
optional(:override_attributes).value(:hash?)
optional(:default_attributes).value(:hash?)
end
end
end
| 27.6 | 61 | 0.714976 |
1ab77917b138079281b3a380062efe0947c79d40 | 2,608 | # encoding: utf-8
module QyWechatApi
module Api
class User < Base
      # Create a member
      # userid     required  Employee UserID. Maps to the account on the admin console; must be unique within the enterprise. Length 1-64 characters.
      # name       required  Member name. Length 1-64 characters.
      # department optional  List of department ids the member belongs to. Note: each department can hold at most 1000 direct members.
      # position   optional  Job title. Length 0-64 characters.
      # mobile     optional  Mobile number. Must be unique within the enterprise; mobile/weixinid/email cannot all be blank at once.
      # gender     optional  Gender. gender=0 means male, =1 means female. Defaults to gender=0.
      # tel        optional  Office phone. Length 0-64 characters.
      # email      optional  Email address. Length 0-64 characters; must be unique within the enterprise.
      # weixinid   optional  WeChat ID. Must be unique within the enterprise.
      # extattr    optional  Extended attributes. These take effect only after being created in the web admin console; assignments to unknown attributes are ignored.
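      #
      # For example (illustrative; assumes +api+ is an instance of this class):
      #
      #   api.create('zhangsan', 'Zhang San', department: [1], mobile: '13800000000')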
def create(user_id, name, options={})
user = {userid: user_id}
user[:name] = name
user.merge!(options)
http_post("create", user)
end
      # Update a member
def update(user_id, options={})
user = {userid: user_id}
user.merge!(options)
http_post("update", user)
end
      # Delete a member
def delete(id)
http_get("delete", {userid: id})
end
      # Batch delete members
def batch_delete(user_ids)
http_post("batchdelete", {useridlist: user_ids})
end
      # Get a member
def get(id)
http_get("get", {userid: id})
end
def getuserinfo(code)
http_get("getuserinfo",{code: code})
end
      # Get department members
      # department_id required  Id of the department to fetch
      # fetch_child   optional  1/0: whether to recursively include members of child departments
      # status        optional  0 all employees, 1 followed members, 2 disabled members, 4 unfollowed members; status values can be combined
def simple_list(department_id, fetch_child=nil, status=nil)
params = {department_id: department_id}
params[:fetch_child] = fetch_child if not fetch_child.nil?
params[:status] = status if not status.nil?
http_get("simplelist", params)
end
      # Get department members (with details)
def full_list(department_id, fetch_child=nil, status=nil)
params = {department_id: department_id}
params[:fetch_child] = fetch_child if not fetch_child.nil?
params[:status] = status if not status.nil?
http_get("list", params)
end
      # Invite a member to follow the enterprise account
def send_invitation(user_id, tips="")
payload = {userid: user_id, invite_tips: tips}
http_post("/invite/send", payload, {waive_base_url: true})
end
      # Interface to convert a userid into an openid
def covert_to_open_id(user_id, agent_id="")
payload = {userid: user_id}
payload.merge!(agentid: agent_id) if agent_id.present?
http_post("convert_to_openid", payload)
end
      # Interface to convert an openid into a userid
def covert_to_user_id(open_id)
http_post("convert_to_userid", {openid: open_id})
end
private
def base_url
"/user"
end
end
end
end
| 26.886598 | 67 | 0.610813 |
f75443740de1b2d3c580c794d2a6ed1519a3bb03 | 5,995 | # frozen_string_literal: true
require 'test_helper'
class RssFeedTest < ActiveSupport::TestCase
before { Feedjira.logger.stubs(:warn) }
it 'should validate RSS feed' do
rss_feed = build(:rss_feed, url: 'invalid_url')
rss_feed.wont_be :valid?
rss_feed.errors.messages[:url].first.must_equal 'Invalid URL Format'
end
it 'should validate and create a RSS feed' do
rss_feed = create(:rss_feed)
rss_feed.must_be :valid?
end
it 'should parse the RSS feed and return true' do
rss_feed = create(:rss_feed, url: 'http://www.vcrlocalhost.org/feed.rss')
VCR.use_cassette('RssFeed') do
assert_difference 'rss_feed.rss_articles.count' do
rss_feed.fetch
end
end
end
  it "shouldn't parse the RSS and should set an error" do
rss_feed = create(:rss_feed, url: 'http://www.somedomain.com')
rss_feed.fetch
rss_feed.errors.wont_be_nil
end
it 'should not allow blank urls' do
rss_feed = build(:rss_feed, url: '')
rss_feed.wont_be :valid?
rss_feed.errors.messages[:url].first.must_equal 'Invalid URL Format'
end
it 'should handle time out errors' do
url = 'http://rss.roll/never_gonna_sync_you_up.rss'
rss_feed = create(:rss_feed, url: url)
rss_feed.stubs(:create_rss_articles).raises Timeout::Error.new
rss_feed.fetch
rss_feed.error.must_equal I18n.t('rss_feeds.index.timeout_error', url: url)
end
it 'should update any associated projects whenever a successful sync occurs' do
VCR.use_cassette('RssFeed') do
before = Time.current - 4.hours
rss_feed = create(:rss_feed)
project = create(:project)
project.update(updated_at: before)
project.reload.updated_at.to_i.must_equal before.to_i
create(:rss_subscription, rss_feed: rss_feed, project: project)
rss_feed.url = 'http://www.vcrlocalhost.org/feed.rss'
rss_feed.fetch
project.reload.updated_at.wont_equal before
end
end
it 'should create new rss_articles when fetch happens' do
VCR.use_cassette('RssFeed') do
before = Time.current - 4.hours
rss_feed = create(:rss_feed)
project = create(:project)
project.update(updated_at: before)
project.reload.updated_at.to_i.must_equal before.to_i
create(:rss_subscription, rss_feed: rss_feed, project: project)
rss_feed.url = 'http://www.vcrlocalhost.org/feed.rss'
rss_feed.rss_articles.must_equal []
rss_feed.fetch
rss_feed.rss_articles.count.must_equal 1
rss_feed.fetch
rss_feed.rss_articles.count.must_equal 1
end
end
it 'should honor encoding properly' do
VCR.use_cassette('RssFeed', preserve_exact_body_bytes: true) do
rss_feed = create(:rss_feed)
rss_feed.url = 'http://www.vcrlocalhost.org/feed.rss'
rss_feed.fetch
rss_feed.rss_articles.first.title.must_equal 'It will display ÀāĎĠĦž'
end
end
describe '.sync' do
it 'should sync only the feeds which next_fetch time is less than or equal to current time' do
VCR.use_cassette('RssFeed') do
before = Time.current - 4.hours
rss_feed = create(:rss_feed, url: 'http://www.vcrlocalhost.org/feed.rss', next_fetch: Time.current + 1.day)
project = create(:project)
project.update(updated_at: before)
create(:rss_subscription, rss_feed: rss_feed, project: project)
rss_feed.rss_articles.must_equal []
project.reload.updated_at.to_i.must_equal before.to_i
RssFeed.sync
rss_feed.reload.rss_articles.must_equal []
project.reload.updated_at.to_i.must_equal before.to_i
rss_feed = create(:rss_feed, url: 'http://www.vcrlocalhost.org/feed.rss')
project = create(:project)
project.update(updated_at: before)
create(:rss_subscription, rss_feed: rss_feed, project: project)
rss_feed.rss_articles.must_equal []
project.reload.updated_at.to_i.must_equal before.to_i
RssFeed.sync
rss_feed.reload.rss_articles.count.must_equal 1
project.reload.updated_at.to_i.wont_equal before.to_i
end
end
it 'should sync only subscribed feeds' do
VCR.use_cassette('RssFeed') do
before = Time.current - 4.hours
rss_feed = create(:rss_feed, url: 'http://www.vcrlocalhost.org/feed.rss', next_fetch: Time.current - 1.day)
project = create(:project)
project.update(updated_at: before)
create(:rss_subscription, rss_feed: rss_feed, project: project, deleted: true)
rss_feed.rss_articles.must_equal []
project.reload.updated_at.to_i.must_equal before.to_i
RssFeed.sync
rss_feed.reload.rss_articles.must_equal []
project.reload.updated_at.to_i.must_equal before.to_i
rss_feed = create(:rss_feed, url: 'http://www.vcrlocalhost.org/feed.rss', next_fetch: Time.current - 1.day)
project = create(:project)
project.update(updated_at: before)
create(:rss_subscription, rss_feed: rss_feed, project: project, deleted: false)
rss_feed.rss_articles.must_equal []
project.reload.updated_at.to_i.must_equal before.to_i
RssFeed.sync
rss_feed.reload.rss_articles.count.must_equal 1
project.reload.updated_at.to_i.wont_equal before.to_i
end
end
end
describe '#fetch' do
it 'should remove duplicate new feeds before saving into rss_articles table' do
VCR.use_cassette('RssFeedDuplicate', allow_playback_repeats: true) do
rss_feed = create(:rss_feed, url: 'http://www.vcrlocalhost.org/duplicate-feeds.rss')
create(:rss_subscription, rss_feed: rss_feed)
rss_feed.rss_articles.must_equal []
rss_feed.fetch
rss_feed.rss_articles.count.must_equal 1
rss_feed.rss_articles.delete_all
articles = rss_feed.send(:new_rss_article_items).map { |item| RssArticle.from_item(item) }
articles.count.must_equal 2
RssArticle.remove_duplicates(articles).count.must_equal 1
end
end
end
end
| 38.429487 | 115 | 0.698249 |
7a09e0e10483ff2f31d650a16f3f36ec73cfb092 | 12,192 | # Copyright (c) 2014 National ICT Australia Limited (NICTA).
# This software may be used and distributed solely under the terms of the MIT license (License).
# You should find a copy of the License in LICENSE.TXT or at http://opensource.org/licenses/MIT.
# By downloading or using this software you accept the terms and the liability disclaimer in the License
require 'hashie'
require 'omf_common/auth/assertion'
module OmfEc
class Runner
include Hashie
attr_reader :oedl_path
def initialize
@gem_version = OmfEc::VERSION
@oml_enabled = false
@executable_name = File.basename($PROGRAM_NAME)
@oml_opts = {
appName: @executable_name,
afterParse: lambda { |o| parse_cmd_opts }
}
# Default configuration options
@config_opts = Mash.new(
environment: 'development',
communication: { url: "amqp://localhost" },
logging: {
level: { default: 'debug' },
appenders: {
stdout: {
level: :info,
date_pattern: '%H:%M:%S',
pattern: '%d %5l %c{2}: %m\n'
},
rolling_file: {
level: :debug,
log_dir: '/var/tmp',
size: 1024*10, # max 510k of each log file
keep: 1, # keep 1 log in total
date_pattern: '%F %T %z',
pattern: '[%d] %-5l %c: %m\n'
}
}
}
)
@cmd_opts = Mash.new
@argv = ARGV.dup
end
def oml_init
begin
@oml_enabled = OML4R::init(ARGV, @oml_opts) do |op|
op.banner = "OMF Experiment Controller version '#{@gem_version}'\n"
op.banner += "Usage: #{@executable_name} [options] path_to_oedl_file [-- --experiment_property value]"
op.on("-u", "--uri ADDRESS", "URI for communication layer [amqp://localhost]") do |uri|
@cmd_opts[:uri] = uri
remove_cmd_opts_from_argv("-u", "--uri", uri)
end
op.on("-c CONFIGFILE", "Configuration File") do |file|
@cmd_opts[:config_file] = file
remove_cmd_opts_from_argv("-c", file)
end
op.on("--log_config CONFIGFILE", "Logging Configuration File") do |file|
@cmd_opts[:logging_configfile] = file
remove_cmd_opts_from_argv("--log_config", file)
end
op.on("-e ENVIRONMENT", "Environment (development, production ...) [#{@config_opts[:environment]}]") do |e|
@cmd_opts[:environment] = e
remove_cmd_opts_from_argv("-e", e)
end
op.on("--root_cert_dir DIRECTORY", "Directory containing root certificates") do |dir|
@cmd_opts[:root_cert_dir] = dir
remove_cmd_opts_from_argv("--root_cert_dir", dir)
end
op.on("--cert CERTIFICATE", "Your certificate") do |cert|
@cmd_opts[:cert] = cert
remove_cmd_opts_from_argv("--cert", cert)
end
op.on("--key KEY", "Your private key") do |key|
@cmd_opts[:key] = key
remove_cmd_opts_from_argv("--key", key)
end
op.on("--assertion PATH_TO_ASSERTION_FILE", "Assertion") do |assertion|
@cmd_opts[:assertion] = assertion
remove_cmd_opts_from_argv("--assertion", assertion)
end
op.on("--name", "--experiment EXPERIMENT_NAME", "Experiment name") do |e_name|
@cmd_opts[:experiment_name] = e_name
OmfEc.experiment.name = e_name
remove_cmd_opts_from_argv("--name", "--experiment", e_name)
end
op.on("--slice SLICE_NAME", "Slice name [optional]") do |slice|
@cmd_opts[:slice] = slice
OmfEc.experiment.sliceID = slice
remove_cmd_opts_from_argv("--slice", slice)
end
op.on("--job-service URL", "URL to the JobService [optional]") do |url|
OmfEc.experiment.js_url = url
remove_cmd_opts_from_argv("--job-service", url)
end
op.on("--job-url URL", "URL to the Job for this experiment trial [optional]") do |url|
OmfEc.experiment.job_url = url
remove_cmd_opts_from_argv("--job-url", url)
end
op.on("--slice-service URL", "URL to the SliceService [optional]") do |url|
OmfEc.experiment.ss_url = url
remove_cmd_opts_from_argv("--slice-service", url)
end
op.on("--oml_uri URI", "URI for the OML data collection of experiment applications") do |uri|
@cmd_opts[:oml_uri] = uri
remove_cmd_opts_from_argv("--oml_uri", uri)
end
op.on("--inst_oml_uri URI", "EC Instrumentation: OML URI to use") do |uri|
@cmd_opts[:inst_oml_uri] = uri
remove_cmd_opts_from_argv("--inst_oml_uri", uri)
end
op.on("--inst_oml_id ID", "EC Instrumentation: OML ID to use") do |id|
@cmd_opts[:inst_oml_id] = id
remove_cmd_opts_from_argv("--inst_oml_id", id)
end
op.on("--inst_oml_domain DOMAIN", "EC Instrumentation: OML Domain to use") do |domain|
@cmd_opts[:inst_oml_domain] = domain
remove_cmd_opts_from_argv("--inst_oml_domain", domain)
end
op.on("-g", "--show-graph", "Parse graph definition to construct graph information in log output") do
@cmd_opts['show-graph'] = true
remove_cmd_opts_from_argv("--show-graph")
end
op.on("-v", "--version", "Show version") do
puts "OMF Experiment Controller version '#{@gem_version}'"
exit
end
op.on("-d", "--debug", "Debug mode (Set logging level in Stdout to :debug)") do
@cmd_opts[:debug] = true
remove_cmd_opts_from_argv("-d", "--debug")
end
op.on("-h", "--help", "Show this message") do
puts op
exit
end
end
rescue OML4R::MissingArgumentException => e
puts "Warning: #{e.message} to instrument, so it will run without instrumentation. (see --oml-help)"
rescue => e
puts e.message
puts e.backtrace.join("\n")
exit(1)
end
end
def parse_cmd_opts
parse_config_file
# uri in command line is short for communication/url
uri = @cmd_opts.delete(:uri)
@config_opts[:communication][:url] = uri if uri
@config_opts[:communication][:auth] = { authenticate: true } if @cmd_opts[:cert]
@config_opts.merge!(@cmd_opts)
if @config_opts[:oml_uri]
# Only change default if they are not set in config file
@config_opts[:logging][:appenders][:oml4r] ||= {
level: :debug, # Log everything to OML
appName: 'omf_ec',
domain: OmfEc.experiment.id,
collect: @config_opts[:oml_uri]
}
end
end
def parse_config_file
if (config_file = @cmd_opts.delete(:config_file))
if File.exist?(config_file)
@config_opts.merge!(OmfCommon.load_yaml(config_file, erb_process: true))
else
puts "Config file '#{config_file}' doesn't exist"
exit(1)
end
end
end
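    # Removes the given option flags and values from our working copy of ARGV
    # so that only the OEDL script path and experiment properties remain.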
def remove_cmd_opts_from_argv(*args)
args.each { |v| @argv.slice!(@argv.index(v)) if @argv.index(v) }
end
def setup_experiment
OmfEc.experiment.oml_uri = @config_opts[:oml_uri] if @config_opts[:oml_uri]
OmfEc.experiment.show_graph = @config_opts['show-graph']
# Parse assertion JSON if provided
#
# It is specified in config file as JSON string but
# OmfCommon.load_yaml will turn it to hash (mash)
#
# OR provided via command line as path to the assertion JSON file
#
if @config_opts['assertion']
case @config_opts['assertion']
when Hash
assert = @config_opts['assertion'].to_json
when String
assert = File.read(File.expand_path(@config_opts['assertion']))
end
OmfEc.experiment.assertion = OmfCommon::Auth::Assertion.parse(assert)
end
# Instrument EC
if @config_opts[:inst_oml_uri] && @config_opts[:inst_oml_id] && @config_opts[:inst_oml_domain]
instrument_ec = OML4R::init(nil, {
collect: @config_opts[:inst_oml_uri],
nodeID: @config_opts[:inst_oml_id],
domain: @config_opts[:inst_oml_domain],
appName: @executable_name
})
OmfCommon::Measure.enable if instrument_ec
end
remove_cmd_opts_from_argv("exec")
index_of_dividing_hyphen = @argv.index("--")
@argv[0..index_of_dividing_hyphen || -1].in_groups_of(2) do |arg_g|
if arg_g[0] =~ /^--(.+)/ && !arg_g[1].nil?
remove_cmd_opts_from_argv(*arg_g)
end
end
@oedl_path = @argv[0] && File.expand_path(@argv[0])
if @oedl_path.nil? || !File.exist?(@oedl_path)
puts "Experiment script '#{@argv[0]}' not found"
exit(1)
end
@argv.slice!(0)
# User-provided command line values for Experiment Properties cannot be
# set here as the properties have not been defined yet by the experiment.
# Thus just pass them to the experiment, which will be responsible
# for setting them later
properties = {}
if index_of_dividing_hyphen
remove_cmd_opts_from_argv("--")
exp_properties = @argv
exp_properties.in_groups_of(2) do |p|
unless p[0] =~ /^--(.+)/ && !p[1].nil?
puts "Malformatted properties '#{exp_properties.join(' ')}'"
exit(1)
else
properties[$1.to_sym] = p[1].ducktype
remove_cmd_opts_from_argv(*p)
end
end
OmfEc.experiment.cmdline_properties = properties
end
end
def setup_logging
OmfCommon.load_logging_config(@config_opts[:logging_configfile])
if @config_opts[:debug]
Logging.logger.root.level = 'debug'
stdout_appender = Logging.logger.root.appenders.find { |a| a.class == Logging::Appenders::Stdout }
stdout_appender.level = 'debug' if stdout_appender
else
Logging.consolidate 'OmfCommon', 'OmfRc'
end
end
def load_experiment
begin
OmfCommon.init(@config_opts.delete(:environment), @config_opts) do |el|
setup_logging
OmfCommon.comm.on_connected do |comm|
info "OMF Experiment Controller #{OmfEc::VERSION} - Start"
info "Connected using #{comm.conn_info}"
info "Execute: #{@oedl_path}"
info "Properties: #{OmfEc.experiment.cmdline_properties}"
if @config_opts[:communication][:auth] && @config_opts[:communication][:auth][:authenticate]
ec_cert = OmfCommon.load_credentials(
root_cert_dir: @config_opts[:root_cert_dir],
entity_cert: @config_opts[:cert],
entity_key: @config_opts[:key]
)
ec_cert.resource_id = OmfCommon.comm.local_address
OmfCommon::Auth::CertificateStore.instance.register(ec_cert)
end
OmfEc.experiment.log_metadata("ec_version", "#{OmfEc::VERSION}")
OmfEc.experiment.log_metadata("exp_path", @oedl_path)
OmfEc.experiment.log_metadata("ec_pid", "#{Process.pid}")
OmfEc.experiment.archive_oedl(@oedl_path)
OmfEc.experiment.create_job
begin
load @oedl_path
OmfEc::Experiment.start
rescue => e
OmfEc.experiment.log_metadata("state", "error")
error e.message
error e.backtrace.join("\n")
end
trap(:TERM) { OmfEc::Experiment.done }
trap(:INT) { OmfEc::Experiment.done }
end
end
rescue => e
logger.fatal e.message
logger.fatal e.backtrace.join("\n")
puts "Experiment controller exits unexpectedly"
puts e
exit(1)
end
end
def init
oml_init
setup_experiment
end
def run
load_experiment
end
end
end
| 34.055866 | 117 | 0.582677 |
1107c85fa04f0dfbcf414dfa95454ee02d5d97b5 | 911 | require 'rails_helper'
RSpec.describe 'Studio', type: :model do
it 'gets created with valid params' do
expect do
Studio.create(city: 'Seattle', address: 'Studio1')
end.to change(Studio.all, :count).by(1)
end
  it 'does not get created with blank city input' do
expect do
Studio.create(city: '', address: 'Studio1')
end.to change(Studio.all, :count).by(0)
end
  it 'does not get created with too long city input' do
    expect do
      Studio.create(city: 'S' * 51, address: 'Studio1')
end.to change(Studio.all, :count).by(0)
end
  it 'does not get created with blank address input' do
expect do
Studio.create(city: 'Seattle', address: '')
end.to change(Studio.all, :count).by(0)
end
  it 'does not get created with too long address input' do
expect do
Studio.create(city: 'Seattle', address: 'S' * 201)
end.to change(Studio.all, :count).by(0)
end
end
| 26.794118 | 56 | 0.652031 |