hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
11116af7dbbb0885984166e0f28144f291cb2f07 | 235 | # frozen_string_literal: true
# Factory for SentNotification test records: a notification tied to a
# project, addressed to the project's creator, attached to a freshly
# created issue in that project, and keyed with a new reply key.
FactoryBot.define do
  factory :sent_notification do
    project
    recipient { project.creator }
    noteable { create(:issue, project: project) }
    reply_key { SentNotification.reply_key }
  end
end
| 21.363636 | 49 | 0.731915 |
b9c4af3d6276618383a76fc9a5adae50e59a0028 | 2,907 | # encoding: utf-8
module Github
  # Repository Starring is a feature that lets users bookmark repositories.
  # Stars are shown next to repositories to show an approximate level of
  # interest. Stars have no effect on notifications or the activity feed.
  class Repos::Starring < API

    # List stargazers of a repository.
    #
    # = Examples
    #  github = Github.new :user => 'user-name', :repo => 'repo-name'
    #  github.repos.starring.list
    #  github.repos.starring.list { |star| ... }
    #
    # Yields each stargazer when a block is given; otherwise returns the
    # raw response.
    def list(user_name, repo_name, params={})
      _update_user_repo_params(user_name, repo_name)
      _validate_user_repo_params(user, repo) unless user? && repo?
      normalize! params

      response = get_request("/repos/#{user}/#{repo}/stargazers", params)
      return response unless block_given?
      response.each { |el| yield el }
    end
    alias :all :list

    # List repos being starred by a user.
    #
    # = Examples
    #  github = Github.new
    #  github.repos.starring.starred :user => 'user-name'
    #
    # List repos being starred by the authenticated user
    # (when no :user param is supplied):
    #
    # = Examples
    #  github = Github.new :oauth_token => '...'
    #  github.repos.starring.starred
    #
    def starred(*args)
      params = args.extract_options!
      normalize! params

      response = if (user_name = params.delete('user'))
        get_request("/users/#{user_name}/starred", params)
      else
        get_request("/user/starred", params)
      end
      return response unless block_given?
      response.each { |el| yield el }
    end

    # Check if you are starring a repository.
    #
    # Returns <tt>true</tt> if this repo is starred by you, <tt>false</tt> otherwise.
    #
    # = Examples
    #  github = Github.new
    #  github.repos.starring.starring? 'user-name', 'repo-name'
    #
    # Implemented via GET /user/starred/:user/:repo, which raises
    # Github::Error::NotFound when the repository is not starred.
    def starring?(user_name, repo_name, params={})
      _validate_presence_of user_name, repo_name
      normalize! params
      get_request("/user/starred/#{user_name}/#{repo_name}", params)
      true
    rescue Github::Error::NotFound
      false
    end

    # Star a repository.
    #
    # You need to be authenticated to star a repository.
    #
    # = Examples
    #  github = Github.new
    #  github.repos.starring.star 'user-name', 'repo-name'
    #
    def star(user_name, repo_name, params={})
      _validate_presence_of user_name, repo_name
      normalize! params
      put_request("/user/starred/#{user_name}/#{repo_name}", params)
    end

    # Unstar a repository.
    #
    # You need to be authenticated to unstar a repository.
    #
    # = Examples
    #  github = Github.new
    #  github.repos.starring.unstar 'user-name', 'repo-name'
    #
    def unstar(user_name, repo_name, params={})
      _validate_presence_of user_name, repo_name
      normalize! params
      delete_request("/user/starred/#{user_name}/#{repo_name}", params)
    end

  end # Repos::Starring
end # Github
| 29.663265 | 145 | 0.639147 |
ff7a0e0e6a23c9687947a854676ab636aa614b77 | 2,342 | # -------------------------------------------------------------------------- #
# Copyright 2002-2018, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
require 'OpenNebulaJSON/JSONUtils'

module OpenNebulaJSON
  # JSON-facing wrapper around OpenNebula::Acl: accepts Sunstone-style
  # JSON templates and translates them into the underlying calls.
  class AclJSON < OpenNebula::Acl
    include JSONUtils

    # Allocates a new ACL rule described by the 'acl' string inside
    # template_json. Returns an OpenNebula::Error when parsing fails.
    def create(template_json)
      rule_string = parse_json(template_json, 'acl')
      parsed = Acl.parse_rule(rule_string)
      return parsed if OpenNebula.is_error?(parsed)

      self.allocate(parsed[0], parsed[1], parsed[2], parsed[3])
    end

    # ACL resources expose no extra actions, so any requested action is
    # answered with an Error naming it.
    def perform_action(template_json)
      action_hash = parse_json(template_json, 'action')
      return action_hash if OpenNebula.is_error?(action_hash)

      OpenNebula::Error.new(
        "#{action_hash['perform']} action not  available for this resource")
    end
  end
end
| 45.038462 | 78 | 0.464133 |
b9412d389564cb5ab619777be68acefb9572020d | 6,197 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# Source: google/devtools/artifactregistry/v1beta2/service.proto for package 'Google.Cloud.ArtifactRegistry.V1beta2'
# Original file comments:
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'grpc'
require 'google/devtools/artifactregistry/v1beta2/service_pb'

# NOTE(review): this file is generated from service.proto ("DO NOT EDIT") —
# change the .proto and regenerate instead of editing these definitions.
module Google
  module Cloud
    module ArtifactRegistry
      module V1beta2
        module ArtifactRegistry
          # The Artifact Registry API service.
          #
          # Artifact Registry is an artifact management system for storing artifacts
          # from different package management systems.
          #
          # The resources managed by this API are:
          #
          # * Repositories, which group packages and their data.
          # * Packages, which group versions and their tags.
          # * Versions, which are specific forms of a package.
          # * Tags, which represent alternative names for versions.
          # * Files, which contain content and are optionally associated with a Package
          #   or Version.
          class Service
            include GRPC::GenericService

            self.marshal_class_method = :encode
            self.unmarshal_class_method = :decode
            self.service_name = 'google.devtools.artifactregistry.v1beta2.ArtifactRegistry'

            # Lists repositories.
            rpc :ListRepositories, ::Google::Cloud::ArtifactRegistry::V1beta2::ListRepositoriesRequest, ::Google::Cloud::ArtifactRegistry::V1beta2::ListRepositoriesResponse
            # Gets a repository.
            rpc :GetRepository, ::Google::Cloud::ArtifactRegistry::V1beta2::GetRepositoryRequest, ::Google::Cloud::ArtifactRegistry::V1beta2::Repository
            # Creates a repository. The returned Operation will finish once the
            # repository has been created. Its response will be the created Repository.
            rpc :CreateRepository, ::Google::Cloud::ArtifactRegistry::V1beta2::CreateRepositoryRequest, ::Google::Longrunning::Operation
            # Updates a repository.
            rpc :UpdateRepository, ::Google::Cloud::ArtifactRegistry::V1beta2::UpdateRepositoryRequest, ::Google::Cloud::ArtifactRegistry::V1beta2::Repository
            # Deletes a repository and all of its contents. The returned Operation will
            # finish once the repository has been deleted. It will not have any Operation
            # metadata and will return a google.protobuf.Empty response.
            rpc :DeleteRepository, ::Google::Cloud::ArtifactRegistry::V1beta2::DeleteRepositoryRequest, ::Google::Longrunning::Operation
            # Lists packages.
            rpc :ListPackages, ::Google::Cloud::ArtifactRegistry::V1beta2::ListPackagesRequest, ::Google::Cloud::ArtifactRegistry::V1beta2::ListPackagesResponse
            # Gets a package.
            rpc :GetPackage, ::Google::Cloud::ArtifactRegistry::V1beta2::GetPackageRequest, ::Google::Cloud::ArtifactRegistry::V1beta2::Package
            # Deletes a package and all of its versions and tags. The returned operation
            # will complete once the package has been deleted.
            rpc :DeletePackage, ::Google::Cloud::ArtifactRegistry::V1beta2::DeletePackageRequest, ::Google::Longrunning::Operation
            # Lists versions.
            rpc :ListVersions, ::Google::Cloud::ArtifactRegistry::V1beta2::ListVersionsRequest, ::Google::Cloud::ArtifactRegistry::V1beta2::ListVersionsResponse
            # Gets a version
            rpc :GetVersion, ::Google::Cloud::ArtifactRegistry::V1beta2::GetVersionRequest, ::Google::Cloud::ArtifactRegistry::V1beta2::Version
            # Deletes a version and all of its content. The returned operation will
            # complete once the version has been deleted.
            rpc :DeleteVersion, ::Google::Cloud::ArtifactRegistry::V1beta2::DeleteVersionRequest, ::Google::Longrunning::Operation
            # Lists files.
            rpc :ListFiles, ::Google::Cloud::ArtifactRegistry::V1beta2::ListFilesRequest, ::Google::Cloud::ArtifactRegistry::V1beta2::ListFilesResponse
            # Gets a file.
            rpc :GetFile, ::Google::Cloud::ArtifactRegistry::V1beta2::GetFileRequest, ::Google::Cloud::ArtifactRegistry::V1beta2::File
            # Lists tags.
            rpc :ListTags, ::Google::Cloud::ArtifactRegistry::V1beta2::ListTagsRequest, ::Google::Cloud::ArtifactRegistry::V1beta2::ListTagsResponse
            # Gets a tag.
            rpc :GetTag, ::Google::Cloud::ArtifactRegistry::V1beta2::GetTagRequest, ::Google::Cloud::ArtifactRegistry::V1beta2::Tag
            # Creates a tag.
            rpc :CreateTag, ::Google::Cloud::ArtifactRegistry::V1beta2::CreateTagRequest, ::Google::Cloud::ArtifactRegistry::V1beta2::Tag
            # Updates a tag.
            rpc :UpdateTag, ::Google::Cloud::ArtifactRegistry::V1beta2::UpdateTagRequest, ::Google::Cloud::ArtifactRegistry::V1beta2::Tag
            # Deletes a tag.
            rpc :DeleteTag, ::Google::Cloud::ArtifactRegistry::V1beta2::DeleteTagRequest, ::Google::Protobuf::Empty
            # Updates the IAM policy for a given resource.
            rpc :SetIamPolicy, ::Google::Iam::V1::SetIamPolicyRequest, ::Google::Iam::V1::Policy
            # Gets the IAM policy for a given resource.
            rpc :GetIamPolicy, ::Google::Iam::V1::GetIamPolicyRequest, ::Google::Iam::V1::Policy
            # Tests if the caller has a list of permissions on a resource.
            rpc :TestIamPermissions, ::Google::Iam::V1::TestIamPermissionsRequest, ::Google::Iam::V1::TestIamPermissionsResponse
          end

          Stub = Service.rpc_stub_class
        end
      end
    end
  end
end
| 60.165049 | 172 | 0.685493 |
ff49cae3cb1755bc48094acd935f66fd7559d6d1 | 904 | require "spec_helper"
describe Bunny::Channel, "#prefetch" do
  # Connection to the local test broker; closed after each example so
  # the suite does not leak connections.
  let(:connection) do
    c = Bunny.new(:user => "bunny_gem", :password => "bunny_password", :vhost => "bunny_testbed")
    c.start
    c
  end

  after :each do
    connection.close
  end

  context "with a positive integer < 65535" do
    it "sets that prefetch level via basic.qos" do
      ch = connection.create_channel
      expect(ch.prefetch(10)).to be_instance_of(AMQ::Protocol::Basic::QosOk)
    end
  end

  # prefetch-count is a 16-bit field in the AMQP 0.9.1 basic.qos frame,
  # hence the 65535 upper bound enforced by the client.
  context "with a positive integer > 65535" do
    it "raises an ArgumentError" do
      ch = connection.create_channel
      expect {
        ch.prefetch(100_000)
      }.to raise_error(ArgumentError)
    end
  end

  context "with a negative integer" do
    it "raises an ArgumentError" do
      ch = connection.create_channel
      expect {
        ch.prefetch(-2)
      }.to raise_error(ArgumentError)
    end
  end
end
| 23.179487 | 97 | 0.655973 |
62d660a6b3f7d4ed94fbe86ce90be611eceee6e5 | 260 | module Tasuku::Concerns
require 'tasuku/concerns/controllers'
require 'tasuku/concerns/models'
require 'tasuku/concerns/verifiable'
require 'tasuku/concerns/illustratable'
require 'tasuku/concerns/redirectable'
require 'tasuku/concerns/author'
end
| 28.888889 | 41 | 0.796154 |
912e99d5735e3ee96474061e79577b7eb3d1a4d9 | 144 | class Post < ApplicationRecord
belongs_to :user
validates :title,:content,:image, presence: true
mount_uploader :image, ImageUploader
end
| 24 | 50 | 0.784722 |
385b738bf7774b6b003870bbffc9235752ecf25f | 596 | require_relative '../../../spec_helper'
describe Web::Controllers::Players::Show do
  let(:action) { Web::Controllers::Players::Show.new }
  let(:params) { Hash[id: 1] }
  let(:repository) { PlayerRepository.new }

  # Seed one player so the show action has a record to expose.
  before do
    @player = repository.create(id: 1, first_name: 'George', last_name: 'Abitbol', email:'[email protected]')
  end

  after do
    repository.clear
  end

  it 'is successful' do
    response = action.call(params)
    # Hanami actions return a Rack triplet; index 0 is the status code.
    response[0].must_equal 200
  end

  it 'exposes the player' do
    action.call(params)
    action.exposures[:player].must_equal @player
  end
end
| 22.923077 | 112 | 0.682886 |
d532759e1b48f24e1a7179c29a1aa2c32fd20592 | 1,418 |
require 'spec_helper'

# Specs for the String core extensions that normalise line endings
# (#to_crlf, #to_lf) and resolve class names (#constantize).
describe 'core_extensions/string' do

  describe "to_crlf" do
    it "should change a single LF to CRLF" do
      "\n".to_crlf.should eq "\r\n"
    end
    it "should change multiple LF to CRLF" do
      "\n\n".to_crlf.should eq "\r\n\r\n"
    end
    it "should change a single CR to CRLF" do
      "\r".to_crlf.should eq "\r\n"
    end
    it "should not change CRLF" do
      "\r\n".to_crlf.should eq "\r\n"
    end
    it "should not change multiple CRLF" do
      "\r\n\r\n".to_crlf.should eq "\r\n\r\n"
    end
    it "should handle a mix" do
      "\r \n\r\n".to_crlf.should eq "\r\n \r\n\r\n"
    end
  end

  describe "to_lf" do
    it "should change a single CR to LF" do
      "\r".to_lf.should eq "\n"
    end
    it "should change multiple LF to CRLF" do
      "\r\r".to_lf.should eq "\n\n"
    end
    it "should change a single CRLF to LF" do
      "\r\n".to_lf.should eq "\n"
    end
    it "should change multiple CR to LF" do
      "\r\n\r\n".to_lf.should eq "\n\n"
    end
    it "should not change LF" do
      "\n".to_lf.should eq "\n"
    end
    it "should not change multiple CRLF" do
      "\n\n".to_lf.should eq "\n\n"
    end
    it "should handle a mix" do
      "\r \n\r\n".to_lf.should eq "\n \n\n"
    end
  end

  describe 'constantize' do
    it 'should converts string to constant' do
      "Kernel".constantize.should eq Kernel
    end
  end
end
| 20.257143 | 51 | 0.582511 |
1db6bbb63f07b398432bb523b8c15fa9428ed7b8 | 823 | module TreeNode
class MiqServer < Node
set_attribute(:image, '100/miq_server.png')
set_attribute(:expand, true)
set_attributes(:title, :tooltip) do
if @options[:is_current]
tooltip = _("%{server}: %{server_name} [%{server_id}] (current)") %
{:server => ui_lookup(:model => @object.class.to_s), :server_name => @object.name, :server_id => @object.id}
tooltip += " (#{@object.status})" if @options[:tree] == :roles_by_server_tree
title = content_tag(:strong, ERB::Util.html_escape(tooltip))
else
tooltip = "#{ui_lookup(:model => @object.class.to_s)}: #{@object.name} [#{@object.id}]"
tooltip += " (#{@object.status})" if @options[:tree] == :roles_by_server_tree
title = tooltip
end
[title, tooltip]
end
end
end
| 39.190476 | 127 | 0.601458 |
1c8e63a8e512bafbb73c98fdecf97d6a4211354a | 887 | require 'spaceship/base'
require 'spaceship/client'
require 'spaceship/launcher'

# Dev Portal
require 'spaceship/portal/portal'
require 'spaceship/portal/spaceship'

# iTunes Connect
require 'spaceship/tunes/tunes'
require 'spaceship/tunes/spaceship'

# To support legacy code: top-level constants that alias the namespaced
# Portal/Tunes classes so pre-namespacing callers keep working.
module Spaceship
  # Gem root directory (two levels above this file).
  ROOT = Pathname.new(File.expand_path('../..', __FILE__))

  # Dev Portal
  Certificate = Spaceship::Portal::Certificate
  ProvisioningProfile = Spaceship::Portal::ProvisioningProfile
  Device = Spaceship::Portal::Device
  App = Spaceship::Portal::App
  AppGroup = Spaceship::Portal::AppGroup
  AppService = Spaceship::Portal::AppService

  # iTunes Connect
  AppVersion = Spaceship::Tunes::AppVersion
  AppSubmission = Spaceship::Tunes::AppSubmission
  Application = Spaceship::Tunes::Application

  DESCRIPTION = "Ruby library to access the Apple Dev Center and iTunes Connect".freeze
end
| 27.71875 | 87 | 0.770011 |
b92f635386bc416de47e42a46b98966bdea1f2ba | 1,148 | module Fog
module Compute
class Brightbox
class Real
# Add a number of servers to the server group.
#
# @param [String] identifier Unique reference to identify the resource
# @param [Hash] options
# @option options [Array<Hash>] :servers Array of Hashes containing
# +{"server" => server_id}+ for each server to add
#
# @return [Hash] if successful Hash version of JSON object
# @return [NilClass] if no options were passed
#
# @see https://api.gb1.brightbox.com/1.0/#server_group_add_servers_server_group
#
# @example
# options = {
# :servers => [
# {"server" => "srv-abcde"},
# {"server" => "srv-fghij"}
# ]
# }
# Compute[:brightbox].add_servers_server_group "grp-12345", options
#
def add_servers_server_group(identifier, options)
return nil if identifier.nil? || identifier == ""
wrapped_request("post", "/1.0/server_groups/#{identifier}/add_servers", [202], options)
end
end
end
end
end
| 33.764706 | 97 | 0.561847 |
b939b56f5a3c815d55b4dd1c772b2d98e85e4294 | 202 | # -*- encoding : utf-8 -*-
require 'test_helper'

# Placeholder smoke test for Admin::ExpensesController.
class Admin::ExpensesControllerTest < ActionController::TestCase
  # Replace this with your real tests.
  test "the truth" do
    assert true
  end
end
| 20.2 | 64 | 0.712871 |
aba7bd4230a7ab7a5160438e9e58e862a28363f5 | 4,492 | class CinsCodeigniter < CMS
def self.articles params
url = get_url '/api/articles'
language = params['language']
if(language.blank?)
# Should be extracted
language = "rs"
end
version = params["v"]
@response = Rails.cache.fetch("cins_codeigniter_articles/#{language}/#{version}", expires_in: 1.hour) do
logger.info("articles are not cached, making call to newscoop server")
response = HTTParty.get(url)
body = JSON.parse response.body
return_response = format_cins_codeigniter_response(body)
return_response['results'] = clean_up_response(return_response['results'])
logger.debug(return_response)
return return_response
end
end
def self.article params
url = get_url '/api/article'
language = params['language']
if(language.blank?)
# Should be extracted
language = "rs"
end
version = params["v"]
article_id = params['id']
@response = Rails.cache.fetch("cins_codeigniter_article/#{article_id}/#{language}/#{version}", expires_in: 1.hour) do
options = {id: params['id']}
logger.info("articles are not cached, making call to newscoop server")
response = HTTParty.get(url, query: options)
body = JSON.parse response.body
return format_cins_codeigniter_response(body)
end
end
def self.search params
url = get_url '/api/search'
language = params['language']
if(language.blank?)
# Should be extracted
language = "rs"
end
version = params["v"]
options = {q: params['q']}
response = HTTParty.get(url, query: options)
body = JSON.parse response.body
return format_cins_codeigniter_response(body)
end
private
def self.get_url path
url = ENV['codeigniter_url']
return "#{url}#{path}"
end
def self.get_articles url, extras = {}, version = 1
logger.debug("Calling: #{url}")
response = HTTParty.get(url)
body = JSON.parse response.body
if(body['results'].nil?)
body['results'] = Array.new
end
results = clean_up_response(body['results'], version)
results = clean_up_for_wordpress results
response = {start_date: "19700101",
end_date: DateTime.now.strftime("%Y%m%d"),
total_results: results.size,
page: "1",
results: results
}
# add in any extras from the call, query string etc.
response = response.merge(extras)
return response
end
def self.language_parameter language
if(!language.blank?)
language = "/#{language}/"
end
return language
end
def self.format_cins_codeigniter_response body
items = body['results']
new_items = []
items.each do |item|
item['body'] = "<strong>" + item['description'] + "</strong>" + "<br><br>" + item['body']
logger.debug "Parsing: #{item['publish_date']}"
date = Date.strptime(item['publish_date'], "%Y-%m-%d %H:%M:%S")
item['publish_date'] = date.strftime("%Y%m%d")
extract_images item
# There's a bug in the cins plugin that doesn't add protocols to links
# This should fix it
# first determine if it's a link to CINS
# If it is, then add https
url = Addressable::URI.parse(base_url)
elements = Nokogiri::HTML::fragment item['body']
elements.css('a').each do |link|
uri = Addressable::URI.parse(link.attribute("href"))
url_host = url.host.gsub("www.", "")
uri_host = uri.host.gsub("www.", "")
if(url_host == uri_host)
uri.scheme = 'https'
link.attribute("href").value = uri.to_s
end
end
elements.css('p').each do |tag|
if tag.content.squish.blank?
tag.remove
end
end
item['body'] = elements.to_html
item['body'] = add_formatting_for_sidebars item['body']
new_items.push item
end
body['results'] = new_items
return body
end
def self.add_formatting_for_sidebars html
elements = Nokogiri::HTML::fragment html
elements.css('div.enterfile').each do |sidebar|
style = sidebar['style']
if(style == nil)
style = ""
else
style += ";"
end
#style += "background-color: #e6e6e6; color: #3a3a3a"
style += "color: #3a3a3a"
sidebar['style'] = style
sidebar.prepend_child("<br />—<br />")
sidebar.add_child("<br />—<br />")
end
return elements.to_html
end
end | 25.816092 | 122 | 0.615761 |
91129237f7fc2b0fb66561daf31089785906669e | 264 | require 'json'
require 'test/unit/assertions'
include Test::Unit::Assertions

# Path of the document to parse; here the script itself, which is not
# valid JSON, so parsing is expected to fail and leave `output` nil.
config_json_path = __FILE__
begin
  output = JSON.parse IO.read config_json_path
rescue StandardError => e
  # Rescue StandardError, not Exception: rescuing Exception would also
  # swallow SignalException/SystemExit and mask real failures.
  puts "an exception occurred: #{e}"
end
assert_nil output, "output expected nil"
bb762e4f7d0d7c3ac7264ef2176a5dd66fd75188 | 938 | # $HeadURL$
# $Id$
#
# Copyright (c) 2009-2012 by Public Library of Science, a non-profit corporation
# http://www.plos.org/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Delayed::Job worker tuning.
Delayed::Worker.destroy_failed_jobs = true # drop jobs that exhaust their retries
Delayed::Worker.sleep_delay = 5 # seconds to sleep between queue polls
Delayed::Worker.max_attempts = 10 # retries before a job is considered failed
Delayed::Worker.default_priority = 5
Delayed::Worker.max_run_time = 90.minutes # jobs running longer than this are killed
Delayed::Worker.read_ahead = 10 # jobs fetched per poll
Delayed::Worker.delay_jobs = !Rails.env.test? # run jobs inline in the test env
| 36.076923 | 80 | 0.761194 |
ab9b601e75664f70c431b22acaf56a9a3bbb40f6 | 175 | class AddAssignerToAssignment < ActiveRecord::Migration[4.2]
def change
add_column :assignments, :assigner_id, :integer
add_index :assignments, :assigner_id
end
end
| 25 | 60 | 0.777143 |
bf08bcd4c9703ce11268226096f2fa946625a033 | 2,013 | class AddDeviseToUsers < ActiveRecord::Migration
def self.up
#encrypting passwords and authentication related fields
rename_column :users, "crypted_password", "encrypted_password"
change_column :users, "encrypted_password", :string, :limit => 128, :default => "", :null => false
rename_column :users, "salt", "password_salt"
change_column :users, "password_salt", :string, :default => "", :null => false
#confirmation related fields
rename_column :users, "activation_code", "confirmation_token"
rename_column :users, "activated_at", "confirmed_at"
change_column :users, "confirmation_token", :string
add_column :users, "confirmation_sent_at", :datetime
#reset password related fields
rename_column :users, "password_reset_code", "reset_password_token"
#rememberme related fields
add_column :users, "remember_created_at", :datetime #additional field required for devise.
#add trackable fields
change_table(:users) do |t|
## Trackable
t.integer :sign_in_count, default: 0, null: false
t.datetime :current_sign_in_at
t.datetime :last_sign_in_at
t.string :current_sign_in_ip
t.string :last_sign_in_ip
end
end
def self.down
#rememberme related fields
remove_column :users, "remember_created_at"
#reset password related fields
rename_column :users, "reset_password_token", "password_reset_code"
#confirmation related fields
rename_column :users, "confirmation_token", "activation_code"
rename_column :users, "confirmed_at", "activated_at"
change_column :users, "activation_code", :string
remove_column :users, "confirmation_sent_at"
#encrypting passwords and authentication related fields
rename_column :users, "encrypted_password", "crypted_password"
change_column :users, "crypted_password", :string, :limit => 40
rename_column :users, "password_salt", "salt"
change_column :users, "salt", :string, :limit => 40
end
end
| 35.946429 | 102 | 0.720815 |
010a938f055fa97fc902473e0df6d388474f625f | 1,175 | class Decree < ActiveRecord::Base
include OpenCourts::Model
include Purgeable
belongs_to :source
# TODO consider this required in future
belongs_to :proceeding, optional: true
belongs_to :court, optional: true
has_many :judgements
has_many :exact_judgements, -> { exact }, class_name: :Judgement, source: :judge
has_many :inexact_judgements, -> { inexact }, class_name: :Judgement, source: :judge
has_many :judges, through: :judgements
belongs_to :form, class_name: 'Decree::Form', foreign_key: :decree_form_id
has_many :naturalizations, class_name: 'Decree::Naturalization'
has_many :natures, class_name: 'Decree::Nature', through: :naturalizations
belongs_to :legislation_area, class_name: 'Legislation::Area', optional: true
belongs_to :legislation_subarea, class_name: 'Legislation::Subarea', optional: true
has_many :legislation_usages, class_name: 'Legislation::Usage'
has_many :legislations, through: :legislation_usages
has_many :paragraph_explanations, class_name: 'Paragraph::Explanation', through: :legislations
has_many :paragraphs, through: :paragraph_explanations
has_many :pages, class_name: 'Decree::Page'
end
| 36.71875 | 96 | 0.770213 |
e934ada1cabdffdeb6b3b0f89d24b341cd38fa9c | 9,149 | require 'benchmark'
require 'capistrano/errors'
require 'capistrano/processable'
module Capistrano
  # This class encapsulates a single command to be executed on a set of remote
  # machines, in parallel.
  class Command
    include Processable

    # A Tree holds the alternative command branches for one invocation:
    # zero or more conditional branches plus an optional fallback
    # ("else") branch. Per server, only the matching branches run.
    class Tree
      attr_reader :configuration
      attr_reader :branches
      attr_reader :fallback

      include Enumerable

      # One command string plus the callback used to stream its output.
      class Branch
        attr_accessor :command, :callback, :condition
        attr_reader :options

        def initialize(command, options, callback)
          # Join multi-line commands with backslash continuations.
          @command = command.strip.gsub(/\r?\n/, "\\\n")
          @callback = callback || Capistrano::Configuration.default_io_proc
          @options = options
          @skip = false
        end

        # When true, no later branches are considered after this one matches.
        def last?
          options[:last]
        end

        def skip?
          @skip
        end

        def skip!
          @skip = true
        end

        # Unconditional branches match every server.
        def match(server)
          true
        end

        def to_s(parallel=false)
          if parallel && @condition
            "#{condition.inspect} :: #{command.inspect}"
          else
            command.inspect
          end
        end
      end

      # A branch guarded by a condition string, evaluated per server.
      class ConditionBranch < Branch
        attr_accessor :configuration

        # Evaluates the condition string with the server and the
        # configuration in scope, plus the in?(role) helper.
        class Evaluator
          attr_reader :configuration, :condition, :server

          def initialize(config, condition, server)
            @configuration = config
            @condition = condition
            @server = server
          end

          # True when the server belongs to the given role.
          def in?(role)
            configuration.roles[role].include?(server)
          end

          def result
            eval(condition, binding)
          end

          # Delegate unknown messages first to the server, then to the
          # configuration, so conditions may reference either.
          def method_missing(sym, *args, &block)
            if server.respond_to?(sym)
              server.send(sym, *args, &block)
            elsif configuration.respond_to?(sym)
              configuration.send(sym, *args, &block)
            else
              super
            end
          end
        end

        def initialize(configuration, condition, command, options, callback)
          @configuration = configuration
          @condition = condition
          super(command, options, callback)
        end

        def match(server)
          Evaluator.new(configuration, condition, server).result
        end
      end

      # Fallback branch used when no conditional branch matched.
      class ElseBranch < Branch
        def initialize(command, options, callback)
          @condition = "else"
          super(command, options, callback)
        end
      end

      def initialize(config)
        @configuration = config
        @branches = []
        yield self if block_given?
      end

      # Adds a conditional branch.
      def when(condition, command, options={}, &block)
        branches << ConditionBranch.new(configuration, condition, command, options, block)
      end

      # Sets the fallback branch.
      def else(command, &block)
        @fallback = ElseBranch.new(command, {}, block)
      end

      # Returns the branches applicable to the given server. A matching
      # branch flagged :last stops further branches from being added.
      # Falls back to the else branch when nothing matched.
      def branches_for(server)
        seen_last = false
        matches = branches.select do |branch|
          success = !seen_last && !branch.skip? && branch.match(server)
          seen_last = success && branch.last?
          success
        end
        matches << fallback if matches.empty? && fallback
        return matches
      end

      # Yields every branch, then the fallback (if any).
      def each
        branches.each { |branch| yield branch }
        yield fallback if fallback
        return self
      end
    end

    attr_reader :tree, :sessions, :options

    # Convenience: build a command and run it to completion.
    def self.process(tree, sessions, options={})
      new(tree, sessions, options).process!
    end

    # Instantiates a new command object. The +command+ must be a string
    # containing the command to execute. +sessions+ is an array of Net::SSH
    # session instances, and +options+ must be a hash containing any of the
    # following keys:
    #
    # * +logger+: (optional), a Capistrano::Logger instance
    # * +data+: (optional), a string to be sent to the command via it's stdin
    # * +env+: (optional), a string or hash to be interpreted as environment
    #   variables that should be defined for this command invocation.
    def initialize(tree, sessions, options={}, &block)
      # A bare string is wrapped in a single-branch (else-only) tree.
      if String === tree
        tree = Tree.new(nil) { |t| t.else(tree, &block) }
      elsif block
        raise ArgumentError, "block given with tree argument"
      end
      @tree = tree
      @sessions = sessions
      @options = options
      @channels = open_channels
    end

    # Processes the command in parallel on all specified hosts. If the command
    # fails (non-zero return code) on any of the hosts, this will raise a
    # Capistrano::CommandError.
    def process!
      elapsed = Benchmark.realtime do
        loop do
          break unless process_iteration { @channels.any? { |ch| !ch[:closed] } }
        end
      end
      logger.trace "command finished in #{(elapsed * 1000).round}ms" if logger
      # Collect failures per command line so the error names every
      # affected host.
      if (failed = @channels.select { |ch| ch[:status] != 0 }).any?
        commands = failed.inject({}) { |map, ch| (map[ch[:command]] ||= []) << ch[:server]; map }
        message = commands.map { |command, list| "#{command.inspect} on #{list.join(',')}" }.join("; ")
        error = CommandError.new("failed: #{message}")
        error.hosts = commands.values.flatten
        raise error
      end
      self
    end

    # Force the command to stop processing, by closing all open channels
    # associated with this command.
    def stop!
      @channels.each do |ch|
        ch.close unless ch[:closed]
      end
    end

    private

    def logger
      options[:logger]
    end

    # Opens one SSH channel per (session, matching branch) pair, wiring
    # up output/exit callbacks. Returns the flattened channel list.
    def open_channels
      sessions.map do |session|
        server = session.xserver
        tree.branches_for(server).map do |branch|
          session.open_channel do |channel|
            channel[:server] = server
            channel[:host] = server.host
            channel[:options] = options
            channel[:branch] = branch
            request_pty_if_necessary(channel) do |ch, success|
              if success
                logger.trace "executing command", ch[:server] if logger
                cmd = replace_placeholders(channel[:branch].command, ch)
                if options[:shell] == false
                  shell = nil
                else
                  # Single-quote the command for the shell, escaping any
                  # embedded single quotes as '\''.
                  shell = "#{options[:shell] || "sh"} -c"
                  cmd = cmd.gsub(/'/) { |m| "'\\''" }
                  cmd = "'#{cmd}'"
                end
                command_line = [environment, shell, cmd].compact.join(" ")
                ch[:command] = command_line
                ch.exec(command_line)
                ch.send_data(options[:data]) if options[:data]
                ch.eof! if options[:eof]
              else
                # just log it, don't actually raise an exception, since the
                # process method will see that the status is not zero and will
                # raise an exception then.
                logger.important "could not open channel", ch[:server] if logger
                ch.close
              end
            end
            channel.on_data do |ch, data|
              ch[:branch].callback[ch, :out, data]
            end
            channel.on_extended_data do |ch, type, data|
              ch[:branch].callback[ch, :err, data]
            end
            channel.on_request("exit-status") do |ch, data|
              ch[:status] = data.read_long
            end
            channel.on_request("exit-signal") do |ch, data|
              if logger
                exit_signal = data.read_string
                logger.important "command received signal #{exit_signal}", ch[:server]
              end
            end
            channel.on_close do |ch|
              ch[:closed] = true
            end
          end
        end
      end.flatten
    end

    # Requests a pty only when options[:pty] is set; otherwise yields
    # immediately as a success.
    def request_pty_if_necessary(channel)
      if options[:pty]
        channel.request_pty do |ch, success|
          yield ch, success
        end
      else
        yield channel, true
      end
    end

    # Substitutes the $CAPISTRANO:HOST$ / $CAPISTRANO:HOSTROLES$
    # placeholders with the channel's host and its role names.
    def replace_placeholders(command, channel)
      roles = @tree.configuration && @tree.configuration.role_names_for_host(channel[:server])
      command = command.gsub(/\$CAPISTRANO:HOST\$/, channel[:host])
      command.gsub!(/\$CAPISTRANO:HOSTROLES\$/, roles.join(',')) if roles
      command
    end

    # prepare a space-separated sequence of variables assignments
    # intended to be prepended to a command, so the shell sets
    # the environment before running the command.
    # i.e.: options[:env] = {'PATH' => '/opt/ruby/bin:$PATH',
    #                        'TEST' => '( "quoted" )'}
    # environment returns:
    # "env TEST=(\ \"quoted\"\ ) PATH=/opt/ruby/bin:$PATH"
    def environment
      return if options[:env].nil? || options[:env].empty?
      @environment ||= if String === options[:env]
          "env #{options[:env]}"
        else
          options[:env].inject("env") do |string, (name, value)|
            value = value.to_s.gsub(/[ "]/) { |m| "\\#{m}" }
            string << " #{name}=#{value}"
          end
        end
    end
  end
end
| 30.095395 | 103 | 0.548038 |
7aa3762116013c7c21877c58e7320bbfda0c00ef | 371 | # frozen_string_literal: true
require "loggeryk"
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
| 23.1875 | 66 | 0.754717 |
0145b8c9d90542c4a915663b7272b65679677917 | 2,718 | # Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Google
module Cloud
module Logging
class Entry
##
# # SourceLocation
#
# Additional information about the source code location that produced
# the log entry.
#
# See also {Google::Cloud::Logging::Entry#source_location}.
#
class SourceLocation
##
# @private Create an empty SourceLocation object.
def initialize; end
##
# Source file name. Depending on the runtime environment, this might
# be a simple name or a fully-qualified name. Optional.
attr_accessor :file
##
# Line within the source file. 1-based; `0` indicates no line number
# available. Optional.
attr_accessor :line
##
# Human-readable name of the function or method being invoked, with
# optional context such as the class or package name. This information
# may be used in contexts such as the logs viewer, where a file and
# line number are less meaningful. Optional.
attr_accessor :function
##
# @private Determines if the SourceLocation has any data.
def empty?
file.nil? &&
line.nil? &&
function.nil?
end
##
# @private Exports the SourceLocation to a
# Google::Logging::V2::LogEntrySourceLocation object.
def to_grpc
return nil if empty?
Google::Logging::V2::LogEntrySourceLocation.new(
file: file.to_s,
line: line,
function: function.to_s
)
end
##
# @private New Google::Cloud::Logging::Entry::SourceLocation from a
# Google::Logging::V2::LogEntrySourceLocation object.
def self.from_grpc grpc
return new if grpc.nil?
new.tap do |o|
o.file = grpc.file
o.line = grpc.line
o.function = grpc.function
end
end
end
end
end
end
end
| 31.604651 | 80 | 0.582781 |
ac6429e7695513a0447cf105703fc7c99aff1c04 | 2,007 | Rails.application.routes.draw do
root 'wonolo#index', as: 'root'
get 'data/completed' => 'wonolo#completed', as: 'completed'
get 'data/in_progress' => 'wonolo#in_progress', as: 'in_progress'
get 'data/no_show' => 'wonolo#no_show', as: 'no_show'
get 'data/cancelled' => 'wonolo#cancelled', as: 'cancelled'
get 'data/charts' => 'wonolo#charts', as: 'charts'
get 'data/timeline' => 'wonolo#timeline', as: 'timeline'
# The priority is based upon order of creation: first created -> highest priority.
# See how all your routes lay out with "rake routes".
# You can have the root of your site routed with "root"
# root 'welcome#index'
# Example of regular route:
# get 'products/:id' => 'catalog#view'
# Example of named route that can be invoked with purchase_url(id: product.id)
# get 'products/:id/purchase' => 'catalog#purchase', as: :purchase
# Example resource route (maps HTTP verbs to controller actions automatically):
# resources :products
# Example resource route with options:
# resources :products do
# member do
# get 'short'
# post 'toggle'
# end
#
# collection do
# get 'sold'
# end
# end
# Example resource route with sub-resources:
# resources :products do
# resources :comments, :sales
# resource :seller
# end
# Example resource route with more complex sub-resources:
# resources :products do
# resources :comments
# resources :sales do
# get 'recent', on: :collection
# end
# end
# Example resource route with concerns:
# concern :toggleable do
# post 'toggle'
# end
# resources :posts, concerns: :toggleable
# resources :photos, concerns: :toggleable
# Example resource route within a namespace:
# namespace :admin do
# # Directs /admin/products/* to Admin::ProductsController
# # (app/controllers/admin/products_controller.rb)
# resources :products
# end
end
| 29.514706 | 84 | 0.646238 |
01e461de4ff3a0ec7d68d5c565af9a97d1e3e963 | 3,605 | class Qca < Formula
desc "Qt Cryptographic Architecture (QCA)"
homepage "http://delta.affinix.com/qca/"
revision 2
head "https://anongit.kde.org/qca.git"
stable do
url "https://github.com/KDE/qca/archive/v2.1.3.tar.gz"
sha256 "a5135ffb0250a40e9c361eb10cd3fe28293f0cf4e5c69d3761481eafd7968067"
# upstream fixes for macOS building (remove on 2.2.0 upgrade)
patch do
url "https://github.com/KDE/qca/commit/7ba0ee591e0f50a7e7b532f9eb7e500e7da784fb.diff?full_index=1"
sha256 "3f6c8a8bbd246556c690142c209a34973981be66e46fee991a456fb2e8b66d72"
end
patch do
url "https://github.com/KDE/qca/commit/b435c1b87b14ac2d2de9f83e586bfd6d8c2a755e.diff?full_index=1"
sha256 "9ea01ad6b21282ff62b18ac02588f7106b75056ab8379dff3fdfcff13a6c122f"
end
patch do
url "https://github.com/KDE/qca/commit/f4b2eb0ced5310f3c43398eb1f03e0c065e08a82.diff?full_index=1"
sha256 "d6c27ebfd8fec5284e4a0a39faf62e44764be5baff08141bd7f4da6d0b9f438d"
end
# use major version for framework, instead of full version
# see: https://github.com/KDE/qca/pull/3
patch do
url "https://github.com/KDE/qca/pull/3.patch?full_index=1"
sha256 "37281b8fefbbdab768d7abcc39fb1c1bf85159730c2a4de6e84f0bf318ebac2c"
end
end
bottle do
rebuild 1
sha256 "b1232ad866b1e40e28a3f84674570741c0f9f8356ca43ead89574b00306f1875" => :mojave
sha256 "7fca5c9a591a204813356e3314077a628cbbff1cb5e6669355a2e26cd92765aa" => :high_sierra
sha256 "8dd6479be1f5cacb740915646bf9dd2fb8103df38e9f75ecfbb507ed3a0b201e" => :sierra
sha256 "7790fd8de8b6ee98ca8d4f687894437137d774538c209a80a340f513a8fbc159" => :el_capitan
sha256 "89d8a72a4e408504d1897200b342b77475cff1e8f35982755d9227de28ed496c" => :x86_64_linux
end
option "with-api-docs", "Build API documentation"
deprecated_option "with-gpg2" => "with-gnupg"
# commented dep = plugin
# (QCA needs at least one plugin to do anything useful)
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "openssl" # qca-ossl
depends_on "qt"
depends_on "botan" => :optional # qca-botan
depends_on "gnupg" => :optional # qca-gnupg
depends_on "libgcrypt" => :optional # qca-gcrypt
depends_on "nss" => :optional # qca-nss
depends_on "pkcs11-helper" => :optional # qca-pkcs11
if build.with? "api-docs"
depends_on "graphviz" => :build
depends_on "doxygen" => :build
end
def install
args = std_cmake_args
args << "-DQT4_BUILD=OFF"
args << "-DBUILD_TESTS=OFF"
args << "-DQCA_PLUGINS_INSTALL_DIR=#{lib}/qt5/plugins"
# Plugins (qca-ossl, qca-cyrus-sasl, qca-logger, qca-softstore always built)
args << "-DWITH_botan_PLUGIN=#{build.with?("botan") ? "YES" : "NO"}"
args << "-DWITH_gcrypt_PLUGIN=#{build.with?("libgcrypt") ? "YES" : "NO"}"
args << "-DWITH_gnupg_PLUGIN=#{build.with?("gnupg") ? "YES" : "NO"}"
args << "-DWITH_nss_PLUGIN=#{build.with?("nss") ? "YES" : "NO"}"
args << "-DWITH_pkcs11_PLUGIN=#{build.with?("pkcs11-helper") ? "YES" : "NO"}"
# ensure opt_lib for framework install name and linking (can't be done via CMake configure)
inreplace "src/CMakeLists.txt",
/^(\s+)(INSTALL_NAME_DIR )("\$\{QCA_LIBRARY_INSTALL_DIR\}")$/,
"\\1\\2\"#{opt_lib}\""
system "cmake", ".", *args
system "make", "install"
if build.with? "api-docs"
system "make", "doc"
doc.install "apidocs/html"
end
end
test do
system bin/"qcatool-qt5", "--noprompt", "--newpass=",
"key", "make", "rsa", "2048", "test.key"
end
end
| 37.947368 | 104 | 0.696533 |
d5a23cd530f1c2826fd6c0b971759f6926f67def | 1,869 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2021_05_21_151112) do
create_table "admins", force: :cascade do |t|
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["email"], name: "index_admins_on_email", unique: true
t.index ["reset_password_token"], name: "index_admins_on_reset_password_token", unique: true
end
create_table "users", force: :cascade do |t|
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["email"], name: "index_users_on_email", unique: true
t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
end
end
| 46.725 | 96 | 0.743713 |
e9e25c0a5e1a6d34bfe52c9656147bde02e06989 | 25,612 | require "helper"
describe Thor do
describe "#method_option" do
it "sets options to the next method to be invoked" do
args = %w(foo bar --force)
_, options = MyScript.start(args)
expect(options).to eq("force" => true)
end
describe ":lazy_default" do
it "is absent when option is not specified" do
_, options = MyScript.start(%w(with_optional))
expect(options).to eq({})
end
it "sets a default that can be overridden for strings" do
_, options = MyScript.start(%w(with_optional --lazy))
expect(options).to eq("lazy" => "yes")
_, options = MyScript.start(%w(with_optional --lazy yesyes!))
expect(options).to eq("lazy" => "yesyes!")
end
it "sets a default that can be overridden for numerics" do
_, options = MyScript.start(%w(with_optional --lazy-numeric))
expect(options).to eq("lazy_numeric" => 42)
_, options = MyScript.start(%w(with_optional --lazy-numeric 20000))
expect(options).to eq("lazy_numeric" => 20_000)
end
it "sets a default that can be overridden for arrays" do
_, options = MyScript.start(%w(with_optional --lazy-array))
expect(options).to eq("lazy_array" => %w(eat at joes))
_, options = MyScript.start(%w(with_optional --lazy-array hello there))
expect(options).to eq("lazy_array" => %w(hello there))
end
it "sets a default that can be overridden for hashes" do
_, options = MyScript.start(%w(with_optional --lazy-hash))
expect(options).to eq("lazy_hash" => {"swedish" => "meatballs"})
_, options = MyScript.start(%w(with_optional --lazy-hash polish:sausage))
expect(options).to eq("lazy_hash" => {"polish" => "sausage"})
end
end
describe "when :for is supplied" do
it "updates an already defined command" do
_, options = MyChildScript.start(%w(animal horse --other=fish))
expect(options[:other]).to eq("fish")
end
describe "and the target is on the parent class" do
it "updates an already defined command" do
args = %w(example_default_command my_param --new-option=verified)
options = Scripts::MyScript.start(args)
expect(options[:new_option]).to eq("verified")
end
it "adds a command to the command list if the updated command is on the parent class" do
expect(Scripts::MyScript.commands["example_default_command"]).to be
end
it "clones the parent command" do
expect(Scripts::MyScript.commands["example_default_command"]).not_to eq(MyChildScript.commands["example_default_command"])
end
end
end
end
describe "#default_command" do
it "sets a default command" do
expect(MyScript.default_command).to eq("example_default_command")
end
it "invokes the default command if no command is specified" do
expect(MyScript.start([])).to eq("default command")
end
it "invokes the default command if no command is specified even if switches are given" do
expect(MyScript.start(%w(--with option))).to eq("with" => "option")
end
it "inherits the default command from parent" do
expect(MyChildScript.default_command).to eq("example_default_command")
end
end
describe "#stop_on_unknown_option!" do
my_script = Class.new(Thor) do
class_option "verbose", :type => :boolean
class_option "mode", :type => :string
stop_on_unknown_option! :exec
desc "exec", "Run a command"
def exec(*args)
[options, args]
end
desc "boring", "An ordinary command"
def boring(*args)
[options, args]
end
end
it "passes remaining args to command when it encounters a non-option" do
expect(my_script.start(%w(exec command --verbose))).to eq [{}, %w(command --verbose)]
end
it "passes remaining args to command when it encounters an unknown option" do
expect(my_script.start(%w(exec --foo command --bar))).to eq [{}, %w(--foo command --bar)]
end
it "still accepts options that are given before non-options" do
expect(my_script.start(%w(exec --verbose command --foo))).to eq [{"verbose" => true}, %w(command --foo)]
end
it "still accepts options that require a value" do
expect(my_script.start(%w(exec --mode rashly command))).to eq [{"mode" => "rashly"}, %w(command)]
end
it "still passes everything after -- to command" do
expect(my_script.start(%w(exec -- --verbose))).to eq [{}, %w(--verbose)]
end
it "still passes everything after -- to command, complex" do
expect(my_script.start(%w[exec command --mode z again -- --verbose more])).to eq [{}, %w[command --mode z again -- --verbose more]]
end
it "does not affect ordinary commands" do
expect(my_script.start(%w(boring command --verbose))).to eq [{"verbose" => true}, %w(command)]
end
context "when provided with multiple command names" do
klass = Class.new(Thor) do
stop_on_unknown_option! :foo, :bar
end
it "affects all specified commands" do
expect(klass.stop_on_unknown_option?(double(:name => "foo"))).to be true
expect(klass.stop_on_unknown_option?(double(:name => "bar"))).to be true
expect(klass.stop_on_unknown_option?(double(:name => "baz"))).to be false
end
end
context "when invoked several times" do
klass = Class.new(Thor) do
stop_on_unknown_option! :foo
stop_on_unknown_option! :bar
end
it "affects all specified commands" do
expect(klass.stop_on_unknown_option?(double(:name => "foo"))).to be true
expect(klass.stop_on_unknown_option?(double(:name => "bar"))).to be true
expect(klass.stop_on_unknown_option?(double(:name => "baz"))).to be false
end
end
it "doesn't break new" do
expect(my_script.new).to be_a(Thor)
end
context "along with check_unknown_options!" do
my_script2 = Class.new(Thor) do
class_option "verbose", :type => :boolean
class_option "mode", :type => :string
check_unknown_options!
stop_on_unknown_option! :exec
desc "exec", "Run a command"
def exec(*args)
[options, args]
end
end
it "passes remaining args to command when it encounters a non-option" do
expect(my_script2.start(%w[exec command --verbose])).to eq [{}, %w[command --verbose]]
end
it "does not accept if first non-option looks like an option, but only refuses that invalid option" do
expect(capture(:stderr) do
my_script2.start(%w[exec --foo command --bar])
end.strip).to eq("Unknown switches \"--foo\"")
end
it "still accepts options that are given before non-options" do
expect(my_script2.start(%w[exec --verbose command])).to eq [{"verbose" => true}, %w[command]]
end
it "still accepts when non-options are given after real options and argument" do
expect(my_script2.start(%w[exec --verbose command --foo])).to eq [{"verbose" => true}, %w[command --foo]]
end
it "does not accept when non-option looks like an option and is after real options" do
expect(capture(:stderr) do
my_script2.start(%w[exec --verbose --foo])
end.strip).to eq("Unknown switches \"--foo\"")
end
it "still accepts options that require a value" do
expect(my_script2.start(%w[exec --mode rashly command])).to eq [{"mode" => "rashly"}, %w[command]]
end
it "still passes everything after -- to command" do
expect(my_script2.start(%w[exec -- --verbose])).to eq [{}, %w[--verbose]]
end
it "still passes everything after -- to command, complex" do
expect(my_script2.start(%w[exec command --mode z again -- --verbose more])).to eq [{}, %w[command --mode z again -- --verbose more]]
end
end
end
describe "#check_unknown_options!" do
my_script = Class.new(Thor) do
class_option "verbose", :type => :boolean
class_option "mode", :type => :string
check_unknown_options!
desc "checked", "a command with checked"
def checked(*args)
[options, args]
end
end
it "still accept options and arguments" do
expect(my_script.start(%w[checked command --verbose])).to eq [{"verbose" => true}, %w[command]]
end
it "still accepts options that are given before arguments" do
expect(my_script.start(%w[checked --verbose command])).to eq [{"verbose" => true}, %w[command]]
end
it "does not accept if non-option that looks like an option is before the arguments" do
expect(capture(:stderr) do
my_script.start(%w[checked --foo command --bar])
end.strip).to eq("Unknown switches \"--foo\", \"--bar\"")
end
it "does not accept if non-option that looks like an option is after an argument" do
expect(capture(:stderr) do
my_script.start(%w[checked command --foo --bar])
end.strip).to eq("Unknown switches \"--foo\", \"--bar\"")
end
it "does not accept when non-option that looks like an option is after real options" do
expect(capture(:stderr) do
my_script.start(%w[checked --verbose --foo])
end.strip).to eq("Unknown switches \"--foo\"")
end
it "does not accept when non-option that looks like an option is before real options" do
expect(capture(:stderr) do
my_script.start(%w[checked --foo --verbose])
end.strip).to eq("Unknown switches \"--foo\"")
end
it "still accepts options that require a value" do
expect(my_script.start(%w[checked --mode rashly command])).to eq [{"mode" => "rashly"}, %w[command]]
end
it "still passes everything after -- to command" do
expect(my_script.start(%w[checked -- --verbose])).to eq [{}, %w[--verbose]]
end
it "still passes everything after -- to command, complex" do
expect(my_script.start(%w[checked command --mode z again -- --verbose more])).to eq [{"mode" => "z"}, %w[command again --verbose more]]
end
end
describe "#disable_required_check!" do
my_script = Class.new(Thor) do
class_option "foo", :required => true
disable_required_check! :boring
desc "exec", "Run a command"
def exec(*args)
[options, args]
end
desc "boring", "An ordinary command"
def boring(*args)
[options, args]
end
end
it "does not check the required option in the given command" do
expect(my_script.start(%w(boring command))).to eq [{}, %w(command)]
end
it "does check the required option of the remaining command" do
content = capture(:stderr) { my_script.start(%w(exec command)) }
expect(content).to eq "No value provided for required options '--foo'\n"
end
it "does affects help by default" do
expect(my_script.disable_required_check?(double(:name => "help"))).to be true
end
context "when provided with multiple command names" do
klass = Class.new(Thor) do
disable_required_check! :foo, :bar
end
it "affects all specified commands" do
expect(klass.disable_required_check?(double(:name => "help"))).to be true
expect(klass.disable_required_check?(double(:name => "foo"))).to be true
expect(klass.disable_required_check?(double(:name => "bar"))).to be true
expect(klass.disable_required_check?(double(:name => "baz"))).to be false
end
end
context "when invoked several times" do
klass = Class.new(Thor) do
disable_required_check! :foo
disable_required_check! :bar
end
it "affects all specified commands" do
expect(klass.disable_required_check?(double(:name => "help"))).to be true
expect(klass.disable_required_check?(double(:name => "foo"))).to be true
expect(klass.disable_required_check?(double(:name => "bar"))).to be true
expect(klass.disable_required_check?(double(:name => "baz"))).to be false
end
end
end
describe "#map" do
it "calls the alias of a method if one is provided" do
expect(MyScript.start(%w(-T fish))).to eq(%w(fish))
end
it "calls the alias of a method if several are provided via #map" do
expect(MyScript.start(%w(-f fish))).to eq(["fish", {}])
expect(MyScript.start(%w(--foo fish))).to eq(["fish", {}])
end
it "inherits all mappings from parent" do
expect(MyChildScript.default_command).to eq("example_default_command")
end
end
describe "#package_name" do
it "provides a proper description for a command when the package_name is assigned" do
content = capture(:stdout) { PackageNameScript.start(%w(help)) }
expect(content).to match(/Baboon commands:/m)
end
# TODO: remove this, might be redundant, just wanted to prove full coverage
it "provides a proper description for a command when the package_name is NOT assigned" do
content = capture(:stdout) { MyScript.start(%w(help)) }
expect(content).to match(/Commands:/m)
end
end
describe "#desc" do
it "provides description for a command" do
content = capture(:stdout) { MyScript.start(%w(help)) }
expect(content).to match(/thor my_script:zoo\s+# zoo around/m)
end
it "provides no namespace if $thor_runner is false" do
begin
$thor_runner = false
content = capture(:stdout) { MyScript.start(%w(help)) }
expect(content).to match(/thor zoo\s+# zoo around/m)
ensure
$thor_runner = true
end
end
describe "when :for is supplied" do
it "overwrites a previous defined command" do
expect(capture(:stdout) { MyChildScript.start(%w(help)) }).to match(/animal KIND \s+# fish around/m)
end
end
describe "when :hide is supplied" do
it "does not show the command in help" do
expect(capture(:stdout) { MyScript.start(%w(help)) }).not_to match(/this is hidden/m)
end
it "but the command is still invokable, does not show the command in help" do
expect(MyScript.start(%w(hidden yesyes))).to eq(%w(yesyes))
end
end
end
describe "#method_options" do
it "sets default options if called before an initializer" do
options = MyChildScript.class_options
expect(options[:force].type).to eq(:boolean)
expect(options[:param].type).to eq(:numeric)
end
it "overwrites default options if called on the method scope" do
args = %w(zoo --force --param feathers)
options = MyChildScript.start(args)
expect(options).to eq("force" => true, "param" => "feathers")
end
it "allows default options to be merged with method options" do
args = %w(animal bird --force --param 1.0 --other tweets)
arg, options = MyChildScript.start(args)
expect(arg).to eq("bird")
expect(options).to eq("force" => true, "param" => 1.0, "other" => "tweets")
end
end
describe "#start" do
it "calls a no-param method when no params are passed" do
expect(MyScript.start(%w(zoo))).to eq(true)
end
it "calls a single-param method when a single param is passed" do
expect(MyScript.start(%w(animal fish))).to eq(%w(fish))
end
it "does not set options in attributes" do
expect(MyScript.start(%w(with_optional --all))).to eq([nil, {"all" => true}, []])
end
it "raises an error if the wrong number of params are provided" do
arity_asserter = lambda do |args, msg|
stderr = capture(:stderr) { Scripts::Arities.start(args) }
expect(stderr.strip).to eq(msg)
end
arity_asserter.call %w(zero_args one), 'ERROR: "thor zero_args" was called with arguments ["one"]
Usage: "thor scripts:arities:zero_args"'
arity_asserter.call %w(one_arg), 'ERROR: "thor one_arg" was called with no arguments
Usage: "thor scripts:arities:one_arg ARG"'
arity_asserter.call %w(one_arg one two), 'ERROR: "thor one_arg" was called with arguments ["one", "two"]
Usage: "thor scripts:arities:one_arg ARG"'
arity_asserter.call %w(one_arg one two), 'ERROR: "thor one_arg" was called with arguments ["one", "two"]
Usage: "thor scripts:arities:one_arg ARG"'
arity_asserter.call %w(two_args one), 'ERROR: "thor two_args" was called with arguments ["one"]
Usage: "thor scripts:arities:two_args ARG1 ARG2"'
arity_asserter.call %w(optional_arg one two), 'ERROR: "thor optional_arg" was called with arguments ["one", "two"]
Usage: "thor scripts:arities:optional_arg [ARG]"'
end
it "raises an error if the invoked command does not exist" do
expect(capture(:stderr) { Amazing.start(%w(animal)) }.strip).to eq('Could not find command "animal" in "amazing" namespace.')
end
it "calls method_missing if an unknown method is passed in" do
expect(MyScript.start(%w(unk hello))).to eq([:unk, %w(hello)])
end
it "does not call a private method no matter what" do
expect(capture(:stderr) { MyScript.start(%w(what)) }.strip).to eq('Could not find command "what" in "my_script" namespace.')
end
it "uses command default options" do
options = MyChildScript.start(%w(animal fish)).last
expect(options).to eq("other" => "method default")
end
it "raises when an exception happens within the command call" do
expect { MyScript.start(%w(call_myself_with_wrong_arity)) }.to raise_error(ArgumentError)
end
context "when the user enters an unambiguous substring of a command" do
it "invokes a command" do
expect(MyScript.start(%w(z))).to eq(MyScript.start(%w(zoo)))
end
it "invokes a command, even when there's an alias it resolves to the same command" do
expect(MyScript.start(%w(hi arg))).to eq(MyScript.start(%w(hidden arg)))
end
it "invokes an alias" do
expect(MyScript.start(%w(animal_pri))).to eq(MyScript.start(%w(zoo)))
end
end
context "when the user enters an ambiguous substring of a command" do
it "raises an exception and displays a message that explains the ambiguity" do
shell = Thor::Base.shell.new
expect(shell).to receive(:error).with("Ambiguous command call matches [call_myself_with_wrong_arity, call_unexistent_method]")
MyScript.start(%w(call), :shell => shell)
end
it "raises an exception when there is an alias" do
shell = Thor::Base.shell.new
expect(shell).to receive(:error).with("Ambiguous command f matches [foo, fu]")
MyScript.start(%w(f), :shell => shell)
end
end
end
describe "#help" do
def shell
@shell ||= Thor::Base.shell.new
end
describe "on general" do
before do
@content = capture(:stdout) { MyScript.help(shell) }
end
it "provides useful help info for the help method itself" do
expect(@content).to match(/help \[COMMAND\]\s+# Describe available commands/)
end
it "provides useful help info for a method with params" do
expect(@content).to match(/animal TYPE\s+# horse around/)
end
it "uses the maximum terminal size to show commands" do
expect(@shell).to receive(:terminal_width).and_return(80)
content = capture(:stdout) { MyScript.help(shell) }
expect(content).to match(/aaa\.\.\.$/)
end
it "provides description for commands from classes in the same namespace" do
expect(@content).to match(/baz\s+# do some bazing/)
end
it "shows superclass commands" do
content = capture(:stdout) { MyChildScript.help(shell) }
expect(content).to match(/foo BAR \s+# do some fooing/)
end
it "shows class options information" do
content = capture(:stdout) { MyChildScript.help(shell) }
expect(content).to match(/Options\:/)
expect(content).to match(/\[\-\-param=N\]/)
end
it "injects class arguments into default usage" do
content = capture(:stdout) { Scripts::MyScript.help(shell) }
expect(content).to match(/zoo ACCESSOR \-\-param\=PARAM/)
end
end
describe "for a specific command" do
it "provides full help info when talking about a specific command" do
expect(capture(:stdout) { MyScript.command_help(shell, "foo") }).to eq(<<-END)
Usage:
thor my_script:foo BAR
Options:
[--force] # Force to do some fooing
do some fooing
This is more info!
Everyone likes more info!
END
end
it "raises an error if the command can't be found" do
expect do
MyScript.command_help(shell, "unknown")
end.to raise_error(Thor::UndefinedCommandError, 'Could not find command "unknown" in "my_script" namespace.')
end
it "normalizes names before claiming they don't exist" do
expect(capture(:stdout) { MyScript.command_help(shell, "name-with-dashes") }).to match(/thor my_script:name-with-dashes/)
end
it "uses the long description if it exists" do
expect(capture(:stdout) { MyScript.command_help(shell, "long_description") }).to eq(<<-HELP)
Usage:
thor my_script:long_description
Description:
This is a really really really long description. Here you go. So very long.
It even has two paragraphs.
HELP
end
it "doesn't assign the long description to the next command without one" do
expect(capture(:stdout) do
MyScript.command_help(shell, "name_with_dashes")
end).not_to match(/so very long/i)
end
end
describe "instance method" do
it "calls the class method" do
expect(capture(:stdout) { MyScript.start(%w(help)) }).to match(/Commands:/)
end
it "calls the class method" do
expect(capture(:stdout) { MyScript.start(%w(help foo)) }).to match(/Usage:/)
end
end
context "with required class_options" do
let(:klass) do
Class.new(Thor) do
class_option :foo, :required => true
desc "bar", "do something"
def bar; end
end
end
it "shows the command help" do
content = capture(:stdout) { klass.start(%w(help)) }
expect(content).to match(/Commands:/)
end
end
end
describe "subcommands" do
it "triggers a subcommand help when passed --help" do
parent = Class.new(Thor)
child = Class.new(Thor)
parent.desc "child", "child subcommand"
parent.subcommand "child", child
parent.desc "dummy", "dummy"
expect(child).to receive(:help).with(anything, anything)
parent.start ["child", "--help"]
end
end
describe "when creating commands" do
it "prints a warning if a public method is created without description or usage" do
expect(capture(:stdout) do
klass = Class.new(Thor)
klass.class_eval "def hello_from_thor; end"
end).to match(/\[WARNING\] Attempted to create command "hello_from_thor" without usage or description/)
end
it "does not print if overwriting a previous command" do
expect(capture(:stdout) do
klass = Class.new(Thor)
klass.class_eval "def help; end"
end).to be_empty
end
end
describe "edge-cases" do
it "can handle boolean options followed by arguments" do
klass = Class.new(Thor) do
method_option :loud, :type => :boolean
desc "hi NAME", "say hi to name"
def hi(name)
name = name.upcase if options[:loud]
"Hi #{name}"
end
end
expect(klass.start(%w(hi jose))).to eq("Hi jose")
expect(klass.start(%w(hi jose --loud))).to eq("Hi JOSE")
expect(klass.start(%w(hi --loud jose))).to eq("Hi JOSE")
end
it "passes through unknown options" do
klass = Class.new(Thor) do
desc "unknown", "passing unknown options"
def unknown(*args)
args
end
end
expect(klass.start(%w(unknown foo --bar baz bat --bam))).to eq(%w(foo --bar baz bat --bam))
expect(klass.start(%w(unknown --bar baz))).to eq(%w(--bar baz))
end
it "does not pass through unknown options with strict args" do
klass = Class.new(Thor) do
strict_args_position!
desc "unknown", "passing unknown options"
def unknown(*args)
args
end
end
expect(klass.start(%w(unknown --bar baz))).to eq([])
expect(klass.start(%w(unknown foo --bar baz))).to eq(%w(foo))
end
it "strict args works in the inheritance chain" do
parent = Class.new(Thor) do
strict_args_position!
end
klass = Class.new(parent) do
desc "unknown", "passing unknown options"
def unknown(*args)
args
end
end
expect(klass.start(%w(unknown --bar baz))).to eq([])
expect(klass.start(%w(unknown foo --bar baz))).to eq(%w(foo))
end
it "does not check the default type when check_default_type! is not called" do
expect do
Class.new(Thor) do
option "bar", :type => :numeric, :default => "foo"
end
end.not_to raise_error
end
it "checks the default type when check_default_type! is called" do
expect do
Class.new(Thor) do
check_default_type!
option "bar", :type => :numeric, :default => "foo"
end
end.to raise_error(ArgumentError, "Expected numeric default value for '--bar'; got \"foo\" (string)")
end
it "send as a command name" do
expect(MyScript.start(%w(send))).to eq(true)
end
end
end
| 35.572222 | 141 | 0.636928 |
b95b9f93306743c3e726fa7c77f237ae10f9a511 | 3,261 | # encoding: utf-8
require 'yaml'
# Builds the gem specification from gemspec.yml, falling back to values
# derived from the repository layout and the library's VERSION constant.
Gem::Specification.new do |gemspec|
  root = File.dirname(__FILE__)
  lib_dir = File.join(root,'lib')
  # All git-tracked files; used as the universe for every glob below.
  files = `git ls-files`.split($/)

  # Restrict an Array of paths or a single glob String to git-tracked files.
  filter_files = lambda { |paths|
    files & case paths
            when Array
              paths
            when String
              Dir[paths]
            end
  }

  # Where to find the version when gemspec.yml does not declare one.
  version = {
    :file => 'nmap/version',
    :constant => 'Nmap::VERSION'
  }

  # Sensible defaults, overridable by any key present in gemspec.yml.
  defaults = {
    'name' => File.basename(root),
    'files' => files,
    'require_paths' => ['ext', 'lib'].select { |dir| File.directory?(dir) },
    'executables' => filter_files['bin/*'].map { |path| File.basename(path) },
    'test_files' => filter_files['{test/{**/}*_test.rb,spec/{**/}*_spec.rb}'],
    'doc_files' => filter_files['*.{txt,rdoc,md,markdown,tt,textile}'],
    'extra_doc_files' => filter_files['*.{txt,rdoc,md,markdown,tt,textile}']
  }

  metadata = defaults.merge(YAML.load_file('gemspec.yml'))

  gemspec.name = metadata['name']
  gemspec.version = if metadata['version']
                      metadata['version']
                    else
                      # Load the library and read its VERSION constant.
                      $LOAD_PATH << lib_dir unless $LOAD_PATH.include?(lib_dir)
                      require version[:file]
                      eval(version[:constant])
                    end
  # summary and description each fall back to the other when only one is set.
  gemspec.summary = metadata.fetch('summary',metadata['description'])
  gemspec.description = metadata.fetch('description',metadata['summary'])
  gemspec.licenses = Array(metadata['license'])
  gemspec.authors = Array(metadata['authors'])
  gemspec.email = metadata['email']
  gemspec.homepage = metadata['homepage']
  gemspec.require_paths = Array(metadata['require_paths'])
  gemspec.files = filter_files[metadata['files']]
  gemspec.files += Array(metadata['generated_files'])
  gemspec.executables = metadata['executables']
  gemspec.extensions = metadata['extensions']
  # default_executable= was removed from RubyGems 1.7+.
  if Gem::VERSION < '1.7.'
    gemspec.default_executable = gemspec.executables.first
  end
  gemspec.test_files = filter_files[metadata['test_files']]
  gemspec.extra_rdoc_files = Array(metadata['extra_doc_files'])
  gemspec.post_install_message = metadata['post_install_message']
  gemspec.requirements = metadata['requirements']

  if gemspec.respond_to?(:required_ruby_version=)
    gemspec.required_ruby_version = metadata['required_ruby_version']
  end
  if gemspec.respond_to?(:required_rubygems_version=)
    # BUG FIX: previously read the 'required_ruby_version' key here, which
    # silently applied the Ruby requirement as the RubyGems requirement and
    # ignored any 'required_rubygems_version' entry in gemspec.yml.
    gemspec.required_rubygems_version = metadata['required_rubygems_version']
  end

  # Accepts either an Array of version constraints or a comma-separated
  # String ("~> 1.0, >= 1.0.2") and normalizes to an Array of Strings.
  parse_versions = lambda { |versions|
    case versions
    when Array
      versions.map { |v| v.to_s }
    when String
      versions.split(/,\s*/)
    end
  }

  if metadata['dependencies']
    metadata['dependencies'].each do |name,versions|
      gemspec.add_dependency(name,parse_versions[versions])
    end
  end
  if metadata['runtime_dependencies']
    metadata['runtime_dependencies'].each do |name,versions|
      gemspec.add_runtime_dependency(name,parse_versions[versions])
    end
  end
  if metadata['development_dependencies']
    metadata['development_dependencies'].each do |name,versions|
      gemspec.add_development_dependency(name,parse_versions[versions])
    end
  end
end
| 30.764151 | 83 | 0.642748 |
61d1155f2cc08b110deeb453705c52d4cc0ba350 | 1,149 | $LOAD_PATH.unshift File.expand_path '../lib', __FILE__
require 'active_ingredients/version'
# Gem specification for active-ingredients: small value objects that map
# onto attributes of larger Active Model classes.
Gem::Specification.new do |s|
  s.name = 'active-ingredients'
  s.summary = 'Simple Value Objects For Active Model'
  s.description = 'Compose big Active Model classes with smaller ingredients that map directly to values on the original'
  s.version = ActiveIngredients::VERSION
  s.date = '2014-03-13'
  s.homepage = 'http://www.dablweb.com'
  s.authors = [ 'Brett Richardson' ]
  s.email = [ '[email protected]' ]
  s.require_path = 'lib'
  # Ship everything under lib/ plus the top-level project files.
  s.files = Dir.glob( 'lib/**/*' ) + %w{ Gemfile Guardfile MIT-LICENSE README.md }
  s.add_development_dependency 'rails', '~> 4.0' # For testing form helpers
  s.add_development_dependency 'bundler'
  s.add_development_dependency 'sqlite3'
  s.add_development_dependency 'rake'
  s.add_development_dependency 'guard'
  s.add_development_dependency 'guard-rspec'
  s.add_development_dependency 'guard-bundler'
  s.add_development_dependency 'growl'
  s.add_development_dependency 'fuubar'
  s.add_development_dependency 'pry'
  s.add_development_dependency 'pry-nav'
end
| 39.62069 | 121 | 0.731941 |
1d1c921f603d7f8e3239f3c20fb5b19aef477517 | 2,665 | require 'test_helper'
# Integration test walking the full password-reset flow: requesting a reset
# email, visiting the edit link under valid/invalid conditions, and
# submitting new passwords, plus the expired-token path.
class PasswordResetsTest < ActionDispatch::IntegrationTest
  def setup
    ActionMailer::Base.deliveries.clear
    @user = users(:michael)
  end
  test "password resets" do
    get new_password_reset_path
    assert_template 'password_resets/new'
    # Invalid submission: blank email re-renders the form with a flash.
    post password_resets_path, params: { password_reset: { email: "" } }
    assert_not flash.empty?
    assert_template 'password_resets/new'
    # Valid submission: generates a new reset digest and sends one email.
    post password_resets_path,
         params: { password_reset: { email: @user.email } }
    assert_not_equal @user.reset_digest, @user.reload.reset_digest
    assert_equal 1, ActionMailer::Base.deliveries.size
    assert_not flash.empty?
    assert_redirected_to root_url
    user = assigns(:user)
    # Wrong email on the edit link is rejected.
    get edit_password_reset_path(user.reset_token, email: "")
    assert_redirected_to root_url
    # Inactive users cannot use the reset link.
    user.toggle!(:activated)
    get edit_password_reset_path(user.reset_token, email: user.email)
    assert_redirected_to root_url
    user.toggle!(:activated)
    # Wrong token with the right email is rejected.
    get edit_password_reset_path('wrong token', email: user.email)
    assert_redirected_to root_url
    # Right token and email shows the edit form with the email as a hidden field.
    get edit_password_reset_path(user.reset_token, email: user.email)
    assert_template 'password_resets/edit'
    assert_select "input[name=email][type=hidden][value=?]", user.email
    # Mismatched password and confirmation shows errors.
    patch password_reset_path(user.reset_token),
          params: { email: user.email,
                    user: { password: "foobaz",
                            password_confirmation: "barquux" } }
    assert_select 'div#error_explanation'
    # Empty password also shows errors.
    patch password_reset_path(user.reset_token),
          params: { email: user.email,
                    user: { password: "",
                            password_confirmation: "" } }
    assert_select 'div#error_explanation'
    # Valid password logs the user in and clears the reset digest.
    patch password_reset_path(user.reset_token),
          params: { email: user.email,
                    user: { password: "foobaz",
                            password_confirmation: "foobaz" } }
    assert is_logged_in?
    assert_not flash.empty?
    assert_redirected_to user
    user.reload
    assert_nil user.reset_digest
  end
  test "expired token" do
    get new_password_reset_path
    post password_resets_path,
         params: { password_reset: { email: @user.email } }
    @user = assigns(:user)
    # Backdate the reset beyond the expiry window (2 hours in the app).
    @user.update_attribute(:reset_sent_at, 3.hours.ago)
    patch password_reset_path(@user.reset_token),
          params: { email: @user.email,
                    user: { password: "foobar",
                            password_confirmation: "foobar" } }
    assert_response :redirect
    follow_redirect!
    assert_match /.*expired.*/i, response.body
  end
end
| 32.901235 | 72 | 0.657036 |
21ede51406af4e127815a61d3fbc01ba2880201e | 1,101 | require "application_system_test_case"
# Browser-driven CRUD test for the comments scaffold, exercising the index,
# create, update and destroy flows end to end.
class CommentsTest < ApplicationSystemTestCase
  setup do
    @comment = comments(:one)
  end
  test "visiting the index" do
    visit comments_url
    assert_selector "h1", text: "Comments"
  end
  test "should create comment" do
    visit comments_url
    click_on "New comment"
    # Reuse the fixture's values so validations pass.
    fill_in "Article", with: @comment.article_id
    fill_in "Body", with: @comment.body
    fill_in "Title", with: @comment.title
    click_on "Create Comment"
    assert_text "Comment was successfully created"
    click_on "Back"
  end
  test "should update Comment" do
    visit comment_url(@comment)
    click_on "Edit this comment", match: :first
    fill_in "Article", with: @comment.article_id
    fill_in "Body", with: @comment.body
    fill_in "Title", with: @comment.title
    click_on "Update Comment"
    assert_text "Comment was successfully updated"
    click_on "Back"
  end
  test "should destroy Comment" do
    visit comment_url(@comment)
    click_on "Destroy this comment", match: :first
    assert_text "Comment was successfully destroyed"
  end
end
| 23.934783 | 52 | 0.708447 |
62ab8c8b9015d596ddb00e321be525e749481365 | 712 | require 'test_helper'
# Verifies each static page responds successfully and renders the expected
# "<page> | <base title>" document title (home uses the bare base title).
class StaticPagesControllerTest < ActionDispatch::IntegrationTest
  def setup
    # Shared suffix asserted in every title check below.
    @base_title = "Ruby on Rails Tutorial Sample App"
  end
  test "should get home" do
    get root_path
    assert_response :success
    assert_select "title", "#{@base_title}"
  end
  test "should get help" do
    get help_path
    assert_response :success
    assert_select "title", "Help | #{@base_title}"
  end
  test "should get about" do
    get about_path
    assert_response :success
    assert_select "title", "About | #{@base_title}"
  end
  test "should get Contact" do
    get contact_path
    assert_response :success
    assert_select "title", "Contact | #{@base_title}"
  end
end
| 20.941176 | 65 | 0.686798 |
e9bc2c39d8b195c4b62af7c41d701dd3dec6dfd8 | 945 | class Exploitdb < Formula
desc "The official Exploit Database"
homepage "https://www.exploit-db.com/"
url "https://github.com/offensive-security/exploitdb.git",
:tag => "2020-01-11",
:revision => "d3ca8599713572b6fe9d2736aafddd9b6741b065"
version "2020-01-11"
head "https://github.com/offensive-security/exploitdb.git"
bottle :unneeded
def install
inreplace "searchsploit",
"rc_file=\"\"", "rc_file=\"#{etc}/searchsploit_rc\""
optpath = opt_share/"exploitdb"
inreplace ".searchsploit_rc" do |s|
s.gsub! "\"/opt/exploitdb\"", optpath
s.gsub! "\"/opt/exploitdb-papers\"", "#{optpath}-papers"
end
bin.install "searchsploit"
etc.install ".searchsploit_rc" => "searchsploit_rc"
pkgshare.install %w[.git exploits files_exploits.csv files_shellcodes.csv
shellcodes]
end
test do
system "#{bin}/searchsploit", "sendpage"
end
end
| 28.636364 | 77 | 0.65291 |
621119a6381f40477392a43069e809ecbb8a019d | 3,782 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Attempt to read encrypted secrets from `config/secrets.yml.enc`.
# Requires an encryption key in `ENV["RAILS_MASTER_KEY"]` or
# `config/secrets.yml.key`.
config.read_encrypted_secrets = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "sample_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.108696 | 102 | 0.757271 |
d5d7a006ec74db734679a519f6d45a023ab9b173 | 679 |
# CocoaPods spec for the iOS native module of the react-native-chartboost
# bridge; consumed by React Native projects via their Podfile.
Pod::Spec.new do |s|
  s.name         = "RNReactNativeChartboost"
  s.version      = "1.0.0"
  s.summary      = "RNReactNativeChartboost"
  s.description  = <<-DESC
                  RNReactNativeChartboost
                   DESC
  s.homepage     = ""
  s.license      = "MIT"
  # s.license    = { :type => "MIT", :file => "FILE_LICENSE" }
  s.author       = { "author" => "[email protected]" }
  s.platform     = :ios, "7.0"
  # NOTE(review): :tag => "master" is a branch name, not a tag — confirm the
  # intended source pinning.
  s.source       = { :git => "https://github.com/author/RNReactNativeChartboost.git", :tag => "master" }
  s.source_files = "RNReactNativeChartboost/**/*.{h,m}"
  s.requires_arc = true
  s.dependency "React"
  #s.dependency "others"
end
| 28.291667 | 104 | 0.55081 |
6106b654d9f03de7e8c6e62e9be7983ab78521d7 | 459 | require 'action_controller'
require 'action_view'
# Lightweight stand-in for an ActionView view context: mixes in the
# JavaScript helper modules under test and stubs the controller-provided
# hooks (url_for and the CSRF methods) that those helpers call.
class TestView
  include ActionView::Helpers::PrototypeHelper
  include ActionView::Helpers::ScriptaculousHelper
  include ActionView::Helpers::JavaScriptHelper
  include ActionView::Helpers::TagHelper

  # Every URL resolves to the same mock address, keeping output predictable.
  def url_for(_options)
    "http://somemockurl.com"
  end

  # CSRF protection is disabled for these tests.
  def protect_against_forgery?
    false
  end

  # Fixed token so any generated forgery-protection markup is deterministic.
  def request_forgery_protection_token
    "my_request_forgery_protection_token"
  end
end
9150084380704af062c3668a073a4048d9c916ef | 4,098 | # frozen_string_literal: true
require 'capybara/selector/filter'
module Capybara
  class Selector
    # A named, reusable bundle of node filters, expression filters and
    # description blocks that selectors can define once and import.
    class FilterSet
      attr_reader :node_filters, :expression_filters

      # Evaluates the definition block in the context of the new set so the
      # DSL methods below (node_filter, expression_filter, describe) apply.
      def initialize(name, &block)
        @name = name
        @node_filters = {}
        @expression_filters = {}
        # Description blocks grouped by kind; unseen kinds default to [].
        @descriptions = Hash.new { |hsh, key| hsh[key] = [] }
        instance_eval(&block)
      end

      # Registers a node filter under one or more names.
      def node_filter(names, *types, **options, &block)
        Array(names).each do |name|
          add_filter(name, Filters::NodeFilter, *types, **options, &block)
        end
      end
      alias_method :filter, :node_filter

      # Registers an expression filter (applied while building the query
      # expression rather than against matched nodes).
      def expression_filter(name, *types, **options, &block)
        add_filter(name, Filters::ExpressionFilter, *types, **options, &block)
      end

      # Stores a description block; `what` selects which group it belongs to.
      def describe(what = nil, &block)
        case what
        when nil
          undeclared_descriptions.push block
        when :node_filters
          node_filter_descriptions.push block
        when :expression_filters
          expression_filter_descriptions.push block
        else
          raise ArgumentError, 'Unknown description type'
        end
      end

      # Concatenates the applicable description blocks' output for the given
      # options, optionally skipping node- or expression-filter descriptions.
      def description(node_filters: true, expression_filters: true, **options)
        opts = options_with_defaults(options)
        description = +''
        description << undeclared_descriptions.map { |desc| desc.call(**opts).to_s }.join
        description << expression_filter_descriptions.map { |desc| desc.call(**opts).to_s }.join if expression_filters
        description << node_filter_descriptions.map { |desc| desc.call(**opts).to_s }.join if node_filters
        description
      end

      # @deprecated Kept only for backwards compatibility.
      def descriptions
        Capybara::Helpers.warn 'DEPRECATED: FilterSet#descriptions is deprecated without replacement'
        [undeclared_descriptions, node_filter_descriptions, expression_filter_descriptions].flatten
      end

      # Copies the filters (optionally only those named in `filters`) and all
      # description blocks from the registered set `name` into this one.
      def import(name, filters = nil)
        filter_selector = filters.nil? ? ->(*) { true } : ->(filter_name, _) { filters.include? filter_name }
        self.class[name].tap do |f_set|
          expression_filters.merge!(f_set.expression_filters.select(&filter_selector))
          node_filters.merge!(f_set.node_filters.select(&filter_selector))
          f_set.undeclared_descriptions.each { |desc| describe(&desc) }
          f_set.expression_filter_descriptions.each { |desc| describe(:expression_filters, &desc) }
          f_set.node_filter_descriptions.each { |desc| describe(:node_filters, &desc) }
        end
        self
      end

      # Global registry of named filter sets.
      class << self
        def all
          @filter_sets ||= {} # rubocop:disable Naming/MemoizedInstanceVariableName
        end

        def [](name)
          all.fetch(name.to_sym) { |set_name| raise ArgumentError, "Unknown filter set (:#{set_name})" }
        end

        def add(name, &block)
          all[name.to_sym] = FilterSet.new(name.to_sym, &block)
        end

        def remove(name)
          all.delete(name.to_sym)
        end
      end

      protected

      def undeclared_descriptions
        @descriptions[:undeclared]
      end

      def node_filter_descriptions
        @descriptions[:node_filters]
      end

      def expression_filter_descriptions
        @descriptions[:expression_filters]
      end

      private

      # Fills in each filter's declared default for any option the caller
      # did not supply (without mutating the caller's hash).
      def options_with_defaults(options)
        options = options.dup
        [expression_filters, node_filters].each do |filters|
          filters.select { |_n, filter| filter.default? }.each do |name, filter|
            options[name] = filter.default unless options.key?(name)
          end
        end
        options
      end

      def add_filter(name, filter_class, *types, matcher: nil, **options, &block)
        types.each { |type| options[type] = true }
        if matcher && options[:default]
          # BUG FIX: previously raised the *string* 'ArgumentError', which
          # makes Kernel#raise fail with TypeError instead of raising the
          # intended ArgumentError with this message.
          raise ArgumentError, ':default option is not supported for filters with a :matcher option'
        end
        filter = filter_class.new(name, matcher, block, **options)
        (filter_class <= Filters::ExpressionFilter ? @expression_filters : @node_filters)[name] = filter
      end
    end
  end
end
| 32.784 | 118 | 0.638604 |
e807161d79f0503306ca5ee6dd14f72b506449b8 | 986 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Rails application definition for the NJ Bus Now REST API.
module NJBusNowRestAPI
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
  end
end
| 41.083333 | 99 | 0.725152 |
e93d97e7bdd63c8805d95ceaac936a9f6172c9eb | 1,107 | cask '[email protected]' do
version '2018.1.0b2,79c3bdce0980'
sha256 :no_check
url "http://beta.unity3d.com/download/79c3bdce0980/MacEditorTargetInstaller/UnitySetup-AppleTV-Support-for-Editor-2018.1.0b2.pkg"
name 'tvOS Build Support'
homepage 'https://unity3d.com/unity/'
pkg 'UnitySetup-AppleTV-Support-for-Editor-2018.1.0b2.pkg'
depends_on cask: '[email protected]'
preflight do
if File.exist? "/Applications/Unity"
FileUtils.move "/Applications/Unity", "/Applications/Unity.temp"
end
if File.exist? "/Applications/Unity-2018.1.0b2"
FileUtils.move "/Applications/Unity-2018.1.0b2", '/Applications/Unity'
end
end
postflight do
if File.exist? '/Applications/Unity'
FileUtils.move '/Applications/Unity', "/Applications/Unity-2018.1.0b2"
end
if File.exist? '/Applications/Unity.temp'
FileUtils.move '/Applications/Unity.temp', '/Applications/Unity'
end
end
uninstall quit: 'com.unity3d.UnityEditor5.x',
delete: '/Applications/Unity-2018.1.0b2/PlaybackEngines/AppleTVSupport'
end
| 30.75 | 131 | 0.71364 |
084fa4047a40476bbf457df131e1954a78a3821b | 1,110 | # frozen_string_literal: true
module Gitlab
  module Ci
    class Config
      module Entry
        ##
        # Entry that represents a configuration of Docker service.
        #
        class Service < Image
          include ::Gitlab::Config::Entry::Validatable
          ALLOWED_KEYS = %i[name entrypoint command alias ports].freeze
          validations do
            # A service may be a plain image name string or a hash config.
            validates :config, hash_or_string: true
            validates :config, allowed_keys: ALLOWED_KEYS
            validates :config, disallowed_keys: %i[ports], unless: :with_image_ports?
            validates :name, type: String, presence: true
            validates :entrypoint, array_of_strings: true, allow_nil: true
            validates :command, array_of_strings: true, allow_nil: true
            validates :alias, type: String, allow_nil: true
            # An alias becomes mandatory once the service exposes ports.
            validates :alias, type: String, presence: true, unless: ->(record) { record.ports.blank? }
          end
          # Network alias for the service container (overrides Kernel#alias keyword lookup).
          def alias
            value[:alias]
          end
          # Command override executed in the service container.
          def command
            value[:command]
          end
        end
      end
    end
  end
end
| 28.461538 | 102 | 0.595495 |
e2ddca8b1d444f594d331616f45e1d48fb7fe035 | 1,100 | RSpec.feature 'Updating currencies settings' do
stub_authorization!
background do
reset_spree_preferences do |config|
config.supported_currencies = 'USD'
config.allow_currency_change = false
config.show_currency_selector = false
end
end
scenario 'allows to update supported currencies' do
visit spree.edit_admin_general_settings_path
# Test initial state
expect(find_field('supported_currencies').value).to eq 'USD'
expect(find('#allow_currency_change')).to_not be_checked
expect(find('#show_currency_selector')).to_not be_checked
# Interact with the form
fill_in 'supported_currencies', with: 'USD,PLN'
find(:css, '#allow_currency_change').set(true)
find(:css, '#show_currency_selector').set(true)
click_button 'Update'
# Test final state
expect(page).to have_content 'General Settings has been successfully updated!'
expect(find_field('supported_currencies').value).to eq 'USD,PLN'
expect(find('#allow_currency_change')).to be_checked
expect(find('#show_currency_selector')).to be_checked
end
end
| 33.333333 | 82 | 0.739091 |
614dcf1eab6088daedba050a82f8ea05978557c3 | 5,696 | # encoding: utf-8
require "logstash/environment"
module LogStash
  # Grab bag of helpers for thread naming/inspection, hash merging and deep
  # cloning shared across Logstash internals.
  module Util
    # Simplified host OS name; only "linux" is special-cased (for the prctl
    # call in set_thread_name below).
    UNAME = case RbConfig::CONFIG["host_os"]
            when /^linux/; "linux"
            else; RbConfig::CONFIG["host_os"]
            end

    # prctl(2) operation code for naming the calling thread on Linux.
    PR_SET_NAME = 15

    # Sets the current thread's name, keeping the Java thread name and the
    # Ruby-side Thread.current[:name] in sync. With a block, the previous
    # Java thread name is restored after the block returns.
    def self.set_thread_name(name)
      previous_name = Java::java.lang.Thread.currentThread.getName() if block_given?
      # Keep java and ruby thread names in sync.
      Java::java.lang.Thread.currentThread.setName(name)
      Thread.current[:name] = name

      if UNAME == "linux"
        require "logstash/util/prctl"
        # PR_SET_NAME's buffer is 16 bytes *including* the trailing NUL, so
        # pass at most 15 characters. (Fixes the previous [0..16] slice,
        # which took 17 characters and relied on the kernel truncating.)
        LibC.prctl(PR_SET_NAME, name[0, 15], 0, 0, 0)
      end

      if block_given?
        begin
          yield
        ensure
          set_thread_name(previous_name)
        end
      end
    end # def set_thread_name

    # Records which plugin the current thread is working on behalf of
    # (surfaced later by thread_info).
    def self.set_thread_plugin(plugin)
      Thread.current[:plugin] = plugin
    end

    # Builds a diagnostic Hash describing a thread: name, plugin debug info,
    # sanitized backtrace, a best-effort guess at what it is blocked on, etc.
    # NOTE(review): relies on get_thread_id, defined elsewhere — confirm it
    # is available wherever this is called.
    def self.thread_info(thread)
      # When the `thread` is dead, `Thread#backtrace` returns `nil`; fall back to an empty array.
      backtrace = (thread.backtrace || []).map do |line|
        line.sub(LogStash::Environment::LOGSTASH_HOME, "[...]")
      end

      blocked_on = case backtrace.first
                   when /in `push'/ then "blocked_on_push"
                   when /(?:pipeline|base).*pop/ then "waiting_for_events"
                   else nil
                   end

      {
        "thread_id" => get_thread_id(thread), # might be nil for dead threads
        "name" => thread[:name],
        "plugin" => (thread[:plugin] ? thread[:plugin].debug_info : nil),
        "backtrace" => backtrace,
        "blocked_on" => blocked_on,
        "status" => thread.status,
        "current_call" => backtrace.first
      }
    end

    # Merge hash 'src' into 'dst' nondestructively
    #
    # Duplicate keys will become array values
    # (arrays are merged as sets, i.e. without duplicates).
    #
    # [ src["foo"], dst["foo"] ]
    def self.hash_merge(dst, src)
      src.each do |name, svalue|
        if dst.include?(name)
          dvalue = dst[name]
          if dvalue.is_a?(Hash) && svalue.is_a?(Hash)
            dvalue = hash_merge(dvalue, svalue)
          elsif svalue.is_a?(Array)
            if dvalue.is_a?(Array)
              # merge arrays without duplicates.
              dvalue |= svalue
            else
              dvalue = [dvalue] | svalue
            end
          else
            if dvalue.is_a?(Array)
              dvalue << svalue unless dvalue.include?(svalue)
            else
              dvalue = [dvalue, svalue] unless dvalue == svalue
            end
          end
          dst[name] = dvalue
        else
          # dst doesn't have this key, just set it.
          dst[name] = svalue
        end
      end
      return dst
    end # def self.hash_merge

    # Merge hash 'src' into 'dst' nondestructively
    #
    # Duplicate keys will become array values
    # Arrays merged will simply be appended.
    #
    # [ src["foo"], dst["foo"] ]
    def self.hash_merge_with_dups(dst, src)
      src.each do |name, svalue|
        if dst.include?(name)
          dvalue = dst[name]
          if dvalue.is_a?(Hash) && svalue.is_a?(Hash)
            dvalue = hash_merge(dvalue, svalue)
          elsif svalue.is_a?(Array)
            if dvalue.is_a?(Array)
              # append, keeping duplicates.
              dvalue += svalue
            else
              dvalue = [dvalue] + svalue
            end
          else
            if dvalue.is_a?(Array)
              dvalue << svalue unless dvalue.include?(svalue)
            else
              dvalue = [dvalue, svalue] unless dvalue == svalue
            end
          end
          dst[name] = dvalue
        else
          # dst doesn't have this key, just set it.
          dst[name] = svalue
        end
      end
      return dst
    end # def self.hash_merge

    # Folds any number of hashes together (duplicates appended) into a new Hash.
    def self.hash_merge_many(*hashes)
      dst = {}
      hashes.each do |hash|
        hash_merge_with_dups(dst, hash)
      end
      return dst
    end # def hash_merge_many

    # normalize method definition based on platform.
    # normalize is used to convert an object create through
    # json deserialization from JrJackson in :raw mode to pure Ruby
    # to support these pure Ruby object monkey patches.
    # see logstash/json.rb and logstash/java_integration.rb
    #
    # Guarded so this module also loads under plain MRI, where the pure-Ruby
    # helpers below remain usable; normalize() is only meaningful on JRuby.
    require "java" if RUBY_PLATFORM == "java"

    # recursively convert any Java LinkedHashMap and ArrayList to pure Ruby.
    # will not recurse into pure Ruby objects. Pure Ruby object should never
    # contain LinkedHashMap and ArrayList since these are only created at
    # initial deserialization, anything after (deeper) will be pure Ruby.
    def self.normalize(o)
      case o
      when Java::JavaUtil::LinkedHashMap
        o.inject({}){|r, (k, v)| r[k] = normalize(v); r}
      when Java::JavaUtil::ArrayList
        o.map{|i| normalize(i)}
      else
        o
      end
    end

    # Recursively converts Symbol keys and Symbol values to Strings.
    def self.stringify_symbols(o)
      case o
      when Hash
        o.inject({}){|r, (k, v)| r[k.is_a?(Symbol) ? k.to_s : k] = stringify_symbols(v); r}
      when Array
        o.map{|i| stringify_symbols(i)}
      when Symbol
        o.to_s
      else
        o
      end
    end

    # Take a instance reference and return the name of the class
    # stripping all the modules.
    #
    # @param [Object] The object to return the class)
    # @return [String] The name of the class
    def self.class_name(instance)
      instance.class.name.split("::").last
    end

    # Recursively clones hashes and arrays; immutable scalars are returned
    # as-is, and anything unrecognized goes through Marshal round-tripping.
    # The LogStash::Codecs::Base / LogStash::Timestamp branches resolve those
    # constants lazily, so they only need to be loaded when such values occur.
    def self.deep_clone(o)
      case o
      when Hash
        o.inject({}) {|h, (k,v)| h[k] = deep_clone(v); h }
      when Array
        o.map {|v| deep_clone(v) }
      when Integer, Symbol, IO, TrueClass, FalseClass, NilClass
        o
      when LogStash::Codecs::Base
        o.clone
      when String
        o.clone #need to keep internal state e.g. frozen
      when LogStash::Timestamp
        o.clone
      else
        Marshal.load(Marshal.dump(o))
      end
    end
  end # module LogStash::Util
end
| 27.516908 | 95 | 0.610077 |
6ab9aa46c63d502cda72865402f7f394b0491e28 | 5,683 | =begin
example usage:
fail "this script requires the cli_builder util from the repo" unless Script.exists?("cli_builder")
load $script_dir + "cli_builder.rb"
CLI.of "map" do
# a general help description
describe(self, "a utility for working with rooms")
# describes a given command
describe(:id, "output room id")
# implement the actual command
def id()
respond Room.current.id
end
# you should keep your cli wrapper light
# and delegate to service objects!
def search(*param)
MapSearch.perform param.join(" ")
end
end
further explanation:
1. CLI.of generates all the necessary boilerplate to safely make a persistent cli script.
2. Commands are "slash" style, in the above example the cli would be registered at `/map`
3. the `help` menu is automagically generated from the `describe` wrappers and some metaprogramming (ewww)
4. check out cli_builder_demo.lic for a working example
=end
# namespace
# Namespace for the slash-command framework, plus the class-level helpers
# used to render the auto-generated help output.
class CLI
  class << self
    # Build (and, by default, attach) a CLI::Builder for the given command.
    def of(*args, &block)
      CLI::Builder.new(*args, &block)
    end

    # Render a usage hint for a method's parameter list: required keyword
    # arguments become "--name=", everything else "[[name]]".
    def describe_parameters(params)
      rendered = params.map do |kind, name|
        kind == :keyreq ? "--#{name}=\n" : "[[#{name}]]"
      end
      rendered.join("\t\t ")
    end

    # Render one help line per described command (Symbol keys only),
    # right-aligning the command name and appending parameter hints when
    # the command takes arguments.
    def describe_commands(dsl, descriptions)
      command_entries = descriptions.select { |key, _| key.is_a?(Symbol) }
      lines = command_entries.map do |command, summary|
        signature = dsl.method(command).parameters
        label = command.to_s.rjust(10)
        if signature.empty?
          "#{label} ... #{summary}"
        else
          "#{label} ... #{summary}\n\t\t#{describe_parameters(signature)}"
        end
      end
      lines.join("\n\t")
    end
  end
end
# Reopens the CLI class defined above so the nested module can also load on
# its own; the constant path (CLI::KWArgs) is unchanged.
class CLI
  # Parses argv-style tokens into a Hash of keyword arguments:
  #   "--flag"      => {flag: true}
  #   "--key=v"     => {key: "v"}
  #   "--key=a,b"   => {key: ["a", "b"]}
  #   "word"        => {word: true}
  module KWArgs
    FLAG_PREFIX = "--"

    # True when the token carries the "--" flag prefix.
    def self.is_flag?(str)
      str.start_with?(FLAG_PREFIX)
    end

    # Records a bare (non-flag) token as a truthy switch.
    def self.parse_command(h, c)
      h[c.to_sym] = true
    end

    # Records a "--name[=value[,value...]]" flag: bare flags are true, a
    # single value stays a String, comma-separated values become an Array.
    def self.parse_flag(h, f)
      (name, val) = f[2..-1].split("=")
      if val.nil?
        h[name.to_sym] = true
      else
        val = val.split(",")
        h[name.to_sym] = val.size == 1 ? val.first : val
      end
    end

    def self.parse(args)
      _parse(args)
    end

    def self._parse(args)
      args.to_a.reduce(Hash.new) do |opts, v|
        # BUG FIX: these helpers live on this module, not on an `Opts`
        # constant; the previous `Opts.parse_flag` / `Opts.parse_command`
        # calls raised NameError the first time any token was parsed.
        if is_flag?(v)
          parse_flag(opts, v)
        else
          parse_command(opts, v)
        end
        opts
      end
    end
  end
end
# Evaluation context for a CLI definition block: public methods defined on
# an instance become the CLI's commands, `describe` records help text, and
# `help` renders the auto-generated help screen.
class CLI::DSL
  def initialize(context:)
    # context is the owning CLI::Builder (provides name/hook for help output).
    @context = context
    @descriptions = {}
  end
  # Unknown commands surface as user-facing CLI errors.
  # NOTE(review): respond_to_missing? is not overridden alongside this, so
  # respond_to? stays accurate for dispatch but method(:x) semantics differ.
  def method_missing(method, *args)
    fail CLI::Error, "/#{@context.name} does not implement command #{method}"
  end
  # Records help text either for the whole CLI (when passed `self`) or for a
  # single named command.
  def describe(command, description)
    if command == self
      @descriptions[self] = description
    else
      @descriptions[command.to_sym] = description
    end
  end
  # Built-in command: prints the generated help screen.
  def help(*rest)
    _respond <<~HELP
      name: <b>#{@context.name}</b>
      hook: #{@context.hook}
      description: #{@descriptions[self] || "no info"}
      commands:\n\t#{CLI.describe_commands(self, @descriptions)}
    HELP
  end
end
#
# parses all cluster ui input
#
class CLI::Builder
  # NOTE(review): inheriting from Exception (not StandardError) means a bare
  # `rescue` will not catch CLI::Error; confirm that is intentional.
  class CLI::Error < Exception; end
  # NOTE(review): HELP_CMDS appears to be unused within this class.
  HELP_CMDS = %w[help info]
  attr_reader :cmd, :pending, :dsl, :hook, :name
  # Wires up a slash-command: evaluates the definition block in a CLI::DSL
  # and (unless attach: false) immediately starts processing input.
  def initialize(cmd, attach: true, debug: false, &block)
    @name = cmd
    @debug = debug
    @cmd = "<c>/#{cmd}"
    @hook = "/cli/#{cmd}"
    @pending = Queue.new
    @dsl = CLI::DSL.new(context: self)
    @dsl.instance_eval(&block)
    self.attach() if attach
  end
  def debug?
    @debug
  end
  # Intercepts input lines that start with this CLI's prefix and queues them
  # for the attach() loop; all other input passes through untouched.
  def add_hook()
    before_dying {UpstreamHook.remove(@hook)}
    UpstreamHook.add(@hook, -> incoming {
      return incoming unless incoming.start_with?(@cmd)
      self.unwrap_error { @pending << incoming }
      return nil
    })
  end
  # Parses one raw input line ("/name cmd args...") into
  # [command_symbol, positional_args, keyword_args_hash].
  def parse_input(raw)
    (command, *args) = raw.slice(@cmd.size+1..-1).split(/\s+/)
    # NOTE(review): when the input carries no command token, `command` is nil
    # and nil.to_sym raises before the `:help if command.nil?` fallback on
    # the next line can run — these two lines look like they should be
    # swapped (nil-check first).
    command = command.to_sym
    command = :help if command.nil?
    @dsl.method_missing(command) unless @dsl.respond_to?(command)
    method = @dsl.method(command)
    # Re-join tokens that were split inside double quotes so a quoted phrase
    # arrives as a single argument.
    stack = []
    parsed = []
    for argv in args
      if argv.include?(%["])
        stack << argv.gsub(%["], "")
        if stack.size > 1
          parsed << stack.slice(0..-1).join(" ")
          stack.clear
        end
      elsif not stack.empty?
        stack << argv
      else
        parsed << argv
      end
    end
    fail CLI::Error, "unterminated quote in argv" unless stack.empty?
    kwargs, vars = parsed.partition {|argv| CLI::KWArgs.is_flag?(argv) }
    kwargs = CLI::KWArgs.parse(kwargs)
    # Validate the parsed argv against the target method's signature.
    # NOTE(review): the block params |kind, param, i| auto-splat each
    # [kind, name] pair, so `kind` is always a bare Symbol here; the array
    # patterns below ([:key, _], [:rest, _], [:opt, _name], [:req, name])
    # can therefore never match, and any non-:keyreq parameter falls through
    # to the `in _` arm and raises. Likely intended: `case [kind, param]`.
    method.parameters.each.with_index do |kind, param, i|
      respond("validating(#{param}) -> #{kind}") if debug?
      case kind
      in :keyreq
        fail CLI::Error, "--#{param}= is required" unless kwargs.key?(param)
      in [:key, _]
      in [:rest, _]
      in [:opt, _name]
        # silence is golden
      in [:req, name]
        fail CLI::Error, "[[#{name}]] is required as argument #{param}" if vars[param].nil?
      in _
        fail Exception, "unhandled argv(#{kind}, #{param})"
      end
    end
    return [command, vars, kwargs]
  end
  # Runs the block, reporting CLI errors to the user and logging anything
  # else; the broad rescue is deliberate so one bad command cannot kill the
  # long-running script.
  def unwrap_error(context =nil)
    begin
      yield
    rescue CLI::Error => e
      _respond "<b>error: #{e.message}</b>"
    rescue Exception => e
      respond e
      respond "context: #{context}" unless context.nil?
      respond e.backtrace
    end
  end
  # Registers the upstream hook, then loops forever dispatching each queued
  # raw command to the DSL (splatting kwargs only when present).
  def attach()
    add_hook()
    loop do
      wait_while do @pending.empty? end
      raw_command = @pending.shift
      unwrap_error(raw_command) {
        (command, vars, kwargs) = self.parse_input(raw_command)
        if kwargs.empty?
          @dsl.send(command, *vars)
        else
          @dsl.send(command, *vars, **kwargs)
        end
      }
    end
  end
end | 25.257778 | 110 | 0.598627 |
6a6cd8d8c38f865497d9d8760cb932d2a8d02173 | 1,811 | # frozen_string_literal: true
# this file is managed by dry-rb/devtools project
lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'dry/types/version'
Gem::Specification.new do |spec|
  # Gem identity
  spec.name = 'dry-types'
  spec.authors = ["Piotr Solnica"]
  spec.email = ["[email protected]"]
  spec.license = 'MIT'
  spec.version = Dry::Types::VERSION.dup # dup: the VERSION constant is frozen

  spec.summary = "Type system for Ruby supporting coercions, constraints and complex types like structs, value objects, enums etc"
  spec.description = spec.summary
  spec.homepage = 'https://dry-rb.org/gems/dry-types'
  spec.files = Dir["CHANGELOG.md", "LICENSE", "README.md", "dry-types.gemspec", "lib/**/*"]
  spec.bindir = 'bin'
  spec.executables = []
  spec.require_paths = ['lib']

  # RubyGems metadata (shown on rubygems.org)
  spec.metadata['allowed_push_host'] = 'https://rubygems.org'
  spec.metadata['changelog_uri'] = 'https://github.com/dry-rb/dry-types/blob/master/CHANGELOG.md'
  spec.metadata['source_code_uri'] = 'https://github.com/dry-rb/dry-types'
  spec.metadata['bug_tracker_uri'] = 'https://github.com/dry-rb/dry-types/issues'

  spec.required_ruby_version = ">= 2.5.0"

  # to update dependencies edit project.yml
  spec.add_runtime_dependency "concurrent-ruby", "~> 1.0"
  spec.add_runtime_dependency "dry-container", "~> 0.3"
  spec.add_runtime_dependency "dry-core", "~> 0.5", ">= 0.5"
  spec.add_runtime_dependency "dry-inflector", "~> 0.1", ">= 0.1.2"
  spec.add_runtime_dependency "dry-logic", "~> 1.0", ">= 1.0.2"

  spec.add_development_dependency "bundler"
  spec.add_development_dependency "dry-monads", "~> 1.0"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "yard"
end
| 42.116279 | 136 | 0.686913 |
08ec0cebd90005fd2f12ab0407bf9be1fa8e81ea | 94 | class HardWorker
include Sidekiq::Worker
# Sidekiq job entry point; receives whatever arguments were queued via
# HardWorker.perform_async(...).
def perform(*args)
  # Do something
end
end
| 11.75 | 25 | 0.702128 |
08cb406b691fa98978b869c6bbd441c895eef8d9 | 259 | class CreateListsMovies < ActiveRecord::Migration[6.1]
# Creates the lists_movies join table for the many-to-many relationship
# between movies and lists, with NOT NULL foreign keys on both sides.
def change
  create_table :lists_movies do |t|
    t.belongs_to :movie, null: false, foreign_key: true
    t.belongs_to :list, null: false, foreign_key: true
    t.timestamps
  end
end
end
| 23.545455 | 57 | 0.694981 |
ff0d1d3cf8aad3f3e54bf0bd3e4891783adc0c1d | 855 | # frozen_string_literal: true
namespace :analytics do
  root to: 'analytics#index'

  # Value Stream (cycle) analytics — routed only while the feature is enabled.
  constraints(-> (req) { Gitlab::Analytics.cycle_analytics_enabled? }) do
    resource :cycle_analytics, only: :show, path: 'value_stream_analytics'
    scope module: :cycle_analytics, as: 'cycle_analytics', path: 'value_stream_analytics' do
      resources :stages, only: [:index, :create, :update, :destroy] do
        member do
          get :duration_chart
          get :median
          get :records
        end
      end
      resource :summary, controller: :summary, only: :show
      get '/time_summary' => 'summary#time_summary'
    end
    # Legacy path kept as a redirect to the renamed feature.
    get '/cycle_analytics', to: redirect('-/analytics/value_stream_analytics')
  end

  scope :type_of_work do
    resource :tasks_by_type, controller: :tasks_by_type, only: :show do
      get :top_labels
    end
  end
end
| 30.535714 | 92 | 0.673684 |
bfe4106e88fadf2343a3d08124bbb171113076bb | 642 | PassbookRailsExample::Application.routes.draw do
# Pass type identifiers are reverse-DNS strings ("pass.com.example.x"), so
# the default route-segment matcher (which stops at ".") must be overridden.
PASS_TYPE_IDENTIFIER_REGEXP = /([\w\d]\.?)+/
namespace :passbook, path: "passbook/v1" do
  # Use the constant above (previously the identical literal was duplicated
  # inline, leaving the constant unused).
  constraints(pass_type_identifier: PASS_TYPE_IDENTIFIER_REGEXP) do
    get '/passes/:pass_type_identifier/:serial_number' => 'passes#show'
    get '/devices/:device_library_identifier/registrations/:pass_type_identifier' => 'registrations#index'
    post '/devices/:device_library_identifier/registrations/:pass_type_identifier/:serial_number' => 'registrations#create'
    delete '/devices/:device_library_identifier/registrations/:pass_type_identifier' => 'registrations#destroy'
  end
end
end
| 45.857143 | 125 | 0.746106 |
87e3ef45845bfe4a448f074156bc88914b762445 | 1,666 | module Fog
module Network
class OpenStack
class Real
# Updates a Neutron VPN service via PUT /vpn/vpnservices/:id.
#
# Only the mutable attributes actually present in +options+ are sent
# (:name, :description, :admin_state_up); absent keys are left untouched.
# Expects HTTP 200 from the API.
def update_vpn_service(vpn_service_id, options = {})
  data = {'vpnservice' => {}}
  vanilla_options = [:name, :description, :admin_state_up]
  vanilla_options.select { |o| options.key?(o) }.each do |key|
    data['vpnservice'][key] = options[key]
  end
  request(
    :body => Fog::JSON.encode(data),
    :expects => 200,
    :method => 'PUT',
    :path => "vpn/vpnservices/#{vpn_service_id}"
  )
end
end
class Mock
# In-memory stand-in for Real#update_vpn_service.
#
# NOTE(review): unlike Real, which only sends keys present in +options+,
# this assigns subnet_id/router_id/name/description/admin_state_up/tenant_id
# unconditionally, so omitted options overwrite existing values with nil —
# confirm whether that divergence from Real is intended.
def update_vpn_service(vpn_service_id, options = {})
  response = Excon::Response.new
  # Single '=' is intentional: assign-and-test the lookup result.
  if vpn_service = list_vpn_services.body['vpnservices'].detect { |instance| instance['id'] == vpn_service_id }
    vpn_service['id'] = vpn_service_id
    vpn_service['subnet_id'] = options[:subnet_id]
    vpn_service['router_id'] = options[:router_id]
    vpn_service['name'] = options[:name]
    vpn_service['description'] = options[:description]
    vpn_service['status'] = 'ACTIVE'
    vpn_service['admin_state_up'] = options[:admin_state_up]
    vpn_service['tenant_id'] = options[:tenant_id]
    vpn_service['external_v4_ip'] = '1.2.3.4' # canned values for tests
    vpn_service['external_v6_ip'] = '::1'
    response.body = {'vpnservice' => vpn_service}
    response.status = 200
    response
  else
    raise Fog::Network::OpenStack::NotFound
  end
end
end
end
end
end
| 35.446809 | 119 | 0.538415 |
acb6d18aa26f25e021bb0e3ec3a8e83f0eef53ae | 361 | class Picoc < Formula
desc "C interpreter for scripting"
homepage "https://code.google.com/p/picoc/"
url "https://picoc.googlecode.com/files/picoc-2.1.tar.bz2"
sha256 "bfed355fab810b337ccfa9e3215679d0b9886c00d9cb5e691f7e7363fd388b7e"
def install
  # picoc's Makefile requires -DUNIX_HOST when building for a desktop OS.
  system "make", "CC=#{ENV.cc}", "CFLAGS=#{ENV.cflags} -DUNIX_HOST"
  bin.install "picoc"
end
end
| 30.083333 | 75 | 0.731302 |
1af2d8ddb47c0ef44477c55a1776255143eebe12 | 398 | # frozen_string_literal: true
require 'spec_helper'
support :test_adaptor_helpers
RSpec.describe LedgerSync::Adaptors::Test::Customer::Operations::Update do
include TestAdaptorHelpers
let(:customer) { LedgerSync::Customer.new(ledger_id: '123', name: 'Test') }
it do
instance = described_class.new(resource: customer, adaptor: test_adaptor)
expect(instance).to be_valid
end
end
| 23.411765 | 77 | 0.761307 |
f8b7d6b4859e2977216c28a43a4908a36cc64536 | 507 | RSpec.shared_examples_for 'Msf::DBManager::Adapter' do
if ENV['REMOTE_DB']
before {skip("Not used for remote data service")}
end
context 'CONSTANTS' do
context 'ADAPTER' do
subject(:adapter) {
described_class::ADAPTER
}
it { is_expected.to eq('postgresql') }
end
end
it { is_expected.to respond_to :driver }
it { is_expected.to respond_to :drivers }
it { is_expected.to respond_to :drivers= }
it { is_expected.to respond_to :initialize_adapter }
end | 24.142857 | 54 | 0.676529 |
f845693a59d087f170873d606ddeea601718ffb2 | 158 | # Used by third party farmwares (eg: weed-detection) to mark points on a map.
class GenericPointer < Point
# @return [String]
#   the class name reported during sync — subclass identity is collapsed
#   to the generic "Point" in sync payloads
def name_used_when_syncing
  "Point"
end
end
| 22.571429 | 77 | 0.746835 |
288463e266b5241821a28c3f618c65dfd4d77b65 | 699 | require_relative '../../npi_download'
RSpec.describe "Creating an NPI Registry Query" do
it "is a type of NpiDownload" do
npi_request = NpiDownload.new
expect(npi_request).to be_kind_of(NpiDownload)
end
it "includes the NPI Registry URL in the uri host string" do
npi_request = NpiDownload.new
npi_request.postal_code = '29910'
uri = npi_request.build_uri
expect(uri.host).to include("npiregistry.cms.hhs.gov")
end
it "includes the specified query criteria in the API uri" do
npi_request = NpiDownload.new
npi_request.postal_code = '29910'
uri = npi_request.build_uri
expect(uri.query).to include(npi_request.postal_code)
end
end | 18.891892 | 62 | 0.718169 |
115e31db5132da6e0af6619b733c8a5735a7f43f | 281 | #Environment
require 'httparty' # HTTP client used by the API wrapper
require 'pry'      # interactive debugging console
require_relative './superhero/version'
require_relative './superhero/hero'
require_relative './superhero/cli'
require_relative "./superhero/api"
# API endpoint configuration.
# NOTE(review): the ACCESS_TOKEN defined just below is hard-coded in source —
# consider loading it from an environment variable instead.
BASE_URL = "https://superheroapi.com/api.php/"
ACCESS_TOKEN = "814865975490" | 23.416667 | 46 | 0.775801 |
1d95c65d0d596d16255fa490081e147857728706 | 1,904 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
# Configure public file server for tests with Cache-Control for performance.
config.public_file_server.enabled = true
config.public_file_server.headers = {
'Cache-Control' => "public, max-age=#{1.hour.to_i}"
}
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Raise exceptions instead of rendering exception templates.
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment.
config.action_controller.allow_forgery_protection = false
# Store uploaded files on the local file system in a temporary directory
config.active_storage.service = :test
config.action_mailer.perform_caching = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
config.encoding = 'utf-8'
end
| 38.857143 | 85 | 0.768908 |
5dd871b12b3f1d3c5bdddac611e0521bb337edde | 5,911 | # frozen_string_literal: true
RSpec.describe DHeap, "comparisons" do
describe "with optimized types" do
it "sorts 'small' integers (T_FIXNUM)" do
heap = DHeap.new
heap << 2**4
heap << 2**62 - 1 # max FIXNUM
heap << 2**16
heap << 2**8
heap << 2**2
heap << 2**1
heap << 2**32
expect(heap.pop).to eq 2**1
expect(heap.pop).to eq 2**2
expect(heap.pop).to eq 2**4
expect(heap.pop).to eq 2**8
expect(heap.pop).to eq 2**16
expect(heap.pop).to eq 2**32
expect(heap.pop).to eq 2**62 - 1
expect(heap).to be_empty
end
it "sorts 'big' integers (T_BIGNUM) if they fit in unsigned long long " do
# every bit set at various bit widths
char_min = -(1 << 7)
char_max = (1 << 7) - 1
uchar_max = (1 << 8) - 1
short_min = -(1 << 15)
short_max = (1 << 15) - 1
ushort_max = (1 << 16) - 1
long_min = -(1 << 31)
long_max = (1 << 31) - 1
ulong_max = (1 << 32) - 1
fixnum_min = -(2**(0.size * 8 - 2)) # on a 64bit system: -(2**62)
fixnum_max = (2**(0.size * 8 - 2)) - 1 # on a 64bit system: (2**62)-1
llong_min = -(1 << 63) - 1
llong_max = (1 << 63) - 1
ullong_max = (1 << 64) - 1
# These represent the mantissa so they are the largest *consecutive* ints
# The sign bit is stored separately, so this is the largest absolute val.
flo_mantissa = (1 << 24) - 1
dbl_mantissa = (1 << 53) - 1
heap = DHeap.new
heap << char_max
heap << char_min
heap << long_max
heap << long_min
heap << short_max
heap << short_min
heap << uchar_max
heap << ushort_max
heap << ulong_max
heap << -uchar_max
heap << -ushort_max
heap << -ulong_max
# floating point mantissas
heap << flo_mantissa
heap << -flo_mantissa
heap << dbl_mantissa
heap << -dbl_mantissa
heap << 0.0.next_float
heap << 0
heap << 0.0.prev_float
# automatically convert Integer outside dbl_mantissa: will lose precision
heap << -ullong_max
heap << llong_max + 1
heap << llong_max
heap << llong_min
heap << fixnum_min
heap << fixnum_max
heap << ullong_max + 1
heap << -ullong_max - 1
heap << llong_min - 1
heap << ullong_max
heap << ullong_max + 2.0
heap << -ullong_max - 2.0
heap << (2**129).to_f
heap << -(2**129).to_f
expect(heap.pop).to eq(-(2**129).to_f)
ambiguous = Array.new(3) { heap.pop }
expect(ambiguous).to contain_exactly(
-ullong_max - 2.0,
-ullong_max - 1,
-ullong_max,
)
ambiguous = Array.new(2) { heap.pop }
expect(ambiguous).to contain_exactly(
llong_min - 1,
llong_min,
)
expect(heap.pop).to eq(fixnum_min)
expect(heap.pop).to eq(-dbl_mantissa)
expect(heap.pop).to eq(-ulong_max)
expect(heap.pop).to eq(long_min)
expect(heap.pop).to eq(-flo_mantissa)
expect(heap.pop).to eq(-ushort_max)
expect(heap.pop).to eq(short_min)
expect(heap.pop).to eq(-uchar_max)
expect(heap.pop).to eq(char_min)
expect(heap.pop).to eq(0.0.prev_float)
expect(heap.pop).to eq(0)
expect(heap.pop).to eq(0.0.next_float)
expect(heap.pop).to eq(char_max)
expect(heap.pop).to eq(uchar_max)
expect(heap.pop).to eq(short_max)
expect(heap.pop).to eq(ushort_max)
expect(heap.pop).to eq(flo_mantissa)
expect(heap.pop).to eq(long_max)
expect(heap.pop).to eq(ulong_max)
expect(heap.pop).to eq(dbl_mantissa)
expect(heap.pop).to eq(fixnum_max)
ambiguous = Array.new(2) { heap.pop }
expect(ambiguous).to contain_exactly(
llong_max,
llong_max + 1,
)
ambiguous = Array.new(3) { heap.pop }
expect(ambiguous).to contain_exactly(
ullong_max,
ullong_max + 1,
ullong_max + 2.0,
)
expect(heap.pop).to eq((2**129).to_f)
expect(heap).to be_empty
end
# It looks like rb_float_cmp works? Is this safe on all platforms?
it "sorts floats" do
heap = DHeap.new
20.times do heap << rand(10.0...100.0) end
array = []
array << heap.pop until heap.empty?
expect(array).to eq(array.sort)
expect(array.length).to eq(20)
end
# Only allowing floats, for now... but might bring this back later
# it "sorts strings (bitwise then by encoding)" do
# xff1 = [0xFF].pack("C").force_encoding("utf-8")
# xff2 = [0xFF].pack("C").force_encoding("iso-8859-1")
# heap = DHeap.new
# heap << "abc"
# heap << "ÄÖÛ"
# heap << xff1
# heap << "123"
# heap << "ÄÖÜ"
# heap << "do re mi"
# heap << xff2
# heap << "ABC"
# expect(heap.pop).to eq "123"
# expect(heap.pop).to eq "ABC"
# expect(heap.pop).to eq "abc"
# expect(heap.pop).to eq "do re mi"
# expect(heap.pop).to eq "ÄÖÛ"
# expect(heap.pop).to eq "ÄÖÜ"
# expect(heap.pop).to eq xff1
# expect(heap.pop).to eq xff2
# expect(heap).to be_empty
# end
end
it "sorts heterogeneous comparable numbers" do
heap = DHeap.new
10.times do heap << Rational(rand(1..100), rand(1..1000)) end
10.times do heap << rand(1..1000) end
10.times do heap << rand(10.0...100.0) end
array = []
array << heap.pop until heap.empty?
expect(array).to eq(array.sort)
expect(array.length).to eq(30)
end
# n.b. I tried to use rb_rational_cmp, but that symbol isn't exported
it "sorts rationals" do
heap = DHeap.new
20.times do heap << Rational(rand(1..100), rand(1..1000)) end
array = []
array << heap.pop until heap.empty?
expect(array).to eq(array.sort)
expect(array.length).to eq(20)
end
end
| 30.158163 | 79 | 0.564879 |
288644869bb436088dd459a8b9dc2622eb5bf4b3 | 19,171 | # A nomenclator name, composed of existing {Protonym}s. Each record reflects the subsequent use of two or more protonyms.
# Only the first use of a combination is stored here, subsequence uses of this combination are referenced in Citations.
#
# A {Combination} has no name, it exists to group related Protonyms into an epithet.
#
# They are applicable to genus group names and finer epithets.
#
# All elements of the combination must be defined, nothing is assumed based on the relationship to the parent.
#
# c = Combination.new
# c.genus = a_protonym_genus
# c.species = a_protonym_species
# c.save # => true
# c.genus_taxon_name_relationship # => A instance of TaxonNameRelationship::Combination::Genus
#
# # or
#
# c = Combination.new(genus: genus_protonym, species: species_protonym)
#
# Getters and setters for each of the APPLICABLE_RANKS are available:
# `genus subgenus section subsection series subseries species subspecies variety subvariety form subform`
# `genus_id subgenus_id section_id subsection_id series_id subseries_id species_id subspecies_id variety_id subvariety_id form_id subform_id`
#
# You can do things like (notice mix/match of _id or not):
# c = Combination.new(genus_id: @genus_protonym.id, subspecies: @some_species_group)
# c.species_id = Protonym.find(some_species_id).id
# or
# c.species = Protonym.find(some_species_id)
#
# Combinations are composed of TaxonNameRelationships.
# In those relationship the Combination#id is always the `object_taxon_name_id`, the
# individual Protonyms are stored in `subject_taxon_name_id`.
#
# @!attribute combination_verbatim_name
# Use with caution, and sparingly! If the combination of values from Protonyms can not reflect the formulation of the combination as provided by the original author that string can be provided here.
# The verbatim value is not further parsed. It is only provided to clarify what the combination looked like when first published.
# The following recommendations are made:
# 1) The provided string should visually reflect as close as possible what was seen in the publication itself, including
# capitalization, accented characters etc.
# 2) The full epithet (combination) should be provided, not just the differing component part (see 3 below).
# 3) Misspellings can be more acurately reflected by creating new Protonyms.
# Example uses:
# 1) Jones 1915 publishes Aus aus. Smith 1920 uses, literally "Aus (Bus) Janes 1915".
# It is clear "Janes" is "Jones", therefor "Aus (Bus) Janes 1915" is provided as combination_verbatim_name.
# 2) Smith 1800 publishes Aus Jonesi (i.e. Aus jonesi). The combination_combination_verbatim name is used to
# provide the fact that Jonesi was capitalized.
# 3) "Aus brocen" is used for "Aus broken". If the curators decide not to create a new protonym, perhaps because
# they feel "brocen" was a printing press error that left off the straight bit of the "k" then they should minimally
# include "Aus brocen" in this field, rather than just "brocen". An alternative is to create a new Protonym "brocen".
# 4) 'Aus (Aus)' was originally described in 1920. "(Aus)" was used in a new combination alone as "Aus". This is the only case
# in which combination may contain a single protonym.
# @return [String]
#
# @!attribute parent_id
# the parent is the *parent* of the highest ranked component Protonym, it is automatically set i.e. it should never be assigned directly
# @return [Integer]
#
class Combination < TaxonName
# The ranks that can be used to build combinations. ! TODO: family group names ?
APPLICABLE_RANKS = %w{family subfamily tribe subtribe genus subgenus section subsection
series subseries species subspecies variety subvariety form subform}.freeze
before_validation :set_parent
validate :validate_absence_of_subject_relationships
# TODO: make access private
attr_accessor :disable_combination_relationship_check
# Overwritten here from TaxonName to allow for destroy
has_many :related_taxon_name_relationships, class_name: 'TaxonNameRelationship',
foreign_key: :object_taxon_name_id,
inverse_of: :object_taxon_name,
dependent: :destroy
has_many :combination_relationships, -> {
joins(:taxon_name_relationships)
where("taxon_name_relationships.type LIKE 'TaxonNameRelationship::Combination::%'")
}, class_name: 'TaxonNameRelationship',
foreign_key: :object_taxon_name_id
has_many :combination_taxon_names, through: :combination_relationships, source: :subject_taxon_name
# Create syntactic helper methods
TaxonNameRelationship.descendants.each do |d|
if d.respond_to?(:assignment_method)
if d.name.to_s =~ /TaxonNameRelationship::SourceClassifiedAs/
relationship = "#{d.assignment_method}_relationship".to_sym
has_one relationship, class_name: d.name.to_s, foreign_key: :subject_taxon_name_id
has_one d.assignment_method.to_sym, through: relationship, source: :object_taxon_name
end
if d.name.to_s =~ /TaxonNameRelationship::Combination/ # |SourceClassifiedAs
relationships = "#{d.assignment_method}_relationships".to_sym
has_many relationships, -> {
where('taxon_name_relationships.type LIKE ?', d.name + '%')
}, class_name: 'TaxonNameRelationship', foreign_key: :subject_taxon_name_id
has_many d.assignment_method.to_s.pluralize.to_sym, through: relationships, source: :object_taxon_name
end
end
if d.respond_to?(:inverse_assignment_method)
if d.name.to_s =~ /TaxonNameRelationship::SourceClassifiedAs/
relationships = "#{d.inverse_assignment_method}_relationships".to_sym
has_many relationships, -> {
where('taxon_name_relationships.type LIKE ?', d.name + '%')
}, class_name: 'TaxonNameRelationship', foreign_key: :object_taxon_name_id
has_many d.inverse_assignment_method.to_s.pluralize.to_sym, through: relationships, source: :subject_taxon_name
end
if d.name.to_s =~ /TaxonNameRelationship::Combination/ # |SourceClassifiedAs
relationship = "#{d.inverse_assignment_method}_relationship".to_sym
has_one relationship, class_name: d.name.to_s, foreign_key: :object_taxon_name_id
has_one d.inverse_assignment_method.to_sym, through: relationship, source: :subject_taxon_name
end
end
end
APPLICABLE_RANKS.each do |rank|
has_one "#{rank}_taxon_name_relationship".to_sym, -> {
joins(:combination_relationships)
where(taxon_name_relationships: {type: "TaxonNameRelationship::Combination::#{rank.capitalize}"}) },
class_name: 'TaxonNameRelationship', foreign_key: :object_taxon_name_id
has_one rank.to_sym, -> {
joins(:combination_relationships)
where(taxon_name_relationships: {type: "TaxonNameRelationship::Combination::#{rank.capitalize}"})
}, through: "#{rank}_taxon_name_relationship".to_sym, source: :subject_taxon_name
accepts_nested_attributes_for rank.to_sym
attr_accessor "#{rank}_id".to_sym
method = "#{rank}_id"
define_method(method) {
if self.send(rank)
self.send(rank).id
else
nil
end
}
define_method("#{method}=") {|value|
if !value.blank?
if n = Protonym.find(value)
self.send("#{rank}=", n)
end
end
}
end
scope :with_protonym_at_rank, -> (rank, protonym) {
includes(:combination_relationships).
where('taxon_name_relationships.type = ? and taxon_name_relationships.subject_taxon_name_id = ?', rank, protonym).
references(:combination_relationships)}
validate :is_unique
validate :does_not_exist_as_original_combination, unless: Proc.new {|a| a.errors.full_messages.include? 'Combination exists.' }
validate :parent_is_properly_set , unless: Proc.new {|a| a.errors.full_messages.include? 'Combination exists.' }
validate :composition, unless: Proc.new {|a| disable_combination_relationship_check == true || a.errors.full_messages.include?('Combination exists.') }
validates :rank_class, absence: true
soft_validate(:sv_combination_duplicates, set: :combination_duplicates, has_fix: false)
soft_validate(:sv_year_of_publication_matches_source, set: :dates, has_fix: false)
soft_validate(:sv_year_of_publication_not_older_than_protonyms, set: :dates, has_fix: false)
soft_validate(:sv_source_not_older_than_protonyms, set: :dates, has_fix: false)
# @return [Protonym Scope]
# @params protonym_ids [Hash] like `{genus: 4, species: 5}`
# the absence of _id in the keys in part reflects integration with Biodiversity gem
# AHA from http://stackoverflow.com/questions/28568205/rails-4-arel-join-on-subquery
# See also Descriptor::Gene
def self.protonyms_matching_original_relationships(protonym_ids = {})
protonym_ids.compact!
return Protonym.none if !protonym_ids.keys.any?
s = Protonym.arel_table
sr = TaxonNameRelationship.arel_table
j = s.alias('j') # required for group/having purposes
b = s.project(j[Arel.star]).from(j)
.join(sr)
.on(sr['object_taxon_name_id'].eq(j['id']))
# Build an aliased join for each set of attributes
protonym_ids.each do |rank, id|
sr_a = sr.alias("b_#{rank}")
b = b.join(sr_a).on(
sr_a['object_taxon_name_id'].eq(j['id']),
sr_a['type'].eq("TaxonNameRelationship::OriginalCombination::Original#{rank.capitalize}"),
sr_a['subject_taxon_name_id'].eq(id)
)
end
b = b.group(j['id']).having(sr['object_taxon_name_id'].count.eq(protonym_ids.count))
b = b.as('join_alias')
Protonym.joins(Arel::Nodes::InnerJoin.new(b, Arel::Nodes::On.new(b['id'].eq(s['id']))))
end
# @return [Protonym Scope] hmmm- a Protonym class method?!
# Protonyms matching original relations, if name provided then name added as an additional check on verbatim match
# @params name [String, nil] the non-htmlized version of the name, without author year
# @return [Protonym Scope]
#   protonyms whose original-combination relationships match +protonym_ids+;
#   when +name+ is given it must also match cached_original_combination
#   (the non-htmlized name, without author/year)
def self.matching_protonyms(name = nil, **protonym_ids)
  scope = protonyms_matching_original_relationships(protonym_ids)
  return scope if name.blank?
  scope.where('taxon_names.cached_original_combination = ?', name)
end
# @return [Scope]
# @params keyword_args [Hash] like `{genus: 123, :species: 456}` (note no `_id` suffix)
def self.find_by_protonym_ids(**keyword_args)
keyword_args.compact!
return Combination.none if keyword_args.empty?
c = Combination.arel_table
r = TaxonNameRelationship.arel_table
a = c.alias("a_foo")
b = c.project(a[Arel.star]).from(a)
.join(r)
.on(r['object_taxon_name_id'].eq(a['id']))
s = []
i = 0
keyword_args.each do |rank, id|
r_a = r.alias("foo_#{i}")
b = b.join(r_a).on(
r_a['object_taxon_name_id'].eq(a['id']),
r_a['type'].eq(TAXON_NAME_RELATIONSHIP_COMBINATION_TYPES[rank]),
r_a['subject_taxon_name_id'].eq(id)
)
i += 1
end
b = b.group(a['id']).having(r['object_taxon_name_id'].count.eq(keyword_args.keys.count))
b = b.as("z_bar")
Combination.joins(Arel::Nodes::InnerJoin.new(b, Arel::Nodes::On.new(b['id'].eq(c['id']))))
end
# @return [Combination, false]
# @params keyword_args [Hash] like `{genus: 123, :species: 456}` (note no `_id` suffix)
# the matching Combination if it exists, otherwise false
# if name is provided then cached must match (i.e. verbatim_name if provided must also match)
# @return [Combination, false]
#   the matching Combination when one exists, otherwise false; when +name+
#   is provided the cached value must also match
def self.match_exists?(name = nil, **keyword_args)
  scope = find_by_protonym_ids(**keyword_args)
  scope = scope.where(cached: name) unless name.blank?
  scope.first || false
end
# @return [Boolean]
# true if the finest level (typically species) currently has the same parent
def is_current_placement?
  # Fewer than two protonyms: no placement to compare.
  return false if protonyms.second_to_last.nil?
  # The lowest-ranked protonym's current parent must be the next protonym up.
  protonyms.last.parent_id == protonyms.second_to_last.id
end
# @return [Array of TaxonName]
# pre-ordered by rank
# TODO: hard code sort order
def protonyms
  # Unsaved records have no persisted relationships yet; use the in-memory
  # values assigned via the rank setters.
  return protonyms_by_association if new_record?
  p = combination_taxon_names.sort{|a,b| RANKS.index(a.rank_string) <=> RANKS.index(b.rank_string) }
  # A persisted record may still lack relationship rows; fall back as well.
  return protonyms_by_association if p.empty?
  return p
end
# @return [Hash]
# like `{ genus: 1, species: 2 }`
def protonym_ids_params
protonyms_by_rank.inject({}) {|hsh, p| hsh.merge!( p[0].to_sym => p[1].id )}
end
# Overrides {TaxonName#full_name_hash}
# @return [Hash]
def full_name_hash
gender = nil
data = {}
protonyms_by_rank.each do |rank, i|
gender = i.gender_name if rank == 'genus'
if ['genus', 'subgenus', 'species', 'subspecies'].include? (rank)
data[rank] = [nil, i.name_with_misspelling(gender)]
else
data[rank] = [i.rank_class.abbreviation, i.name_with_misspelling(gender)]
end
end
if data['genus'].nil?
data['genus'] = [nil, "[GENUS NOT SPECIFIED]"]
end
if data['species'].nil? && (!data['subspecies'].nil? || !data['variety'].nil? || !data['subvariety'].nil? || !data['form'].nil? || !data['subform'].nil?)
data['species'] = [nil, "[SPECIES NOT SPECIFIED]"]
end
if data['variety'].nil? && !data['subvariety'].nil?
data['variety'] = [nil, "[VARIETY NOT SPECIFIED]"]
end
if data['form'].nil? && !data['subform'].nil?
data['form'] = [nil, "[FORM NOT SPECIFIED]"]
end
data
end
# @return [Hash of {rank: Protonym}, nil]
# the component names for this combination prior to it being saved (used to return values prior to save)
def protonyms_by_rank
result = {}
APPLICABLE_RANKS.each do |rank|
if protonym = send(rank)
result[rank] = protonym
end
end
result
end
# @return [Array of Integers]
# the collective years the protonyms were (nomenclaturaly) published on (ordered from genus to below)
# @return [Array<Integer>]
#   the nomenclatural publication years of the component protonyms, ordered
#   from highest rank (genus) downward; protonyms without a date are omitted.
# (Previously assigned to an unused, misnamed local and called
# nomenclature_date twice per protonym.)
def publication_years
  protonyms.collect { |a| a.nomenclature_date&.year }.compact
end
# @return [Integer, nil]
# the earliest year (nomenclature) that a component Protonym was published on
# @return [Integer, nil]
#   the earliest (nomenclatural) year any component protonym was published
def earliest_protonym_year
  publication_years.min # min avoids the full sort of sort.first
end
# return [Array of TaxonNameRelationship]
# classes that are applicable to this name, as deterimned by Rank
def combination_class_relationships(rank_string)
relations = []
TaxonNameRelationship::Combination.descendants.each do |r|
relations.push(r) if r.valid_object_ranks.include?(rank_string)
end
relations
end
# TODO: DEPRECATE this is likely not required in our new interfaces
def combination_relationships_and_stubs(rank_string)
display_order = [
:combination_genus, :combination_subgenus, :combination_species, :combination_subspecies, :combination_variety, :combination_form
]
defined_relations = combination_relationships.all
created_already = defined_relations.collect{|a| a.class}
new_relations = []
combination_class_relationships(rank_string).each do |r|
new_relations.push( r.new(object_taxon_name: self) ) if !created_already.include?(r)
end
(new_relations + defined_relations).sort{|a,b|
display_order.index(a.class.inverse_assignment_method) <=> display_order.index(b.class.inverse_assignment_method)
}
end
def get_valid_taxon_name
c = protonyms_by_rank
return self if c.empty?
c[c.keys.last].valid_taxon_name
end
def finest_protonym
protonyms_by_rank.values.last
end
# @return [String, nil]
#   the ICZN author-and-year string, or nil when blank
def get_author_and_year
  iczn_author_and_year.presence # presence == blank? ? nil : self
end
# @return [Array of TaxonNames, nil]
# return the component names for this combination prior to it being saved
def protonyms_by_association
APPLICABLE_RANKS.collect{|r| self.send(r)}.compact
end
protected
def validate_absence_of_subject_relationships
if TaxonNameRelationship.where(subject_taxon_name_id: self.id).any?
errors.add(:base, 'This combination could not be used as a Subject in any TaxonNameRelationships.')
end
end
# TODO: this is a TaxonName level validation, it doesn't belong here
def sv_year_of_publication_matches_source
source_year = source.nomenclature_year if source
if year_of_publication && source_year
soft_validations.add(:year_of_publication, 'The published date of the combination is not the same as provided by the original publication') if source_year != year_of_publication
end
end
# Soft validation: the cited source must not predate the earliest protonym —
# a combination cannot be published before its component names.
# (Fixes the unbalanced parenthesis and grammar in the message.)
def sv_source_not_older_than_protonyms
  source_year = source.try(:nomenclature_year)
  target_year = earliest_protonym_year
  if source_year && target_year && source_year < target_year
    soft_validations.add(:base, "The publication date of combination (#{source_year}) is older than the original publication date of one of the names in the combination (#{target_year})")
  end
end
# Soft validation: the stated year_of_publication must not predate the
# earliest protonym. (Fixes the unbalanced parenthesis/grammar in the
# message and hoists the repeated earliest_protonym_year call.)
def sv_year_of_publication_not_older_than_protonyms
  earliest = earliest_protonym_year
  if year_of_publication && earliest && year_of_publication < earliest
    soft_validations.add(:year_of_publication, "The publication date of combination (#{year_of_publication}) is older than the original publication date of one of the names in the combination (#{earliest})")
  end
end
def sv_combination_duplicates
duplicate = Combination.not_self(self).where(cached: cached)
soft_validations.add(:base, 'Combination is a duplicate') unless duplicate.empty?
end
# before_validation callback: a Combination's parent is defined as the
# parent of its highest-ranked protonym. write_attribute is used so the
# parent= writer (and its side effects) are bypassed.
def set_parent
  names = protonyms
  # Only set when at least one protonym exists and the first has a parent.
  write_attribute(:parent_id, names.first.parent.id) if names.count > 0 && names.first.parent
end
# The parent of a Combination is the parent of the highest ranked protonym in that Combination
def parent_is_properly_set
check = protonyms.first
if parent && check && check.parent
errors.add(:base, 'Parent is not highest ranked name.') if parent != check.parent
end
end
# Validation: a Combination must consist of genus/species-group protonyms
# only; normally two or more, or a single genus-group name (see the
# 'Aus (Aus)' case in the class documentation).
def composition
  c = protonyms.count
  if c == 0
    errors.add(:base, 'Combination includes no names.')
    return
  end
  protonyms.each do |p|
    if !p.is_genus_or_species_rank?
      errors.add(:base, 'Combination includes one or more non-species or genus group names.')
      return
    end
  end
  # There are more than one protonyms, which seem to be valid elements
  p = protonyms.last # lowest-ranked protonym (protonyms is rank-ordered)
  errors.add(:base, 'Combination includes only one name and that is name is not a genus name.') if c < 2 && p.is_species_rank?
  errors.add(:base, 'Combination includes more than two genus group names.') if c > 2 && p.is_genus_rank?
end
def is_unique
if a = Combination.match_exists?(verbatim_name, **protonym_ids_params)
errors.add(:base, 'Combination exists.') if a.id != id
end
end
# Validation: this Combination must not duplicate an existing protonym's
# original combination (same full name, same protonym placements).
def does_not_exist_as_original_combination
  # Assignment-in-condition is intentional; matching_protonyms appears to
  # return a relation, checked with #any? below — TODO confirm.
  if a = Combination.matching_protonyms(get_full_name, **protonym_ids_params)
    errors.add(:base, "Combination exists as protonym(s) with matching original combination: #{a.all.pluck(:cached).join(', ')}.") if a.any?
  end
end
end
| 40.70276 | 270 | 0.718533 |
abc5ca4909aa3174e8d8600c3f6723730b346275 | 4,991 | class Wal2json < Formula
# Formula metadata: upstream source tarball pinned by sha256.
desc "Convert PostgreSQL changesets to JSON format"
homepage "https://github.com/eulerto/wal2json"
url "https://github.com/eulerto/wal2json/archive/wal2json_2_2.tar.gz"
sha256 "e2cb764ee1fccb86ba38dbc8a5e2acd2d272e96172203db67fd9c102be0ae3b5"
license "BSD-3-Clause"

# Pre-built bottles (older hash-rocket bottle syntax, per-OS checksums).
bottle do
  cellar :any_skip_relocation
  rebuild 1
  sha256 "f8d15f5d13d6e4942ccd4baa3ec633a39cd62337224eb1a165689cd2aebcbe51" => :catalina
  sha256 "9f7218381271a905d4da6a943599cc1db3bbe92965dadc49ac2ae94a35350003" => :mojave
  sha256 "18182f272c0ada8069adc54d65b3607c51c12e144b0cbe2620ee8fb3fb09c00f" => :high_sierra
end

# Built against the Homebrew PostgreSQL (provides pg_config via PGXS).
depends_on "postgresql"
# Builds the extension with PostgreSQL's PGXS build system into a staging
# directory, then installs just the shared library into the formula prefix.
def install
  mkdir "stage"
  system "make", "install", "USE_PGXS=1", "DESTDIR=#{buildpath}/stage"
  # Copy the built plugin out of the staged HOMEBREW_PREFIX layout.
  lib.install Dir["stage/#{HOMEBREW_PREFIX}/lib/*"]
end
# Functional test: boots a throwaway PostgreSQL cluster with logical
# replication enabled, streams a transaction through a wal2json replication
# slot, and compares the decoded JSON (whitespace-insensitively) against the
# expected change stream.
test do
  # Skipped in CI, where forking a postgres server is not practical.
  return if ENV["CI"]
  system "initdb", testpath/"datadir"
  mkdir testpath/"socket"
  # Enable logical decoding and restrict connections to a local unix socket;
  # dynamic_library_path lets postgres find the wal2json plugin in this keg.
  File.open(testpath/"datadir"/"postgresql.conf", "a") do |f|
    f << "wal_level = logical\n"
    f << "listen_addresses = ''\n"
    f << "unix_socket_directories = '#{testpath}/socket'\n"
    f << "dynamic_library_path = '$libdir:#{lib}/postgresql'\n"
  end
  pid = Process.fork { exec "postgres", "-D", testpath/"datadir" }
  # Crude wait for server startup — presumably sufficient on test machines.
  sleep 2
  begin
    system "createdb", "-h", testpath/"socket", "test"
    input_sql = <<~EOS
      CREATE TABLE table2_with_pk (a SERIAL, b VARCHAR(30), c TIMESTAMP NOT NULL, PRIMARY KEY(a, c));
      CREATE TABLE table2_without_pk (a SERIAL, b NUMERIC(5,2), c TEXT);
      SELECT 'init' FROM pg_create_logical_replication_slot('test_slot', 'wal2json');
      BEGIN;
      INSERT INTO table2_with_pk (b, c) VALUES('Backup and Restore', '2019-10-08 12:00:00');
      INSERT INTO table2_with_pk (b, c) VALUES('Tuning', '2019-10-08 12:00:00');
      INSERT INTO table2_with_pk (b, c) VALUES('Replication', '2019-10-08 12:00:00');
      DELETE FROM table2_with_pk WHERE a < 3;
      INSERT INTO table2_without_pk (b, c) VALUES(2.34, 'Tapir');
      -- it is not added to stream because there isn't a pk or a replica identity
      UPDATE table2_without_pk SET c = 'Anta' WHERE c = 'Tapir';
      COMMIT;
      SELECT data FROM pg_logical_slot_get_changes('test_slot', NULL, NULL, 'pretty-print', '1');
      SELECT 'stop' FROM pg_drop_replication_slot('test_slot');
    EOS
    File.open(testpath/"input.sql", "w") do |f|
      f.write(input_sql)
    end
    # -Atq: unaligned, tuples-only, quiet — output is just the decoded rows.
    system "psql", "-h", testpath/"socket", "-f", testpath/"input.sql", "-o", testpath/"output.txt", "-Atq", "test"
    actual_output = File.read(testpath/"output.txt")
    expected_output = <<~EOS
      init
      {
        "change": [
          {
            "kind": "insert",
            "schema": "public",
            "table": "table2_with_pk",
            "columnnames": ["a", "b", "c"],
            "columntypes": ["integer", "character varying(30)", "timestamp without time zone"],
            "columnvalues": [1, "Backup and Restore", "2019-10-08 12:00:00"]
          }
          ,{
            "kind": "insert",
            "schema": "public",
            "table": "table2_with_pk",
            "columnnames": ["a", "b", "c"],
            "columntypes": ["integer", "character varying(30)", "timestamp without time zone"],
            "columnvalues": [2, "Tuning", "2019-10-08 12:00:00"]
          }
          ,{
            "kind": "insert",
            "schema": "public",
            "table": "table2_with_pk",
            "columnnames": ["a", "b", "c"],
            "columntypes": ["integer", "character varying(30)", "timestamp without time zone"],
            "columnvalues": [3, "Replication", "2019-10-08 12:00:00"]
          }
          ,{
            "kind": "delete",
            "schema": "public",
            "table": "table2_with_pk",
            "oldkeys": {
              "keynames": ["a", "c"],
              "keytypes": ["integer", "timestamp without time zone"],
              "keyvalues": [1, "2019-10-08 12:00:00"]
            }
          }
          ,{
            "kind": "delete",
            "schema": "public",
            "table": "table2_with_pk",
            "oldkeys": {
              "keynames": ["a", "c"],
              "keytypes": ["integer", "timestamp without time zone"],
              "keyvalues": [2, "2019-10-08 12:00:00"]
            }
          }
          ,{
            "kind": "insert",
            "schema": "public",
            "table": "table2_without_pk",
            "columnnames": ["a", "b", "c"],
            "columntypes": ["integer", "numeric(5,2)", "text"],
            "columnvalues": [1, 2.34, "Tapir"]
          }
        ]
      }
      stop
    EOS
    # Whitespace is normalized away before comparing.
    assert_equal(expected_output.gsub(/\s+/, ""), actual_output.gsub(/\s+/, ""))
  ensure
    # Always tear the forked postgres down, even on assertion failure.
    Process.kill 9, pid
    Process.wait pid
  end
end
end
| 36.698529 | 117 | 0.545582 |
7ab4c4b7462db928172f0faddbe2589eda4167fc | 4,721 | require_relative "../test_helper"
# current headless chrome does not support setting the Accepted-Language header, therefore only default(en) can be tested
# Verifies that the admin UI serves translated text (server-side via
# devise-i18n and client-side via the JS app) for each supported locale,
# and that unknown or incomplete locales fall back to English.
class Test::AdminUi::TestLocales < Minitest::Capybara::Test
  include Capybara::Screenshot::MiniTestPlugin
  include ApiUmbrellaTestHelpers::AdminAuth
  include ApiUmbrellaTestHelpers::Setup

  LOCALES_ROOT_DIR = File.join(API_UMBRELLA_SRC_ROOT, "src/api-umbrella/web-app/config/locales")

  # Expected translations per locale. Non-English locales are commented out
  # because headless Chrome cannot set the Accept-Language header (see file
  # header comment), so only :en is currently exercised.
  EXPECTED_I18N = {
    # :de => {
    #   :allowed_ips => "IP-Adresse Beschränkungen",
    #   :analytics => "Analytics",
    #   :forgot_password => "Passwort vergessen?",
    #   :password => "Passwort",
    # },
    :en => {
      :allowed_ips => "Restrict Access to IPs",
      :analytics => "Analytics",
      :forgot_password => "Forgot your password?",
      :password => "Password",
    }
    # :"es-419" => {
    #   :allowed_ips => "Restringir acceso a IPs",
    #   :analytics => "Analítica",
    #   :forgot_password => "¿Ha olvidado su contraseña?",
    #   :password => "Contraseña",
    # },
    # :fi => {
    #   :allowed_ips => "Rajoita pääsyä IP:siin",
    #   :analytics => "Analytiikka",
    #   :forgot_password => "Unohditko salasanasi?",
    #   :password => "Salasana",
    # },
    # :fr => {
    #   :allowed_ips => "Liste noire IP",
    #   :analytics => "Statistiques",
    #   :forgot_password => "Mot de passe oublié ?",
    #   :password => "Mot de passe",
    # },
    # :it => {
    #   :allowed_ips => "Limita Accesso ad IP",
    #   :analytics => "Analitiche",
    #   :forgot_password => "Password dimenticata?",
    #   :password => "Password",
    # },
    # :ru => {
    #   :allowed_ips => "Ограничить доступ к IP",
    #   :analytics => "Аналитика",
    #   :forgot_password => "Забыли пароль?",
    #   :password => "Пароль",
    # },
  }.freeze

  def setup
    super
    setup_server
    once_per_class_setup do
      # Ensure at least one admin exists so the login page can be hit directly
      # without redirecting to the first-time admin create page.
      FactoryBot.create(:admin)
    end
  end

  # Test all the available locales except the special test "zy" (which we use
  # to test for incomplete data). One server-side and one client-side test
  # method is generated per locale.
  valid_locales = EXPECTED_I18N.keys
  valid_locales.each do |locale|
    locale_method_name = locale.to_s.downcase.gsub(/[^\w]/, "_")

    define_method("test_server_side_translations_in_#{locale_method_name}_locale") do
      selenium_use_language_driver(locale.to_s)
      visit "/admin/login"
      # From devise-i18n based on attribute names
      assert_i18n_text(locale, :password, find("label[for=admin_password]"))
      # From devise-i18n manually assigned in view
      assert_i18n_text(locale, :forgot_password, find("a[href='/admins/password/new']"))
    end

    define_method("test_client_side_translations_in_#{locale_method_name}_locale") do
      selenium_use_language_driver(locale.to_s)
      admin_login
      visit "/admin/#/api_users/new"
      # Form
      assert_i18n_text(locale, :allowed_ips, find("label[for$='allowedIpsString']"))
      # Navigation
      assert_i18n_text(locale, :analytics, find("li.nav-analytics > a"))
    end
  end

  # Unknown locale ("zz" — no locale file exists): English is served.
  def test_server_side_fall_back_to_english_for_unknown_locale
    locale = "zz"
    selenium_use_language_driver(locale)
    visit "/admin/login"
    refute(File.exist?(File.join(LOCALES_ROOT_DIR, "#{locale}.yml")))
    assert_i18n_text(:en, :password, find("label[for=admin_password]"))
  end

  def test_client_side_fall_back_to_english_for_unknown_locale
    locale = "zz"
    selenium_use_language_driver(locale)
    admin_login
    visit "/admin/#/api_users/new"
    refute(File.exist?(File.join(LOCALES_ROOT_DIR, "#{locale}.yml")))
    assert_i18n_text(:en, :allowed_ips, find("label[for$='allowedIpsString']"))
  end

  # Known-but-incomplete locale ("zy" — file exists with missing keys):
  # missing entries fall back to English.
  def test_server_side_fall_back_to_english_for_missing_data_in_known_locale
    locale = "zy"
    selenium_use_language_driver(locale)
    visit "/admin/login"
    assert(File.exist?(File.join(LOCALES_ROOT_DIR, "#{locale}.yml")))
    assert_i18n_text(:en, :password, find("label[for=admin_password]"))
  end

  def test_client_side_fall_back_to_english_for_missing_data_in_known_locale
    locale = "zy"
    selenium_use_language_driver(locale)
    admin_login
    visit "/admin/#/api_users/new"
    assert(File.exist?(File.join(LOCALES_ROOT_DIR, "#{locale}.yml")))
    assert_i18n_text(:en, :allowed_ips, find("label[for$='allowedIpsString']"))
  end

  private

  # Asserts that +element+'s text equals the expected translation for
  # +expected_key+ in +expected_locale+ (raises KeyError for unknown keys).
  def assert_i18n_text(expected_locale, expected_key, element)
    assert(element)
    expected_text = EXPECTED_I18N.fetch(expected_locale).fetch(expected_key)
    refute_empty(expected_text)
    assert_equal(expected_text, element.text)
  end
end
| 33.246479 | 121 | 0.67401 |
61511be80a7a2d303edfe58b0837f150131e6cf4 | 378 | module Quickbooks
module Model
# XML-mapped model for a QuickBooks payment line detail; xml_accessor maps
# each attribute to its QuickBooks XML element and coerces the type.
class PaymentLineDetail < BaseModel
  xml_accessor :item_ref, :from => 'ItemRef', :as => Integer
  xml_accessor :class_ref, :from => 'ClassRef', :as => Integer
  # NOTE(review): Float for a monetary balance risks rounding error —
  # BigDecimal is typical for money; confirm against the rest of the gem.
  xml_accessor :balance, :from => 'Balance', :as => Float
  xml_accessor :discount, :from => 'Discount', :as => Quickbooks::Model::DiscountOverride
end
end
end | 37.8 | 93 | 0.664021 |
2889527db824180cbeaa66aebb6c379d565fc686 | 1,602 | require "rbexy/version"
require "active_support/inflector"
require "active_support/concern"
require "active_support/core_ext/enumerable"
require "action_view/helpers/output_safety_helper"
require "action_view/helpers/capture_helper"
require "action_view/helpers/tag_helper"
require "action_view/context"
# Top-level namespace for the Rbexy template language: lazily autoloads its
# components and exposes the configure/compile/evaluate entry points.
module Rbexy
  autoload :Lexer, "rbexy/lexer"
  autoload :Parser, "rbexy/parser"
  autoload :Nodes, "rbexy/nodes"
  autoload :Runtime, "rbexy/runtime"
  autoload :ComponentContext, "rbexy/component_context"
  autoload :Configuration, "rbexy/configuration"
  autoload :ComponentResolver, "rbexy/component_resolver"
  autoload :Template, "rbexy/template"
  autoload :Refinements, "rbexy/refinements"
  autoload :ASTTransformer, "rbexy/ast_transformer"

  ContextNotFound = Class.new(StandardError)

  class << self
    # Yields the singleton Configuration for block-style setup.
    def configure
      yield(configuration)
    end

    # Memoized global configuration.
    def configuration
      @configuration ||= Configuration.new
    end

    # Compiles a Template into Ruby code: tokenize, parse, inject the compile
    # context, run AST transforms, then precompile/compile the root node.
    def compile(template, context = build_default_compile_context(template))
      tokens = Lexer.new(template, context.element_resolver).tokenize
      root = Parser.new(tokens).parse
      root.inject_compile_context(context)
      root.transform!
      root.precompile.compile
    end

    # Convenience: compile a raw template string and evaluate it in the
    # given runtime.
    def evaluate(template_string, runtime = Rbexy::Runtime.new)
      runtime.evaluate compile(Template.new(template_string))
    end

    # Default compile context backed by OpenStruct — presumably 'ostruct' is
    # required elsewhere (e.g. via active_support); TODO confirm.
    def build_default_compile_context(template)
      OpenStruct.new(
        template: template,
        element_resolver: configuration.element_resolver,
        ast_transformer: configuration.transforms
      )
    end
  end
end
| 29.666667 | 76 | 0.750936 |
bfc97d5650e7368b7a8562fcc28174131aa89979 | 1,617 | #
# Be sure to run `pod lib lint PTSuperImages.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for PTSuperImages.
# NOTE(review): s.summary and s.description still contain the `pod lib
# create` template placeholders — fill these in before publishing.
Pod::Spec.new do |s|
  s.name             = 'PTSuperImages'
  s.version          = '0.1.0'
  s.summary          = 'A short description of PTSuperImages.'

  # This description is used to generate tags and improve search results.
  #   * Think: What does it do? Why did you write it? What is the focus?
  #   * Try to keep it short, snappy and to the point.
  #   * Write the description between the DESC delimiters below.
  #   * Finally, don't worry about the indent, CocoaPods strips it!
  s.description      = <<-DESC
TODO: Add long description of the pod here.
  DESC

  s.homepage         = 'https://github.com/parthv21/PTSuperImages'
  # s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'parthv21' => '[email protected]' }
  s.source           = { :git => 'https://github.com/parthv21/PTSuperImages.git', :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'

  s.ios.deployment_target = '8.0'
  s.source_files = 'PTSuperImages/Classes/**/*'
  # s.resource_bundles = {
  #   'PTSuperImages' => ['PTSuperImages/Assets/*.png']
  # }
  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  # s.dependency 'AFNetworking', '~> 2.3'
end
| 37.604651 | 106 | 0.642548 |
6aa177e6df23a5c7679dd10181b5614b4817c27d | 535 | require 'spec_helper_acceptance'
# Ensure message of the day (MOTD) is properly configured - Section 1.7.1.1,
# Ensure permisisons on /etc/motd are configured - Section 1.7.1.4
# Serverspec acceptance checks for /etc/motd: file exists, is root:root with
# mode 0644, and carries the Puppet-managed banner text.
describe file('/etc/motd') do
  it { should be_file }
  it { should be_owned_by 'root' }
  it { should be_grouped_into 'root' }
  it { should be_mode 644 }
  its(:content) { should match /Puppet/ }
end
# Ensure local login warning banner is configured properly - Section 1.7.1.2
# Ensure permissions on /etc/issue are configured - Section 1.7.1.5 | 38.214286 | 76 | 0.71028 |
1cb3bcfcff0b5a27a5646ee154f3ebac7cccdc9c | 2,823 | module Spree
# An order line: one variant at a quantity and captured price. Callback
# order matters here (price/tax copying before validation, inventory sync
# around save/destroy, adjustment recalculation after save).
class LineItem < ActiveRecord::Base
  before_validation :adjust_quantity
  belongs_to :order, class_name: "Spree::Order", :inverse_of => :line_items
  belongs_to :variant, class_name: "Spree::Variant"
  belongs_to :tax_category, class_name: "Spree::TaxCategory"

  has_one :product, through: :variant
  has_many :adjustments, as: :adjustable, dependent: :destroy

  # Snapshot price/currency/tax category from the variant before validating.
  before_validation :copy_price
  before_validation :copy_tax_category

  validates :variant, presence: true
  validates :quantity, numericality: {
    only_integer: true,
    greater_than: -1,
    message: Spree.t('validation.must_be_int')
  }
  validates :price, numericality: true
  validates_with Stock::AvailabilityValidator

  before_save :update_inventory
  before_destroy :update_inventory

  after_save :update_adjustments
  after_create :create_tax_charge

  delegate :name, :description, to: :variant

  # Shipment to assign stock changes to (set by order inventory handling).
  attr_accessor :target_shipment

  # Copies price, cost and currency from the variant, only for attributes
  # not already set (preserves explicitly provided values).
  def copy_price
    if variant
      self.price = variant.price if price.nil?
      self.cost_price = variant.cost_price if cost_price.nil?
      self.currency = variant.currency if currency.nil?
    end
  end

  # Always mirrors the variant's tax category (unlike copy_price, this
  # overwrites any existing value).
  def copy_tax_category
    if variant
      self.tax_category = variant.tax_category
    end
  end

  # Pre-adjustment line total: unit price * quantity.
  def amount
    price * quantity
  end
  alias subtotal amount

  # Subtotal plus promotion adjustments.
  def discounted_amount
    amount + promo_total
  end

  # Subtotal plus all adjustments (tax, promotions, ...).
  def final_amount
    amount + adjustment_total.to_f
  end
  alias total final_amount

  # Money-wrapped unit price in the line's currency.
  def single_money
    Spree::Money.new(price, { currency: currency })
  end
  alias single_display_amount single_money

  # Money-wrapped subtotal in the line's currency.
  def money
    Spree::Money.new(amount, { currency: currency })
  end
  alias display_total money
  alias display_amount money

  # Normalizes nil/negative quantities to zero before validation.
  def adjust_quantity
    self.quantity = 0 if quantity.nil? || quantity < 0
  end

  def sufficient_stock?
    Stock::Quantifier.new(variant_id).can_supply? quantity
  end

  def insufficient_stock?
    !sufficient_stock?
  end

  def assign_stock_changes_to=(shipment)
    @preferred_shipment = shipment
  end

  # Remove product default_scope `deleted_at: nil`
  def product
    variant.product
  end

  # Remove variant default_scope `deleted_at: nil`
  def variant
    Spree::Variant.unscoped { super }
  end

  private

  # Syncs shipment inventory units whenever any attribute changed
  # (before_save and before_destroy).
  def update_inventory
    if changed?
      Spree::OrderInventory.new(self.order).verify(self, target_shipment)
    end
  end

  # Recalculates adjustments only when quantity changed.
  def update_adjustments
    if quantity_changed?
      recalculate_adjustments
    end
  end

  def recalculate_adjustments
    Spree::ItemAdjustments.new(self).update
  end

  # Applies applicable tax rates to this fresh line item (after_create).
  def create_tax_charge
    Spree::TaxRate.adjust(order, [self])
  end
end
end
| 23.330579 | 77 | 0.678356 |
08f1ad8997ee5252c369453b141dab2075268241 | 137 | FactoryBot.define do
# Factory for a device-scoped farmware environment variable with random
# key/value strings.
# NOTE(review): Faker::Pokemon was moved to Faker::Games::Pokemon in newer
# Faker releases — confirm against the Gemfile's pinned version.
factory :farmware_env do
  device
  key { Faker::Pokemon.move }
  value { Faker::Pokemon.move }
end
end
| 17.125 | 33 | 0.664234 |
f7394db0239de843e54193e61ec3d42d17269817 | 425 | require_dependency "cadmin/application_controller"
module Cadmin
# Engine controller showing the current cart. #index is public (auth is
# skipped for it); every action loads the cart from the session first.
class CartsController < ApplicationController
  skip_before_action :authenticate_cadmin_user!, :only => [:index]
  before_action :set_cart

  def index
    @total_cart_amount = @cart.total_cart_amount
    @main_service = MainService.first
  end

  private

  # Loads the session's cart; Cart.find raises RecordNotFound when
  # session[:cart_id] is missing/stale — presumably handled upstream,
  # TODO confirm.
  def set_cart
    @cart = Cart.find(session[:cart_id])
  end
end
end | 25 | 68 | 0.708235 |
ac3b28d59b1b94b6760aeeaab9febce40827c7d8 | 1,096 | require 'spec_helper'
require 'mailclerk'
# Specs for the daily-dharma mailer: picks today's scheduled jewel, selects
# only users opted in via :gets_daily_dharma, and BCCs them the email.
describe Zenzai::MailClerk do
  before do
    @first_user = Factory.create_user
    # Only this user opts in to the daily mail.
    @second_user = Factory.create_user({:email => "[email protected]",
                                        :gets_daily_dharma => true})
    @first_jewel = Factory.create_jewel
    @second_jewel = Factory.create_jewel({:source => "123"})
    @today = Date.today
    # First jewel is scheduled for today, second for tomorrow.
    @first_delivery = Delivery.create!(:date => @today,
                                       :jewel_id => @first_jewel.id)
    @second_delivery = Delivery.create!(:date => @today + 1.day,
                                        :jewel_id => @second_jewel.id)
    @clerk = described_class.new
  end

  after(:each) do
    ActionMailer::Base.deliveries.clear
  end

  it "gets the jewel for today" do
    expect(@clerk.current_jewel).to eq(@first_jewel)
  end

  it "gets the right users" do
    expect(@clerk.daily_dharma_recipients).to eq([@second_user])
  end

  it "sends out the email" do
    @clerk.send_daily_dharma
    expect(ActionMailer::Base.deliveries.last.bcc).to eq([@second_user.email])
  end
end
| 28.842105 | 78 | 0.622263 |
ac76a2ac19e400b95317937c9a05e5cc2e774dcd | 2,938 | require 'rack-flash'
class MessagesController < ApplicationController
# Flash middleware; :sweep clears entries once they have been shown.
use Rack::Flash, :sweep => true

# GET /messages — list all messages (login required).
get '/messages' do
  if logged_in?
    @messages = Message.all
    erb :'messages/index'
  else
    flash[:notice] = "You must be logged in to do that!"
    redirect '/login'
  end
end

# GET /messages/new — render the new-message form (login required).
get '/messages/new' do
  if logged_in?
    erb :'/messages/new'
  else
    flash[:notice] = "You must be logged in to do that!"
    redirect '/login'
  end
end
# POST /messages — create a message for the current user.
#
# Fix: the original handler never checked logged_in?, so an anonymous POST
# reached `current_user.messages` and crashed with NoMethodError on nil.
# Now mirrors the login guard used by every other action in this controller.
post '/messages' do
  if logged_in?
    if params[:content] != ""
      message = Message.create(:content => params[:content], :date_posted => Date.today)
      current_user.messages << message
      flash[:success] = "Message created."
      redirect '/messages'
    else
      flash[:notice] = "The content field cannot be left blank."
      redirect '/messages/new'
    end
  else
    flash[:notice] = "You must be logged in to do that!"
    redirect '/login'
  end
end
# GET /messages/:id — show a single message (login required; unknown ids
# bounce back to the index with an error flash).
get '/messages/:id' do
  @message = Message.find_by_id(params[:id])
  if logged_in?
    if @message
      erb :'/messages/show'
    else
      flash[:error] = "Something went wrong. Please try again."
      redirect '/messages'
    end
  else
    flash[:notice] = "You must be logged in to do that!"
    redirect '/login'
  end
end

# GET /messages/:id/edit — edit form; only the message's owner (per
# authorized_to_edit?) may open it.
get '/messages/:id/edit' do
  @message = Message.find_by_id(params[:id])
  if logged_in?
    # if @message && @message.user == current_user
    if @message && authorized_to_edit?(@message)
      erb :'/messages/edit'
    else
      flash[:error] = "Sorry, you do not have permissions to edit that message."
      redirect '/messages'
    end
  else
    flash[:notice] = "You must be logged in to do that!"
    redirect '/login'
  end
end
# PATCH /messages/:id — update a message's content.
#
# Security fix: the original handler performed the update with NO login or
# ownership check, so anyone could edit any message by POSTing directly.
# Guards now mirror the GET /messages/:id/edit action.
patch '/messages/:id' do
  @message = Message.find_by_id(params[:id])
  if logged_in? && @message && authorized_to_edit?(@message)
    if params["content"] == ""
      flash[:notice] = "The content field cannot be left blank. To delete message, please press delete."
      redirect "/messages/#{@message.id}/edit"
    else
      @message.update(:content => params["content"])
      flash[:success] = "Message updated."
      redirect "/messages/#{@message.id}"
    end
  elsif logged_in?
    flash[:error] = "Sorry, you do not have permissions to edit that message."
    redirect '/messages'
  else
    flash[:notice] = "You must be logged in to do that!"
    redirect '/login'
  end
end
# DELETE /messages/:id — destroy a message; only its owner (per
# authorized_to_edit?) may delete, others are sent to their account page.
delete '/messages/:id' do
  @message = Message.find_by_id(params[:id])
  if logged_in?
    if authorized_to_edit?(@message)
      @message.delete
      flash[:success] = "Message deleted."
      redirect '/messages'
    else
      redirect '/account'
    end
  else
    redirect '/login'
  end
end
end | 28.524272 | 110 | 0.507488 |
79592ae7ae2c3273eff4648d26540c0707057c19 | 326 | cask 'red' do
# Cask stanzas: bare (un-archived) binary download, installed as `red`.
# NOTE(review): both URLs are plain http — check whether upstream now
# serves https before bumping.
version '0.6.1'
sha256 'afefaa392e5dbc1ec6d8805376ecffe86a1f6d1ce46d426800556f3c4f698693'

url "http://static.red-lang.org/dl/mac/red-#{version.no_dots}"
name 'Red Programming Language'
homepage 'http://www.red-lang.org/'

# The download is the raw executable itself, not an archive.
container type: :naked

binary "red-#{version.no_dots}", target: 'red'
end
| 25.076923 | 75 | 0.730061 |
d5272de5e58462e567758a695a7cbb26577a83e3 | 1,185 | control "VCRP-67-000006" do
# STIG control metadata and check for vCenter rhttpproxy logging.
title "rhttpproxy must have logging enabled."
desc "After a security incident has occurred, investigators will often
review log files to determine what happened. rhttpproxy must create logs upon
service start up in order to capture information relevant to investigations."
impact 0.5
tag severity: "CAT II"
tag gtitle: "SRG-APP-000093-WSR-000053"
tag gid: nil
tag rid: "VCRP-67-000006"
tag stig_id: "VCRP-67-000006"
tag cci: "CCI-001462"
tag nist: ["AU-14 (2)", "Rev_4"]
desc 'check', "At the command prompt, execute the following command:
# xmllint --xpath '/config/log/outputToFiles' /etc/vmware-rhttpproxy/config.xml
Expected result:
<outputToFiles>true</outputToFiles>
If the output does not match the expected result, this is a finding."
desc 'fix', "Navigate to and open /etc/vmware-rhttpproxy/config.xml
Locate the <log> block and configure <outputToFiles> as follows:
<outputToFiles>true</outputToFiles>
Restart the service for changes to take effect.
# vmon-cli --restart rhttpproxy"
# Automated check: the XPath must resolve to 'true'.
describe xml('/etc/vmware-rhttpproxy/config.xml') do
  its(['/config/log/outputToFiles']) { should cmp ['true'] }
end
end
| 30.384615 | 79 | 0.739241 |
7a6aa58a743ab88fd710410a172240283e3f4ba5 | 451 | Spec::Matchers.define :smart_match do |expected|
# Pattern for "the expectation is written as a /regex/ literal string".
# Returns a Regexp (used as a case/when pattern via ===), not a boolean,
# despite the ? name.
def regexp?
  /^\/.*\/?$/
end

# Pattern for "the expectation is a double-quoted string literal".
def quoted?
  /^".*"$/
end

match do |actual|
  case expected
  when regexp?
    # NOTE(review): eval on the expectation string — acceptable only because
    # expectations come from the spec suite itself, never external input.
    actual =~ eval(expected)
  when quoted?
    actual.index(eval(expected))
  else # multi-line string
    actual.index(expected)
  end
end

failure_message_for_should do |actual|
  # <<- only allows indenting the terminator; body lines stay flush so the
  # banner renders at column 0.
  <<-MESSAGE
#{'*'*50}
got:
#{'*'*30}
#{actual}
#{'*'*50}
  MESSAGE
end
end
| 14.09375 | 48 | 0.580931 |
1dbf5fd95ab79e95ea1e9b21e626c793ae46308e | 604 | module RailsAdmin
module Config
module Actions
# Custom RailsAdmin member action exposing a read-only "REST API" view for
# a record; configured entirely through register_instance_option.
class RestApi2 < RailsAdmin::Config::Actions::Base
  # Acts on a single record (member action), not on the collection.
  register_instance_option :member do
    true
  end

  # Reuses the :rest_api translation key for labels.
  register_instance_option :i18n_key do
    :rest_api
  end

  register_instance_option :http_methods do
    [:get]
  end

  register_instance_option :link_icon do
    'icon-cog'
  end

  # Controller body: just renders the rest_api template.
  register_instance_option :controller do
    proc do
      render :template => 'rails_admin/main/rest_api'
    end
  end
end
end
end
end
| 18.875 | 59 | 0.581126 |
b9492d986d5bb64a75e030ef1118c57638bd8e71 | 2,298 | require 'spec_helper'
require 'puppet/module_tool/installed_modules'
require 'puppet_spec/modules'
# Specs for module-version parsing: valid semver is kept, invalid or
# partial versions warn (mocha `expects`) and default to 0.0.0.
describe Puppet::ModuleTool::InstalledModules do
  include PuppetSpec::Files

  # Each example runs inside a throwaway environment whose modulepath is a
  # temp directory, overridden as the current environment.
  around do |example|
    dir = tmpdir("deep_path")
    FileUtils.mkdir_p(@modpath = File.join(dir, "modpath"))
    @env = Puppet::Node::Environment.create(:env, [@modpath])
    Puppet.override(:current_environment => @env) do
      example.run
    end
  end

  it 'works when given a semantic version' do
    mod = PuppetSpec::Modules.create('goodsemver', @modpath, :metadata => {:version => '1.2.3'})
    installed = described_class.new(@env)
    expect(installed.modules["puppetlabs-#{mod.name}"].version).to eq(Semantic::Version.parse('1.2.3'))
  end

  it 'defaults when not given a semantic version' do
    mod = PuppetSpec::Modules.create('badsemver', @modpath, :metadata => {:version => 'banana'})
    Puppet.expects(:warning).with(regexp_matches(/Semantic Version/))
    installed = described_class.new(@env)
    expect(installed.modules["puppetlabs-#{mod.name}"].version).to eq(Semantic::Version.parse('0.0.0'))
  end

  it 'defaults when not given a full semantic version' do
    mod = PuppetSpec::Modules.create('badsemver', @modpath, :metadata => {:version => '1.2'})
    Puppet.expects(:warning).with(regexp_matches(/Semantic Version/))
    installed = described_class.new(@env)
    expect(installed.modules["puppetlabs-#{mod.name}"].version).to eq(Semantic::Version.parse('0.0.0'))
  end

  it 'still works if there is an invalid version in one of the modules' do
    mod1 = PuppetSpec::Modules.create('badsemver', @modpath, :metadata => {:version => 'banana'})
    mod2 = PuppetSpec::Modules.create('goodsemver', @modpath, :metadata => {:version => '1.2.3'})
    mod3 = PuppetSpec::Modules.create('notquitesemver', @modpath, :metadata => {:version => '1.2'})
    # One warning per invalid version (two invalid modules here).
    Puppet.expects(:warning).with(regexp_matches(/Semantic Version/)).twice
    installed = described_class.new(@env)
    expect(installed.modules["puppetlabs-#{mod1.name}"].version).to eq(Semantic::Version.parse('0.0.0'))
    expect(installed.modules["puppetlabs-#{mod2.name}"].version).to eq(Semantic::Version.parse('1.2.3'))
    expect(installed.modules["puppetlabs-#{mod3.name}"].version).to eq(Semantic::Version.parse('0.0.0'))
  end
end
| 45.96 | 104 | 0.697998 |
3860642bdd89afc6cf2a21865c1162d9e623a9ef | 571 | Pod::Spec.new do |s|
# CocoaPods spec stanzas for the PKCCrop image-cropping library.
s.name             = 'PKCCrop'
s.version          = '0.2.4'
s.summary          = 'Images crop'
s.description      = 'There are many options that can be used to easily put images into crops.'
s.homepage         = 'https://github.com/pikachu987/PKCCrop'
s.license          = { :type => 'MIT', :file => 'LICENSE' }
s.author           = { 'pikachu987' => '[email protected]' }
# Source tag follows the pod version.
s.source           = { :git => 'https://github.com/pikachu987/PKCCrop.git', :tag => s.version.to_s }
s.ios.deployment_target = '8.0'
s.source_files = 'PKCCrop/Classes/*'
end
| 43.923077 | 100 | 0.590193 |
08fa1e575087148131da1c38aba09930a5cc3a45 | 1,114 | require 'spec_helper'
module BarkingDog
# Spec: events triggered on the publisher are routed through the PoolProxy
# to a pool of BaseConcurrentService actors.
describe PoolProxy do
  before do
    # NOTE(review): defining/reopening this class inside a before hook means
    # it persists (and is re-evaluated) across examples.
    class BaseConcurrentService
      include BarkingDog::BasicService
      self.concurrency = 2
      on "cool", :handle_cool
      class_attribute :cool_handler
      self.cool_handler = Queue.new

      # Pushes each received "cool" event onto the shared queue.
      def handle_cool(_, evt)
        self.class.cool_handler << evt
      end
    end
    BaseConcurrentService.cool_handler.length.should == 0
  end

  let(:event_publisher) { EventPublisher.new }
  let(:event_receiver) { EventReceiver.new }
  let(:event_future) { event_receiver.future.wait_for("cool") }
  let(:pool_proxy) { PoolProxy.new(BaseConcurrentService, size: 2) }

  after do
    pool_proxy.terminate
    event_publisher.terminate
  end

  before do
    event_future # calling to setup the future
    pool_proxy.subscribe_to_class_events
  end

  it "should proxy events to the pool" do
    event_publisher.trigger("cool")
    # value(1): wait up to 1s for the namespaced event to arrive.
    event_future.value(1)[0].should == "barking-dog/cool"
    BaseConcurrentService.cool_handler.length.should == 1
  end
end
end
| 21.843137 | 70 | 0.670557 |
ff282d897b2d8c5e1120448ceb11738cd74f247b | 556 | class CreateGoogleMerchantCategories < ActiveRecord::Migration
# Creates the google_merchant_categories lookup table with tree columns
# (parent_id plus left/right — presumably nested-set bounds, TODO confirm)
# and indexes the tree columns.
def self.up
  create_table :google_merchant_categories do |t|
    t.string :name
    t.integer :parent_id
    # NOTE(review): "left"/"right" are SQL keywords in some databases;
    # confirm quoting works on the target adapter.
    t.integer :left
    t.integer :right
    t.integer :google_category_id
    t.string :google_path, limit: 300
    t.timestamps
  end
  add_index :google_merchant_categories, :parent_id
  add_index :google_merchant_categories, :left
  add_index :google_merchant_categories, :right
end

# Reverses the migration by dropping the table (indexes go with it).
def self.down
  drop_table :google_merchant_categories
end
end
| 25.272727 | 62 | 0.726619 |
6236478d3485ead0d5fb2ce4d725d381f34604da | 2,073 | # frozen_string_literal: true
FactoryBot.define do
factory :internship_application do
student { create(:student) }
motivation { 'Suis hyper motivé' }
trait :drafted do
aasm_state { :drafted }
end
trait :submitted do
aasm_state { :submitted }
submitted_at { 3.days.ago }
end
trait :expired do
aasm_state { :expired }
submitted_at { 15.days.ago }
expired_at { 3.days.ago }
end
trait :approved do
aasm_state { :approved }
submitted_at { 3.days.ago }
approved_at { 2.days.ago }
end
trait :rejected do
aasm_state { :rejected }
submitted_at { 3.days.ago }
rejected_at { 2.days.ago }
end
trait :canceled_by_employer do
aasm_state { :canceled_by_employer }
submitted_at { 3.days.ago }
rejected_at { 2.days.ago }
canceled_at { 2.days.ago }
end
trait :canceled_by_student do
aasm_state { :canceled_by_student }
submitted_at { 3.days.ago }
canceled_at { 2.days.ago }
end
trait :convention_signed do
aasm_state { :convention_signed }
submitted_at { 3.days.ago }
approved_at { 2.days.ago }
convention_signed_at { 1.days.ago }
end
transient do
weekly_internship_offer_helper {create(:weekly_internship_offer)}
end
trait :weekly do
internship_offer { weekly_internship_offer_helper }
internship_offer_week { internship_offer.internship_offer_weeks.first}
end
trait :free_date do
internship_offer { create(:free_date_internship_offer) }
end
factory :weekly_internship_application, traits: [:weekly],
parent: :internship_application,
class: 'InternshipApplications::WeeklyFramed'
factory :free_date_internship_application, traits: [:free_date],
parent: :internship_application,
class: 'InternshipApplications::FreeDate'
end
end
| 28.39726 | 89 | 0.614568 |
6aedd082e9caf12e534ec8dba5b2b3c8d4006c9f | 482 | describe User do
# Shoulda-matchers validations for User, exercised via a built (unsaved)
# :passenger factory instance.
before { @user = FactoryBot.build(:passenger) }
subject { @user }
it { should validate_presence_of(:email) }
it { should validate_presence_of(:full_name) }
it { should validate_presence_of(:phone_number) }
# Uniqueness checks are case-insensitive for both email and phone number.
it { should validate_uniqueness_of(:email).case_insensitive }
it { should validate_uniqueness_of(:phone_number).case_insensitive }
it { should validate_confirmation_of(:password) }
it { should validate_length_of(:full_name).is_at_least(6) }
end | 32.133333 | 69 | 0.763485 |
1a2f8fecd602dc826de50674b270f77138a73fb3 | 1,879 | require 'test_helper'
# Integration tests for follow/unfollow flows (standard and Ajax), the
# following/followers pages, and the home-page feed, run as a logged-in
# fixture user.
#
# Fix: test description typo "wioth Ajax" -> "with Ajax" (affects only the
# generated test name / reporting output).
class FollowingTest < ActionDispatch::IntegrationTest

  def setup
    @user = users(:Andrew)
    @other_user = users(:Archer)
    log_in_as(@user, password: "pepeb312", remember_me: "1")
  end

  test "following page" do
    get following_user_path(@user)
    assert_not @user.following.empty?
    assert_match @user.following.count.to_s, response.body
    @user.following.each do |user|
      assert_select "a[href=?]", user_path(user)
    end
  end

  test "followers page" do
    get followers_user_path(@user)
    assert_not @user.followers.empty?
    assert_match @user.followers.count.to_s, response.body
    @user.followers.each do |user|
      assert_select "a[href=?]", user_path(user)
    end
  end

  test "should follow a user the standard way" do
    assert_difference "@user.following.count", 1 do
      post relationships_path, params: { followed_id: @other_user.id }
    end
  end

  test "should follow a user with Ajax" do
    assert_difference "@user.following.count", 1 do
      post relationships_path, xhr: true, params: { followed_id: @other_user.id }
    end
  end

  test "should unfollow a user the standard way" do
    @user.follow(@other_user)
    relationship = @user.active_relationships.find_by(followed_id: @other_user.id)
    assert_difference "@user.following.count", -1 do
      delete relationship_path(relationship)
    end
  end

  test "should unfollow a user with Ajax" do
    @user.follow(@other_user)
    relationship = @user.active_relationships.find_by(followed_id: @other_user.id)
    assert_difference "@user.following.count", -1 do
      delete relationship_path(relationship), xhr: true
    end
  end

  test "feed on Home page" do
    get root_path
    @user.feed.paginate(page: 1).each do |micropost|
      # Content is HTML-escaped in the rendered page.
      assert_match CGI.escapeHTML(micropost.content), response.body
    end
  end
end
| 29.825397 | 82 | 0.700373 |
91eb4094338cb06b804287cdb291b48cd50aff49 | 744 | class NodeBuild < Formula
# Formula stanzas: source tarball, livecheck against upstream git tags, and
# a relocatable all-platform bottle.
desc "Install NodeJS versions"
homepage "https://github.com/nodenv/node-build"
url "https://github.com/nodenv/node-build/archive/v4.9.39.tar.gz"
sha256 "0f94c52fba5a59bd4b45242a80d5accbf870a847f25b3b1cd2f65eaee910ecf7"
license "MIT"
head "https://github.com/nodenv/node-build.git"

livecheck do
  url :stable
  # Matches tags like v4.9.39 (leading "v" optional).
  regex(/^v?(\d+(?:\.\d+)+)$/i)
end

bottle do
  sha256 cellar: :any_skip_relocation, all: "e1492923ab735ec71040dea6d6ca5fcbde21f08c616203b9b63f2d7b8c4270c1"
end

depends_on "autoconf"
depends_on "[email protected]"
depends_on "pkg-config"

# Upstream install script honours PREFIX for its install root.
def install
  ENV["PREFIX"] = prefix
  system "./install.sh"
end

# Smoke test: listing definitions exercises the installed script.
test do
  system "#{bin}/node-build", "--definitions"
end
end
| 24 | 112 | 0.711022 |
6abbd1efb0db4a96da007e119dc7a8f40ddc49ab | 4,522 | # December 2015 Michael G. Wiebe
#
# Copyright (c) 2015-2016 Cisco and/or its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Node-utils gem is only present on agents with the feature available.
require 'cisco_node_utils' if Puppet.features.cisco_node_utils?
begin
  require 'puppet_x/cisco/autogen'
rescue LoadError # seen on master, not on agent
  # See longstanding Puppet issues #4248, #7316, #14073, #14149, etc. Ugh.
  # Fall back to loading autogen.rb relative to this provider file.
  require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', '..',
                                     'puppet_x', 'cisco', 'autogen.rb'))
end
Puppet::Type.type(:cisco_vxlan_vtep).provide(:cisco) do
# Provider preconditions and generated accessors.
confine feature: :cisco_node_utils
defaultfor operatingsystem: :nexus

mk_resource_methods

# Property symbol array for method auto-generation.
# NOTE: For maintainability please keep this list in alphabetical order.
VXLAN_VTEP_NON_BOOL_PROPS = [
  :description,
  :host_reachability,
  :source_interface,
  :source_interface_hold_down_time,
  :multisite_border_gateway_interface,
]
VXLAN_VTEP_BOOL_PROPS = [
  :shutdown
]
VXLAN_VTEP_ALL_PROPS = VXLAN_VTEP_NON_BOOL_PROPS + VXLAN_VTEP_BOOL_PROPS

# Auto-generate getters/setters that delegate through @vtep_interface.
PuppetX::Cisco::AutoGen.mk_puppet_methods(:non_bool, self, '@vtep_interface',
                                          VXLAN_VTEP_NON_BOOL_PROPS)
PuppetX::Cisco::AutoGen.mk_puppet_methods(:bool, self, '@vtep_interface',
                                          VXLAN_VTEP_BOOL_PROPS)
def initialize(value={})
super(value)
@vtep_interface = Cisco::VxlanVtep.vteps[@property_hash[:name]]
@property_flush = {}
end
def self.properties_get(interface_name, intf)
debug "Checking vtep instance, #{interface_name}."
current_state = {
interface: interface_name,
name: interface_name,
ensure: :present,
}
# Call node_utils getter for each property
VXLAN_VTEP_NON_BOOL_PROPS.each do |prop|
current_state[prop] = intf.send(prop)
end
VXLAN_VTEP_BOOL_PROPS.each do |prop|
val = intf.send(prop)
if val.nil?
current_state[prop] = nil
else
current_state[prop] = val ? :true : :false
end
end
new(current_state)
end # self.properties_get
def self.instances
interfaces = []
Cisco::VxlanVtep.vteps.each do |interface_name, intf|
begin
interfaces << properties_get(interface_name, intf)
end
end
interfaces
end # self.instances
def self.prefetch(resources)
interfaces = instances
resources.keys.each do |name|
provider = interfaces.find { |intf| intf.instance_name == name }
resources[name].provider = provider unless provider.nil?
end
end # self.prefetch
def exists?
(@property_hash[:ensure] == :present)
end
def create
@property_flush[:ensure] = :present
end
def destroy
@property_flush[:ensure] = :absent
end
def instance_name
interface
end
def properties_set(new_interface=false)
VXLAN_VTEP_ALL_PROPS.each do |prop|
next unless @resource[prop]
send("#{prop}=", @resource[prop]) if new_interface
unless @property_flush[prop].nil?
@vtep_interface.send("#{prop}=", @property_flush[prop]) if
@vtep_interface.respond_to?("#{prop}=")
end
end
end
def flush
if @property_flush[:ensure] == :absent
@vtep_interface.destroy
@vtep_interface = nil
else
# Create/Update
if @vtep_interface.nil?
new_vtep_interface = true
@vtep_interface = Cisco::VxlanVtep.new(@resource[:interface])
end
properties_set(new_vtep_interface)
end
puts_config
end
def puts_config
if @vtep_interface.nil?
info "Vxlan Vtep Interface=#{@resource[:interface]} is absent."
return
end
# Dump all current properties for this interface
current = sprintf("\n%30s: %s", 'interface', @vtep_interface.name)
VXLAN_VTEP_ALL_PROPS.each do |prop|
current.concat(sprintf("\n%30s: %s", prop, @vtep_interface.send(prop)))
end
debug current
end # puts_config
end
| 29.174194 | 79 | 0.676249 |
module RbSDL2
  # Wraps an SDL_Palette pointer and exposes indexed RGB color access.
  class Palette
    require_relative 'ref_count_pointer'
    # Reference-counted pointer wrapper that frees the palette via SDL.
    class PalettePointer < RefCountPointer
      class << self
        def release(ptr) = ::SDL.FreePalette(ptr)
        def entity_class = ::SDL::Palette
      end
    end
    class << self
      # Build a palette from a list of colors, e.g. Palette[[255,0,0], ...].
      def [](*color)
        plt = new(color.length)
        color.each.with_index { |c, nth| plt[nth] = c }
        plt
      end
      # Allocate a palette with num_colors entries.
      def new(num_colors)
        # SDL_AllocPalette() のエラーは引数が < 1、または必要なメモリー領域がない場合。
        # パレットのカラーは作成時に全て [255, 255, 255, 255] で埋められる。
        # (SDL_AllocPalette fails when the argument is < 1 or memory is
        # exhausted; all entries are initialized to [255, 255, 255, 255].)
        ptr = PalettePointer.new(::SDL.AllocPalette(num_colors))
        raise RbSDL2Error if ptr.null?
        super(ptr)
      end
      # Wrap an existing SDL_Palette pointer without allocating.
      def to_ptr(ptr)
        obj = allocate
        obj.__send__(:initialize, PalettePointer.to_ptr(ptr))
        obj
      end
    end
    def initialize(ptr)
      @st = ::SDL::Palette.new(ptr)
    end
    # Two palettes are equal when they wrap the same underlying pointer.
    def ==(other)
      other.respond_to?(:to_ptr) && other.to_ptr == to_ptr
    end
    # Returns the nth color as an array of component values.
    def [](nth)
      raise ArgumentError if nth < 0 || length <= nth
      ::SDL::Color.new(@st[:colors] + ::SDL::Color.size * nth).values
    end
    # color 引数には 3要素以上の配列であること。4要素目以降は無視される。
    # color 引数は内部で splat する。これに対応していれば配列以外のオブジェクトでもよい。
    # パレットのカラーが描画に使われるときはアルファ値は無視されて不透明(ALPHA_OPAQUE)として扱う。
    # (The color argument must be a 3+ element array; extra elements are
    # ignored. It is splatted internally, so any splat-compatible object
    # works. Alpha is ignored when the palette is used for drawing.)
    def []=(nth, color)
      raise ArgumentError if nth < 0 || length <= nth
      c = ::SDL::Color.new
      c[:r], c[:g], c[:b] = color
      err = ::SDL.SetPaletteColors(self, c, nth, 1)
      raise RbSDL2Error if err < 0
    end
    def each = length.times { |nth| yield(self[nth]) }
    def inspect
      "#<#{self.class.name} colors=#{length}>"
    end
    def length = @st[:ncolors]
    alias size length
    def to_a = to_enum.to_a
    def to_ptr = @st.to_ptr
    def version = @st[:version]
  end
end
| 23.893333 | 69 | 0.592076 |
18883fb8d6922666ea28cab368b6b067c9d815ed | 416 | # frozen_string_literal: true
class Api::V0::Madmp::MadmpSchemasController < Api::V0::BaseController
  before_action :authenticate

  # GET: renders the JSON schema of the requested MadmpSchema.
  # Raises Pundit::NotAuthorizedError when the user may not view it.
  def show
    @schema = MadmpSchema.find(params[:id])
    # check if the user has permissions to use the templates API
    # Bug fix: the policy previously received @fragment, which is never
    # assigned in this controller (always nil); the record being
    # authorized is @schema.
    unless Api::V0::Madmp::MadmpSchemaPolicy.new(@user, @schema).show?
      raise Pundit::NotAuthorizedError
    end
    respond_with @schema.schema
  end
end
| 26 | 72 | 0.730769 |
# Serverspec/InSpec check: the `dotnet` CLI must exist on the target system.
describe command('dotnet') do
  it { should exist }
end
| 14 | 29 | 0.696429 |
# Homebrew cask for the Alacritty terminal emulator.
cask "alacritty" do
  version "0.7.0"
  sha256 "b4337889193332c33d00d5f1e8c744ee9d17cd178f84eae7df86067eac7ddf9a"
  url "https://github.com/alacritty/alacritty/releases/download/v#{version}/Alacritty-v#{version}.dmg"
  appcast "https://github.com/alacritty/alacritty/releases.atom"
  name "Alacritty"
  desc "Cross-platform, GPU-accelerated terminal emulator"
  homepage "https://github.com/alacritty/alacritty/"
  app "Alacritty.app"
  # Expose the bundled CLI binary on PATH.
  binary "#{appdir}/Alacritty.app/Contents/MacOS/alacritty"
  # Files removed by `brew uninstall --zap`.
  zap delete: [
    "~/Library/Saved Application State/io.alacritty.savedState",
  ]
end
| 32.611111 | 102 | 0.761499 |
require "spec_helper"

RSpec.describe Cronis::Lecture3::ReverseArray do
  describe '#sum' do
    context 'when random array is passed' do
      before do
        @service = Cronis::Lecture3::MaxValue.new
      end
      # Fixed the example description: the assertion verifies that the
      # MaxValue service returns the largest element (22), not a reversed
      # array. NOTE(review): the enclosing describe blocks still reference
      # ReverseArray/#sum — confirm which service this spec should cover.
      it 'should return max value of the array' do
        expect(@service.call([1, 4, 22, -1])).to eq 22
      end
    end
  end
end
| 21.4375 | 54 | 0.629738 |
class RemoveColumnsFromPeople < ActiveRecord::Migration
  # Drops the image/icon columns from the people table.
  def change
    %i[header_pic profile_pic skill_one_icon skill_two_icon skill_three_icon].each do |column|
      remove_column :people, column
    end
  end
end
| 26.363636 | 55 | 0.772414 |
9110a93827b2e59dea458cd3c71df1cfe99437a2 | 50,423 | require 'enumerator'
require 'miq-hash_struct'
class MiqRequestWorkflow
include Vmdb::Logging
include_concern "DialogFieldValidation"
# We rely on MiqRequestWorkflow's descendants to be comprehensive
singleton_class.send(:prepend, DescendantLoader::ArDescendantsWithLoader)
attr_accessor :dialogs, :requester, :values, :last_vm_id
def self.automate_dialog_request
nil
end
def self.default_dialog_file
nil
end
def self.default_pre_dialog_file
nil
end
def self.encrypted_options_fields
[]
end
def self.encrypted_options_field_regs
encrypted_options_fields.map { |f| /\[:#{f}\]/ }
end
def self.all_encrypted_options_fields
descendants.flat_map(&:encrypted_options_fields).uniq
end
def self.update_requester_from_parameters(data, user)
return user if data[:user_name].blank?
new_user = User.lookup_by_identity(data[:user_name])
unless new_user
_log.error "requested not changed to <#{data[:user_name]}> due to a lookup failure"
raise ActiveRecord::RecordNotFound
end
_log.warn "requested changed to <#{new_user.userid}>"
new_user
end
def initialize(values, requester, options = {})
instance_var_init(values, requester, options)
unless options[:skip_dialog_load] == true
# If this is the first time we are called the values hash will be empty
# Also skip if we are being called from a web-service
if @dialogs.nil?
@dialogs = get_dialogs
normalize_numeric_fields
else
@running_pre_dialog = true if options[:use_pre_dialog] != false
end
end
unless options[:skip_dialog_load] == true
set_default_values
update_field_visibility
end
end
def instance_var_init(values, requester, options)
@values = values
@filters = {}
@requester = requester.kind_of?(User) ? requester : User.lookup_by_identity(requester)
group_description = values[:requester_group]
if group_description && group_description != @requester.miq_group_description
@requester = @requester.clone
@requester.current_group_by_description = group_description
end
@values.merge!(options) unless options.blank?
end
# Helper method when not using workflow
def make_request(request, values, requester = nil, auto_approve = false)
return false unless validate(values)
password_helper(values, true)
# Ensure that tags selected in the pre-dialog get applied to the request
values[:vm_tags] = (values[:vm_tags].to_miq_a + @values[:pre_dialog_vm_tags]).uniq if @values.try(:[], :pre_dialog_vm_tags).present?
if request
MiqRequest.update_request(request, values, @requester)
else
set_request_values(values)
req = request_class.new(:options => values, :requester => @requester, :request_type => request_type.to_s)
return req unless req.valid? # TODO: CatalogController#atomic_req_submit is the only one that enumerates over the errors
values[:__request_type__] = request_type.to_s.presence # Pass this along to MiqRequest#create_request
request_class.create_request(values, @requester, auto_approve)
end
end
def init_from_dialog(init_values)
@dialogs[:dialogs].keys.each do |dialog_name|
get_all_fields(dialog_name).each_pair do |field_name, field_values|
next unless init_values[field_name].nil?
next if field_values[:display] == :ignore
if !field_values[:default].nil?
val = field_values[:default]
end
if field_values[:values]
if field_values[:values].kind_of?(Hash)
# Save [value, description], skip for timezones array
init_values[field_name] = [val, field_values[:values][val]]
else
field_values[:values].each do |tz|
if tz[1].to_i_with_method == val.to_i_with_method
# Save [value, description] for timezones array
init_values[field_name] = [val, tz[0]]
end
end
end
else
# Set to default value
init_values[field_name] = val
end
end
end
end
def validate(values)
# => Input - A hash keyed by field name with entered values
# => Output - true || false
#
# Update @dialogs adding error keys to fields that don't validate
valid = true
get_all_dialogs(false).each do |d, dlg|
# Check if the entire dialog is ignored or disabled and check while processing the fields
dialog_disabled = !dialog_active?(d, dlg, values)
get_all_fields(d, false).each do |f, fld|
fld[:error] = nil
# Check the disabled flag here so we reset the "error" value on each field
next if dialog_disabled || fld[:display] == :hide
value = get_value(values[f])
if fld[:required] == true
# If :required_method is defined let it determine if the field is value
unless fld[:required_method].nil?
Array.wrap(fld[:required_method]).each do |method|
fld[:error] = send(method, f, values, dlg, fld, value)
# Bail out early if we see an error
break unless fld[:error].nil?
end
unless fld[:error].nil?
valid = false
next
end
else
default_require_method = "default_require_#{f}".to_sym
if self.respond_to?(default_require_method)
fld[:error] = send(default_require_method, f, values, dlg, fld, value)
unless fld[:error].nil?
valid = false
next
end
else
if value.blank?
fld[:error] = "#{required_description(dlg, fld)} is required"
valid = false
next
end
end
end
end
if fld[:validation_method] && respond_to?(fld[:validation_method])
if (fld[:error] = send(fld[:validation_method], f, values, dlg, fld, value))
valid = false
next
end
end
next if value.blank?
msg = "'#{fld[:description]}' in dialog #{dlg[:description]} must be of type #{fld[:data_type]}"
case fld[:data_type]
when :integer
unless is_integer?(value)
fld[:error] = msg; valid = false
end
when :float
unless is_numeric?(value)
fld[:error] = msg; valid = false
end
when :boolean
# TODO: do we need validation for boolean
when :button
# Ignore
else
data_type = Object.const_get(fld[:data_type].to_s.camelize)
unless value.kind_of?(data_type)
fld[:error] = msg; valid = false
end
end
end
end
valid
end
def get_dialog_order
@dialogs[:dialog_order]
end
def get_buttons
@dialogs[:buttons] || [:submit, :cancel]
end
def provisioning_tab_list
dialog_names = @dialogs[:dialog_order].collect(&:to_s)
dialog_descriptions = dialog_names.collect do |dialog_name|
@dialogs.fetch_path(:dialogs, dialog_name.to_sym, :description)
end
dialog_display = dialog_names.collect do |dialog_name|
@dialogs.fetch_path(:dialogs, dialog_name.to_sym, :display)
end
tab_list = []
dialog_names.each_with_index do |dialog_name, index|
next if dialog_display[index] == :hide || dialog_display[index] == :ignore
tab_list << {
:name => dialog_name,
:description => dialog_descriptions[index]
}
end
tab_list
end
def get_all_dialogs(refresh_values = true)
@dialogs[:dialogs].each_key { |d| get_dialog(d, refresh_values) }
@dialogs[:dialogs]
end
def get_dialog(dialog_name, refresh_values = true)
dialog = @dialogs.fetch_path(:dialogs, dialog_name.to_sym)
return {} unless dialog
get_all_fields(dialog_name, refresh_values)
dialog
end
def get_all_fields(dialog_name, refresh_values = true)
dialog = @dialogs.fetch_path(:dialogs, dialog_name.to_sym)
return {} unless dialog
dialog[:fields].each_key { |f| get_field(f, dialog_name, refresh_values) }
dialog[:fields]
end
def get_field(field_name, dialog_name = nil, refresh_values = true)
field_name = field_name.to_sym
dialog_name = find_dialog_from_field_name(field_name) if dialog_name.nil?
field = @dialogs.fetch_path(:dialogs, dialog_name.to_sym, :fields, field_name)
return {} unless field
if field.key?(:values_from) && refresh_values
options = field[:values_from][:options] || {}
options[:prov_field_name] = field_name
field[:values] = send(field[:values_from][:method], options)
# Reset then currently selected item if it no longer appears in the available values
if field[:values].kind_of?(Hash)
if field[:values].length == 1
unless field[:auto_select_single] == false
@values[field_name] = field[:values].to_a.first
end
else
currently_selected = get_value(@values[field_name])
unless currently_selected.nil? || field[:values].key?(currently_selected)
@values[field_name] = [nil, nil]
end
end
end
end
field
end
# TODO: Return list in defined ordered
def dialogs
@dialogs[:dialogs].each_pair { |n, d| yield(n, d) }
end
def fields(dialog = nil)
dialog = [*dialog] unless dialog.nil?
@dialogs[:dialogs].each_pair do |dn, d|
next unless dialog.blank? || dialog.include?(dn)
d[:fields].each_pair do |fn, f|
yield(fn, f, dn, d)
end
end
end
def normalize_numeric_fields
fields do |_fn, f, _dn, _d|
if f[:data_type] == :integer
f[:default] = f[:default].to_i_with_method unless f[:default].blank?
unless f[:values].blank?
keys = f[:values].keys.dup
keys.each { |k| f[:values][k.to_i_with_method] = f[:values].delete(k) }
end
end
end
end
# Helper method to write message to the rails log (production.log) for debugging
def rails_logger(_name, _start)
# Rails.logger.warn("#{name} #{start.zero? ? 'start' : 'end'}")
end
def parse_ws_string(text_input, options = {})
self.class.parse_ws_string(text_input, options)
end
def self.parse_ws_string(text_input, options = {})
return parse_request_parameter_hash(text_input, options) if text_input.kind_of?(Hash)
return {} unless text_input.kind_of?(String)
deprecated_warn = "method: parse_ws_string, arg Type => String"
solution = "arg should be a hash"
MiqAeMethodService::Deprecation.deprecation_warning(deprecated_warn, solution)
result = {}
text_input.split('|').each do |value|
next if value.blank?
idx = value.index('=')
next if idx.nil?
key = options[:modify_key_name] == false ? value[0, idx].strip : value[0, idx].strip.downcase.to_sym
result[key] = value[idx + 1..-1].strip
end
result
end
def self.parse_request_parameter_hash(parameter_hash, options = {})
parameter_hash.each_with_object({}) do |param, hash|
key, value = param
key = key.strip.downcase.to_sym unless options[:modify_key_name] == false
hash[key] = value
end
end
def ws_tags(tag_string, parser = :parse_ws_string)
# Tags are passed as category|value. Example: cc|001|environment|test
ws_tags = send(parser, tag_string)
tags = allowed_tags.each_with_object({}) do |v, tags|
tags[v[:name]] = v[:children].each_with_object({}) { |(k, v), tc| tc[v[:name]] = k }
end
ws_tags.collect { |cat, tag| tags.fetch_path(cat.to_s.downcase, tag.downcase) }.compact
end
# @param parser [:parse_ws_string|:parse_ws_string_v1]
# @param additional_values [String] values of the form cc=001|environment=test
def ws_values(additional_values, parser = :parse_ws_string, parser_options = {})
parsed_values = send(parser, additional_values, parser_options)
parsed_values.each_with_object({}) { |(k, v), ws_values| ws_values[k.to_sym] = v }
end
def parse_ws_string_v1(values, _options = {})
na = []
values.to_s.split("|").each_slice(2) do |k, v|
next if v.nil?
na << [k.strip, v.strip]
end
na
end
def find_dialog_from_field_name(field_name)
@dialogs[:dialogs].each_key do |dialog_name|
return dialog_name if @dialogs[:dialogs][dialog_name][:fields].key?(field_name.to_sym)
end
nil
end
def get_value(data)
data.kind_of?(Array) ? data.first : data
end
def set_or_default_field_values(values)
field_names = values.keys
fields do |fn, f, _dn, _d|
if field_names.include?(fn)
if f.key?(:values)
selected_key = nil
if f[:values].key?(values[fn])
selected_key = values[fn]
elsif f.key?(:default) && f[:values].key?(f[:default])
selected_key = f[:default]
else
unless f[:values].blank?
sorted_values = f[:values].sort
selected_key = sorted_values.first.first
end
end
@values[fn] = [selected_key, f[:values][selected_key]] unless selected_key.nil?
else
@values[fn] = values[fn]
end
end
end
end
def clear_field_values(field_names)
fields do |fn, f, _dn, _d|
if field_names.include?(fn)
@values[fn] = f.key?(:values) ? [nil, nil] : nil
end
end
end
def set_value_from_list(fn, f, value, values = nil, partial_key = false)
@values[fn] = [nil, nil]
values = f[:values] if values.nil?
unless value.nil?
@values[fn] = values.to_a.detect do |v|
if partial_key
_log.warn "comparing [#{v[0]}] to [#{value}]"
v[0].to_s.downcase.include?(value.to_s.downcase)
else
v.include?(value)
end
end
if @values[fn].nil?
_log.info "set_value_from_list did not matched an item" if partial_key
@values[fn] = [nil, nil]
else
_log.info "set_value_from_list matched item value:[#{value}] to item:[#{@values[fn][0]}]" if partial_key
end
end
end
def show_dialog(dialog_name, show_flag, enabled_flag = nil)
dialog = @dialogs.fetch_path(:dialogs, dialog_name.to_sym)
unless dialog.nil?
dialog[:display_init] = dialog[:display] if dialog[:display_init].nil?
# If the initial dialog is not set to show then do not modify it here.
return if dialog[:display_init] != :show
dialog[:display] = show_flag
@values["#{dialog_name}_enabled".to_sym] = [enabled_flag] unless enabled_flag.nil?
end
end
def required_description(dlg, fld)
"'#{dlg[:description]}/#{fld[:required_description] || fld[:description]}'"
end
def allowed_filters(options = {})
model_name = options[:category]
return @filters[model_name] unless @filters[model_name].nil?
rails_logger("allowed_filters - #{model_name}", 0)
@filters[model_name] = @requester.get_expressions(model_name).invert
rails_logger("allowed_filters - #{model_name}", 1)
@filters[model_name]
end
def dialog_active?(name, config, values)
return false if config[:display] == :ignore
enabled_field = "#{name}_enabled".to_sym
# Check if the fields hash contains a <dialog_name>_enabled field
enabled = get_value(values[enabled_field])
return false if enabled == false || enabled == "disabled"
true
end
def show_fields(display_flag, field_names, display_field = :display)
fields do |fn, f, _dn, _d|
if field_names.include?(fn)
flag = f[:display_override].blank? ? display_flag : f[:display_override]
f[display_field] = flag
end
end
end
def retrieve_ldap(_options = {})
email = get_value(@values[:owner_email])
unless email.blank?
l = MiqLdap.new
if l.bind_with_default == true
raise _("No information returned for %{email}") % {:email => email} if (d = l.get_user_info(email)).nil?
[:first_name, :last_name, :address, :city, :state, :zip, :country, :title, :company,
:department, :office, :phone, :phone_mobile, :manager, :manager_mail, :manager_phone].each do |prop|
@values["owner_#{prop}".to_sym] = d[prop].nil? ? nil : d[prop].dup
end
@values[:sysprep_organization] = d[:company].nil? ? nil : d[:company].dup
end
end
end
def default_schedule_time(options = {})
# TODO: Added support for "default_from", like values_from, that gets called once after dialog creation
# Update VM description
fields do |fn, f, _dn, _d|
if fn == :schedule_time
f[:default] = Time.now + options[:offset].to_i_with_method if f[:default].nil?
break
end
end
end
def values_less_then(options)
results = options[:values].transform_keys(&:to_i_with_method)
field, include_equals = options[:field], options[:include_equals]
max_value = field.nil? ? options[:value].to_i_with_method : get_value(@values[field]).to_i_with_method
return results if max_value <= 0
results.reject { |k, _v| include_equals == true ? max_value < k : max_value <= k }
end
def tags
vm_tags = @values[:vm_tags]
vm_tags.each do |tag_id|
tag = Classification.find(tag_id)
yield(tag.name, tag.parent.name) unless tag.nil? # yield the tag's name and category
end if vm_tags.kind_of?(Array)
end
def get_tags
tag_string = ''
tags do |tag, cat|
tag_string << ':' unless tag_string.empty?
tag_string << "#{cat}/#{tag}"
end
tag_string
end
def allowed_tags(options = {})
return @tags unless @tags.nil?
region_number = options.delete(:region_number)
# TODO: Call allowed_tags properly from controller - it is currently hard-coded with no options passed
field_options = @dialogs.fetch_path(:dialogs, :purpose, :fields, :vm_tags, :options)
options = field_options unless field_options.nil?
rails_logger('allowed_tags', 0)
st = Time.now
@tags = {}
exclude_list = options[:exclude].blank? ? [] : options[:exclude].collect(&:to_s)
include_list = options[:include].blank? ? [] : options[:include].collect(&:to_s)
single_select = options[:single_select].blank? ? [] : options[:single_select].collect(&:to_s)
cats = Classification.visible.writeable.managed
cats = cats.in_region(region_number) if region_number
cats.each do |t|
next if exclude_list.include?(t.name)
next unless include_list.blank? || include_list.include?(t.name)
# Force passed tags to be single select
single_value = single_select.include?(t.name) ? true : t.single_value?
@tags[t.id] = {:name => t.name, :description => t.description, :single_value => single_value, :children => {}, :id => t.id}
end
ents = Classification.visible.writeable.parent_ids(@tags.keys).with_tag_name
ents = ents.in_region(region_number) if region_number
ents.each do |t|
full_tag_name = "#{@tags[t.parent_id][:name]}/#{t.name}"
next if exclude_list.include?(full_tag_name)
@tags[t.parent_id][:children][t.id] = {:name => t.name, :description => t.description}
end
@tags.delete_if { |_k, v| v[:children].empty? }
# Now sort the tags based on the order passed options. All remaining tags not defined in the order
# will be sorted by description and appended to the other sorted tags
tag_results, tags_to_sort = [], []
sort_order = options[:order].blank? ? [] : options[:order].collect(&:to_s)
@tags.each do |_k, v|
(idx = sort_order.index(v[:name])).nil? ? tags_to_sort << v : tag_results[idx] = v
end
tags_to_sort = tags_to_sort.sort_by { |a| a[:description] }
@tags = tag_results.compact + tags_to_sort
@tags.each do |tag|
tag[:children] = if tag[:children].first.last[:name] =~ /^\d/
tag[:children].sort_by { |_k, v| v[:name].to_i }
else
tag[:children].sort_by { |_k, v| v[:description] }
end
end
rails_logger('allowed_tags', 1)
_log.info "allowed_tags returned [#{@tags.length}] objects in [#{Time.now - st}] seconds"
@tags
end
def allowed_tags_and_pre_tags
pre_tags = @values[:pre_dialog_vm_tags].to_miq_a
return allowed_tags if pre_tags.blank?
tag_cats = allowed_tags.dup
tag_cat_names = tag_cats.collect { |cat| cat[:name] }
Classification.where(:id => pre_tags).each do |tag|
parent = tag.parent
next if tag_cat_names.include?(parent.name)
new_cat = {:name => parent.name, :description => parent.description, :single_value => parent.single_value?, :children => {}, :id => parent.id}
parent.children.each { |c| new_cat[:children][c.id] = {:name => c.name, :description => c.description} }
tag_cats << new_cat
tag_cat_names << new_cat[:name]
end
tag_cats
end
def tag_symbol
:tag_ids
end
def build_ci_hash_struct(ci, props)
nh = MiqHashStruct.new(:id => ci.id, :evm_object_class => ci.class.base_class.name.to_sym)
props.each { |p| nh.send("#{p}=", ci.send(p)) }
nh
end
def get_dialogs
@values[:miq_request_dialog_name] ||= @values[:provision_dialog_name] || dialog_name_from_automate || self.class.default_dialog_file
dp = @values[:miq_request_dialog_name] = File.basename(@values[:miq_request_dialog_name], ".rb")
_log.info "Loading dialogs <#{dp}> for user <#{@requester.userid}>"
d = MiqDialog.find_by("lower(name) = ? and dialog_type = ?", dp.downcase, self.class.base_model.name)
if d.nil?
raise MiqException::Error,
"Dialog cannot be found. Name:[%{name}] Type:[%{type}]" % {:name => @values[:miq_request_dialog_name],
:type => self.class.base_model.name}
end
d.content
end
def get_pre_dialogs
pre_dialogs = nil
pre_dialog_name = dialog_name_from_automate('get_pre_dialog_name')
unless pre_dialog_name.blank?
pre_dialog_name = File.basename(pre_dialog_name, ".rb")
d = MiqDialog.find_by(:name => pre_dialog_name, :dialog_type => self.class.base_model.name)
unless d.nil?
_log.info "Loading pre-dialogs <#{pre_dialog_name}> for user <#{@requester.userid}>"
pre_dialogs = d.content
end
end
pre_dialogs
end
def dialog_name_from_automate(message = 'get_dialog_name', input_fields = [:request_type], extra_attrs = {})
return nil if self.class.automate_dialog_request.nil?
_log.info "Querying Automate Profile for dialog name"
attrs = {'request' => self.class.automate_dialog_request, 'message' => message}
extra_attrs.each { |k, v| attrs[k] = v }
@values.each_key do |k|
key = "dialog_input_#{k.to_s.downcase}"
if attrs.key?(key)
_log.info "Skipping key=<#{key}> because already set to <#{attrs[key]}>"
else
value = (k == :vm_tags) ? get_tags : get_value(@values[k]).to_s
_log.info "Setting attrs[#{key}]=<#{value}>"
attrs[key] = value
end
end
input_fields.each { |k| attrs["dialog_input_#{k.to_s.downcase}"] = send(k).to_s }
ws = MiqAeEngine.resolve_automation_object("REQUEST", @requester, attrs, :vmdb_object => @requester)
if ws && ws.root
dialog_option_prefix = 'dialog_option_'
dialog_option_prefix_length = dialog_option_prefix.length
ws.root.attributes.each do |key, value|
next unless key.downcase.starts_with?(dialog_option_prefix)
next unless key.length > dialog_option_prefix_length
key = key[dialog_option_prefix_length..-1].downcase
_log.info "Setting @values[#{key}]=<#{value}>"
@values[key.to_sym] = value
end
name = ws.root("dialog_name")
return name.presence
end
nil
end
def self.request_type(type)
type.presence.try(:to_sym) || request_class.request_types.first
end
def request_type
self.class.request_type(get_value(@values[:request_type]))
end
def request_class
req_class = self.class.request_class
return req_class unless get_value(@values[:service_template_request]) == true
(req_class.name + "Template").constantize
end
def self.request_class
@workflow_class ||= name.underscore.gsub(/_workflow$/, "_request").camelize.constantize
end
def set_default_values
set_default_user_info rescue nil
end
def set_default_user_info
return if get_dialog(:requester).blank?
if get_value(@values[:owner_email]).blank? && @requester.email.present?
@values[:owner_email] = @requester.email
retrieve_ldap if MiqLdap.using_ldap?
end
show_flag = MiqLdap.using_ldap? ? :show : :hide
show_fields(show_flag, [:owner_load_ldap])
end
def set_request_values(values)
values[:requester_group] ||= @requester.current_group.description
email = values[:owner_email]
if email.present? && values[:owner_group].blank?
values[:owner_group] = User.find_by_lower_email(email, @requester).try(:miq_group_description)
end
end
def password_helper(values = @values, encrypt = true)
self.class.encrypted_options_fields.each do |pwd_key|
next if values[pwd_key].blank?
if encrypt
values[pwd_key].replace(MiqPassword.try_encrypt(values[pwd_key]))
else
values[pwd_key].replace(MiqPassword.try_decrypt(values[pwd_key]))
end
end
end
def update_field_visibility
end
def refresh_field_values(values)
st = Time.now
@values = values
get_source_and_targets(true)
# @values gets modified during this call
get_all_dialogs
values.merge!(@values)
# Update the display flag for fields based on current settings
update_field_visibility
_log.info "refresh completed in [#{Time.now - st}] seconds"
rescue => err
_log.error "[#{err}]"
$log.error err.backtrace.join("\n")
raise err
end
# Run the relationship methods and perform set intersections on the returned values.
# Optional starting set of results maybe passed in.
def allowed_ci(ci, relats, sources, filtered_ids = nil)
result = nil
relats.each do |rsc_type|
rails_logger("allowed_ci - #{rsc_type}_to_#{ci}", 0)
rc = send("#{rsc_type}_to_#{ci}", sources)
rails_logger("allowed_ci - #{rsc_type}_to_#{ci}", 1)
unless rc.nil?
rc = rc.to_a
result = result.nil? ? rc : result & rc
end
end
result = [] if result.nil?
result.reject! { |k, _v| !filtered_ids.include?(k) } unless filtered_ids.nil?
result.each_with_object({}) { |s, hash| hash[s[0]] = s[1] }
end
def process_filter(filter_prop, ci_klass, targets)
rails_logger("process_filter - [#{ci_klass}]", 0)
filter_id = get_value(@values[filter_prop]).to_i
MiqSearch.filtered(filter_id, ci_klass, targets,
:user => @requester,
:miq_group => @requester.current_group,
).tap { rails_logger("process_filter - [#{ci_klass}]", 1) }
end
def find_all_ems_of_type(klass, src = nil)
result = []
each_ems_metadata(src, klass) { |ci| result << ci }
result
end
def find_hosts_under_ci(item)
find_classes_under_ci(item, Host)
end
def find_respools_under_ci(item)
find_classes_under_ci(item, ResourcePool)
end
def find_classes_under_ci(item, klass)
results = []
return results if item.nil?
node = load_ems_node(item, _log.prefix)
each_ems_metadata(node.attributes[:object], klass) { |ci| results << ci } unless node.nil?
results
end
def load_ems_node(item, log_header)
@ems_xml_nodes ||= {}
klass_name = item.kind_of?(MiqHashStruct) ? item.evm_object_class : item.class.base_class.name
node = @ems_xml_nodes["#{klass_name}_#{item.id}"]
$log.error "#{log_header} Resource <#{klass_name}_#{item.id} - #{item.name}> not found in cached resource tree." if node.nil?
node
end
def ems_has_clusters?
found = each_ems_metadata(nil, EmsCluster) { |ci| break(ci) }
return found.evm_object_class == :EmsCluster if found.kind_of?(MiqHashStruct)
false
end
def get_ems_folders(folder, dh = {}, full_path = "")
if folder.evm_object_class == :EmsFolder
if folder.hidden
return dh if folder.name != 'vm'
else
full_path += full_path.blank? ? folder.name.to_s : " / #{folder.name}"
dh[folder.id] = full_path unless folder.type == "Datacenter"
end
end
# Process child folders
node = load_ems_node(folder, _log.prefix)
node.children.each { |child| get_ems_folders(child.attributes[:object], dh, full_path) } unless node.nil?
dh
end
def get_ems_respool(node, dh = {}, full_path = "")
return if node.nil?
if node.kind_of?(XmlHash::Element)
folder = node.attributes[:object]
if node.name == :ResourcePool
full_path += full_path.blank? ? folder.name.to_s : " / #{folder.name}"
dh[folder.id] = full_path
end
end
# Process child folders
node.children.each { |child| get_ems_respool(child, dh, full_path) }
dh
end
def find_datacenter_for_ci(item, ems_src = nil)
find_class_above_ci(item, EmsFolder, ems_src, true)
end
def find_hosts_for_respool(item, ems_src = nil)
hosts = find_class_above_ci(item, Host, ems_src)
return [hosts] unless hosts.blank?
cluster = find_cluster_above_ci(item)
find_hosts_under_ci(cluster)
end
def find_cluster_above_ci(item, ems_src = nil)
find_class_above_ci(item, EmsCluster, ems_src)
end
def find_class_above_ci(item, klass, _ems_src = nil, datacenter = false)
result = nil
node = load_ems_node(item, _log.prefix)
klass_name = klass.name.to_sym
# Walk the xml document parents to find the requested class
while node.kind_of?(XmlHash::Element)
ci = node.attributes[:object]
if node.name == klass_name && (datacenter == false || datacenter == true && ci.type == "Datacenter")
result = ci
break
end
node = node.parent
end
result
end
def each_ems_metadata(ems_ci = nil, klass = nil, &_blk)
if ems_ci.nil?
src = get_source_and_targets
ems_xml = get_ems_metadata_tree(src)
ems_node = ems_xml.try(:root)
else
ems_node = load_ems_node(ems_ci, _log.prefix)
end
klass_name = klass.name.to_sym unless klass.nil?
unless ems_node.nil?
ems_node.each_recursive { |node| yield(node.attributes[:object]) if klass.nil? || klass_name == node.name }
end
end
# Build (and memoize in @ems_metadata_tree) an XML representation of the
# EMS inventory tree for the provision source, excluding VMs/templates.
# Also rebuilds @ems_xml_nodes, the "<BaseClass>_<id>" => node lookup cache.
# NOTE: the early `return` when src[:ems] is nil exits the method without
# memoizing, so the nil result is recomputed on every call.
def get_ems_metadata_tree(src)
@ems_metadata_tree ||= begin
return if src[:ems].nil?
st = Time.zone.now
result = load_ar_obj(src[:ems]).fulltree_arranged(:except_type => "VmOrTemplate")
ems_metadata_tree_add_hosts_under_clusters!(result)
@ems_xml_nodes = {}
xml = MiqXml.newDoc(:xmlhash)
convert_to_xml(xml, result)
_log.info "EMS metadata collection completed in [#{Time.zone.now - st}] seconds"
xml
end
end
# Recursively attach each cluster's hosts as (empty-subtree) children in
# the arranged inventory hash so hosts appear under their clusters.
def ems_metadata_tree_add_hosts_under_clusters!(result)
  result.each do |ci, subtree|
    ems_metadata_tree_add_hosts_under_clusters!(subtree)
    next unless ci.kind_of?(EmsCluster)
    ci.hosts.each { |host| subtree[host] = {} }
  end
end
# Convert the arranged inventory hash into XML elements under +xml+,
# registering each generated node in @ems_xml_nodes keyed by
# "<BaseClass>_<id>".  Each node's :object attribute holds a hash-struct
# copy of the CI.
def convert_to_xml(xml, result)
result.each do |obj, children|
@ems_xml_nodes["#{obj.class.base_class}_#{obj.id}"] = node = xml.add_element(obj.class.base_class.name, :object => ci_to_hash_struct(obj))
convert_to_xml(node, children)
end
end
# Resolve the dialog selection for +dialog_key+ into +result+: stores the
# selected id under "<key>_id" (nil when 0/unset) and, when an id is
# present, a hash-struct copy of the looked-up +klass+ record under +key+.
def add_target(dialog_key, key, klass, result)
key_id = "#{key}_id".to_sym
result[key_id] = get_value(@values[dialog_key])
# An id of 0 means "nothing selected".
result[key_id] = nil if result[key_id] == 0
result[key] = ci_to_hash_struct(klass.find_by(:id => result[key_id])) unless result[key_id].nil?
end
# Convert a CI (or a collection of CIs) into a lightweight hash-struct.
# Dispatches to a "<base_class>_to_hash_struct" helper when one is
# defined; otherwise falls back to default_ci_to_hash_struct.
def ci_to_hash_struct(ci)
  return nil if ci.nil?
  return ci.collect { |item| ci_to_hash_struct(item) } if ci.respond_to?(:collect)
  handler = "#{ci.class.base_class.name.underscore}_to_hash_struct".to_sym
  return send(handler, ci) if respond_to?(handler, true)
  default_ci_to_hash_struct(ci)
end
# Hash-struct projection of a Host for UI consumption.
def host_to_hash_struct(ci)
build_ci_hash_struct(ci, [:name, :vmm_product, :vmm_version, :state, :v_total_vms])
end
# Hash-struct projection of a VM/template, including its snapshots.
def vm_or_template_to_hash_struct(ci)
  hs = build_ci_hash_struct(ci, [:name, :platform])
  hs.snapshots = ci.snapshots.collect { |snap| ci_to_hash_struct(snap) }
  hs
end
# Hash-struct projection of an EmsFolder.
def ems_folder_to_hash_struct(ci)
build_ci_hash_struct(ci, [:name, :type, :hidden])
end
# Hash-struct projection of a Storage, with a comma-separated list of its
# storage cluster names (nil when it belongs to none).
def storage_to_hash_struct(ci)
  hs = build_ci_hash_struct(ci, [:name, :free_space, :total_space, :storage_domain_type])
  hs.storage_clusters = ci.storage_clusters.blank? ? nil : ci.storage_clusters.collect(&:name).join(', ')
  hs
end
# Hash-struct projection of a Snapshot.
def snapshot_to_hash_struct(ci)
build_ci_hash_struct(ci, [:name, :current?])
end
# Hash-struct projection of a CustomizationSpec.
def customization_spec_to_hash_struct(ci)
build_ci_hash_struct(ci, [:name, :typ, :description, :last_update_time, :is_sysprep_spec?])
end
# Resolve +ci+ back to an ActiveRecord object: arrays are resolved
# element-wise, hash-structs are looked up by class/id, and anything else
# is returned unchanged.
def load_ar_obj(ci)
  case ci
  when Array
    load_ar_objs(ci)
  when MiqHashStruct
    ci.evm_object_class.to_s.camelize.constantize.find_by(:id => ci.id)
  else
    ci
  end
end
# Resolve each element of +ci+ via load_ar_obj.
def load_ar_objs(ci)
ci.collect { |i| load_ar_obj(i) }
end
# Return empty hash if we are selecting placement automatically so we do not
# spend time determining all the available resources
# Delegates to get_source_and_targets for the source/targets hash shape.
def resources_for_ui
get_source_and_targets
end
# Host AR objects allowed for placement.  Starts from all hosts of the
# source EMS type, intersects with hosts of the selected storage (if any)
# and with hosts under the selected datacenter (if any), then applies the
# :host_filter.  Returns [] when there is no usable source.
def allowed_hosts_obj(options = {})
return [] if (src = resources_for_ui).blank? || src[:ems].nil?
datacenter = src[:datacenter] || options[:datacenter]
rails_logger('allowed_hosts_obj', 0)
st = Time.now
hosts_ids = find_all_ems_of_type(Host).collect(&:id)
hosts_ids &= load_ar_obj(src[:storage]).hosts.collect(&:id) unless src[:storage].nil?
if datacenter
dc_node = load_ems_node(datacenter, _log.prefix)
hosts_ids &= find_hosts_under_ci(dc_node.attributes[:object]).collect(&:id)
end
return [] if hosts_ids.blank?
# Remove any hosts that are no longer in the list
all_hosts = load_ar_obj(src[:ems]).hosts.find_all { |h| hosts_ids.include?(h.id) }
allowed_hosts_obj_cache = process_filter(:host_filter, Host, all_hosts)
_log.info "allowed_hosts_obj returned [#{allowed_hosts_obj_cache.length}] objects in [#{Time.now - st}] seconds"
rails_logger('allowed_hosts_obj', 1)
allowed_hosts_obj_cache
end
# Storages allowed for placement: the writable storages of the allowed
# hosts (or of the selected host), optionally restricted to the selected
# storage profile, then passed through the :ds_filter.  Returns
# hash-struct projections.
def allowed_storages(_options = {})
  return [] if (src = resources_for_ui).blank? || src[:ems].nil?
  hosts = src[:host].nil? ? allowed_hosts_obj({}) : [load_ar_obj(src[:host])]
  return [] if hosts.blank?
  rails_logger('allowed_storages', 0)
  st = Time.now
  MiqPreloader.preload(hosts, :storages)
  # De-duplicate storages shared by multiple hosts, keyed by id.
  storages = hosts.each_with_object({}) do |host, hash|
    host.writable_storages.each { |s| hash[s.id] = s }
  end.values
  selected_storage_profile_id = get_value(@values[:placement_storage_profile])
  if selected_storage_profile_id
    # Keep only storages attached to the selected storage profile.
    # (select! is the positive form of the old `reject! { |s| !... }`.)
    storages.select! { |s| s.storage_profiles.pluck(:id).include?(selected_storage_profile_id) }
  end
  allowed_storages_cache = process_filter(:ds_filter, Storage, storages).collect do |s|
    ci_to_hash_struct(s)
  end
  _log.info "allowed_storages returned [#{allowed_storages_cache.length}] objects in [#{Time.now - st}] seconds"
  rails_logger('allowed_storages', 1)
  allowed_storages_cache
end
# Hosts allowed for placement after applying the host filter and the
# cross-dialog (cluster/respool/folder) constraints.  Returns Host AR
# objects, or [] when nothing qualifies.
def allowed_hosts(_options = {})
  hosts = allowed_hosts_obj
  hosts_ids = hosts.collect(&:id)
  result_hosts_hash = allowed_ci(:host, [:cluster, :respool, :folder], hosts_ids)
  host_ids = result_hosts_hash.to_a.transpose.first
  return [] if host_ids.nil?
  # select replaces the old `collect { |h| h if ... }.compact` idiom.
  find_all_ems_of_type(Host).select { |h| host_ids.include?(h.id) }
end
# Datacenters allowed given the current cluster/respool/host/folder picks.
def allowed_datacenters(_options = {})
allowed_ci(:datacenter, [:cluster, :respool, :host, :folder])
end
# Clusters of the source EMS, reduced by the :cluster_filter and the
# current respool/host/folder picks.
def allowed_clusters(_options = {})
all_clusters = EmsCluster.where(:ems_id => get_source_and_targets[:ems].try(:id))
filtered_targets = process_filter(:cluster_filter, EmsCluster, all_clusters)
allowed_ci(:cluster, [:respool, :host, :folder], filtered_targets.collect(&:id))
end
# Resource pools of the source EMS, reduced by the :rp_filter and the
# current cluster/host/folder picks.
def allowed_respools(_options = {})
all_resource_pools = ResourcePool.where(:ems_id => get_source_and_targets[:ems].try(:id))
filtered_targets = process_filter(:rp_filter, ResourcePool, all_resource_pools)
allowed_ci(:respool, [:cluster, :host, :folder], filtered_targets.collect(&:id))
end
alias_method :allowed_resource_pools, :allowed_respools
# Folders allowed given the current cluster/host/respool picks.
def allowed_folders(_options = {})
allowed_ci(:folder, [:cluster, :host, :respool])
end
# id => name hash of datacenters above the selected (or all) clusters;
# nil when the EMS has no clusters.
def cluster_to_datacenter(src)
return nil unless ems_has_clusters?
ci_to_datacenter(src, :cluster, EmsCluster)
end
# id => name hash of datacenters above the selected (or all) resource pools.
def respool_to_datacenter(src)
ci_to_datacenter(src, :respool, ResourcePool)
end
# id => name hash of datacenters above the selected (or all) hosts.
def host_to_datacenter(src)
ci_to_datacenter(src, :host, Host)
end
# id => name hash of datacenters above the selected folder; nil when no
# folder is selected.
def folder_to_datacenter(src)
return nil if src[:folder].nil?
ci_to_datacenter(src, :folder, EmsFolder)
end
# Map the datacenters above CIs of the given type to an id => name hash.
# Uses the selected src[ci] when present, otherwise every CI of +ci_type+.
def ci_to_datacenter(src, ci, ci_type)
  cis = src[ci].nil? ? find_all_ems_of_type(ci_type) : [src[ci]]
  datacenters = cis.collect { |c| find_datacenter_for_ci(c) }.compact.uniq
  datacenters.each_with_object({}) { |dc, result| result[dc.id] = dc.name }
end
# Map resource pools (the selected one, or all) to the clusters above
# them as an id => name hash; nil when the EMS has no clusters.
def respool_to_cluster(src)
  return nil unless ems_has_clusters?
  pools = src[:respool].nil? ? find_all_ems_of_type(ResourcePool) : [src[:respool]]
  clusters = pools.collect { |pool| find_cluster_above_ci(pool) }.compact
  clusters.each_with_object({}) { |cluster, result| result[cluster.id] = cluster.name }
end
# id => name hash of clusters above the selected (or allowed) hosts; nil
# when the EMS has no clusters.
def host_to_cluster(src)
return nil unless ems_has_clusters?
sources = src[:host].nil? ? allowed_hosts_obj : [src[:host]]
targets = sources.collect { |h| find_cluster_above_ci(h) }.compact
targets.each_with_object({}) { |c, r| r[c.id] = c.name }
end
# id => name hash of clusters, restricted to the selected folder's
# datacenter when one is chosen; nil when the EMS has no clusters.
def folder_to_cluster(src)
return nil unless ems_has_clusters?
source = find_all_ems_of_type(EmsCluster)
# If a folder is selected, reduce the cluster list to only clusters in the same data center as the folder
source = source.reject { |c| find_datacenter_for_ci(c).id != src[:datacenter].id } unless src[:datacenter].nil?
source.each_with_object({}) { |c, r| r[c.id] = c.name }
end
# id => path hash of resource pools under the selected cluster (or all
# pools when none is selected); nil when the EMS has no clusters.
def cluster_to_respool(src)
return nil unless ems_has_clusters?
targets = src[:cluster].nil? ? find_all_ems_of_type(ResourcePool) : find_respools_under_ci(src[:cluster])
res_pool_with_path = get_ems_respool(get_ems_metadata_tree(src))
targets.each_with_object({}) { |rp, r| r[rp.id] = res_pool_with_path[rp.id] }
end
# id => path hash of resource pools in the selected folder's datacenter;
# nil when no folder is selected.
def folder_to_respool(src)
return nil if src[:folder].nil?
datacenter = find_datacenter_for_ci(src[:folder])
targets = find_respools_under_ci(datacenter)
res_pool_with_path = get_ems_respool(get_ems_metadata_tree(src))
targets.each_with_object({}) { |rp, r| r[rp.id] = res_pool_with_path[rp.id] }
end
# id => path hash of resource pools reachable from the selected (or
# allowed) hosts; pools are collected under each host's cluster when it
# has one, otherwise under the host itself.
def host_to_respool(src)
hosts = src[:host].nil? ? allowed_hosts_obj : [src[:host]]
targets = hosts.collect do |h|
cluster = find_cluster_above_ci(h)
source = cluster.nil? ? h : cluster
find_respools_under_ci(source)
end.flatten
res_pool_with_path = get_ems_respool(get_ems_metadata_tree(src))
targets.each_with_object({}) { |rp, r| r[rp.id] = res_pool_with_path[rp.id] }
end
# id => name hash of hosts under the selected cluster (or all hosts when
# none is selected); nil when the EMS has no clusters.
def cluster_to_host(src)
  return nil unless ems_has_clusters?
  hosts = src[:cluster].nil? ? find_all_ems_of_type(Host) : find_hosts_under_ci(src[:cluster])
  hosts.each_with_object({}) { |host, result| result[host.id] = host.name }
end
# id => name hash of hosts serving the selected resource pool (or all
# hosts when none is selected).
def respool_to_host(src)
hosts = src[:respool].nil? ? find_all_ems_of_type(Host) : find_hosts_for_respool(src[:respool])
hosts.each_with_object({}) { |h, r| r[h.id] = h.name }
end
# id => name hash of hosts, restricted to the selected folder's
# datacenter when one is chosen.
def folder_to_host(src)
source = find_all_ems_of_type(Host)
# If a folder is selected, reduce the host list to only hosts in the same datacenter as the folder
source = source.reject { |h| find_datacenter_for_ci(h).id != src[:datacenter].id } unless src[:datacenter].nil?
source.each_with_object({}) { |h, r| r[h.id] = h.name }
end
# id => path hash of folders in the datacenters of the selected (or
# allowed) hosts, merged across all of those datacenters.
def host_to_folder(src)
sources = src[:host].nil? ? allowed_hosts_obj : [src[:host]]
datacenters = sources.collect do |h|
rails_logger("host_to_folder for host #{h.name}", 0)
result = find_datacenter_for_ci(h)
rails_logger("host_to_folder for host #{h.name}", 1)
result
end.compact
datacenters.each_with_object({}) do |dc, folders|
rails_logger("host_to_folder for dc #{dc.name}", 0)
folders.merge!(get_ems_folders(dc))
rails_logger("host_to_folder for dc #{dc.name}", 1)
end
end
# id => path hash of folders in the selected cluster's datacenter; nil
# when no cluster is selected or the EMS has no clusters.
def cluster_to_folder(src)
return nil unless ems_has_clusters?
return nil if src[:cluster].nil?
sources = [src[:cluster]]
datacenters = sources.collect { |h| find_datacenter_for_ci(h) }.compact
datacenters.each_with_object({}) { |dc, folders| folders.merge!(get_ems_folders(dc)) }
end
# id => path hash of folders in the selected resource pool's datacenter;
# nil when no resource pool is selected, {} when it has no datacenter.
def respool_to_folder(src)
  return nil if src[:respool].nil?
  dc = find_datacenter_for_ci(src[:respool])
  dc.nil? ? {} : get_ems_folders(dc)
end
# Apply a web-service supplied value for +key+ to +values+.  The raw value
# is cast to the dialog field's data type; when the field carries a
# :values list, the cast value must match an item's id/key and is stored
# as an [id, display_name] pair.
# Note: the "Unable to find value" warning also fires for simple fields
# that have no :values list, since +result+ remains nil for them.
def set_ws_field_value(values, key, data, dialog_name, dlg_fields)
value = data.delete(key)
dlg_field = dlg_fields[key]
data_type = dlg_field[:data_type]
set_value = cast_value(value, data_type)
result = nil
if dlg_field.key?(:values)
# Refresh sources/fields so the field's value list is current.
get_source_and_targets(true)
get_field(key, dialog_name)
field_values = dlg_field[:values]
_log.info "processing key <#{dialog_name}:#{key}(#{data_type})> with values <#{field_values.inspect}>"
if field_values.present?
result = if field_values.first.kind_of?(MiqHashStruct)
found = field_values.detect { |v| v.id == set_value }
[found.id, found.name] if found
else
[set_value, field_values[set_value]] if field_values.key?(set_value)
end
set_value = [result.first, result.last] unless result.nil?
end
end
_log.warn "Unable to find value for key <#{dialog_name}:#{key}(#{data_type})> with input value <#{set_value.inspect}>. No matching item found." if result.nil?
_log.info "setting key <#{dialog_name}:#{key}(#{data_type})> to value <#{set_value.inspect}>"
values[key] = set_value
end
# Coerce a raw web-service value into the dialog field's declared data
# type.  Unknown types (and :button) pass through unchanged.
def cast_value(value, data_type)
  if data_type == :integer
    value.to_i_with_method
  elsif data_type == :float
    value.to_f
  elsif data_type == :boolean
    value.to_s.downcase.in?(%w(true t))
  elsif data_type == :time
    Time.zone.parse(value)
  else
    # :button and any unrecognized type are passed through untouched.
    value
  end
end
# Set values[key] by matching a web-service supplied *display name*
# (rather than an id) against the dialog field's :values list.  Matching
# is case-insensitive on +obj_key+ for hash-struct lists, or on the hash
# value for plain key/value lists.  On a match the stored value is an
# [id, display_name] pair; otherwise a warning is logged and +values+ is
# left untouched.  The raw value is always removed from +data+.
def set_ws_field_value_by_display_name(values, key, data, dialog_name, dlg_fields, obj_key = :name)
  value = data.delete(key)
  dlg_field = dlg_fields[key]
  data_type = dlg_field[:data_type]
  find_value = value.to_s.downcase
  # Guard clauses replace the old nested if / unless...else structure.
  return unless dlg_field.key?(:values)
  field_values = dlg_field[:values]
  _log.info "processing key <#{dialog_name}:#{key}(#{data_type})> with values <#{field_values.inspect}>"
  return if field_values.blank?
  result = if field_values.first.kind_of?(MiqHashStruct)
             found = field_values.detect { |v| v.send(obj_key).to_s.downcase == find_value }
             [found.id, found.send(obj_key)] if found
           else
             field_values.detect { |_k, v| v.to_s.downcase == find_value }
           end
  if result.nil?
    _log.warn "Unable to set key <#{dialog_name}:#{key}(#{data_type})> to value <#{find_value.inspect}>. No matching item found."
  else
    set_value = [result.first, result.last]
    _log.info "setting key <#{dialog_name}:#{key}(#{data_type})> to value <#{set_value.inspect}>"
    values[key] = set_value
  end
end
# Set a dialog field from web-service data that may arrive either as an
# id (optionally prefixed with +id_klass+) under +data_key+, or as a
# display name under the same key with its "_id" suffix removed.
def set_ws_field_value_by_id_or_name(values, dlg_field, data, dialog_name, dlg_fields, data_key = nil, id_klass = nil)
data_key = dlg_field if data_key.blank?
if data.key?(data_key)
# Id form: optionally qualify with the class, then match by id.
data[data_key] = "#{id_klass}::#{data[data_key]}" unless id_klass.blank?
data[dlg_field] = data.delete(data_key)
set_ws_field_value(values, dlg_field, data, dialog_name, dlg_fields)
else
# Name form: look for the key without the "_id" suffix and match by name.
data_key_without_id = data_key.to_s.chomp('_id').to_sym
if data.key?(data_key_without_id)
data[data_key] = data.delete(data_key_without_id)
data[dlg_field] = data.delete(data_key)
set_ws_field_value_by_display_name(values, dlg_field, data, dialog_name, dlg_fields, :name)
end
end
end
# Fetch the field definitions for +dialog_name+, logging (and returning
# nil) when the dialog does not exist.
def get_ws_dialog_fields(dialog_name)
dlg_fields = @dialogs.fetch_path(:dialogs, dialog_name, :fields)
_log.info "<#{dialog_name}> dialog not found in dialogs. Field updates will be skipped." if dlg_fields.nil?
dlg_fields
end
# Customization templates compatible with the selected ISO/PXE image and
# this workflow's provision type ("host" vs "vm").  As a side effect,
# caches the selected template's script in
# @values[:customization_template_script] (cleared when nothing matches).
def allowed_customization_templates(_options = {})
result = []
customization_template_id = get_value(@values[:customization_template_id])
@values[:customization_template_script] = nil if customization_template_id.nil?
prov_typ = self.class == MiqHostProvisionWorkflow ? "host" : "vm"
image = supports_iso? ? get_iso_image : get_pxe_image
unless image.nil?
result = image.customization_templates.collect do |c|
# Keep templates whose pxe image type matches this provision type (or has none).
if c.pxe_image_type.provision_type.blank? || c.pxe_image_type.provision_type == prov_typ
@values[:customization_template_script] = c.script if c.id == customization_template_id
build_ci_hash_struct(c, [:name, :description, :updated_at])
end
end.compact
end
@values[:customization_template_script] = nil if result.blank?
result
end
# The currently selected ISO image, or nil.
def get_iso_image
get_image_by_type(:iso_image_id)
end
# The currently selected PXE image, or nil.
def get_pxe_image
get_image_by_type(:pxe_image_id)
end
# Look up a PXE/ISO image from a dialog value of the form "Klass::id".
# Returns nil when the value has no id component.
def get_image_by_type(image_type)
  klass_name, id = get_value(@values[image_type]).to_s.split('::')
  return nil if id.blank?
  klass_name.constantize.find_by(:id => id)
end
# The currently selected PxeServer, or nil.
def get_pxe_server
PxeServer.find_by(:id => get_value(@values[:pxe_server_id]))
end
# id => name hash of every PXE server.
def allowed_pxe_servers(_options = {})
PxeServer.all.each_with_object({}) { |p, h| h[p.id] = p.name }
end
# Hash-struct projections of the selected PXE server's images, skipping
# images without a pxe_image_type or flagged default_for_windows, and
# keeping only images matching this workflow's provision type (or typed
# for any).
def allowed_pxe_images(_options = {})
pxe_server = get_pxe_server
return [] if pxe_server.nil?
prov_typ = self.class == MiqHostProvisionWorkflow ? "host" : "vm"
pxe_server.pxe_images.collect do |p|
next if p.pxe_image_type.nil? || p.default_for_windows
# filter pxe images by provision_type to show vm/any or host/any
build_ci_hash_struct(p, [:name, :description]) if p.pxe_image_type.provision_type.blank? || p.pxe_image_type.provision_type == prov_typ
end.compact
end
# Hash-struct projections of the selected PXE server's Windows images.
def allowed_windows_images(_options = {})
pxe_server = get_pxe_server
return [] if pxe_server.nil?
pxe_server.windows_images.collect do |p|
build_ci_hash_struct(p, [:name, :description])
end.compact
end
# Combined PXE + Windows images; ids are prefixed with the class name so
# the two types can share one dialog list.
def allowed_images(options = {})
result = allowed_pxe_images(options) + allowed_windows_images(options)
# Change the ID to contain the class name since this is a mix class type
result.each { |ci| ci.id = "#{ci.evm_object_class}::#{ci.id}" }
result
end
# ISO images of the source template's EMS ISO datastore ([] when any link
# in that chain is missing).
def get_iso_images
template = VmOrTemplate.find_by(:id => get_value(@values[:src_vm_id]))
template.try(:ext_management_system).try(:iso_datastore).try(:iso_images) || []
end
# Hash-struct projections of the available ISO images; ids are prefixed
# with the class name for consistency with allowed_images.
def allowed_iso_images(_options = {})
result = get_iso_images.collect do |p|
build_ci_hash_struct(p, [:name])
end.compact
# Change the ID to contain the class name since this is a mix class type
result.each { |ci| ci.id = "#{ci.evm_object_class}::#{ci.id}" }
result
end
# Apply web-service supplied requester fields to +values+: handles the
# auto_approve flag, optionally expands the owner email with the
# configured LDAP user suffix, and copies any remaining keys that exist
# in the :requester dialog.
def ws_requester_fields(values, fields)
  dialog_name = :requester
  dlg_fields = @dialogs.fetch_path(:dialogs, :requester, :fields)
  if dlg_fields.nil?
    # Typo fix: message previously read "Field updates be skipped."
    _log.info "<#{dialog_name}> dialog not found in dialogs. Field updates will be skipped."
    return
  end
  data = parse_ws_string(fields)
  _log.info "data:<#{data.inspect}>"
  values[:auto_approve] = data.delete(:auto_approve) == 'true'
  data.delete(:user_name)
  # get owner values from LDAP if configured
  if data[:owner_email].present? && MiqLdap.using_ldap?
    email = data[:owner_email]
    email = "#{email}@#{::Settings.authentication.user_suffix}" unless email.include?('@')
    values[:owner_email] = email
    # Best-effort LDAP lookup; failures are deliberately ignored.
    retrieve_ldap rescue nil
  end
  dlg_keys = dlg_fields.keys
  data.keys.each do |key|
    if dlg_keys.include?(key)
      _log.info "processing key <#{dialog_name}:#{key}> with value <#{data[key].inspect}>"
      values[key] = data[key]
    else
      _log.warn "Skipping key <#{dialog_name}:#{key}>. Key name not found in dialog"
    end
  end
end
# Apply web-service supplied schedule fields.  A non-blank schedule_time
# switches the request to scheduled mode and parses the schedule and
# retirement timestamps; any remaining keys present in the :schedule
# dialog are set normally.
# NOTE(review): Time.parse here uses the process-local zone, unlike
# Time.zone.parse in cast_value — confirm this difference is intended.
def ws_schedule_fields(values, _fields, data)
return if (dlg_fields = get_ws_dialog_fields(dialog_name = :schedule)).nil?
unless data[:schedule_time].blank?
values[:schedule_type] = 'schedule'
[:schedule_time, :retirement_time].each do |key|
data_type = :time
time_value = data.delete(key)
set_value = time_value.blank? ? nil : Time.parse(time_value)
_log.info "setting key <#{dialog_name}:#{key}(#{data_type})> to value <#{set_value.inspect}>"
values[key] = set_value
end
end
dlg_keys = dlg_fields.keys
data.keys.each { |key| set_ws_field_value(values, key, data, dialog_name, dlg_fields) if dlg_keys.include?(key) }
end
# Collect every field-level validation error and raise a single,
# translated exception listing all of them (after logging the full text).
def raise_validate_errors
  errors = []
  fields { |_fn, fld, _dn, _d| errors << fld[:error] unless fld[:error].nil? }
  joined = errors.join("\n")
  _log.error "<Provision failed for the following reasons:\n#{joined}>"
  raise _("Provision failed for the following reasons:\n%{errors}") % {:errors => joined}
end
private
# Fallback hash-struct projection used when no class-specific
# "<base_class>_to_hash_struct" helper exists: includes :name only when
# the CI responds to it.
def default_ci_to_hash_struct(ci)
attributes = []
attributes << :name if ci.respond_to?(:name)
build_ci_hash_struct(ci, attributes)
end
end
| 34.702684 | 163 | 0.664974 |
e29d61acdb6febd95603898f746eddba96b689b5 | 29,415 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'seahorse/client/plugins/content_length.rb'
require 'aws-sdk-core/plugins/credentials_configuration.rb'
require 'aws-sdk-core/plugins/logging.rb'
require 'aws-sdk-core/plugins/param_converter.rb'
require 'aws-sdk-core/plugins/param_validator.rb'
require 'aws-sdk-core/plugins/user_agent.rb'
require 'aws-sdk-core/plugins/helpful_socket_errors.rb'
require 'aws-sdk-core/plugins/retry_errors.rb'
require 'aws-sdk-core/plugins/global_configuration.rb'
require 'aws-sdk-core/plugins/regional_endpoint.rb'
require 'aws-sdk-core/plugins/endpoint_discovery.rb'
require 'aws-sdk-core/plugins/endpoint_pattern.rb'
require 'aws-sdk-core/plugins/response_paging.rb'
require 'aws-sdk-core/plugins/stub_responses.rb'
require 'aws-sdk-core/plugins/idempotency_token.rb'
require 'aws-sdk-core/plugins/jsonvalue_converter.rb'
require 'aws-sdk-core/plugins/client_metrics_plugin.rb'
require 'aws-sdk-core/plugins/client_metrics_send_plugin.rb'
require 'aws-sdk-core/plugins/transfer_encoding.rb'
require 'aws-sdk-core/plugins/http_checksum.rb'
require 'aws-sdk-core/plugins/signature_v4.rb'
require 'aws-sdk-core/plugins/protocols/json_rpc.rb'
Aws::Plugins::GlobalConfiguration.add_identifier(:pi)
module Aws::PI
# An API client for PI. To construct a client, you need to configure a `:region` and `:credentials`.
#
# client = Aws::PI::Client.new(
# region: region_name,
# credentials: credentials,
# # ...
# )
#
# For details on configuring region and credentials see
# the [developer guide](/sdk-for-ruby/v3/developer-guide/setup-config.html).
#
# See {#initialize} for a full list of supported configuration options.
class Client < Seahorse::Client::Base
include Aws::ClientStubs
@identifier = :pi
set_api(ClientApi::API)
add_plugin(Seahorse::Client::Plugins::ContentLength)
add_plugin(Aws::Plugins::CredentialsConfiguration)
add_plugin(Aws::Plugins::Logging)
add_plugin(Aws::Plugins::ParamConverter)
add_plugin(Aws::Plugins::ParamValidator)
add_plugin(Aws::Plugins::UserAgent)
add_plugin(Aws::Plugins::HelpfulSocketErrors)
add_plugin(Aws::Plugins::RetryErrors)
add_plugin(Aws::Plugins::GlobalConfiguration)
add_plugin(Aws::Plugins::RegionalEndpoint)
add_plugin(Aws::Plugins::EndpointDiscovery)
add_plugin(Aws::Plugins::EndpointPattern)
add_plugin(Aws::Plugins::ResponsePaging)
add_plugin(Aws::Plugins::StubResponses)
add_plugin(Aws::Plugins::IdempotencyToken)
add_plugin(Aws::Plugins::JsonvalueConverter)
add_plugin(Aws::Plugins::ClientMetricsPlugin)
add_plugin(Aws::Plugins::ClientMetricsSendPlugin)
add_plugin(Aws::Plugins::TransferEncoding)
add_plugin(Aws::Plugins::HttpChecksum)
add_plugin(Aws::Plugins::SignatureV4)
add_plugin(Aws::Plugins::Protocols::JsonRpc)
# @overload initialize(options)
# @param [Hash] options
# @option options [required, Aws::CredentialProvider] :credentials
# Your AWS credentials. This can be an instance of any one of the
# following classes:
#
# * `Aws::Credentials` - Used for configuring static, non-refreshing
# credentials.
#
# * `Aws::SharedCredentials` - Used for loading static credentials from a
# shared file, such as `~/.aws/config`.
#
# * `Aws::AssumeRoleCredentials` - Used when you need to assume a role.
#
# * `Aws::AssumeRoleWebIdentityCredentials` - Used when you need to
# assume a role after providing credentials via the web.
#
# * `Aws::SSOCredentials` - Used for loading credentials from AWS SSO using an
# access token generated from `aws login`.
#
# * `Aws::ProcessCredentials` - Used for loading credentials from a
# process that outputs to stdout.
#
# * `Aws::InstanceProfileCredentials` - Used for loading credentials
# from an EC2 IMDS on an EC2 instance.
#
# * `Aws::ECSCredentials` - Used for loading credentials from
# instances running in ECS.
#
# * `Aws::CognitoIdentityCredentials` - Used for loading credentials
# from the Cognito Identity service.
#
# When `:credentials` are not configured directly, the following
# locations will be searched for credentials:
#
# * `Aws.config[:credentials]`
# * The `:access_key_id`, `:secret_access_key`, and `:session_token` options.
# * ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY']
# * `~/.aws/credentials`
# * `~/.aws/config`
# * EC2/ECS IMDS instance profile - When used by default, the timeouts
# are very aggressive. Construct and pass an instance of
# `Aws::InstanceProfileCredentails` or `Aws::ECSCredentials` to
# enable retries and extended timeouts.
#
# @option options [required, String] :region
# The AWS region to connect to. The configured `:region` is
# used to determine the service `:endpoint`. When not passed,
# a default `:region` is searched for in the following locations:
#
# * `Aws.config[:region]`
# * `ENV['AWS_REGION']`
# * `ENV['AMAZON_REGION']`
# * `ENV['AWS_DEFAULT_REGION']`
# * `~/.aws/credentials`
# * `~/.aws/config`
#
# @option options [String] :access_key_id
#
# @option options [Boolean] :active_endpoint_cache (false)
# When set to `true`, a thread polling for endpoints will be running in
# the background every 60 secs (default). Defaults to `false`.
#
# @option options [Boolean] :adaptive_retry_wait_to_fill (true)
# Used only in `adaptive` retry mode. When true, the request will sleep
# until there is sufficent client side capacity to retry the request.
# When false, the request will raise a `RetryCapacityNotAvailableError` and will
# not retry instead of sleeping.
#
# @option options [Boolean] :client_side_monitoring (false)
# When `true`, client-side metrics will be collected for all API requests from
# this client.
#
# @option options [String] :client_side_monitoring_client_id ("")
# Allows you to provide an identifier for this client which will be attached to
# all generated client side metrics. Defaults to an empty string.
#
# @option options [String] :client_side_monitoring_host ("127.0.0.1")
# Allows you to specify the DNS hostname or IPv4 or IPv6 address that the client
# side monitoring agent is running on, where client metrics will be published via UDP.
#
# @option options [Integer] :client_side_monitoring_port (31000)
# Required for publishing client metrics. The port that the client side monitoring
# agent is running on, where client metrics will be published via UDP.
#
# @option options [Aws::ClientSideMonitoring::Publisher] :client_side_monitoring_publisher (Aws::ClientSideMonitoring::Publisher)
# Allows you to provide a custom client-side monitoring publisher class. By default,
# will use the Client Side Monitoring Agent Publisher.
#
# @option options [Boolean] :convert_params (true)
# When `true`, an attempt is made to coerce request parameters into
# the required types.
#
# @option options [Boolean] :correct_clock_skew (true)
# Used only in `standard` and adaptive retry modes. Specifies whether to apply
# a clock skew correction and retry requests with skewed client clocks.
#
# @option options [Boolean] :disable_host_prefix_injection (false)
# Set to true to disable SDK automatically adding host prefix
# to default service endpoint when available.
#
# @option options [String] :endpoint
# The client endpoint is normally constructed from the `:region`
# option. You should only configure an `:endpoint` when connecting
# to test or custom endpoints. This should be a valid HTTP(S) URI.
#
# @option options [Integer] :endpoint_cache_max_entries (1000)
# Used for the maximum size limit of the LRU cache storing endpoints data
# for endpoint discovery enabled operations. Defaults to 1000.
#
# @option options [Integer] :endpoint_cache_max_threads (10)
# Used for the maximum threads in use for polling endpoints to be cached, defaults to 10.
#
# @option options [Integer] :endpoint_cache_poll_interval (60)
# When :endpoint_discovery and :active_endpoint_cache is enabled,
# Use this option to config the time interval in seconds for making
# requests fetching endpoints information. Defaults to 60 sec.
#
# @option options [Boolean] :endpoint_discovery (false)
# When set to `true`, endpoint discovery will be enabled for operations when available.
#
# @option options [Aws::Log::Formatter] :log_formatter (Aws::Log::Formatter.default)
# The log formatter.
#
# @option options [Symbol] :log_level (:info)
# The log level to send messages to the `:logger` at.
#
# @option options [Logger] :logger
# The Logger instance to send log messages to. If this option
# is not set, logging will be disabled.
#
# @option options [Integer] :max_attempts (3)
# An integer representing the maximum number attempts that will be made for
# a single request, including the initial attempt. For example,
# setting this value to 5 will result in a request being retried up to
# 4 times. Used in `standard` and `adaptive` retry modes.
#
# @option options [String] :profile ("default")
# Used when loading credentials from the shared credentials file
# at HOME/.aws/credentials. When not specified, 'default' is used.
#
# @option options [Proc] :retry_backoff
# A proc or lambda used for backoff. Defaults to 2**retries * retry_base_delay.
# This option is only used in the `legacy` retry mode.
#
# @option options [Float] :retry_base_delay (0.3)
# The base delay in seconds used by the default backoff function. This option
# is only used in the `legacy` retry mode.
#
# @option options [Symbol] :retry_jitter (:none)
# A delay randomiser function used by the default backoff function.
# Some predefined functions can be referenced by name - :none, :equal, :full,
# otherwise a Proc that takes and returns a number. This option is only used
# in the `legacy` retry mode.
#
# @see https://www.awsarchitectureblog.com/2015/03/backoff.html
#
# @option options [Integer] :retry_limit (3)
# The maximum number of times to retry failed requests. Only
# ~ 500 level server errors and certain ~ 400 level client errors
# are retried. Generally, these are throttling errors, data
# checksum errors, networking errors, timeout errors, auth errors,
# endpoint discovery, and errors from expired credentials.
# This option is only used in the `legacy` retry mode.
#
# @option options [Integer] :retry_max_delay (0)
# The maximum number of seconds to delay between retries (0 for no limit)
# used by the default backoff function. This option is only used in the
# `legacy` retry mode.
#
# @option options [String] :retry_mode ("legacy")
# Specifies which retry algorithm to use. Values are:
#
# * `legacy` - The pre-existing retry behavior. This is default value if
# no retry mode is provided.
#
# * `standard` - A standardized set of retry rules across the AWS SDKs.
# This includes support for retry quotas, which limit the number of
# unsuccessful retries a client can make.
#
# * `adaptive` - An experimental retry mode that includes all the
# functionality of `standard` mode along with automatic client side
# throttling. This is a provisional mode that may change behavior
# in the future.
#
#
# @option options [String] :secret_access_key
#
# @option options [String] :session_token
#
# @option options [Boolean] :simple_json (false)
# Disables request parameter conversion, validation, and formatting.
# Also disable response data type conversions. This option is useful
# when you want to ensure the highest level of performance by
# avoiding overhead of walking request parameters and response data
# structures.
#
# When `:simple_json` is enabled, the request parameters hash must
# be formatted exactly as the DynamoDB API expects.
#
# @option options [Boolean] :stub_responses (false)
# Causes the client to return stubbed responses. By default
# fake responses are generated and returned. You can specify
# the response data to return or errors to raise by calling
# {ClientStubs#stub_responses}. See {ClientStubs} for more information.
#
# ** Please note ** When response stubbing is enabled, no HTTP
# requests are made, and retries are disabled.
#
# @option options [Boolean] :validate_params (true)
# When `true`, request parameters are validated before
# sending the request.
#
# @option options [URI::HTTP,String] :http_proxy A proxy to send
# requests through. Formatted like 'http://proxy.com:123'.
#
# @option options [Float] :http_open_timeout (15) The number of
# seconds to wait when opening a HTTP session before raising a
# `Timeout::Error`.
#
# @option options [Integer] :http_read_timeout (60) The default
# number of seconds to wait for response data. This value can
# safely be set per-request on the session.
#
# @option options [Float] :http_idle_timeout (5) The number of
# seconds a connection is allowed to sit idle before it is
# considered stale. Stale connections are closed and removed
# from the pool before making a request.
#
# @option options [Float] :http_continue_timeout (1) The number of
# seconds to wait for a 100-continue response before sending the
# request body. This option has no effect unless the request has
# "Expect" header set to "100-continue". Defaults to `nil` which
# disables this behaviour. This value can safely be set per
# request on the session.
#
# @option options [Boolean] :http_wire_trace (false) When `true`,
# HTTP debug output will be sent to the `:logger`.
#
# @option options [Boolean] :ssl_verify_peer (true) When `true`,
# SSL peer certificates are verified when establishing a
# connection.
#
# @option options [String] :ssl_ca_bundle Full path to the SSL
# certificate authority bundle file that should be used when
# verifying peer certificates. If you do not pass
# `:ssl_ca_bundle` or `:ssl_ca_directory` the the system default
# will be used if available.
#
# @option options [String] :ssl_ca_directory Full path of the
# directory that contains the unbundled SSL certificate
# authority files for verifying peer certificates. If you do
# not pass `:ssl_ca_bundle` or `:ssl_ca_directory` the the
# system default will be used if available.
#
def initialize(*args)
super
end
# @!group API Operations
# For a specific time period, retrieve the top `N` dimension keys for a
# metric.
#
# <note markdown="1"> Each response element returns a maximum of 500 bytes. For larger
# elements, such as SQL statements, only the first 500 bytes are
# returned.
#
# </note>
#
# @option params [required, String] :service_type
# The AWS service for which Performance Insights will return metrics.
# The only valid value for *ServiceType* is `RDS`.
#
# @option params [required, String] :identifier
# An immutable, AWS Region-unique identifier for a data source.
# Performance Insights gathers metrics from this data source.
#
# To use an Amazon RDS instance as a data source, you specify its
# `DbiResourceId` value. For example, specify
# `db-FAIHNTYBKTGAUSUZQYPDS2GW4A`
#
# @option params [required, Time,DateTime,Date,Integer,String] :start_time
# The date and time specifying the beginning of the requested time
# series data. You must specify a `StartTime` within the past 7 days.
# The value specified is *inclusive*, which means that data points equal
# to or greater than `StartTime` are returned.
#
# The value for `StartTime` must be earlier than the value for
# `EndTime`.
#
# @option params [required, Time,DateTime,Date,Integer,String] :end_time
# The date and time specifying the end of the requested time series
# data. The value specified is *exclusive*, which means that data points
# less than (but not equal to) `EndTime` are returned.
#
# The value for `EndTime` must be later than the value for `StartTime`.
#
# @option params [required, String] :metric
# The name of a Performance Insights metric to be measured.
#
# Valid values for `Metric` are:
#
# * `db.load.avg` - a scaled representation of the number of active
# sessions for the database engine.
#
# * `db.sampledload.avg` - the raw number of active sessions for the
# database engine.
#
# If the number of active sessions is less than an internal Performance
# Insights threshold, `db.load.avg` and `db.sampledload.avg` are the
# same value. If the number of active sessions is greater than the
# internal threshold, Performance Insights samples the active sessions,
# with `db.load.avg` showing the scaled values, `db.sampledload.avg`
# showing the raw values, and `db.sampledload.avg` less than
# `db.load.avg`. For most use cases, you can query `db.load.avg` only.
#
# @option params [Integer] :period_in_seconds
# The granularity, in seconds, of the data points returned from
# Performance Insights. A period can be as short as one second, or as
# long as one day (86400 seconds). Valid values are:
#
# * `1` (one second)
#
# * `60` (one minute)
#
# * `300` (five minutes)
#
# * `3600` (one hour)
#
# * `86400` (twenty-four hours)
#
# If you don't specify `PeriodInSeconds`, then Performance Insights
# chooses a value for you, with a goal of returning roughly 100-200 data
# points in the response.
#
# @option params [required, Types::DimensionGroup] :group_by
# A specification for how to aggregate the data points from a query
# result. You must specify a valid dimension group. Performance Insights
# returns all dimensions within this group, unless you provide the names
# of specific dimensions within this group. You can also request that
# Performance Insights return a limited number of values for a
# dimension.
#
# @option params [Types::DimensionGroup] :partition_by
# For each dimension specified in `GroupBy`, specify a secondary
# dimension to further subdivide the partition keys in the response.
#
# @option params [Hash<String,String>] :filter
# One or more filters to apply in the request. Restrictions:
#
# * Any number of filters by the same dimension, as specified in the
# `GroupBy` or `Partition` parameters.
#
# * A single filter for any other dimension in this dimension group.
#
# @option params [Integer] :max_results
# The maximum number of items to return in the response. If more items
# exist than the specified `MaxRecords` value, a pagination token is
# included in the response so that the remaining results can be
# retrieved.
#
# @option params [String] :next_token
# An optional pagination token provided by a previous request. If this
# parameter is specified, the response includes only records beyond the
# token, up to the value specified by `MaxRecords`.
#
# @return [Types::DescribeDimensionKeysResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeDimensionKeysResponse#aligned_start_time #aligned_start_time} => Time
# * {Types::DescribeDimensionKeysResponse#aligned_end_time #aligned_end_time} => Time
# * {Types::DescribeDimensionKeysResponse#partition_keys #partition_keys} => Array<Types::ResponsePartitionKey>
# * {Types::DescribeDimensionKeysResponse#keys #keys} => Array<Types::DimensionKeyDescription>
# * {Types::DescribeDimensionKeysResponse#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.describe_dimension_keys({
# service_type: "RDS", # required, accepts RDS
# identifier: "String", # required
# start_time: Time.now, # required
# end_time: Time.now, # required
# metric: "String", # required
# period_in_seconds: 1,
# group_by: { # required
# group: "String", # required
# dimensions: ["String"],
# limit: 1,
# },
# partition_by: {
# group: "String", # required
# dimensions: ["String"],
# limit: 1,
# },
# filter: {
# "String" => "String",
# },
# max_results: 1,
# next_token: "String",
# })
#
# @example Response structure
#
# resp.aligned_start_time #=> Time
# resp.aligned_end_time #=> Time
# resp.partition_keys #=> Array
# resp.partition_keys[0].dimensions #=> Hash
# resp.partition_keys[0].dimensions["String"] #=> String
# resp.keys #=> Array
# resp.keys[0].dimensions #=> Hash
# resp.keys[0].dimensions["String"] #=> String
# resp.keys[0].total #=> Float
# resp.keys[0].partitions #=> Array
# resp.keys[0].partitions[0] #=> Float
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/pi-2018-02-27/DescribeDimensionKeys AWS API Documentation
#
# @overload describe_dimension_keys(params = {})
# @param [Hash] params ({})
def describe_dimension_keys(params = {}, options = {})
req = build_request(:describe_dimension_keys, params)
req.send_request(options)
end
# Retrieve Performance Insights metrics for a set of data sources, over
# a time period. You can provide specific dimension groups and
# dimensions, and provide aggregation and filtering criteria for each
# group.
#
# <note markdown="1"> Each response element returns a maximum of 500 bytes. For larger
# elements, such as SQL statements, only the first 500 bytes are
# returned.
#
# </note>
#
# @option params [required, String] :service_type
# The AWS service for which Performance Insights returns metrics. The
# only valid value for *ServiceType* is `RDS`.
#
# @option params [required, String] :identifier
# An immutable, AWS Region-unique identifier for a data source.
# Performance Insights gathers metrics from this data source.
#
# To use a DB instance as a data source, specify its `DbiResourceId`
# value. For example, specify `db-FAIHNTYBKTGAUSUZQYPDS2GW4A`.
#
# @option params [required, Array<Types::MetricQuery>] :metric_queries
# An array of one or more queries to perform. Each query must specify a
# Performance Insights metric, and can optionally specify aggregation
# and filtering criteria.
#
# @option params [required, Time,DateTime,Date,Integer,String] :start_time
# The date and time specifying the beginning of the requested time
# series data. You can't specify a `StartTime` that's earlier than 7
# days ago. The value specified is *inclusive* - data points equal to or
# greater than `StartTime` will be returned.
#
# The value for `StartTime` must be earlier than the value for
# `EndTime`.
#
# @option params [required, Time,DateTime,Date,Integer,String] :end_time
# The date and time specifying the end of the requested time series
# data. The value specified is *exclusive* - data points less than (but
# not equal to) `EndTime` will be returned.
#
# The value for `EndTime` must be later than the value for `StartTime`.
#
# @option params [Integer] :period_in_seconds
# The granularity, in seconds, of the data points returned from
# Performance Insights. A period can be as short as one second, or as
# long as one day (86400 seconds). Valid values are:
#
# * `1` (one second)
#
# * `60` (one minute)
#
# * `300` (five minutes)
#
# * `3600` (one hour)
#
# * `86400` (twenty-four hours)
#
# If you don't specify `PeriodInSeconds`, then Performance Insights
# will choose a value for you, with a goal of returning roughly 100-200
# data points in the response.
#
# @option params [Integer] :max_results
# The maximum number of items to return in the response. If more items
# exist than the specified `MaxRecords` value, a pagination token is
# included in the response so that the remaining results can be
# retrieved.
#
# @option params [String] :next_token
# An optional pagination token provided by a previous request. If this
# parameter is specified, the response includes only records beyond the
# token, up to the value specified by `MaxRecords`.
#
# @return [Types::GetResourceMetricsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetResourceMetricsResponse#aligned_start_time #aligned_start_time} => Time
# * {Types::GetResourceMetricsResponse#aligned_end_time #aligned_end_time} => Time
# * {Types::GetResourceMetricsResponse#identifier #identifier} => String
# * {Types::GetResourceMetricsResponse#metric_list #metric_list} => Array<Types::MetricKeyDataPoints>
# * {Types::GetResourceMetricsResponse#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_resource_metrics({
# service_type: "RDS", # required, accepts RDS
# identifier: "String", # required
# metric_queries: [ # required
# {
# metric: "String", # required
# group_by: {
# group: "String", # required
# dimensions: ["String"],
# limit: 1,
# },
# filter: {
# "String" => "String",
# },
# },
# ],
# start_time: Time.now, # required
# end_time: Time.now, # required
# period_in_seconds: 1,
# max_results: 1,
# next_token: "String",
# })
#
# @example Response structure
#
# resp.aligned_start_time #=> Time
# resp.aligned_end_time #=> Time
# resp.identifier #=> String
# resp.metric_list #=> Array
# resp.metric_list[0].key.metric #=> String
# resp.metric_list[0].key.dimensions #=> Hash
# resp.metric_list[0].key.dimensions["String"] #=> String
# resp.metric_list[0].data_points #=> Array
# resp.metric_list[0].data_points[0].timestamp #=> Time
# resp.metric_list[0].data_points[0].value #=> Float
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/pi-2018-02-27/GetResourceMetrics AWS API Documentation
#
# @overload get_resource_metrics(params = {})
# @param [Hash] params ({})
def get_resource_metrics(params = {}, options = {})
req = build_request(:get_resource_metrics, params)
req.send_request(options)
end
# @!endgroup
# @param params ({})
# @api private
def build_request(operation_name, params = {})
handlers = @handlers.for(operation_name)
context = Seahorse::Client::RequestContext.new(
operation_name: operation_name,
operation: config.api.operation(operation_name),
client: self,
params: params,
config: config)
context[:gem_name] = 'aws-sdk-pi'
context[:gem_version] = '1.26.0'
Seahorse::Client::Request.new(handlers, context)
end
# @api private
# @deprecated
def waiter_names
[]
end
class << self
# @api private
attr_reader :identifier
# @api private
def errors_module
Errors
end
end
end
end
| 43.96861 | 148 | 0.657794 |
18f4ae78938225fbc9d9a92948d32e92fe65da2f | 1,101 | Pod::Spec.new do |s|
  s.name = 'Apollo'
  # Version is produced by a repo script so the podspec never drifts from
  # the canonical version number.
  # NOTE(review): backtick output usually includes a trailing newline —
  # confirm the script (or CocoaPods) strips it.
  s.version = `scripts/get-version.sh`
  s.author = 'Meteor Development Group'
  s.homepage = 'https://github.com/apollographql/apollo-ios'
  s.license = { :type => 'MIT', :file => 'LICENSE' }
  s.summary = "A GraphQL client for iOS, written in Swift."
  s.source = { :git => 'https://github.com/apollographql/apollo-ios.git', :tag => s.version }
  s.requires_arc = true
  # Only the Core subspec is installed unless the consumer opts into others.
  s.default_subspecs = 'Core'
  s.ios.deployment_target = '9.0'
  s.osx.deployment_target = '10.10'
  s.tvos.deployment_target = '9.1'
  s.subspec 'Core' do |ss|
    ss.source_files = 'Sources/Apollo/*.swift'
    ss.resource = 'scripts/check-and-run-apollo-codegen.sh'
  end
  # Apollo provides exactly one persistent cache out-of-the-box, as a reasonable default choice for
  # those who require cache persistence. Third-party caches may use different storage mechanisms.
  s.subspec 'SQLite' do |ss|
    ss.source_files = 'Sources/ApolloSQLite/*.swift'
    ss.dependency 'Apollo/Core'
    ss.dependency 'SQLite.swift', '~> 0.11.0'
  end
end
| 33.363636 | 99 | 0.659401 |
bf3fd3d3b1580005593a42e05e9c2b35b5508d00 | 7,487 | =begin
#Xero Payroll AU
#This is the Xero Payroll API for orgs in Australia region.
The version of the OpenAPI document: 2.6.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.3.1
=end
require 'time'
require 'date'
module XeroRuby::PayrollAu
require 'bigdecimal'
class PayItem
attr_accessor :earnings_rates
attr_accessor :deduction_types
attr_accessor :leave_types
attr_accessor :reimbursement_types
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'earnings_rates' => :'EarningsRates',
:'deduction_types' => :'DeductionTypes',
:'leave_types' => :'LeaveTypes',
:'reimbursement_types' => :'ReimbursementTypes'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'earnings_rates' => :'Array<EarningsRate>',
:'deduction_types' => :'Array<DeductionType>',
:'leave_types' => :'Array<LeaveType>',
:'reimbursement_types' => :'Array<ReimbursementType>'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `XeroRuby::PayrollAu::PayItem` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `XeroRuby::PayrollAu::PayItem`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'earnings_rates')
if (value = attributes[:'earnings_rates']).is_a?(Array)
self.earnings_rates = value
end
end
if attributes.key?(:'deduction_types')
if (value = attributes[:'deduction_types']).is_a?(Array)
self.deduction_types = value
end
end
if attributes.key?(:'leave_types')
if (value = attributes[:'leave_types']).is_a?(Array)
self.leave_types = value
end
end
if attributes.key?(:'reimbursement_types')
if (value = attributes[:'reimbursement_types']).is_a?(Array)
self.reimbursement_types = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
earnings_rates == o.earnings_rates &&
deduction_types == o.deduction_types &&
leave_types == o.leave_types &&
reimbursement_types == o.reimbursement_types
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[earnings_rates, deduction_types, leave_types, reimbursement_types].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(parse_date(value))
when :Date
Date.parse(parse_date(value))
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BigDecimal
BigDecimal(value.to_s)
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
XeroRuby::PayrollAu.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
# customized data_parser
def parse_date(datestring)
seconds_since_epoch = datestring.scan(/[0-9]+/)[0].to_i / 1000.0
return Time.at(seconds_since_epoch).strftime('%Y-%m-%dT%l:%M:%S%z').to_s
end
end
end
| 30.311741 | 206 | 0.627221 |
bf6d774f9e8923d901f99b6d63b4118008db1093 | 459 | name 'patroni'
maintainer 'GitLab.com'
maintainer_email '[email protected]'
license 'Apache 2.0'
description 'Installs/Configures Patroni for GitLab'
long_description 'Installs/Configures Patroni for GitLab'
version '0.1.0'
# Guarded because older Chef releases do not define the chef_version DSL.
chef_version '>= 12.1' if respond_to?(:chef_version)
issues_url 'https://gitlab.com/gitlab-org/omnibus-gitlab/issues'
source_url 'https://gitlab.com/gitlab-org/omnibus-gitlab'
# Sibling cookbooks this cookbook's recipes depend on.
depends 'package'
depends 'gitlab'
depends 'postgresql'
| 28.6875 | 64 | 0.79085 |
383de5697c448bb02140384474aff2612bde3ee7 | 3,204 | module Beaker::DSL::InstallUtils
# This helper class encapsulates querying feature flag settings from
# options[:answers] which can be used to drive Beaker's install behavior
# around new or experimental features, typically in the PE Modules.
#
# Also handles initializing feature flag settings from environment variables
# for CI. In this way, flags can be pulled in without needing to munge
# Beaker's config file which is often handled inside of a script in Jenkins.
#
# Flags are expected to be found in a +feature_flags+ hash in the
# options[:answers] under the key :feature_flags. Beaker::OptionHash should
# ensure that all keys end up as symbols. If you are programatically
# constructing the answers, you must take care to use merge() to add
# elements.
#
# @example The use of the pe-modules-next package is handled by:
# :answers => {
# :feature_flags => {
# :pe_modules_next => true
# }
# }
#
# All flag keys are expected to be downcased with underscores.
#
# Environment variables may be uppercased.
class FeatureFlags
FLAGS = %w{
pe_modules_next
}.freeze
attr_reader :options
def initialize(options)
@options = options
end
# Returns the boolean state of the flag as found in options[:answers],
# or if not found in the answers, then it checks for an environment variable.
#
# @param String flag key
# @return [Boolean,nil] boolean true or false unless not found at all, then nil.
def flag?(flag)
key = canonical_key(flag)
state = flags[key] if flags?
state = environment_flag?(key) if state.nil?
case state
when nil then nil
else state.to_s == 'true'
end
end
# Updates options[:answers][:feature_flags] with any environment variables
# found based on FLAGS, but if and only if they are not already present.
#
# (existing :answers take precedence)
def register_flags!
answers_with_registered_flags = answers
answers_with_registered_flags[:feature_flags] ||= StringifyHash.new
new_flags = answers_with_registered_flags[:feature_flags]
FLAGS.each do |flag|
key = canonical_key(flag)
value = flag?(key)
if !new_flags.include?(key) && !value.nil?
new_flags[key] = value
end
end
options.merge!(
:answers => answers_with_registered_flags
) if !new_flags.empty?
options
end
private
# Does the +feature_flags+ hash exist?
def flags?
answers? && !answers[:feature_flags].nil?
end
def flags
answers[:feature_flags] || StringifyHash.new
end
# Does the +answers+ hash exist?
def answers?
!options[:answers].nil?
end
def answers
options[:answers] || StringifyHash.new
end
def canonical_key(key)
key.to_s.downcase.to_sym
end
def environmental_keys(key)
[key.to_s.upcase, key.to_s.downcase]
end
def environment_flag?(flag)
value = nil
environmental_keys(flag).each do |f|
value = ENV[f] if ENV.include?(f)
break if !value.nil?
end
value
end
end
end
| 27.86087 | 84 | 0.657303 |
62d693e0111cafb7373130a576d0a58de0b36195 | 1,521 | class UseDeletedOnInContainersTables < ActiveRecord::Migration[5.0]
  # Minimal model stubs so the migration does not depend on application
  # models; STI is disabled where the real tables have a `type` column.
  class ContainerDefinition < ActiveRecord::Base
  end
  class ContainerGroup < ActiveRecord::Base
    self.inheritance_column = :_type_disabled
  end
  class ContainerImage < ActiveRecord::Base
  end
  class ContainerProject < ActiveRecord::Base
  end
  class ContainerNode < ActiveRecord::Base
    self.inheritance_column = :_type_disabled
  end
  class Container < ActiveRecord::Base
    self.inheritance_column = :_type_disabled
  end
  # Convert "disconnected" rows (ems_id = NULL) into soft-deleted rows.
  # Order matters:
  #  1) rows with neither deletion mark nor ems_id become soft-deleted now;
  #  2) rows marked deleted but still connected get the mark cleared;
  #  3) remaining soft-deleted rows restore ems_id from old_ems_id.
  # NOTE(review): assumes an `old_ems_id` column holds the pre-disconnect
  # ems_id (presumably populated by an earlier migration) — confirm.
  def disconnect_to_soft_delete(model)
    model.where(:deleted_on => nil, :ems_id => nil).update_all(:deleted_on => Time.now.utc)
    model.where.not(:deleted_on => nil).where.not(:ems_id => nil).update_all(:deleted_on => nil)
    model.where.not(:deleted_on => nil).update_all("ems_id = old_ems_id")
  end
  # Reverse direction: soft-deleted rows become disconnected (ems_id NULL).
  def soft_delete_to_disconnect(model)
    model.where.not(:deleted_on => nil).update_all(:ems_id => nil)
  end
  MODEL_CLASSES = [
    ContainerDefinition,
    ContainerGroup,
    ContainerImage,
    ContainerProject,
    ContainerNode,
    Container,
  ].freeze
  def up
    MODEL_CLASSES.each do |model_class|
      say_with_time("Change ':deleted_on not nil' :ems_id to :old_ems_id for #{model_class}") do
        disconnect_to_soft_delete(model_class)
      end
    end
  end
  def down
    MODEL_CLASSES.each do |model_class|
      say_with_time("Change ':deleted_on not nil' :ems_id to nil for #{model_class}") do
        soft_delete_to_disconnect(model_class)
      end
    end
  end
end
| 26.224138 | 96 | 0.717949 |
874ffce782cff80d7067eb4e33556f3cc14ffe35 | 3,829 | test_name 'ttls configured with custom group containing core and custom facts' do
  tag 'risk:high'
  skip_test "Known issue. Scenario does not work."
  require 'facter/acceptance/user_fact_utils'
  extend Facter::Acceptance::UserFactUtils
  custom_fact_file = 'custom_facts.rb'
  custom_fact_name = 'random_custom_fact'
  uptime_seconds_value = ''
  # Custom fact whose value mirrors the core system_uptime.seconds fact.
  custom_fact_content = <<-CUSTOM_FACT
    Facter.add(:#{custom_fact_name}) do
      setcode do
        Facter.value('system_uptime.seconds')
      end
    end
  CUSTOM_FACT
  # facter.conf declaring a custom cache group that mixes the custom fact
  # with the core system_uptime facts, cached for 3 days.
  config_data = <<~FACTER_CONF
    facts : {
      ttls : [
        { "cached-custom-facts" : 3 days },
      ]
    }
    fact-groups : {
      cached-custom-facts : ["#{custom_fact_name}", "system_uptime"],
    }
  FACTER_CONF
  agents.each do |agent|
    cache_folder = get_cached_facts_dir(agent['platform'], on(agent, facter('kernelmajversion')).stdout.chomp.to_f)
    fact_dir = agent.tmpdir('facter')
    env = { 'FACTERLIB' => fact_dir }
    config_dir = get_default_fact_dir(agent['platform'], on(agent, facter('kernelmajversion')).stdout.chomp.to_f)
    config_file = File.join(config_dir, 'facter.conf')
    step "Agent #{agent}: create config file" do
      agent.mkdir_p(config_dir)
      create_remote_file(agent, config_file, config_data)
      fact_file = File.join(fact_dir, custom_fact_file)
      create_remote_file(agent, fact_file, custom_fact_content)
      # Clean up the custom fact, cache files and config after the test run.
      teardown do
        agent.rm_rf(fact_dir)
        agent.rm_rf("#{cache_folder}/*")
        agent.rm_rf(config_file)
      end
      step "should log that it creates cache file and it caches custom facts found in facter.conf" do
        on(agent, facter("--debug --json", environment: env)) do |facter_result|
          output_json = JSON.parse(facter_result.stdout.chomp)
          uptime_seconds_value = output_json['system_uptime']['seconds']
          assert_match(/caching values for cached-custom-facts facts/, facter_result.stderr,
                       'Expected debug message to state that custom facts will be cached')
        end
      end
      step "should create a cached-custom-facts cache file that contains fact information" do
        result = agent.file_exist?("#{cache_folder}/cached-custom-facts")
        assert_equal(true, result)
        cat_output = agent.cat("#{cache_folder}/cached-custom-facts")
        output_json = JSON.parse(cat_output.chomp)
        assert_match(output_json[custom_fact_name], uptime_seconds_value, 'Expected cached custom fact file to contain fact information')
        assert_match(output_json['system_uptime.seconds'], uptime_seconds_value, 'Expected cached file to contain system_uptime information')
      end
      step 'should read from the cached file for a custom fact that has been cached' do
        on(agent, facter("--debug", environment: env)) do |facter_result|
          output_json = JSON.parse(facter_result.stdout.chomp)
          assert_match(output_json[custom_fact_name], uptime_seconds_value, 'Expected cached custom fact file to contain fact information')
          assert_match(output_json['system_uptime.seconds'], uptime_seconds_value, 'Expected cached file to contain system_uptime information')
          assert_match(/caching values for cached-custom-facts facts/, facter_result.stderr,
                       'Expected debug message to state that custom facts will be cached')
          assert_match(/loading cached values for #{custom_fact_name} facts/, facter_result.stderr,
                       'Expected debug message to state that cached custom facts are read from file')
          assert_match(/loading cached values for system_uptime.seconds facts/, facter_result.stderr,
                       'Expected debug message to state that system_uptime facts are read from file')
        end
      end
    end
  end
end
end | 43.511364 | 143 | 0.692609 |
115cafe31bd271c229e282157f3a22a1ed8f78e7 | 7,036 | =begin
#Datadog API V1 Collection
#Collection of all Datadog Public endpoints.
The version of the OpenAPI document: 1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
This product includes software developed at Datadog (https://www.datadoghq.com/).
Copyright 2020-Present Datadog, Inc.
=end
require 'date'
require 'time'
module DatadogAPIClient::V1
# Metadata associated with your host.
class HostMeta
# whether the object has unparsed attributes
attr_accessor :_unparsed
# Array of Unix versions.
attr_accessor :nix_v
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'nix_v' => :'nixV'
}
end
# Returns all the JSON keys this model knows about
def self.acceptable_attributes
attribute_map.values
end
# Attribute type mapping.
def self.openapi_types
{
:'nix_v' => :'Array<String>'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V1::HostMeta` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `DatadogAPIClient::V1::HostMeta`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'nix_v')
if (value = attributes[:'nix_v']).is_a?(Array)
self.nix_v = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
nix_v == o.nix_v
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[nix_v].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
elsif type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :Time
Time.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when :Array
# generic array, return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
# models (e.g. Pet) or oneOf
klass = DatadogAPIClient::V1.const_get(type)
res = klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
if res.instance_of? DatadogAPIClient::V1::UnparsedObject
self._unparsed = true
end
res
end
end
# Human-readable representation of the model: the stringified hash form.
#
# @return [String] String presentation of the object
def to_s
  hash_form = to_hash
  hash_form.to_s
end
# Alias for #to_hash kept for backward compatibility with older callers
# that serialize the request body via #to_body.
#
# @return [Hash] the object in the form of hash
def to_body
  body = to_hash
  body
end
# Serializes the model into a plain Hash keyed by the JSON attribute names.
# A nil attribute is emitted only when it is declared nullable AND its
# instance variable was explicitly set (an intentional JSON null); otherwise
# nil attributes are omitted entirely.
#
# @return [Hash] the object in the form of hash
def to_hash
  self.class.attribute_map.each_with_object({}) do |(attr, param), hash|
    value = send(attr)
    if value.nil?
      next unless self.class.openapi_nullable.include?(attr)
      next unless instance_variable_defined?(:"@#{attr}")
    end
    hash[param] = _to_hash(value)
  end
end
# Recursively normalizes a value into plain hash/array/scalar form.
# Arrays are compacted (nils dropped) and mapped element-wise; hashes are
# rebuilt with normalized values; anything exposing #to_hash is converted;
# all other values pass through unchanged.
#
# @param value [Object] any valid value
# @return [Object] the value in hash/array/scalar form
def _to_hash(value)
  case value
  when Array
    value.compact.map { |item| _to_hash(item) }
  when Hash
    value.each_with_object({}) { |(key, val), out| out[key] = _to_hash(val) }
  else
    value.respond_to?(:to_hash) ? value.to_hash : value
  end
end
end
end
| 29.813559 | 208 | 0.627771 |
ff8ec3133ca863d57875869346d88ef0d5964395 | 1,422 | Before do
set_env('MSF_DATBASE_CONFIG', Rails.configuration.paths['config/database'].existent.first)
set_env('RAILS_ENV', 'test')
@aruba_timeout_seconds = 8.minutes
end
# don't setup child processes to load simplecov_setup.rb if simplecov isn't installed
# (i.e. when bundler was run with `--without coverage`).
unless Bundler.settings.without.include?(:coverage)
Before do |scenario|
# Derive a unique, human-readable command name from the Cucumber AST node
# so each scenario's coverage is recorded under its own name.
# NOTE(review): Cucumber::Ast::* is the pre-2.0 Cucumber API — confirm the
# pinned cucumber version before upgrading.
command_name = case scenario
when Cucumber::Ast::Scenario, Cucumber::Ast::ScenarioOutline
"#{scenario.feature.title} #{scenario.name}"
when Cucumber::Ast::OutlineTable::ExampleRow
scenario_outline = scenario.scenario_outline
"#{scenario_outline.feature.title} #{scenario_outline.name} #{scenario.name}"
else
raise TypeError, "Don't know how to extract command name from #{scenario.class}"
end
# Used in simplecov_setup so that each scenario has a different name and their coverage results are merged instead
# of overwriting each other as 'Cucumber Features'
set_env('SIMPLECOV_COMMAND_NAME', command_name)
simplecov_setup_pathname = Pathname.new(__FILE__).expand_path.parent.join('simplecov_setup')
# set environment variable so child processes will merge their coverage data with parent process's coverage data.
set_env('RUBYOPT', "#{ENV['RUBYOPT']} -r#{simplecov_setup_pathname}")
end
end
| 47.4 | 118 | 0.688467 |
2627b0f3dc81daf63ea341e8d796c7b268d8a50c | 11,931 | require 'minitest/autorun'
require 'sprockets/path_utils'
# Test suite for Sprockets::PathUtils: stat helpers, path predicates,
# subpath splitting, extension matching, directory traversal, and
# atomic_write.
# NOTE(review): `MiniTest::Test` is the legacy constant spelling; modern
# minitest only defines `Minitest::Test` — confirm the pinned minitest
# version.
class TestPathUtils < MiniTest::Test
include Sprockets::PathUtils
# Platform probes: true on Windows-style ("DOSish") filesystems.
DOSISH = File::ALT_SEPARATOR != nil
# True when the platform treats "A:" drive-letter paths specially.
DOSISH_DRIVE_LETTER = File.dirname("A:") == "A:."
# True when the platform supports UNC ("//server/share") paths.
DOSISH_UNC = File.dirname("//") == "//"
def test_stat
assert_kind_of File::Stat, stat(File.expand_path("../fixtures", __FILE__))
# stat returns falsy (not raising) for missing paths.
refute stat("/tmp/sprockets/missingfile")
end
def test_file
assert_equal true, file?(File.expand_path("../fixtures/default/hello.txt", __FILE__))
assert_equal false, file?(File.expand_path("../fixtures", __FILE__))
end
def test_directory
assert_equal true, directory?(File.expand_path("../fixtures", __FILE__))
assert_equal false, directory?(File.expand_path("../fixtures/default/hello.txt", __FILE__))
end
def test_entries
assert_equal [
"asset",
"compass",
"context",
"default",
"directives",
"encoding",
"errors",
"index-assets",
"manifest_utils",
"octicons",
"paths",
"public",
"resolve",
"sass",
"server",
"source-maps",
"symlink"
], entries(File.expand_path("../fixtures", __FILE__))
# entries must strip "." and ".." regardless of what Dir.entries yields.
[ ['a', 'b'], ['a', 'b', '.', '..'] ].each do |dir_contents|
Dir.stub :entries, dir_contents do
assert_equal ['a', 'b'], entries(Dir.tmpdir)
end
end
assert_equal [], entries("/tmp/sprockets/missingdir")
end
def test_check_absolute_path
assert absolute_path?(Dir.pwd)
assert absolute_path?("/foo.rb")
refute absolute_path?("foo.rb")
refute absolute_path?("./foo.rb")
refute absolute_path?("../foo.rb")
if DOSISH_DRIVE_LETTER
assert absolute_path?("A:foo.rb")
assert absolute_path?("A:/foo.rb")
assert absolute_path?("A:\\foo.rb")
end
if DOSISH
assert absolute_path?("/foo.rb")
assert absolute_path?("\\foo.rb")
end
end
def test_check_relative_path
# Relative means explicitly "./"- or "../"-anchored; bare names don't count.
assert relative_path?(".")
assert relative_path?("..")
assert relative_path?("./")
assert relative_path?("../")
assert relative_path?("./foo.rb")
assert relative_path?("../foo.rb")
if DOSISH
assert relative_path?(".\\")
assert relative_path?("..\\")
assert relative_path?(".\\foo.rb")
assert relative_path?("..\\foo.rb")
end
refute relative_path?(Dir.pwd)
refute relative_path?("/foo.rb")
refute relative_path?("foo.rb")
refute relative_path?(".foo.rb")
refute relative_path?("..foo.rb")
end
def test_split_subpath_from_root_path
path = File.expand_path("../fixtures/default", __FILE__)
subpath = File.expand_path("../fixtures/default/application.js", __FILE__)
assert_equal "application.js", split_subpath(path, subpath)
# Trailing slash on the root must not change the result.
subpath = File.expand_path("../fixtures/default/application.js", __FILE__)
assert_equal "application.js", split_subpath(path + "/", subpath)
subpath = File.expand_path("../fixtures/default/app/application.js", __FILE__)
assert_equal "app/application.js", split_subpath(path, subpath)
subpath = File.expand_path("../fixtures/default", __FILE__)
assert_equal "", split_subpath(path, subpath)
# Paths outside the root return falsy.
subpath = File.expand_path("../fixtures/other/app/application.js", __FILE__)
refute split_subpath(path, subpath)
end
def test_split_paths_root_from_base
paths = [File.expand_path("../fixtures/default", __FILE__)]
filename = File.expand_path("../fixtures/default/application.js", __FILE__)
expected = [paths.first, "application.js"]
assert_equal expected, paths_split(paths, filename)
filename = File.expand_path("../fixtures/default/app/application.js", __FILE__)
expected = [paths.first, "app/application.js"]
assert_equal expected, paths_split(paths, filename)
filename = File.expand_path("../fixtures/default", __FILE__)
expected = [paths.first, ""]
assert_equal expected, paths_split(paths, filename)
filename = File.expand_path("../fixtures/other/app/application.js", __FILE__)
refute paths_split(paths, filename)
end
def test_path_extensions
# path_extnames returns ALL trailing extensions, outermost last.
assert_equal [".txt"], path_extnames("hello.txt")
assert_equal [".txt"], path_extnames("sub/hello.txt")
assert_equal [".txt"], path_extnames("sub.dir/hello.txt")
assert_equal [".js"], path_extnames("jquery.js")
assert_equal [".min", ".js"], path_extnames("jquery.min.js")
assert_equal [".js", ".erb"], path_extnames("jquery.js.erb")
assert_equal [".min", ".js", ".erb"], path_extnames("jquery.min.js.erb")
end
def test_match_path_extname
extensions = { ".txt" => "text/plain" }
assert_equal [".txt", "text/plain"], match_path_extname("hello.txt", extensions)
assert_equal [".txt", "text/plain"], match_path_extname("sub/hello.txt", extensions)
refute match_path_extname("hello.text", extensions)
extensions = { ".js" => "application/javascript" }
assert_equal [".js", "application/javascript"], match_path_extname("jquery.js", extensions)
assert_equal [".js", "application/javascript"], match_path_extname("jquery.min.js", extensions)
refute match_path_extname("jquery.js.erb", extensions)
refute match_path_extname("jquery.min.js.erb", extensions)
# Compound extensions (".js.erb") must win over their suffix (".js").
extensions = { ".js" => "application/javascript", ".js.erb" => "application/javascript+ruby" }
assert_equal [".js", "application/javascript"], match_path_extname("jquery.js", extensions)
assert_equal [".js", "application/javascript"], match_path_extname("jquery.min.js", extensions)
assert_equal [".js.erb", "application/javascript+ruby"], match_path_extname("jquery.js.erb", extensions)
assert_equal [".js.erb", "application/javascript+ruby"], match_path_extname("jquery.min.js.erb", extensions)
refute match_path_extname("jquery.min.coffee.erb", extensions)
extensions = { ".js.map" => "application/json", ".css.map" => "application/json" }
assert_equal [".js.map", "application/json"], match_path_extname("jquery.js.map", extensions)
assert_equal [".js.map", "application/json"], match_path_extname("jquery.min.js.map", extensions)
assert_equal [".css.map", "application/json"], match_path_extname("jquery-ui.css.map", extensions)
assert_equal [".css.map", "application/json"], match_path_extname("jquery-ui.min.css.map", extensions)
refute match_path_extname("jquery.map", extensions)
refute match_path_extname("jquery.map.js", extensions)
refute match_path_extname("jquery.map.css", extensions)
end
def test_find_matching_path_for_extensions
dirname = File.expand_path("../fixtures/default", __FILE__)
extensions = { ".js" => "application/javascript", ".coffee" => "text/coffeescript" }
assert_equal [
["#{dirname}/application.coffee", "text/coffeescript"]
], find_matching_path_for_extensions(dirname, "application", extensions)
extensions = { ".txt" => "text/plain", ".jst.ejs" => "application/ejs" }
assert_equal [
["#{dirname}/hello.jst.ejs", "application/ejs"],
["#{dirname}/hello.txt", "text/plain"]
], find_matching_path_for_extensions(dirname, "hello", extensions)
end
def test_path_parents
# Parents are returned nearest-first, stopping at the optional root arg.
root = File.expand_path("../..", __FILE__)
assert_kind_of Array, path_parents(File.expand_path(__FILE__))
assert_equal ["#{root}/test", root],
path_parents(File.expand_path(__FILE__), root)
assert_equal ["#{root}/test", root],
path_parents("#{root}/test/fixtures/", root)
assert_equal ["#{root}/test/fixtures", "#{root}/test", root],
path_parents("#{root}/test/fixtures/default", root)
assert_equal ["#{root}/test/fixtures/default", "#{root}/test/fixtures", "#{root}/test", root],
path_parents("#{root}/test/fixtures/default/POW.png", root)
assert_equal ["#{root}/test/fixtures/default", "#{root}/test/fixtures", "#{root}/test"],
path_parents("#{root}/test/fixtures/default/POW.png", "#{root}/test")
assert_equal ["#{root}/test/fixtures/default"],
path_parents("#{root}/test/fixtures/default/POW.png", "#{root}/test/fixtures/default")
end
def test_find_upwards
root = File.expand_path("../..", __FILE__)
assert_equal "#{root}/Gemfile",
find_upwards("Gemfile", File.expand_path(__FILE__))
assert_equal "#{root}/Gemfile",
find_upwards("Gemfile", "#{root}/test/fixtures/")
assert_equal "#{root}/Gemfile",
find_upwards("Gemfile", "#{root}/test/fixtures/default/POW.png")
assert_equal "#{root}/test/sprockets_test.rb",
find_upwards("sprockets_test.rb", "#{root}/test/fixtures/default/POW.png")
end
FILES_IN_SERVER = Dir["#{File.expand_path("../fixtures/server", __FILE__)}/*"]
def test_stat_directory
files = stat_directory(File.expand_path("../fixtures/server", __FILE__)).to_a
assert_equal FILES_IN_SERVER.size, files.size
path, stat = stat_directory(File.expand_path("../fixtures/server", __FILE__)).first
assert_equal File.expand_path("../fixtures/server/app", __FILE__), path
assert_kind_of File::Stat, stat
assert_equal [], stat_directory(File.expand_path("../fixtures/missing", __FILE__)).to_a
end
def test_stat_tree
# stat_tree walks depth-first in directory-entry order.
files = stat_tree(File.expand_path("../fixtures/asset/tree/all", __FILE__)).to_a
assert_equal 11, files.size
path, stat = files.first
assert_equal File.expand_path("../fixtures/asset/tree/all/README.md", __FILE__), path
assert_kind_of File::Stat, stat
assert_equal [
File.expand_path("../fixtures/asset/tree/all/README.md", __FILE__),
File.expand_path("../fixtures/asset/tree/all/b", __FILE__),
File.expand_path("../fixtures/asset/tree/all/b/c", __FILE__),
File.expand_path("../fixtures/asset/tree/all/b/c/d.js", __FILE__),
File.expand_path("../fixtures/asset/tree/all/b/c/e.js", __FILE__),
File.expand_path("../fixtures/asset/tree/all/b/c.js", __FILE__),
File.expand_path("../fixtures/asset/tree/all/b.css", __FILE__),
File.expand_path("../fixtures/asset/tree/all/b.js.erb", __FILE__),
File.expand_path("../fixtures/asset/tree/all/d", __FILE__),
File.expand_path("../fixtures/asset/tree/all/d/c.coffee", __FILE__),
File.expand_path("../fixtures/asset/tree/all/d/e.js", __FILE__)
], files.map(&:first)
assert_equal [], stat_tree("#{File.expand_path("../fixtures", __FILE__)}/missing").to_a
end
def test_stat_sorted_tree
# Same traversal as stat_tree but entries within a directory are sorted,
# with files ordered before the directory that shares their basename.
files = stat_sorted_tree(File.expand_path("../fixtures/asset/tree/all", __FILE__)).to_a
assert_equal 11, files.size
path, stat = files.first
assert_equal File.expand_path("../fixtures/asset/tree/all/README.md", __FILE__), path
assert_kind_of File::Stat, stat
assert_equal [
File.expand_path("../fixtures/asset/tree/all/README.md", __FILE__),
File.expand_path("../fixtures/asset/tree/all/b.css", __FILE__),
File.expand_path("../fixtures/asset/tree/all/b.js.erb", __FILE__),
File.expand_path("../fixtures/asset/tree/all/b", __FILE__),
File.expand_path("../fixtures/asset/tree/all/b/c.js", __FILE__),
File.expand_path("../fixtures/asset/tree/all/b/c", __FILE__),
File.expand_path("../fixtures/asset/tree/all/b/c/d.js", __FILE__),
File.expand_path("../fixtures/asset/tree/all/b/c/e.js", __FILE__),
File.expand_path("../fixtures/asset/tree/all/d", __FILE__),
File.expand_path("../fixtures/asset/tree/all/d/c.coffee", __FILE__),
File.expand_path("../fixtures/asset/tree/all/d/e.js", __FILE__)
], files.map(&:first)
assert_equal [], stat_tree(File.expand_path("../fixtures/missing", __FILE__)).to_a
end
def test_atomic_write_without_errors
filename = "atomic.file"
begin
contents = "Atomic Text"
atomic_write(filename) do |file|
file.write(contents)
# The target must not exist until the block completes (tempfile + rename).
assert !File.exist?(filename)
end
assert File.exist?(filename)
assert_equal contents, File.read(filename)
ensure
# Clean up the file written into the current working directory.
File.unlink(filename) rescue nil
end
end
end
| 39.506623 | 112 | 0.677479 |
e23d6a989c00b914dbbb2a994a714dd1761ef146 | 162 | json.extract! transaction, :id, :transaction_time, :transaction_type, :description, :created_at, :updated_at
json.url transaction_url(transaction, format: :json)
| 54 | 108 | 0.802469 |
39f4568a4144bf54a7e7f74a784423c8f6f83191 | 197 | class CoactiveItemFinder < IIFinder::Base
model Item
coact Coactors::NameFinder, Coactors::AgeFinder
context :results, default: []
before_call do
@context.results << 'Main'
end
end
| 17.909091 | 49 | 0.720812 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.