hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
910a99a1aebb43d3d365952b6235cd90e4a1b845 | 1,999 | # frozen_string_literal: true
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Elastic
  module EnterpriseSearch
    module AppSearch
      module Actions
        # Engine - Add a source engine to an existing meta engine
        #
        # @param engine_name [String] (*Required*) name of the meta engine to modify
        # @param arguments [Hash] endpoint arguments
        # @option arguments [Array] :source_engines List of engine names (*Required*)
        # @option arguments [Hash] :headers optional HTTP headers to send with the request
        #
        # @see https://www.elastic.co/guide/en/app-search/current/meta-engines.html#meta-engines-add-source-engines
        #
        def add_meta_engine_source(engine_name, arguments = {})
          raise ArgumentError, "Required parameter 'engine_name' missing" unless engine_name
          raise ArgumentError, "Required parameter 'source_engines' missing" unless arguments[:source_engines]

          # :source_engines is documented as an Array of engine names, so the
          # defensive fallback must be an empty Array (the previous `|| {}`
          # was the wrong type; it is unreachable after the guard above, but
          # kept type-correct for safety).
          source_engines = arguments.delete(:source_engines) || []
          headers = arguments.delete(:headers) || {}
          request(
            :post,
            "api/as/v1/engines/#{engine_name}/source_engines/",
            arguments,
            source_engines,
            headers
          )
        end
      end
    end
  end
end
| 38.442308 | 115 | 0.687344 |
1a3674c0478307823dba8e3475257020531aba05 | 5,178 | module Twitter
class Client
# Defines methods related to direct messages
module DirectMessages
  # Fetches the 20 most recent direct messages received by the authenticating user
  #
  # @see https://dev.twitter.com/docs/api/1/get/direct_messages
  # @rate_limited Yes
  # @requires_authentication Yes
  # @response_format `json`
  # @response_format `xml`
  # @param options [Hash] A customizable set of options.
  # @option options [Integer] :since_id Returns results with an ID greater than (that is, more recent than) the specified ID.
  # @option options [Integer] :max_id Returns results with an ID less than (that is, older than) or equal to the specified ID.
  # @option options [Integer] :count Specifies the number of records to retrieve. Must be less than or equal to 200.
  # @option options [Integer] :page Specifies the page of results to retrieve.
  # @option options [Boolean, String, Integer] :include_entities Include {https://dev.twitter.com/docs/tweet-entities Tweet Entities} when set to true, 't' or 1.
  # @return [Array] Direct messages sent to the authenticating user.
  # @example Return the 20 most recent direct messages sent to the authenticating user
  #   Twitter.direct_messages
  def direct_messages(options={})
    result = get('1/direct_messages', options)
    # XML responses wrap the collection in a 'direct_messages' element.
    if format.to_s.downcase == 'xml'
      result['direct_messages']
    else
      result
    end
  end

  # Fetches the 20 most recent direct messages sent by the authenticating user
  #
  # @see https://dev.twitter.com/docs/api/1/get/direct_messages/sent
  # @rate_limited Yes
  # @requires_authentication Yes
  # @response_format `json`
  # @response_format `xml`
  # @param options [Hash] A customizable set of options.
  # @option options [Integer] :since_id Returns results with an ID greater than (that is, more recent than) the specified ID.
  # @option options [Integer] :max_id Returns results with an ID less than (that is, older than) or equal to the specified ID.
  # @option options [Integer] :count Specifies the number of records to retrieve. Must be less than or equal to 200.
  # @option options [Integer] :page Specifies the page of results to retrieve.
  # @option options [Boolean, String, Integer] :include_entities Include {https://dev.twitter.com/docs/tweet-entities Tweet Entities} when set to true, 't' or 1.
  # @return [Array] Direct messages sent by the authenticating user.
  # @example Return the 20 most recent direct messages sent by the authenticating user
  #   Twitter.direct_messages_sent
  def direct_messages_sent(options={})
    result = get('1/direct_messages/sent', options)
    if format.to_s.downcase == 'xml'
      result['direct_messages']
    else
      result
    end
  end

  # Sends a new direct message to the specified user from the authenticating user
  #
  # @see https://dev.twitter.com/docs/api/1/post/direct_messages/new
  # @rate_limited No
  # @requires_authentication Yes
  # @response_format `json`
  # @response_format `xml`
  # @param user [Integer, String] A Twitter user ID or screen name.
  # @param text [String] The text of your direct message, up to 140 characters.
  # @param options [Hash] A customizable set of options.
  # @option options [Boolean, String, Integer] :include_entities Include {https://dev.twitter.com/docs/tweet-entities Tweet Entities} when set to true, 't' or 1.
  # @return [Hashie::Mash] The sent message.
  # @example Send a direct message to @sferik from the authenticating user
  #   Twitter.direct_message_create("sferik", "I'm sending you this message via the Twitter Ruby Gem!")
  #   Twitter.direct_message_create(7505382, "I'm sending you this message via the Twitter Ruby Gem!") # Same as above
  def direct_message_create(user, text, options={})
    # Normalizes the user ID/screen name into the request options.
    merge_user_into_options!(user, options)
    result = post('1/direct_messages/new', options.merge(:text => text))
    if format.to_s.downcase == 'xml'
      result['direct_message']
    else
      result
    end
  end

  # Deletes a direct message
  #
  # @see https://dev.twitter.com/docs/api/1/post/direct_messages/destroy/:id
  # @note The authenticating user must be the recipient of the specified direct message.
  # @rate_limited No
  # @requires_authentication Yes
  # @response_format `json`
  # @response_format `xml`
  # @param id [Integer] The ID of the direct message to delete.
  # @param options [Hash] A customizable set of options.
  # @option options [Boolean, String, Integer] :include_entities Include {https://dev.twitter.com/docs/tweet-entities Tweet Entities} when set to true, 't' or 1.
  # @return [Hashie::Mash] The deleted message.
  # @example Destroys the direct message with the ID 1825785544
  #   Twitter.direct_message_destroy(1825785544)
  def direct_message_destroy(id, options={})
    result = delete("1/direct_messages/destroy/#{id}", options)
    if format.to_s.downcase == 'xml'
      result['direct_message']
    else
      result
    end
  end
end
end
end
| 58.179775 | 165 | 0.690614 |
bb7efc3275f7d5e50126ca60860e993f52322c07 | 996 | cask "cocoapods" do
  version "1.5.2"
  sha256 "03aa37afb129d6ae515d3b9ee7a81c30ba91050131e2dfbb3683bdd2f05ac67a"
  # github.com/CocoaPods/CocoaPods-app/ was verified as official when first introduced to the cask
  url "https://github.com/CocoaPods/CocoaPods-app/releases/download/#{version}/CocoaPods.app-#{version}.tar.bz2"
  # Sparkle feed used to detect new upstream releases.
  appcast "https://app.cocoapods.org/sparkle"
  name "CocoaPods.app"
  desc "Dependency manager for Cocoa projects"
  homepage "https://cocoapods.org/"
  # Both the cask and the formula install a `pod` executable, so they cannot coexist.
  conflicts_with formula: "cocoapods"
  app "CocoaPods.app"
  # Symlink the CLI helper bundled inside the app into Homebrew's bin.
  binary "#{appdir}/CocoaPods.app/Contents/Helpers/pod"
  postflight do
    # Because Homebrew Cask symlinks the binstub directly, stop the app from asking the user to install the binstub.
    system_command "/usr/bin/defaults",
                   args: ["write", "org.cocoapods.CocoaPods", "CPDoNotRequestCLIToolInstallationAgain",
                          "-bool", "true"]
  end
  # Leftover preference file removed by `brew uninstall --zap`.
  zap trash: "~/Library/Preferences/org.cocoapods.CocoaPods.plist"
end
| 38.307692 | 116 | 0.726908 |
915082176f5b13326509224048e0a1f8534bfca0 | 671 | # DRUNKWATER TEMPLATE(add description and prototypes)
# Question Title and Description on leetcode.com
# Function Declaration and Function Prototypes on leetcode.com
#53. Maximum Subarray
#Given an integer array nums, find the contiguous subarray (containing at least one number) which has the largest sum and return its sum.
#Example:
#Input: [-2,1,-3,4,-1,2,1,-5,4],
#Output: 6
#Explanation: [4,-1,2,1] has the largest sum = 6.
#Follow up:
#If you have figured out the O(n) solution, try coding another solution using the divide and conquer approach, which is more subtle.
# @param {Integer[]} nums
# @return {Integer}
# Kadane's algorithm: track the best subarray sum ending at the current
# element; the answer is the best such sum seen anywhere.
# O(n) time, O(1) extra space. Assumes nums has at least one element
# (guaranteed by the problem statement above).
def max_sub_array(nums)
  best = current = nums.first
  nums.drop(1).each do |num|
    # Either extend the running subarray or start a new one at num.
    current = [current + num, num].max
    best = current if current > best
  end
  best
end
# Time Is Money | 35.315789 | 137 | 0.746647 |
2176844058b0ab9f2b6a81f20f4c073fac2a4a48 | 1,776 | {
matrix_id: '2851',
name: 'analytics',
group: 'Precima',
description: 'Precima/analytics: data analytics problem',
author: 'C. Ugaz',
editor: 'T. Davis',
date: '2018',
kind: 'data analytics problem',
problem_2D_or_3D: '0',
num_rows: '303813',
num_cols: '303813',
nonzeros: '2006126',
num_explicit_zeros: '0',
num_strongly_connected_components: '41422',
num_dmperm_blocks: '41422',
structural_full_rank: 'true',
structural_rank: '303813',
pattern_symmetry: '1.000',
numeric_symmetry: '1.000',
rb_type: 'real',
structure: 'symmetric',
cholesky_candidate: 'no',
positive_definite: 'no',
notes: 'Precima: data analytics problem
Submitted by Carl Ugaz, Precima, Inc. [email protected]
The matrix is nonsingular, ill-conditioned, and symmetric indefinite.
If factorized with KLU, best results are obtained if the permutation to
block triangular form (BTF) is not used, because it breaks the symmetry
of the pattern.
Ignoring the diagonal (which has some zero entries and some nonzero)
the undirected graph of A divides into 41,422 connected components.
Of those, 40,966 are single nodes, and 451 components have between
4 and 90 nodes. The remaining 5 components have 93738, 89686, 40319,
15884, and 4289 nodes, respectively.
',
image_files: 'analytics.png,analytics_dmperm.png,analytics_scc.png,',
}
| 42.285714 | 83 | 0.5625 |
2120870f11aa40613db9e90a76875338dffcf173 | 377 | # === COPYRIGHT:
# Copyright (c) North Carolina State University
# Developed with funding for the National eXtension Initiative.
# === LICENSE:
#
# see LICENSE file
# Read-only mapping of the Drupal `field_data_group_audience` table,
# accessed through the `create` database connection.
class CreateGroupNode < ActiveRecord::Base
  self.establish_connection :create
  self.table_name = 'field_data_group_audience'

  # Creation time of the row, converted from the epoch-seconds column.
  def created_at
    Time.at(group_audience_created).to_datetime
  end
end
| 22.176471 | 64 | 0.763926 |
21c2006a1f896dacaeaf7838470a20b944ee5ae3 | 2,714 | # frozen_string_literal: true
require 'yle_tf/action/verify_terraform_version'
require 'yle_tf/config'
require 'yle_tf/error'
# Specs for the middleware action that validates the installed Terraform
# version against both YleTf's own requirement and the project config.
describe YleTf::Action::VerifyTerraformVersion do
  subject(:action) { described_class.new(app) }
  let(:app) { double('app', call: nil) }
  describe '#call' do
    before do
      # Pin YleTf's own version requirement so the examples are stable.
      stub_const('YleTf::TERRAFORM_VERSION_REQUIREMENT', '>= 1.1')
      allow(action).to receive(:terraform_version) { terraform_version }
    end
    subject(:call) { action.call(env) }
    let(:env) { { config: config } }
    let(:config) do
      YleTf::Config.new(
        {
          'terraform' => {
            'version_requirement' => config_requirement
          }
        }
      )
    end
    let(:config_requirement) { nil }
    context 'when Terraform not found' do
      let(:terraform_version) { nil }
      it { expect { call }.to raise_error(YleTf::Error, 'Terraform not found') }
    end
    # No version_requirement in config: only YleTf's own requirement applies.
    context 'without configuration' do
      let(:config_requirement) { nil }
      context 'with supported Terraform version' do
        let(:terraform_version) { '1.2.3' }
        it 'stores the version to env' do
          call
          expect(env[:terraform_version]).to eq('1.2.3')
        end
        it 'calls next app' do
          expect(app).to receive(:call).with(env)
          call
        end
      end
      context 'with unsupported Terraform version' do
        let(:terraform_version) { '0.11.11' }
        it do
          expect { call }.to raise_error(
            YleTf::Error, "Terraform version '>= 1.1' required by YleTf, '0.11.11' found"
          )
        end
      end
    end
    # Config requirement present: both requirements must be satisfied,
    # and YleTf's own requirement is checked first.
    context 'with configuration' do
      let(:config_requirement) { '~> 1.3.7' }
      context 'with supported Terraform version' do
        context 'when accepted by config' do
          let(:terraform_version) { '1.3.12' }
          it 'stores the version to env' do
            call
            expect(env[:terraform_version]).to eq('1.3.12')
          end
          it 'calls next app' do
            expect(app).to receive(:call).with(env)
            call
          end
        end
        context 'when denied by config' do
          let(:terraform_version) { '1.5.0' }
          it do
            expect { call }.to raise_error(
              YleTf::Error, "Terraform version '~> 1.3.7' required by config, '1.5.0' found"
            )
          end
        end
      end
      context 'with unsupported Terraform version' do
        let(:terraform_version) { '0.11.11' }
        it do
          expect { call }.to raise_error(
            YleTf::Error, "Terraform version '>= 1.1' required by YleTf, '0.11.11' found"
          )
        end
      end
    end
  end
end
| 25.603774 | 92 | 0.56227 |
91a1ff3244ab38911eeafb0adfdb36c490cbab24 | 3,299 | # -*- encoding: utf-8 -*-
# stub: net-ldap 0.16.2 ruby lib
# Auto-generated gemspec stub for net-ldap 0.16.2 (produced by RubyGems on
# install); edit the gem's source gemspec instead of this file.
Gem::Specification.new do |s|
  s.name = "net-ldap".freeze
  s.version = "0.16.2"
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Francis Cianfrocca".freeze, "Emiel van de Laar".freeze, "Rory O'Connell".freeze, "Kaspar Schiess".freeze, "Austin Ziegler".freeze, "Michael Schaarschmidt".freeze]
  s.date = "2019-11-18"
  s.description = "Net::LDAP for Ruby (also called net-ldap) implements client access for the\nLightweight Directory Access Protocol (LDAP), an IETF standard protocol for\naccessing distributed directory services. Net::LDAP is written completely in\nRuby with no external dependencies. It supports most LDAP client features and a\nsubset of server features as well.\n\nNet::LDAP has been tested against modern popular LDAP servers including\nOpenLDAP and Active Directory. The current release is mostly compliant with\nearlier versions of the IETF LDAP RFCs (2251-2256, 2829-2830, 3377, and 3771).\nOur roadmap for Net::LDAP 1.0 is to gain full <em>client</em> compliance with\nthe most recent LDAP RFCs (4510-4519, plutions of 4520-4532).".freeze
  s.email = ["[email protected]".freeze, "[email protected]".freeze, "[email protected]".freeze, "[email protected]".freeze, "[email protected]".freeze]
  s.extra_rdoc_files = ["Contributors.rdoc".freeze, "Hacking.rdoc".freeze, "History.rdoc".freeze, "License.rdoc".freeze, "README.rdoc".freeze]
  s.files = ["Contributors.rdoc".freeze, "Hacking.rdoc".freeze, "History.rdoc".freeze, "License.rdoc".freeze, "README.rdoc".freeze]
  s.homepage = "http://github.com/ruby-ldap/ruby-net-ldap".freeze
  s.licenses = ["MIT".freeze]
  s.rdoc_options = ["--main".freeze, "README.rdoc".freeze]
  s.required_ruby_version = Gem::Requirement.new(">= 2.0.0".freeze)
  s.rubygems_version = "2.7.10".freeze
  s.summary = "Net::LDAP for Ruby (also called net-ldap) implements client access for the Lightweight Directory Access Protocol (LDAP), an IETF standard protocol for accessing distributed directory services".freeze
  s.installed_by_version = "2.7.10" if s.respond_to? :installed_by_version
  # Dependency declarations vary by the RubyGems version that loads this
  # stub; all three branches declare the same development dependencies.
  if s.respond_to? :specification_version then
    s.specification_version = 4
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<flexmock>.freeze, ["~> 1.3"])
      s.add_development_dependency(%q<rake>.freeze, ["~> 10.0"])
      s.add_development_dependency(%q<rubocop>.freeze, ["~> 0.42.0"])
      s.add_development_dependency(%q<test-unit>.freeze, [">= 0"])
      s.add_development_dependency(%q<byebug>.freeze, [">= 0"])
    else
      s.add_dependency(%q<flexmock>.freeze, ["~> 1.3"])
      s.add_dependency(%q<rake>.freeze, ["~> 10.0"])
      s.add_dependency(%q<rubocop>.freeze, ["~> 0.42.0"])
      s.add_dependency(%q<test-unit>.freeze, [">= 0"])
      s.add_dependency(%q<byebug>.freeze, [">= 0"])
    end
  else
    s.add_dependency(%q<flexmock>.freeze, ["~> 1.3"])
    s.add_dependency(%q<rake>.freeze, ["~> 10.0"])
    s.add_dependency(%q<rubocop>.freeze, ["~> 0.42.0"])
    s.add_dependency(%q<test-unit>.freeze, [">= 0"])
    s.add_dependency(%q<byebug>.freeze, [">= 0"])
  end
end
| 67.326531 | 747 | 0.70385 |
91b5cc198e9e4b8cc8ec24027175014923e73680 | 209 | FactoryBot.define do
  # Builds a Product populated with randomized Faker data for specs.
  factory :product do
    title { Faker::Book.title }
    # Decimal price with two digits after the point, e.g. 12.34.
    price { Faker::Number.decimal(l_digits: 2) }
    # Availability date within the last two days.
    available_on { Faker::Date.between(from: 2.days.ago, to: Date.today) }
  end
end
| 26.125 | 74 | 0.679426 |
ffffc7bbbd75ee001cc64ad5e4a687c733b83a0c | 69 | require "manageiq/content/engine"
require "manageiq/content/version"
| 23 | 34 | 0.826087 |
01bf6ec81213ad0b5426c155a441d37697dcdb78 | 2,461 | require 'test_helper'
# Tests for the singleton reader that loads the JERM assay-type ontology
# and exposes its class hierarchy, labels and URI lookups.
class AssayTypeReaderTest < ActiveSupport::TestCase
  test 'initialise' do
    reader = Seek::Ontologies::AssayTypeReader.instance
    refute_nil reader
    refute_nil reader.ontology
    assert reader.ontology.count > 500, 'should be over 500 statements'
  end
  test 'class hierarchy' do
    reader = Seek::Ontologies::AssayTypeReader.instance
    hierarchy = reader.class_hierarchy
    refute_nil hierarchy
    assert hierarchy.is_a?(Seek::Ontologies::OntologyClass)
    # The hierarchy is rooted at the Experimental_assay_type class.
    assert_equal 'http://jermontology.org/ontology/JERMOntology#Experimental_assay_type', hierarchy.uri.to_s
    hierarchy = hierarchy.subclasses
    refute_empty hierarchy
    genomics = hierarchy.select { |t| t.uri.to_s == 'http://jermontology.org/ontology/JERMOntology#Genomics' }
    assert_equal 1, genomics.count
    refute_empty genomics.first.subclasses
    amp = genomics.first.subclasses.select { |t| t.uri.to_s == 'http://jermontology.org/ontology/JERMOntology#Amplification' }
    refute_empty amp
  end
  test 'label exists?' do
    reader = Seek::Ontologies::AssayTypeReader.instance
    assert reader.label_exists?('amplification')
    assert reader.label_exists?('AmplifiCation') # case insensitive
    refute reader.label_exists?('sdkfhsdfkhsdfhksdf')
    refute reader.label_exists?(nil)
  end
  test 'all labels' do
    reader = Seek::Ontologies::AssayTypeReader.instance
    labels = reader.all_labels
    # Pinned to the current ontology snapshot; update if the ontology changes.
    assert_equal 59, labels.size
    assert_includes labels, 'amplification'
  end
  test 'class for uri' do
    reader = Seek::Ontologies::AssayTypeReader.instance
    c = reader.class_for_uri('http://jermontology.org/ontology/JERMOntology#Amplification')
    refute_nil c
    assert_equal 'http://jermontology.org/ontology/JERMOntology#Amplification', c.uri
    # Unknown URIs resolve to nil rather than raising.
    assert_nil reader.class_for_uri('http://jermontology.org/ontology/JERMOntology#sdfskdfhsdf')
  end
  test 'parents are set' do
    amp = Seek::Ontologies::AssayTypeReader.instance.class_hierarchy.hash_by_uri['http://jermontology.org/ontology/JERMOntology#Amplification']
    refute_nil amp
    assert_equal 1, amp.parents.count
    genomics = amp.parents.first
    assert_equal 'Genomics', genomics.label
    assert_equal 'http://jermontology.org/ontology/JERMOntology#Genomics', genomics.uri.to_s
    assert_equal 1, genomics.parents.count
    assert_equal 'http://jermontology.org/ontology/JERMOntology#Experimental_assay_type', genomics.parents.first.uri.to_s
  end
end
| 38.453125 | 143 | 0.759447 |
61bf7384c6511c80442f1813256e9183bff94368 | 2,545 | class Fontforge < Formula
  desc "Command-line outline and bitmap font editor/converter"
  homepage "https://fontforge.github.io"
  url "https://github.com/fontforge/fontforge/releases/download/20220308/fontforge-20220308.tar.xz"
  sha256 "01e4017f7a0ccecf436c74b8e1f6b374fc04a5283c1d68967996782e15618e59"
  license "GPL-3.0-or-later"
  bottle do
    root_url "https://github.com/gromgit/homebrew-core-mojave/releases/download/fontforge"
    sha256 mojave: "fc067bd1009d038e364d62cfffbafac0579e072b21c3bb582acfcc38f83dbe2b"
  end
  depends_on "cmake" => :build
  depends_on "ninja" => :build
  depends_on "pkg-config" => :build
  depends_on "cairo"
  depends_on "fontconfig"
  depends_on "freetype"
  depends_on "gettext"
  depends_on "giflib"
  depends_on "glib"
  depends_on "jpeg"
  depends_on "libpng"
  depends_on "libspiro"
  depends_on "libtiff"
  depends_on "libtool"
  depends_on "libuninameslist"
  depends_on "pango"
  depends_on "[email protected]"
  depends_on "readline"
  uses_from_macos "libxml2"
  # Fix for rpath on ARM
  # https://github.com/fontforge/fontforge/issues/4658
  patch :DATA
  def install
    # CMake + Ninja out-of-tree build; the GUI is disabled (CLI-only formula).
    mkdir "build" do
      system "cmake", "..",
             "-GNinja",
             "-DENABLE_GUI=OFF",
             "-DENABLE_FONTFORGE_EXTRAS=ON",
             *std_cmake_args
      system "ninja"
      system "ninja", "install"
    end
  end
  def caveats
    on_macos do
      <<~EOS
        This formula only installs the command line utilities.
        FontForge.app can be downloaded directly from the website:
          https://fontforge.github.io
        Alternatively, install with Homebrew Cask:
          brew install --cask fontforge
      EOS
    end
  end
  test do
    # Smoke-test the CLI, its embedded Python, and the installed Python module.
    system bin/"fontforge", "-version"
    system bin/"fontforge", "-lang=py", "-c", "import fontforge; fontforge.font()"
    system Formula["[email protected]"].opt_bin/"python3", "-c", "import fontforge; fontforge.font()"
  end
end
__END__
diff --git a/contrib/fonttools/CMakeLists.txt b/contrib/fonttools/CMakeLists.txt
index 0d3f464bc..b9f210cde 100644
--- a/contrib/fonttools/CMakeLists.txt
+++ b/contrib/fonttools/CMakeLists.txt
@@ -18,3 +18,5 @@ target_link_libraries(dewoff PRIVATE ZLIB::ZLIB)
target_link_libraries(pcl2ttf PRIVATE MathLib::MathLib)
target_link_libraries(ttf2eps PRIVATE fontforge)
target_link_libraries(woff PRIVATE ZLIB::ZLIB)
+
+install(TARGETS acorn2sfd dewoff findtable pcl2ttf pfadecrypt rmligamarks showttf stripttc ttf2eps woff RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
| 31.036585 | 149 | 0.709627 |
212b1b5c4e5e37bb0c7125c5293195fe2f331688 | 3,198 | namespace :ds_tools do
  # Connection and account defaults, overridable via environment variables.
  @ssh_user = ENV['SSH_USER'] || "root"
  @ssh_port = ENV['PORT'] || "22"
  @user = ENV['USERNAME'] || "admin"
  @group = ENV['GROUP'] || @user
  desc "Runs several initial server preparation tasks for Moonshine. Creates #{@user.capitalize} user as sudo. Changes root password."
  task :prepare do
    default_settings
    create_sudo_user
    change_root_password
  end
  desc "Change user password"
  task :change_user_password do
    default_settings
    change_password(@user)
  end
  desc "Change root password"
  task :change_root_password do
    default_settings
    change_password(@ssh_user)
  end
  desc "Creates a user on server"
  task :create_user do
    default_settings
    # change_password yields after the new password is collected; the block
    # creates the group/user only if the user does not already exist.
    change_password(@user) do
      unless user_exists?(@user)
        sudo "cat /etc/group" do |channel, stream, data|
          sudo "addgroup #{@group}" unless /#{@group}/.match(data)
          sudo "adduser #{@user.downcase} --quiet --ingroup #{@group} --disabled-password --gecos ''"
        end
      else
        puts "#{@user.capitalize} already exists"
      end
    end
  end
  desc "Add user to sudoers"
  task :add_user_as_sudo do
    default_settings
    # Append a sudoers entry only if an equivalent line is not already present.
    sudo "cat /etc/sudoers" do |channel, stream, data|
      sudo "echo '#{@user.downcase} ALL=(ALL) ALL' >> /etc/sudoers" unless /%#{@user.downcase}\s*ALL=\(ALL\)\s*ALL/.match(data)
    end
  end
  desc "Create user and add to sudoers"
  task :create_sudo_user do
    default_settings
    create_user
    add_user_as_sudo
  end
  desc "Add user to group"
  task :add_user_to_group do
    # the "-a" flag in usermod retains user's
    # current group(s) when adding new one
    # otherwise it would remove user
    # from all existing groups with the -G flag
    sudo "usermod -aG #{@group} #{@user}"
  end
  # Point Capistrano at the configured SSH user and port.
  def default_settings
    set :user, @ssh_user
    set :port, @ssh_port
  end
  # Run `passwd` for +user+, feeding the interactively collected password
  # into the prompts; yields before changing the password so callers can
  # perform setup (e.g. creating the user first).
  def change_password(user)
    prompt_for_new_password(user) do
      yield if block_given?
      sudo "passwd #{user.downcase}" do |channel, stream, data|
        case data
        when /Enter new/i then channel.send_data("#{new_user_password}\n")
        when /Retype new/i then channel.send_data("#{confirmed_new_user_password}\n")
        when /.*successfully.*/ then puts "#{user} user password changed successfully"
        else
          puts "Setting #{user} password failed"
          puts data
        end
      end
    end
  end
  # Ask for the new password twice (hidden input) and stash both answers.
  def change_password_prompt(user)
    set(:new_user_password) { Capistrano::CLI.password_prompt("New #{user} password: ") }
    set(:confirmed_new_user_password) { Capistrano::CLI.password_prompt("RETYPE New #{user} password: ") }
  end
  # Re-prompt recursively until both password entries match, then yield.
  def prompt_for_new_password(user)
    change_password_prompt(user)
    if (new_user_password == confirmed_new_user_password)
      yield if block_given?
    else
      puts
      puts "===== Passwords do not match - Please try again ====="
      prompt_for_new_password(user)
    end
  end
  # True if @user (note: not the +user+ argument) appears in /etc/passwd.
  def user_exists?(user)
    result = false
    sudo "cat /etc/passwd" do |channel, stream, data|
      result = /^#{@user.downcase}:/.match(data) ? true : false
    end
    result
  end
end
| 25.380952 | 134 | 0.645091 |
18a8482a9d4e75ea17754f96d65d75ba1a2db423 | 144 | require 'test_helper'
class AttrFormatter::Test < ActiveSupport::TestCase
  # Smoke test: the gem's top-level constant loads and is a Module.
  test "truth" do
    assert_kind_of Module, AttrFormatter
  end
end
| 18 | 51 | 0.770833 |
33c91f07474d492106f8558ae4f3800709d5dd4f | 1,134 | require './lib/jiraSOAP/version'
# Gem specification for jiraSOAP; the version constant comes from
# lib/jiraSOAP/version (required above).
Gem::Specification.new do |s|
  s.name = 'jiraSOAP'
  s.version = JIRA::VERSION
  s.summary = 'A Ruby client for the JIRA SOAP API'
  s.description = 'Written to run fast and work on Ruby 1.9 as well as MacRuby'
  s.authors = ['Mark Rada']
  s.email = ['[email protected]']
  s.homepage = 'http://github.com/Marketcircle/jiraSOAP'
  s.license = 'MIT'
  s.files = Dir.glob('lib/**/*.rb') + ['.yardopts', 'Rakefile']
  s.test_files = Dir.glob('test/**/*')
  s.extra_rdoc_files = [
    'README.markdown',
    'ChangeLog',
    'LICENSE.txt',
    'docs/GettingStarted.markdown',
    'docs/Examples.markdown'
  ]
  s.add_runtime_dependency 'nokogiri', '~> 1.5.0'
  s.add_runtime_dependency 'handsoap', '~> 1.1.8'
  # s.add_runtime_dependency 'httpclient', '~> 2.2.1'
  s.add_development_dependency 'yard', '~> 0.7.5'
  s.add_development_dependency 'redcarpet', '~> 1.17'
  s.add_development_dependency 'minitest', '~> 2.11'
end
| 35.4375 | 79 | 0.560847 |
6a8523cccd462b4c114c98c906c520e35b62708f | 2,501 | #!/usr/bin/ruby
#######################################################################################################################
require 'yaml'
require 'deep_merge'
require 'pp'
######################################################################################################################
hiera_yaml_file = './hiera.yaml'
hieradata_root_dir = './data'
hr = "-------------------------------------------------"
######################################################################################################################
# Prompt for every fact referenced in the hierarchy (extend if more facts are used).
puts "\nFACTS USED IN HIERARCHY:\n#{hr}"
print "NODETYPE: "
nodetype = gets.chomp
######################################################################################################################
# Expand the hierarchy paths from hiera.yaml, substituting the collected
# facts into each level's path template.
hiera_yaml = YAML.load(File.read(hiera_yaml_file))
yaml_files = hiera_yaml['hierarchy'].map { |level| level['path'].gsub('%{nodetype}', nodetype) } # extend if there are more facts used
######################################################################################################################
# Keep only the hierarchy levels that actually exist on disk.
matched_yaml_files = yaml_files.select { |file| File.exist?("#{hieradata_root_dir}/#{file}") }
######################################################################################################################
if matched_yaml_files.empty?
  puts "\n[ERROR] No .yaml files found!\n"
else
  puts "\nYAML FILES TO BE MERGED:\n#{hr}"
  matched_yaml_files.each { |file| puts "#{hieradata_root_dir}/#{file}" }
  # Deep-merge the levels in hierarchy order. reduce reproduces the original
  # pairwise merge loop exactly: a single file is printed unmerged, and
  # later files are merged into the accumulated result one at a time.
  output = matched_yaml_files
           .map { |file| YAML.load(File.read("#{hieradata_root_dir}/#{file}")) }
           .reduce { |merged, data| merged.deep_merge(data) }
  puts "\nDEEP-MERGING OUTPUT:\n#{hr}"
  pp output
  puts "\n"
end
| 39.078125 | 119 | 0.455418 |
edbab507841e5e62bdc399ae064133ee6914b27b | 1,602 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2020_03_05_170142) do
  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"
  # Employees identified by a unique email address.
  create_table "employees", force: :cascade do |t|
    t.string "name"
    t.string "last_name"
    t.string "gender"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.string "email"
    t.index ["email"], name: "index_employees_on_email", unique: true
  end
  # Time-clock entries; time_out is nullable while a shift is still open.
  create_table "entries", force: :cascade do |t|
    t.bigint "employee_id", null: false
    t.date "date", null: false
    t.time "time_in", null: false
    t.time "time_out"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.index ["employee_id"], name: "index_entries_on_employee_id"
  end
  add_foreign_key "entries", "employees"
end
| 40.05 | 86 | 0.738452 |
0888af05e943177356325c1660dba9c429ea4538 | 2,476 | #
# Author:: Jimmy McCrory (<[email protected]>)
# Copyright:: Copyright (c) 2014 Jimmy McCrory
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
# Specs for `knife node environment set NODE ENVIRONMENT`.
describe Chef::Knife::NodeEnvironmentSet do
  before(:each) do
    Chef::Config[:node_name] = "webmonkey.example.com"
    @knife = Chef::Knife::NodeEnvironmentSet.new
    @knife.name_args = [ "adam", "bar" ]
    allow(@knife).to receive(:output).and_return(true)
    # Stubbed node so no server round-trips happen during the examples.
    @node = Chef::Node.new()
    @node.name("knifetest-node")
    @node.chef_environment << "foo"
    allow(@node).to receive(:save).and_return(true)
    allow(Chef::Node).to receive(:load).and_return(@node)
  end
  describe "run" do
    it "should load the node" do
      expect(Chef::Node).to receive(:load).with("adam")
      @knife.run
    end
    it "should update the environment" do
      @knife.run
      expect(@node.chef_environment).to eq("bar")
    end
    it "should save the node" do
      expect(@node).to receive(:save)
      @knife.run
    end
    it "should print the environment" do
      expect(@knife).to receive(:output).and_return(true)
      @knife.run
    end
    # Missing ENVIRONMENT argument: the command must print usage and exit.
    describe "with no environment" do
      # Set up outputs for inspection later
      before(:each) do
        @stdout = StringIO.new
        @stderr = StringIO.new
        allow(@knife.ui).to receive(:stdout).and_return(@stdout)
        allow(@knife.ui).to receive(:stderr).and_return(@stderr)
      end
      it "should exit" do
        @knife.name_args = [ "adam" ]
        expect { @knife.run }.to raise_error SystemExit
      end
      it "should show the user the usage and an error" do
        @knife.name_args = [ "adam" ]
        begin ; @knife.run ; rescue SystemExit ; end
        expect(@stdout.string).to eq "USAGE: knife node environment set NODE ENVIRONMENT\n"
        expect(@stderr.string).to eq "FATAL: You must specify a node name and an environment.\n"
      end
    end
  end
end
| 30.567901 | 96 | 0.66357 |
f85069e712e05f1c12292d378cdf38e6be757405 | 2,234 | #
# Author:: Tim Hinderliter (<[email protected]>)
# Copyright:: Copyright (c) 2011 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Set of methods to create model objects for spec tests.
# None of these methods save or otherwise commit the objects they
# create; they simply initialize the respective model object and
# set its name (and other important attributes, where appropriate).
# Build an unsaved Chef::Node with the given name.
def make_node(name)
  Chef::Node.new.tap { |node| node.name(name) }
end
# Build an unsaved Chef::Role with the given name.
def make_role(name)
  Chef::Role.new.tap { |role| role.name(name) }
end
# Build an unsaved Chef::Environment with the given name.
def make_environment(name)
  Chef::Environment.new.tap { |environment| environment.name(name) }
end
# Build an unsaved Chef::CookbookVersion with the given name and version.
def make_cookbook(name, version)
  cookbook = Chef::CookbookVersion.new(name)
  cookbook.version = version
  cookbook
end
# Build a Chef::RunList containing the given items, in order.
def make_runlist(*items)
  items.each_with_object(Chef::RunList.new) { |item, run_list| run_list << item }
end
# Stub Chef::Checksum.new(checksum) so the returned object's storage either
# reports a fake on-disk file location (present == true) or raises
# Errno::ENOENT (present == false).
# NOTE(review): this uses the legacy RSpec `should_receive`/`stub` syntax and
# a block passed to `and_return` (RSpec 2 era); these forms were removed in
# later RSpec versions -- confirm the pinned rspec version before reusing.
def stub_checksum(checksum, present = true)
Chef::Checksum.should_receive(:new).with(checksum).and_return do
obj = stub(Chef::Checksum)
obj.should_receive(:storage).and_return do
storage = stub("storage")
if present
storage.should_receive(:file_location).and_return("/var/chef/checksums/#{checksum[0..1]}/#{checksum}")
else
storage.should_receive(:file_location).and_raise(Errno::ENOENT)
end
storage
end
obj
end
end
# Group cookbook_version objects by cookbook name.
# Returns a Hash shaped like:
#   { "cookbook_name" => [CookbookVersion, CookbookVersion], ... }
def make_filtered_cookbook_hash(*array_cookbook_versions)
  array_cookbook_versions.each_with_object({}) do |cookbook_version, grouped|
    (grouped[cookbook_version.name] ||= []) << cookbook_version
  end
end
| 26.595238 | 110 | 0.723814 |
e29e76d3e1a2fb04583cf202a38439459bd43441 | 74 | # frozen_string_literal: true
# Namespace placeholder: Rabbit::Receiving is presumably populated by other
# files in this gem (receiving-related classes live under this module).
module Rabbit
module Receiving
end
end
| 10.571429 | 29 | 0.783784 |
bb112b957f355aa759913fd0bce088cd81618e10 | 25,568 | # frozen_string_literal: true
require_relative 'meta'
require_relative 'map'
require_relative 'entities'
require_relative '../base'
require_relative '../company_price_up_to_face'
module Engine
module Game
module G1840
class Game < Game::Base
include_meta(G1840::Meta)
include Map
include Entities
include CompanyPriceUpToFace
register_colors(red: '#d1232a',
orange: '#f58121',
black: '#110a0c',
blue: '#025aaa',
purple: '#A79ECD',
lightBlue: '#8dd7f6',
yellow: '#ffe600',
green: '#32763f',
brightGreen: '#6ec037')
TRACK_RESTRICTION = :permissive
SELL_BUY_ORDER = :sell_buy
CURRENCY_FORMAT_STR = '%d'
BANK_CASH = 99_999
CERT_LIMIT = { 3 => 16, 4 => 14, 5 => 13, 6 => 12 }.freeze
STARTING_CASH = { 3 => 300, 4 => 260, 5 => 230, 6 => 200 }.freeze
ADDITIONAL_CASH = 350
OPERATING_ROUND_NAME = 'Line'
AVAILABLE_CORP_COLOR = '#c6e9af'
EBUY_DEPOT_TRAIN_MUST_BE_CHEAPEST = false
EBUY_CAN_SELL_SHARES = false
ALLOW_TRAIN_BUY_FROM_OTHERS = false
TILE_LAYS = [{ lay: true, upgrade: true, cost: 0 }, { lay: true, upgrade: true, cost: 0 }].freeze
GAME_END_CHECK = { custom: :current_round }.freeze
GAME_END_REASONS_TEXT = Base::GAME_END_REASONS_TEXT.merge(
custom: 'Fixed number of Rounds'
)
NEXT_SR_PLAYER_ORDER = :least_cash
MARKET_TEXT = {
par: 'City Corporation Par',
par_2: 'Major Corporation Par',
}.freeze
STOCKMARKET_COLORS = Base::STOCKMARKET_COLORS.merge(
par: :red,
par_2: :green,
).freeze
PAR_RANGE = {
city: [65, 75, 85, 95],
major: [70, 80, 90, 100],
}.freeze
INITIAL_CITY_PAR = {
'W' => 95,
'V' => 85,
'G' => 75,
'D' => 65,
}.freeze
INITIAL_CITY_TOKENS = {
'W' => [
{ coordinate: 'I1' },
{ coordinate: 'I9' },
{ coordinate: 'I15' },
{ coordinate: 'F24' },
],
'V' => [
{ city_index: 1, coordinate: 'A17' },
{ coordinate: 'A13' },
{ coordinate: 'B10' },
{ coordinate: 'C7' },
{ coordinate: 'F6' },
{ coordinate: 'G3' },
],
'G' => [
{ coordinate: 'A17' },
{ coordinate: 'D12' },
{ coordinate: 'I11' },
],
'D' => [
{ city_index: 2, coordinate: 'A17' },
{ coordinate: 'D22' },
{ coordinate: 'E23' },
{ city_index: 1, coordinate: 'F24' },
],
}.freeze
PROGRESS_INFORMATION = [
{ type: :PRE },
{ type: :SR, name: '1' },
{ type: :CR, name: '1', value: '1x' },
{ type: :LR, name: '1a' },
{ type: :LR, name: '1b' },
{ type: :CR, name: '2', value: '1x' },
{ type: :SR, name: '2' },
{ type: :LR, name: '2a' },
{ type: :LR, name: '2b' },
{ type: :CR, name: '3', value: '1x' },
{ type: :SR, name: '3' },
{ type: :LR, name: '3a' },
{ type: :LR, name: '3b' },
{ type: :CR, name: '4', value: '2x' },
{ type: :SR, name: '4' },
{ type: :LR, name: '4a' },
{ type: :LR, name: '4b' },
{ type: :CR, name: '5', value: '3x' },
{ type: :SR, name: '5' },
{ type: :LR, name: '5a' },
{ type: :LR, name: '5b' },
{ type: :LR, name: '5c' },
{ type: :CR, name: '6', value: '10x' },
{ type: :End },
].freeze
CITY_TRACK_EXITS = {
# G
'B16' => [1, 3],
'B14' => [0, 4],
'C13' => [0, 3],
'D12' => [5, 3],
'E13' => [0, 2],
'F12' => [0, 3],
'H12' => [0, 2],
# V
'B10' => [0, 3],
'C9' => [1, 3],
'D6' => [5, 3],
'E7' => [0, 2],
'F6' => [0, 3],
'G5' => [1, 3],
# D
'B20' => [2, 5],
'C21' => [2, 5],
'D22' => [2, 5],
'E23' => [2, 5],
# W
'G23' => [1, 3],
'G21' => [1, 4],
'G19' => [1, 4],
'G17' => [0, 4],
'H16' => [0, 3],
'I15' => [1, 3],
'I13' => [1, 4],
'I9' => [1, 4],
'I7' => [1, 4],
'I5' => [1, 4],
'I3' => [1, 4],
}.freeze
CITY_HOME_HEXES = {
'G' => %w[A17 I11],
'V' => %w[A17 G3],
'D' => %w[A17 F24],
'W' => %w[I1 F24],
}.freeze
RED_TILES = %w[D20 E19 E21].freeze
TILES_FIXED_ROTATION = %w[L30a L30b L31a L31b].freeze
PURPLE_SPECIAL_TILES = {
'G11' => %w[L30a L31a],
'F24' => %w[L30b L31b],
}.freeze
TRAIN_ORDER = [
%w[Y1 O1],
%w[Y2 O2 R1],
%w[O3 R2 Pi1],
%w[R3 Pi2 Pu1],
%w[Pi3 Pu2],
[],
].freeze
DEPOT_CLEARING = [
'',
'',
'Y1',
'O1',
'R1',
].freeze
MAINTENANCE_COST = {
'Y1' => {},
'O1' => {},
'R1' => { 'Y1' => -50 },
'Pi1' => { 'Y1' => -200, 'O1' => -100, 'R1' => -50 },
'Pu1' => { 'Y1' => -400, 'O1' => -300, 'R1' => -100, 'Pu1' => 200 },
}.freeze
PRICE_MOVEMENT_CHART = [
['Dividend', 'Share Price Change'],
['0', '1 ←'],
['10 - 90', 'none'],
['100 - 190', '1 →'],
['200 - 390', '2 →'],
['400 - 590', '3 →'],
['600 - 990', '4 →'],
['1000 - 1490', '5 →'],
['1500 - 2490', '6 →'],
['2500+', '7 →'],
].freeze
TRAIN_FOR_PLAYER_COUNT = {
2 => { Y1: 2, O1: 3, R1: 3, Pi1: 3, Pu1: 3 },
3 => { Y1: 4, O1: 4, R1: 4, Pi1: 4, Pu1: 4 },
4 => { Y1: 6, O1: 5, R1: 5, Pi1: 5, Pu1: 5 },
5 => { Y1: 8, O1: 6, R1: 6, Pi1: 6, Pu1: 6 },
6 => { Y1: 10, O1: 7, R1: 7, Pi1: 7, Pu1: 7 },
}.freeze
CR_MULTIPLIER = [1, 1, 1, 2, 3, 10].freeze
attr_reader :tram_corporations, :major_corporations, :tram_owned_by_corporation, :city_graph, :city_tracks
def setup
@intern_cr_phase_counter = 0
@cr_counter = 0
@first_stock_round = true
@or = 0
@active_maintenance_cost = {}
@player_debts = Hash.new { |h, k| h[k] = 0 }
@last_revenue = Hash.new { |h, k| h[k] = 0 }
@player_order_first_sr = Hash.new { |h, k| h[k] = 0 }
@all_tram_corporations = @corporations.select { |item| item.type == :minor }
@tram_corporations = @all_tram_corporations.reject { |item| item.id == '2' }.sort_by do
rand
end.first(@players.size + 1)
@tram_corporations.each { |corp| corp.reservation_color = self.class::AVAILABLE_CORP_COLOR }
@unavailable_tram_corporations = @all_tram_corporations - @tram_corporations
@city_corporations = @corporations.select { |item| item.type == :city }
@major_corporations = @corporations.select { |item| item.type == :major }
.sort_by { rand }.first(@players.size)
@tram_owned_by_corporation = {}
@major_corporations.each do |item|
@tram_owned_by_corporation[item] = []
end
@city_corporations.each do |corporation|
par_value = INITIAL_CITY_PAR[corporation.id]
price = @stock_market.par_prices.find { |p| p.price == par_value }
@stock_market.set_par(corporation, price)
corporation.ipoed = true
initial_coordinates_info = INITIAL_CITY_TOKENS[corporation.id]
initial_coordinates_info.each do |info|
token = corporation.find_token_by_type
city_index = info[:city_index] || 0
hex_by_id(info[:coordinate]).tile.cities[city_index].place_token(corporation, token,
check_tokenable: false)
end
corporation.owner = @share_pool
train = @depot.upcoming.find { |item| item.name == 'City' }
@depot.remove_train(train)
train.owner = corporation
corporation.trains << train
end
@corporations.clear
@corporations.concat(@major_corporations)
@corporations.concat(@city_corporations)
@corporations.concat(@tram_corporations)
@city_graph = Graph.new(self, skip_track: :broad)
@city_tracks = {
'D' => %w[B20 C21 D22 E23],
'G' => %w[B16 B14 C13 D12 E13 F12 H12],
'V' => %w[B10 C9 D6 E7 F6 G5],
'W' => %w[G23 G21 G19 G17 H16 I15 I13 I9 I7 I5 I3],
}
setup_company_price_up_to_face
end
def init_graph
Graph.new(self, skip_track: :narrow)
end
def new_auction_round
Engine::Round::Auction.new(self, [
G1840::Step::SelectionAuction,
])
end
def player_order_round
G1840::Round::Choices.new(self, [
G1840::Step::ChoosePlayerOrder,
])
end
def stock_round
if @first_stock_round
@log << "Every Player receives #{format_currency(ADDITIONAL_CASH)} to par a corporation"
@players.each { |item| @bank.spend(ADDITIONAL_CASH, item) }
@first_stock_round = false
end
G1840::Round::Stock.new(self, [
G1840::Step::BuySellParShares,
])
end
def init_company_round
@round_counter += 1
@intern_cr_phase_counter = 1
@cr_counter += 1
remove_obsolete_trains
@log << "-- #{round_description('Company', nil)} --"
new_company_operating_route_round
end
def new_operating_round(round_num = 1)
if [2, 6, 8].include?(@or)
@phase.next!
@operating_rounds = @phase.operating_rounds
end
@log << "-- #{round_description(self.class::OPERATING_ROUND_NAME, round_num)} --"
@or += 1
@round_counter += 1
operating_round(round_num)
end
def new_company_operating_route_round
G1840::Round::Company.new(self, [
G1840::Step::SellCompany,
G1840::Step::Route,
G1840::Step::Dividend,
], no_city: false)
end
def new_company_operating_buy_train_round
G1840::Round::Company.new(self, [
G1840::Step::SellCompany,
G1840::Step::BuyTrain,
], no_city: true)
end
def new_company_operating_auction_round
G1840::Round::Acquisition.new(self, [
G1840::Step::SellCompany,
G1840::Step::InterruptingBuyTrain,
G1840::Step::AcquisitionAuction,
])
end
def new_company_operating_switch_trains
G1840::Round::Company.new(self, [
G1840::Step::SellCompany,
G1840::Step::ReassignTrains,
], no_city: true)
end
def operating_round(round_num)
G1840::Round::Line.new(self, [
G1840::Step::SellCompany,
G1840::Step::SpecialTrack,
G1840::Step::SpecialToken,
G1840::Step::BuyCompany,
Engine::Step::HomeToken,
G1840::Step::TrackAndToken,
G1840::Step::Route,
G1840::Step::Dividend,
[G1840::Step::BuyCompany, { blocks: true }],
], round_num: round_num)
end
def next_round!
@round =
case @round
when Engine::Round::Stock
reorder_players(log_player_order: true)
if @cr_counter.zero?
init_company_round
else
new_operating_round(@round.round_num)
end
when G1840::Round::Company
@intern_cr_phase_counter += 1
if @intern_cr_phase_counter < 3
new_company_operating_buy_train_round
elsif @intern_cr_phase_counter < 4
new_company_operating_auction_round
elsif @cr_counter == 1
new_operating_round(@round.round_num)
else
new_stock_round
end
when new_company_operating_auction_round.class
new_company_operating_switch_trains
when Engine::Round::Operating
if @round.round_num < @operating_rounds
or_round_finished
new_operating_round(@round.round_num + 1)
else
@turn += 1
or_round_finished
or_set_finished
init_company_round
end
when init_round.class
player_order_round
when player_order_round.class
init_round_finished
order_for_first_sr
new_stock_round
end
end
def par_prices(corp)
par_nodes = stock_market.par_prices
available_par_prices = PAR_RANGE[corp.type]
par_nodes.select { |par_node| available_par_prices.include?(par_node.price) }
end
def all_major_corporations_ipoed?
@major_corporations.all?(&:ipoed)
end
def can_par?(corporation, parrer)
super && corporation.type == :major
end
def show_progress_bar?
true
end
def progress_information
self.class::PROGRESS_INFORMATION
end
def corporate_card_minors(corporation)
@tram_owned_by_corporation[corporation] || []
end
def owning_major_corporation(corporation)
@tram_owned_by_corporation.find { |_k, v| v.find { |item| item == corporation } }&.first
end
def buy_tram_corporation(buying_corporation, tram_corporation)
tram_corporation.ipoed = true
tram_corporation.ipo_shares.each do |share|
@share_pool.transfer_shares(
share.to_bundle,
share_pool,
spender: share_pool,
receiver: buying_corporation,
price: 0,
allow_president_change: false
)
end
tram_corporation.owner = buying_corporation.owner
@tram_owned_by_corporation[buying_corporation] << tram_corporation
@tram_corporations.delete(tram_corporation)
end
def restock_tram_corporations
count_new_tram_corporations = @players.size + 1 - @tram_corporations.size
return if count_new_tram_corporations.zero?
new_tram_corporations = @unavailable_tram_corporations.sort_by { rand }.first(count_new_tram_corporations)
new_tram_corporations.each { |corp| corp.reservation_color = self.class::AVAILABLE_CORP_COLOR }
@tram_corporations.concat(new_tram_corporations)
@corporations.concat(new_tram_corporations)
@unavailable_tram_corporations -= new_tram_corporations
end
def payout_companies
return unless @intern_cr_phase_counter == 1
super
end
def place_home_token(corporation)
super
@graph.clear
end
# Cash available for purchases. City corporations cannot buy anything,
# majors spend their own treasury, and tram (minor) corporations spend
# the treasury of the major corporation that owns them.
def buying_power(entity, **)
  case entity.type
  when :city
    0
  when :major
    entity.cash
  else
    owning_major_corporation(entity).cash
  end
end
# True when the tile carries an orange frame marker.
def orange_framed?(tile)
  frame = tile.frame
  !frame.nil? && frame.color == '#ffa500'
end
def upgrades_to?(from, to, special = false, selected_company: nil)
if from.towns.empty? && from.cities.empty? && !to.towns.empty? && to.cities.empty? &&
from.color == :white && to.color == :yellow
return true
end
if orange_framed?(from) && from.towns.size == 1 &&
to.towns.size == 2 && from.color == :yellow && to.color == :green
return true
end
return true if from.color == :red && to.color == :red && RED_TILES.include?(from.hex.coordinates)
return true if from.color == :purple && to.color == :purple
super
end
def needed_exits_for_hex(hex)
CITY_TRACK_EXITS[hex.id]
end
# Display name for a train in the info panel: the currently available
# variant first, with the remaining variant names in parentheses. When no
# variant is active yet, all names are listed flat.
def info_train_name(train)
  all_names = train.names_to_prices.keys.sort
  current = active_variant(train)
  return all_names.join(', ') if current.nil?

  others = all_names - [current]
  "#{current}, (#{others.join(', ')})"
end
def info_available_train(_first_train, train)
!active_variant(train).nil?
end
def info_train_price(train)
name_and_prices = train.names_to_prices.sort_by { |k, _v| k }.to_h
active_variant = active_variant(train)
return name_and_prices.values.map { |p| format_currency(p) }.join(', ') unless active_variant
active_price = name_and_prices[active_variant]
name_and_prices.delete(active_variant)
"#{active_price}, (#{name_and_prices.values.map { |p| format_currency(p) }.join(', ')})"
end
def active_variant(train)
(available_trains & train.variants.keys).first
end
def available_trains
index = [@cr_counter - 1, 0].max
TRAIN_ORDER[index]
end
def remove_obsolete_trains
train_to_remove = DEPOT_CLEARING[@cr_counter - 1]
return unless train_to_remove
@depot.export_all!(train_to_remove)
end
def buy_train(operator, train, price = nil)
super
new_cost = MAINTENANCE_COST[train.sym]
@active_maintenance_cost = new_cost if new_cost['Y1'] &&
(@active_maintenance_cost['Y1'].nil? ||
new_cost['Y1'] < @active_maintenance_cost['Y1'])
end
def status_str(corporation)
return "Maintenance: #{format_currency(maintenance_costs(corporation))}" if corporation.type == :minor
return 'Revenue' if corporation.type == :major
end
def status_array(corporation)
return if corporation.type != :major
["Last: #{format_currency(@last_revenue[corporation])}",
"Next: #{format_currency(major_revenue(corporation))}"]
end
# Total maintenance cost across all trains owned by +corporation+.
def maintenance_costs(corporation)
  corporation.trains.reduce(0) { |total, train| total + train_maintenance(train.sym) }
end
# Maintenance cost for a single train symbol; 0 when no cost is active.
def train_maintenance(train_sym)
  @active_maintenance_cost.fetch(train_sym, 0)
end
# Total revenue for a set of routes run by one corporation.
# Tram (minor) corporations add their (typically negative) maintenance
# costs; all other corporations scale revenue by the current Company Round
# multiplier (1x/2x/3x/10x, see CR_MULTIPLIER).
# NOTE(review): "multipler" is a typo consistent with the method definition
# further down this class; renaming would need to touch both sites.
def routes_revenue(routes)
return super if routes.empty?
corporation = routes.first.train.owner
sum = routes.sum(&:revenue)
return sum + maintenance_costs(corporation) if corporation.type == :minor
sum * current_cr_multipler
end
def scrap_train(train, entity)
@log << "#{entity.name} scraps #{train.name}"
remove_train(train)
train.owner = nil
end
def increase_debt(player, amount)
@player_debts[player] += amount * 2
end
def player_debt(player)
@player_debts[player]
end
def player_value(player)
super - player_debt(player)
end
def check_other(route)
check_track_type(route)
end
# Validate that a route only uses the track type allowed for its
# corporation: city corporations may only run on narrow track, and tram
# (minor) corporations may only run on broad track.
#
# Bug fix: the broad-track check subtracted the *string* 'broad', but
# Path#track values are symbols (the city check above compares against
# :narrow and setup uses `skip_track: :broad`), so every minor route
# raised even when it was entirely broad track. Compare against :broad.
def check_track_type(route)
  corporation = route.corporation
  track_types = route.chains.flat_map { |item| item[:paths] }.flat_map(&:track).uniq
  if corporation.type == :city && !(track_types - [:narrow]).empty?
    raise GameError,
          'Route may only contain narrow tracks'
  end
  return if corporation.type != :minor || (track_types - [:broad]).empty?

  raise GameError, 'Route may only contain broad tracks'
end
def graph_for_entity(entity)
return @city_graph if entity.type == :city
@graph
end
def major_revenue(corporation)
corporate_card_minors(corporation).sum(&:cash)
end
def price_movement_chart
PRICE_MOVEMENT_CHART
end
def update_last_revenue(entity)
@last_revenue[entity] = major_revenue(entity)
end
def revenue_for(route, stops)
# without city or with tokened city
base_revenue = stops.sum do |stop|
next 0 if stop.is_a?(Engine::Part::City) && stop.tokens.none? do |token|
token&.corporation == route.corporation
end
stop.route_revenue(route.phase, route.train)
end
return base_revenue if route.corporation.type == :city
valid_stops = stops.reject do |s|
s.hex.tile.cities.empty? && s.hex.tile.towns.empty?
end
hex_ids = valid_stops.map { |s| s.hex.id }.uniq
major_corp = owning_major_corporation(route.corporation)
major_corp.companies.each do |company|
abilities(company, :hex_bonus) do |ability|
base_revenue += hex_ids&.sum { |id| ability.hexes.include?(id) ? ability.amount : 0 }
end
end
base_revenue
end
def check_connected(route, token)
return if route.corporation.type == :city
super
end
def scrappable_trains(entity)
corporate_card_minors(entity).flat_map(&:trains) + entity.trains
end
def scrap_info(train)
"Maintenance: #{format_currency(train_maintenance(train.sym))}"
end
def scrap_button_text
'Scrap'
end
def num_trains(train)
num_players = [@players.size, 3].max
TRAIN_FOR_PLAYER_COUNT[num_players][train[:name].to_sym]
end
def set_order_for_first_sr(player, index)
@player_order_first_sr[player] = index
end
def order_for_first_sr
@players.sort_by! { |p| @player_order_first_sr[p] }
@log << "Priority order: #{@players.map(&:name).join(', ')}"
end
def entity_can_use_company?(entity, company)
return true if entity.player? && entity == company.owner
return true if entity.corporation? && company.owner == owning_major_corporation(entity)
return true if entity.corporation? && company.owner == entity.corporation.owner
false
end
def sell_company_choice(company)
{ { type: :sell } => "Sell for #{format_currency(company.value)} to the bank" }
end
# Sell a private company back to the bank: the owner receives the face
# value and the company is closed permanently.
def sell_company(company)
price = company.value
owner = company.owner
@log << "#{owner.name} sells #{company.name} for #{format_currency(price)} to the bank"
@bank.spend(price, owner)
company.close!
end
def current_cr_multipler
index = [@cr_counter - 1, 0].max
CR_MULTIPLIER[index]
end
def custom_end_game_reached?
@cr_counter == 6
end
def game_ending_description
_, after = game_end_check
return unless after
'Game Ends at conclusion of this Company Round'
end
def starting_nodes(corporation)
case corporation.id
when 'V'
[hex_by_id('A17').tile.cities[1], hex_by_id('G3').tile.cities.first]
when 'D'
[hex_by_id('A17').tile.cities[2], hex_by_id('F24').tile.cities[1]]
when 'G'
[hex_by_id('A17').tile.cities.first, hex_by_id('I11').tile.cities.first]
when 'W'
[hex_by_id('F24').tile.cities.first, hex_by_id('I1').tile.cities.first]
end
end
# Grant +player+ a bank loan rounded up to the next multiple of 100.
# Debt is tracked at twice the loan amount (see increase_debt), which
# reduces the player's final value.
def take_loan(player, amount)
  loan_amount = amount.fdiv(100).ceil * 100
  increase_debt(player, loan_amount)
  @log << "#{player.name} takes a loan of #{format_currency(loan_amount)}. " \
          "The player value is decreased by #{format_currency(loan_amount * 2)}."
  @bank.spend(loan_amount, player)
end
# Close every tram (line) corporation that was never bought by a major
# corporation.
def remove_open_tram_corporations
  @log << '-- All major corporations owns 3 line corporations --'
  unowned = @all_tram_corporations.reject { |corp| owning_major_corporation(corp) }
  unowned.each do |corp|
    close_corporation(corp)
    corp.close!
  end
end
def timeline
@timeline = ['Green tiles available in LR2, brown tiles in LR4 and grey tiles in LR5.',
'Maintenance cost increase when first train bought:',
'Red → Yellow: -50 ',
'Pink → Yellow: -200 | Orange: -100 | Red: -50 ',
'Purple → Yellow: -400 | Orange: -300 | Red: -100 | Purple +200 '].freeze
end
end
end
end
end
| 31.722084 | 116 | 0.524171 |
1ce96c2c38c0698c38fe2f4c8d88946e9d76a985 | 14,851 |
require 'simple-spreadsheet-extractor'
class DataFilesController < ApplicationController
layout :get_layout
include IndexPager
include SysMODB::SpreadsheetExtractor
include MimeTypesHelper
include DotGenerator
include Seek::AssetsCommon
before_filter :find_assets, :only => [ :index ]
before_filter :find_and_authorize_requested_item, :except => [ :index, :new, :upload_for_tool, :upload_from_email, :create, :request_resource, :preview, :test_asset_url, :update_annotations_ajax]
before_filter :find_display_asset, :only=>[:show,:explore,:download,:matching_models]
skip_before_filter :verify_authenticity_token, :only => [:upload_for_tool, :upload_from_email]
before_filter :xml_login_only, :only => [:upload_for_tool,:upload_from_email]
#has to come after the other filters
include Seek::Publishing::PublishingCommon
include Seek::BreadCrumbs
def convert_to_presentation
@data_file = DataFile.find params[:id]
@presentation = @data_file.to_presentation
respond_to do |format|
if [email protected]_record?
disable_authorization_checks do
# first reload all associations which are already assigned to the presentation. Otherwise, all associations will be destroyed when data file is destroyed
@data_file.reload
@data_file.destroy
end
ActivityLog.create :action=>"create",:culprit=>User.current_user,:activity_loggable=>@presentation,:controller_name=>controller_name.downcase
flash[:notice]="#{t('data_file')} '#{@presentation.title}' is successfully converted to #{t('presentation')}"
format.html { redirect_to presentation_path(@presentation) }
else
flash[:error] = "#{t('data_file')} failed to convert to #{t('presentation')}!!"
format.html {
redirect_to data_file_path @data_file
}
end
end
end
def plot
sheet = params[:sheet] || 2
@csv_data = spreadsheet_to_csv(open(@data_file.content_blob.filepath),sheet,true)
respond_to do |format|
format.html
end
end
def new_version
if handle_upload_data
comments=params[:revision_comment]
respond_to do |format|
if @data_file.save_as_new_version(comments)
create_content_blobs
#Duplicate studied factors
factors = @data_file.find_version(@data_file.version-1).studied_factors
factors.each do |f|
new_f = f.dup
new_f.data_file_version = @data_file.version
new_f.save
end
flash[:notice] = "New version uploaded - now on version #{@data_file.version}"
if @data_file.is_with_sample?
bio_samples = @data_file.bio_samples_population @data_file.samples.first.institution_id if @data_file.samples.first
unless bio_samples.errors.blank?
flash[:notice] << "<br/> However, Sample database population failed."
flash[:error] = bio_samples.errors.html_safe
end
end
else
flash[:error] = "Unable to save new version"
end
format.html {redirect_to @data_file }
end
else
flash[:error]=flash.now[:error]
redirect_to @data_file
end
end
# DELETE /models/1
# DELETE /models/1.xml
# NOTE(review): the route comments above mention /models/ -- likely copied
# from the models controller; the actual routes are /data_files/.
# Destroys the data file, then redirects to the index (html) or returns
# 200 OK (xml).
def destroy
#FIXME: Double check auth is working for deletion. Also, maybe should only delete if not associated with any assays.
@data_file.destroy
respond_to do |format|
format.html { redirect_to(data_files_path) }
format.xml { head :ok }
end
end
# GET /data_files/new
# Builds a blank DataFile for the upload form. Only logged-in members of a
# known project/institution/work group may create content; everyone else is
# sent back to the index with an error flash.
def new
@data_file = DataFile.new
@data_file.parent_name = params[:parent_name]
# flag consumed by the sample-extraction workflow -- TODO confirm upstream usage
@data_file.is_with_sample= params[:is_with_sample]
@page_title = params[:page_title]
respond_to do |format|
if User.logged_in_and_member?
format.html # new.html.erb
else
flash[:error] = "You are not authorized to upload new Data files. Only members of known projects, institutions or work groups are allowed to create new content."
format.html { redirect_to data_files_path }
end
end
end
def upload_for_tool
if handle_upload_data
params[:data_file][:project_ids] = [params[:data_file].delete(:project_id)] if params[:data_file][:project_id]
@data_file = DataFile.new params[:data_file]
#@data_file.content_blob = ContentBlob.new :tmp_io_object => @tmp_io_object, :url=>@data_url
@data_file.policy = Policy.new_for_upload_tool(@data_file, params[:recipient_id])
if @data_file.save
@data_file.creators = [current_user.person]
create_content_blobs
#send email to the file uploader and receiver
Mailer.file_uploaded(current_user,Person.find(params[:recipient_id]),@data_file,base_host).deliver
flash.now[:notice] ="#{t('data_file')} was successfully uploaded and saved." if flash.now[:notice].nil?
render :text => flash.now[:notice]
else
errors = (@data_file.errors.map { |e| e.join(" ") }.join("\n"))
render :text => errors, :status => 500
end
end
end
def upload_from_email
if current_user.is_admin? && Seek::Config.admin_impersonation_enabled
User.with_current_user Person.find(params[:sender_id]).user do
if handle_upload_data
@data_file = DataFile.new params[:data_file]
@data_file.policy = Policy.new_from_email(@data_file, params[:recipient_ids], params[:cc_ids])
if @data_file.save
@data_file.creators = [User.current_user.person]
create_content_blobs
flash.now[:notice] ="#{t('data_file')} was successfully uploaded and saved." if flash.now[:notice].nil?
render :text => flash.now[:notice]
else
errors = (@data_file.errors.map { |e| e.join(" ") }.join("\n"))
render :text => errors, :status => 500
end
end
end
else
render :text => "This user is not permitted to act on behalf of other users", :status => :forbidden
end
end
def create
if handle_upload_data
@data_file = DataFile.new params[:data_file]
@data_file.policy.set_attributes_with_sharing params[:sharing], @data_file.projects
Planout.instance.log_event('create', {
'sessionid' => request.session_options[:id],
'treatment' => session[:planout_data][:template][:title],
'inputs' => session[:planout_input],
'trial' => session[:planout_trial],
'with_who' => params[:with_who],
'when_to' => params[:when_to],
'data_file_title' => params[:data_file][:title],
})
assay_ids = params[:assay_ids] || []
if @data_file.save
update_annotations @data_file
update_scales @data_file
create_content_blobs
# update attributions
Relationship.create_or_update_attributions(@data_file, params[:attributions])
# update related publications
Relationship.create_or_update_attributions(@data_file, params[:related_publication_ids].collect { |i| ["Publication", i.split(",").first] }, Relationship::RELATED_TO_PUBLICATION) unless params[:related_publication_ids].nil?
#Add creators
AssetsCreator.add_or_update_creator_list(@data_file, params[:creators])
if !@data_file.parent_name.blank?
render :partial => "assets/back_to_fancy_parent", :locals => {:child => @data_file, :parent_name => @data_file.parent_name, :is_not_fancy => true}
else
respond_to do |format|
flash[:notice] = "#{t('data_file')} was successfully uploaded and saved." if flash.now[:notice].nil?
#parse the data file if it is with sample data
if @data_file.is_with_sample
bio_samples = @data_file.bio_samples_population params[:institution_id]
#@bio_samples = bio_samples
#Rails.logger.warn "BIO SAMPLES ::: " + @bio_samples.treatments_text
unless bio_samples.errors.blank?
flash[:notice] << "<br/> However, Sample database population failed."
flash[:error] = bio_samples.errors.html_safe
#respond_to do |format|
# format.html{
# render :action => "new"
# }
# end
end
end
assay_ids.each do |text|
a_id, r_type = text.split(",")
@assay = Assay.find(a_id)
if @assay.can_edit?
@assay.relate(@data_file, RelationshipType.find_by_title(r_type))
end
end
# format.html { redirect_to data_file_path(@data_file) }
# format.html { redirect_to frame_experiment_path(:usability_ask) }
format.html { redirect_to new_feedback_path(:with_who => params[:with_who], :when_to => params[:when_to]) }
end
end
else
respond_to do |format|
format.html {
render :action => "new"
}
end
end
else
handle_upload_data_failure
end
end
# GET /data_files/1
# Records usage (keeps the previous last_used_at for the view, then bumps
# it via just_used) and renders the file in html/xml/rdf/json.
def show
# store timestamp of the previous last usage
@last_used_before_now = @data_file.last_used_at
@data_file.just_used
#Rails.logger.warn "template in data_files_controller/show : #{params[:parsing_template]}"
respond_to do |format|
format.html #{render :locals => {:template => params[:parsing_template]}}# show.html.erb
format.xml
format.rdf { render :template=>'rdf/show'}
format.json
end
end
def edit
end
# PUT /data_files/1
# Updates a DataFile's metadata (never its content blob), its sharing
# policy, attributions, related publications, creators, and its assay
# associations. On failure re-renders the edit form.
def update
  # remove protected columns (including a "link" to content blob - actual data cannot be updated!)
  if params[:data_file]
    [:contributor_id, :contributor_type, :original_filename, :content_type, :content_blob_id, :created_at, :updated_at, :last_used_at].each do |column_name|
      params[:data_file].delete(column_name)
    end

    # update 'last_used_at' timestamp on the DataFile
    params[:data_file][:last_used_at] = Time.now
  end

  publication_params = params[:related_publication_ids].nil? ? [] : params[:related_publication_ids].collect { |i| ["Publication", i.split(",").first] }

  update_annotations @data_file
  update_scales @data_file

  assay_ids = params[:assay_ids] || []
  respond_to do |format|
    @data_file.attributes = params[:data_file]

    if params[:sharing]
      @data_file.policy_or_default
      @data_file.policy.set_attributes_with_sharing params[:sharing], @data_file.projects
    end

    if @data_file.save
      # update attributions
      Relationship.create_or_update_attributions(@data_file, params[:attributions])
      # update related publications
      Relationship.create_or_update_attributions(@data_file, publication_params, Relationship::RELATED_TO_PUBLICATION)
      # update creators
      AssetsCreator.add_or_update_creator_list(@data_file, params[:creators])
      flash[:notice] = "#{t('data_file')} metadata was successfully updated."
      format.html { redirect_to data_file_path(@data_file) }
      # Update new assay_asset
      a_ids = []
      assay_ids.each do |text|
        a_id, r_type = text.split(",")
        a_ids.push(a_id)
        @assay = Assay.find(a_id)
        if @assay.can_edit?
          @assay.relate(@data_file, RelationshipType.find_by_title(r_type))
        end
      end
      # Destroy AssayAssets that aren't needed.
      # (Bug fix: use `&&` rather than the low-precedence keyword `and`.)
      assay_assets = @data_file.assay_assets
      assay_assets.each do |assay_asset|
        if assay_asset.assay.can_edit? && !a_ids.include?(assay_asset.assay_id.to_s)
          AssayAsset.destroy(assay_asset.id)
        end
      end
    else
      format.html {
        render :action => "edit"
      }
    end
  end
end
# GET /data_files/1/data
# Streams the spreadsheet content of an Excel data file as XML or CSV.
# Non-spreadsheet files are redirected back to the file page with an error.
def data
  @data_file = DataFile.find(params[:id])
  sheet = params[:sheet] || 1
  trim = params[:trim] || false
  content_blob = @data_file.content_blob
  mime_extensions = mime_extensions(content_blob.content_type)
  if !(["xls","xlsx"] & mime_extensions).empty?
    respond_to do |format|
      format.html #currently complains about a missing template, but we don't want people using this for now - its purely XML
      # Open the file only for the format that needs it and close it again.
      # (Bug fix: the original `file = open(...)` leaked the handle until GC
      # and opened the file even for the html format.)
      format.xml { File.open(content_blob.filepath) { |f| render :xml => spreadsheet_to_xml(f) } }
      format.csv { File.open(content_blob.filepath) { |f| render :text => spreadsheet_to_csv(f, sheet, trim) } }
    end
  else
    respond_to do |format|
      flash[:error] = "Unable to view contents of this data file"
      format.html { redirect_to @data_file,:format=>"html" }
    end
  end
end
def preview
element=params[:element]
data_file=DataFile.find_by_id(params[:id])
render :update do |page|
if data_file.try :can_view?
page.replace_html element,:partial=>"assets/resource_preview",:locals=>{:resource=>data_file}
else
page.replace_html element,:text=>"Nothing is selected to preview."
end
end
end
def explore
if @display_data_file.contains_extractable_spreadsheet?
respond_to do |format|
format.html
end
else
respond_to do |format|
flash[:error] = "Unable to view contents of this data file"
format.html { redirect_to data_file_path(@data_file,:version=>@display_data_file.version) }
end
end
end
# Roll back a bio-sample population run: removes the specimens and samples
# created from the parsed spreadsheet (samples are detached from their
# assays before destruction).
def clear_population(bio_samples)
  specimens = Specimen.find_all_by_title(bio_samples.instance_values["specimen_names"].values)
  samples = Sample.find_all_by_title(bio_samples.instance_values["sample_names"].values)
  samples.each do |sample|
    sample.assays.clear
    sample.destroy
  end
  specimens.each(&:destroy)
end
# Lists models that may be relevant to this data file, filtered down to the
# ones the current user is authorised to view, and reports the count via a
# flash notice.
def matching_models
  #FIXME: should use the correct version
  @matching_model_items = @data_file.matching_models
  #filter authorization
  ids = @matching_model_items.collect &:primary_key
  models = Model.find_all_by_id(ids)
  # Keep only the match items whose underlying model survived the
  # view-authorisation filter.
  authorised_ids = Model.authorize_asset_collection(models,"view").collect &:id
  @matching_model_items = @matching_model_items.select{|mdf| authorised_ids.include?(mdf.primary_key.to_i)}
  flash.now[:notice]="#{@matching_model_items.count} #{t('model').pluralize} were found that may be relevant to this #{t('data_file')} "
  respond_to do |format|
    format.html
  end
end
protected
# Maps controller-specific action names onto the generic permission verbs
# used by the authorisation layer before delegating to the superclass:
# "data" requires "download" rights, "matching_models" requires "view".
def translate_action action
  case action
  when "data"
    action = "download"
  when "matching_models"
    action = "view"
  end
  super action
end
# Before-filter: restricts an action to sessions established through the
# XML login flow; other sessions get an error flash and a redirect to root.
def xml_login_only
  unless session[:xml_login]
    flash[:error] = "Only available when logged in via xml"
    redirect_to root_url
  end
end
# Returns the layout name stored in the planout session data.
# NOTE(review): assumes session[:planout_data] and its :template hash are
# always present — raises NoMethodError on nil otherwise; confirm upstream
# filters guarantee this before relying on it.
def get_layout
  session[:planout_data][:template][:layout]
end
end
| 34.377315 | 231 | 0.653963 |
e274d94047792a0db3c476593beb0e200449b4e6 | 4,316 | # frozen_string_literal:true
module SheepAst
  # Helper mixin implementing a minimal unified-diff style patching engine.
  #
  # Patch file format (see patching_file): each chunk starts with a
  # "@@ ... @@" header line, followed by context lines, "-" lines to remove
  # and "+" lines to add.
  module Helper
    # Applies every patch registered in the datastore under
    # :_sheep_patch_file_HL ({target_file => patch_glob}) and marks each
    # processed file in :_sheep_patched_file_HL.
    #
    # Fixed: the method-level rescue referenced an unbound exception (`e`)
    # and block-local variables (`file`, `res`), which raised NameError
    # inside the handler; `file`/`res` are now hoisted and the exception is
    # bound with `rescue => e`. Writes use File.write so handles are closed.
    def patch_process(datastore)
      file = nil  # hoisted so the rescue clause can name the failing file
      res = ''
      datastore.value(:_sheep_patch_file_HL)&.each do |target, pfile|
        file = target
        # NOTE: `return` (not `next`) preserves the original behaviour of
        # aborting the whole loop at the first already-patched file.
        return if datastore.find(:_sheep_patched_file_HL, file)

        res = ''
        process = false
        File.open(file) do |f|
          res = f.read
          process = true
        end

        if process
          res, patched = patching_files(pfile, res)
          if patched
            File.write(file, res)
            linfo "Finished patching for #{file}"
          end
        end
        datastore.assign(:_sheep_patched_file_HL, file, true)
      end
    rescue => e
      lfatal "Exception was occured at preparing patching files. info => #{e.message}"
      File.write("#{file}_patching_fail", res)
    end

    # Applies every patch file matching +pattern+ (a Dir glob) to +str+.
    # Returns [patched_string, true], or [nil, false] when no patch matched.
    def patching_files(pattern, str)
      work_str = str
      files = Dir[pattern]
      if files.empty?
        return nil, false
      else
        linfo "Patch file #{files} are found", :blue
      end

      files.each do |file|
        work_str = patching_file(file, work_str)
      end
      return work_str, true
    end

    # Applies a single patch file to +str+; returns the patched string, or
    # nil if the patch file does not exist.
    def patching_file(patch_file, str)
      # Patch file format
      # @@ ... @@ xxxx
      # abcde
      # -xxx
      # -yyy
      # +zzz
      if File.exist?(patch_file)
        patch_str = File.read(patch_file)  # was File.open(...).read — leaked the handle
        linfo "patch file: #{patch_file} is applying", :blue
        return patching(str, patch_str)
      else
        return nil
      end
    end

    # Splits a patch (as an array of lines) into chunks keyed by the index of
    # each "@@ ... @@" header, mapping to the index of the chunk's last line.
    def patch_chunk(str_arr)
      chunks = {}
      start_flag = false
      start_index = 0
      str_arr.each_with_index { |line, index|
        if line.match(/^@@.*@@/)
          if start_flag
            chunks[start_index] = index - 1
          else
            start_flag = true
          end
          start_index = index
        end
        if index + 1 == str_arr.size
          chunks[start_index] = index
        end
      }
      return chunks
    end

    # Extracts the lines used to locate a chunk in the target: context and
    # "-" lines (with the leading marker stripped); "+" lines are skipped.
    def patch_search_str(patch_arr, p_from, p_to)
      arr = patch_arr[(p_from + 1)..p_to]
      res = []
      arr.each do |x|
        next if x == '' || x[0] == '+'
        res << x[1..-1]
      end
      return res
    end

    # Compares two line arrays, ignoring newlines and trailing whitespace.
    # Assumes arr2 is at least as long as arr1.
    def same_array?(arr1, arr2)
      arr1.each_with_index do |_, index|
        if arr1[index].gsub("\n", '').rstrip != arr2[index].gsub("\n", '').rstrip
          return false
        end
      end
      return true
    end

    # Returns the index in +str_arr+ (searching from +offset+) where
    # +search_arr+ matches, or nil when it never matches.
    def patching_point(offset, str_arr, search_arr)
      comp_arr = str_arr[offset..-1]
      comp_arr.each_with_index do |_, index|
        if same_array?(comp_arr[index..(index + search_arr.size - 1)], search_arr)
          return offset + index
        end
      end
      return nil
    end

    # Rebuilds the file: for each matched range, keeps the lines before the
    # match, emits the chunk's context and "+" lines, and skips the replaced
    # region; finally appends the untouched tail.
    def patching_process(str_arr, patching_range, patch_arr, chunks)
      res = []
      chunks_start_points = chunks.keys.map { |x| x + 1 }
      chunks_end_points = chunks.values
      start_index = 0
      index = 0
      patching_range.each do |start, tail|
        # Fixed off-by-one: the old `start_index < start - 1` guard dropped
        # a prefix of exactly one line (indices start_index..start-1 are
        # non-empty whenever start_index < start).
        if start_index < start
          res += str_arr[start_index..start - 1]
        end
        patch_arr[chunks_start_points[index]..chunks_end_points[index]]&.each do |elem|
          next if elem.empty?
          res << elem[1..-1] if elem[0] != '-'  # keep context and "+" lines
        end
        start_index = tail
        index += 1
      end
      res += str_arr[start_index..-1]
      return res.join
    end

    # Top-level patch application: locates each chunk exactly once in +str+
    # (raising when a chunk matches zero or more than one position) and
    # returns the patched text.
    def patching(str, patch_str)
      str_arr = str.lines
      if patch_str[-1] != "\n"
        patch_str += "\n"
      end
      patch_arr = patch_str.lines
      chks = patch_chunk(patch_arr)
      index = 0
      next_index = 0
      patching_range = {}
      chks.each do |k, v|
        search = patch_search_str(patch_arr, k, v)
        index = patching_point(next_index, str_arr, search)
        next_index = index.to_i + search.size
        if index.nil?
          application_error "Could not find matched context in to patch file for #{search}."
        end
        # Ambiguity check: the same context must not match twice.
        validation_index = patching_point(next_index, str_arr, search)
        if validation_index
          application_error "More than two points are matched by context: #{search} "
        end
        patching_range[index] = next_index
      end
      return patching_process(str_arr, patching_range, patch_arr, chks)
    end
  end
end
| 25.844311 | 92 | 0.56835 |
8748d7863ede02cebba9b5a6ebd539df5a29142f | 4,866 | # encoding: utf-8
namespace :l10n do
# flattens the yaml hash into 1 dimensional hash
# flattens the yaml hash into 1 dimensional hash
#
# Nested keys are joined with dots, e.g. {"a" => {"b" => 1}} becomes
# {"a.b" => 1}. +namespace+ is the dotted prefix accumulated during
# recursion (nil at the top level).
#
# Fixed: the Hash test used `value.class == Hash`, which misses Hash
# subclasses (e.g. ActiveSupport::HashWithIndifferentAccess) and would
# store them as leaf values; `is_a?(Hash)` covers subclasses too.
def flatten(hash, namespace=nil)
  flattened = {}
  namespace = namespace + '.' if namespace
  hash.each do |key, value|
    if value.is_a?(Hash)
      flattened.merge!(flatten(value, "#{namespace}#{key}"))
    else
      flattened["#{namespace}#{key}"] = value
    end
  end
  return flattened
end
# given a nested hash and key as string, find the value
# key can be indicate nesting by being dot separated
# given a nested hash and key as string, find the value
# key can indicate nesting by being dot separated, e.g. "a.b.c".
# Returns nil as soon as any intermediate lookup yields nil.
def getValue(hash, key)
  current = hash
  key.to_s.split('.').each do |part|
    current = current.nil? ? nil : current[part]
  end
  current
end
# set a value in the hash given a string as key much like getValue
# set a value in the hash given a string as key much like getValue;
# intermediate hashes are created on demand for missing (nil) levels.
def setValue(hash, key, value)
  *parents, leaf = key.split('.')
  target = hash
  parents.each do |part|
    target[part] = {} if target[part].nil?
    target = target[part]
  end
  target[leaf] = value
end
desc "Determine diff of keys between two yml files. Call by keys_diff[filename1, filename2]"
# Prints the translation keys present in one locale file but missing from
# the other, in both directions. The top-level language key is stripped
# before comparison so files for different languages can be diffed.
task :keys_diff, :file1, :file2 do |t, args|
  file1 = YAML::load(File.open(args[:file1]))
  file2 = YAML::load(File.open(args[:file2]))
  # ignore language designation, so start with first.second
  file1_keys = flatten(file1.first.second).keys
  file2_keys = flatten(file2.first.second).keys
  puts "Keys in #{args[:file2]} that are missing in #{args[:file1]}:"
  puts (file2_keys - file1_keys).sort
  puts "\n\nKeys in #{args[:file1]} that are missing in #{args[:file2]}:"
  puts (file1_keys - file2_keys).sort
end
desc "Convert CSV to YAML file, pass in CSV filename and output YAML filename"
# Reads a translations CSV (columns: "key" plus one column per language) and
# writes the nested YAML for the language given in args[:lang]. Rows whose
# key contains whitespace are skipped (assumed malformed).
task :csv_to_yaml, :lang, :csvfile, :yamlfile do |t, args|
  require 'csv_bridge'
  phrases = {}
  row_index = 0
  CSVBridge.foreach(args[:csvfile], {:headers => :first_row, :return_headers => true, :encoding => 'u'}) do |row|
    row_index += 1
    # Skipping row if key has whitespace of if it is header row
    if !row.header_row? && (row['key'].strip =~ /\s/) == nil
      phrase = row[args[:lang]]
      phrase = phrase.strip if phrase
      setValue(phrases, "#{args[:lang]}.#{row['key']}", phrase)
    else
      puts "Skipping row ##{row_index}: #{row.to_csv[0..60]}"
    end
  end
  File.open(args[:yamlfile], "w") do |f|
    f.write("# DO NOT EDIT THIS FILE MANUALLY #\n")
    f.write("# This file has been auto-generated from our Google Docs Spreadsheet.\n")
    f.write(phrases.to_yaml)
  end
end
# For every key present in file2 but missing from file1, emits YAML (to
# stdout) containing the English fallback value under file1's language key —
# useful for seeding untranslated entries.
task :get_english_yaml, :file1, :file2, :en_file do |t, args|
  file1 = YAML::load(File.open(args[:file1]))
  file2 = YAML::load(File.open(args[:file2]))
  en = YAML::load(File.open(args[:en_file])).first.second
  # ignore language designation, so start with first.second
  file1_keys = flatten(file1.first.second).keys
  file2_keys = flatten(file2.first.second).keys
  newHash = {}
  (file2_keys - file1_keys).each do |key|
    value = getValue(en, key)
    setValue(newHash, "#{file1.first.first}.#{key}", value)
  end
  puts newHash.to_yaml
end
desc "Convert a YAML file to CSV. Second parameter is secondary language to include, defaults to English."
# Emits (to stdout) a CSV with columns key / reference language / target
# language, prefixed by translator instructions. Keys come from the union of
# the target language file and the "he.yml" user-facing reference file.
task :yaml_to_csv, :language1, :language2 do |t, args|
  require 'csv_bridge'
  args = {:language2 => 'en'}.merge(args)
  path = Rails.root + "config/locales/allourideas/"
  file1 = YAML::load(File.open(path + "#{args[:language1]}.yml"))
  file2 = YAML::load(File.open(path + "#{args[:language2]}.yml"))
  # NOTE(review): he.yml appears to be used as the canonical list of
  # user-facing keys here — confirm this is intentional.
  userfacing = YAML::load(File.open(path + "he.yml"))
  hash1 = flatten(file1.first.second)
  hash2 = flatten(file2.first.second)
  userfacing_hash = flatten(userfacing.first.second)
  csv_string = CSVBridge.generate do |csv|
    csv << ["key", args[:language2], args[:language1]]
    # The following multi-line string is translator-facing instructions
    # embedded as the second CSV row — leave its content untouched.
    csv << ['Thank you for helping to internationalize allourideas.org. This spreadsheet has three columns. The first column is a "key". This column is for our code and you should not worry about it or change it. The second column is the English phrase that appears on the website. The final column is where you should add the appropriate phrase in your language.
Some of the strings will include "special characters" like "\n"; you should just ignore these special characters and change the text around them.
For example, the key
"X_votes_on_Y_ideas
has this representation in English
%{1}%{votes_count}%{_1} votes on %{2}%{ideas_count}%{_2} ideas
If you were translating to French you would write something like:
%{1}%{votes_count}%{_1} voix sur les %{2}%{ideas_count}%{_2} idées
Thank you again for your help. This process can be confusing, so please do not hesitate to ask questions.
']
    (hash1.keys + (userfacing_hash.keys - hash1.keys)).sort.each do |key|
      csv << [key, hash2[key], hash1[key]]
    end
  end
  puts csv_string
end
end
| 37.72093 | 371 | 0.660707 |
61ed2823883e17c13680a7d5771e1c0fdecc0b02 | 972 | class BooksController < ApplicationController
# GET /books — loads every book for the HTML index view.
def index
  @books = Book.all
end
# GET /books/list — returns all books as JSON (API counterpart of #index).
def list
  render :json => Book.all
end
# GET /books/search?keyword=... — books whose title contains the keyword,
# rendered as JSON.
#
# Fixed: the previous implementation concatenated params[:keyword] directly
# into the SQL string, allowing SQL injection (and breaking on quotes). A
# bound placeholder lets the adapter quote the value. As before, LIKE
# wildcards (% and _) inside the keyword still act as wildcards.
def search
  render :json => Book.where('title LIKE ?', "%#{params[:keyword]}%")
end
# GET /books/pending?status=... — books with the given status, as JSON.
#
# Fixed: params[:status] was interpolated into a raw SQL string (SQL
# injection); a hash condition lets Active Record quote the value.
def pending
  render :json => Book.where(:status => params[:status])
end
# GET — comments attached to a book, as JSON.
#
# Fixed: params[:book_id] was concatenated into raw SQL (SQL injection);
# a hash condition lets Active Record quote/cast the id.
def comments
  render :json => Comment.where(:book_id => params[:book_id])
end
# POST — registers a purchase request for a new book and responds with the
# created record (or its validation errors) as JSON.
# NOTE(review): camelCase action name and the logger.debug noise below look
# like leftovers from development; consider renaming to add_request and
# removing the debug output (routes must be updated together with a rename).
def addRequest
  #params = params.require(:data).permit(:book_id, :title, :name, :text, :evaluation)
  # Strong parameters: only the whitelisted book attributes are accepted.
  book = params[:data].permit(:purchaser, :title, :author, :published_date, :base, :isbn, :status)
  logger.debug(book)
  logger.debug(params)
  logger.debug('---')
  logger.debug(params[:data])
  # NOTE(review): to_h on unpermitted parameters raises in Rails 5+ — this
  # value is only logged, never used; confirm it can be removed.
  data = params[:data].to_h
  logger.debug(data)
  render :json => Book.create(book)
end
# POST — updates the status of a single book; responds with update's
# true/false result as JSON.
# NOTE(review): Book.find_by returns nil for an unknown id, so .update then
# raises NoMethodError (HTTP 500). Confirm callers always send valid ids or
# add a nil guard.
def changeStatus
  book = params[:data].permit(:id, :status)
  render :json => Book.find_by(id: book[:id]).update(status:book[:status])
end
end
| 24.3 | 100 | 0.631687 |
4a88f76fd0b12be1a7cd5c4b381a2a280d6e4d7a | 3,443 | # encoding: utf-8
# This file is distributed under Ting Yun's license terms.
require File.expand_path(File.join(File.dirname(__FILE__),'..','test_helper'))
require 'ting_yun/logger/startup_logger'
require 'ting_yun/agent'
require 'ting_yun/agent/agent'
module TingYun
# Unit tests for the TingYun::Agent facade: metric recording (numeric and
# hash forms), config reset, and manual agent start-up.
class AgentTest < Minitest::Test
  include TingYun::Agent

  # Point the facade at the singleton agent before each test.
  def setup
    @agent = TingYun::Agent
    @agent.agent = TingYun::Agent::Agent.instance
  end

  # Clear any metrics buffered during the test so tests stay independent.
  def teardown
    @agent.agent.drop_buffered_data
  end

  # reset_config should discard values applied via with_config.
  def test_reset_config
    with_config(:test => "test") do
      assert_equal "test", @agent.config[:test]
    end
    @agent.reset_config
    assert_nil @agent.config[:test]
  end

  # Recording a plain number should accumulate count/total/min/max/
  # exclusive-time/sum-of-squares in the stats engine, per metric name.
  def test_record_metric_with_number
    @agent.record_metric("custom/number/test",1);
    stats = @agent.agent.stats_engine.instance_variable_get(:@stats_hash)["custom/number/test"]
    assert stats
    assert_equal 1, stats.call_count
    assert_equal 1, stats.total_call_time
    assert_equal 1, stats.min_call_time
    assert_equal 1, stats.max_call_time
    assert_equal 1, stats.total_exclusive_time
    assert_equal 1, stats.sum_of_squares

    # A second sample on the same metric aggregates into the same slot.
    @agent.record_metric("custom/number/test",3);
    stats = @agent.agent.stats_engine.instance_variable_get(:@stats_hash)["custom/number/test"]
    assert_equal 2, stats.call_count
    assert_equal 4, stats.total_call_time
    assert_equal 1, stats.min_call_time
    assert_equal 3, stats.max_call_time
    assert_equal 4, stats.total_exclusive_time
    assert_equal 10, stats.sum_of_squares

    # A different metric name starts a fresh slot.
    @agent.record_metric("custom/number/test2",1);
    stats = @agent.agent.stats_engine.instance_variable_get(:@stats_hash)["custom/number/test2"]
    assert stats
    assert_equal 1, stats.call_count
    assert_equal 1, stats.total_call_time
    assert_equal 1, stats.min_call_time
    assert_equal 1, stats.max_call_time
    assert_equal 1, stats.total_exclusive_time
    assert_equal 1, stats.sum_of_squares
  end

  # The hash form supplies pre-aggregated stats in one call.
  def test_record_metric_with_hash
    @agent.record_metric("custom/hash/test",count: 2, total: 4, min: 1, max: 3, sum_of_squares: 10);
    stats = @agent.agent.stats_engine.instance_variable_get(:@stats_hash)["custom/hash/test"]
    assert stats
    assert_equal 2, stats.call_count
    assert_equal 4, stats.total_call_time
    assert_equal 1, stats.min_call_time
    assert_equal 3, stats.max_call_time
    assert_equal 4, stats.total_exclusive_time
    assert_equal 10, stats.sum_of_squares
  end

  # manual_start defaults to synchronous start-up with the agent enabled.
  def test_manual_start_default
    mocked_framework
    TingYun::Frameworks.expects(:init_start).with({:'nbs.agent_enabled' => true, :sync_startup => true})
    TingYun::Agent.manual_start
  end

  # Caller-supplied options override the defaults passed to init_start.
  def test_manual_start_with_opts
    TingYun::Frameworks.expects(:init_start).with({:'nbs.agent_enabled' => true, :sync_startup => false})
    TingYun::Agent.manual_start(:sync_startup => false)
  end

  private

  # Stubs Frameworks::Framework.instance with a minimal fake exposing a
  # license key, a collector address, and no-op [] / fetch lookups.
  def mocked_framework
    server = TingYun::Support::Collector.new('localhost', 3000)
    framework = OpenStruct.new(:license_key => 'abcdef',
      :server => server)
    framework.instance_eval do
      def [](key)
        nil
      end
      def fetch(k,d)
        nil
      end
    end
    TingYun::Frameworks::Framework.stubs(:instance).returns(framework)
    framework
  end
end
end | 31.87963 | 107 | 0.688644 |
ffa4882c457414a807448a910abbb6660be91a67 | 220 | class CreateFriendships < ActiveRecord::Migration[6.0]
# Creates the join table backing user-to-user friendships.
def change
  create_table :friendships do |t|
    t.integer :user_id   # presumably the requesting user — confirm with model
    t.integer :friend_id # presumably the requested user — confirm with model
    t.boolean :confirm   # NOTE(review): no default/null constraint — nil until set; confirm intended
    t.timestamps
  end
end
end
| 18.333333 | 54 | 0.668182 |
e8589140e20bd93f485990ad32d91d1294777a2d | 87 | require 'rails_helper'
# Placeholder spec for SubscribersController — no examples written yet.
RSpec.describe SubscribersController, type: :controller do
end
| 17.4 | 58 | 0.827586 |
ff03e0f7d8ca8c72889754172acf5053781ff766 | 10,536 | # == Schema Information
#
# Table name: people
#
# id :string(22) not null, primary key
# uuid :binary(16) not null
# community_id :integer not null
# created_at :datetime
# updated_at :datetime
# is_admin :integer default(0)
# locale :string(255) default("fi")
# preferences :text(65535)
# active_days_count :integer default(0)
# last_page_load_date :datetime
# test_group_number :integer default(1)
# username :string(255) not null
# email :string(255)
# encrypted_password :string(255) default(""), not null
# legacy_encrypted_password :string(255)
# reset_password_token :string(255)
# reset_password_sent_at :datetime
# remember_created_at :datetime
# sign_in_count :integer default(0)
# current_sign_in_at :datetime
# last_sign_in_at :datetime
# current_sign_in_ip :string(255)
# last_sign_in_ip :string(255)
# password_salt :string(255)
# given_name :string(255)
# family_name :string(255)
# display_name :string(255)
# description :text(65535)
# image_file_name :string(255)
# image_content_type :string(255)
# image_file_size :integer
# image_updated_at :datetime
# image_processing :boolean
# facebook_id :string(255)
# authentication_token :string(255)
# community_updates_last_sent_at :datetime
# min_days_between_community_updates :integer default(1)
# deleted :boolean default(FALSE)
# cloned_from :string(22)
# person_type :string(255)
# phone_number2 :string(255)
# children_number :string(255)
# children_gender :string(255)
# children_age :string(255)
# children_need :string(255)
# phone_number1 :string(255)
# grade_level :string(255)
# university_degree :string(255)
# university_name :string(255)
# arrest :string(255)
# convicted_felony :string(255)
# certificate_issue :datetime
# certificate_expiration :datetime
# parent_contact_type :string(255)
# certificate_image_file_name :string(255)
# certificate_image_content_type :string(255)
# certificate_image_file_size :integer
# certificate_image_updated_at :datetime
# certificate_image_processing :boolean
# subject_matter :string(255)
# age :integer
#
# Indexes
#
# index_people_on_authentication_token (authentication_token)
# index_people_on_community_id (community_id)
# index_people_on_email (email) UNIQUE
# index_people_on_facebook_id (facebook_id)
# index_people_on_facebook_id_and_community_id (facebook_id,community_id) UNIQUE
# index_people_on_id (id)
# index_people_on_reset_password_token (reset_password_token) UNIQUE
# index_people_on_username (username)
# index_people_on_username_and_community_id (username,community_id) UNIQUE
# index_people_on_uuid (uuid) UNIQUE
#
require 'spec_helper'
describe Person, type: :model do
# Shared fixture: created once for the whole group (before(:all)), so
# examples must not mutate it in ways later examples don't expect.
before(:all) do
  #These will be created only once for the whole example group
  @test_person = FactoryGirl.create(:person)
end

it "should be valid" do
  expect(@test_person.class).to eq(Person)
  expect(@test_person).not_to be_nil
  expect(@test_person).to be_valid
end

it "should have an id other than 0" do
  expect(@test_person.id).not_to eq(0)
  # "Test_person.id is 0, possible reason is INT type for id in test DB."
end
# Creation: a valid person persists; an invalid one (bad email) must raise
# and leave nothing behind.
describe "#create" do
  it "should create a person in Sharetribe DB" do
    username = generate_random_username
    p = Person.create!({:username => username,
      community_id: 1,
      :password => "testi",
      :email => "#{username}@example.com",
      "given_name" => "Tero",
      "family_name" => "Turari"})
    expect(Person.find(p.id)).not_to be_nil
    expect(p.username).to eq(username)
  end

  it "should not store anything to Sharetribe DB if creation failed for invalid data" do
    username = generate_random_username
    expect {
      p = nil
      expect {
        p = Person.create!({:username => username,
          community_id: 1,
          :password => "testi",
          :emails => [Email.new(:address => "invalid-email")],
          "given_name" => "Tero",
          "family_name" => "Turari"})
      }.to raise_error(ActiveRecord::RecordInvalid)
      expect(p).to be_nil
    }.not_to change{Person.count}
  end
end

describe "#update_attributes" do
  it "should update the attributes" do
    @test_person.update_attributes({'given_name' => "Totti",
      'family_name' => "Tester",
      'phone_number' => "050-55555555"})
    expect(@test_person.family_name).to eq("Tester")
    expect(@test_person.phone_number).to eq("050-55555555")
  end
end

describe "#create_listing" do
  it "creates a new listing with the submitted attributes" do
    listing = FactoryGirl.create(:listing,
      :title => "Test",
      :author => @test_person,
      :listing_shape_id => 123
    )
    expect(listing.title).to eq("Test")
    expect(@test_person.listings.last).to eq(listing)
  end
end
# Name formatting helpers on Person. The before(:each) overwrites the
# shared fixture's names, so these examples depend on that reset.
describe "name getters" do
  before(:each) do
    @test_person.update_attributes({'given_name' => "Ripa", 'family_name' => "Riuska"})
  end

  it "returns the name of the user" do
    expect(@test_person.name('first_name_with_initial')).not_to be_blank
    expect(@test_person.name('first_name_with_initial')).to eq("Ripa R")
  end

  it "returns the given or the last name of the user" do
    expect(@test_person.given_name).to eq("Ripa")
    expect(@test_person.family_name).to eq("Riuska")
  end

  it "returns the name in desired format" do
    expect(@test_person.name("first_name_with_initial")).to eq("Ripa R")
    expect(@test_person.name("first_name_only")).to eq("Ripa")
    expect(@test_person.name("full_name")).to eq("Ripa Riuska")
  end

  describe "#given_name" do
    it "should return the given name" do
      expect(@test_person.given_name).to eq("Ripa")
    end
    it "should return blank if given name is blank" do
      @test_person.update_attributes({'given_name' => "", 'family_name' => ""})
      expect(@test_person.given_name).to eq("")
    end
  end

  describe "#given_name_or_username" do
    it "should return the given name if it exists" do
      expect(@test_person.given_name_or_username).to eq("Ripa")
    end
    it "should return username if given name is blank" do
      @test_person.update_attributes({'given_name' => "", 'family_name' => ""})
      expect(@test_person.given_name_or_username).to eq(@test_person.username)
    end
  end

  # NOTE(review): this password block is nested inside "name getters" —
  # it looks misplaced; consider moving it to the top level.
  describe "devise valid_password?" do
    it "Test that the hashing works. (makes more sense to test this if ASI digest is used)" do
      expect(FactoryGirl.build(:person).valid_password?('testi')).to be_truthy
      expect(FactoryGirl.build(:person).valid_password?('something_else')).not_to be_truthy
    end
  end
end
# Destroying a person must cascade to their conversations and testimonials.
describe "#delete" do
  it "should delete also related conversations and testimonials" do
    conv = FactoryGirl.create(:conversation)
    conv.participants << @test_person
    conv_id = conv.id
    expect(Conversation.find_by_id(conv_id)).not_to be_nil
    expect(@test_person.conversations).to include(conv)
    tes = FactoryGirl.create(:testimonial, :author => @test_person)
    tes_id = tes.id
    expect(Testimonial.find_by_id(tes_id)).not_to be_nil
    expect(@test_person.authored_testimonials).to include(tes)
    @test_person.destroy

    # check that related stuff was removed too
    expect(Conversation.find_by_id(conv_id)).to be_nil
    expect(Testimonial.find_by_id(tes_id)).to be_nil
  end
end

# latest_pending_email_address: newest unconfirmed email, optionally
# restricted to a community's allowed domains.
describe "#latest_pending_email_address" do
  before (:each) do
    @p = FactoryGirl.create(:person)
  end

  it "should return nil if none pending" do
    expect(@p.latest_pending_email_address()).to be_nil
  end

  it "should return main email if that's pending" do
    @p.emails.each { |email| email.update_attribute(:confirmed_at, nil) }
    expect(@p.latest_pending_email_address()).to match(/kassi_tester\[email protected]/)
  end

  it "should pick the right email to return" do
    c = FactoryGirl.create(:community, :allowed_emails => "@example.com, @ex.ample, @something.else")
    e = FactoryGirl.create(:email, :address => "[email protected]", :confirmed_at => nil, :person => @p)
    e2 = FactoryGirl.create(:email, :address => "[email protected]", :confirmed_at => nil, :person => @p)
    # e3 = FactoryGirl.create(:email, :address => "[email protected]", :confirmed_at => nil, :person => @p)
    expect(@p.latest_pending_email_address(c)).to eq("[email protected]")
  end
end

describe "inherits_settings_from" do
  let(:person) { FactoryGirl.build(:person) }
  let(:community) { FactoryGirl.build(:community, :default_min_days_between_community_updates => 30) }

  it "inherits_settings_from" do
    person.inherit_settings_from(community)
    expect(person.min_days_between_community_updates).to eql(30)
  end
end
| 39.022222 | 110 | 0.584377 |
ac730202b866104fa451dc29864b1aecaad87491 | 7,609 | #
# Author:: Adam Jacob (<[email protected]>)
# Copyright:: Copyright 2008-2016, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef/provider/package"
require "chef/mixin/command"
require "chef/resource/package"
class Chef
  class Provider
    class Package
      # apt package provider: determines installed/candidate versions by
      # parsing `apt-cache policy`, resolves virtual packages via
      # `apt-cache showpkg`, and drives apt-get/dpkg non-interactively.
      class Apt < Chef::Provider::Package

        provides :package, platform_family: "debian"
        provides :apt_package, os: "linux"

        # return [Hash] mapping of package name to Boolean value
        attr_accessor :is_virtual_package

        def initialize(new_resource, run_context)
          super
          @is_virtual_package = {}
        end

        # Builds the current_resource by probing apt for each requested
        # package's installed version.
        def load_current_resource
          @current_resource = Chef::Resource::Package.new(@new_resource.name)
          @current_resource.package_name(@new_resource.package_name)
          check_all_packages_state(@new_resource.package_name)
          @current_resource
        end

        # apt cannot install from a local file; reject :source up front.
        def define_resource_requirements
          super

          requirements.assert(:all_actions) do |a|
            a.assertion { !@new_resource.source }
            a.failure_message(Chef::Exceptions::Package, "apt package provider cannot handle source attribute. Use dpkg provider instead")
          end
        end

        def default_release_options
          # Use apt::Default-Release option only if provider supports it
          "-o APT::Default-Release=#{@new_resource.default_release}" if @new_resource.respond_to?(:default_release) && @new_resource.default_release
        end

        # Parses `apt-cache policy` for one package. When the candidate is
        # "(none)" the package is treated as virtual and resolved through
        # its (single) provider; zero or multiple providers raise.
        # Returns {installed_version:, installed:, candidate_version:,
        # is_virtual_package:}.
        def check_package_state(pkg)
          is_virtual_package = false
          installed = false
          installed_version = nil
          candidate_version = nil

          shell_out_with_timeout!("apt-cache#{expand_options(default_release_options)} policy #{pkg}").stdout.each_line do |line|
            case line
            when /^\s{2}Installed: (.+)$/
              installed_version = $1
              if installed_version == "(none)"
                Chef::Log.debug("#{@new_resource} current version is nil")
                installed_version = nil
              else
                Chef::Log.debug("#{@new_resource} current version is #{installed_version}")
                installed = true
              end
            when /^\s{2}Candidate: (.+)$/
              candidate_version = $1
              if candidate_version == "(none)"
                # This may not be an appropriate assumption, but it shouldn't break anything that already worked -- btm
                is_virtual_package = true
                showpkg = shell_out_with_timeout!("apt-cache showpkg #{pkg}").stdout
                providers = Hash.new
                # NOTE(review): the inner block param `line` shadows the
                # outer `line` — harmless here, but worth renaming.
                showpkg.rpartition(/Reverse Provides: ?#{$/}/)[2].each_line do |line|
                  provider, version = line.split
                  providers[provider] = version
                end
                # Check if the package providing this virtual package is installed
                num_providers = providers.length
                raise Chef::Exceptions::Package, "#{@new_resource.package_name} has no candidate in the apt-cache" if num_providers == 0
                # apt will only install a virtual package if there is a single providing package
                raise Chef::Exceptions::Package, "#{@new_resource.package_name} is a virtual package provided by #{num_providers} packages, you must explicitly select one to install" if num_providers > 1
                # Check if the package providing this virtual package is installed
                Chef::Log.info("#{@new_resource} is a virtual package, actually acting on package[#{providers.keys.first}]")
                ret = check_package_state(providers.keys.first)
                installed = ret[:installed]
                installed_version = ret[:installed_version]
              else
                Chef::Log.debug("#{@new_resource} candidate version is #{$1}")
              end
            end
          end

          return {
            installed_version: installed_version,
            installed: installed,
            candidate_version: candidate_version,
            is_virtual_package: is_virtual_package,
          }
        end

        # Probes every requested package and mirrors apt's answers into
        # @candidate_version / current_resource.version, keeping array
        # shape when a package list was requested.
        def check_all_packages_state(package)
          installed_version = {}
          candidate_version = {}
          installed = {}

          [package].flatten.each do |pkg|
            ret = check_package_state(pkg)
            is_virtual_package[pkg] = ret[:is_virtual_package]
            installed[pkg] = ret[:installed]
            installed_version[pkg] = ret[:installed_version]
            candidate_version[pkg] = ret[:candidate_version]
          end

          if package.is_a?(Array)
            @candidate_version = []
            final_installed_version = []
            [package].flatten.each do |pkg|
              @candidate_version << candidate_version[pkg]
              final_installed_version << installed_version[pkg]
            end
            @current_resource.version(final_installed_version)
          else
            @candidate_version = candidate_version[package]
            @current_resource.version(installed_version[package])
          end
        end

        # Installs pkg=version pairs; virtual packages are passed by bare
        # name since they have no pinnable version.
        def install_package(name, version)
          name_array = [ name ].flatten
          version_array = [ version ].flatten
          package_name = name_array.zip(version_array).map do |n, v|
            is_virtual_package[n] ? n : "#{n}=#{v}"
          end.join(" ")
          run_noninteractive("apt-get -q -y#{expand_options(default_release_options)}#{expand_options(@new_resource.options)} install #{package_name}")
        end

        # apt's install already upgrades to the requested version.
        def upgrade_package(name, version)
          install_package(name, version)
        end

        def remove_package(name, version)
          package_name = [ name ].flatten.join(" ")
          run_noninteractive("apt-get -q -y#{expand_options(@new_resource.options)} remove #{package_name}")
        end

        # purge removes the package including its configuration files.
        def purge_package(name, version)
          package_name = [ name ].flatten.join(" ")
          run_noninteractive("apt-get -q -y#{expand_options(@new_resource.options)} purge #{package_name}")
        end

        def preseed_package(preseed_file)
          Chef::Log.info("#{@new_resource} pre-seeding package installation instructions")
          run_noninteractive("debconf-set-selections #{preseed_file}")
        end

        def reconfig_package(name, version)
          package_name = [ name ].flatten.join(" ")
          Chef::Log.info("#{@new_resource} reconfiguring")
          run_noninteractive("dpkg-reconfigure #{package_name}")
        end

        private

        # Runs command via shell_out with magic environment to disable
        # interactive prompts. Command is run with default localization rather
        # than forcing locale to "C", so command output may not be stable.
        def run_noninteractive(command)
          shell_out_with_timeout!(command, :env => { "DEBIAN_FRONTEND" => "noninteractive", "LC_ALL" => nil })
        end

      end
    end
  end
end
| 40.908602 | 203 | 0.620975 |
d59b0a0aa3bdb185d907a1965ca7c505a238555e | 3,513 | module Locomotive
# Rendering concern for the public-facing controller: resolves the
# Locomotive page for the requested path and renders it through Liquid.
module Render

  extend ActiveSupport::Concern

  module InstanceMethods

    protected

    # Entry point: renders the resolved page, honouring redirect pages and
    # falling back to the admin 404 / "no page" templates.
    def render_locomotive_page
      if request.fullpath =~ /^\/admin\//
        render :template => "/admin/errors/404", :layout => '/admin/layouts/box', :status => :not_found
      else
        @page = locomotive_page

        # A page flagged as a redirect short-circuits rendering entirely.
        redirect_to(@page.redirect_url) and return if @page.present? && @page.redirect?

        render_no_page_error and return if @page.nil?

        output = @page.render(locomotive_context)

        self.prepare_and_set_response(output)
      end
    end

    def render_no_page_error
      render :template => "/admin/errors/no_page", :layout => false
    end

    # Resolves the request path to a Page: strips extension and leading
    # slash, also tries the "content_type_template" templatized sibling,
    # hides unpublished pages (and invisible content) from anonymous
    # visitors, and falls back to the site's 404 page.
    def locomotive_page
      path = (params[:path] || request.fullpath).clone # TODO: params[:path] is more consistent
      path.gsub!(/\.[a-zA-Z][a-zA-Z0-9]{2,}$/, '')
      path.gsub!(/^\//, '')
      path = 'index' if path.blank?

      if path != 'index'
        dirname = File.dirname(path).gsub(/^\.$/, '') # also look for templatized page path
        path = [path, File.join(dirname, 'content_type_template').gsub(/^\//, '')]
      end

      if page = current_site.pages.any_in(:fullpath => [*path]).first
        if not page.published? and current_admin.nil?
          page = nil
        else
          if page.templatized?
            # The final path segment is the content instance's slug.
            @content_instance = page.content_type.contents.where(:_slug => File.basename(path.first)).first

            if @content_instance.nil? || (!@content_instance.visible? && current_admin.nil?) # content instance not found or not visible
              page = nil
            end
          end
        end
      end

      page || not_found_page
    end

    # Builds the Liquid context: template assigns (site, page, drops,
    # flash data) plus registers used by Locomotive's tags/filters.
    def locomotive_context
      assigns = {
        'site'              => current_site,
        'page'              => @page,
        'asset_collections' => Locomotive::Liquid::Drops::AssetCollections.new,
        'contents'          => Locomotive::Liquid::Drops::Contents.new,
        'current_page'      => self.params[:page]
      }.merge(flash.stringify_keys) # data from api

      if @page.templatized? # add instance from content type
        assigns['content_instance'] = @content_instance
        assigns[@page.content_type.slug.singularize] = @content_instance # just here to help to write readable liquid code
      end

      registers = {
        :controller     => self,
        :site           => current_site,
        :page           => @page,
        :inline_editor  => self.editing_page?,
        :current_admin  => current_admin
      }

      ::Liquid::Context.new({}, assigns, registers)
    end

    # Sends the rendered output, applying the page's HTTP caching strategy
    # (etag/last-modified, plus max-age for the varnish-style strategies).
    def prepare_and_set_response(output)
      flash.discard

      response.headers['Content-Type'] = 'text/html; charset=utf-8'

      if @page.with_cache?
        fresh_when :etag => @page, :last_modified => @page.updated_at.utc, :public => true

        if @page.cache_strategy != 'simple' # varnish
          response.cache_control[:max_age] = @page.cache_strategy
        end
      end

      render :text => output, :layout => false, :status => page_status
    end

    def not_found_page
      current_site.pages.not_found.published.first
    end

    # NOTE(review): params[:editing] arrives as a String in normal HTTP
    # requests, so `== true` only matches when set programmatically —
    # confirm that is the intended calling convention.
    def editing_page?
      self.params[:editing] == true && current_admin
    end

    def page_status
      @page == not_found_page ? :not_found : :ok
    end

  end
end
end
| 30.547826 | 138 | 0.574153 |
6111941a2e7a44315036ae81087eec351cd5cfeb | 157 | module Shortener::ActiveRecordExtension
# Class-level macro for the including ActiveRecord model: declares that the
# model owns shortened URLs via a polymorphic has_many (as :owner) against
# Shortener::ShortenedUrl.
def has_shortened_urls
has_many :shortened_urls, class_name: "::Shortener::ShortenedUrl", as: :owner
end
end
| 26.166667 | 81 | 0.783439 |
28eee56555c6d51039e7db2b2580574f5d403f33 | 5,560 | require File.dirname(__FILE__) + '/spec_helper'
# Specs for Merb::Config: hash-style access (get/set/delete/fetch), YAML
# dumping, command-line argument parsing (-u, -G, -d, -c, -p, -P, -h, -i, -l,
# -L, -r, -R, -K, -k, -X, -V/--verbose), PID-file resolution with and without
# cluster nodes, and the Merb.disable / Merb.testing? integration points.
describe Merb::Config do
before do
Merb::Config.setup
end
it "should be able to yield the configuration via #use" do
res = nil
Merb::Config.use {|c| res = c}
res.should == Merb::Config.defaults
end
it "should be able to get a configuration key" do
Merb::Config[:host].should == "0.0.0.0"
end
it "should be able to set a configuration key" do
Merb::Config[:bar] = "Hello"
Merb::Config[:bar].should == "Hello"
end
it "should be able to #delete a configuration key" do
Merb::Config[:bar] = "Hello"
Merb::Config[:bar].should == "Hello"
Merb::Config.delete(:bar)
Merb::Config[:bar].should == nil
end
it "should be able to #fetch a key that does exist" do
Merb::Config.fetch(:host, "192.168.2.1").should == "0.0.0.0"
end
# NOTE(review): duplicated description — this example actually fetches a key
# that does NOT exist and expects the supplied default.
it "should be able to #fetch a key that does exist" do
Merb::Config.fetch(:bar, "heylo").should == "heylo"
end
it "should be able to dump to YAML" do
Merb::Config.to_yaml.should == Merb::Config.instance_variable_get("@configuration").to_yaml
end
it "should support -u to set the user to run Merb as" do
Merb::Config.parse_args(["-u", "tester"])
Merb::Config[:user].should == "tester"
end
it "should support -G to set the group to run Merb as" do
Merb::Config.parse_args(["-G", "tester"])
Merb::Config[:group].should == "tester"
end
it "should support -d to daemonize Merb" do
Merb::Config.parse_args(["-d"])
Merb::Config[:daemonize].should == true
end
it "should support -c to set the number of cluster nodes" do
Merb::Config.parse_args(["-c", "4"])
Merb::Config[:cluster].should == "4"
end
it "should support -p to set the port number" do
Merb::Config.parse_args(["-p", "6000"])
Merb::Config[:port].should == "6000"
end
it "should support -P to set the PIDfile" do
Merb::Config.parse_args(["-P", "pidfile"])
Merb::Config[:pid_file].should == "pidfile"
end
it "should have server return PIDfile setting as is with no cluster nodes" do
Merb::Config.parse_args(["-P", "pidfile", "-p", "6000"])
Merb::Server.pid_file(6000).should == "pidfile"
Merb::Server.pid_files.should == ["pidfile"]
end
it "should support setting of PIDfile with cluster nodes" do
Merb::Config.parse_args(["-P", "/tmp/merb.pidfile", "-c", "2", "-p", "6000"])
Merb::Server.pid_file(6000).should == "/tmp/merb.6000.pidfile"
Merb::Server.pid_file(6001).should == "/tmp/merb.6001.pidfile"
Dir.should_receive(:[]).with("/tmp/merb.*.pidfile")
Merb::Server.pid_files
end
it "should support default PIDfile setting" do
Merb::Config.parse_args(["-p", "6000"])
Merb::Server.pid_file(6000).should == Merb.log_path / "merb.6000.pid"
Dir.should_receive(:[]).with(Merb.log_path / "merb.*.pid")
Merb::Server.pid_files
end
it "should support -h to set the hostname" do
Merb::Config.parse_args(["-h", "hostname"])
Merb::Config[:host].should == "hostname"
end
it "should support -i to specify loading IRB" do
Merb::Config.parse_args(["-i"])
Merb::Config[:adapter].should == "irb"
end
it "should support -l to specify the log level" do
Merb::Config.parse_args(["-l", "debug"])
Merb::Config[:log_level].should == :debug
end
it "should support -L to specify the location of the log file" do
Merb::Config.parse_args(["-L", "log_file"])
Merb::Config[:log_file].should == "log_file"
end
it "should support -r to specify a runner" do
Merb::Config.parse_args(["-r", "foo_runner"])
Merb::Config[:runner_code].should == "foo_runner"
Merb::Config[:adapter].should == "runner"
end
it "should support -R to specify a rackup file" do
Merb::Config.parse_args(["-R", "config.ru"])
Merb::Config[:rackup].should == "config.ru"
end
it "should support -K for a graceful kill" do
Merb::Server.should_receive(:kill).with("all", 1)
Merb.start(["-K", "all"])
end
it "should support -k for a hard kill" do
Merb::Server.should_receive(:kill).with("all", 9)
Merb.start(["-k", "all"])
end
it "should support -X off to turn off the mutex" do
Merb::Config.parse_args(["-X", "off"])
Merb::Config[:use_mutex].should == false
end
# NOTE(review): description says "turn off" but this example turns the mutex ON.
it "should support -X on to turn off the mutex" do
Merb::Config.parse_args(["-X", "on"])
Merb::Config[:use_mutex].should == true
end
it "should take Merb.disable into account" do
Merb::Config[:disabled_components].should == []
Merb::Config[:disabled_components] << :foo
Merb.disable(:bar)
Merb.disable(:buz, :fux)
Merb::Config[:disabled_components].should == [:foo, :bar, :buz, :fux]
Merb.disabled?(:foo).should == true
Merb.disabled?(:foo, :buz).should == true
end
it "should take Merb.testing? into account" do
$TESTING.should == true
Merb::Config[:testing].should be_nil
Merb.should be_testing
$TESTING = false
Merb.should_not be_testing
Merb::Config[:testing] = true
Merb.should be_testing
$TESTING = true; Merb::Config[:testing] = false # reset
end
it "supports -V to turn on verbose mode" do
Merb::Config[:verbose] = false
Merb::Config.parse_args(["-V"])
Merb::Config[:verbose].should be(true)
end
it "supports --verbose to turn on verbose mode" do
Merb::Config[:verbose] = false
Merb::Config.parse_args(["--verbose"])
Merb::Config[:verbose].should be(true)
end
it "has verbose mode turned off by default" do
Merb::Config[:verbose].should be(false)
end
end
| 30.382514 | 95 | 0.646763 |
bb315f4e6036723e7137859b8f445d1849ccebf5 | 831 | require 'tty-file'
require 'pathname'
# Generates a project skeleton and renders registered file templates.
class Templater
# Ordered list of [source, target] path pairs registered via #add_mapping.
attr_reader :templates
def initialize()
@templates = []
end
# Creates the standard empty directory layout for a new project rooted at
# +project_name+.
# NOTE(review): the bracketed entries below form a single Hash inside an
# Array ([{...}]) — confirm this is the shape TTY::File.create_dir expects.
def add_empty_directories(project_name)
tree ={
"#{project_name}" => [
"01-discover" => [],
"02-content" => [],
"03-scripts" => []
]
}
TTY::File.create_dir(tree)
end
# Registers a template: +source+ is resolved relative to
# #templates_root_path at generation time, +target+ is the destination path.
def add_mapping(source, target)
@templates << [source, target]
end
# Renders every registered template, passing +template_options+ as the
# substitution context, and copies each to its target path.
def generate(template_options)
@templates.each do |template|
source = templates_root_path.join(template[0])
target = template[1]
TTY::File.copy_file(source, target, context: template_options)
end
end
# Directory containing this file; template sources are resolved against it.
def templates_root_path
Pathname(__dir__)
end
end | 18.065217 | 74 | 0.535499 |
03593be8d7743b5b35ba74b14b4b3f433eb5b825 | 124 | comment_regexp = /^\s*#/
# Demonstrate the comment regexp: prints the MatchData for a pure-comment
# line, and nil for a line that has code before the comment marker.
["     # Pure comment!", " x = 1  # Code plus comment!"].each do |line|
  p comment_regexp.match(line)
end
| 31 | 54 | 0.669355 |
e2b867aeee087c14ed7ae3e0845457c3213e26bd | 1,315 | =begin
#Topological Inventory Ingress API
#Topological Inventory Ingress API
The version of the OpenAPI document: 0.0.2
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.2.1
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for TopologicalInventoryIngressApiClient::ServiceOfferingTagAllOf
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
# Generated unit-test skeleton for the ServiceOfferingTagAllOf model:
# verifies the class can be instantiated; the per-attribute examples are
# placeholders awaiting real assertions.
describe 'ServiceOfferingTagAllOf' do
before do
# run before each test
@instance = TopologicalInventoryIngressApiClient::ServiceOfferingTagAllOf.new
end
after do
# run after each test
end
describe 'test an instance of ServiceOfferingTagAllOf' do
it 'should create an instance of ServiceOfferingTagAllOf' do
expect(@instance).to be_instance_of(TopologicalInventoryIngressApiClient::ServiceOfferingTagAllOf)
end
end
describe 'test attribute "service_offering"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "tag"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 27.395833 | 104 | 0.765779 |
1c2cec40b3610edcfc38e1b0ac5949303390d1e6 | 180 | $: << File.dirname(__FILE__)
require 'helper'
# Smoke test: ensures the wbem library can be required without raising.
class TestLoading < Test::Unit::TestCase
#def setup
#end
#def teardown
#end
# Passes as long as the require itself does not raise.
def test_loading
require 'wbem'
end
end
| 12.857143 | 40 | 0.672222 |
26a1f93a458f288a24588bea02e08fb737ddaa16 | 1,844 | require 'thor'
require 'user_config'
module Recluse
  module CLI
    ##
    # Whitelist related commands.
    #
    # Each command operates on the whitelist stored in a profile's YAML file
    # under ~/.recluse. Profile loading (and the exit-on-missing behavior) is
    # shared via the private #load_profile helper; Thor does not register
    # private methods as commands.
    class Whitelist < Thor #:nodoc: all
      desc 'add profile pattern1 [pattern2] ...', 'add glob patterns to whitelist'
      # Appends +patterns+ to the profile's whitelist, creating the key if absent.
      def add(name, *patterns)
        profile = load_profile(name)
        if profile.key?('whitelist')
          profile['whitelist'] += patterns
        else
          profile['whitelist'] = patterns
        end
        profile.save
      end

      desc 'remove profile pattern1 [pattern2] ...', 'remove patterns from whitelist'
      # Removes +patterns+ from the whitelist; a no-op when no whitelist exists.
      def remove(name, *patterns)
        profile = load_profile(name)
        return unless profile.key?('whitelist')
        profile['whitelist'] -= patterns
        profile.save
      end

      desc 'clear profile', 'remove all patterns in the whitelist'
      # Resets the whitelist to an empty list.
      def clear(name)
        profile = load_profile(name)
        profile['whitelist'] = []
        profile.save
      end

      desc 'list profile', 'list patterns in whitelist'
      # Prints each whitelist pattern on its own line.
      def list(name)
        profile = load_profile(name)
        profile['whitelist'].each { |pattern| puts pattern } if profile.key?('whitelist')
      end

      private

      # Loads the YAML-backed profile +name+ from ~/.recluse, printing a
      # message and exiting with -1 when the profile does not exist.
      def load_profile(name)
        uconf = UserConfig.new '.recluse'
        unless uconf.exist?("#{name}.yaml")
          puts "Profile #{name} doesn't exist"
          exit(-1)
        end
        uconf["#{name}.yaml"]
      end
    end
  end
end
| 30.733333 | 89 | 0.561822 |
ff156c535bcd7a1ac986eb3779e1a585d39b67e5 | 1,451 | module SimpleFormExtension
module Inputs
# SimpleForm input that renders a text field wired to a JavaScript
# date/time picker, wrapped in a Bootstrap input group with a trigger button.
class DateTimeInput < SimpleForm::Inputs::DateTimeInput
include SimpleFormExtension::Translations
delegate :content_tag, to: :template
# Renders the input group: a localized text field plus the picker trigger
# button (calendar icon for dates, clock for times).
def input(wrapper_options = nil)
input_html_options[:class] << "form-control"
input_html_options[:data] ||= {}
input_html_options[:data].merge!(type_specific_option)
# Pre-fill with the localized attribute value unless the caller already
# supplied an explicit :value.
if (value = object.send(attribute_name).presence) && !input_html_options.key?(:value)
format = _translate("#{ input_type }.format.rails")
input_html_options[:value] = I18n.l(value, format: format)
end
content_tag(:div, class: 'input-group') do
@builder.text_field(attribute_name, input_html_options) +
content_tag(:span, class: 'input-group-btn') do
content_tag(:button, type: 'button', class: 'btn btn-default datetimepicker-trigger') do
content_tag(:i, '', class: "fa fa-#{ icon }")
end
end
end
end
private
# Data attributes consumed by the JS picker: the localized value format,
# plus (for date-bearing inputs) the week start day and date format.
def type_specific_option
options = { format: _translate("#{ input_type }.format.js") }
unless input_type == :time
options[:'week-start-day'] = _translate('shared.week_start_day')
options[:'format-date'] = _translate('date.format.js')
end
options
end
# Font Awesome icon name for the trigger button.
def icon
input_type == :time ? 'clock-o' : 'calendar'
end
end
end
end
| 29.612245 | 100 | 0.612681 |
61de318c88b116706fac13a422b09ba66f4cf030 | 301 | # frozen_string_literal: true
# Factory for a Form 526 "all claims" disability compensation saved claim;
# the form payload is extracted from the shared front-end submission fixture.
FactoryBot.define do
factory :va526ez, class: 'SavedClaim::DisabilityCompensation::Form526AllClaim' do
form {
# re-read the fixture on each build so specs see current file contents
JSON.parse(
File.read('spec/support/disability_compensation_form/all_claims_fe_submission.json')
)['form526'].to_json
}
end
end
| 25.083333 | 92 | 0.727575 |
339eb41ebb033c51f850229ace3ab4ee8102411d | 2,027 | # frozen_string_literal: true
# == Schema Information
# Schema version: 20110604174521
#
# Table name: sources
#
# id :integer not null, primary key
# title :string(255)
# url :string(255)
# imported_at :datetime
# created_at :datetime
# updated_at :datetime
#
# == Source
#
# A model that represents a source of events data, such as feeds for hCal, iCal, etc.
require 'calagator/decode_html_entities_hack'
require 'paper_trail'
require 'loofah-activerecord'
require 'loofah/activerecord/xss_foliate'
module Calagator
class Source < ActiveRecord::Base
self.table_name = 'sources'
validate :assert_url
has_many :events, dependent: :destroy
has_many :venues, dependent: :destroy
# Most recently created sources first.
scope :listing, -> { order('created_at DESC') }
has_paper_trail
xss_foliate
include DecodeHtmlEntitiesHack
# Create events for this source. Returns the events created. URL must be set
# for this source for this to work.
def create_events!
save!
# persist only events that validate and are not in the past
to_events.select { |event| event.valid? && !event.old? }.each(&:save!)
end
# Normalize the URL.
# Coerces unsupported schemes to http and prefixes "http://" onto bare
# host values. Returns false (leaving the attribute unchanged) when the
# value cannot be parsed as a URI at all.
def url=(value)
url = URI.parse(value.strip)
unless %w[http https ftp].include?(url.scheme) || url.scheme.nil?
url.scheme = 'http'
end
self[:url] = url.scheme.nil? ? 'http://' + value.strip : url.to_s
rescue URI::InvalidURIError
false
end
# Returns an Array of Event objects that were read from this source.
# Raises ActiveRecord::RecordInvalid when the source itself is invalid;
# stamps imported_at before delegating to the parser.
def to_events
raise ActiveRecord::RecordInvalid, self unless valid?
self.imported_at = Time.now.in_time_zone
Source::Parser.to_events(url: url, source: self)
end
# Return the name of the source, which can be its title or URL.
def name
[title, url].detect(&:present?)
end
private
# Ensure that the URL for this source is valid.
def assert_url
URI.parse(url)
rescue URI::InvalidURIError
errors.add :url, 'has invalid format'
false
end
end
end
| 24.719512 | 85 | 0.662062 |
4a014d6d425a1949abc24d930d690ab6c97ad868 | 713 | require_relative "boot"
require "rails/all"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Application namespace. NOTE(review): the name keeps the original
# "TwittterRedesign" (triple-t) spelling; renaming it would change generated
# configuration/environment keys, so it is documented rather than fixed here.
module TwittterRedesign
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 6.1
# Configuration for the application, engines, and railties goes here.
#
# These settings can be overridden in specific environments using the files
# in config/environments, which are processed later.
#
# config.time_zone = "Central Time (US & Canada)"
# config.eager_load_paths << Rails.root.join("extras")
end
end
| 31 | 79 | 0.740533 |
01b6f1c655abc68a671db6324df3228aaae9df97 | 441 | # frozen_string_literal: true
# Authorization rules for category groups: only the owning user may view or
# modify a record, while any signed-in user may create one.
class CategoryGroupPolicy < ApplicationPolicy
  # Restricts index-style queries to the requesting user's own records.
  class Scope < Scope
    def resolve
      scope.where(user: user)
    end
  end

  # Owner-only actions share a single implementation.
  %i[show? update? edit? destroy?].each do |action|
    define_method(action) { owner? }
  end

  # Any authenticated user may start a new category group.
  %i[create? new?].each do |action|
    define_method(action) { user.present? }
  end

  private

  # True when the record has an owner and that owner is the current user.
  def owner?
    record.user.present? && record.user == user
  end
end
| 11.307692 | 47 | 0.623583 |
e999e83157323018d612a04fd936fe14e4051150 | 9,592 | # Copyright (c) 2018 Public Library of Science
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
require 'rails_helper'
describe SimilarityCheck, type: :model, redis: true do
describe "the factory" do
let(:similarity_check) { build :similarity_check }
it "creates a valid record" do
expect(similarity_check).to be_valid
end
end
describe "#start_report_async" do
let(:similarity_check) { create :similarity_check }
subject(:start_report_async) { similarity_check.start_report_async }
it "enqueues a SimilarityCheckStartReportWorker job" do
expect do
start_report_async
end.to change { SimilarityCheckStartReportWorker.jobs.size }.by(1)
end
it "enqueues a job with the SimilarityCheck record id as an arg" do
start_report_async
args = SimilarityCheckStartReportWorker.jobs.first["args"]
expect(similarity_check.id).to be_present
expect(args).to eq [similarity_check.id]
end
end
describe "#start_report!" do
let(:similarity_check) { create :similarity_check }
let(:paper) { create :paper, :version_with_file_type, :with_creator }
let!(:similarity_check) { create :similarity_check, versioned_text: paper.latest_version }
let(:file) { double }
let(:stubbed_url) { Faker::Internet.url }
let(:fake_doc_id) { Faker::Number.number(8).to_i }
let(:fake_ithenticate_response) do
{
"api_status" => 200,
"uploaded" => [
{
"id" => fake_doc_id
}
]
}
end
subject(:start_report!) { similarity_check.start_report! }
before do
allow(similarity_check).to receive(:file).and_return(file)
allow(file).to receive(:[]).with(:file)
.and_return("#{Faker::Lorem.word.downcase}.docx")
allow(file).to receive(:url).and_return(stubbed_url)
stub_request(:get, stubbed_url).to_return(body: "turtles")
allow(Ithenticate::Api).to receive_message_chain(:new_from_tahi_env, :error?)
.and_return(false)
end
it "adds a document through the Ithenticate::Api" do
Sidekiq.redis { |redis| redis.set('ithenticate_folder', 1) }
expect(Ithenticate::Api).to(
receive_message_chain(:new_from_tahi_env, :add_document)
.and_return(fake_ithenticate_response)
)
start_report!
end
describe "successful ithenticate api calls" do
before do
allow(Ithenticate::Api).to(
receive_message_chain(:new_from_tahi_env, :add_document)
.and_return(fake_ithenticate_response)
)
end
it "sets ithenticate_document_id on the SimilarityCheck record" do
Sidekiq.redis { |redis| redis.set('ithenticate_folder', 1) }
expect do
start_report!
end.to change { similarity_check.ithenticate_document_id }
.from(nil).to(fake_doc_id)
end
it "updates the AASM state of the SimilarityCheck record" do
Sidekiq.redis { |redis| redis.set('ithenticate_folder', 1) }
expect do
start_report!
end.to change { similarity_check.state }
.from("needs_upload").to("waiting_for_report")
end
it "sets the similarity check's timeout_at" do
Sidekiq.redis { |redis| redis.set('ithenticate_folder', 1) }
expect(SimilarityCheck::TIMEOUT_INTERVAL).to be_present
Timecop.freeze do |now|
expect do
start_report!
end.to change { similarity_check.timeout_at }
.from(nil).to(now + SimilarityCheck::TIMEOUT_INTERVAL)
end
end
it "sets document_s3_url" do
Sidekiq.redis { |redis| redis.set('ithenticate_folder', 1) }
expect do
start_report!
end.to change { similarity_check.document_s3_url }
.from(nil).to(stubbed_url)
end
end
end
describe "#sync_document!" do
let(:similarity_check) { create :similarity_check, :waiting_for_report }
before do
allow_any_instance_of(Ithenticate::Api).to receive(:get_document)
.and_return(response_double)
end
context "the document's report is finished" do
let(:report_score) { Faker::Number.number(2).to_i }
let(:report_id) { Faker::Number.number(8).to_i }
let(:response_double) do
double("response", error: nil, report_complete?: true, score: report_score, report_id: report_id, error?: false)
end
it "updates the similarity check's state to 'report_complete'" do
expect do
similarity_check.sync_document!
end.to change { similarity_check.state }
.from("waiting_for_report")
.to("report_complete")
end
it "updates the similarity check with the report score" do
expect do
similarity_check.sync_document!
end.to change { similarity_check.ithenticate_score }.from(nil).to(report_score)
end
it "updates the similarity check with the report id" do
expect do
similarity_check.sync_document!
end.to change { similarity_check.ithenticate_report_id }.from(nil).to(report_id)
end
it "updates the similarity check's ithenticate_report_completed_at" do
Timecop.freeze do |now|
expect do
similarity_check.sync_document!
end.to change { similarity_check.ithenticate_report_completed_at }
.from(nil).to(now)
end
end
it "sets the state of the similarity check" do
expect do
similarity_check.sync_document!
end.to change { similarity_check.state }
.from("waiting_for_report")
.to("report_complete")
end
end
context "the document's report is finished" do
let(:similarity_check) { create :similarity_check, :waiting_for_report }
let(:report_score) { Faker::Number.number(2).to_i }
let(:report_id) { Faker::Number.number(8).to_i }
let(:response_double) do
double("response", report_complete?: false, error: nil, error?: false)
end
context "the system time is after the similarity check's timeout_at" do
let(:timeout_offset) { 1.second }
it "updates to similarity check's status to 'failed'" do
Timecop.freeze(similarity_check.timeout_at + timeout_offset) do
expect do
similarity_check.sync_document!
end.to raise_error('Report timed out after 10 minutes.')
expect(similarity_check.state).to eq('failed')
end
end
it "adds an error_message to similarity check" do
Timecop.freeze(similarity_check.timeout_at + timeout_offset) do
expect do
similarity_check.sync_document!
end.to raise_error('Report timed out after 10 minutes.')
expect(similarity_check.error_message).to eq("Report timed out after 10 minutes.")
end
end
end
context "the system time is before the similarity check's timeout_at" do
let(:timeout_offset) { -1.seconds }
it "updates to similarity check's status to 'failed'" do
Timecop.freeze(similarity_check.timeout_at + timeout_offset) do
expect do
similarity_check.sync_document!
end.to_not change { similarity_check.state }
end
end
end
end
end
describe "#report_view_only_url" do
context "the similarity check's report is not complete" do
let(:similarity_check) { create :similarity_check, :waiting_for_report }
it "raises an exception" do
expect do
similarity_check.report_view_only_url
end.to raise_exception(SimilarityCheck::IncorrectState)
end
end
context "the similarity check's report is complete" do
let(:similarity_check) { create :similarity_check, :report_complete }
let(:fake_url) { Faker::Internet.url }
let(:response_double) do
double(Ithenticate::ReportResponse, view_only_url: fake_url)
end
before do
allow_any_instance_of(Ithenticate::Api).to(
receive(:get_report).with(
id: similarity_check.ithenticate_report_id
).and_return(response_double)
)
end
it "returns an expiring url to the view_only version of the report" do
expect(similarity_check.report_view_only_url).to eq fake_url
end
end
end
end
| 36.196226 | 120 | 0.656902 |
7a85c71897949684fea63bd6a307a85e5790feb8 | 4,133 | require "language/python"
# Assembles the caveat text shown to the user after a formula is installed:
# the formula's own caveats plus generated sections for keg-only formulae,
# shell completions/functions, launchd plists, and Emacs Lisp files.
class Caveats
extend Forwardable
# The Formula whose caveats are being rendered.
attr_reader :f
def initialize(f)
@f = f
end
# Full caveat text; nil-producing sections are dropped and the rest are
# joined with blank-line separators.
def caveats
caveats = []
begin
# Temporarily evaluate the formula's caveats against the installed Tab
# (build options actually used), restoring the original build afterwards.
build = f.build
f.build = Tab.for_formula(f)
s = f.caveats.to_s
caveats << s.chomp + "\n" unless s.empty?
ensure
f.build = build
end
caveats << keg_only_text
caveats << function_completion_caveats(:bash)
caveats << function_completion_caveats(:zsh)
caveats << function_completion_caveats(:fish)
caveats << plist_caveats
caveats << elisp_caveats
caveats.compact.join("\n")
end
delegate [:empty?, :to_s] => :caveats
# Explains the consequences of a keg-only install (PATH, compiler and
# pkg-config hints). Returns nil for non-keg-only formulae. When
# skip_reason is true the "why keg-only" paragraph is omitted.
def keg_only_text(skip_reason: false)
return unless f.keg_only?
s = if skip_reason
""
else
<<~EOS
#{f.name} is keg-only, which means it was not symlinked into #{HOMEBREW_PREFIX},
because #{f.keg_only_reason.to_s.chomp}.
EOS
end
if f.bin.directory? || f.sbin.directory?
s << <<~EOS
If you need to have #{f.name} first in your PATH run:
EOS
s << "  #{Utils::Shell.prepend_path_in_profile(f.opt_bin.to_s)}\n" if f.bin.directory?
s << "  #{Utils::Shell.prepend_path_in_profile(f.opt_sbin.to_s)}\n" if f.sbin.directory?
end
if f.lib.directory? || f.include.directory?
s << <<~EOS
For compilers to find #{f.name} you may need to set:
EOS
s << "  #{Utils::Shell.export_value("LDFLAGS", "-L#{f.opt_lib}")}\n" if f.lib.directory?
s << "  #{Utils::Shell.export_value("CPPFLAGS", "-I#{f.opt_include}")}\n" if f.include.directory?
# pkg-config hints only make sense when pkg-config itself is available
# and the keg actually ships .pc files.
if which("pkg-config", ENV["HOMEBREW_PATH"]) &&
((f.lib/"pkgconfig").directory? || (f.share/"pkgconfig").directory?)
s << <<~EOS
For pkg-config to find #{f.name} you may need to set:
EOS
if (f.lib/"pkgconfig").directory?
s << "  #{Utils::Shell.export_value("PKG_CONFIG_PATH", "#{f.opt_lib}/pkgconfig")}\n"
end
if (f.share/"pkgconfig").directory?
s << "  #{Utils::Shell.export_value("PKG_CONFIG_PATH", "#{f.opt_share}/pkgconfig")}\n"
end
end
end
s << "\n"
end
private
# First keg that can be resolved among the formula's prefix, opt prefix and
# linked keg; nil when none resolves (memoized).
def keg
@keg ||= [f.prefix, f.opt_prefix, f.linked_keg].map do |d|
begin
Keg.new(d.resolved_path)
rescue
nil
end
end.compact.first
end
# Caveat text describing where shell completions and/or functions were
# installed for +shell+ (:bash, :zsh or :fish); nil when the shell is not
# present or nothing was installed.
def function_completion_caveats(shell)
return unless keg
return unless which(shell.to_s, ENV["HOMEBREW_PATH"])
completion_installed = keg.completion_installed?(shell)
functions_installed = keg.functions_installed?(shell)
return unless completion_installed || functions_installed
installed = []
installed << "completions" if completion_installed
installed << "functions" if functions_installed
# keg-only formulae keep their files under opt_prefix instead of HOMEBREW_PREFIX
root_dir = f.keg_only? ? f.opt_prefix : HOMEBREW_PREFIX
case shell
when :bash
<<~EOS
Bash completion has been installed to:
#{root_dir}/etc/bash_completion.d
EOS
when :zsh
<<~EOS
zsh #{installed.join(" and ")} have been installed to:
#{root_dir}/share/zsh/site-functions
EOS
when :fish
fish_caveats = "fish #{installed.join(" and ")} have been installed to:"
fish_caveats << "\n  #{root_dir}/share/fish/vendor_completions.d" if completion_installed
fish_caveats << "\n  #{root_dir}/share/fish/vendor_functions.d" if functions_installed
fish_caveats
end
end
# Where Emacs Lisp files were installed, for linked (non-keg-only) formulae.
def elisp_caveats
return if f.keg_only?
return unless keg
return unless keg.elisp_installed?
<<~EOS
Emacs Lisp files have been installed to:
#{HOMEBREW_PREFIX}/share/emacs/site-lisp/#{f.name}
EOS
end
# Placeholder; OS-specific behavior is loaded from extend/os/caveats.
def plist_caveats; end
# Destination path for the formula's launchd plist: LaunchDaemons for
# startup plists, the user's LaunchAgents otherwise.
def plist_path
destination = if f.plist_startup
"/Library/LaunchDaemons"
else
"~/Library/LaunchAgents"
end
plist_filename = if f.plist
f.plist_path.basename
else
File.basename Dir["#{keg}/*.plist"].first
end
destination_path = Pathname.new(File.expand_path(destination))
destination_path/plist_filename
end
end
require "extend/os/caveats"
| 25.83125 | 103 | 0.627147 |
6ad32cf1d4d58188ea4f898d2d0bfdc2a337495b | 249 | class CreateRuleSets < ActiveRecord::Migration[5.2]
# Creates the rule_sets lookup table: three identifying string columns
# followed by two boolean behavior flags.
def change
  create_table :rule_sets do |t|
    %i[object_key name class_name].each { |column| t.string column }
    %i[rule_set_aware active].each { |column| t.boolean column }
  end
end
end
| 20.75 | 51 | 0.662651 |
b97cfdedfefae124b07a27ce09b23be8ad3447d5 | 245 | # frozen_string_literal: true
module Spree
# Abstract base class for return-item calculators; concrete subclasses must
# implement #compute(return_item).
class ReturnsCalculator < Calculator
# Always raises: subclasses override this with the actual computation.
def compute(_return_item)
raise NotImplementedError, "Please implement 'compute(return_item)' in your calculator: #{self.class.name}"
end
end
end
| 24.5 | 113 | 0.755102 |
26a5ce96f9c1f6169ba60339f44965051a5b4caa | 1,817 | # frozen_string_literal: true
require_relative '../test_harness'
require_relative './orders_helper'
require 'json'
include PayPalCheckoutSdk::Orders
# Integration spec for OrdersPatchRequest: creates an order, patches its
# description (add) and amount (replace) via JSON Patch operations, then
# re-fetches the order and verifies the patched fields and HATEOAS links.
describe OrdersPatchRequest do
# JSON Patch payload: adds a description and replaces the amount on the
# purchase unit with reference_id 'test_ref_id1'.
def build_request_body
[
{
"op": 'add',
"path": "/purchase_units/@reference_id=='test_ref_id1'/description",
"value": 'added_description'
},
{
"op": 'replace',
"path": "/purchase_units/@reference_id=='test_ref_id1'/amount",
"value": {
"currency_code": 'USD',
"value": '200.00'
}
}
]
end
it 'successfully makes a request' do
create_resp = OrdersHelper.create_order
request = OrdersPatchRequest.new(create_resp.result.id)
request.request_body(build_request_body)
resp = TestHarness.client.execute(request)
expect(resp.status_code).to eq(204)
resp = OrdersHelper.get_order create_resp.result.id
expect(resp.status_code).to eq(200)
expect(resp.result).not_to be_nil
expect(resp.result.intent).to eq('CAPTURE')
expect(resp.result.purchase_units.count).to eq(1)
expect(resp.result.purchase_units[0].reference_id).to eq('test_ref_id1')
expect(resp.result.purchase_units[0].amount.currency_code).to eq('USD')
expect(resp.result.purchase_units[0].amount.value).to eq('200.00')
expect(resp.result.purchase_units[0].description).to eq('added_description')
expect(resp.result.create_time).not_to be_nil
expect(resp.result.links).not_to be_nil
found_approve = false
resp.result.links.each do |link|
# NOTE(review): `===` on a String receiver is equivalent to `==` here;
# plain `==` would be the more conventional spelling.
next unless 'approve' === link.rel
expect(link['href']).not_to be_nil
expect(link['method']).to eq('GET')
found_approve = true
end
expect(found_approve).to be_truthy
expect(resp.result.status).to eq('CREATED')
end
end
| 29.786885 | 80 | 0.679141 |
616e9b4eebad3019468d0df2951568ca4966af06 | 2,537 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "gapic/common"
require "gapic/config"
require "gapic/config/method"
require "google/cloud/kms/v1/version"
require "google/cloud/kms/v1/iam_policy/credentials"
require "google/cloud/kms/v1/iam_policy/client"
module Google
  module Cloud
    module Kms
      module V1
        ##
        # ## API Overview
        #
        # Manages Identity and Access Management (IAM) policies.
        #
        # Any implementation of an API that offers access control features
        # implements the google.iam.v1.IAMPolicy interface.
        #
        # ## Data model
        #
        # Access control is applied when a principal (user or service account), takes
        # some action on a resource exposed by a service. Resources, identified by
        # URI-like names, are the unit of access control specification. Service
        # implementations can choose the granularity of access control and the
        # supported permissions for their resources.
        # For example one database service may allow access control to be
        # specified only at the Table level, whereas another might allow access control
        # to also be specified at the Column level.
        #
        # ## Policy Structure
        #
        # See google.iam.v1.Policy
        #
        # This is intentionally not a CRUD style API because access control policies
        # are created and deleted implicitly with the resources to which they are
        # attached.
        #
        # To load this service and instantiate a client:
        #
        #     require "google/cloud/kms/v1/iam_policy"
        #     client = ::Google::Cloud::Kms::V1::IAMPolicy::Client.new
        #
        # Namespace only: the client, credentials and helpers are defined in the
        # files required at the top of this generated file.
        module IAMPolicy
        end
      end
    end
  end
end
# Load optional hand-written helper methods when a helpers.rb exists next to
# the generated service files.
helper_path = ::File.join __dir__, "iam_policy", "helpers.rb"
require "google/cloud/kms/v1/iam_policy/helpers" if ::File.file? helper_path
| 34.753425 | 87 | 0.677178 |
61362e7f767ea7e96a8c4e374e11c49f9c4a7398 | 7,516 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Brcobranca::Boleto::Banrisul do # :nodoc:[all]
let(:valid_attributes) do
{
data_vencimento: Date.parse('2015-06-26'),
valor: 1278.90,
cedente: 'Kivanio Barbosa',
documento_cedente: '12345678912',
sacado: 'Claudio Pozzebom',
sacado_documento: '12345678900',
agencia: '1102',
conta_corrente: '1454204',
nosso_numero: '22832563',
convenio: '9000150',
digito_convenio: '46'
}
end
it 'Criar nova instancia com atributos padrões' do
boleto_novo = described_class.new
expect(boleto_novo.banco).to eql('041')
expect(boleto_novo.especie_documento).to eql('DM')
expect(boleto_novo.especie).to eql('R$')
expect(boleto_novo.moeda).to eql('9')
expect(boleto_novo.data_vencimento).to eql(Date.current)
expect(boleto_novo.aceite).to eql('S')
expect(boleto_novo.quantidade).to be(1)
expect(boleto_novo.valor).to eq(0.0)
expect(boleto_novo.valor_documento).to eq(0.0)
expect(boleto_novo.local_pagamento).to eql('QUALQUER BANCO ATÉ O VENCIMENTO')
expect(boleto_novo.carteira).to eql('2')
end
it 'Criar nova instancia com atributos válidos' do
boleto_novo = described_class.new(valid_attributes)
expect(boleto_novo.banco).to eql('041')
expect(boleto_novo.especie_documento).to eql('DM')
expect(boleto_novo.especie).to eql('R$')
expect(boleto_novo.moeda).to eql('9')
expect(boleto_novo.data_processamento).to eql(Date.current)
expect(boleto_novo.data_vencimento).to eql(Date.parse('2015-06-26'))
expect(boleto_novo.aceite).to eql('S')
expect(boleto_novo.quantidade).to be(1)
expect(boleto_novo.valor).to eq(1278.9)
expect(boleto_novo.valor_documento).to eq(1278.9)
expect(boleto_novo.local_pagamento).to eql('QUALQUER BANCO ATÉ O VENCIMENTO')
expect(boleto_novo.cedente).to eql('Kivanio Barbosa')
expect(boleto_novo.documento_cedente).to eql('12345678912')
expect(boleto_novo.sacado).to eql('Claudio Pozzebom')
expect(boleto_novo.sacado_documento).to eql('12345678900')
expect(boleto_novo.conta_corrente).to eql('01454204')
expect(boleto_novo.agencia).to eql('1102')
expect(boleto_novo.nosso_numero).to eql('22832563')
expect(boleto_novo.nosso_numero_boleto).to eql('22832563-51')
expect(boleto_novo.carteira).to eql('2')
expect(boleto_novo.codigo_barras).to eql('04191647100001278902111029000150228325634059')
expect(boleto_novo.codigo_barras.linha_digitavel).to eql('04192.11107 29000.150226 83256.340593 1 64710000127890')
end
it 'Não permitir gerar boleto com atributos inválido' do
boleto_novo = described_class.new
expect { boleto_novo.codigo_barras }.to raise_error(Brcobranca::BoletoInvalido)
expect(boleto_novo.errors.count).to be(5)
end
it 'Montar nosso_numero_boleto' do
valid_attributes[:nosso_numero] = '00009274'
boleto_novo = described_class.new(valid_attributes)
expect(boleto_novo.nosso_numero_boleto).to eql('00009274-22')
valid_attributes[:nosso_numero] = '00009194'
boleto_novo = described_class.new(valid_attributes)
expect(boleto_novo.nosso_numero_boleto).to eql('00009194-38')
valid_attributes[:nosso_numero] = '22832563'
boleto_novo = described_class.new(valid_attributes)
expect(boleto_novo.nosso_numero_boleto).to eql('22832563-51')
end
it 'Montar codio de barras' do
valid_attributes[:nosso_numero] = '22832563'
valid_attributes[:data_vencimento] = Date.parse('2000-07-04')
valid_attributes[:valor] = 550.0
valid_attributes[:agencia] = '1102'
valid_attributes[:conta_corrente] = '00099999'
valid_attributes[:convenio] = '9000150'
boleto_novo = described_class.new(valid_attributes)
expect(boleto_novo.codigo_barras).to eql('04198100100000550002111029000150228325634059')
expect(boleto_novo.codigo_barras.linha_digitavel).to eql('04192.11107 29000.150226 83256.340593 8 10010000055000')
valid_attributes[:nosso_numero] = '00009274'
valid_attributes[:data_vencimento] = Date.parse('2000-07-04')
valid_attributes[:valor] = 550.00
valid_attributes[:agencia] = '1102'
valid_attributes[:conta_corrente] = '00099999'
valid_attributes[:convenio] = '9000150'
boleto_novo = described_class.new(valid_attributes)
expect(boleto_novo.codigo_barras).to eql('04194100100000550002111029000150000092744028')
expect(boleto_novo.codigo_barras.linha_digitavel).to eql('04192.11107 29000.150002 00927.440289 4 10010000055000')
valid_attributes[:nosso_numero] = '00009194'
valid_attributes[:data_vencimento] = Date.parse('2000-07-04')
valid_attributes[:valor] = 550.00
valid_attributes[:agencia] = '1102'
valid_attributes[:conta_corrente] = '00099999'
valid_attributes[:convenio] = '9000150'
boleto_novo = described_class.new(valid_attributes)
expect(boleto_novo.codigo_barras).to eql('04198100100000550002111029000150000091944023')
expect(boleto_novo.codigo_barras.linha_digitavel).to eql('04192.11107 29000.150002 00919.440230 8 10010000055000')
valid_attributes[:nosso_numero] = '03408099'
valid_attributes[:data_vencimento] = Date.parse('2017-01-12')
valid_attributes[:valor] = 1216.00
valid_attributes[:agencia] = '0016'
valid_attributes[:conta_corrente] = '00099999'
valid_attributes[:convenio] = '0164640'
boleto_novo = described_class.new(valid_attributes)
expect(boleto_novo.codigo_barras).to eql('04192703700001216002100160164640034080994027')
expect(boleto_novo.codigo_barras.linha_digitavel).to eql('04192.10018 60164.640033 40809.940279 2 70370000121600')
end
it 'Montar agencia_conta_boleto' do
valid_attributes[:convenio] = '9000150'
boleto_novo = described_class.new(valid_attributes)
expect(boleto_novo.agencia_conta_boleto).to eql('1102 / 900015.0.46')
valid_attributes[:convenio] = '8505610'
valid_attributes[:digito_convenio] = '99'
boleto_novo = described_class.new(valid_attributes)
expect(boleto_novo.agencia_conta_boleto).to eql('1102 / 850561.0.99')
end
describe 'Busca logotipo do banco' do
it_behaves_like 'busca_logotipo'
end
it 'Gerar boleto nos formatos válidos com método to_' do
valid_attributes[:valor] = 135.00
valid_attributes[:data_vencimento] = Date.parse('2008-02-03')
valid_attributes[:nosso_numero] = '240'
boleto_novo = described_class.new(valid_attributes)
%w[pdf jpg tif png].each do |format|
file_body = boleto_novo.send("to_#{format}".to_sym)
tmp_file = Tempfile.new(['foobar.', format])
tmp_file.puts file_body
tmp_file.close
expect(File).to exist(tmp_file.path)
expect(File.stat(tmp_file.path)).not_to be_zero
expect(File.delete(tmp_file.path)).to be(1)
expect(File).not_to exist(tmp_file.path)
end
end
it 'Gerar boleto nos formatos válidos' do
valid_attributes[:valor] = 135.00
valid_attributes[:data_vencimento] = Date.parse('2008-02-03')
valid_attributes[:nosso_numero] = '240'
boleto_novo = described_class.new(valid_attributes)
%w[pdf jpg tif png].each do |format|
file_body = boleto_novo.to(format)
tmp_file = Tempfile.new(['foobar.', format])
tmp_file.puts file_body
tmp_file.close
expect(File).to exist(tmp_file.path)
expect(File.stat(tmp_file.path)).not_to be_zero
expect(File.delete(tmp_file.path)).to be(1)
expect(File).not_to exist(tmp_file.path)
end
end
end
| 41.524862 | 118 | 0.735764 |
1cea64dea8806bed0cdcf8aec9d2a58c480e6977 | 415 | require 'spec_helper'
describe Almanac::Blog do
  it "should be possible to create a valid blog" do
    expect(create(:blog)).to eq(Almanac::Blog.first)
  end

  it "should not be possible to create a blog without a title" do
    # Converted from the deprecated `should_not` one-liner to the `expect`
    # syntax used by the first example, so the file no longer depends on
    # rspec-expectations' `should` monkey-patching mode.
    expect(build(:blog, title: "")).not_to be_valid
  end

  it "should not be possible to create more than one blog" do
    create(:blog)
    # NOTE(review): a bare `raise_error` matches any StandardError and is
    # deprecated in RSpec 3; narrow it to the specific error class raised by
    # the model once confirmed.
    expect { create(:blog) }.to raise_error
  end
end
183eb063c75f4caa55aa7fec1bda37c3a4de9528 | 25,412 | #
# Copyright 2012 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fileutils'
require 'uri'
require 'omnibus/manifest_entry'
module Omnibus
class Software
class << self
#
# @param [Project] project
# the project that loaded this software definition
# @param [String] name
# the path to the software definition to load from disk
#
# @return [Software]
#
def load(project, name, manifest)
loaded_softwares[name] ||= begin
filepath = Omnibus.software_path(name)
if filepath.nil?
raise MissingSoftware.new(name)
else
log.internal(log_key) do
"Loading software `#{name}' from `#{filepath}'."
end
end
instance = new(project, filepath, manifest)
instance.evaluate_file(filepath)
instance.load_dependencies
# Add the loaded component to the library
project.library.component_added(instance)
instance
end
end
private
#
# The list of softwares loaded thus far.
#
# @return [Hash<String, Software>]
#
def loaded_softwares
@loaded_softwares ||= {}
end
end
include Cleanroom
include Digestable
include Logging
include NullArgumentable
include Sugarable
attr_reader :manifest
#
# Create a new software object.
#
# @param [Project] project
# the Omnibus project that instantiated this software definition
# @param [String] filepath
# the path to where this software definition lives on disk
# @param [String] manifest
# the user-supplied software manifest
#
# @return [Software]
#
def initialize(project, filepath = nil, manifest=nil)
unless project.is_a?(Project)
raise ArgumentError,
"`project' must be a kind of `Omnibus::Project', but was `#{project.class.inspect}'!"
end
# Magical methods
@filepath = filepath
@project = project
@manifest = manifest
# Overrides
@overrides = NULL
end
def manifest_entry
@manifest_entry ||= if manifest
log.info(log_key) {"Using user-supplied manifest entry for #{name}"}
manifest.entry_for(name)
else
log.info(log_key) {"Resolving manifest entry for #{name}"}
to_manifest_entry
end
end
#
# Compare two software projects (by name).
#
# @return [1, 0, -1]
#
def <=>(other)
self.name <=> other.name
end
#
# @!group DSL methods
#
# The following DSL methods are available from within software definitions.
# --------------------------------------------------
#
# The project that created this software.
#
# @return [Project]
#
def project
@project
end
expose :project
#
# **[Required]** Sets or retreives the name of the software.
#
# @raise [MissingRequiredAttribute]
#
# @example
# name 'libxslt'
#
# @param [String] val
# name of the Software
#
# @return [String]
#
def name(val = NULL)
if null?(val)
@name || raise(MissingRequiredAttribute.new(self, :name, 'libxslt'))
else
@name = val
end
end
expose :name
#
# Sets the description of the software.
#
# @example
# description 'Installs libxslt'
#
# @param [String] val
# the description of the software
#
# @return [String]
#
def description(val = NULL)
if null?(val)
@description
else
@description = val
end
end
expose :description
#
# Add a software dependency to this software.
#
# @example
# dependency 'libxml2'
# dependency 'libpng'
#
# @param [String] val
# the name of a software dependency
#
# @return [Array<String>]
# the list of current dependencies
#
def dependency(val)
dependencies << val
dependencies.dup
end
expose :dependency
#
# Set or retrieve the source for the software.
#
# @raise [InvalidValue]
# if the parameter is not a Hash
# @raise [InvalidValue]
# if the hash includes extraneous keys
# @raise [InvalidValue]
# if the hash declares keys that cannot work together
# (like +:git+ and +:path+)
#
# @example
# source url: 'http://ftp.gnu.org/gnu/autoconf/autoconf-2.68.tar.gz',
# md5: 'c3b5247592ce694f7097873aa07d66fe'
#
# @param [Hash<Symbol, String>] val
# a single key/pair that defines the kind of source and a path specifier
#
# @option val [String] :git (nil)
# a git URL
# @option val [String] :url (nil)
# general URL
# @option val [String] :path (nil)
# a fully-qualified local file system path
# @option val [String] :md5 (nil)
# the MD5 checksum of the downloaded artifact
# @option val [String] :sha1 (nil)
# the SHA1 checksum of the downloaded artifact
# @option val [String] :sha256 (nil)
# the SHA256 checksum of the downloaded artifact
# @option val [String] :sha512 (nil)
# the SHA512 checksum of the downloaded artifact
# @option val [String] :cookie (nil)
# a cookie to set
# @option val [String] :warning (nil)
# a warning message to print when downloading
# @option val [Boolean] :submodules (false)
# clone git submodules
#
# If multiple checksum types are provided, only the strongest will be used.
#
# @return [Hash]
#
def source(val = NULL)
unless null?(val)
unless val.is_a?(Hash)
raise InvalidValue.new(:source,
"be a kind of `Hash', but was `#{val.class.inspect}'")
end
extra_keys = val.keys - [:git, :path, :url, :md5, :sha1, :sha256, :sha512,
:cookie, :warning, :unsafe, :options, :submodules]
unless extra_keys.empty?
raise InvalidValue.new(:source,
"only include valid keys. Invalid keys: #{extra_keys.inspect}")
end
duplicate_keys = val.keys & [:git, :path, :url]
unless duplicate_keys.size < 2
raise InvalidValue.new(:source,
"not include duplicate keys. Duplicate keys: #{duplicate_keys.inspect}")
end
@source ||= {}
@source.merge!(val)
end
apply_overrides(:source)
end
expose :source
#
# Set or retrieve the {#default_version} of the software to build.
#
# @example
# default_version '1.2.3'
#
# @param [String] val
# the default version to set for the software.
# For a git source, the default version may be a git ref (e.g. tag, branch name, or sha).
#
# @return [String]
#
def default_version(val = NULL)
if null?(val)
@version
else
@version = val
end
end
expose :default_version
#
# Evaluate a block only if the version matches.
#
# @example
# version '1.2.3' do
# source path: '/local/path/to/software-1.2.3'
# end
#
# @param [String] val
# the version of the software
# @param [Proc] block
# the block to run if the version we are building matches the argument
#
# @return [String, Proc]
#
def version(val = NULL, &block)
if block_given?
if val.equal?(NULL)
raise InvalidValue.new(:version,
'pass a block when given a version argument')
else
if val == apply_overrides(:version)
block.call
end
end
end
apply_overrides(:version)
end
expose :version
#
# Add a file to the healthcheck whitelist.
#
# @example
# whitelist_file '/path/to/file'
#
# @param [String, Regexp] file
# the name of a file to ignore in the healthcheck
#
# @return [Array<String>]
# the list of currently whitelisted files
#
def whitelist_file(file)
file = Regexp.new(file) unless file.kind_of?(Regexp)
whitelist_files << file
whitelist_files.dup
end
expose :whitelist_file
#
# The relative path inside the extracted tarball.
#
# @example
# relative_path 'example-1.2.3'
#
# @param [String] val
# the relative path inside the tarball
#
# @return [String]
#
def relative_path(val = NULL)
if null?(val)
@relative_path || name
else
@relative_path = val
end
end
expose :relative_path
#
# The path where the extracted software lives.
#
# @return [String]
#
def project_dir
File.expand_path("#{Config.source_dir}/#{relative_path}")
end
expose :project_dir
#
# The path where the software will be built.
#
# @return [String]
#
def build_dir
File.expand_path("#{Config.build_dir}/#{project.name}")
end
expose :build_dir
#
# The directory where this software is installed on disk.
#
# @example
# { 'PATH' => "#{install_dir}/embedded/bin:#{ENV["PATH"]}", }
#
# @return [String]
#
def install_dir
@project.install_dir
end
expose :install_dir
#
# Define a series of {Builder} DSL commands that are executed to build the
# software.
#
# @see Builder
#
# @param [Proc] block
# a block of build commands
#
# @return [Proc]
# the build block
#
def build(&block)
builder.evaluate(&block)
end
expose :build
#
# The path to the downloaded file from a NetFetcher.
#
# @deprecated There is no replacement for this DSL method
#
def project_file
if fetcher && fetcher.is_a?(NetFetcher)
log.deprecated(log_key) do
"project_file (DSL). This is a property of the NetFetcher and will " \
"not be publically exposed in the next major release. In general, " \
"you should not be using this method in your software definitions " \
"as it is an internal implementation detail of the NetFetcher. If " \
"you disagree with this statement, you should open an issue on the " \
"Omnibus repository on GitHub an explain your use case. For now, " \
"I will return the path to the downloaded file on disk, but please " \
"rethink the problem you are trying to solve :)."
end
fetcher.downloaded_file
else
log.warn(log_key) do
"Cannot retrieve a `project_file' for software `#{name}'. This " \
"attribute is actually an internal representation that is unique " \
"to the NetFetcher class and requires the use of a `source' " \
"attribute that is declared using a `:url' key. For backwards-" \
"compatability, I will return `nil', but this is most likely not " \
"your desired behavior."
end
nil
end
end
expose :project_file
#
# Add standard compiler flags to the environment hash to produce omnibus
# binaries (correct RPATH, etc).
#
# Supported options:
# :aix => :use_gcc force using gcc/g++ compilers on aix
#
# @param [Hash] env
# @param [Hash] opts
#
# @return [Hash]
#
def with_standard_compiler_flags(env = {}, opts = {})
env ||= {}
opts ||= {}
compiler_flags =
case Ohai['platform']
when "aix"
{
"CC" => "xlc_r -q64",
"CXX" => "xlC_r -q64",
"CFLAGS" => "-q64 -I#{install_dir}/embedded/include -D_LARGE_FILES -O",
"LDFLAGS" => "-q64 -L#{install_dir}/embedded/lib -Wl,-blibpath:#{install_dir}/embedded/lib:/usr/lib:/lib",
"LD" => "ld -b64",
"OBJECT_MODE" => "64",
"ARFLAGS" => "-X64 cru",
}
when "mac_os_x"
{
"LDFLAGS" => "-L#{install_dir}/embedded/lib",
"CFLAGS" => "-I#{install_dir}/embedded/include",
}
when "solaris2"
{
# this override is due to a bug in libtool documented here:
# http://lists.gnu.org/archive/html/bug-libtool/2005-10/msg00004.html
"CC" => "gcc -static-libgcc",
"LDFLAGS" => "-R#{install_dir}/embedded/lib -L#{install_dir}/embedded/lib -static-libgcc",
"CFLAGS" => "-I#{install_dir}/embedded/include",
}
when "freebsd"
freebsd_flags = {
"LDFLAGS" => "-L#{install_dir}/embedded/lib",
"CFLAGS" => "-I#{install_dir}/embedded/include",
}
# Clang became the default compiler in FreeBSD 10+
if Ohai['os_version'].to_i >= 1000024
freebsd_flags.merge!(
"CC" => "clang",
"CXX" => "clang++",
)
end
freebsd_flags
else
{
"LDFLAGS" => "-Wl,-rpath,#{install_dir}/embedded/lib -L#{install_dir}/embedded/lib",
"CFLAGS" => "-I#{install_dir}/embedded/include",
}
end
# merge LD_RUN_PATH into the environment. most unix distros will fall
# back to this if there is no LDFLAGS passed to the linker that sets
# the rpath. the LDFLAGS -R or -Wl,-rpath will override this, but in
# some cases software may drop our LDFLAGS or think it knows better
# and edit them, and we *really* want the rpath setting and do know
# better. in that case LD_RUN_PATH will probably survive whatever
# edits the configure script does
extra_linker_flags = {
"LD_RUN_PATH" => "#{install_dir}/embedded/lib"
}
if solaris2?
# in order to provide compatibility for earlier versions of libc on solaris 10,
# we need to specify a mapfile that restricts the version of system libraries
# used. See http://docs.oracle.com/cd/E23824_01/html/819-0690/chapter5-1.html
# for more information
# use the mapfile if it exists, otherwise ignore it
ld_options = "-R#{install_dir}/embedded/lib"
mapfile_path = File.expand_path(Config.solaris_linker_mapfile, Config.project_root)
ld_options << " -M #{mapfile_path}" if File.exist?(mapfile_path)
# solaris linker can also use LD_OPTIONS, so we throw the kitchen sink against
# the linker, to find every way to make it use our rpath. This is also required
# to use the aforementioned mapfile.
extra_linker_flags.merge!(
{
"LD_OPTIONS" => ld_options
}
)
end
env.merge(compiler_flags).
merge(extra_linker_flags).
# always want to favor pkg-config from embedded location to not hose
# configure scripts which try to be too clever and ignore our explicit
# CFLAGS and LDFLAGS in favor of pkg-config info
merge({"PKG_CONFIG_PATH" => "#{install_dir}/embedded/lib/pkgconfig"}).
# Set default values for CXXFLAGS and CPPFLAGS.
merge('CXXFLAGS' => compiler_flags['CFLAGS']).
merge('CPPFLAGS' => compiler_flags['CFLAGS'])
end
expose :with_standard_compiler_flags
#
# A PATH variable format string representing the current PATH with the
# project's embedded/bin directory prepended. The correct path separator
# for the platform is used to join the paths.
#
# @param [Hash] env
#
# @return [Hash]
#
def with_embedded_path(env = {})
path_value = prepend_path("#{install_dir}/bin", "#{install_dir}/embedded/bin")
env.merge(path_key => path_value)
end
expose :with_embedded_path
#
# A PATH variable format string representing the current PATH with the
# given path prepended. The correct path separator
# for the platform is used to join the paths.
#
# @param [Array<String>] paths
#
# @return [String]
#
def prepend_path(*paths)
path_values = Array(paths)
path_values << ENV[path_key]
separator = File::PATH_SEPARATOR || ':'
path_values.join(separator)
end
expose :prepend_path
#
# A proxy method to the underlying Ohai system.
#
# @example
# ohai['platform_family']
#
# @return [Ohai]
#
def ohai
Ohai
end
expose :ohai
#
# @!endgroup
# --------------------------------------------------
#
# @!group Public API
#
# In addition to the DSL methods, the following methods are considered to
# be the "public API" for a software.
# --------------------------------------------------
#
# Recursively load all the dependencies for this software.
#
# @return [true]
#
def load_dependencies
dependencies.each do |dependency|
Software.load(project, dependency, manifest)
end
true
end
#
# The builder object for this software definition.
#
# @return [Builder]
#
def builder
@builder ||= Builder.new(self)
end
def to_manifest_entry
Omnibus::ManifestEntry.new(name, {
source_type: source_type,
described_version: version,
locked_version: Fetcher.resolve_version(version, source),
locked_source: source})
end
#
# Fetch the software definition using the appropriate fetcher. This may
# fetch the software from a local path location, git location, or download
# the software from a remote URL (HTTP(s)/FTP)
#
# @return [true, false]
# true if the software was fetched, false if it was cached
#
def fetch
if fetcher.fetch_required?
fetcher.fetch
true
else
false
end
end
#
# The list of software dependencies for this software. These is the software
# that comprises your software, and is distinct from runtime dependencies.
#
# @see #dependency
#
# @param [Array<String>]
#
# @return [Array<String>]
#
def dependencies
@dependencies ||= []
end
#
# The list of files to ignore in the healthcheck.
#
# @return [Array<String>]
#
def whitelist_files
@whitelist_files ||= []
end
#
# The path (on disk) where this software came from. Warning: this can be
# +nil+ if a software was dynamically created!
#
# @return [String, nil]
#
def filepath
@filepath
end
#
# The repo-level and project-level overrides for the software.
#
# @return [Hash]
#
def overrides
if null?(@overrides)
# lazily initialized because we need the 'name' to be parsed first
@overrides = {}
@overrides = project.overrides[name.to_sym].dup if project.overrides[name.to_sym]
end
@overrides
end
#
# Determine if this software version overridden externally, relative to the
# version declared within the software DSL file?
#
# @return [true, false]
#
def overridden?
# NOTE: using instance variables to bypass accessors that enforce overrides
@overrides.key?(:version) && (@overrides[:version] != @version)
end
#
# @!endgroup
# --------------------------------------------------
# @todo see comments on {Omnibus::Fetcher#without_caching_for}
def version_guid
fetcher.version_guid
end
# Returns the version to be used in cache.
def version_for_cache
@version_for_cache ||= if fetcher.version_for_cache
fetcher.version_for_cache
elsif version
version
else
log.warn(log_key) do
"No version given! This is probably a bad thing. I am going to " \
"assume the version `0.0.0', but that is most certainly not your " \
"desired behavior. If git caching seems off, this is probably why."
end
'0.0.0'
end
end
#
# The fetcher for this software
#
# @return [Fetcher]
#
def fetcher
@fetcher ||= Fetcher.fetcher_class_for_source(self.source).new(manifest_entry, project_dir, build_dir)
end
#
# The type of source specified for this software defintion.
#
# @return [String]
#
def source_type
if source
if source[:url]
:url
elsif source[:git]
:git
elsif source[:path]
:path
end
else
:project_local
end
end
#
# Build the software package. If git caching is turned on (see
# {Config#use_git_caching}), the build is restored according to the
# documented restoration procedure in the git cache. If the build cannot
# be restored (if the tag does not exist), the actual build steps are
# executed.
#
# @return [true]
#
def build_me
if Config.use_git_caching
if project.dirty?
log.info(log_key) do
"Building because `#{project.culprit.name}' dirtied the cache"
end
execute_build
elsif git_cache.restore
log.info(log_key) { "Restored from cache" }
else
log.info(log_key) { "Could not restore from cache" }
execute_build
project.dirty!(self)
end
else
log.debug(log_key) { "Forcing build because git caching is off" }
execute_build
end
project.build_version_dsl.resolve(self)
true
end
#
# The unique "hash" for this software.
#
# @see (#shasum)
#
# @return [Fixnum]
#
def hash
shasum.hash
end
#
# Determine if two softwares are identical.
#
# @param [Software] other
#
# @return [true, false]
#
def ==(other)
self.hash == other.hash
end
alias_method :eql?, :==
#
# The unique SHA256 for this sofware definition.
#
# A software is defined by its parent project's shasum, its own name, its
# version_for_cache, and any overrides (as JSON). Additionally, if provided,
# the actual file contents are included in the SHA to ensure uniqueness.
#
# @return [String]
#
def shasum
@shasum ||= begin
digest = Digest::SHA256.new
update_with_string(digest, project.shasum)
update_with_string(digest, builder.shasum)
update_with_string(digest, name)
update_with_string(digest, version_for_cache)
update_with_string(digest, JSON.fast_generate(overrides))
if filepath && File.exist?(filepath)
update_with_file_contents(digest, filepath)
else
update_with_string(digest, '<DYNAMIC>')
end
digest.hexdigest
end
end
private
#
# The git caching implementation for this software.
#
# @return [GitCache]
#
def git_cache
@git_cache ||= GitCache.new(self)
end
#
# The proper platform-specific "$PATH" key.
#
# @return [String]
#
def path_key
# The ruby devkit needs ENV['Path'] set instead of ENV['PATH'] because
# $WINDOWSRAGE, and if you don't set that your native gem compiles
# will fail because the magic fixup it does to add the mingw compiler
# stuff won't work.
#
# Turns out there is other build environments that only set ENV['PATH'] and if we
# modify ENV['Path'] then it ignores that. So, we scan ENV and returns the first
# one that we find.
#
if Ohai['platform'] == 'windows'
ENV.keys.grep(/\Apath\Z/i).first
else
'PATH'
end
end
#
# Apply overrides in the @overrides hash that mask instance variables
# that are set by parsing the DSL
#
def apply_overrides(attr)
val = instance_variable_get(:"@#{attr}")
if val.is_a?(Hash) || overrides[attr].is_a?(Hash)
val ||= {}
override = overrides[attr] || {}
val.merge(override)
else
overrides[attr] || val
end
end
#
# Actually build this software, executing the steps provided in the
# {#build} block and dirtying the cache.
#
# @return [void]
#
def execute_build
fetcher.clean
builder.build
if Config.use_git_caching
git_cache.incremental
log.info(log_key) { 'Dirtied the cache' }
end
end
#
# The log key for this software, including its name.
#
# @return [String]
#
def log_key
@log_key ||= "#{super}: #{name}"
end
def to_s
"#{name}[#{filepath}]"
end
end
end
| 27.442765 | 118 | 0.582874 |
ed31b14d03bbf7df2af4179d3ec5f09c51d8c563 | 305 | # frozen_string_literal: true
require_relative './binary_operator_base'
module RpnCalc
  module Calculators
    module Operators
      # Divides the first operand by the second.
      #
      # NOTE(review): the previous comment ("allows to substract second
      # operand from first") was a copy-paste from the subtraction operator.
      class DivisionOperator < BinaryOperatorBase
        # The operator symbol for this operation (`:/`); presumably applied to
        # the operands by BinaryOperatorBase — confirm in the base class.
        def sign
          :/
        end
      end
    end
  end
end
| 17.941176 | 53 | 0.67541 |
e946306c907737a151f89e17e39bd0f12cb59092 | 695 | class Api::Users::Communities::MutesController < ApplicationController
before_action :set_user, only: [:index]
before_action -> { authorize(Api::Users::Communities::MutesPolicy, @user) }
def index
query = CommunitiesQuery.new.with_user_muted(@user)
communities = paginate(
query,
attributes: [:url],
order: :asc,
limit: 25,
after: params[:after].present? ? CommunitiesQuery.new.with_url(params[:after]).take : nil
)
render json: CommunitySerializer.serialize(communities)
end
private
def set_user
@user ||= UsersQuery.new.with_username(params[:user_id]).take!
end
def pundit_user
Context.new(current_user, nil)
end
end | 25.740741 | 95 | 0.693525 |
26954ce2b91e400b1eac305cf90f5f4edc38c73a | 568 | cask "media-converter" do
version "2.0.10"
sha256 "d5a7db000e8106e5892daad84ee7dea82651aa9662df5b4c6da0b943afae03c1"
# downloads.sourceforge.net/media-converter/ was verified as official when first introduced to the cask
url "https://downloads.sourceforge.net/media-converter/media-converter/#{version}/media-converter-#{version}.zip"
appcast "https://sourceforge.net/projects/media-converter/rss?path=/media-converter"
name "Media Converter"
homepage "https://media-converter.sourceforge.io/"
app "Media Converter.localized/Media Converter.app"
end
| 43.692308 | 115 | 0.792254 |
f83940e2c3d319f9233af89faf82d549db04d6a5 | 194 | require 'spec_helper'
describe Ripify do
  it 'has a version number' do
    expect(Ripify::VERSION).not_to be nil
  end
  # NOTE(review): this is the unmodified `bundle gem` scaffold example — it
  # always fails. Replace it with a real spec (or delete it) before relying
  # on the suite.
  it 'does something useful' do
    expect(false).to eq(true)
  end
end
| 16.166667 | 41 | 0.701031 |
e9b3533943414257a39e82b619dd8f345ddcf7c4 | 3,087 | #
# Author:: Joshua Timberman (<[email protected]>)
# Copyright:: Copyright (c) 2009 Opscode, Inc
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/resource'
class Chef
  class Resource
    # Declares a filesystem mount: the device, the directory to mount it on,
    # the filesystem type and the fstab bookkeeping fields. Providers consume
    # these attributes to mount/umount/remount the filesystem and to enable
    # (persist) or disable (remove) its fstab entry.
    class Mount < Chef::Resource

      def initialize(name, run_context=nil)
        super
        @resource_name = :mount
        @mount_point = name                # the resource name is the mount point path
        @device = nil
        @device_type = :device             # how @device identifies the device (:device, :label, :uuid)
        @fstype = "auto"                   # let the OS probe the filesystem type by default
        @options = ["defaults"]
        @dump = 0                          # fstab "dump" field
        @pass = 2                          # fstab "pass" (fsck order) field; 2 = non-root filesystems
        @mounted = false
        @enabled = false
        @action = :mount
        @supports = { :remount => false }  # assume remount is unsupported unless declared
        @allowed_actions.push(:mount, :umount, :remount, :enable, :disable)
      end

      # Directory the device is mounted on (defaults to the resource name).
      def mount_point(arg=nil)
        set_or_return(
          :mount_point,
          arg,
          :kind_of => [ String ]
        )
      end

      # Device to mount; its interpretation depends on #device_type.
      def device(arg=nil)
        set_or_return(
          :device,
          arg,
          :kind_of => [ String ]
        )
      end

      # How #device names the device. Accepts a String for convenience and
      # coerces it to a Symbol before validation.
      def device_type(arg=nil)
        real_arg = arg.kind_of?(String) ? arg.to_sym : arg
        set_or_return(
          :device_type,
          real_arg,
          :equal_to => [ :device, :label, :uuid ]
        )
      end

      # Filesystem type (e.g. "ext4", "nfs"); "auto" lets the OS decide.
      def fstype(arg=nil)
        set_or_return(
          :fstype,
          arg,
          :kind_of => [ String ]
        )
      end

      # Mount options as an array of strings. A comma- and/or
      # whitespace-separated string ("rw,noatime" or "rw, noatime") is
      # normalized to an array.
      def options(arg=nil)
        if arg.is_a?(String)
          # Split on any run of commas/whitespace. The previous
          # implementation (gsub(/,/, ' ').split(/ /)) produced empty-string
          # elements for inputs like "ro, noatime" and never split on tabs.
          converted_arg = arg.split(/[\s,]+/)
        else
          converted_arg = arg
        end
        set_or_return(
          :options,
          converted_arg,
          :kind_of => [ Array ]
        )
      end

      # fstab "dump" field. NOTE(review): FalseClass is accepted, presumably
      # to mean "omit the field" — confirm against the providers.
      def dump(arg=nil)
        set_or_return(
          :dump,
          arg,
          :kind_of => [ Integer, FalseClass ]
        )
      end

      # fstab "pass" (fsck order) field; FalseClass as for #dump.
      def pass(arg=nil)
        set_or_return(
          :pass,
          arg,
          :kind_of => [ Integer, FalseClass ]
        )
      end

      # Current-state flag set by providers: is the filesystem mounted?
      def mounted(arg=nil)
        set_or_return(
          :mounted,
          arg,
          :kind_of => [ TrueClass, FalseClass ]
        )
      end

      # Current-state flag set by providers: is the mount in the fstab?
      def enabled(arg=nil)
        set_or_return(
          :enabled,
          arg,
          :kind_of => [ TrueClass, FalseClass ]
        )
      end

      # Declare provider capabilities (currently :remount). Accepts either an
      # Array of capability names (each marked true) or a Hash. With no/empty
      # args it acts as a reader.
      # NOTE(review): the Array branch returns the argument array rather than
      # @supports — callers appear to use this only for its side effect.
      def supports(args={})
        if args.is_a? Array
          args.each { |arg| @supports[arg] = true }
        elsif args.any?
          @supports = args
        else
          @supports
        end
      end
    end
  end
end
| 22.866667 | 75 | 0.514739 |
e9164bddc845c720b83dc6e7786a1197c6eadfa1 | 1,879 | require 'fiddle'
require 'fileutils'
require 'tempfile'
if RUBY_ENGINE != 'natalie'
begin
if STDOUT.tty?
require 'readline'
end
rescue LoadError
end
end
unless defined?(Readline)
  # Minimal stand-in used when the real Readline extension could not be
  # loaded (or was skipped): prints the prompt, reads one line from stdin,
  # and terminates the process on EOF. The history flag is ignored.
  class Readline
    def self.readline(prompt, _history)
      print(prompt)
      line = gets
      exit unless line
      line
    end
  end
end
module Natalie
  # Interactive read-eval-print loop for Natalie: each complete expression is
  # compiled to a native shared object and executed in-process via Fiddle.
  class Repl
    # Runs the REPL until EOF on stdin.
    def go
      GC.disable # compiled snippets are also told to disable GC below -- presumably to avoid Ruby GC touching dlopen'ed state; TODO confirm
      env = nil            # opaque top-level env pointer, built once by the first compiled library
      vars = {}            # variable context threaded through successive compiles
      repl_num = 0         # monotonically increasing id per compiled snippet
      multi_line_expr = [] # accumulates input lines until they parse as a full expression
      loop do
        break unless (line = get_line)
        begin
          multi_line_expr << line
          ast = Natalie::Parser.new(multi_line_expr.join("\n"), '(repl)').ast
        rescue Parser::IncompleteExpression
          # Expression not finished yet -- keep buffering lines.
          next
        else
          multi_line_expr = []
        end
        next if ast == s(:block) # nothing to evaluate (parse produced a bare block sexp)
        # Wrap the final expression so its #inspect is printed and the value
        # is also bound to `_` (IRB-style last-result variable).
        last_node = ast.pop
        ast << last_node.new(:call, nil, 'puts', s(:call, s(:lasgn, :_, last_node), 'inspect'))
        temp = Tempfile.create('natalie.so')
        compiler = Compiler.new(ast, '(repl)')
        compiler.repl = true
        compiler.repl_num = (repl_num += 1)
        compiler.vars = vars
        compiler.out_path = temp.path
        compiler.compile
        # Carry the (possibly extended) variable table into the next line.
        vars = compiler.context[:vars]
        # Load the freshly built library and evaluate it against the shared env.
        lib = Fiddle.dlopen(temp.path)
        Fiddle::Function.new(lib['GC_disable'], [], Fiddle::TYPE_VOIDP).call
        env ||= Fiddle::Function.new(lib['build_top_env'], [], Fiddle::TYPE_VOIDP).call
        eval_func = Fiddle::Function.new(lib['EVAL'], [Fiddle::TYPE_VOIDP], Fiddle::TYPE_VOIDP)
        eval_func.call(env)
        File.unlink(temp.path)
        # Remove the .dSYM debug bundle some toolchains leave beside the library.
        clang_dir = temp.path + ".dSYM"
        FileUtils.rm_rf(clang_dir) if File.directory?(clang_dir)
      end
    end

    private

    # Reads one line via Readline (real or shim); returns nil on EOF.
    def get_line
      line = Readline.readline('nat> ', true)
      if line
        line
      else
        puts # move past the prompt so the shell resumes on a fresh line
        nil
      end
    end
  end
end
| 23.78481 | 95 | 0.574242 |
184001635e82ee6bd9541c7e4884a6163fd1bee9 | 4,384 | require "spec_helper"
# Parsing behaviour of Berkshelf::Lockfile across the lockfile fixtures:
# legacy 2.0 format, blank/empty files, and a current-format lockfile.
describe Berkshelf::Lockfile do
  before do
    # Each context supplies its fixture path via let(:fixture); stub the
    # lockfile's filepath to point at it before parsing.
    allow(subject).to receive(:filepath).and_return(fixture) if defined?(fixture)
    subject.parse
  end

  context "with an old 2.0 lockfile format" do
    let(:fixture) { fixtures_path.join("lockfiles/2.0.lock") }

    it "does not blow up" do
      expect { subject }.to_not raise_error
    end

    it "warns the user" do
      expect(Berkshelf.formatter).to receive(:warn)
      subject.parse
    end

    it "sets the dependencies" do
      expect(subject).to have_dependency("apt")
      expect(subject).to have_dependency("jenkins")
      expect(subject).to have_dependency("runit")
      expect(subject).to have_dependency("yum")

      # Legacy format carries no constraints, so they default to >= 0.0.0.
      expect(subject.find("apt").version_constraint.to_s).to eq(">= 0.0.0")
      expect(subject.find("jenkins").version_constraint.to_s).to eq(">= 0.0.0")
      expect(subject.find("runit").version_constraint.to_s).to eq(">= 0.0.0")
      expect(subject.find("yum").version_constraint.to_s).to eq(">= 0.0.0")

      expect(subject.find("apt").locked_version.to_s).to eq("2.3.6")
      expect(subject.find("jenkins").locked_version.to_s).to eq("2.0.3")
      expect(subject.find("runit").locked_version.to_s).to eq("1.5.8")
      expect(subject.find("yum").locked_version.to_s).to eq("3.0.6")
    end

    it "sets the graph" do
      graph = subject.graph

      expect(graph).to have_lock("apt")
      expect(graph).to have_lock("jenkins")
      expect(graph).to have_lock("runit")
      expect(graph).to have_lock("yum")

      expect(graph.find("apt").version).to eq("2.3.6")
      expect(graph.find("jenkins").version).to eq("2.0.3")
      expect(graph.find("runit").version).to eq("1.5.8")
      expect(graph.find("yum").version).to eq("3.0.6")
    end
  end

  context "with a blank lockfile" do
    let(:fixture) { fixtures_path.join("lockfiles/blank.lock") }

    it "warns the user" do
      expect(Berkshelf.formatter).to receive(:warn)
      subject.parse
    end

    it "sets an empty list of dependencies" do
      expect(subject.dependencies).to be_empty
    end

    it "sets an empty graph" do
      expect(subject.graph.locks).to be_empty
    end
  end

  context "with an empty lockfile" do
    let(:fixture) { fixtures_path.join("lockfiles/empty.lock") }

    # An empty (as opposed to blank) lockfile parses cleanly without warning.
    it "does not warn the user" do
      expect(Berkshelf.formatter).to_not receive(:warn)
      subject.parse
    end

    it "sets an empty list of dependencies" do
      expect(subject.dependencies).to be_empty
    end

    it "sets an empty graph" do
      expect(subject.graph.locks).to be_empty
    end
  end

  context "with real lockfile" do
    let(:fixture) { fixtures_path.join("lockfiles/default.lock") }

    it "sets the dependencies" do
      expect(subject).to have_dependency("apt")
      expect(subject).to have_dependency("jenkins")

      expect(subject.find("apt").version_constraint.to_s).to eq("~> 2.0")
      expect(subject.find("jenkins").version_constraint.to_s).to eq("~> 2.0")

      expect(subject.find("apt").locked_version.to_s).to eq("2.3.6")
      expect(subject.find("jenkins").locked_version.to_s).to eq("2.0.3")
    end

    it "sets the graph" do
      graph = subject.graph

      expect(graph).to have_lock("apt")
      expect(graph).to have_lock("build-essential")
      expect(graph).to have_lock("jenkins")
      expect(graph).to have_lock("runit")
      expect(graph).to have_lock("yum")
      expect(graph).to have_lock("yum-epel")

      expect(graph.find("apt").version).to eq("2.3.6")
      expect(graph.find("build-essential").version).to eq("1.4.2")
      expect(graph.find("jenkins").version).to eq("2.0.3")
      expect(graph.find("runit").version).to eq("1.5.8")
      expect(graph.find("yum").version).to eq("3.0.6")
      expect(graph.find("yum-epel").version).to eq("0.2.0")
    end

    it "sets the graph item dependencies" do
      jenkins = subject.graph.find("jenkins")
      runit = subject.graph.find("runit")

      expect(jenkins.dependencies).to include("apt" => "~> 2.0")
      expect(jenkins.dependencies).to include("runit" => "~> 1.5")
      expect(jenkins.dependencies).to include("yum" => "~> 3.0")

      expect(runit.dependencies).to include("build-essential" => ">= 0.0.0")
      expect(runit.dependencies).to include("yum" => "~> 3.0")
      expect(runit.dependencies).to include("yum-epel" => ">= 0.0.0")
    end
  end
end
| 32.962406 | 81 | 0.645985 |
1166dd7522f5ca2033473eb5c551f05c8d8a0a4a | 319 | # frozen_string_literal: true
namespace :start do
  desc 'Start dev server'
  # Boots the app via foreman with the development Procfile.
  # NOTE: exec replaces the rake process, so nothing after it runs.
  task :development do
    exec 'foreman start -f Procfile.dev'
  end

  desc 'Start production server'
  # Runs the npm postinstall build first, then boots foreman with the
  # default Procfile.
  task :production do
    exec 'NPM_CONFIG_PRODUCTION=true npm run postinstall && foreman start'
  end
end

# Bare `rake start` defaults to the development server.
task start: 'start:development'
| 22.785714 | 74 | 0.742947 |
d59b46d5f037872cd4a0e2f3d3a9fcb576a75601 | 193 | require 'test_helper'
# Integration test for ReviewRequestsController.
class ReviewRequestsControllerTest < ActionDispatch::IntegrationTest
  # NOTE(review): the test name says "index" but the request targets the
  # +new+ action (new_review_request_url) — confirm which was intended.
  test "should get index" do
    get new_review_request_url
    assert_response :success
  end
end
| 17.545455 | 68 | 0.792746 |
7a11ef77e605f4bf1d23197fa6943111826e3a37 | 1,146 | # Top level module for TZInfo.
# Top-level namespace for the TZInfo library; the requires below populate it.
module TZInfo
end
require 'tzinfo/ruby_core_support'
require 'tzinfo/offset_rationals'
require 'tzinfo/time_or_datetime'
require 'tzinfo/timezone_definition'
require 'tzinfo/timezone_offset'
require 'tzinfo/timezone_transition'
require 'tzinfo/transition_rule'
require 'tzinfo/annual_rules'
require 'tzinfo/timezone_transition_definition'
require 'tzinfo/timezone_index_definition'
require 'tzinfo/timezone_info'
require 'tzinfo/data_timezone_info'
require 'tzinfo/linked_timezone_info'
require 'tzinfo/transition_data_timezone_info'
require 'tzinfo/zoneinfo_timezone_info'
require 'tzinfo/data_source'
require 'tzinfo/ruby_data_source'
require 'tzinfo/posix_time_zone_parser'
require 'tzinfo/zoneinfo_data_source'
require 'tzinfo/timezone_period'
require 'tzinfo/timezone'
require 'tzinfo/info_timezone'
require 'tzinfo/data_timezone'
require 'tzinfo/linked_timezone'
require 'tzinfo/timezone_proxy'
require 'tzinfo/country_index_definition'
require 'tzinfo/country_info'
require 'tzinfo/ruby_country_info'
require 'tzinfo/zoneinfo_country_info'
require 'tzinfo/country'
require 'tzinfo/country_timezone'
| 26.045455 | 47 | 0.847295 |
08460aa2bf002b23a70167f8f6c819d2e500139a | 779 | # frozen_string_literal: true
require 'test_helper'
# Integration tests for streaming transcoded audio from a FLAC fixture.
class TranscodedStreamControllerTest < ActionDispatch::IntegrationTest
  setup do
    # Point the app's media path at the bundled fixture files.
    Setting.update(media_path: Rails.root.join('test', 'fixtures', 'files'))
  end

  test 'should get new stream for transcode format' do
    assert_login_access(url: new_transcoded_stream_url(song_id: songs(:flac_sample).id)) do
      assert_response :success
    end
  end

  test 'should get transcoded data' do
    assert_login_access(url: new_transcoded_stream_url(song_id: songs(:flac_sample).id)) do
      # Write the streamed body to a temp file and check the resulting
      # bitrate: the test expects a 128 kbps transcode.
      create_tmp_file(format: 'mp3') do |tmp_file_path|
        File.open(tmp_file_path, 'w') do |file|
          file.write response.body
        end

        assert_equal 128, audio_bitrate(tmp_file_path)
      end
    end
  end
end
| 27.821429 | 91 | 0.720154 |
1c46b150f6c6c1f1d498a77b91c93ed369563dc8 | 593 | Pod::Spec.new do |s|
s.name = 'AngleGradientLayer'
s.version = '1.2.1'
s.summary = 'Objective-C angle gradients for iOS.'
s.homepage = 'https://github.com/paiv/AngleGradientLayer'
s.screenshots = 'https://raw.github.com/paiv/AngleGradientLayer/master/screenshot.png'
s.license = 'MIT'
s.author = 'Pavel Ivashkov'
s.source = { :git => 'https://github.com/paiv/AngleGradientLayer.git', :tag => "v#{s.version}" }
s.platform = :ios, '3.2'
s.source_files = 'AngleGradient/*.{h,m}'
s.frameworks = 'CoreGraphics', 'QuartzCore'
end
| 39.533333 | 104 | 0.623946 |
39a0a3b8846ad4ed216ddf25cd4a7b997f3a179a | 5,880 | #Copyright:: Copyright (c) 2012-2014 Chef Software, Inc.
#
# All Rights Reserved
#
require "/opt/opscode/embedded/service/omnibus-ctl/open_source_chef12_upgrade"
require 'optparse'
require 'ostruct'
add_command_under_category "upgrade", "general", "Upgrade your private chef installation.", 2 do
  # Since this is evaled, need to have methods first so they can be picked up

  # Parses command-line args into @options (an OpenStruct), seeding the
  # documented defaults first. These options only matter for open source
  # Chef 11 -> Chef 12 upgrades; Enterprise upgrades need none of them.
  def parse(args)
    @options = OpenStruct.new

    # Define defaults
    @options.skip_confirmation = false
    @options.chef11_server_url = "https://localhost"
    @options.chef12_server_url = "https://localhost"
    @options.upload_threads = 10
    @options.chef11_admin_client_name = "admin"
    @options.chef11_admin_client_key = "/etc/chef-server/admin.pem"

    opt_parser = OptionParser.new do |opts|
      opts.banner = "Usage: chef-server-ctl upgrade [options]"
      opts.banner = opts.banner << "\n Options only apply to open source Chef 11 server to Chef 12 server upgrades."
      opts.banner = opts.banner << "\n If upgrading from Enterprise Chef 11 server to Chef 12 server no options are needed."

      opts.on("-y", "--yes", "Skip confirmation") do |y|
        @options.skip_confirmation = y
      end

      opts.on("-o", "--org-name [name]", String, "The name of the Chef 12 organization to be created. It must begin with a lower case letter or digit; can only have lower case letters, digits, hyphens, and underscores and must be between 1 and 255 characters long (Will ask interactively if not passed).") do |n|
        @options.org_name = n
      end

      opts.on("-f", "--full-org-name [name]", String, "The full name of the Chef 12 organization to be created. It must begin with a non-white space character and must be between 1 and 1023 characters long (Will ask interactively if not passed).") do |n|
        @options.full_org_name = n
      end

      # This option matches the knife -s option
      opts.on("-s", "--chef11-server-url [url]", String, "The url of the Chef 11 server. Defaults to #{@options.chef11_server_url}") do |url|
        @options.chef11_server_url = url
      end

      opts.on("-x", "--chef12-server-url [url]", String, "The url of the Chef 12 server. Defaults to #{@options.chef12_server_url}") do |url|
        @options.chef12_server_url = url
      end

      # This option matches the knife -u option
      opts.on("-u", "--user [user]", String, "Chef 11 API client user. This is the admin user who will be used to download the Chef 11 data. Should match with the key specified. Defaults to #{@options.chef11_admin_client_name}") do |user|
        @options.chef11_admin_client_name = user
      end

      # This option matches the knife -k option
      opts.on("-k", "--key [key]", String, "Chef 11 API client key. This is the admin key that will be used to download the Chef 11 data. Should match with the user specified. Defaults to #{@options.chef11_admin_client_key}") do |key|
        @options.chef11_admin_client_key = key
      end

      opts.on("-d", "--chef11-data-dir [directory]", String, "Directory to store open source Chef 11 server data. Defaults to a created tmp dir.") do |chef11_dir|
        @options.chef11_data_dir = chef11_dir
      end

      opts.on("-e", "--chef12-data-dir [directory]", String, "Directory where data for upload to the Chef 12 server is located. Defaults to a created tmp dir.") do |chef12_dir|
        @options.chef12_data_dir = chef12_dir
      end

      opts.on("-t", "--upload-threads [number]", Integer, "The number of threads to use when migrating cookbooks to the new server. Defaults to #{@options.upload_threads}") do |n|
        @options.upload_threads = n
      end

      # TODO(jmink) Make this work without the '--'
      opts.on("-h", "--help", "Show this message") do
        puts opts
        exit
      end
    end
    opt_parser.parse!(args)
    log "Upgrading with options #{@options.inspect}"
  end

  # True when an open source Chef 11 install is present on this machine.
  def detect_chef11
    # Is this reliable enough?
    File.directory?("/opt/chef-server")
  end

  # Returns true when the user confirmed the upgrade, either via the
  # -y/--yes flag or interactively (reads one line from STDIN).
  def upgrade?
    if @options.skip_confirmation
      log "Performing upgrade"
      return true
    end

    log "Would you like to upgrade? [yN]"

    answer = STDIN.gets.chomp
    return answer == 'Y' || answer == 'y'
  end

  # Runs the partybus-based migration used for Enterprise Chef installs.
  # Exits the process with 0 on success, 1 on any failure.
  def partybus_upgrade
    # Original Enterprise Chef upgrade path
    # Put everything in a down state except postgres before we upgrade things.
    run_command("private-chef-ctl stop")
    if reconfigure(false) != 0
      exit 1
    end

    # How upgrades should handle services:
    # + It should expect services to be down, but turn off services
    # if its important that they be off for the upgrade.
    # + It should start any services it needed, and turn them off
    # at the end of a migration.
    # with postgres being the exception to those rules. We are leaving
    # postgres up atm to avoid having to constantly restart it.
    run_command("private-chef-ctl stop")
    if running_config["private_chef"]["use_chef_backend"]
      run_command("private-chef-ctl start haproxy")
    else
      run_command("private-chef-ctl start postgresql")
    end
    sleep 15

    Dir.chdir(File.join(base_path, "embedded", "service", "partybus"))
    bundle = File.join(base_path, "embedded", "bin", "bundle")
    status = run_command("#{bundle} exec ./bin/partybus upgrade")
    if status.success?
      puts "Chef Server Upgraded!"
      exit 0
    else
      exit 1
    end
  end

  ### Start script ###

  parse(ARGV)

  if detect_chef11
    log "Open source Chef 11 server detected."
    if upgrade?
      log "Upgrading the open source Chef 11 server."
      chef11_upgrade = OpenSourceChef11Upgrade.new(@options, self)
      chef11_upgrade.run_upgrade
    else
      puts "Aborting upgrade."
      exit 0
    end
  else
    # Open source Chef 11 isn't on the system, must be a partybus upgrade
    partybus_upgrade
  end
end
| 37.935484 | 312 | 0.67466 |
f7d348d06e0b55486d19b4dfa333854367e1530c | 946 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Google
  module Apis
    module EssentialcontactsV1
      # Version of the google-apis-essentialcontacts_v1 gem.
      GEM_VERSION = "0.8.0"

      # Version of the code generator used to generate this client.
      GENERATOR_VERSION = "0.4.1"

      # Revision (YYYYMMDD date string) of the discovery document this
      # client was generated from.
      REVISION = "20220317"
    end
  end
end
| 32.62069 | 74 | 0.733615 |
f7e4a3037bb12f6e1f6ed51c4119cbd702b5d89c | 1,028 | class DroperMailer < ActionMailer::Base
helper :application
default from: "Petit Kiwi <[email protected]>"
def monthly_report(droper, sales, revenues, year, month)
@date = Date.parse("#{year}-#{month}-01")
@sales = sales
@revenues = revenues
mail(
to: droper.email,
subject: "Petit Kiwi - vos ventes #{l(@date, format: :month_year)}"
)
end
def late_payments(droper)
mail(
to: droper.email,
subject: "Petit Kiwi - retard dans le paiement de vos ventes de juillet"
)
end
def returns(droper, products, max_droping_date, giving_date, max_product_reference)
@droper = droper
@products = products
@max_droping_date = max_droping_date
@giving_date = giving_date
@max_product_reference = max_product_reference
mail(
to: "[email protected]",
subject: "Bientôt 1 an que vos articles sont en vente. Souhaitez-vous récupérer les invendus? (réf: #{@droper.code})"
)
end
end
| 30.235294 | 123 | 0.63716 |
e927dcc379b6000243923bbf58d2d2926820681a | 4,117 | class Cookiecutter < Formula
include Language::Python::Virtualenv
desc "Utility that creates projects from templates"
homepage "https://github.com/audreyr/cookiecutter"
url "https://github.com/audreyr/cookiecutter/archive/1.5.1.tar.gz"
sha256 "a01addd59d8a572f850cf8210f1e98e4dc924bec6210010d4e23277ec4fa094b"
head "https://github.com/audreyr/cookiecutter.git"
bottle do
cellar :any_skip_relocation
sha256 "f46a25752dfec4512309109b064b75961b4185469ce1fc9a83af42ab020fea24" => :high_sierra
sha256 "9c4c18b9f4f389903998af689dba4ba58a60613da60fff825edc3dfcc7c80cf5" => :sierra
sha256 "5497ae2200f26c0474079aa17aea4a36ef8f4dbccbe1d76c5a99d1b31fc731d1" => :el_capitan
sha256 "f2aca9bcfdca4486607257193c112dd3ec5d0ebf5c281b5791ad4b40f6e447c9" => :yosemite
end
depends_on :python if MacOS.version <= :snow_leopard
resource "arrow" do
url "https://files.pythonhosted.org/packages/54/db/76459c4dd3561bbe682619a5c576ff30c42e37c2e01900ed30a501957150/arrow-0.10.0.tar.gz"
sha256 "805906f09445afc1f0fc80187db8fe07670e3b25cdafa09b8d8ac264a8c0c722"
end
resource "binaryornot" do
url "https://files.pythonhosted.org/packages/52/69/9ca055b887ccc841fa2d0265aa2599c9d63bc57d3d421dfcda874e0ad3ef/binaryornot-0.4.0.tar.gz"
sha256 "ab0f387b28912ac9c300db843461359e2773da3b922ae378ab69b0d85b288ec8"
end
resource "chardet" do
url "https://files.pythonhosted.org/packages/7d/87/4e3a3f38b2f5c578ce44f8dc2aa053217de9f0b6d737739b0ddac38ed237/chardet-2.3.0.tar.gz"
sha256 "e53e38b3a4afe6d1132de62b7400a4ac363452dc5dfcf8d88e8e0cce663c68aa"
end
resource "click" do
url "https://files.pythonhosted.org/packages/95/d9/c3336b6b5711c3ab9d1d3a80f1a3e2afeb9d8c02a7166462f6cc96570897/click-6.7.tar.gz"
sha256 "f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b"
end
resource "future" do
url "https://files.pythonhosted.org/packages/00/2b/8d082ddfed935f3608cc61140df6dcbf0edea1bc3ab52fb6c29ae3e81e85/future-0.16.0.tar.gz"
sha256 "e39ced1ab767b5936646cedba8bcce582398233d6a627067d4c6a454c90cfedb"
end
resource "Jinja2" do
url "https://files.pythonhosted.org/packages/71/59/d7423bd5e7ddaf3a1ce299ab4490e9044e8dfd195420fc83a24de9e60726/Jinja2-2.9.5.tar.gz"
sha256 "702a24d992f856fa8d5a7a36db6128198d0c21e1da34448ca236c42e92384825"
end
resource "jinja2-time" do
url "https://files.pythonhosted.org/packages/de/7c/ee2f2014a2a0616ad3328e58e7dac879251babdb4cb796d770b5d32c469f/jinja2-time-0.2.0.tar.gz"
sha256 "d14eaa4d315e7688daa4969f616f226614350c48730bfa1692d2caebd8c90d40"
end
resource "MarkupSafe" do
url "https://files.pythonhosted.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz"
sha256 "a4ec1aff59b95a14b45eb2e23761a0179e98319da5a7eb76b56ea8cdc7b871c3"
end
resource "poyo" do
url "https://files.pythonhosted.org/packages/7a/93/3f5e0a792de7470ffe730bdb6a3dc311b8f9734aa65598ad3825bbf48edf/poyo-0.4.0.tar.gz"
sha256 "8a95d95193eb0838117cc8847257bf17248ef6d157aaa55ea5c20509a87388b8"
end
resource "python-dateutil" do
url "https://files.pythonhosted.org/packages/51/fc/39a3fbde6864942e8bb24c93663734b74e281b984d1b8c4f95d64b0c21f6/python-dateutil-2.6.0.tar.gz"
sha256 "62a2f8df3d66f878373fd0072eacf4ee52194ba302e00082828e0d263b0418d2"
end
resource "six" do
url "https://files.pythonhosted.org/packages/b3/b2/238e2590826bfdd113244a40d9d3eb26918bd798fc187e2360a8367068db/six-1.10.0.tar.gz"
sha256 "105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a"
end
resource "whichcraft" do
url "https://files.pythonhosted.org/packages/6b/73/c38063b84519a2597c0a57e472d28970d2f8ad991fde18612ff3708fda0c/whichcraft-0.4.0.tar.gz"
sha256 "e756d2d9f157ab8516e7e9849c1808c57162b3689734a588c9a134e2178049a9"
end
def install
virtualenv_install_with_resources
end
test do
system "git", "clone", "https://github.com/audreyr/cookiecutter-pypackage.git"
system bin/"cookiecutter", "--no-input", "cookiecutter-pypackage"
assert (testpath/"python_boilerplate").directory?
end
end
| 45.744444 | 145 | 0.820015 |
188f306bf943777e376e3eb56d9cf16691440186 | 1,175 | # frozen_string_literal: true
# Copyright (c) 2008-2013 Michael Dvorkin and contributors.
#
# Fat Free CRM is freely distributable under the terms of MIT license.
# See MIT-LICENSE file or http://www.opensource.org/licenses/mit-license.php
#------------------------------------------------------------------------------
require 'spec_helper'
module FatFreeCrm
  # View spec for the admin users#update JS response: the rendered template
  # swaps the edit form for the user partial and animates the row.
  describe "/fat_free_crm/admin/users/update" do
    before do
      login_admin
      assign(:user, @user = build_stubbed(:user))
    end

    describe "no errors:" do
      it "replaces [Edit User] form with user partial and highlights it" do
        render

        expect(rendered).to include("user_#{@user.id}")
        expect(rendered).to include(%/$('#user_#{@user.id}').effect("highlight"/)
      end
    end

    describe "validation errors:" do
      before do
        # Simulate a failed update by putting an error on the user.
        @user.errors.add(:name)
      end

      it "redraws [Edit User] form and shakes it" do
        render

        expect(rendered).to include("user_#{@user.id}")
        expect(rendered).to include(%/$('#user_#{@user.id}').effect("shake"/)
        expect(rendered).to include(%/$('#user_username').focus()/)
      end
    end
  end
end
| 28.658537 | 81 | 0.600851 |
4a80a26527ca606a8dfcc68ed1011410db29970e | 173 | class CreateEntries < ActiveRecord::Migration
def change
create_table :entries do |t|
t.string :title
t.string :body
t.timestamps
end
end
end
| 15.727273 | 45 | 0.653179 |
6adf2b11ac385be407595583e19e3746dd830c4a | 7,225 | require "test_helper"
# Exercises error collection/propagation across a deeply nested Reform-style
# form: scalar, collection, and multiply-nested properties, plus custom
# errors added via #add.
class ErrorsTest < MiniTest::Spec
  class AlbumForm < TestForm
    property :title
    validation do
      params { required(:title).filled }
    end

    property :artists, default: []
    property :producer do
      property :name
    end

    property :hit do
      property :title
      validation do
        params { required(:title).filled }
      end
    end

    collection :songs do
      property :title
      validation do
        params { required(:title).filled }
      end
    end

    property :band do # yepp, people do crazy stuff like that.
      property :name
      property :label do
        property :name
        validation do
          params { required(:name).filled }
        end
      end
      # TODO: make band a required object.

      validation do
        # Custom messages file supplies the :good_musical_taste? text.
        config.messages.load_paths << "test/fixtures/dry_new_api_error_messages.yml"

        params { required(:name).filled }
        rule(:name) { key.failure(:good_musical_taste?) if value == "Nickelback" }
      end
    end

    validation do
      params do
        required(:title).filled
        required(:artists).each(:str?)
        required(:producer).hash do
          required(:name).filled
        end
      end
    end
  end

  let(:album_title) { "Blackhawks Over Los Angeles" }
  let(:album) do
    OpenStruct.new(
      title: album_title,
      hit: song,
      songs: songs, # TODO: document this requirement,
      band: Struct.new(:name, :label).new("Epitaph", OpenStruct.new),
      producer: Struct.new(:name).new("Sun Records")
    )
  end
  let(:song) { OpenStruct.new(title: "Downtown") }
  # Note: both entries are the same object on purpose.
  let(:songs) { [song = OpenStruct.new(title: "Calling"), song] }
  let(:form) { AlbumForm.new(album) }

  describe "#validate with invalid array property" do
    it do
      form.validate(
        title: "Swimming Pool - EP",
        band: {
          name: "Marie Madeleine",
          label: {name: "Ekler'o'shocK"}
        },
        artists: [42, "Good Charlotte", 43]
      ).must_equal false
      # Per-index messages for the scalar-array property.
      form.errors.messages.must_equal(artists: {0 => ["must be a string"], 2 => ["must be a string"]})
      form.errors.size.must_equal(1)
    end
  end

  describe "#errors without #validate" do
    it do
      form.errors.size.must_equal 0
    end
  end

  describe "blank everywhere" do
    before do
      form.validate(
        "hit" => {"title" => ""},
        "title" => "",
        "songs" => [{"title" => ""}, {"title" => ""}],
        "producer" => {"name" => ""}
      )
    end

    it do
      form.errors.messages.must_equal(
        title: ["must be filled"],
        "hit.title": ["must be filled"],
        "songs.title": ["must be filled"],
        "band.label.name": ["must be filled"],
        "producer.name": ["must be filled"]
      )
    end

    # it do
    #   form.errors.must_equal({:title => ["must be filled"]})
    # TODO: this should only contain local errors?
    # end

    # nested forms keep their own Errors:
    it { form.producer.errors.messages.must_equal(name: ["must be filled"]) }
    it { form.hit.errors.messages.must_equal(title: ["must be filled"]) }
    it { form.songs[0].errors.messages.must_equal(title: ["must be filled"]) }

    it do
      form.errors.messages.must_equal(
        title: ["must be filled"],
        "hit.title": ["must be filled"],
        "songs.title": ["must be filled"],
        "band.label.name": ["must be filled"],
        "producer.name": ["must be filled"]
      )
      form.errors.size.must_equal(5)
    end
  end

  describe "#validate with main form invalid" do
    it do
      form.validate("title" => "", "band" => {"label" => {name: "Fat Wreck"}}, "producer" => nil).must_equal false
      form.errors.messages.must_equal(title: ["must be filled"], producer: ["must be a hash"])
      form.errors.size.must_equal(2)
    end
  end

  describe "#validate with middle nested form invalid" do
    before { @result = form.validate("hit" => {"title" => ""}, "band" => {"label" => {name: "Fat Wreck"}}) }

    it { @result.must_equal false }
    it { form.errors.messages.must_equal("hit.title": ["must be filled"]) }
    it { form.errors.size.must_equal(1) }
  end

  describe "#validate with collection form invalid" do
    before { @result = form.validate("songs" => [{"title" => ""}], "band" => {"label" => {name: "Fat Wreck"}}) }

    it { @result.must_equal false }
    it { form.errors.messages.must_equal("songs.title": ["must be filled"]) }
    it { form.errors.size.must_equal(1) }
  end

  describe "#validate with collection and 2-level-nested invalid" do
    before { @result = form.validate("songs" => [{"title" => ""}], "band" => {"label" => {}}) }

    it { @result.must_equal false }
    it { form.errors.messages.must_equal("songs.title": ["must be filled"], "band.label.name": ["must be filled"]) }
    it { form.errors.size.must_equal(2) }
  end

  describe "#validate with nested form using :base invalid" do
    it do
      result = form.validate("songs" => [{"title" => "Someday"}], "band" => {"name" => "Nickelback", "label" => {"name" => "Roadrunner Records"}})
      result.must_equal false
      form.errors.messages.must_equal("band.name": ["you're a bad person"])
      form.errors.size.must_equal(1)
    end
  end

  describe "#add" do
    let(:album_title) { nil }
    it do
      result = form.validate("songs" => [{"title" => "Someday"}], "band" => {"name" => "Nickelback", "label" => {"name" => "Roadrunner Records"}})
      result.must_equal false
      form.errors.messages.must_equal(title: ["must be filled"], "band.name": ["you're a bad person"])
      # add a new custom error
      form.errors.add(:policy, "error_text")
      form.errors.messages.must_equal(title: ["must be filled"], "band.name": ["you're a bad person"], policy: ["error_text"])
      # does not duplicate errors
      form.errors.add(:title, "must be filled")
      form.errors.messages.must_equal(title: ["must be filled"], "band.name": ["you're a bad person"], policy: ["error_text"])
      # merge existing errors
      form.errors.add(:policy, "another error")
      form.errors.messages.must_equal(title: ["must be filled"], "band.name": ["you're a bad person"], policy: ["error_text", "another error"])
    end
  end

  describe "correct #validate" do
    before do
      @result = form.validate(
        "hit" => {"title" => "Sacrifice"},
        "title" => "Second Heat",
        "songs" => [{"title" => "Heart Of A Lion"}],
        "band" => {"label" => {name: "Fat Wreck"}}
      )
    end

    it { @result.must_equal true }
    it { form.hit.title.must_equal "Sacrifice" }
    it { form.title.must_equal "Second Heat" }
    it { form.songs.first.title.must_equal "Heart Of A Lion" }
    it do
      skip "WE DON'T NEED COUNT AND EMPTY? ON THE CORE ERRORS OBJECT"
      form.errors.size.must_equal(0)
      form.errors.empty?.must_equal(true)
    end
  end

  describe "Errors#to_s" do
    before { form.validate("songs" => [{"title" => ""}], "band" => {"label" => {}}) }

    # to_s is aliased to messages
    it {
      skip "why do we need Errors#to_s ?"
      form.errors.to_s.must_equal "{:\"songs.title\"=>[\"must be filled\"], :\"band.label.name\"=>[\"must be filled\"]}"
    }
  end
end
| 31.969027 | 146 | 0.590865 |
bb1b981d66662f1c988ca2f2168b0de43026a82e | 5,311 | #!/usr/bin/env ruby
$VERBOSE = true
# Paths and generators for the throwaway fixture files used by the test
# suite. All fixtures live under test/data/generated.
class TestFiles
  RANDOM_ASCII_FILE1 = 'test/data/generated/randomAscii1.txt'
  RANDOM_ASCII_FILE2 = 'test/data/generated/randomAscii2.txt'
  RANDOM_ASCII_FILE3 = 'test/data/generated/randomAscii3.txt'
  RANDOM_BINARY_FILE1 = 'test/data/generated/randomBinary1.bin'
  RANDOM_BINARY_FILE2 = 'test/data/generated/randomBinary2.bin'
  NULL_FILE = 'test/data/generated/null.zip' # Zero length, so not a zip file.

  EMPTY_TEST_DIR = 'test/data/generated/emptytestdir'

  ASCII_TEST_FILES = [RANDOM_ASCII_FILE1, RANDOM_ASCII_FILE2, RANDOM_ASCII_FILE3]
  BINARY_TEST_FILES = [RANDOM_BINARY_FILE1, RANDOM_BINARY_FILE2]
  TEST_DIRECTORIES = [EMPTY_TEST_DIR]
  TEST_FILES = [ASCII_TEST_FILES, BINARY_TEST_FILES, EMPTY_TEST_DIR].flatten!

  class << self
    # Generates every fixture: random ASCII and binary files of increasing
    # size, an empty "null" zip, and an empty directory.
    def create_test_files
      Dir.mkdir 'test/data/generated' unless Dir.exist?('test/data/generated')

      ASCII_TEST_FILES.each_with_index do |path, i|
        create_random_ascii(path, 1E4 * (i + 1))
      end
      BINARY_TEST_FILES.each_with_index do |path, i|
        create_random_binary(path, 1E4 * (i + 1))
      end

      system("touch #{NULL_FILE}")
      ensure_dir(EMPTY_TEST_DIR)
    end

    private

    # Appends stringified random floats until the file reaches +size+ bytes.
    def create_random_ascii(filename, size)
      File.open(filename, 'wb') do |io|
        io << rand until io.tell >= size
      end
    end

    # Appends random 32-bit little-endian words until the file reaches
    # +size+ bytes.
    def create_random_binary(filename, size)
      File.open(filename, 'wb') do |io|
        io << [rand].pack('V') until io.tell >= size
      end
    end

    # Guarantees +name+ exists as a directory, replacing any plain file that
    # currently occupies the path.
    def ensure_dir(name)
      return if File.exist?(name) && File.stat(name).directory?

      File.delete(name) if File.exist?(name)
      Dir.mkdir(name)
    end
  end
end
# Represents one archive fixture (path, member list, optional archive
# comment) and knows how to build all of them with the external Info-ZIP
# `zip` tool.
class TestZipFile
  attr_accessor :zip_name, :entry_names, :comment

  def initialize(zip_name, entry_names, comment = '')
    @zip_name = zip_name
    @entry_names = entry_names
    @comment = comment
  end

  # Shells out to `zip` to build TEST_ZIP1..TEST_ZIP4 plus the text/binary
  # member files they contain. Raises with installation advice when `zip`
  # is missing or any command fails.
  def self.create_test_zips
    raise "failed to create test zip '#{TEST_ZIP1.zip_name}'" \
      unless system("zip -q #{TEST_ZIP1.zip_name} test/data/file2.txt")
    # Deleting the only entry leaves an empty (but valid) archive.
    raise "failed to remove entry from '#{TEST_ZIP1.zip_name}'" \
      unless system(
        "zip -q #{TEST_ZIP1.zip_name} -d test/data/file2.txt"
      )

    File.open('test/data/generated/empty.txt', 'w') {}
    File.open('test/data/generated/empty_chmod640.txt', 'w') {}
    ::File.chmod(0o640, 'test/data/generated/empty_chmod640.txt')

    File.open('test/data/generated/short.txt', 'w') { |file| file << 'ABCDEF' }

    # Repeat the sample text until the file passes 1E5 bytes.
    test_text = ''
    File.open('test/data/file2.txt') { |file| test_text = file.read }
    File.open('test/data/generated/longAscii.txt', 'w') do |file|
      file << test_text while file.tell < 1E5
    end

    binary_pattern = ''
    File.open('test/data/generated/empty.zip') do |file|
      binary_pattern = file.read
    end
    binary_pattern *= 4

    File.open('test/data/generated/longBinary.bin', 'wb') do |file|
      file << binary_pattern << rand << "\0" while file.tell < 6E5
    end

    raise "failed to create test zip '#{TEST_ZIP2.zip_name}'" \
      unless system(
        "zip -q #{TEST_ZIP2.zip_name} #{TEST_ZIP2.entry_names.join(' ')}"
      )

    if RUBY_PLATFORM =~ /mswin|mingw|cygwin/
      raise "failed to add comment to test zip '#{TEST_ZIP2.zip_name}'" \
        unless system(
          # NOTE(review): the trailing \" looks like a stray escaped quote
          # (there is no matching opening quote) — likely a latent bug in
          # this Windows branch; confirm on a Windows shell.
          "echo #{TEST_ZIP2.comment}| zip -zq #{TEST_ZIP2.zip_name}\""
        )
    else
      # without bash system interprets everything after echo as parameters to
      # echo including | zip -z ...
      raise "failed to add comment to test zip '#{TEST_ZIP2.zip_name}'" \
        unless system(
          "bash -c \"echo #{TEST_ZIP2.comment} | zip -zq #{TEST_ZIP2.zip_name}\""
        )
    end

    raise "failed to create test zip '#{TEST_ZIP3.zip_name}'" \
      unless system(
        "zip -q #{TEST_ZIP3.zip_name} #{TEST_ZIP3.entry_names.join(' ')}"
      )
    raise "failed to create test zip '#{TEST_ZIP4.zip_name}'" \
      unless system(
        "zip -q #{TEST_ZIP4.zip_name} #{TEST_ZIP4.entry_names.join(' ')}"
      )
  rescue StandardError
    # If there are any Windows developers wanting to use a command line zip.exe
    # to help create the following files, there's a free one available from
    # http://stahlworks.com/dev/index.php?tool=zipunzip
    # that works with the above code
    raise $ERROR_INFO.to_s +
          "\n\nziptest.rb requires the Info-ZIP program 'zip' in the path\n" \
          "to create test data. If you don't have it you can download\n" \
          'the necessary test files at http://sf.net/projects/rubyzip.'
  end

  TEST_ZIP1 = TestZipFile.new('test/data/generated/empty.zip', [])
  TEST_ZIP2 = TestZipFile.new(
    'test/data/generated/5entry.zip',
    %w[
      test/data/generated/longAscii.txt
      test/data/generated/empty.txt
      test/data/generated/empty_chmod640.txt
      test/data/generated/short.txt
      test/data/generated/longBinary.bin
    ],
    'my zip comment'
  )
  TEST_ZIP3 = TestZipFile.new('test/data/generated/test1.zip', %w[test/data/file1.txt])
  TEST_ZIP4 = TestZipFile.new('test/data/generated/zipWithDir.zip', ['test/data/file1.txt',
                                                                    TestFiles::EMPTY_TEST_DIR])
end
| 33.613924 | 96 | 0.65675 |
1ccfa56d21df535b4ca7c58e62a2a0b0c9f4ed8f | 74 | # frozen_string_literal: true
module Citywrapper
  # Gem release version, following Semantic Versioning.
  VERSION = '1.0.0'
end
| 12.333333 | 29 | 0.743243 |
ab036197a928511bf06dae12c979dec57cbe2d96 | 51 | module Planify
VERSION = [1, 0, 4].join(".")
end
| 12.75 | 31 | 0.588235 |
33a937b5d71e4afbc8a7cdb4f3251443feffe194 | 11,589 | # frozen_string_literal: true
require "resource"
require "metafiles"
# Mixin for Pathname that lazily computes and memoizes the on-disk size
# (in bytes) and the number of files under the path.
module DiskUsageExtension
  # Total size in bytes; computed once by #compute_disk_usage and cached.
  def disk_usage
    return @disk_usage if @disk_usage
    compute_disk_usage
    @disk_usage
  end

  # Number of files under this path; computed once and cached.
  def file_count
    return @file_count if @file_count
    compute_disk_usage
    @file_count
  end

  # Human-readable summary, e.g. "3 files, 1.2MB". Relies on the
  # `number_readable`/`disk_usage_readable` helpers being in scope.
  def abv
    out = +""
    compute_disk_usage
    out << "#{number_readable(@file_count)} files, " if @file_count > 1
    out << disk_usage_readable(@disk_usage).to_s
    out.freeze
  end

  private

  # Walks the tree and fills @file_count / @disk_usage.
  def compute_disk_usage
    # A dangling symlink occupies no space but still counts as one file.
    if symlink? && !exist?
      @file_count = 1
      @disk_usage = 0
      return
    end
    # Follow a (valid) symlink and measure its target instead.
    path = if symlink?
      resolved_path
    else
      self
    end
    if path.directory?
      scanned_files = Set.new
      @file_count = 0
      @disk_usage = 0
      path.find do |f|
        if f.directory?
          # Directories contribute their own entry size but not to file_count.
          @disk_usage += f.lstat.size
        else
          # .DS_Store files are excluded from the count (but note their size
          # is still added below — presumably intentional; confirm if changing).
          @file_count += 1 if f.basename.to_s != ".DS_Store"
          # use Pathname#lstat instead of Pathname#stat to get info of symlink itself.
          stat = f.lstat
          file_id = [stat.dev, stat.ino]
          # count hardlinks only once.
          unless scanned_files.include?(file_id)
            @disk_usage += stat.size
            scanned_files.add(file_id)
          end
        end
      end
    else
      # Plain file (or symlink to one): one file, lstat size.
      @file_count = 1
      @disk_usage = path.lstat.size
    end
  end
end
# Homebrew extends Ruby's `Pathname` to make our code more readable.
# @see https://ruby-doc.org/stdlib-1.8.7/libdoc/pathname/rdoc/Pathname.html Ruby's Pathname API
# Homebrew extends Ruby's `Pathname` to make our code more readable.
# @see https://ruby-doc.org/stdlib-1.8.7/libdoc/pathname/rdoc/Pathname.html Ruby's Pathname API
class Pathname
  include DiskUsageExtension

  # @private
  # Matches bottle double extensions like ".catalina.bottle.1.tar.gz".
  BOTTLE_EXTNAME_RX = /(\.[a-z0-9_]+\.bottle\.(\d+\.)?tar\.gz)$/.freeze

  # Moves a file from the original location to the {Pathname}'s.
  # Accepts Resources, Resource::Partials, Arrays/Hashes of paths
  # (Hash maps source path => new basename) or a single path.
  def install(*sources)
    sources.each do |src|
      case src
      when Resource
        src.stage(self)
      when Resource::Partial
        src.resource.stage { install(*src.files) }
      when Array
        if src.empty?
          opoo "tried to install empty array to #{self}"
          break
        end
        src.each { |s| install_p(s, File.basename(s)) }
      when Hash
        if src.empty?
          opoo "tried to install empty hash to #{self}"
          break
        end
        src.each { |s, new_basename| install_p(s, new_basename) }
      else
        install_p(src, File.basename(src))
      end
    end
  end

  # Moves a single file/symlink into this directory under +new_basename+.
  # An optional block may veto (return nil) or redirect the destination.
  def install_p(src, new_basename)
    raise Errno::ENOENT, src.to_s unless File.symlink?(src) || File.exist?(src)
    src = Pathname(src)
    dst = join(new_basename)
    dst = yield(src, dst) if block_given?
    return unless dst
    mkpath
    # Use FileUtils.mv over File.rename to handle filesystem boundaries. If src
    # is a symlink, and its target is moved first, FileUtils.mv will fail:
    # https://bugs.ruby-lang.org/issues/7707
    # In that case, use the system "mv" command.
    if src.symlink?
      raise unless Kernel.system "mv", src, dst
    else
      FileUtils.mv src, dst
    end
  end
  private :install_p

  # Creates symlinks to sources in this folder.
  def install_symlink(*sources)
    sources.each do |src|
      case src
      when Array
        src.each { |s| install_symlink_p(s, File.basename(s)) }
      when Hash
        src.each { |s, new_basename| install_symlink_p(s, new_basename) }
      else
        install_symlink_p(src, File.basename(src))
      end
    end
  end

  # Creates one relative symlink to +src+ named +new_basename+ in this dir.
  def install_symlink_p(src, new_basename)
    mkpath
    dstdir = realpath
    # Resolve the source relative to the (real) destination directory so the
    # resulting link is relative and survives prefix moves.
    src = Pathname(src).expand_path(dstdir)
    src = src.dirname.realpath/src.basename if src.dirname.exist?
    FileUtils.ln_sf(src.relative_path_from(dstdir), dstdir/new_basename)
  end
  private :install_symlink_p

  # @private
  alias old_write write

  # We assume this pathname object is a file, obviously
  # Refuses to clobber an existing file; creates parent dirs as needed.
  def write(content, *open_args)
    raise "Will not overwrite #{self}" if exist?
    dirname.mkpath
    open("w", *open_args) { |f| f.write(content) }
  end

  # Only appends to a file that is already created.
  def append_lines(content, *open_args)
    raise "Cannot append file that doesn't exist: #{self}" unless exist?
    open("a", *open_args) { |f| f.puts(content) }
  end

  # NOTE: This always overwrites.
  # Writes atomically (via temp file + rename), then best-effort restores the
  # original owner/group/mode.
  def atomic_write(content)
    old_stat = stat if exist?
    File.atomic_write(self) do |file|
      file.write(content)
    end
    return unless old_stat
    # Try to restore original file's permissions separately
    # atomic_write does it itself, but it actually erases
    # them if chown fails
    begin
      # Set correct permissions on new file
      chown(old_stat.uid, nil)
      chown(nil, old_stat.gid)
    rescue Errno::EPERM, Errno::EACCES
      # Changing file ownership failed, moving on.
      nil
    end
    begin
      # This operation will affect filesystem ACL's
      chmod(old_stat.mode)
    rescue Errno::EPERM, Errno::EACCES
      # Changing file permissions failed, moving on.
      nil
    end
  end

  # @private
  # Copies self to the path obtained by substituting +pattern+ with
  # +replacement+ in this path; a block may veto/redirect the destination.
  def cp_path_sub(pattern, replacement)
    raise "#{self} does not exist" unless exist?
    dst = sub(pattern, replacement)
    raise "#{self} is the same file as #{dst}" if self == dst
    if directory?
      dst.mkpath
    else
      dst.dirname.mkpath
      dst = yield(self, dst) if block_given?
      FileUtils.cp(self, dst)
    end
  end

  # @private
  alias extname_old extname

  # Extended to support common double extensions
  # (bottle and compressed-archive suffixes); avoids treating trailing
  # version numbers as an extension.
  def extname(path = to_s)
    basename = File.basename(path)
    bottle_ext = basename[BOTTLE_EXTNAME_RX, 1]
    return bottle_ext if bottle_ext
    archive_ext = basename[/(\.(tar|cpio|pax)\.(gz|bz2|lz|xz|Z))\Z/, 1]
    return archive_ext if archive_ext
    # Don't treat version numbers as extname.
    return "" if basename.match?(/\b\d+\.\d+[^.]*\Z/) && !basename.end_with?(".7z")
    File.extname(basename)
  end

  # For filetypes we support, basename without extension
  def stem
    File.basename((path = to_s), extname(path))
  end

  # I don't trust the children.length == 0 check particularly, not to mention
  # it is slow to enumerate the whole directory just to see if it is empty,
  # instead rely on good ol' libc and the filesystem
  # @private
  # Returns true if the directory was removed; silently tolerates a lone
  # .DS_Store blocking removal.
  def rmdir_if_possible
    rmdir
    true
  rescue Errno::ENOTEMPTY
    if (ds_store = join(".DS_Store")).exist? && children.length == 1
      ds_store.unlink
      retry
    else
      false
    end
  rescue Errno::EACCES, Errno::ENOENT, Errno::EBUSY
    false
  end

  # @private
  # Parses the basename into a Version object.
  def version
    require "version"
    Version.parse(basename)
  end

  # @private
  # Truthy if the first 1 KiB starts with a shebang line.
  def text_executable?
    /^#!\s*\S+/ =~ open("r") { |f| f.read(1024) }
  end

  # Hex SHA-256 digest of the file contents.
  def sha256
    require "digest/sha2"
    Digest::SHA256.file(self).hexdigest
  end

  # Raises unless the file's actual checksum matches +expected+.
  def verify_checksum(expected)
    raise ChecksumMissingError if expected.nil? || expected.empty?
    actual = Checksum.new(expected.hash_type, send(expected.hash_type).downcase)
    raise ChecksumMismatchError.new(self, expected, actual) unless expected == actual
  end

  alias to_str to_s

  # Runs the block with this path as the current working directory.
  def cd
    Dir.chdir(self) { yield self }
  end

  # Immediate children that are directories.
  def subdirs
    children.select(&:directory?)
  end

  # @private
  # Resolves one level of symlink (relative to this path's dirname).
  def resolved_path
    symlink? ? dirname.join(readlink) : self
  end

  # @private
  # True if this symlink's target exists; false for unreadable link targets.
  def resolved_path_exists?
    link = readlink
  rescue ArgumentError
    # The link target contains NUL bytes
    false
  else
    dirname.join(link).exist?
  end

  # @private
  # Creates self as a symlink pointing at +src+ via a relative path.
  def make_relative_symlink(src)
    dirname.mkpath
    File.symlink(src.relative_path_from(dirname), self)
  end

  # @private
  # Temporarily grants u+rw (if needed) around the block, restoring the
  # original mode afterwards.
  def ensure_writable
    saved_perms = nil
    unless writable_real?
      saved_perms = stat.mode
      FileUtils.chmod "u+rw", to_path
    end
    yield
  ensure
    chmod saved_perms if saved_perms
  end

  # @private
  # Registers this info file in the sibling "dir" index.
  def install_info
    quiet_system "/usr/bin/install-info", "--quiet", to_s, "#{dirname}/dir"
  end

  # @private
  # Removes this info file from the sibling "dir" index.
  def uninstall_info
    quiet_system "/usr/bin/install-info", "--delete", "--quiet", to_s, "#{dirname}/dir"
  end

  # Writes an exec script in this folder for each target pathname
  def write_exec_script(*targets)
    targets.flatten!
    if targets.empty?
      opoo "tried to write exec scripts to #{self} for an empty list of targets"
      return
    end
    mkpath
    targets.each do |target|
      target = Pathname.new(target) # allow pathnames or strings
      join(target.basename).write <<~SH
        #!/bin/bash
        exec "#{target}" "$@"
      SH
    end
  end

  # Writes an exec script that sets environment variables
  # Signature is (target, args, env) or (target, env) — when called with two
  # arguments the second is treated as the env hash.
  def write_env_script(target, args, env = nil)
    unless env
      env = args
      args = nil
    end
    env_export = +""
    env.each { |key, value| env_export << "#{key}=\"#{value}\" " }
    dirname.mkpath
    write <<~SH
      #!/bin/bash
      #{env_export}exec "#{target}" #{args} "$@"
    SH
  end

  # Writes a wrapper env script and moves all files to the dst
  def env_script_all_files(dst, env)
    dst.mkpath
    Pathname.glob("#{self}/*") do |file|
      next if file.directory?
      dst.install(file)
      new_file = dst.join(file.basename)
      # Replace the moved file with a wrapper that execs it under +env+.
      file.write_env_script(new_file, env)
    end
  end

  # Writes an exec script that invokes a Java jar
  def write_jar_script(target_jar, script_name, java_opts = "", java_version: nil)
    (self/script_name).write_env_script "java", "#{java_opts} -jar \"#{target_jar}\"",
                                        Language::Java.overridable_java_home_env(java_version)
  end

  # Installs README/LICENSE-style metafiles from +from+ into this directory.
  def install_metafiles(from = Pathname.pwd)
    Pathname(from).children.each do |p|
      next if p.directory?
      next unless Metafiles.copy?(p.basename.to_s)
      # Some software symlinks these files (see help2man.rb)
      filename = p.resolved_path
      # Some software links metafiles together, so by the time we iterate to one of them
      # we may have already moved it. libxml2's COPYING and Copyright are affected by this.
      next unless filename.exist?
      filename.chmod 0644
      install(filename)
    end
  end

  # True if this path is a macOS Finder metadata file.
  def ds_store?
    basename.to_s == ".DS_Store"
  end

  # The three predicates below are stubs; OS-specific behavior is layered on
  # by the "extend/os/pathname" require at the bottom of this file.
  def binary_executable?
    false
  end

  def mach_o_bundle?
    false
  end

  def dylib?
    false
  end
end
require "extend/os/pathname"
# @private
# @private
# Wraps destructive Pathname operations to log them (when verbose) and to
# count how many files (+n+) and directories (+d+) were touched.
module ObserverPathnameExtension
  class << self
    # n: number of file operations; d: number of directory removals.
    attr_accessor :n, :d

    # Zeroes both counters and re-arms the "output trimmed" warning.
    def reset_counts!
      @n = @d = 0
      @put_verbose_trimmed_warning = false
    end

    # Total number of recorded operations.
    def total
      n + d
    end

    # Both counters as a pair: [files, directories].
    def counts
      [n, d]
    end

    MAXIMUM_VERBOSE_OUTPUT = 100

    # Verbose logging is capped in CI: after MAXIMUM_VERBOSE_OUTPUT
    # operations a single warning is printed and logging stops.
    def verbose?
      return Homebrew.args.verbose? unless ENV["CI"]
      return false unless Homebrew.args.verbose?
      if total < MAXIMUM_VERBOSE_OUTPUT
        true
      else
        unless @put_verbose_trimmed_warning
          puts "Only the first #{MAXIMUM_VERBOSE_OUTPUT} operations were output."
          @put_verbose_trimmed_warning = true
        end
        false
      end
    end
  end

  # Each override calls through to the real operation, then logs/counts it.
  def unlink
    super
    puts "rm #{self}" if ObserverPathnameExtension.verbose?
    ObserverPathnameExtension.n += 1
  end

  # NOTE: logged but not counted (does not bump n or d).
  def mkpath
    super
    puts "mkdir -p #{self}" if ObserverPathnameExtension.verbose?
  end

  def rmdir
    super
    puts "rmdir #{self}" if ObserverPathnameExtension.verbose?
    ObserverPathnameExtension.d += 1
  end

  def make_relative_symlink(src)
    super
    puts "ln -s #{src.relative_path_from(dirname)} #{basename}" if ObserverPathnameExtension.verbose?
    ObserverPathnameExtension.n += 1
  end

  def install_info
    super
    puts "info #{self}" if ObserverPathnameExtension.verbose?
  end

  def uninstall_info
    super
    puts "uninfo #{self}" if ObserverPathnameExtension.verbose?
  end
end
| 23.894845 | 101 | 0.648201 |
1cfbbaafd842ae9a08cc684392ec563b94cc248e | 8,776 | class CheckSearch
def initialize(options)
# options include keywords, projects, tags, status
options = JSON.parse(options)
@options = options.clone
@options['input'] = options.clone
@options['team_id'] = Team.current.id unless Team.current.nil?
# set sort options
@options['sort'] ||= 'recent_added'
@options['sort_type'] ||= 'desc'
# set show options
@options['show'] ||= ['medias']
end
def pusher_channel
if @options['parent'] && @options['parent']['type'] == 'project'
Project.find(@options['parent']['id']).pusher_channel
elsif @options['parent'] && @options['parent']['type'] == 'team'
Team.where(slug: @options['parent']['slug']).last.pusher_channel
else
nil
end
end
def teams
[]
end
def id
CheckSearch.id(@options['input'])
end
def self.id(options = {})
Base64.encode64("CheckSearch/#{options.to_json}")
end
def class_name
'CheckSearch'
end
def get_ids_from_result(results)
relationship_type = @options['relationship_type']
results.collect do |result|
sources = result.relationship_sources || []
source = relationship_type.blank? ? sources.first : sources.select{ |x| x.split('_').first == Digest::MD5.hexdigest(relationship_type) }.first
(source.blank? || source == '-') ? result.annotated_id : source.split('_').last.to_i
end
end
def medias
return [] unless @options['show'].include?('medias') && index_exists?
return @medias if @medias
@medias = []
filters = {}
filters[:archived] = @options.has_key?('archived') ? (@options['archived'].to_i == 1) : false
filters[:sources_count] = 0
if should_hit_elasticsearch?
query = medias_build_search_query
ids = get_ids_from_result(medias_get_search_result(query))
filters = filters.merge({ id: ids })
@ids = ids
end
results = ProjectMedia.where(filters).eager_load(:media).joins(:project)
@medias = sort_pg_results(results, 'media')
@medias
end
def project_medias
medias
end
def sources
return [] unless @options['show'].include?('sources') && index_exists?
return @sources if @sources
@sources = []
filters = {}
if should_hit_elasticsearch?
query = medias_build_search_query('ProjectSource')
ids = medias_get_search_result(query).map(&:annotated_id)
filters = { id: ids }
end
results = ProjectSource.where(filters).eager_load(:source).joins(:project)
@sources = sort_pg_results(results, 'source')
@sources
end
def project_sources
sources
end
def number_of_results
medias_count = medias.is_a?(Array) ? medias.size : medias.permissioned.count
sources_count = sources.is_a?(Array) ? sources.size : sources.permissioned.count
medias_count + sources_count
end
def medias_build_search_query(associated_type = 'ProjectMedia')
conditions = []
conditions << {term: { annotated_type: associated_type.downcase } }
conditions << {term: { team_id: @options["team_id"] } } unless @options["team_id"].nil?
conditions.concat build_search_keyword_conditions
conditions.concat build_search_tags_conditions
conditions.concat build_search_doc_conditions
dynamic_conditions = build_search_dynamic_annotation_conditions
conditions.concat(dynamic_conditions) unless dynamic_conditions.blank?
{ bool: { must: conditions } }
end
def medias_get_search_result(query)
sort = build_search_dynamic_annotation_sort
MediaSearch.search(query: query, sort: sort, size: 10000).results
end
private
def index_exists?
client = MediaSearch.gateway.client
client.indices.exists? index: CheckElasticSearchModel.get_index_alias
end
def should_hit_elasticsearch?
status_blank = true
status_search_fields.each do |field|
status_blank = false unless @options[field].blank?
end
!(status_blank && @options['tags'].blank? && @options['keyword'].blank? && @options['dynamic'].blank? && ['recent_activity', 'recent_added'].include?(@options['sort']))
end
# def show_filter?(type)
# # show filter should not include all media types to hit ES
# show_options = (type == 'medias') ? ['uploadedimage', 'link', 'claim'] : ['source']
# (show_options - @options['show']).empty?
# end
def build_search_keyword_conditions
return [] if @options["keyword"].blank?
# add keyword conditions
keyword_fields = %w(title description quote)
keyword_c = [{ simple_query_string: { query: @options["keyword"], fields: keyword_fields, default_operator: "AND" } }]
[['comments', 'text'], ['dynamics', 'indexable']].each do |pair|
keyword_c << { nested: { path: "#{pair[0]}", query: { simple_query_string: { query: @options["keyword"], fields: ["#{pair[0]}.#{pair[1]}"], default_operator: "AND" }}}}
end
keyword_c << search_tags_query(@options["keyword"].split(' '))
keyword_c << { nested: { path: "accounts", query: { simple_query_string: { query: @options["keyword"], fields: %w(accounts.username accounts.title), default_operator: "AND" }}}}
[{ bool: { should: keyword_c } }]
end
def build_search_dynamic_annotation_conditions
conditions = []
return conditions unless @options.has_key?('dynamic')
@options['dynamic'].each do |name, values|
next if values.blank?
method = "field_search_query_type_#{name}"
queries = []
values.each do |value|
query = Dynamic.respond_to?(method) ? Dynamic.send(method, value) : { term: { "dynamics.#{name}": value } }
queries << query
end
condition = {
nested: {
path: 'dynamics',
query: {
bool: {
should: queries
}
}
}
}
conditions << condition
end
conditions
end
def build_search_dynamic_annotation_sort
return [] if ['recent_activity', 'recent_added'].include?(@options['sort'].to_s)
[
{
"dynamics.#{@options['sort']}": {
order: @options['sort_type'],
unmapped_type: 'long',
nested: {
path: 'dynamics'
}
}
}
]
end
def build_search_tags_conditions
return [] if @options["tags"].blank?
tags_c = search_tags_query(@options["tags"])
[tags_c]
end
def search_tags_query(tags)
tags_c = []
tags = tags.collect{ |t| t.delete('#').downcase }
tags.each do |tag|
tags_c << { match: { "tags.tag.raw": { query: tag, operator: 'and' } } }
end
tags_c << { terms: { "tags.tag": tags } }
{ nested: { path: 'tags', query: { bool: { should: tags_c } } } }
end
def build_search_doc_conditions
doc_c = []
unless @options['show'].blank?
types_mapping = {
'medias' => ['Link', 'Claim', 'UploadedImage'],
'sources' => ['Source']
}
types = @options['show'].collect{ |type| types_mapping[type] }.flatten
doc_c << { terms: { 'associated_type': types } }
end
fields = { 'project_id' => 'projects' }
status_search_fields.each do |field|
fields[field] = field
end
fields.each do |k, v|
doc_c << { terms: { "#{k}": @options[v] } } unless @options[v].blank?
end
doc_c
end
def filter_by_team_and_project(results)
results = results.where('projects.team_id' => @options['team_id']) unless @options['team_id'].blank?
results = results.where(project_id: @options['projects']) unless @options['projects'].blank?
results
end
def sort_pg_results(results, type)
results = filter_by_team_and_project(results)
if ['recent_activity', 'recent_added'].include?(@options['sort'].to_s)
sort_field = @options['sort'].to_s == 'recent_activity' ? 'updated_at' : 'created_at'
sort_type = @options['sort_type'].blank? ? 'desc' : @options['sort_type'].downcase
results = results.order(sort_field => sort_type)
elsif @ids && type == 'media'
values = []
@ids.each_with_index do |id, i|
values << "(#{id}, #{i})"
end
joins = ActiveRecord::Base.send(:sanitize_sql_array, ["JOIN (VALUES %s) AS x(value, order_number) ON project_medias.id = x.value", values.join(', ')])
results = results.joins(joins).order('x.order_number')
end
results
end
# def prepare_show_filter(show)
# m_types = ['photos', 'links', 'quotes']
# show ||= m_types
# if show.include?('medias')
# show.delete('medias')
# show += m_types
# end
# show.map(&:downcase)
# show_mapping = {'photos' => 'uploadedimage', 'links' => 'link', 'quotes' => 'claim', 'sources' => 'source'}
# show.each_with_index do |v, i|
# show[i] = show_mapping[v] unless show_mapping[v].blank?
# end
# show
# end
end
| 32.264706 | 181 | 0.636281 |
1a72d52034c5ec2dc954721aa3a03aa8bdca858f | 2,879 | require 'test_helper'
module Nls
module EndpointTest
class TestTimeout < NlsTestCommon
def test_timeout_with_wait
skip_timeout_with_ASAN
data = {
timeout: 100,
wait: 150
}
expected_error = "NlsCancelCleanupOnTimeout : Request timeout after"
exception = assert_raises RestClient::ExceptionWithResponse do
Nls.query_get(Nls.url_test, data)
end
assert_response_has_error expected_error, exception, "Get"
exception = assert_raises RestClient::ExceptionWithResponse do
Nls.query_post(Nls.url_test, data)
end
assert_response_has_error expected_error, exception, "Post by body"
exception = assert_raises RestClient::ExceptionWithResponse do
Nls.query_post(Nls.url_test, {}, data)
end
assert_response_has_error expected_error, exception, "Post by param"
end
def test_timeout_with_infinite_loop
skip_timeout_with_ASAN
data = {
timeout: 100,
wait: "infinite"
}
expected_error = "NlsCancelCleanupOnTimeout : Request timeout after"
exception = assert_raises RestClient::ExceptionWithResponse do
Nls.query_get(Nls.url_test, data)
end
assert_response_has_error expected_error, exception, "Get"
exception = assert_raises RestClient::ExceptionWithResponse do
Nls.query_post(Nls.url_test, data)
end
assert_response_has_error expected_error, exception, "Post by body"
exception = assert_raises RestClient::ExceptionWithResponse do
Nls.query_post(Nls.url_test, {}, data)
end
assert_response_has_error expected_error, exception, "Post by param"
end
def test_stop_during_request_with_timeout
skip_timeout_with_ASAN
thr1 = Thread.new {
data = {
timeout: 200,
wait: "infinite"
}
expected_error = "NlsCancelCleanupOnTimeout : Request timeout after"
exception = assert_raises RestClient::ExceptionWithResponse do
Nls.query_get(Nls.url_test, data)
end
assert_response_has_error expected_error, exception
}
thr2 = Thread.new {
sleep(0.1)
Nls.stop
}
thr1.join
thr2.join
end
def test_stop_during_request_without_timeout
skip_timeout_with_ASAN
thr1 = Thread.new {
data = {
wait: 200
}
actual = Nls.query_get(Nls.url_test, data)
expected = {
"wait" => 200
}
assert_equal expected, actual
}
thr2 = Thread.new {
sleep(0.1)
Nls.stop
}
thr1.join
thr2.join
end
end
end
end
| 22.317829 | 78 | 0.608545 |
034d413a147ccca005d6bd79688c862d4b1f0f32 | 2,758 | #!/usr/bin/ruby
$:.unshift '../lib'
require 'test/unit'
require 'xmpp4r/callbacks'
include Jabber
class CallbacksTest < Test::Unit::TestCase
def test_test1
called = 0
cb = Callback::new(5, "toto", Proc::new { called += 1 })
assert_equal(5, cb.priority)
assert_equal("toto", cb.ref)
cb.block.call
assert_equal(1, called)
cb.block.call
assert_equal(2, called)
end
def test_callbacklist1
cbl = CallbackList::new
called1 = false
called2 = false
called3 = false
called4 = false
cbl.add(5, "ref1") { called1 = true ; true }
cbl.add(7, "ref1") { |e| called2 = true ; false}
cbl.add(9, "ref1") { called3 = true ;false }
cbl.add(11, "ref1") { called4 = true ; false }
o = "aaaa"
assert(cbl.process(o))
assert(called1)
assert(called2)
assert(called3)
assert(called4)
end
def test_callbacklist2
cbl = CallbackList::new
assert(0, cbl.length)
cbl.add(5, "ref1") { called1 = true }
assert(1, cbl.length)
cbl.add(7, "ref2") { |e| called2 = true ; e.consume }
assert(2, cbl.length)
cbl.delete("ref2")
assert(1, cbl.length)
cbl.add(9, "ref3") { called3 = true }
assert(2, cbl.length)
end
def test_callbacklist4
cbl = CallbackList::new
cbl.add(5, "ref1") { false }
cbl.add(7, "ref1") { false }
o = "o"
assert(!cbl.process(o))
end
def test_callbacklist5
cbl = CallbackList::new
cbl.add(5, "ref1") { true }
cbl.add(7, "ref1") { false }
o = "o"
assert(cbl.process(o))
end
def test_callbacklist6
cbl = CallbackList::new
ok = false
c = 'a'
d = 'b'
cbl.add(5, "ref1") { |a, b|
if a == 'a' and b == 'b'
ok = true
end
false
}
assert(!cbl.process(c, d))
assert(ok)
end
def test_callbacklist7
cbl = CallbackList::new
called1 = false
called2 = false
called3 = false
called4 = false
cbl.add(3, "ref1") { called4 = true ; true }
cbl.add(5, "ref1") { called1 = true ; true }
cbl.add(7, "ref1") { called2 = true ; 'a'}
cbl.add(9, "ref1") { called3 = true ;1 }
o = "aaaa"
assert(cbl.process(o))
assert(called1)
assert(called2)
assert(called3)
assert(!called4)
end
def test_nested
cbl = CallbackList.new
called_outer = 0
called_inner = 0
cbl.add(100, nil) {
called_outer += 1
if called_outer == 1
cbl.add(200, nil) {
called_inner += 1
}
end
}
assert_equal(0, called_inner)
assert_equal(0, called_outer)
cbl.process
assert_equal(0, called_inner)
assert_equal(1, called_outer)
cbl.process
assert_equal(1, called_inner)
assert_equal(2, called_outer)
end
end
| 21.215385 | 60 | 0.583756 |
1da14d9ea6c7faba1607b73fb430e971e5b7b2d5 | 1,153 | cask :v1 => 'clamxav' do
if MacOS.release <= :tiger
version '2.2.1'
sha256 'e075b21fe5154f31dcbde86e492531c87c67ab44ad75294d3063f32ae1e58278'
elsif MacOS.release <= :leopard
version '2.5.1'
sha256 '02a7529c74d11724e2d0e8226ac83a0d3cfb599afb354d02f6609632d69d9eb1'
else
version '2.8.8'
sha256 'd9a460ecf762cacf8ae3ef93d04ce1b223fd1ea2c54327b7bc231e8fbd516cd3'
appcast 'https://www.clamxav.com/sparkle/appcast.xml',
:sha256 => 'e68625af3cc82a17dc19f7e378b74b0e15d61dc9b16ace1cb5f0bdc308d27389'
end
url "https://www.clamxav.com/downloads/ClamXav_#{version}.dmg"
name 'ClamXav'
homepage 'https://www.clamxav.com/'
license :commercial
app 'ClamXav.app'
postflight do
suppress_move_to_applications
end
zap :delete => [
'~/Library/Caches/uk.co.markallan.clamxav',
'~/Library/Logs/clamXav-scan.log',
# todo glob/expand needed here
'~/Library/Logs/clamXav-scan.log.0.bz2',
]
caveats do
# this happens sometime after installation, but still worth warning about
files_in_usr_local
end
end
| 29.564103 | 89 | 0.685169 |
f8621de7ec4f919e2e218c896eba48832d6dd79e | 271 | FactoryBot.define do
factory :casa_org do
sequence(:name) { |n| "CASA Org #{n}" }
sequence(:display_name) { |n| "CASA Org #{n}" }
address { "123 Main St"}
footer_links { [["www.example.com", "First Link"], ["www.foobar.com", "Second Link"]] }
end
end
| 30.111111 | 91 | 0.601476 |
625285dc78aa4fd1e9eba9df45496e67954672d9 | 327 | working_directory "/home/ubuntu/wangyu/dianwaimai"
# Keep the deployment root in one place instead of repeating the literal.
app_root = "/home/ubuntu/wangyu/dianwaimai"
pid "#{app_root}/tmp/pids/unicorn.pid"
stderr_path "#{app_root}/log/unicorn_error.log"
stdout_path "#{app_root}/log/unicorn_access.log"
listen "/tmp/unicorn_call_lunch.sock"
worker_processes 2
timeout 30
e9049c7b04df752c3795f54d40acf179be0a2646 | 3,035 | Rollbar.configure do |config|
# Without configuration, Rollbar is enabled in all environments.
# To disable in specific environments, set config.enabled=false.
config.access_token = ENV['ROLLBAR_ACCESS_TOKEN']
unless Rails.env.production?
config.enabled = false
end
# By default, Rollbar will try to call the `current_user` controller method
# to fetch the logged-in user object, and then call that object's `id`,
# `username`, and `email` methods to fetch those properties. To customize:
# config.person_method = "my_current_user"
# config.person_id_method = "my_id"
# config.person_username_method = "my_username"
# config.person_email_method = "my_email"
# If you want to attach custom data to all exception and message reports,
# provide a lambda like the following. It should return a hash.
# config.custom_data_method = lambda { {:some_key => "some_value" } }
# Add exception class names to the exception_level_filters hash to
# change the level that exception is reported at. Note that if an exception
# has already been reported and logged the level will need to be changed
# via the rollbar interface.
# Valid levels: 'critical', 'error', 'warning', 'info', 'debug', 'ignore'
# 'ignore' will cause the exception to not be reported at all.
config.exception_level_filters.merge!('ActionController::RoutingError' => 'ignore')
#
# You can also specify a callable, which will be called with the exception instance.
# config.exception_level_filters.merge!('MyCriticalException' => lambda { |e| 'critical' })
# Enable asynchronous reporting (uses girl_friday or Threading if girl_friday
# is not installed)
# config.use_async = true
# Supply your own async handler:
# config.async_handler = Proc.new { |payload|
# Thread.new { Rollbar.process_from_async_handler(payload) }
# }
# Enable asynchronous reporting (using sucker_punch)
# config.use_sucker_punch
# Enable delayed reporting (using Sidekiq)
# config.use_sidekiq
# You can supply custom Sidekiq options:
# config.use_sidekiq 'queue' => 'default'
# If your application runs behind a proxy server, you can set proxy parameters here.
# If https_proxy is set in your environment, that will be used. Settings here have precedence.
# The :host key is mandatory and must include the URL scheme (e.g. 'http://'), all other fields
# are optional.
#
# config.proxy = {
# host: 'http://some.proxy.server',
# port: 80,
# user: 'username_if_auth_required',
# password: 'password_if_auth_required'
# }
# If you run your staging application instance in production environment then
# you'll want to override the environment reported by `Rails.env` with an
# environment variable like this: `ROLLBAR_ENV=staging`. This is a recommended
# setup for Heroku. See:
# https://devcenter.heroku.com/articles/deploying-to-a-custom-rails-environment
config.environment = ENV['ROLLBAR_ENV'].presence || Rails.env
config.project_gems = ['energy-sparks_analytics']
end
| 42.746479 | 97 | 0.733443 |
e836ecaf8b25036a37dd98252d1d700cb0aacb7d | 2,000 | cask 'mactex-no-gui' do
version '20170524'
sha256 '0caf76027c9e0534a0b636f2b880ace4a0463105a7ad5774ccacede761be8c2d'
# mirror.ctan.org/systems/mac/mactex was verified as official when first introduced to the cask
url "http://mirror.ctan.org/systems/mac/mactex/mactex-#{version}.pkg"
appcast 'https://www.tug.org/mactex/downloading.html',
checkpoint: '2dd3e7c71fe586512a5241f2b26c24f93af3510d2bda2f56da1a404098b894ee'
name 'MacTeX'
homepage 'https://www.tug.org/mactex/'
conflicts_with cask: [
'basictex',
'mactex',
]
depends_on formula: 'ghostscript'
depends_on macos: '>= :yosemite'
pkg "mactex-#{version}.pkg",
choices: [
{
# TeXLive
'choiceIdentifier' => 'choice1',
'choiceAttribute' => 'selected',
'attributeSetting' => 1,
},
{
# GUI-Applications
'choiceIdentifier' => 'choice2',
'choiceAttribute' => 'selected',
'attributeSetting' => 0,
},
{
# Ghostscript
'choiceIdentifier' => 'choice3',
'choiceAttribute' => 'selected',
'attributeSetting' => 0,
},
]
uninstall pkgutil: 'org.tug.mactex.texlive2017',
delete: [
'/usr/local/texlive/2017',
'/Library/PreferencePanes/TeXDistPrefPane.prefPane',
'/Library/TeX',
'/etc/paths.d/TeX',
'/etc/manpaths.d/TeX',
]
zap trash: [
'/usr/local/texlive/texmf-local',
'~/Library/texlive/2017',
],
rmdir: [
'/usr/local/texlive',
'~/Library/texlive',
]
end
| 33.898305 | 97 | 0.4755 |
4ae3b75c473a689d088d5c2fd09dc45389dae9ab | 1,433 | class ImageUploader < CarrierWave::Uploader::Base
# Include RMagick or MiniMagick support:
# include CarrierWave::RMagick
include CarrierWave::MiniMagick
# Choose what kind of storage to use for this uploader:
#storage :file
storage :aws
# Override the directory where uploaded files will be stored.
# This is a sensible default for uploaders that are meant to be mounted:
def store_dir
"uploads/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
end
# Provide a default URL as a default if there hasn't been a file uploaded:
# def default_url(*args)
# # For Rails 3.1+ asset pipeline compatibility:
# # ActionController::Base.helpers.asset_path("fallback/" + [version_name, "default.png"].compact.join('_'))
#
# "/images/fallback/" + [version_name, "default.png"].compact.join('_')
# end
# Process files as they are uploaded:
process resize_to_fill: [800, 350]
# Create different versions of your uploaded files:
# version :thumb do
# process resize_to_fit: [50, 50]
# end
# Add a white list of extensions which are allowed to be uploaded.
# For images you might use something like this:
# def extension_whitelist
# %w(jpg jpeg gif png)
# end
# Override the filename of the uploaded files:
# Avoid using model.id or version_name here, see uploader/store.rb for details.
# def filename
# "something.jpg" if original_filename
# end
end
| 31.152174 | 112 | 0.7097 |
1855b8f298f4bdccfc1c952465248a1cb4983951 | 2,549 | module ThinkingSphinx
module ActiveRecord
class SQLBuilder
attr_reader :source
def initialize(source)
@source = source
end
def sql_query
statement.to_relation.to_sql.gsub(/\n/, "\\\n")
end
def sql_query_range
return nil if source.disable_range?
statement.to_query_range_relation.to_sql
end
def sql_query_pre
query.to_query
end
private
delegate :adapter, :model, :delta_processor, :to => :source
delegate :convert_nulls, :time_zone_query_pre, :utf8_query_pre,
:cast_to_bigint, :to => :adapter
def query
Query.new(self)
end
def statement
Statement.new(self)
end
def config
ThinkingSphinx::Configuration.instance
end
def relation
model.unscoped
end
def associations
@associations ||= ThinkingSphinx::ActiveRecord::SourceJoins.call(
model, source
)
end
def quote_column(column)
model.connection.quote_column_name(column)
end
def quoted_primary_key
"#{model.quoted_table_name}.#{quote_column(source.primary_key)}"
end
def quoted_inheritance_column
"#{model.quoted_table_name}.#{quote_column(model.inheritance_column)}"
end
def pre_select
('SQL_NO_CACHE ' if source.type == 'mysql').to_s
end
def big_document_ids?
source.options[:big_document_ids] || config.settings['big_document_ids']
end
def document_id
quoted_alias = quote_column source.primary_key
column = quoted_primary_key
column = cast_to_bigint column if big_document_ids?
column = "#{column} * #{config.indices.count} + #{source.offset}"
"#{column} AS #{quoted_alias}"
end
def range_condition
condition = []
condition << "#{quoted_primary_key} BETWEEN $start AND $end" unless source.disable_range?
condition += source.conditions
condition
end
def groupings
groupings = source.groupings
if model.column_names.include?(model.inheritance_column)
groupings << quoted_inheritance_column
end
groupings
end
def model_name
klass = model.name
klass = klass.demodulize unless model.store_full_sti_class
klass
end
end
end
end
require 'thinking_sphinx/active_record/sql_builder/statement'
require 'thinking_sphinx/active_record/sql_builder/query'
| 24.04717 | 97 | 0.635936 |
f8d8183e2a3905e3f10a89b952bd74448bcb2bdc | 13,086 | # Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require 'logger'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# Pre-authenticated requests provide a way to let users access a bucket or an object without having their own credentials.
# When you create a pre-authenticated request, a unique URL is generated. Users in your organization, partners, or third
# parties can use this URL to access the targets identified in the pre-authenticated request.
# See [Using Pre-Authenticated Requests](https://docs.cloud.oracle.com/Content/Object/Tasks/usingpreauthenticatedrequests.htm).
#
# To use any of the API operations, you must be authorized in an IAM policy. If you are not authorized, talk to an
# administrator. If you are an administrator who needs to write policies to give users access, see
# [Getting Started with Policies](https://docs.cloud.oracle.com/Content/Identity/Concepts/policygetstarted.htm).
#
class ObjectStorage::Models::PreauthenticatedRequest
  # Allowed values for #bucket_listing_action; unrecognised server values
  # map to the UNKNOWN sentinel so newer API responses do not break clients.
  BUCKET_LISTING_ACTION_ENUM = [
    BUCKET_LISTING_ACTION_DENY = 'Deny'.freeze,
    BUCKET_LISTING_ACTION_LIST_OBJECTS = 'ListObjects'.freeze,
    BUCKET_LISTING_ACTION_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
  ].freeze

  # Allowed values for #access_type.
  ACCESS_TYPE_ENUM = [
    ACCESS_TYPE_OBJECT_READ = 'ObjectRead'.freeze,
    ACCESS_TYPE_OBJECT_WRITE = 'ObjectWrite'.freeze,
    ACCESS_TYPE_OBJECT_READ_WRITE = 'ObjectReadWrite'.freeze,
    ACCESS_TYPE_ANY_OBJECT_WRITE = 'AnyObjectWrite'.freeze,
    ACCESS_TYPE_ANY_OBJECT_READ = 'AnyObjectRead'.freeze,
    ACCESS_TYPE_ANY_OBJECT_READ_WRITE = 'AnyObjectReadWrite'.freeze,
    ACCESS_TYPE_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
  ].freeze

  # **[Required]** The unique identifier to use when directly addressing the pre-authenticated request.
  # @return [String]
  attr_accessor :id

  # **[Required]** The user-provided name of the pre-authenticated request.
  # @return [String]
  attr_accessor :name

  # **[Required]** The URI to embed in the URL when using the pre-authenticated request.
  # @return [String]
  attr_accessor :access_uri

  # The name of the object that is being granted access to by the pre-authenticated request. Avoid entering confidential
  # information. The object name can be null and if so, the pre-authenticated request grants access to the entire bucket.
  # Example: test/object1.log
  #
  # @return [String]
  attr_accessor :object_name

  # Specifies whether a list operation is allowed on a PAR with accessType \"AnyObjectRead\" or \"AnyObjectReadWrite\".
  # Deny: Prevents the user from performing a list operation.
  # ListObjects: Authorizes the user to perform a list operation.
  #
  # @return [String]
  attr_reader :bucket_listing_action

  # **[Required]** The operation that can be performed on this resource.
  # @return [String]
  attr_reader :access_type

  # **[Required]** The expiration date for the pre-authenticated request as per [RFC 3339](https://tools.ietf.org/html/rfc3339). After
  # this date the pre-authenticated request will no longer be valid.
  #
  # @return [DateTime]
  attr_accessor :time_expires

  # **[Required]** The date when the pre-authenticated request was created as per specification
  # [RFC 3339](https://tools.ietf.org/html/rfc3339).
  #
  # @return [DateTime]
  attr_accessor :time_created

  # Attribute mapping from ruby-style variable name to JSON key.
  def self.attribute_map
    {
      # rubocop:disable Style/SymbolLiteral
      'id': :'id',
      'name': :'name',
      'access_uri': :'accessUri',
      'object_name': :'objectName',
      'bucket_listing_action': :'bucketListingAction',
      'access_type': :'accessType',
      'time_expires': :'timeExpires',
      'time_created': :'timeCreated'
      # rubocop:enable Style/SymbolLiteral
    }
  end

  # Attribute type mapping.
  def self.swagger_types
    {
      # rubocop:disable Style/SymbolLiteral
      'id': :'String',
      'name': :'String',
      'access_uri': :'String',
      'object_name': :'String',
      'bucket_listing_action': :'String',
      'access_type': :'String',
      'time_expires': :'DateTime',
      'time_created': :'DateTime'
      # rubocop:enable Style/SymbolLiteral
    }
  end

  # rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
  # rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral

  # Initializes the object. Each property accepts either its camelCase JSON
  # key or its snake_case ruby name, but not both at once.
  # @param [Hash] attributes Model attributes in the form of hash
  # @option attributes [String] :id The value to assign to the {#id} property
  # @option attributes [String] :name The value to assign to the {#name} property
  # @option attributes [String] :access_uri The value to assign to the {#access_uri} property
  # @option attributes [String] :object_name The value to assign to the {#object_name} property
  # @option attributes [String] :bucket_listing_action The value to assign to the {#bucket_listing_action} property
  # @option attributes [String] :access_type The value to assign to the {#access_type} property
  # @option attributes [DateTime] :time_expires The value to assign to the {#time_expires} property
  # @option attributes [DateTime] :time_created The value to assign to the {#time_created} property
  def initialize(attributes = {})
    return unless attributes.is_a?(Hash)

    # convert string to symbol for hash key
    attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }

    self.id = attributes[:'id'] if attributes[:'id']
    self.name = attributes[:'name'] if attributes[:'name']
    self.access_uri = attributes[:'accessUri'] if attributes[:'accessUri']

    raise 'You cannot provide both :accessUri and :access_uri' if attributes.key?(:'accessUri') && attributes.key?(:'access_uri')

    self.access_uri = attributes[:'access_uri'] if attributes[:'access_uri']
    self.object_name = attributes[:'objectName'] if attributes[:'objectName']

    raise 'You cannot provide both :objectName and :object_name' if attributes.key?(:'objectName') && attributes.key?(:'object_name')

    self.object_name = attributes[:'object_name'] if attributes[:'object_name']
    self.bucket_listing_action = attributes[:'bucketListingAction'] if attributes[:'bucketListingAction']

    raise 'You cannot provide both :bucketListingAction and :bucket_listing_action' if attributes.key?(:'bucketListingAction') && attributes.key?(:'bucket_listing_action')

    self.bucket_listing_action = attributes[:'bucket_listing_action'] if attributes[:'bucket_listing_action']
    self.access_type = attributes[:'accessType'] if attributes[:'accessType']

    raise 'You cannot provide both :accessType and :access_type' if attributes.key?(:'accessType') && attributes.key?(:'access_type')

    self.access_type = attributes[:'access_type'] if attributes[:'access_type']
    self.time_expires = attributes[:'timeExpires'] if attributes[:'timeExpires']

    raise 'You cannot provide both :timeExpires and :time_expires' if attributes.key?(:'timeExpires') && attributes.key?(:'time_expires')

    self.time_expires = attributes[:'time_expires'] if attributes[:'time_expires']
    self.time_created = attributes[:'timeCreated'] if attributes[:'timeCreated']

    raise 'You cannot provide both :timeCreated and :time_created' if attributes.key?(:'timeCreated') && attributes.key?(:'time_created')

    self.time_created = attributes[:'time_created'] if attributes[:'time_created']
  end
  # rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
  # rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral

  # Custom attribute writer method checking allowed values (enum).
  # Unknown values are logged and coerced to the UNKNOWN sentinel.
  # @param [Object] bucket_listing_action Object to be assigned
  def bucket_listing_action=(bucket_listing_action)
    # rubocop:disable Style/ConditionalAssignment
    if bucket_listing_action && !BUCKET_LISTING_ACTION_ENUM.include?(bucket_listing_action)
      OCI.logger.debug("Unknown value for 'bucket_listing_action' [" + bucket_listing_action + "]. Mapping to 'BUCKET_LISTING_ACTION_UNKNOWN_ENUM_VALUE'") if OCI.logger
      @bucket_listing_action = BUCKET_LISTING_ACTION_UNKNOWN_ENUM_VALUE
    else
      @bucket_listing_action = bucket_listing_action
    end
    # rubocop:enable Style/ConditionalAssignment
  end

  # Custom attribute writer method checking allowed values (enum).
  # Unknown values are logged and coerced to the UNKNOWN sentinel.
  # @param [Object] access_type Object to be assigned
  def access_type=(access_type)
    # rubocop:disable Style/ConditionalAssignment
    if access_type && !ACCESS_TYPE_ENUM.include?(access_type)
      OCI.logger.debug("Unknown value for 'access_type' [" + access_type + "]. Mapping to 'ACCESS_TYPE_UNKNOWN_ENUM_VALUE'") if OCI.logger
      @access_type = ACCESS_TYPE_UNKNOWN_ENUM_VALUE
    else
      @access_type = access_type
    end
    # rubocop:enable Style/ConditionalAssignment
  end

  # rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines

  # Checks equality by comparing each attribute.
  # @param [Object] other the other object to be compared
  def ==(other)
    return true if equal?(other)

    self.class == other.class &&
      id == other.id &&
      name == other.name &&
      access_uri == other.access_uri &&
      object_name == other.object_name &&
      bucket_listing_action == other.bucket_listing_action &&
      access_type == other.access_type &&
      time_expires == other.time_expires &&
      time_created == other.time_created
  end
  # rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines

  # @see the `==` method
  # @param [Object] other the other object to be compared
  def eql?(other)
    self == other
  end

  # rubocop:disable Metrics/AbcSize, Layout/EmptyLines

  # Calculates hash code according to all attributes (kept consistent with #==).
  # @return [Fixnum] Hash code
  def hash
    [id, name, access_uri, object_name, bucket_listing_action, access_type, time_expires, time_created].hash
  end
  # rubocop:enable Metrics/AbcSize, Layout/EmptyLines

  # rubocop:disable Metrics/AbcSize, Layout/EmptyLines

  # Builds the object from hash
  # @param [Hash] attributes Model attributes in the form of hash
  # @return [Object] Returns the model itself
  def build_from_hash(attributes)
    return nil unless attributes.is_a?(Hash)

    self.class.swagger_types.each_pair do |key, type|
      if type =~ /^Array<(.*)>/i
        # check to ensure the input is an array given that the attribute
        # is documented as an array but the input is not
        if attributes[self.class.attribute_map[key]].is_a?(Array)
          public_method("#{key}=").call(
            attributes[self.class.attribute_map[key]]
              .map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
          )
        end
      elsif !attributes[self.class.attribute_map[key]].nil?
        public_method("#{key}=").call(
          OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
        )
      end
      # or else data not found in attributes(hash), not an issue as the data can be optional
    end

    self
  end
  # rubocop:enable Metrics/AbcSize, Layout/EmptyLines

  # Returns the string representation of the object
  # @return [String] String presentation of the object
  def to_s
    to_hash.to_s
  end

  # Returns the object in the form of hash
  # @return [Hash] Returns the object in the form of hash
  def to_hash
    hash = {}
    self.class.attribute_map.each_pair do |attr, param|
      value = public_method(attr).call
      next if value.nil? && !instance_variable_defined?("@#{attr}")

      hash[param] = _to_hash(value)
    end
    hash
  end

  private

  # Outputs non-array value in the form of hash
  # For object, use to_hash. Otherwise, just return the value
  # @param [Object] value Any valid value
  # @return [Hash] Returns the value in the form of hash
  def _to_hash(value)
    if value.is_a?(Array)
      value.compact.map { |v| _to_hash(v) }
    elsif value.is_a?(Hash)
      {}.tap do |hash|
        value.each { |k, v| hash[k] = _to_hash(v) }
      end
    elsif value.respond_to? :to_hash
      value.to_hash
    else
      value
    end
  end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 42.764706 | 245 | 0.698304 |
1c495563e85631c3ea4b839071fe37a30169b664 | 230 | class AddColumnOrganizationalUnitsToLdapConfigs < ActiveRecord::Migration[6.0]
# Adds the attributes used to resolve a user's organizational unit via LDAP.
def change
  change_table :ldap_configs do |table|
    table.string :organizational_unit_attribute
    table.string :organizational_unit
  end
end
end
| 25.555556 | 78 | 0.765217 |
4a0e5cdf80abcc75ba44c560841fb47c2cda742c | 1,297 | # Copyright (C) 2018-2019 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Mongo
module Operation
class ListCollections
  # A MongoDB listcollections operation sent as a command message.
  #
  # @api private
  #
  # @since 2.5.2
  class Command
    include Specifiable
    include Executable
    include Limited
    include ReadPreferenceSupported
    include PolymorphicResult

    private

    # Command document: merges any user-supplied selector with
    # `listCollections: 1`, filtering out names matching `system.` or
    # containing `$` (internal/system collections).
    def selector(server)
      (spec[SELECTOR] || {}).merge(
        listCollections: 1, filter: { name: { '$not' => /system\.|\$/ }}
      )
    end

    # Wraps the command in a legacy OP_QUERY wire-protocol message against
    # the database's $cmd collection.
    def message(server)
      Protocol::Query.new(db_name, Database::COMMAND, command(server), options(server))
    end
  end
end
end
end
| 28.195652 | 91 | 0.655359 |
edd0507ada90609cc286375f5bee08312534eed1 | 4,484 | # Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
module OCI
  module Dns
    # Module containing models for requests made to, and responses received from,
    # OCI Dns services
    module Models
    end
  end
end
# Require models
require 'oci/dns/models/attached_view'
require 'oci/dns/models/attached_view_details'
require 'oci/dns/models/change_resolver_compartment_details'
require 'oci/dns/models/change_steering_policy_compartment_details'
require 'oci/dns/models/change_tsig_key_compartment_details'
require 'oci/dns/models/change_view_compartment_details'
require 'oci/dns/models/change_zone_compartment_details'
require 'oci/dns/models/create_migrated_dynect_zone_details'
require 'oci/dns/models/create_resolver_endpoint_details'
require 'oci/dns/models/create_resolver_vnic_endpoint_details'
require 'oci/dns/models/create_steering_policy_attachment_details'
require 'oci/dns/models/create_steering_policy_details'
require 'oci/dns/models/create_tsig_key_details'
require 'oci/dns/models/create_view_details'
require 'oci/dns/models/create_zone_base_details'
require 'oci/dns/models/create_zone_details'
require 'oci/dns/models/dynect_migration_details'
require 'oci/dns/models/external_master'
require 'oci/dns/models/migration_replacement'
require 'oci/dns/models/nameserver'
require 'oci/dns/models/patch_domain_records_details'
require 'oci/dns/models/patch_rr_set_details'
require 'oci/dns/models/patch_zone_records_details'
require 'oci/dns/models/rr_set'
require 'oci/dns/models/record'
require 'oci/dns/models/record_collection'
require 'oci/dns/models/record_details'
require 'oci/dns/models/record_operation'
require 'oci/dns/models/resolver'
require 'oci/dns/models/resolver_endpoint'
require 'oci/dns/models/resolver_endpoint_summary'
require 'oci/dns/models/resolver_forward_rule'
require 'oci/dns/models/resolver_forward_rule_details'
require 'oci/dns/models/resolver_rule'
require 'oci/dns/models/resolver_rule_details'
require 'oci/dns/models/resolver_summary'
require 'oci/dns/models/resolver_vnic_endpoint'
require 'oci/dns/models/resolver_vnic_endpoint_summary'
require 'oci/dns/models/scope'
require 'oci/dns/models/sort_order'
require 'oci/dns/models/steering_policy'
require 'oci/dns/models/steering_policy_answer'
require 'oci/dns/models/steering_policy_attachment'
require 'oci/dns/models/steering_policy_attachment_summary'
require 'oci/dns/models/steering_policy_filter_answer_data'
require 'oci/dns/models/steering_policy_filter_rule'
require 'oci/dns/models/steering_policy_filter_rule_case'
require 'oci/dns/models/steering_policy_health_rule'
require 'oci/dns/models/steering_policy_health_rule_case'
require 'oci/dns/models/steering_policy_limit_rule'
require 'oci/dns/models/steering_policy_limit_rule_case'
require 'oci/dns/models/steering_policy_priority_answer_data'
require 'oci/dns/models/steering_policy_priority_rule'
require 'oci/dns/models/steering_policy_priority_rule_case'
require 'oci/dns/models/steering_policy_rule'
require 'oci/dns/models/steering_policy_summary'
require 'oci/dns/models/steering_policy_weighted_answer_data'
require 'oci/dns/models/steering_policy_weighted_rule'
require 'oci/dns/models/steering_policy_weighted_rule_case'
require 'oci/dns/models/tsig_key'
require 'oci/dns/models/tsig_key_summary'
require 'oci/dns/models/update_domain_records_details'
require 'oci/dns/models/update_rr_set_details'
require 'oci/dns/models/update_resolver_details'
require 'oci/dns/models/update_resolver_endpoint_details'
require 'oci/dns/models/update_resolver_vnic_endpoint_details'
require 'oci/dns/models/update_steering_policy_attachment_details'
require 'oci/dns/models/update_steering_policy_details'
require 'oci/dns/models/update_tsig_key_details'
require 'oci/dns/models/update_view_details'
require 'oci/dns/models/update_zone_details'
require 'oci/dns/models/update_zone_records_details'
require 'oci/dns/models/view'
require 'oci/dns/models/view_summary'
require 'oci/dns/models/zone'
require 'oci/dns/models/zone_summary'
require 'oci/dns/models/zone_transfer_server'
# Require generated clients
require 'oci/dns/dns_client'
require 'oci/dns/dns_client_composite_operations'
# Require service utilities
require 'oci/dns/util'
| 45.755102 | 245 | 0.843889 |
ed5a76833ccdef7e9fd8ed020c3349b890c8f29a | 143 | # Be sure to restart your server when you modify this file.
# Keep session data in a cookie under this application-specific key.
Rails.application.config.session_store :cookie_store, key: '_test_server_session'
| 35.75 | 81 | 0.811189 |
4a0471e65a817b5ad9ebf55bc8bacd6e1fcf1f74 | 2,165 | =begin
#DialMyCalls API
#The DialMyCalls API
OpenAPI spec version: 2.0.1
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for DialMyCalls::Keyword
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
# Specs for the generated DialMyCalls::Keyword model: construction plus
# placeholder examples for each attribute.
describe 'Keyword' do
  before do
    # run before each test
    @instance = DialMyCalls::Keyword.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of Keyword' do
    # Typo fix: example description previously read "instact".
    it 'should create an instance of Keyword' do
      expect(@instance).to be_instance_of(DialMyCalls::Keyword)
    end
  end

  describe 'test attribute "id"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "keyword"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "status"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "created_at"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "updated_at"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
| 27.75641 | 103 | 0.733025 |
f7067fca15488870c630b8ff960119f966dcf90d | 4,018 | require 'ndr_import/table'
require 'ndr_import/file/registry'
require 'json'
require 'pry'
require 'csv'
# folder = File.expand_path('../', __dir__)
# $LOAD_PATH.unshift(folder) unless $LOAD_PATH.include?(folder)
module Import
module Colorectal
module Core
# Reads in pseudonymised BRCA data, then invokes processing method on each
# record. This base can be extended so that processing is the full importer,
# or only the corrections preprocessor and a csv writer
# Reads in pseudonymised colorectal data, then invokes a processing method on
# each record. Subclasses provide #process_records, so this base can back the
# full importer or only the corrections preprocessor and a csv writer.
class ColorectalBase
  # filename - path to the pseudonymised source file
  # batch    - batch record supplying provider / e_type / original_filename
  def initialize(filename, batch)
    @filename = filename
    @batch = batch
    @logger = Log.get_logger(batch.original_filename, batch.provider)
    @logger.info "Initialized import for #{@filename}" unless Rails.env.test?
    @logger.debug 'Available fields are: ' unless Rails.env.test?
    fw = Import::Utility::PseudonymisedFileWrapper.new(@filename)
    fw.process
    # The field listing below is logging only, so skip it in the test env.
    return if Rails.env.test?

    fw.available_fields.each { |field| @logger.debug "\t#{field}" }
  end

  # Runs the mapped tables through the importer. Manchester ('R0A') files
  # need their rows grouped per report first, so they take a separate path.
  def load
    # ensure_file_not_already_loaded # TODO: PUT THIS BACK, TESTING ONLY
    tables = NdrImport::File::Registry.tables(@filename, table_mapping.try(:format), {})
    return load_manchester(tables) if 'R0A' == @batch.provider

    # Enumerate over the tables
    # Under normal circumstances, there will only be one table
    tables.each do |_tablename, table_content|
      table_mapping.transform(table_content).each do |klass, fields, _index|
        build_and_process_records(klass, fields)
      end
    end
  end

  # Groups Manchester rows that share pseudo ids, service report identifier
  # and authorised date into a single normalised record before processing.
  def load_manchester(tables)
    tables.each do |_tablename, table_content|
      mapped_table = table_mapping.transform(table_content)
      # Ignore the first row, it doesn't contain data
      grouped_records_by_linkage = mapped_table.to_a[1..-1].group_by do |_klass, fields, _i|
        grouping = fields.values_at('pseudo_id1', 'pseudo_id2')
        rawtext = JSON.parse(fields['rawtext_clinical.to_json'])
        grouping << rawtext['servicereportidentifier']
        grouping << rawtext['authoriseddate']
        grouping
      end
      cleaned_records = []
      # From each set of grouped records, build a normalised record
      grouped_records_by_linkage.each do |_linkage, records|
        cleaned_records << [records.first.first, grouped_rawtext_record_from(records)]
      end
      cleaned_records.each { |klass, fields| build_and_process_records(klass, fields) }
    end
  end

  private

  # `records` is an array of many [klass, fields, index]; returns one fields
  # hash whose rawtext is the JSON array of every grouped record's rawtext.
  def grouped_rawtext_record_from(records)
    # Use the first record's `fields` as a starting point
    fields = records.first[1].dup
    raw_records_array = records.map { |record| record[1] }
    rawtext_clinical_array = raw_records_array.map do |raw_record|
      JSON.parse(raw_record['rawtext_clinical.to_json'])
    end
    fields['rawtext_clinical.to_json'] = rawtext_clinical_array.to_json
    fields
  end

  # Wraps the subclass-provided #process_records in a single transaction.
  def build_and_process_records(klass, fields)
    Pseudo::Ppatient.transaction { process_records(klass, fields) }
  end

  # Lazily opened handle on the source file, or nil if it does not exist.
  def file
    @file ||= SafeFile.exist?(@filename) ? SafeFile.new(@filename, 'r') : nil
  end

  # Load the required mapping file based on @batch.e_type
  # NOTE(review): this colorectal importer maps PSMOLE to 'brca_mapping.yml' —
  # confirm this is intentional rather than carried over from the BRCA importer.
  def table_mapping
    # TODO: Decide on e_type names
    mapping_file = case @batch.e_type
                   when 'PSMOLE'
                     'brca_mapping.yml'
                   else
                     raise "No mapping found for #{@batch.e_type}"
                   end
    YAML.load_file(SafePath.new('mappings_config').join(mapping_file))
  end
end
end
end
end
| 38.634615 | 98 | 0.614485 |
3383ffb1e0fc6e25352b430b628218daf6a222bb | 1,298 | # tests the Near Real Time support in the :updates => true mode
require 'test_helper'
# Exercises near-real-time index maintenance: creating, updating and
# destroying a record should be reflected in search after a refresh.
class NrtImmediate < Test::Unit::TestCase
  load_schema
  resque_available

  class User < ActiveRecord::Base
    elastic_index
  end

  # Rebuilds the index with five users; @tim is kept for the mutation tests.
  def setup
    User.delete_all
    User.delete_index
    Escargot::LocalIndexing.create_index_for_model(User)
    @tim = User.create(:name => 'Tim the Wise')
    User.create(:name => 'Peter the Young')
    User.create(:name => 'Peter the Old')
    User.create(:name => 'Bob the Skinny')
    User.create(:name => 'Jamie the Flying Machine')
  end

  def teardown
    User.delete_all
    User.delete_index
  end

  # All five documents become searchable once the index is refreshed.
  def test_document_creation
    User.refresh_index
    assert_equal 5, User.search_count
    results = User.search("wise")
    assert_equal results.total_entries, 1
    assert_equal results.first.name, 'Tim the Wise'
  end

  # Saving a change re-indexes the document under its new terms.
  def test_document_updates
    # make a change in a document
    @tim.name = 'Tim the Reborn'
    @tim.save!
    User.refresh_index
    assert_equal User.search_count("wise"), 0
    assert_equal User.search_count("reborn"), 1
  end

  # Destroying a record removes its document from the index.
  def test_document_deletes
    User.refresh_index
    assert_equal 5, User.search_count
    @tim.destroy
    User.refresh_index
    assert_equal 4, User.search_count
  end
end
f8bdf85309215b4d2b4013fcecd44d9ade3c305b | 866 | require "helper"
require "digest/md5"
class TestTextcaptcha < Test::Unit::TestCase
  # Obtaining a captcha yields a non-empty question and an array of answers.
  def test_obtain
    response = Textcaptcha.obtain
    assert_equal false, response[:question].nil?
    assert_equal true, response[:question].length > 0
    assert_equal 'Array', response[:answers].class.to_s
    assert_equal true, response[:answers].length > 0
  end

  # A second consecutive obtain still returns a usable question.
  def test_repeated_obtain
    response = Textcaptcha.obtain
    response = Textcaptcha.obtain
    assert_equal false, response[:question].nil?
  end

  # valid? matches an answer against MD5 digests of the accepted answers;
  # note that non-string input (4 / 3) is also accepted.
  def test_valid?
    answers = [ Digest::MD5.hexdigest('three'), Digest::MD5.hexdigest('3') ]
    assert_equal false, Textcaptcha.valid?('four', answers)
    assert_equal false, Textcaptcha.valid?(4, answers)
    assert_equal true, Textcaptcha.valid?('three', answers)
    assert_equal true, Textcaptcha.valid?('3', answers)
    assert_equal true, Textcaptcha.valid?(3, answers)
  end
end
| 27.935484 | 74 | 0.750577 |
ffb6b8f748870491331d48015e3b6f8c6a5e3608 | 430 | Capybara.javascript_driver = :webkit
# Host prefixed onto asset paths in saved pages, so save_and_open_page
# renders with the locally served assets.
Capybara.asset_host = "http://localhost:3000"
# Debugging shortcuts mixed into feature specs (see RSpec.configure below).
module CapybaraHelper
  # Saves the current page's HTML and opens it in a browser.
  def page!
    save_and_open_page
  end

  # Saves a screenshot of the current page and opens it.
  def screenshot!
    save_and_open_screenshot
  end

  # Clicks <body> to move focus away from the currently focused element.
  def blur!
    page.find('body').click
  end
end
Capybara::Webkit.configure do |config|
  # Permit requests to Google Fonts; other external URLs remain blocked.
  config.allow_url(
    %w(fonts.googleapis.com)
  )
end

RSpec.configure do |config|
  # Expose the helper shortcuts above in feature specs only.
  config.include CapybaraHelper, type: :feature
end
| 15.925926 | 47 | 0.732558 |
abff597cbff545d15ae57a4d9af6bb4bf348f848 | 196 | class AddDestroyByDateToOffering < ActiveRecord::Migration
# NOTE(review): despite the migration name ("DestroyByDate"), the column is
# created as a string — confirm a date/datetime type was not intended.
def self.up
  add_column :offerings, :destroy_by, :string
end
# Reverts the migration by dropping the destroy_by column.
def self.down
  remove_column :offerings, :destroy_by
end
end
| 19.6 | 58 | 0.755102 |
bfc5df08fede2cfca2ceaff6135a055dc841513d | 1,624 | require 'rails_helper'
describe CampaignListener do
  let(:listener) { described_class.instance }
  let(:account) { create(:account) }
  let(:inbox) { create(:inbox, account: account) }
  let(:contact) { create(:contact, account: account, identifier: '123') }
  let(:contact_inbox) { create(:contact_inbox, contact: contact, inbox: inbox) }
  let(:campaign) { create(:campaign, inbox: inbox, account: account) }
  # Event carrying the campaign's display id in event_info, as dispatched
  # when a campaign fires for a contact inbox.
  let!(:event) do
    Events::Base.new('campaign_triggered', Time.zone.now,
                     contact_inbox: contact_inbox, event_info: { campaign_id: campaign.display_id })
  end

  describe '#campaign_triggered' do
    let(:builder) { double }

    before do
      # Stub out the builder — these examples only assert it is (not) invoked.
      allow(Campaigns::CampaignConversationBuilder).to receive(:new).and_return(builder)
      allow(builder).to receive(:perform)
    end

    context 'when params contain campaign id' do
      it 'triggers campaign conversation builder' do
        expect(Campaigns::CampaignConversationBuilder).to receive(:new)
          .with({ contact_inbox_id: contact_inbox.id, campaign_display_id: campaign.display_id, conversation_additional_attributes: {} }).once
        listener.campaign_triggered(event)
      end
    end

    context 'when params does not contain campaign id' do
      it 'does not trigger campaign conversation builder' do
        event = Events::Base.new('campaign_triggered', Time.zone.now,
                                 contact_inbox: contact_inbox, event_info: {})
        expect(Campaigns::CampaignConversationBuilder).to receive(:new).exactly(0).times
        listener.campaign_triggered(event)
      end
    end
  end
end
| 39.609756 | 142 | 0.691502 |
0149d3cb6ea62b3e1390e583c86a03db7e00fc50 | 238 | class CreateComments < ActiveRecord::Migration[6.0]
# Creates the comments table: commenter name, body text, and owning post.
def change
  create_table :comments do |table|
    table.string :username
    table.text :body
    table.references :post, null: false, foreign_key: true

    table.timestamps
  end
end
end
| 19.833333 | 56 | 0.659664 |
1d9e6c2d0bcebeee16f0d481ff265e12384a94bd | 615 | cask 'texworks' do
# Version encodes release,build-timestamp:git-sha; the url below splits it
# back apart with before_comma / before_colon / after_colon.
version '0.6.3,201903161730:a2470ca'
sha256 '618d16f4bf915e3d73d354263fa4b557dbb165d6868b414ecc47973ddc92a986'

# github.com/TeXworks/texworks was verified as official when first introduced to the cask
url "https://github.com/TeXworks/texworks/releases/download/release-#{version.before_comma}/TeXworks-osx-#{version.before_comma}-#{version.after_comma.before_colon}-git_#{version.after_colon}.dmg"
appcast 'https://github.com/TeXworks/texworks/releases.atom'
name 'TeXworks'
homepage 'https://www.tug.org/texworks/'

depends_on macos: '>= :high_sierra'

app 'TeXworks.app'
end
| 41 | 198 | 0.782114 |
1cc29d3410661be9c928b2e7ebf5b95e0a8eee97 | 2,043 | require 'yaml'
require 'json'
require 'yaml/store'
module Braid
# Persistent store of braid mirrors, keyed by path. The on-disk format is
# pretty-printed JSON; legacy configurations written as a YAML::Store are
# still readable.
class Config
  class PathAlreadyInUse < BraidError
    def message
      "path already in use: #{super}"
    end
  end

  class MirrorDoesNotExist < BraidError
    def message
      "mirror does not exist: #{super}"
    end
  end

  # Loads the mirror database from +config_file+, trying the legacy
  # YAML::Store layout first and falling back to plain JSON.
  def initialize(config_file = CONFIG_FILE)
    @config_file = config_file
    begin
      store = YAML::Store.new(@config_file)
      @db = {}
      store.transaction(true) do
        store.roots.each do |path|
          @db[path] = store[path]
        end
      end
    rescue StandardError
      # Bug fix: JSON.parse was previously handed the file *name* rather
      # than its contents, so the JSON fallback could never succeed.
      @db = JSON.parse(File.read(@config_file))
    end
  end

  # Builds a Mirror from +url+/+options+, registers it, and returns it.
  def add_from_options(url, options)
    mirror = Mirror.new_from_options(url, options)

    add(mirror)
    mirror
  end

  # All registered mirror paths.
  def mirrors
    @db.keys
  end

  # Returns the Mirror at +path+ (trailing slash ignored), or nil.
  def get(path)
    key = path.to_s.sub(/\/$/, '')
    attributes = @db[key]
    return attributes ? Mirror.new(path, attributes) : nil
  end

  # Like #get, but raises MirrorDoesNotExist when the path is unknown.
  def get!(path)
    mirror = get(path)
    raise MirrorDoesNotExist, path unless mirror
    mirror
  end

  # Registers +mirror+; raises PathAlreadyInUse on a duplicate path.
  def add(mirror)
    raise PathAlreadyInUse, mirror.path if get(mirror.path)
    write_mirror(mirror)
  end

  # Removes +mirror+ and persists the change.
  def remove(mirror)
    @db.delete(mirror.path)
    write_db
  end

  # Replaces the stored attributes of an existing mirror.
  def update(mirror)
    raise MirrorDoesNotExist, mirror.path unless get(mirror.path)
    @db.delete(mirror.path)
    write_mirror(mirror)
  end

  private

  # Stores the mirror's (cleaned) attributes and rewrites the file.
  def write_mirror(mirror)
    @db[mirror.path] = clean_attributes(mirror.attributes)
    write_db
  end

  # Serialises the database as pretty-printed JSON sorted by path, keeping
  # only the attributes Braid::Mirror recognises.
  def write_db
    new_db = {}
    @db.keys.sort.each do |key|
      new_db[key] = @db[key]
      new_db[key].keys.each do |k|
        new_db[key].delete(k) if !Braid::Mirror::ATTRIBUTES.include?(k)
      end
    end
    File.open(@config_file, "wb") do |f|
      f.write JSON.pretty_generate(new_db)
      f.write "\n"
    end
  end

  # Drops nil-valued attributes before persisting.
  def clean_attributes(hash)
    hash.reject { |k, v| v.nil? }
  end
end
end
| 21.061856 | 73 | 0.586882 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.